Mirror of https://git.savannah.gnu.org/git/guile.git (synced 2025-04-30 03:40:34 +02:00)
Add Boucher's `lalr-scm' as the `(system base lalr)' module.

Taken from r51 of <http://lalr-scm.googlecode.com/svn/trunk>.

* module/Makefile.am (SYSTEM_BASE_SOURCES): Add `system/base/lalr.scm'.
  (NOCOMP_SOURCES): Add `system/base/lalr.upstream.scm'.
* module/system/base/lalr.scm, module/system/base/lalr.upstream.scm: New
  files.
* test-suite/Makefile.am (LALR_TESTS, LALR_EXTRA, TESTS,
  TESTS_ENVIRONMENT): New variables.
  (EXTRA_DIST): Add $(LALR_EXTRA).
* test-suite/lalr/common-test.scm, test-suite/lalr/glr-test.scm,
  test-suite/lalr/test-glr-associativity.scm,
  test-suite/lalr/test-glr-basics-01.scm,
  test-suite/lalr/test-glr-basics-02.scm,
  test-suite/lalr/test-glr-basics-03.scm,
  test-suite/lalr/test-glr-basics-04.scm,
  test-suite/lalr/test-glr-basics-05.scm,
  test-suite/lalr/test-glr-script-expression.scm,
  test-suite/lalr/test-glr-single-expressions.scm,
  test-suite/lalr/test-lr-associativity-01.scm,
  test-suite/lalr/test-lr-associativity-02.scm,
  test-suite/lalr/test-lr-associativity-03.scm,
  test-suite/lalr/test-lr-associativity-04.scm,
  test-suite/lalr/test-lr-basics-01.scm,
  test-suite/lalr/test-lr-basics-02.scm,
  test-suite/lalr/test-lr-basics-03.scm,
  test-suite/lalr/test-lr-basics-04.scm,
  test-suite/lalr/test-lr-basics-05.scm,
  test-suite/lalr/test-lr-error-recovery-01.scm,
  test-suite/lalr/test-lr-error-recovery-02.scm,
  test-suite/lalr/test-lr-no-clause.scm,
  test-suite/lalr/test-lr-script-expression.scm,
  test-suite/lalr/test-lr-single-expressions.scm: New files.
parent 3ffd1ba96e
commit 1b10152215
29 changed files with 3759 additions and 1 deletions
module/Makefile.am

@@ -165,11 +165,12 @@ SCRIPTS_SOURCES = \
   scripts/read-rfc822.scm \
   scripts/snarf-guile-m4-docs.scm
 
 SYSTEM_BASE_SOURCES = \
   system/base/pmatch.scm \
   system/base/syntax.scm \
   system/base/compile.scm \
   system/base/language.scm \
+  system/base/lalr.scm \
   system/base/message.scm
 
 ICE_9_SOURCES = \

@@ -316,6 +317,7 @@ NOCOMP_SOURCES = \
   ice-9/gds-client.scm \
   ice-9/psyntax.scm \
   ice-9/quasisyntax.scm \
+  system/base/lalr.upstream.scm \
   system/repl/describe.scm \
   ice-9/debugger/command-loop.scm \
   ice-9/debugger/commands.scm \
45  module/system/base/lalr.scm  Normal file
@@ -0,0 +1,45 @@
;;; -*- mode: scheme; coding: utf-8; -*-
;;;
;;; Copyright (C) 2010 Free Software Foundation, Inc.
;;;
;;; This library is free software; you can redistribute it and/or modify it
;;; under the terms of the GNU Lesser General Public License as published by
;;; the Free Software Foundation; either version 3 of the License, or (at
;;; your option) any later version.
;;;
;;; This library is distributed in the hope that it will be useful, but
;;; WITHOUT ANY WARRANTY; without even the implied warranty of
;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU Lesser
;;; General Public License for more details.
;;;
;;; You should have received a copy of the GNU Lesser General Public License
;;; along with this program.  If not, see <http://www.gnu.org/licenses/>.

(define-module (system base lalr)

  ;; XXX: In theory this import is not needed but the evaluator (not the
  ;; compiler) complains about `lexical-token' being unbound when expanding
  ;; `(define-record-type lexical-token ...)' if we omit it.
  #:use-module (srfi srfi-9)

  #:export (lalr-parser print-states

            make-lexical-token lexical-token?
            lexical-token-category
            lexical-token-source
            lexical-token-value

            make-source-location source-location?
            source-location-input
            source-location-line
            source-location-column
            source-location-offset
            source-location-length

            ;; `lalr-parser' is a defmacro, which produces code that refers to
            ;; these drivers.
            lr-driver glr-driver))

;; The LALR parser generator was written by Dominique Boucher.  It's available
;; from http://code.google.com/p/lalr-scm/ and released under the LGPLv3+.
(include-from-path "system/base/lalr.upstream.scm")
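For orientation, here is a minimal usage sketch of the interface exported above, following the conventions of the test suite added below; it is an illustration only, not part of the commit, and the names `calc', `tokens->lexer', `NUM', `PLUS' and `expr' are made up for the example.  `lalr-parser' takes the conflict expectation, the list of terminal categories, and the grammar rules with semantic actions; the lexer is a thunk returning `lexical-token' records, ending with a token whose category is `*eoi*'; the generated parser is called with the lexer and an error handler.

;; Usage sketch (illustration only): a tiny grammar accepting `NUM PLUS NUM'
;; or a single `NUM', returning the sum, with the default LR driver.
(use-modules (system base lalr))

(define calc
  (lalr-parser
   (expect: 0)                          ; no shift/reduce conflicts expected
   (NUM PLUS)                           ; terminal categories
   (expr (NUM PLUS NUM) : (+ $1 $3)
         (NUM)          : $1)))

(define (tokens->lexer tokens)
  ;; Same convention as `make-lexer' in common-test.scm below.
  (lambda ()
    (if (null? tokens)
        (make-lexical-token '*eoi* #f #f)
        (let ((t (car tokens)))
          (set! tokens (cdr tokens))
          t))))

(calc (tokens->lexer (list (make-lexical-token 'NUM #f 1)
                           (make-lexical-token 'PLUS #f '+)
                           (make-lexical-token 'NUM #f 2)))
      (lambda (message . args) (error message args)))
;; => 3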
2077  module/system/base/lalr.upstream.scm  Executable file

File diff suppressed because it is too large.
test-suite/Makefile.am

@@ -120,3 +120,46 @@ SCM_TESTS = tests/alist.test \
   tests/weaks.test
 
 EXTRA_DIST = guile-test lib.scm $(SCM_TESTS) ChangeLog-2008
+
+
+# Test suite of Dominique Boucher's `lalr-scm'.
+# From http://code.google.com/p/lalr-scm/.
+
+LALR_TESTS = \
+  lalr/test-glr-associativity.scm \
+  lalr/test-glr-basics-01.scm \
+  lalr/test-glr-basics-02.scm \
+  lalr/test-glr-basics-03.scm \
+  lalr/test-glr-basics-04.scm \
+  lalr/test-glr-basics-05.scm \
+  lalr/test-glr-script-expression.scm \
+  lalr/test-glr-single-expressions.scm \
+  \
+  lalr/test-lr-associativity-01.scm \
+  lalr/test-lr-basics-01.scm \
+  lalr/test-lr-basics-02.scm \
+  lalr/test-lr-basics-03.scm \
+  lalr/test-lr-basics-04.scm \
+  lalr/test-lr-basics-05.scm \
+  lalr/test-lr-error-recovery-01.scm \
+  lalr/test-lr-error-recovery-02.scm \
+  lalr/test-lr-no-clause.scm \
+  lalr/test-lr-script-expression.scm \
+  lalr/test-lr-single-expressions.scm
+
+# Tests not listed in `run-guile-test.sh' and which should not be run.
+LALR_EXTRA = \
+  lalr/test-lr-associativity-02.scm \
+  lalr/test-lr-associativity-03.scm \
+  lalr/test-lr-associativity-04.scm
+
+# Test framework.
+LALR_EXTRA += \
+  lalr/common-test.scm \
+  lalr/glr-test.scm \
+  lalr/run-guile-test.sh
+
+TESTS = $(LALR_TESTS)
+TESTS_ENVIRONMENT = $(top_builddir)/meta/guile --no-autocompile
+
+EXTRA_DIST += $(LALR_EXTRA) $(LALR_TESTS)
63  test-suite/lalr/common-test.scm  Normal file
@@ -0,0 +1,63 @@
;;; common-test.scm --
;;;

;; Slightly modified for Guile by Ludovic Courtès <ludo@gnu.org>, 2010.

(use-modules (system base lalr)
             (ice-9 pretty-print))

(define *error* '())

(define-syntax when
  (syntax-rules ()
    ((_ ?expr ?body ...)
     (if ?expr
         (let () ?body ...)
         #f))))

(define-syntax check
  (syntax-rules (=>)
    ((_ ?expr => ?expected-result)
     (check ?expr (=> equal?) ?expected-result))

    ((_ ?expr (=> ?equal) ?expected-result)
     (let ((result   ?expr)
           (expected ?expected-result))
       (set! *error* '())
       (when (not (?equal result expected))
         (display "Failed test: \n")
         (pretty-print (quote ?expr))(newline)
         (display "\tresult was: ")
         (pretty-print result)(newline)
         (display "\texpected: ")
         (pretty-print expected)(newline)
         (exit 1))))))

;;; --------------------------------------------------------------------

(define (display-result v)
  (if v
      (begin
        (display "==> ")
        (display v)
        (newline))))

(define eoi-token
  (make-lexical-token '*eoi* #f #f))

(define (make-lexer tokens)
  (lambda ()
    (if (null? tokens)
        eoi-token
        (let ((t (car tokens)))
          (set! tokens (cdr tokens))
          t))))

(define (error-handler message . args)
  (set! *error* (cons `(error-handler ,message . ,(if (pair? args)
                                                      (lexical-token-category (car args))
                                                      '()))
                      *error*))
  (cons message args))

;;; end of file
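The `check' macro above is the whole assertion framework for these tests: it evaluates an expression, compares the result against the expected value with `equal?' (or a caller-supplied predicate), and prints a report and exits on mismatch.  Illustration only, not part of the commit:

;; Illustrative uses of the `check' macro defined in common-test.scm:
(check (+ 1 2) => 3)                ; compares with equal?
(check "abc" (=> string=?) "abc")   ; custom comparison predicate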
88  test-suite/lalr/glr-test.scm  Normal file
@@ -0,0 +1,88 @@
|
||||||
|
":";exec snow -- "$0" "$@"
|
||||||
|
;;;
|
||||||
|
;;;; Tests for the GLR parser generator
|
||||||
|
;;;
|
||||||
|
;;
|
||||||
|
;; @created "Fri Aug 19 11:23:48 EDT 2005"
|
||||||
|
;;
|
||||||
|
|
||||||
|
(package* glr-test/v1.0.0
|
||||||
|
(require: lalr/v2.4.0))
|
||||||
|
|
||||||
|
|
||||||
|
(define (syntax-error msg . args)
|
||||||
|
(display msg (current-error-port))
|
||||||
|
(for-each (cut format (current-error-port) " ~A" <>) args)
|
||||||
|
(newline (current-error-port))
|
||||||
|
(throw 'misc-error))
|
||||||
|
|
||||||
|
|
||||||
|
(define (make-lexer words)
|
||||||
|
(let ((phrase words))
|
||||||
|
(lambda ()
|
||||||
|
(if (null? phrase)
|
||||||
|
'*eoi*
|
||||||
|
(let ((word (car phrase)))
|
||||||
|
(set! phrase (cdr phrase))
|
||||||
|
word)))))
|
||||||
|
|
||||||
|
|
||||||
|
;;;
|
||||||
|
;;;; Test 1
|
||||||
|
;;;
|
||||||
|
|
||||||
|
|
||||||
|
(define parser-1
|
||||||
|
;; Grammar taken from Tomita's "An Efficient Augmented-Context-Free Parsing Algorithm"
|
||||||
|
(lalr-parser
|
||||||
|
(driver: glr)
|
||||||
|
(expect: 2)
|
||||||
|
(*n *v *d *p)
|
||||||
|
(<s> (<np> <vp>)
|
||||||
|
(<s> <pp>))
|
||||||
|
(<np> (*n)
|
||||||
|
(*d *n)
|
||||||
|
(<np> <pp>))
|
||||||
|
(<pp> (*p <np>))
|
||||||
|
(<vp> (*v <np>))))
|
||||||
|
|
||||||
|
|
||||||
|
(define *phrase-1* '(*n *v *d *n *p *d *n *p *d *n *p *d *n))
|
||||||
|
|
||||||
|
(define (test-1)
|
||||||
|
(parser-1 (make-lexer *phrase-1*) syntax-error))
|
||||||
|
|
||||||
|
|
||||||
|
;;;
|
||||||
|
;;;; Test 2
|
||||||
|
;;;
|
||||||
|
|
||||||
|
|
||||||
|
(define parser-2
|
||||||
|
;; The dangling-else problem
|
||||||
|
(lalr-parser
|
||||||
|
(driver: glr)
|
||||||
|
(expect: 1)
|
||||||
|
((nonassoc: if then else e s))
|
||||||
|
(<s> (s)
|
||||||
|
(if e then <s>)
|
||||||
|
(if e then <s> else <s>))))
|
||||||
|
|
||||||
|
|
||||||
|
(define *phrase-2* '(if e then if e then s else s))
|
||||||
|
|
||||||
|
(define (test-2)
|
||||||
|
(parser-2 (make-lexer *phrase-2*) syntax-error))
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
(define (assert-length l n test-name)
|
||||||
|
(display "Test '")
|
||||||
|
(display test-name)
|
||||||
|
(display (if (not (= (length l) n)) "' failed!" "' passed!"))
|
||||||
|
(newline))
|
||||||
|
|
||||||
|
(assert-length (test-1) 14 1)
|
||||||
|
(assert-length (test-2) 2 2)
|
||||||
|
|
30  test-suite/lalr/run-guile-test.sh  Normal file
@@ -0,0 +1,30 @@
# guile-test.sh --
#

for item in \
    test-glr-basics-01.scm \
    test-glr-basics-02.scm \
    test-glr-basics-03.scm \
    test-glr-basics-04.scm \
    test-glr-basics-05.scm \
    test-glr-associativity.scm \
    test-glr-script-expression.scm \
    test-glr-single-expressions.scm \
    \
    test-lr-basics-01.scm \
    test-lr-basics-02.scm \
    test-lr-basics-03.scm \
    test-lr-basics-04.scm \
    test-lr-basics-05.scm \
    test-lr-error-recovery-01.scm \
    test-lr-error-recovery-02.scm \
    test-lr-no-clause.scm \
    test-lr-associativity-01.scm \
    test-lr-script-expression.scm \
    test-lr-single-expressions.scm
do
    printf "\n\n*** Running $item\n"
    guile $item
done

### end of file
102  test-suite/lalr/test-glr-associativity.scm  Normal file
@@ -0,0 +1,102 @@
|
||||||
|
;;; test-glr-associativity.scm
|
||||||
|
;;
|
||||||
|
;;With the GLR parser both the terminal precedence and the non-terminal
|
||||||
|
;;associativity are not respected; rather they generate two child
|
||||||
|
;;processes.
|
||||||
|
;;
|
||||||
|
|
||||||
|
(load "common-test.scm")
|
||||||
|
|
||||||
|
(define parser
|
||||||
|
(lalr-parser
|
||||||
|
(driver: glr)
|
||||||
|
(expect: 0)
|
||||||
|
|
||||||
|
(N LPAREN RPAREN
|
||||||
|
(left: + -)
|
||||||
|
(right: * /)
|
||||||
|
(nonassoc: uminus))
|
||||||
|
|
||||||
|
(output (expr) : $1)
|
||||||
|
(expr (expr + expr) : (list $1 '+ $3)
|
||||||
|
(expr - expr) : (list $1 '- $3)
|
||||||
|
(expr * expr) : (list $1 '* $3)
|
||||||
|
(expr / expr) : (list $1 '/ $3)
|
||||||
|
(- expr (prec: uminus)) : (list '- $2)
|
||||||
|
(N) : $1
|
||||||
|
(LPAREN expr RPAREN) : $2)))
|
||||||
|
|
||||||
|
(define (doit . tokens)
|
||||||
|
(parser (make-lexer tokens) error-handler))
|
||||||
|
|
||||||
|
;;; --------------------------------------------------------------------
|
||||||
|
|
||||||
|
;;Remember that the result of the GLR driver is a list of parses, not a
|
||||||
|
;;single parse.
|
||||||
|
|
||||||
|
(check
|
||||||
|
(doit (make-lexical-token 'N #f 1))
|
||||||
|
=> '(1))
|
||||||
|
|
||||||
|
(check
|
||||||
|
(doit (make-lexical-token 'N #f 1)
|
||||||
|
(make-lexical-token '+ #f '+)
|
||||||
|
(make-lexical-token 'N #f 2))
|
||||||
|
=> '((1 + 2)))
|
||||||
|
|
||||||
|
(check
|
||||||
|
(doit (make-lexical-token 'N #f 1)
|
||||||
|
(make-lexical-token '* #f '*)
|
||||||
|
(make-lexical-token 'N #f 2))
|
||||||
|
=> '((1 * 2)))
|
||||||
|
|
||||||
|
(check
|
||||||
|
(doit (make-lexical-token '- #f '-)
|
||||||
|
(make-lexical-token 'N #f 1))
|
||||||
|
=> '((- 1)))
|
||||||
|
|
||||||
|
(check
|
||||||
|
(doit (make-lexical-token '- #f '-)
|
||||||
|
(make-lexical-token '- #f '-)
|
||||||
|
(make-lexical-token 'N #f 1))
|
||||||
|
=> '((- (- 1))))
|
||||||
|
|
||||||
|
(check
|
||||||
|
(doit (make-lexical-token 'N #f 1)
|
||||||
|
(make-lexical-token '+ #f '+)
|
||||||
|
(make-lexical-token '- #f '-)
|
||||||
|
(make-lexical-token 'N #f 2))
|
||||||
|
=> '((1 + (- 2))))
|
||||||
|
|
||||||
|
;;; --------------------------------------------------------------------
|
||||||
|
|
||||||
|
(check
|
||||||
|
;;left-associativity
|
||||||
|
(doit (make-lexical-token 'N #f 1)
|
||||||
|
(make-lexical-token '+ #f '+)
|
||||||
|
(make-lexical-token 'N #f 2)
|
||||||
|
(make-lexical-token '+ #f '+)
|
||||||
|
(make-lexical-token 'N #f 3))
|
||||||
|
=> '(((1 + 2) + 3)))
|
||||||
|
|
||||||
|
(check
|
||||||
|
;;right-associativity
|
||||||
|
(doit (make-lexical-token 'N #f 1)
|
||||||
|
(make-lexical-token '* #f '*)
|
||||||
|
(make-lexical-token 'N #f 2)
|
||||||
|
(make-lexical-token '* #f '*)
|
||||||
|
(make-lexical-token 'N #f 3))
|
||||||
|
=> '(((1 * 2) * 3)
|
||||||
|
(1 * (2 * 3))))
|
||||||
|
|
||||||
|
(check
|
||||||
|
;;precedence
|
||||||
|
(doit (make-lexical-token 'N #f 1)
|
||||||
|
(make-lexical-token '+ #f '+)
|
||||||
|
(make-lexical-token 'N #f 2)
|
||||||
|
(make-lexical-token '* #f '*)
|
||||||
|
(make-lexical-token 'N #f 3))
|
||||||
|
=> '(((1 + 2) * 3)
|
||||||
|
(1 + (2 * 3))))
|
||||||
|
|
||||||
|
;;; end of file
|
35  test-suite/lalr/test-glr-basics-01.scm  Normal file
@@ -0,0 +1,35 @@
|
||||||
|
;;; test-glr-basics-01.scm --
|
||||||
|
;;
|
||||||
|
;;A grammar that only accept a single terminal as input. It refuses the
|
||||||
|
;;end-of-input as first token.
|
||||||
|
;;
|
||||||
|
|
||||||
|
(load "common-test.scm")
|
||||||
|
|
||||||
|
(define (doit . tokens)
|
||||||
|
(let* ((lexer (make-lexer tokens))
|
||||||
|
(parser (lalr-parser (expect: 0)
|
||||||
|
(driver: glr)
|
||||||
|
(A)
|
||||||
|
(e (A) : $1))))
|
||||||
|
(parser lexer error-handler)))
|
||||||
|
|
||||||
|
(check
|
||||||
|
(doit (make-lexical-token 'A #f 1))
|
||||||
|
=> '(1))
|
||||||
|
|
||||||
|
(check
|
||||||
|
(doit)
|
||||||
|
=> '())
|
||||||
|
|
||||||
|
(check
|
||||||
|
;;Parse correctly the first A and reduce it. The second A triggers
|
||||||
|
;;an error which empties the stack and consumes all the input
|
||||||
|
;;tokens. Finally, an unexpected end-of-input error is returned
|
||||||
|
;;because EOI is invalid as first token after the start.
|
||||||
|
(doit (make-lexical-token 'A #f 1)
|
||||||
|
(make-lexical-token 'A #f 2)
|
||||||
|
(make-lexical-token 'A #f 3))
|
||||||
|
=> '())
|
||||||
|
|
||||||
|
;;; end of file
|
30  test-suite/lalr/test-glr-basics-02.scm  Normal file
@@ -0,0 +1,30 @@
|
||||||
|
;;; test-glr-basics-02.scm --
|
||||||
|
;;
|
||||||
|
;;A grammar that only accept a single terminal or the EOI.
|
||||||
|
;;
|
||||||
|
|
||||||
|
(load "common-test.scm")
|
||||||
|
|
||||||
|
(define (doit . tokens)
|
||||||
|
(let ((parser (lalr-parser (expect: 0)
|
||||||
|
(driver: glr)
|
||||||
|
(A)
|
||||||
|
(e (A) : $1
|
||||||
|
() : 0))))
|
||||||
|
(parser (make-lexer tokens) error-handler)))
|
||||||
|
|
||||||
|
(check
|
||||||
|
(doit)
|
||||||
|
=> '(0))
|
||||||
|
|
||||||
|
(check
|
||||||
|
(doit (make-lexical-token 'A #f 1))
|
||||||
|
=> '(1))
|
||||||
|
|
||||||
|
(check
|
||||||
|
(doit (make-lexical-token 'A #f 1)
|
||||||
|
(make-lexical-token 'A #f 2)
|
||||||
|
(make-lexical-token 'A #f 3))
|
||||||
|
=> '())
|
||||||
|
|
||||||
|
;;; end of file
|
37  test-suite/lalr/test-glr-basics-03.scm  Normal file
@@ -0,0 +1,37 @@
|
||||||
|
;;; test-glr-basics-03.scm --
|
||||||
|
;;
|
||||||
|
;;A grammar that accepts fixed sequences of a single terminal or the
|
||||||
|
;;EOI.
|
||||||
|
|
||||||
|
(load "common-test.scm")
|
||||||
|
|
||||||
|
(define (doit . tokens)
|
||||||
|
(let ((parser (lalr-parser (expect: 0)
|
||||||
|
(driver: glr)
|
||||||
|
(A)
|
||||||
|
(e (A) : (list $1)
|
||||||
|
(A A) : (list $1 $2)
|
||||||
|
(A A A) : (list $1 $2 $3)
|
||||||
|
() : 0))))
|
||||||
|
(parser (make-lexer tokens) error-handler)))
|
||||||
|
|
||||||
|
(check
|
||||||
|
(doit (make-lexical-token 'A #f 1))
|
||||||
|
=> '((1)))
|
||||||
|
|
||||||
|
(check
|
||||||
|
(doit (make-lexical-token 'A #f 1)
|
||||||
|
(make-lexical-token 'A #f 2))
|
||||||
|
=> '((1 2)))
|
||||||
|
|
||||||
|
(check
|
||||||
|
(doit (make-lexical-token 'A #f 1)
|
||||||
|
(make-lexical-token 'A #f 2)
|
||||||
|
(make-lexical-token 'A #f 3))
|
||||||
|
=> '((1 2 3)))
|
||||||
|
|
||||||
|
(check
|
||||||
|
(doit)
|
||||||
|
=> '(0))
|
||||||
|
|
||||||
|
;;; end of file
|
43  test-suite/lalr/test-glr-basics-04.scm  Normal file
@@ -0,0 +1,43 @@
|
||||||
|
;;; test-glr-basics-04.scm --
|
||||||
|
;;
|
||||||
|
;;A grammar accepting a sequence of equal tokens of arbitrary length.
|
||||||
|
;;The return value is the value of the last parsed token.
|
||||||
|
|
||||||
|
|
||||||
|
(load "common-test.scm")
|
||||||
|
|
||||||
|
(define (doit . tokens)
|
||||||
|
(let ((parser (lalr-parser (expect: 0)
|
||||||
|
(driver: glr)
|
||||||
|
(A)
|
||||||
|
(e (e A) : $2
|
||||||
|
(A) : $1
|
||||||
|
() : 0))))
|
||||||
|
(parser (make-lexer tokens) error-handler)))
|
||||||
|
|
||||||
|
(check
|
||||||
|
(doit)
|
||||||
|
=> '(0))
|
||||||
|
|
||||||
|
(check
|
||||||
|
;;Two results because there is a shift/reduce conflict, so two
|
||||||
|
;;processes are generated.
|
||||||
|
(doit (make-lexical-token 'A #f 1))
|
||||||
|
=> '(1 1))
|
||||||
|
|
||||||
|
(check
|
||||||
|
;;Two results because there is a shift/reduce conflict, so two
|
||||||
|
;;processes are generated. Notice that the rules:
|
||||||
|
;;
|
||||||
|
;; (e A) (A)
|
||||||
|
;;
|
||||||
|
;;generate only one conflict when the second "A" comes. The third
|
||||||
|
;;"A" comes when the state is inside the rule "(e A)", so there is
|
||||||
|
;;no conflict.
|
||||||
|
;;
|
||||||
|
(doit (make-lexical-token 'A #f 1)
|
||||||
|
(make-lexical-token 'A #f 2)
|
||||||
|
(make-lexical-token 'A #f 3))
|
||||||
|
=> '(3 3))
|
||||||
|
|
||||||
|
;;; end of file
|
40  test-suite/lalr/test-glr-basics-05.scm  Normal file
@@ -0,0 +1,40 @@
|
||||||
|
;;; test-glr-basics-05.scm --
|
||||||
|
;;
|
||||||
|
;;A grammar accepting a sequence of equal tokens of arbitrary length.
|
||||||
|
;;The return value is the list of values.
|
||||||
|
;;
|
||||||
|
|
||||||
|
(load "common-test.scm")
|
||||||
|
|
||||||
|
(define (doit . tokens)
|
||||||
|
(let ((parser (lalr-parser (expect: 0)
|
||||||
|
(driver: glr)
|
||||||
|
(A)
|
||||||
|
(e (e A) : (cons $2 $1)
|
||||||
|
(A) : (list $1)
|
||||||
|
() : (list 0)))))
|
||||||
|
(parser (make-lexer tokens) error-handler)))
|
||||||
|
|
||||||
|
(check
|
||||||
|
(doit)
|
||||||
|
=> '((0)))
|
||||||
|
|
||||||
|
(check
|
||||||
|
(doit (make-lexical-token 'A #f 1))
|
||||||
|
=> '((1 0)
|
||||||
|
(1)))
|
||||||
|
|
||||||
|
(check
|
||||||
|
(doit (make-lexical-token 'A #f 1)
|
||||||
|
(make-lexical-token 'A #f 2))
|
||||||
|
=> '((2 1 0)
|
||||||
|
(2 1)))
|
||||||
|
|
||||||
|
(check
|
||||||
|
(doit (make-lexical-token 'A #f 1)
|
||||||
|
(make-lexical-token 'A #f 2)
|
||||||
|
(make-lexical-token 'A #f 3))
|
||||||
|
=> '((3 2 1 0)
|
||||||
|
(3 2 1)))
|
||||||
|
|
||||||
|
;;; end of file
|
125  test-suite/lalr/test-glr-script-expression.scm  Normal file
@@ -0,0 +1,125 @@
|
||||||
|
;;; test-glr-script-expression.scm --
|
||||||
|
;;
|
||||||
|
;;Parse scripts, each line an expression.
|
||||||
|
;;
|
||||||
|
|
||||||
|
(load "common-test.scm")
|
||||||
|
|
||||||
|
(define (doit . tokens)
|
||||||
|
(let ((parser (lalr-parser (expect: 0)
|
||||||
|
(driver: glr)
|
||||||
|
(N O C T (left: A) (left: M) (nonassoc: U))
|
||||||
|
|
||||||
|
(script (lines) : (reverse $1))
|
||||||
|
|
||||||
|
(lines (lines line) : (cons $2 $1)
|
||||||
|
(line) : (list $1))
|
||||||
|
|
||||||
|
(line (T) : #\newline
|
||||||
|
(E T) : $1
|
||||||
|
(error T) : (list 'error-clause $2))
|
||||||
|
|
||||||
|
(E (N) : $1
|
||||||
|
(E A E) : ($2 $1 $3)
|
||||||
|
(E M E) : ($2 $1 $3)
|
||||||
|
(A E (prec: U)) : ($1 $2)
|
||||||
|
(O E C) : $2))))
|
||||||
|
(parser (make-lexer tokens) error-handler)))
|
||||||
|
|
||||||
|
;;; --------------------------------------------------------------------
|
||||||
|
;;; Correct input
|
||||||
|
|
||||||
|
(check
|
||||||
|
(doit (make-lexical-token 'T #f #\newline))
|
||||||
|
=> '((#\newline)))
|
||||||
|
|
||||||
|
(check
|
||||||
|
(doit (make-lexical-token 'N #f 1)
|
||||||
|
(make-lexical-token 'T #f #\newline))
|
||||||
|
=> '((1)))
|
||||||
|
|
||||||
|
(check
|
||||||
|
(doit (make-lexical-token 'N #f 1)
|
||||||
|
(make-lexical-token 'A #f +)
|
||||||
|
(make-lexical-token 'N #f 2)
|
||||||
|
(make-lexical-token 'T #f #\newline))
|
||||||
|
=> '((3)))
|
||||||
|
|
||||||
|
(check
|
||||||
|
(doit (make-lexical-token 'N #f 1)
|
||||||
|
(make-lexical-token 'A #f +)
|
||||||
|
(make-lexical-token 'N #f 2)
|
||||||
|
(make-lexical-token 'M #f *)
|
||||||
|
(make-lexical-token 'N #f 3)
|
||||||
|
(make-lexical-token 'T #f #\newline))
|
||||||
|
=> '((9) (7)))
|
||||||
|
|
||||||
|
(check
|
||||||
|
(doit (make-lexical-token 'N #f 10)
|
||||||
|
(make-lexical-token 'M #f *)
|
||||||
|
(make-lexical-token 'N #f 2)
|
||||||
|
(make-lexical-token 'A #f +)
|
||||||
|
(make-lexical-token 'N #f 3)
|
||||||
|
(make-lexical-token 'T #f #\newline))
|
||||||
|
=> '((23)))
|
||||||
|
|
||||||
|
(check
|
||||||
|
(doit (make-lexical-token 'O #f #\()
|
||||||
|
(make-lexical-token 'N #f 1)
|
||||||
|
(make-lexical-token 'A #f +)
|
||||||
|
(make-lexical-token 'N #f 2)
|
||||||
|
(make-lexical-token 'C #f #\))
|
||||||
|
(make-lexical-token 'M #f *)
|
||||||
|
(make-lexical-token 'N #f 3)
|
||||||
|
(make-lexical-token 'T #f #\newline))
|
||||||
|
=> '((9)))
|
||||||
|
|
||||||
|
(check
|
||||||
|
(doit (make-lexical-token 'O #f #\()
|
||||||
|
(make-lexical-token 'N #f 1)
|
||||||
|
(make-lexical-token 'A #f +)
|
||||||
|
(make-lexical-token 'N #f 2)
|
||||||
|
(make-lexical-token 'C #f #\))
|
||||||
|
(make-lexical-token 'M #f *)
|
||||||
|
(make-lexical-token 'N #f 3)
|
||||||
|
(make-lexical-token 'T #f #\newline)
|
||||||
|
|
||||||
|
(make-lexical-token 'N #f 4)
|
||||||
|
(make-lexical-token 'M #f /)
|
||||||
|
(make-lexical-token 'N #f 5)
|
||||||
|
(make-lexical-token 'T #f #\newline))
|
||||||
|
=> '((9 4/5)))
|
||||||
|
|
||||||
|
;;; --------------------------------------------------------------------
|
||||||
|
|
||||||
|
(check
|
||||||
|
;;Successful error recovery.
|
||||||
|
(doit (make-lexical-token 'O #f #\()
|
||||||
|
(make-lexical-token 'N #f 1)
|
||||||
|
(make-lexical-token 'A #f +)
|
||||||
|
(make-lexical-token 'N #f 2)
|
||||||
|
(make-lexical-token 'M #f *)
|
||||||
|
(make-lexical-token 'N #f 3)
|
||||||
|
(make-lexical-token 'T #f #\newline)
|
||||||
|
|
||||||
|
(make-lexical-token 'N #f 4)
|
||||||
|
(make-lexical-token 'M #f /)
|
||||||
|
(make-lexical-token 'N #f 5)
|
||||||
|
(make-lexical-token 'T #f #\newline))
|
||||||
|
=> '())
|
||||||
|
|
||||||
|
(check
|
||||||
|
;;Unexpected end of input.
|
||||||
|
(doit (make-lexical-token 'N #f 1)
|
||||||
|
(make-lexical-token 'A #f +)
|
||||||
|
(make-lexical-token 'N #f 2))
|
||||||
|
=> '())
|
||||||
|
|
||||||
|
(check
|
||||||
|
;;Unexpected end of input.
|
||||||
|
(doit (make-lexical-token 'N #f 1)
|
||||||
|
(make-lexical-token 'A #f +)
|
||||||
|
(make-lexical-token 'T #f #\newline))
|
||||||
|
=> '())
|
||||||
|
|
||||||
|
;;; end of file
|
60  test-suite/lalr/test-glr-single-expressions.scm  Normal file
@@ -0,0 +1,60 @@
|
||||||
|
;;; test-glr-single-expressions.scm --
|
||||||
|
;;
|
||||||
|
;;Grammar accepting single expressions.
|
||||||
|
;;
|
||||||
|
|
||||||
|
(load "common-test.scm")
|
||||||
|
|
||||||
|
(define (doit . tokens)
|
||||||
|
(let ((parser (lalr-parser (expect: 0)
|
||||||
|
(driver: glr)
|
||||||
|
(N O C (left: A) (left: M) (nonassoc: U))
|
||||||
|
|
||||||
|
(E (N) : $1
|
||||||
|
(E A E) : ($2 $1 $3)
|
||||||
|
(E M E) : ($2 $1 $3)
|
||||||
|
(A E (prec: U)) : ($1 $2)
|
||||||
|
(O E C) : $2))))
|
||||||
|
(parser (make-lexer tokens) error-handler)))
|
||||||
|
|
||||||
|
;;; --------------------------------------------------------------------
|
||||||
|
|
||||||
|
(check ;correct input
|
||||||
|
(doit (make-lexical-token 'N #f 1))
|
||||||
|
=> '(1))
|
||||||
|
|
||||||
|
(check ;correct input
|
||||||
|
(doit (make-lexical-token 'A #f -)
|
||||||
|
(make-lexical-token 'N #f 1))
|
||||||
|
=> '(-1))
|
||||||
|
|
||||||
|
(check ;correct input
|
||||||
|
(doit (make-lexical-token 'A #f +)
|
||||||
|
(make-lexical-token 'N #f 1))
|
||||||
|
=> '(1))
|
||||||
|
|
||||||
|
(check ;correct input
|
||||||
|
(doit (make-lexical-token 'N #f 1)
|
||||||
|
(make-lexical-token 'A #f +)
|
||||||
|
(make-lexical-token 'N #f 2))
|
||||||
|
=> '(3))
|
||||||
|
|
||||||
|
(check ;correct input
|
||||||
|
(doit (make-lexical-token 'N #f 1)
|
||||||
|
(make-lexical-token 'A #f +)
|
||||||
|
(make-lexical-token 'N #f 2)
|
||||||
|
(make-lexical-token 'M #f *)
|
||||||
|
(make-lexical-token 'N #f 3))
|
||||||
|
=> '(9 7))
|
||||||
|
|
||||||
|
(check ;correct input
|
||||||
|
(doit (make-lexical-token 'O #f #\()
|
||||||
|
(make-lexical-token 'N #f 1)
|
||||||
|
(make-lexical-token 'A #f +)
|
||||||
|
(make-lexical-token 'N #f 2)
|
||||||
|
(make-lexical-token 'C #f #\))
|
||||||
|
(make-lexical-token 'M #f *)
|
||||||
|
(make-lexical-token 'N #f 3))
|
||||||
|
=> '(9))
|
||||||
|
|
||||||
|
;;; end of file
|
91  test-suite/lalr/test-lr-associativity-01.scm  Normal file
@@ -0,0 +1,91 @@
|
||||||
|
;;; test-lr-associativity-01.scm --
|
||||||
|
;;
|
||||||
|
;;Show how to use left and right associativity. Notice that the
|
||||||
|
;;terminal M is declared as right associative; this influences the
|
||||||
|
;;binding of values to the $n symbols in the semantic clauses. The
|
||||||
|
;;semantic clause in the rule:
|
||||||
|
;;
|
||||||
|
;; (E M E M E) : (list $1 $2 (list $3 $4 $5))
|
||||||
|
;;
|
||||||
|
;;looks like it is right-associated, and it is because we have declared
|
||||||
|
;;M as "right:".
|
||||||
|
;;
|
||||||
|
|
||||||
|
(load "common-test.scm")
|
||||||
|
|
||||||
|
(define (doit . tokens)
|
||||||
|
(let ((parser (lalr-parser
|
||||||
|
(expect: 0)
|
||||||
|
(N (left: A)
|
||||||
|
(right: M)
|
||||||
|
(nonassoc: U))
|
||||||
|
(E (N) : $1
|
||||||
|
(E A E) : (list $1 $2 $3)
|
||||||
|
(E M E) : (list $1 $2 $3)
|
||||||
|
(E M E M E) : (list $1 $2 (list $3 $4 $5))
|
||||||
|
(A E (prec: U)) : (list '- $2)))))
|
||||||
|
(parser (make-lexer tokens) error-handler)))
|
||||||
|
|
||||||
|
;;; --------------------------------------------------------------------
|
||||||
|
;;; Single operator.
|
||||||
|
|
||||||
|
(check
|
||||||
|
(doit (make-lexical-token 'N #f 1))
|
||||||
|
=> 1)
|
||||||
|
|
||||||
|
(check
|
||||||
|
(doit (make-lexical-token 'N #f 1)
|
||||||
|
(make-lexical-token 'A #f '+)
|
||||||
|
(make-lexical-token 'N #f 2))
|
||||||
|
=> '(1 + 2))
|
||||||
|
|
||||||
|
(check
|
||||||
|
(doit (make-lexical-token 'N #f 1)
|
||||||
|
(make-lexical-token 'M #f '*)
|
||||||
|
(make-lexical-token 'N #f 2))
|
||||||
|
=> '(1 * 2))
|
||||||
|
|
||||||
|
(check
|
||||||
|
(doit (make-lexical-token 'A #f '-)
|
||||||
|
(make-lexical-token 'N #f 1))
|
||||||
|
=> '(- 1))
|
||||||
|
|
||||||
|
;;; --------------------------------------------------------------------
|
||||||
|
;;; Precedence.
|
||||||
|
|
||||||
|
(check
|
||||||
|
(doit (make-lexical-token 'N #f 1)
|
||||||
|
(make-lexical-token 'A #f '+)
|
||||||
|
(make-lexical-token 'N #f 2)
|
||||||
|
(make-lexical-token 'M #f '*)
|
||||||
|
(make-lexical-token 'N #f 3))
|
||||||
|
=> '(1 + (2 * 3)))
|
||||||
|
|
||||||
|
(check
|
||||||
|
(doit (make-lexical-token 'N #f 1)
|
||||||
|
(make-lexical-token 'M #f '*)
|
||||||
|
(make-lexical-token 'N #f 2)
|
||||||
|
(make-lexical-token 'A #f '+)
|
||||||
|
(make-lexical-token 'N #f 3))
|
||||||
|
=> '((1 * 2) + 3))
|
||||||
|
|
||||||
|
;;; --------------------------------------------------------------------
|
||||||
|
;;; Associativity.
|
||||||
|
|
||||||
|
(check
|
||||||
|
(doit (make-lexical-token 'N #f 1)
|
||||||
|
(make-lexical-token 'A #f '+)
|
||||||
|
(make-lexical-token 'N #f 2)
|
||||||
|
(make-lexical-token 'A #f '+)
|
||||||
|
(make-lexical-token 'N #f 3))
|
||||||
|
=> '((1 + 2) + 3))
|
||||||
|
|
||||||
|
(check
|
||||||
|
(doit (make-lexical-token 'N #f 1)
|
||||||
|
(make-lexical-token 'M #f '*)
|
||||||
|
(make-lexical-token 'N #f 2)
|
||||||
|
(make-lexical-token 'M #f '*)
|
||||||
|
(make-lexical-token 'N #f 3))
|
||||||
|
=> '(1 * (2 * 3)))
|
||||||
|
|
||||||
|
;;; end of file
|
91  test-suite/lalr/test-lr-associativity-02.scm  Normal file
@@ -0,0 +1,91 @@
|
||||||
|
;;; test-lr-associativity-02.scm --
|
||||||
|
;;
|
||||||
|
;;Show how to use left and right associativity. Notice that the
|
||||||
|
;;terminal M is declared as left associative; this influences the
|
||||||
|
;;binding of values to the $n symbols in the semantic clauses. The
|
||||||
|
;;semantic clause in the rule:
|
||||||
|
;;
|
||||||
|
;; (E M E M E) : (list $1 $2 (list $3 $4 $5))
|
||||||
|
;;
|
||||||
|
;;looks like it is right-associated, but the result is left-associated
|
||||||
|
;;because we have declared M as "left:".
|
||||||
|
;;
|
||||||
|
|
||||||
|
(load "common-test.scm")
|
||||||
|
|
||||||
|
(define (doit . tokens)
|
||||||
|
(let ((parser (lalr-parser
|
||||||
|
(expect: 0)
|
||||||
|
(N (left: A)
|
||||||
|
(left: M)
|
||||||
|
(nonassoc: U))
|
||||||
|
(E (N) : $1
|
||||||
|
(E A E) : (list $1 $2 $3)
|
||||||
|
(E M E) : (list $1 $2 $3)
|
||||||
|
(E M E M E) : (list $1 $2 (list $3 $4 $5))
|
||||||
|
(A E (prec: U)) : (list '- $2)))))
|
||||||
|
(parser (make-lexer tokens) error-handler)))
|
||||||
|
|
||||||
|
;;; --------------------------------------------------------------------
|
||||||
|
;;; Single operator.
|
||||||
|
|
||||||
|
(check
|
||||||
|
(doit (make-lexical-token 'N #f 1))
|
||||||
|
=> 1)
|
||||||
|
|
||||||
|
(check
|
||||||
|
(doit (make-lexical-token 'N #f 1)
|
||||||
|
(make-lexical-token 'A #f '+)
|
||||||
|
(make-lexical-token 'N #f 2))
|
||||||
|
=> '(1 + 2))
|
||||||
|
|
||||||
|
(check
|
||||||
|
(doit (make-lexical-token 'N #f 1)
|
||||||
|
(make-lexical-token 'M #f '*)
|
||||||
|
(make-lexical-token 'N #f 2))
|
||||||
|
=> '(1 * 2))
|
||||||
|
|
||||||
|
(check
|
||||||
|
(doit (make-lexical-token 'A #f '-)
|
||||||
|
(make-lexical-token 'N #f 1))
|
||||||
|
=> '(- 1))
|
||||||
|
|
||||||
|
;;; --------------------------------------------------------------------
|
||||||
|
;;; Precedence.
|
||||||
|
|
||||||
|
(check
|
||||||
|
(doit (make-lexical-token 'N #f 1)
|
||||||
|
(make-lexical-token 'A #f '+)
|
||||||
|
(make-lexical-token 'N #f 2)
|
||||||
|
(make-lexical-token 'M #f '*)
|
||||||
|
(make-lexical-token 'N #f 3))
|
||||||
|
=> '(1 + (2 * 3)))
|
||||||
|
|
||||||
|
(check
|
||||||
|
(doit (make-lexical-token 'N #f 1)
|
||||||
|
(make-lexical-token 'M #f '*)
|
||||||
|
(make-lexical-token 'N #f 2)
|
||||||
|
(make-lexical-token 'A #f '+)
|
||||||
|
(make-lexical-token 'N #f 3))
|
||||||
|
=> '((1 * 2) + 3))
|
||||||
|
|
||||||
|
;;; --------------------------------------------------------------------
|
||||||
|
;;; Associativity.
|
||||||
|
|
||||||
|
(check
|
||||||
|
(doit (make-lexical-token 'N #f 1)
|
||||||
|
(make-lexical-token 'A #f '+)
|
||||||
|
(make-lexical-token 'N #f 2)
|
||||||
|
(make-lexical-token 'A #f '+)
|
||||||
|
(make-lexical-token 'N #f 3))
|
||||||
|
=> '((1 + 2) + 3))
|
||||||
|
|
||||||
|
(check
|
||||||
|
(doit (make-lexical-token 'N #f 1)
|
||||||
|
(make-lexical-token 'M #f '*)
|
||||||
|
(make-lexical-token 'N #f 2)
|
||||||
|
(make-lexical-token 'M #f '*)
|
||||||
|
(make-lexical-token 'N #f 3))
|
||||||
|
=> '((1 * 2) * 3))
|
||||||
|
|
||||||
|
;;; end of file
|
85  test-suite/lalr/test-lr-associativity-03.scm  Normal file
@@ -0,0 +1,85 @@
|
||||||
|
;;; test-lr-associativity-03.scm --
|
||||||
|
;;
|
||||||
|
;;Show how to use left and right associativity. Notice that the
|
||||||
|
;;terminal M is declared as non-associative; this influences the binding
|
||||||
|
;;of values to the $n symbols in the semantic clauses. The semantic
|
||||||
|
;;clause in the rule:
|
||||||
|
;;
|
||||||
|
;; (E M E M E) : (list $1 $2 (list $3 $4 $5))
|
||||||
|
;;
|
||||||
|
;;looks like it is right-associated; here that grouping comes from the
;;explicit five-element rule, since M is declared "nonassoc:".
|
||||||
|
;;
|
||||||
|
|
||||||
|
(load "common-test.scm")
|
||||||
|
|
||||||
|
(define (doit . tokens)
|
||||||
|
(let ((parser (lalr-parser
|
||||||
|
(expect: 0)
|
||||||
|
(N (nonassoc: A)
|
||||||
|
(nonassoc: M))
|
||||||
|
(E (N) : $1
|
||||||
|
(E A E) : (list $1 $2 $3)
|
||||||
|
(E A E A E) : (list (list $1 $2 $3) $4 $5)
|
||||||
|
(E M E) : (list $1 $2 $3)
|
||||||
|
(E M E M E) : (list $1 $2 (list $3 $4 $5))))))
|
||||||
|
(parser (make-lexer tokens) error-handler)))
|
||||||
|
|
||||||
|
;;; --------------------------------------------------------------------
|
||||||
|
;;; Single operator.
|
||||||
|
|
||||||
|
(check
|
||||||
|
(doit (make-lexical-token 'N #f 1))
|
||||||
|
=> 1)
|
||||||
|
|
||||||
|
(check
|
||||||
|
(doit (make-lexical-token 'N #f 1)
|
||||||
|
(make-lexical-token 'A #f '+)
|
||||||
|
(make-lexical-token 'N #f 2))
|
||||||
|
=> '(1 + 2))
|
||||||
|
|
||||||
|
(check
|
||||||
|
(doit (make-lexical-token 'N #f 1)
|
||||||
|
(make-lexical-token 'M #f '*)
|
||||||
|
(make-lexical-token 'N #f 2))
|
||||||
|
=> '(1 * 2))
|
||||||
|
|
||||||
|
;;; --------------------------------------------------------------------
|
||||||
|
;;; Precedence.
|
||||||
|
|
||||||
|
(check
|
||||||
|
(doit (make-lexical-token 'N #f 1)
|
||||||
|
(make-lexical-token 'A #f '+)
|
||||||
|
(make-lexical-token 'N #f 2)
|
||||||
|
(make-lexical-token 'M #f '*)
|
||||||
|
(make-lexical-token 'N #f 3))
|
||||||
|
=> '(1 + (2 * 3)))
|
||||||
|
|
||||||
|
(check
|
||||||
|
(doit (make-lexical-token 'N #f 1)
|
||||||
|
(make-lexical-token 'M #f '*)
|
||||||
|
(make-lexical-token 'N #f 2)
|
||||||
|
(make-lexical-token 'A #f '+)
|
||||||
|
(make-lexical-token 'N #f 3))
|
||||||
|
=> '((1 * 2) + 3))
|
||||||
|
|
||||||
|
;;; --------------------------------------------------------------------
|
||||||
|
;;; Associativity.
|
||||||
|
|
||||||
|
(check
|
||||||
|
(doit (make-lexical-token 'N #f 1)
|
||||||
|
(make-lexical-token 'A #f '+)
|
||||||
|
(make-lexical-token 'N #f 2)
|
||||||
|
(make-lexical-token 'A #f '+)
|
||||||
|
(make-lexical-token 'N #f 3))
|
||||||
|
=> '((1 + 2) + 3))
|
||||||
|
|
||||||
|
(check
|
||||||
|
(doit (make-lexical-token 'N #f 1)
|
||||||
|
(make-lexical-token 'M #f '*)
|
||||||
|
(make-lexical-token 'N #f 2)
|
||||||
|
(make-lexical-token 'M #f '*)
|
||||||
|
(make-lexical-token 'N #f 3))
|
||||||
|
=> '(1 * (2 * 3)))
|
||||||
|
|
||||||
|
;;; end of file
|
83  test-suite/lalr/test-lr-associativity-04.scm  Normal file
@@ -0,0 +1,83 @@
|
||||||
|
;;; test-lr-associativity-04.scm --
|
||||||
|
;;
|
||||||
|
;;Show how to use associativity.
|
||||||
|
;;
|
||||||
|
|
||||||
|
(load "common-test.scm")
|
||||||
|
|
||||||
|
(define (doit . tokens)
|
||||||
|
(let ((parser (lalr-parser
|
||||||
|
(expect: 0)
|
||||||
|
(N (left: A)
|
||||||
|
(left: M))
|
||||||
|
(E (N) : $1
|
||||||
|
|
||||||
|
(E A E) : (list $1 $2 $3)
|
||||||
|
(E A E A E) : (list (list $1 $2 $3) $4 $5)
|
||||||
|
|
||||||
|
(E M E) : (list $1 $2 $3)
|
||||||
|
(E M E M E) : (list $1 $2 (list $3 $4 $5))
|
||||||
|
|
||||||
|
(E A E M E) : (list $1 $2 $3 $4 $5)
|
||||||
|
(E M E A E) : (list $1 $2 $3 $4 $5)
|
||||||
|
))))
|
||||||
|
(parser (make-lexer tokens) error-handler)))
|
||||||
|
|
||||||
|
;;; --------------------------------------------------------------------
|
||||||
|
;;; Single operator.
|
||||||
|
|
||||||
|
(check
|
||||||
|
(doit (make-lexical-token 'N #f 1))
|
||||||
|
=> 1)
|
||||||
|
|
||||||
|
(check
|
||||||
|
(doit (make-lexical-token 'N #f 1)
|
||||||
|
(make-lexical-token 'A #f '+)
|
||||||
|
(make-lexical-token 'N #f 2))
|
||||||
|
=> '(1 + 2))
|
||||||
|
|
||||||
|
(check
|
||||||
|
(doit (make-lexical-token 'N #f 1)
|
||||||
|
(make-lexical-token 'M #f '*)
|
||||||
|
(make-lexical-token 'N #f 2))
|
||||||
|
=> '(1 * 2))
|
||||||
|
|
||||||
|
;;; --------------------------------------------------------------------
|
||||||
|
;;; Precedence.
|
||||||
|
|
||||||
|
(check
|
||||||
|
(doit (make-lexical-token 'N #f 1)
|
||||||
|
(make-lexical-token 'A #f '+)
|
||||||
|
(make-lexical-token 'N #f 2)
|
||||||
|
(make-lexical-token 'M #f '*)
|
||||||
|
(make-lexical-token 'N #f 3))
|
||||||
|
=> '(1 + (2 * 3)))
|
||||||
|
|
||||||
|
(check
|
||||||
|
(doit (make-lexical-token 'N #f 1)
|
||||||
|
(make-lexical-token 'M #f '*)
|
||||||
|
(make-lexical-token 'N #f 2)
|
||||||
|
(make-lexical-token 'A #f '+)
|
||||||
|
(make-lexical-token 'N #f 3))
|
||||||
|
=> '((1 * 2) + 3))
|
||||||
|
|
||||||
|
;;; --------------------------------------------------------------------
|
||||||
|
;;; Associativity.
|
||||||
|
|
||||||
|
(check
|
||||||
|
(doit (make-lexical-token 'N #f 1)
|
||||||
|
(make-lexical-token 'A #f '+)
|
||||||
|
(make-lexical-token 'N #f 2)
|
||||||
|
(make-lexical-token 'A #f '+)
|
||||||
|
(make-lexical-token 'N #f 3))
|
||||||
|
=> '((1 + 2) + 3))
|
||||||
|
|
||||||
|
(check
|
||||||
|
(doit (make-lexical-token 'N #f 1)
|
||||||
|
(make-lexical-token 'M #f '*)
|
||||||
|
(make-lexical-token 'N #f 2)
|
||||||
|
(make-lexical-token 'M #f '*)
|
||||||
|
(make-lexical-token 'N #f 3))
|
||||||
|
=> '((1 * 2) * 3))
|
||||||
|
|
||||||
|
;;; end of file
|
38  test-suite/lalr/test-lr-basics-01.scm  Normal file
@@ -0,0 +1,38 @@
|
||||||
|
;;; test-lr-basics-01.scm --
|
||||||
|
;;
|
||||||
|
;;A grammar that only accept a single terminal as input. It refuses the
|
||||||
|
;;end-of-input as first token.
|
||||||
|
;;
|
||||||
|
|
||||||
|
(load "common-test.scm")
|
||||||
|
|
||||||
|
(define (doit . tokens)
|
||||||
|
(let* ((lexer (make-lexer tokens))
|
||||||
|
(parser (lalr-parser (expect: 0)
|
||||||
|
(A)
|
||||||
|
(e (A) : $1))))
|
||||||
|
(parser lexer error-handler)))
|
||||||
|
|
||||||
|
(check
|
||||||
|
(doit (make-lexical-token 'A #f 1))
|
||||||
|
=> 1)
|
||||||
|
|
||||||
|
(check
|
||||||
|
(let ((r (doit)))
|
||||||
|
(cons r *error*))
|
||||||
|
=> '(#f (error-handler "Syntax error: unexpected end of input")))
|
||||||
|
|
||||||
|
(check
|
||||||
|
;;Parse correctly the first A and reduce it. The second A triggers
|
||||||
|
;;an error which empties the stack and consumes all the input
|
||||||
|
;;tokens. Finally, an unexpected end-of-input error is returned
|
||||||
|
;;because EOI is invalid as first token after the start.
|
||||||
|
(let ((r (doit (make-lexical-token 'A #f 1)
|
||||||
|
(make-lexical-token 'A #f 2)
|
||||||
|
(make-lexical-token 'A #f 3))))
|
||||||
|
(cons r *error*))
|
||||||
|
=> '(#f
|
||||||
|
(error-handler "Syntax error: unexpected end of input")
|
||||||
|
(error-handler "Syntax error: unexpected token : " . A)))
|
||||||
|
|
||||||
|
;;; end of file
|
33  test-suite/lalr/test-lr-basics-02.scm  Normal file
@@ -0,0 +1,33 @@
|
||||||
|
;;; test-lr-basics-02.scm --
|
||||||
|
;;
|
||||||
|
;;A grammar that only accept a single terminal or the EOI.
|
||||||
|
;;
|
||||||
|
|
||||||
|
(load "common-test.scm")
|
||||||
|
|
||||||
|
(define (doit . tokens)
|
||||||
|
(let ((parser (lalr-parser (expect: 0)
|
||||||
|
(A)
|
||||||
|
(e (A) : $1
|
||||||
|
() : 0))))
|
||||||
|
(parser (make-lexer tokens) error-handler)))
|
||||||
|
|
||||||
|
(check
|
||||||
|
(doit)
|
||||||
|
=> 0)
|
||||||
|
|
||||||
|
(check
|
||||||
|
(doit (make-lexical-token 'A #f 1))
|
||||||
|
=> 1)
|
||||||
|
|
||||||
|
(check
|
||||||
|
;;Parse correctly the first A and reduce it. The second A triggers
|
||||||
|
;;an error which empties the stack and consumes all the input
|
||||||
|
;;tokens. Finally, the end-of-input token is correctly parsed.
|
||||||
|
(let ((r (doit (make-lexical-token 'A #f 1)
|
||||||
|
(make-lexical-token 'A #f 2)
|
||||||
|
(make-lexical-token 'A #f 3))))
|
||||||
|
(cons r *error*))
|
||||||
|
=> '(0 (error-handler "Syntax error: unexpected token : " . A)))
|
||||||
|
|
||||||
|
;;; end of file
|
36  test-suite/lalr/test-lr-basics-03.scm  Normal file
@@ -0,0 +1,36 @@
|
||||||
|
;;; test-lr-basics-03.scm --
|
||||||
|
;;
|
||||||
|
;;A grammar that accepts fixed sequences of a single terminal or the
|
||||||
|
;;EOI.
|
||||||
|
|
||||||
|
(load "common-test.scm")
|
||||||
|
|
||||||
|
(define (doit . tokens)
|
||||||
|
(let ((parser (lalr-parser (expect: 0)
|
||||||
|
(A)
|
||||||
|
(e (A) : (list $1)
|
||||||
|
(A A) : (list $1 $2)
|
||||||
|
(A A A) : (list $1 $2 $3)
|
||||||
|
() : 0))))
|
||||||
|
(parser (make-lexer tokens) error-handler)))
|
||||||
|
|
||||||
|
(check
|
||||||
|
(doit (make-lexical-token 'A #f 1))
|
||||||
|
=> '(1))
|
||||||
|
|
||||||
|
(check
|
||||||
|
(doit (make-lexical-token 'A #f 1)
|
||||||
|
(make-lexical-token 'A #f 2))
|
||||||
|
=> '(1 2))
|
||||||
|
|
||||||
|
(check
|
||||||
|
(doit (make-lexical-token 'A #f 1)
|
||||||
|
(make-lexical-token 'A #f 2)
|
||||||
|
(make-lexical-token 'A #f 3))
|
||||||
|
=> '(1 2 3))
|
||||||
|
|
||||||
|
(check
|
||||||
|
(doit)
|
||||||
|
=> 0)
|
||||||
|
|
||||||
|
;;; end of file
|
31  test-suite/lalr/test-lr-basics-04.scm  Normal file
@@ -0,0 +1,31 @@
|
||||||
|
;;; test-lr-basics-04.scm --
|
||||||
|
;;
|
||||||
|
;;A grammar accepting a sequence of equal tokens of arbitrary length.
|
||||||
|
;;The return value is the value of the last parsed token.
|
||||||
|
|
||||||
|
|
||||||
|
(load "common-test.scm")
|
||||||
|
|
||||||
|
(define (doit . tokens)
|
||||||
|
(let ((parser (lalr-parser (expect: 0)
|
||||||
|
(A)
|
||||||
|
(e (e A) : $2
|
||||||
|
(A) : $1
|
||||||
|
() : 0))))
|
||||||
|
(parser (make-lexer tokens) error-handler)))
|
||||||
|
|
||||||
|
(check
|
||||||
|
(doit)
|
||||||
|
=> 0)
|
||||||
|
|
||||||
|
(check
|
||||||
|
(doit (make-lexical-token 'A #f 1))
|
||||||
|
=> 1)
|
||||||
|
|
||||||
|
(check
|
||||||
|
(doit (make-lexical-token 'A #f 1)
|
||||||
|
(make-lexical-token 'A #f 2)
|
||||||
|
(make-lexical-token 'A #f 3))
|
||||||
|
=> 3)
|
||||||
|
|
||||||
|
;;; end of file
|
36  test-suite/lalr/test-lr-basics-05.scm  Normal file
@@ -0,0 +1,36 @@
|
||||||
|
;;; test-lr-basics-05.scm --
|
||||||
|
;;
|
||||||
|
;;A grammar accepting a sequence of equal tokens of arbitrary length.
|
||||||
|
;;The return value is the list of values.
|
||||||
|
;;
|
||||||
|
|
||||||
|
(load "common-test.scm")
|
||||||
|
|
||||||
|
(define (doit . tokens)
|
||||||
|
(let ((parser (lalr-parser (expect: 0)
|
||||||
|
(A)
|
||||||
|
(e (e A) : (cons $2 $1)
|
||||||
|
(A) : (list $1)
|
||||||
|
() : 0))))
|
||||||
|
(parser (make-lexer tokens) error-handler)))
|
||||||
|
|
||||||
|
(check
|
||||||
|
(doit)
|
||||||
|
=> 0)
|
||||||
|
|
||||||
|
(check
|
||||||
|
(doit (make-lexical-token 'A #f 1))
|
||||||
|
=> '(1))
|
||||||
|
|
||||||
|
(check
|
||||||
|
(doit (make-lexical-token 'A #f 1)
|
||||||
|
(make-lexical-token 'A #f 2))
|
||||||
|
=> '(2 1))
|
||||||
|
|
||||||
|
(check
|
||||||
|
(doit (make-lexical-token 'A #f 1)
|
||||||
|
(make-lexical-token 'A #f 2)
|
||||||
|
(make-lexical-token 'A #f 3))
|
||||||
|
=> '(3 2 1))
|
||||||
|
|
||||||
|
;;; end of file
|
145  test-suite/lalr/test-lr-error-recovery-01.scm  Normal file
@@ -0,0 +1,145 @@
|
||||||
|
;;; test-lr-error-recovery-01.scm --
|
||||||
|
;;
|
||||||
|
;;Test error recovery with a terminator terminal.
|
||||||
|
;;
|
||||||
|
|
||||||
|
(load "common-test.scm")
|
||||||
|
|
||||||
|
(define (doit . tokens)
|
||||||
|
(let ((parser (lalr-parser
|
||||||
|
(expect: 0)
|
||||||
|
(NUMBER BAD NEWLINE)
|
||||||
|
|
||||||
|
(script (lines) : (reverse $1)
|
||||||
|
() : 0)
|
||||||
|
(lines (lines line) : (cons $2 $1)
|
||||||
|
(line) : (list $1))
|
||||||
|
(line (NEWLINE) : (list 'line $1)
|
||||||
|
(NUMBER NEWLINE) : (list 'line $1 $2)
|
||||||
|
(NUMBER NUMBER NEWLINE) : (list 'line $1 $2 $3)
|
||||||
|
|
||||||
|
;;This semantic action will cause "(recover $1
|
||||||
|
;;$2)" to be the result of the offending line.
|
||||||
|
(error NEWLINE) : (list 'recover $1 $2)))))
|
||||||
|
(parser (make-lexer tokens) error-handler)))
|
||||||
|
|
||||||
|
;;; --------------------------------------------------------------------
|
||||||
|
;;; No errors, grammar tests.
|
||||||
|
|
||||||
|
(check
|
||||||
|
(doit)
|
||||||
|
=> 0)
|
||||||
|
|
||||||
|
(check
|
||||||
|
(doit (make-lexical-token 'NEWLINE #f #\newline))
|
||||||
|
=> '((line #\newline)))
|
||||||
|
|
||||||
|
(check
|
||||||
|
(doit (make-lexical-token 'NUMBER #f 1)
|
||||||
|
(make-lexical-token 'NEWLINE #f #\newline))
|
||||||
|
=> '((line 1 #\newline)))
|
||||||
|
|
||||||
|
(check
|
||||||
|
(doit (make-lexical-token 'NUMBER #f 1)
|
||||||
|
(make-lexical-token 'NUMBER #f 2)
|
||||||
|
(make-lexical-token 'NEWLINE #f #\newline))
|
||||||
|
=> '((line 1 2 #\newline)))
|
||||||
|
|
||||||
|
(check
|
||||||
|
(doit (make-lexical-token 'NUMBER #f 1)
|
||||||
|
(make-lexical-token 'NEWLINE #f #\newline)
|
||||||
|
(make-lexical-token 'NUMBER #f 2)
|
||||||
|
(make-lexical-token 'NEWLINE #f #\newline))
|
||||||
|
=> '((line 1 #\newline)
|
||||||
|
(line 2 #\newline)))
|
||||||
|
|
||||||
|
(check
|
||||||
|
(doit (make-lexical-token 'NUMBER #f 1)
|
||||||
|
(make-lexical-token 'NEWLINE #f #\newline)
|
||||||
|
(make-lexical-token 'NUMBER #f 2)
|
||||||
|
(make-lexical-token 'NEWLINE #f #\newline)
|
||||||
|
(make-lexical-token 'NUMBER #f 3)
|
||||||
|
(make-lexical-token 'NEWLINE #f #\newline))
|
||||||
|
=> '((line 1 #\newline)
|
||||||
|
(line 2 #\newline)
|
||||||
|
(line 3 #\newline)))
|
||||||
|
|
||||||
|
(check
|
||||||
|
(doit (make-lexical-token 'NUMBER #f 1)
|
||||||
|
(make-lexical-token 'NEWLINE #f #\newline)
|
||||||
|
(make-lexical-token 'NUMBER #f 2)
|
||||||
|
(make-lexical-token 'NEWLINE #f #\newline)
|
||||||
|
(make-lexical-token 'NUMBER #f 3)
|
||||||
|
(make-lexical-token 'NEWLINE #f #\newline)
|
||||||
|
(make-lexical-token 'NUMBER #f 41)
|
||||||
|
(make-lexical-token 'NUMBER #f 42)
|
||||||
|
(make-lexical-token 'NEWLINE #f #\newline))
|
||||||
|
=> '((line 1 #\newline)
|
||||||
|
(line 2 #\newline)
|
||||||
|
(line 3 #\newline)
|
||||||
|
(line 41 42 #\newline)))
|
||||||
|
|
||||||
|
;;; --------------------------------------------------------------------
|
||||||
|
;;; Successful error recovery.
|
||||||
|
|
||||||
|
(check
|
||||||
|
;;The BAD triggers an error, recovery happens, the first NEWLINE is
|
||||||
|
;;correctly parsed as recovery token; the second line is correct.
|
||||||
|
(let ((r (doit (make-lexical-token 'NUMBER #f 1)
|
||||||
|
(make-lexical-token 'BAD #f 'alpha)
|
||||||
|
(make-lexical-token 'NEWLINE #f #\newline)
|
||||||
|
(make-lexical-token 'NUMBER #f 2)
|
||||||
|
(make-lexical-token 'NEWLINE #f #\newline))))
|
||||||
|
(cons r *error*))
|
||||||
|
=> '(((recover #f #f)
|
||||||
|
(line 2 #\newline))
|
||||||
|
(error-handler "Syntax error: unexpected token : " . BAD)))
|
||||||
|
|
||||||
|
|
||||||
|
(check
|
||||||
|
;;The first BAD triggers an error, recovery happens skipping the
|
||||||
|
;;second and third BADs, the first NEWLINE is detected as
|
||||||
|
;;synchronisation token; the second line is correct.
|
||||||
|
(let ((r (doit (make-lexical-token 'NUMBER #f 1)
|
||||||
|
(make-lexical-token 'BAD #f 'alpha)
|
||||||
|
(make-lexical-token 'BAD #f 'beta)
|
||||||
|
(make-lexical-token 'BAD #f 'delta)
|
||||||
|
(make-lexical-token 'NEWLINE #f #\newline)
|
||||||
|
(make-lexical-token 'NUMBER #f 2)
|
||||||
|
(make-lexical-token 'NEWLINE #f #\newline))))
|
||||||
|
(cons r *error*))
|
||||||
|
=> '(((recover #f #f)
|
||||||
|
(line 2 #\newline))
|
||||||
|
(error-handler "Syntax error: unexpected token : " . BAD)))
|
||||||
|
|
||||||
|
;;; --------------------------------------------------------------------
|
||||||
|
;;; Failed error recovery.
|
||||||
|
|
||||||
|
(check
|
||||||
|
;;End-of-input is found after NUMBER.
|
||||||
|
(let ((r (doit (make-lexical-token 'NUMBER #f 1))))
|
||||||
|
(cons r *error*))
|
||||||
|
=> '(#f (error-handler "Syntax error: unexpected end of input")))
|
||||||
|
|
||||||
|
(check
|
||||||
|
;;The BAD triggers the error, the stack is rewind up to the start,
|
||||||
|
;;then end-of-input happens while trying to skip tokens until the
|
||||||
|
;;synchronisation one is found. End-of-input is an acceptable token
|
||||||
|
;;after the start.
|
||||||
|
(let ((r (doit (make-lexical-token 'NUMBER #f 1)
|
||||||
|
(make-lexical-token 'BAD #f 'alpha)
|
||||||
|
(make-lexical-token 'BAD #f 'beta)
|
||||||
|
(make-lexical-token 'BAD #f 'delta))))
|
||||||
|
(cons r *error*))
|
||||||
|
=> '(0 (error-handler "Syntax error: unexpected token : " . BAD)))
|
||||||
|
|
||||||
|
(check
|
||||||
|
;;The BAD triggers the error, the stack is rewind up to the start,
|
||||||
|
;;then end-of-input happens while trying to skip tokens until the
|
||||||
|
;;synchronisation one is found. End-of-input is an acceptable token
|
||||||
|
;;after the start.
|
||||||
|
(let ((r (doit (make-lexical-token 'BAD #f 'alpha))))
|
||||||
|
(cons r *error*))
|
||||||
|
=> '(0 (error-handler "Syntax error: unexpected token : " . BAD)))
|
||||||
|
|
||||||
|
;;; end of file
|
51  test-suite/lalr/test-lr-error-recovery-02.scm  Normal file
@@ -0,0 +1,51 @@
|
||||||
|
;;; test-lr-error-recovery-02.scm --
|
||||||
|
;;
|
||||||
|
;;Test error recovery policy when the synchronisation terminal has the
|
||||||
|
;;same category of the lookahead that raises the error.
|
||||||
|
;;
|
||||||
|
|
||||||
|
(load "common-test.scm")
|
||||||
|
|
||||||
|
(define (doit . tokens)
|
||||||
|
(let ((parser (lalr-parser (expect: 0)
|
||||||
|
(A B C)
|
||||||
|
(alphas (alpha) : $1
|
||||||
|
(alphas alpha) : $2)
|
||||||
|
(alpha (A B) : (list $1 $2)
|
||||||
|
(C) : $1
|
||||||
|
(error C) : 'error-form))))
|
||||||
|
(parser (make-lexer tokens) error-handler)))
|
||||||
|
|
||||||
|
;;; --------------------------------------------------------------------
|
||||||
|
;;; No error, just grammar tests.
|
||||||
|
|
||||||
|
(check
|
||||||
|
(doit (make-lexical-token 'A #f 1)
|
||||||
|
(make-lexical-token 'B #f 2))
|
||||||
|
=> '(1 2))
|
||||||
|
|
||||||
|
(check
|
||||||
|
(doit (make-lexical-token 'C #f 3))
|
||||||
|
=> '3)
|
||||||
|
|
||||||
|
;;; --------------------------------------------------------------------
|
||||||
|
;;; Successful error recovery.
|
||||||
|
|
||||||
|
(check
|
||||||
|
;;Error, recovery, end-of-input.
|
||||||
|
(let ((r (doit (make-lexical-token 'A #f 1)
|
||||||
|
(make-lexical-token 'C #f 3))))
|
||||||
|
(cons r *error*))
|
||||||
|
=> '(error-form (error-handler "Syntax error: unexpected token : " . C)))
|
||||||
|
|
||||||
|
(check
|
||||||
|
;;Error, recovery, correct parse of "A B".
|
||||||
|
(let ((r (doit (make-lexical-token 'A #f 1)
|
||||||
|
(make-lexical-token 'C #f 3)
|
||||||
|
(make-lexical-token 'A #f 1)
|
||||||
|
(make-lexical-token 'B #f 2))))
|
||||||
|
(cons r *error*))
|
||||||
|
=> '((1 2)
|
||||||
|
(error-handler "Syntax error: unexpected token : " . C)))
|
||||||
|
|
||||||
|
;;; end of file
|
40  test-suite/lalr/test-lr-no-clause.scm  Normal file
@@ -0,0 +1,40 @@
|
||||||
|
;;; test-lr-no-clause.scm --
|
||||||
|
;;
|
||||||
|
|
||||||
|
(load "common-test.scm")
|
||||||
|
|
||||||
|
(define (doit . tokens)
|
||||||
|
(let ((parser (lalr-parser (expect: 0)
|
||||||
|
(NUMBER COMMA NEWLINE)
|
||||||
|
|
||||||
|
(lines (lines line) : (list $2)
|
||||||
|
(line) : (list $1))
|
||||||
|
(line (NEWLINE) : #\newline
|
||||||
|
(NUMBER NEWLINE) : $1
|
||||||
|
;;this is a rule with no semantic action
|
||||||
|
(COMMA NUMBER NEWLINE)))))
|
||||||
|
(parser (make-lexer tokens) error-handler)))
|
||||||
|
|
||||||
|
(check
|
||||||
|
;;correct input
|
||||||
|
(doit (make-lexical-token 'NUMBER #f 1)
|
||||||
|
(make-lexical-token 'NEWLINE #f #\newline))
|
||||||
|
=> '(1))
|
||||||
|
|
||||||
|
(check
|
||||||
|
;;correct input with comma, which is a rule with no client form
|
||||||
|
(doit (make-lexical-token 'COMMA #f #\,)
|
||||||
|
(make-lexical-token 'NUMBER #f 1)
|
||||||
|
(make-lexical-token 'NEWLINE #f #\newline))
|
||||||
|
=> '(#(line-3 #\, 1 #\newline)))
|
||||||
|
|
||||||
|
(check
|
||||||
|
;;correct input with comma, which is a rule with no client form
|
||||||
|
(doit (make-lexical-token 'NUMBER #f 1)
|
||||||
|
(make-lexical-token 'NEWLINE #f #\newline)
|
||||||
|
(make-lexical-token 'COMMA #f #\,)
|
||||||
|
(make-lexical-token 'NUMBER #f 2)
|
||||||
|
(make-lexical-token 'NEWLINE #f #\newline))
|
||||||
|
=> '(#(line-3 #\, 2 #\newline)))
|
||||||
|
|
||||||
|
;;; end of file
|
119  test-suite/lalr/test-lr-script-expression.scm  Normal file
@@ -0,0 +1,119 @@
|
||||||
|
;;; test-lr-script-expression.scm --
|
||||||
|
;;
|
||||||
|
;;Parse scripts, each line an expression.
|
||||||
|
;;
|
||||||
|
|
||||||
|
(load "common-test.scm")
|
||||||
|
|
||||||
|
(define (doit . tokens)
|
||||||
|
(let ((parser (lalr-parser (expect: 0)
|
||||||
|
(N O C T (left: A) (left: M) (nonassoc: U))
|
||||||
|
|
||||||
|
(script (lines) : (reverse $1))
|
||||||
|
|
||||||
|
(lines (lines line) : (cons $2 $1)
|
||||||
|
(line) : (list $1))
|
||||||
|
|
||||||
|
(line (T) : #\newline
|
||||||
|
(E T) : $1
|
||||||
|
(error T) : (list 'error-clause $2))
|
||||||
|
|
||||||
|
(E (N) : $1
|
||||||
|
(E A E) : ($2 $1 $3)
|
||||||
|
(E M E) : ($2 $1 $3)
|
||||||
|
(A E (prec: U)) : ($1 $2)
|
||||||
|
(O E C) : $2))))
|
||||||
|
(parser (make-lexer tokens) error-handler)))
|
||||||
|
|
||||||
|
;;; --------------------------------------------------------------------
|
||||||
|
;;; Correct input
|
||||||
|
|
||||||
|
(check
|
||||||
|
(doit (make-lexical-token 'T #f #\newline))
|
||||||
|
=> '(#\newline))
|
||||||
|
|
||||||
|
(check
|
||||||
|
(doit (make-lexical-token 'N #f 1)
|
||||||
|
(make-lexical-token 'T #f #\newline))
|
||||||
|
=> '(1))
|
||||||
|
|
||||||
|
(check
|
||||||
|
(doit (make-lexical-token 'N #f 1)
|
||||||
|
(make-lexical-token 'A #f +)
|
||||||
|
(make-lexical-token 'N #f 2)
|
||||||
|
(make-lexical-token 'T #f #\newline))
|
||||||
|
=> '(3))
|
||||||
|
|
||||||
|
(check
|
||||||
|
(doit (make-lexical-token 'N #f 1)
|
||||||
|
(make-lexical-token 'A #f +)
|
||||||
|
(make-lexical-token 'N #f 2)
|
||||||
|
(make-lexical-token 'M #f *)
|
||||||
|
(make-lexical-token 'N #f 3)
|
||||||
|
(make-lexical-token 'T #f #\newline))
|
||||||
|
=> '(7))
|
||||||
|
|
||||||
|
(check
|
||||||
|
(doit (make-lexical-token 'O #f #\()
|
||||||
|
(make-lexical-token 'N #f 1)
|
||||||
|
(make-lexical-token 'A #f +)
|
||||||
|
(make-lexical-token 'N #f 2)
|
||||||
|
(make-lexical-token 'C #f #\))
|
||||||
|
(make-lexical-token 'M #f *)
|
||||||
|
(make-lexical-token 'N #f 3)
|
||||||
|
(make-lexical-token 'T #f #\newline))
|
||||||
|
=> '(9))
|
||||||
|
|
||||||
|
(check
|
||||||
|
(doit (make-lexical-token 'O #f #\()
|
||||||
|
(make-lexical-token 'N #f 1)
|
||||||
|
(make-lexical-token 'A #f +)
|
||||||
|
(make-lexical-token 'N #f 2)
|
||||||
|
(make-lexical-token 'C #f #\))
|
||||||
|
(make-lexical-token 'M #f *)
|
||||||
|
(make-lexical-token 'N #f 3)
|
||||||
|
(make-lexical-token 'T #f #\newline)
|
||||||
|
|
||||||
|
(make-lexical-token 'N #f 4)
|
||||||
|
(make-lexical-token 'M #f /)
|
||||||
|
(make-lexical-token 'N #f 5)
|
||||||
|
(make-lexical-token 'T #f #\newline))
|
||||||
|
=> '(9 4/5))
|
||||||
|
|
||||||
|
;;; --------------------------------------------------------------------
|
||||||
|
|
||||||
|
(check
|
||||||
|
;;Successful error recovery.
|
||||||
|
(doit (make-lexical-token 'O #f #\()
|
||||||
|
(make-lexical-token 'N #f 1)
|
||||||
|
(make-lexical-token 'A #f +)
|
||||||
|
(make-lexical-token 'N #f 2)
|
||||||
|
(make-lexical-token 'M #f *)
|
||||||
|
(make-lexical-token 'N #f 3)
|
||||||
|
(make-lexical-token 'T #f #\newline)
|
||||||
|
|
||||||
|
(make-lexical-token 'N #f 4)
|
||||||
|
(make-lexical-token 'M #f /)
|
||||||
|
(make-lexical-token 'N #f 5)
|
||||||
|
(make-lexical-token 'T #f #\newline))
|
||||||
|
=> '((error-clause #f)
|
||||||
|
4/5))
|
||||||
|
|
||||||
|
(check
|
||||||
|
;;Unexpected end of input.
|
||||||
|
(let ((r (doit (make-lexical-token 'N #f 1)
|
||||||
|
(make-lexical-token 'A #f +)
|
||||||
|
(make-lexical-token 'N #f 2))))
|
||||||
|
(cons r *error*))
|
||||||
|
=> '(#f (error-handler "Syntax error: unexpected end of input")))
|
||||||
|
|
||||||
|
(check
|
||||||
|
;;Unexpected end of input.
|
||||||
|
(let ((r (doit (make-lexical-token 'N #f 1)
|
||||||
|
(make-lexical-token 'A #f +)
|
||||||
|
(make-lexical-token 'T #f #\newline))))
|
||||||
|
(cons r *error*))
|
||||||
|
=> '(((error-clause #f))
|
||||||
|
(error-handler "Syntax error: unexpected token : " . T)))
|
||||||
|
|
||||||
|
;;; end of file
|
59  test-suite/lalr/test-lr-single-expressions.scm  Normal file
@@ -0,0 +1,59 @@
|
||||||
|
;;; test-lr-single-expressions.scm --
|
||||||
|
;;
|
||||||
|
;;Grammar accepting single expressions.
|
||||||
|
;;
|
||||||
|
|
||||||
|
(load "common-test.scm")
|
||||||
|
|
||||||
|
(define (doit . tokens)
|
||||||
|
(let ((parser (lalr-parser (expect: 0)
|
||||||
|
(N O C (left: A) (left: M) (nonassoc: U))
|
||||||
|
|
||||||
|
(E (N) : $1
|
||||||
|
(E A E) : ($2 $1 $3)
|
||||||
|
(E M E) : ($2 $1 $3)
|
||||||
|
(A E (prec: U)) : ($1 $2)
|
||||||
|
(O E C) : $2))))
|
||||||
|
(parser (make-lexer tokens) error-handler)))
|
||||||
|
|
||||||
|
;;; --------------------------------------------------------------------
|
||||||
|
|
||||||
|
(check ;correct input
|
||||||
|
(doit (make-lexical-token 'N #f 1))
|
||||||
|
=> 1)
|
||||||
|
|
||||||
|
(check ;correct input
|
||||||
|
(doit (make-lexical-token 'A #f -)
|
||||||
|
(make-lexical-token 'N #f 1))
|
||||||
|
=> -1)
|
||||||
|
|
||||||
|
(check ;correct input
|
||||||
|
(doit (make-lexical-token 'A #f +)
|
||||||
|
(make-lexical-token 'N #f 1))
|
||||||
|
=> 1)
|
||||||
|
|
||||||
|
(check ;correct input
|
||||||
|
(doit (make-lexical-token 'N #f 1)
|
||||||
|
(make-lexical-token 'A #f +)
|
||||||
|
(make-lexical-token 'N #f 2))
|
||||||
|
=> 3)
|
||||||
|
|
||||||
|
(check ;correct input
|
||||||
|
(doit (make-lexical-token 'N #f 1)
|
||||||
|
(make-lexical-token 'A #f +)
|
||||||
|
(make-lexical-token 'N #f 2)
|
||||||
|
(make-lexical-token 'M #f *)
|
||||||
|
(make-lexical-token 'N #f 3))
|
||||||
|
=> 7)
|
||||||
|
|
||||||
|
(check ;correct input
|
||||||
|
(doit (make-lexical-token 'O #f #\()
|
||||||
|
(make-lexical-token 'N #f 1)
|
||||||
|
(make-lexical-token 'A #f +)
|
||||||
|
(make-lexical-token 'N #f 2)
|
||||||
|
(make-lexical-token 'C #f #\))
|
||||||
|
(make-lexical-token 'M #f *)
|
||||||
|
(make-lexical-token 'N #f 3))
|
||||||
|
=> 9)
|
||||||
|
|
||||||
|
;;; end of file
|