style(3p/lisp): expand tabs in npg, mime4cl and sclf

Done using

    find third_party/lisp/{sclf,mime4cl,npg} \
      -name '*.lisp' -or -name '*.asd' \
      -exec bash -c 'expand -i -t 8 "$0" | sponge "$0"' {} \;

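The pipeline rewrites each file in place: expand -i -t 8 converts leading
tabs to spaces with 8-column tab stops (-i leaves tabs after the first
non-blank character alone), and sponge from moreutils soaks up the whole
output before overwriting the file, so expand never reads a file that is
already being truncated. A minimal sketch on a single, hypothetical file:

    # hypothetical file name, for illustration only
    printf '\tfoo\n' > example.lisp
    # -i: only convert leading tabs; -t 8: use 8-column tab stops
    expand -i -t 8 example.lisp | sponge example.lisp
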
Change-Id: If84afac9c1d5cbc74e137a5aa0ae61472f0f1e90
Reviewed-on: https://cl.tvl.fyi/c/depot/+/5066
Tested-by: BuildkiteCI
Reviewed-by: sterni <sternenseemann@systemli.org>
sterni 2022-01-19 14:39:58 +01:00
parent 40014c70b3
commit 25cb0ad32f
25 changed files with 2467 additions and 2467 deletions

@@ -43,9 +43,9 @@ Tune this if your grammar is unusually complex.")
(when *debug*
(format *debug* "reducing ~S on ~S~%" production arguments))
(flet ((safe-token-value (token)
(if (token-p token)
(token-value token)
token)))
(apply (prod-action production) (mapcar #'safe-token-value arguments))))
(defgeneric later-position (pos1 pos2)
@@ -75,120 +75,120 @@ supposed to specialise this method."))
Return the reduced values according to the nonterminal actions. Raise
an error on failure."
(declare (type grammar grammar)
(type symbol start))
(labels
((match-token (expected token)
(when *debug*
(format *debug* "match-token ~S ~S -> " expected token))
(let ((res (cond ((symbolp expected)
;; non-constant terminal (like identifiers)
(eq expected (token-type token)))
((and (stringp expected)
(stringp (token-value token)))
;; string constant terminal
(funcall (the function (grammar-equal-p grammar)) expected (token-value token)))
((functionp expected)
;; custom equality predicate (must be able
;; to deal with token objects)
(funcall expected token))
;; all the rest
(t (equal expected (token-value token))))))
(when *debug*
(format *debug* "~Amatched~%" (if res "" "not ")))
res))
(match (expected matched #+debug depth)
(declare (list expected matched)
#+debug (fixnum depth))
(let ((first-expected (car expected)))
(cond #+debug ((> depth *maximum-recursion-depth*)
(error "endless recursion on ~A ~A at ~A expecting ~S"
(token-type (car matched)) (token-value (car matched))
(token-position (car matched)) expected))
((eq first-expected :any)
(match (cdr expected) (cdr matched) #+debug depth))
;; This is a trick to obtain partial parses. When we
;; reach this expected token we assume the parsing
;; has succeeded and return the remaining tokens as
;; part of the match.
((eq first-expected :rest)
;; we could be at the end of input so we check this
(unless (cdr matched)
(setf (cdr matched) (list :rest)))
(list nil nil))
((rule-p first-expected)
;; If it's a rule, then we try to match all its
;; productions. We return the first that succeeds.
(loop
for production in (rule-productions first-expected)
for production-tokens of-type list = (prod-tokens production)
with last-error-position = nil
with last-error = nil
for (error-position error-descr) =
(progn
(when *debug*
(format *debug* "trying to match ~A: ~S~%"
(rule-name first-expected) production-tokens))
(match (append production-tokens (cdr expected)) matched #+debug (1+ depth)))
do (cond ((not error-position)
(return (let ((args-count (prod-tokens-length production)))
(setf (cdr matched)
(cons (reduce-production
production
(subseq (the list (cdr matched)) 0 args-count))
(nthcdr (1+ args-count) matched)))
(list nil nil))))
((or (not last-error)
(later-position error-position last-error-position))
(setf last-error-position error-position
last-error error-descr)))
;; if everything fails return the "best" error
finally (return (list last-error-position
(if *debug*
#'(lambda ()
(format nil "~A, trying to match ~A"
(funcall (the function last-error))
(rule-name first-expected)))
last-error)))))
(t
;; if necessary load the next tokens
(when (null (cdr matched))
(setf (cdr matched) (read-next-tokens tokenizer)))
(cond ((and (or (null expected) (eq first-expected :eof))
(null (cdr matched)))
;; This point is reached only once for each complete
;; parsing. The expected tokens and the input
;; tokens have been exhausted at the same time.
;; Hence the parsing has succeeded.
(setf (cdr matched) (list :eof))
(list nil nil))
((null expected)
;; Garbage at end of parsing. This may mean that we
;; have considered a production completed too soon.
(list (token-position (car matched))
#'(lambda ()
"garbage at end of parsing")))
((null (cdr matched))
;; EOF error
(list :eof
#'(lambda ()
(format nil "end of input expecting ~S" expected))))
(t ;; normal token
(let ((first-token (cadr matched)))
(if (match-token first-expected first-token)
(match (cdr expected) (cdr matched) #+debug depth)
;; failed: we return the error
(list (token-position first-token)
#'(lambda ()
(format nil "expected ~S but got ~S ~S"
first-expected (token-type first-token)
(token-value first-token)))))))))))))
(declare (inline match-token))
(let ((result (list :head)))
(destructuring-bind (error-position error)
(match (list (find-rule start (grammar-rules grammar))) result #+debug 0)
(when error-position
(error "~A at ~A~%" (funcall (the function error)) error-position))
(cadr result)))))
(defgeneric terminals-in-grammar (grammar-or-hashtable)
(:documentation
@@ -199,11 +199,11 @@ an error on failure.
for rule being each hash-value of grammar
with terminals = '()
do (loop
for prod in (rule-productions rule)
do (loop
for tok in (prod-tokens prod)
when (symbolp tok)
do (pushnew tok terminals)))
finally (return terminals)))
(defmethod terminals-in-grammar ((grammar grammar))
@@ -211,9 +211,9 @@ an error on failure.
(defun print-grammar-figures (grammar &optional (stream *standard-output*))
(format stream "rules: ~A~%constant terminals: ~A~%variable terminals: ~S~%"
(hash-table-count (grammar-rules grammar))
(hash-table-count (grammar-keywords grammar))
(terminals-in-grammar (grammar-rules grammar))))
(defun grammar-keyword-p (keyword grammar)