fix a bug in the module lexer; it was returning the wrong length for the tokens
it creates when the #lang line isn't well-formed (e.g. "#lang racke"). closes PR 12399
parent c4a8cd65fb
commit 11994bd4f8
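A minimal sketch (not part of the commit) of how the bug shows up through the
syntax-color/module-lexer API, assuming the usual (port, offset, mode) lexer
calling convention; the concrete input and positions are taken from the test
change at the end of this diff:

  #lang racket
  (require syntax-color/module-lexer)

  ;; Lex a #lang line with a bogus language name; positions are 1-based.
  (define in (open-input-string "#lang at-exp racket/baseBOGUS"))
  (port-count-lines! in)
  (define-values (lexeme type data start end backup mode)
    (module-lexer in 0 #f))
  ;; The input is 29 characters long, so the 'error token must end at
  ;; position 30 (= 1 + 29); before this fix the lexer reported 31,
  ;; i.e. a token one character longer than the text it had consumed.
  (list type start end)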
@@ -95,7 +95,7 @@ added get-regions
     (define pairs '())

     ;; ---------------------- Lexing state ------------------------------

     (define-struct lexer-state
       (start-pos
        end-pos
@@ -120,10 +120,10 @@ added get-regions
        parens
        )
       #:mutable #:transparent)

     ;; The lexer
     (define get-token #f)

     (define/private (make-new-lexer-state start end)
       (make-lexer-state start
                         end
@@ -135,11 +135,11 @@ added get-regions
                         start
                         #f
                         (new paren-tree% (matches pairs))))

     (define lexer-states (list (make-new-lexer-state 0 'end)))
     (define/public (get-up-to-date?)
       (andmap lexer-state-up-to-date? lexer-states))

     (define/private (find-ls pos)
       (ormap (lambda (ls)
                (and (<= (lexer-state-start-pos ls)
@@ -150,7 +150,7 @@ added get-regions
                         end)))
              ls))
             lexer-states))

     ;; ---------------------- Interactions state ------------------------
     ;; The positions right before and right after the area to be tokenized

@@ -175,42 +175,42 @@ added get-regions
       (let loop ([regions _regions]
                  [pos 0])
         (cond
           [(null? regions) (void)]
           [(pair? regions)
            (let ([region (car regions)])
              (unless (and (list? region)
                           (= 2 (length region))
                           (number? (list-ref region 0))
                           (or (number? (list-ref region 1))
                               (and (null? (cdr regions))
                                    (eq? 'end (list-ref region 1)))))
                (error 'reset-regions
                       "got a region that is not a list of two numbers (or 'end if it is the last region): ~e, all regions ~e"
                       region
                       regions))
              (unless (and (<= pos (list-ref region 0))
                           (or (eq? 'end (list-ref region 1))
                               (<= (list-ref region 0) (list-ref region 1))))
                (error 'reset-regions "found regions with numbers out of order ~e" regions))
              (loop (cdr regions) (list-ref region 1)))]
           [else
            (error 'reset-regions "expected a list of regions, got ~e" regions)]))

       (set! lexer-states
             (let loop ([old lexer-states]
                        [new _regions])
               (cond
                 [(null? new) null]
                 [(and (pair? old)
                       (equal? (caar new) (lexer-state-start-pos (car old)))
                       (equal? (cadar new) (lexer-state-end-pos (car old))))
                  (cons (car old)
                        (loop (cdr old) (cdr new)))]
                 [else
                  (cons (make-new-lexer-state (caar new) (cadar new))
                        (loop null (cdr new)))])))
       (update-lexer-state-observers))


     (define/public (get-regions)
       (map (lambda (ls)
@@ -290,62 +290,68 @@ added get-regions
           (sync-invalid ls))))

     (define/private (re-tokenize ls in in-start-pos in-lexer-mode enable-suspend)
-      (let-values ([(lexeme type data new-token-start new-token-end backup-delta new-lexer-mode)
-                    (begin
-                      (enable-suspend #f)
-                      (begin0
-                       (get-token in in-start-pos in-lexer-mode)
-                       (enable-suspend #t)))])
+      (enable-suspend #f)
+      ;(define-values (_line1 _col1 pos-before) (port-next-location in))
+      (define-values (lexeme type data new-token-start new-token-end backup-delta new-lexer-mode)
+        (get-token in in-start-pos in-lexer-mode))
+      ;(define-values (_line2 _col2 pos-after) (port-next-location in))
+      (enable-suspend #t)
       (unless (eq? 'eof type)
         (unless (exact-nonnegative-integer? new-token-start)
           (error 'color:text<%> "expected an exact nonnegative integer for the token start, got ~e" new-token-start))
         (unless (exact-nonnegative-integer? new-token-end)
           (error 'color:text<%> "expected an exact nonnegative integer for the token end, got ~e" new-token-end))
         (unless (exact-nonnegative-integer? backup-delta)
           (error 'color:text<%> "expected an exact nonnegative integer for the backup delta, got ~e" backup-delta))
         (unless (0 . < . (- new-token-end new-token-start))
           (error 'color:text<%> "expected the distance between the start and end position for each token to be positive, but start was ~e and end was ~e" new-token-start new-token-end))
         (enable-suspend #f)
-        #; (printf "~a at ~a to ~a\n" lexeme (+ in-start-pos (sub1 new-token-start))
+        #; (printf "~s at ~a to ~a\n" lexeme (+ in-start-pos (sub1 new-token-start))
            (+ in-start-pos (sub1 new-token-end)))
         (let ((len (- new-token-end new-token-start)))
-          (set-lexer-state-current-pos! ls (+ len (lexer-state-current-pos ls)))
-          (set-lexer-state-current-lexer-mode! ls new-lexer-mode)
-          (sync-invalid ls)
-          (when (and should-color? (should-color-type? type) (not frozen?))
-            (set! colors
-                  (cons
-                   (let* ([style-name (token-sym->style type)]
-                          (color (send (get-style-list) find-named-style style-name))
-                          (sp (+ in-start-pos (sub1 new-token-start)))
-                          (ep (+ in-start-pos (sub1 new-token-end))))
-                     (λ ()
-                       (change-style color sp ep #f)))
-                   colors)))
-          ;; Using the non-spec version takes 3 times as long as the spec
-          ;; version. In other words, the new greatly outweighs the tree
-          ;; operations.
-          ;;(insert-last! tokens (new token-tree% (length len) (data type)))
-          (insert-last-spec! (lexer-state-tokens ls) len (make-data type new-lexer-mode backup-delta))
-          #; (show-tree (lexer-state-tokens ls))
-          (send (lexer-state-parens ls) add-token data len)
-          (cond
-            ((and (not (send (lexer-state-invalid-tokens ls) is-empty?))
-                  (= (lexer-state-invalid-tokens-start ls)
-                     (lexer-state-current-pos ls))
-                  (equal? new-lexer-mode
-                          (lexer-state-invalid-tokens-mode ls)))
-             (send (lexer-state-invalid-tokens ls) search-max!)
-             (send (lexer-state-parens ls) merge-tree
-                   (send (lexer-state-invalid-tokens ls) get-root-end-position))
-             (insert-last! (lexer-state-tokens ls)
-                           (lexer-state-invalid-tokens ls))
-             (set-lexer-state-invalid-tokens-start! ls +inf.0)
-             (enable-suspend #t))
-            (else
-             (enable-suspend #t)
-             (re-tokenize ls in in-start-pos new-lexer-mode enable-suspend)))))))
+          #;
+          (unless (= len (- pos-after pos-before))
+            ;; this check requires the two calls to port-next-location to be also uncommented
+            ;; when this check fails, bad things can happen non-deterministically later on
+            (eprintf "pos changed bad ; len ~s pos-before ~s pos-after ~s (token ~s mode ~s)\n"
+                     len pos-before pos-after lexeme new-lexer-mode))
+          (set-lexer-state-current-pos! ls (+ len (lexer-state-current-pos ls)))
+          (set-lexer-state-current-lexer-mode! ls new-lexer-mode)
+          (sync-invalid ls)
+          (when (and should-color? (should-color-type? type) (not frozen?))
+            (set! colors
+                  (cons
+                   (let* ([style-name (token-sym->style type)]
+                          (color (send (get-style-list) find-named-style style-name))
+                          (sp (+ in-start-pos (sub1 new-token-start)))
+                          (ep (+ in-start-pos (sub1 new-token-end))))
+                     (λ ()
+                       (change-style color sp ep #f)))
+                   colors)))
+          ;; Using the non-spec version takes 3 times as long as the spec
+          ;; version. In other words, the new greatly outweighs the tree
+          ;; operations.
+          ;;(insert-last! tokens (new token-tree% (length len) (data type)))
+          (insert-last-spec! (lexer-state-tokens ls) len (make-data type new-lexer-mode backup-delta))
+          #; (show-tree (lexer-state-tokens ls))
+          (send (lexer-state-parens ls) add-token data len)
+          (cond
+            [(and (not (send (lexer-state-invalid-tokens ls) is-empty?))
+                  (= (lexer-state-invalid-tokens-start ls)
+                     (lexer-state-current-pos ls))
+                  (equal? new-lexer-mode
+                          (lexer-state-invalid-tokens-mode ls)))
+             (send (lexer-state-invalid-tokens ls) search-max!)
+             (send (lexer-state-parens ls) merge-tree
+                   (send (lexer-state-invalid-tokens ls) get-root-end-position))
+             (insert-last! (lexer-state-tokens ls)
+                           (lexer-state-invalid-tokens ls))
+             (set-lexer-state-invalid-tokens-start! ls +inf.0)
+             (enable-suspend #t)]
+            [else
+             (enable-suspend #t)
+             (re-tokenize ls in in-start-pos new-lexer-mode enable-suspend)]))))

     (define/private (show-tree t)
       (printf "Tree:\n")
       (send t search-min!)
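The commented-out check added to re-tokenize above documents the invariant the
module-lexer fix restores: a lexer must consume exactly as many characters from
the port as the token span it reports, or the colorer's current-pos drifts and
later updates misbehave non-deterministically. A standalone sketch of that check
as a wrapper (a hypothetical helper under that assumption, not code from this
commit):

  #lang racket
  ;; Wrap a syntax-color lexer and report tokens whose span disagrees
  ;; with the number of characters actually consumed from the port.
  (define ((checked-lexer get-token) in offset mode)
    (define-values (_l1 _c1 pos-before) (port-next-location in))
    (define-values (lexeme type data start end backup new-mode)
      (get-token in offset mode))
    (define-values (_l2 _c2 pos-after) (port-next-location in))
    (unless (or (eq? type 'eof)
                (= (- end start) (- pos-after pos-before)))
      (eprintf "token span ~s does not match ~s consumed chars (token ~s)\n"
               (- end start) (- pos-after pos-before) lexeme))
    (values lexeme type data start end backup new-mode))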
@@ -356,7 +362,7 @@ added get-regions
         (printf " ~s\n" (list s e))
         (send t search! e)
         (loop s)))))

     (define/private (split-backward ls valid-tree pos)
       (let loop ([pos pos][valid-tree valid-tree][old-invalid-tree #f])
         (let-values (((orig-token-start orig-token-end valid-tree invalid-tree orig-data)
@@ -373,63 +379,63 @@ added get-regions
       (unless (lexer-state-up-to-date? ls)
         (sync-invalid ls))
       (cond
         ((lexer-state-up-to-date? ls)
          (let-values (((orig-token-start orig-token-end valid-tree invalid-tree orig-data)
                        (split-backward ls (lexer-state-tokens ls) edit-start-pos)))
            (send (lexer-state-parens ls) split-tree orig-token-start)
            (set-lexer-state-invalid-tokens! ls invalid-tree)
            (set-lexer-state-tokens! ls valid-tree)
            (set-lexer-state-invalid-tokens-start!
             ls
             (if (send (lexer-state-invalid-tokens ls) is-empty?)
                 +inf.0
                 (+ (lexer-state-start-pos ls) orig-token-end change-length)))
            (set-lexer-state-invalid-tokens-mode! ls (and orig-data (data-lexer-mode orig-data)))
            (let ([start (+ (lexer-state-start-pos ls) orig-token-start)])
              (set-lexer-state-current-pos! ls start)
              (set-lexer-state-current-lexer-mode! ls
               (if (= start (lexer-state-start-pos ls))
                   #f
                   (begin
                     (send valid-tree search-max!)
                     (data-lexer-mode (send valid-tree get-root-data))))))
            (set-lexer-state-up-to-date?! ls #f)
            (update-lexer-state-observers)
            (queue-callback (λ () (colorer-callback)) #f)))
         ((>= edit-start-pos (lexer-state-invalid-tokens-start ls))
          (let-values (((tok-start tok-end valid-tree invalid-tree orig-data)
                        (split-backward ls (lexer-state-invalid-tokens ls) edit-start-pos)))
            (set-lexer-state-invalid-tokens! ls invalid-tree)
            (set-lexer-state-invalid-tokens-start!
             ls
             (+ (lexer-state-invalid-tokens-start ls) tok-end change-length))
            (set-lexer-state-invalid-tokens-mode! ls (and orig-data (data-lexer-mode orig-data)))))
         ((> edit-start-pos (lexer-state-current-pos ls))
          (set-lexer-state-invalid-tokens-start!
           ls
           (+ change-length (lexer-state-invalid-tokens-start ls))))
         (else
          (let-values (((tok-start tok-end valid-tree invalid-tree data)
                        (split-backward ls (lexer-state-tokens ls) edit-start-pos)))
            (send (lexer-state-parens ls) truncate tok-start)
            (set-lexer-state-tokens! ls valid-tree)
            (set-lexer-state-invalid-tokens-start! ls (+ change-length (lexer-state-invalid-tokens-start ls)))
            (let ([start (+ (lexer-state-start-pos ls) tok-start)])
              (set-lexer-state-current-pos! ls start)
              (set-lexer-state-current-lexer-mode!
               ls
               (if (= start (lexer-state-start-pos ls))
                   #f
                   (begin
                     (send valid-tree search-max!)
                     (data-lexer-mode (send valid-tree get-root-data))))))))))

     (define/private (do-insert/delete edit-start-pos change-length)
       (unless (or stopped? force-stop?)
         (let ([ls (find-ls edit-start-pos)])
           (when ls
             (do-insert/delete/ls ls edit-start-pos change-length)))))

     (define/private (do-insert/delete-all)
       (for-each (lambda (ls)
                   (do-insert/delete/ls ls (lexer-state-start-pos ls) 0))
@@ -447,35 +453,30 @@ added get-regions
         (set! tok-cor
               (coroutine
                (λ (enable-suspend)
                  (parameterize ((port-count-lines-enabled #t))
-                   (when (getenv "PLTDRDRTEST")
-                     (printf "colorer-driver: lexer-states ~s\n" lexer-states)
-                     (printf "colorer-driver: text ~s\n" (send this get-text)))
                    (for-each
                     (lambda (ls)
                       (re-tokenize ls
                                    (begin
                                      (enable-suspend #f)
                                      (begin0
                                       (open-input-text-editor this
                                                               (lexer-state-current-pos ls)
                                                               (lexer-state-end-pos ls)
                                                               (λ (x) #f))
                                       (enable-suspend #t)))
                                    (lexer-state-current-pos ls)
                                    (lexer-state-current-lexer-mode ls)
                                    enable-suspend))
                     lexer-states)))))
         (set! rev (get-revision-number)))
       (with-handlers ((exn:fail?
                        (λ (exn)
                          (parameterize ((print-struct #t))
-                           (when (getenv "PLTDRDRTEST")
-                             (printf "colorer-driver: error ~a\n" (and (exn? exn) (exn-message exn))))
                            ((error-display-handler)
                             (format "exception in colorer thread: ~s" exn)
                             exn))
                          (set! tok-cor #f))))
         #;(printf "begin lexing\n")
         (when (coroutine-run 10 tok-cor)
           (for-each (lambda (ls)
@@ -493,13 +494,13 @@ added get-regions

     (define/private (colorer-callback)
       (cond
         ((is-locked?)
          (set! restart-callback #t))
         (else
          (unless (in-edit-sequence?)
            (colorer-driver))
          (unless (andmap lexer-state-up-to-date? lexer-states)
            (queue-callback (λ () (colorer-callback)) #f)))))

     ;; Must not be called when the editor is locked
     (define/private (finish-now)
@@ -569,53 +570,53 @@ added get-regions
     ;; See docs
     (define/public thaw-colorer
       (lambda ((recolor? #t)
                (retokenize? #f))
         (when frozen?
           (set! frozen? #f)
           (cond
             (stopped?
              (stop-colorer))
             ((or force-recolor-after-freeze recolor?)
              (cond
                (retokenize?
                 (let ((tn token-sym->style)
                       (gt get-token)
                       (p pairs))
                   (stop-colorer (not should-color?))
                   (start-colorer tn gt p)))
                (else
                 (begin-edit-sequence #f #f)
                 (finish-now)
                 (when should-color?
                   (for-each
                    (lambda (ls)
                      (let ([tokens (lexer-state-tokens ls)]
                            [start-pos (lexer-state-start-pos ls)])
                        (send tokens for-each
                              (λ (start len data)
                                (let ([type (data-type data)])
                                  (when (should-color-type? type)
                                    (let ((color (send (get-style-list) find-named-style
                                                       (token-sym->style type)))
                                          (sp (+ start-pos start))
                                          (ep (+ start-pos (+ start len))))
                                      (change-style color sp ep #f))))))))
                    lexer-states))
                 (end-edit-sequence))))))))


     (define/private (toggle-color on?)
       (cond
         ((and frozen? (not (equal? on? should-color?)))
          (set! should-color? on?)
          (set! force-recolor-after-freeze #t))
         ((and (not should-color?) on?)
          (set! should-color? on?)
          (reset-tokens)
          (do-insert/delete-all))
         ((and should-color? (not on?))
          (set! should-color? on?)
          (clear-colors))))

     ;; see docs
     (define/public (force-stop-colorer stop?)
@@ -630,8 +631,8 @@ added get-regions

     (define mismatch-color (make-object color% "PINK"))
     (define/private (get-match-color) (preferences:get 'framework:paren-match-color))


     ;; higlight : number number number (or/c color any)
     ;; if color is a color, then it uses that color to higlight
     ;; Otherwise, it treats it like a boolean, where a true value
@@ -648,8 +649,8 @@ added get-regions
           (set! clear-old-locations
                 (let ([old clear-old-locations])
                   (λ ()
                     (old)
                     (off))))))

     (define in-match-parens? #f)

@@ -660,7 +661,7 @@ added get-regions
            (<= (+ (lexer-state-start-pos ls) error)
                (lexer-state-current-pos ls))
            (not (lexer-state-up-to-date? ls))))

     ;; If there is no match because the buffer isn't lexed far enough yet,
     ;; this will do nothing, but the edit sequence for changing the colors
     ;; will trigger a callback that will call this to try and match again.
@@ -707,7 +708,7 @@ added get-regions
                   (highlight-nested-region ls start-b end-b here)))))))))
         (end-edit-sequence)
         (set! in-match-parens? #f))))

     ;; highlight-nested-region : lexer-state number number number -> void
     ;; colors nested regions of parentheses.
     (define/private (highlight-nested-region ls orig-start orig-end here)
@@ -751,17 +752,17 @@ added get-regions
                        (send (lexer-state-parens ls) match-forward
                              (- position (lexer-state-start-pos ls)))))
           (cond
             ((f-match-false-error ls start end error)
              (colorer-driver)
              (do-forward-match position cutoff #f))
             ((and start end (not error))
              (let ((match-pos (+ (lexer-state-start-pos ls) end)))
                (cond
                  ((<= match-pos cutoff) match-pos)
                  (else #f))))
             ((and start end error) #f)
             (else
              (skip-past-token ls position))))))))

     (define/private (skip-past-token ls position)
       (let-values (((tok-start tok-end)
@@ -772,18 +773,18 @@ added get-regions
                      (values (send (lexer-state-tokens ls) get-root-start-position)
                              (send (lexer-state-tokens ls) get-root-end-position)))))
         (cond
           ((or (send (lexer-state-parens ls) is-close-pos? tok-start)
                (= (+ (lexer-state-start-pos ls) tok-end) position))
            #f)
           (else
            (+ (lexer-state-start-pos ls) tok-end)))))

     ;; See docs
     (define/public (backward-match position cutoff)
       (let ((x (internal-backward-match position cutoff)))
         (cond
           ((eq? x 'open) #f)
           (else x))))

     (define/private (internal-backward-match position cutoff)
       (when stopped?
@@ -796,27 +797,27 @@ added get-regions
         (let-values (((start end error)
                       (send (lexer-state-parens ls) match-backward (- position start-pos))))
           (cond
             ((and start end (not error))
              (let ((match-pos (+ start-pos start)))
                (cond
                  ((>= match-pos cutoff) match-pos)
                  (else #f))))
             ((and start end error) #f)
             (else
              (let-values (((tok-start tok-end)
                            (begin
                              (send (lexer-state-tokens ls) search!
                                    (if (> position start-pos)
                                        (- position start-pos 1)
                                        0))
                              (values (send (lexer-state-tokens ls) get-root-start-position)
                                      (send (lexer-state-tokens ls) get-root-end-position)))))
                (cond
                  ((or (send (lexer-state-parens ls) is-open-pos? tok-start)
                       (= (+ start-pos tok-start) position))
                   'open)
                  (else
                   (+ start-pos tok-start))))))))))

     ;; See docs
     (define/public (backward-containing-sexp position cutoff)
@@ -825,9 +826,9 @@ added get-regions
       (let loop ((cur-pos position))
         (let ((p (internal-backward-match cur-pos cutoff)))
           (cond
             ((eq? 'open p) cur-pos)
             ((not p) #f)
             (else (loop p))))))

     ;; Determines whether a position is a 'comment, 'string, etc.
     (define/public (classify-position position)
@@ -836,7 +837,7 @@ added get-regions
       (let ([root-data (send tokens get-root-data)])
         (and root-data
              (data-type root-data)))))

     (define/public (get-token-range position)
       (define-values (tokens ls) (get-tokens-at-position 'get-token-range position))
       (values (and tokens ls
@@ -845,15 +846,15 @@ added get-regions
               (and tokens ls
                    (+ (lexer-state-start-pos ls)
                       (send tokens get-root-end-position)))))

     (define/private (get-tokens-at-position who position)
       (when stopped?
         (error who "called on a color:text<%> whose colorer is stopped."))
       (let ([ls (find-ls position)])
         (if ls
             (let ([tokens (lexer-state-tokens ls)])
               (tokenize-to-pos ls position)
               (send tokens search! (- position (lexer-state-start-pos ls)))
               (values tokens ls))
             (values #f #f))))

@@ -874,55 +875,55 @@ added get-regions
              [end-pos (lexer-state-end-pos ls)]
              [tokens (lexer-state-tokens ls)])
         (cond
           ((and (eq? direction 'forward)
                 (>= position (if (eq? 'end end-pos) (last-position) end-pos)))
            position)
           ((and (eq? direction 'backward) (<= position start-pos))
            position)
           (else
            (tokenize-to-pos ls position)
            (send tokens search! (- (if (eq? direction 'backward) (sub1 position) position)
                                    start-pos))
            (cond
              ((and (send tokens get-root-data)
                    (or (eq? 'white-space (data-type (send tokens get-root-data)))
                        (and comments? (eq? 'comment (data-type (send tokens get-root-data))))))
               (skip-whitespace (+ start-pos
                                   (if (eq? direction 'forward)
                                       (send tokens get-root-end-position)
                                       (send tokens get-root-start-position)))
                                direction
                                comments?))
              (else position))))))))

     (define/private (get-close-paren pos closers continue-after-non-paren?)
       (cond
         ((null? closers) #f)
         (else
          (let* ((c (car closers))
                 (l (string-length c)))
            (let ([ls (find-ls pos)])
              (if ls
                  (let ([start-pos (lexer-state-start-pos ls)])
                    (insert c pos)
                    (let ((cls (classify-position pos)))
                      (if (eq? cls 'parenthesis)
                          (let ((m (backward-match (+ l pos) start-pos)))
                            (cond
                              ((and m
                                    (send (lexer-state-parens ls) is-open-pos? (- m start-pos))
                                    (send (lexer-state-parens ls) is-close-pos? (- pos start-pos)))
                               (delete pos (+ l pos))
                               c)
                              (else
                               (delete pos (+ l pos))
                               (get-close-paren pos (cdr closers) #t))))
                          (begin
                            (delete pos (+ l pos))
                            (if continue-after-non-paren?
                                (get-close-paren pos (cdr closers) #t)
                                #f)))))
                  c))))))

     (inherit insert delete flash-on on-default-char)
     ;; See docs
@@ -75,7 +75,7 @@ to delegate to the scheme-lexer (in the 'no-lang-line mode).
        ;; sync ports
        (for ([i (in-range (file-position in) (file-position p))])
          (read-byte-or-special in))
-       (values lexeme 'error data 1 (+ end-pos 1) 0 'no-lang-line)]
+       (values lexeme 'error data 1 end-pos 0 'no-lang-line)]
       [else
        (for ([i (in-range (file-position in) (file-position lexer-port))])
          (read-byte-or-special in))
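This one-line change is the fix named in the commit message: in the fallback for
an ill-formed #lang line, the lexer consumed input only up to end-pos but
reported the error token as ending at (+ end-pos 1), overstating its length by
one character. A small worked illustration using the numbers from the test
change below (end-pos = 30 here is inferred from that test, 1-based positions):

  (define start-pos 1)
  (define end-pos 30)          ; position just past the last character read
  (- (+ end-pos 1) start-pos)  ; old span: 30, one char more than was consumed
  (- end-pos start-pos)        ; new span: 29 = (string-length "#lang at-exp racket/baseBOGUS")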
@@ -62,7 +62,7 @@
    `(("#lang" other 1 25 #f)
      (,eof eof 25 25 ((proc scribble-lexer) . #f))))
   (check-equal? (lex "#lang at-exp racket/baseBOGUS" #t)
-                `(("#lang at-exp" error 1 31 #f)
+                `(("#lang at-exp" error 1 30 #f)
                   (,eof eof #f #f no-lang-line)))
   (check same?
          (lex "#lang at-exp racket/base\n1\n" #t)