It appears that the colorer was always creating a new port to read
from each time it started on a new event boundary. (This means that in
the old (5.3) version of the colorer, it also created a new co-routine
on each event boundary — in other words, most of the reason one would
want co-routines here was bogus.)

So, refactor the code to just always do this, and eliminate a bunch of
set!s and private fields in favor of simply passing arguments, like sane
code does. (We can't eliminate all of the state, because we still need
to be able to abort, and thus all calls must be tail calls.)
This commit is contained in:
Robby Findler 2012-11-02 09:59:29 -05:00
parent 95841b9303
commit 4ead534227

View File

@ -236,11 +236,11 @@ added get-regions
(start-colorer token-sym->style get-token pairs))) (start-colorer token-sym->style get-token pairs)))
;; ---------------------- Multi-threading --------------------------- ;; ---------------------- Multi-threading ---------------------------
;; If there is some incomplete coloring waiting to happen
(define colorer-pending? #f)
;; The editor revision when the last coloring was started ;; The editor revision when the last coloring was started
(define rev #f) (define revision-when-started-parsing #f)
;; The editor revision when after the last edit to the buffer
(define revision-after-last-edit #f)
(inherit change-style begin-edit-sequence end-edit-sequence highlight-range (inherit change-style begin-edit-sequence end-edit-sequence highlight-range
get-style-list in-edit-sequence? get-start-position get-end-position get-style-list in-edit-sequence? get-start-position get-end-position
@ -272,8 +272,7 @@ added get-regions
(update-lexer-state-observers) (update-lexer-state-observers)
(set! restart-callback #f) (set! restart-callback #f)
(set! force-recolor-after-freeze #f) (set! force-recolor-after-freeze #f)
(set! colorer-pending? #f) (set! revision-when-started-parsing #f))
(set! rev #f))
;; Discard extra tokens at the first of invalid-tokens ;; Discard extra tokens at the first of invalid-tokens
(define/private (sync-invalid ls) (define/private (sync-invalid ls)
@ -290,46 +289,38 @@ added get-regions
(set-lexer-state-invalid-tokens-mode! ls mode)) (set-lexer-state-invalid-tokens-mode! ls mode))
(sync-invalid ls)))) (sync-invalid ls))))
(define/private (start-re-tokenize start-time) (define/private (re-tokenize-move-to-next-ls start-time did-something?)
(set! re-tokenize-lses lexer-states)
(re-tokenize-move-to-next-ls start-time))
(define/private (re-tokenize-move-to-next-ls start-time)
(cond (cond
[(null? re-tokenize-lses) [(null? re-tokenize-lses)
;; done: return #t ;; done: return #t
#t] #t]
[else [else
(set! re-tokenize-ls-argument (car re-tokenize-lses)) (define ls (car re-tokenize-lses))
(set! re-tokenize-lses (cdr re-tokenize-lses)) (set! re-tokenize-lses (cdr re-tokenize-lses))
(set! re-tokenize-in-start-pos (lexer-state-current-pos re-tokenize-ls-argument)) (define in
(set! re-tokenize-lexer-mode-argument (lexer-state-current-lexer-mode re-tokenize-ls-argument))
(set! re-tokenize-in-argument
(open-input-text-editor this (open-input-text-editor this
(lexer-state-current-pos re-tokenize-ls-argument) (lexer-state-current-pos ls)
(lexer-state-end-pos re-tokenize-ls-argument) (lexer-state-end-pos ls)
(λ (x) #f))) (λ (x) #f)))
(port-count-lines! re-tokenize-in-argument) (port-count-lines! in)
(set! rev (get-revision-number)) (continue-re-tokenize start-time did-something? ls in
(continue-re-tokenize start-time #t)])) (lexer-state-current-pos ls)
(lexer-state-current-lexer-mode ls))]))
(define re-tokenize-lses #f) (define re-tokenize-lses #f)
(define re-tokenize-ls-argument #f)
(define re-tokenize-in-argument #f) (define/private (continue-re-tokenize start-time did-something? ls in in-start-pos lexer-mode)
(define re-tokenize-in-start-pos #f)
(define re-tokenize-lexer-mode-argument #f)
(define/private (continue-re-tokenize start-time did-something?)
(cond (cond
[(and did-something? ((+ start-time 20) . <= . (current-inexact-milliseconds))) [(and did-something? ((+ start-time 20.0) . <= . (current-inexact-milliseconds)))
#f] #f]
[else [else
;(define-values (_line1 _col1 pos-before) (port-next-location in)) ;(define-values (_line1 _col1 pos-before) (port-next-location in))
(define-values (lexeme type data new-token-start new-token-end backup-delta new-lexer-mode) (define-values (lexeme type data new-token-start new-token-end backup-delta new-lexer-mode)
(get-token re-tokenize-in-argument re-tokenize-in-start-pos re-tokenize-lexer-mode-argument)) (get-token in in-start-pos lexer-mode))
;(define-values (_line2 _col2 pos-after) (port-next-location in)) ;(define-values (_line2 _col2 pos-after) (port-next-location in))
(cond (cond
[(eq? 'eof type) [(eq? 'eof type)
(re-tokenize-move-to-next-ls start-time)] (re-tokenize-move-to-next-ls start-time #t)]
[else [else
(unless (exact-nonnegative-integer? new-token-start) (unless (exact-nonnegative-integer? new-token-start)
(error 'color:text<%> "expected an exact nonnegative integer for the token start, got ~e" new-token-start)) (error 'color:text<%> "expected an exact nonnegative integer for the token start, got ~e" new-token-start))
@ -337,10 +328,10 @@ added get-regions
(error 'color:text<%> "expected an exact nonnegative integer for the token end, got ~e" new-token-end)) (error 'color:text<%> "expected an exact nonnegative integer for the token end, got ~e" new-token-end))
(unless (exact-nonnegative-integer? backup-delta) (unless (exact-nonnegative-integer? backup-delta)
(error 'color:text<%> "expected an exact nonnegative integer for the backup delta, got ~e" backup-delta)) (error 'color:text<%> "expected an exact nonnegative integer for the backup delta, got ~e" backup-delta))
(unless (0 . < . (- new-token-end new-token-start)) (unless (new-token-start . < . new-token-end)
(error 'color:text<%> "expected the distance between the start and end position for each token to be positive, but start was ~e and end was ~e" new-token-start new-token-end)) (error 'color:text<%>
#; (printf "~s at ~a to ~a\n" lexeme (+ in-start-pos (sub1 new-token-start)) "expected the distance between the start and end position for each token to be positive, but start was ~e and end was ~e"
(+ in-start-pos (sub1 new-token-end))) new-token-start new-token-end))
(let ((len (- new-token-end new-token-start))) (let ((len (- new-token-end new-token-start)))
#; #;
(unless (= len (- pos-after pos-before)) (unless (= len (- pos-after pos-before))
@ -348,34 +339,33 @@ added get-regions
;; when this check fails, bad things can happen non-deterministically later on ;; when this check fails, bad things can happen non-deterministically later on
(eprintf "pos changed bad ; len ~s pos-before ~s pos-after ~s (token ~s mode ~s)\n" (eprintf "pos changed bad ; len ~s pos-before ~s pos-after ~s (token ~s mode ~s)\n"
len pos-before pos-after lexeme new-lexer-mode)) len pos-before pos-after lexeme new-lexer-mode))
(set-lexer-state-current-pos! re-tokenize-ls-argument (+ len (lexer-state-current-pos re-tokenize-ls-argument))) (set-lexer-state-current-pos! ls (+ len (lexer-state-current-pos ls)))
(set-lexer-state-current-lexer-mode! re-tokenize-ls-argument new-lexer-mode) (set-lexer-state-current-lexer-mode! ls new-lexer-mode)
(sync-invalid re-tokenize-ls-argument) (sync-invalid ls)
(when (and should-color? (should-color-type? type) (not frozen?)) (when (and should-color? (should-color-type? type) (not frozen?))
(add-colorings type re-tokenize-in-start-pos new-token-start new-token-end)) (add-colorings type in-start-pos new-token-start new-token-end))
;; Using the non-spec version takes 3 times as long as the spec ;; Using the non-spec version takes 3 times as long as the spec
;; version. In other words, the new greatly outweighs the tree ;; version. In other words, the new greatly outweighs the tree
;; operations. ;; operations.
;;(insert-last! tokens (new token-tree% (length len) (data type))) ;;(insert-last! tokens (new token-tree% (length len) (data type)))
(insert-last-spec! (lexer-state-tokens re-tokenize-ls-argument) len (make-data type new-lexer-mode backup-delta)) (insert-last-spec! (lexer-state-tokens ls) len (make-data type new-lexer-mode backup-delta))
#; (show-tree (lexer-state-tokens ls)) #; (show-tree (lexer-state-tokens ls))
(send (lexer-state-parens re-tokenize-ls-argument) add-token data len) (send (lexer-state-parens ls) add-token data len)
(cond (cond
[(and (not (send (lexer-state-invalid-tokens re-tokenize-ls-argument) is-empty?)) [(and (not (send (lexer-state-invalid-tokens ls) is-empty?))
(= (lexer-state-invalid-tokens-start re-tokenize-ls-argument) (= (lexer-state-invalid-tokens-start ls)
(lexer-state-current-pos re-tokenize-ls-argument)) (lexer-state-current-pos ls))
(equal? new-lexer-mode (equal? new-lexer-mode
(lexer-state-invalid-tokens-mode re-tokenize-ls-argument))) (lexer-state-invalid-tokens-mode ls)))
(send (lexer-state-invalid-tokens re-tokenize-ls-argument) search-max!) (send (lexer-state-invalid-tokens ls) search-max!)
(send (lexer-state-parens re-tokenize-ls-argument) merge-tree (send (lexer-state-parens ls) merge-tree
(send (lexer-state-invalid-tokens re-tokenize-ls-argument) get-root-end-position)) (send (lexer-state-invalid-tokens ls) get-root-end-position))
(insert-last! (lexer-state-tokens re-tokenize-ls-argument) (insert-last! (lexer-state-tokens ls)
(lexer-state-invalid-tokens re-tokenize-ls-argument)) (lexer-state-invalid-tokens ls))
(set-lexer-state-invalid-tokens-start! re-tokenize-ls-argument +inf.0) (set-lexer-state-invalid-tokens-start! ls +inf.0)
(re-tokenize-move-to-next-ls start-time)] (re-tokenize-move-to-next-ls start-time #t)]
[else [else
(set! re-tokenize-lexer-mode-argument new-lexer-mode) (continue-re-tokenize start-time #t ls in in-start-pos new-lexer-mode)]))])]))
(continue-re-tokenize start-time #t)]))])]))
(define/private (add-colorings type in-start-pos new-token-start new-token-end) (define/private (add-colorings type in-start-pos new-token-start new-token-end)
(define sp (+ in-start-pos (sub1 new-token-start))) (define sp (+ in-start-pos (sub1 new-token-start)))
@ -509,24 +499,17 @@ added get-regions
(unless (andmap lexer-state-up-to-date? lexer-states) (unless (andmap lexer-state-up-to-date? lexer-states)
(begin-edit-sequence #f #f) (begin-edit-sequence #f #f)
(c-log "starting to color") (c-log "starting to color")
(define finished? (set! re-tokenize-lses lexer-states)
(cond (define finished? (re-tokenize-move-to-next-ls (current-inexact-milliseconds) #f))
[(and colorer-pending? (= rev (get-revision-number)))
(continue-re-tokenize (current-inexact-milliseconds) #f)]
[else
(start-re-tokenize (current-inexact-milliseconds))]))
(c-log (format "coloring stopped ~a" (if finished? "because it finished" "with more to do"))) (c-log (format "coloring stopped ~a" (if finished? "because it finished" "with more to do")))
(cond (when finished?
[finished? (for ([ls (in-list lexer-states)])
(set! colorer-pending? #f)
(for-each (lambda (ls)
(set-lexer-state-up-to-date?! ls #t)) (set-lexer-state-up-to-date?! ls #t))
lexer-states)
(update-lexer-state-observers) (update-lexer-state-observers)
(c-log "updated observers")] (c-log "updated observers"))
[else (c-log "starting end-edit-sequence")
(set! colorer-pending? #t)]) (end-edit-sequence)
(end-edit-sequence))) (c-log "finished end-edit-sequence")))
(define/private (colorer-callback) (define/private (colorer-callback)
(cond (cond