Refactored lsp--create-filter-function for lower latency and higher efficiency #4796

Open · wants to merge 1 commit into master

110 changes: 48 additions & 62 deletions lsp-mode.el
@@ -7092,6 +7092,23 @@ server. WORKSPACE is the active workspace."
(json-false nil))
(json-read-from-string ,str))))

(defmacro lsp-json-read-string (str)
"Read json from the current buffer."
(if (progn
(require 'json)
(fboundp 'json-parse-string))
`(json-parse-string ,str :object-type (if lsp-use-plists
'plist
'hash-table)
:null-object nil
:false-object nil)
`(let ((json-array-type 'vector)
(json-object-type (if lsp-use-plists
'plist
'hash-table))
(json-false nil))
       (json-read-from-string ,str))))

(defmacro lsp-json-read-buffer ()
"Read json from the current buffer."
(if (progn
@@ -7151,69 +7168,38 @@ server. WORKSPACE is the active workspace."
('request (lsp--on-request workspace json-data)))))))

(defun lsp--create-filter-function (workspace)

Can we have a benchmark to see whether this is really faster than the old one?
The old one already uses a temp buffer at the final step as an optimization; this change goes further, so that we don't keep a list of input chunks but concatenate them right away.
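
A rough way to check this, as a sketch only: feed both filter implementations the same synthetic traffic and time them with benchmark-run. The payload, the chunk size, and the lsp--parser-on-message stub below are invented test scaffolding (not lsp-mode code), and it assumes lsp-mode is already loaded.

;; Sketch only: time how long a filter closure takes to chew through
;; synthetic LSP traffic.  The payload and chunk size are invented, and
;; `lsp--parser-on-message' is stubbed out so that only header/JSON
;; parsing cost is measured, not handler work.
(require 'cl-lib)
(require 'benchmark)

(defun my/lsp-test-chunks (n chunk-size)
  "Return a small LSP message repeated N times, split into CHUNK-SIZE pieces."
  (let* ((body "{\"jsonrpc\":\"2.0\",\"method\":\"dummy\",\"params\":{\"x\":1}}")
         (msg (format "Content-Length: %d\r\n\r\n%s" (string-bytes body) body))
         (stream (mapconcat #'identity (make-list n msg) ""))
         (chunks nil))
    (cl-loop for i from 0 below (length stream) by chunk-size
             do (push (substring stream i (min (length stream) (+ i chunk-size)))
                      chunks))
    (nreverse chunks)))

(cl-letf (((symbol-function 'lsp--parser-on-message) #'ignore))
  (let ((filter (lsp--create-filter-function nil)) ; nil stands in for a workspace
        (chunks (my/lsp-test-chunks 10000 512)))
    (benchmark-run 1
      (dolist (chunk chunks)
        (funcall filter nil chunk)))))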

Another thing about lsp--parser-on-message is that we could dispatch it on a timer, so we don't have to wait for a handler to run before parsing the next message. The previous approach parses all available messages first and then dispatches them one by one. The new approach, on the other hand, tries to execute lsp--parser-on-message as soon as we have data, but that can lead to a long waiting period, especially around something like waiting for completion results.
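
A rough sketch of that timer idea (lsp--message-queue, lsp--dispatch-timer, lsp--enqueue-message, and lsp--drain-message-queue are hypothetical names, not existing lsp-mode symbols): the filter would push parsed messages onto a queue, and a zero-delay timer would drain it, so the process filter returns quickly and handlers run from the command loop.

;; Hypothetical sketch: queue parsed messages and dispatch them from a
;; timer instead of calling `lsp--parser-on-message' inside the filter.
(defvar lsp--message-queue nil
  "Parsed (MSG . WORKSPACE) pairs waiting to be dispatched.")

(defvar lsp--dispatch-timer nil
  "Pending timer that drains `lsp--message-queue', or nil.")

(defun lsp--enqueue-message (msg workspace)
  "Queue MSG for WORKSPACE and schedule a dispatch timer if none is pending."
  (push (cons msg workspace) lsp--message-queue)
  (unless lsp--dispatch-timer
    (setq lsp--dispatch-timer
          (run-at-time 0 nil #'lsp--drain-message-queue))))

(defun lsp--drain-message-queue ()
  "Dispatch queued messages in arrival order, then clear the timer."
  (setq lsp--dispatch-timer nil)
  (let ((pending (nreverse lsp--message-queue)))
    (setq lsp--message-queue nil)
    (pcase-dolist (`(,msg . ,workspace) pending)
      (lsp--parser-on-message msg workspace))))

Inside the filter, the direct (lsp--parser-on-message msg workspace) call would then become (lsp--enqueue-message msg workspace).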

"Make filter for the workspace."
(let ((body-received 0)
leftovers body-length body chunk)
"Efficiently filter/process LSP messages for WORKSPACE."
(let ((input-buffer (generate-new-buffer " *lsp-input*")))
(lambda (_proc input)
(setf chunk (if (s-blank? leftovers)
(encode-coding-string input 'utf-8-unix t)
(concat leftovers (encode-coding-string input 'utf-8-unix t))))

(let (messages)
(while (not (s-blank? chunk))
(if (not body-length)
;; Read headers
(if-let* ((body-sep-pos (string-match-p "\r\n\r\n" chunk)))
;; We've got all the headers, handle them all at once:
(setf body-length (lsp--get-body-length
(mapcar #'lsp--parse-header
(split-string
(substring-no-properties chunk
(or (string-match-p "Content-Length" chunk)
(error "Unable to find Content-Length header."))
body-sep-pos)
"\r\n")))
body-received 0
leftovers nil
chunk (substring-no-properties chunk (+ body-sep-pos 4)))

;; Haven't found the end of the headers yet. Save everything
;; for when the next chunk arrives and await further input.
(setf leftovers chunk
chunk nil))
(let* ((chunk-length (string-bytes chunk))
(left-to-receive (- body-length body-received))
(this-body (if (< left-to-receive chunk-length)
(prog1 (substring-no-properties chunk 0 left-to-receive)
(setf chunk (substring-no-properties chunk left-to-receive)))
(prog1 chunk
(setf chunk nil))))
(body-bytes (string-bytes this-body)))
(push this-body body)
(setf body-received (+ body-received body-bytes))
(when (>= chunk-length left-to-receive)
(condition-case err
(with-temp-buffer
(apply #'insert
(nreverse
(prog1 body
(setf leftovers nil
body-length nil
body-received nil
body nil))))
(decode-coding-region (point-min)
(point-max)
'utf-8)
(goto-char (point-min))
(push (lsp-json-read-buffer) messages))

(error
(lsp-warn "Failed to parse the following chunk:\n'''\n%s\n'''\nwith message %s"
(concat leftovers input)
err)))))))
(mapc (lambda (msg)
(lsp--parser-on-message msg workspace))
(nreverse messages))))))
(with-current-buffer input-buffer
;; Insert raw input at the end
(goto-char (point-max))
(insert input) ; Keep as raw bytes initially
(goto-char (point-min))
(cl-loop
while (let ((header-end (search-forward "\r\n\r\n" nil t))
(header-start (search-backward "Content-Length:" nil t)))
(when header-end
(let* ((headers (buffer-substring-no-properties header-start (- header-end 4)))
(header-lines (split-string headers "\r\n" t))
(parsed-headers (mapcar #'lsp--parse-header header-lines))
(body-length (lsp--get-body-length parsed-headers))
(body-start header-end)
(body-end (+ header-end body-length)))
(when (<= body-end (point-max))
;; Extract and decode the JSON body separately
(let ((json-string (decode-coding-string
(buffer-substring-no-properties body-start body-end)
'utf-8)))
(condition-case err
(let ((msg (lsp-json-read-string json-string)))
(lsp--parser-on-message msg workspace))
(error
(message "LSP JSON parse error: %S" err))))
;; Remove the processed message
(delete-region (point-min) body-end)
(goto-char (point-min))
t)))))))))

(defvar-local lsp--line-col-to-point-hash-table nil
"Hash table with keys (line . col) and values that are either point positions