From b560aa029810dcfdee278b4b3614c938d996fa6c Mon Sep 17 00:00:00 2001
From: dickmao
Date: Thu, 5 Aug 2021 12:41:39 -0400
Subject: [PATCH] Rationalize url-retrieve-synchronously

It's impossible to reason about this function without the
conveniences of throw-catch and unwind-protect.  Related bug#49861.

* lisp/url/url.el (url-retrieve-synchronously): Accept-process-output
on a null process.  That is the safer, more conventional way of
achieving non-blocking sleep-for.
---
 lisp/url/url.el | 125 ++++++++++++++++++------------------------------
 1 file changed, 46 insertions(+), 79 deletions(-)

diff --git a/lisp/url/url.el b/lisp/url/url.el
index a6565e2cdb..be66510dd8 100644
--- a/lisp/url/url.el
+++ b/lisp/url/url.el
@@ -235,85 +235,52 @@ url-retrieve-synchronously
 TIMEOUT is passed, it should be a number that says (in seconds)
 how long to wait for a response before giving up."
   (url-do-setup)
-
-  (let ((retrieval-done nil)
-        (start-time (current-time))
-        (url-asynchronous nil)
-        (asynch-buffer nil)
-        (timed-out nil))
-    (setq asynch-buffer
-          (url-retrieve url (lambda (&rest ignored)
-                              (url-debug 'retrieval "Synchronous fetching done (%S)" (current-buffer))
-                              (setq retrieval-done t
-                                    asynch-buffer (current-buffer)))
-                        nil silent inhibit-cookies))
-    (if (null asynch-buffer)
-        ;; We do not need to do anything, it was a mailto or something
-        ;; similar that takes processing completely outside of the URL
-        ;; package.
-        nil
-      (let ((proc (get-buffer-process asynch-buffer)))
-        ;; If the access method was synchronous, `retrieval-done' should
-        ;; hopefully already be set to t.  If it is nil, and `proc' is also
-        ;; nil, it implies that the async process is not running in
-        ;; asynch-buffer.  This happens e.g. for FTP files.  In such a case
-        ;; url-file.el should probably set something like a `url-process'
-        ;; buffer-local variable so we can find the exact process that we
-        ;; should be waiting for.  In the mean time, we'll just wait for any
-        ;; process output.
-        (while (and (not retrieval-done)
-                    (or (not timeout)
-                        (not (setq timed-out
-                                   (time-less-p timeout
-                                                (time-since start-time))))))
-          (url-debug 'retrieval
-                     "Spinning in url-retrieve-synchronously: %S (%S)"
-                     retrieval-done asynch-buffer)
-          (if (buffer-local-value 'url-redirect-buffer asynch-buffer)
-              (setq proc (get-buffer-process
-                          (setq asynch-buffer
-                                (buffer-local-value 'url-redirect-buffer
-                                                    asynch-buffer))))
-            (if (and proc (memq (process-status proc)
-                                '(closed exit signal failed))
-                     ;; Make sure another process hasn't been started.
-                     (eq proc (or (get-buffer-process asynch-buffer) proc)))
-                ;; FIXME: It's not clear whether url-retrieve's callback is
-                ;; guaranteed to be called or not.  It seems that url-http
-                ;; decides sometimes consciously not to call it, so it's not
-                ;; clear that it's a bug, but even then we need to decide how
-                ;; url-http can then warn us that the download has completed.
-                ;; In the mean time, we use this here workaround.
-                ;; XXX: The callback must always be called.  Any
-                ;; exception is a bug that should be fixed, not worked
-                ;; around.
-                (progn ;; Call delete-process so we run any sentinel now.
-                  (delete-process proc)
-                  (setq retrieval-done t)))
-            ;; We used to use `sit-for' here, but in some cases it wouldn't
-            ;; work because apparently pending keyboard input would always
-            ;; interrupt it before it got a chance to handle process input.
-            ;; `sleep-for' was tried but it lead to other forms of
-            ;; hanging.  --Stef
-            (unless (or (with-local-quit
-                          (accept-process-output proc 1))
-                        (null proc))
-              ;; accept-process-output returned nil, maybe because the process
-              ;; exited (and may have been replaced with another).  If we got
-              ;; a quit, just stop.
-              (when quit-flag
-                (delete-process proc))
-              (setq proc (and (not quit-flag)
-                              (get-buffer-process asynch-buffer))))))
-        ;; On timeouts, make sure we kill any pending processes.
-        ;; There may be more than one if we had a redirect.
-        (when timed-out
-          (when (process-live-p proc)
-            (delete-process proc))
-          (when-let ((aproc (get-buffer-process asynch-buffer)))
-            (when (process-live-p aproc)
-              (delete-process aproc))))))
-    asynch-buffer))
+  (let* (url-asynchronous
+         data-buffer
+         (callback (lambda (&rest _args)
+                     (setq data-buffer (current-buffer))
+                     (url-debug 'retrieval
+                                "Synchronous fetching done (%S)"
+                                data-buffer))))
+    (if-let ((start-time (current-time))
+             (proc-buffer (url-retrieve url callback nil silent inhibit-cookies)))
+        (unwind-protect
+            (catch 'done
+              (while (not data-buffer)
+                (when (and timeout (time-less-p timeout (time-since start-time)))
+                  (url-debug 'retrieval "Timed out %s (after %ss)" url
+                             (float-time (time-since start-time)))
+                  (throw 'done 'timeout))
+                (url-debug 'retrieval
+                           "Spinning in url-retrieve-synchronously: nil (%S)"
+                           proc-buffer)
+                (when-let ((redirect-buffer (buffer-local-value
+                                             'url-redirect-buffer
+                                             proc-buffer)))
+                  (unless (eq redirect-buffer proc-buffer)
+                    (url-debug 'retrieval
+                               "Redirect in url-retrieve-synchronously: %S -> %S"
+                               proc-buffer redirect-buffer)
+                    (let (kill-buffer-query-functions)
+                      (kill-buffer proc-buffer))
+                    ;; Accommodate self-admitted hack in commit 55d1d8b
+                    (setq proc-buffer redirect-buffer)))
+                (when-let ((proc (get-buffer-process proc-buffer)))
+                  (when (memq (process-status proc)
+                              '(closed exit signal failed))
+                    ;; Process sentinel vagaries occasionally cause
+                    ;; url-retrieve to fail calling callback.
+                    (unless data-buffer
+                      (url-debug 'retrieval "Dead process %s" url)
+                      (throw 'done 'exception))))
+                ;; Querying over consumer internet in the US takes 100 ms,
+                ;; so split the difference.
+                (accept-process-output nil 0.05)))
+          (unless (eq data-buffer proc-buffer)
+            (let (kill-buffer-query-functions)
+              (kill-buffer proc-buffer))))
+      (url-debug 'retrieval "Synchronous fetching unnecessary %s" url))
+    data-buffer))
 
 ;; url-mm-callback called from url-mm, which requires mm-decode.
 (declare-function mm-dissect-buffer "mm-decode"
-- 
2.26.2
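
For readers unfamiliar with the idiom the commit message relies on, here is a
minimal sketch (not part of the patch) of using `accept-process-output' on a
null process as a non-blocking sleep-for: passing nil as PROCESS lets Emacs
keep handling output from any process while it waits.  The helper name
`url-demo-wait-for', its arguments, and the 50 ms poll interval are
illustrative assumptions, not anything defined by url.el.

    (defun url-demo-wait-for (done-p &optional timeout)
      "Poll DONE-P every 50 ms; give up after TIMEOUT seconds if non-nil."
      (let ((start-time (current-time)))
        (catch 'done
          (while t
            (when (funcall done-p)
              (throw 'done 'done))
            (when (and timeout
                       (time-less-p timeout (time-since start-time)))
              (throw 'done 'timeout))
            ;; A nil PROCESS argument accepts output from any process, so
            ;; this waits up to 50 ms while network traffic is still
            ;; being read and handed to process filters.
            (accept-process-output nil 0.05)))))

For example, (url-demo-wait-for (lambda () data-buffer) 5) spins for at most
five seconds, yielding to the network layer on each iteration, which is
essentially the loop the rewritten url-retrieve-synchronously runs inside its
catch/unwind-protect.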