(defun url-retrieve-synchronously (url &optional silent inhibit-cookies timeout)
  "Retrieve URL synchronously.
Return the buffer containing the data, or nil if there are no data
associated with it (the case for dired, info, or mailto URLs that
need no further processing).  URL is either a string or a parsed URL.

If SILENT is non-nil, don't do any messaging while retrieving.
If INHIBIT-COOKIES is non-nil, refuse to store cookies.
If TIMEOUT is passed, it should be a number that says (in seconds)
how long to wait for a response before giving up."
  (url-do-setup)
  ;; `inhibit-quit' is bound non-nil for the whole wait; quitting is
  ;; instead handled explicitly below via `with-local-quit' and
  ;; `quit-flag'.  `url-asynchronous' is bound nil so the underlying
  ;; fetch runs synchronously where the backend supports it.
  (let ((inhibit-quit t)
        (retrieval-done nil)
        (start-time (current-time))     ; reference point for TIMEOUT
        (url-asynchronous nil)
        (asynch-buffer nil)
        (timed-out nil)
        ;; `counter'/`abort-hang' (bound below) guard against spinning
        ;; forever while keyboard input is pending; see the
        ;; `input-pending-p' check in the wait loop.
        (abort-hang nil))
    ;; The callback only records completion; the data stays in the
    ;; process buffer, which becomes our return value.
    (setq asynch-buffer
          (url-retrieve url
                        (lambda (&rest ignored)
                          (url-debug 'retrieval
                                     "Synchronous fetching done (%S)"
                                     (current-buffer))
                          (setq retrieval-done t
                                asynch-buffer (current-buffer)))
                        nil silent inhibit-cookies))
    (if (null asynch-buffer)
        ;; We do not need to do anything, it was a mailto or something
        ;; similar that takes processing completely outside of the URL
        ;; package.
        nil
      (let ((proc (get-buffer-process asynch-buffer))
            (counter 0))
        ;; If the access method was synchronous, `retrieval-done' should
        ;; hopefully already be set to t.  If it is nil, and `proc' is also
        ;; nil, it implies that the async process is not running in
        ;; asynch-buffer.  This happens e.g. for FTP files.  In such a case
        ;; url-file.el should probably set something like a `url-process'
        ;; buffer-local variable so we can find the exact process that we
        ;; should be waiting for.  In the mean time, we'll just wait for any
        ;; process output.
        ;; Spin until the callback fires, the hung-input escape hatch
        ;; triggers, or (when TIMEOUT is a number) more than TIMEOUT
        ;; seconds have elapsed since `start-time'.
        (while (and (not retrieval-done)
                    (not abort-hang)
                    (or (not timeout)
                        (not (setq timed-out
                                   (time-less-p timeout
                                                (time-since start-time))))))
          (url-debug 'retrieval
                     "Spinning in url-retrieve-synchronously: %S (%S)"
                     retrieval-done asynch-buffer)
          (if (buffer-local-value 'url-redirect-buffer asynch-buffer)
              ;; A redirect happened: follow it by switching both the
              ;; buffer we wait on and the process we watch.
              (setq proc (get-buffer-process
                          (setq asynch-buffer
                                (buffer-local-value 'url-redirect-buffer
                                                    asynch-buffer))))
            (if (and proc (memq (process-status proc)
                                '(closed exit signal failed))
                     ;; Make sure another process hasn't been started.
                     (eq proc (or (get-buffer-process asynch-buffer) proc)))
                ;; FIXME: It's not clear whether url-retrieve's callback is
                ;; guaranteed to be called or not.  It seems that url-http
                ;; decides sometimes consciously not to call it, so it's not
                ;; clear that it's a bug, but even then we need to decide how
                ;; url-http can then warn us that the download has completed.
                ;; In the mean time, we use this here workaround.
                ;; XXX: The callback must always be called.  Any
                ;; exception is a bug that should be fixed, not worked
                ;; around.
                (progn
                  ;; Call delete-process so we run any sentinel now.
                  (delete-process proc)
                  (setq retrieval-done t)))
            ;; We used to use `sit-for' here, but in some cases it wouldn't
            ;; work because apparently pending keyboard input would always
            ;; interrupt it before it got a chance to handle process input.
            ;; `sleep-for' was tried but it lead to other forms of
            ;; hanging.  --Stef
            ;; Escape hatch: if keyboard input stays pending for more
            ;; than 20 consecutive iterations, give up the wait rather
            ;; than hang (sets `abort-hang', ending the while loop).
            (if (input-pending-p)
                (progn
                  (setq counter (1+ counter))
                  (if (> counter 20)
                      (setq abort-hang t))))
            ;; accept-process-output hangs without while-no-input; input has
            ;; nowhere to go.  So avoid it.
            (unless (or (while-no-input
                          (with-local-quit
                            (accept-process-output proc 0.1)))
                        (null proc))
              ;; accept-process-output returned nil, maybe because the process
              ;; exited (and may have been replaced with another).  If we got
              ;; a quit, just stop.
              (when quit-flag
                (delete-process proc))
              (setq proc (and (not quit-flag)
                              (get-buffer-process asynch-buffer))))))
        ;; On timeouts, make sure we kill any pending processes.
        ;; There may be more than one if we had a redirect.
        (when timed-out
          (when (process-live-p proc)
            (delete-process proc))
          (when-let ((aproc (get-buffer-process asynch-buffer)))
            (when (process-live-p aproc)
              (delete-process aproc))))))
    asynch-buffer))