;;; url.el --- Uniform Resource Locator retrieval tool

;; Copyright (C) 1996-1999, 2001, 2004-2011 Free Software Foundation, Inc.

;; Author: Bill Perry <wmperry@gnu.org>
;; Keywords: comm, data, processes, hypermedia

;; This file is part of GNU Emacs.
;;
;; GNU Emacs is free software: you can redistribute it and/or modify
;; it under the terms of the GNU General Public License as published by
;; the Free Software Foundation, either version 3 of the License, or
;; (at your option) any later version.

;; GNU Emacs is distributed in the hope that it will be useful,
;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
;; GNU General Public License for more details.

;; You should have received a copy of the GNU General Public License
;; along with GNU Emacs. If not, see <http://www.gnu.org/licenses/>.

;;; Commentary:

;; Registered URI schemes: http://www.iana.org/assignments/uri-schemes

;;; Code:

(eval-when-compile (require 'cl))

(require 'mailcap)

(eval-when-compile
  (require 'mm-decode)
  (require 'mm-view))

(require 'url-vars)
(require 'url-cookie)
(require 'url-history)
(require 'url-expand)
(require 'url-privacy)
(require 'url-methods)
(require 'url-proxy)
(require 'url-parse)
(require 'url-util)


(defcustom url-configuration-directory
  (locate-user-emacs-file "url/" ".url/")
  "Directory used by the URL package for cookies, history, etc."
  :type 'directory
  :group 'url)

(defun url-do-setup ()
55 "Setup the URL package.
This is to avoid conflict with user settings if URL is dumped with
Emacs."
  (unless url-setup-done

    ;; Make OS/2 happy
    ;;(push '("http" "80") tcp-binary-process-input-services)

    (mailcap-parse-mailcaps)
    (mailcap-parse-mimetypes)

    ;; Register all the authentication schemes we can handle
    (url-register-auth-scheme "basic" nil 4)
    (url-register-auth-scheme "digest" nil 7)

    (setq url-cookie-file
          (or url-cookie-file
              (expand-file-name "cookies" url-configuration-directory)))

    (setq url-history-file
          (or url-history-file
              (expand-file-name "history" url-configuration-directory)))

    ;; Parse the global history file if it exists, so that it can be used
    ;; for URL completion, etc.
    (url-history-parse-history)
    (url-history-setup-save-timer)

    ;; Ditto for cookies
    (url-cookie-setup-save-timer)
    (url-cookie-parse-file url-cookie-file)

    ;; Read in proxy gateways
    (let ((noproxy (and (not (assoc "no_proxy" url-proxy-services))
                        (or (getenv "NO_PROXY")
                            (getenv "no_PROXY")
                            (getenv "no_proxy")))))
      (if noproxy
          (setq url-proxy-services
                (cons (cons "no_proxy"
                            (concat "\\("
                                    (mapconcat
                                     (lambda (x)
                                       (cond
                                        ((= x ?,) "\\|")
                                        ((= x ? ) "")
                                        ((= x ?.) (regexp-quote "."))
                                        ((= x ?*) ".*")
                                        ((= x ??) ".")
                                        (t (char-to-string x))))
                                     noproxy "") "\\)"))
                      url-proxy-services))))

    (url-setup-privacy-info)
    (run-hooks 'url-load-hook)
    (setq url-setup-done t)))

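;; For illustration, the NO_PROXY handling above maps a shell-style wildcard
;; list onto the regexp consulted when a proxy is looked up for a host.
;; With a hypothetical NO_PROXY="localhost, *.example.com" in the
;; environment, `url-proxy-services' gains the entry:
;;
;;   ("no_proxy" . "\\(localhost\\|.*\\.example\\.com\\)")
;;
;; The same effect can be configured directly in Lisp (hypothetical hosts),
;; bypassing the environment variables:
;;
;;   (setq url-proxy-services
;;         '(("http"     . "proxy.example.com:3128")
;;           ("no_proxy" . "\\(localhost\\|.*\\.example\\.com\\)")))
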
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; Retrieval functions
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

(defvar url-redirect-buffer nil
  "New buffer into which the retrieval will take place.
Sometimes while retrieving a URL, the URL library needs to use another buffer
than the one returned initially by `url-retrieve'. In this case, it sets this
variable in the original buffer as a forwarding pointer.")

;;;###autoload
(defun url-retrieve (url callback &optional cbargs silent)
  "Retrieve URL asynchronously and call CALLBACK with CBARGS when finished.
URL is either a string or a parsed URL.

CALLBACK is called when the object has been completely retrieved, with
the current buffer containing the object, and any MIME headers associated
with it. It is called as (apply CALLBACK STATUS CBARGS).
STATUS is a list with an even number of elements representing
what happened during the request, with most recent events first,
or an empty list if no events have occurred. Each pair is one of:

\(:redirect REDIRECTED-TO) - the request was redirected to this URL
\(:error (ERROR-SYMBOL . DATA)) - an error occurred. The error can be
signaled with (signal ERROR-SYMBOL DATA).

Return the buffer URL will load into, or nil if the process has
already completed (i.e. URL was a mailto URL or similar; in this case
the callback is not called).

The variables `url-request-data', `url-request-method' and
`url-request-extra-headers' can be dynamically bound around the
request; dynamic binding of other variables doesn't necessarily
take effect.

If SILENT, then don't message progress reports and the like."
;;; XXX: There is code in Emacs that does dynamic binding
;;; of the following variables around url-retrieve:
;;; url-standalone-mode, url-gateway-unplugged, w3-honor-stylesheets,
;;; url-confirmation-func, url-cookie-multiple-line,
;;; url-cookie-{{,secure-}storage,confirmation}
;;; url-standalone-mode and url-gateway-unplugged should work as
;;; usual. url-confirmation-func is only used in nnwarchive.el and
;;; webmail.el; the latter should be updated. Is
;;; url-cookie-multiple-line needed anymore? The other url-cookie-*
;;; are (for now) only used in synchronous retrievals.
  (url-retrieve-internal url callback (cons nil cbargs) silent))

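;; A minimal usage sketch (the URL and messages are hypothetical). The
;; callback receives STATUS as its first argument, followed by any CBARGS;
;; `url-request-method' and friends may be let-bound around the call as
;; described above:
;;
;;   (let ((url-request-method "GET")
;;         (url-request-extra-headers '(("Accept" . "text/html"))))
;;     (url-retrieve "http://www.example.com/"
;;                   (lambda (status)
;;                     (if (plist-get status :error)
;;                         (message "Fetch failed: %S" (plist-get status :error))
;;                       (message "Fetched %d bytes" (buffer-size))))))
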
(defun url-retrieve-internal (url callback cbargs &optional silent)
  "Internal function; external interface is `url-retrieve'.
CBARGS is what the callback will actually receive - the first item is
the list of events, as described in the docstring of `url-retrieve'.

If SILENT, don't message progress reports and the like."
  (url-do-setup)
  (url-gc-dead-buffers)
  (if (stringp url)
      (set-text-properties 0 (length url) nil url))
  (if (not (vectorp url))
      (setq url (url-generic-parse-url url)))
  (if (not (functionp callback))
      (error "Must provide a callback function to url-retrieve"))
  (unless (url-type url)
    (error "Bad url: %s" (url-recreate-url url)))
  (setf (url-silent url) silent)
  (let ((loader (url-scheme-get-property (url-type url) 'loader))
        (url-using-proxy (if (url-host url)
                             (url-find-proxy-for-url url (url-host url))))
        (buffer nil)
        (asynch (url-scheme-get-property (url-type url) 'asynchronous-p)))
    (if url-using-proxy
        (setq asynch t
              loader 'url-proxy))
    (if asynch
        (let ((url-current-object url))
          (setq buffer (funcall loader url callback cbargs)))
      (setq buffer (funcall loader url))
      (if buffer
          (with-current-buffer buffer
            (apply callback cbargs))))
    (if url-history-track
        (url-history-update-url url (current-time)))
    buffer))

;;;###autoload
(defun url-retrieve-synchronously (url)
  "Retrieve URL synchronously.
Return the buffer containing the data, or nil if there are no data
associated with it (the case for dired, info, or mailto URLs that need
no further processing). URL is either a string or a parsed URL."
  (url-do-setup)

  (lexical-let ((retrieval-done nil)
                (asynch-buffer nil))
    (setq asynch-buffer
          (url-retrieve url (lambda (&rest ignored)
                              (url-debug 'retrieval "Synchronous fetching done (%S)" (current-buffer))
                              (setq retrieval-done t
                                    asynch-buffer (current-buffer)))))
    (if (null asynch-buffer)
        ;; We do not need to do anything, it was a mailto or something
        ;; similar that takes processing completely outside of the URL
        ;; package.
        nil
      (let ((proc (get-buffer-process asynch-buffer)))
        ;; If the access method was synchronous, `retrieval-done' should
        ;; hopefully already be set to t. If it is nil, and `proc' is also
        ;; nil, it implies that the async process is not running in
        ;; asynch-buffer. This happens e.g. for FTP files. In such a case
        ;; url-file.el should probably set something like a `url-process'
        ;; buffer-local variable so we can find the exact process that we
        ;; should be waiting for. In the meantime, we'll just wait for any
        ;; process output.
        (while (not retrieval-done)
          (url-debug 'retrieval
                     "Spinning in url-retrieve-synchronously: %S (%S)"
                     retrieval-done asynch-buffer)
          (if (buffer-local-value 'url-redirect-buffer asynch-buffer)
              (setq proc (get-buffer-process
                          (setq asynch-buffer
                                (buffer-local-value 'url-redirect-buffer
                                                    asynch-buffer))))
            (if (and proc (memq (process-status proc)
                                '(closed exit signal failed))
                     ;; Make sure another process hasn't been started.
                     (eq proc (or (get-buffer-process asynch-buffer) proc)))
                ;; FIXME: It's not clear whether url-retrieve's callback is
                ;; guaranteed to be called or not. It seems that url-http
                ;; decides sometimes consciously not to call it, so it's not
                ;; clear that it's a bug, but even then we need to decide how
                ;; url-http can then warn us that the download has completed.
                ;; In the meantime, we use this workaround.
                ;; XXX: The callback must always be called. Any
                ;; exception is a bug that should be fixed, not worked
                ;; around.
                (progn ;; Call delete-process so we run any sentinel now.
                  (delete-process proc)
                  (setq retrieval-done t)))
            ;; We used to use `sit-for' here, but in some cases it wouldn't
            ;; work because apparently pending keyboard input would always
            ;; interrupt it before it got a chance to handle process input.
            ;; `sleep-for' was tried but it led to other forms of
            ;; hanging. --Stef
            (unless (or (with-local-quit
                          (accept-process-output proc))
                        (null proc))
              ;; accept-process-output returned nil, maybe because the process
              ;; exited (and may have been replaced with another). If we got
              ;; a quit, just stop.
              (when quit-flag
                (delete-process proc))
              (setq proc (and (not quit-flag)
                              (get-buffer-process asynch-buffer)))))))
      asynch-buffer)))

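;; A minimal synchronous sketch (hypothetical URL). For HTTP the returned
;; buffer holds the response headers followed by the body, and the caller
;; is responsible for killing it:
;;
;;   (let ((buffer (url-retrieve-synchronously "http://www.example.com/")))
;;     (when buffer
;;       (unwind-protect
;;           (with-current-buffer buffer
;;             (goto-char (point-min))
;;             ;; Skip the header block; the body starts after the first
;;             ;; blank line.
;;             (when (re-search-forward "\r?\n\r?\n" nil t)
;;               (buffer-substring (point) (point-max))))
;;         (kill-buffer buffer))))
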
(defun url-mm-callback (&rest ignored)
  "Callback used by `url-mm-url'.
Dissect the retrieved object in the current buffer and display it
via `mm-display-part', either inline or in an external viewer."
  (let ((handle (mm-dissect-buffer t)))
    (url-mark-buffer-as-dead (current-buffer))
    (with-current-buffer
        (generate-new-buffer (url-recreate-url url-current-object))
      (if (eq (mm-display-part handle) 'external)
          (progn
            (set-process-sentinel
             ;; Fixme: this shouldn't have to know the form of the
             ;; undisplayer produced by `mm-display-part'.
             (get-buffer-process (cdr (mm-handle-undisplayer handle)))
             `(lambda (proc event)
                (mm-destroy-parts (quote ,handle))))
            (message "Viewing externally")
            (kill-buffer (current-buffer)))
        (display-buffer (current-buffer))
        (add-hook 'kill-buffer-hook
                  `(lambda () (mm-destroy-parts ',handle))
                  nil
                  t)))))

(defun url-mm-url (url)
  "Retrieve URL and pass to the appropriate viewing application."
  ;; These requires could advantageously be moved to url-mm-callback or
  ;; turned into autoloads, but I suspect that it would introduce some bugs
  ;; because loading those files from a process sentinel or filter may
  ;; result in some undesirable corner cases.
  (require 'mm-decode)
  (require 'mm-view)
  (url-retrieve url 'url-mm-callback nil))

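;; For example, to hand a document straight to whatever viewer mailcap
;; selects for its MIME type (hypothetical URL):
;;
;;   (url-mm-url "http://www.example.com/paper.pdf")
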
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; Miscellaneous
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defvar url-dead-buffer-list nil
  "List of buffers scheduled to be killed by `url-gc-dead-buffers'.")

(defun url-mark-buffer-as-dead (buff)
  "Schedule buffer BUFF to be killed by the next `url-gc-dead-buffers'."
  (push buff url-dead-buffer-list))

(defun url-gc-dead-buffers ()
  "Kill any still-live buffers on `url-dead-buffer-list'."
  (let ((buff))
    (while (setq buff (pop url-dead-buffer-list))
      (if (buffer-live-p buff)
          (kill-buffer buff)))))

(cond
 ((fboundp 'display-warning)
  (defalias 'url-warn 'display-warning))
 ((fboundp 'warn)
  (defun url-warn (class message &optional level)
    (warn "(%s/%s) %s" class (or level 'warning) message)))
 (t
  (defun url-warn (class message &optional level)
    (with-current-buffer (get-buffer-create "*URL-WARNINGS*")
      (goto-char (point-max))
      (save-excursion
        (insert (format "(%s/%s) %s\n" class (or level 'warning) message)))
      (display-buffer (current-buffer))))))

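;; Whichever definition ends up installed, callers invoke it the same way;
;; for example (hypothetical message text):
;;
;;   (url-warn 'url "Cookie file is in an unrecognized format" 'warning)
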
(provide 'url)

;;; url.el ends here