1 ;;; url-http.el --- HTTP retrieval routines
3 ;; Copyright (c) 1999, 2001, 2004 Free Software Foundation, Inc.
5 ;; Author: Bill Perry <wmperry@gnu.org>
6 ;; Keywords: comm, data, processes
8 ;; This file is part of GNU Emacs.
10 ;; GNU Emacs is free software; you can redistribute it and/or modify
11 ;; it under the terms of the GNU General Public License as published by
12 ;; the Free Software Foundation; either version 2, or (at your option)
15 ;; GNU Emacs is distributed in the hope that it will be useful,
16 ;; but WITHOUT ANY WARRANTY; without even the implied warranty of
17 ;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18 ;; GNU General Public License for more details.
20 ;; You should have received a copy of the GNU General Public License
21 ;; along with GNU Emacs; see the file COPYING. If not, write to the
22 ;; Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
23 ;; Boston, MA 02110-1301, USA.
;; NOTE(review): restored from a garbled dump; upstream gives this a nil
;; initial value.
(defvar url-http-extra-headers nil
  "Extra request headers (an alist of name/value strings) for HTTP requests.
`url-http-handle-authentication' pushes Authorization headers here, and the
retry binds `url-request-extra-headers' to this value.")
38 (autoload 'url-retrieve-synchronously
"url")
39 (autoload 'url-retrieve
"url")
40 (autoload 'url-cache-create-filename
"url-cache")
41 (autoload 'url-mark-buffer-as-dead
"url")
(defconst url-http-default-port 80 "Default HTTP port.")
(defconst url-http-asynchronous-p t "HTTP retrievals are asynchronous.")
;; URL expansion for http is the generic one from the url library.
(defalias 'url-http-expand-file-name 'url-default-expander)
(defvar url-http-real-basic-auth-storage nil
  "Storage for basic authentication credentials used for origin servers.
Bound to `url-basic-auth-storage' while handling non-proxy authentication.")
(defvar url-http-proxy-basic-auth-storage nil
  "Storage for basic authentication credentials used for proxies.
Bound to `url-basic-auth-storage' while handling proxy authentication.")
(defvar url-http-open-connections (make-hash-table :test 'equal)
  "A hash table of all open network connections.")
(defvar url-http-version "1.1"
  "What version of HTTP we advertise, as a string.
Valid values are 1.1 and 1.0.
This is only useful when debugging the HTTP subsystem.

Setting this to 1.0 will tell servers not to send chunked encoding,
and other HTTP/1.1 specific features.")
(defvar url-http-attempt-keepalives t
  "Whether to use a single TCP connection multiple times in HTTP.
This is only useful when debugging the HTTP subsystem.  Setting to
`nil' will explicitly close the connection to the server after every
request.")
;; These are all macros so that they are hidden from external sight
;; when the file is byte-compiled.
;;
;; This allows us to expose just the entry points we want.
;;
;; These routines will allow us to implement persistent HTTP
;; connections.
;; NOTE(review): reconstructed from a garbled dump; verify against upstream.
(defsubst url-http-debug (&rest args)
  "Log ARGS to the `url-debug' facility under the `http' class.
If the user has hit C-g, detach the process sentinel/filter and abort."
  (if quit-flag
      (let ((proc (get-buffer-process (current-buffer))))
        ;; The user hit C-g, honor it!  Some things can get in an
        ;; incredibly tight loop (chunked encoding)
        (when proc
          (set-process-sentinel proc nil)
          (set-process-filter proc nil))
        (error "Transfer interrupted!")))
  (apply 'url-debug 'http args))
(defun url-http-mark-connection-as-busy (host port proc)
  "Remove PROC from the free-connection list for HOST:PORT and return it."
  (url-http-debug "Marking connection as busy: %s:%d %S" host port proc)
  (puthash (cons host port)
           (delq proc (gethash (cons host port) url-http-open-connections))
           url-http-open-connections)
  proc)
(defun url-http-mark-connection-as-free (host port proc)
  "Return PROC to the pool of reusable connections for HOST:PORT.
Detaches the process from its buffer and installs the idle sentinel."
  (url-http-debug "Marking connection as free: %s:%d %S" host port proc)
  (set-process-buffer proc nil)
  (set-process-sentinel proc 'url-http-idle-sentinel)
  (puthash (cons host port)
           (cons proc (gethash (cons host port) url-http-open-connections))
           url-http-open-connections)
  nil)
;; NOTE(review): reconstructed from a garbled dump; verify against upstream.
(defun url-http-find-free-connection (host port)
  "Return a live connection to HOST:PORT, reusing a pooled one if possible.
Dead pooled processes are cleaned up along the way; a new connection is
opened with `url-open-stream' when no reusable one exists.  The returned
process is marked busy."
  (let ((conns (gethash (cons host port) url-http-open-connections))
        (found nil))
    (while (and conns (not found))
      (if (not (memq (process-status (car conns)) '(run open)))
          (progn
            (url-http-debug "Cleaning up dead process: %s:%d %S"
                            host port (car conns))
            (url-http-idle-sentinel (car conns) nil))
        (setq found (car conns))
        (url-http-debug "Found existing connection: %s:%d %S" host port found))
      (pop conns))
    (if found
        (url-http-debug "Reusing existing connection: %s:%d" host port)
      (url-http-debug "Contacting host: %s:%d" host port))
    (url-lazy-message "Contacting host: %s:%d" host port)
    (url-http-mark-connection-as-busy host port
                                      (or found
                                          (url-open-stream host nil host
                                                           port)))))
;; Building an HTTP request
;; NOTE(review): reconstructed from a garbled dump; verify against upstream.
(defun url-http-user-agent-string ()
  "Return the User-Agent header line (with trailing CRLF), or \"\".
Returns the empty string when `url-privacy-level' suppresses the agent."
  (if (or (eq url-privacy-level 'paranoid)
          (and (listp url-privacy-level)
               (memq 'agent url-privacy-level)))
      ""
    (format "User-Agent: %sURL/%s%s\r\n"
            (if url-package-name
                (concat url-package-name "/" url-package-version " ")
              "")
            url-version
            (cond
             ((and url-os-type url-system-type)
              (concat " (" url-os-type "; " url-system-type ")"))
             ((or url-os-type url-system-type)
              (concat " (" (or url-system-type url-os-type) ")"))
             (t "")))))
;; NOTE(review): reconstructed from a garbled dump; verify against upstream.
(defun url-http-create-request (url &optional ref-url)
  "Create an HTTP request for URL, referred to by REF-URL."
  (declare (special proxy-object proxy-info))
  (let* ((extra-headers)
         (request nil)
         (no-cache (cdr-safe (assoc "Pragma" url-request-extra-headers)))
         (proxy-obj (and (boundp 'proxy-object) proxy-object))
         (proxy-auth (if (or (cdr-safe (assoc "Proxy-Authorization"
                                              url-request-extra-headers))
                             (not proxy-obj))
                         nil
                       (let ((url-basic-auth-storage
                              'url-http-proxy-basic-auth-storage))
                         (url-get-authentication url nil 'any nil))))
         (real-fname (url-filename (or proxy-obj url)))
         (host (url-host (or proxy-obj url)))
         (auth (if (cdr-safe (assoc "Authorization" url-request-extra-headers))
                   nil
                 (url-get-authentication (or
                                          (and (boundp 'proxy-info)
                                               proxy-info)
                                          url) nil 'any nil))))
    (if (equal "" real-fname)
        (setq real-fname "/"))
    (setq no-cache (and no-cache (string-match "no-cache" no-cache)))
    (if auth
        (setq auth (concat "Authorization: " auth "\r\n")))
    (if proxy-auth
        (setq proxy-auth (concat "Proxy-Authorization: " proxy-auth "\r\n")))

    ;; Protection against stupid values in the referer
    (if (and ref-url (stringp ref-url) (or (string= ref-url "file:nil")
                                           (string= ref-url "")))
        (setq ref-url nil))

    ;; We do not want to expose the referer if the user is paranoid.
    (if (or (memq url-privacy-level '(low high paranoid))
            (and (listp url-privacy-level)
                 (memq 'lastloc url-privacy-level)))
        (setq ref-url nil))

    ;; url-request-extra-headers contains an assoc-list of
    ;; header/value pairs that we need to put into the request.
    (setq extra-headers (mapconcat
                         (lambda (x)
                           (concat (car x) ": " (cdr x)))
                         url-request-extra-headers "\r\n"))
    (if (not (equal extra-headers ""))
        (setq extra-headers (concat extra-headers "\r\n")))

    ;; This was done with a call to `format'.  Concatting parts has
    ;; the advantage of keeping the parts of each header togther and
    ;; allows us to elide null lines directly, at the cost of making
    ;; the layout less clear.
    (setq request
          (concat
           ;; The request line
           (or url-request-method "GET") " "
           (if proxy-obj (url-recreate-url proxy-obj) real-fname)
           " HTTP/" url-http-version "\r\n"
           ;; Version of MIME we speak
           "MIME-Version: 1.0\r\n"
           ;; (maybe) Try to keep the connection open
           "Connection: " (if (or proxy-obj
                                  (not url-http-attempt-keepalives))
                              "close" "keep-alive") "\r\n"
           ;; HTTP extensions we support
           (if url-extensions-header
               (format
                "Extension: %s\r\n" url-extensions-header))
           ;; Who we want to talk to
           (if (/= (url-port (or proxy-obj url))
                   (url-scheme-get-property
                    (url-type (or proxy-obj url)) 'default-port))
               (format
                "Host: %s:%d\r\n" host (url-port (or proxy-obj url)))
             (format "Host: %s\r\n" host))
           ;; Who its from
           (if url-personal-mail-address
               (concat
                "From: " url-personal-mail-address "\r\n"))
           ;; Encodings we understand
           (if url-mime-encoding-string
               (concat
                "Accept-encoding: " url-mime-encoding-string "\r\n"))
           (if url-mime-charset-string
               (concat
                "Accept-charset: " url-mime-charset-string "\r\n"))
           ;; Languages we understand
           (if url-mime-language-string
               (concat
                "Accept-language: " url-mime-language-string "\r\n"))
           ;; Types we understand
           "Accept: " (or url-mime-accept-string "*/*") "\r\n"
           ;; User agent
           (url-http-user-agent-string)
           ;; Proxy Authorization
           proxy-auth
           ;; Authorization
           auth
           ;; Cookies
           (url-cookie-generate-header-lines host real-fname
                                             (equal "https" (url-type url)))
           ;; If-modified-since
           (if (and (not no-cache)
                    (member url-request-method '("GET" nil)))
               (let ((tm (url-is-cached (or proxy-obj url))))
                 (if tm
                     (concat "If-modified-since: "
                             (url-get-normalized-date tm) "\r\n"))))
           ;; Whence we came
           (if ref-url (concat
                        "Referer: " ref-url "\r\n"))
           extra-headers
           ;; Length of data
           (if url-request-data
               (concat
                "Content-length: " (number-to-string
                                    (length url-request-data))
                "\r\n"))
           ;; End request
           "\r\n"
           ;; Any data
           url-request-data))
    (url-http-debug "Request is: \n%s" request)
    request))
;; NOTE(review): reconstructed from a garbled dump; verify against upstream.
(defun url-http-clean-headers ()
  "Remove trailing \r from header lines.
This allows us to use `mail-fetch-field', etc."
  (declare (special url-http-end-of-headers))
  (goto-char (point-min))
  (while (re-search-forward "\r$" url-http-end-of-headers t)
    (replace-match "")
    ;; Each deleted \r shrinks the header region by one character.
    (setq url-http-end-of-headers (1- url-http-end-of-headers))))
;; NOTE(review): reconstructed from a garbled dump; verify against upstream.
(defun url-http-handle-authentication (proxy)
  "Handle a 401/407 challenge; PROXY non-nil means proxy authentication.
Looks up or prompts for credentials via the url-auth machinery, pushes the
resulting header onto `url-http-extra-headers', and re-issues the request."
  (declare (special status success url-http-method url-http-data
                    url-callback-function url-callback-arguments))
  (url-http-debug "Handling %s authentication" (if proxy "proxy" "normal"))
  (let ((auth (or (mail-fetch-field (if proxy "proxy-authenticate" "www-authenticate"))
                  "basic"))
        (type nil)
        (url (url-recreate-url url-current-object))
        (url-basic-auth-storage 'url-http-real-basic-auth-storage))
    ;; Cheating, but who cares? :)
    (if proxy
        (setq url-basic-auth-storage
              'url-http-proxy-basic-auth-storage))
    (setq auth (url-eat-trailing-space (url-strip-leading-spaces auth)))
    ;; The scheme name is the first whitespace-delimited token.
    (if (string-match "[ \t]" auth)
        (setq type (downcase (substring auth 0 (match-beginning 0))))
      (setq type (downcase auth)))
    (if (not (url-auth-registered type))
        (progn
          ;; We do not support this authentication scheme; apologize
          ;; in the buffer and mark the retrieval as done.
          (widen)
          (goto-char (point-max))
          (insert "<hr>Sorry, but I do not know how to handle " type
                  " authentication.  If you'd like to write it,"
                  " send it to " url-bug-address ".<hr>")
          (setq status t))
      (let* ((args auth)
             (ctr (1- (length args)))
             auth)
        ;; url-parse-args separates on `;', servers separate on `,'.
        (while (/= 0 ctr)
          (if (char-equal ?, (aref args ctr))
              (aset args ctr ?\;))
          (setq ctr (1- ctr)))
        (setq args (url-parse-args args)
              auth (url-get-authentication url
                                           (cdr-safe (assoc "realm" args))
                                           type t args))
        (if (not auth)
            (setq success t)
          (push (cons (if proxy "Proxy-Authorization" "Authorization") auth)
                url-http-extra-headers)
          (let ((url-request-method url-http-method)
                (url-request-data url-http-data)
                (url-request-extra-headers url-http-extra-headers))
            (url-retrieve url url-callback-function
                          url-callback-arguments)))))))
(defun url-http-parse-response ()
  "Parse just the response code."
  (declare (special url-http-end-of-headers url-http-response-status))
  (if (not url-http-end-of-headers)
      (error "Trying to parse HTTP response code in odd buffer: %s" (buffer-name)))
  (url-http-debug "url-http-parse-response called in (%s)" (buffer-name))
  (goto-char (point-min))
  (skip-chars-forward " \t\n")          ; Skip any blank crap
  (skip-chars-forward "HTTP/")          ; Skip HTTP Version
  (read (current-buffer))               ; consume the version number
  (setq url-http-response-status (read (current-buffer))))
;; NOTE(review): reconstructed from a garbled dump; verify against upstream.
(defun url-http-handle-cookies ()
  "Handle all set-cookie / set-cookie2 headers in an HTTP response.
The buffer must already be narrowed to the headers, so mail-fetch-field will
work correctly."
  (let ((cookies (mail-fetch-field "Set-Cookie" nil nil t))
        (cookies2 (mail-fetch-field "Set-Cookie2" nil nil t))
        (url-current-object url-http-cookies-sources))
    (and cookies (url-http-debug "Found %d Set-Cookie headers" (length cookies)))
    (and cookies2 (url-http-debug "Found %d Set-Cookie2 headers" (length cookies2)))
    (while cookies
      (url-cookie-handle-set-cookie (pop cookies)))
;;; (url-cookie-handle-set-cookie2 (pop cookies)))
    ))
;; NOTE(review): reconstructed from a garbled dump (many lines were missing);
;; the per-status dispatch should be verified against upstream url-http.el.
(defun url-http-parse-headers ()
  "Parse and handle HTTP specific headers.
Return t if and only if the current buffer is still active and
should be shown to the user."
  ;; The comments after each status code handled are taken from RFC
  ;; 2616 (HTTP/1.1)
  (declare (special url-http-end-of-headers url-http-response-status
                    url-http-method url-http-data url-http-process
                    url-callback-function url-callback-arguments))
  (url-http-mark-connection-as-free (url-host url-current-object)
                                    (url-port url-current-object)
                                    url-http-process)
  (if (or (not (boundp 'url-http-end-of-headers))
          (not url-http-end-of-headers))
      (error "Trying to parse headers in odd buffer: %s" (buffer-name)))
  (goto-char (point-min))
  (url-http-debug "url-http-parse-headers called in (%s)" (buffer-name))
  (url-http-parse-response)
  (mail-narrow-to-head)
  ;;(narrow-to-region (point-min) url-http-end-of-headers)
  (let ((class nil)
        (success nil))
    (setq class (/ url-http-response-status 100))
    (url-http-debug "Parsed HTTP headers: class=%d status=%d"
                    class url-http-response-status)
    (url-http-handle-cookies)
    (case class
      ;; Classes of response codes
      ;;
      ;; 5xx = Server Error
      ;; 4xx = Client Error
      ;; 3xx = Redirection
      ;; 2xx = Successful
      ;; 1xx = Informational
      (1                                ; Information messages
       ;; 100 = Continue with request
       ;; 101 = Switching protocols
       ;; 102 = Processing (Added by DAV)
       (url-mark-buffer-as-dead (current-buffer))
       (error "HTTP responses in class 1xx not supported (%d)"
              url-http-response-status))
      (2                                ; Success
       ;; 200 Ok
       ;; 201 Created
       ;; 202 Accepted
       ;; 203 Non-authoritative information
       ;; 204 No content
       ;; 205 Reset content
       ;; 206 Partial content
       ;; 207 Multi-status (Added by DAV)
       (case url-http-response-status
         ((204 205)
          ;; No new data, just stay at the same document
          (url-mark-buffer-as-dead (current-buffer))
          (setq success t))
         (otherwise
          ;; Generic success for all others.  Store in the cache, and
          ;; mark it as successful.
          (widen)
          (if (and url-automatic-caching (equal url-http-method "GET"))
              (url-store-in-cache (current-buffer)))
          (setq success t))))
      (3                                ; Redirection
       ;; 300 Multiple choices
       ;; 301 Moved permanently
       ;; 302 Found
       ;; 303 See other
       ;; 304 Not modified
       ;; 305 Use proxy
       ;; 307 Temporary redirect
       (let ((redirect-uri (or (mail-fetch-field "Location")
                               (mail-fetch-field "URI"))))
         (case url-http-response-status
           (300
            ;; Quoth the spec (section 10.3.1)
            ;; -------------------------------
            ;; The requested resource corresponds to any one of a set of
            ;; representations, each with its own specific location and
            ;; agent-driven negotiation information is being provided so
            ;; that the user can select a preferred representation and
            ;; redirect its request to that location.
            ;;
            ;; If the server has a preferred choice of representation, it
            ;; SHOULD include the specific URI for that representation in
            ;; the Location field; user agents MAY use the Location field
            ;; value for automatic redirection.
            ;; -------------------------------
            ;; We do not support agent-driven negotiation, so we just
            ;; redirect to the preferred URI if one is provided.
            nil)
           ((301 302 307)
            ;; If the 301|302 status code is received in response to a
            ;; request other than GET or HEAD, the user agent MUST NOT
            ;; automatically redirect the request unless it can be
            ;; confirmed by the user, since this might change the
            ;; conditions under which the request was issued.
            (if (member url-http-method '("HEAD" "GET"))
                ;; Automatic redirection is ok
                nil
              ;; It is just too big of a pain in the ass to get this
              ;; prompt all the time.  We will just silently lose our
              ;; data and convert to a GET method.
              (url-http-debug "Converting `%s' request to `GET' because of REDIRECT(%d)"
                              url-http-method url-http-response-status)
              (setq url-http-method "GET"
                    url-http-data nil)))
           (303
            ;; The response to the request can be found under a different
            ;; URI and SHOULD be retrieved using a GET method on that
            ;; resource.
            (setq url-http-method "GET"
                  url-http-data nil))
           (304
            ;; The 304 response MUST NOT contain a message-body.
            (url-http-debug "Extracting document from cache... (%s)"
                            (url-cache-create-filename (url-view-url t)))
            (url-cache-extract (url-cache-create-filename (url-view-url t)))
            (setq redirect-uri nil
                  success t))
           (305
            ;; The requested resource MUST be accessed through the
            ;; proxy given by the Location field.  The Location field
            ;; gives the URI of the proxy.  The recipient is expected
            ;; to repeat this single request via the proxy.  305
            ;; responses MUST only be generated by origin servers.
            (error "Redirection thru a proxy server not supported: %s"
                   redirect-uri))
           (otherwise
            ;; Treat everything like '300'
            nil))
         (if redirect-uri
             (progn
               ;; Clean off any whitespace and/or <...> cruft.
               (if (string-match "\\([^ \t]+\\)[ \t]" redirect-uri)
                   (setq redirect-uri (match-string 1 redirect-uri)))
               (if (string-match "^<\\(.*\\)>$" redirect-uri)
                   (setq redirect-uri (match-string 1 redirect-uri)))

               ;; Some stupid sites (like sourceforge) send a
               ;; non-fully-qualified URL (ie: /), which royally confuses
               ;; the URL library.
               (if (not (string-match url-nonrelative-link redirect-uri))
                   (setq redirect-uri (url-expand-file-name redirect-uri)))
               (let ((url-request-method url-http-method)
                     (url-request-data url-http-data)
                     (url-request-extra-headers url-http-extra-headers))
                 (url-retrieve redirect-uri url-callback-function
                               (cdr url-callback-arguments))
                 (url-mark-buffer-as-dead (current-buffer)))))))
      (4                                ; Client error
       ;; 400 Bad Request
       ;; 401 Unauthorized
       ;; 402 Payment required
       ;; 403 Forbidden
       ;; 404 Not found
       ;; 405 Method not allowed
       ;; 406 Not acceptable
       ;; 407 Proxy authentication required
       ;; 408 Request time-out
       ;; 409 Conflict
       ;; 410 Gone
       ;; 411 Length required
       ;; 412 Precondition failed
       ;; 413 Request entity too large
       ;; 414 Request-URI too large
       ;; 415 Unsupported media type
       ;; 416 Requested range not satisfiable
       ;; 417 Expectation failed
       ;; 422 Unprocessable Entity (Added by DAV)
       ;; 423 Locked
       ;; 424 Failed Dependency
       (case url-http-response-status
         (401
          ;; The request requires user authentication.  The response
          ;; MUST include a WWW-Authenticate header field containing a
          ;; challenge applicable to the requested resource.  The
          ;; client MAY repeat the request with a suitable
          ;; Authorization header field.
          (url-http-handle-authentication nil))
         (402
          ;; This code is reserved for future use
          (url-mark-buffer-as-dead (current-buffer))
          (error "Somebody wants you to give them money"))
         (403
          ;; The server understood the request, but is refusing to
          ;; fulfill it.  Authorization will not help and the request
          ;; SHOULD NOT be repeated.
          nil)
         (405
          ;; The method specified in the Request-Line is not allowed
          ;; for the resource identified by the Request-URI.  The
          ;; response MUST include an Allow header containing a list of
          ;; valid methods for the requested resource.
          nil)
         (406
          ;; The resource identified by the request is only capable of
          ;; generating response entities which have content
          ;; characteristics nota cceptable according to the accept
          ;; headers sent in the request.
          nil)
         (407
          ;; This code is similar to 401 (Unauthorized), but indicates
          ;; that the client must first authenticate itself with the
          ;; proxy.  The proxy MUST return a Proxy-Authenticate header
          ;; field containing a challenge applicable to the proxy for
          ;; the requested resource.
          (url-http-handle-authentication t))
         (408
          ;; The client did not produce a request within the time that
          ;; the server was prepared to wait.  The client MAY repeat
          ;; the request without modifications at any later time.
          nil)
         (409
          ;; The request could not be completed due to a conflict with
          ;; the current state of the resource.  This code is only
          ;; allowed in situations where it is expected that the user
          ;; mioght be able to resolve the conflict and resubmit the
          ;; request.  The response body SHOULD include enough
          ;; information for the user to recognize the source of the
          ;; conflict.
          nil)
         (410
          ;; The requested resource is no longer available at the
          ;; server and no forwarding address is known.
          nil)
         (411
          ;; The server refuses to accept the request without a defined
          ;; Content-Length.  The client MAY repeat the request if it
          ;; adds a valid Content-Length header field containing the
          ;; length of the message-body in the request message.
          ;;
          ;; NOTE - this will never happen because
          ;; `url-http-create-request' automatically calculates the
          ;; content-length.
          nil)
         (412
          ;; The precondition given in one or more of the
          ;; request-header fields evaluated to false when it was
          ;; tested on the server.
          nil)
         ((413 414)
          ;; The server is refusing to process a request because the
          ;; request entity|URI is larger than the server is willing or
          ;; able to process.
          nil)
         (415
          ;; The server is refusing to service the request because the
          ;; entity of the request is in a format not supported by the
          ;; requested resource for the requested method.
          nil)
         (416
          ;; A server SHOULD return a response with this status code if
          ;; a request included a Range request-header field, and none
          ;; of the range-specifier values in this field overlap the
          ;; current extent of the selected resource, and the request
          ;; did not include an If-Range request-header field.
          nil)
         (417
          ;; The expectation given in an Expect request-header field
          ;; could not be met by this server, or, if the server is a
          ;; proxy, the server has unambiguous evidence that the
          ;; request could not be met by the next-hop server.
          nil)
         (otherwise
          ;; The request could not be understood by the server due to
          ;; malformed syntax.  The client SHOULD NOT repeat the
          ;; request without modifications.
          nil))
       (setq success t))
      (5                                ; Server errors
       ;; 500 Internal server error
       ;; 501 Not implemented
       ;; 502 Bad gateway
       ;; 503 Service unavailable
       ;; 504 Gateway time-out
       ;; 505 HTTP version not supported
       ;; 507 Insufficient storage
       (setq success t)
       (case url-http-response-status
         (501
          ;; The server does not support the functionality required to
          ;; fulfill the request.
          nil)
         (502
          ;; The server, while acting as a gateway or proxy, received
          ;; an invalid response from the upstream server it accessed
          ;; in attempting to fulfill the request.
          nil)
         (503
          ;; The server is currently unable to handle the request due
          ;; to a temporary overloading or maintenance of the server.
          ;; The implication is that this is a temporary condition
          ;; which will be alleviated after some delay.  If known, the
          ;; length of the delay MAY be indicated in a Retry-After
          ;; header.  If no Retry-After is given, the client SHOULD
          ;; handle the response as it would for a 500 response.
          nil)
         (504
          ;; The server, while acting as a gateway or proxy, did not
          ;; receive a timely response from the upstream server
          ;; specified by the URI (e.g. HTTP, FTP, LDAP) or some other
          ;; auxiliary server (e.g. DNS) it needed to access in
          ;; attempting to complete the request.
          nil)
         (505
          ;; The server does not support, or refuses to support, the
          ;; HTTP protocol version that was used in the request
          ;; message.
          nil)
         (507                           ; DAV
          ;; The method could not be performed on the resource
          ;; because the server is unable to store the representation
          ;; needed to successfully complete the request.  This
          ;; condition is considered to be temporary.  If the request
          ;; which received this status code was the result of a user
          ;; action, the request MUST NOT be repeated until it is
          ;; requested by a separate user action.
          nil)))
      (otherwise
       (error "Unknown class of HTTP response code: %d (%d)"
              class url-http-response-status)))
    (if (not success)
        (url-mark-buffer-as-dead (current-buffer)))
    (url-http-debug "Finished parsing HTTP headers: %S" success)
    (widen)
    success))
(defun url-http-activate-callback ()
  "Activate callback specified when this buffer was created."
  (declare (special url-http-process
                    url-callback-function
                    url-callback-arguments))
  (url-http-mark-connection-as-free (url-host url-current-object)
                                    (url-port url-current-object)
                                    url-http-process)
  (url-http-debug "Activating callback in buffer (%s)" (buffer-name))
  (apply url-callback-function url-callback-arguments))
;; These unfortunately cannot be macros... please ignore them!
;; NOTE(review): reconstructed from a garbled dump; verify against upstream.
(defun url-http-idle-sentinel (proc why)
  "Remove this (now defunct) process PROC from the list of open connections."
  (maphash (lambda (key val)
             ;; Only rewrite entries that actually contain PROC.
             (if (memq proc val)
                 (puthash key (delq proc val) url-http-open-connections)))
           url-http-open-connections))
;; NOTE(review): reconstructed from a garbled dump; verify against upstream.
(defun url-http-end-of-document-sentinel (proc why)
  ;; Sentinel used for old HTTP/0.9 or connections we know are going
  ;; to die as the 'end of document' notifier.
  (url-http-debug "url-http-end-of-document-sentinel in buffer (%s)"
                  (process-buffer proc))
  (url-http-idle-sentinel proc why)
  ;; The buffer may already have been killed; only proceed if it lives.
  (when (buffer-name (process-buffer proc))
    (set-buffer (process-buffer proc))
    (goto-char (point-min))
    (if (not (looking-at "HTTP/"))
        ;; HTTP/0.9 just gets passed back no matter what
        (url-http-activate-callback)
      (if (url-http-parse-headers)
          (url-http-activate-callback)))))
(defun url-http-simple-after-change-function (st nd length)
  ;; Function used when we do NOT know how long the document is going to be
  ;; Just _very_ simple 'downloaded %d' type of info.
  (declare (special url-http-end-of-headers))
  (url-lazy-message "Reading %s..." (url-pretty-length nd)))
;; NOTE(review): reconstructed from a garbled dump; verify against upstream.
(defun url-http-content-length-after-change-function (st nd length)
  "Function used when we DO know how long the document is going to be.
More sophisticated percentage downloaded, etc.
Also does minimal parsing of HTTP headers and will actually cause
the callback to be triggered."
  (declare (special url-current-object
                    url-http-end-of-headers
                    url-http-content-length
                    url-http-content-type
                    url-http-process))
  (if url-http-content-type
      (url-display-percentage
       "Reading [%s]... %s of %s (%d%%)"
       (url-percentage (- nd url-http-end-of-headers)
                       url-http-content-length)
       url-http-content-type
       (url-pretty-length (- nd url-http-end-of-headers))
       (url-pretty-length url-http-content-length)
       (url-percentage (- nd url-http-end-of-headers)
                       url-http-content-length))
    (url-display-percentage
     "Reading... %s of %s (%d%%)"
     (url-percentage (- nd url-http-end-of-headers)
                     url-http-content-length)
     (url-pretty-length (- nd url-http-end-of-headers))
     (url-pretty-length url-http-content-length)
     (url-percentage (- nd url-http-end-of-headers)
                     url-http-content-length)))

  (if (> (- nd url-http-end-of-headers)
         url-http-content-length)
      (progn
        ;; Found the end of the document!  Wheee!
        (url-display-percentage nil nil)
        (message "Reading... done.")
        (if (url-http-parse-headers)
            (url-http-activate-callback)))))
;; NOTE(review): reconstructed from a garbled dump (several structural lines
;; were missing); verify against upstream url-http.el.
(defun url-http-chunked-encoding-after-change-function (st nd length)
  "Function used when dealing with 'chunked' encoding.
Cannot give a sophisticated percentage, but we need a different
function to look for the special 0-length chunk that signifies
the end of the document."
  (declare (special url-current-object
                    url-http-end-of-headers
                    url-http-content-type
                    url-http-chunked-length
                    url-http-chunked-counter
                    url-http-process url-http-chunked-start))
  (save-excursion
    (goto-char st)
    (let ((read-next-chunk t)
          (case-fold-search t)
          (regexp nil)
          (no-initial-crlf nil))
      ;; We need to loop thru looking for more chunks even within
      ;; one after-change-function call.
      (while read-next-chunk
        ;; The very first chunk header is not preceded by a CRLF.
        (setq no-initial-crlf (= 0 url-http-chunked-counter))
        (if url-http-content-type
            (url-display-percentage nil
             "Reading [%s]... chunk #%d"
             url-http-content-type url-http-chunked-counter)
          (url-display-percentage nil
           "Reading... chunk #%d"
           url-http-chunked-counter))
        (url-http-debug "Reading chunk %d (%d %d %d)"
                        url-http-chunked-counter st nd length)
        (setq regexp (if no-initial-crlf
                         "\\([0-9a-z]+\\).*\r?\n"
                       "\r?\n\\([0-9a-z]+\\).*\r?\n"))

        (if url-http-chunked-start
            ;; We know how long the chunk is supposed to be, skip over
            ;; leading crap if possible.
            (if (> nd (+ url-http-chunked-start url-http-chunked-length))
                (progn
                  (url-http-debug "Got to the end of chunk #%d!"
                                  url-http-chunked-counter)
                  (goto-char (+ url-http-chunked-start
                                url-http-chunked-length)))
              (url-http-debug "Still need %d bytes to hit end of chunk"
                              (- (+ url-http-chunked-start
                                    url-http-chunked-length)
                                 nd))
              (setq read-next-chunk nil)))
        (if (not read-next-chunk)
            (url-http-debug "Still spinning for next chunk...")
          (if no-initial-crlf (skip-chars-forward "\r\n"))
          (if (not (looking-at regexp))
              (progn
                ;; Must not have received the entirety of the chunk header,
                ;; need to spin some more.
                (url-http-debug "Did not see start of chunk @ %d!" (point))
                (setq read-next-chunk nil))
            (add-text-properties (match-beginning 0) (match-end 0)
                                 (list 'start-open t
                                       'end-open t
                                       'chunked-encoding t
                                       'face (if (featurep 'xemacs)
                                                 'text-cursor
                                               'cursor)
                                       'invisible t))
            ;; The chunk size is hexadecimal.
            (setq url-http-chunked-length
                  (string-to-number (buffer-substring (match-beginning 1)
                                                      (match-end 1))
                                    16)
                  url-http-chunked-counter (1+ url-http-chunked-counter)
                  url-http-chunked-start (set-marker
                                          (or url-http-chunked-start
                                              (make-marker))
                                          (match-end 0)))
;           (if (not url-http-debug)
            (delete-region (match-beginning 0) (match-end 0));)
            (url-http-debug "Saw start of chunk %d (length=%d, start=%d"
                            url-http-chunked-counter url-http-chunked-length
                            (marker-position url-http-chunked-start))
            (if (= 0 url-http-chunked-length)
                (progn
                  ;; Found the end of the document!  Wheee!
                  (url-http-debug "Saw end of stream chunk!")
                  (setq read-next-chunk nil)
                  (url-display-percentage nil nil)
                  (goto-char (match-end 1))
                  (if (re-search-forward "^\r*$" nil t)
                      (url-http-debug "Saw end of trailers..."))
                  (if (url-http-parse-headers)
                      (url-http-activate-callback))))))))))
;; NOTE(review): reconstructed from a garbled dump (several structural lines
;; were missing); verify against upstream url-http.el.
(defun url-http-wait-for-headers-change-function (st nd length)
  ;; This will wait for the headers to arrive and then splice in the
  ;; next appropriate after-change-function, etc.
  (declare (special url-current-object
                    url-http-end-of-headers
                    url-http-content-type
                    url-http-content-length
                    url-http-transfer-encoding
                    url-callback-function
                    url-callback-arguments
                    url-http-process
                    url-http-method
                    url-http-after-change-function
                    url-http-response-status))
  (url-http-debug "url-http-wait-for-headers-change-function (%s)"
                  (buffer-name))
  (if (not (bobp))
      (let ((end-of-headers nil)
            (old-http nil)
            (content-length nil))
        (goto-char (point-min))
        (if (not (looking-at "^HTTP/[1-9]\\.[0-9]"))
            ;; Not HTTP/x.y data, must be 0.9
            ;; God, I wish this could die.
            (setq end-of-headers t
                  url-http-end-of-headers 0
                  old-http t)
          (if (re-search-forward "^\r*$" nil t)
              ;; Saw the end of the headers
              (progn
                (url-http-debug "Saw end of headers... (%s)" (buffer-name))
                (setq url-http-end-of-headers (set-marker (make-marker)
                                                          (point))
                      end-of-headers t)
                (url-http-clean-headers))))

        (if (not end-of-headers)
            ;; Haven't seen the end of the headers yet, need to wait
            ;; for more data to arrive.
            nil
          (if old-http
              (message "HTTP/0.9 How I hate thee!")
            (progn
              (url-http-parse-response)
              (mail-narrow-to-head)
              ;;(narrow-to-region (point-min) url-http-end-of-headers)
              (setq url-http-transfer-encoding (mail-fetch-field
                                                "transfer-encoding")
                    url-http-content-type (mail-fetch-field "content-type"))
              (if (mail-fetch-field "content-length")
                  (setq url-http-content-length
                        (string-to-number (mail-fetch-field "content-length"))))
              (widen)))
          (if url-http-transfer-encoding
              (setq url-http-transfer-encoding
                    (downcase url-http-transfer-encoding)))

          (cond
           ((or (= url-http-response-status 204)
                (= url-http-response-status 205))
            (url-http-debug "%d response must have headers only (%s)."
                            url-http-response-status (buffer-name))
            (if (url-http-parse-headers)
                (url-http-activate-callback)))
           ((string= "HEAD" url-http-method)
            ;; A HEAD request is _ALWAYS_ terminated by the header
            ;; information, regardless of any entity headers,
            ;; according to section 4.4 of the HTTP/1.1 draft.
            (url-http-debug "HEAD request must have headers only (%s)."
                            (buffer-name))
            (if (url-http-parse-headers)
                (url-http-activate-callback)))
           ((string= "CONNECT" url-http-method)
            ;; A CONNECT request is finished, but we cannot stick this
            ;; back on the free connectin list
            (url-http-debug "CONNECT request must have headers only.")
            (if (url-http-parse-headers)
                (url-http-activate-callback)))
           ((equal url-http-response-status 304)
            ;; Only allowed to have a header section.  We have to handle
            ;; this here instead of in url-http-parse-headers because if
            ;; you have a cached copy of something without a known
            ;; content-length, and try to retrieve it from the cache, we'd
            ;; fall into the 'being dumb' section and wait for the
            ;; connection to terminate, which means we'd wait for 10
            ;; seconds for the keep-alives to time out on some servers.
            (if (url-http-parse-headers)
                (url-http-activate-callback)))
           (old-http
            ;; HTTP/0.9 always signaled end-of-connection by closing the
            ;; connection.
            (url-http-debug
             "Saw HTTP/0.9 response, connection closed means end of document.")
            (setq url-http-after-change-function
                  'url-http-simple-after-change-function))
           ((equal url-http-transfer-encoding "chunked")
            (url-http-debug "Saw chunked encoding.")
            (setq url-http-after-change-function
                  'url-http-chunked-encoding-after-change-function)
            (if (> nd url-http-end-of-headers)
                (progn
                  (url-http-debug
                   "Calling initial chunked-encoding for extra data at end of headers")
                  (url-http-chunked-encoding-after-change-function
                   (marker-position url-http-end-of-headers) nd
                   (- nd url-http-end-of-headers)))))
           ((integerp url-http-content-length)
            (url-http-debug
             "Got a content-length, being smart about document end.")
            (setq url-http-after-change-function
                  'url-http-content-length-after-change-function)
            (cond
             ((= 0 url-http-content-length)
              ;; We got a NULL body!  Activate the callback
              ;; immediately!
              (url-http-debug
               "Got 0-length content-length, activating callback immediately.")
              (if (url-http-parse-headers)
                  (url-http-activate-callback)))
             ((> nd url-http-end-of-headers)
              ;; Have some leftover data
              (url-http-debug "Calling initial content-length for extra data at end of headers")
              (url-http-content-length-after-change-function
               (marker-position url-http-end-of-headers)
               nd
               (- nd url-http-end-of-headers)))
             (t
              nil)))
           (t
            (url-http-debug "No content-length, being dumb.")
            (setq url-http-after-change-function
                  'url-http-simple-after-change-function)))))
    ;; We are still at the beginning of the buffer... must just be
    ;; waiting for a response.
    (url-http-debug "Spinning waiting for headers..."))
  (goto-char (point-max)))
;; NOTE(review): this region has been mangled by an automated extraction:
;; the original file's line numbers (996, 997, ...) are fused into the
;; text and several source lines are missing outright (note the gaps in
;; the embedded numbering, e.g. 1019 -> 1022, 1044 -> 1048).  The text is
;; therefore NOT valid Emacs Lisp as it stands; it is preserved verbatim
;; with review comments only, pending recovery of a pristine url-http.el.
;;
;; Contract (from the docstring below): asynchronously retrieve a parsed
;; URL and invoke CALLBACK with CBARGS when retrieval completes.
996 (defun url-http (url callback cbargs
)
997 "Retrieve URL via HTTP asynchronously.
998 URL must be a parsed URL. See `url-generic-parse-url' for details.
999 When retrieval is completed, the function CALLBACK is executed with
1000 CBARGS as the arguments."
;; Refuse anything that is not a pre-parsed URL vector.
1001 (check-type url vector
"Need a pre-parsed URL.")
;; Declare the per-request state variables special so the
;; byte-compiler accepts the buffer-local `set' calls below.
1002 (declare (special url-current-object
1003 url-http-end-of-headers
1004 url-http-content-type
1005 url-http-content-length
1006 url-http-transfer-encoding
1007 url-http-after-change-function
1008 url-callback-function
1009 url-callback-arguments
1011 url-http-extra-headers
1013 url-http-chunked-length
1014 url-http-chunked-start
1015 url-http-chunked-counter
;; Obtain a network connection (presumably reused from
;; `url-http-open-connections' when keep-alive permits — TODO confirm
;; against `url-http-find-free-connection', not visible here) and a
;; fresh work buffer for this request.
1017 (let ((connection (url-http-find-free-connection (url-host url
)
1019 (buffer (generate-new-buffer (format " *http %s:%d*"
;; Failure path: no connection could be opened, so discard the work
;; buffer and signal an error to the caller.
1022 (if (not connection
)
1023 ;; Failed to open the connection for some reason
1025 (kill-buffer buffer
)
1027 (error "Could not create connection to %s:%d" (url-host url
)
;; Success path: prepare the work buffer (unibyte, since raw octets
;; arrive from the network) and record the object being fetched.
1031 (mm-disable-multibyte)
1032 (setq url-current-object url
1033 mode-line-format
"%b [%s]")
;; Make every piece of per-request state buffer-local and reset it,
;; so concurrent retrievals cannot interfere with each other.
1035 (dolist (var '(url-http-end-of-headers
1036 url-http-content-type
1037 url-http-content-length
1038 url-http-transfer-encoding
1039 url-http-after-change-function
1040 url-http-response-status
1041 url-http-chunked-length
1042 url-http-chunked-counter
1043 url-http-chunked-start
1044 url-callback-function
1045 url-callback-arguments
1048 url-http-extra-headers
1050 url-http-cookies-sources
))
1051 (set (make-local-variable var
) nil
))
;; Initialize the request state from the caller-visible url-request-*
;; variables; the method defaults to GET, and header parsing starts in
;; `url-http-wait-for-headers-change-function'.
1053 (setq url-http-method
(or url-request-method
"GET")
1054 url-http-extra-headers url-request-extra-headers
1055 url-http-data url-request-data
1056 url-http-process connection
1057 url-http-chunked-length nil
1058 url-http-chunked-start nil
1059 url-http-chunked-counter
0
1060 url-callback-function callback
1061 url-callback-arguments cbargs
1062 url-http-after-change-function
'url-http-wait-for-headers-change-function
1063 url-http-cookies-sources
(if (boundp 'proxy-object
)
1065 url-current-object
))
;; Wire the process to this buffer, install the end-of-document
;; sentinel and the generic filter, then transmit the request.
1067 (set-process-buffer connection buffer
)
1068 (set-process-sentinel connection
'url-http-end-of-document-sentinel
)
1069 (set-process-filter connection
'url-http-generic-filter
)
1070 (process-send-string connection
(url-http-create-request url
))))
1073 ;; Since Emacs 19/20 does not allow you to change the
1074 ;; `after-change-functions' hook in the midst of running them, we fake
1075 ;; an after change by hooking into the process filter and inserting
1076 ;; the data ourselves. This is slightly less efficient, but there
1077 ;; were tons of weird ways the after-change code was biting us in the shorts.
;; NOTE(review): truncated by extraction — the arguments to the final
;; `funcall' and the defun's closing parens are missing (embedded
;; numbering jumps 1085 -> 1087 and 1089 -> 1092).  Preserved verbatim
;; with comments only.
;;
;; Process filter for all HTTP connections: inserts DATA into the
;; process buffer and hands control to the buffer-local
;; `url-http-after-change-function' (see the long comment above about
;; why we fake after-change handling in the filter).
1079 (defun url-http-generic-filter (proc data
)
1080 ;; Sometimes we get a zero-length data chunk after the process has
1081 ;; been changed to 'free', which means it has no buffer associated
1082 ;; with it. Do nothing if there is no buffer, or 0 length data.
1083 (declare (special url-http-after-change-function
))
;; `and' chain: bail out early when PROC has no buffer or DATA is empty.
1084 (and (process-buffer proc
)
1085 (/= (length data
) 0)
1087 (set-buffer (process-buffer proc
))
1088 (url-http-debug "Calling after change function `%s' for `%S'" url-http-after-change-function proc
)
1089 (funcall url-http-after-change-function
1092 (goto-char (point-max))
1097 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
1098 ;;; file-name-handler stuff from here on out
1099 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Compatibility shim: some Emacsen (historically XEmacs) provide
;; `symbol-value-in-buffer' natively; alias to it when available,
;; otherwise define our own.  NOTE(review): the fallback body is
;; incomplete here (embedded numbering jumps 1103 -> 1106 and
;; 1106 -> 1108 — the `with-current-buffer'/unbound-value branch
;; appears to have been dropped by extraction).
1100 (if (not (fboundp 'symbol-value-in-buffer
))
1101 (defun url-http-symbol-value-in-buffer (symbol buffer
1102 &optional unbound-value
)
1103 "Return the value of SYMBOL in BUFFER, or UNBOUND-VALUE if it is unbound."
1106 (if (not (boundp symbol
))
1108 (symbol-value symbol
))))
;; Native implementation exists: just alias it.
1107 (defalias 'url-http-symbol-value-in-buffer
'symbol-value-in-buffer
))
(defun url-http-head (url)
  "Fetch only the headers of URL using the HTTP `HEAD' method.
Return the response buffer, as produced by
`url-retrieve-synchronously'; callers are expected to kill the
buffer when they are done with it."
  ;; Bind the request method/body dynamically so the synchronous
  ;; retrieval below issues a bodiless HEAD request.
  (let ((url-request-data nil)
	(url-request-method "HEAD"))
    (url-retrieve-synchronously url)))
;; NOTE(review): incomplete — the `let' bindings for `status'/`exists',
;; the surrounding `when buffer' form, the default argument to
;; `url-http-symbol-value-in-buffer', and the trailing result form are
;; missing (embedded numbering jumps 1117 -> 1120 and 1120 -> 1123).
;; Intent from what remains: HEAD the URL and treat a 2xx response
;; status as "the file exists", killing the temporary buffer afterwards.
1117 (defun url-http-file-exists-p (url)
1120 (buffer (url-http-head url
)))
;; exists <=> 200 <= status < 300 (any HTTP success code).
1123 (setq status
(url-http-symbol-value-in-buffer 'url-http-response-status
1125 exists
(and (>= status
200) (< status
300)))
;; Clean up the response buffer created by `url-http-head'.
1126 (kill-buffer buffer
))
;; Over plain HTTP there is no separate permission probe, so a resource
;; is treated as "readable" exactly when it exists.
(defalias 'url-http-file-readable-p 'url-http-file-exists-p)
;; Build a `file-attributes'-shaped 11-element list for URL from a HEAD
;; response: link count 1, uid/gid 0, size taken from the
;; Content-Length header, and a "----------" mode string.
;; NOTE(review): incomplete — the `let' binding for `attributes', the
;; `when buffer' guard, the buffer/default arguments to
;; `url-http-symbol-value-in-buffer', and the trailing result form are
;; missing (embedded numbering jumps 1133 -> 1136, 1141 -> 1143, and
;; 1144 -> 1148).  ID-FORMAT is accepted for `file-attributes'
;; signature compatibility; it is not consulted in the visible code.
1132 (defun url-http-head-file-attributes (url &optional id-format
)
1133 (let ((buffer (url-http-head url
))
;; Start from an all-nil attribute vector and fill in known slots.
1136 (setq attributes
(make-list 11 nil
))
1137 (setf (nth 1 attributes
) 1) ; Number of links to file
1138 (setf (nth 2 attributes
) 0) ; file uid
1139 (setf (nth 3 attributes
) 0) ; file gid
1140 (setf (nth 7 attributes
) ; file size
1141 (url-http-symbol-value-in-buffer 'url-http-content-length
;; Mode string: ten dashes, computed once at compile time.
1143 (setf (nth 8 attributes
) (eval-when-compile (make-string 10 ?-
)))
;; Discard the HEAD response buffer.
1144 (kill-buffer buffer
))
(defun url-http-file-attributes (url &optional id-format)
  "Return `file-attributes'-style information about URL.
When the server supports DAV, use the richer DAV-based query;
otherwise fall back to a plain HTTP HEAD request.  ID-FORMAT is
passed through to the underlying implementation."
  (cond ((url-dav-supported-p url)
	 (url-dav-file-attributes url id-format))
	(t
	 (url-http-head-file-attributes url id-format))))
;; NOTE(review): heavily truncated by extraction — the docstring's
;; property-name lines, the `let*' bindings for `options'/`header', the
;; `save-excursion'/`set-buffer' wrapper, several `plist-put' key
;; arguments, and the trailing result form are missing (embedded
;; numbering gaps: 1156 -> 1158, 1161 -> 1165, 1177 -> 1180,
;; 1187 -> 1192, 1196 -> 1198, 1220 -> 1224, 1226 -> end).  Preserved
;; verbatim with comments only.  Visible intent: issue an OPTIONS
;; request, and on a 2xx response parse the Allow, DAV, DASL, P3P and
;; Accept-Ranges headers (via `mail-fetch-field' after
;; `mail-narrow-to-head') into a property list.
1154 (defun url-http-options (url)
1155 "Returns a property list describing options available for URL.
1156 This list is retrieved using the `OPTIONS' HTTP method.
1158 Property list members:
1161 A list of symbols specifying what HTTP methods the resource
1165 A list of numbers specifying what DAV protocol/schema versions are
1169 A list of DASL search types supported (string form)
1172 A list of the units available for use in partial document fetches.
1175 The `Platform For Privacy Protection' description for the resource.
1176 Currently this is just the raw header contents. This is likely to
1177 change once P3P is formally supported by the URL package or
1180 (let* ((url-request-method "OPTIONS")
1181 (url-request-data nil
)
1182 (buffer (url-retrieve-synchronously url
))
;; Response status 2xx <=> (status / 100) == 2.
1185 (when (and buffer
(= 2 (/ (url-http-symbol-value-in-buffer
1186 'url-http-response-status buffer
0) 100)))
1187 ;; Only parse the options if we got a 2xx response code!
1192 (mail-narrow-to-head)
1194 ;; Figure out what methods are supported.
1195 (when (setq header
(mail-fetch-field "allow"))
1196 (setq options
(plist-put
1198 (mapcar 'intern
(split-string header
"[ ,]+")))))
;; DAV protocol/schema versions advertised by the server.
1201 (when (setq header
(mail-fetch-field "dav"))
1202 (setq options
(plist-put
1205 (mapcar 'string-to-number
1206 (split-string header
"[, ]+"))))))
;; DASL search grammars, kept in string form.
1209 (when (setq header
(mail-fetch-field "dasl"))
1210 (setq options
(plist-put
1212 (split-string header
"[, ]+"))))
1214 ;; P3P - should get more detailed here. FIXME
1215 (when (setq header
(mail-fetch-field "p3p"))
1216 (setq options
(plist-put options
'p3p header
)))
1218 ;; Check for whether they accept byte-range requests.
1219 (when (setq header
(mail-fetch-field "accept-ranges"))
1220 (setq options
(plist-put
1224 (split-string header
"[, ]+"))))))
;; Clean up the OPTIONS response buffer before returning.
1226 (if buffer
(kill-buffer buffer
))
1231 ;; arch-tag: ba7c59ae-c0f4-4a31-9617-d85f221732ee
1232 ;;; url-http.el ends here