comparison lisp/w3/url.el @ 14:9ee227acff29 r19-15b90

Import from CVS: tag r19-15b90
author cvs
date Mon, 13 Aug 2007 08:48:42 +0200
parents
children 0293115a14e9 6a378aca36af
comparison
equal deleted inserted replaced
13:13c6d0aaafe5 14:9ee227acff29
1 ;;; url.el --- Uniform Resource Locator retrieval tool
2 ;; Author: wmperry
3 ;; Created: 1996/12/19 21:53:03
4 ;; Version: 1.40
5 ;; Keywords: comm, data, processes, hypermedia
6
7 ;;; LCD Archive Entry:
8 ;;; url|William M. Perry|wmperry@cs.indiana.edu|
9 ;;; Major mode for manipulating URLs|
10 ;;; 1996/12/19 21:53:03|1.40|Location Undetermined
11 ;;;
12
13 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
14 ;;; Copyright (c) 1993-1996 by William M. Perry (wmperry@cs.indiana.edu)
15 ;;; Copyright (c) 1996 Free Software Foundation, Inc.
16 ;;;
17 ;;; This file is not part of GNU Emacs, but the same permissions apply.
18 ;;;
19 ;;; GNU Emacs is free software; you can redistribute it and/or modify
20 ;;; it under the terms of the GNU General Public License as published by
21 ;;; the Free Software Foundation; either version 2, or (at your option)
22 ;;; any later version.
23 ;;;
24 ;;; GNU Emacs is distributed in the hope that it will be useful,
25 ;;; but WITHOUT ANY WARRANTY; without even the implied warranty of
26 ;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
27 ;;; GNU General Public License for more details.
28 ;;;
29 ;;; You should have received a copy of the GNU General Public License
30 ;;; along with GNU Emacs; see the file COPYING. If not, write to the
31 ;;; Free Software Foundation, Inc., 59 Temple Place - Suite 330,
32 ;;; Boston, MA 02111-1307, USA.
33 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
34
35
36 (require 'cl)
37 (require 'url-vars)
38 (require 'url-parse)
39 (require 'urlauth)
40 (require 'url-cookie)
41 (require 'mm)
42 (require 'md5)
43 (require 'base64)
44 (require 'mule-sysdp)
45 (or (featurep 'efs)
46 (featurep 'efs-auto)
47 (condition-case ()
48 (require 'ange-ftp)
49 (error nil)))
50
51 (require 'w3-sysdp)
52
53 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
54 ;;; Functions that might not exist in old versions of emacs
55 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defun url-save-error (errobj)
  "Dump ERROBJ into the hidden \" *url-error*\" buffer.
The buffer is erased first, so it always holds only the latest error."
  (let ((err-buffer (get-buffer-create " *url-error*")))
    (save-excursion
      (set-buffer err-buffer)
      (erase-buffer))
    (display-error errobj err-buffer)))
61
;; Define `url-warn' from the best warning facility available at load
;; time, in order of preference: XEmacs' `display-warning', W3's own
;; `w3-warn', the stock `warn', and finally a hand-rolled fallback that
;; appends to a visible buffer.
(cond
 ((fboundp 'display-warning)
  (fset 'url-warn 'display-warning))
 ((fboundp 'w3-warn)
  (fset 'url-warn 'w3-warn))
 ((fboundp 'warn)
  (defun url-warn (class message &optional level)
    ;; `warn' has no notion of class/level, so fold them into the text.
    (warn "(%s/%s) %s" class (or level 'warning) message)))
 (t
  (defun url-warn (class message &optional level)
    ;; Last resort: append the warning to *W3-WARNINGS* and show it.
    (save-excursion
      (set-buffer (get-buffer-create "*W3-WARNINGS*"))
      (goto-char (point-max))
      (save-excursion
        (insert (format "(%s/%s) %s\n" class (or level 'warning) message)))
      (display-buffer (current-buffer))))))
78
79
80 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
81 ;;; Autoload all the URL loaders
82 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Scheme loader functions -- one per URL scheme, each living in its
;; own file and loaded on first use.
(autoload 'url-file "url-file")
(autoload 'url-ftp "url-file")
(autoload 'url-gopher "url-gopher")
(autoload 'url-irc "url-irc")
(autoload 'url-http "url-http")
(autoload 'url-nfs "url-nfs")
(autoload 'url-mailserver "url-mail")
(autoload 'url-mailto "url-mail")
(autoload 'url-info "url-misc")
(autoload 'url-shttp "url-http")
(autoload 'url-https "url-http")
(autoload 'url-finger "url-misc")
(autoload 'url-rlogin "url-misc")
(autoload 'url-telnet "url-misc")
(autoload 'url-tn3270 "url-misc")
(autoload 'url-proxy "url-misc")
(autoload 'url-x-exec "url-misc")
(autoload 'url-news "url-news")
(autoload 'url-nntp "url-news")
(autoload 'url-decode-pgp/pem "url-pgp")
(autoload 'url-wais "url-wais")

;; Support routines used by the scheme handlers above.
(autoload 'url-save-newsrc "url-news")
(autoload 'url-news-generate-reply-form "url-news")
(autoload 'url-parse-newsrc "url-news")
(autoload 'url-mime-response-p "url-http")
(autoload 'url-parse-mime-headers "url-http")
(autoload 'url-handle-refresh-header "url-http")
(autoload 'url-create-mime-request "url-http")
(autoload 'url-create-message-id "url-http")
(autoload 'url-create-multipart-request "url-http")
(autoload 'url-parse-viewer-types "url-http")
115
116 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
117 ;;; File-name-handler-alist functions
118 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defun url-setup-file-name-handlers ()
  ;; Setup file-name handlers.
  ;; NOTE(review): the whole body below is QUOTED, so calling this
  ;; function merely returns the list literal -- it never actually
  ;; touches `file-name-handler-alist'.  Presumably disabled on
  ;; purpose; confirm intent before removing the quote.
  '(cond
    ((not (boundp 'file-name-handler-alist))
     nil)                               ; Don't load if no alist
    ((rassq 'url-file-handler file-name-handler-alist)
     nil)                               ; Don't load twice
    ((and (string-match "XEmacs\\|Lucid" emacs-version)
          (< url-emacs-minor-version 11)) ; Don't load in lemacs 19.10
     nil)
    (t
     (setq file-name-handler-alist
           (let ((new-handler (cons
                               (concat "^/*"
                                       (substring url-nonrelative-link1 nil))
                               'url-file-handler)))
             (if file-name-handler-alist
                 (append (list new-handler) file-name-handler-alist)
               (list new-handler)))))))
138
(defun url-file-handler (operation &rest args)
  "Dispatch OPERATION to its URL-aware handler, if one is registered.
Called from the `file-name-handler-alist' machinery.  OPERATION is the
file primitive being intercepted (e.g. `file-exists-p'); ARGS are the
arguments it would have received, the first of which is the URL."
  (let* ((url (car args))
         (rest-args (cdr args))
         (handler (get operation 'url-file-handlers)))
    ;; Strip the leading slash left over from the handler-alist match.
    (if (= (string-to-char url) ?/)
        (setq url (substring url 1 nil)))
    (cond
     (handler
      (apply handler url rest-args))
     (t
      ;; No URL-specific handler: run the real primitive with handlers
      ;; disabled so we do not recurse back into ourselves.
      (let (file-name-handler-alist)
        (apply operation url rest-args))))))
151
(defun url-file-handler-identity (&rest args)
  "Handler stand-in that simply returns its first argument (the URL)."
  (nth 0 args))
154
(defun url-file-handler-null (&rest args)
  "Handler stand-in that ignores its arguments and returns nil."
  (ignore args))
157
;; Wire the generic file operations into the URL-specific handlers
;; consulted by `url-file-handler', via each operation symbol's
;; `url-file-handlers' property.
;; NOTE(review): `file-directory-p' and `file-writable-p' are each
;; registered TWICE in this table; the later `put' wins, so the first
;; `url-file-handler-null' registrations are immediately overridden by
;; `url-file-directory-p' / `url-file-writable-p'.
(put 'file-directory-p 'url-file-handlers 'url-file-handler-null)
(put 'substitute-in-file-name 'url-file-handlers 'url-file-handler-identity)
(put 'file-writable-p 'url-file-handlers 'url-file-handler-null)
(put 'file-truename 'url-file-handlers 'url-file-handler-identity)
(put 'insert-file-contents 'url-file-handlers 'url-insert-file-contents)
(put 'expand-file-name 'url-file-handlers 'url-expand-file-name)
(put 'directory-files 'url-file-handlers 'url-directory-files)
(put 'file-directory-p 'url-file-handlers 'url-file-directory-p)
(put 'file-writable-p 'url-file-handlers 'url-file-writable-p)
(put 'file-readable-p 'url-file-handlers 'url-file-exists)
(put 'file-executable-p 'url-file-handlers 'null)
(put 'file-symlink-p 'url-file-handlers 'null)
(put 'file-exists-p 'url-file-handlers 'url-file-exists)
(put 'copy-file 'url-file-handlers 'url-copy-file)
(put 'file-attributes 'url-file-handlers 'url-file-attributes)
(put 'file-name-all-completions 'url-file-handlers
     'url-file-name-all-completions)
(put 'file-name-completion 'url-file-handlers 'url-file-name-completion)
(put 'file-local-copy 'url-file-handlers 'url-file-local-copy)
177
178
179 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
180 ;;; Utility functions
181 ;;; -----------------
182 ;;; Various functions used around the url code.
183 ;;; Some of these qualify as hacks, but hey, this is elisp.
184 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
185
;; Prefer the tokenizer from the MM package when available; otherwise
;; define a simple equivalent of our own.
(if (fboundp 'mm-string-to-tokens)
    (fset 'url-string-to-tokens 'mm-string-to-tokens)
  (defun url-string-to-tokens (str &optional delim)
    "Return a list of words from the string STR"
    ;; DELIM is a delimiter character (default: space).  The mapcar is
    ;; used purely for its side effects: Y accumulates the current
    ;; token, RESULTS collects finished tokens in reverse order.
    ;; Consecutive delimiters are collapsed (empty tokens dropped),
    ;; except that a trailing token (possibly nil) is always consed on.
    (setq delim (or delim ? ))
    (let (results y)
      (mapcar
       (function
        (lambda (x)
          (cond
           ((and (= x delim) y) (setq results (cons y results) y nil))
           ((/= x delim) (setq y (concat y (char-to-string x))))
           (t nil)))) str)
      (nreverse (cons y results)))))
200
(defun url-days-between (date1 date2)
  "Return the number of days by which DATE1 is later than DATE2.
Both arguments are date strings understood by `url-day-number'."
  (let ((first (url-day-number date1))
        (second (url-day-number date2)))
    (- first second)))
204
(defun url-day-number (date)
  ;; Return the absolute (Gregorian) day number of DATE, a string in
  ;; any format `timezone-parse-date' understands.
  (let ((dat (mapcar (function (lambda (s) (and s (string-to-int s)) ))
                     (timezone-parse-date date))))
    ;; timezone-parse-date yields [year month day ...]; the gregorian
    ;; conversion below wants month/day/year order.
    (timezone-absolute-from-gregorian
     (nth 1 dat) (nth 2 dat) (car dat))))
210
(defun url-seconds-since-epoch (date)
  ;; Returns a number that says how many seconds have
  ;; lapsed between Jan 1 12:00:00 1970 and DATE."
  ;; DATE is any string `timezone-parse-date' understands.  The day
  ;; count and the time of day are computed separately and then
  ;; combined; the time portion of the epoch reference string is never
  ;; actually used.
  (let* ((tdate (mapcar (function (lambda (ti) (and ti (string-to-int ti))))
                        (timezone-parse-date date)))
         ;; Hour/minute/second of DATE itself.
         (ttime (mapcar (function (lambda (ti) (and ti (string-to-int ti))))
                        (timezone-parse-time
                         (aref (timezone-parse-date date) 3))))
         (edate (mapcar (function (lambda (ti) (and ti (string-to-int ti))))
                        (timezone-parse-date "Jan 1 12:00:00 1970")))
         ;; Whole days between DATE and the epoch.
         (tday (- (timezone-absolute-from-gregorian
                   (nth 1 tdate) (nth 2 tdate) (nth 0 tdate))
                  (timezone-absolute-from-gregorian
                   (nth 1 edate) (nth 2 edate) (nth 0 edate)))))
    (+ (nth 2 ttime)
       (* (nth 1 ttime) 60)
       (* (nth 0 ttime) 60 60)
       (* tday 60 60 24))))
229
(defun url-match (s x)
  "Return the text of match number X in S from the last regexp search."
  (let ((start (match-beginning x))
        (end (match-end x)))
    (substring s start end)))
233
(defun url-split (str del)
  "Split STR on the regular expression DEL.
Return an assoc list usable with `completing-read', in reverse order
of occurrence.  Fields consisting entirely of spaces and tabs are
dropped; empty fields are kept."
  (let ((result nil)
        (field nil))
    (while (string-match del str)
      (setq field (substring str 0 (match-beginning 0))
            str (substring str (match-end 0)))
      (or (string-match "^[ \t]+$" field)
          (setq result (cons (list field field) result))))
    ;; Whatever follows the final delimiter is a field too.
    (or (equal str "")
        (setq result (cons (list str str) result)))
    result))
246
(defun url-replace-regexp (regexp to-string)
  ;; Replace every match for REGEXP in the current buffer with
  ;; TO-STRING, scanning from the top.  FIXEDCASE is t (no case
  ;; fiddling) and LITERAL is nil, so \N references in TO-STRING are
  ;; still interpreted.
  (goto-char (point-min))
  (while (re-search-forward regexp nil t)
    (replace-match to-string t nil)))
251
(defun url-clear-tmp-buffer ()
  ;; Switch to `url-working-buffer' (creating it if necessary), make
  ;; sure it is writable, and wipe its contents.
  (set-buffer (get-buffer-create url-working-buffer))
  (if buffer-read-only (toggle-read-only))
  (erase-buffer))
256
(defun url-maybe-relative (url)
  ;; Retrieve URL, first expanding it against the current document in
  ;; case it is a relative reference.
  (url-retrieve (url-expand-file-name url)))
259
(defun url-buffer-is-hypertext (&optional buff)
  "Return t if a buffer contains HTML, as near as we can guess.
BUFF defaults to the current buffer.  The heuristic is simply a
case-insensitive search for a common HTML tag anywhere in the buffer."
  (let ((buffer (or buff (current-buffer))))
    (save-excursion
      (set-buffer buffer)
      (goto-char (point-min))
      (let ((case-fold-search t))
        (re-search-forward
         "<\\(TITLE\\|HEAD\\|BASE\\|H[0-9]\\|ISINDEX\\|P\\)>" nil t)))))
269
(defun url-percentage (x y)
  "Return X/Y expressed as an integer percentage.
Uses float arithmetic (with rounding) when the Emacs supports floats,
falling back to pure integer math otherwise."
  (cond
   ((fboundp 'float)
    (round (* 100 (/ x (float y)))))
   (t
    (/ (* x 100) y))))
274
(defun url-after-change-function (&rest args)
  ;; The nitty gritty details of messaging the HTTP/1.0 status messages
  ;; in the minibuffer."
  ;; Runs as an after-change hook on download buffers.  On the first
  ;; call it parses the HTTP status line and headers, caching
  ;; content-length and content-type in buffer-local variables; on
  ;; every call it reports progress via `url-lazy-message'.
  (or url-current-content-length
      (save-excursion
        (goto-char (point-min))
        (skip-chars-forward " \t\n")
        (if (not (looking-at "HTTP/[0-9]\.[0-9]"))
            (setq url-current-content-length 0) ; not an HTTP/1.x reply
          ;; `url-current-isindex' is reused here to remember where the
          ;; header block ends (the first blank line).
          (setq url-current-isindex
                (and (re-search-forward "$\r*$" nil t) (point)))
          (if (re-search-forward
               "^content-type:[ \t]*\\([^\r\n]+\\)\r*$"
               url-current-isindex t)
              (setq url-current-mime-type (downcase
                                           (url-eat-trailing-space
                                            (buffer-substring
                                             (match-beginning 1)
                                             (match-end 1))))))
          (goto-char (point-min))
          (if (re-search-forward "^content-length:\\([^\r\n]+\\)\r*$"
                                 url-current-isindex t)
              (setq url-current-content-length
                    (string-to-int (buffer-substring (match-beginning 1)
                                                     (match-end 1))))
            (setq url-current-content-length nil))))
      )
  ;; NOTE(review): this `max' always yields (point-max), since
  ;; (- (point-max) url-current-isindex) can never exceed it --
  ;; presumably `min' (bytes received past the headers) was intended.
  ;; Left as-is; it only skews the progress message.
  (let ((current-length (max (point-max)
                             (if url-current-isindex
                                 (- (point-max) url-current-isindex)
                               (point-max)))))
    (cond
     ((and url-current-content-length (> url-current-content-length 1)
           url-current-mime-type)
      (url-lazy-message "Reading [%s]... %d of %d bytes (%d%%)"
                        url-current-mime-type
                        current-length
                        url-current-content-length
                        (url-percentage current-length
                                        url-current-content-length)))
     ((and url-current-content-length (> url-current-content-length 1))
      (url-lazy-message "Reading... %d of %d bytes (%d%%)"
                        current-length url-current-content-length
                        (url-percentage current-length
                                        url-current-content-length)))
     ((and (/= 1 current-length) url-current-mime-type)
      (url-lazy-message "Reading [%s]... %d bytes"
                        url-current-mime-type current-length))
     ((/= 1 current-length)
      (url-lazy-message "Reading... %d bytes." current-length))
     (t (url-lazy-message "Waiting for response...")))))
326
(defun url-insert-entities-in-string (string)
  "Convert HTML markup-start characters to entity references in STRING.
Also replaces the \" character, so that the result may be safely used as
an attribute value in a tag.  Returns a new string with the result of the
conversion.  Replaces these characters as follows:
    & ==> &amp;
    < ==> &lt;
    > ==> &gt;
    \" ==> &quot;"
  ;; Fast path: if no special characters occur, return STRING itself.
  ;; Otherwise do the substitution in a reusable hidden work buffer.
  (if (string-match "[&<>\"]" string)
      (save-excursion
        (set-buffer (get-buffer-create " *entity*"))
        (erase-buffer)
        (buffer-disable-undo (current-buffer))
        (insert string)
        (goto-char (point-min))
        ;; At each special character: insert the entity text before
        ;; point, then delete the original character after point.
        (while (progn
                 (skip-chars-forward "^&<>\"")
                 (not (eobp)))
          (insert (cdr (assq (char-after (point))
                             '((?\" . "&quot;")
                               (?& . "&amp;")
                               (?< . "&lt;")
                               (?> . "&gt;")))))
          (delete-char 1))
        (buffer-string))
    string))
354
355 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
356 ;;; Information information
357 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defvar url-process-lookup-table nil
  "Alist mapping a process object to a plist of its properties.
Read with `url-process-get', written with `url-process-put'; entries
for dead processes are pruned by `url-gc-process-lookup-table'.")
359
(defun url-process-get (proc prop &optional default)
  "Get a value associated to PROC as property PROP
in plist stored in `url-process-lookup-table'.
Return DEFAULT when the property is absent or nil."
  (let ((plist (cdr-safe (assq proc url-process-lookup-table))))
    (or (plist-get plist prop)
        default)))
365
(defun url-process-put (proc prop val)
  "Associate to PROC as property PROP the value VAL
in plist stored in `url-process-lookup-table'.
Creates a fresh table entry for PROC when none exists yet."
  (let ((node (assq proc url-process-lookup-table)))
    (cond
     (node
      (setcdr node (plist-put (cdr node) prop val)))
     (t
      (setq url-process-lookup-table
            (cons (cons proc (list prop val))
                  url-process-lookup-table))))))
374
(defun url-gc-process-lookup-table ()
  ;; Drop entries whose process is dead (status `stop', `closed', or
  ;; nil) from `url-process-lookup-table', rebuilding the table from
  ;; the surviving entries (order is reversed in the process).
  (let (new)
    (while url-process-lookup-table
      (if (not (memq (process-status (caar url-process-lookup-table))
                     '(stop closed nil)))
          (setq new (cons (car url-process-lookup-table) new)))
      (setq url-process-lookup-table (cdr url-process-lookup-table)))
    (setq url-process-lookup-table new)))
383
(defun url-process-list ()
  ;; Return the subset of `process-list' that belongs to the URL
  ;; package (i.e. has a `url' property in the lookup table), after
  ;; first garbage-collecting entries for dead processes.
  (url-gc-process-lookup-table)
  (let ((processes (process-list))
        (retval nil))
    (while processes
      (if (url-process-get (car processes) 'url)
          (setq retval (cons (car processes) retval)))
      (setq processes (cdr processes)))
    retval))
393
(defun url-list-processes ()
  "Display a tabular summary of all URL-initiated downloads in progress."
  (interactive)
  (let ((processes (url-process-list))
        proc total-len len type url
        (url-status-buf (get-buffer-create "URL Status Display")))
    (set-buffer url-status-buf)
    (erase-buffer)
    (display-buffer url-status-buf)
    ;; Header row and separator line.
    (insert
     (eval-when-compile (format "%-40s %-20s %-15s" "URL" "Size" "Type")) "\n"
     (eval-when-compile (make-string 77 ?-)) "\n")
    (while processes
      (setq proc (car processes)
            processes (cdr processes))
      (save-excursion
        ;; Progress info lives in buffer-local variables of each
        ;; download's process buffer.
        (set-buffer (process-buffer proc))
        (setq total-len url-current-content-length
              len (max (point-max)
                       (if url-current-isindex
                           (- (point-max) url-current-isindex)
                         (point-max)))
              type url-current-mime-type
              url (url-process-get proc 'url))
        (set-buffer url-status-buf)
        (insert
         (format "%-40s%s%-20s %-15s\n"
                 (url-process-get proc 'url)
                 ;; URLs longer than the column get size/type on their
                 ;; own continuation line.
                 (if (> (length url) 40)
                     (format "\n%-40s " " ")
                   " ")
                 (if total-len
                     (format "%d of %d" len total-len)
                   (format "%d" len))
                 (or type "unknown")))))))
428
429
430 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
431 ;;; file-name-handler stuff calls this
432 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
433
(defun url-have-visited-url (url &rest args)
  "Return non-nil iff the user has visited URL before.
The return value is a cons of the url and the date last accessed as a string"
  ;; ARGS absorbs extra arguments passed through the
  ;; file-name-handler machinery and is ignored.
  (cl-gethash url url-global-history-hash-table))
438
(defun url-directory-files (url &rest args)
  "Return a list of files on a server."
  ;; Stub: directory listing over URLs is not implemented; always nil.
  nil)
442
(defun url-file-writable-p (url &rest args)
  "Return t iff a url is writable by this user"
  ;; Stub: remote URLs are always treated as read-only.
  nil)
446
(defun url-copy-file (url &rest args)
  "Copy a url to the specified filename."
  ;; Stub: copying from a URL is not implemented; always nil.
  nil)
450
(defun url-file-directly-accessible-p (url)
  "Returns t iff the specified URL is directly accessible
on your filesystem.  (nfs, local file, etc).
URL may be a string or an already-parsed URL vector."
  (let* ((parsed (if (vectorp url)
                     url
                   (url-generic-parse-url url)))
         (scheme (url-type parsed)))
    ;; Only host-less file:/ftp: URLs count as directly accessible.
    (and (member scheme '("file" "ftp"))
         (not (url-host parsed)))))
458
;;;###autoload
(defun url-file-attributes (url &rest args)
  "Return a list of attributes of URL.
Value is nil if specified file cannot be opened.
Otherwise, list elements are:
 0. t for directory, string (name linked to) for symbolic link, or nil.
 1. Number of links to file.
 2. File uid.
 3. File gid.
 4. Last access time, as a list of two integers.
  First integer has high-order 16 bits of time, second has low 16 bits.
 5. Last modification time, likewise.
 6. Last status change time, likewise.
 7. Size in bytes. (-1, if number is out of range).
 8. File modes, as a string of ten letters or dashes as in ls -l.
    If URL is on an http server, this will return the content-type if possible.
 9. t iff file's gid would change if file were deleted and recreated.
10. inode number.
11. Device number.

If file does not exist, returns nil."
  (and url
       (let* ((urlobj (url-generic-parse-url url))
              (type (url-type urlobj))
              (url-automatic-caching nil) ; never cache these probes
              (data nil)
              (exists nil))
         (cond
          ((equal type "http")
           (cond
            ((not url-be-anal-about-file-attributes)
             ;; Cheap path: fabricate attributes from the URL alone,
             ;; guessing the MIME type from the file extension.
             (setq data (list
                         (url-file-directory-p url) ; Directory
                         1              ; number of links to it
                         0              ; UID
                         0              ; GID
                         (cons 0 0)     ; Last access time
                         (cons 0 0)     ; Last mod. time
                         (cons 0 0)     ; Last status time
                         -1             ; file size
                         (mm-extension-to-mime
                          (url-file-extension (url-filename urlobj)))
                         nil            ; gid would change
                         0              ; inode number
                         0              ; device number
                         )))
            (t                          ; HTTP/1.0, use HEAD
             (let ((url-request-method "HEAD")
                   (url-request-data nil)
                   (url-working-buffer " *url-temp*"))
               (save-excursion
                 (condition-case ()
                     (progn
                       (url-retrieve url)
                       ;; Only a 2xx status counts as "exists".
                       (setq data (and
                                   (setq exists
                                         (cdr
                                          (assoc "status"
                                                 url-current-mime-headers)))
                                   (>= exists 200)
                                   (< exists 300)
                                   (list
                                    (url-file-directory-p url) ; Directory
                                    1   ; links to
                                    0   ; UID
                                    0   ; GID
                                    (cons 0 0) ; Last access time
                                    (cons 0 0) ; Last mod. time
                                    (cons 0 0) ; Last status time
                                    (or ; Size in bytes
                                     (cdr (assoc "content-length"
                                                 url-current-mime-headers))
                                     -1)
                                    (or
                                     (cdr (assoc "content-type"
                                                 url-current-mime-headers))
                                     (mm-extension-to-mime
                                      (url-file-extension
                                       (url-filename urlobj)))) ; content-type
                                    nil ; gid would change
                                    0   ; inode number
                                    0   ; device number
                                    ))))
                   (error nil))
                 ;; HEAD failed entirely: fall back to fabricated
                 ;; attributes, as in the cheap path above.
                 (and (not data)
                      (setq data (list (url-file-directory-p url)
                                       1 0 0 (cons 0 0) (cons 0 0) (cons 0 0)
                                       -1 (mm-extension-to-mime
                                           (url-file-extension
                                            url-current-file))
                                       nil 0 0)))
                 (kill-buffer " *url-temp*"))))))
          ((member type '("ftp" "file"))
           ;; Local or ange-ftp-reachable file: use the real
           ;; `file-attributes', then overwrite slot 8 (the modes
           ;; string) with the guessed MIME type, per the docstring.
           (let ((fname (if (url-host urlobj)
                            (concat "/"
                                    (if (url-user urlobj)
                                        (concat (url-user urlobj) "@")
                                      "")
                                    (url-host urlobj) ":"
                                    (url-filename urlobj))
                          (url-filename urlobj))))
             (setq data (or (file-attributes fname) (make-list 12 nil)))
             (setcar (cdr (cdr (cdr (cdr (cdr (cdr (cdr (cdr data))))))))
                     (mm-extension-to-mime (url-file-extension fname)))))
          (t nil))
         data)))
565
(defun url-file-name-all-completions (file dirname &rest args)
  "Return a list of all completions of file name FILE in directory DIR.
These are all file names in directory DIR which begin with FILE."
  ;; need to rewrite
  ;; Stub: always returns nil until a real implementation is written.
  )
571
(defun url-file-name-completion (file dirname &rest args)
  "Complete file name FILE in directory DIR.
Returns the longest string
common to all filenames in DIR that start with FILE.
If there is only one and FILE matches it exactly, returns t.
Returns nil if DIR contains no name starting with FILE."
  ;; Delegates to `url-file-name-all-completions', which is currently a
  ;; stub returning nil -- so this effectively always returns nil too.
  (apply 'url-file-name-all-completions file dirname args))
579
(defun url-file-local-copy (file &rest args)
  "Copy the file FILE into a temporary file on this machine.
Returns the name of the local copy, or nil, if FILE is directly
accessible."
  ;; Stub: always claims FILE is directly accessible.
  nil)
585
(defun url-insert-file-contents (url &rest args)
  "Insert the contents of the URL in this buffer."
  (interactive "sURL: ")
  ;; Force a synchronous fetch: asynchronous retrieval is turned off
  ;; via the *default* binding (url-retrieve may switch buffers, so a
  ;; buffer-local let-binding would not be reliable) and restored
  ;; immediately afterwards.
  (save-excursion
    (let ((old-asynch url-be-asynchronous))
      (setq-default url-be-asynchronous nil)
      (let ((buf (current-buffer))
            (url-working-buffer (cdr (url-retrieve url))))
        (setq-default url-be-asynchronous old-asynch)
        (set-buffer buf)
        (insert-buffer url-working-buffer)
        (setq buffer-file-name url)
        ;; Mark the work buffer unmodified so killing it won't prompt.
        (save-excursion
          (set-buffer url-working-buffer)
          (set-buffer-modified-p nil))
        (kill-buffer url-working-buffer)))))
602
(defun url-file-directory-p (url &rest args)
  "Return t iff a url points to a directory (i.e. ends in a slash).
ARGS absorbs extra arguments from the file-name-handler machinery and
is ignored.  Unlike the previous version, an empty URL is simply not a
directory instead of signalling a `substring' range error."
  (and (> (length url) 0)
       (equal (substring url -1 nil) "/")))
606
(defun url-file-exists (url &rest args)
  "Return t iff a file exists."
  (let* ((urlobj (url-generic-parse-url url))
         (type (url-type urlobj))
         (exists nil))
    (cond
     ((equal type "http")               ; use head
      ;; Existence == HEAD request answered with a 2xx status.
      ;; A missing status header is treated as 500 (failure).
      (let ((url-request-method "HEAD")
            (url-request-data nil)
            (url-working-buffer " *url-temp*"))
        (save-excursion
          (url-retrieve url)
          (setq exists (or (cdr
                            (assoc "status" url-current-mime-headers)) 500))
          (kill-buffer " *url-temp*")
          (setq exists (and (>= exists 200) (< exists 300))))))
     ((member type '("ftp" "file"))     ; file-attributes
      ;; Build a local path or a /user@host:path ange-ftp style path
      ;; and let `file-exists-p' do the real work.
      (let ((fname (if (url-host urlobj)
                       (concat "/"
                               (if (url-user urlobj)
                                   (concat (url-user urlobj) "@")
                                 "")
                               (url-host urlobj) ":"
                               (url-filename urlobj))
                     (url-filename urlobj))))
        (setq exists (file-exists-p fname))))
     (t nil))
    exists))
635
;;;###autoload
(defun url-normalize-url (url)
  "Return a 'normalized' version of URL.  This strips out default port
numbers, etc.  Schemes with no meaningful reparse (www, about, mailto,
mailserver, info) are returned untouched."
  (let* ((parsed (url-generic-parse-url url))
         (scheme (url-type parsed)))
    (if (member scheme '("www" "about" "mailto" "mailserver" "info"))
        url
      (url-recreate-url parsed))))
647
;;;###autoload
(defun url-buffer-visiting (url)
  "Return the name of a buffer (if any) that is visiting URL."
  ;; NOTE(review): despite the docstring, this returns the buffer
  ;; OBJECT itself ((car bufs)), not its name.
  (setq url (url-normalize-url url))
  (let ((bufs (buffer-list))
        (found nil))
    ;; Strip any #fragment before comparing.
    (if (condition-case ()
            (string-match "\\(.*\\)#" url)
          (error nil))
        (setq url (url-match url 1)))
    (while (and bufs (not found))
      (save-excursion
        (set-buffer (car bufs))
        ;; A buffer matches when it is a W3/URL buffer (but not one of
        ;; the hidden " *URL-n*" work buffers) showing this URL.
        (setq found (if (and
                         (not (string-match " \\*URL-?[0-9]*\\*" (buffer-name (car bufs))))
                         (memq major-mode '(url-mode w3-mode))
                         (equal (url-view-url t) url)) (car bufs) nil)
              bufs (cdr bufs))))
    found))
667
(defun url-file-size (url &rest args)
  "Return the size of a file in bytes, or -1 if can't be determined."
  (let* ((urlobj (url-generic-parse-url url))
         (type (url-type urlobj))
         (size -1)
         (data nil))
    (cond
     ((equal type "http")               ; use head
      ;; Issue a HEAD request and read the content-length header.
      (let ((url-request-method "HEAD")
            (url-request-data nil)
            (url-working-buffer " *url-temp*"))
        (save-excursion
          (url-retrieve url)
          (setq size (or (cdr
                          (assoc "content-length" url-current-mime-headers))
                         -1))
          (kill-buffer " *url-temp*"))))
     ((member type '("ftp" "file"))     ; file-attributes
      ;; Build a local or /user@host:path ange-ftp style name, then
      ;; take slot 7 (size) of its `file-attributes'.
      (let ((fname (if (url-host urlobj)
                       (concat "/"
                               (if (url-user urlobj)
                                   (concat (url-user urlobj) "@")
                                 "")
                               (url-host urlobj) ":"
                               (url-filename urlobj))
                     (url-filename urlobj))))
        (setq data (file-attributes fname)
              size (nth 7 data))))
     (t nil))
    ;; Headers may deliver the size as a string; normalize everything
    ;; to an integer, with -1 meaning "unknown".
    (cond
     ((stringp size) (string-to-int size))
     ((integerp size) size)
     ((null size) -1)
     (t -1))))
702
(defun url-generate-new-buffer-name (start)
  "Create a new buffer name based on START.
If no buffer named START exists, START itself is returned; otherwise
\"<1>\", \"<2>\", ... are appended until an unused name is found."
  (if (get-buffer start)
      (let* ((count 1)
             (candidate (format "%s<%d>" start count)))
        (while (get-buffer candidate)
          (setq count (1+ count)
                candidate (format "%s<%d>" start count)))
        candidate)
    start))
715
(defun url-generate-unique-filename (&optional fmt)
  "Generate a unique filename in url-temporary-directory
FMT, when given, is a format string with one %s slot that receives the
generated base name; otherwise a \"url-tmp.UID<n>\" name is used."
  (let* ((base (if fmt
                   (concat "url" (int-to-string (user-real-uid)))
                 (format "url-tmp.%d" (user-real-uid))))
         (counter 0)
         (candidate (if fmt
                        (format fmt (concat base (int-to-string counter)))
                      (concat base (int-to-string counter)))))
    ;; Bump the numeric suffix until the name is unused.
    (while (file-exists-p (expand-file-name candidate url-temporary-directory))
      (setq counter (1+ counter)
            candidate (if fmt
                          (format fmt (concat base (int-to-string counter)))
                        (concat base (int-to-string counter)))))
    (expand-file-name candidate url-temporary-directory)))
735
(defun url-lazy-message (&rest args)
  "Just like `message', but is a no-op if called more than once a second.
Will not do anything if url-show-status is nil."
  ;; The throttle compares the PREVIOUS value of
  ;; `url-lazy-message-time' with the current second; the embedded setq
  ;; refreshes the timestamp as a side effect of the comparison
  ;; (arguments to `=' are evaluated left to right).
  (if (or (null url-show-status)
          (= url-lazy-message-time
             (setq url-lazy-message-time (nth 1 (current-time)))))
      nil
    (apply 'message args)))
744
745
746 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
747 ;;; Gateway Support
748 ;;; ---------------
749 ;;; Fairly good/complete gateway support
750 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defun url-kill-process (proc)
  "Kill the process PROC - knows about all the various gateway types,
and acts accordingly."
  (if (eq url-gateway-method 'native)
      (delete-process proc)
    (if (eq url-gateway-method 'program)
        (kill-process proc)
      (error "Unknown url-gateway-method %s" url-gateway-method))))
758
(defun url-accept-process-output (proc)
  "Allow any pending output from subprocesses to be read by Emacs.
It is read into the process' buffers or given to their filter functions.
Where possible, this will not exit until some output is received from PROC,
or 1 second has elapsed."
  ;; The 1-second timeout only bounds each individual wait; callers
  ;; (e.g. `url-open-stream') loop on this as needed.
  (accept-process-output proc 1))
765
(defun url-process-status (proc)
  "Return the process status of a url buffer
PROC is handed straight to `process-status' for every gateway method
that is backed by a real process object."
  (cond
   ((memq url-gateway-method '(native ssl program)) (process-status proc))
   ;; Fixed typo in the error text: was "Unkown"; now matches the
   ;; wording used by `url-kill-process' and `url-open-stream'.
   (t (error "Unknown url-gateway-method %s" url-gateway-method))))
771
(defun url-open-stream (name buffer host service)
  "Open a stream to a host"
  ;; NAME/BUFFER/HOST/SERVICE are as for `open-network-stream'.
  ;; Hosts matching `url-gateway-local-host-regexp' are connected to
  ;; natively even when a gateway is configured (except under SSL).
  (let ((tmp-gateway-method (if (and url-gateway-local-host-regexp
                                     (not (eq 'ssl url-gateway-method))
                                     (string-match
                                      url-gateway-local-host-regexp
                                      host))
                                'native
                              url-gateway-method))
        ;; Make sure this service's traffic is treated as binary
        ;; (both the string and numeric spellings of the port).
        (tcp-binary-process-output-services (if (stringp service)
                                                (list service)
                                              (list service
                                                    (int-to-string service)))))
    ;; The `tcp' method just means "load tcp.el, then behave natively";
    ;; note it permanently rewrites `url-gateway-method'.
    (and (eq url-gateway-method 'tcp)
         (require 'tcp)
         (setq url-gateway-method 'native
               tmp-gateway-method 'native))
    (cond
     ((eq tmp-gateway-method 'ssl)
      (open-ssl-stream name buffer host service))
     ((eq tmp-gateway-method 'native)
      ;; Optionally resolve the hostname ourselves for emacsen with
      ;; broken resolver support, via whichever FTP package is loaded.
      (if url-broken-resolution
          (setq host
                (cond
                 ((featurep 'ange-ftp) (ange-ftp-nslookup-host host))
                 ((featurep 'efs) (efs-nslookup-host host))
                 ((featurep 'efs-auto) (efs-nslookup-host host))
                 (t host))))
      ;; Retry the connection automatically up to
      ;; `url-connection-retries' times when the error looks transient
      ;; ("in use"); otherwise ask the user whether to retry.
      (let ((max-retries url-connection-retries)
            (cur-retries 0)
            (retry t)
            (errobj nil)
            (conn nil))
        (while (and (not conn) retry)
          (condition-case errobj
              (setq conn (open-network-stream name buffer host service))
            (error
             (url-save-error errobj)
             (save-window-excursion
               (save-excursion
                 (switch-to-buffer-other-window " *url-error*")
                 (shrink-window-if-larger-than-buffer)
                 (goto-char (point-min))
                 (if (and (re-search-forward "in use" nil t)
                          (< cur-retries max-retries))
                     (progn
                       (setq retry t
                             cur-retries (1+ cur-retries))
                       (sleep-for 0.5))
                   (setq cur-retries 0
                         retry (funcall url-confirmation-func
                                        (concat "Connection to " host
                                                " failed, retry? "))))
                 (kill-buffer (current-buffer)))))))
        (if (not conn)
            (error "Unable to connect to %s:%s" host service)
          ;; Disable MULE code conversion on the raw network stream.
          (mule-inhibit-code-conversion conn)
          conn)))
     ((eq tmp-gateway-method 'program)
      ;; Tunnel through a telnet-style helper program: wait for its
      ;; ready prompt, delete the noise, then check for a "connect:"
      ;; error message before handing back the process.
      (let ((proc (start-process name buffer url-gateway-telnet-program host
                                 (int-to-string service)))
            (tmp nil))
        (save-excursion
          (set-buffer buffer)
          (setq tmp (point))
          (while (not (progn
                        (goto-char (point-min))
                        (re-search-forward
                         url-gateway-telnet-ready-regexp nil t)))
            (url-accept-process-output proc))
          (delete-region tmp (point))
          (goto-char (point-min))
          (if (re-search-forward "connect:" nil t)
              (progn
                (condition-case ()
                    (delete-process proc)
                  (error nil))
                (url-replace-regexp ".*connect:.*" "")
                nil)
            proc))))
     (t (error "Unknown url-gateway-method %s" url-gateway-method)))))
853
854
855 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
856 ;;; Miscellaneous functions
857 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defun url-setup-privacy-info ()
  "Initialize the privacy-related variables from `url-privacy-level'.
Sets `url-system-type', `url-pgp/pem-entity',
`url-personal-mail-address' and `url-os-type', suppressing each piece
of information that the configured privacy level says must not be
revealed."
  (interactive)
  (setq url-system-type
        (cond
         ((or (eq url-privacy-level 'paranoid)
              (and (listp url-privacy-level)
                   (memq 'os url-privacy-level)))
          nil)                          ; reveal nothing about the system
         ((eq system-type 'Apple-Macintosh) "Macintosh")
         ((eq system-type 'next-mach) "NeXT")
         ((eq system-type 'windows-nt) "Windows-NT; 32bit")
         ((eq system-type 'ms-windows) "Windows; 16bit")
         ((eq system-type 'ms-dos) "MS-DOS; 32bit")
         ((and (eq system-type 'vax-vms) (device-type))
          "VMS; X11")
         ((eq system-type 'vax-vms) "VMS; TTY")
         ((eq (device-type) 'x) "X11")
         ((eq (device-type) 'ns) "NeXTStep")
         ((eq (device-type) 'pm) "OS/2")
         ((eq (device-type) 'win32) "Windows; 32bit")
         ((eq (device-type) 'tty) "(Unix?); TTY")
         ;; Fixed typo: was "UnkownPlatform".
         (t "UnknownPlatform")))

  ;; Set up the entity definition for PGP and PEM authentication
  (setq url-pgp/pem-entity (or url-pgp/pem-entity
                               user-mail-address
                               (format "%s@%s" (user-real-login-name)
                                       (system-name))))

  (setq url-personal-mail-address (or url-personal-mail-address
                                      url-pgp/pem-entity
                                      user-mail-address))

  ;; High privacy levels suppress the mail address entirely.
  (if (or (memq url-privacy-level '(paranoid high))
          (and (listp url-privacy-level)
               (memq 'email url-privacy-level)))
      (setq url-personal-mail-address nil))

  ;; Derive the OS name from the configuration triplet embedded in
  ;; `emacs-version', falling back to `system-type' -- unless the
  ;; privacy level forbids revealing the OS at all.
  (if (or (eq url-privacy-level 'paranoid)
          (and (listp url-privacy-level)
               (memq 'os url-privacy-level)))
      (setq url-os-type nil)
    (let ((vers (emacs-version)))
      (if (string-match "(\\([^, )]+\\))$" vers)
          (setq url-os-type (url-match vers 1))
        (setq url-os-type (symbol-name system-type))))))
904
(defun url-handle-no-scheme (url)
  ;; URL arrived without a scheme: guess one by matching registered
  ;; protocol names against its leading component, then retrieve it.
  (let ((temp url-registered-protocols)
        (found nil))
    (while (and temp (not found))
      (if (and (not (member (car (car temp)) '("auto" "www")))
               (string-match (concat "^" (car (car temp)) "\\.")
                             url))
          (setq found t)
        (setq temp (cdr temp))))
    (cond
     (found                             ; Found something like ftp.spry.com
      (url-retrieve (concat (car (car temp)) "://" url)))
     ((string-match "^www\\." url)
      (url-retrieve (concat "http://" url)))
     ((string-match "\\(\\.[^\\.]+\\)\\(\\.[^\\.]+\\)" url)
      ;; Ok, we have at least two dots in the filename, just stick http on it
      (url-retrieve (concat "http://" url)))
     (t
      ;; Single bare word: guess www.<word>.com.
      (url-retrieve (concat "http://www." url ".com"))))))
924
(defun url-setup-save-timer ()
  "Reset the history list timer."
  (interactive)
  (cond
   ((featurep 'itimer)
    ;; XEmacs: replace any existing saver itimer with a fresh one.
    (let ((old (get-itimer "url-history-saver")))
      (if old (delete-itimer old)))
    (start-itimer "url-history-saver" 'url-write-global-history
		  url-global-history-save-interval
		  url-global-history-save-interval))
   ((fboundp 'run-at-time)
    ;; GNU Emacs: schedule a repeating timer.
    (run-at-time url-global-history-save-interval
		 url-global-history-save-interval
		 'url-write-global-history))
   (t nil)))
940
(defvar url-download-minor-mode nil
  "Count of downloads currently in progress, or nil when there are none.
Also serves as the flag for the \" Webbing\" minor-mode indicator
installed by `url-do-setup'.")
942
(defun url-download-minor-mode (on)
  "Adjust the count of downloads in progress.
With non-nil ON, increment the counter; otherwise decrement it.  When
the counter falls to zero or below it is reset to nil."
  (let ((count (or url-download-minor-mode (if on 0 1))))
    (setq url-download-minor-mode (if on (1+ count) (1- count))))
  (if (<= url-download-minor-mode 0)
      (setq url-download-minor-mode nil)))
949
(defun url-do-setup ()
  "Do setup - this is to avoid conflict with user settings when URL is
dumped with emacs.
Runs once per session, guarded by `url-setup-done': registers the
supported protocols and authentication schemes, locates the cookie and
global-history files, reads the proxy environment, and initializes the
news server, MIME strings and privacy settings."
  (if url-setup-done
      nil

    ;; Install the modeline indicator for downloads in progress.
    (add-minor-mode 'url-download-minor-mode " Webbing" nil)

    ;; Make OS/2 happy
    (setq tcp-binary-process-input-services
	  (append '("http" "80")
		  tcp-binary-process-input-services))

    ;; Register all the protocols we can handle
    (url-register-protocol 'file)
    (url-register-protocol 'ftp nil nil "21")
    (url-register-protocol 'gopher nil nil "70")
    (url-register-protocol 'http nil nil "80")
    (url-register-protocol 'https nil nil "443")
    (url-register-protocol 'nfs nil nil "2049")
    (url-register-protocol 'info nil 'url-identity-expander)
    (url-register-protocol 'mailserver nil 'url-identity-expander)
    (url-register-protocol 'finger nil 'url-identity-expander "79")
    (url-register-protocol 'mailto nil 'url-identity-expander)
    (url-register-protocol 'news nil 'url-identity-expander "119")
    (url-register-protocol 'nntp nil 'url-identity-expander "119")
    (url-register-protocol 'irc nil 'url-identity-expander "6667")
    (url-register-protocol 'rlogin)
    (url-register-protocol 'shttp nil nil "80")
    (url-register-protocol 'telnet)
    (url-register-protocol 'tn3270)
    (url-register-protocol 'wais)
    (url-register-protocol 'x-exec)
    (url-register-protocol 'proxy)
    (url-register-protocol 'auto 'url-handle-no-scheme)

    ;; Register all the authentication schemes we can handle
    (url-register-auth-scheme "basic" nil 4)
    (url-register-auth-scheme "digest" nil 7)

    ;; Filename handler stuff for emacsen that support it
    (url-setup-file-name-handlers)

    (setq url-cookie-file
	  (or url-cookie-file
	      (expand-file-name "~/.w3cookies")))

    ;; Pick a platform-appropriate default for the history file.
    (setq url-global-history-file
	  (or url-global-history-file
	      (and (memq system-type '(ms-dos ms-windows))
		   (expand-file-name "~/mosaic.hst"))
	      (and (memq system-type '(axp-vms vax-vms))
		   (expand-file-name "~/mosaic.global-history"))
	      (condition-case ()
		  (expand-file-name "~/.mosaic-global-history")
		(error nil))))

    ;; Parse the global history file if it exists, so that it can be used
    ;; for URL completion, etc.
    (if (and url-global-history-file
	     (file-exists-p url-global-history-file))
	(url-parse-global-history))

    ;; Setup save timer
    (and url-global-history-save-interval (url-setup-save-timer))

    (if (and url-cookie-file
	     (file-exists-p url-cookie-file))
	(url-cookie-parse-file url-cookie-file))

    ;; Read in proxy gateways.  The no_proxy environment variable is a
    ;; comma-separated list of shell-style globs; translate it into one
    ;; big alternation regexp.
    (let ((noproxy (and (not (assoc "no_proxy" url-proxy-services))
			(or (getenv "NO_PROXY")
			    (getenv "no_PROXY")
			    (getenv "no_proxy")))))
      (if noproxy
	  (setq url-proxy-services
		(cons (cons "no_proxy"
			    (concat "\\("
				    (mapconcat
				     (function
				      (lambda (x)
					(cond
					 ((= x ?,) "\\|")
					 ((= x ? ) "")
					 ((= x ?.) (regexp-quote "."))
					 ((= x ?*) ".*")
					 ((= x ??) ".")
					 (t (char-to-string x)))))
				     noproxy "") "\\)"))
		      url-proxy-services))))

    ;; Set the url-use-transparent with decent defaults
    (if (not (eq (device-type) 'tty))
	(setq url-use-transparent nil))
    (and url-use-transparent (require 'transparent))

    ;; Set the password entry function based on user defaults or guess
    ;; based on which remote-file-access package they are using.
    (cond
     (url-passwd-entry-func nil)	; Already been set
     ((boundp 'read-passwd)		; Use secure password if available
      (setq url-passwd-entry-func 'read-passwd))
     ((or (featurep 'efs)		; Using EFS
	  (featurep 'efs-auto))		; or autoloading efs
      (if (not (fboundp 'read-passwd))
	  (autoload 'read-passwd "passwd" "Read in a password" nil))
      (setq url-passwd-entry-func 'read-passwd))
     ((or (featurep 'ange-ftp)		; Using ange-ftp
	  (and (boundp 'file-name-handler-alist)
	       (not (string-match "Lucid" (emacs-version)))))
      (setq url-passwd-entry-func 'ange-ftp-read-passwd))
     (t
      (url-warn 'security
		"Can't determine how to read passwords, winging it.")))

    ;; Set up the news service if they haven't done so.  Tries the
    ;; various Gnus/nntp variables before the NNTPSERVER environment
    ;; variable, and finally falls back on the host name "news".
    (setq url-news-server
	  (cond
	   (url-news-server url-news-server)
	   ((and (boundp 'gnus-default-nntp-server)
		 (not (equal "" gnus-default-nntp-server)))
	    gnus-default-nntp-server)
	   ((and (boundp 'gnus-nntp-server)
		 (not (null gnus-nntp-server))
		 (not (equal "" gnus-nntp-server)))
	    gnus-nntp-server)
	   ((and (boundp 'nntp-server-name)
		 (not (null nntp-server-name))
		 (not (equal "" nntp-server-name)))
	    nntp-server-name)
	   ((getenv "NNTPSERVER") (getenv "NNTPSERVER"))
	   (t "news")))

    ;; Set up the MIME accept string if they haven't got it hardcoded yet
    (or url-mime-accept-string
	(setq url-mime-accept-string (url-parse-viewer-types)))
    (or url-mime-encoding-string
	(setq url-mime-encoding-string
	      (mapconcat 'car
			 mm-content-transfer-encodings
			 ", ")))

    (url-setup-privacy-info)
    (run-hooks 'url-load-hook)
    (setq url-setup-done t)))
1096
(defun url-cache-file-writable-p (file)
  "Follows the documentation of file-writable-p, unlike file-writable-p.
Return non-nil only when FILE is writable and is either an existing
non-directory or a new name inside an existing directory."
  (cond
   ((not (file-writable-p file)) nil)
   ((file-exists-p file) (not (file-directory-p file)))
   (t (file-directory-p (file-name-directory file)))))
1103
(defun url-prepare-cache-for-file (file)
  "Makes it possible to cache data in FILE.
Creates any necessary parent directories, deleting any non-directory files
that would stop this.  Returns nil if parent directories can not be
created.  If FILE already exists as a non-directory, it changes
permissions of FILE or deletes FILE to make it possible to write a new
version of FILE.  Returns nil if this can not be done.  Returns nil if
FILE already exists as a directory.  Otherwise, returns t, indicating that
FILE can be created or overwritten."

  ;; COMMENT: We don't delete directories because that requires
  ;; recursively deleting the directories's contents, which might
  ;; eliminate a substantial portion of the cache.

  (cond
   ((url-cache-file-writable-p file)
    t)
   ((file-directory-p file)
    nil)
   (t
    ;; Any failure below simply throws nil out of the catch.
    (catch 'upcff-tag
      (let ((dir (file-name-directory file))
	    dir-parent dir-last-component)
	(if (string-equal dir file)
	    ;; *** Should I have a warning here?
	    ;; FILE must match a pattern like /foo/bar/, indicating it is a
	    ;; name only suitable for a directory.  So presume we won't be
	    ;; able to overwrite FILE and return nil.
	    (throw 'upcff-tag nil))

	;; Make sure the containing directory exists, or throw a failure
	;; if we can't create it.
	(if (file-directory-p dir)
	    nil
	  (or (fboundp 'make-directory)
	      (throw 'upcff-tag nil))
	  (make-directory dir t)
	  ;; make-directory silently fails if there is an obstacle, so
	  ;; we must verify its results.
	  (if (file-directory-p dir)
	      nil
	    ;; Look at prefixes of the path to find the obstacle that is
	    ;; stopping us from making the directory.  Unfortunately, there
	    ;; is no portable function in Emacs to find the parent directory
	    ;; of a *directory*.  So this code may not work on VMS.
	    (while (progn
		     (if (eq ?/ (aref dir (1- (length dir))))
			 (setq dir (substring dir 0 -1))
		       ;; Maybe we're on VMS where the syntax is different.
		       (throw 'upcff-tag nil))
		     (setq dir-parent (file-name-directory dir))
		     (not (file-directory-p dir-parent)))
	      (setq dir dir-parent))
	    ;; We have found the longest path prefix that exists as a
	    ;; directory.  Deal with any obstacles in this directory.
	    (if (file-exists-p dir)
		(condition-case nil
		    (delete-file dir)
		  (error (throw 'upcff-tag nil))))
	    (if (file-exists-p dir)
		(throw 'upcff-tag nil))
	    ;; Try making the directory again.
	    (setq dir (file-name-directory file))
	    (make-directory dir t)
	    (or (file-directory-p dir)
		(throw 'upcff-tag nil))))

	;; The containing directory exists.  Let's see if there is
	;; something in the way in this directory.
	(if (url-cache-file-writable-p file)
	    (throw 'upcff-tag t)
	  (condition-case nil
	      (delete-file file)
	    (error (throw 'upcff-tag nil))))

	;; The return value, if we get this far.
	(url-cache-file-writable-p file))))))
1181
(defun url-store-in-cache (&optional buff)
  "Store buffer BUFF in the cache"
  ;; Uncacheable document types (and server-less file/ftp URLs) are
  ;; silently skipped.
  (if (or (not (get-buffer buff))
	  (member url-current-type '("www" "about" "https" "shttp"
				     "news" "mailto"))
	  (and (member url-current-type '("file" "ftp" nil))
	       (not url-current-server))
	  )
      nil
    (save-excursion
      (and buff (set-buffer buff))
      ;; fname holds the body; fname-hdr the saved url-current-*
      ;; variables (read back by `url-extract-from-cache').  On 8.3
      ;; filesystems ".hdr" replaces the extension instead of being
      ;; appended.
      (let* ((fname (url-create-cached-filename (url-view-url t)))
	     (fname-hdr (concat (if (memq system-type '(ms-windows ms-dos os2))
				    (url-file-extension fname t)
				  fname) ".hdr"))
	     (info (mapcar (function (lambda (var)
				       (cons (symbol-name var)
					     (symbol-value var))))
			   '( url-current-content-length
			      url-current-file
			      url-current-isindex
			      url-current-mime-encoding
			      url-current-mime-headers
			      url-current-mime-type
			      url-current-port
			      url-current-server
			      url-current-type
			      url-current-user
			      ))))
	(cond ((and (url-prepare-cache-for-file fname)
		    (url-prepare-cache-for-file fname-hdr))
	       (write-region (point-min) (point-max) fname nil 5)
	       ;; Write the header file as a loadable (setq ...) form.
	       (set-buffer (get-buffer-create " *cache-tmp*"))
	       (erase-buffer)
	       (insert "(setq ")
	       (mapcar
		(function
		 (lambda (x)
		   (insert (car x) " "
			   (cond ((null (setq x (cdr x))) "nil")
				 ((stringp x) (prin1-to-string x))
				 ((listp x) (concat "'" (prin1-to-string x)))
				 ((numberp x) (int-to-string x))
				 (t "'???")) "\n")))
		info)
	       (insert ")\n")
	       (write-region (point-min) (point-max) fname-hdr nil 5)))))))
1229
1230
(defun url-is-cached (url)
  "Return non-nil if the URL is cached.
The non-nil value is the cache file's last modification time."
  (let* ((cache-file (url-create-cached-filename url))
	 (attrs (file-attributes cache-file)))
    (if (and cache-file			; got a filename
	     (file-exists-p cache-file)	; and the file exists
	     (not (eq (car attrs) t)))	; and it is not a directory
	(nth 5 attrs)			; then: its last mod-time
      nil)))
1239
(defun url-create-cached-filename-using-md5 (url)
  "Return a cache file name for URL derived from its MD5 checksum.
The file lives in a per-user subdirectory of `url-temporary-directory'.
Return nil when URL is nil."
  (and url
       (expand-file-name (md5 url)
			 (concat url-temporary-directory "/"
				 (user-real-login-name)))))
1245
(defun url-create-cached-filename (url)
  "Return a filename in the local cache for URL"
  (if url
      (let* ((url url)
	     (urlobj (if (vectorp url)
			 url
		       (url-generic-parse-url url)))
	     (protocol (url-type urlobj))
	     (hostname (url-host urlobj))
	     ;; Directory components: login name, then scheme, then the
	     ;; host name components in reverse order (com/example/www).
	     (host-components
	      (cons
	       (user-real-login-name)
	       (cons (or protocol "file")
		     (nreverse
		      (delq nil
			    (mm-string-to-tokens
			     (or hostname "localhost") ?.))))))
	     (fname (url-filename urlobj)))
	;; Drop the leading slash of the URL path.
	(if (and fname (/= (length fname) 0) (= (aref fname 0) ?/))
	    (setq fname (substring fname 1 nil)))
	;; Rewrite the second of any pair of adjacent slashes as "%2F" --
	;; presumably so empty components can't collapse in the cache
	;; directory tree.
	(if fname
	    (let ((slash nil))
	      (setq fname
		    (mapconcat
		     (function
		      (lambda (x)
			(cond
			 ((and (= ?/ x) slash)
			  (setq slash nil)
			  "%2F")
			 ((= ?/ x)
			  (setq slash t)
			  "/")
			 (t
			  (setq slash nil)
			  (char-to-string x))))) fname ""))))

	;; Turn DOS-style "C:/" (or "C:\") drive prefixes into a plain
	;; directory component.
	(if (and fname (memq system-type '(ms-windows ms-dos windows-nt))
		 (string-match "\\([A-Za-z]\\):[/\\]" fname))
	    (setq fname (concat (url-match fname 1) "/"
				(substring fname (match-end 0)))))

	;; Strip "~" characters (they would be treated as home-directory
	;; references by expand-file-name below), then map empty and
	;; directory paths onto `url-directory-index-file'.
	(setq fname (and fname
			 (mapconcat
			  (function (lambda (x)
				      (if (= x ?~) "" (char-to-string x))))
			  fname ""))
	      fname (cond
		     ((null fname) nil)
		     ((or (string= "" fname) (string= "/" fname))
		      url-directory-index-file)
		     ((= (string-to-char fname) ?/)
		      (if (string= (substring fname -1 nil) "/")
			  (concat fname url-directory-index-file)
			(substring fname 1 nil)))
		     (t
		      (if (string= (substring fname -1 nil) "/")
			  (concat fname url-directory-index-file)
			fname))))

	;; Honor hideous 8.3 filename limitations on dos and windows
	;; we don't have to worry about this in Windows NT/95 (or OS/2?)
	(if (and fname (memq system-type '(ms-windows ms-dos)))
	    (let ((base (url-file-extension fname t))
		  (ext (url-file-extension fname nil)))
	      (setq fname (concat (substring base 0 (min 8 (length base)))
				  (substring ext 0 (min 4 (length ext)))))
	      (setq host-components
		    (mapcar
		     (function
		      (lambda (x)
			(if (> (length x) 8)
			    (concat
			     (substring x 0 8) "."
			     (substring x 8 (min (length x) 11)))
			  x)))
		     host-components))))

	(and fname
	     (expand-file-name fname
			       (expand-file-name
				(mapconcat 'identity host-components "/")
				url-temporary-directory))))))
1329
(defun url-extract-from-cache (fnam)
  "Extract FNAM from the local disk cache"
  ;; Replace the contents of `url-working-buffer' with the raw cached
  ;; data, then restore the url-current-* variables by loading the
  ;; companion ".hdr" file (a `setq' form written by
  ;; `url-store-in-cache').  On 8.3 filesystems the ".hdr" suffix
  ;; replaces the extension instead of being appended.
  (set-buffer (get-buffer-create url-working-buffer))
  (erase-buffer)
  (setq url-current-mime-viewer nil)
  (insert-file-contents-literally fnam)
  (load (concat (if (memq system-type '(ms-windows ms-dos os2))
		    (url-file-extension fnam t)
		  fnam) ".hdr") t t))
1339
;;;###autoload
(defun url-get-url-at-point (&optional pt)
  "Get the URL closest to point, but don't change your
position. Has a preference for looking backward when not
directly on a symbol.
With optional PT, look at that position instead of point.
Return nil when the text found does not match `url-nonrelative-link'."
  ;; Not at all perfect - point must be right in the name.
  (save-excursion
    (if pt (goto-char pt))
    ;; The characters accepted as part of a URL.  (The "---" run keeps
    ;; `-' literal in the character class -- presumably intentional.)
    (let ((filename-chars "%.?@a-zA-Z0-9---()_/:~=&") start url)
      (save-excursion
	;; first see if you're just past a filename
	(if (not (eobp))
	    (if (looking-at "[] \t\n[{}()]") ; whitespace or some parens
		(progn
		  (skip-chars-backward " \n\t\r({[]})")
		  (if (not (bobp))
		      (backward-char 1)))))
	;; Widen around point to the largest run of URL characters.
	(if (string-match (concat "[" filename-chars "]")
			  (char-to-string (following-char)))
	    (progn
	      (skip-chars-backward filename-chars)
	      (setq start (point))
	      (skip-chars-forward filename-chars))
	  (setq start (point)))
	(setq url (if (fboundp 'buffer-substring-no-properties)
		      (buffer-substring-no-properties start (point))
		    (buffer-substring start (point)))))
      ;; Strip a leading "URL:" tag and a trailing sentence-ending dot.
      (if (string-match "^URL:" url)
	  (setq url (substring url 4 nil)))
      (if (string-match "\\.$" url)
	  (setq url (substring url 0 -1)))
      ;; Only accept things that look like absolute URLs.
      (if (not (string-match url-nonrelative-link url))
	  (setq url nil))
      url)))
1374
(defun url-eat-trailing-space (x)
  "Return X with any trailing spaces, tabs and newlines removed."
  (let ((end (length x)))
    (while (and (> end 0)
		(memq (aref x (1- end)) '(?\  ?\t ?\n)))
      (setq end (1- end)))
    (substring x 0 end)))
1382
(defun url-strip-leading-spaces (x)
  "Return X with any leading spaces, tabs and newlines removed."
  (let ((len (length x))
	(start 0))
    (while (and (< start len)
		(memq (aref x start) '(?\  ?\t ?\n)))
      (setq start (1+ start)))
    (substring x start nil)))
1391
(defun url-convert-newlines-to-spaces (x)
  "Convert newlines and carriage returns embedded in a string into spaces,
and swallow following whitespace.
The argument is not side-effected, but may be returned by this function."
  (let ((rest x)
	(pieces nil))
    ;; Collect the text before each newline run, replacing the run (and
    ;; any whitespace after it) with a single space.
    (while (string-match "[\n\r]+\\s-*" rest)
      (setq pieces (cons (concat (substring rest 0 (match-beginning 0)) " ")
			 pieces)
	    rest (substring rest (match-end 0))))
    (if pieces
	(apply 'concat (nreverse (cons rest pieces)))
      rest)))
1401
1402 ;; Test cases
1403 ;; (url-convert-newlines-to-spaces "foo bar") ; nothing happens
1404 ;; (url-convert-newlines-to-spaces "foo\n \t bar") ; whitespace converted
1405 ;;
1406 ;; This implementation doesn't mangle the match-data, is fast, and doesn't
1407 ;; create garbage, but it leaves whitespace.
1408 ;; (defun url-convert-newlines-to-spaces (x)
1409 ;; "Convert newlines and carriage returns embedded in a string into spaces.
1410 ;; The string is side-effected, then returned."
1411 ;; (let ((i 0)
1412 ;; (limit (length x)))
1413 ;; (while (< i limit)
1414 ;; (if (or (= ?\n (aref x i))
1415 ;; (= ?\r (aref x i)))
1416 ;; (aset x i ? ))
1417 ;; (setq i (1+ i)))
1418 ;; x))
1419
(defun url-expand-file-name (url &optional default)
  "Convert URL to a fully specified URL, and canonicalize it.
Second arg DEFAULT is a URL to start with if URL is relative.
If DEFAULT is nil or missing, the current buffer's URL is used.
Path components that are `.' are removed, and
path components followed by `..' are removed, along with the `..' itself."
  ;; Strip surrounding whitespace and embedded newlines before parsing.
  (if url
      (setq url (mapconcat (function (lambda (x)
				       (if (= x ?\n) "" (char-to-string x))))
			   (url-strip-leading-spaces
			    (url-eat-trailing-space url)) "")))
  (cond
   ((null url) nil)			; Something hosed!  Be graceful
   ((string-match "^#" url)		; Offset link, use it raw
    url)
   (t
    (let* ((urlobj (url-generic-parse-url url))
	   (inhibit-file-name-handlers t)
	   ;; The base to resolve against: DEFAULT, the current buffer's
	   ;; parsed URL object, or the current buffer's URL string.
	   (defobj (cond
		    ((vectorp default) default)
		    (default (url-generic-parse-url default))
		    (url-current-object url-current-object)
		    (t (url-generic-parse-url (url-view-url t)))))
	   ;; Each registered protocol supplies its own expander function.
	   (expander (cdr-safe
		      (cdr-safe
		       (assoc (or (url-type urlobj)
				  (url-type defobj))
			      url-registered-protocols)))))
      ;; A network-path reference ("//host/...") inherits just the scheme.
      (if (string-match "^//" url)
	  (setq urlobj (url-generic-parse-url (concat (url-type defobj) ":"
						      url))))
      (if (fboundp expander)
	  (funcall expander urlobj defobj)
	(message "Unknown URL scheme: %s" (or (url-type urlobj)
					      (url-type defobj)))
	(url-identity-expander urlobj defobj))
      (url-recreate-url urlobj)))))
1457
(defun url-default-expander (urlobj defobj)
  ;; The default expansion routine - urlobj is modified by side effect!
  ;; Fill in any missing scheme from DEFOBJ.
  (url-set-type urlobj (or (url-type urlobj) (url-type defobj)))
  ;; Inherit the port only when the two schemes actually match.
  (url-set-port urlobj (or (url-port urlobj)
			   (and (string= (url-type urlobj)
					 (url-type defobj))
				(url-port defobj))))
  ;; file: URLs take no host; every other scheme may inherit one.
  (if (not (string= "file" (url-type urlobj)))
      (url-set-host urlobj (or (url-host urlobj) (url-host defobj))))
  ;; ftp additionally inherits the user name.
  (if (string= "ftp" (url-type urlobj))
      (url-set-user urlobj (or (url-user urlobj) (url-user defobj))))
  (if (string= (url-filename urlobj) "")
      (url-set-filename urlobj "/"))
  ;; A relative path is resolved against the directory part of DEFOBJ's
  ;; path, then "." and ".." components are collapsed.
  (if (string-match "^/" (url-filename urlobj))
      nil
    (url-set-filename urlobj
		      (url-remove-relative-links
		       (concat (url-basepath (url-filename defobj))
			       (url-filename urlobj))))))
1477
(defun url-identity-expander (urlobj defobj)
  ;; Fill in URLOBJ's scheme from DEFOBJ if it is missing; change
  ;; nothing else.  Registered as the expander for schemes such as
  ;; news, mailto and finger (see `url-do-setup').
  (url-set-type urlobj (or (url-type urlobj) (url-type defobj))))
1480
(defconst url-unreserved-chars
  '(
    ?a ?b ?c ?d ?e ?f ?g ?h ?i ?j ?k ?l ?m ?n ?o ?p ?q ?r ?s ?t ?u ?v ?w ?x ?y ?z
    ?A ?B ?C ?D ?E ?F ?G ?H ?I ?J ?K ?L ?M ?N ?O ?P ?Q ?R ?S ?T ?U ?V ?W ?X ?Y ?Z
    ?0 ?1 ?2 ?3 ?4 ?5 ?6 ?7 ?8 ?9
    ?$ ?- ?_ ?. ?! ?~ ?* ?' ?\( ?\) ?,)
  "A list of characters that are _NOT_ reserved in the URL spec.
These pass through `url-hexify-string' unescaped.
This is taken from draft-fielding-url-syntax-02.txt - check your local
internet drafts directory for a copy.")
1490
(defun url-hexify-string (str)
  "Escape characters in a string
Every character of STR not in `url-unreserved-chars' is replaced by its
uppercase %XX escape."
  (mapconcat
   (function
    (lambda (char)
      (cond
       ((memq char url-unreserved-chars)
	(char-to-string char))
       ((< char 16)			; single hex digit: pad with 0
	(upcase (format "%%0%x" char)))
       (t
	(upcase (format "%%%x" char))))))
   (mule-decode-string str) ""))
1502
(defun url-make-sequence (start end)
  "Make a sequence (list) of numbers from START to END
Note: when START is greater than *or equal to* END the result is nil;
callers rely on the empty result for the equal case."
  (if (>= start end)
      '()
    (let ((result '()))
      ;; Build backwards from END so the list comes out in order.
      (while (<= start end)
	(setq result (cons end result)
	      end (1- end)))
      result)))
1514
(defun url-file-extension (fname &optional x)
  "Return the filename extension of FNAME, including the dot.
If optional variable X is t, then return the basename of the file with
the extension stripped off.  A FNAME with no extension yields \"\" --
or FNAME itself when X is non-nil, so that caching code can still
derive a companion filename for extensionless files."
  (cond
   ((and fname (string-match "\\.[^./]+$" fname))
    (if x
	(substring fname 0 (match-beginning 0))
      (substring fname (match-beginning 0) nil)))
   (x fname)
   (t "")))
1529
(defun url-basepath (file &optional x)
  "Return the base pathname of FILE, or the actual filename if X is true
A nil FILE yields the empty string."
  (if (null file)
      ""
    (if x
	(file-name-nondirectory file)
      (file-name-directory file))))
1536
(defun url-parse-query-string (query &optional downcase)
  "Parse QUERY, a \"key=val&key=val\" string, into an alist.
Each element is (KEY VAL...); values of repeated keys accumulate onto
the same entry.  Keys and values are unhexified, and with non-nil
DOWNCASE the keys are lowercased.  Pairs without an `=' are ignored."
  (let ((result nil))
    (mapcar
     (function
      (lambda (pair)
	(if (string-match "=" pair)
	    (let ((key (url-unhex-string (substring pair 0 (match-beginning 0))))
		  (val (url-unhex-string (substring pair (match-end 0) nil)))
		  cell)
	      (if downcase
		  (setq key (downcase key)))
	      (setq cell (assoc key result))
	      (if cell
		  (setcdr cell (cons val (cdr cell)))
		(setq result (cons (list key val) result)))))))
     (split-string query "&"))
    result))
1554
(defun url-unhex (x)
  "Convert hexadecimal digit character X to its integer value.
Accepts 0-9, a-f and A-F."
  (cond
   ((<= x ?9) (- x ?0))
   ((>= x ?a) (+ 10 (- x ?a)))
   (t (+ 10 (- x ?A)))))
1561
(defun url-unhex-string (str &optional allow-newlines)
  "Remove %XX embedded escapes from the url STR.
If optional second argument ALLOW-NEWLINES is non-nil, then allow the
decoding of carriage returns and line feeds in the string, which is normally
forbidden in URL encoding."
  (let ((remaining (or str ""))
	(decoded "")
	(case-fold-search t))
    (while (string-match "%[0-9a-f][0-9a-f]" remaining)
      (let* ((start (match-beginning 0))
	     (code (+ (* 16 (url-unhex (elt remaining (1+ start))))
		      (url-unhex (elt remaining (+ start 2))))))
	(setq decoded
	      (concat decoded
		      (substring remaining 0 start)
		      ;; CR/LF decode to a space unless explicitly allowed.
		      (if (and (not allow-newlines)
			       (or (= code ?\n) (= code ?\r)))
			  " "
			(char-to-string code)))
	      remaining (substring remaining (match-end 0)))))
    (concat decoded remaining)))
1586
(defun url-clean-text ()
  "Clean up a buffer, removing any excess garbage from a gateway mechanism,
and decoding any MIME content-transfer-encoding used."
  ;; NOTE(review): despite the docstring, no content-transfer decoding
  ;; happens here -- see `url-uncompress' for that.  This only strips
  ;; connection-teardown chatter appended by gateway processes.
  (set-buffer url-working-buffer)
  (goto-char (point-min))
  (url-replace-regexp "Connection closed by.*" "")
  (goto-char (point-min))
  (url-replace-regexp "Process WWW.*" ""))
1595
(defun url-remove-compressed-extensions (filename)
  "Strip any extensions listed in `url-uncompressor-alist' from FILENAME.
Repeats until the remaining extension is not a known compressed type."
  (let ((stripped filename))
    (while (assoc (url-file-extension stripped) url-uncompressor-alist)
      (setq stripped (url-file-extension stripped t)))
    stripped))
1600
(defun url-uncompress ()
  "Do any necessary uncompression on `url-working-buffer'"
  (set-buffer url-working-buffer)
  (if (not url-inhibit-uncompression)
      (let* ((extn (url-file-extension url-current-file))
	     (decoder nil)
	     ;; Encodings may be declared in the MIME headers...
	     (code-1 (cdr-safe
		      (assoc "content-transfer-encoding"
			     url-current-mime-headers)))
	     (code-2 (cdr-safe
		      (assoc "content-encoding" url-current-mime-headers)))
	     ;; ...or, failing both, guessed from the file extension.
	     (code-3 (and (not code-1) (not code-2)
			  (cdr-safe (assoc extn url-uncompressor-alist))))
	     (done nil)			; encodings already handled
	     ;; Decoders must see the raw bytes, untranslated.
	     (default-process-coding-system
	       (cons mule-no-coding-system mule-no-coding-system)))
	(mapcar
	 (function
	  (lambda (code)
	    ;; Look up each encoding's decoder, skipping duplicates.
	    (setq decoder (and (not (member code done))
			       (cdr-safe
				(assoc code mm-content-transfer-encodings)))
		  done (cons code done))
	    (cond
	     ((null decoder) nil)
	     ((stringp decoder)
	      ;; A shell command to filter the buffer through.
	      (message "Decoding...")
	      (call-process-region (point-min) (point-max) decoder t t nil)
	      (message "Decoding... done."))
	     ((listp decoder)
	      ;; A command name plus its arguments.
	      (apply 'call-process-region (point-min) (point-max)
		     (car decoder) t t nil (cdr decoder)))
	     ((and (symbolp decoder) (fboundp decoder))
	      ;; An Emacs Lisp decoder function taking a region.
	      (message "Decoding...")
	      (funcall decoder (point-min) (point-max))
	      (message "Decoding... done."))
	     (t
	      (error "Bad entry for %s in `mm-content-transfer-encodings'"
		     code)))))
	 (list code-1 code-2 code-3))))
  (set-buffer-modified-p nil))
1642
(defun url-filter (proc string)
  ;; Process filter: append STRING to `url-working-buffer', and fire
  ;; the sentinel early when the gateway announces that the connection
  ;; has closed.
  (save-excursion
    (set-buffer url-working-buffer)
    (insert string)
    (if (string-match "\nConnection closed by" string)
	(progn (set-process-filter proc nil)
	       (url-sentinel proc string))))
  string)
1651
(defun url-default-callback (buf)
  ;; Default action once a retrieval into buffer BUF has finished:
  ;; run the callback registered in the buffer if there is one, else
  ;; hand the buffer to w3's sentinel, else just announce completion.
  (url-download-minor-mode nil)		; one less download in progress
  (cond
   ((save-excursion (set-buffer buf)
		    (and url-current-callback-func
			 (fboundp url-current-callback-func)))
    (save-excursion
      (save-window-excursion
	(set-buffer buf)
	(cond
	 ;; The callback data may be an argument list, a single
	 ;; argument, or absent altogether.
	 ((listp url-current-callback-data)
	  (apply url-current-callback-func
		 url-current-callback-data))
	 (url-current-callback-data
	  (funcall url-current-callback-func
		   url-current-callback-data))
	 (t
	  (funcall url-current-callback-func))))))
   ((fboundp 'w3-sentinel)
    (set-variable 'w3-working-buffer buf)
    (w3-sentinel))
   (t
    (message "Retrieval for %s complete." buf))))
1675
(defun url-sentinel (proc string)
  ;; Process sentinel for URL retrievals: clean up the process buffer
  ;; and pass it to `url-default-retrieval-proc'.
  (let* ((buf (process-buffer proc))
	 (url-working-buffer (and buf (get-buffer buf)))
	 status)
    (if (not url-working-buffer)
	(url-warn 'url (format "Process %s completed with no buffer!" proc))
      (save-excursion
	(set-buffer url-working-buffer)
	(remove-hook 'after-change-functions 'url-after-change-function)
	;; Asynchronous fetches have not had their MIME headers parsed
	;; yet; do that now and fall back on the file extension for the
	;; content type.
	(if url-be-asynchronous
	    (progn
	      (widen)
	      (url-clean-text)
	      (cond
	       ((and (null proc) (not url-working-buffer)) nil)
	       ((url-mime-response-p)
		(setq status (url-parse-mime-headers))))
	      (if (not url-current-mime-type)
		  (setq url-current-mime-type (mm-extension-to-mime
					       (url-file-extension
						url-current-file)))))))
      ;; NOTE(review): 401 (auth), 301/302/303 (redirect) and 204 (no
      ;; content) responses are skipped here -- presumably already acted
      ;; on during header parsing; confirm before changing.
      (if (member status '(401 301 302 303 204))
	  nil
	(funcall url-default-retrieval-proc (buffer-name url-working-buffer))))))
1700
(defun url-remove-relative-links (name)
  "Collapse \".\" and \"..\" components in the path NAME.
A leading \"/\" is added when NAME does not already begin with one."
  (let ((path (if (string-match "^/" name)
		  name
		(concat "/" name))))
    ;; Drop "./" components.
    (while (string-match "/\\(\\./\\)" path)
      (setq path (concat (substring path 0 (match-beginning 1))
			 (substring path (match-end 1)))))
    ;; Collapse each "component/../" pair.
    (while (string-match "/\\([^/]*/\\.\\./\\)" path)
      (setq path (concat (substring path 0 (match-beginning 1))
			 (substring path (match-end 1)))))
    ;; A ".." at the root has nowhere to go; just drop it.
    (while (string-match "^/\\.\\.\\(/\\)" path)
      (setq path (substring path (match-beginning 1) nil)))
    path))
1715
(defun url-truncate-url-for-viewing (url &optional width)
  "Return a shortened version of URL that is WIDTH characters or less wide.
WIDTH defaults to the current frame width."
  (let* ((fr-width (or width (frame-width)))
	 (str-width (length url))
	 (tail (file-name-nondirectory url))
	 (fname nil)
	 (modified 0)			; number of path components dropped
	 (urlobj nil))
    ;; The first thing that can go are the search strings
    (if (and (>= str-width fr-width)
	     (string-match "?" url))
	(setq url (concat (substring url 0 (match-beginning 0)) "?...")
	      str-width (length url)
	      tail (file-name-nondirectory url)))
    (if (< str-width fr-width)
	nil ; Hey, we are done!
      ;; Still too wide: drop leading path components until it fits
      ;; (leaving 4 columns of slack for the "/.../" marker).
      (setq urlobj (url-generic-parse-url url)
	    fname (url-filename urlobj)
	    fr-width (- fr-width 4))
      (while (and (>= str-width fr-width)
		  (string-match "/" fname))
	(setq fname (substring fname (match-end 0) nil)
	      modified (1+ modified))
	(url-set-filename urlobj fname)
	(setq url (url-recreate-url urlobj)
	      str-width (length url)))
      ;; Mark the elision with "/.../" when more than one component went.
      (if (> modified 1)
	  (setq fname (concat "/.../" fname))
	(setq fname (concat "/" fname)))
      (url-set-filename urlobj fname)
      (setq url (url-recreate-url urlobj)))
    url))
1749
(defun url-view-url (&optional no-show)
  "View the current document's URL. Optional argument NO-SHOW means
just return the URL, don't show it in the minibuffer.
The URL is reconstructed from the buffer-local url-current-* variables,
omitting each scheme's default port."
  (interactive)
  (let ((url ""))
    (cond
     ((equal url-current-type "gopher")
      (setq url (format "%s://%s%s/%s"
			url-current-type url-current-server
			(if (or (null url-current-port)
				(string= "70" url-current-port)) ""
			  (concat ":" url-current-port))
			url-current-file)))
     ((equal url-current-type "news")
      ;; The "//server" part is emitted only for a non-default server.
      (setq url (concat "news:"
			(if (not (equal url-current-server
					url-news-server))
			    (concat "//" url-current-server
				    (if (or (null url-current-port)
					    (string= "119" url-current-port))
					""
				      (concat ":" url-current-port)) "/"))
			url-current-file)))
     ((equal url-current-type "about")
      (setq url (concat "about:" url-current-file)))
     ((member url-current-type '("http" "shttp" "https"))
      (setq url (format "%s://%s%s/%s" url-current-type url-current-server
			(if (or (null url-current-port)
				(string= "80" url-current-port))
			    ""
			  (concat ":" url-current-port))
			;; Avoid a double slash before the path.
			(if (and url-current-file
				 (= ?/ (string-to-char url-current-file)))
			    (substring url-current-file 1 nil)
			  url-current-file))))
     ((equal url-current-type "ftp")
      ;; Include "user@" only for non-anonymous logins.
      (setq url (format "%s://%s%s/%s" url-current-type
			(if (and url-current-user
				 (not (string= "anonymous" url-current-user)))
			    (concat url-current-user "@") "")
			url-current-server
			(if (and url-current-file
				 (= ?/ (string-to-char url-current-file)))
			    (substring url-current-file 1 nil)
			  url-current-file))))
     ((and (member url-current-type '("file" nil)) url-current-file)
      (setq url (format "file:%s" url-current-file)))
     ((equal url-current-type "www")
      (setq url (format "www:/%s/%s" url-current-server url-current-file)))
     (t
      (setq url nil)))
    (if (not no-show) (message "%s" url) url)))
1802
(defun url-parse-Netscape-history (fname)
  ;; Parse a Netscape/X style global history list.
  ;; NOTE(review): FNAME is not used; the history file is assumed to be
  ;; already loaded into the current buffer.  Fills
  ;; `url-global-history-hash-table' with URL -> access-time entries.
  (let (pos ; Position holder
	url ; The URL
	time) ; Last time accessed
    (goto-char (point-min))
    (skip-chars-forward "^\n")
    (skip-chars-forward "\n \t") ; Skip past the tag line
    (setq url-global-history-hash-table (make-hash-table :size 131
							 :test 'equal))
    ;; Here we will go to the end of the line and
    ;; skip back over a token, since we might run
    ;; into spaces in URLs, depending on how much
    ;; smarter netscape is than the old XMosaic :)
    (while (not (eobp))
      (setq pos (point))
      (end-of-line)
      (skip-chars-backward "^ \t")
      (skip-chars-backward " \t")
      (setq url (buffer-substring pos (point))
	    pos (1+ (point)))
      ;; The remainder of the line is the access time.
      (skip-chars-forward "^\n")
      (setq time (buffer-substring pos (point)))
      (skip-chars-forward "\n")
      (setq url-history-changed-since-last-save t)
      (cl-puthash url time url-global-history-hash-table))))
1829
(defun url-parse-Mosaic-history-v1 (fname)
  ;; Parse an NCSA Mosaic/X style global history list
  ;; The file's text is expected to already be in the current buffer;
  ;; FNAME is unused here.  Rebuilds `url-global-history-hash-table'
  ;; with URL -> access-time-string entries.
  (goto-char (point-min))
  (skip-chars-forward "^\n")
  (skip-chars-forward "\n \t")	; Skip past the tag line
  (skip-chars-forward "^\n")
  (skip-chars-forward "\n \t")	; Skip past the second tag line
  (setq url-global-history-hash-table (make-hash-table :size 131
						       :test 'equal))
  (let (pos			; Temporary position holder
	bol			; Beginning-of-line
	url			; URL
	time			; Time
	last-end		; Last ending point
	)
    (while (not (eobp))
      (setq bol (point))
      (end-of-line)
      (setq pos (point)
	    last-end (point))
      ;; Walk backwards over the five whitespace-separated date fields
      ;; (apparently `current-time-string' style, e.g.
      ;; "Mon Apr  1 12:00:00 1996"); whatever precedes them is the
      ;; URL, which may itself contain spaces.
      (skip-chars-backward "^ \t" bol)	; Skip over year
      (skip-chars-backward " \t" bol)
      (skip-chars-backward "^ \t" bol)	; Skip over time
      (skip-chars-backward " \t" bol)
      (skip-chars-backward "^ \t" bol)	; Skip over day #
      (skip-chars-backward " \t" bol)
      (skip-chars-backward "^ \t" bol)	; Skip over month
      (skip-chars-backward " \t" bol)
      (skip-chars-backward "^ \t" bol)	; Skip over day abbrev.
      (if (bolp)
	  nil				; Malformed entry!!! Ack! Bailout!
	(setq time (buffer-substring pos (point)))
	(skip-chars-backward " \t")
	(setq pos (point)))
      (beginning-of-line)
      (setq url (buffer-substring (point) pos))
      (goto-char (min (1+ last-end) (point-max))) ; Goto next line
      (if (/= (length url) 0)
	  (progn
	    (setq url-history-changed-since-last-save t)
	    (cl-puthash url time url-global-history-hash-table))))))
1871
(defun url-parse-Mosaic-history-v2 (fname)
  ;; Parse an NCSA Mosaic/X style global history list (version 2)
  ;; Like v1, but each line ends in a single whitespace-delimited time
  ;; token rather than a five-field date.  The file's text must already
  ;; be in the current buffer; FNAME is unused here.
  (goto-char (point-min))
  (skip-chars-forward "^\n")
  (skip-chars-forward "\n \t")	; Skip past the tag line
  (skip-chars-forward "^\n")
  (skip-chars-forward "\n \t")	; Skip past the second tag line
  (setq url-global-history-hash-table (make-hash-table :size 131
						       :test 'equal))
  (let (pos			; Temporary position holder
	bol			; Beginning-of-line
	url			; URL
	time			; Time
	last-end		; Last ending point
	)
    (while (not (eobp))
      (setq bol (point))
      (end-of-line)
      (setq pos (point)
	    last-end (point))
      (skip-chars-backward "^ \t" bol)	; Skip over time
      (if (bolp)
	  nil				; Malformed entry!!! Ack! Bailout!
	(setq time (buffer-substring pos (point)))
	(skip-chars-backward " \t")
	(setq pos (point)))
      (beginning-of-line)
      (setq url (buffer-substring (point) pos))
      (goto-char (min (1+ last-end) (point-max))) ; Goto next line
      (if (/= (length url) 0)
	  (progn
	    (setq url-history-changed-since-last-save t)
	    (cl-puthash url time url-global-history-hash-table))))))
1905
(defun url-parse-Emacs-history (&optional fname)
  ;; Parse out the Emacs-w3 global history file for completion, etc.
  ;; The file (default `url-global-history-file') is ordinary elisp as
  ;; produced by `url-write-Emacs-history', so it is simply `load'ed;
  ;; loading re-executes the cl-puthash forms that repopulate
  ;; `url-global-history-hash-table'.
  (or fname (setq fname (expand-file-name url-global-history-file)))
  (cond
   ((not (file-exists-p fname))
    (message "%s does not exist." fname))
   ((not (file-readable-p fname))
    (message "%s is unreadable." fname))
   (t
    (condition-case ()
	(load fname nil t)
      (error (message "Could not load %s" fname)))
    (if (boundp 'url-global-history-completion-list)
	;; Hey!  Automatic conversion of old format!
	;; Old files defined an alist instead of a hash table; fold it
	;; into the table and mark the history dirty so it will be
	;; rewritten in the new format on the next save.
	(progn
	  (setq url-global-history-hash-table (make-hash-table :size 131
							       :test 'equal)
		url-history-changed-since-last-save t)
	  (mapcar (function
		   (lambda (x)
		     (cl-puthash (car x) (cdr x)
				 url-global-history-hash-table)))
		  (symbol-value 'url-global-history-completion-list)))))))
1929
(defun url-parse-global-history (&optional fname)
  ;; Parse out the mosaic global history file for completions, etc.
  ;; Loads FNAME (default `url-global-history-file') into a temporary
  ;; buffer, sniffs the first line to determine which browser wrote it,
  ;; and dispatches to the matching format-specific parser.
  (or fname (setq fname (expand-file-name url-global-history-file)))
  (cond
   ((not (file-exists-p fname))
    (message "%s does not exist." fname))
   ((not (file-readable-p fname))
    (message "%s is unreadable." fname))
   (t
    (save-excursion
      (set-buffer (get-buffer-create " *url-tmp*"))
      (erase-buffer)
      (insert-file-contents-literally fname)
      (goto-char (point-min))
      (cond
       ((looking-at "(setq") (url-parse-Emacs-history fname))
       ((looking-at "ncsa-mosaic-.*-1$") (url-parse-Mosaic-history-v1 fname))
       ((looking-at "ncsa-mosaic-.*-2$") (url-parse-Mosaic-history-v2 fname))
       ((or (looking-at "MCOM-") (looking-at "netscape"))
	(url-parse-Netscape-history fname))
       (t
	(url-warn 'url (format "Cannot deduce type of history file: %s"
			       fname))))))))
1953
(defun url-write-Emacs-history (fname)
  ;; Write an Emacs-w3 style global history list into FNAME
  ;; Each entry becomes a `cl-puthash' form; `url-parse-Emacs-history'
  ;; later `load's the file to rebuild the table.  Operates on (and
  ;; erases) the current buffer, then saves it to FNAME.
  (erase-buffer)
  (let ((count 0))
    (cl-maphash (function
		 (lambda (key value)
		   (setq count (1+ count))
		   ;; Print the key with `prin1-to-string' so URLs
		   ;; containing `"' or `\' are properly escaped and
		   ;; cannot corrupt the generated elisp.  (The old
		   ;; code spliced the raw key between literal quotes.)
		   (insert "(cl-puthash " (prin1-to-string key)
			   (if (not (stringp value)) " '" " ")
			   (prin1-to-string value)
			   " url-global-history-hash-table)\n")))
		url-global-history-hash-table)
    (goto-char (point-min))
    ;; Pre-size the table on reload; /4 mirrors the original heuristic.
    (insert (format
	     "(setq url-global-history-hash-table (make-hash-table :size %d :test 'equal))\n"
	     (/ count 4)))
    (goto-char (point-max))
    (insert "\n")
    (write-file fname)))
1973
(defun url-write-Netscape-history (fname)
  ;; Write a Netscape-style global history list into FNAME
  ;; Format: a tag line, then one "URL <integer-time-string>" per line.
  ;; Operates on (and erases) the current buffer, then saves to FNAME.
  (erase-buffer)
  (let ((last-valid-time "785305714"))	; Picked out of thin air,
					; in case first in assoc list
					; doesn't have a valid time
    (goto-char (point-min))
    (insert "MCOM-Global-history-file-1\n")
    (cl-maphash (function
		 (lambda (url time)
		   ;; A Netscape time is a single integer token; any
		   ;; embedded whitespace means the stored value is a
		   ;; date string from another format, so substitute
		   ;; the last good value.  (The old regexp " \t"
		   ;; only matched a literal space-then-tab pair;
		   ;; "[ \t]" matches either character.)
		   (if (or (not (stringp time)) (string-match "[ \t]" time))
		       (setq time last-valid-time)
		     (setq last-valid-time time))
		   (insert url " " time "\n")))
		url-global-history-hash-table)
    (write-file fname)))
1990
(defun url-write-Mosaic-history-v1 (fname)
  ;; Write an NCSA Mosaic/X style (version 1) global history list into
  ;; FNAME.  Each entry is "URL <date-string>"; operates on (and
  ;; erases) the current buffer, then saves it to FNAME.
  (erase-buffer)
  (goto-char (point-min))
  (insert "ncsa-mosaic-history-format-1\nGlobal\n")
  (cl-maphash
   (function
    (lambda (entry stamp)
      ;; Normalize STAMP into a Mosaic-style date string: convert a
      ;; time list, and fall back to "now" for anything that still
      ;; does not look like a date (no embedded space).
      (if (listp stamp)
	  (setq stamp (current-time-string stamp)))
      (if (or (not (stringp stamp))
	      (not (string-match " " stamp)))
	  (setq stamp (current-time-string)))
      (insert entry " " stamp "\n")))
   url-global-history-hash-table)
  (write-file fname))
2006
(defun url-write-Mosaic-history-v2 (fname)
  ;; Write a Mosaic/X-style global history list into FNAME
  ;; Version-2 format: one "URL <integer-time>" pair per line.
  ;; Operates on (and erases) the current buffer, then saves to FNAME.
  (let ((last-valid-time "827250806"))	; fallback for unusable times
    (erase-buffer)
    (goto-char (point-min))
    (insert "ncsa-mosaic-history-format-2\nGlobal\n")
    (cl-maphash (function
		 (lambda (url time)
		   ;; NOTE(review): unlike the v1 writer, a list-valued
		   ;; time is replaced by the fallback instead of being
		   ;; converted with `current-time-string' -- confirm
		   ;; this asymmetry is intentional.
		   (if (listp time)
		       (setq time last-valid-time)
		     (setq last-valid-time time))
		   (if (not (stringp time))
		       (setq time last-valid-time))
		   (insert url " " time "\n")))
		url-global-history-hash-table)
    (write-file fname)))
2023
(defun url-write-global-history (&optional fname)
  "Write the global history file into `url-global-history-file'.
The type of data written is determined by what is in the file to begin
with.  If the type of storage cannot be determined, then prompt the
user for what type to save as."
  (interactive)
  (or fname (setq fname (expand-file-name url-global-history-file)))
  (cond
   ;; Nothing changed since last save; nothing to do.
   ((not url-history-changed-since-last-save) nil)
   ((not (file-writable-p fname))
    (message "%s is unwritable." fname))
   (t
    (let ((make-backup-files nil)
	  (version-control nil)
	  (require-final-newline t))
      (save-excursion
	(set-buffer (get-buffer-create " *url-tmp*"))
	(erase-buffer)
	;; Sniff the existing file (if any) to preserve its format.
	(condition-case ()
	    (insert-file-contents-literally fname)
	  (error nil))
	(goto-char (point-min))
	(cond
	 ((looking-at "ncsa-mosaic-.*-1$") (url-write-Mosaic-history-v1 fname))
	 ((looking-at "ncsa-mosaic-.*-2$") (url-write-Mosaic-history-v2 fname))
	 ((looking-at "MCOM-") (url-write-Netscape-history fname))
	 ((looking-at "netscape") (url-write-Netscape-history fname))
	 ;; NOTE(review): this branch is redundant -- the `t' default
	 ;; below does the same thing (Emacs format is the fallback).
	 ((looking-at "(setq") (url-write-Emacs-history fname))
	 (t (url-write-Emacs-history fname)))
	(kill-buffer (current-buffer))))))
  (setq url-history-changed-since-last-save nil))
2055
2056
2057 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
2058 ;;; The main URL fetching interface
2059 ;;; -------------------------------
2060 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
2061
;;;###autoload
(defun url-popup-info (url)
  "Retrieve the HTTP/1.0 headers and display them in a temp buffer."
  ;; Despite the docstring, each branch RETURNS a descriptive string
  ;; rather than displaying it; callers are expected to show it.
  (let* ((urlobj (url-generic-parse-url url))
	 (type (url-type urlobj))
	 data)
    (cond
     ;; HTTP: do a synchronous HEAD request and return the raw headers.
     ((string= type "http")
      (let ((url-request-method "HEAD")
	    (url-automatic-caching nil)
	    (url-inhibit-mime-parsing t)
	    (url-working-buffer " *popup*"))
	(save-excursion
	  (set-buffer (get-buffer-create url-working-buffer))
	  (erase-buffer)
	  (setq url-be-asynchronous nil)
	  (url-retrieve url)
	  ;; CRs would clutter the display; flatten them to spaces.
	  (subst-char-in-region (point-min) (point-max) ?\r ? )
	  (buffer-string))))
     ;; file/ftp: format the file-attribute list.
     ((or (string= type "file") (string= type "ftp"))
      (setq data (url-file-attributes url))
      ;; NOTE(review): this buffer is created and erased but nothing is
      ;; ever inserted into it -- the info is only returned as a string.
      ;; Possibly leftover code; confirm against callers.
      (set-buffer (get-buffer-create
		   (url-generate-new-buffer-name "*Header Info*")))
      (erase-buffer)
      (if data
	  (concat (if (stringp (nth 0 data))
		      (concat " Linked to: " (nth 0 data))
		    (concat " Directory: " (if (nth 0 data) "Yes" "No")))
		  "\n        Links: " (int-to-string (nth 1 data))
		  "\n     File UID: " (int-to-string (nth 2 data))
		  "\n     File GID: " (int-to-string (nth 3 data))
		  "\n  Last Access: " (current-time-string (nth 4 data))
		  "\nLast Modified: " (current-time-string (nth 5 data))
		  "\n Last Changed: " (current-time-string (nth 6 data))
		  "\n Size (bytes): " (int-to-string (nth 7 data))
		  "\n    File Type: " (or (nth 8 data) "text/plain"))
	(concat "No info found for " url)))
     ;; news: fetch the article headers by message-id.
     ((and (string= type "news") (string-match "@" url))
      (let ((art (url-filename urlobj)))
	(if (not (string= (substring art -1 nil) ">"))
	    (setq art (concat "<" art ">")))
	(url-get-headers-from-article-id art)))
     (t (concat "Don't know how to find information on " url)))))
2105
(defun url-decode-text ()
  ;; Decode text transmitted by NNTP, in `nntp-server-buffer':
  ;; 0. Delete status line.
  ;; 1. Delete `^M' at end of line.
  ;; 2. Delete `.' at end of buffer (end of text mark).
  ;; 3. Delete `.' at beginning of line."
  (save-excursion
    (set-buffer nntp-server-buffer)
    ;; Insert newline at end of buffer.
    (goto-char (point-max))
    (if (not (bolp))
	(insert "\n"))
    ;; Delete status line.
    (goto-char (point-min))
    (delete-region (point) (progn (forward-line 1) (point)))
    ;; Delete `^M' at end of line.
    ;; (replace-regexp "\r$" "")
    (while (not (eobp))
      (end-of-line)
      (if (= (preceding-char) ?\r)
	  (delete-char -1))
      (forward-line 1)
      )
    ;; Delete `.' at end of buffer (end of text mark).
    (goto-char (point-max))
    (forward-line -1)			;(beginning-of-line)
    (if (looking-at "^\\.$")
	(delete-region (point) (progn (forward-line 1) (point))))
    ;; Replace `..' at beginning of line with `.'.
    ;; (NNTP dot-stuffs lines that begin with a period.)
    (goto-char (point-min))
    ;; (replace-regexp "^\\.\\." ".")
    (while (search-forward "\n.." nil t)
      (delete-char -1))
    ))
2140
(defun url-get-headers-from-article-id (art)
  ;; Return the HEAD of ART (a usenet news article) as a string,
  ;; taken from `nntp-server-buffer' after issuing an NNTP HEAD
  ;; command via whichever nntp implementation is loaded.
  (cond
   ;; Felix Lee's "flee" nntp.el: poll until the terminating line.
   ((string-match "flee" nntp-version)
    (nntp/command "HEAD" art)
    (save-excursion
      (set-buffer nntp-server-buffer)
      (while (progn (goto-char (point-min))
		    (not (re-search-forward "^.\r*$" nil t)))
	(url-accept-process-output nntp/connection))))
   ;; Stock nntp.el: synchronous command plus dot-unstuffing.
   (t
    (nntp-send-command "^\\.\r$" "HEAD" art)
    (url-decode-text)))
  (save-excursion
    (set-buffer nntp-server-buffer)
    (buffer-string)))
2157
;; Configuration for `url-retrieve-externally' (retrieval via an
;; external command-line web client such as `www').

(defvar url-external-retrieval-program "www"
  "*Name of the external executable to run to retrieve URLs.")

(defvar url-external-retrieval-args '("-source")
  "*A list of arguments to pass to `url-external-retrieval-program' to
retrieve a URL by its HTML source.")
2164
(defun url-retrieve-externally (url &optional no-cache)
  ;; Retrieve URL by running `url-external-retrieval-program', leaving
  ;; the output in `url-working-buffer'.  Directly-accessible URLs
  ;; (www/about/mailto/mailserver or local files) are handed to
  ;; `url-retrieve-internally' instead.  NO-CACHE is accepted for
  ;; interface parity but is not used by the external path.
  (let ((url-working-buffer (if (and url-multiple-p
				     (string-equal url-working-buffer
						   url-default-working-buffer))
				(url-get-working-buffer-name)
			      url-working-buffer)))
    ;; Kill any stale working buffer so no buffer-local state leaks in.
    ;; (Use `get-buffer', not `get-buffer-create', to match
    ;; `url-retrieve-internally' -- the old code created a buffer only
    ;; to immediately kill it.)
    (if (get-buffer url-working-buffer)
	(save-excursion
	  (set-buffer url-working-buffer)
	  (set-buffer-modified-p nil)
	  (kill-buffer url-working-buffer)))
    (set-buffer (get-buffer-create url-working-buffer))
    (let* ((args (append url-external-retrieval-args (list url)))
	   (urlobj (url-generic-parse-url url))
	   (type (url-type urlobj)))
      (if (or (member type '("www" "about" "mailto" "mailserver"))
	      (url-file-directly-accessible-p urlobj))
	  (url-retrieve-internally url)
	(url-lazy-message "Retrieving %s..." url)
	(apply 'call-process url-external-retrieval-program
	       nil t nil args)
	(url-lazy-message "Retrieving %s... done" url)
	;; Record where the data came from for the rest of the library.
	(if (and type urlobj)
	    (setq url-current-server (url-host urlobj)
		  url-current-type (url-type urlobj)
		  url-current-port (url-port urlobj)
		  url-current-file (url-filename urlobj)))
	;; A bare directory fetch is assumed to have produced HTML.
	(if (member url-current-file '("/" ""))
	    (setq url-current-mime-type "text/html"))))))
2194
(defun url-get-normalized-date (&optional specified-time)
  ;; Return a 'real' date string that most HTTP servers can understand,
  ;; i.e. RFC-850 style "Day, DD-Mon-YY HH:MM:SS ZONE" in GMT.
  ;; SPECIFIED-TIME, if given, is a time value as for
  ;; `current-time-string'; otherwise the current time is used.
  ;; NOTE(review): relies on `weekday-alist' and `monthabbrev-alist'
  ;; being defined elsewhere (timezone/w3 support) -- confirm.
  (require 'timezone)
  (let* ((raw (if specified-time (current-time-string specified-time)
		(current-time-string)))
	 (gmt (timezone-make-date-arpa-standard raw
						(nth 1 (current-time-zone))
						"GMT"))
	 (parsed (timezone-parse-date gmt))
	 (day (cdr-safe (assoc (substring raw 0 3) weekday-alist)))
	 (year nil)
	 (month (car
		 (rassoc
		  (string-to-int (aref parsed 1)) monthabbrev-alist)))
	 )
    ;; Round-trip the day name through the alist to normalize its case;
    ;; fall back on the raw abbreviation if the lookup fails.
    (setq day (or (car-safe (rassoc day weekday-alist))
		  (substring raw 0 3))
	  year (aref parsed 0))
    ;; This is needed for plexus servers, or the server will hang trying to
    ;; parse the if-modified-since header.  Hopefully, I can take this out
    ;; soon.
    (if (and year (> (length year) 2))
	(setq year (substring year -2 nil)))

    (concat day ", " (aref parsed 2) "-" month "-" year " "
	    (aref parsed 3) " " (or (aref parsed 4)
				    (concat "[" (nth 1 (current-time-zone))
					    "]")))))
2223
;;;###autoload
(defun url-cache-expired (url mod)
  "Return t iff a cached file has expired.
MOD is the modification time of the cached copy, as a time value
\(or nil/'(0 0) when unknown)."
  (if (not (string-match url-nonrelative-link url))
      t
    (let* ((urlobj (url-generic-parse-url url))
	   (type (url-type urlobj)))
      (cond
       ;; Standalone mode: whatever is in the cache is all we have.
       (url-standalone-mode
	(not (file-exists-p (url-create-cached-filename urlobj))))
       ((string= type "http")
	(if (not url-standalone-mode) t
	  (not (file-exists-p (url-create-cached-filename urlobj)))))
       ;; Without `current-time' we cannot compare timestamps.
       ((not (fboundp 'current-time))
	t)
       ((member type '("file" "ftp"))
	;; An absent or zero modification time is untrustworthy, so
	;; treat the entry as expired.  (The original used `(return t)'
	;; here, which is invalid outside a cl `block'/loop and would
	;; signal an error at runtime; a plain `t' is the correct value
	;; for this `if' branch.)
	(if (or (equal mod '(0 0)) (not mod))
	    t
	  (or (> (nth 0 mod) (nth 0 (current-time)))
	      (> (nth 1 mod) (nth 1 (current-time))))))
       (t nil)))))
2245
(defun url-get-working-buffer-name ()
  "Get a working buffer name such as ` *URL-<i>*' without a live process and empty"
  (let ((idx 1)
	(found nil))
    (while (not found)
      (let* ((candidate (format " *URL-%d*" idx))
	     (existing (get-buffer candidate)))
	;; A name is usable when no such buffer exists, or the buffer
	;; has no process attached and holds no text.
	(if (and existing
		 (or (get-buffer-process existing)
		     (save-excursion (set-buffer existing)
				     (> (point-max) 1))))
	    (setq idx (1+ idx))
	  (setq found candidate))))
    found))
2257
(defun url-default-find-proxy-for-url (urlobj host)
  ;; Default proxy chooser: return a Netscape-autoconfig-style
  ;; directive ("DIRECT" or "PROXY <spec>") for fetching URLOBJ
  ;; from HOST, consulting `url-proxy-services'.
  (let ((no-proxy (assoc "no_proxy" url-proxy-services))
	(scheme-proxy (cdr (assoc (url-type urlobj) url-proxy-services))))
    (cond
     ;; HOST matches the no_proxy exemption pattern, or www: URLs,
     ;; which never go through a proxy.
     ((or (and no-proxy (string-match (cdr no-proxy) host))
	  (equal "www" (url-type urlobj)))
      "DIRECT")
     ;; A proxy is configured for this URL scheme.
     (scheme-proxy
      (concat "PROXY " scheme-proxy))
     ;;
     ;; Should check for socks
     ;;
     (t
      "DIRECT"))))
2274
(defvar url-proxy-locator 'url-default-find-proxy-for-url
  "*Function called with a URL object and a hostname to pick a proxy.
It must return a Netscape proxy-autoconfig style directive string,
e.g. \"DIRECT\" or \"PROXY host:port\"; see `url-find-proxy-for-url'.")
2276
(defun url-find-proxy-for-url (url host)
  ;; Ask `url-proxy-locator' how to reach URL on HOST and translate the
  ;; first directive of its answer into a proxy URL string, or nil for
  ;; a direct connection.
  (let ((proxies (split-string (funcall url-proxy-locator url host) " *; *"))
	(proxy nil)
	(case-fold-search t))
    ;; Not sure how I should handle gracefully degrading from one proxy to
    ;; another, so for now just deal with the first one
    ;; (while proxies
    (setq proxy (pop proxies))
    (cond
     ((string-match "^direct" proxy) nil)
     ((string-match "^proxy +" proxy)
      (concat "http://" (substring proxy (match-end 0)) "/"))
     ((string-match "^socks +" proxy)
      (concat "socks://" (substring proxy (match-end 0))))
     (t
      ;; Unrecognized directive: warn loudly and fall back to direct.
      (url-warn 'url (format "Unknown proxy directive: %s" proxy) 'critical)
      nil))))
2294
(defun url-retrieve-internally (url &optional no-cache)
  ;; The central retrieval dispatcher: fetch URL into
  ;; `url-working-buffer' via the handler registered for its scheme in
  ;; `url-registered-protocols' (or from the cache), then do MIME
  ;; parsing, caching, and history bookkeeping.  NO-CACHE non-nil
  ;; forbids both reading from and writing to the cache.
  ;; Returns (CACHED . BUFFER-NAME).
  (let ((url-working-buffer (if (and url-multiple-p
				     (string-equal
				      (if (bufferp url-working-buffer)
					  (buffer-name url-working-buffer)
					url-working-buffer)
				      url-default-working-buffer))
				(url-get-working-buffer-name)
			      url-working-buffer)))
    ;; Start from a clean buffer so no state leaks between retrievals.
    (if (get-buffer url-working-buffer)
	(save-excursion
	  (set-buffer url-working-buffer)
	  (erase-buffer)
	  (setq url-current-can-be-cached (not no-cache))
	  (set-buffer-modified-p nil)))
    (let* ((urlobj (url-generic-parse-url url))
	   (type (url-type urlobj))
	   (url-using-proxy (if (url-host urlobj)
				(url-find-proxy-for-url urlobj
							(url-host urlobj))
			      nil))
	   (handler nil)
	   (original-url url)
	   (cached nil)
	   (tmp url-current-file))
      ;; Proxied requests go through the "proxy" pseudo-protocol.
      (if url-using-proxy (setq type "proxy"))
      ;; Decide between a fresh cache hit and the protocol handler; for
      ;; a cache hit, rewrite URL to the cache file's name.
      (setq cached (url-is-cached url)
	    cached (and cached (not (url-cache-expired url cached)))
	    handler (if cached 'url-extract-from-cache
		      (car-safe
		       (cdr-safe (assoc (or type "auto")
					url-registered-protocols))))
	    url (if cached (url-create-cached-filename url) url))
      (save-excursion
	(set-buffer (get-buffer-create url-working-buffer))
	(setq url-current-can-be-cached (not no-cache)))
;      (if url-be-asynchronous
;	  (url-download-minor-mode t))
      (if (and handler (fboundp handler))
	  (funcall handler url)
	;; No handler for this scheme: synthesize an HTML error page.
	(set-buffer (get-buffer-create url-working-buffer))
	(setq url-current-file tmp)
	(erase-buffer)
	(insert "<title> Link Error! </title>\n"
		"<h1> An error has occurred... </h1>\n"
		(format "The link type `<code>%s</code>'" type)
		" is unrecognized or unsupported at this time.<p>\n"
		"If you feel this is an error, please "
		"<a href=\"mailto://" url-bug-address "\">send me mail.</a>"
		"<p><address>William Perry</address><br>"
		"<address>" url-bug-address "</address>")
	(setq url-current-file "error.html"))
      ;; Synchronous fetches can be cleaned up immediately; async ones
      ;; are finished later by the process sentinel machinery.
      (if (and
	   (not url-be-asynchronous)
	   (get-buffer url-working-buffer))
	  (progn
	    (set-buffer url-working-buffer)

	    (url-clean-text)))
      (cond
       ((equal type "wais") nil)
       ((and url-be-asynchronous (not cached) (member type '("http" "proxy")))
	nil)
       (url-be-asynchronous
	(funcall url-default-retrieval-proc (buffer-name)))
       ((not (get-buffer url-working-buffer)) nil)
       ((and (not url-inhibit-mime-parsing)
	     (or cached (url-mime-response-p t)))
	(or cached (url-parse-mime-headers nil t))))
      ;; Guess a MIME type when the server/headers did not supply one.
      (if (and (or (not url-be-asynchronous)
		   (not (equal type "http")))
	       (not url-current-mime-type))
	  (if (url-buffer-is-hypertext)
	      (setq url-current-mime-type "text/html")
	    (setq url-current-mime-type (mm-extension-to-mime
					 (url-file-extension
					  url-current-file)))))
      ;; Store the (synchronously fetched) document in the cache.
      (if (and url-automatic-caching url-current-can-be-cached
	       (not url-be-asynchronous))
	  (save-excursion
	    (url-store-in-cache url-working-buffer)))
      ;; Record the visit in the global history (but not about: pages).
      (if (not url-global-history-hash-table)
	  (setq url-global-history-hash-table (make-hash-table :size 131
							       :test 'equal)))
      (if (not (string-match "^about:" original-url))
	  (progn
	    (setq url-history-changed-since-last-save t)
	    (cl-puthash original-url (current-time)
			url-global-history-hash-table)))
      (cons cached url-working-buffer))))
2385
;;;###autoload
(defun url-retrieve (url &optional no-cache expected-md5)
  "Retrieve a document over the World Wide Web.
The document should be specified by its fully specified
Uniform Resource Locator.  No parsing is done, just return the
document as the server sent it.  The document is left in the
buffer specified by url-working-buffer.  url-working-buffer is killed
immediately before starting the transfer, so that no buffer-local
variables interfere with the retrieval.  HTTP/1.0 redirection will
be honored before this function exits.
NO-CACHE non-nil bypasses the cache.  EXPECTED-MD5, together with
`url-check-md5s', enables an integrity check of the result."
  (url-do-setup)
  ;; Strip text properties from URL so sticky/buffer-local properties
  ;; cannot leak into the retrieval machinery (only when the fast
  ;; built-in implementation is available).
  (if (and (fboundp 'set-text-properties)
	   (subrp (symbol-function 'set-text-properties)))
      (set-text-properties 0 (length url) nil url))
  ;; Tolerate a redundant "url:" prefix.
  (if (and url (string-match "^url:" url))
      (setq url (substring url (match-end 0) nil)))
  (let ((status (url-retrieve-internally url no-cache)))
    ;; Optional integrity check; the user may override a mismatch.
    (if (and expected-md5 url-check-md5s)
	(let ((cur-md5 (md5 (current-buffer))))
	  (if (not (string= cur-md5 expected-md5))
	      (and (not (funcall url-confirmation-func
				 "MD5s do not match, use anyway? "))
		   (error "MD5 error.")))))
    status))
2410
2411 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
2412 ;;; How to register a protocol
2413 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defun url-register-protocol (protocol &optional retrieve expander defport)
  "Register a protocol with the URL retrieval package.
PROTOCOL is the type of protocol being registered (http, nntp, etc),
       and is the first chunk of the URL.  i.e.: http:// URLs will be
       handled by the protocol registered as 'http'.  PROTOCOL can
       be either a symbol or a string - it is converted to a string,
       and lowercased before being registered.
RETRIEVE (optional) is the function to be called with a url as its
       only argument.  If this argument is omitted, then this looks
       for a function called 'url-PROTOCOL'.  A warning is shown if
       the function is undefined, but the protocol is still
       registered.
EXPANDER (optional) is the function to call to expand a relative link
       of type PROTOCOL.  If omitted, this defaults to
       `url-default-expander'

Any proxy information is read in from environment variables at this
time, so this function should only be called after dumping emacs."
  (let* ((protocol (cond
		    ((stringp protocol) (downcase protocol))
		    ((symbolp protocol) (downcase (symbol-name protocol)))
		    (t nil)))

	 (retrieve (or retrieve (intern (concat "url-" protocol))))
	 (expander (or expander 'url-default-expander))
	 (cur-protocol (assoc protocol url-registered-protocols))
	 (urlobj nil)
	 (cur-proxy (assoc protocol url-proxy-services))
	 ;; Honor the customary {protocol}_proxy environment variables
	 ;; in their common capitalizations.
	 (env-proxy (or (getenv (concat protocol "_proxy"))
			(getenv (concat protocol "_PROXY"))
			(getenv (upcase (concat protocol "_PROXY"))))))

    (if (not protocol)
	(error "Invalid data to url-register-protocol."))

    (if (not (fboundp retrieve))
	(message "Warning: %s registered, but no function found." protocol))

    ;; Store the default port, if none previously specified and
    ;; defport given
    (if (and defport (not (assoc protocol url-default-ports)))
	(setq url-default-ports (cons (cons protocol defport)
				      url-default-ports)))

    ;; Store the appropriate information for later
    (if cur-protocol
	(setcdr cur-protocol (cons retrieve expander))
      (setq url-registered-protocols (cons (cons protocol
						 (cons retrieve expander))
					   url-registered-protocols)))

    ;; Store any proxying information - this will not overwrite an old
    ;; entry, so that people can still set this information in their
    ;; .emacs file
    (cond
     (cur-proxy nil)			; Keep their old settings
     ((null env-proxy) nil)		; No proxy setup
     ;; First check if its something like hostname:port
     ((string-match "^\\([^:]+\\):\\([0-9]+\\)$" env-proxy)
      (setq urlobj (url-generic-parse-url nil)) ; Get a blank object
      (url-set-type urlobj "http")
      (url-set-host urlobj (url-match env-proxy 1))
      (url-set-port urlobj (url-match env-proxy 2)))
     ;; Then check if its a fully specified URL
     ((string-match url-nonrelative-link env-proxy)
      (setq urlobj (url-generic-parse-url env-proxy))
      (url-set-type urlobj "http")
      (url-set-target urlobj nil))
     ;; Finally, fall back on the assumption that its just a hostname
     (t
      (setq urlobj (url-generic-parse-url nil)) ; Get a blank object
      (url-set-type urlobj "http")
      (url-set-host urlobj env-proxy)))

    ;; Register the proxy parsed above as "host:port" for this scheme.
    (if (and (not cur-proxy) urlobj)
	(progn
	  (setq url-proxy-services
		(cons (cons protocol (concat (url-host urlobj) ":"
					     (url-port urlobj)))
		      url-proxy-services))
	  (message "Using a proxy for %s..." protocol)))))
2495
2496 (provide 'url)