comparison lisp/url/url.el @ 0:376386a54a3c r19-14

Import from CVS: tag r19-14
author cvs
date Mon, 13 Aug 2007 08:45:50 +0200
parents
children ac2d302a0011
1 ;;; url.el,v --- Uniform Resource Locator retrieval tool
2 ;; Author: wmperry
3 ;; Created: 1996/05/30 13:25:47
4 ;; Version: 1.52
5 ;; Keywords: comm, data, processes, hypermedia
6
7 ;;; LCD Archive Entry:
8 ;;; url|William M. Perry|wmperry@spry.com|
9 ;;; Major mode for manipulating URLs|
10 ;;; 1996/05/30 13:25:47|1.52|Location Undetermined
11 ;;;
12
13 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
14 ;;; Copyright (c) 1993, 1994, 1995 by William M. Perry (wmperry@spry.com)
15 ;;;
16 ;;; This file is not part of GNU Emacs, but the same permissions apply.
17 ;;;
18 ;;; GNU Emacs is free software; you can redistribute it and/or modify
19 ;;; it under the terms of the GNU General Public License as published by
20 ;;; the Free Software Foundation; either version 2, or (at your option)
21 ;;; any later version.
22 ;;;
23 ;;; GNU Emacs is distributed in the hope that it will be useful,
24 ;;; but WITHOUT ANY WARRANTY; without even the implied warranty of
25 ;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
26 ;;; GNU General Public License for more details.
27 ;;;
28 ;;; You should have received a copy of the GNU General Public License
29 ;;; along with GNU Emacs; see the file COPYING. If not, write to
30 ;;; the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.
31 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
32
33 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
34 ;;; Copyright (c) 1993, 1994, 1995 by William M. Perry (wmperry@spry.com) ;;;
35 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
36
37
38 (require 'url-vars)
39 (require 'url-parse)
40 (require 'urlauth)
41 (require 'url-cookie)
42 (require 'mm)
43 (require 'md5)
44 (require 'base64)
45 (require 'url-hash)
46 (or (featurep 'efs)
47 (featurep 'efs-auto)
48 (condition-case ()
49 (require 'ange-ftp)
50 (error nil)))
51
52 (load-library "url-sysdp")
53
54 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
55 ;;; Functions that might not exist in old versions of emacs
56 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
57 (defun url-save-error (errobj)
58 (save-excursion
59 (set-buffer (get-buffer-create " *url-error*"))
60 (erase-buffer))
61 (display-error errobj (get-buffer-create " *url-error*")))
62
63 (cond
64 ((fboundp 'display-warning)
65 (fset 'url-warn 'display-warning))
66 ((fboundp 'w3-warn)
67 (fset 'url-warn 'w3-warn))
68 ((fboundp 'warn)
69 (defun url-warn (class message &optional level)
70 (warn "(%s/%s) %s" class (or level 'warning) message)))
71 (t
72 (defun url-warn (class message &optional level)
73 (save-excursion
74 (set-buffer (get-buffer-create "*W3-WARNINGS*"))
75 (goto-char (point-max))
76 (save-excursion
77 (insert (format "(%s/%s) %s\n" class (or level 'warning) message)))
78 (display-buffer (current-buffer))))))
79
80
81 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
82 ;;; Autoload all the URL loaders
83 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
84 (autoload 'url-file "url-file")
85 (autoload 'url-ftp "url-file")
86 (autoload 'url-gopher "url-gopher")
87 (autoload 'url-irc "url-irc")
88 (autoload 'url-http "url-http")
89 (autoload 'url-mailserver "url-mail")
90 (autoload 'url-mailto "url-mail")
91 (autoload 'url-info "url-misc")
92 (autoload 'url-shttp "url-http")
93 (autoload 'url-https "url-http")
94 (autoload 'url-finger "url-misc")
95 (autoload 'url-rlogin "url-misc")
96 (autoload 'url-telnet "url-misc")
97 (autoload 'url-tn3270 "url-misc")
98 (autoload 'url-proxy "url-misc")
99 (autoload 'url-x-exec "url-misc")
100 (autoload 'url-news "url-news")
101 (autoload 'url-nntp "url-news")
102 (autoload 'url-decode-pgp/pem "url-pgp")
103 (autoload 'url-wais "url-wais")
104
105 (autoload 'url-save-newsrc "url-news")
106 (autoload 'url-news-generate-reply-form "url-news")
107 (autoload 'url-parse-newsrc "url-news")
108 (autoload 'url-mime-response-p "url-http")
109 (autoload 'url-parse-mime-headers "url-http")
110 (autoload 'url-handle-refresh-header "url-http")
111 (autoload 'url-create-mime-request "url-http")
112 (autoload 'url-create-message-id "url-http")
113 (autoload 'url-create-multipart-request "url-http")
114 (autoload 'url-parse-viewer-types "url-http")
115
116 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
117 ;;; File-name-handler-alist functions
118 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
119 (defun url-setup-file-name-handlers ()
120 ;; Setup file-name handlers.
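;; Note: the entire `cond' form below is quoted, so as written this
;; function evaluates nothing and is effectively a no-op.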
121 '(cond
122 ((not (boundp 'file-name-handler-alist))
123 nil) ; Don't load if no alist
124 ((rassq 'url-file-handler file-name-handler-alist)
125 nil) ; Don't load twice
126 ((and (string-match "XEmacs\\|Lucid" emacs-version)
127 (< url-emacs-minor-version 11)) ; Don't load in lemacs 19.10
128 nil)
129 (t
130 (setq file-name-handler-alist
131 (let ((new-handler (cons
132 (concat "^/*"
133 (substring url-nonrelative-link 1 nil))
134 'url-file-handler)))
135 (if file-name-handler-alist
136 (append (list new-handler) file-name-handler-alist)
137 (list new-handler)))))))
138
139 (defun url-file-handler (operation &rest args)
140 ;; Function called from the file-name-handler-alist routines. OPERATION
141 ;; is what needs to be done ('file-exists-p, etc). ARGS are the arguments
142 ;; that would have been passed to OPERATION.
143 (let ((fn (get operation 'url-file-handlers))
144 (url (car args))
145 (myargs (cdr args)))
146 (if (= (string-to-char url) ?/)
147 (setq url (substring url 1 nil)))
148 (if fn (apply fn url myargs)
149 (let (file-name-handler-alist)
150 (apply operation url myargs)))))
151
152 (defun url-file-handler-identity (&rest args)
153 (car args))
154
155 (defun url-file-handler-null (&rest args)
156 nil)
157
158 (put 'file-directory-p 'url-file-handlers 'url-file-handler-null)
159 (put 'substitute-in-file-name 'url-file-handlers 'url-file-handler-identity)
160 (put 'file-writable-p 'url-file-handlers 'url-file-handler-null)
161 (put 'file-truename 'url-file-handlers 'url-file-handler-identity)
162 (put 'insert-file-contents 'url-file-handlers 'url-insert-file-contents)
163 (put 'expand-file-name 'url-file-handlers 'url-expand-file-name)
164 (put 'directory-files 'url-file-handlers 'url-directory-files)
165 (put 'file-directory-p 'url-file-handlers 'url-file-directory-p)
166 (put 'file-writable-p 'url-file-handlers 'url-file-writable-p)
167 (put 'file-readable-p 'url-file-handlers 'url-file-exists)
168 (put 'file-executable-p 'url-file-handlers 'null)
169 (put 'file-symlink-p 'url-file-handlers 'null)
170 (put 'file-exists-p 'url-file-handlers 'url-file-exists)
171 (put 'copy-file 'url-file-handlers 'url-copy-file)
172 (put 'file-attributes 'url-file-handlers 'url-file-attributes)
173 (put 'file-name-all-completions 'url-file-handlers
174 'url-file-name-all-completions)
175 (put 'file-name-completion 'url-file-handlers 'url-file-name-completion)
176 (put 'file-local-copy 'url-file-handlers 'url-file-local-copy)
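;; Illustrative sketch, not part of the original source: if the handler
;; regexp built by `url-setup-file-name-handlers' were installed in
;; `file-name-handler-alist', ordinary file primitives called on a
;; URL-shaped name would be routed through `url-file-handler', which
;; dispatches on the `url-file-handlers' property set above.  The URL below
;; is only an example value.
;;
;;   (file-exists-p "http://www.example.com/index.html")
;;   ;; => handled by `url-file-exists'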
177
178
179 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
180 ;;; Utility functions
181 ;;; -----------------
182 ;;; Various functions used around the url code.
183 ;;; Some of these qualify as hacks, but hey, this is elisp.
184 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
185
186 (if (fboundp 'mm-string-to-tokens)
187 (fset 'url-string-to-tokens 'mm-string-to-tokens)
188 (defun url-string-to-tokens (str &optional delim)
189 "Return a list of words from the string STR"
190 (setq delim (or delim ? ))
191 (let (results y)
192 (mapcar
193 (function
194 (lambda (x)
195 (cond
196 ((and (= x delim) y) (setq results (cons y results) y nil))
197 ((/= x delim) (setq y (concat y (char-to-string x))))
198 (t nil)))) str)
199 (nreverse (cons y results)))))
200
201 (defun url-days-between (date1 date2)
202 ;; Return the number of days between date1 and date2.
203 (- (url-day-number date1) (url-day-number date2)))
204
205 (defun url-day-number (date)
206 (let ((dat (mapcar (function (lambda (s) (and s (string-to-int s)) ))
207 (timezone-parse-date date))))
208 (timezone-absolute-from-gregorian
209 (nth 1 dat) (nth 2 dat) (car dat))))
210
211 (defun url-seconds-since-epoch (date)
212 ;; Return the number of seconds that have
213 ;; elapsed between Jan 1 12:00:00 1970 and DATE.
214 (let* ((tdate (mapcar (function (lambda (ti) (and ti (string-to-int ti))))
215 (timezone-parse-date date)))
216 (ttime (mapcar (function (lambda (ti) (and ti (string-to-int ti))))
217 (timezone-parse-time
218 (aref (timezone-parse-date date) 3))))
219 (edate (mapcar (function (lambda (ti) (and ti (string-to-int ti))))
220 (timezone-parse-date "Jan 1 12:00:00 1970")))
221 (tday (- (timezone-absolute-from-gregorian
222 (nth 1 tdate) (nth 2 tdate) (nth 0 tdate))
223 (timezone-absolute-from-gregorian
224 (nth 1 edate) (nth 2 edate) (nth 0 edate)))))
225 (+ (nth 2 ttime)
226 (* (nth 1 ttime) 60)
227 (* (nth 0 ttime) 60 60)
228 (* tday 60 60 24))))
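;; Illustrative usage, not part of the original source (results assume
;; `timezone-parse-date' accepts the RFC 822 style dates shown):
;;
;;   (url-days-between "Sat, 03 Jan 1970 00:00:00 GMT"
;;                     "Thu, 01 Jan 1970 00:00:00 GMT")       ; => 2
;;   (url-seconds-since-epoch "Fri, 02 Jan 1970 00:00:00 GMT") ; => 86400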
229
230 (defun url-match (s x)
231 ;; Return regexp match x in s.
232 (substring s (match-beginning x) (match-end x)))
233
234 (defun url-split (str del)
235 ;; Split the string STR, with DEL (a regular expression) as the delimiter.
236 ;; Returns an assoc list that you can use with completing-read.
237 (let (x y)
238 (while (string-match del str)
239 (setq y (substring str 0 (match-beginning 0))
240 str (substring str (match-end 0) nil))
241 (if (not (string-match "^[ \t]+$" y))
242 (setq x (cons (list y y) x))))
243 (if (not (equal str ""))
244 (setq x (cons (list str str) x)))
245 x))
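;; Illustrative usage, not part of the original source:
;;
;;   (url-split "foo,bar,baz" ",")
;;   ;; => (("baz" "baz") ("bar" "bar") ("foo" "foo"))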
246
247 (defun url-replace-regexp (regexp to-string)
248 (goto-char (point-min))
249 (while (re-search-forward regexp nil t)
250 (replace-match to-string t nil)))
251
252 (defun url-clear-tmp-buffer ()
253 (set-buffer (get-buffer-create url-working-buffer))
254 (if buffer-read-only (toggle-read-only))
255 (erase-buffer))
256
257 (defun url-maybe-relative (url)
258 (url-retrieve (url-expand-file-name url)))
259
260 (defun url-buffer-is-hypertext (&optional buff)
261 "Return t if a buffer contains HTML, as near as we can guess."
262 (setq buff (or buff (current-buffer)))
263 (save-excursion
264 (set-buffer buff)
265 (let ((case-fold-search t))
266 (goto-char (point-min))
267 (re-search-forward
268 "<\\(TITLE\\|HEAD\\|BASE\\|H[0-9]\\|ISINDEX\\|P\\)>" nil t))))
269
270 (defun nntp-after-change-function (&rest args)
271 (save-excursion
272 (set-buffer nntp-server-buffer)
273 (message "Read %d bytes" (point-max))))
274
275 (defun url-percentage (x y)
276 (if (fboundp 'float)
277 (round (* 100 (/ x (float y))))
278 (/ (* x 100) y)))
279
280 (defun url-after-change-function (&rest args)
281 ;; Display the HTTP/1.0 status and progress messages in the minibuffer
282 ;; as data arrives.
283 (save-excursion
284 (set-buffer url-working-buffer)
285 (let (status-message)
286 (if url-current-content-length
287 nil
288 (goto-char (point-min))
289 (skip-chars-forward " \t\n")
290 (if (not (looking-at "HTTP/[0-9]\\.[0-9]"))
291 (setq url-current-content-length 0)
292 (setq url-current-isindex
293 (and (re-search-forward "$\r*$" nil t) (point)))
294 (if (re-search-forward
295 "^content-type:[ \t]*\\([^\r\n]+\\)\r*$"
296 url-current-isindex t)
297 (setq url-current-mime-type (downcase
298 (url-eat-trailing-space
299 (buffer-substring
300 (match-beginning 1)
301 (match-end 1))))))
302 (if (re-search-forward "^content-length:\\([^\r\n]+\\)\r*$"
303 url-current-isindex t)
304 (setq url-current-content-length
305 (string-to-int (buffer-substring (match-beginning 1)
306 (match-end 1))))
307 (setq url-current-content-length nil))))
308 (goto-char (point-min))
309 (if (re-search-forward "^status:\\([^\r]*\\)" url-current-isindex t)
310 (progn
311 (setq status-message (buffer-substring (match-beginning 1)
312 (match-end 1)))
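;; The header name is deliberately mangled below, apparently so that later
;; runs of this function do not find and re-display the same status line.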
313 (replace-match (concat "btatus:" status-message))))
314 (goto-char (point-max))
315 (cond
316 (status-message (url-lazy-message "%s" status-message))
317 ((and url-current-content-length (> url-current-content-length 1)
318 url-current-mime-type)
319 (url-lazy-message "Read %d of %d bytes (%d%%) [%s]"
320 (point-max) url-current-content-length
321 (url-percentage (point-max) url-current-content-length)
322 url-current-mime-type))
323 ((and url-current-content-length (> url-current-content-length 1))
324 (url-lazy-message "Read %d of %d bytes (%d%%)"
325 (point-max) url-current-content-length
326 (url-percentage (point-max)
327 url-current-content-length)))
328 ((and (/= 1 (point-max)) url-current-mime-type)
329 (url-lazy-message "Read %d bytes. [%s]" (point-max)
330 url-current-mime-type))
331 ((/= 1 (point-max))
332 (url-lazy-message "Read %d bytes." (point-max)))
333 (t (url-lazy-message "Waiting for response."))))))
334
335
336 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
337 ;;; Process information
338 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
339 (defvar url-process-lookup-table nil)
340
341 (defun url-setup-process-get ()
342 (let ((x nil)
343 (nativep t))
344 (condition-case ()
345 (progn
346 (setq x (start-process "Test" nil "/bin/sh"))
347 (get x 'command))
348 (error (setq nativep nil)))
349 (cond
350 ((fboundp 'process-get) ; Emacs 19.31 w/my hacks
351 (defun url-process-get (proc prop &optional default)
352 (or (process-get proc prop) default)))
353 (nativep ; XEmacs 19.14 w/my hacks
354 (fset 'url-process-get 'get))
355 (t
356 (defun url-process-get (proc prop &optional default)
357 (or (plist-get (cdr-safe (assq proc url-process-lookup-table)) prop)
358 default))))
359 (cond
360 ((fboundp 'process-put) ; Emacs 19.31 w/my hacks
361 (fset 'url-process-put 'process-put))
362 (nativep
363 (fset 'url-process-put 'put))
364 (t
365 (defun url-process-put (proc prop val)
366 (let ((node (assq proc url-process-lookup-table)))
367 (if (not node)
368 (setq url-process-lookup-table (cons (cons proc (list prop val))
369 url-process-lookup-table))
370 (setcdr node (plist-put (cdr node) prop val)))))))
371 (and (processp x) (delete-process x))))
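;; Illustrative sketch, not part of the original source: after
;; `url-setup-process-get' has run, per-process data can be stored and read
;; back through whichever implementation was selected.  PROC is assumed to
;; be a live network process; the URL is only an example value.
;;
;;   (url-process-put proc 'url "http://www.example.com/")
;;   (url-process-get proc 'url)   ; => "http://www.example.com/"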
372
373 (defun url-gc-process-lookup-table ()
374 (let (new)
375 (while url-process-lookup-table
376 (if (not (memq (process-status (caar url-process-lookup-table))
377 '(stop closed nil)))
378 (setq new (cons (car url-process-lookup-table) new)))
379 (setq url-process-lookup-table (cdr url-process-lookup-table)))
380 (setq url-process-lookup-table new)))
381
382 (defun url-list-processes ()
383 (interactive)
384 (url-gc-process-lookup-table)
385 (let ((processes (process-list))
386 proc len type)
387 (set-buffer (get-buffer-create "URL Status Display"))
388 (display-buffer (current-buffer))
389 (erase-buffer)
390 (insert
391 (eval-when-compile (format "%-40s%-10s%-25s" "URL" "Size" "Type")) "\n"
392 (eval-when-compile (make-string 75 ?-)) "\n")
393 (while processes
394 (setq proc (car processes)
395 processes (cdr processes))
396 (if (url-process-get proc 'url)
397 (progn
398 (save-excursion
399 (set-buffer (process-buffer proc))
400 (setq len url-current-content-length
401 type url-current-mime-type))
402 (insert
403 (format "%-40s%-10d%-25s" (url-process-get proc 'url)
404 len type)))))))
405
406
407 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
408 ;;; file-name-handler stuff calls this
409 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
410
411 (defun url-have-visited-url (url &rest args)
412 "Return non-nil iff the user has visited URL before.
413 The return value is a cons of the url and the date last accessed as a string"
414 (url-gethash url url-global-history-hash-table))
415
416 (defun url-directory-files (url &rest args)
417 "Return a list of files on a server."
418 nil)
419
420 (defun url-file-writable-p (url &rest args)
421 "Return t iff a url is writable by this user"
422 nil)
423
424 (defun url-copy-file (url &rest args)
425 "Copy a url to the specified filename."
426 nil)
427
428 (defun url-file-directly-accessible-p (url)
429 "Returns t iff the specified URL is directly accessible
430 on your filesystem. (nfs, local file, etc)."
431 (let* ((urlobj (if (vectorp url) url (url-generic-parse-url url)))
432 (type (url-type urlobj)))
433 (and (member type '("file" "ftp"))
434 (not (url-host urlobj)))))
435
436 ;;;###autoload
437 (defun url-file-attributes (url &rest args)
438 "Return a list of attributes of URL.
439 Value is nil if specified file cannot be opened.
440 Otherwise, list elements are:
441 0. t for directory, string (name linked to) for symbolic link, or nil.
442 1. Number of links to file.
443 2. File uid.
444 3. File gid.
445 4. Last access time, as a list of two integers.
446 First integer has high-order 16 bits of time, second has low 16 bits.
447 5. Last modification time, likewise.
448 6. Last status change time, likewise.
449 7. Size in bytes. (-1, if number is out of range).
450 8. File modes, as a string of ten letters or dashes as in ls -l.
451 If URL is on an http server, this will return the content-type if possible.
452 9. t iff file's gid would change if file were deleted and recreated.
453 10. inode number.
454 11. Device number.
455
456 If file does not exist, returns nil."
457 (and url
458 (let* ((urlobj (url-generic-parse-url url))
459 (type (url-type urlobj))
460 (url-automatic-caching nil)
461 (data nil)
462 (exists nil))
463 (cond
464 ((equal type "http")
465 (cond
466 ((not url-be-anal-about-file-attributes)
467 (setq data (list
468 (url-file-directory-p url) ; Directory
469 1 ; number of links to it
470 0 ; UID
471 0 ; GID
472 (cons 0 0) ; Last access time
473 (cons 0 0) ; Last mod. time
474 (cons 0 0) ; Last status time
475 -1 ; file size
476 (mm-extension-to-mime
477 (url-file-extension (url-filename urlobj)))
478 nil ; gid would change
479 0 ; inode number
480 0 ; device number
481 )))
482 (t ; HTTP/1.0, use HEAD
483 (let ((url-request-method "HEAD")
484 (url-request-data nil)
485 (url-working-buffer " *url-temp*"))
486 (save-excursion
487 (condition-case ()
488 (progn
489 (url-retrieve url)
490 (setq data (and
491 (setq exists
492 (cdr
493 (assoc "status"
494 url-current-mime-headers)))
495 (>= exists 200)
496 (< exists 300)
497 (list
498 (url-file-directory-p url) ; Directory
499 1 ; links to
500 0 ; UID
501 0 ; GID
502 (cons 0 0) ; Last access time
503 (cons 0 0) ; Last mod. time
504 (cons 0 0) ; Last status time
505 (or ; Size in bytes
506 (cdr (assoc "content-length"
507 url-current-mime-headers))
508 -1)
509 (or
510 (cdr (assoc "content-type"
511 url-current-mime-headers))
512 (mm-extension-to-mime
513 (url-file-extension
514 (url-filename urlobj)))) ; content-type
515 nil ; gid would change
516 0 ; inode number
517 0 ; device number
518 ))))
519 (error nil))
520 (and (not data)
521 (setq data (list (url-file-directory-p url)
522 1 0 0 (cons 0 0) (cons 0 0) (cons 0 0)
523 -1 (mm-extension-to-mime
524 (url-file-extension
525 url-current-file))
526 nil 0 0)))
527 (kill-buffer " *url-temp*"))))))
528 ((member type '("ftp" "file"))
529 (let ((fname (if (url-host urlobj)
530 (concat "/"
531 (if (url-user urlobj)
532 (concat (url-user urlobj) "@")
533 "")
534 (url-host urlobj) ":"
535 (url-filename urlobj))
536 (url-filename urlobj))))
537 (setq data (or (file-attributes fname) (make-list 12 nil)))
538 (setcar (cdr (cdr (cdr (cdr (cdr (cdr (cdr (cdr data))))))))
539 (mm-extension-to-mime (url-file-extension fname)))))
540 (t nil))
541 data)))
542
543 (defun url-file-name-all-completions (file dirname &rest args)
544 "Return a list of all completions of file name FILE in directory DIR.
545 These are all file names in directory DIR which begin with FILE."
546 ;; need to rewrite
547 )
548
549 (defun url-file-name-completion (file dirname &rest args)
550 "Complete file name FILE in directory DIR.
551 Returns the longest string
552 common to all filenames in DIR that start with FILE.
553 If there is only one and FILE matches it exactly, returns t.
554 Returns nil if DIR contains no name starting with FILE."
555 (apply 'url-file-name-all-completions file dirname args))
556
557 (defun url-file-local-copy (file &rest args)
558 "Copy the file FILE into a temporary file on this machine.
559 Returns the name of the local copy, or nil, if FILE is directly
560 accessible."
561 nil)
562
563 (defun url-insert-file-contents (url &rest args)
564 "Insert the contents of the URL in this buffer."
565 (save-excursion
566 (let ((old-asynch url-be-asynchronous))
567 (setq-default url-be-asynchronous nil)
568 (url-retrieve url)
569 (setq-default url-be-asynchronous old-asynch)))
570 (insert-buffer url-working-buffer)
571 (setq buffer-file-name url)
572 (kill-buffer url-working-buffer))
573
574 (defun url-file-directory-p (url &rest args)
575 "Return t iff a url points to a directory"
576 (equal (substring url -1 nil) "/"))
577
578 (defun url-file-exists (url &rest args)
579 "Return t iff a file exists."
580 (let* ((urlobj (url-generic-parse-url url))
581 (type (url-type urlobj))
582 (exists nil))
583 (cond
584 ((equal type "http") ; use head
585 (let ((url-request-method "HEAD")
586 (url-request-data nil)
587 (url-working-buffer " *url-temp*"))
588 (save-excursion
589 (url-retrieve url)
590 (setq exists (or (cdr
591 (assoc "status" url-current-mime-headers)) 500))
592 (kill-buffer " *url-temp*")
593 (setq exists (and (>= exists 200) (< exists 300))))))
594 ((member type '("ftp" "file")) ; file-attributes
595 (let ((fname (if (url-host urlobj)
596 (concat "/"
597 (if (url-user urlobj)
598 (concat (url-user urlobj) "@")
599 "")
600 (url-host urlobj) ":"
601 (url-filename urlobj))
602 (url-filename urlobj))))
603 (setq exists (file-exists-p fname))))
604 (t nil))
605 exists))
606
607 ;;;###autoload
608 (defun url-normalize-url (url)
609 "Return a 'normalized' version of URL. This strips out default port
610 numbers, etc."
611 (let (type data grok retval)
612 (setq data (url-generic-parse-url url)
613 type (url-type data))
614 (if (member type '("www" "about" "mailto" "mailserver" "info"))
615 (setq retval url)
616 (setq retval (url-recreate-url data)))
617 retval))
618
619 ;;;###autoload
620 (defun url-buffer-visiting (url)
621 "Return the name of a buffer (if any) that is visiting URL."
622 (setq url (url-normalize-url url))
623 (let ((bufs (buffer-list))
624 (found nil))
625 (if (condition-case ()
626 (string-match "\\(.*\\)#" url)
627 (error nil))
628 (setq url (url-match url 1)))
629 (while (and bufs (not found))
630 (save-excursion
631 (set-buffer (car bufs))
632 (setq found (if (and
633 (not (equal (buffer-name (car bufs))
634 url-working-buffer))
635 (memq major-mode '(url-mode w3-mode))
636 (equal (url-view-url t) url)) (car bufs) nil)
637 bufs (cdr bufs))))
638 found))
639
640 (defun url-file-size (url &rest args)
641 "Return the size of a file in bytes, or -1 if it cannot be determined."
642 (let* ((urlobj (url-generic-parse-url url))
643 (type (url-type urlobj))
644 (size -1)
645 (data nil))
646 (cond
647 ((equal type "http") ; use head
648 (let ((url-request-method "HEAD")
649 (url-request-data nil)
650 (url-working-buffer " *url-temp*"))
651 (save-excursion
652 (url-retrieve url)
653 (setq size (or (cdr
654 (assoc "content-length" url-current-mime-headers))
655 -1))
656 (kill-buffer " *url-temp*"))))
657 ((member type '("ftp" "file")) ; file-attributes
658 (let ((fname (if (url-host urlobj)
659 (concat "/"
660 (if (url-user urlobj)
661 (concat (url-user urlobj) "@")
662 "")
663 (url-host urlobj) ":"
664 (url-filename urlobj))
665 (url-filename urlobj))))
666 (setq data (file-attributes fname)
667 size (nth 7 data))))
668 (t nil))
669 (cond
670 ((stringp size) (string-to-int size))
671 ((integerp size) size)
672 ((null size) -1)
673 (t -1))))
674
675 (defun url-generate-new-buffer-name (start)
676 "Create a new buffer name based on START."
677 (let ((x 1)
678 name)
679 (if (not (get-buffer start))
680 start
681 (progn
682 (setq name (format "%s<%d>" start x))
683 (while (get-buffer name)
684 (setq x (1+ x)
685 name (format "%s<%d>" start x)))
686 name))))
687
688 (defun url-generate-unique-filename (&optional fmt)
689 "Generate a unique filename in url-temporary-directory"
690 (if (not fmt)
691 (let ((base (format "url-tmp.%d" (user-real-uid)))
692 (fname "")
693 (x 0))
694 (setq fname (format "%s%d" base x))
695 (while (file-exists-p (expand-file-name fname url-temporary-directory))
696 (setq x (1+ x)
697 fname (concat base (int-to-string x))))
698 (expand-file-name fname url-temporary-directory))
699 (let ((base (concat "url" (int-to-string (user-real-uid))))
700 (fname "")
701 (x 0))
702 (setq fname (format fmt (concat base (int-to-string x))))
703 (while (file-exists-p (expand-file-name fname url-temporary-directory))
704 (setq x (1+ x)
705 fname (format fmt (concat base (int-to-string x)))))
706 (expand-file-name fname url-temporary-directory))))
707
708 (defun url-lazy-message (&rest args)
709 "Just like `message', but is a no-op if called more than once a second.
710 Will not do anything if url-show-status is nil."
711 (if (or (null url-show-status)
712 (= url-lazy-message-time
713 (setq url-lazy-message-time (nth 1 (current-time)))))
714 nil
715 (apply 'message args)))
716
717
718 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
719 ;;; Gateway Support
720 ;;; ---------------
721 ;;; Fairly good/complete gateway support
722 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
723 (defun url-kill-process (proc)
724 "Kill the process PROC - knows about all the various gateway types,
725 and acts accordingly."
726 (cond
727 ((eq url-gateway-method 'native) (delete-process proc))
728 ((eq url-gateway-method 'program) (kill-process proc))
729 (t (error "Unknown url-gateway-method %s" url-gateway-method))))
730
731 (defun url-accept-process-output (proc)
732 "Allow any pending output from subprocesses to be read by Emacs.
733 It is read into the process' buffers or given to their filter functions.
734 Where possible, this will not exit until some output is received from PROC,
735 or 1 second has elapsed."
736 (accept-process-output proc 1))
737
738 (defun url-process-status (proc)
739 "Return the process status of a url buffer"
740 (cond
741 ((memq url-gateway-method '(native ssl program)) (process-status proc))
742 (t (error "Unknown url-gateway-method %s" url-gateway-method))))
743
744 (defun url-open-stream (name buffer host service)
745 "Open a stream to a host"
746 (let ((tmp-gateway-method (if (and url-gateway-local-host-regexp
747 (not (eq 'ssl url-gateway-method))
748 (string-match
749 url-gateway-local-host-regexp
750 host))
751 'native
752 url-gateway-method))
753 (tcp-binary-process-output-services (if (stringp service)
754 (list service)
755 (list service
756 (int-to-string service)))))
757 (and (eq url-gateway-method 'tcp)
758 (require 'tcp)
759 (setq url-gateway-method 'native
760 tmp-gateway-method 'native))
761 (cond
762 ((eq tmp-gateway-method 'ssl)
763 (open-ssl-stream name buffer host service))
764 ((eq tmp-gateway-method 'native)
765 (if url-broken-resolution
766 (setq host
767 (cond
768 ((featurep 'ange-ftp) (ange-ftp-nslookup-host host))
769 ((featurep 'efs) (efs-nslookup-host host))
770 ((featurep 'efs-auto) (efs-nslookup-host host))
771 (t host))))
772 (let ((max-retries url-connection-retries)
773 (cur-retries 0)
774 (retry t)
775 (errobj nil)
776 (conn nil))
777 (while (and (not conn) retry)
778 (condition-case errobj
779 (setq conn (open-network-stream name buffer host service))
780 (error
781 (url-save-error errobj)
782 (save-window-excursion
783 (save-excursion
784 (switch-to-buffer-other-window " *url-error*")
785 (shrink-window-if-larger-than-buffer)
786 (goto-char (point-min))
787 (if (and (re-search-forward "in use" nil t)
788 (< cur-retries max-retries))
789 (progn
790 (setq retry t
791 cur-retries (1+ cur-retries))
792 (sleep-for 0.5))
793 (setq cur-retries 0
794 retry (funcall url-confirmation-func
795 (concat "Connection to " host
796 " failed, retry? "))))
797 (kill-buffer (current-buffer)))))))
798 (if conn
799 (progn
800 (if (featurep 'mule)
801 (save-excursion
802 (set-buffer (get-buffer-create buffer))
803 (setq mc-flag nil)
804 (set-process-coding-system conn *noconv* *noconv*)))
805 conn)
806 (error "Unable to connect to %s:%s" host service))))
807 ((eq tmp-gateway-method 'program)
808 (let ((proc (start-process name buffer url-gateway-telnet-program host
809 (int-to-string service)))
810 (tmp nil))
811 (save-excursion
812 (set-buffer buffer)
813 (setq tmp (point))
814 (while (not (progn
815 (goto-char (point-min))
816 (re-search-forward
817 url-gateway-telnet-ready-regexp nil t)))
818 (url-accept-process-output proc))
819 (delete-region tmp (point))
820 (goto-char (point-min))
821 (if (re-search-forward "connect:" nil t)
822 (progn
823 (condition-case ()
824 (delete-process proc)
825 (error nil))
826 (url-replace-regexp ".*connect:.*" "")
827 nil)
828 proc))))
829 (t (error "Unknown url-gateway-method %s" url-gateway-method)))))
830
831
832 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
833 ;;; Miscellaneous functions
834 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
835 (defun url-setup-privacy-info ()
836 (interactive)
837 (setq url-system-type
838 (cond
839 ((or (eq url-privacy-level 'paranoid)
840 (and (listp url-privacy-level)
841 (memq 'os url-privacy-level)))
842 nil)
843 ((eq system-type 'Apple-Macintosh) "Macintosh")
844 ((eq system-type 'next-mach) "NeXT")
845 ((eq system-type 'windows-nt) "Windows-NT; 32bit")
846 ((eq system-type 'ms-windows) "Windows; 16bit")
847 ((eq system-type 'ms-dos) "MS-DOS; 32bit")
848 ((and (eq system-type 'vax-vms) (device-type))
849 "VMS; X11")
850 ((eq system-type 'vax-vms) "VMS; TTY")
851 ((eq (device-type) 'x) "X11")
852 ((eq (device-type) 'ns) "NeXTStep")
853 ((eq (device-type) 'pm) "OS/2")
854 ((eq (device-type) 'win32) "Windows; 32bit")
855 ((eq (device-type) 'tty) "(Unix?); TTY")
856 (t "UnknownPlatform")))
857
858 ;; Set up the entity definition for PGP and PEM authentication
859 (setq url-pgp/pem-entity (or url-pgp/pem-entity
860 user-mail-address
861 (format "%s@%s" (user-real-login-name)
862 (system-name))))
863
864 (setq url-personal-mail-address (or url-personal-mail-address
865 url-pgp/pem-entity
866 user-mail-address))
867
868 (if (or (memq url-privacy-level '(paranoid high))
869 (and (listp url-privacy-level)
870 (memq 'email url-privacy-level)))
871 (setq url-personal-mail-address nil))
872
873 (if (or (eq url-privacy-level 'paranoid)
874 (and (listp url-privacy-level)
875 (memq 'os url-privacy-level)))
876 (setq url-os-type nil)
877 (let ((vers (emacs-version)))
878 (if (string-match "(\\([^, )]+\\))$" vers)
879 (setq url-os-type (url-match vers 1))
880 (setq url-os-type (symbol-name system-type))))))
881
882 (defun url-handle-no-scheme (url)
883 (let ((temp url-registered-protocols)
884 (found nil))
885 (while (and temp (not found))
886 (if (and (not (member (car (car temp)) '("auto" "www")))
887 (string-match (concat "^" (car (car temp)) "\\.")
888 url))
889 (setq found t)
890 (setq temp (cdr temp))))
891 (cond
892 (found ; Found something like ftp.spry.com
893 (url-retrieve (concat (car (car temp)) "://" url)))
894 ((string-match "^www\\." url)
895 (url-retrieve (concat "http://" url)))
896 ((string-match "\\(\\.[^\\.]+\\)\\(\\.[^\\.]+\\)" url)
897 ;; Ok, we have at least two dots in the filename, just stick http on it
898 (url-retrieve (concat "http://" url)))
899 (t
900 (url-retrieve (concat "http://www." url ".com"))))))
901
902 (defun url-setup-save-timer ()
903 "Reset the history list timer."
904 (interactive)
905 (cond
906 ((featurep 'itimer)
907 (if (get-itimer "url-history-saver")
908 (delete-itimer (get-itimer "url-history-saver")))
909 (start-itimer "url-history-saver" 'url-write-global-history
910 url-global-history-save-interval
911 url-global-history-save-interval))
912 ((fboundp 'run-at-time)
913 (run-at-time url-global-history-save-interval
914 url-global-history-save-interval
915 'url-write-global-history))
916 (t nil)))
917
918 (defvar url-download-minor-mode nil)
919
920 (defun url-download-minor-mode (on)
921 (setq url-download-minor-mode (if on
922 (1+ (or url-download-minor-mode 0))
923 (1- (or url-download-minor-mode 1))))
924 (if (<= url-download-minor-mode 0)
925 (setq url-download-minor-mode nil)))
926
927 (defun url-do-setup ()
928 "Do setup - this is to avoid conflict with user settings when URL is
929 dumped with emacs."
930 (if url-setup-done
931 nil
932
933 (add-minor-mode 'url-download-minor-mode " Webbing" nil)
934 ;; Decide what type of process-get to use
935 ;(url-setup-process-get)
936
937 ;; Make OS/2 happy
938 (setq tcp-binary-process-input-services
939 (append '("http" "80")
940 tcp-binary-process-input-services))
941
942 ;; Register all the protocols we can handle
943 (url-register-protocol 'file)
944 (url-register-protocol 'ftp nil nil "21")
945 (url-register-protocol 'gopher nil nil "70")
946 (url-register-protocol 'http nil nil "80")
947 (url-register-protocol 'https nil nil "443")
948 (url-register-protocol 'nfs nil nil "2049")
949 (url-register-protocol 'info nil 'url-identity-expander)
950 (url-register-protocol 'mailserver nil 'url-identity-expander)
951 (url-register-protocol 'finger nil 'url-identity-expander "79")
952 (url-register-protocol 'mailto nil 'url-identity-expander)
953 (url-register-protocol 'news nil 'url-identity-expander "119")
954 (url-register-protocol 'nntp nil 'url-identity-expander "119")
955 (url-register-protocol 'irc nil 'url-identity-expander "6667")
956 (url-register-protocol 'rlogin)
957 (url-register-protocol 'shttp nil nil "80")
958 (url-register-protocol 'telnet)
959 (url-register-protocol 'tn3270)
960 (url-register-protocol 'wais)
961 (url-register-protocol 'x-exec)
962 (url-register-protocol 'proxy)
963 (url-register-protocol 'auto 'url-handle-no-scheme)
964
965 ;; Register all the authentication schemes we can handle
966 (url-register-auth-scheme "basic" nil 4)
967 (url-register-auth-scheme "digest" nil 7)
968
969 ;; Filename handler stuff for emacsen that support it
970 (url-setup-file-name-handlers)
971
972 (setq url-cookie-file
973 (or url-cookie-file
974 (expand-file-name "~/.w3cookies")))
975
976 (setq url-global-history-file
977 (or url-global-history-file
978 (and (memq system-type '(ms-dos ms-windows))
979 (expand-file-name "~/mosaic.hst"))
980 (and (memq system-type '(axp-vms vax-vms))
981 (expand-file-name "~/mosaic.global-history"))
982 (condition-case ()
983 (expand-file-name "~/.mosaic-global-history")
984 (error nil))))
985
986 ;; Parse the global history file if it exists, so that it can be used
987 ;; for URL completion, etc.
988 (if (and url-global-history-file
989 (file-exists-p url-global-history-file))
990 (url-parse-global-history))
991
992 ;; Setup save timer
993 (and url-global-history-save-interval (url-setup-save-timer))
994
995 (if (and url-cookie-file
996 (file-exists-p url-cookie-file))
997 (url-cookie-parse-file url-cookie-file))
998
999 ;; Read in proxy gateways
1000 (let ((noproxy (and (not (assoc "no_proxy" url-proxy-services))
1001 (or (getenv "NO_PROXY")
1002 (getenv "no_PROXY")
1003 (getenv "no_proxy")))))
1004 (if noproxy
1005 (setq url-proxy-services
1006 (cons (cons "no_proxy"
1007 (concat "\\("
1008 (mapconcat
1009 (function
1010 (lambda (x)
1011 (cond
1012 ((= x ?,) "\\|")
1013 ((= x ? ) "")
1014 ((= x ?.) (regexp-quote "."))
1015 ((= x ?*) ".*")
1016 ((= x ??) ".")
1017 (t (char-to-string x)))))
1018 noproxy "") "\\)"))
1019 url-proxy-services))))
1020
1021 ;; Set the url-use-transparent with decent defaults
1022 (if (not (eq (device-type) 'tty))
1023 (setq url-use-transparent nil))
1024 (and url-use-transparent (require 'transparent))
1025
1026 ;; Set the password entry function based on user defaults or guess
1027 ;; based on which remote-file-access package they are using.
1028 (cond
1029 (url-passwd-entry-func nil) ; Already been set
1030 ((boundp 'read-passwd) ; Use secure password if available
1031 (setq url-passwd-entry-func 'read-passwd))
1032 ((or (featurep 'efs) ; Using EFS
1033 (featurep 'efs-auto)) ; or autoloading efs
1034 (if (not (fboundp 'read-passwd))
1035 (autoload 'read-passwd "passwd" "Read in a password" nil))
1036 (setq url-passwd-entry-func 'read-passwd))
1037 ((or (featurep 'ange-ftp) ; Using ange-ftp
1038 (and (boundp 'file-name-handler-alist)
1039 (not (string-match "Lucid" (emacs-version)))))
1040 (setq url-passwd-entry-func 'ange-ftp-read-passwd))
1041 (t
1042 (url-warn 'security
1043 "Can't determine how to read passwords, winging it.")))
1044
1045 ;; Set up the news service if they haven't done so
1046 (setq url-news-server
1047 (cond
1048 (url-news-server url-news-server)
1049 ((and (boundp 'gnus-default-nntp-server)
1050 (not (equal "" gnus-default-nntp-server)))
1051 gnus-default-nntp-server)
1052 ((and (boundp 'gnus-nntp-server)
1053 (not (null gnus-nntp-server))
1054 (not (equal "" gnus-nntp-server)))
1055 gnus-nntp-server)
1056 ((and (boundp 'nntp-server-name)
1057 (not (null nntp-server-name))
1058 (not (equal "" nntp-server-name)))
1059 nntp-server-name)
1060 ((getenv "NNTPSERVER") (getenv "NNTPSERVER"))
1061 (t "news")))
1062
1063 ;; Set up the MIME accept string if they haven't got it hardcoded yet
1064 (or url-mime-accept-string
1065 (setq url-mime-accept-string (url-parse-viewer-types)))
1066 (or url-mime-encoding-string
1067 (setq url-mime-encoding-string
1068 (mapconcat 'car
1069 mm-content-transfer-encodings
1070 ", ")))
1071
1072 (url-setup-privacy-info)
1073 (run-hooks 'url-load-hook)
1074 (setq url-setup-done t)))
1075
1076 (defun url-cache-file-writable-p (file)
1077 "Follows the documentation of file-writable-p, unlike file-writable-p."
1078 (and (file-writable-p file)
1079 (if (file-exists-p file)
1080 (not (file-directory-p file))
1081 (file-directory-p (file-name-directory file)))))
1082
1083 (defun url-prepare-cache-for-file (file)
1084 "Makes it possible to cache data in FILE.
1085 Creates any necessary parent directories, deleting any non-directory files
1086 that would stop this. Returns nil if parent directories can not be
1087 created. If FILE already exists as a non-directory, it changes
1088 permissions of FILE or deletes FILE to make it possible to write a new
1089 version of FILE. Returns nil if this can not be done. Returns nil if
1090 FILE already exists as a directory. Otherwise, returns t, indicating that
1091 FILE can be created or overwritten."
1092
1093 ;; COMMENT: We don't delete directories because that requires
1094 ;; recursively deleting the directory's contents, which might
1095 ;; eliminate a substantial portion of the cache.
1096
1097 (cond
1098 ((url-cache-file-writable-p file)
1099 t)
1100 ((file-directory-p file)
1101 nil)
1102 (t
1103 (catch 'upcff-tag
1104 (let ((dir (file-name-directory file))
1105 dir-parent dir-last-component)
1106 (if (string-equal dir file)
1107 ;; *** Should I have a warning here?
1108 ;; FILE must match a pattern like /foo/bar/, indicating it is a
1109 ;; name only suitable for a directory. So presume we won't be
1110 ;; able to overwrite FILE and return nil.
1111 (throw 'upcff-tag nil))
1112
1113 ;; Make sure the containing directory exists, or throw a failure
1114 ;; if we can't create it.
1115 (if (file-directory-p dir)
1116 nil
1117 (or (fboundp 'make-directory)
1118 (throw 'upcff-tag nil))
1119 (make-directory dir t)
1120 ;; make-directory silently fails if there is an obstacle, so
1121 ;; we must verify its results.
1122 (if (file-directory-p dir)
1123 nil
1124 ;; Look at prefixes of the path to find the obstacle that is
1125 ;; stopping us from making the directory. Unfortunately, there
1126 ;; is no portable function in Emacs to find the parent directory
1127 ;; of a *directory*. So this code may not work on VMS.
1128 (while (progn
1129 (if (eq ?/ (aref dir (1- (length dir))))
1130 (setq dir (substring dir 0 -1))
1131 ;; Maybe we're on VMS where the syntax is different.
1132 (throw 'upcff-tag nil))
1133 (setq dir-parent (file-name-directory dir))
1134 (not (file-directory-p dir-parent)))
1135 (setq dir dir-parent))
1136 ;; We have found the longest path prefix that exists as a
1137 ;; directory. Deal with any obstacles in this directory.
1138 (if (file-exists-p dir)
1139 (condition-case nil
1140 (delete-file dir)
1141 (error (throw 'upcff-tag nil))))
1142 (if (file-exists-p dir)
1143 (throw 'upcff-tag nil))
1144 ;; Try making the directory again.
1145 (setq dir (file-name-directory file))
1146 (make-directory dir t)
1147 (or (file-directory-p dir)
1148 (throw 'upcff-tag nil))))
1149
1150 ;; The containing directory exists. Let's see if there is
1151 ;; something in the way in this directory.
1152 (if (url-cache-file-writable-p file)
1153 (throw 'upcff-tag t)
1154 (condition-case nil
1155 (delete-file file)
1156 (error (throw 'upcff-tag nil))))
1157
1158 ;; The return value, if we get this far.
1159 (url-cache-file-writable-p file))))))
1160
1161 (defun url-store-in-cache (&optional buff)
1162 "Store buffer BUFF in the cache"
1163 (if (or (not (get-buffer buff))
1164 (member url-current-type '("www" "about" "https" "shttp"
1165 "news" "mailto"))
1166 (and (member url-current-type '("file" "ftp" nil))
1167 (not url-current-server))
1168 )
1169 nil
1170 (save-excursion
1171 (and buff (set-buffer buff))
1172 (let* ((fname (url-create-cached-filename (url-view-url t)))
1173 (fname-hdr (concat (if (memq system-type '(ms-windows ms-dos os2))
1174 (url-file-extension fname t)
1175 fname) ".hdr"))
1176 (info (mapcar (function (lambda (var)
1177 (cons (symbol-name var)
1178 (symbol-value var))))
1179 '( url-current-content-length
1180 url-current-file
1181 url-current-isindex
1182 url-current-mime-encoding
1183 url-current-mime-headers
1184 url-current-mime-type
1185 url-current-port
1186 url-current-server
1187 url-current-type
1188 url-current-user
1189 ))))
1190 (cond ((and (url-prepare-cache-for-file fname)
1191 (url-prepare-cache-for-file fname-hdr))
1192 (write-region (point-min) (point-max) fname nil 5)
1193 (set-buffer (get-buffer-create " *cache-tmp*"))
1194 (erase-buffer)
1195 (insert "(setq ")
1196 (mapcar
1197 (function
1198 (lambda (x)
1199 (insert (car x) " "
1200 (cond ((null (setq x (cdr x))) "nil")
1201 ((stringp x) (prin1-to-string x))
1202 ((listp x) (concat "'" (prin1-to-string x)))
1203 ((numberp x) (int-to-string x))
1204 (t "'???")) "\n")))
1205 info)
1206 (insert ")\n")
1207 (write-region (point-min) (point-max) fname-hdr nil 5)))))))
1208
1209
1210 (defun url-is-cached (url)
1211 "Return non-nil if the URL is cached."
1212 (let* ((fname (url-create-cached-filename url))
1213 (attribs (file-attributes fname)))
1214 (and fname ; got a filename
1215 (file-exists-p fname) ; file exists
1216 (not (eq (nth 0 attribs) t)) ; It's not a directory
1217 (nth 5 attribs)))) ; Can get last mod-time
1218
1219 (defun url-create-cached-filename-using-md5 (url)
1220 (if url
1221 (expand-file-name (md5 url)
1222 (concat url-temporary-directory "/"
1223 (user-real-login-name)))))
1224
1225 (defun url-create-cached-filename (url)
1226 "Return a filename in the local cache for URL"
1227 (if url
1228 (let* ((url url)
1229 (urlobj (if (vectorp url)
1230 url
1231 (url-generic-parse-url url)))
1232 (protocol (url-type urlobj))
1233 (hostname (url-host urlobj))
1234 (host-components
1235 (cons
1236 (user-real-login-name)
1237 (cons (or protocol "file")
1238 (nreverse
1239 (delq nil
1240 (mm-string-to-tokens
1241 (or hostname "localhost") ?.))))))
1242 (fname (url-filename urlobj)))
1243 (if (and fname (/= (length fname) 0) (= (aref fname 0) ?/))
1244 (setq fname (substring fname 1 nil)))
1245 (if fname
1246 (let ((slash nil))
1247 (setq fname
1248 (mapconcat
1249 (function
1250 (lambda (x)
1251 (cond
1252 ((and (= ?/ x) slash)
1253 (setq slash nil)
1254 "%2F")
1255 ((= ?/ x)
1256 (setq slash t)
1257 "/")
1258 (t
1259 (setq slash nil)
1260 (char-to-string x))))) fname ""))))
1261
1262 (if (and fname (memq system-type '(ms-windows ms-dos windows-nt))
1263 (string-match "\\([A-Za-z]\\):[/\\]" fname))
1264 (setq fname (concat (url-match fname 1) "/"
1265 (substring fname (match-end 0)))))
1266
1267 (setq fname (and fname
1268 (mapconcat
1269 (function (lambda (x)
1270 (if (= x ?~) "" (char-to-string x))))
1271 fname ""))
1272 fname (cond
1273 ((null fname) nil)
1274 ((or (string= "" fname) (string= "/" fname))
1275 url-directory-index-file)
1276 ((= (string-to-char fname) ?/)
1277 (if (string= (substring fname -1 nil) "/")
1278 (concat fname url-directory-index-file)
1279 (substring fname 1 nil)))
1280 (t
1281 (if (string= (substring fname -1 nil) "/")
1282 (concat fname url-directory-index-file)
1283 fname))))
1284
1285 ;; Honor hideous 8.3 filename limitations on dos and windows
1286 ;; we don't have to worry about this in Windows NT/95 (or OS/2?)
1287 (if (and fname (memq system-type '(ms-windows ms-dos)))
1288 (let ((base (url-file-extension fname t))
1289 (ext (url-file-extension fname nil)))
1290 (setq fname (concat (substring base 0 (min 8 (length base)))
1291 (substring ext 0 (min 4 (length ext)))))
1292 (setq host-components
1293 (mapcar
1294 (function
1295 (lambda (x)
1296 (if (> (length x) 8)
1297 (concat
1298 (substring x 0 8) "."
1299 (substring x 8 (min (length x) 11)))
1300 x)))
1301 host-components))))
1302
1303 (and fname
1304 (expand-file-name fname
1305 (expand-file-name
1306 (mapconcat 'identity host-components "/")
1307 url-temporary-directory))))))
1308
1309 (defun url-extract-from-cache (fnam)
1310 "Extract FNAM from the local disk cache"
1311 (set-buffer (get-buffer-create url-working-buffer))
1312 (erase-buffer)
1313 (setq url-current-mime-viewer nil)
1314 (insert-file-contents-literally fnam)
1315 (load (concat (if (memq system-type '(ms-windows ms-dos os2))
1316 (url-file-extension fnam t)
1317 fnam) ".hdr") t t))
1318
1319 ;;;###autoload
1320 (defun url-get-url-at-point (&optional pt)
1321 "Get the URL closest to point, but don't change your
1322 position. Has a preference for looking backward when not
1323 directly on a symbol."
1324 ;; Not at all perfect - point must be right in the name.
1325 (save-excursion
1326 (if pt (goto-char pt))
1327 (let ((filename-chars "%.?@a-zA-Z0-9---()_/:~=&") start url)
1328 (save-excursion
1329 ;; first see if you're just past a filename
1330 (if (not (eobp))
1331 (if (looking-at "[] \t\n[{}()]") ; whitespace or some parens
1332 (progn
1333 (skip-chars-backward " \n\t\r({[]})")
1334 (if (not (bobp))
1335 (backward-char 1)))))
1336 (if (string-match (concat "[" filename-chars "]")
1337 (char-to-string (following-char)))
1338 (progn
1339 (skip-chars-backward filename-chars)
1340 (setq start (point))
1341 (skip-chars-forward filename-chars))
1342 (setq start (point)))
1343 (setq url (if (fboundp 'buffer-substring-no-properties)
1344 (buffer-substring-no-properties start (point))
1345 (buffer-substring start (point)))))
1346 (if (string-match "^URL:" url)
1347 (setq url (substring url 4 nil)))
1348 (if (string-match "\\.$" url)
1349 (setq url (substring url 0 -1)))
1350 (if (not (string-match url-nonrelative-link url))
1351 (setq url nil))
1352 url)))
1353
1354 (defun url-eat-trailing-space (x)
1355 ;; Remove spaces/tabs at the end of a string
1356 (let ((y (1- (length x)))
1357 (skip-chars (list ? ?\t ?\n)))
1358 (while (and (>= y 0) (memq (aref x y) skip-chars))
1359 (setq y (1- y)))
1360 (substring x 0 (1+ y))))
1361
1362 (defun url-strip-leading-spaces (x)
1363 ;; Remove spaces at the front of a string
1364 (let ((y (1- (length x)))
1365 (z 0)
1366 (skip-chars (list ? ?\t ?\n)))
1367 (while (and (<= z y) (memq (aref x z) skip-chars))
1368 (setq z (1+ z)))
1369 (substring x z nil)))
1370
1371 (defun url-convert-newlines-to-spaces (x)
1372 "Convert newlines and carriage returns embedded in a string into spaces,
1373 and swallow following whitespace.
1374 The argument is not side-effected, but may be returned by this function."
1375 (if (string-match "[\n\r]+\\s-*" x) ; [\\n\\r\\t ]
1376 (concat (substring x 0 (match-beginning 0)) " "
1377 (url-convert-newlines-to-spaces
1378 (substring x (match-end 0))))
1379 x))
1380
1381 ;; Test cases
1382 ;; (url-convert-newlines-to-spaces "foo bar") ; nothing happens
1383 ;; (url-convert-newlines-to-spaces "foo\n \t bar") ; whitespace converted
1384 ;;
1385 ;; This implementation doesn't mangle the match-data, is fast, and doesn't
1386 ;; create garbage, but it leaves whitespace.
1387 ;; (defun url-convert-newlines-to-spaces (x)
1388 ;; "Convert newlines and carriage returns embedded in a string into spaces.
1389 ;; The string is side-effected, then returned."
1390 ;; (let ((i 0)
1391 ;; (limit (length x)))
1392 ;; (while (< i limit)
1393 ;; (if (or (= ?\n (aref x i))
1394 ;; (= ?\r (aref x i)))
1395 ;; (aset x i ? ))
1396 ;; (setq i (1+ i)))
1397 ;; x))
1398
1399 (defun url-expand-file-name (url &optional default)
1400 "Convert URL to a fully specified URL, and canonicalize it.
1401 Second arg DEFAULT is a URL to start with if URL is relative.
1402 If DEFAULT is nil or missing, the current buffer's URL is used.
1403 Path components that are `.' are removed, and
1404 path components followed by `..' are removed, along with the `..' itself."
1405 (if url
1406 (setq url (mapconcat (function (lambda (x)
1407 (if (= x ?\n) "" (char-to-string x))))
1408 (url-strip-leading-spaces
1409 (url-eat-trailing-space url)) "")))
1410 (cond
1411 ((null url) nil) ; Something hosed! Be graceful
1412 ((string-match "^#" url) ; Offset link, use it raw
1413 url)
1414 (t
1415 (let* ((urlobj (url-generic-parse-url url))
1416 (inhibit-file-name-handlers t)
1417 (defobj (cond
1418 ((vectorp default) default)
1419 (default (url-generic-parse-url default))
1420 ((and (null default) url-current-object)
1421 url-current-object)
1422 (t (url-generic-parse-url (url-view-url t)))))
1423 (expander (cdr-safe
1424 (cdr-safe
1425 (assoc (or (url-type urlobj)
1426 (url-type defobj))
1427 url-registered-protocols)))))
1428 (if (string-match "^//" url)
1429 (setq urlobj (url-generic-parse-url (concat (url-type defobj) ":"
1430 url))))
1431 (if (fboundp expander)
1432 (funcall expander urlobj defobj)
1433 (message "Unknown URL scheme: %s" (or (url-type urlobj)
1434 (url-type defobj)))
1435 (url-identity-expander urlobj defobj))
1436 (url-recreate-url urlobj)))))
1437
1438 (defun url-default-expander (urlobj defobj)
1439 ;; The default expansion routine - urlobj is modified by side effect!
1440 (url-set-type urlobj (or (url-type urlobj) (url-type defobj)))
1441 (url-set-port urlobj (or (url-port urlobj)
1442 (and (string= (url-type urlobj)
1443 (url-type defobj))
1444 (url-port defobj))))
1445 (if (not (string= "file" (url-type urlobj)))
1446 (url-set-host urlobj (or (url-host urlobj) (url-host defobj))))
1447 (if (string= "ftp" (url-type urlobj))
1448 (url-set-user urlobj (or (url-user urlobj) (url-user defobj))))
1449 (if (string= (url-filename urlobj) "")
1450 (url-set-filename urlobj "/"))
1451 (if (string-match "^/" (url-filename urlobj))
1452 nil
1453 (url-set-filename urlobj
1454 (url-remove-relative-links
1455 (concat (url-basepath (url-filename defobj))
1456 (url-filename urlobj))))))
1457
1458 (defun url-identity-expander (urlobj defobj)
1459 (url-set-type urlobj (or (url-type urlobj) (url-type defobj))))
1460
1461 (defun url-hexify-string (str)
1462 "Escape unsafe characters in STR, using %XX hex escapes."
1463 (if (and (featurep 'mule) str)
1464 (setq str (code-convert-string
1465 str *internal* url-mule-retrieval-coding-system)))
1466 (setq str (mapconcat
1467 (function
1468 (lambda (char)
1469 (if (or (> char ?z)
1470 (< char ?-)
1471 (and (< char ?a)
1472 (> char ?Z))
1473 (and (< char ?@)
1474 (> char ?:)))
1475 (if (< char 16)
1476 (upcase (format "%%0%x" char))
1477 (upcase (format "%%%x" char)))
1478 (char-to-string char)))) str "")))
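;; Illustrative usage, not part of the original source:
;;
;;   (url-hexify-string "foo bar")   ; => "foo%20bar"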
1479
1480 (defun url-make-sequence (start end)
1481 "Make a sequence (list) of numbers from START to END"
1482 (cond
1483 ((= start end) '())
1484 ((> start end) '())
1485 (t
1486 (let ((sqnc '()))
1487 (while (<= start end)
1488 (setq sqnc (cons end sqnc)
1489 end (1- end)))
1490 sqnc))))
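;; Illustrative usage, not part of the original source:
;;
;;   (url-make-sequence 1 5)   ; => (1 2 3 4 5)
;;   (url-make-sequence 5 1)   ; => nil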
1491
1492 (defun url-file-extension (fname &optional x)
1493 "Return the filename extension of FNAME. If optional variable X is t,
1494 then return the basename of the file with the extension stripped off."
1495 (if (and fname (string-match "\\.[^./]+$" fname))
1496 (if x (substring fname 0 (match-beginning 0))
1497 (substring fname (match-beginning 0) nil))
1498 ;;
1499 ;; If fname has no extension, and x then return fname itself instead of
1500 ;; nothing. When caching it allows the correct .hdr file to be produced
1501 ;; for filenames without extension.
1502 ;;
1503 (if x
1504 fname
1505 "")))
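;; Illustrative usage, not part of the original source:
;;
;;   (url-file-extension "/foo/index.html")     ; => ".html"
;;   (url-file-extension "/foo/index.html" t)   ; => "/foo/index"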
1506
1507 (defun url-basepath (file &optional x)
1508 "Return the base pathname of FILE, or the actual filename if X is true"
1509 (cond
1510 ((null file) "")
1511 (x (file-name-nondirectory file))
1512 (t (file-name-directory file))))
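;; Illustrative usage, not part of the original source:
;;
;;   (url-basepath "/foo/bar.html")     ; => "/foo/"
;;   (url-basepath "/foo/bar.html" t)   ; => "bar.html"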
1513
1514 (defun url-unhex (x)
1515 (if (> x ?9)
1516 (if (>= x ?a)
1517 (+ 10 (- x ?a))
1518 (+ 10 (- x ?A)))
1519 (- x ?0)))
1520
1521 (defun url-unhex-string (str)
1522 "Decode %XX escapes (embedded spaces, etc.) in a url."
1523 (setq str (or str ""))
1524 (let ((tmp ""))
1525 (while (string-match "%[0-9a-f][0-9a-f]" str)
1526 (let* ((start (match-beginning 0))
1527 (ch1 (url-unhex (elt str (+ start 1))))
1528 (code (+ (* 16 ch1)
1529 (url-unhex (elt str (+ start 2))))))
1530 (setq tmp
1531 (concat
1532 tmp (substring str 0 start)
1533 (if (or (= code ?\n) (= code ?\r)) " " (char-to-string code)))
1534 str (substring str (match-end 0)))))
1535 (setq tmp (concat tmp str))
1536 tmp))
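;; Illustrative usage, not part of the original source (note that the
;; regexp above only recognizes lowercase hex digits):
;;
;;   (url-unhex-string "foo%20bar%2fbaz")   ; => "foo bar/baz"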
1537
1538 (defun url-clean-text ()
1539 "Clean up a buffer, removing any excess garbage from a gateway mechanism,
1540 and decoding any MIME content-transfer-encoding used."
1541 (set-buffer url-working-buffer)
1542 (goto-char (point-min))
1543 (url-replace-regexp "Connection closed by.*" "")
1544 (goto-char (point-min))
1545 (url-replace-regexp "Process WWW.*" ""))
1546
1547 (defun url-uncompress ()
1548 "Do any necessary uncompression on `url-working-buffer'"
1549 (set-buffer url-working-buffer)
1550 (if (not url-inhibit-uncompression)
1551 (let* ((extn (url-file-extension url-current-file))
1552 (decoder nil)
1553 (code-1 (cdr-safe
1554 (assoc "content-transfer-encoding"
1555 url-current-mime-headers)))
1556 (code-2 (cdr-safe
1557 (assoc "content-encoding" url-current-mime-headers)))
1558 (code-3 (and (not code-1) (not code-2)
1559 (cdr-safe (assoc extn url-uncompressor-alist))))
1560 (done nil)
1561 (default-process-coding-system
1562 (if (featurep 'mule) (cons *noconv* *noconv*))))
1563 (mapcar
1564 (function
1565 (lambda (code)
1566 (setq decoder (and (not (member code done))
1567 (cdr-safe
1568 (assoc code mm-content-transfer-encodings)))
1569 done (cons code done))
1570 (cond
1571 ((null decoder) nil)
1572 ((stringp decoder)
1573 (message "Decoding...")
1574 (call-process-region (point-min) (point-max) decoder t t nil)
1575 (message "Decoding... done."))
1576 ((listp decoder)
1577 (apply 'call-process-region (point-min) (point-max)
1578 (car decoder) t t nil (cdr decoder)))
1579 ((and (symbolp decoder) (fboundp decoder))
1580 (message "Decoding...")
1581 (funcall decoder (point-min) (point-max))
1582 (message "Decoding... done."))
1583 (t
1584 (error "Bad entry for %s in `mm-content-transfer-encodings'"
1585 code)))))
1586 (list code-1 code-2 code-3))))
1587 (set-buffer-modified-p nil))
1588
1589 (defun url-filter (proc string)
1590 (save-excursion
1591 (set-buffer url-working-buffer)
1592 (insert string)
1593 (if (string-match "\nConnection closed by" string)
1594 (progn (set-process-filter proc nil)
1595 (url-sentinel proc string))))
1596 string)
1597
1598 (defun url-default-callback (buf)
1599 (url-download-minor-mode nil)
1600 (cond
1601 ((save-excursion (set-buffer buf)
1602 (and url-current-callback-func
1603 (fboundp url-current-callback-func)))
1604 (save-excursion
1605 (save-window-excursion
1606 (set-buffer buf)
1607 (cond
1608 ((listp url-current-callback-data)
1609 (apply url-current-callback-func
1610 url-current-callback-data))
1611 (url-current-callback-data
1612 (funcall url-current-callback-func
1613 url-current-callback-data))
1614 (t
1615 (funcall url-current-callback-func))))))
1616 ((fboundp 'w3-sentinel)
1617 (set-variable 'w3-working-buffer buf)
1618 (w3-sentinel))
1619 (t
1620 (message "Retrieval for %s complete." buf))))
1621
1622 (defun url-sentinel (proc string)
1623 (if (buffer-name (process-buffer proc))
1624 (save-excursion
1625 (set-buffer (get-buffer (process-buffer proc)))
1626 (remove-hook 'after-change-functions 'url-after-change-function)
1627 (let ((status nil)
1628 (url-working-buffer (current-buffer)))
1629 (if url-be-asynchronous
1630 (progn
1631 (widen)
1632 (url-clean-text)
1633 (cond
1634 ((and (null proc) (not (get-buffer url-working-buffer))) nil)
1635 ((url-mime-response-p)
1636 (setq status (url-parse-mime-headers))))
1637 (if (not url-current-mime-type)
1638 (setq url-current-mime-type (mm-extension-to-mime
1639 (url-file-extension
1640 url-current-file))))))
1641 (if (member status '(401 301 302 303 204))
1642 nil
1643 (funcall url-default-retrieval-proc (buffer-name)))))
1644 (url-warn 'url (format "Process %s completed with no buffer!" proc))))
1645
1646 (defun url-remove-relative-links (name)
1647 ;; Strip . and .. from pathnames
1648 (let ((new (if (not (string-match "^/" name))
1649 (concat "/" name)
1650 name)))
1651 (while (string-match "/\\([^/]*/\\.\\./\\)" new)
1652 (setq new (concat (substring new 0 (match-beginning 1))
1653 (substring new (match-end 1)))))
1654 (while (string-match "/\\(\\./\\)" new)
1655 (setq new (concat (substring new 0 (match-beginning 1))
1656 (substring new (match-end 1)))))
1657 (while (string-match "^/\\.\\.\\(/\\)" new)
1658 (setq new (substring new (match-beginning 1) nil)))
1659 new))
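
;; A hedged illustration (not in the original source) of the path cleanup
;; performed above; the input is hypothetical:
;;
;;   (url-remove-relative-links "/a/b/../c/./d")  ; => "/a/c/d"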
1660
1661 (defun url-truncate-url-for-viewing (url &optional width)
1662 "Return a shortened version of URL that is WIDTH characters or less wide.
1663 WIDTH defaults to the current frame width."
1664 (let* ((fr-width (or width (frame-width)))
1665 (str-width (length url))
1666 (tail (file-name-nondirectory url))
1667 (fname nil)
1668 (modified 0)
1669 (urlobj nil))
1670 ;; The first thing that can go is the search string
1671 (if (and (>= str-width fr-width)
1672 (string-match "?" url))
1673 (setq url (concat (substring url 0 (match-beginning 0)) "?...")
1674 str-width (length url)
1675 tail (file-name-nondirectory url)))
1676 (if (< str-width fr-width)
1677 nil ; Hey, we are done!
1678 (setq urlobj (url-generic-parse-url url)
1679 fname (url-filename urlobj)
1680 fr-width (- fr-width 4))
1681 (while (and (>= str-width fr-width)
1682 (string-match "/" fname))
1683 (setq fname (substring fname (match-end 0) nil)
1684 modified (1+ modified))
1685 (url-set-filename urlobj fname)
1686 (setq url (url-recreate-url urlobj)
1687 str-width (length url)))
1688 (if (> modified 1)
1689 (setq fname (concat "/.../" fname))
1690 (setq fname (concat "/" fname)))
1691 (url-set-filename urlobj fname)
1692 (setq url (url-recreate-url urlobj)))
1693 url))
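
;; A hedged sketch (not in the original source) of the truncation above,
;; using a made-up URL and WIDTH: the query string is replaced with "?..."
;; first, then leading directories collapse into "/.../" (best effort; the
;; result can still be wider than WIDTH).
;;
;;   (url-truncate-url-for-viewing "http://host/a/b/c/file.html" 20)
;;   ;; => something like "http://host/.../file.html"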
1694
1695 (defun url-view-url (&optional no-show)
1696 "View the current document's URL. Optional argument NO-SHOW means
1697 just return the URL, don't show it in the minibuffer."
1698 (interactive)
1699 (let ((url ""))
1700 (cond
1701 ((equal url-current-type "gopher")
1702 (setq url (format "%s://%s%s/%s"
1703 url-current-type url-current-server
1704 (if (or (null url-current-port)
1705 (string= "70" url-current-port)) ""
1706 (concat ":" url-current-port))
1707 url-current-file)))
1708 ((equal url-current-type "news")
1709 (setq url (concat "news:"
1710 (if (not (equal url-current-server
1711 url-news-server))
1712 (concat "//" url-current-server
1713 (if (or (null url-current-port)
1714 (string= "119" url-current-port))
1715 ""
1716 (concat ":" url-current-port)) "/"))
1717 url-current-file)))
1718 ((equal url-current-type "about")
1719 (setq url (concat "about:" url-current-file)))
1720 ((member url-current-type '("http" "shttp" "https"))
1721 (setq url (format "%s://%s%s/%s" url-current-type url-current-server
1722 (if (or (null url-current-port)
1723 (string= "80" url-current-port))
1724 ""
1725 (concat ":" url-current-port))
1726 (if (and url-current-file
1727 (= ?/ (string-to-char url-current-file)))
1728 (substring url-current-file 1 nil)
1729 url-current-file))))
1730 ((equal url-current-type "ftp")
1731 (setq url (format "%s://%s%s/%s" url-current-type
1732 (if (and url-current-user
1733 (not (string= "anonymous" url-current-user)))
1734 (concat url-current-user "@") "")
1735 url-current-server
1736 (if (and url-current-file
1737 (= ?/ (string-to-char url-current-file)))
1738 (substring url-current-file 1 nil)
1739 url-current-file))))
1740 ((and (member url-current-type '("file" nil)) url-current-file)
1741 (setq url (format "file:%s" url-current-file)))
1742 ((equal url-current-type "www")
1743 (setq url (format "www:/%s/%s" url-current-server url-current-file)))
1744 (t
1745 (setq url nil)))
1746 (if (not no-show) (message "%s" url) url)))
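
;; A hedged usage note (not in the original source): calling
;; (url-view-url t) from a W3 buffer returns the reconstructed URL as a
;; string instead of echoing it in the minibuffer.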
1747
1748 (defun url-parse-Netscape-history (fname)
1749 ;; Parse a Netscape/X style global history list.
1750 (let (pos ; Position holder
1751 url ; The URL
1752 time) ; Last time accessed
1753 (goto-char (point-min))
1754 (skip-chars-forward "^\n")
1755 (skip-chars-forward "\n \t") ; Skip past the tag line
1756 (setq url-global-history-hash-table (url-make-hashtable 131))
1757 ;; Here we will go to the end of the line and
1758 ;; skip back over a token, since we might run
1759 ;; into spaces in URLs, depending on how much
1760 ;; smarter netscape is than the old XMosaic :)
1761 (while (not (eobp))
1762 (setq pos (point))
1763 (end-of-line)
1764 (skip-chars-backward "^ \t")
1765 (skip-chars-backward " \t")
1766 (setq url (buffer-substring pos (point))
1767 pos (1+ (point)))
1768 (skip-chars-forward "^\n")
1769 (setq time (buffer-substring pos (point)))
1770 (skip-chars-forward "\n")
1771 (setq url-history-changed-since-last-save t)
1772 (url-puthash url time url-global-history-hash-table))))
1773
1774 (defun url-parse-Mosaic-history-v1 (fname)
1775 ;; Parse an NCSA Mosaic/X style global history list
1776 (goto-char (point-min))
1777 (skip-chars-forward "^\n")
1778 (skip-chars-forward "\n \t") ; Skip past the tag line
1779 (skip-chars-forward "^\n")
1780 (skip-chars-forward "\n \t") ; Skip past the second tag line
1781 (setq url-global-history-hash-table (url-make-hashtable 131))
1782 (let (pos ; Temporary position holder
1783 bol ; Beginning-of-line
1784 url ; URL
1785 time ; Time
1786 last-end ; Last ending point
1787 )
1788 (while (not (eobp))
1789 (setq bol (point))
1790 (end-of-line)
1791 (setq pos (point)
1792 last-end (point))
1793 (skip-chars-backward "^ \t" bol) ; Skip over year
1794 (skip-chars-backward " \t" bol)
1795 (skip-chars-backward "^ \t" bol) ; Skip over time
1796 (skip-chars-backward " \t" bol)
1797 (skip-chars-backward "^ \t" bol) ; Skip over day #
1798 (skip-chars-backward " \t" bol)
1799 (skip-chars-backward "^ \t" bol) ; Skip over month
1800 (skip-chars-backward " \t" bol)
1801 (skip-chars-backward "^ \t" bol) ; Skip over day abbrev.
1802 (if (bolp)
1803 nil ; Malformed entry!!! Ack! Bailout!
1804 (setq time (buffer-substring pos (point)))
1805 (skip-chars-backward " \t")
1806 (setq pos (point)))
1807 (beginning-of-line)
1808 (setq url (buffer-substring (point) pos))
1809 (goto-char (min (1+ last-end) (point-max))) ; Goto next line
1810 (if (/= (length url) 0)
1811 (progn
1812 (setq url-history-changed-since-last-save t)
1813 (url-puthash url time url-global-history-hash-table))))))
1814
1815 (defun url-parse-Mosaic-history-v2 (fname)
1816 ;; Parse an NCSA Mosaic/X style global history list (version 2)
1817 (goto-char (point-min))
1818 (skip-chars-forward "^\n")
1819 (skip-chars-forward "\n \t") ; Skip past the tag line
1820 (skip-chars-forward "^\n")
1821 (skip-chars-forward "\n \t") ; Skip past the second tag line
1822 (setq url-global-history-hash-table (url-make-hashtable 131))
1823 (let (pos ; Temporary position holder
1824 bol ; Beginning-of-line
1825 url ; URL
1826 time ; Time
1827 last-end ; Last ending point
1828 )
1829 (while (not (eobp))
1830 (setq bol (point))
1831 (end-of-line)
1832 (setq pos (point)
1833 last-end (point))
1834 (skip-chars-backward "^ \t" bol) ; Skip over time
1835 (if (bolp)
1836 nil ; Malformed entry!!! Ack! Bailout!
1837 (setq time (buffer-substring pos (point)))
1838 (skip-chars-backward " \t")
1839 (setq pos (point)))
1840 (beginning-of-line)
1841 (setq url (buffer-substring (point) pos))
1842 (goto-char (min (1+ last-end) (point-max))) ; Goto next line
1843 (if (/= (length url) 0)
1844 (progn
1845 (setq url-history-changed-since-last-save t)
1846 (url-puthash url time url-global-history-hash-table))))))
1847
1848 (defun url-parse-Emacs-history (&optional fname)
1849 ;; Parse out the Emacs-w3 global history file for completion, etc.
1850 (or fname (setq fname (expand-file-name url-global-history-file)))
1851 (cond
1852 ((not (file-exists-p fname))
1853 (message "%s does not exist." fname))
1854 ((not (file-readable-p fname))
1855 (message "%s is unreadable." fname))
1856 (t
1857 (condition-case ()
1858 (load fname nil t)
1859 (error (message "Could not load %s" fname)))
1860 (if (boundp 'url-global-history-completion-list)
1861 ;; Hey! Automatic conversion of old format!
1862 (progn
1863 (setq url-global-history-hash-table (url-make-hashtable 131)
1864 url-history-changed-since-last-save t)
1865 (mapcar (function
1866 (lambda (x)
1867 (url-puthash (car x) (cdr x)
1868 url-global-history-hash-table)))
1869 (symbol-value 'url-global-history-completion-list)))))))
1870
1871 (defun url-parse-global-history (&optional fname)
1872 ;; Parse out the mosaic global history file for completions, etc.
1873 (or fname (setq fname (expand-file-name url-global-history-file)))
1874 (cond
1875 ((not (file-exists-p fname))
1876 (message "%s does not exist." fname))
1877 ((not (file-readable-p fname))
1878 (message "%s is unreadable." fname))
1879 (t
1880 (save-excursion
1881 (set-buffer (get-buffer-create " *url-tmp*"))
1882 (erase-buffer)
1883 (insert-file-contents-literally fname)
1884 (goto-char (point-min))
1885 (cond
1886 ((looking-at "(setq") (url-parse-Emacs-history fname))
1887 ((looking-at "ncsa-mosaic-.*-1$") (url-parse-Mosaic-history-v1 fname))
1888 ((looking-at "ncsa-mosaic-.*-2$") (url-parse-Mosaic-history-v2 fname))
1889 ((or (looking-at "MCOM-") (looking-at "netscape"))
1890 (url-parse-Netscape-history fname))
1891 (t
1892 (url-warn 'url (format "Cannot deduce type of history file: %s"
1893 fname))))))))
1894
1895 (defun url-write-Emacs-history (fname)
1896 ;; Write an Emacs-w3 style global history list into FNAME
1897 (erase-buffer)
1898 (let ((count 0))
1899 (url-maphash (function
1900 (lambda (key value)
1901 (setq count (1+ count))
1902 (insert "(url-puthash "
1903 (if (stringp key)
1904 (prin1-to-string key)
1905 (concat "\"" (symbol-name key) "\""))
1906 (if (not (stringp value)) " '" "")
1907 (prin1-to-string value)
1908 " url-global-history-hash-table)\n")))
1909 url-global-history-hash-table)
1910 (goto-char (point-min))
1911 (insert (format
1912 "(setq url-global-history-hash-table (url-make-hashtable %d))\n"
1913 (/ count 4)))
1914 (goto-char (point-max))
1915 (insert "\n")
1916 (write-file fname)))
1917
1918 (defun url-write-Netscape-history (fname)
1919 ;; Write a Netscape-style global history list into FNAME
1920 (erase-buffer)
1921 (let ((last-valid-time "785305714")) ; Picked out of thin air,
1922 ; in case first in assoc list
1923 ; doesn't have a valid time
1924 (goto-char (point-min))
1925 (insert "MCOM-Global-history-file-1\n")
1926 (url-maphash (function
1927 (lambda (url time)
1928 (if (or (not (stringp time)) (string-match "[ \t]" time))
1929 (setq time last-valid-time)
1930 (setq last-valid-time time))
1931 (insert (concat (if (stringp url)
1932 url
1933 (symbol-name url))
1934 " " time "\n"))))
1935 url-global-history-hash-table)
1936 (write-file fname)))
1937
1938 (defun url-write-Mosaic-history-v1 (fname)
1939 ;; Write a Mosaic/X-style global history list into FNAME
1940 (erase-buffer)
1941 (goto-char (point-min))
1942 (insert "ncsa-mosaic-history-format-1\nGlobal\n")
1943 (url-maphash (function
1944 (lambda (url time)
1945 (if (listp time)
1946 (setq time (current-time-string time)))
1947 (if (or (not (stringp time))
1948 (not (string-match " " time)))
1949 (setq time (current-time-string)))
1950 (insert (concat (if (stringp url)
1951 url
1952 (symbol-name url))
1953 " " time "\n"))))
1954 url-global-history-hash-table)
1955 (write-file fname))
1956
1957 (defun url-write-Mosaic-history-v2 (fname)
1958 ;; Write a Mosaic/X-style global history list into FNAME
1959 (let ((last-valid-time "827250806"))
1960 (erase-buffer)
1961 (goto-char (point-min))
1962 (insert "ncsa-mosaic-history-format-2\nGlobal\n")
1963 (url-maphash (function
1964 (lambda (url time)
1965 (if (listp time)
1966 (setq time last-valid-time)
1967 (setq last-valid-time time))
1968 (if (not (stringp time))
1969 (setq time last-valid-time))
1970 (insert (concat (if (stringp url)
1971 url
1972 (symbol-name url))
1973 " " time "\n"))))
1974 url-global-history-hash-table)
1975 (write-file fname)))
1976
1977 (defun url-write-global-history (&optional fname)
1978 "Write the global history file into `url-global-history-file'.
1979 The type of data written is determined by what is in the file to begin
1980 with. If the type of storage cannot be determined, then prompt the
1981 user for what type to save as."
1982 (interactive)
1983 (or fname (setq fname (expand-file-name url-global-history-file)))
1984 (cond
1985 ((not url-history-changed-since-last-save) nil)
1986 ((not (file-writable-p fname))
1987 (message "%s is unwritable." fname))
1988 (t
1989 (let ((make-backup-files nil)
1990 (version-control nil)
1991 (require-final-newline t))
1992 (save-excursion
1993 (set-buffer (get-buffer-create " *url-tmp*"))
1994 (erase-buffer)
1995 (condition-case ()
1996 (insert-file-contents-literally fname)
1997 (error nil))
1998 (goto-char (point-min))
1999 (cond
2000 ((looking-at "ncsa-mosaic-.*-1$") (url-write-Mosaic-history-v1 fname))
2001 ((looking-at "ncsa-mosaic-.*-2$") (url-write-Mosaic-history-v2 fname))
2002 ((looking-at "MCOM-") (url-write-Netscape-history fname))
2003 ((looking-at "netscape") (url-write-Netscape-history fname))
2004 ((looking-at "(setq") (url-write-Emacs-history fname))
2005 (t (url-write-Emacs-history fname)))
2006 (kill-buffer (current-buffer))))))
2007 (setq url-history-changed-since-last-save nil))
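
;; A hedged round-trip sketch (not in the original source): reload the
;; history from disk and write it back in whatever format it already uses.
;;
;;   (url-parse-global-history)     ; fills `url-global-history-hash-table'
;;   (url-write-global-history)     ; rewrites `url-global-history-file'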
2008
2009
2010 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
2011 ;;; The main URL fetching interface
2012 ;;; -------------------------------
2013 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
2014
2015 ;;;###autoload
2016 (defun url-popup-info (url)
2017 "Retrieve the HTTP/1.0 headers and display them in a temp buffer."
2018 (let* ((urlobj (url-generic-parse-url url))
2019 (type (url-type urlobj))
2020 data)
2021 (cond
2022 ((string= type "http")
2023 (let ((url-request-method "HEAD")
2024 (url-automatic-caching nil)
2025 (url-inhibit-mime-parsing t)
2026 (url-working-buffer " *popup*"))
2027 (save-excursion
2028 (set-buffer (get-buffer-create url-working-buffer))
2029 (erase-buffer)
2030 (setq url-be-asynchronous nil)
2031 (url-retrieve url)
2032 (subst-char-in-region (point-min) (point-max) ?\r ? )
2033 (buffer-string))))
2034 ((or (string= type "file") (string= type "ftp"))
2035 (setq data (url-file-attributes url))
2036 (set-buffer (get-buffer-create
2037 (url-generate-new-buffer-name "*Header Info*")))
2038 (erase-buffer)
2039 (if data
2040 (concat (if (stringp (nth 0 data))
2041 (concat " Linked to: " (nth 0 data))
2042 (concat " Directory: " (if (nth 0 data) "Yes" "No")))
2043 "\n Links: " (int-to-string (nth 1 data))
2044 "\n File UID: " (int-to-string (nth 2 data))
2045 "\n File GID: " (int-to-string (nth 3 data))
2046 "\n Last Access: " (current-time-string (nth 4 data))
2047 "\nLast Modified: " (current-time-string (nth 5 data))
2048 "\n Last Changed: " (current-time-string (nth 6 data))
2049 "\n Size (bytes): " (int-to-string (nth 7 data))
2050 "\n File Type: " (or (nth 8 data) "text/plain"))
2051 (concat "No info found for " url)))
2052 ((and (string= type "news") (string-match "@" url))
2053 (let ((art (url-filename urlobj)))
2054 (if (not (string= (substring art -1 nil) ">"))
2055 (setq art (concat "<" art ">")))
2056 (url-get-headers-from-article-id art)))
2057 (t (concat "Don't know how to find information on " url)))))
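
;; A hedged usage sketch (not in the original source); the URL is hypothetical:
;;
;;   (url-popup-info "http://www.example.com/")
;;   ;; => the raw HEAD response as a string (for file:/ftp: URLs, a
;;   ;;    formatted block of file attributes instead)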
2058
2059 (defun url-decode-text ()
2060 ;; Decode text transmitted by NNTP.
2061 ;; 0. Delete status line.
2062 ;; 1. Delete `^M' at end of line.
2063 ;; 2. Delete `.' at end of buffer (end of text mark).
2064 ;; 3. Delete `.' at beginning of line."
2065 (save-excursion
2066 (set-buffer nntp-server-buffer)
2067 ;; Insert newline at end of buffer.
2068 (goto-char (point-max))
2069 (if (not (bolp))
2070 (insert "\n"))
2071 ;; Delete status line.
2072 (goto-char (point-min))
2073 (delete-region (point) (progn (forward-line 1) (point)))
2074 ;; Delete `^M' at end of line.
2075 ;; (replace-regexp "\r$" "")
2076 (while (not (eobp))
2077 (end-of-line)
2078 (if (= (preceding-char) ?\r)
2079 (delete-char -1))
2080 (forward-line 1)
2081 )
2082 ;; Delete `.' at end of buffer (end of text mark).
2083 (goto-char (point-max))
2084 (forward-line -1) ;(beginning-of-line)
2085 (if (looking-at "^\\.$")
2086 (delete-region (point) (progn (forward-line 1) (point))))
2087 ;; Replace `..' at beginning of line with `.'.
2088 (goto-char (point-min))
2089 ;; (replace-regexp "^\\.\\." ".")
2090 (while (search-forward "\n.." nil t)
2091 (delete-char -1))
2092 ))
2093
2094 (defun url-get-headers-from-article-id (art)
2095 ;; Return the HEAD of ART (a usenet news article)
2096 (cond
2097 ((string-match "flee" nntp-version)
2098 (nntp/command "HEAD" art)
2099 (save-excursion
2100 (set-buffer nntp-server-buffer)
2101 (while (progn (goto-char (point-min))
2102 (not (re-search-forward "^.\r*$" nil t)))
2103 (url-accept-process-output nntp/connection))))
2104 (t
2105 (nntp-send-command "^\\.\r$" "HEAD" art)
2106 (url-decode-text)))
2107 (save-excursion
2108 (set-buffer nntp-server-buffer)
2109 (buffer-string)))
2110
2111 (defvar url-external-retrieval-program "www"
2112 "*Name of the external executable to run to retrieve URLs.")
2113
2114 (defvar url-external-retrieval-args '("-source")
2115 "*A list of arguments to pass to `url-external-retrieval-program' to
2116 retrieve a URL by its HTML source.")
2117
2118 (defun url-retrieve-externally (url &optional no-cache)
2119 (if (get-buffer url-working-buffer)
2120 (save-excursion
2121 (set-buffer url-working-buffer)
2122 (set-buffer-modified-p nil)
2123 (kill-buffer url-working-buffer)))
2124 (set-buffer (get-buffer-create url-working-buffer))
2125 (let* ((args (append url-external-retrieval-args (list url)))
2126 (urlobj (url-generic-parse-url url))
2127 (type (url-type urlobj)))
2128 (if (or (member type '("www" "about" "mailto" "mailserver"))
2129 (url-file-directly-accessible-p urlobj))
2130 (url-retrieve-internally url)
2131 (url-lazy-message "Retrieving %s..." url)
2132 (apply 'call-process url-external-retrieval-program
2133 nil t nil args)
2134 (url-lazy-message "Retrieving %s... done" url)
2135 (if (and type urlobj)
2136 (setq url-current-server (url-host urlobj)
2137 url-current-type (url-type urlobj)
2138 url-current-port (url-port urlobj)
2139 url-current-file (url-filename urlobj)))
2140 (if (member url-current-file '("/" ""))
2141 (setq url-current-mime-type "text/html")))))
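
;; A hedged configuration sketch (not in the original source): the program
;; name below is a hypothetical example, not a default of this package;
;; "-source" is simply the packaged default argument list.
;;
;;   (setq url-external-retrieval-program "lynx"
;;         url-external-retrieval-args '("-source"))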
2142
2143 (defun url-get-normalized-date (&optional specified-time)
2144 ;; Return a 'real' date string that most HTTP servers can understand.
2145 (require 'timezone)
2146 (let* ((raw (if specified-time (current-time-string specified-time)
2147 (current-time-string)))
2148 (gmt (timezone-make-date-arpa-standard raw
2149 (nth 1 (current-time-zone))
2150 "GMT"))
2151 (parsed (timezone-parse-date gmt))
2152 (day (cdr-safe (assoc (substring raw 0 3) weekday-alist)))
2153 (year nil)
2154 (month (car
2155 (rassoc
2156 (string-to-int (aref parsed 1)) monthabbrev-alist)))
2157 )
2158 (setq day (or (car-safe (rassoc day weekday-alist))
2159 (substring raw 0 3))
2160 year (aref parsed 0))
2161 ;; This is needed for plexus servers, or the server will hang trying to
2162 ;; parse the if-modified-since header. Hopefully, I can take this out
2163 ;; soon.
2164 (if (and year (> (length year) 2))
2165 (setq year (substring year -2 nil)))
2166
2167 (concat day ", " (aref parsed 2) "-" month "-" year " "
2168 (aref parsed 3) " " (or (aref parsed 4)
2169 (concat "[" (nth 1 (current-time-zone))
2170 "]")))))
2171
2172 ;;;###autoload
2173 (defun url-cache-expired (url mod)
2174 "Return t iff a cached file has expired."
2175 (if (not (string-match url-nonrelative-link url))
2176 t
2177 (let* ((urlobj (url-generic-parse-url url))
2178 (type (url-type urlobj)))
2179 (cond
2180 (url-standalone-mode
2181 (not (file-exists-p (url-create-cached-filename urlobj))))
2182 ((string= type "http")
2183 (if (not url-standalone-mode) t
2184 (not (file-exists-p (url-create-cached-filename urlobj)))))
2185 ((not (fboundp 'current-time))
2186 t)
2187 ((member type '("file" "ftp"))
2188 (if (or (equal mod '(0 0)) (not mod))
2189 t ; no recorded modification time, so treat as expired
2190 (or (> (nth 0 mod) (nth 0 (current-time)))
2191 (> (nth 1 mod) (nth 1 (current-time))))))
2192 (t nil)))))
2193
2194 (defun url-retrieve-internally (url &optional no-cache)
2195 (if (get-buffer url-working-buffer)
2196 (save-excursion
2197 (set-buffer url-working-buffer)
2198 (erase-buffer)
2199 (setq url-current-can-be-cached (not no-cache))
2200 (set-buffer-modified-p nil)))
2201 (let* ((urlobj (url-generic-parse-url url))
2202 (type (url-type urlobj))
2203 (url-using-proxy (and
2204 (if (assoc "no_proxy" url-proxy-services)
2205 (not (string-match
2206 (cdr
2207 (assoc "no_proxy" url-proxy-services))
2208 url))
2209 t)
2210 (not
2211 (and
2212 (string-match "file:" url)
2213 (not (string-match "file://" url))))
2214 (cdr (assoc type url-proxy-services))))
2215 (handler nil)
2216 (original-url url)
2217 (cached nil)
2218 (tmp url-current-file))
2219 (if url-using-proxy (setq type "proxy"))
2220 (setq cached (url-is-cached url)
2221 cached (and cached (not (url-cache-expired url cached)))
2222 handler (if cached 'url-extract-from-cache
2223 (car-safe
2224 (cdr-safe (assoc (or type "auto")
2225 url-registered-protocols))))
2226 url (if cached (url-create-cached-filename url) url))
2227 (save-excursion
2228 (set-buffer (get-buffer-create url-working-buffer))
2229 (setq url-current-can-be-cached (not no-cache)))
2230 (if url-be-asynchronous
2231 (url-download-minor-mode t))
2232 (if (and handler (fboundp handler))
2233 (funcall handler url)
2234 (set-buffer (get-buffer-create url-working-buffer))
2235 (setq url-current-file tmp)
2236 (erase-buffer)
2237 (insert "<title> Link Error! </title>\n"
2238 "<h1> An error has occurred... </h1>\n"
2239 (format "The link type `<code>%s</code>'" type)
2240 " is unrecognized or unsupported at this time.<p>\n"
2241 "If you feel this is an error, please "
2242 "<a href=\"mailto://" url-bug-address "\">send me mail.</a>"
2243 "<p><address>William Perry</address><br>"
2244 "<address>" url-bug-address "</address>")
2245 (setq url-current-file "error.html"))
2246 (if (and
2247 (not url-be-asynchronous)
2248 (get-buffer url-working-buffer))
2249 (progn
2250 (set-buffer url-working-buffer)
2251 (if (not url-current-object)
2252 (setq url-current-object urlobj))
2253 (url-clean-text)))
2254 (cond
2255 ((equal type "wais") nil)
2256 ((and url-be-asynchronous (not cached) (member type '("http" "proxy")))
2257 nil)
2258 (url-be-asynchronous
2259 (funcall url-default-retrieval-proc (buffer-name)))
2260 ((not (get-buffer url-working-buffer)) nil)
2261 ((and (not url-inhibit-mime-parsing)
2262 (or cached (url-mime-response-p t)))
2263 (or cached (url-parse-mime-headers nil t))))
2264 (if (and (or (not url-be-asynchronous)
2265 (not (equal type "http")))
2266 (not url-current-mime-type))
2267 (if (url-buffer-is-hypertext)
2268 (setq url-current-mime-type "text/html")
2269 (setq url-current-mime-type (mm-extension-to-mime
2270 (url-file-extension
2271 url-current-file)))))
2272 (if (and url-automatic-caching url-current-can-be-cached
2273 (not url-be-asynchronous))
2274 (save-excursion
2275 (url-store-in-cache url-working-buffer)))
2276 (if (not (url-hashtablep url-global-history-hash-table))
2277 (setq url-global-history-hash-table (url-make-hashtable 131)))
2278 (if (not (string-match "^about:" original-url))
2279 (progn
2280 (setq url-history-changed-since-last-save t)
2281 (url-puthash original-url (current-time)
2282 url-global-history-hash-table)))
2283 cached))
2284
2285 ;;;###autoload
2286 (defun url-retrieve (url &optional no-cache expected-md5)
2287 "Retrieve a document over the World Wide Web.
2288 The document should be specified by its fully specified
2289 Uniform Resource Locator. No parsing is done, just return the
2290 document as the server sent it. The document is left in the
2291 buffer specified by url-working-buffer. url-working-buffer is killed
2292 immediately before starting the transfer, so that no buffer-local
2293 variables interfere with the retrieval. HTTP/1.0 redirection will
2294 be honored before this function exits."
2295 (url-do-setup)
2296 (if (and (fboundp 'set-text-properties)
2297 (subrp (symbol-function 'set-text-properties)))
2298 (set-text-properties 0 (length url) nil url))
2299 (if (and url (string-match "^url:" url))
2300 (setq url (substring url (match-end 0) nil)))
2301 (let ((status (url-retrieve-internally url no-cache)))
2302 (if (and expected-md5 url-check-md5s)
2303 (let ((cur-md5 (md5 (current-buffer))))
2304 (if (not (string= cur-md5 expected-md5))
2305 (and (not (funcall url-confirmation-func
2306 "MD5s do not match, use anyway? "))
2307 (error "MD5 error.")))))
2308 status))
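
;; A minimal synchronous usage sketch (not in the original source); the URL
;; is hypothetical and `url-be-asynchronous' is assumed to be nil:
;;
;;   (let ((url-be-asynchronous nil))
;;     (url-retrieve "http://www.example.com/")
;;     (save-excursion
;;       (set-buffer url-working-buffer)
;;       (buffer-string)))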
2309
2310 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
2311 ;;; How to register a protocol
2312 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
2313 (defun url-register-protocol (protocol &optional retrieve expander defport)
2314 "Register a protocol with the URL retrieval package.
2315 PROTOCOL is the type of protocol being registered (http, nntp, etc),
2316 and is the first chunk of the URL, i.e. http:// URLs will be
2317 handled by the protocol registered as `http'. PROTOCOL can
2318 be either a symbol or a string - it is converted to a string,
2319 and lowercased before being registered.
2320 RETRIEVE (optional) is the function to be called with a URL as its
2321 only argument. If this argument is omitted, then this looks
2322 for a function called `url-PROTOCOL'. A warning is shown if
2323 the function is undefined, but the protocol is still
2324 registered.
2325 EXPANDER (optional) is the function to call to expand a relative link
2326 of type PROTOCOL. If omitted, this defaults to `url-default-expander'.
2327 DEFPORT (optional) is the default port to record for PROTOCOL.
2328
2329 Any proxy information is read in from environment variables at this
2330 time, so this function should only be called after dumping Emacs."
2331 (let* ((protocol (cond
2332 ((stringp protocol) (downcase protocol))
2333 ((symbolp protocol) (downcase (symbol-name protocol)))
2334 (t nil)))
2335
2336 (retrieve (or retrieve (intern (concat "url-" protocol))))
2337 (expander (or expander 'url-default-expander))
2338 (cur-protocol (assoc protocol url-registered-protocols))
2339 (urlobj nil)
2340 (cur-proxy (assoc protocol url-proxy-services))
2341 (env-proxy (or (getenv (concat protocol "_proxy"))
2342 (getenv (concat protocol "_PROXY"))
2343 (getenv (upcase (concat protocol "_PROXY"))))))
2344
2345 (if (not protocol)
2346 (error "Invalid data to url-register-protocol."))
2347
2348 (if (not (fboundp retrieve))
2349 (message "Warning: %s registered, but no function found." protocol))
2350
2351 ;; Store the default port, if none previously specified and
2352 ;; defport given
2353 (if (and defport (not (assoc protocol url-default-ports)))
2354 (setq url-default-ports (cons (cons protocol defport)
2355 url-default-ports)))
2356
2357 ;; Store the appropriate information for later
2358 (if cur-protocol
2359 (setcdr cur-protocol (cons retrieve expander))
2360 (setq url-registered-protocols (cons (cons protocol
2361 (cons retrieve expander))
2362 url-registered-protocols)))
2363
2364 ;; Store any proxying information - this will not overwrite an old
2365 ;; entry, so that people can still set this information in their
2366 ;; .emacs file
2367 (cond
2368 (cur-proxy nil) ; Keep their old settings
2369 ((null env-proxy) nil) ; No proxy setup
2370 ;; First check if its something like hostname:port
2371 ((string-match "^\\([^:]+\\):\\([0-9]+\\)$" env-proxy)
2372 (setq urlobj (url-generic-parse-url nil)) ; Get a blank object
2373 (url-set-type urlobj "http")
2374 (url-set-host urlobj (url-match env-proxy 1))
2375 (url-set-port urlobj (url-match env-proxy 2)))
2376 ;; Then check if its a fully specified URL
2377 ((string-match url-nonrelative-link env-proxy)
2378 (setq urlobj (url-generic-parse-url env-proxy))
2379 (url-set-type urlobj "http")
2380 (url-set-target urlobj nil))
2381 ;; Finally, fall back on the assumption that its just a hostname
2382 (t
2383 (setq urlobj (url-generic-parse-url nil)) ; Get a blank object
2384 (url-set-type urlobj "http")
2385 (url-set-host urlobj env-proxy)))
2386
2387 (if (and (not cur-proxy) urlobj)
2388 (progn
2389 (setq url-proxy-services
2390 (cons (cons protocol (url-recreate-url urlobj))
2391 url-proxy-services))
2392 (message "Using a proxy for %s..." protocol)))))
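
;; A hedged registration sketch (not in the original source): register a
;; hypothetical `finger' protocol whose retrieval function would be
;; `url-finger' (not defined here), using the default expander and a
;; default port of "79".
;;
;;   (url-register-protocol 'finger nil nil "79")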
2393
2394 (provide 'url)