;;; url.el --- Uniform Resource Locator retrieval tool
;; Author: wmperry
;; Created: 1996/05/30 13:25:47
;; Version: 1.52
;; Keywords: comm, data, processes, hypermedia

;;; LCD Archive Entry:
;;; url|William M. Perry|wmperry@spry.com|
;;; Major mode for manipulating URLs|
;;; 1996/05/30 13:25:47|1.52|Location Undetermined
;;;

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; Copyright (c) 1993, 1994, 1995 by William M. Perry (wmperry@spry.com)
;;;
;;; This file is not part of GNU Emacs, but the same permissions apply.
;;;
;;; GNU Emacs is free software; you can redistribute it and/or modify
;;; it under the terms of the GNU General Public License as published by
;;; the Free Software Foundation; either version 2, or (at your option)
;;; any later version.
;;;
;;; GNU Emacs is distributed in the hope that it will be useful,
;;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
;;; GNU General Public License for more details.
;;;
;;; You should have received a copy of the GNU General Public License
;;; along with GNU Emacs; see the file COPYING.  If not, write to
;;; the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; Copyright (c) 1993, 1994, 1995 by William M. Perry (wmperry@spry.com)   ;;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;


(require 'url-vars)
(require 'url-parse)
(require 'urlauth)
(require 'url-cookie)
(require 'mm)
(require 'md5)
(require 'base64)
(require 'url-hash)
(or (featurep 'efs)
    (featurep 'efs-auto)
    (condition-case ()
        (require 'ange-ftp)
      (error nil)))

(load-library "url-sysdp")

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; Functions that might not exist in old versions of emacs
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defun url-save-error (errobj)
  (save-excursion
    (set-buffer (get-buffer-create " *url-error*"))
    (erase-buffer))
  (display-error errobj (get-buffer-create " *url-error*")))

(cond
 ((fboundp 'display-warning)
  (fset 'url-warn 'display-warning))
 ((fboundp 'w3-warn)
  (fset 'url-warn 'w3-warn))
 ((fboundp 'warn)
  (defun url-warn (class message &optional level)
    (warn "(%s/%s) %s" class (or level 'warning) message)))
 (t
  (defun url-warn (class message &optional level)
    (save-excursion
      (set-buffer (get-buffer-create "*W3-WARNINGS*"))
      (goto-char (point-max))
      (save-excursion
        (insert (format "(%s/%s) %s\n" class (or level 'warning) message)))
      (display-buffer (current-buffer))))))


;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; Autoload all the URL loaders
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(autoload 'url-file "url-file")
(autoload 'url-ftp "url-file")
(autoload 'url-gopher "url-gopher")
(autoload 'url-irc "url-irc")
(autoload 'url-http "url-http")
(autoload 'url-nfs "url-nfs")
(autoload 'url-mailserver "url-mail")
(autoload 'url-mailto "url-mail")
(autoload 'url-info "url-misc")
(autoload 'url-shttp "url-http")
(autoload 'url-https "url-http")
(autoload 'url-finger "url-misc")
(autoload 'url-rlogin "url-misc")
(autoload 'url-telnet "url-misc")
(autoload 'url-tn3270 "url-misc")
(autoload 'url-proxy "url-misc")
(autoload 'url-x-exec "url-misc")
(autoload 'url-news "url-news")
(autoload 'url-nntp "url-news")
(autoload 'url-decode-pgp/pem "url-pgp")
(autoload 'url-wais "url-wais")

(autoload 'url-save-newsrc "url-news")
(autoload 'url-news-generate-reply-form "url-news")
(autoload 'url-parse-newsrc "url-news")
(autoload 'url-mime-response-p "url-http")
(autoload 'url-parse-mime-headers "url-http")
(autoload 'url-handle-refresh-header "url-http")
(autoload 'url-create-mime-request "url-http")
(autoload 'url-create-message-id "url-http")
(autoload 'url-create-multipart-request "url-http")
(autoload 'url-parse-viewer-types "url-http")

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; File-name-handler-alist functions
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defun url-setup-file-name-handlers ()
  ;; Setup file-name handlers.
  '(cond
    ((not (boundp 'file-name-handler-alist))
     nil)                               ; Don't load if no alist
    ((rassq 'url-file-handler file-name-handler-alist)
     nil)                               ; Don't load twice
    ((and (string-match "XEmacs\\|Lucid" emacs-version)
          (< url-emacs-minor-version 11)) ; Don't load in lemacs 19.10
     nil)
    (t
     (setq file-name-handler-alist
           (let ((new-handler (cons
                               (concat "^/*"
                                       (substring url-nonrelative-link 1 nil))
                               'url-file-handler)))
             (if file-name-handler-alist
                 (append (list new-handler) file-name-handler-alist)
               (list new-handler)))))))

(defun url-file-handler (operation &rest args)
  ;; Function called from the file-name-handler-alist routines.  OPERATION
  ;; is what needs to be done ('file-exists-p, etc).  ARGS are the arguments
  ;; that would have been passed to OPERATION.
  (let ((fn (get operation 'url-file-handlers))
        (url (car args))
        (myargs (cdr args)))
    (if (= (string-to-char url) ?/)
        (setq url (substring url 1 nil)))
    (if fn (apply fn url myargs)
      (let (file-name-handler-alist)
        (apply operation url myargs)))))

(defun url-file-handler-identity (&rest args)
  (car args))

(defun url-file-handler-null (&rest args)
  nil)

(put 'file-directory-p 'url-file-handlers 'url-file-handler-null)
(put 'substitute-in-file-name 'url-file-handlers 'url-file-handler-identity)
(put 'file-writable-p 'url-file-handlers 'url-file-handler-null)
(put 'file-truename 'url-file-handlers 'url-file-handler-identity)
(put 'insert-file-contents 'url-file-handlers 'url-insert-file-contents)
(put 'expand-file-name 'url-file-handlers 'url-expand-file-name)
(put 'directory-files 'url-file-handlers 'url-directory-files)
(put 'file-directory-p 'url-file-handlers 'url-file-directory-p)
(put 'file-writable-p 'url-file-handlers 'url-file-writable-p)
(put 'file-readable-p 'url-file-handlers 'url-file-exists)
(put 'file-executable-p 'url-file-handlers 'null)
(put 'file-symlink-p 'url-file-handlers 'null)
(put 'file-exists-p 'url-file-handlers 'url-file-exists)
(put 'copy-file 'url-file-handlers 'url-copy-file)
(put 'file-attributes 'url-file-handlers 'url-file-attributes)
(put 'file-name-all-completions 'url-file-handlers
     'url-file-name-all-completions)
(put 'file-name-completion 'url-file-handlers 'url-file-name-completion)
(put 'file-local-copy 'url-file-handlers 'url-file-local-copy)
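;;
;; Each `put' above registers the URL-specific implementation that
;; url-file-handler dispatches to for that primitive.  A rough sketch of
;; the intended behavior once the handler is installed (the URL below is
;; only illustrative):
;;
;;   (file-exists-p "/http://www.spry.com/")
;;   ;; effectively calls (url-file-exists "http://www.spry.com/" ...)
;;
;; Primitives with no entry fall through to the real operation with
;; file-name-handler-alist temporarily disabled.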


;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; Utility functions
;;; -----------------
;;; Various functions used around the url code.
;;; Some of these qualify as hacks, but hey, this is elisp.
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

(if (fboundp 'mm-string-to-tokens)
    (fset 'url-string-to-tokens 'mm-string-to-tokens)
  (defun url-string-to-tokens (str &optional delim)
    "Return a list of words from the string STR"
    (setq delim (or delim ? ))
    (let (results y)
      (mapcar
       (function
        (lambda (x)
          (cond
           ((and (= x delim) y) (setq results (cons y results) y nil))
           ((/= x delim) (setq y (concat y (char-to-string x))))
           (t nil)))) str)
      (nreverse (cons y results)))))
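;;
;; For instance, splitting on the default delimiter (a space); the
;; mm-string-to-tokens version is assumed to behave the same way:
;;
;;   (url-string-to-tokens "text/html; charset=iso-8859-1")
;;   ;; => ("text/html;" "charset=iso-8859-1")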

(defun url-days-between (date1 date2)
  ;; Return the number of days between date1 and date2.
  (- (url-day-number date1) (url-day-number date2)))

(defun url-day-number (date)
  (let ((dat (mapcar (function (lambda (s) (and s (string-to-int s)) ))
                     (timezone-parse-date date))))
    (timezone-absolute-from-gregorian
     (nth 1 dat) (nth 2 dat) (car dat))))

(defun url-seconds-since-epoch (date)
  ;; Return the number of seconds that have elapsed between
  ;; Jan 1 12:00:00 1970 and DATE.
  (let* ((tdate (mapcar (function (lambda (ti) (and ti (string-to-int ti))))
                        (timezone-parse-date date)))
         (ttime (mapcar (function (lambda (ti) (and ti (string-to-int ti))))
                        (timezone-parse-time
                         (aref (timezone-parse-date date) 3))))
         (edate (mapcar (function (lambda (ti) (and ti (string-to-int ti))))
                        (timezone-parse-date "Jan 1 12:00:00 1970")))
         (tday (- (timezone-absolute-from-gregorian
                   (nth 1 tdate) (nth 2 tdate) (nth 0 tdate))
                  (timezone-absolute-from-gregorian
                   (nth 1 edate) (nth 2 edate) (nth 0 edate)))))
    (+ (nth 2 ttime)
       (* (nth 1 ttime) 60)
       (* (nth 0 ttime) 60 60)
       (* tday 60 60 24))))
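;;
;; Rough usage sketch, assuming timezone.el can parse these RFC 822 style
;; date strings (no exact second counts are asserted here):
;;
;;   (url-days-between "Fri, 31 May 1996 00:00:00 GMT"
;;                     "Thu, 30 May 1996 00:00:00 GMT")   ; => 1
;;   (url-seconds-since-epoch "Thu, 30 May 1996 13:25:47 GMT")
;;   ;; => a large integer, ignoring the time zone field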

(defun url-match (s x)
  ;; Return regexp match x in s.
  (substring s (match-beginning x) (match-end x)))

(defun url-split (str del)
  ;; Split the string STR, with DEL (a regular expression) as the delimiter.
  ;; Returns an assoc list that you can use with completing-read.
  (let (x y)
    (while (string-match del str)
      (setq y (substring str 0 (match-beginning 0))
            str (substring str (match-end 0) nil))
      (if (not (string-match "^[ \t]+$" y))
          (setq x (cons (list y y) x))))
    (if (not (equal str ""))
        (setq x (cons (list str str) x)))
    x))
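;;
;; For example (note that the alist comes back in reverse order of
;; appearance, which is fine for completing-read):
;;
;;   (url-split "foo,bar,baz" ",")
;;   ;; => (("baz" "baz") ("bar" "bar") ("foo" "foo"))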

(defun url-replace-regexp (regexp to-string)
  (goto-char (point-min))
  (while (re-search-forward regexp nil t)
    (replace-match to-string t nil)))

(defun url-clear-tmp-buffer ()
  (set-buffer (get-buffer-create url-working-buffer))
  (if buffer-read-only (toggle-read-only))
  (erase-buffer))

(defun url-maybe-relative (url)
  (url-retrieve (url-expand-file-name url)))

(defun url-buffer-is-hypertext (&optional buff)
  "Return t if a buffer contains HTML, as near as we can guess."
  (setq buff (or buff (current-buffer)))
  (save-excursion
    (set-buffer buff)
    (let ((case-fold-search t))
      (goto-char (point-min))
      (re-search-forward
       "<\\(TITLE\\|HEAD\\|BASE\\|H[0-9]\\|ISINDEX\\|P\\)>" nil t))))

(defun nntp-after-change-function (&rest args)
  (save-excursion
    (set-buffer nntp-server-buffer)
    (message "Read %d bytes" (point-max))))

(defun url-percentage (x y)
  (if (fboundp 'float)
      (round (* 100 (/ x (float y))))
    (/ (* x 100) y)))
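;;
;; Example:
;;
;;   (url-percentage 512 2048)  ; => 25
;;
;; On an Emacs built without floating point support this falls back to
;; integer arithmetic, which simply truncates.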

(defun url-after-change-function (&rest args)
  ;; The nitty gritty details of messaging the HTTP/1.0 status messages
  ;; in the minibuffer.
  (if (get-buffer url-working-buffer)
      (save-excursion
        (set-buffer url-working-buffer)
        (let (status-message)
          (if url-current-content-length
              nil
            (goto-char (point-min))
            (skip-chars-forward " \t\n")
            (if (not (looking-at "HTTP/[0-9]\.[0-9]"))
                (setq url-current-content-length 0)
              (setq url-current-isindex
                    (and (re-search-forward "$\r*$" nil t) (point)))
              (if (re-search-forward
                   "^content-type:[ \t]*\\([^\r\n]+\\)\r*$"
                   url-current-isindex t)
                  (setq url-current-mime-type (downcase
                                               (url-eat-trailing-space
                                                (buffer-substring
                                                 (match-beginning 1)
                                                 (match-end 1))))))
              (if (re-search-forward "^content-length:\\([^\r\n]+\\)\r*$"
                                     url-current-isindex t)
                  (setq url-current-content-length
                        (string-to-int (buffer-substring (match-beginning 1)
                                                         (match-end 1))))
                (setq url-current-content-length nil))))
          (goto-char (point-min))
          (if (re-search-forward "^status:\\([^\r]*\\)" url-current-isindex t)
              (progn
                (setq status-message (buffer-substring (match-beginning 1)
                                                       (match-end 1)))
                (replace-match (concat "btatus:" status-message))))
          (goto-char (point-max))
          (cond
           (status-message (url-lazy-message "%s" status-message))
           ((and url-current-content-length (> url-current-content-length 1)
                 url-current-mime-type)
            (url-lazy-message "Read %d of %d bytes (%d%%) [%s]"
                              (point-max) url-current-content-length
                              (url-percentage (point-max)
                                              url-current-content-length)
                              url-current-mime-type))
           ((and url-current-content-length (> url-current-content-length 1))
            (url-lazy-message "Read %d of %d bytes (%d%%)"
                              (point-max) url-current-content-length
                              (url-percentage (point-max)
                                              url-current-content-length)))
           ((and (/= 1 (point-max)) url-current-mime-type)
            (url-lazy-message "Read %d bytes. [%s]" (point-max)
                              url-current-mime-type))
           ((/= 1 (point-max))
            (url-lazy-message "Read %d bytes." (point-max)))
           (t (url-lazy-message "Waiting for response.")))))))

(defun url-insert-entities-in-string (string)
  "Convert HTML markup-start characters to entity references in STRING.
Also replaces the \" character, so that the result may be safely used as
an attribute value in a tag.  Returns a new string with the result of the
conversion.  Replaces these characters as follows:
    &  ==>  &amp;
    <  ==>  &lt;
    >  ==>  &gt;
    \"  ==>  &quot;"
  (if (string-match "[&<>\"]" string)
      (save-excursion
        (set-buffer (get-buffer-create " *entity*"))
        (erase-buffer)
        (buffer-disable-undo (current-buffer))
        (insert string)
        (goto-char (point-min))
        (while (progn
                 (skip-chars-forward "^&<>\"")
                 (not (eobp)))
          (insert (cdr (assq (char-after (point))
                             '((?\" . "&quot;")
                               (?& . "&amp;")
                               (?< . "&lt;")
                               (?> . "&gt;")))))
          (delete-char 1))
        (buffer-string))
    string))
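;;
;; Example:
;;
;;   (url-insert-entities-in-string "<a href=\"foo\">")
;;   ;; => "&lt;a href=&quot;foo&quot;&gt;"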

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; Information information
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defvar url-process-lookup-table nil)

(defun url-setup-process-get ()
  (let ((x nil)
        (nativep t))
    (condition-case ()
        (progn
          (setq x (start-process "Test" nil "/bin/sh"))
          (get x 'command))
      (error (setq nativep nil)))
    (cond
     ((fboundp 'process-get)            ; Emacs 19.31 w/my hacks
      (defun url-process-get (proc prop &optional default)
        (or (process-get proc prop) default)))
     (nativep                           ; XEmacs 19.14 w/my hacks
      (fset 'url-process-get 'get))
     (t
      (defun url-process-get (proc prop &optional default)
        (or (plist-get (cdr-safe (assq proc url-process-lookup-table)) prop)
            default))))
    (cond
     ((fboundp 'process-put)            ; Emacs 19.31 w/my hacks
      (fset 'url-process-put 'process-put))
     (nativep
      (fset 'url-process-put 'put))
     (t
      (defun url-process-put (proc prop val)
        (let ((node (assq proc url-process-lookup-table)))
          (if (not node)
              (setq url-process-lookup-table (cons (cons proc (list prop val))
                                                   url-process-lookup-table))
            (setcdr node (plist-put (cdr node) prop val)))))))
    (and (processp x) (delete-process x))))

(defun url-gc-process-lookup-table ()
  (let (new)
    (while url-process-lookup-table
      (if (not (memq (process-status (caar url-process-lookup-table))
                     '(stop closed nil)))
          (setq new (cons (car url-process-lookup-table) new)))
      (setq url-process-lookup-table (cdr url-process-lookup-table)))
    (setq url-process-lookup-table new)))

(defun url-list-processes ()
  (interactive)
  (url-gc-process-lookup-table)
  (let ((processes (process-list))
        proc len type)
    (set-buffer (get-buffer-create "URL Status Display"))
    (display-buffer (current-buffer))
    (erase-buffer)
    (insert
     (eval-when-compile (format "%-40s%-10s%-25s" "URL" "Size" "Type")) "\n"
     (eval-when-compile (make-string 75 ?-)) "\n")
    (while processes
      (setq proc (car processes)
            processes (cdr processes))
      (if (url-process-get proc 'url)
          (progn
            (save-excursion
              (set-buffer (process-buffer proc))
              (setq len url-current-content-length
                    type url-current-mime-type))
            (insert
             (format "%-40s%-10d%-25s" (url-process-get proc 'url)
                     len type)))))))


;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; file-name-handler stuff calls this
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

(defun url-have-visited-url (url &rest args)
  "Return non-nil iff the user has visited URL before.
The return value is a cons of the url and the date last accessed as a string"
  (url-gethash url url-global-history-hash-table))

(defun url-directory-files (url &rest args)
  "Return a list of files on a server."
  nil)

(defun url-file-writable-p (url &rest args)
  "Return t iff a url is writable by this user"
  nil)

(defun url-copy-file (url &rest args)
  "Copy a url to the specified filename."
  nil)

(defun url-file-directly-accessible-p (url)
  "Returns t iff the specified URL is directly accessible
on your filesystem.  (nfs, local file, etc)."
  (let* ((urlobj (if (vectorp url) url (url-generic-parse-url url)))
         (type (url-type urlobj)))
    (and (member type '("file" "ftp"))
         (not (url-host urlobj)))))

;;;###autoload
(defun url-file-attributes (url &rest args)
  "Return a list of attributes of URL.
Value is nil if specified file cannot be opened.
Otherwise, list elements are:
 0. t for directory, string (name linked to) for symbolic link, or nil.
 1. Number of links to file.
 2. File uid.
 3. File gid.
 4. Last access time, as a list of two integers.
    First integer has high-order 16 bits of time, second has low 16 bits.
 5. Last modification time, likewise.
 6. Last status change time, likewise.
 7. Size in bytes. (-1, if number is out of range).
 8. File modes, as a string of ten letters or dashes as in ls -l.
    If URL is on an http server, this will return the content-type if possible.
 9. t iff file's gid would change if file were deleted and recreated.
10. inode number.
11. Device number.

If file does not exist, returns nil."
  (and url
       (let* ((urlobj (url-generic-parse-url url))
              (type (url-type urlobj))
              (url-automatic-caching nil)
              (data nil)
              (exists nil))
         (cond
          ((equal type "http")
           (cond
            ((not url-be-anal-about-file-attributes)
             (setq data (list
                         (url-file-directory-p url) ; Directory
                         1              ; number of links to it
                         0              ; UID
                         0              ; GID
                         (cons 0 0)     ; Last access time
                         (cons 0 0)     ; Last mod. time
                         (cons 0 0)     ; Last status time
                         -1             ; file size
                         (mm-extension-to-mime
                          (url-file-extension (url-filename urlobj)))
                         nil            ; gid would change
                         0              ; inode number
                         0              ; device number
                         )))
            (t                          ; HTTP/1.0, use HEAD
             (let ((url-request-method "HEAD")
                   (url-request-data nil)
                   (url-working-buffer " *url-temp*"))
               (save-excursion
                 (condition-case ()
                     (progn
                       (url-retrieve url)
                       (setq data (and
                                   (setq exists
                                         (cdr
                                          (assoc "status"
                                                 url-current-mime-headers)))
                                   (>= exists 200)
                                   (< exists 300)
                                   (list
                                    (url-file-directory-p url) ; Directory
                                    1          ; links to
                                    0          ; UID
                                    0          ; GID
                                    (cons 0 0) ; Last access time
                                    (cons 0 0) ; Last mod. time
                                    (cons 0 0) ; Last status time
                                    (or        ; Size in bytes
                                     (cdr (assoc "content-length"
                                                 url-current-mime-headers))
                                     -1)
                                    (or
                                     (cdr (assoc "content-type"
                                                 url-current-mime-headers))
                                     (mm-extension-to-mime
                                      (url-file-extension
                                       (url-filename urlobj)))) ; content-type
                                    nil        ; gid would change
                                    0          ; inode number
                                    0          ; device number
                                    ))))
                   (error nil))
                 (and (not data)
                      (setq data (list (url-file-directory-p url)
                                       1 0 0 (cons 0 0) (cons 0 0) (cons 0 0)
                                       -1 (mm-extension-to-mime
                                           (url-file-extension
                                            url-current-file))
                                       nil 0 0)))
                 (kill-buffer " *url-temp*"))))))
          ((member type '("ftp" "file"))
           (let ((fname (if (url-host urlobj)
                            (concat "/"
                                    (if (url-user urlobj)
                                        (concat (url-user urlobj) "@")
                                      "")
                                    (url-host urlobj) ":"
                                    (url-filename urlobj))
                          (url-filename urlobj))))
             (setq data (or (file-attributes fname) (make-list 12 nil)))
             (setcar (cdr (cdr (cdr (cdr (cdr (cdr (cdr (cdr data))))))))
                     (mm-extension-to-mime (url-file-extension fname)))))
          (t nil))
         data)))

(defun url-file-name-all-completions (file dirname &rest args)
  "Return a list of all completions of file name FILE in directory DIR.
These are all file names in directory DIR which begin with FILE."
  ;; need to rewrite
  )

(defun url-file-name-completion (file dirname &rest args)
  "Complete file name FILE in directory DIR.
Returns the longest string
common to all filenames in DIR that start with FILE.
If there is only one and FILE matches it exactly, returns t.
Returns nil if DIR contains no name starting with FILE."
  (apply 'url-file-name-all-completions file dirname args))

(defun url-file-local-copy (file &rest args)
  "Copy the file FILE into a temporary file on this machine.
Returns the name of the local copy, or nil, if FILE is directly
accessible."
  nil)

(defun url-insert-file-contents (url &rest args)
  "Insert the contents of the URL in this buffer."
  (interactive "sURL: ")
  (save-excursion
    (let ((old-asynch url-be-asynchronous))
      (setq-default url-be-asynchronous nil)
      (url-retrieve url)
      (setq-default url-be-asynchronous old-asynch)))
  (insert-buffer url-working-buffer)
  (setq buffer-file-name url)
  (kill-buffer url-working-buffer))

(defun url-file-directory-p (url &rest args)
  "Return t iff a url points to a directory"
  (equal (substring url -1 nil) "/"))
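;;
;; The directory test is purely syntactic, e.g.:
;;
;;   (url-file-directory-p "http://www.spry.com/foo/")  ; => t
;;   (url-file-directory-p "http://www.spry.com/foo")   ; => nil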

(defun url-file-exists (url &rest args)
  "Return t iff a file exists."
  (let* ((urlobj (url-generic-parse-url url))
         (type (url-type urlobj))
         (exists nil))
    (cond
     ((equal type "http")               ; use head
      (let ((url-request-method "HEAD")
            (url-request-data nil)
            (url-working-buffer " *url-temp*"))
        (save-excursion
          (url-retrieve url)
          (setq exists (or (cdr
                            (assoc "status" url-current-mime-headers)) 500))
          (kill-buffer " *url-temp*")
          (setq exists (and (>= exists 200) (< exists 300))))))
     ((member type '("ftp" "file"))     ; file-attributes
      (let ((fname (if (url-host urlobj)
                       (concat "/"
                               (if (url-user urlobj)
                                   (concat (url-user urlobj) "@")
                                 "")
                               (url-host urlobj) ":"
                               (url-filename urlobj))
                     (url-filename urlobj))))
        (setq exists (file-exists-p fname))))
     (t nil))
    exists))

;;;###autoload
(defun url-normalize-url (url)
  "Return a 'normalized' version of URL.  This strips out default port
numbers, etc."
  (let (type data grok retval)
    (setq data (url-generic-parse-url url)
          type (url-type data))
    (if (member type '("www" "about" "mailto" "mailserver" "info"))
        (setq retval url)
      (setq retval (url-recreate-url data)))
    retval))
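;;
;; A sketch of the intent; the exact output depends on url-recreate-url
;; and the defaults registered in url-do-setup:
;;
;;   (url-normalize-url "http://www.spry.com:80/foo")
;;   ;; expected to drop the default port, e.g. "http://www.spry.com/foo"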

;;;###autoload
(defun url-buffer-visiting (url)
  "Return the name of a buffer (if any) that is visiting URL."
  (setq url (url-normalize-url url))
  (let ((bufs (buffer-list))
        (found nil))
    (if (condition-case ()
            (string-match "\\(.*\\)#" url)
          (error nil))
        (setq url (url-match url 1)))
    (while (and bufs (not found))
      (save-excursion
        (set-buffer (car bufs))
        (setq found (if (and
                         (not (equal (buffer-name (car bufs))
                                     url-working-buffer))
                         (memq major-mode '(url-mode w3-mode))
                         (equal (url-view-url t) url)) (car bufs) nil)
              bufs (cdr bufs))))
    found))

(defun url-file-size (url &rest args)
  "Return the size of a file in bytes, or -1 if can't be determined."
  (let* ((urlobj (url-generic-parse-url url))
         (type (url-type urlobj))
         (size -1)
         (data nil))
    (cond
     ((equal type "http")               ; use head
      (let ((url-request-method "HEAD")
            (url-request-data nil)
            (url-working-buffer " *url-temp*"))
        (save-excursion
          (url-retrieve url)
          (setq size (or (cdr
                          (assoc "content-length" url-current-mime-headers))
                         -1))
          (kill-buffer " *url-temp*"))))
     ((member type '("ftp" "file"))     ; file-attributes
      (let ((fname (if (url-host urlobj)
                       (concat "/"
                               (if (url-user urlobj)
                                   (concat (url-user urlobj) "@")
                                 "")
                               (url-host urlobj) ":"
                               (url-filename urlobj))
                     (url-filename urlobj))))
        (setq data (file-attributes fname)
              size (nth 7 data))))
     (t nil))
    (cond
     ((stringp size) (string-to-int size))
     ((integerp size) size)
     ((null size) -1)
     (t -1))))

(defun url-generate-new-buffer-name (start)
  "Create a new buffer name based on START."
  (let ((x 1)
        name)
    (if (not (get-buffer start))
        start
      (progn
        (setq name (format "%s<%d>" start x))
        (while (get-buffer name)
          (setq x (1+ x)
                name (format "%s<%d>" start x)))
        name))))
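;;
;; Example: if a buffer named "*w3*" already exists,
;;
;;   (url-generate-new-buffer-name "*w3*")  ; => "*w3*<1>"
;;
;; and if "*w3*<1>" is also taken, "*w3*<2>", and so on.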

(defun url-generate-unique-filename (&optional fmt)
  "Generate a unique filename in url-temporary-directory"
  (if (not fmt)
      (let ((base (format "url-tmp.%d" (user-real-uid)))
            (fname "")
            (x 0))
        (setq fname (format "%s%d" base x))
        (while (file-exists-p (expand-file-name fname url-temporary-directory))
          (setq x (1+ x)
                fname (concat base (int-to-string x))))
        (expand-file-name fname url-temporary-directory))
    (let ((base (concat "url" (int-to-string (user-real-uid))))
          (fname "")
          (x 0))
      (setq fname (format fmt (concat base (int-to-string x))))
      (while (file-exists-p (expand-file-name fname url-temporary-directory))
        (setq x (1+ x)
              fname (format fmt (concat base (int-to-string x)))))
      (expand-file-name fname url-temporary-directory))))

(defun url-lazy-message (&rest args)
  "Just like `message', but is a no-op if called more than once a second.
Will not do anything if url-show-status is nil."
  (if (or (null url-show-status)
          (= url-lazy-message-time
             (setq url-lazy-message-time (nth 1 (current-time)))))
      nil
    (apply 'message args)))


;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; Gateway Support
;;; ---------------
;;; Fairly good/complete gateway support
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defun url-kill-process (proc)
  "Kill the process PROC - knows about all the various gateway types,
and acts accordingly."
  (cond
   ((eq url-gateway-method 'native) (delete-process proc))
   ((eq url-gateway-method 'program) (kill-process proc))
   (t (error "Unknown url-gateway-method %s" url-gateway-method))))

(defun url-accept-process-output (proc)
  "Allow any pending output from subprocesses to be read by Emacs.
It is read into the process' buffers or given to their filter functions.
Where possible, this will not exit until some output is received from PROC,
or 1 second has elapsed."
  (accept-process-output proc 1))

(defun url-process-status (proc)
  "Return the process status of a url buffer"
  (cond
   ((memq url-gateway-method '(native ssl program)) (process-status proc))
   (t (error "Unknown url-gateway-method %s" url-gateway-method))))

(defun url-open-stream (name buffer host service)
  "Open a stream to a host"
  (let ((tmp-gateway-method (if (and url-gateway-local-host-regexp
                                     (not (eq 'ssl url-gateway-method))
                                     (string-match
                                      url-gateway-local-host-regexp
                                      host))
                                'native
                              url-gateway-method))
        (tcp-binary-process-output-services (if (stringp service)
                                                (list service)
                                              (list service
                                                    (int-to-string service)))))
    (and (eq url-gateway-method 'tcp)
         (require 'tcp)
         (setq url-gateway-method 'native
               tmp-gateway-method 'native))
    (cond
     ((eq tmp-gateway-method 'ssl)
      (open-ssl-stream name buffer host service))
     ((eq tmp-gateway-method 'native)
      (if url-broken-resolution
          (setq host
                (cond
                 ((featurep 'ange-ftp) (ange-ftp-nslookup-host host))
                 ((featurep 'efs) (efs-nslookup-host host))
                 ((featurep 'efs-auto) (efs-nslookup-host host))
                 (t host))))
      (let ((max-retries url-connection-retries)
            (cur-retries 0)
            (retry t)
            (errobj nil)
            (conn nil))
        (while (and (not conn) retry)
          (condition-case errobj
              (setq conn (open-network-stream name buffer host service))
            (error
             (url-save-error errobj)
             (save-window-excursion
               (save-excursion
                 (switch-to-buffer-other-window " *url-error*")
                 (shrink-window-if-larger-than-buffer)
                 (goto-char (point-min))
                 (if (and (re-search-forward "in use" nil t)
                          (< cur-retries max-retries))
                     (progn
                       (setq retry t
                             cur-retries (1+ cur-retries))
                       (sleep-for 0.5))
                   (setq cur-retries 0
                         retry (funcall url-confirmation-func
                                        (concat "Connection to " host
                                                " failed, retry? "))))
                 (kill-buffer (current-buffer)))))))
        (if conn
            (progn
              (if (featurep 'mule)
                  (save-excursion
                    (set-buffer (get-buffer-create buffer))
                    (setq mc-flag nil)
                    (if (not url-running-xemacs)
                        (set-process-coding-system conn *noconv* *noconv*)
                      (set-process-input-coding-system conn 'noconv)
                      (set-process-output-coding-system conn 'noconv))))
              conn)
          (error "Unable to connect to %s:%s" host service))))
     ((eq tmp-gateway-method 'program)
      (let ((proc (start-process name buffer url-gateway-telnet-program host
                                 (int-to-string service)))
            (tmp nil))
        (save-excursion
          (set-buffer buffer)
          (setq tmp (point))
          (while (not (progn
                        (goto-char (point-min))
                        (re-search-forward
                         url-gateway-telnet-ready-regexp nil t)))
            (url-accept-process-output proc))
          (delete-region tmp (point))
          (goto-char (point-min))
          (if (re-search-forward "connect:" nil t)
              (progn
                (condition-case ()
                    (delete-process proc)
                  (error nil))
                (url-replace-regexp ".*connect:.*" "")
                nil)
            proc))))
     (t (error "Unknown url-gateway-method %s" url-gateway-method)))))


;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; Miscellaneous functions
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defun url-setup-privacy-info ()
  (interactive)
  (setq url-system-type
        (cond
         ((or (eq url-privacy-level 'paranoid)
              (and (listp url-privacy-level)
                   (memq 'os url-privacy-level)))
          nil)
         ((eq system-type 'Apple-Macintosh) "Macintosh")
         ((eq system-type 'next-mach) "NeXT")
         ((eq system-type 'windows-nt) "Windows-NT; 32bit")
         ((eq system-type 'ms-windows) "Windows; 16bit")
         ((eq system-type 'ms-dos) "MS-DOS; 32bit")
         ((and (eq system-type 'vax-vms) (device-type))
          "VMS; X11")
         ((eq system-type 'vax-vms) "VMS; TTY")
         ((eq (device-type) 'x) "X11")
         ((eq (device-type) 'ns) "NeXTStep")
         ((eq (device-type) 'pm) "OS/2")
         ((eq (device-type) 'win32) "Windows; 32bit")
         ((eq (device-type) 'tty) "(Unix?); TTY")
         (t "UnknownPlatform")))

  ;; Set up the entity definition for PGP and PEM authentication
  (setq url-pgp/pem-entity (or url-pgp/pem-entity
                               user-mail-address
                               (format "%s@%s" (user-real-login-name)
                                       (system-name))))

  (setq url-personal-mail-address (or url-personal-mail-address
                                      url-pgp/pem-entity
                                      user-mail-address))

  (if (or (memq url-privacy-level '(paranoid high))
          (and (listp url-privacy-level)
               (memq 'email url-privacy-level)))
      (setq url-personal-mail-address nil))

  (if (or (eq url-privacy-level 'paranoid)
          (and (listp url-privacy-level)
               (memq 'os url-privacy-level)))
      (setq url-os-type nil)
    (let ((vers (emacs-version)))
      (if (string-match "(\\([^, )]+\\))$" vers)
          (setq url-os-type (url-match vers 1))
        (setq url-os-type (symbol-name system-type))))))

(defun url-handle-no-scheme (url)
  (let ((temp url-registered-protocols)
        (found nil))
    (while (and temp (not found))
      (if (and (not (member (car (car temp)) '("auto" "www")))
               (string-match (concat "^" (car (car temp)) "\\.")
                             url))
          (setq found t)
        (setq temp (cdr temp))))
    (cond
     (found                             ; Found something like ftp.spry.com
      (url-retrieve (concat (car (car temp)) "://" url)))
     ((string-match "^www\\." url)
      (url-retrieve (concat "http://" url)))
     ((string-match "\\(\\.[^\\.]+\\)\\(\\.[^\\.]+\\)" url)
      ;; Ok, we have at least two dots in the filename, just stick http on it
      (url-retrieve (concat "http://" url)))
     (t
      (url-retrieve (concat "http://www." url ".com"))))))
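;;
;; A few examples of how scheme-less URLs are expected to be expanded here,
;; given the protocols registered in url-do-setup (hostnames are only
;; illustrative):
;;
;;   "ftp.spry.com"        retrieves "ftp://ftp.spry.com"
;;   "www.cs.indiana.edu"  retrieves "http://www.cs.indiana.edu"
;;   "spry"                retrieves "http://www.spry.com"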

(defun url-setup-save-timer ()
  "Reset the history list timer."
  (interactive)
  (cond
   ((featurep 'itimer)
    (if (get-itimer "url-history-saver")
        (delete-itimer (get-itimer "url-history-saver")))
    (start-itimer "url-history-saver" 'url-write-global-history
                  url-global-history-save-interval
                  url-global-history-save-interval))
   ((fboundp 'run-at-time)
    (run-at-time url-global-history-save-interval
                 url-global-history-save-interval
                 'url-write-global-history))
   (t nil)))

(defvar url-download-minor-mode nil)

(defun url-download-minor-mode (on)
  (setq url-download-minor-mode (if on
                                    (1+ (or url-download-minor-mode 0))
                                  (1- (or url-download-minor-mode 1))))
  (if (<= url-download-minor-mode 0)
      (setq url-download-minor-mode nil)))

(defun url-do-setup ()
  "Do setup - this is to avoid conflict with user settings when URL is
dumped with emacs."
  (if url-setup-done
      nil

    (add-minor-mode 'url-download-minor-mode " Webbing" nil)
    ;; Decide what type of process-get to use
    ;(url-setup-process-get)

    ;; Make OS/2 happy
    (setq tcp-binary-process-input-services
          (append '("http" "80")
                  tcp-binary-process-input-services))

    ;; Register all the protocols we can handle
    (url-register-protocol 'file)
    (url-register-protocol 'ftp nil nil "21")
    (url-register-protocol 'gopher nil nil "70")
    (url-register-protocol 'http nil nil "80")
    (url-register-protocol 'https nil nil "443")
    (url-register-protocol 'nfs nil nil "2049")
    (url-register-protocol 'info nil 'url-identity-expander)
    (url-register-protocol 'mailserver nil 'url-identity-expander)
    (url-register-protocol 'finger nil 'url-identity-expander "79")
    (url-register-protocol 'mailto nil 'url-identity-expander)
    (url-register-protocol 'news nil 'url-identity-expander "119")
    (url-register-protocol 'nntp nil 'url-identity-expander "119")
    (url-register-protocol 'irc nil 'url-identity-expander "6667")
    (url-register-protocol 'rlogin)
    (url-register-protocol 'shttp nil nil "80")
    (url-register-protocol 'telnet)
    (url-register-protocol 'tn3270)
    (url-register-protocol 'wais)
    (url-register-protocol 'x-exec)
    (url-register-protocol 'proxy)
    (url-register-protocol 'auto 'url-handle-no-scheme)

    ;; Register all the authentication schemes we can handle
    (url-register-auth-scheme "basic" nil 4)
    (url-register-auth-scheme "digest" nil 7)

    ;; Filename handler stuff for emacsen that support it
    (url-setup-file-name-handlers)

    (setq url-cookie-file
          (or url-cookie-file
              (expand-file-name "~/.w3cookies")))

    (setq url-global-history-file
          (or url-global-history-file
              (and (memq system-type '(ms-dos ms-windows))
                   (expand-file-name "~/mosaic.hst"))
              (and (memq system-type '(axp-vms vax-vms))
                   (expand-file-name "~/mosaic.global-history"))
              (condition-case ()
                  (expand-file-name "~/.mosaic-global-history")
                (error nil))))

    ;; Parse the global history file if it exists, so that it can be used
    ;; for URL completion, etc.
    (if (and url-global-history-file
             (file-exists-p url-global-history-file))
        (url-parse-global-history))

    ;; Setup save timer
    (and url-global-history-save-interval (url-setup-save-timer))

    (if (and url-cookie-file
             (file-exists-p url-cookie-file))
        (url-cookie-parse-file url-cookie-file))

    ;; Read in proxy gateways
    (let ((noproxy (and (not (assoc "no_proxy" url-proxy-services))
                        (or (getenv "NO_PROXY")
                            (getenv "no_PROXY")
                            (getenv "no_proxy")))))
      (if noproxy
          (setq url-proxy-services
                (cons (cons "no_proxy"
                            (concat "\\("
                                    (mapconcat
                                     (function
                                      (lambda (x)
                                        (cond
                                         ((= x ?,) "\\|")
                                         ((= x ? ) "")
                                         ((= x ?.) (regexp-quote "."))
                                         ((= x ?*) ".*")
                                         ((= x ??) ".")
                                         (t (char-to-string x)))))
                                     noproxy "") "\\)"))
                      url-proxy-services))))

    ;; Set up url-use-transparent with decent defaults
    (if (not (eq (device-type) 'tty))
        (setq url-use-transparent nil))
    (and url-use-transparent (require 'transparent))

    ;; Set the password entry function based on user defaults or guess
    ;; based on which remote-file-access package they are using.
    (cond
     (url-passwd-entry-func nil)        ; Already been set
     ((boundp 'read-passwd)             ; Use secure password if available
      (setq url-passwd-entry-func 'read-passwd))
     ((or (featurep 'efs)               ; Using EFS
          (featurep 'efs-auto))         ; or autoloading efs
      (if (not (fboundp 'read-passwd))
          (autoload 'read-passwd "passwd" "Read in a password" nil))
      (setq url-passwd-entry-func 'read-passwd))
     ((or (featurep 'ange-ftp)          ; Using ange-ftp
          (and (boundp 'file-name-handler-alist)
               (not (string-match "Lucid" (emacs-version)))))
      (setq url-passwd-entry-func 'ange-ftp-read-passwd))
     (t
      (url-warn 'security
                "Can't determine how to read passwords, winging it.")))

    ;; Set up the news service if they haven't done so
    (setq url-news-server
          (cond
           (url-news-server url-news-server)
           ((and (boundp 'gnus-default-nntp-server)
                 (not (equal "" gnus-default-nntp-server)))
            gnus-default-nntp-server)
           ((and (boundp 'gnus-nntp-server)
                 (not (null gnus-nntp-server))
                 (not (equal "" gnus-nntp-server)))
            gnus-nntp-server)
           ((and (boundp 'nntp-server-name)
                 (not (null nntp-server-name))
                 (not (equal "" nntp-server-name)))
            nntp-server-name)
           ((getenv "NNTPSERVER") (getenv "NNTPSERVER"))
           (t "news")))

    ;; Set up the MIME accept string if they haven't got it hardcoded yet
    (or url-mime-accept-string
        (setq url-mime-accept-string (url-parse-viewer-types)))
    (or url-mime-encoding-string
        (setq url-mime-encoding-string
              (mapconcat 'car
                         mm-content-transfer-encodings
                         ", ")))

    (url-setup-privacy-info)
    (run-hooks 'url-load-hook)
    (setq url-setup-done t)))

(defun url-cache-file-writable-p (file)
  "Follows the documentation of file-writable-p, unlike file-writable-p."
  (and (file-writable-p file)
       (if (file-exists-p file)
           (not (file-directory-p file))
         (file-directory-p (file-name-directory file)))))

(defun url-prepare-cache-for-file (file)
  "Makes it possible to cache data in FILE.
Creates any necessary parent directories, deleting any non-directory files
that would stop this.  Returns nil if parent directories can not be
created.  If FILE already exists as a non-directory, it changes
permissions of FILE or deletes FILE to make it possible to write a new
version of FILE.  Returns nil if this can not be done.  Returns nil if
FILE already exists as a directory.  Otherwise, returns t, indicating that
FILE can be created or overwritten."

  ;; COMMENT: We don't delete directories because that requires
  ;; recursively deleting the directories' contents, which might
  ;; eliminate a substantial portion of the cache.

  (cond
   ((url-cache-file-writable-p file)
    t)
   ((file-directory-p file)
    nil)
   (t
    (catch 'upcff-tag
      (let ((dir (file-name-directory file))
            dir-parent dir-last-component)
        (if (string-equal dir file)
            ;; *** Should I have a warning here?
            ;; FILE must match a pattern like /foo/bar/, indicating it is a
            ;; name only suitable for a directory.  So presume we won't be
            ;; able to overwrite FILE and return nil.
            (throw 'upcff-tag nil))

        ;; Make sure the containing directory exists, or throw a failure
        ;; if we can't create it.
        (if (file-directory-p dir)
            nil
          (or (fboundp 'make-directory)
              (throw 'upcff-tag nil))
          (make-directory dir t)
          ;; make-directory silently fails if there is an obstacle, so
          ;; we must verify its results.
          (if (file-directory-p dir)
              nil
            ;; Look at prefixes of the path to find the obstacle that is
            ;; stopping us from making the directory.  Unfortunately, there
            ;; is no portable function in Emacs to find the parent directory
            ;; of a *directory*.  So this code may not work on VMS.
            (while (progn
                     (if (eq ?/ (aref dir (1- (length dir))))
                         (setq dir (substring dir 0 -1))
                       ;; Maybe we're on VMS where the syntax is different.
                       (throw 'upcff-tag nil))
                     (setq dir-parent (file-name-directory dir))
                     (not (file-directory-p dir-parent)))
              (setq dir dir-parent))
            ;; We have found the longest path prefix that exists as a
            ;; directory.  Deal with any obstacles in this directory.
            (if (file-exists-p dir)
                (condition-case nil
                    (delete-file dir)
                  (error (throw 'upcff-tag nil))))
            (if (file-exists-p dir)
                (throw 'upcff-tag nil))
            ;; Try making the directory again.
            (setq dir (file-name-directory file))
            (make-directory dir t)
            (or (file-directory-p dir)
                (throw 'upcff-tag nil))))

        ;; The containing directory exists.  Let's see if there is
        ;; something in the way in this directory.
        (if (url-cache-file-writable-p file)
            (throw 'upcff-tag t)
          (condition-case nil
              (delete-file file)
            (error (throw 'upcff-tag nil))))

        ;; The return value, if we get this far.
        (url-cache-file-writable-p file))))))

(defun url-store-in-cache (&optional buff)
  "Store buffer BUFF in the cache"
  (if (or (not (get-buffer buff))
          (member url-current-type '("www" "about" "https" "shttp"
                                     "news" "mailto"))
          (and (member url-current-type '("file" "ftp" nil))
               (not url-current-server))
          )
      nil
    (save-excursion
      (and buff (set-buffer buff))
      (let* ((fname (url-create-cached-filename (url-view-url t)))
             (fname-hdr (concat (if (memq system-type '(ms-windows ms-dos os2))
                                    (url-file-extension fname t)
                                  fname) ".hdr"))
             (info (mapcar (function (lambda (var)
                                       (cons (symbol-name var)
                                             (symbol-value var))))
                           '( url-current-content-length
                              url-current-file
                              url-current-isindex
                              url-current-mime-encoding
                              url-current-mime-headers
                              url-current-mime-type
                              url-current-port
                              url-current-server
                              url-current-type
                              url-current-user
                              ))))
        (cond ((and (url-prepare-cache-for-file fname)
                    (url-prepare-cache-for-file fname-hdr))
               (write-region (point-min) (point-max) fname nil 5)
               (set-buffer (get-buffer-create " *cache-tmp*"))
               (erase-buffer)
               (insert "(setq ")
               (mapcar
                (function
                 (lambda (x)
                   (insert (car x) " "
                           (cond ((null (setq x (cdr x))) "nil")
                                 ((stringp x) (prin1-to-string x))
                                 ((listp x) (concat "'" (prin1-to-string x)))
                                 ((numberp x) (int-to-string x))
                                 (t "'???")) "\n")))
                info)
               (insert ")\n")
               (write-region (point-min) (point-max) fname-hdr nil 5)))))))


(defun url-is-cached (url)
  "Return non-nil if the URL is cached."
  (let* ((fname (url-create-cached-filename url))
         (attribs (file-attributes fname)))
    (and fname                          ; got a filename
         (file-exists-p fname)          ; file exists
         (not (eq (nth 0 attribs) t))   ; Its not a directory
         (nth 5 attribs))))             ; Can get last mod-time

(defun url-create-cached-filename-using-md5 (url)
  (if url
      (expand-file-name (md5 url)
                        (concat url-temporary-directory "/"
                                (user-real-login-name)))))

(defun url-create-cached-filename (url)
  "Return a filename in the local cache for URL"
  (if url
      (let* ((url url)
             (urlobj (if (vectorp url)
                         url
                       (url-generic-parse-url url)))
             (protocol (url-type urlobj))
             (hostname (url-host urlobj))
             (host-components
              (cons
               (user-real-login-name)
               (cons (or protocol "file")
                     (nreverse
                      (delq nil
                            (mm-string-to-tokens
                             (or hostname "localhost") ?.))))))
             (fname (url-filename urlobj)))
        (if (and fname (/= (length fname) 0) (= (aref fname 0) ?/))
            (setq fname (substring fname 1 nil)))
        (if fname
            (let ((slash nil))
              (setq fname
                    (mapconcat
                     (function
                      (lambda (x)
                        (cond
                         ((and (= ?/ x) slash)
                          (setq slash nil)
                          "%2F")
                         ((= ?/ x)
                          (setq slash t)
                          "/")
                         (t
                          (setq slash nil)
                          (char-to-string x))))) fname ""))))

        (if (and fname (memq system-type '(ms-windows ms-dos windows-nt))
                 (string-match "\\([A-Za-z]\\):[/\\]" fname))
            (setq fname (concat (url-match fname 1) "/"
                                (substring fname (match-end 0)))))

        (setq fname (and fname
                         (mapconcat
                          (function (lambda (x)
                                      (if (= x ?~) "" (char-to-string x))))
                          fname ""))
              fname (cond
                     ((null fname) nil)
                     ((or (string= "" fname) (string= "/" fname))
                      url-directory-index-file)
                     ((= (string-to-char fname) ?/)
                      (if (string= (substring fname -1 nil) "/")
                          (concat fname url-directory-index-file)
                        (substring fname 1 nil)))
                     (t
                      (if (string= (substring fname -1 nil) "/")
                          (concat fname url-directory-index-file)
                        fname))))

        ;; Honor hideous 8.3 filename limitations on dos and windows
        ;; we don't have to worry about this in Windows NT/95 (or OS/2?)
        (if (and fname (memq system-type '(ms-windows ms-dos)))
            (let ((base (url-file-extension fname t))
                  (ext (url-file-extension fname nil)))
              (setq fname (concat (substring base 0 (min 8 (length base)))
                                  (substring ext 0 (min 4 (length ext)))))
              (setq host-components
                    (mapcar
                     (function
                      (lambda (x)
                        (if (> (length x) 8)
                            (concat
                             (substring x 0 8) "."
                             (substring x 8 (min (length x) 11)))
                          x)))
                     host-components))))

        (and fname
             (expand-file-name fname
                               (expand-file-name
                                (mapconcat 'identity host-components "/")
                                url-temporary-directory))))))

(defun url-extract-from-cache (fnam)
  "Extract FNAM from the local disk cache"
  (set-buffer (get-buffer-create url-working-buffer))
  (erase-buffer)
  (setq url-current-mime-viewer nil)
  (insert-file-contents-literally fnam)
  (load (concat (if (memq system-type '(ms-windows ms-dos os2))
                    (url-file-extension fnam t)
                  fnam) ".hdr") t t))

;;;###autoload
(defun url-get-url-at-point (&optional pt)
  "Get the URL closest to point, but don't change your
position.  Has a preference for looking backward when not
directly on a symbol."
  ;; Not at all perfect - point must be right in the name.
  (save-excursion
    (if pt (goto-char pt))
    (let ((filename-chars "%.?@a-zA-Z0-9---()_/:~=&") start url)
      (save-excursion
        ;; first see if you're just past a filename
        (if (not (eobp))
            (if (looking-at "[] \t\n[{}()]") ; whitespace or some parens
                (progn
                  (skip-chars-backward " \n\t\r({[]})")
                  (if (not (bobp))
                      (backward-char 1)))))
        (if (string-match (concat "[" filename-chars "]")
                          (char-to-string (following-char)))
            (progn
              (skip-chars-backward filename-chars)
              (setq start (point))
              (skip-chars-forward filename-chars))
          (setq start (point)))
        (setq url (if (fboundp 'buffer-substring-no-properties)
                      (buffer-substring-no-properties start (point))
                    (buffer-substring start (point)))))
      (if (string-match "^URL:" url)
          (setq url (substring url 4 nil)))
      (if (string-match "\\.$" url)
          (setq url (substring url 0 -1)))
      (if (not (string-match url-nonrelative-link url))
          (setq url nil))
      url)))
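;;
;; For example, with point anywhere inside the URL text of
;; "<URL:http://www.spry.com/>" in a buffer, (url-get-url-at-point) should
;; return "http://www.spry.com/", provided that string matches
;; url-nonrelative-link.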

(defun url-eat-trailing-space (x)
  ;; Remove spaces/tabs at the end of a string
  (let ((y (1- (length x)))
        (skip-chars (list ? ?\t ?\n)))
    (while (and (>= y 0) (memq (aref x y) skip-chars))
      (setq y (1- y)))
    (substring x 0 (1+ y))))

(defun url-strip-leading-spaces (x)
  ;; Remove spaces at the front of a string
  (let ((y (1- (length x)))
        (z 0)
        (skip-chars (list ? ?\t ?\n)))
    (while (and (<= z y) (memq (aref x z) skip-chars))
      (setq z (1+ z)))
    (substring x z nil)))
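;;
;; Examples:
;;
;;   (url-eat-trailing-space "foo   ")     ; => "foo"
;;   (url-strip-leading-spaces "   foo")   ; => "foo"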

(defun url-convert-newlines-to-spaces (x)
  "Convert newlines and carriage returns embedded in a string into spaces,
and swallow following whitespace.
The argument is not side-effected, but may be returned by this function."
  (if (string-match "[\n\r]+\\s-*" x)	; [\\n\\r\\t ]
      (concat (substring x 0 (match-beginning 0)) " "
	      (url-convert-newlines-to-spaces
	       (substring x (match-end 0))))
    x))

;; Test cases
;; (url-convert-newlines-to-spaces "foo bar")	     ; nothing happens
;; (url-convert-newlines-to-spaces "foo\n \t bar")   ; whitespace converted
;;
;; This implementation doesn't mangle the match-data, is fast, and doesn't
;; create garbage, but it leaves whitespace.
;; (defun url-convert-newlines-to-spaces (x)
;;   "Convert newlines and carriage returns embedded in a string into spaces.
;; The string is side-effected, then returned."
;;   (let ((i 0)
;;         (limit (length x)))
;;     (while (< i limit)
;;       (if (or (= ?\n (aref x i))
;;               (= ?\r (aref x i)))
;;           (aset x i ? ))
;;       (setq i (1+ i)))
;;     x))

(defun url-expand-file-name (url &optional default)
  "Convert URL to a fully specified URL, and canonicalize it.
Second arg DEFAULT is a URL to start with if URL is relative.
If DEFAULT is nil or missing, the current buffer's URL is used.
Path components that are `.' are removed, and
path components followed by `..' are removed, along with the `..' itself."
  (if url
      (setq url (mapconcat (function (lambda (x)
				       (if (= x ?\n) "" (char-to-string x))))
			   (url-strip-leading-spaces
			    (url-eat-trailing-space url)) "")))
  (cond
   ((null url) nil)			; Something hosed!  Be graceful
   ((string-match "^#" url)		; Offset link, use it raw
    url)
   (t
    (let* ((urlobj (url-generic-parse-url url))
	   (inhibit-file-name-handlers t)
	   (defobj (cond
		    ((vectorp default) default)
		    (default (url-generic-parse-url default))
		    ((and (null default) url-current-object)
		     url-current-object)
		    (t (url-generic-parse-url (url-view-url t)))))
	   (expander (cdr-safe
		      (cdr-safe
		       (assoc (or (url-type urlobj)
				  (url-type defobj))
			      url-registered-protocols)))))
      (if (string-match "^//" url)
	  (setq urlobj (url-generic-parse-url (concat (url-type defobj) ":"
						      url))))
      (if (fboundp expander)
	  (funcall expander urlobj defobj)
	(message "Unknown URL scheme: %s" (or (url-type urlobj)
					      (url-type defobj)))
	(url-identity-expander urlobj defobj))
      (url-recreate-url urlobj)))))
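
;; Illustrative example (not from the original source).  Assuming the
;; "http" type resolves to `url-default-expander' through
;; `url-registered-protocols' (as this package sets up elsewhere),
;; relative links expand roughly like this:
;; (url-expand-file-name "../images/foo.gif"
;;                       "http://www.example.com/docs/index.html")
;;   => "http://www.example.com/images/foo.gif"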

(defun url-default-expander (urlobj defobj)
  ;; The default expansion routine - urlobj is modified by side effect!
  (url-set-type urlobj (or (url-type urlobj) (url-type defobj)))
  (url-set-port urlobj (or (url-port urlobj)
			   (and (string= (url-type urlobj)
					 (url-type defobj))
				(url-port defobj))))
  (if (not (string= "file" (url-type urlobj)))
      (url-set-host urlobj (or (url-host urlobj) (url-host defobj))))
  (if (string= "ftp" (url-type urlobj))
      (url-set-user urlobj (or (url-user urlobj) (url-user defobj))))
  (if (string= (url-filename urlobj) "")
      (url-set-filename urlobj "/"))
  (if (string-match "^/" (url-filename urlobj))
      nil
    (url-set-filename urlobj
		      (url-remove-relative-links
		       (concat (url-basepath (url-filename defobj))
			       (url-filename urlobj))))))

(defun url-identity-expander (urlobj defobj)
  (url-set-type urlobj (or (url-type urlobj) (url-type defobj))))

(defun url-hexify-string (str)
  "Escape characters in a string for use in a URL."
  (if (and (featurep 'mule) str)
      (setq str (code-convert-string
		 str *internal* url-mule-retrieval-coding-system)))
  (setq str (mapconcat
	     (function
	      (lambda (char)
		(if (or (> char ?z)
			(< char ?-)
			(and (< char ?a)
			     (> char ?Z))
			(and (< char ?@)
			     (>= char ?:)))
		    (if (< char 16)
			(upcase (format "%%0%x" char))
		      (upcase (format "%%%x" char)))
		  (char-to-string char)))) str "")))
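
;; Illustrative example (not from the original source; assumes a non-Mule
;; Emacs, so no coding-system conversion happens first):
;; (url-hexify-string "foo bar")  => "foo%20bar"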

(defun url-make-sequence (start end)
  "Make a sequence (list) of numbers from START to END."
  (cond
   ((= start end) '())
   ((> start end) '())
   (t
    (let ((sqnc '()))
      (while (<= start end)
	(setq sqnc (cons end sqnc)
	      end (1- end)))
      sqnc))))
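
;; Illustrative example (not from the original source):
;; (url-make-sequence 1 5)  => (1 2 3 4 5)
;; Note that START equal to END, or START greater than END, yields nil.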

(defun url-file-extension (fname &optional x)
  "Return the filename extension of FNAME.  If optional variable X is t,
then return the basename of the file with the extension stripped off."
  (if (and fname (string-match "\\.[^./]+$" fname))
      (if x (substring fname 0 (match-beginning 0))
	(substring fname (match-beginning 0) nil))
    ;;
    ;; If fname has no extension, and x then return fname itself instead of
    ;; nothing.  When caching it allows the correct .hdr file to be produced
    ;; for filenames without extension.
    ;;
    (if x
	fname
      "")))

(defun url-basepath (file &optional x)
  "Return the base pathname of FILE, or the actual filename if X is non-nil."
  (cond
   ((null file) "")
   (x (file-name-nondirectory file))
   (t (file-name-directory file))))
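
;; Illustrative example (not from the original source):
;; (url-basepath "/docs/manual/index.html")    => "/docs/manual/"
;; (url-basepath "/docs/manual/index.html" t)  => "index.html"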

(defun url-unhex (x)
  (if (> x ?9)
      (if (>= x ?a)
	  (+ 10 (- x ?a))
	(+ 10 (- x ?A)))
    (- x ?0)))

(defun url-unhex-string (str)
  "Remove %XX embedded spaces, etc., in a URL."
  (setq str (or str ""))
  (let ((tmp ""))
    (while (string-match "%[0-9a-f][0-9a-f]" str)
      (let* ((start (match-beginning 0))
	     (ch1 (url-unhex (elt str (+ start 1))))
	     (code (+ (* 16 ch1)
		      (url-unhex (elt str (+ start 2))))))
	(setq tmp
	      (concat
	       tmp (substring str 0 start)
	       (if (or (= code ?\n) (= code ?\r)) " " (char-to-string code)))
	      str (substring str (match-end 0)))))
    (setq tmp (concat tmp str))
    tmp))
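
;; Illustrative example (not from the original source):
;; (url-unhex-string "foo%20bar%2fbaz")  => "foo bar/baz"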

(defun url-clean-text ()
  "Clean up a buffer, removing any excess garbage from a gateway mechanism,
and decoding any MIME content-transfer-encoding used."
  (set-buffer url-working-buffer)
  (goto-char (point-min))
  (url-replace-regexp "Connection closed by.*" "")
  (goto-char (point-min))
  (url-replace-regexp "Process WWW.*" ""))

(defun url-remove-compressed-extensions (filename)
  (while (assoc (url-file-extension filename) url-uncompressor-alist)
    (setq filename (url-file-extension filename t)))
  filename)
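
;; Illustrative example (not from the original source).  Assuming ".gz"
;; appears in `url-uncompressor-alist' and ".txt" does not:
;; (url-remove-compressed-extensions "notes.txt.gz")  => "notes.txt"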

(defun url-uncompress ()
  "Do any necessary uncompression on `url-working-buffer'"
  (set-buffer url-working-buffer)
  (if (not url-inhibit-uncompression)
      (let* ((extn (url-file-extension url-current-file))
	     (decoder nil)
	     (code-1 (cdr-safe
		      (assoc "content-transfer-encoding"
			     url-current-mime-headers)))
	     (code-2 (cdr-safe
		      (assoc "content-encoding" url-current-mime-headers)))
	     (code-3 (and (not code-1) (not code-2)
			  (cdr-safe (assoc extn url-uncompressor-alist))))
	     (done nil)
	     (default-process-coding-system
	       (if (featurep 'mule) (cons *noconv* *noconv*))))
	(mapcar
	 (function
	  (lambda (code)
	    (setq decoder (and (not (member code done))
			       (cdr-safe
				(assoc code mm-content-transfer-encodings)))
		  done (cons code done))
	    (cond
	     ((null decoder) nil)
	     ((stringp decoder)
	      (message "Decoding...")
	      (call-process-region (point-min) (point-max) decoder t t nil)
	      (message "Decoding... done."))
	     ((listp decoder)
	      (apply 'call-process-region (point-min) (point-max)
		     (car decoder) t t nil (cdr decoder)))
	     ((and (symbolp decoder) (fboundp decoder))
	      (message "Decoding...")
	      (funcall decoder (point-min) (point-max))
	      (message "Decoding... done."))
	     (t
	      (error "Bad entry for %s in `mm-content-transfer-encodings'"
		     code)))))
	 (list code-1 code-2 code-3))))
  (set-buffer-modified-p nil))

(defun url-filter (proc string)
  (save-excursion
    (set-buffer url-working-buffer)
    (insert string)
    (if (string-match "\nConnection closed by" string)
	(progn (set-process-filter proc nil)
	       (url-sentinel proc string))))
  string)

(defun url-default-callback (buf)
  (url-download-minor-mode nil)
  (cond
   ((save-excursion (set-buffer buf)
		    (and url-current-callback-func
			 (fboundp url-current-callback-func)))
    (save-excursion
      (save-window-excursion
	(set-buffer buf)
	(cond
	 ((listp url-current-callback-data)
	  (apply url-current-callback-func
		 url-current-callback-data))
	 (url-current-callback-data
	  (funcall url-current-callback-func
		   url-current-callback-data))
	 (t
	  (funcall url-current-callback-func))))))
   ((fboundp 'w3-sentinel)
    (set-variable 'w3-working-buffer buf)
    (w3-sentinel))
   (t
    (message "Retrieval for %s complete." buf))))

(defun url-sentinel (proc string)
  (if (buffer-name (process-buffer proc))
      (save-excursion
	(set-buffer (get-buffer (process-buffer proc)))
	(remove-hook 'after-change-functions 'url-after-change-function)
	(let ((status nil)
	      (url-working-buffer (current-buffer)))
	  (if url-be-asynchronous
	      (progn
		(widen)
		(url-clean-text)
		(cond
		 ((and (null proc) (not (get-buffer url-working-buffer))) nil)
		 ((url-mime-response-p)
		  (setq status (url-parse-mime-headers))))
		(if (not url-current-mime-type)
		    (setq url-current-mime-type (mm-extension-to-mime
						 (url-file-extension
						  url-current-file))))))
	  (if (member status '(401 301 302 303 204))
	      nil
	    (funcall url-default-retrieval-proc (buffer-name)))))
    (url-warn 'url (format "Process %s completed with no buffer!" proc))))

(defun url-remove-relative-links (name)
  ;; Strip . and .. from pathnames
  (let ((new (if (not (string-match "^/" name))
		 (concat "/" name)
	       name)))
    (while (string-match "/\\([^/]*/\\.\\./\\)" new)
      (setq new (concat (substring new 0 (match-beginning 1))
			(substring new (match-end 1)))))
    (while (string-match "/\\(\\./\\)" new)
      (setq new (concat (substring new 0 (match-beginning 1))
			(substring new (match-end 1)))))
    (while (string-match "^/\\.\\.\\(/\\)" new)
      (setq new (substring new (match-beginning 1) nil)))
    new))
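
;; Illustrative example (not from the original source):
;; (url-remove-relative-links "/a/b/../c/./d.html")  => "/a/c/d.html"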

(defun url-truncate-url-for-viewing (url &optional width)
  "Return a shortened version of URL that is WIDTH characters or less wide.
WIDTH defaults to the current frame width."
  (let* ((fr-width (or width (frame-width)))
	 (str-width (length url))
	 (tail (file-name-nondirectory url))
	 (fname nil)
	 (modified 0)
	 (urlobj nil))
    ;; The first thing that can go are the search strings
    (if (and (>= str-width fr-width)
	     (string-match "?" url))
	(setq url (concat (substring url 0 (match-beginning 0)) "?...")
	      str-width (length url)
	      tail (file-name-nondirectory url)))
    (if (< str-width fr-width)
	nil				; Hey, we are done!
      (setq urlobj (url-generic-parse-url url)
	    fname (url-filename urlobj)
	    fr-width (- fr-width 4))
      (while (and (>= str-width fr-width)
		  (string-match "/" fname))
	(setq fname (substring fname (match-end 0) nil)
	      modified (1+ modified))
	(url-set-filename urlobj fname)
	(setq url (url-recreate-url urlobj)
	      str-width (length url)))
      (if (> modified 1)
	  (setq fname (concat "/.../" fname))
	(setq fname (concat "/" fname)))
      (url-set-filename urlobj fname)
      (setq url (url-recreate-url urlobj)))
    url))

(defun url-view-url (&optional no-show)
  "View the current document's URL.  Optional argument NO-SHOW means
just return the URL, don't show it in the minibuffer."
  (interactive)
  (let ((url ""))
    (cond
     ((equal url-current-type "gopher")
      (setq url (format "%s://%s%s/%s"
			url-current-type url-current-server
			(if (or (null url-current-port)
				(string= "70" url-current-port)) ""
			  (concat ":" url-current-port))
			url-current-file)))
     ((equal url-current-type "news")
      (setq url (concat "news:"
			(if (not (equal url-current-server
					url-news-server))
			    (concat "//" url-current-server
				    (if (or (null url-current-port)
					    (string= "119" url-current-port))
					""
				      (concat ":" url-current-port)) "/"))
			url-current-file)))
     ((equal url-current-type "about")
      (setq url (concat "about:" url-current-file)))
     ((member url-current-type '("http" "shttp" "https"))
      (setq url (format "%s://%s%s/%s" url-current-type url-current-server
			(if (or (null url-current-port)
				(string= "80" url-current-port))
			    ""
			  (concat ":" url-current-port))
			(if (and url-current-file
				 (= ?/ (string-to-char url-current-file)))
			    (substring url-current-file 1 nil)
			  url-current-file))))
     ((equal url-current-type "ftp")
      (setq url (format "%s://%s%s/%s" url-current-type
			(if (and url-current-user
				 (not (string= "anonymous" url-current-user)))
			    (concat url-current-user "@") "")
			url-current-server
			(if (and url-current-file
				 (= ?/ (string-to-char url-current-file)))
			    (substring url-current-file 1 nil)
			  url-current-file))))
     ((and (member url-current-type '("file" nil)) url-current-file)
      (setq url (format "file:%s" url-current-file)))
     ((equal url-current-type "www")
      (setq url (format "www:/%s/%s" url-current-server url-current-file)))
     (t
      (setq url nil)))
    (if (not no-show) (message "%s" url) url)))

(defun url-parse-Netscape-history (fname)
  ;; Parse a Netscape/X style global history list.
  (let (pos				; Position holder
	url				; The URL
	time)				; Last time accessed
    (goto-char (point-min))
    (skip-chars-forward "^\n")
    (skip-chars-forward "\n \t")	; Skip past the tag line
    (setq url-global-history-hash-table (url-make-hashtable 131))
    ;; Here we will go to the end of the line and
    ;; skip back over a token, since we might run
    ;; into spaces in URLs, depending on how much
    ;; smarter netscape is than the old XMosaic :)
    (while (not (eobp))
      (setq pos (point))
      (end-of-line)
      (skip-chars-backward "^ \t")
      (skip-chars-backward " \t")
      (setq url (buffer-substring pos (point))
	    pos (1+ (point)))
      (skip-chars-forward "^\n")
      (setq time (buffer-substring pos (point)))
      (skip-chars-forward "\n")
      (setq url-history-changed-since-last-save t)
      (url-puthash url time url-global-history-hash-table))))

(defun url-parse-Mosaic-history-v1 (fname)
  ;; Parse an NCSA Mosaic/X style global history list
  (goto-char (point-min))
  (skip-chars-forward "^\n")
  (skip-chars-forward "\n \t")		; Skip past the tag line
  (skip-chars-forward "^\n")
  (skip-chars-forward "\n \t")		; Skip past the second tag line
  (setq url-global-history-hash-table (url-make-hashtable 131))
  (let (pos				; Temporary position holder
	bol				; Beginning-of-line
	url				; URL
	time				; Time
	last-end			; Last ending point
	)
    (while (not (eobp))
      (setq bol (point))
      (end-of-line)
      (setq pos (point)
	    last-end (point))
      (skip-chars-backward "^ \t" bol)	; Skip over year
      (skip-chars-backward " \t" bol)
      (skip-chars-backward "^ \t" bol)	; Skip over time
      (skip-chars-backward " \t" bol)
      (skip-chars-backward "^ \t" bol)	; Skip over day #
      (skip-chars-backward " \t" bol)
      (skip-chars-backward "^ \t" bol)	; Skip over month
      (skip-chars-backward " \t" bol)
      (skip-chars-backward "^ \t" bol)	; Skip over day abbrev.
      (if (bolp)
	  nil				; Malformed entry!!! Ack! Bailout!
	(setq time (buffer-substring pos (point)))
	(skip-chars-backward " \t")
	(setq pos (point)))
      (beginning-of-line)
      (setq url (buffer-substring (point) pos))
      (goto-char (min (1+ last-end) (point-max))) ; Goto next line
      (if (/= (length url) 0)
	  (progn
	    (setq url-history-changed-since-last-save t)
	    (url-puthash url time url-global-history-hash-table))))))

(defun url-parse-Mosaic-history-v2 (fname)
  ;; Parse an NCSA Mosaic/X style global history list (version 2)
  (goto-char (point-min))
  (skip-chars-forward "^\n")
  (skip-chars-forward "\n \t")		; Skip past the tag line
  (skip-chars-forward "^\n")
  (skip-chars-forward "\n \t")		; Skip past the second tag line
  (setq url-global-history-hash-table (url-make-hashtable 131))
  (let (pos				; Temporary position holder
	bol				; Beginning-of-line
	url				; URL
	time				; Time
	last-end			; Last ending point
	)
    (while (not (eobp))
      (setq bol (point))
      (end-of-line)
      (setq pos (point)
	    last-end (point))
      (skip-chars-backward "^ \t" bol)	; Skip over time
      (if (bolp)
	  nil				; Malformed entry!!! Ack! Bailout!
	(setq time (buffer-substring pos (point)))
	(skip-chars-backward " \t")
	(setq pos (point)))
      (beginning-of-line)
      (setq url (buffer-substring (point) pos))
      (goto-char (min (1+ last-end) (point-max))) ; Goto next line
      (if (/= (length url) 0)
	  (progn
	    (setq url-history-changed-since-last-save t)
	    (url-puthash url time url-global-history-hash-table))))))

(defun url-parse-Emacs-history (&optional fname)
  ;; Parse out the Emacs-w3 global history file for completion, etc.
  (or fname (setq fname (expand-file-name url-global-history-file)))
  (cond
   ((not (file-exists-p fname))
    (message "%s does not exist." fname))
   ((not (file-readable-p fname))
    (message "%s is unreadable." fname))
   (t
    (condition-case ()
	(load fname nil t)
      (error (message "Could not load %s" fname)))
    (if (boundp 'url-global-history-completion-list)
	;; Hey!  Automatic conversion of old format!
	(progn
	  (setq url-global-history-hash-table (url-make-hashtable 131)
		url-history-changed-since-last-save t)
	  (mapcar (function
		   (lambda (x)
		     (url-puthash (car x) (cdr x)
				  url-global-history-hash-table)))
		  (symbol-value 'url-global-history-completion-list)))))))

(defun url-parse-global-history (&optional fname)
  ;; Parse out the mosaic global history file for completions, etc.
  (or fname (setq fname (expand-file-name url-global-history-file)))
  (cond
   ((not (file-exists-p fname))
    (message "%s does not exist." fname))
   ((not (file-readable-p fname))
    (message "%s is unreadable." fname))
   (t
    (save-excursion
      (set-buffer (get-buffer-create " *url-tmp*"))
      (erase-buffer)
      (insert-file-contents-literally fname)
      (goto-char (point-min))
      (cond
       ((looking-at "(setq") (url-parse-Emacs-history fname))
       ((looking-at "ncsa-mosaic-.*-1$") (url-parse-Mosaic-history-v1 fname))
       ((looking-at "ncsa-mosaic-.*-2$") (url-parse-Mosaic-history-v2 fname))
       ((or (looking-at "MCOM-") (looking-at "netscape"))
	(url-parse-Netscape-history fname))
       (t
	(url-warn 'url (format "Cannot deduce type of history file: %s"
			       fname))))))))

(defun url-write-Emacs-history (fname)
  ;; Write an Emacs-w3 style global history list into FNAME
  (erase-buffer)
  (let ((count 0))
    (url-maphash (function
		  (lambda (key value)
		    (setq count (1+ count))
		    (insert "(url-puthash "
			    (if (stringp key)
				(prin1-to-string key)
			      (concat "\"" (symbol-name key) "\""))
			    (if (not (stringp value)) " '" "")
			    (prin1-to-string value)
			    " url-global-history-hash-table)\n")))
		 url-global-history-hash-table)
    (goto-char (point-min))
    (insert (format
	     "(setq url-global-history-hash-table (url-make-hashtable %d))\n"
	     (/ count 4)))
    (goto-char (point-max))
    (insert "\n")
    (write-file fname)))

(defun url-write-Netscape-history (fname)
  ;; Write a Netscape-style global history list into FNAME
  (erase-buffer)
  (let ((last-valid-time "785305714"))	; Picked out of thin air,
					; in case first in assoc list
					; doesn't have a valid time
    (goto-char (point-min))
    (insert "MCOM-Global-history-file-1\n")
    (url-maphash (function
		  (lambda (url time)
		    (if (or (not (stringp time)) (string-match " \t" time))
			(setq time last-valid-time)
		      (setq last-valid-time time))
		    (insert (concat (if (stringp url)
					url
				      (symbol-name url))
				    " " time "\n"))))
		 url-global-history-hash-table)
    (write-file fname)))

(defun url-write-Mosaic-history-v1 (fname)
  ;; Write a Mosaic/X-style global history list into FNAME
  (erase-buffer)
  (goto-char (point-min))
  (insert "ncsa-mosaic-history-format-1\nGlobal\n")
  (url-maphash (function
		(lambda (url time)
		  (if (listp time)
		      (setq time (current-time-string time)))
		  (if (or (not (stringp time))
			  (not (string-match " " time)))
		      (setq time (current-time-string)))
		  (insert (concat (if (stringp url)
				      url
				    (symbol-name url))
				  " " time "\n"))))
	       url-global-history-hash-table)
  (write-file fname))

(defun url-write-Mosaic-history-v2 (fname)
  ;; Write a Mosaic/X-style global history list into FNAME
  (let ((last-valid-time "827250806"))
    (erase-buffer)
    (goto-char (point-min))
    (insert "ncsa-mosaic-history-format-2\nGlobal\n")
    (url-maphash (function
		  (lambda (url time)
		    (if (listp time)
			(setq time last-valid-time)
		      (setq last-valid-time time))
		    (if (not (stringp time))
			(setq time last-valid-time))
		    (insert (concat (if (stringp url)
					url
				      (symbol-name url))
				    " " time "\n"))))
		 url-global-history-hash-table)
    (write-file fname)))

(defun url-write-global-history (&optional fname)
  "Write the global history file into `url-global-history-file'.
The type of data written is determined by what is in the file to begin
with.  If the type of storage cannot be determined, then prompt the
user for what type to save as."
  (interactive)
  (or fname (setq fname (expand-file-name url-global-history-file)))
  (cond
   ((not url-history-changed-since-last-save) nil)
   ((not (file-writable-p fname))
    (message "%s is unwritable." fname))
   (t
    (let ((make-backup-files nil)
	  (version-control nil)
	  (require-final-newline t))
      (save-excursion
	(set-buffer (get-buffer-create " *url-tmp*"))
	(erase-buffer)
	(condition-case ()
	    (insert-file-contents-literally fname)
	  (error nil))
	(goto-char (point-min))
	(cond
	 ((looking-at "ncsa-mosaic-.*-1$") (url-write-Mosaic-history-v1 fname))
	 ((looking-at "ncsa-mosaic-.*-2$") (url-write-Mosaic-history-v2 fname))
	 ((looking-at "MCOM-") (url-write-Netscape-history fname))
	 ((looking-at "netscape") (url-write-Netscape-history fname))
	 ((looking-at "(setq") (url-write-Emacs-history fname))
	 (t (url-write-Emacs-history fname)))
	(kill-buffer (current-buffer))))))
  (setq url-history-changed-since-last-save nil))


;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; The main URL fetching interface
;;; -------------------------------
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

;;;###autoload
(defun url-popup-info (url)
  "Retrieve the HTTP/1.0 headers and display them in a temp buffer."
  (let* ((urlobj (url-generic-parse-url url))
	 (type (url-type urlobj))
	 data)
    (cond
     ((string= type "http")
      (let ((url-request-method "HEAD")
	    (url-automatic-caching nil)
	    (url-inhibit-mime-parsing t)
	    (url-working-buffer " *popup*"))
	(save-excursion
	  (set-buffer (get-buffer-create url-working-buffer))
	  (erase-buffer)
	  (setq url-be-asynchronous nil)
	  (url-retrieve url)
	  (subst-char-in-region (point-min) (point-max) ?\r ? )
	  (buffer-string))))
     ((or (string= type "file") (string= type "ftp"))
      (setq data (url-file-attributes url))
      (set-buffer (get-buffer-create
		   (url-generate-new-buffer-name "*Header Info*")))
      (erase-buffer)
      (if data
	  (concat (if (stringp (nth 0 data))
		      (concat " Linked to: " (nth 0 data))
		    (concat " Directory: " (if (nth 0 data) "Yes" "No")))
		  "\n        Links: " (int-to-string (nth 1 data))
		  "\n     File UID: " (int-to-string (nth 2 data))
		  "\n     File GID: " (int-to-string (nth 3 data))
		  "\n  Last Access: " (current-time-string (nth 4 data))
		  "\nLast Modified: " (current-time-string (nth 5 data))
		  "\n Last Changed: " (current-time-string (nth 6 data))
		  "\n Size (bytes): " (int-to-string (nth 7 data))
		  "\n    File Type: " (or (nth 8 data) "text/plain"))
	(concat "No info found for " url)))
     ((and (string= type "news") (string-match "@" url))
      (let ((art (url-filename urlobj)))
	(if (not (string= (substring art -1 nil) ">"))
	    (setq art (concat "<" art ">")))
	(url-get-headers-from-article-id art)))
     (t (concat "Don't know how to find information on " url)))))

(defun url-decode-text ()
  ;; Decode text transmitted by NNTP.
  ;; 0. Delete status line.
  ;; 1. Delete `^M' at end of line.
  ;; 2. Delete `.' at end of buffer (end of text mark).
  ;; 3. Delete `.' at beginning of line."
  (save-excursion
    (set-buffer nntp-server-buffer)
    ;; Insert newline at end of buffer.
    (goto-char (point-max))
    (if (not (bolp))
	(insert "\n"))
    ;; Delete status line.
    (goto-char (point-min))
    (delete-region (point) (progn (forward-line 1) (point)))
    ;; Delete `^M' at end of line.
    ;; (replace-regexp "\r$" "")
    (while (not (eobp))
      (end-of-line)
      (if (= (preceding-char) ?\r)
	  (delete-char -1))
      (forward-line 1)
      )
    ;; Delete `.' at end of buffer (end of text mark).
    (goto-char (point-max))
    (forward-line -1)			;(beginning-of-line)
    (if (looking-at "^\\.$")
	(delete-region (point) (progn (forward-line 1) (point))))
    ;; Replace `..' at beginning of line with `.'.
    (goto-char (point-min))
    ;; (replace-regexp "^\\.\\." ".")
    (while (search-forward "\n.." nil t)
      (delete-char -1))
    ))

(defun url-get-headers-from-article-id (art)
  ;; Return the HEAD of ART (a usenet news article)
  (cond
   ((string-match "flee" nntp-version)
    (nntp/command "HEAD" art)
    (save-excursion
      (set-buffer nntp-server-buffer)
      (while (progn (goto-char (point-min))
		    (not (re-search-forward "^.\r*$" nil t)))
	(url-accept-process-output nntp/connection))))
   (t
    (nntp-send-command "^\\.\r$" "HEAD" art)
    (url-decode-text)))
  (save-excursion
    (set-buffer nntp-server-buffer)
    (buffer-string)))

(defvar url-external-retrieval-program "www"
  "*Name of the external executable to run to retrieve URLs.")

(defvar url-external-retrieval-args '("-source")
  "*A list of arguments to pass to `url-external-retrieval-program' to
retrieve a URL by its HTML source.")

(defun url-retrieve-externally (url &optional no-cache)
  (if (get-buffer url-working-buffer)
      (save-excursion
	(set-buffer url-working-buffer)
	(set-buffer-modified-p nil)
	(kill-buffer url-working-buffer)))
  (set-buffer (get-buffer-create url-working-buffer))
  (let* ((args (append url-external-retrieval-args (list url)))
	 (urlobj (url-generic-parse-url url))
	 (type (url-type urlobj)))
    (if (or (member type '("www" "about" "mailto" "mailserver"))
	    (url-file-directly-accessible-p urlobj))
	(url-retrieve-internally url)
      (url-lazy-message "Retrieving %s..." url)
      (apply 'call-process url-external-retrieval-program
	     nil t nil args)
      (url-lazy-message "Retrieving %s... done" url)
      (if (and type urlobj)
	  (setq url-current-server (url-host urlobj)
		url-current-type (url-type urlobj)
		url-current-port (url-port urlobj)
		url-current-file (url-filename urlobj)))
      (if (member url-current-file '("/" ""))
	  (setq url-current-mime-type "text/html")))))

(defun url-get-normalized-date (&optional specified-time)
  ;; Return a 'real' date string that most HTTP servers can understand.
  (require 'timezone)
  (let* ((raw (if specified-time (current-time-string specified-time)
		(current-time-string)))
	 (gmt (timezone-make-date-arpa-standard raw
						(nth 1 (current-time-zone))
						"GMT"))
	 (parsed (timezone-parse-date gmt))
	 (day (cdr-safe (assoc (substring raw 0 3) weekday-alist)))
	 (year nil)
	 (month (car
		 (rassoc
		  (string-to-int (aref parsed 1)) monthabbrev-alist)))
	 )
    (setq day (or (car-safe (rassoc day weekday-alist))
		  (substring raw 0 3))
	  year (aref parsed 0))
    ;; This is needed for plexus servers, or the server will hang trying to
    ;; parse the if-modified-since header.  Hopefully, I can take this out
    ;; soon.
    (if (and year (> (length year) 2))
	(setq year (substring year -2 nil)))

    (concat day ", " (aref parsed 2) "-" month "-" year " "
	    (aref parsed 3) " " (or (aref parsed 4)
				    (concat "[" (nth 1 (current-time-zone))
					    "]")))))

;;;###autoload
(defun url-cache-expired (url mod)
  "Return t iff a cached file has expired."
  (if (not (string-match url-nonrelative-link url))
      t
    (let* ((urlobj (url-generic-parse-url url))
	   (type (url-type urlobj)))
      (cond
       (url-standalone-mode
	(not (file-exists-p (url-create-cached-filename urlobj))))
       ((string= type "http")
	(if (not url-standalone-mode) t
	  (not (file-exists-p (url-create-cached-filename urlobj)))))
       ((not (fboundp 'current-time))
	t)
       ((member type '("file" "ftp"))
	(if (or (equal mod '(0 0)) (not mod))
	    t
	  (or (> (nth 0 mod) (nth 0 (current-time)))
	      (> (nth 1 mod) (nth 1 (current-time))))))
       (t nil)))))
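
;; Illustrative example (not from the original source).  With
;; `url-standalone-mode' nil, HTTP documents are always treated as
;; expired, so the retrieval layer re-fetches them:
;; (url-cache-expired "http://www.example.com/" nil)  => t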

(defun url-retrieve-internally (url &optional no-cache)
  (if (get-buffer url-working-buffer)
      (save-excursion
	(set-buffer url-working-buffer)
	(erase-buffer)
	(setq url-current-can-be-cached (not no-cache))
	(set-buffer-modified-p nil)))
  (let* ((urlobj (url-generic-parse-url url))
	 (type (url-type urlobj))
	 (url-using-proxy (and
			   (url-host urlobj)
			   (if (assoc "no_proxy" url-proxy-services)
			       (not (string-match
				     (cdr
				      (assoc "no_proxy" url-proxy-services))
				     (url-host urlobj)))
			     t)
			   (cdr (assoc type url-proxy-services))))
	 (handler nil)
	 (original-url url)
	 (cached nil)
	 (tmp url-current-file))
    (if url-using-proxy (setq type "proxy"))
    (setq cached (url-is-cached url)
	  cached (and cached (not (url-cache-expired url cached)))
	  handler (if cached 'url-extract-from-cache
		    (car-safe
		     (cdr-safe (assoc (or type "auto")
				      url-registered-protocols))))
	  url (if cached (url-create-cached-filename url) url))
    (save-excursion
      (set-buffer (get-buffer-create url-working-buffer))
      (setq url-current-can-be-cached (not no-cache)))
;    (if url-be-asynchronous
;	 (url-download-minor-mode t))
    (if (and handler (fboundp handler))
	(funcall handler url)
      (set-buffer (get-buffer-create url-working-buffer))
      (setq url-current-file tmp)
      (erase-buffer)
      (insert "<title> Link Error! </title>\n"
	      "<h1> An error has occurred... </h1>\n"
	      (format "The link type `<code>%s</code>'" type)
	      " is unrecognized or unsupported at this time.<p>\n"
	      "If you feel this is an error, please "
	      "<a href=\"mailto://" url-bug-address "\">send me mail.</a>"
	      "<p><address>William Perry</address><br>"
	      "<address>" url-bug-address "</address>")
      (setq url-current-file "error.html"))
    (if (and
	 (not url-be-asynchronous)
	 (get-buffer url-working-buffer))
	(progn
	  (set-buffer url-working-buffer)
	  (if (not url-current-object)
	      (setq url-current-object urlobj))
	  (url-clean-text)))
    (cond
     ((equal type "wais") nil)
     ((and url-be-asynchronous (not cached) (member type '("http" "proxy")))
      nil)
     (url-be-asynchronous
      (funcall url-default-retrieval-proc (buffer-name)))
     ((not (get-buffer url-working-buffer)) nil)
     ((and (not url-inhibit-mime-parsing)
	   (or cached (url-mime-response-p t)))
      (or cached (url-parse-mime-headers nil t))))
    (if (and (or (not url-be-asynchronous)
		 (not (equal type "http")))
	     (not url-current-mime-type))
	(if (url-buffer-is-hypertext)
	    (setq url-current-mime-type "text/html")
	  (setq url-current-mime-type (mm-extension-to-mime
				       (url-file-extension
					url-current-file)))))
    (if (and url-automatic-caching url-current-can-be-cached
	     (not url-be-asynchronous))
	(save-excursion
	  (url-store-in-cache url-working-buffer)))
    (if (not (url-hashtablep url-global-history-hash-table))
	(setq url-global-history-hash-table (url-make-hashtable 131)))
    (if (not (string-match "^about:" original-url))
	(progn
	  (setq url-history-changed-since-last-save t)
	  (url-puthash original-url (current-time)
		       url-global-history-hash-table)))
    cached))

;;;###autoload
(defun url-retrieve (url &optional no-cache expected-md5)
  "Retrieve a document over the World Wide Web.
The document should be specified by its fully specified
Uniform Resource Locator.  No parsing is done, just return the
document as the server sent it.  The document is left in the
buffer specified by url-working-buffer.  url-working-buffer is killed
immediately before starting the transfer, so that no buffer-local
variables interfere with the retrieval.  HTTP/1.0 redirection will
be honored before this function exits."
  (url-do-setup)
  (if (and (fboundp 'set-text-properties)
	   (subrp (symbol-function 'set-text-properties)))
      (set-text-properties 0 (length url) nil url))
  (if (and url (string-match "^url:" url))
      (setq url (substring url (match-end 0) nil)))
  (let ((status (url-retrieve-internally url no-cache)))
    (if (and expected-md5 url-check-md5s)
	(let ((cur-md5 (md5 (current-buffer))))
	  (if (not (string= cur-md5 expected-md5))
	      (and (not (funcall url-confirmation-func
				 "MD5s do not match, use anyway? "))
		   (error "MD5 error.")))))
    status))
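
;; Illustrative usage (not from the original source).  Synchronously
;; fetch a document and look at the raw server response left in the
;; buffer named by `url-working-buffer':
;; (let ((url-be-asynchronous nil))
;;   (url-retrieve "http://www.example.com/")
;;   (set-buffer url-working-buffer)
;;   (buffer-string))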

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; How to register a protocol
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defun url-register-protocol (protocol &optional retrieve expander defport)
  "Register a protocol with the URL retrieval package.
PROTOCOL is the type of protocol being registered (http, nntp, etc),
         and is the first chunk of the URL.  i.e.: http:// URLs will be
         handled by the protocol registered as 'http'.  PROTOCOL can
         be either a symbol or a string - it is converted to a string,
         and lowercased before being registered.
RETRIEVE (optional) is the function to be called with a url as its
         only argument.  If this argument is omitted, then this looks
         for a function called 'url-PROTOCOL'.  A warning is shown if
         the function is undefined, but the protocol is still
         registered.
EXPANDER (optional) is the function to call to expand a relative link
         of type PROTOCOL.  If omitted, this defaults to
         `url-default-expander'.

Any proxy information is read in from environment variables at this
time, so this function should only be called after dumping emacs."
  (let* ((protocol (cond
		    ((stringp protocol) (downcase protocol))
		    ((symbolp protocol) (downcase (symbol-name protocol)))
		    (t nil)))

	 (retrieve (or retrieve (intern (concat "url-" protocol))))
	 (expander (or expander 'url-default-expander))
	 (cur-protocol (assoc protocol url-registered-protocols))
	 (urlobj nil)
	 (cur-proxy (assoc protocol url-proxy-services))
	 (env-proxy (or (getenv (concat protocol "_proxy"))
			(getenv (concat protocol "_PROXY"))
			(getenv (upcase (concat protocol "_PROXY"))))))

    (if (not protocol)
	(error "Invalid data to url-register-protocol."))

    (if (not (fboundp retrieve))
	(message "Warning: %s registered, but no function found." protocol))

    ;; Store the default port, if none previously specified and
    ;; defport given
    (if (and defport (not (assoc protocol url-default-ports)))
	(setq url-default-ports (cons (cons protocol defport)
				      url-default-ports)))

    ;; Store the appropriate information for later
    (if cur-protocol
	(setcdr cur-protocol (cons retrieve expander))
      (setq url-registered-protocols (cons (cons protocol
						 (cons retrieve expander))
					   url-registered-protocols)))

    ;; Store any proxying information - this will not overwrite an old
    ;; entry, so that people can still set this information in their
    ;; .emacs file
    (cond
     (cur-proxy nil)			; Keep their old settings
     ((null env-proxy) nil)		; No proxy setup
     ;; First check if it's something like hostname:port
     ((string-match "^\\([^:]+\\):\\([0-9]+\\)$" env-proxy)
      (setq urlobj (url-generic-parse-url nil)) ; Get a blank object
      (url-set-type urlobj "http")
      (url-set-host urlobj (url-match env-proxy 1))
      (url-set-port urlobj (url-match env-proxy 2)))
     ;; Then check if it's a fully specified URL
     ((string-match url-nonrelative-link env-proxy)
      (setq urlobj (url-generic-parse-url env-proxy))
      (url-set-type urlobj "http")
      (url-set-target urlobj nil))
     ;; Finally, fall back on the assumption that it's just a hostname
     (t
      (setq urlobj (url-generic-parse-url nil)) ; Get a blank object
      (url-set-type urlobj "http")
      (url-set-host urlobj env-proxy)))

    (if (and (not cur-proxy) urlobj)
	(progn
	  (setq url-proxy-services
		(cons (cons protocol (url-recreate-url urlobj))
		      url-proxy-services))
	  (message "Using a proxy for %s..." protocol)))))
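
;; Illustrative (hypothetical) registration, not from the original source.
;; `url-finger' here is just a placeholder name for a retrieval function
;; you would supply yourself; without it, only a warning is printed:
;; (url-register-protocol 'finger 'url-finger 'url-default-expander "79")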

(provide 'url)