;;; url.el --- Uniform Resource Locator retrieval tool
;; Author: wmperry
;; Created: 1997/02/20 15:34:07
;; Version: 1.57
;; Keywords: comm, data, processes, hypermedia

;;; LCD Archive Entry:
;;; url|William M. Perry|wmperry@cs.indiana.edu|
;;; Functions for retrieving/manipulating URLs|
;;; 1997/02/20 15:34:07|1.57|Location Undetermined
;;;

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; Copyright (c) 1993-1996 by William M. Perry (wmperry@cs.indiana.edu)
;;; Copyright (c) 1996, 1997 Free Software Foundation, Inc.
;;;
;;; This file is not part of GNU Emacs, but the same permissions apply.
;;;
;;; GNU Emacs is free software; you can redistribute it and/or modify
;;; it under the terms of the GNU General Public License as published by
;;; the Free Software Foundation; either version 2, or (at your option)
;;; any later version.
;;;
;;; GNU Emacs is distributed in the hope that it will be useful,
;;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
;;; GNU General Public License for more details.
;;;
;;; You should have received a copy of the GNU General Public License
;;; along with GNU Emacs; see the file COPYING.  If not, write to the
;;; Free Software Foundation, Inc., 59 Temple Place - Suite 330,
;;; Boston, MA 02111-1307, USA.
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
35
|
|
36 (require 'cl)
|
|
37 (require 'url-vars)
|
|
38 (require 'url-parse)
|
|
39 (require 'mm)
|
|
40 (require 'mule-sysdp)
|
|
41 (or (featurep 'efs)
|
|
42 (featurep 'efs-auto)
|
|
43 (condition-case ()
|
|
44 (require 'ange-ftp)
|
|
45 (error nil)))
|
|
46
|
20
|
(eval-and-compile
  ;; Anything that is not XEmacs 19.14 or newer needs the W3 system
  ;; dependency/compatibility layer for functions it may lack.
  (if (not (and (string-match "XEmacs" emacs-version)
                (or (> emacs-major-version 19)
                    (>= emacs-minor-version 14))))
      (require 'w3-sysdp)))
|
14
|
52
|
|
53 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
|
54 ;;; Functions that might not exist in old versions of emacs
|
|
55 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
|
(defun url-save-error (errobj)
  ;; Record ERROBJ in the hidden " *url-error*" buffer, clearing out any
  ;; previously saved error first.  `display-error' is an XEmacs builtin.
  (let ((error-buffer (get-buffer-create " *url-error*")))
    (save-excursion
      (set-buffer error-buffer)
      (erase-buffer))
    (display-error errobj error-buffer)))
|
|
61
|
|
;; Define `url-warn' using the best warning facility this Emacs offers:
;; XEmacs' `display-warning', W3's `w3-warn', the builtin `warn', or, as
;; a last resort, a hand-rolled version writing to *W3-WARNINGS*.
(cond
 ((fboundp 'display-warning)
  (fset 'url-warn 'display-warning))
 ((fboundp 'w3-warn)
  (fset 'url-warn 'w3-warn))
 ((fboundp 'warn)
  ;; `warn' takes a format string, so adapt the (CLASS MESSAGE LEVEL)
  ;; convention to it.
  (defun url-warn (class message &optional level)
    (warn "(%s/%s) %s" class (or level 'warning) message)))
 (t
  ;; No warning facility at all: append to a visible log buffer.
  (defun url-warn (class message &optional level)
    (save-excursion
      (set-buffer (get-buffer-create "*W3-WARNINGS*"))
      (goto-char (point-max))
      (save-excursion
        (insert (format "(%s/%s) %s\n" class (or level 'warning) message)))
      (display-buffer (current-buffer))))))
|
|
78
|
|
79
|
|
80 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
|
81 ;;; Autoload all the URL loaders
|
|
82 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
|
83 (autoload 'url-file "url-file")
|
|
84 (autoload 'url-ftp "url-file")
|
|
85 (autoload 'url-gopher "url-gopher")
|
|
86 (autoload 'url-irc "url-irc")
|
|
87 (autoload 'url-http "url-http")
|
|
88 (autoload 'url-nfs "url-nfs")
|
|
89 (autoload 'url-mailserver "url-mail")
|
|
90 (autoload 'url-mailto "url-mail")
|
|
91 (autoload 'url-info "url-misc")
|
|
92 (autoload 'url-shttp "url-http")
|
|
93 (autoload 'url-https "url-http")
|
16
|
94 (autoload 'url-data "url-misc")
|
14
|
95 (autoload 'url-finger "url-misc")
|
|
96 (autoload 'url-rlogin "url-misc")
|
|
97 (autoload 'url-telnet "url-misc")
|
|
98 (autoload 'url-tn3270 "url-misc")
|
|
99 (autoload 'url-proxy "url-misc")
|
|
100 (autoload 'url-news "url-news")
|
|
101 (autoload 'url-nntp "url-news")
|
|
102
|
16
|
103 (autoload 'url-open-stream "url-gw")
|
14
|
104 (autoload 'url-mime-response-p "url-http")
|
|
105 (autoload 'url-parse-mime-headers "url-http")
|
|
106 (autoload 'url-handle-refresh-header "url-http")
|
|
107 (autoload 'url-create-mime-request "url-http")
|
|
108 (autoload 'url-create-message-id "url-http")
|
|
109 (autoload 'url-create-multipart-request "url-http")
|
|
110 (autoload 'url-parse-viewer-types "url-http")
|
16
|
111
|
|
112 (autoload 'url-get-authentication "url-auth")
|
|
113 (autoload 'url-register-auth-scheme "url-auth")
|
|
114 (autoload 'url-cookie-write-file "url-cookie")
|
|
115 (autoload 'url-cookie-retrieve "url-cookie")
|
|
116 (autoload 'url-cookie-generate-header-lines "url-cookie")
|
|
117 (autoload 'url-cookie-handle-set-cookie "url-cookie")
|
|
118
|
26
|
;; Cache handling (url-cache.el), loaded on demand.
;; NOTE(review): the original autoloaded `url-is-cached' twice; the
;; redundant duplicate declaration has been removed.
(autoload 'url-is-cached "url-cache")
(autoload 'url-store-in-cache "url-cache")
(autoload 'url-create-cached-filename "url-cache")
(autoload 'url-extract-from-cache "url-cache")
(autoload 'url-cache-expired "url-cache")
|
|
125
|
16
|
126 (require 'md5)
|
|
127 (require 'base64)
|
14
|
128
|
|
129 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
|
130 ;;; File-name-handler-alist functions
|
|
131 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
|
(defun url-setup-file-name-handlers ()
  ;; Setup file-name handlers.
  ;; NOTE(review): the entire body is quoted, so calling this function
  ;; does nothing except return the list form below -- presumably the
  ;; installation was disabled on purpose; confirm before re-enabling.
  '(cond
    ((not (boundp 'file-name-handler-alist))
     nil)                               ; Don't load if no alist
    ((rassq 'url-file-handler file-name-handler-alist)
     nil)                               ; Don't load twice
    (t
     (setq file-name-handler-alist
           (let ((new-handler (cons
                               (concat "^/*"
                                       ;; NOTE(review): "url-nonrelative-link1 nil"
                                       ;; looks garbled -- possibly meant
                                       ;; (substring url-nonrelative-link 1 nil);
                                       ;; harmless while the body stays quoted.
                                       (substring url-nonrelative-link1 nil))
                               'url-file-handler)))
             (if file-name-handler-alist
                 (append (list new-handler) file-name-handler-alist)
               (list new-handler)))))))
|
|
148
|
|
(defun url-file-handler (operation &rest args)
  ;; Function called from the file-name-handler-alist routines.  OPERATION
  ;; is what needs to be done ('file-exists-p, etc).  ARGS are the arguments
  ;; that would have been passed to OPERATION.
  (let ((fn (get operation 'url-file-handlers)) ; URL-aware implementation, if registered
        (url (car args))
        (myargs (cdr args)))
    ;; Strip the leading slash prepended by the file-name machinery,
    ;; leaving a bare URL.
    (if (= (string-to-char url) ?/)
        (setq url (substring url 1 nil)))
    (if fn (apply fn url myargs)
      ;; No URL handler registered for this operation: call the real
      ;; primitive with all handlers disabled to avoid recursing back here.
      (let (file-name-handler-alist)
        (apply operation url myargs)))))
|
|
161
|
|
(defun url-file-handler-identity (&rest args)
  ;; Trivial handler: hand back the first argument (the file name itself)
  ;; completely unchanged.
  (nth 0 args))
|
|
164
|
|
(defun url-file-handler-null (&rest args)
  ;; Trivial handler for operations that never make sense on a URL:
  ;; ignore all arguments and answer nil.
  nil)
|
|
167
|
|
;; Map primitive file operations to their URL-aware implementations via
;; the `url-file-handlers' symbol property consulted by `url-file-handler'.
;; NOTE(review): the original list first assigned `url-file-handler-null'
;; to file-directory-p and file-writable-p and then immediately overwrote
;; both further down; those two dead entries have been dropped -- the
;; final property state is unchanged.
(put 'substitute-in-file-name 'url-file-handlers 'url-file-handler-identity)
(put 'file-truename 'url-file-handlers 'url-file-handler-identity)
(put 'insert-file-contents 'url-file-handlers 'url-insert-file-contents)
(put 'expand-file-name 'url-file-handlers 'url-expand-file-name)
(put 'directory-files 'url-file-handlers 'url-directory-files)
(put 'file-directory-p 'url-file-handlers 'url-file-directory-p)
(put 'file-writable-p 'url-file-handlers 'url-file-writable-p)
(put 'file-readable-p 'url-file-handlers 'url-file-exists)
(put 'file-executable-p 'url-file-handlers 'null)
(put 'file-symlink-p 'url-file-handlers 'null)
(put 'file-exists-p 'url-file-handlers 'url-file-exists)
(put 'copy-file 'url-file-handlers 'url-copy-file)
(put 'file-attributes 'url-file-handlers 'url-file-attributes)
(put 'file-name-all-completions 'url-file-handlers
     'url-file-name-all-completions)
(put 'file-name-completion 'url-file-handlers 'url-file-name-completion)
(put 'file-local-copy 'url-file-handlers 'url-file-local-copy)
|
|
187
|
|
188
|
|
189 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
|
190 ;;; Utility functions
|
|
191 ;;; -----------------
|
|
192 ;;; Various functions used around the url code.
|
|
193 ;;; Some of these qualify as hacks, but hey, this is elisp.
|
|
194 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
|
195
|
|
;; `url-string-to-tokens' splits a string on a delimiter character;
;; prefer the MM library's implementation when it is available.
(if (fboundp 'mm-string-to-tokens)
    (fset 'url-string-to-tokens 'mm-string-to-tokens)
  (defun url-string-to-tokens (str &optional delim)
    "Return a list of words from the string STR"
    ;; DELIM is a delimiter character, defaulting to space.
    (setq delim (or delim ? ))
    (let (results y)
      ;; Walk STR character by character, accumulating the current word
      ;; in `y' and pushing finished words onto `results'.  (mapcar is
      ;; used purely for its side effects here.)
      (mapcar
       (function
        (lambda (x)
          (cond
           ((and (= x delim) y) (setq results (cons y results) y nil))
           ((/= x delim) (setq y (concat y (char-to-string x))))
           (t nil)))) str)
      (nreverse (cons y results)))))
|
|
210
|
|
(defun url-days-between (date1 date2)
  ;; Number of whole days separating the date strings DATE1 and DATE2
  ;; (positive when DATE1 is the later of the two).
  (let ((day1 (url-day-number date1))
        (day2 (url-day-number date2)))
    (- day1 day2)))
|
|
214
|
|
(defun url-day-number (date)
  ;; Convert the date string DATE to an absolute (gregorian) day count
  ;; via the timezone package.  Parsed fields arrive as strings, hence
  ;; the string-to-int step; nil fields are passed through unchanged.
  (let ((dat (mapcar (function (lambda (s) (and s (string-to-int s)) ))
                     (timezone-parse-date date))))
    ;; timezone-parse-date yields [year month day ...]; the gregorian
    ;; conversion wants (month day year).
    (timezone-absolute-from-gregorian
     (nth 1 dat) (nth 2 dat) (car dat))))
|
|
220
|
|
(defun url-seconds-since-epoch (date)
  ;; Returns a number that says how many seconds have
  ;; lapsed between Jan 1 12:00:00 1970 and DATE.
  (let* (;; Calendar fields of DATE, as integers.
         (tdate (mapcar (function (lambda (ti) (and ti (string-to-int ti))))
                        (timezone-parse-date date)))
         ;; Time-of-day of DATE (hour minute second).
         (ttime (mapcar (function (lambda (ti) (and ti (string-to-int ti))))
                        (timezone-parse-time
                         (aref (timezone-parse-date date) 3))))
         ;; The epoch reference date, parsed the same way.
         (edate (mapcar (function (lambda (ti) (and ti (string-to-int ti))))
                        (timezone-parse-date "Jan 1 12:00:00 1970")))
         ;; Whole days between DATE and the epoch.
         (tday (- (timezone-absolute-from-gregorian
                   (nth 1 tdate) (nth 2 tdate) (nth 0 tdate))
                  (timezone-absolute-from-gregorian
                   (nth 1 edate) (nth 2 edate) (nth 0 edate)))))
    ;; seconds + minutes + hours + days, all expressed in seconds.
    (+ (nth 2 ttime)
       (* (nth 1 ttime) 60)
       (* (nth 0 ttime) 60 60)
       (* tday 60 60 24))))
|
|
239
|
|
(defun url-match (s x)
  ;; Extract subexpression X of the most recent regexp match, taking the
  ;; text from the string S.
  (let ((start (match-beginning x))
        (end (match-end x)))
    (substring s start end)))
|
|
243
|
|
(defun url-split (str del)
  ;; Split the string STR on the regular expression DEL, skipping pieces
  ;; that are only whitespace.  Each piece is doubled into (PIECE PIECE)
  ;; so the result is directly usable with completing-read.
  (let ((remaining str)
        (pieces nil))
    (while (string-match del remaining)
      (let ((piece (substring remaining 0 (match-beginning 0))))
        (setq remaining (substring remaining (match-end 0) nil))
        (or (string-match "^[ \t]+$" piece)
            (setq pieces (cons (list piece piece) pieces)))))
    ;; Whatever trails the final delimiter is a piece too.
    (or (equal remaining "")
        (setq pieces (cons (list remaining remaining) pieces)))
    pieces))
|
|
256
|
|
(defun url-replace-regexp (regexp to-string)
  ;; Replace every match for REGEXP in the current buffer with TO-STRING,
  ;; starting from the top.  Case is left untouched (FIXEDCASE=t) while
  ;; \N-style references in TO-STRING remain active (LITERAL=nil).
  (goto-char (point-min))
  (while (re-search-forward regexp nil t)
    (replace-match to-string t nil)))
|
|
261
|
|
(defun url-clear-tmp-buffer ()
  ;; Switch to the URL working buffer (creating it if necessary) and
  ;; make sure it is writable and empty.
  (set-buffer (get-buffer-create url-working-buffer))
  (if buffer-read-only (toggle-read-only))
  (erase-buffer))
|
|
266
|
|
(defun url-maybe-relative (url)
  ;; Retrieve URL, first expanding it relative to the current document.
  (url-retrieve (url-expand-file-name url)))
|
|
269
|
|
(defun url-buffer-is-hypertext (&optional buff)
  "Return t if a buffer contains HTML, as near as we can guess."
  (setq buff (or buff (current-buffer)))
  (save-excursion
    (set-buffer buff)
    (let ((case-fold-search t))
      (goto-char (point-min))
      ;; Heuristic: the presence of any common HTML tag anywhere in the
      ;; buffer counts as hypertext.
      (re-search-forward
       "<\\(TITLE\\|HEAD\\|BASE\\|H[0-9]\\|ISINDEX\\|P\\)>" nil t))))
|
|
279
|
|
(defun url-percentage (x y)
  ;; Integer percentage of X out of Y.  Uses floating point when this
  ;; Emacs supports it, otherwise integer arithmetic.
  (cond
   ((fboundp 'float)
    (round (* 100 (/ x (float y)))))
   (t
    (/ (* x 100) y))))
|
|
284
|
20
|
(defun url-pretty-length (n)
  ;; Render the byte count N for humans: bytes, kilobytes, or megabytes.
  (if (< n 1024)
      (format "%d bytes" n)
    (if (< n (* 1024 1024))
        (format "%dk" (/ n 1024.0))
      (format "%2.2fM" (/ n (* 1024 1024.0))))))
|
|
293
|
14
|
(defun url-after-change-function (&rest args)
  ;; The nitty gritty details of messaging the HTTP/1.0 status messages
  ;; in the minibuffer.  On first call the response headers are parsed
  ;; and cached in the url-current-* buffer-locals; afterwards only the
  ;; progress message is refreshed.
  (or url-current-content-length
      (save-excursion
        (goto-char (point-min))
        (skip-chars-forward " \t\n")
        ;; FIX: the original pattern was "HTTP/[0-9]\.[0-9]"; the single
        ;; backslash disappears inside a Lisp string, so "." matched ANY
        ;; character.  "\\." matches a literal dot as intended.
        (if (not (looking-at "HTTP/[0-9]\\.[0-9]"))
            (setq url-current-content-length 0) ; not an HTTP/1.x response
          ;; Position just past the blank line that ends the headers.
          (setq url-current-isindex
                (and (re-search-forward "$\r*$" nil t) (point)))
          (if (re-search-forward
               "^content-type:[ \t]*\\([^\r\n]+\\)\r*$"
               url-current-isindex t)
              (setq url-current-mime-type (downcase
                                           (url-eat-trailing-space
                                            (buffer-substring
                                             (match-beginning 1)
                                             (match-end 1))))))
          (goto-char (point-min))
          (if (re-search-forward "^content-length:\\([^\r\n]+\\)\r*$"
                                 url-current-isindex t)
              (setq url-current-content-length
                    (string-to-int (buffer-substring (match-beginning 1)
                                                     (match-end 1))))
            (setq url-current-content-length nil))))
      )
  ;; NOTE(review): `max' makes the header-relative branch a no-op, since
  ;; (- (point-max) url-current-isindex) can never exceed (point-max).
  ;; Possibly `-' alone was intended; preserved as-is to keep the
  ;; original progress arithmetic.
  (let ((current-length (max (point-max)
                             (if url-current-isindex
                                 (- (point-max) url-current-isindex)
                               (point-max)))))
    (cond
     ((and url-current-content-length (> url-current-content-length 1)
           url-current-mime-type)
      (url-lazy-message "Reading [%s]... %s of %s (%d%%)"
                        url-current-mime-type
                        (url-pretty-length current-length)
                        (url-pretty-length url-current-content-length)
                        (url-percentage current-length
                                        url-current-content-length)))
     ((and url-current-content-length (> url-current-content-length 1))
      (url-lazy-message "Reading... %s of %s (%d%%)"
                        (url-pretty-length current-length)
                        (url-pretty-length url-current-content-length)
                        (url-percentage current-length
                                        url-current-content-length)))
     ((and (/= 1 current-length) url-current-mime-type)
      (url-lazy-message "Reading [%s]... %s"
                        url-current-mime-type
                        (url-pretty-length current-length)))
     ((/= 1 current-length)
      (url-lazy-message "Reading... %s."
                        (url-pretty-length current-length)))
     (t (url-lazy-message "Waiting for response...")))))
|
|
348
|
|
(defun url-insert-entities-in-string (string)
  "Convert HTML markup-start characters to entity references in STRING.
Also replaces the \" character, so that the result may be safely used as
an attribute value in a tag.  Returns a new string with the result of the
conversion.  Replaces these characters as follows:
    &  ==>  &amp;
    <  ==>  &lt;
    >  ==>  &gt;
    \"  ==>  &quot;"
  ;; FIX: the replacement strings (and docstring) had been entity-decoded
  ;; by whatever mangled this file, so each character was being "replaced"
  ;; with itself; the proper entity references are restored here.
  (if (string-match "[&<>\"]" string)
      (save-excursion
        (set-buffer (get-buffer-create " *entity*"))
        (erase-buffer)
        (buffer-disable-undo (current-buffer))
        (insert string)
        (goto-char (point-min))
        ;; Hop from one special character to the next, replacing each
        ;; with its entity (insert before point, then delete the char).
        (while (progn
                 (skip-chars-forward "^&<>\"")
                 (not (eobp)))
          (insert (cdr (assq (char-after (point))
                             '((?\" . "&quot;")
                               (?& . "&amp;")
                               (?< . "&lt;")
                               (?> . "&gt;")))))
          (delete-char 1))
        (buffer-string))
    ;; Nothing special in STRING: return it untouched.
    string))
|
|
376
|
|
377 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
|
378 ;;; Information information
|
|
379 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
|
(defvar url-process-lookup-table nil
  "Alist of (PROCESS . PLIST) pairs used by `url-process-get' and
`url-process-put' to attach properties to subprocesses.")
|
|
381
|
|
(defun url-process-get (proc prop &optional default)
  "Get a value associated to PROC as property PROP
in plist stored in `url-process-lookup-table'"
  (let* ((plist (cdr-safe (assq proc url-process-lookup-table)))
         (value (plist-get plist prop)))
    (or value default)))
|
|
387
|
|
(defun url-process-put (proc prop val)
  "Associate to PROC as property PROP the value VAL
in plist stored in `url-process-lookup-table'"
  (let ((node (assq proc url-process-lookup-table)))
    (if node
        ;; Known process: update its plist in place.
        (setcdr node (plist-put (cdr node) prop val))
      ;; First property for this process: push a fresh entry.
      (setq url-process-lookup-table
            (cons (cons proc (list prop val))
                  url-process-lookup-table)))))
|
|
396
|
|
(defun url-gc-process-lookup-table ()
  ;; Rebuild `url-process-lookup-table', dropping entries whose process
  ;; is no longer live.
  ;; NOTE(review): only statuses stop/closed/nil are treated as dead
  ;; here, so entries for processes in `exit' or `signal' state appear
  ;; to be kept -- confirm whether that is intentional.
  (let (new)
    (while url-process-lookup-table
      (if (not (memq (process-status (caar url-process-lookup-table))
                     '(stop closed nil)))
          (setq new (cons (car url-process-lookup-table) new)))
      (setq url-process-lookup-table (cdr url-process-lookup-table)))
    (setq url-process-lookup-table new)))
|
|
405
|
|
(defun url-process-list ()
  ;; Return every live process that has a `url' property registered in
  ;; `url-process-lookup-table', garbage-collecting stale entries first.
  (url-gc-process-lookup-table)
  (let ((remaining (process-list))
        (matches nil))
    (while remaining
      (and (url-process-get (car remaining) 'url)
           (setq matches (cons (car remaining) matches)))
      (setq remaining (cdr remaining)))
    matches))
|
|
415
|
|
(defun url-list-processes ()
  ;; Pop up a buffer summarizing every URL transfer in progress: the
  ;; URL, how much of it has arrived, and its MIME type.
  (interactive)
  (let ((processes (url-process-list))
        proc total-len len type url
        (url-status-buf (get-buffer-create "URL Status Display")))
    (set-buffer url-status-buf)
    (erase-buffer)
    (display-buffer url-status-buf)
    ;; Fixed-width column header plus a separator line.
    (insert
     (eval-when-compile (format "%-40s %-20s %-15s" "URL" "Size" "Type")) "\n"
     (eval-when-compile (make-string 77 ?-)) "\n")
    (while processes
      (setq proc (car processes)
            processes (cdr processes))
      (save-excursion
        ;; Gather the progress figures from the process' own buffer...
        (set-buffer (process-buffer proc))
        (setq total-len url-current-content-length
              len (max (point-max)
                       (if url-current-isindex
                           (- (point-max) url-current-isindex)
                         (point-max)))
              type url-current-mime-type
              url (url-process-get proc 'url))
        ;; ...then format one row per transfer.  Over-long URLs get a
        ;; line to themselves, with the remaining columns underneath.
        (set-buffer url-status-buf)
        (insert
         (format "%-40s%s%-20s %-15s\n"
                 (url-process-get proc 'url)
                 (if (> (length url) 40)
                     (format "\n%-40s " " ")
                   " ")
                 (if total-len
                     (format "%d of %d" len total-len)
                   (format "%d" len))
                 (or type "unknown")))))))
|
|
450
|
|
451
|
|
452 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
|
453 ;;; file-name-handler stuff calls this
|
|
454 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
|
455
|
|
(defun url-have-visited-url (url &rest args)
  "Return non-nil iff the user has visited URL before.
The return value is a cons of the url and the date last accessed as a string"
  ;; url-global-history-hash-table is maintained by the history code;
  ;; cl-gethash is the CL-compatibility hash-table accessor.
  (cl-gethash url url-global-history-hash-table))
|
|
460
|
|
(defun url-directory-files (url &rest args)
  "Return a list of files on a server."
  ;; Not implemented: directory listing over URLs always answers nil.
  nil)
|
|
464
|
|
(defun url-file-writable-p (url &rest args)
  "Return t iff a url is writable by this user"
  ;; URLs are never considered writable.
  nil)
|
|
468
|
|
(defun url-copy-file (url &rest args)
  "Copy a url to the specified filename."
  ;; Not implemented: registered as the copy-file handler but a no-op.
  nil)
|
|
472
|
|
(defun url-file-directly-accessible-p (url)
  "Returns t iff the specified URL is directly accessible
on your filesystem.  (nfs, local file, etc)."
  ;; URL may arrive already parsed (a vector) or as a string.
  (let ((parsed (if (vectorp url) url (url-generic-parse-url url))))
    ;; Only hostless file:/ftp: URLs live on the local filesystem.
    (and (member (url-type parsed) '("file" "ftp"))
         (not (url-host parsed)))))
|
|
480
|
|
;;;###autoload
(defun url-file-attributes (url &rest args)
  "Return a list of attributes of URL.
Value is nil if specified file cannot be opened.
Otherwise, list elements are:
 0. t for directory, string (name linked to) for symbolic link, or nil.
 1. Number of links to file.
 2. File uid.
 3. File gid.
 4. Last access time, as a list of two integers.
  First integer has high-order 16 bits of time, second has low 16 bits.
 5. Last modification time, likewise.
 6. Last status change time, likewise.
 7. Size in bytes. (-1, if number is out of range).
 8. File modes, as a string of ten letters or dashes as in ls -l.
    If URL is on an http server, this will return the content-type if possible.
 9. t iff file's gid would change if file were deleted and recreated.
10. inode number.
11. Device number.

If file does not exist, returns nil."
  (and url
       (let* ((urlobj (url-generic-parse-url url))
              (type (url-type urlobj))
              (url-automatic-caching nil) ; never cache a metadata probe
              (data nil)
              (exists nil))
         (cond
          ((equal type "http")
           (cond
            ((not url-be-anal-about-file-attributes)
             ;; Cheap path: fabricate plausible attributes without
             ;; touching the network; MIME type guessed from extension.
             (setq data (list
                         (url-file-directory-p url) ; Directory
                         1              ; number of links to it
                         0              ; UID
                         0              ; GID
                         (cons 0 0)     ; Last access time
                         (cons 0 0)     ; Last mod. time
                         (cons 0 0)     ; Last status time
                         -1             ; file size
                         (mm-extension-to-mime
                          (url-file-extension (url-filename urlobj)))
                         nil            ; gid would change
                         0              ; inode number
                         0              ; device number
                         )))
            (t                          ; HTTP/1.0, use HEAD
             (let ((url-request-method "HEAD")
                   (url-request-data nil)
                   (url-working-buffer " *url-temp*"))
               (save-excursion
                 (condition-case ()
                     (progn
                       (url-retrieve url)
                       ;; Only 2xx statuses yield attributes; the code
                       ;; assumes the parsed "status" header is numeric.
                       (setq data (and
                                   (setq exists
                                         (cdr
                                          (assoc "status"
                                                 url-current-mime-headers)))
                                   (>= exists 200)
                                   (< exists 300)
                                   (list
                                    (url-file-directory-p url) ; Directory
                                    1   ; links to
                                    0   ; UID
                                    0   ; GID
                                    (cons 0 0) ; Last access time
                                    (cons 0 0) ; Last mod. time
                                    (cons 0 0) ; Last status time
                                    (or ; Size in bytes
                                     (cdr (assoc "content-length"
                                                 url-current-mime-headers))
                                     -1)
                                    (or
                                     (cdr (assoc "content-type"
                                                 url-current-mime-headers))
                                     (mm-extension-to-mime
                                      (url-file-extension
                                       (url-filename urlobj)))) ; content-type
                                    nil ; gid would change
                                    0   ; inode number
                                    0   ; device number
                                    ))))
                   (error nil))
                 ;; HEAD failed or returned non-2xx: fall back to the
                 ;; same fabricated attributes as the cheap path.
                 (and (not data)
                      (setq data (list (url-file-directory-p url)
                                       1 0 0 (cons 0 0) (cons 0 0) (cons 0 0)
                                       -1 (mm-extension-to-mime
                                           (url-file-extension
                                            (url-filename
                                             url-current-object)))
                                       nil 0 0)))
                 (kill-buffer " *url-temp*"))))))
          ((member type '("ftp" "file"))
           ;; Build an ange-ftp/efs style /user@host:file name for remote
           ;; hosts, or a plain filename, and ask file-attributes; slot 8
           ;; (modes) is then overwritten with the guessed MIME type.
           (let ((fname (if (url-host urlobj)
                            (concat "/"
                                    (if (url-user urlobj)
                                        (concat (url-user urlobj) "@")
                                      "")
                                    (url-host urlobj) ":"
                                    (url-filename urlobj))
                          (url-filename urlobj))))
             (setq data (or (file-attributes fname) (make-list 12 nil)))
             (setcar (cdr (cdr (cdr (cdr (cdr (cdr (cdr (cdr data))))))))
                     (mm-extension-to-mime (url-file-extension fname)))))
          (t nil))
         data)))
|
|
588
|
|
(defun url-file-name-all-completions (file dirname &rest args)
  "Return a list of all completions of file name FILE in directory DIR.
These are all file names in directory DIR which begin with FILE."
  ;; need to rewrite -- currently a stub that always returns nil.
  )
|
|
594
|
|
(defun url-file-name-completion (file dirname &rest args)
  "Complete file name FILE in directory DIR.
Returns the longest string
common to all filenames in DIR that start with FILE.
If there is only one and FILE matches it exactly, returns t.
Returns nil if DIR contains no name starting with FILE."
  ;; Delegates to `url-file-name-all-completions' (itself a stub), so
  ;; completion over URLs is effectively disabled for now.
  (apply 'url-file-name-all-completions file dirname args))
|
|
602
|
|
(defun url-file-local-copy (file &rest args)
  "Copy the file FILE into a temporary file on this machine.
Returns the name of the local copy, or nil, if FILE is directly
accessible."
  ;; Not implemented: always behaves as if FILE were directly accessible.
  nil)
|
|
608
|
|
(defun url-insert-file-contents (url &rest args)
  "Insert the contents of the URL in this buffer."
  (interactive "sURL: ")
  (save-excursion
    ;; Force a synchronous fetch, restoring the user's asynchronous
    ;; preference afterwards.
    (let ((old-asynch url-be-asynchronous))
      (setq-default url-be-asynchronous nil)
      (let ((buf (current-buffer))
            (url-working-buffer (cdr (url-retrieve url))))
        (setq-default url-be-asynchronous old-asynch)
        ;; Uncompress the fetched data in its working buffer, copy it
        ;; here, then clean the working buffer up.
        (set-buffer url-working-buffer)
        (url-uncompress)
        (set-buffer buf)
        (insert-buffer url-working-buffer)
        (setq buffer-file-name url)
        ;; Mark the working buffer unmodified so killing it is silent.
        (save-excursion
          (set-buffer url-working-buffer)
          (set-buffer-modified-p nil))
        (kill-buffer url-working-buffer)))))
|
|
627
|
|
(defun url-file-directory-p (url &rest args)
  "Return t iff a url points to a directory"
  ;; A URL is a "directory" when it ends in a slash.  Guard against the
  ;; empty string, on which the original (substring url -1) would have
  ;; signaled an args-out-of-range error.
  (and (> (length url) 0)
       (equal (substring url -1 nil) "/")))
|
|
631
|
|
(defun url-file-exists (url &rest args)
  "Return t iff a file exists."
  (let* ((urlobj (url-generic-parse-url url))
         (type (url-type urlobj))
         (exists nil))
    (cond
     ((equal type "http")               ; use head
      ;; Issue a HEAD request; a 2xx status means the resource exists.
      ;; Missing status header is treated as 500 (does not exist).
      (let ((url-request-method "HEAD")
            (url-request-data nil)
            (url-working-buffer " *url-temp*"))
        (save-excursion
          (url-retrieve url)
          (setq exists (or (cdr
                            (assoc "status" url-current-mime-headers)) 500))
          (kill-buffer " *url-temp*")
          (setq exists (and (>= exists 200) (< exists 300))))))
     ((member type '("ftp" "file"))     ; file-attributes
      ;; Remote names use ange-ftp/efs /user@host:file syntax; local
      ;; names are used directly.
      (let ((fname (if (url-host urlobj)
                       (concat "/"
                               (if (url-user urlobj)
                                   (concat (url-user urlobj) "@")
                                 "")
                               (url-host urlobj) ":"
                               (url-filename urlobj))
                     (url-filename urlobj))))
        (setq exists (file-exists-p fname))))
     (t nil))
    exists))
|
|
660
|
|
;;;###autoload
(defun url-normalize-url (url)
  "Return a 'normalized' version of URL.  This strips out default port
numbers, etc."
  (let* ((parsed (url-generic-parse-url url))
         (scheme (url-type parsed)))
    ;; A handful of pseudo-schemes must not be round-tripped through
    ;; the parser; hand those back untouched.
    (if (member scheme '("www" "about" "mailto" "mailserver" "info"))
        url
      (url-recreate-url parsed))))
|
|
672
|
|
;;;###autoload
(defun url-buffer-visiting (url)
  "Return the name of a buffer (if any) that is visiting URL."
  ;; NOTE(review): despite the docstring, this returns the buffer object
  ;; itself (car bufs), not its name.
  (setq url (url-normalize-url url))
  (let ((bufs (buffer-list))
        (found nil))
    ;; Strip any trailing #anchor before comparing.
    (if (condition-case ()
            (string-match "\\(.*\\)#" url)
          (error nil))
        (setq url (url-match url 1)))
    (while (and bufs (not found))
      (save-excursion
        (set-buffer (car bufs))
        ;; A candidate must be a W3/URL mode buffer (but not one of the
        ;; internal " *URL*" working buffers) showing exactly this URL.
        (setq found (if (and
                         (not (string-match " \\*URL-?[0-9]*\\*" (buffer-name (car bufs))))
                         (memq major-mode '(url-mode w3-mode))
                         (equal (url-view-url t) url)) (car bufs) nil)
              bufs (cdr bufs))))
    found))
|
|
692
|
|
(defun url-file-size (url &rest args)
  "Return the size of a file in bytes, or -1 if can't be determined."
  (let* ((urlobj (url-generic-parse-url url))
         (type (url-type urlobj))
         (size -1)
         (data nil))
    (cond
     ((equal type "http")               ; use head
      ;; Issue a HEAD request and read the content-length header.
      (let ((url-request-method "HEAD")
            (url-request-data nil)
            (url-working-buffer " *url-temp*"))
        (save-excursion
          (url-retrieve url)
          (setq size (or (cdr
                          (assoc "content-length" url-current-mime-headers))
                         -1))
          (kill-buffer " *url-temp*"))))
     ((member type '("ftp" "file"))     ; file-attributes
      ;; Build an ange-ftp/efs style name for remote hosts, or the plain
      ;; filename for local files, and read slot 7 of file-attributes.
      (let ((fname (if (url-host urlobj)
                       (concat "/"
                               (if (url-user urlobj)
                                   (concat (url-user urlobj) "@")
                                 "")
                               (url-host urlobj) ":"
                               (url-filename urlobj))
                     (url-filename urlobj))))
        (setq data (file-attributes fname)
              size (nth 7 data))))
     (t nil))
    ;; Headers may deliver the size as a string; normalize to an integer.
    (cond
     ((stringp size) (string-to-int size))
     ((integerp size) size)
     ((null size) -1)
     (t -1))))
|
|
727
|
|
(defun url-generate-new-buffer-name (start)
  "Create a new buffer name based on START."
  ;; If START itself is free, use it; otherwise append <1>, <2>, ...
  ;; until an unused name is found.
  (if (not (get-buffer start))
      start
    (let ((counter 1)
          candidate)
      (setq candidate (format "%s<%d>" start counter))
      (while (get-buffer candidate)
        (setq counter (1+ counter)
              candidate (format "%s<%d>" start counter)))
      candidate)))
|
|
740
|
|
(defun url-generate-unique-filename (&optional fmt)
  "Generate a unique filename in url-temporary-directory"
  ;; Try STEM0, STEM1, STEM2 ... until a name not yet present in
  ;; `url-temporary-directory' turns up.  When FMT is given, it is a
  ;; format string applied to each candidate stem; the stem also differs
  ;; slightly between the two modes, matching the original behavior.
  (let* ((stem (if fmt
                   (concat "url" (int-to-string (user-real-uid)))
                 (format "url-tmp.%d" (user-real-uid))))
         (n 0)
         (candidate (if fmt
                        (format fmt (concat stem (int-to-string n)))
                      (concat stem (int-to-string n)))))
    (while (file-exists-p (expand-file-name candidate url-temporary-directory))
      (setq n (1+ n)
            candidate (if fmt
                          (format fmt (concat stem (int-to-string n)))
                        (concat stem (int-to-string n)))))
    (expand-file-name candidate url-temporary-directory)))
|
|
760
|
|
(defun url-lazy-message (&rest args)
  "Just like `message', but is a no-op if called more than once a second.
Will not do anything if url-show-status is nil."
  (cond
   ((null url-show-status) nil)
   ;; Same second as our last message: stay quiet.  (The timestamp is
   ;; refreshed even when output is suppressed, mirroring the original.)
   ((= url-lazy-message-time
       (setq url-lazy-message-time (nth 1 (current-time))))
    nil)
   (t (apply 'message args))))
|
|
769
|
|
770
|
|
(defun url-kill-process (proc)
  "Kill the process PROC - knows about all the various gateway types,
and acts accordingly."
  ;; All current gateway types are plain subprocesses, so a simple
  ;; delete-process suffices.
  (delete-process proc))
|
14
|
775
|
|
(defun url-accept-process-output (proc)
  "Allow any pending output from subprocesses to be read by Emacs.
It is read into the process' buffers or given to their filter functions.
Where possible, this will not exit until some output is received from PROC,
or 1 second has elapsed."
  ;; The 1-second timeout keeps Emacs responsive while polling.
  (accept-process-output proc 1))
|
|
782
|
|
(defun url-process-status (proc)
  "Return the process status of a url buffer"
  ;; Thin wrapper over `process-status', kept for symmetry with the
  ;; other gateway-abstraction helpers above.
  (process-status proc))
|
14
|
786
|
|
787
|
|
788 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
|
789 ;;; Miscellaneous functions
|
|
790 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
|
(defun url-setup-privacy-info ()
  ;; Initialize `url-system-type', `url-personal-mail-address', and
  ;; `url-os-type', honoring `url-privacy-level' (each is left nil when
  ;; the user asked for that datum to be concealed).
  (interactive)
  (setq url-system-type
        (cond
         ;; Concealing the OS was requested.
         ((or (eq url-privacy-level 'paranoid)
              (and (listp url-privacy-level)
                   (memq 'os url-privacy-level)))
          nil)
         ((eq system-type 'Apple-Macintosh) "Macintosh")
         ((eq system-type 'next-mach) "NeXT")
         ((eq system-type 'windows-nt) "Windows-NT; 32bit")
         ((eq system-type 'ms-windows) "Windows; 16bit")
         ((eq system-type 'ms-dos) "MS-DOS; 32bit")
         ;; NOTE(review): `device-type' is an XEmacs builtin; on other
         ;; Emacsen it presumably comes from w3-sysdp (required above) --
         ;; confirm before relying on this under GNU Emacs.
         ((and (eq system-type 'vax-vms) (device-type))
          "VMS; X11")
         ((eq system-type 'vax-vms) "VMS; TTY")
         ((eq (device-type) 'x) "X11")
         ((eq (device-type) 'ns) "NeXTStep")
         ((eq (device-type) 'pm) "OS/2")
         ((eq (device-type) 'win32) "Windows; 32bit")
         ((eq (device-type) 'tty) "(Unix?); TTY")
         (t "UnkownPlatform")))         ; (sic)

  ;; Default the advertised mail address...
  (setq url-personal-mail-address (or url-personal-mail-address
                                      user-mail-address
                                      (format "%s@%s" (user-real-login-name)
                                              (system-name))))

  ;; ...unless privacy settings forbid sending it.
  (if (or (memq url-privacy-level '(paranoid high))
          (and (listp url-privacy-level)
               (memq 'email url-privacy-level)))
      (setq url-personal-mail-address nil))

  ;; Derive the OS name from the (emacs-version) build string when the
  ;; privacy level allows advertising it.
  (if (or (eq url-privacy-level 'paranoid)
          (and (listp url-privacy-level)
               (memq 'os url-privacy-level)))
      (setq url-os-type nil)
    (let ((vers (emacs-version)))
      (if (string-match "(\\([^, )]+\\))$" vers)
          (setq url-os-type (url-match vers 1))
        (setq url-os-type (symbol-name system-type))))))
|
|
832
|
|
(defun url-handle-no-scheme (url)
  "Guess a scheme for URL, which arrived without one, and retrieve it.
A leading component matching a registered protocol name (like
\"ftp.\") selects that protocol; \"www.\" or a multi-dotted name
gets HTTP; anything else is wrapped as http://www.URL.com."
  (let ((protos url-registered-protocols)
	(matched nil))
    ;; Does URL begin with the name of a known scheme, e.g. "ftp."?
    (while (and protos (not matched))
      (let ((scheme (car (car protos))))
	(cond
	 ((member scheme '("auto" "www"))
	  (setq protos (cdr protos)))
	 ((string-match (concat "^" scheme "\\.") url)
	  (setq matched t))
	 (t
	  (setq protos (cdr protos))))))
    (cond
     (matched				; Found something like ftp.spry.com
      (url-retrieve (concat (car (car protos)) "://" url)))
     ((string-match "^www\\." url)
      (url-retrieve (concat "http://" url)))
     ((string-match "\\(\\.[^\\.]+\\)\\(\\.[^\\.]+\\)" url)
      ;; Two or more dots - treat it as a hostname and try HTTP.
      (url-retrieve (concat "http://" url)))
     (t
      ;; Last resort: assume a bare company name.
      (url-retrieve (concat "http://www." url ".com"))))))
|
|
852
|
|
(defun url-setup-save-timer ()
  "Reset the history list timer."
  (interactive)
  (cond
   ;; XEmacs-style interval timers: replace any existing saver timer.
   ((featurep 'itimer)
    (if (get-itimer "url-history-saver")
	(delete-itimer (get-itimer "url-history-saver")))
    (start-itimer "url-history-saver" 'url-write-global-history
		  url-global-history-save-interval
		  url-global-history-save-interval))
   ;; GNU Emacs timers: first delay and repeat interval are both
   ;; `url-global-history-save-interval'.
   ((fboundp 'run-at-time)
    (run-at-time url-global-history-save-interval
		 url-global-history-save-interval
		 'url-write-global-history))
   ;; No timer support at all; history is only saved explicitly.
   (t nil)))
|
|
868
|
|
(defvar url-download-minor-mode nil
  "Count of downloads currently in progress, or nil when there are none.
Maintained by the function `url-download-minor-mode'; also serves as
the flag for the \" Webbing\" modeline indicator registered in
`url-do-setup'.")
|
|
870
|
|
(defun url-download-minor-mode (on)
  "Adjust the download counter: increment when ON is non-nil, else decrement.
The counter collapses back to nil once it reaches zero, which turns
the modeline indicator off."
  (let ((count (or url-download-minor-mode (if on 0 1))))
    (setq url-download-minor-mode (if on (1+ count) (1- count)))
    (when (<= url-download-minor-mode 0)
      (setq url-download-minor-mode nil))))
|
|
877
|
|
(defun url-do-setup ()
  "Do setup - this is to avoid conflict with user settings when URL is
dumped with emacs."
  (if url-setup-done
      nil				; Already initialized; nothing to do.

    ;; Modeline indicator for downloads in progress.
    (add-minor-mode 'url-download-minor-mode " Webbing" nil)

    ;; Make OS/2 happy
    (setq tcp-binary-process-input-services
	  (append '("http" "80")
		  tcp-binary-process-input-services))

    ;; Register all the protocols we can handle
    (url-register-protocol 'file)
    (url-register-protocol 'ftp nil nil "21")
    (url-register-protocol 'gopher nil nil "70")
    (url-register-protocol 'http nil nil "80")
    (url-register-protocol 'https nil nil "443")
    (url-register-protocol 'nfs nil nil "2049")
    (url-register-protocol 'info nil 'url-identity-expander)
    (url-register-protocol 'mailserver nil 'url-identity-expander)
    (url-register-protocol 'finger nil 'url-identity-expander "79")
    (url-register-protocol 'mailto nil 'url-identity-expander)
    (url-register-protocol 'news nil 'url-identity-expander "119")
    (url-register-protocol 'nntp nil 'url-identity-expander "119")
    (url-register-protocol 'irc nil 'url-identity-expander "6667")
    (url-register-protocol 'data nil 'url-identity-expander)
    (url-register-protocol 'rlogin)
    (url-register-protocol 'telnet)
    (url-register-protocol 'tn3270)
    (url-register-protocol 'proxy)
    (url-register-protocol 'auto 'url-handle-no-scheme)

    ;; Register all the authentication schemes we can handle
    (url-register-auth-scheme "basic" nil 4)
    (url-register-auth-scheme "digest" nil 7)

    ;; Filename handler stuff for emacsen that support it
    (url-setup-file-name-handlers)

    ;; Default locations for the cookie and history files, honoring
    ;; any value the user set before this ran.
    (setq url-cookie-file
	  (or url-cookie-file
	      (expand-file-name "~/.w3cookies")))

    (setq url-global-history-file
	  (or url-global-history-file
	      (and (memq system-type '(ms-dos ms-windows))
		   (expand-file-name "~/mosaic.hst"))
	      (and (memq system-type '(axp-vms vax-vms))
		   (expand-file-name "~/mosaic.global-history"))
	      (condition-case ()
		  (expand-file-name "~/.mosaic-global-history")
		(error nil))))

    ;; Parse the global history file if it exists, so that it can be used
    ;; for URL completion, etc.
    (if (and url-global-history-file
	     (file-exists-p url-global-history-file))
	(url-parse-global-history))

    ;; Setup save timer
    (and url-global-history-save-interval (url-setup-save-timer))

    (if (and url-cookie-file
	     (file-exists-p url-cookie-file))
	(url-cookie-parse-file url-cookie-file))

    ;; Read in proxy gateways
    ;; no_proxy is a comma-separated list of shell-style patterns;
    ;; translate it into a single alternation regexp.
    (let ((noproxy (and (not (assoc "no_proxy" url-proxy-services))
			(or (getenv "NO_PROXY")
			    (getenv "no_PROXY")
			    (getenv "no_proxy")))))
      (if noproxy
	  (setq url-proxy-services
		(cons (cons "no_proxy"
			    (concat "\\("
				    (mapconcat
				     (function
				      (lambda (x)
					(cond
					 ((= x ?,) "\\|") ; comma -> alternation
					 ((= x ? ) "")	  ; blanks are dropped
					 ((= x ?.) (regexp-quote "."))
					 ((= x ?*) ".*")  ; glob star
					 ((= x ??) ".")	  ; glob single char
					 (t (char-to-string x)))))
				     noproxy "") "\\)"))
		      url-proxy-services))))

    ;; Set the password entry funtion based on user defaults or guess
    ;; based on which remote-file-access package they are using.
    (cond
     (url-passwd-entry-func nil)	; Already been set
     ((fboundp 'read-passwd)		; Use secure password if available
      (setq url-passwd-entry-func 'read-passwd))
     ((or (featurep 'efs)		; Using EFS
	  (featurep 'efs-auto))		; or autoloading efs
      (if (not (fboundp 'read-passwd))
	  (autoload 'read-passwd "passwd" "Read in a password" nil))
      (setq url-passwd-entry-func 'read-passwd))
     ((or (featurep 'ange-ftp)		; Using ange-ftp
	  (and (boundp 'file-name-handler-alist)
	       (not (string-match "Lucid" (emacs-version)))))
      (setq url-passwd-entry-func 'ange-ftp-read-passwd))
     (t
      (url-warn
       'security
       "(url-setup): Can't determine how to read passwords, winging it.")))

    ;; Set up the news service if they haven't done so
    ;; Preference order: user setting, Gnus variables, old nntp.el
    ;; variable, NNTPSERVER environment variable, then just "news".
    (setq url-news-server
	  (cond
	   (url-news-server url-news-server)
	   ((and (boundp 'gnus-default-nntp-server)
		 (not (equal "" gnus-default-nntp-server)))
	    gnus-default-nntp-server)
	   ((and (boundp 'gnus-nntp-server)
		 (not (null gnus-nntp-server))
		 (not (equal "" gnus-nntp-server)))
	    gnus-nntp-server)
	   ((and (boundp 'nntp-server-name)
		 (not (null nntp-server-name))
		 (not (equal "" nntp-server-name)))
	    nntp-server-name)
	   ((getenv "NNTPSERVER") (getenv "NNTPSERVER"))
	   (t "news")))

    ;; Set up the MIME accept string if they haven't got it hardcoded yet
    (or url-mime-accept-string
	(setq url-mime-accept-string (url-parse-viewer-types)))
    (or url-mime-encoding-string
	(setq url-mime-encoding-string
	      (mapconcat 'car
			 mm-content-transfer-encodings
			 ", ")))

    (url-setup-privacy-info)
    (run-hooks 'url-load-hook)
    (setq url-setup-done t)))
|
|
1018
|
|
;;;###autoload
(defun url-get-url-at-point (&optional pt)
  "Get the URL closest to point, but don't change your
position. Has a preference for looking backward when not
directly on a symbol."
  ;; Not at all perfect - point must be right in the name.
  (save-excursion
    (if pt (goto-char pt))
    (let ((filename-chars "%.?@a-zA-Z0-9---()_/:~=&") start url)
      (save-excursion
	;; first see if you're just past a filename
	(if (not (eobp))
	    (if (looking-at "[] \t\n[{}()]") ; whitespace or some parens
		(progn
		  (skip-chars-backward " \n\t\r({[]})")
		  (if (not (bobp))
		      (backward-char 1)))))
	;; Expand outward from point over URL-ish characters to find
	;; the candidate's boundaries.
	(if (string-match (concat "[" filename-chars "]")
			  (char-to-string (following-char)))
	    (progn
	      (skip-chars-backward filename-chars)
	      (setq start (point))
	      (skip-chars-forward filename-chars))
	  (setq start (point)))
	(setq url (if (fboundp 'buffer-substring-no-properties)
		      (buffer-substring-no-properties start (point))
		    (buffer-substring start (point)))))
      ;; Strip an optional "URL:" prefix and a trailing period.
      (if (string-match "^URL:" url)
	  (setq url (substring url 4 nil)))
      (if (string-match "\\.$" url)
	  (setq url (substring url 0 -1)))
      ;; Only absolute URLs are interesting; otherwise return nil.
      (if (not (string-match url-nonrelative-link url))
	  (setq url nil))
      url)))
|
|
1053
|
|
(defun url-eat-trailing-space (x)
  "Return X with any trailing spaces, tabs, and newlines removed."
  (let ((end (length x)))
    (while (and (> end 0)
		(memq (aref x (1- end)) '(?\  ?\t ?\n)))
      (setq end (1- end)))
    (substring x 0 end)))
|
|
1061
|
|
(defun url-strip-leading-spaces (x)
  "Return X with any leading spaces, tabs, and newlines removed."
  (let ((len (length x))
	(beg 0))
    (while (and (< beg len)
		(memq (aref x beg) '(?\  ?\t ?\n)))
      (setq beg (1+ beg)))
    (substring x beg nil)))
|
|
1070
|
|
(defun url-convert-newlines-to-spaces (x)
  "Replace each run of newlines/CRs plus following whitespace in X by a space.
X itself is never modified; the returned string may be X when no
conversion was needed."
  (let ((result x))
    ;; Each pass removes at least one CR/LF, so this terminates; the
    ;; inserted space contains no CR/LF and cannot rematch.
    (while (string-match "[\n\r]+\\s-*" result)
      (setq result (concat (substring result 0 (match-beginning 0))
			   " "
			   (substring result (match-end 0)))))
    result))
|
|
1080
|
|
1081 ;; Test cases
|
|
1082 ;; (url-convert-newlines-to-spaces "foo bar") ; nothing happens
|
|
1083 ;; (url-convert-newlines-to-spaces "foo\n \t bar") ; whitespace converted
|
|
1084 ;;
|
|
1085 ;; This implementation doesn't mangle the match-data, is fast, and doesn't
|
|
1086 ;; create garbage, but it leaves whitespace.
|
|
1087 ;; (defun url-convert-newlines-to-spaces (x)
|
|
1088 ;; "Convert newlines and carriage returns embedded in a string into spaces.
|
|
1089 ;; The string is side-effected, then returned."
|
|
1090 ;; (let ((i 0)
|
|
1091 ;; (limit (length x)))
|
|
1092 ;; (while (< i limit)
|
|
1093 ;; (if (or (= ?\n (aref x i))
|
|
1094 ;; (= ?\r (aref x i)))
|
|
1095 ;; (aset x i ? ))
|
|
1096 ;; (setq i (1+ i)))
|
|
1097 ;; x))
|
|
1098
|
|
(defun url-expand-file-name (url &optional default)
  "Convert URL to a fully specified URL, and canonicalize it.
Second arg DEFAULT is a URL to start with if URL is relative.
If DEFAULT is nil or missing, the current buffer's URL is used.
Path components that are `.' are removed, and
path components followed by `..' are removed, along with the `..' itself."
  ;; First remove all whitespace: trim both ends, then drop any
  ;; embedded spaces, newlines, or CRs (typically line-wrap junk).
  (if url
      (setq url (mapconcat (function (lambda (x)
				       (if (memq x '(? ?\n ?\r))
					   ""
					 (char-to-string x))))
			   (url-strip-leading-spaces
			    (url-eat-trailing-space url)) "")))
  (cond
   ((null url) nil)			; Something hosed!  Be graceful
   ((string-match "^#" url)		; Offset link, use it raw
    url)
   (t
    (let* ((urlobj (url-generic-parse-url url))
	   (inhibit-file-name-handlers t)
	   ;; DEFAULT may be an already-parsed vector, a string to
	   ;; parse, or absent (fall back on this buffer's URL).
	   (defobj (cond
		    ((vectorp default) default)
		    (default (url-generic-parse-url default))
		    (url-current-object url-current-object)
		    (t (url-generic-parse-url (url-view-url t)))))
	   ;; Scheme-specific expansion function from the registry.
	   (expander (cdr-safe
		      (cdr-safe
		       (assoc (or (url-type urlobj)
				  (url-type defobj))
			      url-registered-protocols)))))
      ;; A "//host/path" reference keeps the default's scheme.
      (if (string-match "^//" url)
	  (setq urlobj (url-generic-parse-url (concat (url-type defobj) ":"
						      url))))
      (if (fboundp expander)
	  (funcall expander urlobj defobj)
	(message "Unknown URL scheme: %s" (or (url-type urlobj)
					      (url-type defobj)))
	(url-identity-expander urlobj defobj))
      (url-recreate-url urlobj)))))
|
|
1138
|
|
(defun url-default-expander (urlobj defobj)
  ;; The default expansion routine - urlobj is modified by side effect!
  ;; Each component URLOBJ lacks is inherited from DEFOBJ, then a
  ;; relative filename is resolved against DEFOBJ's directory.
  (url-set-type urlobj (or (url-type urlobj) (url-type defobj)))
  ;; Only inherit the port when the schemes match, so e.g. an FTP
  ;; default never imposes its port on an HTTP reference.
  (url-set-port urlobj (or (url-port urlobj)
			   (and (string= (url-type urlobj)
					 (url-type defobj))
				(url-port defobj))))
  ;; file: URLs have no host component to inherit.
  (if (not (string= "file" (url-type urlobj)))
      (url-set-host urlobj (or (url-host urlobj) (url-host defobj))))
  ;; Only ftp: URLs carry a user name worth inheriting.
  (if (string= "ftp"  (url-type urlobj))
      (url-set-user urlobj (or (url-user urlobj) (url-user defobj))))
  (if (string= (url-filename urlobj) "")
      (url-set-filename urlobj "/"))
  ;; Absolute path: leave it alone.  Relative path: splice it onto
  ;; DEFOBJ's directory and strip "." / ".." components.
  (if (string-match "^/" (url-filename urlobj))
      nil
    (url-set-filename urlobj
		      (url-remove-relative-links
		       (concat (url-basepath (url-filename defobj))
			       (url-filename urlobj))))))
|
|
1158
|
|
(defun url-identity-expander (urlobj defobj)
  "Fill in URLOBJ's scheme from DEFOBJ when it lacks one; change nothing else."
  (url-set-type urlobj (or (url-type urlobj) (url-type defobj))))
|
|
1161
|
|
(defconst url-unreserved-chars
  '(
    ?a ?b ?c ?d ?e ?f ?g ?h ?i ?j ?k ?l ?m ?n ?o ?p ?q ?r ?s ?t ?u ?v ?w ?x ?y ?z
    ?A ?B ?C ?D ?E ?F ?G ?H ?I ?J ?K ?L ?M ?N ?O ?P ?Q ?R ?S ?T ?U ?V ?W ?X ?Y ?Z
    ?0 ?1 ?2 ?3 ?4 ?5 ?6 ?7 ?8 ?9
    ?$ ?- ?_ ?. ?! ?~ ?* ?' ?\( ?\) ?,)
  "A list of characters that are _NOT_ reserved in the URL spec.
Characters in this list pass through `url-hexify-string' unescaped.
This is taken from draft-fielding-url-syntax-02.txt - check your local
internet drafts directory for a copy.")
|
|
1171
|
|
(defun url-hexify-string (str)
  "Return STR with every reserved character replaced by its %XX escape.
Characters listed in `url-unreserved-chars' are left untouched."
  (mapconcat
   (function
    (lambda (ch)
      (cond
       ((memq ch url-unreserved-chars)
	(char-to-string ch))
       ((< ch 16)			; pad to two hex digits
	(upcase (format "%%0%x" ch)))
       (t
	(upcase (format "%%%x" ch))))))
   (mule-decode-string str) ""))
|
|
1183
|
|
(defun url-make-sequence (start end)
  "Make a sequence (list) of numbers from START to END.
Returns nil unless START is strictly less than END; otherwise the
list runs from START up to END inclusive."
  (if (>= start end)
      '()
    (let ((result '())
	  (n end))
      ;; Build backwards so the consing yields ascending order.
      (while (>= n start)
	(setq result (cons n result)
	      n (1- n)))
      result)))
|
|
1195
|
|
(defun url-file-extension (fname &optional x)
  "Return the extension of FNAME, including the leading dot.
With non-nil X, instead return FNAME with the extension stripped off.
When FNAME has no extension, return \"\" normally, but FNAME itself
when X is non-nil - this lets caching code derive a usable .hdr name
for extensionless files."
  (cond
   ((and fname (string-match "\\.[^./]+$" fname))
    (if x
	(substring fname 0 (match-beginning 0))
      (substring fname (match-beginning 0) nil)))
   (x fname)
   (t "")))
|
|
1210
|
|
(defun url-basepath (file &optional x)
  "Return the directory part of FILE, or the nondirectory part if X is non-nil.
Returns \"\" when FILE is nil."
  (if (null file)
      ""
    (if x
	(file-name-nondirectory file)
      (file-name-directory file))))
|
|
1217
|
|
(defun url-parse-query-string (query &optional downcase)
  "Parse QUERY, a \"k1=v1&k2=v2\" string, into an alist of (KEY VALUE...).
Keys and values are %XX-decoded via `url-unhex-string'.  With non-nil
DOWNCASE, keys are lowercased.  Repeated keys accumulate all their
values onto a single entry; pairs without \"=\" are skipped."
  (let (retval key val existing)
    (dolist (pair (split-string query "&"))
      (if (not (string-match "=" pair))
	  nil				; Grace
	(setq key (url-unhex-string (substring pair 0 (match-beginning 0)))
	      val (url-unhex-string (substring pair (match-end 0) nil)))
	(if downcase
	    (setq key (downcase key)))
	(setq existing (assoc key retval))
	(if existing
	    (setcdr existing (cons val (cdr existing)))
	  (setq retval (cons (list key val) retval)))))
    retval))
|
|
1235
|
|
(defun url-unhex (x)
  "Return the numeric value (0-15) of hex digit character X."
  (cond
   ((<= x ?9) (- x ?0))			; '0'..'9'
   ((>= x ?a) (+ 10 (- x ?a)))		; 'a'..'f'
   (t (+ 10 (- x ?A)))))		; 'A'..'F'
|
|
1242
|
|
(defun url-unhex-string (str &optional allow-newlines)
  "Decode %XX escapes in STR and return the resulting string.
CR and LF decode to a single space unless ALLOW-NEWLINES is non-nil,
since raw line breaks are normally forbidden in URL encoding.
A nil STR is treated as the empty string."
  (let ((remaining (or str ""))
	(decoded "")
	(case-fold-search t))
    (while (string-match "%[0-9a-f][0-9a-f]" remaining)
      (let* ((begin (match-beginning 0))
	     (hi (url-unhex (elt remaining (+ begin 1))))
	     (lo (url-unhex (elt remaining (+ begin 2))))
	     (code (+ (* 16 hi) lo)))
	(setq decoded (concat decoded
			      (substring remaining 0 begin)
			      (if (and (not allow-newlines)
				       (memq code '(?\n ?\r)))
				  " "
				(char-to-string code)))
	      remaining (substring remaining (match-end 0)))))
    (concat decoded remaining)))
|
|
1267
|
|
(defun url-clean-text ()
  "Strip gateway noise from `url-working-buffer'.
Removes the \"Connection closed by...\" and \"Process WWW...\" text
that some gateway mechanisms leave behind in the buffer."
  (set-buffer url-working-buffer)
  (dolist (junk-re '("Connection closed by.*" "Process WWW.*"))
    (goto-char (point-min))
    (url-replace-regexp junk-re "")))
|
|
1276
|
|
(defun url-remove-compressed-extensions (fname)
  "Strip every compression suffix known to `url-uncompressor-alist' from FNAME.
Repeats until the remaining extension is not a compressor extension,
so \"foo.tar.gz\" becomes \"foo.tar\"."
  (while (assoc (url-file-extension fname) url-uncompressor-alist)
    (setq fname (url-file-extension fname t)))
  fname)
|
|
1281
|
|
(defun url-uncompress ()
  "Do any necessary uncompression on `url-working-buffer'"
  (set-buffer url-working-buffer)
  (if (not url-inhibit-uncompression)
      (let* ((decoder nil)
	     ;; The encoding may be declared by either of two headers.
	     (code-1 (cdr-safe
		      (assoc "content-transfer-encoding"
			     url-current-mime-headers)))
	     (code-2 (cdr-safe
		      (assoc "content-encoding" url-current-mime-headers)))
	     (done nil)			; encodings already applied
	     ;; Keep MULE from recoding the raw bytes while an external
	     ;; decoder runs.
	     (default-process-coding-system
	       (cons mule-no-coding-system mule-no-coding-system)))
	(mapcar
	 (function
	  (lambda (code)
	    ;; Look up a decoder for CODE, but never apply the same
	    ;; encoding twice.
	    (setq decoder (and (not (member code done))
			       (cdr-safe
				(assoc code mm-content-transfer-encodings)))
		  done (cons code done))
	    (if (not decoder)
		nil
	      (message "Decoding (%s)..." code)
	      ;; A decoder entry is a shell command (string), a command
	      ;; with arguments (list), or an elisp function (symbol).
	      (cond
	       ((stringp decoder)
		(call-process-region (point-min) (point-max) decoder t t nil))
	       ((listp decoder)
		(apply 'call-process-region (point-min) (point-max)
		       (car decoder) t t nil (cdr decoder)))
	       ((and (symbolp decoder) (fboundp decoder))
		(funcall decoder (point-min) (point-max)))
	       (t
		(error "Bad entry for %s in `mm-content-transfer-encodings'"
		       code)))
	      (message "Decoding (%s)... done." code))))
	 (list code-1 code-2))))
  (set-buffer-modified-p nil))
|
|
1319
|
|
(defun url-filter (proc string)
  "Process filter: append STRING to `url-working-buffer'.
When the gateway's \"Connection closed by\" marker shows up in
STRING, detach this filter and finish via `url-sentinel'.
Returns STRING."
  (save-excursion
    (set-buffer url-working-buffer)
    (insert string)
    (when (string-match "\nConnection closed by" string)
      (set-process-filter proc nil)
      (url-sentinel proc string)))
  string)
|
|
1328
|
|
(defun url-default-callback (buf)
  ;; Default action once a retrieval into BUF has finished: cache the
  ;; document, then dispatch to the registered callback, falling back
  ;; on w3's sentinel or a simple completion message.
  (url-download-minor-mode nil)		; one download finished
  (url-store-in-cache)
  (cond
   ;; A buffer-local callback function is registered and defined.
   ((save-excursion (set-buffer buf)
		    (and url-current-callback-func
			 (fboundp url-current-callback-func)))
    (save-excursion
      (save-window-excursion
	(set-buffer buf)
	(cond
	 ;; Callback data may be an argument list, a single argument,
	 ;; or absent entirely.
	 ((listp url-current-callback-data)
	  (apply url-current-callback-func
		 url-current-callback-data))
	 (url-current-callback-data
	  (funcall url-current-callback-func
		   url-current-callback-data))
	 (t
	  (funcall url-current-callback-func))))))
   ;; No callback registered: let Emacs-w3 display it if loaded.
   ((fboundp 'w3-sentinel)
    (set-variable 'w3-working-buffer buf)
    (w3-sentinel))
   (t
    (message "Retrieval for %s complete." buf))))
|
|
1353
|
|
(defun url-sentinel (proc string)
  ;; Process sentinel for URL retrievals: tidy up PROC's buffer and
  ;; invoke `url-default-retrieval-proc' unless the response status is
  ;; one that is handled elsewhere (auth, redirects, no-content).
  (let* ((buf (process-buffer proc))
	 (url-working-buffer (and buf (get-buffer buf)))
	 status)
    (if (not url-working-buffer)
	(url-warn 'url (format "Process %s completed with no buffer!" proc))
      (save-excursion
	(set-buffer url-working-buffer)
	(remove-hook 'after-change-functions 'url-after-change-function)
	(if url-be-asynchronous
	    (progn
	      (widen)
	      (url-clean-text)
	      (cond
	       ((and (null proc) (not url-working-buffer)) nil)
	       ((url-mime-response-p)
		(setq status (url-parse-mime-headers))))
	      ;; Fall back on the filename extension when the server
	      ;; declared no content type.
	      (if (not url-current-mime-type)
		  (setq url-current-mime-type (mm-extension-to-mime
					       (url-file-extension
						(url-filename
						 url-current-object)))))))
	;; 401 = auth challenge, 301/302/303 = redirect, 204 = no
	;; content: these need no display, so skip the callback.
	(if (member status '(401 301 302 303 204))
	    nil
	  (funcall url-default-retrieval-proc (buffer-name url-working-buffer)))))))
|
14
|
1379
|
|
(defun url-remove-relative-links (name)
  "Canonicalize pathname NAME by deleting `.' and `..' components.
A leading slash is added when NAME does not already start with one."
  (let ((path (if (string-match "^/" name)
		  name
		(concat "/" name))))
    ;; Drop "./" segments.
    (while (string-match "/\\(\\./\\)" path)
      (setq path (concat (substring path 0 (match-beginning 1))
			 (substring path (match-end 1)))))
    ;; Collapse "dir/../" pairs.
    (while (string-match "/\\([^/]*/\\.\\./\\)" path)
      (setq path (concat (substring path 0 (match-beginning 1))
			 (substring path (match-end 1)))))
    ;; A "/../" at the very front has nothing to consume; trim it.
    (while (string-match "^/\\.\\.\\(/\\)" path)
      (setq path (substring path (match-beginning 1) nil)))
    path))
|
|
1394
|
|
(defun url-truncate-url-for-viewing (url &optional width)
  "Return a shortened version of URL that is WIDTH characters or less wide.
WIDTH defaults to the current frame width."
  (let* ((fr-width (or width (frame-width)))
	 (str-width (length url))
	 ;; NOTE(review): `tail' is computed (twice) but never used
	 ;; below - looks vestigial; confirm before removing.
	 (tail (file-name-nondirectory url))
	 (fname nil)
	 (modified 0)			; count of path components dropped
	 (urlobj nil))
    ;; The first thing that can go are the search strings
    (if (and (>= str-width fr-width)
	     (string-match "?" url))
	(setq url (concat (substring url 0 (match-beginning 0)) "?...")
	      str-width (length url)
	      tail (file-name-nondirectory url)))
    (if (< str-width fr-width)
	nil				; Hey, we are done!
      ;; Still too wide: drop leading path components one at a time,
      ;; rebuilding the URL after each, until it fits or none remain.
      (setq urlobj (url-generic-parse-url url)
	    fname (url-filename urlobj)
	    fr-width (- fr-width 4))
      (while (and (>= str-width fr-width)
		  (string-match "/" fname))
	(setq fname (substring fname (match-end 0) nil)
	      modified (1+ modified))
	(url-set-filename urlobj fname)
	(setq url (url-recreate-url urlobj)
	      str-width (length url)))
      ;; Mark the elision with "/.../" when several components went.
      (if (> modified 1)
	  (setq fname (concat "/.../" fname))
	(setq fname (concat "/" fname)))
      (url-set-filename urlobj fname)
      (setq url (url-recreate-url urlobj)))
    url))
|
|
1428
|
|
(defun url-view-url (&optional no-show)
  "Show the current document's URL in the minibuffer.
With non-nil NO-SHOW, just return the URL as a string instead of
displaying it.  Returns nil when the buffer has no URL."
  (interactive)
  (cond
   ((not url-current-object) nil)
   (no-show (url-recreate-url url-current-object))
   (t (message "%s" (url-recreate-url url-current-object)))))
|
14
|
1438
|
|
(defun url-parse-Netscape-history (fname)
  ;; Parse a Netscape/X style global history list.
  ;; FNAME names the file for reference only; its text is already in
  ;; the current buffer (see `url-parse-global-history').
  (let (pos					; Position holder
	url					; The URL
	time)					; Last time accessed
    (goto-char (point-min))
    (skip-chars-forward "^\n")
    (skip-chars-forward "\n \t")		; Skip past the tag line
    (setq url-global-history-hash-table (make-hash-table :size 131
							 :test 'equal))
    ;; Here we will go to the end of the line and
    ;; skip back over a token, since we might run
    ;; into spaces in URLs, depending on how much
    ;; smarter netscape is than the old XMosaic :)
    (while (not (eobp))
      (setq pos (point))
      (end-of-line)
      (skip-chars-backward "^ \t")		; back over the time token
      (skip-chars-backward " \t")		; and its separator
      (setq url (buffer-substring pos (point))
	    pos (1+ (point)))
      (skip-chars-forward "^\n")
      (setq time (buffer-substring pos (point)))
      (skip-chars-forward "\n")
      (setq url-history-changed-since-last-save t)
      (cl-puthash url time url-global-history-hash-table))))
|
|
1465
|
|
(defun url-parse-Mosaic-history-v1 (fname)
  ;; Parse an NCSA Mosaic/X style global history list
  ;; FNAME is for reference only; the file's text is already in the
  ;; current buffer (see `url-parse-global-history').
  (goto-char (point-min))
  (skip-chars-forward "^\n")
  (skip-chars-forward "\n \t")			; Skip past the tag line
  (skip-chars-forward "^\n")
  (skip-chars-forward "\n \t")			; Skip past the second tag line
  (setq url-global-history-hash-table (make-hash-table :size 131
						       :test 'equal))
  (let (pos					; Temporary position holder
	bol					; Beginning-of-line
	url					; URL
	time					; Time
	last-end				; Last ending point
	)
    (while (not (eobp))
      (setq bol (point))
      (end-of-line)
      (setq pos (point)
	    last-end (point))
      ;; A v1 line is "URL Day Mon DD HH:MM:SS YYYY"; walk backward
      ;; over the five date tokens to find where the URL ends.
      (skip-chars-backward "^ \t" bol)		; Skip over year
      (skip-chars-backward " \t" bol)
      (skip-chars-backward "^ \t" bol)		; Skip over time
      (skip-chars-backward " \t" bol)
      (skip-chars-backward "^ \t" bol)		; Skip over day #
      (skip-chars-backward " \t" bol)
      (skip-chars-backward "^ \t" bol)		; Skip over month
      (skip-chars-backward " \t" bol)
      (skip-chars-backward "^ \t" bol)		; Skip over day abbrev.
      (if (bolp)
	  nil					; Malformed entry!!! Ack! Bailout!
	(setq time (buffer-substring pos (point)))
	(skip-chars-backward " \t")
	(setq pos (point)))
      (beginning-of-line)
      (setq url (buffer-substring (point) pos))
      (goto-char (min (1+ last-end) (point-max))) ; Goto next line
      (if (/= (length url) 0)
	  (progn
	    (setq url-history-changed-since-last-save t)
	    (cl-puthash url time url-global-history-hash-table))))))
|
|
1507
|
|
(defun url-parse-Mosaic-history-v2 (fname)
  ;; Parse an NCSA Mosaic/X style global history list (version 2)
  ;; FNAME is for reference only; the file's text is already in the
  ;; current buffer (see `url-parse-global-history').
  (goto-char (point-min))
  (skip-chars-forward "^\n")
  (skip-chars-forward "\n \t")			; Skip past the tag line
  (skip-chars-forward "^\n")
  (skip-chars-forward "\n \t")			; Skip past the second tag line
  (setq url-global-history-hash-table (make-hash-table :size 131
						       :test 'equal))
  (let (pos					; Temporary position holder
	bol					; Beginning-of-line
	url					; URL
	time					; Time
	last-end				; Last ending point
	)
    (while (not (eobp))
      (setq bol (point))
      (end-of-line)
      (setq pos (point)
	    last-end (point))
      ;; A v2 line is "URL TIME" with a single trailing time token.
      (skip-chars-backward "^ \t" bol)		; Skip over time
      (if (bolp)
	  nil					; Malformed entry!!! Ack! Bailout!
	(setq time (buffer-substring pos (point)))
	(skip-chars-backward " \t")
	(setq pos (point)))
      (beginning-of-line)
      (setq url (buffer-substring (point) pos))
      (goto-char (min (1+ last-end) (point-max))) ; Goto next line
      (if (/= (length url) 0)
	  (progn
	    (setq url-history-changed-since-last-save t)
	    (cl-puthash url time url-global-history-hash-table))))))
|
|
1541
|
|
(defun url-parse-Emacs-history (&optional fname)
  ;; Parse out the Emacs-w3 global history file for completion, etc.
  ;; The file is plain elisp; loading it rebuilds the hash table.
  (or fname (setq fname (expand-file-name url-global-history-file)))
  (cond
   ((not (file-exists-p fname))
    (message "%s does not exist." fname))
   ((not (file-readable-p fname))
    (message "%s is unreadable." fname))
   (t
    (condition-case ()
	(load fname nil t)
      (error (message "Could not load %s" fname)))
    (if (boundp 'url-global-history-completion-list)
	;; Hey!  Automatic conversion of old format!
	;; Old files set an alist; convert it into the hash table.
	(progn
	  (setq url-global-history-hash-table (make-hash-table :size 131
							       :test 'equal)
		url-history-changed-since-last-save t)
	  (mapcar (function
		   (lambda (x)
		     (cl-puthash (car x) (cdr x)
				 url-global-history-hash-table)))
		  (symbol-value 'url-global-history-completion-list)))))))
|
|
1565
|
|
(defun url-parse-global-history (&optional fname)
  ;; Parse out the mosaic global history file for completions, etc.
  ;; Sniffs the first line of FNAME to decide which known on-disk
  ;; format it uses, then dispatches to the matching parser (which
  ;; reads the text from the temp buffer loaded here).
  (or fname (setq fname (expand-file-name url-global-history-file)))
  (cond
   ((not (file-exists-p fname))
    (message "%s does not exist." fname))
   ((not (file-readable-p fname))
    (message "%s is unreadable." fname))
   (t
    (save-excursion
      (set-buffer (get-buffer-create " *url-tmp*"))
      (erase-buffer)
      (insert-file-contents-literally fname)
      (goto-char (point-min))
      (cond
       ((looking-at "(setq") (url-parse-Emacs-history fname))
       ((looking-at "ncsa-mosaic-.*-1$") (url-parse-Mosaic-history-v1 fname))
       ((looking-at "ncsa-mosaic-.*-2$") (url-parse-Mosaic-history-v2 fname))
       ((or (looking-at "MCOM-") (looking-at "netscape"))
	(url-parse-Netscape-history fname))
       (t
	(url-warn 'url (format "Cannot deduce type of history file: %s"
			       fname))))))))
|
|
1589
|
|
(defun url-write-Emacs-history (fname)
  ;; Write an Emacs-w3 style global history list into FNAME
  ;; Uses the current buffer as scratch space, which ends up saved
  ;; to FNAME by `write-file'.
  (erase-buffer)
  (let ((count 0))
    ;; One `cl-puthash' form per history entry...
    (cl-maphash (function
		 (lambda (key value)
		   (setq count (1+ count))
		   (insert "(cl-puthash \"" key "\""
			   (if (not (stringp value)) " '" "")
			   (prin1-to-string value)
			   " url-global-history-hash-table)\n")))
		url-global-history-hash-table)
    ;; ...preceded by the form that recreates the table itself.
    (goto-char (point-min))
    (insert (format
	     "(setq url-global-history-hash-table (make-hash-table :size %d :test 'equal))\n"
	     (/ count 4)))
    (goto-char (point-max))
    (insert "\n")
    (write-file fname)))
|
|
1609
|
|
(defun url-write-Netscape-history (fname)
  "Write the global history hash table into FNAME in Netscape's format.
Each line is \"URL TIME\".  An entry whose stored time is not a plain
string token reuses the previously seen valid time, keeping the
space-delimited file format parseable."
  (erase-buffer)
  (let ((last-valid-time "785305714"))	; Picked out of thin air,
					; in case first in assoc list
					; doesn't have a valid time
    (goto-char (point-min))
    (insert "MCOM-Global-history-file-1\n")
    (cl-maphash (function
		 (lambda (url time)
		   ;; Bug fix: the original tested (string-match " \t" time),
		   ;; which only matches the literal two-character sequence
		   ;; space-then-tab; "[ \t]" (any embedded whitespace) is
		   ;; the evident intent, since whitespace inside the time
		   ;; token would corrupt the "URL TIME" line format.
		   (if (or (not (stringp time))
			   (string-match "[ \t]" time))
		       (setq time last-valid-time)
		     (setq last-valid-time time))
		   (insert url " " time "\n")))
		url-global-history-hash-table)
    (write-file fname)))
|
|
1626
|
|
(defun url-write-Mosaic-history-v1 (fname)
  ;; Write a Mosaic/X-style global history list into FNAME
  ;; Header line, then one "URL TIME" line per entry, where TIME is a
  ;; `current-time-string'-style string.  Clobbers the current buffer.
  (erase-buffer)
  (goto-char (point-min))
  (insert "ncsa-mosaic-history-format-1\nGlobal\n")
  (cl-maphash
   (function
    (lambda (entry-url stamp)
      ;; A raw Emacs time value (a list) must be rendered as a string.
      (and (listp stamp)
           (setq stamp (current-time-string stamp)))
      ;; Anything that still isn't a space-containing string is not in
      ;; the expected format; substitute the current time.
      (or (and (stringp stamp) (string-match " " stamp))
          (setq stamp (current-time-string)))
      (insert entry-url " " stamp "\n")))
   url-global-history-hash-table)
  (write-file fname))
|
|
1642
|
|
(defun url-write-Mosaic-history-v2 (fname)
  ;; Write a Mosaic/X-style global history list into FNAME
  ;; Format 2: header line, then one "URL TIME" line per entry in
  ;; `url-global-history-hash-table'.  Clobbers the current buffer.
  (let ((last-valid-time "827250806")) ; fallback timestamp, reused when
                                       ; an entry has no usable time
    (erase-buffer)
    (goto-char (point-min))
    (insert "ncsa-mosaic-history-format-2\nGlobal\n")
    (cl-maphash (function
                 (lambda (url time)
                   ;; List times (raw Emacs time values) and other
                   ;; non-strings can't be written in this format;
                   ;; substitute the last good timestamp seen.
                   (if (listp time)
                       (setq time last-valid-time)
                     (setq last-valid-time time))
                   (if (not (stringp time))
                       (setq time last-valid-time))
                   (insert url " " time "\n")))
                url-global-history-hash-table)
    (write-file fname)))
|
|
1659
|
|
(defun url-write-global-history (&optional fname)
  "Write the global history file into `url-global-history-file'.
The type of data written is determined by what is in the file to begin
with.  If the type of storage cannot be determined, the history is
saved in Emacs-w3 format."
  (interactive)
  (or fname (setq fname (expand-file-name url-global-history-file)))
  (cond
   ;; Nothing changed since the last save: do nothing.
   ((not url-history-changed-since-last-save) nil)
   ((not (file-writable-p fname))
    (message "%s is unwritable." fname))
   (t
    (let ((make-backup-files nil)       ; write the history file in place,
          (version-control nil)         ; without backups or numbered versions
          (require-final-newline t))
      (save-excursion
        (set-buffer (get-buffer-create " *url-tmp*"))
        (erase-buffer)
        ;; Read the existing file, if any, purely to sniff its format.
        (condition-case ()
            (insert-file-contents-literally fname)
          (error nil))
        (goto-char (point-min))
        ;; Dispatch on the magic first line; default to Emacs-w3 format.
        (cond
         ((looking-at "ncsa-mosaic-.*-1$") (url-write-Mosaic-history-v1 fname))
         ((looking-at "ncsa-mosaic-.*-2$") (url-write-Mosaic-history-v2 fname))
         ((looking-at "MCOM-") (url-write-Netscape-history fname))
         ((looking-at "netscape") (url-write-Netscape-history fname))
         ((looking-at "(setq") (url-write-Emacs-history fname))
         (t (url-write-Emacs-history fname)))
        (kill-buffer (current-buffer))))))
  (setq url-history-changed-since-last-save nil))
|
|
1691
|
|
1692
|
|
1693 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
|
1694 ;;; The main URL fetching interface
|
|
1695 ;;; -------------------------------
|
|
1696 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
|
1697
|
|
;;;###autoload
(defun url-popup-info (url)
  "Retrieve the HTTP/1.0 headers and display them in a temp buffer."
  (let* ((urlobj (url-generic-parse-url url))
         (type (url-type urlobj))       ; URL scheme, e.g. "http"
         data)                          ; file attributes (file/ftp branch)
    (cond
     ;; HTTP: issue a synchronous HEAD request and return the raw headers.
     ((string= type "http")
      (let ((url-request-method "HEAD")
            (url-automatic-caching nil)
            (url-inhibit-mime-parsing t)
            (url-working-buffer " *popup*"))
        (save-excursion
          (set-buffer (get-buffer-create url-working-buffer))
          (erase-buffer)
          (setq url-be-asynchronous nil) ; we need the answer right now
          (url-retrieve url)
          ;; Turn CR characters into spaces for display.
          (subst-char-in-region (point-min) (point-max) ?\r ? )
          (buffer-string))))
     ;; file/ftp: report the file's attributes as a formatted string.
     ((or (string= type "file") (string= type "ftp"))
      (setq data (url-file-attributes url))
      (set-buffer (get-buffer-create
                   (url-generate-new-buffer-name "*Header Info*")))
      (erase-buffer)
      (if data
          (concat (if (stringp (nth 0 data))
                      (concat " Linked to: " (nth 0 data))
                    (concat " Directory: " (if (nth 0 data) "Yes" "No")))
                  "\n Links: " (int-to-string (nth 1 data))
                  "\n File UID: " (int-to-string (nth 2 data))
                  "\n File GID: " (int-to-string (nth 3 data))
                  "\n Last Access: " (current-time-string (nth 4 data))
                  "\nLast Modified: " (current-time-string (nth 5 data))
                  "\n Last Changed: " (current-time-string (nth 6 data))
                  "\n Size (bytes): " (int-to-string (nth 7 data))
                  "\n File Type: " (or (nth 8 data) "text/plain"))
        (concat "No info found for " url)))
     ;; news: fetch headers by message-ID (URL must contain "@").
     ((and (string= type "news") (string-match "@" url))
      (let ((art (url-filename urlobj)))
        ;; Make sure the article ID ends with ">" (bracketed message-ID).
        (if (not (string= (substring art -1 nil) ">"))
            (setq art (concat "<" art ">")))
        (url-get-headers-from-article-id art)))
     (t (concat "Don't know how to find information on " url)))))
|
|
1741
|
|
(defun url-decode-text ()
  ;; Decode text transmitted by NNTP.
  ;; 0. Delete status line.
  ;; 1. Delete `^M' at end of line.
  ;; 2. Delete `.' at end of buffer (end of text mark).
  ;; 3. Delete `.' at beginning of line."
  ;; Operates destructively on `nntp-server-buffer'; the edits below are
  ;; order-sensitive (the status line must go before dot-unstuffing).
  (save-excursion
    (set-buffer nntp-server-buffer)
    ;; Insert newline at end of buffer.
    (goto-char (point-max))
    (if (not (bolp))
        (insert "\n"))
    ;; Delete status line.
    (goto-char (point-min))
    (delete-region (point) (progn (forward-line 1) (point)))
    ;; Delete `^M' at end of line.
    ;; (replace-regexp "\r$" "")
    (while (not (eobp))
      (end-of-line)
      (if (= (preceding-char) ?\r)
          (delete-char -1))
      (forward-line 1)
      )
    ;; Delete `.' at end of buffer (end of text mark).
    (goto-char (point-max))
    (forward-line -1)                   ;(beginning-of-line)
    (if (looking-at "^\\.$")
        (delete-region (point) (progn (forward-line 1) (point))))
    ;; Replace `..' at beginning of line with `.'.
    (goto-char (point-min))
    ;; (replace-regexp "^\\.\\." ".")
    (while (search-forward "\n.." nil t)
      (delete-char -1))
    ))
|
|
1776
|
|
(defun url-get-headers-from-article-id (art)
  ;; Return the HEAD of ART (a usenet news article)
  ;; Dispatches on which NNTP package is loaded, then returns the
  ;; contents of `nntp-server-buffer' as a string.
  (cond
   ;; "flee" nntp package: send HEAD, then poll the connection until the
   ;; terminating dot line arrives in the server buffer.
   ((string-match "flee" nntp-version)
    (nntp/command "HEAD" art)
    (save-excursion
      (set-buffer nntp-server-buffer)
      (while (progn (goto-char (point-min))
                    (not (re-search-forward "^.\r*$" nil t)))
        (url-accept-process-output nntp/connection))))
   ;; Classic nntp.el: synchronous command, then strip NNTP framing.
   (t
    (nntp-send-command "^\\.\r$" "HEAD" art)
    (url-decode-text)))
  (save-excursion
    (set-buffer nntp-server-buffer)
    (buffer-string)))
|
|
1793
|
|
;; External retrieval: `url-retrieve-externally' shells out to this
;; program for URL schemes it does not handle directly.
(defvar url-external-retrieval-program "www"
  "*Name of the external executable to run to retrieve URLs.")

(defvar url-external-retrieval-args '("-source")
  "*A list of arguments to pass to `url-external-retrieval-program' to
retrieve a URL by its HTML source.")
|
|
1800
|
|
(defun url-retrieve-externally (url &optional no-cache)
  ;; Fetch URL into `url-working-buffer', either internally (for pseudo
  ;; URL types and directly accessible files) or by running
  ;; `url-external-retrieval-program' with `url-external-retrieval-args'.
  (let ((url-working-buffer (if (and url-multiple-p
                                     (string-equal url-working-buffer
                                                   url-default-working-buffer))
                                (url-get-working-buffer-name)
                              url-working-buffer)))
    ;; Kill any stale working buffer so no buffer-local state leaks into
    ;; this retrieval.  Test existence with `get-buffer': the previous
    ;; `get-buffer-create' always succeeded, creating a buffer only to
    ;; kill it again on the next line.
    (if (get-buffer url-working-buffer)
        (save-excursion
          (set-buffer url-working-buffer)
          (set-buffer-modified-p nil)
          (kill-buffer url-working-buffer)))
    (set-buffer (get-buffer-create url-working-buffer))
    (let* ((args (append url-external-retrieval-args (list url)))
           (urlobj (url-generic-parse-url url))
           (type (url-type urlobj)))
      (if (or (member type '("www" "about" "mailto" "mailserver"))
              (url-file-directly-accessible-p urlobj))
          (url-retrieve-internally url)
        (url-lazy-message "Retrieving %s..." url)
        ;; Run the external program synchronously, output to this buffer.
        (apply 'call-process url-external-retrieval-program
               nil t nil args)
        (url-lazy-message "Retrieving %s... done" url)))))
|
14
|
1823
|
|
(defun url-get-normalized-date (&optional specified-time)
  ;; Return a 'real' date string that most HTTP servers can understand.
  ;; SPECIFIED-TIME, when given, is passed to `current-time-string';
  ;; otherwise the current time is used.
  (require 'timezone)
  (let* ((raw (if specified-time (current-time-string specified-time)
                (current-time-string)))
         ;; Convert the raw ctime string to an ARPA-style date in GMT.
         (gmt (timezone-make-date-arpa-standard raw
                                                (nth 1 (current-time-zone))
                                                "GMT"))
         (parsed (timezone-parse-date gmt))
         ;; NOTE(review): `weekday-alist' and `monthabbrev-alist' are free
         ;; variables here -- presumably defined by some library loaded
         ;; alongside this file; confirm before using this standalone.
         (day (cdr-safe (assoc (substring raw 0 3) weekday-alist)))
         (year nil)
         (month (car
                 (rassoc
                  (string-to-int (aref parsed 1)) monthabbrev-alist)))
         )
    (setq day (or (car-safe (rassoc day weekday-alist))
                  (substring raw 0 3))
          year (aref parsed 0))
    ;; This is needed for plexus servers, or the server will hang trying to
    ;; parse the if-modified-since header.  Hopefully, I can take this out
    ;; soon.
    (if (and year (> (length year) 2))
        (setq year (substring year -2 nil)))

    ;; Assemble "Day, DD-Mon-YY HH:MM:SS ZONE".
    (concat day ", " (aref parsed 2) "-" month "-" year " "
            (aref parsed 3) " " (or (aref parsed 4)
                                    (concat "[" (nth 1 (current-time-zone))
                                            "]")))))
|
|
1852
|
|
(defun url-get-working-buffer-name ()
  "Get a working buffer name such as ` *URL-<i>*' without a live process and empty"
  (let ((idx 1)
        (candidate nil)
        (existing nil)
        (found nil))
    ;; Probe " *URL-1*", " *URL-2*", ... until a name is found whose
    ;; buffer either does not exist, or is both process-free and empty.
    (while (not found)
      (setq candidate (format " *URL-%d*" idx)
            existing (get-buffer candidate))
      (if (or (null existing)
              (and (not (get-buffer-process existing))
                   (save-excursion
                     (set-buffer existing)
                     (<= (point-max) 1))))
          (setq found t)
        (setq idx (1+ idx))))
    candidate))
|
|
1864
|
|
(defun url-default-find-proxy-for-url (urlobj host)
  ;; Default value of `url-proxy-locator': decide how URLOBJ on HOST is
  ;; to be fetched.  Returns "DIRECT" or "PROXY <host:port>".
  (let ((no-proxy-entry (assoc "no_proxy" url-proxy-services))
        (scheme-proxy (cdr (assoc (url-type urlobj) url-proxy-services))))
    (cond
     ;; HOST matches the configured no_proxy pattern, or this is the
     ;; pseudo "www" scheme: connect directly.
     ((or (and no-proxy-entry
               (string-match (cdr no-proxy-entry) host))
          (equal "www" (url-type urlobj)))
      "DIRECT")
     ;; A proxy is registered for this URL scheme.
     (scheme-proxy
      (concat "PROXY " scheme-proxy))
     ;;
     ;; Should check for socks
     ;;
     (t
      "DIRECT"))))
|
|
1881
|
|
(defvar url-proxy-locator 'url-default-find-proxy-for-url
  "Function called with a URL object and a hostname to pick a proxy.
It must return a directive string such as \"DIRECT\", \"PROXY host:port\",
or \"SOCKS host:port\"; several directives may be joined with semicolons
\(see `url-find-proxy-for-url', which parses the result).")
|
|
1883
|
|
(defun url-find-proxy-for-url (url host)
  ;; Ask `url-proxy-locator' how to reach URL on HOST and translate the
  ;; answer into a proxy URL string, or nil for a direct connection.
  (let* ((case-fold-search t)
         (directives (split-string (funcall url-proxy-locator url host)
                                   " *; *"))
         ;; Not sure how I should handle gracefully degrading from one proxy
         ;; to another, so for now just deal with the first one
         (directive (car directives)))
    (cond
     ((string-match "^direct" directive) nil)
     ((string-match "^proxy +" directive)
      (concat "http://" (substring directive (match-end 0)) "/"))
     ((string-match "^socks +" directive)
      (concat "socks://" (substring directive (match-end 0))))
     (t
      (url-warn 'url (format "Unknown proxy directive: %s" directive) 'critical)
      nil))))
|
|
1901
|
|
(defun url-retrieve-internally (url &optional no-cache)
  ;; Core retrieval engine: fetch URL into a working buffer and return
  ;; (CACHED-P . WORKING-BUFFER).  Dispatches on the URL scheme through
  ;; `url-registered-protocols', honoring proxies and the local cache.
  (let* ((url-working-buffer (if (and url-multiple-p
                                      (string-equal
                                       (if (bufferp url-working-buffer)
                                           (buffer-name url-working-buffer)
                                         url-working-buffer)
                                       url-default-working-buffer))
                                 (url-get-working-buffer-name)
                               url-working-buffer))
         (urlobj (url-generic-parse-url url))
         (type (url-type urlobj))
         ;; Consult the proxy locator only for URLs that name a host.
         (url-using-proxy (if (url-host urlobj)
                              (url-find-proxy-for-url urlobj
                                                      (url-host urlobj))
                            nil))
         (handler nil)                  ; protocol retrieval function
         (original-url url)             ; URL before cache-file rewriting
         (cached nil))
    (if url-using-proxy (setq type "proxy"))
    ;; Choose the handler: the cache extractor for a fresh cache hit,
    ;; otherwise the registered retrieval function for this scheme.  On
    ;; a cache hit, rewrite URL to the cached file's name.
    (setq cached (url-is-cached url)
          cached (and cached (not (url-cache-expired url cached)))
          handler (if cached
                      'url-extract-from-cache
                    (car-safe
                     (cdr-safe (assoc (or type "auto")
                                      url-registered-protocols))))
          url (if cached (url-create-cached-filename url) url))
    (save-excursion
      (set-buffer (get-buffer-create url-working-buffer))
      (setq url-current-can-be-cached (not no-cache)
            url-current-object urlobj))
    (if (and handler (fboundp handler))
        (funcall handler url)
      ;; No handler for this scheme: leave an HTML error page behind.
      (set-buffer (get-buffer-create url-working-buffer))
      (erase-buffer)
      (setq url-current-mime-type "text/html")
      (insert "<title> Link Error! </title>\n"
              "<h1> An error has occurred... </h1>\n"
              (format "The link type `<code>%s</code>'" type)
              " is unrecognized or unsupported at this time.<p>\n"
              "If you feel this is an error in Emacs-W3, please "
              "<a href=\"mailto://" url-bug-address "\">send me mail.</a>"
              "<p><address>William Perry</address><br>"
              "<address>" url-bug-address "</address>"))
    (cond
     ;; Asynchronous HTTP is still in flight: MIME parsing happens later.
     ((and url-be-asynchronous (not cached) (member type '("http" "proxy")))
      nil)
     (url-be-asynchronous
      (funcall url-default-retrieval-proc (buffer-name)))
     ((not (get-buffer url-working-buffer)) nil)
     ;; Synchronous case: parse MIME headers now unless inhibited.
     ((and (not url-inhibit-mime-parsing)
           (or cached (url-mime-response-p t)))
      (or cached (url-parse-mime-headers nil t))))
    ;; Guess a MIME type when the server/cache did not provide one.
    (if (and (or (not url-be-asynchronous)
                 (not (equal type "http")))
             url-current-object
             (not url-current-mime-type))
        (if (url-buffer-is-hypertext)
            (setq url-current-mime-type "text/html")
          (setq url-current-mime-type (mm-extension-to-mime
                                       (url-file-extension
                                        (url-filename
                                         url-current-object))))))
    (if (not url-be-asynchronous)
        (url-store-in-cache url-working-buffer))
    ;; Record the visit in the global history (except pseudo URLs).
    (if (not url-global-history-hash-table)
        (setq url-global-history-hash-table (make-hash-table :size 131
                                                             :test 'equal)))
    (if (not (string-match "^\\(about\\|www\\):" original-url))
        (progn
          (setq url-history-changed-since-last-save t)
          (cl-puthash original-url (current-time)
                      url-global-history-hash-table)))
    (cons cached url-working-buffer)))
|
14
|
1976
|
|
;;;###autoload
(defun url-retrieve (url &optional no-cache expected-md5)
  "Retrieve a document over the World Wide Web.
The document should be specified by its fully specified
Uniform Resource Locator.  No parsing is done, just return the
document as the server sent it.  The document is left in the
buffer specified by url-working-buffer.  url-working-buffer is killed
immediately before starting the transfer, so that no buffer-local
variables interfere with the retrieval.  HTTP/1.0 redirection will
be honored before this function exits."
  (url-do-setup)
  ;; Strip text properties from URL when the fast C primitive exists, so
  ;; stray properties cannot leak into the request.
  (when (and (fboundp 'set-text-properties)
             (subrp (symbol-function 'set-text-properties)))
    (set-text-properties 0 (length url) nil url))
  ;; Accept "url:..."-prefixed locators by dropping the prefix.
  (when (and url (string-match "^url:" url))
    (setq url (substring url (match-end 0))))
  (url-retrieve-internally url no-cache))
|
|
1995
|
|
1996 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
|
1997 ;;; How to register a protocol
|
|
1998 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
|
(defun url-register-protocol (protocol &optional retrieve expander defport)
  "Register a protocol with the URL retrieval package.
PROTOCOL is the type of protocol being registers (http, nntp, etc),
         and is the first chunk of the URL.  ie: http:// URLs will be
         handled by the protocol registered as 'http'.  PROTOCOL can
         be either a symbol or a string - it is converted to a string,
         and lowercased before being registered.
RETRIEVE (optional) is the function to be called with a url as its
         only argument.  If this argument is omitted, then this looks
         for a function called 'url-PROTOCOL'.  A warning is shown if
         the function is undefined, but the protocol is still
         registered.
EXPANDER (optional) is the function to call to expand a relative link
         of type PROTOCOL.  If omitted, this defaults to
         `url-default-expander'

Any proxy information is read in from environment variables at this
time, so this function should only be called after dumping emacs."
  (let* ((protocol (cond
                    ((stringp protocol) (downcase protocol))
                    ((symbolp protocol) (downcase (symbol-name protocol)))
                    (t nil)))

         (retrieve (or retrieve (intern (concat "url-" protocol))))
         (expander (or expander 'url-default-expander))
         (cur-protocol (assoc protocol url-registered-protocols))
         (urlobj nil)                   ; proxy URL parsed from environment
         (cur-proxy (assoc protocol url-proxy-services))
         ;; Check PROTOCOL_proxy, PROTOCOL_PROXY and upcased variants.
         (env-proxy (or (getenv (concat protocol "_proxy"))
                        (getenv (concat protocol "_PROXY"))
                        (getenv (upcase (concat protocol "_PROXY"))))))

    (if (not protocol)
        (error "Invalid data to url-register-protocol."))

    (if (not (fboundp retrieve))
        (message "Warning: %s registered, but no function found." protocol))

    ;; Store the default port, if none previously specified and
    ;; defport given
    (if (and defport (not (assoc protocol url-default-ports)))
        (setq url-default-ports (cons (cons protocol defport)
                                      url-default-ports)))

    ;; Store the appropriate information for later
    (if cur-protocol
        (setcdr cur-protocol (cons retrieve expander))
      (setq url-registered-protocols (cons (cons protocol
                                                 (cons retrieve expander))
                                           url-registered-protocols)))

    ;; Store any proxying information - this will not overwrite an old
    ;; entry, so that people can still set this information in their
    ;; .emacs file
    (cond
     (cur-proxy nil)                    ; Keep their old settings
     ((null env-proxy) nil)             ; No proxy setup
     ;; First check if its something like hostname:port
     ((string-match "^\\([^:]+\\):\\([0-9]+\\)$" env-proxy)
      (setq urlobj (url-generic-parse-url nil)) ; Get a blank object
      (url-set-type urlobj "http")
      (url-set-host urlobj (url-match env-proxy 1))
      (url-set-port urlobj (url-match env-proxy 2)))
     ;; Then check if its a fully specified URL
     ((string-match url-nonrelative-link env-proxy)
      (setq urlobj (url-generic-parse-url env-proxy))
      (url-set-type urlobj "http")
      (url-set-target urlobj nil))
     ;; Finally, fall back on the assumption that its just a hostname
     (t
      (setq urlobj (url-generic-parse-url nil)) ; Get a blank object
      (url-set-type urlobj "http")
      (url-set-host urlobj env-proxy)))

    ;; Register the proxy parsed above (only when none existed before).
    (if (and (not cur-proxy) urlobj)
        (progn
          (setq url-proxy-services
                (cons (cons protocol (concat (url-host urlobj) ":"
                                             (url-port urlobj)))
                      url-proxy-services))
          (message "Using a proxy for %s..." protocol)))))
|
|
2080
|
|
2081 (provide 'url)
|