blob: 0eafb7a14880822818660412d7c57018b6916c8c (
plain) (
blame)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
|
;;; my-net.el --- Network related extensions for emacs core -*- lexical-binding: t -*-
;; Copyright (C) 2023 Free Software Foundation.
;; Author: Yuchen Pei <id@ypei.org>
;; Package-Requires: ((emacs "28.2"))
;; This file is part of dotfiles.
;; dotfiles is free software: you can redistribute it and/or modify it under
;; the terms of the GNU Affero General Public License as published by
;; the Free Software Foundation, either version 3 of the License, or
;; (at your option) any later version.
;; dotfiles is distributed in the hope that it will be useful, but WITHOUT
;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
;; or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General
;; Public License for more details.
;; You should have received a copy of the GNU Affero General Public
;; License along with dotfiles. If not, see <https://www.gnu.org/licenses/>.
;;; Commentary:
;; Network related extensions for emacs core.
;;; Code:
;;; net utilities
(defvar my-download-dir "~/Downloads"
  "Directory under which `my-fetch-url' saves downloaded files.")
(defun my-make-file-name-from-url (url)
  "Return the last path component of URL as a file name.
URL is percent-decoded first; a trailing slash on the path is
ignored, and the query string is discarded."
  (let* ((decoded (url-unhex-string url))
         (path (car (url-path-and-query
                     (url-generic-parse-url decoded)))))
    (file-name-nondirectory (directory-file-name path))))
;; stolen from `eww-make-unique-file-name'
(defun my-make-unique-file-name (file directory)
  "Return an absolute name for FILE under DIRECTORY that is not taken.
Like `expand-file-name', but if the name already exists, append
\"(N)\" (for N = 1, 2, ...) before the extension until it does not.
An empty FILE becomes \"!\", and a leading dot is prefixed with \"!\"
so the result is never a hidden file."
  (setq file (cond ((zerop (length file)) "!")
                   ((string-match "\\`[.]" file) (concat "!" file))
                   (t file)))
  (let ((stem file)
        (suffix "")
        (n 1))
    (when (string-match "\\`\\(.*\\)\\([.][^.]+\\)" file)
      (setq stem (match-string 1 file)
            suffix (match-string 2 file)))
    (while (file-exists-p (expand-file-name file directory))
      (setq file (format "%s(%d)%s" stem n suffix)
            n (1+ n)))
    (expand-file-name file directory)))
(defun my-fetch-url (url &optional no-overwrite)
  "Fetch URL to a buffer, save it to a file, and switch to the buffer.
The file is saved under `my-download-dir'.
If NO-OVERWRITE is non-nil, do not overwrite any existing file."
  (interactive "sURL: ")
  (let* ((base-name (my-make-file-name-from-url url))
         (file-name (if no-overwrite
                        (my-make-unique-file-name base-name my-download-dir)
                      (expand-file-name base-name my-download-dir))))
    (url-retrieve url #'my-fetch-url-save-and-switch (list file-name))))
(defun my-fetch-url-save-and-switch (status file-name)
  "A `url-retrieve' callback that saves the payload and switch to it.
It checks the STATUS, and if it is ok, saves the payload to FILE-NAME.
Signals an error when STATUS records a fetch error."
  (when (plist-get status :error)
    (error "My fetch failed: %s" (plist-get status :error)))
  ;; Was `my-delete-http-header', which is not defined anywhere in this
  ;; file; `my-kill-http-header' is the header-stripping helper defined
  ;; here (its return value is not needed).
  (my-kill-http-header)
  (let ((to-insert (buffer-string))
        (buffer (find-file-noselect file-name))
        ;; Force UTF-8 when saving the payload to disk.
        (coding-system-for-write 'utf-8))
    ;; Kill the (now header-less) response buffer; we copied its text.
    (kill-buffer)
    (with-current-buffer buffer
      (let ((inhibit-read-only t))
        (erase-buffer)
        (insert to-insert))
      (goto-char (point-min))
      (save-buffer)
      ;; Re-read from disk so the buffer reflects the saved encoding/modes.
      (revert-buffer t t))
    (switch-to-buffer buffer)))
(defun my-kill-http-header ()
  "Delete the HTTP header at the start of the current buffer.
Return the deleted header text, without text properties.  Relies on
`my-skip-http-header' to position point just past the header."
  (my-skip-http-header)
  (prog1 (buffer-substring-no-properties (point-min) (point))
    (delete-region (point-min) (point))))
(defun my-parse-http-header (text)
  "Parse the HTTP header TEXT into an alist.
The result has a `status' entry holding the three-digit status code
as a string, and a `fields' entry holding a (SYMBOL . VALUE) alist of
the header fields, most recent first."
  (let (code headers)
    (with-temp-buffer
      (insert text)
      (goto-char (point-min))
      ;; The status line must be present; otherwise this signals.
      (re-search-forward "^HTTP.*\\([0-9]\\{3\\}\\).*$")
      (setq code (match-string 1))
      (while (re-search-forward "^\\(.*?\\): \\(.*\\)$" nil t)
        (push (cons (intern (match-string 1)) (match-string 2))
              headers)))
    (list (cons 'status code) (cons 'fields headers))))
(defvar my-client-buffer-name "*my-api*"
  "Name of the buffer where `my-url-fetch-internal' logs requests and responses.")
(defun my-url-fetch-json (url &optional decompression with-header)
  "Fetch URL and parse the response payload as JSON.
DECOMPRESSION and WITH-HEADER are passed on to
`my-url-fetch-internal'."
  (let ((parse-json
         (lambda ()
           (json-read-from-string
            (decode-coding-string (buffer-string) 'utf-8)))))
    (my-url-fetch-internal url parse-json decompression with-header)))
(defun my-url-fetch-dom (url &optional decompression with-header)
  "Fetch URL and parse the response payload as an HTML DOM.
DECOMPRESSION and WITH-HEADER are passed on to
`my-url-fetch-internal'."
  (let ((parse-dom
         (lambda () (libxml-parse-html-region (point) (point-max)))))
    (my-url-fetch-internal url parse-dom decompression with-header)))
(defun my-url-fetch-internal (url buffer-processor decompression with-header)
  "Fetch URL synchronously and return the processed payload.
Log the request and the response status to `my-client-buffer-name'.
On a 200 response with a non-empty payload, call BUFFER-PROCESSOR in
the response buffer (point at the payload start) and return its
result; with non-nil WITH-HEADER, instead return an alist of
\(header . FIELDS) and (json . RESULT).  With non-nil DECOMPRESSION,
first filter the payload through \"gunzip\".  Signal an error for
non-200 responses."
  (with-current-buffer (get-buffer-create my-client-buffer-name)
    (goto-char (point-max))
    (insert "[" (current-time-string) "] Request: " url "\n"))
  (with-current-buffer (url-retrieve-synchronously url t)
    (let* ((header (my-parse-http-header (my-kill-http-header)))
           (status (alist-get 'status header))
           (fields (alist-get 'fields header)))
      (goto-char (point-min))
      (with-current-buffer my-client-buffer-name
        (insert "[" (current-time-string) "] Response: " status "\n"))
      (when decompression
        ;; Replace the payload in place with its gunzipped form.
        (call-process-region (point) (point-max) "gunzip" t t t)
        (goto-char (point-min)))
      ;; Call the function directly instead of via `call-interactively':
      ;; interactive invocation makes the affected range depend on the
      ;; command's interactive spec (e.g. an active region), whereas the
      ;; plain call deterministically cleans the whole buffer.
      (delete-trailing-whitespace)
      (if (string= status "200")
          ;; An empty payload yields nil rather than a processor call.
          (unless (= (point) (point-max))
            (if with-header
                (list (cons 'header fields)
                      ;; NOTE(review): the key is `json' even for DOM
                      ;; results; kept as-is since callers depend on it.
                      (cons 'json (funcall buffer-processor)))
              (funcall buffer-processor)))
        (error "HTTP error: %s" (buffer-substring (point) (point-max)))))))
;; Announce this library so that `(require 'my-net)' succeeds.
(provide 'my-net)
;;; my-net.el ends here
|