#!/usr/bin/env python3
# Arch Linux packages proxy
#
# Proxies requests to upstream mirrors, caching files with a ".pkg.tar.*"
# suffix. If a cached file exists, serve that file. Otherwise, download the
# file as usual and optionally cache it under a ".download" extension while
# the transfer is in progress. Clients can resume an interrupted download
# with a Range request.
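#
# Example usage (assuming the proxy runs on its default port; the pacman
# mirrorlist entry below is illustrative, not something this script sets up):
#   ./arch-proxy.py --cachedir /var/cache/arch-proxy
# and on the client, in /etc/pacman.d/mirrorlist:
#   Server = http://localhost:8001/$repo/os/$arch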

import argparse
import fcntl
import http.server
import os
import re
import socket
import traceback
from contextlib import closing, contextmanager
from datetime import datetime, timezone

import requests

DATE_FORMAT = '%a, %d %b %Y %H:%M:%S GMT'
def text_to_epoch(text):
    # HTTP dates are always GMT; parse them as UTC, not local time.
    return datetime.strptime(text, DATE_FORMAT).replace(
            tzinfo=timezone.utc).timestamp()
def epoch_to_text(epoch):
    return datetime.fromtimestamp(epoch, timezone.utc).strftime(DATE_FORMAT)

class BadRequest(Exception):
    """Client error; "code" selects the HTTP status code of the response."""
    def __init__(self, message, code=400):
        super().__init__(message)
        self.code = code

class RequestHandler(http.server.BaseHTTPRequestHandler):
    def send_ok(self, size, headers=None, upstream=None, range_offset=None):
        # Copy to avoid a mutable default argument and to avoid mutating the
        # caller's dict; "Content-Range" may be added below.
        headers = dict(headers) if headers else {}
        if range_offset is None:
            code = 200
        else:
            if range_offset >= size:
                raise BadRequest("Requested Range Not Satisfiable", code=416)
            code = 206
            content_range = "bytes %d-%d/%d" % (range_offset, size - 1, size)
            headers["Content-Range"] = content_range
            size -= range_offset
        self.log_message('"%s" %d %s %s', self.requestline, code, size,
                "HIT" if upstream is None else "MISS:%s" % (upstream,))
        self.send_response_only(code)
        self.send_header('Content-Length', size)
        for k, v in headers.items():
            self.send_header(k, v)
        self.end_headers()

    def request_data(self, head_only=False, mtime_out=None, range_offset=None):
        """
        Retrieves the full response body. The given "range_offset" serves only
        as hint for the response to the client, it is not used with the upstream
        request.
        """
        method = "HEAD" if head_only else "GET"
        streamable = not head_only
        status_code = None
        urls = list(self.get_upstream_urls())
        # Try each upstream. If one fails, log it and try another. On success,
        # return the response data. If all upstreams fail, fail the request.
        for i, url in enumerate(urls):
            with closing(requests.request(method, url, stream=streamable)) as r:
                status_code = r.status_code
                if status_code == 200:
                    yield from self.process_upstream_response(r, head_only,
                            mtime_out, i, range_offset)
                    return
                self.log_message('"%s" %d - SKIP:%d', self.requestline,
                        status_code, i)
        self.log_request(status_code)
        self.send_response_only(status_code)
        self.end_headers()

    def process_upstream_response(self, r, head_only, mtime_out, upstream,
            range_offset):
        # The caller only passes successful (200) responses here.
        response_headers = {}
        if 'Last-Modified' in r.headers:
            try:
                mtime = text_to_epoch(r.headers['Last-Modified'])
                response_headers['Last-Modified'] = epoch_to_text(mtime)
                if mtime_out:
                    mtime_out[0] = mtime
            except ValueError:
                self.log_error("Unable to parse Last-Modified header")
        # Mirrors send Content-Length for files; a missing header raises
        # KeyError, which the do_GET/do_HEAD handlers turn into a 502.
        self.send_ok(int(r.headers['Content-Length']), response_headers,
                upstream=upstream, range_offset=range_offset)
        if not head_only:
            yield from r.iter_content(4096)

    @contextmanager
    def open_write_cache(self, path):
        if self.server.is_readonly:
            yield None
            return
        temp_path = path + ".download"
        try:
            # Open in append mode so that an existing partial file is not
            # truncated before the exclusive lock is acquired.
            with open(temp_path, 'ab') as f:
                # Prevent concurrent writers
                fcntl.lockf(f, fcntl.LOCK_EX | fcntl.LOCK_NB)
                # Lock held; now it is safe to start over from scratch.
                f.truncate(0)
                yield f
        except OSError as e:
            self.log_error("Failed to create cache file %s: %s", temp_path, e)
            yield None

    def finish_cache(self, mtime):
        path = self.get_local_path()
        temp_path = path + ".download"
        if mtime:
            os.utime(temp_path, times=(mtime, mtime))
        try:
            os.rename(temp_path, path)
        except OSError as e:
            self.log_error("Failed to rename %s: %s", temp_path, e)
            try:
                os.unlink(temp_path)
            except OSError as e:
                self.log_error("Failed to remove %s: %s", temp_path, e)

    def parse_range(self):
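        """Return the resume offset from a "bytes=N-" Range header, or None."""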
        value = self.headers.get('Range')
        if value is not None:
            # Only support "continue" range requests, resuming previous
            # download. Anything more complex is not needed at the moment.
            m = re.match(r'bytes=(?P<from>\d+)-$', value)
            if not m:
                raise BadRequest("Unsupported range request: %s" % value)
            return int(m.group("from"))

    @staticmethod
    def skip_range_chunk(chunk, skip_bytes):
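        """Drop up to "skip_bytes" from the front of "chunk"; return the
        remaining chunk and the number of bytes still to be skipped."""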
        if skip_bytes:
            chunksize = len(chunk)
            if chunksize > skip_bytes:
                chunk = chunk[skip_bytes:]
                skip_bytes = None
            else:
                chunk = b''
                skip_bytes -= chunksize
        return chunk, (skip_bytes or None)

    @classmethod
    def skip_range(cls, it, skip_bytes):
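        """Yield chunks from "it" with the first "skip_bytes" bytes removed."""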
        for chunk in it:
            chunk, skip_bytes = cls.skip_range_chunk(chunk, skip_bytes)
            if chunk:
                yield chunk

    def request_data_with_cache(self, head_only=False):
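        """
        Yield the response body, serving cacheable files from the local
        cache and writing newly downloaded files to it.
        """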
        range_offset = self.parse_range()
        if not self.is_cacheable():
            # Not cacheable, directly obtain data and bypass cache
            remote_data = self.request_data(range_offset=range_offset)
            yield from self.skip_range(remote_data, range_offset)
            return

        path = self.get_local_path()
        try:
            # Try to open cached file and yield data from it
            stat_info = os.stat(path)
            response_headers = {'Last-Modified':
                    epoch_to_text(stat_info.st_mtime)}
            self.send_ok(stat_info.st_size, response_headers,
                    range_offset=range_offset)
            if not head_only:
                with open(path, 'rb') as f:
                    if range_offset:
                        f.seek(range_offset)
                    yield from f
        except FileNotFoundError:
            # File does not exist, so try to pipe upstream
            # (optionally writing to cache file)
            mtime_pointer = [None]
            remote_data = self.request_data(head_only=head_only,
                    mtime_out=mtime_pointer, range_offset=range_offset)
            if head_only:
                list(remote_data)  # drain the generator so the response is sent
            if not head_only and remote_data:
                cache_ok = False
                with self.open_write_cache(path) as cache_file:
                    cache_ok = cache_file is not None
                    if cache_ok:
                        # Write the full upstream body to the temporary cache
                        # file, but do not include the first "range_offset"
                        # bytes in the response to the client.
                        skip = range_offset
                        for chunk in remote_data:
                            cache_file.write(chunk)
                            chunk, skip = self.skip_range_chunk(chunk, skip)
                            if chunk:
                                yield chunk
                if cache_ok:
                    # Write was successful, now fix mtime and rename
                    self.finish_cache(mtime_pointer[0])
                else:
                    # Cache file unavailable, just pass all data
                    yield from self.skip_range(remote_data, range_offset)

    def do_GET(self):
        try:
            data = self.request_data_with_cache()
            if data:
                for chunk in data:
                    self.wfile.write(chunk)
        except (BrokenPipeError, ConnectionResetError):
            self.log_error("GET %s - (connection aborted)", self.path)
        except BadRequest as e:
            self.log_error("GET %s - Bad Request: %s", self.path, e)
            self.send_response(e.code)
            self.end_headers()
        except Exception as e:
            self.log_error("GET %s failed: %s", self.path, e)
            traceback.print_exc()
            self.send_response(502)
            self.end_headers()

    def do_HEAD(self):
        try:
            list(self.request_data_with_cache(True))
        except (BrokenPipeError, ConnectionResetError):
            self.log_error("HEAD %s - (connection aborted)", self.path)
        except BadRequest as e:
            self.log_error("HEAD %s - Bad Request: %s", self.path, e)
            self.send_response(e.code)
            self.end_headers()
        except Exception as e:
            self.log_error("HEAD %s failed: %s", self.path, e)
            traceback.print_exc()
            self.send_response(502)
            self.end_headers()

    def get_upstream_urls(self):
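        """Yield candidate upstream URLs for the current request path."""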
        # If an old version is requested, retrieve the databases from the
        # archive mirror and do not fall back to other mirrors.
        if self.server.archive_url and self.is_date_sensitive_request():
            yield self.server.archive_url + self.path
            return
        for prefix in self.server.mirrors:
            yield prefix + self.path

    def get_local_path(self):
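        # Using only the basename flattens all repositories into a single
        # cache directory and prevents path traversal via the request path.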
        filename = os.path.basename(self.path)
        return os.path.join(self.server.cachedir, filename)

    def is_cacheable(self):
        """Whether the requested file should be cached."""
        # Support .pkg.tar.xz, .pkg.tar.zst, etc.
        basename = os.path.splitext(self.path)[0]
        return basename.endswith(".pkg.tar")

    def is_date_sensitive_request(self):
        """Whether the resource is ephemeral."""
        path = self.path
        if path.endswith(".sig"):
            path = path[:-4]
        suffixes = [".db", ".files", ".abs.tar.gz"]
        return any(path.endswith(suffix) for suffix in suffixes)

class SomeServer(http.server.HTTPServer):
    def __init__(self, addr, handler, args):
        self.allow_reuse_address = True
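        # A colon in the host part indicates an IPv6 listen address.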
        if ':' in addr[0]:
            self.address_family = socket.AF_INET6
        super().__init__(addr, handler)
        self.cachedir = args.cachedir
        self.is_readonly = args.readonly
        self.mirrors = [m.rstrip('/') for m in args.mirrors]
        if not args.date:
            self.archive_url = None
        else:
            archive_mirror = "https://archive.archlinux.org/repos/"
            self.archive_url = archive_mirror + args.date
            self.mirrors.append(self.archive_url)

    def dump_config(self):
        yesno = lambda x: "yes" if x else "no"
        print("Listen address:  %s:%s" % self.socket.getsockname()[:2])
        print("Cache directory: %s" % self.cachedir)
        print("Read-only cache: %s" % yesno(self.is_readonly))
        print("Using archive:   %s" % yesno(self.archive_url))
        print("Mirrors:")
        for mirror in self.mirrors:
            print(" %s" % mirror)

def mirror_url(string):
    scheme = string.split(":", 1)[0]
    if scheme not in ("http", "https"):
        raise argparse.ArgumentTypeError("%s is not a valid URL" % string)
    return string.rstrip("/") + "/"

def parse_date(string):
    m = re.match(r'^(\d{4})([/-]?)(\d{2})\2(\d{2})$', string)
    if not m:
        raise argparse.ArgumentTypeError("%s is not a valid date" % string)
    year, _, month, day = m.groups()
    return "%s/%s/%s" % (year, month, day)

parser = argparse.ArgumentParser(
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument("--readonly", action="store_true",
        help="Do not write downloaded results to the cache directory")
parser.add_argument("--cachedir", default=os.getcwd(),
        help="Cache directory")
parser.add_argument("--port", type=int, default=8001,
        help="Listen port")
parser.add_argument("--date", type=parse_date,
        help="Provide a repository snapshot from 'yyyy/mm/dd'")
parser.add_argument("--mirror", dest="mirrors", metavar='URL', nargs="+",
        type=mirror_url, default=["https://mirror.nl.leaseweb.net/archlinux"],
        help="Mirror list")

if __name__ == '__main__':
    args = parser.parse_args()
    addr = ('', args.port)
    server = SomeServer(addr, RequestHandler, args)
    server.dump_config()
    server.serve_forever()