-
Notifications
You must be signed in to change notification settings - Fork 1
/
Copy pathhttpcompressionserver.py
319 lines (278 loc) · 11.4 KB
/
httpcompressionserver.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
"""Add HTTP compression support to http.server.
When a request sent by the client includes an Accept-Encoding header, the
server handles the value (eg "gzip", "x-gzip" or "deflate") and tries to
compress the response body with the requested algorithm.
Class HTTPCompressionRequestHandler extends SimpleHTTPRequestHandler with
2 additional attributes:
- compressed_types: the list of mimetypes that will be returned compressed by
the server. By default, it is set to a list of commonly compressed types.
- compressions: a mapping between an Accept-Encoding value and a generator
that produces compressed data.
Chunked Transfer Encoding is used to send the compressed response.
"""
# Module version; also embedded in the handler's Server response header.
__version__ = "0.3"

# Names exported by "from httpcompressionserver import *".
__all__ = [
    "ThreadingHTTPServer", "HTTPCompressionRequestHandler"
]
import datetime
import email.utils
import http.cookiejar
import io
import os
import socket
import socketserver
import sys
import urllib.parse
from functools import partial
from http import HTTPStatus
from http.server import (HTTPServer, BaseHTTPRequestHandler,
SimpleHTTPRequestHandler, CGIHTTPRequestHandler,
_url_collapse_path, test)
# Python might be built without zlib
try:
import zlib
except ImportError:
zlib = None
# Default bind address for the command-line server: all IPv4 interfaces.
DEFAULT_BIND = '0.0.0.0'

# List of commonly compressed content types, copied from
# https://github.com/h5bp/server-configs-apache.
# Used as the default value of HTTPCompressionRequestHandler.compressed_types:
# only responses whose guessed MIME type appears here are compressed.
commonly_compressed_types = [
    "application/atom+xml",
    "application/javascript",
    "application/json",
    "application/ld+json",
    "application/manifest+json",
    "application/rdf+xml",
    "application/rss+xml",
    "application/schema+json",
    "application/vnd.geo+json",
    "application/vnd.ms-fontobject",
    "application/x-font-ttf",
    "application/x-javascript",
    "application/x-web-app-manifest+json",
    "application/xhtml+xml",
    "application/xml",
    "font/eot",
    "font/opentype",
    "image/bmp",
    "image/svg+xml",
    "image/vnd.microsoft.icon",
    "image/x-icon",
    "text/cache-manifest",
    "text/css",
    "text/html",
    "text/javascript",
    "text/plain",
    "text/vcard",
    "text/vnd.rim.location.xloc",
    "text/vtt",
    "text/x-component",
    "text/x-cross-domain-policy",
    "text/xml"
]
# Generators for HTTP compression
def _zlib_producer(fileobj, wbits):
"""Generator that yields data read from the file object fileobj,
compressed with the zlib library.
wbits is the same argument as for zlib.compressobj.
"""
bufsize = 2 << 17
producer = zlib.compressobj(wbits=wbits)
with fileobj:
while True:
buf = fileobj.read(bufsize)
if not buf: # end of file
yield producer.flush()
return
yield producer.compress(buf)
def _gzip_producer(fileobj):
    """Generator of gzip-compressed chunks read from fileobj."""
    # wbits=31 makes zlib.compressobj emit the gzip container format.
    return _zlib_producer(fileobj, 31)
def _deflate_producer(fileobj):
    """Generator of deflate-compressed chunks read from fileobj."""
    # wbits=15 makes zlib.compressobj emit the zlib (deflate) container.
    return _zlib_producer(fileobj, 15)
class ThreadingHTTPServer(socketserver.ThreadingMixIn, HTTPServer):
    """HTTPServer variant that handles each request in a separate thread."""
    # Worker threads must not keep the process alive on interpreter exit.
    daemon_threads = True
class HTTPCompressionRequestHandler(SimpleHTTPRequestHandler):
    """Extends SimpleHTTPRequestHandler to support HTTP compression
    """

    server_version = "CompressionHTTP/" + __version__

    # List of Content Types that are returned with HTTP compression.
    # Set to the commonly_compressed_types by default.
    compressed_types = commonly_compressed_types

    # Dictionary mapping an encoding (in an Accept-Encoding header) to a
    # generator of compressed data. By default, provided zlib is available,
    # the supported encodings are gzip and deflate.
    # Override if a subclass wants to use other compression algorithms.
    compressions = {}
    if zlib:
        compressions = {
            'deflate': _deflate_producer,
            'gzip': _gzip_producer,
            'x-gzip': _gzip_producer # alias for gzip
        }

    def do_GET(self):
        """Serve a GET request."""
        f = self.send_head()
        if f:
            try:
                if hasattr(f, "read"):
                    # A real file object: stream it to the client unchanged.
                    self.copyfile(f, self.wfile)
                else:
                    # Generator for compressed data
                    if self.protocol_version >= "HTTP/1.1":
                        # Chunked Transfer
                        for data in f:
                            if data:
                                self.wfile.write(self._make_chunk(data))
                        # A zero-length chunk terminates the chunked body.
                        self.wfile.write(self._make_chunk(b''))
                    else:
                        # Pre-HTTP/1.1: no chunking; closing the connection
                        # marks the end of the body.
                        for data in f:
                            self.wfile.write(data)
            finally:
                # Works for both file objects and generators.
                f.close()

    def _make_chunk(self, data):
        """Make a data chunk in Chunked Transfer Encoding format."""
        # Layout per RFC 7230 section 4.1: hex size, CRLF, payload, CRLF.
        return f"{len(data):X}".encode("ascii") + b"\r\n" + data + b"\r\n"

    def send_head(self):
        """Common code for GET and HEAD commands.

        This sends the response code and MIME headers.

        Return value is either:
        - a file object (which has to be copied to the outputfile by the
          caller unless the command was HEAD, and must be closed by the
          caller under all circumstances)
        - a generator of pieces of compressed data if HTTP compression is
          used
        - None, in which case the caller has nothing further to do
        """
        path = self.translate_path(self.path)
        f = None
        if os.path.isdir(path):
            parts = urllib.parse.urlsplit(self.path)
            if not parts.path.endswith('/'):
                # redirect browser - doing basically what apache does
                self.send_response(HTTPStatus.MOVED_PERMANENTLY)
                new_parts = (parts[0], parts[1], parts[2] + '/',
                             parts[3], parts[4])
                new_url = urllib.parse.urlunsplit(new_parts)
                self.send_header("Location", new_url)
                self.end_headers()
                return None
            # Serve an index file if present, else a directory listing.
            for index in "index.html", "index.htm":
                index = os.path.join(path, index)
                if os.path.exists(index):
                    path = index
                    break
            else:
                return self.list_directory(path)
        ctype = self.guess_type(path)
        try:
            f = open(path, 'rb')
        except OSError:
            self.send_error(HTTPStatus.NOT_FOUND, "File not found")
            return None

        try:
            fs = os.fstat(f.fileno())
            content_length = fs[6]  # index 6 is st_size
            # Use browser cache if possible
            if ("If-Modified-Since" in self.headers
                    and "If-None-Match" not in self.headers):
                # compare If-Modified-Since and time of last file modification
                try:
                    ims = email.utils.parsedate_to_datetime(
                        self.headers["If-Modified-Since"])
                except (TypeError, IndexError, OverflowError, ValueError):
                    # ignore ill-formed values
                    pass
                else:
                    if ims.tzinfo is None:
                        # obsolete format with no timezone, cf.
                        # https://tools.ietf.org/html/rfc7231#section-7.1.1.1
                        ims = ims.replace(tzinfo=datetime.timezone.utc)
                    if ims.tzinfo is datetime.timezone.utc:
                        # compare to UTC datetime of last modification
                        last_modif = datetime.datetime.fromtimestamp(
                            fs.st_mtime, datetime.timezone.utc)
                        # remove microseconds, like in If-Modified-Since
                        last_modif = last_modif.replace(microsecond=0)
                        if last_modif <= ims:
                            self.send_response(HTTPStatus.NOT_MODIFIED)
                            self.end_headers()
                            f.close()
                            return None

            self.send_response(HTTPStatus.OK)
            self.send_header("Content-type", ctype)
            self.send_header("Last-Modified",
                             self.date_time_string(fs.st_mtime))

            if ctype not in self.compressed_types:
                # MIME type not eligible for compression: plain transfer.
                self.send_header("Content-Length", str(content_length))
                self.end_headers()
                return f

            # Use HTTP compression if possible

            # Get accepted encodings ; "encodings" is a dictionary mapping
            # encodings to their quality ; eg for header "gzip; q=0.8",
            # encodings["gzip"] is set to 0.8
            accept_encoding = self.headers.get_all("Accept-Encoding", ())
            encodings = {}
            for accept in http.cookiejar.split_header_words(accept_encoding):
                # split_header_words yields [(encoding, None), ("q", value)]
                # style pairs for each comma-separated header element.
                params = iter(accept)
                encoding = next(params, ("", ""))[0]
                quality, value = next(params, ("", ""))
                if quality == "q" and value:
                    try:
                        q = float(value)
                    except ValueError:
                        # Invalid quality : ignore encoding
                        q = 0
                else:
                    q = 1 # quality defaults to 1
                if q:
                    # Keep the highest quality seen for a repeated encoding.
                    encodings[encoding] = max(encodings.get(encoding, 0), q)

            # Encodings both accepted by the client and supported here.
            compressions = set(encodings).intersection(self.compressions)
            compression = None
            if compressions:
                # Take the encoding with highest quality
                compression = max((encodings[enc], enc)
                                  for enc in compressions)[1]
            elif '*' in encodings and self.compressions:
                # If no specified encoding is supported but "*" is accepted,
                # take one of the available compressions.
                compression = list(self.compressions)[0]
            if compression:
                # If at least one encoding is accepted, send data compressed
                # with the selected compression algorithm.
                producer = self.compressions[compression]
                self.send_header("Content-Encoding", compression)
                if content_length < 2 << 18:
                    # For small files, load content in memory
                    with f:
                        content = b''.join(producer(f))
                    content_length = len(content)
                    f = io.BytesIO(content)
                else:
                    chunked = self.protocol_version >= "HTTP/1.1"
                    if chunked:
                        # Use Chunked Transfer Encoding (RFC 7230 section 4.1)
                        self.send_header("Transfer-Encoding", "chunked")
                    self.end_headers()
                    # Return a generator of pieces of compressed data
                    return producer(f)

            self.send_header("Content-Length", str(content_length))
            self.end_headers()
            return f
        except:
            # Close the file on any error, then propagate it.
            f.close()
            raise
if __name__ == '__main__':
    import argparse

    # Command-line front end: same options as http.server's CLI.
    cli = argparse.ArgumentParser()
    cli.add_argument('--bind', '-b', default=DEFAULT_BIND, metavar='ADDRESS',
                     help='Specify alternate bind address '
                          '[default: all interfaces]')
    cli.add_argument('port', action='store', default=8000, type=int,
                     nargs='?',
                     help='Specify alternate port [default: 8000]')
    ns = cli.parse_args()
    # Reuse http.server's test() helper to run the server loop.
    test(HandlerClass=HTTPCompressionRequestHandler,
         port=ns.port,
         bind=ns.bind)