Update cherrypy to 17.4.2
@@ -1,12 +1,14 @@
|
||||
"""CherryPy Library"""
|
||||
"""CherryPy Library."""
|
||||
|
||||
# Deprecated in CherryPy 3.2 -- remove in CherryPy 3.3
|
||||
from cherrypy.lib.reprconf import unrepr, modules, attributes
|
||||
|
||||
def is_iterator(obj):
|
||||
'''Returns a boolean indicating if the object provided implements
|
||||
the iterator protocol (i.e. like a generator). This will return
|
||||
false for objects which iterable, but not iterators themselves.'''
|
||||
"""Detect if the object provided implements the iterator protocol.
|
||||
|
||||
(i.e. like a generator).
|
||||
|
||||
This will return False for objects which are iterable,
|
||||
but not iterators themselves.
|
||||
"""
|
||||
from types import GeneratorType
|
||||
if isinstance(obj, GeneratorType):
|
||||
return True
|
||||
@@ -16,22 +18,23 @@ def is_iterator(obj):
|
||||
# Types which implement the protocol must return themselves when
|
||||
# invoking 'iter' upon them.
|
||||
return iter(obj) is obj
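A minimal illustration (not part of the diff) of the distinction drawn above between iterators and merely iterable objects:

    from cherrypy.lib import is_iterator

    assert is_iterator(iter([1, 2, 3]))       # an iterator returns itself from iter()
    assert is_iterator(n for n in range(3))   # generators are iterators
    assert not is_iterator([1, 2, 3])         # iterable, but not an iterator itself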
|
||||
|
||||
|
||||
|
||||
def is_closable_iterator(obj):
|
||||
|
||||
"""Detect if the given object is both closable and iterator."""
|
||||
# Not an iterator.
|
||||
if not is_iterator(obj):
|
||||
return False
|
||||
|
||||
|
||||
# A generator - the easiest thing to deal with.
|
||||
import inspect
|
||||
if inspect.isgenerator(obj):
|
||||
return True
|
||||
|
||||
|
||||
# A custom iterator. Look for a close method...
|
||||
if not (hasattr(obj, 'close') and callable(obj.close)):
|
||||
return False
|
||||
|
||||
|
||||
# ... which doesn't require any arguments.
|
||||
try:
|
||||
inspect.getcallargs(obj.close)
|
||||
@@ -40,18 +43,24 @@ def is_closable_iterator(obj):
|
||||
else:
|
||||
return True
|
||||
|
||||
class file_generator(object):
|
||||
|
||||
"""Yield the given input (a file object) in chunks (default 64k). (Core)"""
|
||||
class file_generator(object):
|
||||
"""Yield the given input (a file object) in chunks (default 64k).
|
||||
|
||||
(Core)
|
||||
"""
|
||||
|
||||
def __init__(self, input, chunkSize=65536):
|
||||
"""Initialize file_generator with file ``input`` for chunked access."""
|
||||
self.input = input
|
||||
self.chunkSize = chunkSize
|
||||
|
||||
def __iter__(self):
|
||||
"""Return iterator."""
|
||||
return self
|
||||
|
||||
def __next__(self):
|
||||
"""Return next chunk of file."""
|
||||
chunk = self.input.read(self.chunkSize)
|
||||
if chunk:
|
||||
return chunk
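A usage sketch (not part of the diff); the file name is a placeholder:

    from cherrypy.lib import file_generator

    with open('payload.bin', 'rb') as f:
        for chunk in file_generator(f, chunkSize=8192):
            pass  # each chunk is a bytes object of at most 8192 bytes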
|
||||
@@ -63,8 +72,10 @@ class file_generator(object):
|
||||
|
||||
|
||||
def file_generator_limited(fileobj, count, chunk_size=65536):
|
||||
"""Yield the given file object in chunks, stopping after `count`
|
||||
bytes has been emitted. Default chunk size is 64kB. (Core)
|
||||
"""Yield the given file object in chunks.
|
||||
|
||||
Stops after `count` bytes have been emitted.
|
||||
Default chunk size is 64kB. (Core)
|
||||
"""
|
||||
remaining = count
|
||||
while remaining > 0:
|
||||
@@ -77,9 +88,9 @@ def file_generator_limited(fileobj, count, chunk_size=65536):
|
||||
|
||||
|
||||
def set_vary_header(response, header_name):
|
||||
"Add a Vary header to a response"
|
||||
varies = response.headers.get("Vary", "")
|
||||
varies = [x.strip() for x in varies.split(",") if x.strip()]
|
||||
"""Add a Vary header to a response."""
|
||||
varies = response.headers.get('Vary', '')
|
||||
varies = [x.strip() for x in varies.split(',') if x.strip()]
|
||||
if header_name not in varies:
|
||||
varies.append(header_name)
|
||||
response.headers['Vary'] = ", ".join(varies)
|
||||
response.headers['Vary'] = ', '.join(varies)
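A small sketch (not part of the diff) of the resulting header value; SimpleNamespace stands in for the real response object:

    from types import SimpleNamespace
    from cherrypy.lib import set_vary_header

    response = SimpleNamespace(headers={'Vary': 'Accept-Encoding'})
    set_vary_header(response, 'Accept-Language')
    assert response.headers['Vary'] == 'Accept-Encoding, Accept-Language'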
|
||||
|
||||
@@ -1,97 +0,0 @@
|
||||
import cherrypy
|
||||
from cherrypy.lib import httpauth
|
||||
|
||||
|
||||
def check_auth(users, encrypt=None, realm=None):
|
||||
"""If an authorization header contains credentials, return True or False.
|
||||
"""
|
||||
request = cherrypy.serving.request
|
||||
if 'authorization' in request.headers:
|
||||
# make sure the provided credentials are correctly set
|
||||
ah = httpauth.parseAuthorization(request.headers['authorization'])
|
||||
if ah is None:
|
||||
raise cherrypy.HTTPError(400, 'Bad Request')
|
||||
|
||||
if not encrypt:
|
||||
encrypt = httpauth.DIGEST_AUTH_ENCODERS[httpauth.MD5]
|
||||
|
||||
if hasattr(users, '__call__'):
|
||||
try:
|
||||
# backward compatibility
|
||||
users = users() # expect it to return a dictionary
|
||||
|
||||
if not isinstance(users, dict):
|
||||
raise ValueError(
|
||||
"Authentication users must be a dictionary")
|
||||
|
||||
# fetch the user password
|
||||
password = users.get(ah["username"], None)
|
||||
except TypeError:
|
||||
# returns a password (encrypted or clear text)
|
||||
password = users(ah["username"])
|
||||
else:
|
||||
if not isinstance(users, dict):
|
||||
raise ValueError("Authentication users must be a dictionary")
|
||||
|
||||
# fetch the user password
|
||||
password = users.get(ah["username"], None)
|
||||
|
||||
# validate the authorization by re-computing it here
|
||||
# and compare it with what the user-agent provided
|
||||
if httpauth.checkResponse(ah, password, method=request.method,
|
||||
encrypt=encrypt, realm=realm):
|
||||
request.login = ah["username"]
|
||||
return True
|
||||
|
||||
request.login = False
|
||||
return False
|
||||
|
||||
|
||||
def basic_auth(realm, users, encrypt=None, debug=False):
|
||||
"""If auth fails, raise 401 with a basic authentication header.
|
||||
|
||||
realm
|
||||
A string containing the authentication realm.
|
||||
|
||||
users
|
||||
A dict of the form: {username: password} or a callable returning
|
||||
a dict.
|
||||
|
||||
encrypt
|
||||
callable used to encrypt the password returned from the user-agent.
|
||||
if None it defaults to a md5 encryption.
|
||||
|
||||
"""
|
||||
if check_auth(users, encrypt):
|
||||
if debug:
|
||||
cherrypy.log('Auth successful', 'TOOLS.BASIC_AUTH')
|
||||
return
|
||||
|
||||
# inform the user-agent this path is protected
|
||||
cherrypy.serving.response.headers[
|
||||
'www-authenticate'] = httpauth.basicAuth(realm)
|
||||
|
||||
raise cherrypy.HTTPError(
|
||||
401, "You are not authorized to access that resource")
|
||||
|
||||
|
||||
def digest_auth(realm, users, debug=False):
|
||||
"""If auth fails, raise 401 with a digest authentication header.
|
||||
|
||||
realm
|
||||
A string containing the authentication realm.
|
||||
users
|
||||
A dict of the form: {username: password} or a callable returning
|
||||
a dict.
|
||||
"""
|
||||
if check_auth(users, realm=realm):
|
||||
if debug:
|
||||
cherrypy.log('Auth successful', 'TOOLS.DIGEST_AUTH')
|
||||
return
|
||||
|
||||
# inform the user-agent this path is protected
|
||||
cherrypy.serving.response.headers[
|
||||
'www-authenticate'] = httpauth.digestAuth(realm)
|
||||
|
||||
raise cherrypy.HTTPError(
|
||||
401, "You are not authorized to access that resource")
|
||||
@@ -1,8 +1,9 @@
|
||||
# This file is part of CherryPy <http://www.cherrypy.org/>
|
||||
# -*- coding: utf-8 -*-
|
||||
# vim:ts=4:sw=4:expandtab:fileencoding=utf-8
|
||||
"""HTTP Basic Authentication tool.
|
||||
|
||||
__doc__ = """This module provides a CherryPy 3.x tool which implements
|
||||
This module provides a CherryPy 3.x tool which implements
|
||||
the server-side of HTTP Basic Access Authentication, as described in
|
||||
:rfc:`2617`.
|
||||
|
||||
@@ -14,18 +15,23 @@ as the credentials store::
|
||||
basic_auth = {'tools.auth_basic.on': True,
|
||||
'tools.auth_basic.realm': 'earth',
|
||||
'tools.auth_basic.checkpassword': checkpassword,
|
||||
'tools.auth_basic.accept_charset': 'UTF-8',
|
||||
}
|
||||
app_config = { '/' : basic_auth }
|
||||
|
||||
"""
|
||||
|
||||
import binascii
|
||||
import unicodedata
|
||||
import base64
|
||||
|
||||
import cherrypy
|
||||
from cherrypy._cpcompat import ntou, tonative
|
||||
|
||||
|
||||
__author__ = 'visteya'
|
||||
__date__ = 'April 2009'
|
||||
|
||||
import binascii
|
||||
from cherrypy._cpcompat import base64_decode
|
||||
import cherrypy
|
||||
|
||||
|
||||
def checkpassword_dict(user_password_dict):
|
||||
"""Returns a checkpassword function which checks credentials
|
||||
@@ -42,9 +48,10 @@ def checkpassword_dict(user_password_dict):
|
||||
return checkpassword
|
||||
|
||||
|
||||
def basic_auth(realm, checkpassword, debug=False):
|
||||
def basic_auth(realm, checkpassword, debug=False, accept_charset='utf-8'):
|
||||
"""A CherryPy tool which hooks at before_handler to perform
|
||||
HTTP Basic Access Authentication, as specified in :rfc:`2617`.
|
||||
HTTP Basic Access Authentication, as specified in :rfc:`2617`
|
||||
and :rfc:`7617`.
|
||||
|
||||
If the request has an 'authorization' header with a 'Basic' scheme, this
|
||||
tool attempts to authenticate the credentials supplied in that header. If
|
||||
@@ -64,27 +71,50 @@ def basic_auth(realm, checkpassword, debug=False):
|
||||
|
||||
"""
|
||||
|
||||
fallback_charset = 'ISO-8859-1'
|
||||
|
||||
if '"' in realm:
|
||||
raise ValueError('Realm cannot contain the " (quote) character.')
|
||||
request = cherrypy.serving.request
|
||||
|
||||
auth_header = request.headers.get('authorization')
|
||||
if auth_header is not None:
|
||||
try:
|
||||
# split() error, base64.decodestring() error
|
||||
msg = 'Bad Request'
|
||||
with cherrypy.HTTPError.handle((ValueError, binascii.Error), 400, msg):
|
||||
scheme, params = auth_header.split(' ', 1)
|
||||
if scheme.lower() == 'basic':
|
||||
username, password = base64_decode(params).split(':', 1)
|
||||
charsets = accept_charset, fallback_charset
|
||||
decoded_params = base64.b64decode(params.encode('ascii'))
|
||||
decoded_params = _try_decode(decoded_params, charsets)
|
||||
decoded_params = ntou(decoded_params)
|
||||
decoded_params = unicodedata.normalize('NFC', decoded_params)
|
||||
decoded_params = tonative(decoded_params)
|
||||
username, password = decoded_params.split(':', 1)
|
||||
if checkpassword(realm, username, password):
|
||||
if debug:
|
||||
cherrypy.log('Auth succeeded', 'TOOLS.AUTH_BASIC')
|
||||
request.login = username
|
||||
return # successful authentication
|
||||
# split() error, base64.decodestring() error
|
||||
except (ValueError, binascii.Error):
|
||||
raise cherrypy.HTTPError(400, 'Bad Request')
|
||||
|
||||
charset = accept_charset.upper()
|
||||
charset_declaration = (
|
||||
(', charset="%s"' % charset)
|
||||
if charset != fallback_charset
|
||||
else ''
|
||||
)
|
||||
# Respond with 401 status and a WWW-Authenticate header
|
||||
cherrypy.serving.response.headers[
|
||||
'www-authenticate'] = 'Basic realm="%s"' % realm
|
||||
cherrypy.serving.response.headers['www-authenticate'] = (
|
||||
'Basic realm="%s"%s' % (realm, charset_declaration)
|
||||
)
|
||||
raise cherrypy.HTTPError(
|
||||
401, "You are not authorized to access that resource")
|
||||
401, 'You are not authorized to access that resource')
|
||||
|
||||
|
||||
def _try_decode(subject, charsets):
|
||||
for charset in charsets[:-1]:
|
||||
try:
|
||||
return tonative(subject, charset)
|
||||
except ValueError:
|
||||
pass
|
||||
return tonative(subject, charsets[-1])
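A behaviour sketch (not part of the diff), assuming Python 3, where UnicodeDecodeError is a subclass of ValueError:

    # _try_decode(b'na\xc3\xafve', ('utf-8', 'ISO-8859-1')) == 'naïve'  # utf-8 succeeds
    # _try_decode(b'na\xefve', ('utf-8', 'ISO-8859-1'))     == 'naïve'  # utf-8 fails, latin-1 fallback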
|
||||
|
||||
@@ -1,8 +1,9 @@
|
||||
# This file is part of CherryPy <http://www.cherrypy.org/>
|
||||
# -*- coding: utf-8 -*-
|
||||
# vim:ts=4:sw=4:expandtab:fileencoding=utf-8
|
||||
"""HTTP Digest Authentication tool.
|
||||
|
||||
__doc__ = """An implementation of the server-side of HTTP Digest Access
|
||||
An implementation of the server-side of HTTP Digest Access
|
||||
Authentication, which is described in :rfc:`2617`.
|
||||
|
||||
Example usage, using the built-in get_ha1_dict_plain function which uses a dict
|
||||
@@ -14,21 +15,28 @@ of plaintext passwords as the credentials store::
|
||||
'tools.auth_digest.realm': 'wonderland',
|
||||
'tools.auth_digest.get_ha1': get_ha1,
|
||||
'tools.auth_digest.key': 'a565c27146791cfb',
|
||||
'tools.auth_digest.accept_charset': 'UTF-8',
|
||||
}
|
||||
app_config = { '/' : digest_auth }
|
||||
"""
|
||||
|
||||
import time
|
||||
import functools
|
||||
from hashlib import md5
|
||||
|
||||
from six.moves.urllib.request import parse_http_list, parse_keqv_list
|
||||
|
||||
import cherrypy
|
||||
from cherrypy._cpcompat import ntob, tonative
|
||||
|
||||
|
||||
__author__ = 'visteya'
|
||||
__date__ = 'April 2009'
|
||||
|
||||
|
||||
import time
|
||||
from hashlib import md5
|
||||
from cherrypy._cpcompat import parse_http_list, parse_keqv_list
|
||||
def md5_hex(s):
|
||||
return md5(ntob(s, 'utf-8')).hexdigest()
|
||||
|
||||
import cherrypy
|
||||
from cherrypy._cpcompat import ntob
|
||||
md5_hex = lambda s: md5(ntob(s)).hexdigest()
|
||||
|
||||
qop_auth = 'auth'
|
||||
qop_auth_int = 'auth-int'
|
||||
@@ -36,6 +44,9 @@ valid_qops = (qop_auth, qop_auth_int)
|
||||
|
||||
valid_algorithms = ('MD5', 'MD5-sess')
|
||||
|
||||
FALLBACK_CHARSET = 'ISO-8859-1'
|
||||
DEFAULT_CHARSET = 'UTF-8'
|
||||
|
||||
|
||||
def TRACE(msg):
|
||||
cherrypy.log(msg, context='TOOLS.AUTH_DIGEST')
|
||||
@@ -130,24 +141,47 @@ def H(s):
|
||||
return md5_hex(s)
|
||||
|
||||
|
||||
class HttpDigestAuthorization (object):
|
||||
def _try_decode_header(header, charset):
|
||||
global FALLBACK_CHARSET
|
||||
|
||||
"""Class to parse a Digest Authorization header and perform re-calculation
|
||||
of the digest.
|
||||
for enc in (charset, FALLBACK_CHARSET):
|
||||
try:
|
||||
return tonative(ntob(tonative(header, 'latin1'), 'latin1'), enc)
|
||||
except ValueError as ve:
|
||||
last_err = ve
|
||||
else:
|
||||
raise last_err
|
||||
|
||||
|
||||
class HttpDigestAuthorization(object):
|
||||
"""
|
||||
Parses a Digest Authorization header and performs
|
||||
re-calculation of the digest.
|
||||
"""
|
||||
|
||||
scheme = 'digest'
|
||||
|
||||
def errmsg(self, s):
|
||||
return 'Digest Authorization header: %s' % s
|
||||
|
||||
def __init__(self, auth_header, http_method, debug=False):
|
||||
@classmethod
|
||||
def matches(cls, header):
|
||||
scheme, _, _ = header.partition(' ')
|
||||
return scheme.lower() == cls.scheme
|
||||
|
||||
def __init__(
|
||||
self, auth_header, http_method,
|
||||
debug=False, accept_charset=DEFAULT_CHARSET[:],
|
||||
):
|
||||
self.http_method = http_method
|
||||
self.debug = debug
|
||||
scheme, params = auth_header.split(" ", 1)
|
||||
self.scheme = scheme.lower()
|
||||
if self.scheme != 'digest':
|
||||
|
||||
if not self.matches(auth_header):
|
||||
raise ValueError('Authorization scheme is not "Digest"')
|
||||
|
||||
self.auth_header = auth_header
|
||||
self.auth_header = _try_decode_header(auth_header, accept_charset)
|
||||
|
||||
scheme, params = self.auth_header.split(' ', 1)
|
||||
|
||||
# make a dict of the params
|
||||
items = parse_http_list(params)
|
||||
@@ -180,7 +214,7 @@ class HttpDigestAuthorization (object):
|
||||
)
|
||||
if not has_reqd:
|
||||
raise ValueError(
|
||||
self.errmsg("Not all required parameters are present."))
|
||||
self.errmsg('Not all required parameters are present.'))
|
||||
|
||||
if self.qop:
|
||||
if self.qop not in valid_qops:
|
||||
@@ -188,13 +222,13 @@ class HttpDigestAuthorization (object):
|
||||
self.errmsg("Unsupported value for qop: '%s'" % self.qop))
|
||||
if not (self.cnonce and self.nc):
|
||||
raise ValueError(
|
||||
self.errmsg("If qop is sent then "
|
||||
"cnonce and nc MUST be present"))
|
||||
self.errmsg('If qop is sent then '
|
||||
'cnonce and nc MUST be present'))
|
||||
else:
|
||||
if self.cnonce or self.nc:
|
||||
raise ValueError(
|
||||
self.errmsg("If qop is not sent, "
|
||||
"neither cnonce nor nc can be present"))
|
||||
self.errmsg('If qop is not sent, '
|
||||
'neither cnonce nor nc can be present'))
|
||||
|
||||
def __str__(self):
|
||||
return 'authorization : %s' % self.auth_header
|
||||
@@ -239,7 +273,7 @@ class HttpDigestAuthorization (object):
|
||||
except ValueError: # int() error
|
||||
pass
|
||||
if self.debug:
|
||||
TRACE("nonce is stale")
|
||||
TRACE('nonce is stale')
|
||||
return True
|
||||
|
||||
def HA2(self, entity_body=''):
|
||||
@@ -251,14 +285,14 @@ class HttpDigestAuthorization (object):
|
||||
#
|
||||
# If the "qop" value is "auth-int", then A2 is:
|
||||
# A2 = method ":" digest-uri-value ":" H(entity-body)
|
||||
if self.qop is None or self.qop == "auth":
|
||||
if self.qop is None or self.qop == 'auth':
|
||||
a2 = '%s:%s' % (self.http_method, self.uri)
|
||||
elif self.qop == "auth-int":
|
||||
a2 = "%s:%s:%s" % (self.http_method, self.uri, H(entity_body))
|
||||
elif self.qop == 'auth-int':
|
||||
a2 = '%s:%s:%s' % (self.http_method, self.uri, H(entity_body))
|
||||
else:
|
||||
# in theory, this should never happen, since I validate qop in
|
||||
# __init__()
|
||||
raise ValueError(self.errmsg("Unrecognized value for qop!"))
|
||||
raise ValueError(self.errmsg('Unrecognized value for qop!'))
|
||||
return H(a2)
|
||||
|
||||
def request_digest(self, ha1, entity_body=''):
|
||||
@@ -279,10 +313,10 @@ class HttpDigestAuthorization (object):
|
||||
ha2 = self.HA2(entity_body)
|
||||
# Request-Digest -- RFC 2617 3.2.2.1
|
||||
if self.qop:
|
||||
req = "%s:%s:%s:%s:%s" % (
|
||||
req = '%s:%s:%s:%s:%s' % (
|
||||
self.nonce, self.nc, self.cnonce, self.qop, ha2)
|
||||
else:
|
||||
req = "%s:%s" % (self.nonce, ha2)
|
||||
req = '%s:%s' % (self.nonce, ha2)
|
||||
|
||||
# RFC 2617 3.2.2.2
|
||||
#
|
||||
@@ -302,25 +336,44 @@ class HttpDigestAuthorization (object):
|
||||
return digest
|
||||
|
||||
|
||||
def www_authenticate(realm, key, algorithm='MD5', nonce=None, qop=qop_auth,
|
||||
stale=False):
|
||||
def _get_charset_declaration(charset):
|
||||
global FALLBACK_CHARSET
|
||||
charset = charset.upper()
|
||||
return (
|
||||
(', charset="%s"' % charset)
|
||||
if charset != FALLBACK_CHARSET
|
||||
else ''
|
||||
)
|
||||
|
||||
|
||||
def www_authenticate(
|
||||
realm, key, algorithm='MD5', nonce=None, qop=qop_auth,
|
||||
stale=False, accept_charset=DEFAULT_CHARSET[:],
|
||||
):
|
||||
"""Constructs a WWW-Authenticate header for Digest authentication."""
|
||||
if qop not in valid_qops:
|
||||
raise ValueError("Unsupported value for qop: '%s'" % qop)
|
||||
if algorithm not in valid_algorithms:
|
||||
raise ValueError("Unsupported value for algorithm: '%s'" % algorithm)
|
||||
|
||||
HEADER_PATTERN = (
|
||||
'Digest realm="%s", nonce="%s", algorithm="%s", qop="%s"%s%s'
|
||||
)
|
||||
|
||||
if nonce is None:
|
||||
nonce = synthesize_nonce(realm, key)
|
||||
s = 'Digest realm="%s", nonce="%s", algorithm="%s", qop="%s"' % (
|
||||
realm, nonce, algorithm, qop)
|
||||
if stale:
|
||||
s += ', stale="true"'
|
||||
return s
|
||||
|
||||
stale_param = ', stale="true"' if stale else ''
|
||||
|
||||
charset_declaration = _get_charset_declaration(accept_charset)
|
||||
|
||||
return HEADER_PATTERN % (
|
||||
realm, nonce, algorithm, qop, stale_param, charset_declaration,
|
||||
)
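For reference (not part of the diff): with a non-fallback accept_charset the challenge now ends in a charset declaration; the nonce below is abbreviated:

    # www_authenticate('wonderland', 'a565c27146791cfb', accept_charset='UTF-8')
    # -> 'Digest realm="wonderland", nonce="...", algorithm="MD5", qop="auth", charset="UTF-8"'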
|
||||
|
||||
|
||||
def digest_auth(realm, get_ha1, key, debug=False):
|
||||
"""A CherryPy tool which hooks at before_handler to perform
|
||||
def digest_auth(realm, get_ha1, key, debug=False, accept_charset='utf-8'):
|
||||
"""A CherryPy tool that hooks at before_handler to perform
|
||||
HTTP Digest Access Authentication, as specified in :rfc:`2617`.
|
||||
|
||||
If the request has an 'authorization' header with a 'Digest' scheme,
|
||||
@@ -333,7 +386,7 @@ def digest_auth(realm, get_ha1, key, debug=False):
|
||||
A string containing the authentication realm.
|
||||
|
||||
get_ha1
|
||||
A callable which looks up a username in a credentials store
|
||||
A callable that looks up a username in a credentials store
|
||||
and returns the HA1 string, which is defined in the RFC to be
|
||||
MD5(username : realm : password). The function's signature is:
|
||||
``get_ha1(realm, username)``
|
||||
@@ -349,43 +402,63 @@ def digest_auth(realm, get_ha1, key, debug=False):
|
||||
request = cherrypy.serving.request
|
||||
|
||||
auth_header = request.headers.get('authorization')
|
||||
nonce_is_stale = False
|
||||
if auth_header is not None:
|
||||
try:
|
||||
auth = HttpDigestAuthorization(
|
||||
auth_header, request.method, debug=debug)
|
||||
except ValueError:
|
||||
raise cherrypy.HTTPError(
|
||||
400, "The Authorization header could not be parsed.")
|
||||
|
||||
if debug:
|
||||
TRACE(str(auth))
|
||||
respond_401 = functools.partial(
|
||||
_respond_401, realm, key, accept_charset, debug)
|
||||
|
||||
if auth.validate_nonce(realm, key):
|
||||
ha1 = get_ha1(realm, auth.username)
|
||||
if ha1 is not None:
|
||||
# note that for request.body to be available we need to
|
||||
# hook in at before_handler, not on_start_resource like
|
||||
# 3.1.x digest_auth does.
|
||||
digest = auth.request_digest(ha1, entity_body=request.body)
|
||||
if digest == auth.response: # authenticated
|
||||
if debug:
|
||||
TRACE("digest matches auth.response")
|
||||
# Now check if nonce is stale.
|
||||
# The choice of ten minutes' lifetime for nonce is somewhat
|
||||
# arbitrary
|
||||
nonce_is_stale = auth.is_nonce_stale(max_age_seconds=600)
|
||||
if not nonce_is_stale:
|
||||
request.login = auth.username
|
||||
if debug:
|
||||
TRACE("authentication of %s successful" %
|
||||
auth.username)
|
||||
return
|
||||
if not HttpDigestAuthorization.matches(auth_header or ''):
|
||||
respond_401()
|
||||
|
||||
# Respond with 401 status and a WWW-Authenticate header
|
||||
header = www_authenticate(realm, key, stale=nonce_is_stale)
|
||||
msg = 'The Authorization header could not be parsed.'
|
||||
with cherrypy.HTTPError.handle(ValueError, 400, msg):
|
||||
auth = HttpDigestAuthorization(
|
||||
auth_header, request.method,
|
||||
debug=debug, accept_charset=accept_charset,
|
||||
)
|
||||
|
||||
if debug:
|
||||
TRACE(str(auth))
|
||||
|
||||
if not auth.validate_nonce(realm, key):
|
||||
respond_401()
|
||||
|
||||
ha1 = get_ha1(realm, auth.username)
|
||||
|
||||
if ha1 is None:
|
||||
respond_401()
|
||||
|
||||
# note that for request.body to be available we need to
|
||||
# hook in at before_handler, not on_start_resource like
|
||||
# 3.1.x digest_auth does.
|
||||
digest = auth.request_digest(ha1, entity_body=request.body)
|
||||
if digest != auth.response:
|
||||
respond_401()
|
||||
|
||||
# authenticated
|
||||
if debug:
|
||||
TRACE('digest matches auth.response')
|
||||
# Now check if nonce is stale.
|
||||
# The choice of ten minutes' lifetime for nonce is somewhat
|
||||
# arbitrary
|
||||
if auth.is_nonce_stale(max_age_seconds=600):
|
||||
respond_401(stale=True)
|
||||
|
||||
request.login = auth.username
|
||||
if debug:
|
||||
TRACE('authentication of %s successful' % auth.username)
|
||||
|
||||
|
||||
def _respond_401(realm, key, accept_charset, debug, **kwargs):
|
||||
"""
|
||||
Respond with 401 status and a WWW-Authenticate header
|
||||
"""
|
||||
header = www_authenticate(
|
||||
realm, key,
|
||||
accept_charset=accept_charset,
|
||||
**kwargs
|
||||
)
|
||||
if debug:
|
||||
TRACE(header)
|
||||
cherrypy.serving.response.headers['WWW-Authenticate'] = header
|
||||
raise cherrypy.HTTPError(
|
||||
401, "You are not authorized to access that resource")
|
||||
401, 'You are not authorized to access that resource')
|
||||
|
||||
@@ -37,9 +37,11 @@ import sys
|
||||
import threading
|
||||
import time
|
||||
|
||||
import six
|
||||
|
||||
import cherrypy
|
||||
from cherrypy.lib import cptools, httputil
|
||||
from cherrypy._cpcompat import copyitems, ntob, set_daemon, sorted, Event
|
||||
from cherrypy._cpcompat import Event
|
||||
|
||||
|
||||
class Cache(object):
|
||||
@@ -48,19 +50,19 @@ class Cache(object):
|
||||
|
||||
def get(self):
|
||||
"""Return the current variant if in the cache, else None."""
|
||||
raise NotImplemented
|
||||
raise NotImplementedError
|
||||
|
||||
def put(self, obj, size):
|
||||
"""Store the current variant in the cache."""
|
||||
raise NotImplemented
|
||||
raise NotImplementedError
|
||||
|
||||
def delete(self):
|
||||
"""Remove ALL cached variants of the current resource."""
|
||||
raise NotImplemented
|
||||
raise NotImplementedError
|
||||
|
||||
def clear(self):
|
||||
"""Reset the cache to its initial, empty state."""
|
||||
raise NotImplemented
|
||||
raise NotImplementedError
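A note on why this change matters (not part of the diff):

    # In Python 3, ``raise NotImplemented`` fails with
    # "TypeError: exceptions must derive from BaseException", because
    # NotImplemented is a constant, not an exception class. With
    # NotImplementedError, a subclass that forgets to override
    # get/put/delete/clear now raises the intended, descriptive error:
    #
    #     Cache().get()   # -> NotImplementedError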
|
||||
|
||||
|
||||
# ------------------------------ Memory Cache ------------------------------- #
|
||||
@@ -170,7 +172,7 @@ class MemoryCache(Cache):
|
||||
# Run self.expire_cache in a separate daemon thread.
|
||||
t = threading.Thread(target=self.expire_cache, name='expire_cache')
|
||||
self.expiration_thread = t
|
||||
set_daemon(t, True)
|
||||
t.daemon = True
|
||||
t.start()
|
||||
|
||||
def clear(self):
|
||||
@@ -197,7 +199,8 @@ class MemoryCache(Cache):
|
||||
now = time.time()
|
||||
# Must make a copy of expirations so it doesn't change size
|
||||
# during iteration
|
||||
for expiration_time, objects in copyitems(self.expirations):
|
||||
items = list(six.iteritems(self.expirations))
|
||||
for expiration_time, objects in items:
|
||||
if expiration_time <= now:
|
||||
for obj_size, uri, sel_header_values in objects:
|
||||
try:
|
||||
@@ -265,7 +268,7 @@ class MemoryCache(Cache):
|
||||
self.store.pop(uri, None)
|
||||
|
||||
|
||||
def get(invalid_methods=("POST", "PUT", "DELETE"), debug=False, **kwargs):
|
||||
def get(invalid_methods=('POST', 'PUT', 'DELETE'), debug=False, **kwargs):
|
||||
"""Try to obtain cached output. If fresh enough, raise HTTPError(304).
|
||||
|
||||
If POST, PUT, or DELETE:
|
||||
@@ -291,9 +294,9 @@ def get(invalid_methods=("POST", "PUT", "DELETE"), debug=False, **kwargs):
|
||||
request = cherrypy.serving.request
|
||||
response = cherrypy.serving.response
|
||||
|
||||
if not hasattr(cherrypy, "_cache"):
|
||||
if not hasattr(cherrypy, '_cache'):
|
||||
# Make a process-wide Cache object.
|
||||
cherrypy._cache = kwargs.pop("cache_class", MemoryCache)()
|
||||
cherrypy._cache = kwargs.pop('cache_class', MemoryCache)()
|
||||
|
||||
# Take all remaining kwargs and set them on the Cache object.
|
||||
for k, v in kwargs.items():
|
||||
@@ -328,7 +331,7 @@ def get(invalid_methods=("POST", "PUT", "DELETE"), debug=False, **kwargs):
|
||||
if directive == 'max-age':
|
||||
if len(atoms) != 1 or not atoms[0].isdigit():
|
||||
raise cherrypy.HTTPError(
|
||||
400, "Invalid Cache-Control header")
|
||||
400, 'Invalid Cache-Control header')
|
||||
max_age = int(atoms[0])
|
||||
break
|
||||
elif directive == 'no-cache':
|
||||
@@ -353,13 +356,13 @@ def get(invalid_methods=("POST", "PUT", "DELETE"), debug=False, **kwargs):
|
||||
return False
|
||||
|
||||
# Copy the response headers. See
|
||||
# https://bitbucket.org/cherrypy/cherrypy/issue/721.
|
||||
# https://github.com/cherrypy/cherrypy/issues/721.
|
||||
response.headers = rh = httputil.HeaderMap()
|
||||
for k in h:
|
||||
dict.__setitem__(rh, k, dict.__getitem__(h, k))
|
||||
|
||||
# Add the required Age header
|
||||
response.headers["Age"] = str(age)
|
||||
response.headers['Age'] = str(age)
|
||||
|
||||
try:
|
||||
# Note that validate_since depends on a Last-Modified header;
|
||||
@@ -402,10 +405,19 @@ def tee_output():
|
||||
output.append(chunk)
|
||||
yield chunk
|
||||
|
||||
# save the cache data
|
||||
body = ntob('').join(output)
|
||||
cherrypy._cache.put((response.status, response.headers or {},
|
||||
body, response.time), len(body))
|
||||
# Save the cache data, but only if the body isn't empty.
|
||||
# e.g. a 304 Not Modified on a static file response will
|
||||
# have an empty body.
|
||||
# If the body is empty, delete the cache because it
|
||||
# contains a stale Threading._Event object that will
|
||||
# stall all consecutive requests until the _Event times
|
||||
# out
|
||||
body = b''.join(output)
|
||||
if not body:
|
||||
cherrypy._cache.delete()
|
||||
else:
|
||||
cherrypy._cache.put((response.status, response.headers or {},
|
||||
body, response.time), len(body))
|
||||
|
||||
response = cherrypy.serving.response
|
||||
response.body = tee(response.body)
|
||||
@@ -457,14 +469,14 @@ def expires(secs=0, force=False, debug=False):
|
||||
secs = (86400 * secs.days) + secs.seconds
|
||||
|
||||
if secs == 0:
|
||||
if force or ("Pragma" not in headers):
|
||||
headers["Pragma"] = "no-cache"
|
||||
if force or ('Pragma' not in headers):
|
||||
headers['Pragma'] = 'no-cache'
|
||||
if cherrypy.serving.request.protocol >= (1, 1):
|
||||
if force or "Cache-Control" not in headers:
|
||||
headers["Cache-Control"] = "no-cache, must-revalidate"
|
||||
if force or 'Cache-Control' not in headers:
|
||||
headers['Cache-Control'] = 'no-cache, must-revalidate'
|
||||
# Set an explicit Expires date in the past.
|
||||
expiry = httputil.HTTPDate(1169942400.0)
|
||||
else:
|
||||
expiry = httputil.HTTPDate(response.time + secs)
|
||||
if force or "Expires" not in headers:
|
||||
headers["Expires"] = expiry
|
||||
if force or 'Expires' not in headers:
|
||||
headers['Expires'] = expiry
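A config sketch (not part of the diff) for this tool; the path and lifetime are illustrative:

    config = {
        '/static': {
            'tools.expires.on': True,
            'tools.expires.secs': 86400,   # one day
            'tools.expires.force': True,
        },
    }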
|
||||
|
||||
@@ -23,10 +23,15 @@ it will call ``serve()`` for you.
|
||||
import re
|
||||
import sys
|
||||
import cgi
|
||||
from cherrypy._cpcompat import quote_plus
|
||||
import os
|
||||
import os.path
|
||||
localFile = os.path.join(os.path.dirname(__file__), "coverage.cache")
|
||||
|
||||
from six.moves import urllib
|
||||
|
||||
import cherrypy
|
||||
|
||||
|
||||
localFile = os.path.join(os.path.dirname(__file__), 'coverage.cache')
|
||||
|
||||
the_coverage = None
|
||||
try:
|
||||
@@ -42,8 +47,8 @@ except ImportError:
|
||||
|
||||
import warnings
|
||||
warnings.warn(
|
||||
"No code coverage will be performed; "
|
||||
"coverage.py could not be imported.")
|
||||
'No code coverage will be performed; '
|
||||
'coverage.py could not be imported.')
|
||||
|
||||
def start():
|
||||
pass
|
||||
@@ -193,7 +198,7 @@ def _percent(statements, missing):
|
||||
return 0
|
||||
|
||||
|
||||
def _show_branch(root, base, path, pct=0, showpct=False, exclude="",
|
||||
def _show_branch(root, base, path, pct=0, showpct=False, exclude='',
|
||||
coverage=the_coverage):
|
||||
|
||||
# Show the directory name and any of our children
|
||||
@@ -204,11 +209,11 @@ def _show_branch(root, base, path, pct=0, showpct=False, exclude="",
|
||||
|
||||
if newpath.lower().startswith(base):
|
||||
relpath = newpath[len(base):]
|
||||
yield "| " * relpath.count(os.sep)
|
||||
yield '| ' * relpath.count(os.sep)
|
||||
yield (
|
||||
"<a class='directory' "
|
||||
"href='menu?base=%s&exclude=%s'>%s</a>\n" %
|
||||
(newpath, quote_plus(exclude), name)
|
||||
(newpath, urllib.parse.quote_plus(exclude), name)
|
||||
)
|
||||
|
||||
for chunk in _show_branch(
|
||||
@@ -225,22 +230,22 @@ def _show_branch(root, base, path, pct=0, showpct=False, exclude="",
|
||||
for name in files:
|
||||
newpath = os.path.join(path, name)
|
||||
|
||||
pc_str = ""
|
||||
pc_str = ''
|
||||
if showpct:
|
||||
try:
|
||||
_, statements, _, missing, _ = coverage.analysis2(newpath)
|
||||
except:
|
||||
except Exception:
|
||||
# Yes, we really want to pass on all errors.
|
||||
pass
|
||||
else:
|
||||
pc = _percent(statements, missing)
|
||||
pc_str = ("%3d%% " % pc).replace(' ', ' ')
|
||||
pc_str = ('%3d%% ' % pc).replace(' ', ' ')
|
||||
if pc < float(pct) or pc == -1:
|
||||
pc_str = "<span class='fail'>%s</span>" % pc_str
|
||||
else:
|
||||
pc_str = "<span class='pass'>%s</span>" % pc_str
|
||||
|
||||
yield TEMPLATE_ITEM % ("| " * (relpath.count(os.sep) + 1),
|
||||
yield TEMPLATE_ITEM % ('| ' * (relpath.count(os.sep) + 1),
|
||||
pc_str, newpath, name)
|
||||
|
||||
|
||||
@@ -260,8 +265,8 @@ def _graft(path, tree):
|
||||
break
|
||||
atoms.append(tail)
|
||||
atoms.append(p)
|
||||
if p != "/":
|
||||
atoms.append("/")
|
||||
if p != '/':
|
||||
atoms.append('/')
|
||||
|
||||
atoms.reverse()
|
||||
for node in atoms:
|
||||
@@ -286,15 +291,15 @@ class CoverStats(object):
|
||||
if root is None:
|
||||
# Guess initial depth. Files outside this path will not be
|
||||
# reachable from the web interface.
|
||||
import cherrypy
|
||||
root = os.path.dirname(cherrypy.__file__)
|
||||
self.root = root
|
||||
|
||||
@cherrypy.expose
|
||||
def index(self):
|
||||
return TEMPLATE_FRAMESET % self.root.lower()
|
||||
index.exposed = True
|
||||
|
||||
def menu(self, base="/", pct="50", showpct="",
|
||||
@cherrypy.expose
|
||||
def menu(self, base='/', pct='50', showpct='',
|
||||
exclude=r'python\d\.\d|test|tut\d|tutorial'):
|
||||
|
||||
# The coverage module uses all-lower-case names.
|
||||
@@ -305,37 +310,36 @@ class CoverStats(object):
|
||||
|
||||
# Start by showing links for parent paths
|
||||
yield "<div id='crumbs'>"
|
||||
path = ""
|
||||
path = ''
|
||||
atoms = base.split(os.sep)
|
||||
atoms.pop()
|
||||
for atom in atoms:
|
||||
path += atom + os.sep
|
||||
yield ("<a href='menu?base=%s&exclude=%s'>%s</a> %s"
|
||||
% (path, quote_plus(exclude), atom, os.sep))
|
||||
yield "</div>"
|
||||
% (path, urllib.parse.quote_plus(exclude), atom, os.sep))
|
||||
yield '</div>'
|
||||
|
||||
yield "<div id='tree'>"
|
||||
|
||||
# Then display the tree
|
||||
tree = get_tree(base, exclude, self.coverage)
|
||||
if not tree:
|
||||
yield "<p>No modules covered.</p>"
|
||||
yield '<p>No modules covered.</p>'
|
||||
else:
|
||||
for chunk in _show_branch(tree, base, "/", pct,
|
||||
for chunk in _show_branch(tree, base, '/', pct,
|
||||
showpct == 'checked', exclude,
|
||||
coverage=self.coverage):
|
||||
yield chunk
|
||||
|
||||
yield "</div>"
|
||||
yield "</body></html>"
|
||||
menu.exposed = True
|
||||
yield '</div>'
|
||||
yield '</body></html>'
|
||||
|
||||
def annotated_file(self, filename, statements, excluded, missing):
|
||||
source = open(filename, 'r')
|
||||
buffer = []
|
||||
for lineno, line in enumerate(source.readlines()):
|
||||
lineno += 1
|
||||
line = line.strip("\n\r")
|
||||
line = line.strip('\n\r')
|
||||
empty_the_buffer = True
|
||||
if lineno in excluded:
|
||||
template = TEMPLATE_LOC_EXCLUDED
|
||||
@@ -352,6 +356,7 @@ class CoverStats(object):
|
||||
buffer = []
|
||||
yield template % (lineno, cgi.escape(line))
|
||||
|
||||
@cherrypy.expose
|
||||
def report(self, name):
|
||||
filename, statements, excluded, missing, _ = self.coverage.analysis2(
|
||||
name)
|
||||
@@ -366,22 +371,21 @@ class CoverStats(object):
|
||||
yield '</table>'
|
||||
yield '</body>'
|
||||
yield '</html>'
|
||||
report.exposed = True
|
||||
|
||||
|
||||
def serve(path=localFile, port=8080, root=None):
|
||||
if coverage is None:
|
||||
raise ImportError("The coverage module could not be imported.")
|
||||
raise ImportError('The coverage module could not be imported.')
|
||||
from coverage import coverage
|
||||
cov = coverage(data_file=path)
|
||||
cov.load()
|
||||
|
||||
import cherrypy
|
||||
cherrypy.config.update({'server.socket_port': int(port),
|
||||
'server.thread_pool': 10,
|
||||
'environment': "production",
|
||||
'environment': 'production',
|
||||
})
|
||||
cherrypy.quickstart(CoverStats(cov, root))
|
||||
|
||||
if __name__ == "__main__":
|
||||
|
||||
if __name__ == '__main__':
|
||||
serve(*tuple(sys.argv[1:]))
|
||||
|
||||
@@ -187,9 +187,19 @@ To format statistics reports::
|
||||
|
||||
"""
|
||||
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
import threading
|
||||
import time
|
||||
|
||||
import six
|
||||
|
||||
import cherrypy
|
||||
from cherrypy._cpcompat import json
|
||||
|
||||
# ------------------------------- Statistics -------------------------------- #
|
||||
|
||||
import logging
|
||||
if not hasattr(logging, 'statistics'):
|
||||
logging.statistics = {}
|
||||
|
||||
@@ -210,12 +220,6 @@ def extrapolate_statistics(scope):
|
||||
|
||||
# -------------------- CherryPy Applications Statistics --------------------- #
|
||||
|
||||
import sys
|
||||
import threading
|
||||
import time
|
||||
|
||||
import cherrypy
|
||||
|
||||
appstats = logging.statistics.setdefault('CherryPy Applications', {})
|
||||
appstats.update({
|
||||
'Enabled': True,
|
||||
@@ -246,7 +250,9 @@ appstats.update({
|
||||
'Requests': {},
|
||||
})
|
||||
|
||||
proc_time = lambda s: time.time() - s['Start Time']
|
||||
|
||||
def proc_time(s):
|
||||
return time.time() - s['Start Time']
|
||||
|
||||
|
||||
class ByteCountWrapper(object):
|
||||
@@ -292,7 +298,8 @@ class ByteCountWrapper(object):
|
||||
return data
|
||||
|
||||
|
||||
average_uriset_time = lambda s: s['Count'] and (s['Sum'] / s['Count']) or 0
|
||||
def average_uriset_time(s):
|
||||
return s['Count'] and (s['Sum'] / s['Count']) or 0
|
||||
|
||||
|
||||
def _get_threading_ident():
|
||||
@@ -300,6 +307,7 @@ def _get_threading_ident():
|
||||
return threading.get_ident()
|
||||
return threading._get_ident()
|
||||
|
||||
|
||||
class StatsTool(cherrypy.Tool):
|
||||
|
||||
"""Record various information about the current request."""
|
||||
@@ -390,28 +398,22 @@ class StatsTool(cherrypy.Tool):
|
||||
sq.pop(0)
|
||||
|
||||
|
||||
import cherrypy
|
||||
cherrypy.tools.cpstats = StatsTool()
|
||||
|
||||
|
||||
# ---------------------- CherryPy Statistics Reporting ---------------------- #
|
||||
|
||||
import os
|
||||
thisdir = os.path.abspath(os.path.dirname(__file__))
|
||||
|
||||
try:
|
||||
import json
|
||||
except ImportError:
|
||||
try:
|
||||
import simplejson as json
|
||||
except ImportError:
|
||||
json = None
|
||||
|
||||
|
||||
missing = object()
|
||||
|
||||
locale_date = lambda v: time.strftime('%c', time.gmtime(v))
|
||||
iso_format = lambda v: time.strftime('%Y-%m-%d %H:%M:%S', time.gmtime(v))
|
||||
|
||||
def locale_date(v):
|
||||
return time.strftime('%c', time.gmtime(v))
|
||||
|
||||
|
||||
def iso_format(v):
|
||||
return time.strftime('%Y-%m-%d %H:%M:%S', time.gmtime(v))
|
||||
|
||||
|
||||
def pause_resume(ns):
|
||||
@@ -475,6 +477,7 @@ class StatsPage(object):
|
||||
},
|
||||
}
|
||||
|
||||
@cherrypy.expose
|
||||
def index(self):
|
||||
# Transform the raw data into pretty output for HTML
|
||||
yield """
|
||||
@@ -578,7 +581,6 @@ table.stats2 th {
|
||||
</body>
|
||||
</html>
|
||||
"""
|
||||
index.exposed = True
|
||||
|
||||
def get_namespaces(self):
|
||||
"""Yield (title, scalars, collections) for each namespace."""
|
||||
@@ -611,12 +613,7 @@ table.stats2 th {
|
||||
"""Return ([headers], [rows]) for the given collection."""
|
||||
# E.g., the 'Requests' dict.
|
||||
headers = []
|
||||
try:
|
||||
# python2
|
||||
vals = v.itervalues()
|
||||
except AttributeError:
|
||||
# python3
|
||||
vals = v.values()
|
||||
vals = six.itervalues(v)
|
||||
for record in vals:
|
||||
for k3 in record:
|
||||
format = formatting.get(k3, missing)
|
||||
@@ -678,22 +675,22 @@ table.stats2 th {
|
||||
return headers, subrows
|
||||
|
||||
if json is not None:
|
||||
@cherrypy.expose
|
||||
def data(self):
|
||||
s = extrapolate_statistics(logging.statistics)
|
||||
cherrypy.response.headers['Content-Type'] = 'application/json'
|
||||
return json.dumps(s, sort_keys=True, indent=4)
|
||||
data.exposed = True
|
||||
|
||||
@cherrypy.expose
|
||||
def pause(self, namespace):
|
||||
logging.statistics.get(namespace, {})['Enabled'] = False
|
||||
raise cherrypy.HTTPRedirect('./')
|
||||
pause.exposed = True
|
||||
pause.cp_config = {'tools.allow.on': True,
|
||||
'tools.allow.methods': ['POST']}
|
||||
|
||||
@cherrypy.expose
|
||||
def resume(self, namespace):
|
||||
logging.statistics.get(namespace, {})['Enabled'] = True
|
||||
raise cherrypy.HTTPRedirect('./')
|
||||
resume.exposed = True
|
||||
resume.cp_config = {'tools.allow.on': True,
|
||||
'tools.allow.methods': ['POST']}
|
||||
|
||||
@@ -4,8 +4,11 @@ import logging
|
||||
import re
|
||||
from hashlib import md5
|
||||
|
||||
import six
|
||||
from six.moves import urllib
|
||||
|
||||
import cherrypy
|
||||
from cherrypy._cpcompat import basestring, unicodestr
|
||||
from cherrypy._cpcompat import text_or_bytes
|
||||
from cherrypy.lib import httputil as _httputil
|
||||
from cherrypy.lib import is_iterator
|
||||
|
||||
@@ -31,7 +34,7 @@ def validate_etags(autotags=False, debug=False):
|
||||
response = cherrypy.serving.response
|
||||
|
||||
# Guard against being run twice.
|
||||
if hasattr(response, "ETag"):
|
||||
if hasattr(response, 'ETag'):
|
||||
return
|
||||
|
||||
status, reason, msg = _httputil.valid_status(response.status)
|
||||
@@ -70,24 +73,24 @@ def validate_etags(autotags=False, debug=False):
|
||||
if debug:
|
||||
cherrypy.log('If-Match conditions: %s' % repr(conditions),
|
||||
'TOOLS.ETAGS')
|
||||
if conditions and not (conditions == ["*"] or etag in conditions):
|
||||
raise cherrypy.HTTPError(412, "If-Match failed: ETag %r did "
|
||||
"not match %r" % (etag, conditions))
|
||||
if conditions and not (conditions == ['*'] or etag in conditions):
|
||||
raise cherrypy.HTTPError(412, 'If-Match failed: ETag %r did '
|
||||
'not match %r' % (etag, conditions))
|
||||
|
||||
conditions = request.headers.elements('If-None-Match') or []
|
||||
conditions = [str(x) for x in conditions]
|
||||
if debug:
|
||||
cherrypy.log('If-None-Match conditions: %s' % repr(conditions),
|
||||
'TOOLS.ETAGS')
|
||||
if conditions == ["*"] or etag in conditions:
|
||||
if conditions == ['*'] or etag in conditions:
|
||||
if debug:
|
||||
cherrypy.log('request.method: %s' %
|
||||
request.method, 'TOOLS.ETAGS')
|
||||
if request.method in ("GET", "HEAD"):
|
||||
if request.method in ('GET', 'HEAD'):
|
||||
raise cherrypy.HTTPRedirect([], 304)
|
||||
else:
|
||||
raise cherrypy.HTTPError(412, "If-None-Match failed: ETag %r "
|
||||
"matched %r" % (etag, conditions))
|
||||
raise cherrypy.HTTPError(412, 'If-None-Match failed: ETag %r '
|
||||
'matched %r' % (etag, conditions))
|
||||
|
||||
|
||||
def validate_since():
|
||||
@@ -111,7 +114,7 @@ def validate_since():
|
||||
since = request.headers.get('If-Modified-Since')
|
||||
if since and since == lastmod:
|
||||
if (status >= 200 and status <= 299) or status == 304:
|
||||
if request.method in ("GET", "HEAD"):
|
||||
if request.method in ('GET', 'HEAD'):
|
||||
raise cherrypy.HTTPRedirect([], 304)
|
||||
else:
|
||||
raise cherrypy.HTTPError(412)
|
||||
@@ -184,7 +187,7 @@ def proxy(base=None, local='X-Forwarded-Host', remote='X-Forwarded-For',
|
||||
# This is for lighttpd/pound/Mongrel's 'X-Forwarded-Proto: https'
|
||||
scheme = s
|
||||
if not scheme:
|
||||
scheme = request.base[:request.base.find("://")]
|
||||
scheme = request.base[:request.base.find('://')]
|
||||
|
||||
if local:
|
||||
lbase = request.headers.get(local, None)
|
||||
@@ -193,14 +196,12 @@ def proxy(base=None, local='X-Forwarded-Host', remote='X-Forwarded-For',
|
||||
if lbase is not None:
|
||||
base = lbase.split(',')[0]
|
||||
if not base:
|
||||
base = request.headers.get('Host', '127.0.0.1')
|
||||
port = request.local.port
|
||||
if port != 80 and not base.endswith(':%s' % port):
|
||||
base += ':%s' % port
|
||||
default = urllib.parse.urlparse(request.base).netloc
|
||||
base = request.headers.get('Host', default)
|
||||
|
||||
if base.find("://") == -1:
|
||||
if base.find('://') == -1:
|
||||
# add http:// or https:// if needed
|
||||
base = scheme + "://" + base
|
||||
base = scheme + '://' + base
|
||||
|
||||
request.base = base
|
||||
|
||||
@@ -210,8 +211,8 @@ def proxy(base=None, local='X-Forwarded-Host', remote='X-Forwarded-For',
|
||||
cherrypy.log('Testing remote %r:%r' % (remote, xff), 'TOOLS.PROXY')
|
||||
if xff:
|
||||
if remote == 'X-Forwarded-For':
|
||||
#Bug #1268
|
||||
xff = xff.split(',')[0].strip()
|
||||
# Grab the first IP in a comma-separated list. Ref #1268.
|
||||
xff = next(ip.strip() for ip in xff.split(','))
|
||||
request.remote.ip = xff
|
||||
|
||||
|
||||
@@ -238,6 +239,8 @@ def response_headers(headers=None, debug=False):
|
||||
'TOOLS.RESPONSE_HEADERS')
|
||||
for name, value in (headers or []):
|
||||
cherrypy.serving.response.headers[name] = value
|
||||
|
||||
|
||||
response_headers.failsafe = True
|
||||
|
||||
|
||||
@@ -283,7 +286,7 @@ class SessionAuth(object):
|
||||
|
||||
"""Assert that the user is logged in."""
|
||||
|
||||
session_key = "username"
|
||||
session_key = 'username'
|
||||
debug = False
|
||||
|
||||
def check_username_and_password(self, username, password):
|
||||
@@ -304,7 +307,7 @@ class SessionAuth(object):
|
||||
|
||||
def login_screen(self, from_page='..', username='', error_msg='',
|
||||
**kwargs):
|
||||
return (unicodestr("""<html><body>
|
||||
return (six.text_type("""<html><body>
|
||||
Message: %(error_msg)s
|
||||
<form method="post" action="do_login">
|
||||
Login: <input type="text" name="username" value="%(username)s" size="10" />
|
||||
@@ -315,7 +318,7 @@ Message: %(error_msg)s
|
||||
<br />
|
||||
<input type="submit" />
|
||||
</form>
|
||||
</body></html>""") % vars()).encode("utf-8")
|
||||
</body></html>""") % vars()).encode('utf-8')
|
||||
|
||||
def do_login(self, username, password, from_page='..', **kwargs):
|
||||
"""Login. May raise redirect, or return True if request handled."""
|
||||
@@ -324,15 +327,15 @@ Message: %(error_msg)s
|
||||
if error_msg:
|
||||
body = self.login_screen(from_page, username, error_msg)
|
||||
response.body = body
|
||||
if "Content-Length" in response.headers:
|
||||
if 'Content-Length' in response.headers:
|
||||
# Delete Content-Length header so finalize() recalcs it.
|
||||
del response.headers["Content-Length"]
|
||||
del response.headers['Content-Length']
|
||||
return True
|
||||
else:
|
||||
cherrypy.serving.request.login = username
|
||||
cherrypy.session[self.session_key] = username
|
||||
self.on_login(username)
|
||||
raise cherrypy.HTTPRedirect(from_page or "/")
|
||||
raise cherrypy.HTTPRedirect(from_page or '/')
|
||||
|
||||
def do_logout(self, from_page='..', **kwargs):
|
||||
"""Logout. May raise redirect, or return True if request handled."""
|
||||
@@ -362,9 +365,9 @@ Message: %(error_msg)s
|
||||
locals(),
|
||||
)
|
||||
response.body = self.login_screen(url)
|
||||
if "Content-Length" in response.headers:
|
||||
if 'Content-Length' in response.headers:
|
||||
# Delete Content-Length header so finalize() recalcs it.
|
||||
del response.headers["Content-Length"]
|
||||
del response.headers['Content-Length']
|
||||
return True
|
||||
self._debug_message('Setting request.login to %(username)r', locals())
|
||||
request.login = username
|
||||
@@ -386,14 +389,14 @@ Message: %(error_msg)s
|
||||
return True
|
||||
elif path.endswith('do_login'):
|
||||
if request.method != 'POST':
|
||||
response.headers['Allow'] = "POST"
|
||||
response.headers['Allow'] = 'POST'
|
||||
self._debug_message('do_login requires POST')
|
||||
raise cherrypy.HTTPError(405)
|
||||
self._debug_message('routing %(path)r to do_login', locals())
|
||||
return self.do_login(**request.params)
|
||||
elif path.endswith('do_logout'):
|
||||
if request.method != 'POST':
|
||||
response.headers['Allow'] = "POST"
|
||||
response.headers['Allow'] = 'POST'
|
||||
raise cherrypy.HTTPError(405)
|
||||
self._debug_message('routing %(path)r to do_logout', locals())
|
||||
return self.do_logout(**request.params)
|
||||
@@ -407,24 +410,28 @@ def session_auth(**kwargs):
|
||||
for k, v in kwargs.items():
|
||||
setattr(sa, k, v)
|
||||
return sa.run()
|
||||
session_auth.__doc__ = """Session authentication hook.
|
||||
|
||||
Any attribute of the SessionAuth class may be overridden via a keyword arg
|
||||
to this function:
|
||||
|
||||
""" + "\n".join(["%s: %s" % (k, type(getattr(SessionAuth, k)).__name__)
|
||||
for k in dir(SessionAuth) if not k.startswith("__")])
|
||||
session_auth.__doc__ = (
|
||||
"""Session authentication hook.
|
||||
|
||||
Any attribute of the SessionAuth class may be overridden via a keyword arg
|
||||
to this function:
|
||||
|
||||
""" + '\n'.join(['%s: %s' % (k, type(getattr(SessionAuth, k)).__name__)
|
||||
for k in dir(SessionAuth) if not k.startswith('__')])
|
||||
)
|
||||
|
||||
|
||||
def log_traceback(severity=logging.ERROR, debug=False):
|
||||
"""Write the last error's traceback to the cherrypy error log."""
|
||||
cherrypy.log("", "HTTP", severity=severity, traceback=True)
|
||||
cherrypy.log('', 'HTTP', severity=severity, traceback=True)
|
||||
|
||||
|
||||
def log_request_headers(debug=False):
|
||||
"""Write request headers to the cherrypy error log."""
|
||||
h = [" %s: %s" % (k, v) for k, v in cherrypy.serving.request.header_list]
|
||||
cherrypy.log('\nRequest Headers:\n' + '\n'.join(h), "HTTP")
|
||||
h = [' %s: %s' % (k, v) for k, v in cherrypy.serving.request.header_list]
|
||||
cherrypy.log('\nRequest Headers:\n' + '\n'.join(h), 'HTTP')
|
||||
|
||||
|
||||
def log_hooks(debug=False):
|
||||
@@ -440,13 +447,13 @@ def log_hooks(debug=False):
|
||||
points.append(k)
|
||||
|
||||
for k in points:
|
||||
msg.append(" %s:" % k)
|
||||
msg.append(' %s:' % k)
|
||||
v = request.hooks.get(k, [])
|
||||
v.sort()
|
||||
for h in v:
|
||||
msg.append(" %r" % h)
|
||||
msg.append(' %r' % h)
|
||||
cherrypy.log('\nRequest Hooks for ' + cherrypy.url() +
|
||||
':\n' + '\n'.join(msg), "HTTP")
|
||||
':\n' + '\n'.join(msg), 'HTTP')
|
||||
|
||||
|
||||
def redirect(url='', internal=True, debug=False):
|
||||
@@ -531,7 +538,7 @@ def accept(media=None, debug=False):
|
||||
"""
|
||||
if not media:
|
||||
return
|
||||
if isinstance(media, basestring):
|
||||
if isinstance(media, text_or_bytes):
|
||||
media = [media]
|
||||
request = cherrypy.serving.request
|
||||
|
||||
@@ -547,12 +554,12 @@ def accept(media=None, debug=False):
|
||||
# Note that 'ranges' is sorted in order of preference
|
||||
for element in ranges:
|
||||
if element.qvalue > 0:
|
||||
if element.value == "*/*":
|
||||
if element.value == '*/*':
|
||||
# Matches any type or subtype
|
||||
if debug:
|
||||
cherrypy.log('Match due to */*', 'TOOLS.ACCEPT')
|
||||
return media[0]
|
||||
elif element.value.endswith("/*"):
|
||||
elif element.value.endswith('/*'):
|
||||
# Matches any subtype
|
||||
mtype = element.value[:-1] # Keep the slash
|
||||
for m in media:
|
||||
@@ -572,36 +579,23 @@ def accept(media=None, debug=False):
|
||||
# No suitable media-range found.
|
||||
ah = request.headers.get('Accept')
|
||||
if ah is None:
|
||||
msg = "Your client did not send an Accept header."
|
||||
msg = 'Your client did not send an Accept header.'
|
||||
else:
|
||||
msg = "Your client sent this Accept header: %s." % ah
|
||||
msg += (" But this resource only emits these media types: %s." %
|
||||
", ".join(media))
|
||||
msg = 'Your client sent this Accept header: %s.' % ah
|
||||
msg += (' But this resource only emits these media types: %s.' %
|
||||
', '.join(media))
|
||||
raise cherrypy.HTTPError(406, msg)
|
||||
|
||||
|
||||
class MonitoredHeaderMap(_httputil.HeaderMap):
|
||||
|
||||
def transform_key(self, key):
|
||||
self.accessed_headers.add(key)
|
||||
return super(MonitoredHeaderMap, self).transform_key(key)
|
||||
|
||||
def __init__(self):
|
||||
self.accessed_headers = set()
|
||||
|
||||
def __getitem__(self, key):
|
||||
self.accessed_headers.add(key)
|
||||
return _httputil.HeaderMap.__getitem__(self, key)
|
||||
|
||||
def __contains__(self, key):
|
||||
self.accessed_headers.add(key)
|
||||
return _httputil.HeaderMap.__contains__(self, key)
|
||||
|
||||
def get(self, key, default=None):
|
||||
self.accessed_headers.add(key)
|
||||
return _httputil.HeaderMap.get(self, key, default=default)
|
||||
|
||||
if hasattr({}, 'has_key'):
|
||||
# Python 2
|
||||
def has_key(self, key):
|
||||
self.accessed_headers.add(key)
|
||||
return _httputil.HeaderMap.has_key(self, key)
|
||||
super(MonitoredHeaderMap, self).__init__()
|
||||
|
||||
|
||||
def autovary(ignore=None, debug=False):
|
||||
@@ -628,3 +622,19 @@ def autovary(ignore=None, debug=False):
|
||||
v.sort()
|
||||
resp_h['Vary'] = ', '.join(v)
|
||||
request.hooks.attach('before_finalize', set_response_header, 95)
|
||||
|
||||
|
||||
def convert_params(exception=ValueError, error=400):
|
||||
"""Convert request params based on function annotations, with error handling.
|
||||
|
||||
exception
|
||||
Exception class to catch.
|
||||
|
||||
status
|
||||
The HTTP error code to return to the client on failure.
|
||||
"""
|
||||
request = cherrypy.serving.request
|
||||
types = request.handler.callable.__annotations__
|
||||
with cherrypy.HTTPError.handle(exception, error):
|
||||
for key in set(types).intersection(request.params):
|
||||
request.params[key] = types[key](request.params[key])
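A usage sketch (not part of the diff); it assumes this hook is registered as a tool named 'params' (the registration itself is not shown in this hunk):

    import cherrypy

    class Root(object):
        @cherrypy.expose
        @cherrypy.tools.params()          # assumed tool name
        def echo(self, count: int = 1):   # the annotation drives the conversion
            return 'count is %r' % count  # non-integer input now yields a 400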
|
||||
|
||||
@@ -1,8 +1,11 @@
|
||||
import struct
|
||||
import time
|
||||
import io
|
||||
|
||||
import six
|
||||
|
||||
import cherrypy
|
||||
from cherrypy._cpcompat import basestring, BytesIO, ntob, unicodestr
|
||||
from cherrypy._cpcompat import text_or_bytes
|
||||
from cherrypy.lib import file_generator
|
||||
from cherrypy.lib import is_closable_iterator
|
||||
from cherrypy.lib import set_vary_header
|
||||
@@ -34,6 +37,7 @@ def decode(encoding=None, default_encoding='utf-8'):
|
||||
default_encoding = [default_encoding]
|
||||
body.attempt_charsets = body.attempt_charsets + default_encoding
|
||||
|
||||
|
||||
class UTF8StreamEncoder:
|
||||
def __init__(self, iterator):
|
||||
self._iterator = iterator
|
||||
@@ -46,7 +50,7 @@ class UTF8StreamEncoder:
|
||||
|
||||
def __next__(self):
|
||||
res = next(self._iterator)
|
||||
if isinstance(res, unicodestr):
|
||||
if isinstance(res, six.text_type):
|
||||
res = res.encode('utf-8')
|
||||
return res
|
||||
|
||||
@@ -63,7 +67,7 @@ class UTF8StreamEncoder:
|
||||
class ResponseEncoder:
|
||||
|
||||
default_encoding = 'utf-8'
|
||||
failmsg = "Response body could not be encoded with %r."
|
||||
failmsg = 'Response body could not be encoded with %r.'
|
||||
encoding = None
|
||||
errors = 'strict'
|
||||
text_only = True
|
||||
@@ -95,7 +99,7 @@ class ResponseEncoder:
|
||||
|
||||
def encoder(body):
|
||||
for chunk in body:
|
||||
if isinstance(chunk, unicodestr):
|
||||
if isinstance(chunk, six.text_type):
|
||||
chunk = chunk.encode(encoding, self.errors)
|
||||
yield chunk
|
||||
self.body = encoder(self.body)
|
||||
@@ -108,7 +112,7 @@ class ResponseEncoder:
|
||||
self.attempted_charsets.add(encoding)
|
||||
body = []
|
||||
for chunk in self.body:
|
||||
if isinstance(chunk, unicodestr):
|
||||
if isinstance(chunk, six.text_type):
|
||||
try:
|
||||
chunk = chunk.encode(encoding, self.errors)
|
||||
except (LookupError, UnicodeError):
|
||||
@@ -128,7 +132,7 @@ class ResponseEncoder:
|
||||
encoder = self.encode_stream
|
||||
else:
|
||||
encoder = self.encode_string
|
||||
if "Content-Length" in response.headers:
|
||||
if 'Content-Length' in response.headers:
|
||||
# Delete Content-Length header so finalize() recalcs it.
|
||||
# Encoded strings may be of different lengths from their
|
||||
# unicode equivalents, and even from each other. For example:
|
||||
@@ -139,7 +143,7 @@ class ResponseEncoder:
|
||||
# 6
|
||||
# >>> len(t.encode("utf7"))
|
||||
# 8
|
||||
del response.headers["Content-Length"]
|
||||
del response.headers['Content-Length']
|
||||
|
||||
# Parse the Accept-Charset request header, and try to provide one
|
||||
# of the requested charsets (in order of user preference).
|
||||
@@ -154,7 +158,7 @@ class ResponseEncoder:
|
||||
if self.debug:
|
||||
cherrypy.log('Specified encoding %r' %
|
||||
encoding, 'TOOLS.ENCODE')
|
||||
if (not charsets) or "*" in charsets or encoding in charsets:
|
||||
if (not charsets) or '*' in charsets or encoding in charsets:
|
||||
if self.debug:
|
||||
cherrypy.log('Attempting encoding %r' %
|
||||
encoding, 'TOOLS.ENCODE')
|
||||
@@ -174,7 +178,7 @@ class ResponseEncoder:
|
||||
else:
|
||||
for element in encs:
|
||||
if element.qvalue > 0:
|
||||
if element.value == "*":
|
||||
if element.value == '*':
|
||||
# Matches any charset. Try our default.
|
||||
if self.debug:
|
||||
cherrypy.log('Attempting default encoding due '
|
||||
@@ -189,7 +193,7 @@ class ResponseEncoder:
|
||||
if encoder(encoding):
|
||||
return encoding
|
||||
|
||||
if "*" not in charsets:
|
||||
if '*' not in charsets:
|
||||
# If no "*" is present in an Accept-Charset field, then all
|
||||
# character sets not explicitly mentioned get a quality
|
||||
# value of 0, except for ISO-8859-1, which gets a quality
|
||||
@@ -205,39 +209,27 @@ class ResponseEncoder:
|
||||
# No suitable encoding found.
|
||||
ac = request.headers.get('Accept-Charset')
|
||||
if ac is None:
|
||||
msg = "Your client did not send an Accept-Charset header."
|
||||
msg = 'Your client did not send an Accept-Charset header.'
|
||||
else:
|
||||
msg = "Your client sent this Accept-Charset header: %s." % ac
|
||||
_charsets = ", ".join(sorted(self.attempted_charsets))
|
||||
msg += " We tried these charsets: %s." % (_charsets,)
|
||||
msg = 'Your client sent this Accept-Charset header: %s.' % ac
|
||||
_charsets = ', '.join(sorted(self.attempted_charsets))
|
||||
msg += ' We tried these charsets: %s.' % (_charsets,)
|
||||
raise cherrypy.HTTPError(406, msg)
|
||||
|
||||
def __call__(self, *args, **kwargs):
|
||||
response = cherrypy.serving.response
|
||||
self.body = self.oldhandler(*args, **kwargs)
|
||||
|
||||
if isinstance(self.body, basestring):
|
||||
# strings get wrapped in a list because iterating over a single
|
||||
# item list is much faster than iterating over every character
|
||||
# in a long string.
|
||||
if self.body:
|
||||
self.body = [self.body]
|
||||
else:
|
||||
# [''] doesn't evaluate to False, so replace it with [].
|
||||
self.body = []
|
||||
elif hasattr(self.body, 'read'):
|
||||
self.body = file_generator(self.body)
|
||||
elif self.body is None:
|
||||
self.body = []
|
||||
self.body = prepare_iter(self.body)
|
||||
|
||||
ct = response.headers.elements("Content-Type")
|
||||
ct = response.headers.elements('Content-Type')
|
||||
if self.debug:
|
||||
cherrypy.log('Content-Type: %r' % [str(h)
|
||||
for h in ct], 'TOOLS.ENCODE')
|
||||
if ct and self.add_charset:
|
||||
ct = ct[0]
|
||||
if self.text_only:
|
||||
if ct.value.lower().startswith("text/"):
|
||||
if ct.value.lower().startswith('text/'):
|
||||
if self.debug:
|
||||
cherrypy.log(
|
||||
'Content-Type %s starts with "text/"' % ct,
|
||||
@@ -261,10 +253,33 @@ class ResponseEncoder:
|
||||
if self.debug:
|
||||
cherrypy.log('Setting Content-Type %s' % ct,
|
||||
'TOOLS.ENCODE')
|
||||
response.headers["Content-Type"] = str(ct)
|
||||
response.headers['Content-Type'] = str(ct)
|
||||
|
||||
return self.body
|
||||
|
||||
|
||||
def prepare_iter(value):
|
||||
"""
|
||||
Ensure response body is iterable and resolves to False when empty.
|
||||
"""
|
||||
if isinstance(value, text_or_bytes):
|
||||
# strings get wrapped in a list because iterating over a single
|
||||
# item list is much faster than iterating over every character
|
||||
# in a long string.
|
||||
if value:
|
||||
value = [value]
|
||||
else:
|
||||
# [''] doesn't evaluate to False, so replace it with [].
|
||||
value = []
|
||||
# Don't use isinstance here; io.IOBase which has an ABC takes
|
||||
# 1000 times as long as, say, isinstance(value, str)
|
||||
elif hasattr(value, 'read'):
|
||||
value = file_generator(value)
|
||||
elif value is None:
|
||||
value = []
|
||||
return value
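
# Usage sketch (not part of the upstream diff): assuming prepare_iter is
# importable from cherrypy.lib.encoding as introduced above, it normalizes
# the common handler return types like this.
from cherrypy.lib.encoding import prepare_iter

assert prepare_iter(None) == []            # None becomes an empty (falsy) list
assert prepare_iter('') == []              # [''] would be truthy, so '' becomes []
assert prepare_iter('body') == ['body']    # non-empty strings are wrapped in a list
# file-like objects (anything with a .read method) are wrapped in file_generator
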
# GZIP
|
||||
|
||||
|
||||
@@ -273,15 +288,15 @@ def compress(body, compress_level):
|
||||
import zlib
|
||||
|
||||
# See http://www.gzip.org/zlib/rfc-gzip.html
|
||||
yield ntob('\x1f\x8b') # ID1 and ID2: gzip marker
|
||||
yield ntob('\x08') # CM: compression method
|
||||
yield ntob('\x00') # FLG: none set
|
||||
yield b'\x1f\x8b' # ID1 and ID2: gzip marker
|
||||
yield b'\x08' # CM: compression method
|
||||
yield b'\x00' # FLG: none set
|
||||
# MTIME: 4 bytes
|
||||
yield struct.pack("<L", int(time.time()) & int('FFFFFFFF', 16))
|
||||
yield ntob('\x02') # XFL: max compression, slowest algo
|
||||
yield ntob('\xff') # OS: unknown
|
||||
yield struct.pack('<L', int(time.time()) & int('FFFFFFFF', 16))
|
||||
yield b'\x02' # XFL: max compression, slowest algo
|
||||
yield b'\xff' # OS: unknown
|
||||
|
||||
crc = zlib.crc32(ntob(""))
|
||||
crc = zlib.crc32(b'')
|
||||
size = 0
|
||||
zobj = zlib.compressobj(compress_level,
|
||||
zlib.DEFLATED, -zlib.MAX_WBITS,
|
||||
@@ -293,15 +308,15 @@ def compress(body, compress_level):
|
||||
yield zobj.flush()
|
||||
|
||||
# CRC32: 4 bytes
|
||||
yield struct.pack("<L", crc & int('FFFFFFFF', 16))
|
||||
yield struct.pack('<L', crc & int('FFFFFFFF', 16))
|
||||
# ISIZE: 4 bytes
|
||||
yield struct.pack("<L", size & int('FFFFFFFF', 16))
|
||||
yield struct.pack('<L', size & int('FFFFFFFF', 16))
|
||||
|
||||
|
||||
def decompress(body):
|
||||
import gzip
|
||||
|
||||
zbuf = BytesIO()
|
||||
zbuf = io.BytesIO()
|
||||
zbuf.write(body)
|
||||
zbuf.seek(0)
|
||||
zfile = gzip.GzipFile(mode='rb', fileobj=zbuf)
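
# Round-trip sketch (not part of the upstream diff): compress() yields a full
# gzip stream (header, deflated chunks, CRC32/ISIZE trailer) from an iterable
# of byte chunks; decompress(), whose remaining lines are not shown in this
# hunk, is assumed to return the inflated bytes.
from cherrypy.lib.encoding import compress, decompress

payload = b'hello, gzip tool'
gzipped = b''.join(compress([payload], compress_level=6))
assert decompress(gzipped) == payload
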
@@ -318,9 +333,9 @@ def gzip(compress_level=5, mime_types=['text/html', 'text/plain'],
|
||||
values in the mime_types arg before calling this function.
|
||||
|
||||
The provided list of mime-types must be in one of the following forms:
|
||||
* type/subtype
|
||||
* type/*
|
||||
* type/*+subtype
|
||||
* `type/subtype`
|
||||
* `type/*`
|
||||
* `type/*+subtype`
|
||||
|
||||
No compression is performed if any of the following hold:
|
||||
* The client sends no Accept-Encoding request header
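
# Configuration sketch (not part of the upstream diff): the mime_types
# argument accepts the forms listed above. Enabling the tool through a
# config dict could look like this; the concrete values are illustrative.
gzip_conf = {
    '/': {
        'tools.gzip.on': True,
        'tools.gzip.compress_level': 6,
        'tools.gzip.mime_types': ['text/html', 'text/plain', 'application/*+json'],
    },
}
# cherrypy.quickstart(Root(), '/', config=gzip_conf)
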
@@ -332,7 +347,7 @@ def gzip(compress_level=5, mime_types=['text/html', 'text/plain'],
|
||||
request = cherrypy.serving.request
|
||||
response = cherrypy.serving.response
|
||||
|
||||
set_vary_header(response, "Accept-Encoding")
|
||||
set_vary_header(response, 'Accept-Encoding')
|
||||
|
||||
if not response.body:
|
||||
# Response body is empty (might be a 304 for instance)
|
||||
@@ -342,7 +357,7 @@ def gzip(compress_level=5, mime_types=['text/html', 'text/plain'],
|
||||
|
||||
# If returning cached content (which should already have been gzipped),
|
||||
# don't re-zip.
|
||||
if getattr(request, "cached", False):
|
||||
if getattr(request, 'cached', False):
|
||||
if debug:
|
||||
cherrypy.log('Not gzipping cached response', context='TOOLS.GZIP')
|
||||
return
|
||||
@@ -410,12 +425,12 @@ def gzip(compress_level=5, mime_types=['text/html', 'text/plain'],
|
||||
# Return a generator that compresses the page
|
||||
response.headers['Content-Encoding'] = 'gzip'
|
||||
response.body = compress(response.body, compress_level)
|
||||
if "Content-Length" in response.headers:
|
||||
if 'Content-Length' in response.headers:
|
||||
# Delete Content-Length header so finalize() recalcs it.
|
||||
del response.headers["Content-Length"]
|
||||
del response.headers['Content-Length']
|
||||
|
||||
return
|
||||
|
||||
if debug:
|
||||
cherrypy.log('No acceptable encoding found.', context='GZIP')
|
||||
cherrypy.HTTPError(406, "identity, gzip").set_response()
|
||||
cherrypy.HTTPError(406, 'identity, gzip').set_response()
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
import gc
|
||||
import inspect
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
|
||||
@@ -36,7 +35,7 @@ class ReferrerTree(object):
|
||||
refs = gc.get_referrers(obj)
|
||||
self.ignore.append(refs)
|
||||
if len(refs) > self.maxparents:
|
||||
return [("[%s referrers]" % len(refs), [])]
|
||||
return [('[%s referrers]' % len(refs), [])]
|
||||
|
||||
try:
|
||||
ascendcode = self.ascend.__code__
|
||||
@@ -72,20 +71,20 @@ class ReferrerTree(object):
|
||||
return self.peek(repr(obj))
|
||||
|
||||
if isinstance(obj, dict):
|
||||
return "{" + ", ".join(["%s: %s" % (self._format(k, descend=False),
|
||||
return '{' + ', '.join(['%s: %s' % (self._format(k, descend=False),
|
||||
self._format(v, descend=False))
|
||||
for k, v in obj.items()]) + "}"
|
||||
for k, v in obj.items()]) + '}'
|
||||
elif isinstance(obj, list):
|
||||
return "[" + ", ".join([self._format(item, descend=False)
|
||||
for item in obj]) + "]"
|
||||
return '[' + ', '.join([self._format(item, descend=False)
|
||||
for item in obj]) + ']'
|
||||
elif isinstance(obj, tuple):
|
||||
return "(" + ", ".join([self._format(item, descend=False)
|
||||
for item in obj]) + ")"
|
||||
return '(' + ', '.join([self._format(item, descend=False)
|
||||
for item in obj]) + ')'
|
||||
|
||||
r = self.peek(repr(obj))
|
||||
if isinstance(obj, (str, int, float)):
|
||||
return r
|
||||
return "%s: %s" % (type(obj), r)
|
||||
return '%s: %s' % (type(obj), r)
|
||||
|
||||
def format(self, tree):
|
||||
"""Return a list of string reprs from a nested list of referrers."""
|
||||
@@ -93,7 +92,7 @@ class ReferrerTree(object):
|
||||
|
||||
def ascend(branch, depth=1):
|
||||
for parent, grandparents in branch:
|
||||
output.append((" " * depth) + self._format(parent))
|
||||
output.append((' ' * depth) + self._format(parent))
|
||||
if grandparents:
|
||||
ascend(grandparents, depth + 1)
|
||||
ascend(tree)
|
||||
@@ -114,20 +113,22 @@ class RequestCounter(SimplePlugin):
|
||||
|
||||
def after_request(self):
|
||||
self.count -= 1
|
||||
|
||||
|
||||
request_counter = RequestCounter(cherrypy.engine)
|
||||
request_counter.subscribe()
|
||||
|
||||
|
||||
def get_context(obj):
|
||||
if isinstance(obj, _cprequest.Request):
|
||||
return "path=%s;stage=%s" % (obj.path_info, obj.stage)
|
||||
return 'path=%s;stage=%s' % (obj.path_info, obj.stage)
|
||||
elif isinstance(obj, _cprequest.Response):
|
||||
return "status=%s" % obj.status
|
||||
return 'status=%s' % obj.status
|
||||
elif isinstance(obj, _cpwsgi.AppResponse):
|
||||
return "PATH_INFO=%s" % obj.environ.get('PATH_INFO', '')
|
||||
elif hasattr(obj, "tb_lineno"):
|
||||
return "tb_lineno=%s" % obj.tb_lineno
|
||||
return ""
|
||||
return 'PATH_INFO=%s' % obj.environ.get('PATH_INFO', '')
|
||||
elif hasattr(obj, 'tb_lineno'):
|
||||
return 'tb_lineno=%s' % obj.tb_lineno
|
||||
return ''
|
||||
|
||||
|
||||
class GCRoot(object):
|
||||
@@ -136,26 +137,27 @@ class GCRoot(object):
|
||||
|
||||
classes = [
|
||||
(_cprequest.Request, 2, 2,
|
||||
"Should be 1 in this request thread and 1 in the main thread."),
|
||||
'Should be 1 in this request thread and 1 in the main thread.'),
|
||||
(_cprequest.Response, 2, 2,
|
||||
"Should be 1 in this request thread and 1 in the main thread."),
|
||||
'Should be 1 in this request thread and 1 in the main thread.'),
|
||||
(_cpwsgi.AppResponse, 1, 1,
|
||||
"Should be 1 in this request thread only."),
|
||||
'Should be 1 in this request thread only.'),
|
||||
]
|
||||
|
||||
@cherrypy.expose
|
||||
def index(self):
|
||||
return "Hello, world!"
|
||||
index.exposed = True
|
||||
return 'Hello, world!'
|
||||
|
||||
@cherrypy.expose
|
||||
def stats(self):
|
||||
output = ["Statistics:"]
|
||||
output = ['Statistics:']
|
||||
|
||||
for trial in range(10):
|
||||
if request_counter.count > 0:
|
||||
break
|
||||
time.sleep(0.5)
|
||||
else:
|
||||
output.append("\nNot all requests closed properly.")
|
||||
output.append('\nNot all requests closed properly.')
|
||||
|
||||
# gc_collect isn't perfectly synchronous, because it may
|
||||
# break reference cycles that then take time to fully
|
||||
@@ -173,11 +175,11 @@ class GCRoot(object):
|
||||
for x in gc.garbage:
|
||||
trash[type(x)] = trash.get(type(x), 0) + 1
|
||||
if trash:
|
||||
output.insert(0, "\n%s unreachable objects:" % unreachable)
|
||||
output.insert(0, '\n%s unreachable objects:' % unreachable)
|
||||
trash = [(v, k) for k, v in trash.items()]
|
||||
trash.sort()
|
||||
for pair in trash:
|
||||
output.append(" " + repr(pair))
|
||||
output.append(' ' + repr(pair))
|
||||
|
||||
# Check declared classes to verify uncollected instances.
|
||||
# These don't have to be part of a cycle; they can be
|
||||
@@ -193,25 +195,24 @@ class GCRoot(object):
|
||||
if lenobj < minobj or lenobj > maxobj:
|
||||
if minobj == maxobj:
|
||||
output.append(
|
||||
"\nExpected %s %r references, got %s." %
|
||||
'\nExpected %s %r references, got %s.' %
|
||||
(minobj, cls, lenobj))
|
||||
else:
|
||||
output.append(
|
||||
"\nExpected %s to %s %r references, got %s." %
|
||||
'\nExpected %s to %s %r references, got %s.' %
|
||||
(minobj, maxobj, cls, lenobj))
|
||||
|
||||
for obj in objs:
|
||||
if objgraph is not None:
|
||||
ig = [id(objs), id(inspect.currentframe())]
|
||||
fname = "graph_%s_%s.png" % (cls.__name__, id(obj))
|
||||
fname = 'graph_%s_%s.png' % (cls.__name__, id(obj))
|
||||
objgraph.show_backrefs(
|
||||
obj, extra_ignore=ig, max_depth=4, too_many=20,
|
||||
filename=fname, extra_info=get_context)
|
||||
output.append("\nReferrers for %s (refcount=%s):" %
|
||||
output.append('\nReferrers for %s (refcount=%s):' %
|
||||
(repr(obj), sys.getrefcount(obj)))
|
||||
t = ReferrerTree(ignore=[objs], maxdepth=3)
|
||||
tree = t.ascend(obj)
|
||||
output.extend(t.format(tree))
|
||||
|
||||
return "\n".join(output)
|
||||
stats.exposed = True
|
||||
return '\n'.join(output)
|
||||
|
||||
@@ -1,6 +0,0 @@
|
||||
import warnings
|
||||
warnings.warn('cherrypy.lib.http has been deprecated and will be removed '
|
||||
'in CherryPy 3.3 use cherrypy.lib.httputil instead.',
|
||||
DeprecationWarning)
|
||||
|
||||
from cherrypy.lib.httputil import *
|
||||
@@ -1,373 +0,0 @@
|
||||
"""
|
||||
This module defines functions to implement HTTP Digest Authentication
|
||||
(:rfc:`2617`).
|
||||
This has full compliance with 'Digest' and 'Basic' authentication methods. In
|
||||
'Digest' it supports both MD5 and MD5-sess algorithms.
|
||||
|
||||
Usage:
|
||||
First use 'doAuth' to request the client authentication for a
|
||||
certain resource. You should send an httplib.UNAUTHORIZED response to the
|
||||
client so he knows he has to authenticate itself.
|
||||
|
||||
Then use 'parseAuthorization' to retrieve the 'auth_map' used in
|
||||
'checkResponse'.
|
||||
|
||||
To use 'checkResponse' you must have already verified the password
|
||||
associated with the 'username' key in 'auth_map' dict. Then you use the
|
||||
'checkResponse' function to verify if the password matches the one sent
|
||||
by the client.
|
||||
|
||||
SUPPORTED_ALGORITHM - list of supported 'Digest' algorithms
|
||||
SUPPORTED_QOP - list of supported 'Digest' 'qop'.
|
||||
"""
|
||||
__version__ = 1, 0, 1
|
||||
__author__ = "Tiago Cogumbreiro <cogumbreiro@users.sf.net>"
|
||||
__credits__ = """
|
||||
Peter van Kampen for its recipe which implement most of Digest
|
||||
authentication:
|
||||
http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/302378
|
||||
"""
|
||||
|
||||
__license__ = """
|
||||
Copyright (c) 2005, Tiago Cogumbreiro <cogumbreiro@users.sf.net>
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are met:
|
||||
|
||||
* Redistributions of source code must retain the above copyright notice,
|
||||
this list of conditions and the following disclaimer.
|
||||
* Redistributions in binary form must reproduce the above copyright notice,
|
||||
this list of conditions and the following disclaimer in the documentation
|
||||
and/or other materials provided with the distribution.
|
||||
* Neither the name of Sylvain Hellegouarch nor the names of his
|
||||
contributors may be used to endorse or promote products derived from
|
||||
this software without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
|
||||
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
|
||||
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
||||
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
||||
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
|
||||
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
"""
|
||||
|
||||
__all__ = ("digestAuth", "basicAuth", "doAuth", "checkResponse",
|
||||
"parseAuthorization", "SUPPORTED_ALGORITHM", "md5SessionKey",
|
||||
"calculateNonce", "SUPPORTED_QOP")
|
||||
|
||||
##########################################################################
|
||||
import time
|
||||
from hashlib import md5
|
||||
|
||||
from cherrypy._cpcompat import base64_decode, ntob
|
||||
from cherrypy._cpcompat import parse_http_list, parse_keqv_list
|
||||
|
||||
MD5 = "MD5"
|
||||
MD5_SESS = "MD5-sess"
|
||||
AUTH = "auth"
|
||||
AUTH_INT = "auth-int"
|
||||
|
||||
SUPPORTED_ALGORITHM = (MD5, MD5_SESS)
|
||||
SUPPORTED_QOP = (AUTH, AUTH_INT)
|
||||
|
||||
##########################################################################
|
||||
# doAuth
|
||||
#
|
||||
DIGEST_AUTH_ENCODERS = {
|
||||
MD5: lambda val: md5(ntob(val)).hexdigest(),
|
||||
MD5_SESS: lambda val: md5(ntob(val)).hexdigest(),
|
||||
# SHA: lambda val: sha.new(ntob(val)).hexdigest (),
|
||||
}
|
||||
|
||||
|
||||
def calculateNonce(realm, algorithm=MD5):
|
||||
"""This is an auxaliary function that calculates 'nonce' value. It is used
|
||||
to handle sessions."""
|
||||
|
||||
global SUPPORTED_ALGORITHM, DIGEST_AUTH_ENCODERS
|
||||
assert algorithm in SUPPORTED_ALGORITHM
|
||||
|
||||
try:
|
||||
encoder = DIGEST_AUTH_ENCODERS[algorithm]
|
||||
except KeyError:
|
||||
raise NotImplementedError("The chosen algorithm (%s) does not have "
|
||||
"an implementation yet" % algorithm)
|
||||
|
||||
return encoder("%d:%s" % (time.time(), realm))
|
||||
|
||||
|
||||
def digestAuth(realm, algorithm=MD5, nonce=None, qop=AUTH):
|
||||
"""Challenges the client for a Digest authentication."""
|
||||
global SUPPORTED_ALGORITHM, DIGEST_AUTH_ENCODERS, SUPPORTED_QOP
|
||||
assert algorithm in SUPPORTED_ALGORITHM
|
||||
assert qop in SUPPORTED_QOP
|
||||
|
||||
if nonce is None:
|
||||
nonce = calculateNonce(realm, algorithm)
|
||||
|
||||
return 'Digest realm="%s", nonce="%s", algorithm="%s", qop="%s"' % (
|
||||
realm, nonce, algorithm, qop
|
||||
)
|
||||
|
||||
|
||||
def basicAuth(realm):
|
||||
"""Challengenes the client for a Basic authentication."""
|
||||
assert '"' not in realm, "Realms cannot contain the \" (quote) character."
|
||||
|
||||
return 'Basic realm="%s"' % realm
|
||||
|
||||
|
||||
def doAuth(realm):
|
||||
"""'doAuth' function returns the challenge string b giving priority over
|
||||
Digest and fallback to Basic authentication when the browser doesn't
|
||||
support the first one.
|
||||
|
||||
This should be set in the HTTP header under the key 'WWW-Authenticate'."""
|
||||
|
||||
return digestAuth(realm) + " " + basicAuth(realm)
|
||||
|
||||
|
||||
##########################################################################
|
||||
# Parse authorization parameters
|
||||
#
|
||||
def _parseDigestAuthorization(auth_params):
|
||||
# Convert the auth params to a dict
|
||||
items = parse_http_list(auth_params)
|
||||
params = parse_keqv_list(items)
|
||||
|
||||
# Now validate the params
|
||||
|
||||
# Check for required parameters
|
||||
required = ["username", "realm", "nonce", "uri", "response"]
|
||||
for k in required:
|
||||
if k not in params:
|
||||
return None
|
||||
|
||||
# If qop is sent then cnonce and nc MUST be present
|
||||
if "qop" in params and not ("cnonce" in params
|
||||
and "nc" in params):
|
||||
return None
|
||||
|
||||
# If qop is not sent, neither cnonce nor nc can be present
|
||||
if ("cnonce" in params or "nc" in params) and \
|
||||
"qop" not in params:
|
||||
return None
|
||||
|
||||
return params
|
||||
|
||||
|
||||
def _parseBasicAuthorization(auth_params):
|
||||
username, password = base64_decode(auth_params).split(":", 1)
|
||||
return {"username": username, "password": password}
|
||||
|
||||
AUTH_SCHEMES = {
|
||||
"basic": _parseBasicAuthorization,
|
||||
"digest": _parseDigestAuthorization,
|
||||
}
|
||||
|
||||
|
||||
def parseAuthorization(credentials):
|
||||
"""parseAuthorization will convert the value of the 'Authorization' key in
|
||||
the HTTP header to a map itself. If the parsing fails 'None' is returned.
|
||||
"""
|
||||
|
||||
global AUTH_SCHEMES
|
||||
|
||||
auth_scheme, auth_params = credentials.split(" ", 1)
|
||||
auth_scheme = auth_scheme.lower()
|
||||
|
||||
parser = AUTH_SCHEMES[auth_scheme]
|
||||
params = parser(auth_params)
|
||||
|
||||
if params is None:
|
||||
return
|
||||
|
||||
assert "auth_scheme" not in params
|
||||
params["auth_scheme"] = auth_scheme
|
||||
return params
|
||||
|
||||
|
||||
##########################################################################
|
||||
# Check provided response for a valid password
|
||||
#
|
||||
def md5SessionKey(params, password):
|
||||
"""
|
||||
If the "algorithm" directive's value is "MD5-sess", then A1
|
||||
[the session key] is calculated only once - on the first request by the
|
||||
client following receipt of a WWW-Authenticate challenge from the server.
|
||||
|
||||
This creates a 'session key' for the authentication of subsequent
|
||||
requests and responses which is different for each "authentication
|
||||
session", thus limiting the amount of material hashed with any one
|
||||
key.
|
||||
|
||||
Because the server need only use the hash of the user
|
||||
credentials in order to create the A1 value, this construction could
|
||||
be used in conjunction with a third party authentication service so
|
||||
that the web server would not need the actual password value. The
|
||||
specification of such a protocol is beyond the scope of this
|
||||
specification.
|
||||
"""
|
||||
|
||||
keys = ("username", "realm", "nonce", "cnonce")
|
||||
params_copy = {}
|
||||
for key in keys:
|
||||
params_copy[key] = params[key]
|
||||
|
||||
params_copy["algorithm"] = MD5_SESS
|
||||
return _A1(params_copy, password)
|
||||
|
||||
|
||||
def _A1(params, password):
|
||||
algorithm = params.get("algorithm", MD5)
|
||||
H = DIGEST_AUTH_ENCODERS[algorithm]
|
||||
|
||||
if algorithm == MD5:
|
||||
# If the "algorithm" directive's value is "MD5" or is
|
||||
# unspecified, then A1 is:
|
||||
# A1 = unq(username-value) ":" unq(realm-value) ":" passwd
|
||||
return "%s:%s:%s" % (params["username"], params["realm"], password)
|
||||
|
||||
elif algorithm == MD5_SESS:
|
||||
|
||||
# This is A1 if qop is set
|
||||
# A1 = H( unq(username-value) ":" unq(realm-value) ":" passwd )
|
||||
# ":" unq(nonce-value) ":" unq(cnonce-value)
|
||||
h_a1 = H("%s:%s:%s" % (params["username"], params["realm"], password))
|
||||
return "%s:%s:%s" % (h_a1, params["nonce"], params["cnonce"])
|
||||
|
||||
|
||||
def _A2(params, method, kwargs):
|
||||
# If the "qop" directive's value is "auth" or is unspecified, then A2 is:
|
||||
# A2 = Method ":" digest-uri-value
|
||||
|
||||
qop = params.get("qop", "auth")
|
||||
if qop == "auth":
|
||||
return method + ":" + params["uri"]
|
||||
elif qop == "auth-int":
|
||||
# If the "qop" value is "auth-int", then A2 is:
|
||||
# A2 = Method ":" digest-uri-value ":" H(entity-body)
|
||||
entity_body = kwargs.get("entity_body", "")
|
||||
H = kwargs["H"]
|
||||
|
||||
return "%s:%s:%s" % (
|
||||
method,
|
||||
params["uri"],
|
||||
H(entity_body)
|
||||
)
|
||||
|
||||
else:
|
||||
raise NotImplementedError("The 'qop' method is unknown: %s" % qop)
|
||||
|
||||
|
||||
def _computeDigestResponse(auth_map, password, method="GET", A1=None,
|
||||
**kwargs):
|
||||
"""
|
||||
Generates a response respecting the algorithm defined in RFC 2617
|
||||
"""
|
||||
params = auth_map
|
||||
|
||||
algorithm = params.get("algorithm", MD5)
|
||||
|
||||
H = DIGEST_AUTH_ENCODERS[algorithm]
|
||||
KD = lambda secret, data: H(secret + ":" + data)
|
||||
|
||||
qop = params.get("qop", None)
|
||||
|
||||
H_A2 = H(_A2(params, method, kwargs))
|
||||
|
||||
if algorithm == MD5_SESS and A1 is not None:
|
||||
H_A1 = H(A1)
|
||||
else:
|
||||
H_A1 = H(_A1(params, password))
|
||||
|
||||
if qop in ("auth", "auth-int"):
|
||||
# If the "qop" value is "auth" or "auth-int":
|
||||
# request-digest = <"> < KD ( H(A1), unq(nonce-value)
|
||||
# ":" nc-value
|
||||
# ":" unq(cnonce-value)
|
||||
# ":" unq(qop-value)
|
||||
# ":" H(A2)
|
||||
# ) <">
|
||||
request = "%s:%s:%s:%s:%s" % (
|
||||
params["nonce"],
|
||||
params["nc"],
|
||||
params["cnonce"],
|
||||
params["qop"],
|
||||
H_A2,
|
||||
)
|
||||
elif qop is None:
|
||||
# If the "qop" directive is not present (this construction is
|
||||
# for compatibility with RFC 2069):
|
||||
# request-digest =
|
||||
# <"> < KD ( H(A1), unq(nonce-value) ":" H(A2) ) > <">
|
||||
request = "%s:%s" % (params["nonce"], H_A2)
|
||||
|
||||
return KD(H_A1, request)
|
||||
|
||||
|
||||
def _checkDigestResponse(auth_map, password, method="GET", A1=None, **kwargs):
|
||||
"""This function is used to verify the response given by the client when
|
||||
he tries to authenticate.
|
||||
Optional arguments:
|
||||
entity_body - when 'qop' is set to 'auth-int' you MUST provide the
|
||||
raw data you are going to send to the client (usually the
|
||||
HTML page.
|
||||
request_uri - the uri from the request line compared with the 'uri'
|
||||
directive of the authorization map. They must represent
|
||||
the same resource (unused at this time).
|
||||
"""
|
||||
|
||||
if auth_map['realm'] != kwargs.get('realm', None):
|
||||
return False
|
||||
|
||||
response = _computeDigestResponse(
|
||||
auth_map, password, method, A1, **kwargs)
|
||||
|
||||
return response == auth_map["response"]
|
||||
|
||||
|
||||
def _checkBasicResponse(auth_map, password, method='GET', encrypt=None,
|
||||
**kwargs):
|
||||
# Note that the Basic response doesn't provide the realm value so we cannot
|
||||
# test it
|
||||
pass_through = lambda password, username=None: password
|
||||
encrypt = encrypt or pass_through
|
||||
try:
|
||||
candidate = encrypt(auth_map["password"], auth_map["username"])
|
||||
except TypeError:
|
||||
# if encrypt only takes one parameter, it's the password
|
||||
candidate = encrypt(auth_map["password"])
|
||||
return candidate == password
|
||||
|
||||
AUTH_RESPONSES = {
|
||||
"basic": _checkBasicResponse,
|
||||
"digest": _checkDigestResponse,
|
||||
}
|
||||
|
||||
|
||||
def checkResponse(auth_map, password, method="GET", encrypt=None, **kwargs):
|
||||
"""'checkResponse' compares the auth_map with the password and optionally
|
||||
other arguments that each implementation might need.
|
||||
|
||||
If the response is of type 'Basic' then the function has the following
|
||||
signature::
|
||||
|
||||
checkBasicResponse(auth_map, password) -> bool
|
||||
|
||||
If the response is of type 'Digest' then the function has the following
|
||||
signature::
|
||||
|
||||
checkDigestResponse(auth_map, password, method='GET', A1=None) -> bool
|
||||
|
||||
The 'A1' argument is only used in MD5_SESS algorithm based responses.
|
||||
Check md5SessionKey() for more info.
|
||||
"""
|
||||
checker = AUTH_RESPONSES[auth_map["auth_scheme"]]
|
||||
return checker(auth_map, password, method=method, encrypt=encrypt,
|
||||
**kwargs)
|
||||
@@ -7,13 +7,24 @@ FuManChu will personally hang you up by your thumbs and submit you
|
||||
to a public caning.
|
||||
"""
|
||||
|
||||
import functools
|
||||
import email.utils
|
||||
import re
|
||||
from binascii import b2a_base64
|
||||
from cherrypy._cpcompat import BaseHTTPRequestHandler, HTTPDate, ntob, ntou
|
||||
from cherrypy._cpcompat import basestring, bytestr, iteritems, nativestr
|
||||
from cherrypy._cpcompat import reversed, sorted, unicodestr, unquote_qs
|
||||
from cgi import parse_header
|
||||
from email.header import decode_header
|
||||
|
||||
import six
|
||||
from six.moves import range, builtins, map
|
||||
from six.moves.BaseHTTPServer import BaseHTTPRequestHandler
|
||||
|
||||
import cherrypy
|
||||
from cherrypy._cpcompat import ntob, ntou
|
||||
from cherrypy._cpcompat import unquote_plus
|
||||
|
||||
response_codes = BaseHTTPRequestHandler.responses.copy()
|
||||
|
||||
# From https://bitbucket.org/cherrypy/cherrypy/issue/361
|
||||
# From https://github.com/cherrypy/cherrypy/issues/361
|
||||
response_codes[500] = ('Internal Server Error',
|
||||
'The server encountered an unexpected condition '
|
||||
'which prevented it from fulfilling the request.')
|
||||
@@ -22,34 +33,34 @@ response_codes[503] = ('Service Unavailable',
|
||||
'request due to a temporary overloading or '
|
||||
'maintenance of the server.')
|
||||
|
||||
import re
|
||||
import urllib
|
||||
|
||||
HTTPDate = functools.partial(email.utils.formatdate, usegmt=True)
|
||||
|
||||
|
||||
def urljoin(*atoms):
|
||||
"""Return the given path \*atoms, joined into a single URL.
|
||||
r"""Return the given path \*atoms, joined into a single URL.
|
||||
|
||||
This will correctly join a SCRIPT_NAME and PATH_INFO into the
|
||||
original URL, even if either atom is blank.
|
||||
"""
|
||||
url = "/".join([x for x in atoms if x])
|
||||
while "//" in url:
|
||||
url = url.replace("//", "/")
|
||||
url = '/'.join([x for x in atoms if x])
|
||||
while '//' in url:
|
||||
url = url.replace('//', '/')
|
||||
# Special-case the final url of "", and return "/" instead.
|
||||
return url or "/"
|
||||
return url or '/'
|
||||
|
||||
|
||||
def urljoin_bytes(*atoms):
|
||||
"""Return the given path *atoms, joined into a single URL.
|
||||
"""Return the given path `*atoms`, joined into a single URL.
|
||||
|
||||
This will correctly join a SCRIPT_NAME and PATH_INFO into the
|
||||
original URL, even if either atom is blank.
|
||||
"""
|
||||
url = ntob("/").join([x for x in atoms if x])
|
||||
while ntob("//") in url:
|
||||
url = url.replace(ntob("//"), ntob("/"))
|
||||
url = b'/'.join([x for x in atoms if x])
|
||||
while b'//' in url:
|
||||
url = url.replace(b'//', b'/')
|
||||
# Special-case the final url of "", and return "/" instead.
|
||||
return url or ntob("/")
|
||||
return url or b'/'
|
||||
|
||||
|
||||
def protocol_from_http(protocol_str):
|
||||
@@ -72,9 +83,9 @@ def get_ranges(headervalue, content_length):
|
||||
return None
|
||||
|
||||
result = []
|
||||
bytesunit, byteranges = headervalue.split("=", 1)
|
||||
for brange in byteranges.split(","):
|
||||
start, stop = [x.strip() for x in brange.split("-", 1)]
|
||||
bytesunit, byteranges = headervalue.split('=', 1)
|
||||
for brange in byteranges.split(','):
|
||||
start, stop = [x.strip() for x in brange.split('-', 1)]
|
||||
if start:
|
||||
if not stop:
|
||||
stop = content_length - 1
|
||||
@@ -108,9 +119,9 @@ def get_ranges(headervalue, content_length):
|
||||
# If the entity is shorter than the specified suffix-length,
|
||||
# the entire entity-body is used.
|
||||
if int(stop) > content_length:
|
||||
result.append((0, content_length))
|
||||
result.append((0, content_length))
|
||||
else:
|
||||
result.append((content_length - int(stop), content_length))
|
||||
result.append((content_length - int(stop), content_length))
|
||||
|
||||
return result
|
||||
|
||||
@@ -126,14 +137,14 @@ class HeaderElement(object):
|
||||
self.params = params
|
||||
|
||||
def __cmp__(self, other):
|
||||
return cmp(self.value, other.value)
|
||||
return builtins.cmp(self.value, other.value)
|
||||
|
||||
def __lt__(self, other):
|
||||
return self.value < other.value
|
||||
|
||||
def __str__(self):
|
||||
p = [";%s=%s" % (k, v) for k, v in iteritems(self.params)]
|
||||
return str("%s%s" % (self.value, "".join(p)))
|
||||
p = [';%s=%s' % (k, v) for k, v in six.iteritems(self.params)]
|
||||
return str('%s%s' % (self.value, ''.join(p)))
|
||||
|
||||
def __bytes__(self):
|
||||
return ntob(self.__str__())
|
||||
@@ -141,32 +152,17 @@ class HeaderElement(object):
|
||||
def __unicode__(self):
|
||||
return ntou(self.__str__())
|
||||
|
||||
@staticmethod
|
||||
def parse(elementstr):
|
||||
"""Transform 'token;key=val' to ('token', {'key': 'val'})."""
|
||||
# Split the element into a value and parameters. The 'value' may
|
||||
# be of the form, "token=token", but we don't split that here.
|
||||
atoms = [x.strip() for x in elementstr.split(";") if x.strip()]
|
||||
if not atoms:
|
||||
initial_value = ''
|
||||
else:
|
||||
initial_value = atoms.pop(0).strip()
|
||||
params = {}
|
||||
for atom in atoms:
|
||||
atom = [x.strip() for x in atom.split("=", 1) if x.strip()]
|
||||
key = atom.pop(0)
|
||||
if atom:
|
||||
val = atom[0]
|
||||
else:
|
||||
val = ""
|
||||
params[key] = val
|
||||
initial_value, params = parse_header(elementstr)
|
||||
return initial_value, params
|
||||
parse = staticmethod(parse)
|
||||
|
||||
@classmethod
|
||||
def from_str(cls, elementstr):
|
||||
"""Construct an instance from a string of the form 'token;key=val'."""
|
||||
ival, params = cls.parse(elementstr)
|
||||
return cls(ival, params)
|
||||
from_str = classmethod(from_str)
|
||||
|
||||
|
||||
q_separator = re.compile(r'; *q *=')
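
# Behaviour sketch (not part of the upstream diff): HeaderElement.parse now
# delegates to cgi.parse_header from the stdlib, which splits a parameterized
# header value like this (the sample value is illustrative only).
from cgi import parse_header

value, params = parse_header('text/html; charset=utf-8; q=0.9')
assert value == 'text/html'
assert params == {'charset': 'utf-8', 'q': '0.9'}
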
@@ -183,6 +179,7 @@ class AcceptElement(HeaderElement):
|
||||
have been the other way around, but it's too late to fix now.
|
||||
"""
|
||||
|
||||
@classmethod
|
||||
def from_str(cls, elementstr):
|
||||
qvalue = None
|
||||
# The first "q" parameter (if any) separates the initial
|
||||
@@ -196,21 +193,35 @@ class AcceptElement(HeaderElement):
|
||||
|
||||
media_type, params = cls.parse(media_range)
|
||||
if qvalue is not None:
|
||||
params["q"] = qvalue
|
||||
params['q'] = qvalue
|
||||
return cls(media_type, params)
|
||||
from_str = classmethod(from_str)
|
||||
|
||||
@property
|
||||
def qvalue(self):
|
||||
val = self.params.get("q", "1")
|
||||
'The qvalue, or priority, of this value.'
|
||||
val = self.params.get('q', '1')
|
||||
if isinstance(val, HeaderElement):
|
||||
val = val.value
|
||||
return float(val)
|
||||
qvalue = property(qvalue, doc="The qvalue, or priority, of this value.")
|
||||
try:
|
||||
return float(val)
|
||||
except ValueError as val_err:
|
||||
"""Fail client requests with invalid quality value.
|
||||
|
||||
Ref: https://github.com/cherrypy/cherrypy/issues/1370
|
||||
"""
|
||||
six.raise_from(
|
||||
cherrypy.HTTPError(
|
||||
400,
|
||||
'Malformed HTTP header: `{}`'.
|
||||
format(str(self)),
|
||||
),
|
||||
val_err,
|
||||
)
|
||||
|
||||
def __cmp__(self, other):
|
||||
diff = cmp(self.qvalue, other.qvalue)
|
||||
diff = builtins.cmp(self.qvalue, other.qvalue)
|
||||
if diff == 0:
|
||||
diff = cmp(str(self), str(other))
|
||||
diff = builtins.cmp(str(self), str(other))
|
||||
return diff
|
||||
|
||||
def __lt__(self, other):
|
||||
@@ -219,7 +230,10 @@ class AcceptElement(HeaderElement):
|
||||
else:
|
||||
return self.qvalue < other.qvalue
|
||||
|
||||
|
||||
RE_HEADER_SPLIT = re.compile(',(?=(?:[^"]*"[^"]*")*[^"]*$)')
|
||||
|
||||
|
||||
def header_elements(fieldname, fieldvalue):
|
||||
"""Return a sorted HeaderElement list from a comma-separated header string.
|
||||
"""
|
||||
@@ -228,7 +242,7 @@ def header_elements(fieldname, fieldvalue):
|
||||
|
||||
result = []
|
||||
for element in RE_HEADER_SPLIT.split(fieldvalue):
|
||||
if fieldname.startswith("Accept") or fieldname == 'TE':
|
||||
if fieldname.startswith('Accept') or fieldname == 'TE':
|
||||
hv = AcceptElement.from_str(element)
|
||||
else:
|
||||
hv = HeaderElement.from_str(element)
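
# Behaviour sketch (not part of the upstream diff): for Accept-* headers the
# parsed elements come back ordered by descending qvalue, so the client's
# preferred value is first (the header value below is illustrative).
from cherrypy.lib.httputil import header_elements

elems = header_elements('Accept-Charset', 'iso-8859-5;q=0.2, utf-8')
assert [e.value for e in elems] == ['utf-8', 'iso-8859-5']
assert elems[0].qvalue == 1.0
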
@@ -238,14 +252,14 @@ def header_elements(fieldname, fieldvalue):
|
||||
|
||||
|
||||
def decode_TEXT(value):
|
||||
r"""Decode :rfc:`2047` TEXT (e.g. "=?utf-8?q?f=C3=BCr?=" -> "f\xfcr")."""
|
||||
try:
|
||||
# Python 3
|
||||
from email.header import decode_header
|
||||
except ImportError:
|
||||
from email.Header import decode_header
|
||||
r"""
|
||||
Decode :rfc:`2047` TEXT
|
||||
|
||||
>>> decode_TEXT("=?utf-8?q?f=C3=BCr?=") == b'f\xfcr'.decode('latin-1')
|
||||
True
|
||||
"""
|
||||
atoms = decode_header(value)
|
||||
decodedvalue = ""
|
||||
decodedvalue = ''
|
||||
for atom, charset in atoms:
|
||||
if charset is not None:
|
||||
atom = atom.decode(charset)
|
||||
@@ -253,41 +267,51 @@ def decode_TEXT(value):
|
||||
return decodedvalue
|
||||
|
||||
|
||||
def decode_TEXT_maybe(value):
|
||||
"""
|
||||
Decode the text but only if '=?' appears in it.
|
||||
"""
|
||||
return decode_TEXT(value) if '=?' in value else value
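
# Behaviour sketch (not part of the upstream diff): plain values pass through
# untouched, while RFC 2047 encoded-words are decoded as in the doctest above.
from cherrypy.lib.httputil import decode_TEXT_maybe

assert decode_TEXT_maybe('plain value') == 'plain value'
assert decode_TEXT_maybe('=?utf-8?q?f=C3=BCr?=') == 'f\xfcr'
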
def valid_status(status):
|
||||
"""Return legal HTTP status Code, Reason-phrase and Message.
|
||||
|
||||
The status arg must be an int, or a str that begins with an int.
|
||||
The status arg must be an int, a str that begins with an int
|
||||
or the constant from ``http.client`` stdlib module.
|
||||
|
||||
If status is an int, or a str and no reason-phrase is supplied,
|
||||
a default reason-phrase will be provided.
|
||||
If no reason-phrase is supplied, a default reason-phrase will
be provided.
|
||||
|
||||
>>> from six.moves import http_client
|
||||
>>> from six.moves.BaseHTTPServer import BaseHTTPRequestHandler
|
||||
>>> valid_status(http_client.ACCEPTED) == (
|
||||
... int(http_client.ACCEPTED),
|
||||
... ) + BaseHTTPRequestHandler.responses[http_client.ACCEPTED]
|
||||
True
|
||||
"""
|
||||
|
||||
if not status:
|
||||
status = 200
|
||||
|
||||
status = str(status)
|
||||
parts = status.split(" ", 1)
|
||||
if len(parts) == 1:
|
||||
# No reason supplied.
|
||||
code, = parts
|
||||
reason = None
|
||||
else:
|
||||
code, reason = parts
|
||||
reason = reason.strip()
|
||||
code, reason = status, None
|
||||
if isinstance(status, six.string_types):
|
||||
code, _, reason = status.partition(' ')
|
||||
reason = reason.strip() or None
|
||||
|
||||
try:
|
||||
code = int(code)
|
||||
except ValueError:
|
||||
raise ValueError("Illegal response status from server "
|
||||
"(%s is non-numeric)." % repr(code))
|
||||
except (TypeError, ValueError):
|
||||
raise ValueError('Illegal response status from server '
|
||||
'(%s is non-numeric).' % repr(code))
|
||||
|
||||
if code < 100 or code > 599:
|
||||
raise ValueError("Illegal response status from server "
|
||||
"(%s is out of range)." % repr(code))
|
||||
raise ValueError('Illegal response status from server '
|
||||
'(%s is out of range).' % repr(code))
|
||||
|
||||
if code not in response_codes:
|
||||
# code is unknown but not illegal
|
||||
default_reason, message = "", ""
|
||||
default_reason, message = '', ''
|
||||
else:
|
||||
default_reason, message = response_codes[code]
|
||||
|
||||
@@ -328,15 +352,15 @@ def _parse_qs(qs, keep_blank_values=0, strict_parsing=0, encoding='utf-8'):
|
||||
nv = name_value.split('=', 1)
|
||||
if len(nv) != 2:
|
||||
if strict_parsing:
|
||||
raise ValueError("bad query field: %r" % (name_value,))
|
||||
raise ValueError('bad query field: %r' % (name_value,))
|
||||
# Handle case of a control-name with no equal sign
|
||||
if keep_blank_values:
|
||||
nv.append('')
|
||||
else:
|
||||
continue
|
||||
if len(nv[1]) or keep_blank_values:
|
||||
name = unquote_qs(nv[0], encoding)
|
||||
value = unquote_qs(nv[1], encoding)
|
||||
name = unquote_plus(nv[0], encoding, errors='strict')
|
||||
value = unquote_plus(nv[1], encoding, errors='strict')
|
||||
if name in d:
|
||||
if not isinstance(d[name], list):
|
||||
d[name] = [d[name]]
|
||||
@@ -346,7 +370,7 @@ def _parse_qs(qs, keep_blank_values=0, strict_parsing=0, encoding='utf-8'):
|
||||
return d
|
||||
|
||||
|
||||
image_map_pattern = re.compile(r"[0-9]+,[0-9]+")
|
||||
image_map_pattern = re.compile(r'[0-9]+,[0-9]+')
|
||||
|
||||
|
||||
def parse_query_string(query_string, keep_blank_values=True, encoding='utf-8'):
|
||||
@@ -359,60 +383,84 @@ def parse_query_string(query_string, keep_blank_values=True, encoding='utf-8'):
|
||||
if image_map_pattern.match(query_string):
|
||||
# Server-side image map. Map the coords to 'x' and 'y'
|
||||
# (like CGI::Request does).
|
||||
pm = query_string.split(",")
|
||||
pm = query_string.split(',')
|
||||
pm = {'x': int(pm[0]), 'y': int(pm[1])}
|
||||
else:
|
||||
pm = _parse_qs(query_string, keep_blank_values, encoding=encoding)
|
||||
return pm
|
||||
|
||||
|
||||
class CaseInsensitiveDict(dict):
|
||||
####
|
||||
# Inlined from jaraco.collections 1.5.2
|
||||
# Ref #1673
|
||||
class KeyTransformingDict(dict):
|
||||
"""
|
||||
A dict subclass that transforms the keys before they're used.
|
||||
Subclasses may override the default transform_key to customize behavior.
|
||||
"""
|
||||
@staticmethod
|
||||
def transform_key(key):
|
||||
return key
|
||||
|
||||
def __init__(self, *args, **kargs):
|
||||
super(KeyTransformingDict, self).__init__()
|
||||
# build a dictionary using the default constructs
|
||||
d = dict(*args, **kargs)
|
||||
# build this dictionary using transformed keys.
|
||||
for item in d.items():
|
||||
self.__setitem__(*item)
|
||||
|
||||
def __setitem__(self, key, val):
|
||||
key = self.transform_key(key)
|
||||
super(KeyTransformingDict, self).__setitem__(key, val)
|
||||
|
||||
def __getitem__(self, key):
|
||||
key = self.transform_key(key)
|
||||
return super(KeyTransformingDict, self).__getitem__(key)
|
||||
|
||||
def __contains__(self, key):
|
||||
key = self.transform_key(key)
|
||||
return super(KeyTransformingDict, self).__contains__(key)
|
||||
|
||||
def __delitem__(self, key):
|
||||
key = self.transform_key(key)
|
||||
return super(KeyTransformingDict, self).__delitem__(key)
|
||||
|
||||
def get(self, key, *args, **kwargs):
|
||||
key = self.transform_key(key)
|
||||
return super(KeyTransformingDict, self).get(key, *args, **kwargs)
|
||||
|
||||
def setdefault(self, key, *args, **kwargs):
|
||||
key = self.transform_key(key)
|
||||
return super(KeyTransformingDict, self).setdefault(
|
||||
key, *args, **kwargs)
|
||||
|
||||
def pop(self, key, *args, **kwargs):
|
||||
key = self.transform_key(key)
|
||||
return super(KeyTransformingDict, self).pop(key, *args, **kwargs)
|
||||
|
||||
def matching_key_for(self, key):
|
||||
"""
|
||||
Given a key, return the actual key stored in self that matches.
|
||||
Raise KeyError if the key isn't found.
|
||||
"""
|
||||
try:
|
||||
return next(e_key for e_key in self.keys() if e_key == key)
|
||||
except StopIteration:
|
||||
raise KeyError(key)
|
||||
####
|
||||
|
||||
|
||||
class CaseInsensitiveDict(KeyTransformingDict):
|
||||
|
||||
"""A case-insensitive dict subclass.
|
||||
|
||||
Each key is changed on entry to str(key).title().
|
||||
"""
|
||||
|
||||
def __getitem__(self, key):
|
||||
return dict.__getitem__(self, str(key).title())
|
||||
|
||||
def __setitem__(self, key, value):
|
||||
dict.__setitem__(self, str(key).title(), value)
|
||||
|
||||
def __delitem__(self, key):
|
||||
dict.__delitem__(self, str(key).title())
|
||||
|
||||
def __contains__(self, key):
|
||||
return dict.__contains__(self, str(key).title())
|
||||
|
||||
def get(self, key, default=None):
|
||||
return dict.get(self, str(key).title(), default)
|
||||
|
||||
if hasattr({}, 'has_key'):
|
||||
def has_key(self, key):
|
||||
return str(key).title() in self
|
||||
|
||||
def update(self, E):
|
||||
for k in E.keys():
|
||||
self[str(k).title()] = E[k]
|
||||
|
||||
def fromkeys(cls, seq, value=None):
|
||||
newdict = cls()
|
||||
for k in seq:
|
||||
newdict[str(k).title()] = value
|
||||
return newdict
|
||||
fromkeys = classmethod(fromkeys)
|
||||
|
||||
def setdefault(self, key, x=None):
|
||||
key = str(key).title()
|
||||
try:
|
||||
return self[key]
|
||||
except KeyError:
|
||||
self[key] = x
|
||||
return x
|
||||
|
||||
def pop(self, key, default):
|
||||
return dict.pop(self, str(key).title(), default)
|
||||
@staticmethod
|
||||
def transform_key(key):
|
||||
return str(key).title()
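
# Usage sketch (not part of the upstream diff): with transform_key in place,
# every key is normalized to str(key).title(), so lookups are
# case-insensitive regardless of how the header name was spelled.
from cherrypy.lib.httputil import CaseInsensitiveDict

d = CaseInsensitiveDict()
d['content-TYPE'] = 'text/html'
assert d['Content-Type'] == 'text/html'
assert 'CONTENT-TYPE' in d
assert list(d) == ['Content-Type']   # the stored key is the title-cased form
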
# TEXT = <any OCTET except CTLs, but including LWS>
|
||||
@@ -420,10 +468,10 @@ class CaseInsensitiveDict(dict):
|
||||
# A CRLF is allowed in the definition of TEXT only as part of a header
|
||||
# field continuation. It is expected that the folding LWS will be
|
||||
# replaced with a single SP before interpretation of the TEXT value."
|
||||
if nativestr == bytestr:
|
||||
header_translate_table = ''.join([chr(i) for i in xrange(256)])
|
||||
if str == bytes:
|
||||
header_translate_table = ''.join([chr(i) for i in range(256)])
|
||||
header_translate_deletechars = ''.join(
|
||||
[chr(i) for i in xrange(32)]) + chr(127)
|
||||
[chr(i) for i in range(32)]) + chr(127)
|
||||
else:
|
||||
header_translate_table = None
|
||||
header_translate_deletechars = bytes(range(32)) + bytes([127])
|
||||
@@ -440,7 +488,7 @@ class HeaderMap(CaseInsensitiveDict):
|
||||
"""
|
||||
|
||||
protocol = (1, 1)
|
||||
encodings = ["ISO-8859-1"]
|
||||
encodings = ['ISO-8859-1']
|
||||
|
||||
# Someday, when http-bis is done, this will probably get dropped
|
||||
# since few servers, clients, or intermediaries do it. But until then,
|
||||
@@ -463,31 +511,30 @@ class HeaderMap(CaseInsensitiveDict):
|
||||
"""Transform self into a list of (name, value) tuples."""
|
||||
return list(self.encode_header_items(self.items()))
|
||||
|
||||
@classmethod
|
||||
def encode_header_items(cls, header_items):
|
||||
"""
|
||||
Prepare the sequence of name, value tuples into a form suitable for
|
||||
transmitting on the wire for HTTP.
|
||||
"""
|
||||
for k, v in header_items:
|
||||
if isinstance(k, unicodestr):
|
||||
k = cls.encode(k)
|
||||
if not isinstance(v, six.string_types) and \
|
||||
not isinstance(v, six.binary_type):
|
||||
v = six.text_type(v)
|
||||
|
||||
if not isinstance(v, basestring):
|
||||
v = str(v)
|
||||
yield tuple(map(cls.encode_header_item, (k, v)))
|
||||
|
||||
if isinstance(v, unicodestr):
|
||||
v = cls.encode(v)
|
||||
@classmethod
|
||||
def encode_header_item(cls, item):
|
||||
if isinstance(item, six.text_type):
|
||||
item = cls.encode(item)
|
||||
|
||||
# See header_translate_* constants above.
|
||||
# Replace only if you really know what you're doing.
|
||||
k = k.translate(header_translate_table,
|
||||
header_translate_deletechars)
|
||||
v = v.translate(header_translate_table,
|
||||
header_translate_deletechars)
|
||||
|
||||
yield (k, v)
|
||||
encode_header_items = classmethod(encode_header_items)
|
||||
# See header_translate_* constants above.
|
||||
# Replace only if you really know what you're doing.
|
||||
return item.translate(
|
||||
header_translate_table, header_translate_deletechars)
|
||||
|
||||
@classmethod
|
||||
def encode(cls, v):
|
||||
"""Return the given header name or value, encoded for HTTP output."""
|
||||
for enc in cls.encodings:
|
||||
@@ -503,12 +550,11 @@ class HeaderMap(CaseInsensitiveDict):
|
||||
# because we never want to fold lines--folding has
|
||||
# been deprecated by the HTTP working group.
|
||||
v = b2a_base64(v.encode('utf-8'))
|
||||
return (ntob('=?utf-8?b?') + v.strip(ntob('\n')) + ntob('?='))
|
||||
return (b'=?utf-8?b?' + v.strip(b'\n') + b'?=')
|
||||
|
||||
raise ValueError("Could not encode header part %r using "
|
||||
"any of the encodings %r." %
|
||||
raise ValueError('Could not encode header part %r using '
|
||||
'any of the encodings %r.' %
|
||||
(v, cls.encodings))
|
||||
encode = classmethod(encode)
|
||||
|
||||
|
||||
class Host(object):
|
||||
@@ -521,9 +567,9 @@ class Host(object):
|
||||
|
||||
"""
|
||||
|
||||
ip = "0.0.0.0"
|
||||
ip = '0.0.0.0'
|
||||
port = 80
|
||||
name = "unknown.tld"
|
||||
name = 'unknown.tld'
|
||||
|
||||
def __init__(self, ip, port, name=None):
|
||||
self.ip = ip
|
||||
@@ -533,4 +579,4 @@ class Host(object):
|
||||
self.name = name
|
||||
|
||||
def __repr__(self):
|
||||
return "httputil.Host(%r, %r, %r)" % (self.ip, self.port, self.name)
|
||||
return 'httputil.Host(%r, %r, %r)' % (self.ip, self.port, self.name)
|
||||
|
||||
@@ -1,17 +1,15 @@
|
||||
import cherrypy
|
||||
from cherrypy._cpcompat import basestring, ntou, json_encode, json_decode
|
||||
from cherrypy._cpcompat import text_or_bytes, ntou, json_encode, json_decode
|
||||
|
||||
|
||||
def json_processor(entity):
|
||||
"""Read application/json data into request.json."""
|
||||
if not entity.headers.get(ntou("Content-Length"), ntou("")):
|
||||
if not entity.headers.get(ntou('Content-Length'), ntou('')):
|
||||
raise cherrypy.HTTPError(411)
|
||||
|
||||
body = entity.fp.read()
|
||||
try:
|
||||
with cherrypy.HTTPError.handle(ValueError, 400, 'Invalid JSON document'):
|
||||
cherrypy.serving.request.json = json_decode(body.decode('utf-8'))
|
||||
except ValueError:
|
||||
raise cherrypy.HTTPError(400, 'Invalid JSON document')
|
||||
|
||||
|
||||
def json_in(content_type=[ntou('application/json'), ntou('text/javascript')],
|
||||
@@ -36,12 +34,9 @@ def json_in(content_type=[ntou('application/json'), ntou('text/javascript')],
|
||||
request header, or it will raise "411 Length Required". If for any
|
||||
other reason the request entity cannot be deserialized from JSON,
|
||||
it will raise "400 Bad Request: Invalid JSON document".
|
||||
|
||||
You must be using Python 2.6 or greater, or have the 'simplejson'
|
||||
package importable; otherwise, ValueError is raised during processing.
|
||||
"""
|
||||
request = cherrypy.serving.request
|
||||
if isinstance(content_type, basestring):
|
||||
if isinstance(content_type, text_or_bytes):
|
||||
content_type = [content_type]
|
||||
|
||||
if force:
|
||||
@@ -74,9 +69,6 @@ def json_out(content_type='application/json', debug=False,
|
||||
Provide your own handler to use a custom encoder. For example
|
||||
cherrypy.config['tools.json_out.handler'] = <function>, or
|
||||
@json_out(handler=function).
|
||||
|
||||
You must be using Python 2.6 or greater, or have the 'simplejson'
|
||||
package importable; otherwise, ValueError is raised during processing.
|
||||
"""
|
||||
request = cherrypy.serving.request
|
||||
# request.handler may be set to None by e.g. the caching tool
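
# Usage sketch (not part of the upstream diff): json_in decodes an
# application/json body into cherrypy.request.json and json_out serializes
# the handler's return value; Echo is an illustrative class name only.
import cherrypy

class Echo(object):
    @cherrypy.expose
    @cherrypy.tools.json_in()
    @cherrypy.tools.json_out()
    def echo(self):
        # request.json already holds the decoded request body
        return {'received': cherrypy.request.json}
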
@@ -1,147 +0,0 @@
|
||||
"""
|
||||
Platform-independent file locking. Inspired by and modeled after zc.lockfile.
|
||||
"""
|
||||
|
||||
import os
|
||||
|
||||
try:
|
||||
import msvcrt
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
try:
|
||||
import fcntl
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
|
||||
class LockError(Exception):
|
||||
|
||||
"Could not obtain a lock"
|
||||
|
||||
msg = "Unable to lock %r"
|
||||
|
||||
def __init__(self, path):
|
||||
super(LockError, self).__init__(self.msg % path)
|
||||
|
||||
|
||||
class UnlockError(LockError):
|
||||
|
||||
"Could not release a lock"
|
||||
|
||||
msg = "Unable to unlock %r"
|
||||
|
||||
|
||||
# first, a default, naive locking implementation
|
||||
class LockFile(object):
|
||||
|
||||
"""
|
||||
A default, naive locking implementation. Always fails if the file
|
||||
already exists.
|
||||
"""
|
||||
|
||||
def __init__(self, path):
|
||||
self.path = path
|
||||
try:
|
||||
fd = os.open(path, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
|
||||
except OSError:
|
||||
raise LockError(self.path)
|
||||
os.close(fd)
|
||||
|
||||
def release(self):
|
||||
os.remove(self.path)
|
||||
|
||||
def remove(self):
|
||||
pass
|
||||
|
||||
|
||||
class SystemLockFile(object):
|
||||
|
||||
"""
|
||||
An abstract base class for platform-specific locking.
|
||||
"""
|
||||
|
||||
def __init__(self, path):
|
||||
self.path = path
|
||||
|
||||
try:
|
||||
# Open lockfile for writing without truncation:
|
||||
self.fp = open(path, 'r+')
|
||||
except IOError:
|
||||
# If the file doesn't exist, IOError is raised; Use a+ instead.
|
||||
# Note that there may be a race here. Multiple processes
|
||||
# could fail on the r+ open and open the file a+, but only
|
||||
# one will get the the lock and write a pid.
|
||||
self.fp = open(path, 'a+')
|
||||
|
||||
try:
|
||||
self._lock_file()
|
||||
except:
|
||||
self.fp.seek(1)
|
||||
self.fp.close()
|
||||
del self.fp
|
||||
raise
|
||||
|
||||
self.fp.write(" %s\n" % os.getpid())
|
||||
self.fp.truncate()
|
||||
self.fp.flush()
|
||||
|
||||
def release(self):
|
||||
if not hasattr(self, 'fp'):
|
||||
return
|
||||
self._unlock_file()
|
||||
self.fp.close()
|
||||
del self.fp
|
||||
|
||||
def remove(self):
|
||||
"""
|
||||
Attempt to remove the file
|
||||
"""
|
||||
try:
|
||||
os.remove(self.path)
|
||||
except:
|
||||
pass
|
||||
|
||||
#@abc.abstract_method
|
||||
# def _lock_file(self):
|
||||
# """Attempt to obtain the lock on self.fp. Raise LockError if not
|
||||
# acquired."""
|
||||
|
||||
def _unlock_file(self):
|
||||
"""Attempt to obtain the lock on self.fp. Raise UnlockError if not
|
||||
released."""
|
||||
|
||||
|
||||
class WindowsLockFile(SystemLockFile):
|
||||
|
||||
def _lock_file(self):
|
||||
# Lock just the first byte
|
||||
try:
|
||||
msvcrt.locking(self.fp.fileno(), msvcrt.LK_NBLCK, 1)
|
||||
except IOError:
|
||||
raise LockError(self.fp.name)
|
||||
|
||||
def _unlock_file(self):
|
||||
try:
|
||||
self.fp.seek(0)
|
||||
msvcrt.locking(self.fp.fileno(), msvcrt.LK_UNLCK, 1)
|
||||
except IOError:
|
||||
raise UnlockError(self.fp.name)
|
||||
|
||||
if 'msvcrt' in globals():
|
||||
LockFile = WindowsLockFile
|
||||
|
||||
|
||||
class UnixLockFile(SystemLockFile):
|
||||
|
||||
def _lock_file(self):
|
||||
flags = fcntl.LOCK_EX | fcntl.LOCK_NB
|
||||
try:
|
||||
fcntl.flock(self.fp.fileno(), flags)
|
||||
except IOError:
|
||||
raise LockError(self.fp.name)
|
||||
|
||||
# no need to implement _unlock_file, it will be unlocked on close()
|
||||
|
||||
if 'fcntl' in globals():
|
||||
LockFile = UnixLockFile
|
||||
@@ -11,7 +11,7 @@ class Timer(object):
|
||||
A simple timer that will indicate when an expiration time has passed.
|
||||
"""
|
||||
def __init__(self, expiration):
|
||||
"Create a timer that expires at `expiration` (UTC datetime)"
|
||||
'Create a timer that expires at `expiration` (UTC datetime)'
|
||||
self.expiration = expiration
|
||||
|
||||
@classmethod
|
||||
@@ -26,7 +26,7 @@ class Timer(object):
|
||||
|
||||
|
||||
class LockTimeout(Exception):
|
||||
"An exception when a lock could not be acquired before a timeout period"
|
||||
'An exception when a lock could not be acquired before a timeout period'
|
||||
|
||||
|
||||
class LockChecker(object):
|
||||
@@ -43,5 +43,5 @@ class LockChecker(object):
|
||||
def expired(self):
|
||||
if self.timer.expired():
|
||||
raise LockTimeout(
|
||||
"Timeout acquiring lock for %(session_id)s" % vars(self))
|
||||
'Timeout acquiring lock for %(session_id)s' % vars(self))
|
||||
return False
|
||||
|
||||
@@ -10,9 +10,9 @@ You can profile any of your pages as follows::
|
||||
class Root:
|
||||
p = profiler.Profiler("/path/to/profile/dir")
|
||||
|
||||
@cherrypy.expose
|
||||
def index(self):
|
||||
self.p.run(self._index)
|
||||
index.exposed = True
|
||||
|
||||
def _index(self):
|
||||
return "Hello, world!"
|
||||
@@ -33,29 +33,36 @@ module from the command line, it will call ``serve()`` for you.
|
||||
|
||||
"""
|
||||
|
||||
|
||||
def new_func_strip_path(func_name):
|
||||
"""Make profiler output more readable by adding `__init__` modules' parents
|
||||
"""
|
||||
filename, line, name = func_name
|
||||
if filename.endswith("__init__.py"):
|
||||
return os.path.basename(filename[:-12]) + filename[-12:], line, name
|
||||
return os.path.basename(filename), line, name
|
||||
|
||||
try:
|
||||
import profile
|
||||
import pstats
|
||||
pstats.func_strip_path = new_func_strip_path
|
||||
except ImportError:
|
||||
profile = None
|
||||
pstats = None
|
||||
|
||||
import io
|
||||
import os
|
||||
import os.path
|
||||
import sys
|
||||
import warnings
|
||||
|
||||
from cherrypy._cpcompat import StringIO
|
||||
import cherrypy
|
||||
|
||||
|
||||
try:
|
||||
import profile
|
||||
import pstats
|
||||
|
||||
def new_func_strip_path(func_name):
|
||||
"""Make profiler output more readable by adding `__init__` modules' parents
|
||||
"""
|
||||
filename, line, name = func_name
|
||||
if filename.endswith('__init__.py'):
|
||||
return (
|
||||
os.path.basename(filename[:-12]) + filename[-12:],
|
||||
line,
|
||||
name,
|
||||
)
|
||||
return os.path.basename(filename), line, name
|
||||
|
||||
pstats.func_strip_path = new_func_strip_path
|
||||
except ImportError:
|
||||
profile = None
|
||||
pstats = None
|
||||
|
||||
|
||||
_count = 0
|
||||
|
||||
@@ -64,7 +71,7 @@ class Profiler(object):

def __init__(self, path=None):
if not path:
path = os.path.join(os.path.dirname(__file__), "profile")
path = os.path.join(os.path.dirname(__file__), 'profile')
self.path = path
if not os.path.exists(path):
os.makedirs(path)
@@ -73,7 +80,7 @@ class Profiler(object):
"""Dump profile data into self.path."""
global _count
c = _count = _count + 1
path = os.path.join(self.path, "cp_%04d.prof" % c)
path = os.path.join(self.path, 'cp_%04d.prof' % c)
prof = profile.Profile()
result = prof.runcall(func, *args, **params)
prof.dump_stats(path)
@@ -83,12 +90,12 @@ class Profiler(object):
""":rtype: list of available profiles.
"""
return [f for f in os.listdir(self.path)
if f.startswith("cp_") and f.endswith(".prof")]
if f.startswith('cp_') and f.endswith('.prof')]

def stats(self, filename, sortby='cumulative'):
""":rtype stats(index): output of print_stats() for the given profile.
"""
sio = StringIO()
sio = io.StringIO()
if sys.version_info >= (2, 5):
s = pstats.Stats(os.path.join(self.path, filename), stream=sio)
s.strip_dirs()
@@ -110,6 +117,7 @@ class Profiler(object):
sio.close()
return response

@cherrypy.expose
def index(self):
return """<html>
<head><title>CherryPy profile data</title></head>
@@ -119,23 +127,21 @@ class Profiler(object):
</frameset>
</html>
"""
index.exposed = True

@cherrypy.expose
def menu(self):
yield "<h2>Profiling runs</h2>"
yield "<p>Click on one of the runs below to see profiling data.</p>"
yield '<h2>Profiling runs</h2>'
yield '<p>Click on one of the runs below to see profiling data.</p>'
runs = self.statfiles()
runs.sort()
for i in runs:
yield "<a href='report?filename=%s' target='main'>%s</a><br />" % (
i, i)
menu.exposed = True

@cherrypy.expose
def report(self, filename):
import cherrypy
cherrypy.response.headers['Content-Type'] = 'text/plain'
return self.stats(filename)
report.exposed = True


class ProfileAggregator(Profiler):
@@ -147,7 +153,7 @@ class ProfileAggregator(Profiler):
self.profiler = profile.Profile()

def run(self, func, *args, **params):
path = os.path.join(self.path, "cp_%04d.prof" % self.count)
path = os.path.join(self.path, 'cp_%04d.prof' % self.count)
result = self.profiler.runcall(func, *args, **params)
self.profiler.dump_stats(path)
return result
@@ -172,11 +178,11 @@ class make_app:

"""
if profile is None or pstats is None:
msg = ("Your installation of Python does not have a profile "
msg = ('Your installation of Python does not have a profile '
"module. If you're on Debian, try "
"`sudo apt-get install python-profiler`. "
"See http://www.cherrypy.org/wiki/ProfilingOnDebian "
"for details.")
'`sudo apt-get install python-profiler`. '
'See http://www.cherrypy.org/wiki/ProfilingOnDebian '
'for details.')
warnings.warn(msg)

self.nextapp = nextapp
@@ -197,20 +203,19 @@ class make_app:

def serve(path=None, port=8080):
if profile is None or pstats is None:
msg = ("Your installation of Python does not have a profile module. "
msg = ('Your installation of Python does not have a profile module. '
"If you're on Debian, try "
"`sudo apt-get install python-profiler`. "
"See http://www.cherrypy.org/wiki/ProfilingOnDebian "
"for details.")
'`sudo apt-get install python-profiler`. '
'See http://www.cherrypy.org/wiki/ProfilingOnDebian '
'for details.')
warnings.warn(msg)

import cherrypy
cherrypy.config.update({'server.socket_port': int(port),
'server.thread_pool': 10,
'environment': "production",
'environment': 'production',
})
cherrypy.quickstart(Profiler(path))


if __name__ == "__main__":
if __name__ == '__main__':
serve(*tuple(sys.argv[1:]))

@@ -18,42 +18,14 @@ by adding a named handler to Config.namespaces. The name can be any string,
and the handler must be either a callable or a context manager.
"""

try:
# Python 3.0+
from configparser import ConfigParser
except ImportError:
from ConfigParser import ConfigParser
from cherrypy._cpcompat import text_or_bytes
from six.moves import configparser
from six.moves import builtins

try:
set
except NameError:
from sets import Set as set

try:
basestring
except NameError:
basestring = str

try:
# Python 3
import builtins
except ImportError:
# Python 2
import __builtin__ as builtins

import operator as _operator
import operator
import sys


def as_dict(config):
"""Return a dict from 'config' whether it is a dict, file, or filename."""
if isinstance(config, basestring):
config = Parser().dict_from_file(config)
elif hasattr(config, 'read'):
config = Parser().dict_from_file(config)
return config


class NamespaceSet(dict):

"""A dict of config namespace names and handlers.
@@ -83,19 +55,19 @@ class NamespaceSet(dict):
# Separate the given config into namespaces
ns_confs = {}
for k in config:
if "." in k:
ns, name = k.split(".", 1)
if '.' in k:
ns, name = k.split('.', 1)
bucket = ns_confs.setdefault(ns, {})
bucket[name] = config[k]

# I chose __enter__ and __exit__ so someday this could be
# rewritten using Python 2.5's 'with' statement:
# for ns, handler in self.iteritems():
# for ns, handler in six.iteritems(self):
# with handler as callable:
# for k, v in ns_confs.get(ns, {}).iteritems():
# for k, v in six.iteritems(ns_confs.get(ns, {})):
# callable(k, v)
for ns, handler in self.items():
exit = getattr(handler, "__exit__", None)
exit = getattr(handler, '__exit__', None)
if exit:
callable = handler.__enter__()
no_exc = True
@@ -103,7 +75,7 @@ class NamespaceSet(dict):
try:
for k, v in ns_confs.get(ns, {}).items():
callable(k, v)
except:
except Exception:
# The exceptional case is handled here
no_exc = False
if exit is None:
@@ -120,7 +92,7 @@ class NamespaceSet(dict):
handler(k, v)

def __repr__(self):
return "%s.%s(%s)" % (self.__module__, self.__class__.__name__,
return '%s.%s(%s)' % (self.__module__, self.__class__.__name__,
dict.__repr__(self))

def __copy__(self):
@@ -154,16 +126,8 @@ class Config(dict):
dict.update(self, self.defaults)

def update(self, config):
"""Update self from a dict, file or filename."""
if isinstance(config, basestring):
# Filename
config = Parser().dict_from_file(config)
elif hasattr(config, 'read'):
# Open file object
config = Parser().dict_from_file(config)
else:
config = config.copy()
self._apply(config)
"""Update self from a dict, file, or filename."""
self._apply(Parser.load(config))

def _apply(self, config):
"""Update self from a dict."""
@@ -182,7 +146,7 @@ class Config(dict):
self.namespaces({k: v})


class Parser(ConfigParser):
class Parser(configparser.ConfigParser):

"""Sub-class of ConfigParser that keeps the case of options and that
raises an exception if the file cannot be read.
@@ -192,7 +156,7 @@ class Parser(ConfigParser):
return optionstr

def read(self, filenames):
if isinstance(filenames, basestring):
if isinstance(filenames, text_or_bytes):
filenames = [filenames]
for filename in filenames:
# try:
@@ -218,8 +182,8 @@ class Parser(ConfigParser):
value = unrepr(value)
except Exception:
x = sys.exc_info()[1]
msg = ("Config error in section: %r, option: %r, "
"value: %r. Config values must be valid Python." %
msg = ('Config error in section: %r, option: %r, '
'value: %r. Config values must be valid Python.' %
(section, option, value))
raise ValueError(msg, x.__class__.__name__, x.args)
result[section][option] = value
@@ -232,6 +196,17 @@ class Parser(ConfigParser):
self.read(file)
return self.as_dict()

@classmethod
def load(self, input):
"""Resolve 'input' to dict from a dict, file, or filename."""
is_file = (
# Filename
isinstance(input, text_or_bytes)
# Open file object
or hasattr(input, 'read')
)
return Parser().dict_from_file(input) if is_file else input.copy()
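# Editor's sketch, not part of the commit: a minimal usage example of the new
# Parser.load classmethod added above; the config dict is a hypothetical value.
# A str/bytes argument would be treated as a filename, and an object with a
# .read() method would be parsed as an open file.
from cherrypy.lib.reprconf import Parser

conf = Parser.load({'global': {'server.socket_port': 8080}})  # dict input: returned as a copy
assert conf == {'global': {'server.socket_port': 8080}}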


# public domain "unrepr" implementation, found on the web and then improved.

@@ -241,7 +216,7 @@ class _Builder2:
def build(self, o):
m = getattr(self, 'build_' + o.__class__.__name__, None)
if m is None:
raise TypeError("unrepr does not recognize %s" %
raise TypeError('unrepr does not recognize %s' %
repr(o.__class__.__name__))
return m(o)

@@ -254,7 +229,7 @@ class _Builder2:
# e.g. IronPython 1.0.
return eval(s)

p = compiler.parse("__tempvalue__ = " + s)
p = compiler.parse('__tempvalue__ = ' + s)
return p.getChildren()[1].getChildren()[0].getChildren()[1]

def build_Subscript(self, o):
@@ -279,7 +254,7 @@ class _Builder2:
if class_name == 'Keyword':
kwargs.update(self.build(child))
# Everything else becomes args
else :
else:
args.append(self.build(child))

return callee(*args, **kwargs)
@@ -327,7 +302,7 @@ class _Builder2:
except AttributeError:
pass

raise TypeError("unrepr could not resolve the name %s" % repr(name))
raise TypeError('unrepr could not resolve the name %s' % repr(name))

def build_Add(self, o):
left, right = map(self.build, o.getChildren())
@@ -356,7 +331,7 @@ class _Builder3:
def build(self, o):
m = getattr(self, 'build_' + o.__class__.__name__, None)
if m is None:
raise TypeError("unrepr does not recognize %s" %
raise TypeError('unrepr does not recognize %s' %
repr(o.__class__.__name__))
return m(o)

@@ -369,7 +344,7 @@ class _Builder3:
# e.g. IronPython 1.0.
return eval(s)

p = ast.parse("__tempvalue__ = " + s)
p = ast.parse('__tempvalue__ = ' + s)
return p.body[0].value

def build_Subscript(self, o):
@@ -394,16 +369,16 @@ class _Builder3:
args.append(self.build(a))
kwargs = {}
for kw in o.keywords:
if kw.arg is None: # double asterix `**`
if kw.arg is None: # double asterix `**`
rst = self.build(kw.value)
if not isinstance(rst, dict):
raise TypeError("Invalid argument for call."
"Must be a mapping object.")
raise TypeError('Invalid argument for call.'
'Must be a mapping object.')
# give preference to the keys set directly from arg=value
for k, v in rst.items():
if k not in kwargs:
kwargs[k] = v
else: # defined on the call as: arg=value
else: # defined on the call as: arg=value
kwargs[kw.arg] = self.build(kw.value)
return callee(*args, **kwargs)

@@ -427,7 +402,7 @@ class _Builder3:
kwargs = {}
else:
kwargs = self.build(o.kwargs)
if o.keywords is not None: # direct a=b keywords
if o.keywords is not None: # direct a=b keywords
for kw in o.keywords:
# preference because is a direct keyword against **kwargs
kwargs[kw.arg] = self.build(kw.value)
@@ -471,11 +446,13 @@ class _Builder3:
except AttributeError:
pass

raise TypeError("unrepr could not resolve the name %s" % repr(name))
raise TypeError('unrepr could not resolve the name %s' % repr(name))

def build_NameConstant(self, o):
return o.value

build_Constant = build_NameConstant # Python 3.8 change

def build_UnaryOp(self, o):
op, operand = map(self.build, [o.op, o.operand])
return op(operand)
@@ -485,13 +462,13 @@ class _Builder3:
return op(left, right)

def build_Add(self, o):
return _operator.add
return operator.add

def build_Mult(self, o):
return _operator.mul
return operator.mul

def build_USub(self, o):
return _operator.neg
return operator.neg

def build_Attribute(self, o):
parent = self.build(o.value)
@@ -523,7 +500,7 @@ def attributes(full_attribute_name):
"""Load a module and retrieve an attribute of that module."""

# Parse out the path, module, and attribute
last_dot = full_attribute_name.rfind(".")
last_dot = full_attribute_name.rfind('.')
attr_name = full_attribute_name[last_dot + 1:]
mod_path = full_attribute_name[:last_dot]


@@ -4,13 +4,13 @@ You need to edit your config file to use sessions. Here's an example::

[/]
tools.sessions.on = True
tools.sessions.storage_type = "file"
tools.sessions.storage_class = cherrypy.lib.sessions.FileSession
tools.sessions.storage_path = "/home/site/sessions"
tools.sessions.timeout = 60

This sets the session to be stored in files in the directory
/home/site/sessions, and the session timeout to 60 minutes. If you omit
``storage_type`` the sessions will be saved in RAM.
``storage_class``, the sessions will be saved in RAM.
``tools.sessions.on`` is the only required line for working sessions,
the rest are optional.

@@ -57,6 +57,17 @@ However, CherryPy "recognizes" a session id by looking up the saved session
data for that id. Therefore, if you never save any session data,
**you will get a new session id for every request**.

A side effect of CherryPy overwriting unrecognised session ids is that if you
have multiple, separate CherryPy applications running on a single domain (e.g.
on different ports), each app will overwrite the other's session id because by
default they use the same cookie name (``"session_id"``) but do not recognise
each others sessions. It is therefore a good idea to use a different name for
each, for example::

[/]
...
tools.sessions.name = "my_app_session_id"

================
Sharing Sessions
================
@@ -94,15 +105,24 @@ import datetime
import os
import time
import threading
import types
import binascii

import six
from six.moves import cPickle as pickle
import contextlib2

import zc.lockfile

import cherrypy
from cherrypy._cpcompat import copyitems, pickle, random20, unicodestr
from cherrypy.lib import httputil
from cherrypy.lib import lockfile
from cherrypy.lib import locking
from cherrypy.lib import is_iterator


if six.PY2:
FileNotFoundError = OSError


missing = object()


@@ -115,17 +135,19 @@ class Session(object):
id_observers = None
"A list of callbacks to which to pass new id's."

def _get_id(self):
@property
def id(self):
"""Return the current session id."""
return self._id

def _set_id(self, value):
@id.setter
def id(self, value):
self._id = value
for o in self.id_observers:
o(value)
id = property(_get_id, _set_id, doc="The current session ID.")

timeout = 60
"Number of minutes after which to delete session data."
'Number of minutes after which to delete session data.'

locked = False
"""
@@ -138,16 +160,16 @@ class Session(object):
automatically on the first attempt to access session data."""

clean_thread = None
"Class-level Monitor which calls self.clean_up."
'Class-level Monitor which calls self.clean_up.'

clean_freq = 5
"The poll rate for expired session cleanup in minutes."
'The poll rate for expired session cleanup in minutes.'

originalid = None
"The session id passed by the client. May be missing or unsafe."
'The session id passed by the client. May be missing or unsafe.'

missing = False
"True if the session requested by the client did not exist."
'True if the session requested by the client did not exist.'

regenerated = False
"""
@@ -155,7 +177,7 @@ class Session(object):
internal calls to regenerate the session id."""

debug = False
"If True, log debug information."
'If True, log debug information.'

# --------------------- Session management methods --------------------- #

@@ -182,7 +204,7 @@ class Session(object):
cherrypy.log('Expired or malicious session %r; '
'making a new one' % id, 'TOOLS.SESSIONS')
# Expired or malicious session. Make a new one.
# See https://bitbucket.org/cherrypy/cherrypy/issue/709.
# See https://github.com/cherrypy/cherrypy/issues/709.
self.id = None
self.missing = True
self._regenerate()
@@ -236,7 +258,7 @@ class Session(object):

def generate_id(self):
"""Return a new session id."""
return random20()
return binascii.hexlify(os.urandom(20)).decode('ascii')
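# Editor's sketch, not part of the commit: the replacement generator above
# draws 20 random bytes from os.urandom and hex-encodes them, which yields a
# 40-character lowercase-hex session id.
import binascii
import os

session_id = binascii.hexlify(os.urandom(20)).decode('ascii')
assert len(session_id) == 40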

def save(self):
"""Save session data."""
@@ -335,13 +357,6 @@ class Session(object):
self.load()
return key in self._data

if hasattr({}, 'has_key'):
def has_key(self, key):
"""D.has_key(k) -> True if D has a key k, else False."""
if not self.loaded:
self.load()
return key in self._data

def get(self, key, default=None):
"""D.get(k[,d]) -> D[k] if k in D, else d. d defaults to None."""
if not self.loaded:
@@ -395,7 +410,7 @@ class RamSession(Session):
"""Clean up expired sessions."""

now = self.now()
for _id, (data, expiration_time) in copyitems(self.cache):
for _id, (data, expiration_time) in list(six.iteritems(self.cache)):
if expiration_time <= now:
try:
del self.cache[_id]
@@ -410,7 +425,11 @@ class RamSession(Session):

# added to remove obsolete lock objects
for _id in list(self.locks):
if _id not in self.cache and self.locks[_id].acquire(blocking=False):
locked = (
_id not in self.cache
and self.locks[_id].acquire(blocking=False)
)
if locked:
lock = self.locks.pop(_id)
lock.release()

@@ -471,9 +490,11 @@ class FileSession(Session):
if isinstance(self.lock_timeout, (int, float)):
self.lock_timeout = datetime.timedelta(seconds=self.lock_timeout)
if not isinstance(self.lock_timeout, (datetime.timedelta, type(None))):
raise ValueError("Lock timeout must be numeric seconds or "
"a timedelta instance.")
raise ValueError(
'Lock timeout must be numeric seconds or a timedelta instance.'
)

@classmethod
def setup(cls, **kwargs):
"""Set up the storage system for file-based sessions.

@@ -485,12 +506,11 @@ class FileSession(Session):

for k, v in kwargs.items():
setattr(cls, k, v)
setup = classmethod(setup)

def _get_file_path(self):
f = os.path.join(self.storage_path, self.SESSION_PREFIX + self.id)
if not os.path.abspath(f).startswith(self.storage_path):
raise cherrypy.HTTPError(400, "Invalid session id in cookie.")
raise cherrypy.HTTPError(400, 'Invalid session id in cookie.')
return f

def _exists(self):
@@ -498,12 +518,12 @@ class FileSession(Session):
return os.path.exists(path)

def _load(self, path=None):
assert self.locked, ("The session load without being locked. "
assert self.locked, ('The session load without being locked. '
"Check your tools' priority levels.")
if path is None:
path = self._get_file_path()
try:
f = open(path, "rb")
f = open(path, 'rb')
try:
return pickle.load(f)
finally:
@@ -511,21 +531,21 @@ class FileSession(Session):
except (IOError, EOFError):
e = sys.exc_info()[1]
if self.debug:
cherrypy.log("Error loading the session pickle: %s" %
cherrypy.log('Error loading the session pickle: %s' %
e, 'TOOLS.SESSIONS')
return None

def _save(self, expiration_time):
assert self.locked, ("The session was saved without being locked. "
assert self.locked, ('The session was saved without being locked. '
"Check your tools' priority levels.")
f = open(self._get_file_path(), "wb")
f = open(self._get_file_path(), 'wb')
try:
pickle.dump((self._data, expiration_time), f, self.pickle_protocol)
finally:
f.close()

def _delete(self):
assert self.locked, ("The session deletion without being locked. "
assert self.locked, ('The session deletion without being locked. '
"Check your tools' priority levels.")
try:
os.unlink(self._get_file_path())
@@ -540,8 +560,8 @@ class FileSession(Session):
checker = locking.LockChecker(self.id, self.lock_timeout)
while not checker.expired():
try:
self.lock = lockfile.LockFile(path)
except lockfile.LockError:
self.lock = zc.lockfile.LockFile(path)
except zc.lockfile.LockError:
time.sleep(0.1)
else:
break
@@ -551,8 +571,9 @@ class FileSession(Session):

def release_lock(self, path=None):
"""Release the lock on the currently-loaded session data."""
self.lock.release()
self.lock.remove()
self.lock.close()
with contextlib2.suppress(FileNotFoundError):
os.remove(self.lock._path)
self.locked = False

def clean_up(self):
@@ -560,8 +581,11 @@ class FileSession(Session):
now = self.now()
# Iterate over all session files in self.storage_path
for fname in os.listdir(self.storage_path):
if (fname.startswith(self.SESSION_PREFIX)
and not fname.endswith(self.LOCK_SUFFIX)):
have_session = (
fname.startswith(self.SESSION_PREFIX)
and not fname.endswith(self.LOCK_SUFFIX)
)
if have_session:
# We have a session file: lock and load it and check
# if it's expired. If it fails, nevermind.
path = os.path.join(self.storage_path, fname)
@@ -587,95 +611,8 @@ class FileSession(Session):
def __len__(self):
"""Return the number of active sessions."""
return len([fname for fname in os.listdir(self.storage_path)
if (fname.startswith(self.SESSION_PREFIX)
and not fname.endswith(self.LOCK_SUFFIX))])


class PostgresqlSession(Session):

""" Implementation of the PostgreSQL backend for sessions. It assumes
a table like this::

create table session (
id varchar(40),
data text,
expiration_time timestamp
)

You must provide your own get_db function.
"""

pickle_protocol = pickle.HIGHEST_PROTOCOL

def __init__(self, id=None, **kwargs):
Session.__init__(self, id, **kwargs)
self.cursor = self.db.cursor()

def setup(cls, **kwargs):
"""Set up the storage system for Postgres-based sessions.

This should only be called once per process; this will be done
automatically when using sessions.init (as the built-in Tool does).
"""
for k, v in kwargs.items():
setattr(cls, k, v)

self.db = self.get_db()
setup = classmethod(setup)

def __del__(self):
if self.cursor:
self.cursor.close()
self.db.commit()

def _exists(self):
# Select session data from table
self.cursor.execute('select data, expiration_time from session '
'where id=%s', (self.id,))
rows = self.cursor.fetchall()
return bool(rows)

def _load(self):
# Select session data from table
self.cursor.execute('select data, expiration_time from session '
'where id=%s', (self.id,))
rows = self.cursor.fetchall()
if not rows:
return None

pickled_data, expiration_time = rows[0]
data = pickle.loads(pickled_data)
return data, expiration_time

def _save(self, expiration_time):
pickled_data = pickle.dumps(self._data, self.pickle_protocol)
self.cursor.execute('update session set data = %s, '
'expiration_time = %s where id = %s',
(pickled_data, expiration_time, self.id))

def _delete(self):
self.cursor.execute('delete from session where id=%s', (self.id,))

def acquire_lock(self):
"""Acquire an exclusive lock on the currently-loaded session data."""
# We use the "for update" clause to lock the row
self.locked = True
self.cursor.execute('select id from session where id=%s for update',
(self.id,))
if self.debug:
cherrypy.log('Lock acquired.', 'TOOLS.SESSIONS')

def release_lock(self):
"""Release the lock on the currently-loaded session data."""
# We just close the cursor and that will remove the lock
# introduced by the "for update" clause
self.cursor.close()
self.locked = False

def clean_up(self):
"""Clean up expired sessions."""
self.cursor.execute('delete from session where expiration_time < %s',
(self.now(),))
if (fname.startswith(self.SESSION_PREFIX) and
not fname.endswith(self.LOCK_SUFFIX))])


class MemcachedSession(Session):
@@ -684,11 +621,12 @@ class MemcachedSession(Session):
# Wrap all .get and .set operations in a single lock.
mc_lock = threading.RLock()

# This is a seperate set of locks per session id.
# This is a separate set of locks per session id.
locks = {}

servers = ['127.0.0.1:11211']

@classmethod
def setup(cls, **kwargs):
"""Set up the storage system for memcached-based sessions.

@@ -700,21 +638,6 @@ class MemcachedSession(Session):

import memcache
cls.cache = memcache.Client(cls.servers)
setup = classmethod(setup)

def _get_id(self):
return self._id

def _set_id(self, value):
# This encode() call is where we differ from the superclass.
# Memcache keys MUST be byte strings, not unicode.
if isinstance(value, unicodestr):
value = value.encode('utf-8')

self._id = value
for o in self.id_observers:
o(value)
id = property(_get_id, _set_id, doc="The current session ID.")

def _exists(self):
self.mc_lock.acquire()
@@ -737,7 +660,7 @@ class MemcachedSession(Session):
try:
if not self.cache.set(self.id, (self._data, expiration_time), td):
raise AssertionError(
"Session data for id %r not set." % self.id)
'Session data for id %r not set.' % self.id)
finally:
self.mc_lock.release()

@@ -766,13 +689,13 @@ class MemcachedSession(Session):
def save():
"""Save any changed session data."""

if not hasattr(cherrypy.serving, "session"):
if not hasattr(cherrypy.serving, 'session'):
return
request = cherrypy.serving.request
response = cherrypy.serving.response

# Guard against running twice
if hasattr(request, "_sessionsaved"):
if hasattr(request, '_sessionsaved'):
return
request._sessionsaved = True

@@ -786,28 +709,39 @@ def save():
if is_iterator(response.body):
response.collapse_body()
cherrypy.session.save()


save.failsafe = True


def close():
"""Close the session object for this request."""
sess = getattr(cherrypy.serving, "session", None)
if getattr(sess, "locked", False):
sess = getattr(cherrypy.serving, 'session', None)
if getattr(sess, 'locked', False):
# If the session is still locked we release the lock
sess.release_lock()
if sess.debug:
cherrypy.log('Lock released on close.', 'TOOLS.SESSIONS')


close.failsafe = True
close.priority = 90


def init(storage_type='ram', path=None, path_header=None, name='session_id',
def init(storage_type=None, path=None, path_header=None, name='session_id',
timeout=60, domain=None, secure=False, clean_freq=5,
persistent=True, httponly=False, debug=False, **kwargs):
persistent=True, httponly=False, debug=False,
# Py27 compat
# *, storage_class=RamSession,
**kwargs):
"""Initialize session object (using cookies).

storage_class
The Session subclass to use. Defaults to RamSession.

storage_type
One of 'ram', 'file', 'postgresql', 'memcached'. This will be
(deprecated)
One of 'ram', 'file', memcached'. This will be
used to look up the corresponding class in cherrypy.lib.sessions
globals. For example, 'file' will use the FileSession class.
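# Editor's sketch, not part of the commit: with storage_type deprecated, the
# new-style configuration passes a Session subclass via storage_class, as the
# module docstring above shows; the storage_path value is a hypothetical example.
import cherrypy
import cherrypy.lib.sessions

cherrypy.config.update({
    'tools.sessions.on': True,
    'tools.sessions.storage_class': cherrypy.lib.sessions.FileSession,
    'tools.sessions.storage_path': '/home/site/sessions',
})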

@@ -851,10 +785,13 @@ def init(storage_type='ram', path=None, path_header=None, name='session_id',
you're using for more information.
"""

# Py27 compat
storage_class = kwargs.pop('storage_class', RamSession)

request = cherrypy.serving.request

# Guard against running twice
if hasattr(request, "_session_init_flag"):
if hasattr(request, '_session_init_flag'):
return
request._session_init_flag = True

@@ -866,11 +803,18 @@ def init(storage_type='ram', path=None, path_header=None, name='session_id',
cherrypy.log('ID obtained from request.cookie: %r' % id,
'TOOLS.SESSIONS')

# Find the storage class and call setup (first time only).
storage_class = storage_type.title() + 'Session'
storage_class = globals()[storage_class]
if not hasattr(cherrypy, "session"):
if hasattr(storage_class, "setup"):
first_time = not hasattr(cherrypy, 'session')

if storage_type:
if first_time:
msg = 'storage_type is deprecated. Supply storage_class instead'
cherrypy.log(msg)
storage_class = storage_type.title() + 'Session'
storage_class = globals()[storage_class]

# call setup first time only
if first_time:
if hasattr(storage_class, 'setup'):
storage_class.setup(**kwargs)

# Create and attach a new Session instance to cherrypy.serving.
@@ -887,7 +831,7 @@ def init(storage_type='ram', path=None, path_header=None, name='session_id',
sess.id_observers.append(update_cookie)

# Create cherrypy.session which will proxy to cherrypy.serving.session
if not hasattr(cherrypy, "session"):
if not hasattr(cherrypy, 'session'):
cherrypy.session = cherrypy._ThreadLocalProxy('session')

if persistent:
@@ -941,24 +885,30 @@ def set_response_cookie(path=None, path_header=None, name='session_id',
'/'
)

# We'd like to use the "max-age" param as indicated in
# http://www.faqs.org/rfcs/rfc2109.html but IE doesn't
# save it to disk and the session is lost if people close
# the browser. So we have to use the old "expires" ... sigh ...
## cookie[name]['max-age'] = timeout * 60
if timeout:
e = time.time() + (timeout * 60)
cookie[name]['expires'] = httputil.HTTPDate(e)
cookie[name]['max-age'] = timeout * 60
_add_MSIE_max_age_workaround(cookie[name], timeout)
if domain is not None:
cookie[name]['domain'] = domain
if secure:
cookie[name]['secure'] = 1
if httponly:
if not cookie[name].isReservedKey('httponly'):
raise ValueError("The httponly cookie token is not supported.")
raise ValueError('The httponly cookie token is not supported.')
cookie[name]['httponly'] = 1


def _add_MSIE_max_age_workaround(cookie, timeout):
"""
We'd like to use the "max-age" param as indicated in
http://www.faqs.org/rfcs/rfc2109.html but IE doesn't
save it to disk and the session is lost if people close
the browser. So we have to use the old "expires" ... sigh ...
"""
expires = time.time() + timeout * 60
cookie['expires'] = httputil.HTTPDate(expires)


def expire():
"""Expire the current session cookie."""
name = cherrypy.serving.request.config.get(
@@ -966,3 +916,4 @@ def expire():
one_year = 60 * 60 * 24 * 365
e = time.time() - one_year
cherrypy.serving.response.cookie[name]['expires'] = httputil.HTTPDate(e)
cherrypy.serving.response.cookie[name].pop('max-age', None)

@@ -1,23 +1,32 @@
"""Module with helpers for serving static files."""

import os
import platform
import re
import stat
import mimetypes

try:
from io import UnsupportedOperation
except ImportError:
UnsupportedOperation = object()
from email.generator import _make_boundary as make_boundary
from io import UnsupportedOperation

from six.moves import urllib

import cherrypy
from cherrypy._cpcompat import ntob, unquote
from cherrypy._cpcompat import ntob
from cherrypy.lib import cptools, httputil, file_generator_limited


mimetypes.init()
mimetypes.types_map['.dwg'] = 'image/x-dwg'
mimetypes.types_map['.ico'] = 'image/x-icon'
mimetypes.types_map['.bz2'] = 'application/x-bzip2'
mimetypes.types_map['.gz'] = 'application/x-gzip'
def _setup_mimetypes():
"""Pre-initialize global mimetype map."""
if not mimetypes.inited:
mimetypes.init()
mimetypes.types_map['.dwg'] = 'image/x-dwg'
mimetypes.types_map['.ico'] = 'image/x-icon'
mimetypes.types_map['.bz2'] = 'application/x-bzip2'
mimetypes.types_map['.gz'] = 'application/x-gzip'


_setup_mimetypes()


def serve_file(path, content_type=None, disposition=None, name=None,
@@ -33,7 +42,6 @@ def serve_file(path, content_type=None, disposition=None, name=None,
to the basename of path. If disposition is None, no Content-Disposition
header will be written.
"""

response = cherrypy.serving.response

# If path is relative, users should fix it by making path absolute.
@@ -71,7 +79,7 @@ def serve_file(path, content_type=None, disposition=None, name=None,

if content_type is None:
# Set content-type based on filename extension
ext = ""
ext = ''
i = path.rfind('.')
if i != -1:
ext = path[i:].lower()
@@ -86,7 +94,7 @@ def serve_file(path, content_type=None, disposition=None, name=None,
if name is None:
name = os.path.basename(path)
cd = '%s; filename="%s"' % (disposition, name)
response.headers["Content-Disposition"] = cd
response.headers['Content-Disposition'] = cd
if debug:
cherrypy.log('Content-Disposition: %r' % cd, 'TOOLS.STATIC')

@@ -115,7 +123,6 @@ def serve_fileobj(fileobj, content_type=None, disposition=None, name=None,
serve_fileobj(), expecting that the data would be served starting from that
position.
"""

response = cherrypy.serving.response

try:
@@ -144,7 +151,7 @@ def serve_fileobj(fileobj, content_type=None, disposition=None, name=None,
cd = disposition
else:
cd = '%s; filename="%s"' % (disposition, name)
response.headers["Content-Disposition"] = cd
response.headers['Content-Disposition'] = cd
if debug:
cherrypy.log('Content-Disposition: %r' % cd, 'TOOLS.STATIC')

@@ -158,12 +165,12 @@ def _serve_fileobj(fileobj, content_type, content_length, debug=False):
# HTTP/1.0 didn't have Range/Accept-Ranges headers, or the 206 code
request = cherrypy.serving.request
if request.protocol >= (1, 1):
response.headers["Accept-Ranges"] = "bytes"
response.headers['Accept-Ranges'] = 'bytes'
r = httputil.get_ranges(request.headers.get('Range'), content_length)
if r == []:
response.headers['Content-Range'] = "bytes */%s" % content_length
message = ("Invalid Range (first-byte-pos greater than "
"Content-Length)")
response.headers['Content-Range'] = 'bytes */%s' % content_length
message = ('Invalid Range (first-byte-pos greater than '
'Content-Length)')
if debug:
cherrypy.log(message, 'TOOLS.STATIC')
raise cherrypy.HTTPError(416, message)
@@ -179,31 +186,25 @@ def _serve_fileobj(fileobj, content_type, content_length, debug=False):
cherrypy.log(
'Single part; start: %r, stop: %r' % (start, stop),
'TOOLS.STATIC')
response.status = "206 Partial Content"
response.status = '206 Partial Content'
response.headers['Content-Range'] = (
"bytes %s-%s/%s" % (start, stop - 1, content_length))
'bytes %s-%s/%s' % (start, stop - 1, content_length))
response.headers['Content-Length'] = r_len
fileobj.seek(start)
response.body = file_generator_limited(fileobj, r_len)
else:
# Return a multipart/byteranges response.
response.status = "206 Partial Content"
try:
# Python 3
from email.generator import _make_boundary as make_boundary
except ImportError:
# Python 2
from mimetools import choose_boundary as make_boundary
response.status = '206 Partial Content'
boundary = make_boundary()
ct = "multipart/byteranges; boundary=%s" % boundary
ct = 'multipart/byteranges; boundary=%s' % boundary
response.headers['Content-Type'] = ct
if "Content-Length" in response.headers:
if 'Content-Length' in response.headers:
# Delete Content-Length header so finalize() recalcs it.
del response.headers["Content-Length"]
del response.headers['Content-Length']

def file_ranges():
# Apache compatibility:
yield ntob("\r\n")
yield b'\r\n'

for start, stop in r:
if debug:
@@ -211,23 +212,23 @@ def _serve_fileobj(fileobj, content_type, content_length, debug=False):
'Multipart; start: %r, stop: %r' % (
start, stop),
'TOOLS.STATIC')
yield ntob("--" + boundary, 'ascii')
yield ntob("\r\nContent-type: %s" % content_type,
yield ntob('--' + boundary, 'ascii')
yield ntob('\r\nContent-type: %s' % content_type,
'ascii')
yield ntob(
"\r\nContent-range: bytes %s-%s/%s\r\n\r\n" % (
'\r\nContent-range: bytes %s-%s/%s\r\n\r\n' % (
start, stop - 1, content_length),
'ascii')
fileobj.seek(start)
gen = file_generator_limited(fileobj, stop - start)
for chunk in gen:
yield chunk
yield ntob("\r\n")
yield b'\r\n'
# Final boundary
yield ntob("--" + boundary + "--", 'ascii')
yield ntob('--' + boundary + '--', 'ascii')

# Apache compatibility:
yield ntob("\r\n")
yield b'\r\n'
response.body = file_ranges()
return response.body
else:
@@ -244,7 +245,7 @@ def _serve_fileobj(fileobj, content_type, content_length, debug=False):
def serve_download(path, name=None):
"""Serve 'path' as an application/x-download attachment."""
# This is such a common idiom I felt it deserved its own wrapper.
return serve_file(path, "application/x-download", "attachment", name)
return serve_file(path, 'application/x-download', 'attachment', name)


def _attempt(filename, content_types, debug=False):
@@ -268,7 +269,7 @@ def _attempt(filename, content_types, debug=False):
return False


def staticdir(section, dir, root="", match="", content_types=None, index="",
def staticdir(section, dir, root='', match='', content_types=None, index='',
debug=False):
"""Serve a static resource from the given (root +) dir.

@@ -306,7 +307,7 @@ def staticdir(section, dir, root="", match="", content_types=None, index="",
# If dir is relative, make absolute using "root".
if not os.path.isabs(dir):
if not root:
msg = "Static dir requires an absolute dir (or root)."
msg = 'Static dir requires an absolute dir (or root).'
if debug:
cherrypy.log(msg, 'TOOLS.STATICDIR')
raise ValueError(msg)
@@ -315,10 +316,18 @@ def staticdir(section, dir, root="", match="", content_types=None, index="",
# Determine where we are in the object tree relative to 'section'
# (where the static tool was defined).
if section == 'global':
section = "/"
section = section.rstrip(r"\/")
section = '/'
section = section.rstrip(r'\/')
branch = request.path_info[len(section) + 1:]
branch = unquote(branch.lstrip(r"\/"))
branch = urllib.parse.unquote(branch.lstrip(r'\/'))

# Requesting a file in sub-dir of the staticdir results
# in mixing of delimiter styles, e.g. C:\static\js/script.js.
# Windows accepts this form except not when the path is
# supplied in extended-path notation, e.g. \\?\C:\static\js/script.js.
# http://bit.ly/1vdioCX
if platform.system() == 'Windows':
branch = branch.replace('/', '\\')

# If branch is "", filename will end in a slash
filename = os.path.join(dir, branch)
@@ -338,11 +347,11 @@ def staticdir(section, dir, root="", match="", content_types=None, index="",
if index:
handled = _attempt(os.path.join(filename, index), content_types)
if handled:
request.is_index = filename[-1] in (r"\/")
request.is_index = filename[-1] in (r'\/')
return handled


def staticfile(filename, root=None, match="", content_types=None, debug=False):
def staticfile(filename, root=None, match='', content_types=None, debug=False):
"""Serve a static resource from the given (root +) filename.

match

@@ -1,21 +1,19 @@
"""XML-RPC tool helpers."""
import sys

from six.moves.xmlrpc_client import (
loads as xmlrpc_loads, dumps as xmlrpc_dumps,
Fault as XMLRPCFault
)

import cherrypy
from cherrypy._cpcompat import ntob


def get_xmlrpclib():
try:
import xmlrpc.client as x
except ImportError:
import xmlrpclib as x
return x


def process_body():
"""Return (params, method) from request body."""
try:
return get_xmlrpclib().loads(cherrypy.request.body.read())
return xmlrpc_loads(cherrypy.request.body.read())
except Exception:
return ('ERROR PARAMS', ), 'ERRORMETHOD'

@@ -31,9 +29,10 @@ def patched_path(path):


def _set_response(body):
"""Set up HTTP status, headers and body within CherryPy."""
# The XML-RPC spec (http://www.xmlrpc.com/spec) says:
# "Unless there's a lower-level error, always return 200 OK."
# Since Python's xmlrpclib interprets a non-200 response
# Since Python's xmlrpc_client interprets a non-200 response
# as a "Protocol Error", we'll just return 200 every time.
response = cherrypy.response
response.status = '200 OK'
@@ -43,15 +42,20 @@ def _set_response(body):


def respond(body, encoding='utf-8', allow_none=0):
xmlrpclib = get_xmlrpclib()
if not isinstance(body, xmlrpclib.Fault):
"""Construct HTTP response body."""
if not isinstance(body, XMLRPCFault):
body = (body,)
_set_response(xmlrpclib.dumps(body, methodresponse=1,
encoding=encoding,
allow_none=allow_none))

_set_response(
xmlrpc_dumps(
body, methodresponse=1,
encoding=encoding,
allow_none=allow_none
)
)


def on_error(*args, **kwargs):
"""Construct HTTP response body for an error response."""
body = str(sys.exc_info()[1])
xmlrpclib = get_xmlrpclib()
_set_response(xmlrpclib.dumps(xmlrpclib.Fault(1, body)))
_set_response(xmlrpc_dumps(XMLRPCFault(1, body)))