Source code for gluon.globals

# -*- coding: utf-8 -*-

"""
| This file is part of the web2py Web Framework
| Copyrighted by Massimo Di Pierro <mdipierro@cs.depaul.edu>
| License: LGPLv3 (http://www.gnu.org/licenses/lgpl.html)

Contains the classes for the globally used variables:

- Request
- Response
- Session

"""
from gluon._compat import pickle, BytesIO, StringIO, copyreg, Cookie, urlparse, PY2, iteritems, to_unicode, \
    to_native, to_bytes, unicodeT, long, hashlib_md5, urllib_quote
from gluon.storage import Storage, List
from gluon.streamer import streamer, stream_file_or_304_or_206, DEFAULT_CHUNK_SIZE
from gluon.contenttype import contenttype
from gluon.html import xmlescape, TABLE, TR, PRE, URL
from gluon.http import HTTP, redirect
from gluon.fileutils import up
from gluon.serializers import json, custom_json
import gluon.settings as settings
from gluon.utils import web2py_uuid, secure_dumps, secure_loads
from gluon.settings import global_settings
from gluon import recfile
from gluon.cache import CacheInRam
import hashlib
from pydal.contrib import portalocker
from pickle import Pickler, MARK, DICT, EMPTY_DICT
# from types import DictionaryType
import datetime
import re
import os
import sys
import traceback
import threading
import cgi
import copy
import tempfile
import json as json_parser


FMT = '%a, %d-%b-%Y %H:%M:%S PST'
PAST = 'Sat, 1-Jan-1971 00:00:00'
FUTURE = 'Tue, 1-Dec-2999 23:59:59'

try:
    # FIXME PY3
    from gluon.contrib.minify import minify
    have_minify = True
except ImportError:
    have_minify = False


__all__ = ['Request', 'Response', 'Session']

current = threading.local()  # thread-local storage for request-scope globals

css_template = '<link href="%s" rel="stylesheet" type="text/css" />'
js_template = '<script src="%s" type="text/javascript"></script>'
coffee_template = '<script src="%s" type="text/coffee"></script>'
typescript_template = '<script src="%s" type="text/typescript"></script>'
less_template = '<link href="%s" rel="stylesheet/less" type="text/css" />'
css_inline = '<style type="text/css">\n%s\n</style>'
js_inline = '<script type="text/javascript">\n%s\n</script>'

template_mapping = {
    'css': css_template,
    'js': js_template,
    'coffee': coffee_template,
    'ts': typescript_template,
    'less': less_template,
    'css:inline': css_inline,
    'js:inline': js_inline
}


# IMPORTANT:
# this is required so that pickled dict(s) and class.__dict__
# are sorted and web2py can detect without ambiguity when a session changes
class SortingPickler(Pickler):
    def save_dict(self, obj):
        self.write(EMPTY_DICT if self.bin else MARK + DICT)
        self.memoize(obj)
        self._batch_setitems([(key, obj[key]) for key in sorted(obj)])

if PY2:
    SortingPickler.dispatch = copy.copy(Pickler.dispatch)
    SortingPickler.dispatch[dict] = SortingPickler.save_dict
else:
    SortingPickler.dispatch_table = copyreg.dispatch_table.copy()
    SortingPickler.dispatch_table[dict] = SortingPickler.save_dict


def sorting_dumps(obj, protocol=None):
    file = StringIO()
    SortingPickler(file, protocol).dump(obj)
    return file.getvalue()
# END #####################################################################
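# Illustrative sketch (not part of the original module): SortingPickler exists
# so that dict key order cannot influence the pickled bytes, letting web2py
# detect without ambiguity when a session changes. A minimal check:
#
#     from gluon.globals import sorting_dumps
#     assert sorting_dumps({'x': 1, 'y': 2}) == sorting_dumps({'y': 2, 'x': 1})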


def copystream(src, dest, size, chunk_size, cache_inc=None):
    while size > 0:
        if size < chunk_size:
            data = src.read(size)
            callable(cache_inc) and cache_inc(size)
        else:
            data = src.read(chunk_size)
            callable(cache_inc) and cache_inc(chunk_size)
        length = len(data)
        if length > size:
            (data, length) = (data[:size], size)
        size -= length
        if length == 0:
            break
        dest.write(data)
        if length < chunk_size:
            break
    dest.seek(0)
    return

def copystream_progress(request, chunk_size=10 ** 5):
    """
    Copies request.env.wsgi_input into request.body
    and stores upload progress status in cache_ram under the keys
    X-Progress-ID:length and X-Progress-ID:uploaded
    """
    env = request.env
    if not env.get('CONTENT_LENGTH', None):
        return BytesIO()
    source = env['wsgi.input']
    try:
        size = int(env['CONTENT_LENGTH'])
    except ValueError:
        raise HTTP(400, "Invalid Content-Length header")
    try:  # Android requires this
        dest = tempfile.NamedTemporaryFile()
    except NotImplementedError:  # and GAE this
        dest = tempfile.TemporaryFile()
    if 'X-Progress-ID' not in request.get_vars:
        copystream(source, dest, size, chunk_size)
        return dest
    cache_key = 'X-Progress-ID:' + request.get_vars['X-Progress-ID']
    cache_ram = CacheInRam(request)  # same as cache.ram because meta_storage
    cache_ram(cache_key + ':length', lambda: size, 0)
    cache_ram(cache_key + ':uploaded', lambda: 0, 0)
    copystream(source, dest, size, chunk_size,
               lambda v : cache_ram.increment(cache_key + ':uploaded', v))
    cache_ram(cache_key + ':length', None)
    cache_ram(cache_key + ':uploaded', None)
    return dest
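

# Illustrative sketch (not part of the original module): when a request carries
# ?X-Progress-ID=<token>, the copy above keeps two counters in cache ram that a
# separate action could poll to report upload progress. A hypothetical
# controller action (assuming the standard cache.ram object is available):
#
#     def upload_progress():
#         key = 'X-Progress-ID:' + request.get_vars['X-Progress-ID']
#         length = cache.ram(key + ':length', lambda: 0, None)
#         uploaded = cache.ram(key + ':uploaded', lambda: 0, None)
#         return response.json(dict(length=length, uploaded=uploaded))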


class Request(Storage):

    """
    Defines the request object and the default values of its members

    - env: environment variables, by gluon.main.wsgibase()
    - cookies
    - get_vars
    - post_vars
    - vars
    - folder
    - application
    - function
    - method
    - args
    - extension
    - now: datetime.datetime.now()
    - utcnow : datetime.datetime.utcnow()
    - is_local
    - is_https
    - restful()
    """

    def __init__(self, env):
        Storage.__init__(self)
        self.env = Storage(env)
        self.env.web2py_path = global_settings.applications_parent
        self.env.update(global_settings)
        self.cookies = Cookie.SimpleCookie()
        self.method = self.env.get('REQUEST_METHOD')
        self._get_vars = None
        self._post_vars = None
        self._vars = None
        self._body = None
        self.folder = None
        self.application = None
        self.function = None
        self.args = List()
        self.extension = 'html'
        self.now = datetime.datetime.now()
        self.utcnow = datetime.datetime.utcnow()
        self.is_restful = False
        self.is_https = False
        self.is_local = False
        self.global_settings = settings.global_settings
        self._uuid = None
    def parse_get_vars(self):
        """Takes the QUERY_STRING and unpacks it to get_vars
        """
        query_string = self.env.get('query_string', '')
        dget = urlparse.parse_qs(query_string, keep_blank_values=1)
        # Ref: https://docs.python.org/2/library/cgi.html#cgi.parse_qs
        get_vars = self._get_vars = Storage(dget)
        for (key, value) in iteritems(get_vars):
            if isinstance(value, list) and len(value) == 1:
                get_vars[key] = value[0]
    def parse_post_vars(self):
        """Takes the body of the request and unpacks it into
        post_vars. application/json is also automatically parsed
        """
        env = self.env
        post_vars = self._post_vars = Storage()
        body = self.body
        # if content-type is application/json, we must read the body
        is_json = env.get('content_type', '')[:16] == 'application/json'

        if is_json:
            try:
                # In Python 3 versions prior to 3.6 load doesn't accept bytes and
                # bytearray, so we read the body convert to native and use loads
                # instead of load.
                # This line can be simplified to json_vars = json_parser.load(body)
                # if and when we drop support for python versions under 3.6
                json_vars = json_parser.loads(to_native(body.read()))
            except:
                # incoherent request bodies can still be parsed "ad-hoc"
                json_vars = {}
                pass
            # update vars and get_vars with what was posted as json
            if isinstance(json_vars, dict):
                post_vars.update(json_vars)

            body.seek(0)

        # parse POST variables on POST, PUT, BOTH only in post_vars
        if body and not is_json and env.request_method in ('POST', 'PUT', 'DELETE', 'BOTH'):
            query_string = env.pop('QUERY_STRING', None)
            content_disposition = env.get('HTTP_CONTENT_DISPOSITION')
            if content_disposition:
                headers = {'content-disposition': content_disposition,
                           'content-type': env['CONTENT_TYPE'],
                           'content-length': env['CONTENT_LENGTH'],
                           }
            else:
                headers = None
            dpost = cgi.FieldStorage(fp=body, environ=env, headers=headers,
                                     keep_blank_values=1)
            try:
                post_vars.update(dpost)
            except:
                pass
            if query_string is not None:
                env['QUERY_STRING'] = query_string
            # The same detection used by FieldStorage to detect multipart POSTs
            body.seek(0)

            def listify(a):
                return (not isinstance(a, list) and [a]) or a
            try:
                keys = sorted(dpost)
            except TypeError:
                keys = []
            for key in keys:
                if key is None:
                    continue  # not sure why cgi.FieldStorage returns None key
                dpk = dpost[key]
                # if an element is not a file replace it with
                # its value else leave it alone
                pvalue = listify([(_dpk if _dpk.filename else _dpk.value)
                                  for _dpk in dpk]
                                 if isinstance(dpk, list) else
                                 (dpk if dpk.filename else dpk.value))
                if len(pvalue):
                    post_vars[key] = (len(pvalue) > 1 and pvalue) or pvalue[0]
    @property
    def body(self):
        if self._body is None:
            try:
                self._body = copystream_progress(self)
            except IOError:
                raise HTTP(400, "Bad Request - HTTP body is incomplete")
        return self._body
    def parse_all_vars(self):
        """Merges get_vars and post_vars to vars
        """
        self._vars = copy.copy(self.get_vars)
        for key, value in iteritems(self.post_vars):
            if key not in self._vars:
                self._vars[key] = value
            else:
                if not isinstance(self._vars[key], list):
                    self._vars[key] = [self._vars[key]]
                self._vars[key] += value if isinstance(value, list) else [value]
    @property
    def get_vars(self):
        """Lazily parses the query string into get_vars
        """
        if self._get_vars is None:
            self.parse_get_vars()
        return self._get_vars

    @property
    def post_vars(self):
        """Lazily parses the body into post_vars
        """
        if self._post_vars is None:
            self.parse_post_vars()
        return self._post_vars

    @property
    def vars(self):
        """Lazily parses all get_vars and post_vars to fill vars
        """
        if self._vars is None:
            self.parse_all_vars()
        return self._vars

    @property
    def uuid(self):
        """Lazily computes the request uuid
        """
        if self._uuid is None:
            self.compute_uuid()
        return self._uuid
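    # Illustrative note (not part of the original module): get_vars comes from
    # the query string, post_vars from the request body, and vars merges the
    # two; when a key appears in both, parse_all_vars collects the values into
    # a list. For example, a POST to /app/default/index?a=1 carrying a form
    # field a=2 gives request.get_vars.a == '1', request.post_vars.a == '2'
    # and request.vars.a == ['1', '2'].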
    def compute_uuid(self):
        self._uuid = '%s/%s.%s.%s' % (
            self.application,
            self.client.replace(':', '_'),
            self.now.strftime('%Y-%m-%d.%H-%M-%S'),
            web2py_uuid())
        return self._uuid
    def user_agent(self):
        from gluon.contrib import user_agent_parser
        session = current.session
        user_agent = session._user_agent
        if user_agent:
            return user_agent
        http_user_agent = self.env.http_user_agent or ''
        user_agent = user_agent_parser.detect(http_user_agent)
        for key, value in user_agent.items():
            if isinstance(value, dict):
                user_agent[key] = Storage(value)
        user_agent = Storage(user_agent)
        user_agent.is_mobile = 'Mobile' in http_user_agent
        user_agent.is_tablet = 'Tablet' in http_user_agent
        session._user_agent = user_agent
        return user_agent
    def requires_https(self):
        """
        If request comes in over HTTP, redirects it to HTTPS
        and secures the session.
        """
        cmd_opts = global_settings.cmd_options
        # checking if this is called within the scheduler or within the shell
        # in addition to checking if it's a cron job
        if (self.is_https or self.is_scheduler or
                cmd_opts and (cmd_opts.shell or cmd_opts.cron_job)):
            current.session.secure()
        else:
            current.session.forget()
            redirect(URL(scheme='https', args=self.args, vars=self.vars))
    def restful(self, ignore_extension=False):
        def wrapper(action, request=self):
            def f(_action=action, *a, **b):
                request.is_restful = True
                env = request.env
                is_json = env.content_type == 'application/json'
                method = env.request_method
                if not ignore_extension and len(request.args) and '.' in request.args[-1]:
                    request.args[-1], _, request.extension = request.args[-1].rpartition('.')
                    current.response.headers['Content-Type'] = \
                        contenttype('.' + request.extension.lower())
                rest_action = _action().get(method, None)
                if not (rest_action and method == method.upper()
                        and callable(rest_action)):
                    raise HTTP(405, "method not allowed")
                try:
                    res = rest_action(*request.args, **request.vars)
                    if is_json and not isinstance(res, str):
                        res = json(res)
                    return res
                except TypeError as e:
                    exc_type, exc_value, exc_traceback = sys.exc_info()
                    if len(traceback.extract_tb(exc_traceback)) == 1:
                        raise HTTP(400, "invalid arguments")
                    else:
                        raise
            f.__doc__ = action.__doc__
            f.__name__ = action.__name__
            return f
        return wrapper
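# Illustrative sketch (not part of the original module): Request.restful() is
# typically used as a decorator on a controller action whose inner functions
# are named after HTTP methods and returned via locals(); the names below are
# examples only:
#
#     @request.restful()
#     def api():
#         def GET(*args, **vars):
#             return dict(args=args, vars=vars)
#         def POST(*args, **vars):
#             return dict(accepted=vars)
#         return locals()
#
# A GET to /app/default/api/1/2.json then calls GET('1', '2') with
# request.extension == 'json' and the response Content-Type set accordingly;
# results are serialized with json() when the request content type is
# application/json.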


class Response(Storage):

    """
    Defines the response object and the default values of its members
    response.write(...) can be used to write in the output html
    """

    def __init__(self):
        Storage.__init__(self)
        self.status = 200
        self.headers = dict()
        self.headers['X-Powered-By'] = 'web2py'
        self.body = StringIO()
        self.session_id = None
        self.cookies = Cookie.SimpleCookie()
        self.postprocessing = []
        self.flash = ''            # used by the default view layout
        self.meta = Storage()      # used by web2py_ajax.html
        self.menu = []             # used by the default view layout
        self.files = []            # used by web2py_ajax.html
        self._vars = None
        self._caller = lambda f: f()
        self._view_environment = None
        self._custom_commit = None
        self._custom_rollback = None
        self.generic_patterns = ['*']
        self.delimiters = ('{{', '}}')
        self.formstyle = 'table3cols'
        self.form_label_separator = ': '
    def write(self, data, escape=True):
        if not escape:
            self.body.write(str(data))
        else:
            self.body.write(to_native(xmlescape(data)))
    def render(self, *a, **b):
        from gluon.compileapp import run_view_in
        if len(a) > 2:
            raise SyntaxError(
                'Response.render can be called with two arguments, at most')
        elif len(a) == 2:
            (view, self._vars) = (a[0], a[1])
        elif len(a) == 1 and isinstance(a[0], str):
            (view, self._vars) = (a[0], {})
        elif len(a) == 1 and hasattr(a[0], 'read') and callable(a[0].read):
            (view, self._vars) = (a[0], {})
        elif len(a) == 1 and isinstance(a[0], dict):
            (view, self._vars) = (None, a[0])
        else:
            (view, self._vars) = (None, {})
        self._vars.update(b)
        self._view_environment.update(self._vars)
        if view:
            from gluon._compat import StringIO
            (obody, oview) = (self.body, self.view)
            (self.body, self.view) = (StringIO(), view)
            page = run_view_in(self._view_environment)
            self.body.close()
            (self.body, self.view) = (obody, oview)
        else:
            page = run_view_in(self._view_environment)
        return page
    def include_meta(self):
        s = "\n"
        for meta in iteritems((self.meta or {})):
            k, v = meta
            if isinstance(v, dict):
                s += '<meta' + ''.join(' %s="%s"' % (to_native(xmlescape(key)),
                                                     to_native(xmlescape(v[key])))
                                       for key in v) + ' />\n'
            else:
                s += '<meta name="%s" content="%s" />\n' % (k, to_native(xmlescape(v)))
        self.write(s, escape=False)
    def include_files(self, extensions=None):
        """
        Includes files (usually in the head).
        Can minify and cache local files.
        By default, caches in ram for 5 minutes. To change,
        response.cache_includes = (cache_method, time_expire).
        Example: (cache.disk, 60) # caches to disk for 1 minute.
        """
        app = current.request.application

        # We start by building a files list in which adjacent files internal to
        # the application are placed in a list inside the files list.
        #
        # We will only minify and concat adjacent internal files as there's
        # no way to know if changing the order with which the files are appended
        # will break things since the order matters in both CSS and JS and
        # internal files may be interleaved with external ones.
        files = []
        # For the adjacent list we're going to use storage List to both distinguish
        # from the regular list and so we can add attributes
        internal = List()
        internal.has_js = False
        internal.has_css = False
        done = set()  # to remove duplicates
        for item in self.files:
            if not isinstance(item, list):
                if item in done:
                    continue
                done.add(item)
            if isinstance(item, (list, tuple)) or not item.startswith('/' + app):
                # also consider items in other web2py applications to be external
                if internal:
                    files.append(internal)
                    internal = List()
                    internal.has_js = False
                    internal.has_css = False
                files.append(item)
                continue
            if extensions and not item.rpartition('.')[2] in extensions:
                continue
            internal.append(item)
            if item.endswith('.js'):
                internal.has_js = True
            if item.endswith('.css'):
                internal.has_css = True
        if internal:
            files.append(internal)

        # We're done, we can now minify
        if have_minify:
            for i, f in enumerate(files):
                if isinstance(f, List) and ((self.optimize_css and f.has_css) or
                                            (self.optimize_js and f.has_js)):
                    # cache for 5 minutes by default
                    key = hashlib_md5(repr(f)).hexdigest()
                    cache = self.cache_includes or (current.cache.ram, 60 * 5)

                    def call_minify(files=f):
                        return List(minify.minify(files,
                                                  URL('static', 'temp'),
                                                  current.request.folder,
                                                  self.optimize_css,
                                                  self.optimize_js))
                    if cache:
                        cache_model, time_expire = cache
                        files[i] = cache_model('response.files.minified/' + key,
                                               call_minify, time_expire)
                    else:
                        files[i] = call_minify()

        def static_map(s, item):
            if isinstance(item, str):
                f = item.lower().split('?')[0]
                ext = f.rpartition('.')[2]
                # if static_version we need also to check for
                # static_version_urls. In that case, the _.x.x.x
                # bit would have already been added by the URL()
                # function
                if self.static_version and not self.static_version_urls:
                    item = item.replace(
                        '/static/', '/static/_%s/' % self.static_version, 1)
                tmpl = template_mapping.get(ext)
                if tmpl:
                    s.append(tmpl % item)
            elif isinstance(item, (list, tuple)):
                f = item[0]
                tmpl = template_mapping.get(f)
                if tmpl:
                    s.append(tmpl % item[1])

        s = []
        for item in files:
            if isinstance(item, List):
                for f in item:
                    static_map(s, f)
            else:
                static_map(s, item)
        self.write(''.join(s), escape=False)
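    # Illustrative sketch (not part of the original module): include_files()
    # renders response.files (populated in models or controllers) into the
    # tags defined in template_mapping, typically from the scaffolding layout
    # via web2py_ajax.html. A hypothetical model fragment:
    #
    #     response.files.append(URL('static', 'css/site.css'))
    #     response.files.append(URL('static', 'js/site.js'))
    #
    # and, in the layout view's head:
    #
    #     {{response.include_files()}}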
    def stream(self,
               stream,
               chunk_size=DEFAULT_CHUNK_SIZE,
               request=None,
               attachment=False,
               filename=None
               ):
        """
        If in a controller function::

            return response.stream(file, 100)

        the file content will be streamed 100 bytes at a time

        Args:
            stream: filename or read()able content
            chunk_size(int): Buffer size
            request: the request object
            attachment(bool): prepares the correct headers to download the file
                as an attachment. Usually creates a pop-up download window
                on browsers
            filename(str): the name for the attachment

        Note:
            for using the stream name (filename) with attachments
            the option must be explicitly set as function parameter (will
            default to the last request argument otherwise)
        """

        headers = self.headers
        # for attachment settings and backward compatibility
        keys = [item.lower() for item in headers]
        if attachment:
            # FIXME: should be done like in next download method
            if filename is None:
                attname = ""
            else:
                attname = filename
            headers["Content-Disposition"] = \
                'attachment; filename="%s"' % attname

        if not request:
            request = current.request
        if isinstance(stream, (str, unicodeT)):
            stream_file_or_304_or_206(stream,
                                      chunk_size=chunk_size,
                                      request=request,
                                      headers=headers,
                                      status=self.status)

        # ## the following is for backward compatibility
        if hasattr(stream, 'name'):
            filename = stream.name

        if filename and 'content-type' not in keys:
            headers['Content-Type'] = contenttype(filename)
        if filename and 'content-length' not in keys:
            try:
                headers['Content-Length'] = \
                    os.path.getsize(filename)
            except OSError:
                pass

        env = request.env
        # Internet Explorer < 9.0 will not allow downloads over SSL unless caching is enabled
        if request.is_https and isinstance(env.http_user_agent, str) and \
                not re.search(r'Opera', env.http_user_agent) and \
                re.search(r'MSIE [5-8][^0-9]', env.http_user_agent):
            headers['Pragma'] = 'cache'
            headers['Cache-Control'] = 'private'

        if request and env.web2py_use_wsgi_file_wrapper:
            wrapped = env.wsgi_file_wrapper(stream, chunk_size)
        else:
            wrapped = streamer(stream, chunk_size=chunk_size)
        return wrapped
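    # Illustrative sketch (not part of the original module): a hypothetical
    # controller action that streams a private file as a download attachment:
    #
    #     def export():
    #         path = os.path.join(request.folder, 'private', 'report.csv')
    #         return response.stream(open(path, 'rb'), chunk_size=4096,
    #                                attachment=True, filename='report.csv')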
    def download(self, request, db, chunk_size=DEFAULT_CHUNK_SIZE,
                 attachment=True, download_filename=None):
        """
        Example of usage in controller::

            def download():
                return response.download(request, db)

        Downloads from http://..../download/filename
        """
        from pydal.helpers.regex import REGEX_UPLOAD_PATTERN
        from pydal.exceptions import NotAuthorizedException, NotFoundException
        current.session.forget(current.response)

        if not request.args:
            raise HTTP(404)
        name = request.args[-1]
        items = re.match(REGEX_UPLOAD_PATTERN, name)
        if not items:
            raise HTTP(404)
        t = items.group('table')
        f = items.group('field')
        try:
            field = db[t][f]
        except (AttributeError, KeyError):
            raise HTTP(404)
        try:
            (filename, stream) = field.retrieve(name, nameonly=True)
        except NotAuthorizedException:
            raise HTTP(403)
        except NotFoundException:
            raise HTTP(404)
        except IOError:
            raise HTTP(404)
        headers = self.headers
        headers['Content-Type'] = contenttype(name)
        if download_filename is None:
            download_filename = filename
        if attachment:
            # Browsers still don't have a simple uniform way to have non ascii
            # characters in the filename so for now we are percent encoding it
            if isinstance(download_filename, unicodeT):
                download_filename = download_filename.encode('utf-8')
            download_filename = urllib_quote(download_filename)
            headers['Content-Disposition'] = \
                'attachment; filename="%s"' % download_filename.replace('"', '\\"')
        return self.stream(stream, chunk_size=chunk_size, request=request)
    def json(self, data, default=None, indent=None):
        if 'Content-Type' not in self.headers:
            self.headers['Content-Type'] = 'application/json'
        return json(data, default=default or custom_json, indent=indent)
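    # Illustrative sketch (not part of the original module): a hypothetical
    # controller action returning JSON with the proper Content-Type header
    # (custom_json handles values such as request.now, a datetime):
    #
    #     def status():
    #         return response.json(dict(ok=True, served_at=request.now),
    #                              indent=2)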
    def xmlrpc(self, request, methods):
        from gluon.xmlrpc import handler
        """
        assuming::

            def add(a, b):
                return a+b

        if a controller function \"func\"::

            return response.xmlrpc(request, [add])

        the controller will be able to handle xmlrpc requests for
        the add function. Example::

            import xmlrpclib
            connection = xmlrpclib.ServerProxy(
                'http://hostname/app/contr/func')
            print(connection.add(3, 4))

        """
        return handler(request, self, methods)
    def toolbar(self):
        from gluon.html import DIV, SCRIPT, BEAUTIFY, TAG, A
        BUTTON = TAG.button
        admin = URL("admin", "default", "design", extension='html',
                    args=current.request.application)
        from gluon.dal import DAL
        dbstats = []
        dbtables = {}
        infos = DAL.get_instances()
        for k, v in iteritems(infos):
            dbstats.append(TABLE(*[TR(PRE(row[0]), '%.2fms' % (row[1] * 1000))
                                   for row in v['dbstats']]))
            dbtables[k] = dict(defined=v['dbtables']['defined'] or '[no defined tables]',
                               lazy=v['dbtables']['lazy'] or '[no lazy tables]')
        u = web2py_uuid()
        backtotop = A('Back to top', _href="#totop-%s" % u)
        # Convert lazy request.vars from property to Storage so they
        # will be displayed in the toolbar.
        request = copy.copy(current.request)
        request.update(vars=current.request.vars,
                       get_vars=current.request.get_vars,
                       post_vars=current.request.post_vars)
        return DIV(
            BUTTON('design', _onclick="document.location='%s'" % admin),
            BUTTON('request',
                   _onclick="jQuery('#request-%s').slideToggle()" % u),
            BUTTON('response',
                   _onclick="jQuery('#response-%s').slideToggle()" % u),
            BUTTON('session',
                   _onclick="jQuery('#session-%s').slideToggle()" % u),
            BUTTON('db tables',
                   _onclick="jQuery('#db-tables-%s').slideToggle()" % u),
            BUTTON('db stats',
                   _onclick="jQuery('#db-stats-%s').slideToggle()" % u),
            DIV(BEAUTIFY(request), backtotop,
                _class="w2p-toolbar-hidden", _id="request-%s" % u),
            DIV(BEAUTIFY(current.session), backtotop,
                _class="w2p-toolbar-hidden", _id="session-%s" % u),
            DIV(BEAUTIFY(current.response), backtotop,
                _class="w2p-toolbar-hidden", _id="response-%s" % u),
            DIV(BEAUTIFY(dbtables), backtotop,
                _class="w2p-toolbar-hidden", _id="db-tables-%s" % u),
            DIV(BEAUTIFY(dbstats), backtotop,
                _class="w2p-toolbar-hidden", _id="db-stats-%s" % u),
            SCRIPT("jQuery('.w2p-toolbar-hidden').hide()"),
            _id="totop-%s" % u
        )


class Session(Storage):
    """
    Defines the session object and the default values of its members (None)

    - session_storage_type   : 'file', 'db', or 'cookie'
    - session_cookie_compression_level :
    - session_cookie_expires : cookie expiration
    - session_cookie_key     : for encrypted sessions in cookies
    - session_id             : a number or None if no session
    - session_id_name        :
    - session_locked         :
    - session_masterapp      :
    - session_new            : a new session obj is being created
    - session_hash           : hash of the pickled loaded session
    - session_pickled        : pickled session

    if session in cookie:

    - session_data_name      : name of the cookie for session data

    if session in db:

    - session_db_record_id
    - session_db_table
    - session_db_unique_key

    if session in file:

    - session_file
    - session_filename
    """

    REGEX_SESSION_FILE = r'^(?:[\w-]+/)?[\w.-]+$'
    def connect(self,
                request=None,
                response=None,
                db=None,
                tablename='web2py_session',
                masterapp=None,
                migrate=True,
                separate=None,
                check_client=False,
                cookie_key=None,
                cookie_expires=None,
                compression_level=None
                ):
        """
        Used in models, allows to customize Session handling

        Args:
            request: the request object
            response: the response object
            db: to store/retrieve sessions in db (a table is created)
            tablename(str): table name
            masterapp(str): points to another's app sessions. This enables a
                "SSO" environment among apps
            migrate: passed to the underlying db
            separate: with True, creates a folder with the 2 initials of the
                session id. Can also be a function, e.g. ::

                    separate=lambda(session_name): session_name[-2:]

            check_client: if True, sessions can only come from the same ip
            cookie_key(str): secret for cookie encryption
            cookie_expires: sets the expiration of the cookie
            compression_level(int): 0-9, sets zlib compression on the data
                before the encryption
        """
        request = request or current.request
        response = response or current.response
        masterapp = masterapp or request.application
        cookies = request.cookies

        self._unlock(response)

        response.session_masterapp = masterapp
        response.session_id_name = 'session_id_%s' % masterapp.lower()
        response.session_data_name = 'session_data_%s' % masterapp.lower()
        response.session_cookie_expires = cookie_expires
        response.session_client = str(request.client).replace(':', '.')
        current._session_cookie_key = cookie_key
        response.session_cookie_compression_level = compression_level

        # check if there is a session_id in cookies
        try:
            old_session_id = cookies[response.session_id_name].value
        except KeyError:
            old_session_id = None
        response.session_id = old_session_id

        # if we are supposed to use cookie based session data
        if cookie_key:
            response.session_storage_type = 'cookie'
        elif db:
            response.session_storage_type = 'db'
        else:
            response.session_storage_type = 'file'

        # why do we do this?
        # because connect may be called twice, by web2py and in models.
        # the first time there is no db yet so it should do nothing
        if (global_settings.db_sessions is True
                or masterapp in global_settings.db_sessions):
            return

        if response.session_storage_type == 'cookie':
            # check if there is session data in cookies
            if response.session_data_name in cookies:
                session_cookie_data = cookies[response.session_data_name].value
            else:
                session_cookie_data = None
            if session_cookie_data:
                data = secure_loads(session_cookie_data, cookie_key,
                                    compression_level=compression_level)
                if data:
                    self.update(data)
            response.session_id = True

        # else if we are supposed to use file based sessions
        elif response.session_storage_type == 'file':
            response.session_new = False
            response.session_file = None
            # check if the session_id points to a valid session filename
            if response.session_id:
                if not re.match(self.REGEX_SESSION_FILE, response.session_id):
                    response.session_id = None
                else:
                    response.session_filename = \
                        os.path.join(up(request.folder), masterapp,
                                     'sessions', response.session_id)
                    try:
                        response.session_file = \
                            recfile.open(response.session_filename, 'rb+')
                        portalocker.lock(response.session_file,
                                         portalocker.LOCK_EX)
                        response.session_locked = True
                        self.update(pickle.load(response.session_file))
                        response.session_file.seek(0)
                        oc = response.session_filename.split('/')[-1].split('-')[0]
                        if check_client and response.session_client != oc:
                            raise Exception("cookie attack")
                    except:
                        response.session_id = None
            if not response.session_id:
                uuid = web2py_uuid()
                response.session_id = '%s-%s' % (response.session_client, uuid)
                separate = separate and (lambda session_name: session_name[-2:])
                if separate:
                    prefix = separate(response.session_id)
                    response.session_id = '%s/%s' % (prefix, response.session_id)
                response.session_filename = \
                    os.path.join(up(request.folder), masterapp,
                                 'sessions', response.session_id)
                response.session_new = True

        # else the session goes in db
        elif response.session_storage_type == 'db':
            if global_settings.db_sessions is not True:
                global_settings.db_sessions.add(masterapp)
            # if had a session on file already, close it (yes, can happen)
            if response.session_file:
                self._close(response)
            # if on GAE tickets go also in DB
            if settings.global_settings.web2py_runtime_gae:
                request.tickets_db = db
            if masterapp == request.application:
                table_migrate = migrate
            else:
                table_migrate = False
            tname = tablename + '_' + masterapp
            table = db.get(tname, None)
            Field = db.Field
            if table is None:
                db.define_table(
                    tname,
                    Field('locked', 'boolean', default=False),
                    Field('client_ip', length=64),
                    Field('created_datetime', 'datetime', default=request.now),
                    Field('modified_datetime', 'datetime'),
                    Field('unique_key', length=64),
                    Field('session_data', 'blob'),
                    migrate=table_migrate,
                )
                table = db[tname]  # to allow for lazy table
            response.session_db_table = table
            if response.session_id:
                # Get session data out of the database
                try:
                    (record_id, unique_key) = response.session_id.split(':')
                    record_id = long(record_id)
                except (TypeError, ValueError):
                    record_id = None

                # Select from database
                if record_id:
                    row = table(record_id, unique_key=unique_key)
                    # Make sure the session data exists in the database
                    if row:
                        # rows[0].update_record(locked=True)
                        # Unpickle the data
                        try:
                            session_data = pickle.loads(row['session_data'])
                            self.update(session_data)
                            response.session_new = False
                        except:
                            record_id = None
                    else:
                        record_id = None
                if record_id:
                    response.session_id = '%s:%s' % (record_id, unique_key)
                    response.session_db_unique_key = unique_key
                    response.session_db_record_id = record_id
                else:
                    response.session_id = None
                    response.session_new = True
            # if there is no session id yet, we'll need to create a
            # new session
            else:
                response.session_new = True

        # set the cookie now if you know the session_id so user can set
        # cookie attributes in controllers/models (the cookie may be reset
        # later)
        # Removed comparison between old and new session ids - should send
        # the cookie all the time
        if isinstance(response.session_id, str):
            response.cookies[response.session_id_name] = response.session_id
            response.cookies[response.session_id_name]['path'] = '/'
            if cookie_expires:
                response.cookies[response.session_id_name]['expires'] = \
                    cookie_expires.strftime(FMT)

        session_pickled = pickle.dumps(self, pickle.HIGHEST_PROTOCOL)
        response.session_hash = hashlib.md5(session_pickled).hexdigest()

        if self.flash:
            (response.flash, self.flash) = (self.flash, None)
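    # Illustrative sketch (not part of the original module): connect() is
    # normally called from a model file (e.g. db.py) to move sessions out of
    # the default file storage. Two hypothetical variants:
    #
    #     # store sessions in the database (uses table web2py_session_<app>)
    #     session.connect(request, response, db=db)
    #
    #     # or keep them client-side in an encrypted (optionally compressed)
    #     # cookie
    #     session.connect(request, response,
    #                     cookie_key='a-long-random-secret',
    #                     compression_level=2)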
    def renew(self, clear_session=False):

        if clear_session:
            self.clear()

        request = current.request
        response = current.response
        session = response.session
        masterapp = response.session_masterapp
        cookies = request.cookies

        if response.session_storage_type == 'cookie':
            return

        # if the session goes in file
        if response.session_storage_type == 'file':
            self._close(response)
            uuid = web2py_uuid()
            response.session_id = '%s-%s' % (response.session_client, uuid)
            separate = (lambda s: s[-2:]) if session and response.session_id[2:3] == "/" else None
            if separate:
                prefix = separate(response.session_id)
                response.session_id = '%s/%s' % \
                    (prefix, response.session_id)
            response.session_filename = \
                os.path.join(up(request.folder), masterapp,
                             'sessions', response.session_id)
            response.session_new = True

        # else the session goes in db
        elif response.session_storage_type == 'db':
            table = response.session_db_table

            # verify that session_id exists
            if response.session_file:
                self._close(response)
            if response.session_new:
                return
            # Get session data out of the database
            if response.session_id is None:
                return
            (record_id, sep, unique_key) = response.session_id.partition(':')

            if record_id.isdigit() and long(record_id) > 0:
                new_unique_key = web2py_uuid()
                row = table(record_id)
                if row and to_native(row['unique_key']) == to_native(unique_key):
                    table._db(table.id == record_id).update(unique_key=new_unique_key)
                else:
                    record_id = None
            if record_id:
                response.session_id = '%s:%s' % (record_id, new_unique_key)
                response.session_db_record_id = record_id
                response.session_db_unique_key = new_unique_key
            else:
                response.session_new = True
    def _fixup_before_save(self):
        response = current.response
        rcookies = response.cookies
        scookies = rcookies.get(response.session_id_name)
        if not scookies:
            return
        if self._forget:
            del rcookies[response.session_id_name]
            return
        if self.get('httponly_cookies', True):
            scookies['HttpOnly'] = True
        if self._secure:
            scookies['secure'] = True
        if self._same_site is None:
            # Using SameSite Lax Mode is the default
            # You actually have to call session.samesite(False) if you really
            # don't want the extra protection provided by the SameSite header
            self._same_site = 'Lax'
        if self._same_site:
            if 'samesite' not in Cookie.Morsel._reserved:
                # Python version 3.7 and lower needs this
                Cookie.Morsel._reserved['samesite'] = 'SameSite'
            scookies['samesite'] = self._same_site
    def clear_session_cookies(self):
        request = current.request
        response = current.response
        session = response.session
        masterapp = response.session_masterapp
        cookies = request.cookies
        rcookies = response.cookies
        # if not cookie_key, but session_data_name in cookies
        # expire session_data_name from cookies
        if response.session_data_name in cookies:
            rcookies[response.session_data_name] = 'expired'
            rcookies[response.session_data_name]['path'] = '/'
            rcookies[response.session_data_name]['expires'] = PAST
        if response.session_id_name in rcookies:
            del rcookies[response.session_id_name]
    def clear(self):
        # see https://github.com/web2py/web2py/issues/735
        response = current.response
        if response.session_storage_type == 'file':
            target = recfile.generate(response.session_filename)
            try:
                self._close(response)
                os.unlink(target)
            except:
                pass
        elif response.session_storage_type == 'db':
            table = response.session_db_table
            if response.session_id:
                (record_id, sep, unique_key) = response.session_id.partition(':')
                if record_id.isdigit() and long(record_id) > 0:
                    table._db(table.id == record_id).delete()
        Storage.clear(self)
    def is_new(self):
        if self._start_timestamp:
            return False
        else:
            self._start_timestamp = datetime.datetime.today()
            return True
    def is_expired(self, seconds=3600):
        now = datetime.datetime.today()
        if not self._last_timestamp or \
                self._last_timestamp + datetime.timedelta(seconds=seconds) > now:
            self._last_timestamp = now
            return False
        else:
            return True
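    # Illustrative sketch (not part of the original module): a hypothetical
    # model-level check that clears sessions idle for more than 10 minutes:
    #
    #     if session.is_expired(seconds=600):
    #         session.renew(clear_session=True)
    #         redirect(URL('default', 'index'))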
    def secure(self):
        self._secure = True
    def samesite(self, mode='Lax'):
        self._same_site = mode
    def forget(self, response=None):
        self._close(response)
        self._forget = True
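    # Illustrative sketch (not part of the original module): actions that never
    # modify the session can call forget() to skip saving and to release the
    # session file lock early, as Response.download() above does. A
    # hypothetical controller fragment:
    #
    #     def manifest():
    #         session.forget(response)
    #         return response.json(dict(version='1.0'))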
    def _try_store_in_cookie(self, request, response):
        if self._forget or self._unchanged(response):
            # self.clear_session_cookies()
            self.save_session_id_cookie()
            return False
        name = response.session_data_name
        compression_level = response.session_cookie_compression_level
        value = secure_dumps(dict(self),
                             current._session_cookie_key,
                             compression_level=compression_level)
        rcookies = response.cookies
        rcookies.pop(name, None)
        rcookies[name] = to_native(value)
        rcookies[name]['path'] = '/'
        expires = response.session_cookie_expires
        if isinstance(expires, datetime.datetime):
            expires = expires.strftime(FMT)
        if expires:
            rcookies[name]['expires'] = expires
        return True

    def _unchanged(self, response):
        if response.session_new:
            internal = ['_last_timestamp', '_secure', '_start_timestamp', '_same_site']
            for item in self.keys():
                if item not in internal:
                    return False
            return True
        session_pickled = pickle.dumps(self, pickle.HIGHEST_PROTOCOL)
        response.session_pickled = session_pickled
        session_hash = hashlib.md5(session_pickled).hexdigest()
        return response.session_hash == session_hash

    def _try_store_in_db(self, request, response):
        # don't save if file-based sessions,
        # no session id, or session being forgotten
        # or no changes to session (Unless the session is new)
        if (not response.session_db_table
                or self._forget
                or (self._unchanged(response) and not response.session_new)):
            if (not response.session_db_table
                    and global_settings.db_sessions is not True
                    and response.session_masterapp in global_settings.db_sessions):
                global_settings.db_sessions.remove(response.session_masterapp)
            # self.clear_session_cookies()
            self.save_session_id_cookie()
            return False

        table = response.session_db_table
        record_id = response.session_db_record_id
        if response.session_new:
            unique_key = web2py_uuid()
        else:
            unique_key = response.session_db_unique_key

        session_pickled = response.session_pickled or pickle.dumps(self, pickle.HIGHEST_PROTOCOL)

        dd = dict(locked=0,
                  client_ip=response.session_client,
                  modified_datetime=request.now.isoformat(),
                  session_data=session_pickled,
                  unique_key=unique_key)
        if record_id:
            if not table._db(table.id == record_id).update(**dd):
                record_id = None
        if not record_id:
            record_id = table.insert(**dd)
        response.session_id = '%s:%s' % (record_id, unique_key)
        response.session_db_unique_key = unique_key
        response.session_db_record_id = record_id

        self.save_session_id_cookie()
        return True

    def _try_store_in_cookie_or_file(self, request, response):
        if response.session_storage_type == 'file':
            return self._try_store_in_file(request, response)
        if response.session_storage_type == 'cookie':
            return self._try_store_in_cookie(request, response)

    def _try_store_in_file(self, request, response):
        try:
            if (not response.session_id or
                    not response.session_filename or
                    self._forget or
                    self._unchanged(response)):
                # self.clear_session_cookies()
                return False
            else:
                if response.session_new or not response.session_file:
                    # Tests if the session sub-folder exists, if not, create it
                    session_folder = os.path.dirname(response.session_filename)
                    if not os.path.exists(session_folder):
                        os.mkdir(session_folder)
                    response.session_file = recfile.open(response.session_filename, 'wb')
                    portalocker.lock(response.session_file, portalocker.LOCK_EX)
                    response.session_locked = True
                if response.session_file:
                    session_pickled = response.session_pickled or pickle.dumps(self, pickle.HIGHEST_PROTOCOL)
                    response.session_file.write(session_pickled)
                    response.session_file.truncate()
                return True
        finally:
            self._close(response)
            self.save_session_id_cookie()

    def _unlock(self, response):
        if response and response.session_file and response.session_locked:
            try:
                portalocker.unlock(response.session_file)
                response.session_locked = False
            except:  # this should never happen but happens in Windows
                pass

    def _close(self, response):
        if response and response.session_file:
            self._unlock(response)
            try:
                response.session_file.close()
                del response.session_file
            except:
                pass
def pickle_session(s):
    return Session, (dict(s),)


copyreg.pickle(Session, pickle_session)