Commits

Ginés Martínez Sánchez  committed 41648ac Draft

removed the first experimental wsgi server

  • Participants
  • Parent commits 64e0113

Comments (0)

Files changed (8)

File ginsfsm/wsgi/__init__.py

-def profile(cmd, globals, locals, sort_order, callers):  # pragma: no cover
-    # runs a command under the profiler and print profiling output at shutdown
-    import os
-    import profile
-    import pstats
-    import tempfile
-    fd, fn = tempfile.mkstemp()
-    try:
-        profile.runctx(cmd, globals, locals, fn)
-        stats = pstats.Stats(fn)
-        stats.strip_dirs()
-        # calls,time,cumulative and cumulative,calls,time are useful
-        stats.sort_stats(*sort_order or ('cumulative', 'calls', 'time'))
-        if callers:
-            stats.print_callers(.3)
-        else:
-            stats.print_stats(.3)
-    finally:
-        os.remove(fn)

File ginsfsm/wsgi/c_httpchannel.py

-# -*- encoding: utf-8 -*-
-
-from .parser import HTTPRequestParser
-from .c_wsgiapp import WSGIApp
-
-from ginsfsm.gobj import GObj
-
-
-def ac_disconnected(self, event):
-    """ httpchannel disconnected, inform to wsgi_server for to drop it.
-    """
-    self.post_event(self.wsgi_server, 'EV_DISCONNECTED')
-
-
-def ac_httpchannel_rx_data(self, event):
-    """ Received data. Can be one or more requests.
-    """
-    data = event.data
-    if not data:
-        return
-    new_request = self._current_request
-    requests = []
-    while data:
-        if new_request is None:
-            new_request = HTTPRequestParser(
-                url_scheme=self.wsgi_server.url_scheme,
-                inbuf_overflow=self.inbuf_overflow,
-                max_request_header_size=self.max_request_header_size,
-                max_request_body_size=self.max_request_body_size)
-        n = new_request.received(data)
-        if new_request.expect_continue and new_request.headers_finished:
-            # guaranteed by parser to be a 1.1 new_request
-            new_request.expect_continue = False
-            if not self.sent_continue:
-                # there's no current task, so we don't need to try to
-                # lock the outbuf to append to it.
-                self.gsock.outbufs[-1].append(b'HTTP/1.1 100 Continue\r\n\r\n')
-                self.sent_expect_continue = True
-                self.gsock.send_some()
-                new_request.completed = False
-        if new_request.completed:
-            # The new_request (with the body) is ready to use.
-            self._current_request = None
-            if not new_request.empty:
-                requests.append(new_request)
-            new_request = None
-        else:
-            self._current_request = new_request
-        if n >= len(data):
-            break
-        data = data[n:]
-
-    if requests:
-        self.send_event(self.wsgiapp, 'EV_EXECUTE_REQUESTS', requests=requests)
-
-
-def ac_timeout(self, event):
-    #TODO: channel_timeout = 120 inactivity timeout
-    pass
-
-
-HTTPCHANNEL_FSM = {
-    'event_list': ('EV_TIMEOUT', 'EV_DISCONNECTED', 'EV_RX_DATA',),
-    'state_list': ('ST_IDLE',),
-    'machine': {
-        'ST_IDLE':
-        (
-            ('EV_TIMEOUT',          ac_timeout,             'ST_IDLE'),
-            ('EV_DISCONNECTED',     ac_disconnected,        'ST_IDLE'),
-            ('EV_RX_DATA',          ac_httpchannel_rx_data, 'ST_IDLE'),
-        ),
-    }
-}
-
-HTTPCHANNEL_GCONFIG = {
-    'wsgi_server': [None, None, 0, None, "wsgi server"],
-    'gsock': [None, None, 0, None, "clisrv gsock"],
-    # A tempfile should be created if the pending input is larger than
-    # inbuf_overflow, which is measured in bytes. The default is 512K.  This
-    # is conservative.
-    'inbuf_overflow': [int, 524288, 0, None, ""],
-    # maximum number of bytes of all request headers combined (256K default)
-    'max_request_header_size': [int, 262144, 0, None, ""],
-    # maximum number of bytes in request body (1GB default)
-    'max_request_body_size': [int, 1073741824, 0, None, ""],
-}
-
-
-class HTTPChannel(GObj):
-    """  HTTP Channel gobj.
-    """
-    def __init__(self):
-        GObj.__init__(self, HTTPCHANNEL_FSM, HTTPCHANNEL_GCONFIG)
-        self._current_request = None  # current receiving request
-
-    def start_up(self):
-        self.gsock.delete_all_subscriptions()
-        self.gsock.subscribe_event(None, self)  # take on all gsock events
-
-        if self.name and len(self.name):
-            prefix_name = self.name
-        else:
-            prefix_name = None
-        self.wsgiapp = self.create_gobj(
-            prefix_name + '.wsgi-app' if prefix_name else None,
-            WSGIApp,
-            self,
-            wsgi_server=self.wsgi_server,
-            gsock=self.gsock,
-        )

File ginsfsm/wsgi/c_wsgi_server.py

-# -*- encoding: utf-8 -*-
-"""
-GObj :class:`GWSGIServer`
-=========================
-
-.. autoclass:: GWSGIServer
-    :members:
-
-"""
-
-from ginsfsm.gobj import GObj
-from ginsfsm.c_srv_sock import GServerSock
-from ginsfsm.wsgi.c_httpchannel import HTTPChannel
-
-
-def ac_new_httpchannel(self, event):
-    """ New gsock clisvr, create a new httpchannel
-    """
-    self._n_connected_clisrv += 1
-    gsock = event.source[-1]  # new gsock clisrv
-
-    if self.name and len(self.name):
-        prefix_name = self.name
-    else:
-        prefix_name = None
-    self.serversock = self.create_gobj(
-        prefix_name + '.http-channel' if prefix_name else None,
-        HTTPChannel,
-        self,
-        wsgi_server=self,
-        gsock=gsock,
-    )
-
-
-def ac_drop_httpchannel(self, event):
-    """ httpchannel closed, drop it.
-    """
-    # TODO: need to drop gsock, httpchannel and wsgiapp
-    channel = event.source[-1]
-    self._n_connected_clisrv -= 1
-    self.destroy_gobj(channel)
-
-
-def ac_timeout(self, event):
-    self.set_timeout(10)
-    print "Server's clients: %d, connected %d" % (
-        len(self.dl_childs), self._n_connected_clisrv)
-
-
-GWSGISERVER_FSM = {
-    'event_list': (
-        'EV_SET_TIMER: bottom output',
-        'EV_TIMEOUT: bottom input',
-        'EV_CONNECTED: bottom input',
-        'EV_DISCONNECTED: bottom input',
-    ),
-    'state_list': ('ST_IDLE',),
-    'machine': {
-        'ST_IDLE':
-        (
-            ('EV_TIMEOUT',          ac_timeout,             None),
-            ('EV_CONNECTED',        ac_new_httpchannel,     None),
-            ('EV_DISCONNECTED',     ac_drop_httpchannel,    None),
-        ),
-    }
-}
-
-GWSGISERVER_GCONFIG = {
-    'host': [str, '', 0, None, "listening host"],
-    'port': [int, 0, 0, None, "listening port"],
-    'origins': [None, None, 0, None,
-        "TODO:list of (host, port) tuples allowed to connect from"],
-    #TODO: implement a multi wsgi application
-    'application': [None, None, 0, None, "wsgi application"],
-    'url_scheme': [str, 'http', 0, None, "default ``wsgi.url_scheme`` value"],
-    'identity': [str, 'ginsfsm', 0, None,
-                 "server identity (sent in Server: header)"],
-}
-
-
-class GWSGIServer(GObj):
-    """  WSGI Server gobj.
-
-    The incoming connections will create a new :class:`ginsfsm.c_sock.GSock`
-    :term:`gobj`,
-    that will be child of the :attr:`subscriber` :term:`gobj`.
-
-        .. warning::  Remember destroy the accepted `gobj`
-           with :func:`destroy_gobj` when the `gobj` has been disconnected.
-
-           The `subscriber` knows when a new `gobj` has been accepted because it
-           receives the ``'EV_CONNECTED'`` event.
-
-           When the `subscriber` receives a ``'EV_DISCONNECTED'`` event, it must
-           destroy the `gobj` because the connection ceases to exist.
-
-    .. ginsfsm::
-       :fsm: GWSGISERVER_FSM
-       :gconfig: GWSGISERVER_GCONFIG
-
-    *Input-Events:*
-
-        The relationship is directly between the
-        accepted :class:`ginsfsm.c_sock.GSock` gobj and the :attr:`subscriber`.
-
-        See :class:`ginsfsm.c_sock.GSock` `input-events`.
-
-    *Output-Events:*
-
-        The relationship is directly between the
-        accepted :class:`ginsfsm.c_sock.GSock` gobj and the :attr:`subscriber`.
-
-        See :class:`ginsfsm.c_sock.GSock` `output-events`.
-    """
-
-    def __init__(self):
-        GObj.__init__(self, GWSGISERVER_FSM, GWSGISERVER_GCONFIG)
-        self._n_connected_clisrv = 0
-
-    def start_up(self):
-        if self.name and len(self.name):
-            prefix_name = self.name
-        else:
-            prefix_name = None
-        self.serversock = self.create_gobj(
-            prefix_name + '.sock-server' if prefix_name else None,
-            GServerSock,
-            self,
-            subscriber=self,
-            host=self.host,
-            port=self.port,
-            origins=self.origins,
-        )
-
-        # Used in environ
-        self.effective_host, self.effective_port = \
-            self.serversock.getsockname()
-        self.server_name = self._get_server_name(self.host)
-
-    def _get_server_name(self, ip):
-        """Given an IP or hostname, try to determine the server name."""
-        if ip:
-            srv_name = str(ip)
-        else:
-            srv_name = str(self.serversock.socketmod.gethostname())
-        # Convert to a host name if necessary.
-        for c in srv_name:
-            if c != '.' and not c.isdigit():
-                return srv_name
-        try:
-            if srv_name == '0.0.0.0':
-                return 'localhost'
-            srv_name = self.serversock.socketmod.gethostbyaddr(srv_name)[0]
-        except:
-            pass
-        return srv_name
-
-    def getsockname(self):
-        return self.serversock.socket.getsockname()

File ginsfsm/wsgi/c_wsgiapp.py

-# -*- encoding: utf-8 -*-
-
-import time
-import traceback
-import logging
-
-from .parser import HTTPRequestParser
-from .task import (
-    ErrorTask,
-    WSGITask,
-    )
-from .utilities import (
-    InternalServerError,
-    )
-
-from ginsfsm.gobj import GObj
-from ginsfsm.deferred import DeferredInterrupt
-
-
-def ac_execute_requests(self, event):
-    """Execute all pending requests """
-    requests = event.requests
-    gsock = self.gsock
-    ext_event = None
-    if hasattr(event, 'ext_event'):
-        ext_event = event.ext_event
-    while requests:
-        request = requests[0]
-        if request.error:
-            task = ErrorTask(self.wsgi_server, gsock, request)
-        else:
-            # Añado esto en get_environment de task.py,despues de environ = {}
-            # environ['gobj.gsock'] = gsock
-            task = WSGITask(self.wsgi_server, gsock, request, ext_event)
-        try:
-            task.service()
-        except DeferredInterrupt as e:
-
-            def callback(request, ext_event=None):
-                self.send_event(
-                    self,
-                    'EV_EXECUTE_REQUESTS',
-                    requests=[request],
-                    ext_event=ext_event,
-                )
-            self.gaplic.deferred_list.add_callback(
-                e.deferred_ref,
-                callback,
-                request
-            )
-        except:
-            logging.exception('Exception when serving %s' %
-                                  task.request.path)
-            if not task.wrote_header:
-                if self.expose_tracebacks:
-                    body = traceback.format_exc()
-                else:
-                    body = ('The server encountered an unexpected '
-                            'internal server error')
-                request = HTTPRequestParser(
-                    url_scheme=self.wsgi_server.url_scheme)
-                request.error = InternalServerError(body)
-                task = ErrorTask(self.wsgi_server, gsock, request)
-                task.service()  # must not fail
-            else:
-                task.close_on_finish = True
-        # we cannot allow self.requests to drop to empty til
-        # here; otherwise the mainloop gets confused
-        if task.close_on_finish:
-            gsock.close_when_flushed = True
-            for request in requests:
-                request._close()
-            requests = []
-        else:
-            request = requests.pop(0)
-            request._close()
-
-    gsock.send_some()
-    gsock.last_activity = time.time()
-
-
-WSGIAPP_FSM = {
-    'event_list': (
-        'EV_EXECUTE_REQUESTS: top input',
-        'EV_WSGIAPP_RESPONSE: top output',
-    ),
-    'state_list': ('ST_IDLE',),
-    'machine': {
-        'ST_IDLE':
-        (
-            ('EV_EXECUTE_REQUESTS', ac_execute_requests,    'ST_IDLE'),
-        ),
-    }
-}
-
-WSGIAPP_GCONFIG = {
-    'wsgi_server': [None, None, 0, None, "wsgi server"],
-    'gsock': [None, None, 0, None, "clisrv gsock"],
-    'expose_tracebacks': [bool, False, 0, None,
-        "expose tracebacks of uncaught exceptions"],
-}
-
-
-class WSGIApp(GObj):
-    """  Execute WSGI apps.
-    """
-
-    def __init__(self):
-        GObj.__init__(self, WSGIAPP_FSM, WSGIAPP_GCONFIG)
-
-    def start_up(self):
-        pass

File ginsfsm/wsgi/parser.py

-##############################################################################
-#
-# Copyright (c) 2001, 2002 Zope Foundation and Contributors.
-# All Rights Reserved.
-#
-# This software is subject to the provisions of the Zope Public License,
-# Version 2.1 (ZPL).  A copy of the ZPL should accompany this distribution.
-# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
-# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
-# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
-# FOR A PARTICULAR PURPOSE.
-#
-##############################################################################
-"""HTTP Request Parser
-
-This server uses asyncore to accept connections and do initial
-processing but threads to do work.
-"""
-import re
-from io import BytesIO
-
-from ginsfsm.compat import (
-    tostr,
-    urlparse,
-    unquote_bytes_to_wsgi,
-    )
-
-from ginsfsm.buffers import OverflowableBuffer
-
-from ginsfsm.wsgi.receiver import (
-    FixedStreamReceiver,
-    ChunkedReceiver,
-    )
-
-from ginsfsm.wsgi.utilities import (
-    find_double_newline,
-    RequestEntityTooLarge,
-    RequestHeaderFieldsTooLarge,
-    )
-
-
-class HTTPRequestParser(object):
-    """A structure that collects the HTTP request.
-
-    Once the stream is completed, the instance is passed to
-    a server task constructor.
-
-    """
-
-    completed = False  # Set once request is completed.
-    empty = False        # Set if no request was made.
-    expect_continue = False  # client sent "Expect: 100-continue" header
-    headers_finished = False  # True when headers have been read
-    header_plus = b''
-    chunked = False
-    content_length = 0
-    header_bytes_received = 0
-    body_bytes_received = 0
-    body_rcv = None
-    version = '1.0'
-    error = None
-    connection_close = False
-
-    # Other attributes: first_line, header, headers, command, uri, version,
-    # path, query, fragment
-
-    def __init__(self, url_scheme,
-                 inbuf_overflow=524288,
-                 max_request_header_size=262144,
-                 max_request_body_size=1073741824):
-        # headers is a mapping containing keys translated to uppercase
-        # with dashes turned into underscores.
-        self.url_scheme = url_scheme
-        self.inbuf_overflow = inbuf_overflow
-        self.max_request_header_size = max_request_header_size
-        self.max_request_body_size = max_request_body_size
-        self.headers = {}
-
-    def received(self, data):
-        """
-        Receives the HTTP stream for one request.  Returns the number of
-        bytes consumed.  Sets the completed flag once both the header and the
-        body have been received.
-        """
-        if self.completed:
-            return 0  # Can't consume any more.
-        datalen = len(data)
-        br = self.body_rcv
-        if br is None:
-            # In header.
-            s = self.header_plus + data
-            index = find_double_newline(s)
-            if index >= 0:
-                # Header finished.
-                header_plus = s[:index]
-                consumed = len(data) - (len(s) - index)
-                # Remove preceding blank lines.
-                header_plus = header_plus.lstrip()
-                if not header_plus:
-                    self.empty = True
-                    self.completed = True
-                else:
-                    self.parse_header(header_plus)
-                    if self.body_rcv is None:
-                        # no content-length header and not a t-e: chunked
-                        # request
-                        self.completed = True
-                    if self.content_length > 0:
-                        max_body = self.max_request_body_size
-                        # we won't accept this request if the content-length
-                        # is too large
-                        if self.content_length >= max_body:
-                            self.error = RequestEntityTooLarge(
-                                'exceeds max_body of %s' % max_body)
-                            self.completed = True
-                self.headers_finished = True
-                return consumed
-            else:
-                # Header not finished yet.
-                self.header_bytes_received += datalen
-                max_header = self.max_request_header_size
-                if self.header_bytes_received >= max_header:
-                    self.parse_header(b'GET / HTTP/1.0\n')
-                    self.error = RequestHeaderFieldsTooLarge(
-                        'exceeds max_header of %s' % max_header)
-                    self.completed = True
-                self.header_plus = s
-                return datalen
-        else:
-            # In body.
-            consumed = br.received(data)
-            self.body_bytes_received += consumed
-            max_body = self.max_request_body_size
-            if self.body_bytes_received >= max_body:
-                # this will only be raised during t-e: chunked requests
-                self.error = RequestEntityTooLarge(
-                    'exceeds max_body of %s' % max_body)
-                self.completed = True
-            elif br.error:
-                # garbage in chunked encoding input probably
-                self.error = br.error
-                self.completed = True
-            elif br.completed:
-                self.completed = True
-            return consumed
-
-    def parse_header(self, header_plus):
-        """
-        Parses the header_plus block of text (the headers plus the
-        first line of the request).
-        """
-        index = header_plus.find(b'\n')
-        if index >= 0:
-            first_line = header_plus[:index].rstrip()
-            header = header_plus[index + 1:]
-        else:
-            first_line = header_plus.rstrip()
-            header = b''
-
-        self.first_line = first_line  # for testing
-
-        lines = get_header_lines(header)
-
-        headers = self.headers
-        for line in lines:
-            index = line.find(b':')
-            if index > 0:
-                key = line[:index]
-                value = line[index + 1:].strip()
-                key1 = tostr(key.upper().replace(b'-', b'_'))
-                # If a header already exists, we append subsequent values
-                # separated by a comma. Applications already need to handle
-                # the comma separated values, as HTTP front ends might do
-                # the concatenation for you (behavior specified in RFC2616).
-                try:
-                    headers[key1] += tostr(b', ' + value)
-                except KeyError:
-                    headers[key1] = tostr(value)
-            # else there's garbage in the headers?
-
-        # command, uri, version will be bytes
-        command, uri, version = crack_first_line(first_line)
-        version = tostr(version)
-        command = tostr(command)
-        self.command = command
-        self.version = version
-        (self.proxy_scheme,
-         self.proxy_netloc,
-         self.path,
-         self.query, self.fragment) = split_uri(uri)
-        self.url_scheme = self.url_scheme
-        connection = headers.get('CONNECTION', '')
-
-        if version == '1.0':
-            if connection.lower() != 'keep-alive':
-                self.connection_close = True
-
-        if version == '1.1':
-            te = headers.get('TRANSFER_ENCODING', '')
-            if te == 'chunked':
-                self.chunked = True
-                buf = OverflowableBuffer(self.inbuf_overflow)
-                self.body_rcv = ChunkedReceiver(buf)
-            expect = headers.get('EXPECT', '').lower()
-            self.expect_continue = expect == '100-continue'
-            if connection.lower() == 'close':
-                self.connection_close = True
-
-        if not self.chunked:
-            try:
-                cl = int(headers.get('CONTENT_LENGTH', 0))
-            except ValueError:
-                cl = 0
-            self.content_length = cl
-            if cl > 0:
-                buf = OverflowableBuffer(self.inbuf_overflow)
-                self.body_rcv = FixedStreamReceiver(cl, buf)
-
-    def get_body_stream(self):
-        body_rcv = self.body_rcv
-        if body_rcv is not None:
-            return body_rcv.getfile()
-        else:
-            return BytesIO()
-
-    def _close(self):
-        body_rcv = self.body_rcv
-        if body_rcv is not None:
-            body_rcv.getbuf()._close()
-
-
-def split_uri(uri):
-    # urlsplit handles byte input by returning bytes on py3, so
-    # scheme, netloc, path, query, and fragment are bytes
-    scheme, netloc, path, query, fragment = urlparse.urlsplit(uri)
-    return (
-        tostr(scheme),
-        tostr(netloc),
-        unquote_bytes_to_wsgi(path),
-        tostr(query),
-        tostr(fragment),
-        )
-
-
-def get_header_lines(header):
-    """
-    Splits the header into lines, putting multi-line headers together.
-    """
-    r = []
-    lines = header.split(b'\n')
-    for line in lines:
-        if line.startswith((b' ', b'\t')):
-            r[-1] = r[-1] + line[1:]
-        else:
-            r.append(line)
-    return r
-
-first_line_re = re.compile(
-b'([^ ]+) ((?:[^ :?#]+://[^ ?#/]*(?:[0-9]{1,5})?)?[^ ]+)(( HTTP/([0-9.]+))$|$)')
-
-
-def crack_first_line(line):
-    m = first_line_re.match(line)
-    if m is not None and m.end() == len(line):
-        if m.group(3):
-            version = m.group(5)
-        else:
-            version = None
-        command = m.group(1).upper()
-        uri = m.group(2)
-        return command, uri, version
-    else:
-        return b'', b'', b''

File ginsfsm/wsgi/receiver.py

-##############################################################################
-#
-# Copyright (c) 2001, 2002 Zope Foundation and Contributors.
-# All Rights Reserved.
-#
-# This software is subject to the provisions of the Zope Public License,
-# Version 2.1 (ZPL).  A copy of the ZPL should accompany this distribution.
-# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
-# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
-# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
-# FOR A PARTICULAR PURPOSE.
-#
-##############################################################################
-"""Data Chunk Receiver
-"""
-
-from ginsfsm.wsgi.utilities import (
-    find_double_newline,
-    BadRequest,
-    )
-
-
-class FixedStreamReceiver(object):
-
-    # See IStreamConsumer
-    completed = False
-    error = None
-
-    def __init__(self, cl, buf):
-        self.remain = cl
-        self.buf = buf
-
-    def received(self, data):
-        'See IStreamConsumer'
-        rm = self.remain
-        if rm < 1:
-            self.completed = True  # Avoid any chance of spinning
-            return 0
-        datalen = len(data)
-        if rm <= datalen:
-            self.buf.append(data[:rm])
-            self.remain = 0
-            self.completed = True
-            return rm
-        else:
-            self.buf.append(data)
-            self.remain -= datalen
-            return datalen
-
-    def getfile(self):
-        return self.buf.getfile()
-
-    def getbuf(self):
-        return self.buf
-
-
-class ChunkedReceiver(object):
-    chunk_remainder = 0
-    control_line = b''
-    all_chunks_received = False
-    trailer = b''
-    completed = False
-    error = None
-
-    # max_control_line = 1024
-    # max_trailer = 65536
-
-    def __init__(self, buf):
-        self.buf = buf
-
-    def received(self, s):
-        # Returns the number of bytes consumed.
-        if self.completed:
-            return 0
-        orig_size = len(s)
-        while s:
-            rm = self.chunk_remainder
-            if rm > 0:
-                # Receive the remainder of a chunk.
-                to_write = s[:rm]
-                self.buf.append(to_write)
-                written = len(to_write)
-                s = s[written:]
-                self.chunk_remainder -= written
-            elif not self.all_chunks_received:
-                # Receive a control line.
-                s = self.control_line + s
-                pos = s.find(b'\n')
-                if pos < 0:
-                    # Control line not finished.
-                    self.control_line = s
-                    s = ''
-                else:
-                    # Control line finished.
-                    line = s[:pos]
-                    s = s[pos + 1:]
-                    self.control_line = b''
-                    line = line.strip()
-                    if line:
-                        # Begin a new chunk.
-                        semi = line.find(b';')
-                        if semi >= 0:
-                            # discard extension info.
-                            line = line[:semi]
-                        try:
-                            sz = int(line.strip(), 16)  # hexadecimal
-                        except ValueError: # garbage in input
-                            self.error = BadRequest(
-                                'garbage in chunked encoding input')
-                            sz = 0
-                        if sz > 0:
-                            # Start a new chunk.
-                            self.chunk_remainder = sz
-                        else:
-                            # Finished chunks.
-                            self.all_chunks_received = True
-                    # else expect a control line.
-            else:
-                # Receive the trailer.
-                trailer = self.trailer + s
-                if trailer.startswith(b'\r\n'):
-                    # No trailer.
-                    self.completed = True
-                    return orig_size - (len(trailer) - 2)
-                elif trailer.startswith(b'\n'):
-                    # No trailer.
-                    self.completed = True
-                    return orig_size - (len(trailer) - 1)
-                pos = find_double_newline(trailer)
-                if pos < 0:
-                    # Trailer not finished.
-                    self.trailer = trailer
-                    s = b''
-                else:
-                    # Finished the trailer.
-                    self.completed = True
-                    self.trailer = trailer[:pos]
-                    return orig_size - (len(trailer) - pos)
-        return orig_size
-
-    def getfile(self):
-        return self.buf.getfile()
-
-    def getbuf(self):
-        return self.buf

File ginsfsm/wsgi/task.py

-##############################################################################
-#
-# Copyright (c) 2001, 2002 Zope Foundation and Contributors.
-# All Rights Reserved.
-#
-# This software is subject to the provisions of the Zope Public License,
-# Version 2.1 (ZPL).  A copy of the ZPL should accompany this distribution.
-# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
-# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
-# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
-# FOR A PARTICULAR PURPOSE.
-#
-##############################################################################
-
-import logging
-import socket
-import sys
-import time
-from ginsfsm.deferred import DeferredInterrupt
-
-from ginsfsm.buffers import ReadOnlyFileBasedBuffer
-
-from ginsfsm.compat import (
-    tobytes,
-    reraise,
-    )
-
-from ginsfsm.wsgi.utilities import (
-    build_http_date,
-    )
-
-rename_headers = {
-    'CONTENT_LENGTH' : 'CONTENT_LENGTH',
-    'CONTENT_TYPE'   : 'CONTENT_TYPE',
-    'CONNECTION'     : 'CONNECTION_TYPE',
-    }
-
-hop_by_hop = frozenset((
-    'connection',
-    'keep-alive',
-    'proxy-authenticate',
-    'proxy-authorization',
-    'te',
-    'trailers',
-    'transfer-encoding',
-    'upgrade'
-    ))
-
-
-class JustTesting(Exception):
-    pass
-
-
-class Task(object):
-    close_on_finish = False
-    status = '200 OK'
-    wrote_header = False
-    start_time = 0
-    content_length = None
-    content_bytes_written = 0
-    logged_write_excess = False
-    complete = False
-    chunked_response = False
-
-    def __init__(self, wsgi_server, gsock, request, ext_event=None):
-        self.wsgi_server = wsgi_server
-        self.gsock = gsock
-        self.request = request
-        self.ext_event = ext_event
-        self.response_headers = []
-        version = request.version
-        if version not in ('1.0', '1.1'):
-            # fall back to a version we support.
-            version = '1.0'
-        self.version = version
-
-    def service(self):
-        try:
-            try:
-                self.start()
-                self.execute()
-                self.finish()
-            except socket.error:
-                self.close_on_finish = True
-                raise  # ??? if self.gsock.adj.log_socket_errors:
-        except DeferredInterrupt:
-            raise
-        finally:
-            pass
-
-    def build_response_header(self):
-        version = self.version
-        # Figure out whether the connection should be closed.
-        connection = self.request.headers.get('CONNECTION', '').lower()
-        response_headers = self.response_headers
-        content_length_header = None
-        date_header = None
-        server_header = None
-        connection_close_header = None
-
-        for i, (headername, headerval) in enumerate(response_headers):
-            headername = '-'.join(
-                [x.capitalize() for x in headername.split('-')]
-                )
-            if headername == 'Content-Length':
-                content_length_header = headerval
-            if headername == 'Date':
-                date_header = headerval
-            if headername == 'Server':
-                server_header = headerval
-            if headername == 'Connection':
-                connection_close_header = headerval.lower()
-            # replace with properly capitalized version
-            response_headers[i] = (headername, headerval)
-
-        if content_length_header is None and self.content_length is not None:
-            content_length_header = str(self.content_length)
-            self.response_headers.append(
-                ('Content-Length', content_length_header)
-                )
-
-        def close_on_finish():
-            if connection_close_header is None:
-                response_headers.append(('Connection', 'close'))
-            self.close_on_finish = True
-
-        if version == '1.0':
-            if connection == 'keep-alive':
-                if not content_length_header:
-                    close_on_finish()
-                else:
-                    response_headers.append(('Connection', 'Keep-Alive'))
-            else:
-                close_on_finish()
-
-        elif version == '1.1':
-            if connection == 'close':
-                close_on_finish()
-
-            if not content_length_header:
-                response_headers.append(('Transfer-Encoding', 'chunked'))
-                self.chunked_response = True
-                if not self.close_on_finish:
-                    close_on_finish()
-
-            # under HTTP 1.1 keep-alive is default, no need to set the header
-        else:
-            raise AssertionError('neither HTTP/1.0 or HTTP/1.1')
-
-        # Set the Server and Date field, if not yet specified. This is needed
-        # if the server is used as a proxy.
-        ident = self.wsgi_server.identity
-        if not server_header:
-            response_headers.append(('Server', ident))
-        else:
-            response_headers.append(('Via', ident))
-        if not date_header:
-            response_headers.append(('Date', build_http_date(self.start_time)))
-
-        first_line = 'HTTP/%s %s' % (self.version, self.status)
-        next_lines = ['%s: %s' % hv for hv in sorted(self.response_headers)]
-        lines = [first_line] + next_lines
-        res = '%s\r\n\r\n' % '\r\n'.join(lines)
-        return tobytes(res)
-
-    def remove_content_length_header(self):
-        for i, (header_name, header_value) in enumerate(self.response_headers):
-            if header_name.lower() == 'content-length':
-                del self.response_headers[i]
-
-    def start(self):
-        self.start_time = time.time()
-
-    def finish(self):
-        if not self.wrote_header:
-            self.write(b'')
-        if self.chunked_response:
-            # not self.write, it will chunk it!
-            self.gsock.write_soon(b'0\r\n\r\n')
-        self.gsock.send_some()
-
-    def write(self, data):
-        if not self.complete:
-            raise RuntimeError('start_response was not called before body '
-                               'written')
-        gsock = self.gsock
-        if not self.wrote_header:
-            rh = self.build_response_header()
-            gsock.write_soon(rh)
-            self.wrote_header = True
-        if data:
-            towrite = data
-            cl = self.content_length
-            if self.chunked_response:
-                # use chunked encoding response
-                towrite = tobytes(hex(len(data))[2:].upper()) + b'\r\n'
-                towrite += data + b'\r\n'
-            elif cl is not None:
-                towrite = data[:cl - self.content_bytes_written]
-                self.content_bytes_written += len(towrite)
-                if towrite != data and not self.logged_write_excess:
-                    logging.error(
-                        'application-written content exceeded the number of '
-                        'bytes specified by Content-Length header (%s)' % cl)
-                    self.logged_write_excess = True
-            if towrite:
-                gsock.write_soon(towrite)
-
-
-class ErrorTask(Task):
-    """ An error task produces an error response """
-    complete = True
-
-    def execute(self):
-        e = self.request.error
-        body = '%s\r\n\r\n%s' % (e.reason, e.body)
-        tag = '\r\n\r\n(generated by ginsfsm.wsgi)'
-        body = body + tag
-        self.status = '%s %s' % (e.code, e.reason)
-        cl = len(body)
-        self.content_length = cl
-        self.response_headers.append(('Content-Length', str(cl)))
-        self.response_headers.append(('Content-Type', 'text/plain'))
-        self.response_headers.append(('Connection', 'close'))
-        self.close_on_finish = True
-        self.write(tobytes(body))
-
-
-class WSGITask(Task):
-    """A WSGI task produces a response from a WSGI application.
-    """
-    environ = None
-
-    def execute(self):
-        env = self.get_environment()
-
-        def start_response(status, headers, exc_info=None):
-            if self.complete and not exc_info:
-                raise AssertionError("start_response called a second time "
-                                     "without providing exc_info.")
-            if exc_info:
-                try:
-                    if self.complete:
-                        # higher levels will catch and handle raised exception:
-                        # 1. "service" method in task.py
-                        # 2. "service" method in gsock.py
-                        # 3. "handler_thread" method in task.py
-                        reraise(exc_info[0], exc_info[1], exc_info[2])
-                    else:
-                        # As per WSGI spec existing headers must be cleared
-                        self.response_headers = []
-                finally:
-                    exc_info = None
-
-            self.complete = True
-
-            if not status.__class__ is str:
-                raise AssertionError('status %s is not a string' % status)
-
-            self.status = status
-
-            # Prepare the headers for output
-            for k, v in headers:
-                if not k.__class__ is str:
-                    raise AssertionError(
-                        'Header name %r is not a string in %r' % (k, (k, v))
-                        )
-                if not v.__class__ is str:
-                    raise AssertionError(
-                        'Header value %r is not a string in %r' % (v, (k, v))
-                        )
-                kl = k.lower()
-                if kl == 'content-length':
-                    self.content_length = int(v)
-                elif kl in hop_by_hop:
-                    raise AssertionError(
-                        '%s is a "hop-by-hop" header; it cannot be used by '
-                        'a WSGI application (see PEP 3333)' % k)
-
-            self.response_headers.extend(headers)
-
-            # Return a method used to write the response data.
-            return self.write
-
-        # Call the application to handle the request and write a response
-        # TODO: do a multi wsgi-app
-        try:
-            app_iter = self.wsgi_server.application(env, start_response)
-        except DeferredInterrupt:
-            """ A gobj inside his gaplic need
-                to wait for data from another gaplic.
-            """
-            raise
-
-        try:
-            if app_iter.__class__ is ReadOnlyFileBasedBuffer:
-                cl = self.content_length
-                size = app_iter.prepare(cl)
-                if size:
-                    if cl != size:
-                        if cl is not None:
-                            self.remove_content_length_header()
-                        self.content_length = size
-                    self.write(b'')  # generate headers
-                    self.gsock.write_soon(app_iter)
-                    return
-
-            first_chunk_len = None
-            for chunk in app_iter:
-                if first_chunk_len is None:
-                    first_chunk_len = len(chunk)
-                    # Set a Content-Length header if one is not supplied.
-                    # start_response may not have been called until first
-                    # iteration as per PEP, so we must reinterrogate
-                    # self.content_length here
-                    if self.content_length is None:
-                        app_iter_len = None
-                        if hasattr(app_iter, '__len__'):
-                            app_iter_len = len(app_iter)
-                        if app_iter_len == 1:
-                            self.content_length = first_chunk_len
-                # transmit headers only after first iteration of the iterable
-                # that returns a non-empty bytestring (PEP 3333)
-                if chunk:
-                    self.write(chunk)
-
-            cl = self.content_length
-            if cl is not None:
-                if self.content_bytes_written != cl:
-                    # close the connection so the client isn't sitting around
-                    # waiting for more data when there are too few bytes
-                    # to service content-length
-                    self.close_on_finish = True
-                    logging.error(
-                        'application returned too few bytes (%s) '
-                        'for specified Content-Length (%s) via app_iter' % (
-                            self.content_bytes_written, cl),
-                        )
-        finally:
-            if hasattr(app_iter, 'close'):
-                app_iter.close()
-
-    def get_environment(self):
-        """Returns a WSGI environment."""
-        environ = self.environ
-        if environ is not None:
-            # Return the cached copy.
-            return environ
-
-        request = self.request
-        path = request.path
-        gsock = self.gsock
-        wsgi_server = self.wsgi_server
-
-        while path and path.startswith('/'):
-            path = path[1:]
-
-        environ = {}
-        environ['gsock'] = gsock
-        environ['ext_event'] = self.ext_event
-        environ['REQUEST_METHOD'] = request.command.upper()
-        environ['SERVER_PORT'] = str(wsgi_server.effective_port)
-        environ['SERVER_NAME'] = wsgi_server.server_name
-        environ['SERVER_SOFTWARE'] = wsgi_server.identity
-        environ['SERVER_PROTOCOL'] = 'HTTP/%s' % self.version
-        environ['SCRIPT_NAME'] = ''
-        environ['PATH_INFO'] = '/' + path
-        environ['QUERY_STRING'] = request.query
-        environ['REMOTE_ADDR'] = gsock.addr[0]
-
-        for key, value in request.headers.items():
-            value = value.strip()
-            mykey = rename_headers.get(key, None)
-            if mykey is None:
-                mykey = 'HTTP_%s' % key
-            if not mykey in environ:
-                environ[mykey] = value
-
-        # the following environment variables are required by the WSGI spec
-        environ['wsgi.version'] = (1, 0)
-        environ['wsgi.url_scheme'] = request.url_scheme
-        environ['wsgi.errors'] = sys.stderr  # apps should use the logging module
-        environ['wsgi.multithread'] = False
-        environ['wsgi.multiprocess'] = False
-        environ['wsgi.run_once'] = False
-        environ['wsgi.input'] = request.get_body_stream()
-        environ['wsgi.file_wrapper'] = ReadOnlyFileBasedBuffer
-
-        self.environ = environ
-        return environ

File ginsfsm/wsgi/utilities.py

-##############################################################################
-#
-# Copyright (c) 2004 Zope Foundation and Contributors.
-# All Rights Reserved.
-#
-# This software is subject to the provisions of the Zope Public License,
-# Version 2.1 (ZPL).  A copy of the ZPL should accompany this distribution.
-# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
-# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
-# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
-# FOR A PARTICULAR PURPOSE.
-#
-##############################################################################
-"""Utility functions
-"""
-
-import asyncore
-import logging
-import re
-import time
-import calendar
-
-logger = logging.getLogger('waitress')
-
-
-def find_double_newline(s):
-    """Returns the position just after a double newline in the given string."""
-    pos1 = s.find(b'\n\r\n')  # One kind of double newline
-    if pos1 >= 0:
-        pos1 += 3
-    pos2 = s.find(b'\n\n')    # Another kind of double newline
-    if pos2 >= 0:
-        pos2 += 2
-
-    if pos1 >= 0:
-        if pos2 >= 0:
-            return min(pos1, pos2)
-        else:
-            return pos1
-    else:
-        return pos2
-
-
-def concat(*args):
-    return ''.join(args)
-
-
-def join(seq, field=' '):
-    return field.join(seq)
-
-
-def group(s):
-    return '(' + s + ')'
-
-short_days = ['sun', 'mon', 'tue', 'wed', 'thu', 'fri', 'sat']
-long_days = ['sunday', 'monday', 'tuesday', 'wednesday',
-             'thursday', 'friday', 'saturday']
-
-short_day_reg = group(join(short_days, '|'))
-long_day_reg = group(join(long_days, '|'))
-
-daymap = {}
-for i in range(7):
-    daymap[short_days[i]] = i
-    daymap[long_days[i]] = i
-
-hms_reg = join(3 * [group('[0-9][0-9]')], ':')
-
-months = ['jan', 'feb', 'mar', 'apr', 'may', 'jun', 'jul',
-          'aug', 'sep', 'oct', 'nov', 'dec']
-
-monmap = {}
-for i in range(12):
-    monmap[months[i]] = i + 1
-
-months_reg = group(join(months, '|'))
-
-# From draft-ietf-http-v11-spec-07.txt/3.3.1
-#       Sun, 06 Nov 1994 08:49:37 GMT  ; RFC 822, updated by RFC 1123
-#       Sunday, 06-Nov-94 08:49:37 GMT ; RFC 850, obsoleted by RFC 1036
-#       Sun Nov  6 08:49:37 1994       ; ANSI C's asctime() format
-
-# rfc822 format
-rfc822_date = join(
-        [concat(short_day_reg, ','),            # day
-         group('[0-9][0-9]?'),                  # date
-         months_reg,                            # month
-         group('[0-9]+'),                       # year
-         hms_reg,                               # hour minute second
-         'gmt'
-         ],
-        ' '
-        )
-
-rfc822_reg = re.compile(rfc822_date)
-
-
-def unpack_rfc822(m):
-    g = m.group
-    return (
-            int(g(4)),             # year
-            monmap[g(3)],        # month
-            int(g(2)),             # day
-            int(g(5)),             # hour
-            int(g(6)),             # minute
-            int(g(7)),             # second
-            0,
-            0,
-            0
-            )
-
-    # rfc850 format
-rfc850_date = join(
-        [concat(long_day_reg, ','),
-         join(
-                 [group('[0-9][0-9]?'),
-                  months_reg,
-                  group('[0-9]+')
-                  ],
-                 '-'
-                 ),
-         hms_reg,
-         'gmt'
-         ],
-        ' '
-        )
-
-rfc850_reg = re.compile(rfc850_date)
-
-
-# they actually unpack the same way
-def unpack_rfc850(m):
-    g = m.group
-    yr = g(4)
-    if len(yr) == 2:
-        yr = '19' + yr
-    return (
-            int(yr),           # year
-            monmap[g(3)],        # month
-            int(g(2)),           # day
-            int(g(5)),           # hour
-            int(g(6)),           # minute
-            int(g(7)),           # second
-            0,
-            0,
-            0
-            )
-
-    # parsdate.parsedate        - ~700/sec.
-    # parse_http_date            - ~1333/sec.
-
-weekdayname = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun']
-monthname = [None, 'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
-             'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']
-
-
-def build_http_date(when):
-    year, month, day, hh, mm, ss, wd, y, z = time.gmtime(when)
-    return "%s, %02d %3s %4d %02d:%02d:%02d GMT" % (
-            weekdayname[wd],
-            day, monthname[month], year,
-            hh, mm, ss)
-
-
-def parse_http_date(d):
-    d = d.lower()
-    m = rfc850_reg.match(d)
-    if m and m.end() == len(d):
-        retval = int(calendar.timegm(unpack_rfc850(m)))
-    else:
-        m = rfc822_reg.match(d)
-        if m and m.end() == len(d):
-            retval = int(calendar.timegm(unpack_rfc822(m)))
-        else:
-            return 0
-    return retval
-
-
-class logging_dispatcher(asyncore.dispatcher):
-    logger = logger
-
-    def log_info(self, message, type='info'):
-        severity = {
-            'info': logging.INFO,
-            'warning': logging.WARN,
-            'error': logging.ERROR,
-            }
-        self.logger.log(severity.get(type, logging.INFO), message)
-
-
-class Error(object):
-    def __init__(self, body):
-        self.body = body
-
-
-class BadRequest(Error):
-    code = 400
-    reason = 'Bad Request'
-
-
-class RequestHeaderFieldsTooLarge(BadRequest):
-    code = 431
-    reason = 'Request Header Fields Too Large'
-
-
-class RequestEntityTooLarge(BadRequest):
-    code = 413
-    reason = 'Request Entity Too Large'
-
-
-class InternalServerError(Error):
-    code = 500
-    reason = 'Internal Server Error'