Commits

Mike Bayer committed 6026d32

update to new doc system

Comments (0)

Files changed (24)

 =======
 0.7.10
 ======
+- changelog
+  - The changelog has been moved to the documentation.
+    This file will be maintained throughout remaining
+    0.7 maintenance for backwards compatibility, but
+    is removed in 0.8.
+
 - orm
   - [bug] Fixed Session accounting bug whereby replacing
     a deleted object in the identity map with another

doc/build/builder/autodoc_mods.py

+import re
+
+def autodoc_skip_member(app, what, name, obj, skip, options):
+    """Sphinx ``autodoc-skip-member`` hook.
+
+    Force-include a small set of special methods on classes when they
+    carry a docstring; for everything else, defer to autodoc's own
+    skip decision.
+    """
+    if what == 'class' and skip and \
+        name in ('__init__', '__eq__', '__ne__', '__lt__',
+                    '__le__', '__call__') and \
+        obj.__doc__:
+        return False
+    else:
+        return skip
+
+# im sure this is in the app somewhere, but I don't really
+# know where, so we're doing it here.
+# Maps dotted class name -> class object, populated as autodoc
+# documents each class; consulted later for attributes/methods.
+_track_autodoced = {}
+def autodoc_process_docstring(app, what, name, obj, options, lines):
+    """Sphinx ``autodoc-process-docstring`` hook.
+
+    For attributes/methods documented via ``inherited-members``,
+    prepend a note identifying the superclass that actually defines
+    the member, wrapped in an ``inherited_member`` container.
+    """
+    if what == "class":
+        _track_autodoced[name] = obj
+    elif what in ("attribute", "method") and \
+        options.get("inherited-members"):
+        m = re.match(r'(.*?)\.([\w_]+)$', name)
+        if m:
+            clsname, attrname = m.group(1, 2)
+            if clsname in _track_autodoced:
+                cls = _track_autodoced[clsname]
+                # walk the MRO to find the defining class; if no class
+                # in the MRO defines it, supercls ends as the last MRO
+                # entry (typically ``object``)
+                for supercls in cls.__mro__:
+                    if attrname in supercls.__dict__:
+                        break
+                if supercls is not cls:
+                    lines[:0] = [
+                        ".. container:: inherited_member",
+                        "",
+                        "    *inherited from the* :%s:`.%s.%s` *%s of* :class:`.%s`" % (
+                                    "attr" if what == "attribute"
+                                    else "meth",
+                                    supercls.__name__,
+                                    attrname,
+                                    what,
+                                    supercls.__name__
+                                ),
+                        ""
+                    ]
+
+
+def setup(app):
+    """Register the autodoc event hooks with the Sphinx application."""
+    app.connect('autodoc-skip-member', autodoc_skip_member)
+    app.connect('autodoc-process-docstring', autodoc_process_docstring)
+

doc/build/builder/builders.py

-from sphinx.application import TemplateBridge
-from sphinx.builders.html import StandaloneHTMLBuilder
-from sphinx.highlighting import PygmentsBridge
-from sphinx.jinja2glue import BuiltinTemplateLoader
-from pygments import highlight
-from pygments.lexer import RegexLexer, bygroups, using
-from pygments.token import *
-from pygments.filter import Filter, apply_filters
-from pygments.lexers import PythonLexer, PythonConsoleLexer
-from pygments.formatters import HtmlFormatter, LatexFormatter
-import re
-from mako.lookup import TemplateLookup
-from mako.template import Template
-from mako import __version__
-import os
-
-rtd = os.environ.get('READTHEDOCS', None) == 'True'
-
-class MakoBridge(TemplateBridge):
-    def init(self, builder, *args, **kw):
-        self.jinja2_fallback = BuiltinTemplateLoader()
-        self.jinja2_fallback.init(builder, *args, **kw)
-
-        builder.config.html_context['release_date'] = builder.config['release_date']
-        builder.config.html_context['site_base'] = builder.config['site_base']
-
-        self.lookup = TemplateLookup(directories=builder.config.templates_path,
-            #format_exceptions=True,
-            imports=[
-                "from builder import util"
-            ]
-        )
-
-        if rtd:
-            import urllib2
-            template_url = builder.config['site_base'] + "/docs_base.mako"
-            template = urllib2.urlopen(template_url).read()
-            self.lookup.put_string("/rtd_base.mako", template)
-
-    def render(self, template, context):
-        template = template.replace(".html", ".mako")
-        context['prevtopic'] = context.pop('prev', None)
-        context['nexttopic'] = context.pop('next', None)
-        version = context['version']
-        pathto = context['pathto']
-
-        # RTD layout
-        if rtd:
-            # add variables if not present, such
-            # as if local test of READTHEDOCS variable
-            if 'MEDIA_URL' not in context:
-                context['MEDIA_URL'] = "http://media.readthedocs.org/"
-            if 'slug' not in context:
-                context['slug'] = context['project'].lower()
-            if 'url' not in context:
-                context['url'] = "/some/test/url"
-            if 'current_version' not in context:
-                context['current_version'] = "latest"
-
-            if 'name' not in context:
-                context['name'] = context['project'].lower()
-
-            context['rtd'] = True
-            context['toolbar'] = True
-            context['layout'] = "rtd_layout.mako"
-            context['base'] = "rtd_base.mako"
-            context['pdf_url'] = "%spdf/%s/%s/%s.pdf" % (
-                    context['MEDIA_URL'],
-                    context['slug'],
-                    context['current_version'],
-                    context['slug']
-            )
-        # local docs layout
-        else:
-            context['rtd'] = False
-            context['toolbar'] = False
-            context['layout'] = "layout.mako"
-            context['base'] = "static_base.mako"
-
-        context.setdefault('_', lambda x:x)
-        return self.lookup.get_template(template).render_unicode(**context)
-
-    def render_string(self, template, context):
-        # this is used for  .js, .css etc. and we don't have
-        # local copies of that stuff here so use the jinja render.
-        return self.jinja2_fallback.render_string(template, context)
-
-class StripDocTestFilter(Filter):
-    def filter(self, lexer, stream):
-        for ttype, value in stream:
-            if ttype is Token.Comment and re.match(r'#\s*doctest:', value):
-                continue
-            yield ttype, value
-
-class PyConWithSQLLexer(RegexLexer):
-    name = 'PyCon+SQL'
-    aliases = ['pycon+sql']
-
-    flags = re.IGNORECASE | re.DOTALL
-
-    tokens = {
-            'root': [
-                (r'{sql}', Token.Sql.Link, 'sqlpopup'),
-                (r'{opensql}', Token.Sql.Open, 'opensqlpopup'),
-                (r'.*?\n', using(PythonConsoleLexer))
-            ],
-            'sqlpopup':[
-                (
-                    r'(.*?\n)((?:PRAGMA|BEGIN|SELECT|INSERT|DELETE|ROLLBACK|COMMIT|ALTER|UPDATE|CREATE|DROP|PRAGMA|DESCRIBE).*?(?:{stop}\n?|$))',
-                    bygroups(using(PythonConsoleLexer), Token.Sql.Popup),
-                    "#pop"
-                )
-            ],
-            'opensqlpopup':[
-                (
-                    r'.*?(?:{stop}\n*|$)',
-                    Token.Sql,
-                    "#pop"
-                )
-            ]
-        }
-
-
-class PythonWithSQLLexer(RegexLexer):
-    name = 'Python+SQL'
-    aliases = ['pycon+sql']
-
-    flags = re.IGNORECASE | re.DOTALL
-
-    tokens = {
-            'root': [
-                (r'{sql}', Token.Sql.Link, 'sqlpopup'),
-                (r'{opensql}', Token.Sql.Open, 'opensqlpopup'),
-                (r'.*?\n', using(PythonLexer))
-            ],
-            'sqlpopup':[
-                (
-                    r'(.*?\n)((?:PRAGMA|BEGIN|SELECT|INSERT|DELETE|ROLLBACK|COMMIT|ALTER|UPDATE|CREATE|DROP|PRAGMA|DESCRIBE).*?(?:{stop}\n?|$))',
-                    bygroups(using(PythonLexer), Token.Sql.Popup),
-                    "#pop"
-                )
-            ],
-            'opensqlpopup':[
-                (
-                    r'.*?(?:{stop}\n*|$)',
-                    Token.Sql,
-                    "#pop"
-                )
-            ]
-        }
-
-
-def _strip_trailing_whitespace(iter_):
-    buf = list(iter_)
-    if buf:
-        buf[-1] = (buf[-1][0], buf[-1][1].rstrip())
-    for t, v in buf:
-        yield t, v
-
-class PopupSQLFormatter(HtmlFormatter):
-    def _format_lines(self, tokensource):
-        buf = []
-        for ttype, value in apply_filters(tokensource, [StripDocTestFilter()]):
-            if ttype in Token.Sql:
-                for t, v in HtmlFormatter._format_lines(self, iter(buf)):
-                    yield t, v
-                buf = []
-
-                if ttype is Token.Sql:
-                    yield 1, "<div class='show_sql'>%s</div>" % re.sub(r'(?:[{stop}|\n]*)$', '', value)
-                elif ttype is Token.Sql.Link:
-                    yield 1, "<a href='#' class='sql_link'>sql</a>"
-                elif ttype is Token.Sql.Popup:
-                    yield 1, "<div class='popup_sql'>%s</div>" % re.sub(r'(?:[{stop}|\n]*)$', '', value)
-            else:
-                buf.append((ttype, value))
-
-        for t, v in _strip_trailing_whitespace(HtmlFormatter._format_lines(self, iter(buf))):
-            yield t, v
-
-class PopupLatexFormatter(LatexFormatter):
-    def _filter_tokens(self, tokensource):
-        for ttype, value in apply_filters(tokensource, [StripDocTestFilter()]):
-            if ttype in Token.Sql:
-                if ttype is not Token.Sql.Link and ttype is not Token.Sql.Open:
-                    yield Token.Literal, re.sub(r'{stop}', '', value)
-                else:
-                    continue
-            else:
-                yield ttype, value
-
-    def format(self, tokensource, outfile):
-        LatexFormatter.format(self, self._filter_tokens(tokensource), outfile)
-
-def autodoc_skip_member(app, what, name, obj, skip, options):
-    if what == 'class' and skip and \
-        name in ('__init__', '__eq__', '__ne__', '__lt__', '__le__', '__call__') and \
-        obj.__doc__:
-        return False
-    else:
-        return skip
-
-def setup(app):
-    app.add_lexer('pycon+sql', PyConWithSQLLexer())
-    app.add_lexer('python+sql', PythonWithSQLLexer())
-    app.add_config_value('release_date', "", True)
-    app.add_config_value('site_base', "", True)
-    app.add_config_value('build_number', "", 1)
-    app.connect('autodoc-skip-member', autodoc_skip_member)
-    PygmentsBridge.html_formatter = PopupSQLFormatter
-    PygmentsBridge.latex_formatter = PopupLatexFormatter
-

doc/build/builder/changelog.py

+import re
+from sphinx.util.compat import Directive
+from docutils.statemachine import StringList
+from docutils import nodes, utils
+import textwrap
+import itertools
+import collections
+import md5
+
+def _comma_list(text):
+    """Split *text* on commas, trimming whitespace around each item."""
+    return re.split(r"\s*,\s*", text.strip())
+
+def _parse_content(content):
+    """Parse leading ``:field: value`` lines out of directive content.
+
+    Returns a dict of field name -> value, with the remaining lines
+    under the ``"text"`` key.
+    """
+    d = {}
+    d['text'] = []
+    idx = 0
+    for line in content:
+        idx += 1
+        m = re.match(r' *\:(.+?)\:(?: +(.+))?', line)
+        if m:
+            attrname, value = m.group(1, 2)
+            d[attrname] = value or ''
+        else:
+            break
+    # NOTE(review): the first non-field line is consumed by idx and
+    # excluded from 'text' — presumably the blank separator line; confirm
+    d["text"] = content[idx:]
+    return d
+
+
+class EnvDirective(object):
+    """Mixin giving directives shortcut access to the build environment."""
+
+    @property
+    def env(self):
+        # the Sphinx BuildEnvironment for the current document
+        return self.state.document.settings.env
+
+class ChangeLogDirective(EnvDirective, Directive):
+    """``.. changelog::`` directive.
+
+    Collects ``change`` records parsed from its body and renders them
+    as a titled section for one release version, grouped by the
+    configured sections and inner tags.
+    """
+    has_content = True
+
+    # key under which change records are accumulated in env.temp_data
+    type_ = "change"
+
+    # section used for records whose tags match no configured section
+    default_section = 'misc'
+
+    def _organize_by_section(self, changes):
+        """Bucket change records by (section, inner_tag).
+
+        Compound sections (e.g. "orm declarative") match records whose
+        tag set is a superset of all the section's words; otherwise the
+        first matching tag in the record's sorted tag order wins.
+        Returns (bysection dict, set of sections actually used).
+        """
+        compound_sections = [(s, s.split(" ")) for s in
+                                self.sections if " " in s]
+
+        bysection = collections.defaultdict(list)
+        all_sections = set()
+        for rec in changes:
+            inner_tag = rec['tags'].intersection(self.inner_tag_sort)
+            if inner_tag:
+                inner_tag = inner_tag.pop()
+            else:
+                inner_tag = ""
+
+            for compound, comp_words in compound_sections:
+                if rec['tags'].issuperset(comp_words):
+                    bysection[(compound, inner_tag)].append(rec)
+                    all_sections.add(compound)
+                    break
+            else:
+                intersect = rec['tags'].intersection(self.sections)
+                if intersect:
+                    for sec in rec['sorted_tags']:
+                        if sec in intersect:
+                            bysection[(sec, inner_tag)].append(rec)
+                            all_sections.add(sec)
+                            break
+                else:
+                    bysection[(self.default_section, inner_tag)].append(rec)
+        return bysection, all_sections
+
+    @classmethod
+    def changes(cls, env):
+        """Return the per-document list of accumulated change records."""
+        return env.temp_data['ChangeLogDirective_%s_changes' % cls.type_]
+
+    def _setup_run(self):
+        """Read config, reset the record list, and parse directive body.
+
+        The nested_parse triggers the contained ``change`` directives,
+        which append their records via :meth:`.changes`.
+        """
+        self.sections = self.env.config.changelog_sections
+        self.inner_tag_sort = self.env.config.changelog_inner_tag_sort + [""]
+        self.env.temp_data['ChangeLogDirective_%s_changes' % self.type_] = []
+        self._parsed_content = _parse_content(self.content)
+
+        p = nodes.paragraph('', '',)
+        self.state.nested_parse(self.content[1:], 0, p)
+
+    def run(self):
+        """Build and return the document nodes for this changelog."""
+        self._setup_run()
+        changes = self.changes(self.env)
+        output = []
+
+        self.version = version = self._parsed_content.get('version', '')
+        id_prefix = "%s-%s" % (self.type_, version)
+        topsection = self._run_top(id_prefix)
+        output.append(topsection)
+
+        bysection, all_sections = self._organize_by_section(changes)
+
+        # produces a unique per-record id suffix within this changelog
+        counter = itertools.count()
+
+        sections_to_render = [s for s in self.sections if s in all_sections]
+        if not sections_to_render:
+            # flat rendering: no named sections, just inner-tag order
+            for cat in self.inner_tag_sort:
+                append_sec = self._append_node()
+
+                for rec in bysection[(self.default_section, cat)]:
+                    rec["id"] = "%s-%s" % (id_prefix, next(counter))
+
+                    self._render_rec(rec, None, cat, append_sec)
+
+                if append_sec.children:
+                    topsection.append(append_sec)
+        else:
+            # sectioned rendering, with the default section last
+            for section in sections_to_render + [self.default_section]:
+                sec = nodes.section('',
+                        nodes.title(section, section),
+                        ids=["%s-%s" % (id_prefix, section.replace(" ", "-"))]
+                )
+
+                append_sec = self._append_node()
+                sec.append(append_sec)
+
+                for cat in self.inner_tag_sort:
+                    for rec in bysection[(section, cat)]:
+                        rec["id"] = "%s-%s" % (id_prefix, next(counter))
+                        self._render_rec(rec, section, cat, append_sec)
+
+                if append_sec.children:
+                    topsection.append(sec)
+
+        return output
+
+    def _append_node(self):
+        """Container node that receives rendered change records."""
+        return nodes.bullet_list()
+
+    def _run_top(self, id_prefix):
+        """Build the top-level section, titled with the version string."""
+        version = self._parsed_content.get('version', '')
+        topsection = nodes.section('',
+                nodes.title(version, version),
+                ids=[id_prefix]
+            )
+
+        if self._parsed_content.get("released"):
+            topsection.append(nodes.Text("Released: %s" %
+                        self._parsed_content['released']))
+        else:
+            topsection.append(nodes.Text("no release date"))
+        return topsection
+
+
+    def _render_rec(self, rec, section, cat, append_sec):
+        """Render one change record into *append_sec*.
+
+        Adds a stable md5-based permalink target, ticket / pull request /
+        changeset reference links, and a bold tag prefix.
+        """
+        para = rec['node'].deepcopy()
+
+        text = _text_rawsource_from_node(para)
+
+        # stable anchor derived from version + leading text of the entry
+        # NOTE(review): uses the legacy Python 2 ``md5`` module
+        to_hash = "%s %s" % (self.version, text[0:100])
+        targetid = "%s-%s" % (self.type_,
+                        md5.md5(to_hash.encode('ascii', 'ignore')
+                            ).hexdigest())
+        targetnode = nodes.target('', '', ids=[targetid])
+        para.insert(0, targetnode)
+        permalink = nodes.reference('', '',
+                        nodes.Text("(link)", "(link)"),
+                        refid=targetid,
+                        classes=['changeset-link']
+                    )
+        para.append(permalink)
+
+        insert_ticket = nodes.paragraph('')
+        para.append(insert_ticket)
+
+        i = 0
+        for collection, render, prefix in (
+                (rec['tickets'], self.env.config.changelog_render_ticket, "#%s"),
+                (rec['pullreq'], self.env.config.changelog_render_pullreq,
+                                            "pull request %s"),
+                (rec['changeset'], self.env.config.changelog_render_changeset, "r%s"),
+            ):
+            for refname in collection:
+                if i > 0:
+                    insert_ticket.append(nodes.Text(", ", ", "))
+                else:
+                    insert_ticket.append(nodes.Text(" ", " "))
+                i += 1
+                if render is not None:
+                    refuri = render % refname
+                    node = nodes.reference('', '',
+                            nodes.Text(prefix % refname, prefix % refname),
+                            refuri=refuri
+                        )
+                else:
+                    node = nodes.Text(prefix % refname, prefix % refname)
+                insert_ticket.append(node)
+
+        if rec['tags']:
+            # section/category tags first, then the remaining tags
+            tag_node = nodes.strong('',
+                        " ".join("[%s]" % t for t
+                            in
+                                [t1 for t1 in [section, cat]
+                                    if t1 in rec['tags']] +
+
+                                list(rec['tags'].difference([section, cat]))
+                        ) + " "
+                    )
+            para.children[0].insert(0, tag_node)
+
+        append_sec.append(
+            nodes.list_item('',
+                nodes.target('', '', ids=[rec['id']]),
+                para
+            )
+        )
+
+
+class ChangeDirective(EnvDirective, Directive):
+    """``.. change::`` directive.
+
+    Parses one change entry (tags, tickets, pull requests, changesets,
+    body text) and appends the record to the enclosing changelog's
+    accumulator; renders nothing itself.
+    """
+    has_content = True
+
+    type_ = "change"
+    parent_cls = ChangeLogDirective
+
+    def run(self):
+        """Parse content into a record dict and register it."""
+        content = _parse_content(self.content)
+        p = nodes.paragraph('', '',)
+        sorted_tags = _comma_list(content.get('tags', ''))
+        rec = {
+            'tags': set(sorted_tags).difference(['']),
+            'tickets': set(_comma_list(content.get('tickets', ''))).difference(['']),
+            'pullreq': set(_comma_list(content.get('pullreq', ''))).difference(['']),
+            'changeset': set(_comma_list(content.get('changeset', ''))).difference(['']),
+            'node': p,
+            'type': self.type_,
+            "title": content.get("title", None),
+            'sorted_tags': sorted_tags
+        }
+
+        # "declarative" entries are implicitly part of the orm section
+        if "declarative" in rec['tags']:
+            rec['tags'].add("orm")
+
+        self.state.nested_parse(content['text'], 0, p)
+        self.parent_cls.changes(self.env).append(rec)
+
+        return []
+
+def _text_rawsource_from_node(node):
+    """Concatenate the rawsource of all Text nodes under *node*.
+
+    Traverses the node tree breadth-first.
+    """
+    src = []
+    stack = [node]
+    while stack:
+        n = stack.pop(0)
+        if isinstance(n, nodes.Text):
+            src.append(n.rawsource)
+        stack.extend(n.children)
+    return "".join(src)
+
+def _rst2sphinx(text):
+    """Convert a plain rST string into a docutils StringList."""
+    return StringList(
+        [line.strip() for line in textwrap.dedent(text).split("\n")]
+    )
+
+
+def make_ticket_link(name, rawtext, text, lineno, inliner,
+                      options={}, content=[]):
+    """``:ticket:`` role: link a ticket number via changelog_render_ticket."""
+    env = inliner.document.settings.env
+    render_ticket = env.config.changelog_render_ticket or "%s"
+    prefix = "#%s"
+    # NOTE(review): render_ticket is always truthy because of the
+    # ``or "%s"`` fallback above, so the else branch appears unreachable
+    if render_ticket:
+        ref = render_ticket % text
+        node = nodes.reference(rawtext, prefix % text, refuri=ref, **options)
+    else:
+        node = nodes.Text(prefix % text, prefix % text)
+    return [node], []
+
+def setup(app):
+    """Register changelog directives, config values, and the ticket role."""
+    app.add_directive('changelog', ChangeLogDirective)
+    app.add_directive('change', ChangeDirective)
+    app.add_config_value("changelog_sections", [], 'env')
+    app.add_config_value("changelog_inner_tag_sort", [], 'env')
+    # render_* values are "%s"-style templates producing link URLs,
+    # or None to render plain text
+    app.add_config_value("changelog_render_ticket",
+            None,
+            'env'
+        )
+    app.add_config_value("changelog_render_pullreq",
+            None,
+            'env'
+        )
+    app.add_config_value("changelog_render_changeset",
+            None,
+            'env'
+        )
+    app.add_role('ticket', make_ticket_link)

doc/build/builder/dialect_info.py

+import re
+from sphinx.util.compat import Directive
+from docutils import nodes
+
+class DialectDirective(Directive):
+    """``.. dialect::`` directive.
+
+    A plain dialect name ("mysql") renders a database overview section
+    with a bullet list of its DBAPIs; a "dialect+dbapi" name renders a
+    DBAPI section and appends a bullet to the parent dialect's list.
+    """
+    has_content = True
+
+    # shared registry: dialect name -> directive instance; a dialect
+    # must be declared before any of its dialect+dbapi variants
+    _dialects = {}
+
+    def _parse_content(self):
+        """Parse ``:field: value`` lines following the dialect name.
+
+        Returns a dict of fields plus remaining lines under "text".
+        """
+        d = {}
+        d['default'] = self.content[0]
+        d['text'] = []
+        idx = 0
+        for line in self.content[1:]:
+            idx += 1
+            m = re.match(r'\:(.+?)\: +(.+)', line)
+            if m:
+                attrname, value = m.group(1, 2)
+                d[attrname] = value
+            else:
+                break
+        d["text"] = self.content[idx + 1:]
+        return d
+
+    def _dbapi_node(self):
+        """Render the section for one "dialect+dbapi" combination."""
+
+        dialect_name, dbapi_name = self.dialect_name.split("+")
+
+        try:
+            dialect_directive = self._dialects[dialect_name]
+        except KeyError:
+            raise Exception("No .. dialect:: %s directive has been established"
+                                    % dialect_name)
+
+        output = []
+
+        content = self._parse_content()
+
+        # anchor of the section enclosing this directive, used as the
+        # bullet link target on the parent dialect's page
+        parent_section_ref = self.state.parent.children[0]['ids'][0]
+        self._append_dbapi_bullet(dialect_name, dbapi_name,
+                                        content['name'], parent_section_ref)
+
+        p = nodes.paragraph('', '',
+                    nodes.Text(
+                        "Support for the %s database via the %s driver." % (
+                                dialect_directive.database_name,
+                                content['name']
+                        ),
+                        "Support for the %s database via the %s driver." % (
+                                dialect_directive.database_name,
+                                content['name']
+                        )
+                    ),
+        )
+
+        self.state.nested_parse(content['text'], 0, p)
+        output.append(p)
+
+        if "url" in content or "driverurl" in content:
+            sec = nodes.section(
+                    '',
+                    nodes.title("DBAPI", "DBAPI"),
+                    ids=["dialect-%s-%s-url" % (dialect_name, dbapi_name)]
+            )
+            if "url" in content:
+                text = "Documentation and download information (if applicable) "\
+                        "for %s is available at:\n" % content["name"]
+                uri = content['url']
+                sec.append(
+                    nodes.paragraph('', '',
+                        nodes.Text(text, text),
+                        nodes.reference('', '',
+                            nodes.Text(uri, uri),
+                            refuri=uri,
+                        )
+                    )
+                )
+            if "driverurl" in content:
+                text = "Drivers for this database are available at:\n"
+                sec.append(
+                    nodes.paragraph('', '',
+                        nodes.Text(text, text),
+                        nodes.reference('', '',
+                            nodes.Text(content['driverurl'], content['driverurl']),
+                            refuri=content['driverurl']
+                        )
+                    )
+                )
+            output.append(sec)
+
+
+        if "connectstring" in content:
+            sec = nodes.section(
+                    '',
+                    nodes.title("Connecting", "Connecting"),
+                    nodes.paragraph('', '',
+                        nodes.Text("Connect String:", "Connect String:"),
+                        nodes.literal_block(content['connectstring'],
+                            content['connectstring'])
+                    ),
+                    ids=["dialect-%s-%s-connect" % (dialect_name, dbapi_name)]
+            )
+            output.append(sec)
+
+        return output
+
+    def _dialect_node(self):
+        """Render the overview section for a bare dialect name."""
+        self._dialects[self.dialect_name] = self
+
+        content = self._parse_content()
+        self.database_name = content['name']
+
+        # bullet list filled in later by _append_dbapi_bullet() as the
+        # dialect's DBAPI variants are encountered
+        self.bullets = nodes.bullet_list()
+        text = "The following dialect/DBAPI options are available.  "\
+                "Please refer to individual DBAPI sections for connect information."
+        sec = nodes.section('',
+                nodes.paragraph('', '',
+                    nodes.Text(
+                        "Support for the %s database." % content['name'],
+                        "Support for the %s database." % content['name']
+                    ),
+                ),
+                nodes.title("DBAPI Support", "DBAPI Support"),
+                nodes.paragraph('', '',
+                    nodes.Text(text, text),
+                    self.bullets
+                ),
+                ids=["dialect-%s" % self.dialect_name]
+            )
+
+        return [sec]
+
+    def _append_dbapi_bullet(self, dialect_name, dbapi_name, name, idname):
+        """Add a cross-document bullet for a DBAPI to its dialect's list."""
+        env = self.state.document.settings.env
+        dialect_directive = self._dialects[dialect_name]
+
+        list_node = nodes.list_item('',
+                nodes.paragraph('', '',
+                    nodes.reference('', '',
+                                nodes.Text(name, name),
+                                refdocname=self.docname,
+                                refuri=env.app.builder.get_relative_uri(
+                                        dialect_directive.docname, self.docname) +
+                                            "#" + idname
+                            ),
+                    #nodes.Text(" ", " "),
+                    #nodes.reference('', '',
+                    #            nodes.Text("(connectstring)", "(connectstring)"),
+                    #            refdocname=self.docname,
+                    #            refuri=env.app.builder.get_relative_uri(
+                    #                    dialect_directive.docname, self.docname) +
+                    ##                        "#" + ("dialect-%s-%s-connect" %
+                    #                                (dialect_name, dbapi_name))
+                    #        )
+                    )
+            )
+        dialect_directive.bullets.append(list_node)
+
+    def run(self):
+        """Dispatch to DBAPI or dialect rendering based on the name."""
+        env = self.state.document.settings.env
+        self.docname = env.docname
+
+        self.dialect_name = dialect_name = self.content[0]
+
+        # a "+" in the name means "dialect+dbapi"
+        has_dbapi = "+" in dialect_name
+        if has_dbapi:
+            return self._dbapi_node()
+        else:
+            return self._dialect_node()
+
+def setup(app):
+    """Register the dialect directive with the Sphinx application."""
+    app.add_directive('dialect', DialectDirective)
+

doc/build/builder/mako.py

+from __future__ import absolute_import
+
+from sphinx.application import TemplateBridge
+from sphinx.jinja2glue import BuiltinTemplateLoader
+from mako.lookup import TemplateLookup
+import os
+
+rtd = os.environ.get('READTHEDOCS', None) == 'True'
+
+class MakoBridge(TemplateBridge):
+    """Sphinx TemplateBridge that renders HTML pages with Mako templates,
+    falling back to the builtin Jinja2 loader for non-page assets."""
+
+    def init(self, builder, *args, **kw):
+        """Set up the Mako lookup and the Jinja2 fallback loader."""
+        self.jinja2_fallback = BuiltinTemplateLoader()
+        self.jinja2_fallback.init(builder, *args, **kw)
+
+        builder.config.html_context['release_date'] = builder.config['release_date']
+        builder.config.html_context['site_base'] = builder.config['site_base']
+
+        self.lookup = TemplateLookup(directories=builder.config.templates_path,
+            #format_exceptions=True,
+            imports=[
+                "from builder import util"
+            ]
+        )
+
+        if rtd:
+            # on readthedocs, fetch the site's base template over HTTP
+            import urllib2
+            template_url = builder.config['site_base'] + "/docs_base.mako"
+            template = urllib2.urlopen(template_url).read()
+            self.lookup.put_string("/rtd_base.mako", template)
+
+    def render(self, template, context):
+        """Render a page template, mapping .html names onto .mako files."""
+        template = template.replace(".html", ".mako")
+        context['prevtopic'] = context.pop('prev', None)
+        context['nexttopic'] = context.pop('next', None)
+
+        # RTD layout
+        if rtd:
+            # add variables if not present, such
+            # as if local test of READTHEDOCS variable
+            if 'MEDIA_URL' not in context:
+                context['MEDIA_URL'] = "http://media.readthedocs.org/"
+            if 'slug' not in context:
+                context['slug'] = context['project'].lower()
+            if 'url' not in context:
+                context['url'] = "/some/test/url"
+            if 'current_version' not in context:
+                context['current_version'] = "latest"
+
+            if 'name' not in context:
+                context['name'] = context['project'].lower()
+
+            context['rtd'] = True
+            context['toolbar'] = True
+            context['layout'] = "rtd_layout.mako"
+            context['base'] = "rtd_base.mako"
+            context['pdf_url'] = "%spdf/%s/%s/%s.pdf" % (
+                    context['MEDIA_URL'],
+                    context['slug'],
+                    context['current_version'],
+                    context['slug']
+            )
+        # local docs layout
+        else:
+            context['rtd'] = False
+            context['toolbar'] = False
+            context['layout'] = "layout.mako"
+            context['base'] = "static_base.mako"
+
+        # no-op translation function for templates expecting gettext
+        context.setdefault('_', lambda x: x)
+        return self.lookup.get_template(template).render_unicode(**context)
+
+    def render_string(self, template, context):
+        # this is used for  .js, .css etc. and we don't have
+        # local copies of that stuff here so use the jinja render.
+        return self.jinja2_fallback.render_string(template, context)
+
+def setup(app):
+    """Install MakoBridge as the template bridge and add config values."""
+    app.config['template_bridge'] = "builder.mako.MakoBridge"
+    app.add_config_value('release_date', "", 'env')
+    app.add_config_value('site_base', "", 'env')
+    app.add_config_value('build_number', "", 'env')
+

doc/build/builder/sqlformatter.py

+from pygments.lexer import RegexLexer, bygroups, using
+from pygments.token import Token
+from pygments.filter import Filter
+from pygments.filter import apply_filters
+from pygments.lexers import PythonLexer, PythonConsoleLexer
+from sphinx.highlighting import PygmentsBridge
+from pygments.formatters import HtmlFormatter, LatexFormatter
+
+import re
+
+
+def _strip_trailing_whitespace(iter_):
+    """Yield (token, value) pairs with the last value right-stripped."""
+    buf = list(iter_)
+    if buf:
+        buf[-1] = (buf[-1][0], buf[-1][1].rstrip())
+    for t, v in buf:
+        yield t, v
+
+
+class StripDocTestFilter(Filter):
+    """Pygments filter dropping ``# doctest:`` directive comments."""
+
+    def filter(self, lexer, stream):
+        for ttype, value in stream:
+            if ttype is Token.Comment and re.match(r'#\s*doctest:', value):
+                continue
+            yield ttype, value
+
+class PyConWithSQLLexer(RegexLexer):
+    """Python-console lexer understanding {sql}/{opensql} SQL popups.
+
+    Delegates ordinary lines to PythonConsoleLexer; SQL popup regions
+    are emitted under the custom Token.Sql token family for the
+    formatters below.
+    """
+    name = 'PyCon+SQL'
+    aliases = ['pycon+sql']
+
+    flags = re.IGNORECASE | re.DOTALL
+
+    tokens = {
+            'root': [
+                (r'{sql}', Token.Sql.Link, 'sqlpopup'),
+                (r'{opensql}', Token.Sql.Open, 'opensqlpopup'),
+                (r'.*?\n', using(PythonConsoleLexer))
+            ],
+            'sqlpopup': [
+                (
+                    r'(.*?\n)((?:PRAGMA|BEGIN|SELECT|INSERT|DELETE|ROLLBACK|'
+                        'COMMIT|ALTER|UPDATE|CREATE|DROP|PRAGMA'
+                        '|DESCRIBE).*?(?:{stop}\n?|$))',
+                    bygroups(using(PythonConsoleLexer), Token.Sql.Popup),
+                    "#pop"
+                )
+            ],
+            'opensqlpopup': [
+                (
+                    r'.*?(?:{stop}\n*|$)',
+                    Token.Sql,
+                    "#pop"
+                )
+            ]
+        }
+
+
+class PythonWithSQLLexer(RegexLexer):
+    """Plain-Python lexer understanding {sql}/{opensql} SQL popups.
+
+    Same structure as PyConWithSQLLexer but delegates to PythonLexer.
+    """
+    name = 'Python+SQL'
+    # NOTE(review): alias duplicates PyConWithSQLLexer's — presumably a
+    # copy-paste leftover; lexers appear to be registered by explicit
+    # name via app.add_lexer() rather than by alias — confirm
+    aliases = ['pycon+sql']
+
+    flags = re.IGNORECASE | re.DOTALL
+
+    tokens = {
+            'root': [
+                (r'{sql}', Token.Sql.Link, 'sqlpopup'),
+                (r'{opensql}', Token.Sql.Open, 'opensqlpopup'),
+                (r'.*?\n', using(PythonLexer))
+            ],
+            'sqlpopup': [
+                (
+                    r'(.*?\n)((?:PRAGMA|BEGIN|SELECT|INSERT|DELETE|ROLLBACK'
+                        '|COMMIT|ALTER|UPDATE|CREATE|DROP'
+                        '|PRAGMA|DESCRIBE).*?(?:{stop}\n?|$))',
+                    bygroups(using(PythonLexer), Token.Sql.Popup),
+                    "#pop"
+                )
+            ],
+            'opensqlpopup': [
+                (
+                    r'.*?(?:{stop}\n*|$)',
+                    Token.Sql,
+                    "#pop"
+                )
+            ]
+        }
+
class PopupSQLFormatter(HtmlFormatter):
    """HTML formatter rendering Token.Sql.* tokens as popup / show-sql
    divs, delegating everything else to the standard HtmlFormatter."""

    def _format_lines(self, tokensource):
        buf = []
        for ttype, value in apply_filters(tokensource, [StripDocTestFilter()]):
            if ttype in Token.Sql:
                # flush buffered non-SQL tokens through the parent formatter
                # before emitting the SQL markup
                for t, v in HtmlFormatter._format_lines(self, iter(buf)):
                    yield t, v
                buf = []

                # strip trailing {stop} markers and newlines from the SQL.
                # BUGFIX: was r'(?:[{stop}|\n]*)$' -- a character class,
                # which also ate trailing 's','t','o','p','|' characters;
                # the intent is the literal "{stop}" marker or a newline.
                if ttype is Token.Sql:
                    yield 1, "<div class='show_sql'>%s</div>" % \
                                        re.sub(r'(?:{stop}|\n)*$', '', value)
                elif ttype is Token.Sql.Link:
                    yield 1, "<a href='#' class='sql_link'>sql</a>"
                elif ttype is Token.Sql.Popup:
                    yield 1, "<div class='popup_sql'>%s</div>" % \
                                        re.sub(r'(?:{stop}|\n)*$', '', value)
            else:
                buf.append((ttype, value))

        # flush whatever is left, trimming trailing whitespace from the
        # final token so the rendered block doesn't end with blank space
        for t, v in _strip_trailing_whitespace(
                        HtmlFormatter._format_lines(self, iter(buf))):
            yield t, v
+
class PopupLatexFormatter(LatexFormatter):
    """LaTeX formatter that flattens SQL popup tokens into plain literals.

    The interactive ``sql``-link and open-SQL markers have no meaning in
    print output, so they are dropped entirely; popup SQL text is emitted
    as a literal with its ``{stop}`` markers removed.
    """

    def _filter_tokens(self, tokensource):
        for ttype, value in apply_filters(tokensource, [StripDocTestFilter()]):
            if ttype not in Token.Sql:
                yield ttype, value
            elif ttype is Token.Sql.Link or ttype is Token.Sql.Open:
                # interactive markers: nothing to render in LaTeX
                continue
            else:
                yield Token.Literal, re.sub(r'{stop}', '', value)

    def format(self, tokensource, outfile):
        LatexFormatter.format(self, self._filter_tokens(tokensource), outfile)
+
def setup(app):
    """Sphinx extension entry point.

    Registers the SQL-popup lexers under their aliases and swaps in the
    popup-aware HTML/LaTeX formatters for all highlighted blocks.
    """
    for alias, lexer_cls in (
        ('pycon+sql', PyConWithSQLLexer),
        ('python+sql', PythonWithSQLLexer),
    ):
        app.add_lexer(alias, lexer_cls())

    PygmentsBridge.html_formatter = PopupSQLFormatter
    PygmentsBridge.latex_formatter = PopupLatexFormatter
+

doc/build/changelog/changelog_01.rst

+
+==============
+0.1 Changelog
+==============
+
+                
+.. changelog::
+    :version: 0.1.7
+    :released: Fri May 05 2006
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      some fixes to topological sort algorithm
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      added DISTINCT ON support to Postgres (just supply distinct=[col1,col2..])
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      added __mod__ (% operator) to sql expressions
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      "order_by" mapper property inherited from inheriting mapper
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      fix to column type used when mapper UPDATES/DELETEs
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      with convert_unicode=True, reflection was failing, has been fixed
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      types types types!  still weren't working....have to use TypeDecorator again :(
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      mysql binary type converts array output to buffer, fixes PickleType
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      fixed the attributes.py memory leak once and for all
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      unittests are qualified based on the databases that support each one
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      fixed bug where column defaults would clobber VALUES clause of insert objects
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      fixed bug where table def w/ schema name would force engine connection
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      fix for parenthesis to work correctly with subqueries in INSERT/UPDATE
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      HistoryArraySet gets extend() method
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      fixed lazyload support for other comparison operators besides =
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      lazyload fix where two comparisons in the join condition point to the
+      same column
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      added "construct_new" flag to mapper, will use __new__ to create instances
+      instead of __init__ (standard in 0.2)
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      added selectresults.py to SVN, missed it last time
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      tweak to allow a many-to-many relationship from a table to itself via
+      an association table
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      small fix to "translate_row" function used by polymorphic example
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      create_engine uses cgi.parse_qsl to read query string (out the window in 0.2)
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      tweaks to CAST operator
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      fixed function names LOCAL_TIME/LOCAL_TIMESTAMP -> LOCALTIME/LOCALTIMESTAMP
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      fixed order of ORDER BY/HAVING in compile
+
+.. changelog::
+    :version: 0.1.6
+    :released: Wed Apr 12 2006
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      support for MS-SQL added courtesy Rick Morrison, Runar Petursson
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      the latest SQLSoup from J. Ellis
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      ActiveMapper has preliminary support for inheritance (Jeff Watkins)
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      added a "mods" system which allows pluggable modules that modify/augment
+      core functionality, using the function "install_mods(*modnames)".
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      added the first "mod", SelectResults, which modifies mapper selects to
+      return generators that turn ranges into LIMIT/OFFSET queries
+      (Jonas Borgström)
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      factored out querying capabilities of Mapper into a separate Query object
+      which is Session-centric.  this improves the performance of mapper.using(session)
+      and makes other things possible.
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      objectstore/Session refactored, the official way to save objects is now
+      via the flush() method.  The begin/commit functionality of Session is factored
+      into LegacySession which is still established as the default behavior, until
+      the 0.2 series.
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      types system is bound to an engine at query compile time, not schema
+      construction time.  this simplifies the types system as well as the ProxyEngine.
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      added 'version_id' keyword argument to mapper. this keyword should reference a
+      Column object with type Integer, preferably non-nullable, which will be used on
+      the mapped table to track version numbers. this number is incremented on each
+      save operation and is specified in the UPDATE/DELETE conditions so that it
+      factors into the returned row count, which results in a ConcurrencyError if the
+      value received is not the expected count.
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      added 'entity_name' keyword argument to mapper. a mapper is now associated
+      with a class via the class object as well as an optional entity_name parameter,
+      which is a string defaulting to None. any number of primary mappers can be
+      created for a class, qualified by the entity name. instances of those classes
+      will issue all of their load and save operations through their
+      entity_name-qualified mapper, and maintain a separate identity in the identity
+      map for an otherwise equivalent object.
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      overhaul to the attributes system. code has been clarified, and also fixed to
+      support proper polymorphic behavior on object attributes.
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      added "for_update" flag to Select objects
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      some fixes for backrefs
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      fix for postgres1 DateTime type
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      documentation pages mostly switched over to Markdown syntax
+
+.. changelog::
+    :version: 0.1.5
+    :released: Mon Mar 27 2006
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      added SQLSession concept to SQLEngine. this object keeps track of retrieving a
+      connection from the connection pool as well as an in-progress transaction.
+      methods push_session() and pop_session() added to SQLEngine which push/pop a new
+      SQLSession onto the engine, allowing operation upon a second connection "nested"
+      within the previous one, allowing nested transactions. Other tricks are sure to
+      come later regarding SQLSession.
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      added nest_on argument to objectstore.Session. This is a single SQLEngine or
+      list of engines for which push_session()/pop_session() will be called each time
+      this Session becomes the active session (via objectstore.push_session() or
+      equivalent). This allows a unit of work Session to take advantage of the nested
+      transaction feature without explicitly calling push_session/pop_session on the
+      engine.
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      factored apart objectstore/unitofwork to separate "Session scoping" from
+      "uow commit heavy lifting"
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      added populate_instance() method to MapperExtension. allows an extension to
+      modify the population of object attributes. this method can call the
+      populate_instance() method on another mapper to proxy the attribute population
+      from one mapper to another; some row translation logic is also built in to help
+      with this.
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      fixed Oracle8-compatibility "use_ansi" flag which converts JOINs to
+      comparisons with the = and (+) operators, passes basic unittests
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      tweaks to Oracle LIMIT/OFFSET support
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      Oracle reflection uses ALL_** views instead of USER_** to get larger
+      list of stuff to reflect from
+
+    .. change::
+        :tags: 
+        :tickets: 105
+
+      fixes to Oracle foreign key reflection
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      objectstore.commit(obj1, obj2,...) adds an extra step to seek out private
+      relations on properties and delete child objects, even though its not a global
+      commit
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      lots and lots of fixes to mappers which use inheritance, strengthened the
+      concept of relations on a mapper being made towards the "local" table for that
+      mapper, not the tables it inherits.  allows more complex compositional patterns
+      to work with lazy/eager loading.
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      added support for mappers to inherit from others based on the same table,
+      just specify the same table as that of both parent/child mapper.
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      some minor speed improvements to the attributes system with regards to
+      instantiating and populating new objects.
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      fixed MySQL binary unit test
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      INSERTs can receive clause elements as VALUES arguments, not just literal
+      values
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      support for calling multi-tokened functions, i.e. schema.mypkg.func()
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      added J. Ellis' SQLSoup module to extensions package
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      added "polymorphic" examples illustrating methods to load multiple object types
+      from one mapper, the second of which uses the new populate_instance() method.
+      small improvements to mapper, UNION construct to help the examples along
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      improvements/fixes to session.refresh()/session.expire() (which may have
+      been called "invalidate" earlier..)
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      added session.expunge() which totally removes an object from the current
+      session
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      added *args, **kwargs pass-thru to engine.transaction(func) allowing easier
+      creation of transactionalizing decorator functions
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      added iterator interface to ResultProxy:  "for row in result:..."
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      added assertion to tx = session.begin(); tx.rollback(); tx.begin(), i.e. cant
+      use it after a rollback()
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      added date conversion on bind parameter fix to SQLite enabling dates to
+      work with pysqlite1
+
+    .. change::
+        :tags: 
+        :tickets: 116
+
+      improvements to subqueries to more intelligently construct their FROM
+      clauses
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      added PickleType to types.
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      fixed two bugs with column labels with regards to bind parameters: bind param
+      keynames they are now generated from a column "label" in all relevant cases to
+      take advantage of excess-name-length rules, and checks for a peculiar collision
+      against a column named the same as "tablename_colname" added
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      major overhaul to unit of work documentation, other documentation sections.
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      fixed attributes bug where if an object is committed, its lazy-loaded list got
+      blown away if it hadn't been loaded
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      added unique_connection() method to engine, connection pool to return a
+      connection that is not part of the thread-local context or any current
+      transaction
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      added invalidate() function to pooled connection.  will remove the connection
+      from the pool.  still need work for engines to auto-reconnect to a stale DB
+      though.
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      added distinct() function to column elements so you can do
+      func.count(mycol.distinct())
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      added "always_refresh" flag to Mapper, creates a mapper that will always
+      refresh the attributes of objects it gets/selects from the DB, overwriting any
+      changes made.
+
+.. changelog::
+    :version: 0.1.4
+    :released: Mon Mar 13 2006
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      create_engine() now uses genericized parameters; host/hostname,
+      db/dbname/database, password/passwd, etc. for all engine connections. makes
+      engine URIs much more "universal"
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      added support for SELECT statements embedded into a column clause, using the
+      flag "scalar=True"
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      another overhaul to EagerLoading when used in conjunction with mappers that
+      inherit; improvements to eager loads figuring out their aliased queries
+      correctly, also relations set up against a mapper with inherited mappers will
+      create joins against the table that is specific to the mapper itself (i.e. and
+      not any tables that are inherited/are further down the inheritance chain),
+      this can be overridden by using custom primary/secondary joins.
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      added J.Ellis patch to mapper.py so that selectone() throws an exception
+      if query returns more than one object row, selectfirst() to not throw the
+      exception. also adds selectfirst_by (synonymous with get_by) and selectone_by
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      added onupdate parameter to Column, will exec SQL/python upon an update
+      statement. Also adds "for_update=True" to all DefaultGenerator subclasses
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      added support for Oracle table reflection contributed by Andrija Zaric;
+      still some bugs to work out regarding composite primary keys/dictionary selection
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      checked in an initial Firebird module, awaiting testing.
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      added sql.ClauseParameters dictionary object as the result for
+      compiled.get_params(), does late-typeprocessing of bind parameters so
+      that the original values are easier to access
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      more docs for indexes, column defaults, connection pooling, engine construction
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      overhaul to the construction of the types system. uses a simpler inheritance
+      pattern so that any of the generic types can be easily subclassed, with no need
+      for TypeDecorator.
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      added "convert_unicode=False" parameter to SQLEngine, will cause all String
+      types to perform unicode encoding/decoding (makes Strings act like Unicodes)
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      added 'encoding="utf8"' parameter to engine.  the given encoding will be
+      used for all encode/decode calls within Unicode types as well as Strings
+      when convert_unicode=True.
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      improved support for mapping against UNIONs, added polymorph.py example
+      to illustrate multi-class mapping against a UNION
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      fix to SQLite LIMIT/OFFSET syntax
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      fix to Oracle LIMIT syntax
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      added backref() function, allows backreferences to have keyword arguments
+      that will be passed to the backref.
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      Sequences and ColumnDefault objects can do execute()/scalar() standalone
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      SQL functions (i.e. func.foo()) can do execute()/scalar() standalone
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      fix to SQL functions so that the ANSI-standard functions, i.e. current_timestamp
+      etc., do not specify parenthesis.  all other functions do.
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      added setattr_clean and append_clean to SmartProperty, which set
+      attributes without triggering a "dirty" event or any history. used as:
+      myclass.prop1.setattr_clean(myobject, 'hi')
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      improved support to column defaults when used by mappers; mappers will pull
+      pre-executed defaults from statement's executed bind parameters
+      (pre-conversion) to populate them into a saved object's attributes; if any
+      PassiveDefaults have fired off, will instead post-fetch the row from the DB to
+      populate the object.
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      added 'get_session().invalidate(*obj)' method to objectstore, instances will
+      refresh() themselves upon the next attribute access.
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      improvements to SQL func calls including an "engine" keyword argument so
+      they can be execute()d or scalar()ed standalone, also added func accessor to
+      SQLEngine
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      fix to MySQL4 custom table engines, i.e. TYPE instead of ENGINE
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      slightly enhanced logging, includes timestamps and a somewhat configurable
+      formatting system, in lieu of a full-blown logging system
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      improvements to the ActiveMapper class from the TG gang, including
+      many-to-many relationships
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      added Double and TinyInt support to mysql
+
+.. changelog::
+    :version: 0.1.3
+    :released: Thu Mar 02 2006
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      completed "post_update" feature, will add a second update statement before
+      inserts and after deletes in order to reconcile a relationship without any
+      dependencies being created; used when persisting two rows that are dependent
+      on each other
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      completed mapper.using(session) function, localized per-object Session
+      functionality; objects can be declared and manipulated as local to any
+      user-defined Session
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      fix to Oracle "row_number over" clause with multiple tables
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      mapper.get() was not selecting multiple-keyed objects if the mapper's table was a join,
+      such as in an inheritance relationship, this is fixed.
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      overhaul to sql/schema packages so that the sql package can run all on its own,
+      producing selects, inserts, etc. without any engine dependencies.  builds upon
+      new TableClause/ColumnClause lexical objects.  Schema's Table/Column objects
+      are the "physical" subclasses of them.  simplifies schema/sql relationship,
+      extensions (like proxyengine), and speeds overall performance by a large margin.
+      removes the entire getattr() behavior that plagued 0.1.1.
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      refactoring of how the mapper "synchronizes" data between two objects into a
+      separate module, works better with properties attached to a mapper that has an
+      additional inheritance relationship to one of the related tables, also the same
+      methodology used to synchronize parent/child objects now used by mapper to
+      synchronize between inherited and inheriting mappers.
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      made objectstore "check for out-of-identitymap" more aggressive, will perform the
+      check when object attributes are modified or the object is deleted
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      Index object fully implemented, can be constructed standalone, or via
+      "index" and "unique" arguments on Columns.
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      added "convert_unicode" flag to SQLEngine, will treat all String/CHAR types
+      as Unicode types, with raw-byte/utf-8 translation on the bind parameter and
+      result set side.
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      postgres maintains a list of ANSI functions that must have no parenthesis so
+      function calls with no arguments work consistently
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      tables can be created with no engine specified.  this will default their engine
+      to a module-scoped "default engine" which is a ProxyEngine.  this engine can
+      be connected via the function "global_connect".
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      added "refresh(*obj)" method to objectstore / Session to reload the attributes of
+      any set of objects from the database unconditionally
+
+.. changelog::
+    :version: 0.1.2
+    :released: Fri Feb 24 2006
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      fixed a recursive call in schema that was somehow running 994 times then returning
+      normally.  broke nothing, slowed down everything.  thanks to jpellerin for finding this.
+
+.. changelog::
+    :version: 0.1.1
+    :released: Thu Feb 23 2006
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      small fix to Function class so that expressions with a func.foo() use the type of the
+      Function object (i.e. the left side) as the type of the boolean expression, not the
+      other side which is more of a moving target (changeset 1020).
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      creating self-referring mappers with backrefs slightly easier (but still not that easy -
+      changeset 1019)
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      fixes to one-to-one mappings (changeset 1015)
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      psycopg1 date/time issue with None fixed (changeset 1005)
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      two issues related to postgres, which doesn't want to give you the "lastrowid"
+      since oids are deprecated:
+         * postgres database-side defaults that are on primary key cols *do* execute
+      explicitly beforehand, even though thats not the idea of a PassiveDefault.  this is
+      because sequences on columns get reflected as PassiveDefaults, but need to be explicitly
+      executed on a primary key col so we know what we just inserted.
+         * if you did add a row that has a bunch of database-side defaults on it,
+      and the PassiveDefault thing was working the old way, i.e. they just execute on
+      the DB side, the "cant get the row back without an OID" exception that occurred
+      also will not happen unless someone (usually the ORM) explicitly asks for it.
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      fixed a glitch with engine.execute_compiled where it was making a second
+      ResultProxy that just got thrown away.
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      began to implement newer logic in object properties.  you can now say
+      myclass.attr.property, which will give you the PropertyLoader corresponding to that
+      attribute, i.e. myclass.mapper.props['attr']
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      eager loading has been internally overhauled to use aliases at all times.  more
+      complicated chains of eager loads can now be created without any need for explicit
+      "use aliases"-type instructions.  EagerLoader code is also much simpler now.
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      a new somewhat experimental flag "use_update" added to relations, indicates that
+      this relationship should be handled by a second UPDATE statement, either after a
+      primary INSERT or before a primary DELETE.  handles circular row dependencies.
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      added exceptions module, all raised exceptions (except for some
+      KeyError/AttributeError exceptions) descend from these classes.
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      fix to date types with MySQL, returned timedelta converted to datetime.time
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      two-phase objectstore.commit operations (i.e. begin/commit) now return a
+      transactional object (SessionTrans), to more clearly indicate transaction boundaries.
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      Index object with create/drop support added to schema
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      fix to postgres, where it will explicitly pre-execute a PassiveDefault on a table
+      if it is a primary key column, pursuant to the ongoing "we cant get inserted rows
+      back from postgres" issue
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      change to information_schema query that gets back postgres table defs, now
+      uses explicit JOIN keyword, since one user had faster performance with 8.1
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      fix to engine.process_defaults so it works correctly with a table that has
+      different column name/column keys (changeset 982)
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      a column can only be attached to one table - this is now asserted
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      postgres time types descend from Time type
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      fix to alltests so that it runs types test (now named testtypes)
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      fix to Join object so that it correctly exports its foreign keys (cs 973)
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      creating relationships against mappers that use inheritance fixed (cs 973)

doc/build/changelog/changelog_02.rst

+
+==============
+0.2 Changelog
+==============
+
+                
+.. changelog::
+    :version: 0.2.8
+    :released: Tue Sep 05 2006
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      cleanup on connection methods + documentation.  custom DBAPI
+      arguments specified in query string, 'connect_args' argument
+      to 'create_engine', or custom creation function via 'creator'
+      function to 'create_engine'.
+
+    .. change::
+        :tags: 
+        :tickets: 274
+
+      added "recycle" argument to Pool, is "pool_recycle" on create_engine,
+      defaults to 3600 seconds; connections after this age will be closed and
+      replaced with a new one, to handle db's that automatically close
+      stale connections
+
+    .. change::
+        :tags: 
+        :tickets: 121
+
+      changed "invalidate" semantics with pooled connection; will
+      instruct the underlying connection record to reconnect the next
+      time its called.  "invalidate" will also automatically be called
+      if any error is thrown in the underlying call to connection.cursor().
+      this will hopefully allow the connection pool to reconnect to a
+      database that had been stopped and started without restarting
+      the connecting application
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      eesh !  the tutorial doctest was broken for quite some time.
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      add_property() method on mapper does a "compile all mappers"
+      step in case the given property references a non-compiled mapper
+      (as it did in the case of the tutorial !)
+
+    .. change::
+        :tags: 
+        :tickets: 277
+
+      check for pg sequence already existing before create
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      if a contextual session is established via MapperExtension.get_session
+      (as it is using the sessioncontext plugin, etc), a lazy load operation
+      will use that session by default if the parent object is not
+      persistent with a session already.
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      lazy loads will not fire off for an object that does not have a
+      database identity (why?
+      see http://www.sqlalchemy.org/trac/wiki/WhyDontForeignKeysLoadData)
+
+    .. change::
+        :tags: 
+        :tickets: 
+
+      unit-of-work does a better check for "orphaned" objects that are