Kirill Simonov committed 38835c3

Renamed Flow -> Space.


Files changed (25)

src/htsql/core/tr/__init__.py

 """
 
 
-from . import (assemble, binding, bind, coerce, compile, dump, encode, flow,
+from . import (assemble, binding, bind, coerce, compile, dump, encode, space,
         fn, frame, lookup, plan, reduce, rewrite, signature, stitch, term,
         translate)
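
In practice, this hunk means the translator package now exposes a `space` submodule where it used to expose `flow`; any code importing the old path follows the rename. A minimal before/after sketch (names taken from the hunks below, where `Flow` itself becomes `Space`):

    # Before this commit:
    #   from htsql.core.tr.flow import Flow, Unit
    # After this commit:
    from htsql.core.tr.space import Space, Unit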
 

src/htsql/core/tr/assemble.py

 #
 
 
-"""
-:mod:`htsql.core.tr.assemble`
-=============================
-
-This module implements the assembling process.
-"""
-
-
 from ..util import Printable, Hashable, listof, maybe
 from ..adapter import Adapter, adapt, adapt_many
 from ..domain import BooleanDomain, UntypedDomain, Record, ID
 from .coerce import coerce
 from .binding import WeakSegmentBinding
-from .flow import (Code, SegmentCode, LiteralCode, FormulaCode, CastCode,
-                   RecordCode, IdentityCode, AnnihilatorCode, CorrelationCode,
-                   Unit, ColumnUnit, CompoundUnit)
-from .term import (PreTerm, Term, UnaryTerm, BinaryTerm, TableTerm,
-                   ScalarTerm, FilterTerm, JoinTerm, CorrelationTerm,
-                   EmbeddingTerm, ProjectionTerm, OrderTerm, SegmentTerm,
-                   QueryTerm)
-from .frame import (ScalarFrame, TableFrame, NestedFrame,
-                    SegmentFrame, QueryFrame,
-                    LiteralPhrase, TruePhrase, CastPhrase,
-                    ColumnPhrase, ReferencePhrase, EmbeddingPhrase,
-                    FormulaPhrase, Anchor, LeadingAnchor)
+from .space import (Code, SegmentCode, LiteralCode, FormulaCode, CastCode,
+        RecordCode, IdentityCode, AnnihilatorCode, CorrelationCode, Unit,
+        ColumnUnit, CompoundUnit)
+from .term import (PreTerm, Term, UnaryTerm, BinaryTerm, TableTerm, ScalarTerm,
+        FilterTerm, JoinTerm, CorrelationTerm, EmbeddingTerm, ProjectionTerm,
+        OrderTerm, SegmentTerm, QueryTerm)
+from .frame import (ScalarFrame, TableFrame, NestedFrame, SegmentFrame,
+        QueryFrame, LiteralPhrase, TruePhrase, CastPhrase, ColumnPhrase,
+        ReferencePhrase, EmbeddingPhrase, FormulaPhrase, Anchor, LeadingAnchor)
 from .signature import (Signature, IsEqualSig, IsTotallyEqualSig, IsInSig,
-                        IsNullSig, NullIfSig, IfNullSig, CompareSig,
-                        AndSig, OrSig, NotSig, SortDirectionSig, ToPredicateSig,
-                        FromPredicateSig)
+        IsNullSig, NullIfSig, IfNullSig, CompareSig, AndSig, OrSig, NotSig,
+        SortDirectionSig, ToPredicateSig, FromPredicateSig)
 
 
 class Claim(Hashable, Printable):
     are equal if their units, brokers, and targets are equal to each
     other.
 
-    `unit` (:class:`htsql.core.tr.flow.Unit`)
+    `unit` (:class:`htsql.core.tr.space.Unit`)
         The exported unit.
 
     `broker` (an integer)
 
         See also the `offsprings` attribute of :class:`htsql.core.tr.term.Term`.
 
-    `routes` (a dictionary `Unit | Flow -> tag`)
+    `routes` (a dictionary `Unit | Space -> tag`)
         Maps a unit to a term capable of evaluating the unit.
 
         The `routes` table is used when generating unit claims
         to determine the target term by the unit.
 
-        A key of the `routes` table is either a :class:`htsql.core.tr.flow.Unit`
-        node or a :class:`htsql.core.tr.flow.Flow` node.  The latter indicates
+        A key of the `routes` table is either a :class:`htsql.core.tr.space.Unit`
+        node or a :class:`htsql.core.tr.space.Space` node.  The latter indicates
         that the corresponding term is capable of exporting any primitive
-        unit from the given flow.
+        unit from the given space.
 
         See also the `routes` attribute of :class:`htsql.core.tr.term.Term`.
 
         capable of evaluating the unit and returns the corresponding
         :class:`Claim` object.
 
-        `unit` (:class:`htsql.core.tr.flow.Unit`)
+        `unit` (:class:`htsql.core.tr.space.Unit`)
             The unit to make a claim for.
         """
         # To make a claim, we need to find two terms:
 
         ## Extract the (tag of the) target term from the current routing
         ## table.  Recall that `routes` does not keep primitive units directly,
-        ## instead a flow node represents all primitive units that belong
-        ## to that flow.
+        ## instead a space node represents all primitive units that belong
+        ## to that space.
         #if unit.is_primitive:
-        #    assert unit.flow in self.gate.routes
-        #    target = self.gate.routes[unit.flow]
+        #    assert unit.space in self.gate.routes
+        #    target = self.gate.routes[unit.space]
         #if unit.is_compound:
         #    assert unit in self.gate.routes
         #    target = self.gate.routes[unit]
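
The commented-out block above spells out how a claim finds its target term in the routing table; restated as a standalone helper (illustrative only, not part of this commit):

    def find_target(routes, unit):
        # Primitive units are routed through their space; compound units
        # are keyed in the routing table directly.
        if unit.is_primitive:
            assert unit.space in routes
            return routes[unit.space]
        if unit.is_compound:
            assert unit in routes
            return routes[unit]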
         """
         Appoints and assigns claims for all units of the given code.
 
-        `code` (:class:`htsql.core.tr.flow.Code`)
+        `code` (:class:`htsql.core.tr.space.Code`)
             A code object to schedule.
 
         `dispatcher` (:class:`htsql.core.tr.term.Term` or ``None``)
         It is assumed that the code node was previously scheduled
         with :meth:`schedule` and all the claims were satisfied.
 
-        `code` (:class:`htsql.core.tr.flow.Code`)
+        `code` (:class:`htsql.core.tr.space.Code`)
             The code node to evaluate.
 
         `dispatcher` (:class:`htsql.core.tr.term.Term` or ``None``)
             phrase = self.state.evaluate(code, router=self.term.kid)
             group.append(phrase)
         # It may happen that the kernel of the projection is empty, which
-        # means the range of the projection is the scalar flow.  SQL
+        # means the range of the projection is the scalar space.  SQL
         # recognizes scalar projections by detecting an aggregate in
         # the `SELECT` list, so, technically, we could keep the `GROUP BY`
         # list empty.  However, when collapsing frames, we must be able
 
     The adapter is polymorphic on the `Code` argument.
 
-    `code` (:class:`htsql.core.tr.flow.Code`)
+    `code` (:class:`htsql.core.tr.space.Code`)
         The code node to translate.
 
     `state` (:class:`AssemblingState`)
     Evaluates a formula node.
 
     This is an auxiliary adapter used to evaluate
-    :class:`htsql.core.tr.flow.FormulaCode` nodes.  The adapter is polymorphic
+    :class:`htsql.core.tr.space.FormulaCode` nodes.  The adapter is polymorphic
     on the formula signature.
 
     Unless overridden, the adapter evaluates the arguments of the formula
     and generates a new formula phrase with the same signature.
 
-    `code` (:class:`htsql.core.tr.flow.FormulaCode`)
+    `code` (:class:`htsql.core.tr.space.FormulaCode`)
         The formula node to evaluate.
 
     `state` (:class:`AssemblingState`)

src/htsql/core/tr/binding.py

 
     A binding graph is an intermediate representation of an HTSQL query.
     It is constructed from the syntax tree by the *binding* process and
-    further translated to the flow graph by the *encoding* process.
+    further translated to the space graph by the *encoding* process.
 
     A binding node represents an HTSQL expression or a naming scope (or both).
     Each binding node keeps a reference to the scope in which it was created;
     """
     A quotient scope.
 
-    A quotient of the `seed` flow by the given `kernels` is a flow of
+    A quotient of the `seed` space by the given `kernels` is a space of
     all unique values of ``kernels`` as it ranges over ``seed``.
 
     `seed`: :class:`Binding`
 
 class ClipBinding(CoverBinding):
     """
-    A slice of a flow.
+    A slice of a space.
 
     `order`: [(:class:`Binding`, ``+1`` or ``-1``)]
         Expressions to sort by.
     Represents a segment of an HTSQL query.
 
     `seed` (:class:`Binding` or ``None``)
-        The output flow.  If not set explicitly, should be inferred from
+        The output space.  If not set explicitly, should be inferred from
         `elements`.
 
     `elements` (a list of :class:`Binding`)
     """
     Represents a sorting expression.
 
-    A sort binding specifies the row order for the flow generated by the
-    `base` binding.  It may also apply a slice to the flow.
+    A sort binding specifies the row order for the space generated by the
+    `base` binding.  It may also apply a slice to the space.
 
     `order` (a list of :class:`Binding`)
         The expressions by which the base rows are sorted.
     """
     Represents a selector expression (``{...}`` operator).
 
-    A selector specifies output columns of a flow.
+    A selector specifies output columns of a space.
 
     `elements` (a list of :class:`Binding`)
         The output columns.
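
Earlier in this file, `QuotientBinding` defines a quotient as the space of all unique kernel values taken over the seed. The same idea in a few lines of plain Python (illustrative, with a list of rows standing in for the seed space):

    # seed: rows of the seed space; kernel: an expression evaluated per row.
    seed = [{"campus": "north"}, {"campus": "south"}, {"campus": "north"}]
    kernel = lambda row: row["campus"]

    # The quotient is the set of distinct kernel values over the seed.
    quotient = sorted(set(kernel(row) for row in seed))
    print(quotient)   # ['north', 'south']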

src/htsql/core/tr/compile.py

 #
 
 
-"""
-:mod:`htsql.core.tr.compile`
-============================
-
-This module implements the compiling process.
-"""
-
-
 from ..util import maybe, listof
 from ..adapter import Adapter, adapt, adapt_many
 from ..domain import BooleanDomain, IntegerDomain
 from ..error import Error, translate_guard
 from .coerce import coerce
 from .signature import (IsNullSig, IsEqualSig, AndSig, CompareSig,
-                        SortDirectionSig, RowNumberSig)
-from .flow import (Expression, QueryExpr, SegmentCode, Code, LiteralCode,
-        FormulaCode, Flow, RootFlow, ScalarFlow, TableFlow, QuotientFlow,
-        ComplementFlow, MonikerFlow, LocatorFlow, ForkedFlow, AttachFlow,
-        ClippedFlow, FilteredFlow, OrderedFlow, Unit, ScalarUnit, ColumnUnit,
-        AggregateUnit, CorrelatedUnit, KernelUnit, CoveringUnit,
+        SortDirectionSig, RowNumberSig)
+from .space import (Expression, QueryExpr, SegmentCode, Code, LiteralCode,
+        FormulaCode, Space, RootSpace, ScalarSpace, TableSpace, QuotientSpace,
+        ComplementSpace, MonikerSpace, LocatorSpace, ForkedSpace, AttachSpace,
+        ClippedSpace, FilteredSpace, OrderedSpace, Unit, ScalarUnit,
+        ColumnUnit, AggregateUnit, CorrelatedUnit, KernelUnit, CoveringUnit,
         CorrelationCode)
 from .term import (Term, ScalarTerm, TableTerm, FilterTerm, JoinTerm,
         EmbeddingTerm, CorrelationTerm, ProjectionTerm, OrderTerm, WrapperTerm,
 
     State attributes:
 
-    `root` (:class:`htsql.core.tr.flow.RootFlow`)
-        The root flow.
+    `root` (:class:`htsql.core.tr.space.RootSpace`)
+        The root space.
 
-    `baseline` (:class:`htsql.core.tr.flow.Flow`)
+    `baseline` (:class:`htsql.core.tr.space.Space`)
         When compiling a new term, indicates the leftmost axis that must
-        exported by the term.  Note that the baseline flow is always
+        be exported by the term.  Note that the baseline space is always
         inflated.
     """
 
     def __init__(self):
         # The next term tag to be produced by `tag`.
         self.next_tag = 1
-        # The root scalar flow.
+        # The root scalar space.
         self.root = None
-        # The stack of previous baseline flows.
+        # The stack of previous baseline spaces.
         self.baseline_stack = []
-        # The current baseline flow.
+        # The current baseline space.
         self.baseline = None
         # Support for nested segments.
-        self.superflow_stack = []
-        self.superflow = None
+        self.superspace_stack = []
+        self.superspace = None
 
     def tag(self):
         """
         self.next_tag += 1
         return tag
 
-    def set_root(self, flow):
+    def set_root(self, space):
         """
-        Initializes the root, baseline and mask flows.
+        Initializes the root, baseline and mask spaces.
 
         This function must be called before state attributes `root`,
         `baseline` and `mask` could be used.
 
-        `flow` (:class:`htsql.core.tr.flow.RootFlow`)
-            A root scalar flow.
+        `space` (:class:`htsql.core.tr.space.RootSpace`)
+            A root scalar space.
         """
-        assert isinstance(flow, RootFlow)
-        # Check that the state flows are not yet initialized.
+        assert isinstance(space, RootSpace)
+        # Check that the state spaces are not yet initialized.
         assert self.root is None
         assert self.baseline is None
-        assert self.superflow is None
-        self.root = flow
-        self.baseline = flow
-        self.superflow = flow
+        assert self.superspace is None
+        self.root = space
+        self.baseline = space
+        self.superspace = space
 
     def flush(self):
         """
-        Clears the state flows.
+        Clears the state spaces.
         """
-        # Check that the state flows are initialized and the flow stacks
+        # Check that the state spaces are initialized and the space stacks
         # are exhausted.
         assert self.root is not None
         assert not self.baseline_stack
         assert self.baseline is self.root
         self.root = None
         self.baseline = None
-        self.superflow = None
+        self.superspace = None
 
     def push_baseline(self, baseline):
         """
-        Sets a new baseline flow.
+        Sets a new baseline space.
 
-        This function masks the current baseline flow.  To restore
-        the previous baseline flow, use :meth:`pop_baseline`.
+        This function masks the current baseline space.  To restore
+        the previous baseline space, use :meth:`pop_baseline`.
 
-        `baseline` (:class:`htsql.core.tr.flow.Flow`)
-            The new baseline flow.  Note that the baseline flow
+        `baseline` (:class:`htsql.core.tr.space.Space`)
+            The new baseline space.  Note that the baseline space
             must be inflated.
         """
-        assert isinstance(baseline, Flow) and baseline.is_inflated
+        assert isinstance(baseline, Space) and baseline.is_inflated
         self.baseline_stack.append(self.baseline)
         self.baseline = baseline
 
     def pop_baseline(self):
         """
-        Restores the previous baseline flow.
+        Restores the previous baseline space.
         """
         self.baseline = self.baseline_stack.pop()
 
-    def push_superflow(self, flow):
-        self.superflow_stack.append(self.superflow)
-        self.superflow = flow
+    def push_superspace(self, space):
+        self.superspace_stack.append(self.superspace)
+        self.superspace = space
 
-    def pop_superflow(self):
-        self.superflow = self.superflow_stack.pop()
+    def pop_superspace(self):
+        self.superspace = self.superspace_stack.pop()
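
Both push/pop pairs above implement a plain save-and-restore stack. A hypothetical caller that needs a temporary baseline for a nested compilation would bracket it roughly like this (illustrative sketch, not a quote from the codebase; the `compile()` method below takes a `baseline` argument for the same purpose):

    # Illustrative: temporarily narrow the baseline for a nested compilation.
    state.push_baseline(inflated_axis)
    try:
        term = state.compile(expression)
    finally:
        state.pop_baseline()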
 
     def compile(self, expression, baseline=None):
         """
         Compiles a new term node for the given expression.
 
-        `expression` (:class:`htsql.core.tr.flow.Expression`)
+        `expression` (:class:`htsql.core.tr.space.Expression`)
             An expression node.
 
-        `baseline` (:class:`htsql.core.tr.flow.Flow` or ``None``)
-            The baseline flow.  Specifies an axis flow that the compiled
-            term must export.  If not set, the current baseline flow of
+        `baseline` (:class:`htsql.core.tr.space.Space` or ``None``)
+            The baseline space.  Specifies an axis space that the compiled
+            term must export.  If not set, the current baseline space of
             the state is used.
 
-            When `expression` is a flow, the generated term must
-            export the flow itself as well as all inflated prefixes
-            up to the `baseline` flow.  It may (but it is not required)
+            When `expression` is a space, the generated term must
+            export the space itself as well as all inflated prefixes
+            up to the `baseline` space.  It may (but it is not required)
             export other axes as well.
         """
         # FIXME: potentially, we could implement a cache of `expression`
         # -> `term` to avoid generating the same term node more than once.
         # There are several complications though.  First, the term depends
         # not only on the expression, but also on the current baseline
-        # and mask flows.  Second, each compiled term must have a unique
+        # and mask spaces.  Second, each compiled term must have a unique
         # tag, therefore we'd have to replace the tags and route tables
         # of the cached term node.
         with translate_guard(expression):
         `term` (:class:`htsql.core.tr.term.Term`)
             A term node.
 
-        `expressions` (a list of :class:`htsql.core.tr.flow.Expression`)
+        `expressions` (a list of :class:`htsql.core.tr.space.Expression`)
             A list of expressions to inject into the given term.
         """
         assert isinstance(term, Term)
 
     # Utility functions used by implementations.
 
-    def compile_shoot(self, flow, trunk, codes=None):
+    def compile_shoot(self, space, trunk, codes=None):
         """
-        Compiles a term corresponding to the given flow.
+        Compiles a term corresponding to the given space.
 
         The compiled term is called *a shoot term* (relatively to
         the given *trunk term*).
 
-        `flow` (:class:`htsql.core.tr.flow.Flow`)
-            A flow node, for which the we compile a term.
+        `space` (:class:`htsql.core.tr.space.Space`)
+            A space node for which we compile a term.
 
-        `trunk` (:class:`htsql.core.tr.flow.Flow` or :class:`htsql.core.tr.term.Term`)
+        `trunk` (:class:`htsql.core.tr.space.Space` or :class:`htsql.core.tr.term.Term`)
            Expresses a promise that the compiled term will be
            (eventually) joined to a term corresponding to the
-           `trunk` flow.  If `trunk` is a :class:`htsql.core.tr.term.Term`
-           instance, use the term flow.
+           `trunk` space.  If `trunk` is a :class:`htsql.core.tr.term.Term`
+           instance, use the term space.
 
-        `codes` (a list of :class:`htsql.core.tr.flow.Expression` or ``None``)
+        `codes` (a list of :class:`htsql.core.tr.space.Expression` or ``None``)
            If provided, a list of expressions to be injected
            into the compiled term.
         """
 
         # Sanity check on the arguments.
-        assert isinstance(flow, Flow)
-        assert isinstance(trunk, (Flow, Term))
+        assert isinstance(space, Space)
+        assert isinstance(trunk, (Space, Term))
         assert isinstance(codes, maybe(listof(Expression)))
 
-        # If a term node is passed in place of a trunk flow, use
-        # the flow of the term.
+        # If a term node is passed in place of a trunk space, use
+        # the space of the term.
         if isinstance(trunk, Term):
-            trunk = trunk.flow
+            trunk = trunk.space
 
         # This condition is enforced by unmasking process -- all
-        # non-axial operations in the trunk flow are pruned from
-        # the given flow.
-        assert flow == flow.prune(trunk)
+        # non-axial operations in the trunk space are pruned from
+        # the given space.
+        assert space == space.prune(trunk)
 
-        # Determine the longest ancestor of the flow that contains
+        # Determine the longest ancestor of the space that contains
         # no non-axial operations.
-        baseline = flow
+        baseline = space
         while not baseline.is_inflated:
             baseline = baseline.base
 
-        # Handle the case when the given flow is not spanned by the
-        # trunk flow -- it happens when we construct a plural term
+        # Handle the case when the given space is not spanned by the
+        # trunk space -- it happens when we construct a plural term
         # for an aggregate unit.  In this case, before joining it
         # to the trunk term, the shoot term will be projected to some
-        # singular prefix of the given flow.  To enable such projection,
+        # singular prefix of the given space.  To enable such projection,
         # at least the base of the shoot baseline must be spanned by
-        # the trunk flow (then, we can project on the columns of
+        # the trunk space (then, we can project on the columns of
         # a foreign key that attaches the baseline to its base).
         if not trunk.spans(baseline):
             while not trunk.spans(baseline.base):
                 baseline = baseline.base
 
-        # Compile the term for the given flow up to the baseline.
-        term = self.state.compile(flow, baseline=baseline)
+        # Compile the term for the given space up to the baseline.
+        term = self.state.compile(space, baseline=baseline)
 
         # If provided, inject the given expressions.
         if codes is not None:
         # Return the compiled shoot term.
         return term
 
-    def glue_flows(self, flow, baseline, shoot, shoot_baseline):
+    def glue_spaces(self, space, baseline, shoot, shoot_baseline):
         """
-        Returns joints attaching the shoot flow to the trunk flow.
+        Returns joints attaching the shoot space to the trunk space.
 
-        The given flow nodes specify the shape of two term nodes:
+        The given space nodes specify the shape of two term nodes:
         the trunk term and the shoot term.  The function returns
         a list of :class:`htsql.core.tr.term.Joint` objects that could
         be used to attach the shoot term to the trunk term without
         changing the cardinality of the latter.
 
-        `flow` (:class:`htsql.core.tr.flow.Flow`)
-            The flow of the trunk term.
+        `space` (:class:`htsql.core.tr.space.Space`)
+            The space of the trunk term.
 
-        `baseline` (:class:`htsql.core.tr.flow.Flow`)
+        `baseline` (:class:`htsql.core.tr.space.Space`)
             The baseline of the trunk term.
 
-        `shoot` (:class:`htsql.core.tr.flow.Flow`)
-            The flow of the shoot term.
+        `shoot` (:class:`htsql.core.tr.space.Space`)
+            The space of the shoot term.
 
-        `shoot_baseline` (:class:`htsql.core.tr.flow.Flow`)
+        `shoot_baseline` (:class:`htsql.core.tr.space.Space`)
             The baseline of the shoot term.
         """
         # Sanity check on the arguments.
-        assert isinstance(flow, Flow)
-        assert isinstance(baseline, Flow)
+        assert isinstance(space, Space)
+        assert isinstance(baseline, Space)
         assert baseline.is_inflated
-        # The `flow` may represent not the trunk flow itself,
+        # The `space` may represent not the trunk space itself,
         # but one of its ancestors which may lie below `baseline`.
-        #assert flow.concludes(baseline)
-        assert isinstance(shoot, Flow)
-        assert isinstance(shoot_baseline, Flow)
+        #assert space.concludes(baseline)
+        assert isinstance(shoot, Space)
+        assert isinstance(shoot_baseline, Space)
         assert shoot_baseline.is_inflated
         assert shoot.concludes(shoot_baseline)
 
         # Verify that it is possible to join the terms without
         # changing the cardinality of the trunk.
-        assert (shoot_baseline.is_root or flow.spans(shoot_baseline.base))
+        assert (shoot_baseline.is_root or space.spans(shoot_baseline.base))
 
         # There are two ways the joints are generated:
         #
-        # - when the shoot baseline is an axis of the trunk flow,
+        # - when the shoot baseline is an axis of the trunk space,
         #   in this case we join the terms using parallel joints on
         #   the common axes;
         # - otherwise, join the terms using a serial joint between
         # Joints to attach the shoot to the trunk.
         joints = []
         # The backbone of the trunk term.
-        backbone = flow.inflate()
+        backbone = space.inflate()
         # The backbone of the shoot term.
         shoot_backbone = shoot.inflate()
-        # Check if the shoot baseline is an axis of the trunk flow.
+        # Check if the shoot baseline is an axis of the trunk space.
         if backbone.concludes(shoot_baseline):
             # In this case, we join the terms by all axes of the trunk
-            # flow that are exported by the shoot term.
+            # space that are exported by the shoot term.
             # Find the first inflated axis of the trunk exported
             # by the shoot.
             axis = backbone
             while not shoot_backbone.concludes(axis):
                 axis = axis.base
             # Now the axes between `axis` and `shoot_baseline` are common
-            # axes of the trunk flow and the shoot term.  For each of them,
+            # axes of the trunk space and the shoot term.  For each of them,
             # generate a parallel joint.  Note that we do not verify
             # (and, in general, it is not required) that these axes
             # are exported by the trunk term.  Apply `inject_joints()` on
             for axis in axes:
                 joints.extend(sew(axis))
         else:
-            # When the shoot does not touch the trunk flow, we attach it
+            # When the shoot does not touch the trunk space, we attach it
             # using a serial joint between the shoot baseline and its base.
             # Note that we do not verify (and it is not required) that
-            # the trunk term exports the base flow.  Apply `inject_joints()`
-            # on the trunk term to inject any necessary flows before
+            # the trunk term exports the base space.  Apply `inject_joints()`
+            # on the trunk term to inject any necessary spaces before
             # joining the terms using the joints.
             joints = tie(shoot_baseline)
 
             # We can try to optimize the joints when the base of the
-            # shoot baseline is an ancestor of the trunk flow, but not
+            # shoot baseline is an ancestor of the trunk space, but not
             # exported by the trunk term.  In this case, we prefer to
             # avoid adding an extra axis to the trunk term from below.
 
                 axis = baseline
                 while axis.base != origin:
                     axis = axis.base
-                # Ties from the shoot term to the origin flow.
+                # Ties from the shoot term to the origin space.
                 shoot_joints = joints
-                # Ties from the trunk term to the origin flow.
+                # Ties from the trunk term to the origin space.
                 trunk_joints = tie(axis)
                 # Check if both sets of ties share the same origin
                 # expressions.
         assert isinstance(trunk_term, Term)
         assert isinstance(shoot_term, Term)
         # Delegate to an auxiliary method.
-        return self.glue_flows(trunk_term.flow, trunk_term.baseline,
-                               shoot_term.flow, shoot_term.baseline)
+        return self.glue_spaces(trunk_term.space, trunk_term.baseline,
+                                shoot_term.space, shoot_term.baseline)
 
     def inject_joints(self, term, joints):
         """
         """
         Attaches a shoot term to a trunk term.
 
-        The produced join term uses the flow and the routing
+        The produced join term uses the space and the routing
         table of the trunk term, but also includes the given
         extra routes.
 
 
             The shoot term must be singular relatively to the trunk term.
 
-        `extra_routes` (a mapping from a unit/flow to a term tag)
+        `extra_routes` (a mapping from a unit/space to a term tag)
             Any extra routes provided by the join.
         """
         # Sanity check on the arguments.
         assert isinstance(trunk_term, Term)
         assert isinstance(shoot_term, Term)
-        assert trunk_term.flow.spans(shoot_term.flow)
+        assert trunk_term.space.spans(shoot_term.space)
         assert isinstance(extra_routes, dict)
 
         # Join conditions that glue the terms.
         trunk_term = self.inject_joints(trunk_term, joints)
         # Determine if we could use an inner join to attach the shoot
         # to the trunk.  We could do it if the inner join does not
-        # decrease cardinality of the trunk.  It is so if the shoot flow
-        # dominates a closest ancestor of the trunk flow that is spanned
-        # by the shoot flow.
+        # decrease cardinality of the trunk.  It is so if the shoot space
+        # dominates a closest ancestor of the trunk space that is spanned
+        # by the shoot space.
         is_left = True
-        flow = trunk_term.flow
-        while not shoot_term.flow.spans(flow):
-            flow = flow.base
-        is_left = (not shoot_term.flow.dominates(flow))
+        space = trunk_term.space
+        while not shoot_term.space.spans(space):
+            space = space.base
+        is_left = (not shoot_term.space.dominates(space))
         is_right = False
         # Use the routing table of the trunk term, but also add
         # the given extra routes.
         # Generate and return a join term.
         return JoinTerm(self.state.tag(), trunk_term, shoot_term,
                         joints, is_left, is_right,
-                        trunk_term.flow, trunk_term.baseline, routes)
+                        trunk_term.space, trunk_term.baseline, routes)
 
 
 class Compile(CompileBase):
     - top-level expressions such as the whole query and the query segment,
       for which it builds respective top-level term nodes;
 
-    - flows, for which the adapter builds a corresponding relational
+    - spaces, for which the adapter builds a corresponding relational
       algebraic expression.
 
     After a term is built, it is typically augmented using the
 
     The adapter is polymorphic on the `Expression` argument.
 
-    `expression` (:class:`htsql.core.tr.flow.Expression`)
+    `expression` (:class:`htsql.core.tr.space.Expression`)
         An expression node.
 
     `state` (:class:`CompilingState`)
 
     The adapter is polymorphic on the `Expression` argument.
 
-    `expression` (:class:`htsql.core.tr.flow.Expression`)
+    `expression` (:class:`htsql.core.tr.space.Expression`)
         An expression node to inject.
 
     `term` (:class:`htsql.core.tr.term.Term`)
     adapt(QueryExpr)
 
     def __call__(self):
-        # Initialize the all state flows with a root scalar flow.
-        self.state.set_root(RootFlow(None, self.expression.binding))
+        # Initialize all state spaces with a root scalar space.
+        self.state.set_root(RootSpace(None, self.expression.binding))
         # Compile the segment term.
         segment = None
         if self.expression.segment is not None:
             segment = self.state.compile(self.expression.segment)
-        # Shut down the state flows.
+        # Shut down the state spaces.
         self.state.flush()
         # Construct a query term.
         return QueryTerm(segment, self.expression)
     adapt(SegmentCode)
 
     def __call__(self):
-        if not self.state.superflow.spans(self.expression.root):
+        if not self.state.superspace.spans(self.expression.root):
             with translate_guard(self.expression.root):
                 raise Error("Expected a singular expression")
-        chain = self.state.superflow_stack + \
-                [self.state.superflow, self.expression.root,
-                 self.expression.flow]
-        # Get the ordering of the segment flow.  We must respect the ordering
+        chain = self.state.superspace_stack + \
+                [self.state.superspace, self.expression.root,
+                 self.expression.space]
+        # Get the ordering of the segment space.  We must respect the ordering
         # of the parent segment.
         order = []
         duplicates = set()
-        for flow in chain:
-            for code, direction in arrange(flow):
+        for space in chain:
+            for code, direction in arrange(space):
                 if code in duplicates:
                     continue
                 order.append((code, direction))
                  [code for code, direction in order])
         idx = 0
         while idx+1 < len(chain):
-            parent_flow = chain[idx]
-            child_flow = chain[idx+1]
+            parent_space = chain[idx]
+            child_space = chain[idx+1]
             is_native = False
-            while child_flow is not None:
-                if parent_flow.dominates(child_flow):
+            while child_space is not None:
+                if parent_space.dominates(child_space):
                     is_native = True
                     break
-                child_flow = child_flow.base
+                child_space = child_space.base
             if is_native:
                 del chain[idx]
                 if idx > 0:
                     idx -= 1
             else:
                 idx += 1
-        # Construct a term corresponding to the segment flow.
+        # Construct a term corresponding to the segment space.
         trunk_term = self.state.compile(chain[0], baseline=self.state.root)
-        for flow in chain[1:]:
-            shoot_term = self.compile_shoot(flow, trunk_term)
+        for space in chain[1:]:
+            shoot_term = self.compile_shoot(space, trunk_term)
             joints = self.glue_terms(trunk_term, shoot_term)
             trunk_term = self.inject_joints(trunk_term, joints)
             routes = {}
             routes.update(shoot_term.routes)
             trunk_term = JoinTerm(self.state.tag(), trunk_term, shoot_term,
                                   joints, False, False,
-                                  shoot_term.flow, self.state.root, routes)
+                                  shoot_term.space, self.state.root, routes)
         kid = trunk_term
         # Inject the expressions into the term.
         kid = self.state.inject(kid, codes)
         # The compiler does not guarantee that the produced term respects
-        # the flow ordering, so it is our responsitibity to wrap the term
+        # the space ordering, so it is our responsibility to wrap the term
         # with an order node.
         if order:
             kid = OrderTerm(self.state.tag(), kid, order, None, None,
-                            kid.flow, kid.baseline, kid.routes.copy())
+                            kid.space, kid.baseline, kid.routes.copy())
         # Compile nested segments.
         subtrees = {}
         for segment in self.expression.code.segments:
             if segment in subtrees:
                 continue
-            self.state.push_superflow(self.expression.root)
-            self.state.push_superflow(self.expression.flow)
+            self.state.push_superspace(self.expression.root)
+            self.state.push_superspace(self.expression.space)
             term = self.state.compile(segment)
-            self.state.pop_superflow()
-            self.state.pop_superflow()
+            self.state.pop_superspace()
+            self.state.pop_superspace()
             subtrees[segment] = term
         # Construct keys for segment merging.
-        superkeys = [code for code, direction in arrange(self.state.superflow,
+        superkeys = [code for code, direction in arrange(self.state.superspace,
                                                          with_strong=False)]
-        keys = [code for code, direction in arrange(self.expression.flow,
+        keys = [code for code, direction in arrange(self.expression.space,
                                                     with_strong=False)]
         # Construct a segment term.
         return SegmentTerm(self.state.tag(), kid, self.expression,
                            superkeys, keys, subtrees,
-                           kid.flow, kid.baseline, kid.routes.copy())
+                           kid.space, kid.baseline, kid.routes.copy())
 
 
-class CompileFlow(Compile):
+class CompileSpace(Compile):
     """
-    Compile a term corresponding to a flow node.
+    Compile a term corresponding to a space node.
 
     This is an abstract class; see subclasses for implementations.
 
-    The general algorithm for compiling a term node for the given flow
+    The general algorithm for compiling a term node for the given space
     looks as follows:
 
-    - compile a term for the base flow;
+    - compile a term for the base space;
     - inject any necessary expressions;
-    - build a new term node that represents the flow operation.
+    - build a new term node that represents the space operation.
 
-    When compiling a term for a flow node, the current `baseline` flow
+    When compiling a term for a space node, the current `baseline` space
     denotes the leftmost axis that the term should be able to export.
     The compiler may (but does not have to) omit any axes nested under
     the `baseline` axis.
 
-    The generated term is not required to respect the ordering of the flow.
+    The generated term is not required to respect the ordering of the space.
 
     Constructor arguments:
 
-    `flow` (:class:`htsql.core.tr.flow.Flow`)
-        A flow node.
+    `space` (:class:`htsql.core.tr.space.Space`)
+        A space node.
 
     `state` (:class:`CompilingState`)
         The current state of the compiling process.
 
     Other attributes:
 
-    `backbone` (:class:`htsql.core.tr.flow.Flow`)
-        The inflation of the given flow.
+    `backbone` (:class:`htsql.core.tr.space.Space`)
+        The inflation of the given space.
 
-    `baseline` (:class:`htsql.core.tr.flow.Flow`)
+    `baseline` (:class:`htsql.core.tr.space.Space`)
         An alias to `state.baseline`.
     """
 
-    adapt(Flow)
+    adapt(Space)
 
-    def __init__(self, flow, state):
-        assert isinstance(flow, Flow)
-        # The inflation of the flow.
-        backbone = flow.inflate()
-        # Check that the baseline flow is an axis of the given flow.
-        assert flow.concludes(state.baseline)
-        super(CompileFlow, self).__init__(flow, state)
-        self.flow = flow
+    def __init__(self, space, state):
+        assert isinstance(space, Space)
+        # The inflation of the space.
+        backbone = space.inflate()
+        # Check that the baseline space is an axis of the given space.
+        assert space.concludes(state.baseline)
+        super(CompileSpace, self).__init__(space, state)
+        self.space = space
         self.state = state
         self.backbone = backbone
         # Extract commonly used state properties.
         self.baseline = state.baseline
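
As a condensed illustration of the three-step recipe from the `CompileSpace` docstring (compile the base, inject what the operation needs, wrap with a new term node), a compiler for `FilteredSpace` might look roughly like this. This is not a quote from the module -- the real per-space compilers also handle the baseline case and build their routing tables explicitly:

    class CompileFiltered(CompileSpace):

        adapt(FilteredSpace)

        def __call__(self):
            # 1. Compile a term for the base space.
            term = self.state.compile(self.space.base)
            # 2. Make sure the filter expression is exportable from that term.
            term = self.state.inject(term, [self.space.filter])
            # 3. Wrap the base term with a filtering term node.
            return FilterTerm(self.state.tag(), term, self.space.filter,
                              self.space, term.baseline, term.routes.copy())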
 
 
-class InjectFlow(Inject):
+class InjectSpace(Inject):
 
-    adapt(Flow)
+    adapt(Space)
 
-    def __init__(self, flow, term, state):
-        assert isinstance(flow, Flow)
-        # It is a bug if we get the `flow` plural for the `term` here.
+    def __init__(self, space, term, state):
+        assert isinstance(space, Space)
+        # It is a bug if we get the `space` plural for the `term` here.
         # It is a responsibility of `InjectUnit` to guard against unexpected
         # plural expressions and to issue an appropriate HTSQL error.
-        assert term.flow.spans(flow)
-        super(InjectFlow, self).__init__(flow, term, state)
-        self.flow = flow
+        assert term.space.spans(space)
+        super(InjectSpace, self).__init__(space, term, state)
+        self.space = space
         self.term = term
         self.state = state
 
     def __call__(self):
-        # Note that this function works for all flow classes universally.
+        # Note that this function works for all space classes universally.
         # We start with checking for and handling several special cases;
-        # if none of them apply, we grow a shoot term for the given flow
+        # if none of them apply, we grow a shoot term for the given space
         # and attach it to the main term.
 
-        # Check if the flow is already exported.
-        if all(unit in self.term.routes for unit in spread(self.flow)):
-            # Not reachable since we only call `InjectFlow` from
+        # Check if the space is already exported.
+        if all(unit in self.term.routes for unit in spread(self.space)):
+            # Not reachable since we only call `InjectSpace` from
             # `InjectColumn` and `InjectKernel`, and those already
-            # verified that the flow is not exported.
+            # verified that the space is not exported.
             return self.term
 
-        # Check that the flow does not contain any non-axial operations
-        # of the term flow -- that's enforced by unmasking process.
-        assert self.flow == self.flow.prune(self.term.flow)
+        # Check that the space does not contain any non-axial operations
+        # of the term space -- that's enforced by the unmasking process.
+        assert self.space == self.space.prune(self.term.space)
 
-        # A special case when the given flow is an ancestor of the term
-        # flow.  The fact that the flow is not exported by the term means
+        # A special case when the given space is an ancestor of the term
+        # space.  The fact that the space is not exported by the term means
         # that the term tree is optimized by cutting all axes below some
         # baseline.  Now we need to grow these axes back.
-        if self.term.flow.concludes(self.flow):
-            # Verify that the flow is not in the term.
-            assert self.term.baseline.base.concludes(self.flow)
+        if self.term.space.concludes(self.space):
+            # Verify that the space is not in the term.
+            assert self.term.baseline.base.concludes(self.space)
 
-            # Here we compile a term corresponding to the flow and
+            # Here we compile a term corresponding to the space and
             # attach it to the axis directly above it using a serial joint.
 
             # Compile a term for the missing axes.
             lkid = self.state.compile(self.term.baseline.base,
-                                       baseline=self.flow)
+                                       baseline=self.space)
             rkid = self.term
 
             # Join the terms using a serial joint.
             # Compile and return a join term.
             return JoinTerm(self.state.tag(), lkid, rkid, joints,
                             is_left, is_right,
-                            rkid.flow, lkid.baseline, routes)
+                            rkid.space, lkid.baseline, routes)
 
         # None of the special cases apply, so we use a general method:
-        # - grow a shoot term for the given flow;
+        # - grow a shoot term for the given space;
         # - attach the shoot to the main term.
 
-        # Compile a shoot term for the flow.
-        flow_term = self.compile_shoot(self.flow, self.term)
+        # Compile a shoot term for the space.
+        space_term = self.compile_shoot(self.space, self.term)
         # The routes to add.
         extra_routes = {}
-        for unit in spread(self.flow):
-            extra_routes[unit] = flow_term.routes[unit]
+        for unit in spread(self.space):
+            extra_routes[unit] = space_term.routes[unit]
         # Join the shoot to the main term.
-        return self.join_terms(self.term, flow_term, extra_routes)
+        return self.join_terms(self.term, space_term, extra_routes)
 
 
-class CompileScalar(CompileFlow):
+class CompileScalar(CompileSpace):
 
-    # The root flow is a special case of the scalar flow.
-    adapt_many(ScalarFlow, RootFlow)
+    # The root space is a special case of the scalar space.
+    adapt_many(ScalarSpace, RootSpace)
 
     def __call__(self):
-        # If we are at the baseline (always the case for the root flow),
+        # If we are at the baseline (always the case for the root space),
         # generate a scalar term.
-        if self.flow == self.baseline:
-            return ScalarTerm(self.state.tag(), self.flow, self.flow, {})
-        # Otherwise, compile a term for the parent flow and reuse
-        # it for the scalar flow.
-        term = self.state.compile(self.flow.base)
+        if self.space == self.baseline:
+            return ScalarTerm(self.state.tag(), self.space, self.space, {})
+        # Otherwise, compile a term for the parent space and reuse
+        # it for the scalar space.
+        term = self.state.compile(self.space.base)
         return WrapperTerm(self.state.tag(), term,
-                           self.flow, term.baseline, term.routes)
+                           self.space, term.baseline, term.routes)
 
 
-class CompileTable(CompileFlow):
+class CompileTable(CompileSpace):
 
-    # Used for both direct and fiber table flows.
-    adapt(TableFlow)
+    # Used for both direct and fiber table spaces.
+    adapt(TableSpace)
 
     def __call__(self):
         # We start with identifying and handling special cases, where
         # in the regular case.  If none of the special cases are applicable,
         # we use the generic algorithm.
 
-        # The first special case: we are at the baseline flow.
-        if self.flow == self.baseline:
+        # The first special case: we are at the baseline space.
+        if self.space == self.baseline:
             # Generate a single table term.
             tag = self.state.tag()
             # The routing table includes all the columns of the table.
             routes = {}
-            for unit in spread(self.flow):
+            for unit in spread(self.space):
                 routes[unit] = tag
-            return TableTerm(tag, self.flow, self.baseline, routes)
+            return TableTerm(tag, self.space, self.baseline, routes)
 
-        # Otherwise, we need a term corresponding to the parent flow.
-        term = self.state.compile(self.flow.base)
+        # Otherwise, we need a term corresponding to the parent space.
+        term = self.state.compile(self.space.base)
 
-        # The second special case, when the term of the parent flow could also
-        # serve as a term for the flow itself.  It is possible if the
+        # The second special case, when the term of the parent space could also
+        # serve as a term for the space itself.  It is possible if the
         # following two conditions are met:
-        # - the term exports the inflation of the given flow (`backbone`),
-        # - the given flow conforms (has the same cardinality as) its base.
+        # - the term exports the inflation of the given space (`backbone`),
+        # - the given space conforms (has the same cardinality as) its base.
         # This case usually corresponds to an HTSQL expression of the form:
         #   (A?p(B)).B,
         # where `B` is a singular, non-nullable link from `A` and `p(B)` is
         # a predicate expression on `B`.
-        if (self.flow.conforms(term.flow) and
+        if (self.space.conforms(term.space) and
             all(unit in term.routes for unit in spread(self.backbone))):
-            # We need to add the given flow to the routing table and
-            # replace the term flow.
+            # We need to add the given space to the routing table and
+            # replace the term space.
             routes = term.routes.copy()
-            for unit in spread(self.flow):
-                routes[unit] = routes[unit.clone(flow=self.backbone)]
+            for unit in spread(self.space):
+                routes[unit] = routes[unit.clone(space=self.backbone)]
             return WrapperTerm(self.state.tag(), term,
-                               self.flow, term.baseline, routes)
+                               self.space, term.baseline, routes)
 
         # Now the general case.  We take two terms:
-        # - the term compiled for the parent flow
-        # - and a table term corresponding to the flow table,
-        # and join them using the tie between the flow and its parent.
+        # - the term compiled for the parent space
+        # - and a table term corresponding to the space table,
+        # and join them using the tie between the space and its parent.
 
-        # This is the term for the flow base, we already generated it.
+        # This is the term for the space base, we already generated it.
         lkid = term
-        # This is a table term corresponding to the flow table.
+        # This is a table term corresponding to the space table.
         # Instead of generating it directly, we call `compile`
-        # on the same flow, but with a different baseline, so that it
+        # on the same space, but with a different baseline, so that it
         # will hit the first special case and produce a table term.
         rkid = self.state.compile(self.backbone, baseline=self.backbone)
-        # The connections between the flow to its base.
-        joints = tie(self.flow)
+        # The connections between the space and its base.
+        joints = tie(self.space)
         is_left = False
         is_right = False
         # We use the routing table of the base term with extra routes
-        # corresponding to the given flow and its inflation which we
+        # corresponding to the given space and its inflation which we
         # export from the table term.
         routes = lkid.routes.copy()
         routes = {}
         routes.update(lkid.routes)
         routes.update(rkid.routes)
-        for unit in spread(self.flow):
-            routes[unit] = routes[unit.clone(flow=self.backbone)]
+        for unit in spread(self.space):
+            routes[unit] = routes[unit.clone(space=self.backbone)]
         # Generate a join term node.
         return JoinTerm(self.state.tag(), lkid, rkid, joints,
-                        is_left, is_right, self.flow, lkid.baseline, routes)
+                        is_left, is_right, self.space, lkid.baseline, routes)
 
 
-class CompileQuotient(CompileFlow):
+class CompileQuotient(CompileSpace):
 
-    adapt(QuotientFlow)
+    adapt(QuotientSpace)
 
     def __call__(self):
-        # Normally, a quotient flow is represented by a seed term with
+        # Normally, a quotient space is represented by a seed term with
         # the baseline at the ground term.  If we can generate a term
         # with this shape, it is wrapped by a filter term to eliminate
         # `NULL` from the kernel and then by a projection term to
         # shorter than the ground.  In this case, the term has irregular
         # parallel and serial ties and therefore cannot represent
         # the quotient axis.  To hide the irregular structure, we are
-        # forced to generate a trunk term from the parent flow and
+        # forced to generate a trunk term from the parent space and
         # manually project and attach the seed term to the trunk term.
 
         # In addition, we may be asked to export some aggregates
-        # over the complement flow.  We generate aggregate expressions
+        # over the complement space.  We generate aggregate expressions
         # by pretending that the seed term actually represents
-        # the complement flow and injecting the expressions into it.
+        # the complement space and injecting the expressions into it.
 
-        # Start with generating a term for the seed flow.
+        # Start with generating a term for the seed space.
 
-        # The ground flow is expected to be the baseline of the seed term.
-        baseline = self.flow.ground
+        # The ground space is expected to be the baseline of the seed term.
+        baseline = self.space.ground
         # However, the ground may not be inflated, so we need to find
         # an inflated ancestor.
         while not baseline.is_inflated:
             baseline = baseline.base
         # The seed term.
-        seed_term = self.state.compile(self.flow.seed, baseline=baseline)
+        seed_term = self.state.compile(self.space.seed, baseline=baseline)
         # Inject the kernel and filter out `NULL` kernel values.
-        if self.flow.kernels:
+        if self.space.kernels:
             # Make sure the kernel expressions are exportable.
-            seed_term = self.state.inject(seed_term, self.flow.kernels)
+            seed_term = self.state.inject(seed_term, self.space.kernels)
             # Generate filters:
             #   !is_null(kernel)&...
             filters = []
-            for code in self.flow.kernels:
+            for code in self.space.kernels:
                 filter = FormulaCode(IsNullSig(-1), coerce(BooleanDomain()),
                                      code.binding, op=code)
                 filters.append(filter)
                 [filter] = filters
             else:
                 filter = FormulaCode(AndSig(), coerce(BooleanDomain()),
-                                     self.flow.binding, ops=filters)
+                                     self.space.binding, ops=filters)
             # The final seed term.
             seed_term = FilterTerm(self.state.tag(), seed_term, filter,
-                                   seed_term.flow, seed_term.baseline,
+                                   seed_term.space, seed_term.baseline,
                                    seed_term.routes.copy())
 
         # Wrap the term to have a target for composite units.
         seed_term = WrapperTerm(self.state.tag(), seed_term,
-                                seed_term.flow, seed_term.baseline,
+                                seed_term.space, seed_term.baseline,
                                 seed_term.routes.copy())
 
         # Indicates that the seed term has the regular shape.
-        is_regular = (seed_term.baseline == self.flow.ground)
+        is_regular = (seed_term.baseline == self.space.ground)
 
         # Inject aggregates suggested by the rewriter.
 
         # Clear out companions to avoid infinite recursion.
         quotient = self.backbone.clone(companions=[])
         # The plural space for the aggregates.
-        complement = ComplementFlow(quotient, self.flow.binding)
+        complement = ComplementSpace(quotient, self.space.binding)
         # We can only inject aggregates if the seed term has the regular shape.
-        if self.flow.companions and is_regular:
+        if self.space.companions and is_regular:
             # We are going to disguise the seed term as a complement.
             # The routing table for the complement term.
             routes = {}
             for code in seed_term.routes:
                 unit = CoveringUnit(code, complement, code.binding)
                 routes[unit] = seed_term.tag
-            for code in self.flow.kernels:
+            for code in self.space.kernels:
                 unit = CoveringUnit(code, complement, code.binding)
                 routes[unit] = seed_term.tag
-            for unit in spread(self.flow.seed.inflate()):
-                routes[unit.clone(flow=complement)] = seed_term.routes[unit]
+            for unit in spread(self.space.seed.inflate()):
+                routes[unit.clone(space=complement)] = seed_term.routes[unit]
             # Disguise the seed term as a complement term.
             complement_term = WrapperTerm(self.state.tag(), seed_term,
                                           complement, complement, routes)
             # Inject aggregate expressions.
             complement_term = self.state.inject(complement_term,
-                                                self.flow.companions)
+                                                self.space.companions)
             # Abort if the shape of the term changed.
             if complement_term.baseline == complement:
                 # Remember what we just injected.
-                aggregates = self.flow.companions
+                aggregates = self.space.companions
                 # Convert the complement term back to the seed term.
                 # The routing table of the seed term will now have
                 # extra aggregate expressions.
                 routes.update(seed_term.routes)
                 # Back to the seed term.
                 seed_term = WrapperTerm(self.state.tag(), complement_term,
-                                        seed_term.flow, seed_term.baseline,
+                                        seed_term.space, seed_term.baseline,
                                         routes)
 
         # Prepare for generating the quotient term.
 
-        # The term for the parent flow (may remain `None` if the baseline
+        # The term for the parent space (may remain `None` if the baseline
         # is at the quotient).
         trunk_term = None
         # The basis of the projection.
         basis = []
-        # The units exported by the projection (against the inflated flow).
+        # The units exported by the projection (against the inflated space).
         units = []
         # The join conditions attaching the quotient term to the parent term.
         joints = []
 
         # Handle the regular case first.
         if is_regular:
-            # Check if the term for the parent flow is necessary.
-            if self.flow != self.baseline:
-                # Generate the parent flow and the ties.
-                trunk_term = self.state.compile(self.flow.base)
-                joints = tie(self.flow)
+            # Check if the term for the parent space is necessary.
+            if self.space != self.baseline:
+                # Generate the parent space and the ties.
+                trunk_term = self.state.compile(self.space.base)
+                joints = tie(self.space)
 
         # The irregular case: the seed baseline is below the ground.
         else:
             # The trunk term is a must, even if the baseline is at
-            # the current flow.  In that case, we need to lower the baseline.
+            # the current space.  In that case, we need to lower the baseline.
             baseline = self.baseline
-            if baseline == self.flow:
+            if baseline == self.space:
                 baseline = baseline.base
             # Generate the trunk term.
-            trunk_term = self.state.compile(self.flow.base, baseline=baseline)
+            trunk_term = self.state.compile(self.space.base, baseline=baseline)
             # Join conditions between the trunk and the seed terms.
             seed_joints = self.glue_terms(trunk_term, seed_term)
             # Convert the join conditions to joints between the trunk
         # Generate the projection basis and a list of exported units.
         # Note that in the irregular case, those are already prepopulated
         # from the join conditions.
-        # The units attaching the seed ground to the parent flow.
-        for lop, rop in tie(self.flow.ground):
+        # The units attaching the seed ground to the parent space.
+        for lop, rop in tie(self.space.ground):
             basis.append(rop)
             unit = KernelUnit(rop, self.backbone, rop.binding)
             units.append(unit)
         # The kernel expressions.
-        for code in self.flow.kernels:
+        for code in self.space.kernels:
             basis.append(code)
             unit = KernelUnit(code, self.backbone, code.binding)
             units.append(unit)
 
         # When the kernel is scalar, to ensure proper conversion to SQL,
         # force `GROUP BY` to contain a reference from a subframe.  For
-        # that, we create a permanent wrapper around the seed flow and
+        # that, we create a permanent wrapper around the seed space and
         # create a scalar unit pointing to that wrapper.  The unit
         # is added to the projection basis.
-        if all(not code.units for code in self.flow.kernels):
+        if all(not code.units for code in self.space.kernels):
             basis_code = LiteralCode(True, coerce(BooleanDomain()),
-                                     self.flow.binding)
-            basis_unit = ScalarUnit(basis_code, self.flow.seed,
+                                     self.space.binding)
+            basis_unit = ScalarUnit(basis_code, self.space.seed,
                                     basis_code.binding)
             basis.append(basis_unit)
             routes = seed_term.routes.copy()
             routes[basis_unit] = seed_term.tag
             seed_term = PermanentTerm(self.state.tag(), seed_term,
-                                      seed_term.flow, seed_term.baseline,
+                                      seed_term.space, seed_term.baseline,
                                       routes)
 
         # Generate the projection term.
         routes = {}
         routes.update(lkid.routes)
         routes.update(rkid.routes)
-        # Reparent exported units from the backbone to the original flow.
+        # Reparent exported units from the backbone to the original space.
         for unit in units:
-            routes[unit.clone(flow=self.flow)] = rkid.tag
+            routes[unit.clone(space=self.space)] = rkid.tag
         # Generate and return a join node.
         is_left = False
         is_right = False
         return JoinTerm(self.state.tag(), lkid, rkid, joints,
-                        is_left, is_right, self.flow, lkid.baseline, routes)
+                        is_left, is_right, self.space, lkid.baseline, routes)
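
For orientation, the projection term assembled above is what later renders as a grouped (GROUP BY) frame: the ties attaching the seed ground to the parent space, together with the kernel expressions, make up the GROUP BY list, and the KernelUnit instances built from them are what the quotient exports.  A rough sketch of that SQL shape, using purely illustrative names:

    # Sketch only; "seed_table", "tie_col" and "kernel_expr" are placeholders.
    QUOTIENT_FRAME_SHAPE = """
    SELECT seed.tie_col, seed.kernel_expr
    FROM seed_table AS seed
    GROUP BY seed.tie_col, seed.kernel_expr
    """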
 
 
-class CompileComplement(CompileFlow):
+class CompileComplement(CompileSpace):
 
-    adapt(ComplementFlow)
+    adapt(ComplementSpace)
 
     def __call__(self):
         # A complement term, just like a quotient term is represented
 
         # Since the quotient and the complement terms share the same
         # shape, we could reuse the complement term to export the respective
-        # quotient flow.  In this case, we need to apply kernel filters.
+        # quotient space.  In this case, we need to apply kernel filters.
 
         # As in the quotient case, the seed term may have an irregular
         # shape, that is, the term baseline lies below the seed ground.
         # In this case, we manually attach the seed term to the trunk.
 
-        # The flow node may contain extra code objects -- `companions`,
+        # The space node may contain extra code objects -- `companions`,
         # which indicate that the generated term should export covering
         # units wrapping the companions.
 
         # Generate the seed term.
 
-        # The baseline of the seed term is expected to be the seed ground flow.
-        baseline = self.flow.ground
+        # The baseline of the seed term is expected to be the seed ground space.
+        baseline = self.space.ground
         # However, it may not be inflated, in which case we find the closest
         # inflated axis.
         while not baseline.is_inflated:
             baseline = baseline.base
         # Create the seed term.
-        seed_term = self.state.compile(self.flow.seed, baseline=baseline)
+        seed_term = self.state.compile(self.space.seed, baseline=baseline)
         # Make sure the seed term can export the quotient kernel and the
         # extra companion expressions.
         seed_term = self.state.inject(seed_term,
-                                      self.flow.kernels + self.flow.companions)
+                                      self.space.kernels + self.space.companions)
 
         # Indicates whether the seed term has a regular shape.
-        is_regular = (seed_term.baseline == self.flow.ground)
+        is_regular = (seed_term.baseline == self.space.ground)
 
-        # Indicates that the generated term can export the quotient flow:
+        # Indicates that the generated term can export the quotient space:
         # - we cannot omit generating the parent term because the baseline
-        #   is below the current flow or the seed term is irregular.
+        #   is below the current space or the seed term is irregular.
         # - there are no filters or other non-axial operations between
         #   the complement and its quotient;
-        # - the quotient flow does not have to export any aggregates.
+        # - the quotient space does not have to export any aggregates.
         # Note that the seed term may have an irregular shape.
-        has_quotient = ((self.baseline != self.flow or not is_regular) and
-                        isinstance(self.flow.base, QuotientFlow) and
-                        not self.flow.base.companions)
+        has_quotient = ((self.baseline != self.space or not is_regular) and
+                        isinstance(self.space.base, QuotientSpace) and
+                        not self.space.base.companions)
 
-        # If the term exports the quotient flow, we need to enforce the
+        # If the term exports the quotient space, we need to enforce the
         # condition: `!is_null(kernel)`.
-        if has_quotient and self.flow.kernels:
+        if has_quotient and self.space.kernels:
             # Generate a filter around the seed term.
             filters = []
-            for code in self.flow.kernels:
+            for code in self.space.kernels:
                 filter = FormulaCode(IsNullSig(-1), coerce(BooleanDomain()),
                                      code.binding, op=code)
                 filters.append(filter)
                 [filter] = filters
             else:
                 filter = FormulaCode(AndSig(), coerce(BooleanDomain()),
-                                     self.flow.binding, ops=filters)
+                                     self.space.binding, ops=filters)
             seed_term = FilterTerm(self.state.tag(), seed_term, filter,
-                                   seed_term.flow, seed_term.baseline,
+                                   seed_term.space, seed_term.baseline,
                                    seed_term.routes.copy())
 
         # Wrap the term to have a target for covering units.
         seed_term = WrapperTerm(self.state.tag(), seed_term,
-                                seed_term.flow, seed_term.baseline,
+                                seed_term.space, seed_term.baseline,
                                 seed_term.routes.copy())
 
         # Prepare for generating the complement term.
 
-        # The term for the parent (or grandparent if `has_quotient`) flow.
-        # May remain unset if the baseline at the current or the parent flow.
+        # The term for the parent (or grandparent if `has_quotient`) space.
+        # May remain unset if the baseline is at the current or the parent space.
         trunk_term = None
-        # Flow units exported by the term.
+        # Space units exported by the term.
         covering_units = []
-        # Units from the parent quotient flow exported by the term.
+        # Units from the parent quotient space exported by the term.
         quotient_units = []
         # Join conditions attaching the term to the trunk.
         joints = []
 
         # Generate the trunk term if needed.
 
-        # The trunk flow.
-        axis = self.flow.base
-        # Use the grandparent flow if the quotient is already included
-        # in the complement flow.
+        # The trunk space.
+        axis = self.space.base
+        # Use the grandparent space if the quotient is already included
+        # in the complement space.
         if has_quotient:
             axis = axis.base
         # Determine the baseline.
         baseline = self.baseline
-        # If the baseline is above the trunk flow, we can avoid generating
+        # If the baseline is above the trunk space, we can avoid generating
         # the trunk term, but only if the seed term has the regular shape.
-        # Otherwise, lower the baseline till it reaches the trunk flow.
+        # Otherwise, lower the baseline till it reaches the trunk space.
         if not is_regular:
             while not axis.concludes(baseline):
                 baseline = baseline.base
                     covering_units.append(unit)
 
             # Add regular joints: the serial joints from the complement
-            # flow (or the parent flow if it is included).
+            # space (or the parent space if it is included).
             if has_quotient:
-                joints += tie(self.flow.base)
+                joints += tie(self.space.base)
             else:
-                joints += tie(self.flow)
+                joints += tie(self.space)
 
         # Populate units exported by the complement.
 
-        # Add units from the parent quotient flow if needed.
+        # Add units from the parent quotient space if needed.
         if has_quotient:
-            quotient_backbone = self.flow.base.inflate()
+            quotient_backbone = self.space.base.inflate()
             quotient_units = spread(quotient_backbone)
 
         # Wrap everything produced by the seed term.
             unit = CoveringUnit(code, self.backbone, code.binding)
             covering_units.append(unit)
         # Ensure we export serial ties.
-        for lop, rop in tie(self.flow.ground):
+        for lop, rop in tie(self.space.ground):
             unit = CoveringUnit(rop, self.backbone, rop.binding)
             covering_units.append(unit)
         # Export the kernel and any requested companion units.
-        for code in self.flow.kernels + self.flow.companions:
+        for code in self.space.kernels + self.space.companions:
             unit = CoveringUnit(code, self.backbone, code.binding)
             covering_units.append(unit)
 
         # Generate the routing table and the complement term.
         routes = {}
-        # Export units from the quotient flow, if any.
+        # Export units from the quotient space, if any.
         for unit in quotient_units:
             routes[unit] = seed_term.tag
         # Export complement units.
         for unit in covering_units:
             routes[unit] = seed_term.tag
         # Export native units.
-        for unit in spread(self.flow.seed):
-            routes[unit.clone(flow=self.backbone)] = seed_term.routes[unit]
+        for unit in spread(self.space.seed):
+            routes[unit.clone(space=self.backbone)] = seed_term.routes[unit]
         # The baseline for the complement term.
         baseline = self.backbone
         if has_quotient:
         routes = {}
         routes.update(lkid.routes)
         routes.update(rkid.routes)
-        # Now reparent the exported units to the given flow
+        # Now reparent the exported units to the given space
         # (rather than the backbone).
         for unit in quotient_units:
-            routes[unit.clone(flow=self.flow.base)] = seed_term.tag
+            routes[unit.clone(space=self.space.base)] = seed_term.tag
         for unit in covering_units:
-            routes[unit.clone(flow=self.flow)] = seed_term.tag
-        for unit in spread(self.flow.seed):
-            routes[unit.clone(flow=self.flow)] = seed_term.routes[unit]
+            routes[unit.clone(space=self.space)] = seed_term.tag
+        for unit in spread(self.space.seed):
+            routes[unit.clone(space=self.space)] = seed_term.routes[unit]
         is_left = False
         is_right = False
         # Generate and return the join term node.
         return JoinTerm(self.state.tag(), lkid, rkid, joints,
-                        is_left, is_right, self.flow, lkid.baseline, routes)
+                        is_left, is_right, self.space, lkid.baseline, routes)
 
 
-class CompileCovering(CompileFlow):
+class CompileCovering(CompileSpace):
 
-    # The implementation is shared by these three covering flows.
-    adapt_many(MonikerFlow,
-               ForkedFlow,
-               AttachFlow,
-               ClippedFlow)
+    # The implementation is shared by these covering spaces.
+    adapt_many(MonikerSpace,
+               ForkedSpace,
+               AttachSpace,
+               ClippedSpace)
 
     def __call__(self):
-        # Moniker, forked and linked flows are represented as a seed term
+        # Moniker, forked, linked, and clipped spaces are represented as a seed term
         # with the baseline at the seed ground.  The compilation processes
-        # for these types of flows are almost identical.
+        # for these types of spaces are almost identical.
 
         # If the seed term has an irregular shape, we must generate a term
-        # for the parent flow and add custom joints between the seed
+        # for the parent space and add custom joints between the seed
         # and the parent terms.  If the seed term is regular and the
-        # baseline is at the current flow, we avoid generating a parent term.
+        # baseline is at the current space, we avoid generating a parent term.
 
-        # The flow node may contain extra code objects -- `companions`,
+        # The space node may contain extra code objects -- `companions`,
         # which indicate that the generated term should export covering
         # units wrapping the companions.
 
         # Generate the seed term.
 
-        # The baseline of the seed term is expected to be the seed ground flow.
-        baseline = self.flow.ground
+        # The baseline of the seed term is expected to be the seed ground space.
+        baseline = self.space.ground
         # However, it may not be inflated, in which case we find the closest
         # inflated axis.
         while not baseline.is_inflated:
             baseline = baseline.base
         # Create the seed term.
-        seed_term = self.state.compile(self.flow.seed, baseline=baseline)
+        seed_term = self.state.compile(self.space.seed, baseline=baseline)
         # The seed term may need to export some extra expressions.
         codes = []
-        # For the forked flow, it must export the kernel expressions.
-        if isinstance(self.flow, ForkedFlow):
-            codes += self.flow.kernels
-        # For the linked flow, it must export the linking expressions.
-        if isinstance(self.flow, AttachFlow):
-            codes += [rop for lop, rop in self.flow.images]
-        # A clipped flow must order itself (but only up to the base).
-        if isinstance(self.flow, ClippedFlow):
+        # For the forked space, it must export the kernel expressions.
+        if isinstance(self.space, ForkedSpace):
+            codes += self.space.kernels
+        # For the linked space, it must export the linking expressions.
+        if isinstance(self.space, AttachSpace):
+            codes += [rop for lop, rop in self.space.images]
+        # A clipped space must order itself (but only up to the base).
+        if isinstance(self.space, ClippedSpace):
             order = []
-            for code, direction in arrange(self.flow.seed):
-                if all(self.flow.base.spans(unit.flow)
+            for code, direction in arrange(self.space.seed):
+                if all(self.space.base.spans(unit.space)
                        for unit in code.units):
                     continue
                 codes.append(code)
                 order.append((code, direction))
-        if (isinstance(self.flow, AttachFlow) and
-                self.flow.filter is not None):
-            codes.append(self.flow.filter)
+        if (isinstance(self.space, AttachSpace) and
+                self.space.filter is not None):
+            codes.append(self.space.filter)
         # Any companion expressions must also be included.
-        codes += self.flow.companions
+        codes += self.space.companions
         seed_term = self.state.inject(seed_term, codes)
 
-        if (isinstance(self.flow, AttachFlow) and
-                self.flow.filter is not None):
+        if (isinstance(self.space, AttachSpace) and
+                self.space.filter is not None):
             seed_term = FilterTerm(self.state.tag(), seed_term,
-                                   self.flow.filter,
-                                   seed_term.flow,
+                                   self.space.filter,
+                                   seed_term.space,
                                    seed_term.baseline,
                                    seed_term.routes.copy())
 
         # Indicates whether the seed term has a regular shape.
-        is_regular = (seed_term.baseline == self.flow.ground)
+        is_regular = (seed_term.baseline == self.space.ground)
 
         # Wrap the term to have a target for covering units.
         seed_term = WrapperTerm(self.state.tag(), seed_term,
-                                seed_term.flow, seed_term.baseline,
+                                seed_term.space, seed_term.baseline,
                                 seed_term.routes.copy())
 
         # Generate the trunk term and join conditions (if needed).
 
-        # The term for the parent flow.  May remain `None` if we already
+        # The term for the parent space.  May remain `None` if we already
         # reached the baseline.
         trunk_term = None
         # Join conditions attaching the term to the trunk.
         joints = []
 
         # The regular case: make the parent term only if the
-        # baseline is below the given flow.
+        # baseline is below the given space.
         if is_regular:
-            if self.baseline != self.flow:
-                trunk_term = self.state.compile(self.flow.base)
+            if self.baseline != self.space:
+                trunk_term = self.state.compile(self.space.base)
             # We need the joints to produce covering units, so generate
             # them even when we do not use them for joining.
-            joints = tie(self.flow)
+            joints = tie(self.space)
 
         # The irregular case: we must create the parent term
-        # even if the baseline is above the parent flow.
+        # even if the baseline is above the parent space.
         else:
             # Lower the baseline if needed.
             baseline = self.baseline
-            if baseline == self.flow:
+            if baseline == self.space:
                 baseline = baseline.base
-            # Compile a term for the parent flow.
-            trunk_term = self.state.compile(self.flow.base, baseline=baseline)
+            # Compile a term for the parent space.
+            trunk_term = self.state.compile(self.space.base, baseline=baseline)
             # Generate custom joints.
             shoot_term = seed_term
-            # For the forked flow, this is tricky as we can't join the trunk
+            # For the forked space, this is tricky as we can't join the trunk
             # to the seed term as usual -- we must leave the seed axis
             # free of joints.  Note that the seed baseline lies below
             # `ground.base` since the seed term is irregular.
-            if isinstance(self.flow, ForkedFlow):
-                seed_joints = self.glue_flows(trunk_term.flow,
+            if isinstance(self.space, ForkedSpace):
+                seed_joints = self.glue_spaces(trunk_term.space,
                                               trunk_term.baseline,
-                                              self.flow.ground.base,
+                                              self.space.ground.base,
                                               seed_term.baseline)
             # Otherwise, just attach the shoot term to the trunk term.
             else:
                                     joint.rop.binding)
                 joints.append(joint.clone(rop=unit))
             # Append regular joints.
-            joints += tie(self.flow)
+            joints += tie(self.space)
 
-        # Slice a clipped flow.
-        if isinstance(self.flow, ClippedFlow):
+        # Slice a clipped space.
+        if isinstance(self.space, ClippedSpace):
             partition = []
             if not is_regular:
                 partition += [joint.rop for joint in seed_joints]
-            partition += [joint.rop for joint in tie(self.flow.ground)]
+            partition += [joint.rop for joint in tie(self.space.ground)]
             if partition:
                 seed_term = self.clip(seed_term, order, partition)
             else:
         for unit in units:
             routes[unit] = seed_term.tag
         # Export native units.
-        for unit in spread(self.flow.seed):
-            routes[unit.clone(flow=self.backbone)] = seed_term.routes[unit]
+        for unit in spread(self.space.seed):
+            routes[unit.clone(space=self.backbone)] = seed_term.routes[unit]
         # The covering term.
         term = WrapperTerm(self.state.tag(), seed_term,
                            self.backbone, self.backbone, routes)
         routes = {}
         routes.update(lkid.routes)
         routes.update(rkid.routes)
-        # Reparent the exported units from the flow backbone to the flow itself.
+        # Reparent the exported units from the space backbone to the space itself.
         for unit in units:
-            routes[unit.clone(flow=self.flow)] = seed_term.tag
-        for unit in spread(self.flow.seed):
-            routes[unit.clone(flow=self.flow)] = seed_term.routes[unit]
+            routes[unit.clone(space=self.space)] = seed_term.tag
+        for unit in spread(self.space.seed):
+            routes[unit.clone(space=self.space)] = seed_term.routes[unit]
         is_left = False
         is_right = False
         # Join the terms.
         return JoinTerm(self.state.tag(), lkid, rkid, joints,
-                        is_left, is_right, self.flow, lkid.baseline, routes)
+                        is_left, is_right, self.space, lkid.baseline, routes)
 
     def clip(self, term, order, partition):
         ops = []
                              code.domain, code.binding, base=code)
             ops.append(op)
         row_number_code = FormulaCode(RowNumberSig(), coerce(IntegerDomain()),
-                                      self.flow.binding,
+                                      self.space.binding,
                                       partition=partition, order=ops)
-        row_number_unit = ScalarUnit(row_number_code, term.flow.base,
-                                     term.flow.binding)
+        row_number_unit = ScalarUnit(row_number_code, term.space.base,
+                                     term.space.binding)
         tag = self.state.tag()
         routes = term.routes.copy()
         routes[row_number_unit] = tag
-        term = PermanentTerm(tag, term, term.flow, term.baseline, routes)
+        term = PermanentTerm(tag, term, term.space, term.baseline, routes)
         left_bound = 1
-        if self.flow.offset is not None:
-            left_bound = self.flow.offset+1
+        if self.space.offset is not None:
+            left_bound = self.space.offset+1
         right_bound = left_bound+1
-        if self.flow.limit is not None:
-            right_bound = left_bound+self.flow.limit
+        if self.space.limit is not None:
+            right_bound = left_bound+self.space.limit
         left_bound_code = LiteralCode(left_bound, coerce(IntegerDomain()),
-                                      term.flow.binding)
+                                      term.space.binding)
         right_bound_code = LiteralCode(right_bound, coerce(IntegerDomain()),
-                                       term.flow.binding)
+                                       term.space.binding)
         left_filter = FormulaCode(CompareSig('>='), coerce(BooleanDomain()),
-                                  term.flow.binding,
+                                  term.space.binding,
                                   lop=row_number_unit, rop=left_bound_code)
         right_filter = FormulaCode(CompareSig('<'), coerce(BooleanDomain()),
-                                   term.flow.binding,
+                                   term.space.binding,
                                    lop=row_number_unit, rop=right_bound_code)
         filter = FormulaCode(AndSig(), coerce(BooleanDomain()),
-                             term.flow.binding,
+                             term.space.binding,
                              ops=[left_filter, right_filter])
         return FilterTerm(self.state.tag(), term, filter,
-                          term.flow, term.baseline, term.routes.copy())
+                          term.space, term.baseline, term.routes.copy())
 
     def clip_root(self, term, order):
-        limit = self.flow.limit
+        limit = self.space.limit
         if limit is None:
             limit = 1
-        offset = self.flow.offset
+        offset = self.space.offset
         return OrderTerm(self.state.tag(), term, order, limit, offset,
-                         term.flow, term.baseline, term.routes.copy())
+                         term.space, term.baseline, term.routes.copy())
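
The window arithmetic in clip() above is worth spelling out: with 1-based ROW_NUMBER values, limit and offset select the half-open range [offset+1, offset+1+limit), defaulting to a single row when the limit is absent.  A minimal standalone sketch of just that bound computation (the helper name is illustrative, not part of the module):

    def clip_bounds(limit, offset):
        # Rows are kept while left_bound <= row_number < right_bound.
        left_bound = 1 if offset is None else offset + 1
        right_bound = left_bound + 1 if limit is None else left_bound + limit
        return left_bound, right_bound

    # limit=20, offset=10 keeps row numbers 11..30.
    assert clip_bounds(20, 10) == (11, 31)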
 
 
-class CompileFiltered(CompileFlow):
+class CompileFiltered(CompileSpace):
 
-    adapt(FilteredFlow)
+    adapt(FilteredSpace)
 
     def __call__(self):
-        # The term corresponding to the parent flow.
-        term = self.state.compile(self.flow.base)
+        # The term corresponding to the parent space.
+        term = self.state.compile(self.space.base)
         # Make sure the base term is able to produce the filter expression.
-        kid = self.state.inject(term, [self.flow.filter])
+        kid = self.state.inject(term, [self.space.filter])
         # Inherit the routing table from the base term, but add native
-        # units of the given flow.
+        # units of the given space.
         routes = kid.routes.copy()
-        for unit in spread(self.flow):
-            routes[unit] = routes[unit.clone(flow=self.backbone)]
+        for unit in spread(self.space):
+            routes[unit] = routes[unit.clone(space=self.backbone)]
         # Generate a filter term node.
-        return FilterTerm(self.state.tag(), kid, self.flow.filter,
-                          self.flow, kid.baseline, routes)
+        return FilterTerm(self.state.tag(), kid, self.space.filter,
+                          self.space, kid.baseline, routes)
 
 
-class CompileOrdered(CompileFlow):
+class CompileOrdered(CompileSpace):
 
-    adapt(OrderedFlow)
+    adapt(OrderedSpace)
 
     def __call__(self):
-        # An ordered flow has two functions:
+        # An ordered space has two functions:
         # - adding explicit row ordering;
         # - extracting a slice from the row set.
         # Note the first function could be ignored since the compiled terms
-        # are not required to respect the ordering of the underlying flow.
+        # are not required to respect the ordering of the underlying space.
 
-        # When the order flow does not apply limit/offset, we could simply
+        # When the order space does not apply limit/offset, we could simply
         # reuse the base term.
-        if self.flow.is_expanding:
-            # Generate a term for the flow base.
-            term = self.state.compile(self.flow.base)
-            # Update its routing table to include the given flow and
+        if self.space.is_expanding:
+            # Generate a term for the space base.
+            term = self.state.compile(self.space.base)
+            # Update its routing table to include the given space and
             # return the node.
             routes = term.routes.copy()
-            for unit in spread(self.flow):
-                routes[unit] = routes[unit.clone(flow=self.backbone)]
+            for unit in spread(self.space):
+                routes[unit] = routes[unit.clone(space=self.backbone)]
             return WrapperTerm(self.state.tag(), term,
-                               self.flow, term.baseline, routes)
+                               self.space, term.baseline, routes)
 
         # Applying limit/offset requires special care.  Since slicing
         # relies on precise row numbering, the base term must produce
         # exactly the rows of the base.  Therefore we cannot use any
         # baseline or unmask non-axial operations.
 
-        # Extract the flow ordering and make sure the base term is able
+        # Extract the space ordering and make sure the base term is able
         # to produce the order expressions.
-        order = arrange(self.flow)
+        order = arrange(self.space)
         codes = [code for code, direction in order]
-        kid = self.state.compile(self.flow.base,
+        kid = self.state.compile(self.space.base,
                                   baseline=self.state.root)
         kid = self.state.inject(kid, codes)
-        # Add the given flow to the routing table.
+        # Add the given space to the routing table.
         routes = kid.routes.copy()
-        for unit in spread(self.flow):
-            routes[unit] = routes[unit.clone(flow=self.backbone)]
+        for unit in spread(self.space):
+            routes[unit] = routes[unit.clone(space=self.backbone)]
         # Generate an order term.
         return OrderTerm(self.state.tag(), kid, order,
-                         self.flow.limit, self.flow.offset,
-                         self.flow, kid.baseline, routes)
+                         self.space.limit, self.space.offset,
+                         self.space, kid.baseline, routes)
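
As a rough illustration of the sliced case (names are placeholders, and the exact rendering is backend-specific), the order term produced here corresponds to a frame of this general shape:

    # Sketch only; "base_query" and "order_expr" are placeholder names.
    ORDERED_FRAME_SHAPE = """
    SELECT base.*
    FROM base_query AS base
    ORDER BY base.order_expr
    LIMIT :limit OFFSET :offset
    """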
 
 
 class InjectCode(Inject):
         super(InjectUnit, self).__init__(unit, term, state)
         self.unit = unit
         # Extract the unit attributes.
-        self.flow = unit.flow
+        self.space = unit.space
 
     def __call__(self):
         # Normally, this should never be reachable.  We raise an error here
     adapt(ColumnUnit)
 
     def __call__(self):
-        # To avoid an extra `inject()` call, check if the unit flow
+        # To avoid an extra `inject()` call, check if the unit space
         # is already exported by the term.
         if self.unit in self.term.routes:
             # Not reachable since already checked in `state.inject()`.
             return self.term
-        # Verify that the unit is singular on the term flow.
-        if not self.term.flow.spans(self.flow):
+        # Verify that the unit is singular on the term space.
+        if not self.term.space.spans(self.space):
             raise Error("Expected a singular expression")
-        # Inject the unit flow into the term.
-        return self.state.inject(self.term, [self.unit.flow])
+        # Inject the unit space into the term.
+        return self.state.inject(self.term, [self.unit.space])
 
 
 class InjectScalar(Inject):
     adapt(ScalarUnit)
 
     def __call__(self):
-        # Injects a batch of scalar units sharing the same flow.
+        # Injects a batch of scalar units sharing the same space.
 
         # To inject a scalar unit into a term, we need to do the following:
-        # - compile a term for the unit flow;
+        # - compile a term for the unit space;
         # - inject the unit into the unit term;
         # - attach the unit term to the main term.
 
         # If we compile a unit term for each unit individually, we may
         # end up with a lot of identical unit terms in the term tree.
         # To optimize the structure of the term tree, the rewriter
-        # collects all scalar units sharing the same flow and groups
+        # collects all scalar units sharing the same space and groups
         # them together so that the compiler could reuse the same term
         # for the whole group.
 
         # and the units suggested to be injected together with it.
         units = [self.unit]
         for code in self.unit.companions:
-            companion_unit = ScalarUnit(code, self.flow, code.binding)
+            companion_unit = ScalarUnit(code, self.space, code.binding)
             # This test rarely fails since injecting any of the companions
             # injects the whole group.
             if companion_unit not in self.term.routes:
                 units.append(companion_unit)
 
         # Verify that the unit is singular relative to the term.
-        if not self.term.flow.spans(self.flow):
+        if not self.term.space.spans(self.space):
             raise Error("Expected a singular expression")
         # Extract the unit expressions.
         codes = [unit.code for unit in units]
 
-        # Handle the special case when the unit flow is equal to the
-        # term flow or dominates it.  In this case, we could inject
+        # Handle the special case when the unit space is equal to the
+        # term space or dominates it.  In this case, we could inject
         # the units directly to the main term and avoid creating
         # a separate unit term.
-        if self.flow.dominates(self.term.flow):
+        if self.space.dominates(self.term.space):
             # This is no longer reachable since unmasking removes
-            # scalar units that dominate their mask flow.
+            # scalar units that dominate their mask space.
             # Make sure the term could export all the units.
             term = self.state.inject(self.term, codes)
             # Add all the units to the routing table.  Note that we point
             for unit in units:
                 routes[unit] = tag
             # Wrap the term with the updated routing table.
-            return WrapperTerm(tag, term, term.flow, term.baseline, routes)
+            return WrapperTerm(tag, term, term.space, term.baseline, routes)
 
-        # The general case: compile a term for the unit flow.
-        unit_term = self.compile_shoot(self.flow, self.term, codes)
+        # The general case: compile a term for the unit space.
+        unit_term = self.compile_shoot(self.space, self.term, codes)
         # SQL syntax does not permit us to evaluate arbitrary
         # expressions in terminal terms, so we wrap such terms with
         # a no-op wrapper.
         if unit_term.is_nullary:
             unit_term = WrapperTerm(self.state.tag(), unit_term,
-                                    unit_term.flow, unit_term.baseline,
+                                    unit_term.space, unit_term.baseline,
                                     unit_term.routes.copy())
         # And join it to the main term.
         extra_routes = dict((unit, unit_term.tag) for unit in units)
     def __init__(self, unit, term, state):
         super(InjectAggregate, self).__init__(unit, term, state)
         # Extract attributes of the unit.
-        self.plural_flow = unit.plural_flow
+        self.plural_space = unit.plural_space
 
     def __call__(self):
         # Injects a batch of aggregate units sharing the same plural
-        # and unit flows.
+        # and unit spaces.