Commits

Augie Fackler committed ba801f4

utility_commands: Implement rebuildmeta so that metadata can be rebuilt.
hg_delta_editor: Fixed some longstanding branch_info bugs detected while
rebuilding meta.

  • Participants
  • Parent commits 6fa97cf

Comments (0)

Files changed (7)

File hg_delta_editor.py

                         src_branch) = self._path_and_branch_for_path(src_p)
                         if src_p is None:
                             continue
-                    added_branches[br] = src_branch, src_rev, revision.revnum
+                    if (br not in self.branches or
+                        not (src_rev == 0 and src_branch == None)):
+                        added_branches[br] = src_branch, src_rev, revision.revnum
                 elif fi == '' and br in self.branches:
-                    br2 = br or 'default'
-                    if br2 not in self.repo.branchtags() and paths[p].action == 'D':
-                        self.branches_to_delete.add(br)
+                    self.branches_to_delete.add(br)
             else:
                 t_name = self._is_path_tag(p)
                 if t_name == False:

File notes/metadata.txt

 the repository.
 
 branch_info can be rebuilt during the rebuild of the revision map by recording
-the revisions of all active heads of server-side branches.
+the revisions of all active heads of server-side branches. branch_info maps
+each branch name to a tuple (parent_branch, parent_branch_rev, branch_created_rev).

File rebuildmeta.py

+import os
+import pickle
+
+from mercurial import node
+
+import svnwrap
+import util
+
@util.register_subcommand('rebuildmeta')
def rebuildmeta(ui, repo, hg_repo_path, args, **opts):
    """Rebuild hgsubversion metadata using values stored in revisions.

    Walks every changeset carrying a 'convert_revision' extra and rebuilds
    the files under .hg/svn: rev_map, url, uuid, last_rev, branch_info
    (branch -> (parent_branch, parent_branch_rev, branch_created_rev))
    and tag_info (tag -> (source_branch, source_rev)).

    args must contain exactly one element: the svn URI the repo was
    converted from.
    """
    assert len(args) == 1, 'You must pass the svn URI used to create this repo.'
    uuid = None
    svn = svnwrap.SubversionRepo(url=args[0])
    subdir = svn.subdir
    # Normalize subdir once to '/path' form with no trailing slash
    # (hoisted out of the revision loop; it is loop-invariant).
    if subdir and subdir[0] != '/':
        subdir = '/' + subdir
    if subdir and subdir[-1] == '/':
        subdir = subdir[:-1]
    svnmetadir = os.path.join(repo.path, 'svn')
    if not os.path.exists(svnmetadir):
        os.makedirs(svnmetadir)

    revmap = open(os.path.join(svnmetadir, 'rev_map'), 'w')
    revmap.write('1\n')  # rev_map file format version
    last_rev = -1
    branchinfo = {}
    # node -> svn revision number, for looking up parents seen earlier
    noderevnums = {}
    for rev in repo:
        ctx = repo[rev]
        convinfo = ctx.extra().get('convert_revision', None)
        if convinfo:
            # convinfo looks like 'svn:<36-char uuid><path>@<revnum>'
            assert convinfo.startswith('svn:')
            revpath, revision = convinfo[40:].split('@')
            assert revpath.startswith(subdir), ('That does not look like the '
                                                'right location in the repo.')
            if uuid is None:
                # First converted revision: validate and persist url/uuid.
                uuid = convinfo[4:40]
                assert uuid == svn.uuid, 'UUIDs did not match!'
                urlfile = open(os.path.join(svnmetadir, 'url'), 'w')
                urlfile.write(args[0])
                urlfile.close()
                uuidfile = open(os.path.join(svnmetadir, 'uuid'), 'w')
                uuidfile.write(uuid)
                uuidfile.close()
            commitpath = revpath[len(subdir)+1:]
            # BUG FIX: test for the slash too; plain startswith('branches')
            # would mangle a path like 'branchesfoo' when slicing below.
            if commitpath.startswith('branches/'):
                commitpath = commitpath[len('branches/'):]
            elif commitpath == 'trunk':
                commitpath = ''
            else:
                assert False, 'Unhandled case in rebuildmeta'
            revmap.write('%s %s %s\n' % (revision,
                                         node.hex(ctx.node()),
                                         commitpath))
            revision = int(revision)
            noderevnums[ctx.node()] = revision
            if revision > last_rev:
                last_rev = revision
            branch = ctx.branch()
            if branch == 'default':
                branch = None  # branch_info uses None for trunk/default
            if branch not in branchinfo:
                parent = ctx.parents()[0]
                if (parent.node() in noderevnums
                    and parent.branch() != ctx.branch()):
                    parentbranch = parent.branch()
                    if parentbranch == 'default':
                        parentbranch = None
                else:
                    parentbranch = None
                branchinfo[branch] = (parentbranch,
                                      noderevnums.get(parent.node(), 0),
                                      revision)
            # A child on the magic 'closed-branches' branch means this
            # branch was deleted server-side; drop it from branch_info.
            for c in ctx.children():
                if c.branch() == 'closed-branches':
                    if branch in branchinfo:
                        del branchinfo[branch]
    revmap.close()  # BUG FIX: rev_map was never closed/flushed
    lastrevfile = open(os.path.join(svnmetadir, 'last_rev'), 'w')
    lastrevfile.write(str(last_rev))
    lastrevfile.close()
    branchinfofile = open(os.path.join(svnmetadir, 'branch_info'), 'w')
    pickle.dump(branchinfo, branchinfofile)
    branchinfofile.close()
    tagsinfo = {}
    realtags = svn.tags
    tagsleft = realtags.items()
    while tagsleft:
        tag, tagparent = tagsleft.pop(0)
        source, rev = tagparent
        if source.startswith('tags/'):
            # Tag copied from another tag: chase the chain.
            src = source[len('tags/'):]
            if src in tagsinfo:
                tagsinfo[tag] = tagsinfo[src]
            elif src in realtags:
                if (realtags[src][1] <= last_rev
                    or realtags[src][0].startswith('tags/')):
                    # BUG FIX: the queue holds (tag, (source, rev)) pairs;
                    # appending the bare string `src` broke the unpack at the
                    # top of the loop. Re-queue this tag until its source tag
                    # has been resolved into tagsinfo (the guard above keeps
                    # unresolvable sources from being re-queued forever).
                    tagsleft.append((tag, tagparent))
            else:
                older_tags = svn.tags_at_rev(rev)
                newsrc, newrev = older_tags[src]
                tagsleft.append((tag, (newsrc, newrev)))
        if source.startswith('branches/') or source == 'trunk':
            source = determinebranch(source)
            if rev <= last_rev:
                tagsinfo[tag] = source, rev
    tagsinfofile = open(os.path.join(svnmetadir, 'tag_info'), 'w')
    pickle.dump(tagsinfo, tagsinfofile)
    tagsinfofile.close()
+
+
def determinebranch(branch):
    """Map a server-side path to a Mercurial branch name.

    'trunk' maps to None (hg's default branch); 'branches/<name>' maps to
    '<name>'. Any other path is an internal error.
    """
    # BUG FIX: match 'branches/' including the slash. The original tested
    # startswith('branches') but sliced len('branches/') characters, which
    # mangled paths like 'branchesX' and mapped a bare 'branches' to ''.
    if branch.startswith('branches/'):
        branch = branch[len('branches/'):]
    elif branch == 'trunk':
        branch = None
    else:
        assert False, 'Unhandled case while regenerating metadata.'
    return branch

File svncommand.py

 from fetch_command import fetch_revisions
 from push_cmd import commit_from_rev
 from diff_cmd import diff_command
+from rebuildmeta import rebuildmeta
 # shut up, pyflakes, we must import those
-__x = [print_wc_url, fetch_revisions, commit_from_rev, diff_command]
+__x = [print_wc_url, fetch_revisions, commit_from_rev, diff_command, rebuildmeta]
 
 mode755 = (stat.S_IXUSR | stat.S_IXGRP| stat.S_IXOTH | stat.S_IRUSR |
            stat.S_IRGRP| stat.S_IROTH | stat.S_IWUSR)
             subcommand = candidates[0]
     path = os.path.dirname(repo.path)
     try:
-        opts['svn_url'] = open(os.path.join(repo.path, 'svn', 'url')).read()
+        if subcommand != 'rebuildmeta':
+            opts['svn_url'] = open(os.path.join(repo.path, 'svn', 'url')).read()
         return svn_subcommands[subcommand](ui, args=args,
                                            hg_repo_path=path,
                                            repo=repo,

File svnwrap/svn_swig_wrapper.py

 
         This returns a dictionary of tag: (source path, source rev)
         """
-        tags = self.list_dir('tags').keys()
+        return self.tags_at_rev(self.HEAD)
+
+    def tags_at_rev(self, revision):
+        try:
+            tags = self.list_dir('tags', revision=revision).keys()
+        except core.SubversionException, e:
+            if e.apr_err == 160013:
+                return {}
+            raise
         tag_info = {}
-        head = self.HEAD
         for t in tags:
             tag_info[t] = self._get_copy_source('tags/%s' % t,
-                                                cached_head=head)
+                                                cached_head=revision)
         return tag_info
 
     def _get_copy_source(self, path, cached_head=None):

File tests/run.py

 import test_push_renames
 import test_push_dirs
 import test_push_eol
+import test_rebuildmeta
 import test_tags
 import test_utility_commands
 
                                test_push_renames.suite(),
                                test_push_dirs.suite(),
                                test_push_eol.suite(),
+                               test_rebuildmeta.suite(),
                                test_tags.suite(),
                                test_utility_commands.suite(),
                               ])

File tests/test_rebuildmeta.py

+import os
+import pickle
+import unittest
+
+from mercurial import hg
+from mercurial import ui
+
+import test_util
+import rebuildmeta
+import hg_delta_editor
+
# Fixtures whose interesting content lives below the repository root:
# map dump filename -> the repo subdirectory to convert from.
subdir = {'truncatedhistory.svndump': '/project2',
          'fetch_missing_files_subdir.svndump': '/foo',
          }
# List of expected "missing" branches - these are really files that happen
# to be in the branches dir. This will be fixed at a later date.
expected_branch_deltas = {'unrelatedbranch.svndump': ['c', ],
                          'file_mixed_with_branches.svndump': ['README', ],
                          }
+
def _do_case(self, name, stupid):
    """Fetch fixture `name`, clone the result, run rebuildmeta on the clone,
    and compare the rebuilt .hg/svn metadata against what the fetch wrote.

    `stupid` selects the non-replay ("stupid") fetch code path. Installed
    on the generated TestCase as a method via the attrs dict below.
    """
    self._load_fixture_and_fetch(name, subdir=subdir.get(name, ''), stupid=stupid)
    assert len(self.repo) > 0
    wc2_path = self.wc_path + '_clone'
    u = ui.ui()
    src, dest = hg.clone(u, self.wc_path, wc2_path, update=False)
    # Rebuild metadata in the clone directly from the svn fixture repo.
    rebuildmeta.rebuildmeta(u,
                            dest,
                            os.path.dirname(dest.path),
                            args=[test_util.fileurl(self.repo_path +
                                                    subdir.get(name, '')), ])
    dest = hg.repository(u, os.path.dirname(dest.path))
    # NOTE(review): this compares the file contents as strings, not ints —
    # presumably adequate for the fixtures' revision ranges; verify.
    self.assert_(open(os.path.join(src.path, 'svn', 'last_rev')).read() >=
                     open(os.path.join(dest.path, 'svn', 'last_rev')).read())
    # These metadata files must be rebuilt byte-for-byte identical.
    for tf in ('rev_map', 'uuid', 'url'):
        self.assertEqual(open(os.path.join(src.path, 'svn', tf)).read(),
                         open(os.path.join(dest.path, 'svn', tf)).read())
    # tag_info is pickled; compare the unpickled dicts.
    self.assertEqual(pickle.load(open(os.path.join(src.path, 'svn',
                                                   'tag_info'))),
                     pickle.load(open(os.path.join(dest.path, 'svn',
                                                   'tag_info'))))
    self.assertEqual(src.branchtags(), dest.branchtags())
    srcbi = pickle.load(open(os.path.join(src.path, 'svn', 'branch_info')))
    # Drop entries the fetch records but rebuildmeta cannot reconstruct
    # (files sitting directly in the branches dir; see expected_branch_deltas).
    for mustpop in expected_branch_deltas.get(name, []):
        del srcbi[mustpop]
    destbi = pickle.load(open(os.path.join(dest.path, 'svn', 'branch_info')))
    self.assertEqual(sorted(srcbi.keys()), sorted(destbi.keys()))
    # branch_info values are (parent_branch, parent_rev, created_rev).
    for branch in destbi:
        srcinfo = srcbi[branch]
        destinfo = destbi[branch]
        hge = hg_delta_editor.HgChangeReceiver(path=os.path.dirname(dest.path),
                                                   repo=dest,
                                                   ui_=u)
        if destinfo[:2] == (None, 0):
            # Rebuilt entry claims no parent: the created rev may only be
            # later than (or equal to) what the original fetch recorded.
            self.assert_(srcinfo[2] <= destinfo[2])
            self.assertEqual(srcinfo[0], destinfo[0])
        else:
            # Map the source's (branch, rev) parent through the revmap and
            # expect the rebuilt parent rev to match the latest candidate.
            pr = sorted(filter(lambda x: x[1] == srcinfo[0] and x[0] <= srcinfo[1],
                        hge.revmap.keys()), reverse=True)[0][0]
            self.assertEqual(pr, destinfo[1])
            self.assertEqual(srcinfo[2], destinfo[2])
+
+
def buildmethod(case, name, stupid):
    """Build a named, documented test method running _do_case on `case`."""
    def method(self):
        return self._do_case(case, stupid)
    method.__name__ = name
    replay = 'stupid' if stupid else 'real'
    method.__doc__ = 'Test rebuildmeta on %s with %s replay.' % (case, replay)
    return method
+
+
# Generate one normal and one stupid-mode test method per svndump fixture
# found in the fixtures directory, then assemble them into a TestCase
# subclass dynamically via type().
attrs = {'_do_case': _do_case,
         }
for case in (f for f in os.listdir(test_util.FIXTURES) if f.endswith('.svndump')):
    name = 'test_' + case[:-len('.svndump')]
    attrs[name] = buildmethod(case, name, False)
    name += '_stupid'
    attrs[name] = buildmethod(case, name, True)
RebuildMetaTests = type('RebuildMetaTests', (test_util.TestBase, ), attrs)
+
+
def suite():
    """Return a TestSuite containing all generated rebuildmeta tests."""
    loader = unittest.TestLoader()
    return unittest.TestSuite([loader.loadTestsFromTestCase(RebuildMetaTests)])