Marcin Kuzminski avatar Marcin Kuzminski committed 05528ad

Hacking for git support, and new faster repo scan

Comments (0)

Files changed (14)

docs/changelog.rst

 Changelog
 =========
 
+1.1.0 (**XXXX-XX-XX**)
+----------------------
+- git support
+- performance upgrade for cached repos list
+
+
 1.0.0 (**2010-10-xx**)
 ----------------------
 

rhodecode/__init__.py

 @author: marcink
 """
 
-VERSION = (1, 0, 0, 'rc4')
+VERSION = (1, 1, 0, 'beta')
 
 __version__ = '.'.join((str(each) for each in VERSION[:4]))
 

rhodecode/config/environment.py

     object
     """
     config = PylonsConfig()
-    
+
     # Pylons paths
     root = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
     paths = dict(root=root,
     config['routes.map'] = make_map(config)
     config['pylons.app_globals'] = app_globals.Globals(config)
     config['pylons.h'] = rhodecode.lib.helpers
-    
+
     # Setup cache object as early as possible
     import pylons
     pylons.cache._push_object(config['pylons.app_globals'].cache)
-    
+
     # Create the Mako TemplateLookup, with the default auto-escaping
     config['pylons.app_globals'].mako_lookup = TemplateLookup(
         directories=paths['templates'],
     if test:
         from rhodecode.lib.utils import create_test_env, create_test_index
         create_test_env('/tmp', config)
-        create_test_index('/tmp/*', True)
-        
+        create_test_index('/tmp', True)
+
     #MULTIPLE DB configs
     # Setup the SQLAlchemy database engine
     if config['debug'] and not test:
     init_model(sa_engine_db1)
     #init baseui
     config['pylons.app_globals'].baseui = make_ui('db')
-    
+
     repo2db_mapper(_get_repos_cached_initial(config['pylons.app_globals'], initial))
     set_available_permissions(config)
     set_base_path(config)
     set_rhodecode_config(config)
     # CONFIGURATION OPTIONS HERE (note: all config options will override
     # any Pylons config options)
-    
+
     return config

rhodecode/lib/app_globals.py

         self.cache = CacheManager(**parse_cache_config_options(config))
         self.available_permissions = None   # propagated after init_model
         self.baseui = None                  # propagated after init_model        
-        
+
     @LazyProperty
     def paths(self):
         if self.baseui:
             return self.baseui.configitems('paths')
-    
+
     @LazyProperty
     def base_path(self):
         if self.baseui:
-            return self.paths[0][1].replace('*', '')            
+            return self.paths[0][1]

rhodecode/lib/celerylib/tasks.py

 from rhodecode.lib.utils import OrderedDict
 from time import mktime
 from vcs.backends.hg import MercurialRepository
+from vcs.backends.git import GitRepository
+import os
 import traceback
+from vcs.backends import get_repo
+from vcs.utils.helpers import get_scm
 
 try:
     import json
 
     commits_by_day_author_aggregate = {}
     commits_by_day_aggregate = {}
-    repos_path = get_hg_ui_settings()['paths_root_path'].replace('*', '')
-    repo = MercurialRepository(repos_path + repo_name)
+    repos_path = get_hg_ui_settings()['paths_root_path']
+    p = os.path.join(repos_path, repo_name)
+    repo = get_repo(get_scm(p)[0], p)
 
     skip_date_limit = True
     parse_limit = 250 #limit for single task changeset parsing optimal for
     's', 'sh', 'tpl', 'txt', 'vim', 'wss', 'xhtml', 'xml', 'xsl', 'xslt', 'yaws']
 
 
-    repos_path = get_hg_ui_settings()['paths_root_path'].replace('*', '')
-    repo = MercurialRepository(repos_path + repo_name)
+    repos_path = get_hg_ui_settings()['paths_root_path']
+    p = os.path.join(repos_path, repo_name)
+    repo = get_repo(get_scm(p)[0], p)
+
     tip = repo.get_changeset()
 
     code_stats = {}

rhodecode/lib/db_manage.py

         paths = RhodeCodeUi()
         paths.ui_section = 'paths'
         paths.ui_key = '/'
-        paths.ui_value = os.path.join(path, '*')
+        paths.ui_value = path
 
 
         hgsettings1 = RhodeCodeSettings()

rhodecode/lib/helpers.py

         return str
     else:
         str = str.replace(replace * 2, replace)
-        return recursive_replace(str, replace)  
+        return recursive_replace(str, replace)
 
 class _ToolTip(object):
-    
+
     def __call__(self, tooltip_title, trim_at=50):
         """
         Special function just to wrap our text into nice formatted autowrapped
         text
         :param tooltip_title:
         """
-        
+
         return wrap_paragraphs(escape(tooltip_title), trim_at)\
                        .replace('\n', '<br/>')
-    
+
     def activate(self):
         """
         Adds tooltip mechanism to the given Html all tooltips have to have 
         Then a tooltip will be generated based on that
         All with yui js tooltip
         """
-        
+
         js = '''
         YAHOO.util.Event.onDOMReady(function(){
             function toolTipsId(){
                     
                 });
         });
-        '''         
+        '''
         return literal(js)
 
 tooltip = _ToolTip()
 
 class _FilesBreadCrumbs(object):
-    
+
     def __call__(self, repo_name, rev, paths):
         url_l = [link_to(repo_name, url('files_home',
                                         repo_name=repo_name,
                                         revision=rev, f_path=''))]
         paths_l = paths.split('/')
-        
+
         for cnt, p in enumerate(paths_l, 1):
             if p != '':
                 url_l.append(link_to(p, url('files_home',
     pygmentize function for annotation
     :param filenode:
     """
-    
+
     color_dict = {}
     def gen_color():
         """generator for getting 10k of evenly distibuted colors using hsv color
         and golden ratio.
-        """        
+        """
         import colorsys
         n = 10000
         golden_ratio = 0.618033988749895
             h %= 1
             HSV_tuple = [h, 0.95, 0.95]
             RGB_tuple = colorsys.hsv_to_rgb(*HSV_tuple)
-            yield map(lambda x:str(int(x * 256)), RGB_tuple)           
+            yield map(lambda x:str(int(x * 256)), RGB_tuple)
 
     cgenerator = gen_color()
-        
+
     def get_color_string(cs):
         if color_dict.has_key(cs):
             col = color_dict[cs]
         else:
             col = color_dict[cs] = cgenerator.next()
         return "color: rgb(%s)! important;" % (', '.join(col))
-        
+
     def url_func(changeset):
         tooltip_html = "<div style='font-size:0.8em'><b>Author:</b>" + \
-        " %s<br/><b>Date:</b> %s</b><br/><b>Message:</b> %s<br/></div>" 
-        
+        " %s<br/><b>Date:</b> %s</b><br/><b>Message:</b> %s<br/></div>"
+
         tooltip_html = tooltip_html % (changeset.author,
                                                changeset.date,
                                                tooltip(changeset.message))
                 class_='tooltip',
                 tooltip_title=tooltip_html
               )
-        
+
         uri += '\n'
-        return uri   
+        return uri
     return literal(annotate_highlight(filenode, url_func, **kwargs))
-      
+
 def repo_name_slug(value):
     """Return slug of name of repository
     This function is called on each creation/modification
     """
     slug = remove_formatting(value)
     slug = strip_tags(slug)
-    
+
     for c in """=[]\;'"<>,/~!@#$%^&*()+{}|: """:
         slug = slug.replace(c, '-')
     slug = recursive_replace(slug, '-')
     if not isinstance(repo, BaseRepository):
         raise Exception('You must pass an Repository '
                         'object as first argument got %s', type(repo))
-        
+
     try:
         cs = repo.get_changeset(rev)
     except RepositoryError:
 from mercurial import util
 from mercurial.templatefilters import age as _age, person as _person
 
-age = lambda  x:_age(x)
+age = lambda  x:x
 capitalize = lambda x: x.capitalize()
 date = lambda x: util.datestr(x)
 email = util.email
 isodate = lambda  x: util.datestr(x, '%Y-%m-%d %H:%M %1%2')
 isodatesec = lambda  x: util.datestr(x, '%Y-%m-%d %H:%M:%S %1%2')
 localdate = lambda  x: (x[0], util.makedate()[1])
-rfc822date = lambda  x: util.datestr(x, "%a, %d %b %Y %H:%M:%S %1%2")
-rfc822date_notz = lambda  x: util.datestr(x, "%a, %d %b %Y %H:%M:%S")
+rfc822date = lambda  x: x#util.datestr(x, "%a, %d %b %Y %H:%M:%S %1%2")
+rfc822date_notz = lambda  x: x#util.datestr(x, "%a, %d %b %Y %H:%M:%S")
 rfc3339date = lambda  x: util.datestr(x, "%Y-%m-%dT%H:%M:%S%1:%2")
 time_ago = lambda x: util.datestr(_age(x), "%a, %d %b %Y %H:%M:%S %1%2")
 
     baseurl_nossl = "http://www.gravatar.com/avatar/"
     baseurl_ssl = "https://secure.gravatar.com/avatar/"
     baseurl = baseurl_ssl if ssl_enabled else baseurl_nossl
-        
-    
+
+
     # construct the url
     gravatar_url = baseurl + hashlib.md5(email_address.lower()).hexdigest() + "?"
     gravatar_url += urllib.urlencode({'d':default, 's':str(size)})
     """safe unicode function. In case of UnicodeDecode error we try to return
     unicode with errors replace, if this fails we return unicode with 
     string_escape decoding """
-    
+
     try:
         u_str = unicode(str)
     except UnicodeDecodeError:
         except UnicodeDecodeError:
             #incase we have a decode error just represent as byte string
             u_str = unicode(str(str).encode('string_escape'))
-        
+
     return u_str

rhodecode/lib/indexers/__init__.py

+import os
+import sys
 from os.path import dirname as dn, join as jn
+
+#to get the rhodecode import
+sys.path.append(dn(dn(dn(os.path.realpath(__file__)))))
+
 from rhodecode.config.environment import load_environment
 from rhodecode.model.hg import HgModel
 from shutil import rmtree
 from whoosh.fields import TEXT, ID, STORED, Schema, FieldType
 from whoosh.index import create_in, open_dir
 from whoosh.formats import Characters
-from whoosh.highlight import highlight, SimpleFragmenter, HtmlFormatter   
+from whoosh.highlight import highlight, SimpleFragmenter, HtmlFormatter
 
-import os
-import sys
 import traceback
 
-#to get the rhodecode import
-sys.path.append(dn(dn(dn(os.path.realpath(__file__)))))
-
 
 #LOCATION WE KEEP THE INDEX
 IDX_LOCATION = jn(dn(dn(dn(dn(os.path.abspath(__file__))))), 'data', 'index')
 
 
 IDX_NAME = 'HG_INDEX'
-FORMATTER = HtmlFormatter('span', between='\n<span class="break">...</span>\n') 
+FORMATTER = HtmlFormatter('span', between='\n<span class="break">...</span>\n')
 FRAGMENTER = SimpleFragmenter(200)
-                            
+
+from paste.script import command
+import ConfigParser
+
+class MakeIndex(command.Command):
+
+    max_args = 1
+    min_args = 1
+
+    usage = "CONFIG_FILE"
+    summary = "Creates index for full text search given configuration file"
+    group_name = "Whoosh indexing"
+
+    parser = command.Command.standard_parser(verbose=True)
+#    parser.add_option('--repo-location',
+#                      action='store',
+#                      dest='repo_location',
+#                      help="Specifies repositories location to index",
+#                      )
+    parser.add_option('-f',
+                      action='store_true',
+                      dest='full_index',
+                      help="Specifies that index should be made full i.e"
+                            " destroy old and build from scratch",
+                      default=False)
+    def command(self):
+        config_name = self.args[0]
+
+        p = config_name.split('/')
+        if len(p) == 1:
+            root = '.'
+        else:
+            root = '/'.join(p[:-1])
+        print root
+        config = ConfigParser.ConfigParser({'here':root})
+        config.read(config_name)
+        print dict(config.items('app:main'))['index_dir']
+        index_location = dict(config.items('app:main'))['index_dir']
+        #return
+
+        #=======================================================================
+        # WHOOSH DAEMON
+        #=======================================================================
+        from rhodecode.lib.pidlock import LockHeld, DaemonLock
+        from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon
+        try:
+            l = DaemonLock()
+            WhooshIndexingDaemon(index_location=index_location)\
+                .run(full_index=self.options.full_index)
+            l.release()
+        except LockHeld:
+            sys.exit(1)
+
+
 class ResultWrapper(object):
     def __init__(self, search_type, searcher, matcher, highlight_items):
         self.search_type = search_type
         self.matcher = matcher
         self.highlight_items = highlight_items
         self.fragment_size = 200 / 2
-    
+
     @LazyProperty
     def doc_ids(self):
         docs_id = []
             chunks = [offsets for offsets in self.get_chunks()]
             docs_id.append([docnum, chunks])
             self.matcher.next()
-        return docs_id   
-        
+        return docs_id
+
     def __str__(self):
         return '<%s at %s>' % (self.__class__.__name__, len(self.doc_ids))
 
         slice = []
         for docid in self.doc_ids[i:j]:
             slice.append(self.get_full_content(docid))
-        return slice   
-                            
+        return slice
+
 
     def get_full_content(self, docid):
         res = self.searcher.stored_fields(docid[0])
         f_path = res['path'][res['path'].find(res['repository']) \
                              + len(res['repository']):].lstrip('/')
-        
+
         content_short = self.get_short_content(res, docid[1])
         res.update({'content_short':content_short,
                     'content_short_hl':self.highlight(content_short),
                     'f_path':f_path})
-        
-        return res        
-    
+
+        return res
+
     def get_short_content(self, res, chunks):
-        
+
         return ''.join([res['content'][chunk[0]:chunk[1]] for chunk in chunks])
-    
+
     def get_chunks(self):
         """
         Smart function that implements chunking the content
         but not overlap chunks so it doesn't highlight the same
         close occurrences twice.
-        :param matcher:
-        :param size:
+        @param matcher:
+        @param size:
         """
         memory = [(0, 0)]
         for span in self.matcher.spans():
             end = span.endchar or 0
             start_offseted = max(0, start - self.fragment_size)
             end_offseted = end + self.fragment_size
-            
+
             if start_offseted < memory[-1][1]:
                 start_offseted = memory[-1][1]
-            memory.append((start_offseted, end_offseted,))    
-            yield (start_offseted, end_offseted,)  
-        
+            memory.append((start_offseted, end_offseted,))
+            yield (start_offseted, end_offseted,)
+
     def highlight(self, content, top=5):
         if self.search_type != 'content':
             return ''
                  fragmenter=FRAGMENTER,
                  formatter=FORMATTER,
                  top=top)
-        return hl 
+        return hl

rhodecode/lib/indexers/daemon.py

 project_path = dn(dn(dn(dn(os.path.realpath(__file__)))))
 sys.path.append(project_path)
 
-from rhodecode.lib.pidlock import LockHeld, DaemonLock
+
 from rhodecode.model.hg import HgModel
 from rhodecode.lib.helpers import safe_unicode
 from whoosh.index import create_in, open_dir
 from shutil import rmtree
-from rhodecode.lib.indexers import INDEX_EXTENSIONS, IDX_LOCATION, SCHEMA, IDX_NAME
+from rhodecode.lib.indexers import INDEX_EXTENSIONS, SCHEMA, IDX_NAME
 
 from time import mktime
 from vcs.exceptions import ChangesetError, RepositoryError
 # add ch to logger
 log.addHandler(ch)
 
-def scan_paths(root_location):
-    return HgModel.repo_scan('/', root_location, None, True)
+def get_repos_location():
+    return HgModel.get_repos_location()
+
 
 class WhooshIndexingDaemon(object):
     """
     Daemon for atomic jobs
     """
 
-    def __init__(self, indexname='HG_INDEX', repo_location=None):
+    def __init__(self, indexname='HG_INDEX', index_location=None,
+                 repo_location=None):
         self.indexname = indexname
+
+        self.index_location = index_location
+        if not index_location:
+            raise Exception('You have to provide index location')
+
         self.repo_location = repo_location
-        self.repo_paths = scan_paths(self.repo_location)
+        if not repo_location:
+            raise Exception('You have to provide repositories location')
+
+
+
+        self.repo_paths = HgModel.repo_scan('/', self.repo_location, None, True)
         self.initial = False
-        if not os.path.isdir(IDX_LOCATION):
-            os.mkdir(IDX_LOCATION)
+        if not os.path.isdir(self.index_location):
+            os.mkdir(self.index_location)
             log.info('Cannot run incremental index since it does not'
                      ' yet exist running full build')
             self.initial = True
-        
+
     def get_paths(self, repo):
         """
         recursive walk in root dir and return a set of all path in that dir
         """
         index_paths_ = set()
         try:
-            tip = repo.get_changeset()
-            
-            for topnode, dirs, files in tip.walk('/'):
+            for topnode, dirs, files in repo.walk('/', 'tip'):
                 for f in files:
                     index_paths_.add(jn(repo.path, f.path))
                 for dir in dirs:
                     for f in files:
                         index_paths_.add(jn(repo.path, f.path))
-                
+
         except RepositoryError:
             pass
-        return index_paths_        
-    
+        return index_paths_
+
     def get_node(self, repo, path):
         n_path = path[len(repo.path) + 1:]
         node = repo.get_changeset().get_node(n_path)
         return node
-    
+
     def get_node_mtime(self, node):
         return mktime(node.last_changeset.date.timetuple())
-    
+
     def add_doc(self, writer, path, repo):
         """Adding doc to writer"""
         node = self.get_node(repo, path)
             log.debug('    >> %s' % path)
             #just index file name without it's content
             u_content = u''
-        
+
         writer.add_document(owner=unicode(repo.contact),
                         repository=safe_unicode(repo.name),
                         path=safe_unicode(path),
                         content=u_content,
                         modtime=self.get_node_mtime(node),
-                        extension=node.extension)             
+                        extension=node.extension)
 
-    
+
     def build_index(self):
-        if os.path.exists(IDX_LOCATION):
+        if os.path.exists(self.index_location):
             log.debug('removing previous index')
-            rmtree(IDX_LOCATION)
-            
-        if not os.path.exists(IDX_LOCATION):
-            os.mkdir(IDX_LOCATION)
-        
-        idx = create_in(IDX_LOCATION, SCHEMA, indexname=IDX_NAME)
+            rmtree(self.index_location)
+
+        if not os.path.exists(self.index_location):
+            os.mkdir(self.index_location)
+
+        idx = create_in(self.index_location, SCHEMA, indexname=IDX_NAME)
         writer = idx.writer()
-        
+
         for cnt, repo in enumerate(self.repo_paths.values()):
             log.debug('building index @ %s' % repo.path)
-        
+
             for idx_path in self.get_paths(repo):
                 self.add_doc(writer, idx_path, repo)
-        
+
         log.debug('>> COMMITING CHANGES <<')
         writer.commit(merge=True)
         log.debug('>>> FINISHED BUILDING INDEX <<<')
-            
-    
+
+
     def update_index(self):
         log.debug('STARTING INCREMENTAL INDEXING UPDATE')
-            
-        idx = open_dir(IDX_LOCATION, indexname=self.indexname)
+
+        idx = open_dir(self.index_location, indexname=self.indexname)
         # The set of all paths in the index
         indexed_paths = set()
         # The set of all paths we need to re-index
         to_index = set()
-        
+
         reader = idx.reader()
         writer = idx.writer()
-    
+
         # Loop over the stored fields in the index
         for fields in reader.all_stored_fields():
             indexed_path = fields['path']
             indexed_paths.add(indexed_path)
-            
+
             repo = self.repo_paths[fields['repository']]
-            
+
             try:
                 node = self.get_node(repo, indexed_path)
             except ChangesetError:
                 # This file was deleted since it was indexed
                 log.debug('removing from index %s' % indexed_path)
                 writer.delete_by_term('path', indexed_path)
-    
+
             else:
                 # Check if this file was changed since it was indexed
                 indexed_time = fields['modtime']
                     log.debug('adding to reindex list %s' % indexed_path)
                     writer.delete_by_term('path', indexed_path)
                     to_index.add(indexed_path)
-    
+
         # Loop over the files in the filesystem
         # Assume we have a function that gathers the filenames of the
         # documents to be indexed
                     # that wasn't indexed before. So index it!
                     self.add_doc(writer, path, repo)
                     log.debug('re indexing %s' % path)
-                    
+
         log.debug('>> COMMITING CHANGES <<')
         writer.commit(merge=True)
         log.debug('>>> FINISHED REBUILDING INDEX <<<')
-        
+
     def run(self, full_index=False):
         """Run daemon"""
         if full_index or self.initial:
             self.build_index()
         else:
             self.update_index()
-        
-if __name__ == "__main__":
-    arg = sys.argv[1:]
-    if len(arg) != 2:
-        sys.stderr.write('Please specify indexing type [full|incremental]' 
-                         'and path to repositories as script args \n')
-        sys.exit()
-    
-    
-    if arg[0] == 'full':
-        full_index = True
-    elif arg[0] == 'incremental':
-        # False means looking just for changes
-        full_index = False
-    else:
-        sys.stdout.write('Please use [full|incremental]' 
-                         ' as script first arg \n')
-        sys.exit()
-    
-    if not os.path.isdir(arg[1]):
-        sys.stderr.write('%s is not a valid path \n' % arg[1])
-        sys.exit()
-    else:
-        if arg[1].endswith('/'):
-            repo_location = arg[1] + '*'
-        else:
-            repo_location = arg[1] + '/*'
-    
-    try:
-        l = DaemonLock()
-        WhooshIndexingDaemon(repo_location=repo_location)\
-            .run(full_index=full_index)
-        l.release()
-        reload(logging)
-    except LockHeld:
-        sys.exit(1)
-

rhodecode/lib/utils.py

 # along with this program; if not, write to the Free Software
 # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
 # MA  02110-1301, USA.
+from UserDict import DictMixin
+from mercurial import ui, config, hg
+from mercurial.error import RepoError
+from rhodecode.model import meta
+from rhodecode.model.caching_query import FromCache
+from rhodecode.model.db import Repository, User, RhodeCodeUi, RhodeCodeSettings, \
+    UserLog
+from rhodecode.model.repo import RepoModel
+from rhodecode.model.user import UserModel
+from vcs.backends.base import BaseChangeset
+from vcs.backends.git import GitRepository
+from vcs.backends.hg import MercurialRepository
+from vcs.utils.lazy import LazyProperty
+import datetime
+import logging
+import os
 
 """
 Created on April 18, 2010
 Utilities for RhodeCode
 @author: marcink
 """
-from rhodecode.model.caching_query import FromCache
-from mercurial import ui, config, hg
-from mercurial.error import RepoError
-from rhodecode.model import meta
-from rhodecode.model.user import UserModel
-from rhodecode.model.repo import RepoModel
-from rhodecode.model.db import Repository, User, RhodeCodeUi, RhodeCodeSettings, UserLog
-from vcs.backends.base import BaseChangeset
-from vcs.utils.lazy import LazyProperty
-import logging
-import datetime
-import os
 
 log = logging.getLogger(__name__)
 
         sa.rollback()
         log.error('could not log user action:%s', str(e))
 
-def check_repo_dir(paths):
-    repos_path = paths[0][1].split('/')
-    if repos_path[-1] in ['*', '**']:
-        repos_path = repos_path[:-1]
-    if repos_path[0] != '/':
-        repos_path[0] = '/'
-    if not os.path.isdir(os.path.join(*repos_path)):
-        raise Exception('Not a valid repository in %s' % paths[0][1])
+def get_repos(path, recursive=False, initial=False):
+    """
+    Scans given path for repos and return (name,(type,path)) tuple 
+    :param prefix:
+    :param path:
+    :param recursive:
+    :param initial:
+    """
+    from vcs.utils.helpers import get_scm
+    from vcs.exceptions import VCSError
+    scm = get_scm(path)
+    if scm:
+        raise Exception('The given path %s should not be a repository got %s',
+                        path, scm)
+
+    for dirpath in os.listdir(path):
+        try:
+            yield dirpath, get_scm(os.path.join(path, dirpath))
+        except VCSError:
+            pass
+
+if __name__ == '__main__':
+    get_repos('', '/home/marcink/workspace-python')
+
 
 def check_repo_fast(repo_name, base_path):
     if os.path.isdir(os.path.join(base_path, repo_name)):return False
             for k, v in cfg.items(section):
                 baseui.setconfig(section, k, v)
                 log.debug('settings ui from file[%s]%s:%s', section, k, v)
-        if checkpaths:check_repo_dir(cfg.items('paths'))
-
 
     elif read_from == 'db':
         hg_ui = get_hg_ui_cached()
     @LazyProperty
     def raw_id(self):
         """
-        Returns raw string identifing this changeset, useful for web
+        Returns raw string identifying this changeset, useful for web
         representation.
         """
         return '0' * 40
     """
 
     sa = meta.Session()
+    rm = RepoModel(sa)
     user = sa.query(User).filter(User.admin == True).first()
 
-    rm = RepoModel()
+    for name, repo in initial_repo_list.items():
+        if not rm.get(name, cache=False):
+            log.info('repository %s not found creating default', name)
 
-    for name, repo in initial_repo_list.items():
-        if not RepoModel(sa).get(name, cache=False):
-            log.info('repository %s not found creating default', name)
+            if isinstance(repo, MercurialRepository):
+                repo_type = 'hg'
+            if isinstance(repo, GitRepository):
+                repo_type = 'git'
 
             form_data = {
                          'repo_name':name,
+                         'repo_type':repo_type,
                          'description':repo.description if repo.description != 'unknown' else \
                                         'auto description for %s' % name,
                          'private':False
 
     meta.Session.remove()
 
-from UserDict import DictMixin
 
 class OrderedDict(dict, DictMixin):
 

rhodecode/model/db.py

     ui_key = Column("ui_key", TEXT(length=None, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
     ui_value = Column("ui_value", TEXT(length=None, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
     ui_active = Column("ui_active", BOOLEAN(), nullable=True, unique=None, default=True)
-    
-    
-class User(Base): 
+
+
+class User(Base):
     __tablename__ = 'users'
     __table_args__ = (UniqueConstraint('username'), UniqueConstraint('email'), {'useexisting':True})
     user_id = Column("user_id", INTEGER(), nullable=False, unique=True, default=None, primary_key=True)
     lastname = Column("lastname", TEXT(length=None, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
     email = Column("email", TEXT(length=None, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
     last_login = Column("last_login", DATETIME(timezone=False), nullable=True, unique=None, default=None)
-    
+
     user_log = relation('UserLog')
     user_perms = relation('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id")
-    
+
     @LazyProperty
     def full_contact(self):
         return '%s %s <%s>' % (self.name, self.lastname, self.email)
-        
+
     def __repr__(self):
         return "<User('id:%s:%s')>" % (self.user_id, self.username)
-    
+
     def update_lastlogin(self):
         """Update user lastlogin"""
         import datetime
-        
+
         try:
             session = Session.object_session(self)
             self.last_login = datetime.datetime.now()
             session.commit()
             log.debug('updated user %s lastlogin', self.username)
         except Exception:
-            session.rollback()        
-    
-      
-class UserLog(Base): 
+            session.rollback()
+
+
+class UserLog(Base):
     __tablename__ = 'user_logs'
     __table_args__ = {'useexisting':True}
     user_log_id = Column("user_log_id", INTEGER(), nullable=False, unique=True, default=None, primary_key=True)
     user_id = Column("user_id", INTEGER(), ForeignKey(u'users.user_id'), nullable=False, unique=None, default=None)
     repository_id = Column("repository_id", INTEGER(length=None, convert_unicode=False, assert_unicode=None), ForeignKey(u'repositories.repo_id'), nullable=False, unique=None, default=None)
     repository_name = Column("repository_name", TEXT(length=None, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    user_ip = Column("user_ip", TEXT(length=None, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) 
+    user_ip = Column("user_ip", TEXT(length=None, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
     action = Column("action", TEXT(length=None, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
     action_date = Column("action_date", DATETIME(timezone=False), nullable=True, unique=None, default=None)
     revision = Column('revision', TEXT(length=None, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    
+
     user = relation('User')
     repository = relation('Repository')
-    
+
 class Repository(Base):
     __tablename__ = 'repositories'
     __table_args__ = (UniqueConstraint('repo_name'), {'useexisting':True},)
     repo_id = Column("repo_id", INTEGER(), nullable=False, unique=True, default=None, primary_key=True)
     repo_name = Column("repo_name", TEXT(length=None, convert_unicode=False, assert_unicode=None), nullable=False, unique=True, default=None)
+    repo_type = Column("repo_type", TEXT(length=None, convert_unicode=False, assert_unicode=None), nullable=False, unique=False, default=None)
     user_id = Column("user_id", INTEGER(), ForeignKey(u'users.user_id'), nullable=False, unique=False, default=None)
     private = Column("private", BOOLEAN(), nullable=True, unique=None, default=None)
     description = Column("description", TEXT(length=None, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
     fork_id = Column("fork_id", INTEGER(), ForeignKey(u'repositories.repo_id'), nullable=True, unique=False, default=None)
-    
+
     user = relation('User')
     fork = relation('Repository', remote_side=repo_id)
     repo_to_perm = relation('RepoToPerm', cascade='all')
-    
+
     def __repr__(self):
         return "<Repository('id:%s:%s')>" % (self.repo_id, self.repo_name)
-        
+
 class Permission(Base):
     __tablename__ = 'permissions'
     __table_args__ = {'useexisting':True}
     permission_id = Column("permission_id", INTEGER(), nullable=False, unique=True, default=None, primary_key=True)
     permission_name = Column("permission_name", TEXT(length=None, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
     permission_longname = Column("permission_longname", TEXT(length=None, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-    
+
     def __repr__(self):
         return "<Permission('%s:%s')>" % (self.permission_id, self.permission_name)
 
     repo_to_perm_id = Column("repo_to_perm_id", INTEGER(), nullable=False, unique=True, default=None, primary_key=True)
     user_id = Column("user_id", INTEGER(), ForeignKey(u'users.user_id'), nullable=False, unique=None, default=None)
     permission_id = Column("permission_id", INTEGER(), ForeignKey(u'permissions.permission_id'), nullable=False, unique=None, default=None)
-    repository_id = Column("repository_id", INTEGER(), ForeignKey(u'repositories.repo_id'), nullable=False, unique=None, default=None) 
-    
+    repository_id = Column("repository_id", INTEGER(), ForeignKey(u'repositories.repo_id'), nullable=False, unique=None, default=None)
+
     user = relation('User')
     permission = relation('Permission')
     repository = relation('Repository')
     user_to_perm_id = Column("user_to_perm_id", INTEGER(), nullable=False, unique=True, default=None, primary_key=True)
     user_id = Column("user_id", INTEGER(), ForeignKey(u'users.user_id'), nullable=False, unique=None, default=None)
     permission_id = Column("permission_id", INTEGER(), ForeignKey(u'permissions.permission_id'), nullable=False, unique=None, default=None)
-    
+
     user = relation('User')
     permission = relation('Permission')
 
     commit_activity = Column("commit_activity", BLOB(), nullable=False)#JSON data
     commit_activity_combined = Column("commit_activity_combined", BLOB(), nullable=False)#JSON data
     languages = Column("languages", BLOB(), nullable=False)#JSON data
-    
+
     repository = relation('Repository')
 

rhodecode/model/forms.py

 
 class ValidPath(formencode.validators.FancyValidator):
     def to_python(self, value, state):
-        isdir = os.path.isdir(value.replace('*', ''))
-        if (value.endswith('/*') or value.endswith('/**')) and isdir:
-            return value
-        elif not isdir:
+
+        if not os.path.isdir(value):
             msg = _('This is not a valid path')
-        else:
-            msg = _('You need to specify * or ** at the end of path (ie. /tmp/*)')
-
-        raise formencode.Invalid(msg, value, state,
+            raise formencode.Invalid(msg, value, state,
                                      error_dict={'paths_root_path':msg})
+        return value
 
 def UniqSystemEmail(old_data):
     class _UniqSystemEmail(formencode.validators.FancyValidator):

rhodecode/model/hg.py

 """
 from beaker.cache import cache_region
 from mercurial import ui
-from mercurial.hgweb.hgwebdir_mod import findrepos
 from rhodecode.lib import helpers as h
 from rhodecode.lib.utils import invalidate_cache
 from rhodecode.lib.auth import HasRepoPermissionAny
 from sqlalchemy.orm import joinedload
 from vcs.exceptions import RepositoryError, VCSError
 import logging
-import os
 import sys
 log = logging.getLogger(__name__)
 
 try:
     from vcs.backends.hg import MercurialRepository
+    from vcs.backends.git import GitRepository
 except ImportError:
     sys.stderr.write('You have to import vcs module')
     raise Exception('Unable to import vcs')
     """return cached dict with repos
     """
     g = app_globals
-    return HgModel.repo_scan(g.paths[0][0], g.paths[0][1], g.baseui, initial)
+    return HgModel().repo_scan(g.paths[0][1], g.baseui, initial)
 
 @cache_region('long_term', 'cached_repo_list')
 def _get_repos_cached():
     """
     log.info('getting all repositories list')
     from pylons import app_globals as g
-    return HgModel.repo_scan(g.paths[0][0], g.paths[0][1], g.baseui)
+    return HgModel().repo_scan(g.paths[0][1], g.baseui)
 
 @cache_region('super_short_term', 'cached_repos_switcher_list')
 def _get_repos_switcher_cached(cached_repo_list):
     return list(reversed(list(HgModel().get_repo(repo_name))))
 
 class HgModel(object):
-    """Mercurial Model
+    """
+    Mercurial Model
     """
 
-    def __init__(self):
-        pass
+    def __init__(self, sa=None):
+        if not sa:
+            self.sa = meta.Session()
+        else:
+            self.sa = sa
 
-    @staticmethod
-    def repo_scan(repos_prefix, repos_path, baseui, initial=False):
+    def repo_scan(self, repos_path, baseui, initial=False):
         """
         Listing of repositories in given path. This path should not be a 
         repository itself. Return a dictionary of repository objects
-        :param repos_path: path to directory it could take syntax with 
-        * or ** for deep recursive displaying repositories
+        
+        :param repos_path: path to directory containing repositories
+        :param baseui
+        :param initial: initial scann
         """
-        sa = meta.Session()
-        def check_repo_dir(path):
-            """Checks the repository
-            :param path:
-            """
-            repos_path = path.split('/')
-            if repos_path[-1] in ['*', '**']:
-                repos_path = repos_path[:-1]
-            if repos_path[0] != '/':
-                repos_path[0] = '/'
-            if not os.path.isdir(os.path.join(*repos_path)):
-                raise RepositoryError('Not a valid repository in %s' % path)
-        if not repos_path.endswith('*'):
-            raise VCSError('You need to specify * or ** at the end of path '
-                            'for recursive scanning')
+        log.info('scanning for repositories in %s', repos_path)
 
-        check_repo_dir(repos_path)
-        log.info('scanning for repositories in %s', repos_path)
-        repos = findrepos([(repos_prefix, repos_path)])
         if not isinstance(baseui, ui.ui):
             baseui = ui.ui()
 
+        from rhodecode.lib.utils import get_repos
+        repos = get_repos(repos_path)
+
+
         repos_list = {}
         for name, path in repos:
             try:
                     raise RepositoryError('Duplicate repository name %s found in'
                                     ' %s' % (name, path))
                 else:
+                    if path[0] == 'hg':
+                        repos_list[name] = MercurialRepository(path[1], baseui=baseui)
+                        repos_list[name].name = name
 
-                    repos_list[name] = MercurialRepository(path, baseui=baseui)
-                    repos_list[name].name = name
+                    if path[0] == 'git':
+                        repos_list[name] = GitRepository(path[1])
+                        repos_list[name].name = name
 
                     dbrepo = None
                     if not initial:
                         #for initial scann on application first run we don't
                         #have db repos yet.
-                        dbrepo = sa.query(Repository)\
+                        dbrepo = self.sa.query(Repository)\
                             .options(joinedload(Repository.fork))\
                             .filter(Repository.repo_name == name)\
                             .scalar()
                         if dbrepo.user:
                             repos_list[name].contact = dbrepo.user.full_contact
                         else:
-                            repos_list[name].contact = sa.query(User)\
+                            repos_list[name].contact = self.sa.query(User)\
                             .filter(User.admin == True).first().full_contact
             except OSError:
                 continue
-        meta.Session.remove()
+
         return repos_list
 
     def get_repos(self):
         for name, repo in _get_repos_cached().items():
-            if repo._get_hidden():
+
+            if isinstance(repo, MercurialRepository) and repo._get_hidden():
                 #skip hidden web repository
                 continue
 

rhodecode/templates/shortlog/shortlog_data.html

 	</tr>
 %for cnt,cs in enumerate(c.repo_changesets):
 	<tr class="parity${cnt%2}">
-		<td>${h.age(cs._ctx.date())} - ${h.rfc822date_notz(cs._ctx.date())} </td>
+		<td>${h.age(cs.date)} - ${h.rfc822date_notz(cs.date)} </td>
 		<td title="${cs.author}">${h.person(cs.author)}</td>
 		<td>r${cs.revision}:${cs.short_id}</td>
 		<td>
Tip: Filter by directory path e.g. /media app.js to search for public/media/app.js.
Tip: Use camelCasing e.g. ProjME to search for ProjectModifiedEvent.java.
Tip: Filter by extension type e.g. /repo .js to search for all .js files in the /repo directory.
Tip: Separate your search with spaces e.g. /ssh pom.xml to search for src/ssh/pom.xml.
Tip: Use ↑ and ↓ arrow keys to navigate and return to view the file.
Tip: You can also navigate files with Ctrl+j (next) and Ctrl+k (previous) and view the file with Ctrl+o.
Tip: You can also navigate files with Alt+j (next) and Alt+k (previous) and view the file with Alt+o.