Commits

b7w committed 518e3ac

Fix bug: archive was uploaded even if the key already exists

  • Participants
  • Parent commits d56b2ab
  • Branches dev

Comments (0)

Files changed (1)

             raise AppError('7z "{0}" not exists, enter full path to bin'.format(self.path_7z))
         try:
             subprocess.check_output(cmd_str, shell=True)
-            archives = glob.glob(path + '*')
+            archives = sorted(glob.glob(path + '*'))
             if split:
                 return [(i, i.split('.')[-1]) for i in archives]
             return [(i, None) for i in archives]
         except S3ResponseError as e:
             raise AppError('Can not connect to S3, {0}'.format(e))
 
+    def _archive_hash(self, path):
+        """Extract the 40-character hex hash embedded in an archive key/path."""
+        # NOTE(review): prefer a raw string r'\w{40}' to avoid the
+        # invalid-escape-sequence warning on newer Pythons.
+        # NOTE(review): raises AttributeError when no 40-char run is present —
+        # presumably every stored key contains the hash; confirm against get_key().
+        return re.search('\w{40}', path).group(0)
+
     def get_keys(self, bucket_name, root):
         bucket = self.conn.lookup(bucket_name)
         if not bucket:
         key = '/'.join([root, archive.package.path, file_name, ])
         return Key(bucket, name=key)
 
+    def is_exists(self, bucket_name, root, package):
+        """Return True if an archive with *package*'s hash is already stored
+        under ``root/package.path`` in *bucket_name* — used by the upload loop
+        to skip packages that are already archived."""
+        path = '{0}/{1}'.format(root, package.path)
+        keys = self.get_keys(bucket_name, path)
+        return any(self._archive_hash(i.key) == package.hash for i in keys)
+
     def get_archives(self, bucket_name, root):
         """
         Return map of `archive name os str - versions of Key type`
         return sum(i.size for i in keys)
 
     def count(self, bucket_name, root):
-        def archive_hash(path):
-            return re.search('\w{40}', path).group(0)
-
+        # Count distinct archives: the number of unique 40-char hashes among
+        # the stored keys. Hash extraction now lives in the shared helper
+        # self._archive_hash instead of a local nested function.
         keys = self.get_keys(bucket_name, root)
-        return len(set([archive_hash(i.name) for i in keys]))
+        return len(set([self._archive_hash(i.name) for i in keys]))
 
     def count_unique(self, bucket_name, root):
         def base_name(path):
         finder = Finder(folder_config.LOCAL, folder_config.OPTIONS)
         for package in finder.find():
             exclude = folder_config.EXCLUDE and any(re.match(i, package.path) for i in folder_config.EXCLUDE)
-            if not exclude:
+            exists = storage.is_exists(folder_config.BUCKET, folder_config.REMOTE, package)
+            if not exclude and not exists:
                 archive = Archive(package, password=folder_config.PASSWORD, hide=folder_config.HIDE,
                                   level=folder_config.LEVEL, options=folder_config.OPTIONS, path_7z=config.PATH_7Z)
                 for tmp_file, part in archive.archive_to(config.TMP_PATH, archive.file_name):
                     key = storage.get_key(folder_config.BUCKET, folder_config.REMOTE, archive, part)
-                    if not key.exists():
-                        if not dry_run:
-                            storage.upload(key, archive, tmp_file, part)
-                        package_size = package.size / 1024 ** 2
-                        print('Uploaded /{0}/{1}  {{count: {2}, size: {3}mb, part: {4}}}'
-                              .format(folder_config.REMOTE, package.path, package.count, package_size, part or '000'))
-                    elif debug:
-                        print('Exists /{0}/{1}'.format(folder_config.REMOTE, archive.package.path))
+                    if not dry_run:
+                        storage.upload(key, archive, tmp_file, part)
+                    package_size = package.size / 1024 ** 2
+                    print('Uploaded /{0}/{1}  {{count: {2}, size: {3}mb, part: {4}}}'
+                          .format(folder_config.REMOTE, package.path, package.count, package_size, part or '000'))
                     archive.clear(tmp_file)
     info(config)
     clear(config)