Example #1
0
 def handle(self, *args, **options):
     """Dump every Log entry since the previous synchronization to a directory.

     For each new Log row, the package and release metadata plus the archive
     file are written under ``options['path']``, and a top-level 'sync' file
     records the serial range of the dump.  The Synchronization row is then
     advanced so that the next dump resumes where this one stopped.
     """
     # first: determine the type of server
     obj = Synchronization.objects.get_or_create(source='localhost', destination=options['tag'])[0]
     # starting serial: explicit --serial option > stored state > full dump
     if isinstance(options['serial'], int):
         first_serial = options['serial']
         print(cyan(_('Dumping from serial %(syn)s') % {'syn': first_serial}))
     elif obj.last_serial is None:
         first_serial = 0
         print(cyan(_('No previous sync... Dumping all database')))
     else:
         first_serial = obj.last_serial
         print(cyan(_('Previous sync: serial %(syn)s') % {'syn': obj.last_serial}))
     packages = set()  # package ids whose metadata was already written
     releases = set()  # release ids whose metadata was already written
     result = {'data': {}}
     base_path = os.path.abspath(options['path'])
     last_serial = first_serial
     # Fix: start from first_serial instead of -1.  With -1, a run that found
     # no new Log rows overwrote the stored last_serial with -1, forcing the
     # next run to re-dump the whole database.  Re-storing first_serial is a
     # harmless no-op when nothing new was found.
     next_last_serial = first_serial
     for log in Log.objects.filter(id__gte=first_serial).order_by('id').select_related():
         p_path = os.path.join(base_path, log.package.name)
         if log.package.id not in packages:
             self.write_info(log.package.data(), p_path, 'package')
             result['data'][log.package.name] = {}
             packages.add(log.package.id)
         r_path = os.path.join(p_path, log.release.version)
         if log.release.id not in releases:
             self.write_info(log.release.data(), r_path, 'release')
             result['data'][log.package.name][log.release.version] = []
             releases.add(log.release.id)
         # copy the archive next to its metadata file
         dst_path = os.path.join(r_path, log.download.filename)
         src_path = log.download.abspath
         shutil.copy2(src_path, dst_path)
         self.write_info(log.download.data(), r_path, log.download.filename)
         print(yellow(_('Adding %(fn)s (%(pk)s %(vn)s)') % {'pk': log.package.name, 'fn': log.download.filename,
                                                            'vn': log.release.version, }))
         result['data'][log.package.name][log.release.version].append(log.download.filename)
         last_serial = log.id
         next_last_serial = log.id + 1
     # record the serial range covered by this dump for the load side
     result['meta'] = {'first_id': first_serial, 'last_id': last_serial}
     self.write_info(result, base_path, 'sync')
     Synchronization.objects.filter(id=obj.id).update(last_serial=next_last_serial)
Example #2
0
 def handle(self, *args, **options):
     """Dump every Log entry since the previous synchronization to a directory.

     For each new Log row, the package and release metadata plus the archive
     file are written under ``options['path']``, and a top-level 'sync' file
     records the serial range of the dump.  The Synchronization row is then
     advanced so that the next dump resumes where this one stopped.
     """
     # first: determine the type of server
     obj = Synchronization.objects.get_or_create(source='localhost', destination=options['tag'])[0]
     # starting serial: explicit --serial option > stored state > full dump
     if isinstance(options['serial'], int):
         first_serial = options['serial']
         print(cyan(_('Dumping from serial %(syn)s') % {'syn': first_serial}))
     elif obj.last_serial is None:
         first_serial = 0
         print(cyan(_('No previous sync... Dumping all database')))
     else:
         first_serial = obj.last_serial
         print(cyan(_('Previous sync: serial %(syn)s') % {'syn': obj.last_serial}))
     packages = set()  # package ids whose metadata was already written
     releases = set()  # release ids whose metadata was already written
     result = {'data': {}}
     base_path = os.path.abspath(options['path'])
     last_serial = first_serial
     # Fix: start from first_serial instead of -1.  With -1, a run that found
     # no new Log rows overwrote the stored last_serial with -1, forcing the
     # next run to re-dump the whole database.  Re-storing first_serial is a
     # harmless no-op when nothing new was found.
     next_last_serial = first_serial
     for log in Log.objects.filter(id__gte=first_serial).order_by('id').select_related():
         p_path = os.path.join(base_path, log.package.name)
         if log.package.id not in packages:
             self.write_info(log.package.data(), p_path, 'package')
             result['data'][log.package.name] = {}
             packages.add(log.package.id)
         r_path = os.path.join(p_path, log.release.version)
         if log.release.id not in releases:
             self.write_info(log.release.data(), r_path, 'release')
             result['data'][log.package.name][log.release.version] = []
             releases.add(log.release.id)
         # copy the archive next to its metadata file
         dst_path = os.path.join(r_path, log.download.filename)
         src_path = log.download.abspath
         shutil.copy2(src_path, dst_path)
         self.write_info(log.download.data(), r_path, log.download.filename)
         print(yellow(_('Adding %(fn)s (%(pk)s %(vn)s)') % {'pk': log.package.name, 'fn': log.download.filename,
                                                            'vn': log.release.version, }))
         result['data'][log.package.name][log.release.version].append(log.download.filename)
         last_serial = log.id
         next_last_serial = log.id + 1
     # record the serial range covered by this dump for the load side
     result['meta'] = {'first_id': first_serial, 'last_id': last_serial}
     self.write_info(result, base_path, 'sync')
     Synchronization.objects.filter(id=obj.id).update(last_serial=next_last_serial)
Example #3
0
    def handle(self, *args, **options):
        """Load a dump directory into the local database.

        Reads the 'sync' metadata file under ``options['path']``, recreates
        the Package / Release / ReleaseDownload rows it describes, copies the
        archive files into place and verifies each file's MD5 digest.  On
        full success, the Synchronization row is advanced to the dump's last
        serial; on a corrupted file, processing stops and the serial is left
        unchanged.
        """
        # first: determine the type of server
        obj = Synchronization.objects.get_or_create(destination='localhost', source=options['tag'])[0]
        if obj.last_serial is None:
            first_expected_serial = 0
            print(cyan(_('No previous sync')))
        else:
            # the next dump must start right after the last applied serial
            first_expected_serial = obj.last_serial + 1
            print(cyan(_('Previous sync: serial %(syn)s') % {'syn': obj.last_serial}))
        base_path = os.path.abspath(options['path'])
        try:
            # 'sync' holds the dump's package index and its serial range
            data = self.read_info(base_path, 'sync')
            packages = data['data']
            first_serial = data['meta']['first_id']
            last_serial = data['meta']['last_id']
        except KeyError:
            # metadata present but missing expected keys
            print(red(_('Invalid sync file')))
            return
        except ValueError:
            print(red(_('Invalid md5 data')))
            return
        except IOError:
            # sync file absent or unreadable
            print(red(_('Invalid sync file')))
            return
        if first_serial > first_expected_serial:
            # a gap means at least one intermediate dump is missing
            print(red(_('Missing synchronization between %(f)d and %(l)d') % {'f': first_expected_serial,
                                                                              'l': first_serial}))
            if not options['force']:
                return
        # set to True when a corrupted file is found; aborts all remaining work
        stop = False
        for package_name, releases in packages.items():
            p_path = os.path.join(base_path, package_name)
            package_data = self.read_info(p_path, 'package')
            package, created = Package.objects.get_or_create(name=package_name)
            self.set_attr(('name', 'author', 'author_email', 'maintainer', 'maintainer_email', 'home_page', 'license',
                           'summary',  'download_url', 'project_url', ),
                          package_data, package)
            package.save()
            for version, filenames in releases.items():
                r_path = os.path.join(base_path, package_name, version)
                release_data = self.read_info(r_path, 'release')
                release, created = Release.objects.get_or_create(package=package, version=version)
                self.set_attr(('version', 'stable_version', 'description', 'platform', 'keywords', 'docs_url', ),
                              release_data, release)
                # replace (not merge) many-to-many metadata with the dump's values
                release.classifiers.clear()
                for value in release_data.get('classifiers', []):
                    release.classifiers.add(Classifier.get(value))
                for attr_name in ('requires', 'requires_dist', 'provides', 'provides_dist', 'requires_external',
                                  'requires_python', 'obsoletes', 'obsoletes_dist', ):
                    getattr(release, attr_name).clear()
                    for value in release_data.get(attr_name, []):
                        getattr(release, attr_name).add(Dependence.get(value))
                release.save()
                for filename in filenames:
                    filepath = os.path.join(r_path, filename)
                    download_data = self.read_info(r_path, filename)
                    if ReleaseDownload.objects.filter(package=package, release=release, filename=filename).count() > 0:
                        # already imported earlier; skip silently
                        print(yellow(_('Duplicate file: %(f)s') % {'f': filepath}))
                        continue
                    download = ReleaseDownload(package=package, release=release, filename=filename)

                    self.set_attr(('md5_digest', 'downloads', 'pack', 'has_sig', 'comment_text', 'python_version'),
                                  download_data, download)
                    download.package_type = PackageType.get(download_data.get('packagetype'))
                    dirname = os.path.dirname(download.abspath)
                    if not os.path.isdir(dirname):
                        os.makedirs(dirname)
                    # move the archive into the media tree
                    shutil.copy2(filepath, download.abspath)
                    download.file = download.relpath
                    download.url = settings.MEDIA_URL + download.relpath
                    download.size = os.path.getsize(filepath)
                    if download_data.get('upload_time'):
                        download.upload_time = datetime.datetime.strptime(download_data['upload_time'], DATE_FORMAT)\
                            .replace(tzinfo=utc)
                    # verify integrity against the digest recorded in the dump
                    with open(filepath, 'rb') as file_d:
                        md5 = hashlib.md5(file_d.read()).hexdigest()
                    download.md5_digest = md5
                    if md5 != download_data.get('md5_digest'):
                        print(red(_('Corrupted file: %(f)s') % {'f': filepath}))
                        stop = True
                        break
                    # record the completed download (presumably creates the Log
                    # row used by the dump command — confirm)
                    download.log()
                if stop:
                    break
            if stop:
                break
        if not stop:
            # only advance the sync marker after a fully clean import
            Synchronization.objects.filter(id=obj.id).update(last_serial=last_serial)
Example #4
0
    def handle(self, *args, **options):
        """Synchronize the local mirror from a remote XML-RPC server.

        Three modes, selected from the command options:

        * ``--package``: download every version of one named package;
        * incremental (default when a previous sync exists): replay the
          remote changelog since the stored serial;
        * init / ``--latest``: list every remote package and fetch only the
          newest release of each.

        The Synchronization row is advanced to the last processed serial and
        all failed downloads are reported on stderr at the end.
        """
        # first: determine the type of server
        sync_obj = Synchronization.objects.get_or_create(
            source=options['url'], destination='localhost')[0]
        self.retry = options['retry']
        socket.setdefaulttimeout(options['timeout'])
        # 0 means "no limit" (see the `counter >= download_limit > 0` checks)
        download_limit = 0 if not options['limit'] else options['limit']
        # starting serial: explicit --serial option > stored state > full init
        if isinstance(options['serial'], int):
            first_serial = options['serial']
            init = False
            self.stdout.write(
                cyan(
                    _('Download from serial %(syn)s') % {'syn': first_serial}))
        elif sync_obj.last_serial is None or options['init_all']:
            first_serial = 0
            init = True
            self.stdout.write(
                cyan(_('No previous sync... Initializing database')))
        else:
            self.stdout.write(
                cyan(
                    _('Previous sync: serial %(syn)s') %
                    {'syn': sync_obj.last_serial}))
            first_serial = sync_obj.last_serial
            init = False
        self.connect(options['url'])
        # None means "nothing processed": do not advance the sync record
        last_serial = None

        if options['package']:
            # get all releases of the given package
            package_name = options['package']
            self.stdout.write(
                cyan(
                    _('Downloading all versions of %(pkg)s') %
                    {'pkg': package_name}))
            try:
                versions = self.try_download(
                    self.client.package_releases,
                    _('Unable to get releases of %(pkg)s') %
                    {'pkg': package_name}, package_name, True)
            except DownloadException:
                versions = []
            for version in versions:
                self.download_release(package_name, version)
        elif not options['latest'] and (not init or options['init_all']):
            # get all releases from the given serial
            last_serial = first_serial
            try:
                modified_packages = self.try_download(
                    self.client.changelog_since_serial,
                    _('Unable to download changelog'), first_serial + 1)
            except DownloadException:
                modified_packages = []
            counter = 0
            for (package_name, version, timestamp, action,
                 serial) in modified_packages:
                # track the highest serial seen so far
                last_serial = max(serial, last_serial)
                if counter >= download_limit > 0:
                    break
                # changelog entries without a version carry nothing to download
                if version is None:
                    continue
                self.stdout.write(
                    cyan(
                        _('Found %(pkg)s-%(vsn)s') % {
                            'pkg': package_name,
                            'vsn': version
                        }))
                counter += self.download_release(package_name, version)
        else:
            # init: get the last version of all packages
            try:
                last_serial = self.try_download(
                    self.client.changelog_last_serial,
                    _('Unable to download changelog'))
                packages = self.try_download(self.client.list_packages,
                                             _('Unable to list packages'))
            except DownloadException:
                return
            counter = 0
            for package_name in packages:
                if counter >= download_limit > 0:
                    break
                self.stdout.write(
                    cyan(_('Found %(pkg)s') % {'pkg': package_name}))
                self.stdout.write(
                    yellow(
                        _('package releases (%(p)s)') % {'p': package_name}))
                try:
                    versions = self.try_download(
                        self.client.package_releases,
                        _('Unable to get releases of %(pkg)s') %
                        {'pkg': package_name}, package_name)
                except DownloadException:
                    continue
                # drop empty version strings; keep only the first (latest) one
                versions = [x for x in versions if x]
                if not versions:
                    continue
                version = versions[0]
                self.stdout.write(
                    cyan(
                        _('Found %(pkg)s-%(vsn)s') % {
                            'pkg': package_name,
                            'vsn': version
                        }))
                counter += self.download_release(package_name, version)
        if last_serial is not None:
            Synchronization.objects.filter(id=sync_obj.id).update(
                last_serial=last_serial)
        # summarize every (package, version) that failed to download
        for package_name, version in self.error_list:
            self.stderr.write((_('Unable to download %(p)s-%(v)s') % {
                'p': package_name,
                'v': version
            }))
Example #5
0
    def download_release(self, package_name, version):
        """Download all files attached to a given release of a given package.

        Fetches the release metadata, package roles and release URLs from the
        remote XML-RPC client, updates the local Package / Release rows, and
        downloads every archive not already mirrored.

        Returns the number of files actually downloaded (0 on failure; failed
        (package, version) pairs are appended to ``self.error_list``).
        """
        downloaded_files = 0
        package = self.package_cache.get(package_name)
        values = {'p': package_name, 'v': version}
        self.stdout.write(yellow(_('release data (%(p)s, %(v)s)') % values))
        try:
            release_data = self.try_download(
                self.client.release_data,
                _('Unable to get release date of %(p)s-%(v)s') % values,
                package_name, version)
            update_kwargs = {}
            # update package object with latest metadata
            for attr_name in ('home_page', 'license', 'summary',
                              'download_url', 'project_url', 'author',
                              'author_email', 'maintainer',
                              'maintainer_email'):
                if release_data.get(attr_name):
                    # truncate to fit the column size
                    update_kwargs[attr_name] = release_data[attr_name][0:450]
            if update_kwargs:
                Package.objects.filter(id=package.id).update(**update_kwargs)
            self.stdout.write(
                yellow(_('package roles (%(p)s, %(v)s)') % values))
            roles = self.try_download(
                self.client.package_roles,
                _('Unable to get roles for %(p)s-%(v)s') % values,
                package_name)
            # roles of persons: replace the whole set with the remote one
            PackageRole.objects.filter(package=package).delete()
            package_roles = [
                PackageRole(package=package,
                            role=PackageRole.OWNER
                            if role == 'Owner' else PackageRole.MAINTAINER,
                            user=self.user_cache.get(username))
                for (role, username) in roles
            ]
            PackageRole.objects.bulk_create(package_roles)
        except DownloadException:
            self.error_list.append((package_name, version))
            return 0

        # update release object
        release = Release.objects.get_or_create(package=package,
                                                version=version)[0]
        for attr_name in ('stable_version', 'description', 'platform',
                          'docs_url', 'project_url', 'keywords'):
            if release_data.get(attr_name):
                # Fix: these are release attributes — they were previously set
                # on `package`, so release.save() below never persisted them.
                setattr(release, attr_name, release_data.get(attr_name)[0:450])
        release.is_hidden = release_data.get('_pypi_hidden', False)
        for attr_name in ('classifiers', 'requires', 'requires_dist',
                          'provides', 'provides_dist', 'obsoletes',
                          'obsoletes_dist', 'requires_external',
                          'requires_python'):
            # classifiers reference Classifier rows, everything else Dependence
            cls = Classifier if attr_name == 'classifiers' else Dependence
            if release_data.get(attr_name):
                getattr(release, attr_name).clear()
                for key in release_data[attr_name]:
                    getattr(release, attr_name).add(cls.get(key))
        release.save()

        # update archives
        self.stdout.write(yellow(_('release urls (%(p)s, %(v)s)') % values))
        try:
            release_urls = self.try_download(
                self.client.release_urls,
                _('Unable to get release urls of %(p)s-%(v)s') % values,
                package_name, version)
        except DownloadException:
            return 0
        for release_url in release_urls:
            md5_digest = release_url.get('md5_digest')
            c = ReleaseDownload.objects.filter(md5_digest=md5_digest,
                                               package=package,
                                               release=release).count()
            # skip files already mirrored or entries missing url/filename
            if c > 0 or not release_url.get('url') or not release_url.get(
                    'filename'):
                continue
            self.stdout.write(
                green(_('Downloading %(url)s') % {'url': release_url['url']}))
            filename = release_url['filename']
            download = ReleaseDownload(package=package,
                                       release=release,
                                       filename=filename)
            path = download.abspath
            path_dirname = os.path.dirname(path)
            if not os.path.isdir(path_dirname):
                os.makedirs(path_dirname)
            try:
                self.try_download(
                    self.download_release_file,
                    _('Unable to download file %(url)s') %
                    {'url': release_url}, path, release_url)
            except DownloadException:
                # remember the miss so a later run can retry this release
                ReleaseMiss.objects.get_or_create(release=release)
                self.error_list.append((package_name, version))
                continue
            download.file = download.relpath
            download.url = settings.MEDIA_URL + download.relpath
            if release_url.get('packagetype'):
                download.package_type = PackageType.get(
                    release_url.get('packagetype'))
            if release_url.get('upload_time'):
                download.upload_time = datetime.datetime.strptime(release_url['upload_time'].value, "%Y%m%dT%H:%M:%S") \
                    .replace(tzinfo=utc)
            for attr_name in ('filename', 'size', 'downloads', 'has_sig',
                              'python_version', 'comment_text', 'md5_digest'):
                if release_url.get(attr_name):
                    setattr(download, attr_name, release_url[attr_name])
            download.log()
            downloaded_files += 1
        # every file of this release is now present: clear any pending miss
        ReleaseMiss.objects.filter(release=release).delete()
        return downloaded_files
Example #6
0
    def handle(self, *args, **options):
        """Synchronize the local mirror from a remote XML-RPC server.

        Three modes, selected from the command options:

        * ``--package``: download every version of one named package;
        * incremental (default when a previous sync exists): replay the
          remote changelog since the stored serial;
        * init / ``--latest``: list every remote package and fetch only the
          newest release of each.

        The Synchronization row is advanced to the last processed serial and
        all failed downloads are reported on stderr at the end.
        """
        # first: determine the type of server
        obj = Synchronization.objects.get_or_create(source=options['url'], destination='localhost')[0]
        self.retry = options['retry']
        socket.setdefaulttimeout(options['timeout'])
        # 0 means "no limit" (see the `counter >= download_limit > 0` checks)
        download_limit = 0 if not options['limit'] else options['limit']
        # starting serial: explicit --serial option > stored state > full init
        if isinstance(options['serial'], int):
            first_serial = options['serial']
            init = False
            self.stdout.write(cyan(_('Download from serial %(syn)s') % {'syn': first_serial}))
        elif obj.last_serial is None or options['init_all']:
            first_serial = 0
            init = True
            self.stdout.write(cyan(_('No previous sync... Initializing database')))
        else:
            self.stdout.write(cyan(_('Previous sync: serial %(syn)s') % {'syn': obj.last_serial}))
            first_serial = obj.last_serial
            init = False
        self.connect(options['url'])
        # None means "nothing processed": do not advance the sync record
        last_serial = None

        if options['package']:
            # get all releases of the given package
            package_name = options['package']
            self.stdout.write(cyan(_('Downloading all versions of %(pkg)s') % {'pkg': package_name}))
            try:
                versions = self.try_download(self.client.package_releases,
                                             _('Unable to get releases of %(pkg)s') % {'pkg': package_name},
                                             package_name, True)
            except DownloadException:
                versions = []
            for version in versions:
                self.download_release(package_name, version)
        elif not options['latest'] and (not init or options['init_all']):
            # get all releases from the given serial
            last_serial = first_serial
            try:
                modified_packages = self.try_download(self.client.changelog_since_serial,
                                                      _('Unable to download changelog'), first_serial + 1)
            except DownloadException:
                modified_packages = []
            counter = 0
            for (package_name, version, timestamp, action, serial) in modified_packages:
                # track the highest serial seen so far
                last_serial = max(serial, last_serial)
                if counter >= download_limit > 0:
                    break
                # changelog entries without a version carry nothing to download
                if version is None:
                    continue
                self.stdout.write(cyan(_('Found %(pkg)s-%(vsn)s') % {'pkg': package_name, 'vsn': version}))
                counter += self.download_release(package_name, version)
        else:
            # init: get the last version of all packages
            try:
                last_serial = self.try_download(self.client.changelog_last_serial, _('Unable to download changelog'))
                packages = self.try_download(self.client.list_packages, _('Unable to list packages'))
            except DownloadException:
                return
            counter = 0
            for package_name in packages:
                if counter >= download_limit > 0:
                    break
                self.stdout.write(cyan(_('Found %(pkg)s') % {'pkg': package_name}))
                self.stdout.write(yellow(_('package releases (%(p)s)') % {'p': package_name}))
                try:
                    versions = self.try_download(self.client.package_releases,
                                                 _('Unable to get releases of %(pkg)s') % {'pkg': package_name},
                                                 package_name)
                except DownloadException:
                    continue
                # drop empty version strings; keep only the first (latest) one
                versions = [x for x in versions if x]
                if not versions:
                    continue
                version = versions[0]
                self.stdout.write(cyan(_('Found %(pkg)s-%(vsn)s') % {'pkg': package_name, 'vsn': version}))
                counter += self.download_release(package_name, version)
        if last_serial is not None:
            Synchronization.objects.filter(id=obj.id).update(last_serial=last_serial)
        # summarize every (package, version) that failed to download
        for package_name, version in self.error_list:
            self.stderr.write((_('Unable to download %(p)s-%(v)s') % {'p': package_name, 'v': version}))
Example #7
0
    def download_release(self, package_name, version):
        """Download all files attached to a given release of a given package.

        Fetches the release metadata, package roles and release URLs from the
        remote XML-RPC client, updates the local Package / Release rows, and
        downloads every archive not already mirrored.

        Returns the number of files actually downloaded (0 on failure; failed
        (package, version) pairs are appended to ``self.error_list``).
        """
        downloaded_files = 0
        package = self.package_cache.get(package_name)
        values = {'p': package_name, 'v': version}
        self.stdout.write(yellow(_('release data (%(p)s, %(v)s)') % values))
        try:
            release_data = self.try_download(self.client.release_data,
                                             _('Unable to get release date of %(p)s-%(v)s') % values, package_name,
                                             version)
            update_kwargs = {}
            # update package object with latest metadata
            for attr_name in ('home_page', 'license', 'summary', 'download_url', 'project_url',
                              'author', 'author_email', 'maintainer', 'maintainer_email'):
                if release_data.get(attr_name):
                    update_kwargs[attr_name] = release_data[attr_name]
            if update_kwargs:
                Package.objects.filter(id=package.id).update(**update_kwargs)
            self.stdout.write(yellow(_('package roles (%(p)s, %(v)s)') % values))
            roles = self.try_download(self.client.package_roles, _('Unable to get roles for %(p)s-%(v)s') % values,
                                      package_name)
            # roles of persons: replace the whole set with the remote one
            PackageRole.objects.filter(package=package).delete()
            package_roles = [PackageRole(package=package,
                                         role=PackageRole.OWNER if role == 'Owner' else PackageRole.MAINTAINER,
                                         user=self.user_cache.get(username)) for (role, username) in roles]
            PackageRole.objects.bulk_create(package_roles)
        except DownloadException:
            self.error_list.append((package_name, version))
            return 0

        # update release object
        release = Release.objects.get_or_create(package=package, version=version)[0]
        for attr_name in ('stable_version', 'description', 'platform', 'docs_url', 'project_url', 'keywords'):
            if release_data.get(attr_name):
                # Fix: these are release attributes — they were previously set
                # on `package`, so release.save() below never persisted them.
                setattr(release, attr_name, release_data.get(attr_name))
        release.is_hidden = release_data.get('_pypi_hidden', False)
        for attr_name in ('classifiers', 'requires', 'requires_dist', 'provides', 'provides_dist',
                          'obsoletes', 'obsoletes_dist', 'requires_external', 'requires_python'):
            # classifiers reference Classifier rows, everything else Dependence
            cls = Classifier if attr_name == 'classifiers' else Dependence
            if release_data.get(attr_name):
                getattr(release, attr_name).clear()
                for key in release_data[attr_name]:
                    getattr(release, attr_name).add(cls.get(key))
        release.save()

        # update archives
        self.stdout.write(yellow(_('release urls (%(p)s, %(v)s)') % values))
        try:
            release_urls = self.try_download(self.client.release_urls,
                                             _('Unable to get release urls of %(p)s-%(v)s') % values,
                                             package_name, version)
        except DownloadException:
            return 0
        for release_url in release_urls:
            md5_digest = release_url.get('md5_digest')
            c = ReleaseDownload.objects.filter(md5_digest=md5_digest, package=package, release=release).count()
            # skip files already mirrored or entries missing url/filename
            if c > 0 or not release_url.get('url') or not release_url.get('filename'):
                continue
            self.stdout.write(green(_('Downloading %(url)s') % {'url': release_url['url']}))
            filename = release_url['filename']
            download = ReleaseDownload(package=package, release=release, filename=filename)
            path = download.abspath
            path_dirname = os.path.dirname(path)
            if not os.path.isdir(path_dirname):
                os.makedirs(path_dirname)
            try:
                self.try_download(self.download_release_file, _('Unable to download file %(url)s') % {'url': release_url},
                                  path, release_url)
            except DownloadException:
                # remember the miss so a later run can retry this release
                ReleaseMiss.objects.get_or_create(release=release)
                self.error_list.append((package_name, version))
                continue
            download.file = download.relpath
            download.url = settings.MEDIA_URL + download.relpath
            if release_url.get('packagetype'):
                download.package_type = PackageType.get(release_url.get('packagetype'))
            if release_url.get('upload_time'):
                download.upload_time = datetime.datetime.strptime(release_url['upload_time'].value, "%Y%m%dT%H:%M:%S") \
                    .replace(tzinfo=utc)
            for attr_name in ('filename', 'size', 'downloads', 'has_sig', 'python_version',
                              'comment_text', 'md5_digest'):
                if release_url.get(attr_name):
                    setattr(download, attr_name, release_url[attr_name])
            download.log()
            downloaded_files += 1
        # every file of this release is now present: clear any pending miss
        ReleaseMiss.objects.filter(release=release).delete()
        return downloaded_files
Example #8
0
    def handle(self, *args, **options):
        """Load a dump directory into the local database.

        Reads the 'sync' metadata file under ``options['path']``, recreates
        the Package / Release / ReleaseDownload rows it describes, copies the
        archive files into place and verifies each file's MD5 digest.  On
        full success, the Synchronization row is advanced to the dump's last
        serial; on a corrupted file, processing stops and the serial is left
        unchanged.
        """
        # first: determine the type of server
        obj = Synchronization.objects.get_or_create(destination='localhost',
                                                    source=options['tag'])[0]
        if obj.last_serial is None:
            first_expected_serial = 0
            print(cyan(_('No previous sync')))
        else:
            # the next dump must start right after the last applied serial
            first_expected_serial = obj.last_serial + 1
            print(
                cyan(
                    _('Previous sync: serial %(syn)s') %
                    {'syn': obj.last_serial}))
        base_path = os.path.abspath(options['path'])
        try:
            # 'sync' holds the dump's package index and its serial range
            data = self.read_info(base_path, 'sync')
            packages = data['data']
            first_serial = data['meta']['first_id']
            last_serial = data['meta']['last_id']
        except KeyError:
            # metadata present but missing expected keys
            print(red(_('Invalid sync file')))
            return
        except ValueError:
            print(red(_('Invalid md5 data')))
            return
        except IOError:
            # sync file absent or unreadable
            print(red(_('Invalid sync file')))
            return
        if first_serial > first_expected_serial:
            # a gap means at least one intermediate dump is missing
            print(
                red(
                    _('Missing synchronization between %(f)d and %(l)d') % {
                        'f': first_expected_serial,
                        'l': first_serial
                    }))
            if not options['force']:
                return
        # set to True when a corrupted file is found; aborts all remaining work
        stop = False
        for package_name, releases in packages.items():
            p_path = os.path.join(base_path, package_name)
            package_data = self.read_info(p_path, 'package')
            package, created = Package.objects.get_or_create(name=package_name)
            self.set_attr((
                'name',
                'author',
                'author_email',
                'maintainer',
                'maintainer_email',
                'home_page',
                'license',
                'summary',
                'download_url',
                'project_url',
            ), package_data, package)
            package.save()
            for version, filenames in releases.items():
                r_path = os.path.join(base_path, package_name, version)
                release_data = self.read_info(r_path, 'release')
                release, created = Release.objects.get_or_create(
                    package=package, version=version)
                self.set_attr((
                    'version',
                    'stable_version',
                    'description',
                    'platform',
                    'keywords',
                    'docs_url',
                ), release_data, release)
                # replace (not merge) many-to-many metadata with the dump's values
                release.classifiers.clear()
                for value in release_data.get('classifiers', []):
                    release.classifiers.add(Classifier.get(value))
                for attr_name in (
                        'requires',
                        'requires_dist',
                        'provides',
                        'provides_dist',
                        'requires_external',
                        'requires_python',
                        'obsoletes',
                        'obsoletes_dist',
                ):
                    getattr(release, attr_name).clear()
                    for value in release_data.get(attr_name, []):
                        getattr(release, attr_name).add(Dependence.get(value))
                release.save()
                for filename in filenames:
                    filepath = os.path.join(r_path, filename)
                    download_data = self.read_info(r_path, filename)
                    if ReleaseDownload.objects.filter(
                            package=package, release=release,
                            filename=filename).count() > 0:
                        # already imported earlier; skip silently
                        print(
                            yellow(
                                _('Duplicate file: %(f)s') % {'f': filepath}))
                        continue
                    download = ReleaseDownload(package=package,
                                               release=release,
                                               filename=filename)

                    self.set_attr(
                        ('md5_digest', 'downloads', 'pack', 'has_sig',
                         'comment_text', 'python_version'), download_data,
                        download)
                    download.package_type = PackageType.get(
                        download_data.get('packagetype'))
                    dirname = os.path.dirname(download.abspath)
                    if not os.path.isdir(dirname):
                        os.makedirs(dirname)
                    # move the archive into the media tree
                    shutil.copy2(filepath, download.abspath)
                    download.file = download.relpath
                    download.url = settings.MEDIA_URL + download.relpath
                    download.size = os.path.getsize(filepath)
                    if download_data.get('upload_time'):
                        download.upload_time = datetime.datetime.strptime(download_data['upload_time'], DATE_FORMAT)\
                            .replace(tzinfo=utc)
                    # verify integrity against the digest recorded in the dump
                    with open(filepath, 'rb') as file_d:
                        md5 = hashlib.md5(file_d.read()).hexdigest()
                    download.md5_digest = md5
                    if md5 != download_data.get('md5_digest'):
                        print(red(
                            _('Corrupted file: %(f)s') % {'f': filepath}))
                        stop = True
                        break
                    # record the completed download (presumably creates the Log
                    # row used by the dump command — confirm)
                    download.log()
                if stop:
                    break
            if stop:
                break
        if not stop:
            # only advance the sync marker after a fully clean import
            Synchronization.objects.filter(id=obj.id).update(
                last_serial=last_serial)