def handle(self, *args, **options):
    # first: determine the type of server
    obj = Synchronization.objects.get_or_create(destination='localhost', source=options['tag'])[0]
    if obj.last_serial is None:
        first_expected_serial = 0
        print(cyan(_('No previous sync')))
    else:
        first_expected_serial = obj.last_serial + 1
        print(cyan(_('Previous sync: serial %(syn)s') % {'syn': obj.last_serial}))
    base_path = os.path.abspath(options['path'])
    try:
        data = self.read_info(base_path, 'sync')
        packages = data['data']
        first_serial = data['meta']['first_id']
        last_serial = data['meta']['last_id']
    except KeyError:
        print(red(_('Invalid sync file')))
        return
    except ValueError:
        print(red(_('Invalid md5 data')))
        return
    except IOError:
        print(red(_('Invalid sync file')))
        return
    if first_serial > first_expected_serial:
        print(red(_('Missing synchronization between %(f)d and %(l)d')
                  % {'f': first_expected_serial, 'l': first_serial}))
        if not options['force']:
            return
    stop = False
    for package_name, releases in packages.items():
        p_path = os.path.join(base_path, package_name)
        package_data = self.read_info(p_path, 'package')
        package, created = Package.objects.get_or_create(name=package_name)
        self.set_attr(('name', 'author', 'author_email', 'maintainer', 'maintainer_email',
                       'home_page', 'license', 'summary', 'download_url', 'project_url', ),
                      package_data, package)
        package.save()
        for version, filenames in releases.items():
            r_path = os.path.join(base_path, package_name, version)
            release_data = self.read_info(r_path, 'release')
            release, created = Release.objects.get_or_create(package=package, version=version)
            self.set_attr(('version', 'stable_version', 'description', 'platform', 'keywords',
                           'docs_url', ), release_data, release)
            release.classifiers.clear()
            for value in release_data.get('classifiers', []):
                release.classifiers.add(Classifier.get(value))
            for attr_name in ('requires', 'requires_dist', 'provides', 'provides_dist',
                              'requires_external', 'requires_python', 'obsoletes',
                              'obsoletes_dist', ):
                getattr(release, attr_name).clear()
                for value in release_data.get(attr_name, []):
                    getattr(release, attr_name).add(Dependence.get(value))
            release.save()
            for filename in filenames:
                filepath = os.path.join(r_path, filename)
                download_data = self.read_info(r_path, filename)
                if ReleaseDownload.objects.filter(package=package, release=release,
                                                  filename=filename).count() > 0:
                    print(yellow(_('Duplicate file: %(f)s') % {'f': filepath}))
                    continue
                download = ReleaseDownload(package=package, release=release, filename=filename)
                self.set_attr(('md5_digest', 'downloads', 'pack', 'has_sig', 'comment_text',
                               'python_version'), download_data, download)
                download.package_type = PackageType.get(download_data.get('packagetype'))
                dirname = os.path.dirname(download.abspath)
                if not os.path.isdir(dirname):
                    os.makedirs(dirname)
                shutil.copy2(filepath, download.abspath)
                download.file = download.relpath
                download.url = settings.MEDIA_URL + download.relpath
                download.size = os.path.getsize(filepath)
                if download_data.get('upload_time'):
                    download.upload_time = datetime.datetime.strptime(
                        download_data['upload_time'], DATE_FORMAT).replace(tzinfo=utc)
                with open(filepath, 'rb') as file_d:
                    md5 = hashlib.md5(file_d.read()).hexdigest()
                download.md5_digest = md5
                if md5 != download_data.get('md5_digest'):
                    print(red(_('Corrupted file: %(f)s') % {'f': filepath}))
                    stop = True
                    break
                download.log()
            if stop:
                break
        if stop:
            break
    if not stop:
        Synchronization.objects.filter(id=obj.id).update(last_serial=last_serial)
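# Illustration of the structures handle() expects from read_info(), inferred from the
# lookups above. The package, version and file names below are made up for the example;
# the real read_info() implementation (and the on-disk encoding it reads) lives elsewhere
# in this command class.
EXAMPLE_SYNC_DATA = {
    'meta': {'first_id': 0, 'last_id': 41},  # serial range covered by this dump
    'data': {                                # package name -> version -> list of filenames
        'example-pkg': {'1.0.0': ['example-pkg-1.0.0.tar.gz']},
    },
}
# For every package, release and file listed under 'data', read_info() is then called again
# on the matching sub-directory with 'package', 'release' or the download's filename, and
# the returned attribute dict is applied through set_attr().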
def setup(request):
    if request.method != 'POST':
        raise PermissionDenied(_('Only POST requests are allowed'))
    ct_type = request.META.get('CONTENT_TYPE', '')
    infos = [x.strip().partition('=') for x in ct_type.split(';')]
    boundary, encoding = None, 'ascii'
    for info in infos:
        if info[0] == 'boundary':
            boundary = info[2]
        elif info[0] == 'charset':
            encoding = info[2]
    if boundary is None:
        raise PermissionDenied(_('Invalid POST form'))
    # parse the POST query by hand
    mid_boundary = ('\n--' + boundary + '\n').encode(encoding)
    end_boundary = ('\n--' + boundary + '--\n').encode(encoding)
    fields = request.body.split(mid_boundary)
    values = QueryDict('', mutable=True, encoding=encoding)
    files = {}
    for part in fields:
        lines = part.split(b'\n\n', 1)
        if len(lines) != 2:
            continue
        infos = [x.strip().partition('=') for x in lines[0].decode(encoding).split(';')]
        key, filename = None, None
        for info in infos:
            if info[0] == 'name':
                key = info[2][1:-1]
            elif info[0] == 'filename':
                filename = info[2][1:-1]
        if key is None:
            continue
        value = lines[1]
        if value.endswith(end_boundary):
            value = value[:-len(end_boundary)]
        if filename is None:
            values.setlistdefault(key, [])
            values.appendlist(key, value)
        else:
            files[key] = filename, value
    # the POST data are parsed, let's go
    action = values.get(':action')
    if action in ('submit', 'file_upload'):
        package_name = values.get('name', '')
        version_name = values.get('version', '')
        if not package_name or not version_name:
            raise PermissionDenied(_('No package name provided'))
        if request.user.is_anonymous:
            return HttpResponse(ugettext('You must be authenticated'), status=401)
        package, package_created = Package.objects.get_or_create(name=package_name)
        if package_created:
            PackageRole(package=package, user=request.user, role=PackageRole.OWNER).save()
        elif not request.user.is_superuser:
            if PackageRole.objects.filter(package=package, user=request.user).count() == 0:
                return HttpResponse(ugettext('You are not allowed to update this package'),
                                    status=401)
        for attr_name in ('name', 'home_page', 'author_email', 'download_url', 'author',
                          'license', 'summary', 'maintainer', 'maintainer_email',
                          'project_url', ):
            if values.get(attr_name):
                setattr(package, attr_name, values.get(attr_name))
        package.save()
        release, created = Release.objects.get_or_create(package=package, version=version_name)
        for attr_name in ('stable_version', 'description', 'platform', 'keywords', 'docs_url', ):
            if values.get(attr_name):
                setattr(release, attr_name, values.get(attr_name))
        release.classifiers.clear()
        for classifier in values.getlist('classifiers', []):
            release.classifiers.add(Classifier.get(classifier))
        for attr_name in ('requires', 'requires_dist', 'provides', 'provides_dist',
                          'obsoletes', 'obsoletes_dist', 'requires_external',
                          'requires_python'):
            getattr(release, attr_name).clear()
            for dep in values.getlist(attr_name, []):
                getattr(release, attr_name).add(Dependence.get(dep))
        release.save()
        if action == 'file_upload':
            if 'content' not in files:
                raise PermissionDenied
            filename, content = files['content']
            # noinspection PyUnboundLocalVariable
            if ReleaseDownload.objects.filter(package=package, release=release,
                                              filename=filename).count() > 0:
                raise PermissionDenied
            md5 = hashlib.md5(content).hexdigest()
            if md5 != values.get('md5_digest'):
                raise PermissionDenied
            download = ReleaseDownload(package=package, release=release, filename=filename)
            path = download.abspath
            path_dirname = os.path.dirname(path)
            if not os.path.isdir(path_dirname):
                os.makedirs(path_dirname)
            with open(path, 'wb') as out_fd:
                out_fd.write(content)
            download.md5_digest = md5
            download.size = len(content)
            download.upload_time = datetime.datetime.utcnow().replace(tzinfo=utc)
            download.url = settings.MEDIA_URL + path[MEDIA_ROOT_LEN:]
            download.file = download.relpath
            download.package_type = PackageType.get(values.get('filetype', 'source'))
            download.comment_text = values.get('comment', '')
            download.python_version = values.get('pyversion')
            download.log()
    template_values = {}
    return TemplateResponse(request, 'pythonnest/simple.html', template_values)
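# Client-side sketch of the body that setup() parses above. This is only an illustration:
# the hand-rolled parser splits on '\n--boundary\n' separators (not the RFC 2046 '\r\n'
# form), matching the body historically produced by distutils' "upload" command, so a
# standard multipart encoder would not be recognised. The endpoint URL is a placeholder,
# and how the request gets authenticated (request.user above) depends on the deployment;
# an unauthenticated request is answered with a 401.
import hashlib
import os
from urllib.request import Request, urlopen

BOUNDARY = 'f8babc52b4b3877dcfd2ca4a1c0e95a4'


def _encode_body(fields, files):
    """Build the non-standard multipart body; fields: {str: str}, files: {str: (filename, bytes)}."""
    sep = ('\n--' + BOUNDARY + '\n').encode('ascii')
    end = ('\n--' + BOUNDARY + '--\n').encode('ascii')
    body = b''
    for key, value in fields.items():
        body += sep + ('Content-Disposition: form-data; name="%s"' % key).encode('ascii')
        body += b'\n\n' + value.encode('utf-8')
    for key, (filename, content) in files.items():
        body += sep + ('Content-Disposition: form-data; name="%s"; filename="%s"'
                       % (key, filename)).encode('ascii')
        body += b'\n\n' + content
    return body + end


def upload_sdist(url, name, version, sdist_path):
    """POST a source distribution to the (hypothetical) upload URL served by setup()."""
    with open(sdist_path, 'rb') as fd:
        content = fd.read()
    fields = {
        ':action': 'file_upload',
        'name': name,
        'version': version,
        'filetype': 'sdist',
        'pyversion': '',
        'md5_digest': hashlib.md5(content).hexdigest(),
    }
    body = _encode_body(fields, {'content': (os.path.basename(sdist_path), content)})
    headers = {'Content-Type': 'multipart/form-data; boundary=%s; charset=utf-8' % BOUNDARY}
    return urlopen(Request(url, data=body, headers=headers))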