def handle_uploaded_image(request, package_id, gallery_slug):
    """
    Save an uploaded gallery image to a temp file and dispatch processing.

    :param request: Django Request carrying an 'image' file upload
    :type request: HttpRequest
    :param package_id: str
    :param gallery_slug: str
    :return: rq Job when Redis is enabled, otherwise the result dict
             from handle_uploaded_screenshot; a failure dict for
             unsupported file extensions
    """
    uploaded = request.FILES['image']
    extension = os.path.splitext(uploaded.name)[1].lower()
    # Reject anything that is not a supported image type up front.
    if extension not in ('.png', '.jpg', '.gif'):
        return {
            'success': False,
            'exception': _('Upload failed, unsupported file extension.')
        }
    temp_root = settings.TEMP_ROOT
    if not os.path.exists(temp_root):
        mkdir_p(temp_root)
    # uuid1-based name avoids collisions between concurrent uploads
    image_temp_path = os.path.join(temp_root, str(uuid.uuid1()) + extension)
    with open(image_temp_path, 'wb+') as destination:
        for chunk in uploaded.chunks():
            destination.write(chunk)
    os.chmod(image_temp_path, 0o755)
    content = {
        'id': package_id,
        'slug': gallery_slug,
        'path': image_temp_path
    }
    if settings.ENABLE_REDIS is True:
        # hand off to the background worker
        return django_rq.get_queue('high').enqueue(handle_uploaded_screenshot,
                                                   content)
    return handle_uploaded_screenshot(content)
def write_callback(self, temp_path):
    """
    The async callback for method update_storage: move the freshly
    written temp deb into MEDIA_ROOT and refresh this instance's
    storage path and hash.

    :param temp_path: Created temp deb file for updating result
    :type temp_path: str
    :return: No return value
    """
    if preferences.Setting.atomic_storage:
        # Atomic mode: place the file in a brand-new uuid directory and
        # repoint self.storage, leaving the old copy untouched.
        versions_root = os.path.join(settings.MEDIA_ROOT, 'versions')
        if not os.path.isdir(versions_root):
            mkdir_p(versions_root)
        dest_dir = os.path.join(versions_root, str(uuid.uuid1()))
        if not os.path.isdir(dest_dir):
            mkdir_p(dest_dir)
        dest_path = os.path.join(dest_dir, self.base_filename())
        # shutil.move works across filesystems, unlike os.rename
        shutil.move(temp_path, dest_path)
        os.chmod(dest_path, 0o755)
        self.storage.name = os.path.relpath(dest_path, settings.MEDIA_ROOT)
    else:
        # In-place mode: overwrite the existing storage file.
        current_path = os.path.join(settings.MEDIA_ROOT, self.storage.name)
        os.unlink(current_path)
        shutil.move(temp_path, current_path)
        os.chmod(current_path, 0o755)
    self.update_hash()
    self.save()
def handle_uploaded_screenshot(content):
    """
    Attach a previously saved temp image to a photologue gallery.

    Finds (or creates) the Gallery for the given slug, wraps the temp
    image file into a Photo bound to the current Site, and links the
    gallery to the Version when it has none yet.

    :param content: Image info with keys 'id' (Version id),
                    'slug' (gallery slug) and 'path' (temp image path)
    :type content: dict
    :return: Result Dict with 'success' flag plus 'version' id on
             success or 'exception' message on failure
    :rtype: dict
    """
    result_dict = {}
    try:
        image_dir = os.path.join(settings.MEDIA_ROOT, 'photologue', 'photos')
        if not os.path.isdir(image_dir):
            mkdir_p(image_dir)
        file_name = os.path.basename(content['path'])
        with transaction.atomic():
            content_id = content['id']
            content_slug = content['slug']
            gallery = Gallery.objects.filter(slug=content_slug).last()
            current_site = Site.objects.get(id=settings.SITE_ID)
            p_version = Version.objects.get(id=content_id)
            # strip non-alphanumerics so the name is safe for titles/slugs
            c_name = re.sub('[^A-Za-z0-9]', '', p_version.c_name)  # filter
            if not gallery:
                # create a new gallery
                gallery = Gallery.objects.create(
                    title=c_name,
                    slug=content_slug,
                    description=p_version.c_depiction
                    if p_version.c_depiction else 'None',
                    is_public=1)
                gallery.sites.add(current_site)
            # save
            photo = Photo(title=c_name + '_' + str(uuid.uuid1()),
                          slug=c_name.lower() + '_' + str(uuid.uuid1()),
                          caption='',
                          is_public=1)
            # FIX: open the temp image with a context manager so the file
            # handle is closed even if photo.image.save or the ORM raises;
            # the original left it open on the exception path.
            with open(content['path'], 'rb') as data:
                content_file = ContentFile(data.read())
                photo.image.save(file_name, content_file)
            photo.save()
            photo.sites.add(current_site)
            gallery.photos.add(photo)
            if p_version.gallery is None:
                p_version.gallery = gallery
                p_version.save()
            result_dict.update({
                "success": True,
                "version": p_version.id
            })
    except Exception as e:
        # error handler
        result_dict.update({
            "success": False,
            "exception": str(e)
        })
    return result_dict
def handle_uploaded_file(request):
    """
    Save an uploaded .deb into TEMP_ROOT and queue it for processing
    via the Celery-style .delay interface.

    :param request: Django Request carrying a 'package' file upload
    :type request: HttpRequest
    :return: async task handle from handle_uploaded_package.delay
    """
    f = request.FILES['package']
    temp_root = settings.TEMP_ROOT
    if not os.path.exists(temp_root):
        mkdir_p(temp_root)
    package_temp_path = os.path.join(temp_root, str(uuid.uuid1()) + '.deb')
    with open(package_temp_path, 'wb+') as destination:
        for chunk in f.chunks():
            destination.write(chunk)
    # FIX: 0755 is a Python 2 octal literal and a SyntaxError under
    # Python 3; use the 0o755 form the rest of this file already uses.
    os.chmod(package_temp_path, 0o755)
    return handle_uploaded_package.delay(package_temp_path)
def handle_uploaded_file(request):
    """
    Save an uploaded .deb into TEMP_ROOT and process it, either by
    enqueueing on the django_rq 'high' queue (Redis enabled) or
    synchronously.

    NOTE(review): this redefines an earlier handle_uploaded_file in the
    same module; the last definition wins — confirm which variant is
    intended to be live.

    :param request: Django Request carrying a 'package' file upload
    :type request: HttpRequest
    :return: rq Job when Redis is enabled, otherwise the result dict
             from handle_uploaded_package
    """
    f = request.FILES['package']
    temp_root = settings.TEMP_ROOT
    if not os.path.exists(temp_root):
        mkdir_p(temp_root)
    package_temp_path = os.path.join(temp_root, str(uuid.uuid1()) + '.deb')
    with open(package_temp_path, 'wb+') as destination:
        for chunk in f.chunks():
            destination.write(chunk)
    # FIX: 0755 is a Python 2 octal literal and a SyntaxError under
    # Python 3; use the 0o755 form the rest of this file already uses.
    os.chmod(package_temp_path, 0o755)
    if settings.ENABLE_REDIS is True:
        queue = django_rq.get_queue('high')
        return queue.enqueue(handle_uploaded_package, package_temp_path)
    else:
        return handle_uploaded_package(package_temp_path)
def build_section_package_procedure(conf):
    """
    Build a repository-icons Debian package from the given configuration.

    Stages a build tree under TEMP_ROOT, copies section icons into the
    Cydia sections directory, optionally exports the repository GPG
    public key together with postinst/prerm maintainer scripts that
    register/unregister it, writes DEBIAN/control, then invokes
    dpkg-deb and forwards the result to handle_uploaded_package.

    :param conf: dict with 'sections' (list of {'icon', 'name'}) and
                 'control' (mapping filled into the control template)
    :return: result dict from handle_uploaded_package
    """
    staging_root = os.path.join(settings.TEMP_ROOT, str(uuid.uuid4()))
    sections_dir = os.path.join(staging_root,
                                'Applications/Cydia.app/Sections')
    if not os.path.isdir(sections_dir):
        mkdir_p(sections_dir)

    # Copy each configured section icon into the staging tree.
    for section in conf['sections']:
        icon_src = os.path.join(settings.MEDIA_ROOT, section['icon'])
        icon_name = section['name'].replace(' ', '_')
        if os.path.isfile(icon_src):
            icon_dest = os.path.join(sections_dir, icon_name + '.png')
            shutil.copyfile(icon_src, icon_dest)
            os.chmod(icon_dest, 0o755)

    # Export the repository GPG public key when signatures are enabled;
    # pub_path is also embedded in the maintainer scripts below.
    pub_path = '/private/var/mobile/.gpg'
    if preferences.Setting.gpg_signature:
        gpg_dest_path = os.path.join(staging_root, 'private/var/mobile/.gpg')
        if not os.path.isdir(gpg_dest_path):
            mkdir_p(gpg_dest_path)
            os.chmod(gpg_dest_path, 0o755)
        password = preferences.Setting.gpg_password
        pub_name = "%s.pub" % Site.objects.get(id=settings.SITE_ID).domain
        pub_path = os.path.join(pub_path, pub_name)
        pubkey_path = os.path.join(gpg_dest_path, pub_name)
        export_cmd = ["gpg", "-a", "--export", "--batch", "--yes"]
        if password is not None and len(password) > 0:
            export_cmd += ["--passphrase", password]
        export_cmd += ["-o", pubkey_path]
        subprocess.check_call(export_cmd)

    # Lay out the DEBIAN metadata directory.
    debian_path = os.path.join(staging_root, 'DEBIAN')
    if not os.path.isdir(debian_path):
        mkdir_p(debian_path)
        os.chmod(debian_path, 0o755)

    def _write_debian_file(file_path, text):
        # Write text as UTF-8 and mark the file readable/executable.
        with open(file_path, 'wb+') as fp:
            fp.write(text.encode("utf-8"))
        os.chmod(file_path, 0o755)

    # generate control
    control_detail = conf["control"]
    control_text = \
        """Package: %(package)s
Name: %(name)s
Version: %(version)s
Architecture: %(architecture)s
Section: Repositories
Author: %(author-name)s <%(author-email)s>
Maintainer: %(maintainer-name)s <%(maintainer-email)s>
Sponsor: %(sponsor-name)s <%(sponsor-site)s>
Description: %(description)s
Essential: no
Priority: required
Depends: cydia
""" % control_detail
    _write_debian_file(os.path.join(debian_path, 'control'), control_text)

    # generate postinst: registers the public key on install
    postinst_text = \
        """#!/bin/sh
apt-key add "%(pub)s"
""" % ({"pub": pub_path})
    _write_debian_file(os.path.join(debian_path, 'postinst'), postinst_text)

    # generate prerm: removes the public key on uninstall
    prerm_text = \
        """#!/bin/sh
apt-key del "%(pub)s"
""" % ({"pub": pub_path})
    _write_debian_file(os.path.join(debian_path, 'prerm'), prerm_text)

    # Assemble the final .deb with dpkg-deb.
    packages_dir = os.path.join(settings.TEMP_ROOT, 'packages')
    if not os.path.isdir(packages_dir):
        mkdir_p(packages_dir)
    pkg_name = "%(package)s_%(version)s_%(architecture)s.deb" % control_detail
    pkg_path = os.path.join(packages_dir, pkg_name)
    subprocess.check_call(
        ["dpkg-deb", "-Z", "gzip", "-b", staging_root, pkg_path])
    return handle_uploaded_package(pkg_path)
def handle_uploaded_package(path):
    """
    Import a .deb file from `path` into the repository.

    Parses the package's control data, ensures its Section exists,
    rejects duplicate (package, version) pairs, then moves the file
    under MEDIA_ROOT/versions/<uuid>/ and creates a Version row mirroring
    every control field. All DB work runs in a single transaction.

    :param path: Package Uploaded Path
    :type path: str
    :return: Result Dict: {'success': True, 'version': id} or
             {'success': False, 'exception': message}
    :rtype: dict
    """
    result_dict = {}
    try:
        uploaded_package = DebianPackage(path)
        control = uploaded_package.control
        # Destination: MEDIA_ROOT/versions/<uuid1>/<pkg>_<ver>_<arch>.deb
        version_dir = os.path.join(settings.MEDIA_ROOT, 'versions')
        if not os.path.isdir(version_dir):
            mkdir_p(version_dir)
        target_dir = os.path.join(version_dir, str(uuid.uuid1()))
        if not os.path.isdir(target_dir):
            mkdir_p(target_dir)
        target_path = os.path.join(
            target_dir,
            control.get('Package', 'undefined') + '_' +
            control.get('Version', 'undefined') + '_' +
            control.get('Architecture', 'undefined') + '.deb')
        with transaction.atomic():
            # Reuse the newest Section with a matching name, if any.
            p_section = Section.objects.filter(
                name=control.get('Section', None)).last()
            if p_section:
                pass
            else:
                # create a new section
                p_section_name = control.get('Section', None)
                if p_section_name:
                    p_section = Section(name=p_section_name)
                    p_section.save()
            # search version
            p_version = Version.objects.filter(
                c_package=control.get('Package', None),
                c_version=control.get('Version', None)).last()
            if p_version:
                # version conflict: same package at the same version
                # already imported — refuse the upload
                result_dict.update({
                    "success": False,
                    "exception": _("Version Conflict: %s") % p_version.c_version
                })
            else:
                # os.rename(path, target_path)
                # shutil.move works across filesystems, unlike os.rename
                shutil.move(path, target_path)
                # Copy every recognized control field onto the new row.
                p_version = Version()
                p_version.c_package = control.get('Package', None)
                p_version.c_version = control.get('Version', None)
                p_version.storage = os.path.relpath(target_path,
                                                    settings.MEDIA_ROOT)
                # Maintainer/Author/Sponsor are "Name <detail>" pairs that
                # the DebianPackage helpers split into value and detail.
                p_version.maintainer_name = DebianPackage.value_for_field(
                    control.get('Maintainer', None))
                p_version.maintainer_email = DebianPackage.detail_for_field(
                    control.get('Maintainer', None))
                p_version.c_description = control.get('Description', "")
                p_version.c_section = p_section
                p_version.c_tag = control.get('Tag', None)
                p_version.c_architecture = control.get('Architecture', None)
                p_version.c_name = control.get('Name', None)
                p_version.author_name = DebianPackage.value_for_field(
                    control.get('Author', None))
                p_version.author_email = DebianPackage.detail_for_field(
                    control.get('Author', None))
                p_version.sponsor_name = DebianPackage.value_for_field(
                    control.get('Sponsor', None))
                p_version.sponsor_site = DebianPackage.detail_for_field(
                    control.get('Sponsor', None))
                p_version.c_depiction = control.get('Depiction', None)
                p_version.c_homepage = control.get('Homepage', None)
                p_version.c_priority = control.get('Priority', None)
                p_version.c_installed_size = control.get(
                    'Installed-Size', None)
                p_version.c_essential = control.get('Essential', None)
                p_version.c_depends = control.get('Depends', None)
                p_version.c_pre_depends = control.get('Pre-Depends', None)
                p_version.c_recommends = control.get('Recommends', None)
                p_version.c_suggests = control.get('Suggests', None)
                p_version.c_breaks = control.get('Breaks', None)
                p_version.c_conflicts = control.get('Conflicts', None)
                p_version.c_replaces = control.get('Replaces', None)
                p_version.c_provides = control.get('Provides', None)
                p_version.c_build_essential = control.get(
                    'Build-Essential', None)
                p_version.c_origin = control.get('Origin', None)
                p_version.c_bugs = control.get('Bugs', None)
                p_version.c_multi_arch = control.get('Multi-Arch', None)
                p_version.c_source = control.get('Source', None)
                p_version.c_subarchitecture = control.get(
                    'Subarchitecture', None)
                p_version.c_kernel_version = control.get(
                    'Kernel-Version', None)
                p_version.c_installer_menu_item = control.get(
                    'Installer-Menu-Item', None)
                p_version.c_built_using = control.get('Built-Using', None)
                p_version.c_built_for_profiles = control.get(
                    'Built-For-Profiles', None)
                p_version.c_icon = control.get('Icon', None)
                p_version.update_hash()
                p_version.save()
                # move resource
                result_dict.update({"success": True, "version": p_version.id})
    except Exception as e:
        # error handler
        result_dict.update({
            "success": False,
            # TODO: fix unicode bug
            "exception": str(e)
        })
    return result_dict
def upload_view(request):
    """
    Handle the admin "Upload New Packages" page.

    POST action 'upload' saves an uploaded package (with an AJAX path
    that can also poll a previously queued job); POST action
    'async-import' enqueues every .deb in UPLOAD_ROOT for import
    (Redis required); GET renders the empty upload form.

    :param request: Django Request
    :return: Http Response
    """
    if preferences.Setting.active_release is None:
        messages.error(
            request,
            mark_safe(
                _("Active release not set: you cannot publish your "
                  "repository without an active release. <a href=\"%s\">Add Release</a>"
                  ) % reverse("admin:WEIPDCRM_release_add")))
    # POST
    if request.method == 'POST':
        # action: upload
        if 'action' in request.POST and request.POST['action'] == 'upload':
            if 'ajax' in request.POST and request.POST['ajax'] == 'true':
                result_dict = {}
                if 'job' in request.POST:
                    # poll the status of a previously enqueued job
                    job_id = request.POST['job']
                    result_dict = {}
                    m_job = queues.get_queue('high').fetch_job(job_id)
                    if m_job is None:
                        result_dict.update({
                            'result': False,
                            'msg': _('No such job'),
                            'job': None
                        })
                    else:
                        result_dict.update({
                            'result': True,
                            'msg': '',
                            'job': {
                                'id': m_job.id,
                                'is_failed': m_job.is_failed,
                                'is_finished': m_job.is_finished,
                                'result': m_job.result
                            }
                        })
                else:
                    form = UploadForm(request.POST, request.FILES)
                    if form.is_valid():
                        # Handle File
                        if settings.ENABLE_REDIS is True:
                            m_job = handle_uploaded_file(request)
                            result_dict.update({
                                'status': True,
                                'msg': _('Upload succeed, proceeding...'),
                                'job': {
                                    'id': m_job.id,
                                    'result': m_job.result
                                }
                            })
                        else:
                            # synchronous path: result dict, not a job
                            m_result = handle_uploaded_file(request)
                            succeed = m_result['success']
                            if succeed:
                                result_dict.update({
                                    'status': True,
                                    'msg': _('Upload succeed, proceeding...'),
                                    'job': {
                                        'id': None,
                                        'result': {
                                            'version': m_result['version']
                                        }
                                    }
                                })
                            else:
                                result_dict.update({
                                    'status': False,
                                    'msg': m_result['exception'],
                                    'job': None
                                })
                    else:
                        result_dict.update({
                            'status': False,
                            'msg': _('Upload failed, invalid form.'),
                            'job': None
                        })
                return HttpResponse(json.dumps(result_dict),
                                    content_type='application/json')
            else:
                # render upload result
                form = UploadForm(request.POST, request.FILES)
                if form.is_valid():
                    # Handle File
                    if settings.ENABLE_REDIS is True:
                        m_job = handle_uploaded_file(request)
                        job_id = m_job.id
                        msg = _('Upload succeed, proceeding...')
                    else:
                        m_result = handle_uploaded_file(request)
                        if m_result["success"] is True:
                            return redirect(
                                Version.objects.get(id=int(
                                    m_result["version"])).get_admin_url())
                        else:
                            job_id = ''
                            msg = m_result["exception"]
                else:
                    job_id = ''
                    msg = _('Upload failed, invalid form.')
                form = UploadForm()
                context = admin.site.each_context(request)
                context.update({
                    'title': _('Upload New Packages'),
                    'form': form,
                    'job_id': job_id,
                    'msg': msg
                })
                template = 'admin/upload.html'
                return render(request, template, context)
        # action: async-import
        elif 'action' in request.POST and request.POST[
                'action'] == 'async-import':
            if not settings.ENABLE_REDIS:
                messages.error(
                    request,
                    mark_safe(
                        _("To use this action, you must enable <b>Redis Queue</b>."
                          )))
            else:
                items = os.listdir(settings.UPLOAD_ROOT)
                import_items = []
                for item in items:
                    if item[-4:] == ".deb":
                        item_path = os.path.join(settings.UPLOAD_ROOT, item)
                        import_items.append(item_path)
                if len(import_items) > 0:
                    temp_root = settings.TEMP_ROOT
                    if not os.path.exists(temp_root):
                        try:
                            mkdir_p(temp_root)
                        except OSError:
                            pass
                    import_jobs = []
                    queue = django_rq.get_queue('high')
                    for import_item in import_items:
                        package_temp_path = os.path.join(
                            temp_root, str(uuid.uuid1()) + '.deb')
                        shutil.copy(import_item, package_temp_path)
                        os.chmod(package_temp_path, 0o755)
                        import_job = queue.enqueue(handle_uploaded_package,
                                                   package_temp_path)
                        import_jobs.append(import_job)
                    # FIX: these translated messages use printf-style
                    # %(job_count)s / %(jobs)s placeholders, but were
                    # interpolated with str.format(), which ignores
                    # %-placeholders entirely and showed the raw template.
                    # Interpolate with the % operator and a mapping instead.
                    if len(import_jobs) == 1:
                        messages.info(
                            request,
                            mark_safe(
                                _("%(job_count)s package importing job have been added to the \"<a href=\"%(jobs)s\">high</a>\" queue."
                                  ) % {
                                      'job_count': str(len(import_jobs)),
                                      'jobs': reverse('rq_jobs', args=(1, )),
                                  }))
                    else:
                        messages.info(
                            request,
                            mark_safe(
                                _("%(job_count)s package importing jobs have been added to the \"<a href=\"%(jobs)s\">high</a>\" queue."
                                  ) % {
                                      'job_count': str(len(import_jobs)),
                                      'jobs': reverse('rq_jobs', args=(1, )),
                                  }))
                else:
                    messages.warning(request,
                                     _("There is no package to import."))
        return redirect('upload')
    # GET
    elif request.method == 'GET':
        form = UploadForm()
        context = admin.site.each_context(request)
        context.update({
            'title': _('Upload New Packages'),
            'form': form,
            'job_id': ''
        })
        template = 'admin/upload.html'
        return render(request, template, context)
def build_procedure(conf):
    """
    This is the main package list building procedure.

    Builds the Packages index (optionally gzip/bzip2 compressed), writes
    the Release file with checksums, optionally GPG-signs it, publishes
    the results under MEDIA_ROOT/releases/<id>/, refreshes CydiaIcon.png,
    and marks the Build record finished.

    :param conf: dict with keys 'build_p_diff', 'build_all',
                 'build_uuid', 'build_compression', 'build_release',
                 'build_secure', 'build_validation'
    :raises ValueError: when no enabled package is available
    """
    if not conf["build_p_diff"]:
        # Build Package file
        build_all_versions_enabled = conf["build_all"]
        # Get Package List QuerySet
        if build_all_versions_enabled:
            version_set = Version.objects.filter(enabled=True).order_by('-id')
            version_count = version_set.count()
        else:
            # latest enabled version per package only
            version_set = Version.objects.raw(
                "SELECT * FROM `WEIPDCRM_version` "
                "WHERE `enabled` = TRUE "
                "GROUP BY `c_package` "
                "ORDER BY `c_package`, `id` DESC"
            )
            version_count = 0
            for version in version_set:
                version_count += 1
        # Check Empty
        if version_count == 0:
            raise ValueError(_("No enabled package available."))
        # Preparing Temp Directory
        build_temp_path = os.path.join(settings.TEMP_ROOT,
                                       str(conf["build_uuid"]))
        if not os.path.exists(build_temp_path):
            mkdir_p(build_temp_path)
        # Create Temp Package file
        build_temp_package = open(
            os.path.join(build_temp_path, "Packages"), "wb+")
        # Generate Control List
        depiction_url = ""
        if preferences.Setting.advanced_mode:
            site = Site.objects.get(id=settings.SITE_ID)
            scheme = "http"
            if settings.SECURE_SSL is True:
                scheme = "https"
            depiction_url = "%s://%s" % (scheme, site.domain)
        for version_instance in version_set:
            # !!! HERE WE SHOULD USE ADVANCED CONTROL DICT !!!
            control_dict = version_instance.get_advanced_control_dict()
            if (not version_instance.custom_depiction) and \
                    len(depiction_url) != 0:
                control_dict["Depiction"] = \
                    depiction_url + version_instance.get_absolute_url()
            if version_instance.online_icon is not None and \
                    len(str(version_instance.online_icon)) > 0:
                control_dict["Icon"] = depiction_url + os.path.join(
                    str(preferences.Setting.resources_alias),
                    version_instance.online_icon.name)
            DebianPackage.get_control_content(control_dict,
                                              build_temp_package)
            build_temp_package.write("\n".encode("utf-8"))
        # Compression Gzip (modes 1, 2, 5, 6)
        build_temp_package.seek(0)
        if conf["build_compression"] == 1 \
                or conf["build_compression"] == 2 \
                or conf["build_compression"] == 5 \
                or conf["build_compression"] == 6:
            build_temp_package_gz = gzip.open(
                os.path.join(build_temp_path, "Packages.gz"), mode="wb")
            while True:
                cache = build_temp_package.read(16 * 1024)  # 16k cache
                if not cache:
                    break
                build_temp_package_gz.write(cache)
            build_temp_package_gz.close()
        # Compression Bzip (modes 3, 4, 5, 6)
        build_temp_package.seek(0)
        if conf["build_compression"] == 3 \
                or conf["build_compression"] == 4 \
                or conf["build_compression"] == 5 \
                or conf["build_compression"] == 6:
            build_temp_package_bz2 = bz2.BZ2File(
                os.path.join(build_temp_path, "Packages.bz2"), mode="wb")
            while True:
                cache = build_temp_package.read(16 * 1024)  # 16k cache
                if not cache:
                    break
                build_temp_package_bz2.write(cache)
            build_temp_package_bz2.close()
        # Close original Package file
        build_temp_package.close()
        # Release
        active_release = Release.objects.get(id=conf["build_release"])
        active_release_control_dict = active_release.get_control_field()
        build_temp_release = open(
            os.path.join(build_temp_path, "Release"), mode="wb")
        DebianPackage.get_control_content(active_release_control_dict,
                                          build_temp_release)
        # Checksum
        if conf["build_secure"] is True:
            def hash_file(hash_obj, file_path):
                # stream the file through the hash in 64k-ish blocks
                with open(file_path, "rb") as f:
                    for block in iter(lambda: f.read(65535), b""):
                        hash_obj.update(block)
            checksum_list = [
                "Packages",
                "Packages.gz",
                "Packages.bz2"
            ]
            build_validation_titles = [
                "MD5Sum", "SHA1", "SHA256", "SHA512"
            ]
            build_validation_methods = [
                hashlib.md5, hashlib.sha1, hashlib.sha256, hashlib.sha512
            ]
            # Using a loop to iter different validation methods.
            # FIX: the loop bound was range(0, 3), which could never reach
            # index 3, so the declared SHA512 digest was unreachable even
            # when conf["build_validation"] requested it.
            for build_validation_index in range(len(build_validation_methods)):
                if conf["build_validation"] > build_validation_index:
                    build_temp_release.write(
                        (build_validation_titles[build_validation_index] +
                         ":\n").encode("utf-8"))
                    for checksum_instance in checksum_list:
                        checksum_path = os.path.join(build_temp_path,
                                                     checksum_instance)
                        if os.path.exists(checksum_path):
                            m2 = build_validation_methods[
                                build_validation_index]()
                            hash_file(m2, checksum_path)
                            p_hash = m2.hexdigest()
                            p_size = os.path.getsize(checksum_path)
                            build_temp_release.write(
                                (" " + p_hash + " " + str(p_size) + " " +
                                 checksum_instance + "\n").encode("utf-8")
                            )
        build_temp_release.close()
        if conf["build_secure"] is True:
            # GPG Signature
            """
            Use 'gpg --gen-key' to generate GnuPG key before using this
            function.
            """
            password = preferences.Setting.gpg_password
            if password is not None and len(password) > 0:
                subprocess.check_call(
                    ["gpg", "-abs",
                     "--homedir", os.path.join(settings.BASE_DIR, '.gnupg'),
                     "--batch", "--yes", "--pinentry-mode=loopback",
                     "--passphrase", password,
                     "-o", os.path.join(build_temp_path, "Release.gpg"),
                     os.path.join(build_temp_path, "Release"),
                     ]
                )
            else:
                subprocess.check_call(
                    ["gpg", "-abs",
                     "--homedir", os.path.join(settings.BASE_DIR, '.gnupg'),
                     "--batch", "--yes",
                     "-o", os.path.join(build_temp_path, "Release.gpg"),
                     os.path.join(build_temp_path, "Release"),
                     ]
                )
        # Preparing Directory
        release_root = os.path.join(
            settings.MEDIA_ROOT,
            "releases",
            str(active_release.id),
        )
        build_path = os.path.join(
            release_root,
            "builds",
            str(conf["build_uuid"])
        )
        if not os.path.isdir(build_path):
            mkdir_p(build_path)
        # Publish: copy each artifact to the active release root and move
        # the original into this build's archive directory.
        rename_list = [
            "Release", "Release.gpg",
            "Packages", "Packages.gz", "Packages.bz2"
        ]
        for rename_instance in rename_list:
            rename_path = os.path.join(build_temp_path, rename_instance)
            rename_to_path = os.path.join(build_path, rename_instance)
            active_path = os.path.join(release_root, rename_instance)
            if os.path.exists(rename_path):
                if os.path.exists(active_path):
                    os.unlink(active_path)
                shutil.copyfile(rename_path, active_path)
                os.chmod(active_path, 0o755)
                # shutil.move works across filesystems, unlike os.rename
                shutil.move(rename_path, rename_to_path)
                os.chmod(rename_to_path, 0o755)
            else:
                # artifact not produced this build: clear stale copies
                if os.path.exists(rename_to_path):
                    os.unlink(rename_to_path)
                if os.path.exists(active_path):
                    os.unlink(active_path)

        def thumb_png(png_path):
            # shrink the icon in place to at most 60x60
            # NOTE(review): Image.ANTIALIAS is removed in Pillow 10+;
            # newer Pillow wants Image.LANCZOS — confirm pinned version.
            img = Image.open(png_path)
            img.thumbnail((60, 60), Image.ANTIALIAS)
            img.save(png_path)

        # Cydia Icon: prefer the release's own icon, fall back to static
        cydia_icon_path = os.path.join(release_root, "CydiaIcon.png")
        if os.path.exists(cydia_icon_path):
            os.unlink(cydia_icon_path)
        if active_release.icon is not None and \
                len(str(active_release.icon)) > 0:
            src_path = os.path.join(settings.MEDIA_ROOT,
                                    active_release.icon.name)
            if os.path.exists(src_path):
                shutil.copyfile(
                    src_path,
                    cydia_icon_path
                )
        else:
            src_path = os.path.join(settings.STATIC_ROOT,
                                    "img/CydiaIcon.png")
            if os.path.exists(src_path):
                shutil.copyfile(
                    src_path,
                    cydia_icon_path
                )
        if os.path.exists(cydia_icon_path):
            thumb_png(cydia_icon_path)
            os.chmod(cydia_icon_path, 0o755)
        build_instance = Build.objects.get(uuid=str(conf["build_uuid"]))
        if build_instance is not None:
            build_instance.is_finished = True
            build_instance.save()
    else:
        # TODO: Pdiffs Feature
        pass