Example #1
def edit_chat_handler(chat_id, name, members):
    """Редактирует чат"""
    creator = current_user.id
    if members == "none":
        members = str(creator)
    else:
        members += f";{creator}"
    session = create_session()
    new_members, _ = edit_chat(chat_id, name, members, session=session)
    chat_avatar = request.data
    if chat_avatar:
        path = os.path.join(PATH_TO_ROOT, "static", "img", "chat_avatars",
                            str(chat_id))
        try:
            remove_dir(path)
        except FileNotFoundError:
            pass
        os.mkdir(path)
        load_image(chat_avatar, f"{path}/avatar.png")
        make_icon(chat_avatar, f"{path}/icon.png")
    for user_id in new_members:
        add_chat_to_user_chat_list(chat_id, user_id, session=session)
    return jsonify({
        "status": "ok",
    })
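Examples #1–#3 call a remove_dir helper that is not defined in these snippets. A minimal sketch, assuming it is a thin wrapper around shutil.rmtree (the real projects may implement it differently):

import shutil

def remove_dir(path):
    # Hypothetical helper, assumed equivalent to shutil.rmtree.
    # Like rmtree, it raises FileNotFoundError for a missing path,
    # which matches the try/except around the call in Example #1.
    shutil.rmtree(path)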
Example #2
def home(request):
    if request.method=="POST":
	log.info(request.POST)
	file_name = "/tmp/albums/"+request.POST['name']
	if os.path.exists(file_name):
	   remove_dir(file_name)
	os.makedirs(file_name)
	for i in request.POST.getlist('li1[]'):
	    urllib.urlretrieve(i, file_name+'/'+i.split('/')[-1])
	return HttpResponse(simplejson.dumps({'redirect_url':'/aldown//download/'+request.POST['name']+".zip"}),mimetype="application/json")
    return render_to_response("aldown.html",{})
Example #3
def home(request):
    if request.method == "POST":
        log.info(request.POST)
        file_name = "/tmp/albums/" + request.POST['name']
        if os.path.exists(file_name):
            remove_dir(file_name)
        os.makedirs(file_name)
        for i in request.POST.getlist('li1[]'):
            urllib.urlretrieve(i, file_name + '/' + i.split('/')[-1])
        return HttpResponse(simplejson.dumps({
            'redirect_url':
            '/aldown/download/' + request.POST['name'] + ".zip"
        }),
                            mimetype="application/json")
    return render_to_response("aldown.html", {})
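The view in Examples #2–#3 expects a POST containing a 'name' field and a 'li1[]' list of URLs to download into /tmp/albums/<name>. A minimal smoke-test sketch using Django's test client; the '/' route and surrounding settings are assumptions, not taken from the examples:

from django.test import Client

client = Client()
# Hypothetical request: 'name' becomes the /tmp/albums/<name> directory and
# each 'li1[]' entry is fetched with urllib.urlretrieve into that directory.
response = client.post('/', {
    'name': 'my_album',
    'li1[]': ['http://example.com/a.jpg', 'http://example.com/b.jpg'],
})
print(response.content)  # JSON body containing the redirect_url for the zip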
Example #4
    def _run(self, scanObject, result, depth, args):
        '''Laika framework module logic execution'''
        moduleResult = []

        file_limit = int(get_option(args, 'filelimit', 'rarfilelimit', 0))
        byte_limit = int(get_option(args, 'bytelimit', 'rarbytelimit', 0))
        password = get_option(args, 'password', 'rarpassword')
        attempt_decrypt = strtobool(
            get_option(args, 'attemptdecrypt', 'rarattemptdecrypt', 'false'))
        temp_dir = get_option(args, 'tempdir', 'tempdir', '/tmp/laikaboss_tmp')
        if not os.path.isdir(temp_dir):
            os.mkdir(temp_dir)
            os.chmod(temp_dir, 0777)

        # A temp file must be created as UnRAR2 does not accept buffers
        with tempfile.NamedTemporaryFile(dir=temp_dir) as temp_file:
            temp_file.write(scanObject.buffer)
            temp_file.flush()

            # RAR can be password protected, which encrypts the headers
            headers_are_encrypted = False
            # RAR can encrypt the files while leaving the headers decrypted
            files_are_encrypted = False

            rar = None
            # list of the file info objects
            infos = []

            try:
                logging.debug('%s: Attempting to open rar file',
                              self.module_name)
                # If headers are encrypted, the following will raise IncorrectRARPassword
                rar = UnRAR2.RarFile(temp_file.name)
                infos = rar.infolist()
                logging.debug('%s: Succeeded opening rar file',
                              self.module_name)

                # If files are encrypted, the filename will be prefixed with a '*'
                for info in infos:
                    if info.filename.startswith('*'):
                        logging.debug('%s: Rar files are encrypted',
                                      self.module_name)
                        scanObject.addFlag('ENCRYPTED_RAR')
                        scanObject.addMetadata(self.module_name, "Encrypted",
                                               "Protected Files")
                        files_are_encrypted = True
                        break
            except IncorrectRARPassword:
                logging.debug('%s: Rar headers are encrypted',
                              self.module_name)
                scanObject.addFlag('ENCRYPTED_RAR')
                scanObject.addMetadata(self.module_name, "Encrypted",
                                       "Protected Header")
                headers_are_encrypted = True
            except InvalidRARArchive:
                logging.debug('%s: Invalid Rar file', self.module_name)

            if (headers_are_encrypted
                    or files_are_encrypted) and attempt_decrypt:
                logging.debug('%s: Attempting to decrypt', self.module_name)
                possible_passwords = []

                # Passwords are sometimes sent in the email content. Use the content of the parent
                # object as the list of possible passwords
                parent_object = getParentObject(result, scanObject)
                if parent_object:
                    possible_passwords = _create_word_list(
                        parent_object.buffer)

                if password:
                    possible_passwords.insert(0, password)

                explode_temp_dir = os.path.join(temp_dir, 'exploderar')
                for possible_password in possible_passwords:
                    try:
                        logging.debug("EXPLODE_RAR: Attempting password '%s'",
                                      possible_password)
                        rar = UnRAR2.RarFile(temp_file.name,
                                             password=possible_password)
                        # Extraction is needed to force the exception on encrypted files
                        if files_are_encrypted:
                            rar.extract(path=explode_temp_dir)
                        infos = rar.infolist()
                        logging.debug("EXPLODE_RAR: Found password '%s'",
                                      possible_password)
                        scanObject.addFlag('rar:decrypted')
                        scanObject.addMetadata(self.module_name, 'Password',
                                               possible_password)
                        break
                    except IncorrectRARPassword:
                        continue
                if os.path.exists(explode_temp_dir):
                    remove_dir(explode_temp_dir)

            scanObject.addMetadata(self.module_name, "Total_Files", len(infos))
            file_count = 0
            exceeded_byte_limit = False
            for info in infos:
                if byte_limit and info.size > byte_limit:
                    logging.debug(
                        "EXPLODE_RAR: skipping file due to byte limit")
                    exceeded_byte_limit = True
                    continue
                try:
                    content = rar.read_files(info.filename)[0][1]
                    if byte_limit and len(content) > byte_limit:
                        logging.debug(
                            "EXPLODE_RAR: skipping file due to byte limit")
                        exceeded_byte_limit = True
                        continue
                    moduleResult.append(
                        ModuleObject(
                            buffer=content,
                            externalVars=ExternalVars(filename=info.filename)))
                except IndexError:
                    pass
                file_count += 1
                if file_limit and file_count >= file_limit:
                    scanObject.addFlag("rar:err:LIMIT_EXCEEDED")
                    logging.debug("EXPLODE_RAR: breaking due to file limit")
                    break
            if exceeded_byte_limit:
                scanObject.addFlag("rar:err:BYTE_LIMIT_EXCEEDED")

        scanObject.addMetadata(self.module_name, "Unzipped", len(moduleResult))
        return moduleResult
Example #5
    def _run(self, scanObject, result, depth, args):
        '''Laika framework module logic execution'''
        moduleResult = []

        file_limit = int(get_option(args, 'filelimit', 'rarfilelimit', 0))
        byte_limit = int(get_option(args, 'bytelimit', 'rarbytelimit', 0))
        password = get_option(args, 'password', 'rarpassword')
        attempt_decrypt = strtobool(get_option(args, 'attemptdecrypt', 'rarattemptdecrypt', 'false'))
        temp_dir = get_option(args, 'tempdir', 'tempdir', '/tmp/laikaboss_tmp')
        if not os.path.isdir(temp_dir):
            os.mkdir(temp_dir)
            os.chmod(temp_dir, 0777)

        # A temp file must be created as UnRAR2 does not accept buffers
        with tempfile.NamedTemporaryFile(dir=temp_dir) as temp_file:
            temp_file.write(scanObject.buffer)
            temp_file.flush()

            # RAR can be password protected, which encrypts the headers
            headers_are_encrypted = False
            # RAR can encrypt the files while leaving the headers decrypted
            files_are_encrypted = False

            rar = None
            # list of the file info objects
            infos = []

            try:
                logging.debug('%s: Attempting to open rar file', self.module_name)
                # If headers are encrypted, the following will raise IncorrectRARPassword
                rar = UnRAR2.RarFile(temp_file.name)
                infos = rar.infolist()
                logging.debug('%s: Succeeded opening rar file', self.module_name)

                # If files are encrypted, the filename will be prefixed with a '*'
                for info in infos:
                    if info.filename.startswith('*'):
                        logging.debug('%s: Rar files are encrypted', self.module_name)
                        scanObject.addFlag('ENCRYPTED_RAR')
                        scanObject.addMetadata(self.module_name, "Encrypted", "Protected Files")
                        files_are_encrypted = True
                        break
            except IncorrectRARPassword:
                logging.debug('%s: Rar headers are encrypted', self.module_name)
                scanObject.addFlag('ENCRYPTED_RAR')
                scanObject.addMetadata(self.module_name, "Encrypted", "Protected Header")
                headers_are_encrypted = True
            except InvalidRARArchive:
                logging.debug('%s: Invalid Rar file', self.module_name)

            if (headers_are_encrypted or files_are_encrypted) and attempt_decrypt:
                logging.debug('%s: Attempting to decrypt', self.module_name)
                possible_passwords = []

                # Passwords are sometimes sent in the email content. Use the content of the parent
                # object as the list of possible passwords
                parent_object = getParentObject(result, scanObject)
                if parent_object:
                    possible_passwords = _create_word_list(parent_object.buffer)

                if password:
                    possible_passwords.insert(0, password)

                explode_temp_dir = os.path.join(temp_dir, 'exploderar')
                for possible_password in possible_passwords:
                    try:
                        logging.debug("EXPLODE_RAR: Attempting password '%s'", possible_password)
                        rar = UnRAR2.RarFile(temp_file.name, password=possible_password)
                        # Extraction is needed to force the exception on encrypted files
                        if files_are_encrypted:
                            rar.extract(path=explode_temp_dir)
                        infos = rar.infolist()
                        logging.debug("EXPLODE_RAR: Found password '%s'", possible_password)
                        scanObject.addFlag('rar:decrypted')
                        scanObject.addMetadata(self.module_name, 'Password', possible_password)
                        break
                    except IncorrectRARPassword:
                        continue
                if os.path.exists(explode_temp_dir):
                    remove_dir(explode_temp_dir)

            scanObject.addMetadata(self.module_name, "Total_Files", len(infos))
            file_count = 0
            exceeded_byte_limit = False
            for info in infos:
                if byte_limit and info.size > byte_limit:
                    logging.debug("EXPLODE_RAR: skipping file due to byte limit")
                    exceeded_byte_limit = True
                    continue
                try:
                    content = rar.read_files(info.filename)[0][1]
                    if byte_limit and len(content) > byte_limit:
                        logging.debug("EXPLODE_RAR: skipping file due to byte limit")
                        exceeded_byte_limit = True
                        continue
                    moduleResult.append(ModuleObject(buffer=content,
                        externalVars=ExternalVars(filename=info.filename)))
                except IndexError:
                    pass
                file_count += 1
                if file_limit and file_count >= file_limit:
                    scanObject.addFlag("rar:err:LIMIT_EXCEEDED")
                    logging.debug("EXPLODE_RAR: breaking due to file limit")
                    break
            if exceeded_byte_limit:
                scanObject.addFlag("rar:err:BYTE_LIMIT_EXCEEDED")

        scanObject.addMetadata(self.module_name, "Unzipped", len(moduleResult))
        return moduleResult
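The central trick in Examples #4–#5 is spooling the in-memory scan buffer to a named temporary file because UnRAR2 only accepts file paths. A standalone sketch of that pattern, with a hypothetical open_archive callable standing in for UnRAR2.RarFile:

import os
import tempfile

def scan_with_temp_file(buffer_bytes, open_archive, temp_dir='/tmp/laikaboss_tmp'):
    # Sketch only: write the buffer to disk so a path-based archive library
    # can open it, and do all the work inside the with-block because the
    # temp file disappears when the block exits.
    if not os.path.isdir(temp_dir):
        os.mkdir(temp_dir)
    with tempfile.NamedTemporaryFile(dir=temp_dir) as temp_file:
        temp_file.write(buffer_bytes)
        temp_file.flush()  # ensure the bytes are on disk before the library opens the path
        archive = open_archive(temp_file.name)
        return archive.infolist()  # e.g. list the members while the file still exists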
Example #6
def update_imported_docs(version_pk, api=None):
    """
    Check out or update the given project's repository.
    """
    if api is None:
        api = tastyapi.api

    version_data = api.version(version_pk).get()
    version = make_api_version(version_data)
    project = version.project

    # Make Dirs
    if not os.path.exists(project.doc_path):
        os.makedirs(project.doc_path)

    with project.repo_lock(getattr(settings, 'REPO_LOCK_SECONDS', 30)):
        update_docs_output = {}
        if not project.vcs_repo():
            raise ProjectImportError(("Repo type '{0}' unknown"
                                      .format(project.repo_type)))

        # Get the actual code on disk
        if version:
            log.info(
                LOG_TEMPLATE.format(
                    project=project.slug, 
                    version=version.slug, 
                    msg='Checking out version {slug}: {identifier}'.format(
                        slug=version.slug, 
                        identifier=version.identifier
                    )
                )
            )
            version_slug = version.slug
            version_repo = project.vcs_repo(version_slug)
            update_docs_output['checkout'] = version_repo.checkout(
                version.identifier
            )
        else:
            # Does this ever get called?
            log.info(LOG_TEMPLATE.format(project=project.slug, version=version.slug, msg='Updating to latest revision'))
            version_slug = 'latest'
            version_repo = project.vcs_repo(version_slug)
            update_docs_output['checkout'] = version_repo.update()

        # Ensure we have a conf file (an exception is raised if not)
        project.conf_file(version.slug)

        # Do Virtualenv bits:
        if project.use_virtualenv:
            build_dir = os.path.join(project.venv_path(version=version_slug), 'build')
            if os.path.exists(build_dir):
                log.info(LOG_TEMPLATE.format(project=project.slug, version=version.slug, msg='Removing existing build dir'))
                shutil.remove_dir(build_dir)
            if project.use_system_packages:
                site_packages = '--system-site-packages'
            else:
                site_packages = '--no-site-packages'
            # Here the command has been modified to support different
            # interpreters.
            update_docs_output['venv'] = run(
                '{cmd} {site_packages} {path}'.format(
                    cmd='virtualenv-2.7 -p {interpreter}'.format(
                        interpreter=project.python_interpreter),
                    site_packages=site_packages,
                    path=project.venv_path(version=version_slug)
                )
            )
            # Other code expects sphinx-build to be installed inside the
            # virtualenv.  Using the -I option makes sure it gets installed
            # even if it is already installed system-wide (and
            # --system-site-packages is used)
            if project.use_system_packages:
                ignore_option = '-I'
            else:
                ignore_option = ''
            if project.python_interpreter != 'python3':
                sphinx = 'sphinx==1.1.3'
                update_docs_output['sphinx'] = run(
                    ('{cmd} install -U {ignore_option} {sphinx} '
                     'virtualenv==1.10.1 setuptools==1.1 '
                     'docutils==0.11 git+git://github.com/ericholscher/readthedocs-sphinx-ext#egg=readthedocs_ext').format(
                        cmd=project.venv_bin(version=version_slug, bin='pip'),
                        sphinx=sphinx, ignore_option=ignore_option))
            else:
                sphinx = 'sphinx==1.1.3'
                # python 3 specific hax
                update_docs_output['sphinx'] = run(
                    ('{cmd} install -U {ignore_option} {sphinx} '
                     'virtualenv==1.9.1 docutils==0.11 git+git://github.com/ericholscher/readthedocs-sphinx-ext#egg=readthedocs_ext').format(
                        cmd=project.venv_bin(version=version_slug, bin='pip'),
                        sphinx=sphinx, ignore_option=ignore_option))

            if project.requirements_file:
                os.chdir(project.checkout_path(version_slug))
                update_docs_output['requirements'] = run(
                    '{cmd} install --exists-action=w -r {requirements}'.format(
                        cmd=project.venv_bin(version=version_slug, bin='pip'),
                        requirements=project.requirements_file))
            os.chdir(project.checkout_path(version_slug))
            if os.path.isfile("setup.py"):
                if getattr(settings, 'USE_PIP_INSTALL', False):
                    update_docs_output['install'] = run(
                        '{cmd} install --ignore-installed .'.format(
                            cmd=project.venv_bin(version=version_slug, bin='pip')))
                else:
                    update_docs_output['install'] = run(
                        '{cmd} setup.py install --force'.format(
                            cmd=project.venv_bin(version=version_slug,
                                                 bin='python')))
            else:
                update_docs_output['install'] = (999, "", "No setup.py, skipping install")

        # Update tags/version

        version_post_data = {'repo': version_repo.repo_url}

        if version_repo.supports_tags:
            version_post_data['tags'] = [
                {'identifier': v.identifier,
                 'verbose_name': v.verbose_name,
                 } for v in version_repo.tags
            ]

        if version_repo.supports_branches:
            version_post_data['branches'] = [
                {'identifier': v.identifier,
                 'verbose_name': v.verbose_name,
                 } for v in version_repo.branches
            ]

        try:
            apiv2.project(project.pk).sync_versions.post(version_post_data)
        except Exception, e:
            print "Sync Verisons Exception: %s" % e.message
Example #7
0
def update_imported_docs(version_pk, api=None):
    """
    Check out or update the given project's repository.
    """
    if api is None:
        api = tastyapi.api

    version_data = api.version(version_pk).get()
    version = make_api_version(version_data)
    project = version.project

    # Make Dirs
    if not os.path.exists(project.doc_path):
        os.makedirs(project.doc_path)

    with project.repo_lock(getattr(settings, 'REPO_LOCK_SECONDS', 30)):
        update_docs_output = {}
        if not project.vcs_repo():
            raise ProjectImportError(
                ("Repo type '{0}' unknown".format(project.repo_type)))

        # Get the actual code on disk
        if version:
            log.info(
                LOG_TEMPLATE.format(
                    project=project.slug,
                    version=version.slug,
                    msg='Checking out version {slug}: {identifier}'.format(
                        slug=version.slug, identifier=version.identifier)))
            version_slug = version.slug
            version_repo = project.vcs_repo(version_slug)
            update_docs_output['checkout'] = version_repo.checkout(
                version.identifier)
        else:
            # Does this ever get called?
            log.info(
                LOG_TEMPLATE.format(project=project.slug,
                                    version=version.slug,
                                    msg='Updating to latest revision'))
            version_slug = 'latest'
            version_repo = project.vcs_repo(version_slug)
            update_docs_output['checkout'] = version_repo.update()

        # Ensure we have a conf file (an exception is raised if not)
        project.conf_file(version.slug)

        # Do Virtualenv bits:
        if project.use_virtualenv:
            build_dir = os.path.join(project.venv_path(version=version_slug),
                                     'build')
            if os.path.exists(build_dir):
                log.info(
                    LOG_TEMPLATE.format(project=project.slug,
                                        version=version.slug,
                                        msg='Removing existing build dir'))
                shutil.remove_dir(build_dir)
            if project.use_system_packages:
                site_packages = '--system-site-packages'
            else:
                site_packages = '--no-site-packages'
            # Here the command has been modified to support different
            # interpreters.
            update_docs_output['venv'] = run(
                '{cmd} {site_packages} {path}'.format(
                    cmd='virtualenv-2.7 -p {interpreter}'.format(
                        interpreter=project.python_interpreter),
                    site_packages=site_packages,
                    path=project.venv_path(version=version_slug)))
            # Other code expects sphinx-build to be installed inside the
            # virtualenv.  Using the -I option makes sure it gets installed
            # even if it is already installed system-wide (and
            # --system-site-packages is used)
            if project.use_system_packages:
                ignore_option = '-I'
            else:
                ignore_option = ''
            if project.python_interpreter != 'python3':
                sphinx = 'sphinx==1.1.3'
                update_docs_output['sphinx'] = run((
                    '{cmd} install -U {ignore_option} {sphinx} '
                    'virtualenv==1.10.1 setuptools==1.1 '
                    'docutils==0.11 git+git://github.com/ericholscher/readthedocs-sphinx-ext#egg=readthedocs_ext'
                ).format(cmd=project.venv_bin(version=version_slug, bin='pip'),
                         sphinx=sphinx,
                         ignore_option=ignore_option))
            else:
                sphinx = 'sphinx==1.1.3'
                # python 3 specific hax
                update_docs_output['sphinx'] = run((
                    '{cmd} install -U {ignore_option} {sphinx} '
                    'virtualenv==1.9.1 docutils==0.11 git+git://github.com/ericholscher/readthedocs-sphinx-ext#egg=readthedocs_ext'
                ).format(cmd=project.venv_bin(version=version_slug, bin='pip'),
                         sphinx=sphinx,
                         ignore_option=ignore_option))

            if project.requirements_file:
                os.chdir(project.checkout_path(version_slug))
                update_docs_output['requirements'] = run(
                    '{cmd} install --exists-action=w -r {requirements}'.format(
                        cmd=project.venv_bin(version=version_slug, bin='pip'),
                        requirements=project.requirements_file))
            os.chdir(project.checkout_path(version_slug))
            if os.path.isfile("setup.py"):
                if getattr(settings, 'USE_PIP_INSTALL', False):
                    update_docs_output['install'] = run(
                        '{cmd} install --ignore-installed .'.format(
                            cmd=project.venv_bin(version=version_slug,
                                                 bin='pip')))
                else:
                    update_docs_output['install'] = run(
                        '{cmd} setup.py install --force'.format(
                            cmd=project.venv_bin(version=version_slug,
                                                 bin='python')))
            else:
                update_docs_output['install'] = (
                    999, "", "No setup.py, skipping install")

        # Update tags/version

        version_post_data = {'repo': version_repo.repo_url}

        if version_repo.supports_tags:
            version_post_data['tags'] = [{
                'identifier': v.identifier,
                'verbose_name': v.verbose_name,
            } for v in version_repo.tags]

        if version_repo.supports_branches:
            version_post_data['branches'] = [{
                'identifier': v.identifier,
                'verbose_name': v.verbose_name,
            } for v in version_repo.branches]

        try:
            apiv2.project(project.pk).sync_versions.post(version_post_data)
        except Exception, e:
            print "Sync Verisons Exception: %s" % e.message