Example #1
0
 def get_context_data(self, **kwargs):
     """Store the uploaded file and return its hash sum as the context."""
     uploaded = self.request.FILES['file']
     name = uploaded.name
     # Names containing non-ASCII characters are trimmed to their last 30
     # characters (presumably a storage/display limit — confirm upstream).
     has_non_ascii = any(ord(ch) > 127 for ch in name)
     if has_non_ascii and len(name) > 30:
         name = name[-30:]
     hashsum = file_get_or_create(uploaded, name, JobFile, True)[1]
     return {'hashsum': hashsum}
Example #2
0
def copy_files_with_replace(request, decision_id, files_qs):
    """Bulk-create FileSystem rows for a decision from (file_id, name) pairs.

    request.data['files'] maps a file name to a request.FILES key; it may be
    either a JSON-encoded string or a dict.  Files named there are uploaded
    first and take precedence over same-named entries of files_qs.
    """
    replaced = {}
    fs_rows = []

    # Upload provided files first
    raw_files = request.data.get('files')
    if raw_files:
        if isinstance(raw_files, str):
            try:
                files_map = json.loads(raw_files)
            except Exception as e:
                logger.error("Can't decode files data: {}".format(
                    request.data['files']))
                raise BridgeException("Can't decode files data: {}".format(e))
        elif isinstance(raw_files, dict):
            files_map = raw_files
        else:
            raise BridgeException('Wrong files data: "{}" ({})'.format(
                request.data['files'], type(request.data['files'])))
        for name, upload_key in files_map.items():
            if upload_key not in request.FILES:
                continue
            replaced[name] = request.FILES[upload_key]
            db_file = file_get_or_create(replaced[name], name, JobFile)
            fs_rows.append(FileSystem(
                decision_id=decision_id, file_id=db_file.id, name=name))

    # Copy the remaining files that were not replaced above
    for file_id, file_name in files_qs:
        if file_name not in replaced:
            fs_rows.append(FileSystem(
                decision_id=decision_id, file_id=file_id, name=file_name))
    FileSystem.objects.bulk_create(fs_rows)
Example #3
0
def validate_configuration(user, conf_str):
    """Validate a decision configuration and persist it as a JobFile.

    conf_str is a JSON string with user settings; when empty the user's
    default configuration is used instead.  Returns a dict with 'priority',
    'weight', 'scheduler' and 'configuration_id'.  Raises
    exceptions.ValidationError on a malformed configuration or scheduler.
    """
    # Parse the provided configuration or fall back to the user's default
    if not conf_str:
        configuration = get_default_configuration(user).for_json()
    else:
        try:
            configuration = GetConfiguration(
                user_conf=json.loads(conf_str)).for_json()
        except Exception as e:
            logger.exception(e)
            raise exceptions.ValidationError(
                {'configuration': _('The configuration has wrong format')})

    # Validate task scheduler
    try:
        scheduler = validate_scheduler(type=configuration['task scheduler'])
    except BridgeException as e:
        raise exceptions.ValidationError({'scheduler': str(e)})

    # Save configuration file
    conf_db = file_get_or_create(
        json.dumps(configuration, indent=2, sort_keys=True,
                   ensure_ascii=False), 'configuration.json', JobFile)

    return {
        'priority': configuration['priority'],
        'weight': configuration['weight'],
        'scheduler': scheduler,
        'configuration_id': conf_db.id
    }
Example #4
0
def create_default_decision(request, job, configuration):
    """
    Creates decision with provided configuration and files copied from preset job.
    If 'files' are provided in request.data then those files will be replaced.
    :param request:
    :param job:
    :param configuration:
    :return:
    """
    # Scheduler must be valid before anything is persisted
    scheduler = validate_scheduler(type=configuration['task scheduler'])

    # Persist the configuration as a JobFile
    conf_file = file_get_or_create(
        json.dumps(configuration, indent=2, sort_keys=True,
                   ensure_ascii=False), 'configuration.json', JobFile)

    new_decision = Decision.objects.create(
        title='', job=job, operator=request.user, scheduler=scheduler,
        configuration=conf_file, weight=configuration['weight'],
        priority=configuration['priority'])

    # Copy files from the nearest preset ancestor of type PRESET_JOB_TYPE[1][0]
    preset_job = job.preset.get_ancestors(include_self=True).filter(
        type=PRESET_JOB_TYPE[1][0]).first()
    preset_files = PresetFile.objects.filter(preset=preset_job).values_list(
        'file_id', 'name')
    copy_files_with_replace(request, new_decision.id, preset_files)

    return new_decision
Example #5
0
    def __replace_file(self, fp):
        """Swap the content of the targeted FileSystem entry for that of fp."""
        if self._file_to_replace is None:
            raise ValueError("The file wasn't found")

        # Rewind so the whole stream is stored, then attach the new db file
        fp.seek(0)
        new_db_file = file_get_or_create(fp, fp.name, JobFile, True)[0]
        fs_entry = FileSystem.objects.get(id=self._file_to_replace)
        fs_entry.file = new_db_file
        fs_entry.save()
Example #6
0
 def __convert(self):
     """Return the converted error trace, populating the conversion cache on miss."""
     try:
         cache_entry = ErrorTraceConvertionCache.objects.get(
             unsafe=self.unsafe, function=self.function)
         return cache_entry.converted
     except ObjectDoesNotExist:
         # Not cached yet: convert, store the result as a file, then cache it
         self._parsed_trace = ConvertTrace(
             self.function.name, self.error_trace).pattern_error_trace
         content = json.dumps(self._parsed_trace, ensure_ascii=False,
                              sort_keys=True, indent=4).encode('utf8')
         et_file = file_get_or_create(
             BytesIO(content), ET_FILE_NAME, ConvertedTraces)[0]
         ErrorTraceConvertionCache.objects.create(
             unsafe=self.unsafe, function=self.function, converted=et_file)
         return et_file
Example #7
0
def copy_files_with_replace(request, decision_id, files_qs):
    """Bulk-create FileSystem rows for a decision from (file_id, name) pairs.

    Names listed in request.data['files'] (name -> request.FILES key) are
    uploaded and substituted for the corresponding queryset entries.
    """
    # Collect uploaded replacements keyed by the target file name
    replacements = {}
    files_map = request.data.get('files')
    if files_map:
        for name, upload_key in files_map.items():
            if upload_key in request.FILES:
                replacements[name] = request.FILES[upload_key]

    fs_rows = []
    for file_id, name in files_qs:
        if name in replacements:
            file_id = file_get_or_create(replacements[name], name, JobFile).id
        fs_rows.append(FileSystem(
            decision_id=decision_id, file_id=file_id, name=name))
    FileSystem.objects.bulk_create(fs_rows)
Example #8
0
 def __save_children(self, parent_id, children):
     """Recursively persist a tree of file/folder nodes under parent_id.

     File nodes must carry a 'hashsum' previously registered in self._files;
     files without content share one lazily created empty JobFile.
     """
     for child in children:
         db_file_id = None
         if child['type'] == 'file':
             if 'data' not in child or 'hashsum' not in child['data']:
                 # Lazily create the shared empty file on first use
                 if self._empty is None:
                     self._empty = file_get_or_create(
                         BytesIO(), child['text'], JobFile, False)[0]
                 db_file_id = self._empty.id
             elif child['data']['hashsum'] in self._files:
                 db_file_id = self._files[child['data']['hashsum']]
             else:
                 raise ValueError('The file with hashsum %s was not uploaded before' % child['data']['hashsum'])
         saved_id = self.__save_fs_obj(parent_id, child['text'], db_file_id)
         if child['type'] == 'folder':
             self.__save_children(saved_id, child['children'])
Example #9
0
    def validate(self, attrs):
        """Parse the 'configuration' attribute and replace it with a saved file id."""
        raw_conf = attrs.pop('configuration')
        try:
            configuration = GetConfiguration(
                user_conf=json.loads(raw_conf)).for_json()
        except Exception as e:
            logger.exception(e)
            raise exceptions.ValidationError(
                {'configuration': _('The configuration has wrong format')})

        # Persist the normalized configuration and keep only the file id
        conf_file = file_get_or_create(
            json.dumps(configuration, indent=2, sort_keys=True,
                       ensure_ascii=False),
            'configuration.json', JobFile)
        attrs['file_id'] = conf_file.id
        return attrs
Example #10
0
 def _empty_file(self):
     """Create (or fetch) an empty JobFile and return its id."""
     return file_get_or_create(io.BytesIO(), 'empty', JobFile, False).id
Example #11
0
def create_jobfile():
    """Smoke-test helper: store a small JSON document as a JobFile, then delete it."""
    payload = json.dumps({'test': 'x', 'data': [1, 2, 3], 'new': None})
    db_file = file_get_or_create(payload, 'test.json', JobFile)
    print("The db file:", db_file, db_file.pk)
    print("Delete:", db_file.delete())
Example #12
0
    def __get_file(self, path, fname):
        """Store the file at path under fname and describe it as a tree node."""
        with open(path, mode='rb') as fp:
            file_hash = file_get_or_create(fp, fname, JobFile, True)[1]

        node_data = {'hashsum': file_hash}
        return {'type': 'file', 'text': fname, 'data': node_data}