Example #1
    @staticmethod
    def new(
        session,
        name: str,
        project_string_id: str,
        goal: str,
        user,
        member_created,
    ):

        # Avoid circular import
        # Maybe in the future the Project_permissions class could be part of Project
        from shared.permissions.project_permissions import Project_permissions

        project = Project(  # Base object
            name=name,
            project_string_id=project_string_id,
            goal=goal,
            user_primary=user,  # Assumed to be primary
            member_created=member_created)

        # Permissions and user associations
        user.projects.append(project)
        user.current_project_string_id = project_string_id
        user.project_current = project

        permission_add_result, permission_add_error_message = Project_permissions.add(
            permission="admin", user=user, sub_type=project_string_id)

        # session.add() takes a single object; the two-argument form would
        # pass `project` as an internal flag and silently skip adding it
        session.add(user)
        session.add(project)

        session.flush()  # Assigns generated ids (e.g. project.id) without committing

        member_id = user.member_id

        Event.new(
            session=session,
            kind="new_project",
            member_id=member_id,
            success=True,
            project_id=project.id,
            email=user.email  # Caution, assumes user object is available
        )

        project.directory_default = WorkingDir.new_user_working_dir(
            session, None, project, user, project_default_dir=True)

        report_dashboard = ReportDashboard.new(project_id=project.id)
        session.add(report_dashboard)

        session.flush()  # Needed before adding the default directory below

        # Careful: this expects the default directory to already be assigned
        Project_Directory_List.add_default(
            session=session,
            working_dir_id=project.directory_default.id,
            project=project)

        return project
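
    # Usage sketch (hypothetical, not from this file): create a project for an
    # existing user inside a session scope. `user` is assumed to be a User row
    # with an associated member; all names here are illustrative.
    #
    #   with sessionMaker.session_scope() as session:
    #       project = Project.new(
    #           session=session,
    #           name='Demo Project',
    #           project_string_id='demo-project',
    #           goal='Try the import pipeline',
    #           user=user,
    #           member_created=user.member)
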
    def __fetch_object(self, opts):
        bucket = self.connection_client.get_bucket(opts['bucket_name'])
        blob = bucket.blob(opts['path'])
        blob_expiry = int(time.time() + (60 * 60 * 24 * 30))
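        # generate_signed_url() interprets an int expiration as a Unix
        # timestamp, so the URL below stays valid for roughly 30 days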
        signed_url = blob.generate_signed_url(expiration=blob_expiry)
        # Determine media type from the file extension
        # TODO Share this with existing process_media determine_media_type()
        extension = Path(opts['path']).suffix.lower()
        media_type = None
        if extension in images_allowed_file_names:
            media_type = 'image'
        elif extension in videos_allowed_file_names:
            media_type = 'video'
        else:
            # TODO: Decide, do we want to raise an exception? or just do nothing?
            log = regular_log.default()
            log['error']['invalid_type'] = 'File must be of type: {} {}'.format(
                str(images_allowed_file_names), str(videos_allowed_file_names))
            log['error']['file_name'] = opts['path']
            log['opts'] = opts
            with sessionMaker.session_scope() as session:
                Event.new(session=session,
                          member_id=opts['event_data']['request_user'],
                          kind='google_cloud_new_import_error',
                          description='New cloud import for {}'.format(
                              opts['path']),
                          error_log=log)
            raise LookupError('File must be of type: {} {}'.format(
                str(images_allowed_file_names),
                str(videos_allowed_file_names)))
        # metadata = self.connection_client.head_object(Bucket=opts['bucket_name'], Key=opts['path'])
        with sessionMaker.session_scope() as session:

            created_input = packet.enqueue_packet(
                self.config_data['project_string_id'],
                session=session,
                media_url=signed_url,
                media_type=media_type,
                job_id=opts.get('job_id'),
                video_split_duration=opts.get('video_split_duration'),
                directory_id=opts.get('directory_id'))
            log = regular_log.default()
            log['opts'] = opts
            Event.new(session=session,
                      member_id=opts['event_data']['request_user'],
                      kind='google_cloud_new_import_success',
                      description='New cloud import for {}'.format(
                          opts['path']),
                      error_log=log)
        return {'result': created_input}
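
    # Shape of the opts dict __fetch_object reads (inferred from the lookups
    # above; values are illustrative):
    #
    #   opts = {
    #       'bucket_name': 'my-bucket',          # GCS bucket to read from
    #       'path': 'raw/clip_001.mp4',          # object key; suffix selects media type
    #       'event_data': {'request_user': 1},   # member id recorded on the Event
    #       'job_id': None,                      # optional
    #       'video_split_duration': None,        # optional
    #       'directory_id': None,                # optional
    #   }
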
    def __send_export(self, opts):
        spec_list = [{'project_string_id': str}]
        log = regular_log.default()
        log, input = regular_input.input_check_many(
            untrusted_input=self.config_data, spec_list=spec_list, log=log)
        if len(log["error"].keys()) >= 1:
            return {'log': log}
        spec_list = [
            {
                'path': str
            },
            {
                "format": {
                    'default': 'JSON',
                    'kind': str,
                    'valid_values_list': ['JSON', 'YAML']
                }
            },
            {
                'export_id': str
            },
            {
                'bucket_name': str
            },
        ]
        log = regular_log.default()
        log, input = regular_input.input_check_many(untrusted_input=opts,
                                                    spec_list=spec_list,
                                                    log=log,
                                                    string_len_not_zero=False)
        if len(log["error"].keys()) >= 1:
            return {'log': log}

        if not opts['path'].endswith('/') and opts['path'] != '':
            log['error']['path'] = 'Path on bucket must be a folder, not a filename.'
            return {'log': log}

        with sessionMaker.session_scope() as session:
            project = Project.get_by_string_id(
                session, self.config_data['project_string_id'])
            member = session.query(Member).filter(
                Member.user_id == opts['event_data']['request_user']).first()
            export = session.query(Export).filter(
                Export.id == opts['export_id']).first()
            # Check perms and export status.
            export_check_result = check_export_permissions_and_status(
                export, self.config_data['project_string_id'], session)
            if len(export_check_result['error'].keys()) >= 1:
                log = regular_log.default()
                log['error'] = export_check_result['error']
                log['error']['file_name'] = opts['path']
                log['opts'] = opts
                Event.new(
                    session=session,
                    member_id=opts['event_data']['request_user'],
                    kind='google_cloud_new_export_error',
                    description='Google cloud export error for {}'.format(
                        opts['path']),
                    error_log=log,
                    member=member,
                    project_id=project.id,
                    success=False)
                return export_check_result

            bucket = self.connection_client.get_bucket(opts['bucket_name'])
            result = export_view_core(export=export,
                                      format=opts['format'],
                                      return_type='bytes')
            filename = generate_file_name_from_export(export, session)

            if opts['path'] != '':
                blob = bucket.blob('{}{}.{}'.format(opts['path'], filename,
                                                    opts['format'].lower()))
            else:
                blob = bucket.blob('{}.{}'.format(filename,
                                                  opts['format'].lower()))
            blob.upload_from_string(result)
            log = regular_log.default()
            log['opts'] = opts
            Event.new(session=session,
                      member_id=opts['event_data']['request_user'],
                      kind='google_cloud_new_export_success',
                      description='New cloud export for {}'.format(blob.name),
                      error_log=log,
                      member=member,
                      project_id=project.id,
                      success=True)
            return {'result': True}
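
    # Shape of the opts dict __send_export validates above (values are
    # illustrative; 'path' must be '' or end with '/'):
    #
    #   opts = {
    #       'path': 'exports/',
    #       'format': 'JSON',                    # or 'YAML'
    #       'export_id': '42',
    #       'bucket_name': 'my-bucket',
    #       'event_data': {'request_user': 1},
    #   }
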
    def __fetch_folder(self, opts):
        result = []

        if self.config_data.get('project_string_id') is None:
            return {'result': 'error'}
        paths = opts['path']
        if not isinstance(paths, list):
            paths = [paths]
        with sessionMaker.session_scope() as session:
            project = Project.get_by_string_id(
                session, self.config_data.get('project_string_id'))
            member = session.query(Member).filter(
                Member.user_id == opts['event_data']['request_user']).first()
            for path in paths:
                blobs = self.connection_client.list_blobs(opts['bucket_name'],
                                                          prefix=path)
                for blob in blobs:
                    # Skip folder placeholder objects
                    if blob.name.endswith('/'):
                        continue

                    blob_expiry = int(time.time() + (60 * 60 * 24 * 30))
                    signed_url = blob.generate_signed_url(
                        expiration=blob_expiry)
                    # Determine media type from the file extension
                    # (blob.name, not blob.path, which is the URL-encoded API path)
                    extension = Path(blob.name).suffix.lower()
                    media_type = None
                    if extension in images_allowed_file_names:
                        media_type = 'image'
                    elif extension in videos_allowed_file_names:
                        media_type = 'video'
                    else:
                        logging.warning('File: {} must be of type: {} {}'.format(
                            blob.name, str(images_allowed_file_names),
                            str(videos_allowed_file_names)))

                        log = regular_log.default()
                        log['error']['invalid_type'] = 'File must be of type: {} {}'.format(
                            str(images_allowed_file_names),
                            str(videos_allowed_file_names))
                        log['error']['file_name'] = blob.name
                        log['opts'] = opts
                        Event.new(
                            session=session,
                            member_id=opts['event_data']['request_user'],
                            kind='google_cloud_new_import_warning',
                            description='Skipped import for {}, invalid file type.'.format(
                                blob.name),
                            error_log=log,
                            project_id=project.id,
                            member=member,
                            success=False)
                        continue
                    # TODO: check Input() table for duplicate file?
                    created_input = packet.enqueue_packet(
                        self.config_data['project_string_id'],
                        session=session,
                        media_url=signed_url,
                        media_type=media_type,
                        job_id=opts.get('job_id'),
                        batch_id=opts.get('batch_id'),
                        file_name=blob.name,
                        video_split_duration=opts.get('video_split_duration'),
                        directory_id=opts.get('directory_id'),
                        extract_labels_from_batch=True)
                    log = regular_log.default()
                    log['opts'] = opts
                    Event.new(session=session,
                              member_id=opts['event_data']['request_user'],
                              kind='google_cloud_new_import_success',
                              description='New cloud import for {}'.format(
                                  blob.name),
                              error_log=log,
                              project_id=project.id,
                              member=member,
                              success=True)
                    result.append(created_input)
        return result
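
    # Shape of the opts dict __fetch_folder reads (inferred from the code
    # above; values are illustrative). 'path' may be a single prefix or a
    # list of prefixes; every non-folder blob under each prefix is enqueued:
    #
    #   opts = {
    #       'bucket_name': 'my-bucket',
    #       'path': ['train/', 'val/'],
    #       'event_data': {'request_user': 1},
    #       'job_id': None,                      # optional
    #       'batch_id': None,                    # optional
    #       'video_split_duration': None,        # optional
    #       'directory_id': None,                # optional
    #   }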