def __fetch_object(self, opts):
        """Import a single Google Cloud Storage object into Diffgram.

        Builds a 30-day signed URL for the blob and enqueues it as a new
        input packet.

        :param opts: dict with 'bucket_name', 'path', and 'event_data'
            (containing 'request_user'); optional 'job_id',
            'video_split_duration', 'directory_id'.
        :return: {'result': created_input} on success.
        :raises LookupError: if the file extension is not an allowed
            image or video type.
        """
        bucket = self.connection_client.get_bucket(opts['bucket_name'])
        blob = bucket.blob(opts['path'])
        # Signed URL valid for 30 days.
        blob_expiry = int(time.time() + (60 * 60 * 24 * 30))
        signed_url = blob.generate_signed_url(expiration=blob_expiry)
        # Deduce media type from the file extension.
        # TODO Share this with existing process_media determine_media_type()
        extension = Path(opts['path']).suffix.lower()
        media_type = None
        if extension in images_allowed_file_names:
            media_type = 'image'
        elif extension in videos_allowed_file_names:
            media_type = 'video'
        else:
            # Unsupported file type: record an error event, then raise.
            log = regular_log.default()
            log['error']['invalid_type'] = 'File must type of: {} {}'.format(
                str(images_allowed_file_names), str(videos_allowed_file_names))
            log['error']['file_name'] = opts['path']
            log['opts'] = opts
            with sessionMaker.session_scope() as session:
                Event.new(session=session,
                          member_id=opts['event_data']['request_user'],
                          kind='google_cloud_new_import_error',
                          description='New cloud import for {}'.format(
                              opts['path']),
                          error_log=log)
            raise LookupError('File must type of: {} {}'.format(
                str(images_allowed_file_names),
                str(videos_allowed_file_names)))

        with sessionMaker.session_scope() as session:

            created_input = packet.enqueue_packet(
                self.config_data['project_string_id'],
                session=session,
                media_url=signed_url,
                media_type=media_type,
                job_id=opts.get('job_id'),
                video_split_duration=opts.get('video_split_duration'),
                directory_id=opts.get('directory_id'))
            log = regular_log.default()
            log['opts'] = opts
            # Bug fix: previously passed raw `opts` as error_log even
            # though the structured `log` dict was just built for it.
            Event.new(session=session,
                      member_id=opts['event_data']['request_user'],
                      kind='google_cloud_new_import_success',
                      description='New cloud import for {}'.format(
                          opts['path']),
                      error_log=log)
        return {'result': created_input}
Ejemplo n.º 2
0
def test():
    # Smoke test: issue an Editor-level auth credential for the
    # "sdk-test" project and dump it (including the secret) to stdout.
    with sessionMaker.session_scope() as session:
        auth = new.create(session, "sdk-test", "Editor")
        print(auth.serialize_with_secret())
Ejemplo n.º 3
0
def test():
    # Smoke test: revoke auth credentials for a given client id.
    # Fill in project_string_id / client_id before running.
    project_string_id = ""
    client_id = ""

    with sessionMaker.session_scope() as session:
        outcome = revoke.by_client_id(session, project_string_id, client_id)
        print(outcome)
Ejemplo n.º 4
0
def user_view():
    """Return the current session's user serialized as JSON, or 400."""
    with sessionMaker.session_scope() as session:
        current_user = User.get(session)
        if current_user is None:
            return jsonify({"none_found": True}), 400, {
                'ContentType': 'application/json'}
        return jsonify(user=current_user.serialize()), 200, {
            'ContentType': 'application/json'}
Ejemplo n.º 5
0
def verify_otp_from_web():
    """Verify a one-time-password code submitted during web login.

    Reads 'otp', 'otp_current_session' and 'email' from the request JSON.
    Note every outcome — including failures — is returned with HTTP 200;
    the client distinguishes success via the JSON payload.
    """
    with sessionMaker.session_scope() as session:

        data = request.get_json(force=True)

        proposed_otp_code = data.get('otp', None)
        otp_current_session = data.get('otp_current_session', None)
        email = data.get('email', None)

        user = User.get_by_email(session, email)
        if user is None:
            return jsonify(error="No user"), 200, {
                'ContentType': 'application/json'
            }

        # Reject expired OTP sessions and record the failed attempt.
        if user.otp_current_session_expiry <= time.time():
            User.new_login_history(session=session,
                                   success=False,
                                   otp_success=False,
                                   remote_address=request.remote_addr,
                                   user_id=user.id)
            return jsonify(error="Please login again, session expired"), 200, {
                'ContentType': 'application/json'
            }

        # The session token must match the one issued at password login.
        if user.otp_current_session != otp_current_session:
            User.new_login_history(session=session,
                                   success=False,
                                   otp_success=False,
                                   remote_address=request.remote_addr,
                                   user_id=user.id)
            return jsonify(error="Please login again, session invalid"), 200, {
                'ContentType': 'application/json'
            }

        if OneTimePass.verify_otp(user, proposed_otp_code) is True:

            User.new_login_history(session=session,
                                   success=True,
                                   otp_success=True,
                                   remote_address=request.remote_addr,
                                   user_id=user.id)

            # Only now does the user get an authenticated cookie.
            setSecureCookie(user)

            return jsonify(user=user.serialize(), success=True), 200, {
                'ContentType': 'application/json'
            }

        return jsonify(error="Invalid code"), 200, {
            'ContentType': 'application/json'
        }
Ejemplo n.º 6
0
def generate_mask_by_project_id(project_string_id):
    """Generate semantic-segmentation masks for a whole project.

    :param project_string_id: project identifier string.
    :return: Flask-style ("ok", 200, headers) tuple.
    """
    # TODO use a thread, this is a long running process

    semantic_segmentation_data_prep = Semantic_segmentation_data_prep()

    with sessionMaker.session_scope() as session:
        project = Project.get(session, project_string_id)
        # Renamed from `type` to avoid shadowing the builtin.
        mask_type = "joint"  # alternative: "binary"
        semantic_segmentation_data_prep.generate_mask_core(
            session, project, mask_type)

    return "ok", 200, {'ContentType': 'application/json'}
Ejemplo n.º 7
0
def tag_view_all_public():
    """Return the names of all public tags as JSON."""
    with sessionMaker.session_scope() as session:
        public_tags = session.query(Tag).filter(Tag.is_public == True).all()
        tag_names = [tag.name for tag in public_tags]
        return jsonify(success=True, tag_list=tag_names), 200, {
            'ContentType': 'application/json'}
Ejemplo n.º 8
0
def tag_view_by_project(project_string_id):
    """Return a project's public tag list as JSON, or 400 if not found."""
    with sessionMaker.session_scope() as session:
        project = Project.get_project(session, project_string_id)
        if project is None:
            return jsonify(success=False), 400, {
                'ContentType': 'application/json'
            }
        return jsonify(
            success=True,
            tag_list=project.serialize_tag_list_PUBLIC()
        ), 200, {'ContentType': 'application/json'}
Ejemplo n.º 9
0
def disable_otp_from_web():
    """Turn off one-time-password login for the current user."""
    with sessionMaker.session_scope() as session:
        user = User.get(session)
        if not user:
            return "no user", 400, {'ContentType': 'application/json'}

        user.otp_enabled = False
        session.add(user)

        return jsonify(success=True, user=user.serialize()), 200, {
            'ContentType': 'application/json'}
Ejemplo n.º 10
0
def user_exists(email):
    """Report whether a user with the given email exists.

    Only answers in test/sandbox system modes, to avoid leaking
    account existence in production.
    """
    if settings.DIFFGRAM_SYSTEM_MODE not in [
            'testing_e2e', 'testing', 'sandbox'
    ]:
        return jsonify(message='Invalid System Mode'), 400

    with sessionMaker.session_scope() as session:
        if User.get_by_email(session, email) is None:
            return jsonify({"none_found": True}), 400, {
                'ContentType': 'application/json'
            }
        return jsonify(found=True), 200, {'ContentType': 'application/json'}
Ejemplo n.º 11
0
    def by_project_core(project_string_id: str,
                        Roles: list,
                        apis_project_list: list = [],
                        apis_user_list: list = []):
        """Check the caller may access a project with one of `Roles`.

        If an Authorization header is present, API-key permissions are
        checked (plus project API flags); otherwise falls back to
        session/user project permissions.

        Returns True on success; raises Forbidden on any failure.

        NOTE(review): the mutable default arguments ([]) are shared
        across calls — safe only while these lists are never mutated
        here; confirm before changing.
        """
        with sessionMaker.session_scope() as session:
            if not project_string_id or project_string_id == "null" or project_string_id == "undefined":
                raise Forbidden(default_denied_message)

            if request.authorization is not None:

                result = API_Permissions.by_project(
                    session=session,
                    project_string_id=project_string_id,
                    Roles=Roles)
                if result is not True:
                    raise Forbidden(default_denied_message +
                                    " API Permissions")

                # At the moment auth doesn't actually
                # get project as it has all results stored...
                # not clear that we would need a None check here
                # given its checked in auth?

                project = Project.get(session, project_string_id)
                if project is None:
                    raise Forbidden(default_denied_message +
                                    " Can't find project")

                # Project APIs, maybe should role this into API_Permissions
                check_all_apis(project=project,
                               apis_required_list=apis_project_list)

                return True

            # No API key: fall back to logged-in user project permissions.
            result = Project_permissions.check_permissions(
                session=session,
                apis_project_list=apis_project_list,
                apis_user_list=apis_user_list,
                project_string_id=project_string_id,
                Roles=Roles)

            if result is True:
                return True
            else:
                raise Forbidden(default_denied_message)
Ejemplo n.º 12
0
def enable_otp_from_web():
    """Enable one-time-password login for the current user.

    Returns the new OTP secret, provisioning QR code URL and backup codes.
    """
    with sessionMaker.session_scope() as session:
        user = User.get(session)
        if not user:
            return "no user", 400, {'ContentType': 'application/json'}

        otp, qr_code_url, backup_code_list = OneTimePass.new(session, user)
        payload = jsonify(success=True,
                          otp=otp,
                          qr_code_url=qr_code_url,
                          backup_code_list=backup_code_list,
                          user=user.serialize())
        return payload, 200, {'ContentType': 'application/json'}
Ejemplo n.º 13
0
    def system_startup_events_check(service_name):
        """
            Checks multiple system settings changes and logs them if any changes detected.

            Records a 'system_startup' event, then checks for version and
            host-OS changes since the previous startup.
        :param service_name: name of the service that is starting up.
        :return: True on success, False if a required ENV variable is unset.
        """
        # All three install/version ENV vars must be present before we can
        # record meaningful startup events. (De-duplicated from three
        # copy-pasted if-blocks; error text is unchanged.)
        required_settings = [
            'DIFFGRAM_INSTALL_FINGERPRINT',
            'DIFFGRAM_VERSION_TAG',
            'DIFFGRAM_HOST_OS',
        ]
        for setting_name in required_settings:
            if getattr(settings, setting_name) is None:
                logger.error(
                    '{} is not Set as an ENV variable. \n'
                    'Please set it by running install.py script again. Value will be stored in .env file.'.format(
                        setting_name))
                return False

        with session_scope() as session:
            # Record Startup Time
            SystemEvents.new(
                session = session,
                kind = 'system_startup',
                description = 'Diffgram System startup for {} service'.format(service_name),
                install_fingerprint = settings.DIFFGRAM_INSTALL_FINGERPRINT,
                previous_version = None,
                diffgram_version = settings.DIFFGRAM_VERSION_TAG,
                host_os = settings.DIFFGRAM_HOST_OS,
                storage_backend = settings.DIFFGRAM_STATIC_STORAGE_PROVIDER,
                service_name = service_name,
                startup_time = datetime.datetime.utcnow(),
                shut_down_time = None,
                created_date = datetime.datetime.utcnow()
            )
            SystemEvents.check_version_upgrade(session = session, service_name = service_name)
            SystemEvents.check_os_change(session = session, service_name = service_name)
            return True
Ejemplo n.º 14
0
def annotation_example_toggle(project_string_id):
    """Toggle the is_annotation_example flag on an image.

    Expects request JSON of the form {'image': {'id': ...}}.

    :return: Flask-style (body, status, headers) tuple.
    """
    with sessionMaker.session_scope() as s:

        data = request.get_json(force=True)
        # Fixed typo: was `reqiest_image`.
        request_image = data.get('image', None)
        if request_image is None:
            return json.dumps("image is None"), 400, {
                'ContentType': 'application/json'
            }

        request_image_id = request_image.get('id', None)
        if request_image_id is None:
            return json.dumps("image_id is None"), 400, {
                'ContentType': 'application/json'
            }

        image = Image.get_by_id(s, request_image_id)
        # Guard added: an unknown id previously raised AttributeError here.
        if image is None:
            return json.dumps("image not found"), 400, {
                'ContentType': 'application/json'
            }

        image.is_annotation_example = not image.is_annotation_example
        s.add(image)

        out = {'success': True}
        return jsonify(out), 200, {'ContentType': 'application/json'}
Ejemplo n.º 15
0
def web_build_name_to_file_id_dict(project_string_id):
    """Map label names to label file ids for one directory of a project.

    The directory id is read from the 'directory_id' request header and
    is verified to belong to the project before the dict is built.

    Returns:
        JSON with `log` and, on the happy path, `name_to_file_id`.
        All responses use HTTP 200; success is signalled in `log`.
    """
    log = {"success": False, "errors": []}

    directory_id = request.headers.get('directory_id', None)
    if directory_id is None:
        log["errors"].append("'directory_id' not supplied")
        return jsonify(log), 200

    with sessionMaker.session_scope() as session:

        project = Project.get(session, project_string_id)
        # Prevent reading a directory that belongs to another project.
        verify_result = WorkingDir.verify_directory_in_project(
            session, project, directory_id)
        if verify_result is False:
            log["errors"].append("Invalid directory id")
            log["success"] = False
            return jsonify(log=log), 200

        name_to_file_id, result = build_name_to_file_id_dict(
            session=session, directory_id=directory_id)
        if result == True:
            log["success"] = True

    return jsonify(log=log, name_to_file_id=name_to_file_id), 200
Ejemplo n.º 16
0
    def __fetch_object(self, opts):
        """Upload a file to diffgram from an S3 bucket

        Generates a 6-day presigned GET URL for the object and enqueues
        it as a new input packet.

        :param opts: dict with 'bucket_name' and 'path' (validated below)
            plus 'event_data' containing 'request_user'; optional
            'job_id', 'video_split_duration', 'directory_id'.
        :return: created input on success, None for unsupported file
            types, or {'log': log} on validation failure.
        """
        spec_list = [{'bucket_name': str, 'path': str}]
        log = regular_log.default()
        log, input = regular_input.input_check_many(untrusted_input=opts,
                                                    spec_list=spec_list,
                                                    log=log)
        if len(log["error"].keys()) >= 1:
            return {'log': log}
        # This might be an issue. Currently not supporting urls with no expiration. Biggest time is 1 week.
        signed_url = self.connection_client.generate_presigned_url('get_object',
                                                                   Params={'Bucket': opts['bucket_name'],
                                                                           'Key': opts['path']},
                                                                   ExpiresIn=3600 * 24 * 6)  # 6 days (comment fixed; was mislabelled "5 Days").

        with sessionMaker.session_scope() as session:

            project = Project.get_by_string_id(session, self.config_data.get('project_string_id'))
            member = session.query(Member).filter(Member.user_id == opts['event_data']['request_user']).first()
            # Deduce media type from the file extension.
            extension = Path(opts['path']).suffix.lower()
            media_type = None
            if extension in images_allowed_file_names:
                media_type = 'image'
            elif extension in videos_allowed_file_names:
                media_type = 'video'
            else:
                # Unsupported type: record a warning event and skip the import.
                log = regular_log.default()
                log['error']['invalid_type'] = 'File must type of: {} {}'.format(str(images_allowed_file_names),
                                                                                 str(videos_allowed_file_names))
                log['error']['file_name'] = opts['path']
                log['opts'] = opts
                Event.new(
                    session=session,
                    member_id=opts['event_data']['request_user'],
                    kind='aws_s3_new_import_warning',
                    description='Skipped import for {}, invalid file type.'.format(opts['path']),
                    error_log=log,
                    project_id=project.id,
                    member=member,
                    success=False
                )
                return None
            # metadata = self.connection_client.head_object(Bucket=self.config_data['bucket_name'], Key=path)
            created_input = packet.enqueue_packet(self.config_data['project_string_id'],
                                                  session=session,
                                                  media_url=signed_url,
                                                  media_type=media_type,
                                                  job_id=opts.get('job_id'),
                                                  video_split_duration=opts.get('video_split_duration'),
                                                  directory_id=opts.get('directory_id'))
            log = regular_log.default()
            log['opts'] = opts
            # Bug fix: previously passed raw `opts` as error_log even
            # though the structured `log` dict was just built for it.
            Event.new(
                session=session,
                member_id=opts['event_data']['request_user'],
                kind='aws_s3_new_import_success',
                description='New cloud import for {}'.format(opts['path']),
                error_log=log,
                project_id=project.id,
                member=member,
                success=True
            )
        return created_input
Ejemplo n.º 17
0
            def inner(*args, **kwds):
                """Permission gate for job-scoped endpoints.

                Resolves the job's project, then authorizes via (in order):
                API key, builder-user project permissions, or trainer
                membership on the job. Calls the wrapped `func` on success;
                raises Forbidden otherwise. Closure variables (`func`,
                `project_role_list`, `apis_project_list`, `apis_user_list`)
                come from the enclosing decorator (not visible here).
                """
                job_id = kwds.get('job_id', None)
                if job_id is None or job_id == "null" or job_id == "undefined":
                    raise Forbidden("job_id is invalid")

                with sessionMaker.session_scope() as session:

                    # Permissions cascading from project
                    project_string_id = get_project_string_from_job_id(
                        session, job_id)

                    # API
                    if request.authorization is not None:

                        result = API_Permissions.by_project(
                            session=session,
                            project_string_id=project_string_id,
                            Roles=project_role_list)
                        if result is True:
                            return func(*args, **kwds)
                        else:
                            raise Forbidden("API access invalid")

                    # TODO do we need to validate user has applicable mode?
                    # ie they pass mode builder but are trainer?
                    # Basics should fail on project level check anyway here...

                    # User
                    # TODO move login stuff into the general User_Permissions
                    if LoggedIn() != True:
                        raise Forbidden("Login again.")

                    user = session.query(User).filter(
                        User.id == getUserID()).first()

                    if user is None:
                        raise Forbidden("Login again.")

                    # Want to use the builder API permissions instead of
                    # flags since a user may be testing this as a builder
                    # TODO deprecate 'mode' flag or have it as something else
                    # like "builder_only" or something

                    # Jan 3, 2020
                    # One downside of doing it this way is it means
                    # that we need to be careful with
                    # project_role_list list...

                    if user.api_enabled_builder is True:
                        result = Project_permissions.check_permissions(
                            session=session,
                            project_string_id=project_string_id,
                            Roles=project_role_list,
                            apis_project_list=apis_project_list,
                            apis_user_list=apis_user_list)

                        if result is True:
                            return func(*args, **kwds)
                        else:
                            raise Forbidden("Project access invalid")

                    if user.api_enabled_trainer is True:

                        # TODO refactor into function

                        # TODO handle "info" case of a trainer not yet
                        # on a job seeing basic stuff on active jobs...

                        # We allow trainers to see
                        # Basic info before they apply
                        # as long as job is active...

                        #if job.status != "active":
                        #	raise Forbidden("No access.")

                        User_Permissions.general(user=user,
                                                 apis_user_list=apis_user_list)

                        user_to_job = User_To_Job.get_single_by_ids(
                            session=session, user_id=user.id, job_id=job_id)

                        # TODO other status checking on this...

                        if user_to_job is None:
                            raise Forbidden(
                                "No access to this job. Please apply first.")

                        # Success case for trainer
                        return func(*args, **kwds)

                # Fall-through: user is neither API, builder, nor trainer.
                raise Forbidden("No access.")
Ejemplo n.º 18
0
    def __fetch_object(self, opts):
        """
        Upload a file to Diffgram from an Azure Blob

        Builds a long-lived SAS download URL for the blob and enqueues it
        as a new input packet.

        :param opts: Dictionary with parameters for object fetching:
            'bucket_name' (container) and 'path' (blob name), validated
            below, plus 'event_data' with 'request_user'; optional
            'job_id', 'batch_id', 'video_split_duration', 'directory_id'.
        :return: created input on success, None for unsupported file
            types, or {'log': log} on validation failure.
        """
        spec_list = [{'bucket_name': str, 'path': str}]
        log = regular_log.default()
        log, input = regular_input.input_check_many(untrusted_input = opts,
                                                    spec_list = spec_list,
                                                    log = log)
        if len(log["error"].keys()) >= 1:
            return {'log': log}
        shared_access_signature = BlobSharedAccessSignature(
            account_name = self.connection_client.account_name,
            account_key = self.connection_client.credential.account_key
        )

        # SAS valid for ~467 days (40368000 seconds).
        expiration_offset = 40368000
        blob_name = opts['path']
        container = opts['bucket_name']
        added_seconds = datetime.timedelta(0, expiration_offset)
        expiry_time = datetime.datetime.utcnow() + added_seconds
        filename = blob_name.split("/")[-1]
        sas = shared_access_signature.generate_blob(
            container_name = container,
            blob_name = blob_name,
            start = datetime.datetime.utcnow(),
            expiry = expiry_time,
            permission = BlobSasPermissions(read = True),
            content_disposition = 'attachment; filename=' + filename,
        )
        sas_url = 'https://{}.blob.core.windows.net/{}/{}?{}'.format(
            self.connection_client.account_name,
            container,
            blob_name,
            sas
        )

        with sessionMaker.session_scope() as session:

            project = Project.get_by_string_id(session, self.config_data.get('project_string_id'))
            member = session.query(Member).filter(Member.user_id == opts['event_data']['request_user']).first()
            # Deduce media type from the file extension.
            extension = Path(opts['path']).suffix.lower()
            media_type = None
            if extension in images_allowed_file_names:
                media_type = 'image'
            elif extension in videos_allowed_file_names:
                media_type = 'video'
            else:
                # Unsupported type: record a warning event and skip the import.
                log = regular_log.default()
                log['error']['invalid_type'] = 'File must type of: {} {}'.format(str(images_allowed_file_names),
                                                                                 str(videos_allowed_file_names))
                log['error']['file_name'] = opts['path']
                log['opts'] = opts
                Event.new(
                    session = session,
                    member_id = opts['event_data']['request_user'],
                    kind = 'microsoft_azure_new_import_warning',
                    description = 'Skipped import for {}, invalid file type.'.format(opts['path']),
                    error_log = log,
                    project_id = project.id,
                    member = member,
                    success = False
                )
                return None

            created_input = packet.enqueue_packet(self.config_data['project_string_id'],
                                                  session = session,
                                                  media_url = sas_url,
                                                  media_type = media_type,
                                                  job_id = opts.get('job_id'),
                                                  batch_id = opts.get('batch_id'),
                                                  file_name = opts.get('path'),
                                                  video_split_duration = opts.get('video_split_duration'),
                                                  directory_id = opts.get('directory_id'),
                                                  extract_labels_from_batch = True)
            log = regular_log.default()
            log['opts'] = opts
            # Bug fix: previously passed raw `opts` as error_log even
            # though the structured `log` dict was just built for it.
            Event.new(
                session = session,
                member_id = opts['event_data']['request_user'],
                kind = 'microsoft_azure_new_import_success',
                description = 'New cloud import for {}'.format(opts['path']),
                error_log = log,
                project_id = project.id,
                member = member,
                success = True
            )
        return created_input
Ejemplo n.º 19
0
    def __send_export(self, opts):
        """Export annotations to an Azure Blob container.

        Renders the export in the requested format and uploads it to
        `opts['bucket_name']` under the folder `opts['path']`.

        :param opts: dict with 'path' (folder, must end with '/' or be
            empty), 'format' ('JSON' or 'YAML'), 'export_id',
            'bucket_name' and 'event_data' with 'request_user'.
        :return: {'result': True} on success, otherwise a log dict.
        """
        # NOTE(review): spec declares project_string_id as `dict`, which
        # looks like it should be `str` — left as-is to avoid changing
        # validation behavior; confirm against regular_input semantics.
        spec_list = [{'project_string_id': dict}]
        log = regular_log.default()
        log, input = regular_input.input_check_many(untrusted_input = self.config_data,
                                                    spec_list = spec_list,
                                                    log = log)
        if len(log["error"].keys()) >= 1:
            return {'log': log}
        spec_list = [
            {'path': str},
            {"format": {
                'default': 'JSON',
                'kind': str,
                'valid_values_list': ['JSON', 'YAML']
            }},
            {'export_id': str},
            {'bucket_name': str},

        ]
        log = regular_log.default()
        log, input = regular_input.input_check_many(untrusted_input = opts,
                                                    spec_list = spec_list,
                                                    log = log,
                                                    string_len_not_zero = False)
        if len(log["error"].keys()) >= 1:
            return {'log': log}
        if not opts['path'].endswith('/') and opts['path'] != '':
            log['error']['path'] = 'Path on bucket must be a folder, not a filename.'
            return log

        with sessionMaker.session_scope() as session:
            project = Project.get_by_string_id(session, self.config_data['project_string_id'])
            member = session.query(Member).filter(Member.user_id == opts['event_data']['request_user']).first()
            export = session.query(Export).filter(Export.id == opts['export_id']).first()
            # Check perms and export status.
            export_check_result = check_export_permissions_and_status(export,
                                                                      self.config_data['project_string_id'],
                                                                      session)
            # Bug fix: was `> 1`, which silently ignored a single error.
            if len(export_check_result['error'].keys()) >= 1:
                return export_check_result

            result = export_view_core(
                export = export,
                format = opts['format'],
                return_type = 'bytes')
            filename = generate_file_name_from_export(export, session)

            # Destination key: <folder><filename>.<ext> (folder may be '').
            if opts['path'] != '':
                key = '{}{}.{}'.format(opts['path'], filename, opts['format'].lower())
            else:
                key = '{}.{}'.format(filename, opts['format'].lower())

            file = io.BytesIO(result)
            blob_client = self.connection_client.get_blob_client(container = opts['bucket_name'], blob = key)
            content_type = mimetypes.guess_type(filename)[0]
            my_content_settings = ContentSettings(content_type = content_type)
            blob_client.upload_blob(file, content_settings = my_content_settings)
            log = regular_log.default()
            log['opts'] = opts
            Event.new(
                session = session,
                member_id = opts['event_data']['request_user'],
                kind = 'microsoft_azure_new_export_success',
                description = 'New cloud export for {}{}'.format(opts['path'], filename),
                error_log = opts,
                member = member,
                project_id = project.id,
                success = True
            )
            return {'result': True}
    def __fetch_folder(self, opts):
        """Import every blob under one or more GCS path prefixes.

        Unsupported file types are logged as warning events and skipped.

        :param opts: dict with 'bucket_name', 'path' (a prefix string or
            list of prefixes) and 'event_data' with 'request_user';
            optional 'job_id', 'batch_id', 'video_split_duration',
            'directory_id'.
        :return: list of created inputs, or {'result': 'error'} when the
            connection has no project configured.
        """
        result = []

        if self.config_data.get('project_string_id') is None:
            return {'result': 'error'}
        paths = opts['path']
        if type(paths) != list:
            paths = [paths]
        with sessionMaker.session_scope() as session:
            project = Project.get_by_string_id(
                session, self.config_data.get('project_string_id'))
            member = session.query(Member).filter(
                Member.user_id == opts['event_data']['request_user']).first()
            for path in paths:
                blobs = self.connection_client.list_blobs(opts['bucket_name'],
                                                          prefix=path)
                for blob in blobs:
                    # Skip "directory" placeholder objects.
                    if blob.name.endswith('/'):
                        continue

                    # Signed URL valid for 30 days.
                    blob_expiry = int(time.time() + (60 * 60 * 24 * 30))
                    signed_url = blob.generate_signed_url(
                        expiration=blob_expiry)
                    # Lowercased for consistency with the single-object
                    # importers (uppercase extensions were rejected here).
                    # NOTE(review): sibling code uses the object name;
                    # blob.path is the URL-quoted API path — confirm intent.
                    extension = Path(blob.path).suffix.lower()
                    media_type = None
                    if extension in images_allowed_file_names:
                        media_type = 'image'
                    elif extension in videos_allowed_file_names:
                        media_type = 'video'
                    else:
                        logging.warn('File: {} must type of: {} {}'.format(
                            blob.name, str(images_allowed_file_names),
                            str(videos_allowed_file_names)))

                        log = regular_log.default()
                        log['error'][
                            'invalid_type'] = 'File must type of: {} {}'.format(
                                str(images_allowed_file_names),
                                str(videos_allowed_file_names))
                        log['error']['file_name'] = path
                        log['opts'] = opts
                        Event.new(
                            session=session,
                            member_id=opts['event_data']['request_user'],
                            kind='google_cloud_new_import_warning',
                            description=
                            'Skipped import for {}, invalid file type.'.format(
                                blob.name),
                            error_log=log,
                            project_id=project.id,
                            member=member,
                            success=False)
                        continue
                    # Bug fix: `result` was reset to [] here on every
                    # iteration, so only the last created input survived.
                    # TODO: check Input() table for duplicate file?
                    created_input = packet.enqueue_packet(
                        self.config_data['project_string_id'],
                        session=session,
                        media_url=signed_url,
                        media_type=media_type,
                        job_id=opts.get('job_id'),
                        batch_id=opts.get('batch_id'),
                        file_name=path,
                        video_split_duration=opts.get('video_split_duration'),
                        directory_id=opts.get('directory_id'),
                        extract_labels_from_batch=True)
                    log = regular_log.default()
                    log['opts'] = opts
                    Event.new(session=session,
                              member_id=opts['event_data']['request_user'],
                              kind='google_cloud_new_import_success',
                              description='New cloud import for {}'.format(
                                  blob.name),
                              error_log=opts,
                              project_id=project.id,
                              member=member,
                              success=True)
                    result.append(created_input)
        return result
Ejemplo n.º 21
0
def update_tags(project_string_id):
    """
    Replace a project's tag list with the tags supplied in the request.

    Request JSON: {'tag_list': [str, ...]} — may be None/absent, which
    means the user removed all tags. Existing tags with matching names
    are reused; new names create new Tag rows inheriting the project's
    is_public flag. Reassigning project.tag_list drops links to tags
    that are no longer present.
    """
    with sessionMaker.session_scope() as session:

        data = request.get_json(force=True)

        # Bug fix: tag_list can legitimately be None (all tags deleted);
        # iterating None raised TypeError before.
        tag_list = data.get('tag_list', None) or []

        project = Project.get(session, project_string_id)

        rebuilt_tag_list = []

        for tag in tag_list:
            if not valid_tag(tag):
                continue

            name = tag.lower()

            # Reuse an existing tag with the same name if one exists.
            tag_db = session.query(Tag).filter(Tag.name == name).first()

            if tag_db is None:
                tag_db = Tag()
                tag_db.name = name
                tag_db.is_public = project.is_public

            # Single add (was duplicated in two consecutive if-blocks).
            session.add(tag_db)

            # TODO handle counts properly ie on tag being removed etc.

            rebuilt_tag_list.append(tag_db)

        session.add(project)

        # This handles removing link to tag that's no longer in project
        project.tag_list = rebuilt_tag_list

    return json.dumps({'success': True}), 200, {
        'ContentType': 'application/json'
    }