Example #1
    def put(self, **kwargs):
        profile.clock_start("PUT_Task")
        # get values
        args = parser.parse_args()
        desc = args['desc']
        dur = args['dur']
        datasetid = kwargs["datasetid"]
        taskid = kwargs["taskid"]

        # existence check for dataset
        client = get_datastore_client()
        key = get_dataset_key(client, datasetid)
        entity = client.get(key)
        if not entity:
            profile.clock_stop("PUT_Task")
            abort(404, message="Dataset {} does not exist".format(datasetid))

        # Update if the task exists, otherwise create a new task
        key = get_task_key(client, datasetid, taskid)
        entity = client.get(key)
        if entity:
            # Update existing task
            update_task(client, key, desc, dur)
        else:
            # Create new task
            create_task(client, key, datasetid, taskid, desc, dur)

        profile.clock_stop("PUT_Task")
        return MESSAGE_SUCCESS, 200
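
The handlers above read arguments from a module-level parser that the snippets do not show. A minimal sketch, assuming flask_restful's reqparse and the argument names used here and in Example #5 (desc, dur, tasklist), might look like this:

# Assumed, not shown in the source: a shared request parser.
from flask_restful import reqparse

parser = reqparse.RequestParser()
parser.add_argument('desc')
parser.add_argument('dur')
parser.add_argument('tasklist', action='append')  # one string per task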
Example #2
    def delete(self, **kwargs):
        profile.clock_start("DELETE_Task")
        # get values
        datasetid = kwargs["datasetid"]
        taskid = kwargs["taskid"]

        # existence check for dataset
        client = get_datastore_client()
        key = get_dataset_key(client, datasetid)
        entity = client.get(key)
        if not entity:
            profile.clock_stop("DELETE_Task")
            abort(404, message="Dataset {} does not exist".format(datasetid))

        # existence check for task
        key = get_task_key(client, datasetid, taskid)
        entity = client.get(key)
        if not entity:
            profile.clock_stop("DELETE_Task")
            abort(404, message="Task {} does not exist".format(taskid))

        # Delete task and return
        delete_task(client, key)
        profile.clock_stop("DELETE_Task")
        return MESSAGE_SUCCESS, 200
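
The key helpers get_dataset_key and get_task_key are also not shown. A plausible sketch, assuming a 'Dataset' kind keyed by datasetid and a 'Task' kind stored under its dataset as ancestor (which the per-dataset existence checks above imply), is:

# Hypothetical key helpers; kind names and key layout are assumptions.
def get_dataset_key(client, datasetid):
    return client.key('Dataset', datasetid)

def get_task_key(client, datasetid, taskid):
    # ancestor path: the task key is rooted under its dataset key
    return client.key('Dataset', datasetid, 'Task', taskid)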
Example #3
    def get(self, **kwargs):
        profile.clock_start("GET_Bucket")
        # get values
        bucketname = kwargs["bucketname"]
        filename = kwargs["filename"]
        datasetid = kwargs["datasetid"]

        # existence check for dataset
        datastore_client = get_datastore_client()
        dataset_key = get_dataset_key(datastore_client, datasetid)
        entity = datastore_client.get(dataset_key)
        if entity:
            profile.clock_stop("GET_Bucket")
            abort(406, message="Dataset {} already exists".format(datasetid))

        # existence check for bucket
        storage_client = get_storage_client()
        try:
            bucket = storage_client.get_bucket(bucketname)
        except exceptions.NotFound:
            # get_bucket raises NotFound for a missing bucket; a bare
            # except would also swallow unrelated errors
            # (requires: from google.cloud import exceptions)
            profile.clock_stop("GET_Bucket")
            abort(404, message="Bucket {} does not exist".format(bucketname))

        # process bucket/file
        create_dataset_from_bucket(datastore_client, dataset_key, datasetid,
                                   bucket, filename)

        profile.clock_stop("GET_Bucket")
        return MESSAGE_SUCCESS, 200
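
The client factories are likewise not shown. Assuming Application Default Credentials, they could be as simple as:

# Hypothetical client factories, assuming default credentials.
from google.cloud import datastore, storage

def get_datastore_client():
    return datastore.Client()

def get_storage_client():
    return storage.Client()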
Example #4
    def get(self, **kwargs):
        profile.clock_start("GET_Task")
        # get values
        datasetid = kwargs["datasetid"]
        taskid = kwargs["taskid"]

        # existence check for dataset
        client = get_datastore_client()
        key = get_dataset_key(client, datasetid)
        entity = client.get(key)
        if not entity:
            profile.clock_stop("GET_Task")
            abort(404, message="Dataset {} does not exist".format(datasetid))

        # existence check for task
        key = get_task_key(client, datasetid, taskid)
        entity = client.get(key)
        if not entity:
            profile.clock_stop("GET_Task")
            abort(404, message="Task {} does not exist".format(taskid))

        # Return task
        task = new_task(datasetid, taskid, entity['desc'], entity['dur'])
        profile.clock_stop("GET_Task")
        return marshal(task, task_fields), 200
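
marshal needs a field template, and task_fields is not shown. A sketch consistent with the attributes used above (dur is string-valued in Example #9's payload) might be:

# Assumed marshalling template; field names inferred from the snippets.
from flask_restful import fields

task_fields = {
    'datasetid': fields.String,
    'taskid': fields.String,
    'desc': fields.String,
    'dur': fields.String,
}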
Example #5
    def put(self, **kwargs):
        profile.clock_start("PUT_Dataset")
        # get values
        datasetid = kwargs["datasetid"]
        args = parser.parse_args()
        desc = args['desc']
        tasklistarg = args['tasklist']

        # Convert the input JSON to a list of task objects,
        # which is less efficient but MUCH safer.
        tasklist = []
        if tasklistarg is not None:
            for taskstr in tasklistarg:
                # ast.literal_eval parses the dict literal without the
                # code-execution risk of eval (requires: import ast)
                taskdict = ast.literal_eval(taskstr)
                task = new_task(datasetid, taskdict['taskid'],
                                taskdict['desc'], taskdict['dur'])
                tasklist.append(task)

        # existence check for dataset
        client = get_datastore_client()
        key = get_dataset_key(client, datasetid)
        entity = client.get(key)
        if entity:
            # update existing dataset
            update_dataset(client, key, desc, tasklist)
        else:
            # create new dataset
            create_dataset(client, key, datasetid, desc, tasklist)

        profile.clock_stop("PUT_Dataset")
        return MESSAGE_SUCCESS, 200
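
For context on why each tasklist element is parsed from a string: reqparse coerces appended items with its default string type, so a JSON object in the list arrives as the str() of a dict, which ast.literal_eval can turn back into a dict. A minimal illustration of that assumed round trip:

import ast

# str() of a dict yields a Python literal that literal_eval accepts
taskstr = str({'taskid': 'task1', 'desc': 'The 1st Task', 'dur': '11'})
taskdict = ast.literal_eval(taskstr)
assert taskdict['dur'] == '11'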
Example #6
    @classmethod
    def tearDownClass(cls):
        # Remove the test dataset
        profile.clock_start("tearDownClass", TEST_PROFILE_ID)
        url = DATASET_URL
        response = requests.delete(url)
        profile.clock_stop("tearDownClass", TEST_PROFILE_ID)

        # Print Profile reports
        print(profile.report(TEST_PROFILE_ID))
        response = requests.get(PROFILE_REPORT_URL)
        print("\nServer Profile:")
        print(json.dumps(response.json(), indent=4))
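
profile is a project-specific helper. A minimal sketch inferred from the calls in these snippets (named timers, optionally grouped under a profile id, plus a report) could be:

# Hypothetical profile module; the real implementation is not shown.
import time
from collections import defaultdict

_starts = {}
_totals = defaultdict(float)

def clock_start(name, profile_id=None):
    _starts[(profile_id, name)] = time.perf_counter()

def clock_stop(name, profile_id=None):
    start = _starts.pop((profile_id, name), None)
    if start is not None:
        _totals[(profile_id, name)] += time.perf_counter() - start

def report(profile_id=None):
    return {name: total for (pid, name), total in _totals.items()
            if pid == profile_id}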
Example #7
    def delete(self, **kwargs):
        profile.clock_start("DELETE_Dataset")
        # existence check for dataset
        datasetid = kwargs["datasetid"]
        client = get_datastore_client()
        key = get_dataset_key(client, datasetid)
        entity = client.get(key)
        if not entity:
            profile.clock_stop("DELETE_Dataset")
            abort(404, message="Dataset {} does not exist".format(datasetid))

        delete_dataset(client, key)
        profile.clock_stop("DELETE_Dataset")
        return MESSAGE_SUCCESS, 200
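
delete_dataset is not shown; assuming tasks live under the dataset key as ancestor (see the key-helper sketch after Example #2), deleting a dataset plausibly removes its task children as well:

# Hypothetical sketch; assumes the ancestor key layout sketched earlier.
def delete_dataset(client, dataset_key):
    query = client.query(kind='Task', ancestor=dataset_key)
    query.keys_only()
    client.delete_multi([task.key for task in query.fetch()])
    client.delete(dataset_key)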
Example #8
    def get(self, **kwargs):
        profile.clock_start("GET_Dataset")
        # existence check for dataset
        datasetid = kwargs["datasetid"]
        client = get_datastore_client()
        key = get_dataset_key(client, datasetid)
        entity = client.get(key)
        if not entity:
            profile.clock_stop("GET_Dataset")
            abort(404, message="Dataset {} does not exist".format(datasetid))

        # get tasks and return
        tasks = get_tasks(client, key, datasetid)
        profile.clock_stop("GET_Dataset")
        return marshal(tasks, task_fields), 200
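
get_tasks is not shown either; under the assumed ancestor layout it reduces to a single ancestor query:

# Hypothetical sketch; taskid is assumed to be stored as the key name.
def get_tasks(client, dataset_key, datasetid):
    query = client.query(kind='Task', ancestor=dataset_key)
    return [new_task(datasetid, e.key.name, e['desc'], e['dur'])
            for e in query.fetch()]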
Example #9
    def setUp(self):
        # Use Dataset PUT to initialize an entire dataset.
        # PUT overwrites all values, including tasks.
        profile.clock_start("setUp", TEST_PROFILE_ID)
        url = DATASET_URL
        payload = {
            "desc": "Test Dataset 01 01 2019",
            "tasklist": [{
                "taskid": "task1",
                "desc": "The 1st Task",
                "dur": "11"
            }, {
                "taskid": "task2",
                "desc": "The 2nd Task",
                "dur": "22"
            }, {
                "taskid": "task3",
                "desc": "The 3rd Task",
                "dur": "33"
            }]
        }
        response = requests.put(url, json=payload)
        profile.clock_stop("setUp", TEST_PROFILE_ID)
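
The test constants are not shown; illustrative values (hypothetical, deployment-specific) might be:

# Hypothetical test constants; actual values depend on the deployment.
BASE_URL = "http://localhost:8080"
DATASET_URL = BASE_URL + "/datasets/testdataset"
PROFILE_REPORT_URL = BASE_URL + "/profile/report"
TEST_PROFILE_ID = "client_tests"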
Example #10
def create_dataset_from_bucket(datastore_client, dataset_key, datasetid,
                               bucket, filename):
    # get dataset rows from the bucket file
    profile.clock_start("b_blobstr")
    filename = filename + FILE_EXTENSION
    blob = bucket.get_blob(filename)
    blobbytes = blob.download_as_bytes()  # download_as_string is deprecated
    blobstr = blobbytes.decode('utf8')
    profile.clock_stop("b_blobstr")

    # First create the dataset ancestor
    profile.clock_start("b_ancestor")
    desc = "Dataset Loaded from Bucket"
    entity = datastore.Entity(dataset_key,
                              exclude_from_indexes=['datasetid', 'desc'])
    entity.update({
        'created': datetime.datetime.utcnow(),
        'datasetid': datasetid,
        'desc': desc
    })
    datastore_client.put(entity)
    profile.clock_stop("b_ancestor")

    # Next create new tasks - commit every BATCH_SIZE rows.  Entering the
    # batch as a context manager makes it the client's current batch, so
    # puts issued against datastore_client are deferred and sent in a
    # single commit when the block exits (assumes create_task writes
    # through datastore_client.put).
    profile.clock_start("b_tasks")
    lines = blobstr.split(NEWLINE)
    for start in range(0, len(lines), BATCH_SIZE):
        with datastore_client.batch():
            for line in lines[start:start + BATCH_SIZE]:
                # process the line
                values = line.split(DELIMITER)
                if len(values) > 2:
                    taskid = values[0]
                    taskdesc = values[1]
                    taskdur = values[2]
                    tkey = get_task_key(datastore_client, datasetid, taskid)
                    create_task(datastore_client, tkey, datasetid, taskid,
                                taskdesc, taskdur)
    profile.clock_stop("b_tasks")
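
The loader's module constants are not shown. Illustrative values, with the one hard constraint being Datastore's 500-mutation limit per commit:

# Hypothetical constants; only the Datastore commit limit of 500
# mutations genuinely constrains BATCH_SIZE.
FILE_EXTENSION = '.csv'
NEWLINE = '\n'
DELIMITER = ','
BATCH_SIZE = 400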
Example #11
    def get(self, **kwargs):
        profile.clock_start("GET_Datasets")
        client = get_datastore_client()
        datasets = get_datasets(client)
        profile.clock_stop("GET_Datasets")
        return marshal(datasets, dataset_fields), 200
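
Finally, get_datasets is plausibly a plain kind query over all Dataset entities:

# Hypothetical sketch; dataset_fields would marshal these entities.
def get_datasets(client):
    return list(client.query(kind='Dataset').fetch())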