Example #1
# Imports needed by this route; the session and IndividualReportFactory paths
# are taken from the loader script below. InvalidUsage and check_param are
# project-local helpers (sketched after the function).
from flask import request, abort
from sqlalchemy.exc import SQLAlchemyError

from analysis.utils.db import session
from analysis.utils.factory import IndividualReportFactory


def add_person():
    if not request.json:
        raise InvalidUsage("No data supplied")

    # Validate the payload top-down: the envelope, the data object, and the
    # nested timestamp object.
    check_param(request.json, ['id', 'data'])
    check_param(request.json['data'],
                ['diagnostic', 'locator', 'sessionId', 'timestamp'])
    check_param(request.json['data']['timestamp'],
                ['_seconds', '_nanoseconds'])

    # Build first, insert second, so a database failure maps to a 500 and a
    # malformed payload to a 400.
    try:
        report = IndividualReportFactory.build(request.json)
    except TypeError:
        raise InvalidUsage(
            "Some parameter was wrongly typed (string, int, array).")
    except Exception:
        abort(400, "Could not create Individual Report. "
                   "Probably malformed JSON: %s" % (request.json,))

    try:
        session.add(report)
        session.commit()
    except SQLAlchemyError:
        session.rollback()
        abort(500, "Could not insert into database")
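

# The route above relies on two project-local helpers that this example does
# not define. A minimal sketch of what they presumably look like, assuming
# check_param raises InvalidUsage for any missing key (the real project
# definitions may differ):
class InvalidUsage(Exception):
    """400-style error for bad client input (the usual Flask error-handler pattern)."""
    status_code = 400


def check_param(payload, required):
    """Raise InvalidUsage when any required key is missing from payload."""
    missing = [key for key in required if key not in payload]
    if missing:
        raise InvalidUsage("Missing parameters: " + ", ".join(missing))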


# Imports for the hourly downloader. DATE_HOUR_PARAM_FORMAT and
# download_worker are project-local; the BACKUP_DOCUMENTS_PATH, session, and
# IndividualReportFactory paths are taken from the loader script below.
import json
from datetime import datetime
from multiprocessing import Pool

from sqlalchemy.exc import IntegrityError

from analysis import BACKUP_DOCUMENTS_PATH
from analysis.utils.db import session
from analysis.utils.factory import IndividualReportFactory


def download_hours_frame(_from_hour: str, _to_hour: str, worker_frame_minute: int = 10):
    # The pool size and minute windows below assume the frame divides an hour.
    if 60 % worker_frame_minute != 0:
        raise ValueError("worker_frame_minute must divide 60 evenly")

    _from = int(datetime.strptime(_from_hour, DATE_HOUR_PARAM_FORMAT).timestamp())
    _to = int(datetime.strptime(_to_hour, DATE_HOUR_PARAM_FORMAT).timestamp())

    # timestamped directory for this run's raw json backups
    now_str = datetime.now().strftime("%Y-%m-%d-%H-%M-%S")
    backup_dir = BACKUP_DOCUMENTS_PATH / ('download-time-' + now_str)
    backup_dir.mkdir(exist_ok=True)

    # iterate over hours
    _previous = _from
    _next = _previous + 60 * 60
    while _next <= _to:
        _prev_str = datetime.fromtimestamp(_previous).strftime(DATE_HOUR_PARAM_FORMAT)
        _next_str = datetime.fromtimestamp(_next).strftime(DATE_HOUR_PARAM_FORMAT)
        print("Download from " + _prev_str + " to " + _next_str)

        # split the hour into worker_frame_minute-sized windows
        step = 60 * worker_frame_minute
        minutes = [(start, start + step)
                   for start in range(_previous, _next, step)]

        # one pool worker per minute window; flatten the per-worker results
        hour_data = []
        with Pool(processes=60 // worker_frame_minute) as pool:
            for worker_results in pool.map(download_worker, minutes):
                hour_data.extend(worker_results)

        # save the hour's raw results to a json backup file
        if hour_data:
            file_path = backup_dir / ('data-' + _prev_str + '_' + _next_str + '.json')
            with file_path.open('w') as outfile:
                json.dump(hour_data, outfile)

        try:
            # fast path: commit the whole hour as one batch
            for report_json in hour_data:
                report = IndividualReportFactory.build(report_json)
                session.add(report)
            session.commit()
        except IntegrityError:
            session.rollback()
            # the batch held a duplicate; retry the reports one by one
            print('Warn: Duplicate in batch commit')
            for report_json in hour_data:
                try:
                    report = IndividualReportFactory.build(report_json)
                    session.add(report)
                    session.commit()
                except IntegrityError:
                    session.rollback()
                    print('Warn: Duplicate doc id: ' + report_json['id'])

        _previous = _next
        _next = _previous + (60 * 60)
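

# Example invocation. DATE_HOUR_PARAM_FORMAT is project-defined; an
# hour-resolution format such as "%Y-%m-%d-%H" is assumed here, as is
# download_worker taking a (start_ts, end_ts) unix-timestamp tuple and
# returning a list of report dicts:
if __name__ == '__main__':
    download_hours_frame('2020-03-25-00', '2020-03-25-06', worker_frame_minute=10)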
"""
Created by nuffer at 3/25/20

"""
import json
from analysis.utils.factory import IndividualReportFactory
from analysis.utils.db import session
from analysis import BACKUP_DOCUMENTS_PATH

if __name__ == '__main__':

    # replay every backed-up json file into the database
    for backup_dir in BACKUP_DOCUMENTS_PATH.glob('*'):
        for json_path in backup_dir.glob('*.json'):
            print('Loading file: ' + str(json_path))
            with json_path.open() as json_file:
                data = json.load(json_file)
            for report_json in data:
                session.add(IndividualReportFactory.build(report_json))
            session.commit()
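

# A hedged variant, assuming backup directories can overlap across downloader
# runs: it reuses the IntegrityError fallback from download_hours_frame so a
# single duplicate id does not abort a whole file (IntegrityError is
# sqlalchemy.exc.IntegrityError; the other names match the script above).
def load_file_tolerating_duplicates(json_path):
    with json_path.open() as json_file:
        data = json.load(json_file)
    try:
        # fast path: commit the whole file as one batch
        for report_json in data:
            session.add(IndividualReportFactory.build(report_json))
        session.commit()
    except IntegrityError:
        session.rollback()
        # the batch held a duplicate; retry the reports one by one
        for report_json in data:
            try:
                session.add(IndividualReportFactory.build(report_json))
                session.commit()
            except IntegrityError:
                session.rollback()
                print('Warn: Duplicate doc id: ' + report_json['id'])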