Example 1
def object_import(request, file):
    """Import a network topology from an uploaded Excel workbook.

    Reads the 'Device' and 'Link' sheets (when present), creates one
    object per row via the factory, and optionally recomputes all pools.
    Returns a human-readable status string.
    """
    if request['replace']:
        delete_all('Device')
    result = 'Topology successfully imported.'
    filename = secure_filename(file.filename)
    if allowed_file(filename, {'xls', 'xlsx'}):
        workbook = open_workbook(file_contents=file.read())
        for sheet_name in ('Device', 'Link'):
            try:
                worksheet = workbook.sheet_by_name(sheet_name)
            except XLRDError:
                # Workbook has no sheet for this object type: skip it.
                continue
            # First row holds the property names; each later row is one object.
            header = worksheet.row_values(0)
            for row in range(1, worksheet.nrows):
                values = dict(zip(header, worksheet.row_values(row)))
                try:
                    factory(sheet_name, **values).serialized
                except Exception as e:
                    # Best-effort import: log the failure and keep going.
                    info(f'{str(values)} could not be imported ({str(e)})')
                    result = 'Partial import (see logs).'
            db.session.commit()
    if request['update_pools']:
        for pool in fetch_all('Pool'):
            pool.compute_pool()
        db.session.commit()
    return result
Example 2
def object_import(request: dict, file: FileStorage) -> str:
    """Import a network topology from an uploaded Excel workbook.

    Reads the 'Device' and 'Link' sheets (when present), creates one object
    per row via the factory, and optionally recomputes all pools.
    Returns a human-readable status string.
    """
    if request["replace"]:
        # Wipe existing devices before importing the new topology.
        delete_all("Device")
    result = "Topology successfully imported."
    if allowed_file(secure_filename(file.filename), {"xls", "xlsx"}):
        book = open_workbook(file_contents=file.read())
        for obj_type in ("Device", "Link"):
            try:
                sheet = book.sheet_by_name(obj_type)
            except XLRDError:
                # Workbook has no sheet for this object type: skip it.
                continue
            # First row holds the property names; each later row is one object.
            properties = sheet.row_values(0)
            for row_index in range(1, sheet.nrows):
                prop = dict(zip(properties, sheet.row_values(row_index)))
                try:
                    factory(obj_type, **prop).serialized
                except Exception as e:
                    # Best-effort import: log the failure and keep going.
                    info(f"{str(prop)} could not be imported ({str(e)})")
                    result = "Partial import (see logs)."
            db.session.commit()
    if request["update_pools"]:
        for pool in fetch_all("Pool"):
            pool.compute_pool()
        db.session.commit()
    return result
Example 3
def migrate_import(app: Flask, request: dict) -> str:
    """Import database objects from per-class YAML migration files.

    Workflows are imported in two passes (first stripped of jobs and
    edges, then complete) because subworkflows and edges reference
    objects that do not exist during the first pass.
    Returns a human-readable status string.
    """
    status = "Import successful."
    types = request["import_export_types"]
    workflows: list = []
    edges: list = []

    def create(cls: str, properties: dict) -> None:
        # Instantiate a single object, downgrading the overall status
        # (but not aborting) when creation fails.
        nonlocal status
        try:
            factory(cls, **properties)
        except Exception as e:
            info(f"{str(properties)} could not be imported ({str(e)})")
            status = "Partial import (see logs)."

    if request.get("empty_database_before_import", False):
        delete_all(*types)
    for cls in types:
        path = app.path / "migrations" / request["name"] / f"{cls}.yaml"
        with open(path, "r") as migration_file:
            objects = load(migration_file)
        if cls == "Workflow":
            workflows = deepcopy(objects)
        if cls == "WorkflowEdge":
            # Edges are deferred entirely to the final pass (see below).
            edges = deepcopy(objects)
            continue
        for obj in objects:
            obj_cls = obj.pop("type") if cls == "Service" else cls
            # 1) We cannot import workflow edges before workflow, because a
            # workflow edge is defined by the workflow it belongs to.
            # Therefore, we import workflow before workflow edges but
            # strip off the edges, because they do not exist at this stage.
            # Edges will be defined later on upon importing workflow edges.
            # 2) At this stage, we cannot import jobs, because if workflows
            # A (ID 1) and B (ID 2) are created, and B is added to A as a
            # subworkflow, we won't be able to create A as B is one of its
            # jobs and does not exist yet. To work around this, we will
            # strip off the jobs at this stage, and reimport workflows a
            # second time at the end.
            if cls == "Workflow":
                obj["edges"], obj["jobs"] = [], []
            create(obj_cls, obj)
    # Second pass: workflows with their jobs (edges still deferred).
    for workflow in workflows:
        workflow["edges"] = []
        create("Workflow", workflow)
    # Final pass: edges, now that all workflows and jobs exist.
    for edge in edges:
        create("WorkflowEdge", edge)
    if request.get("empty_database_before_import", False):
        create_default(app)
    return status
Example 4
def migrate_import(path_app, request):
    """Import database objects from per-class YAML migration files.

    For each requested type, loads '<type>.yaml' from the named migration
    directory and creates one object per entry via the factory.
    Returns a human-readable status string.
    """
    status = 'Import successful.'
    if request.get('empty_database_before_import', False):
        delete_all(*request['import_export_types'])
    for cls in request['import_export_types']:
        path = path_app / 'migrations' / request['name'] / f'{cls}.yaml'
        with open(path, 'r') as migration_file:
            objects = load(migration_file)
        for obj in objects:
            # Service entries carry their concrete subclass in 'type'.
            if cls == 'Service':
                obj_cls = obj.pop('type')
            else:
                obj_cls = cls
            try:
                factory(obj_cls, **obj)
            except Exception as e:
                # Best-effort import: log the failure and keep going.
                info(f'{str(obj)} could not be imported ({str(e)})')
                status = 'Partial import (see logs).'
    return status
Example 5
def migrate_import(app, request):
    """Import database objects from per-class YAML migration files.

    Workflows are imported in two passes (first stripped of jobs and
    edges, then complete) because subworkflows and edges reference
    objects that do not exist during the first pass.
    Returns a human-readable status string.
    """
    status = 'Import successful.'
    # Fix: initialize both lists up front. Previously they were only
    # assigned inside the loop, so the final passes below raised a
    # NameError whenever 'Workflow' or 'WorkflowEdge' was not among the
    # imported types.
    workflows, edges = [], []
    if request.get('empty_database_before_import', False):
        delete_all(*request['import_export_types'])
    # NOTE(review): the [:-1] slice skips the last requested type — looks
    # intentional (WorkflowEdge last?) but verify against the caller.
    for cls in request['import_export_types'][:-1]:
        path = app.path / 'migrations' / request['name'] / f'{cls}.yaml'
        with open(path, 'r') as migration_file:
            objects = load(migration_file)
            if cls == 'Workflow':
                workflows = deepcopy(objects)
            if cls == 'WorkflowEdge':
                # Edges are deferred entirely to the final pass below.
                edges = deepcopy(objects)
                continue
            for obj in objects:
                # Service entries carry their concrete subclass in 'type'.
                obj_cls = obj.pop('type') if cls == 'Service' else cls
                # 1) We cannot import workflow edges before workflow, because a
                # workflow edge is defined by the workflow it belongs to.
                # Therefore, we import workflow before workflow edges but
                # strip off the edges, because they do not exist at this stage.
                # Edges will be defined later on upon importing workflow edges.
                # 2) At this stage, we cannot import jobs, because if workflows
                # A (ID 1) and B (ID 2) are created, and B is added to A as a
                # subworkflow, we won't be able to create A as B is one of its
                # jobs and does not exist yet. To work around this, we will
                # strip off the jobs at this stage, and reimport workflows a
                # second time at the end.
                if cls == 'Workflow':
                    obj['edges'], obj['jobs'] = [], []
                try:
                    factory(obj_cls, **obj)
                except Exception as e:
                    info(f'{str(obj)} could not be imported ({str(e)})')
                    status = 'Partial import (see logs).'
    # Second pass: workflows with their jobs (edges still deferred).
    # Fix: wrap the factory calls like the per-object loop above, so one
    # bad workflow/edge degrades the status instead of aborting the import.
    for workflow in workflows:
        workflow['edges'] = []
        try:
            factory('Workflow', **workflow)
        except Exception as e:
            info(f'{str(workflow)} could not be imported ({str(e)})')
            status = 'Partial import (see logs).'
    # Final pass: edges, now that all workflows and jobs exist.
    for edge in edges:
        try:
            factory('WorkflowEdge', **edge)
        except Exception as e:
            info(f'{str(edge)} could not be imported ({str(e)})')
            status = 'Partial import (see logs).'
    if request.get('empty_database_before_import', False):
        create_default(app)
    return status