Ejemplo n.º 1
0
def get_viewer(id):
    """Render the viewer page for the model identified by *id*.

    Aborts with 404 when the id is invalid or expected artifacts are
    missing; returns the error page when processing previously failed.
    """
    if not utils.validate_id(id):
        abort(404)
    d = utils.storage_dir_for_id(id)

    ifc_files = [
        os.path.join(d, name)
        for name in os.listdir(d)
        if os.path.isfile(os.path.join(d, name)) and name.endswith('.ifc')
    ]

    if not ifc_files:
        abort(404)

    # The worker writes a marker file named "failed" when processing broke.
    # Reuse d instead of calling utils.storage_dir_for_id(id) a second time.
    failedfn = os.path.join(d, "failed")
    if os.path.exists(failedfn):
        return render_template('error.html', id=id)

    # Every input .ifc must have a corresponding converted .glb artifact.
    for ifc_fn in ifc_files:
        glbfn = ifc_fn.replace(".ifc", ".glb")
        if not os.path.exists(glbfn):
            abort(404)

    # Multi-file uploads are stored as "<id>_<n>.ifc". Test the basename,
    # not the full path: a storage directory containing "_" would otherwise
    # always be treated as a multi-file upload.
    n_files = len(ifc_files) if "_" in os.path.basename(ifc_files[0]) else None

    return render_template(
        'viewer.html',
        id=id,
        n_files=n_files,
        postfix=PIPELINE_POSTFIX
    )
Ejemplo n.º 2
0
def get_viewer(id):
    """Serve the viewer page for *id*, or an error page / 404 when unavailable."""
    if not utils.validate_id(id):
        abort(404)

    # Processing failures leave a "failed" marker file in the storage dir.
    if os.path.exists(os.path.join(utils.storage_dir_for_id(id), "failed")):
        return render_template('error.html', id=id)

    # Without the converted glTF binary there is nothing to display.
    glb_path = os.path.join(utils.storage_dir_for_id(id), id + ".glb")
    if not os.path.exists(glb_path):
        abort(404)

    return render_template('viewer.html', id=id, postfix=PIPELINE_POSTFIX)
Ejemplo n.º 3
0
def get_viewer(id):
    """Render the multi-file viewer page for *id*.

    Aborts with 404 when the id is invalid or any expected "<id>_<i>.glb"
    artifact is missing.
    """
    if not utils.validate_id(id):
        abort(404)
    d = utils.storage_dir_for_id(id)

    # Count the uploaded .ifc inputs; converted artifacts are named
    # "<id>_<i>.glb" for i in [0, n_files).
    n_files = len([
        name for name in os.listdir(d)
        if os.path.isfile(os.path.join(d, name)) and name.endswith('.ifc')
    ])

    for i in range(n_files):
        glbfn = os.path.join(d, "%s_%d.glb" % (id, i))
        if not os.path.exists(glbfn):
            abort(404)

    # Pass only what the template needs instead of **locals(), which leaked
    # incidental locals (d, i, glbfn) into the template context.
    return render_template('viewer.html', id=id, n_files=n_files)
Ejemplo n.º 4
0
def process_upload_multiple(files, callback_url=None):
    """Store uploaded IFC files under a fresh id and queue their processing.

    Each upload is saved as "<id>_<n>.ifc" and registered in the database.
    Returns the generated id; the actual conversion runs in a background
    thread (development) or on the rq queue (production).
    """
    id = utils.generate_id()
    d = utils.storage_dir_for_id(id)
    os.makedirs(d)

    session = database.Session()
    m = database.model(id, '')
    session.add(m)

    # Save each upload with a sequential suffix and record it in the DB.
    # enumerate() replaces the manual file_id counter, and file.save() is
    # called directly instead of through a pointless lambda wrapper.
    for file_id, file in enumerate(files):
        file.save(os.path.join(d, "%s_%d.ifc" % (id, file_id)))
        m.files.append(database.file(id, ''))

    session.commit()
    session.close()

    if DEVELOPMENT:
        # In development, run the worker inline in a thread so no queue is needed.
        t = threading.Thread(
            target=lambda: worker.process_multiple(id, callback_url))
        t.start()
    else:
        q.enqueue(worker.process_multiple, id, callback_url)

    return id
Ejemplo n.º 5
0
def get_log(id, ext):
    """Return the conversion log for *id* as rendered HTML or raw JSON lines.

    Aborts with 404 for unsupported extensions, invalid ids, or a missing
    log file.
    """
    log_entry_type = namedtuple('log_entry_type',
                                ("level", "message", "instance", "product"))

    if ext not in {'html', 'json'}:
        abort(404)

    if not utils.validate_id(id):
        abort(404)
    logfn = os.path.join(utils.storage_dir_for_id(id), "log.json")
    if not os.path.exists(logfn):
        abort(404)

    if ext == 'html':
        log = []
        # The log is newline-delimited JSON; parse each non-empty line into
        # a log_entry_type, defaulting missing fields to ''. Use a context
        # manager so the file handle is always closed (the original leaked it).
        with open(logfn) as f:
            for line in f:
                stripped = line.strip()
                if stripped:
                    log.append(json.loads(
                        stripped,
                        object_hook=lambda d: log_entry_type(
                            *(d.get(k, '') for k in log_entry_type._fields))))
        return render_template('log.html', id=id, log=log)
    else:
        return send_file(logfn, mimetype='text/plain')
Ejemplo n.º 6
0
def do_process(id):
    """Run the conversion pipeline for *id*, updating progress as tasks finish.

    On the first task failure a "failed" marker file is written and the
    remaining tasks are skipped; progress is driven to 100 at the end
    regardless of outcome.
    """
    d = utils.storage_dir_for_id(id)

    tasks = [
        ifc_validation_task, xml_generation_task, geometry_generation_task,
        svg_generation_task, glb_optimize_task, gzip_task
    ]
    """
    # Create a file called task_print.py with the following
    # example content to add application-specific tasks

    import sys
    
    from worker import task as base_task
    
    class task(base_task):
        est_time = 1    
        
        def execute(self, directory, id):
            print("Executing task 'print' on ", id, ' in ', directory, file=sys.stderr)
    """

    # Discover application-specific tasks dropped in as task_*.py files.
    for fn in glob.glob("task_*.py"):
        mdl = importlib.import_module(fn.split('.')[0])
        tasks.append(mdl.task)

    # Tasks without an explicit 'order' attribute default to order 10.
    tasks.sort(key=lambda t: getattr(t, 'order', 10))

    elapsed = 0
    set_progress(id, elapsed)

    total_est_time = sum(map(operator.attrgetter('est_time'), tasks))

    for t in tasks:
        # Progress window for this task, scaled to 0..99 so the final
        # set_progress(100) below unambiguously marks completion.
        begin_end = (elapsed / total_est_time * 99,
                     (elapsed + t.est_time) / total_est_time * 99)
        task = t(begin_end)
        try:
            task(d, id)
        except Exception:
            # Narrowed from a bare 'except:' so KeyboardInterrupt/SystemExit
            # still propagate. Mark the ID as failed and stop the pipeline.
            with open(os.path.join(d, 'failed'), 'w') as f:
                pass
            break
        elapsed += t.est_time

    elapsed = 100
    set_progress(id, elapsed)
Ejemplo n.º 7
0
def do_process_multiple(id):
    """Run the conversion pipeline over every file of a multi-file upload.

    Inputs are named "<id>_<i>"; each one gets the full task list applied,
    with progress reported across the whole batch.
    """
    d = utils.storage_dir_for_id(id)

    tasks = [
        xml_generation_task, geometry_generation_task, svg_generation_task
    ]
    """
    # Create a file called task_print.py with the following
    # example content to add application-specific tasks

    import sys
    
    from worker import task as base_task
    
    class task(base_task):
        est_time = 1    
        
        def execute(self, directory, id):
            print("Executing task 'print' on ", id, ' in ', directory, file=sys.stderr)
    """

    # Discover application-specific tasks dropped in as task_*.py files.
    for fn in glob.glob("task_*.py"):
        mdl = importlib.import_module(fn.split('.')[0])
        tasks.append(mdl.task)

    # Tasks without an explicit 'order' attribute default to order 10.
    tasks.sort(key=lambda t: getattr(t, 'order', 10))

    elapsed = 0
    set_progress(id, elapsed)

    n_files = len([
        name for name in os.listdir(d) if os.path.isfile(os.path.join(d, name))
    ])

    # The whole task list runs once per file, so the total estimate must be
    # scaled by n_files; without the factor, elapsed outgrows the total and
    # the reported progress overshoots 99 before the batch completes.
    total_est_time = sum(map(operator.attrgetter('est_time'), tasks)) * n_files

    for i in range(n_files):
        print("CHECK", id + "_" + str(i))
        for t in tasks:
            begin_end = (elapsed / total_est_time * 99,
                         (elapsed + t.est_time) / total_est_time * 99)
            task = t(begin_end)
            task(d, id + "_" + str(i))
            elapsed += t.est_time

    elapsed = 100
    set_progress(id, elapsed)
Ejemplo n.º 8
0
def process_upload(filewriter, callback_url=None):
    """Persist a single uploaded IFC via *filewriter* and queue its processing.

    Returns the generated id for the upload.
    """
    id = utils.generate_id()
    storage = utils.storage_dir_for_id(id)
    os.makedirs(storage)

    # The caller supplies the write strategy; we only pick the destination.
    filewriter(os.path.join(storage, id + ".ifc"))

    session = database.Session()
    session.add(database.model(id, ''))
    session.commit()
    session.close()

    if DEVELOPMENT:
        # In development, run the worker inline in a thread instead of rq.
        worker_thread = threading.Thread(
            target=lambda: worker.process(id, callback_url))
        worker_thread.start()
    else:
        q.enqueue(worker.process, id, callback_url)

    return id
Ejemplo n.º 9
0
def do_process(id):
    """Run the full conversion pipeline for *id*.

    Handles both single uploads ("<id>.ifc") and multi-file uploads
    ("<id>_<n>.ifc"): per-file tasks run first, then aggregate-model tasks,
    with progress reported via set_progress() and failures recorded by a
    "failed" marker file in the storage directory.
    """
    d = utils.storage_dir_for_id(id)
    input_files = [name for name in os.listdir(d) if os.path.isfile(os.path.join(d, name))]

    tasks = [
        ifc_validation_task,
        xml_generation_task,
        geometry_generation_task,
        svg_generation_task,
        glb_optimize_task,
        gzip_task
    ]

    # Tasks flagged with aggregate_model=True run once on the combined model
    # after all per-file tasks have finished.
    tasks_on_aggregate = []

    # Multi-file uploads are stored with an underscore suffix ("<id>_<n>.ifc").
    is_multiple = any("_" in n for n in input_files)
    if is_multiple:
        tasks.append(svg_rename_task)

    """
    # Create a file called task_print.py with the following
    # example content to add application-specific tasks

    import sys
    
    from worker import task as base_task
    
    class task(base_task):
        est_time = 1    
        
        def execute(self, directory, id):
            print("Executing task 'print' on ", id, ' in ', directory, file=sys.stderr)
    """

    # Discover application-specific tasks dropped in as task_*.py files.
    for fn in glob.glob("task_*.py"):
        mdl = importlib.import_module(fn.split('.')[0])
        if getattr(mdl.task, 'aggregate_model', False):
            tasks_on_aggregate.append(mdl.task)
        else:
            tasks.append(mdl.task)

    # Tasks without an explicit 'order' attribute default to order 10.
    tasks.sort(key=lambda t: getattr(t, 'order', 10))
    tasks_on_aggregate.sort(key=lambda t: getattr(t, 'order', 10))

    elapsed = 0
    set_progress(id, elapsed)

    # input_files was already listed above; avoid a second os.listdir() pass.
    n_files = len(input_files)

    total_est_time = \
        sum(map(operator.attrgetter('est_time'), tasks)) * n_files + \
        sum(map(operator.attrgetter('est_time'), tasks_on_aggregate))

    def run_task(t, args, aggregate_model=False):
        # Run one task, advancing the shared progress window. Returns False
        # (after writing the "failed" marker) when the task raised.
        nonlocal elapsed
        begin_end = (elapsed / total_est_time * 99, (elapsed + t.est_time) / total_est_time * 99)
        task = t(begin_end)
        try:
            task(d, *args)
        except Exception:
            # Narrowed from a bare 'except:' so KeyboardInterrupt/SystemExit
            # still propagate; the traceback is printed for diagnosis.
            traceback.print_exc(file=sys.stdout)
            # Mark ID as failed
            with open(os.path.join(d, 'failed'), 'w') as f:
                pass
            return False
        elapsed += t.est_time
        return True

    for i in range(n_files):
        for t in tasks:
            if not run_task(t, ["%s_%d" % (id, i) if is_multiple else id]):
                break
        # for/else reaches 'break' only when the inner loop broke (a task
        # failed) -- this exits the outer loop as well.
        else: continue
        break

    for t in tasks_on_aggregate:
        run_task(t, [id, input_files], aggregate_model=True)

    elapsed = 100
    set_progress(id, elapsed)