Esempio n. 1
0
def restore(env, src):
    """Restore a backup created by ``backup`` from *src*.

    *src* may be either a ZIP archive or a directory produced with
    ``nozip=True``.  The embedded SQLite database is opened, the target
    database schema is dropped and recreated, and every ``BackupObject``
    record is replayed through its registered ``BackupBase`` subclass.
    """
    # Message fixed: the assertion requires the source to EXIST (the old
    # "Path already exists!" text was copy-pasted from backup()).
    assert os.path.exists(src), "Source path does not exist!"

    usezip = not os.path.isdir(src)

    if usezip:
        # Extract the bundled SQLite database to a temporary file so
        # SQLAlchemy can open it by filename.
        zipf = ZipFile(src)
        sqlitefile = NamedTemporaryFile(delete=False)
        sqlitesrc = zipf.open('db.sqlite', 'r')
        copyfileobj(sqlitesrc, sqlitefile)
        sqlitesrc.close()
        sqlitefile.close()
        engine = sa.create_engine('sqlite:///' + sqlitefile.name)
    else:
        engine = sa.create_engine('sqlite:///' + ptjoin(src, 'db.sqlite'))

    Base.metadata.bind = engine
    session = sessionmaker(bind=engine)()

    conn = env.core.DBSession.connection()
    conn.execute("BEGIN")

    metadata = env.metadata()

    # Rebuild the target schema from scratch before replaying records.
    metadata.drop_all(conn)
    metadata.create_all(conn)

    for objrec in session.query(BackupObject):
        cls = BackupBase.registry[objrec.identity]

        if objrec.is_binary and usezip:
            binfd = zipf.open(objrec.binfn)
        elif objrec.is_binary and not usezip:
            # Binary payloads must be read as bytes, not text.
            binfd = open(ptjoin(src, objrec.binfn), 'rb')
        else:
            binfd = None

        obj = cls(
            comp=env._components[objrec.comp],
            key=objrec.objkey,
            value=objrec.value,
            binfd=binfd
        )

        obj.restore()

        if binfd:
            binfd.close()

    # Release resources the original version leaked.
    session.close()

    if usezip:
        zipf.close()
        os.remove(sqlitefile.name)

    conn.execute("COMMIT")
Esempio n. 2
0
 def openfile(fn):
     """Return a writable file object for *fn*.

     In ZIP mode a detached temp file (tagged with its archive target) is
     returned; otherwise *fn* is opened directly inside *dst*.
     """
     if not usezip:
         return open(ptjoin(dst, fn), "wb")
     tmpf = NamedTemporaryFile(delete=False)
     tmpf._target = fn
     return tmpf
Esempio n. 3
0
 def openfile(fn):
     """Open *fn* for writing: directly under *dst*, or as a detached
     temp file (tagged with its archive target) when zipping."""
     if not nozip:
         tmpf = NamedTemporaryFile(delete=False)
         tmpf._target = fn
         return tmpf
     return open(ptjoin(dst, fn), 'wb')
Esempio n. 4
0
def restore(env, src):
    """Restore a backup created by ``backup`` from *src* (ZIP or directory).

    Opens the embedded SQLite metadata database, drops and recreates the
    target schema, then replays every ``BackupObject`` record through its
    registered ``BackupBase`` subclass.
    """
    # Message fixed: the assertion requires the source to EXIST (the old
    # "Path already exists!" text was copy-pasted from backup()).
    assert os.path.exists(src), "Source path does not exist!"

    usezip = not os.path.isdir(src)

    if usezip:
        # Extract the embedded SQLite DB so SQLAlchemy can open it by name.
        zipf = ZipFile(src)
        sqlitefile = NamedTemporaryFile(delete=False)
        sqlitesrc = zipf.open("db.sqlite", "r")
        copyfileobj(sqlitesrc, sqlitefile)
        sqlitesrc.close()
        sqlitefile.close()
        engine = sa.create_engine("sqlite:///" + sqlitefile.name)
    else:
        engine = sa.create_engine("sqlite:///" + ptjoin(src, "db.sqlite"))

    Base.metadata.bind = engine
    session = sessionmaker(bind=engine)()

    conn = env.core.DBSession.connection()
    conn.execute("BEGIN")

    metadata = env.metadata()

    # Rebuild the target schema from scratch before replaying records.
    metadata.drop_all(conn)
    metadata.create_all(conn)

    for objrec in session.query(BackupObject):
        cls = BackupBase.registry[objrec.identity]

        if objrec.is_binary and usezip:
            binfd = zipf.open(objrec.binfn)
        elif objrec.is_binary and not usezip:
            # Binary payloads must be read as bytes, not text.
            binfd = open(ptjoin(src, objrec.binfn), "rb")
        else:
            binfd = None

        obj = cls(comp=env._components[objrec.comp], key=objrec.objkey, value=objrec.value, binfd=binfd)

        obj.restore()

        if binfd:
            binfd.close()

    # Release resources the original version leaked.
    session.close()

    if usezip:
        zipf.close()
        os.remove(sqlitefile.name)

    conn.execute("COMMIT")
Esempio n. 5
0
    def execute(cls, args, env):
        """Delete stale upload pairs (``*.meta`` + ``*.data``) older than 3 days.

        Walks ``env.file_upload.path`` bottom-up, removing every meta/data
        pair whose two files were both last modified more than three days
        ago, and pruning directories left empty.  Prints a summary of
        deleted and kept files / directories / bytes.
        """
        path = env.file_upload.path

        deleted_files = 0
        deleted_dirs = 0
        deleted_bytes = 0

        kept_files = 0
        kept_dirs = 0
        kept_bytes = 0

        # Anything last touched before this timestamp is eligible for removal.
        cutstamp = datetime.now() - timedelta(days=3)

        for (dirpath, dirnames, filenames) in walk(path, topdown=False):
            relist = False

            for fn in filenames:
                if not fn.endswith('.meta'):
                    continue

                metaname = ptjoin(dirpath, fn)
                dataname = metaname[:-5] + '.data'
                metastat = stat(metaname)
                try:
                    datastat = stat(dataname)
                except OSError:
                    # Orphaned .meta without its .data twin: skip rather
                    # than crash (the unguarded stat() used to raise here).
                    continue
                metatime = datetime.fromtimestamp(metastat.st_mtime)
                datatime = datetime.fromtimestamp(datastat.st_mtime)

                if (metatime < cutstamp) and (datatime < cutstamp):
                    remove(metaname)
                    remove(dataname)
                    relist = True  # directory contents changed; re-check below
                    deleted_files += 2
                    deleted_bytes += metastat.st_size + datastat.st_size
                else:
                    kept_files += 2
                    kept_bytes += metastat.st_size + datastat.st_size

            # A directory is removed when it was already empty, or when the
            # deletions above emptied it (verified with a fresh listdir).
            if (
                (not relist and len(filenames) == 0 and len(dirnames) == 0)
                or len(listdir(dirpath)) == 0  # NOQA: W503
            ):
                rmdir(dirpath)
                deleted_dirs += 1
            else:
                kept_dirs += 1

        # Parenthesized so the statements are valid on both Python 2 and 3
        # (this file already contains Python 3 annotation syntax elsewhere).
        print("Deleted | %6d files | %6d directories | %12d bytes |" % (
            deleted_files, deleted_dirs, deleted_bytes))
        print("Kept    | %6d files | %6d directories | %12d bytes |" % (
            kept_files, kept_dirs, kept_bytes))
Esempio n. 6
0
    def execute(cls, args, env):
        """Remove files in ``env.file_storage.path`` with no ``FileObj`` record.

        Walks the storage tree bottom-up; every file whose name does not
        match a known ``FileObj`` uuid is deleted, and directories left
        empty are pruned.  Prints a summary of deleted and kept
        files / directories / bytes.
        """
        path = env.file_storage.path

        deleted_files = 0
        deleted_dirs = 0
        deleted_bytes = 0

        kept_files = 0
        kept_dirs = 0
        kept_bytes = 0

        for (dirpath, dirnames, filenames) in walk(path, topdown=False):
            relist = False

            for fn in filenames:
                # File names double as FileObj uuids; no record == orphan.
                obj = FileObj.filter_by(uuid=fn).first()
                fullfn = ptjoin(dirpath, fn)
                size = stat(fullfn).st_size

                if obj is None:
                    remove(fullfn)
                    relist = True  # directory contents changed; re-check below
                    deleted_files += 1
                    deleted_bytes += size
                else:
                    kept_files += 1
                    kept_bytes += size

            # Drop the directory when it was already empty or the removals
            # above emptied it (verified with a fresh listdir).
            if ((not relist and len(filenames) == 0 and len(dirnames) == 0)
                    or len(listdir(dirpath)) == 0):
                rmdir(dirpath)
                deleted_dirs += 1
            else:
                kept_dirs += 1

        # Parenthesized so the statements are valid on both Python 2 and 3
        # (this file already contains Python 3 annotation syntax elsewhere).
        print("Deleted | %6d files | %6d directories | %12d bytes |" % (
            deleted_files, deleted_dirs, deleted_bytes))
        print("Kept    | %6d files | %6d directories | %12d bytes |" % (
            kept_files, kept_dirs, kept_bytes))
Esempio n. 7
0
def run_VISSIM_file(filepath: str, clear_costs=False):
    """Load *filepath* in a fresh VISSIM instance and run the simulation.

    When *clear_costs* is true, any ``.weg``/``.bew`` cost files in the
    model's folder are deleted first so the dynamic assignment starts clean.
    """
    import win32com.client as com2
    from os.path import abspath
    from os.path import join as ptjoin
    from os.path import split as ptsplit

    if clear_costs:
        # Remove cached dynamic-assignment cost/path files next to the model.
        working_folder = ptjoin(*list(ptsplit(abspath(filepath)))[:-1])
        weg_files = files_by_ext(working_folder, '.weg')
        bew_files = files_by_ext(working_folder, '.bew')
        for file in weg_files + bew_files:
            remove(file)

    # start new VISSIM instance and load file then run network
    print("Starting VISSIM for " + ptsplit(abspath(filepath))[-1])
    VissimInst = com2.gencache.EnsureDispatch("Vissim.Vissim")
    VissimInst.SuspendUpdateGUI()
    try:
        VissimInst.LoadNet(filepath)
        VissimInst.Graphics.CurrentNetworkWindow.SetAttValue("QuickMode", 1)
        VissimInst.Simulation.RunContinuous()
    finally:
        # Always restore the GUI and shut VISSIM down, even when the run
        # fails, so no orphaned Vissim.exe with a frozen GUI is left behind.
        VissimInst.ResumeUpdateGUI()
        VissimInst.Exit()
    print("Finished sim for " + ptsplit(abspath(filepath))[-1])
Esempio n. 8
0
def backup(env, dst, nozip=False):
    """Back up every component of *env* into *dst*.

    *dst* becomes a ZIP archive unless *nozip* is true, in which case it
    becomes a directory.  The backup holds a ``db.sqlite`` metadata
    database, a pip ``requirements`` listing and, per component, a
    ``<identity>.bin`` area with binary payloads.
    """
    usezip = not nozip

    assert not os.path.exists(dst), "Path already exists!"

    if usezip:
        zipf = ZipFile(dst, "w")
    else:
        os.mkdir(dst)

    def openfile(fn):
        # ZIP mode writes to a temp file now; putfile() archives it later.
        if usezip:
            tmpf = NamedTemporaryFile(delete=False)
            tmpf._target = fn
            return tmpf
        else:
            return open(ptjoin(dst, fn), "wb")

    def putfile(fd):
        # ZIP mode: flush the temp file into the archive, then discard it.
        if usezip:
            fd.flush()
            fd.close()
            zipf.write(fd.name, fd._target)
            os.remove(fd.name)
        else:
            pass

    sqlitefile = openfile("db.sqlite")
    engine = sa.create_engine("sqlite:///" + sqlitefile.name)

    # Capture stdout BEFORE entering the try block: if openfile() raised
    # inside it, the finally clause would hit a NameError on `stdout`.
    stdout = sys.stdout
    try:
        buf = openfile("requirements")
        sys.stdout = buf
        pip.main(["freeze"])
        putfile(buf)
    finally:
        sys.stdout = stdout

    Base.metadata.bind = engine
    Base.metadata.create_all(engine)
    session = sessionmaker(bind=engine)()

    seq = 0
    for comp in env.chain("initialize"):
        compdir = None

        for itm in comp.backup():
            seq += 1

            obj = BackupObject(
                id=seq, comp=comp.identity, identity=itm.identity, objkey=itm.key, is_binary=itm.is_binary()
            )

            if obj.is_binary:
                if compdir is None:
                    compdir = "%s.bin" % comp.identity
                    if not usezip:
                        os.mkdir(ptjoin(dst, compdir))

                # File names must be filesystem/zip safe; keep them short.
                cleankey = re.sub("[^A-Za-z0-9]", "_", itm.key)[:64]

                obj.binfn = ptjoin(compdir, "%06d-%s" % (obj.id, cleankey))

                itm.binfd = openfile(obj.binfn)

            itm.backup()
            obj.value = itm.value

            if obj.is_binary:
                putfile(itm.binfd)

            session.add(obj)

    session.commit()

    putfile(sqlitefile)

    if usezip:
        # Finalize the archive: without close() the ZIP central directory
        # is never written and the resulting file is unreadable.
        zipf.close()
Esempio n. 9
0
def backup(env, dst, nozip=False):
    """Back up every component of *env* into *dst*.

    *dst* becomes a ZIP archive unless *nozip* is true, in which case it
    becomes a directory.  The backup holds a ``db.sqlite`` metadata
    database, a pip ``requirements`` listing and, per component, a
    ``<identity>.bin`` area with binary payloads.
    """
    if os.path.exists(dst):
        raise RuntimeError("Destination path already exists!")

    if nozip:
        os.mkdir(dst)
    else:
        zipf = ZipFile(dst, 'w', allowZip64=True)

    def openfile(fn):
        # Directory mode writes in place; ZIP mode stages a temp file that
        # putfile() later folds into the archive.
        if nozip:
            return open(ptjoin(dst, fn), 'wb')
        else:
            tmpf = NamedTemporaryFile(delete=False)
            tmpf._target = fn
            return tmpf

    def putfile(fd):
        # ZIP mode: flush the temp file into the archive, then discard it.
        if nozip:
            pass
        else:
            fd.flush()
            fd.close()
            zipf.write(fd.name, fd._target)
            os.remove(fd.name)

    sqlitefile = openfile('db.sqlite')
    engine = sa.create_engine('sqlite:///' + sqlitefile.name)

    # Capture stdout BEFORE entering the try block: if openfile() raised
    # inside it, the finally clause would hit a NameError on `stdout`.
    stdout = sys.stdout
    try:
        buf = openfile('requirements')
        sys.stdout = buf
        pip_main(['freeze', ])
        putfile(buf)
    finally:
        sys.stdout = stdout

    Base.metadata.bind = engine
    Base.metadata.create_all(engine)
    session = sessionmaker(bind=engine)()

    seq = 0
    for comp in env.chain('initialize'):
        compdir = None

        for itm in comp.backup():
            seq += 1

            obj = BackupObject(
                id=seq, comp=comp.identity, identity=itm.identity,
                objkey=itm.key, is_binary=itm.is_binary()
            )

            if obj.is_binary:
                if compdir is None:
                    compdir = '%s.bin' % comp.identity
                    if nozip:
                        os.mkdir(ptjoin(dst, compdir))

                # File names must be filesystem/zip safe; keep them short.
                cleankey = re.sub('[^A-Za-z0-9]', '_', itm.key)[:64]

                obj.binfn = ptjoin(compdir, '%06d-%s' % (
                    obj.id, cleankey))

                itm.binfd = openfile(obj.binfn)

            itm.backup()
            obj.value = itm.value

            if obj.is_binary:
                putfile(itm.binfd)

            session.add(obj)

    session.commit()

    putfile(sqlitefile)

    if not nozip:
        # Finalize the archive: without close() the ZIP central directory
        # is never written and the resulting file is unreadable.
        zipf.close()
Esempio n. 10
0
        np.around(dm * multiple, 0) for dm in demand_matrices
    ]

# Collect the signal-controller files shipped alongside the network so each
# scenario folder below gets its own copy.
signal_files = files_by_ext(Vissim.AttValue('WorkingFolder'), '.sig')

# set OD demand matrices and save each scenario to the working folder
Vissim.SuspendUpdateGUI()
for volname in Volumes.keys():

    # Set OD matrix — one numpy matrix per dynamic-assignment demand segment.
    for index, demand in enumerate(
            DynamicAssignment.DynAssignDemands.GetAll()):
        set_IMatrix_from_numpy(demand.Matrix, Volumes[volname][index])

    # Save the network to a folder named after this volume scenario
    sim_folder = ptjoin(working_folder, volname)
    ensure_folder(sim_folder)
    file_name = ptjoin(sim_folder, volname + ".inpx")
    Vissim.SaveNetAs(file_name)

    # Copy signal controller files next to the saved network
    for sig_file in signal_files:
        copyfile(sig_file, ptjoin(sim_folder, ptsplit(sig_file)[-1]))

    # NOTE(review): this repurposes Volumes in place, replacing the matrix
    # list with the saved-network path — presumably consumed by
    # run_VISSIM_file later; confirm nothing still needs the matrices.
    Volumes[volname] = file_name

Vissim.ResumeUpdateGUI()


# Define function to open each sim file saved from above and run the simulation
def run_VISSIM_file(filepath: str, clear_costs=False):