Example no. 1
def fichier():

    nom = "../../" + input("Nom du fichier : ")

    rep = input("Format décimale: . séparateur: , ? ")
    if "non" in rep:
        # Convert the file so the program can read it
        fonctions.Lisible(nom, 1)

    try:
        user = input("identifiant : ")
        log = input("logiciel utilisé : ")
        index = input("présence d'un index ? ")
        titre = input("présence d'une première ligne avec les titres ? ")

        freq = input("fréquence du réseau : ")
        NbrSim = input("Nombre de simulation dans ce fichier : ")
        unite = input("Unité des valeurs ?")

        SignauxA = input("Nombre de signaux analogiques : ")
        SignauxD = input("Nombre de signaux digitaux : ")
        NbrSignaux = int(SignauxA) + int(SignauxD)

        dat.dat(nom, titre, index, NbrSim, NbrSignaux, 1)
        cfg.cfg(nom, user, log, index, titre, freq, NbrSim, unite, SignauxA,
                SignauxD, 1)

        print("Fichier converti")
        # Signals the end of the conversion

    except FileNotFoundError:

        print("Pas de fichier trouvé à ce nom ")
Example no. 2
def OpenConnection():
    connection = Connection()
    connection.C = bottle.request.json

    for ipf in IP_FILTERS:
        if IpMatch(i_ip=connection['ip'], i_regexp=ipf):
            print("Filter out IP ", connection['ip'], ipf)
            return

    try:
        geoloc = IP2GeoLoc(connection['ip'], cfg()['GEOIP']['key'])
        if geoloc:
            connection['lat'] = geoloc['latitude']
            connection['lon'] = geoloc['longitude']
            connection['country'] = geoloc['country_name']
            connection['city'] = geoloc['city']
    except:
        print(traceback.format_exc())
        print("Geo Location Failed")

    DB.OpenConnection(connection)

    notify_msg = 'Open {}#{} {}/{}'.format(connection['ip'],
                                           connection['server_instance'],
                                           connection['city'],
                                           connection['country'])
    for nr in G_NOTIFY_RECIPENTS:
        nr(notify_msg)

    # return connection updated with geolocation info
    bottle.response.content_type = "application/javascript"
    return connection.json()
Example no. 3
    def analyze(il, fl, re, docfg=False):
        """
        Analyze code
        :param il: instruction list
        :param fl: function list
        :param re: symbol reconstruction object
        :param docfg: True to evaluate call graph and cfg
        :return: [fbl, block labels, CFG table, CG table,] instruction list, symbol reconstruction object
        """
        u_fl = filter(lambda f: not f.is_lib, fl)

        if docfg:
            _cg = cg()
            _cg.set_funcs(fl)
            il = _cg.visit(il)

        il = re.adjust_loclabel(il)
        re.reassemble_dump(u_fl)
        il = re.adjust_jmpref(il)

        if docfg:
            _cfg = cfg()
            _cfg.set_funcs(fl)
            il = _cfg.visit(il)
            bbl = _cfg.get_bbl()
            il = re.add_bblock_label(bbl, il)
            return (_cfg.get_fbl(), bbl, _cfg.get_cfg_table(il), _cg.get_cg_table(), il, re)

        return (None, None, None, None, il, re)
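
A minimal, hypothetical call-site sketch of the function above: `Analysis`, `il`, `fl`, and `sym_rec` are illustrative names that do not appear in the original snippet, and `analyze` is assumed to be a static helper on its enclosing class.

# Hypothetical call site; Analysis, il, fl and sym_rec are illustrative names
# standing in for the enclosing class and the pipeline's earlier outputs.
fbl, bbl, cfg_table, cg_table, il, sym_rec = Analysis.analyze(
    il, fl, sym_rec, docfg=True)

# With docfg=False only the instruction list and the symbol-reconstruction
# object are updated; the first four slots come back as None.
_, _, _, _, il, sym_rec = Analysis.analyze(il, fl, sym_rec, docfg=False)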
Example no. 4
	def __init__(self, mypaf, cfgfile):


		self.mypaf = mypaf
		self.cfg   = cfg.cfg(self, cfgfile)
		self.db    = mypaf.db
		self.vb    = mypaf.vb
		self.vb.call("input", "__init__", [self, mypaf, cfgfile], "Initializing input class.") 

		self.build()
		self.setSources()
Example no. 5
def dossier():
    folder = "../../" + input("Nom du dossier : ")
    print("Assurez vous que tous les fichiers soient des .csv")
    try:

        files = [f for f in listdir(folder) if isfile(join(folder, f))]

        rep = input("Format décimale: . séparateur: , ? ")

        user = input("identifiant : ")
        log = input("logiciel utilisé : ")
        index = input("présence d'un index ? ")
        titre = input("présence d'une première ligne avec les titres ? ")

        freq = input("fréquence des réseaux : ")
        NbrSim = input("Nombre de simulation dans les fichiers : ")
        unite = input("Unité des valeurs ?")

        SignauxA = input("Nombre de signaux analogiques : ")
        SignauxD = input("Nombre de signaux digitaux : ")
        NbrSignaux = int(SignauxA) + int(SignauxD)

        for i in files:
            fichier = folder + "/" + i

            if "non" in rep:
                # Convert the file so the program can read it
                fonctions.Lisible(fichier, 2)

            dat.dat(fichier, titre, index, NbrSim, NbrSignaux, 2)
            cfg.cfg(fichier, user, log, index, titre, freq, NbrSim, unite,
                    SignauxA, SignauxD, 2)
        print("Tous les fichiers du dossier sont convertis")
    except FileNotFoundError:
        print(
            "Le dossier est introuvable ou un fichier n'est pas dans le bon format"
        )
Example no. 6
def main():

    # load config
    config_file = './user.ini'
    if len(sys.argv) > 1:
        config_file = sys.argv[1]
    cfg(config_file)
    pprint(cfg())
    print("\n")

    # spyserver instances
    exec_path = cfg()['SPYSERVER']['exe']
    spy_configs = cfg()['SPYSERVER']['cfg_list']

    SPYSERVERS = []
    for spy_conf in spy_configs:
        SPYSERVERS.append(
            SpyServerMonitor(
                exec_path,
                spy_conf,
                [cfg()['DB']['ip'], cfg()['DB']['port']],
                no_lan_skip=not cfg()['MONITOR']['ignore_local_connections']))

    for ss in SPYSERVERS:
        ss.Start()
        time.sleep(1)

    try:
        while (True):
            time.sleep(1)
    except KeyboardInterrupt:
        pass
    except:
        print(traceback.format_exc())

    for ss in SPYSERVERS:
        try:
            ss.Stop()
        except:
            print(traceback.format_exc())
Example no. 7
	def loadXml(self,node):
		self.getText(node.childNodes)
		if node.nodeType!=Node.ELEMENT_NODE:
			for n in node.childNodes:
				if n.nodeType==Node.ELEMENT_NODE and n.localName == 'xdl_resource_report':
					self.loadXml(n)
		else:
			for n in node.childNodes:
				if n.nodeType==Node.ELEMENT_NODE and n.localName == 'pin':
					el=pin()
					el.loadXml(n)
					self.set_pin(el)
		
			for n in node.childNodes:
				if n.nodeType==Node.ELEMENT_NODE and n.localName == 'conn':
					el=conn()
					el.loadXml(n)
					self.set_conn(el)
		
			for n in node.childNodes:
				if n.nodeType==Node.ELEMENT_NODE and n.localName == 'cfg':
					el=cfg()
					el.loadXml(n)
					self.set_cfg(el)
		
			if node.hasAttributes():
				attrs=node.attributes
				attrId='a1'
				if attrId in attrs.keys():
					self.a1=str(attrs[attrId].value)
		
				attrId='a0'
				if attrId in attrs.keys():
					self.a0=str(attrs[attrId].value)
		
				attrId='a3'
				if attrId in attrs.keys():
					self.a3=str(attrs[attrId].value)
		
				attrId='a2'
				if attrId in attrs.keys():
					self.a2=str(attrs[attrId].value)
Example no. 8
    def analyze_one(il, fl, re):
        _cfg = cfg()
        _cg = cg()
        _cg.set_funcs(fl)
        _cfg.set_funcs(fl)
        u_fl = filter(lambda f: not f.is_lib, fl)
        print '     user defined func number', len(u_fl)
        _il = _cg.visit(il)
        _il = re.visit_type_infer_analysis([], _il)
        _il = re.share_lib_processing(_il)
        _il = re.adjust_loclabel(_il)

        re.reassemble_dump(u_fl)

        _il = re.adjust_jmpref(_il)
        _il = _cfg.visit(_il)

        bbl = _cfg.get_bbl()
        return (_cfg.get_fbl(), bbl, _cfg.get_cfg_table(_il),
                _cg.get_cg_table(), re.add_bblock_label(bbl, _il), re)
Example no. 9



#from skimage import img_as_ubyte, img_as_float
#from skimage.transform import rescale
#from cntk_helpers import *

#%matplotlib qt5


# =============================================================================
# PARAMETERS
# =============================================================================

p = cfg()

size_small = int(p.params['smallsize'])
ppmm = int(p.params['pixpermm'])

data_dir = r'E:\OneDrive\KS-XR\X-ray képek\201710'
save_dir = r'E:\OneDrive\KS-XR\X-ray képek\Test\roi'
save_dir_temp = r'E:\OneDrive\KS-XR\X-ray képek\Test\roi_crop'


measure_ids = dirlist_onelevel(data_dir)

ims = []


for measure_id in measure_ids:
Example no. 10
def main():

    # load config
    config_file = './user.ini'
    if len(sys.argv) > 1:
        config_file = sys.argv[1]
    cfg(config_file)
    pprint(cfg())
    print("\n")

    dbfile = cfg()['DB']['file']
    host = cfg()['DB']['ip']
    port = int(cfg()['DB']['port'])

    global IP_FILTERS
    IP_FILTERS = cfg()['DB']['ip_filters']

    # notifiers
    global G_NOTIFY_RECIPENTS

    # G_NOTIFY_RECIPENTS = [ NotifySlack() ]
    # NotifySlack.NotifySlack() will not work with python < 3.6 - and armbian 5.7
    # therefore use system call to execute with python 2
    if 'SLACK' in cfg() and cfg()['SLACK']['use']:
        _key = cfg()['SLACK']['key']
        _chan = cfg()['SLACK']['channel']
        _cmd = './NotifySlack.py {} {} '.format(_key, _chan)
        G_NOTIFY_RECIPENTS.append(lambda msg: os.system(_cmd + msg))
        print("Adding slack notifications on channel ", _chan)
        print("\n")

    print('{}:{} {}'.format(host, port, dbfile))
    print('@{}'.format(getpass.getuser()))
    print("IP Filters: ", IP_FILTERS)
    print("Current dir: ", CurDir())

    os.chdir(CurDir())

    global DB
    DB = ConnectionsDB(dbfile)
    RUN(host, port)
Example no. 11
def make_training_patches_and_fit(arg_cfg, out_dir=None):
    data = dict()

    # Update config
    cfg_dict = cfg.cfg()
    arg_cfg['seq_type'] = cfg.datasetdir_to_type(arg_cfg['ds_dir'])
    cfg_dict.update(arg_cfg)
    conf = cfg.Bunch(cfg_dict)
    conf.out_dir_prefix = 'exp_vilar'

    # Write config to result dir
    if (out_dir is None):
        if ('dataOutDir' not in arg_cfg):
            conf.dataOutDir = utls.getDataOutDir(conf.dataOutRoot, conf.ds_dir,
                                                 conf.resultDir,
                                                 conf.out_dir_prefix,
                                                 conf.testing)
    else:
        conf.dataOutDir = out_dir

    conf.myGaze_fg = utls.readCsv(
        os.path.join(conf.root_path, conf.ds_dir, conf.locs_dir,
                     conf.csvFileName_fg))

    # Set logger
    utls.setup_logging(conf.dataOutDir)

    logger = logging.getLogger('vilar')

    logger.info('---------------------------')
    logger.info('Extracting training patches and fitting on: ' + conf.ds_dir)
    logger.info('type of sequence: ' + conf.seq_type)
    logger.info('gaze filename: ' + conf.csvFileName_fg)
    logger.info('Result dir:')
    logger.info(conf.dataOutDir)
    logger.info('n_frames : ' + str(conf.vilar_n_frames))
    logger.info('(gamma,C) = (' + str(conf.gamma) + ', ' + str(conf.C) + ')')
    logger.info(conf.dataOutDir)
    logger.info('---------------------------')

    # Make frame file names
    gt_dir = os.path.join(conf.root_path, conf.ds_dir, conf.truth_dir)
    gtFileNames = utls.makeFrameFileNames(conf.frame_prefix, conf.frameDigits,
                                          conf.truth_dir, conf.root_path,
                                          conf.ds_dir, conf.frame_extension)

    conf.frameFileNames = utls.makeFrameFileNames(conf.frame_prefix,
                                                  conf.frameDigits,
                                                  conf.frameDir,
                                                  conf.root_path, conf.ds_dir,
                                                  conf.frame_extension)

    # conf.myGaze_fg = utls.readCsv(conf.csvName_fg)
    conf.myGaze_fg = utls.readCsv(
        os.path.join(conf.root_path, conf.ds_dir, conf.locs_dir,
                     conf.csvFileName_fg))

    # conf.myGaze_bg = utls.readCsv(conf.csvName_bg)
    gt_positives = utls.getPositives(gtFileNames)

    conf.precomp_desc_path = os.path.join(conf.dataOutRoot, conf.ds_dir,
                                          conf.feats_dir)

    my_dataset = ds.DatasetVilar(conf)

    with open(os.path.join(conf.dataOutDir, 'cfg.yml'), 'w') as outfile:
        yaml.dump(conf, outfile, default_flow_style=True)

    # Extract seen and unseen patches (to fit SVC)
    if (os.path.exists(
            os.path.join(conf.dataOutDir, 'vilar', 'vilar_seen_patches_df.p'))
            & os.path.exists(
                os.path.join(conf.dataOutDir, 'vilar',
                             'vilar_unseen_patches_df.p'))):
        logger.info('seen and unseen patches already computed. Skipping.')
    else:
        my_dataset.calc_training_patches(save=True)

    if (not os.path.exists(os.path.join(conf.dataOutDir, 'clf.p'))):

        my_dataset.load_patches()

        X_pos = df_to_mat(my_dataset.seen_patches_df)
        X_neg = df_to_mat(my_dataset.unseen_patches_df)
        X_train = np.concatenate((X_pos, X_neg))

        y_pos = np.ones(df_to_mat(my_dataset.seen_patches_df).shape[0])
        y_neg = np.zeros(df_to_mat(my_dataset.unseen_patches_df).shape[0])
        y_train = np.concatenate((y_pos, y_neg))

        clf = SVC(gamma=conf.gamma,
                  C=conf.C,
                  class_weight='balanced',
                  verbose=True)
        logger.info('fitting')
        clf.fit(X_train, y_train)
        with open(os.path.join(conf.dataOutDir, 'clf.p'), 'wb') as f:
            pk.dump(clf, f)
        logger.info('Saved classifier.')
    else:
        with open(os.path.join(conf.dataOutDir, 'clf.p'), 'rb') as f:
            clf = pk.load(f)

    return conf, clf
Example no. 12
# Script:  dump2cfg.py
# Purpose: convert a LAMMPS dump file to CFG format
# Syntax:  dump2cfg.py dumpfile Nid Ntype Nx Ny Nz cfgfile
#          dumpfile = LAMMPS dump file in native LAMMPS format
#          Nid,Ntype,Nx,Ny,Nz = columns #s for ID,type,x,y,z
#                               (usually 1,2,3,4,5)
#          cfgfile = new CFG file
# Author:  Steve Plimpton (Sandia), sjplimp at sandia.gov

import sys, os
path = os.environ["LAMMPS_PYTHON_TOOLS"]
sys.path.append(path)
from dump import dump
from cfg import cfg

if len(sys.argv) != 8:
    raise StandardError, "Syntax: dump2cfg.py dumpfile Nid Ntype Nx Ny Nz cfgfile"

dumpfile = sys.argv[1]
nid = int(sys.argv[2])
ntype = int(sys.argv[3])
nx = int(sys.argv[4])
ny = int(sys.argv[5])
nz = int(sys.argv[6])
cfgfile = sys.argv[7]

d = dump(dumpfile)
d.map(nid, "id", ntype, "type", nx, "x", ny, "y", nz, "z")
c = cfg(d)
c.one(cfgfile)
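
As a hedged illustration of the same API used programmatically (rather than via command-line arguments), the sketch below drives the dump and cfg classes directly; the file names are placeholders and the column numbers follow the "usually 1,2,3,4,5" note above.

# Illustrative programmatic use of the same dump/cfg classes;
# "dump.example" and "out.cfg" are placeholder file names.
import sys, os
sys.path.append(os.environ["LAMMPS_PYTHON_TOOLS"])
from dump import dump
from cfg import cfg

d = dump("dump.example")                            # native LAMMPS dump file
d.map(1, "id", 2, "type", 3, "x", 4, "y", 5, "z")   # usual column layout
c = cfg(d)
c.one("out.cfg")                                    # write a single CFG snapshot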
Example no. 13
    def gen(self):
        fh = open(self.package_name + ".sv", "w")
        fh.write(self.header.replace("file_name", self.package_name + ".sv"))
        fh.write("`ifndef _%s_\n" % (self.package_name.upper()))
        fh.write("`define _%s_\n" % (self.package_name.upper()))
        fh.write("\n")
        fh.write("package %s;\n" % (self.package_name))
        fh.write("  import uvm_pkg::*;\n")
        fh.write("\n")
        fh.write("  `include \"%s.sv\"\n" % (self.defines_name))
        fh.write("  `include \"%s.sv\"\n" % (self.config_name))
        fh.write("  `include \"%s.sv\"\n" % (self.transaction_name))
        fh.write("  `include \"%s.sv\"\n" % (self.config_name))
        fh.write("  `include \"%s.sv\"\n" % (self.callback_name))
        fh.write("  `include \"%s.sv\"\n" % (self.cov_callback_name))
        fh.write("  `include \"%s.sv\"\n" % (self.master_driver_name))
        fh.write("  `include \"%s.sv\"\n" % (self.master_sequencer_name))
        fh.write("  `include \"%s.sv\"\n" % (self.master_sequence_name))
        fh.write("  `include \"%s.sv\"\n" % (self.slave_driver_name))
        fh.write("  `include \"%s.sv\"\n" % (self.slave_sequencer_name))
        fh.write("  `include \"%s.sv\"\n" % (self.slave_sequence_name))
        fh.write("  `include \"%s.sv\"\n" % (self.monitor_name))
        fh.write("  `include \"%s.sv\"\n" % (self.master_agent_name))
        fh.write("  `include \"%s.sv\"\n" % (self.slave_agent_name))
        fh.write("\n")
        fh.write("endpackage: %s\n" % (self.package_name))
        fh.write("\n")
        fh.write("`endif //_%s_\n" % (self.package_name.upper()))
        fh.close()

        #Generate agent components
        agent_defines = defines.defines(self.header, self.agent_setting)
        agent_defines.gen()

        agent_interface = interface.interface(self.header, self.agent_setting)
        agent_interface.gen()

        agent_cfg = cfg.cfg(self.header, self.agent_setting)
        agent_cfg.gen()

        agent_transaction = transaction.transaction(self.header,
                                                    self.agent_setting)
        agent_transaction.gen()

        agent_sequencer = sequencer.sequencer(self.header, self.agent_setting)
        agent_sequencer.sequencer_gen()

        agent_sequence = sequence.sequence(self.header, self.agent_setting)
        agent_sequence.sequence_gen()

        agent_drv = driver.driver(self.header, self.agent_setting)
        agent_drv.master_driver_gen()
        agent_drv.slave_driver_gen()

        agent_mon = monitor.monitor(self.header, self.agent_setting)
        agent_mon.monitor_gen()

        agent_callback = callback.callback(self.header, self.agent_setting)
        agent_callback.callback_gen()
        agent_callback.cov_callback_gen()

        agent_agent = agent.agent(self.header, self.agent_setting)
        agent_agent.agent_gen()
Example no. 14
def main(arg_cfg):
    data = dict()

    #Update config
    cfg_dict = cfg.cfg()
    arg_cfg['seq_type'] = cfg.datasetdir_to_type(arg_cfg['ds_dir'])
    cfg_dict.update(arg_cfg)
    conf = cfg.Bunch(cfg_dict)

    #Write config to result dir
    conf.dataOutDir = utls.getDataOutDir(conf.dataOutRoot, conf.ds_dir,
                                         conf.resultDir, conf.out_dir_prefix,
                                         conf.testing)

    #Set logger
    utls.setup_logging(conf.dataOutDir)

    logger = logging.getLogger('feat_extr')

    logger.info('---------------------------')
    logger.info('starting feature extraction on: ' + conf.ds_dir)
    logger.info('type of sequence: ' + conf.seq_type)
    logger.info('gaze filename: ' + conf.csvFileName_fg)
    logger.info('features type: ' + conf.feat_extr_algorithm)
    logger.info('Result dir:')
    logger.info(conf.dataOutDir)
    logger.info('---------------------------')

    #Make frame file names
    gt_dir = os.path.join(conf.root_path, conf.ds_dir, conf.truth_dir)
    gtFileNames = utls.makeFrameFileNames(conf.frame_prefix, conf.truth_dir,
                                          conf.root_path, conf.ds_dir,
                                          conf.frame_extension)

    conf.frameFileNames = utls.makeFrameFileNames(conf.frame_prefix,
                                                  conf.frame_dir,
                                                  conf.root_path, conf.ds_dir,
                                                  conf.frame_extension)

    conf.myGaze_fg = utls.readCsv(
        os.path.join(conf.root_path, conf.ds_dir, conf.locs_dir,
                     conf.csvFileName_fg))

    #conf.myGaze_bg = utls.readCsv(conf.csvName_bg)
    gt_positives = utls.getPositives(gtFileNames)

    if (conf.labelMatPath != ''):
        conf.labelMatPath = os.path.join(conf.dataOutRoot, conf.ds_dir,
                                         conf.frameDir, conf.labelMatPath)

    conf.precomp_desc_path = os.path.join(conf.dataOutRoot, conf.ds_dir,
                                          conf.feats_dir)

    # ---------- Descriptors/superpixel costs
    my_dataset = ds.Dataset(conf)

    my_dataset.load_superpix_from_file()
    my_dataset.calc_sp_feats_unet_gaze_rec(save=True)

    with open(os.path.join(conf.dataOutDir, 'cfg.yml'), 'w') as outfile:
        yaml.dump(conf, outfile, default_flow_style=True)

    logger.info('Finished feature extraction: ' + conf.ds_dir)

    return conf
Example no. 15
import libSys
import objSync
import gaeItem
import localItem
import cfg
import os
import pprint

if __name__ == '__main__':
    #Read current state from disk
    c = cfg.cfg()
    settings = c.getSettings()
    gAppBaseDir = settings.get('rootDir', 'd:\\tmp\\syncFile')
    
    itemDict = settings.get('otherSettings', {})
    #pprint.pprint(itemDict)
    itemDict['workingDir'] = settings.get('workingDir', os.path.join(gAppBaseDir, 'working'))
    itemDict['legacyFilePath'] = settings.get('legacyFilePath', os.path.join(gAppBaseDir, 'legacy'))
    #itemDict['abnormalFilePath'] = settings.get('abnormalFilePath', os.path.join(gAppBaseDir, 'abnormal'))
    itemDict['tmpFilePath'] = settings.get('tmpFilePath', os.path.join(gAppBaseDir, 'tmp'))
    
    itemDict['treeDb'] = itemDict.get('treeDb', {})

    if not os.path.exists(itemDict['workingDir']):
        os.makedirs(itemDict['workingDir'])

    if not os.path.exists(itemDict['legacyFilePath']):
        os.makedirs(itemDict['legacyFilePath'])

    #if not os.path.exists(self.settings['abnormalFilePath']):
    #    os.makedirs(self.settings['abnormalFilePath'])