def frf_multiple_capter_simu_damaged(inFile=None, fichier_sampl="f_sampl_damaged.txt"):
    print("*************************\n Simulation for damaged state \n *************************")
    # Simulation: enable only when new data has to be generated
    forces = simu.white_noise(SAMPLVALUE, CAPTVALUE)
    acc, measures, f_sampl = simu.simulation(forces, "F_damaged.txt", "Y_damaged.txt",
                                             fichier_sampl, damaged="f_sampl_undamaged.txt")
    # Data produced by the simulation
    forces = parser.get("F_damaged.txt")[:]
    measures = parser.get("Y_damaged.txt")[:]
    # Compute one FRF per sensor
    res = []
    for i in range(CAPTVALUE):
        res.append(frf.frf(tool.get_lines(forces, i), tool.get_lines(measures, i)))
    if inFile is None:
        return res
    else:
        parser.writeValues(inFile, res)
        print("Values written in " + inFile)
def frf_multiple_capter_file(inFile=None):
    # Data provided by the professor
    forces = parser.get("donnees/F.txt")[:]
    measures = parser.get("donnees/Y.txt")[:]
    # Compute one FRF per sensor
    res = []
    for i in range(CAPTVALUE):
        res.append(frf.frf(forces, tool.get_lines(measures, i)))
    if inFile is None:
        return res
    else:
        parser.writeValues(inFile, res)
        print("Values written in " + inFile)
def check_for_spam():
    # Poll the global packet counter and raise the spam flag once it exceeds MAX_PACKETS
    global spam_count, spam_detected
    while True:
        if spam_count > int(parser.get('MAX_PACKETS')):
            spam_detected = True
            print("SPAAAAM")
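# A minimal usage sketch (assumption: check_for_spam() is meant to run alongside
# the code that increments spam_count, e.g. in a daemon thread; the setup below
# is illustrative and not part of the original module).
import threading

spam_count = 0
spam_detected = False

threading.Thread(target=check_for_spam, daemon=True).start()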
def main():
    # Fix the random seed so runs are reproducible
    random_seed = 42
    torch.manual_seed(random_seed)

    args = parser.get()

    X_train = load('./datas/X_train.npy')
    y_train = load('./datas/y_train.npy')
    X_test = load('./datas/X_test.npy')

    train_dataset = data.DatasetXy(X_train, y_train)
    test_dataset = data.DatasetX(X_test)
    data_class = data.Dataloader(args, train_dataset, test_dataset)
    train, test = data_class.train(), data_class.test()

    model = models.get(args)
    optimizer = optimizers.get(args, model.parameters())
    criterion = torch.nn.CrossEntropyLoss()

    for epoch in range(args.epochs):
        train_metrics = runner.run(
            model, criterion, optimizer, train, True,
            {"loss": metrics.loss, "accuracy": metrics.accuracy},
        )
        metrics.print_metrics(train_metrics)

    y_test_pred = runner.run(
        model, criterion, optimizer, test, False,
        {"loss": metrics.loss, "accuracy": metrics.accuracy},
    )
    print(y_test_pred)

    # Flatten the per-batch predictions into a single list of labels
    y_test_pred = [item for sublist in y_test_pred for item in sublist]
    y_test = np.asarray(y_test_pred)
    pd.DataFrame({
        "Id": np.arange(len(y_test)),
        "Category": y_test
    }).astype(int).to_csv("solution.csv", index=False)
def add_coverage(coverage_ID, new_name, measurement_unit, other_info):
    installation_dir = os.path.abspath(os.path.dirname(sys.argv[0]))

    # LOAD CONFIGURATION
    try:
        parser = SafeConfigParser()
        parser.read("%s/../etc/pep_lib.ini" % installation_dir)
    except Exception as e:
        logging.error("I am unable to load configuration")
        logging.debug(str(e))
        return

    # other_info must be a ';'-separated list of key=value pairs containing
    # at least the 'function' and 'label' keys
    try:
        new_dict = dict(e.split('=') for e in other_info.split(';'))
        new_dict['function']
        new_dict['label']
    except Exception:
        logging.error("other_info param is not a string in a valid format")
        return

    conn = psycopg2.connect(
        "dbname='%s' user='%s' host='%s' password='%s'" %
        (parser.get("db", "name"), parser.get("db", "user"),
         parser.get("db", "host"), parser.get("db", "pass")))
    cur = conn.cursor()

    query = "SELECT * from data_ingestion_collectiontable where \"coverageID\" like '%s'" % (coverage_ID)
    logging.debug("add_coverage select query: %s" % (query))
    cur.execute(query)
    data = cur.fetchall()

    try:
        val = ['' if v is None else v for v in data[0]]
        query = "INSERT INTO data_ingestion_collectiontable(name, source, max_lat, max_lon, min_lat, min_lon, start_date,end_date,application," + \
                "\"group\", location,other_info,uploaded_by_id,status, \"IO\", \"coverageID\",measurement_unit,access,ipr,max_value,min_value) VALUES "
        query = "%s ('%s','%s','%s','%s','%s','%s','%s','%s','%s','%s','%s','%s',%s,'%s','%s','%s','%s','%s','%s','%s','%s')" % (
            query, new_name, val[2], val[3], val[4], val[5], val[6], val[7],
            val[8], val[9], val[10], val[11], other_info, val[13], 'reprocess',
            'O', val[16], measurement_unit, val[18], val[19], val[20], val[21])
        logging.debug("add_coverage insert query: %s" % (query))
        cur.execute(query)
        conn.commit()
    except Exception as e:
        logging.error("Error while inserting coverage into data_ingestion_collectiontable")
        logging.debug(str(e))
        raise
    finally:
        cur.close()
        conn.close()
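# A usage sketch (all argument values below are hypothetical; they only
# illustrate the expected other_info format of ';'-separated key=value pairs
# containing at least the 'function' and 'label' keys):
if __name__ == "__main__":
    add_coverage("MOD_LST_001", "lst_reprocessed", "K",
                 "function=linear;label=LST;gain=0.02;offset=273.15")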
def set_env():
    logger.info("Setting environment variables from config file")
    parser = SafeConfigParser()
    parser.read('config.txt')
    os.environ["OS_PROJECT_DOMAIN_NAME"] = parser.get('environment', 'OS_PROJECT_DOMAIN_NAME')
    os.environ["OS_USER_DOMAIN_NAME"] = parser.get('environment', 'OS_USER_DOMAIN_NAME')
    os.environ["OS_PROJECT_NAME"] = parser.get('environment', 'OS_PROJECT_NAME')
    os.environ["OS_USERNAME"] = parser.get('environment', 'OS_USERNAME')
    os.environ["OS_PASSWORD"] = parser.get('environment', 'OS_PASSWORD')
    os.environ["OS_AUTH_URL"] = parser.get('environment', 'OS_AUTH_URL')
    os.environ["OS_IDENTITY_API_VERSION"] = parser.get('environment', 'OS_IDENTITY_API_VERSION')
    os.environ["OS_IMAGE_API_VERSION"] = parser.get('environment', 'OS_IMAGE_API_VERSION')
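# Sketch of the 'config.txt' section that set_env() reads; the option names come
# from the parser.get() calls above, the values are placeholders:
#
# [environment]
# OS_PROJECT_DOMAIN_NAME = Default
# OS_USER_DOMAIN_NAME = Default
# OS_PROJECT_NAME = demo
# OS_USERNAME = demo
# OS_PASSWORD = <password>
# OS_AUTH_URL = http://controller:5000/v3
# OS_IDENTITY_API_VERSION = 3
# OS_IMAGE_API_VERSION = 2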
def selectDir(self):
    filename = 'settings.ini'
    parser = configparser.ConfigParser()
    parser.read(filename)

    global WACHTWOORD
    WACHTWOORD = parser.get('COMMON', 'wachtwoord')

    # Ask for a quiz directory in debug mode, otherwise fall back to the default
    if debug:
        dialog = QtWidgets.QFileDialog()
        dialog.setFileMode(QtWidgets.QFileDialog.DirectoryOnly)
        dialog.setViewMode(QtWidgets.QFileDialog.Detail)
        if dialog.exec():
            a = dialog.selectedFiles()
            directory = a[0] + '/'
        else:
            directory = default
    else:
        directory = default

    # Check that the chosen directory contains the sheet-info file
    try:
        with open(directory + parser.get('PATHS', 'SHEETINFO')):
            gelukt = True
    except Exception:
        gelukt = False
    if not gelukt:
        directory = default

    parser.set('PATHS', 'QUIZFOLDER', directory)
    with open(filename, 'w') as configfile:
        parser.write(configfile)

    # Use a red background when running against a test folder
    if 'Test' in directory:
        pal = QPalette()
        pal.setColor(QPalette.Background, Qt.red)
        self.setAutoFillBackground(True)
        self.setPalette(pal)

    lijstje = directory.split('/')
    titel = lijstje[len(lijstje) - 2]
    self.setWindowTitle(self.windowTitle() + ' ' + titel)
    return gelukt
def main():
    # Add seed
    args = parser.get()

    data_class = data.Dataset(args)
    train, validation = data_class.train(), data_class.validation()

    model = models.get(args)
    optimizer = optimizers.get(args, model.parameters())
    criterion = torch.nn.CrossEntropyLoss()

    for epoch in range(args.epochs):
        train_metrics = runner.run(
            model, criterion, optimizer, train, True,
            {"loss": metrics.loss, "accuracy": metrics.accuracy},
        )
        metrics.print_metrics(train_metrics)

        validation_metrics = runner.run(
            model, criterion, optimizer, validation, False,
            {"loss": metrics.loss, "accuracy": metrics.accuracy},
        )
        metrics.print_metrics(validation_metrics)
def get():
    error.verify()
    return parser.get()
#!/usr/bin/python import parser parser=parser.initparser("/etc/solomon/conf.d/global.conf") pid=parser.get("global","pid") log=parser.get("global","log") check_interval=parser.get("global","check_interval") retry_interval=parser.get("global","retry_interval") retry_times=parser.get("global","retry_times") event_handler=parser.get("global","event_handler")
#!/usr/bin/python
# -*- coding: utf-8 -*-
import parser
import subprocess
import sys
import smtplib
from email.mime.text import MIMEText
import time

parser = parser.initparser("/etc/solomon/conf.d/email.conf")

enabled = parser.get("email", "enabled")
description = parser.get("email", "description")
smtp_server = parser.get("email", "smtp_server")
smtp_port = parser.get("email", "smtp_port")
sender = parser.get("email", "sender")
send_to = parser.get("email", "to").split(':')
send_cc = parser.get("email", "cc").split(':')
user = parser.get("email", "user")
password = parser.get("email", "password")
HOST = parser.get("email", "host")
DATE = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time()))


def send_email(name):
    # Build the alert reason and HTML body for the host that went down
    reason = "%s is down" % name
    RESON = reason.upper()
    content = '''<html>
<hr/>
<font>%s failed over from the primary to the standby machine because of a fault; please notify the operations staff to check and repair it.</font><br><br>
# Helper class to convert a DynamoDB item to JSON.
class DecimalEncoder(json.JSONEncoder):
    def default(self, o):
        if isinstance(o, decimal.Decimal):
            if o % 1 > 0:
                return float(o)
            else:
                return int(o)
        return super(DecimalEncoder, self).default(o)


# Twitter API credentials
from configparser import ConfigParser

parser = ConfigParser()
parser.read('api_auth.cfg')
access_token = parser.get('api_tracker', 'access_token')
access_token_secret = parser.get('api_tracker', 'access_token_secret')
consumer_key = parser.get('api_tracker', 'consumer_key')
consumer_secret = parser.get('api_tracker', 'consumer_secret')


def get_tweet_list(twapi, idlist):
    # statuses_lookup silently drops IDs it cannot resolve (deleted or protected tweets)
    tweets = twapi.statuses_lookup(id_=idlist, include_entities=False, trim_user=True)
    if len(idlist) != len(tweets):
        print("error: some tweet ids could not be looked up")
    return tweets
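# Usage sketch (illustrative only): DecimalEncoder is meant to be passed as
# json.dumps(item, cls=DecimalEncoder). The auth flow below assumes a tweepy
# 3.x-style client, and the tweet IDs are placeholders.
import tweepy

auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token, access_token_secret)
twapi = tweepy.API(auth)
tweets = get_tweet_list(twapi, [1278945270625468416, 1278945270625468417])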
                                              float(dict_args['offset']))
    else:
        logging.error("Reprocessing: Unrecognized function")
        raise Exception('Error', 'Unrecognized function')


if __name__ == "__main__":
    # LOAD CONFIGURATION
    try:
        parser = SafeConfigParser()
        parser.read("%s/../etc/pep_lib.ini" % installation_dir)
        # set up the logger
        logging.basicConfig(
            # stream=sys.stdout,
            filename=installation_dir + "/../log/FilesystemData.log",
            level=int(parser.get("Logger", "loglevel")),
            format='%(levelname)s\t| %(asctime)s | %(message)s')
    except Exception as e:
        print("I am unable to load configuration")
        print(str(e))

    parser = argparse.ArgumentParser(
        prog=sys.argv[0],
        description='Utility used to reprocess entire collections on filesystem',
        epilog='SISTEMA GmbH <http://sistema.at>')
    parser.add_argument(
        '--standalone',
        help='Execute the module stand-alone, without going through ingester in das_ing',
        action='store_true')