def main():
    """Train the X-ray classification network and save its weights.

    Reads the prepared image-index / label CSV, builds deterministic
    label<->index mappings, constructs the dataset, splits it into
    train/validation/test ranges, trains the model, and persists it
    under the configured model name for the testing script to load.
    """
    config = configs.config()

    # Only the columns the dataset needs.
    DataCSVFrame = pd.read_csv("DataFrame.csv",
                               usecols=["Image_Index", "Finding_Labels"],
                               index_col=False)

    # Distinct labels, SORTED so the label -> index assignment is
    # deterministic. Enumerating a bare set depends on hash randomization
    # and can differ between processes, which would make the saved model's
    # class indices unreproducible by the companion testing script.
    labelsSet = sorted(set(DataCSVFrame["Finding_Labels"].values))

    # label -> index, and the inverse for turning predictions back into labels.
    labelsDict = {label: index for index, label in enumerate(labelsSet)}
    labelDictClassify = {index: label for index, label in enumerate(labelsSet)}

    # Path where all the images are stored.
    imgPath = config.getImagePath()

    # Creating the dataset.
    xrayDataset = DC.XRayDataset(DataCSVFrame, imgPath, labelsDict)

    # Get the device (cpu/gpu) to run the model on.
    device = DU.getDevice()

    # Ranges of training and test data.
    training, testing = DU.splitTrainTest(xrayDataset, config)

    # Train and validation subsets drawn from the training range.
    trainSets, valSets = DU.trainValSets(training, config)

    # Criterion, optimizer and (untrained) model.
    criterion, optimizer, model = NM.modelInit(device)

    batchsize = config.getBatchSize()

    # Train, then save the weights to be used for testing.
    trainedModel = NM.trainNetwork(device, xrayDataset, trainSets, valSets,
                                   config, model, criterion, optimizer,
                                   batchsize)
    NM.save_model(trainedModel, config.getModelName())
def __init__(self):
    """Build the main window: run parameters, geometry, tabs, and signal wiring."""
    super(GUI, self).__init__()

    # Run parameters, database handles, and the slot-function provider.
    self.runParams = cfgs.config().runParams
    self.DB = {}
    self.gs = guiSlots()

    # Main-window geometry.
    self.title = 'DB Local'
    self.left = 0
    self.top = 0
    self.width = 800
    self.height = 600
    self.setWindowTitle(self.title)
    self.setGeometry(self.left, self.top, self.width, self.height)

    # The tab widget is the central widget of the window.
    self.tab_widget = TabWidget(self)
    self.setCentralWidget(self.tab_widget)
    tab1, tab2, tab3 = (self.tab_widget.tab1,
                        self.tab_widget.tab2,
                        self.tab_widget.tab3)

    # Tab 1: load a database either via the button or by pressing Enter
    # in the database-name textbox.
    tab1.dbLoad_btn.clicked.connect(lambda: self.gs.loadDB(self))
    tab1.dbName_edit.returnPressed.connect(lambda: self.gs.loadDB(self))

    # Tab 2: reload the table whenever the table combo-box selection changes.
    tab2.tablesCBox.currentIndexChanged.connect(lambda: self.gs.loadTable(self))

    # Font used to render tooltips (rich text is supported in tooltips).
    QToolTip.setFont(QFont('SansSerif', 10))

    # Tab 2 record editing/insertion; tab 3 raw SQL execution.
    tab2.recEdit_btn.clicked.connect(lambda: self.gs.editRecord(self))
    tab2.recInsert_btn.clicked.connect(lambda: self.gs.insertRecord(self))
    tab3.exec_btn.clicked.connect(lambda: self.gs.executeSQLQuery(self))

    # Show the main window.
    self.show()
from datetime import timedelta, datetime

from airflow import DAG
from airflow.operators.python_operator import PythonOperator

import tasks
import configs

client, filename = configs.config()

# Defaults applied to every task in the DAG.
default_args = {
    'owner': 'airflow',
    'start_date': datetime(2020, 6, 3),
    'depends_on_past': False,
    'email': ['*****@*****.**'],
    'retries': 1,
    'retry_delay': timedelta(minutes=2),
}

# Daily pipeline that collects and stores corona data.
dag = DAG(dag_id='corona',
          default_args=default_args,
          description="Collecting data",
          schedule_interval=timedelta(days=1),
          )

# One PythonOperator per pipeline stage; chained linearly below.
t1, t2, t3, t4 = (
    PythonOperator(task_id=task_id, python_callable=fn, dag=dag)
    for task_id, fn in (
        ("fetch_data", tasks.get_covidDdata),
        ("creating_dataset", tasks.create_dataset),
        ("creating_newTable", tasks.create_new_table),
        ("inserting_data", tasks.insert_data),
    )
)

# fetch -> dataset -> table -> insert
t1 >> t2 >> t3 >> t4
import db
import time
import configs
from sqlalchemy import Column, Integer
from sqlalchemy.orm import declarative_base, Session


# Minimal mapped table used to exercise row-level locking.
class Test(declarative_base()):
    __tablename__ = 'test'
    # Surrogate primary key.
    id = Column(Integer, primary_key=True, autoincrement=True)
    # Integer state flag; rows with state == 1 are candidates for pickup.
    state = Column(Integer, nullable=False)


# Build the engine from the project's config/db helpers and open a session.
config = configs.config('configs')
db_conf = db.get_database_config(config)
db_engine = db.db_connect(db_conf)
session = Session(bind=db_engine)
session.begin()

# Grab the first row with state == 1 using SELECT ... FOR UPDATE SKIP LOCKED,
# so a concurrent run skips rows already locked by another session instead
# of blocking on them.
test: Test = session \
    .query(Test) \
    .with_for_update(skip_locked=True) \
    .filter_by(state=1) \
    .first()

if test is None:
    print('No entry found.')
else:
    print(test.id)
    test.state = 2
    # Hold the transaction — and therefore the row lock — open for 30 s,
    # presumably so SKIP LOCKED behaviour can be observed from a second
    # process. NOTE(review): no commit/rollback is visible in this chunk;
    # confirm the state change is intentionally discarded (or committed
    # later outside this view).
    time.sleep(30)
def main():
    """Evaluate the saved X-ray model on the test split and plot a confusion matrix.

    Rebuilds the same label<->index mappings the training script used,
    reconstructs the network architecture, loads the saved weights from
    next to this script, runs the test range, and shows the resulting
    confusion matrix.
    """
    config = configs.config()

    # Only the columns the dataset needs.
    DataCSVFrame = pd.read_csv("DataFrame.csv",
                               usecols=["Image_Index", "Finding_Labels"],
                               index_col=False)

    # Distinct labels, SORTED so the label -> index assignment is
    # deterministic and matches the ordering used at training time.
    # Enumerating a bare set depends on hash randomization and can differ
    # between processes, which would silently scramble the class indices
    # the saved model was trained with.
    labelsSet = sorted(set(DataCSVFrame["Finding_Labels"].values))

    # label -> index, and the inverse for turning predictions back into labels.
    labelsDict = {label: index for index, label in enumerate(labelsSet)}
    labelDictClassify = {index: label for index, label in enumerate(labelsSet)}

    # Path where all the images are stored.
    imgPath = config.getImagePath()

    # Creating the dataset.
    xrayDataset = DC.XRayDataset(DataCSVFrame, imgPath, labelsDict)

    # Get the device (cpu/gpu) to run the model on.
    device = DU.getDevice()

    # Ranges of training and test data; only the test range is used here.
    training, testing = DU.splitTrainTest(xrayDataset, config)

    # Same architecture as training; weights are loaded below.
    model = models.alexnet(pretrained=False, num_classes=4)
    model.to(device)
    # NOTE(review): assumes DU.getDevice() yields 'cuda' (or an object that
    # compares equal to it) on GPU machines — confirm against DU.
    if device == 'cuda':
        model = torch.nn.DataParallel(model)
        cudnn.benchmark = True

    # Load the trained weights saved next to this script.
    cwd = os.path.dirname(os.path.realpath(__file__))
    model.load_state_dict(
        torch.load("%s%s%s.pth" % (cwd, os.sep, config.getModelName())))
    model.eval()

    # Run the test split through the network.
    wrongLabels, labelsCM, predsCM = NM.testing(xrayDataset, testing, model,
                                                device, labelDictClassify)

    # Confusion matrix; class order on the axes matches the sorted labels.
    CMPlot.plot_confusion_matrix(labelsCM, predsCM, list(labelsSet),
                                 normalize=False, title="Confusion Matrix")
    plt.show()
job = find_job_by_uri(session=session, uri=uri) job.started_at = None job.download_state = models.Data.DOWNLOAD_PENDING logging.warning(f'Job ' f'{colorama.Back.YELLOW}{colorama.Fore.BLACK}' f'cancelled' f'{colorama.Fore.RESET}{colorama.Back.RESET}' f'.') session.add(job) session.commit() session.close() return if __name__ == '__main__': config = configs.config(CONFIG_PATH) DB_CONF = db.get_database_config(config) WORKER_NAME = config.get('worker', 'name') RETRY_INTERVAL = config.getint('worker', 'retry_interval') RETRIES = config.getint('worker', 'retries') SOCKET_TIMEOUT = config.getint('worker', 'socket_timeout') DOWNLOAD_PATH = config.get('worker', 'download_path') SCHEDULE_ENABLED = config.getboolean('schedule', 'enabled') START_TIME = config.get('schedule', 'start_time') END_TIME = config.get('schedule', 'end_time') SCHEDULE_RETRY_INTERVAL = config.getint('schedule', 'retry_interval') colorama.init() logging.basicConfig( level=logging.INFO, format=