import sqlite3 as sl import sys sys.path.append('..') import logger from argparse import ArgumentParser DB_FILE = 'lcls2_interface.db' LOGGER = logger.custom_logger(__name__) def with_con(f): """sqlite connection decorator""" def with_con_(*args, **kwargs): con = sl.connect(DB_FILE) try: response = f(con, *args, **kwargs) con.commit() # Not always necessary, doesn't hurt for now except sl.Error as e: con.rollback() LOGGER.info('Unable to perform operation: {0}'.format(e)) con.close() return finally: con.close() return response return with_con_ @with_con def create_table(con, table, fields):
import simplejson as json from hdfs3 import HDFileSystem from conf import CONFIG from templater import Templater from constants import FOOTER_PATTERN from constants import HADOOP_PATH from constants import MESSAGE_VERSION from constants import SETTING from constants import TEMPLATE_PATH from pipeline_message_example import PIPELINE_MESSAGE from logger import custom_logger from typing import Dict, List, Any # Initialize logger logger = custom_logger() warnings.filterwarnings("ignore") print("top_level_directory : ", top_level_directory) print("HADOOP_PATH : ", HADOOP_PATH) def fixing_indentation(inp_str): """ Fix indentation and clean unused line in inp_str. example: before fixing indentation --------------------------------------------------------------- #!/usr/bin/env python3 # -*- coding: utf-8 -*- '''
from periphcollection import PeripheralCollection # new periperals from counter_p import CounterPeripheral from pwm import PWM from spi import SPI from testperiph import Testing from plat import BB import logger import logging logger.level = logging.DEBUG log = logger.custom_logger(__name__) class Serial(PeripheralCollection): def __init__(self, pwm=None, uart=None, uart_divisor=None, **kwargs): super().__init__(**kwargs) if uart is not None: serial1 = AsyncSerialPeripheral(divisor=uart_divisor, pins=uart) self.add(serial1) if __name__ == "__main__": from nmigen.cli import pysim platform = BB() u = platform.request('uart', 0)
import argparse

from load_data import load_data_bis
from train_MINST import train_model
from logger import custom_logger


def run(**kwargs):
    """Load the MNIST data, train a model with **kwargs, and return its accuracy."""
    data = load_data_bis()
    # train_model returns (accuracy, <second value ignored here>).
    acc, _ = train_model(data, **kwargs)
    return acc


if __name__ == "__main__":
    # create logger
    logger = custom_logger("train_MINST", "log/runnee.log")

    # gets arguments
    # NOTE(review): no `type=` is given for any option, so every value arrives
    # as a string (and --noeuds as a list of strings); confirm the downstream
    # code (beyond this chunk) converts them before use.
    parser = argparse.ArgumentParser(description="Runs MNIST")
    parser.add_argument("--n_epoch", help="Number of epochs")
    parser.add_argument("--batch_size", help="Batch size")
    parser.add_argument("--noeuds", help="Nombre de noeuds", nargs="*")
    parser.add_argument("--activation", help="Activation: relu, sigmoid, tanh")
    parser.add_argument("--learning_rate", help="Learning rate")
    parser.add_argument("--reg_l1", help="L1 regularization coefficient")
    parser.add_argument("--reg_l2", help="L2 regularization coefficient")
    parser.add_argument("--moment", help="Momentum for the gradient descent")
    parser.add_argument("--decay", help="Decay for the learning_rate")
    parser.add_argument("--nesterov", help="Using nesterov for the momentum")
    args = vars(parser.parse_args())