def __init__(self, obf, verbose=False, nthreads=None, ncores=None):
    """Initialise the wrapper: store threading options, propagate verbosity
    to `obf`, and create the logger."""
    self._state = None
    self._verbose = verbose
    # Keep the wrapped obfuscator's verbosity in sync with ours.
    obf.verbose(self._verbose)
    self._nthreads = nthreads
    self._ncores = ncores
    self.logger = utils.make_logger(self._verbose)
def __init__(self, verbose=False):
    """Create the logger and reset all circuit/branching-program state."""
    self._verbose = verbose
    self.logger = utils.make_logger(self._verbose)
    # Empty initial state: no layers, unknown input count, no BP yet.
    self.nlayers = 0
    self.ninputs = None
    self.bp = None
    self.randomized = False
    self.zero = None
def main():
    """Entry point: parse command-line flags, then run the suite
    (or just a dry run)."""
    global cs_inst
    parser = argparse.ArgumentParser(
        description="qtum-test-suite -- s3__contract-transfer-coin",
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('-d', '--dry-run', action='store_true',
                        dest='dry_run', help='enable dry-run')
    prog_args = parser.parse_args()
    logger = utils.make_logger('./rt-data/run.log')
    cs_inst = utils.CSubprocess(logger)
    if prog_args.dry_run:
        dry_run()
    else:
        run(cs_inst, logger)
def __init__(self, delegate=None):
    """Set up a new Wiimote wrapper; `delegate` receives event callbacks."""
    # Public
    self.logger = make_logger(__name__)
    self.logger.info('Created new Wiimote!')
    self.delegate = delegate
    self.check_interval = 0.1
    self.watchdog_timeout = 1.0
    # Private: connection/thread bookkeeping.
    self._is_connected = False
    self._worker_thread = None
    self._watchdog_thread = None
    self._should_terminate = False
    self._remote = None
    # Private: last-known button states.
    self._left_pressed = False
    self._right_pressed = False
    self._down_pressed = False
    self._up_pressed = False
    self._A_pressed = False
    self._B_pressed = False
import numpy as np
import scipy
import scipy.io as sio
import os
import time
import pdb

from numba import jit

import utils
import subthread

logger = utils.make_logger('dataset', is_stdout=False,
                           filename='./log/dataset.log')


class MyThread(subthread.subthread):
    """Worker thread whose job is loading one .mat file."""

    def work(self, args):
        # `args` is the path to the .mat file to load.
        filepath = args
        mat = sio.loadmat(filepath)
        logger.info('load file:{}'.format(filepath))
        return mat


# Module-level pool of registered threads.
threads = []


def add_thread(thread):
    """Append `thread` to the module-level pool."""
    global threads
    threads.append(thread)
from utils import make_logger
from flask import Blueprint

MWF = "mwf"  # Maass waveforms

mwf = Blueprint(MWF, __name__,
                template_folder="views/templates",
                static_folder="views/static")
mwf_logger = make_logger(mwf)

# Imported for their registration side effects on the blueprint.
import backend
import views
from __future__ import division from __future__ import print_function import tensorflow as tf import numpy as np from tensorflow.contrib.keras.python.keras.layers import Conv2D, Dense, MaxPooling2D, ZeroPadding2D, Flatten, BatchNormalization, Dropout, AveragePooling2D, Activation from tensorflow.contrib.keras.python.keras.layers import Conv3D, MaxPooling3D, AveragePooling3D, ZeroPadding3D from tensorflow.contrib.keras.python.keras import layers import logging import h5py import pdb import utils logger = utils.make_logger('resnet', is_stdout=False, filename='./log/resnet.log') class ResNet(object): def __init__(self, args): self.args = args self.init_input() self.init_model() self.init_loss() def init_input(self): self.input_img = tf.placeholder(tf.uint8, [None, 230, 102, 20, 1]) self.input_lr = tf.placeholder(tf.float32) self.input_label = tf.placeholder(tf.int32, [None])
# -*- coding: utf-8 -*-
from base import app
from utils import make_logger
from flask import Blueprint

OEIS_object_page = Blueprint("OEIS_object", __name__,
                             template_folder='templates',
                             static_folder="static")
logger = make_logger(OEIS_object_page)


@OEIS_object_page.context_processor
def body_class():
    # Blueprint-specific CSS body class for the templates.
    return {'body_class': 'OEIS_object'}

import main

# This is one possibility to inject pages into the Flask framework.
# For another, see the L-function page
app.register_blueprint(OEIS_object_page, url_prefix="/OEIS/")
board.move_select(pg.K_LEFT) elif keypress[pg.K_RIGHT]: board.move_select(pg.K_RIGHT) # Place a stone elif keypress[pg.K_SPACE]: log.debug("placing stone") board.place_stone() # TODO elif keypress[pg.K_p]: log.debug("passing") board.pass_move() elif keypress[pg.K_u]: log.debug("undoing") board.undo() # Quit elif keypress[pg.K_ESCAPE]: log.debug("quitting") sys.exit(1) screen.fill(board_color) board.draw(screen) pg.display.flip() if __name__ == '__main__': log = utils.make_logger('go-app', verbose = True) test(log) main(log)
EXPERIMENT = f"{cfg.model}_{cfg.experiment}"
MODEL_PATH = f"models/{EXPERIMENT}"
LOG_PATH = f"logs/{EXPERIMENT}"

utils.make_folder(MODEL_PATH)
utils.make_folder(LOG_PATH)

criterions = utils.define_losses()
dataloaders = utils.make_data_novel(cfg)

model = utils.build_structure_generator(cfg).to(cfg.device)
optimizer = utils.make_optimizer(cfg, model)
scheduler = utils.make_lr_scheduler(cfg, optimizer)

logger = utils.make_logger(LOG_PATH)
writer = utils.make_summary_writer(EXPERIMENT)


def on_after_epoch(model, df_hist, images, epoch, saveEpoch):
    """Checkpoint the model and log history/images at the end of an epoch."""
    utils.save_best_model(MODEL_PATH, model, df_hist)
    utils.checkpoint_model(MODEL_PATH, model, epoch, saveEpoch)
    utils.log_hist(logger, df_hist)
    utils.write_on_board_losses_stg2(writer, df_hist)
    utils.write_on_board_images_stg2(writer, images, epoch)


if cfg.lrSched is not None:
    def on_after_batch(iteration):
        # Record the current learning rate, then step the scheduler.
        utils.write_on_board_lr(writer, scheduler.get_lr(), iteration)
        scheduler.step(iteration)
else:
    on_after_batch = None
# coding=utf-8
import tempfile
from filecmp import dircmp
from os.path import exists, join
from zipfile import ZipFile, BadZipFile, ZipInfo

from sltp import SLTP
from utils import make_logger, Path, Progress

from src.dummy_miz import dummy_miz
from src.global_ import ENCODING
from src.miz.mission import Mission

logger = make_logger('miz')


# noinspection PyAbstractClass
class MizPath(Path):
    """Path subclass that validates its target is an existing .miz file."""

    def __init__(self, path):
        Path.__init__(self, path)
        # Fail fast, with a distinct exception type per invalid case.
        if not self.exists():
            raise FileNotFoundError(path)
        if not self.isfile():
            raise TypeError(path)
        if self.ext != '.miz':
            raise ValueError(path)
def build(argv, build_logger):
    """Drive the staged FPGA build flow: SIM -> SYN -> QSYS -> QUARTUS -> BSP.

    argv: [prog, board_type, target_symbol, component_file_name]
    build_logger: logger for stage-level progress messages.
    Returns True on success, False on bad arguments.
    """
    required_len_argv = 4
    if len(argv) != required_len_argv:
        print(
            f'{required_len_argv} arguments should be given, not {len(argv)}.')
        print_help()
        return False

    board_type = argv[1]
    # Reverse-map the symbol given on the command line back to its Target.
    target = {v: k for k, v in TARGET_TO_SYMBOL.items()}.get(argv[2])
    component_file_name = argv[3]
    component_name = 'intel_hls_qconv1x1_impl'

    if target is None:
        # BUG FIX: previously fell through with target=None and crashed on
        # the `target <= Target.SIM` comparison below. (Also fixed typo
        # "Gien" -> "Given".)
        print(f'Given 2nd arg {argv[2]} is not supported.')
        print_help()
        return False

    src_prj_path = path.join(env.ROOT_DIR, 'projects', board_type)
    prj_name = f'{path.basename(src_prj_path)}.prj'
    PROJECT_DIR = path.join(env.ROOT_DIR, prj_name)
    component_prj = f'{component_file_name}.prj'
    IP_DIR = path.join(env.ROOT_DIR, component_prj)

    if target <= Target.SIM:
        symbol = TARGET_TO_SYMBOL[Target.SIM]
        name = TARGET_TO_NAME[Target.SIM]
        build_logger.info(f'Run {name}.')
        logger = make_logger(symbol, INFO)
        run_and_check('make clean', logger)
        cmd = f'make {symbol} -j{env.NUM_THREADS}'
        run_and_check(cmd, logger)
        cmd = f'./{symbol}.elf random'
        run_and_check(cmd, logger)

    if target <= Target.SYN:
        symbol = TARGET_TO_SYMBOL[Target.SYN]
        name = TARGET_TO_NAME[Target.SYN]
        build_logger.info(f'Run {name}.')
        logger = make_logger(symbol, INFO)
        run_and_check('make clean', logger)
        cmd = f'make {symbol} -j{env.NUM_THREADS}'
        run_and_check(cmd, logger)
        # Move the freshly generated HLS component project into place.
        src_path = path.join(env.INTEL_HLS_DIR, component_prj)
        dst_path = path.join(IP_DIR)
        if path.exists(IP_DIR):
            shutil.rmtree(IP_DIR)
        shutil.move(src_path, dst_path)
        # cmd = f'./{symbol}.elf random'
        # run_and_check(cmd, logger)

    if target <= Target.QSYS:
        symbol = TARGET_TO_SYMBOL[Target.QSYS]
        name = TARGET_TO_NAME[Target.QSYS]
        build_logger.info(f'Run {name}.')
        logger = make_logger(symbol, INFO)
        # Start from a clean copy of the board's template project.
        if path.exists(PROJECT_DIR):
            shutil.rmtree(PROJECT_DIR)
        shutil.copytree(src_prj_path, PROJECT_DIR)
        component_dir = path.join(IP_DIR, 'components', component_name)
        qsys_tcl_file = path.join(PROJECT_DIR, 'soc_system.tcl')
        cmd = f'qsys-script --search-path={component_dir}/,$ --script={qsys_tcl_file}'
        run(cmd, logger, cwd=PROJECT_DIR)
        qsys_prj_file = path.join(PROJECT_DIR, 'soc_system.qsys')
        cmd = f'qsys-generate {qsys_prj_file} --search-path={component_dir}/,$ --synthesis=VHDL'
        run(cmd, logger, cwd=PROJECT_DIR)
        bootfiles_dir = path.join(PROJECT_DIR, 'bootfiles')
        if path.exists(bootfiles_dir):
            shutil.rmtree(bootfiles_dir)
        os.mkdir(bootfiles_dir)

    if target <= Target.QUARTUS:
        symbol = TARGET_TO_SYMBOL[Target.QUARTUS]
        name = TARGET_TO_NAME[Target.QUARTUS]
        build_logger.info(f'Run {name}.')
        logger = make_logger(symbol, INFO)
        quartus_prj_file = path.join(PROJECT_DIR, 'DE10_NANO_SoC_GHRD.qpf')
        cmd = f'quartus_sh --flow compile {quartus_prj_file}'
        run(cmd, logger, cwd=PROJECT_DIR)
        sof_file = path.join(PROJECT_DIR, 'output_files',
                             'DE10_NANO_SoC_GHRD.sof')
        rbf_file = path.join(bootfiles_dir, 'soc_system.rbf')
        cmd = f'quartus_cpf -c -o bitstream_compression=on {sof_file} {rbf_file}'
        run(cmd, logger, cwd=PROJECT_DIR)

    if target <= Target.BSP:
        # BUG FIX: this stage previously reused Target.QUARTUS for its
        # symbol/name/logger; it should identify itself as the BSP stage.
        symbol = TARGET_TO_SYMBOL[Target.BSP]
        name = TARGET_TO_NAME[Target.BSP]
        build_logger.info(f'Run {name}.')
        logger = make_logger(symbol, INFO)
        bsp_build_dir = path.join(PROJECT_DIR, 'bsp_build_dir')
        if path.exists(bsp_build_dir):
            shutil.rmtree(bsp_build_dir)
        os.mkdir(bsp_build_dir)
        hps_dir = path.join(PROJECT_DIR, 'hps_isw_handoff', 'soc_system_hps_0')
        # BUG FIX: the second half of this command was a plain (non-f)
        # string, so the literal text "{hps_dir}" was passed to the tool
        # instead of the actual handoff directory.
        cmd = (f'bsp-create-settings --type spl --bsp-dir {bsp_build_dir} '
               f'--settings settings.bsp --preloader-settings-dir {hps_dir}')
        run(cmd, logger, cwd=PROJECT_DIR)
        run(f'make -j{env.NUM_THREADS}', logger, cwd=bsp_build_dir)
        preloader_file_name = 'preloader-mkpimage.bin'
        preloader_file = path.join(bsp_build_dir, preloader_file_name)
        shutil.move(preloader_file,
                    path.join(bootfiles_dir, preloader_file_name))
        uboot_file_name = 'u-boot.img'
        uboot_file = path.join(bsp_build_dir, 'uboot-socfpga', uboot_file_name)
        # NOTE(review): 'make clean' runs without cwd= — presumably it was
        # meant to run inside bsp_build_dir like the next command; confirm.
        run('make clean', logger)
        run(f'make uboot -j{env.NUM_THREADS}', logger, cwd=bsp_build_dir)
        shutil.move(uboot_file, path.join(bootfiles_dir, uboot_file_name))

    # temporary
    return True
shutil.move(preloader_file, path.join(bootfiles_dir, preloader_file_name)) uboot_file_name = 'u-boot.img' uboot_file = path.join(bsp_build_dir, 'uboot-socfpga', uboot_file_name) run('make clean', logger) run(f'make uboot -j{env.NUM_THREADS}', logger, cwd=bsp_build_dir) shutil.move(uboot_file, path.join(bootfiles_dir, uboot_file_name)) # temporary return True if __name__ == '__main__': clean_logs() build_logger = utils.make_logger('build', INFO) print('-------------------------------------') build_logger.info('Start.') print('-------------------------------------') return_flag = build(sys.argv, build_logger) print('-------------------------------------') if return_flag: build_logger.info('Succeeded!!!') else: build_logger.info('Failed...') print('-------------------------------------')
from utils import make_logger, logging, truncfile debuglog = 'debug.log' frmt = '%(name)s:%(levelno)s:%(lineno)s:%(message)s' truncfile(debuglog) # for logging things in main mainlog = make_logger( 'main_log', frmt=frmt, fpath=debuglog, stdout=True, # lvl=logging.DEBUG, # use one of the following to easily set the stdout log level lvl=logging.INFO, # lvl=logging.WARNING, # lvl=logging.ERROR, # lvl=logging.CRITICAL, flvl=logging.INFO) man_log = logging.getLogger("main_log.managers") ''' man_log = make_logger('managers', frmt=frmt, fpath=debuglog, stdout=True, # lvl=logging.DEBUG, # use one of the following to easily set the stdout log level lvl=logging.INFO, # lvl=logging.WARNING, # lvl=logging.ERROR, # lvl=logging.CRITICAL,
import re import tarfile from base import app, getDBConnection, fmtdatetime from flask import render_template, request, abort, Blueprint, url_for from flaskext.login import login_required, current_user from gridfs import GridFS from os import path from pymongo.objectid import ObjectId from urlparse import urlparse from urllib import urlopen from users import admin_required upload_page = Blueprint("upload", __name__, template_folder='templates') import utils logging = utils.make_logger(upload_page) # blueprint specific definition of the body_class variable @upload_page.context_processor def body_class(): return {'body_class': 'upload'} def get_bread(): return [("Upload", url_for(".index"))] @upload_page.route("/") @login_required def index():
# -*- coding: utf-8 -*-
from base import app
from utils import make_logger
from flask import Blueprint

artin_representations_page = Blueprint("artin_representations", __name__,
                                       template_folder='templates',
                                       static_folder="static")
logger = make_logger(artin_representations_page)


@artin_representations_page.context_processor
def body_class():
    # Blueprint-specific CSS body class for the templates.
    return {'body_class': 'artin_representations'}

import main

app.register_blueprint(artin_representations_page,
                       url_prefix="/ArtinRepresentation")
from utils import make_logger
from flask import Blueprint

MWFP = "mwfp"

mwfp = Blueprint(MWFP, __name__, template_folder="views/templates")
mwfp_logger = make_logger(mwfp)
mwfp_dbname = 'HTPicard'

# Imported for their registration side effects on the blueprint.
import views
import backend
parser.add_argument('--image-size', nargs='+', type=int, default=[256, 512]) parser.add_argument('--lr', type=float, default=1) parser.add_argument('--n_val', type=int, default=512) parser.add_argument('--output-dir', default='logs') parser.add_argument('--clip-grad', type=bool, default=True) parser.add_argument('--filters', nargs='+', type=int, default=[64, 128, 256, 512, 1024]) parser.add_argument('--patience', type=int, default=6) opt = parser.parse_args() logger = make_logger(opt.model, opt.output_dir) logger.info(opt) device = torch.device('cuda' if torch.cuda.is_available() else 'cpu') logger.info(f'device: {device}') dataset = CarvanaDataset('~/data/datasets/carvana/train', '~/data/datasets/carvana/train_masks', transforms=CarvanaDatasetTransforms( opt.image_size)) # torch.manual_seed(0) n_train = len(dataset) - opt.n_val train_dataset, val_dataset = random_split( dataset, [n_train, opt.n_val], generator=torch.Generator().manual_seed(0))
def __init__(self, plus_pin, minus_pin):
    """Configure the motor's two GPIO pins as outputs and halt the motor."""
    GPIO.setup([plus_pin, minus_pin], GPIO.OUT)
    self.logger = make_logger(__name__)
    self.plus_pin = plus_pin
    self.minus_pin = minus_pin
    # Start from a known-safe state.
    self.stop()
from base import * from flask import Flask, session, g, render_template, url_for, request, redirect, make_response, abort from sage.all import * import tempfile, os import pymongo from Lfunction import * import LfunctionComp import LfunctionPlot from utils import to_dict, make_logger from Lfunctionutilities import lfuncDStex, lfuncEPtex, lfuncFEtex logger = make_logger("LF") ##import upload2Db.py ########################################################################### # Route functions ########################################################################### @app.route("/L/") @app.route("/L/<arg1>/") # arg1 is EllipticCurve, ModularForm, Character, etc @app.route("/L/<arg1>/<arg2>/") # arg2 is field @app.route("/L/<arg1>/<arg2>/<arg3>/") #arg3 is label @app.route("/L/<arg1>/<arg2>/<arg3>/<arg4>/") @app.route("/L/<arg1>/<arg2>/<arg3>/<arg4>/<arg5>/") @app.route("/L/<arg1>/<arg2>/<arg3>/<arg4>/<arg5>/<arg6>/") @app.route("/L/<arg1>/<arg2>/<arg3>/<arg4>/<arg5>/<arg6>/<arg7>/") @app.route("/L/<arg1>/<arg2>/<arg3>/<arg4>/<arg5>/<arg6>/<arg7>/<arg8>/") @app.route("/L/<arg1>/<arg2>/<arg3>/<arg4>/<arg5>/<arg6>/<arg7>/<arg8>/<arg9>/"
from .serializers import SymptomUploadModelSerializer, SymptomDiseaseModelSerializer, DiseaseModelSerializer, SymptomPhotoModelSerializer, SymptomDescriptionModelSerializer from utils import make_logger from rest_framework.response import Response from rest_framework.views import APIView from rest_framework import status from .repositories import get_first_depth, get_final_depth, get_next_depth from .models import SymptomUpload, SymptomDisease, Disease, Symptom, SymptomPhoto, SymptomDescription from rest_framework.authentication import TokenAuthentication from rest_framework.permissions import IsAuthenticated from rest_framework.viewsets import ModelViewSet from accounts.models import User from rest_framework_jwt.authentication import JSONWebTokenAuthentication logger = make_logger('DIAGNOSIS_VIEW') class DiseaseSymptom(APIView): def get(self, request): """ First Depth 증상 리스트를 보여준다. <p><b>ds_id [STRING/INT]: </b>선택 증상 id</p> """ first_depth_serializer = get_first_depth() fd_res = first_depth_serializer.data logger.debug('First Depth Symptom List : {}'.format(fd_res)) return Response(fd_res, status=200) def post(self, request):
from utils import make_logger
from flask import Blueprint

EMF = "emf"

emf = Blueprint(EMF, __name__,
                template_folder="views/templates",
                static_folder="views/static")
emf_logger = make_logger(emf)

# Imported for their registration side effects on the blueprint.
import views
import backend
# -*- coding: utf-8 -*-
from base import app
from utils import make_logger
from flask import Blueprint

local_fields_page = Blueprint("local_fields", __name__,
                              template_folder='templates',
                              static_folder="static")
logger = make_logger(local_fields_page)


@local_fields_page.context_processor
def body_class():
    # Blueprint-specific CSS body class for the templates.
    return {'body_class': 'local_fields'}

import main

app.register_blueprint(local_fields_page, url_prefix="/LocalField")
from utils import make_logger
from flask import Blueprint

MWFP = "mwfp"

mwfp = Blueprint(MWFP, __name__, template_folder="views/templates")
mwfp_logger = make_logger(mwfp)
mwfp_dbname = 'HTPicard'

# Imported for their registration side effects on the blueprint.
import views
import backend
from django.shortcuts import render
from django.contrib.auth.decorators import login_required
from utils import make_logger, get_jwt_token
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework import status
from .serializers import UserInfoSerializer
from .models import UserInfo, User
from rest_framework.authentication import TokenAuthentication
from rest_framework.viewsets import ModelViewSet
from django.conf import settings
from django.core.exceptions import ObjectDoesNotExist
from django.shortcuts import get_object_or_404
from rest_framework_jwt.authentication import JSONWebTokenAuthentication

logger = make_logger('LOGIN_VIEW')


class UserInfoViewSet(ModelViewSet):
    """
    [POST] 사용자 정보(축종, 지역, 사육두수, 핸드폰 번호)를 등록한다.
    <p><b> species [STRING]: </b>축종</p>
    <p><b> area [STRING]: </b>지역</p>
    <p><b> scale [STRING]: </b>사육두수</p>
    <p><b> phone [STRING]: </b>핸드폰 번호</p>
    """
    # NOTE: the docstring above is rendered by DRF's browsable API, so its
    # text (Korean + HTML) is kept verbatim. It documents a POST endpoint
    # registering user info: species, area, herd size, phone number.
    queryset = UserInfo.objects.all()
    serializer_class = UserInfoSerializer
    authentication_classes = [TokenAuthentication]
    # Only creation is allowed through this viewset.
    http_method_names = ['post']
# -*- coding: utf-8 -*-
from base import app
from utils import make_logger
from flask import Blueprint

knowledge_page = Blueprint("knowledge", __name__, template_folder='templates')
logger = make_logger(knowledge_page)

import main

app.register_blueprint(knowledge_page, url_prefix="/knowledge")
# -*- encoding: utf-8 -*- # this holds all the flask-login specific logic (+ url mapping an rendering templates) # for the user management # author: harald schilly <*****@*****.**> import pymongo ASC = pymongo.ASCENDING import flask from functools import wraps from base import app, getDBConnection from flask import render_template, request, abort, Blueprint, url_for, make_response from flaskext.login import login_required, login_user, current_user, logout_user login_page = Blueprint("users", __name__, template_folder='templates') import utils logger = utils.make_logger(login_page) import re allowed_usernames = re.compile("^[a-zA-Z0-9._-]+$") from flaskext.login import LoginManager login_manager = LoginManager() import pwdmanager from pwdmanager import LmfdbUser, LmfdbAnonymousUser # TODO update this url, needed for the user login token base_url = "http://www.l-functions.org" @login_manager.user_loader
# -*- encoding: utf-8 -*- # this holds all the flask-login specific logic (+ url mapping an rendering templates) # for the user management # author: harald schilly <*****@*****.**> import pymongo ASC = pymongo.ASCENDING import flask from functools import wraps from base import app, getDBConnection from flask import render_template, request, abort, Blueprint, url_for, make_response from flaskext.login import login_required, login_user, current_user, logout_user login_page = Blueprint("users", __name__, template_folder='templates') import utils logger = utils.make_logger(login_page) import re allowed_usernames = re.compile("^[a-zA-Z0-9._-]+$") from flaskext.login import LoginManager login_manager = LoginManager() import pwdmanager from pwdmanager import LmfdbUser, LmfdbAnonymousUser # TODO update this url, needed for the user login token base_url = "http://www.l-functions.org" @login_manager.user_loader def load_user(userid):
#import ipdb; ipdb.set_trace() debuglog_path = Path(basedir, 'debug.log') debuglog = str(debuglog_path) frmt = '%(name)s:%(levelno)s:%(lineno)s:%(message)s' truncfile(debuglog) # for logging things in main mainlog = make_logger('main_log', frmt=frmt, fpath=debuglog, stdout=True, # lvl=logging.DEBUG, # use one of the following to easily set the stdout log level lvl=logging.INFO, # lvl=logging.WARNING, # lvl=logging.ERROR, # lvl=logging.CRITICAL, flvl = logging.INFO ) man_log = logging.getLogger("main_log.managers") ''' man_log = make_logger('managers', frmt=frmt, fpath=debuglog, stdout=True, # lvl=logging.DEBUG, # use one of the following to easily set the stdout log level lvl=logging.INFO,
def __init__(self, left_motor, right_motor):
    """Wire up the two drive motors and create the controlling Wiimote."""
    self.logger = make_logger(__name__)
    self.left_motor = left_motor
    self.right_motor = right_motor
    # The Wiimote calls back into this object (it receives us as delegate).
    self.wiimote = Wiimote(self)
# -*- coding: utf-8 -*- import math from Lfunctionutilities import pair2complex, splitcoeff, seriescoeff from sage.all import * import sage.libs.lcalc.lcalc_Lfunction as lc import re import pymongo import bson import utils from modular_forms.elliptic_modular_forms.backend.web_modforms import * logger = utils.make_logger("LF") def get_attr_or_method(thiswillbeexecuted, attr_or_method_name): """ Given an object O and a string "text", this returns O.text() or O.text depending on whether text is an attribute or a method of O itself _or one of its superclasses_, which I will only know at running time. I think I need an eval for that. POD """ # I don't see a way around using eval for what I want to be able to do # Because of inheritance, which method should be called depends on self try: return eval("thiswillbeexecuted."+attr_or_method_name) except: return None def my_find_update(the_coll, search_dict, update_dict): """ This performs a search using search_dict, and updates each find in the_coll using update_dict. If there are none, update_dict is actually inserted. """
# /modular_forms/maass_forms/__init__.py
from utils import make_logger
from flask import Blueprint

MAASSF = "maassf"

maassf = Blueprint(MAASSF, __name__, template_folder="views/templates")
maassf_logger = make_logger(maassf)

# Imported for their registration side effects on the blueprint.
import maass_waveforms
import picard
from datetime import datetime import json import time import math import bs4 import pymongo import utils import settings logger = utils.make_logger("sb_nation") def get_all_blogs(): sb_nation_blogs = [] blogs = utils.get_response_from_target("http://www.sbnation.com/blogs") blogs_soup = bs4.BeautifulSoup(blogs.text, "html.parser") team_divs = blogs_soup.find("div", {"class": "l-main-float"}) sport_type = None for i, element in enumerate(team_divs): if type(element) == bs4.element.Tag: if element.name == "h2": sport_type = element.text elif element.name == "div": blog_info = element.find('a') url = blog_info['href'] blog_name = blog_info.find("h3", {
from utils import make_logger
from flask import Blueprint

MWF = "mwf"  # Maass waveforms

mwf = Blueprint(MWF, __name__,
                template_folder="views/templates",
                static_folder="views/static")
mwf_logger = make_logger(mwf)

# Imported for their registration side effects on the blueprint.
import backend
import views
from flask import Flask, session, g, render_template, url_for, make_response, request, redirect
from sage.all import *
import tempfile
import os
from pymongo import ASCENDING
from WebCharacter import *
from utils import to_dict, parse_range, make_logger
import ListCharacters

# BUG FIX: `logger` must exist before the try/except below, which logs on
# import failure; previously it was defined only after the except block, so
# a failed import raised NameError instead of logging the problem.
logger = make_logger("DC")

try:
    from dirichlet_conrey import *
except ImportError:
    logger.critical("dirichlet_conrey.pyx cython file is not available ...")

###############################################################################
# Route functions
###############################################################################


@app.route("/Character/Dirichlet/")
@app.route("/Character/Dirichlet/<arg1>")
@app.route("/Character/Dirichlet/<arg1>/<arg2>")
def render_Character(arg1=None, arg2=None):
    """Render the Dirichlet character page for the given path arguments."""
    return DirichletCharacter.render_webpage(request, arg1, arg2)


def render_webpage(request, arg1, arg2):
    args = request.args
loops = 0 while True: loops += 1 if loops % 25 == 0: log.debug('recorded <{}> loops'.format(loops)) # Decode chunks of audio data from the stream try: data = stream.read(CHUNK_SIZE) decoded = np.fromstring(data, 'Float32'); mx = max(decoded) recorded.append(mx) # On <C-c>, plot max of recorded data except KeyboardInterrupt as ee: log.debug('closing stream and ending PyAudio') stream.close() p.terminate() df = pd.DataFrame(columns = ['mx', 'time']) df['mx'] = recorded df['time'] = range(len(recorded)) plt = ggplot.ggplot(ggplot.aes(x='time', y='mx'), data=df) +\ ggplot.geom_line() pdb.set_trace() log.debug('quitting') sys.exit(1) if __name__ == "__main__": log = utils.make_logger('audio-reader') main(log)
# coding=utf-8
from utils import Progress, Downloader, make_logger

logger = make_logger(__name__)


def download(url, local_file, progress_title: str, progress_text: str = '', file_size: int = None):
    """Download `url` into `local_file`, reporting progress via Progress.

    Returns whatever Downloader.download() returns.
    """
    logger.info('downloading {} -> {}'.format(url, local_file))
    Progress.start(progress_title)
    Progress.set_label(progress_text)

    def hook(data):
        # Forward the downloader's percent_complete to the progress bar.
        Progress.set_value(float(data['percent_complete']))

    dl = Downloader(
        url=url,
        filename=local_file,
        progress_hooks=[hook],
        content_length=file_size,
    )
    return dl.download()
# -*- coding: utf-8 -*-
from base import app
from utils import make_logger
from flask import Blueprint

local_fields_page = Blueprint("local_fields", __name__,
                              template_folder='templates',
                              static_folder="static")
logger = make_logger(local_fields_page)


@local_fields_page.context_processor
def body_class():
    # Blueprint-specific CSS body class for the templates.
    return {'body_class': 'local_fields'}

import main

app.register_blueprint(local_fields_page, url_prefix="/LocalField")
keypress = pg.key.get_pressed() if sum(keypress) > 0: key_name = pg.key.name(keypress.index(1)) log.debug("key pressed: <{}>".format(key_name)) # Pause, play if keypress[pg.K_SPACE]: pdb.set_trace() # Quit elif keypress[pg.K_ESCAPE]: log.debug("quitting") sys.exit(1) # Move time forward spc.next_time() # Draw images screen.fill(BLACK) spc.draw(screen) pg.display.flip() # Play sounds spc.set_volumes() if __name__ == "__main__": log = utils.make_logger('solar-system') test(log) main(log)
import tarfile from base import app, getDBConnection, fmtdatetime from flask import render_template, request, abort, Blueprint, url_for from flaskext.login import login_required, current_user from gridfs import GridFS from os import path from pymongo.objectid import ObjectId from urlparse import urlparse from urllib import urlopen from users import admin_required upload_page = Blueprint("upload", __name__, template_folder='templates') import utils logging = utils.make_logger(upload_page) # blueprint specific definition of the body_class variable @upload_page.context_processor def body_class(): return { 'body_class' : 'upload' } def get_bread(): return [("Upload", url_for(".index")) ] @upload_page.route("/") @login_required def index(): related_to = ""; if request.values.has_key('related_to'): related_to = request.values['related_to']
# make output dir output_dir = opt.output_dir if os.path.exists(output_dir): raise KeyError("Existing path: ", output_dir) os.makedirs(output_dir) # copy codes and config file files = list_dir_recursively_with_ignore('.', ignores=['diagrams', 'configs']) files = [(f[0], os.path.join(output_dir, "src", f[1])) for f in files] copy_files_and_create_dirs(files) shutil.copy2(args.config, output_dir) # logger logger = make_logger("project", opt.output_dir, 'log') # device if opt.device == 'cuda': os.environ['CUDA_VISIBLE_DEVICES'] = opt.device_id num_gpus = len(opt.device_id.split(',')) logger.info("Using {} GPUs.".format(num_gpus)) logger.info("Training on {}.\n".format(torch.cuda.get_device_name(0))) cudnn.benchmark = True device = torch.device(opt.device) # create the dataset for training dataset = make_dataset(opt.dataset) # init the network style_gan = StyleGAN(structure=opt.structure,
parser.add_argument('--image-size', nargs='+', type=int, default=[256, 512]) parser.add_argument('--lr', type=float, default=1) parser.add_argument('--n_val', type=int, default=512) parser.add_argument('--output-dir', default='logs') parser.add_argument('--clip-grad', type=bool, default=True) parser.add_argument('--filters', nargs='+', type=int, default=[64, 128, 256, 512, 1024]) parser.add_argument('--patience', type=int, default=6) opt = parser.parse_args() logger = make_logger(opt.model + '_re', opt.output_dir) logger.info(opt) device = torch.device('cuda' if torch.cuda.is_available() else 'cpu') logger.info(f'device: {device}') dataset = CarvanaDataset('~/data/datasets/carvana/train', '~/data/datasets/carvana/train_masks', transforms=CarvanaDatasetTransforms( opt.image_size)) # torch.manual_seed(0) n_train = len(dataset) - opt.n_val train_dataset, val_dataset = random_split( dataset, [n_train, opt.n_val], generator=torch.Generator().manual_seed(0))
import logging import random import gym import numpy as np import tensorflow as tf from agent import Agent from utils import save_args, make_logger LOGGER = make_logger('./results/logs.txt', 'info') def experiment(config): """ A function that runs an experiment. args config (dict) hyperparameters and experiment setup """ with tf.Session() as sess: seed = config.pop('seed') if seed: seed = int(seed) random.seed(seed) tf.set_random_seed(seed) np.random.seed(seed) env_id = config.pop('env_id')
from base import * from flask import Flask, session, g, render_template, url_for, request, redirect, make_response, abort from sage.all import * import tempfile, os import pymongo from Lfunction import * import LfunctionComp import LfunctionPlot from utils import to_dict, make_logger from Lfunctionutilities import lfuncDStex, lfuncEPtex, lfuncFEtex logger = make_logger("LF") ##import upload2Db.py ########################################################################### # Route functions ########################################################################### @app.route("/L/") @app.route("/L/<arg1>/") # arg1 is EllipticCurve, ModularForm, Character, etc @app.route("/L/<arg1>/<arg2>/") # arg2 is field @app.route("/L/<arg1>/<arg2>/<arg3>/") #arg3 is label @app.route("/L/<arg1>/<arg2>/<arg3>/<arg4>/") @app.route("/L/<arg1>/<arg2>/<arg3>/<arg4>/<arg5>/") @app.route("/L/<arg1>/<arg2>/<arg3>/<arg4>/<arg5>/<arg6>/") @app.route("/L/<arg1>/<arg2>/<arg3>/<arg4>/<arg5>/<arg6>/<arg7>/") @app.route("/L/<arg1>/<arg2>/<arg3>/<arg4>/<arg5>/<arg6>/<arg7>/<arg8>/") @app.route("/L/<arg1>/<arg2>/<arg3>/<arg4>/<arg5>/<arg6>/<arg7>/<arg8>/<arg9>/")
#!/usr/bin/env python import sys from server import Server from utils import make_logger import uuid Log = make_logger() """ Checking if the external IP is set """ if len(sys.argv) > 2: ext_host = sys.argv[2] else: ext_host = 'localhost' """ And the internal one to which we bind the socket. """ if len(sys.argv) > 1: pikahost = sys.argv[1] else: pikahost = 'localhost' """ This is a hacky way of setting up "unique" server names without having to manually do it """ name = "server_" + str(uuid.uuid1()) s = Server(pikahost, name, ext_host) try: s.run() except KeyboardInterrupt: print "Control-c, shutting down.."
from utils import make_logger, logging, truncfile debuglog = 'debug.log' frmt = '%(name)s:%(levelno)s:%(lineno)s:%(message)s' truncfile(debuglog) # for logging things in edf edflog = make_logger('edf_log', frmt=frmt, fpath=debuglog, stdout=True, # lvl=logging.DEBUG, # use one of the following to easily set the stdout log level # lvl=logging.INFO, lvl=logging.WARNING, # lvl=logging.ERROR, # lvl=logging.CRITICAL, flvl = logging.WARNING ) # for logging things in main mainlog = make_logger('main_log', frmt=frmt, fpath=debuglog, stdout=True, # lvl=logging.DEBUG, # use one of the following to easily set the stdout log level # lvl=logging.INFO, lvl=logging.WARNING, # lvl=logging.ERROR, # lvl=logging.CRITICAL,