def __init__(self, day, parent=None):
    """Build the timeline widget for *day*.

    Wires up the signal container, a per-class logger, resets every
    time-limit range, creates the tool actions, stores the day, and
    finally constructs the UI.
    """
    super().__init__(parent)
    self.signals = TimeLineWidget.Signals()
    # Logger is namespaced module.Class; records stay local (no
    # propagation to ancestor loggers).
    self.logger = getLogger(f"{__name__}.{self.__class__.__name__}")
    self.logger.propagate = False
    # Clear all three time-limit ranges before anything is drawn.
    for clear_limits in (self.setFSTimeLimits,
                         self.setBlockTimeLimits,
                         self.setSelTimeLimits):
        clear_limits(None)
    self.makeTools()
    self.setDay(day)
    self.setupUi()
def __init__(self, filepath):
    """
    @summary: Constructor
    @param filepath: Path for input and output datafiles
    """
    self.fullpath = filepath
    self.gbifRes = GBIFCodes()
    # Open file handles are collected here so they can be closed later.
    self._files = []
    # Log file lives inside the data directory, named after this module.
    logname, _ = os.path.splitext(os.path.basename(__file__))
    logfname = os.path.join(filepath, logname + '.log')
    # Remove a stale log from a previous run.  EAFP instead of
    # exists()/remove() closes the TOCTOU race between the check and
    # the deletion.
    try:
        os.remove(logfname)
    except FileNotFoundError:
        pass
    # NOTE(review): getLogger here takes (name, filename) — a
    # project-local helper, not logging.getLogger.
    self._log = getLogger(logname, logfname)
def __init__(self, interpretedFname, metaFname, outFname):
    """
    @summary: Constructor
    @param interpretedFname: Full filename containing records from the GBIF
        interpreted occurrence table
    @param metaFname: Full filename containing metadata for all data files
        in the Darwin Core GBIF Occurrence download:
        https://www.gbif.org/occurrence/search
    @param outFname: Full filename for the output BISON CSV file
    """
    # self.gbifRes = GBIFCodes()
    self._files = []

    # Interpreted GBIF occurrence file: handle + CSV reader
    self.interpFname = interpretedFname
    self._if = None
    self._files.append(self._if)
    self._iCsvrdr = None

    # GBIF metadata file describing the occurrence download files
    self._metaFname = metaFname
    self.fldMeta = None

    # Output BISON occurrence file: handle + CSV writer
    self.outFname = outFname
    self._outf = None
    self._files.append(self._outf)
    self._outWriter = None

    out_dir, out_basename = os.path.split(outFname)
    out_base = os.path.splitext(out_basename)[0]

    # Input for canonical-name lookup: handle, writer, and cache dict
    self.name4LookupFname = os.path.join(out_dir, 'nameUUIDForLookup.csv')
    self._name4lookupf = None
    self._files.append(self._name4lookupf)
    self._name4lookupWriter = None
    self._name4lookup = {}

    logname = os.path.splitext(os.path.basename(__file__))[0]
    logfname = os.path.join(out_dir, out_base + '.log')
    if os.path.exists(logfname):
        # Never clobber an existing log: suffix with the current epoch second.
        import time
        logfname = os.path.join(
            out_dir, out_base + '.log.{}'.format(int(time.time())))
    # NOTE(review): getLogger takes (name, filename) — project-local helper.
    self._log = getLogger(logname, logfname)
def _setLogger(self, logfname):
    """Attach a logger named after this script, writing to *logfname*.

    NOTE(review): getLogger is called with (name, filename) — this is a
    project-local helper, not logging.getLogger; confirm its signature.
    """
    script_base = os.path.basename(__file__)
    scriptname = os.path.splitext(script_base)[0]
    self.log = getLogger(scriptname, logfname)
#!/usr/bin/env python import glob import re import os import time import numpy as np from image import Image import primitives import tools import unittest from astropy.io import fits from scipy import ndimage log = tools.getLogger('main') def buildcalibrations(filelist, lamlist, date="20160408", calibdir="calibrations/", order=3, lam1=1150, lam2=2400, R=25, genwavelengthsol=True, makehiresPSFlets=True, savehiresimages=True): """ """
import sys
import shutil
import time
import tarfile
import cPickle
import hashlib
import traceback
import scan
import mpi
import tools
import trajectory

# Module-level logger for the ensemble-dynamics driver.
logger = tools.getLogger('dyn')

# MPI is mandatory for ensemble simulation: import mpi4py when the
# wrapper reports MPI support, otherwise log fatally and abort.
if mpi.mpi:
    from mpi4py import MPI
else:
    logger.fatal('You can not run ensemble simulation without MPI!')
    sys.exit(1)

__author__ = "Beat Amrein"
__email__ = "*****@*****.**"

logger.debug('Starting...')

# Only the root rank announces startup (with wall-clock time and the
# MPI environment info from the wrapper).
if mpi.rank == mpi.root:
    print('Rank 0: Started @', time.time(), mpi.get_info())
def setLogLevel(self, level):
    """Set the verbosity of this object's logger.

    NOTE(review): getLogger is called as getLogger(logger_obj, level=...),
    which is not stdlib logging.getLogger — presumably a project helper
    that reconfigures the level of an existing logger; confirm.
    """
    getLogger(self.logger, level=level)
from filecmp import cmp as comparefiles
from platform import node as hostname
import gzip
import os
import subprocess
import shutil
import time
import analysis
import tools
from tools import File

__author__ = "Beat Amrein"
__email__ = "*****@*****.**"

# Module-level logger for the trajectory submodule of the dynamics driver.
logger = tools.getLogger('dyn.traj')

# This class is running a simulation.
# It's essentially my python implementation of submit.sh.
# RELEVANT FOR PYTHON - MPI Interaction:
# http://stackoverflow.com/questions/10590064/interact-with-a-mpi-binary-via-a-non-mpi-python-script
# TODO 1: Custom Exceptions / Handling
#      2: if hot-atoms in first 4 steps, restart with smaller stepsize
#      3: automatic mapping
#      4: compress/uncompress pdbfile
#      5: compress/uncompress topology
#      6: compress/uncompress input files
#      7: fastforward md.QdynPackage, when loaded
#      8: mutation starting from just a sequence