def Start(self):
    """Start the Windows service and wait until it leaves START_PENDING."""
    if self.started:
        cx_Logging.Info("Service %s already started.", self.name)
        return
    cx_Logging.Info("Starting service %s", self.name)
    win32service.StartService(self.handle, None)
    # poll the service control manager until startup has completed
    while self.state == win32service.SERVICE_START_PENDING:
        cx_Logging.Info(" waiting for service to start....")
        time.sleep(2)
def Stop(self):
    """Stop the Windows service and wait until it leaves STOP_PENDING."""
    if self.stopped:
        cx_Logging.Info("Service %s already stopped.", self.name)
        return
    cx_Logging.Info("Stopping service %s", self.name)
    win32service.ControlService(self.handle,
            win32service.SERVICE_CONTROL_STOP)
    # poll the service control manager until shutdown has completed
    while self.state == win32service.SERVICE_STOP_PENDING:
        cx_Logging.Info(" waiting for service to stop....")
        time.sleep(2)
def RemoveCachedRow(self, model, externalRow):
    """Drop the cached copy of externalRow for the given model, if any.

    The row is located by primary key; rows not present in the cache are
    ignored (with a log message).
    """
    pkAttrName, = model.pkAttrNames
    pkValue = getattr(externalRow, pkAttrName)
    cachedRow = self.GetCachedRowByPK(model, pkValue)
    if cachedRow is None:
        cx_Logging.Info("Ignoring row not cached for model %s (pk = %s)",
                model.__name__, pkValue)
        return
    cx_Logging.Info("Removing cached row for model %s (pk = %s)",
            model.__name__, pkValue)
    # remove from both indexes: the PK map and the per-model row list
    del self.rowsByPK[model][pkValue]
    self.rowsByModel[model].remove(cachedRow)
def run(self):
    """Start the embedded WSGI web server; log any startup failure."""
    cx_Logging.Info("Running the web server")
    try:
        self.server = wsgi.Server((address, port),
                StaticFilesHandler(WSGIHandler()))
        self.server.start()
    except Exception as e:
        # BUG FIX: cx_Logging has no ERROR function -- the API is Error()
        # (the original raised AttributeError, hiding the real exception).
        # Also pass the argument lazily, matching the logging style used
        # elsewhere in this file.
        cx_Logging.Error("Exception: %s", e)
def Open(self):
    """Open the named pipe.

    As the server, create the pipe and block until a client connects; as a
    client, connect to an existing pipe, possibly on a remote machine.
    """
    # UNC pipe name; "." means the local machine
    pipeName = r"\\%s\pipe\%s" % (self.serverName or ".", self.name)
    if self.asServer:
        cx_Logging.Info("Creating pipe (as server): %s", self.name)
        # DACL set with a NULL ACL: grants access to everyone --
        # NOTE(review): presumably intentional so any client can connect;
        # confirm this is acceptable for the deployment.
        sa = pywintypes.SECURITY_ATTRIBUTES()
        sa.SetSecurityDescriptorDacl(1, None, 0)
        self.handle = win32pipe.CreateNamedPipe(
                pipeName,
                win32pipe.PIPE_ACCESS_DUPLEX,
                win32pipe.PIPE_TYPE_MESSAGE | win32pipe.PIPE_WAIT,
                self.maxInstances, self.maxSize, self.maxSize,
                self.timeout, sa)
        # blocks until a client connects to the pipe
        win32pipe.ConnectNamedPipe(self.handle)
    else:
        cx_Logging.Info("Connecting to pipe (as client): %s on %s",
                self.name, self.serverName or ".")
        self.handle = win32file.CreateFile(
                pipeName,
                win32file.GENERIC_READ | win32file.GENERIC_WRITE,
                0, None, win32file.OPEN_EXISTING, 0, None)
def PopulateRecoverStartSequence(self, database):
    """Record the sequence number of the current online redo log as the
    starting point for recovery."""
    cursor = database.connection.cursor()
    cursor.execute("""
            select sequence#
            from v$log
            where status = 'CURRENT'""")
    row = cursor.fetchone()
    self.recoverStartSequence = int(row[0])
    cx_Logging.Info("Log sequence is currently %s",
            self.recoverStartSequence)
def GetCachedRowByPK(self, model, pkValue):
    """Return the cached row for the model with the given primary key, or
    None if not cached; builds the per-model PK index on first use."""
    pkIndex = self.rowsByPK.get(model)
    if pkIndex is None:
        cx_Logging.Info("Getting cached rows by PK for model %s",
                model.__name__)
        pkAttrName, = model.pkAttrNames
        pkIndex = {}
        for cachedRow in self.GetCachedRows(model):
            pkIndex[getattr(cachedRow, pkAttrName)] = cachedRow
        self.rowsByPK[model] = pkIndex
    return pkIndex.get(pkValue)
def Remove(path, log=True):
    """Remove a file or a directory tree recursively."""
    if os.path.isdir(path):
        RemoveTree(path, log)
        return
    if log:
        cx_Logging.Info("removing file %s...", path)
    if sys.platform == "win32":
        # clear any read-only attribute so the removal cannot fail
        os.chmod(path, 0o777)
    os.remove(path)
def UpdateCachedRow(self, model, externalRow, contextItem=None):
    """Refresh (or create) the cached row matching externalRow's primary key.

    Attribute values are copied from externalRow, falling back to
    contextItem for attributes externalRow does not provide.
    """
    pkAttrName, = model.pkAttrNames
    pkValue = getattr(externalRow, pkAttrName)
    cachedRow = self.GetCachedRowByPK(model, pkValue)
    if cachedRow is None:
        cx_Logging.Info("Creating cached row for model %s (pk = %s)",
                model.__name__, pkValue)
        cachedRow = model.New()
        self.rowsByModel[model].append(cachedRow)
        self.rowsByPK[model][pkValue] = cachedRow
    else:
        cx_Logging.Info("Updating cached row for model %s (pk = %s)",
                model.__name__, pkValue)
    # externalRow takes precedence over contextItem for each attribute
    for attrName in cachedRow.attrNames + cachedRow.extraAttrNames:
        for valueSource in (externalRow, contextItem):
            if hasattr(valueSource, attrName):
                setattr(cachedRow, attrName, getattr(valueSource, attrName))
                break
def CopyFile(source, target, bufferSize=16 * 1024, log=True):
    """Copy the source file to the target in bufferSize chunks.

    An existing target is removed first (after the source has been opened,
    so a missing source leaves the target untouched).

    FIX: the original never closed either file object, leaking descriptors
    and risking unflushed buffered writes on the target; both files are now
    managed with context managers.
    """
    if log:
        cx_Logging.Info("copying %s to %s...", source, target)
    with open(source, "rb") as sourceFile:
        if os.path.exists(target):
            Remove(target, log=log)
        with open(target, "wb") as targetFile:
            while True:
                buffer = sourceFile.read(bufferSize)
                if not buffer:
                    break
                targetFile.write(buffer)
def RemoveTree(path, log=True):
    """Recursively remove a directory tree."""
    # cannot remove a tree the process is standing in; move to the root
    # first if the current directory lies inside it (or is unknown)
    try:
        currentDir = os.getcwd()
    except OSError:
        currentDir = None
    if currentDir is None or currentDir.startswith(path):
        os.chdir("/")
    if log:
        cx_Logging.Info("removing directory %s...", path)
    for entryName in os.listdir(path):
        Remove(os.path.join(path, entryName), log=log)
    if sys.platform == "win32":
        # clear any read-only attribute so the removal cannot fail
        os.chmod(path, 0o777)
    os.rmdir(path)
def Backup(self, offline):
    """Back up the database.

    When self.isTarFile, the database files are staged in a temporary
    directory beside the archive, added to a (possibly compressed) tar
    file, and the staging directory is removed; otherwise they are written
    directly into the directory named by self.backupName.
    """
    if self.isTarFile:
        # stage the backup files next to the final archive location
        baseDirName = os.path.dirname(self.backupName)
        dirName = tempfile.mkdtemp(prefix="BackupDB_", dir=baseDirName)
    else:
        dirName = self.backupName
    if not os.path.exists(dirName):
        os.makedirs(dirName)
    backupFileNames = self.__BackupDatabaseToDir(dirName, offline)
    if self.isTarFile:
        # stream mode ("w|...") with the configured compression
        mode = "w|%s" % self.compressionMode
        backupFile = tarfile.open(self.backupName, mode)
        for fileName in backupFileNames:
            # archive paths are made relative to the staging directory
            archiveName = os.path.join(self.baseArchiveName,
                    fileName[len(dirName) + 1:])
            cx_Logging.Info("adding %s to archive", archiveName)
            backupFile.add(fileName, archiveName)
        backupFile.close()
        # the staging directory is no longer needed once archived
        cx_ShellUtils.Remove(dirName)
def DatabaseBySid(self, sid, ignoreIfMissing=False):
    """Return a Database object for the given SID.

    With a service manager (Windows), the Oracle home and start mode are
    derived from the registered service; otherwise they are read from the
    per-SID environment configuration file.  When ignoreIfMissing is true
    and the configuration file does not exist, nothing is returned
    (implicitly None).

    NOTE(review): this snippet was whitespace-mangled; the placement of the
    "return database" relative to the inner "if" was reconstructed -- the
    database appears to be returned even when no service was found, with
    oracleHome/startMode left unset in that case.  Confirm against the
    original file.
    """
    if self.serviceManager is not None:
        database = Database.Database(self, sid)
        if database.service is not None:
            # first token of the service command line is the executable;
            # Oracle home is two directories above it
            binPath = database.service.binaryPathName.split()[0]
            database.oracleHome = os.path.dirname(os.path.dirname(binPath))
            if database.service.manual:
                database.startMode = "Manual"
            else:
                database.startMode = "Auto"
        return database
    else:
        fileName = self.__EnvironmentConfig(sid)
        if not ignoreIfMissing or os.path.exists(fileName):
            cx_Logging.Info("Reading environment from %s", fileName)
            iniFile = cx_IniFile.IniFile()
            iniFile.Read(fileName)
            oracleHome = iniFile.GetValue("Environment", "OracleHome")
            startMode = iniFile.GetValue("Environment", "StartMode")
            return Database.Database(self, sid, oracleHome, startMode)
def InsertRowInDatabase(self, transaction, row):
    """Insert the new year row and, when a prior year exists, queue copies
    of that year's causes and donators into the transaction.

    NOTE(review): "select max(Year)" always returns one row; when no prior
    year exists the fetched value is presumably NULL/None -- confirm that
    the is-not-None check on the whole row (rather than on the value) is
    intended.
    """
    cursor = self.dataSource.connection.cursor()
    # find the most recent year before the one being inserted
    cursor.execute("""
            select max(Year)
            from Years
            where Year < ?""",
            row.year)
    fetchedRow = cursor.fetchone()
    super(DataSet, self).InsertRowInDatabase(transaction, row)
    if fetchedRow is not None:
        copyFromYear, = fetchedRow
        cx_Logging.Info("Copying causes and donators from year %s",
                copyFromYear)
        # duplicate each cause of the prior year into the new year
        for cause in Models.Causes.GetRows(self.dataSource,
                year=copyFromYear):
            setValues = dict(
                    year=row.year,
                    description=cause.description,
                    deductible=cause.deductible,
                    reported=cause.reported,
                    notes=cause.notes,
                    donationAccountCode=cause.donationAccountCode,
                    looseCashAccountCode=cause.looseCashAccountCode)
            transaction.AddItem(tableName=Models.Causes.tableName,
                    pkAttrName=Models.Causes.pkAttrNames[0],
                    pkSequenceName="CauseId_s",
                    setValues=setValues)
        # duplicate each donator of the prior year into the new year
        for donator in Models.Donators.GetRows(self.dataSource,
                year=copyFromYear):
            setValues = dict(year=row.year,
                    surname=donator.surname,
                    givenNames=donator.givenNames,
                    assignedNumber=donator.assignedNumber,
                    addressLine1=donator.addressLine1,
                    addressLine2=donator.addressLine2,
                    addressLine3=donator.addressLine3)
            transaction.AddItem(tableName=Models.Donators.tableName,
                    pkAttrName=Models.Donators.pkAttrNames[0],
                    pkSequenceName="DonatorId_s",
                    setValues=setValues)
def PopulateRecoverChangeNumber(self, database):
    """Force a log switch, record the change number to recover to, and
    return the names of the archived logs needed for recovery."""
    cursor = database.connection.cursor()
    cursor.execute("alter system archive log current")
    cursor.execute("""
            select name, next_change#
            from v$archived_log
            where sequence# >= :sequenceNumber
            and resetlogs_change# = (
                select resetlogs_change#
                from v$database
            )
            order by sequence#""",
            sequenceNumber=self.recoverStartSequence)
    rows = cursor.fetchall()
    archivedLogs = [logName for logName, nextChangeNumber in rows]
    if rows:
        # the last (highest-sequence) log's next_change# bounds recovery
        self.recoverChangeNumber = rows[-1][1]
    cx_Logging.Info("Recover change number is %s", self.recoverChangeNumber)
    return archivedLogs
def __ExtractFile(self, tarInfo, targetName):
    """Extract a single member of the backup archive to targetName.

    FIX: the Python 2 builtin file() no longer exists in Python 3; open()
    behaves identically in both versions.  Also close both file objects
    (the original leaked them), ensuring the extracted data is flushed.
    """
    cx_Logging.Info("extracting %s as %s", tarInfo.name, targetName)
    sourceFile = self.backupFile.extractfile(tarInfo)
    try:
        with open(targetName, "wb") as targetFile:
            # 1 MB chunks
            shutil.copyfileobj(sourceFile, targetFile, 1048576)
    finally:
        sourceFile.close()
def Stop(self):
    """Ask the Pulse2 agent to shut down by setting the stop event."""
    cx_Logging.Info("Pulse2 Agent stopping...")
    logging.getLogger().info("Pulse2 Agent stopped.")
    self.stopEvent.Set()
def Run(self):
    """Run the Pulse2 agent: start the dispatcher main loop, then block
    until the stop event is set."""
    cx_Logging.Info("Pulse2 Agent starting...")
    self.dp.mainloop()
    logging.getLogger().info("Pulse2 Agent started.")
    self.stopEvent.Wait()
def on_quit(sysTrayIcon):
    # Tray-icon "quit" callback: bring down the embedded web server.
    cx_Logging.Info("Stopping the web server")
    webserver.server.stop()
# NOTE(review): this snippet is truncated -- the second subprocess.call(...)
# is cut off before its closing arguments/parenthesis, so it is preserved
# verbatim rather than reformatted.  It probes the bundled PostgreSQL server
# with "pg_ctl status" and, when the status call reports it is not running,
# starts it with "pg_ctl -w start", suppressing all console I/O and window
# creation.
# Check if the database is running. If not, start it. status = subprocess.call( [ os.path.join(settings.FREPPLE_HOME, "..", "pgsql", "bin", "pg_ctl.exe"), "--pgdata", os.path.join(settings.FREPPLE_LOGDIR, "database"), "--silent", "status", ], stdin=subprocess.DEVNULL, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL, creationflags=CREATE_NO_WINDOW, ) if status: cx_Logging.Info("Starting the PostgreSQL database") subprocess.call( [ os.path.join( settings.FREPPLE_HOME, "..", "pgsql", "bin", "pg_ctl.exe" ), "--pgdata", os.path.join(settings.FREPPLE_LOGDIR, "database"), "--log", os.path.join(settings.FREPPLE_LOGDIR, "database", "server.log"), "-w", # Wait till it's up "start", ], stdin=subprocess.DEVNULL, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL,
options = parser.parse_args()

# Using the included postgres database?
pgctl = os.path.join(settings.FREPPLE_HOME, '..', 'pgsql', 'bin',
                     'pg_ctl.exe')
if os.path.exists(pgctl):
    # Check if the database is running. If not, start it.
    status = call(
        [
            pgctl,
            "--pgdata", os.path.join(settings.FREPPLE_LOGDIR, 'database'),
            "--silent",
            "status"
        ],
        stdin=DEVNULL, stdout=DEVNULL, stderr=DEVNULL,
        creationflags=CREATE_NO_WINDOW
    )
    if status:
        cx_Logging.Info("Starting the PostgreSQL database")
        call(
            [
                pgctl,
                "--pgdata",
                os.path.join(settings.FREPPLE_LOGDIR, 'database'),
                "--log",
                os.path.join(settings.FREPPLE_LOGDIR, 'database',
                             'server.log'),
                "-w",  # Wait till it's up
                "start"
            ],
            stdin=DEVNULL, stdout=DEVNULL, stderr=DEVNULL,
            creationflags=CREATE_NO_WINDOW
        )
cx_Logging.Info("Starting the web server")

# Synchronize the scenario table with the settings
from freppledb.common.models import Scenario
def GetCachedRows(self, model, refresh=False):
    """Return the cached rows for the model, loading them from the data
    source on first access or when a refresh is requested."""
    cachedRows = self.rowsByModel.get(model)
    if cachedRows is not None and not refresh:
        return cachedRows
    cx_Logging.Info("Getting cached rows for model %s", model.__name__)
    cachedRows = model.GetRows(self.dataSource)
    self.rowsByModel[model] = cachedRows
    return cachedRows
def __init__(self):
    cx_Logging.Info("creating handler instance")
    # event used to signal the handler's main loop to terminate
    self.stopEvent = cx_Threads.Event()
def OnThreadStart(self):
    """Called when the thread is started. Override in child classes."""
    # FIX: pass the name as a lazy argument (cx_Logging performs the
    # %-formatting) instead of pre-formatting the string -- consistent with
    # the lazy style used throughout this code, and safe even if the name
    # itself contains a % sequence.
    cx_Logging.Info("thread %r starting", self.name)
def OnThreadEnd(self):
    """Called when the thread is ended. Override in child classes."""
    # FIX: pass the name as a lazy argument (cx_Logging performs the
    # %-formatting) instead of pre-formatting the string -- consistent with
    # the lazy style used throughout this code, and safe even if the name
    # itself contains a % sequence.
    cx_Logging.Info("thread %r ending", self.name)
def initialize(self, config_file_name):
    # Service initialization hook; currently only logs the configuration
    # file name it was given.
    cx_Logging.Info("initializing: config file name is %r", config_file_name)
def run(self):
    # Service main loop: block until the stop event is set (see stop()).
    cx_Logging.Info("running service....")
    self.stopEvent.Wait()
def stop(self):
    # Stop hook: wake the main loop by setting the shared stop event.
    cx_Logging.Info("stopping service...")
    self.stopEvent.Set()