def __init__(self, session):
    """Store the DB session and initialize logging.

    @type session: SQLAlchemy session
    @param session: Open session used for subsequent database access.
    """
    DoesLogging.__init__(self)
    self.session = session
    self.log.info('init')
    # Cache of table mappings; starts empty (presumably filled lazily
    # by other methods -- confirm against the rest of the class).
    self._table_map = {}
    # NOTE(review): removed dead trailing `pass` -- it followed real
    # statements and had no effect.
def __init__(self, connString=None, expand_workflow=True):
    """Connect to the Stampede database.

    @type connString: string
    @param connString: SQLAlchemy connection string - REQUIRED
    @type expand_workflow: boolean
    @param expand_workflow: Accepted for caller compatibility; not
        referenced in this initializer (TODO: confirm use elsewhere).
    @raise ValueError: if connString is None
    @raise StampedeDBNotFoundError: if the database cannot be opened
    """
    if connString is None:
        raise ValueError("connString is required")
    DoesLogging.__init__(self)
    try:
        SQLAlchemyInit.__init__(self, connString, initializeToPegasusDB)
    except exc.OperationalError as e:
        # `except X as e` replaces the Py2-only comma syntax; valid on
        # Python 2.6+ and Python 3.
        self.log.error('init', msg='%s' % ErrorStrings.get_init_error(e))
        raise StampedeDBNotFoundError
def __init__(self, connString=None, expand_workflow=True):
    """Connect to the Stampede database.

    @type connString: string
    @param connString: SQLAlchemy connection string - REQUIRED
    @type expand_workflow: boolean
    @param expand_workflow: Accepted for caller compatibility; not
        referenced in this initializer (TODO: confirm use elsewhere).
    @raise ValueError: if connString is None
    @raise StampedeDBNotFoundError: if the database cannot be opened
    """
    if connString is None:
        raise ValueError("connString is required")
    DoesLogging.__init__(self)
    try:
        SQLAlchemyInit.__init__(self, connString, initializeToPegasusDB)
    except exceptions.OperationalError as e:
        # `except X as e` replaces the Py2-only comma syntax; valid on
        # Python 2.6+ and Python 3.
        self.log.error('init', msg='%s' % ErrorStrings.get_init_error(e))
        raise StampedeDBNotFoundError
def __init__(self, connString=None):
    """Connect to the master/dashboard database.

    @type connString: string
    @param connString: SQLAlchemy connection string - REQUIRED
    @raise ValueError: if connString is None
    @raise MasterDBNotFoundError: if the database cannot be opened
    """
    if connString is None:
        raise ValueError('Connection string is required')
    DoesLogging.__init__(self)
    try:
        SQLAlchemyInit.__init__(self, connString, initializeToDashboardDB)
    except exc.OperationalError as e:
        # `except X as e` replaces the Py2-only comma syntax; valid on
        # Python 2.6+ and Python 3.
        self.log.error('init', msg='%s' % ErrorStrings.get_init_error(e))
        raise MasterDBNotFoundError
def __init__(self, connString=None, wf_id=None, wf_uuid=None):
    """Connect to the Stampede database.

    @type connString: string
    @param connString: SQLAlchemy connection string - REQUIRED
    @param wf_id: Accepted for caller compatibility; not referenced in
        this initializer (TODO: confirm use elsewhere).
    @param wf_uuid: Accepted for caller compatibility; not referenced in
        this initializer (TODO: confirm use elsewhere).
    @raise ValueError: if connString is None
    @raise StampedeDBNotFoundError: if the database cannot be opened
    """
    if connString is None:
        raise ValueError('Connection string is required')
    DoesLogging.__init__(self)
    try:
        SQLAlchemyInit.__init__(self, connString, initializeToPegasusDB)
    except exc.OperationalError as e:
        # `except X as e` replaces the Py2-only comma syntax; valid on
        # Python 2.6+ and Python 3.
        self.log.error('init', msg='%s' % ErrorStrings.get_init_error(e))
        raise StampedeDBNotFoundError
def __init__(self, connString=None, mysql_engine=None, **kw):
    """Connect to the Stampede database with per-dialect options.

    @type connString: string
    @param connString: SQLAlchemy connection string - REQUIRED
    @type mysql_engine: string
    @param mysql_engine: MySQL storage engine name; only applied when
        the connection string's dialect is 'mysql'.
    @param kw: Accepted for caller compatibility; not referenced in
        this initializer (TODO: confirm use elsewhere).
    @raise ValueError: if connString is None
    @raise RuntimeError: if the database cannot be opened
    """
    DoesLogging.__init__(self)
    if connString is None:
        raise ValueError("connString is required")
    # Build per-dialect keyword arguments for schema initialization.
    _kw = {}
    dialect = dsn_dialect(connString)
    _kw[dialect] = {}
    if dialect == 'mysql' and mysql_engine is not None:
        _kw[dialect]['mysql_engine'] = mysql_engine
    try:
        SQLAlchemyInit.__init__(self, connString, initializeToPegasusDB, **_kw)
    except exc.OperationalError as e:
        # `except X as e` replaces the Py2-only comma syntax; valid on
        # Python 2.6+ and Python 3.
        self.log.error('init', msg='%s' % ErrorStrings.get_init_error(e))
        raise RuntimeError
def __init__(self, connString, wf_uuid):
    """Prepare the object for removing one workflow's data.

    @type connString: string
    @param connString: SQLAlchemy connection string - REQUIRED
    @type wf_uuid: string
    @param wf_uuid: The wf_uuid string of the workflow to remove along
        with associated data from the database
    """
    DoesLogging.__init__(self)
    self.log.info('init.start')
    # Open the dashboard DB via the shared SQLAlchemy setup.
    SQLAlchemyInit.__init__(self, connString, initializeToDashboardDB)
    self._wf_uuid = wf_uuid
    self.log.info('init.end')
def __init__(self, connString=None, mysql_engine=None, **kw):
    """Connect to the Stampede database with per-dialect options.

    @type connString: string
    @param connString: SQLAlchemy connection string - REQUIRED
    @type mysql_engine: string
    @param mysql_engine: MySQL storage engine name; only applied when
        the connection string's dialect is 'mysql'.
    @param kw: Accepted for caller compatibility; not referenced in
        this initializer (TODO: confirm use elsewhere).
    @raise ValueError: if connString is None
    @raise RuntimeError: if the database cannot be opened
    """
    DoesLogging.__init__(self)
    if connString is None:
        raise ValueError("connString is required")
    # Build per-dialect keyword arguments for schema initialization.
    _kw = {}
    dialect = dsn_dialect(connString)
    _kw[dialect] = {}
    if dialect == 'mysql' and mysql_engine is not None:
        _kw[dialect]['mysql_engine'] = mysql_engine
    try:
        SQLAlchemyInit.__init__(self, connString, initializeToPegasusDB, **_kw)
    except exceptions.OperationalError as e:
        # `except X as e` replaces the Py2-only comma syntax; valid on
        # Python 2.6+ and Python 3.
        self.log.error('init', msg='%s' % ErrorStrings.get_init_error(e))
        raise RuntimeError
def __init__(self, add_hash="no", _validate=False, schemata=None): """Will be overridden by subclasses to take parameters specific to their function. """ DoesLogging.__init__(self) self._do_preprocess = False # may get set to True, below self.last_flush = time.time() self._validate = _validate # Parameter: add_hash try: self._add_hash = util.as_bool(add_hash) self._do_preprocess = True except ValueError, err: self.log.error("parameter.error", name="add_hash", value=add_hash, msg=err) self._add_hash = False
def __init__(self):
    """Initialize logging only; this class keeps no other state here."""
    DoesLogging.__init__(self)
    # NOTE(review): removed dead trailing `pass` -- it followed a real
    # statement and had no effect.
def __init__(self, input_file, fullname='unknown', unparsed_file=None,
             parse_date=True, add_hash='no', **kw):
    """Initialize base parser state.

    Parameters:
      input_file - File object (must support readline)
      fullname - Fully qualified logger name (matches 'qualname' in the
          logging config).
      unparsed_file - File object receiving records that raised a parse
          exception.
      parse_date - Whether to parse the ISO date to a number or keep it
          as a string.
      **kw - Extra keyword/value pairs appended to each line of the
          log; a parsed result's own value for the same keyword wins,
          except when the parser returns a raw string (e.g. 'bp'),
          where duplicates are not checked to avoid O(N*M) scans.
    """
    if not input_file:
        raise ValueError("input file cannot be empty")
    DoesLogging.__init__(self, fullname)
    # common parameters
    self._add_hash = self.boolParam(add_hash)
    # Wrap the input so whole lines can be read with buffering.
    self._infile = nlreadline.BufferedReadline(input_file)
    self._fake_file = not hasattr(input_file, 'fileno')
    if self._fake_file:
        # not a real file
        self._infile_rlist = ()
    else:
        self._infile_rlist = (input_file.fileno(),)  # used in read_line
    try:
        self._offs = self._infile.tell()
    except IOError:
        self._offs = 0
    self._prev_len = 0
    self._saved_len = 0
    self._saved = []
    self._name = fullname
    self._ufile = unparsed_file
    self._header_values = {}
    self._parser = NLSimpleParser(parse_date=parse_date)
    # Constant name/value pairs added to each record.
    self._const_nvp = {}
    # add GUID from env, if present
    guid = nlapi.getGuid(create=False)
    if guid:
        self._const_nvp['guid'] = guid
    # user-provided values may override the guid
    self._const_nvp.update(kw)
    # Cache a string form; empty string when there are no constants.
    pairs = ["%s=%s" % item for item in self._const_nvp.items()]
    self._const_nvp_str = ' '.join(pairs)
    self.parse_date = parse_date
def __init__(self, verify=False, parse_date=True, **kw):
    """Set up logging and record the verify/parse_date flags.

    Extra keyword arguments are accepted and ignored here.
    """
    DoesLogging.__init__(self)
    self.verify = verify
    self.parse_date = parse_date
def __init__(self, input_file, fullname='unknown', unparsed_file=None,
             parse_date=True, add_hash='no', **kw):
    """Initialize base parser.

    Parameters:
      input_file - File object (must support readline)
      fullname - Fully qualified logger name (matches 'qualname' in the
          logging config).
      unparsed_file - File object receiving records that raised a parse
          exception.
      parse_date - Whether to parse the ISO date to a number or keep it
          as a string.
      **kw - Extra keyword/value pairs appended to each line of the
          log; a parsed result's own value for the same keyword wins,
          except when the parser returns a raw string (e.g. 'bp'),
          where duplicates are not checked to avoid O(N*M) scans.
    """
    if not input_file:
        raise ValueError("input file cannot be empty")
    DoesLogging.__init__(self, fullname)
    # common parameters
    self._add_hash = self.boolParam(add_hash)
    # rest of parameters
    self._infile = nlreadline.BufferedReadline(input_file)
    if hasattr(input_file, 'fileno'):
        self._fake_file = False
        self._infile_rlist = (input_file.fileno(),)  # used in read_line
    else:
        # not a real file
        self._fake_file = True
        self._infile_rlist = ()
    try:
        self._offs = self._infile.tell()
    except IOError:
        self._offs = 0
    self._prev_len, self._saved_len = 0, 0
    self._saved = []
    self._name = fullname
    self._ufile = unparsed_file
    self._header_values = {}
    self._parser = NLSimpleParser(parse_date=parse_date)
    # Constant name/value pairs added to each record.
    self._const_nvp = {}
    # add GUID from env, if present
    guid = nlapi.getGuid(create=False)
    if guid:
        self._const_nvp['guid'] = guid
    # user-provided values may override the guid
    self._const_nvp.update(kw)
    # Cache a string form; empty when there are no constants.
    self._const_nvp_str = ' '.join(
        "%s=%s" % (key, val) for key, val in self._const_nvp.items())
    self.parse_date = parse_date