def __init__(self, action, job_key=None):
    self._conf_mongodb = mongodb(MONGODB_CONF)
    self.c_job_activity = self._conf_mongodb.c_job_activity
    self.action = action
    self.objectid = job_key
    if job_key:
        # get previously init'd job...
        doc = self.c_job_activity.find_one(self._base_spec)
        if not doc:
            raise ValueError("Invalid job_key id: %s" % job_key)
        self.args = doc['args']
        self.created = doc['created']
        self.active = doc['active']
        self.error = doc['error']
    else:
        self.objectid = ObjectId()
        self.created = datetime.now(UTC)
        # which arguments are associated with this job
        self.args = None
        # will store datetime of completion, if there is one
        self.completed = None
        # default, we assume this job should run
        self.active = True
        # more defaults...
        self.error = None
        # initiate the job
        self.save()
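# The lookup above goes through `self._base_spec`, which isn't shown in this
# snippet. A minimal sketch of what that property might look like, assuming
# (hypothetically) that job activity documents are keyed on the job's
# ObjectId under `_id`:
@property
def _base_spec(self):
    # hypothetical spec; the real documents may key on another field name
    return {'_id': self.objectid}
# With such a spec in place, passing a previously issued job_key rehydrates
# the stored args/created/active/error values instead of creating a new doc.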
def __init__(self, config_dir=None, metrique_config_file=None,
             mongodb_config_file=None):
    if not metrique_config_file:
        metrique_config_file = METRIQUE_CONF
    if not mongodb_config_file:
        mongodb_config_file = MONGODB_CONF
    self._config_dir = config_dir
    self._metrique_config_file = metrique_config_file
    self.metrique_config = metrique(metrique_config_file, config_dir)
    self._mongodb_config_file = mongodb_config_file
    self.mongodb_config = mongodb(mongodb_config_file, config_dir)
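# Usage sketch (the class wrapping the __init__ above isn't named in this
# snippet; `ServerConfig` below is a hypothetical stand-in). Omitting the
# file arguments falls back to the METRIQUE_CONF / MONGODB_CONF defaults:
#
#   cfg = ServerConfig()                        # default config files
#   cfg = ServerConfig(config_dir='/etc/metrique',
#                      mongodb_config_file='mongodb_config')
#   cfg.mongodb_config.c_job_activity           # shared collection handle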
class BaseDriver(object):
    '''
    Base driver class: shares the mongodb db/collection handles and
    provides helpers for working with a cube's field definitions
    '''
    metrique_config = metrique(METRIQUE_CONF)
    mongodb_config = mongodb(MONGODB_CONF)
    db_timeline_data = mongodb_config.db_timeline_data
    db_timeline_admin = mongodb_config.db_timeline_admin
    db_warehouse_data = mongodb_config.db_warehouse_data
    db_warehouse_admin = mongodb_config.db_warehouse_admin
    c_etl_activity = mongodb_config.c_etl_activity

    def __init__(self, name):
        '''
        Set up an enabled driver with an empty fielddefs cube
        '''
        self.name = name
        self.mask = 0
        self.cube = {'fielddefs': {}}
        self.enabled = True

    def __str__(self):
        return self.name

    @property
    @memo
    def fields(self):
        '''
        Dictionary of enabled fields and their properties
        (help, type, container)
        '''
        fields = {}
        for field in self.cube['fielddefs']:
            if self.get_field_property('enabled', field, True):
                fields[field] = {
                    'help': self.get_field_property('help', field, ''),
                    'type': self.get_field_property('type', field, unicode),
                    'container': self.get_field_property('container', field,
                                                         False)
                }
        return fields

    @property
    @memo
    def fieldmap(self):
        '''
        Dictionary of field_id: field_name
        '''
        fieldmap = defaultdict(str)
        for field in self.fields:
            field_id = self.get_field_property('what', field)
            if field_id is not None:
                fieldmap[field_id] = field
        return fieldmap

    # FIXME: split out into get_timeline... and drop the timeline arg...
    def get_collection(self, cube=None, timeline=False, admin=False,
                       name=None):
        if name is None:
            name = self.name
        if cube:
            _d = drivermap[cube]
            collection = _d.get_collection(timeline=timeline, admin=admin)
        else:
            # use the cached timeline/warehouse collections
            # A) if they already exist; if not, create/cache them
            # B) if we're getting data only
            # otherwise, we need to return authorized db collections
            if admin and timeline:
                db = self.db_timeline_admin
            elif admin:
                db = self.db_warehouse_admin
            elif timeline:
                db = self.db_timeline_data
            else:
                db = self.db_warehouse_data
            collection = db[name]
        return collection

    def get_field_property(self, property, field=None, default=None):
        '''
        First try to get the field's fielddef property, if defined
        Then try to get the default property, if defined
        Then return the default for when neither is found
        Or return None, if no default is defined
        '''
        try:
            return self.cube['fielddefs'][field][property]
        except KeyError:
            try:
                return self.cube['defaults'][property]
            except KeyError:
                return default

    def extract_func(self, **kwargs):
        with ProcessPoolExecutor(MAX_WORKERS) as executor:
            future = executor.submit(_extract_func, self.name, **kwargs)
            return future.result()
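# Usage sketch for the fielddef property resolution described in
# get_field_property(): per-field settings win, then cube-level 'defaults',
# then the caller-supplied fallback. The cube name and fielddefs below are
# hypothetical examples, not part of the real driver set.
class ExampleDriver(BaseDriver):
    def __init__(self):
        super(ExampleDriver, self).__init__('example')
        self.cube = {
            'defaults': {'help': 'no help available'},
            'fielddefs': {
                'summary': {'what': 1, 'help': 'issue summary'},
                'labels': {'what': 2, 'container': True},
                'internal_id': {'what': 3, 'enabled': False},
            },
        }


# d = ExampleDriver()
# d.get_field_property('help', 'summary')   # 'issue summary' (fielddef)
# d.get_field_property('help', 'labels')    # 'no help available' (defaults)
# d.get_field_property('what', 'missing')   # None (no fallback given)
# d.fields      # excludes 'internal_id', since its fielddef sets enabled=False
# d.fieldmap    # {1: 'summary', 2: 'labels'}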