from gremlin_python.driver.driver_remote_connection import DriverRemoteConnection
from gremlin_python.process.anonymous_traversal import traversal
from gremlin_python.process.graph_traversal import GraphTraversalSource

# Module-level connection pool shared across GremlinConnector instances.
__g_db_pool__ = None


class GremlinConnector(object):
    __app__ = None
    logger = None

    def __init__(self, **kwargs):
        if 'app' not in kwargs:
            raise GeneralException(message="app instance is required")
        self.__app__ = kwargs['app']
        self.logger = Logger().get(self.__class__.__name__)

    def __get_connection__(self):
        # Lazily create a single shared DriverRemoteConnection.
        global __g_db_pool__
        if __g_db_pool__ is None:
            connection_properties = {
                "url": self.__app__.config['GREMLIN_URI'],
                "traversal_source": 'g',
                "pool_size": self.__app__.config['GREMLIN_POOL_SIZE'],
                "max_workers": self.__app__.config['GREMLIN_MAX_WORKERS']
            }
            __g_db_pool__ = DriverRemoteConnection(**connection_properties)
            self.logger.info('Connected to {}'.format(
                self.__app__.config['GREMLIN_URI']))
            self.logger.info('Initialized graph db pool')
        return __g_db_pool__

    def get_traversal(self) -> GraphTraversalSource:
        return traversal().withRemote(self.__get_connection__())
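# Usage sketch (illustrative, not part of the app): assumes a Flask-style
# `app` object whose `config` carries the GREMLIN_* keys read above; the
# `App` stub and its values here are hypothetical.
class App:
    config = {
        'GREMLIN_URI': 'ws://localhost:8182/gremlin',
        'GREMLIN_POOL_SIZE': 4,
        'GREMLIN_MAX_WORKERS': 4,
    }


connector = GremlinConnector(app=App())
g = connector.get_traversal()
print(g.V().limit(5).toList())  # fetch up to five vertices via the shared pool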
def __init__(self, *args, **kwargs):
    # Prefer an explicit object_id for the key; otherwise fall back to a
    # supplied _key, or mint a fresh BSON ObjectId.
    self._key = kwargs.get('object_id') if 'object_id' in kwargs \
        else kwargs.get('_key', str(bson.ObjectId()))
    self.data = kwargs.get('data')
    self._id = kwargs.get('_id', None)
    self.object_id = kwargs.get('object_id', None)
    self.is_active = kwargs.get('is_active', None)
    self.created_at = kwargs.get('created_at', self.date_time_now())
    self.updated_at = kwargs.get('updated_at', self.date_time_now())
    self.created_by = kwargs.get('created_by', None)
    self.updated_by = kwargs.get('updated_by', None)
    self.logger = Logger().get(self.__class__.__name__)
    if '__db__' in kwargs:
        self.__db__ = kwargs.get('__db__')
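# Construction sketch (hypothetical): assumes the __init__ above belongs to a
# base model class, called BaseModel here for illustration, which also
# defines date_time_now().
doc = BaseModel(data={'name': 'widget'}, created_by='system')
print(doc._key)        # freshly minted 24-character ObjectId string
print(doc.created_at)  # defaulted via date_time_now()

doc2 = BaseModel(object_id='5f2b1c0e9d3e4a0001234567')
print(doc2._key)       # a supplied object_id is reused as the key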
def __init__(self, *args, **kwargs):
    self.data = kwargs.get('data')
    self.object_id = str(bson.ObjectId())
    self.logger = Logger().get(self.__class__.__name__)
import csv
import json
import os


class TextOperations:
    logger = Logger().get(__name__)

    def _float(self, val):
        # Sentinel -100.0 signals an unparseable value.
        try:
            return float(val)
        except ValueError:
            return -100.0

    def _int(self, val):
        # Sentinel -100 signals an unparseable value.
        try:
            return int(val)
        except ValueError:
            return -100

    def normalize_string(self, value):
        # Collapse internal whitespace in strings; pass non-strings through.
        if not isinstance(value, str):
            return value
        return ' '.join(value.split()) if value else ''

    def read_csv(self, file_loc=None, s3fp=None, field_names=None):
        data = []
        if file_loc:
            # file_path = os.path.join(BASE_DIR, file_loc.lstrip("/"))
            file_path = file_loc
            self.logger.info(f"READING FROM CSV FILE {file_path}")
            with open(file_path, 'r') as fp:
                reader = csv.DictReader(fp) if not field_names \
                    else csv.DictReader(fp, field_names)
                data = [dict(row) for row in reader]
        elif s3fp:
            reader = csv.DictReader(s3fp) if not field_names \
                else csv.DictReader(s3fp, field_names)
            data = [dict(row) for row in reader]
        if field_names:
            # With explicit field names the header row is parsed as data; drop it.
            data.pop(0)
        return data

    def write_csv(self, file_loc, rows, field_names=None):
        file_path = os.path.join(BASE_DIR, file_loc.lstrip("/"))
        self.logger.info(f"WRITING TO CSV FILE {file_path}")
        if not field_names:
            field_names = list(rows[0].keys())
        with open(file_path, 'w') as fp:
            writer = csv.DictWriter(fp, fieldnames=list(field_names))
            writer.writeheader()
            writer.writerows(rows)

    def write_json(self, file_loc, data):
        file_path = os.path.join(BASE_DIR, file_loc.lstrip("/"))
        self.logger.info(f"WRITING TO JSON FILE {file_path}")
        with open(file_path, 'w') as fp:
            fp.write(json.dumps(data, indent=2))

    def read_json(self, file_loc=None, s3fp=None):
        data = None
        if file_loc:
            file_path = os.path.join(BASE_DIR, file_loc.lstrip("/"))
            self.logger.info(f"READING FROM JSON FILE {file_path}")
            with open(file_path, 'r') as fp:
                data = json.load(fp)
        elif s3fp:
            data = json.load(s3fp)
        return data

    def proccess_ucode(self, ucode: str) -> str:
        # Uppercase and left-pad with zeros to a fixed width of six characters.
        return str(ucode).upper().rjust(6, "0")
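# Usage sketch (illustrative): exercises the helpers above; the file path
# under BASE_DIR is hypothetical.
txt = TextOperations()

print(txt.normalize_string('  hello   world '))  # 'hello world'
print(txt.proccess_ucode('4f2a'))                # '004F2A' (zero-padded to six)
print(txt._int('not-a-number'))                  # -100 sentinel for bad input

# Round trip a small CSV; note write_csv resolves the path under BASE_DIR,
# while read_csv takes the location as-is.
rows = [{'ucode': '4F2A', 'score': '0.91'}]
txt.write_csv('tmp/sample.csv', rows)
print(txt.read_csv(file_loc=os.path.join(BASE_DIR, 'tmp/sample.csv')))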
import traceback
from typing import Dict

from elasticsearch.exceptions import NotFoundError


class BaseElasticRepo:
    def __init__(self, *args, **kwargs):
        self.logger = Logger().get(self.__class__.__name__)
        self.__txt__ = TextOperations()
        # Index settings: single shard/replica, plus shingle, stop, stemmer,
        # edge-n-gram and n-gram filters feeding three custom analyzers.
        self.settings = {
            "settings": {
                "number_of_shards": 1,
                "number_of_replicas": 1,
                "index": {
                    "analysis": {
                        "filter": {
                            "filter_shingle": {
                                "type": "shingle",
                                "min_shingle_size": "2",
                                "max_shingle_size": "15",
                                "output_unigrams_if_no_shingles": "true",
                                "output_unigrams": "false"
                            },
                            "filter_stop": {"type": "stop"},
                            "filter_stemmer": {
                                "type": "snowball",
                                "language": "English"
                            },
                            "filter_edge_ngram": {
                                "type": "edgeNGram",
                                "min_gram": "2",
                                "max_gram": "50"
                            },
                            "filter_ngram": {
                                "type": "nGram",
                                "min_gram": "2",
                                "max_gram": "50"
                            }
                        },
                        "tokenizer": {
                            "edge_ngram_tokenizer": {
                                "type": "edge_ngram",
                                "min_gram": "1",
                                "max_gram": "50",
                                "token_chars": ["letter", "digit",
                                                "whitespace", "symbol"]
                            }
                        },
                        "analyzer": {
                            "keyword_analyzer": {
                                "filter": ["lowercase", "asciifolding", "trim",
                                           "reverse", "filter_ngram"],
                                "type": "custom",
                                "tokenizer": "keyword"
                            },
                            "shingle_analyzer": {
                                "filter": ["lowercase", "asciifolding", "trim",
                                           "reverse", "filter_stop",
                                           "filter_stemmer", "filter_shingle"],
                                "type": "custom",
                                "tokenizer": "standard"
                            },
                            "edge_ngram_analyzer": {
                                "filter": ["lowercase", "asciifolding", "trim",
                                           "reverse", "filter_edge_ngram"],
                                "type": "custom",
                                "tokenizer": "edge_ngram_tokenizer"
                            }
                        }
                    }
                }
            }
        }
        self.mappings = lambda x: {x: {"properties": {}}}
        # Text fields get keyword, edge-n-gram, shingle and completion subfields.
        self.text_mapping = {
            "type": "keyword",
            "fields": {
                "keywordstring": {"type": "text",
                                  "analyzer": "keyword_analyzer"},
                "edgengram": {"type": "text",
                              "analyzer": "edge_ngram_analyzer"},
                "shingle": {"type": "text", "analyzer": "shingle_analyzer"},
                "completion": {"type": "completion"}
            }
        }
        self.date_mapping = {"type": "date", "format": "yyyy-MM-dd HH:mm:ss"}
        self.boolean_mapping = {
            "type": "keyword",
            "fields": {
                "keywordstring": {"type": "text",
                                  "analyzer": "keyword_analyzer"}
            }
        }
        self.id_mapping = self.email_mapping = self.boolean_string_mapping = {
            "type": "keyword",
            "fields": {
                "keywordstring": {"type": "text",
                                  "analyzer": "keyword_analyzer"}
            }
        }
        self.list_mapping = {"type": "nested"}
        self.object_mapping = {"type": "object"}
        self.dict_mapping = {"properties": {}}

    def index_exits(self, _index):
        try:
            return es.indices.exists(_index)
        except NotFoundError:
            self.logger.info(f"{_index} not found")
            raise
        except Exception as e:
            self.logger.error(f'ERROR WHILE CHECKING INDEX EXISTS {_index}'
                              + traceback.format_exc())
            raise GeneralException(message=str(e))

    def delete_database(self, _index):
        try:
            return es.indices.delete(index=_index)
        except NotFoundError:
            self.logger.error(f"{_index} not found")
            raise
        except Exception as e:
            self.logger.error(f'ERROR WHILE DELETING INDEX {_index}'
                              + traceback.format_exc())
            raise GeneralException(message=str(e))

    def delete_by_id(self, params: Dict):
        # params maps index name -> document id; missing documents are logged
        # and skipped rather than raised.
        for k, __id in params.items():
            try:
                es.delete(index=k, doc_type=k + '_index', id=str(__id))
            except NotFoundError:
                self.logger.error(f"{k} {__id} not found")
            except Exception as e:
                self.logger.error(f'ERROR WHILE DELETING INDEX {k} {__id}'
                                  + traceback.format_exc())
                raise GeneralException(message=str(e))

    def create_index(self, _index, _type, mappings=None):
        if self.is_connected:
            if '_index' not in _index:
                _index += '_index'
            if not es.indices.exists(_index):
                if mappings:
                    if _type not in mappings:
                        self.settings.update({"mappings": {_type: mappings}})
                    else:
                        self.settings.update({"mappings": mappings})
                es.indices.create(index=_index, body=self.settings)
            return _index
        raise GeneralException(message="Invalid Connection")

    def delete_index(self, _index):
        if self.is_connected:
            if '_index' not in _index:
                _index += '_index'
            if es.indices.exists(_index):
                es.indices.delete(index=_index)
        return _index
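# Usage sketch (illustrative): assumes the module-level `es` client and an
# `is_connected` flag provided elsewhere in the app; `ProductRepo` and the
# field names are hypothetical.
class ProductRepo(BaseElasticRepo):
    is_connected = True


repo = ProductRepo()

# Compose a per-field mapping from the reusable building blocks above, then
# create the index (stored as 'product_index') with the shared analyzers.
mapping = {
    "properties": {
        "name": repo.text_mapping,
        "created_at": repo.date_mapping,
        "tags": repo.list_mapping,
    }
}
repo.create_index('product', 'product', mappings=mapping)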
def __init__(self, *args, **kwargs):
    super(BaseController, self).__init__(*args, **kwargs)
    self.renderer = JSONRenderer()
    self.logger = Logger().get(self.__class__.__name__)
cors = CORS(app)

# Only wait for one second for cluster health, regardless of the client's default.
if ENV != 'local':
    es.cluster.health(wait_for_status='yellow', request_timeout=1)

migrate = Migrate(app, db)

app.config['ARANGO_DATABASE'] = DATABASES['arango']
app.config['PROPAGATE_EXCEPTIONS'] = True

doc.init_app(app)

from skeletor.utility.logger import Logger

app.logger = Logger().get(__name__)


def before_send(event, hint):
    # Drop Sentry events for exceptions explicitly flagged as non-reportable.
    if 'exc_info' in hint:
        exc_type, exc_value, tb = hint['exc_info']
        if hasattr(exc_value, 'report') and not exc_value.report:
            return None
    return event


@app.before_request
def before_request(*args, **kwargs):
    session.permanent = False
    user = session.get('user', None)
    if user:
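# Wiring sketch (illustrative): the before_send hook above matches the
# signature sentry-sdk expects for event filtering; this shows how such a
# hook is typically registered, assuming sentry-sdk with the Flask
# integration. The DSN is a placeholder.
import sentry_sdk
from sentry_sdk.integrations.flask import FlaskIntegration

sentry_sdk.init(
    dsn='https://examplePublicKey@o0.ingest.sentry.io/0',
    integrations=[FlaskIntegration()],
    before_send=before_send,
)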
def __init__(self, *args, **kwargs):
    self._schema = self.schema
    self.logger = Logger().get(self.__class__.__name__)