def get_posts(post_id=None):
    """Fetch all posts, or a single post by id, via stored procedures.

    :param post_id: id of the post to fetch; None fetches every post.
    :return: list of mapped post dictionaries (empty on error).
    """
    conn = None
    posts = []
    try:
        conn = Connection().get_connection()
        with conn.cursor() as cursor:
            if post_id is None:
                cursor.execute("call get_all_posts()")
            else:
                # Parameterized call: never interpolate post_id into the
                # SQL string (SQL injection in the original).
                cursor.execute("call get_post(%s)", (post_id,))
            for row in cursor.fetchall():
                posts.append(Post().dictionary_mapper(row))
    except Error as e:
        print("There was a database error.")
        for arg in e.args:
            print(arg)
    finally:
        # conn stays None when get_connection() itself failed; the
        # original unconditionally called conn.close() and crashed.
        if conn is not None:
            conn.close()
    return posts
def get_posts(post_id=None):
    """Fetch all posts, or a single post by id, via stored procedures.

    :param post_id: id of the post to fetch; None fetches every post.
    :return: list of mapped post dictionaries (empty on error).
    """
    conn = None
    posts = []
    try:
        conn = Connection().get_connection()
        with conn.cursor() as cursor:
            if post_id is None:
                cursor.execute("call get_all_posts()")
            else:
                # Parameterized call: never interpolate post_id into the
                # SQL string (SQL injection in the original).
                cursor.execute("call get_post(%s)", (post_id,))
            for row in cursor.fetchall():
                posts.append(Post().dictionary_mapper(row))
    except Error as e:
        print("There was a database error.")
        for arg in e.args:
            print(arg)
    finally:
        # conn stays None when get_connection() itself failed; the
        # original unconditionally called conn.close() and crashed.
        if conn is not None:
            conn.close()
    return posts
def __init__(self, *args, **kwargs):
    """Set up the shared connection plus query and persistence helpers."""
    super().__init__(*args, **kwargs)
    connection = Connection()
    self.conn = connection
    self.query = Query(connection)
    self.persistence = Persistence(connection)
def is_registered(cls, telegram_id):
    """Return True when a token row exists for the given Telegram id."""
    connection = Connection()
    token_row = connection.get_token(telegram_id)
    connection.close_connection()
    return token_row is not None
def is_blockeable(cls, telegram_id):
    """Return True when the user has reached the login-trial limit (>= 3).

    Falls back to False when no trial row exists for the user.
    """
    INDEX_NUMBER_OF_TRIALS = 0
    MAX_TRIALS = 3
    conn = Connection()
    trials_tuple = conn.get_trial(telegram_id)
    # Release the connection like the sibling is_registered() does;
    # the original leaked it.
    conn.close_connection()
    if trials_tuple:
        return trials_tuple[INDEX_NUMBER_OF_TRIALS] >= MAX_TRIALS
    return False
def __init__(self, configuration: Configuration, configuration_key: str,
             connection: Connection):
    """Store configuration and database services for this operation.

    :raises ConfigurationMissingError: when no usable database backend
        is configured.

    NOTE(review): the guard tests has_orm() while the body only reads
    connection.mongodb; the parallel operation __init__ guards on
    has_mongodb() — confirm which backend this class actually needs.
    """
    if not (connection.has_bigquery() or connection.has_orm()):
        raise ConfigurationMissingError(
            'Missing a database configuration for this operation')
    self.configuration = configuration
    self.module_configuration = \
        configuration.operations.get_custom_configuration_operation(
            configuration_key)
    self.mongodb = connection.mongodb
    self.check_service = Check(connection)
def is_blocked(cls, telegram_id):
    """Return True while the user's block period is still in effect.

    A missing row or a NULL blocked date means the user is not blocked.
    """
    BLOCKED_DATE_INDEX = 0
    conn = Connection()
    row = conn.get_blocked_date(telegram_id)
    # Release the connection like the sibling is_registered() does;
    # the original leaked it.
    conn.close_connection()
    if not row:
        return False
    blocked_until = row[BLOCKED_DATE_INDEX]
    # `is None` instead of the original `== None` equality test.
    if blocked_until is None:
        return False
    return blocked_until > datetime.now()
def __init__(self, native_connection: sqlite3.Connection):
    """
    Constructor

    :param native_connection: Native sqlite3 connection object to wrap
    """
    Connection.__init__(self)
    # isolation_level = None turns off sqlite3's implicit transaction
    # handling; this class manages transactions by hand.
    native_connection.isolation_level = None
    # Keep the wrapped connection private (name-mangled) and start
    # outside of any transaction.
    self.__in_transaction = False
    self.__native_connection = native_connection
def main(cls, msg):
    """Route an incoming message: registered users get their request served
    with their stored access token; everyone else is sent to sign-up."""
    telegram_user_id = MessageInfoHandler.get_user_id(msg)
    if not Verifier.is_registered(telegram_user_id):
        return cls.register_user(msg)
    # Look up the user's stored token.
    conn = Connection()
    user_row = conn.get_token(telegram_user_id)
    conn.close_connection()
    access_token = user_row[USER_DATA["ACCESS_TOKEN"]]
    return cls.request(access_token, msg)
def __init__(self, configuration: Configuration, configuration_key: str,
             connection: Connection):
    """Keep the configuration plus the database handles this operation uses.

    :raises ConfigurationMissingError: unless BigQuery or MongoDB is
        configured.
    """
    if not (connection.has_bigquery() or connection.has_mongodb()):
        raise ConfigurationMissingError(
            'Missing a database configuration for this operation')
    self.configuration = configuration
    self.module_configuration = \
        configuration.operations.get_custom_configuration_operation(
            configuration_key)
    self.connection = connection
    self.mongodb = connection.mongodb
    self.bigquery = None
    # Matches $1, $2, ... style placeholders.
    self.matching_group_regex = re.compile(r'\$(\d+)')
class DatabaseAdapter:
    """Thin facade over the Connection, Query and Persistence helpers."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        connection = Connection()
        self.conn = connection
        self.query = Query(connection)
        self.persistence = Persistence(connection)

    def select_all(self, table, *columns):
        """Delegate to Query.select_all with the collected column names."""
        return self.query.select_all(table, columns)

    def select_all_joined(self, from_table, join_table, on, columns,
                          how='INNER'):
        """Delegate to Query.select_all_joined (default INNER join)."""
        return self.query.select_all_joined(from_table, join_table, on,
                                            columns, how)

    def select_custom_sql(self, which_query, where_value=False,
                          replace_pattern='%value%'):
        """Delegate to Query.select_custom_sql, optionally substituting
        where_value for replace_pattern."""
        return self.query.select_custom_sql(which_query, where_value,
                                            replace_pattern)

    def close(self):
        """Close the underlying connection."""
        return self.conn.close()
def __init__(self, connection: Connection):
    """Wire up the ORM and/or BigQuery backends for urlset checks/urls."""
    self._connection = connection
    self._orm = None
    self._bigquery = None
    self._urlset_checks_table = None
    self._urlset_urls_table = None
    self._cached_url_ids = {}
    if connection.has_orm():
        self._orm = connection.orm
        self._urlset_checks_table = ChecksUrlset(self._orm)
        self._urlset_urls_table = UrlsUrlset(self._orm)
    if connection.has_bigquery():
        self._bigquery = connection.bigquery
def main():
    """Dump every row of TB_NAME as a list of dicts into a YAML file."""
    with Connection().connection:
        with Cursor().cursor as cursor:
            # USE database
            result = database_use(DB_NAME)
            print(result)
            # Select all rows from the table.
            result = select_all(TB_NAME)
            rows = cursor.fetchall()
            print(result)
            # Column names come straight from the cursor description
            # (first element of each description tuple).
            column_names = [desc[0] for desc in cursor.description]
            # One dict per row, keyed by column name.
            dicts = [dict(zip(column_names, row)) for row in rows]
            # Save dicts to YAML.
            yaml_dump = yaml.dump(dicts)
            result = save_to_yaml(TB_NAME + '.yaml', yaml_dump)
            print(result)
def add_debt():
    """Create a debt from the request JSON and return it serialized."""
    with Connection().session() as session:
        schema = DebtsSchema()
        debt = schema.load(request.get_json(), session=session)
        DebtsRepository(session).insert(debt)
        session.commit()
        return jsonify(schema.dump(debt))
def add_person():
    """Create a person from the request JSON and return it serialized."""
    with Connection().session() as session:
        schema = PersonSchema()
        person = schema.load(request.get_json(), session=session)
        PersonRepository(session).insert(person)
        session.commit()
        return jsonify(schema.dump(person))
def main():
    """Entry point: parse CLI args, load the DB config and run all queries.

    Usage: python run.py <db option> <query directory> <verbose 0|1>
    """
    global isVerbose, qryDir
    if len(sys.argv) > 3:
        dbOption = str(sys.argv[1])
        qryDir = str(sys.argv[2])
        isVerbose = str(sys.argv[3])
        # Single-argument print(...) works in both Python 2 and 3; the
        # original used Python-2-only print statements.
        print("[INFO] Ready to execute queries in directory: " + qryDir)
    else:
        print("Usage: python run.py <redshift db, options in config/config.json> <directory for queries> <isVerbose OPTIONS (0: not verbose, 1: verbose)>")
        sys.exit(1)
    with open('config/config.json') as config_file:
        conf = json.load(config_file)
    if not dbOption or dbOption not in conf:
        print("[ERROR] database option not found, please check if it's configured in config/config.json!")
        sys.exit(1)
    DB_NAME = conf[dbOption]["dbname"]
    HOST = conf[dbOption]["host"]
    PORT = conf[dbOption]["port"]
    USERNAME = conf[dbOption]["user"]
    PASSWORD = conf[dbOption]["passwd"]
    DB_SCHEMA = conf[dbOption]["dbschema"]
    print("[INFO] redshift cluster: %s.%s" % (DB_NAME, DB_SCHEMA))
    db_ops = Connection(DB_NAME, HOST, PORT, USERNAME, PASSWORD)
    test(db_ops, DB_SCHEMA)
    executeAllQueries(db_ops, DB_SCHEMA, qryDir, isVerbose)
def get_id_person(id):
    """Return the person with the given id, or raise a 404 when absent."""
    with Connection().session() as session:
        person = PersonRepository(session).get_id(id)
        if not person:
            raise InvalidRequest('Person not found', 404)
        return jsonify(PersonSchema().dump(person))
def update_id_debt(id):
    """Update the debt identified by id with the request JSON body."""
    with Connection().session() as session:
        payload = request.get_json()
        payload['id'] = id
        loaded = DebtsSchema().load(payload, session=session)
        updated = DebtsRepository(session).update(loaded)
        return jsonify(DebtsSchema().dump(updated))
def add_property():
    """Create a property from the request JSON and return it serialized."""
    with Connection().session() as session:
        schema = PropertySchema()
        prop = schema.load(request.get_json(), session=session)
        PropertyRepository(session).insert(prop)
        session.commit()
        return jsonify(schema.dump(prop))
def delete_id_person(id):
    """Delete a person by id; refuse when the person still has debts."""
    with Connection().session() as session:
        if DebtsRepository(session).get_debts_by_person_id(id):
            raise InvalidRequest('This person has debts.')
        deleted = PersonRepository(session).delete_id(id)
        return jsonify(PersonSchema().dump(deleted))
def update_id_person(id):
    """Update the person identified by id with the request JSON body."""
    with Connection().session() as session:
        person = request.get_json()
        person['id'] = id
        # Load the payload WITH the injected id. The original re-read
        # request.get_json() here, silently dropping the id that the
        # sibling update handlers inject before loading.
        update = PersonSchema().load(person, session=session)
        id_person = PersonRepository(session).update(update)
        return jsonify(PersonSchema().dump(id_person))
def update_id_property(id):
    """Update the property identified by id with the request JSON body."""
    with Connection().session() as session:
        # Avoid shadowing the builtin `property` as the original did.
        payload = request.get_json()
        payload['id'] = id
        loaded = PropertySchema().load(payload, session=session)
        updated = PropertyRepository(session).update(loaded)
        return jsonify(PropertySchema().dump(updated))
def __init__(self, dbname, user, password, host):
    """Ensure the database exists, then open a connection to it.

    :raises ValueError: when neither an existing nor a freshly created
        database could be connected to.
    """
    initialize = Initialize(dbname, host, password, user)
    self.connection = None
    # Connect when the data already exists or once it has been created;
    # short-circuit `or` keeps create() from running on existing data.
    if initialize.check_data() or initialize.create():
        self.connection = Connection(dbname=dbname, host=host,
                                     password=password, user=user)
    else:
        logger.error("There was some error while creating the DB")
    if self.connection is None:
        raise ValueError("Unable to initialize database")
def patch_id_debt(id):
    """Partially update a debt's value, description and person_id."""
    body = request.json
    value = body['value']
    description = body['description']
    person_id = body['person_id']
    with Connection().session() as session:
        patched = DebtsRepository(session).patch_id(id, value, description,
                                                    person_id)
        return jsonify(DebtsSchema().dump(patched))
def new_connection(self) -> Connection:
    """Creates new Memgraph connection"""
    return Connection.create(
        host=self._host,
        port=self._port,
        username=self._username,
        password=self._password,
        encrypted=self._encrypted,
    )
def delete_id_person(id):
    """Delete a person by id; refuse when the person still owns property."""
    with Connection().session() as session:
        if PropertyRepository(session).get_property_by_person_id(id):
            raise InvalidRequest('This person has property.')
        deleted = PersonRepository(session).delete_id(id)
        return jsonify(PersonSchema().dump(deleted))
def patch_id_person(id):
    """Partially update a person's name, document and address."""
    body = request.json
    name = body['name']
    document = body['document']
    address = body['address']
    with Connection().session() as session:
        patched = PersonRepository(session).patch_id(id, name, document,
                                                     address)
        return jsonify(PersonSchema().dump(patched))
def patch_id_property(id):
    """Partially update a property's value, description and person_id."""
    body = request.json
    value = body['value']
    description = body['description']
    person_id = body['person_id']
    with Connection().session() as session:
        patched = PropertyRepository(session).patch_id(id, value,
                                                       description,
                                                       person_id)
        return jsonify(PropertySchema().dump(patched))
def run(configuration_hash: str, configuration_key: str, module: str,
        module_namespace: str):
    """Unpickle the configuration and run the requested custom module.

    Finds the class inside <module_namespace>.<module> whose name is the
    camel-cased module name and calls its run() method.

    :raises ExitError: when the pickled file is not a Configuration.
    """
    pickle_path = Path.var_folder_path() + '/' + configuration_hash + '.pickle'
    with open(pickle_path, 'rb') as handle:
        configuration = pickle.load(handle)
    # isinstance is the idiomatic type check (the original compared
    # type() identity).
    if not isinstance(configuration, Configuration):
        raise ExitError('Could not unserialize configuration')
    custommodule = importlib.import_module('.' + module,
                                           package=module_namespace)
    connection = Connection(configuration)
    try:
        for customattribute in dir(custommodule):
            if customattribute == tocamelcase.convert(module):
                customclass = getattr(custommodule, customattribute)
                customclass(configuration, configuration_key,
                            connection).run()
    finally:
        # Close the connection even when the custom class raises;
        # the original leaked it on errors.
        connection.close()
def _register_module_tasks(sender, configuration, modules_config, package,
                           task_prefix):
    """Autodiscover and schedule one periodic task per configured module."""
    for configuration_key, module_config in modules_config.items():
        module = module_config.module
        cron = module_config.cron
        sender.autodiscover_tasks([package], module)
        if croniter.is_valid(cron) is True:
            (minute, hour, day_month, month,
             day_week) = str.split(cron, sep=' ')
            sender.add_periodic_task(
                crontab(minute, hour, day_week, day_month, month),
                run,
                [configuration.hash, configuration_key, module, package],
                time_limit=module_config.runtime_limit,
                name=task_prefix + '_' + configuration_key)


def setup_periodic_tasks(sender, **kwargs):
    """Prepare each configuration and register its periodic tasks.

    For every configuration: create ORM tables / run MongoDB migrations,
    persist the configuration as a pickle keyed by its hash (so task
    workers can reload it), then schedule the aggregation and operation
    modules from their cron expressions. The original duplicated the
    scheduling loop verbatim for both module kinds.
    """
    configurations = ConfigurationLoader().load_by_config_folder()
    for configuration in configurations:
        # Make sure the schema/migrations exist before any task runs.
        with Connection(configuration) as connection:
            if connection.has_orm():
                connection.orm.tables.create_tables()
            if connection.has_mongodb():
                connection.mongodb.migrations()
        pickle_path = (Path.var_folder_path() + '/' + configuration.hash +
                       '.pickle')
        with open(pickle_path, 'wb') as handle:
            pickle.dump(configuration, handle,
                        protocol=pickle.HIGHEST_PROTOCOL)
        _register_module_tasks(sender, configuration,
                               configuration.aggregations.config,
                               'modules.aggregation.custom', 'aggregation')
        _register_module_tasks(sender, configuration,
                               configuration.operations.config,
                               'modules.operation.custom', 'operation')
async def get_audio_from_speech(speech, query_table, query_col):
    """Find audio whose 'validation' value matches any three consecutive
    words of the transcribed speech.

    :param speech: transcription string (falsy when not recognized).
    :param query_table: table to query.
    :param query_col: column to select.
    :return: a randomized query result, or False when nothing matches
        or the speech was not recognized.
    """
    conn = Connection()
    query = Query(conn)
    try:
        if speech:
            words = speech.split(' ')
            # Slide a three-word window over the transcription; stop when
            # fewer than three words remain (as the original break did).
            for idx in range(len(words) - 2):
                sentence = ' '.join(words[idx:idx + 3])
                query_result = query.query_all(table=query_table,
                                               column=query_col,
                                               where_col='validation',
                                               value=sentence)
                if query_result:
                    return randomize_query_result(query_result)
        # Nothing found in database or speech not recognized.
        return False
    finally:
        # Always release the connection; the original leaked it when
        # query_all raised.
        conn.close()
def edit_post(post_id, title, content):
    """Update a post's title and content via the edit_post procedure.

    :return: True on success, False on a database error.
    """
    conn = None
    result = False
    try:
        conn = Connection().get_connection()
        with conn.cursor() as cursor:
            # Parameterized call: the original interpolated title/content
            # straight into the SQL string (SQL injection, and quotes in
            # the text broke the statement).
            cursor.execute("call edit_post(%s, %s, %s)",
                           (post_id, title, content))
            conn.commit()
            result = True
    except Error as e:
        print("There was a database error.")
        for arg in e.args:
            print(arg)
        # conn is None when get_connection() itself failed; the original
        # called rollback()/close() unconditionally and crashed.
        if conn is not None:
            conn.rollback()
    finally:
        if conn is not None:
            conn.close()
    return result
def __del__(self):
    """
    Destructor

    Delegates cleanup to the Connection base class destructor.
    """
    Connection.__del__(self)