def on_error(self, status_code: int):
    """Tweepy stream error callback.

    Args:
        status_code: HTTP status code reported by the Twitter streaming API.

    Returns:
        False when rate-limited (HTTP 420) so the stream disconnects and
        can back off; for any other code the process exits via sys.exit().
    """
    if status_code == 420:
        # 420 "Enhance Your Calm": the app exceeded the streaming rate limit.
        # Returning False tells tweepy to disconnect instead of retrying hard.
        logger.warning(
            "Enhance Your Calm; The App is Being Rate Limited For Making Too Many Requests!\n")
        return False
    else:
        # Any other error is treated as fatal for the ingestion process.
        # Lazy %-formatting defers string building until the record is emitted.
        logger.error("Error %s when ingesting a tweet\n", status_code)
        sys.exit()
def find_and_replace_document(self, collection_name: str, filter: dict, updated_doc: dict):
    """Replace the single document matching *filter* in *collection_name*.

    Args:
        collection_name: name of the MongoDB collection to update.
        filter: query selecting the document to replace.
        updated_doc: full replacement document.

    Returns:
        The pymongo UpdateResult on success, or None when the replacement
        raised an exception (the error is logged).
    """
    result: Optional[UpdateResult] = None
    try:
        collection = self.db[collection_name]
        result = collection.replace_one(filter=filter, replacement=updated_doc)
        logger.warning("Document Updated!")
    except Exception as err:
        logger.error(err)
    return result
def analyse_account_by_id(self, user_id: int) -> BotometerAnalysisOutput:
    """Run a Botometer check for a single account id.

    Args:
        user_id: numeric Twitter user id to analyse.

    Returns:
        A populated BotometerAnalysisOutput (analysed=True) when the API
        response contains a truthy "cap" entry; otherwise the default,
        not-analysed output. API failures are logged at warning level.
    """
    result = BotometerAnalysisOutput()
    try:
        # Query Botometer for one account by its numeric id
        scores: dict = self.botometer_api.check_account(user_id)
        if scores.get("cap", False):
            result = BotometerAnalysisOutput(
                cap=scores.get("cap", {}),
                display_scores=scores.get("display_scores", {}),
                analysed=True)
    except Exception as err:
        logger.warning(err)
    return result
def make_sentences(text: str, min_char: int = 3) -> list:
    """Break *text* apart into a list of sentences.

    Sentences of *min_char* characters or fewer are discarded; when nothing
    usable remains (input too short, or every sentence too short), a single
    default sentence is returned instead so callers always get content.
    """
    sentences: list = []
    if len(text) > min_char:
        # Segment the text and keep only sufficiently long sentences
        for sent in split_single(text):
            if len(sent) > min_char:
                sentences.append(sent)
    if not sentences:
        logger.warning("Default sentence was added")
        sentences = [SentimentAnalysisAPI.default_sentence]
    return sentences
def insert_document_to_collection(self, collection_name: str, document: dict):
    """Insert one document into *collection_name*.

    Args:
        collection_name: target MongoDB collection name.
        document: document to insert.

    Returns:
        True on success; False when the database handle is missing or the
        insert raised. Failures set self.status to 404.
    """
    inserted: bool = False
    try:
        self.check_dbManager()
        if self.db is None:
            logger.warning("Unable to insert data into collection %s",
                           collection_name)
        else:
            self.db[collection_name].insert_one(document)
            inserted = True
    except Exception as err:
        logger.error(err)
        # Flag the manager as unhealthy so callers can react
        self.status = 404
    return inserted
def kill_streaming_thread(streaming_thread: StreamingThread) -> GeneralAPIResponse:
    """Stop a running StreamingThread and report the outcome.

    Args:
        streaming_thread: the thread to terminate.

    Returns:
        GeneralAPIResponse with status_code 200 when the thread was alive
        and was killed, 400 when it was not alive.
        NOTE(review): if anything raises before the response is built, the
        uninitialised instance created via object.__new__ is returned —
        callers must not rely on its attributes in that case; confirm this
        fallback is intentional.
    """
    response: GeneralAPIResponse = object.__new__(GeneralAPIResponse)
    try:
        # Still alive then kill it
        if streaming_thread.is_alive():
            # Kill, then join so the thread is fully stopped before reporting
            streaming_thread.kill()
            streaming_thread.join()
            message: str = f"Thread {streaming_thread.name} was killed!"
            status_code: int = 200
            # Reuse the already-built message instead of re-formatting it
            logger.warning(message)
        else:
            message: str = f"Thread {streaming_thread.name} is not alive!"
            status_code: int = 400
        response = GeneralAPIResponse(
            message=message, status_code=status_code, data={})
    except Exception as e:
        logger.error(e)
    return response
def get_ratings_from_identifiers(self, index: str, identifiers: list, key: str):
    """Collect the *key* field for each identifier from an Elasticsearch index.

    Args:
        index: name of the Elasticsearch index to query.
        identifiers: document ids to look up.
        key: field to extract from each retrieved document.

    Returns:
        A dict of the form {"ratings": [...]} where each entry is the
        stringified value found for an identifier; documents that are not
        found are logged and skipped.
    """
    ratings: list = []
    result = {"ratings": ratings}
    try:
        # Lazily create the Elasticsearch connector on first use
        if self.elasticsearch_connector is None:
            self.init_elasticsearch_connector()
        connector = self.elasticsearch_connector
        for identifier in identifiers:
            hit = connector.retrieve_data_from_index_by_id(index, identifier)
            if not hit:
                logger.warning("Element not found in Elasticsearch!")
                continue
            ratings.append(str(hit[key]))
    except Exception as err:
        logger.error(err)
    return result
def start_kafka_offline_process(self):
    """Consume documents from Kafka, run source-credibility analysis on
    each, and publish the resulting scores back to a Kafka topic.

    Runs indefinitely. Connection errors terminate the process; commit
    failures drop and re-create the Kafka clients; any other per-message
    error is logged, committed, and skipped.
    """
    run: bool = True
    while run:
        try:
            # 1. Lazily initialise the consumer on first use / after a reset
            if self.kafka_manager.consumer is None:
                self.kafka_manager.init_kafka_consumer()
            # 2. Lazily initialise the producer on first use / after a reset
            if self.kafka_manager.producer is None:
                self.kafka_manager.init_kafka_producer()
            # 3. Read messages from Kafka
            for msg in self.kafka_manager.consumer:
                try:
                    # 3.1 Deserialise the message payload
                    logger.info('Loading Kafka Message')
                    document: dict = loads(msg.value)
                    # 3.2 Commit the offset BEFORE processing: a failure
                    # during analysis will NOT reprocess this message
                    self.kafka_manager.consumer.commit()
                    # 3.3 Only analyse documents whose upstream status is OK
                    if document.get("status", 400) == 200:
                        logger.info(
                            'Executing Source credibility analysis')
                        response: GraphAnalyzerOutputDoc = self.process_source_credibility_analysis(
                            document=document)
                        # 3.4 Publish scores only when the analysis succeeded
                        if response.status == 200:
                            output_doc: dict = response.__dict__
                            logger.info(
                                'Putting authors/publisher scores into Kafka'
                            )
                            self.kafka_manager.put_data_into_topic(
                                data=output_doc)
                            logger.info('Done!')
                # Fatal: broker unreachable mid-stream — stop the process
                except ConnectionError as er:
                    logger.error(er)
                    sys.exit(1)
                # Commit failed (e.g. group rebalance): rebuild the clients
                except CommitFailedError as commitErr:
                    logger.error("Not able to make a commit ..." +
                                 str(commitErr))
                    # restart kafka elements and go back to while loop
                    self.kafka_manager.consumer = None
                    self.kafka_manager.producer = None
                    # Go out of the for loop
                    break
                # Any other per-message failure: log, commit, move on
                except Exception as e:
                    logger.error(e)
                    # Perform commit and continue with next message
                    self.kafka_manager.consumer.commit()
                    continue
        # Fatal: could not (re-)connect to Kafka at all
        except ConnectionError as er:
            logger.error(er)
            sys.exit(1)
        except Exception as e:
            # Unexpected setup/iteration error: reset clients and retry
            logger.warning(e)
            self.kafka_manager.consumer = None
            self.kafka_manager.producer = None