def create_market(self, data: dict):
    self.validations.new_market(data=data)
    self.validations.is_valid_registry(registry=data.get('registro'))
    LOGGER.debug('Creating new market service')
    new_market = Market(**data)
    market = self.database.insert_market(market=new_market)
    return self.parser.object_to_json(market=market)
def get_customer_by_id(self, customer_id: str) -> Customer:
    LOGGER.debug(f'Getting customer {customer_id} in database')
    customer = Customer.objects(id=customer_id).first()  # pylint: disable=no-member
    if not customer:
        LOGGER.debug('No customer found for the informed customer_id')
        raise NotFoundException
    return customer
def update_market(self, data: dict, registry: str):
    self.validations.update_market(data=data)
    self.validations.is_valid_registry(registry=registry)
    LOGGER.debug('Updating market service')
    market = self.database.get_market_by_registry(registry=registry)
    market = self.database.update_market(market=market, data=data)
    return self.parser.object_to_json(market=market)
def get_product_by_id(self, product_id: str) -> Product:
    LOGGER.debug(f'Getting product {product_id} in database')
    product = Product.objects(id=product_id).first()  # pylint: disable=no-member
    if not product:
        LOGGER.debug('No product found for the informed product_id')
        raise NotFoundException
    return product
async def get_market_by_registry(self, request):
    try:
        LOGGER.debug('Getting market action')
        record = request.path_params.get('market_registry')
        market = self.service.get_market_by_registry(registry=record)
        return generate_response(data=market, status_code=200)
    except JSONDecodeError:
        LOGGER.debug("Invalid payload")
        return generate_response(message=[INVALID_PAYLOAD], status_code=400)
    except NotFoundException:
        LOGGER.debug('Market not found')
        return generate_response(status_code=404)
    except InvalidRecordException as exception:
        LOGGER.debug("Invalid record param")
        return generate_response(message=exception.messages, status_code=422)
    except Exception as exception:
        LOGGER.debug(traceback.format_exc())
        LOGGER.critical(f"Unknown error: {exception}")
        return generate_response(status_code=500, message=[CRITICAL_ERROR])
async def create_market(self, request):
    try:
        LOGGER.debug('Creating market action')
        payload = await request.json()
        market = self.service.create_market(data=payload)
        return generate_response(data=market, status_code=201)
    except JSONDecodeError:
        LOGGER.debug("Invalid payload")
        return generate_response(message=[INVALID_PAYLOAD], status_code=400)
    except InvalidRecordException as exception:
        LOGGER.debug("Invalid record param")
        return generate_response(message=exception.messages, status_code=422)
    except MissingRequiredFieldsException as exception:
        return generate_response(message=exception.messages, status_code=400)
    except DataError:
        LOGGER.debug("Invalid value informed")
        return generate_response(message=[INVALID_FIELD_VALUE], status_code=422)
    except IntegrityError:
        return generate_response(message=[REGISTRY_ALREADY_USED], status_code=422)
    except Exception as exception:
        LOGGER.debug(traceback.format_exc())
        LOGGER.critical(f"Unknown error: {exception}")
        return generate_response(status_code=500, message=[CRITICAL_ERROR])
def update_product_by_id(self, product_id: str, data: dict) -> Product:
    LOGGER.debug(f'Updating product {product_id} in database')
    product = Product.objects(id=product_id).first()  # pylint: disable=no-member
    if not product:
        raise NotFoundException
    product.update(**data)  # pylint: disable=no-member
    # update() does not refresh the local document, so re-fetch it
    product = Product.objects(id=product_id).first()
    return product
def send(self, msg, receiver='all'):
    if receiver in self.receivers:
        message = '{}: {}'.format(receiver, msg)
        self.last_message = message
        logger.debug("ZMQPublisher: message={}".format(message))
        self.socket.send_string(message)
    else:
        raise ValueError('receiver is not correct')
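# A minimal, self-contained sketch (not from this codebase) of the prefix
# convention send() relies on: a zmq.SUB socket filters on the leading
# '<receiver>: ' text, so a 'gui' subscriber only sees 'gui: ...' messages.
# The endpoint below is an assumption for illustration.
import time
import zmq

ctx = zmq.Context.instance()
pub = ctx.socket(zmq.PUB)
pub.bind('tcp://127.0.0.1:5556')

sub = ctx.socket(zmq.SUB)
sub.connect('tcp://127.0.0.1:5556')
sub.setsockopt_string(zmq.SUBSCRIBE, 'gui')
time.sleep(0.2)  # PUB/SUB "slow joiner": give the subscription time to propagate

pub.send_string('gui: hello')   # matches the 'gui' prefix, delivered
pub.send_string('other: nope')  # filtered out before it reaches recv
print(sub.recv_string())        # -> 'gui: hello'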
def update_customer_by_id(self, customer_id: str, data: dict) -> Customer:
    LOGGER.debug(f'Updating customer {customer_id} in database')
    customer = Customer.objects(id=customer_id).first()  # pylint: disable=no-member
    if not customer:
        raise NotFoundException
    customer.update(**data)  # pylint: disable=no-member
    # update() does not refresh the local document, so re-fetch it
    customer = Customer.objects(id=customer_id).first()
    return customer
def get_products(self, page: int) -> (list, int):
    LOGGER.debug(f'Getting products page {page}')
    products_total = Product.objects().count()
    if page == 1:
        products = Product.objects().limit(LIMIT_PER_PAGE)
    else:
        products = Product.objects.skip(
            (page - 1) * LIMIT_PER_PAGE).limit(LIMIT_PER_PAGE)
    return products, products_total
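# A quick standalone sanity check of the skip/limit arithmetic above; the
# LIMIT_PER_PAGE value of 10 is an assumption for illustration only.
LIMIT_PER_PAGE = 10

def page_window(page: int) -> tuple:
    # half-open [start, end) offsets into the collection for a 1-based page
    skip = (page - 1) * LIMIT_PER_PAGE
    return skip, skip + LIMIT_PER_PAGE

assert page_window(1) == (0, 10)   # page 1 -> documents 0..9
assert page_window(2) == (10, 20)  # page 2 -> documents 10..19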
def __set_difference(self, field_set: set, data: dict, is_update: bool = False):
    difference = field_set.difference(data.keys())
    if difference and not is_update:
        LOGGER.debug('Missing field in data payload')
        raise MissingRequiredFieldsException(fields=difference)
    self.__fields_to_remove(data=data, field_set=field_set)
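# Illustration of the required-fields check above: set.difference() yields the
# declared fields that are absent from the payload. The field names here are
# made up for the example.
required = {'name', 'email', 'password'}
payload = {'name': 'Ana', 'email': 'ana@example.com'}
missing = required.difference(payload.keys())
assert missing == {'password'}  # would trigger MissingRequiredFieldsException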
def test_stupid(self):
    publisher = ZMQPublisher()
    receivers = ['gui', 'all']
    for _ in range(10):
        # random 10-character barcode
        barcode = hashlib.sha256(os.urandom(30)).hexdigest()[:10]
        try:
            publisher.send(barcode, random.choice(receivers))
            time.sleep(1)
        except Exception as e:
            logger.debug(e)
            self.fail('something wrong')
def init_publisher():
    publisher = ZMQPublisher()
    while True:
        try:
            # Let's do something here.... read from com port or ...etc...
            barcode = hashlib.sha256(os.urandom(30)).hexdigest()[:10]
            publisher.send(barcode, random.choice(['gui', 'all']))
            time.sleep(0.1)
        except KeyboardInterrupt:
            logger.debug('init_publisher while loop is stopping')
            break
def init_subscribe(queue):
    subscriber = ZMQSubscriber(queue=queue)
    i = 10
    while i > 0:
        if not queue.empty():
            logger.debug('message through queue={}'.format(queue.get()))
        else:
            logger.debug('queue is empty')
        time.sleep(0.2)
        i -= 1
    time.sleep(5)
    subscriber.stop()
def get_products(self, page: str):
    try:
        LOGGER.debug('Getting products service')
        parse_page = self.__parser.page(page=page)
        products, products_total = self.__database.get_products(
            page=parse_page)
        return self.__parser.products(data=list(products.as_pymongo()),
                                      total=products_total,
                                      page=parse_page)
    except Exception as exception:
        LOGGER.debug(traceback.format_exc())
        LOGGER.critical(exception)
        raise exception
def delete_product(self, product_id: str):
    try:
        is_object_id(object_id=product_id)
        LOGGER.debug(f'Deleting product service {product_id}')
        self.__database.delete_product_by_id(product_id=product_id)
    except NotFoundException as exception:
        raise exception
    except Exception as exception:
        LOGGER.debug(traceback.format_exc())
        LOGGER.critical(exception)
        raise exception
def delete_customer(self, customer_id: str):
    try:
        is_object_id(object_id=customer_id)
        LOGGER.debug(f'Deleting customer service {customer_id}')
        self.__database.delete_customer_by_id(customer_id=customer_id)
    except NotFoundException as exception:
        raise exception
    except Exception as exception:
        LOGGER.debug(traceback.format_exc())
        LOGGER.critical(exception)
        raise exception
def run_cmd(self, tool, cmd):
    import subprocess
    from settings import LOGGER as logger
    tool_name = tool
    try:
        # `which` exits non-zero (raising CalledProcessError) when the tool is missing
        tool = subprocess.check_output(["which", tool]).strip()
    except subprocess.CalledProcessError:
        raise Exception('{} not found'.format(tool_name))
    try:
        args = " ".join(cmd).split(" ")
        pipe = subprocess.Popen(args, stderr=subprocess.PIPE, stdout=subprocess.PIPE)
        out, err = pipe.communicate()
        code = pipe.returncode
        return out, err, code
    except Exception as message:
        logger.debug('{}'.format(message))
def create_customer(self, data: dict):
    try:
        LOGGER.debug('Creating customer service')
        self.__validations.data(data=data)
        customer = Customer(**data)
        customer = self.__database.save(document=customer)
        return self.__parser.raw_to_json(
            data=customer.to_mongo().to_dict())
    except NotUniqueError:
        LOGGER.info('Email already used')
        raise EmailAlreadyUsedException()
    except ValidationError as exception:
        LOGGER.debug(traceback.format_exc())
        LOGGER.debug('Invalid fields value')
        raise InvalidFieldsValuesException(validations=exception.to_dict())
    except MissingRequiredFieldsException as exception:
        raise exception
    except Exception as exception:
        LOGGER.debug(traceback.format_exc())
        LOGGER.critical(exception)
        raise exception
def get_product(self, product_id: str):
    try:
        is_object_id(object_id=product_id)
        LOGGER.debug(f'Getting product {product_id}')
        product = self.__database.get_product_by_id(product_id=product_id)
        return self.__parser.raw_to_json(data=product.to_mongo().to_dict())
    except NotFoundException as exception:
        raise exception
    except Exception as exception:
        LOGGER.debug(traceback.format_exc())
        LOGGER.critical(exception)
        raise exception
def remove_favorite(self, product_id: str, customer_id: str):
    try:
        LOGGER.debug("Removing product from customer's favorites list")
        self.__validations.validate_customer_product_id(product_id=product_id,
                                                        customer_id=customer_id)
        customer = self.__database.get_customer_by_id(customer_id=customer_id)
        product = self.__database.get_product_by_id(product_id=product_id)
        customer = self.__database.pull_product_to_favorites(customer=customer,
                                                             product=product)
        return self.__parser.parse_customer_favorites(customer=customer)
    except NotUniqueError:
        LOGGER.info('Email already used')
        raise EmailAlreadyUsedException()
    except ValidationError as exception:
        LOGGER.debug(traceback.format_exc())
        LOGGER.debug('Invalid fields value')
        raise InvalidFieldsValuesException(validations=exception.to_dict())
    except MissingRequiredFieldsException as exception:
        raise exception
    except Exception as exception:
        LOGGER.debug(traceback.format_exc())
        LOGGER.critical(exception)
        raise exception
def get_customer(self, customer_id: str):
    try:
        is_object_id(object_id=customer_id)
        LOGGER.debug(f'Getting customer {customer_id}')
        customer = self.__database.get_customer_by_id(
            customer_id=customer_id)
        return self.__parser.parse_customer_favorites(customer=customer)
    except NotFoundException as exception:
        raise exception
    except Exception as exception:
        LOGGER.debug(traceback.format_exc())
        LOGGER.critical(exception)
        raise exception
def init_subscribe(queue):
    subscriber = ZMQSubscriber(queue=queue)
    dbproxy = DbProxy()
    while True:
        try:
            # Let's do something here.... read from queue and save to DB
            if not queue.empty():
                raw_message = queue.get()
                logger.debug('message through queue={}'.format(raw_message))
                # save to DB or....
                # dbproxy.add_web_check(raw_message)
            else:
                logger.debug('queue is empty')
            time.sleep(0.1)
        except KeyboardInterrupt:
            subscriber.stop()
            break
def receive(self):
    while not self.stop_sig.is_set():
        try:
            # non-blocking read; raises zmq.ZMQError (EAGAIN) when nothing is waiting
            message = self.socket.recv_string(zmq.NOBLOCK)
            if self.queue:
                self.queue.put(message)
            self.last_message = message
            logger.debug("ZMQSubscriber: data={}".format(
                self.last_message))
        except zmq.ZMQError:
            pass
    logger.debug('ZMQSubscriber: Stop')
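# A hedged sketch of how receive() is presumably driven: run it on a background
# thread, hand messages back through a queue, and call stop() (assumed to set
# stop_sig, matching its use in init_subscribe above) to exit cleanly.
import queue
import threading

q = queue.Queue()
subscriber = ZMQSubscriber(queue=q)
worker = threading.Thread(target=subscriber.receive, daemon=True)
worker.start()
# ... consume q.get() elsewhere ...
subscriber.stop()
worker.join()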
def init_publisher():
    publisher = ZMQPublisher()
    i = 0
    while True:
        try:
            # Let's do something here.... read from com port or ...etc...
            temp = serial.readline()
            if i >= 5:
                publisher.send("TEMP " + str(temp))
            else:
                i += 1
            barcode = hashlib.sha256(os.urandom(30)).hexdigest()[:10]
            # publisher.send(barcode, random.choice(['gui', 'all']))
            time.sleep(0.1)
        except KeyboardInterrupt:
            logger.debug('init_publisher while loop is stopping')
            break
def publish(records, max_queue_size=30, url=psettings.RABBITMQ_URL,
            exchange='MergerPipelineExchange', routing_key='FindNewRecordsRoute',
            LOGGER=LOGGER):
    # It's OK that we create/tear down this connection many times within this
    # script; it is not a bottleneck and likely slightly increases the
    # stability of the workflow.
    w = RabbitMQWorker()
    w.connect(url)

    # Hold onto the message if publishing it would cause the number of queued
    # messages to exceed max_queue_size.
    queues = ['UpdateRecordsQueue', 'ReadRecordsQueue']
    responses = [w.channel.queue_declare(queue=i, passive=True) for i in queues]
    while any(r.method.message_count >= max_queue_size for r in responses):
        LOGGER.debug(">=%s messages in the relevant queue(s). I will wait 15s while they get consumed." % max_queue_size)
        time.sleep(15)
        responses = [w.channel.queue_declare(queue=i, passive=True) for i in queues]

    payload = json.dumps(records)
    w.channel.basic_publish(exchange, routing_key, payload)
    LOGGER.debug("Published payload with hash: %s" % hash(payload))
    w.connection.close()
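# Hypothetical call site for publish(): the record shape below is an
# assumption; publish() itself only requires that `records` be
# JSON-serializable. The call blocks while the downstream queues hold
# max_queue_size or more messages.
records = [('bibcode-1', 'fingerprint-1'),
           ('bibcode-2', 'fingerprint-2')]
publish(records, max_queue_size=30)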
async def update_customer(self, request):
    try:
        LOGGER.debug('Update customer action')
        customer_id = request.path_params.get('customer_id')
        payload = await request.json()
        customer = CustomerService().update_customer(
            data=payload, customer_id=customer_id)
        return generate_response(data=customer)
    except JSONDecodeError:
        LOGGER.debug("Invalid payload")
        return generate_response(message=[INVALID_PAYLOAD], status_code=400)
    except MissingRequiredFieldsException as exception:
        return generate_response(message=exception.messages, status_code=400)
    except NotFoundException:
        return generate_response(status_code=404)
    except (InvalidFieldsValuesException,
            EmailAlreadyUsedException) as exception:
        return generate_response(message=exception.messages, status_code=422)
    except Exception as exception:
        LOGGER.debug(traceback.format_exc())
        LOGGER.critical(f"Unknown error: {exception}")
        return generate_response(status_code=500, message=[CRITICAL_ERROR])
async def update_product(self, request):
    try:
        LOGGER.debug('Update product action')
        product_id = request.path_params.get('product_id')
        payload = await request.json()
        product = ProductService().update_product(data=payload,
                                                  product_id=product_id)
        return generate_response(data=product)
    except JSONDecodeError:
        LOGGER.debug("Invalid payload")
        return generate_response(message=[INVALID_PAYLOAD], status_code=400)
    except MissingRequiredFieldsException as exception:
        return generate_response(message=exception.messages, status_code=400)
    except NotFoundException:
        return generate_response(status_code=404)
    except InvalidFieldsValuesException as exception:
        return generate_response(message=exception.messages, status_code=422)
    except Exception as exception:
        LOGGER.debug(traceback.format_exc())
        LOGGER.critical(f"Unknown error: {exception}")
        return generate_response(status_code=500, message=[CRITICAL_ERROR])
async def remove_favorite(self, request):
    try:
        LOGGER.debug('Removing customer favorite product action')
        customer_id = request.path_params.get('customer_id')
        product_id = request.path_params.get('product_id')
        customer = FavoritesService().remove_favorite(
            product_id=product_id, customer_id=customer_id)
        return generate_response(data=customer, status_code=201)
    except JSONDecodeError:
        LOGGER.debug("Invalid payload")
        return generate_response(message=[INVALID_PAYLOAD], status_code=400)
    except MissingRequiredFieldsException as exception:
        return generate_response(message=exception.messages, status_code=400)
    except DuplicateKeyError:
        return generate_response(message=[{
            'message': 'Duplicated email'
        }], status_code=400)
    except (InvalidFieldsValuesException,
            EmailAlreadyUsedException) as exception:
        return generate_response(message=exception.messages, status_code=422)
    except Exception as exception:
        LOGGER.debug(traceback.format_exc())
        LOGGER.critical(f"Unknown error: {exception}")
        return generate_response(status_code=500, message=[CRITICAL_ERROR])
async def create_customer(self, request):
    try:
        LOGGER.debug('Creating customer action')
        payload = await request.json()
        customer = CustomerService().create_customer(data=payload)
        return generate_response(data=customer, status_code=201)
    except JSONDecodeError:
        LOGGER.debug("Invalid payload")
        return generate_response(message=[INVALID_PAYLOAD], status_code=400)
    except MissingRequiredFieldsException as exception:
        return generate_response(message=exception.messages, status_code=400)
    except DuplicateKeyError:
        return generate_response(message=[{
            'message': 'Duplicated email'
        }], status_code=400)
    except (InvalidFieldsValuesException,
            EmailAlreadyUsedException) as exception:
        return generate_response(message=exception.messages, status_code=422)
    except Exception as exception:
        LOGGER.debug(traceback.format_exc())
        LOGGER.critical(f"Unknown error: {exception}")
        return generate_response(status_code=500, message=[CRITICAL_ERROR])
async def delete_customer(self, request):
    try:
        LOGGER.debug('Delete customer action')
        customer_id = request.path_params.get('customer_id')
        CustomerService().delete_customer(customer_id=customer_id)
        return generate_response(data={'_id': customer_id})
    except JSONDecodeError:
        LOGGER.debug("Invalid payload")
        return generate_response(message=[INVALID_PAYLOAD], status_code=400)
    except MissingRequiredFieldsException as exception:
        return generate_response(message=exception.messages, status_code=400)
    except NotFoundException:
        return generate_response(status_code=404)
    except InvalidFieldsValuesException as exception:
        return generate_response(message=exception.messages, status_code=422)
    except Exception as exception:
        LOGGER.debug(traceback.format_exc())
        LOGGER.critical(f"Unknown error: {exception}")
        return generate_response(status_code=500, message=[CRITICAL_ERROR])
def main(LOGGER=LOGGER, MONGO=MONGO, *args):
    PROJECT_HOME = os.path.abspath(os.path.dirname(__file__))
    start = time.time()
    LOGGER.debug('--Start--')
    if args:
        sys.argv.extend(*args)

    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--bibcode-files',
        nargs='*',
        default=CLASSIC_BIBCODES.values(),
        dest='updateTargets',
        help='full paths to bibcode files'
    )
    parser.add_argument(
        '--bibcodes',
        nargs='*',
        default=None,
        dest='targetBibcodes',
        help='Only analyze the specified bibcodes'
    )
    parser.add_argument(
        '--async',
        default=False,
        action='store_true',
        dest='async_mode',  # 'async' is a reserved word in Python 3.7+, so args.async would not parse
        help='start in async mode'
    )
    parser.add_argument(
        '--load-records-from-files',
        nargs='*',
        default=None,
        dest='load_from_files',
        help='Load XML records from files via pickle instead of ADSExports',
    )
    args = parser.parse_args()
    LOGGER.debug('Received args (%s)' % (args,))

    for target in args.updateTargets:
        targetRecords = []
        LOGGER.info('Working on bibcodes in %s' % target)

        s = time.time()  # Let's eventually use statsd for these timers :)
        with cd(PROJECT_HOME):
            with open(target) as fp:
                records = []
                for line in fp:
                    if not line or line.startswith("#"):
                        continue
                    r = tuple(line.strip().split('\t'))
                    if args.targetBibcodes:
                        if r[0] in args.targetBibcodes:
                            records.append(r)
                    else:
                        records.append(r)
                    if args.async_mode and len(records) >= BIBCODES_PER_JOB:
                        # Flush full batches as we go; otherwise we would miss the
                        # last batch unless the total is evenly divisible by
                        # BIBCODES_PER_JOB.
                        publish(records)
                        records = []
                        # TODO: Throttling?
        LOGGER.debug('[%s] Read took %0.1fs' % (target, (time.time() - s)))

        # Publish any leftovers in case the total was not evenly divisible
        if args.async_mode:
            if records:
                publish(records)
        else:
            s = time.time()
            records = utils.findChangedRecords(records, LOGGER, MONGO)
            LOGGER.info('[%s] Found %s records to be updated in %0.1fs' % (target, len(records), (time.time() - s)))

            if args.load_from_files:
                records, targets = utils.readRecordsFromFiles(records, args.load_from_files, LOGGER)
            else:
                records, targets = utils.readRecords(records, LOGGER)

            s = time.time()
            records = utils.updateRecords(records, targets, LOGGER)
            LOGGER.info('[%s] Updating %s records took %0.1fs' % (target, len(records), (time.time() - s)))

            s = time.time()
            utils.mongoCommit(records, LOGGER, MONGO)
            LOGGER.info('Wrote %s records to mongo in %0.1fs' % (len(records), (time.time() - s)))

    LOGGER.debug('--End-- (%0.1fs)' % (time.time() - start))
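# Typical invocations of main(), assuming this module is the entry point
# (the script name and file paths are placeholders):
#
#   python main.py --bibcode-files /path/to/bibcodes.list
#   python main.py --async --bibcodes bibcode-1 bibcode-2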