def graphql(event, context):
    """AWS Lambda handler for GraphQL requests.

    Decodes the API Gateway event body, runs the query against the schema
    and returns an API-Gateway-shaped response dict with CORS headers.
    """
    logger.debug('Event Received: graphql: {}'.format(json.dumps(event)))
    body = json.loads(event['body'])

    # Run the GraphQL query against the schema root.
    execution_results, _ = run_http_query(schema.root(), 'post', body)

    # Encode results and derive the HTTP status code.
    result, status_code = encode_execution_results(
        execution_results, default_format_error, False, lambda o: o)

    headers = {
        'Access-Control-Allow-Origin': "*",
        'Access-Control-Allow-Credentials': 'true'
    }
    response = {
        'statusCode': status_code,
        'headers': headers,
        'body': json.dumps(result)
    }
    logger.debug('Event Response: graphql: {}'.format(json.dumps(response)))
    return response
def process_response(self, request, response):
    """Close the hotshot profiler and log the top of its statistics.

    NOTE(review): hotshot is Python 2 only (removed in Python 3) — confirm
    the target interpreter before keeping this middleware.
    """
    self.prof.close()
    # pstats prints to stdout; temporarily redirect it into a StringIO.
    out = StringIO()
    old_stdout = sys.stdout
    sys.stdout = out
    stats = hotshot.stats.load(self.tmpfile)
    stats.sort_stats("time", "calls")
    stats.print_stats()
    sys.stdout = old_stdout
    stats_str = out.getvalue()
    # Default to the raw response body; prefer the profile text when present.
    profiling_info = response.content
    if response and response.content and stats_str:
        profiling_info = stats_str
    # Keep only the first 40 lines, then append the per-file summary.
    profiling_info = "\n".join(profiling_info.split("\n")[:40])
    profiling_info += self.summary_for_files(stats_str)
    logger.debug("\n" + profiling_info)
    return response
def process_response(self, request, response):
    """Close the hotshot profiler and log the top of its statistics.

    NOTE(review): near-duplicate of another process_response in this source;
    hotshot is Python 2 only — confirm the target interpreter.
    """
    self.prof.close()
    # pstats prints to stdout; temporarily redirect it into a StringIO.
    out = StringIO()
    old_stdout = sys.stdout
    sys.stdout = out
    stats = hotshot.stats.load(self.tmpfile)
    stats.sort_stats('time', 'calls')
    stats.print_stats()
    sys.stdout = old_stdout
    stats_str = out.getvalue()
    # Default to the raw response body; prefer the profile text when present.
    profiling_info = response.content
    if response and response.content and stats_str:
        profiling_info = stats_str
    # Keep only the first 40 lines, then append the per-file summary.
    profiling_info = "\n".join(profiling_info.split("\n")[:40])
    profiling_info += self.summary_for_files(stats_str)
    logger.debug("\n"+profiling_info)
    return response
def __init__(self, url, path, cloud):
    """Initialise a download task for *path*, cached locally under ./tmp."""
    saved_path = "./tmp" + path
    if not os.path.exists(os.path.dirname(saved_path)):
        try:
            os.makedirs(os.path.dirname(saved_path))
        except OSError as exc:  # Guard against race condition
            # NOTE(review): the original handler body is not visible in this
            # mangled source; `pass` assumed here — confirm against upstream
            # (a typical guard is `if exc.errno != errno.EEXIST: raise`).
            pass
    # Avoid locking wherever possible: almost all files must be readable
    # fast, otherwise the app will freeze or exit.
    # If a file only needs to be copied from the cloud (never previewed),
    # add its extension to this set so it downloads without preview handling.
    nonPreviewAbleExts = {"chunk"}
    self.isPreviewAble = True
    if saved_path.split(".")[-1].lower() in nonPreviewAbleExts:
        self.isPreviewAble = False
        logger.debug(f"{saved_path} is chunable")
    self.path = path
    self.url = url
    self.saved_path = saved_path
    self.cloud = cloud
    self.user_headers = cloud.getHeader()
    # Download part size: 256 KiB.
    self.part_size = 65536 * 4
    self.block_infos = []
    self.current_file_size = 0
    self.file_size = 0
    self.terminating = False
    self.mmap = None
    self.part_count = None
def test_get_quote_by_id():
    """End-to-end test: seed the DB, then fetch every quote by its id.

    Fixes: removed the unused local `size_of_quotes`, replaced the
    `if ... not in ...: assert False` anti-pattern with a direct membership
    assert, and renamed the loop variable so it no longer shadows builtin `id`.
    """
    logger.info("Running '/quote/id/{id}' tests")
    logger.info("Get '/admin/delete_all'")
    response = client.get("/admin/delete_all")
    logger.info("Post '/seed'")
    response = client.post("/seed", json=test_quotes_list)
    logger.info("Get '/quotes/id'")
    response = client.get("/quotes/id")
    msg = response.json()
    # Collect the ids of every seeded quote.
    quote_ids = [quote['id'] for quote in msg['quotes']]
    logger.debug("List of quote_ids", quote_ids=quote_ids)
    for quote_id in quote_ids:
        logger.info("Get '/quote/id/" + str(quote_id) + "'")
        response = client.get("/quote/id/" + str(quote_id))
        assert response.status_code == 200
        msg = response.json()
        del msg["backend"]
        logger.info("Quote Received " + str(msg["quotes"]))
        # Each returned quote must be one of the quotes we seeded.
        assert msg["quotes"] in test_quotes_list["quotes"]
def log_statistic(self):
    """Log per-cache statistics, then overall logger throughput."""
    # Let each cache report its own counters first.
    self.user_cache.log_statistic()
    self.domain_cache.log_statistic()
    logger.debug(
        'squid3 logger processed {0} records in {1} time'.format(
            self.record_count, self.profiler_time
        )
    )
def sig_handler(signum, frame):
    """Signal handler: flag shutdown and terminate the process.

    Fixes: the os module has no `os.exit()` — the original raised
    AttributeError instead of exiting; use `os._exit(0)` (the same call the
    sibling script main loop uses). Also corrected the 'siglan' log typo.
    """
    global running
    if running:
        running = False
        logger.debug('heligated caught signal {0}'.format(signum))
        logger.info('heligated stopping')
        # _exit skips atexit/finally handlers — matches usage elsewhere here.
        os._exit(0)
def log_statistic(self):
    """Log per-cache statistics, then overall redirector throughput."""
    # Let each cache report its own counters first.
    self.user_cache.log_statistic()
    self.domain_cache.log_statistic()
    self.domain_filter_cache.log_statistic()
    # Message text kept verbatim ('rederector' spelling is used throughout).
    logger.debug(
        'rederector processed {0} queries in {1} time'.format(
            self.record_count, self.profiler_time
        )
    )
def return_json_quotes(filepath):
    """Read *filepath* as JSON and return the parsed quotes structure."""
    logger.debug("Reading JSON Quotes from file", filepath=filepath)
    with open(filepath, mode="r") as fid:
        parsed_quotes = json.load(fid)
    logger.debug("JSON quotes from file", file=parsed_quotes)
    return parsed_quotes
def doWMI():
    """Probe the local machine via WMI.

    Returns True when the OperatingSystem query succeeds, False when the
    WMI object lacks the expected attribute.

    Fix: the local result used to be named `os`, shadowing the os module
    name — renamed to `os_info`.
    """
    log.debug("doWMI : " + __name__ + " loaded")
    c = wmi.WMI()
    try:
        os_info = c.OperatingSystem()
        log.info("WMI: " + os_info[0].CSName)
        return True
    except AttributeError:
        return False
def get_db():
    """Dependency generator: yield a DB session and always close it after use."""
    session = SessionLocal()
    logger.debug("DB session obtained")
    try:
        yield session
    finally:
        # Runs whether the request succeeded or raised.
        session.close()
        logger.debug("DB Closed")
def log_statistic(self):
    """Log UserCache query/miss counters and the current cache size."""
    template = (
        'UserCache statistic [query count]:{0} '
        '[miss count]:{1} [cache size]:{2}'
    )
    logger.debug(
        template.format(self.query_count, self.miss_count, len(self.cache_l1))
    )
def log_statistic(self):
    """Log DomainCache query/miss counters and both cache-level sizes."""
    template = (
        'DomainCache statistic [query count]:{0} [miss count]:{1} '
        '[L1 cache size]:{2} [L2 cache size]:{3}'
    )
    logger.debug(
        template.format(
            self.query_count,
            self.miss_count,
            len(self.cache_l1),
            len(self.cache_l2),
        )
    )
def seed_db(db: Session, quoteList: schemas.QuotesList):
    """Seed the quotes table, but only when it is currently empty.

    Returns True when the seed ran, None when the table already had rows.
    (The True/None return convention is preserved for existing callers.)

    Fixes: guard-clause structure and corrected the garbled debug message
    ("databse seesed" -> "database seeded").
    """
    if db.query(models.Quotes).first() is not None:
        # Table already populated — never seed twice.
        return None
    for quote in quoteList.quotes:
        create_quote(db=db, quote=quote)
    logger.info("database successfully seeded")
    logger.debug("database seeded with", quote=quoteList)
    return True
def handle(self, *args, **options):
    """Register every stored static IPv4 address and log how many were done."""
    logger.debug('start register IP in accounts_static_ip application')
    registered = 0
    for entry in StaticIp4.objects.all():
        entry.register()
        registered += 1
    logger.debug(
        'static ip[{0}] successfully registered in '
        'accounts_static_ip application'.format(registered)
    )
async def get_quote(
    request: Request, settings: config.Settings = Depends(get_settings)
):
    """Render the index page with a quote and, when available, an author image."""
    quote = quoteCRUD.get_quote_simple(url=settings.url_quote_backend)
    # Message text kept verbatim ("Recieved" spelling is pre-existing).
    logger.debug("Message Recieved: " + str(quote))
    # Only look up an image when the quote actually carries an author name.
    image = (
        imageCRUD.get_image_simple(str(quote.name))
        if quote.name is not None
        else None
    )
    return templates.TemplateResponse(
        "index.html", {"request": request, "quote": quote, 'image': image}
    )
def get_image_simple(apiVersion: int = 1, name: str = None):
    """Fetch an image for *name* from the external image API.

    Returns {'image': ...} on success, or an error Image model on any failure.

    Fixes: `"Name: " + name` raised TypeError whenever name was None (its
    default) — wrapped in str(); corrected "occured" -> "occurred" in the
    error message.
    """
    try:
        # v2 of the API takes the author under a different parameter name.
        if apiVersion == 2:
            url = settings.external_image_api_v2
            params = {'author': name}
        else:
            url = settings.external_image_api
            params = {'name': name}
        with httpx.Client() as client:
            logger.debug("URL: " + url)
            # str() guard: name may be None, and "Name: " + None is a TypeError.
            logger.debug("Name: " + str(name))
            response = client.get(url, params=params)
            response.raise_for_status()
            try:
                json_object = response.json()
                if 'image' in json_object:
                    return {'image': json_object['image']}
                else:
                    # Valid JSON but no 'image' key — treat as invalid payload.
                    raise ValueError
            except ValueError:
                error_msg = "Not Valid JSON"
                logger.error(error_msg)
                return Image(name='error', Image=error_msg)
    except httpx.RequestError as exc:
        error_msg = f"An error occurred while requesting {exc.request.url!r}."
        logger.error(error_msg)
        return Image(name='error', Image=error_msg)
    except httpx.HTTPStatusError as exc:
        error_msg = f"Error response {exc.response.status_code} while requesting {exc.request.url!r}."
        logger.error(error_msg)
        return Image(name='error', Image=error_msg)
    except httpx.HTTPError as exc:
        error_msg = f"HTTPError Error while requesting {exc.request.url!r}."
        logger.error(error_msg)
        return Image(name='error', Image=error_msg)
    except httpx.InvalidURL as exc:
        # NOTE(review): httpx.InvalidURL carries no .request attribute, so this
        # handler would itself raise if it ever fired — confirm and simplify.
        error_msg = f"Error while requesting {exc.request.url!r}."
        logger.error(error_msg)
        return Image(name='error', Image=error_msg)
def get_image_as_base64(name: str = None):
    """Return the base64-encoded bytes of the static image matching *name*.

    Raises ValueError when *name* is missing or the resolved path is not a
    recognised image file.
    """
    if name is None:
        # Message text kept verbatim ("Invlid" spelling is pre-existing).
        logger.error("Invlid name provided: " + str(name))
        raise ValueError
    filename = translate_name_to_filename(name)
    image_path = pathlib.Path(
        str(pathlib.Path.cwd().joinpath("static", "images", filename))
    )
    logger.debug("Imagepath: " + str(image_path))
    if not (image_path.is_file() and imghdr.what(image_path) is not None):
        logger.error("Imagepath not image", Imagepath=str(image_path))
        raise ValueError
    with open(image_path, "rb") as img_file:
        return base64.b64encode(img_file.read()).decode("utf-8")
def process_view(self, request, callback, callback_args, callback_kwargs):
    """Profile the view callback and compute DB time for the queries it ran.

    Fixes: the query *count* was measured against connection.queries but the
    *times* were summed from db.db.queries — now both use connection.queries;
    reduce(add, ...) replaced with the builtin sum().
    """
    # Force DEBUG on so the DB backend records query timings.
    debug = settings.DEBUG
    settings.DEBUG = True
    # Number of queries issued before the view runs.
    n = len(connection.queries)
    value = self.prof.runcall(callback, request, *callback_args, **callback_kwargs)
    # Queries executed by this view only.
    queries = len(connection.queries) - n
    if queries:
        dbTime = sum(float(q['time']) for q in connection.queries[n:])
    else:
        dbTime = 0.0
    # Restore the original debugging setting.
    settings.DEBUG = debug
    logger.debug("\nDB Profile\nQueries: " + str(queries) + "\ndb Execution Time: "+str(dbTime))
    return value
def process_view(self, request, callback, callback_args, callback_kwargs):
    """Profile the view callback and compute DB time for the queries it ran.

    Fixes: the query *count* was measured against connection.queries but the
    *times* were summed from db.db.queries — now both use connection.queries;
    reduce(add, ...) replaced with the builtin sum().
    """
    # Force DEBUG on so the DB backend records query timings.
    debug = settings.DEBUG
    settings.DEBUG = True
    # Number of queries issued before the view runs.
    n = len(connection.queries)
    value = self.prof.runcall(callback, request, *callback_args, **callback_kwargs)
    # Queries executed by this view only.
    queries = len(connection.queries) - n
    if queries:
        dbTime = sum(float(q["time"]) for q in connection.queries[n:])
    else:
        dbTime = 0.0
    # Restore the original debugging setting.
    settings.DEBUG = debug
    logger.debug("\nDB Profile\nQueries: " + str(queries) + "\ndb Execution Time: " + str(dbTime))
    return value
def handle(self, *args, **options):
    """Generate the iptables config and echo it to stdout and the debug log."""
    logger.debug('start generate config for iptables in firewall application')
    generated = get_all_conf()
    self.stdout.write(generated)
    logger.debug(generated)
    logger.debug('config successfully generated for iptables in firewall application')
def handle(self, *args, **options):
    """Generate the excluded-users config and echo it to stdout and the log."""
    logger.debug('start generate excluded users config in squid3 application')
    generated = gen_excluded_users()
    self.stdout.write(generated)
    logger.debug(generated)
    logger.debug('excluded users config successfully generated in squid3 application')
def handle(self, *args, **options):
    """Generate the intercept config and echo it to stdout and the debug log."""
    logger.debug('start generate intercept config in squid3 application')
    generated = gen_intercept_conf()
    self.stdout.write(generated)
    logger.debug(generated)
    logger.debug('intercept config successfully generated in squid3 application')
def handle(self, *args, **options):
    """Generate the user classifier and echo it to stdout and the debug log."""
    logger.debug('start generate user classifier in firewall application')
    generated = get_update_classifier()
    self.stdout.write(generated)
    logger.debug(generated)
    logger.debug('user classifier successfully generated in firewall application')
def authenticate(event, context):
    """Lambda authorizer: validate the bearer token, return an IAM policy."""
    logger.debug('Event Received: authenticate')
    # Strip the scheme prefix to get the raw token.
    token = event['authorizationToken'].replace('Bearer ', '')
    api_key = Auth.authenticate(token)
    if not api_key:
        # Unknown/invalid token: deny access.
        logger.debug('Event Response: Deny')
        return generatePolicy(None, 'Deny', event['methodArn'])
    logger.debug('Event Response: Allow')
    return generatePolicy(api_key, 'Allow', event['methodArn'])
def handle(self, *args, **options):
    """Generate the squid3 daemon config and write it to SQUID_CONF_FILE.

    Fix: open/write/close replaced with a with-statement so the file handle
    is closed even when write() raises.
    """
    logger.debug('start generating config for squid3 daemon in squid3 application')
    config = gen_squid_conf()
    logger.debug(config)
    conf_file_name = CONFIG['SQUID_CONF_FILE']
    try:
        with open(conf_file_name, 'w') as f:
            f.write(config)
    except IOError:
        msg = "can't write file {0}".format(conf_file_name)
        logger.error(msg)
    else:
        logger.debug('config for squid3 daemon successfully generated in squid3 application')
# NOTE(review): the next two statements look like the orphaned tail of an
# exception handler (likely `except_hook`) whose def line was lost in this
# mangled source — confirm against the upstream file.
logger.exception('error in rederector')
os._exit(1)


if __name__ == '__main__':
    logger.info('rederector starting')
    # Route uncaught exceptions through the logger before exiting.
    sys.excepthook = except_hook
    event_loop_thread = threading.Thread(target=loop_run)
    event_loop_thread.start()
    logger.info('rederector started')
    try:
        # Squid redirector protocol: read one URL per line from stdin and
        # write the (possibly rewritten) URL back on stdout.
        while True:
            line = sys.stdin.readline()
            if len(line) <= 1:
                # Blank line or EOF: dump statistics and stop cleanly.
                logger.debug('rederector received blank line')
                logger.info('rederector stopping')
                redirector.log_statistic()
                logger.info('rederector stoped')
                os._exit(0)
            url = redirector.redirect(line)
            sys.stdout.write(url)
            # Flush immediately — squid blocks on each reply.
            sys.stdout.flush()
    except KeyboardInterrupt:
        os._exit(0)
    except:
        # Last-resort handler: log and exit non-zero (bare except kept as-is).
        logger.exception('error in rederector')
        os._exit(1)
def add_backend(fieldname: str = "detail", msg: str = None):
    """Build a response dict that always carries the backend driver name."""
    content = {}
    content["backend"] = sql_drivername
    content[fieldname] = msg
    return content


""" Bootstrap the DB if enviornment path to file is provided """
# Module-level bootstrap: seed the database once at import time when a JSON
# seed file path was supplied via the environment.
if json_file_path is not None:
    quotes_as_json = return_json_quotes(json_file_path)
    logger.debug("attempting to seed DB")
    db_session = SessionLocal()
    quoteList = schemas.QuotesList(quotes=quotes_as_json["quotes"])
    # seed_db returns None when the table already has data.
    if quotesCRUD.seed_db(db=db_session, quoteList=quoteList) is None:
        logger.info("DB Seed Unsuccessful")
    else:
        logger.info("database successfully seeded")
    db_session.close()


@app.get("/", response_model=List[schemas.Quote])
async def quote(db: Session = Depends(get_db)):
    # NOTE(review): this route handler appears truncated in the visible
    # chunk — only its first statement is shown here.
    quote = jsonable_encoder(quotesCRUD.get_random_quote(db))
def mutate(self, info, title, sprint_id, participants, end_date, sprint_questions,
           background, deliverables, key_findings, next_steps, value, **kwargs):
    """Build a report-out slide deck, upload it to S3 and return a presigned URL.

    Slides that call move_to_front are created in reverse display order
    (deliverables first, title slide last), so the deck reads title-first.
    """
    template_url = kwargs.get('template_url', None)
    presentation = None
    if template_url and template_url != '':
        # Download the caller-supplied template to a temp file, load it, then
        # remove the temp file regardless of the outcome.
        logger.debug('Fetching template file: {0}'.format(template_url))
        r = requests.get(template_url)
        logger.debug('Done fetching template file.')
        if r.status_code == 200:
            fd, tmp_filename = tempfile.mkstemp(suffix='.pptx')
            try:
                with os.fdopen(fd, 'wb') as tmp:
                    tmp.write(r.content)
                presentation = Presentation(tmp_filename)
            finally:
                if os.path.isfile(tmp_filename):
                    os.remove(tmp_filename)
        else:
            raise Exception(
                'Could not load template_url: {0}'.format(template_url))
    else:
        # Fall back to the bundled empty template.
        presentation = Presentation('assets/template_ki_empty.pptx')

    # Slide-layout names that must exist in the template.
    SLD_TITLE = 'Title Slide - Text Only'
    SLD_HEAD_COPY = 'Full Width Head'
    SLD_HEAD_BULLETS = 'Full Width Head + Bullets'
    # SLD_HEAD_SUBHEAD_COPY = 3
    # SLD_HEAD_ONLY = 7
    SLD_INSTRUCTIONS = 'INSTRUCTIONS'
    title_layout = presentation.slide_layouts.get_by_name(SLD_TITLE)
    plain_layout = presentation.slide_layouts.get_by_name(SLD_HEAD_COPY)
    bullet_layout = presentation.slide_layouts.get_by_name(SLD_HEAD_BULLETS)
    if title_layout is None or plain_layout is None or bullet_layout is None:
        raise Exception('Slide deck provided is not using the correct template')

    # deliverables
    slide = presentation.slides.add_slide(bullet_layout)
    shapes = slide.shapes
    title_shape = shapes.title
    body_shape = CreateSlideDeck.get_placeholder(slide, 'Text Placeholder 1')
    title_shape.text = 'Deliverables'
    tf = body_shape.text_frame
    for item in deliverables:
        p = tf.add_paragraph()
        p.text = item
        p.level = 1
    CreateSlideDeck.add_notes(slide)
    CreateSlideDeck.move_to_front(presentation)

    # questions
    slide = presentation.slides.add_slide(bullet_layout)
    shapes = slide.shapes
    title_shape = shapes.title
    body_shape = CreateSlideDeck.get_placeholder(slide, 'Text Placeholder 1')
    title_shape.text = 'Sprint Questions'
    tf = body_shape.text_frame
    for item in sprint_questions:
        p = tf.add_paragraph()
        p.text = item
        p.level = 1
    CreateSlideDeck.add_notes(slide)
    CreateSlideDeck.move_to_front(presentation)

    # value hypothesis
    slide = presentation.slides.add_slide(plain_layout)
    title_shape = CreateSlideDeck.get_placeholder(slide, 'Title 2')
    body_shape = CreateSlideDeck.get_placeholder(slide, 'Text Placeholder 1')
    title_shape.text = "Value Hypothesis"
    body_shape.text = value
    CreateSlideDeck.add_notes(slide)
    CreateSlideDeck.move_to_front(presentation)

    # background
    slide = presentation.slides.add_slide(plain_layout)
    title_shape = CreateSlideDeck.get_placeholder(slide, 'Title 2')
    body_shape = CreateSlideDeck.get_placeholder(slide, 'Text Placeholder 1')
    title_shape.text = "Background"
    body_shape.text = background
    CreateSlideDeck.add_notes(slide)
    CreateSlideDeck.move_to_front(presentation)

    # title slide
    slide = presentation.slides.add_slide(title_layout)
    title_shape = CreateSlideDeck.get_placeholder(slide, 'Title 1')
    body_shape1 = CreateSlideDeck.get_placeholder(slide, 'Text Placeholder 2')
    body_shape2 = CreateSlideDeck.get_placeholder(slide, 'Text Placeholder 3')
    title_shape.text = 'Rally {0}: {1}'.format(sprint_id, title)
    body_shape1.text = 'Completed {0}'.format(end_date)
    body_shape2.text = 'Rally participants {0}'.format(', '.join(participants))
    CreateSlideDeck.add_notes(slide)
    CreateSlideDeck.move_to_front(presentation)

    # data/methods/results are already part of the deck

    # key findings (appended at the end; no move_to_front here)
    slide = presentation.slides.add_slide(bullet_layout)
    shapes = slide.shapes
    title_shape = shapes.title
    body_shape = CreateSlideDeck.get_placeholder(slide, 'Text Placeholder 1')
    title_shape.text = 'Key Findings'
    tf = body_shape.text_frame
    for item in key_findings:
        p = tf.add_paragraph()
        p.text = item
        p.level = 1
    CreateSlideDeck.add_notes(slide)

    # next steps
    slide = presentation.slides.add_slide(bullet_layout)
    shapes = slide.shapes
    title_shape = shapes.title
    body_shape = CreateSlideDeck.get_placeholder(slide, 'Text Placeholder 1')
    title_shape.text = 'Next Steps'
    tf = body_shape.text_frame
    items = next_steps
    for item in items:
        p = tf.add_paragraph()
        p.text = item
        p.level = 1
    CreateSlideDeck.add_notes(slide)

    # remove INSTRUCTIONS slide before saving
    slides = list(presentation.slides)
    slides2 = list(presentation.slides._sldIdLst)
    rm_idx = next((i for i in range(len(slides))
                   if slides[i].slide_layout.name == SLD_INSTRUCTIONS), None)
    if rm_idx != None:
        presentation.slides._sldIdLst.remove(slides2[rm_idx])

    # Save locally with a timestamped name, upload to S3, then clean up.
    timestamp = datetime.datetime.now().strftime("%Y%m%d%H%M%S%f")
    ppt_file_name = 'Rally_{0}_report-out-{1}.pptx'.format(sprint_id, timestamp)
    ppt_file_path = os.path.join(tempfile.gettempdir(), ppt_file_name)
    presentation.save(ppt_file_path)

    # Store on S3
    try:
        logger.debug('Uploading SlideDeck to S3.')
        s3 = boto3.resource('s3')
        s3.meta.client.upload_file(ppt_file_path, Env.SLIDE_DECKS_BUCKET_NAME(), ppt_file_name)
        logger.debug('Finished uploading SlideDeck to S3.')
        # Generate a presigned URL that expires.
        presigned_url = s3.meta.client.generate_presigned_url(
            'get_object',
            Params={'Bucket': Env.SLIDE_DECKS_BUCKET_NAME(), 'Key': ppt_file_name},
            ExpiresIn=Env.SLIDE_DECKS_URL_EXPIRES_IN_SECONDS()
        )
    finally:
        # Always remove the local temp file, even if the upload failed.
        if os.path.isfile(ppt_file_path):
            os.remove(ppt_file_path)

    new_slide_deck = SlideDeck(url=presigned_url)
    return CreateSlideDeck(slide_deck=new_slide_deck)
def sig_handler(signum, frame):
    """Signal handler: log which signal the squid3 logger received.

    Fix: corrected the log-message typo 'siglan' -> 'signal'. No shutdown
    action is taken here; the handler only records the event.
    """
    logger.debug('squid3 logger caught signal {0}'.format(signum))
def tryWindows():
    """Probe the machine via WMI and return the probe result."""
    log.debug("Trying Windows")
    result = core.windowsapi.doWMI()
    log.debug("Windows done")
    return result
def handle(self, *args, **options):
    """Delete every stored RedirectUrl entry."""
    logger.debug('start clear URL entries in accounts_web application')
    # Bulk-delete all rows in one queryset call.
    RedirectUrl.objects.all().delete()
    logger.debug('all URL entries successfully deleted in accounts_web application')
def process_response(self, request, response):
    """Log the SQL of every query executed during this request, then pass through."""
    for executed in connection.queries:
        logger.debug(executed["sql"])
    return response
def process_response(self, request, response):
    """Log the SQL of every query executed during this request, then pass through."""
    for executed in connection.queries:
        logger.debug(executed['sql'])
    return response
def tryUNIX():
    """Probe the machine over SSH and return the probe result.

    Fix: the result of doSSH() was discarded (the function implicitly
    returned None), unlike the sibling tryWindows() which returns its probe
    result — now returns it for consistency. Callers that ignored the
    return value are unaffected.
    """
    log.debug("Trying UNIX")
    ret = core.sshclient.doSSH()
    log.debug("UNIX done")
    return ret