def count_logentries():
    """Count log entries.

    POST with a JSON body builds a filter dict from the whitelisted keys of
    the request's "find" object; a plain GET counts everything.

    Raises InvalidAPIUsage(415) for any other method/content type, and
    InvalidAPIUsage(404) when LogEntry.count rejects the filter.
    """
    if request.method == 'POST' and request.headers[
            'Content-Type'] == 'application/json':
        find = dict()
        request_data = request.json
        request_find = request_data.get('find', dict())
        # Only whitelisted filter keys are honoured; anything else is dropped.
        for key, value in request_find.iteritems():
            if key in ('level', 'owner', 'datetimestamp', 'tags', 'start',
                       'end'):
                find[key] = value
    elif request.method == 'GET':
        # Plain GET: no filtering.
        find, sort, limit = None, None, None
    else:
        raise InvalidAPIUsage(
            'Unsupported Media Type. \"application/json\" required.\n', 415)
    entry = LogEntry()
    try:
        result = entry.count(find)
    except ValueError, e:
        raise InvalidAPIUsage(e.message, status_code=404)
    # NOTE(review): `result` is computed but not returned in the visible
    # span -- presumably serialized further down; confirm against the full file.
def test_simple(self):
    """Ten stored entries should all come back from a plain GET."""
    for idx in xrange(10):
        entry = LogEntry.make(self._default_access, None, None,
                              action="act{}".format(idx))
        entry.put()
    response = self.get()
    self.assertEquals(len(response.json["result"]), 10)
def get_logentry_list():
    """Return a list of log entries.

    POST with a JSON body supports "find" (whitelisted filter keys),
    "sort", "limit" and "skip"; a plain GET lists everything.

    Raises InvalidAPIUsage(415) for any other method/content type, and
    InvalidAPIUsage(404) when LogEntry.get_entries rejects the arguments.
    """
    if request.method == 'POST' and request.headers[
            'Content-Type'] == 'application/json':
        request_data = request.json
        request_find = request_data.get('find', dict())
        sort = request_data.get('sort')
        limit = request_data.get('limit')
        skip = request_data.get('skip')
        find = dict()
        # Only whitelisted filter keys are copied into the query.
        for key, value in request_find.iteritems():
            if key in ('level', 'owner', 'datetimestamp', 'tags', 'start',
                       'end'):
                find[key] = value
    elif request.method == 'GET':
        # Plain GET: no filtering, sorting or paging.
        find, sort, limit, skip = None, None, None, None
    else:
        raise InvalidAPIUsage(
            'Unsupported Media Type. \"application/json\" required.\n', 415)
    entry = LogEntry()
    try:
        result = entry.get_entries(find=find, sort=sort, skip=skip,
                                   limit=limit)
    except ValueError, e:
        raise InvalidAPIUsage(e.message, status_code=404)
    # NOTE(review): `result` is never returned in the visible span --
    # presumably serialized further down; confirm against the full file.
def get_logentry_list():
    """Return a list of log entries.

    POST with a JSON body supports "find" (whitelisted filter keys),
    "sort", "limit" and "skip"; a plain GET lists everything.

    Raises InvalidAPIUsage(415) for any other method/content type, and
    InvalidAPIUsage(404) when LogEntry.get_entries rejects the arguments.
    """
    if request.method == 'POST' and request.headers['Content-Type'] == 'application/json':
        request_data = request.json
        request_find = request_data.get('find', dict())
        sort = request_data.get('sort')
        limit = request_data.get('limit')
        skip = request_data.get('skip')
        find = dict()
        # Only whitelisted filter keys are copied into the query.
        for key, value in request_find.iteritems():
            if key in ('level', 'owner', 'datetimestamp', 'tags', 'start',
                       'end'):
                find[key] = value
    elif request.method == 'GET':
        # Plain GET: no filtering, sorting or paging.
        find, sort, limit, skip = None, None, None, None
    else:
        raise InvalidAPIUsage('Unsupported Media Type. \"application/json\" required.\n', 415)
    entry = LogEntry()
    try:
        result = entry.get_entries(find=find, sort=sort, skip=skip,
                                   limit=limit)
    except ValueError, e:
        raise InvalidAPIUsage(e.message, status_code=404)
    # NOTE(review): `result` is never returned in the visible span --
    # presumably serialized further down; confirm against the full file.
def test_with_week_counter(self):
    """share_photo events inside the 7-day window count; older ones do not."""
    self._load_simple()
    LogEntry.make(self.app_access.key, "free_u", None,
                  action="share_photo").put()
    # Spread entries across the week; the 10-day one falls outside it.
    for offset in (timedelta(hours=25), timedelta(days=3),
                   timedelta(days=6), timedelta(days=10)):
        LogEntry.make(self.app_access.key, "free_u", None,
                      action="share_photo",
                      when=(datetime.now() - offset)).put()
    state = self.app_access.compile_profile_state(user_id="free_u")
    self.assertEquals(state["profile"], "free")
    self.assertEquals(state["default"], "deny")
    self.assertEquals(len(state["states"]), 3)
    photo_states = state["states"]["share_photo"]
    self.assertEquals(photo_states[0]["left"], 9)
    self.assertEquals(photo_states[0]["limit_to"], 10)
    self.assertEquals(photo_states[1]["left"], 16)
    self.assertEquals(photo_states[1]["limit_to"], 20)
def collect_form(request):
    """Show the collection form; on a valid POST create the LogEntry."""
    page_message = None
    log_entry = None
    if request.method == 'GET':
        collect_form = CollectForm(initial=request.GET)
    else:
        collect_form = CollectForm(request.POST)
        if collect_form.is_valid():
            cleaned = collect_form.cleaned_data
            log_entry = LogEntry()
            log_entry.log = cleaned['log']
            log_entry.subject = cleaned['title']
            log_entry.content = '<blockquote>%s</blockquote><p class="collection-note">%s</p>' % (
                cleaned['excerpt'] or '', cleaned['note'] or '')
            log_entry.publish = cleaned['make_public']
            log_entry.source_url = cleaned['url']
            log_entry.issued = datetime.now()
            log_entry.save()
            page_message = 'The <a href="%s">log entry</a> was created.' % reverse(
                'publish.views.log_entry', args=[],
                kwargs={'slug': log_entry.log.slug, 'pk': log_entry.id})
    context = {
        'collect_form': collect_form,
        'page_message': page_message,
        'log_entry': log_entry
    }
    return render_to_response('publish/collect_form.html', context,
                              context_instance=RequestContext(request))
def add_logentry():
    """Create a new log entry and save it to the DB.

    Returns a JSON body with OK=False plus error details when validation
    fails or the client did not send JSON; otherwise OK=True and the new
    ObjectId as a string.
    """
    if request.headers['Content-Type'] == 'application/json':
        errors = []
        request_data = request.json
        # Check the selected level (required).
        level = ""
        if "level" not in request_data:
            errors.append({'level': 'Field required.'})
        else:
            level = request_data['level']
            # Is the level one of the types listed in the config file?
            if level not in config['level']:
                errors.append({'level': 'Unknown level type.'})
        # Check that owner is present (required).
        owner = ""
        if "owner" not in request_data:
            errors.append({'owner': 'Field required.'})
        else:
            owner = request_data['owner']
        # Check that data is present (required).
        data = ""
        if "data" not in request_data:
            errors.append({'data': 'Field required.'})
        else:
            data = request_data['data']
        tags = []
        # Tags are optional.  When present they must already be a JSON array;
        # otherwise report the error and do not create the entry.
        if "tags" in request_data:
            tags = request.json['tags']
            if not isinstance(tags, list):
                errors.append({'tags': 'Tags must be an array.'})
        if not errors:
            entry = LogEntry(level, owner, data, tags)
            id_or_error = entry.save()
            if not isinstance(id_or_error, ObjectId):
                # save() returned an error payload instead of an ObjectId.
                return jsonify({'OK': False, 'error': id_or_error})
            # __str__ gives the string form of MongoDB's ObjectId.
            return jsonify({'OK': True, 'id': id_or_error.__str__()})
        else:
            return jsonify({"OK": False, 'errors': errors})
    else:
        # TODO: replace this with a proper 415 exception.
        return jsonify({"errors": ["415 Unsupported Media Type. \"application/json\" required.\n",]})
def process_request(self, request):
    """Middleware hook: persist one LogEntry per incoming request.

    Logging happens only when ``settings.LOG_ACTIVE`` is the literal
    ``True``.  The original condition ``X and X is True`` is exactly
    equivalent to ``X is True`` (a truthy-but-not-True value made the
    second operand False), so it is simplified here.
    """
    if settings.LOG_ACTIVE is True:
        entry = LogEntry(
            date_time=datetime.now(),
            method=request.META.get('REQUEST_METHOD'),
            # The path column is capped at 256 characters.
            path=request.path[:256],
            ip=request.META.get('REMOTE_ADDR'),
        )
        entry.save()
def test_none_list(self):
    """A single dict payload (not wrapped in a list) creates one entry."""
    self.post(status=201, device_id="AMEI",
              entries=json.dumps({"action": "upload_photo"}))
    self.assertEquals(LogEntry.query().count(), 1)
    stored = LogEntry.query().get()
    self.assertEquals(stored.action, "upload_photo")
    self.assertEquals(stored.quantity, 1)
    self.assertEquals(stored.device.string_id(), "AMEI")
    self.assertTrue(stored.user is None)
    self.assertEquals(stored.key.parent(), self._default_access)
def test_ensure_order(self):
    """Entries come back newest-first."""
    for n in xrange(10):
        LogEntry.make(self._default_access, None, None,
                      action="act_{}".format(n)).put()
    response = self.get()
    rows = response.json["result"]
    self.assertEquals(len(rows), 10)
    expected = ["act_{}".format(n) for n in xrange(9, -1, -1)]
    self.assertEquals([row["action"] for row in rows], expected)
def test_with_userd_list(self):
    """Posting with only a user_id attaches the entry to that user."""
    self.post(status=201, user_id="custom_user_g+0001",
              entries=json.dumps({"action": "start_app"}))
    self.assertEquals(LogEntry.query().count(), 1)
    stored = LogEntry.query().get()
    self.assertEquals(stored.action, "start_app")
    self.assertEquals(stored.quantity, 1)
    self.assertEquals(stored.user.string_id(), "custom_user_g+0001")
    self.assertTrue(stored.device is None)
    self.assertEquals(stored.key.parent(), self._default_access)
def test_multiple(self):
    """Two entries in one payload each become their own record."""
    payload = [{"action": "upload_photo"}, {"action": "upload_photo"}]
    self.post(status=201, device_id="Meito", entries=json.dumps(payload))
    self.assertEquals(LogEntry.query().count(), 2)
    for stored in LogEntry.query():
        self.assertEquals(stored.quantity, 1)
        self.assertEquals(stored.device.string_id(), "Meito")
        self.assertTrue(stored.user is None)
        self.assertEquals(stored.key.parent(), self._default_access)
def test_user_and_device(self):
    """Both user_id and device_id are stored when supplied together."""
    self.post(status=201, user_id="custom_user_g+0004",
              device_id="Ameito192",
              entries=json.dumps([{"action": "start_app"}]))
    self.assertEquals(LogEntry.query().count(), 1)
    stored = LogEntry.query().get()
    self.assertEquals(stored.action, "start_app")
    self.assertEquals(stored.quantity, 1)
    self.assertEquals(stored.user.string_id(), "custom_user_g+0004")
    self.assertEquals(stored.device.string_id(), "Ameito192")
    self.assertEquals(stored.key.parent(), self._default_access)
def get_owners():
    """Return the list of distinct log-entry owners.

    Raises InvalidAPIUsage(404) when LogEntry.get_owners fails.
    """
    entry = LogEntry()
    try:
        result = entry.get_owners()
    except ValueError, e:
        raise InvalidAPIUsage(e.message, status_code=404)
    # NOTE(review): `result` is not returned in the visible span --
    # presumably serialized further down; confirm against the full file.
def log_send_mail_done(sender, **kwargs):
    """Signal handler: record a sent admin mail in both logger and DB.

    The message string was previously built twice (once as lazy logger
    args, once with %-formatting for the DB row); it is now built once and
    used for both, producing identical log output.
    """
    logger = logging.getLogger(__name__)
    email = kwargs['from_email']
    subject = kwargs['subject']
    logger_info = u'Send mail to Admin from %s, Theme - %s' % (email, subject)
    logger.info(logger_info)
    log = LogEntry(log_datetime=timezone.now(), status="INFO",
                   signal='Send mail to Admin', info=logger_info)
    log.save()
def log_action(request, game_name):
    """Parse the POSTed newline-delimited JSON event log and persist it.

    The current play is resolved from the session key
    'current_play_id_for_<game_name>'; each line of the "log" field is one
    JSON event.
    """
    play = get_object_or_404(
        SavedPlay,
        pk=request.session['current_play_id_for_%s' % game_name])
    log = request.POST.get('log', '')
    # One JSON document per line; StringIO iterates the payload line-wise
    # (Python 2: encode() yields a byte string suitable for StringIO).
    for line in StringIO(log.encode('utf-8')):
        ev = simplejson.JSONDecoder().decode(line)
        entry = LogEntry(savedplay=play, type=ev['type'], ms=ev['time'],
                         order=ev['order'], _data='')
        entry.data = ev['data']
        entry.save()
    return json_to_response({'status': 200})
def on_report(self, original_msg, response, sack, from_buffer=False):
    """Handle a device report: persist a LogEntry, then publish GPS data.

    Reports with an unparseable gps_utc_time are dropped silently; headers
    other than FIXED_REPORT/OBD_REPORT are logged and abort the coroutine.
    """
    log = dict(response.log.__dict__)
    log_entry = LogEntry()
    # Fall back to the session key when the report carries no unique_id.
    log_entry.imei = log.get('unique_id', self.session_key)
    try:
        log_entry.gps_utc_time = time_utils.dt2ts(time_utils.to_dt(
            log.get('gps_utc_time')))
    except (ValueError, TypeError):
        # Unparseable timestamp: ignore the whole report.
        return
    if response.header in (conf.FIXED_REPORT, conf.OBD_REPORT):
        log_entry.gps_accuracy = log.get('gps_accuracy', None)
        log_entry.speed = log.get('speed', None)
        log_entry.altitude = log.get('altitude', None)
        log_entry.longitude = log.get('longitude', None)
        log_entry.latitude = log.get('latitude', None)
        # mapped_log['rpm'] = log.get('rpm', None)
    else:
        gen_log.warning("Common Protocol hasn't conform to report %s",
                        response.header)
        raise gen.Return(None)
    session = self.backend.get_session()
    # Serialise before the commit so json_log exists even if the DB fails.
    json_log = log_entry.json()
    try:
        """Everything I need to do here"""
        session.add(log_entry)
        session.commit()
        # Re-serialise after commit to pick up DB-assigned fields.
        json_log = log_entry.json()
    except Exception as e:
        session.rollback()
    finally:
        session.close()
    my_data = {'d': {
        "lat": str(log.get('latitude', None)),
        "long": str(log.get('longitude', None))
    }}
    # Best-effort publish to IBM IoT; failures are only logged.
    try:
        dev = ibmiotf.device.Client(options)
        dev.connect()
        dev.publishEvent("gps", "json", my_data)
        dev.disconnect()
    except Exception as ex:
        gen_log.info('failed to publish %s', ex)
    gen_log.info('MESSAGE PUBLISHED %s', json_log)
    raise gen.Return(None)
def test_with_outer_user_counter(self):
    """Another user's entries must not count against free_u's quota."""
    self._load_simple()
    # One entry for the user under test, one for an unrelated user.
    for uid in ("free_u", "other"):
        LogEntry.make(self.app_access.key, uid, None,
                      action="upload_photo").put()
    state = self.app_access.compile_profile_state(user_id="free_u")
    self.assertEquals(state["profile"], "free")
    self.assertEquals(state["default"], "deny")
    self.assertEquals(len(state["states"]), 3)
    photo_state = state["states"]["upload_photo"][0]
    self.assertEquals(photo_state["left"], 9)
    self.assertEquals(photo_state["limit_to"], 10)
def test_only_mine(self):
    """Entries parented under a foreign key must not be returned."""
    for n in xrange(5):
        LogEntry.make(self._default_access, None, None,
                      action="act_{}".format(n)).put()
    for n in xrange(5):
        # Same actions, but parented under an unrelated key.
        LogEntry.make(Key(LogEntry, 1), None, None,
                      action="act_{}".format(n)).put()
    response = self.get()
    rows = response.json["result"]
    self.assertEquals(len(rows), 5)
    self.assertEquals([row["action"] for row in rows],
                      ["act_{}".format(n) for n in xrange(4, -1, -1)])
def test_with_yesterday_counter(self):
    """An entry slightly older than a day still counts toward the limit."""
    self._load_simple()
    LogEntry.make(self.app_access.key, "free_u", None,
                  action="upload_photo").put()
    yesterday = datetime.now() - timedelta(hours=25)
    LogEntry.make(self.app_access.key, "free_u", None,
                  action="upload_photo", when=yesterday).put()
    state = self.app_access.compile_profile_state(user_id="free_u")
    self.assertEquals(state["profile"], "free")
    self.assertEquals(state["default"], "deny")
    self.assertEquals(len(state["states"]), 3)
    photo_state = state["states"]["upload_photo"][0]
    self.assertEquals(photo_state["left"], 9)
    self.assertEquals(photo_state["limit_to"], 10)
def test_with_endless_counter(self):
    """A lifetime counter keeps counting regardless of entry age."""
    self._load_simple()
    LogEntry.make(self.app_access.key, "prem", None, action="quota").put()
    for n in xrange(50):
        # Ages grow quadratically, pushing most entries far into the past.
        LogEntry.make(self.app_access.key, "prem", None, action="quota",
                      when=(datetime.now() - timedelta(days=n ** 2))).put()
    state = self.app_access.compile_profile_state(user_id="prem")
    self.assertEquals(state["profile"], "premium")
    self.assertEquals(state["default"], "allow")
    self.assertEquals(len(state["states"]), 2)
    quota_state = state["states"]["quota"][0]
    self.assertEquals(quota_state["left"], 949)
    self.assertEquals(quota_state["total_max"], 1000)
def parse_line(line):
    """Parse one log line; return a LogEntry on success, else None.

    Bug fix: ``re.I`` was being passed as the *pos* argument of
    ``Pattern.match(line, re.I)`` (so matching started at byte offset 2 and
    was never case-insensitive).  The flag belongs in ``re.compile``.
    """
    pattern = re.compile(REGEX, re.I)
    result = pattern.match(line)
    if result is not None:
        part = result.groups()
        return LogEntry(part[0], part[3], part[4], part[5], part[6])
    return None
def radiovis_api_add_log(request, secret):
    """Add a new log entry (API endpoint guarded by the shared secret).

    Returns an empty dict on success; aborts with 404 on a bad secret.
    The local variable was renamed from ``object``, which shadowed the
    builtin.
    """
    if secret != config.API_SECRET:
        abort(404)
        return
    entry = LogEntry()
    entry.topic = request.form.get('topic')
    entry.body = request.form.get('message')
    entry.headers = request.form.get('headers')
    entry.reception_timestamp = int(request.form.get('timestamp'))
    db.session.add(entry)
    db.session.commit()
    return {}
def get_database(page: int = 1, level: Level = None, worker: Worker = None):
    """Return one page (200 rows) of log records, newest first.

    Optional ``level`` / ``worker`` filters are ANDed when both are given.

    Bug fix: the level-only branch ordered by ``LogEntry.timestamp`` while
    every other branch sorts on ``LogEntry.t``; all paths now sort on
    ``LogEntry.t``.  The four near-identical query branches are collapsed
    into one condition list.
    """
    print("page: ", page, "level: ", level, "worker: ", worker)
    number_of_records = 200
    conditions = []
    if level is not None:
        conditions.append(LogEntry.l == level)
    if worker is not None:
        conditions.append(LogEntry.w == worker)
    query = LogEntry.select()
    if conditions:
        print("------------------------")
        query = query.where(*conditions)
    query = query.order_by(Desc(LogEntry.t)).paginate(page, number_of_records)
    return [i.to_json() for i in query]
async def log_page(request: Request, key: str, user = Depends(auth)):
    """Render one modlog entry, enforcing guild membership."""
    document = await logs.find_one({'key': key})
    entry = LogEntry.parse_obj(document)
    if entry.guild_id not in user.guilds:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail=f'You do not have access to modlogs for this guild'
        )
    context = {'request': request, 'user': user, 'entry': entry}
    return templates.TemplateResponse('log.html', context)
def parse_and_save_to_db(folder: str):
    """Stream JSON log records from *folder* into the LogEntry table.

    Rows are inserted in batches of 70.  Bug fixes versus the original:
    ``i % 70 == 0`` fired at i == 0 (an immediate single-row insert), and
    any records remaining after the last full batch were silently dropped.
    """
    batch_size = 70
    entries = []
    for js in yield_json_logs_from_folder(folder):
        entries.append({
            'w': js.get('w'),
            'l': js.get('l'),
            't': js.get('t'),
            'ctx': js.get('ctx'),
            'p': js.get('p'),
            'message': js.get('message'),
            'mhu': js.get('mhu'),
            'mht': js.get('mht'),
            'mrss': js.get('mrss'),
            'ct': js.get('ct'),
            'cl': js.get('cl'),
        })
        if len(entries) >= batch_size:
            LogEntry.insert_many(entries).execute()
            entries = []
    # Flush the final partial batch.
    if entries:
        LogEntry.insert_many(entries).execute()
def collect_form(request):
    """Render the collection form; on a valid POST create a LogEntry.

    GET pre-fills the form from the query string (bookmarklet-style);
    POST validates and saves, then shows a confirmation message linking to
    the new entry.
    """
    page_message = None
    log_entry = None
    if request.method == 'GET':
        collect_form = CollectForm(initial=request.GET)
    else:
        collect_form = CollectForm(request.POST)
        if collect_form.is_valid():
            log_entry = LogEntry()
            log_entry.log = collect_form.cleaned_data['log']
            log_entry.subject = collect_form.cleaned_data['title']
            # Excerpt and note are both optional; fall back to ''.
            log_entry.content = '<blockquote>%s</blockquote><p class="collection-note">%s</p>' % (
                collect_form.cleaned_data['excerpt'] or '',
                collect_form.cleaned_data['note'] or '')
            log_entry.publish = collect_form.cleaned_data['make_public']
            log_entry.source_url = collect_form.cleaned_data['url']
            log_entry.issued = datetime.now()
            log_entry.save()
            page_message = 'The <a href="%s">log entry</a> was created.' % reverse(
                'publish.views.log_entry', args=[],
                kwargs={'slug': log_entry.log.slug, 'pk': log_entry.id})
    return render_to_response('publish/collect_form.html', {
        'collect_form': collect_form,
        'page_message': page_message,
        'log_entry': log_entry
    }, context_instance=RequestContext(request))
def post(self):
    """Store a dedupe log entry (JSON request body) in the 'dedupe_log'
    namespace, then restore the previous namespace."""
    # Switch to the logging namespace for the duration of the write.
    previous_namespace = namespace_manager.get_namespace()
    namespace_manager.set_namespace('dedupe_log')
    current_namespace = namespace_manager.get_namespace()
    # Get parameters from the request body.
    params = json.loads(self.request.body)
    # Tag the entry with this logger's client identifier.
    params['client'] = CLIENT
    # Parse event coordinates; clients may send the literal string "None".
    latlon = params.pop('latlon')
    if latlon and latlon != "None":
        params['lat'], params['lon'] = map(float, latlon.split(","))
    else:
        params['lat'] = None
        params['lon'] = None
    # Build and store the LogEntry entity.
    log_entry = LogEntry(**params)
    log_entry_key = log_entry.put()
    # Restore the caller's namespace before responding.
    namespace_manager.set_namespace(previous_namespace)
    # Response payload is only logged, not written to the client here.
    resp = {
        "result": "success",
        "message": "new log entry successfully added",
        "namespace": current_namespace,
        "logger_version": LOGGER_VERSION,
        "log_entry_key": log_entry_key.id(),
        "log_entry": params
    }
    logging.info(resp)
    return
def count_logentries():
    """Count log entries.

    POST with a JSON body builds a filter dict from the whitelisted keys of
    the request's "find" object; a plain GET counts everything.

    Raises InvalidAPIUsage(415) for any other method/content type, and
    InvalidAPIUsage(404) when LogEntry.count rejects the filter.
    """
    if request.method == 'POST' and request.headers['Content-Type'] == 'application/json':
        find = dict()
        request_data = request.json
        request_find = request_data.get('find', dict())
        # Only whitelisted filter keys are honoured; anything else is dropped.
        for key, value in request_find.iteritems():
            if key in ('level', 'owner', 'datetimestamp', 'tags', 'start',
                       'end'):
                find[key] = value
    elif request.method == 'GET':
        # Plain GET: no filtering.
        find, sort, limit = None, None, None
    else:
        raise InvalidAPIUsage('Unsupported Media Type. \"application/json\" required.\n', 415)
    entry = LogEntry()
    try:
        result = entry.count(find)
    except ValueError, e:
        raise InvalidAPIUsage(e.message, status_code=404)
    # NOTE(review): `result` is computed but not returned in the visible
    # span -- presumably serialized further down; confirm against the full file.
def status_update():
    """Poll endpoint: drain crawl-progress records for *jobID*.

    Every LogEntry for the job is consumed (its records collected, then the
    entity deleted) and the response reports whether the crawl finished.

    Fixes: the missing-jobID branch returned a bare dict while the normal
    path returned a JSON string — both now return JSON; ``== True`` is
    replaced by a plain truth test; the redundant ``jobID == ''`` check is
    covered by falsiness.
    """
    jobID = request.form.get('jobID')
    if not jobID:
        return json.dumps({'result': [], 'done': False})
    job_results = []
    done = False
    for j in LogEntry.query(LogEntry.jobID == jobID):
        job_results.extend(j.record)
        # Each record is consumed exactly once.
        j.key.delete()
        if j.crawlFinished:
            done = True
    return json.dumps({'result': job_results, 'done': done})
def post(self):
    """Accept a JSON list (or single dict) of log entries and store them."""
    device_id = self.request.POST.get("device_id")
    user_id = self.request.POST.get("user_id")
    if not device_id and not user_id:
        webapp2.abort(400, "either device_id or user_id must be provided")
    entries = json.loads(self.request.POST.get("entries"))
    if isinstance(entries, dict):
        # Normalise a single entry to a one-element list.
        entries = [entries]
    app_key = self.app_access.key
    made = [LogEntry.make(app_key, user_id, device_id, **entry)
            for entry in entries]
    keys = ndb.put_multi(made)
    self.response.status = 201
    self._post_add(keys, None)
    return {"entries": len(keys)}
def goal_log_progress(request, gid):
    # TODO: add functional test here
    """ Allows users to log their progress for a goal """
    goal = Goal.objects.get(id=gid)
    if request.method == "GET":
        return render(request, 'goals/logGoal.html', {'goal': goal})
    elif request.method == "POST":
        # Only authenticated participants of this goal may log progress.
        if request.user.is_authenticated() and len(
                goal.beatmygoaluser_set.filter(username=request.user)) > 0:
            data = json.loads(request.body)
            response = LogEntry.create(log=goal.log,
                                       participant=request.user,
                                       amount=data['amount'],
                                       comment=data['comment'])
            if response['errors']:
                # NOTE(review): on errors the raw response is returned,
                # while the success branch embeds the (empty) errors list in
                # the redirect payload -- confirm this split is intentional.
                return HttpResponse(json.dumps(response),
                                    content_type='application/json')
            else:
                return HttpResponse(json.dumps({
                    "redirect": "/goals/" + str(gid),
                    "errors": response['errors']
                }), content_type='application/json')
        else:
            # NOTE(review): 500 for an unauthorised request is unusual;
            # 403 would be conventional -- confirm before changing.
            return HttpResponse("Invalid request", status=500)
def create_db():
    """Parse an Apache access log and bulk-load it into the database.

    Each matching line becomes one LogEntry; the session is committed once
    at the end, rolled back on any error, and always closed.

    Bug fix: the ``try:`` line had been commented out while its ``except``
    and ``finally`` clauses remained — a SyntaxError as written.
    """
    line_re = re.compile(
        r'^([^\s]+)\s([^\s]+)\s([^\s]+)\s\[([^\]]+)\]\s\"(GET|POST)\s([^\s]+)\s([^\"]+)\"\s([^\s]+)\s([^\s]+)\s([^\s]+)\s\"([^\"]+)\"\s\"([^\"]+)\"\s\"([^\"]+)\"\s'
    )
    target = os.path.join("/Users/seb/dev/jala/logs", "access_log.2015-11-25")
    # target = os.path.join("C:\\", "dev", "access_log.2015-11-27")
    try:
        with open(target) as f:
            for line in f:
                tokens = line_re.match(line)
                if tokens:
                    e = LogEntry(sourceip=tokens.group(1),
                                 request_id=tokens.group(2),
                                 request_user=tokens.group(3),
                                 timestamp=tokens.group(4),
                                 request_type=tokens.group(5),
                                 destination=tokens.group(6),
                                 protocol=tokens.group(7),
                                 return_code=tokens.group(8),
                                 size=tokens.group(9),
                                 duration=tokens.group(10),
                                 referrer=tokens.group(11),
                                 agent=tokens.group(12),
                                 session=tokens.group(13))
                    db_session.add(e)
        db_session.commit()
    except Exception as ex:
        print(ex)
        db_session.rollback()  # Roll back the changes on error.
    finally:
        db_session.close()  # Always close the connection.
def api_handler(request, service):
    """Dispatch *request* to the named service, logging request/response.

    The WSGI input stream is consumed and re-wrapped so the handler can
    read the body again; listeners capture close/exception events into the
    LogEntry, which is always saved in the ``finally`` block.

    Raises Http404 when no handler is registered for *service*.
    """
    service_handler = Settings.get_service_handler(service)
    if service_handler:
        logger.debug("Hitting service %s." % service)
        log = LogEntry(
            url="%s %s" % (request.method, request.get_full_path()),
            application=service)
        # Read the raw body, then rebuild the stream so it can be re-read.
        request_body = request.META["wsgi.input"].read(
            request._stream.remaining)
        request.META["wsgi.input"] = StringIO(request_body)
        request._stream = LimitedStream(
            request.META["wsgi.input"], request._stream.remaining)
        service_handler.event_manager.add_listener(
            "wsgi_close", partial(handle_wsgi_close, log=log))
        service_handler.app.event_manager.add_listener(
            "method_call_exception", partial(handle_exception, log=log))
        try:
            response = csrf_exempt(service_handler)(request)
        except Exception:
            log.traceback = unicode(traceback.format_exc(), errors="ignore")
            raise
        else:
            if response.content:
                log.response = response.content
        finally:
            if request_body:
                log.request = request_body
            log.save()
            # Replace the event managers so this request's partials do not
            # leak into the next request.
            service_handler.event_manager = EventManager(service_handler)
            service_handler.app.event_manager = EventManager(
                service_handler.app)
        return response
    else:
        msg = "Service %s not found" % service
        logger.info(msg)
        raise Http404(msg)
def post(self):
    """Store a query log entry (JSON request body) in the 'query_log'
    namespace, with optional per-resource result counts as children."""
    # Switch to the logging namespace for the duration of the writes.
    previous_namespace = namespace_manager.get_namespace()
    namespace_manager.set_namespace('query_log')
    current_namespace = namespace_manager.get_namespace()
    # Get parameters from the request body.
    params = json.loads(self.request.body)
    # Tag the entry with this logger's client identifier.
    params['client'] = CLIENT
    # Parse event coordinates.
    latlon = params.pop('latlon')
    if latlon:
        params['lat'], params['lon'] = map(float, latlon.split(","))
    else:
        params['lat'] = None
        params['lon'] = None
    # Remove "results_by_resource" from parameters; processed separately below.
    res_counts = params.pop('res_counts')
    # Transform "matching_records" to <str> (handles ">10000"-style entries).
    params['matching_records'] = str(params['matching_records'])
    # Build and store the LogEntry entity.
    log_entry = LogEntry(**params)
    log_entry_key = log_entry.put()
    # Process results_by_resource (only when present).
    if res_counts:
        res_counts = json.loads(res_counts)
        params['results_by_resource'] = []
        for i in res_counts:
            r = ResourceLogEntry(id=i, parent=log_entry_key,
                                 count=res_counts[i])
            params['results_by_resource'].append(r)
        # Update the LogEntry entity with its per-resource children.
        log_entry = log_entry_key.get()
        log_entry.results_by_resource = params['results_by_resource']
        log_entry_key = log_entry.put()
    # Restore the caller's namespace before responding.
    namespace_manager.set_namespace(previous_namespace)
    # Response payload is only logged, not written to the client here.
    resp = {
        "result": "success",
        "message": "new log entry successfully added",
        "namespace": current_namespace,
        "logger_version": LOGGER_VERSION,
        "log_entry_key": log_entry.key.id(),
        "log_entry": params
    }
    logging.info(resp)
    return
def webpay_run(name, payment_pk, *args, **kwargs):
    """Runs a Webpay binary within a preconfigured environment.

    Before running the binary, this context manager recreates the
    directories and files in /tmp that the binary needs to run correctly,
    as if it were sitting in a standalone web server.

    Yields a subprocess.Popen instance with an open pipe to stdin and
    stdout.  After running the binary, log and journal entries are captured
    and then temporary files are removed.

    Args:
        name: A string, basename of the binary as is found in assets_dir.
        payment_pk: An integer, primary key for the related payment.  This
            is needed to associate the logs with their payment.
        *args: Extra positional arguments are appended to the command line
            before execution.

    Example:
        >>> webpay_run('tbk_bp_resultado.cgi', 3958) as cgi:
                output, _ = cgi.communicate("TBK_MONTO=384800&...\n")
                do_something_with(output)

    WARNING: Always use Popen with communicate() method when using PIPE or
    there will be deadlocks.
    """
    from pprint import pprint
    # Prepare the configuration files.
    assets_dir = PaymentProcessor.get_backend_setting('ASSETS_DIR')
    tbk_config = PaymentProcessor.get_tbk_config(payment_pk, 'CLP')  # FIXME
    tbk_param = PaymentProcessor.get_tbk_param()
    tbk_trace = PaymentProcessor.get_tbk_trace()
    temp_dir = mkdtemp()
    cgi_path = os.path.join(assets_dir, name)
    temp_cgi_path = os.path.join(temp_dir, name)
    datos_path = os.path.join(temp_dir, 'datos')
    os.mkdir(datos_path)
    with open(os.path.join(datos_path, 'tbk_config.dat'), 'w') as f:
        pprint("TBK_CONFIG: %s" % tbk_config)
        pprint('------------------------------------------')
        f.write(tbk_config)
    with open(os.path.join(datos_path, 'tbk_param.txt'), 'w') as f:
        f.write(tbk_param)
    with open(os.path.join(datos_path, 'tbk_trace.dat'), 'w') as f:
        f.write(tbk_trace)
    # Prepare the public and private keys.
    maestros_path = os.path.join(temp_dir, 'maestros')
    public_key, private_key = PaymentProcessor.get_keys()
    os.mkdir(maestros_path)
    with open(os.path.join(maestros_path, 'tbk_public_key.pem'), 'w') as f:
        f.write(public_key)
    with open(os.path.join(maestros_path, 'privada.pem'), 'w') as f:
        f.write(private_key)
    # Prepare the log directory.
    log_path = os.path.join(temp_dir, 'log')
    os.mkdir(log_path)
    # Copy the binary to the temp dir and make it executable.
    copyfile(cgi_path, temp_cgi_path)
    os.chmod(temp_cgi_path, S_IEXEC)
    yield Popen([temp_cgi_path] + list(args), stdin=PIPE, stdout=PIPE)
    # Capture the logs written by the binary.
    try:
        from getpaid.models import Payment
        payment = Payment.objects.get(pk=payment_pk)
        for event_log in glob.glob(os.path.join(log_path, 'TBK_EVN*')):
            with open(event_log, 'r') as f:
                for line in map(str.strip, f.readlines()):
                    pprint("TBK_ENV: %s" % line)
                    from models import LogEntry
                    entry = LogEntry.from_line(line=line, payment=payment)
                    entry.save()
            pprint('------------------------------------------')
        for journal_log in glob.glob(os.path.join(log_path,
                                                  'tbk_bitacora_TR_NORMAL*')):
            # The journal file's mtime dates its entries.
            st = os.stat(journal_log)
            date = datetime.date.fromtimestamp(st.st_mtime)
            with open(journal_log, 'r') as f:
                for line in map(str.strip, f.readlines()):
                    pprint("TBK_BITACORA: %s" % line)
                    from models import JournalEntry
                    entry = JournalEntry(date=date, body=line,
                                         payment=payment)
                    entry.save()
            pprint('------------------------------------------')
    except Payment.DoesNotExist:
        # No such payment: skip log capture, still clean up.
        pass
    # Clean up the temporary tree.
    rmtree(temp_dir)
def patiently_parse_log_folder(folder: str):
    """Parse *folder* behind a spinner, then index logentry.t for sorting."""
    spinner = Halo(text='Parsing...', spinner='dots')
    with spinner:
        parse_and_save_to_db(folder)
        LogEntry.raw('CREATE INDEX t_sort ON logentry (t);').execute()
async def root(request: Request, user = Depends(auth)):
    """List the 100 newest modlog entries for the user's guilds."""
    guild_ids = list(map(str, user.guilds))
    cursor = logs.find({'guild_id': {'$in': guild_ids}})
    doc = await cursor.sort([('created_at', -1)]).to_list(100)
    entries = [LogEntry.parse_obj(item) for item in doc]
    context = {'request': request, 'user': user, 'entries': entries}
    return templates.TemplateResponse('home.html', context)
def writeCrawlLog(links, isFinished, jobID):
    """Persist one crawl-progress record for *jobID*."""
    entry = LogEntry(record=links, crawlFinished=isFinished, jobID=jobID)
    entry.put()
def generate_ctx():
    """Return all LogEntry rows, newest first."""
    query = LogEntry.all()
    return query.order('-time')
def log(self, msg):
    """Persist an audit record (ip, user, message, referrer) for the
    current request.

    Bug fix: the HTTP header is spelled 'Referer' (the historical
    misspelling standardised by RFC 9110); looking up 'Referrer' always
    returned None, so the referrer column was never populated.
    """
    user_key = self.current_user.key() if self.current_user else None
    LogEntry(ip=self.request.remote_addr,
             user=user_key,
             msg=msg,
             referrer=self.request.headers.get('Referer')).put()
def add_logentry():
    """Create a new log entry and save it to the DB.

    Returns a JSON body with OK=False plus error details when validation
    fails or the client did not send JSON; otherwise OK=True and the new
    ObjectId as a string.
    """
    if request.headers['Content-Type'] == 'application/json':
        errors = []
        request_data = request.json
        # Check the selected level (required).
        level = ""
        if "level" not in request_data:
            errors.append({'level': 'Field required.'})
        else:
            level = request_data['level']
            # Is the level one of the types listed in the config file?
            if level not in config['level']:
                errors.append({'level': 'Unknown level type.'})
        # Check that owner is present (required).
        owner = ""
        if "owner" not in request_data:
            errors.append({'owner': 'Field required.'})
        else:
            owner = request_data['owner']
        # Check that data is present (required).
        data = ""
        if "data" not in request_data:
            errors.append({'data': 'Field required.'})
        else:
            data = request_data['data']
        tags = []
        # Tags are optional.  When present they must already be a JSON array;
        # otherwise report the error and do not create the entry.
        if "tags" in request_data:
            tags = request.json['tags']
            if not isinstance(tags, list):
                errors.append({'tags': 'Tags must be an array.'})
        if not errors:
            entry = LogEntry(level, owner, data, tags)
            id_or_error = entry.save()
            if not isinstance(id_or_error, ObjectId):
                # save() returned an error payload instead of an ObjectId.
                return jsonify({'OK': False, 'error': id_or_error})
            # __str__ gives the string form of MongoDB's ObjectId.
            return jsonify({'OK': True, 'id': id_or_error.__str__()})
        else:
            return jsonify({"OK": False, 'errors': errors})
    else:
        # TODO: replace this with a proper 415 exception.
        return jsonify({
            "errors": [
                "415 Unsupported Media Type. \"application/json\" required.\n",
            ]
        })
def webpay_run(name, payment_pk, *args, **kwargs):
    """Runs a Webpay binary within a preconfigured environment.

    This variant runs against a fixed assets directory (WEBPAY_DOCS) rather
    than a freshly-built temp tree; the original temp-dir scaffolding is
    kept, commented out, for reference.  NOTE(review): unlike the temp-dir
    variant, nothing is deleted afterwards -- confirm that is intended.

    Yields a subprocess.Popen instance with an open pipe to stdin and
    stdout.  After running the binary, log and journal entries are
    captured.

    Args:
        name: A string, basename of the binary as is found in assets_dir.
        payment_pk: An integer, primary key for the related payment.  This
            is needed to associate the logs with their payment.
        *args: Extra positional arguments are appended to the command line
            before execution.

    Example:
        >>> webpay_run('tbk_bp_resultado.cgi', 3958) as cgi:
                output, _ = cgi.communicate("TBK_MONTO=384800&...\n")
                do_something_with(output)

    WARNING: Always use Popen with communicate() method when using PIPE or
    there will be deadlocks.
    """
    from pprint import pprint
    # Prepare the configuration files.
    assets_dir = PaymentProcessor.get_backend_setting('WEBPAY_DOCS')
    tbk_config = PaymentProcessor.get_tbk_config(kwargs.get('request'),
                                                 payment_pk, 'CLP')  # FIXME
    tbk_param = PaymentProcessor.get_tbk_param()
    tbk_trace = PaymentProcessor.get_tbk_trace()
    # temp_dir = mkdtemp()
    cgi_path = os.path.join(assets_dir, name)
    # temp_cgi_path = os.path.join(assets_dir, name)
    datos_path = os.path.join(assets_dir, 'datos')
    # os.mkdir(datos_path)
    # with open(os.path.join(datos_path, 'tbk_config.dat'), 'w') as f:
    #     pprint("TBK_CONFIG: %s" % tbk_config)
    #     pprint('------------------------------------------')
    #     f.write(tbk_config)
    # with open(os.path.join(datos_path, 'tbk_param.txt'), 'w') as f:
    #     f.write(tbk_param)
    # with open(os.path.join(datos_path, 'tbk_trace.dat'), 'w') as f:
    #     f.write(tbk_trace)
    # Prepare the public and private keys.
    maestros_path = os.path.join(assets_dir, 'maestros')
    public_key, private_key = PaymentProcessor.get_keys()
    # os.mkdir(maestros_path)
    # with open(os.path.join(maestros_path, 'tbk_public_key.pem'), 'w') as f:
    #     f.write(public_key)
    # with open(os.path.join(maestros_path, 'privada.pem'), 'w') as f:
    #     f.write(private_key)
    # Prepare the log directory.
    log_path = os.path.join(assets_dir, 'log')
    # os.mkdir(log_path)
    # Copy the binary to the temp dir and make it executable.
    # copyfile(cgi_path, temp_cgi_path)
    # os.chmod(cgi_path, S_IEXEC)
    yield Popen([sys.executable, cgi_path] + list(args), stdin=PIPE,
                stdout=PIPE)
    # Capture the logs written by the binary.
    try:
        from getpaid.models import Payment
        payment = Payment.objects.get(pk=payment_pk)
        for event_log in glob.glob(os.path.join(log_path, 'TBK_EVN*')):
            with open(event_log, 'r') as f:
                for line in map(str.strip, f.readlines()):
                    pprint("TBK_ENV: %s" % line)
                    from models import LogEntry
                    entry = LogEntry.from_line(line=line, payment=payment)
                    entry.save()
            pprint('------------------------------------------')
        for journal_log in glob.glob(
                os.path.join(log_path, 'tbk_bitacora_TR_NORMAL*')):
            # The journal file's mtime dates its entries.
            st = os.stat(journal_log)
            date = datetime.date.fromtimestamp(st.st_mtime)
            with open(journal_log, 'r') as f:
                for line in map(str.strip, f.readlines()):
                    pprint("TBK_BITACORA: %s" % line)
                    from models import JournalEntry
                    entry = JournalEntry(date=date, body=line,
                                         payment=payment)
                    entry.save()
            pprint('------------------------------------------')
    except Payment.DoesNotExist:
        # No such payment: skip log capture.
        pass
def did(self, user, action, change, **kwargs): LogEntry.make(self.app_access.key, user.user_id, user.device_id, action=action, quantity=change, **kwargs).put()
def sample_get(): it = LogEntry.raw('SELECT id from logentry where id % 10000 = 0').execute() print(it) print([el for el in it])
def log_models_changed_signal(sender, **kwargs):
    """Persist an audit record when a tracked model instance changes.

    Django signal receiver for post_save / post_delete on Student, Group,
    Exam, Result, MonthJournal and User.  The 'created' kwarg is True on
    insert, False on update, and absent on delete; the event is emitted
    through the ``logging`` module and saved as a LogEntry row.  Unknown
    senders are ignored.

    Args:
        sender: The model class that emitted the signal.
        **kwargs: Signal payload; 'instance' is the affected object and
            'created' (optional) distinguishes insert from update.
    """
    logger = logging.getLogger(__name__)

    # post_delete sends no 'created' flag, so a missing (or None) value
    # means deletion.  dict.get replaces the original bare try/except,
    # which swallowed every exception and mutated the caller's kwargs.
    created = kwargs.get('created')
    if created is None:
        log = 'deleted'
    elif created:
        log = 'added'
    else:
        log = 'updated'

    logger_info = ''
    # NOTE(review): assumes every tracked model defines a docstring;
    # sender.__doc__ would be None otherwise and this concat would raise.
    signal_name = sender.__doc__ + ' is ' + log

    # Each branch builds the message once; the duplicated format strings
    # previously passed to both logger.info(...) and the %-interpolation
    # are collapsed into a single source of truth.
    if sender == Student:
        student = kwargs['instance']
        logger_info = u'Student %s: %s %s (ID: %d)' % (
            log, student.first_name, student.last_name, student.id)
    elif sender == Group:
        group = kwargs['instance']
        logger_info = u'Group %s: %s (ID: %d)' % (log, group.title, group.id)
    elif sender == Exam:
        exam = kwargs['instance']
        try:
            exam_group = exam.exam_group.title
        except ObjectDoesNotExist:
            exam_group = 'Deleted Group'
        logger_info = u'Exam %s: %s for %s (ID: %d)' % (
            log, exam.name, exam_group, exam.id)
    elif sender == Result:
        result = kwargs['instance']
        try:
            first_name = result.result_student.first_name
            last_name = result.result_student.last_name
        except ObjectDoesNotExist:
            first_name = 'Already'
            last_name = 'Deleted'
        logger_info = u'Result %s: Student %s %s got mark %s for %s (ID: %d)' % (
            log, first_name, last_name, result.score,
            result.result_exam.name, result.id)
        signal_name = 'Result Model is ' + log
    elif sender == MonthJournal:
        journal = kwargs['instance']
        month_name = [
            _(u'Січень'), _(u'Лютий'), _(u'Березень'), _(u'Квітень'),
            _(u'Травень'), _(u'Червень'), _(u'Липень'), _(u'Серпень'),
            _(u'Вересень'), _(u'Жовтень'), _(u'Листопад'), _(u'Грудень')
        ]
        logger_info = u'Journal %s: Student %s %s for %s (ID: %d)' % (
            log, journal.student_name.first_name,
            journal.student_name.last_name,
            month_name[journal.date.month - 1], journal.id)
    elif sender == User:
        user = kwargs['instance']
        logger_info = u'User %s: %s' % (log, user.username)
        signal_name = 'User Model is ' + log
    else:
        logger_info = False

    if logger_info:
        # Single emission point: the same text goes to the logger and the
        # database.  'entry' replaces the original rebinding of 'log'
        # (the status string) to the LogEntry instance.
        logger.info(logger_info)
        entry = LogEntry(log_datetime=timezone.now(), status='INFO',
                         signal=signal_name, info=logger_info)
        entry.save()