def run(self):
    """Worker loop: drain carved files from the queue and write them to disk.

    Keeps running until a stop has been requested AND the queue is empty.
    """
    while not self.stop or not self.files.empty():
        item = self.files.get()
        if not item:
            # sentinel/empty entry: acknowledge it and move on
            self.files.task_done()
            continue
        # one directory per file type / endpoint pair / timestamp
        target = "%s/%ss/%s_%s/%s/" % (self.outputdir, item.type, item.source, item.destination, item.timestamp)
        if not os.path.exists(target):
            os.makedirs(target)
        # probe for the first free "name N.ext" slot
        suffix = 1
        out_name = '%s %d.%s' % (item.name, suffix, item.fileEnding)
        while os.path.exists(target + out_name):
            suffix += 1
            out_name = '%s %d.%s' % (item.name, suffix, item.fileEnding)
        # drop the trailing dot left behind when there is no file ending
        out_name = out_name.rstrip('.')
        with open(target + out_name, 'wb') as fh:
            fh.write(item.data)
        Utils.printl("Wrote file: %s%s" % (target, out_name))
        self.files.task_done()
def run(self):
    """Worker loop: write carved files to disk and log each one to files.csv.

    Runs until a stop has been requested AND the queue has been drained.
    """
    while not self.stop or not self.files.empty():
        item = self.files.get()
        if not item:
            # sentinel/empty entry: acknowledge it and move on
            self.files.task_done()
            continue
        # one directory per file type / endpoint pair / timestamp
        target = "%s/%ss/%s_%s/%s/" % (self.outputdir, item.type, item.source, item.destination, item.timestamp)
        if not os.path.exists(target):
            os.makedirs(target)
        # probe for the first free "name N.ext" slot
        suffix = 1
        out_name = '%s %d.%s' % (item.name, suffix, item.fileEnding)
        while os.path.exists(target + out_name):
            suffix += 1
            out_name = '%s %d.%s' % (item.name, suffix, item.fileEnding)
        # drop the trailing dot left behind when there is no file ending
        out_name = out_name.rstrip('.')
        with open(target + out_name, 'wb') as fh:
            fh.write(item.data)
        Utils.printl("Wrote file: %s%s" % (target, out_name))
        # append one bookkeeping row for this file ('ab': binary append, py2-style csv)
        with open(self.outputdir + '/files.csv', 'ab') as outcsv:
            csv.writer(outcsv, delimiter=',').writerow([
                item.pcapFile,
                item.firstPacketNumber,
                out_name,
                item.fileEnding,
                item.size,
                item.md5.hexdigest(),
                item.source,
                item.destination,
                item.timestamp,
            ])
        self.files.task_done()
def run(self):
    """Worker loop for the FileCarve output layout.

    Prefers the carved file's own filename when one was recovered; otherwise
    falls back to a generated "name N.ext" pattern. Runs until a stop has
    been requested AND the queue is empty.
    """
    while not self.stop or not self.files.empty():
        carved = self.files.get()
        if not carved:
            # sentinel/empty entry: acknowledge it and move on
            self.files.task_done()
            continue
        # layout: <out>/FileCarve/<src>-<dst>/<src port>/<type>/
        target = "{}/FileCarve/{}-{}/{}/{}/".format(self.outputdir, carved.source, carved.destination, carved.portSrc, carved.type)
        if not os.path.exists(target):
            os.makedirs(target)
        count = 0
        if carved.filename:
            out_name = carved.filename
        else:
            out_name = '%s %d.%s' % (carved.name, count, carved.fileEnding)
        # on collision fall back to a "name(N).ext" pattern
        while os.path.exists(target + out_name):
            count += 1
            out_name = '%s(%d).%s' % (carved.name, count, carved.fileEnding)
        # drop the trailing dot left behind when there is no file ending
        out_name = out_name.rstrip('.')
        with open(target + out_name, 'wb') as fh:
            fh.write(carved.data)
        Utils.printl("Wrote file: %s%s" % (target, out_name))
        self.files.task_done()
def _add_event(self, period_string, event_id):
    """Register one event in the user's crontab.

    Adds a line like:
        <period_string> python /path/to/kalliope.py start --brain-file /path/to/brain.yml --run-synapse "<event_id>"
    E.g:
        30 7 * * * python /home/me/kalliope/kalliope.py start --brain-file /home/me/brain.yml --run-synapse "Say-hello"

    :param period_string: crontab period
    :type period_string: str
    :param event_id: name of the synapse to run
    :type event_id: str
    :raises InvalidCrontabPeriod: when period_string is not a valid cron spec
    """
    user_cron = CronTab(user=True)
    command = self.base_command + " " + str("\"" + event_id + "\"")
    job = user_cron.new(command=command, comment=CRONTAB_COMMENT)
    if not CronSlices.is_valid(period_string):
        raise InvalidCrontabPeriod("The crontab period %s is not valid" % period_string)
    job.setall(period_string)
    job.enable()
    # persist the crontab to disk
    user_cron.write()
    Utils.print_info("Synapse \"%s\" added to the crontab" % event_id)
def parsePost(self, post):
    """Print and persist one geotagged tweet.

    Posts without geo data are skipped. Returns the tweet id, or None
    when the post was skipped.
    """
    if not post["geo"]:
        return None
    pid = post["id"]
    author = post["user"]
    user = author["screen_name"]
    name = author["name"]
    # non-ASCII characters are replaced so the text is printable everywhere
    text = post["text"].encode('ascii', 'replace')
    latitude = post["geo"]["coordinates"][0]
    longitude = post["geo"]["coordinates"][1]
    profile_picture = author["profile_image_url_https"]
    # Twitter's "created_at" format -> "YYYY-MM-DD HH:MM:SS"
    created_time = time.strftime('%Y-%m-%d %H:%M:%S',
                                 time.strptime(post["created_at"], '%a %b %d %H:%M:%S +0000 %Y'))
    photo = ""
    tags = [tag["text"] for tag in post["entities"]["hashtags"]]
    print('[+] Tweet ID: %s' % pid)
    print('\t[+] User: %s' % user)
    print('\t[+] Full Name: %s' % name)
    print('\t[+] Date of Creation: %s' % created_time)
    print('\t[+] Latitude: %s' % latitude)
    print('\t[+] Longitude: %s' % longitude)
    print('\t[+] Hashtags: %s' % tags)
    print('\t[+] Text: %s' % text)
    print('')
    helper = Utils()
    person = helper.get_user(user, name, profile_picture)
    saved = helper.save_post(pid, person, created_time, photo, text, latitude, longitude, "Twitter")
    helper.save_tags(tags, saved)
    return pid
def parse_tip(self, place, tip):
    """Persist a single Foursquare tip for *place* and print its details."""
    tip_id = tip['id']
    text = tip['text']
    # epoch seconds -> "YYYY-MM-DD HH:MM:SS"
    created_at = datetime.fromtimestamp(int(tip['createdAt'])).strftime('%Y-%m-%d %H:%M:%S')
    author = tip['user']
    user_id = author['id']
    name = "%s %s" % (author['firstName'], author['lastName'])
    photo = author['photo']
    # Foursquare photo URLs are assembled from prefix + size + suffix
    profile_picture = photo['prefix'] + 'original' + photo['suffix']
    helper = Utils()
    person = helper.get_user(user_id, name, profile_picture)
    helper.save_tip(tid=tip_id, author=person, place=place, text=text, dateOfCreation=created_at)
    print('\t\t[+] Tip Id: %s' % tip_id)
    print('\t\t[+] Created at: %s' % created_at)
    print('\t\t[+] User ID: %s' % user_id)
    print('\t\t[+] Full Name: %s' % name)
    print('\t\t[+] Profile Picture: %s' % profile_picture)
    print('\t\t[+] Text: %s' % text)
    print('')
def calc_cov_heuristic(
        objs, parent_stack: List[str], inpt: str, knowledge: KnowledgeHandling
) -> Tuple["HeuristicValue", Set[Tuple[Any, Any]], Set[Tuple[Any, Any]]]:
    """
    Calculates based on the covered lines and the heuristic value of the parent the
    heuristic value for the children.
    New heuristic value is: #newly covered lines + 1 - heuristic value of parent.
    -1 as if no new line is covered the heuristic value decreases over time

    :param inpt: The string used in the execution
    :param objs: the covered lines and comparisons
    :param parent_stack: the smallest stack occurred for the parent input
    :return: (heuristic value, branches that contributed to the score,
              all branches seen in the trace)
    """
    # Todo also take stackdepth into account (maybe in combination with input
    # length s.t. for longer inputs a small stack is preferred)
    # Todo this obviously only works for recursive parsers, but for all others
    # this is more or less a nop
    tmp_events = []        # coverage events buffered since the last comparison
    coverage_events = []   # events confirmed to lie between "real" comparisons
    stack = HeuristicValue.calc_stack(objs)
    all_covered = set()    # every (old, new) branch pair seen in the trace
    # we need to consider the taken branches between the last comparison and
    # the eof comparison
    last_was_real = False
    for obj in objs:
        # consider only basic block jumps up until the last "real" comparison
        if Utils.is_real_input_comparison(obj, Utils.max_index - 1):
            # flush the buffered events: they precede a real comparison
            coverage_events += tmp_events
            tmp_events = []
            last_was_real = True
        elif not Utils.is_real_input_comparison(obj, Utils.max_index - 1) and obj["type"] == "INPUT_COMPARISON" and last_was_real:
            # first non-real comparison after a real one still flushes once
            last_was_real = False
            coverage_events += tmp_events
            tmp_events = []
        elif obj["type"] == "COVERAGE_EVENT":
            tmp_events.append(obj)
            all_covered.add((obj["old"], obj["new"]))
    cover_counter = 0  # weighted score: new branches count double, known ones decay
    new_covered = 0    # number of branches never seen as valid before
    covered = set()
    for event in coverage_events:
        value = (event["old"], event["new"])
        if value not in Utils.valid_covered:
            cover_counter += 2
            new_covered += 1
        else:
            # already-known branch: weight inversely to how often it was covered
            cover_counter += 1 / Utils.valid_covered[value]
        covered.add(value)
    same_path_taken = HeuristicValue.check_same_path_taken(coverage_events)
    return HeuristicValue((new_covered, cover_counter), stack, parent_stack, inpt,
                          same_path_taken, knowledge), covered, all_covered
def signal_handler(signal, frame):
    """
    Used to catch a keyboard signal like Ctrl+C in order to kill the kalliope program

    :param signal: signal handler
    :param frame: execution frame
    """
    # BUGFIX: was the Python-2-only statement `print "\n"`; the rest of the
    # file uses function-call prints, so use the form valid in both versions.
    print("\n")
    Utils.print_info("Ctrl+C pressed. Killing Kalliope")
    sys.exit(0)
def subClassified(request, pk, typeName, subType):
    """Render the gallery page for one (root type, sub type) category of a user.

    :param pk: primary key of the owning User
    :param typeName: root classified type
    :param subType: sub classified type
    """
    user = get_object_or_404(User, pk=pk)
    urls = Utils.get_subclassified_urls(user.username, typeName, subType)
    typedict = Utils.get_type_dict(user)
    introduction = get_object_or_404(ClassifiedType, user=user,
                                     root_type=typeName, sub_type=subType).introduction
    # BUGFIX(idiom): the view previously passed locals() as the template
    # context, which silently couples the template to local variable names.
    # Pass an explicit context containing every name locals() exposed.
    context = {
        'user': user,
        'urls': urls,
        'typedict': typedict,
        'introduction': introduction,
        'request': request,
        'pk': pk,
        'typeName': typeName,
        'subType': subType,
    }
    return render(request, 'main/subClassified.html', context)
def saveImage(request):
    """Decode a base64 data-URL image from the POST body and store it."""
    typeName = request.POST.get("typeName")
    raw = json.loads(request.POST.get("image"))
    # strip the "data:<mime>;base64," prefix before decoding
    payload = str(raw).split(';base64,')[1]
    decoded = base64.b64decode(payload)
    Utils.auto_classified_storage(request.user.username, typeName, decoded)
    return HttpResponse("success")
def _add_event(self, period_string, event_id):
    """Register one event in the user's crontab.

    Adds a job running the base command with the synapse name quoted, e.g.
    `30 7 * * * python kalliope.py start --brain-file brain.yml --run-synapse "Say-hello"`.

    :param period_string: crontab period (e.g. "30 7 * * *")
    :type period_string: str
    :param event_id: name of the synapse to run
    :type event_id: str
    :raises InvalidCrontabPeriod: when period_string is not a valid cron spec
    """
    my_user_cron = CronTab(user=True)
    job = my_user_cron.new(command=self.base_command+" "+str("\"" + event_id + "\""), comment=CRONTAB_COMMENT)
    if CronSlices.is_valid(period_string):
        job.setall(period_string)
        job.enable()
    else:
        raise InvalidCrontabPeriod("The crontab period %s is not valid" % period_string)
    # write the file
    my_user_cron.write()
    Utils.print_info("Synapse \"%s\" added to the crontab" % event_id)
def main():
    """
    Entry point of Kalliope program
    """
    # create arguments
    parser = argparse.ArgumentParser(description='Kalliope')
    parser.add_argument("action", help="[start|gui]")
    parser.add_argument("--run-synapse", help="Name of a synapse to load surrounded by quote")
    parser.add_argument("--brain-file", help="Full path of a brain file")
    parser.add_argument("--debug", action='store_true', help="Show debug output")

    # parse arguments from script parameters
    args = parser.parse_args()

    # require at least one parameter, the action
    if len(sys.argv[1:]) == 0:
        parser.print_usage()
        sys.exit(1)

    # check if we want debug
    configure_logging(debug=args.debug)
    logger.debug("kalliope args: %s" % args)

    # by default, no brain file is set. Use the default one: brain.yml in the root path
    brain_file = None
    # check if user set a brain.yml file
    if args.brain_file:
        brain_file = args.brain_file

    # load the brain once
    brain = BrainLoader.get_brain(file_path=brain_file)

    # check the user provide a valid action
    if args.action not in ACTION_LIST:
        Utils.print_warning("%s is not a recognised action\n" % args.action)
        parser.print_help()
        # BUGFIX: previously fell through and silently did nothing;
        # abort with a non-zero exit code on an unknown action.
        sys.exit(1)

    if args.action == "start":
        # user set a synapse to start
        if args.run_synapse is not None:
            SynapseLauncher.start_synapse(args.run_synapse, brain=brain)

        if args.run_synapse is None:
            # first, load events in crontab
            crontab_manager = CrontabManager(brain=brain)
            crontab_manager.load_events_in_crontab()
            Utils.print_success("Events loaded in crontab")
            # then start kalliope
            Utils.print_success("Starting Kalliope")
            Utils.print_info("Press Ctrl+C for stopping")
            # catch signal for killing on Ctrl+C pressed
            signal.signal(signal.SIGINT, signal_handler)
            # start the main controller
            MainController(brain=brain)

    if args.action == "gui":
        ShellGui(brain=brain)
def __init__(self, callback=None, **kwargs):
    """
    Start recording the microphone and analyse audio with Bing api

    :param callback: The callback function to call to send the text
    :param kwargs: key / language / show_all options forwarded to the recogniser
    """
    OrderListener.__init__(self)
    # invoked once the speech has been turned into text
    self.callback = callback

    recognizer = sr.Recognizer()
    with sr.Microphone() as source:
        # calibrate the energy threshold against ambient noise for ~1s
        recognizer.adjust_for_ambient_noise(source)
        Utils.print_info("Say something!")
        audio = recognizer.listen(source)

    # recognize speech using Bing Speech Recognition
    try:
        captured_audio = recognizer.recognize_bing(
            audio,
            key=kwargs.get('key', None),
            language=kwargs.get('language', "en-US"),
            show_all=kwargs.get('show_all', False))
        Utils.print_success("Bing Speech Recognition thinks you said %s" % captured_audio)
        self._analyse_audio(captured_audio)
    except sr.UnknownValueError:
        Utils.print_warning("Bing Speech Recognition could not understand audio")
    except sr.RequestError as e:
        Utils.print_danger("Could not request results from Bing Speech Recognition service; {0}".format(e))
def analyse_order(self, order):
    """
    Receive an order, try to retrieve it in the brain.yml to launch the
    attached plugins, then re-arm the trigger and the order listener.
    """
    analyser = OrderAnalyser(order, main_controller=self, brain_file=self.brain_file)
    analyser.start()
    # the analyser is done: go back to waiting for the hotword
    Utils.print_info("Waiting for trigger detection")
    self.trigger_instance.unpause()
    # fresh listener for the next captured order
    self.order_listener = OrderListener(self.analyse_order)
    # restart the trigger to catch the hotword
    self.trigger_instance.start()
def ebay2generic(item, info):
    """Convert one raw eBay offer into the generic product schema.

    :param item: raw eBay offer dict (keys like IMAGE_URL, OFFER_TITLE, ...)
    :param info: pre-computed metadata (id, categories, status, price, gender)
    :return: (image, generic) — the downloaded cv2 image and the generic
             product dict, or (None, None) when the image cannot be fetched
             or any field is missing.
    """
    try:
        full_img_url = item["IMAGE_URL"]
        generic = {
            "id": [info["id"]],
            "categories": info["categories"],
            "clickUrl": item["OFFER_URL_MIN_CATEGORY_BID"],
            "images": {"XLarge": full_img_url},
            "status": info["status"],
            "shortDescription": item["OFFER_TITLE"],
            "longDescription": item["OFFER_DESCRIPTION"],
            "price": info["price"],
            "Brand": item["MANUFACTURER"],
            "Site": item["MERCHANT_NAME"],
            "download_data": {
                'dl_version': today_date,
                'first_dl': today_date,
                'fp_version': constants.fingerprint_version
            },
            "fingerprint": None,
            "gender": info["gender"],
            "ebay_raw": item
        }
        image = Utils.get_cv2_img_array(full_img_url)
        if image is None:
            # retry once with the URL scheme stripped
            if 'https://' in full_img_url:
                image = Utils.get_cv2_img_array(full_img_url[8:])
            elif 'http://' in full_img_url:
                image = Utils.get_cv2_img_array(full_img_url[7:])
            else:
                image, generic = None, None
        if image is None:
            generic = None
            return image, generic
        img_hash = get_hash(image)
        generic["img_hash"] = img_hash
    # BUGFIX: was a bare `except:` with the py2-only `print item`;
    # best-effort behaviour is kept but narrowed so SystemExit/KeyboardInterrupt
    # are no longer swallowed, and the print works on py2 and py3.
    except Exception:
        print(item)
        generic = None
        image = None
    return image, generic
def _calc_tos_coverage(self, objs, tos):
    # Counts coverage branches taken while the given symbol was on top of the
    # stack (tos), excluding branches already known to be valid.
    is_tos = False                 # whether `tos` is currently on top of the stack
    self.tos_cover_counter = 0
    last_was_eof = False
    cur_idx = -1                   # input index of the last flushed real comparison
    tmp_events = set()             # branches buffered since the last flush
    for obj in objs:
        # consider only basic block jumps up until the last "real" comparison
        if obj["type"] == "INPUT_COMPARISON" and obj["operator"] == "eof":
            # eof comparison: discard branches buffered since the last flush
            tmp_events = set()
        elif obj["type"] == "STACK_EVENT" and len(obj["stack"]) == len(self.min_stack_list) and obj["stack"][-1] == tos:
            is_tos = True
        elif obj["type"] == "STACK_EVENT" and obj["stack"] and obj["stack"][-1] != tos:
            is_tos = False
        elif Utils.is_real_input_comparison(obj, Utils.max_index - 1) and cur_idx != obj["index"][0]:
            # real comparison on a new input index: flush the buffered branches
            last_was_eof = False
            self.tos_branches.update(tmp_events)
            tmp_events = set()
            cur_idx = int(obj["index"][0])
        elif obj["type"] == "INPUT_COMPARISON" and obj["operator"] == "eof" and not last_was_eof:
            # NOTE(review): this branch looks unreachable — the first condition
            # above already matches every eof INPUT_COMPARISON. Confirm intent.
            last_was_eof = True
            self.tos_branches.update(tmp_events)
            tmp_events = set()
        elif is_tos and obj["type"] == "COVERAGE_EVENT":
            # buffer a branch taken while tos was on top of the stack
            tmp_events.add((obj["old"], obj["new"]))
    # score = tos branches not already known as valid coverage
    self.tos_cover_counter = len(self.tos_branches - Utils.valid_covered.keys())
def getRandomPhoto(request):
    """AJAX endpoint: return one random photo for the logged-in user."""
    if request.method != "POST":
        return HttpResponse("Invalid Request", status=503)
    user = get_object_or_404(User, pk=request.session.get('_auth_user_id'))
    photoDict = Utils.get_random_photo(user.username)
    return JsonResponse({"photoDict": photoDict, "user_id": user.id})
def getTypeDict(request):
    """AJAX endpoint: return the classified-type dictionary of the logged-in user."""
    if request.method != "POST":
        return HttpResponse("违法访问", status=503)
    user = get_object_or_404(User, pk=request.session.get('_auth_user_id'))
    return JsonResponse({"typedict": Utils.get_type_dict(user)})
def parse_venue(self, venue):
    """Print a venue summary, persist it, and collect its tips."""
    venue_id = venue['id']
    title = venue['name']
    checkins = venue['stats']['checkinsCount']
    print('[*] Venue: %s' % title)
    print('\t[+] Checkins Count: %s' % checkins)
    print('\t[+] Venue ID: %s' % venue_id)
    helper = Utils()
    place = helper.get_place(pid=venue_id, name=title, checkins=checkins)
    #place.tip_set.count()
    self.get_tips(place)
    # photo collection is currently disabled
    #self.get_photos(venue_id)
    print('')
def classified(request, pk):
    """Render the gallery of every classified image owned by the given user."""
    owner = get_object_or_404(User, pk=pk)
    context = {
        'user': owner,
        'urls': Utils.get_total_img_urls(owner.username),
    }
    return render(request, 'main/classified_new.html', context)
def analyse_order(self, order):
    """
    Receive an order, try to retrieve it in the brain.yml to launch the
    attached plugins, then re-arm the trigger and the order listener.

    :param order: the sentence received
    :type order: str
    """
    # the STT engine may hand back a null order; skip analysis in that case
    if order is not None:
        analyser = OrderAnalyser(order, main_controller=self, brain=self.brain)
        analyser.start()
    # the analyser is done: go back to waiting for the hotword
    Utils.print_info("Waiting for trigger detection")
    self.trigger_instance.unpause()
    # fresh listener for the next captured order
    self.order_listener = OrderListener(self.analyse_order)
    # restart the trigger to catch the hotword
    self.trigger_instance.start()
def delete(self, scan_uuid):
    """Cancel the specified scan schedule"""
    scan = ScanResource.get_by_uuid(scan_uuid, withResults=False)
    if scan["scheduled"] == False:
        abort(400, "Already canceled")
    # reset every schedule timestamp to the default and detach the task
    cleared = {
        "start_at": Utils.get_default_datetime(),
        "end_at": Utils.get_default_datetime(),
        "started_at": Utils.get_default_datetime(),
        "ended_at": Utils.get_default_datetime(),
        "scheduled": False,
        "task_uuid": None,
    }
    ScanTable.update(cleared).where(ScanTable.id == scan["id"]).execute()
    # return the freshly updated resource
    return ScanResource.get_by_uuid(scan_uuid, withResults=False)
def __init__(self, brain_file=None):
    """Boot the main controller: optional REST API, order listener, trigger."""
    self.brain_file = brain_file
    # get global configuration
    self.settings = SettingLoader.get_settings()

    # spin up the REST API only when enabled in the settings
    if self.settings.rest_api.active:
        Utils.print_info("Starting REST API Listening port: %s" % self.settings.rest_api.port)
        app = Flask(__name__)
        flask_api = FlaskAPI(app, port=self.settings.rest_api.port, brain_file=brain_file)
        flask_api.start()

    # listener that takes over once the trigger hands control back
    self.order_listener = OrderListener(self.analyse_order)

    # wait for the kalliope hotword
    self.trigger_instance = self._get_default_trigger()
    self.trigger_instance.start()
    Utils.print_info("Waiting for trigger detection")
def get(self):
    """Download all vulnerability list"""
    # validate the query-string parameters we actually use
    schema = VulnListInputSchema(only=["tz_offset", "fix_required", "keyword"])
    params, errors = schema.load(request.args)
    if errors:
        abort(400, errors)
    # base query: vulnerabilities joined to their scan results by OID
    vuln_query = VulnTable.select(
        VulnTable.oid,
        VulnTable.fix_required,
        VulnTable.advice,
        VulnTable.created_at,
        VulnTable.updated_at,
        ResultTable.name,
        ResultTable.cvss_base,
        ResultTable.cve,
        ResultTable.description,
    ).join(ResultTable, on=(VulnTable.oid == ResultTable.oid))
    # optional filter on the fix_required flag
    if "fix_required" in params and len(params["fix_required"]) > 0:
        vuln_query = vuln_query.where(VulnTable.fix_required == params["fix_required"])
    # optional keyword search over OID and result name (peewee `**` = ILIKE)
    if "keyword" in params and len(params["keyword"]) > 0:
        vuln_query = vuln_query.where(
            (VulnTable.oid ** "%{}%".format(params["keyword"]))
            | (ResultTable.name ** "%{}%".format(params["keyword"]))
        )
    # group on every selected column to collapse duplicate join rows
    vuln_query = vuln_query.group_by(
        VulnTable.oid,
        VulnTable.fix_required,
        VulnTable.advice,
        VulnTable.created_at,
        VulnTable.updated_at,
        ResultTable.name,
        ResultTable.cvss_base,
        ResultTable.cve,
        ResultTable.description,
    )
    vuln_query = vuln_query.order_by(VulnTable.oid.desc())
    output = ""
    # stream the CSV through a temp file, then return it as one response body
    with tempfile.TemporaryFile("r+") as f:
        writer = csv.DictWriter(
            f, VulneravilityListDownload.VULNERABILITY_CSV_COLUMNS, extrasaction="ignore"
        )
        writer.writeheader()
        for vuln in vuln_query.dicts():
            vuln["description"] = Utils.format_openvas_description(vuln["description"])
            # shift stored timestamps into the client's timezone
            vuln["created_at"] = vuln["created_at"] + timedelta(minutes=params["tz_offset"])
            vuln["updated_at"] = vuln["updated_at"] + timedelta(minutes=params["tz_offset"])
            writer.writerow(vuln)
        f.flush()
        f.seek(0)
        output += f.read()
    headers = {"Content-Type": "text/csv", "Content-Disposition": "attachment"}
    return Response(response=output, status=200, headers=headers)
def classifiedSpecific(request, pk, typeName):
    """Render the gallery page for one root classified type of a user."""
    owner = get_object_or_404(User, pk=pk)
    return render(request, 'main/classifiedSpecific.html', {
        'user': owner,
        'urls': Utils.get_specific_urls(owner.username, typeName),
        "typeName": typeName,
    })
def register(request):
    """Gate view: show the registration form (GET) or create the account (POST)."""
    login_form = LoginForm()
    if request.method == 'GET':
        # GET on gate/register: render the page with an empty form
        register_form = RegistrationForm()
        return render(request, 'users/gate.html', {
            'login_form': login_form,
            'register_form': register_form,
            'is_in_register': True,
        })
    elif request.method == 'POST':
        # POST: validate the submitted registration form
        register_form = RegistrationForm(request.POST)
        if not register_form.is_valid():
            return render(request, 'users/gate.html', {
                'login_form': login_form,
                'register_form': register_form,
                'is_in_register': True,
            })
        username = register_form.cleaned_data['username']
        email = register_form.cleaned_data['email']
        password = register_form.cleaned_data['password2']
        # create_user hashes the password and persists the row; no save() needed
        user = User.objects.create_user(username=username, password=password, email=email)
        Utils.create_user_media(username)
        # the profile object is created manually, so it needs an explicit save()
        user_profile = UserProfile(user=user)
        user_profile.save()
        # seed the per-user classified-type relation table
        Utils.create_user_classifiedtype(user)
        return render(request, 'users/gate.html', {
            'login_form': login_form,
            'register_form': register_form,
            'register_success': True,
        })
def get_tts(cls, tts):
    """
    Return an instance of a TTS module from the name of this module

    :param tts: TTS model
    :type tts: Tts
    :return: TTS module instance

    .. seealso:: TTS
    .. warnings:: Class Method and Public
    """
    logger.debug("get TTS module \"%s\" with parameters %s" % (tts.name, tts.parameters))
    module_name = tts.name.capitalize()
    return Utils.get_dynamic_class_instantiation("tts", module_name, tts.parameters)
def parsePhoto(self, photo, latitude, longitude):
    """Print, resolve the owner of, and persist one geotagged Flickr photo."""
    photo_id = photo['id']
    title = photo['title']
    # static Flickr URL assembled from farm / server / id / secret
    url = "https://farm%s.staticflickr.com/%s/%s_%s_n.jpg" % (
        photo['farm'], photo['server'], photo_id, photo['secret'])
    print('[+] Photo ID: %s' % photo_id)
    print('\t[*] URL: %s' % url)
    print('\t[*] Title: %s' % title)
    (user, name, profile_picture) = self.getPerson(photo['owner'])
    helper = Utils()
    person = helper.get_user(user, name, profile_picture)
    (created_time, tags) = self.getPhotoInfo(photo_id)
    saved = helper.save_post(photo_id, person, created_time, url, title,
                             latitude, longitude, "Flickr")
    helper.save_tags(tags, saved)
    print('')
def patch(self, scan_uuid):
    """Schedule the specified scan"""
    scan = ScanResource.get_by_uuid(scan_uuid, withResults=False)
    if scan["scheduled"] == True:
        abort(400, "Already scheduled")

    schema = ScanUpdateSchema(only=["target", "start_at", "end_at", "slack_webhook_url"])
    params, errors = schema.load(request.json)
    if errors:
        abort(400, errors)

    # the request may override the target / webhook before scheduling
    for key in ("target", "slack_webhook_url"):
        if key in params:
            scan[key] = params[key]

    with db.database.atomic():
        # enqueue the pending task, then mark the scan row as scheduled
        task = PendingTask().add({
            "audit_id": scan["audit_id"],
            "scan_id": scan["id"],
            "target": scan["target"],
            "start_at": params["start_at"],
            "end_at": params["end_at"],
            "slack_webhook_url": scan["slack_webhook_url"],
        })
        params["started_at"] = Utils.get_default_datetime()
        params["ended_at"] = Utils.get_default_datetime()
        params["task_uuid"] = task.uuid
        params["scheduled"] = True
        ScanTable.update(params).where(ScanTable.id == scan["id"]).execute()

    return ScanResource.get_by_uuid(scan_uuid, withResults=False)
def __init__(self, callback=None, **kwargs):
    """
    Start recording the microphone and analyse audio with google api

    :param callback: The callback function to call to send the text
    :param kwargs: key / language / show_all options forwarded to the recogniser
    """
    OrderListener.__init__(self)
    # invoked once the speech has been turned into text
    self.callback = callback

    recognizer = sr.Recognizer()
    with sr.Microphone() as source:
        # calibrate the energy threshold against ambient noise for ~1s
        recognizer.adjust_for_ambient_noise(source)
        Utils.print_info("Say something!")
        audio = recognizer.listen(source)

    # recognize speech using Google Speech Recognition; without an explicit
    # `key` the library falls back to its default testing API key
    try:
        captured_audio = recognizer.recognize_google(
            audio,
            key=kwargs.get('key', None),
            language=kwargs.get('language', "en-US"),
            show_all=kwargs.get('show_all', False))
        Utils.print_success(
            "Google Speech Recognition thinks you said %s" % captured_audio)
        self._analyse_audio(captured_audio)
    except sr.UnknownValueError:
        Utils.print_warning(
            "Google Speech Recognition could not understand audio")
    except sr.RequestError as e:
        Utils.print_danger(
            "Could not request results from Google Speech Recognition service; {0}"
            .format(e))
def patch(self, audit_uuid):
    """Update the specified audit"""
    audit = AuditResource.get_by_id(audit_uuid=audit_uuid, withContacts=False, withScans=False)

    schema = AuditUpdateSchema(only=[
        "name",
        "description",
        "contacts",
        "password",
        "ip_restriction",
        "password_protection",
        "slack_default_webhook_url",
    ])
    params, errors = schema.load(request.json)
    if errors:
        abort(400, errors)

    # enabling protection without supplying a password makes no sense
    if params.get("password_protection") == True and "password" not in params:
        abort(400, "Password must be provided when enforcing protection")

    if "password" in params:
        params["password"] = Utils.get_password_hash(params["password"])
    if params.get("password_protection") == False:
        # disabling protection clears the stored password
        params["password"] = ""

    # contacts are stored in their own table, not on the audit row
    contacts = params.pop("contacts", [])

    with db.database.atomic():
        if params != {}:
            AuditTable.update(params).where(AuditTable.id == audit["id"]).execute()
        if len(contacts) > 0:
            # replace the audit's contact list wholesale
            for contact in contacts:
                contact["audit_id"] = audit["id"]
            ContactTable.delete().where(ContactTable.audit_id == audit["id"]).execute()
            ContactTable.insert_many(contacts).execute()

    return AuditResource.get_by_id(audit_uuid=audit["uuid"], withContacts=True, withScans=True)
def parse_tip(self, place, tip):
    """Store one Foursquare tip for *place*, then print its details."""
    tip_id = tip['id']
    body = tip['text']
    # epoch seconds -> "YYYY-MM-DD HH:MM:SS"
    created = datetime.fromtimestamp(int(tip['createdAt'])).strftime('%Y-%m-%d %H:%M:%S')
    tip_user = tip['user']
    uid = tip_user['id']
    full_name = "%s %s" % (tip_user['firstName'], tip_user['lastName'])
    pic = tip_user['photo']
    # Foursquare photo URLs: prefix + size + suffix
    avatar = pic['prefix'] + 'original' + pic['suffix']
    store = Utils()
    author = store.get_user(uid, full_name, avatar)
    store.save_tip(tid=tip_id, author=author, place=place, text=body, dateOfCreation=created)
    print('\t\t[+] Tip Id: %s' % tip_id)
    print('\t\t[+] Created at: %s' % created)
    print('\t\t[+] User ID: %s' % uid)
    print('\t\t[+] Full Name: %s' % full_name)
    print('\t\t[+] Profile Picture: %s' % avatar)
    print('\t\t[+] Text: %s' % body)
    print('')
def post(self):
    """Publish an API token for administrators"""
    # admin login is restricted to whitelisted source addresses
    if Utils.is_source_ip_permitted(request.access_route[0]) == False:
        abort(403, "Not allowed to access from your IP address")

    params, errors = AuthInputSchema().load(request.json)
    if errors:
        abort(400, errors)

    if params["password"] != app.config["ADMIN_PASSWORD"]:
        abort(401, "Invalid password")

    # unrestricted wildcard-scope token for the administrator
    jwt = create_access_token(identity={"scope": "*", "restricted": False})
    return {"token": jwt}, 200
def check_no_new_branches(objs):
    """
    Checks if a list of comparisons on different characters has no coverage
    event in between. If this happens the generator is stuck in a loop which
    is not able to proceed usefully.

    :param objs: trace events (comparisons and coverage events)
    :return: True when 10+ distinct real comparisons occur back-to-back
    """
    streak = set()
    for event in objs:
        if not Utils.is_real_input_comparison(event, Utils.max_index - 1):
            # any non-comparison event (e.g. new coverage) breaks the streak
            streak = set()
            continue
        streak.add(tuple(event["index"]))
        if len(streak) >= 10:
            return True
    return False