def apod():
    url = current_app.config['APOD_URL']
    params = {'api_key': get_jwt(), **get_json(PayloadSchema())}
    apod_request = Request(url, params=params)
    response = apod_request.get().json()

    if 'thumbnail_url' in response:
        image_url = response['thumbnail_url']
    else:
        image_url = (response['hdurl']
                     if bool(params['hd'])
                     else response['url'])

    content = requests.get(image_url).content
    path = get_media_path()
    file_name = get_file_name(params['date'])
    save_image(content, path, file_name)

    return jsonify({
        'status': 'Success! Image saved.',
        'path': os.path.join(path, file_name),
    })

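# NOTE: Illustrative sketch only. PayloadSchema is not shown in this listing;
# the minimal marshmallow schema below is an assumption inferred from the two
# fields apod() actually reads ('date' and 'hd'). The real schema may differ.
from marshmallow import Schema, fields


class PayloadSchemaSketch(Schema):
    # APOD date in 'YYYY-MM-DD' form, later used to build the file name.
    date = fields.String(required=True)
    # When truthy, apod() prefers the 'hdurl' image over 'url'.
    hd = fields.Boolean(load_default=False)
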
def deliberate_observables():
    relay_input = get_json(ObservableSchema(many=True))
    observables = group_observables(relay_input)

    if not observables:
        return jsonify_data({})

    start_time = datetime.utcnow()
    token = get_jwt()

    g.verdicts = []

    for observable in observables:
        output = get_abuse_ipdb_outputs(observable, token)
        if output:
            g.verdicts.append(extract_verdicts(output, start_time))

    relay_output = {}
    if g.verdicts:
        relay_output['verdicts'] = format_docs(g.verdicts)

    return jsonify_data(relay_output)

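# NOTE: Illustrative sketch only. format_docs() is used throughout these relay
# views but is not defined in this listing; relay modules of this kind
# conventionally wrap a list of CTIM documents in a count/docs envelope, so
# the helper below is an assumption shown purely for context.
def format_docs_sketch(docs):
    return {'count': len(docs), 'docs': docs}
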
def refer_observables():
    relay_input = get_json(ObservableSchema(many=True))
    observables = group_observables(relay_input)

    if not observables:
        return jsonify_data({})

    api_key = get_jwt()
    client = URLScanClient(
        base_url=current_app.config['URL_SCAN_API_URL'],
        api_key=api_key,
        user_agent=current_app.config['USER_AGENT'],
        observable_types=current_app.config['URL_SCAN_OBSERVABLE_TYPES'])

    g.references = []

    for observable in observables:
        output = client.get_search_data(observable)
        if output and output['results']:
            g.references.extend(extract_references(observable))

    return jsonify_data(g.references)

def observe_observables():
    relay_input = get_json(ObservableSchema(many=True))
    observables = group_observables(relay_input)

    if not observables:
        return jsonify_data({})

    time_now = datetime.utcnow()
    # Get the dict of AbuseIPDB categories with their titles and descriptions.
    categories = get_categories()
    token = get_jwt()

    g.verdicts = []
    g.judgements = []
    g.indicators = []
    g.sightings = []
    g.relationships = []

    for observable in observables:
        output = get_abuse_ipdb_outputs(observable, token)
        if output:
            g.verdicts.append(extract_verdicts(output, time_now))

            output['categories_ids'] = []
            output['relations'] = {}

            reports = output['data']['reports']
            reports.sort(key=lambda x: x['reportedAt'], reverse=True)

            if len(reports) >= current_app.config['CTR_ENTITIES_LIMIT']:
                reports = reports[:current_app.config['CTR_ENTITIES_LIMIT']]

            for report in reports:
                g.judgements.append(
                    extract_judgement(report, output, categories))
                g.indicators.extend(
                    extract_indicators(report, output, categories))
                g.sightings.append(extract_sightings(report, output))

            g.relationships.extend(extract_relationships(output))

    relay_output = {}

    if g.judgements:
        relay_output['judgements'] = format_docs(g.judgements)
    if g.verdicts:
        relay_output['verdicts'] = format_docs(g.verdicts)
    if g.sightings:
        relay_output['sightings'] = format_docs(g.sightings)
    if g.indicators:
        relay_output['indicators'] = format_docs(g.indicators)
    if g.relationships:
        relay_output['relationships'] = format_docs(g.relationships)

    return jsonify_data(relay_output)

def deliberate_observables():
    api_key = get_jwt()  # Get the third-party API key.
    data = {}  # Data dictionary to be sent back to Threat Response.
    g.verdicts = []  # Valid verdict results for every observable.
    g.judgements = []  # Valid judgement results for every observable.

    relay_input = get_json(ObservableSchema(many=True))
    observables = group_observables(relay_input)

    if not observables:
        return jsonify_data({})

    observables = build_input_api(observables)

    for observable in observables:
        o_value = observable['value']
        o_type = observable['type'].lower()

        # Print the observable for which a reputation query is sent to the
        # third party.
        print(green(o_value, bold=True))
        disposition = call_api(o_value, api_key)  # Query the third party.
        print(cyan(disposition, bold=True))

        # Translate the value returned by the third party into the
        # disposition tuple expected by Threat Response.
        disposition_tuple = get_disposition(disposition)
        print(cyan(disposition_tuple, bold=True))

        # Skip the observable if no disposition could be determined.
        if not disposition_tuple:
            continue

        # Compute the start date and, one week later, the end-of-life date
        # that judgements and verdicts must carry.
        start_time = datetime.utcnow()
        end_time = start_time + timedelta(weeks=1)
        valid_time = {
            'start_time': start_time.isoformat() + 'Z',
            'end_time': end_time.isoformat() + 'Z',
        }

        # Append a new verdict and judgement with the minimum of information
        # expected by the CTIM format.
        g.verdicts.append(
            get_verdict(o_value, o_type, disposition_tuple, valid_time))
        g.judgements.append(
            get_judgement(o_value, o_type, disposition_tuple, valid_time))

    # g.verdicts now holds the verdicts for every requested observable.
    # Add the formatted lists to the data dictionary.
    if g.verdicts:
        data['verdicts'] = format_docs(g.verdicts)
    if g.judgements:
        data['judgements'] = format_docs(g.judgements)

    # Wrap everything into a valid CTIM JSON result for the original
    # Threat Response request.
    result = {'data': data}
    print(green(f"JSON result to be sent to Threat Response:\n{result}",
                bold=True))

    return jsonify(result)

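# NOTE: Illustrative sketch only. get_disposition() is not defined in this
# listing; the mapping keys below are assumptions, while the numeric codes are
# the standard CTIM dispositions. Returning an empty tuple for unrecognised
# values lets the caller's `if not disposition_tuple: continue` skip the
# observable.
def get_disposition_sketch(reputation):
    mapping = {
        'clean': (1, 'Clean'),
        'malicious': (2, 'Malicious'),
        'suspicious': (3, 'Suspicious'),
        'common': (4, 'Common'),
        'unknown': (5, 'Unknown'),
    }
    return mapping.get(str(reputation).lower(), ())
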
def observe_observables():
    relay_input = get_json(ObservableSchema(many=True))
    observables = group_observables(relay_input)

    if not observables:
        return jsonify_data({})

    token = get_jwt()

    g.sightings = []
    g.indicators = []
    g.errors = []

    for observable in observables:
        output, spycloud_catalogs = validate_spycloud_outputs(
            observable, token)

        if output:
            breaches = output['results']
            breaches.sort(
                key=lambda x: x['spycloud_publish_date'], reverse=True)

            unique_catalog_id_set = set()

            if len(breaches) >= current_app.config['CTR_ENTITIES_LIMIT']:
                breaches = breaches[:current_app.config['CTR_ENTITIES_LIMIT']]

            for breach in breaches:
                g.sightings.append(
                    extract_sightings(breach, output, spycloud_catalogs))

                catalog_id = breach['source_id']
                if catalog_id not in unique_catalog_id_set:
                    if spycloud_catalogs[catalog_id]:
                        g.indicators.append(
                            extract_indicators(spycloud_catalogs[catalog_id]))
                        unique_catalog_id_set.add(catalog_id)
                    else:
                        error_message = current_app.config[
                            'CATALOG_ERROR_TEMPLATE'].format(
                                catalog_id=catalog_id)
                        g.errors.append(get_catalog_error(error_message))

    relay_output = {}

    if g.sightings:
        relay_output['sightings'] = format_docs(g.sightings)
    if g.indicators:
        relay_output['indicators'] = format_docs(g.indicators)

    return jsonify_data(relay_output, g.errors)

def observe_observables():
    _ = get_jwt()
    relay_input = get_json(ObservableSchema(many=True))
    observables = group_observables(relay_input)

    if not observables:
        return jsonify_data({})

    g.bundle = Bundle()

    for observable in observables:
        g.bundle.merge(observe(observable))

    return jsonify_data(g.bundle.json())

def poll_history(url, save):
    params = get_query_parameters(url)
    payload = get_json(url, params)

    # The Mt. Gox API wraps the trade list in a 'return' key.
    if 'return' in payload:
        data = payload.get('return')
    else:
        data = payload

    result = []
    for trade in data:
        if save:
            # Persist each trade as an MtgoxTrade model instance.
            mtgox = MtgoxTrade()
            mtgox.amount = trade['amount']
            mtgox.time = trade['date']
            mtgox.price = trade['price']
            mtgox.type = trade['trade_type']
            mtgox.tid = trade['tid']
            mtgox.save()
        else:
            # Collect the trades as plain dicts to return to the caller.
            result.append({
                'time': trade['date'],
                'price': trade['price'],
                'amount': trade['amount'],
                'type': trade['trade_type'],
                'tid': trade['tid'],
            })

    length = len(data) - 1
    if length >= 1:
        last_data = data[length]
    else:
        return result

    last_tid = last_data.get('tid')
    since = params.get('since')
    if since is not None and int(last_tid) < int(since):
        print('Finished polling Mt. Gox history data')
        return result

    # Keep paging through the history, starting from the last seen trade id.
    url = 'https://data.mtgox.com/api/1/BTCusd/trades?since=' + str(last_tid)
    if save:
        poll_history(url, save)
        return
    else:
        result.extend(poll_history(url, save))
        return result

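# NOTE: Illustrative usage only. This shows the call shape poll_history()
# expects; the endpoint is the same one the function builds for its recursive
# calls, and since the Mt. Gox API is long gone the call is kept commented out.
#
# trades = poll_history('https://data.mtgox.com/api/1/BTCusd/trades', save=False)
# for trade in trades[:5]:
#     print(trade['time'], trade['price'], trade['amount'], trade['type'])
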
def respond_observables():
    relay_input = get_json(ObservableSchema(many=True))
    observables = get_scan_observables(relay_input)

    if not observables:
        return jsonify_data([])

    g.actions = []

    for observable in observables:
        g.actions.append(extract_action(observable))

    return jsonify_data(g.actions)

def respond_trigger():
    relay_input = get_json(ActionFormParamsSchema())
    api_key = get_jwt()
    client = URLScanClient(
        base_url=current_app.config['URL_SCAN_API_URL'],
        api_key=api_key,
        user_agent=current_app.config['USER_AGENT'],
        observable_types=current_app.config['URL_SCAN_OBSERVABLE_TYPES']
    )

    client.make_scan(relay_input['observable_value'])

    return jsonify_data({'status': 'success'})

def sample_json(request):
    url = ('http://www.highcharts.com/samples/data/jsonp.php'
           '?filename=aapl-ohlcv.json&callback=?')
    from api.utils import get_json
    params = get_query_parameters(url)
    js = get_json(url, params)
    return HttpResponse(json.dumps(js), content_type="application/json")

def tile_data():
    _ = get_jwt()
    _ = get_json(DashboardTileDataSchema())
    return jsonify_data({})

def observe_observables():
    relay_input = get_json(ObservableSchema(many=True))
    observables = group_observables(relay_input)

    if not observables:
        return jsonify_data({})

    api_key = get_jwt()
    client = URLScanClient(
        base_url=current_app.config['URL_SCAN_API_URL'],
        api_key=api_key,
        user_agent=current_app.config['USER_AGENT'],
        observable_types=current_app.config['URL_SCAN_OBSERVABLE_TYPES'])

    g.sightings = []
    g.judgements = []
    g.indicators = []
    g.relationships = []

    for observable in observables:
        output = client.get_search_data(observable)

        if output:
            output['relationships'] = {}

            search_results = output['results']
            search_results.sort(key=lambda x: x['task']['time'],
                                reverse=True)

            if len(search_results) > current_app.config['CTR_ENTITIES_LIMIT']:
                search_results = \
                    search_results[:current_app.config['CTR_ENTITIES_LIMIT']]

            workers_number = min((os.cpu_count() or 1) * 5,
                                 len(search_results) or 1)
            with ThreadPoolExecutor(max_workers=workers_number) as executor:
                result_outputs = \
                    executor.map(client.get_result_data, search_results)

            for search_result in search_results:
                g.sightings.append(extract_sighting(output, search_result))

                result_output = next(result_outputs)
                if result_output and \
                        result_output['verdicts']['overall']['malicious']:
                    g.judgements.append(
                        extract_judgement(output, result_output))

                    for category in \
                            result_output['verdicts']['overall']['categories']:
                        if not output['relationships'].get(category):
                            g.indicators.append(
                                extract_indicator(result_output, category))
                            output['relationships'][category] = {
                                'sighting_ids': [g.sightings[-1]['id']],
                                'indicator_id': g.indicators[-1]['id']
                            }
                        else:
                            output['relationships'][category][
                                'sighting_ids'].append(g.sightings[-1]['id'])

            g.relationships.extend(
                extract_relationships(output['relationships']))

    relay_output = {}

    if g.sightings:
        relay_output['sightings'] = format_docs(g.sightings)
    if g.judgements:
        relay_output['judgements'] = format_docs(g.judgements)
    if g.indicators:
        relay_output['indicators'] = format_docs(g.indicators)
    if g.relationships:
        relay_output['relationships'] = format_docs(g.relationships)

    return jsonify_data(relay_output)

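# NOTE: Illustrative sketch only. The view functions above follow the
# SecureX/Threat Response relay pattern, where each handler is exposed as a
# POST endpoint on a Flask Blueprint. The blueprint name and URL paths below
# reflect that convention and are assumptions, not taken from this listing.
from flask import Blueprint

enrich_api = Blueprint('enrich', __name__)

enrich_api.add_url_rule('/observe/observables',
                        view_func=observe_observables, methods=['POST'])
enrich_api.add_url_rule('/deliberate/observables',
                        view_func=deliberate_observables, methods=['POST'])
enrich_api.add_url_rule('/refer/observables',
                        view_func=refer_observables, methods=['POST'])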