def add_file():
    """Handle a file-upload request: store the binary, schedule processing
    and record the user-supplied date.

    Returns a jsonize()'d message describing the outcome. The HTTP status
    is deliberately left at 200 even on a bad date because the Angular
    front-end does not show the message body on non-2xx responses.
    """
    # tags = request.forms.get('name')
    upload = request.files.get('file')
    form_date = request.forms.get('file_date')
    try:
        # Validate the date format before doing any storage work.
        process_date(form_date)
    except ValueError:
        # response.status = 422  # status can't be added because angular
        # will not show the message.
        return jsonize({'message': 'Invalid date format'})
    logging.debug("add_file(). date=" + str(form_date))
    if form_date is None:
        form_date = datetime.datetime.now()
    data_bin = upload.file.read()
    # The SHA1 of the content is the canonical file identifier.
    file_id = hashlib.sha1(data_bin).hexdigest()
    logging.debug("add_file(): file_id=" + str(file_id))
    status = upload_file(data_bin)
    process_file(file_id)  # ToDo: add a redis job
    update_date(file_id, form_date)
    if status == "ok":
        return jsonize({'message': 'Added with ' + str(file_id)})
    elif status in ("already exists", "virustotal"):
        # Both statuses mean the sample is already known; the original code
        # had two duplicate branches returning this same message.
        return jsonize({'message': 'Already exists ' + str(file_id)})
    else:
        return jsonize({'message': 'Error'})
def add_file():
    """Accept an uploaded file, register it, and return a status message."""
    # tags = request.forms.get('name')
    upload = request.files.get('file')
    form_date = request.forms.get('file_date')

    # Reject malformed dates up front; keep HTTP 200 so the Angular UI
    # still displays the message body.
    try:
        process_date(form_date)
    except ValueError:
        # response.status = 422  # status can't be added because angular
        # will not show the message.
        return jsonize({'message': 'Invalid date format'})

    logging.debug("add_file(). date=" + str(form_date))
    if form_date is None:
        form_date = datetime.datetime.now()

    name = upload.filename
    data_bin = upload.file.read()
    file_id = hashlib.sha1(data_bin).hexdigest()
    logging.debug("add_file(): file_id=" + str(file_id))

    status = upload_file(data_bin)
    process_file(file_id)  # ToDo: add a redis job
    update_date(file_id, form_date)

    # Map the storage status to the user-facing message.
    responses = {
        "ok": 'Added with ' + str(file_id),
        "already exists": 'Already exists ' + str(file_id),
        "virustotal": 'Already exists ' + str(file_id),
    }
    return jsonize({'message': responses.get(status, 'Error')})
def parse_vt_response(json_response):
    """Normalize a raw VirusTotal API response into a mongo-friendly dict.

    Returns None when VT reports no record (response_code != 1), otherwise
    the cleaned response with 'positives', 'total' and a parsed 'date'
    ('date' is None when the value VT sent cannot be parsed).

    Raises:
        ValueError: if json_response is None.
    """
    if json_response is None:
        logging.exception("parse_vt_response recieved None")
        raise ValueError("json_response is None")
    response_code = json_response.get("response_code")
    if response_code != 1:
        return None
    positives = json_response.get("positives")
    total = json_response.get("total")
    if positives is None and json_response.get('scans') is not None:
        positives = total_positive(json_response.get('scans'))
        total = len(json_response.get('scans'))
    # scans uses antivirus as json key, and
    # imports uses dll's as keys. So they can't be saved
    # to mongo or seached easily. So we convert the dictionary (of scans)
    # and the array (of imports) into an array
    # dictionaries, where the key is now in 'name'.
    if json_response.get('scans') is not None:
        json_response["scans"] = key_dict_clean(json_response["scans"])
    if json_response.get('additional_info') is not None and json_response.get('additional_info').get('imports') is not None:
        json_response["additional_info"]["imports"] = key_list_clean(
            json_response["additional_info"]["imports"])
    if json_response.get('additional_info') is not None and json_response.get('additional_info').get('pe-resource-types') is not None:
        json_response["additional_info"]["pe-resource-types"] = key_list_clean(
            json_response["additional_info"]["pe-resource-types"])
    json_response = rec_key_replace(json_response)
    # Guard with .get(): a response_code == 1 reply without 'scans' would
    # otherwise raise KeyError here, while every earlier access is guarded.
    for av_scan in json_response.get("scans", []):
        if av_scan["result"] is not None:
            av_scan["result"] = av_scan["result"].strip()
    ret = json_response
    ret["positives"] = positives
    ret["total"] = total
    # Trying to get the best date
    date_registers = ['first_seen', 'additional_info.first_seen_itw',
                      'scan_date']
    vt_date = None
    for register in date_registers:
        vt_date = read_from_dictionary(register, json_response)
        if vt_date is not None:
            break
    try:
        # The "date" value is use to speed up time queries for av signatures
        ret["date"] = process_date(vt_date)
    except ValueError:
        ret["date"] = None
        logging.exception(
            "virusTotalApi->parse_vt_response: invalid date recieved by VT: "
            + str(vt_date))
    return ret
def save_first_seen(self, file_id, vt_date):
    """Persist the earliest known date for file_id.

    Silently ignores a missing or unparseable vt_date; only writes when the
    new date is older than the previously stored one (or none is stored).
    """
    if vt_date is None:
        return None
    try:
        date = process_date(vt_date)
    except ValueError:
        # print() call form (not the Python-2-only statement) for py2/py3
        # compatibility, consistent with the style used by fix_date().
        print("MetaController()->save_first_seen: invalid date recieved by VT:"
              + str(vt_date))
        return
    old_date = self.get_first_date(file_id)
    if old_date is None or date < old_date:
        self.write(file_id, {"date": date})
def fix_date(r):
    """Validate the record's 'date' field; return False when it is missing,
    not a unicode string, or cannot be parsed by process_date()."""
    str_date = r.get('date')

    # Guard clause: anything that is not a unicode string is reported as a
    # CSV-style line and rejected.
    if type(str_date) is not unicode:
        print("datenotunicode," + str(r.get('_id')) + "," +
              str(r.get('file_id')) + "," + str(r.get('date')) + "," +
              str(type(r.get('date'))))
        sys.stdout.flush()
        return False

    try:
        date = process_date(str_date)
    except Exception:
        print("failed to convert date for " + str(str_date) + " in " +
              str(r.get('_id')))
        sys.stdout.flush()
        return False
def fix_date(r):
    """Validate the record's date (falling back to 'first_seen'); return
    False when it is missing, not a unicode string, or unparseable."""
    # Prefer 'date', fall back to 'first_seen' when absent.
    str_date = r.get('date', r.get('first_seen'))

    if type(str_date) is not unicode:
        print("datenotunicode," + str(r.get('_id')) + "," +
              str(r.get('sha1')) + "," + str(r.get('date')) + "," +
              str(type(r.get('date'))))
        sys.stdout.flush()
        return False

    try:
        date = process_date(str_date)
    except Exception:
        print("failed to convert date for " + str(str_date) + " in " +
              str(r.get('_id')))
        sys.stdout.flush()
        return False
def parse_vt_response(json_response):
    """Normalize a raw VirusTotal response for storage and searching.

    Returns None when VT has no record (response_code != 1); otherwise the
    cleaned dict with 'positives', 'total' and a parsed 'date' (None when
    the date VT sent cannot be parsed).

    Raises:
        ValueError: if json_response is None.
    """
    if json_response is None:
        logging.exception("parse_vt_response recieved None")
        raise ValueError("json_response is None")
    response_code = json_response.get("response_code")
    if response_code != 1:
        return None
    positives = json_response.get("positives")
    total = json_response.get("total")
    if positives is None and json_response.get('scans') is not None:
        positives = total_positive(json_response.get('scans'))
        total = len(json_response.get('scans'))
    # scans uses antivirus as json key, and
    # imports uses dll's as keys. So they can't be saved
    # to mongo or seached easily. So we convert the dictionary (of scans)
    # and the array (of imports) into an array
    # dictionaries, where the key is now in 'name'.
    if json_response.get('scans') is not None:
        json_response["scans"] = key_dict_clean(json_response["scans"])
    if (json_response.get('additional_info') is not None and
            json_response.get('additional_info').get('imports') is not None):
        json_response["additional_info"]["imports"] = key_list_clean(
            json_response["additional_info"]["imports"])
    if (json_response.get('additional_info') is not None and
            json_response.get('additional_info').get('pe-resource-types') is not None):
        json_response["additional_info"]["pe-resource-types"] = key_list_clean(
            json_response["additional_info"]["pe-resource-types"])
    json_response = rec_key_replace(json_response)
    ret = json_response
    ret["positives"] = positives
    ret["total"] = total
    # Trying to get the best date
    date_registers = ['first_seen', 'additional_info.first_seen_itw',
                      'scan_date']
    vt_date = None
    for register in date_registers:
        vt_date = read_from_dictionary(register, json_response)
        # PEP 8: compare against None with "is not", never "!=".
        if vt_date is not None:
            break
    try:
        # The "date" value is use to speed up time queries for av signatures
        ret["date"] = process_date(vt_date)
    except ValueError:
        ret["date"] = None
        logging.exception(
            "virusTotalApi->parse_vt_response: invalid date recieved by VT: "
            + str(vt_date))
    return ret
def get_tasks_on_queue(queue_name):
    """List the queued tasks on the given redis queue.

    Each entry carries the enqueue date, the 40-char task id extracted from
    the job description, and the count of valid hashes in that task. Jobs
    whose description contains no task id are skipped.
    """
    q = Queue(queue_name, connection=Redis(host=envget('redis.host')))
    # job.to_dict() returns something like this:
    # {u'origin': u'task_no_vt', u'status': u'queued',
    #  u'description': u"Api.task.generic_task('N7UFZ56FQDITJ34F40TZB50XAWVNW575QGIL4YEC')",
    #  u'created_at': '2017-03-03T20:14:47Z',
    #  u'enqueued_at': '2017-03-03T20:14:47Z',
    #  u'timeout': 31536000,
    #  u'data': '\x80\x02(X\x15\x00\x00\x00Api.task.generic_taskq\x01NU(...'}
    task_id_pattern = re.compile('[A-Z0-9]{40}')  # hoisted out of the loop
    tasks = []
    for job in q.jobs:
        # Serialize the job once; the original called to_dict() twice.
        job_dict = job.to_dict()
        task = {"date_enqueued": str(process_date(job_dict.get('enqueued_at')))}
        task_id = task_id_pattern.search(job_dict.get('description'))
        if task_id is None:
            continue
        task['task_id'] = task_id.group(0)
        task['hashes'] = count_valid_hashes_in_task(task['task_id'])
        tasks.append(task)
    return tasks
def get_tasks_on_queue(queue_name):
    """Return a summary dict for every job waiting on *queue_name*."""
    queue = Queue(queue_name, connection=Redis(host=envget('redis.host')))
    tasks = []
    for job in queue.jobs:
        enqueued = process_date(job.to_dict().get('enqueued_at'))
        # job.to_dict() returns something like this:
        # {u'origin': u'task_no_vt', u'status': u'queued',
        #  u'description': u"Api.task.generic_task('N7UFZ56FQDITJ34F40TZB50XAWVNW575QGIL4YEC')",
        #  u'created_at': '2017-03-03T20:14:47Z',
        #  u'enqueued_at': '2017-03-03T20:14:47Z',
        #  u'timeout': 31536000,
        #  u'data': '\x80\x02(X\x15\x00\x00\x00Api.task.generic_taskq\x01NU(...'}
        match = re.search('[A-Z0-9]{40}', job.to_dict().get('description'))
        if match is None:
            # No 40-char task id in the description: skip this job.
            continue
        task = {"date_enqueued": str(enqueued)}
        task['task_id'] = match.group(0)
        task['hashes'] = count_valid_hashes_in_task(task['task_id'])
        tasks.append(task)
    return tasks