def parse_schedule(request_params):
    start_time = None
    schedule = None
    schedule_type = request_params.get("schedule", None)
    if schedule_type:
        if schedule_type == "run_at":
            start_time = HXAPI.dt_from_str(request_params["run_at_value"])
        elif schedule_type == "run_interval":
            schedule = {}
            interval_value = int(request_params["interval_value"])
            interval_unit = request_params["interval_unit"]
            if interval_unit == "second":
                schedule["seconds"] = interval_value
            elif interval_unit == "minute":
                schedule["minutes"] = interval_value
            elif interval_unit == "hour":
                schedule["hours"] = interval_value
            elif interval_unit == "day":
                schedule["days"] = interval_value
            elif interval_unit == "week":
                schedule["weeks"] = interval_value
            elif interval_unit == "month":
                schedule["months"] = interval_value
            if request_params["interval_start"] == "interval_start_at":
                start_time = HXAPI.dt_from_str(request_params["interval_start_value"])
    return (start_time, schedule)
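# Usage sketch for parse_schedule. The keys below are the form fields the function
# itself reads; the concrete values are hypothetical. With an interval schedule and
# no explicit start time, only the interval dict is populated.
example_params = {
    "schedule": "run_interval",
    "interval_value": "15",
    "interval_unit": "minute",
    "interval_start": "interval_start_now",
}
start_time, schedule = parse_schedule(example_params)
# start_time -> None, schedule -> {"minutes": 15}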
def oiocCreate(self, iocname, ioc, username):
    with self._lock:
        return self._db.table("openioc").insert(
            {
                "ioc_id": str(secure_uuid4()),
                "iocname": str(iocname),
                "username": str(username),
                "ioc": str(ioc),
                "create_timestamp": HXAPI.dt_to_str(datetime.datetime.utcnow()),
                "update_timestamp": HXAPI.dt_to_str(datetime.datetime.utcnow()),
            }
        )
def taskProfileAdd(self, name, actor, params):
    with self._lock:
        return self._db.table("taskprofiles").insert(
            {
                "taskprofile_id": str(secure_uuid4()),
                "name": str(name),
                "actor": str(actor),
                "params": params,
                "create_timestamp": HXAPI.dt_to_str(datetime.datetime.utcnow()),
                "update_timestamp": HXAPI.dt_to_str(datetime.datetime.utcnow()),
            }
        )
def scriptCreate(self, scriptname, script, username):
    with self._lock:
        return self._db.table("scripts").insert(
            {
                "script_id": str(secure_uuid4()),
                "scriptname": str(scriptname),
                "username": str(username),
                "script": str(script),
                "create_timestamp": HXAPI.dt_to_str(datetime.datetime.utcnow()),
                "update_timestamp": HXAPI.dt_to_str(datetime.datetime.utcnow()),
            }
        )
def oiocCreate(self, iocname, ioc, username):
    return self._db_openioc.insert_one(
        {
            "ioc_id": str(secure_uuid4()),
            "iocname": str(iocname),
            "username": str(username),
            "ioc": str(ioc),
            "create_timestamp": HXAPI.dt_to_str(datetime.datetime.utcnow()),
            "update_timestamp": HXAPI.dt_to_str(datetime.datetime.utcnow()),
        }
    )
def scriptCreate(self, scriptname, script, username):
    return self._db_scripts.insert_one(
        {
            "script_id": str(secure_uuid4()),
            "scriptname": str(scriptname),
            "username": str(username),
            "script": str(script),
            "create_timestamp": HXAPI.dt_to_str(datetime.datetime.utcnow()),
            "update_timestamp": HXAPI.dt_to_str(datetime.datetime.utcnow()),
        }
    )
def multiFileCreate(
    self,
    username,
    profile_id,
    display_name=None,
    file_listing_id=None,
    api_mode=False,
):
    with self._lock:
        ts = HXAPI.dt_to_str(datetime.datetime.utcnow())
        r = None
        try:
            r = self._db.table("multi_file").insert(
                {
                    "display_name": display_name or "Unnamed File Request",
                    "username": username,
                    "profile_id": profile_id,
                    "files": [],
                    "stopped": False,
                    "api_mode": api_mode,
                    "create_timestamp": ts,
                    "update_timestamp": ts,
                    "file_listing_id": file_listing_id,
                }
            )
            return r
        except Exception:
            # Defensive cleanup: if anything fails after insert() has returned a
            # doc_id, remove the partially created record before re-raising.
            # TODO: it is unclear whether this branch can ever be reached.
            if r:
                self._db.table("multi_file").remove(doc_ids=[r])
            raise
def transform(element):
    if dict_key in element[dict_name]:
        del element[dict_name][dict_key]
    if update_timestamp and "update_timestamp" in element:
        element["update_timestamp"] = HXAPI.dt_to_str(datetime.datetime.utcnow())
def ruleGet(self, rule_id):
    with self._lock:
        r = self._db.table("rules").get(tinydb.Query()["id"] == rule_id)
        if r:
            return HXAPI.b64(r["rule"], decode=True, decode_string=True)
        else:
            return False
def sessionCreate(self, session_id):
    return self._db_session.insert_one(
        {
            "session_id": session_id,
            "session_data": {},
            "update_timestamp": HXAPI.dt_to_str(datetime.datetime.utcnow()),
        }
    )
def fileListingStop(self, file_listing_id):
    with self._lock:
        return self._db.table("file_listing").update(
            {
                "stopped": True,
                "update_timestamp": HXAPI.dt_to_str(datetime.datetime.utcnow()),
            },
            doc_ids=[int(file_listing_id)],
        )
def sessionCreate(self, session_id):
    with self._lock:
        return self._db.table("session").insert(
            {
                "session_id": session_id,
                "session_data": {},
                "update_timestamp": HXAPI.dt_to_str(datetime.datetime.utcnow()),
            }
        )
def transform(element):
    if type(value) is list:
        element[list_name].extend(value)
    else:
        element[list_name].append(value)
    if update_timestamp and "update_timestamp" in element:
        element["update_timestamp"] = HXAPI.dt_to_str(datetime.datetime.utcnow())
def transform(element):
    for i in element[list_name]:
        if i[query_key] == query_value:
            i[k] = v
            break
    if update_timestamp and "update_timestamp" in element:
        element["update_timestamp"] = HXAPI.dt_to_str(datetime.datetime.utcnow())
def multiFileStop(self, multi_file_id):
    with self._lock:
        return self._db.table("multi_file").update(
            {
                "stopped": True,
                "update_timestamp": HXAPI.dt_to_str(datetime.datetime.utcnow()),
            },
            doc_ids=[int(multi_file_id)],
        )
def stackJobStop(self, stack_job_eid):
    with self._lock:
        return self._db.table("stacking").update(
            {
                "stopped": True,
                "update_timestamp": HXAPI.dt_to_str(datetime.datetime.utcnow()),
            },
            doc_ids=[int(stack_job_eid)],
        )
def sessionUpdate(self, session_id, session_data):
    with self._lock:
        return self._db.table("session").update(
            {
                "session_data": session_data,
                "update_timestamp": HXAPI.dt_to_str(datetime.datetime.utcnow()),
            },
            tinydb.Query()["session_id"] == session_id,
        )
def sessionUpdate(self, session_id, session_data):
    return self._db_session.replace_one(
        {"session_id": session_id},
        {
            "session_id": session_id,
            "session_data": dict(session_data),
            "update_timestamp": HXAPI.dt_to_str(datetime.datetime.utcnow()),
        },
    )
def stackJobUpdateGroupBy(self, profile_id, bulk_download_eid, last_groupby):
    with self._lock:
        return self._db.table("stacking").update(
            {
                "last_groupby": last_groupby,
                "update_timestamp": HXAPI.dt_to_str(datetime.datetime.utcnow()),
            },
            (tinydb.Query()["profile_id"] == profile_id)
            & (tinydb.Query()["bulk_download_eid"] == int(bulk_download_eid)),
        )
def fileListingStop(self, file_listing_id):
    return self._db_file_listing.update_one(
        {"_id": ObjectId(file_listing_id)},
        {
            "$set": {
                "stopped": True,
                "update_timestamp": HXAPI.dt_to_str(datetime.datetime.utcnow()),
            }
        },
    )
def stackJobStop(self, stack_job_eid):
    return self._db_stacking.update_one(
        {"_id": ObjectId(stack_job_eid)},
        {
            "$set": {
                "stopped": True,
                "update_timestamp": HXAPI.dt_to_str(datetime.datetime.utcnow()),
            }
        },
    )
def transform(element):
    if dict_key not in element[dict_name]:
        element[dict_name][dict_key] = dict_values
    elif type(dict_values) is dict:
        element[dict_name][dict_key].update(dict_values)
    else:
        element[dict_name][dict_key] = dict_values
    if update_timestamp and "update_timestamp" in element:
        element["update_timestamp"] = HXAPI.dt_to_str(datetime.datetime.utcnow())
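# The transform functions above are TinyDB update operations: a factory method
# (for example _db_append_to_list, used by alertAddAnnotation below) captures its
# arguments in a closure, and table.update() calls that closure on every matching
# document, mutating it in place. A minimal sketch of the pattern; the factory
# name _db_set_field is hypothetical and only illustrates the shape:
def _db_set_field(field_name, field_value, update_timestamp=True):
    def transform(element):
        element[field_name] = field_value
        if update_timestamp and "update_timestamp" in element:
            element["update_timestamp"] = HXAPI.dt_to_str(datetime.datetime.utcnow())
    return transform

# e.g. table.update(_db_set_field("stopped", True), tinydb.Query()["profile_id"] == profile_id)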
def crypt_aes(key, iv, data, decrypt=False, base64_coding=True):
    cipher = AES.new(key, AES.MODE_OFB, iv)
    if decrypt:
        if base64_coding:
            data = HXAPI.b64(data, True)
        data = cipher.decrypt(data).decode("utf-8")
        # Strip PKCS7-style padding; padding is only applied when the plaintext
        # was not already block-aligned, so valid pad lengths are 1-15
        pad_length = ord(data[-1:])
        if 1 <= pad_length <= 15 and all(c == chr(pad_length) for c in data[-pad_length:]):
            data = data[: len(data) - pad_length]
    else:
        # Apply PKCS7-style padding, skipped when the plaintext already fills a block
        pad_length = 16 - (len(data) % 16)
        if pad_length < 16:
            data += chr(pad_length) * pad_length
        data = data.encode("utf-8")
        data = cipher.encrypt(data)
        if base64_coding:
            data = HXAPI.b64(data)
    return data
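# Round-trip sketch for crypt_aes, assuming a 16-byte key and IV (AES-OFB accepts
# 16/24/32-byte keys and a 16-byte IV) and that HXAPI.b64 base64-encodes by default
# and decodes when its second argument is True, as the function above relies on.
key = b"0123456789abcdef"
iv = b"fedcba9876543210"
ciphertext = crypt_aes(key, iv, "secret data")              # base64-encoded ciphertext
plaintext = crypt_aes(key, iv, ciphertext, decrypt=True)    # -> "secret data"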
def _time_replace(m):
    if m:
        now_time = datetime.datetime.utcnow()
        r = None
        if m.group(1).lower() == "now":
            r = now_time
        elif m.group(3).lower() == "m":
            r = now_time - datetime.timedelta(minutes=int(m.group(2)))
        elif m.group(3).lower() == "h":
            r = now_time - datetime.timedelta(hours=int(m.group(2)))
        return HXAPI.hx_strftime(r)
    return None
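# _time_replace is written as a re.sub() callback: group(1) carries the whole token
# (or the literal "now"), and groups 2/3 carry an optional offset amount and unit
# ("m" or "h"). The real pattern lives elsewhere in the project; the pattern below
# is a hypothetical one that is merely consistent with those groups:
import re
_TIME_TOKEN = re.compile(r"(now|(\d+)([mh]))", re.IGNORECASE)
query = _TIME_TOKEN.sub(_time_replace, "event_at>30m")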
def multiFileStop(self, multi_file_id):
    return self.mongoStripKeys(
        self._db_multi_file.update_one(
            {"_id": ObjectId(multi_file_id)},
            {
                "$set": {
                    "stopped": True,
                    "update_timestamp": HXAPI.dt_to_str(datetime.datetime.utcnow()),
                }
            },
        )
    )
def is_session_valid(*args, **kwargs):
    ret = redirect(url_for("login", redirect_uri=request.full_path))
    if session and "ht_user" in session and "ht_api_object" in session:
        o = HXAPI.deserialize(session["ht_api_object"])
        h = hash(o)
        if o.restIsSessionValid():
            kwargs["hx_api_object"] = o
            ret = f(*args, **kwargs)
            session["ht_api_object"] = o.serialize()
            return ret
        else:
            logger.warn(
                "The HX API token for the current session has expired, redirecting to the login page."
            )
    return ret
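# is_session_valid reads like the inner wrapper of a Flask view decorator: f is the
# wrapped view, and the wrapper injects hx_api_object into its kwargs. A plausible
# enclosing decorator (the name valid_session_required is an assumption):
from functools import wraps

def valid_session_required(f):
    @wraps(f)
    def is_session_valid(*args, **kwargs):
        ...  # body as above
    return is_session_valid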
def stackJobCreate(self, profile_id, bulk_download_eid, stack_type):
    ts = HXAPI.dt_to_str(datetime.datetime.utcnow())
    r = self._db_stacking.insert_one(
        {
            "profile_id": profile_id,
            "bulk_download_eid": bulk_download_eid,
            "stopped": False,
            "stack_type": stack_type,
            "hosts": [],
            "results": [],
            "last_index": None,
            "last_groupby": [],
            "create_timestamp": ts,
            "update_timestamp": ts,
        }
    )
    return r.inserted_id
def alertAddAnnotation(self, profile_id, hx_alert_id, annotation, state, create_user):
    with self._lock:
        return self._db.table("alert").update(
            self._db_append_to_list(
                "annotations",
                {
                    "annotation": annotation,
                    "state": int(state),
                    "create_user": create_user,
                    "create_timestamp": HXAPI.dt_to_str(datetime.datetime.utcnow()),
                },
            ),
            (tinydb.Query()["profile_id"] == profile_id)
            & (tinydb.Query()["hx_alert_id"] == int(hx_alert_id)),
        )
def bulkDownloadCreate(self, profile_id, hostset_name=None, hostset_id=None, task_profile=None):
    ts = HXAPI.dt_to_str(datetime.datetime.utcnow())
    r = self._db_bulk_download.insert_one(
        {
            "profile_id": profile_id,
            "hostset_id": int(hostset_id),
            "hostset_name": hostset_name,
            "hosts": {},
            "task_profile": task_profile,
            "stopped": False,
            "complete": False,
            "create_timestamp": ts,
            "update_timestamp": ts,
        }
    )
    return r.inserted_id
def multiFileCreate(
    self,
    username,
    profile_id,
    display_name=None,
    file_listing_id=None,
    api_mode=False,
):
    ts = HXAPI.dt_to_str(datetime.datetime.utcnow())
    r = self._db_multi_file.insert_one(
        {
            "display_name": display_name or "Unnamed File Request",
            "username": username,
            "profile_id": profile_id,
            "files": [],
            "stopped": False,
            "api_mode": api_mode,
            "create_timestamp": ts,
            "update_timestamp": ts,
            "file_listing_id": file_listing_id,
        }
    )
    return r.inserted_id
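# insert_one() returns a pymongo InsertOneResult; the ObjectId exposed as
# r.inserted_id is what the *Stop() methods above look records up by, re-wrapped in
# ObjectId() when it arrives as a string. Hypothetical usage, assuming db is an
# instance of the class these methods belong to:
# doc_id = db.multiFileCreate("analyst", profile_id="prof-1")
# db.multiFileStop(str(doc_id))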