def initialize_edl_context(params: dict):
    """Build the EDL request arguments from the integration params, allocate a
    fresh on-demand cache file and persist everything to the integration context.
    """
    global EDL_ON_DEMAND_CACHE_PATH

    edl_limit = try_parse_integer(params.get('edl_size'), EDL_LIMIT_ERR_MSG)
    indicators_query = params.get('indicators_query', '')
    ip_collapse = params.get('collapse_ips', DONT_COLLAPSE)
    strip_port = params.get('url_port_stripping', False)
    strip_protocol = params.get('url_protocol_stripping', False)
    invalid_drop = params.get('drop_invalids', False)
    comment_if_empty = params.get('add_comment_if_empty', True)
    mwg = params.get('mwg_type', "string")
    default_category = params.get('category_default', 'bc_category')
    attribute_category = params.get('category_attribute', '')
    presented_fields = params.get('fields_filter', '')
    output_format = params.get('format', FORMAT_TEXT)
    as_csv_text = argToBoolean(params.get('csv_text', False))
    truncate_urls = params.get('url_truncate', False)

    # workaround for "msgpack: invalid code" error
    if params.get('use_legacy_query'):
        presented_fields = 'use_legacy_query'

    request_args = RequestArguments(indicators_query,
                                    output_format,
                                    edl_limit,
                                    0,  # offset
                                    strip_port,
                                    invalid_drop,
                                    ip_collapse,
                                    comment_if_empty,
                                    mwg,
                                    default_category,
                                    attribute_category,
                                    presented_fields,
                                    as_csv_text,
                                    strip_protocol,
                                    truncate_urls)

    EDL_ON_DEMAND_CACHE_PATH = demisto.uniqueFile()
    ctx = request_args.to_context_json()
    ctx[EDL_ON_DEMAND_KEY] = True
    set_integration_context(ctx)
def start_and_return_bigquery_client(google_service_creds_json_string):
    """Write the service-account JSON to a credentials file, point
    GOOGLE_APPLICATION_CREDENTIALS at it and return a BigQuery client.

    :param google_service_creds_json_string: service-account credentials as a JSON string
    :return: an authenticated ``bigquery.Client``
    """
    cur_directory_path = os.getcwd()
    creds_file_name = '{0}.json'.format(demisto.uniqueFile())
    path_to_save_creds_file = os.path.join(cur_directory_path, creds_file_name)
    # json.loads validates the credentials string before anything hits disk
    with open(path_to_save_creds_file, "w") as creds_file:
        json.dump(json.loads(google_service_creds_json_string), creds_file)
    # FIX: the redundant creds_file.close() was removed -- the `with` block
    # already closes the file, and it must be closed before the SDK reads it.
    # The google-cloud SDK discovers credentials via this environment variable.
    os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = path_to_save_creds_file
    bigquery_client = bigquery.Client()
    return bigquery_client
def _get_client(self):
    """Create a Translation API client from the stored service account."""
    # The SDK does not support proxies, so clear/handle proxy settings first.
    handle_proxy()
    creds_path = os.path.join(os.getcwd(), demisto.uniqueFile() + '.json')
    with open(creds_path, 'w') as creds_file:
        json.dump(self.service_account, creds_file)
    return translate_v3.TranslationServiceClient.from_service_account_json(creds_path)
def main():
    """Entry point for the PCAP-mining script.

    Reads script args (entry IDs, WPA password, optional RSA decrypt key,
    protocol/regex filters), mines the PCAP file and returns the results.
    When ``filtered_file_name`` is given, also returns the filtered capture
    as a file entry.
    """
    args = demisto.args()
    entry_id = args.get('entry_id', '')
    file_path = demisto.getFilePath(entry_id).get('path')
    wpa_password = args.get('wpa_password', '')
    rsa_decrypt_key_entry_id = args.get('rsa_decrypt_key_entry_id', '')
    rsa_key_file_path = None
    if rsa_decrypt_key_entry_id:
        # resolve the uploaded RSA key entry to a local file path
        rsa_key_file_path = demisto.getFilePath(rsa_decrypt_key_entry_id).get(
            'path')
    conversation_number_to_display = int(args.get('convs_to_display', '15'))
    extracted_protocols = argToList(args.get('protocol_output', ''))
    if 'All' in extracted_protocols:
        extracted_protocols = ALL_SUPPORTED_PROTOCOLS
    is_flows = True
    is_reg_extract = args.get('extract_strings', 'False') == 'True'
    pcap_filter = args.get('pcap_filter', '')
    homemade_regex = args.get('custom_regex', '')  # e.g. 'Layer (.+):'
    pcap_filter_new_file_path = ''
    pcap_filter_new_file_name = args.get('filtered_file_name', '')
    unique_ips = args.get('extract_ips', 'False') == 'True'
    if pcap_filter_new_file_name:
        # allocate a unique file so the filtered capture can be returned below
        temp = demisto.uniqueFile()
        pcap_filter_new_file_path = demisto.investigation()['id'] + '_' + temp
    try:
        pcap = PCAP(is_reg_extract, extracted_protocols, homemade_regex, unique_ips, entry_id)
        pcap.mine(file_path, wpa_password, rsa_key_file_path, is_flows,
                  is_reg_extract, pcap_filter, pcap_filter_new_file_path)
        hr, ec, raw = pcap.get_outputs(conversation_number_to_display, is_flows, is_reg_extract)
        return_outputs(hr, ec, raw)
    except Exception as e:
        return_error(f'Unexpected error: {str(e)}', error=traceback.format_exc())
    if pcap_filter_new_file_name:
        # return the filtered pcap written by pcap.mine() as a file entry
        demisto.results({
            'Contents': '',
            'ContentsFormat': formats['text'],
            'Type': 3,  # file entry type
            'File': pcap_filter_new_file_name,
            'FileID': temp
        })
def initialize_edl_context(params: dict):
    """Persist the EDL request arguments to the integration context and
    allocate a fresh on-demand cache file."""
    global EDL_ON_DEMAND_CACHE_PATH

    size_limit = try_parse_integer(params.get('edl_size'), EDL_LIMIT_ERR_MSG)
    indicators_query = params.get('indicators_query', '')
    ip_collapse = params.get('collapse_ips', DONT_COLLAPSE)
    strip_port = params.get('url_port_stripping', False)
    invalid_drop = params.get('drop_invalids', False)
    comment_if_empty = params.get('add_comment_if_empty', True)

    request_args = RequestArguments(indicators_query, size_limit, 0,  # offset
                                    strip_port, invalid_drop, ip_collapse,
                                    comment_if_empty)

    EDL_ON_DEMAND_CACHE_PATH = demisto.uniqueFile()
    ctx = request_args.to_context_json()
    ctx[EDL_ON_DEMAND_KEY] = True
    set_integration_context(ctx)
def _init_kms_client(self):
    """Creates the Python API client for Google Cloud KMS using service account credentials."""
    # FIX: parse the service-account JSON exactly once and reuse the result
    # (the original called json.loads(str(self.service_account)) twice).
    service_account_dict = json.loads(str(self.service_account))
    if not isinstance(service_account_dict, dict):
        raise Exception(
            "Service Account json is not formatted well. You need to change the json file."
        )
    credentials_file_name = demisto.uniqueFile() + '.json'
    credentials_file_path = os.path.join(os.getcwd(), credentials_file_name)
    with open(credentials_file_path, 'w') as creds_file:
        json.dump(service_account_dict, creds_file)
    return kms.KeyManagementServiceClient.from_service_account_json(
        credentials_file_path)
def edl_dump_internal_list_command():
    """
    Dumps an instance context list to either a file or incident context
    """
    import os  # local import: only needed for temp-file cleanup below

    destination = demisto.args().get('destination')
    list_name = demisto.args().get('list_name')
    dict_of_lists = demisto.getIntegrationContext()
    list_data = dict_of_lists.get(list_name, None)
    if not list_data:
        demisto.results({
            'Type': 11,  # NOTE(review): looks like a warning entry type -- confirm
            'Contents': 'List was not found in instance context or has no data.',
            'ContentsFormat': formats['text']
        })
        sys.exit(0)
    if destination == 'file':
        # dump list as file
        internal_file_path = demisto.uniqueFile()
        try:
            with open(internal_file_path, 'w') as f:
                f.write("\n".join(list_data))
            file_type = entryTypes['entryInfoFile']
            with open(internal_file_path, 'rb') as file:
                file_entry = fileResult(internal_file_path, file.read(), file_type)
            demisto.results(file_entry)
        finally:
            # BUG FIX: shutil.rmtree only removes directories; on a regular
            # file it raises (silenced by ignore_errors=True), so the temp
            # file was never actually deleted. Remove the file explicitly.
            try:
                os.remove(internal_file_path)
            except OSError:
                pass
    else:
        # update incident context
        md = tableToMarkdown('List items:', list_data, headers=[list_name])
        ec = {
            'ListName': list_name,
            'ListItems': list_data
        }
        demisto.results({
            'Type': entryTypes['note'],
            'Contents': md,
            'ContentsFormat': formats['markdown'],
            'EntryContext': {
                "PANOSEDL(val.ListName == obj.ListName)": ec
            }
        })
def main():
    """Entry point for the file-conversion script.

    Converts the war-room file given by ``entry_id`` to ``format`` (default
    pdf) and returns each produced file as a file entry, renaming outputs to
    match the original entry name where possible.
    """
    entry_id = demisto.args()["entry_id"]
    out_format = demisto.args().get('format', 'pdf')
    all_files = demisto.args().get('all_files', 'no') == 'yes'
    # URLS
    try:
        result = demisto.getFilePath(entry_id)
        if not result:
            return_error("Couldn't find entry id: {}".format(entry_id))
        demisto.debug('going to convert: {}'.format(result))
        file_path = result['path']
        # base name of the on-disk file, without extension, used for renaming below
        file_path_name_only = os.path.splitext(os.path.basename(file_path))[0]
        file_name = result.get('name')
        if file_name:  # remove the extension
            file_name = os.path.splitext(file_name)[0]
        with tempfile.TemporaryDirectory() as outdir:
            files = convert_file(file_path, out_format, all_files, outdir)
            if not files:
                return_error(
                    'No file result returned for convert format: {}'.format(
                        out_format))
                return
            for f in files:
                # copy each converted file into the investigation's storage
                temp = demisto.uniqueFile()
                shutil.copy(f, demisto.investigation()['id'] + '_' + temp)
                name = os.path.basename(f)
                if file_name:
                    # present the output under the original entry's display name
                    name = name.replace(file_path_name_only, file_name)
                demisto.results({
                    'Contents': '',
                    'ContentsFormat': formats['text'],
                    'Type': entryTypes['file'],
                    'File': name,
                    'FileID': temp
                })
    except subprocess.CalledProcessError as e:
        # conversion subprocess failed -- surface its captured output
        return_error("Failed converting file. Output: {}. Error: {}".format(
            e.output, e))
    except Exception as e:
        return_error(
            "Failed converting file. General exception: {}.\n\nTrace:\n{}".
            format(e, traceback.format_exc()))
def test_get_pcap(mocker: MockerFixture):
    """get_pcap should return the file entry produced by executeCommand as-is."""
    alert_id = 1
    expected_entry = {
        'Contents': f'alert_{alert_id}_sniff.pcap',
        'ContentsFormat': 'text',
        'Type': EntryType.ENTRY_INFO_FILE,
        'File': EntryFormat.TEXT,
        'FileID': demisto.uniqueFile(),
    }
    mocker.patch.object(
        demisto, 'incident',
        return_value={'CustomFields': {'alertid': '1'}},
    )
    mocker.patch.object(demisto, 'executeCommand', return_value=expected_entry)

    assert get_pcap() == expected_entry
def blacklist_to_entry(data, saveToContext):
    """Build a war-room file entry (Blacklist.csv) from blacklist records.

    :param data: a record dict or list of record dicts, each with an "ipAddress" key
    :param saveToContext: when truthy, also store the IP list under AbuseIPDB context
    :return: a Demisto entry dict carrying the CSV file and optional context
    """
    if not isinstance(data, list):
        data = [data]
    ips = [d.get("ipAddress") for d in data]
    context = {"Blacklist": ips}
    temp = demisto.uniqueFile()
    # BUG FIX: csv.writer writes str, so the file must be opened in text mode
    # with newline='' (per the csv module docs); the original 'wb' binary mode
    # raises TypeError on Python 3.
    with open(demisto.investigation()['id'] + '_' + temp, 'w', newline='') as f:
        wr = csv.writer(f, quoting=csv.QUOTE_ALL)
        for ip in ips:
            wr.writerow([ip])
    entry = {
        'HumanReadable': '',
        'Contents': ips,
        'ContentsFormat': formats['json'],
        'Type': entryTypes['file'],
        'File': "Blacklist.csv",
        'FileID': temp,
        'EntryContext': {
            'AbuseIPDB': createContext(context if saveToContext else None, removeNull=True)
        }
    }
    return entry
def hash_list_to_file(hash_list):
    """Write the given hashes to a unique temp file, one per line
    (no trailing newline), and return the path in a single-element list."""
    out_path = demisto.uniqueFile()
    payload = "\n".join(hash_list)
    with open(out_path, 'w') as out_file:
        out_file.write(payload)
    return [out_path]
def main():
    """Zip one or more war-room files (by entry ID) into a single archive.

    Reads ``entryID`` (required, may be a list), optional ``zipName`` and
    optional ``password`` from the script args, copies each source file into
    the working directory, zips them (password-protected via pyminizip when a
    password is given) and returns the archive as a file entry plus
    ``ZippedFiles`` context.
    """
    try:
        # in order to support compression of the file
        compression = zipfile.ZIP_DEFLATED
    except Exception:
        # zlib unavailable -- fall back to an uncompressed (stored) archive
        compression = zipfile.ZIP_STORED
    try:
        args = demisto.args()
        zipName = None
        password = None
        fileEntryID = args.get('entryID')
        if 'zipName' in args:
            zipName = escape_illegal_characters_in_file_name(demisto.args().get('zipName')) + '.zip'
        if 'password' in args:
            password = demisto.args().get('password')
        if not fileEntryID:
            raise DemistoException('You must set an entryID when using the zip script')
        entry_ids = argToList(fileEntryID)
        file_names = list()
        for entry_id in entry_ids:
            res = demisto.executeCommand('getFilePath', {'id': entry_id})
            if is_error(res):
                raise DemistoException(
                    'Failed to get the file path for entry: ' + entry_id + ' the error message was ' + get_error(res))
            filePath = res[0]['Contents']['path']
            fileCurrentName = escape_illegal_characters_in_file_name(res[0]['Contents']['name'])
            if not isfile(filePath):  # in case that the user will send a directory
                raise DemistoException(entry_id + ' is not a file. Please recheck your input.')
            # Handling duplicate names: append " 1", " 2", ... before the extension.
            if fileCurrentName in file_names:
                name, ext = os.path.splitext(fileCurrentName)
                i = 0
                while fileCurrentName in file_names:
                    i += 1
                    fileCurrentName = f'{name} {i}{ext}'
            # copying the file to current location
            shutil.copy(filePath, fileCurrentName)
            file_names.append(fileCurrentName)

        if not zipName:
            # Preserving old behaviour. If only one file provided - will use its name .zip
            # Else will use a uuid.
            if len(file_names) == 1:
                fileCurrentName = file_names[0]
            else:
                fileCurrentName = demisto.uniqueFile()
            zipName = fileCurrentName + '.zip'

        # zipping the file
        if password:
            # pyminizip builds password-protected archives; 5 is the compression level
            pyminizip.compress_multiple(file_names, ['./'] * len(file_names), zipName, password, 5)
        else:
            zf = zipfile.ZipFile(zipName, mode='w')
            try:
                for file_name in file_names:
                    zf.write(file_name, compress_type=compression)
                # testing for file integrity
                ret = zf.testzip()
                if ret is not None:
                    raise DemistoException('There was a problem with the zipping, file: ' + ret + ' is corrupted')
            finally:
                zf.close()

        with open(zipName, 'rb') as f:
            file_data = f.read()
        demisto.results(fileResult(zipName, file_data))
        human_readable = tableToMarkdown(
            'Zipped Files', [{'original name': file_names, 'zipped file': zipName}])
        context: Dict[str, Any] = {
            'ZippedFiles': zipName,
            'ZipFile.ZippedFile': zipName
        }
        # mark each input entry's File context object as zipped
        for entry_id in entry_ids:
            context[f'File(val.EntryID == {entry_id}).zipped'] = True
        raw_response = {'ZippedFiles': zipName}
        return_outputs(human_readable, context, raw_response)
    except Exception as exc:
        return_error(exc)