def _get_requirement(offset, limit, sort, search, requirement):
    """
    Get the requirements used in the rules.

    :param offset: First item to return.
    :param limit: Maximum number of items to return.
    :param sort: Sorts the items. Format: {"fields":["field1","field2"],"order":"asc|desc"}.
    :param search: Looks for items with the specified string.
    :param requirement: Requirement to get ('pci' or 'gdpr').
    :return: Dictionary: {'items': array of items, 'totalItems': Number of items (without applying the limit)}
    :raises OssecAPIException: error 1205 when *requirement* is not 'pci' or 'gdpr'.
    """
    # Membership test replaces the original chained `!= ... and != ...` check.
    if requirement not in ('pci', 'gdpr'):
        raise OssecAPIException(1205, requirement)

    # Distinct requirement tags across all rules (limit=None -> no pagination).
    req = list({req for rule in Rule.get_rules(limit=None)['items']
                for req in rule.to_dict()[requirement]})

    if search:
        req = search_array(req, search['value'], search['negation'])

    if sort:
        req = sort_array(req, order=sort['order'])
    else:
        req = sort_array(req)

    return {'items': cut_array(req, offset, limit), 'totalItems': len(req)}
def join_search(self, cols):
    """
    Join the trades in ``self.ts`` with the extra columns requested in *cols*.

    :param cols: list of extra column names fetched via ``self.search_raw``;
        appended after the base option columns.
    :return: tuple ``(opt, stk)`` of plain 2-D arrays.
        stk columns: RGT, LEG, IDX, STR, SIZ.
        opt columns: SIZ, RGT, LEG, IDX, STR, TNR + the requested cols.
    """
    ts = self.ts
    # Drop zero-size trades and convert the structured array to a plain 2-D array.
    ts = unstruct(ts[np.where(ts["SIZ"]!=0)[0]])
    # Column 0 == -1 marks stock rows; every other row is an option trade.
    stk = ts[np.where(ts[:,0]==-1)[0],:]
    opt = ts[np.where(ts[:,0]!=-1)[0],:]
    opt = utils.sort_array(opt,(3,2,1,0))
    lst = []
    cols = ["IDX","STR","TNR"] + cols
    for l in range(2):  # l: leg index (0 or 1)
        I = opt[np.where(opt[:,1]==l)[0],2]
        K = opt[np.where(opt[:,1]==l)[0],3]
        st = self.search_raw(I,K,cols,l)
        leg = np.zeros((st.shape[0],1))
        leg[:] = l
        st = np.hstack([leg,st])  # join leg
        lst.append(st)
    st = np.vstack(lst)
    # Duplicate every row once per right: leading 1 = calls, 0 = puts.
    st = np.vstack([  # join right
        np.hstack([np.ones((st.shape[0],1)),st]),   # Calls
        np.hstack([np.zeros((st.shape[0],1)),st])   # Puts
    ])
    # Keep only the (RGT, LEG, IDX, STR) combinations actually present in the trades.
    st = st[utils.intersect2d(st[:,(0,1,2,3)],opt[:,(0,1,2,3)]),:]
    st = utils.sort_array(st,(3,2,1,0))
    # NOTE(review): the size join below assumes `st` and `opt` are row-aligned
    # after the identical sort key — confirm against utils.sort_array/intersect2d.
    size = np.expand_dims(opt[:,-1],axis=0).T
    opt = np.hstack([size,st])  # join size
    return opt,stk  # STK: RGT,LEG,IDX,STR,SIZ & OPT: SIZ,RGT,LEG,IDX,STR,TNR + cols requested
def build_trd(self):
    """
    Build the consolidated trade table as a structured numpy array.

    Joins option and stock trades (via ``join_search``), derives per-row
    signed price/greek columns, and returns one structured array with the
    fields listed in ``head`` below.
    """
    # Extra columns requested from join_search; with the base layout
    # (SIZ, RGT, LEG, IDX, STR, TNR) they occupy opt columns 6..20 in this order.
    cols = [
        "CPX", "PPX", "CDL", "PDL", "GAM", "VEG", "CTH", "PTH",
        "VOL", "CHS", "PHS", "CEP", "PEP", "VOM", "VAN"
    ]
    opt, stk = self.join_search(cols)
    opt_LEG = opt[:, 2]
    opt_TNR = opt[:, 5].astype(int)
    opt_SID = np.where(opt[:, 0] < 0, -1, 1)  # side from the sign of SIZ
    opt_RGT = opt[:, 1]
    opt_SIZ = opt[:, 0].astype(int)
    opt_STR = opt[:, 4]
    # Calls (RGT == 1) read the C* column, puts the P* column; values are
    # scaled by the signed position size (price/premium negated).
    opt_PPX = np.where(opt_RGT == 1, opt[:, 6], opt[:, 7]) * -opt_SIZ
    opt_DEL = np.where(opt_RGT == 1, opt[:, 8], opt[:, 9]) * opt_SIZ
    opt_GAM = opt[:, 10] * opt_SIZ
    opt_VEG = opt[:, 11] * opt_SIZ
    opt_THT = np.where(opt_RGT == 1, opt[:, 12], opt[:, 13]) * opt_SIZ
    opt_VOL = opt[:, 14]
    # Spread cost uses -abs(SIZ): always a cost regardless of trade direction.
    opt_SPR = np.where(opt_RGT == 1, opt[:, 15], opt[:, 16]) * -abs(opt_SIZ)
    opt_EEP = np.where(opt_RGT == 1, opt[:, 17], opt[:, 18]) * -opt_SIZ
    opt_VOM = opt[:, 19] * opt_SIZ
    opt_VAN = opt[:, 20] * opt_SIZ
    # Row order here must match `head` below.
    opt_out = np.array([
        opt_LEG, opt_TNR, opt_SID, opt_RGT, opt_SIZ, opt_STR, opt_PPX,
        opt_DEL, opt_GAM, opt_VEG, opt_THT, opt_VOM, opt_VAN, opt_VOL,
        opt_SPR, opt_EEP
    ])
    # Stock rows (stk layout: RGT, LEG, IDX, STR, SIZ); greek fields stock
    # has no value for are zero-filled via stk_nan.
    stk_LEG = stk[:, 1]
    stk_TNR = np.zeros(stk.shape[0]).astype(int)
    stk_SID = np.where(stk[:, 4] < 0, -1, 1)
    stk_RGT = stk[:, 0]
    stk_SIZ = stk[:, 4].astype(int)
    stk_nan = np.zeros(stk.shape[0])
    # NOTE(review): assumes get_raw(0, "SPT", leg)[0] is the spot price for
    # that leg — confirm against get_raw.
    stk_PPX = np.where(stk_LEG == 1,
                       self.get_raw(0, "SPT", 1)[0],
                       self.get_raw(0, "SPT", 0)[0]) * -stk_SIZ
    stk_DEL = stk[:, 4].astype(float)  # stock delta equals its signed size
    stk_out = np.array([
        stk_LEG, stk_TNR, stk_SID, stk_RGT, stk_SIZ, stk_nan, stk_PPX,
        stk_DEL, stk_nan, stk_nan, stk_nan, stk_nan, stk_nan, stk_nan,
        stk_nan, stk_nan
    ])
    out = np.vstack([opt_out.T, stk_out.T])
    # Sort rows by columns 1, 3, 0 (TNR, RGT, LEG per `head`).
    out = utils.sort_array(out, (1, 3, 0))
    head = [
        "LEG", "TNR", "SID", "RGT", "SIZ", "STR", "PPX", "DEL", "GAM",
        "VEG", "THT", "VOM", "VAN", "VOL", "SPR", "EEP"
    ]
    dtyp = [
        int, int, int, int, int, float, float, float, float, float,
        float, float, float, float, float, float
    ]
    dtype = np.dtype([(x, y) for x, y in zip(head, dtyp)])
    return struct(out, dtype=dtype)
# @author lucasmiranda42
# encoding: utf-8
# module MPIP_Docker_workshop
"""Main runfile for the MPIP Docker workshop."""

import numpy as np

from utils import sort_array, binary_search

# Ten random integers in [-10, 10); sorted copy kept for binary search.
example_array = np.random.uniform(-10, 10, 10).astype(int)
sorted_array = sort_array(example_array)  # renamed: do not shadow builtin `sorted`

print("initial array:", example_array)

for _ in range(10):
    # Draw one random integer and report whether it occurs in the array
    # (binary_search returns -1 when the value is absent).
    x = np.random.uniform(-10, 10, 1).astype(int)[0]
    print("The randomly generated value {} is{} in the array".format(
        x, ("" if binary_search(sorted_array, x) != -1 else " not")))
def get_decoders_files(status=None, path=None, file=None, offset=0, limit=common.database_limit, sort=None, search=None):
    """
    Gets a list of the available decoder files.

    :param status: Filters by status: enabled, disabled, all.
    :param path: Filters by path.
    :param file: Filters by filename.
    :param offset: First item to return.
    :param limit: Maximum number of items to return.
    :param sort: Sorts the items. Format: {"fields":["field1","field2"],"order":"asc|desc"}.
    :param search: Looks for items with the specified string.
    :return: Dictionary: {'items': array of items, 'totalItems': Number of items (without applying the limit)}
    :raises OssecAPIException: error 1500 when the ruleset configuration cannot be read.
    """
    status = Decoder.__check_status(status)

    ruleset_conf = configuration.get_ossec_conf(section='rules')
    if not ruleset_conf:
        raise OssecAPIException(1500)

    tmp_data = []
    tags = ['decoder']
    # NOTE(review): never populated, so the S_DISABLED branch below is
    # currently unreachable — confirm whether an exclusion source is missing.
    exclude_filenames = []

    # Decoder files referenced directly in the configuration.
    for tag in tags:
        if tag in ruleset_conf:
            item_status = Decoder.S_ENABLED
            items = ruleset_conf[tag] if type(ruleset_conf[tag]) is list else [ruleset_conf[tag]]
            for item in items:
                if '/' in item:
                    # Path relative to the ossec installation directory.
                    item_split = item.split('/')
                    item_name = item_split[-1]
                    item_dir = "{0}/{1}".format(common.ossec_path, "/".join(item_split[:-1]))
                else:
                    # Bare filename: resolved inside the default ruleset directory.
                    item_name = item
                    item_dir = "{0}/{1}".format(common.ruleset_rules_path, item)
                tmp_data.append({'file': item_name, 'path': item_dir, 'status': item_status})

    # Decoder directories: every *.xml file inside them is a decoder file.
    tag = 'decoder_dir'
    if tag in ruleset_conf:
        items = ruleset_conf[tag] if type(ruleset_conf[tag]) is list else [ruleset_conf[tag]]
        for item_dir in items:
            all_decoders = "{0}/{1}/*.xml".format(common.ossec_path, item_dir)
            for item in glob(all_decoders):
                item_split = item.split('/')
                item_name = item_split[-1]
                item_dir = "/".join(item_split[:-1])
                item_status = Decoder.S_DISABLED if item_name in exclude_filenames else Decoder.S_ENABLED
                tmp_data.append({'file': item_name, 'path': item_dir, 'status': item_status})

    # Single-pass filter replaces the original remove-while-iterating loop (O(n^2)).
    data = [d for d in tmp_data
            if not (status and status != 'all' and status != d['status'])
            and not (path and path != d['path'])
            and not (file and file != d['file'])]

    if search:
        data = search_array(data, search['value'], search['negation'])

    if sort:
        data = sort_array(data, sort['fields'], sort['order'])
    else:
        data = sort_array(data, ['file'], 'asc')

    return {
        'items': cut_array(data, offset, limit),
        'totalItems': len(data)
    }
def get_decoders(status=None, path=None, file=None, name=None, parents=False, offset=0, limit=common.database_limit, sort=None, search=None):
    """
    Gets a list of available decoders.

    :param status: Filters by status: enabled, disabled, all.
    :param path: Filters by path.
    :param file: Filters by file.
    :param name: Filters by name.
    :param parents: Just parent decoders.
    :param offset: First item to return.
    :param limit: Maximum number of items to return.
    :param sort: Sorts the items. Format: {"fields":["field1","field2"],"order":"asc|desc"}.
    :param search: Looks for items with the specified string.
    :return: Dictionary: {'items': array of items, 'totalItems': Number of items (without applying the limit)}
    """
    status = Decoder.__check_status(status)

    # Load every decoder from each configured decoder file that exists.
    all_decoders = []
    for decoder_file in Decoder.get_decoders_files(status=status, limit=None)['items']:
        if glob(decoder_file['path']):
            all_decoders.extend(
                Decoder.__load_decoders_from_file(decoder_file['file'],
                                                  decoder_file['path'],
                                                  decoder_file['status']))

    # Fall back to the default/custom decoder files when nothing was loaded.
    if not all_decoders:
        if glob(common.default_decoder_xml):
            all_decoders.extend(
                Decoder.__load_decoders_from_file(
                    "decoder.xml", common.default_decoder_xml, Decoder.S_ENABLED))
        if glob(common.custom_decoder_xml):
            all_decoders.extend(
                Decoder.__load_decoders_from_file(
                    "local_decoder.xml", common.custom_decoder_xml, Decoder.S_ENABLED))

    # Single-pass filter replaces the original remove-while-iterating loop (O(n^2)).
    # `parents=True` keeps only decoders without a 'parent' entry in details.
    decoders = [d for d in all_decoders
                if not (path and path != d.path)
                and not (file and file != d.file)
                and not (name and name != d.name)
                and not (parents and 'parent' in d.details)]

    if search:
        decoders = search_array(decoders, search['value'], search['negation'])

    if sort:
        decoders = sort_array(decoders, sort['fields'], sort['order'], Decoder.SORT_FIELDS)
    else:
        decoders = sort_array(decoders, ['file', 'position'], 'asc')

    return {
        'items': cut_array(decoders, offset, limit),
        'totalItems': len(decoders)
    }
# Threshold each likelihood map, turn it into a binary brain mask, and write
# out the masked T2WI volume for every (likelihood, image) pair.
best_lh = 0.750  # likelihood cutoff below which voxels are zeroed

likelihoods.sort()
images.sort()
list_len = len(likelihoods)

for i, (likelihood_path, image) in enumerate(zip(likelihoods, images), 1):
    print(likelihood_path)
    print(image)
    print(f'File {i}/{list_len}')

    # Load the anatomical volume and its likelihood map as numpy arrays.
    t2wi = sitk.ReadImage(image)
    t2wi_array = sitk.GetArrayFromImage(t2wi)
    likelihood = sitk.ReadImage(likelihood_path)
    likelihood_array = sitk.GetArrayFromImage(likelihood)

    # Binarize: zero out sub-threshold voxels, then set the rest to 1.
    likelihood_array[best_lh > likelihood_array] = 0
    likelihood_array[likelihood_array > 0] = 1

    # NOTE(review): sort_array here appears to post-process the binary map —
    # confirm its semantics against its definition.
    mask = sort_array(likelihood_array) == 1

    # Zero every voxel outside the mask and write the result with the
    # original spacing preserved.
    t2wi_array[~mask] = 0
    out_t2wi_img = sitk.GetImageFromArray(t2wi_array)
    out_t2wi_img.SetSpacing(t2wi.GetSpacing())
    output_string = 'G:/masked-brains/day01/' + image.split('\\')[1] + '_masked-img.nii'
    sitk.WriteImage(out_t2wi_img, output_string)
    print(image.split('\\')[1] + ' is completed!')
def get_rules(status=None, group=None, pci=None, gdpr=None, path=None, file=None, id=None, level=None, offset=0, limit=common.database_limit, sort=None, search=None):
    """
    Gets a list of rules.

    :param status: Filters by status: enabled, disabled, all.
    :param group: Filters by group.
    :param pci: Filters by pci requirement.
    :param gdpr: Filter by gdpr requirement.
    :param file: Filters by file of the rule.
    :param path: Filters by file of the path.
    :param id: Filters by rule ID.
    :param level: Filters by level. It can be an integer or a range (i.e. '2-4' that means levels from 2 to 4).
    :param offset: First item to return.
    :param limit: Maximum number of items to return.
    :param sort: Sorts the items. Format: {"fields":["field1","field2"],"order":"asc|desc"}.
    :param search: Looks for items with the specified string.
    :return: Dictionary: {'items': array of items, 'totalItems': Number of items (without applying the limit)}
    :raises OssecAPIException: error 1203 when *level* is not 'n' or 'n-m'.
    """
    levels = []
    if level:
        levels = level.split('-')
        # Original code checked `len(levels) < 0` (never true) and let
        # non-numeric parts crash later with ValueError; validate both here.
        if not 1 <= len(levels) <= 2 or not all(l.isdigit() for l in levels):
            raise OssecAPIException(1203)
        levels = [int(l) for l in levels]

    all_rules = []
    for rule_file in Rule.get_rules_files(status=status, limit=None)['items']:
        all_rules.extend(Rule.__load_rules_from_file(rule_file['file'],
                                                     rule_file['path'],
                                                     rule_file['status']))

    def _keep(r):
        """Return True when rule *r* passes every active filter."""
        if group and group not in r.groups:
            return False
        if pci and pci not in r.pci:
            return False
        if gdpr and gdpr not in r.gdpr:
            return False
        if path and path != r.path:
            return False
        if file and file != r.file:
            return False
        if id and int(id) != r.id:
            return False
        if level:
            if len(levels) == 1:
                if levels[0] != r.level:
                    return False
            elif not (levels[0] <= r.level <= levels[1]):
                return False
        return True

    # Single-pass filter replaces the original remove-while-iterating loop (O(n^2)).
    rules = [r for r in all_rules if _keep(r)]

    if search:
        rules = search_array(rules, search['value'], search['negation'])

    if sort:
        rules = sort_array(rules, sort['fields'], sort['order'], Rule.SORT_FIELDS)
    else:
        rules = sort_array(rules, ['id'], 'asc')

    return {'items': cut_array(rules, offset, limit), 'totalItems': len(rules)}
def ossec_log(type_log='all', category='all', months=3, offset=0, limit=common.database_limit, sort=None, search=None):
    """
    Gets logs from ossec.log.

    :param type_log: Filters by log type: all, error or info.
    :param category: Filters by log category (i.e. ossec-remoted).
    :param months: Returns logs of the last n months. By default is 3 months.
    :param offset: First item to return.
    :param limit: Maximum number of items to return.
    :param sort: Sorts the items. Format: {"fields":["field1","field2"],"order":"asc|desc"}.
    :param search: Looks for items with the specified string.
    :return: Dictionary: {'items': array of items, 'totalItems': Number of items (without applying the limit)}
    """
    logs = []

    first_date = previous_month(months)
    # Sentinel used to collapse repeated statfs errors into a single entry.
    statfs_error = "ERROR: statfs('******') produced error: No such file or directory"

    # Only the last 2000 lines of ossec.log are considered.
    for line in tail(common.ossec_log, 2000):
        log_fields = __get_ossec_log_fields(line)
        if log_fields:
            log_date, log_category, level, description = log_fields

            # Skip entries older than the requested time window.
            if log_date < first_date:
                continue

            if category != 'all':
                if log_category:
                    if log_category != category:
                        continue
                else:
                    # Uncategorized lines never match a specific category.
                    continue

            log_line = {'timestamp': str(log_date), 'tag': log_category, 'level': level, 'description': description}

            if type_log == 'all':
                logs.append(log_line)
            elif type_log.lower() == level.lower():
                if "ERROR: statfs(" in line:
                    # Deduplicate statfs errors: append the sentinel once.
                    # NOTE(review): this appends a plain string into a list of
                    # dicts; the continuation branch below and sort_array then
                    # operate on mixed types — confirm downstream handling.
                    if statfs_error in logs:
                        continue
                    else:
                        logs.append(statfs_error)
                else:
                    logs.append(log_line)
            else:
                continue
        else:
            # Line without a parseable header: treat it as a continuation of
            # the previous entry's description.
            if logs != []:
                logs[-1]['description'] += "\n" + line

    if search:
        logs = search_array(logs, search['value'], search['negation'])

    if sort:
        if sort['fields']:
            logs = sort_array(logs, order=sort['order'], sort_by=sort['fields'])
        else:
            logs = sort_array(logs, order=sort['order'], sort_by=['timestamp'])
    else:
        # Default: newest entries first.
        logs = sort_array(logs, order='desc', sort_by=['timestamp'])

    return {'items': cut_array(logs, offset, limit), 'totalItems': len(logs)}
for label_path, likelihood_path, mask in zip(labels, likelihoods, masks): ground_truth = sitk.ReadImage(mask) likelihood = sitk.ReadImage(likelihood_path) label = sitk.ReadImage(label_path) ground_truth_array = sitk.GetArrayFromImage(ground_truth) label_array = sitk.GetArrayFromImage(label) likelihood_array = sitk.GetArrayFromImage(likelihood) dice_list = [] print('Calculating dice scores!') for i in np.arange(0, 1, 0.01): likelihood_array_copy = np.copy(likelihood_array) likelihood_array_copy[i > likelihood_array_copy] = 0 likelihood_array_copy[likelihood_array_copy > 0] = 1 likelihood_array_copy = sort_array(likelihood_array_copy) dice = dice_coef_np(ground_truth_array, likelihood_array_copy) dice_list.append(dice) print('Dices scores calculated for ' + label_path) dices.append(dice_list) df = pd.DataFrame.from_records(dices) df = df.T mean = df.mean(axis=1) stdev = df.std(axis=1) t = np.arange(0, 1, 0.01) fig, ax = plt.subplots(1) ax.plot(t, mean, lw=2, color='indigo') plt.ylim([0.7, 1.05]) plt.xlim([-0.005, 1.006]) text_str = 'Max Dice Score: ' + str(np.round(mean.max(), 4)) + u"\u00B1" + str(