def read(self, input_file, read_intestation=False):
    """
    Import the dataset from file.

    :param string input_file: the name of the file.
    :param bool read_intestation: whether to also read the file header.
    """
    data, self.intestation = read_file(input_file, read_intestation)
    self.data = []
    for element in data:
        # The last column contains "field:value" pairs, one per line.
        info = [x.split(':') for x in element[-1].split('\n')]
        file_fields = [row[0] for row in info]
        d = dict()
        for valid_field in valid_fields:
            try:
                # Fuzzy-match the expected field name against the names found in the file.
                new_field = get_close_matches(valid_field, file_fields, n=1, cutoff=0.7)
                index = file_fields.index(new_field[0])
                value = info[index][1]
            except (IndexError, ValueError):
                value = ""
            d[valid_field] = value
        self.data.append(element[:-1] + [d])
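# Hedged illustration (not part of the original class): it only shows how
# difflib.get_close_matches, used above, maps noisy field names from a file
# onto the expected valid fields. The field names below are made up; note
# that the matching is case-sensitive.
from difflib import get_close_matches

expected_fields = ['title', 'author', 'year']
file_fields = ['tittle', 'autor', 'yearr']

for field in expected_fields:
    match = get_close_matches(field, file_fields, n=1, cutoff=0.7)
    print(field, '->', match[0] if match else '(no match)')
# title -> tittle, author -> autor, year -> yearr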
def merge_scripts_in_order(script_list, parent_folder, w_file):
    for h_file in script_list:
        file_path = search_file_in_folder(h_file, parent_folder)
        if file_path is not None and os.path.isfile(file_path):
            context = fh.read_file(file_path)
            context = "\r\n" + context
            fh.write_file(w_file, context, 'utf-8', 'a')
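# Hedged sketch of the lookup helper used above; the original
# search_file_in_folder is not shown here, so a simple recursive search with
# os.walk is assumed purely for illustration.
import os

def search_file_in_folder(file_name, parent_folder):
    for dirpath, _dirnames, filenames in os.walk(parent_folder):
        if file_name in filenames:
            return os.path.join(dirpath, file_name)
    return None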
def request_for_posts_dict(api, user):
    # Build a single search query from the keyword list.
    search = ''
    words = file_handler.read_file('support_files/keywords.txt')
    for word in words:
        search = search + ' ' + word
    parsed = api.wall.search(owner_id=user, query=search)
    for post in parsed['items']:
        print(post['id'], "processed.")
        db_handler.db_save_users_posts(post)
def parse_users_posts_dict():
    api = pars_posts_api_init()
    new_users_list = file_handler.read_file('support_files/users_id.txt')
    for user in new_users_list:
        try:
            request_for_posts_dict(api, user)
        except Exception as e:
            # Log the error, wait briefly and retry once.
            print(e)
            time.sleep(2)
            request_for_posts_dict(api, user)
def merge_views(script_list, folder, w_file):
    # Merge the explicitly ordered scripts first.
    merge_scripts_in_order(script_list, folder, w_file)
    files = os.listdir(folder)
    for file in files:
        if file in script_list:
            continue  # already merged above
        f = os.path.join(folder, file)
        if os.path.isfile(f):
            context = fh.read_file(f)
            context = "\r\n" + context
            fh.write_file(w_file, context, 'utf-8', 'a')
def parse_users_data():
    conf = file_handler.pars_secure_config()
    api = vk_requests.create_api(service_token=conf.get('app', 'token'), interactive=True)
    users_list = file_handler.read_file('support_files/users_id.txt')
    new_users_list = []
    try:
        request_for_user(api, users_list, new_users_list)
    except Exception as e:
        print(e)
        time.sleep(1)
        request_for_user(api, users_list, new_users_list)
    with open('support_files/users_id.txt', 'w') as f:
        for line in new_users_list:
            f.write(str(line) + '\n')
def pars_id_group():
    start_time = time.time()
    group_list = file_handler.read_file('support_files/group_id.txt')
    parsed = []
    filtered = []
    for group in group_list:
        try:
            parsed = parsed + parsers.parse_group_member_ids(group)
        except Exception as e:
            print(e)
    # Drop duplicate ids while keeping the original order.
    for user in parsed:
        if user not in filtered:
            filtered.append(user)
    with open('support_files/users_id.txt', 'w') as f:
        for line in filtered:
            f.write(str(line) + '\n')
    text.insert(1.0, ("--- %s seconds ---\n" % (time.time() - start_time)))
    text.insert(1.0, "User IDs collected from the groups! Collection time:\n")
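# Hedged alternative (not in the original): for large member lists the
# membership test above is O(n^2); dict.fromkeys keeps the first occurrence of
# each id and preserves order in a single pass. The ids below are made up.
parsed = [101, 202, 101, 303, 202]
filtered = list(dict.fromkeys(parsed))  # [101, 202, 303]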
def parse_users_posts_count(n):
    api = pars_posts_api_init()
    new_users_list = file_handler.read_file('support_files/users_id.txt')
    try:
        n = int(n)
    except Exception as e:
        print(e)
        return
    for user in new_users_list:
        try:
            request_for_posts_count(api, user, n)
        except Exception as e:
            if "error_code=18" in str(e):
                # User page is unavailable: log and skip.
                print(e)
            elif "error_code=6" in str(e):
                # Too many requests: wait briefly and retry once.
                print(e)
                time.sleep(2)
                request_for_posts_count(api, user, n)
            else:
                print(e)
import file_handler
import random
import FCM
import numpy as np

if __name__ == '__main__':
    data = file_handler.read_file('input_data/2clstrain1200.csv')
    input_size = len(data)
    train_data = []
    test_data = []

    # Random points for the final plot; the grid is padded slightly beyond the
    # data range X:[-5, 17.5], Y:[-2.5, 15].
    X_rand, Y_rand = FCM.produce_random_num(-8, 19, -4, 17, 2000)
    random_input = np.concatenate((X_rand, Y_rand), axis=0)

    # Split the data 70/30 into train and test sets (positional split; a random
    # split would be preferable, see the sketch below).
    train_size = int(70 * input_size / 100)
    test_size = input_size - train_size
    train_data = [data[i] for i in range(test_size, input_size)]
    test_data = [data[i] for i in range(0, test_size)]

    train_data_no_label = []
    train_labels = []
    test_data_no_label = []
    test_labels = []
    for i in range(len(train_data)):
        train_data_no_label.append([train_data[i][0], train_data[i][1]])
        train_labels.append(train_data[i][2])
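# Hedged sketch (not from the original script): the split above is purely
# positional even though the intent is a random 70/30 train/test split.
# Shuffling the indices first gives that; only the stdlib random module
# (already imported above) is assumed.
def random_split(data, train_fraction=0.7, seed=42):
    indices = list(range(len(data)))
    random.Random(seed).shuffle(indices)
    cut = int(train_fraction * len(data))
    train = [data[i] for i in indices[:cut]]
    test = [data[i] for i in indices[cut:]]
    return train, test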
# Determine the instance number from the folder name.
instancia = '1'
if '2/1' in l4:
    instancia = '2'
if '3/1' in l4:
    instancia = '3'
if '4/1' in l4:
    instancia = '4'
if '5/1' in l4:
    instancia = '5'
if '6/1' in l4:
    instancia = '6'

for file_name in file_names:
    file_ref = '../{0}/{1}/{2}/{3}/{4}'.format(l1, l2, l3, l4, file_name)
    file = file_handler.read_file(file_ref)
    # Variables: paper, initialization, size & instance
    ultimo_tiempo = ''
    val_greedy = 0
    val_final = 0
    if file != []:
        chart_encontre_tiempo_final = False
        for num, line in enumerate(file, 1):
            if pref_generacion in line:
                # Format: ['GEN', '10000', '404.130000']
                splitted_line = line.split('\n')[0].split(' ')
                if splitted_line[1] == '0':
                    val_greedy = float(splitted_line[2])
def kill(task_id):
    todos = [todo for todo in read_file() if todo['status'] == 0]
    todos[int(task_id) - 1]['status'] = 1
    write_file(todos)
    print('cool!\nTasks left:')
    show(todos)
def list():
    todos = read_file()
    if not todos:
        print('Cool! You have no extra tasks!')
        return
    show(todos)
def add(task_detail):
    todos = read_file()
    todos.append({'task_detail': task_detail, 'status': 0})
    write_file(todos)
    print('got it.')
def completed_todos():
    todos = read_file()
    return [todo for todo in todos if todo['status'] == 1]
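# Hedged sketch of the storage helpers the commands above rely on; the original
# read_file()/write_file() are not shown here, so a JSON file (the path below
# is hypothetical) is assumed purely for illustration.
import json
import os

TODO_FILE = 'todos.json'

def read_file():
    if not os.path.exists(TODO_FILE):
        return []
    with open(TODO_FILE) as f:
        return json.load(f)

def write_file(todos):
    with open(TODO_FILE, 'w') as f:
        json.dump(todos, f, indent=2)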
def merge_all_scripts():
    # Absolute path to the database scripts in the trunk.
    trunk_dir = root_dir + '\\UniversalDatabasesCodes\\Databases'
    # Folders are merged in this order.
    child_folders = ('Databases', 'Tables', 'PrimaryKeys', 'ForeignKeys', 'Indexes', 'Enums',
                     'UserDefinedDataTypes', 'SpecialScripts', 'UserFunctions', 'Views',
                     'StoredProcedures', 'Triggers', 'Constraints', 'UniqueKeys')
    script_name = 'DB_Deployment_v1.0.0.sql'
    # Views that must be merged before the rest of the Views folder.
    view_list = [
        "dbo.adhoc_Phone.view.sql",
        "dbo.adhoc_Address.view.sql",
        "dbo.adhoc_Customer.view.sql",
        "dbo.vw_AnnualIssuance_CustomerMailing.view.sql"
    ]
    # Scripts moved into SpecialScripts and merged in this exact order.
    moved_files = [
        trunk_dir + '\\UserFunctions\\dbo.udf_GetCustomerName.function.sql',
        trunk_dir + '\\Views\\dbo.vw_ControlledInventory_OutletStatusCount.view.sql',
        trunk_dir + '\\Views\\dbo.vw_ConfigValueComplete_Effective.view.sql',
        trunk_dir + '\\Views\\dbo.adhoc_license.view.sql',
        trunk_dir + '\\Views\\dbo.vw_CurrentCustomerVesselName.view.sql',
        trunk_dir + '\\Views\\dbo.vw_CustomerVesselAllOwnership.view.sql',
        trunk_dir + '\\Views\\dbo.vw_CustomerAddress.view.sql',
        trunk_dir + '\\Views\\dbo.vw_StockOnHand.view.sql',
        trunk_dir + '\\Views\\dbo.vw_CustomerVesselCurrentOwnership.view.sql',
        trunk_dir + '\\Views\\dbo.vw_LEPermitRelatedCustomer.view.sql',
        trunk_dir + '\\Views\\dbo.vw_LifetimeAnnualIssuanceCustomerSearch_Live.view.sql',
        trunk_dir + '\\Views\\dbo.Diag_ItemFeeDist_CompleteTotals.view.sql',
        trunk_dir + '\\Views\\dbo.vw_ActiveAddress.view.sql',
        trunk_dir + '\\UserFunctions\\dbo.udf_GetCustomerIdentity.function.sql',
        trunk_dir + '\\Views\\dbo.vw_rpt_CustomerBusiness.view.sql',
        trunk_dir + '\\Views\\dbo.vw_AnnualIssuance_CustomerPurchase.view.sql',
        trunk_dir + '\\Views\\dbo.vw_Item_LifetimeEnablingItem.view.sql',
        trunk_dir + '\\Views\\dbo.vw_AnnualIssuance_CustomerHasEnablingItem.view.sql',
        trunk_dir + '\\UserFunctions\\dbo.udf_Search_LicenseReportAnswerGroup_ForDataExtracts.function.sql',
        trunk_dir + '\\UserFunctions\\dbo.udf_Renewal_Customers.function.sql',
        trunk_dir + '\\Views\\dbo.vw_rpt_CustomerVesselOwnershipCurrent.view.sql',
        trunk_dir + '\\UserFunctions\\dbo.udf_Search_Renewal_Customers.function.sql',
        trunk_dir + '\\UserFunctions\\dbo.udf_GetHighestPriorityDocumentTitleByDocumentID.function.sql',
        trunk_dir + '\\UserFunctions\\dbo.udf_Renewal_Items.function.sql',
        trunk_dir + '\\UserFunctions\\dbo.udf_Search_CustomerSimple.function.sql',
        trunk_dir + '\\UserFunctions\\dbo.udf_PreviouslyReportedCount.function.sql',
        trunk_dir + '\\UserFunctions\\dbo.udf_RoundToTheNearest.function.sql',
        trunk_dir + '\\UserFunctions\\dbo.udf_GetGlobalDistribution.function.sql',
        trunk_dir + '\\UserFunctions\\dbo.udf_CatalogAllFees.function.sql',
        trunk_dir + '\\Views\\dbo.vw_ItemSalesFee_All.view.sql',
        trunk_dir + '\\Views\\dbo.vw_CustomerVesselHomePortVesselPortCurrent.view.sql',
        trunk_dir + '\\Views\\dbo.vw_AnnualIssuance_PurchaseTotalByPackage.view.sql',
        trunk_dir + '\\Views\\dbo.vw_HuntDrawGroup.view.sql',
        trunk_dir + '\\Views\\dbo.vw_MasterHuntType_Complete.view.sql',
        trunk_dir + '\\Views\\dbo.vw_DrawConfig_Complete.view.sql',
        trunk_dir + '\\Views\\dbo.vw_HuntTypeLicenseYear_Complete.view.sql',
        trunk_dir + '\\Views\\dbo.vw_FulfillmentMostRecentDocumentFulfillmentAction.view.sql',
        trunk_dir + '\\Views\\dbo.vw_transactiondetail.view.sql',
        trunk_dir + '\\Views\\dbo.vw_LEPermitTypeEx.view.sql',
        trunk_dir + '\\Views\\dbo.vw_LEPermitEx.view.sql',
        trunk_dir + '\\Views\\dbo.vw_LEPermitAnnualDesignationEx.view.sql',
        trunk_dir + '\\Views\\dbo.vw_LEPermitHerringPlatoonSpecial.view.sql',
        trunk_dir + '\\Views\\dbo.vw_LEPermitHerringPlatoonDesignationInfo.view.sql',
        trunk_dir + '\\Views\\dbo.vw_LEPermitTemporaryTransferEx.view.sql',
        trunk_dir + '\\Views\\dbo.vw_LEPermitHerringSquareView.view.sql',
        trunk_dir + '\\Views\\dbo.vw_LifetimeAnnualIssuanceSummary_LivePackageCustomerList.view.sql',
        trunk_dir + '\\Views\\dbo.vw_rpt_CustomerActiveAddressReturnPreferred.view.sql',
        trunk_dir + '\\Views\\dbo.vw_rpt_VesselCustomersOnly.view.sql',
        trunk_dir + '\\Views\\dbo.vw_rpt_CustomerVesselOwnership.view.sql',
        trunk_dir + '\\Views\\dbo.vw_rpt_CustomerVesselHomePortVesselPortCurrent.view.sql',
        trunk_dir + '\\Views\\dbo.vw_rpt_HerringAreaPlatoon.view.sql',
        trunk_dir + '\\Views\\dbo.vw_rpt_LEPermitAnnualDesignation.view.sql',
        trunk_dir + '\\Views\\dbo.vw_rpt_LEPermitLEPermitAnnualDesignationCurrent.view.sql',
        trunk_dir + '\\Views\\dbo.vw_rpt_LEPermitTemporaryTransfer.view.sql',
        trunk_dir + '\\Views\\dbo.vw_rpt_LEPermitType.view.sql',
        trunk_dir + '\\Views\\dbo.vw_rpt_CustomerVesselDocumentationCurrent.view.sql',
        trunk_dir + '\\Views\\dbo.vw_rpt_CustomerVesselCurrentOwnershipDocumentationHomePort.view.sql',
        trunk_dir + '\\Views\\dbo.vw_AnnualIssuance_JobResultSubTotals.view.sql',
        trunk_dir + '\\Views\\dbo.vw_AnnualIssuance_JobResultTotalByMailing.view.sql',
        trunk_dir + '\\Views\\dbo.vw_rpt_HuntApplicationParticipants.view.sql',
        trunk_dir + '\\Views\\dbo.vw_rpt_CustomerVesselDocumentation.view.sql',
        trunk_dir + '\\Views\\dbo.vw_LifetimeAnnualIssuanceCustomerSearch_PostIssuance.view.sql',
        trunk_dir + '\\Views\\dbo.vw_CustomerUnifiedNameAndIdentities.view.sql',
        trunk_dir + '\\Views\\dbo.vw_SLMSParticipantInfo.view.sql',
        trunk_dir + '\\UserFunctions\\dbo.udf_Search_SPFalconDispositionHistory.function.sql',
        trunk_dir + '\\UserFunctions\\dbo.udf_Config_GetValueByKey_Effective.function.sql',
        trunk_dir + '\\Views\\dbo.vw_SLMSLettersConfigurationValues.view.sql',
        trunk_dir + '\\Views\\dbo.Diag_ItemFeeDist_FixedTotals.view.sql',
        trunk_dir + '\\Views\\dbo.vw_rpt_CustomerVesselHomePortVesselPort.view.sql',
        trunk_dir + '\\Views\\dbo.vw_CustomerOfficalDocument.view.sql',
        trunk_dir + '\\UserFunctions\\dbo.udf_ConvertHeight.function.sql',
        trunk_dir + '\\UserFunctions\\dbo.udf_GetCustomerIdentityForLicense.function.sql',
        trunk_dir + '\\UserFunctions\\dbo.udf_GetCustomerDisability.function.sql',
        trunk_dir + '\\Views\\dbo.vw_rpt_CustomerIndividual.view.sql',
        trunk_dir + '\\Views\\dbo.Diag_ItemFeeDist_PercentageTotals.view.sql',
        trunk_dir + '\\Views\\dbo.vw_CustomerActiveID.view.sql',
        trunk_dir + '\\Views\\dbo.Diag_ItemFeeDist_UnallocatedTotals.view.sql',
    ]

    # Move the enum scripts found under Views into the Enums folder.
    enum_list = get_enums(trunk_dir + '\\Views')
    file_loc = trunk_dir + '\\Enums'
    fh.clear_folder(file_loc)
    fh.move_files(enum_list, file_loc)

    if os.path.exists(trunk_dir):
        script_file = os.path.join(trunk_dir, script_name)
        fh.write_file(script_file, '')
        for folder in child_folders:
            abs_path = os.path.join(trunk_dir, folder)
            if folder == 'SpecialScripts':
                merge_special_scripts(moved_files, script_file)
                file_loc = trunk_dir + '\\SpecialScripts'
                fh.clear_folder(file_loc)
                fh.move_files(moved_files, file_loc)
                continue
            if folder == 'Views':
                merge_views(view_list, abs_path, script_file)
                continue
            if os.path.exists(abs_path):
                files = os.listdir(abs_path)
                for file in files:
                    f = os.path.join(abs_path, file)
                    if os.path.isfile(f):
                        context = fh.read_file(f)
                        context = "\r\n" + context
                        fh.write_file(script_file, context, 'utf-8', 'a')
def merge_special_scripts(script_list, w_file):
    for f in script_list:
        if os.path.isfile(f):
            context = fh.read_file(f)
            context = "\r\n" + context
            fh.write_file(w_file, context, 'utf-8', 'a')
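# Hedged usage sketch (not part of the original module): running the whole
# merge as a script. `root_dir` and the `fh` file-handler module are assumed to
# be defined/imported at module level in the original project.
if __name__ == '__main__':
    merge_all_scripts()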