@classmethod
def printBasicColumns(cls, objects):
    obj_list = cls.objectContainer.container()
    for _object in objects:
        obj_list.append(_object)

    cols = cls.objectContainer.container.getBasicCols()

    SanitationUtils.safePrint(obj_list.tabulate(
        cols,
        tablefmt='simple'
    ))
def printBasicColumns(users):
    # print len(users)
    usrList = UsrObjList()
    for user in users:
        usrList.append(user)
        # SanitationUtils.safePrint("BILLING ADDRESS:", repr(user), user['First Name'],
        #     user.get('First Name'), user.name.__unicode__(out_schema="flat"))

    cols = colData.getBasicCols()

    SanitationUtils.safePrint(usrList.tabulate(
        cols,
        tablefmt='simple'
    ))
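# A minimal usage sketch, assuming `maParser` is a populated CSVParse_User
# instance as elsewhere in this codebase (the variable name is hypothetical):
#
#     printBasicColumns(maParser.users.values())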
def getSlaveUpdatesNativeRecursive(self, col, updates=None):
    if updates is None:
        updates = OrderedDict()
    SanitationUtils.safePrint("getting updates for col %s, updates: %s" % (col, str(updates)))
    if col in self.colData.data.keys():
        data = self.colData.data[col]
        if data.get(self.s_meta_target):
            data_s = data.get(self.s_meta_target, {})
            if not data_s.get('final') and data_s.get('key'):
                updates[data_s.get('key')] = self.newSObject.get(col)
        if data.get('aliases'):
            data_aliases = data.get('aliases')
            for alias in data_aliases:
                if self.sColSemiStatic(alias):
                    continue
                updates = self.getSlaveUpdatesNativeRecursive(alias, updates)
    return updates
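# Sketch of the column metadata this recursion walks (hypothetical example;
# the real mapping lives in self.colData.data). With s_meta_target == 'wp',
# 'Phone' would contribute {'billing_phone': <new value>}, and alias columns
# are visited recursively unless sColSemiStatic() rules them out:
#
#     self.colData.data = {
#         'Phone Numbers': {
#             'aliases': ['Phone', 'Mobile Phone', 'Fax'],
#         },
#         'Phone': {
#             'wp': {'key': 'billing_phone', 'final': False},
#         },
#     }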
sshClient = paramiko.SSHClient()
sshClient.set_missing_host_key_policy(paramiko.AutoAddPolicy())
try:
    sshClient.connect(**paramikoSSHParams)
    stdin, stdout, stderr = sshClient.exec_command(command)
    possible_errors = stdout.readlines()
    assert not possible_errors, "command returned errors: " + str(possible_errors)
    sftpClient = None
    try:
        sftpClient = sshClient.open_sftp()
        sftpClient.chdir(remoteExportFolder)
        fstat = sftpClient.stat(exportFilename)
        if fstat:
            sftpClient.get(exportFilename, maPath)
    except Exception as e:
        SanitationUtils.safePrint("ERROR IN SFTP: " + str(e))
    finally:
        if sftpClient:
            sftpClient.close()
except Exception as e:
    SanitationUtils.safePrint("ERROR IN SSH: " + str(e))
finally:
    sshClient.close()

maParser = CSVParse_User(
    cols=colData.getImportCols(),
    defaults=colData.getDefaults(),
    contact_schema='act',
)
maParser.analyseFile(maPath, maEncoding)
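# paramikoSSHParams is assembled elsewhere; a minimal sketch of the expected
# shape (the keys follow paramiko.SSHClient.connect's keyword arguments, as in
# actConnectParams further down):
#
#     paramikoSSHParams = {
#         'hostname': m_ssh_host,
#         'port': m_ssh_port,
#         'username': m_ssh_user,
#         'password': m_ssh_pass,
#     }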
ascii_str = SanitationUtils.unicodeToAscii(u_str)
ascii_back = SanitationUtils.asciiToUnicode(ascii_str)

printandrep("u_str", u_str)
printandrep("utf8_str", utf8_str, utf8_back)
printandrep("xml_str", xml_str, xml_back)
printandrep("ascii_str", ascii_str, ascii_back)

print SanitationUtils.unicodeToUTF8(None)
print SanitationUtils.utf8ToUnicode(None)
print SanitationUtils.unicodeToXml(None)
print SanitationUtils.xmlToUnicode(None)
print SanitationUtils.unicodeToAscii(None)
print SanitationUtils.asciiToUnicode(None)
print SanitationUtils.coerceUnicode(None)
SanitationUtils.safePrint(None)

print converters.to_bytes(SanitationUtils.coerceUnicode("\xf0\x9f\x91\x8c"))
print converters.to_bytes(SanitationUtils.unicodeToXml("\xf0\x9f\x91\x8c".decode("utf8"), True))
print converters.xml_to_unicode("L")
print converters.xml_to_byte_string("L")
print converters.to_bytes(converters.xml_to_unicode("と"))
print converters.to_bytes(converters.xml_to_byte_string("と", input_encoding="ascii"))
print converters.xml_to_unicode("👌", encoding="ascii")
print converters.xml_to_byte_string("👌", input_encoding="ascii")

map_json = '{"E-mail":"*****@*****.**","Web Site":"http:\/\/technotan.com.au","MYOB Customer Card ID":[""],"MYOB Card ID":[""],"First Name":["Neil \ud83d\udc4c\'&>"],"Surname":["Cunliffe-Williams"],"Contact":["Neil Cunliffe-Williams"],"Company":["Laserphile"],"Address 1":["7 Grosvenor Road"],"Address 2":[""],"City":[""],"Postcode":["6053"],"State":["WA"],"Phone":["0416160912"],"Home Address 1":["7 Grosvenor Road"],"Home Address 2":[""],"Home City":["Bayswater"],"Home Postcode":["6053"],"Home Country":["AU"],"Home State":["WA"],"Role":["ADMIN"],"ABN":["32"],"Business Type":[""],"Birth Date":[""],"Mobile Phone":["+61416160912"],"Fax":[""],"Lead Source":[""],"Referred By":[""]}'
# unicodeToUTF8(u_str)
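# `converters` above appears to be kitchen.text.converters (an assumption
# based on the to_bytes / xml_to_unicode / xml_to_byte_string names); if so,
# the missing import for this snippet would be:
#
#     from kitchen.text import converters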
def main():
    global testMode, inFolder, outFolder, logFolder, srcFolder, pklFolder, \
        yamlPath, repPath, mFailPath, sFailPath, logPath, zipPath

    userFile = cardFile = emailFile = sinceM = sinceS = False

    ### OVERRIDE CONFIG WITH YAML FILE ###

    with open(yamlPath) as stream:
        config = yaml.load(stream)

        if 'inFolder' in config.keys():
            inFolder = config['inFolder']
        if 'outFolder' in config.keys():
            outFolder = config['outFolder']
        if 'logFolder' in config.keys():
            logFolder = config['logFolder']

        # mandatory
        merge_mode = config.get('merge_mode', 'sync')
        MASTER_NAME = config.get('master_name', 'MASTER')
        SLAVE_NAME = config.get('slave_name', 'SLAVE')
        DEFAULT_LAST_SYNC = config.get('default_last_sync')
        master_file = config.get('master_file', '')
        slave_file = config.get('slave_file', '')
        userFile = config.get('userFile')
        cardFile = config.get('cardFile')
        emailFile = config.get('emailFile')
        sinceM = config.get('sinceM')
        sinceS = config.get('sinceS')
        download_slave = config.get('download_slave')
        download_master = config.get('download_master')
        update_slave = config.get('update_slave')
        update_master = config.get('update_master')
        do_filter = config.get('do_filter')
        do_problematic = config.get('do_problematic')
        do_post = config.get('do_post')
        do_sync = config.get('do_sync')

    ### OVERRIDE CONFIG WITH ARGPARSE ###

    parser = argparse.ArgumentParser(description='Merge contact records between two databases')
    group = parser.add_mutually_exclusive_group()
    group.add_argument("-v", "--verbosity", action="count", help="increase output verbosity")
    group.add_argument("-q", "--quiet", action="store_true")
    group = parser.add_mutually_exclusive_group()
    group.add_argument('--testmode', help='Run in test mode with test databases',
                       action='store_true', default=None)
    group.add_argument('--livemode', help='Run the script on the live databases',
                       action='store_false', dest='testmode')
    group = parser.add_mutually_exclusive_group()
    group.add_argument('--download-master', help='download the master data',
                       action="store_true", default=None)
    group.add_argument('--skip-download-master',
                       help='use the local master file instead of downloading the master data',
                       action="store_false", dest='download_master')
    group = parser.add_mutually_exclusive_group()
    group.add_argument('--download-slave', help='download the slave data',
                       action="store_true", default=None)
    group.add_argument('--skip-download-slave',
                       help='use the local slave file instead of downloading the slave data',
                       action="store_false", dest='download_slave')
    group = parser.add_mutually_exclusive_group()
    group.add_argument('--update-master', help='update the master database',
                       action="store_true", default=None)
    group.add_argument('--skip-update-master', help="don't update the master database",
                       action="store_false", dest='update_master')
    group = parser.add_mutually_exclusive_group()
    group.add_argument('--update-slave', help='update the slave database',
                       action="store_true", default=None)
    group.add_argument('--skip-update-slave', help="don't update the slave database",
                       action="store_false", dest='update_slave')
    group = parser.add_mutually_exclusive_group()
    group.add_argument('--do-filter', help='filter the databases',
                       action="store_true", default=None)
    group.add_argument('--skip-filter', help="don't filter the databases",
                       action="store_false", dest='do_filter')
    group = parser.add_mutually_exclusive_group()
    group.add_argument('--do-sync', help='sync the databases',
                       action="store_true", default=None)
    group.add_argument('--skip-sync', help="don't sync the databases",
                       action="store_false", dest='do_sync')
    group = parser.add_mutually_exclusive_group()
    group.add_argument('--do-problematic', help='make problematic updates to the databases',
                       action="store_true", default=None)
    group.add_argument('--skip-problematic', help="don't make problematic updates to the databases",
                       action="store_false", dest='do_problematic')
    group = parser.add_mutually_exclusive_group()
    group.add_argument('--do-post', help='post process the contacts',
                       action="store_true", default=None)
    group.add_argument('--skip-post', help="don't post process the contacts",
                       action="store_false", dest='do_post')
    parser.add_argument('--m-ssh-host', help='location of master ssh server')
    parser.add_argument('--m-ssh-port', type=int, help='location of master ssh port')
    parser.add_argument('--limit', type=int, help='global limit of objects to process')
    parser.add_argument('--master-file', help='location of master file')
    parser.add_argument('--slave-file', help='location of slave file')
    parser.add_argument('--card-file')
    group = parser.add_argument_group()
    group.add_argument('--debug-abstract', action='store_true', dest='debug_abstract')
    group.add_argument('--debug-parser', action='store_true', dest='debug_parser')
    group.add_argument('--debug-update', action='store_true', dest='debug_update')
    group.add_argument('--debug-flat', action='store_true', dest='debug_flat')
    group.add_argument('--debug-name', action='store_true', dest='debug_name')
    group.add_argument('--debug-address', action='store_true', dest='debug_address')
    group.add_argument('--debug-client', action='store_true', dest='debug_client')
    group.add_argument('--debug-utils', action='store_true', dest='debug_utils')
    group.add_argument('--debug-contact', action='store_true', dest='debug_contact')

    args = parser.parse_args()
    if args:
        print args
        if args.verbosity > 0:
            Registrar.DEBUG_PROGRESS = True
            Registrar.DEBUG_ERROR = True
        if args.verbosity > 1:
            Registrar.DEBUG_MESSAGE = True
        if args.quiet:
            Registrar.DEBUG_PROGRESS = False
            Registrar.DEBUG_ERROR = False
            Registrar.DEBUG_MESSAGE = False
        if args.testmode is not None:
            testMode = args.testmode
        if args.download_slave is not None:
            download_slave = args.download_slave
        if args.download_master is not None:
            download_master = args.download_master
        if args.update_slave is not None:
            update_slave = args.update_slave
        if args.update_master is not None:
            update_master = args.update_master
        if args.do_filter is not None:
            do_filter = args.do_filter
        if args.do_sync is not None:
            do_sync = args.do_sync
        if args.do_problematic is not None:
            do_problematic = args.do_problematic
        if args.do_post is not None:
            do_post = args.do_post
        if args.m_ssh_port:
            m_ssh_port = args.m_ssh_port
        if args.m_ssh_host:
            m_ssh_host = args.m_ssh_host
        if args.master_file is not None:
            download_master = False
            master_file = args.master_file
        if args.slave_file is not None:
            download_slave = False
            slave_file = args.slave_file
        if args.card_file is not None:
            cardFile = args.card_file
            do_filter = True
        if args.debug_abstract is not None:
            Registrar.DEBUG_ABSTRACT = args.debug_abstract
        if args.debug_parser is not None:
            Registrar.DEBUG_PARSER = args.debug_parser
        if args.debug_update is not None:
            Registrar.DEBUG_UPDATE = args.debug_update
        if args.debug_flat is not None:
            Registrar.DEBUG_FLAT = args.debug_flat
        if args.debug_name is not None:
            Registrar.DEBUG_NAME = args.debug_name
        if args.debug_address is not None:
            Registrar.DEBUG_ADDRESS = args.debug_address
        if args.debug_client is not None:
            Registrar.DEBUG_CLIENT = args.debug_client
        if args.debug_utils is not None:
            Registrar.DEBUG_UTILS = args.debug_utils
        if args.debug_contact is not None:
            Registrar.DEBUG_CONTACT = args.debug_contact

    global_limit = args.limit

    # api config
    with open(yamlPath) as stream:
        optionNamePrefix = 'test_' if testMode else ''
        config = yaml.load(stream)
        ssh_user = config.get(optionNamePrefix+'ssh_user')
        ssh_pass = config.get(optionNamePrefix+'ssh_pass')
        ssh_host = config.get(optionNamePrefix+'ssh_host')
        ssh_port = config.get(optionNamePrefix+'ssh_port', 22)
        m_ssh_user = config.get(optionNamePrefix+'m_ssh_user')
        m_ssh_pass = config.get(optionNamePrefix+'m_ssh_pass')
        m_ssh_host = config.get(optionNamePrefix+'m_ssh_host')
        m_ssh_port = config.get(optionNamePrefix+'m_ssh_port', 22)
        remote_bind_host = config.get(optionNamePrefix+'remote_bind_host', '127.0.0.1')
        remote_bind_port = config.get(optionNamePrefix+'remote_bind_port', 3306)
        db_user = config.get(optionNamePrefix+'db_user')
        db_pass = config.get(optionNamePrefix+'db_pass')
        db_name = config.get(optionNamePrefix+'db_name')
        db_charset = config.get(optionNamePrefix+'db_charset', 'utf8mb4')
        wp_srv_offset = config.get(optionNamePrefix+'wp_srv_offset', 0)
        m_db_user = config.get(optionNamePrefix+'m_db_user')
        m_db_pass = config.get(optionNamePrefix+'m_db_pass')
        m_db_name = config.get(optionNamePrefix+'m_db_name')
        m_db_host = config.get(optionNamePrefix+'m_db_host')
        m_x_cmd = config.get(optionNamePrefix+'m_x_cmd')
        m_i_cmd = config.get(optionNamePrefix+'m_i_cmd')
        tbl_prefix = config.get(optionNamePrefix+'tbl_prefix', '')
        wp_user = config.get(optionNamePrefix+'wp_user', '')
        wp_pass = config.get(optionNamePrefix+'wp_pass', '')
        store_url = config.get(optionNamePrefix+'store_url', '')
        wc_api_key = config.get(optionNamePrefix+'wc_api_key')
        wc_api_secret = config.get(optionNamePrefix+'wc_api_secret')
        remote_export_folder = config.get(optionNamePrefix+'remote_export_folder', '')

    ### DISPLAY CONFIG ###
    if Registrar.DEBUG_MESSAGE:
        if testMode:
            print "testMode enabled"
        else:
            print "testMode disabled"
        if not download_slave:
            print "no download_slave"
        if not download_master:
            print "no download_master"
        if not update_master:
            print "not updating master"
        if not update_slave:
            print "not updating slave"
        if not do_filter:
            print "not doing filter"
        if not do_sync:
            print "not doing sync"
        if not do_post:
            print "not doing post"

    ### PROCESS CLASS PARAMS ###
    FieldGroup.do_post = do_post
    SyncUpdate.setGlobals(MASTER_NAME, SLAVE_NAME, merge_mode, DEFAULT_LAST_SYNC)
    TimeUtils.setWpSrvOffset(wp_srv_offset)

    ### SET UP DIRECTORIES ###
    for path in (inFolder, outFolder, logFolder, srcFolder, pklFolder):
        if not os.path.exists(path):
            os.mkdir(path)

    fileSuffix = "_test" if testMode else ""
    fileSuffix += "_filter" if do_filter else ""
    m_x_filename = "act_x" + fileSuffix + "_" + importName + ".csv"
    m_i_filename = "act_i" + fileSuffix + "_" + importName + ".csv"
    s_x_filename = "wp_x" + fileSuffix + "_" + importName + ".csv"
    remoteExportPath = os.path.join(remote_export_folder, m_x_filename)

    if download_master:
        maPath = os.path.join(inFolder, m_x_filename)
        maEncoding = "utf-8"
    else:
        # maPath = os.path.join(inFolder, "act_x_test_2016-05-03_23-01-48.csv")
        maPath = os.path.join(inFolder, master_file)
        # maPath = os.path.join(inFolder, "500-act-records-edited.csv")
        # maPath = os.path.join(inFolder, "500-act-records.csv")
        maEncoding = "utf8"
    if download_slave:
        saPath = os.path.join(inFolder, s_x_filename)
        saEncoding = "utf8"
    else:
        saPath = os.path.join(inFolder, slave_file)
        # saPath = os.path.join(inFolder, "500-wp-records-edited.csv")
        saEncoding = "utf8"

    moPath = os.path.join(outFolder, m_i_filename)
    repPath = os.path.join(outFolder, "usr_sync_report%s.html" % fileSuffix)
    WPresCsvPath = os.path.join(outFolder, "sync_report_wp%s.csv" % fileSuffix)
    masterResCsvPath = os.path.join(outFolder, "sync_report_act%s.csv" % fileSuffix)
    masterDeltaCsvPath = os.path.join(outFolder, "delta_report_act%s.csv" % fileSuffix)
    slaveDeltaCsvPath = os.path.join(outFolder, "delta_report_wp%s.csv" % fileSuffix)
    mFailPath = os.path.join(outFolder, "act_fails%s.csv" % fileSuffix)
    sFailPath = os.path.join(outFolder, "wp_fails%s.csv" % fileSuffix)
    sqlPath = os.path.join(srcFolder, "select_userdata_modtime.sql")
    # pklPath = os.path.join(pklFolder, "parser_pickle.pkl")
    pklPath = os.path.join(pklFolder, "parser_pickle%s.pkl" % fileSuffix)
    logPath = os.path.join(logFolder, "log_%s.txt" % importName)
    zipPath = os.path.join(logFolder, "zip_%s.zip" % importName)

    ### PROCESS OTHER CONFIG ###
    assert store_url, "store url must not be blank"
    xmlrpc_uri = store_url + 'xmlrpc.php'
    json_uri = store_url + 'wp-json/wp/v2'

    actFields = ";".join(ColData_User.getACTImportCols())

    jsonConnectParams = {
        'json_uri': json_uri,
        'wp_user': wp_user,
        'wp_pass': wp_pass
    }

    wcApiParams = {
        'api_key': wc_api_key,
        'api_secret': wc_api_secret,
        'url': store_url
    }

    sqlConnectParams = {
    }

    actConnectParams = {
        'hostname': m_ssh_host,
        'port': m_ssh_port,
        'username': m_ssh_user,
        'password': m_ssh_pass,
    }

    actDbParams = {
        'db_x_exe': m_x_cmd,
        'db_i_exe': m_i_cmd,
        'db_name': m_db_name,
        'db_host': m_db_host,
        'db_user': m_db_user,
        'db_pass': m_db_pass,
        'fields': actFields,
    }
    if sinceM:
        actDbParams['since'] = sinceM

    fsParams = {
        'importName': importName,
        'remote_export_folder': remote_export_folder,
        'inFolder': inFolder,
        'outFolder': outFolder
    }

    #########################################
    # Prepare Filter Data
    #########################################

    print debugUtils.hashify("PREPARE FILTER DATA"), timediff()

    if do_filter:
        filterFiles = {
            'users': userFile,
            'emails': emailFile,
            'cards': cardFile,
        }
        filterItems = {}
        for key, filterFile in filterFiles.items():
            if filterFile:
                try:
                    with open(os.path.join(inFolder, filterFile)) as filterFileObj:
                        filterItems[key] = [
                            re.sub(r'\s*([^\s].*[^\s])\s*(?:\n)', r'\1', line)
                            for line in filterFileObj
                        ]
                except IOError as e:
                    SanitationUtils.safePrint("could not open %s file [%s] from %s" % (
                        key, filterFile, unicode(os.getcwd())
                    ))
                    raise e
        if sinceM:
            filterItems['sinceM'] = TimeUtils.wpStrptime(sinceM)
        if sinceS:
            filterItems['sinceS'] = TimeUtils.wpStrptime(sinceS)
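# A minimal sketch of the YAML file read from yamlPath above (illustrative
# values only; the keys mirror the config.get() calls in main(), and each
# api-config key also has a 'test_'-prefixed variant used when testMode is on):
#
#     inFolder: ../input/
#     outFolder: ../output/
#     logFolder: ../logs/
#     merge_mode: sync
#     master_name: ACT
#     slave_name: WORDPRESS
#     master_file: 500-act-records.csv
#     slave_file: 500-wp-records.csv
#     store_url: http://example.com/
#     wp_user: example_user
#     wp_pass: example_pass
#     remote_export_folder: /path/to/exports/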
if syncUpdate.mUpdated and syncUpdate.mDeltas:
    insort(mDeltaUpdates, syncUpdate)
if syncUpdate.sUpdated and syncUpdate.sDeltas:
    insort(sDeltaUpdates, syncUpdate)

if not syncUpdate:
    continue

if syncUpdate.sUpdated:
    syncSlaveUpdates = syncUpdate.getSlaveUpdates()
    if 'E-mail' in syncSlaveUpdates:
        newEmail = syncSlaveUpdates['E-mail']
        if newEmail in saParser.emails:
            mObjects = [mObject]
            sObjects = [sObject] + saParser.emails[newEmail]
            SanitationUtils.safePrint("duplicate emails", mObjects, sObjects)
            emailConflictMatches.addMatch(Match(mObjects, sObjects))
            continue

if not syncUpdate.importantStatic:
    if syncUpdate.mUpdated and syncUpdate.sUpdated:
        if syncUpdate.sMod:
            insort(problematicUpdates, syncUpdate)
            continue
    elif syncUpdate.mUpdated and not syncUpdate.sUpdated:
        insort(nonstaticMUpdates, syncUpdate)
        if syncUpdate.sMod:
            insort(problematicUpdates, syncUpdate)
            continue
    elif syncUpdate.sUpdated and not syncUpdate.mUpdated:
        insort(nonstaticSUpdates, syncUpdate)
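# `insort` above would be bisect.insort from the standard library, which keeps
# each update list sorted as it grows; this assumes SyncUpdate instances are
# orderable (e.g. via comparison methods defined on the class):
#
#     from bisect import insort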
method_name = 'tansync.update_user_fields'
method_args = ('user_id', 'fields_json_base64')

username = '******'
password = '******'
store_url = 'http://minimac.ddns.me:11182/'
# store_url = 'http://technotea.com.au/'
# username = '******'
# password = '******'

xmlrpc_uri = store_url + 'xmlrpc.php'

client = Client(xmlrpc_uri, username, password)

# posts = client.call(posts.GetPosts())
# print posts
# print client.normalize_string(u'no\u2015odle')

fields = {
    'first_name': 'no👌od👌le'.decode('utf8'),
    'user_url': "http://www.laserphile.com/",
    'user_login': "******"
}

fields_json = SanitationUtils.encodeJSON(fields)
SanitationUtils.safePrint(repr(fields_json))

# fields_json_base64 = SanitationUtils.encodeBase64(fields_json)
fields_json_base64 = "eyJidXNpbmVzc190eXBlIjogIk90aGVyIiwgInByZWZfdGVsIjogIkZhbHNlIiwgImFjdF9yb2xlIjogIlJOIiwgInByZWZfbW9iIjogIkZhbHNlIiwgImhvd19oZWFyX2Fib3V0IjogIkV2ZW50IiwgInVzZXJfZW1haWwiOiAiZW1tYS1hbGxlbjk0QGhvdG1haWwuY29tIn0="
# print fields_json_base64
# eyJ1c2VyX2xvZ2luIjogImFkbWluIiwgImZpcnN0X25hbWUiOiAibm_wn5GMb2Twn5GMbGUiLCAidXNlcl91cmwiOiAiaHR0cDovL3d3dy5sYXNlcnBoaWxlLmNvbS8ifQ==

test_out = client.call(UpdateUser(1, fields_json_base64))
print test_out
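# The hardcoded base64 payload above could be derived from fields_json with
# the standard library; a sketch, assuming fields_json is the byte string
# returned by SanitationUtils.encodeJSON:
#
#     import base64
#     fields_json_base64 = base64.b64encode(fields_json)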
        ])
        if not map_obj:
            raise Exception()
        return map_obj
        # return tabulate(map_obj, headers="keys", tablefmt="html")
    except Exception:
        return map_json


def map2table(map_obj):
    return tabulate(map_obj, headers="keys", tablefmt="html")


changeDataFmt = changeData[:1]
for user_id, c_time, changed, data in sorted(changeData[1:]):
    if user_id == 1:
        SanitationUtils.safePrint(changed)
    changedMap = json2map(changed)
    if not isinstance(changedMap, dict):
        continue
    if user_id == 1:
        for value in changedMap.values():
            for val in value:
                SanitationUtils.safePrint(val)
    dataMap = json2map(data)
    diffMap = listUtils.keysNotIn(dataMap, changedMap.keys()) \
        if isinstance(changedMap, dict) else dataMap
    changeDataFmt.append([
        user_id,
        c_time,
        "C:%s<br/>S:%s" % (map2table(changedMap), map2table(diffMap))
    ])

print "creating report..."
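# Sketch of the changeData shape the loop above assumes (hypothetical example;
# row 0 is a header row and each later row unpacks to user_id, c_time,
# changed, data, where changed/data are JSON strings fed to json2map):
#
#     changeData = [
#         ['user_id', 'c_time', 'changed', 'data'],
#         [1, '2016-05-03 23:01:48',
#          '{"First Name": ["Neil"]}',
#          '{"First Name": ["Neil"], "Surname": ["Cunliffe-Williams"]}'],
#     ]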