Example #1
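    # Return the report column mapping from ColData_User unchanged; the
    # commented-out loop below shows how specific columns could be excluded.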
    def getReportCols(self):
        report_cols = ColData_User.getReportCols()
        # for exclude_col in ['E-mail','MYOB Card ID','Wordpress Username','Role']:
        #     if exclude_col in report_cols:
        #         del report_cols[exclude_col]

        return report_cols
Example #2
    m_db_name = config.get('test_m_db_name')
    m_command = config.get('test_m_command')

exportFilename = "act_x_test_"+importName+".csv"
remoteExportPath = os.path.join(remoteExportFolder, exportFilename)
maPath = os.path.join(inFolder, exportFilename)
maEncoding = "utf-8"

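# Connection parameters for the master SSH server, shaped to be unpacked into a
# paramiko connect call (e.g. SSHClient.connect(**paramikoSSHParams)).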
paramikoSSHParams = {
    'hostname':    m_ssh_host,
    'port':        m_ssh_port,
    'username':    m_ssh_user,
    'password':    m_ssh_pass,
}

colData = ColData_User()
actCols = colData.getACTCols()
fields = ";".join(actCols.keys())

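# Assemble the remote export command: cd into the export folder (when one is
# configured), then run the export executable with its database credentials.
# filter(None, ...) drops the cd fragment if no folder was given.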
command = " ".join(filter(None,[
    'cd {wd};'.format(
        wd      = remoteExportFolder,
    ) if remoteExportFolder else None,
    '{cmd} "-d{db_name}" "-h{db_host}" "-u{db_user}" "-p{db_pass}"'.format(
        cmd     = m_command,
        db_name = m_db_name,
        db_host = m_db_host,
        db_user = m_db_user,
        db_pass = m_db_pass,
    ),
    '-s"%s"' % "2016-03-01",
Example #3
    def getBasicCols(cls, self):
        return ColData_User.getBasicCols()
Example #4
def main():
    global testMode, inFolder, outFolder, logFolder, srcFolder, pklFolder, \
        yamlPath, repPath, mFailPath, sFailPath, logPath, zipPath

    userFile = cardFile = emailFile = sinceM = sinceS = False

    ### OVERRIDE CONFIG WITH YAML FILE ###

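    # NB: yaml.load without an explicit Loader is unsafe on untrusted input;
    # yaml.safe_load is preferable when the config needs no custom tags.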
    with open(yamlPath) as stream:
        config = yaml.load(stream)

        if 'inFolder' in config.keys():
            inFolder = config['inFolder']
        if 'outFolder' in config.keys():
            outFolder = config['outFolder']
        if 'logFolder' in config.keys():
            logFolder = config['logFolder']

        #mandatory
        merge_mode = config.get('merge_mode', 'sync')
        MASTER_NAME = config.get('master_name', 'MASTER')
        SLAVE_NAME = config.get('slave_name', 'SLAVE')
        DEFAULT_LAST_SYNC = config.get('default_last_sync')
        master_file = config.get('master_file', '')
        slave_file = config.get('slave_file', '')
        userFile = config.get('userFile')
        cardFile = config.get('cardFile')
        emailFile = config.get('emailFile')
        sinceM = config.get('sinceM')
        sinceS = config.get('sinceS')
        download_slave = config.get('download_slave')
        download_master = config.get('download_master')
        update_slave = config.get('update_slave')
        update_master = config.get('update_master')
        do_filter = config.get('do_filter')
        do_problematic = config.get('do_problematic')
        do_post = config.get('do_post')
        do_sync = config.get('do_sync')

    ### OVERRIDE CONFIG WITH ARGPARSE ###

    parser = argparse.ArgumentParser(description = 'Merge contact records between two databases')
    group = parser.add_mutually_exclusive_group()
    group.add_argument("-v", "--verbosity", action="count",
                        help="increase output verbosity")
    group.add_argument("-q", "--quiet", action="store_true")
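    # Each --do-X/--skip-X pair below is mutually exclusive; default=None means
    # the value loaded from YAML is only overridden when a flag is actually given.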
    group = parser.add_mutually_exclusive_group()
    group.add_argument('--testmode', help='Run in test mode with test databases',
                        action='store_true', default=None)
    group.add_argument('--livemode', help='Run the script on the live databases',
                        action='store_false', dest='testmode')
    group = parser.add_mutually_exclusive_group()
    group.add_argument('--download-master', help='download the master data',
                       action="store_true", default=None)
    group.add_argument('--skip-download-master', help='use the local master file instead '
                       'of downloading the master data', action="store_false", dest='download_master')
    group = parser.add_mutually_exclusive_group()
    group.add_argument('--download-slave', help='download the slave data',
                       action="store_true", default=None)
    group.add_argument('--skip-download-slave', help='use the local slave file instead '
                       'of downloading the slave data', action="store_false", dest='download_slave')
    group = parser.add_mutually_exclusive_group()
    group.add_argument('--update-master', help='update the master database',
                       action="store_true", default=None)
    group.add_argument('--skip-update-master', help='don\'t update the master database',
                       action="store_false", dest='update_master')
    group = parser.add_mutually_exclusive_group()
    group.add_argument('--update-slave', help='update the slave database',
                       action="store_true", default=None)
    group.add_argument('--skip-update-slave', help='don\'t update the slave database',
                       action="store_false", dest='update_slave')
    group = parser.add_mutually_exclusive_group()
    group.add_argument('--do-filter', help='filter the databases',
                       action="store_true", default=None)
    group.add_argument('--skip-filter', help='don\'t filter the databases',
                       action="store_false", dest='do_filter')
    group = parser.add_mutually_exclusive_group()
    group.add_argument('--do-sync', help='sync the databases',
                       action="store_true", default=None)
    group.add_argument('--skip-sync', help='don\'t sync the databases',
                       action="store_false", dest='do_sync')
    group = parser.add_mutually_exclusive_group()
    group.add_argument('--do-problematic', help='make problematic updates to the databases',
                       action="store_true", default=None)
    group.add_argument('--skip-problematic', help='don\'t make problematic updates to the databases',
                       action="store_false", dest='do_problematic')
    group = parser.add_mutually_exclusive_group()
    group.add_argument('--do-post', help='post process the contacts',
                       action="store_true", default=None)
    group.add_argument('--skip-post', help='don\'t post process the contacts',
                       action="store_false", dest='do_post')

    parser.add_argument('--m-ssh-host', help='location of master ssh server')
    parser.add_argument('--m-ssh-port', type=int, help='location of master ssh port')
    parser.add_argument('--limit', type=int, help='global limit of objects to process')
    parser.add_argument('--master-file', help='location of master file')
    parser.add_argument('--slave-file', help='location of slave file')
    parser.add_argument('--card-file')

    group = parser.add_argument_group()
    group.add_argument('--debug-abstract', action='store_true', dest='debug_abstract')
    group.add_argument('--debug-parser', action='store_true', dest='debug_parser')
    group.add_argument('--debug-update', action='store_true', dest='debug_update')
    group.add_argument('--debug-flat', action='store_true', dest='debug_flat')
    group.add_argument('--debug-name', action='store_true', dest='debug_name')
    group.add_argument('--debug-address', action='store_true', dest='debug_address')
    group.add_argument('--debug-client', action='store_true', dest='debug_client')
    group.add_argument('--debug-utils', action='store_true', dest='debug_utils')
    group.add_argument('--debug-contact', action='store_true', dest='debug_contact')

    args = parser.parse_args()

    if args:
        print args
        if args.verbosity > 0:
            Registrar.DEBUG_PROGRESS = True
            Registrar.DEBUG_ERROR = True
        if args.verbosity > 1:
            Registrar.DEBUG_MESSAGE = True
        if args.quiet:
            Registrar.DEBUG_PROGRESS = False
            Registrar.DEBUG_ERROR = False
            Registrar.DEBUG_MESSAGE = False
        if args.testmode is not None:
            testMode = args.testmode
        if args.download_slave is not None:
            download_slave = args.download_slave
        if args.download_master is not None:
            download_master = args.download_master
        if args.update_slave is not None:
            update_slave = args.update_slave
        if args.update_master is not None:
            update_master = args.update_master
        if args.do_filter is not None:
            do_filter = args.do_filter
        if args.do_sync is not None:
            do_sync = args.do_sync
        if args.do_problematic is not None:
            do_problematic = args.do_problematic
        if args.do_post is not None:
            do_post = args.do_post
        if args.m_ssh_port:
            m_ssh_port = args.m_ssh_port
        if args.m_ssh_host:
            m_ssh_host = args.m_ssh_host
        if args.master_file is not None:
            download_master = False
            master_file = args.master_file
        if args.slave_file is not None:
            download_slave = False
            slave_file = args.slave_file
        if args.card_file is not None:
            cardFile = args.card_file
            do_filter = True

        if args.debug_abstract is not None:
            Registrar.DEBUG_ABSTRACT = args.debug_abstract
        if args.debug_parser is not None:
            Registrar.DEBUG_PARSER = args.debug_parser
        if args.debug_update is not None:
            Registrar.DEBUG_UPDATE = args.debug_update
        if args.debug_flat is not None:
            Registrar.DEBUG_FLAT = args.debug_flat
        if args.debug_name is not None:
            Registrar.DEBUG_NAME = args.debug_name
        if args.debug_address is not None:
            Registrar.DEBUG_ADDRESS = args.debug_address
        if args.debug_client is not None:
            Registrar.DEBUG_CLIENT = args.debug_client
        if args.debug_utils is not None:
            Registrar.DEBUG_UTILS = args.debug_utils
        if args.debug_contact is not None:
            Registrar.DEBUG_CONTACT = args.debug_contact

        global_limit = args.limit

    # api config

    with open(yamlPath) as stream:
        optionNamePrefix = 'test_' if testMode else ''
        config = yaml.load(stream)
        ssh_user = config.get(optionNamePrefix+'ssh_user')
        ssh_pass = config.get(optionNamePrefix+'ssh_pass')
        ssh_host = config.get(optionNamePrefix+'ssh_host')
        ssh_port = config.get(optionNamePrefix+'ssh_port', 22)
        m_ssh_user = config.get(optionNamePrefix+'m_ssh_user')
        m_ssh_pass = config.get(optionNamePrefix+'m_ssh_pass')
        m_ssh_host = config.get(optionNamePrefix+'m_ssh_host')
        m_ssh_port = config.get(optionNamePrefix+'m_ssh_port', 22)
        remote_bind_host = config.get(optionNamePrefix+'remote_bind_host', '127.0.0.1')
        remote_bind_port = config.get(optionNamePrefix+'remote_bind_port', 3306)
        db_user = config.get(optionNamePrefix+'db_user')
        db_pass = config.get(optionNamePrefix+'db_pass')
        db_name = config.get(optionNamePrefix+'db_name')
        db_charset = config.get(optionNamePrefix+'db_charset', 'utf8mb4')
        wp_srv_offset = config.get(optionNamePrefix+'wp_srv_offset', 0)
        m_db_user = config.get(optionNamePrefix+'m_db_user')
        m_db_pass = config.get(optionNamePrefix+'m_db_pass')
        m_db_name = config.get(optionNamePrefix+'m_db_name')
        m_db_host = config.get(optionNamePrefix+'m_db_host')
        m_x_cmd = config.get(optionNamePrefix+'m_x_cmd')
        m_i_cmd = config.get(optionNamePrefix+'m_i_cmd')
        tbl_prefix = config.get(optionNamePrefix+'tbl_prefix', '')
        wp_user = config.get(optionNamePrefix+'wp_user', '')
        wp_pass = config.get(optionNamePrefix+'wp_pass', '')
        store_url = config.get(optionNamePrefix+'store_url', '')
        wc_api_key = config.get(optionNamePrefix+'wc_api_key')
        wc_api_secret = config.get(optionNamePrefix+'wc_api_secret')
        remote_export_folder = config.get(optionNamePrefix+'remote_export_folder', '')


    ### DISPLAY CONFIG ###
    if Registrar.DEBUG_MESSAGE:
        if testMode:
            print "testMode enabled"
        else:
            print "testMode disabled"
        if not download_slave:
            print "no download_slave"
        if not download_master:
            print "no download_master"
        if not update_master:
            print "not updating master"
        if not update_slave:
            print "not updating slave"
        if not do_filter:
            print "not doing filter"
        if not do_sync:
            print "not doing sync"
        if not do_post:
            print "not doing post"

    ### PROCESS CLASS PARAMS ###

    FieldGroup.do_post = do_post
    SyncUpdate.setGlobals( MASTER_NAME, SLAVE_NAME, merge_mode, DEFAULT_LAST_SYNC)
    TimeUtils.setWpSrvOffset(wp_srv_offset)

    ### SET UP DIRECTORIES ###

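    # os.mkdir only creates the leaf directory, so each parent must already
    # exist; os.makedirs would be needed for nested paths.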
    for path in (inFolder, outFolder, logFolder, srcFolder, pklFolder):
        if not os.path.exists(path):
            os.mkdir(path)

    fileSuffix = "_test" if testMode else ""
    fileSuffix += "_filter" if do_filter else ""
    m_x_filename = "act_x"+fileSuffix+"_"+importName+".csv"
    m_i_filename = "act_i"+fileSuffix+"_"+importName+".csv"
    s_x_filename = "wp_x"+fileSuffix+"_"+importName+".csv"
    remoteExportPath = os.path.join(remote_export_folder, m_x_filename)

    if download_master:
        maPath = os.path.join(inFolder, m_x_filename)
        maEncoding = "utf-8"
    else:
        # maPath = os.path.join(inFolder, "act_x_test_2016-05-03_23-01-48.csv")
        maPath = os.path.join(inFolder, master_file)
        # maPath = os.path.join(inFolder, "500-act-records-edited.csv")
        # maPath = os.path.join(inFolder, "500-act-records.csv")
        maEncoding = "utf8"
    if download_slave:
        saPath = os.path.join(inFolder, s_x_filename)
        saEncoding = "utf8"
    else:
        saPath = os.path.join(inFolder, slave_file)
        # saPath = os.path.join(inFolder, "500-wp-records-edited.csv")
        saEncoding = "utf8"

    moPath = os.path.join(outFolder, m_i_filename)
    repPath = os.path.join(outFolder, "usr_sync_report%s.html" % fileSuffix)
    WPresCsvPath = os.path.join(outFolder, "sync_report_wp%s.csv" % fileSuffix)
    masterResCsvPath = os.path.join(outFolder, "sync_report_act%s.csv" % fileSuffix)
    masterDeltaCsvPath = os.path.join(outFolder, "delta_report_act%s.csv" % fileSuffix)
    slaveDeltaCsvPath = os.path.join(outFolder, "delta_report_wp%s.csv" % fileSuffix)
    mFailPath = os.path.join(outFolder, "act_fails%s.csv" % fileSuffix)
    sFailPath = os.path.join(outFolder, "wp_fails%s.csv" % fileSuffix)
    sqlPath = os.path.join(srcFolder, "select_userdata_modtime.sql")
    # pklPath = os.path.join(pklFolder, "parser_pickle.pkl" )
    pklPath = os.path.join(pklFolder, "parser_pickle%s.pkl" % fileSuffix )
    logPath = os.path.join(logFolder, "log_%s.txt" % importName)
    zipPath = os.path.join(logFolder, "zip_%s.zip" % importName)

    ### PROCESS OTHER CONFIG ###

    assert store_url, "store url must not be blank"
    xmlrpc_uri = store_url + 'xmlrpc.php'
    json_uri = store_url + 'wp-json/wp/v2'
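    # NB: store_url is assumed to end in a trailing slash, since the endpoint
    # paths are appended to it directly.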

    actFields = ";".join(ColData_User.getACTImportCols())

    jsonConnectParams = {
        'json_uri': json_uri,
        'wp_user': wp_user,
        'wp_pass': wp_pass
    }

    wcApiParams = {
        'api_key':wc_api_key,
        'api_secret':wc_api_secret,
        'url':store_url
    }

    sqlConnectParams = {}

    actConnectParams = {
        'hostname':    m_ssh_host,
        'port':        m_ssh_port,
        'username':    m_ssh_user,
        'password':    m_ssh_pass,
    }

    actDbParams = {
        'db_x_exe':m_x_cmd,
        'db_i_exe':m_i_cmd,
        'db_name': m_db_name,
        'db_host': m_db_host,
        'db_user': m_db_user,
        'db_pass': m_db_pass,
        'fields' : actFields,
    }
    if sinceM: actDbParams['since'] = sinceM

    fsParams = {
        'importName': importName,
        'remote_export_folder': remote_export_folder,
        'inFolder': inFolder,
        'outFolder': outFolder
    }

    #########################################
    # Prepare Filter Data
    #########################################

    print debugUtils.hashify("PREPARE FILTER DATA"), timediff()

    if do_filter:
        filterFiles = {
            'users': userFile,
            'emails': emailFile,
            'cards': cardFile,
        }
        filterItems = {}
        for key, filterFile in filterFiles.items():
            if filterFile:
                try:
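                    # Each filter file yields one item per line; the regex
                    # strips surrounding whitespace and the trailing newline.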
                    with open(os.path.join(inFolder,filterFile) ) as filterFileObj:
                        filterItems[key] = [\
                            re.sub(r'\s*([^\s].*[^\s])\s*(?:\n)', r'\1', line)\
                            for line in filterFileObj\
                        ]
                except IOError, e:
                    SanitationUtils.safePrint("could not open %s file [%s] from %s" % (
                        key,
                        filterFile,
                        unicode(os.getcwd())
                    ))
                    raise e
        if sinceM:
            filterItems['sinceM'] = TimeUtils.wpStrptime(sinceM)
        if sinceS:
            filterItems['sinceS'] = TimeUtils.wpStrptime(sinceS)
Example #5
            filterItems['sinceM'] = TimeUtils.wpStrptime(sinceM)
        if sinceS:
            filterItems['sinceS'] = TimeUtils.wpStrptime(sinceS)
    else:
        filterItems = None

    print filterItems

    #########################################
    # Download / Generate Slave Parser Object
    #########################################

    print debugUtils.hashify("Download / Generate Slave Parser Object"), timediff()

    saParser = CSVParse_User(
        cols = ColData_User.getWPImportCols(),
        defaults = ColData_User.getDefaults(),
        filterItems = filterItems,
        limit = global_limit
    )
    if download_slave:
        SSHTunnelForwarderAddress = (ssh_host, ssh_port)
        SSHTunnelForwarderBindAddress = (remote_bind_host, remote_bind_port)
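        # Sanity-check both tunnel endpoints before opening the tunnel; eval()
        # here only resolves the local variables named in the list above.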
        for host in ['SSHTunnelForwarderAddress', 'SSHTunnelForwarderBindAddress']:
            try:
                check_address(eval(host))
            except Exception, e:
                assert not e, "Host must be valid: %s [%s = %s]" % (str(e), host, repr(eval(host)))
        SSHTunnelForwarderParams = {
            'ssh_address_or_host':SSHTunnelForwarderAddress,
            'ssh_password':ssh_pass,
Example #6
    def analyseRemote(self, parser, since=None, limit=None, filterItems=None):

        self.assertConnect()

        # srv_offset = self.dbParams.pop('srv_offset','')
        self.dbParams['port'] = self.service.local_bind_address[-1]
        cursor = pymysql.connect( **self.dbParams ).cursor()

        sm_where_clauses = []

        if since:
            since_t = TimeUtils.wpServerToLocalTime( TimeUtils.wpStrptime(since))
            assert since_t, "Time should be valid format, got %s" % since
            since_s = TimeUtils.wpTimeToString(since_t)

            sm_where_clauses.append( "tu.`time` > '%s'" % since_s )

        modtime_cols = [
            "tu.`user_id` as `user_id`",
            "MAX(tu.`time`) as `Edited in Wordpress`"
        ]

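        # Add one CASE column per tracked field: the most recent `time` at which
        # any of the field's aliases appears in the serialized `changed` payload.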
        for tracking_name, aliases in ColData_User.getWPTrackedCols().items():
            case_clauses = []
            for alias in aliases:
                case_clauses.append("LOCATE('\"%s\"', tu.`changed`) > 0" % alias)
            modtime_cols.append("MAX(CASE WHEN {case_clauses} THEN tu.`time` ELSE \"\" END) as `{tracking_name}`".format(
                case_clauses = " OR ".join(case_clauses),
                tracking_name = tracking_name
            ))

        if sm_where_clauses:
            sm_where_clause = 'WHERE ' + ' AND '.join(sm_where_clauses)
        else:
            sm_where_clause = ''

        sql_select_modtime = """\
    SELECT
        {modtime_cols}
    FROM
        {tbl_tu} tu
    {sm_where_clause}
    GROUP BY
        tu.`user_id`""".format(
            modtime_cols = ",\n\t\t".join(modtime_cols),
            tbl_tu=self.tbl_prefix+'tansync_updates',
            sm_where_clause = sm_where_clause,
        )

        # print sql_select_modtime

        if since:
            cursor.execute(sql_select_modtime)
            headers = [SanitationUtils.coerceUnicode(i[0]) for i in cursor.description]
            results = [[SanitationUtils.coerceUnicode(cell) for cell in row] for row in cursor]
            table = [headers] + results
            # print tabulate(table, headers='firstrow')
            # results = list(cursor)
            # if len(results) == 0:
            #     #nothing to analyse
            #     return
            # else:
            #     # n rows to analyse
            #     print "THERE ARE %d ITEMS" % len(results)

        wpDbMetaCols = ColData_User.getWPDBCols(meta=True)
        wpDbCoreCols = ColData_User.getWPDBCols(meta=False)

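        # Core columns come straight off the users table; meta columns are
        # pivoted out of usermeta rows via MAX(CASE WHEN um.meta_key ...).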
        userdata_cols = ",\n\t\t".join(filter(None,
            [
                "u.%s as `%s`" % (key, name)\
                    for key, name in wpDbCoreCols.items()
            ] + [
                "MAX(CASE WHEN um.meta_key = '%s' THEN um.meta_value ELSE \"\" END) as `%s`" % (key, name) \
                    for key, name in wpDbMetaCols.items()
            ]
        ))

        # wpCols = OrderedDict(filter( lambda (k, v): not v.get('wp',{}).get('generated'), ColData_User.getWPCols().items()))

        # assert all([
        #     'Wordpress ID' in wpCols.keys(),
        #     wpCols['Wordpress ID'].get('wp', {}).get('key') == 'ID',
        #     wpCols['Wordpress ID'].get('wp', {}).get('final')
        # ]), 'ColData should be configured correctly'

        # userdata_cols2 = ",\n\t\t".join(filter(None,[
        #     ("MAX(CASE WHEN um.meta_key = '%s' THEN um.meta_value ELSE \"\" END) as `%s`" if data['wp'].get('meta') else "u.%s as `%s`") % (data['wp']['key'], col)\
        #     for col, data in wpCols.items()
        # ]))

        # print " -> COLS1: ", userdata_cols
        # print " -> COLS2: ", userdata_cols2

        # print userdata_cols

        sql_select_user = """
    SELECT
        {usr_cols}
    FROM
        {tbl_u} u
        LEFT JOIN {tbl_um} um
        ON ( um.`user_id` = u.`ID`)
    GROUP BY
        u.`ID`""".format(
            tbl_u = self.tbl_prefix+'users',
            tbl_um = self.tbl_prefix+'usermeta',
            usr_cols = userdata_cols,
        )

        um_on_clauses = []
        um_where_clauses = []

        um_on_clauses.append('ud.`Wordpress ID` = lu.`user_id`')

        if filterItems:
            if 'cards' in filterItems:
                um_where_clauses.append( "ud.`MYOB Card ID` IN (%s)" % (','.join([
                    '"%s"' % card for card in filterItems['cards']
                ])))

        if um_on_clauses:
            um_on_clause = ' AND '.join([
                "(%s)" % clause for clause in um_on_clauses
            ])
        else:
            um_on_clause = ''

        if um_where_clauses:
            um_where_clause = 'WHERE ' + ' AND '.join([
                "(%s)" % clause for clause in um_where_clauses
            ])
        else:
            um_where_clause = ''



        # print sql_select_user

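        # Join the userdata and modtime subqueries: INNER when a since-filter
        # restricted the modtime rows (only recently-changed users matter),
        # LEFT otherwise so that every user is kept.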
        sql_select_user_modtime = """
SELECT *
FROM
(
    {sql_ud}
) as ud
{join_type} JOIN
(
    {sql_mt}
) as lu
ON {um_on_clause}
{um_where_clause}
{limit_clause};""".format(
            sql_ud = sql_select_user,
            sql_mt = sql_select_modtime,
            join_type = "INNER" if sm_where_clause else "LEFT",
            limit_clause = "LIMIT %d" % limit if limit else "",
            um_on_clause = um_on_clause,
            um_where_clause = um_where_clause
        )

        if Registrar.DEBUG_CLIENT: Registrar.registerMessage(sql_select_user_modtime)

        cursor.execute(sql_select_user_modtime)

        headers = [SanitationUtils.coerceUnicode(i[0]) for i in cursor.description]

        results = [[SanitationUtils.coerceUnicode(cell) for cell in row] for row in cursor]

        rows = [headers] + results

        # print rows

        if results:
            print "there are %d results" % len(results)
            parser.analyseRows(rows)
Example #7
    ssh_user = config.get('ssh_user')
    ssh_pass = config.get('ssh_pass')
    ssh_host = config.get('ssh_host')
    ssh_port = config.get('ssh_port', 22)
    remote_bind_host = config.get('remote_bind_host', '127.0.0.1')
    remote_bind_port = config.get('remote_bind_port', 3306)
    db_host = config.get('db_host', '127.0.0.1')
    db_user = config.get('db_user')
    db_pass = config.get('db_pass')
    db_name = config.get('db_name')
    tbl_prefix = config.get('tbl_prefix', '')

sqlPath = os.path.join(srcFolder, "select_userdata_modtime.sql")

colData = ColData_User()

saRows = []

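# Open an SSH tunnel to the remote MySQL port; the MySQL connection below is
# made through server.local_bind_address rather than the remote host directly.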
with \
    SSHTunnelForwarder(
        (ssh_host, ssh_port),
        ssh_password=ssh_pass,
        ssh_username=ssh_user,
        remote_bind_address=(remote_bind_host, remote_bind_port)
    ) as server, \
    open(sqlPath) as sqlFile:

    # server.start()
    print server.local_bind_address
    conn = MySQLdb.connect(