Example #1
    def analyseRemote(self, parser, since=None, limit=None):
        if since: pass  # TODO: implement the since parameter
        resultCount = 0

        # apiIterator = self.ApiIterator(self.service, self.endpoint_plural)
        apiIterator = self.getIterator(self.endpoint_plural)
        progressCounter = None
        for page in apiIterator:
            if progressCounter is None:
                total_items = apiIterator.total_items
                if limit:
                    total_items = min(limit, total_items)
                progressCounter = ProgressCounter(total_items)
            progressCounter.maybePrintUpdate(resultCount)

            # if Registrar.DEBUG_API:
            #     Registrar.registerMessage('processing page: %s' % str(page))
            if self.endpoint_plural in page:
                for page_item in page.get(self.endpoint_plural):

                    parser.analyseWpApiObj(page_item)
                    resultCount += 1
                    if limit and resultCount >= limit:
                        if Registrar.DEBUG_API:
                            Registrar.registerMessage('reached limit, exiting')
                        return
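
All three examples feed a running count into ProgressCounter.maybePrintUpdate. The project's own ProgressCounter is not shown on this page, so the sketch below is only an assumed, minimal version of that interface: it throttles output so a tight loop prints at most one progress line per interval.

import sys
import time

class ProgressCounter(object):
    """Sketch only: stands in for the real (unshown) ProgressCounter."""
    def __init__(self, total, interval=1.0):
        self.total = total
        self.interval = interval      # minimum seconds between printed updates
        self._last_print = 0.0

    def maybePrintUpdate(self, count):
        # print at most once per interval so large loops stay readable
        now = time.time()
        if now - self._last_print >= self.interval:
            self._last_print = now
            sys.stdout.write("processed %d of %d\n" % (count, self.total))
            sys.stdout.flush()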
Example #2

        # TODO: further sort emailMatcher

        print debugUtils.hashify("BEGINNING MERGE (%d)" % len(globalMatches))
        print timediff()


        syncCols = ColData_User.getSyncCols()

        if Registrar.DEBUG_PROGRESS:
            syncProgressCounter = ProgressCounter(len(globalMatches))

        for count, match in enumerate(globalMatches):
            if Registrar.DEBUG_PROGRESS:
                syncProgressCounter.maybePrintUpdate(count)

            mObject = match.mObjects[0]
            sObject = match.sObjects[0]

            syncUpdate = SyncUpdate_Usr(mObject, sObject)
            syncUpdate.update(syncCols)

            # SanitationUtils.safePrint( syncUpdate.tabulate(tablefmt = 'simple'))

            if syncUpdate.mUpdated and syncUpdate.mDeltas:
                insort(mDeltaUpdates, syncUpdate)

            if syncUpdate.sUpdated and syncUpdate.sDeltas:
                insort(sDeltaUpdates, syncUpdate)
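
Here insort is presumably bisect.insort, which inserts each SyncUpdate_Usr at its sorted position so mDeltaUpdates and sDeltaUpdates stay ordered without a separate sort; that relies on the SyncUpdate objects being comparable. A minimal illustration of the same pattern, with plain tuples standing in for the update objects:

from bisect import insort

deltaUpdates = []
# insort does a binary search for the insertion point, then a list insert,
# so the list is always sorted as items arrive
for update in [(3, 'carol'), (1, 'alice'), (2, 'bob')]:
    insort(deltaUpdates, update)

print(deltaUpdates)  # [(1, 'alice'), (2, 'bob'), (3, 'carol')]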
Example #3
class UsrSyncClient_SSH_ACT(SyncClient_Abstract):
    def __init__(self, connectParams, dbParams, fsParams):
        self.dbParams = dbParams
        self.fsParams = fsParams
        super(UsrSyncClient_SSH_ACT, self).__init__(connectParams)

    def attemptConnect(self):
        self.service = paramiko.SSHClient()
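        # AutoAddPolicy accepts unknown host keys instead of raising an error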
        self.service.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        self.service.connect(**self.connectParams)

    @property
    def connectionReady(self):
        # use paramiko's public accessors rather than the private _transport attribute
        transport = self.service.get_transport() if self.service else None
        return bool(transport and transport.is_active())

    def execSilentCommandAssert(self, command):
        self.assertConnect()
        stdin, stdout, stderr = self.service.exec_command(command)
        if stdin: pass  # reference stdin only to silence unused-variable warnings
        possible_errors = stdout.readlines() + stderr.readlines()
        for error in possible_errors:
            if re.match("^Countries.*", error):
                print error
                continue
            assert not error, "command <%s> returned errors: %s" % (
                SanitationUtils.coerceUnicode(command),
                SanitationUtils.coerceUnicode(error)
            )

    def putFile(self, localPath, remotePath):
        self.assertConnect()

        # remoteDir, remoteFileName = os.path.split(remotePath)
        remoteDir = os.path.split(remotePath)[0]

        sftpClient = self.service.open_sftp()
        try:
            if remoteDir:
                try:
                    sftpClient.stat(remoteDir)
                except IOError:
                    # remote directory doesn't exist yet, so create it
                    sftpClient.mkdir(remoteDir)
            sftpClient.put(localPath, remotePath)
            fstat = sftpClient.stat(remotePath)
        finally:
            sftpClient.close()

        if not fstat:
            raise UserWarning("could not stat remote file: %s" % remotePath)

    def assertRemoteFileExists(self, remotePath, assertion=""):
        self.assertConnect()

        # stdin, stdout, stderr = self.service.exec_command('stat "%s"' % remotePath)
        stderr = self.service.exec_command('stat "%s"' % remotePath)[2]
        possible_errors = stderr.readlines()
        assert not possible_errors, " ".join([assertion, "stat returned possible errors", str(possible_errors)])

    @classmethod
    def printFileProgress(cls, completed, total):
        # paramiko's SFTP callbacks pass (bytes transferred so far, total bytes)
        if not hasattr(cls, 'progressCounter'):
            cls.progressCounter = ProgressCounter(total)
        cls.progressCounter.maybePrintUpdate(completed)

    def getDeleteFile(self, remotePath, localPath):
        self.assertRemoteFileExists(remotePath)

        # close the SFTP session even if the download or removal fails
        sftpClient = self.service.open_sftp()
        try:
            sftpClient.get(remotePath, localPath, self.printFileProgress)
            sftpClient.remove(remotePath)
        finally:
            sftpClient.close()

    def removeRemoteFile(self, remotePath):
        self.assertRemoteFileExists(remotePath)
        self.service.exec_command('rm "%s"' % remotePath)

    def uploadChanges(self, user_pkey, updates=None):
        if not updates:
            return
        # print "UPDATES:", updates

        self.assertConnect()

        # make sure 'MYOB Card ID' ends up as the first column of the upload
        if 'MYOB Card ID' in updates:
            del updates['MYOB Card ID']

        updates = OrderedDict(
            [('MYOB Card ID', user_pkey)] + updates.items()
        )

        importName = self.fsParams['importName']
        outFolder = self.fsParams['outFolder']
        remote_export_folder = self.fsParams['remote_export_folder']
        fileRoot = 'act_i_' + importName + '_' + user_pkey
        fileName = fileRoot + '.csv'
        localPath = os.path.join(outFolder, fileName)
        remotePath = os.path.join(remote_export_folder, fileName)
        importedFile = os.path.join(remote_export_folder, fileRoot + '.imported')
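        # note: os.path.join uses the local OS separator to build these remote
        # paths; if the client and remote host use different path conventions
        # (an assumption either way), posixpath.join or ntpath.join would make
        # the intent explicit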

        # binary mode: unicodecsv writes utf8-encoded bytes
        with open(localPath, 'wb') as outFile:
            csvdialect = UnicodeCsvDialectUtils.act_out
            dictwriter = unicodecsv.DictWriter(
                outFile,
                dialect=csvdialect,
                fieldnames=updates.keys(),
                encoding='utf8',
                extrasaction='ignore',
            )
            dictwriter.writeheader()
            dictwriter.writerow(updates)

        self.putFile(localPath, remotePath)

        tokens = [
            'cd ' + remote_export_folder + ';',
            '{db_i_exe} "-d{db_name}" "-h{db_host}" "-u{db_user}" "-p{db_pass}"'.format(
                **self.dbParams
            ),
            ('"%s"' % fileName) if fileName else None
        ]

        command = " ".join( token for token in tokens if token)

        # command = " ".join(filter(None,))
        #
        # command = " ".join(filter(None,[
        #     'cd ' + remote_export_folder + ';',
        #     '{db_i_exe} "-d{db_name}" "-h{db_host}" "-u{db_user}" "-p{db_pass}"'.format(
        #         **self.dbParams
        #     ),
        #     ('"%s"' % fileName) if fileName else None
        # ]))

        self.execSilentCommandAssert(command)

        # the import script is expected to leave behind a .imported marker;
        # failing to find (and remove) it means the import did not run
        try:
            self.removeRemoteFile(importedFile)
        except Exception:
            raise Exception("import didn't produce a .imported file")

    def analyseRemote(self, parser, since=None, limit=None):
        if not since:
            since = '1970-01-01'
        if limit:
            # todo: implement limit
            # this gets rid of unused argument warnings
            pass

        importName = self.fsParams['importName']
        remote_export_folder = self.fsParams['remote_export_folder']
        fileRoot = 'act_x_' + importName
        fileName = fileRoot + '.csv'
        inFolder = self.fsParams['inFolder']
        localPath = os.path.join(inFolder, fileName)
        remotePath = os.path.join(remote_export_folder, fileName)

        tokens = [
            'cd ' + remote_export_folder + ';',
            '{db_x_exe} "-d{db_name}" "-h{db_host}" "-u{db_user}" "-p{db_pass}" -c"{fields}"'.format(
                **self.dbParams
            ),
            '-s"%s"' % since,
            '"%s"' % fileName
        ]

        command = " ".join([token for token in tokens if token])

        # command = " ".join(filter(None,[
        #     'cd ' + remote_export_folder + ';',
        #     '{db_x_exe} "-d{db_name}" "-h{db_host}" "-u{db_user}" "-p{db_pass}" -c"{fields}"'.format(
        #         **self.dbParams
        #     ),
        #     '-s"%s"' % since,
        #     '"%s"' % fileName
        # ]))

        print "executing export command..."
        self.execSilentCommandAssert(command)
        print "donloading file..."
        self.getDeleteFile(remotePath, localPath)
        print "analysing file..."
        parser.analyseFile(localPath, dialect_suggestion='act_out')
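
For orientation, the sketch below shows how this client might be constructed and driven. Every concrete value is a placeholder and StubParser is invented for the example; only the dictionary keys are inferred from the code above (connectParams goes straight to paramiko.SSHClient.connect, dbParams feeds the command format strings, fsParams the path handling).

connectParams = {
    'hostname': 'act.example.com',   # placeholder host and credentials
    'username': 'act_user',
    'password': 'secret',
}
dbParams = {
    'db_i_exe': 'act_import.exe', 'db_x_exe': 'act_export.exe',  # placeholder tool names
    'db_name': 'ActDb', 'db_host': 'localhost',
    'db_user': 'dbuser', 'db_pass': 'dbpass',
    'fields': 'MYOB Card ID, E-mail',
}
fsParams = {
    'importName': '2016-06-01',
    'inFolder': 'input', 'outFolder': 'output',
    'remote_export_folder': 'exports',
}

class StubParser(object):
    # stand-in for the real user parser; only the method analyseRemote needs
    def analyseFile(self, path, dialect_suggestion=None):
        print("would parse %s with dialect %s" % (path, dialect_suggestion))

client = UsrSyncClient_SSH_ACT(connectParams, dbParams, fsParams)
client.attemptConnect()
assert client.connectionReady
client.analyseRemote(StubParser(), since='2016-01-01')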