Code Example #1
def updateContentTypesAndGlobalFields(apiKey, token, region, folder,
                                      extensionMapper):
    '''
    Update content types and global fields with the correct, full schema
    '''
    config.logging.info(
        '{}Updating Content Types with correct schema{}'.format(
            config.BOLD, config.END))
    ctFolder = config.dataRootFolder + config.stackRootFolder + folder + config.folderNames[
        'contentTypes']
    for ctFile in config.readDirIfExists(ctFolder):
        contentType = config.readFromJsonFile(ctFolder + ctFile)
        if contentType:
            contentType = replaceFromMapper(extensionMapper, contentType,
                                            'content types')
            body = {'content_type': contentType}
            # cma.deleteContentType(apiKey, token, region, contentType['uid'])
            # ctUpdate = cma.createContentType(apiKey, token, body, region)
            ctUpdate = cma.updateContentType(apiKey, token, body, region,
                                             contentType['uid'])
            if ctUpdate:
                config.logging.info('Content Type {} updated'.format(
                    contentType['title']))
            else:
                config.logging.critical(
                    '{}Content Type {} NOT updated!{}'.format(
                        config.RED, contentType['title'], config.END))
    config.logging.info('{}Finished updating Content Types{}'.format(
        config.BOLD, config.END))
    config.logging.info(
        '{}Updating Global Fields with correct schema{}'.format(
            config.BOLD, config.END))
    gfFolder = config.dataRootFolder + config.stackRootFolder + folder + config.folderNames[
        'globalFields']
    for gfFile in config.readDirIfExists(gfFolder):
        globalField = config.readFromJsonFile(gfFolder + gfFile)
        if globalField:
            globalField = replaceFromMapper(extensionMapper, globalField,
                                            'global fields')
            body = {'global_field': globalField}
            # cma.deleteGlobalField(apiKey, token, region, globalField['uid'])
            # gfUpdate = cma.createGlobalField(apiKey, token, body, region)
            gfUpdate = cma.updateGlobalField(apiKey, token, body, region,
                                             globalField['uid'])
            if gfUpdate:
                config.logging.info('Global Field {} updated'.format(
                    globalField['title']))
            else:
                config.logging.critical(
                    '{}Global Field {} NOT updated!{}'.format(
                        config.RED, globalField['title'], config.END))
    config.logging.info('{}Finished updating Global Fields{}'.format(
        config.BOLD, config.END))
Code Example #2
def importEnvironments(apiKey, authToken, region, folder):
    '''
    Importing environments
    '''
    config.logging.info('{}Importing environments{}'.format(
        config.BOLD, config.END))
    f = config.dataRootFolder + config.stackRootFolder + folder + config.folderNames[
        'environments']
    mapDict = {}
    for envFile in config.readDirIfExists(f):
        environment = config.readFromJsonFile(f + envFile)
        if environment:
            body = {'environment': environment}
            environmentImport = cma.createEnvironment(apiKey, authToken, body,
                                                      region)
            if environmentImport:
                config.logging.info('Environment {} imported'.format(
                    environment['name']))
                mapDict = addToMapper(mapDict, environment['uid'],
                                      environmentImport['environment']['uid'])
        else:
            config.logging.error(
                '{}Unable to read from Environments file {}{}'.format(
                    config.RED, envFile, config.END))
    return createMapperFile(apiKey, folder, mapDict, 'environments')
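The addToMapper and createMapperFile helpers are used throughout these examples but not shown. Hedged sketches, under the assumption that the mapper is a plain dict persisted as JSON (the file path layout and a config.writeToJsonFile helper are assumptions):

def addToMapper(mapDict, oldUid, newUid):
    # Record one exported-uid -> imported-uid pair
    mapDict[oldUid] = newUid
    return mapDict

def createMapperFile(apiKey, folder, mapDict, name):
    # Hypothetical: persist the mapping so later import steps can reuse it
    mapperFile = config.dataRootFolder + config.stackRootFolder + folder + name + '_mapper.json'  # assumed layout
    config.writeToJsonFile(mapDict, mapperFile)  # assumed helper
    return mapDict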
Code Example #3
def importPublishingRules(apiKey, authToken, region, folder, mappers):
    '''
    Importing publishing rules
    '''
    config.logging.info('{}Importing publishing rules{}'.format(
        config.BOLD, config.END))
    f = config.dataRootFolder + config.stackRootFolder + folder + config.folderNames[
        'publishingRules']
    count = 1
    for pubFile in config.readDirIfExists(f):
        publishingRule = config.readFromJsonFile(f + pubFile)
        if not publishingRule:
            config.logging.error(
                '{}Unable to read from Publishing Rule file {}{}'.format(
                    config.RED, pubFile, config.END))
            continue
        for key, value in mappers.items():
            publishingRule = replaceFromMapper(
                value, publishingRule,
                key)  # role uids from old and new stack mapped
        publishingRuleImport = cma.createPublishingRule(
            apiKey, authToken, {'publishing_rule': publishingRule}, region)
        if publishingRuleImport:
            config.logging.info('Publishing Rule {} imported'.format(count))
        else:
            config.logging.error(
                '{}Unable to import Publishing Rule from file {}{}'.format(
                    config.RED, pubFile, config.END))
        count += 1
    return True
Code Example #4
def importExtensions(apiKey, authToken, region, folder):
    '''
    Importing extensions
    '''
    config.logging.info('{}Importing extensions{}'.format(
        config.BOLD, config.END))
    f = config.dataRootFolder + config.stackRootFolder + folder + config.folderNames[
        'extensions']
    mapDict = {}
    for extFile in config.readDirIfExists(f):
        extension = config.readFromJsonFile(f + extFile)
        if extension:
            body = {'extension': extension}
            # if 'scope' in extension: # It's a custom widget - We cannot import it because it uses content types
            #     config.logging.info('Custom Widget detected. Delaying import.')
            # else:
            extensionImport = cma.createExtension(apiKey, authToken, body,
                                                  region)
            if extensionImport:
                config.logging.info('Extension {} imported'.format(
                    extension['title']))
                mapDict = addToMapper(mapDict, extension['uid'],
                                      extensionImport['extension']['uid'])
        else:
            config.logging.error(
                '{}Unable to read from Extension file {}{}'.format(
                    config.RED, extFile, config.END))
    return createMapperFile(apiKey, folder, mapDict, 'extensions')
Code Example #5
def importAnAsset(region, authToken, apiKey, metaData, assetFile,
                  folderMapper):
    '''
    Create Asset in Import Stack
    region is the full base URL
    metaData and assetFile are full paths to the files on disk, OR None
    folderMapper is a dict object
    '''
    tmpFolder = '.tmp/'
    if metaData:
        metaData = config.readFromJsonFile(metaData)
    if folderMapper and metaData:
        metaData = importStructure.replaceFromMapper(folderMapper, metaData,
                                                     'assets')
    if not assetFile:
        config.checkDir(tmpFolder)
        assetFile = config.downloadFileToDisk(metaData['asset']['url'],
                                              tmpFolder,
                                              metaData['asset']['filename'])
        if assetFile:
            assetFile = tmpFolder + metaData['asset']['filename']
    config.logging.debug('Region {}'.format(region))
    config.logging.debug('authToken {}'.format(authToken))
    config.logging.debug('apiKey {}'.format(apiKey))
    config.logging.debug('assetFile {}'.format(assetFile))
    config.logging.debug('metaData {}'.format(metaData))
    config.logging.debug('Filename {}'.format(metaData['asset']['filename']))
    create = cma.createAsset(region, authToken, apiKey, assetFile, metaData,
                             metaData['asset']['filename'])
    if create and (tmpFolder in assetFile):  # Cleaning from tmp folder
        os.remove(assetFile)
    return create
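An illustrative call of importAnAsset, with every value a placeholder: passing assetFile=None exercises the download branch, which fetches the binary from metaData['asset']['url'] into .tmp/ before uploading.

created = importAnAsset('https://api.contentstack.io/v3/',  # region as full URL
                        authToken, apiKey,
                        'data/stack/assets/blt0123456789/index.json',  # metadata JSON on disk
                        None,  # no local binary: force the download branch
                        folderMapper)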
Code Example #6
def importWebhooks(apiKey, authToken, region, folder):
    '''
    Importing webhooks
    '''
    config.logging.info('{}Importing webhooks{}'.format(
        config.BOLD, config.END))
    if config.disableWebhooks:
        config.logging.info(
            '{}All Enabled Webhooks will be disabled on import{}'.format(
                config.BOLD, config.END))
    else:
        config.logging.info(
            '{}Webhooks will be enabled on import. Please make sure they do not trigger on live environments{}'
            .format(config.YELLOW, config.END))
    f = config.dataRootFolder + config.stackRootFolder + folder + config.folderNames[
        'webhooks']
    for whfile in config.readDirIfExists(f):
        webhook = config.readFromJsonFile(f + whfile)
        if not webhook:
            config.logging.error(
                '{}Unable to read from Webhook file {}{}'.format(
                    config.RED, whfile, config.END))
            continue
        if config.disableWebhooks:
            webhook['disabled'] = True
        webhookImport = cma.createWebhook(apiKey, authToken,
                                          {'webhook': webhook}, region)
        if webhookImport:
            config.logging.info('Webhook {} imported'.format(webhook['name']))
        else:
            config.logging.error(
                '{}Unable to import Webhook {}{}'.format(
                    config.RED, webhook['name'], config.END))
    return True
Code Example #7
def importEntries(contentTypes,
                  languages,
                  folder,
                  region,
                  token,
                  apiKey,
                  assetMapper=None):
    '''
    Importing Entries
    '''
    entryFolder = folder + config.folderNames['entries']
    mapDict = {}
    for contentType in contentTypes:
        ctFolder = entryFolder + contentType + '/'
        config.logging.info('{}Importing Entries of type: {}{}'.format(
            config.BOLD, contentType, config.END))
        for language in languages:
            languageFile = ctFolder + language + '.json'
            if os.path.isfile(languageFile):
                config.logging.info(
                    '{}Importing Entries in Language: {}{}'.format(
                        config.BOLD, language, config.END))
                entries = config.readFromJsonFile(languageFile)
                for entry in entries['entries']:
                    if (entry['uid'] not in mapDict) and (entry['locale']
                                                          == language):
                        if assetMapper:
                            entry = replaceAssetFromMapper(
                                entry, assetMapper, 'entry assets')
                        create = cma.createEntry(apiKey, token, entry, region,
                                                 contentType, language)
                        if create:
                            config.logging.info(
                                'Entry Created - Title: {} - Language: {}'.
                                format(create['entry']['title'], language))
                            mapDict = importStructure.addToMapper(
                                mapDict, entry['uid'], create['entry']['uid'])
                    elif (entry['uid'] in mapDict) and (entry['locale']
                                                        == language):
                        if assetMapper:
                            entry = replaceAssetFromMapper(
                                entry, assetMapper, 'entry assets'
                            )  #importStructure.replaceFromMapper(assetMapper, entries, 'entries')
                        update = cma.updateEntry(apiKey, token, entry, region,
                                                 contentType, language,
                                                 mapDict[entry['uid']])
                        if update:
                            config.logging.debug(
                                'Entry Updated - Title: {} - Language: {}'.
                                format(update['entry']['title'], language))
            else:
                config.logging.debug(
                    'No entries in language: {}'.format(language))
    updateReferences(contentTypes, mapDict, languages, folder, region, token,
                     apiKey, assetMapper)
    return importStructure.createMapperFile(apiKey,
                                            folder.split('/')[-2], mapDict,
                                            'entries')
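A sketch of how importEntries might be driven, assuming the content type and language lists were collected from the export and assetMapper came from the asset import step (all values illustrative):

contentTypes = ['blog_post', 'author']  # assumed: uids collected from the export
languages = ['en-us', 'de-de']          # assumed: master locale first
entryMapper = importEntries(contentTypes, languages, 'data/stack/', region,
                            token, apiKey, assetMapper=assetMapper)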
Code Example #8
def importRoles(apiKey, authToken, region, folder, languageMapper,
                environmentMapper):
    '''
    Importing roles
    '''
    config.logging.info('{}Importing roles{}'.format(config.BOLD, config.END))
    f = config.dataRootFolder + config.stackRootFolder + folder + config.folderNames[
        'roles']

    # # Getting current roles in import stack - just to get the uids of the built-in roles
    # currentRoles = cma.getAllRoles(apiKey, authToken, region)
    # roleUids = {
    #     'Developer': None,
    #     'Content Manager': None
    # }
    # if currentRoles: # Getting the uids for built-in roles to be able to update them
    #     for role in currentRoles['roles']:
    #         if role['name'] == 'Developer':
    #             roleUids['Developer'] = role['uid']
    #         elif role['name'] == 'Content Manager':
    #             roleUids['Content Manager'] = role['uid']

    mapDict = {}
    for roleFile in config.readDirIfExists(f):
        if roleFile not in (
                'Admin.json', 'Content Manager.json', 'Developer.json'
        ):  # Skipping update in built-in roles - Because it's buggy
            role = config.readFromJsonFile(f + roleFile)
            if role:
                del role['permissions']
                # Map old locale/environment uids in the role rules to the
                # new stack's uids before creating the role
                if 'rules' in role:
                    role['rules'] = replaceRoleRuleUids(role['rules'],
                                                        languageMapper,
                                                        environmentMapper)
                roleImport = cma.createRole(apiKey, authToken, {'role': role},
                                            region)
                if roleImport:
                    try:
                        mapDict = addToMapper(mapDict, role['uid'],
                                              roleImport['role']['uid'])
                    except KeyError:
                        config.logging.debug(
                            'Not able to map uid for role {}'.format(
                                role['name']))
                    config.logging.info('{} role imported'.format(
                        role['name']))
            else:
                config.logging.error(
                    '{}Unable to read from Role file {}{}'.format(
                        config.RED, roleFile, config.END))
        else:
            config.logging.info(
                'Skipping system role import: {}'.format(roleFile))
    return createMapperFile(apiKey, folder, mapDict, 'roles')
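replaceRoleRuleUids is not part of this excerpt. A hedged sketch of what it presumably does, assuming each rule carries a module name plus a list of uids (the exact rule shape is an assumption):

def replaceRoleRuleUids(rules, languageMapper, environmentMapper):
    # Hypothetical sketch: rewrite locale/environment uids inside role rules
    # using the mappers produced by the language and environment imports
    for rule in rules:
        if rule.get('module') == 'locale':
            rule['locales'] = [languageMapper.get(uid, uid)
                               for uid in rule.get('locales', [])]
        elif rule.get('module') == 'environment':
            rule['environments'] = [environmentMapper.get(uid, uid)
                                    for uid in rule.get('environments', [])]
    return rules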
Code Example #9
def importLabels(apiKey, authToken, region, folder):
    '''
    Importing labels
    '''
    config.logging.info('{}Importing labels{}'.format(config.BOLD, config.END))
    f = config.dataRootFolder + config.stackRootFolder + folder + config.folderNames[
        'labels']
    delayedList = []
    mapDict = {}
    for labFile in config.readDirIfExists(f):
        label = config.readFromJsonFile(f + labFile)
        if label:
            if label['parent']:
                delayedList.append(label)
            else:
                mapDict = importLabel(mapDict, apiKey, authToken, label,
                                      region)
        else:
            config.logging.error(
                '{}Unable to read from Label file {}{}'.format(
                    config.RED, labFile, config.END))
    counter = 1
    while delayedList and counter <= len(
            delayedList
    ) * 5:  # If we need to try this too often, we stop after len*5 times
        label = delayedList[0]
        try:
            newParents = []
            for parent in label['parent']:
                newParents.append(mapDict[parent])
            label['parent'] = newParents
            mapDict = importLabel(mapDict, apiKey, authToken, label, region)
        except KeyError:
            config.logging.debug('Unable to find parent label for {}'.format(
                label['name']))
            delayedList.append(label)
        delayedList.pop(0)
        counter += 1

    # If some labels are still in that list, we just import them without the hierarchy
    if delayedList:
        config.logging.warning(
            '{}Unable to import all labels with correct parents. Importing them in the top level.{}'
            .format(config.YELLOW, config.END))
        labelNames = []
        for label in delayedList:
            label['parent'] = []
            labelNames.append(label['name'])
            mapDict = importLabel(mapDict, apiKey, authToken, label, region)

        config.logging.warning(
            '{}Labels imported without parents: {}{}'.format(
                config.YELLOW, ', '.join(labelNames), config.END))

    return createMapperFile(apiKey, folder, mapDict, 'labels')
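The importLabel helper used above is not shown. A minimal sketch, assuming cma exposes a createLabel call mirroring the other create* functions:

def importLabel(mapDict, apiKey, authToken, label, region):
    # Hypothetical sketch: create one label and record the uid mapping
    labelImport = cma.createLabel(apiKey, authToken, {'label': label}, region)  # assumed cma call
    if labelImport:
        mapDict = addToMapper(mapDict, label['uid'],
                              labelImport['label']['uid'])
        config.logging.info('Label {} imported'.format(label['name']))
    return mapDict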
Code Example #10
def readExportReport(folder):
    '''
    Reading the export report before importing content
    '''
    folder = config.dataRootFolder + config.stackRootFolder + folder
    exportReport = folder + config.exportReportFile
    if not os.path.isfile(exportReport):
        config.logging.error(
            '{}Unable to read Export Report ({}). Not possible to import content from this export folder.{}'
            .format(config.RED, exportReport, config.END))
        return None
    exportReport = config.readFromJsonFile(exportReport)
    return exportReport
Code Example #11
def importFolders(folder, apiKey, token, region):
    '''
    Creating folders
    '''
    folderFile = folder + config.folderNames['assets'] + 'folders.json'
    if os.path.isfile(folderFile):
        mapDict = {}
        folderData = config.readFromJsonFile(folderFile)
        config.logging.info('Found Folders in Export')
        folderExport = folderData['assets']
        maxTries = len(folderExport) * 5
        tryNo = 0
        while folderExport and tryNo <= maxTries:
            tryNo += 1
            if tryNo == maxTries:
                config.logging.warning(
                    '{}Last possible try importing folders! (Try number: {}){}'
                    .format(config.YELLOW, tryNo, config.END))
            if 'parent_uid' in folderExport[0]:
                parentUid = folderExport[0]['parent_uid']
            else:
                parentUid = None
            if parentUid:
                if parentUid not in mapDict:
                    folderExport.append(folderExport[0])
                    folderExport.pop(0)
                    continue
                else:
                    parentUid = mapDict[parentUid]
            importedFolder = cma.createFolder(apiKey, token, region,
                                              folderExport[0]['name'],
                                              parentUid)
            if importedFolder:
                config.logging.info('Folder Imported: {}'.format(
                    importedFolder['asset']['name']))
                mapDict = importStructure.addToMapper(
                    mapDict, folderExport[0]['uid'],
                    importedFolder['asset']['uid'])
                folderExport.pop(0)
                continue
            folderExport.append(folderExport[0])
            folderExport.pop(0)
        return importStructure.createMapperFile(apiKey,
                                                folder.split('/')[-2], mapDict,
                                                'folders')
    config.logging.info('No Folders Found in Export')
    return None
Code Example #12
def updateReferences(contentTypes, mapDict, languages, folder, region, token,
                     apiKey, assetMapper):
    '''
    Iterating through all the entries and fixing references.
    ToDo: do this while creating entries (in the first iteration); this was quicker to code for the POC.
    '''
    config.logging.info(
        '{}Updating entries references with correct uids{}'.format(
            config.BOLD, config.END))
    entryFolder = folder + config.folderNames['entries']
    for contentType in contentTypes:
        ctFolder = entryFolder + contentType + '/'
        for language in languages:
            languageFile = ctFolder + language + '.json'
            if os.path.isfile(languageFile):
                entries = config.readFromJsonFile(languageFile)
                for entry in entries['entries']:
                    try:
                        uid = mapDict[entry['uid']]
                    except KeyError:
                        config.logging.error(
                            '{}Unable to update entry - Entry not found in import - From Export: {}{}'
                            .format(config.RED, entry['uid'], config.END))
                        continue
                    if entry['locale'] == language:
                        updateContentstack = False
                        for exportUid, importUid in mapDict.items():
                            entry['uid'] = ''  # Blank the uid so the reference search does not match the entry itself
                            entry, updateContentstack = replaceEntryReference(
                                entry, exportUid, importUid,
                                updateContentstack)
                        if updateContentstack:
                            entry = fixAssetReferences(entry, assetMapper)
                            update = cma.updateEntry(apiKey, token, entry,
                                                     region, contentType,
                                                     language, uid)
                            if update:
                                config.logging.info(
                                    'Updated References - {} {} {}'.format(
                                        contentType, language, uid))
                            else:
                                config.logging.error(
                                    '{}Unable to Update Entry - {} {} {}{}'.
                                    format(config.RED, contentType, language,
                                           uid, config.END))
Code Example #13
def getEnvironmentsFromExport(folder):
    '''
    Re-usable function that returns two objects from the export.
    Object 1: An array with all the environments with all information
    Object 2: An array of just the environment names
    '''
    folder = folder + config.folderNames['environments']
    envArr = []
    envArrOnlyNames = []
    for envFile in os.listdir(folder):
        environment = config.readFromJsonFile(folder + envFile)
        item = {
            'name': environment['name'],
            'uid': environment['uid']
        }
        envArr.append(item)
        envArrOnlyNames.append(environment['name'])
    return envArr, sorted(envArrOnlyNames)
Code Example #14
def findDeliveryTokenInExport(folder, contentInfo):
    '''
    Re-usable function that tries to find the deliveryToken in the export.
    If found, we can ask the user whether to include the workflow stages (those cannot be exported using the delivery token)
    '''
    f = folder + config.folderNames['deliveryTokens']
    for dtFile in os.listdir(f):
        deliveryToken = config.readFromJsonFile(f + dtFile)
        config.logging.debug('Delivery Token found: {}'.format(deliveryToken))
        try:
            envName = deliveryToken['scope'][0]['environments'][0]['name']
        except KeyError:
            envName = ''
        if envName == contentInfo['environments']:
            config.logging.info('Delivery token found ({}).'.format(envName))
            includeWorkFlows = includeWorkflowStages()
            config.addToExportReport('includeWorkFlows', includeWorkFlows, folder)
            if includeWorkFlows:
                return None
            return deliveryToken['token']
    return None
Code Example #15
def importDeliveryTokens(apiKey, authToken, region, folder):
    '''
    Importing delivery tokens
    '''
    config.logging.info('{}Importing delivery tokens{}'.format(
        config.BOLD, config.END))
    f = config.dataRootFolder + config.stackRootFolder + folder + config.folderNames[
        'deliveryTokens']
    for delFile in config.readDirIfExists(f):
        deliveryToken = config.readFromJsonFile(f + delFile)
        if deliveryToken:
            body = {
                'token': {
                    'name':
                    deliveryToken['name'],
                    'description':
                    deliveryToken['description'],
                    'scope': [{
                        'environments':
                        [deliveryToken['scope'][0]['environments'][0]['name']],
                        'module':
                        deliveryToken['scope'][0]['module'],
                        'acl':
                        deliveryToken['scope'][0]['acl']
                    }]
                }
            }
            deliveryTokenImport = cma.createDeliveryToken(
                apiKey, authToken, body, region)
            if deliveryTokenImport:
                config.logging.info('Delivery Token {} imported'.format(
                    deliveryToken['name']))
        else:
            config.logging.error(
                '{}Unable to read from Delivery Token file {}{}'.format(
                    config.RED, delFile, config.END))
    return True
Code Example #16
def importWorkflows(apiKey, authToken, region, folder, roleMapper):
    '''
    Importing workflows
    '''
    config.logging.info('{}Importing workflows{}'.format(
        config.BOLD, config.END))
    f = config.dataRootFolder + config.stackRootFolder + folder + config.folderNames[
        'workflows']
    mapDict = {}
    for wfFile in config.readDirIfExists(f):
        workflow = config.readFromJsonFile(f + wfFile)
        if workflow:
            workflowImport = cma.createWorkflow(apiKey, authToken,
                                                {'workflow': workflow}, region)
            if workflowImport:
                mapDict = addToMapper(mapDict, workflow['uid'],
                                      workflowImport['workflow']['uid'])
                config.logging.info('{} workflow imported'.format(
                    workflow['name']))
        else:
            config.logging.error(
                '{}Unable to read from Workflow file {}{}'.format(
                    config.RED, wfFile, config.END))
    return createMapperFile(apiKey, folder, mapDict, 'workflows')
Code Example #17
def createContentTypesAndGlobalFields(apiKey, token, region, folder):
    '''
    v2 - Create empty stubs first, then update them with the full schema, to avoid issues with circular dependencies
    '''
    config.logging.info('{}Creating Content Types{}'.format(
        config.BOLD, config.END))
    ctFolder = config.dataRootFolder + config.stackRootFolder + folder + config.folderNames[
        'contentTypes']
    for ctFile in config.readDirIfExists(ctFolder):
        config.logging.info(
            'Creating content type from file: {}'.format(ctFile))
        contentType = config.readFromJsonFile(ctFolder + ctFile)
        if contentType:
            # contentType = replaceFromMapper(extensionMapper, contentType, 'content types')
            body = {
                'content_type': {
                    'title': contentType['title'],
                    'uid': contentType['uid'],
                }
            }
            schema = []
            for field in contentType['schema']:
                if field['uid'] in ('url', 'title'):
                    schema.append(field)
            body['content_type']['schema'] = schema
            ctCreate = cma.createContentType(apiKey, token, body, region)
            if ctCreate:
                config.logging.info('Content Type {} created'.format(
                    contentType['title']))
            else:
                config.logging.critical(
                    '{}Content Type {} NOT created!{}'.format(
                        config.RED, contentType['title'], config.END))
    config.logging.info('{}Finished creating all Content Types{}'.format(
        config.BOLD, config.END))
    config.logging.info('{}Creating Global Fields{}'.format(
        config.BOLD, config.END))
    gfFolder = config.dataRootFolder + config.stackRootFolder + folder + config.folderNames[
        'globalFields']
    for gfFile in config.readDirIfExists(gfFolder):
        globalField = config.readFromJsonFile(gfFolder + gfFile)
        if globalField:
            body = {
                'global_field': {
                    'title':
                    globalField['title'],
                    'uid':
                    globalField['uid'],
                    'schema': [{
                        "data_type": "text",
                        "display_name": "temp field",
                        "uid": "temp_field",
                    }]
                }
            }
            gfCreate = cma.createGlobalField(apiKey, token, body, region)
            if gfCreate:
                config.logging.info('Global Field {} created'.format(
                    globalField['title']))
            else:
                config.logging.critical(
                    '{}Global Field {} NOT created!{}'.format(
                        config.RED, globalField['title'], config.END))
    config.logging.info('{}Finished creating all Global Fields{}'.format(
        config.BOLD, config.END))
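Code Examples #1 and #17 presumably run as a pair: stubs first, full schemas second, once the extensions exist and an extension mapper is available. A sketch of that order (the orchestration itself is an assumption):

extensionMapper = importExtensions(apiKey, authToken, region, folder)
createContentTypesAndGlobalFields(apiKey, authToken, region, folder)
updateContentTypesAndGlobalFields(apiKey, authToken, region, folder,
                                  extensionMapper)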
Code Example #18
def initiateLogin(region, retrying=False):
    '''
    Initiating a Login sequence
    '''
    try:
        loginToNewRegion = False
        if retrying:
            shouldDeleteFile()
        loginInfo = None
        if os.path.isfile(config.authTokenFile):
            authTokenDict = config.readFromJsonFile(config.authTokenFile)
            if authTokenDict:
                if region in authTokenDict:
                    config.logging.info(
                        'Authtoken found for user {}{}{} in {} region.'.format(
                            config.UNDERLINE,
                            authTokenDict[region]['username'], config.END,
                            region))
                    use = [
                        inquirer.Confirm(
                            'useFile',
                            message=
                            "{}AuthToken found on local storage. Try to use that?{}"
                            .format(config.BOLD, config.END),
                            default=True)
                    ]
                    if inquirer.prompt(use)['useFile']:
                        return {
                            'region': region,
                            'username': authTokenDict[region]['username'],
                            'authtoken': authTokenDict[region]['authtoken']
                        }
                    else:
                        shouldDeleteFile()
                    loginInfo = getLoginInfo(region)
                else:
                    loginToNewRegion = True
                    loginInfo = getLoginInfo(region)
        else:
            loginInfo = getLoginInfo(region)
        if loginInfo or loginToNewRegion:
            statusCode, userSession = cma.login(
                loginInfo['username'], loginInfo['password'], loginInfo['2fa'],
                cma.regionMap[loginInfo['region']])
            if statusCode == 200:
                config.logging.info(
                    '{}Login Successful - Username: {} - Region: {}{}'.format(
                        config.GREEN, loginInfo['username'],
                        loginInfo['region'], config.END))
            else:
                config.logging.critical(
                    '{}Login ERROR! - Username: {} - Region: {} Status Code: {}{}'
                    .format(config.RED, loginInfo['username'],
                            loginInfo['region'], statusCode, config.END))
                return None
            sessionToFile = {
                loginInfo['region']: {
                    'username': loginInfo['username'],
                    'authtoken': userSession['user']['authtoken']
                }
            }
            session = {
                'username': loginInfo['username'],
                'authtoken': userSession['user']['authtoken'],
                'region': loginInfo['region']
            }
            if statusCode == 200 and loginInfo['store']:
                config.addToJsonFile(sessionToFile, config.authTokenFile)
            return session
    except TypeError:
        exitProgram()
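Typical use of initiateLogin, assuming the region string matches a key in cma.regionMap:

session = initiateLogin('NA')  # region name is a placeholder
if session:
    authToken = session['authtoken']
    config.logging.info('Logged in to {} as {}'.format(session['region'],
                                                       session['username']))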
Code Example #19
def importLanguages(apiKey, authToken, region, folder, masterLocale):
    '''
    Imports languages
    '''
    config.logging.info('{}Importing languages{}'.format(
        config.BOLD, config.END))
    f = config.dataRootFolder + config.stackRootFolder + folder + config.folderNames[
        'languages']
    createdLanguages = [masterLocale]
    delayedList = []
    mapDict = {}
    for langFile in config.readDirIfExists(f):
        language = config.readFromJsonFile(f + langFile)
        if language:
            if language['code'] != masterLocale:
                if language[
                        'fallback_locale'] not in createdLanguages:  # and language['code'] != masterLocale:
                    config.logging.info(
                        'Fallback Locale {} not yet created for locale {}. Delaying import.'
                        .format(language['fallback_locale'], language['code']))
                    delayedList.append(language)
                else:
                    importedLanguage = importLanguage(language, apiKey,
                                                      authToken, region)
                    if importedLanguage:
                        createdLanguages.append(language['code'])
                        mapDict.update(importedLanguage)
                    else:
                        delayedList.append(language)
        else:
            config.logging.error(
                '{}Unable to read from Language file {}{}'.format(
                    config.RED, langFile, config.END))
    counter = 1
    while delayedList and counter <= len(
            delayedList
    ) * 5:  # If we need to try this too often, we stop after len*5 times
        language = delayedList[0]
        config.logging.info(
            'Retrying to import locale skipped earlier: {}.'.format(
                language['code']))
        if language['fallback_locale'] in createdLanguages:
            importedLanguage = importLanguage(language, apiKey, authToken,
                                              region)
            if importedLanguage:
                createdLanguages.append(language['code'])
                mapDict.update(importedLanguage)
            else:
                delayedList.append(language)
        else:
            delayedList.append(language)
        delayedList.pop(0)
        counter += 1

    # If some languages are still unimported, we just add them with the master locale as the fallback
    if delayedList:
        config.logging.warning(
            '{}Unable to import languages with the correct fallback locale defined. Importing with master locale as fallback: {}{}'
            .format(config.YELLOW, str(delayedList), config.END))
        for language in list(delayedList):
            language['fallback_locale'] = masterLocale
            importedLanguage = importLanguage(language, apiKey, authToken,
                                              region)
            if importedLanguage:
                createdLanguages.append(language['code'])
                mapDict.update(importedLanguage)
            else:
                config.logging.error('{}Unable to import language {}{}'.format(
                    config.RED, language['code'], config.END))
    return createMapperFile(apiKey, folder, mapDict, 'languages')
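The importLanguage helper (singular) is referenced above but not shown. A sketch under the assumption that cma.createLanguage exists alongside the other create* calls and that the returned mapping pairs the old locale uid with the new one:

def importLanguage(language, apiKey, authToken, region):
    # Hypothetical sketch: create one locale and return its uid mapping
    body = {'locale': {'code': language['code'],
                       'name': language['name'],
                       'fallback_locale': language['fallback_locale']}}
    languageImport = cma.createLanguage(apiKey, authToken, body, region)  # assumed cma call
    if languageImport:
        config.logging.info('Language {} imported'.format(language['code']))
        return {language['uid']: languageImport['locale']['uid']}
    return None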