Example #1
def exportEntriesUsingDeliveryToken(stackInfo, token, environment, folder, contentInfo):
    '''
    Using delivery token to export entries from a single environment
    '''
    languages = contentInfo['languages']
    contentTypes = contentInfo['contentTypes']
    entryFolder = folder + config.folderNames['entries']
    config.logging.debug('{}Using Delivery Token to Export. Variables for debug:{}'.format(config.CYAN, config.END))
    config.logging.debug('{}stackInfo: {}{}'.format(config.CYAN, stackInfo, config.END))
    config.logging.debug('{}contentInfo: {}{}'.format(config.CYAN, contentInfo, config.END))
    config.logging.debug('{}entryFolder: {}{}'.format(config.CYAN, entryFolder, config.END))
    config.checkDir(entryFolder)
    counter = 0
    for contentType in contentTypes:
        entryFolderPerContentType = entryFolder + contentType + '/'
        config.checkDir(entryFolderPerContentType)
        config.logging.info('{}Exporting Entries of Content Type: {}{}'.format(config.GREEN, contentType, config.END))
        for language in languages:
            config.logging.info('{}Exporting Entries of Language: {}{}'.format(config.BOLD, language, config.END))
            entries = cda.getAllEntries(stackInfo, contentType, language, environment, token)
            if entries:
                # I wish I could see all entries, based on where the master locale is published.
                # But I need to get all entries and check the publishing details in them,
                # e.g. to see whether en-us (master or fallback) is published for the is-is locale.
                fileName = entryFolderPerContentType + language + '.json'
                if config.writeToJsonFile(entries, fileName):
                    config.logging.info('Entries Exported to File. {}'.format(fileName))
                    counter = counter + len(entries['entries'])
                else:
                    config.logging.error('{}Unable to write to file. {}{}'.format(config.RED, fileName, config.END))
            else:
                config.logging.info('No Entries. {} - {}'.format(contentType, language))
    return True
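A note on the config helpers used throughout these examples: checkDir and writeToJsonFile are project utilities whose implementations are not shown here. A minimal sketch of what they could look like, assuming checkDir creates missing directories and writeToJsonFile returns a boolean (the names match the calls above, the bodies are assumptions):

import os
import json

def checkDir(path):
    # Assumed behaviour: create the directory if missing, report whether it is usable
    os.makedirs(path, exist_ok=True)
    return os.path.isdir(path)

def writeToJsonFile(data, fileName, overwrite=False):
    # Assumed behaviour: dump data as JSON, refuse to clobber an existing file unless overwrite is True
    if os.path.exists(fileName) and not overwrite:
        return False
    try:
        with open(fileName, 'w', encoding='utf-8') as f:
            json.dump(data, f, indent=2, ensure_ascii=False)
        return True
    except OSError:
        return False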
Example #2
def processAssetExport(assets, stackInfo, folder, masterLocale, downloadAssets):
    '''
    Reusable function where exported assets (fetched via either the CMA or the CDA) are processed and written to the export folders.
    '''
    if not assets:
        return False
    for asset in assets['assets']:
        uid = asset['uid']
        assetFolder = folder + uid + '/'
        config.checkDir(assetFolder)
        assetFileName = asset['filename']
        metadataFileName = uid + '_v{}.json'.format(asset['_version'])
        if config.writeToJsonFile({'asset': asset}, assetFolder + metadataFileName):
            config.logging.info('Image metadata written to {}'.format(metadataFileName))
        if downloadAssets:
            assetUrl = asset['url']
            config.logging.info('Downloading Asset: {} To file path: {}'.format(assetUrl, assetFileName))
            config.downloadFileToDisk(assetUrl, assetFolder, assetFileName)
        if 'publish_details' in asset:
            config.logging.info('Adding publishing details to export file: {}'.format(assetFolder + 'publishDetails.json'))
            if isinstance(asset['publish_details'], list):
                for i in asset['publish_details']:
                    key = i['locale'] + '-' + i['environment']
                    config.addToJsonFile({key: i}, assetFolder + 'publishDetails.json')
            else:
                key = asset['publish_details']['locale'] + '-' + asset['publish_details']['environment']
                config.addToJsonFile({key:asset['publish_details']}, assetFolder + 'publishDetails.json')
    return True
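config.addToJsonFile is used above to accumulate one locale-environment key per publish record in a single publishDetails.json. A possible merge-style implementation, assuming the target file holds one flat JSON object (a sketch, not the project's actual helper):

import json
import os

def addToJsonFile(payload, fileName):
    # Assumed behaviour: merge a small dict into an existing JSON file, creating it if needed
    data = {}
    if os.path.exists(fileName):
        with open(fileName, 'r', encoding='utf-8') as f:
            try:
                data = json.load(f)
            except ValueError:
                data = {}
    data.update(payload)
    with open(fileName, 'w', encoding='utf-8') as f:
        json.dump(data, f, indent=2, ensure_ascii=False)
    return True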
Example #3
def importAnAsset(
        region, authToken, apiKey, metaData, assetFile,
        folderMapper):
    '''
    Create Asset in Import Stack
    region is the full base URL
    metaData is the full path to the asset metadata JSON file OR None
    assetFile is the full path to the asset binary on disk OR None
    folderMapper is a dict object
    '''
    tmpFolder = '.tmp/'
    if metaData:
        metaData = config.readFromJsonFile(metaData)
    if folderMapper and metaData:
        metaData = importStructure.replaceFromMapper(folderMapper, metaData,
                                                     'assets')
    if not assetFile:
        config.checkDir(tmpFolder)
        assetFile = config.downloadFileToDisk(metaData['asset']['url'],
                                              tmpFolder,
                                              metaData['asset']['filename'])
        if assetFile:
            assetFile = tmpFolder + metaData['asset']['filename']
    config.logging.debug('Region {}'.format(region))
    config.logging.debug('authToken {}'.format(authToken))
    config.logging.debug('apiKey {}'.format(apiKey))
    config.logging.debug('assetFile {}'.format(assetFile))
    config.logging.debug('metaData {}'.format(metaData))
    config.logging.debug('Filename {}'.format(metaData['asset']['filename']))
    create = cma.createAsset(region, authToken, apiKey, assetFile, metaData,
                             metaData['asset']['filename'])
    if create and (tmpFolder in assetFile):  # Cleaning from tmp folder
        os.remove(assetFile)
    return create
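When no local asset file is passed in, the asset binary is first fetched with config.downloadFileToDisk. A streaming download helper along those lines could look like this (a sketch using requests; the real helper's signature and truthy return value are inferred from how it is called above):

import os
import requests

def downloadFileToDisk(url, folder, fileName):
    # Assumed behaviour: stream the remote file to folder/fileName and return True on success
    try:
        response = requests.get(url, stream=True, timeout=30)
        response.raise_for_status()
    except requests.RequestException:
        return False
    with open(os.path.join(folder, fileName), 'wb') as f:
        for chunk in response.iter_content(chunk_size=8192):
            f.write(chunk)
    return True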
Example #4
def exportAssetFolders(authToken, stackInfo, folder):
    '''
    Exporting All Folders to a single JSON file
    '''
    foldersFolder = folder + config.folderNames['folders']
    fileName = foldersFolder + config.fileNames['folders']
    config.checkDir(folder)
    config.checkDir(foldersFolder)
    folders = cma.getAllFolders(stackInfo, authToken)
    if folders:
        if config.writeToJsonFile(folders, fileName):
            config.logging.info('Folders Exported to file. ({})'.format(fileName))
            return True
        config.logging.error('{}Unable to write Folders to file: {}{}'.format(config.RED, fileName, config.END))
        return None
    config.logging.warning('{}No Asset Folders found on Stack.{}'.format(config.YELLOW, config.END))
    return None
Example #5
def exportAssetsDeliveryToken(stackInfo, deliveryToken, environment, folder, contentInfo):
    '''
    Exporting Assets using the Delivery Token
    '''
    config.addToExportReport('AssetExportMethod', 'DeliveryToken', folder)
    masterLocale = stackInfo['masterLocale']
    downloadAssets = contentInfo['downloadAssets']
    folder = folder + config.folderNames['assets']
    config.checkDir(folder)
    config.logging.info('{}Exporting Assets on Environment {} using the Delivery Token{}'.format(config.BOLD, environment, config.END))
    assets = cda.getAllAssets(stackInfo, deliveryToken, environment)
    if processAssetExport(assets, stackInfo, folder, masterLocale, downloadAssets):
        config.logging.info('Finished Exporting Assets')
        return True
    config.logging.error('{}Unable to export Assets!{}'.format(config.RED, config.END))
    return False
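cda.getAllAssets has to page through the Content Delivery API, since a single response is capped. A minimal pagination sketch, assuming the standard skip/limit/include_count query parameters and that stackInfo carries the stack api key plus the regional CDN base URL (the field names here are assumptions):

import requests

def getAllAssets(stackInfo, deliveryToken, environment):
    # Page through /v3/assets 100 at a time until the reported count is reached
    url = '{}/v3/assets'.format(stackInfo['region'])  # 'region' assumed to hold the CDN base URL
    headers = {'api_key': stackInfo['apiKey'], 'access_token': deliveryToken}
    assets, skip, total = [], 0, None
    while total is None or skip < total:
        params = {'environment': environment, 'include_count': 'true', 'skip': skip, 'limit': 100}
        response = requests.get(url, headers=headers, params=params, timeout=30)
        response.raise_for_status()
        body = response.json()
        assets.extend(body.get('assets', []))
        total = body.get('count', len(assets))
        skip += 100
    return {'assets': assets} if assets else None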
Example #6
def exportEntriesUsingAuthToken(stackInfo, authToken, folder, contentInfo, environment=None):
    '''
    Exporting entries using the Content Management API.
    Exports either ALL entries, or only the entries published to a given environment.
    We need to get the entries for the master locale first, then see which locales are available for those entries.
    '''
    languages = contentInfo['languages']
    # masterLocale = stackInfo['masterLocale']
    contentTypes = contentInfo['contentTypes']
    assetsToExport = contentInfo['assets'] # 'all', 'referenced' or 'noAssets'
    config.logging.debug('Assets to export: {}'.format(assetsToExport))
    entryFolder = folder + config.folderNames['entries']
    config.checkDir(entryFolder)
    counter = 0
    for contentType in contentTypes:
        ctFolder = entryFolder + contentType + '/'
        config.checkDir(ctFolder)
        if environment:
            config.logging.info('{}{}Exporting Entries of Content Type: {} from Environment: {}{}'.format(config.BOLD, config.GREEN, contentType, environment, config.END))
        else:
            config.logging.info('{}{}Exporting Entries of Content Type: {}{}'.format(config.BOLD, config.GREEN, contentType, config.END))

        for language in languages:
            config.logging.info('Exporting from Language: {}'.format(language))
            entries = cma.getAllEntries(stackInfo, contentType, language, authToken, environment)
            if entries:
                fileName = ctFolder + language + '.json'
                # if (language != masterLocale) and (fallbackLanguage is not None):
                # We need to confirm that the entry is not using the fallback_locale.
                # If it is in a different language, we do not want to export it.
                # I wish I could add an extra parameter to the request, e.g. ?include_fallback_locale=false, and just get empty responses.
                # We check the master locale to see which languages it is published in.
                newEntries = {'entries': []}
                for entry in entries['entries']:
                    if entry['locale'] == language: # We know it's the right language
                        newEntries['entries'].append(entry)
                if newEntries['entries']:
                    if config.writeToJsonFile(newEntries, fileName):
                        config.logging.info('Entries Exported to File. {}'.format(fileName))
                        counter = counter + len(newEntries['entries'])
                else:
                    config.logging.debug('No localized Entries. {} - {}'.format(contentType, language))
            else:
                config.logging.info('No Entries. {} - {}'.format(contentType, language))
    return True
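The comments above describe the fallback problem: requesting e.g. is-is can return entries whose locale is still en-us because they were never localized, and those should not be written to the is-is export file. A small illustration of the locale filter with made-up payloads:

entries = {'entries': [
    {'uid': 'entry1', 'locale': 'is-is', 'title': 'Localized entry'},
    {'uid': 'entry2', 'locale': 'en-us', 'title': 'Fallback copy'},  # served via fallback, not localized
]}
language = 'is-is'
localized = [e for e in entries['entries'] if e['locale'] == language]
assert [e['uid'] for e in localized] == ['entry1']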
Example #7
def exportAssetsUsingAuthToken(stackInfo, authToken, folder, contentInfo, environment=None):
    '''
    Exporting Assets using the Auth token
    Environment optional
    '''
    config.addToExportReport('AssetExportMethod', 'AuthToken', folder)
    masterLocale = stackInfo['masterLocale']
    downloadAssets = contentInfo['downloadAssets']
    folder = folder + config.folderNames['assets']
    config.checkDir(folder)
    if environment:
        config.logging.info('{}Exporting Assets on Environment {} using Auth Token{}'.format(config.BOLD, environment, config.END))
    else:
        config.logging.info('{}Exporting all Assets using Auth Token{}'.format(config.BOLD, config.END))
    assets = cma.getAllAssets(stackInfo, authToken, environment)
    if processAssetExport(assets, stackInfo, folder, masterLocale, downloadAssets):
        config.logging.info('Finished Exporting Assets')
        return True
    config.logging.error('{}Unable to export Assets!{}'.format(config.RED, config.END))
    return False
def createMapperFile(apiKey, folder, mapDict, mapperName=''):
    '''
    Reusable function that creates the mapper file between exported and imported uids
    '''
    config.logging.info('Writing {} mapper to file'.format(mapperName))
    mapperFolder = config.dataRootFolder + config.stackRootFolder + config.mapperFolder + 'MAPPER_ImportTo-' + apiKey + '_ExportFrom-' + folder
    config.checkDir(config.dataRootFolder)
    config.checkDir(config.dataRootFolder + config.stackRootFolder)
    config.checkDir(config.dataRootFolder + config.stackRootFolder +
                    config.mapperFolder)
    config.checkDir(mapperFolder)
    config.writeToJsonFile(mapDict,
                           mapperFolder + '/' + config.fileNames[mapperName],
                           True)  # True -> Overwrite
    return mapDict
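The mapper written by createMapperFile is what functions like importStructure.replaceFromMapper (used in Example #3) consume to swap exported uids for the newly created ones. One possible substitution approach, assuming the mapper is a flat old-uid to new-uid dict (a sketch only, not the project's implementation):

import json

def replaceFromMapper(mapper, data, label=''):
    # Serialize the payload, replace every exported uid with its imported counterpart, then load it back
    body = json.dumps(data)
    for oldUid, newUid in mapper.items():
        body = body.replace(oldUid, newUid)
    return json.loads(body)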
Example #8
def checkTranscoDirs(root_dir, region):  # NOTE: the original def line is missing from this fragment; name and signature reconstructed from the variables used below
    dirs = []
    # dirs += ['%s/%s_transcoWorking_SOC' % (root_dir, region)]
    dirs += ['%s/%s_transcoWorking_REG' % (root_dir, region)]
    dirs += ['%s/%s_transcoWorking_NAT' % (root_dir, region)]

    INLTRANSCO_DEST = "/InlTransco/Dest/InlTransco.dest"
    SQLLOADER_LOG = "/InlTransco/Log"

    dic0 = {}
    dirs0 = '%s/%s/' % (root_dir, region)

    # check dirs

    # check root_dir
    config.checkDir(root_dir + "/")

    # check dirs and remove dirs that do not exist
    dirs = [d for d in dirs if config.checkDir(d)]

    # check sql loader dirs
    for d in dirs:
        config.checkDir(d + SQLLOADER_LOG)

    # check InlTranscoDest files
    for d in dirs:
        config.checkFile(d + INLTRANSCO_DEST)

    transormDest = "../Log/transorm.dest"

    # Destination file
Example #9
'''
Login starts
'''
try:
    region, userInfo, liveUserInfo, token = login.startup()
except (TypeError, KeyError):
    exitProgram()
config.logging.info('Logged in as: {}'.format(userInfo['username']))
orgs = restructureOrgs(liveUserInfo)  # Making the org output simpler
'''
Login finished
'''
config.checkDir(config.dataRootFolder)
startupAction = ''
while startupAction is not None and 'Exit' not in startupAction:
    startupAction = startupQuestion()
    orgUid, orgName = findOrg(orgs)
    if any(s in startupAction for s in ('Entries', 'Assets')):
        stackName, stack = findStack(orgUid, token, region)  # Choose Org and Stack
        try:
            apiKey = stack['uid']
        except (AttributeError, KeyError, TypeError):
            apiKey = None
        stackInfo = {'apiKey': apiKey, 'region': region}
        if 'Export Entries to' in startupAction:
            ctArr = []
            contentTypes = cma.getAllContentTypes(