Code example #1
def get_groups_on_disk(contentpath):
    groups_on_disk_info = {}
    group_json_files = findFilePath(contentpath, 'group.json', returnFirst=False)
    for group_json_file in group_json_files:
        os.chdir(os.path.dirname(group_json_file))
        group_json = json.load(open('group.json'))
        groups_on_disk_info[group_json['id']] = '{}:{}'.format(group_json['owner'], group_json['title'])
        
    return groups_on_disk_info
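
Every example on this page calls a findFilePath helper whose definition is not shown. As a reading aid, here is a minimal sketch of what such a helper might look like, assuming it walks a root folder and matches file names against a glob pattern with fnmatch; the actual ops-server-config implementation may differ, for example in what it returns when nothing is found.

import fnmatch
import os

def findFilePath(rootPath, fileName, returnFirst=True):
    # Sketch only: walk rootPath and collect files whose name matches the
    # glob pattern fileName (e.g. 'group.json' or '*.sde').
    matches = []
    for dirPath, dirNames, fileNames in os.walk(rootPath):
        for name in fnmatch.filter(fileNames, fileName):
            fullPath = os.path.join(dirPath, name)
            if returnFirst:
                return fullPath      # single path when only the first hit is wanted
            matches.append(fullPath)
    # Assumption: returns None when returnFirst=True and nothing matched,
    # and a (possibly empty) list when returnFirst=False.
    return None if returnFirst else matches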
Code example #2
def get_groups_on_disk(contentpath):
    groups_on_disk_info = {}
    group_json_files = findFilePath(contentpath,
                                    'group.json',
                                    returnFirst=False)
    for group_json_file in group_json_files:
        os.chdir(os.path.dirname(group_json_file))
        group_json = json.load(open('group.json'))
        groups_on_disk_info[group_json['id']] = '{}:{}'.format(
            group_json['owner'], group_json['title'])

    return groups_on_disk_info
Code example #3
def validate_scene_parameter_folder(portal, folder, source_item_ids):
    valid = True
    msg = None

    # Does path exist
    if not os.path.exists(folder):
        valid = False
        msg = 'ERROR: folder {} does not exist.'.format(folder)
        return valid, msg

    # Is path a folder
    if not os.path.isdir(folder):
        valid = False
        msg = 'ERROR: path {} is not a folder.'.format(folder)
        return valid, msg

    # Does folder contain any scene service parameter files
    scene_parameter_files = findFilePath(folder,
                                         '*' + scene_parmater_file_suffix,
                                         returnFirst=False)
    if len(scene_parameter_files) == 0:
        valid = False
        msg = 'ERROR: folder {} does not contain any scene service' \
              ' parameter files (i.e. {})'.format(folder,
                                            scene_parmater_file_suffix)
        return valid, msg

    # Check if folder contains a scene service parameter file
    # for each scene service being published
    #missing_files = []
    msg = 'ERROR: folder {} is missing scene service parameter files ' \
          'for the following scene services: '.format(folder)
    for item_id in source_item_ids:
        service_name = portal.item(item_id)['url'].split('/')[-2]
        scene_parameter_file = os.path.join(
            folder, service_name + scene_parmater_file_suffix)
        if scene_parameter_file not in scene_parameter_files:
            valid = False
            msg = msg + '\n' + service_name
    if not valid:
        return valid, msg

    return valid, msg
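
A hedged sketch of how this validator might be called before publishing; the folder path and item id below are hypothetical placeholders, and portal is assumed to be an already-connected Portal object as in the other examples.

folder = r'C:\scene_service_parameters'                    # hypothetical path
source_item_ids = ['0123456789abcdef0123456789abcdef']     # hypothetical item id
valid, msg = validate_scene_parameter_folder(portal, folder, source_item_ids)
if not valid:
    print msg
    sys.exit(1)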
Code example #4
def validate_scene_parameter_folder(portal, folder, source_item_ids):
    valid = True
    msg = None
    
    # Does path exist
    if not os.path.exists(folder):
        valid = False
        msg = 'ERROR: folder {} does not exist.'.format(folder)
        return valid, msg
    
    # Is path a folder
    if not os.path.isdir(folder):
        valid = False
        msg = 'ERROR: path {} is not a folder.'.format(folder)
        return valid, msg
    
    # Does folder contain any scene service parameter files
    scene_parameter_files = findFilePath(folder,
                                '*' + scene_parmater_file_suffix,
                                returnFirst=False)
    if len(scene_parameter_files) == 0:
        valid = False
        msg = 'ERROR: folder {} does not contain any scene service' \
              ' parameter files (i.e. {})'.format(folder,
                                            scene_parmater_file_suffix)
        return valid, msg
        
    # Check if folder contains a scene service parameter file
    # for each scene service being published
    #missing_files = []
    msg = 'ERROR: folder {} is missing scene service parameter files ' \
          'for the following scene services: '.format(folder)
    for item_id in source_item_ids:
        service_name = portal.item(item_id)['url'].split('/')[-2]
        scene_parameter_file = os.path.join(folder, service_name + scene_parmater_file_suffix)
        if scene_parameter_file not in scene_parameter_files:
            valid = False
            msg = msg + '\n' + service_name
    if not valid:
        return valid, msg
    
    return valid, msg
Code example #5
# Check if root folder and json file exist.
# ------------------------------------------------------------------------------------
if not os.path.exists(root_path):
    print '\nERROR: <RootFolderToSearch> folder ' + root_path + ' does not exist. Exiting script.'
    sys.exit(1)

if not os.path.isfile(id_map_file):
    print '\nERROR: <IDJsonFile> file ' + id_map_file + ' does not exist. Exiting script.'
    sys.exit(1)

# ------------------------------------------------------------------------------------
# Create list of files to update
# ------------------------------------------------------------------------------------
files_to_update = []

files_to_update.extend(findFilePath(root_path, '*.js', returnFirst=False))
files_to_update.extend(findFilePath(root_path, '*.html', returnFirst=False))
files_to_update.extend(findFilePath(root_path, '*.json', returnFirst=False))
files_to_update.extend(findFilePath(root_path, '*.csv', returnFirst=False))
files_to_update.extend(findFilePath(root_path, '*.erb', returnFirst=False))

total_files = len(files_to_update)

# ------------------------------------------------------------------------------------
# Create dictionary of search/replace values
# ------------------------------------------------------------------------------------
search_replace_map = {}

# Add old/new hostnames
search_replace_map[old_hostname] = new_hostname
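
The excerpt ends after seeding search_replace_map with the old and new hostnames. A minimal sketch of how files_to_update and search_replace_map could then be applied; the original script's actual replacement loop is not shown here, so this is an assumption:

for file_path in files_to_update:
    # Read the file, apply every search/replace pair, and write it back.
    with open(file_path, 'r') as f:
        contents = f.read()
    for search_str, replace_str in search_replace_map.items():
        contents = contents.replace(search_str, replace_str)
    with open(file_path, 'w') as f:
        f.write(contents)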
Code example #6
def get_userinfo_files(portal_content_folder):
    userinfo_files = findFilePath(portal_content_folder, userinfo_file, returnFirst=False)
    return userinfo_files
Code example #7
def main():
    exit_err_code = 1
    
    # Print/get script arguments
    results = print_args()
    if not results:
        sys.exit(exit_err_code)
    portal_address, adminuser, password = results
    
    total_success = True
    title_break_count = 100
    section_break_count = 75
    search_query = None
    
    print '=' * title_break_count
    print 'Validate Hosted Service Sources'
    print '=' * title_break_count
    
    source_items = []
    hosted_items = []
    
    root_folder_path = None
    root_folder_path = tempfile.mkdtemp()
    print 'Temporary directory: {}'.format(root_folder_path)
    
    orig_dir = os.getcwd()
    
    try:
        portal = Portal(portal_address, adminuser, password)
        items = portal.search()
        
        # ---------------------------------------------------------------------
        #  Get info about hosted service source items
        # (currently service definitions)
        # ---------------------------------------------------------------------
        
        for item in items:
            
            if item['type'] == 'Service Definition':
                
                print '\nDownloading and extracting Service Definition item {}'.format(item['id'])
                
                # Download .sd file
                download_root_path = os.path.join(root_folder_path, item['id'])
                os.mkdir(download_root_path)
                download_path = portal.item_datad(item['id'], download_root_path)
                
                # Extract serviceconfiguration.json file from downloaded .sd file
                file_name = 'serviceconfiguration.json'
                extract_path = download_path.replace('.sd', '')
                #print extract_path
                os.mkdir(extract_path)
                err_stat = extractFromSDFile(download_path, extract_path, file_name)
                print 'Extract status: {}'.format(err_stat)
        
                # Open extract .json file
                file_path = findFilePath(extract_path, file_name)
                os.chdir(os.path.dirname(file_path))
                service_config = json.load(open(file_name))
                
                # [{id: val, owner: val, title: val, type: val
                # service_config: {stuff from .json file}}]
                d = {
                    'id': item['id'],
                    'owner': item['owner'],
                    'title': item['title'],
                    'type': item['type'],
                    'service_config': service_config
                    }
                source_items.append(d)

        # ---------------------------------------------------------------------
        # Get info about hosted service items
        # ---------------------------------------------------------------------
        print '\nDetermine what hosted services exist...'
        h_service_items = get_hosted_service_items(portal, items)
        
        for item in h_service_items:
            d = {
                'id': item['id'],
                'owner': item['owner'],
                'title': item['title'],
                'type': item['type'],
                'url': item['url']
                }
            hosted_items.append(d)

        # ---------------------------------------------------------------------
        # For each hosted service find the associated source item
        # ---------------------------------------------------------------------
        print '=' * section_break_count
        print '\nDetermine which source items are associated with each hosted service...'
        print '=' * section_break_count
        num_hosted_no_match = 0
        num_hosted_match = 0
        num_hosted_mismatch_owner = 0
        write_str = "\tid: {:<34}owner: {:<20}type: {:<25}service: {:<50}\n"
        
        for hosted_d in hosted_items:
            found = False
            found_num = 0
            
            # Get last components of URL (i.e., SRTM_V2_56020/FeatureServer)
            hosted_url = '/'.join(hosted_d['url'].split('/')[-2:])
            
            print '\n{}'.format('-' * 100)
            print 'Hosted Service Item:   Title - "{}"\n'.format(hosted_d['title'])
            
            hosted_str = write_str.format(
                hosted_d['id'],
                hosted_d['owner'],
                hosted_d['type'],
                hosted_url)
            print hosted_str
            
            # Look for match in source items
            print '\tMatching Source Item:'

            for source_d in source_items:
                src_service_info = source_d['service_config']['service']
                src_service_name = src_service_info['serviceName']
                src_service_type = src_service_info['type']
                src_service_url = '{}/{}'.format(src_service_name, src_service_type)
                if hosted_url == src_service_url:
                    found = True
                    found_num += 1
        
                    match_str = write_str.format(
                        source_d['id'],
                        source_d['owner'],
                        source_d['type'],
                        src_service_url)
                    print '\n\tTitle: "{}"'.format(source_d['title'])
                    print match_str
                    
                    if source_d['owner'] != hosted_d['owner']:
                        print '*** ERROR: owner does not match hosted service item owner.'
                        num_hosted_mismatch_owner += 1
                        
            if found_num == 0:
                print '*** ERROR: no matching source item found.'
            if found_num > 1:
                print '*** ERROR: there is more than one matching source item found.'
                
            if found:
                num_hosted_match += 1
            else:
                num_hosted_no_match += 1
    

        # ---------------------------------------------------------------------
        # For each source item find the associated hosted service
        # ---------------------------------------------------------------------
        print '=' * section_break_count
        print '\nDetermine which hosted services are associated with each source item...'
        print '=' * section_break_count
        num_source_no_match = 0
        num_source_match = 0
        num_source_mismatch_owner = 0
        write_str = "\tid: {:<34}owner: {:<20}type: {:<25}service: {:<50}\n"
        
        for source_d in source_items:
            found = False
            found_num = 0
        
            src_service_info = source_d['service_config']['service']
            src_service_name = src_service_info['serviceName']
            src_service_type = src_service_info['type']
            src_service_url = '{}/{}'.format(src_service_name, src_service_type)
                
                
            print '\n{}'.format('-' * 100)
            print 'Source Item:   Title - "{}"\n'.format(source_d['title'])
            
            source_str = write_str.format(
                source_d['id'],
                source_d['owner'],
                source_d['type'],
                src_service_url)
            print source_str
            
            # Look for match in hosted items
            print '\tMatching Hosted Service:'
        
            for hosted_d in hosted_items:
        
                # Get last components of URL (i.e., SRTM_V2_56020/FeatureServer)
                hosted_url = '/'.join(hosted_d['url'].split('/')[-2:])
            
                if hosted_url == src_service_url:
                    found = True
                    found_num += 1
        
                    match_str = write_str.format(
                        hosted_d['id'],
                        hosted_d['owner'],
                        hosted_d['type'],
                        hosted_url)
                    print '\n\tTitle: "{}"'.format(hosted_d['title'])
                    print match_str
            
                    if hosted_d['owner'] != source_d['owner']:
                        print '*** ERROR: owner does not match associated source owner.'
                        num_source_mismatch_owner += 1
                        
            if found_num == 0:
                print '*** ERROR: no matching hosted service found.'
            if found_num > 1:
                print '*** ERROR: there is more than one hosted service found.'
                
            if found:
                num_source_match += 1
            else:
                num_source_no_match += 1

        print '\n{}'.format('=' * section_break_count)
        print 'Summary:\n'
        print 'Number of hosted services: {}'.format(len(hosted_items))
        print 'With matching source item: {}'.format(num_hosted_match)
        print 'With NO matching source item: {}'.format(num_hosted_no_match)
        print 'With mis-matching owners: {}'.format(num_hosted_mismatch_owner)

        print '\nNumber of source items: {}'.format(len(source_items))
        print 'With matching hosted service: {}'.format(num_source_match)
        print 'With NO matching hosted service: {}'.format(num_source_no_match)        
        print 'With mis-matching owners: {}'.format(num_source_mismatch_owner)
        
    except:
        total_success = False
        
        # Get the traceback object
        tb = sys.exc_info()[2]
        tbinfo = traceback.format_tb(tb)[0]
     
        # Concatenate information together concerning the error 
        # into a message string
        pymsg = "PYTHON ERRORS:\nTraceback info:\n" + tbinfo + \
                "\nError Info:\n" + str(sys.exc_info()[1])
        
        # Print Python error messages for use in Python / Python Window
        print
        print "***** ERROR ENCOUNTERED *****"
        print pymsg + "\n"
        
    finally:
        os.chdir(orig_dir)
        if root_folder_path:
            shutil.rmtree(root_folder_path)
            
        print '\nDone.'
        if total_success:
            sys.exit(0)
        else:
            sys.exit(exit_err_code)
Code example #8
def getPostgreSQLDatabases(password):
    try:
        dbList = []
        f = ""
        conf_f = None  # initialized so the finally block can safely check/close it
        confFileExists = False

        # ---------------------------------------------------------------------
        # Create path variables
        # ---------------------------------------------------------------------
        confFileFolder = os.path.join(os.environ['APPDATA'], "postgresql")
        confFilePath = os.path.join(confFileFolder, "pgpass.conf")

        #Location where calling script is located
        scriptFileFolder = os.path.dirname(sys.argv[0])

        #supportFilesPath = os.path.join(scriptFileFolder, "SupportFiles")
        scriptFilePath = os.path.join(scriptFileFolder,
                                      "RunPostgreSQLStatement.bat")
        outFile = os.path.join(scriptFileFolder, "DatabaseOutput.txt")

        # Delete file containing database names (outFile) if it already exists
        if os.path.exists(outFile):
            os.remove(outFile)

        # ---------------------------------------------------------------------
        # Create postgreSQL password configuration file
        # NOTE: must be located in %APPDATA%\postgresql folder and must
        # be called pgpass.conf
        # ---------------------------------------------------------------------
        # The script file runs psql.exe which will prompt for a password
        # unless the pgpass.config file contains the connection information.
        # The file should have the following format:
        # hostname:port:database:username:password
        confString = "localhost:5432:postgres:postgres:" + password

        # Create postgres folder if it does not exist.
        if not os.path.exists(confFileFolder):
            os.makedirs(confFileFolder)

        # Check if config file already exists
        if os.path.exists(confFilePath):
            confFileExists = True
            os.rename(confFilePath, confFilePath + ".bak")

        # File doesn't exist so create it using the w mode
        conf_f = open(confFilePath, "w")

        # Write to config file
        conf_f.write(confString + "\n")
        conf_f.close()

        # ---------------------------------------------------------------------
        # Create batch file with psql command
        # ---------------------------------------------------------------------
        print "\t\t-Creating batch file: " + scriptFilePath + "..."
        psqlExePath = findFilePath(rootPostgreSQLPath, "psql.exe")

        strToWrite = '"' + psqlExePath + '" -U postgres -p 5432 -d postgres ' + \
                '-h localhost -f ' + scriptFileFolder + os.sep + \
                'ListPostgresDatabases.sql > ' + outFile

        # Create file/overwrite existing
        batFile_f = open(scriptFilePath, "w")
        batFile_f.write(strToWrite + "\n")
        batFile_f.close()

        # ---------------------------------------------------------------------
        # Run batch file that connects to postgreSQL postgres database
        # and queries pg_database table.
        # ---------------------------------------------------------------------
        print "\t\t-Executing batch file (" + scriptFilePath + ")"
        print "\t\tto determine existing PostgreSQL databases..."

        #Run batch file that executes sql statement
        subprocess.call(scriptFilePath)

        # Read output file and add database names to list
        if os.path.exists(outFile):
            f = open(outFile, "r")
            for line in f:
                # remove leading/trailing spaces
                line = line.strip()
                if line == "datname" or line.find("--") <> -1 or \
                    line.find(" rows") <> -1:
                    # skip line
                    continue
                if len(line) > 0:
                    # added this check because an extra blank line is
                    # present in the file
                    dbList.append(line)

    except:
        # Get the traceback object
        tb = sys.exc_info()[2]
        tbinfo = traceback.format_tb(tb)[0]

        # Concatenate information together concerning the error into a message string
        pymsg = "PYTHON ERRORS:\nTraceback info:\n" + tbinfo + "\nError Info:\n" + str(
            sys.exc_info()[1])

        # Print Python error messages for use in Python / Python Window
        print
        print "***** ERROR ENCOUNTERED *****"
        print pymsg + "\n"

    finally:
        if f:
            f.close()
        if conf_f:
            conf_f.close()

        # Delete the config file we created
        os.remove(confFilePath)

        # Rename backup config if existed
        if confFileExists:
            os.rename(confFilePath + ".bak", confFilePath)

        return dbList
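
Since the function returns the list of database names parsed from the psql output file, a caller could use it to decide whether a database still needs to be created. A hypothetical usage sketch (the database name is a placeholder):

existing_dbs = getPostgreSQLDatabases(password)
if "operations" not in [name.lower() for name in existing_dbs]:   # "operations" is hypothetical
    print "\t-Database 'operations' not found; it still needs to be created."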
Code example #9
def getPostgreSQLDatabases(password):
    try:
        dbList = []
        f = ""
        conf_f = None  # initialized so the finally block can safely check/close it
        confFileExists = False
 
        # ---------------------------------------------------------------------
        # Create path variables
        # ---------------------------------------------------------------------
        confFileFolder = os.path.join(os.environ['APPDATA'], "postgresql")
        confFilePath = os.path.join(confFileFolder, "pgpass.conf")
        
        #Location where calling script is located
        scriptFileFolder = os.path.dirname(sys.argv[0])

        #supportFilesPath = os.path.join(scriptFileFolder, "SupportFiles")
        scriptFilePath = os.path.join(scriptFileFolder, "RunPostgreSQLStatement.bat")
        outFile = os.path.join(scriptFileFolder, "DatabaseOutput.txt")

        # Delete file containing database names (outFile) if it already exists
        if os.path.exists(outFile):
            os.remove(outFile)
            
        # ---------------------------------------------------------------------
        # Create postgreSQL password configuration file
        # NOTE: must be located in %APPDATA%\postgresql folder and must
        # be called pgpass.conf
        # ---------------------------------------------------------------------
        # The script file runs psql.exe which will prompt for a password
        # unless the pgpass.config file contains the connection information.
        # The file should have the following format:
        # hostname:port:database:username:password
        confString = "localhost:5432:postgres:postgres:" + password
        
        # Create postgres folder if it does not exist.
        if not os.path.exists(confFileFolder):
            os.makedirs(confFileFolder)
        
        # Check if config file already exists
        if os.path.exists(confFilePath):
            confFileExists = True
            os.rename(confFilePath, confFilePath + ".bak")
        
        # File doesn't exist so create it using the w mode
        conf_f = open(confFilePath, "w")
        
        # Write to config file
        conf_f.write(confString + "\n")
        conf_f.close()
        
        # ---------------------------------------------------------------------
        # Create batch file with psql command
        # ---------------------------------------------------------------------
        print "\t\t-Creating batch file: " + scriptFilePath + "..."
        psqlExePath = findFilePath(rootPostgreSQLPath, "psql.exe")

        strToWrite = '"' + psqlExePath + '" -U postgres -p 5432 -d postgres ' + \
                '-h localhost -f ' + scriptFileFolder + os.sep + \
                'ListPostgresDatabases.sql > ' + outFile

        # Create file/overwrite existing
        batFile_f = open(scriptFilePath, "w")
        batFile_f.write(strToWrite + "\n")
        batFile_f.close()
        
        # ---------------------------------------------------------------------
        # Run batch file that connects to postgreSQL postgres database
        # and queries pg_database table.
        # ---------------------------------------------------------------------
        print "\t\t-Executing batch file (" + scriptFilePath + ")"
        print "\t\tto determine existing PostgreSQL databases..."
            
        #Run batch file that executes sql statement
        subprocess.call(scriptFilePath)
        
        # Read output file and add database names to list
        if os.path.exists(outFile):
            f = open(outFile, "r")
            for line in f:
                # remove leading/trailing spaces
                line = line.strip()
                if line == "datname" or line.find("--") <> -1 or \
                    line.find(" rows") <> -1:
                    # skip line
                    continue
                if len(line) > 0:
                    # added this check because an extra blank line is
                    # present in the file
                    dbList.append(line)
            
    except:
        # Get the traceback object
        tb = sys.exc_info()[2]
        tbinfo = traceback.format_tb(tb)[0]
     
        # Concatenate information together concerning the error into a message string
        pymsg = "PYTHON ERRORS:\nTraceback info:\n" + tbinfo + "\nError Info:\n" + str(sys.exc_info()[1])
        
        # Print Python error messages for use in Python / Python Window
        print
        print "***** ERROR ENCOUNTERED *****"
        print pymsg + "\n"

    finally:
        if f:
            f.close()
        if conf_f:
            conf_f.close()
            
        # Delete the config file we created
        os.remove(confFilePath)
        
        # Rename backup config if existed
        if confFileExists:
            os.rename(confFilePath + ".bak", confFilePath)

        return dbList
Code example #10
def main():
    exit_err_code = 1

    # Print/get script arguments
    results = print_args()
    if not results:
        sys.exit(exit_err_code)
    portal_address, adminuser, password = results

    total_success = True
    title_break_count = 100
    section_break_count = 75
    search_query = None

    print '=' * title_break_count
    print 'Validate Hosted Service Sources'
    print '=' * title_break_count

    source_items = []
    hosted_items = []

    root_folder_path = None
    root_folder_path = tempfile.mkdtemp()
    print 'Temporary directory: {}'.format(root_folder_path)

    orig_dir = os.getcwd()

    try:
        portal = Portal(portal_address, adminuser, password)
        items = portal.search()

        # ---------------------------------------------------------------------
        #  Get info about hosted service source items
        # (currently service definitions)
        # ---------------------------------------------------------------------

        for item in items:

            if item['type'] == 'Service Definition':

                print '\nDownloading and extracting Service Definition item {}'.format(
                    item['id'])

                # Download .sd file
                download_root_path = os.path.join(root_folder_path, item['id'])
                os.mkdir(download_root_path)
                download_path = portal.item_datad(item['id'],
                                                  download_root_path)

                # Extract serviceconfiguration.json file from downloaded .sd file
                file_name = 'serviceconfiguration.json'
                extract_path = download_path.replace('.sd', '')
                #print extract_path
                os.mkdir(extract_path)
                err_stat = extractFromSDFile(download_path, extract_path,
                                             file_name)
                print 'Extract status: {}'.format(err_stat)

                # Open extract .json file
                file_path = findFilePath(extract_path, file_name)
                os.chdir(os.path.dirname(file_path))
                service_config = json.load(open(file_name))

                # [{id: val, owner: val, title: val, type: val
                # service_config: {stuff from .json file}}]
                d = {
                    'id': item['id'],
                    'owner': item['owner'],
                    'title': item['title'],
                    'type': item['type'],
                    'service_config': service_config
                }
                source_items.append(d)

        # ---------------------------------------------------------------------
        # Get info about hosted service items
        # ---------------------------------------------------------------------
        print '\nDetermine what hosted services exist...'
        h_service_items = get_hosted_service_items(portal, items)

        for item in h_service_items:
            d = {
                'id': item['id'],
                'owner': item['owner'],
                'title': item['title'],
                'type': item['type'],
                'url': item['url']
            }
            hosted_items.append(d)

        # ---------------------------------------------------------------------
        # For each hosted service find the associated source item
        # ---------------------------------------------------------------------
        print '=' * section_break_count
        print '\nDetermine which source items are associated with each hosted service...'
        print '=' * section_break_count
        num_hosted_no_match = 0
        num_hosted_match = 0
        num_hosted_mismatch_owner = 0
        write_str = "\tid: {:<34}owner: {:<20}type: {:<25}service: {:<50}\n"

        for hosted_d in hosted_items:
            found = False
            found_num = 0

            # Get last components of URL (i.e., SRTM_V2_56020/FeatureServer)
            hosted_url = '/'.join(hosted_d['url'].split('/')[-2:])

            print '\n{}'.format('-' * 100)
            print 'Hosted Service Item:   Title - "{}"\n'.format(
                hosted_d['title'])

            hosted_str = write_str.format(hosted_d['id'], hosted_d['owner'],
                                          hosted_d['type'], hosted_url)
            print hosted_str

            # Look for match in source items
            print '\tMatching Source Item:'

            for source_d in source_items:
                src_service_info = source_d['service_config']['service']
                src_service_name = src_service_info['serviceName']
                src_service_type = src_service_info['type']
                src_service_url = '{}/{}'.format(src_service_name,
                                                 src_service_type)
                if hosted_url == src_service_url:
                    found = True
                    found_num += 1

                    match_str = write_str.format(source_d['id'],
                                                 source_d['owner'],
                                                 source_d['type'],
                                                 src_service_url)
                    print '\n\tTitle: "{}"'.format(source_d['title'])
                    print match_str

                    if source_d['owner'] != hosted_d['owner']:
                        print '*** ERROR: owner does not match hosted service item owner.'
                        num_hosted_mismatch_owner += 1

            if found_num == 0:
                print '*** ERROR: no matching source item found.'
            if found_num > 1:
                print '*** ERROR: there is more than one matching source item found.'

            if found:
                num_hosted_match += 1
            else:
                num_hosted_no_match += 1

        # ---------------------------------------------------------------------
        # For each source item find the associated hosted service
        # ---------------------------------------------------------------------
        print '=' * section_break_count
        print '\nDetermine which hosted services are associated with each source item...'
        print '=' * section_break_count
        num_source_no_match = 0
        num_source_match = 0
        num_source_mismatch_owner = 0
        write_str = "\tid: {:<34}owner: {:<20}type: {:<25}service: {:<50}\n"

        for source_d in source_items:
            found = False
            found_num = 0

            src_service_info = source_d['service_config']['service']
            src_service_name = src_service_info['serviceName']
            src_service_type = src_service_info['type']
            src_service_url = '{}/{}'.format(src_service_name,
                                             src_service_type)

            print '\n{}'.format('-' * 100)
            print 'Source Item:   Title - "{}"\n'.format(source_d['title'])

            source_str = write_str.format(source_d['id'], source_d['owner'],
                                          source_d['type'], src_service_url)
            print source_str

            # Look for match in hosted items
            print '\tMatching Hosted Service:'

            for hosted_d in hosted_items:

                # Get last components of URL (i.e., SRTM_V2_56020/FeatureServer)
                hosted_url = '/'.join(hosted_d['url'].split('/')[-2:])

                if hosted_url == src_service_url:
                    found = True
                    found_num += 1

                    match_str = write_str.format(hosted_d['id'],
                                                 hosted_d['owner'],
                                                 hosted_d['type'], hosted_url)
                    print '\n\tTitle: "{}"'.format(hosted_d['title'])
                    print match_str

                    if hosted_d['owner'] != source_d['owner']:
                        print '*** ERROR: owner does not match associated source owner.'
                        num_source_mismatch_owner += 1

            if found_num == 0:
                print '*** ERROR: no matching hosted service found.'
            if found_num > 1:
                print '*** ERROR: there is more than one hosted service found.'

            if found:
                num_source_match += 1
            else:
                num_source_no_match += 1

        print '\n{}'.format('=' * section_break_count)
        print 'Summary:\n'
        print 'Number of hosted services: {}'.format(len(hosted_items))
        print 'With matching source item: {}'.format(num_hosted_match)
        print 'With NO matching source item: {}'.format(num_hosted_no_match)
        print 'With mis-matching owners: {}'.format(num_hosted_mismatch_owner)

        print '\nNumber of source items: {}'.format(len(source_items))
        print 'With matching hosted service: {}'.format(num_source_match)
        print 'With NO matching hosted service: {}'.format(num_source_no_match)
        print 'With mis-matching owners: {}'.format(num_source_mismatch_owner)

    except:
        total_success = False

        # Get the traceback object
        tb = sys.exc_info()[2]
        tbinfo = traceback.format_tb(tb)[0]

        # Concatenate information together concerning the error
        # into a message string
        pymsg = "PYTHON ERRORS:\nTraceback info:\n" + tbinfo + \
                "\nError Info:\n" + str(sys.exc_info()[1])

        # Print Python error messages for use in Python / Python Window
        print
        print "***** ERROR ENCOUNTERED *****"
        print pymsg + "\n"

    finally:
        os.chdir(orig_dir)
        if root_folder_path:
            shutil.rmtree(root_folder_path)

        print '\nDone.'
        if total_success:
            sys.exit(0)
        else:
            sys.exit(exit_err_code)
Code example #11
# Check if root folder and json file exist.
# ------------------------------------------------------------------------------------
if not os.path.exists(root_path):
    print '\nERROR: <RootFolderToSearch> folder ' + root_path + ' does not exist. Exiting script.'
    sys.exit(1)

if not os.path.isfile(id_map_file):
    print '\nERROR: <IDJsonFile> file ' + id_map_file + ' does not exist. Exiting script.'
    sys.exit(1) 
    
# ------------------------------------------------------------------------------------
# Create list of files to update
# ------------------------------------------------------------------------------------
files_to_update = []

files_to_update.extend(findFilePath(root_path, '*.js', returnFirst=False))
files_to_update.extend(findFilePath(root_path, '*.html', returnFirst=False))
files_to_update.extend(findFilePath(root_path, '*.json', returnFirst=False))
files_to_update.extend(findFilePath(root_path, '*.csv', returnFirst=False))
files_to_update.extend(findFilePath(root_path, '*.erb', returnFirst=False))

total_files = len(files_to_update)

# ------------------------------------------------------------------------------------
# Create dictionary of search/replace values
# ------------------------------------------------------------------------------------
search_replace_map = {}

# Add old/new hostnames
search_replace_map[old_hostname] = new_hostname
Code example #12
def createDataStores(agsServerAccount, password, dataDrive):
    success = True
    
    try:
	
        opsServer = OpsServerConfig.getOpsServerRootPath(dataDrive)
        environmentData = OpsServerConfig.getEnvDataRootPath(dataDrive)
        dbConnFileRootPath = OpsServerConfig.getDBConnFileRootPath(dataDrive)

        print
        print "--Create Local Data Stores..."

        # Create local variables to use for both creating enterprise
        # geodatabases and creating connection files.
        dbPlatform = "POSTGRESQL"
        accountAuthentication = "DATABASE_AUTH"
        dbAdmin = "postgres"    #For PostgreSQL this is the postgres superuser
        gdbAdminName = "sde"    #For PostreSQL this must be 'sde'
	
	# 2/26/2013: Modified to dynamically search for keycodes file instead of
	# hardcoding to specific version of the software.
        #pathList = ["Program Files", "ESRI", "License10.1", "sysgen", "keycodes"]
	#authorizationFile = makePath("C", pathList)
	pathList = ["Program Files", "ESRI"]
	authorizationFile = findFilePath(makePath("C", pathList), "keycodes")
        

        # Create list of database names to create connection file.
        dbsToConnect = []
        for db in dbsToCreate:
            dbsToConnect.append(db)

        # ---------------------------------------------------------------------
        # Create local environment data folders
        # ---------------------------------------------------------------------
        
        if createLocalEnvFolders:
            
            print "\n\t-Creating local environment data folders..."
            
            foldersToCreate = []
            foldersToCreate.append(environmentData)
            foldersToCreate.append(dbConnFileRootPath)

            if not os.path.exists(environmentData):
                for folder in foldersToCreate:
                    print "\t\tCreating folder: " + folder
                    os.makedirs(folder)
                print "\t\tDone."
                print
                
                changeOwnership(opsServer, agsServerAccount)

        # ---------------------------------------------------------------------
        # Create local enterprise databases
        # ---------------------------------------------------------------------
        
        if createEnterpriseGDBs:
            
            print "\n\t-Creating local enterprise geodatabases" + \
                    " (this will take a few minutes)...\n"
	    
            for db in dbsToCreate:
                print "\t\tCreating geodatabase '" + db + "'..."
                arcpy.CreateEnterpriseGeodatabase_management(dbPlatform,
                                                            "localhost",
                                                            db,
                                                            accountAuthentication,
                                                            dbAdmin,
                                                            password,
                                                            "",
                                                            gdbAdminName,
                                                            password,
                                                            "",
                                                            authorizationFile)
		print "\t\tDone.\n"

        # ---------------------------------------------------------------------
        # Update PostgreSQL connection file to allow remote connections
        #   to environment databases
        # ---------------------------------------------------------------------
        
        if updatePostgreSQLConnectionFile:
            connectionFilePath = findFilePath(rootPostgreSQLPath, "pg_hba.conf")

            print "\n\t-Updating PostgreSQL connection file to allow remote" + \
                    " connections to databases..."
            print "\t\tFile: " + connectionFilePath

            # Create list of PostgreSQL connection entries
            postgreSQLConnEntries = []
            postgreSQLConnEntries.append("host all all 0.0.0.0/0 md5")    #IPv4
            postgreSQLConnEntries.append("host all all ::/0 md5")    #IPv6

            # Determine if connection entry already exists in file
            for postgreSQLConnEntry in postgreSQLConnEntries:
                if findInFile(connectionFilePath, postgreSQLConnEntry):
                    #Entry already exists in file so remove entry from list
                    postgreSQLConnEntries.remove(postgreSQLConnEntry)

            # Add connection entries
            if len(postgreSQLConnEntries) > 0:
                hbaFile = open(connectionFilePath, 'a')
                for postgreSQLConnEntry in postgreSQLConnEntries:
                    hbaFile.write("{}\n".format(postgreSQLConnEntry))
                hbaFile.close()

                # Reload config file
                print "\n\t-Reloading connection file..."
                exeFile = findFilePath(rootPostgreSQLPath, "pg_ctl.exe")
                confFolder = os.path.dirname(connectionFilePath)
                exeCommand = '"' + exeFile + '" -D "' + confFolder + '" reload'
                os.popen(exeCommand)

            print "\t\tDone."

        # ---------------------------------------------------------------------
        # Create SDE connection files to environment geodatabases
        # ---------------------------------------------------------------------
        
        if createSDEConnectionFiles:
            
            print "\n\t-Creating SDE connection files..."
            
            for db in dbsToCreate:
                outFile = dbsToCreate[db][1] + ".sde"

                # Set output folder location
                outFolder = dbConnFileRootPath
                sdeFilePath = os.path.join(outFolder, outFile)

                # If SDE connection file already exists, delete it.
                if os.path.exists(sdeFilePath):
                    print "\t\t* Deleting existing file " + sdeFilePath
                    os.remove(sdeFilePath)
                    print "\t\tRe-creating connection file " + sdeFilePath
                else:
                    print "\t\tCreating connection file " + sdeFilePath

                arcpy.CreateDatabaseConnection_management(outFolder,
                                                          outFile,
                                                          dbPlatform,
                                                          servername,
                                                          accountAuthentication,
                                                          gdbAdminName,
                                                          password,
                                                          "SAVE_USERNAME",
                                                          db.lower(),
                                                          "#",
                                                          "TRANSACTIONAL",
                                                          "sde.DEFAULT",
                                                          "#")

                print "\t\tDone.\n"
                # Change ownership of sde file
                changeOwnership(sdeFilePath, agsServerAccount)

    except:
        success = False
        
        # Get the traceback object
        tb = sys.exc_info()[2]
        tbinfo = traceback.format_tb(tb)[0]
     
        # Concatenate information together concerning the error into a message string
        pymsg = "PYTHON ERRORS:\nTraceback info:\n" + tbinfo + "\nError Info:\n" + str(sys.exc_info()[1])
        msgs = "ArcPy ERRORS:\n" + arcpy.GetMessages() + "\n"
        
        # Print Python error messages for use in Python / Python Window
        print
        print "***** ERROR ENCOUNTERED *****"
        print pymsg + "\n"
        print msgs
        
    finally:
        # Return success flag
        return success
Code example #13
File: CopyGDBs.py  Project: Esri/ops-server-config
printMsg = True
totalCopySuccess = True


try:
    
    #startTime = datetime.now()
    
    # ----------------------------------------
    # Determine which databases to copy
    # ----------------------------------------
    print "- Determining which databases to copy..."
    
    # Get list of all workspaces in destination folder
    # (these could be file or enterprise geodatabases)
    destDBPathsSDE = findFilePath(destPath, "*.sde", returnFirst=False)
    destDBPathsFGDB = findFolderPath(destPath, "*.gdb", returnFirst=False)
    destDBPaths = destDBPathsSDE + destDBPathsFGDB
    
    # Create dictionary where destination db name is key and
    # path to workspace is value.
    destDBs = {}
    for dbPath in destDBPaths:
        destDBs[os.path.basename(dbPath).split(".")[0].lower()] = dbPath
    
    # Get list of all workspaces in source folder
    # (these could be file or enterprise geodatabases)
    srcDBPathsSDE = findFilePath(srcPath, "*.sde", returnFirst=False)
    srcDBPathsFGDB = findFolderPath(srcPath, "*.gdb", returnFirst=False)
    srcDBPaths = srcDBPathsSDE + srcDBPathsFGDB
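
This example also relies on findFolderPath, which is likewise not defined on this page. Assuming it mirrors findFilePath but matches directory names (e.g. '*.gdb' file geodatabases) instead of file names, a minimal sketch could look like this; the real implementation may differ:

import fnmatch
import os

def findFolderPath(rootPath, folderName, returnFirst=True):
    # Sketch only: walk rootPath and collect directories whose name matches
    # the glob pattern folderName.
    matches = []
    for dirPath, dirNames, fileNames in os.walk(rootPath):
        for name in fnmatch.filter(dirNames, folderName):
            fullPath = os.path.join(dirPath, name)
            if returnFirst:
                return fullPath
            matches.append(fullPath)
    return None if returnFirst else matches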
    
Code example #14
def get_userinfo_files(portal_content_folder):
    userinfo_files = findFilePath(portal_content_folder,
                                  userinfo_file,
                                  returnFirst=False)
    return userinfo_files
Code example #15
def createDataStores(agsServerAccount, password, dataDrive):
    success = True

    try:

        opsServer = OpsServerConfig.getOpsServerRootPath(dataDrive)
        environmentData = OpsServerConfig.getEnvDataRootPath(dataDrive)
        dbConnFileRootPath = OpsServerConfig.getDBConnFileRootPath(dataDrive)

        print
        print "--Create Local Data Stores..."

        # Create local variables to use for both creating enterprise
        # geodatabases and creating connection files.
        dbPlatform = "POSTGRESQL"
        accountAuthentication = "DATABASE_AUTH"
        dbAdmin = "postgres"  #For PostgreSQL this is the postgres superuser
        gdbAdminName = "sde"  #For PostreSQL this must be 'sde'

        # 2/26/2013: Modified to dynamically search for keycodes file instead of
        # hardcoding to specific version of the software.
        #pathList = ["Program Files", "ESRI", "License10.1", "sysgen", "keycodes"]
        #authorizationFile = makePath("C", pathList)
        pathList = ["Program Files", "ESRI"]
        authorizationFile = findFilePath(makePath("C", pathList), "keycodes")

        # Create list of database names to create connection file.
        dbsToConnect = []
        for db in dbsToCreate:
            dbsToConnect.append(db)

        # ---------------------------------------------------------------------
        # Create local environment data folders
        # ---------------------------------------------------------------------

        if createLocalEnvFolders:

            print "\n\t-Creating local environment data folders..."

            foldersToCreate = []
            foldersToCreate.append(environmentData)
            foldersToCreate.append(dbConnFileRootPath)

            if not os.path.exists(environmentData):
                for folder in foldersToCreate:
                    print "\t\tCreating folder: " + folder
                    os.makedirs(folder)
                print "\t\tDone."
                print

                changeOwnership(opsServer, agsServerAccount)

        # ---------------------------------------------------------------------
        # Create local enterprise databases
        # ---------------------------------------------------------------------

        if createEnterpriseGDBs:

            print "\n\t-Creating local enterprise geodatabases" + \
                    " (this will take a few minutes)...\n"

            for db in dbsToCreate:
                print "\t\tCreating geodatabase '" + db + "'..."
                arcpy.CreateEnterpriseGeodatabase_management(
                    dbPlatform, "localhost", db, accountAuthentication,
                    dbAdmin, password, "", gdbAdminName, password, "",
                    authorizationFile)
                print "\t\tDone.\n"

        # ---------------------------------------------------------------------
        # Update PostgreSQL connection file to allow remote connections
        #   to environment databases
        # ---------------------------------------------------------------------

        if updatePostgreSQLConnectionFile:
            connectionFilePath = findFilePath(rootPostgreSQLPath,
                                              "pg_hba.conf")

            print "\n\t-Updating PostgreSQL connection file to allow remote" + \
                    " connections to databases..."
            print "\t\tFile: " + connectionFilePath

            # Create list of PostgreSQL connection entries
            postgreSQLConnEntries = []
            postgreSQLConnEntries.append("host all all 0.0.0.0/0 md5")  #IPv4
            postgreSQLConnEntries.append("host all all ::/0 md5")  #IPv6

            # Determine if connection entry already exists in file
            for postgreSQLConnEntry in postgreSQLConnEntries:
                if findInFile(connectionFilePath, postgreSQLConnEntry):
                    #Entry already exists in file so remove entry from list
                    postgreSQLConnEntries.remove(postgreSQLConnEntry)

            # Add connection entries
            if len(postgreSQLConnEntries) > 0:
                hbaFile = open(connectionFilePath, 'a')
                for postgreSQLConnEntry in postgreSQLConnEntries:
                    hbaFile.write("{}\n".format(postgreSQLConnEntry))
                hbaFile.close()

                # Reload config file
                print "\n\t-Reloading connection file..."
                exeFile = findFilePath(rootPostgreSQLPath, "pg_ctl.exe")
                confFolder = os.path.dirname(connectionFilePath)
                exeCommand = '"' + exeFile + '" -D "' + confFolder + '" reload'
                os.popen(exeCommand)

            print "\t\tDone."

        # ---------------------------------------------------------------------
        # Create SDE connection files to environment geodatabases
        # ---------------------------------------------------------------------

        if createSDEConnectionFiles:

            print "\n\t-Creating SDE connection files..."

            for db in dbsToCreate:
                outFile = dbsToCreate[db][1] + ".sde"

                # Set output folder location
                outFolder = dbConnFileRootPath
                sdeFilePath = os.path.join(outFolder, outFile)

                # If SDE connection file already exists, delete it.
                if os.path.exists(sdeFilePath):
                    print "\t\t* Deleting existing file " + sdeFilePath
                    os.remove(sdeFilePath)
                    print "\t\tRe-creating connection file " + sdeFilePath
                else:
                    print "\t\tCreating connection file " + sdeFilePath

                arcpy.CreateDatabaseConnection_management(
                    outFolder, outFile, dbPlatform, servername,
                    accountAuthentication, gdbAdminName, password,
                    "SAVE_USERNAME", db.lower(), "#", "TRANSACTIONAL",
                    "sde.DEFAULT", "#")

                print "\t\tDone.\n"
                # Change ownership of sde file
                changeOwnership(sdeFilePath, agsServerAccount)

    except:
        success = False

        # Get the traceback object
        tb = sys.exc_info()[2]
        tbinfo = traceback.format_tb(tb)[0]

        # Concatenate information together concerning the error into a message string
        pymsg = "PYTHON ERRORS:\nTraceback info:\n" + tbinfo + "\nError Info:\n" + str(
            sys.exc_info()[1])
        msgs = "ArcPy ERRORS:\n" + arcpy.GetMessages() + "\n"

        # Print Python error messages for use in Python / Python Window
        print
        print "***** ERROR ENCOUNTERED *****"
        print pymsg + "\n"
        print msgs

    finally:
        # Return success flag
        return success