def add_file(file_path, file_data=None):
            """Stage one metadata file (or a whole folder via '*') for push.

            Locates the metadata-type folder segment inside ``file_path``,
            mirrors the folder structure under ``self.push_path`` (closure
            variable), and writes the file contents there.

            Args:
                file_path: Source path of the metadata file. A file name of
                    '*' expands to every file in the source folder.
                file_data: Optional raw bytes to write; when None the bytes
                    are read from ``file_path``.
            """
            split_path = file_path.split('/')
            # Flag each path segment that names a known metadata-type folder;
            # the first True marks where the relative folder path begins.
            split_path_is_metadata_folder = [
                s in AntMigrationTool.metadata_type_folders.values()
                and s != '' for s in split_path
            ]
            index_for_metadata_folder = split_path_is_metadata_folder.index(
                True)

            res = re.search(regex, file_path)
            folder_path = res.group(1)
            folder_name = '/'.join(split_path[index_for_metadata_folder:-1])
            file_name = split_path[-1]
            # Wildcard file name: recurse over every file in the folder.
            if file_name == '*':
                for item in dir.get_files_list(folder_path):
                    add_file(item)
                return
            if file_data is None:
                # Context manager closes the handle promptly instead of
                # leaking it until garbage collection (fixes resource leak).
                with open(file_path, 'rb') as source_file:
                    file_data = source_file.read()
            new_folder_path = '{}/{}'.format(self.push_path, folder_name)
            new_file_path = '{}/{}'.format(new_folder_path, file_name)
            dir.make_dir(new_folder_path, delete_first=False)
            dir.create_file(new_file_path, file_data)
# Example #2
 def meta(self):
     """Return the cached metadata dict, loading or initializing it on first call."""
     # Fast path: an already-populated cache is returned untouched.
     if self.meta_dict:
         return self.meta_dict
     self.meta_dict = {}
     cache_folder_missing = not os.path.exists(self.cache_path)
     if cache_folder_missing:
         # Ensure the cache directory exists before touching the pickle.
         dir.make_dir(self.cache_path, False)
     if os.path.exists(self.meta_path):
         # A pickle already exists on disk; hydrate the cache from it.
         self.meta_dict = self.read_pickle(self.meta_path)
     else:
         # First run: persist the empty dict so the file exists next time.
         self.write_pickle(self.meta_path, self.meta_dict)
     return self.meta_dict
# Example #3
 def to_excel(self, file_path=None, additional_sheets=None):
     """Write the flattened metadata tree to an Excel workbook.

     Args:
         file_path: Destination workbook path; defaults to
             ``self.excel_file_path`` when omitted.
         additional_sheets: Optional mapping of sheet name -> data,
             written alongside the main metadata sheet.
     """
     file_path = file_path or self.excel_file_path
     flat = SalesforceMetadataFile.flatten_dict(self.tree)
     dir.make_dir(self.excel_files_folder_path, False)
     output = {self.metadata_type(): flat}
     if additional_sheets is not None:
         output.update(additional_sheets)
     # BUG FIX: honor the resolved file_path. Previously the method always
     # wrote to (and printed) self.excel_file_path, silently ignoring the
     # caller-supplied file_path argument.
     pdh.to_excel(output, file_path)
     print(
         f'Open report at the following location:\n{os.path.abspath(file_path)}'
     )
# Example #4
def init():
    """Ensure the root working directory exists without clearing any contents."""
    dir.make_dir(root, delete_first=False)
# Example #5
 def write_result(to_folder, file_name, results):
     """Save `results` as a timestamped CSV under `to_folder`; return the written path."""
     stamp = datetime.datetime.now().strftime("%Y-%m-%d %H.%M")
     target_folder = f'{dataloader_files_path}/{to_folder}'
     # Create the destination folder if needed, keeping any existing files.
     dir.make_dir(target_folder, delete_first=False)
     output_file_path = f'{target_folder}/{stamp} {file_name}.csv'
     pd.DataFrame(results).to_csv(output_file_path, index=False)
     return output_file_path
# Example #6
 def move(file_path, to_folder):
     """Relocate `file_path` into `to_folder`, prefixing the name with a timestamp."""
     stamp = datetime.datetime.now().strftime("%Y-%m-%d %H.%M")
     destination_dir = f'{dataloader_files_path}/{to_folder}'
     # Create the destination folder if needed, keeping any existing files.
     dir.make_dir(destination_dir, delete_first=False)
     file_name = get_file_name(file_path)
     os.rename(file_path, f'{destination_dir}/{stamp} {file_name}')
# Example #7
def main(can_quit=False):
    """Poll ./resources/dataloader and dataload or query any files dropped there.

    Spreadsheet files (.csv/.xlsx/.xls) are sent to sf.dataload_file; any
    other file is parsed as a query text file and run against one or more
    instances. Loops forever with a 1-second poll; can_quit=True adds a
    'Quit' menu option that breaks the loop.
    """
    dataloader_files_path = os.path.abspath('./resources/dataloader')
    print(f'\nListening... drop files in this directory to dataload or query:\n{dataloader_files_path}')
    print(f'\nFor query text files, include the prefix "--bulk" at the beginning of the file if you would like to use the Bulk API')
    time.sleep(0.2)
    dir.make_dir(dataloader_files_path, delete_first=False)

    def get_file_name(path):
        # Basename portion of a path.
        return os.path.split(path)[1]
    def move(file_path, to_folder):
        # Move a processed file into a timestamped name under to_folder.
        timenow = datetime.datetime.now().strftime("%Y-%m-%d %H.%M")
        dir.make_dir(f'{dataloader_files_path}/{to_folder}', delete_first=False)
        file_name = get_file_name(file_path)
        new_path = f'{dataloader_files_path}/{to_folder}/{timenow} {file_name}'
        os.rename(file_path, new_path)
    def write_result(to_folder, file_name, results):
        # Persist results as a timestamped CSV; returns the written path.
        timenow = datetime.datetime.now().strftime("%Y-%m-%d %H.%M")
        output_file_path = f'{dataloader_files_path}/{to_folder}/{timenow} {file_name}.csv'
        dir.make_dir(f'{dataloader_files_path}/{to_folder}', delete_first=False)
        pd.DataFrame(results).to_csv(output_file_path, index=False)
        return output_file_path

    while True:
        # Regular, non-hidden files currently sitting in the drop folder.
        files = [f for f in os.listdir(dataloader_files_path) if os.path.isfile(f'{dataloader_files_path}/{f}') and not f.startswith('.')]
        if len(files) > 0:
            # '~'-prefixed files (e.g. Office lock files) are kept out of the menu.
            options = {**{f: f'{dataloader_files_path}/{f}' for f in files if not f.startswith('~')}, **{'Refresh': None}}
            if can_quit: options.update({'Quit': 'QUIT'})
            file_path = prompt('\nWhich file would you like to load?', options)
            if file_path == 'QUIT':
                break
            if file_path is None:
                # 'Refresh' selected; rescan the folder.
                continue
            file_name = get_file_name(file_path)
            if file_name.endswith('.csv') or file_name.endswith('.xlsx') or file_name.endswith('.xls'):
                # Spreadsheet: run a dataload.
                results = None
                try:
                    results = sf.dataload_file(file_path)
                    sf.console.clear()
                except Exception as e:
                    traceback.print_exc()
                next_options = ['Keep in working folder', 'Move to Archive',]
                if results is not None:
                    for instance, result in results.items():
                        # Backups and error CSVs are written on background threads.
                        if result.backup is not None:
                            threading.new(write_result, 'backups', f'({instance}) {result.job.object_name} BACKUP ({len(result.backup)})', result.backup)
                        if result is not None and hasattr(result.job, 'errors') and len(result.job.errors) > 0:
                            next_options.append('View errors')
                            threading.new(write_result, 'errors', f'({instance}) {file_name} ({len(result.job.errors)})', result.job.errors)
                            print(f'({instance}) Errors have been written to the /errors folder.')
                next_action = prompt(f'What would you like to do with the file? ({file_name})', options=next_options)
                if next_action == 'Move to Archive':
                    move(file_path, 'archive')
                elif next_action == 'View errors':
                    for instance, result in results.items():
                        df = pd.DataFrame().from_records(result.job.errors)
                        # Show at most the first 5 columns, with 'sf_result' forced first.
                        cols = list({**{'sf_result':None}, **{c:c for c in df.columns.values}})[0:5]
                        print(f'### {instance} ###')
                        print(df[cols].to_string())
            else:
                # Is query
                query_text = open(file_path, 'r').read()
                parsed_query = sf.parse_query(query_text)
                if parsed_query is not None:
                    try:
                        # NOTE(review): credentials lacking a 'security_token' entry are
                        # excluded from the instance menu — confirm this is intentional.
                        instances = prompt('Which instance(s) would you like to query?', multiselect=True, options={
                            (key[key.rindex('.')+1:] + (' prod' if val['sandbox'] == 'False' else '')).strip(): val 
                            for key, val in sf.instance_credentials.items() if 'security_token' in val
                        })
                        sessions = [Salesforce_API(c) for c in instances]
                        # A leading '--bulk' in the query file selects the Bulk API.
                        mode = 'bulk' if re.match(r'^\s*--\s*bulk', query_text) else 'simple'
                        queries = {
                            session.instance: threading.new(session.select, query_text, mode=mode, return_type='flat')
                            for session in sessions
                        }
                        threading.wait(queries.values())
                        print('\nResult file(s) written to:')
                        for instance, result in queries.items():
                            result = result.result()
                            output_file_path = write_result('query_results', f'{instance} {parsed_query.object_name} ({len(result)})', result)
                            print(f'/query_results/{os.path.split(output_file_path)[1]}')
                        sf.console.clear()
                    except Exception as e:
                        print(e)
                else:
                    print('Could not parse query file. Please update the file and try again.')
        time.sleep(1)
 def init_folders(self):
     """Create the pull, push, and results working folders."""
     for folder in (self.pull_path, self.push_path, self.results_path):
         dir.make_dir(folder)
 def __init__(self, salesforce_api_self):
     """Bind to the owning Salesforce_API and prepare the ant resources folder."""
     # `session` is a legacy alias for the same object, kept for
     # reverse compatibility with older callers.
     self.sf = self.session = salesforce_api_self
     AntMigrationTool._purge_old_folders()
     dir.make_dir('./resources/ant', delete_first=False)