def main():
    """Entry point: parse the config file, extract each configured URL,
    and write the result to output.json.

    NOTE(review): every iteration writes the same "output.json", so only
    the last URL's data survives — confirm whether per-URL output files
    were intended.
    """
    # get_new_proxies("http")  # disabled proxy refresh
    parse_config_file()
    for url in cb_result_urls.split(","):
        data = extract(url.strip(", "))
        # Fixed: the original called "output.json".format() with no
        # arguments — a no-op on a literal with no placeholders.
        utilities.write_json_file(name="output.json", data=data)
def project_cleanup(self):
    """Drop 'saved_files' entries whose Photoshop file no longer exists.

    Scans DATA_PATH for '*.project' JSON files and rewrites each one in
    place with only the entries whose 'photoshop_file' is still on disk.
    """
    if not os.path.exists(DATA_PATH):
        return
    for data_file in os.listdir(DATA_PATH):
        # Only '*.project' files are project descriptors.
        if data_file.split('.')[-1] != 'project':
            continue
        file_path = '{0}{1}'.format(DATA_PATH, data_file)
        try:
            project_data = utilities.load_json_file(file_path)
            # Rebuild the list instead of popping while iterating (the
            # original rewound a manual index after each pop).  The
            # explicit `is not None` guard handles the [None] placeholder
            # written by create_project_file, which previously raised
            # TypeError and was silently swallowed by a bare except,
            # skipping the rewrite entirely.
            project_data['saved_files'] = [
                entry for entry in project_data['saved_files']
                if entry is not None
                and os.path.exists(entry['photoshop_file'])
            ]
            utilities.write_json_file(file_path, project_data)
        except (IOError, OSError, KeyError, TypeError, ValueError):
            # Best-effort cleanup: one malformed project file must not
            # abort cleanup of the remaining files.
            pass
def create_project_file(self, project_name, project_dir):
    """Write a fresh '<project_name>.project' JSON descriptor, then
    refresh the startup state."""
    # [None] is the placeholder meaning "no saved files yet".
    new_project = {
        'project': project_name,
        'project_directory': str(project_dir),
        'saved_files': [None],
    }
    target = '{0}{1}.project'.format(DATA_PATH, project_name)
    utilities.write_json_file(target, new_project)
    self.set_project_startup()
def add_group(self):
    """Append the group/suffix pair from the UI to the table and the
    GROUPS_LOCATION JSON store."""
    group_name = str(self.groupName_line.text())
    suffix_name = str(self.groupSuffix_line.text())
    # Both fields must be non-empty before anything is persisted.
    if group_name == '' or suffix_name == '':
        return
    utilities.add_table_item(self.group_list_table, [group_name, suffix_name])
    stored_groups = utilities.load_json_file(GROUPS_LOCATION)
    stored_groups[group_name] = suffix_name
    utilities.write_json_file(GROUPS_LOCATION, stored_groups)
def remove_group(self):
    """Delete the selected groups from the JSON store and the UI table."""
    remove_items = utilities.get_selected_table_items(self.group_list_table)
    group_list = utilities.load_json_file(GROUPS_LOCATION)
    for item in remove_items:
        # Fixed: dict.has_key() was removed in Python 3; the `in`
        # operator is the equivalent on both Python 2 and 3.
        if item in group_list:
            del group_list[item]
    utilities.write_json_file(GROUPS_LOCATION, group_list)
    utilities.remove_selected_table_row(self.group_list_table)
def remove_group( self ):
    """Remove every selected group from GROUPS_LOCATION and from the
    table widget."""
    remove_items = utilities.get_selected_table_items( self.group_list_table )
    group_list = utilities.load_json_file( GROUPS_LOCATION )
    for item in remove_items:
        # Fixed: `in` replaces dict.has_key(), which no longer exists on
        # Python 3 (same membership test on Python 2).
        if item in group_list:
            del group_list[ item ]
    utilities.write_json_file( GROUPS_LOCATION, group_list )
    utilities.remove_selected_table_row( self.group_list_table )
def group_list_setup( self ):
    """Ensure DATA_PATH exists and seed GROUPS_LOCATION with the default
    suffix table on first run."""
    utilities.get_directory( DATA_PATH )
    if os.path.exists( GROUPS_LOCATION ):
        return
    # First run: write the default map-type -> filename-suffix table.
    defaults = {
        'diffuse': '_d',
        'specular': '_s',
        'normal': '_n',
        'bump': '_b',
        'occlusion': '_o',
        'luminous': '_l',
    }
    utilities.write_json_file( GROUPS_LOCATION, defaults )
def add_group(self):
    """Persist a new group/suffix mapping taken from the two line-edit
    widgets into the table and the groups JSON file."""
    group_name = str(self.groupName_line.text())
    suffix_name = str(self.groupSuffix_line.text())
    if group_name != '' and suffix_name != '':
        # Reflect the new row in the UI table first, then in the store.
        utilities.add_table_item(self.group_list_table,
                                 [group_name, suffix_name])
        group_map = utilities.load_json_file(GROUPS_LOCATION)
        group_map[group_name] = suffix_name
        utilities.write_json_file(GROUPS_LOCATION, group_map)
def group_list_setup(self):
    """Create the groups JSON file with default suffix mappings if it
    does not exist yet."""
    utilities.get_directory(DATA_PATH)
    if not os.path.exists(GROUPS_LOCATION):
        # Default texture-channel suffixes, written once on first run.
        suffix_defaults = dict(diffuse='_d', specular='_s', normal='_n',
                               bump='_b', occlusion='_o', luminous='_l')
        utilities.write_json_file(GROUPS_LOCATION, suffix_defaults)
def set_project_startup( self ):
    """Persist the active project and stay-on-top flag to
    STARTUP_LOCATION, and re-show the windows with that flag applied."""
    self.project_cleanup()
    project_name = str( self.project_combo.currentText() )
    # Hoisted: isChecked() was queried three times for the same value.
    stay_on_top = self.window_stay_on_top_action.isChecked()
    utilities.window_stay_on_top( stay_on_top, self.main_ui )
    self.main_ui.show()
    try:
        # The group editor window may not have been opened yet.
        utilities.window_stay_on_top( stay_on_top, self.group_ui )
        self.group_ui.show()
    except ( AttributeError, RuntimeError ):
        # Narrowed from a bare except: AttributeError when self.group_ui
        # was never created; RuntimeError when its Qt widget is already
        # deleted.  Anything else should surface.
        pass
    project_data = {'main_project': project_name,
                    'window_stay_on_top': stay_on_top}
    utilities.write_json_file( STARTUP_LOCATION, project_data )
def set_project_startup(self):
    """Save startup preferences (active project, stay-on-top) and show
    the tool windows."""
    self.project_cleanup()
    project_name = str(self.project_combo.currentText())
    on_top = self.window_stay_on_top_action.isChecked()
    utilities.window_stay_on_top(on_top, self.main_ui)
    self.main_ui.show()
    try:
        utilities.window_stay_on_top(on_top, self.group_ui)
        self.group_ui.show()
    except (AttributeError, RuntimeError):
        # Best-effort: group_ui may not exist yet (AttributeError) or its
        # Qt widget may already be destroyed (RuntimeError).  Narrowed
        # from a bare except that hid genuine bugs as well.
        pass
    utilities.write_json_file(STARTUP_LOCATION, {
        'main_project': project_name,
        'window_stay_on_top': on_top,
    })
def project_cleanup( self ):
    # Purpose: drop 'saved_files' entries whose Photoshop file is gone,
    # rewriting every '*.project' JSON file under DATA_PATH in place.
    if os.path.exists( DATA_PATH ):
        for data_file in os.listdir( DATA_PATH ):
            try:
                # Extension check: only '*.project' files are touched.
                data = data_file.split( '.' )[-1]
                if data == 'project':
                    project_data = utilities.load_json_file( '{0}{1}'.format( DATA_PATH, data_file ) )
                    project_count = 0
                    for project in project_data['saved_files']:
                        # Pop stale entries while iterating; the counter
                        # is rewound by one after each pop so it stays
                        # aligned with the shrinking list.
                        if not os.path.exists( project['photoshop_file'] ):
                            project_data['saved_files'].pop( project_count )
                            project_count += -1
                        project_count += 1
                    utilities.write_json_file( '{0}{1}'.format( DATA_PATH, data_file ), project_data )
            except:
                # NOTE(review): this bare except also hides the TypeError
                # raised when 'saved_files' holds its initial [None]
                # placeholder, silently skipping the rewrite of that file
                # — confirm whether that is intended.
                pass
from utilities import read_credentials, scrape_data, write_json_file
from extractors import get_posts_alt

FILE_NAME = '../private/credentials/reddit.txt'
OUTPUT = '../private/output/2018-02-01.txt'

# EXTRACTOR
OPTIONS = {'subreddits': ['explainlikeimfive'],
           'mode': 'top',
           'time_filter': 'week',
           'num_comments': 5,
           'sub_coerce': ['_reddit', 'subreddit', 'author'],
           'com_coerce': ['_replies', '_reddit', '_submission', 'author',
                          'subreddit']}


def EXTRACTOR(reddit):
    """Extractor callback handed to scrape_data.

    Fixed: was a lambda assigned to a name (PEP 8 E731); a def keeps the
    same callable under the same name with a proper traceback name.
    """
    return get_posts_alt(reddit, **OPTIONS)


if __name__ == '__main__':
    credentials = read_credentials(FILE_NAME)
    result_gen = scrape_data(*credentials, EXTRACTOR)
    write_json_file(result_gen, OUTPUT)
def save_export_data(self, group_data, ps_path, ps_name):
    """Merge this export's group data into the current '.project' file.

    Updates the entry matching the current Photoshop file (merging 'low'
    and 'high' group dicts), appends a new entry for a new Photoshop
    file, then offers to save the Photoshop document and reports success.
    """
    project_name = str(self.project_combo.currentText())
    ps_location = '{0}{1}'.format(ps_path, ps_name)
    # Tag the export with its source Photoshop file (mutates the caller's dict).
    group_data['photoshop_file'] = ps_location
    load_projects = utilities.load_json_file('{0}{1}.project'.format(
        DATA_PATH, project_name))
    # null_file: project file had no usable entries; group_data becomes the list.
    null_file = False
    if load_projects['saved_files'] == []:
        load_projects['saved_files'] = [group_data]
        null_file = True
    for project in load_projects['saved_files']:
        if project == None:
            # [None] placeholder from create_project_file: replace wholesale.
            load_projects['saved_files'] = [group_data]
            null_file = True
        else:
            try:
                if project['photoshop_file'] == ps_location:
                    # Keys present in both old and new data -> overwrite in place.
                    update_low_groups = set(project['low']).intersection(
                        group_data['low'])
                    update_high_groups = set(project['high']).intersection(
                        group_data['high'])
                    # Keys only in the new export -> add to the stored entry.
                    new_low_grps = list(
                        set(group_data['low']) - set(project['low']))
                    new_high_grps = list(
                        set(group_data['high']) - set(project['high']))
                    for low_goup in update_low_groups:
                        project['low'][low_goup] = group_data['low'][
                            low_goup]
                    for high_goup in update_high_groups:
                        project['high'][high_goup] = group_data['high'][
                            high_goup]
                    for grp in new_low_grps:
                        project['low'].update(
                            {grp: group_data['low'][grp]})
                    for grp in new_high_grps:
                        project['high'].update(
                            {grp: group_data['high'][grp]})
            except:
                # NOTE(review): bare except silently skips entries missing
                # 'low'/'high' keys — confirm that is intentional.
                pass
    # new_project is a sentinel: False, or a non-empty list when group_data's
    # photoshop_file matched no stored entry.
    new_project = False
    for project in load_projects['saved_files']:
        if not null_file:
            new_project = list(
                set([group_data['photoshop_file']]) -
                set([project['photoshop_file']]))
            if new_project == []:
                # Exact match found: not a new file, stop scanning.
                new_project = False
                break
    if new_project != False:
        load_projects['saved_files'].append(group_data)
    utilities.write_json_file(
        '{0}{1}.project'.format(DATA_PATH, project_name), load_projects)
    save_current_ps = utilities.yes_no_dialog(
        self, 'Save Photoshop Files',
        'Would you like to save your current Photoshop file {0}{1}?'.
        format(ps_path, ps_name))
    if save_current_ps:
        self.save_photoshop_file(ps_path, ps_name)
    self.message_report(error_level=0, message="Export completed!")
def save_export_data( self, group_data, ps_path, ps_name ):
    """Merge this export's group data into the active '.project' file,
    offer to save the Photoshop document, and report completion."""
    project_name = str( self.project_combo.currentText() )
    ps_location = '{0}{1}'.format( ps_path, ps_name )
    # Record the source Photoshop file on the export (mutates caller's dict).
    group_data['photoshop_file'] = ps_location
    load_projects = utilities.load_json_file( '{0}{1}.project'.format( DATA_PATH, project_name ) )
    # null_file flags "project had no usable entries; group_data is the list now".
    null_file = False
    if load_projects['saved_files'] == []:
        load_projects['saved_files'] = [group_data]
        null_file = True
    for project in load_projects['saved_files']:
        if project == None:
            # [None] placeholder written by create_project_file: replace it.
            load_projects['saved_files'] = [group_data]
            null_file = True
        else:
            try:
                if project['photoshop_file'] == ps_location:
                    # Shared keys are overwritten; brand-new keys are added.
                    update_low_groups = set( project['low'] ).intersection( group_data['low'] )
                    update_high_groups = set( project['high'] ).intersection( group_data['high'] )
                    new_low_grps = list( set( group_data['low'] ) - set( project['low'] ) )
                    new_high_grps = list( set( group_data['high'] ) - set( project['high'] ) )
                    for low_goup in update_low_groups:
                        project['low'][low_goup] = group_data['low'][low_goup]
                    for high_goup in update_high_groups:
                        project['high'][high_goup] = group_data['high'][high_goup]
                    for grp in new_low_grps:
                        project['low'].update( {grp: group_data['low'][grp]} )
                    for grp in new_high_grps:
                        project['high'].update( {grp: group_data['high'][grp]} )
            except:
                # NOTE(review): bare except silently skips entries that lack
                # 'low'/'high' keys — confirm this is intentional.
                pass
    # Sentinel: False, or a non-empty list if group_data's photoshop_file
    # matched no stored entry (i.e. this is a new Photoshop file).
    new_project = False
    for project in load_projects['saved_files']:
        if not null_file:
            new_project = list( set( [group_data['photoshop_file']] ) - set( [project['photoshop_file']] ) )
            if new_project == []:
                # Exact match found: not new, stop scanning.
                new_project = False
                break
    if new_project != False:
        load_projects['saved_files'].append( group_data )
    utilities.write_json_file( '{0}{1}.project'.format( DATA_PATH, project_name ), load_projects )
    save_current_ps = utilities.yes_no_dialog( self, 'Save Photoshop Files', 'Would you like to save your current Photoshop file {0}{1}?'.format( ps_path, ps_name ) )
    if save_current_ps:
        self.save_photoshop_file( ps_path, ps_name )
    self.message_report( error_level = 0, message = "Export completed!" )
def create_project_file( self, project_name, project_dir ):
    """Create the on-disk project descriptor, then re-apply startup
    settings."""
    utilities.write_json_file(
        '{0}{1}.project'.format( DATA_PATH, project_name ),
        {
            'project': project_name,
            'project_directory': str( project_dir ),
            # A single None entry marks a brand-new project with no saves.
            'saved_files': [None],
        },
    )
    self.set_project_startup()