from os import path as os_path
from shutil import move as sh_move


def switch(self, cmd):
    # Toggle an opkg feed configuration file by renaming it: appending ".off"
    # disables the feed, stripping the suffix re-enables it.
    if cmd:
        try:
            file = cmd["file"]

            if os_path.exists("/etc/opkg/" + file):
                sh_move("/etc/opkg/" + file, "/etc/opkg/" + file + ".off")
                return (True, file + ".off")
            else:
                sh_move("/etc/opkg/" + file + ".off", "/etc/opkg/" + file)
                return (True, file)
        except Exception as e:
            return (False, str(e))
def switch(self, cmd):
    # Same toggle, but for APT source lists under /etc/apt/sources.list.d/
    if cmd:
        try:
            file = cmd["file"]

            if os_path.exists("/etc/apt/sources.list.d/" + file):
                sh_move("/etc/apt/sources.list.d/" + file,
                        "/etc/apt/sources.list.d/" + file + ".off")
                return (True, file + ".off")
            else:
                sh_move("/etc/apt/sources.list.d/" + file + ".off",
                        "/etc/apt/sources.list.d/" + file)
                return (True, file)
        except Exception as e:
            return (False, str(e))
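# Both switch() variants above implement the same rename-based toggle. Below is
# a minimal standalone sketch of that pattern, reusing the os_path / sh_move
# aliases imported above; toggle_conf, the directory, and the file name are
# illustrative assumptions, not part of the original classes.
def toggle_conf(directory, filename):
    """Rename <filename> to <filename>.off to disable it, or back to re-enable it."""
    active = os_path.join(directory, filename)
    try:
        if os_path.exists(active):
            # Entry is active: disable it
            sh_move(active, active + ".off")
            return (True, filename + ".off")
        # Entry is disabled: re-enable it
        sh_move(active + ".off", active)
        return (True, filename)
    except Exception as e:
        return (False, str(e))


# Example: toggle an APT source list (the file name is a placeholder)
ok, result = toggle_conf("/etc/apt/sources.list.d", "backports.list")
print(result if ok else "toggle failed: " + result)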
def checkenv(self):
    if not os_path.exists(os_path.realpath("settings.json")):
        raise exceptions.InvalidSettings(
            "Please rename the file settings-dist.json to settings.json and fill in the required info")

    with open(os_path.realpath("settings.json")) as json_data:
        settings = json.load(json_data)

    # At the moment there are no required keys; let's leave this check in place for future use
    required_keys = ['app_key', 'app_secret', 'token', 'token_secret']

    for required in required_keys:
        try:
            value = settings[required]

            if value == '':
                raise exceptions.InvalidSettings(
                    "Please fill in the required info '" + required + "' before continuing")
        except KeyError:
            raise exceptions.InvalidSettings(
                "Please fill in the required info '" + required + "' before continuing")

    try:
        if not settings['data_dir']:
            settings['data_dir'] = os_path.realpath("data/")
        elif not os_path.exists(settings['data_dir']):
            logging.getLogger('dumpscraper').warning(
                "Path " + settings['data_dir'] + " does not exist, using the default 'data' one")
            settings['data_dir'] = os_path.realpath("data/")
    except KeyError:
        settings['data_dir'] = os_path.realpath("data")

    self.settings = settings

    # Migrates the old folder structure (raw/YYYY-MM-DD) to the new one (raw/YYYY/MM/DD)
    # Let's check if we actually have to migrate the data
    if os_path.exists(settings['data_dir'] + '/raw'):
        raw_dirs = os_listdir(settings['data_dir'] + '/raw')
        regex = re.compile(r'\d{4}-\d{2}-\d{2}')
        # Materialize the matches so the emptiness check also works on Python 3,
        # where filter() returns a lazy (always truthy) iterator
        old_dirs = [raw_dir for raw_dir in raw_dirs if regex.match(raw_dir)]

        if old_dirs:
            from os import makedirs as os_makedirs
            from shutil import move as sh_move

            dump_logger = logging.getLogger('dumpscraper')
            dump_logger.info('Old folder structure found, migrating')

            for old_dir in old_dirs:
                parts = old_dir.split('-')
                old_path = settings['data_dir'] + '/raw/' + old_dir
                new_parent = settings['data_dir'] + '/raw/' + parts[0] + '/' + parts[1]

                # Create the year/month parents first: shutil.move fails if the
                # destination's parent directory does not exist
                if not os_path.exists(new_parent):
                    os_makedirs(new_parent)

                sh_move(old_path, new_parent + '/' + parts[2])

            dump_logger.info('Migration successfully completed')
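# checkenv() expects a settings.json file next to the script. Below is a hedged
# sketch of generating one: the key names come from required_keys above, but
# every value is a placeholder, not a real credential.
import json

example_settings = {
    "app_key": "your-app-key",
    "app_secret": "your-app-secret",
    "token": "your-access-token",
    "token_secret": "your-access-token-secret",
    "data_dir": "data/"  # optional; checkenv falls back to the default "data" folder
}

with open("settings.json", "w") as fp:
    json.dump(example_settings, fp, indent=4)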
def full_run(obj_set,
             blender_path,
             renders_per_class=10,
             work_dir=workspace,
             generate_background=True,
             background_database=None,
             blender_attributes={},
             visualize_dump=False,
             dry_run_mode=False,
             n_of_pixels=300,
             adjust_brightness=False,
             render_samples=128):
    """
    Takes all the parameters and executes the complete pipeline. Given object
    model files, it generates the specified number of training images and saves
    them in a zip file. At the end of the run, this function cleans up all the
    files it created, apart from the final zip file, because the large number
    of images produced would otherwise require large amounts of storage space.

    args:
        obj_set: path to the folder containing folders with individual model object files
        blender_path: path to the Blender executable
        renders_per_class: number of images to be generated per class. Default = 10
        work_dir: path to the workspace that contains individual folders
        generate_background: flag; if True, a random background is generated,
            if False, images from the given database are used. Default = True
        background_database: path to the database of backgrounds to use if
            generate_background is False
        blender_attributes: a dictionary containing attributes for Blender.
            Optional; if none given, basic predefined attributes will be used.
            Default = {}
        visualize_dump: Default = False
        dry_run_mode: Default = False
        n_of_pixels (int): the size of the edge of the square image. Optional. Default = 300
        adjust_brightness (boolean): whether the brightness of the background
            should be adjusted to match, on average, the brightness of the
            foreground image. Default = False
        render_samples: Default = 128
    """
    print('Checking data directories...')
    slack.send_message(
        'Obj_set: ' + obj_set + '\n renders_per_class: ' + str(renders_per_class),
        'Rendering Run Started', 'good')

    # Ensure the render workspace folder exists
    if not os.path.isdir(work_dir):
        message = ("Can't find rendering workspace folder. Please create the folder " +
                   work_dir + ", containing object files and background database. "
                   "See group folder for example.")
        print(message)
        raise RenderPipelineError(message)

    destroy_folders(work_dir, temp_folders)
    validate_folders(work_dir, data_folders)

    obj_poses = os.path.join(work_dir, "object_poses")

    """----------------- Generating object poses ---------------"""
    src_path = os.path.join(project_path, "src")
    generate_poses(src_path, blender_path, obj_set, obj_poses, renders_per_class,
                   blender_attributes, visualize_dump, dry_run_mode, n_of_pixels,
                   render_samples)

    # Now we need to take Ong's stats and move them into the final folder
    for folder in os.listdir(obj_poses):
        orig_stats = os.path.join(obj_poses, folder, "stats")
        if os.path.isdir(orig_stats):
            final_name = folder + "_stats"
            sh_move(orig_stats, os.path.join(work_dir, "final_folder", final_name))

    """----------------- Generating final images ---------------"""
    # We need to distinguish between drawing backgrounds from a database
    # and generating them ourselves
    print(' ============================ GENERATING FINAL IMAGES ============================')
    final_folder = os.path.join(work_dir, "final_folder")
    final_im = os.path.join(work_dir, "final_folder/images")

    all_bbox = {}

    # Generate images for each class's poses
    for folder in os.listdir(obj_poses):
        sub_obj = os.path.join(obj_poses, folder)
        if not os.path.isdir(sub_obj):
            print(sub_obj, " is not a folder")
            continue

        sub_final = os.path.join(final_im, folder)
        os.mkdir(sub_final)

        # Merge images based on the choice of background
        if generate_background:
            # Generate a random background
            bboxes = random_bg_for_all_objects(sub_obj, sub_final, adjust_brightness, n_of_pixels)
        elif generate_background is False and background_database is None:
            print("We need a background database")
            raise RenderPipelineError("A background database is missing")
        else:
            # Draw background images from the given database
            try:
                bboxes = mi.generate_for_all_objects(sub_obj, background_database,
                                                     sub_final, adjust_brightness, n_of_pixels)
            except Exception as e:
                raise RenderPipelineError(
                    "Error occurred during background generation from the database!")

        # Collate all the bboxes
        all_bbox[folder] = bboxes

    # Report the classes whose parameters are being dumped
    for folder in os.listdir(obj_poses):
        print(folder)

    # Dump all merging parameters to a json file
    all_params = {
        "object_set": os.path.split(obj_set)[-1],
        "images_per_class": renders_per_class,
        "background_generated": generate_background,
        # Guard against background_database being None when backgrounds are generated
        "background_database": os.path.split(background_database)[-1] if background_database else None,
        "number_of_pixels": n_of_pixels,
        "brightness_adjusted": adjust_brightness,
        "all_bboxes": str(all_bbox)
    }

    dump_file = os.path.join(final_folder, 'mergeparams_dump.json')
    with open(dump_file, "w+") as f:
        json.dump(all_params, f, sort_keys=True, indent=4, separators=(',', ': '))

    # Export everything into a zip file; compose the zip file name from the
    # background type and a timestamp
    if generate_background:
        back_parameter = "random_bg"
    else:
        back_parameter = os.path.split(background_database)[-1]

    zip_name = os.path.join(
        work_dir, "final_zip",
        os.path.split(obj_set)[-1] + "_" + back_parameter + "_" +
        datetime.datetime.now().strftime("%Y-%m-%d_%H_%M_%S"))

    make_archive(zip_name, 'zip', final_folder)

    # Clean up all generated files, apart from the zip file
    destroy_folders(work_dir, temp_folders)

    final_result = zip_name + ".zip"
    slack.send_message('Full run completed. Final zip file: ' + final_result,
                       'Rendering Run Completed', 'good')
    return final_result
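# A hedged example of launching a complete run; every path below is a
# placeholder, only the keyword names come from the full_run signature above.
result_zip = full_run(
    obj_set="render_workspace/object_files/my_object_set",
    blender_path="/usr/local/bin/blender",
    renders_per_class=50,
    work_dir="render_workspace",
    generate_background=False,                            # draw backgrounds...
    background_database="render_workspace/bg_database",   # ...from this database
    n_of_pixels=300,
    adjust_brightness=True,
    render_samples=128)
print("Training data written to " + result_zip)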