def get_objects(src, filename):
    ''' Download objects dependency to temp cache folder.

        src -- URL of the objects zip to download.
        filename -- name the archive is cached under; also recorded via
                    settings('appliedPatch') so a previously failed apply
                    can be detected on the next run.

        Returns True when the cached "objects" folder was missing and the
        caller should restart after the patch is applied, False otherwise.
    '''
    temp = CACHE
    restart = not xbmcvfs.exists(os.path.join(temp, "objects") + '/')
    path = os.path.join(temp, filename).encode('utf-8')

    if restart and (settings('appliedPatch') or "") == filename:
        # Same patch was recorded as applied before, yet the objects folder
        # is still missing: the previous apply went wrong. Don't loop on
        # restarts for a patch that keeps failing.
        LOG.warn("Something went wrong applying this patch %s previously.", filename)
        restart = False

    if not xbmcvfs.exists(path) or filename.startswith('DEV'):

        delete_folder(CACHE)
        LOG.info("From %s to %s", src, path.decode('utf-8'))

        try:
            response = requests.get(src, stream=True, verify=True)
            response.raise_for_status()
        except requests.exceptions.SSLError as error:

            LOG.error(error)
            # NOTE(review): verify=False disables TLS certificate checking;
            # kept as a deliberate best-effort fallback for broken cert
            # stores, but the response must still be status-checked --
            # previously an HTTP error page here would have been cached
            # and unzipped as if it were the patch.
            response = requests.get(src, stream=True, verify=False)
            response.raise_for_status()

        dl = xbmcvfs.File(path, 'w')
        try:
            dl.write(response.content)
        finally:
            # Always release the file handle, even if the write fails.
            dl.close()

        del response
        settings('appliedPatch', filename)
        unzip(path, temp, "objects")

    return restart
def _convert(self):
    """Unzip the downloaded clips, merge them into one video, clean up.

    Progress and status are reported through the self._update_* callbacks;
    any failure is logged and surfaced as a status message rather than
    being raised to the caller.
    """
    try:
        # Make sure both working directories exist before extracting.
        for folder in (self.clips_path, self.video_folder):
            if not os.path.exists(folder):
                os.makedirs(folder)

        # Extract Zip to the folder
        unzip(self.zip_path, self.clips_path)
        self._update_status("Unzipped")
        Logger.debug("Unzipped clips for file " + str(self.zip_path))
        Logger.debug("clips are available at " + str(self.clips_path))

        # Flatten the extracted tree so every clip sits in the root dir.
        move_to_root_folder(self.clips_path, self.clips_path)
        Logger.debug("Moving clips to root dir is done")

        # Drop clips too small to be real footage and log each removal.
        Logger.debug(
            "Following Clips are deleted due to low size (less than 100kb)"
        )
        for removed_clip in delete_small_clips(self.clips_path):
            Logger.debug(removed_clip)

        self._update_progress_bar(50)
        self._update_status("Ready to make video")

        # Merge the remaining clips into the final video file.
        Logger.debug("Entered to merging try block")
        merge(self.clips_path, self.video_location, self._update_progress)
        Logger.debug("End of merging try block")

        # Clear the temp dir now that the video is rendered.
        delete_all_clips(self.clips_path)

        self._update_status(
            os.path.basename(self.video_location)[:-4] + " is now ready to watch")
        self._toast("Always use Mx/Vlc Player to watch Videos")
    except Exception:
        # Top-level boundary: log the full traceback and tell the user.
        Logger.exception('Something happened wrong at merge')
        self._update_status("Something happened wrong at merge")
def do_unzip():
    # Unpack bundled third-party zips into their target folders.
    # NOTE(review): Python 2 print statement -- this chunk predates py3.
    print '== do_unzip =='
    # NOTE(review): tools_zip_folder is computed but never used in this
    # variant; a fuller do_unzip elsewhere unzips virtualenv from it.
    tools_zip_folder = os.path.join(TOOLS_FOLDER, "zip/")
    req_zip_folder = os.path.join(REQ_FOLDER, "zip/")
    # Unpack the bundled registration app into the requirements folder.
    unzip(os.path.join(req_zip_folder, "registration.zip"), REQ_FOLDER)
    # NOTE(review): static_scripts_folder is assigned but unused here --
    # this looks like a truncated copy of the larger do_unzip (which
    # unzips markitup/jquery/ajaxupload into it); confirm before deleting.
    static_scripts_folder = os.path.join(STATIC_FOLDER, 'scripts')
project_name = "project" # Create the project's directory if it doesn't exist already. helper.softcreate(project_name) # Download the tutorial's repository tutorial_path = os.path.join(project_name, "tutorial.zip") helper.download( "https://github.com/EdjeElectronics/TensorFlow-Object-Detection-API-Tutorial-Train-Multiple-Objects-Windows-10/archive/master.zip", tutorial_path) helper.unzip( tutorial_path, project_name, "TensorFlow-Object-Detection-API-Tutorial-Train-Multiple-Objects-Windows-10-master" ) # Download the model's directory model_path = os.path.join(project_name, "model.zip") helper.download("https://github.com/tensorflow/models/archive/master.zip", model_path) helper.unzip(model_path, project_name, "models-master") # Download the model from model zoo model_path2 = os.path.join(project_name, "model2.tar.gz")
# Postgres connection settings; blank password -- presumably a local dev
# instance with trust auth (TODO confirm).
password = ''
dbname = 'postgres'
order_loader = OrderLoader(host, username, password, dbname)

# Create tables
table_creation_queries = [
    user_table_creation_query, order_table_creation_query,
    product_table_creation_query, item_table_creation_query
]
for table_creation_query in table_creation_queries:
    order_loader.create_table(table_creation_query)

# One insert query per entity type, paired positionally with wanted_colss.
load_queries = [
    item_inseration_query, user_inseration_query, order_inseration_query,
    product_inseration_query
]
# NOTE(review): BUG -- wanted_colss is empty, so zip(load_queries,
# wanted_colss) below yields nothing and write_order_into_db is never
# called: the entire "write data" phase is a silent no-op. Populate
# wanted_colss with one column list per load query.
wanted_colss = []

# # Write data into database
zip_file = '../data/data.zip'
directory_to_extract_to = '../data/'
unzip(zip_file, directory_to_extract_to)
# For every order file in the archive, load each order through every
# (query, columns) pair.
for orders_file in order_loader.get_order_filenames_from_zip(zip_file):
    for order in order_loader.get_orders(directory_to_extract_to + orders_file):
        for load_query, wanted_cols in zip(load_queries, wanted_colss):
            order_loader.write_order_into_db(order, load_query, wanted_cols)
order_loader.disconnect()
def do_unzip():
    """Unpack all bundled third-party zips into their target folders.

    virtualenv goes to TOOLS_FOLDER, the python packages to REQ_FOLDER,
    and the javascript/editor assets to the static scripts folder.
    """
    # Parenthesised print is valid on both Python 2 and 3; the original
    # py2-only print statement breaks under any py3 interpreter.
    print('== do_unzip ==')

    tools_zip_folder = os.path.join(TOOLS_FOLDER, "zip/")
    unzip(os.path.join(tools_zip_folder, "virtualenv.zip"), TOOLS_FOLDER)

    # Python package requirements.
    req_zip_folder = os.path.join(REQ_FOLDER, "zip/")
    for archive in ("postmarkup.zip", "registration.zip"):
        unzip(os.path.join(req_zip_folder, archive), REQ_FOLDER)

    # Frontend assets (note: sourced from the requirements zip folder).
    static_scripts_folder = os.path.join(STATIC_FOLDER, 'scripts')
    for archive in ("markitup.zip", "jquery.min.js.zip", "ajaxupload.zip"):
        unzip(os.path.join(req_zip_folder, archive), static_scripts_folder)
# NOTE(review): tail of a function whose definition begins before this
# chunk -- keeps only the rows that pass filter_nulls.
    return list(filter(filter_nulls, table))


def read_list_of_vars(file_name):
    # Each line of the vars file is "<variable>\t<type>"; return a list
    # of [variable, type] pairs (split on tab after stripping).
    return [
        line.strip().split('\t')
        for line in helper.read_file(file_name).strip().split('\n')
    ]


if __name__ == "__main__":
    if len(sys.argv) < 3:
        print("Usage: filter <data_file_name> <list_of_vars_file_name>")
        sys.exit(1)

    file_name = sys.argv[1]
    list_of_vars_file_name = sys.argv[2]

    # Load the CSV into a table of rows.
    file_content = helper.read_file(file_name)
    table = helper.convert_string_to_table(file_content, sep=',')
    # NOTE(review): "Orginal" is a typo in user-facing output ("Original").
    print("Orginal table rows:", len(table))

    list_of_vars = read_list_of_vars(list_of_vars_file_name)
    # Unzip the (variable, type) pairs into two parallel sequences.
    variables, types = helper.unzip(list_of_vars)

    # Keep only the wanted variables, then clean values by declared type.
    table = reduce_data(table, variables)
    table = clean_data(table, types)
    print("Reduced and cleaned rows:", len(table))

    # Write the filtered table next to the input as "<name>.new".
    file_content = helper.convert_table_to_string(table)
    helper.write_file(file_name + ".new", file_content)