def populate_events(ap_args):
    """Main function to populate the database with archive events.

    :Parameters:
        - ap_args : dict : Information related to archive(s).
    """
    FLOG.debug(ap_args)
    if __name__ != '__main__':
        ap_args = check_if_params(ap_args)
    check_arg_for_none_value(ap_args)
    CLOG.info('DB Populate args :- ' + str(ap_args))
    arch_path = ap_args.get('temp')
    del_arch_path = ap_args.get('delete_temp')
    if not arch_path:
        arch_path = gisc_msgs.TEMP_LOCATION
    # Pick the single source argument. Compare by value, not identity
    # ('is' on string literals is unreliable), and skip both housekeeping
    # keys so 'delete_temp' can never be selected as the source.
    arg = [k for k in ap_args if k not in ('temp', 'delete_temp')][0]
    if arg == 'url':
        empty_directory(arch_path)
        download(ap_args[arg], arch_path)
    elif arg == 'arch_date':
        empty_directory(arch_path)
        download(get_url_from_date(ap_args[arg]), arch_path)
    elif arg == 'src_dir':
        arch_path = ap_args[arg]
    elif arg == 'zip':
        extract_zip(ap_args[arg], arch_path)
    elif arg == 'files':
        empty_directory(arch_path)
        # map() is lazy in Python 3; use an explicit loop so the side
        # effects actually run.
        for item in ap_args[arg]:
            handle_valid_invalid_files(item, arch_path)
    populate(arch_path)
    if arg != 'src_dir' and del_arch_path:
        empty_directory(arch_path, False)
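# NOTE: `empty_directory` and the two-argument `extract_zip` used above are
# project helpers that are not shown here. The following is a minimal sketch
# of the behaviour they are assumed to have, based only on how they are
# called (the real helpers may differ), using just the standard library.

import os
import shutil
import zipfile


def empty_directory(path, recreate=True):
    """Remove everything under ``path``; optionally recreate it empty.

    Assumption: the second positional argument in
    ``empty_directory(arch_path, False)`` means "do not recreate".
    """
    if os.path.isdir(path):
        shutil.rmtree(path)
    if recreate:
        os.makedirs(path, exist_ok=True)


def extract_zip(zip_path, dest_dir):
    """Extract ``zip_path`` into ``dest_dir`` (created if missing)."""
    os.makedirs(dest_dir, exist_ok=True)
    with zipfile.ZipFile(zip_path) as archive:
        archive.extractall(dest_dir)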
def get_web_ambientcg_asset(url, content_folder):
    is_ok, result = get_web_ambientcg_info(url, content_folder)
    if not is_ok:
        return False, result
    info = result

    url = info.pop("downloadLink")
    content_path = os.path.join(content_folder, info.pop("fileName"))
    headers = {'User-Agent': 'Blender'}
    is_ok, result = get_web_file(url, content_path=content_path,
                                 headers=headers)
    if is_ok:
        utils.extract_zip(result, path=content_folder)
        os.remove(result)
    else:
        print(f"Cannot download asset {url}", result)

    url = info["preview_url"]
    is_ok, result = get_web_file(url, content_folder, headers=headers)
    if is_ok:
        info["preview_path"] = result
    else:
        print(f"Cannot download preview {url}", result)

    return True, info
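# NOTE: `get_web_file` is called in two ways above -- once with an explicit
# content_path, once with just a destination folder -- and returns an
# (is_ok, result) pair. A minimal sketch consistent with that usage; the
# signature, the URL-derived file name, and the error handling are all
# assumptions, not the project's actual helper.

import os
import urllib.request


def get_web_file(url, content_folder=None, content_path=None, headers=None):
    """Download ``url``; return ``(True, saved_path)`` or ``(False, error)``."""
    if content_path is None:
        # Derive a file name from the URL when only a folder is given.
        content_path = os.path.join(content_folder, url.rsplit("/", 1)[-1])
    try:
        request = urllib.request.Request(url, headers=headers or {})
        with urllib.request.urlopen(request) as response, \
                open(content_path, "wb") as target:
            target.write(response.read())
        return True, content_path
    except Exception as exc:
        return False, str(exc)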
def extract_unity_launcher(launcher_type):
    # Renamed the parameter from `type` to avoid shadowing the builtin.
    utils.extract_zip(".tmp/UnityLauncher.%s.zip" % launcher_type,
                      ".UnityLauncher.%s" % launcher_type)
    current_os = utils.get_current_os()
    # 0o755 is the Python 3 octal literal; bare 0755 is a SyntaxError.
    if current_os == "macOS":
        os.chmod('.UnityLauncher.{0}/osx.10.12-x64/publish/UnityLauncher.{0}'
                 .format(launcher_type), 0o755)
    elif current_os == "linux":
        os.chmod('.UnityLauncher.{0}/linux-x64/publish/UnityLauncher.{0}'
                 .format(launcher_type), 0o755)
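# NOTE: `utils.get_current_os()` is assumed to map the host platform to the
# string labels compared above ("macOS", "linux"). A plausible sketch with
# platform.system(); this is an assumption, not the project's actual helper.

import platform


def get_current_os():
    """Return "macOS", "linux", or "windows" for the host platform."""
    system = platform.system()
    if system == "Darwin":
        return "macOS"
    if system == "Linux":
        return "linux"
    return "windows"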
def export(email, password, useLocally, replaceLatestVersion, hasBreakingChange):
    files = os.listdir("./dist")
    whlFiles = [file for file in files if os.path.splitext(file)[1] == ".whl"]
    # The original message ("More than one whl files") contradicted the
    # check; the version match below only makes sense for exactly one wheel.
    assert len(whlFiles) == 1, "expected exactly one .whl file in ./dist"
    match = re.search("pyinventory-([.0-9]+)-py3-none-any.whl", whlFiles[0])
    version = match.group(1)
    res = "\n".join(
        open(GRAPHQL_PYINVENORY_PATH, "r").read().splitlines()[10:-1])
    packages = json.loads(res)
    client = InventoryClient(email, password, is_local_host=useLocally)
    if len(packages) != 0:
        latestVersion = packages[0]["version"]
        if LooseVersion(version) == LooseVersion(latestVersion):
            print("version {} is already exported".format(version))
            if replaceLatestVersion:
                print("Replacing version {} with the new build".format(version))
                latestPackage = packages.pop(0)
                try:
                    delete_file(client, latestPackage["whlFileKey"], True)
                except Exception:
                    print(f'whlFileKey {latestPackage["whlFileKey"]} cannot '
                          "be deleted")
            else:
                return
        elif LooseVersion(version) < LooseVersion(latestVersion):
            print("newer version {} is already exported than current "
                  "version {}".format(latestVersion, version))
            return
    whlFileKey = store_file(client, os.path.join("./dist", whlFiles[0]),
                            "application/zip", True)
    newPackage = {
        "version": version,
        "whlFileKey": whlFileKey,
        "uploadTime":
            datetime.isoformat(datetime.fromtimestamp(int(time.time())))
            + "+00:00",
        "hasBreakingChange": hasBreakingChange,
    }
    packages.insert(0, newPackage)
    newContent = json.dumps(packages)
    open(GRAPHQL_PYINVENORY_PATH, "w").write(
        GRAPHQL_PYINVENORY_CONTENT.format(newContent))
    export_doc()
    schemas = extract_zip("graphql_schema_versions/old_schemas.zip")
    schemas[version] = open("../graph/graphql/schema/symphony.graphql").read()
    archive_zip("graphql_schema_versions/old_schemas.zip", schemas)
def add_current_schema_with_version(
        schema_versions_library, current_schema_path, version):
    old_schemas_archive = os.path.join(schema_versions_library,
                                       "old_schemas.zip")
    schemas = extract_zip(old_schemas_archive)
    schemas[version] = open(current_schema_path).read()
    archive_zip(old_schemas_archive, schemas)
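# NOTE: both export() and add_current_schema_with_version() treat the archive
# as a mapping from member name to text content: extract_zip returns a dict
# and archive_zip writes one back. A minimal round-trip sketch consistent
# with that usage; this is an assumption, not the project's actual utils.

import zipfile


def extract_zip(archive_path):
    """Read a zip into a {member_name: text_content} dict."""
    with zipfile.ZipFile(archive_path) as archive:
        return {name: archive.read(name).decode("utf-8")
                for name in archive.namelist()}


def archive_zip(archive_path, members):
    """Write a {member_name: text_content} dict back out as a zip."""
    with zipfile.ZipFile(archive_path, "w", zipfile.ZIP_DEFLATED) as archive:
        for name, content in members.items():
            archive.writestr(name, content)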
def post(self):
    action = self.get_argument("action", "").lower()
    if action == "restart":
        try:
            logging.info("systemctl action %s openexcavator", action)
            subprocess.check_output(["systemctl", action, "openexcavator"],
                                    stderr=subprocess.STDOUT)
        except Exception as exc:
            logging.warning("systemctl: %s", exc)
        return self.render("restart.html", error_message=None)
    data = {
        "wifi_ssid": self.get_argument("wifi_ssid", None),
        "wifi_psk": self.get_argument("wifi_psk", None),
        "gps_host": self.get_argument("gps_host", None),
        "gps_port": self.get_argument("gps_port", None),
        "imu_host": self.get_argument("imu_host", None),
        "imu_port": self.get_argument("imu_port", None),
        "start_altitude": self.get_argument("start_altitude", None),
        "stop_altitude": self.get_argument("stop_altitude", None),
        "antenna_height": self.get_argument("antenna_height", None),
        "safety_depth": self.get_argument("safety_depth", None),
        "safety_height": self.get_argument("safety_height", None),
        "output_port": self.get_argument("output_port", None),
        "path": None
    }
    if self.request.files:
        file_info = self.request.files["path"][0]
        data["path"] = file_info["body"]
    error_msg = None
    try:
        data["gps_port"] = int(data["gps_port"])
        data["imu_port"] = int(data["imu_port"])
        data["start_altitude"] = float(data["start_altitude"])
        data["stop_altitude"] = float(data["stop_altitude"])
        data["antenna_height"] = float(data["antenna_height"])
        data["safety_depth"] = float(data["safety_depth"])
        data["safety_height"] = float(data["safety_height"])
        if data["output_port"]:
            data["output_port"] = int(data["output_port"])
            if data["output_port"] < 1024 or data["output_port"] > 65535:
                error_msg = "invalid output port (must be 1024-65535)"
        if data["path"]:
            try:
                if file_info["filename"].endswith(".zip"):
                    data["path"] = utils.extract_zip(data["path"])
                path_value = json.loads(data["path"].decode())
                if "features" not in path_value:
                    error_msg = "missing features from GeoJSON"
            except ValueError:
                error_msg = "JSON data is not valid"
    except Exception as exc:
        error_msg = "invalid input data: %s" % exc
    if error_msg:
        return self.redirect("/?error_msg=" + url_escape(error_msg))
    self.application.database.set_config(data)
    return self.redirect("/")
def extract_zips(self):
    with self.lock:
        zip_paths = [
            file for file in os.scandir(self.path)
            if os.path.splitext(file.name)[1] == ".zip"
        ]
        if not zip_paths:
            return []
        self.extra = os.path.join(self.path, "__extra__")
        os.makedirs(self.extra, exist_ok=True)
        extracted_files = []
        for zip_path in zip_paths:
            extracted_files.extend(utils.extract_zip(zip_path))
            os.rename(zip_path,
                      os.path.join(self.extra, os.path.basename(zip_path)))
        self.info["system_tags"].remove("zip")
        self.update_search_set()
        return extracted_files
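# NOTE: here utils.extract_zip is expected to return the list of extracted
# file paths (the caller extends extracted_files with it) and, since no
# destination is passed, to unpack next to the archive. A sketch of that
# variant under those assumptions; the real helper may differ.

import os
import zipfile


def extract_zip(zip_path, path=None):
    """Extract ``zip_path`` and return the list of extracted paths."""
    dest = path or os.path.dirname(os.fspath(zip_path))
    with zipfile.ZipFile(zip_path) as archive:
        archive.extractall(dest)
        return [os.path.join(dest, name) for name in archive.namelist()]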
def post(self):
    action = self.get_argument("action", "").lower()
    if action == "restart":
        try:
            logging.info("systemctl action %s openexcavator", action)
            subprocess.check_output(["systemctl", action, "openexcavator"],
                                    stderr=subprocess.STDOUT)
        except Exception as exc:
            logging.warning("systemctl: %s", exc)
        return self.render("restart.html", error_message=None)
    wifi_ssid = self.get_argument("wifi_ssid", None)
    wifi_psk = self.get_argument("wifi_psk", None)
    gps_host = self.get_argument("gps_host", None)
    gps_port = self.get_argument("gps_port", None)
    imu_host = self.get_argument("imu_host", None)
    imu_port = self.get_argument("imu_port", None)
    start_altitude = self.get_argument("start_altitude", None)
    stop_altitude = self.get_argument("stop_altitude", None)
    antenna_height = self.get_argument("antenna_height", None)
    safety_depth = self.get_argument("safety_depth", None)
    safety_height = self.get_argument("safety_height", None)
    path = None
    if self.request.files:
        file_info = self.request.files["path"][0]
        path = file_info["body"]
    error_msg = None
    try:
        gps_port = int(gps_port)
        imu_port = int(imu_port)
        start_altitude = float(start_altitude)
        stop_altitude = float(stop_altitude)
        antenna_height = float(antenna_height)
        safety_depth = float(safety_depth)
        safety_height = float(safety_height)
        if path:
            try:
                if file_info["filename"].endswith(".zip"):
                    path = utils.extract_zip(path)
                pathvalue = json.loads(path.decode())
                if "features" not in pathvalue:
                    error_msg = "missing features from GeoJSON"
            except ValueError:
                error_msg = "JSON data is not valid"
    except Exception as exc:
        error_msg = "invalid input data: %s" % exc
    if error_msg:
        return self.redirect("/?error_msg=" + url_escape(error_msg))
    data = {
        "start_altitude": start_altitude,
        "stop_altitude": stop_altitude,
        "path": path,
        "antenna_height": antenna_height,
        "gps_host": gps_host,
        "gps_port": gps_port,
        "imu_host": imu_host,
        "imu_port": imu_port,
        "wifi_ssid": wifi_ssid,
        "wifi_psk": wifi_psk,
        "safety_height": safety_height,
        "safety_depth": safety_depth
    }
    self.application.database.set_config(data)
    return self.redirect("/")
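# NOTE: in both handlers above, utils.extract_zip takes the uploaded zip as
# raw bytes and hands back bytes that json.loads can decode, i.e. the content
# of the archived GeoJSON file. A minimal in-memory sketch of that variant,
# assuming the archive holds a single member of interest.

import io
import zipfile


def extract_zip(data):
    """Return the bytes of the first member of an in-memory zip archive."""
    with zipfile.ZipFile(io.BytesIO(data)) as archive:
        return archive.read(archive.namelist()[0])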
import os

from utils import extract_zip, extract_tgz

if __name__ == "__main__":
    if not os.path.isdir('data/data/frame/cron20190326/'):
        extract_zip('data/cron20190326.zip')
        extract_zip('data/cron20190415.zip')
        extract_tgz('data/jinhai_531.tar.gz')
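# NOTE: extract_tgz is imported alongside extract_zip but never shown. A
# minimal sketch with tarfile, assuming it unpacks next to the archive the
# way the single-argument zip calls here appear to.

import os
import tarfile


def extract_tgz(tgz_path, path=None):
    """Unpack a .tar.gz archive into ``path`` (defaults to its own folder)."""
    dest = path or os.path.dirname(tgz_path)
    with tarfile.open(tgz_path, "r:gz") as archive:
        archive.extractall(dest)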
import os

CURR_DIR = os.path.dirname(os.path.realpath(__file__))

import utils

pjoin = os.path.join
common = CURR_DIR
scst = pjoin(CURR_DIR, 'scst')

##

print('\nINFO: Fetching `tylin/coco-caption` @ commit 3a9afb2 ...')
dest = common
zip_path = utils.maybe_download_from_url(
    r'https://github.com/tylin/coco-caption/archive/3a9afb2682141a03e1cdc02b0df6770d2c884f6f.zip',
    dest)
utils.extract_zip(zip_path)
os.remove(zip_path)
old_name = pjoin(dest, 'coco-caption-3a9afb2682141a03e1cdc02b0df6770d2c884f6f')
new_name = pjoin(dest, 'coco_caption')
os.rename(old_name, new_name)

# print('\nINFO: Fetching `ruotianluo/cider` @ commit 77dff32 ...')
# dest = scst
# zip_path = utils.maybe_download_from_url(
#     r'https://github.com/ruotianluo/cider/archive/dbb3960165d86202ed3c417b412a000fc8e717f3.zip',
#     dest)
# utils.extract_zip(zip_path)
# os.remove(zip_path)
# old_name = pjoin(dest, 'cider-dbb3960165d86202ed3c417b412a000fc8e717f3')
# new_name = pjoin(dest, 'cider_ruotianluo')
# os.rename(old_name, new_name)
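# NOTE: utils.maybe_download_from_url is assumed to skip the download when
# the target file already exists and to return the local zip path. A sketch
# of that behaviour with urllib; the name derivation is an assumption.

import os
import urllib.request


def maybe_download_from_url(url, dest_dir):
    """Download ``url`` into ``dest_dir`` unless present; return the path."""
    os.makedirs(dest_dir, exist_ok=True)
    zip_path = os.path.join(dest_dir, url.rsplit('/', 1)[-1])
    if not os.path.exists(zip_path):
        urllib.request.urlretrieve(url, zip_path)
    return zip_path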
def main():
    # Ported from Python 2: print statements -> print(), and time.clock()
    # (removed in Python 3.8) -> time.perf_counter().
    t1 = time.perf_counter()
    print('cleaning {} and {}...'.format(WORKING_DIR, OUTPUT_DIR))
    utils.rmdir(WORKING_DIR)
    utils.rmdir(OUTPUT_DIR)
    print('extracting kml from {}...'.format(KMZ_PATH))
    utils.extract_zip(KMZ_PATH, WORKING_DIR)
    os.mkdir(OUTPUT_DIR)
    print('parsing ground overlays from {}...'.format(KML_PATH))
    overlays, bounds = parse_kml_ground_overlays(KML_PATH)
    print('loading overlay images into grid...')
    overlay_grid = parse_overlays_to_grid(WORKING_DIR, overlays)
    print('merging overlay grid into single image...')
    image_grid = merge_overlay_grid(overlay_grid)
    path = os.path.join(OUTPUT_DIR, '_image_grid_original.png')
    print('saving image of shape {} to {}...'.format(image_grid.shape, path))
    cv2.imwrite(path, image_grid)
    cache_digitized_path = '_image_grid_digitized.png'
    if os.path.exists(cache_digitized_path):
        print('loading cached digitized image from {}...'.format(
            cache_digitized_path))
        image_grid = cv2.imread(cache_digitized_path)
    else:
        print('digitizing image colors...')
        digitize_image_in_place(image_grid, ORIGINAL_COLORS, NEW_COLORS)
        path = os.path.join(OUTPUT_DIR, '_image_grid_digitized.png')
        print('saving image of shape {} to {}...'.format(
            image_grid.shape, path))
        cv2.imwrite(path, image_grid)
    print('filtering image with median blur k=3...')
    b, g, r = cv2.split(image_grid)
    b = scipy.ndimage.filters.median_filter(b, size=3)
    g = scipy.ndimage.filters.median_filter(g, size=3)
    r = scipy.ndimage.filters.median_filter(r, size=3)
    image_grid = cv2.merge([b, g, r])
    path = os.path.join(OUTPUT_DIR, '_image_grid_digitized_median3.png')
    print('saving image of shape {} to {}...'.format(image_grid.shape, path))
    cv2.imwrite(path, image_grid)
    print('expanding image to cover all mercator coordinates...')
    image_grid, bounds = mercator.expand_image_to_mercator(
        image_grid, bounds, DEGREES_PER_PIXEL)
    print('mapping image to mercator...')
    image_grid = mercator.map_image_to_mercator(image_grid, bounds, ZOOM,
                                                TILE_SIZE)
    path = os.path.join(OUTPUT_DIR, '_image_grid_mercator.png')
    print('saving image of shape {} to {}...'.format(image_grid.shape, path))
    cv2.imwrite(path, image_grid)
    print('extracting tiles to files at {}'.format(OUTPUT_DIR))
    extract_tiles_to_file(image_grid, ZOOM, TILE_SIZE)
    t2 = time.perf_counter()
    print('completed in {} seconds'.format(t2 - t1))
def auto(cls, path: os.DirEntry, asset_data_path, ignore_info=False
         ):  # type: (os.DirEntry, str, bool) -> typing.Tuple[str, Asset]
    info = {}
    preview = None
    id = os.path.splitext(path.name)[0]

    if os.path.dirname(path.path) != asset_data_path:
        if id:
            # Find a free folder name, appending _2, _3, ... on collision.
            number = 2
            id_path = os.path.join(asset_data_path, id)
            while True:
                if os.path.exists(id_path):
                    id_path = os.path.join(asset_data_path, id + f"_{number}")
                    number += 1
                else:
                    break
        else:
            # No usable name: generate a random 11-character id.
            id_chars = "".join((string.ascii_lowercase, string.digits))
            while True:
                id = ''.join(random.choice(id_chars) for _ in range(11))
                id_path = os.path.join(asset_data_path, id)
                if not os.path.exists(id_path):
                    break
        id_path = utils.PseudoDirEntry(id_path)
    else:
        id_path = path

    extra_folder = os.path.join(id_path.path, "__extra__")
    archive_folder = os.path.join(id_path.path, "__archive__")
    gallery_folder = os.path.join(id_path.path, "__gallery__")

    def get_info():
        is_ok, result = asset_parser.get_web(url, id_path.path)
        if is_ok:
            preview = result.pop("preview_path", None)
            return result, preview
        return None, None

    if path.is_file():
        file = utils.pathlib.Path(path)
        url = None
        auto_folder = file.parent
        url_files = [
            utils.pathlib.Path(auto_folder, file.stem + extension)
            for extension in utils.URL_EXTENSIONS
        ]
        url_files = [
            url for url in url_files
            if url.exists() and url.type == "url"
        ]
        for url_file in url_files:
            url = url_file.data
            utils.move_to_folder(url_file, extra_folder)
        if url:
            info, preview = get_info()
        if file.type == "zip":
            utils.extract_zip(file, id_path.path)
            utils.move_to_folder(file, archive_folder)
        else:
            utils.move_to_folder(file, id_path.path)
    else:
        id_path = utils.PseudoDirEntry(
            utils.move_to_folder(path.path, asset_data_path))

    files = utils.File_Filter.from_dir(id_path,
                                       ignore=("__extra__", "__archive__"))

    old_info = None
    for existing_info in files.get_by_type("__info__"):
        if ignore_info:
            old_info = existing_info.data
            break
        else:
            return id, cls.default(id_path)

    zips = files.get_by_type("zip")
    if zips:
        for zip_file in zips:
            utils.extract_zip(str(zip_file), path=id_path.path)
            utils.move_to_folder(zip_file, archive_folder)
        files.update()

    if not info:
        for url_file in files.get_by_type("url"):
            url = url_file.data
            info, preview = get_info()
            if info:
                break

    if not info:
        for megascan_info in files.get_by_type("megascan_info"):
            megascan_id = megascan_info.data.get('id')
            if not megascan_id:
                continue
            url = f"https://quixel.com/megascans/home?assetId={megascan_id}"
            info, preview = get_info()
            if info:
                previews = [
                    str(file) for file in files.get_files()
                    if file.name.lower().endswith("_preview.png")
                ]
                if previews:
                    preview = previews[0]
                break

    if not info:
        for blendswap_info in files.get_by_type("blendswap_info"):
            url = blendswap_info.data
            info, preview = get_info()
            if info:
                break

    if not info and asset_parser.seven_z:
        for sbsar in files.get_by_type("sbsar"):
            is_ok, result = asset_parser.get_info_from_sbsar(str(sbsar))
            if is_ok:
                info = result
                xml_attrs = info.pop("xml_attrs")
                if info.get("author") in ("Allegorithmic", "Adobe") or all(
                        map(xml_attrs.get,
                            ("pkgurl", "label", "keywords", "category",
                             "author", "authorurl"))):
                    sbsar_info = info
                    label = info["name"]  # for Adobe sbsars, name == label
                    info_by_label = \
                        asset_parser.get_web_substance_source_info_by_label(
                            label)
                    if info_by_label:
                        info = info_by_label
                        description = sbsar_info.get("description")
                        if description:
                            info["description"] = description

    # if sketchfab asset --> use folder structure and try to utilize info
    # about the scene

    if not preview:
        if not files.get_by_type("__icon__"):
            possible_icons = [
                file for file in files.get_by_extension(
                    ('.png', '.jpg', '.jpeg'))
                if not file.is_meta
            ]
            if not possible_icons:
                # render asset's preview
                pass
            if len(possible_icons) == 1:
                preview = possible_icons[0]
    if preview:
        utils.move_to_folder(preview, gallery_folder)

    asset = cls.new(id_path, exist_ok=True)
    if ignore_info and old_info:
        asset.update_info(old_info)
    asset.update_info(info)
    asset.standardize_info()
    id = id.lower()
    return id, asset
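# NOTE: utils.move_to_folder appears throughout auto(): it relocates a file
# (or directory) into a folder, creating the folder if needed, and in the
# directory branch its return value is used as the new location. A minimal
# sketch of that assumed contract; the real helper may differ.

import os
import shutil


def move_to_folder(src, folder):
    """Move ``src`` into ``folder`` (created if missing); return the new path."""
    os.makedirs(folder, exist_ok=True)
    new_path = os.path.join(folder, os.path.basename(os.fspath(src)))
    shutil.move(os.fspath(src), new_path)
    return new_path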