def handle_source(source):
    """Resolve a package source (directory, local zip, or remote zip URL)
    into a local package directory.

    source -- an existing directory (returned unchanged), a path to a
        local .zip file, or a URL to fetch a zip from.

    Returns the first directory found inside the freshly extracted
    archive (skipping the macOS '__MACOSX' artifact), or False when no
    package directory could be produced.
    """
    if os.path.isdir(source):
        # Already a directory on disk; nothing to download or extract.
        return source

    package_dir = False
    unzip_into_dir = os.path.join(
        os.getcwd(), '_pkg_' + datetime.now().strftime('%y%m%d_%H%M%S'))
    os.mkdir(unzip_into_dir)

    if source.endswith(".zip") and os.path.isfile(source):
        # Local zip file: extract it directly.  (The original fell
        # through to urlretrieve as well, re-fetching the same archive.)
        unzip_file(source, unzip_into_dir)
    else:
        # Treat the source as a URL.  Best-effort: a failed download or
        # extraction simply leaves package_dir False for the caller.
        try:
            zip_file = os.path.join(unzip_into_dir, "source_data.zip")
            urllib.urlretrieve(source, zip_file)
            unzip_file(zip_file, unzip_into_dir)
        except Exception:
            pass

    # The package is expected to be the single top-level directory of
    # the archive; skip the '__MACOSX' folder that macOS zips include.
    for path in os.listdir(unzip_into_dir):
        if os.path.basename(path) != '__MACOSX':
            full_path = os.path.join(unzip_into_dir, path)
            if os.path.isdir(full_path):
                package_dir = full_path
                break

    return package_dir
def setup_elasticsearch(self, package_name, port=9200):
    """Installs Elasticsearch into the package directory and adds
    default settings for running in a test environment.

    Change these settings in production.

    package_name -- used to derive the install location via
        self.get_elasticsearch_install_location()
    port -- HTTP port written into elasticsearch.yml (default 9200)
    """
    tmp_dir = settings.TMP_DIR
    install_location = self.get_elasticsearch_install_location(
        package_name)
    url = get_elasticsearch_download_url(
        os.path.join(settings.ROOT_DIR, 'install'))
    file_name = url.split('/')[-1]
    download_elasticsearch(tmp_dir)
    unzip_file(os.path.join(tmp_dir, file_name), tmp_dir)
    # Move to folder without version in name, so we can easilly find it back.
    # splitext replaces the original file_name[:-4] slice, which silently
    # assumed a 4-character extension such as '.zip'.
    file_name_wo_extention = os.path.splitext(file_name)[0]
    unzip_location = os.path.join(tmp_dir, file_name_wo_extention)
    os.rename(unzip_location, install_location)
    es_config_directory = os.path.join(install_location, 'config')
    try:
        # Keep the stock config as a .orig backup; its absence is not fatal.
        os.rename(
            os.path.join(es_config_directory, 'elasticsearch.yml'),
            os.path.join(es_config_directory, 'elasticsearch.yml.orig'))
    except OSError:
        pass
    with open(os.path.join(es_config_directory, 'elasticsearch.yml'), 'w') as f:
        f.write('# ----------------- FOR TESTING ONLY -----------------')
        f.write('\n# - THESE SETTINGS SHOULD BE REVIEWED FOR PRODUCTION -')
        f.write('\nnode.max_local_storage_nodes: 1')
        f.write('\nindex.number_of_shards: 1')
        f.write('\nindex.number_of_replicas: 0')
        f.write('\nhttp.port: %s' % port)
        f.write('\ndiscovery.zen.ping.multicast.enabled: false')
        f.write('\ndiscovery.zen.ping.unicast.hosts: ["localhost"]')
        f.write(
            '\ncluster.routing.allocation.disk.threshold_enabled: false')
    # Install the elasticsearch-head plugin.
    # NOTE(review): os.system with an interpolated path breaks on paths
    # containing spaces; install locations are assumed space-free here.
    if sys.platform == 'win32':
        os.system("call %s --install mobz/elasticsearch-head" %
                  (os.path.join(install_location, 'bin', 'plugin.bat')))
    else:
        os.chdir(os.path.join(install_location, 'bin'))
        os.system("chmod u+x plugin")
        os.system("./plugin -install mobz/elasticsearch-head")
        os.system("chmod u+x elasticsearch")
def install(self, install_location=None, port=None):
    """Installs Elasticsearch into the package directory and adds
    default settings for running in a test environment.

    Change these settings in production; see
    https://www.elastic.co/guide/en/elasticsearch/reference/6.7/important-settings.html

    install_location -- directory to install into (made absolute, created
        if missing)
    port -- HTTP port written into elasticsearch.yml
    """
    install_location = os.path.abspath(install_location)
    utils.ensure_dir(install_location)
    url = get_elasticsearch_download_url(os.path.join(settings.ROOT_DIR, 'install'))
    file_name = url.split('/')[-1]
    # The archive name embeds the OS (e.g. '...-linux-x86_64.zip'); strip
    # everything from the platform token onward to get the extracted
    # directory's versioned name.
    os_name = platform.system().lower()
    file_name_wo_extention = file_name.split('-%s' % os_name)[0]
    download_elasticsearch(os.path.join(settings.ROOT_DIR, 'install'))
    unzip_file(os.path.join(settings.ROOT_DIR, 'install', file_name), install_location)
    es_config_directory = os.path.join(install_location, file_name_wo_extention, 'config')
    try:
        # Keep the stock config as a .orig backup; its absence is not fatal.
        os.rename(os.path.join(es_config_directory, 'elasticsearch.yml'),
                  os.path.join(es_config_directory, 'elasticsearch.yml.orig'))
    except OSError:
        pass
    os.chmod(os.path.join(install_location, file_name_wo_extention, 'bin', 'elasticsearch'), 0o755)

    def change_permissions_recursive(path, mode):
        """Chmod every directory under path, plus every file that lives
        beneath a 'bin' directory, to the given mode."""
        for walk_root, dirnames, filenames in os.walk(path, topdown=True):
            for dirname in dirnames:
                os.chmod(os.path.join(walk_root, dirname), mode)
            for filename in filenames:
                fpath = os.path.join(walk_root, filename)
                # Component test via os.sep so this also works on Windows;
                # the original "'/bin/' in file" substring test never
                # matched backslash-separated paths.
                if 'bin' in os.path.dirname(fpath).split(os.sep):
                    os.chmod(fpath, mode)

    # x-pack-ml ships native binaries that must be made executable.
    change_permissions_recursive(
        os.path.join(install_location, file_name_wo_extention, 'modules', 'x-pack-ml', 'platform'),
        0o755)
    with open(os.path.join(es_config_directory, 'elasticsearch.yml'), 'w') as f:
        f.write('# ----------------- FOR TESTING ONLY -----------------')
        f.write('\n# - THESE SETTINGS SHOULD BE REVIEWED FOR PRODUCTION -')
        f.write('\n# -https://www.elastic.co/guide/en/elasticsearch/reference/6.7/important-settings.html - ')
        f.write('\nhttp.port: %s' % port)
        f.write('\n\n# for the elasticsearch-head plugin')
        f.write('\nhttp.cors.enabled: true')
        f.write('\nhttp.cors.allow-origin: "*"')
        f.write('\n')
    print('Elasticsearch installed at %s' % os.path.join(install_location, file_name_wo_extention))
def setup_elasticsearch(self, package_name, port=9200):
    """Installs Elasticsearch into the package directory and adds
    default settings for running in a test environment.

    Change these settings in production.

    package_name -- used to derive the install location via
        self.get_elasticsearch_install_location()
    port -- HTTP port written into elasticsearch.yml (default 9200)
    """
    install_location = self.get_elasticsearch_install_location(package_name)
    install_root = os.path.abspath(os.path.join(install_location, ".."))
    url = get_elasticsearch_download_url(os.path.join(settings.ROOT_DIR, "install"))
    file_name = url.split("/")[-1]
    archive_path = os.path.join(settings.ROOT_DIR, "install", file_name)
    try:
        unzip_file(archive_path, install_root)
    except Exception:
        # Cached archive is missing or unreadable: download a fresh copy
        # and extract it.  (The original downloaded but never extracted,
        # leaving install_location empty and the steps below failing.)
        download_elasticsearch(os.path.join(settings.ROOT_DIR, "install"))
        unzip_file(archive_path, install_root)
    es_config_directory = os.path.join(install_location, "config")
    try:
        # Keep the stock config as a .orig backup; its absence is not fatal.
        os.rename(
            os.path.join(es_config_directory, "elasticsearch.yml"),
            os.path.join(es_config_directory, "elasticsearch.yml.orig"),
        )
    except OSError:
        pass
    with open(os.path.join(es_config_directory, "elasticsearch.yml"), "w") as f:
        f.write("# ----------------- FOR TESTING ONLY -----------------")
        f.write("\n# - THESE SETTINGS SHOULD BE REVIEWED FOR PRODUCTION -")
        f.write("\nnode.max_local_storage_nodes: 1")
        f.write("\nindex.number_of_shards: 1")
        f.write("\nindex.number_of_replicas: 0")
        f.write("\nhttp.port: %s" % port)
        f.write("\ndiscovery.zen.ping.multicast.enabled: false")
        f.write('\ndiscovery.zen.ping.unicast.hosts: ["localhost"]')
        f.write("\ncluster.routing.allocation.disk.threshold_enabled: false")
    # Install the elasticsearch-head plugin.
    # NOTE(review): os.system with an interpolated path breaks on paths
    # containing spaces; install locations are assumed space-free here.
    if sys.platform == "win32":
        os.system(
            "call %s --install mobz/elasticsearch-head"
            % (os.path.join(install_location, "bin", "plugin.bat"))
        )
    else:
        os.chdir(os.path.join(install_location, "bin"))
        os.system("chmod u+x plugin")
        os.system("./plugin -install mobz/elasticsearch-head")
        os.system("chmod u+x elasticsearch")
def install(self, install_location=None, port=None):
    """Installs Elasticsearch into the package directory and adds
    default settings for running in a test environment.

    Change these settings in production; see
    https://www.elastic.co/guide/en/elasticsearch/reference/5.0/system-config.html

    install_location -- directory to install into (made absolute, created
        if missing)
    port -- HTTP port written into elasticsearch.yml
    """
    install_location = os.path.abspath(install_location)
    utils.ensure_dir(install_location)
    url = get_elasticsearch_download_url(os.path.join(settings.ROOT_DIR, 'install'))
    file_name = url.split('/')[-1]
    file_name_wo_extention, extention = os.path.splitext(file_name)
    download_elasticsearch(os.path.join(settings.ROOT_DIR, 'install'))
    unzip_file(os.path.join(settings.ROOT_DIR, 'install', file_name), install_location)
    es_config_directory = os.path.join(install_location, file_name_wo_extention, 'config')
    try:
        # Keep the stock config as a .orig backup; its absence is not fatal.
        os.rename(os.path.join(es_config_directory, 'elasticsearch.yml'),
                  os.path.join(es_config_directory, 'elasticsearch.yml.orig'))
    except OSError:
        pass
    # 0o755 (rwxr-xr-x): the original used the Python-2-only literal 0755,
    # a syntax error on Python 3; 0o755 is valid on both 2.6+ and 3.
    os.chmod(os.path.join(install_location, file_name_wo_extention, 'bin', 'elasticsearch'), 0o755)
    with open(os.path.join(es_config_directory, 'elasticsearch.yml'), 'w') as f:
        f.write('# ----------------- FOR TESTING ONLY -----------------')
        f.write('\n# - THESE SETTINGS SHOULD BE REVIEWED FOR PRODUCTION -')
        f.write('\n# -https://www.elastic.co/guide/en/elasticsearch/reference/5.0/system-config.html - ')
        f.write('\nhttp.port: %s' % port)
        f.write('\nscript.inline: true')
        f.write('\n\n# for the elasticsearch-head plugin')
        f.write('\nhttp.cors.enabled: true')
        f.write('\nhttp.cors.allow-origin: "*"')
        f.write('\n')
    # print() keeps this line valid on both Python 2 and 3 (the original
    # used a Python-2 print statement, unlike the sibling install()).
    print('Elasticsearch installed at %s' % os.path.join(install_location, file_name_wo_extention))
def __init__(self, file=None, mapping_file=None, relations_file=None):
    """Load a business data file plus its optional .mapping and
    .relations side-car files.

    file -- path to a business data file (json/csv/zip/shp); defaults to
        settings.BUSINESS_DATA_FILES when not given
    mapping_file -- path to a .mapping (JSON) file; defaults to the
        business data file's name with a .mapping extension
    relations_file -- path to a .relations (CSV) file; defaults likewise

    NOTE(review): this is Python 2 code (sys.maxint, print statements,
    unicodecsv).
    """
    self.business_data = ''
    self.mapping = None
    self.graphs = ''
    self.reference_data = ''
    self.business_data = ''
    self.file_format = ''
    self.relations = ''
    # Allow arbitrarily large CSV fields.
    csv.field_size_limit(sys.maxint)
    if not file:
        file = settings.BUSINESS_DATA_FILES
    else:
        file = [file]
    if mapping_file == None:
        # Derive the default mapping path from the data file's name.
        # NOTE(review): split('.')[0] truncates at the FIRST dot, so a
        # path containing any other dot produces the wrong name — verify.
        try:
            mapping_file = [file[0].split('.')[0] + '.mapping']
        except:
            print '*' * 80
            print "ERROR: Mapping file is missing or improperly named. Make sure you have mapping file with the same basename as your business data file and the extension .mapping"
            print '*' * 80
            sys.exit()
    else:
        try:
            mapping_file = [mapping_file]
        except:
            print '*' * 80
            print "ERROR: Mapping file is missing or improperly named. Make sure you have mapping file with the same basename as your business data file and the extension .mapping"
            print '*' * 80
            sys.exit()
    if relations_file == None:
        try:
            relations_file = [file[0].split('.')[0] + '.relations']
        except:
            # Best-effort: leave relations_file as None if no file name
            # is available to derive it from.
            pass
    # Attach the relations CSV reader, if the side-car file exists.
    for path in relations_file:
        if os.path.exists(path):
            if isfile(join(path)):
                self.relations = csv.DictReader(
                    open(relations_file[0], 'r'))
    # Load the mapping JSON, if the side-car file exists.
    for path in mapping_file:
        if os.path.exists(path):
            if isfile(join(path)):
                self.mapping = json.load(open(path, 'r'))
            else:
                self.mapping = None
    # Load the business data itself; format is taken from the extension.
    for path in file:
        if os.path.exists(path):
            if isfile(join(path)):
                self.file_format = file[0].split('.')[-1]
                if self.file_format == 'json':
                    with open(file[0], 'rU') as f:
                        archesfile = JSONDeserializer().deserialize(f)
                        if 'graph' in archesfile.keys():
                            self.graphs = archesfile['graph']
                        if 'reference_data' in archesfile.keys():
                            self.reference_data = archesfile[
                                'reference_data']
                        if 'business_data' in archesfile.keys():
                            self.business_data = archesfile[
                                'business_data']
                elif self.file_format == 'csv':
                    data = unicodecsv.DictReader(open(file[0], 'rU'),
                                                 encoding='utf-8-sig',
                                                 restkey='ADDITIONAL',
                                                 restval='MISSING')
                    self.business_data = list(data)
                elif self.file_format == 'zip':
                    # A zip is expected to contain exactly one shapefile;
                    # extract it next to the archive and convert to CSV.
                    shp_zipfile = os.path.basename(path)
                    shp_zipfile_name = os.path.splitext(shp_zipfile)[0]
                    unzip_dir = os.path.join(os.path.dirname(path),
                                             shp_zipfile_name)
                    unzip_file(path, unzip_dir)
                    shp = [
                        i for i in os.listdir(unzip_dir)
                        if i.endswith(".shp")
                    ]
                    if len(shp) == 0:
                        print '*' * 80
                        print "ERROR: There is no shapefile in this zipfile."
                        print '*' * 80
                        exit()
                    elif len(shp) > 1:
                        print '*' * 80
                        print "ERROR: There are multiple shapefiles in this zipfile. Please load each individually:"
                        for s in shp:
                            print "\npython manage.py packages -o import_business_data -s {0} -c {1} -ow [append or overwrite]".format(
                                os.path.join(unzip_dir, s), mapping_file[0])
                        print '*' * 80
                        exit()
                    shp_path = os.path.join(unzip_dir, shp[0])
                    self.business_data = self.shape_to_csv(shp_path)
                elif self.file_format == 'shp':
                    self.business_data = self.shape_to_csv(path)
                else:
                    print str(file) + ' is not a valid file'
        else:
            print path + ' is not a valid path'
def load_package(self, source, setup_db=True, overwrite_concepts='ignore', stage_concepts='keep'):
    """Load a package from a local path or remote URL pointing at a
    zipfile: system settings, widgets, functions, datatypes, concepts,
    graphs, resource-to-resource constraints, map layers, business data,
    and resource views — in that order.

    source -- local zipfile path or a URL to download the zipfile from
    setup_db -- NOTE(review): defaults to True, but the code calls
        setup_db.lower(), so a string like 'true'/'y' is expected when
        enabled; passing the boolean True would raise AttributeError —
        confirm with callers.
    overwrite_concepts, stage_concepts -- passed through to
        self.import_reference_data for each concept/collection file.

    The nested helpers below close over download_dir, which is assigned
    near the end of this method before any helper is called.

    NOTE(review): this is Python 2 code (raw_input, print statements,
    urllib.urlopen/urlretrieve).
    """
    def load_system_settings():
        # Copy the package's System_Settings.json over the local one
        # (after asking, if a local settings file already exists) and
        # import it as business data.
        update_system_settings = True
        if os.path.exists(settings.SYSTEM_SETTINGS_LOCAL_PATH):
            response = raw_input(
                'Overwrite current system settings with package settings? (Y/N): '
            )
            if response.lower() in ('t', 'true', 'y', 'yes'):
                update_system_settings = True
                print 'Using package system settings'
            else:
                update_system_settings = False
        if update_system_settings == True:
            if len(
                    glob.glob(
                        os.path.join(download_dir, '*', 'system_settings',
                                     'System_Settings.json'))) > 0:
                system_settings = glob.glob(
                    os.path.join(download_dir, '*', 'system_settings',
                                 'System_Settings.json'))[0]
                shutil.copy(system_settings,
                            settings.SYSTEM_SETTINGS_LOCAL_PATH)
                self.import_business_data(
                    settings.SYSTEM_SETTINGS_LOCAL_PATH, overwrite=True)

    def load_resource_to_resource_constraints():
        # Create/update Resource2ResourceConstraint rows from the
        # package's package_config.json, if present.
        config_paths = glob.glob(
            os.path.join(download_dir, '*', 'package_config.json'))
        if len(config_paths) > 0:
            configs = json.load(open(config_paths[0]))
            for relationship in configs[
                    'permitted_resource_relationships']:
                obj, created = models.Resource2ResourceConstraint.objects.update_or_create(
                    resourceclassfrom_id=uuid.UUID(
                        relationship['resourceclassfrom_id']),
                    resourceclassto_id=uuid.UUID(
                        relationship['resourceclassto_id']),
                    resource2resourceid=uuid.UUID(
                        relationship['resource2resourceid']))

    def load_resource_views():
        # Execute each packaged .sql view file against the database.
        resource_views = glob.glob(
            os.path.join(download_dir, '*', 'business_data',
                         'resource_views', '*.sql'))
        try:
            with connection.cursor() as cursor:
                for view in resource_views:
                    with open(view, 'r') as f:
                        sql = f.read()
                        cursor.execute(sql)
        except Exception as e:
            print e
            print 'Could not connect to db'

    def load_graphs():
        # Import branch graphs before resource models.
        branches = glob.glob(
            os.path.join(download_dir, '*', 'graphs', 'branches'))[0]
        resource_models = glob.glob(
            os.path.join(download_dir, '*', 'graphs',
                         'resource_models'))[0]
        # self.import_graphs(os.path.join(settings.ROOT_DIR, 'db', 'graphs','branches'), overwrite_graphs=False)
        self.import_graphs(branches, overwrite_graphs=False)
        self.import_graphs(resource_models, overwrite_graphs=False)

    def load_concepts(overwrite, stage):
        # Import concept schemes first, then collections.
        concept_data = glob.glob(
            os.path.join(download_dir, '*', 'reference_data', 'concepts',
                         '*.xml'))
        collection_data = glob.glob(
            os.path.join(download_dir, '*', 'reference_data',
                         'collections', '*.xml'))
        for path in concept_data:
            self.import_reference_data(path, overwrite, stage)
        for path in collection_data:
            self.import_reference_data(path, overwrite, stage)

    def load_mapbox_styles(style_paths, basemap):
        # Register each mapbox style; an adjacent meta.json (icon/name)
        # overrides the defaults taken from the style itself.
        for path in style_paths:
            style = json.load(open(path))
            meta = {"icon": "fa fa-globe", "name": style["name"]}
            if os.path.exists(
                    os.path.join(os.path.dirname(path), 'meta.json')):
                meta = json.load(
                    open(os.path.join(os.path.dirname(path), 'meta.json')))
            self.add_mapbox_layer(meta["name"], path, meta["icon"], basemap)

    def load_tile_server_layers(xml_paths, basemap):
        # Same pattern as load_mapbox_styles, but for tile-server XML.
        for path in xml_paths:
            meta = {"icon": "fa fa-globe", "name": os.path.basename(path)}
            if os.path.exists(
                    os.path.join(os.path.dirname(path), 'meta.json')):
                meta = json.load(
                    open(os.path.join(os.path.dirname(path), 'meta.json')))
            self.add_tileserver_layer(meta['name'], path, meta['icon'],
                                      basemap)

    def load_map_layers():
        basemap_styles = glob.glob(
            os.path.join(download_dir, '*', 'map_layers',
                         'mapbox_spec_json', 'basemaps', '*', '*.json'))
        overlay_styles = glob.glob(
            os.path.join(download_dir, '*', 'map_layers',
                         'mapbox_spec_json', 'overlays', '*', '*.json'))
        load_mapbox_styles(basemap_styles, True)
        load_mapbox_styles(overlay_styles, False)
        tile_server_basemaps = glob.glob(
            os.path.join(download_dir, '*', 'map_layers', 'tile_server',
                         'basemaps', '*', '*.xml'))
        tile_server_overlays = glob.glob(
            os.path.join(download_dir, '*', 'map_layers', 'tile_server',
                         'overlays', '*', '*.xml'))
        load_tile_server_layers(tile_server_basemaps, True)
        load_tile_server_layers(tile_server_overlays, False)

    def load_business_data():
        # Import .json/.csv business data, then relations, then copy any
        # uploaded files into MEDIA_ROOT/uploadedfiles.
        business_data = []
        business_data += glob.glob(
            os.path.join(download_dir, '*', 'business_data', '*.json'))
        business_data += glob.glob(
            os.path.join(download_dir, '*', 'business_data', '*.csv'))
        relations = glob.glob(
            os.path.join(download_dir, '*', 'business_data', 'relations',
                         '*.relations'))
        for path in business_data:
            if path.endswith('csv'):
                # NOTE(review): config_file is computed but never passed
                # on — confirm whether import_business_data discovers the
                # .mapping file itself.
                config_file = path.replace('.csv', '.mapping')
                self.import_business_data(path, overwrite=True,
                                          bulk_load=True)
            else:
                self.import_business_data(path, overwrite=True)
        for relation in relations:
            self.import_business_data_relations(relation)
        uploaded_files = glob.glob(
            os.path.join(download_dir, '*', 'business_data', 'files', '*'))
        dest_files_dir = os.path.join(settings.MEDIA_ROOT, 'uploadedfiles')
        if os.path.exists(dest_files_dir) == False:
            os.makedirs(dest_files_dir)
        for f in uploaded_files:
            shutil.copy(f, dest_files_dir)

    def load_extensions(ext_type, cmd):
        # Copy each extension's template/component into the app tree and
        # register its module via the given management command.
        extensions = glob.glob(
            os.path.join(download_dir, '*', 'extensions', ext_type, '*'))
        root = settings.APP_ROOT if settings.APP_ROOT != None else os.path.join(
            settings.ROOT_DIR, 'app')
        component_dir = os.path.join(root, 'media', 'js', 'views',
                                     'components', ext_type)
        module_dir = os.path.join(root, ext_type)
        template_dir = os.path.join(root, 'templates', 'views',
                                    'components', ext_type)
        for extension in extensions:
            templates = glob.glob(os.path.join(extension, '*.htm'))
            components = glob.glob(os.path.join(extension, '*.js'))
            # Only install the UI pieces when there is exactly one of each.
            if len(templates) == 1 and len(components) == 1:
                if os.path.exists(template_dir) == False:
                    os.mkdir(template_dir)
                if os.path.exists(component_dir) == False:
                    os.mkdir(component_dir)
                shutil.copy(templates[0], template_dir)
                shutil.copy(components[0], component_dir)
            modules = glob.glob(os.path.join(extension, '*.json'))
            modules.extend(glob.glob(os.path.join(extension, '*.py')))
            if len(modules) > 0:
                module = modules[0]
                shutil.copy(module, module_dir)
                management.call_command(cmd, 'register', source=module)

    def load_widgets():
        load_extensions('widgets', 'widget')

    def load_functions():
        load_extensions('functions', 'fn')

    def load_datatypes():
        load_extensions('datatypes', 'datatype')

    # Probe whether source is a reachable URL; any failure means local.
    try:
        urllib.urlopen(source)
        remote = True
    except:
        remote = False
    if os.path.exists(source) or remote == True:
        if remote == True:
            # Download the remote zip into a fresh temp directory.
            download_dir = os.path.join(os.getcwd(),
                                        'temp_' + str(uuid.uuid4()))
            if os.path.exists(download_dir) == False:
                os.mkdir(download_dir)
            zip_file = os.path.join(download_dir, 'source_data.zip')
            urllib.urlretrieve(source, zip_file)
        else:
            download_dir = os.path.dirname(source)
            zip_file = source
        unzip_file(zip_file, download_dir)
        if setup_db != False:
            if setup_db.lower() in ('t', 'true', 'y', 'yes'):
                self.setup_db(settings.PACKAGE_NAME)
        print 'loading system settings'
        load_system_settings()
        print 'loading widgets'
        load_widgets()
        print 'loading functions'
        load_functions()
        print 'loading datatypes'
        load_datatypes()
        print 'loading concepts'
        load_concepts(overwrite_concepts, stage_concepts)
        print 'loading resource models and branches'
        load_graphs()
        print 'loading resource to resource constraints'
        load_resource_to_resource_constraints()
        print 'loading map layers'
        load_map_layers()
        print 'loading business data - resource instances and relationships'
        load_business_data()
        print 'loading resource views'
        load_resource_views()
    else:
        print "A path to a local or remote zipfile is required"
def __init__(self, file=None, mapping_file=None, relations_file=None):
    """Load a business data file plus its optional .mapping and
    .relations side-car files.

    file -- path to a business data file (json/csv/zip/shp); defaults to
        settings.BUSINESS_DATA_FILES when not given
    mapping_file -- path to a .mapping (JSON) file; defaults to the
        business data file's name with a .mapping extension
    relations_file -- path to a .relations (CSV) file; defaults likewise
    """
    self.business_data = ""
    self.mapping = None
    self.graphs = ""
    self.reference_data = ""
    self.file_format = ""
    self.relations = ""
    # Accept arbitrarily large CSV fields.  field_size_limit raises
    # OverflowError when the value does not fit in a C long (e.g.
    # sys.maxsize on 64-bit Windows), so fall back to the largest value
    # that does.  (Was a bare except.)
    try:
        csv.field_size_limit(sys.maxsize)
    except OverflowError:
        csv.field_size_limit(int(ctypes.c_ulong(-1).value // 2))
    if not file:
        file = settings.BUSINESS_DATA_FILES
    else:
        file = [file]
    self.file = file
    if mapping_file is None:
        try:
            mapping_file_base = os.path.splitext(file[0])[0]
            mapping_file = [f"{mapping_file_base}.mapping"]
        except Exception:
            print("*" * 80)
            print(
                "ERROR: Mapping file is missing or improperly named. Make sure you have "
                "mapping file with the same basename as your business data file and the extension .mapping"
            )
            print("*" * 80)
            sys.exit()
    else:
        # Wrapping in a list cannot fail; the original try/except here
        # was dead code.
        mapping_file = [mapping_file]
    if relations_file is None:
        try:
            relations_file_base = os.path.splitext(file[0])[0]
            relations_file = [f"{relations_file_base}.relations"]
        except Exception:
            # Nothing to derive a name from; use an empty list so the
            # loop below is a no-op instead of iterating None (which
            # raised TypeError in the original).
            relations_file = []
    # Attach the relations CSV reader, if the side-car file exists.
    for path in relations_file:
        if os.path.exists(path):
            if isfile(join(path)):
                self.relations = csv.DictReader(
                    open(relations_file[0], "r"))
    # Load the mapping JSON, if the side-car file exists.
    for path in mapping_file:
        if os.path.exists(path):
            if isfile(join(path)):
                self.mapping = json.load(open(path, "r"))
            else:
                self.mapping = None
    # Load the business data itself; format is taken from the extension.
    for path in file:
        if os.path.exists(path):
            if isfile(join(path)):
                self.file_format = os.path.splitext(file[0])[1].strip(".")
                if self.file_format == "json":
                    # "r" instead of "rU": the 'U' mode flag was removed
                    # in Python 3.11; universal newlines are the default
                    # in text mode anyway.
                    with open(file[0], "r") as f:
                        archesfile = JSONDeserializer().deserialize(f)
                        if "graph" in archesfile:
                            self.graphs = archesfile["graph"]
                        if "reference_data" in archesfile:
                            self.reference_data = archesfile[
                                "reference_data"]
                        if "business_data" in archesfile:
                            self.business_data = archesfile[
                                "business_data"]
                elif self.file_format == "csv":
                    data = csv.DictReader(open(file[0], encoding="utf-8"))
                    self.business_data = list(data)
                elif self.file_format == "zip":
                    # A zip is expected to contain exactly one shapefile;
                    # extract it next to the archive and convert to CSV.
                    shp_zipfile = os.path.basename(path)
                    shp_zipfile_name = os.path.splitext(shp_zipfile)[0]
                    unzip_dir = os.path.join(os.path.dirname(path),
                                             shp_zipfile_name)
                    unzip_file(path, unzip_dir)
                    shp = [
                        i for i in os.listdir(unzip_dir)
                        if i.endswith(".shp")
                    ]
                    if len(shp) == 0:
                        print("*" * 80)
                        print(
                            "ERROR: There is no shapefile in this zipfile."
                        )
                        print("*" * 80)
                        exit()
                    elif len(shp) > 1:
                        print("*" * 80)
                        print(
                            "ERROR: There are multiple shapefiles in this zipfile. Please load each individually:"
                        )
                        for s in shp:
                            print(
                                "\npython manage.py packages -o import_business_data -s {0} -c {1} -ow [append or overwrite]"
                                .format(os.path.join(unzip_dir, s),
                                        mapping_file[0]))
                        print("*" * 80)
                        exit()
                    shp_path = os.path.join(unzip_dir, shp[0])
                    self.business_data = self.shape_to_csv(shp_path)
                elif self.file_format == "shp":
                    self.business_data = self.shape_to_csv(path)
                else:
                    print(str(file) + " is not a valid file")
        else:
            print(path + " is not a valid path")
def __init__(self, file=None, mapping_file=None, relations_file=None):
    """Load a business data file plus its optional .mapping and
    .relations side-car files.

    file -- path to a business data file (json/csv/zip/shp); defaults to
        settings.BUSINESS_DATA_FILES
    mapping_file -- path to a .mapping (JSON) file; defaults to the
        business data file's name with a .mapping extension
    relations_file -- path to a .relations (CSV) file; defaults likewise

    NOTE(review): this is Python 2 code (sys.maxint, print statements,
    unicodecsv).
    """
    self.business_data = ''
    self.mapping = None
    self.graphs = ''
    self.reference_data = ''
    self.business_data = ''
    self.file_format = ''
    self.relations = ''
    # Allow arbitrarily large CSV fields.
    csv.field_size_limit(sys.maxint)
    if not file:
        file = settings.BUSINESS_DATA_FILES
    else:
        file = [file]
    if mapping_file == None:
        # Derive the default mapping path from the data file's name.
        # NOTE(review): split('.')[0] truncates at the FIRST dot, so a
        # path containing any other dot produces the wrong name — verify.
        try:
            mapping_file = [file[0].split('.')[0] + '.mapping']
        except:
            print '*'*80
            print "ERROR: Mapping file is missing or improperly named. Make sure you have mapping file with the same basename as your business data file and the extension .mapping"
            print '*'*80
            sys.exit()
    else:
        try:
            mapping_file = [mapping_file]
        except:
            print '*'*80
            print "ERROR: Mapping file is missing or improperly named. Make sure you have mapping file with the same basename as your business data file and the extension .mapping"
            print '*'*80
            sys.exit()
    if relations_file == None:
        try:
            relations_file = [file[0].split('.')[0] + '.relations']
        except:
            # Best-effort: leave relations_file as None if no file name
            # is available to derive it from.
            pass
    # Attach the relations CSV reader, if the side-car file exists.
    for path in relations_file:
        if os.path.exists(path):
            if isfile(join(path)):
                self.relations = csv.DictReader(open(relations_file[0], 'r'))
    # Load the mapping JSON, if the side-car file exists.
    for path in mapping_file:
        if os.path.exists(path):
            if isfile(join(path)):
                self.mapping = json.load(open(path, 'r'))
            else:
                self.mapping = None
    # Load the business data itself; format is taken from the extension.
    for path in file:
        if os.path.exists(path):
            if isfile(join(path)):
                self.file_format = file[0].split('.')[-1]
                if self.file_format == 'json':
                    with open(file[0], 'rU') as f:
                        archesfile = JSONDeserializer().deserialize(f)
                        if 'graph' in archesfile.keys():
                            self.graphs = archesfile['graph']
                        if 'reference_data' in archesfile.keys():
                            self.reference_data = archesfile['reference_data']
                        if 'business_data' in archesfile.keys():
                            self.business_data = archesfile['business_data']
                elif self.file_format == 'csv':
                    data = unicodecsv.DictReader(open(file[0], 'rU'), encoding='utf-8-sig', restkey='ADDITIONAL', restval='MISSING')
                    self.business_data = list(data)
                elif self.file_format == 'zip':
                    # A zip is expected to contain exactly one shapefile;
                    # extract it next to the archive and convert to CSV.
                    shp_zipfile = os.path.basename(path)
                    shp_zipfile_name = os.path.splitext(shp_zipfile)[0]
                    unzip_dir = os.path.join(os.path.dirname(path),shp_zipfile_name)
                    unzip_file(path,unzip_dir)
                    shp = [i for i in os.listdir(unzip_dir) if i.endswith(".shp")]
                    if len(shp) == 0:
                        print '*'*80
                        print "ERROR: There is no shapefile in this zipfile."
                        print '*'*80
                        exit()
                    elif len(shp) > 1:
                        print '*'*80
                        print "ERROR: There are multiple shapefiles in this zipfile. Please load each individually:"
                        for s in shp:
                            print "\npython manage.py packages -o import_business_data -s {0} -c {1} -ow [append or overwrite]".format(
                                os.path.join(unzip_dir,s),mapping_file[0])
                        print '*'*80
                        exit()
                    shp_path = os.path.join(unzip_dir,shp[0])
                    self.business_data = self.shape_to_csv(shp_path)
                elif self.file_format == 'shp':
                    self.business_data = self.shape_to_csv(path)
                else:
                    print str(file) + ' is not a valid file'
        else:
            print path + ' is not a valid path'