Example #1
def copy_files(in_dir, files, sample_type, out_dir):
    """
    Copy the selected files from in_dir to 'out_dir/sample_type'.
    """
    print("Copying shuffled %s data to dir '%s'" %
          (sample_type, join(out_dir, sample_type)))
    for file_name in files:
        full_file_name = os.path.join(in_dir, file_name)
        if os.path.isfile(full_file_name):
            sh_copy(full_file_name, join(out_dir, sample_type))
    print("Copied %d shuffled samples to dir '%s'" %
          (len(files), join(out_dir, sample_type)))
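This function assumes sh_copy and join are imported at module level and that the destination subfolder already exists. A minimal sketch of the assumed imports and a hypothetical call (the paths and file names are illustrative only):

import os
from os.path import join
from shutil import copy as sh_copy

# hypothetical data layout; shutil.copy needs the target directory to exist
os.makedirs(join("data/shuffled", "train"), exist_ok=True)
copy_files("data/raw", ["img_001.jpg", "img_002.jpg"], "train", "data/shuffled")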
Example #2
    def showContextMenu(self):
        self.syslogger('context menu not implemented ')
        self.selectedItem = self.getCurrentListPosition()
        mypath = self.getListItem(self.selectedItem).getProperty('path')
        myipath = self.getListItem(self.selectedItem).getProperty('ipath')
        mymtype = self.getListItem(self.selectedItem).getProperty('MType')
        mydialog = xbmcgui.Dialog()
        myret = mydialog.select('Choose action', ['Add to Favourites', 'XDG-Open', 'Execute', 'Copy path to clipboard', 'Copy action to clipboard', 'Import File'])
        if myret == 0:
            self.syslogger('context add favourite ' + mypath)
        elif myret == 1:
            self.syslogger('context xdg-open ' + myipath)
            os.system("xdg-open '" + myipath + "'")
        elif myret == 2:
            if mymtype == 'rom':
                mycmd = "python /home/inf0rmix/emuexec-thumbnailer '" + myipath.replace("'", "\\'") + "'"
                #self.syslogger("context execute-rom  "+mycmd)
                os.system(mycmd)
            else:
                self.syslogger("context execute '" + mypath + "'")
        elif myret == 3:
            self.syslogger('context CopyToClipboard ' + mypath)
            cb_set(mypath)
        elif myret == 5:
            self.syslogger('context ImportFile ' + mypath)
            dialog = xbmcgui.Dialog()
            mystartpath = XBMC_ADDON.getSetting("dlgDefaultDir")
            mypath = dialog.browse(1, 'Select file to import', 'files', '', False, False, mystartpath)
            mynewpath = os.path.dirname(mypath)
            XBMC_ADDON.setSetting("dlgDefaultDir", mynewpath)
            #self.syslogger('context cp '+mypath+' ' + self.basedir)
            sh_copy(mypath, self.basedir)
        elif myret == 4:
            if mypath.startswith('RunScript('):
                if mypath.find('?/') > -1:
                    mypath = '/' + mypath.rsplit('?/', 1)[1].rstrip(')')
                    self.syslogger('context menu 3 ' + mypath)
                    cb_set(mypath)
        #import dialogcontextmenu
        #constructorParam = "720p"
        #cm = dialogcontextmenu.ContextMenuDialog("script-RCB-contextmenu.xml", util.getAddonInstallPath(), "Default", constructorParam, gui=self)
        #del cm
        return True
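The example builds shell commands by string concatenation and manual quote escaping before handing them to os.system. A minimal sketch of a safer alternative using only the standard library, passing an argument list so no shell quoting is needed (the path is hypothetical):

import subprocess

ipath = "/roms/some game's file.zip"  # hypothetical path containing a quote
# an argument list is passed to the program directly, without a shell
subprocess.run(["xdg-open", ipath], check=False)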
Example #3
    def backup_codes(self):
        # yaml and CDumper are assumed to be imported at module level
        from shutil import copy as sh_copy, copytree as sh_cptree
        import os

        def ignore(src, names):
            # copytree ignore callback: always skip __pycache__ entries
            return ['__pycache__']

        # backup dirs (dirs_to_backup is assumed to be defined elsewhere,
        # e.g. a module-level list of source directories)
        backup_dir = f"./{self.summary_dir}/{self.exp_id}"
        os.makedirs(backup_dir, exist_ok=True)
        for _dir in dirs_to_backup:
            sh_cptree(_dir, f"./{backup_dir}/{_dir}", ignore=ignore)
        # backup ./*.py
        for _file in os.listdir('./'):
            if _file.endswith('.py'):
                sh_copy(f"./{_file}", f"./{backup_dir}/{_file}")
        # backup params
        _file = os.path.basename(self.params['param_file'])
        with open(f"{backup_dir}/{self.exp_id}___{_file}", 'w') as f:
            yaml.dump(self.params, f, CDumper)
Example #4
def download(request):
    try:
        folder = get_full_path(request.user, request.POST['folder'])
        to_download = request.POST.getlist('to_download[]')
        num_files = len(to_download)

        if num_files == 0:
            return HttpResponse(status=422)
        if num_files == 1:
            file_path = os.path.join(folder, to_download[0])
            if os.path.isfile(file_path):
                response = FileResponse(open(file_path, 'rb'))
                response["FILENAME"] = to_download[0]
            else:
                with TemporaryDirectory() as temp_dir:
                    zip_path = make_archive(
                        os.path.join(temp_dir, to_download[0]), "zip",
                        file_path)
                    response = FileResponse(open(zip_path, 'rb'))
                    response["FILENAME"] = to_download[0] + ".zip"
        else:
            with TemporaryDirectory() as temp_dir:
                tmp_files = os.path.join(temp_dir, "files")
                os.mkdir(tmp_files)
                for f in to_download:
                    f_path = os.path.join(folder, f)
                    if os.path.isfile(f_path):
                        sh_copy(f_path, tmp_files)
                    else:
                        copytree(f_path, os.path.join(tmp_files, f))
                zip_path = make_archive(os.path.join(temp_dir, "archive"),
                                        "zip", tmp_files)
                response = FileResponse(open(zip_path, 'rb'))
                response["FILENAME"] = "download.zip"

        return response

    except KeyError as e:
        print(e)
        return HttpResponse(status=400)

    except OSError as e:
        print(e)
        return HttpResponse(status=422)
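The view relies on several names imported elsewhere in the module. A sketch of the imports it appears to assume (get_full_path is a project-specific helper, not part of Django):

import os
from tempfile import TemporaryDirectory
from shutil import make_archive, copytree, copy as sh_copy
from django.http import FileResponse, HttpResponse

# get_full_path(user, folder) is assumed to resolve the user's folder on disk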
Example #5
def copy_shapefiles(input_shapefile_abspath,
                    hdf5_name,
                    dest_folder_path,
                    remove=True):
    """
    get all file with same prefix of input_shapefile_abspath and copy them to dest_folder_path.
    """
    # create folder with hdf5 name in input project folder
    input_hdf5name_folder_path = os.path.join(dest_folder_path,
                                              os.path.splitext(hdf5_name)[0])
    if os.path.exists(input_hdf5name_folder_path):
        if remove:
            try:
                rmtree(input_hdf5name_folder_path)
                os.mkdir(input_hdf5name_folder_path)
            except PermissionError:
                print(
                    "Error: Hydraulic input file can be copied to input project folder"
                    " as it is open in another program.")
                try:
                    rmtree(input_hdf5name_folder_path)
                    os.mkdir(input_hdf5name_folder_path)
                except PermissionError:
                    print(
                        "Error: Can't create folder in input project folder.")
                    return
    else:
        os.mkdir(input_hdf5name_folder_path)

    # copy all input files sharing the shapefile's prefix to the input files folder
    all_input_files_abspath_list = glob(input_shapefile_abspath[:-4] + "*")
    all_input_files_files_list = [
        os.path.basename(file_path)
        for file_path in all_input_files_abspath_list
    ]
    for src_path, file_name in zip(all_input_files_abspath_list,
                                   all_input_files_files_list):
        sh_copy(src_path,
                os.path.join(input_hdf5name_folder_path, file_name))
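The glob pattern input_shapefile_abspath[:-4] + "*" drops the .shp extension and therefore matches the sidecar files that accompany a shapefile. A small illustration with a hypothetical path:

from glob import glob

shp = "/project/input/mesh_triangulated.shp"  # hypothetical
# matches mesh_triangulated.shp, .shx, .dbf, .prj, ... in the same folder
sidecar_files = glob(shp[:-4] + "*")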
Example #6
def copy_hydrau_input_files(path_filename_source, filename_source_str,
                            hdf5_name, dest_folder_path):
    """
    copy input hydraulic files with indexHYDRAU.txt to input project folder in a folder as input
    (if severeral hydraulic with indexHYDRAU.txt, it will not be erased).
    """
    # create folder with hdf5 name in input project folder
    input_hdf5name_folder_path = os.path.join(dest_folder_path,
                                              os.path.splitext(hdf5_name)[0])
    if os.path.exists(input_hdf5name_folder_path):
        try:
            rmtree(input_hdf5name_folder_path)
            os.mkdir(input_hdf5name_folder_path)
        except PermissionError:
            print(
                "Error: Hydraulic input file can be copied to input project folder"
                " as it is open in another program.")
            try:
                rmtree(input_hdf5name_folder_path)
                os.mkdir(input_hdf5name_folder_path)
            except PermissionError:
                print("Error: Can't create folder in input project folder.")
    else:
        os.mkdir(input_hdf5name_folder_path)

    # get input files and copy them
    for file in filename_source_str.split(", "):
        if not os.path.splitext(file)[1]:  # no ext (ex: rubar20)
            files_to_copy = [
                x for x in os.listdir(path_filename_source) if file in x
            ]
            for file_to_copy in files_to_copy:
                if not os.path.isdir(
                        os.path.join(path_filename_source, file_to_copy)):
                    sh_copy(os.path.join(path_filename_source, file_to_copy),
                            input_hdf5name_folder_path)
        else:
            sh_copy(os.path.join(path_filename_source, file),
                    input_hdf5name_folder_path)
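The branch on os.path.splitext(file)[1] distinguishes bare filename prefixes (results split over several files, e.g. rubar20 outputs) from single files with an extension. A tiny illustration:

import os

os.path.splitext("rubar20")[1]          # '' -> treated as a filename prefix
os.path.splitext("indexHYDRAU.txt")[1]  # '.txt' -> copied as a single file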
Example #7
def paste(request):
    try:
        folder = get_full_path(request.user, request.POST['folder'])
        mode = request.session['clipboard_mode']
        for path in request.session['clipboard']:
            new_path = os.path.join(folder, os.path.basename(path))
            while os.path.exists(new_path):
                new_path += '.copy'
            if mode == 'cut':
                os.rename(path, new_path)
            elif mode == 'copy':
                sh_copy(path, new_path)
        if mode == 'cut':
            del request.session['clipboard']
        files = scan_folder(folder)
        return JsonResponse(files, safe=False)

    except KeyError as e:
        print(e)
        return HttpResponse(status=400)

    except OSError as e:
        print(e)
        return HttpResponse(status=422)
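sh_copy only handles regular files; if the clipboard holds a directory, the call raises an OSError that this view turns into a 422 response. A minimal sketch of a copy branch that also handles directories, similar to Example #4 (the helper name is hypothetical):

import os
from shutil import copytree, copy as sh_copy

def copy_entry(path, new_path):
    # hypothetical helper: copy either a single file or a whole directory tree
    if os.path.isdir(path):
        copytree(path, new_path)
    else:
        sh_copy(path, new_path)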
Example #8
def load_data_and_compute_hs(hydrosignature_description,
                             progress_value,
                             q=None,
                             print_cmd=False,
                             project_properties=None):
    if not print_cmd:
        sys.stdout = mystdout = StringIO()

    # minimum water height
    if not project_properties:
        project_properties = create_default_project_properties_dict()

    # progress
    progress_value.value = 10

    path_prj = project_properties["path_prj"]

    # load
    hdf5 = hdf5_mod.Hdf5Management(path_prj,
                                   hydrosignature_description["hdf5_name"],
                                   new=False,
                                   edit=True)
    hdf5.get_hdf5_attributes(close_file=False)
    hdf5.load_units_index()
    hdf5.load_data_2d()
    hdf5.load_whole_profile()

    # get new_data_2d
    new_data_2d = hdf5.data_2d
    if hydrosignature_description["hs_export_mesh"]:
        new_data_2d_whole = hdf5.data_2d_whole

    # check matching
    matching, error = check_hs_class_match_hydraulic_values(
        hydrosignature_description["classhv"],
        h_min=new_data_2d.hvum.h.min,
        h_max=new_data_2d.hvum.h.max,
        v_min=new_data_2d.hvum.v.min,
        v_max=new_data_2d.hvum.v.max)

    # remove hab data
    new_data_2d.hvum.hdf5_and_computable_list = new_data_2d.hvum.hdf5_and_computable_list.no_habs(
    )

    if matching:
        # progress
        delta_reach = 90 / new_data_2d.reach_number

        # for each reach
        for reach_number in range(new_data_2d.reach_number):

            # progress
            delta_unit = delta_reach / new_data_2d[reach_number].unit_number

            # for each unit
            for unit_number in range(new_data_2d[reach_number].unit_number):
                hyd_data_mesh = new_data_2d[reach_number][unit_number]["mesh"][
                    "data"].to_records(index=False)
                hyd_tin = new_data_2d[reach_number][unit_number]["mesh"]["tin"]
                i_whole_profile = new_data_2d[reach_number][unit_number][
                    "mesh"]["i_whole_profile"]
                hyd_data_node = new_data_2d[reach_number][unit_number]["node"][
                    "data"].to_records(index=False)
                hyd_xy_node = new_data_2d[reach_number][unit_number]["node"][
                    "xy"]
                hyd_hv_node = np.array(
                    [hyd_data_node["h"], hyd_data_node["v"]]).T

                # progress
                delta_mesh = delta_unit / len(hyd_tin)

                if hydrosignature_description["hs_export_mesh"]:
                    (nb_mesh, total_area, total_volume, mean_depth,
                     mean_velocity, mean_froude, min_depth, max_depth,
                     min_velocity, max_velocity, hsarea, hsvolume,
                     node_xy_out, node_data_out, mesh_data_out, tin_out,
                     i_whole_profile_out) = hydrosignature_calculation_alt(
                        delta_mesh,
                        progress_value,
                        hydrosignature_description["classhv"],
                        hyd_tin,
                        hyd_xy_node,
                        hyd_hv_node,
                        hyd_data_node,
                        hyd_data_mesh,
                        i_whole_profile,
                        return_cut_mesh=True)
                else:
                    (nb_mesh, total_area, total_volume, mean_depth,
                     mean_velocity, mean_froude, min_depth, max_depth,
                     min_velocity, max_velocity, hsarea,
                     hsvolume) = hydrosignature_calculation_alt(
                        delta_mesh,
                        progress_value,
                        hydrosignature_description["classhv"],
                        hyd_tin,
                        hyd_xy_node,
                        hyd_hv_node,
                        hyd_data_node,
                        hyd_data_mesh,
                        i_whole_profile,
                        return_cut_mesh=False)

                # attr
                hs_dict = {
                    "nb_mesh": nb_mesh,
                    "total_area": total_area,
                    "total_volume": total_volume,
                    "mean_depth": mean_depth,
                    "mean_velocity": mean_velocity,
                    "mean_froude": mean_froude,
                    "min_depth": min_depth,
                    "max_depth": max_depth,
                    "min_velocity": min_velocity,
                    "max_velocity": max_velocity,
                    "classhv": hydrosignature_description["classhv"]
                }
                # unitpath = "data_2d/reach_" + str(reach_number) + "/unit_" + str(unit_number)
                for key in hs_dict.keys():
                    new_data_2d[reach_number][unit_number].hydrosignature[
                        key] = hs_dict[key]
                new_data_2d[reach_number][unit_number].hydrosignature[
                    "hsarea"] = hsarea
                new_data_2d[reach_number][unit_number].hydrosignature[
                    "hsvolume"] = hsvolume

                # hs_export_mesh
                if hydrosignature_description["hs_export_mesh"]:
                    new_data_2d[reach_number][
                        unit_number].total_wet_area = total_area
                    new_data_2d[reach_number][unit_number]["mesh"][
                        "tin"] = tin_out
                    new_data_2d[reach_number][unit_number]["mesh"][
                        "i_whole_profile"] = i_whole_profile_out
                    new_data_2d[reach_number][unit_number]["node"][
                        "xy"] = node_xy_out
                    new_data_2d[reach_number][unit_number]["mesh"][
                        "data"] = DataFrame(mesh_data_out)
                    new_data_2d[reach_number][unit_number]["node"][
                        "data"] = DataFrame(node_data_out)

        if hydrosignature_description["hs_export_txt"]:
            hdf5.export_hydrosignature_txt()

        # write to initial hdf5
        hdf5.write_hydrosignature(hs_export_mesh=False)
        hdf5.close_file()

        # hs input hydraulic class save to input folder
        folder_name_new_hs = os.path.splitext(hdf5.filename)[0]
        new_hs_input_class_folder_path_out = os.path.join(
            project_properties["path_prj"], "input", folder_name_new_hs)
        if not os.path.exists(new_hs_input_class_folder_path_out):
            os.makedirs(new_hs_input_class_folder_path_out)
        sh_copy(
            os.path.join(
                hydrosignature_description["classhv_input_class_file_info"]
                ["path"],
                hydrosignature_description["classhv_input_class_file_info"]
                ["file"]),
            os.path.join(
                new_hs_input_class_folder_path_out,
                hydrosignature_description["classhv_input_class_file_info"]
                ["file"]))

        if hydrosignature_description["hs_export_mesh"]:
            # create new with new data_2d meshs
            hdf5_new = hdf5_mod.Hdf5Management(path_prj,
                                               hdf5.filename[:-4] + "_HS" +
                                               hdf5.extension,
                                               new=True,
                                               edit=True)
            new_data_2d.hvum.hydraulic_class.hdf5 = True
            new_data_2d.hvum.hydraulic_class.position = "mesh"
            if new_data_2d.hvum.hydraulic_class.name not in new_data_2d.hvum.hdf5_and_computable_list.names(
            ):
                new_data_2d.hvum.hdf5_and_computable_list.append(
                    new_data_2d.hvum.hydraulic_class)
            if hdf5_new.hdf5_type == "hydraulic":
                hdf5_new.create_hdf5_hyd(new_data_2d, new_data_2d_whole,
                                         project_properties)
            elif hdf5_new.extension == ".hab":
                hdf5_new.create_hdf5_hab(new_data_2d, new_data_2d_whole,
                                         project_properties)
            # add hs
            hdf5_new = hdf5_mod.Hdf5Management(path_prj,
                                               hdf5.filename[:-4] + "_HS" +
                                               hdf5.extension,
                                               new=False,
                                               edit=True)
            hdf5_new.get_hdf5_attributes(close_file=False)
            hdf5_new.load_units_index()
            hdf5_new.load_data_2d()
            hdf5_new.load_whole_profile()
            hdf5_new.data_2d = new_data_2d
            hdf5_new.write_hydrosignature(hs_export_mesh=True)
            hdf5_new.close_file()
    else:
        print("Error: " + hdf5.filename + " " + error)
        # warnings
        if not print_cmd:
            sys.stdout = sys.__stdout__
            if q:
                q.put(mystdout)
                sleep(0.1)  # give q.put() time to complete

    # warnings
    if not print_cmd:
        sys.stdout = sys.__stdout__
        if q:
            q.put(mystdout)
            sleep(0.1)  # give q.put() time to complete

    # prog
    progress_value.value = 100.0
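The print_cmd / q machinery above redirects everything the function prints into a StringIO buffer and hands that buffer back to the caller through a queue. A stripped-down, in-process sketch of the pattern (the worker function and queue are illustrative; the original receives the queue from a separate process):

import sys
from io import StringIO
from queue import Queue

def worker(q):
    sys.stdout = mystdout = StringIO()  # redirect prints into a buffer
    print("some warning")
    sys.stdout = sys.__stdout__         # restore the real stdout
    q.put(mystdout)                     # hand the captured buffer back

q = Queue()
worker(q)
print(q.get().getvalue(), end="")       # -> some warning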
Example #9
mode = 'train'
if mode == 'train':
    print(config.model_name)
    print('=' * 20)
    exp_dir = 'model/' + config.model_name
    if not os.path.exists(exp_dir):
        os.mkdir(exp_dir)
    with open('%s/model_architecture.txt' % exp_dir, 'w') as fwriter:
        model.summary(print_fn=fwriter.write)
    plot_model(model, to_file='%s/model.png' % exp_dir, show_shapes=True)
    from shutil import copy as sh_copy
    from os.path import abspath, basename, dirname
    file_src = abspath(__file__)
    sh_copy(file_src, '%s/%s' % (exp_dir, basename(file_src)))
    sh_copy('%s/%s' % (dirname(file_src), 'config.py'),
            '%s/%s' % (exp_dir, 'config.py'))

    # logging
    logging.basicConfig(level=logging.INFO,
                        format='%(asctime)s -  %(message)s',
                        filename='%s/logging.log' % exp_dir)
    logger = logging.getLogger(__name__)

    with open(config.train_path, 'r') as f:
        train_examples = [json.loads(line.strip()) for line in f]
    with open(config.dev_path, 'r') as f:
        dev_examples = [json.loads(line.strip()) for line in f]

    train_data = DataGenerator(data=train_examples,
Example #10
    def check_need_update_biology_models_json(self):
        # create_biology_models_dict (new)
        self.create_biology_models_dict()

        # load json (existing)
        biological_models_dict_from_json = self.load_biology_models_json()

        # check diff all
        if biological_models_dict_from_json != self.biological_models_dict:
            self.modified = True

        # check condition
        if self.modified:  # update json
            # get differences
            diff_key_list = ""
            diff_key_inverse_list = ""
            for key in biological_models_dict_from_json.keys():
                set_old = set(
                    list(map(str, biological_models_dict_from_json[key])))
                set_new = set(list(map(str, self.biological_models_dict[key])))
                # new
                set_diff = set_new - set_old
                if set_diff:
                    diff_key_list += str(set_diff) + ", "
                # old
                set_diff_inverse = set_old - set_new
                if set_diff_inverse:
                    diff_key_inverse_list += str(set_diff_inverse) + ", "

            if diff_key_list or diff_key_inverse_list:
                # new xml curve (from AppData user)
                if "xml" in diff_key_list and "user" in diff_key_list:
                    diff_list = []
                    existing_path_xml_list = list(
                        map(str, biological_models_dict_from_json["path_xml"]))
                    new_path_xml_list = list(
                        map(str, self.biological_models_dict["path_xml"]))
                    new_xml_list = list(
                        set(existing_path_xml_list) ^ set(new_path_xml_list))
                    # warning and copy to save AppData folder
                    if new_xml_list:
                        new_biology_models_save_folder = os.path.join(
                            self.user_pref_biology_models_save,
                            strftime("%d_%m_%Y_at_%H_%M_%S"))
                        if not os.path.isdir(new_biology_models_save_folder):
                            os.mkdir(new_biology_models_save_folder)
                        for new_xml_element in new_xml_list:
                            # xml
                            if os.path.exists(new_xml_element):
                                sh_copy(new_xml_element,
                                        new_biology_models_save_folder)
                                # is an image file specified in the xml file?
                                information_model_dict = bio_info_mod.get_biomodels_informations_for_database(
                                    new_xml_element)
                                if information_model_dict[
                                        "path_img"] is not None:
                                    if os.path.exists(
                                            information_model_dict["path_img"]
                                    ):
                                        sh_copy(
                                            information_model_dict["path_img"],
                                            new_biology_models_save_folder)
                                    else:
                                        information_model_dict[
                                            "path_img"] = None
                                # append
                                diff_list.append(
                                    os.path.basename(new_xml_element))
                        self.diff_list = ", ".join(
                            diff_list) + " added by user."

                # deleted xml curve (from AppData user)
                if "xml" in diff_key_inverse_list and "user" in diff_key_inverse_list:
                    diff_inverse_list = []
                    existing_path_xml_list = list(
                        map(str, biological_models_dict_from_json["path_xml"]))
                    new_path_xml_list = list(
                        map(str, self.biological_models_dict["path_xml"]))
                    removed_xml_list = list(
                        set(existing_path_xml_list) ^ set(new_path_xml_list))
                    # warning
                    if removed_xml_list:
                        for new_xml_element in removed_xml_list:
                            diff_inverse_list.append(
                                os.path.basename(new_xml_element))
                        self.diff_list = ", ".join(
                            diff_inverse_list) + " removed by user."
            else:
                self.diff_list = diff_key_list

            self.create_biology_models_json()
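Both branches use the symmetric difference of two path sets (existing ^ current) to find curves that appear in only one of the two lists. A small worked illustration with hypothetical paths:

existing = {"/appdata/user/barbel.xml", "/install/trout.xml"}
current = {"/install/trout.xml", "/appdata/user/chub.xml"}

# paths present in exactly one of the sets: one removed, one added
changed = existing ^ current  # {'/appdata/user/barbel.xml', '/appdata/user/chub.xml'}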