Example #1
File: elements.py Project: rmln/dtknv
 def get_all_exc_files(self):
     """
     Return a dict of all present exc files, keyed by short filename.
     """
     files = {}
     fs = helpers.getallfiles(self.path, 'json')
     if fs:
         for i in fs:
             # Map each short filename to its full path.
             files[helpers.filename(i)] = i
         return files
     else:
         return False
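
All of these examples revolve around a project-local filename helper (helpers.filename / h.filename). As a rough sketch of what helpers.filename and helpers.getallfiles presumably do, using only the standard library (an assumption, not the actual dtknv code):

import glob
import os

def filename(path):
    # Assumed behaviour: base name without its extension, e.g. 'a/b/c.json' -> 'c'.
    return os.path.splitext(os.path.basename(path))[0]

def getallfiles(path, extension):
    # Assumed behaviour: all files in `path` with the given extension.
    return glob.glob(os.path.join(path, '*.' + extension))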
Example #2
 def load_file_create_cells(self, f=False, initial=False):
     """
     Load f file and place items into cells.
     """
     # Flag that blocks saving when a file fails to load.
     self.SAVINGBLOCKED = False
     # These widgets do not exist during the first
     # load, so there is no point in calling their
     # destroy methods.
     if not initial:
         self.see_if_cells_are_changed()
         self.canvas.destroy()
         self.vsb.destroy()
     # Create canvas
     self.create_cells_container()
     # Load search and replace strings
     if f:
         # Call the method that loads the json file
         # and check whether it fails.
         try:
             items = self.load_exc_file(f)
             # Set this file as the last edited one
             self.set.set_last_excfile = f
         except Exception:
             # Loading failed. Inform the user and block saving, so that
             # the corrupt file is not overwritten by accident.
             messagebox.showwarning(self.lng['label_error_loadingexc'],
                                self.lng['label_error'])
             items = False
             self.menu_actions.entryconfigure(1, state='disabled')
             self.SAVINGBLOCKED = True
     else:
         items = False
         self.menu_actions.entryconfigure(1, state='normal')
     # Keep a deep copy of the items, so it is possible
     # to check later whether they have changed.
     self.items_orig = copy.deepcopy(items)
     # Place cells in the canvas
     self.draw_cells(exc=items)
     # Colorise the cells if needed
     self.colorise()
     # Update the title
     self.window.title(self.lng['window_exceptions'] + ' (%s)' %
                           helpers.filename(self.active_filename))
     # Hide the frame if there was an error in loading the file:
     if self.SAVINGBLOCKED:
         self.canvas.pack_forget()
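
The deep copy near the end of this example is presumably what lets see_if_cells_are_changed detect unsaved edits later: the working items are compared against the snapshot. A minimal, self-contained sketch of that pattern (the dictionary content is made up):

import copy

items = {'search': 'old', 'replace': 'new'}
items_orig = copy.deepcopy(items)   # snapshot taken right after loading

items['search'] = 'edited'          # user edits a cell

has_unsaved_changes = items != items_orig
print(has_unsaved_changes)          # True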
Example #3
 def _unzip(self, f):
     """
     Unzips file content into temporary folder.
     """
     # Convert the script of the filename (датотека.txt > datoteka.txt).
     # This has to be done here, or the program will not find the path.
     # fname = self._converfname(filename(f))
     fname = helpers.filename(f)
     if self.USERAM:
         self.unzipped = OfficeZIP(f)
     else:
         # Unzipped is the path to the temp subfolder where the file
         # is to be unzipped.
         maketmpdir(self.DIRTMP)
         self.unzipped = makesubdir(self.DIRTMP, fname)
         z = zipfile.ZipFile(f)
         z.extractall(self.unzipped)
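
When USERAM is off, the archive is unpacked into a per-file temp subfolder via the project's maketmpdir / makesubdir helpers. A standard-library-only sketch of the same idea, with tempfile standing in for those helpers (an assumption about their behaviour):

import os
import tempfile
import zipfile

def unzip_to_tmp(archive_path):
    # Temp subfolder named after the archive, then extract everything into it.
    name = os.path.splitext(os.path.basename(archive_path))[0]
    target = os.path.join(tempfile.mkdtemp(), name)
    os.makedirs(target, exist_ok=True)
    with zipfile.ZipFile(archive_path) as z:
        z.extractall(target)
    return target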
Example #4
        # export result
        output_result = os.path.join(os.path.dirname(args.output_file),
                                     '{}'.format(comparison))

        logging.debug('Path to file: {}'.format(output_result))
        h.export_result_to_csv(result_df, output_result)
        logger.info('Data for {} exported to csv'.format(comparison))

    return


if __name__ == "__main__":
    args = get_args()
    rule_params = h.load_json_parameter(args.file_id)
    filename = h.filename(args.input_file)

    # get logger
    logpath = os.path.join(paths.global_data_dir, args.file_id,
                           'log/divide.log')
    logger = h.get_logger(logpath)

    # get data
    data_df = pd.read_csv(args.input_file, header=0, index_col=None)

    # parameters
    values_cols_prefix = rule_params['all']['values_cols_prefix']

    # get contrast matrix
    comparison_df = pd.read_csv(args.comparison_file, index_col=0, header=0)
    comparisons = get_comparison_data(comparison_df)
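
This fragment writes each comparison's result to a path next to args.output_file. A small sketch of that path handling, with pandas' own to_csv standing in for the project's h.export_result_to_csv helper (an assumption; the paths and data are hypothetical):

import os
import pandas as pd

output_file = 'results/analysis.csv'          # hypothetical args.output_file
comparison = 'groupA_vs_groupB'               # hypothetical comparison label

# Per-comparison output path in the same directory as the main output.
output_result = os.path.join(os.path.dirname(output_file), comparison)
os.makedirs(os.path.dirname(output_result), exist_ok=True)

result_df = pd.DataFrame({'protein': ['P1', 'P2'], 'ratio': [1.2, 0.8]})
result_df.to_csv(output_result + '.csv', index=False)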
Example #5
    path2analysis_folder = os.path.join(paths.global_data_dir,
                                        args.analysis_id)
    try:
        os.mkdir(path2analysis_folder)
        logger.debug("Creating folder for this analysis")
    except FileExistsError:
        logger.debug("Analysis folder already created")

    path2error_file = os.path.join(paths.global_data_dir, args.analysis_id,
                                   args.error_file)

    logger.debug("Exporting import_error file to: {}".format(path2error_file))
    with open(path2error_file, 'w+') as json_file:
        json.dump(errors, json_file, indent=True)

    # export header
    output_sample_name = os.path.join(paths.global_data_dir, args.analysis_id,
                                      args.output_sample_name)

    logger.debug("Exporting header file to: {}".format(output_sample_name))
    fi.get_sample_name(df, output_sample_name)

    # export data
    if args.output_file:
        output_csv = args.output_file
    else:
        output_csv = os.path.join(
            paths.global_data_dir, args.analysis_id,
            "csv/{}.csv".format(h.filename(args.input_file)))
    logger.debug("Exporting converted file to: {}".format(output_csv))
    h.export_result_to_csv(df, output_csv, index_col=True)
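
The mkdir / FileExistsError pattern at the top of this example, and the joined error-file path, can be expressed compactly with os.makedirs(..., exist_ok=True). A minimal sketch with hypothetical stand-ins for paths.global_data_dir, args.analysis_id and args.error_file:

import json
import os

global_data_dir = '/tmp/data'                 # hypothetical paths.global_data_dir
analysis_id = 'run42'                         # hypothetical args.analysis_id
error_file = 'import_errors.json'             # hypothetical args.error_file

analysis_folder = os.path.join(global_data_dir, analysis_id)
os.makedirs(analysis_folder, exist_ok=True)   # replaces mkdir + FileExistsError

errors = {'row 3': 'missing value'}           # made-up error payload
path2error_file = os.path.join(analysis_folder, error_file)
with open(path2error_file, 'w') as json_file:
    json.dump(errors, json_file, indent=4)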