Example #1
    def __init__(self):
        self.fh = None
        self.d = Directories()
        self.console_log_level = logging.ERROR
        self.file_log_level = logging.INFO
        self.formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
        self.logger = logging.getLogger(self.d.program_name)
        self.logger.setLevel(logging.DEBUG)
        self.init_console()
Example #2
def main():
    if COMPILE:
        dirs = Directories(
            base_dir=os.path.dirname(os.path.realpath(sys.argv[0])))

    else:
        dirs = Directories(
            base_dir=os.path.dirname(os.path.realpath(__file__)))

    make_logger(dirs)
    mod.QuestionEngine(dirs=dirs)
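
The COMPILE switch presumably distinguishes a frozen executable (where __file__ may be unavailable) from a plain script run. A common equivalent check, shown here as an assumption rather than this project's actual code:

import os
import sys

# sys.frozen is set by freezers such as PyInstaller and cx_Freeze;
# for a frozen build the real location is that of the executable.
if getattr(sys, 'frozen', False):
    base_dir = os.path.dirname(os.path.realpath(sys.executable))
else:
    base_dir = os.path.dirname(os.path.realpath(__file__))

dirs = Directories(base_dir=base_dir)
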
Example #3
class ArchiveLogger(object):
    """docstring for ArchiveLogger"""
    def __init__(self):
        self.fh = None
        self.d = Directories()
        self.console_log_level = logging.ERROR
        self.file_log_level = logging.INFO
        self.formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
        self.logger = logging.getLogger(self.d.program_name)
        self.logger.setLevel(logging.DEBUG)
        self.init_console()

    def init_file_logging(self):
        if not self.d.check_myarchive():
            raise Exception("My Archive Does Not Exist")
        self.fh = logging.FileHandler(self.d.logger)
        self.fh.setLevel(self.file_log_level)
        self.fh.setFormatter(self.formatter)
        self.logger.addHandler(self.fh)
        self.logger.info('Log started')

    def init_console(self):
        self.ch = logging.StreamHandler()
        self.ch.setLevel(self.console_log_level)
        self.ch.setFormatter(logging.Formatter('%(message)s'))
        self.logger.addHandler(self.ch)

    def print_logger(self):
        self.check_fh()
        if self.d.check_log_exists():
            with open(self.d.logger) as f:
                print(f.read())

    def check_fh(self):
        if self.fh is None:
            self.init_file_logging()

    def log_debug(self, msg):
        self.check_fh()
        self.logger.debug(msg)

    def log_info(self, msg):
        self.check_fh()
        self.logger.info(msg)

    def log_error(self, msg):
        self.check_fh()
        self.logger.error(msg)
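
A rough usage sketch for ArchiveLogger, assuming Directories exposes the program_name, logger, check_myarchive and check_log_exists members used above (hypothetical, not from the original project):

# Hypothetical driver code: console output stays at ERROR level,
# the archive log file receives everything from INFO upwards.
log = ArchiveLogger()
log.log_info("archiving started")      # lazily attaches the FileHandler via check_fh()
log.log_debug("scanning index")        # file only: the console handler is at ERROR level
log.log_error("something went wrong")  # written to both the file and the console
log.print_logger()                     # dump the log file to stdout
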
Example #4
def restore(destPath=None):
    d = Directories()
    paths = d.paths_in_index()

    for path in paths.keys():
        sourcePath = os.path.join(d.objects, paths[path])
        # print(sourcePath)
        if destPath is None:
            dest = path
        elif (os.path.exists(destPath)):
            dest = os.path.join(destPath, path)
        else:
            print "Invalid Path"
            break
        directory = os.path.join(*os.path.split(dest)[0:-1])
        restoreFiles(dest, sourcePath, directory)
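
restoreFiles is not shown in this excerpt; a hypothetical implementation consistent with how it is called (dest, sourcePath, directory) could look like this:

import os
import shutil

def restoreFiles(dest, sourcePath, directory):
    # Hypothetical helper: recreate the destination directory if needed,
    # then copy the archived object back to its target path.
    if directory and not os.path.isdir(directory):
        os.makedirs(directory)
    shutil.copyfile(sourcePath, dest)
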
Example #5
class Task:
    def __init__(self):

        self._dirs = Directories()
        self._init_data_loader()
        self._init_network()

    def _update_dirs(self, log_folder):

        current_dir = os.path.dirname(os.path.realpath(__file__))
        network_dir = Network.directory() + log_folder
        self._dirs.update_for_task(current_dir, network_dir)

    def _init_network(self):

        self.__network = Network(data_loader=self.__data_loader,
                                 dirs=self._dirs)

    def _init_data_loader(self):
        self.__data_loader = DataLoader(dirs=self._dirs,
                                        total_num_examples=22872)

    def _init_trainer(self):

        self.__trainer = Trainer(data_loader=self.__data_loader,
                                 network=self.__network,
                                 dirs=self._dirs,
                                 ld_chkpt=True,
                                 save_freq=10000,
                                 log_freq=200,
                                 vis_freq=5000)
        return self.__trainer

    def run(self, train):

        self._init_trainer()
        log_folder = '/logged_data'

        self._update_dirs(log_folder)
        self.__network.chkpt_dir = self._dirs.chkpt_dir

        if train:
            self.__trainer.run_trainer()
        else:
            self.__trainer.run_visualiser()
        self.__trainer.sess.close()
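
A minimal sketch of how the Task above might be driven; the --train flag is illustrative and not taken from the original project:

import argparse

if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('--train', action='store_true',
                        help='run the trainer instead of the visualiser')
    args = parser.parse_args()

    task = Task()          # builds Directories, DataLoader and Network
    task.run(args.train)   # trainer when --train is given, visualiser otherwise
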
Example #6
    def __init__(self, **kwargs):
        debug_prefix = "[Main.__init__]"

        # Argv from shell
        self.argv = kwargs["argv"]

        # Empty list of flags and kflags
        self.flags = []
        self.kflags = {}

        if self.argv is not None:

            # Iterate in all args
            for arg in self.argv[1:]:

                # Is a kwarg: split only on the first "=" so the value may contain "="
                if "=" in arg:
                    key, value = arg.split("=", 1)
                    self.kflags[key] = value

                # Is a flag
                else:
                    self.flags.append(arg)

        # Print user flags from the terminal
        print(debug_prefix, "Flags are:", self.flags)
        print(debug_prefix, "Keyword flags are:", self.kflags)

        # Create classes
        self.utils = Utils(self)
        self.subprocess_utils = SubprocessUtils(self)
        self.pacman = Pacman(self)
        self.directories = Directories(self)
        self.download = Download(self)
        self.actions = Actions(self)

        # Runs every action
        self.actions.run()
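
The loop above splits argv into positional flags and key=value pairs. The same idea in isolation, as a standalone sketch independent of the surrounding class:

import sys

def parse_flags(argv):
    # Split shell arguments into plain flags and key=value pairs.
    flags, kflags = [], {}
    for arg in argv[1:]:
        if "=" in arg:
            key, value = arg.split("=", 1)  # split once so values may contain "="
            kflags[key] = value
        else:
            flags.append(arg)
    return flags, kflags

# e.g. "script.py force loglevel=3" -> (['force'], {'loglevel': '3'})
flags, kflags = parse_flags(sys.argv)
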
Example #7
def init():
    # Define Archive Location
    d = Directories()

    # check if myArchive directory exists, create it if it doesn't
    if os.path.exists(d.my_archive) and os.path.isdir(d.my_archive):
        print "Archive Directory EXISTS\t %s" % d.my_archive
    else:
        print "Archive Directory CREATED\t %s" % d.my_archive
        os.makedirs(d.my_archive)

    # check if objects directory exists, create it if it doesn't
    if os.path.exists(d.objects) and os.path.isdir(d.objects):
        print "Objects Directory EXISTS\t %s" % d.objects
    else:
        print "Objects Directory CREATED\t %s" % d.objects
        os.makedirs(d.objects)

    # check if INDEX file exists, create it if it doesn't
    if os.path.exists(d.index) and os.path.isfile(d.index):
        print "INDEX file EXISTS\t\t %s" % d.index
    else:
        print "INDEX file CREATED\t\t %s" % d.index
        open(d.index,'w').close()
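
The same existence checks can be written more compactly with os.makedirs(exist_ok=True); a sketch of an equivalent init, assuming the same Directories attributes (my_archive, objects, index) and dropping the status messages:

import os

def init():
    d = Directories()

    # Create the archive and objects directories if they do not exist yet.
    for path in (d.my_archive, d.objects):
        os.makedirs(path, exist_ok=True)

    # Create an empty INDEX file on first run.
    if not os.path.isfile(d.index):
        open(d.index, 'w').close()
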
Example #8
import h5py
from directories import Directories
import os


def add_axes_refs(obj_with_ref, obj_x_axis, obj_y_axis):
    obj_with_ref.attrs['x_axis'] = obj_x_axis.ref
    obj_with_ref.attrs['y_axis'] = obj_y_axis.ref


def add_refs_to_all_images(filename):
    with h5py.File(filename, 'a') as f:
        group = f['q_interpolated/PEN04']
        for key in list(group.keys()):
            current_group = group[key]
            x_axis = current_group['q_xy']
            y_axis = current_group['q_z']
            for k in list(current_group.keys()):
                if k.startswith('zaptime'):
                    current_group[k].attrs['x_axis'] = x_axis.ref
                    current_group[k].attrs['y_axis'] = y_axis.ref


if __name__ == '__main__':
    filename = os.path.join(Directories.get_dir_to_save_images(),
                            'whole_data.h5')
    add_refs_to_all_images(filename)
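
For the reverse direction, the stored object references can be dereferenced straight through the file handle; a minimal reading sketch, assuming the layout produced above:

import h5py

def load_image_with_axes(filename, dataset_path):
    # Return the image data together with its q_xy / q_z axes,
    # resolved from the object references stored in the attributes.
    with h5py.File(filename, 'r') as f:
        dset = f[dataset_path]
        x_axis = f[dset.attrs['x_axis']][()]  # dereference the stored h5py reference
        y_axis = f[dset.attrs['y_axis']][()]
        return dset[()], x_axis, y_axis
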
Example #9
from directories import Directories

d = Directories()


# show what files are in the archive
def listfiles(searchArg=None):
    if searchArg is None:
        print("All Files")
    else:
        print("Files Matching %s:" % searchArg)

    paths = d.paths_in_index()
    for x in paths:
        if searchArg is None or searchArg in x:
            print(x)
Example #10
    parser.add_argument("--model", help="Set model to VAE/CVAE/TDCVAE/TDCVAE2 (required)", required=True)
    parser.add_argument("--dataset", help="Set dataset to MNIST/LFW/FF/LungScans accordingly (required, not case sensitive)", required=True)
    parser.add_argument("--save_files", help="Determine if files (samples etc.) should be saved (optional, default: False)", required=False, action='store_true')
    parser.add_argument("--save_model_state", help="Determine if state of model should be saved after each epoch during training (optional, default: False)",\
        required=False, action='store_true')
    parser.add_argument('--scales', help="Enables scaling of the model as specified in model_params", default=None, action='store_true')
    parser.add_argument('--thetas', help="Enables rotations of the model as specified in model_params", default=None, action='store_true')
    args = vars(parser.parse_args())
    model_arg = args["model"]
    dataset_arg = args["dataset"]
    save_files = args["save_files"]
    save_model_state = args["save_model_state"]

    if model_arg.lower() == "vae":
        data = get_model_data_vae(dataset_arg)
        directories = Directories(model_arg.lower(), dataset_arg, data["z_dim"],\
            make_dirs=save_files)
        data_loader = DataLoader(directories, data["batch_size"], dataset_arg)
        model = Vae(data_loader.input_dim, data["hidden_dim"],\
            data["z_dim"], data["beta"], data["batch_norm"])
        solver = Solver(model, data_loader, data["optimizer"],\
            data["epochs"], data["optim_config"],\
            step_config=data["step_config"], lr_scheduler=data["lr_scheduler"],\
            save_model_state=save_model_state)
    elif model_arg.lower() == "cvae":
        data = get_model_data_cvae(dataset_arg)
        directories = Directories(model_arg.lower(), dataset_arg, data["z_dim"],\
            make_dirs=save_files)
        data_loader = DataLoader(directories, data["batch_size"], dataset_arg)
        model = Cvae(data_loader.input_dim, data["hidden_dim"], data["z_dim"],\
            data["beta"], data_loader.n_classes, data["batch_norm"])
        solver = Solver(model, data_loader, data["optimizer"], data["epochs"],\
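
The excerpt begins after the parser has been created; a plausible preamble and invocation, shown as an assumption rather than the project's actual code:

import argparse

parser = argparse.ArgumentParser(description="Train/evaluate VAE-family models")
# ... the add_argument calls shown above follow here ...

# Typical invocation (hypothetical):
#   python main.py --model VAE --dataset MNIST --save_files
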
Example #11
    def __init__(self):

        self._dirs = Directories()
        self._init_data_loader()
        self._init_network()
Example #12
    def __init__(self, utils, config, failsafe):

        debug_prefix = "[Context.__init__]"

        # When logging isn't a simple line but something like a list or dictionary
        self.indentation = "··· | "

        # Set the (static) rootfolder substitution used when rewriting session-folder paths, so the Dandere2x folder can be moved and the session still resumed
        self.rootfolder_substitution = "//ROOTFOLDER//"

        # Context needs Utils for logging and works based on a config
        self.utils = utils
        self.config = config

        # For absolute referencing
        self.ROOT = self.utils.ROOT

        # Failsafe module
        self.failsafe = failsafe

        self.utils.log(
            color, 3, debug_prefix,
            "Rootfolder substitution is [%s] on Context and context_vars file"
            % self.rootfolder_substitution)

        if "resume_session_context_vars_file" in self.config:
            self.load_vars_from_file(
                self.config["resume_session_context_vars_file"])
            return

        # Session
        self.force = self.config["danger_zone"]["force"]

        # Get the operating system we're working with
        self.os = self.utils.get_os()
        self.utils.log(colors["warning"], 1, debug_prefix,
                       "Got operating system: " + self.os)

        # Loglevel
        self.loglevel = self.config["developer"]["loglevel"]
        self.utils.log(colors["warning"], 1, debug_prefix,
                       "LOGLEVEL: [%s]" % self.loglevel)

        # # Load variables

        # # # # See settings.yaml for a guide on the settings and what they are # # # #

        # Video I/O
        self.input_file = self.config["basic"]["input_file"]
        self.output_file = self.config["basic"]["output_file"]

        # Session, "auto" is the input filename
        self.session_name = self.config["basic"]["session_name"]
        self.input_filename = self.utils.get_basename(self.input_file)

        # Windows uses ugly back slashes, failsafe
        if self.os == "windows":
            self.input_file = self.input_file.replace("/", "\\")

        # Block matching related
        self.block_size = self.config["block_matching"]["block_size"]
        self.bleed = self.config["block_matching"]["bleed"]
        self.dark_threshold = self.config["block_matching"]["dark_threshold"]
        self.bright_threshold = self.config["block_matching"][
            "bright_threshold"]
        self.upscale_full_frame_threshold = self.config["block_matching"][
            "upscale_full_frame_threshold"]

        # Upscaler settings
        self.upscaler_type = self.config["upscaler"]["type"]
        self.upscale_ratio = self.config["upscaler"]["upscale_ratio"]
        self.denoise_level = self.config["upscaler"]["denoise_level"]
        self.tile_size = self.config["upscaler"]["tile_size"]
        self.upscaler_model = self.config["upscaler"]["model"]
        self.load_proc_save = self.config["upscaler"]["load:proc:save"]
        self.w2x_converter_cpp_jobs = self.config["upscaler"][
            "w2x_converter_cpp_jobs"]

        # realsr-ncnn-vulkan must have upscale_ratio=4 as it's the only option
        if self.upscaler_type == "realsr-ncnn-vulkan":
            self.utils.log(
                color, 0, debug_prefix,
                "[WARNING] USING REALSR UPSCALER, FORCING UPSCALE_RATIO=4 FOR CONVENIENCE"
            )
            self.upscale_ratio = 4

        # See if selected upscaler type and ratio / denoise are compatible
        self.failsafe.compatible_utype_uratio(self.upscaler_type,
                                              self.upscale_ratio)
        self.failsafe.compatible_upscaler_denoise(self.upscaler_type,
                                                  self.denoise_level)

        # If the user did not send us an absolute path
        self.input_file = os.path.abspath(self.input_file)

        # Output file can be "auto", that is, prepend $UPSCALE_RATIO$x_$UPSCALER_TYPE$ to the filename
        if self.output_file == "auto":
            # This is already absolute path as we just set input_file to one
            self.output_file = self.utils.auto_output_file(
                self.input_file, self.upscale_ratio,
                self.config["upscaler"]["type"])
            self.utils.log(
                color, 1, debug_prefix,
                "Output file set to \"auto\", assigning: [%s]" %
                self.output_file)
        else:
            # Else if it was manually set, get the absolute path for it
            self.output_file = os.path.abspath(self.output_file)

        # Get the new output filename if it was set to auto
        self.output_filename = self.utils.get_basename(self.output_file)

        # Check if input is file and output file directory exist, if not create it
        self.failsafe.check_input_output(self.input_file, self.output_file)

        # # The special case where the session name is "auto",
        # so we set it according to the input file "a.mkv" -> "a"
        if self.session_name == "auto":
            self.session_name = self.utils.get_auto_session_name(
                self.input_file)

        # Stats settings
        self.average_last_N_frames = self.config["stats"][
            "average_last_N_frames"]
        self.show_stats = self.config["stats"]["show_stats"]

        # Create default variables
        self.resolution = []
        self.valid_resolution = []
        self.frame_rate = None
        self.frame_count = None
        self.resume = False
        self.last_processing_frame = 0
        self.zero_padding = 8  # We change this later on based on the frame_count
        self.total_upscale_time = 0

        # Video related variables
        self.get_video_info_method = self.config["video"][
            "get_video_info_method"]
        self.get_frame_count_method = self.config["video"][
            "get_frame_count_method"]
        self.get_frame_rate_method = self.config["video"][
            "get_frame_rate_method"]
        self.get_resolution_method = self.config["video"][
            "get_resolution_method"]

        # # FFmpeg / FFprobe related
        self.deblock_filter = self.config["ffmpeg"]["deblock_filter"]
        self.encode_codec = self.config["ffmpeg"]["encode_codec"]

        # x264 encoding
        self.x264_preset = self.config["ffmpeg"]["x264"]["preset"]
        self.x264_tune = self.config["ffmpeg"]["x264"]["tune"]
        self.x264_crf = self.config["ffmpeg"]["x264"]["crf"]

        # Dandere2x C++ specific
        self.mindisk = self.config["dandere2x_cpp"]["mindisk"]
        self.write_debug_video = int(
            self.config["dandere2x_cpp"]["write_debug_video"])
        self.show_debug_video_realtime = int(
            self.config["dandere2x_cpp"]["show_debug_video_realtime"])
        self.show_block_matching_stats = int(
            self.config["dandere2x_cpp"]["show_block_matching_stats"])
        self.only_run_dandere2x_cpp = int(
            self.config["dandere2x_cpp"]["only_run_dandere2x_cpp"])

        # # Developer variables

        # How much time in seconds to wait for waiting operations like until_exist()
        self.wait_time = self.config["developer"]["wait_time_exists"]
        self.upscaler_wait_for_residuals = self.config["developer"][
            "upscaler_wait_for_residuals"]

        # The range we'll delete the residuals
        self.safety_ruthless_residual_eliminator_range = self.config[
            "developer"]["safety_ruthless_residual_eliminator_range"]

        # Will we be writing logs?
        self.write_log = self.config["developer"]["write_log"]

        # Vapoursynth settings
        self.use_vapoursynth = self.config["vapoursynth"]["enabled"]
        self.vapoursynth_pre = self.config["vapoursynth"]["pre"]
        self.vapoursynth_pos = self.config["vapoursynth"]["pos"]

        # Might sound odd, but disabling the upscaler is handy for debugging: nothing gets upscaled and mindisk doesn't remove files
        self.enable_upscaler = self.config["debug"]["enable_upscaler"]

        # # # # Session directories / files # # # #

        self.utils.log(color, 4, debug_prefix,
                       "Configuring context.* directories and static files")

        # Directories handles a few tweaks to the Dandere2x runtime for us
        self.directories = Directories()
        self.directories.input_filename = self.input_filename
        self.directories.session_name = self.session_name
        self.directories.generate_dirs()

        self.plain_dirs = self.directories.plain_dirs
        self.plain_files = self.directories.plain_files

        # Resolve the session directories / files into explicit attributes;
        # this avoids dynamic attribute-setting surprises and helps autocompletion

        self.residual = self.directories.get("residual")
        self.externals = self.directories.get("externals")
        self.upscaled = self.directories.get("upscaled")
        self.partial = self.directories.get("partial")
        self.processing = self.directories.get("processing")
        self.merged = self.directories.get("merged")
        self.session = self.directories.get("session")
        self.sessions_folder = self.directories.get("sessions_folder")

        self.upscaled_video = self.directories.get("upscaled_video")
        self.resume_video_frame = self.directories.get("resume_video_frame")
        self.debug_video = self.directories.get("debug_video")
        self.context_vars = self.directories.get("context_vars")
        self.temp_vpy_script = self.directories.get("temp_vpy_script")
        self.vapoursynth_processing = self.directories.get(
            "vapoursynth_processing")
        self.joined_audio = self.directories.get("joined_audio")
        self.partial_video = self.directories.get("partial_video")
        self.logfile = self.directories.get("logfile")
        self.logfile_last_session = self.directories.get(
            "logfile_last_session")