Example #1
0
def main():
  parser = argparse.ArgumentParser(
      description='Prints all non-system dependencies for the given module.')
  parser.add_argument('module',
                      help='The python module to analyze.')
  parser.add_argument('--root', default='.',
                      help='Directory to make paths relative to.')
  parser.add_argument('--output',
                      help='Write output to a file rather than stdout.')
  parser.add_argument('--whitelist', default=[], action='append',
                      dest='whitelists',
                      help='Recursively include all non-test python files '
                      'within this directory. May be specified multiple times.')
  options = parser.parse_args()
  # Replace the path entry for print_python_deps.py with the one for the given
  # module.
  sys.path[0] = os.path.dirname(options.module)
  imp.load_source('NAME', options.module)

  paths_set = _ComputePythonDependencies()
  for path in options.whitelists:
    paths_set.update(os.path.abspath(p) for p in _FindPythonInDirectory(path))

  paths = [os.path.relpath(p, options.root) for p in paths_set]

  normalized_cmdline = _NormalizeCommandLine(options)
  out = open(options.output, 'w') if options.output else sys.stdout
  with out:
    out.write('# Generated by running:\n')
    out.write('#   %s\n' % normalized_cmdline)
    for path in sorted(paths):
      out.write(path + '\n')
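
The imp module used throughout these examples was deprecated in Python 3.4 and removed in Python 3.12. Below is a minimal sketch of the imp.load_source('NAME', options.module) call above rewritten with importlib; the helper name is illustrative and not part of the original script.

import importlib.util
import sys

def load_source_compat(module_name, path):
    # Roughly what imp.load_source() did: execute an arbitrary file as a module.
    spec = importlib.util.spec_from_file_location(module_name, path)
    module = importlib.util.module_from_spec(spec)
    sys.modules[module_name] = module   # imp.load_source also registered the module
    spec.loader.exec_module(module)     # run the file in the new module's namespace
    return module
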
Example #2
0
    def loadConfig(self):
        home = expanduser("~")
        self.home = '%s/workspace/StandaloneTest'%home
        try:
            with open('%s/configs/machine_config.json'%self.home) as data_file:
                config = json.load(data_file)

            if(self.kernel_name == "amber"):
                Kconfig = imp.load_source('Kconfig', './configs/amber.wcfg')
            elif(self.kernel_name == "coco"):
                Kconfig = imp.load_source('Kconfig', './configs/coco.wcfg')
            elif(self.kernel_name == "gromacs"):
                Kconfig = imp.load_source('Kconfig', './configs/gromacs.wcfg')
            elif(self.kernel_name == "lsdmap"):
                Kconfig = imp.load_source('Kconfig', './configs/lsdmap.wcfg')

            m_cfg = config["machine_configs"][self.resource]
            k_cfg = config["kernal_configs"][self.kernel_name]

            #-------------------------------------------------------
            #Machine configs
            self.wdir = m_cfg["working_dir"]
            self.wall_time_limit = m_cfg["wall_time"]
            self.email = m_cfg["email"]
            self.queue = m_cfg["queue"]
            self.uname = m_cfg["username"]

            #-------------------------------------------------------
            #Kernal config
            self.exe = k_cfg["executable"]
            self.pre_exec = Kconfig.stampede_module
            self.environment = Kconfig.stampede_environment
        except Exception:
            print 'error'
            raise
Example #3
0
    def __init__(self, options, paths, cache=True):
        """Initialize a Hairball instance."""
        self.options = options
        self.paths = paths

        if options.kurt_plugin:
            for kurt_plugin in options.kurt_plugin:
                failure = False
                if kurt_plugin.endswith('.py') and os.path.isfile(kurt_plugin):
                    module = os.path.splitext(os.path.basename(kurt_plugin))[0]
                    try:
                        load_source(module, kurt_plugin)
                    except Exception:  # TODO: Enumerate possible exceptions
                        failure = True
                else:
                    try:
                        importlib.import_module(kurt_plugin)
                    except ImportError:
                        failure = True
                if failure and not options.quiet:
                    print('Could not load Kurt plugin: {}'.format(kurt_plugin))

        # Initialization Data
        if cache is True:
            self.cache = KurtCache()
        elif cache:
            self.cache = cache
        else:
            self.cache = False
        self.plugins = []
        self.extensions = [x.extension for x in
                           kurt.plugin.Kurt.plugins.values()]
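
The constructor above accepts a Kurt plugin either as a path to a *.py file or as an importable module name. A short Python 3 sketch of that dispatch follows; the load_plugin helper is hypothetical and only illustrates the pattern.

import importlib
import importlib.util
import os

def load_plugin(name_or_path):
    # A *.py path is executed from file; anything else is treated as a dotted
    # module name and imported normally.
    if name_or_path.endswith('.py') and os.path.isfile(name_or_path):
        module_name = os.path.splitext(os.path.basename(name_or_path))[0]
        spec = importlib.util.spec_from_file_location(module_name, name_or_path)
        module = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(module)
        return module
    return importlib.import_module(name_or_path)
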
Example #4
0
def import_module(module_name, path):
    if six.PY3:
        spec = importlib.util.spec_from_file_location(module_name, path)
        module = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(module)
    elif six.PY2:
        imp.load_source(module_name, path)
Example #5
0
def project(args):
    """
    Update user settings with the settings of an existing PyScaffold project

    :param args: command line parameters as :obj:`argparse.Namespace`
    :return: updated command line parameters as :obj:`argparse.Namespace`
    """
    args = copy.copy(args)
    if not os.path.exists(args.project):
        raise RuntimeError("Project {project} does not"
                           " exist!".format(project=args.project))
    imp.load_source("versioneer", os.path.join(args.project, "versioneer.py"))
    # Generate setup with random module name since this function might be
    # called several times (in unittests) and imp.load_source seems to
    # not properly reload an already loaded file.
    mod_name = "setup_{rand}".format(rand=random.getrandbits(32))
    setup = imp.load_source(mod_name, os.path.join(args.project, "setup.py"))
    if not args.description:
        args.description = setup.DESCRIPTION
    if not args.license:
        args.license = setup.LICENSE
    if not args.url:
        args.url = setup.URL
    if not args.junit_xml:
        args.junit_xml = utils.safe_get(setup, "JUNIT_XML")
    if not args.coverage_xml:
        args.coverage_xml = utils.safe_get(setup, "COVERAGE_XML")
    if not args.coverage_html:
        args.coverage_html = utils.safe_get(setup, "COVERAGE_HTML")
    args.package = setup.MAIN_PACKAGE
    args.console_scripts = utils.list2str(setup.CONSOLE_SCRIPTS, indent=19)
    args.classifiers = utils.list2str(setup.CLASSIFIERS, indent=15)

    return args
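
The comment above works around imp.load_source() caching by generating a random module name. With importlib, each call can build an independent module object without touching sys.modules, which avoids the caching problem entirely. A minimal sketch, assuming the caller only needs the loaded module's attributes (e.g. DESCRIPTION, LICENSE):

import importlib.util

def load_fresh(path, name='setup'):
    # Every call produces a new module object and never registers it, so
    # repeated loads of the same setup.py cannot interfere with each other.
    spec = importlib.util.spec_from_file_location(name, path)
    module = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(module)
    return module
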
Example #6
0
 def Main():
     try:
         if sys.argv:
             imp.load_source("__main__", os.path.abspath(sys.argv[0]))
     except Exception, e:
         main.mainloop.exception = sys.exc_info()
         raise
Example #7
0
    def run_action(self, task):
        if task["type"] == "room_message":
            self.bot.send_room_message(task["room"]["room_id"], task["content"], *task["args"], **task["kwargs"])
        elif task["type"] == "direct_message":
            user = self.bot.get_user_by_jid(task["target_jid"])
            self.bot.send_direct_message(user["hipchat_id"], task["content"], *task["args"], **task["kwargs"])
        elif task["type"] == "periodic_task":
            # Run the task
            module_info = self.plugin_modules_library[task["module_name"]]
            module = imp.load_source(module_info["name"], module_info["file_path"])
            cls = getattr(module, task["class_name"])
            fn = getattr(cls(), task["function_name"])

            thread = threading.Thread(target=fn)
            thread.start()

            # Schedule the next one.
            self.bot.add_periodic_task(
                task["module_name"],
                task["class_name"],
                task["function_name"],
                task["sched_args"],
                task["sched_kwargs"],
                ignore_scheduler_lock=True
            )
        elif task["type"] == "random_task":
            # Run the task
            module_info = self.plugin_modules_library[task["module_name"]]
            module = imp.load_source(module_info["name"], module_info["file_path"])
            cls = getattr(module, task["class_name"])
            fn = getattr(cls(), task["function_name"])

            thread = threading.Thread(target=fn)
            thread.start()
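
Both the periodic_task and random_task branches follow the same pattern: load the plugin file, look up a class, and run one of its methods on a background thread. A self-contained sketch of that pattern with importlib in place of imp.load_source; the function name and parameters are placeholders, not part of the bot's API.

import importlib.util
import threading

def run_plugin_method(module_name, file_path, class_name, function_name):
    # Load the plugin file, instantiate the named class, and run one of its
    # methods on a background thread, as the branches above do.
    spec = importlib.util.spec_from_file_location(module_name, file_path)
    module = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(module)
    cls = getattr(module, class_name)
    fn = getattr(cls(), function_name)
    thread = threading.Thread(target=fn)
    thread.start()
    return thread
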
Example #8
0
def fix_sandbox():
    """
        This is the nastiest thing in the world...

        This WSGI middleware is the first chance we get to hook into anything. On the dev_appserver,
        the Python sandbox will already have been initialized by this point. The sandbox replaces
        modules like subprocess and select, and disallows _sqlite3. These things are really REALLY
        useful for development.

        So here we dismantle parts of the sandbox. Firstly we add _sqlite3 to the allowed C modules.

        This only happens on the dev_appserver (it would simply break on live). Everything is checked
        so that changes are only made if they haven't been made already.
    """

    if on_production():
        return

    from google.appengine.tools.devappserver2.python import sandbox

    if '_sqlite3' not in sandbox._WHITE_LIST_C_MODULES:
        fix_c_whitelist()

        # Reload the system socket.py, because of bug #9246
        import imp
        import os
        import ast

        psocket = os.path.join(os.path.dirname(ast.__file__), 'socket.py')
        imp.load_source('socket', psocket)
Example #9
0
 def _compile_from_file(self, path, filename):
     if path is not None:
         util.verify_directory(os.path.dirname(path))
         filemtime = os.stat(filename)[stat.ST_MTIME]
         if not os.path.exists(path) or \
                     os.stat(path)[stat.ST_MTIME] < filemtime:
             _compile_module_file(
                         self, 
                         open(filename, 'rb').read(), 
                         filename, 
                         path)
         module = imp.load_source(self.module_id, path, open(path, 'rb'))
         del sys.modules[self.module_id]
         if module._magic_number != codegen.MAGIC_NUMBER:
             _compile_module_file(
                         self, 
                         open(filename, 'rb').read(), 
                         filename, 
                         path)
             module = imp.load_source(self.module_id, path, open(path, 'rb'))
             del sys.modules[self.module_id]
         ModuleInfo(module, path, self, filename, None, None)
     else:
         # template filename and no module directory, compile code
         # in memory
         code, module = _compile_text(
                             self, 
                             open(filename, 'rb').read(), 
                             filename)
         self._source = None
         self._code = code
         ModuleInfo(module, None, self, filename, code, None)
     return module
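
The method above recompiles the generated module whenever the template file is newer, loads it, and immediately removes it from sys.modules. A stripped-down sketch of that staleness check and load-and-forget step; recompile stands in for Mako's _compile_module_file and is not part of its API.

import importlib.util
import os
import sys

def load_if_fresh(module_id, generated_path, template_path, recompile):
    # Rebuild the generated module when it is missing or older than its template.
    if (not os.path.exists(generated_path)
            or os.path.getmtime(generated_path) < os.path.getmtime(template_path)):
        recompile(template_path, generated_path)
    spec = importlib.util.spec_from_file_location(module_id, generated_path)
    module = importlib.util.module_from_spec(spec)
    sys.modules[module_id] = module
    spec.loader.exec_module(module)
    del sys.modules[module_id]   # mirror the original and keep sys.modules clean
    return module
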
Example #10
0
def GapAndFluxAtEnergy(SplineDir, Energy, NPoints=50):


  Function_Flux = [Harmonic_0_NoFunction for i in range(NPoints)]
  Function_Gap  = [Harmonic_0_NoFunction for i in range(NPoints)]

  for i in range(NPoints):
    hn = i * 2 + 1
    try:
      a = imp.load_source('a', SplineDir + '/Harmonic_' + str(hn) + '_Flux_vs_Energy_Spline.py')
      methodToCall = getattr(a, 'Harmonic_' + str(hn) + '_Flux_vs_Energy_Spline')
      Function_Flux[i] = methodToCall
      a = imp.load_source('a', SplineDir + '/Harmonic_' + str(hn) + '_Gap_vs_Energy_Spline.py')
      methodToCall = getattr(a, 'Harmonic_' + str(hn) + '_Gap_vs_Energy_Spline')
      Function_Gap[i] = methodToCall
    except:
      pass


  Results = []
  for i in range(NPoints):
    hn   = 2 * i + 1
    Flux = Function_Flux[i](Energy)
    Gap  = Function_Gap[i](Energy)
    if Flux != 0:
      Results.append([Gap, Flux, hn])

  print len(Results), ' Results found'
  print '[Gap, Flux, Harmonic]'

  Results.sort(key=lambda x: int(x[1]))
  Results.reverse()

  return Results
Example #11
0
  def __init__(self, filename, translator):
    self.translator = translator
    parser = GIRParser(False)
    parser.parse(filename)

    self.path = os.path.dirname(os.path.abspath(filename))
    self.package = os.path.basename(self.path)

    self.func_spec = {}
    func_spec_file = os.path.join(self.path, 'func_spec.py')
    func_spec_module = imp.load_source('func_spec', func_spec_file)
    self.func_spec = func_spec_module.func_specs

    config_file = os.path.join(self.path, 'config.py')
    config_module = imp.load_source('config', config_file)
    for name in dir(config_module):
      if name.startswith('_'): continue
      if name[0].islower(): continue
      setattr(self, name.lower(), getattr(config_module, name))

    self.namespace = parser.get_namespace()
    self.module_name = self.namespace.name.lower()
    self.translator.namespaces.add(self.namespace.name)
    self.pkgconfig_packages = list(parser.get_pkgconfig_packages())
    self.includes = list(parser.get_c_includes())
    self.prefixes = self.namespace.symbol_prefixes

    self.functions = []
    self.const_symbols = set()

    self.class_types = {}
    self.record_types = {}

    self.exported_functions = set()
    self.functions_to_handle = []
Example #12
0
    def do_POST(self):
        if self.path != '/github':
            self.do_the_404()
            return

        length = int(self.headers['Content-Length'])
        post_data = urlparse.parse_qs(self.rfile.read(length).decode('utf-8'))

        if post_data.has_key('payload') and len(post_data['payload']) > 0:
            payload = json.loads(post_data['payload'][0])
            commits = payload.get('commits', None)
            if commits != None and len(commits) > 0:
                url = '/'.join(commits[0].get('url', None).split('/')[:-2])
                print url
                author = commits[0].get('author', None).get('name', None)
                message = commits[0].get('message', None)
                print "<%s>: %s" % (author, message)
                    
        self.send_response(200)
        self.send_header('Content-type', 'text/plain')
        self.end_headers()
        self.wfile.write('ONORE AL COMMENDATORE')

        cwd = os.getcwd()
        os.chdir(roba[url])
        subprocess.check_call(['git', 'pull'])
        imp.load_source('motoraggio', '%s/motoraggio.py' % roba[url])
        import motoraggio
        motoraggio.smazza()
        del motoraggio
        os.chdir(cwd)
Example #13
0
def check_import(filename, outfile):

    prefix, suffix = os.path.splitext(filename)
    dirname, basename = os.path.split(prefix)

    if basename in EXCLUDE:
        return

    if os.path.exists(prefix + ".pyc"):
        os.remove(prefix + ".pyc")

    # ignore script with pyximport for now, something does not work
    # which can lead to errors in downstream files. Issues for
    # example:
    # When a pyximport script is imported before one that imports a module
    # with a cython extension is being re-compiled, but without the proper
    # flags.
    blob = open(filename).read()
    if "import pyximport" in blob:
        return

    try:
        imp.load_source(basename, filename)

    except ImportError, msg:
        outfile.write("FAIL %s\n%s\n" % (basename, msg))
        outfile.flush()
        traceback.print_exc(file=outfile)
        ok_(False, '%s scripts/modules - ImportError: %s' %
            (basename, msg))
Example #14
0
def load_modules_from_hypes(hypes):
    """Load all modules from the files specified in hypes.

    Namely the modules loaded are:
    input_file, architecture_file, objective_file, optimizer_file

    Parameters
    ----------
    hypes : dict
        Hyperparameters

    Returns
    -------
    data_input, arch, objective, solver
    """
    base_path = hypes['dirs']['base_path']
    _add_paths_to_sys(hypes)
    f = os.path.join(base_path, hypes['model']['input_file'])
    data_input = imp.load_source("input", f)
    f = os.path.join(base_path, hypes['model']['architecture_file'])
    arch = imp.load_source("arch", f)
    f = os.path.join(base_path, hypes['model']['objective_file'])
    objective = imp.load_source("objective", f)
    f = os.path.join(base_path, hypes['model']['optimizer_file'])
    solver = imp.load_source("solver", f)

    return data_input, arch, objective, solver
Example #15
0
def load_modules_from_logdir(logdir):
    """Load hypes from the logdir.

    Namely the modules loaded are:
    input_file, architecture_file, objective_file, optimizer_file

    Parameters
    ----------
    logdir : string
        Path to logdir

    Returns
    -------
    data_input, arch, objective, solver
    """
    model_dir = os.path.join(logdir, "model_files")
    f = os.path.join(model_dir, "data_input.py")
    # TODO: create warning if file f does not exists
    data_input = imp.load_source("input", f)
    f = os.path.join(model_dir, "architecture.py")
    arch = imp.load_source("arch", f)
    f = os.path.join(model_dir, "objective.py")
    objective = imp.load_source("objective", f)
    f = os.path.join(model_dir, "solver.py")
    solver = imp.load_source("solver", f)

    return data_input, arch, objective, solver
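
Examples #14 and #15 both load a fixed set of source files under short module names. A hedged, generic sketch of the same idea that maps names to file paths and returns the loaded modules in a dict; the helper and its arguments are illustrative only.

import importlib.util
import os

def load_model_files(base_path, name_to_file):
    # e.g. name_to_file = {"input": "data_input.py", "arch": "architecture.py"}
    modules = {}
    for name, rel_path in name_to_file.items():
        path = os.path.join(base_path, rel_path)
        if not os.path.isfile(path):
            raise FileNotFoundError(path)   # the TODO above suggests at least a warning
        spec = importlib.util.spec_from_file_location(name, path)
        module = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(module)
        modules[name] = module
    return modules
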
Example #16
0
def get_style_module(type_, lang, style, to_lang=None):
    """Return a parsing/writing/converting module. """
    cfg = config.get_cfg(['lang', 'develop', 'version'])
    config.update_single(cfg, 'lang', DEFAULTS)
    key, name, modname = _get_info(cfg, type_, lang, style, to_lang)
    if 'develop' in cfg:
        try:
            path = cfg['develop'][key]
            if path[0] != '/':
                path = '%s/%s' % (config.CONFIG['path'], path)
            return load_source(modname, path)
        except (KeyError, IOError):
            pass
    versions = []
    for base in LEXOR_PATH:
        if 'version' in cfg:
            try:
                path = '%s/%s-%s.py' % (base, name, cfg['version'][key])
            except KeyError:
                versions += glob('%s/%s*.py' % (base, name))
                path = '%s/%s.py' % (base, name)
        else:
            versions += glob('%s/%s*.py' % (base, name))
            path = '%s/%s.py' % (base, name)
        try:
            return load_source(modname, path)
        except IOError:
            continue
    try:
        mod = load_source(modname, versions[0])
        mod.VERSIONS = versions
        return mod
    except (IOError, IndexError):
        raise ImportError("lexor module not found: %s" % name)
Example #17
0
    def f(self):
        imp = imp2 = None
        try:
            import imp

        except ImportError:
            import importlib.machinery as imp2

        try:
            if imp:
                imp.load_source(test_name, fn)
            else:
                imp2.SourceFileLoader(test_name, fn).load_module()

        except example.util.DownloadError:
            raise unittest.SkipTest('could not download required data file')

        except ExternalProgramMissing as e:
            raise unittest.SkipTest(str(e))

        except ImportError as e:
            raise unittest.SkipTest(str(e))

        except topo.AuthenticationRequired as e:
            raise unittest.SkipTest('cannot download topo data (no auth credentials)')

        except Exception as e:
            raise e
Example #18
0
    def load_seed(self, file):
        context = self.context
        _,tail = os.path.split(file)
        modname = tail[:-3]
        if( file[-2:] == "py"):
            logging.debug("Found seed: "+file)
            file_header = ""
            fh = open(file, "r")
            fhd = fh.read()
            m = re.search('##START-CONF(.+?)##END-CONF(.*)', fhd, re.S)

            if m:
                conf = m.group(1).replace("##","")
                if conf:
                    d = json.loads(conf)
                    if not "auto-load" in d.keys() or d["auto-load"] == True:
                        imp.load_source(modname,file)

                        klass = PmkSeed.hplugins[modname](context)
                        PmkSeed.iplugins[modname] = klass
                        klass.pre_load(d)
                        klass.on_load()
                        klass.post_load()


        return modname
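
The seed loader above reads a JSON block embedded between ##START-CONF and ##END-CONF markers before deciding whether to import the file. A small sketch of just that header-parsing step; the function name is hypothetical.

import json
import re

def read_seed_conf(path):
    # Extract the JSON block between ##START-CONF and ##END-CONF, stripping the
    # leading '##' comment markers, as the loader above does before importing.
    with open(path, 'r') as fh:
        match = re.search(r'##START-CONF(.+?)##END-CONF', fh.read(), re.S)
    if not match:
        return None
    return json.loads(match.group(1).replace('##', ''))

The plugin is then imported only when a config block is present and its "auto-load" key is absent or True, mirroring the check above.
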
Example #19
0
    def loadModule(self, type, dirpath, filename):
        module_dict = {}
        mod_name = filename.split('.')[0]
        mod_dispname = '/'.join(re.split('/modules/' + type, dirpath)[-1].split('/') + [mod_name])
        mod_loadname = mod_dispname.replace('/', '_')
        mod_loadpath = os.path.join(dirpath, filename)
        mod_file = open(mod_loadpath)
        try:
            # import the module into memory
            imp.load_source(mod_loadname, mod_loadpath, mod_file)
            # find the module and make an instance of it
            _module = __import__(mod_loadname)
            _class = getattr(_module, mod_name)
            _instance = _class(self.config, self.display, self.modulelock)
            valid = True
            module_dict = {'name': mod_name,
                               'fingerprint': _instance.getFingerprint(),
                               'response': _instance.getResponse(),
                               'valid': True,
                               'somethingCool': _instance.doSomethingCool()}
            self.coolness[mod_dispname] = _instance

        except Exception as e:
            # notify the user of errors
            print e
            return None
        return module_dict
Example #20
0
    def Activated(self):
        #Grab our code editor so we can interact with it
        mw = FreeCADGui.getMainWindow()
        cqCodePane = mw.findChild(QtGui.QPlainTextEdit, "cqCodePane")

        #Clear the old render before re-rendering
        clearActiveDocument()

        #Save our code to a tempfile and render it
        tempFile = tempfile.NamedTemporaryFile(delete=False)
        tempFile.write(cqCodePane.toPlainText().encode('utf-8'))
        tempFile.close()

        docname = os.path.splitext(os.path.basename(cqCodePane.file.path))[0]

        #If the matching 3D view has been closed, we need to open a new one
        try:
           FreeCAD.getDocument(docname)
        except:
            FreeCAD.newDocument(docname)

        #We import this way because using execfile() causes non-standard script execution in some situations
        imp.load_source('temp_module', tempFile.name)

        msg = QtGui.QApplication.translate(
            "cqCodeWidget",
            "Executed ",
            None,
            QtGui.QApplication.UnicodeUTF8)
        FreeCAD.Console.PrintMessage(msg + cqCodePane.file.path + "\r\n")
Example #21
0
    def testBasic(self):
        # NOTE: In this test, cmd-line args to nosetests will also end up getting "parsed" by ilastik.
        #       That shouldn't be an issue, since the pixel classification workflow ignores unrecognized options.
        #       See if __name__ == __main__ section, below.
        args = "--project=" + self.PROJECT_FILE
        args += " --headless"
        args += " --sys_tmp_dir=/tmp"

        # Batch export options
        args += " --output_format=hdf5"
        args += " --output_filename_format={dataset_dir}/{nickname}_prediction.h5"
        args += " --output_internal_path=volume/pred_volume"
        args += " " + self.SAMPLE_DATA

        sys.argv = ['ilastik.py'] # Clear the existing commandline args so it looks like we're starting fresh.
        sys.argv += args.split()

        # Start up the ilastik.py entry script as if we had launched it from the command line
        ilastik_entry_file_path = os.path.join( os.path.split( ilastik.__file__ )[0], "../ilastik.py" )
        imp.load_source( 'main', ilastik_entry_file_path )
        
        # Examine the output for basic attributes
        output_path = self.SAMPLE_DATA[:-4] + "_prediction.h5"
        with h5py.File(output_path, 'r') as f:
            assert "/volume/pred_volume" in f
            pred_shape = f["/volume/pred_volume"].shape
            # Assume channel is last axis
            assert pred_shape[:-1] == self.data.shape[:-1], "Prediction volume has wrong shape: {}".format( pred_shape )
            assert pred_shape[-1] == 2, "Prediction volume has wrong shape: {}".format( pred_shape )
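
The test drives ilastik by loading its entry script with imp.load_source('main', ...) after rewriting sys.argv. On Python 3, runpy.run_path offers a comparable way to execute a script file; a sketch follows, noting that the original deliberately loads under the name 'main' rather than '__main__', so whether a script's __main__ guard should fire depends on the script under test.

import runpy
import sys

def run_script(script_path, argv):
    # Execute a script file as if it were launched from the command line,
    # restoring sys.argv afterwards.  Paths and arguments are placeholders.
    old_argv = sys.argv
    sys.argv = [script_path] + list(argv)
    try:
        return runpy.run_path(script_path, run_name='__main__')
    finally:
        sys.argv = old_argv
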
Example #22
0
    def extract_parser(self):
        parsers = []

        # Try exception-catching first; this should always work
        # Store prior to monkeypatch to restore
        parse_args_unmonkey = argparse.ArgumentParser.parse_args
        argparse.ArgumentParser.parse_args = parse_args_monkeypatch

        try:
            exec(
                self.script_source,
                {
                    'argparse': argparse,
                    '__name__': '__main__',
                    '__file__': self.script_path,
                 }
            )
        except ClintoArgumentParserException as e:
            # Catch the generated exception, passing the ArgumentParser object
            parsers.append(e.parser)
        except:
            sys.stderr.write('Error while trying exception-catch method on {0}:\n'.format(self.script_path))
            self.error = '{0}\n'.format(traceback.format_exc())
            sys.stderr.write(self.error)

        argparse.ArgumentParser.parse_args = parse_args_unmonkey

        if not parsers:
            try:
                module = imp.load_source('__name__', self.script_path)
            except:
                sys.stderr.write('Error while loading {0}:\n'.format(self.script_path))
                self.error = '{0}\n'.format(traceback.format_exc())
                sys.stderr.write(self.error)
            else:
                main_module = module.main.__globals__ if hasattr(module, 'main') else globals()
                parsers = [v for i, v in chain(six.iteritems(main_module), six.iteritems(vars(module)))
                           if issubclass(type(v), argparse.ArgumentParser)]
        if not parsers:
            f = tempfile.NamedTemporaryFile()
            try:
                ast_source = source_parser.parse_source_file(self.script_path)
                python_code = source_parser.convert_to_python(list(ast_source))
                f.write(six.b('\n'.join(python_code)))
                f.seek(0)
                module = imp.load_source('__main__', f.name)
            except:
                sys.stderr.write('Error while converting {0} to ast:\n'.format(self.script_path))
                self.error = '{0}\n'.format(traceback.format_exc())
                sys.stderr.write(self.error)
            else:
                main_module = module.main.__globals__ if hasattr(module, 'main') else globals()
                parsers = [v for i, v in chain(six.iteritems(main_module), six.iteritems(vars(module)))
                       if issubclass(type(v), argparse.ArgumentParser)]
        if not parsers:
            sys.stderr.write('Unable to identify ArgParser for {0}:\n'.format(self.script_path))
            return

        self.is_valid = True
        self.parser = parsers[0]
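
The first strategy above temporarily replaces argparse.ArgumentParser.parse_args with a monkeypatched version that raises an exception carrying the parser object. A self-contained sketch of that trick; the exception and function names here are illustrative, not Clinto's.

import argparse

class ParserCaptured(Exception):
    def __init__(self, parser):
        self.parser = parser

def _capture_parse_args(self, *args, **kwargs):
    # Stand-in for parse_args: hand the parser back instead of parsing argv.
    raise ParserCaptured(self)

def capture_parser(source, path='<script>'):
    original = argparse.ArgumentParser.parse_args
    argparse.ArgumentParser.parse_args = _capture_parse_args
    try:
        exec(compile(source, path, 'exec'), {'__name__': '__main__', '__file__': path})
    except ParserCaptured as exc:
        return exc.parser
    finally:
        argparse.ArgumentParser.parse_args = original
    return None

Calling capture_parser(open(script_path).read(), script_path) returns the first parser the script builds, provided the script actually reaches parse_args() when run with __name__ == '__main__'.
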
Example #23
0
def _agent_set(hyperparams_file):
    try:
        for i in range(1):
            if (i == 0):
                for j in np.arange(0,3,0.1):
                    hyperparams = imp.load_source('hyperparams0', hyperparams_file)
                    hyperparams.agent["rk"] = j
                    gps = GPSMain(hyperparams.config)
                    global_cost_counter = gps.run(itr_load=None)
                    string = "agent # rk # {0}".format(j)
                    _write_to_csv("python/gps/hyperparam_data/agent_set.csv", [string], global_cost_counter)

            if (i == 1):
                for j in np.arange(0,0.2,0.01):
                    hyperparams = imp.load_source('hyperparams0', hyperparams_file)
                    hyperparams.agent["dt"] = j
                    gps = GPSMain(hyperparams.config)
                    global_cost_counter = gps.run(itr_load=None)
                    string = "agent # dt # {0}".format(j)
                    _write_to_csv("python/gps/hyperparam_data/agent_set.csv", [string], global_cost_counter)

            if (i == 2):
                for j in np.arange(1,50,1):
                    hyperparams = imp.load_source('hyperparams0', hyperparams_file)
                    hyperparams.agent["substeps"] = j
                    gps = GPSMain(hyperparams.config)
                    global_cost_counter = gps.run(itr_load=None)
                    string = "agent # substeps # {0}".format(j)
                    _write_to_csv("python/gps/hyperparam_data/agent_set.csv", [string], global_cost_counter)


    except Exception as e:
        print("exception in agent data set")
        # _write_to_csv("python/gps/hyperparam_data/agent_set.csv", [e], [])
        _write_to_csv("python/gps/hyperparam_data/agent_set.csv", ['Error >>>>>'], [])          # >>>>> 5 times to demark the end of the error
Example #24
0
    def _load_reader_app_overlays(self, cls_name):
        '''
        Load reader app overlay methods from the resource file
        readers/<name>_overlays.py
        '''
        self._log_location("'%s'" % cls_name)

        # Store the raw source to a temp file, import it
        if cls_name == "development_mode":
            do = self.prefs.get('development_overlay', None)
            self._log("loading development_overlay %s" % repr(do))
            overlay = imp.load_source("temporary_overlay_methods", do)
        else:
            overlay_source = 'readers/%s_overlays.py' % cls_name
            basename = re.sub('readers/', '', overlay_source)
            tmp_file = os.path.join(self.temp_dir, basename)
            with open(tmp_file, 'w') as tf:
                tf.write(get_resources(overlay_source))
            overlay = imp.load_source("temporary_overlay_methods", tmp_file)
            os.remove(tmp_file)

        # Extend iOSReaderApp with the functions defined in the overlay
        # [(<name>, <function>), (<name>, <function>)...]
        overlays = [f for f in getmembers(overlay) if isfunction(f[1])]
        self._log("loading %d overlays" % len(overlays))
        for method in overlays:
            self._log("adding overlay '%s()'" % method[0])
            setattr(self, method[0], MethodType(method[1], self, iOSReaderApp))

        del overlay
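
The loader above binds every function defined in the overlay module onto the reader instance. A short sketch of just that binding step; note that Python 3's types.MethodType takes two arguments, whereas the three-argument form used above is Python 2 only.

from inspect import getmembers, isfunction
from types import MethodType

def apply_overlays(target, overlay_module):
    # Bind every top-level function in overlay_module onto `target` as a method.
    for name, func in getmembers(overlay_module, isfunction):
        setattr(target, name, MethodType(func, target))
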
Example #25
0
 def _load_step_definitions(self, filepath):
     Foxpath.mappings = []
     # remarkably, this seems to be sufficient
     if six.PY3:
         SourceFileLoader('', filepath).load_module()
     else:
         load_source('', filepath)
Example #26
0
def perform_manual_push(results_dir):
    dirname =  '/results/plugins/scratch'
    cur_time = time.strftime("%H:%M:%S")
    manual_sync_log_file = dirname + '/manual_sync_' + cur_time + '.log'
    try:
        sys.stdout = open(manual_sync_log_file, "w")
        dirname =  os.path.dirname(os.path.realpath(__file__))
        HID_Common_Util = imp.load_source('HID_Common_Util', dirname + '/HID_Common_Util.py')
        HID_Report_Generator = imp.load_source('HID_Report_Generator', dirname + '/HID_Report_Generator.py')
        HID_Converge_Connector = imp.load_source('HID_Converge_Connector', dirname + '/HID_Converge_Connector.py')
        commonUtil = HID_Common_Util.HID_Common_Util()
        commonUtil.set_environment(results_dir)
        convergeConnector = HID_Converge_Connector.HID_Converge_Connector()
        reportGenerator = HID_Report_Generator.HID_Report_Generator()
        convergeConnector.launch_manual_sync()
        response = reportGenerator.re_launch()
        if (response == 'success'):
            return {'status': 'completed', 'errors':'none'}
        else:
            return {'status': 'failed', 'errors':'Auto Sync with Converge Failed'}
    except:
        print('<error> un-expected error during processing HID_Converge_Connector::launch_manual_sync')
        traceback.print_exc(file=sys.stdout)
        return {'status': 'failed', 'errors':'Auto Sync with Converge Failed'}
    finally:
        print('<info> exiting extend::perform_manual_push')
        sys.stdout = sys.__stdout__
Example #27
0
def load_conf_module(name, home = None):
    modname = 'config.' + name;

    if not modname in sys.modules:
        env_name = name.upper().replace('-', '_')
        env_home = env_name + '_HOME'

        home = check_env(env_home)
        if home and not os.path.isdir(home):
            raise Exception, '$%s=%s is not a directory' % (env_home, home)

        filename = name + '-config.py'
        if home: filename = home + '/' + filename

        if os.path.exists(filename):
            try:
                imp.load_source(modname, filename)

                if home:
                    mod = sys.modules[modname]
                    mod.__dict__['home'] = home

            except Exception, e:
                raise Exception, "Error loading configuration file '%s' " \
                    "for '%s':\n%s" % (filename, name, e)
        else:
            raise Exception, "Configuration file '%s' not found " \
                "for '%s' not found please set %s" % (filename, name, env_home)
Example #28
0
    def __init__(self, input_type = ''):

        if (input_type == 'telescope') or (input_type == 'planet') or (input_type == 'star'):
            pass
        else:
            print "Error: unrecognized input_type. Please use 'telescope', 'planet', or 'star'."
            return

        try:
            del sys.modules['input_usr']
            del sys.modules['input']
        except KeyError:
            pass

        default_input_file = os.path.join(relpath,'input_default_'+input_type+'.py')
        user_input_file = os.path.join(relpath,'input_user_'+input_type+'.py')

        self._input = imp.load_source("input", default_input_file)            # Load default inputs into self._input

        self._input_usr = imp.load_source("input_usr", user_input_file)       # Load user inputs into self._input_usr

        self._input.__dict__.update(self._input_usr.__dict__)                 # Update self._input with user values

        inp_dict = self._input.__dict__

        for key, value in inp_dict.items():
            if key.startswith('__') or isinstance(value, ModuleType) or isinstance(value, FunctionType):
                inp_dict.pop(key, None)

        self.__dict__.update(inp_dict)                                        # Make all parameters accessible as self.param

        del self._input
        del self._input_usr
Example #29
0
    def load_widget(self):
        """legacy entrypoint called (widgets were moved to a separate addon), start redirect..."""
        action = self.params.get("action", "")
        newaddon = "script.skin.helper.widgets"
        log_msg(
            "Deprecated method: %s. Please reassign your widgets to get rid of this message. -"
            "This automatic redirect will be removed in the future" % (action),
            xbmc.LOGWARNING,
        )
        paramstring = ""
        for key, value in self.params.iteritems():
            paramstring += ",%s=%s" % (key, value)
        if xbmc.getCondVisibility("System.HasAddon(%s)" % newaddon):
            # TEMP !!! for backwards compatibility reasons only - to be removed in the near future!!
            import imp

            addon = xbmcaddon.Addon(newaddon)
            addon_path = addon.getAddonInfo("path").decode("utf-8")
            imp.load_source("plugin", os.path.join(addon_path, "plugin.py"))
            from plugin import main

            main.Main()
            del addon
        else:
            # trigger install of the addon
            if KODI_VERSION >= 17:
                xbmc.executebuiltin("InstallAddon(%s)" % newaddon)
            else:
                xbmc.executebuiltin("RunPlugin(plugin://%s)" % newaddon)
Example #30
0
 def __import_asciidoc(self, reload=False):
     '''
     Import asciidoc module (script or compiled .pyc).
     See
     http://groups.google.com/group/asciidoc/browse_frm/thread/66e7b59d12cd2f91
     for an explanation of why a seemingly straight-forward job turned out
     quite complicated.
     '''
     if os.path.splitext(self.cmd)[1] in ['.py','.pyc']:
         sys.path.insert(0, os.path.dirname(self.cmd))
         try:
             try:
                 if reload:
                     import __builtin__  # Because reload() is shadowed.
                     __builtin__.reload(self.asciidoc)
                 else:
                     import asciidoc
                     self.asciidoc = asciidoc
             except ImportError:
                 raise AsciiDocError('failed to import ' + self.cmd)
         finally:
             del sys.path[0]
     else:
         # The import statement can only handle .py or .pyc files, have to
         # use imp.load_source() for scripts with other names.
         try:
             imp.load_source('asciidoc', self.cmd)
             import asciidoc
             self.asciidoc = asciidoc
         except ImportError:
             raise AsciiDocError('failed to import ' + self.cmd)
     if Version(self.asciidoc.VERSION) < Version(MIN_ASCIIDOC_VERSION):
         raise AsciiDocError(
             'asciidocapi %s requires asciidoc %s or better'
             % (API_VERSION, MIN_ASCIIDOC_VERSION))
Example #31
0
def readCmdLineArgs(simConfigDefault='cfg.py',
                    netParamsDefault='netParams.py'):
    from .. import sim
    import imp, importlib, types
    import __main__

    if len(sys.argv) > 1:
        print(
            '\nReading command line arguments using syntax: python file.py [simConfig=filepath] [netParams=filepath]'
        )
    cfgPath = None
    netParamsPath = None

    # read simConfig and netParams paths
    for arg in sys.argv:
        if arg.startswith('simConfig='):
            cfgPath = arg.split('simConfig=')[1]
            cfg = sim.loadSimCfg(cfgPath, setLoaded=False)
            __main__.cfg = cfg
        elif arg.startswith('netParams='):
            netParamsPath = arg.split('netParams=')[1]
            if netParamsPath.endswith('.json'):
                netParams = sim.loadNetParams(netParamsPath, setLoaded=False)
            elif netParamsPath.endswith('py'):
                try:  # py3
                    loader = importlib.machinery.SourceFileLoader(
                        os.path.basename(netParamsPath).split('.')[0],
                        netParamsPath)
                    netParamsModule = types.ModuleType(loader.name)
                    loader.exec_module(netParamsModule)
                except:  # py2
                    netParamsModule = imp.load_source(
                        os.path.basename(netParamsPath).split('.')[0],
                        netParamsPath)
                netParams = netParamsModule.netParams
                print('Importing netParams from %s' % (netParamsPath))

    if not cfgPath:
        try:
            try:  # py3
                loader = importlib.machinery.SourceFileLoader(
                    'cfg', simConfigDefault)
                cfgModule = types.ModuleType(loader.name)
                loader.exec_module(cfgModule)
            except:  # py2
                cfgModule = imp.load_source('cfg', simConfigDefault)

            cfg = cfgModule.cfg
            __main__.cfg = cfg
        except:
            print(
                '\nWarning: Could not load cfg from command line path or from default cfg.py'
            )
            cfg = None

    if not netParamsPath:
        try:
            try:  # py3
                loader = importlib.machinery.SourceFileLoader(
                    'netParams', netParamsDefault)
                netParamsModule = types.ModuleType(loader.name)
                loader.exec_module(netParamsModule)
            except:  # py2
                netParamsModule = imp.load_source('netParams', netParamsDefault)

            netParams = netParamsModule.netParams
        except:
            print(
                '\nWarning: Could not load netParams from command line path or from default netParams.py'
            )
            netParams = None

    return cfg, netParams
Example #32
0
#!/usr/bin/env python
# Copyright 2017  David Snyder
#           2017  Ye Bai
# Apache 2.0
#
# This script generates augmented data.  It is based on
# steps/data/reverberate_data_dir.py but doesn't handle reverberation.
# It is designed to be somewhat simpler and more flexible for augmenting with
# additive noise.
from __future__ import print_function
import sys, random, argparse, os, imp
sys.path.append("steps/data/")
from reverberate_data_dir import ParseFileToDict
from reverberate_data_dir import WriteDictToFile
data_lib = imp.load_source('dml', 'steps/data/data_dir_manipulation_lib.py')


def GetArgs():
    parser = argparse.ArgumentParser(
        description="Augment the data directory with additive noises. "
        "Noises are separated into background and foreground noises which are added together or "
        "separately.  Background noises are added to the entire recording, and repeated as necessary "
        "to cover the full length.  Multiple overlapping background noises can be added, to simulate "
        "babble, for example.  Foreground noises are added sequentially, according to a specified "
        "interval.  See also steps/data/reverberate_data_dir.py "
        "Usage: augment_data_dir.py [options...] <in-data-dir> <out-data-dir> "
        "E.g., steps/data/augment_data_dir.py --utt-suffix aug --fg-snrs 20:10:5:0 --bg-snrs 20:15:10 "
        "--num-bg-noise 1:2:3 --fg-interval 3 --fg-noise-dir data/musan_noise --bg-noise-dir "
        "data/musan_music data/train data/train_aug",
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument(
Example #33
0
import os, sys, time, imp
import numpy as np
import tensorflow as tf
from lda_def import LDA, Dataset
from hyper_dynamics import HyperDynamics
# np.set_printoptions(precision=3, suppress=True)


def merge_dicts(*dicts):
    return {k: v for d in dicts for k, v in d.items()}


def vars_stat(obj):
    return {k: getattr(obj, k) for k in dir(obj) if not k.startswith('_')}


if __name__ == '__main__':
    settsrc = imp.load_source('settsrc', sys.argv[1])
    args = settsrc.args
    pm = settsrc.pm
    print('M = {:3d}'.format(args.M))
    print(vars_stat(pm))

    data = Dataset(args.dtFilename, args.batchsize)
    data_W = sum(1 for line in open(args.dtVocname) if line.rstrip())
    model = LDA(data.n_tr, data_W, args.K, args.alpha, args.beta, args.sigma,
                args.n_gsamp)
    model.set_holdout_logperp(args.perpType, data.ho_train_cts,
                              data.ho_test_cts, args.n_window)
    theta = args.beta + args.sigma * np.random.normal(size=(args.M, args.K,
                                                            data_W))
    theta_tf = tf.Variable(theta)
    grads_tf = tf.placeholder(dtype=theta.dtype, shape=theta.shape)
Example #34
0
input_data=input_data_class.InputData(dataset=dataset)
config = configparser.ConfigParser()
config.read('config.ini')


user_label_dim=int(config[dataset]["num_classes"])
num_classes=1
save_model=True
epochs=int(config[dataset]["attack_epochs"])
user_epochs=int(config[dataset]["user_epochs"])
attack_epochs=int(config[dataset]["attack_shallow_model_epochs"])
batch_size=int(config[dataset]["defense_batch_size"])
defense_train_testing_ratio=float(config[dataset]["defense_training_ratio"])
result_folder=config[dataset]["result_folder"]
network_architecture=str(config[dataset]["network_architecture"])
fccnet=imp.load_source(str(config[dataset]["network_name"]),network_architecture)

config_gpu = tf.ConfigProto()
config_gpu.gpu_options.per_process_gpu_memory_fraction = 0.5
config_gpu.gpu_options.visible_device_list = "0"
set_session(tf.Session(config=config_gpu))

defense=args.defense

#########loading defense data###################
(x_evaluate,y_evaluate,l_evaluate)=input_data.input_data_attacker_evaluate()
evaluation_noise_filepath=result_folder+"/attack/"+defense+"_noise_data_evaluation.npz"

if defense == "L2":
    evaluation_noise_filepath=result_folder+"/attack/"+defense+"_"+args.e+"_noise_data_evaluation.npz"
Example #35
0
import os, sys
import copy, math
import heppy.framework.config as cfg
import logging
import imp
# next 2 lines necessary to deal with reimports from ipython
logging.shutdown()
reload(logging)
logging.basicConfig(level=logging.WARNING)

sample = imp.load_source(
    'heppylist',
    '/afs/cern.ch/work/h/helsens/public/FCCDicts/FCC_heppySampleList_fcc_v02.py'
)

comp = cfg.Component(
    'example',
    files=[
        "/eos/experiment/fcc/hh/generation/DelphesEvents/fcc_v02/mgp8_pp_tth01j_5f_hbb/events_000000100.root"
    ])

selectedComponents = [
    sample.mgp8_pp_tth01j_5f_hbb,
    sample.mgp8_pp_ttj_4f,
    sample.mgp8_pp_ttbb_4f,
    sample.mgp8_pp_ttz_5f_zbb,
]

sample.mgp8_pp_ttbb_4f.splitFactor = 330
sample.mgp8_pp_ttj_4f.splitFactor = 390
sample.mgp8_pp_tth01j_5f_hbb.splitFactor = 230
Example #36
0
    def process_file(self, filepath, only_if_updated=True, safe_mode=True):
        """
        Given a path to a python module or zip file, this method imports
        the module and look for dag objects within it.
        """
        from airflow.models.dag import DAG  # Avoid circular import

        found_dags = []

        # if the source file no longer exists in the DB or in the filesystem,
        # return an empty list
        # todo: raise exception?
        if filepath is None or not os.path.isfile(filepath):
            return found_dags

        try:
            # This failed before in what may have been a git sync
            # race condition
            file_last_changed_on_disk = datetime.fromtimestamp(os.path.getmtime(filepath))
            if only_if_updated \
                    and filepath in self.file_last_changed \
                    and file_last_changed_on_disk == self.file_last_changed[filepath]:
                return found_dags

        except Exception as e:
            self.log.exception(e)
            return found_dags

        mods = []
        is_zipfile = zipfile.is_zipfile(filepath)
        if not is_zipfile:
            if safe_mode:
                with open(filepath, 'rb') as file:
                    content = file.read()
                    if not all([s in content for s in (b'DAG', b'airflow')]):
                        self.file_last_changed[filepath] = file_last_changed_on_disk
                        # Don't want to spam user with skip messages
                        if not self.has_logged:
                            self.has_logged = True
                            self.log.info(
                                "File %s assumed to contain no DAGs. Skipping.",
                                filepath)
                        return found_dags

            self.log.debug("Importing %s", filepath)
            org_mod_name, _ = os.path.splitext(os.path.split(filepath)[-1])
            mod_name = ('unusual_prefix_' +
                        hashlib.sha1(filepath.encode('utf-8')).hexdigest() +
                        '_' + org_mod_name)

            if mod_name in sys.modules:
                del sys.modules[mod_name]

            with timeout(self.DAGBAG_IMPORT_TIMEOUT):
                try:
                    m = imp.load_source(mod_name, filepath)
                    mods.append(m)
                except Exception as e:
                    self.log.exception("Failed to import: %s", filepath)
                    self.import_errors[filepath] = str(e)
                    self.file_last_changed[filepath] = file_last_changed_on_disk

        else:
            zip_file = zipfile.ZipFile(filepath)
            for mod in zip_file.infolist():
                head, _ = os.path.split(mod.filename)
                mod_name, ext = os.path.splitext(mod.filename)
                if not head and (ext == '.py' or ext == '.pyc'):
                    if mod_name == '__init__':
                        self.log.warning("Found __init__.%s at root of %s", ext, filepath)
                    if safe_mode:
                        with zip_file.open(mod.filename) as zf:
                            self.log.debug("Reading %s from %s", mod.filename, filepath)
                            content = zf.read()
                            if not all([s in content for s in (b'DAG', b'airflow')]):
                                self.file_last_changed[filepath] = (
                                    file_last_changed_on_disk)
                                # todo: create ignore list
                                # Don't want to spam user with skip messages
                                if not self.has_logged:
                                    self.has_logged = True
                                    self.log.info(
                                        "File %s assumed to contain no DAGs. Skipping.",
                                        filepath)

                    if mod_name in sys.modules:
                        del sys.modules[mod_name]

                    try:
                        sys.path.insert(0, filepath)
                        m = importlib.import_module(mod_name)
                        mods.append(m)
                    except Exception as e:
                        self.log.exception("Failed to import: %s", filepath)
                        self.import_errors[filepath] = str(e)
                        self.file_last_changed[filepath] = file_last_changed_on_disk

        for m in mods:
            for dag in list(m.__dict__.values()):
                if isinstance(dag, DAG):
                    if not dag.full_filepath:
                        dag.full_filepath = filepath
                        if dag.fileloc != filepath and not is_zipfile:
                            dag.fileloc = filepath
                    try:
                        dag.is_subdag = False
                        self.bag_dag(dag, parent_dag=dag, root_dag=dag)
                        if isinstance(dag._schedule_interval, str):
                            croniter(dag._schedule_interval)
                        found_dags.append(dag)
                        found_dags += dag.subdags
                    except (CroniterBadCronError,
                            CroniterBadDateError,
                            CroniterNotAlphaError) as cron_e:
                        self.log.exception("Failed to bag_dag: %s", dag.full_filepath)
                        self.import_errors[dag.full_filepath] = \
                            "Invalid Cron expression: " + str(cron_e)
                        self.file_last_changed[dag.full_filepath] = \
                            file_last_changed_on_disk
                    except AirflowDagCycleException as cycle_exception:
                        self.log.exception("Failed to bag_dag: %s", dag.full_filepath)
                        self.import_errors[dag.full_filepath] = str(cycle_exception)
                        self.file_last_changed[dag.full_filepath] = \
                            file_last_changed_on_disk

        self.file_last_changed[filepath] = file_last_changed_on_disk
        return found_dags
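
The DagBag code above loads each candidate file under a unique, hash-based module name and then scans the resulting module's attributes for DAG instances. A stripped-down, generic sketch of that pattern (not Airflow's API):

import hashlib
import importlib.util
import os
import sys

def load_and_collect(filepath, wanted_type):
    # Load `filepath` under a unique module name so files sharing a basename
    # cannot collide, then return every attribute that is an instance of
    # `wanted_type`, the same shape as the DAG scan above.
    base, _ = os.path.splitext(os.path.basename(filepath))
    mod_name = ('unique_prefix_'
                + hashlib.sha1(filepath.encode('utf-8')).hexdigest()
                + '_' + base)
    sys.modules.pop(mod_name, None)
    spec = importlib.util.spec_from_file_location(mod_name, filepath)
    module = importlib.util.module_from_spec(spec)
    sys.modules[mod_name] = module
    spec.loader.exec_module(module)
    return [obj for obj in vars(module).values() if isinstance(obj, wanted_type)]
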
Example #37
0
#!/usr/bin/env python
# ====================================
# Copyright (c) Microsoft Corporation. All rights reserved.
# See license.txt for license information.
# ====================================
import os
import sys
import ConfigParser
import subprocess
import signal
import time
import logging
import logging.handlers

import imp
protocol = imp.load_source('protocol', '../protocol.py')
nxDSCLog = imp.load_source('nxDSCLog', '../nxDSCLog.py')
LG = nxDSCLog.DSCLog


def init_locals(WorkspaceId, AzureDnsAgentSvcZone):
    if WorkspaceId is None:
        WorkspaceId = ''
    if AzureDnsAgentSvcZone is None:
        AzureDnsAgentSvcZone = ''
    return WorkspaceId.encode('ascii', 'ignore'), AzureDnsAgentSvcZone.encode(
        'ascii', 'ignore')


def Set_Marshall(WorkspaceId, Enabled, AzureDnsAgentSvcZone):
    WorkspaceId, AzureDnsAgentSvcZone = init_locals(WorkspaceId,
Example #38
0
def get_var_from_file(filename):
    # Note: imp.load_source opens the file itself, so this handle is unused.
    f = open(filename)
    cluster_info = imp.load_source('cluster_info', filename)
    f.close()

    return cluster_info
Example #39
0
def load_src(name, fpath):
    import os
    import imp
    return imp.load_source(name, os.path.join(os.path.dirname(__file__), fpath))
Example #40
0
def init():
    def close():
        for l_module, l_module_dict in loaded_modules.iteritems():
            l_module_dict['class'].apply_settings(system_exit=True)

        if window:
            window.gui.on_close('Closing Program from console')
        else:
            os._exit(0)

    # For system compatibility, loading chats
    loaded_modules = OrderedDict()
    gui_settings = {}
    window = None

    # Creating dict with folder settings
    main_config = {
        'root_folder':
        PYTHON_FOLDER,
        'conf_folder':
        CONF_FOLDER,
        'main_conf_file':
        MAIN_CONF_FILE,
        'main_conf_file_loc':
        MAIN_CONF_FILE,
        'main_conf_file_name':
        ''.join(os.path.basename(MAIN_CONF_FILE).split('.')[:-1]),
        'update':
        False
    }

    if not os.path.isdir(MODULE_FOLDER):
        logging.error(
            "Was not able to find modules folder, check your installation")
        exit()

    # Trying to load config file.
    # Create folder if doesn't exist
    if not os.path.isdir(CONF_FOLDER):
        log.error("Could not find %s folder", CONF_FOLDER)
        try:
            os.mkdir(CONF_FOLDER)
        except Exception as exc:
            log.debug("Exception: %s", exc)
            log.error("Was unable to create %s folder.", CONF_FOLDER)
            exit()

    log.info("Loading basic configuration")
    main_config_dict = OrderedDict()
    main_config_dict['gui_information'] = OrderedDict()
    main_config_dict['gui_information']['category'] = 'main'
    main_config_dict['gui_information']['width'] = '450'
    main_config_dict['gui_information']['height'] = '500'
    main_config_dict['system'] = OrderedDict()
    main_config_dict['system']['log_level'] = 'INFO'
    main_config_dict['system']['testing_mode'] = False
    main_config_dict['gui'] = OrderedDict()
    main_config_dict['gui']['cli'] = False
    main_config_dict['gui']['show_icons'] = False
    main_config_dict['gui']['show_hidden'] = False
    main_config_dict['gui']['gui'] = True
    main_config_dict['gui']['on_top'] = True
    main_config_dict['gui']['show_browser'] = True
    main_config_dict['gui']['show_counters'] = True
    main_config_dict['gui']['reload'] = None
    main_config_dict['language'] = get_language()

    main_config_gui = {
        'language': {
            'view': 'choose_single',
            'check_type': 'dir',
            'check': 'translations'
        },
        'system': {
            'hidden': ['log_level', 'testing_mode'],
        },
        'gui': {
            'hidden': ['cli']
        },
        'ignored_sections': ['gui.reload'],
        'non_dynamic': ['language.list_box', 'gui.*', 'system.*']
    }
    config = load_from_config_file(MAIN_CONF_FILE, main_config_dict)
    root_logger.setLevel(level=logging.getLevelName(
        main_config_dict['system'].get('log_level', 'INFO')))
    # Adding config for main module
    main_class = BaseModule(
        conf_params={
            'folder': CONF_FOLDER,
            'file': main_config['main_conf_file_loc'],
            'filename': main_config['main_conf_file_name'],
            'parser': config,
            'root_folder': main_config['root_folder'],
            'logs_folder': LOG_FOLDER,
            'config': main_config_dict,
            'gui': main_config_gui
        })
    loaded_modules['main'] = main_class.conf_params()

    gui_settings['gui'] = main_config_dict[GUI_TAG].get('gui')
    gui_settings['on_top'] = main_config_dict[GUI_TAG].get('on_top')
    gui_settings['language'] = main_config_dict.get('language')
    gui_settings['show_hidden'] = main_config_dict[GUI_TAG].get('show_hidden')
    gui_settings['size'] = (
        int(main_config_dict['gui_information'].get('width')),
        int(main_config_dict['gui_information'].get('height')))
    gui_settings['show_browser'] = main_config_dict['gui'].get('show_browser')

    # Checking updates
    log.info("Checking for updates")
    loaded_modules['main']['update'], loaded_modules['main'][
        'update_url'] = get_update(SEM_VERSION)
    if loaded_modules['main']['update']:
        log.info("There is new update, please update!")

    # Starting modules
    log.info("Loading Messaging Handler")
    log.info("Loading Queue for message handling")

    try:
        load_translations_keys(TRANSLATION_FOLDER, gui_settings['language'])
    except Exception as exc:
        log.debug("Exception: %s", exc)
        log.exception("Failed loading translations")

    # Creating queues for messaging transfer between chat threads
    queue = Queue.Queue()
    # Loading module for message processing...
    msg = messaging.Message(queue)
    loaded_modules.update(msg.load_modules(main_config,
                                           loaded_modules['main']))
    msg.start()

    log.info("Loading Chats")
    # Trying to dynamically load chats that are in config file.
    chat_modules = os.path.join(CONF_FOLDER, "chat_modules.cfg")
    chat_location = os.path.join(MODULE_FOLDER, "chat")
    chat_conf_dict = OrderedDict()
    chat_conf_dict['gui_information'] = {'category': 'chat'}
    chat_conf_dict['chats'] = []

    chat_conf_gui = {
        'chats': {
            'view': 'choose_multiple',
            'check_type': 'files',
            'check': os.path.sep.join(['modules', 'chat']),
            'file_extension': False
        },
        'non_dynamic': ['chats.list_box']
    }
    chat_config = load_from_config_file(chat_modules, chat_conf_dict)

    chat_module = BaseModule(
        conf_params={
            'folder': CONF_FOLDER,
            'file': chat_modules,
            'filename': ''.join(
                os.path.basename(chat_modules).split('.')[:-1]),
            'parser': chat_config,
            'config': chat_conf_dict,
            'gui': chat_conf_gui
        })
    loaded_modules['chat'] = chat_module.conf_params()

    for chat_module in chat_conf_dict['chats']:
        log.info("Loading chat module: {0}".format(chat_module))
        module_location = os.path.join(chat_location, chat_module + ".py")
        if os.path.isfile(module_location):
            log.info("found {0}".format(chat_module))
            # After module is find, we are initializing it.
            # Class should be named as in config
            # Also passing core folder to module so it can load it's own
            #  configuration correctly

            tmp = imp.load_source(chat_module, module_location)
            chat_init = getattr(tmp, chat_module)
            class_module = chat_init(
                queue,
                PYTHON_FOLDER,
                conf_folder=CONF_FOLDER,
                conf_file=os.path.join(CONF_FOLDER,
                                       '{0}.cfg'.format(chat_module)),
                testing=main_config_dict['system']['testing_mode'])
            loaded_modules[chat_module] = class_module.conf_params()
        else:
            log.error("Unable to find {0} module")

    # Actually loading modules
    for f_module, f_config in loaded_modules.iteritems():
        if 'class' in f_config:
            try:
                f_config['class'].load_module(main_settings=main_config,
                                              loaded_modules=loaded_modules,
                                              queue=queue)
                log.debug('loaded module {}'.format(f_module))
            except ModuleLoadException:
                msg.modules.remove(loaded_modules[f_module]['class'])
                loaded_modules.pop(f_module)
    log.info('LalkaChat loaded successfully')

    if gui_settings['gui']:
        import gui
        log.info("Loading GUI Interface")
        window = gui.GuiThread(gui_settings=gui_settings,
                               main_config=loaded_modules['main'],
                               loaded_modules=loaded_modules,
                               queue=queue)
        loaded_modules['gui'] = window.conf_params()
        window.start()

    if main_config_dict['gui']['cli']:
        try:
            while True:
                console = raw_input("> ")
                log.info(console)
                if console == "exit":
                    log.info("Exiting now!")
                    close()
                else:
                    log.info("Incorrect Command")
        except (KeyboardInterrupt, SystemExit):
            log.info("Exiting now")
            close()
        except Exception as exc:
            log.info(exc)
    else:
        try:
            while True:
                sleep(1)
        except (KeyboardInterrupt, SystemExit):
            log.info("Exiting now")
            close()
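The chat-loading loop above relies on a naming convention: each chat module file defines a class with the same name as the file, which is pulled out with getattr after imp.load_source. A minimal self-contained sketch of that convention (module name, path, and constructor arguments here are made up for illustration):

import imp
import os
import tempfile

# Write a tiny stand-in chat module so the sketch runs on its own.
module_name = 'example_chat'
module_path = os.path.join(tempfile.mkdtemp(), module_name + '.py')
with open(module_path, 'w') as handle:
    handle.write(
        "class example_chat(object):\n"
        "    def __init__(self, queue, python_folder, **kwargs):\n"
        "        self.queue = queue\n"
        "    def conf_params(self):\n"
        "        return {'class': self}\n")

loaded = imp.load_source(module_name, module_path)
chat_class = getattr(loaded, module_name)  # class name must match the file name
instance = chat_class(queue=None, python_folder=os.path.dirname(module_path))
print(instance.conf_params())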
Example #41
0
import copy
import functools
import imp
import io
import os
from collections import OrderedDict, defaultdict
from datetime import datetime

from mozlog import reader
from mozlog.formatters import JSONFormatter
from mozlog.handlers import BaseHandler, StreamHandler, LogLevelFilter

here = os.path.dirname(__file__)
localpaths = imp.load_source("localpaths", os.path.abspath(os.path.join(here, os.pardir, os.pardir, "localpaths.py")))
from wpt.markdown import markdown_adjust, table


# If a test takes more than (FLAKY_THRESHOLD*timeout) and does not consistently
# time out, it is considered slow (potentially flaky).
FLAKY_THRESHOLD = 0.8


class LogActionFilter(BaseHandler):

    """Handler that filters out messages not of a given set of actions.

    Subclasses BaseHandler.

    :param inner: Handler to use for messages that pass this filter
    :param actions: List of actions for which to fire the handler
    """
Example #42
0
    description = "Running all unit test for pyconcrete"
    user_options = []

    def initialize_options(self):
        pass

    def finalize_options(self):
        pass

    def run(self):
        import unittest
        suite = unittest.TestLoader().discover(TEST_DIR)
        unittest.TextTestRunner(verbosity=2).run(suite)


version = imp.load_source('version', join(PY_SRC_DIR, 'version.py'))

include_dirs = [join(EXT_SRC_DIR, 'openaes', 'inc')]
if sys.platform == 'win32' and not is_mingw():
    include_dirs.append(join(EXT_SRC_DIR, 'include_win'))

module = Extension(
    'pyconcrete._pyconcrete',
    include_dirs=include_dirs,
    sources=[
        join(EXT_SRC_DIR, 'pyconcrete.c'),
        join(EXT_SRC_DIR, 'openaes', 'src', 'oaes.c'),
        join(EXT_SRC_DIR, 'openaes', 'src', 'oaes_base64.c'),
        join(EXT_SRC_DIR, 'openaes', 'src', 'oaes_lib.c'),
    ],
)
Example #43
0
from flask import escape
from flask_appbuilder.security.sqla import models as ab_models

import caravel
from caravel import app, db, models, utils, appbuilder
from caravel.models import DruidCluster

os.environ['CARAVEL_CONFIG'] = 'tests.caravel_test_config'

app.config['TESTING'] = True
app.config['CSRF_ENABLED'] = False
app.config['SECRET_KEY'] = 'thisismyscretkey'
app.config['WTF_CSRF_ENABLED'] = False
app.config['PUBLIC_ROLE_LIKE_GAMMA'] = True
BASE_DIR = app.config.get("BASE_DIR")
cli = imp.load_source('cli', BASE_DIR + "/bin/caravel")


class CaravelTestCase(unittest.TestCase):

    def __init__(self, *args, **kwargs):
        super(CaravelTestCase, self).__init__(*args, **kwargs)
        self.client = app.test_client()

        utils.init(caravel)
        admin = appbuilder.sm.find_user('admin')
        if not admin:
            appbuilder.sm.add_user(
                'admin', 'admin', 'user', '*****@*****.**',
                appbuilder.sm.find_role('Admin'),
                password='******')
Example #44
0
#!/usr/bin/python
# -*- coding: utf-8 -*-
from Bio import SeqIO
from Bio.Align import MultipleSeqAlignment
# from Bio.SubsMat import MatrixInfo
from scipy import stats
import numpy
import math
import sys
import re
import collections

import imp
SubsMat = imp.load_source(
    'SubsMat', '/Users/bondsr/Documents/biopython/Bio/SubsMat/__init__.py')
MatrixInfo = imp.load_source(
    'MatrixInfo',
    '/Users/bondsr/Documents/biopython/Bio/SubsMat/MatrixInfo.py')


class PSSM:
    def __init__(self, alignment):
        self.name = "Unnamed"

        # default amino acid priors: Grabbed them from
        # http://www.tiem.utk.edu/~gross/bioed/webmodules/aminoacid.htm and
        # then altered the matrix to include gap and X -> 1/22 ≈ 0.0445, and
        # then change all other values by i-(i*0.045)
        self.priors_matrix = {
            "A": 0.067,
            "C": 0.03,
Example #45
0
import time

# ---------------------------------------------------- Globals ---------------------------------------------------------

xcode_logging = False
verbose = False
overwrite = False
valid_platform_args = ["windows", "linux", "macos", "ios", "android", "js", "python"]
platform_full_names = {'windows':'Windows', 'linux':'Linux', 'macos': 'macOS', 'ios': 'iOS', 'android': 'Android', 'js': 'JavaScript' }
script_folder = os.path.abspath(os.path.dirname(os.path.realpath(__file__)))
src_folder = os.path.abspath(os.path.join(script_folder, "..", "src"))
build_folder = os.path.abspath(os.path.join(src_folder, "build"))
librhino3dm_native_folder = os.path.abspath(os.path.join(src_folder, "librhino3dm_native"))

if sys.version_info[0] < 3:
    bootstrap = imp.load_source('bootstrap', os.path.join(script_folder, "bootstrap.py"))
else:
    bootstrap = SourceFileLoader('bootstrap', os.path.join(script_folder, "bootstrap.py")).load_module()

# ---------------------------------------------------- Logging ---------------------------------------------------------
# colors for terminal reporting
class bcolors:
    HEADER = '\033[95m'
    OKBLUE = '\033[94m'
    OKGREEN = '\033[92m'
    WARNING = '\033[93m'
    FAIL = '\033[91m'
    ENDC = '\033[0m'
    BOLD = '\033[1m'
    UNDERLINE = '\033[4m'
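The version check above is the usual way to keep imp.load_source on Python 2 while using the loader machinery on Python 3. A sketch of a helper that does the same job but goes through importlib.util instead of the deprecated SourceFileLoader().load_module() call (the helper name is made up):

import sys

def load_module_from_path(name, path):
    """Load a module from a file path on either Python 2 or Python 3."""
    if sys.version_info[0] < 3:
        import imp
        return imp.load_source(name, path)
    import importlib.util
    spec = importlib.util.spec_from_file_location(name, path)
    module = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(module)
    return module

# e.g. bootstrap = load_module_from_path('bootstrap',
#                                        os.path.join(script_folder, 'bootstrap.py'))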
Example #46
0
def GetUtils():
  '''Dynamically load the tools/utils.py python module.'''
  return imp.load_source('utils', os.path.join(DART_DIR, 'tools', 'utils.py'))
Example #47
0
        'Please make the cfg.num_threads commensurate with '
        'cfg.validation_shards')

    # Run it!
    if cfg.validation_directory is not None:
        _process_dataset('validation',
                         cfg.validation_directory,
                         cfg.validation_shards,
                         cfg.label_defs_file_name,
                         cfg.val_labels_file_name,
                         resize=cfg.resize,
                         image_size=cfg.image_size)
    if cfg.train_directory is not None:
        _process_dataset('train',
                         cfg.train_directory,
                         cfg.train_shards,
                         cfg.label_defs_file_name,
                         resize=cfg.resize,
                         image_size=cfg.image_size)


if __name__ == '__main__':
    # Read in config file
    assert len(
        sys.argv
    ) == 2, "Must pass exactly one argument to the script, namely the cfg file, got " + str(
        len(sys.argv))
    abs_path = os.path.abspath(sys.argv[1])
    cfg = imp.load_source('*', abs_path)
    main(cfg)
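The __main__ block treats an arbitrary .py file passed on the command line as the run configuration: load_source executes it and the resulting module object carries the settings as attributes. A hedged standalone sketch of that pattern (the path and attribute names are hypothetical):

import imp
import sys

# usage: python this_script.py /path/to/experiment_cfg.py
cfg_path = sys.argv[1]
cfg = imp.load_source('cfg', cfg_path)
print(getattr(cfg, 'train_directory', None))
print(getattr(cfg, 'image_size', None))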
Example #48
0
import os, sys
import imp

from authorizenet import apicontractsv1
from authorizenet.apicontrollers import *
constants = imp.load_source('modulename', 'constants.py')


def update_customer_payment_profile(customerProfileId,
                                    customerPaymentProfileId):
    merchantAuth = apicontractsv1.merchantAuthenticationType()
    merchantAuth.name = constants.apiLoginId
    merchantAuth.transactionKey = constants.transactionKey

    creditCard = apicontractsv1.creditCardType()
    creditCard.cardNumber = "4111111111111111"
    creditCard.expirationDate = "2020-12"

    payment = apicontractsv1.paymentType()
    payment.creditCard = creditCard

    paymentProfile = apicontractsv1.customerPaymentProfileExType()
    paymentProfile.billTo = apicontractsv1.customerAddressType()
    paymentProfile.billTo.firstName = "John"
    paymentProfile.billTo.lastName = "Doe"
    paymentProfile.billTo.address = "123 Main St."
    paymentProfile.billTo.city = "Bellevue"
    paymentProfile.billTo.state = "WA"
    paymentProfile.billTo.zip = "98004"
    paymentProfile.billTo.country = "USA"
    paymentProfile.billTo.phoneNumber = "000-000-000"
Example #49
0
import matplotlib.pyplot as plt
from datetime import datetime
import argparse
import platform
import numpy as np
import os
import sys
import getopt
import code  # For development: code.interact(local=dict(globals(), **locals()))
import time
import imp
import ctypes
from ctypes import *
from operator import add

PartehInterpretParameters = imp.load_source('PartehInterpretParameters', \
                                            'py_modules/PartehInterpretParameters.py')
PartehTypes = imp.load_source('PartehTypes', 'py_modules/PartehTypes.py')
SyntheticBoundaries = imp.load_source('SyntheticBoundaries','py_modules/SyntheticBoundaries.py')
CDLParse = imp.load_source('CDLParse','py_modules/CDLParse.py')
F90ParamParse = imp.load_source('F90ParamParse','py_modules/F90ParamParse.py')

from PartehInterpretParameters import load_xml
from CDLParse import CDLParseDims, CDLParseParam, cdl_param_type
from F90ParamParse import f90_param_type, GetParamsInFile, GetPFTParmFileSymbols, MakeListUnique

f90_fates_integrators_obj_name = 'bld/FatesIntegratorsMod.o'
f90_fates_parteh_params_obj_name = 'bld/PRTParametersMod.o'
f90_fates_partehwrap_obj_name  = 'bld/FatesPARTEHWrapMod.o'
f90_fates_lossfluxes_obj_name  = 'bld/PRTLossFluxesMod.o'
f90_fates_parteh_generic_obj_name = 'bld/PRTGenericMod.o'
f90_fates_unitwrap_obj_name = 'bld/UnitWrapMod.o'
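The later `from PartehInterpretParameters import load_xml` line works because imp.load_source registers the loaded module in sys.modules under the name it was given, so a regular import statement afterwards finds it there. A small self-contained check of that behaviour (the demo module is written to a temp file):

import imp
import os
import sys
import tempfile

path = os.path.join(tempfile.mkdtemp(), 'demo_module.py')
with open(path, 'w') as handle:
    handle.write("def greet():\n    return 'hello'\n")

imp.load_source('demo_module', path)
assert 'demo_module' in sys.modules

from demo_module import greet  # resolved via the sys.modules entry
print(greet())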
Example #50
0
import imp, json,cv2,os
import sys, threading, time
from multiprocessing import Queue
from avatar.AvatarBuilder import AvatarBuilder
from captureStats.DisplayInfo import DisplayInfo
from captureStats.faceCSVInfoWriter import FaceCSVInfoWriter
import time

weightsPath =  "faceDetector/weights.txt"
capturePath = "faceDetector/img.jpg"
azureKeys = "keys/azureKeys.txt"

fd = imp.load_source('FaceDetector','faceDetector/FaceDetector.py')
am = imp.load_source('AzureCognitiveManager','azureCogServManager/AzureCognitiveManager.py')

lockAzureThread = True

class Face():
    def __init__(self):
        self._faceAttr = None
        self._emotion = None
        self._faceId = ""
        self.GetAzureData = False

def initObjects():
    faceDetector = fd.FaceDetector(weightsPath, capturePath)
    with open(azureKeys,"r") as f:
        sub = json.load(f)
        f.close()
    azureCognitive = am.AzureCognitiveManager(sub)
Example #51
0
if normalizer_state is None:
    normalizer_state = 'ph_ts{}.input_str:previous.start_time:zero.normalizer'.format(
        args.timestep)
    normalizer_state = os.path.join(os.path.dirname(__file__),
                                    normalizer_state)
normalizer.load_params(normalizer_state)

args_dict = dict(args._get_kwargs())
args_dict['header'] = discretizer_header
args_dict['task'] = 'ph'
args_dict['num_classes'] = 25
args_dict['target_repl'] = target_repl

# Build the model
print("==> using model {}".format(args.network))
model_module = imp.load_source(os.path.basename(args.network), args.network)
model = model_module.Network(**args_dict)
suffix = ".bs{}{}{}.ts{}{}".format(
    args.batch_size, ".L1{}".format(args.l1) if args.l1 > 0 else "",
    ".L2{}".format(args.l2) if args.l2 > 0 else "", args.timestep,
    ".trc{}".format(args.target_repl_coef)
    if args.target_repl_coef > 0 else "")
model.final_name = args.prefix + model.say_name() + suffix
print("==> model.final_name:", model.final_name)

# Compile the model
print("==> compiling the model")
optimizer_config = {
    'class_name': args.optimizer,
    'config': {
        'lr': args.lr,
Example #52
0
import argparse
import sys
import os
import pandas as pd
import numpy as np
import math

from collections import Counter
sys.path.append('/ifs/work/taylorlab/friedman/')

pathPrefix = ''
if os.getcwd() == '/Users/friedman/Desktop/mnt':
    pathPrefix = '/Users/friedman/Desktop/mnt'

import imp
analysis_utils = imp.load_source(
    'analysis_utils',
    '/Users/friedman/Desktop/mnt/ifs/work/taylorlab/friedman/myUtils/analysis_utils.py'
)


def create_facets_dict_key(row):
    return row['Tumor_Sample_Barcode'] + '_' + row['idCol']


def create_facets_clonality_dict(facetsDf):
    #todo make it incorportate patient info too
    facetsDf['idCol'] = facetsDf.apply(
        lambda row: str(row['Chromosome']) + '_' + str(row['Start_Position']),
        axis=1)
    facetsDf = data_compacting_and_cleaning_util.create_expected_mut_copies_col(
        facetsDf)
    facetsDf['naiveClonalStatus'] = facetsDf.apply(
Example #53
0
 def get_hybrids():
     """
     Yields all hybrid classes
     """
     key = 'ovs_hybrid_structure'
     if key in HybridRunner.cache:
         return HybridRunner.cache[key]
     volatile = VolatileFactory.get_client()
     hybrid_structure = volatile.get(key)
     if hybrid_structure is not None:
         HybridRunner.cache[key] = hybrid_structure
         return hybrid_structure
     base_hybrids = []
     inherit_table = {}
     translation_table = {}
     path = '/'.join([os.path.dirname(__file__), 'hybrids'])
     for filename in os.listdir(path):
         if os.path.isfile('/'.join([path, filename
                                     ])) and filename.endswith('.py'):
             name = filename.replace('.py', '')
             mod = imp.load_source(name, '/'.join([path, filename]))
             for member in inspect.getmembers(mod):
                 if inspect.isclass(member[1]) \
                         and member[1].__module__ == name:
                     current_class = member[1]
                     try:
                         current_descriptor = Descriptor(
                             current_class).descriptor
                     except TypeError:
                         continue
                     current_identifier = current_descriptor['identifier']
                     if current_identifier not in translation_table:
                         translation_table[
                             current_identifier] = current_descriptor
                     if 'DataObject' in current_class.__base__.__name__:
                         if current_identifier not in base_hybrids:
                             base_hybrids.append(current_identifier)
                         else:
                             raise RuntimeError(
                                 'Duplicate base hybrid found: {0}'.format(
                                     current_identifier))
                     elif 'DataObject' not in current_class.__name__:
                         structure = []
                         this_class = None
                         for this_class in current_class.__mro__:
                             if 'DataObject' in this_class.__name__:
                                 break
                             try:
                                 structure.append(
                                     Descriptor(this_class).
                                     descriptor['identifier'])
                             except TypeError:
                                 break  # This means we reached one of the built-in classes.
                         if 'DataObject' in this_class.__name__:
                             for index in reversed(range(1,
                                                         len(structure))):
                                 if structure[index] in inherit_table:
                                     raise RuntimeError(
                                         'Duplicate hybrid inheritance: {0}({1})'
                                         .format(structure[index - 1],
                                                 structure[index]))
                                 inherit_table[
                                     structure[index]] = structure[index -
                                                                   1]
     items_replaced = True
     hybrids = {hybrid: None for hybrid in base_hybrids[:]}
     while items_replaced is True:
         items_replaced = False
         for hybrid, replacement in inherit_table.iteritems():
             if hybrid in hybrids.keys() and hybrids[hybrid] is None:
                 hybrids[hybrid] = replacement
                 items_replaced = True
             if hybrid in hybrids.values():
                 for item in hybrids.keys():
                     if hybrids[item] == hybrid:
                         hybrids[item] = replacement
                 items_replaced = True
     hybrid_structure = {
         hybrid: translation_table[replacement]
         if replacement is not None else translation_table[hybrid]
         for hybrid, replacement in hybrids.iteritems()
     }
     HybridRunner.cache[key] = hybrid_structure
     volatile.set(key, hybrid_structure)
     return hybrid_structure
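The directory scan above is a plugin-discovery pattern: every .py file in a folder is loaded with imp.load_source and the classes it actually defines (as opposed to classes it merely imports) are collected by checking __module__. A trimmed-down sketch under those assumptions (the folder path is hypothetical):

import imp
import inspect
import os

def discover_classes(path):
    """Return {class_name: class} for classes defined in .py files under path."""
    found = {}
    for filename in os.listdir(path):
        if not filename.endswith('.py'):
            continue
        name = filename[:-3]
        module = imp.load_source(name, os.path.join(path, filename))
        for member_name, member in inspect.getmembers(module, inspect.isclass):
            if member.__module__ == name:  # skip classes imported from elsewhere
                found[member_name] = member
    return found

# hybrids = discover_classes('/path/to/hybrids')  # hypothetical folder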
Example #54
0
# Contributor(s):
# If you wish your version of this file to be governed by only the CDDL or
# only the GPL Version 2, indicate your decision by adding "[Contributor]
# elects to include this software in this distribution under the [CDDL or GPL
# Version 2] license."  If you don't indicate a single choice of license, a
# recipient has the option to distribute your version of this file under
# either the CDDL, the GPL Version 2 or to extend the choice of license to
# its licensees as provided above.  However, if you add GPL Version 2 code
# and therefore, elected the GPL Version 2 license, then the option applies
# only if the new code is made subject to such option by the copyright
# holder.
#
# Portions Copyright [2016] [C2B2 Consulting Limited and/or its affiliates]  
import imp

conf = imp.load_source("pkg_conf", "../pkg_conf.py")

pkg = {
    "name"          : "phonehome",
    "version"       : conf.glassfish_version,
    "attributes"    : {
                        "pkg.summary" : "Phone Home Service Integration",
                        "pkg.description" : "Phone Home core and Phone Home Bootstrap modules",
                        "info.classification" : "OSGi Service Platform Release 4",
                      },
    "dirtrees"      : { "glassfish/modules" : {},
                      },
    "licenses"      : {
                        "../../../../ApacheLicense.txt" : {"license" : "ApacheV2"},
                      }
 }
Example #55
0
"""
Created on Wed July 13 2016

@author: fangren
contributed by T Williams
"""

import numpy as np
import matplotlib.pyplot as plt
import glob
import os
from os.path import basename
import imp
import scipy
from scipy import interpolate

plotTernary = imp.load_source("plt_ternary_save", "plotTernary.py")

path = '..//..//data//CoVZr//master_data//'
save_path = '..//..//figures//'

basename1 = 'CLEANED_Sample9_master_metadata_low.csv'
basename2 = 'CLEANED_Sample10_master_metadata_low.csv'
basename3 = 'CLEANED_Sample18_master_metadata_low.csv'

filename1 = path + basename1
filename2 = path + basename2
filename3 = path + basename3

data1 = np.genfromtxt(filename1, delimiter=',', skip_header=1)
data2 = np.genfromtxt(filename2, delimiter=',', skip_header=1)
data3 = np.genfromtxt(filename3, delimiter=',', skip_header=1)
Example #56
0
logger.setLevel(logLevel)

com = "cat env.log|grep AtlasVersion | cut -d= -f2| tail -n1"
res, release = commands.getstatusoutput(com)

com = "cat env.log|grep AtlasArea | cut -d= -f2| tail -n1"
res, branch = commands.getstatusoutput(com)

com = "grep RTTINFO MYRTTINFOS.txt | cut -d= -f2 | tail -n1 | awk '{print $1}'"
res, testName = commands.getstatusoutput(com)
if res != 0:
    logger.error(testName.strip())
    sys.exit(1)

name = testName + 'Checks.py'
mod = imp.load_source("tests", name)
import tests

file = open('./checks.html', 'w')
txt = '<html><head><title>RTT results for ' + testName.strip(
) + '</title><link rel=\"StyleSheet\" href=\"rtt.css\" type=\"text/css\" /><meta http-equiv=\"Content-Type\" content=\"text/html; charset=iso-8859-1\"><meta http-equiv=\"Content-Language\" content=\"en\"></head><body><center><h1>RTT results for ' + testName.strip(
) + '</h1></center>'
txt += '<br><br><br><br>'
txt += '<center>' + 'Branch : ' + branch + '<br>'
txt += 'Release : ' + release + '<br><br><br>'

if tests.doHistoComparison == True:
    txt += '<a href="DrawHistos.html">Histograms</a>'

if tests.doTruth == True:
    txt += '<br><a href="CaloTests_Truth.html">Truth plots</a>'
Example #57
0
import imp

import numpy as np
import pandas as pd

import axelrod

process_data = imp.load_source("processe_data", "src/process_data.py")


def test_strategies_properties():
    df = process_data.get_strategies_properties()

    assert isinstance(df, pd.DataFrame)
    assert len(df) == len(axelrod.strategies)

    for memory_depth in df["Memory_depth"]:
        assert isinstance(memory_depth, float)

    for use_of_game in df["Makes_use_of_game"]:
        assert isinstance(use_of_game, int)

    for use_of_length in df["Makes_use_of_length"]:
        assert isinstance(use_of_length, int)


def test_get_error_for_row():
    row = {
        "CC_to_C_rate": 0,
        "CD_to_C_rate": 0,
        "DC_to_C_rate": 0,
Example #58
0
def generate_instance(problem_object,
                      problem_directory,
                      instance_number,
                      staging_directory,
                      deployment_directory=None):
    """
    Runs the setup functions of Problem in the correct order

    Args:
        problem_object: The contents of the problem.json
        problem_directory: The directory to the problem
        instance_number: The instance number to be generated
        staging_directory: The temporary directory to store files in
        deployment_directory: The directory that will be deployed to. Defaults to a deterministic, unique
                              directory generated for each problem,instance pair using the configuration options
                              PROBLEM_DIRECTORY_ROOT and OBFUSCATE_PROBLEM_DIRECTORIES

    Returns:
        A dict containing (problem, staging_directory, deployment_directory, files,
                           web_accessible_files, service_file, socket_file)
    """

    logger.debug("Generating instance %d of problem '%s'.", instance_number,
                 problem_object["name"])
    logger.debug("...Using staging directory %s", staging_directory)

    username, new = create_instance_user(problem_object['name'],
                                         instance_number)
    if new:
        logger.debug("...Created problem user '%s'.", username)
    else:
        logger.debug("...Using existing problem user '%s'.", username)

    if deployment_directory is None:
        deployment_directory = generate_instance_deployment_directory(username)
    logger.debug("...Using deployment directory '%s'.", deployment_directory)

    seed = generate_seed(problem_object['name'], deploy_config.deploy_secret,
                         str(instance_number))
    logger.debug("...Generated random seed '%s' for deployment.", seed)

    copy_path = join(staging_directory, PROBLEM_FILES_DIR)
    shutil.copytree(problem_directory, copy_path)

    pretemplated_directory = join(copy_path, "__pre_templated")

    if isdir(pretemplated_directory):
        shutil.rmtree(pretemplated_directory)

    # store cwd to restore later
    cwd = os.getcwd()
    os.chdir(copy_path)

    challenge = load_source("challenge", join(copy_path, "challenge.py"))

    Problem = update_problem_class(challenge.Problem, problem_object, seed,
                                   username, deployment_directory)

    # run methods in proper order
    problem = Problem()

    # reseed and generate flag
    problem.flag = problem.generate_flag(Random(seed))
    problem.flag_sha1 = sha1(problem.flag.encode("utf-8")).hexdigest()
    logger.debug("...Instance %d flag is '%s'.", instance_number, problem.flag)

    logger.debug("...Running problem initialize.")
    problem.initialize()

    shutil.copytree(copy_path, pretemplated_directory)

    web_accessible_files = []

    def url_for(web_accessible_files,
                source_name,
                display=None,
                raw=False,
                pre_templated=False):
        if pre_templated:
            source_path = join(copy_path, "__pre_templated", source_name)
        else:
            source_path = join(copy_path, source_name)

        problem_hash = problem_object[
            "name"] + deploy_config.deploy_secret + str(instance_number)
        problem_hash = md5(problem_hash.encode("utf-8")).hexdigest()

        destination_path = join(STATIC_FILE_ROOT, problem_hash, source_name)

        link_template = "<a href='{}'>{}</a>"

        web_accessible_files.append(
            (source_path, join(deploy_config.web_root, destination_path)))
        uri_prefix = "//"
        uri = join(uri_prefix, deploy_config.hostname, destination_path)

        if not raw:
            return link_template.format(
                uri, source_name if display is None else display)

        return uri

    problem.url_for = functools.partial(url_for, web_accessible_files)

    logger.debug("...Templating the staging directory")
    template_staging_directory(copy_path, problem)

    if isinstance(problem, Compiled):
        problem.compiler_setup()
    if isinstance(problem, Remote):
        problem.remote_setup()
    if isinstance(problem, FlaskApp):
        problem.flask_setup()
    if isinstance(problem, PHPApp):
        problem.php_setup()
    if isinstance(problem, Service):
        problem.service_setup()

    logger.debug("...Running problem setup.")
    problem.setup()

    os.chdir(cwd)

    all_files = copy(problem.files)

    if isinstance(problem, Compiled):
        all_files.extend(problem.compiled_files)
    if isinstance(problem, Service):
        all_files.extend(problem.service_files)

    if not all([isinstance(f, File) for f in all_files]):
        logger.error("All files must be created using the File class!")
        raise FatalException

    for f in all_files:
        if not isinstance(f, Directory) and not os.path.isfile(
                join(copy_path, f.path)):
            logger.error("File '%s' does not exist on the file system!", f)

    service_file, socket_file = create_service_files(problem, instance_number,
                                                     staging_directory)
    logger.debug("...Created service files '%s','%s'.", service_file,
                 socket_file)

    # template the description
    problem.description = template_string(problem.description,
                                          **get_attributes(problem))
    logger.debug("...Instance description: %s", problem.description)

    return {
        "problem": problem,
        "staging_directory": staging_directory,
        "deployment_directory": deployment_directory,
        "files": all_files,
        "web_accessible_files": web_accessible_files,
        "service_file": service_file,
        "socket_file": socket_file
    }
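One detail worth isolating from the function above is how problem.url_for is built: functools.partial binds the web_accessible_files accumulator once, so the templated challenge only ever passes a file name. A stripped-down sketch of that binding (the HTML format is simplified):

import functools

def url_for(web_accessible_files, source_name, display=None):
    web_accessible_files.append(source_name)
    return "<a href='/static/{0}'>{1}</a>".format(source_name,
                                                  display or source_name)

files = []
bound = functools.partial(url_for, files)
print(bound("flag.txt"))   # <a href='/static/flag.txt'>flag.txt</a>
print(files)               # ['flag.txt']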
Example #59
0
        'USER': '******',
        'PASSWORD': '',
        'HOST': 'localhost',
        'PORT': '',
    }
}
# End Postgres support
"""
# ===============================================

# ==========LOAD CONFIG FROM MobSF HOME==========
try:
    # Update Config from MobSF Home Directory
    if USE_HOME:
        USER_CONFIG = os.path.join(MobSF_HOME, 'config.py')
        sett = imp.load_source('user_settings', USER_CONFIG)
        locals().update(
            {k: v for k, v in list(sett.__dict__.items())
                if not k.startswith('__')})
        CONFIG_HOME = True
    else:
        CONFIG_HOME = False
except Exception:
    logger.exception('Reading Config')
    CONFIG_HOME = False
# ===============================================

# ===MOBSF SECRET GENERATION AND DB MIGRATION====
SECRET_KEY = first_run(SECRET_FILE, BASE_DIR, MobSF_HOME)

# =============================================
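The try block above shows a common override pattern: load a user-editable config.py with imp.load_source and copy its public attributes over the defaults in the current namespace. A hedged sketch of that idea as a helper function (the settings path is hypothetical):

import imp

def apply_user_settings(namespace, settings_path):
    """Overlay public attributes of a settings file onto `namespace`."""
    sett = imp.load_source('user_settings', settings_path)
    overrides = {k: v for k, v in vars(sett).items() if not k.startswith('__')}
    namespace.update(overrides)
    return overrides

# apply_user_settings(globals(), '/home/user/.MobSF/config.py')  # hypothetical path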
Example #60
0
 def kairosdb_config(self, c):
     for child in c.children:
         if child.key == 'AddHostTag':
             self.add_host_tag = child.values[0]
         elif child.key == 'KairosDBURI':
             self.uri = child.values[0]
         elif child.key == 'TypesDB':
             for tag in child.values:
                 self.kairosdb_parse_types_file(tag)
         elif child.key == 'LowercaseMetricNames':
             self.lowercase_metric_names = child.values[0]
         elif child.key == 'MetricName':
             self.metric_name = str(child.values[0])
         elif child.key == 'HostSeparator':
             self.host_separator = child.values[0]
         elif child.key == 'MetricSeparator':
             self.metric_separator = child.values[0]
         elif child.key == 'ConvertToRate':
             if not child.values:
                 raise Exception("Missing ConvertToRate values")
             self.convert_rates = child.values
         elif child.key == 'Formatter':
             formatter_path = child.values[0]
             try:
                 self.formatter = imp.load_source('formatter',
                                                  formatter_path)
             except:
                 raise Exception('Could not load formatter %s %s' %
                                 (formatter_path, format_exc()))
         elif child.key == "PluginFormatterPath":
             if child.values:
                 self.pluginsToFormatter = self.load_plugin_formatters(
                     child.values[0])
         elif child.key == 'Tags':
             for tag in child.values:
                 tag_parts = tag.split("=")
                 if len(tag_parts) == 2 and len(tag_parts[0]) > 0 and len(
                         tag_parts[1]) > 0:
                     self.tags_map[tag_parts[0]] = tag_parts[1]
                 else:
                     raise Exception("Invalid tag: %s" % tag)
         elif child.key == 'ThrowawaySampleAge':
             if not child.values:
                 raise Exception("Missing %s value, must be in seconds" %
                                 child.key)
             try:
                 self.throwaway_sample_age = int(child.values[0])
             except Exception as ex:
                 self.throwaway_sample_age = False
                 raise Exception("%s requires time in seconds: %s" %
                                 (child.key, str(ex)))
         elif child.key == 'VerboseLogging':
             if isinstance(child.values[0], bool):
                 self.verbose_logging = bool(child.values[0])
             elif isinstance(child.values[0], str):
                 if str.lower(child.values[0]) == 'true':
                     self.verbose_logging = True
                 else:
                     self.verbose_logging = False
         elif child.key == 'HttpTimeout':
             if not child.values:
                 raise Exception("Missing %s value, must be in seconds" %
                                 child.key)
             try:
                 self.http_timeout = int(child.values[0])
             except Exception as ex:
                 self.http_timeout = 0
                 raise Exception("%s requires time in seconds: %s" %
                                 (child.key, str(ex)))