def __init__(self,
             preconditions,
             effects,
             baseId=-1,
             refId=-1,
             time=1,
             effectText="",
             involvedChars="none",
             consentingChars="none"):
    Node.__init__(self, baseId, refId)  # constructor for nodes in general
    # set other properties
    self.effects = effects
    self.preconditions = preconditions
    self.time = time
    self.involvedChars = []
    self.effectText = effectText
    if involvedChars == "none":
        # infer the involved characters by scanning the source of the
        # precondition/effect callables for quoted agent names
        temp = []
        precond = getsource(preconditions)
        eff = getsource(effects)
        for a in agents:
            if "\"" + a + "\"" in precond or "\"" + a + "\"" in eff:
                temp.append(a)
        temp.append("$executingAgent$")
        self.involvedChars = temp
    else:
        self.involvedChars = involvedChars
    self.consentingChars = []
    if consentingChars == "none":
        self.consentingChars = ["$executingAgent$"]
    else:
        self.consentingChars = consentingChars
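The constructor above infers which characters an action involves by scanning the source text of the precondition/effect callables for quoted agent names. A minimal sketch of that trick, assuming a hypothetical agents list and lambda predicates (none of the names below come from the original project):

from dill.source import getsource

agents = ["alice", "bob"]                             # hypothetical agent registry
precondition = lambda state: state["alice"]["alive"]  # mentions "alice" in its source
effect = lambda state: state                          # mentions no agent by name

involved = [a for a in agents
            if '"' + a + '"' in getsource(precondition)
            or '"' + a + '"' in getsource(effect)]
print(involved)  # ['alice'] -- "bob" never appears in either source text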
Example #2
def print_func_info(fx, num_list, gx=0):
    re_bod = re.compile(r"(?<=return\s)(?P<state>(.*))")
    fn_bod = getsource(fx)
    ret_st = re_bod.search(fn_bod)
    print(f"h(x): {fx.__name__:<12} returning: {ret_st.group('state')}")
    if gx != 0:
        fn2_bod = getsource(gx)
        ret2_st = re_bod.search(fn2_bod)
        print(f"g(x): {gx.__name__:<12} returning: {ret2_st.group('state')}")
    print(f"List: {num_list}")
Example #3
def test_patches():
    for obj, digests in _READ_TIMEOUT_DIGESTS.items():
        digest = hashlib.sha1(getsource(obj).encode('utf-8')).hexdigest()
        assert digest in digests, \
            "Digest of {} not found in: {}".format(obj.__name__, digests)

    for obj, digests in _API_DIGESTS.items():
        digest = hashlib.sha1(getsource(obj).encode('utf-8')).hexdigest()
        assert digest in digests, \
            "Digest of {} not found in: {}".format(obj.__name__, digests)
Example #4
def test_patches():
    print("Botocore version: {} aiohttp version: {}".format(
        botocore.__version__, aiohttp.__version__))

    for obj, digests in _AIOHTTP_DIGESTS.items():
        digest = hashlib.sha1(getsource(obj).encode('utf-8')).hexdigest()
        assert digest in digests, \
            "Digest of {} not found in: {}".format(obj.__name__, digests)

    for obj, digests in _API_DIGESTS.items():
        digest = hashlib.sha1(getsource(obj).encode('utf-8')).hexdigest()
        assert digest in digests, \
            "Digest of {} not found in: {}".format(obj.__name__, digests)
Example #7
def test_getsource():
  assert getsource(f) == 'f = lambda x: x**2\n'
  assert getsource(g) == 'def g(x): return f(x) - x\n'
  assert getsource(h) == 'def h(x):\n  def g(x): return x\n  return g(x) - x\n'
  assert getname(f) == 'f'
  assert getname(g) == 'g'
  assert getname(h) == 'h'
  assert _wrap(f)(4) == 16
  assert _wrap(g)(4) == 12
  assert _wrap(h)(4) == 0

  assert getname(Foo) == 'Foo'
  assert getname(Bar) == 'Bar'
  assert getsource(Bar) == 'class Bar:\n  pass\n'
  assert getsource(Foo) == 'class Foo(object):\n  def bar(self, x):\n    return x*x+x\n'
Example #8
    def __init__(
        self,
        func_src=None,
        func=None,
        modules=[torch, syft, crypten],
        rm_decorators=True,
        **global_kwargs,
    ):
        """
        Control what should be accessible from inside the function.

        Args:
            func_src: function's src to be jailed. Required if func isn't set.
            func: function to be jailed. Ignored if func_src is set.
            modules: python modules that should be accessible.
            rm_decorators: whether decorators should be removed. Defaults to True.
            global_kwargs: globals to be accessible.
        """

        if func_src is None:
            if func is None:
                raise ValueError("Either func_src or func must be set")
            else:
                try:
                    func_src = getsource(func)
                except Exception:  # fall back to inspect if dill fails
                    import inspect

                    func_src = inspect.getsource(func)

        if rm_decorators:
            # Remove decorator if any
            func_src = re.sub(r"@[^\(]+\([^\)]*\)", "", func_src)

        # remove base indent
        lines = func_src.split("\n")
        if len(lines) and re.match(r"^ *", lines[0]):
            base_re = "^" + re.match(r"^ *", lines[0]).group(0)
            new_lines = []
            for l in lines:
                l = re.sub(base_re, "", l)
                new_lines.append(l)
            func_src = "\n".join(new_lines)

        is_func, self._func_name = JailRunner._check_func_def(func_src)
        if not is_func:
            raise ValueError("Not a valid function definition")

        self._func_src = func_src

        self._jail_globals = global_kwargs
        # save names for serialization
        self._module_names = []
        # add modules
        for module in modules:
            self._jail_globals[module.__name__] = module
            self._module_names.append(module.__name__)

        self._is_built = False
        self._build()
def call_callable_dynamic_args(func, *args, **kwargs):
    if isinstance(func, Iterable):
        return [call_callable_dynamic_args(f, *args, **kwargs) for f in func]

    if not callable(func):
        if func is None:
            return None
        else:
            raise ValueError(f"function {func} is not callable")

    callable_args = Signature.from_callable(func).bind(*args, **kwargs)

    try:
        return func(*callable_args.args, **callable_args.kwargs)
    except StopIteration as s:
        raise s
    except KeyError as ke:
        raise ke
    except Exception as e:
        try:
            source = inspect.getsource(func)
        except OSError:
            try:
                from dill.source import getsource
                source = getsource(func)
            except Exception:
                source = "eval"

        raise RuntimeError(
            e,
            f"error while calling {func}({inspect.getfullargspec(func)})\n{source}\nwith arguments:\n{callable_args}, {kwargs}"
        )
 def process_actions_source(self):
     if not self.process_actions:
         return None
     return getsource(self.process_actions).replace(
         "def get_process_actions(self",
         f"def __get_process_actions_{camel_to_snake(self.name)}(self",
     )
Example #11
def hint(f, do_print=True):
    result = ParseSource(getsource(f)).describe()

    if do_print:
        print(result)

    return result
Example #12
 def tree_from_func(self, name, func):
     """
     Create a tree from a well-formed nested if-then function in Python.
     Tests must use the < or >= operators; split_dims can either be identified with indices, e.g. x[0],
     or with a valid entry in self.dim_names.
     """
     from dill.source import getsource
     lines = [l.strip() for l in getsource(func).split("\n")[:-1]]
     assert lines[0][:3] == "def"
     def _recurse(node, n):
         if lines[n][0] == "#": return _recurse(node, n + 1) 
         elif lines[n][:2] == "if":
             d, o, t = lines[n][3:-1].split(" ")
             assert o in ("<", ">=")
             try: split_dim = int(d.split("[")[1][:-1]) # If index specified.
             except: split_dim = self.dim_names.index(d) # If dim_name specified.
             split_dims.add(split_dim)
             if not node._do_split(split_dim, split_threshold=float(t)):
                 raise ValueError(f"Invalid split threshold at line {n}: \"{lines[n]}\".")
             n = _recurse(node.left if o == "<" else node.right, n + 1)
             assert lines[n] == "else:"
             n = _recurse(node.right if o == "<" else node.left, n + 1)
         elif lines[n][:6] == "return":
             # NOTE: Float-convertible return values are stored in node.meta["return"].
             try:    node.meta["return"] = float(lines[n][6:])
             except: node.meta["return"] = None
             n += 1
         else: raise ValueError(f"Parse error at line {n}: \"{lines[n]}\".")
         return n
     split_dims, eval_dims = set(), [] # NOTE: No eval dims.
     root = Node(self, sorted_indices=self.all_sorted_indices)
     _recurse(root, 1)
     self.models[name] = Tree(name, root, sorted(split_dims), eval_dims)
     return self.models[name]
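A hedged sketch of a "well-formed nested if-then function" in the format the docstring above describes; the dimensions, thresholds, and return values are illustrative:

def illustrative_tree(x):
    if x[0] < 0.5:
        if x[1] >= 2.0:
            return 1.0
        else:
            return 0.0
    else:
        return 2.0

# model.tree_from_func("demo", illustrative_tree)  # hypothetical call on the owning object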
Example #13
def test1(obj):
    _obj = source._wrap(obj)
    assert _obj(1.57) == obj(1.57)
    src = source.getsource(obj, alias="_f")
    exec(src, globals())  # Python 3: exec() is a function; run in globals so _f is visible below
    assert _f(1.57) == obj(1.57)
    name = source._get_name(obj)
    assert name == obj.__name__ or src.split("=", 1)[0].strip()
Example #14
 def inner(*args, **kwargs):
     print("")
     print(func.__name__)
     print(inspect.getsource(func))
     print(inspect.getsourcelines(func))
     print(dis.dis(func))
     print(getsource(func))
     return func(*args, **kwargs)
Example #15
 def update_source(self, all_feats: Dict[str, "Feature"]) -> None:
     """
     Supposed to be run through FeatureManager.
     """
     source = getsource(self.__class__)
     self.__save_source(source)
     dependencies = self.__get_dependency(source)
     for d in dependencies:
         all_feats[d].update_source(all_feats)
Example #16
def filterOnColumn(df,
                   column,
                   condition,
                   result_column,
                   condition_column,
                   inplace=True,
                   **kwargs):
    '''
    :param df: input pandas dataframe
    :param column: column that is the target of the filtering
    :param condition: callable (often a lambda) applied to each value of column
    :param result_column: column that receives the result of applying condition
    :param condition_column: column that receives the source text of condition
    :param inplace: if True, modify df in place
    :return: the new dataframe if inplace is False, otherwise None
    '''

    if not inplace:
        df = df.copy(deep=True)

    # If an inline lambda is used for filtering, getsource returns the whole
    # function call containing the lambda rather than just the lambda itself,
    # so the lambda has to be extracted from that call with a regex.

    filter_source = getsource(condition)
    logger.debug('Original filter_source')
    logger.debug(filter_source)
    searchObj = re.search(r'(lambda [^,]*)([,]*)(.*)[)]{1}$', filter_source,
                          re.M | re.I)

    if searchObj:
        filter_source = searchObj.group(1)

    logger.debug('Retained filter_source')
    logger.debug(filter_source)

    df[result_column] = df[column].apply(condition)
    df[condition_column] = getsource(condition)

    if not inplace:
        return df
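A hedged illustration of the comment above about inline lambdas (the call and column names are made up):

# filterOnColumn(df, 'age', lambda v: v > 18, 'is_adult', 'rule')
#
# Here getsource(condition) returns the whole call line rather than just the
# lambda, so the regex keeps only the "lambda v: v > 18" part of it.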
Example #17
def test_patches():
    print("Botocore version: {} aiohttp version: {}".format(
        botocore.__version__, aiohttp.__version__))

    success = True
    for obj, digests in chain(_AIOHTTP_DIGESTS.items(), _API_DIGESTS.items()):
        digest = hashlib.sha1(getsource(obj).encode('utf-8')).hexdigest()
        if digest not in digests:
            print("Digest of {}:{} not found in: {}".format(
                obj.__qualname__, digest, digests))
            success = False

    assert success
Example #18
def func_to_file(fup, fin):
    source = getsource(fup)
    executable = 'import sys\n\n' + source

    # everything before the first '(' is "def <name>"; the loop below strips the "def "
    call = source.split('(')[0]

    for char in ['d', 'e', 'f', ' ']:
        call = call.lstrip(char)

    call = call + '(*sys.argv[1:])'
    executable += f'\n{call}\n'

    with open(fin, 'w') as fip:
        fip.write(executable)
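A hedged usage sketch; greet and the output file name are illustrative:

def greet(name):
    print("hello", name)

func_to_file(greet, "run_greet.py")
# run_greet.py now holds "import sys", the source of greet, and a trailing
# "greet(*sys.argv[1:])", so it can be invoked as:
#   python run_greet.py world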
Example #19
 def p1(self):
     sum  = 0
     #begin code _BEGIN_
     for x in range(1, 1000):
         if x % 3 == 0 or x % 5 == 0:
             sum += x
     #_END_ end code
     result = "Sum = " + str(sum)
     allcode = getsource(p1)
     start = allcode.index("_BEGIN_") + len("_BEGIN_") + 1
     end = allcode.index("_END_", start) - 2
     code = str("".join(map(str, allcode[start:end])))
     print(code)
     showResult("Problem 1: Find sum of all the multiples of 3 or 5 below 1000",
                                 result, code)
Example #20
def get_needed_source(func_name):
    needed_globals = getglobals(func_name)

    this_module = sys.modules['__main__']
    full_source_lines = re.split("\n", getsource(this_module))
    all_imports = list(
        filter(lambda x: re.search(r"\s?import\s", x), full_source_lines))

    needed_imports = []
    for imp in all_imports:
        if line_contains_var_in_list(imp, needed_globals['other']):
            needed_imports.append(imp)

    imported_funcs = []
    for func in needed_globals['function']:
        for imp in all_imports:
            if re.search("[^\w]" + func + "([^\w]|)", imp):
                needed_imports.append(imp)
                imported_funcs.append(func)
                break
    unimported_funcs = [
        func for func in needed_globals['function']
        if func not in imported_funcs
    ]

    needed_source = ""

    for imp in needed_imports:
        needed_source += imp + "\n"

    needed_source += "\n" + getsource(getattr(this_module, func_name)) + "\n"

    for func in unimported_funcs:
        needed_source += getsource(getattr(this_module, func)) + "\n"

    return needed_source
Example #21
def test_create_constraints():
    # create constraints, and evaluate all constraint functions for a given x vector
    print("\n1 --- 'create_constraints'")

    print("\nk=3:")
    k = 3
    r = create_constraints(k)
    v0 = np.ones(k) / k
    for i in r:
        print(i.get('fun')(v0))
    for i in r:
        print(getsource(i.get('fun')))

    print("\nk=4:")
    k = 4
    r = create_constraints(k)
    v0 = np.ones(k**2) / k
    for i in r:
        print(i.get('fun')(v0))

    print("\nk=5:")
    k = 5
    r = create_constraints(k)
    v0 = np.ones(k**2) / k
    for i in r:
        print(i.get('fun')(v0))

    print("\nk=6:")
    k = 6
    r = create_constraints(k)
    v0 = np.ones(k**2) / k
    for i in r:
        print(i.get('fun')(v0))

    print("\nk=5 (free parameters are all 0):")
    k = 5
    r = create_constraints(k)
    v0 = np.zeros(k**2) / k
    for i in r:
        print(i.get('fun')(v0))

    print("\nk=4 (free parameters are all different):")
    k = 4
    r = create_constraints(k)
    v0 = [0, 1, 2, 4, 8, 16, 32, 64, 128, 256, 10]
    for i in r:
        print(i.get('fun')(v0))
Example #22
    def get_jsonable_from_parameters(self):
        """ """
        jsonable_parameters = []

        for one_model_parameters in self.raw_parameters:
            one_jsonable = dict(one_model_parameters)
            converter = one_model_parameters['tau_converter']

            if not isinstance(converter, str):
                try:
                    # does not always work, but that is not important here
                    one_jsonable["tau_converter"] = str(getsource(converter))
                except (TypeError, OSError):
                    # OSError: may arise if working in Jupyter Notebook
                    one_jsonable["tau_converter"] = "<NOT AVAILABLE>"

            jsonable_parameters.append(one_jsonable)

        return jsonable_parameters
Example #23
 def _complex_to_simple(self, obj):
     result = obj
     if inspect.isclass(obj):
         # result = { '__type__': 'class' }
         # print('Class')
         result = {'__type__': 'class'}
         result['name'] = obj.__name__
         allowed_keys = ['__init__']
         result['members'] = dict(
             (key, value) for (key, value) in obj.__dict__.items()
             if not key.startswith('__') or key in allowed_keys)
         # print(result)
     elif callable(obj):
         # print('Function')
         result = {'__type__': 'function'}
         code = getsource(obj).strip()
         if 'lambda ' in code:
             code = code[code.find('lambda '):]
         result['code'] = code
         result['globals'] = {}
         excluded_keys = ['global_vars']
         for key, value in list(obj.__globals__.items()):
             if key.startswith('global') and not key in excluded_keys:
                 result['globals'][key] = value
         # print(f'Globals for {obj.__name__} function: {result["globals"]}')
         # transformed_code = {}
         # print(obj.__code__.co_consts)
         # print(dir(obj.__code__))
         # for key in dir(obj.__code__):
         #   if key.startswith('co'):
         #     attr = getattr(obj.__code__, key)
         #     transformed_code[key] = attr() if callable(attr) else attr
         # print(transformed_code)
         # result['code'] = transformed_code
         result['name'] = obj.__name__
         result['args'] = inspect.getfullargspec(obj).args  # getargspec was removed in Python 3.11
         # result['globals'] = {}
         # excluded_keys = ['init']
         # for key, value in list(global_vars.init.__globals__.items()):
         #   if not key.startswith('__') and key not in excluded_keys:
         #     result['globals'][key] = value
     return result
Example #24
def RecordSimulationData():
    from dill.source import getsource
    PayoutConfig = getsource(TableHelpers.CalculatePayouts)
    tempVar = Simulation(
        settings.counting,
        settings.reshufflePercentage,
        settings.reshuffleDecks,
        settings.numberOfPlayers,
        settings.tableminimumbet,
        settings.tablemaximumbet,
        settings.WalkAwayLoss,
        settings.WalkAwayWin,
        settings.WalkAwayWinLimit,
        settings.WalkAwayWinBig,
        settings.WalkAwayWinBigLimit,
        settings.players[0]._peakpurse,
        settings.CounterEndingPurse,
        settings.definition,
    )
    SimulationEntries.append(tempVar)
        def extract_vars(obj: object) -> dict:
            """Extracts attributes from a dataset/project/version."""
            vars_dict = {}
            for attr, val in vars(obj).items():
                if callable(val):
                    # function; skip
                    pass

                elif attr in ['uuids_', 'meta_']:
                    # convert lambda functions to strings
                    vars_dict[attr] = {
                        key: getsource(func).split(': ', 1)[1].strip(',\n')
                        for key, func in val.items()
                    }

                elif attr not in ['api', 'projects', 'versions']:
                    # regular attr, add to dict
                    vars_dict[attr] = val

            return vars_dict
Example #26
 def build(self, all_feats: Dict[str, "Feature"], filepath: str):
     """
     Supposed to be run through FeatureManager.
     """
     if os.path.exists(self.dvc_path):
         return
     output_opt = '-o ' + self.output_path()
     dvc_output_opt = '-f ' + self.dvc_path
     source = getsource(self.__class__)
     source_path = self.__save_source(source)
     dependencies = self.__get_dependency(source)
     dep_option_list = [
         '-d ' + d for d in (self.file_dependencies + [source_path])
     ]
     for d in dependencies:
         all_feats[d].build(all_feats, filepath)
         dep_option_list.append('-d ' + self.output_path(d))
     dep_option = ' '.join(dep_option_list)
     py_command = f'python {filepath} run {self.name}'
     command = ' '.join(
         ['dvc run', dep_option, output_opt, dvc_output_opt, py_command])
     subprocess.run(command, shell=True)
Example #27
services = out.decode("utf-8").split("\n")
services = list(filter(len, services))
services = [
    service.split(".py")[0] for service in services
    if not service.startswith("__")
]
imports = ["import " + service + "\n" for service in services]
exec("".join(imports))

from dill.source import getsource

controller_methods = []
for service in services:
    controller_method_names = list(eval(service + ".methods").keys())
    for controller_method_name in controller_method_names:
        controller_method = getsource(
            eval(service + "." + controller_method_name))
        controller_methods = [
            *controller_methods, "\n\n@app.route(" + service + ".methods[\"" +
            controller_method_name + "\"][\"url\"]," + " methods=" + service +
            ".methods[\"" + controller_method_name +
            "\"][\"http_methods\"])\n" + controller_method
        ]

footer = """

if __name__ == '__main__':
    app.run(host=host, port=5001, debug=show_output)

"""

server1.writelines([*imports, *controller_methods, footer])
Example #28
  def bar(self, x):
    return x*x+x
_foo = Foo()

def add(x,y):
  return x+y

# yes, same as 'f', but things are tricky when it comes to pointers
squared = lambda x:x**2

class Bar:
  pass
_bar = Bar()

                       # inspect.getsourcelines # dill.source.getblocks
assert getsource(f) == 'f = lambda x: x**2\n'
assert getsource(g) == 'def g(x): return f(x) - x\n'
assert getsource(h) == 'def h(x):\n  def g(x): return x\n  return g(x) - x \n'
assert getname(f) == 'f'
assert getname(g) == 'g'
assert getname(h) == 'h'
assert _wrap(f)(4) == 16
assert _wrap(g)(4) == 12
assert _wrap(h)(4) == 0

assert getname(Foo) == 'Foo'
assert getname(Bar) == 'Bar'
assert getsource(Bar) == 'class Bar:\n  pass\n'
assert getsource(Foo) == 'class Foo(object):\n  def bar(self, x):\n    return x*x+x\n'
#XXX: add getsource for  _foo, _bar
Example #30
def add(x, y):
    return x + y


# yes, same as 'f', but things are tricky when it comes to pointers
squared = lambda x: x**2


class Bar:
    pass


_bar = Bar()

# inspect.getsourcelines # dill.source.getblocks
assert getsource(f) == 'f = lambda x: x**2\n'
assert getsource(g) == 'def g(x): return f(x) - x\n'
assert getsource(h) == 'def h(x):\n  def g(x): return x\n  return g(x) - x \n'
assert getname(f) == 'f'
assert getname(g) == 'g'
assert getname(h) == 'h'
assert _wrap(f)(4) == 16
assert _wrap(g)(4) == 12
assert _wrap(h)(4) == 0

assert getname(Foo) == 'Foo'
assert getname(Bar) == 'Bar'
assert getsource(Bar) == 'class Bar:\n  pass\n'
assert getsource(
    Foo) == 'class Foo(object):\n  def bar(self, x):\n    return x*x+x\n'
#XXX: add getsource for  _foo, _bar
Example #31
 def wrapper_get_str_func(*args, **kwargs):
     print(getsource(func))
     return uparser_dec(getsource(func))
Example #32
def getCode(func):
    allcode = getsource(eval(func))  # func is passed as the function's name string, hence the eval
    start = allcode.index("_BEGIN_") + len("_BEGIN_") + 1
    end = allcode.index("_END_", start) - 2
    code = str("".join(map(str, allcode[start:end])))
    return code
Example #33
        def func_caching(*args, **dict_args):
            force_reload = any(re.match(expr, func.__name__) is not None for expr in ninja_globals["force_reload"]) \
                or dict_args.get("force_reload", False)


            if len(args) > 0:
                raise Exception("For cached functions pass all args by dict_args (ensures cache resolution)")

            # Get arguments including defaults
            a = inspect.getargspec(func)
            if a.defaults:
                default_args = dict(zip(a.args[-len(a.defaults):],a.defaults))
                for default_arg in default_args:
                    if dict_args.get(default_arg, None) is None:
                        dict_args[default_arg] = default_args[default_arg]

            # Generate key
            dict_args_original = dict(dict_args)
            dict_args_original.pop("force_reload", None)
            dict_args_original.pop("store", None)
            dict_args_original.pop("_write_to_cache", None)
            dict_args_original.pop("_load_cache_or_fail", None)
            part_key, dumped_arguments = _generate_key(func.__name__, dict_args_original, skip_args)

            # Key is dependent on source. If you change source of function it reloads automatically
            try:
                part_key += "_" + hashlib.sha1(getsource(func)).hexdigest()[0:10] + "_"
            except:
                 ninja_globals["logger"].warning(func.__name__+": Cache is not dependent on source")

            full_key = func.__name__
            for k in key_args:
                if dict_args[k]!="":
                    full_key = full_key + "_" + str(dict_args[k])

            full_key = full_key+"_"+part_key

            # print "DUMPED", dumped_arguments
            # print "KEY", full_key


            # Load from RAM cache
            if not force_reload \
                    and cached_ram and full_key in ninja_globals["cache"]:
                if logger:
                    ninja_globals["logger"].debug(func.__name__+": Reading from RAM cache")
                return ninja_globals["cache"][full_key]

            # Resolve existence
            cache_file_default = os.path.join(ninja_globals["cache_dir"], full_key + ".pkl")
            exists = os.path.exists(cache_file_default) if check_fnc is None \
                else check_fnc(full_key, ninja_globals["cache_dir"])


            if not exists and \
                     dict_args.get("_write_to_cache", False) is False  and \
                    cache_google_cloud and ninja_globals["google_cache_on"]:

                if os.system(ninja_globals["gsutil_path"] + " stat "+os.path.join(ninja_globals["google_cloud_cache_dir"], full_key + "*")) == 0:
                    exists = True
                    ninja_globals["logger"].debug(func.__name__+": Reading from Google Cloud Storage")

                    os.system(ninja_globals["gsutil_path"] + " -m cp "+os.path.join(ninja_globals["google_cloud_cache_dir"], full_key + "* ") + \
                              ninja_globals["cache_dir"])


            def evaluate():
                if logger:
                    logger.debug(func.__name__+": Cache miss or force reload. Caching " + full_key)

                # Special function that can overwrite cache
                if dict_args.get("_write_to_cache", None) is not None:
                    returned_value = dict_args.get("_write_to_cache")
                else:
                    returned_value = func(*args, **dict_args_original)



                return returned_value

            def write(returned_value):
                if dict_args.get("store", True):
                    ninja_globals["logger"].debug(func.__name__+": Saving " + full_key)
                    if save_fnc:
                        save_fnc(full_key, returned_value, ninja_globals["cache_dir"])
                    else:
                        with open(cache_file_default, "w") as f:
                            if use_cPickle:
                                cPickle.dump(returned_value, f)
                            else:
                                pickle.dump(returned_value, f)

                        # Write arguments for later retrieval
                        # NOTE: of course better way would be to keep a dict..
                        with open(os.path.join(ninja_globals["cache_dir"], full_key+".args"), "w") as f:
                            f.write(json.dumps({"func_name":func.__name__, "time": _uct_timestamp(), "key":\
                                                full_key, "search_args": dict({k: dict_args[k] for k in search_args}), \
                                                "args": json.loads(dumped_arguments)}))

                    if cache_google_cloud and ninja_globals["google_cache_on"]:
                        assert ninja_globals["google_cloud_cache_dir"] != ""
                        # if not os.system(ninja_globals["gsutil_path"] + " stat "+os.path.join(ninja_globals["google_cloud_cache_dir"], full_key + "*")) == 0:
                        os.system(ninja_globals["gsutil_path"]+" -m cp "+os.path.join(ninja_globals["cache_dir"], full_key + "* ") + " " +\
                                ninja_globals["google_cloud_cache_dir"])

                return returned_value
            # Load from cache unless some conditions are met
            if exists and not force_reload:
                if dict_args.get("_write_to_cache", False) != False:
                    write(dict_args.get("_write_to_cache"))
                    return




                if logger:
                    logger.debug(func.__name__+":Loading (pickled?) file")

                if load_fnc:
                    value = load_fnc(full_key, ninja_globals["cache_dir"])
                else:
                    # We do try here because we might have failed writing pickle file before
                    try:
                        with open(cache_file_default, "r") as f:
                            value = cPickle.load(f) if use_cPickle else pickle.load(f)
                    except:
                       ninja_globals['logger'].info(func.__name__+":Corrupted file")
                       value = write(evaluate())


                    #return None # Just writing

            else:
                if not dict_args.get("_load_cache_or_fail", False):
                    value = write(evaluate())
                else:
                    return None

            if cached_ram:
                ninja_globals["cache"][full_key] = value
            return value
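The caching decorator above mixes a hash of the function's source into the cache key, so editing the function body invalidates stale cache entries. A minimal sketch of that idea (note the decorator itself is Python 2 code; on Python 3 the source must be encoded before hashing):

import hashlib
from dill.source import getsource

def source_key(func):
    # illustrative helper, not part of the decorator above
    return func.__name__ + "_" + hashlib.sha1(getsource(func).encode("utf-8")).hexdigest()[:10]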
# HOW TO PRINT THE SOURCE CODE FOR IMPORTED FUNCTIONS OR CLASSES (EXAMPLE)

# import packages for hyper-parameter optimization
from sklearn.grid_search import RandomizedSearchCV
#from sklearn.grid_search import GridSearchCV
from sklearn.cross_validation import train_test_split
from sklearn.neighbors import KNeighborsClassifier
from imutils import paths
import numpy as np
import argparse
import imutils
import time
import cv2
import os

from sklearn import *

import dill
from dill.source import getsource
print('\n\nok so far...\n')
print(getsource(RandomizedSearchCV))
print('\n\nok so far...\n')
print(getsource(train_test_split))

# how to print method from a class:

# CLASS foo
# METHOD bar

# print(getsource(foo.bar))
Example #35
l1 = [1, 2]
l2 = [3, 4]

l = zip(l1, l2)

#print(l.__next__())

l = l1.append(l2)

def add(a, b):
    return a + b

import inspect
insp = inspect.getsource(add)
print(insp)
import pandas
print(inspect.getsource(pandas.DataFrame))

from dill.source import getsource, getsourcefile
print(getsource(add))
print(getsource(l1.copy, builtin=True)) #error?
#print(getsource(pandas.DataFrame.append, builtin=True))