Example #1
def _cell_magic(line, query):
    """Underlying function for bigquery cell magic

    Note:
        This function contains the underlying logic for the 'bigquery' cell
        magic. This function is not meant to be called directly.

    Args:
        line (str): "%%bigquery" followed by arguments as required
        query (str): SQL query to run

    Returns:
        pandas.DataFrame: the query results.
    """
    args = magic_arguments.parse_argstring(_cell_magic, line)

    project = args.project or context.project
    client = bigquery.Client(project=project, credentials=context.credentials)
    job_config = bigquery.job.QueryJobConfig()
    job_config.use_legacy_sql = args.use_legacy_sql
    query_job = _run_query(client, query, job_config)

    if not args.verbose:
        display.clear_output()

    result = query_job.to_dataframe()
    if args.destination_var:
        IPython.get_ipython().push({args.destination_var: result})
    return result
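A minimal usage sketch (not part of the original example), assuming the google-cloud-bigquery extension is installed and credentials are configured; it mirrors the tests further down this page:

import IPython

ip = IPython.get_ipython()
ip.extension_manager.load_extension("google.cloud.bigquery")
# Equivalent to running a "%%bigquery df" cell: the resulting DataFrame is
# also pushed into the user namespace under the name "df".
ip.run_cell_magic("bigquery", "df", "SELECT 17 AS num")
print(ip.user_ns["df"])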
Example #2
 def _ipython_register(): # pragma: no cover
   try:
     import IPython
     if IPython.get_ipython():
       IPython.get_ipython().events.register("post_execute", Canvas._ipython_post_execute)
   except Exception:
     pass
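A sketch of the same pattern with a plain callback, using the public IPython events API (the callback name is illustrative):

import IPython

def _on_post_execute():
    pass  # e.g. redraw a canvas after every executed cell

ip = IPython.get_ipython()
if ip is not None:
    ip.events.register("post_execute", _on_post_execute)
    # symmetric cleanup once the hook is no longer needed:
    # ip.events.unregister("post_execute", _on_post_execute)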
Example #3
 def show_error(self):
     # print('Circle was moved to (%s, %s).' % (x, y))
     print("an error occurred")
     etype, evalue, tb = sys.exc_info()
     ip = IPython.get_ipython()  # `ip` was an undefined global in the original
     ip.showtraceback(etype, evalue, tb)
     print(IPython.get_ipython())
     print("afterwards")
Example #4
def unload_ipython_extension(shell):
  _shell.InteractiveShell.run_cell_magic = _orig_run_cell_magic
  _shell.InteractiveShell.run_line_magic = _orig_run_line_magic
  _requests.Session.__init__ = _orig_init
  _httplib2.Http.request = _orig_request
  try:
    del _IPython.get_ipython().user_ns['project_id']
    del _IPython.get_ipython().user_ns['set_project_id']
  except Exception:
    pass  # We mock IPython for tests so we need this.
Example #5
def dump_history(name, destination):
    console = IPython.get_ipython()
    with open(os.path.join(destination, "history_manual.py"), "w") as stream_out:
        stream_out.write("# -*- coding: utf-8 -*-\n")
        for cell in console.history_manager.get_range():
            stream_out.write("\n\n# <codecell>\n\n" + cell[2])
    original_path = os.path.join(IPython.get_ipython().starting_dir, name + ".ipynb")
    dest = os.path.join(destination, "notebook.ipynb")
    shutil.copy(original_path, dest)
    console.run_cell("%notebook -e " + os.path.join(destination, "history.py"), silent=True)
    console.run_cell("%notebook -e " + os.path.join(destination, "history.ipynb"), silent=True)
    debug("Copied last saved state of notebook and dumped kernel history")
Example #6
def register_exit(func):
    if is_ipython():
        ip = IPython.get_ipython()
        ip.hooks['shutdown_hook'].add(func, 1)
    else:
        import atexit
        atexit.register(func)
Example #7
def test_bigquery_magic_without_optional_arguments(monkeypatch):
    ip = IPython.get_ipython()
    ip.extension_manager.load_extension("google.cloud.bigquery")
    magics.context.credentials = mock.create_autospec(
        google.auth.credentials.Credentials, instance=True
    )

    # Shouldn't fail when BigQuery Storage client isn't installed.
    monkeypatch.setattr(magics, "bigquery_storage_v1beta1", None)

    sql = "SELECT 17 AS num"
    result = pandas.DataFrame([17], columns=["num"])
    run_query_patch = mock.patch(
        "google.cloud.bigquery.magics._run_query", autospec=True
    )
    query_job_mock = mock.create_autospec(
        google.cloud.bigquery.job.QueryJob, instance=True
    )
    query_job_mock.to_dataframe.return_value = result
    with run_query_patch as run_query_mock:
        run_query_mock.return_value = query_job_mock

        return_value = ip.run_cell_magic("bigquery", "", sql)

    assert isinstance(return_value, pandas.DataFrame)
    assert len(return_value) == len(result)  # verify row count
    assert list(return_value) == list(result)  # verify column names
Example #8
def in_ipnb():
    """
    Check if it is running inside an IPython Notebook (updated for new notebooks)
    """
    try:
        import IPython

        ip = IPython.get_ipython()

        front_end = None
        if "IPKernelApp" in ip.config:
            front_end = ip.config.get('IPKernelApp').get("parent_appname")
        elif "KernelApp" in ip.config:
            front_end = ip.config.get('KernelApp').get("parent_appname")

        if isinstance(front_end, IPython.config.loader.LazyConfigValue) or front_end is None:
            return isinstance(ip, IPython.kernel.zmq.zmqshell.ZMQInteractiveShell)
        elif isinstance(front_end, six.string_types):
            if 'ipython-notebook' in front_end.lower():
                return True
            elif 'notebook' in front_end.lower():
                return True
    except Exception as e:
        logger.debug("Cannot determine if running a notebook because of %s" % e)
        return False
    return False
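The IPython.config and IPython.kernel paths above only exist in very old IPython releases. A simpler modern variant (an assumption, not part of the original) checks the shell's class name instead:

def in_ipnb_modern():
    # ZMQInteractiveShell backs notebook/qtconsole kernels;
    # the plain terminal REPL uses TerminalInteractiveShell.
    try:
        import IPython
        ip = IPython.get_ipython()
        return type(ip).__name__ == "ZMQInteractiveShell"
    except ImportError:
        return False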
Example #9
def get_ipython():
    import IPython
    if ipython_is_newer((0, 11)):
        #return IPython.core.ipapi.get()
        return IPython.get_ipython()
    else:
        return IPython.ipapi.get()
Example #10
File: plot.py Project: rcbrgs/tuna
def plot_spectral_rings(spectral_rings):
    """This function will plot all arrays and print the data of all parameters
    specified in a tuna.tools.spectral_rings_fitter object.
    """
    ipython = IPython.get_ipython()
    ipython.magic("matplotlib qt")

    plot(spectral_rings["ridge"], title = "Ridge", ipython = ipython)
    for counter in range(len(spectral_rings["ring_pixel_sets"])):
        plot(spectral_rings["ring_pixel_sets"][counter][0],
             title = "Ring pixel set {}".format(counter), ipython = ipython)
    for counter in range(len(spectral_rings["gradients"])):
        plot(spectral_rings["gradients"][counter],
             title = "Gradients", ipython = ipython)
    plot(spectral_rings["upper_percentile_regions"],
         title = "lower_percentile_regions", ipython = ipython)
    plot(spectral_rings["lower_percentile_regions"],
         title = "upper_percentile_regions", ipython = ipython)
    for counter in range(len(spectral_rings["construction"])):
        plot(spectral_rings["construction"][counter],
             title = "Construction {}".format(counter), ipython = ipython)
    for counter in range(len(spectral_rings["ring_fit"])):
        plot(spectral_rings["ring_fit"][counter],
             title = "Ring fit {}".format(counter), ipython = ipython)
        print("Ring {} parameters: {}".format(
            counter, spectral_rings["ring_fit_parameters"]))
    for counter in range(len(spectral_rings["rings"])):
        print("Ring {} = {}".format(counter, spectral_rings["rings"][counter]))
    for counter in range(len(spectral_rings["concentric_rings"])):
        print("Concentric ring {} = {}".format(
            counter, spectral_rings["concentric_rings"][counter]))
Example #11
def register_transform(RE, *, prefix='<'):
    '''Register RunEngine IPython magic convenience transform

    Assuming the default parameters, this maps
    `< stuff(*args, **kwargs)` -> `RE(stuff(*args, **kwargs))`.

    RE is assumed to be available in the global namespace

    Parameters
    ----------
    RE : str
        The name of a valid RunEngine instance in the global IPython namespace

    prefix : str, optional
        The prefix to trigger this transform on.  If this collides with
        valid python syntax or an existing transform you are on your own.
    '''
    import IPython
    from IPython.core.inputtransformer import StatelessInputTransformer

    @StatelessInputTransformer.wrap
    def tr_re(line):
        if line.startswith(prefix):
            line = line[len(prefix):].strip()
            return '{}({})'.format(RE, line)
        return line
    ip = IPython.get_ipython()
    ip.input_splitter.logical_line_transforms.append(tr_re())
    ip.input_transformer_manager.logical_line_transforms.append(tr_re())
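A hypothetical usage sketch (scan, det and motor are illustrative names; note that input_splitter and StatelessInputTransformer are pre-7.0 IPython APIs):

# assuming a RunEngine instance is bound to the name RE in the session:
register_transform('RE', prefix='<')
# afterwards, typing:  < scan(det, motor, 1, 5)
# executes:            RE(scan(det, motor, 1, 5))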
Example #12
def ipython_slack_notification_hook(self, etype, value, tb, tb_offset=None):
    """
    Custom exception hook that first sends info to our slack channel by way of
    a flask app and then shows the IPython traceback.

    Thanks https://mail.scipy.org/pipermail/ipython-dev/2012-April/008945.html
    The API has slightly changed from that email, so check out the official
    docs instead at http://ipython.org/ipython-doc/dev/api/generated/IPython.core.interactiveshell.html#IPython.core.interactiveshell.InteractiveShell.set_custom_exc
    """
    # h[0] is the session number
    # h[1] is the line number
    # h[2] is the command
    ipython_history = [
        h[2] for h in IPython.get_ipython().history_manager.get_range()]
    notify_slack(ipython_history=ipython_history)
    IPython.get_ipython().showtraceback()
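Installing the hook, per the set_custom_exc API referenced in the docstring (a sketch; the exception tuple can be narrowed):

IPython.get_ipython().set_custom_exc(
    (Exception,), ipython_slack_notification_hook
)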
Example #13
def _query_cell(args, cell_body):
  """Implements the BigQuery cell magic for used to build SQL objects.

  The supported syntax is:

      %%bq query <args>
      [<inline SQL>]

  Args:
    args: the optional arguments following '%%bq query'.
    cell_body: the contents of the cell
  """
  name = args['name']
  udfs = args['udfs']
  datasources = args['datasources']
  subqueries = args['subqueries']

  # Finally build the query object
  query = google.datalab.bigquery.Query(cell_body, env=IPython.get_ipython().user_ns,
                                        udfs=udfs, data_sources=datasources, subqueries=subqueries)

  # if no name is specified, execute this query instead of defining it
  if name is None:
    return query.execute().result()
  else:
    google.datalab.utils.commands.notebook_environment()[name] = query
Example #14
 def get_code(self, revision="HEAD", original_name=False, file_type="notebook"):
     self.keeper._checkout(revision)
     src = os.path.join(
         self.keeper.work_dir,
         self.keeper.project_name,
         self.keeper.project_config['internal-path'],
         self.notebook,
         file_type + ".ipynb")
     if original_name:
         dst_file = self.notebook + ".ipynb"
     else:
         dst_file = self.notebook + "@" + file_type + "@" + revision + ".ipynb"
     dst = os.path.join(IPython.get_ipython().starting_dir, dst_file)
     shutil.copy(src, dst)
     os.chdir(IPython.get_ipython().starting_dir)
     IPython.display.display(IPython.display.FileLink(dst_file))
Example #15
def sql_cell(args, cell):
    """Implements the SQL cell magic for ipython notebooks.

  The supported syntax is:

      %%sql [--module <modulename>]
      [<optional Python code for default argument values>]
      [<optional named queries>]
      [<optional unnamed query>]

  At least one query should be present. Named queries should start with:

      DEFINE QUERY <name>

  on a line by itself.

  Args:
    args: the optional arguments following '%%sql'.
    cell: the contents of the cell; Python code for arguments followed by SQL queries.
  """
    name = args["module"] if args["module"] else "_sql_cell"
    module = imp.new_module(name)
    query = _split_cell(cell, module)
    ipy = IPython.get_ipython()
    if not args["module"]:
        # Execute now
        if query:
            return gcp.bigquery.Query(query, values=ipy.user_ns).execute().results
    else:
        # Add it as a module
        sys.modules[name] = module
        exec "import %s" % name in ipy.user_ns
Example #16
def main():
    """
    Register VenvMagics with IPython
    """
    import IPython
    ip = IPython.get_ipython()
    ip.register_magics(VenvMagics)
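A sketch of the conventional extension entry point, so the same magics can also be loaded with %load_ext (the module name would be whatever file holds this code):

def load_ipython_extension(ipython):
    # invoked by "%load_ext <module>"
    ipython.register_magics(VenvMagics)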
Example #17
def snip(tag="",start=-2,write_date=True):
    """ 
        This function records a previously execute notebook cell into a file (default: ipython_history.py)

        a tag can be added to sort the cell

        `start` defines which cell in the history to record. Default is -2, ie. the one executed previously to the current one.

    """
    import IPython
    i = IPython.get_ipython()
    last_history = i.history_manager.get_range(start=start,stop=start+1,output=True)
    with open("ipython_history.py",'a') as output_file:
        for l in last_history:
            global _session_description
            output_file.write('\n\n\n'+('#'*80)+'\n')
            if _session_description != "":
                output_file.write('#\n'+_lines_as_comments(_session_description)+'\n#\n')
            if tag != "":
                output_file.write(_lines_as_comments(tag)+'\n')
            if write_date:
                import datetime
                output_file.write('# '+datetime.datetime.now().isoformat()+'\n')
            output_file.write('\n\n# In ['+str(l[1])+']:\n'+l[2][0])
            _last_inputs.append(l[2][0])
            _tagged_inputs[tag] = _tagged_inputs.get(tag,[])
            _tagged_inputs[tag].append(l[2][0])
            output_file.write('\n\n# Out ['+str(l[1])+']:\n'+_lines_as_comments(repr(l[2][1])))
Example #18
    def nbsample(draws, step, start=None, trace=None, chain=0, tune=None, model=None, random_seed=None):
        try:
            assert hasattr(IPython.get_ipython(), 'comm_manager')
        except (AssertionError, NameError, KeyError) as e:
            raise NotImplementedError(_no_notebook_error_message)
    
        display.display_html(_javascript, raw=True)
        w = ISampleWidget()
        display.display(w)
        t_start = time.time()
        t_last = time.time()

        w.max_samples = draws
        w.current_samples = 0
        for i,backend in enumerate(iter_sample(draws, step, start=start, trace=trace,
            chain=chain, tune=tune, model=model, random_seed=None), 1):
            elapsed = time.time() - t_start
            elapsed_last = time.time() - t_last

            if elapsed_last > 0.1:
                t_last = time.time()
                w.current_samples = i
                w.clock = "%02i:%02i:%02i" % (elapsed / 60 / 60, elapsed / 60 % 60, elapsed % 60)
                get_ipython().kernel.do_one_iteration()
                if w.stopped:
                    break
        w.current_samples = i
        return backend
Example #19
def test_extension_load():
    ip = IPython.get_ipython()
    ip.extension_manager.load_extension('google.cloud.bigquery')

    # verify that the magic is registered and has the correct source
    magic = ip.magics_manager.magics['cell'].get('bigquery')
    assert magic.__module__ == 'google.cloud.bigquery.magics'
Example #20
def _chart_cell(args, cell):
  source = args['data']
  ipy = IPython.get_ipython()
  chart_options = _utils.parse_config(cell, ipy.user_ns)
  if chart_options is None:
    chart_options = {}
  fields = args['fields'] if args['fields'] else '*'

  _HTML_TEMPLATE = """
    <div class="bqgc" id="%s">
    </div>
    <script>
      require(['extensions/charting', 'element!%s', 'style!/static/extensions/charting.css'],
        function(charts, dom) {
          charts.render(dom, {chartStyle:'%s', dataName:'%s', fields:'%s'}, %s, %s);
        }
      );
    </script>
  """
  div_id = _html.Html.next_id()
  chart_type = args['chart']
  count = 25 if chart_type == 'paged_table' else -1
  data, _ = _utils.get_data(source, fields, 0, count)

  return IPython.core.display.HTML(
    _HTML_TEMPLATE % (div_id, div_id, chart_type, _utils.get_data_source_index(source), fields,
                      json.dumps(chart_options, cls=gcp._util.JSONEncoder),
                      json.dumps(data, ensure_ascii=False, cls=gcp._util.JSONEncoder)))
Example #21
def test_bigquery_magic_with_result_saved_to_variable():
    ip = IPython.get_ipython()
    ip.extension_manager.load_extension("google.cloud.bigquery")
    magics.context.credentials = mock.create_autospec(
        google.auth.credentials.Credentials, instance=True
    )

    sql = "SELECT 17 AS num"
    result = pandas.DataFrame([17], columns=["num"])
    assert "df" not in ip.user_ns

    run_query_patch = mock.patch(
        "google.cloud.bigquery.magics._run_query", autospec=True
    )
    query_job_mock = mock.create_autospec(
        google.cloud.bigquery.job.QueryJob, instance=True
    )
    query_job_mock.to_dataframe.return_value = result
    with run_query_patch as run_query_mock:
        run_query_mock.return_value = query_job_mock

        ip.run_cell_magic("bigquery", "df", sql)

    assert "df" in ip.user_ns  # verify that variable exists
    df = ip.user_ns["df"]
    assert len(df) == len(result)  # verify row count
    assert list(df) == list(result)  # verify column names
Example #22
def test_bigquery_magic_with_dict_params():
    ip = IPython.get_ipython()
    ip.extension_manager.load_extension("google.cloud.bigquery")
    magics.context.credentials = mock.create_autospec(
        google.auth.credentials.Credentials, instance=True
    )

    sql = "SELECT @num AS num"
    result = pandas.DataFrame([17], columns=["num"])
    assert "params_dict_df" not in ip.user_ns

    run_query_patch = mock.patch(
        "google.cloud.bigquery.magics._run_query", autospec=True
    )
    query_job_mock = mock.create_autospec(
        google.cloud.bigquery.job.QueryJob, instance=True
    )
    query_job_mock.to_dataframe.return_value = result
    with run_query_patch as run_query_mock:
        run_query_mock.return_value = query_job_mock

        params = {"num": 17}
        # Insert dictionary into user namespace so that it can be expanded
        ip.user_ns["params"] = params
        ip.run_cell_magic("bigquery", "params_dict_df --params $params", sql)
        run_query_mock.assert_called_once_with(mock.ANY, sql.format(num=17), mock.ANY)

    assert "params_dict_df" in ip.user_ns  # verify that the variable exists
    df = ip.user_ns["params_dict_df"]
    assert len(df) == len(result)  # verify row count
    assert list(df) == list(result)  # verify column names
Example #23
def bq_sql(declaration, sql):
  """Implements the bigquery cell magic for ipython notebooks.

  The supported syntax is:
  %%bq_sql [<var>]
  <sql>

  Args:
    declaration: the optional variable to be initialized with the resulting query.
    sql: the contents of the cell interpreted as the SQL.
  Returns:
    The results of executing the query converted to a dataframe if no variable
    was specified. None otherwise.
  """
  ipy = _ipython.get_ipython()

  # Use the user_ns dictionary, which contains all current declarations
  # in the kernel, as the dictionary from which to retrieve values for
  # placeholders within the specified sql statement.
  sql = _bq.sql(sql, **ipy.user_ns)
  query = _bq.query(sql)

  variable_name = declaration.strip()
  if len(variable_name):
    # Update the global namespace with the new variable, or update the value of
    # the existing variable if it already exists.
    ipy.push({variable_name: query})
    return None
  else:
    # If a variable was not specified, then simply return the results, so they
    # get rendered as the output of the cell.
    return query.results()
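As a sketch, the ipy.push() call above is equivalent to assigning into user_ns directly:

ipy = _ipython.get_ipython()
ipy.push({"answer": 42})
assert ipy.user_ns["answer"] == 42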
Example #24
def run(**kwargs):
    """
    Start to run a strategy
    """
    config_path = kwargs.get('config_path', None)
    if config_path is not None:
        config_path = os.path.abspath(config_path)
        kwargs.pop('config_path')
    if not kwargs.get('base__securities', None):
        kwargs.pop('base__securities', None)

    from rqalpha import main
    source_code = kwargs.get("base__source_code")
    cfg = parse_config(kwargs, config_path=config_path, click_type=True, source_code=source_code)
    source_code = cfg.base.source_code
    results = main.run(cfg, source_code=source_code)

    # store results into ipython when running in ipython
    from rqalpha.utils import is_run_from_ipython
    if results is not None and is_run_from_ipython():
        import IPython
        from rqalpha.utils import RqAttrDict
        ipy = IPython.get_ipython()
        report = results.get("sys_analyser", {})
        ipy.user_global_ns["results"] = results
        ipy.user_global_ns["report"] = RqAttrDict(report)
Example #25
def _cell_magic(line, query):
    """Underlying function for bigquery cell magic

    Note:
        This function contains the underlying logic for the 'bigquery' cell
        magic. This function is not meant to be called directly.

    Args:
        line (str): "%%bigquery" followed by arguments as required
        query (str): SQL query to run

    Returns:
        pandas.DataFrame: the query results.
    """
    args = magic_arguments.parse_argstring(_cell_magic, line)

    params = []
    if args.params is not None:
        try:
            params = _helpers.to_query_parameters(
                ast.literal_eval("".join(args.params))
            )
        except Exception:
            raise SyntaxError(
                "--params is not a correctly formatted JSON string or a JSON "
                "serializable dictionary"
            )

    project = args.project or context.project
    client = bigquery.Client(project=project, credentials=context.credentials)
    bqstorage_client = _make_bqstorage_client(
        args.use_bqstorage_api or context.use_bqstorage_api, context.credentials
    )
    job_config = bigquery.job.QueryJobConfig()
    job_config.query_parameters = params
    job_config.use_legacy_sql = args.use_legacy_sql
    query_job = _run_query(client, query, job_config)

    if not args.verbose:
        display.clear_output()

    result = query_job.to_dataframe(bqstorage_client=bqstorage_client)
    if args.destination_var:
        IPython.get_ipython().push({args.destination_var: result})
    else:
        return result
Example #26
 def onComputationStatus(self, status):
     if self.customComputationStatusCallback is not None:
         self.customComputationStatusCallback(status)
     elif haveIPythonNotebook and IPython.get_ipython() is not None:
         IPython.core.display.clear_output(status is not None)
         if status is not None:
             IPython.core.display.display(
                 IPython.core.display.HTML("<div>Active Cores: %s</div>" % status['cpus']['value'])
                 )
Example #27
def _get_newtabmagic(browser='firefox', port=None):
    ip = IPython.get_ipython()
    ip.reset()
    newtab = newtabmagic.NewTabMagics(ip)
    if browser is not None:
        newtab.newtab('--browser ' + browser)
    if port is not None:
        newtab.newtab('--port ' + str(port))
    return newtab
Example #28
def get_data(source, fields='*', env=None, first_row=0, count=-1, schema=None):
  """ A utility function to get a subset of data from a Table, Query, Pandas dataframe or List.

  Args:
    source: the source of the data. Can be a Table, Pandas DataFrame, List of dictionaries or
        lists, or a string, in which case it is expected to be the name of a table in BQ.
    fields: the fields to return, as a list of strings, a comma-separated string,
        or '*' for all.
    env: if the data source is a Query module, this is the set of variable overrides for
        parameterizing the Query.
    first_row: the index of the first row to return; default 0. Only used if count is non-negative.
    count: the number of rows to return. If negative (the default), return all rows.
    schema: the schema of the data. Optional; if supplied this can be used to help do type-coercion.

  Returns:
    A tuple consisting of a dictionary and a count; the dictionary has two entries: 'cols'
    which is a list of column metadata entries for Google Charts, and 'rows' which is a list of
    lists of values. The count is the total number of rows in the source (independent of the
    first_row/count parameters).

  Raises:
    Exception if the request could not be fulfilled.
  """

  ipy = IPython.get_ipython()
  if env is None:
    env = {}
  env.update(ipy.user_ns)
  if isinstance(source, str):
    source = datalab.utils.get_item(ipy.user_ns, source, source)
    if isinstance(source, str):
      source = datalab.bigquery.Table(source)

  if isinstance(source, types.ModuleType) or isinstance(source, datalab.data.SqlStatement):
    source = datalab.bigquery.Query(source, values=env)

  if isinstance(source, list):
    if len(source) == 0:
      return _get_data_from_empty_list(source, fields, first_row, count, schema)
    elif isinstance(source[0], dict):
      return _get_data_from_list_of_dicts(source, fields, first_row, count, schema)
    elif isinstance(source[0], list):
      return _get_data_from_list_of_lists(source, fields, first_row, count, schema)
    else:
      raise Exception("To get tabular data from a list it must contain dictionaries or lists.")
  elif isinstance(source, pandas.DataFrame):
    return _get_data_from_dataframe(source, fields, first_row, count, schema)
  elif (isinstance(source, google.datalab.bigquery.Query) or
        isinstance(source, google.datalab.bigquery.Table)):
    return google.datalab.utils.commands._utils.get_data(
        source, fields, env, first_row, count, schema)
  elif isinstance(source, datalab.bigquery.Query):
    return _get_data_from_table(source.results(), fields, first_row, count, schema)
  elif isinstance(source, datalab.bigquery.Table):
    return _get_data_from_table(source, fields, first_row, count, schema)
  else:
    raise Exception("Cannot chart %s; unsupported object type" % source)
Example #29
def get_ipython():
    """Return an IPython instance. Returns None
    if IPython is not installed"""

    try:
        import IPython
        return IPython.get_ipython()
    except ImportError:
        return None
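Typical call-site sketch: branch on the return value so the same code works inside and outside IPython:

ip = get_ipython()
if ip is not None:
    ip.run_line_magic("matplotlib", "inline")  # notebook-only setup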
Example #30
def get_ipython():
    import IPython
    if ipython_is_newer((0, 11)):
        try:
            return IPython.core.ipapi.get()
        except Exception:
            # which accessor exists depends on the IPython version,
            # so fall back to the modern entry point
            return IPython.get_ipython()
    else:
        print('IPython.ipapi.get()')
        return IPython.ipapi.get()
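ipython_is_newer is referenced here and in Example #9 but not shown; a minimal sketch of the "robust version check" the comment asks for, using IPython.version_info:

def ipython_is_newer(min_version):
    import IPython
    return IPython.version_info[:len(min_version)] >= tuple(min_version)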
Example #31
def autocomplete(hook: bool = True, jedi: bool = None, greedy: bool = None):
    '''
    Call rt.autocomplete() to specialize Jupyter Lab autocomplete output.
    Arrays, categoricals, datetimes, structs, and datasets will be detected;
    an array is displayed as "Array" followed by its dtype.

    Parameters
    ----------
    hook: bool, default True
        set to False to unhook riptable autocomplete
    jedi: bool, default None
        set to True to set use_jedi in IPython
    greedy: bool, default None
        set to True to set greedy in IPython for [' autocomplete

    Examples
    --------
    >>> rt.autocomplete(); ds=Dataset({'test':arange(5), 'another':arange(5.0), 'mycat':rt.Cat(arange(5)), 'mystr': arange(5).astype('S')})
    Now in jupyter lab type 'ds.<tab>'

    '''
    def gettype(element) -> str:
        result: str = ''
        if hasattr(element, '_autocomplete'):
            result = element._autocomplete()
        elif isinstance(element, np.ndarray):
            # how to display array
            bstring = 'Array '
            dnum = element.dtype.num
            if dnum == 0:
                # bool check
                extra = 'b'
            elif dnum <= 10:
                # integer check
                if dnum & 1 == 1:
                    extra = 'i'
                else:
                    extra = 'u'
            elif dnum <= 13:
                extra = 'f'
            else:
                extra = element.dtype.char + str(element.itemsize)

            if dnum <= 13:
                result = bstring + extra + str(element.itemsize * 8)
            else:
                result = bstring + extra

        else:
            if callable(element):
                result = 'function'
            else:
                try:
                    result = element.__class__.__name__
                except Exception:
                    result = 'unknown'
        return result

    #----------------------------------------------
    def gettype_foritem(acobj, name, oclass, oinstance) -> str:
        t = '<unknown>'
        try:
            item = None
            if name in oinstance:
                item = oinstance[name]
            elif name in oclass:
                item = oclass[name]
            elif hasattr(acobj, name):
                item = getattr(acobj, name)
            t = gettype(item)
        except Exception:
            pass
        return t

    # ---------------------------------------------
    def _evaluate_text(code, dotpos, careful: bool = False):
        '''
        careful: when True indicates that the entire line should NOT be evaluated

        NOTE: Internal function that calls eval() which may not be acceptable for some applications.
        '''
        def call_eval(text):
            try:
                # try the local namespace first, then the global one
                acobj = eval(text, _ipcompleter.namespace)
            except Exception:
                try:
                    acobj = eval(text, _ipcompleter.global_namespace)
                except Exception:
                    acobj = None
            return acobj

        mainpart = code[:dotpos]
        acobj = None
        startpos = dotpos
        endpos = dotpos + 1
        startwith = None

        if careful:
            # check for ()
            pass

        if not careful:
            acobj = call_eval(mainpart)

        if acobj is None:
            # trickier... now scan backwards from dot
            while startpos >= 0:
                # search back until hit non-naming character
                if code[startpos] not in Hooker.babydict:
                    startpos += 1
                    break
                startpos -= 1

            #careful did not check entire line
            if careful and startpos < 0:
                startpos = 0
            if startpos >= 0:
                mainpart = code[startpos:dotpos]
                acobj = call_eval(mainpart)

            # if we still have not found it
            if acobj is None:
                laststartpos = startpos
                startpos = dotpos
                while startpos >= 0:
                    # search back until hit paren() or comma
                    if code[startpos] in '(),':
                        startpos += 1
                        break
                    startpos -= 1

                #careful did not check entire line
                if careful and startpos < 0:
                    startpos = 0

                if startpos != laststartpos:
                    if startpos >= 0:
                        mainpart = code[startpos:dotpos]
                        acobj = call_eval(mainpart)

        if acobj is not None:
            # calc the subsection
            while endpos < len(code):
                # search forward until hitting a non-name character
                if code[endpos] not in Hooker.babydict:
                    break
                endpos += 1

            if endpos > (dotpos + 1):
                startwith = code[dotpos + 1:endpos]
            #print("startpos", startpos, endpos, mainpart, startwith)
            startpos = dotpos

        return acobj, startpos, mainpart, endpos, startwith

    # ---------------------------------------------
    def _riptable_deduplicate_completions(text, completions):
        # This is the hook for use_jedi=True and console.
        #[<Completion start=3 end=3 text='as_struct' type='function', signature='(self)',>,  ..]
        # there is also special code to detect a 'function' in IPython\terminal\ptutils.py
        found = False

        # turn the enumerator into a list
        completions = list(completions)

        # look for apply_schema as only riptable will have this marker up front
        for comp in completions:
            if comp.text == 'apply_schema' or comp.text == 'apply_cols':
                found = True  # looks like our Struct
                break

        # if jedi completed something that is not a Struct, stop trying to autocomplete
        if not found and len(completions) > 0:
            return Hooker._orig_deduplicate(text, completions)

        # we only autocomplete on dots
        dotpos = text.rfind('.')

        if dotpos > 0:
            acobj, startpos, mainpart, endpos, startwith = _evaluate_text(
                text, dotpos, careful=not found)

            if acobj is not None:

                # calc the subsection
                endpos = dotpos + 1
                while endpos < len(text):
                    # search forward until hitting a non-name character
                    if text[endpos] not in Hooker.babydict:
                        break
                    endpos += 1

                # is this a container class we own
                if isinstance(acobj,
                              (TypeRegister.Struct, TypeRegister.FastArray)):
                    oclass = acobj.__class__.__dict__
                    oinstance = acobj.__dict__

                    # TODO: if jedi has a mistake, do we correct it here?
                    if len(completions) == 0:
                        #redo completions
                        completions = []
                        ldir = dir(acobj)
                        for c in ldir:
                            if not c.startswith('_'):
                                # check if we have a startswith
                                if startwith is not None:
                                    if not c.startswith(startwith):
                                        continue

                                t = gettype_foritem(acobj, c, oclass,
                                                    oinstance)
                                completions.append(
                                    Completion(start=startpos + 1,
                                               end=dotpos,
                                               text=c,
                                               type=t,
                                               signature='(self)'))

                    if isinstance(acobj, TypeRegister.Struct):
                        # get the columns
                        keys = acobj.keys()
                        keys.sort()
                        movetotop = {}

                        # first put the columns in
                        for k in keys:
                            # check if we have a startswith
                            if startwith is not None:
                                if not k.startswith(startwith):
                                    continue

                            # Struct or Dataset getitem call
                            element = acobj[k]
                            movetotop[k] = Completion(start=startpos + 1,
                                                      end=dotpos,
                                                      text=k,
                                                      type=gettype(acobj[k]),
                                                      signature='(self)')

                        # then add anything else (note if completions is empty we could call dir)
                        for comp in completions:
                            text = comp.text
                            if text is not None and text not in movetotop:
                                movetotop[text] = comp

                        completions = list(movetotop.values())

        return Hooker._orig_deduplicate(text, completions)

    def _riptable_do_complete(self, code, cursor_pos):
        '''
        Hooked from ipythonkernel.do_complete.  Hit in jupyter lab.
        Calls the original do_complete, then possibly rearranges the list.
        As of Dec 2019, this is the use_jedi=True hook in jupyter lab.
        '''

        # self is ipkernel.ipythonkernel
        # code is what text the user typed
        # call original first (usually kicks in jedi)
        result = Hooker._orig_do_complete(self, code, cursor_pos)

        # we only autocomplete on dots
        dotpos = code.rfind('.')

        if dotpos > 0:
            mainpart = code[:dotpos]
            acobj, startpos, mainpart, endpos, startwith = _evaluate_text(
                code, dotpos)

            if acobj is not None:
                try:
                    if isinstance(acobj, TypeRegister.Struct):

                        oclass = acobj.__class__.__dict__
                        oinstance = acobj.__dict__

                        # add a dot to complete mainpart for string matching later
                        mainpart += '.'
                        lenmainpart = len(mainpart)

                        # get the jedi completions
                        rmatches = result['matches']

                        # check if there are any jedi completions
                        if len(rmatches) == 0:

                            # jedi failed, so we attempt our own completions
                            matches = []
                            completions = []
                            ldir = dir(acobj)
                            for c in ldir:
                                if not c.startswith('_'):
                                    # check if we have a startswith
                                    if startwith is not None:
                                        if not c.startswith(startwith):
                                            continue
                                    matches.append(c)
                                    t = gettype_foritem(
                                        acobj, c, oclass, oinstance)
                                    completions.append({
                                        'start': startpos,
                                        'end': dotpos,
                                        'text': c,
                                        'type': t
                                    })

                            rmatches = matches

                            result = {
                                'matches': matches,
                                'cursor_end': endpos,
                                'cursor_start': dotpos + 1,
                                'metadata': {
                                    '_jupyter_types_experimental': completions
                                },
                                'status': 'ok'
                            }

                        meta = result['metadata']
                        keys = acobj.keys()
                        keys.sort()

                        # result will look similar to:
                        # {'matches': ['list of strings'], 'cursor_end': 5, 'cursor_start': 5, 'metadata': {'_jupyter_types_experimental':
                        #     [{'start': 5, 'end': 5, 'text': 'add_traits', 'type': 'function'},
                        #      {'start': 5, 'end': 5, 'text': 'update_config', 'type': 'function'}]}, 'status':'ok'}
                        # jupyter notebook
                        jtypes = meta.get('_jupyter_types_experimental', None)
                        if jtypes is not None:
                            toptext = []
                            bottomtext = []

                            topmatch = []
                            bottommatch = []

                            # jtypes is a list of dicts
                            for jdict in jtypes:
                                top = False
                                text = jdict.get('text', None)
                                if text is not None:
                                    subtext = text
                                    if text.startswith(mainpart):
                                        subtext = text[lenmainpart:]
                                    # jedi matches after the dot
                                    if subtext in keys:
                                        # Struct or Dataset getitem call
                                        element = acobj[subtext]
                                        jdict['type'] = gettype(element)
                                        top = True
                                if top:
                                    topmatch.append(jdict)
                                    toptext.append(text)
                                else:
                                    bottommatch.append(jdict)
                                    bottomtext.append(text)

                            # change the order
                            jtypes = topmatch + bottommatch
                            rmatches = toptext + bottomtext

                            # insert our order
                            meta['_jupyter_types_experimental'] = jtypes
                            result['matches'] = rmatches

                            if len(toptext) > 0 and dotpos < len(code):
                                # indicate end of list in some instances to force regen
                                # to move our list back to the top during partial completions like ds.a<tab>
                                msg = '---'
                                jtypes.append({
                                    'start': 0,
                                    'end': 0,
                                    'text': msg,
                                    'type': 'endlist'
                                })
                                rmatches.append(msg)

                except Exception as e:
                    # indicate we crashed so the user can report it;
                    # re-fetch metadata defensively since the failure may have
                    # happened before `meta` was assigned
                    meta = result.get('metadata', {})
                    jtypes = meta.get('_jupyter_types_experimental', None)
                    msg = 'CRASHRIPTABLE'
                    if jtypes is not None:
                        jtypes.insert(0, {
                            'start': 0,
                            'end': 0,
                            'text': msg,
                            'type': f'{e}'
                        })
                    result['matches'].insert(0, msg)

        return result

    # ---- start of main code for autocomplete --------
    import IPython
    from ipykernel import ipkernel
    _ipcompleter = IPython.get_ipython().Completer
    Hooker._ipcompleter = _ipcompleter

    # check version, we only support 7
    version = IPython.version_info[0]
    if version != 7:
        return

    # caller may optionally set jedi or greedy
    if jedi is True or jedi is False: _ipcompleter.use_jedi = jedi
    if greedy is True or greedy is False: _ipcompleter.greedy = greedy

    if hook:
        # for jupyter lab
        if Hooker._orig_do_complete is None:
            Hooker._orig_do_complete = ipkernel.IPythonKernel.do_complete
            setattr(ipkernel.IPythonKernel, 'do_complete',
                    _riptable_do_complete)

        # for console text (not jupyter lab)
        if Hooker._orig_deduplicate is None:
            Hooker._putils = IPython.terminal.ptutils
            Hooker._orig_deduplicate = Hooker._putils._deduplicate_completions
            setattr(Hooker._putils, '_deduplicate_completions',
                    _riptable_deduplicate_completions)
    else:
        # unhook ------
        if Hooker._orig_do_complete is not None:
            setattr(ipkernel.IPythonKernel, 'do_complete',
                    Hooker._orig_do_complete)
            Hooker._orig_do_complete = None

        if Hooker._orig_deduplicate is not None:
            setattr(Hooker._putils, '_deduplicate_completions',
                    Hooker._orig_deduplicate)
            Hooker._orig_deduplicate = None
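Usage sketch, based on the parameters documented above:

autocomplete(jedi=False)   # install riptable completions, disable jedi
# ... work in the notebook ...
autocomplete(hook=False)   # restore IPython's original completion hooks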
Example #32
def true():
    import IPython, pidgy.kernel.shell

    return isinstance(IPython.get_ipython(), pidgy.kernel.shell.pidgyShell)
Example #33
def _cell_magic(line, query):
    """Underlying function for bigquery cell magic

    Note:
        This function contains the underlying logic for the 'bigquery' cell
        magic. This function is not meant to be called directly.

    Args:
        line (str): "%%bigquery" followed by arguments as required
        query (str): SQL query to run

    Returns:
        pandas.DataFrame: the query results.
    """
    args = magic_arguments.parse_argstring(_cell_magic, line)

    if args.use_bqstorage_api is not None:
        warnings.warn(
            "Deprecated option --use_bqstorage_api, the BigQuery "
            "Storage API is already used by default.",
            category=DeprecationWarning,
        )
    use_bqstorage_api = not args.use_rest_api

    params = []
    if args.params is not None:
        try:
            params = _helpers.to_query_parameters(
                ast.literal_eval("".join(args.params)))
        except Exception:
            raise SyntaxError(
                "--params is not a correctly formatted JSON string or a JSON "
                "serializable dictionary")

    project = args.project or context.project
    client = bigquery.Client(
        project=project,
        credentials=context.credentials,
        default_query_job_config=context.default_query_job_config,
        client_info=client_info.ClientInfo(user_agent=IPYTHON_USER_AGENT),
    )
    if context._connection:
        client._connection = context._connection
    bqstorage_client = _make_bqstorage_client(use_bqstorage_api,
                                              context.credentials)

    close_transports = functools.partial(_close_transports, client,
                                         bqstorage_client)

    try:
        if args.max_results:
            max_results = int(args.max_results)
        else:
            max_results = None

        query = query.strip()

        if not query:
            error = ValueError("Query is missing.")
            _handle_error(error, args.destination_var)
            return

        # Any query that does not contain whitespace (aside from leading and trailing whitespace)
        # is assumed to be a table id
        if not re.search(r"\s", query):
            try:
                rows = client.list_rows(query, max_results=max_results)
            except Exception as ex:
                _handle_error(ex, args.destination_var)
                return

            result = rows.to_dataframe(bqstorage_client=bqstorage_client)
            if args.destination_var:
                IPython.get_ipython().push({args.destination_var: result})
                return
            else:
                return result

        job_config = bigquery.job.QueryJobConfig()
        job_config.query_parameters = params
        job_config.use_legacy_sql = args.use_legacy_sql
        job_config.dry_run = args.dry_run

        if args.destination_table:
            split = args.destination_table.split(".")
            if len(split) != 2:
                raise ValueError(
                    "--destination_table should be in a <dataset_id>.<table_id> format."
                )
            dataset_id, table_id = split
            job_config.allow_large_results = True
            dataset_ref = bigquery.dataset.DatasetReference(
                client.project, dataset_id)
            destination_table_ref = dataset_ref.table(table_id)
            job_config.destination = destination_table_ref
            job_config.create_disposition = "CREATE_IF_NEEDED"
            job_config.write_disposition = "WRITE_TRUNCATE"
            _create_dataset_if_necessary(client, dataset_id)

        if args.maximum_bytes_billed == "None":
            job_config.maximum_bytes_billed = 0
        elif args.maximum_bytes_billed is not None:
            value = int(args.maximum_bytes_billed)
            job_config.maximum_bytes_billed = value

        try:
            query_job = _run_query(client, query, job_config=job_config)
        except Exception as ex:
            _handle_error(ex, args.destination_var)
            return

        if not args.verbose:
            display.clear_output()

        if args.dry_run and args.destination_var:
            IPython.get_ipython().push({args.destination_var: query_job})
            return
        elif args.dry_run:
            print("Query validated. This query will process {} bytes.".format(
                query_job.total_bytes_processed))
            return query_job

        if max_results:
            result = query_job.result(max_results=max_results).to_dataframe(
                bqstorage_client=bqstorage_client)
        else:
            result = query_job.to_dataframe(bqstorage_client=bqstorage_client)

        if args.destination_var:
            IPython.get_ipython().push({args.destination_var: result})
        else:
            return result
    finally:
        close_transports()
Example #34
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Nov 10 09:30:27 2016

@author: downey
"""

#%% import modules and set default fonts and colors

import IPython as IP
IP.get_ipython().magic('reset -sf')
import matplotlib.pyplot as plt
import matplotlib as mpl
import numpy as np
import pandas as PD
import scipy as sp
from scipy import interpolate
import pickle
import time
import re
import json as json
import pylab

# set default fonts and plot colors
plt.rcParams.update({'image.cmap': 'viridis'})
cc = plt.rcParams['axes.prop_cycle'].by_key()['color']
plt.rcParams.update({
    'font.serif': [
        'Times New Roman', 'Times', 'DejaVu Serif', 'Bitstream Vera Serif',
        'Computer Modern Roman', 'New Century Schoolbook',
Example #35
def notebook_environment():
    """ Get the IPython user namespace. """
    ipy = IPython.get_ipython()
    return ipy.user_ns
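Sketch: because user_ns is the live interactive namespace, reads and writes through it are visible in subsequent cells:

env = notebook_environment()
env["answer"] = 42        # now available as `answer` in the notebook
print(env.get("answer"))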
Example #36
    com = Cube(0.05, pos=robot.com, color='g')
    lf_target = Contact(robot.sole_shape, pos=[0, 0.3, 0], visible=True)
    rf_target = Contact(robot.sole_shape, pos=[0, -0.3, 0], visible=True)

    # IK tasks
    lf_task = ContactTask(robot, robot.left_foot, lf_target, weight=1000)
    rf_task = ContactTask(robot, robot.right_foot, rf_target, weight=1000)
    com_task = COMTask(robot, com, weight=10)
    reg_task = PostureTask(robot, robot.q, weight=0.1)  # regularization task

    # IK setup
    robot.init_ik(active_dofs=robot.whole_body)
    robot.ik.add_tasks([lf_task, rf_task, com_task, reg_task])
    for (dof_id, dof_ref) in dof_targets:
        robot.ik.add_task(
            DOFTask(robot, dof_id, dof_ref, gain=0.5, weight=0.1))

    # First, generate an initial posture
    robot.solve_ik(max_it=100, conv_tol=1e-4, debug=True)

    # Next, we move the COM back and forth for 10 seconds
    move_com_back_and_forth(10)

    # Finally, we start the simulation with the IK on
    sim.schedule(robot.ik)
    sim.start()

    # Don't forget to give the user a prompt
    if IPython.get_ipython() is None:
        IPython.embed()
Example #37
            comd = self.pendulum.com.pd
            dv = 2. * numpy.random.random(3) - 1.
            dv[2] *= 0.5  # push is weaker in vertical direction
            dv *= self.gain / norm(dv)
            self.pendulum.com.set_vel(comd + dv)
            self.handle = draw_arrow(com - dv, com, color='b', linewidth=0.01)


if __name__ == '__main__':
    sim = pymanoid.Simulation(dt=0.03)
    sim.set_viewer()
    sim.viewer.SetCamera([[-0.28985337, 0.40434395, -0.86746239, 1.40434551],
                          [0.95680245, 0.1009506, -0.27265003, 0.45636871],
                          [-0.02267354, -0.90901867, -0.41613816, 1.15192068],
                          [0., 0., 0., 1.]])

    contact = pymanoid.Contact((0.1, 0.05), pos=[0., 0., 0.])
    pendulum = pymanoid.models.InvertedPendulum(pos=[0., 0., 0.8],
                                                vel=numpy.zeros(3),
                                                contact=contact)
    stabilizer = Stabilizer(pendulum)
    pusher = Pusher(pendulum)

    sim.schedule(stabilizer)  # before pendulum in schedule
    sim.schedule(pendulum)
    sim.schedule_extra(pusher)
    sim.start()

    if IPython.get_ipython() is None:  # give the user a prompt
        IPython.embed()
Example #38
 def __init__(self):
     import IPython
     self.dbutils = IPython.get_ipython().user_ns["dbutils"]
Example #39
def get_path_and_source_from_frame(frame):
    globs = frame.f_globals or {}
    module_name = globs.get('__name__')
    file_name = frame.f_code.co_filename
    cache_key = (module_name, file_name)
    try:
        return source_and_path_cache[cache_key]
    except KeyError:
        pass
    loader = globs.get('__loader__')

    source = None
    if hasattr(loader, 'get_source'):
        try:
            source = loader.get_source(module_name)
        except ImportError:
            pass
        if source is not None:
            source = source.splitlines()
    if source is None:
        ipython_filename_match = ipython_filename_pattern.match(file_name)
        if ipython_filename_match:
            entry_number = int(ipython_filename_match.group(1))
            try:
                import IPython
                ipython_shell = IPython.get_ipython()
                ((_, _, source_chunk),) = ipython_shell.history_manager. \
                                  get_range(0, entry_number, entry_number + 1)
                source = source_chunk.splitlines()
            except Exception:
                pass
        else:
            try:
                with open(file_name, 'rb') as fp:
                    source = fp.read().splitlines()
            except utils.file_reading_errors:
                pass
    if not source:
        # We used to check `if source is None` but I found a rare bug where it
        # was empty, but not `None`, so now we check `if not source`.
        source = UnavailableSource()

    # If we just read the source from a file, or if the loader did not
    # apply tokenize.detect_encoding to decode the source into a
    # string, then we should do that ourselves.
    if isinstance(source[0], bytes):
        encoding = 'utf-8'
        for line in source[:2]:
            # File coding may be specified. Match pattern from PEP-263
            # (https://www.python.org/dev/peps/pep-0263/)
            match = re.search(br'coding[:=]\s*([-\w.]+)', line)
            if match:
                encoding = match.group(1).decode('ascii')
                break
        source = [
            pycompat.text_type(sline, encoding, 'replace') for sline in source
        ]

    result = (file_name, source)
    source_and_path_cache[cache_key] = result
    return result
Example #40
def ipython_kw_matches(text):
    """Match named ITK object's named parameters"""
    import IPython
    import itk
    import re
    import inspect
    import itkTemplate
    regexp = re.compile(
        r'''
                    '.*?' |  # single quoted strings or
                    ".*?" |  # double quoted strings or
                    \w+     |  # identifier
                    \S  # other characters
                    ''', re.VERBOSE | re.DOTALL)
    ip = IPython.get_ipython()
    if "." in text:  # a parameter cannot be dotted
        return []
    # 1. Find the nearest identifier that comes before an unclosed
    # parenthesis e.g. for "foo (1+bar(x), pa", the candidate is "foo".
    if ip.Completer.readline:
        textUntilCursor = ip.Completer.readline.get_line_buffer(
        )[:ip.Completer.readline.get_endidx()]
    else:
        # IPython >= 5.0.0, which is based on the Python Prompt Toolkit
        textUntilCursor = ip.Completer.text_until_cursor

    tokens = regexp.findall(textUntilCursor)
    tokens.reverse()
    iterTokens = iter(tokens)
    openPar = 0
    for token in iterTokens:
        if token == ')':
            openPar -= 1
        elif token == '(':
            openPar += 1
            if openPar > 0:
                # found the last unclosed parenthesis
                break
    else:
        return []
    # 2. Concatenate dotted names ("foo.bar" for "foo.bar(x, pa" )
    ids = []
    isId = re.compile(r'\w+$').match
    while True:
        try:
            ids.append(next(iterTokens))
            if not isId(ids[-1]):
                ids.pop()
                break
            if next(iterTokens) != '.':
                break
        except StopIteration:
            break
    # lookup the candidate callable matches either using global_matches
    # or attr_matches for dotted names
    if len(ids) == 1:
        callableMatches = ip.Completer.global_matches(ids[0])
    else:
        callableMatches = ip.Completer.attr_matches('.'.join(ids[::-1]))
    argMatches = []
    for callableMatch in callableMatches:
        # drop the .New at this end, so we can search in the class members
        if callableMatch.endswith(".New"):
            callableMatch = callableMatch[:-4]
        try:
            object = eval(callableMatch, ip.Completer.namespace)
            if isinstance(object, itkTemplate.itkTemplate):
                # this is a template - lets grab the first entry to search for
                # the methods
                object = list(object.values())[0]
            namedArgs = []
            isin = isinstance(object, itk.LightObject)
            if inspect.isclass(object):
                issub = issubclass(object, itk.LightObject)
            if isin or (inspect.isclass(object) and issub):
                namedArgs = [n[3:] for n in dir(object) if n.startswith("Set")]
        except Exception as e:
            print(e)
            continue
        for namedArg in namedArgs:
            if namedArg.startswith(text):
                argMatches.append(u"%s=" % namedArg)
    return argMatches
Example No. 41
0


# install the progress callback and a custom completer if we are in an
# IPython interpreter
try:
    import itkConfig
    import IPython
    if IPython.get_ipython():
        IPython.get_ipython().Completer.matchers.insert(0, ipython_kw_matches)
    # some cleanup
    del itkConfig, IPython
except (ImportError, AttributeError):
    # fail silently
    pass
Example No. 42
0
    def OnInit(self):
        self.SetAppName("Eelbrain")
        self.SetAppDisplayName("Eelbrain")

        # File Menu
        m = file_menu = wx.Menu()
        m.Append(wx.ID_OPEN, '&Open... \tCtrl+O')
        m.AppendSeparator()
        m.Append(wx.ID_CLOSE, '&Close Window \tCtrl+W')
        m.Append(wx.ID_SAVE, "Save \tCtrl+S")
        m.Append(wx.ID_SAVEAS, "Save As... \tCtrl+Shift+S")

        # Edit Menu
        m = edit_menu = wx.Menu()
        m.Append(ID.UNDO, '&Undo \tCtrl+Z')
        m.Append(ID.REDO, '&Redo \tCtrl+Shift+Z')
        m.AppendSeparator()
        m.Append(wx.ID_CUT, 'Cut \tCtrl+X')
        m.Append(wx.ID_COPY, 'Copy \tCtrl+C')
        m.Append(ID.COPY_AS_PNG, 'Copy as PNG \tCtrl+Shift+C')
        m.Append(wx.ID_PASTE, 'Paste \tCtrl+V')
        m.AppendSeparator()
        m.Append(wx.ID_CLEAR, 'Cle&ar')

        # Tools Menu
        # updated by the active GUI
        tools_menu = wx.Menu()

        # View Menu
        m = view_menu = wx.Menu()
        m.Append(ID.SET_VLIM, "Set Y-Axis Limit... \tCtrl+l", "Change the Y-"
                 "axis limit in epoch plots")
        m.Append(ID.SET_MARKED_CHANNELS, "Mark Channels...", "Mark specific "
                 "channels in plots")
        self._draw_crosshairs_menu_item = m.Append(ID.DRAW_CROSSHAIRS,
                                                   "Draw &Crosshairs",
                                                   "Draw crosshairs under the "
                                                   "cursor",
                                                   kind=wx.ITEM_CHECK)
        m.AppendSeparator()
        m.Append(ID.SET_LAYOUT, "&Set Layout... \tCtrl+Shift+l", "Change the "
                 "page layout")

        # Go Menu
        m = go_menu = wx.Menu()
        m.Append(wx.ID_FORWARD, '&Forward \tCtrl+]', 'Go One Page Forward')
        m.Append(wx.ID_BACKWARD, '&Back \tCtrl+[', 'Go One Page Back')

        # Window Menu
        m = window_menu = wx.Menu()
        m.Append(ID.WINDOW_MINIMIZE, '&Minimize \tCtrl+M')
        m.Append(ID.WINDOW_ZOOM, '&Zoom')
        m.AppendSeparator()
        m.Append(ID.WINDOW_TILE, '&Tile')
        m.AppendSeparator()
        self.window_menu_window_items = []

        # Help Menu
        m = help_menu = wx.Menu()
        m.Append(ID.HELP_EELBRAIN, 'Eelbrain Help')
        m.Append(ID.HELP_PYTHON, "Python Help")
        m.AppendSeparator()
        m.Append(wx.ID_ABOUT, '&About Eelbrain')

        # Menu Bar
        menu_bar = wx.MenuBar()
        menu_bar.Append(file_menu, "File")
        menu_bar.Append(edit_menu, "Edit")
        menu_bar.Append(tools_menu, "Tools")
        menu_bar.Append(view_menu, "View")
        menu_bar.Append(go_menu, "Go")
        menu_bar.Append(window_menu, "Window")
        menu_bar.Append(help_menu,
                        self.GetMacHelpMenuTitleName() if IS_OSX else 'Help')
        wx.MenuBar.MacSetCommonMenuBar(menu_bar)
        self.menubar = menu_bar

        # Dock icon
        self.dock_icon = DockIcon(self)

        # Bind Menu Commands
        self.Bind(wx.EVT_MENU_OPEN, self.OnMenuOpened)
        self.Bind(wx.EVT_MENU, self.OnAbout, id=wx.ID_ABOUT)
        self.Bind(wx.EVT_MENU, self.OnOpen, id=wx.ID_OPEN)
        self.Bind(wx.EVT_MENU, self.OnClear, id=wx.ID_CLEAR)
        self.Bind(wx.EVT_MENU, self.OnCloseWindow, id=wx.ID_CLOSE)
        self.Bind(wx.EVT_MENU, self.OnCopy, id=wx.ID_COPY)
        self.Bind(wx.EVT_MENU, self.OnCopyAsPNG, id=ID.COPY_AS_PNG)
        self.Bind(wx.EVT_MENU, self.OnCut, id=wx.ID_CUT)
        self.Bind(wx.EVT_MENU, self.OnDrawCrosshairs, id=ID.DRAW_CROSSHAIRS)
        self.Bind(wx.EVT_MENU, self.OnOnlineHelp, id=ID.HELP_EELBRAIN)
        self.Bind(wx.EVT_MENU, self.OnOnlineHelp, id=ID.HELP_PYTHON)
        self.Bind(wx.EVT_MENU, self.OnPaste, id=wx.ID_PASTE)
        self.Bind(wx.EVT_MENU, self.OnRedo, id=ID.REDO)
        self.Bind(wx.EVT_MENU, self.OnSave, id=wx.ID_SAVE)
        self.Bind(wx.EVT_MENU, self.OnSaveAs, id=wx.ID_SAVEAS)
        self.Bind(wx.EVT_MENU, self.OnSetLayout, id=ID.SET_LAYOUT)
        self.Bind(wx.EVT_MENU,
                  self.OnSetMarkedChannels,
                  id=ID.SET_MARKED_CHANNELS)
        self.Bind(wx.EVT_MENU, self.OnSetVLim, id=ID.SET_VLIM)
        self.Bind(wx.EVT_MENU, self.OnUndo, id=ID.UNDO)
        self.Bind(wx.EVT_MENU, self.OnWindowMinimize, id=ID.WINDOW_MINIMIZE)
        self.Bind(wx.EVT_MENU, self.OnWindowTile, id=ID.WINDOW_TILE)
        self.Bind(wx.EVT_MENU, self.OnWindowZoom, id=ID.WINDOW_ZOOM)
        self.Bind(wx.EVT_MENU, self.OnQuit, id=wx.ID_EXIT)
        self.Bind(wx.EVT_MENU, self.OnYieldToTerminal, id=ID.YIELD_TO_TERMINAL)

        # bind update UI
        self.Bind(wx.EVT_UPDATE_UI, self.OnUpdateUIBackward, id=wx.ID_BACKWARD)
        self.Bind(wx.EVT_UPDATE_UI, self.OnUpdateUIClear, id=wx.ID_CLEAR)
        self.Bind(wx.EVT_UPDATE_UI, self.OnUpdateUIClose, id=wx.ID_CLOSE)
        self.Bind(wx.EVT_UPDATE_UI, self.OnUpdateUICopy, id=wx.ID_COPY)
        self.Bind(wx.EVT_UPDATE_UI,
                  self.OnUpdateUICopyAsPNG,
                  id=ID.COPY_AS_PNG)
        self.Bind(wx.EVT_UPDATE_UI, self.OnUpdateUICut, id=wx.ID_CUT)
        self.Bind(wx.EVT_UPDATE_UI, self.OnUpdateUIDown, id=wx.ID_DOWN)
        self.Bind(wx.EVT_UPDATE_UI,
                  self.OnUpdateUIDrawCrosshairs,
                  id=ID.DRAW_CROSSHAIRS)
        self.Bind(wx.EVT_UPDATE_UI, self.OnUpdateUIForward, id=wx.ID_FORWARD)
        self.Bind(wx.EVT_UPDATE_UI, self.OnUpdateUIOpen, id=wx.ID_OPEN)
        self.Bind(wx.EVT_UPDATE_UI, self.OnUpdateUIPaste, id=wx.ID_PASTE)
        self.Bind(wx.EVT_UPDATE_UI, self.OnUpdateUIRedo, id=ID.REDO)
        self.Bind(wx.EVT_UPDATE_UI, self.OnUpdateUISave, id=wx.ID_SAVE)
        self.Bind(wx.EVT_UPDATE_UI, self.OnUpdateUISaveAs, id=wx.ID_SAVEAS)
        self.Bind(wx.EVT_UPDATE_UI, self.OnUpdateUISetLayout, id=ID.SET_LAYOUT)
        self.Bind(wx.EVT_UPDATE_UI,
                  self.OnUpdateUISetMarkedChannels,
                  id=ID.SET_MARKED_CHANNELS)
        self.Bind(wx.EVT_UPDATE_UI, self.OnUpdateUISetVLim, id=ID.SET_VLIM)
        self.Bind(wx.EVT_UPDATE_UI, self.OnUpdateUITools, id=ID.TOOLS)
        self.Bind(wx.EVT_UPDATE_UI, self.OnUpdateUIUndo, id=ID.UNDO)
        self.Bind(wx.EVT_UPDATE_UI, self.OnUpdateUIUp, id=wx.ID_UP)

        # register in IPython
        self.using_prompt_toolkit = False
        self._ipython = None
        if ('IPython' in sys.modules and LooseVersion(
                sys.modules['IPython'].__version__) >= LooseVersion('5')
                and CONFIG['prompt_toolkit']):
            import IPython

            IPython.terminal.pt_inputhooks.register('eelbrain',
                                                    self.pt_inputhook)
            shell = IPython.get_ipython()
            if shell is not None:
                try:
                    shell.enable_gui('eelbrain')
                except IPython.core.error.UsageError:
                    print("Prompt-toolkit does not seem to be supported by "
                          "the current IPython shell (%s); The Eelbrain GUI "
                          "needs to block Terminal input to work. Use "
                          "eelbrain.gui.run() to start GUI interaction." %
                          shell.__class__.__name__)
                else:
                    self.using_prompt_toolkit = True
                    self._ipython = shell

        self.SetExitOnFrameDelete(not self.using_prompt_toolkit)
        if not self.using_prompt_toolkit:
            go_menu.AppendSeparator()
            go_menu.Append(ID.YIELD_TO_TERMINAL,
                           '&Yield to Terminal \tAlt+Ctrl+Q')

        return True
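
The block above hooks the wx event loop into IPython through prompt-toolkit input hooks. A minimal generic sketch of the same mechanism, assuming a wx application (the hook name 'mygui' and the polling loop are illustrative, not Eelbrain's actual implementation):

import wx
import IPython
from IPython.terminal import pt_inputhooks

def mygui_inputhook(context):
    # Pump pending wx events until IPython reports that terminal input is ready.
    app = wx.GetApp() or wx.App(False)
    while not context.input_is_ready():
        app.ProcessPendingEvents()
        wx.MilliSleep(10)

pt_inputhooks.register('mygui', mygui_inputhook)
shell = IPython.get_ipython()
if shell is not None:
    shell.enable_gui('mygui')  # may raise UsageError on unsupported shells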
Example No. 43
0
def theano_gradient_funtimes():
    import theano
    import numpy as np
    import theano.tensor as T
    import lasagne
    import ibeis_cnn.theano_ext as theano_ext
    import utool as ut  # provides ut.inIPython(), used below

    TEST = True

    x_data = np.linspace(-10, 10, 100).astype(np.float32)[:, None, None, None]
    y_data = (x_data**2).flatten()[:, None]

    X = T.tensor4('x')
    y = T.matrix('y')

    #x_data_batch =
    #y_data_batch =
    inputs_to_value = {X: x_data[0:16], y: y_data[0:16]}

    l_in = lasagne.layers.InputLayer((16, 1, 1, 1))
    l_out = lasagne.layers.DenseLayer(
        l_in,
        num_units=1,
        nonlinearity=lasagne.nonlinearities.rectify,
        W=lasagne.init.Orthogonal())

    network_output = lasagne.layers.get_output(l_out, X)

    # TEST NETWORK OUTPUT

    if TEST:
        result = theano_ext.eval_symbol(network_output, inputs_to_value)
        print('network_output = %r' % (result, ))

    loss_function = lasagne.objectives.squared_error
    #def loss_function(network_output, labels):
    #    return (network_output - labels) ** 2

    losses = loss_function(network_output, y)
    if TEST:
        result = theano_ext.eval_symbol(losses, inputs_to_value)
        print('losses = %r' % (result, ))

    loss = lasagne.objectives.aggregate(losses, mode='mean')

    if TEST:
        result = theano_ext.eval_symbol(loss, inputs_to_value)
        print('loss = %r' % (result, ))

    L2 = lasagne.regularization.regularize_network_params(
        l_out, lasagne.regularization.l2)
    weight_decay = .0001
    loss_regularized = loss + weight_decay * L2
    loss_regularized.name = 'loss_regularized'

    parameters = lasagne.layers.get_all_params(l_out)

    gradients_regularized = theano.grad(loss_regularized,
                                        parameters,
                                        add_names=True)

    if TEST:
        if False:
            s = T.sum(1 / (1 + T.exp(-X)))
            s.name = 's'
            gs = T.grad(s, X, add_names=True)
            theano.pp(gs)
            inputs_to_value = {X: x_data[0:16], y: y_data[0:16]}
            result = theano_ext.eval_symbol(gs, inputs_to_value)
            print('%s = %r' % (
                gs.name,
                result,
            ))
            inputs_to_value = {X: x_data[16:32], y: y_data[16:32]}
            result = theano_ext.eval_symbol(gs, inputs_to_value)
            print('%s = %r' % (
                gs.name,
                result,
            ))

        for grad in gradients_regularized:
            result = theano_ext.eval_symbol(grad, inputs_to_value)
            print('%s = %r' % (
                grad.name,
                result,
            ))

        grad_on_losses = theano.grad(losses, parameters, add_names=True)

    learning_rate_theano = .0001
    momentum = .9
    updates = lasagne.updates.nesterov_momentum(gradients_regularized,
                                                parameters,
                                                learning_rate_theano, momentum)

    X_batch = T.tensor4('x_batch')
    y_batch = T.fvector('y_batch')

    func = theano.function(
        inputs=[theano.Param(X_batch),
                theano.Param(y_batch)],
        outputs=[network_output, losses],
        #updates=updates,
        givens={
            X: X_batch,
            y: y_batch,
        },
    )

    y_predict_batch, loss_batch = func(inputs_to_value[X], inputs_to_value[y])

    if ut.inIPython():
        import IPython
        IPython.get_ipython().magic('pylab qt4')

    import plottool as pt
    # plot the evaluated batch (the original referenced an undefined
    # `y_predict`; the computed variable is `y_predict_batch`)
    pt.plot(inputs_to_value[X].flatten(), y_predict_batch.flatten())
    pt.iup()
Example No. 44
0
def line_profile(image,
                 order=2,
                 plotter=None,
                 comparisons=None,
                 **viewer_kwargs):
    """View the image with a line profile.

    Creates and returns an ipywidget to visualize the image along with a line
    profile.

    The image can be 2D or 3D.

    Parameters
    ----------
    image : array_like, itk.Image, or vtk.vtkImageData
        The 2D or 3D image to visualize.

    order : int, optional
        Spline order for line profile interpolation. The order has to be in the
        range 0-5.

    plotter : 'plotly', 'bqplot', or 'ipympl', optional
        Plotting library to use. If not defined, use plotly if available,
        otherwise bqplot if available, otherwise ipympl.

    comparisons: dict, optional
        A dictionary whose keys are legend labels and whose values are other
        images whose intensities to plot over the same line.

    viewer_kwargs : optional
        Keyword arguments for the viewer. See help(itkwidgets.view).

    """

    profiler = LineProfiler(image=image, **viewer_kwargs)

    if not plotter:
        try:
            import plotly.graph_objs as go
            plotter = 'plotly'
        except ImportError:
            pass
    if not plotter:
        try:
            import bqplot
            plotter = 'bqplot'
        except ImportError:
            pass
    if not plotter:
        plotter = 'ipympl'

    def get_profile(image_or_array):
        image_from_array = to_itk_image(image_or_array)
        if image_from_array:
            image_ = image_from_array
        else:
            image_ = image_or_array
        image_array = itk.GetArrayViewFromImage(image_)
        dimension = image_.GetImageDimension()
        distance = np.sqrt(
            sum([(profiler.point1[ii] - profiler.point2[ii])**2
                 for ii in range(dimension)]))
        index1 = tuple(
            image_.TransformPhysicalPointToIndex(
                tuple(profiler.point1[:dimension])))
        index2 = tuple(
            image_.TransformPhysicalPointToIndex(
                tuple(profiler.point2[:dimension])))
        num_points = int(
            np.round(
                np.sqrt(
                    sum([(index1[ii] - index2[ii])**2
                         for ii in range(dimension)])) * 2.1))
        coords = [
            np.linspace(index1[ii], index2[ii], num_points)
            for ii in range(dimension)
        ]
        mapped = scipy.ndimage.map_coordinates(image_array,
                                               np.vstack(coords[::-1]),
                                               order=order,
                                               mode='nearest')
        return np.linspace(0.0, distance, num_points), mapped

    if plotter == 'plotly':
        import plotly.graph_objs as go
        layout = go.Layout(xaxis=dict(title='Distance'),
                           yaxis=dict(title='Intensity'))
        fig = go.FigureWidget(layout=layout)
    elif plotter == 'bqplot':
        import bqplot
        x_scale = bqplot.LinearScale()
        y_scale = bqplot.LinearScale()
        x_axis = bqplot.Axis(scale=x_scale,
                             grid_lines='solid',
                             label='Distance')
        y_axis = bqplot.Axis(scale=y_scale,
                             orientation='vertical',
                             grid_lines='solid',
                             label='Intensity')
        labels = ['Reference']
        display_legend = False
        if comparisons:
            display_legend = True
            labels += [label for label in comparisons.keys()]
        lines = [
            bqplot.Lines(scales={
                'x': x_scale,
                'y': y_scale
            },
                         labels=labels,
                         display_legend=display_legend,
                         enable_hover=True)
        ]
        fig = bqplot.Figure(marks=lines, axes=[x_axis, y_axis])
    elif plotter == 'ipympl':
        ipython = IPython.get_ipython()
        ipython.enable_matplotlib('widget')

        is_interactive = matplotlib.is_interactive()
        matplotlib.interactive(False)

        fig, ax = plt.subplots()
    else:
        raise ValueError('Invalid plotter: ' + plotter)

    def update_plot():
        if plotter == 'plotly':
            distance, intensity = get_profile(image)
            fig.data[0]['x'] = distance
            fig.data[0]['y'] = intensity
            if comparisons:
                for ii, image_ in enumerate(comparisons.values()):
                    distance, intensity = get_profile(image_)
                    fig.data[ii + 1]['x'] = distance
                    fig.data[ii + 1]['y'] = intensity
        elif plotter == 'bqplot':
            distance, intensity = get_profile(image)
            if comparisons:
                for image_ in comparisons.values():
                    distance_, intensity_ = get_profile(image_)
                    distance = np.vstack((distance, distance_))
                    intensity = np.vstack((intensity, intensity_))
            fig.marks[0].x = distance
            fig.marks[0].y = intensity
        elif plotter == 'ipympl':
            if comparisons:
                ax.plot(*get_profile(image), label='Reference')
                for label, image_ in comparisons.items():
                    ax.plot(*get_profile(image_), label=label)
                ax.legend()
            else:
                ax.plot(*get_profile(image))

            ax.set_xlabel('Distance')
            ax.set_ylabel('Intensity')
            fig.canvas.draw()
            fig.canvas.flush_events()

    def update_profile(change):
        if plotter == 'plotly':
            update_plot()
        elif plotter == 'bqplot':
            update_plot()
        elif plotter == 'ipympl':
            is_interactive = matplotlib.is_interactive()
            matplotlib.interactive(False)
            ax.clear()
            update_plot()
            matplotlib.interactive(is_interactive)

    if plotter == 'plotly':
        distance, intensity = get_profile(image)
        trace = go.Scattergl(x=distance, y=intensity, name='Reference')
        fig.add_trace(trace)
        if comparisons:
            for label, image_ in comparisons.items():
                distance, intensity = get_profile(image_)
                trace = go.Scattergl(x=distance, y=intensity, name=label)
                fig.add_trace(trace)
        widget = widgets.VBox([profiler, fig])
    elif plotter == 'bqplot':
        update_plot()
        widget = widgets.VBox([profiler, fig])
    elif plotter == 'ipympl':
        update_plot()
        widget = widgets.VBox([profiler, fig.canvas])

    profiler.observe(update_profile, names=['point1', 'point2'])

    return widget
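
A usage sketch for line_profile in a Jupyter notebook; the file name and the comparison image are illustrative assumptions:

import itk
import itkwidgets

image = itk.imread('brain.nrrd')                     # hypothetical input file
smoothed = itk.median_image_filter(image, radius=2)  # hypothetical comparison
itkwidgets.line_profile(image, order=1, plotter='bqplot',
                        comparisons={'Median filtered': smoothed})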
Example No. 45
0
def _storage_read(args, _):
  contents = _get_item_contents(args['object'])
  ipy = IPython.get_ipython()
  ipy.push({args['variable']: contents})
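
The push() call above injects the value into the notebook's interactive namespace so later cells can use it. A minimal sketch of the same mechanism (the variable name is arbitrary):

import IPython

ip = IPython.get_ipython()
if ip is not None:
    ip.push({'greeting': 'hello'})
    # In a subsequent notebook cell, `greeting` is now defined.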
Example No. 46
0
def _cell_magic(line, query):
    """Underlying function for bigquery cell magic

    Note:
        This function contains the underlying logic for the 'bigquery' cell
        magic. This function is not meant to be called directly.

    Args:
        line (str): "%%bigquery" followed by arguments as required
        query (str): SQL query to run

    Returns:
        pandas.DataFrame: the query results.
    """
    # The built-in parser does not recognize Python structures such as dicts,
    # thus we extract the "--params" option and interpret it separately.
    try:
        params_option_value, rest_of_args = _split_args_line(line)
    except lap.exceptions.QueryParamsParseError as exc:
        rebranded_error = SyntaxError(
            "--params is not a correctly formatted JSON string or a JSON "
            "serializable dictionary")
        six.raise_from(rebranded_error, exc)
    except lap.exceptions.DuplicateQueryParamsError as exc:
        rebranded_error = ValueError("Duplicate --params option.")
        six.raise_from(rebranded_error, exc)
    except lap.exceptions.ParseError as exc:
        rebranded_error = ValueError(
            "Unrecognized input, are option values correct? "
            "Error details: {}".format(exc.args[0]))
        six.raise_from(rebranded_error, exc)

    args = magic_arguments.parse_argstring(_cell_magic, rest_of_args)

    if args.use_bqstorage_api is not None:
        warnings.warn(
            "Deprecated option --use_bqstorage_api, the BigQuery "
            "Storage API is already used by default.",
            category=DeprecationWarning,
        )
    use_bqstorage_api = not args.use_rest_api

    params = []
    if params_option_value:
        # A non-existing params variable is not expanded and ends up in the input
        # in its raw form, e.g. "$query_params".
        if params_option_value.startswith("$"):
            msg = 'Parameter expansion failed, undefined variable "{}".'.format(
                params_option_value[1:])
            raise NameError(msg)

        params = _helpers.to_query_parameters(
            ast.literal_eval(params_option_value))

    project = args.project or context.project

    bigquery_client_options = copy.deepcopy(context.bigquery_client_options)
    if args.bigquery_api_endpoint:
        if isinstance(bigquery_client_options, dict):
            bigquery_client_options[
                "api_endpoint"] = args.bigquery_api_endpoint
        else:
            bigquery_client_options.api_endpoint = args.bigquery_api_endpoint

    client = bigquery.Client(
        project=project,
        credentials=context.credentials,
        default_query_job_config=context.default_query_job_config,
        client_info=client_info.ClientInfo(user_agent=IPYTHON_USER_AGENT),
        client_options=bigquery_client_options,
    )
    if context._connection:
        client._connection = context._connection

    bqstorage_client_options = copy.deepcopy(context.bqstorage_client_options)
    if args.bqstorage_api_endpoint:
        if isinstance(bqstorage_client_options, dict):
            bqstorage_client_options[
                "api_endpoint"] = args.bqstorage_api_endpoint
        else:
            bqstorage_client_options.api_endpoint = args.bqstorage_api_endpoint

    bqstorage_client = _make_bqstorage_client(
        use_bqstorage_api,
        context.credentials,
        bqstorage_client_options,
    )

    close_transports = functools.partial(_close_transports, client,
                                         bqstorage_client)

    try:
        if args.max_results:
            max_results = int(args.max_results)
        else:
            max_results = None

        query = query.strip()

        if not query:
            error = ValueError("Query is missing.")
            _handle_error(error, args.destination_var)
            return

        # Any query that does not contain whitespace (aside from leading and trailing whitespace)
        # is assumed to be a table id
        if not re.search(r"\s", query):
            try:
                rows = client.list_rows(query, max_results=max_results)
            except Exception as ex:
                _handle_error(ex, args.destination_var)
                return

            result = rows.to_dataframe(bqstorage_client=bqstorage_client)
            if args.destination_var:
                IPython.get_ipython().push({args.destination_var: result})
                return
            else:
                return result

        job_config = bigquery.job.QueryJobConfig()
        job_config.query_parameters = params
        job_config.use_legacy_sql = args.use_legacy_sql
        job_config.dry_run = args.dry_run

        if args.destination_table:
            split = args.destination_table.split(".")
            if len(split) != 2:
                raise ValueError(
                    "--destination_table should be in a <dataset_id>.<table_id> format."
                )
            dataset_id, table_id = split
            job_config.allow_large_results = True
            dataset_ref = bigquery.dataset.DatasetReference(
                client.project, dataset_id)
            destination_table_ref = dataset_ref.table(table_id)
            job_config.destination = destination_table_ref
            job_config.create_disposition = "CREATE_IF_NEEDED"
            job_config.write_disposition = "WRITE_TRUNCATE"
            _create_dataset_if_necessary(client, dataset_id)

        if args.maximum_bytes_billed == "None":
            job_config.maximum_bytes_billed = 0
        elif args.maximum_bytes_billed is not None:
            value = int(args.maximum_bytes_billed)
            job_config.maximum_bytes_billed = value

        try:
            query_job = _run_query(client, query, job_config=job_config)
        except Exception as ex:
            _handle_error(ex, args.destination_var)
            return

        if not args.verbose:
            display.clear_output()

        if args.dry_run and args.destination_var:
            IPython.get_ipython().push({args.destination_var: query_job})
            return
        elif args.dry_run:
            print("Query validated. This query will process {} bytes.".format(
                query_job.total_bytes_processed))
            return query_job

        if max_results:
            result = query_job.result(max_results=max_results).to_dataframe(
                bqstorage_client=bqstorage_client)
        else:
            result = query_job.to_dataframe(bqstorage_client=bqstorage_client)

        if args.destination_var:
            IPython.get_ipython().push({args.destination_var: result})
        else:
            return result
    finally:
        close_transports()
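
A usage sketch of the cell magic above, run as two notebook cells; the public table is real, the parameter value is illustrative:

%load_ext google.cloud.bigquery

%%bigquery df --params {"num_rows": 10}
SELECT name, SUM(number) AS total
FROM `bigquery-public-data.usa_names.usa_1910_2013`
GROUP BY name
ORDER BY total DESC
LIMIT @num_rows

Because a destination variable (df) is given, the resulting DataFrame is pushed into the namespace instead of being returned for display.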
Example No. 47
0
def get_ioloop():
    ipython = IPython.get_ipython()
    if ipython and hasattr(ipython, 'kernel'):
        return zmq.eventloop.ioloop.IOLoop.instance()
Example No. 48
0
def synthesize_teaching_controller_general_configuration():
    """Synthesize a controller for teaching.

    The output controller is in the general configuration.
    """
    # constant
    Ts = 0.008  # sampling interval
    s = co.tf([1, 0], [1])

    k_env_nom = 20
    m_desired = 1.2
    b_desired = 8

    K_env_aug = 1.0

    # desired response from w3 to z1
    plant_nominal = pool.PlantV3.plant(K_env=20,
                                       m_tip=0.08,
                                       K_env_aug=K_env_aug)

    def xcmd_xenv_upper_bound(omegas):
        s = co.tf([1, 0], [1])
        return (3 / (1 + 0.03 * s)**3).freqresp(omegas)[0][0, 0]

    fmeasure_xenv_desired_tf = co.c2d(
        co.tf([m_desired * k_env_nom, b_desired * k_env_nom, 0],
              [m_desired, b_desired, 0 + k_env_nom]), Ts)

    f_env_aug_f_robust_tf = Ss.Qsyn.lambda_log_interpolate(
        [[0.1, 50], [25, 50], [40, 0.2], [100, 0.05]], preview=False)

    def f_env_aug_f_robust_shaping_for_human(omegas):
        s = co.tf([1, 0], [1])
        # return np.ones_like(omegas)
        return 400 * (1 + 0.157 * 1j * omegas)

    def f_env_aug_f_robust_shaping_for_spring(omegas):
        s = co.tf([1, 0], [1])
        # return np.ones_like(omegas)
        return 800 * (1 + 0.05 * 1j * omegas)

    design_dict = {
        'ny':
        2,
        'nu':
        1,
        'Ntaps':
        1400,
        'Nsteps':
        1000,
        'freqs':
        np.linspace(1e-2, np.pi / Ts, 1000),

        # different objective
        'shape-time-delay':
        6,  # number of delayed time step
        'shape-time': [['step', (0, 1), fmeasure_xenv_desired_tf, 1]
                       # ['step', (1, 1), xcmd_xenv_desired_tf, 1]
                       ],
        'constraint-freq': [
            # [(1, 1), xcmd_xenv_upper_bound, False],
            ([3, 3, f_env_aug_f_robust_shaping_for_human],
             f_env_aug_f_robust_tf, False)
        ],
        'reg2':
        1,

        # DC gain
        'dc-gain': [[(0, 1), 0]],
        'constraint-nyquist-stability': [
            # shaping for robust stability against human
            [(3, 3), f_env_aug_f_robust_shaping_for_human, (-0.0, 0), 0.30,
             (3, 15)],
            [(3, 3), f_env_aug_f_robust_shaping_for_human, (-0.5, 0), 1.57,
             (15, 220)],

            # # shaping for robust stability against spring
            # [(3, 3), f_env_aug_f_robust_shaping_for_spring, (-0.0, 0), 0.20, (3, 25)],
            # [(3, 3), f_env_aug_f_robust_shaping_for_spring, (-0.5, 0), 1.57, (25, 220)],
        ],
        'additional-freq-vars': [(3, 3), (1, 1), (1, 3), (2, 3)],
        'additional-time-vars': [
            ["step", (1, 1)],
            ["step", (0, 1)],
        ]
    }

    if input("Design controller?") == 'y':
        synthesis_result = Ss.Qsyn.Q_synthesis(plant_nominal, design_dict)

    if input("View immediate synthesis result?") == 'y':
        omega_interested = [1, 5, 10, 20, 40, 60, 80, 100, 200, 300]
        analysis_dict = {
            'row_col': (3, 2),
            'freqs':
            design_dict['freqs'],
            'recipe': [
                (0, 1, "step", (0, 1)),
                (0, 1, "step", fmeasure_xenv_desired_tf),
                (0, 0, "step", (1, 1)),
                (1, 0, "nyquist", (3, 3, f_env_aug_f_robust_shaping_for_human),
                 omega_interested),
                (1, 1, "nyquist", (3, 3,
                                   f_env_aug_f_robust_shaping_for_spring),
                 omega_interested),
                (2, 0, "bode_mag", (3, 3,
                                    f_env_aug_f_robust_shaping_for_human),
                 omega_interested),
                (2, 0, "bode_mag", [f_env_aug_f_robust_tf]),

                # (2, 1, "bode_mag", (1, 1), omega_interested),
                # (2, 1, "bode_mag", xcmd_xenv_upper_bound),
                (2, 1, "bode_phs", (1, 3), omega_interested),
                (2, 1, "bode_phs", (2, 3), omega_interested),
                (2, 1, "bode_phs", (3, 3,
                                    f_env_aug_f_robust_shaping_for_human),
                 omega_interested),
            ]
        }
        from importlib import reload
        reload(Ss)
        Ss.Qsyn.Q_synthesis_analysis(
            synthesis_result,
            analysis_dict,
            output_descriptions=pool.PlantV3.output_descriptions,
            input_descriptions=pool.PlantV3.input_descriptions)

    if input("Print controller for execution? y/[n]: ") == "y":
        DATA_DIR = '~/catkin_ws/src/infinite_interaction/config/teaching_experiment'
        import Jan09_print_controllers as print_controllers
        from importlib import reload
        reload(print_controllers)
        # profile_name = "Q_syn_admittance_v2"
        profile_name = "Q_syn_admittance_v3_Xaxis"
        print_controllers.print_controller(profile_name,
                                           synthesis_result,
                                           scale_output=1,
                                           DATA_DIR=DATA_DIR)

    if input("Convert controller to state-space and save for later analysis?"
             ) == "y":
        K_Qparam_ss = Ss.Qsyn.form_Q_feedback_controller_ss(
            synthesis_result['Qtaps'], synthesis_result['Pyu'])
        np.savez(
            "Nov21_synthesize_teaching_controller_general_configuration.npz",
            A=K_Qparam_ss.A,
            B=K_Qparam_ss.B,
            C=K_Qparam_ss.C,
            D=K_Qparam_ss.D,
            dt=K_Qparam_ss.dt)

    import IPython
    if IPython.get_ipython() is None:
        IPython.embed()
Example No. 49
0
    def flush(self):
        sys.stdout.flush()


FORMAT = "%(name)s.%(funcName)s:  %(message)s"
formatter = logging.Formatter(FORMAT)

# Check for IPython and use a special logger
use_ipython_handler = False
try:
    import IPython
except ImportError:
    pass
else:
    if IPython.get_ipython() is not None:
        use_ipython_handler = True
if use_ipython_handler:
    default_handler = IPythonStreamHandler()
else:
    default_handler = logging.StreamHandler(sys.stdout)
default_handler.setLevel(logging.INFO)
default_handler.setFormatter(formatter)


def handle_logging():
    "Send INFO-level log messages to stdout. Do not propagate."
    if use_ipython_handler:
        # Avoid double-printing messages to IPython stderr.
        trackpy.logger.propagate = False
    trackpy.logger.addHandler(default_handler)
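
A usage sketch for the handler setup above (the log message is illustrative):

import trackpy

handle_logging()  # the function defined above
trackpy.logger.info('linking trajectories...')  # printed to stdout / the notebook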
Example No. 50
0
    def hb(self, line=''):
        print("\n\n\n########################################")
        print(IPython.get_ipython().banner)
        EasyIpy.PrintEasyIpyStartupMessage()
Example No. 51
0
Shows a GUI with input parameters. The parameters can be plotted and saved, and the blade can be viewed in 3D.
The aerodynamic values of the blades are stored in .txt files for Reynolds numbers of 50,000. They can be generated using XFoil.
So far, only 4-digit NACA airfoils can be used.
A correction for the number of blades could not be successfully implemented, as it produced impossible values at the blade tip.

Input: design wind speed, design rotations, design rotor radius, .txt files of aerodynamic values
Output: blade design with chord and twist distribution.
"""

import numpy as np
import matplotlib.pyplot as plt

try:
    import IPython
    shell = IPython.get_ipython()
    shell.enable_matplotlib(gui='qt')
except Exception:
    print('failed to enable the qt matplotlib backend')
#########
###############          entries          #####################################
    def BEM(self):
        self.rotor_diameter         = 0.2                                       # rotor diameter
        self.hub_diameter           = 0.056                                     # hub diameter       
        self.rotations              = 3000                                      # rotations 
        self.v_des                  = 15                                        # wind speed
        self.Blades                 = 3                                         # blades 
        self.sections               = 15                                        # sections
        self.efficiency             = 80                                        # efficiency
        self.airfoil                = 'NACA_2418'                               # airfoil
Example No. 52
0
    @classmethod  # presumably dropped by the snippet extraction; needed for cls
    def GetInstance(cls):
        if cls.__Instance is None:
            cls.__Instance = cls(IPython.get_ipython())
        return cls.__Instance
Example No. 53
0
# to see your plot config
print(f"matplotlib backend: {matplotlib.get_backend()}")
print(f"matplotlib config file: {matplotlib.matplotlib_fname()}")
print(f"matplotlib config dir: {matplotlib.get_configdir()}")
plt.close("all")

# try to set separate window plotting
if "inline" in matplotlib.get_backend():
    print("Plotting is set to inline at the moment:", end=" ")

    if "ipykernel" in matplotlib.get_backend():
        print("backend is ipykernel (IPython?)")
        print("Trying to set backend to separate window:", end=" ")
        import IPython

        IPython.get_ipython().run_line_magic("matplotlib", "")
    else:
        print("unknown inline backend")

print("continuing with this plotting backend", end="\n\n\n")

# set styles
try:
    # installed with "pip install SciencePlots" (https://github.com/garrettj403/SciencePlots.git)
    # gives quite nice plots
    plt_styles = ["science", "grid", "bright", "no-latex"]
    plt.style.use(plt_styles)
    print(f"pyplot using style set {plt_styles}")
except Exception as e:
    print(e)
    print("setting grid and only grid and legend manually")
Example No. 54
0
    def __init__(self, imageOrFilter, Label=False, Title=None):
        import tempfile
        import itk
        import os
        import platform
        # get some data from the environment
        command = os.environ.get("WRAPITK_SHOW2D_COMMAND")
        if command is None:
            if platform.system() == "Darwin":
                command = (
                    "open -a ImageJ -n --args -eval 'open(\"%(image)s\"); "
                    "run (\"View 100%%\"); rename(\"%(title)s\");'")
            else:
                command = ("imagej %(image)s -run 'View 100%%' -eval "
                           "'rename(\"%(title)s\")' &")

        label_command = os.environ.get("WRAPITK_SHOW2D_LABEL_COMMAND")
        if label_command is None:
            if platform.system() == "Darwin":
                label_command = (
                    "open -a ImageJ -n --args -eval 'open(\"%(image)s\"); "
                    "run (\"View 100%%\"); rename(\"%(title)s\"); "
                    "run(\"3-3-2 RGB\");'")
            else:
                label_command = ("imagej %(image)s -run 'View 100%%' -eval "
                                 "'rename(\"%(title)s\")' -run '3-3-2 RGB' &")

        compress = os.environ.get(
            "WRAPITK_SHOW2D_COMPRESS",
            "true").lower() in ["on", "true", "yes", "1"]
        extension = os.environ.get("WRAPITK_SHOW2D_EXTENSION", ".tif")

        # use the tempfile module to get an unused file name and to put
        # the file at the right place
        self.__tmpFile__ = tempfile.NamedTemporaryFile(suffix=extension)
        # get an updated image
        img = output(imageOrFilter)
        img.UpdateOutputInformation()
        img.Update()
        if Title is None:
            # try to generate a title
            s = img.GetSource()
            if s:
                s = itk.down_cast(s)
                if hasattr(img, "GetSourceOutputIndex"):
                    o = '[%s]' % img.GetSourceOutputIndex()
                elif hasattr(img, "GetSourceOutputName"):
                    o = '[%s]' % img.GetSourceOutputName()
                else:
                    o = ""
                Title = "%s%s" % (s.__class__.__name__, o)
            else:
                Title = img.__class__.__name__
            try:
                import IPython
                ip = IPython.get_ipython()
                if ip is not None:
                    names = []
                    ref = imageOrFilter
                    if s:
                        ref = s
                    for n, v in ip.user_ns.items():  # items() for Python 3
                        if isinstance(v, itk.LightObject) and v == ref:
                            names.append(n)
                    if names != []:
                        Title = ", ".join(names) + " - " + Title
            except ImportError:
                # just do nothing
                pass
        # change the LabelMaps to an Image, so we can look at them easily
        if 'LabelMap' in dir(itk) and img.GetNameOfClass() == 'LabelMap':
            # retrieve the biggest label in the label map
            maxLabel = img.GetNthLabelObject(img.GetNumberOfLabelObjects() -
                                             1).GetLabel()
            # search for a filter able to convert the label map: its input
            # must match this image's class and its output pixel type must be
            # able to hold the largest label (the original referenced `params`
            # before binding it; the conditions belong inside the comprehension)
            lab = itk.LabelMapToLabelImageFilter.keys()
            label_image_type = sorted(
                params[1] for params in lab
                if params[0] == class_(img) and
                itk.NumericTraits[itk.template(params[1])[1][0]].max() >=
                maxLabel)[0]
            convert = itk.LabelMapToLabelImageFilter[img,
                                                     label_image_type].New(img)
            convert.Update()
            img = convert.GetOutput()
            # this is a label image - force the parameter
            Label = True
        write(img, self.__tmpFile__.name, compress)
        # now run the external viewer (ImageJ by default)
        if Label:
            os.system(label_command % {
                "image": self.__tmpFile__.name,
                "title": Title
            })
        else:
            os.system(command % {
                "image": self.__tmpFile__.name,
                "title": Title
            })
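
The viewer above is configured entirely through environment variables. A usage sketch, assuming a hypothetical viewer command (the %(image)s and %(title)s placeholders are filled in by the class):

import os

os.environ['WRAPITK_SHOW2D_COMMAND'] = "my_viewer %(image)s --title '%(title)s' &"
os.environ['WRAPITK_SHOW2D_EXTENSION'] = '.nrrd'
os.environ['WRAPITK_SHOW2D_COMPRESS'] = 'false'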
Example No. 55
0
def _cell_magic(line, query):
    """Underlying function for bigquery cell magic

    Note:
        This function contains the underlying logic for the 'bigquery' cell
        magic. This function is not meant to be called directly.

    Args:
        line (str): "%%bigquery" followed by arguments as required
        query (str): SQL query to run

    Returns:
        pandas.DataFrame: the query results.
    """
    args = magic_arguments.parse_argstring(_cell_magic, line)

    params = []
    if args.params is not None:
        try:
            params = _helpers.to_query_parameters(
                ast.literal_eval("".join(args.params))
            )
        except Exception:
            raise SyntaxError(
                "--params is not a correctly formatted JSON string or a JSON "
                "serializable dictionary"
            )

    project = args.project or context.project
    client = bigquery.Client(
        project=project,
        credentials=context.credentials,
        default_query_job_config=context.default_query_job_config,
        client_info=client_info.ClientInfo(user_agent=IPYTHON_USER_AGENT),
    )
    if context._connection:
        client._connection = context._connection
    bqstorage_client = _make_bqstorage_client(
        args.use_bqstorage_api or context.use_bqstorage_api, context.credentials
    )

    if args.max_results:
        max_results = int(args.max_results)
    else:
        max_results = None

    job_config = bigquery.job.QueryJobConfig()
    job_config.query_parameters = params
    job_config.use_legacy_sql = args.use_legacy_sql
    job_config.dry_run = args.dry_run

    if args.maximum_bytes_billed == "None":
        job_config.maximum_bytes_billed = 0
    elif args.maximum_bytes_billed is not None:
        value = int(args.maximum_bytes_billed)
        job_config.maximum_bytes_billed = value

    error = None
    try:
        query_job = _run_query(client, query, job_config=job_config)
    except Exception as ex:
        error = str(ex)

    if not args.verbose:
        display.clear_output()

    if error:
        if args.destination_var:
            print(
                "Could not save output to variable '{}'.".format(args.destination_var),
                file=sys.stderr,
            )
        print("\nERROR:\n", error, file=sys.stderr)
        return

    if args.dry_run and args.destination_var:
        IPython.get_ipython().push({args.destination_var: query_job})
        return
    elif args.dry_run:
        print(
            "Query validated. This query will process {} bytes.".format(
                query_job.total_bytes_processed
            )
        )
        return query_job

    if max_results:
        result = query_job.result(max_results=max_results).to_dataframe(
            bqstorage_client=bqstorage_client
        )
    else:
        result = query_job.to_dataframe(bqstorage_client=bqstorage_client)

    if args.destination_var:
        IPython.get_ipython().push({args.destination_var: result})
    else:
        return result
Example No. 56
0
import os, sys, re, cPickle, gzip, time, collections, IPython, h5py, pandas
from itertools import *
import numpy as np
import scipy, scipy.special, scipy.stats, scipy.weave
sys.path.append(os.path.expanduser('~/mattenv/python'))
from mdhlib import *

from matplotlib.pyplot import *
ip = IPython.get_ipython()
ip.magic('matplotlib inline')

Example No. 57
0
import asyncio
import math
from collections import defaultdict
from typing import Callable, Coroutine

import IPython as ipy
import numpy as np
import pandas as pd
import yaml
from ib_insync import *

from assemble import assemble
from support import calcsd, calcsd_df, delete_all_files, get_dte

# Specific to Jupyter. Will be ignored in IDE / command-lines
if ipy.get_ipython().__class__.__name__ == 'ZMQInteractiveShell':
    import nest_asyncio
    nest_asyncio.apply()
    util.startLoop()
    pd.options.display.max_columns = None


async def chains(ib: IB, c) -> pd.DataFrame:

    chains = await ib.reqSecDefOptParamsAsync(underlyingSymbol=c.symbol,
                                              futFopExchange="",
                                              underlyingSecType=c.secType,
                                              underlyingConId=c.conId)

    # Pick up one chain if it is a list
    chain = chains[0] if isinstance(chains, list) else chains
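
A usage sketch for the coroutine above (host, port, client id and the contract are illustrative assumptions; an IB gateway or TWS session must be running):

import asyncio
from ib_insync import IB, Stock

async def main():
    ib = IB()
    await ib.connectAsync('127.0.0.1', 7497, clientId=1)
    contract = Stock('AAPL', 'SMART', 'USD')
    await ib.qualifyContractsAsync(contract)  # fills in contract.conId
    df = await chains(ib, contract)
    ib.disconnect()
    return df

asyncio.run(main())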
Example No. 58
0
    unique_name,
    use_alias,
)

# A registry of all figures that have had "show" called in this session.
# This is needed for the sphinx-gallery scraper in pygmt/sphinx_gallery.py
SHOWED_FIGURES = []

# Configurations for figure display
SHOW_CONFIG = {
    "method": "external",  # Open in an external viewer by default
}

# Show figures in Jupyter notebooks if available
if IPython:
    get_ipython = IPython.get_ipython()  # pylint: disable=invalid-name
    if get_ipython and "IPKernelApp" in get_ipython.config:  # Jupyter Notebook enabled
        SHOW_CONFIG["method"] = "notebook"

# Set environment variable PYGMT_USE_EXTERNAL_DISPLAY to 'false' to disable
# external display. Use it when running the tests and building the docs to
# avoid popping up windows.
if os.environ.get("PYGMT_USE_EXTERNAL_DISPLAY", "true").lower() == "false":
    SHOW_CONFIG["method"] = "none"


class Figure:
    """
    A GMT figure to handle all plotting.

    Use the plotting methods of this class to add elements to the figure.  You
Example No. 59
0
def get_data(source, fields='*', env=None, first_row=0, count=-1, schema=None):
    """ A utility function to get a subset of data from a Table, Query, Pandas dataframe or List.

  Args:
    source: the source of the data. Can be a Table, Pandas DataFrame, List of dictionaries or
        lists, or a string, in which case it is expected to be the name of a table in BQ.
    fields: a list of fields that we want to return as a list of strings, comma-separated string,
        or '*' for all.
    env: if the data source is a Query module, this is the set of variable overrides for
        parameterizing the Query.
    first_row: the index of the first row to return; default 0. Only used if count is non-negative.
    count: the number of rows to return. If negative (the default), return all rows.
    schema: the schema of the data. Optional; if supplied this can be used to help do type-coercion.

  Returns:
    A tuple consisting of a dictionary and a count; the dictionary has two entries: 'cols'
    which is a list of column metadata entries for Google Charts, and 'rows' which is a list of
    lists of values. The count is the total number of rows in the source (independent of the
    first_row/count parameters).

  Raises:
    Exception if the request could not be fulfilled.
  """

    ipy = IPython.get_ipython()
    if env is None:
        env = {}
    env.update(ipy.user_ns)
    if isinstance(source, basestring):
        source = datalab.utils.get_item(ipy.user_ns, source, source)
        if isinstance(source, basestring):
            source = datalab.bigquery.Table(source)

    if isinstance(source, types.ModuleType) or isinstance(
            source, datalab.data.SqlStatement):
        source = datalab.bigquery.Query(source, values=env)

    if isinstance(source, list):
        if len(source) == 0:
            return _get_data_from_empty_list(source, fields, first_row, count,
                                             schema)
        elif isinstance(source[0], dict):
            return _get_data_from_list_of_dicts(source, fields, first_row,
                                                count, schema)
        elif isinstance(source[0], list):
            return _get_data_from_list_of_lists(source, fields, first_row,
                                                count, schema)
        else:
            raise Exception(
                "To get tabular data from a list it must contain dictionaries or lists."
            )
    elif isinstance(source, pandas.DataFrame):
        return _get_data_from_dataframe(source, fields, first_row, count,
                                        schema)
    elif (isinstance(source, google.datalab.bigquery.Query)
          or isinstance(source, google.datalab.bigquery.Table)):
        return google.datalab.utils.commands._utils.get_data(
            source, fields, env, first_row, count, schema)
    elif isinstance(source, datalab.bigquery.Query):
        return _get_data_from_table(source.results(), fields, first_row, count,
                                    schema)
    elif isinstance(source, datalab.bigquery.Table):
        return _get_data_from_table(source, fields, first_row, count, schema)
    else:
        raise Exception("Cannot chart %s; unsupported object type" % source)
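
A usage sketch for get_data, assuming the module's private helpers are importable alongside it (the DataFrame contents are illustrative):

import pandas

df = pandas.DataFrame({'name': ['a', 'b', 'c'], 'value': [1, 2, 3]})
data, total_rows = get_data(df, fields=['name', 'value'], first_row=0, count=2)
# data['cols'] holds Google Charts column metadata, data['rows'] the first two
# rows; total_rows == 3, the full size of the source.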
Example No. 60
0
def get_source_from_frame(frame):
    module_name = (frame.f_globals or {}).get('__name__') or ''
    if module_name:
        try:
            return source_cache_by_module_name[module_name]
        except KeyError:
            pass
    file_name = frame.f_code.co_filename
    if file_name:
        try:
            return source_cache_by_file_name[file_name]
        except KeyError:
            pass
    loader = (frame.f_globals or {}).get('__loader__')

    source = None
    if hasattr(loader, 'get_source'):
        try:
            source = loader.get_source(module_name)
        except ImportError:
            pass
        if source is not None:
            source = source.splitlines()
    if source is None:
        ipython_filename_match = ipython_filename_pattern.match(file_name)
        if ipython_filename_match:
            entry_number = int(ipython_filename_match.group(1))
            try:
                import IPython
                ipython_shell = IPython.get_ipython()
                ((_, _, source_chunk),) = ipython_shell.history_manager. \
                                  get_range(0, entry_number, entry_number + 1)
                source = source_chunk.splitlines()
            except Exception:
                pass
        else:
            try:
                with open(file_name, 'rb') as fp:
                    source = fp.read().splitlines()
            except utils.file_reading_errors:
                pass
    if source is None:
        source = UnavailableSource()

    # If we just read the source from a file, or if the loader did not
    # apply tokenize.detect_encoding to decode the source into a
    # string, then we should do that ourselves.
    if isinstance(source[0], bytes):
        encoding = 'ascii'
        for line in source[:2]:
            # File coding may be specified. Match pattern from PEP-263
            # (https://www.python.org/dev/peps/pep-0263/)
            match = re.search(br'coding[:=]\s*([-\w.]+)', line)
            if match:
                encoding = match.group(1).decode('ascii')
                break
        source = [
            six.text_type(sline, encoding, 'replace') for sline in source
        ]

    if module_name:
        source_cache_by_module_name[module_name] = source
    if file_name:
        source_cache_by_file_name[file_name] = source
    return source