def sync(ctx: typer.Context,
         project: str = typer.Argument(..., help='The name for the project, specified in config file'),
         since: datetime = typer.Option(..., formats=['%Y-%m-%d']),
         until: datetime = typer.Option(..., formats=['%Y-%m-%d']),
         dry: bool = typer.Option(False, help='Use log entries instead of uploading them to redmine'),
         drain: bool = typer.Option(False, help='Use drain issues for entries without specified dest')):
    """Sync Toggl time entries for *project* into Redmine over the [since, until] date range.

    Pipeline: load config and HTTP session, fetch the Redmine user, pull Toggl
    entries and Redmine issues, split entries by whether their ``issue_id``
    matches a known issue, optionally reroute unmatched entries to drain
    issues, optionally group entries by day, then upload to Redmine
    (or just log them when ``dry`` is set).
    """
    # Load project configuration and prepare the shared HTTP session on the context.
    config = setup_config(ctx, ctx.meta['config_path'])
    setup_http(ctx)
    # Cache the current Redmine user on the context; its id is used for the upload below.
    ctx.meta['rdm_user'] = extract.get_redmine_user(config["redmine"]["url"])

    # Fetch source time entries and the target issues for the date window.
    time_entries = get_toggl_enteries(config, project, since, until)
    issues = get_redmine_issues(config, project, since)

    # Partition entries: those whose issue_id matches a fetched Redmine issue
    # can be loaded directly; the rest are "unset".
    issue_ids = petl.columns(issues)['id']
    entries_to_load, unset_entries = petl.biselect(
        time_entries, lambda row: row['issue_id'] in issue_ids)

    # Optionally reassign unset entries to designated "drain" issues.
    if drain and petl.nrows(unset_entries):
        log.info('Using drain')
        drained, unset_entries = drained_entries(ctx, issues, unset_entries,
                                                 project)
        log.info(f'Drained {petl.nrows(drained)} issues')
        entries_to_load = petl.cat(entries_to_load, drained)

    # Anything still unset is only reported, never uploaded.
    if petl.nrows(unset_entries):
        log.warning(f'There\'re {petl.nrows(unset_entries)} unset entries')

    # Optional per-project collapsing of entries sharing day + description.
    if get_proj_attr(config, project, 'group_entries'):
        log.info('Using group by day and description')
        entries_to_load = transform.group_entries_by_day(entries_to_load)

    # Upload (or, when dry=True, only log) the prepared entries to Redmine.
    load.to_redmine_time(config["redmine"]["url"],
                         entries_to_load,
                         activity_id=get_proj_attr(config, project,
                                                   'rdm_activity_id'),
                         user_id=ctx.meta['rdm_user'].get('id'),
                         dry=dry)
def make_sqlalchemy_table(table, tablename, schema=None, constraints=True,
                          metadata=None):
    """Build an :class:`sqlalchemy.Table` whose columns are inferred from a
    :mod:`petl` table.

    Parameters
    ----------
    table : sequence of sequences (petl table)
        Table data used to infer column names and types.
    tablename : string
        Name for the new SQL table.
    schema : string, optional
        Database schema to place the table in.
    constraints : bool
        When True, derive length and nullable constraints from the data.
    metadata : sqlalchemy.MetaData, optional
        Existing metadata to attach the table to; a fresh one is created
        otherwise.
    """
    # SQLAlchemy is an optional dependency; surface a friendly error if absent.
    try:
        import sqlalchemy
    except ImportError as e:
        raise UnsatisfiedDependency(e, dep_message)

    meta = metadata or sqlalchemy.MetaData()
    result = sqlalchemy.Table(tablename, meta, schema=schema)

    # One SQL column per petl field, typed from the observed values.
    data_by_field = columns(table)
    for field_name in header(table):
        result.append_column(
            make_sqlalchemy_column(data_by_field[field_name], field_name,
                                   constraints=constraints)
        )
    return result
def test_columns():
    # columns() should map each header field to the full list of its values.
    data = [['foo', 'bar'],
            ['a', 1],
            ['b', 2],
            ['b', 3]]
    actual = columns(data)
    eq_(['a', 'b', 'b'], actual['foo'])
    eq_([1, 2, 3], actual['bar'])
def test_columns():
    # Each header field should map to the ordered list of its cell values.
    data = [['foo', 'bar'],
            ['a', 1],
            ['b', 2],
            ['b', 3]]
    actual = columns(data)
    assertequal(['a', 'b', 'b'], actual['foo'])
    assertequal([1, 2, 3], actual['bar'])
from __future__ import division, print_function, absolute_import # columns() ########### import petl as etl table = [['foo', 'bar'], ['a', 1], ['b', 2], ['b', 3]] cols = etl.columns(table) cols['foo'] cols['bar'] # facetcolumns() ################ import petl as etl table = [['foo', 'bar', 'baz'], ['a', 1, True], ['b', 2, True], ['b', 3]] fc = etl.facetcolumns(table, 'foo') fc['a'] fc['b']
def test_columns():
    # Verify columns() turns a row-oriented table into per-field value lists.
    data = [["foo", "bar"],
            ["a", 1],
            ["b", 2],
            ["b", 3]]
    actual = columns(data)
    eq_(["a", "b", "b"], actual["foo"])
    eq_([1, 2, 3], actual["bar"])