Example #1
def export_intervals(chanjo_db, include_header=True, bed_score=0):
  r"""Return BED-formatted interval lines from existing ``chanjo_db``.

  BED lines are ready to be printed or written to a file.

  Args:
    chanjo_db (session): ``sqlalchemy.orm.session`` object with a
      ``.query``-method
    include_header (bool, optional): whether to include BED header
    bed_score (int, optional): dummy score (0-1000) to insert at field 5
      to complete the BED format

  Yields:
    str: stringified and tab-delimited interval

  Examples:
    >>> from chanjo import export_intervals, Store
    ... # instantiate a new connection to a Chanjo database
    >>> db = Store('./coverage.sqlite')
    >>> with open('intervals.sorted.bed', 'w') as stream:
    ...   # write intervals in BED-format with appropriate headers
    ...   for bed_line in export_intervals(db):
    ...     stream.write(bed_line + '\n')
  """
  if include_header:
    yield '#chrom\tchromStart\tchromEnd\tname\tscore\tstrand'

  # set up which columns to fetch to build the BED file
  # BED field 5 ("score") is filled in later with the dummy ``bed_score``
  i = Interval  # alias
  columns = (i.contig, i.start - 1, i.end, i.id, i.strand)

  # BED files are tab-delimited
  delimiter = '\t'

  # 1. fetch interval tuples from the database (producer)
  # 2. stringify each item in each subsequence (interval tuple)
  # 3. join lines on tab-character
  # 4. prepend the header
  bed_lines = pipe(
    fetch_records(chanjo_db, columns),
    map(map(str)),                        # convert fields to strings
    map(juxt(compose(list, take(4)),      # keep first 4 fields
             lambda _: [str(bed_score)],  # insert BED score
             compose(list, last))),       # keep last field
    map(concat),                          # flatten each item
    map(delimiter.join)                   # join on \t
  )

  for bed_line in bed_lines:
    yield bed_line
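A note on the juxt step above: it rebuilds each interval tuple with the dummy score spliced in at BED field 5. A minimal standalone sketch of that reshaping, assuming toolz.curried; the interval tuple here is made up for illustration:

# Hedged sketch of the juxt-based reshaping used in export_intervals;
# the interval tuple below is invented for illustration.
from toolz.curried import compose, concat, juxt, last, take

reshape = compose(
    list, concat,
    juxt(compose(list, take(4)),      # keep first 4 fields
         lambda _: ['0'],             # splice in the dummy BED score
         compose(list, last)),        # keep the strand field
)
assert reshape(['1', '99', '200', 'intv-1', '+']) == \
    ['1', '99', '200', 'intv-1', '0', '+']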
Example #2
def make_data(val):
    clients = compose(
        count,
        unique,
        pluck('customer'),
        lambda: val
    )
    animals = compose(
        valmap(count),
        groupby('species'),
        lambda: val
    )
    return {
        'total_val': reduce(lambda total, x: total + x.get('total_vat'), val, 0.00),
        'animals': _get_dict_to_csv(animals()),
        'clients': clients()
    }
Example #3
def _get_filters(filters):
    join = compose(lambda x: " AND ".join(x), concatv)
    clauses = join(
        ["bl.posting_datetime BETWEEN %(from_date)s AND %(to_date)s"],
        ["bl.station = %(station)s"] if filters.station else [],
    )

    return clauses, filters
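The composed join above concatenates the optional clause lists and glues them with " AND ". A hedged sketch of that behaviour, assuming only toolz; the clause strings are illustrative:

# Hedged sketch of the composed join; clause strings are made up.
from toolz import compose, concatv

join = compose(lambda x: " AND ".join(x), concatv)

assert join(["a = 1"], ["b = 2"]) == "a = 1 AND b = 2"
assert join(["a = 1"], []) == "a = 1"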
Example #4
def tom_base_state_to_base_state(tom_base_state):
    tom_base_state = valmap(compose(np.copy, np.flip), tom_base_state)
    q, T, z, rho = get(["qv", "T", "z", "rho"], tom_base_state)
    return {
        "QT": q * 1000.0,
        "SLI": T + grav / cp * z,
        "height_center": z,
        "density": rho
    }
Example #5
def get_orders_for(station=None, shipping_order=None):
    def get_qty(row):
        if row.get("loading_unit") == "Packages":
            return row.get("no_of_packages")
        if row.get("loading_unit") == "Weight":
            return row.get("weight_actual")
        return 0

    def set_qty(row):
        qty = get_qty(row)
        return merge(row, {"qty": qty, "available": qty})

    get_result = compose(list, map(set_qty), frappe.db.sql)

    if station:
        return get_result(
            """
                SELECT
                    bl.booking_order,
                    MAX(bl.loading_unit) AS loading_unit,
                    SUM(bl.no_of_packages) AS no_of_packages,
                    SUM(bl.weight_actual) AS weight_actual,
                    bl.bo_detail,
                    bofd.item_description AS description
                FROM `tabBooking Log` AS bl
                LEFT JOIN `tabBooking Order Freight Detail` AS bofd ON
                    bofd.name = bl.bo_detail
                WHERE bl.station = %(station)s
                GROUP BY bl.bo_detail HAVING
                    SUM(bl.no_of_packages) > 0 OR SUM(bl.weight_actual) > 0
            """,
            values={"station": station},
            as_dict=1,
        )

    if shipping_order:
        return get_result(
            """
                SELECT
                    bl.booking_order,
                    MAX(bl.loading_unit) AS loading_unit,
                    -SUM(bl.no_of_packages) AS no_of_packages,
                    -SUM(bl.weight_actual) AS weight_actual,
                    bl.bo_detail,
                    bofd.item_description AS description
                FROM `tabBooking Log` AS bl
                LEFT JOIN `tabBooking Order Freight Detail` AS bofd ON
                    bofd.name = bl.bo_detail
                WHERE bl.shipping_order = %(shipping_order)s
                GROUP BY bl.bo_detail HAVING
                    SUM(bl.no_of_packages) < 0 OR SUM(bl.weight_actual) < 0
            """,
            values={"shipping_order": shipping_order},
            as_dict=1,
        )

    return []
Example #6
 def __init__(self, **kwargs):
     self.attrs = pipe(
         self.imports,
         reversed,
         map(vars),
         merge,
         keyfilter(compose(str.islower, first), ),
         valfilter(callable),
     )
     self.attrs.update()
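The keyfilter/valfilter tail of the pipeline keeps only public (lowercase-named) callables from the merged namespaces. A hedged sketch against a plain dict, assuming toolz.curried; the namespace contents are made up:

# Hedged sketch of the keyfilter/valfilter step; ``ns`` stands in for
# the merged module namespaces and is invented for illustration.
from toolz.curried import compose, first, keyfilter, valfilter

ns = {'concat': len, '_private': len, 'VERSION': '1.0'}
public_callables = valfilter(callable, keyfilter(compose(str.islower, first), ns))
assert public_callables == {'concat': len}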
Example #7
 def __repr__(self):
     self._tokens = pipe(
         self._tokens, filter(
             compose(complement(
                 lambda s: s.startswith('_ipython') or
                     s.startswith('_repr') if isinstance(s, str) else s,
             ), first, second,)
         ), list
     )
     return super().__repr__()
Example #8
def get_realStats(coords_all, coords_sub, indexes, r_stat, cell, pbc):

    frac_coords = get_scaled_positions(cell=cell, pbc=pbc, wrap=True)
    real_coords = get_real_positions(cell=cell)
    rescale = compose(real_coords, frac_coords)

    return pipe(
        indexes,
        lambda indxs: rescale(
            coords_all[indxs[:, 1]] - coords_sub[indxs[:, 0]] + cell.diagonal() / 2
        ),
        lambda crds: crds - cell.diagonal() / 2,
    )
Example #9
def extract_ctr_children_sum(doc: Tabs_doc) -> Dict[Tab_num, Disp_dur]:
    default = {'dispatchCount': np.nan, 'duration': np.nan}

    def reduce_children(chs: List[Disp_dur]):
        # TODO: extract host info?
        disp_durs = lmap(extract_dur_disp_tab, chs)
        return reduce_keys(op.add, disp_durs) or default

    tab_docs = z.valmap(z.compose(reduce_children, itg('children')), doc)
    return tab_docs
Example #10
def get_legal_doc(content_type):
    get_content = compose(
        lambda x: frappe.get_cached_value("Terms and Conditions", x, "terms"),
        lambda x: frappe.get_cached_value("Leiteng Website Settings", None, x),
    )

    if content_type in ["privacy", "terms"]:
        return get_content(content_type)

    return None
Example #11
def remove_paths(paths, graph):
    # Allow passing a single path.
    if isinstance(paths, str):
        paths = [paths]

    indices_to_remove = tlz.compose(
        list, tlz.map(lambda v: v.index), tlz.remove(is_None),
        tlz.map(find_vertex_by_name_or_none(graph)))(paths)

    return graph - indices_to_remove if len(indices_to_remove) > 0 else graph
Example #12
def sequence(*args):
    """Compose functions in order

    Args:
      args: the functions to compose

    Returns:
      composed functions

    """
    return compose(*args[::-1])
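sequence, unlike rcompose and fcompose later on this page, ships without a doctest. A minimal usage sketch of the left-to-right ordering it provides (assuming only toolz.compose is in scope) might read:

>>> inc = lambda x: x + 1
>>> double = lambda x: x * 2
>>> sequence(inc, double)(3)  # double(inc(3))
8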
Example #13
def _get_filters(filters):
    join = compose(lambda x: " AND ".join(x), concatv)
    clauses = join(
        ["True"],
        ["`tabItem`.item_group = %(item_group)s"] if filters.get("item_group") else [],
        ["`tabBin`.warehouse = %(warehouse)s"] if filters.get("warehouse") else [],
        ["`tabItem Supplier`.supplier = %(supplier)s"]
        if filters.get("supplier")
        else [],
    )
    return clauses, filters
Example #14
def rcompose(*args):
    """Compose functions in order

    Args:
      args: the functions to compose

    Returns:
      composed functions

    >>> assert rcompose(lambda x: x + 1, lambda x: x * 2)(3) == 8
    """
    return compose(*args[::-1])
Example #15
 def from_tom_base_state(tom_base_state):
     """Return WaveEq from Tom's base state"""
     # TODO refactor this to an abstract factory (A)
     tom_base_state = valmap(compose(np.copy, np.flip), tom_base_state)
     q, T, z, rho = get(["qv", "T", "z", "rho"], tom_base_state)
     base_state = {
         "QT": q * 1000.0,
         "SLI": T + grav / cp * z,
         "height_center": z,
         "density": rho
     }
     return WaveEq(base_state)
Example #16
def _get_species(patients):
	species = compose(
		valmap(lambda x: x['vc_species']),
		valmap(first),
		groupby('name'),
		lambda: frappe.get_all(
			'Patient',
			filters=[['name', 'in', patients]],
			fields=['name', 'vc_species']
		)
	)
	return species()
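The groupby/valmap(first) combination above turns a list of row dicts into a name-keyed lookup. A hedged sketch with made-up rows standing in for frappe.get_all, assuming toolz.curried:

# Hedged sketch of the lookup-map pattern; rows are invented and stand
# in for the result of frappe.get_all.
from toolz.curried import compose, first, groupby, valmap

rows = [
    {'name': 'PAT-001', 'vc_species': 'Canine'},
    {'name': 'PAT-002', 'vc_species': 'Feline'},
]
species = compose(
    valmap(lambda x: x['vc_species']),
    valmap(first),
    groupby('name'),
    lambda: rows,
)
assert species() == {'PAT-001': 'Canine', 'PAT-002': 'Feline'}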
Example #17
def register_jinja2_magic(env=Environment(), display='Markdown'):
    """Display reusable jinja2 templates.  Returns a jinja2 template.
    """
    magical(
        'jinja2', lang='jinja2',
        display=compose(
            IPython.display.display,
            IPython.display.Markdown,
            lambda x: x.render(**IPython.get_ipython().user_ns),
        )
    )(env.from_string)
    return env
Example #18
def _get_root_groups():
    def get_root(x):
        # assuming that parent - child relationship is never circular
        parent = get_parent(x)
        if parent:
            return get_root(parent)
        return x

    groups = frappe.get_all(
        "Item Group",
        fields=["name", "parent_item_group"],
        filters={"show_in_website": 1},
    )
    get_parent = compose(
        excepts(StopIteration, first, lambda _: None),
        lambda x: filter(lambda y: y.get("name") == x.get("parent_item_group"),
                         groups),
    )
    make_unique_roots = compose(list, unique, map(lambda x: x.get("name")),
                                map(get_root))

    return make_unique_roots(groups)
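The excepts(StopIteration, first, lambda _: None) helper above is a compact "first match or None": it swallows the StopIteration that first raises on an empty iterator. A hedged sketch, assuming only toolz; the data is made up:

# Hedged sketch of the "first match or None" idiom.
from toolz import excepts, first

first_or_none = excepts(StopIteration, first, lambda _: None)

assert first_or_none(iter([])) is None
assert first_or_none(x for x in [3, 4] if x > 3) == 4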
Example #19
def flatten_references_graph(references_graph, pipeline, exclude_paths=None):
    if exclude_paths is not None:
        exclude_paths = frozenset(exclude_paths)
        references_graph = tlz.compose(
            tlz.map(
                over("references",
                     lambda xs: frozenset(xs).difference(exclude_paths))),
            tlz.remove(lambda node: node["path"] in exclude_paths))(
                references_graph)

    igraph_graph = references_graph_to_igraph(references_graph)

    return create_list_of_lists_of_strings(pipe(pipeline, igraph_graph))
Example #20
def list_addresses(token, page="1", page_length="10"):
    decoded_token = get_decoded_token(token)
    customer_id = frappe.db.exists(
        "Customer", {"le_firebase_uid": decoded_token["uid"]}
    )
    if not customer_id:
        frappe.throw(frappe._("Customer does not exist on backend"))

    get_count = compose(
        lambda x: x[0][0],
        lambda x: frappe.db.sql(
            """
                SELECT COUNT(name) FROM `tabDynamic Link` WHERE
                    parenttype = 'Address' AND
                    link_doctype = 'Customer' AND
                    link_name = %(link_name)s
            """,
            values={"link_name": x},
        ),
    )
    addresses = frappe.db.sql(
        """
            SELECT
                a.name AS name,
                a.address_line1 AS address_line1,
                a.address_line2 AS address_line2,
                a.city AS city,
                a.state AS state,
                a.country AS country,
                a.pincode AS pincode
            FROM `tabDynamic Link` AS dl
            LEFT JOIN `tabAddress` AS a ON a.name = dl.parent
            WHERE dl.parenttype = 'Address' AND
                dl.link_doctype = 'Customer' AND
                dl.link_name = %(link_name)s
            GROUP BY a.name
            ORDER BY a.modified DESC
            LIMIT %(start)s, %(page_length)s
        """,
        values={
            "link_name": customer_id,
            "start": (frappe.utils.cint(page) - 1) * frappe.utils.cint(page_length),
            "page_length": frappe.utils.cint(page_length),
        },
        as_dict=1,
    )
    return {
        "count": get_count(customer_id),
        "items": addresses,
    }
Example #21
def get_realStats(coords_all, coords_sub, r_stat, cutoff, cell, cell_dim, pbc,):
    import MDAnalysis
    
    tree = MDAnalysis.lib.pkdtree.PeriodicKDTree(box=cell_dim.astype(np.float32))
    tree.set_coords(coords_all.astype(np.float32), 
                    cutoff=np.float32(cutoff))

    frac_coords = get_scaled_positions(cell=cell, pbc=pbc, wrap=True)
    real_coords = get_real_positions(cell=cell)
    rescale = compose(real_coords, frac_coords)
    
    return pipe(tree.search_tree(coords_sub, radius=r_stat), 
                lambda indxs: rescale(coords_all[indxs[:,1]] - coords_sub[indxs[:,0]] + cell.diagonal()/2), 
                lambda crds: crds - cell.diagonal()/2)
Example #22
def _extend_data(filters, data, inv_idx, emp_idx):
    invoices = [x[inv_idx] for x in data]
    get_employee_map = compose(
        valmap(lambda x:
               [x.get("pb_sales_employee"),
                x.get("pb_sales_employee_name")]),
        valmap(first),
        groupby("name"),
        lambda: frappe.db.sql(
            """
            SELECT name, pb_sales_employee, pb_sales_employee_name FROM `tabSales Invoice`
            WHERE name IN %(invoices)s
        """,
            values={"invoices": invoices},
            as_dict=1,
        ),
    )
    employees = get_employee_map() if invoices else {}
    set_employee = compose(list, lambda x: concatv(employees[x[inv_idx]], x))
    set_commission = compose(
        list,
        lambda x: concatv(
            x,
            [
                x[filters.net_amount_col_idx] * frappe.utils.flt(
                    filters.commission_rate) / 100
            ],
        ),
    )

    make_row = compose(set_employee, set_commission)
    extended = [make_row(x) for x in data]

    if not filters.sales_employee:
        return extended

    return [x for x in extended if x[emp_idx] == filters.sales_employee]
Example #23
def list_addresses(token, page="1", page_length="10"):
    customer_id = get_customer_id(token)

    get_count = compose(
        lambda x: x[0][0],
        lambda x: frappe.db.sql(
            """
                SELECT COUNT(name) FROM `tabDynamic Link` WHERE
                    parenttype = 'Address' AND
                    link_doctype = 'Customer' AND
                    link_name = %(link_name)s
            """,
            values={"link_name": x},
        ),
    )
    addresses = frappe.db.sql(
        """
            SELECT
                a.name AS name,
                a.address_line1 AS address_line1,
                a.address_line2 AS address_line2,
                a.city AS city,
                a.state AS state,
                a.country AS country,
                a.pincode AS pincode
            FROM `tabDynamic Link` AS dl
            LEFT JOIN `tabAddress` AS a ON a.name = dl.parent
            WHERE dl.parenttype = 'Address' AND
                dl.link_doctype = 'Customer' AND
                dl.link_name = %(link_name)s
            GROUP BY a.name
            ORDER BY a.modified DESC
            LIMIT %(start)s, %(page_length)s
        """,
        values={
            "link_name": customer_id,
            "start":
            (frappe.utils.cint(page) - 1) * frappe.utils.cint(page_length),
            "page_length": frappe.utils.cint(page_length),
        },
        as_dict=1,
    )

    count = get_count(customer_id)
    return {
        "count": count,
        "pages": frappe.utils.ceil(count / frappe.utils.cint(page_length)),
        "items": addresses,
    }
Example #24
def subcomponent(mode, paths, graph):
    path_indices = tlz.compose(
        tlz.map(attrgetter('index')), tlz.remove(is_None),
        tlz.map(find_vertex_by_name_or_none(graph)))(paths)

    debug("path_indices", path_indices)

    main_indices = list(subcomponent_multi(graph, path_indices, mode))

    debug('main_indices', main_indices)

    return {
        "main": graph.induced_subgraph(main_indices),
        "rest": graph - main_indices
    }
Example #25
    def __init__(self, limit=None, schema=None, keep_properties=True, chunk=False):
        self.schema = schema
        self.limit = limit
        self.chunk = chunk or False
        self.set_property_filter(keep_properties)

        # Set up pipeline, in reverse order
        steps = [self.validate, self.process]
        if self.limit is not None:
            self.logger.debug('Loading %s features only', self.limit)
            steps.append(take(self.limit))
        if self.chunk:
            self.logger.debug('Features will arrive in batches of %s', self.chunk)
            steps.append(lambda it: grouper(self.chunk, it))
        self.pipeline = compose(*reversed(steps))
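compose(*reversed(steps)) is what lets the steps list above read in application order even though compose itself applies functions right-to-left. A minimal hedged sketch with placeholder steps, assuming toolz:

# Hedged sketch of building a left-to-right pipeline from a step list;
# the steps here are placeholders for the validate/process/take/grouper
# steps used above.
from toolz import compose
from toolz.curried import take

steps = [take(2), list]               # listed in application order
pipeline = compose(*reversed(steps))  # compose runs right-to-left
assert pipeline(range(10)) == [0, 1]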
Example #26
def _update_freight(bo, si):
    get_freight_row = compose(
        excepts(StopIteration, first, lambda _: None),
        lambda name: filter(lambda x: x.name == name, bo.freight),
    )
    for sii in [x for x in si.items if x.gg_update_freight]:
        freight = get_freight_row(sii.gg_bo_detail)
        if freight:
            freight.based_on = frappe.get_cached_value("Item", sii.item_code,
                                                       "gg_freight_based_on")
            if freight.based_on == "Packages":
                freight.no_of_packages = sii.qty
            elif freight.based_on == "Weight":
                freight.weight_actual = sii.qty
            freight.rate = sii.rate
            freight.amount = sii.amount
Example #27
def get_item(route):
    item_code = frappe.db.exists("Item", {
        "route": route.replace("__", "/"),
        "show_in_website": 1
    })
    if not item_code:
        frappe.throw(frappe._("Item does not exist at this route"))

    doc = frappe.db.get_value(
        "Item",
        item_code,
        fieldname=[
            "name",
            "item_name",
            "item_group",
            "has_variants",
            "description",
            "web_long_description",
            "image",
            "website_image",
        ],
        as_dict=1,
    )

    get_price_list_rate = compose(
        lambda x: frappe.db.get_value(
            "Item Price",
            filters={
                "item_code": item_code,
                "price_list": x
            },
            fieldname="price_list_rate",
        ) if x else None,
        lambda: frappe.get_cached_value("Shopping Cart Settings", None,
                                        "price_list"),
    )

    return merge(
        {"route": route},
        doc,
        {
            "description":
            frappe.utils.strip_html_tags(doc.get("description") or ""),
            "price_list_rate":
            get_price_list_rate(),
        },
    )
Example #28
def sort_ips(inpath, keepcomments, clipboard):
    '''Given a list of IPs (one per line) from a file path (INPATH), the
    clipboard (-C), or stdin if nothing provided, print in sorted
    order (to stdout unless -C is provided).

    '''
    content = get_input_content(inpath, clipboard)

    return pipe(
        content.splitlines(),
        map(do_nothing if keepcomments else strip_comments),
        filter(lambda l: l.strip()),
        filter(compose(is_ip, strip, strip_comments)),
        sortips,
        '\n'.join,
        print if not clipboard else cb_copy_ensure_nl,
    )
Example #29
def fcompose(*args):
    """Helper function to compose functions.

    >>> f = lambda x: x - 2
    >>> g = lambda x: 2 * x
    >>> f(g(3))
    4
    >>> fcompose(g, f)(3)
    4

    Args:
      *args: tuple of functions

    Returns:
      composed functions
    """
    return compose(*args[::-1])
Example #30
def fcompose(*args):
    """Helper function to compose functions.

    >>> f = lambda x: x - 2
    >>> g = lambda x: 2 * x
    >>> f(g(3))
    4
    >>> fcompose(g, f)(3)
    4

    Args:
      *args: tuple of functions

    Returns:
      composed functions
    """
    return compose(*args[::-1])
Example #31
def _get_columns(filters):
    join = compose(list, concatv)
    return join(
        [
            {
                "fieldtype": "Data",
                "fieldname": "item_description",
                "label": "Description",
                "width": 240,
            },
            {
                "fieldtype": "Link",
                "fieldname": "booking_order",
                "options": "Booking Order",
                "label": "Booking Order",
                "width": 120,
            },
            {
                "fieldtype": "Data",
                "fieldname": "consignor_name",
                "label": "Consignor Name",
                "width": 180,
            },
            {
                "fieldtype": "Data",
                "fieldname": "consignee_name",
                "label": "Consignee Name",
                "width": 180,
            },
        ],
        concat([
            {
                "fieldtype": "Int",
                "fieldname": "{}__no_of_packages".format(x),
                "label": "{} Packages".format(x),
                "width": 90,
            },
            {
                "fieldtype": "Float",
                "fieldname": "{}__weight_actual".format(x),
                "label": "{} Weight".format(x),
                "width": 90,
            },
        ] for x in activities),
    )
Example #32
def add_weight(answer: dict):
    def is_a_matching_question(answer):
        return pipe(
            [answer_keys.match_left, answer_keys.incorrect],
            map(lambda k: k in answer),
            any,
        )

    needs_weight = compose(
        any,
        juxt(complement(is_a_matching_question), ),
    )

    if needs_weight(answer):
        return assoc(answer, answer_keys.weight,
                     int(answer.get(answer_keys.weight, 0) and 100))

    return answer
        "EXPOSE 5000\n"
        "\n"
        "ENTRYPOINT [ \"bokeh-server\" ]\n"
        "CMD [ \"--ip=0.0.0.0\", \"--port=5000\" ]\n"
    ).format(version=version)


def write_file(name_and_data):
    filename, data = name_and_data
    print("Write '{}' .. ".format(filename), end="")
    with open(filename, 'w') as f:
        f.write(data)
    print("OK")

if __name__ == "__main__":
    import argparse

    parser = argparse.ArgumentParser(description="Generates bokeh Dockerfile for given version")
    parser.add_argument("version", type=str, nargs="*")
    args = parser.parse_args()

    list(map(
        compose(
            write_file,
            juxt([
                "Dockerfile_{}".format,
                get_dockerfile
            ])
        ),
        args.version
    ))