Ejemplo n.º 1
0
 def __init__(self):
     """Build lookup maps for the tables and custom SQL functions used here."""
     # One pypika Table per queryable entity, keyed by entity name.
     self.tables = {
         name: Table(name)
         for name in ("Entry", "File", "CodeFragment", "SearchResult")
     }
     # Fuzzy-matching SQL helpers exposed by the database backend.
     self.func = {
         "metaphone": CustomFunction('metaphone', ['text', 'len']),
         "levenshtein": CustomFunction('levenshtein', ['source', 'target']),
         "levenshtein_less_equal": CustomFunction(
             'levenshtein_less_equal', ['source', 'target', 'max_d']),
     }
def get_loan_amount(filters):
    """Sum un-cleared loan disbursements and repayments for the filtered
    account, considering only vouchers posted after the report date whose
    clearance falls on or before it."""
    report_date = getdate(filters.get("report_date"))
    total_amount = 0

    for doctype in ["Loan Disbursement", "Loan Repayment"]:
        loan_doc = frappe.qb.DocType(doctype)
        ifnull = CustomFunction("IFNULL", ["value", "default"])

        if doctype == "Loan Disbursement":
            amount_field = Sum(loan_doc.disbursed_amount)
            posting_date = (loan_doc.disbursement_date).as_("posting_date")
            account = loan_doc.disbursement_account
            salary_condition = loan_doc.docstatus == 1
        else:
            amount_field = Sum(loan_doc.amount_paid)
            posting_date = (loan_doc.posting_date).as_("posting_date")
            account = loan_doc.payment_account
            # Salary repayments never touch the bank account; exclude them.
            salary_condition = loan_doc.repay_from_salary == 0

        query = (
            frappe.qb.from_(loan_doc)
            .select(amount_field)
            .where(loan_doc.docstatus == 1)
            .where(salary_condition)
            .where(account == filters.get("account"))
            .where(posting_date > report_date)
            # Treat a missing clearance date as far-future ("4000-01-01").
            .where(ifnull(loan_doc.clearance_date, "4000-01-01") <= report_date)
        )
        amount = query.run()[0][0]

        total_amount += flt(amount)

    return total_amount
Ejemplo n.º 3
0
def get_serial_nos_data_after_transactions(args):
    """Replay stock ledger entries up to the given posting date/time and
    return the serial numbers remaining in the warehouse, one per line."""
    from pypika import CustomFunction

    args = frappe._dict(args)
    sle = frappe.qb.DocType("Stock Ledger Entry")
    Timestamp = CustomFunction("timestamp", ["date", "time"])

    # Only rows for this item/warehouse, strictly before the cutoff instant,
    # and not cancelled.
    cutoff = Timestamp(args.posting_date, args.posting_time)
    conditions = ((sle.item_code == args.item_code)
                  & (sle.warehouse == args.warehouse)
                  & (Timestamp(sle.posting_date, sle.posting_time) < cutoff)
                  & (sle.is_cancelled == 0))

    entries = (
        frappe.qb.from_(sle)
        .select("serial_no", "actual_qty")
        .where(conditions)
        .orderby(sle.posting_date, sle.posting_time, sle.creation)
        .run(as_dict=1)
    )

    serial_nos = set()
    for entry in entries:
        changed = get_serial_nos_data(entry.serial_no)
        if entry.actual_qty > 0:
            # Inward movement: these serials arrive in the warehouse.
            serial_nos.update(changed)
        else:
            # Outward movement: these serials leave the warehouse.
            serial_nos.difference_update(changed)

    return "\n".join(serial_nos)
 def process_date_diff_args(
         self, args: str) -> Tuple[CustomFunction, CustomFunction]:
     """Parse a two-part ``date_diff`` argument string into a pair of
     date expressions.

     Each comma-separated part is either the literal ``now`` (mapped to the
     database's current date) or a ``topic.factor`` reference whose column
     is wrapped in the SQL ``date()`` function.

     :param args: comma-separated argument string, e.g. ``"now,topic.field"``
     :return: tuple of the two date expressions, in argument order
     :raises ValueError: if the string does not split into exactly two parts
     """
     arg_list = args.split(",")
     if len(arg_list) != 2:
         raise ValueError("Date_diff have invalid args", arg_list)
     result = []
     for arg in arg_list:
         # NOTE(review): parts are not stripped before this comparison, so
         # " now" falls through to the date() branch — confirm intended.
         if arg == "now":
             date = current_date()
         else:
             date_fnc = CustomFunction("date", ["col1"])
             if "." in arg:
                 items = arg.split(".")
                 topic_name = items[0].strip()
                 topic = get_topic_by_name(topic_name, None)
                 table = None
                 # Fix: call topic_space_filter once and reuse the result;
                 # the original invoked it twice for the same topic id.
                 if self.topic_space_filter:
                     space_filter = self.topic_space_filter(self.param.topicId)
                     if space_filter:
                         table = AliasedQuery(space_filter["alias"])
                 if table is None:
                     table = build_table_by_topic_id(topic.topicId)
                 factor_name = items[1].strip()
                 date = date_fnc(Field(factor_name, None, table))
             else:
                 date = date_fnc(arg)
         result.append(date)
     return tuple(result)
def get_loan_entries(filters):
    """Collect un-cleared loan disbursement and repayment vouchers for the
    filtered account as bank-reconciliation rows."""
    report_date = getdate(filters.get("report_date"))
    loan_docs = []

    for doctype in ["Loan Disbursement", "Loan Repayment"]:
        loan_doc = frappe.qb.DocType(doctype)
        ifnull = CustomFunction("IFNULL", ["value", "default"])

        if doctype == "Loan Disbursement":
            # Disbursements credit the bank account.
            amount_field = (loan_doc.disbursed_amount).as_("credit")
            posting_date = (loan_doc.disbursement_date).as_("posting_date")
            account = loan_doc.disbursement_account
            salary_condition = loan_doc.docstatus == 1
        else:
            # Repayments debit the bank account; salary repayments never
            # touch the bank, so they are excluded.
            amount_field = (loan_doc.amount_paid).as_("debit")
            posting_date = (loan_doc.posting_date).as_("posting_date")
            account = loan_doc.payment_account
            salary_condition = loan_doc.repay_from_salary == 0

        query = (
            frappe.qb.from_(loan_doc)
            .select(
                ConstantColumn(doctype).as_("payment_document"),
                (loan_doc.name).as_("payment_entry"),
                (loan_doc.reference_number).as_("reference_no"),
                (loan_doc.reference_date).as_("ref_date"),
                amount_field,
                posting_date,
            )
            .where(loan_doc.docstatus == 1)
            .where(salary_condition)
            .where(account == filters.get("account"))
            .where(posting_date <= report_date)
            # Treat a missing clearance date as far-future ("4000-01-01").
            .where(ifnull(loan_doc.clearance_date, "4000-01-01") > report_date)
        )

        loan_docs.extend(query.run(as_dict=1))

    return loan_docs
Ejemplo n.º 6
0
async def select_all(city):
    """Return every stored metric for one city as a single record.

    Queries the ``data`` table for the row matching ``city.city`` and
    ``city.state``; the returned mapping is serialized to JSON by FastAPI.

    args:
        city: selected city (object with ``city`` and ``state`` attributes)

    returns:
        The matching database record.
    """
    data = Table("data")
    di_fn = CustomFunction("ROUND", ["number"])

    # Raw column name -> alias exposed in the API response.
    aliases = {
        "lat": "latitude",
        "lon": "longitude",
        "Crime Rating": "crime",
        "Rent": "rental_price",
        "Air Quality Index": "air_quality_index",
        "Population": "population",
        "Nearest": "nearest_string",
        "Good Days": "good_days",
        "Crime Rate per 1000": "crime_rate_ppt",
    }
    columns = [data[raw].as_(alias) for raw, alias in aliases.items()]
    # Diversity index is stored as a fraction; expose a rounded percentage.
    columns.append(di_fn(data["Diversity Index"] * 100).as_("diversity_index"))

    query = (
        Query.from_(data)
        .select(*columns)
        .where(data.City == city.city)
        .where(data.State == city.state)
    )
    return await database.fetch_one(str(query))
Ejemplo n.º 7
0
    def test_should_return_function_with_arguments(self):
        # A three-argument custom function renders every argument, quoted.
        date_diff = CustomFunction("DATE_DIFF",
                                   ["interval", "start_date", "end_date"])

        rendered = str(date_diff("day", "start_date", "end_date"))
        self.assertEqual("DATE_DIFF('day','start_date','end_date')", rendered)
Ejemplo n.º 8
0
    def test_should_fail_use_custom_function_on_select_with_wrong_arguments(
            self):
        # Calling with fewer args than declared must raise when building
        # the query.
        service = Table("service")
        date_diff = CustomFunction("DATE_DIFF",
                                   ["interval", "start_date", "end_date"])

        with self.assertRaises(FunctionException):
            Query.from_(service).select(date_diff("day", service.created_date))
def _date_diff(unit, args_str):
    """Build a ``DATE_DIFF(unit, date1, date2)`` expression from a
    comma-separated two-part argument string.

    :raises ValueError: if ``args_str`` does not split into exactly two parts
    """
    parts = args_str.split(",")
    if len(parts) != 2:
        raise ValueError("Date_diff have invalid args", parts)
    first, second = (part.strip() for part in parts)
    date_diff = CustomFunction("DATE_DIFF", ["col1", "col2", "col3"])
    return date_diff(unit, _process_date_diff_arg(first),
                     _process_date_diff_arg(second))
Ejemplo n.º 10
0
    def test_should_use_custom_function_on_select(self):
        # The custom function should render inside SELECT with its column
        # references double-quoted and the literal single-quoted.
        service = Table("service")
        date_diff = CustomFunction("DATE_DIFF",
                                   ["interval", "start_date", "end_date"])

        query = Query.from_(service).select(
            date_diff("day", service.created_date, service.updated_date))

        expected = 'SELECT DATE_DIFF(\'day\',"created_date","updated_date") FROM "service"'
        self.assertEqual(expected, str(query))
def _process_date_diff_arg(arg):
    """Translate one ``date_diff`` argument into a pypika date expression.

    ``"now"`` maps to the ``current_date`` pseudo column; a ``topic.factor``
    reference becomes a column on the aliased ``topic_<name>`` table; anything
    else is wrapped in the SQL ``date()`` function.

    :param arg: raw argument text
      (NOTE(review): not stripped before the ``now`` test — confirm intended)
    :return: a pypika term usable inside ``DATE_DIFF``
    """
    if arg == "now":
        return PseudoColumn('current_date')
    if "." in arg:
        parts = arg.split(".")
        topic_name = parts[0].strip()
        factor_name = parts[1].strip()
        return Table("topic_" + topic_name).as_(topic_name)[factor_name]
    # Fix: only construct the date() wrapper on the branch that uses it;
    # the original built it even for the "now" path where it was unused.
    date_fnc = CustomFunction("date", ["col1"])
    return date_fnc(arg)
Ejemplo n.º 12
0
async def select_all(city):
    """Fetch all metrics for one city in a single query.

    args:
        city: selected city (object with ``city`` and ``state`` attributes).

    returns:
        The matching record from the ``data`` table; FastAPI converts it
        to JSON. Presumably ``None`` when no row matches — confirm with
        the database driver's ``fetch_one`` semantics.
    """
    data = Table("data")
    # ROUND(number) SQL helper, used for the diversity index below.
    di_fn = CustomFunction("ROUND", ["number"])
    # Each raw column is aliased to the snake_case name the API exposes.
    columns = (
        # 'lat', 'lon'
        data["lat"].as_("latitude"),
        data["lon"].as_("longitude"),
        data["Crime Rating"].as_("crime"),
        data["Rent"].as_("rental_price"),
        data["Air Quality Index"].as_("air_quality_index"),
        data["Population"].as_("population"),
        data["Nearest"].as_("nearest_string"),
        data["Good Days"].as_("good_days"),
        data["Crime Rate per 1000"].as_("crime_rate_ppt"),
        # Stored as a fraction; exposed as a rounded percentage.
        di_fn(data["Diversity Index"] * 100).as_("diversity_index"),
    )

    q = (Query.from_(data).select(*columns).where(
        data.City == city.city).where(data.State == city.state))
    value = await database.fetch_one(str(q))
    return value
Ejemplo n.º 13
0
    def test_should_return_function_with_no_arguments(self):
        # A zero-argument custom function still renders empty parentheses.
        current_date = CustomFunction("CURRENT_DATE")
        self.assertEqual("CURRENT_DATE()", str(current_date()))
Ejemplo n.º 14
0
from datetime import datetime

from pypika import CustomFunction, MSSQLQuery, Parameter, Table
from pypika.functions import Cast, DateDiff
from pypika.terms import Function, PseudoColumn

from .base import Database

# DATEADD(date_part, number, term): shift a date/time expression by an interval.
_MSSQLDateAdd = CustomFunction('DATEADD', ['date_part', 'number', 'term'])
# CONVERT with an MSSQL style code.
# NOTE(review): T-SQL's CONVERT takes (data_type, expression, style); the
# first parameter is named 'term' here — verify call sites pass the target
# type in that position.
_MSSQLConvert = CustomFunction('CONVERT', ['term', 'expression', 'style'])


class MSSQLDatabase(Database):
    """
    Microsoft SQL Server client that uses the pymssql module under the hood.
    """

    # The pypika query class to use for constructing queries
    query_cls = MSSQLQuery

    def __init__(self, host='localhost', port=1433, database=None,
                 user=None, password=None, **kwargs):
        """Store the credentials locally and delegate connection details
        (host, port, database) to the ``Database`` base class."""
        super().__init__(host, port, database, **kwargs)
        self.password = password
        self.user = user
Ejemplo n.º 15
0
    def query_generator(self):
        """Build one MySQL SELECT query object per header event.

        Left-joins every table referenced in the parser body off the first
        table seen, applies body filters as WHERE clauses, then attaches the
        per-event SELECT columns (simple, renamed, and nested-table ones)
        from the header.

        :return: dict mapping event name -> pypika MySQLQuery object
        """
        # special handle here to prevent date messed up during query process~
        date_format = CustomFunction('DATE_FORMAT', ['date_field', 'format'])
        format = '%m/%d/%Y %H:%i:%s'

        # process join tables - [BODY]
        # The first table encountered becomes the FROM table; every other
        # table is a left-join candidate.
        tables = []
        table_obj_map = {}
        join_condition_queue = {}
        base_table_name = ""
        for table in self.parser.body_all_columns:
            base_table_name = table if len(
                base_table_name) == 0 else base_table_name
            table_obj = Table(table)
            tables.append(table_obj)
            table_obj_map[table] = table_obj
            join_condition_queue[table] = []
        from_table = tables[0]
        join = table_obj_map.copy()
        join.pop(base_table_name)
        # print(join)
        # print(self.all_source_type)

        # process join conditions - [BODY]
        for var in self.parser.body_variable_map:
            # only join when more than 1 table name exist!
            if len(self.parser.body_variable_map[var]) > 2:
                previous_table = ""
                # NOTE(review): previous_table is never advanced in the else
                # branch, so every later reference is compared against the
                # FIRST "table.column" of this variable — confirm intended.
                for table_name in self.parser.body_variable_map[var][1:]:
                    if len(previous_table) == 0:
                        previous_table = table_name
                    else:
                        pre = previous_table.split('.')
                        curr = table_name.split('.')
                        exp1 = getattr(table_obj_map[pre[0]], pre[1])
                        exp2 = getattr(table_obj_map[curr[0]], curr[1])
                        if self.is_a_time_field(curr[1]):
                            # print("Target Column Type: ", self.all_source_type[curr[1]])
                            # Normalize both sides to the same string format
                            # so date columns compare consistently.
                            exp1 = date_format(exp1, format)
                            exp2 = date_format(exp2, format)
                        join_condition_queue[curr[0]].append([exp1, exp2])

        # process where filters - [BODY]
        # A condition whose right-hand side is not a body variable is a
        # literal filter and becomes a WHERE clause.
        where = []
        for table_name in self.parser.body_condition_columns:
            for condition in self.parser.body_condition_columns[table_name]:
                # it is not a variable, then it is a filter! Apply it as where statement!
                if condition[2] not in self.parser.body_variable_map:
                    # TODO: make it handle complicated filter!
                    if condition[2].lower() in ["null", "notnull"]:
                        op = "isnull" if condition[2].lower(
                        ) == "null" else "notnull"
                        column_body = getattr(table_obj_map[table_name],
                                              condition[0])
                        where.append(getattr(column_body, op)())
                    elif condition[2].lower() in ["true", "false"]:
                        op = True if condition[2].lower() == "true" else False
                        where.append(
                            getattr(table_obj_map[table_name], condition[0]) ==
                            op)
                    elif self.is_a_time_field(condition[0]):
                        # Special handle for date field~
                        where.append(
                            date_format(
                                getattr(table_obj_map[table_name],
                                        condition[0]), format) == date_format(
                                            condition[2].strip('"'), format))
                    else:
                        where.append(
                            getattr(table_obj_map[table_name], condition[0]) ==
                            condition[2].strip('"'))

        q = MySQLQuery.from_(from_table)

        # join must be followed by ON~ multiple joins should be separated as well!
        for join_table_name in join:
            # print(join_table_name, join[join_table_name])
            # q = q.join(join[join_table_name])
            q = q.left_join(join[join_table_name])

            # AND together every queued equality for this table.
            j_c = None
            for j_condition in join_condition_queue[join_table_name]:
                if j_c is None:
                    j_c = (j_condition[0] == j_condition[1])
                else:
                    j_c = j_c & (j_condition[0] == j_condition[1])
            # ONLY perform join condition if it is not empty!
            if j_c is not None:
                q = q.on(j_c)

        for w_statement in where:
            q = q.where(w_statement)

        # final_query = []
        query_objects = {}

        # be aware! the header might contain more than one event table!
        for event in self.parser.header_simple_columns:
            select_object = []
            select_col_names = []
            # process select - [HEADER]
            for attribute in self.parser.header_simple_columns[event]:
                select_object.append(
                    getattr(
                        table_obj_map[
                            self.parser.reverse_column_map[attribute]],
                        attribute))
                select_col_names.append(attribute)

            # process additional select - [HEADER]
            for condition in self.parser.header_condition_columns[event]:
                # TODO: make it handle complicated condition!
                # Handle Column Renaming Here
                if condition[2] in self.parser.body_variable_map and len(
                        condition
                ) < 5:  # Ahhhh this is very risky to do this! Nested table statment by min have 5 elements
                    new_name = condition[0]
                    origin_column = self.parser.body_variable_map[
                        condition[2]][1].split('.')
                    new_col = getattr(table_obj_map[origin_column[0]],
                                      origin_column[1])
                    select_object.append(new_col.as_(new_name))
                    self.rename_map[condition[2]] = new_name
                    select_col_names.append(new_name)

            self.query_selected_cols[event] = select_col_names.copy()

            # process nested table attributes - [HEADER]
            for event_name_dot_nested_table_name in self.nested_table_elements:
                if event + "." in event_name_dot_nested_table_name:  # in case of multi-sugar~
                    nest_col_name = []
                    # print(event_name_dot_nested_table_name)
                    for attribute in self.nested_table_elements[
                            event_name_dot_nested_table_name]:
                        if isinstance(attribute, str):
                            select_object.append(
                                getattr(
                                    table_obj_map[
                                        self.parser.
                                        reverse_column_map[attribute]],
                                    attribute))
                            nest_col_name.append(attribute)
                        elif attribute[2] in self.parser.body_variable_map:
                            new_name = attribute[0]
                            origin_column = self.parser.body_variable_map[
                                attribute[2]][1].split('.')
                            new_col = getattr(table_obj_map[origin_column[0]],
                                              origin_column[1])
                            select_object.append(new_col.as_(new_name))
                            self.rename_map[attribute[2]] = new_name
                            nest_col_name.append(new_name)
                # NOTE(review): nest_col_name is only assigned inside the
                # "if" above; if the first element does not match this event
                # the next statement raises NameError — confirm intended.
                self.query_selected_cols[
                    event_name_dot_nested_table_name] = nest_col_name.copy()

            mysql_query_obj = q.select(*select_object)
            query_objects[event] = mysql_query_obj

        # print('self.nested_table_elements', self.nested_table_elements)
        # print('self.query_selected_cols', self.query_selected_cols)
        return query_objects
Ejemplo n.º 16
0
from pypika import CustomFunction

# Presto datetime functions
# Parse an ISO-8601 timestamp string into a Presto timestamp.
from_iso8601_timestamp = CustomFunction('from_iso8601_timestamp',
                                        ['timestamp'])
# Parse an ISO-8601 date string into a Presto date.
from_iso8601_date = CustomFunction('from_iso8601_date', ['date'])
# Convert a UNIX epoch (seconds) into a timestamp.
from_unixtime = CustomFunction('from_unixtime', ['time'])
# Format a date/timestamp as an ISO-8601 string.
to_iso8601 = CustomFunction('to_iso8601', ['date'])
# Convert a timestamp into a UNIX epoch (seconds).
to_unixtime = CustomFunction('to_unixtime', ['timestamp'])

# TODO: add presto functions
# https://prestodb.io/docs/0.172/functions.html
Ejemplo n.º 17
0
from pypika import CustomFunction

# Shared parameter list for the ClickHouse add*/subtract* date helpers:
# a date expression ("name") and an integer amount ("interval").
_add_subtract_args = ["name", "interval"]

# toYYYYMM(date): encode a date as the integer YYYYMM (e.g. 202401).
ToYYYYMM = CustomFunction("toYYYYMM")
# Shift a date/datetime forward by N units.
AddYears = CustomFunction("addYears", _add_subtract_args)
AddMonths = CustomFunction("addMonths", _add_subtract_args)
AddWeeks = CustomFunction("addWeeks", _add_subtract_args)
AddDays = CustomFunction("addDays", _add_subtract_args)
AddHours = CustomFunction("addHours", _add_subtract_args)
AddMinutes = CustomFunction("addMinutes", _add_subtract_args)
AddSeconds = CustomFunction("addSeconds", _add_subtract_args)
AddQuarters = CustomFunction("addQuarters", _add_subtract_args)
# Shift a date/datetime backward by N units.
SubtractYears = CustomFunction("subtractYears", _add_subtract_args)
SubtractMonths = CustomFunction("subtractMonths", _add_subtract_args)
SubtractWeeks = CustomFunction("subtractWeeks", _add_subtract_args)
SubtractDays = CustomFunction("subtractDays", _add_subtract_args)
SubtractHours = CustomFunction("subtractHours", _add_subtract_args)
SubtractMinutes = CustomFunction("subtractMinutes", _add_subtract_args)
SubtractSeconds = CustomFunction("subtractSeconds", _add_subtract_args)
SubtractQuarters = CustomFunction("subtractQuarters", _add_subtract_args)
# formatDateTime(name, dt_format): render a datetime with a format string.
FormatDateTime = CustomFunction("formatDateTime", ["name", "dt_format"])
def date_diff(unit: Unit, arg1: CustomFunction,
              arg2: CustomFunction) -> CustomFunction:
    """Return a ``DATE_DIFF(unit, arg1, arg2)`` SQL expression."""
    make_date_diff = CustomFunction("DATE_DIFF", ["col1", "col2", "col3"])
    return make_date_diff(unit, arg1, arg2)
Ejemplo n.º 19
0
    def test_should_fail_with_wrong_arguments(self):
        # Supplying one argument where three are declared must raise.
        date_diff = CustomFunction("DATE_DIFF",
                                   ["interval", "start_date", "end_date"])

        with self.assertRaises(FunctionException):
            date_diff("foo")
Ejemplo n.º 20
0
from pypika import CustomFunction

# Value helpers.
is_null = CustomFunction("IS_NULL", ["v_value"])
# Coordinate extractors for a geometry value — GB_* presumably return
# national-grid coordinates, ST_* standard X/Y; confirm against the
# database's function definitions.
gb_x = CustomFunction("GB_X", ["v_geom"])
gb_y = CustomFunction("GB_Y", ["v_geom"])
st_x = CustomFunction("ST_X", ["v_geom"])
st_y = CustomFunction("ST_Y", ["v_geom"])
st_round_x = CustomFunction("ST_ROUND_X", ["v_geom"])
st_round_y = CustomFunction("ST_ROUND_Y", ["v_geom"])

# Reproject a geometry to WGS 84 (EPSG:4326).
st_transform_4326 = CustomFunction("ST_TRANSFORM_4326", ["v_geom"])
# Scalar conversion helpers.
to_bit = CustomFunction("TO_BIT", ["v_value"])
to_num = CustomFunction("TO_NUM", ["v_value"])
to_year = CustomFunction("TO_YEAR", ["v_value"])
from_float_to_int = CustomFunction("FROM_FLOAT_TO_INT", ["v_value"])

# Name -> function lookup used to resolve SQL function references by string.
SQL_FUNCTION_MAPPING = {
    "IS_NULL": is_null,
    "GB_X": gb_x,
    "GB_Y": gb_y,
    "ST_X": st_x,
    "ST_Y": st_y,
    "ST_ROUND_X": st_round_x,
    "ST_ROUND_Y": st_round_y,
    "ST_TRANSFORM_4326": st_transform_4326,
    "TO_BIT": to_bit,
    "TO_NUM": to_num,
    "TO_YEAR": to_year,
    "FROM_FLOAT_TO_INT": from_float_to_int
}