Example #1
    def test_engines_cache(self):
        from aldjemy.core import Cache, get_engine

        self.assertEqual(get_engine('default'), Cache.engines['default'])
        self.assertEqual(get_engine('logs'), Cache.engines['logs'])
        self.assertEqual(get_engine(), Cache.engines['default'])
        self.assertNotEqual(get_engine('default'), get_engine('logs'))
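The test above assumes a Django settings module that defines two database aliases; a minimal sketch of a DATABASES configuration under which it would pass (backends and names are illustrative, not from the original project):

    # Hypothetical settings.py fragment: two aliases, so get_engine('default')
    # and get_engine('logs') resolve to two distinct cached engines.
    DATABASES = {
        'default': {
            'ENGINE': 'django.db.backends.sqlite3',
            'NAME': 'default.sqlite3',
        },
        'logs': {
            'ENGINE': 'django.db.backends.sqlite3',
            'NAME': 'logs.sqlite3',
        },
    }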
Example #2
    def test_engines_cache(self):
        from aldjemy.core import Cache, get_engine

        self.assertEqual(get_engine("default"), Cache.engines["default"])
        self.assertEqual(get_engine("logs"), Cache.engines["logs"])
        self.assertEqual(get_engine(), Cache.engines["default"])
        self.assertNotEqual(get_engine("default"), get_engine("logs"))
Example #3
    def bulk_create(logs):
        '''
        Utility method - bulk create/save ApiLog instances
        '''

        logger.debug('bulk create logs: %r', logs)
        with transaction.atomic():
            with get_engine().connect() as conn:
                last_id = int(
                    conn.execute(
                        'select last_value from reports_apilog_id_seq;')
                        .scalar() or 0)
                ApiLog.objects.bulk_create(logs)
                # NOTE: needed for PostgreSQL with Django < 1.10 only:
                # ids must be assigned manually on bulk create
                for i, log in enumerate(logs):
                    log.id = last_id + i + 1
            
                bulk_create_diffs = []
                for log in logs:
                    for key, logdiffs in log.diffs.items():
                        bulk_create_diffs.append(
                            LogDiff(
                                log=log,
                                field_key=key,
                                field_scope='fields.%s' % log.ref_resource_name,
                                before=logdiffs[0],
                                after=logdiffs[1])
                        )
                LogDiff.objects.bulk_create(bulk_create_diffs)
            
            return logs
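Note that passing a plain SQL string to conn.execute only works on older SQLAlchemy releases; SQLAlchemy 1.4 deprecates string execution and 2.0 removes it. A version-proof sketch of the same sequence lookup:

    from sqlalchemy import text

    # Same last_value lookup, wrapped in text() so it also runs on SQLAlchemy 1.4+/2.0.
    last_id = int(
        conn.execute(
            text('select last_value from reports_apilog_id_seq')
        ).scalar() or 0)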
Example #4
    def test_sa_sql_expression_language_fetching(self):
        """
        Test full record fetching using SQLAlchemy-aldjemy SQL Expression Language.
        """
        boards = [
            ["x", "o", "x", "o", "o", "x", "x", "x", "o"],  # both (full board)
            [" ", " ", " ", " ", "x", " ", " ", " ", " "],  # only x
            [" ", " ", " ", "o", "o", " ", " ", " ", "o"],  # only o
            [" ", " ", " ", " ", " ", " ", " ", " ", " "],  # none
        ]

        created_objects = []
        for board in boards:
            ttt = TicTacToeBoard(board=board)
            ttt.save()
            created_objects.append(ttt)

        query = (select(TicTacToeBoard.sa.id,
                        TicTacToeBoard.sa.board).order_by(
                            TicTacToeBoard.sa.id).limit(10))

        with get_engine().begin() as connection:
            # fetch rows before the connection context closes the cursor
            test_data = connection.execute(query).fetchall()

        for t_data, c_object in zip(test_data, created_objects):
            t_data_id, t_data_board = t_data
            assert t_data_id == c_object.id
            assert t_data_board == c_object.board
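For comparison, a rough Django ORM equivalent of the SQL Expression Language query above (same columns, ordering, and limit):

    # Roughly equivalent ORM read: (id, board) tuples, ordered by id, capped at 10.
    rows = TicTacToeBoard.objects.order_by('id').values_list('id', 'board')[:10]
    for row_id, row_board in rows:
        print(row_id, row_board)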
Example #5
File: models.py Project: gmat/lims
    def bulk_create(logs):
        '''
        Utility method - bulk create/save ApiLog instances
        '''

        logger.debug('bulk create logs: %r', logs)
        with transaction.atomic():
            with get_engine().connect() as conn:
                last_id = int(
                    conn.execute(
                        'select last_value from reports_apilog_id_seq;').
                    scalar() or 0)
                ApiLog.objects.bulk_create(logs)
                # NOTE: needed for PostgreSQL with Django < 1.10 only:
                # ids must be assigned manually on bulk create
                for i, log in enumerate(logs):
                    log.id = last_id + i + 1

                bulk_create_diffs = []
                for log in logs:
                    for key, logdiffs in log.diffs.items():
                        bulk_create_diffs.append(
                            LogDiff(log=log,
                                    field_key=key,
                                    field_scope='fields.%s' %
                                    log.ref_resource_name,
                                    before=logdiffs[0],
                                    after=logdiffs[1]))
                LogDiff.objects.bulk_create(bulk_create_diffs)

            return logs
Example #6
    def connection(self):
        """
        Get the database connection to use for testing.
        :return: the database connection to use for testing.
        """
        if self._connection is None:
            self._connection = get_engine().connect()
        return self._connection
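A fixture like this usually wants a matching teardown so the cached connection is released; a minimal sketch (the close method and its name are an assumption, not part of the original):

    def close(self):
        # Hypothetical companion teardown: return the cached connection to the pool.
        if self._connection is not None:
            self._connection.close()
            self._connection = None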
Example #7
def get_sa_session():
    """
    Create and return a SQLAlchemy session.
    :return: A SQLAlchemy session.
    """
    engine = get_engine()
    _Session = sessionmaker(bind=engine)
    return _Session()
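Once created, the session can query Django models through the .sa attribute that aldjemy attaches to them; an illustrative sketch, assuming a User model like the one imported in Example #15:

    # Illustrative usage of the returned session (User is an assumed model).
    session = get_sa_session()
    try:
        first_user = session.query(User.sa).order_by(User.sa.id).first()
        print(first_user)
    finally:
        session.close()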
Example #8
def Session():
    """
    Creates and returns an sqlalchemy session mapped to django orm's models
    this is no longer used meaningfully since the django orm has been fully replaced
    """
    from aldjemy.core import get_engine
    # Get the engine from aldjemy
    engine = get_engine()
    # Create the session with tyhe engine
    _Session = sessionmaker(bind=engine)
    return _Session()
Example #9
def get_aldjemy_session():
    """
    Use aldjemy to make a session.

    .. note::
        Use this only in cases like this one. In normal production mode it is
        safer to create the session explicitly, because that is more robust.
    """
    from aldjemy.core import get_engine
    engine = get_engine()
    _Session = sessionmaker(bind=engine)
    return _Session()
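A sketch of the more explicit setup the note above alludes to: configure one module-level sessionmaker at import time and hand out sessions from it (the layout is an assumption, not taken from aldjemy's documentation):

    from aldjemy.core import get_engine
    from sqlalchemy.orm import sessionmaker

    # Hypothetical module-level factory: bound once, shared by all callers.
    SessionFactory = sessionmaker(bind=get_engine())

    def make_session():
        return SessionFactory()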
Example #10
    def bulk_create(logs):
        '''
        Utility method - bulk create/save ApiLog instances
        '''

        logger.debug('bulk create logs: %r', logs)
        with transaction.atomic():
            with get_engine().connect() as conn:
                last_id = int(
                    conn.execute(
                        'select last_value from reports_apilog_id_seq;').
                    scalar() or 0)

                for log in logs:
                    if log.json_field and isinstance(log.json_field, dict):
                        try:
                            log.json_field = json.dumps(
                                log.json_field, cls=LimsJSONEncoder)
                        except Exception as e:
                            logger.exception(
                                'error with json_field value encoding: %r - %r',
                                e, log.json_field)

                ApiLog.objects.bulk_create(logs)
                # NOTE: needed for PostgreSQL with Django < 1.10 only:
                # ids must be assigned manually on bulk create
                for i, log in enumerate(logs):
                    log.id = last_id + i + 1

                bulk_create_diffs = []
                for log in logs:
                    for key, logdiffs in log.diffs.items():
                        bulk_create_diffs.append(
                            LogDiff(log=log,
                                    field_key=key,
                                    field_scope='fields.%s' %
                                    log.ref_resource_name,
                                    before=ApiLog._encode_before_after(
                                        logdiffs[0]),
                                    after=ApiLog._encode_before_after(
                                        logdiffs[1])))
                LogDiff.objects.bulk_create(bulk_create_diffs)

            return logs
Example #11
    def bulk_create(logs):
        '''
        Utility method - bulk create/save ApiLog instances
        '''

        logger.debug('bulk create logs: %r', logs)
        with transaction.atomic():
            with get_engine().connect() as conn:
                last_id = int(
                    conn.execute(
                        'select last_value from reports_apilog_id_seq;')
                        .scalar() or 0)
                
                for log in logs:
                    if log.json_field and isinstance(log.json_field, dict):
                        try:
                            log.json_field = json.dumps(
                                log.json_field, cls=LimsJSONEncoder)
                        except Exception as e:
                            logger.exception(
                                'error with json_field value encoding: %r - %r',
                                e, log.json_field)
                
                ApiLog.objects.bulk_create(logs)
                # NOTE: needed for PostgreSQL with Django < 1.10 only:
                # ids must be assigned manually on bulk create
                for i, log in enumerate(logs):
                    log.id = last_id + i + 1
            
                bulk_create_diffs = []
                for log in logs:
                    for key, logdiffs in log.diffs.items():
                        bulk_create_diffs.append(
                            LogDiff(
                                log=log,
                                field_key=key,
                                field_scope='fields.%s' % log.ref_resource_name,
                                before=ApiLog._encode_before_after(logdiffs[0]),
                                after=ApiLog._encode_before_after(logdiffs[1]))
                        )
                LogDiff.objects.bulk_create(bulk_create_diffs)
            
            return logs
Example #12
def Session():
    from aldjemy.core import get_engine
    engine = get_engine()
    _Session = sessionmaker(bind=engine)
    return _Session()
Example #13
    def post(self, request):
        try:
            # import pdb; pdb.set_trace()
            session = Session()
            dct_data = {}

            int_company = request.data['company_id']
            ins_company = CompanyDetails.objects.filter(pk_bint_id=int_company)
            # lst_branch = list(Branch.objects.filter(fk_company_id = ins_company[0].pk_bint_id).values())
            lst_branch = list(Branch.objects.all().values())
            # fromdate =  datetime.strptime(request.data['date_from'][:10] , '%Y-%m-%d' )
            fromdate = request.data['date_from']
            # # todate =  datetime.strptime(request.data['date_to'][:10] , '%Y-%m-%d' )
            todate = request.data['date_to']
            if request.data['bln_chart']:
                str_sort = request.data.get('strGoodPoorClicked', 'NORMAL')
                int_page = int(request.data.get('intCurrentPage', 1))
                if request.data.get('show_type'):
                    str_show_type = 'total_amount'
                else:
                    str_show_type = 'int_quantity'

                engine = get_engine()
                conn = engine.connect()

                lst_mv_view = request.data.get('lst_mv')

                if not lst_mv_view:
                    session.close()
                    return JsonResponse({
                        'status': 0,
                        'reason': 'No view list found'
                    })
                query_set = ""
                if len(lst_mv_view) == 1:

                    if request.data['type'].upper() == 'ENQUIRY':

                        query = "select vchr_enquiry_status as status, sum(" + str_show_type + ") as counts, vchr_product_name as vchr_service, concat(staff_first_name, ' ',staff_last_name) as vchr_staff_full_name, user_id as fk_assigned, staff_first_name, staff_last_name ,vchr_brand_name, vchr_item_name, is_resigned, promoter, branch_id, product_id, brand_id, branch_name from " + lst_mv_view[
                            0] + " {} group by vchr_enquiry_status ,vchr_service, vchr_staff_full_name, fk_assigned, vchr_brand_name, vchr_item_name, promoter, is_resigned,staff_first_name, staff_last_name, branch_id, product_id, brand_id, branch_name"
                    else:

                        query = "select vchr_enquiry_status as status, sum(" + str_show_type + ") as counts, vchr_product_name as vchr_service, concat(staff_first_name, ' ',staff_last_name) as vchr_staff_full_name,user_id as fk_assigned,staff_first_name, staff_last_name ,vchr_brand_name, vchr_item_name, is_resigned, promoter, branch_id, product_id, brand_id, branch_name from " + lst_mv_view[
                            0] + " {} group by vchr_enquiry_status ,vchr_service, vchr_staff_full_name, fk_assigned, vchr_brand_name, vchr_item_name, promoter, is_resigned,staff_first_name, staff_last_name, branch_id, product_id, brand_id, branch_name"

                else:

                    if request.data['type'].upper() == 'ENQUIRY':

                        for data in lst_mv_view:
                            query_set += "select vchr_enquiry_status as status,vchr_product_name as vchr_service,concat(staff_first_name,' ',staff_last_name) as vchr_staff_full_name,sum(" + str_show_type + ") as counts,user_id as fk_assigned,vchr_brand_name,vchr_item_name,promoter,is_resigned, branch_id, product_id, brand_id, branch_name from " + data + " {} group by  vchr_enquiry_status , vchr_service, vchr_staff_full_name, fk_assigned, vchr_brand_name, vchr_item_name, promoter, is_resigned, branch_id, product_id, brand_id, branch_name union "
                    else:

                        for data in lst_mv_view:

                            query_set += "select vchr_enquiry_status as status,vchr_product_name as vchr_service,concat(staff_first_name,' ',staff_last_name) as vchr_staff_full_name,sum(" + str_show_type + ") as counts,user_id as fk_assigned, vchr_brand_name, vchr_item_name,promoter,is_resigned,branch_id, product_id, brand_id, branch_name from " + data + " {} group by vchr_enquiry_status, vchr_service, vchr_staff_full_name, fk_assigned, vchr_brand_name, vchr_item_name, promoter,is_resigned,branch_id, product_id, brand_id, branch_name union "

                    query = query_set.rsplit(' ', 2)[0]
                """ data wise filtering """

                str_filter_data = "where dat_enquiry :: date BETWEEN '" + request.data[
                    'date_from'] + "' AND '" + request.data[
                        'date_to'] + "' AND int_company_id = " + int_company + ""
                """Permission wise filter for data"""
                if request.user.userdetails.fk_group.vchr_name.upper() in [
                        'ADMIN', 'GENERAL MANAGER SALES', 'COUNTRY HEAD'
                ]:
                    pass
                elif request.user.userdetails.fk_group.vchr_name.upper() in [
                        'BRANCH MANAGER', 'ASSISTANT BRANCH MANAGER'
                ]:
                    str_filter_data = str_filter_data + " AND branch_id = " + str(
                        request.user.userdetails.fk_branch_id) + ""

                elif request.user.userdetails.fk_hierarchy_group_id or request.user.userdetails.fk_group.vchr_name.upper(
                ) in ['CLUSTER MANAGER']:
                    lst_branch = show_data_based_on_role(request)
                    str_filter_data += " AND branch_id IN (" + str(
                        lst_branch)[1:-1] + ")"
                else:
                    session.close()
                    return Response({'status': 0, 'reason': 'No data'})

                if request.data.get('branch'):
                    str_filter_data += " AND branch_id IN (" + str(
                        request.data.get('branch'))[1:-1] + ")"

                if request.data.get('product'):
                    str_filter_data += " AND product_id = " + str(
                        request.data.get('product')) + ""

                if request.data.get('brand'):
                    str_filter_data += " AND brand_id = " + str(
                        request.data.get('brand')) + ""

                # import pdb; pdb.set_trace()
                #for getting user corresponding products
                lst_user_id = []
                lst_user_id.append(request.user.id)

                lst_user_products = get_user_products(lst_user_id)
                if lst_user_products:
                    str_filter_data += " AND product_id in (" + str(
                        lst_user_products)[1:-1] + ")"

                if len(lst_mv_view) == 1:
                    query = query.format(str_filter_data)
                else:
                    query = query.format(str_filter_data, str_filter_data)
                rst_enquiry = conn.execute(query).fetchall()

                if not rst_enquiry:
                    session.close()
                    return Response({'status': 'failed', 'data': 'No Data'})
                dct_data = {}
                dct_data['branch_all'] = {}
                dct_data['service_all'] = {}
                dct_data['brand_all'] = {}
                dct_data['item_all'] = {}
                dct_data['status_all'] = {}

                for ins_data in rst_enquiry:
                    if ins_data.branch_name.title(
                    ) not in dct_data['branch_all']:
                        dct_data['branch_all'][
                            ins_data.branch_name.title()] = {}
                        dct_data['branch_all'][
                            ins_data.branch_name.title()]['Enquiry'] = int(
                                ins_data.counts)
                        dct_data['branch_all'][
                            ins_data.branch_name.title()]['Sale'] = 0

                        if ins_data.status == 'INVOICED':
                            dct_data['branch_all'][
                                ins_data.branch_name.title()]['Sale'] = int(
                                    ins_data.counts)
                    else:
                        dct_data['branch_all'][
                            ins_data.branch_name.title()]['Enquiry'] += int(
                                ins_data.counts)
                        if ins_data.status == 'INVOICED':
                            dct_data['branch_all'][
                                ins_data.branch_name.title()]['Sale'] += int(
                                    ins_data.counts)

                    if ins_data.vchr_service.title(
                    ) not in dct_data['service_all']:
                        dct_data['service_all'][
                            ins_data.vchr_service.title()] = {}
                        dct_data['service_all'][
                            ins_data.vchr_service.title()]['Enquiry'] = int(
                                ins_data.counts)
                        dct_data['service_all'][
                            ins_data.vchr_service.title()]['Sale'] = 0
                        if ins_data.status == 'INVOICED':
                            dct_data['service_all'][
                                ins_data.vchr_service.title()]['Sale'] = int(
                                    ins_data.counts)
                    else:
                        dct_data['service_all'][
                            ins_data.vchr_service.title()]['Enquiry'] += int(
                                ins_data.counts)
                        if ins_data.status == 'INVOICED':
                            dct_data['service_all'][
                                ins_data.vchr_service.title()]['Sale'] += int(
                                    ins_data.counts)

                    if ins_data.vchr_brand_name.title(
                    ) not in dct_data['brand_all']:
                        dct_data['brand_all'][
                            ins_data.vchr_brand_name.title()] = {}
                        dct_data['brand_all'][
                            ins_data.vchr_brand_name.title()]['Enquiry'] = int(
                                ins_data.counts)
                        dct_data['brand_all'][
                            ins_data.vchr_brand_name.title()]['Sale'] = 0
                        if ins_data.status == 'INVOICED':
                            dct_data['brand_all'][ins_data.vchr_brand_name.
                                                  title()]['Sale'] = int(
                                                      ins_data.counts)
                    else:
                        dct_data['brand_all'][ins_data.vchr_brand_name.title(
                        )]['Enquiry'] += int(ins_data.counts)
                        if ins_data.status == 'INVOICED':
                            dct_data['brand_all'][ins_data.vchr_brand_name.
                                                  title()]['Sale'] += int(
                                                      ins_data.counts)

                    if ins_data.vchr_item_name.title(
                    ) not in dct_data['item_all']:
                        dct_data['item_all'][
                            ins_data.vchr_item_name.title()] = {}
                        dct_data['item_all'][
                            ins_data.vchr_item_name.title()]['Enquiry'] = int(
                                ins_data.counts)
                        dct_data['item_all'][
                            ins_data.vchr_item_name.title()]['Sale'] = 0
                        if ins_data.status == 'INVOICED':
                            dct_data['item_all'][
                                ins_data.vchr_item_name.title()]['Sale'] = int(
                                    ins_data.counts)
                    else:
                        dct_data['item_all'][
                            ins_data.vchr_item_name.title()]['Enquiry'] += int(
                                ins_data.counts)
                        if ins_data.status == 'INVOICED':
                            dct_data['item_all'][ins_data.vchr_item_name.title(
                            )]['Sale'] += int(ins_data.counts)

                    if ins_data.status not in dct_data['status_all']:
                        dct_data['status_all'][ins_data.status] = int(
                            ins_data.counts)
                    else:
                        dct_data['status_all'][ins_data.status] += int(
                            ins_data.counts)

                dct_data['brand_all'] = paginate_data(dct_data['brand_all'],
                                                      10)
                dct_data['brand_all'] = chart_data_sort(
                    request, dct_data['brand_all'], 'NORMAL', 1)
                # sorted_dct_data = sorted(dct_data['brand_all'][1].items(), key= best_key)
                # dct_data['brand_all'] = dict(sorted_dct_data)

                dct_data['branch_all'] = paginate_data(dct_data['branch_all'],
                                                       10)
                dct_data['branch_all'] = chart_data_sort(
                    request, dct_data['branch_all'], str_sort, int_page)
                # sorted_dct_data = sorted(dct_data['branch_all'][1].items(), key= best_key)
                # dct_data['branch_all'] = dict(sorted_dct_data)

                dct_data['item_all'] = paginate_data(dct_data['item_all'], 10)
                dct_data['item_all'] = chart_data_sort(request,
                                                       dct_data['item_all'],
                                                       'NORMAL', 1)
                # sorted_dct_data = sorted(dct_data['item_all'][1].items(), key= best_key)
                # dct_data['item_all'] = dict(sorted_dct_data)

                dct_data['service_all'] = paginate_data(
                    dct_data['service_all'], 10)
                dct_data['service_all'] = chart_data_sort(
                    request, dct_data['service_all'], 'NORMAL', 1)
                # sorted_dct_data = sorted(dct_data['service_all'][1].items(), key= best_key)
                # dct_data['service_all'] = dict(sorted_dct_data)

                if request.data['type'].upper() == 'ENQUIRY':
                    str_report_name = 'Branch Enquiry Report'
                    lst_details = [
                        'branch_all-bar', 'service_all-bar', 'brand_all-bar',
                        'item_all-bar', 'status_all-pie'
                    ]
                    dct_label = {
                        'branch_all': 'Branch wise',
                        'service_all': 'Product wise',
                        'brand_all': 'Brand wise',
                        'item_all': 'Item wise',
                        'status_all': 'Status wise'
                    }
                else:
                    str_report_name = 'Branch Sales Report'
                    lst_details = [
                        'branch_all-bar', 'service_all-bar', 'brand_all-bar',
                        'item_all-bar'
                    ]
                    dct_label = {
                        'branch_all': 'Branch wise',
                        'service_all': 'Product wise',
                        'brand_all': 'Brand wise',
                        'item_all': 'Item wise'
                    }

            if request.data['bln_table']:
                if request.data['type'].upper() == 'ENQUIRY':
                    str_report_name = 'Branch Enquiry Report'
                else:
                    str_report_name = 'Branch Sales Report'
                rst_enquiry = session.query(ItemEnquirySA.vchr_enquiry_status.label('status'),ProductsSA.vchr_name.label('vchr_service'),func.concat(AuthUserSA.first_name, ' ',
                                    AuthUserSA.last_name).label('vchr_staff_full_name'),
                                    EnquiryMasterSA.fk_assigned_id.label('fk_assigned'),func.DATE(EnquiryMasterSA.dat_created_at).label('dat_created_at'),EnquiryMasterSA.vchr_enquiry_num,func.concat(CustomerModelSA.vchr_name).label('vchr_full_name'),
                                    AuthUserSA.id.label('user_id'),AuthUserSA.last_name.label('staff_last_name'),
                                    AuthUserSA.first_name.label('staff_first_name'),BranchSA.vchr_name.label('vchr_name'),BrandsSA.vchr_name,ItemsSA.vchr_name,
                                    UserSA.fk_brand_id,UserSA.dat_resignation_applied,
                                    case([(UserSA.dat_resignation_applied < datetime.now(),literal_column("'resigned'"))],
                                        else_=literal_column("'not resigned'")).label("is_resigned"))\
                                    .filter(cast(EnquiryMasterSA.dat_created_at,Date) >= fromdate,
                                            cast(EnquiryMasterSA.dat_created_at,Date) <= todate,
                                            EnquiryMasterSA.fk_company_id == request.user.userdetails.fk_company_id,
                                            EnquiryMasterSA.chr_doc_status == 'N')\
                                    .join(EnquiryMasterSA,ItemEnquirySA.fk_enquiry_master_id == EnquiryMasterSA.pk_bint_id)\
                                    .join(BranchSA,BranchSA.pk_bint_id == EnquiryMasterSA.fk_branch_id)\
                                    .join(CustomerSA,EnquiryMasterSA.fk_customer_id == CustomerSA.pk_bint_id)\
                                    .join(AuthUserSA, EnquiryMasterSA.fk_assigned_id == AuthUserSA.id)\
                                    .join(UserSA, AuthUserSA.id == UserSA.user_ptr_id )\
                                    .join(ProductsSA,ProductsSA.pk_bint_id == ItemEnquirySA.fk_product_id)\
                                    .join(BrandsSA,BrandsSA.pk_bint_id==ItemEnquirySA.fk_brand_id)\
                                    .join(ItemsSA,ItemsSA.pk_bint_id==ItemEnquirySA.fk_item_id)
                """Permission wise filter for data"""
                if request.user.userdetails.fk_group.vchr_name.upper() in [
                        'ADMIN', 'GENERAL MANAGER SALES', 'COUNTRY HEAD'
                ]:
                    pass
                elif request.user.userdetails.fk_group.vchr_name.upper() in [
                        'BRANCH MANAGER', 'ASSISTANT BRANCH MANAGER'
                ]:
                    rst_enquiry = rst_enquiry.filter(
                        EnquiryMasterSA.fk_branch_id ==
                        request.user.userdetails.fk_branch_id)
                elif request.user.userdetails.fk_hierarchy_group_id or request.user.userdetails.fk_group.vchr_name.upper(
                ) in ['CLUSTER MANAGER']:
                    lst_branch = show_data_based_on_role(request)
                    rst_enquiry = rst_enquiry.filter(
                        EnquiryMasterSA.fk_branch_id.in_(lst_branch))
                else:
                    session.close()
                    return Response({'status': 0, 'reason': 'No data'})

                if request.data.get('branch'):
                    rst_enquiry = rst_enquiry.filter(
                        EnquiryMasterSA.fk_branch_id.in_(
                            tuple(request.data.get('branch'))))

                # import pdb; pdb.set_trace()
                #for getting user corresponding products
                lst_user_id = []
                lst_user_id.append(request.user.id)
                lst_user_products = get_user_products(lst_user_id)

                if lst_user_products:
                    rst_enquiry = rst_enquiry.filter(
                        ProductsSA.id.in_(lst_user_products))

                if not rst_enquiry.all():
                    session.close()
                    return Response({'status': 'failed', 'data': 'No Data'})

                lst_tbl_head = [
                    'Enquiry No', 'Branch', 'Product', 'Brand', 'Item',
                    'Status'
                ]
                lst_tbl_index = [5, 10, 1, 11, 12, 0]

            if request.data['document'].upper() == 'PDF':

                if request.data['bln_table'] and request.data['bln_chart']:
                    file_output = generate_pdf(request, str_report_name,
                                               lst_details, dct_label,
                                               dct_data, lst_tbl_head,
                                               lst_tbl_index,
                                               list(rst_enquiry.all()))
                elif request.data['bln_chart']:
                    file_output = generate_pdf(request, str_report_name,
                                               lst_details, dct_label,
                                               dct_data)
                elif request.data['bln_table']:
                    file_output = generate_pdf(request, str_report_name,
                                               lst_tbl_head, lst_tbl_index,
                                               list(rst_enquiry.all()))

                if request.data.get('export_type').upper() == 'DOWNLOAD':
                    session.close()
                    return Response({
                        "status": 1,
                        'file': file_output['file'],
                        'file_name': file_output['file_name']
                    })
                elif request.data.get('export_type').upper() == 'MAIL':
                    session.close()
                    return Response({"status": 1})

            elif request.data['document'].upper() == 'EXCEL':
                if request.data['bln_table'] and request.data['bln_chart']:
                    data = generate_excel(request, str_report_name,
                                          lst_details, dct_label, dct_data,
                                          lst_tbl_head, lst_tbl_index,
                                          list(rst_enquiry.all()))
                elif request.data['bln_chart']:
                    data = generate_excel(request, str_report_name,
                                          lst_details, dct_label, dct_data)
                elif request.data['bln_table']:
                    data = generate_excel(request, str_report_name,
                                          lst_tbl_head, lst_tbl_index,
                                          list(rst_enquiry.all()))

                if request.data.get('export_type').upper() == 'DOWNLOAD':
                    session.close()
                    return Response({"status": 1, "file": data})
                elif request.data.get('export_type').upper() == 'MAIL':
                    session.close()
                    return Response({"status": 1})

        except Exception as e:
            session.close()
            return Response({'status': 0, 'data': str(e)})
Example #14
def AldjemySession():
    engine = get_engine()
    Session = sessionmaker(bind=engine)
    return Session()
Example #15
from django.shortcuts import render
from django.db import IntegrityError
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework import status
from rest_framework.generics import get_object_or_404
from aldjemy.core import get_engine
from sqlalchemy.sql import select
from .models import User
from .serializers import UserSerializer, UserPasswordSerializer

engine = get_engine()


def str_to_int_list(ids):
    ids = ids.split(',')
    res = []
    for id_ in ids:
        try:
            res.append(int(id_))
        except ValueError:
            return []
    return res


# Create your views here.
class UserView(APIView):
    def get(self, request):
        ids = request.GET.get('ids', None)
        if ids:
            ids = str_to_int_list(ids)
Example #16
def Session():
    from aldjemy.core import get_engine
    from sqlalchemy.orm import sessionmaker
    engine = get_engine()
    _Session = sessionmaker(bind=engine)
    return _Session()
Example #17
    def stream_response_from_statement(self, request, stmt, count_stmt, 
            output_filename, field_hash={}, param_hash={}, 
            rowproxy_generator=None, is_for_detail=False,
            downloadID=None, title_function=None, use_caching=None, meta=None ):
        '''
        Execute the SQL stmt provided and stream the results to the response:
        
        Caching (for json responses only): resources will be cached if:
        - self.use_caching is True and use_caching is not False and limit > 0
        - limit == 0 and use_caching is True
        
        '''
        
        
        DEBUG_STREAMING = logger.isEnabledFor(logging.DEBUG)
        
        logger.info('stream_response_from_statement: %r', self._meta.resource_name )
        temp_param_hash = param_hash.copy()
        if 'schema' in temp_param_hash:
            del temp_param_hash['schema']
        if DEBUG_STREAMING:
            logger.info('stream_response_from_statement: %r, %r', 
                self._meta.resource_name,temp_param_hash)
        limit = param_hash.get('limit', 25)        
        try:
            limit = int(limit)
        except Exception:
            raise BadRequest(
                "Invalid limit '%s' provided. Please provide a positive integer." 
                % limit)
        if limit > 0:    
            stmt = stmt.limit(limit)
        if is_for_detail:
            limit = 1

        offset = param_hash.get('offset', 0 )
        try:
            offset = int(offset)
        except Exception:
            raise BadRequest(
                "Invalid offset '%s' provided. Please provide a positive integer." 
                % offset)
        if offset < 0:    
            offset = -offset
        stmt = stmt.offset(offset)
        
        conn = get_engine().connect()
        
        try:
            logger.debug('offset: %s, limit: %s', offset, limit)
        
            if DEBUG_STREAMING:
                logger.info('stmt: %s, param_hash: %s ', 
                    str(stmt.compile(
                            dialect=postgresql.dialect(), 
                            compile_kwargs={"literal_binds": True})), 
                    temp_param_hash)
                logger.info(
                    'count stmt %s', 
                    str(count_stmt.compile(
                        dialect=postgresql.dialect(), 
                        compile_kwargs={"literal_binds": True})))
            
            content_type = self.get_accept_content_type(request, format=param_hash.get('format', None))
            logger.debug('---- content_type: %r, hash: %r', content_type, temp_param_hash)
            result = None
            if content_type == JSON_MIMETYPE:
                logger.info(
                    'streaming json, use_caching: %r, self.use_cache: %r, limit: %d, %r', 
                    use_caching, self.use_cache, limit, is_for_detail)
                if ((self.use_cache is True and use_caching is not False)
                        and ( use_caching is True or limit > 0)):
                    cache_hit = self._cached_resultproxy(
                        conn, stmt, count_stmt, param_hash, limit, offset)
                    if cache_hit:
                        logger.info('cache hit: %r', output_filename)
                        result = cache_hit['cached_result']
                        count = cache_hit['count']
                    else:
                        # cache routine should always return a cache object
                        logger.error('error, cache not set: execute stmt')
                        count = conn.execute(count_stmt).scalar()
                        result = conn.execute(stmt)
                        logger.info('result: %r', [x for x in result])    
                    logger.info('====count: %d====', count)
                    
                else:
                    logger.info('not cached, execute count stmt...')
                    count = conn.execute(count_stmt).scalar()
                    logger.info('executed count stmt: %d', count)
                    result = conn.execute(stmt)
                    logger.info('executed stmt')

                if not meta:
                    meta = {
                        'limit': limit,
                        'offset': offset,
                        'total_count': count
                        }
                else:
                    temp = {
                        'limit': limit,
                        'offset': offset,
                        'total_count': count
                        }
                    temp.update(meta)    
                    meta = temp
                
                if rowproxy_generator:
                    result = rowproxy_generator(result)
                    
                logger.info('is for detail: %r, count: %r', is_for_detail, count)
                if is_for_detail and count == 0:
                    logger.info('detail not found')
                    conn.close()
                    return HttpResponse(status=404)
                
                if DEBUG_STREAMING:
                    logger.info('json setup done, meta: %r', meta)
    
            else: # not json
            
                logger.info('execute stmt')
                result = conn.execute(stmt)
                logger.info('executed stmt')
                
                if rowproxy_generator:
                    result = rowproxy_generator(result)
                    # FIXME: test this for generators other than json generator        
            
            result = closing_iterator_wrapper(result, conn.close)
            return self.stream_response_from_cursor(request, result, output_filename, 
                field_hash=field_hash, 
                param_hash=param_hash, 
                is_for_detail=is_for_detail, 
                downloadID=downloadID, 
                title_function=title_function, 
                meta=meta)
        except Exception:
            logger.exception('on stream response')
            raise
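The closing_iterator_wrapper call above ties the connection's lifetime to the streamed result: rows are yielded to the response, and the close callback runs once the iterator is exhausted or abandoned. An illustrative sketch of such a wrapper (not the project's actual helper):

    def closing_iterator_wrapper_sketch(iterable, close):
        # Yield every row, then invoke close() even if the consumer stops early.
        try:
            for item in iterable:
                yield item
        finally:
            close()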
Example #18
    def test_engines_cache(self):
        self.assertEqual(get_engine("default"), Cache.engines["default"])
        self.assertEqual(get_engine("logs"), Cache.engines["logs"])
        self.assertEqual(get_engine(), Cache.engines["default"])
        self.assertNotEqual(get_engine("default"), get_engine("logs"))