Example #1
    def get(self):
        ts = time.time()
        MSalesDetailTxns.insert_data_from_csv()  # to database
        # create_and_dump_sales_from_csv()  # to pickle
        te = time.time()
        return {
            'Time taken to import sales master':
            "{} min".format(round((te - ts) / 60, 2))
        }
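The handler's only moving part is the elapsed-time report; here is that pattern run stand-alone, with time.sleep standing in for the real import call:

import time

ts = time.time()
time.sleep(0.1)  # stand-in for the real MSalesDetailTxns.insert_data_from_csv() call
te = time.time()
print("{} min".format(round((te - ts) / 60, 2)))  # prints '0.0 min' for a fast run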
Example #2
    def get(self, client_id):
        try:
            MSalesDetailTxns.delete_client_data(client_id)
            ItemMaster.delete_client_data(client_id)
            delete_clients_mongo_collections(client_id)
            delete_client_pickle_files(client_id)
        except Exception as e:
            return {"error": str(e)}
        return {"cleaned": True}, 200
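Assuming this get() sits on a Flask-RESTful Resource (the class name and route below are hypothetical), registering and exercising it looks roughly like this:

from flask import Flask
from flask_restful import Api, Resource

app = Flask(__name__)
api = Api(app)

class CleanClient(Resource):  # hypothetical name for the resource above
    def get(self, client_id):
        try:
            pass  # the four delete_* calls would run here
        except Exception as e:
            return {"error": str(e)}
        return {"cleaned": True}, 200

api.add_resource(CleanClient, '/clean/<client_id>')  # hypothetical route

with app.test_client() as c:
    print(c.get('/clean/42').get_json())  # {'cleaned': True}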
Example #3
def load_n_process_salesdetail_data(client_id):

    sales_details = sales_table.fetch_all_by_client(client_id)  # from database
    # sales_details = read_dataframe_from_pickle(client_id, data_name='sales')  # from pickle

    sales_details['Datetimestamp'] = pd.to_datetime(
        sales_details['Datetimestamp'])
    # Keep only rows whose item code is longer than two characters.
    sales_details['item_len'] = sales_details['Item_code'].str.len()
    sales_details = sales_details[sales_details['item_len'] > 2].copy()
    return sales_details
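A quick check of the length filter on made-up rows (the toy frame assumes Item_code holds strings, as the length check implies):

import pandas as pd

sales_details = pd.DataFrame({
    'Datetimestamp': ['2020-01-01 09:30', '2020-01-01 10:00'],
    'Item_code': ['A1', 'A1001'],  # 'A1' fails the len > 2 filter
})
sales_details['Datetimestamp'] = pd.to_datetime(sales_details['Datetimestamp'])
sales_details['item_len'] = sales_details['Item_code'].str.len()
print(sales_details[sales_details['item_len'] > 2])  # only the 'A1001' row survives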
Example #4
def get_daily_sales_data(client_id):

    required_col = ['Datetimestamp', 'Total_sales']
    sales = sales_table.fetch_all_by_client(client_id)[
        required_col]  # from database
    # sales = read_dataframe_from_pickle(client_id, data_name='sales')[required_col]  # from pickle
    sales['Datetimestamp'] = pd.to_datetime(sales['Datetimestamp'])
    # Truncate each timestamp to its calendar date, then sum sales per day.
    sales['date'] = sales['Datetimestamp'].dt.date.astype('datetime64[ns]')
    daily_sales = sales.groupby('date', as_index=False).agg({'Total_sales': 'sum'})
    daily_sales['month'] = daily_sales.date.dt.month.astype(np.int8)
    # Series.dt.week was removed in pandas 2.0; isocalendar().week is the replacement.
    daily_sales['week'] = daily_sales.date.dt.isocalendar().week.astype(np.int8)
    daily_sales['year'] = daily_sales.date.dt.year.astype(np.int16)

    return daily_sales
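The truncate-to-date-then-sum step, run on made-up rows:

import pandas as pd

sales = pd.DataFrame({
    'Datetimestamp': pd.to_datetime(
        ['2020-01-01 09:00', '2020-01-01 17:00', '2020-01-02 12:00']),
    'Total_sales': [100.0, 50.0, 75.0],
})
sales['date'] = sales['Datetimestamp'].dt.date.astype('datetime64[ns]')
daily = sales.groupby('date', as_index=False).agg({'Total_sales': 'sum'})
print(daily)  # 2020-01-01 -> 150.0, 2020-01-02 -> 75.0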
Example #5
def get_item_wise_sales_data(client_id, year):
    required_cols = [
        'Datetimestamp', 'Item_code', 'Item_name', 'Sale_quantity',
        'Unitcost_price', 'Unitsale_price', 'Total_sales'
    ]

    sales = sales_table.fetch_all_by_client(client_id)[
        required_cols]  #from database
    # sales = read_dataframe_from_pickle(client_id, data_name='sales')[required_cols] #from pickle

    sales['Datetimestamp'] = pd.to_datetime(sales['Datetimestamp'])
    sales['month'] = sales['Datetimestamp'].dt.month.astype(np.int8)
    sales['year'] = sales['Datetimestamp'].dt.year.astype(np.int16)
    sales['profit'] = round(
        ((sales['Unitsale_price'] - sales['Unitcost_price']) *
         sales['Sale_quantity']), 2)
    sales = sales[sales['year'] >= year - 1]

    return sales
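The profit column is per-unit margin times quantity; a worked check on toy prices:

import pandas as pd

sales = pd.DataFrame({
    'Unitcost_price': [2.00, 5.50],
    'Unitsale_price': [3.25, 7.00],
    'Sale_quantity': [10, 4],
})
sales['profit'] = round(
    (sales['Unitsale_price'] - sales['Unitcost_price']) * sales['Sale_quantity'], 2)
print(sales['profit'].tolist())  # [12.5, 6.0]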
Example #6
def create_itemcodes_list(client_id):
    required_cols = ['Sales_id', 'Item_code', 'Item_name']
    saleid_items = sales_table.fetch_all_by_client(client_id)  #from database
    # saleid_items = read_dataframe_from_pickle(client_id, data_name='sales') #from pickle
    saleid_items = saleid_items[required_cols]
    saleid_items['vitem_len'] = saleid_items['Item_code'].str.len()
    saleid_items = saleid_items[saleid_items['vitem_len'] > 2]

    # One row per sale, with all of that sale's item codes joined into one string.
    items_groups = saleid_items[['Sales_id', 'Item_code', 'Item_name']] \
        .groupby(['Sales_id'], as_index=False).agg(lambda x: ','.join(x))

    # Split the joined codes back out and de-duplicate them per sale.
    items_groups['Item_code'] = items_groups['Item_code'].apply(
        lambda x: list(set(x.split(','))))

    Item_codes_list = items_groups.Item_code.tolist()

    return Item_codes_list
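The per-sale code lists are shaped like market-basket transactions; if that is the downstream use, a library such as mlxtend (an assumption, not imported by the example) can consume them directly:

import pandas as pd
from mlxtend.preprocessing import TransactionEncoder
from mlxtend.frequent_patterns import apriori

Item_codes_list = [['A1001', 'B2002'], ['A1001'], ['B2002', 'C3003']]  # toy output

te = TransactionEncoder()
onehot = pd.DataFrame(te.fit(Item_codes_list).transform(Item_codes_list),
                      columns=te.columns_)
print(apriori(onehot, min_support=0.3, use_colnames=True))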
Example #7
def get_code2name_dict(client_id):

    saleid_items = sales_table.fetch_all_by_client(client_id)[[
        'Item_code', 'Item_name'
    ]]  #from database
    # saleid_items = read_dataframe_from_pickle(client_id, data_name='sales')[['Item_code', 'Item_name']] #from pickle
    saleid_items['vitem_len'] = saleid_items['Item_code'].str.len()
    saleid_items = saleid_items[saleid_items['vitem_len'] > 2]

    # Later rows overwrite earlier ones, so each code maps to its last seen name.
    code2name = dict(zip(saleid_items.Item_code, saleid_items.Item_name))

    return code2name
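A toy illustration of the last-seen-wins behavior noted above:

pairs = [('A1001', 'Tea'), ('B2002', 'Sugar'), ('A1001', 'Green Tea')]
code2name = {}
for code, name in pairs:
    code2name[code] = name
print(code2name)  # {'A1001': 'Green Tea', 'B2002': 'Sugar'}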
Example #8
    def get(self, objectid):
        if not FS.exists({"_id": ObjectId(objectid)}):
            return {"status": False}, 400

        data = []
        m = FS.get(ObjectId(objectid))
        metadata = m.metadata
        client_obj_id = metadata['client']

        if metadata['category'] == "item":
            # Decode the stored CSV; keep rows with more than two fields.
            for line in m.read().decode('utf-8').split('\n'):
                fields = line.strip('\r').split(',')
                if len(fields) > 2:
                    data.append(fields)
            try:
                client_id = str(client_obj_id)  # convert bson ObjectId to str
                if ItemMaster.does_client_exist(client_id):
                    ItemMaster.delete_client_data(client_id)
                    delete_clients_mongo_collections(client_id)
                    delete_client_pickle_files(client_id)

                ItemMaster.insert_data_for_items_by_row(data, client_obj_id)  # to database
                # creat_and_dump_dataframe(data, metadata['category'], client_obj_id)  # to pickle
                # Mark the file as processed in the myfiles.files collection.
                metadata['isProcessed'] = True
                files_coll = nosqldb['myfiles.files']
                files_coll.update_one(
                    {'_id': ObjectId(objectid)}, {"$set": {'metadata': metadata}})
            except Exception as e:
                print(str(e))
                return {"status": False}, 404
            return {"status": True}, 200

        elif metadata['category'] == "sales":
            # Same parsing, but a valid sales row has more than four fields.
            for line in m.read().decode('utf-8').split('\n'):
                fields = line.strip('\r').split(',')
                if len(fields) > 4:
                    data.append(fields)
            try:
                client_id = str(client_obj_id)  # convert bson ObjectId to str
                if MSalesDetailTxns.does_client_exist(client_id):
                    MSalesDetailTxns.delete_client_data(client_id)
                    delete_clients_mongo_collections(client_id)
                    delete_client_pickle_files(client_id)

                MSalesDetailTxns.insert_data_for_sales_by_row(data, client_obj_id)  # to database
                # creat_and_dump_dataframe(data, metadata['category'], client_obj_id)  # to pickle
                # Mark the file as processed in the myfiles.files collection.
                metadata['isProcessed'] = True
                files_coll = nosqldb['myfiles.files']
                files_coll.update_one(
                    {'_id': ObjectId(objectid)}, {"$set": {'metadata': metadata}})
            except Exception as e:
                print(str(e))
                return {"status": False}, 404
            return {"status": True}, 200

        return {"status": False}, 404
Example #9
def load_data_dataframe(client_id):

    df = sales_table.fetch_all_by_client(client_id)  # from database
    # df = read_dataframe_from_pickle(client_id, data_name='sales')  # from pickle

    return df