Example #1
def do_conversion(in_file, out_file):
    ai = AdamImport()
    ifile = ''.join([ADAM_PATH, in_file])
    ofile = ''.join([ADAM_EXPORT_PATH, out_file])
    out_type = 'csv'
    ai.DBFConverter(ifile, ofile, out_type)
    print "ran conversion %s to %s" % (in_file, out_file)
Example #2
def sv_Get_RO_Count(type):
    """

    type:valid options are totalcount, oldcount

    """
    ai = AdamImport()

    ifile = 'F:\\adamexports\\adamcache\\Sicar\\Data\\rofile.dbf'
    ofile = 'F:\\adamexports\\csvfiles\\rofile.csv'
    out_type = 'csv'

    if need_refresh(ofile):
        ai.DBFConverter(ifile, ofile, out_type)

    rof = pd.read_csv(ofile, engine='python')
    cutoff_date = datetime.date.today() + datetime.timedelta(-30)

    ttlcount = rof.RO_NUM.count()
    rof['DATE_IN'] = pd.to_datetime(rof['DATE_IN'])
    oldcount = rof[rof['DATE_IN'] < pd.to_datetime(cutoff_date)]
    oldcount = oldcount['RO_NUM'].count()
    if type == 'totalcount':
        return ttlcount
    elif type == 'oldcount':
        return oldcount
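Several of these examples guard the export with a need_refresh() helper that is not shown on this page. A minimal sketch of what it could look like, assuming it just re-exports when the CSV is missing or stale (the one-hour threshold is an assumption):

import os
import time

def need_refresh(ofile, max_age_seconds=3600):
    # assumed behavior: refresh if the CSV is missing or older than max_age_seconds
    if not os.path.exists(ofile):
        return True
    return (time.time() - os.path.getmtime(ofile)) > max_age_seconds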
Example #3
    def handle(self, *args, **options):

        ai = AdamImport()

        ifile = 'F:\\adamexports\\adamcache\\Incar\\Data\\INVEN.DBF'
        ofile = 'F:\\adamexports\\csvfiles\\INVEN.csv'
        out_type = 'csv'

        ai.DBFConverter(ifile, ofile, out_type)

        inv = pd.read_csv(ofile)
        ext = inv.ONHAND * inv.COST
        ext = sum(ext)
        try:
            r = PartsInv(invvalue=ext)
            r.save()
            print "updated inventory value to %s" % ext
        except Exception:
            print "There was an error updating invvalue"

        ifile = 'F:\\adamexports\\adamcache\\Sicar\\Data\\rofile.dbf'
        ofile = 'F:\\adamexports\\csvfiles\\rofile.csv'
        out_type = 'csv'

        ai.DBFConverter(ifile, ofile, out_type)

        rof = pd.read_csv(ofile)
        cutoff_date = datetime.date.today() + datetime.timedelta(-30)

        ttlcount = rof.RO_NUM.count()
        custsum = rof.CP_TOTAL.sum()
        intsum = rof.IN_TOTAL.sum()
        warsum = rof.WP_TOTAL.sum()
        extsum = rof.XP_TOTAL.sum()

        pcount = rof[rof['STATUS'].str.contains('P')]
        pcount = pcount['STATUS'].count()
        rof['DATE_IN'] = pd.to_datetime(rof['DATE_IN'])

        oldcount = rof[rof['DATE_IN'] < pd.to_datetime(cutoff_date)]
        oldcount = oldcount['RO_NUM'].count()

        try:
            r = ServiceRO(
                totalrocount=ttlcount,
                oldrocount=oldcount,
                printedrocount=pcount,
                ro_custpay=custsum,
                ro_intpay=intsum,
                ro_warpay=warsum,
                ro_extpay=extsum,
            )
            r.save()

            print "updated service RO values"
            print ttlcount, pcount, oldcount, custsum, intsum, warsum, extsum
        except Exception:
            print "There was an error updating service RO values"
Example #4
def do_conversion(ifile, ofile):
    ai = AdamImport()
    ifile = ''.join([ADAM_PATH, ifile])
    ofile = ''.join([ADAM_EXPORT_PATH, ofile])
    out_type = 'csv'

    #if need_refresh(ofile):
    ai.DBFConverter(ifile, ofile, out_type)
    print "conversion completed"
Example #5
def pa_Get_Inventory_Value():
    ai = AdamImport()
    ifile = 'F:\\adamexports\\adamcache\\Incar\\Data\\INVEN.DBF'
    ofile = 'F:\\adamexports\\csvfiles\\INVEN.csv'
    out_type = 'csv'

    if need_refresh(ofile):
        ai.DBFConverter(ifile, ofile, out_type)

    inv = pd.read_csv(ofile, engine='python')
    ext = inv.ONHAND * inv.COST
    ext = sum(ext)
    return ext
Example #6
def export(request, path_id):
    # view to convert a DBF file on the fly and export it to a CSV
    ai = AdamImport()
    # get the id from the path and pull the corresponding DBF
    p = ADAMFiles.objects.get(id=path_id)
    p = str(p)
    ai.DBFConverter(p, 'output.csv', 'csv')
    f = open('output.csv')
    ofile = os.path.basename(p)
    ofile = ofile.replace('.dbf', '')
    ofile = ofile.replace('.DBF', '')
    response = HttpResponse(f, content_type="text/csv")
    params = 'attachment; filename=%s.csv' % ofile
    response['Content-Disposition'] = params
    f.close()
    return response
Example #7
def detail(request, path_id):
    # view for the adam/<id> page returns an html rendered dataframe
    # create the object to convert DBF to pandas
    # this view is mainly for previewing data
    ai = AdamImport()
    # get the id from the path and pull the corresponding DBF
    p = ADAMFiles.objects.get(id=path_id)
    p = str(p)
    # convert the DBF to a dataframe
    datafr = ai.DBFConverter(p, 'output.csv', 'pandas')

    # *******************
    # modify the dataframe here before converting to HTML
    # be careful not to return too many rows or the HTTP request will time out
    # *******************

    s = datafr.head()

    # *******************
    # *******************
    s_trunk = s.to_html()
    return HttpResponse(s_trunk)
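The export() and detail() views above need URL routes to be reachable; the comment in Example #7 implies an adam/<id> path. A hypothetical urls.py entry (module path, URL names, and the export suffix are assumptions, and the exact syntax depends on the Django version in use):

from django.conf.urls import url

from adam import views  # assumed module path

urlpatterns = [
    url(r'^adam/(?P<path_id>\d+)/$', views.detail, name='adam_detail'),
    url(r'^adam/(?P<path_id>\d+)/export/$', views.export, name='adam_export'),
]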
Example #8
from dbftopandas import AdamImport

ai = AdamImport()

i = 'f:\\adamexports\\adamcache\\Sicar\\Data\\rofile.dbf'
o = 'f:\\adamexports\\csvfiles\\rofile.csv'
t = 'csv'

ai.DBFConverter(i, o, t)

#t = 'pandas'

#pd = ai.DBFConverter(i,o,t)

#print pd.head()

#headers = ai.GetColNames(i)

#print headers

#i = 'f:\\adamexports\\adamcache\Apcar\Data\\apinv.dbf'

#headers = ai.GetColNames(i)

#print headers

#data_types = ai.GetColNamesAndTypes(i)

#for d in data_types:
#    if data_types[d] == 'NoneType':
#        print d, data_types[d]
Example #9
class Command(BaseCommand):
    def handle(self, *args, **options):
        def send_email(htmlbody, subject):
            # using SendGrid's Python Library - https://github.com/sendgrid/sendgrid-python

            x = SGFields.objects.get(id=1)
            u = x.sgusername
            p = x.sgpassword

            sg = sendgrid.SendGridClient(u, p)
            message = sendgrid.Mail()
            """
            message.add_filter('templates', 'enable', '1')
            message.add_filter('templates', 'template_id', 'TEMPLATE-ALPHA-NUMERIC-ID')
            message.add_substitution('key', 'value')
            message.add_to("*****@*****.**")
            message.set_from("*****@*****.**")
            message.set_subject("Sending with SendGrid is Fun")
            message.set_html("and easy to do anywhere, even with Python")
            message.add_to_name("Jesse Dovi")
            message.set_from_name("Dovi Motors Inc.")
            """

            message.add_to("*****@*****.**")
            message.add_to_name("Jesse Dovi")
            message.set_from_name("Dovi Motors Inc.")
            message.set_from("*****@*****.**")
            message.set_subject(subject)
            message.set_html(htmlbody)

            status, msg = sg.send(message)

            return (status, msg)

        #configure logger
        log_file = 'log.txt'
        logging.basicConfig(filename=log_file,
                            level=logging.DEBUG,
                            format='%(asctime)s %(levelname)s:%(message)s')

        #assign the name of the daily statement file
        daily_statement = 'DailyStatement.csv'

        #Get the last time that the DailyStatement.CSV was downloaded
        last_modified = time.ctime(os.path.getmtime(daily_statement))
        logging.info('Started floor plan notification process')
        logging.info('DailyStatement.csv file was last downloaded %s',
                     last_modified)

        #DailyStatement.CSV should be downloaded and saved in the project folder
        try:
            fp = pd.read_table(daily_statement,
                               sep=',',
                               index_col=False,
                               engine='python',
                               header=None)
            logging.info(
                'Successfully created the DailyStatement.csv dataframe')
        except Exception, e:
            logging.warning(
                'There was an error creating the DailyStatement.csv dataframe')
            logging.warning(e)
            return

        #select only records that have VEH in the 1st column
        fp = fp[(fp[0] == 'VEH')]

        #set the column names to the first line of the dataframe
        fp.columns = fp.iloc[0]

        #reset the index so that the rows are numbered in sequence
        fp = fp.reset_index(drop=True)

        #strip off the 1st line, which duplicates the new column names
        fp = fp.iloc[1:]

        #convert FloorDate to a pandas datetime field
        fp['FloorDate'] = pd.to_datetime(fp['FloorDate'])

        #select only the relevant columns
        fp = fp[['VIN', 'FloorDate', 'Interest']]

        ###########################################################
        #Begin the next section.  Ensure that adamcache has been updated before running this command

        #create the import object and set the arg variables
        ai = AdamImport()
        i = 'f:\\adamexports\\adamcache\\Ficar\\Data\\usstock.dbf'
        o = 'output.csv'
        t = 'pandas'

        #pull in the USStock table
        try:
            temp = ai.DBFConverter(i, o, t)
            logging.info('usstock table imported successfully')
        except Exception, e:
            logging.warning('There was an error importing the usstock table')
            logging.warning(e)
            pass
Example #10
def pa_Get_Parts_Count(type, start_days, end_days, field, cost=1500):
    """
    type takes either total, detail, detail_stock (this returns detail including stock parts)
    total retuns in int, sum of ONHAND
    detail return a dataframe obj with detailed records
    field can take DATEPURC or DATESOLD
    cost should be expressed as an integer in cents

    start_days should be a negative integer
    end_days should be a negative integer greater than start days
    """

    #parts that need to be in stock for DPA
    stock_file = ''.join([ADAM_EXPORT_PATH, 'Extract.csv'])

    stock = pd.read_csv(stock_file)
    stock.columns = ['Ford', 'Alternate', 'QOH', 'Days1', 'Days2']
    ford = stock[['Ford']].dropna()
    ford['Alternate'] = ford['Ford']
    stock = stock[['Alternate']].dropna()
    stock = stock.append(ford['Alternate'], ignore_index=True)

    #pull the latest parts inventory from ADAM
    ai = AdamImport()
    ifile = ''.join([ADAM_PATH, '\\Incar\\Data\\INVEN.DBF'])
    ofile = ''.join([ADAM_EXPORT_PATH, 'INVEN.csv'])
    out_type = 'csv'

    #refresh the data if necessary
    if need_refresh(ofile):
        ai.DBFConverter(ifile, ofile, out_type)

    #read the CSV in a dataframe and convert the dates
    inv = pd.read_csv(ofile, engine='python')
    startdate = datetime.date.today() + datetime.timedelta(start_days)
    enddate = datetime.date.today() + datetime.timedelta(end_days)

    #initialize the parameters from the query string
    fdDate = str(field)
    #convert cost to an integer and divide by 100 to express it in dollars
    intCost = int(cost) / 100

    #filter out the data
    inv = inv[inv['ONHAND'] > 0]
    inv = inv[inv['COST'] > intCost]
    inv[fdDate] = pd.to_datetime(inv[fdDate])
    inv = inv[(inv[fdDate] < pd.to_datetime(startdate))
              & (inv[fdDate] > pd.to_datetime(enddate))]
    inv['ext'] = inv['ONHAND'] * inv['COST']

    #if the type is total just return the total ONHAND count
    if type == "total":
        inv_sum = inv['ONHAND'].sum()
        return inv_sum
    #if the type is detail prepare the data for detailed export
    elif type == "detail":

        #create a full part number field to filter on
        inv.SUFFIX = inv.SUFFIX.fillna('')
        inv['FULLPN'] = inv['PREFIX'] + inv['PARTNO'] + inv['SUFFIX']
        #print inv['PREFIX'],inv['PARTNO'],inv['SUFFIX'],inv['FULLPN']
        #create the filter to exclude stock parts
        invx = inv['FULLPN'].isin(stock['Alternate'])
        #print stock['Alternate'].loc[20:30]
        print invx
        #create a new inv dataframe without the stock parts
        inv = inv[~invx]
        inv_detail = inv[[
            'PREFIX', 'PARTNO', 'SUFFIX', 'DESC', 'ONHAND', 'DATEPURC',
            'DATESOLD', 'COST', 'ext', 'LOCATION'
        ]]
        inv_detail = inv_detail.sort(fdDate)
        return inv_detail
    elif type == "detail_stock":
        #in this case, return everything including stock parts
        inv_detail = inv[[
            'PREFIX', 'PARTNO', 'SUFFIX', 'DESC', 'ONHAND', 'DATEPURC',
            'DATESOLD', 'COST', 'ext', 'LOCATION'
        ]]
        inv_detail = inv_detail.sort(fdDate)
        return inv_detail
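A couple of hypothetical calls, following the sign convention in the docstring (the day ranges are arbitrary examples):

# total ONHAND count for parts last sold between one and two years ago
ttl = pa_Get_Parts_Count('total', -365, -730, 'DATESOLD')

# detailed dataframe for the same window, excluding the DPA stock parts
detail = pa_Get_Parts_Count('detail', -365, -730, 'DATESOLD', cost=1500)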