Example no. 1
0
def load_csv2db(csv, header_map, resource, file_type='rep'):
    """Import a pandas DataFrame of contact records into the database.

    Parameters
    ----------
    csv : pandas.DataFrame
        Parsed spreadsheet of contact records (despite the name, this is
        a DataFrame, not raw CSV text).
    header_map : dict
        Mapping of spreadsheet column names to database column names.
    resource : import_export.resources.ModelResource
        django-import-export resource that performs the actual import.
    file_type : str
        'rep' for rep-list files (adds a 'misc' column and flags ``done``),
        anything else for SF contact files.

    Returns
    -------
    list
        The DataFrame's original column headers.
    """
    global done  # signals completion of a rep import to the caller
    start = perf_counter()
    dataset = Dataset()
    pd_csv = csv
    csv_header = list(pd_csv)

    try:
        if file_type == 'rep':
            # Concatenate the record data into a misc field for later restoration.
            pd_csv['misc'] = misc_col(csv, csv_header)
        # Map rep-list headers to db headers.
        pd_csv.rename(columns=header_map, inplace=True)
        # Add id col for django-import-export.
        pd_csv['Id'] = np.nan

        # Import contact records.
        dataset.csv = pd_csv.to_csv()
        resource.import_data(dataset, dry_run=False)
    except Exception as exc:
        # Was a bare `except:` — keep the best-effort behavior but surface
        # the actual cause instead of silently discarding it.
        print("lost the pandas csv: %s" % exc)
    end = perf_counter()  # stop timer
    elapsed = end - start  # was `time`, which shadows the stdlib module name
    if file_type == 'rep':
        done = True
        # .count() issues COUNT(*) instead of materializing every row.
        uploadTime.objects.create(num_records=repContact.objects.count(),
                                  seconds=round(elapsed, 2))
    else:
        uploadTime.objects.create(num_records=sfcontact.objects.count(),
                                  seconds=round(elapsed, 2))
    return csv_header
Example no. 2
0
    def clean_events_sheet(self):
        """Validate the uploaded spreadsheet and stash the parsed Dataset.

        Accepts ``.xlsx`` or ``.csv`` uploads, loads them into a tablib
        ``Dataset``, and verifies every required field is present in the
        header row.  On success the parsed data is stored on the form as
        ``self.uploaded_events`` and the original upload is returned.

        Raises:
            ValidationError: if the file cannot be parsed, has an
                unrecognized extension, or is missing required fields.
        """
        uploaded_file = self.cleaned_data['events_sheet']
        data = Dataset()
        if uploaded_file.name.endswith('.xlsx'):
            try:
                data.xlsx = uploaded_file.read()
            except Exception:
                logger.exception('Could not read Excel workbook upload')
                raise ValidationError('Could not read Excel workbook')
        elif uploaded_file.name.endswith('.csv'):
            try:
                data.csv = uploaded_file.read().decode('utf-8')
            except Exception:
                logger.exception('Could not read CSV upload')
                raise ValidationError('Could not read CSV')
        else:
            raise ValidationError('Unrecognized file type for "%s"' %
                                  uploaded_file.name)

        missing_field_list = ', '.join(self.required_fields -
                                       set(data.headers))
        if missing_field_list:
            raise ValidationError(
                'Missing fields in uploaded spreadsheet: %s' %
                missing_field_list)

        # Plain attribute assignment; setattr() with a constant name was
        # needless indirection.
        self.uploaded_events = data
        return uploaded_file
Example no. 3
0
def cmd_import(args):
    """Merge battery statistics from an external CSV file into the database.

    ``args.get(0)`` is the path of the CSV file to import.  The file is
    parsed into a tablib Dataset, each row normalized via
    ``import_format``, stacked onto the current dataset, sorted by
    'time', and written back zlib-compressed.
    """
    if not os.path.exists(COMO_BATTERY_FILE):
        current_dataset = create_database()
    else:
        current_dataset = read_database()
    # Expand '~' once so the existence check and open() agree on the path
    # (the original tested the raw path but opened the expanded one).
    path = os.path.expanduser(args.get(0))
    if os.path.exists(path):
        import_dataset = Dataset()
        with open(path, "r") as import_file:
            import_dataset.csv = import_file.read()
        # list(): map() is lazy in Python 3 and tablib needs concrete rows.
        import_dataset.dict = list(map(import_format, import_dataset.dict))
        new = current_dataset.stack(import_dataset).sort('time')

        # zlib works on bytes: encode the JSON and write in binary mode.
        with open(COMO_BATTERY_FILE, 'wb') as como:
            como.write(zlib.compress(new.json.encode('utf-8')))

        puts(colored.white("battery statistics imported"))
    else:
        error("Couldn't open file: %s" % args.get(0))
Example no. 4
0
def cmd_import(args):
    """Merge battery statistics from an external CSV file into the database.

    ``args.get(0)`` is the path of the CSV file to import.  The file is
    parsed into a tablib Dataset, each row normalized via
    ``import_format``, stacked onto the current dataset, sorted by
    'time', and written back zlib-compressed.
    """
    if not os.path.exists(COMO_BATTERY_FILE):
        current_dataset = create_database()
    else:
        current_dataset = read_database()
    # Expand '~' once so the existence check and open() agree on the path
    # (the original tested the raw path but opened the expanded one).
    path = os.path.expanduser(args.get(0))
    if os.path.exists(path):
        import_dataset = Dataset()
        with open(path, "r") as import_file:
            import_dataset.csv = import_file.read()
        # list(): map() is lazy in Python 3 and tablib needs concrete rows.
        import_dataset.dict = list(map(import_format, import_dataset.dict))
        new = current_dataset.stack(import_dataset).sort('time')

        # zlib works on bytes: encode the JSON and write in binary mode.
        with open(COMO_BATTERY_FILE, 'wb') as como:
            como.write(zlib.compress(new.json.encode('utf-8')))

        puts(colored.white("battery statistics imported"))
    else:
        error("Couldn't open file: %s" % args.get(0))
Example no. 5
0
def scrape(request):
    """Run the FINRA scraper job and return its results as a CSV download.

    The apex callout sends GET params with group, indi, and channel
    attrs; if not filled they equal 0.
    """
    print(request.GET)
    logging.debug(request.GET)

    # export is a pandas DataFrame of scraper results.
    export = finra_check_job(request.GET)

    print('run scraper')
    print('get results from scraper')
    response = HttpResponse(content_type='text/csv')
    response['Content-Disposition'] = 'attachment; filename="Scraper.csv"'
    # Write the DataFrame straight into the response.  The previous
    # round-trip (DataFrame -> csv text -> tablib Dataset -> csv.writer)
    # dropped the header row, because iterating a tablib Dataset yields
    # data rows only.
    response.write(export.to_csv(index=False))
    return response
Example no. 6
0
 def parse(raw):
     """Parse raw CSV text into a tablib Dataset and print it as JSON.

     raw: CSV content as a string; the Dataset's JSON rendering is
     printed to stdout.  Returns None.
     """
     data = Dataset()
     data.csv = raw
     # print is a function in Python 3; the statement form was a syntax error.
     print(data.json)
 def import_csv(self, csv, uni, user):
     """Wrap the raw CSV payload in a Dataset and delegate to import_data.

     csv: CSV content (coerced to str); uni/user are forwarded unchanged.
     Returns whatever self.import_data returns.
     """
     payload = Dataset()
     payload.csv = str(csv)
     return self.import_data(payload, uni=uni, user=user)