def import_athletes():
    upload_file = flask.request.files['members']
    file_path = "./tmp/bulk_members_upload.xlsx"
    upload_file.save(file_path)
    rows = p.get_records(file_name=file_path, start_row=1)
    # Retrieves header name for the excel file
    name_field = flask.request.form.get('name')
    last_name_field = flask.request.form.get('last_name')
    birth_date_field = flask.request.form.get('birth_date')
    birth_place_field = flask.request.form.get('birth_place')
    fiscal_code_field = flask.request.form.get('fiscal_code')
    address_field = flask.request.form.get('address')
    zip_code_field = flask.request.form.get('zip_code')
    city_field = flask.request.form.get('city')
    province_field = flask.request.form.get('province')
    gender_field = flask.request.form.get('gender')
    phone_field = flask.request.form.get('phone')
    email_field = flask.request.form.get('email')
    members = import_document(Member, rows, name_field, last_name_field,
                              birth_date_field, birth_place_field,
                              fiscal_code_field, address_field,
                              zip_code_field, city_field, province_field,
                              gender_field, phone_field, email_field)
    return flask.jsonify(members)
def process_vehicle_catalogs_related(request):
    model = request.GET.get('model')
    try:
        records = pyexcel.get_records(file_name=MODEL_OPTIONS[model]['path'])
    except Exception as err:
        return Response({'error': err.args},
                        status=status.HTTP_500_INTERNAL_SERVER_ERROR)
    count = 0
    for item in records:
        try:
            parent_object = MODEL_OPTIONS[model]['parent_model'].objects.get(
                id=item['parent_id'])
        except Brands.DoesNotExist:
            raise Exception('Brand not found')
        data = {
            'name': item['name'],
            MODEL_OPTIONS[model]['parent']: parent_object,
        }
        try:
            children = MODEL_OPTIONS[model]['model'](**data)
        except Exception as err:
            return Response({'error': err.args},
                            status=status.HTTP_500_INTERNAL_SERVER_ERROR)
        children.save()
        count += 1
    return Response({'message': f'{count} elements created'},
                    status=status.HTTP_201_CREATED)
def exceltodictionary(dataf, dic={}):
    # Read the spreadsheet and attach a 'Category' value to each user
    # already present in dic; the column keys come from the file's header row.
    data = pe.get_records(file_name=dataf)
    for line in data:
        usrname = line['-1']
        dic[usrname]['Category'] = line['']
    return dic
def process_postcodes(request):
    count = 0
    static_path = os.path.join(BASE_DIR, 'static')
    try:
        state = States.objects.get(name='Ciudad de México')
    except States.DoesNotExist:
        state = States.objects.create(name='Ciudad de México')
    file_path = static_path + '/catalogs/address/ciudad_mexico.xlsx'
    records = pyexcel.get_records(file_name=file_path)
    for item in records:
        try:
            city = Cities.objects.get(name=item['D_mnpio'])
        except Cities.DoesNotExist:
            city = Cities.objects.create(name=item['D_mnpio'], state=state)
        try:
            colony = Colonies.objects.get(name=item['d_asenta'])
        except Colonies.DoesNotExist:
            colony = Colonies.objects.create(name=item['d_asenta'], city=city)
        try:
            postcode = Postcodes.objects.get(postcode=item['d_codigo'], colony=colony)
        except Postcodes.DoesNotExist:
            postcode = Postcodes.objects.create(postcode=item['d_codigo'], colony=colony)
        if postcode:
            count += 1
    return JsonResponse({'message': f'{count} objects created'})
def record_nets_donated_excel(request):
    if request.method == 'POST' and request.FILES['excel_file']:
        myfile = request.FILES['excel_file']
        fs = FileSystemStorage()
        filename = fs.save(myfile.name, myfile)
        uploaded_file_url = fs.url(filename)
        missing_facilities = []
        records = pe.get_records(file_name=settings.BASE_DIR + uploaded_file_url)
        for record in records:
            beneficiary = record['beneficiary']
            invoice_no = record['invoice']
            warehouse = record['warehouse']
            nets_issued = record['nets']
            donor = record['donor']
            date_issued = record['date_issued']
            if Nets_donated.objects.filter(invoice_no=invoice_no).exists():
                continue
            issued = Nets_donated(beneficiary=beneficiary, invoice_no=invoice_no,
                                  nets_issued=nets_issued, donor_code=donor,
                                  date_issued=date_issued).save()
        messages.success(request, "Success! Nets donated records successfully saved.")
        return redirect('distribution:nets_donated')
    else:
        template = "distribution/excel-donation.html"
        context = {}
        return render(request, template, context)
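# A minimal sketch (not from the project above) showing that the round-trip through
# FileSystemStorage can be skipped: pyexcel also accepts raw bytes plus a file-type
# hint. `excel_file` stands for any Django UploadedFile; the helper name is made up.
import pyexcel as pe


def records_from_upload(excel_file):
    # UploadedFile.read() yields the raw bytes; the extension supplies the file type.
    file_type = excel_file.name.rsplit('.', 1)[-1]
    return pe.get_records(file_type=file_type, file_content=excel_file.read())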
def test_get_records_from_memory(self):
    data = [["X", "Y", "Z"], [1, 2, 3], [4, 5, 6]]
    content = pe.save_as(dest_file_type="xls", array=data)
    records = pe.get_records(
        file_content=content.getvalue(), file_type="xls"
    )
    eq_(records, [{"X": 1, "Y": 2, "Z": 3}, {"X": 4, "Y": 5, "Z": 6}])

def test_get_records_from_file(self):
    data = [["X", "Y", "Z"], [1, 2, 3], [4, 5, 6]]
    sheet = pe.Sheet(data)
    testfile = "testfile.xls"
    sheet.save_as(testfile)
    result = pe.get_records(file_name=testfile)
    eq_(result, [{"X": 1, "Y": 2, "Z": 3}, {"X": 4, "Y": 5, "Z": 6}])
    os.unlink(testfile)

def test_get_dict_from_file(self):
    data = [["X", "Y", "Z"], [1, 2, 3], [4, 5, 6]]
    sheet = pe.Sheet(data)
    testfile = "testfile.xls"
    sheet.save_as(testfile)
    result = pe.get_records(file_name=testfile)
    assert result == [{"X": 1, "Y": 2, "Z": 3}, {"X": 4, "Y": 5, "Z": 6}]
    os.unlink(testfile)
def upload_employeedata_step5(request):
    req = request.POST
    records = pe.get_records(file_name=req['file_name'])
    chk = insertData(records, req, request.user.user_id, req['file_name'])
    stat = None
    if req['data_type'] == 'Employee':
        sqlqry = "SELECT av_engine.av_trxn_company_employees_load_fn(%s, '%s', '%s','%s')" % (
            req['comp_id'], req['comp_name'], request.user.user_id, req['upload_id'])
        ret_msg = custom_query(sqlqry)
        if ret_msg == 'SUCCESS':
            stat = 'success'
            AvLogDataUploads.objects.filter(
                data_upload_id=req['upload_id']).update(
                    no_of_records=chk,
                    status='5 to 6 Master Updates Done',
                    upload_or_rollback='Upload to Rollback')
        else:
            AvLogDataUploads.objects.filter(
                data_upload_id=req['upload_id']).update(
                    no_of_records=chk,
                    status='5 to 6 Master Updates Error',
                    upload_or_rollback='Upload to Rollback')
            stat = 'error'
    elif req['data_type'] == 'Expenses':
        sqlqry = "SELECT av_engine.av_trxn_expenses_load_fn(%s, '%s', '%s','%s')" % (
            req['comp_id'], req['comp_name'], request.user.user_id, req['upload_id'])
        ret_msg = custom_query(sqlqry)
        if ret_msg == 'SUCCESS':
            stat = 'success'
            AvLogDataUploads.objects.filter(
                data_upload_id=req['upload_id']).update(
                    no_of_records=chk,
                    status='5 to 6 Master Updates Done',
                    upload_or_rollback='Upload to Rollback')
        else:
            stat = 'error'
            AvLogDataUploads.objects.filter(
                data_upload_id=req['upload_id']).update(
                    no_of_records=chk,
                    status='5 to 6 Master Updates Error',
                    upload_or_rollback='Upload to Rollback')
    json_data = json.dumps({
        'status': stat,
        'message': 'All records successfully inserted',
        'chk': chk,
        'file_name': req['file_name'],
        'upload_id': req['upload_id']
    })
    utilb.logManage(request.user.company.id, request.user.company.short_name,
                    datetime.now(), 'Data Load', 'Records Inserted',
                    'Information', 'Records Inserted', request.user.user_id,
                    req['upload_id'], 'uploadData')
    return HttpResponse(json_data, content_type='application/json')
def record_nets_distributed_excel(request):
    if request.method == 'POST' and request.FILES['excel_file']:
        myfile = request.FILES['excel_file']
        fs = FileSystemStorage()
        filename = fs.save(myfile.name, myfile)
        uploaded_file_url = fs.url(filename)
        missing_facilities = []
        records = pe.get_records(file_name=settings.BASE_DIR + uploaded_file_url)
        for record in records:
            if Facilities.objects.filter(mfl_code=record['facility']).exists():
                facility = get_object_or_404(Facilities, mfl_code=record['facility'])
                dist_month = record['dist_month']
                dist_year = record['dist_year']
                anc_nets = int(check_for_blank_cells(record['anc_nets']))
                cwc_nets = int(check_for_blank_cells(record['cwc_nets']))
                others_nets = int(check_for_blank_cells(record['others_nets']))
                total_nets = anc_nets + cwc_nets + others_nets
                bal_cf = int(check_for_blank_cells(record['bal_cf']))
                # if confirm_nets_issuance(total_nets, facility.net_balance, facility.system_net_balance):
                if Distribution_report.objects.filter(facility=facility,
                                                      dist_month=dist_month,
                                                      dist_year=dist_year).exists():
                    continue
                else:
                    distribution = Distribution_report(
                        facility=facility,
                        dist_month=dist_month,
                        dist_year=dist_year,
                        cwc_nets=cwc_nets,
                        anc_nets=anc_nets,
                        others_nets=others_nets,
                        total_nets=total_nets,
                        bal_cf=bal_cf
                    ).save()
                    facility.net_balance = bal_cf
                    facility.system_net_balance = facility.system_net_balance - total_nets
                    facility.save()
                # else:
                #     messages.error(request, "Error! Nets issued for {}, exceed the remaining balance!".format(facility))
                #     return redirect("distribution:record_distribution")
            else:
                missing_facilities.append(record['facility'])
                continue
        messages.success(request, "Success! Nets distribution reports successfully recorded.")
        request.session['missing_facilities'] = missing_facilities
        return redirect('distribution:nets_distributed')
    else:
        template = "distribution/excel-distribution.html"
        context = {}
        return render(request, template, context)
def students(request):
    if request.method == 'POST':
        form = AddStudentsForm(request.POST, request.FILES)
        if form.is_valid():
            file = handle_uploaded_file(request.FILES['file'])
            records = pe.get_records(file_name=file)
            i = 0
            repeated = []
            # materialise the admission numbers so the membership test works on every iteration
            students = [int(s.admission_no) for s in Students.objects.all()]
            print(students)
            for record in records:
                admission_no = record['admissionno']
                if admission_no not in students:
                    login = Login()
                    username = record['firstname'].lower() + "_" + str(record['admissionno'])
                    password = helpers.generateRandomPassword()
                    login.username = username
                    login.password = password
                    login.type = 'student'
                    login.status = '1'
                    login.save()
                    tomail = record['email']
                    subject = 'Codepad Student account created'
                    # NOTE: the mail body and the student-object creation are redacted in the source:
                    message = 'Your codepad student account has been created. Please note the following credentials - Username : '******', Password : '******'admissionno']
                    student.firstname = record['firstname']
                    student.lastname = record['lastname']
                    student.address = record['address']
                    student.contact = record['contact']
                    student.email = record['email']
                    student.department = record['department']
                    student.semester = record['semester']
                    student.save()
                    i += 1
                else:
                    repeated.append(admission_no)
            request.session['alert'] = str(i) + ' Student accounts created'
    else:
        # request.session['alert'] = None
        form = AddStudentsForm()
    data = Students.objects.all()
    return render(request, 'Faculty/students.html', {'form': form, 'students': data})
def uploadfj():
    file = request.files['file']
    # app.logger.info(file.filename)
    if file:
        filename = file.filename
        app.logger.info(filename)
        # pyexcel expects the raw bytes of the upload, not the FileStorage object itself
        data_xml = pyexcel.get_records(file_type="xlsx", file_content=file.read())
        # app.logger.info(data_xml)
        result = exec_stream_fj(data_xml)
        return json.dumps(result)
def getDependencies(rally):
    import pyexcel as pe
    import pyexcel.ext.xlsx

    records = pe.get_records(file_name="RallyUpload-DependenciestoCA-v2.xlsx")
    for record in records:
        print(record['RALLY ID'] + " - " + record['Successor'])
        addDependenciesToRally(rally, record['RALLY ID'], record['Successor'])
    return ()
def get_users(self):
    '''Opens an .xlsx file and reads its two columns (user and date),
    collecting the before/during/after data for the event.'''
    data = excel.get_records(file_name=sys.argv[1])
    users = {}
    for record in data:
        event = record['Date']
        usname = record['Screenname']
        if usname not in users:
            users.update({usname: event})
    return users
def main(argv):
    # TODO consider using argparse
    if len(argv) < 4:
        print(
            "Four parameters are mandatory: input file, picture directory, first name and last name",
            file=sys.stderr)
        return
    t = pe.get_records(file_name=argv[0])
    ft = FamilyTreeMapping(t, argv[1])
    ft.printGedcom(argv[2], argv[3])
def test_get_records_from_records(self):
    data = [
        {"X": 1, "Y": 2, "Z": 3},
        {"X": 4, "Y": 5, "Z": 6}
    ]
    result = pe.get_records(records=data)
    eq_(result, [
        {"X": 1, "Y": 2, "Z": 3},
        {"X": 4, "Y": 5, "Z": 6}
    ])

def test_get_records_from_records(self):
    data = [
        {"X": 1, "Y": 2, "Z": 3},
        {"X": 4, "Y": 5, "Z": 6}
    ]
    result = pe.get_records(records=data)
    assert result == [
        {"X": 1, "Y": 2, "Z": 3},
        {"X": 4, "Y": 5, "Z": 6}
    ]
def loadExcelInDict():
    import pyexcel
    from pyexcel._compact import OrderedDict

    file = 'C:\\Users\\r103co62\\Desktop\\Template.xlsx'
    # Get your data in an ordered dictionary of lists
    my_dict = pyexcel.get_dict(file_name=file, name_columns_by_row=0)
    # Get your data in a dictionary of 2D arrays
    book_dict = pyexcel.get_book_dict(file_name=file)
    # Retrieve the records of the file
    records = pyexcel.get_records(file_name=file)
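# A minimal sketch (made-up data, not the Template.xlsx above) of the three shapes
# returned by the calls in loadExcelInDict, using an in-memory array as the source.
import pyexcel

data = [["name", "qty"], ["apple", 3], ["pear", 5]]

# get_records -> list of row dicts keyed by the header row
records = pyexcel.get_records(array=data, name_columns_by_row=0)
# [{'name': 'apple', 'qty': 3}, {'name': 'pear', 'qty': 5}]

# get_dict -> one dict of column lists keyed by the header row
columns = pyexcel.get_dict(array=data, name_columns_by_row=0)
# {'name': ['apple', 'pear'], 'qty': [3, 5]}

# get_book_dict -> {sheet name: 2D array}, header row included
book = pyexcel.get_book_dict(bookdict={"Sheet1": data})
print(records, columns, book)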
def importjifang(mysql, datafile):
    records = pyexcel.get_records(file_name=datafile)
    rkeys = {}
    for k in records[0].keys():
        rkeys[k] = GetKey(k)
    name = datafile.split("_")[0]
    tbname = p.get_pinyin(name, "")
    tbname = "jt_" + tbname
    # records = pyexcel.get_dict(file_name="逻辑站点_2020-07-18.xlsx")
    # s = collections.Counter([v.strip() for v in records["逻辑站点名称"]])
    # print([k for k, v in s.items() if v > 1])
    # print(keys)
    with mysql.cursor() as cursor:
        cursor.execute("TRUNCATE TABLE %s;" % tbname)
        mysql.commit()
    index = 1
    with mysql.cursor() as cursor:
        for i, r in enumerate(records):
            if i < 4:
                continue
            keys = []
            values = []
            for k, v in r.items():
                if v:
                    keys.append(rkeys[k])
                    values.append(v)
            keysname = ", ".join(keys)
            valuesdata = ", ".join(["'" + v.strip() + "'" for v in values])
            sql = f"""INSERT INTO {tbname} ({keysname}) VALUES ({valuesdata}) ;"""
            # print(sql)
            # with mysql.cursor() as cursor:
            try:
                cursor.execute(sql)
            except Exception as e:
                print(e)
                print(sql)
            if index % 10000 == 0:
                try:
                    mysql.commit()
                    print(index)
                except Exception as e:
                    print(e)
                    print(sql)
            index += 1
        mysql.commit()
def _get_records(self):
    """
    Open the records as a list.

    If you use iget then it returns a generator and we can't easily
    save the state: pe.iget_records(file_name=self.SOURCE)

    :return:
    """
    records = pe.get_records(file_name=self.SOURCE)
    return records
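# A minimal sketch (assumed file name) of the streaming variant mentioned in the
# docstring above: iget_records reads rows lazily, so a large file is not loaded
# into memory at once, but the generator can only be consumed once and the file
# handle must be released explicitly.
import pyexcel as pe


def count_rows(source="records.xlsx"):
    total = sum(1 for _ in pe.iget_records(file_name=source))
    pe.free_resources()  # iget_records keeps the reader open until released
    return total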
def test_get_records_from_array(self):
    data = [
        ["X", "Y", "Z"],
        [1, 2, 3],
        [4, 5, 6]
    ]
    result = pe.get_records(array=data)
    eq_(result, [
        {"X": 1, "Y": 2, "Z": 3},
        {"X": 4, "Y": 5, "Z": 6}
    ])

def test_get_records_from_dict(self):
    data = {
        "X": [1, 4],
        "Y": [2, 5],
        "Z": [3, 6]
    }
    result = pe.get_records(adict=data)
    eq_(result, [
        {"X": 1, "Y": 2, "Z": 3},
        {"X": 4, "Y": 5, "Z": 6}
    ])

def test_get_records_from_dict(self):
    data = {
        "X": [1, 4],
        "Y": [2, 5],
        "Z": [3, 6]
    }
    result = pe.get_records(adict=data)
    assert result == [
        {"X": 1, "Y": 2, "Z": 3},
        {"X": 4, "Y": 5, "Z": 6}
    ]

def test_get_records_from_array(self):
    data = [
        ["X", "Y", "Z"],
        [1, 2, 3],
        [4, 5, 6]
    ]
    result = pe.get_records(array=data)
    assert result == [
        {"X": 1, "Y": 2, "Z": 3},
        {"X": 4, "Y": 5, "Z": 6}
    ]
def init_connection():
    vg_data = pyexcel.get_records(file_name='vgsales.csv')
    client = pymysql.connect(host='localhost', port=3306, user='******',
                             password='******',
                             cursorclass=pymysql.cursors.DictCursor)
    board = client.cursor()
    return [vg_data, client, board]
def handle_excel():
    records = pe.get_records(file_name="example.xlsx")
    for record in records:
        # column headers in the source workbook are Chinese:
        # 销售名称 = sales rep name, 剩余应收款 = outstanding receivables
        print("%s has outstanding receivables of %d" % (record['销售名称'], record['剩余应收款']))
        if int(record['剩余应收款']) > 0:
            print("Splitting this row out on its own...")
        elif '/' in record['销售名称']:
            print("This row has two sales reps")
        else:
            print("Outstanding receivables are 0, nothing to do...")
    print("Sheet splitting finished, moving on to the next step: sending the emails")
def test_issue_95_preserve_order_in_get_orders():
    test_data = [['a', 'b', 'c'], ['1', '2', '3'], ['4', '5', '6']]
    records = pe.get_records(array=test_data)
    result = []
    for record in records:
        for key, value in record.items():
            result.append([key, value])
    expected = [['a', '1'], ['b', '2'], ['c', '3'],
                ['a', '4'], ['b', '5'], ['c', '6']]
    eq_(result, expected)

def test_get_records_from_memory(self):
    data = [
        ["X", "Y", "Z"],
        [1, 2, 3],
        [4, 5, 6]
    ]
    content = pe.save_as(dest_file_type="xls", array=data)
    records = pe.get_records(file_content=content.getvalue(), file_type="xls")
    assert records == [
        {"X": 1, "Y": 2, "Z": 3},
        {"X": 4, "Y": 5, "Z": 6}
    ]
def process_vehicle_catalogs(request):
    model = request.GET.get('model')
    try:
        records = pyexcel.get_records(file_name=MODEL_OPTIONS[model]['path'])
    except Exception as err:
        return Response({'error': err.args},
                        status=status.HTTP_500_INTERNAL_SERVER_ERROR)
    count = 0
    for item in records:
        MODEL_OPTIONS[model]['model'].objects.create(name=item['name'])
        count += 1
    return Response({'message': f'{count} objects created'},
                    status=status.HTTP_201_CREATED)
def get_records_as_list_with_dicts(file_type, content, header, excel_header_fields):
    records = pyexcel.get_records(file_type=file_type, file_content=content)
    records_list = []
    excel_header_fields = [field.lower() for field in excel_header_fields]
    for record in records:
        row = {}
        for header_field in header:
            if header_field.lower() in excel_header_fields:
                row[header_field] = record[header_field]
        records_list.append(row)
    # print(f"AKHI: {records_list}")
    return records_list
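# A hedged usage sketch for the helper above: the CSV text, the header list and the
# column names are all made up here, just to show the expected shape of each argument.
csv_text = "Name,Age,City\nAda,36,London\nLin,29,Taipei\n"
rows = get_records_as_list_with_dicts(
    file_type="csv",
    content=csv_text,
    header=["Name", "City"],                      # columns the caller wants to keep
    excel_header_fields=["name", "age", "city"],  # columns present in the sheet
)
print(rows)  # [{'Name': 'Ada', 'City': 'London'}, {'Name': 'Lin', 'City': 'Taipei'}]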
def upload_employeedata_step2(request):
    req = request.POST
    records = pe.get_records(file_name=req['file_name'])
    if ('Employee ID' in records[0] and req['data_type'] == 'Employee') or \
            ('Report ID' in records[0] and req['data_type'] == 'Expenses'):
        json_data = json.dumps({
            'status': 'success',
            'message': 'File format is ok.',
            'file_name': req['file_name'],
            'upload_id': req['upload_id']
        })
        AvLogDataUploads.objects.filter(data_upload_id=req['upload_id']).update(
            status='2 of 6 File Formate Check')
        utilb.logManage(request.user.company.id, request.user.company.short_name,
                        datetime.now(), 'Data Load', 'Records Processed',
                        'Information', 'Checked file format', request.user.user_id,
                        req['upload_id'], 'uploadData')
    else:
        json_data = json.dumps({
            'status': 'danger',
            'message': 'File format is not valid.',
            'file_name': req['file_name'],
            'upload_id': req['upload_id']
        })
        AvLogDataUploads.objects.filter(data_upload_id=req['upload_id']).update(
            status='1 of 6 File Formate Check')
        utilb.logManage(request.user.company.id, request.user.company.short_name,
                        datetime.now(), 'Data Load', 'Records Processed',
                        'Information', 'Checked file format failed',
                        request.user.user_id, req['upload_id'], 'uploadData')
    return HttpResponse(json_data, content_type='application/json')
def get_records(self, **keywords):
    """Get a list of records from the file

    :param sheet_name: For an excel book, there could be multiple
        sheets. If it is left unspecified, the sheet at index 0 is
        loaded. For 'csv', 'tsv' file, *sheet_name* should be None
        anyway.
    :param keywords: additional key words
    :returns: A list of records
    """
    params = self.get_params(**keywords)
    if 'name_columns_by_row' not in params:
        params['name_columns_by_row'] = 0
    return pe.get_records(**params)
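# A minimal sketch (made-up file and sheet names) of the two keywords the wrapper
# above forwards to pyexcel: sheet_name picks one sheet out of an .xlsx book, and
# name_columns_by_row=0 tells pyexcel that the first row holds the column names.
import pyexcel as pe

records = pe.get_records(
    file_name="workbook.xlsx",   # hypothetical workbook
    sheet_name="Sheet2",         # defaults to the first sheet when omitted
    name_columns_by_row=0,       # use row 0 as the record keys
)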
def _method_pyexcel(self, out_sep=DEFAULT_OUT_SEP,
                    line_terminator=DEFAULT_LINE_TERMINATOR,
                    sheet_name=0, *args, **kwargs):
    """Do the conversion :term:`XLSX` -> :term:`CSV` using pyexcel library"""
    import pyexcel
    with open(self.outfile, "w") as out_stream:
        writer = csv.writer(out_stream, delimiter=out_sep,
                            lineterminator=line_terminator)
        first_row = True
        for row in pyexcel.get_records(file_name=self.infile):
            if first_row:
                writer.writerow([k for k, v in row.items()])
                first_row = False
            writer.writerow([v for k, v in row.items()])
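# An alternative sketch for the same XLSX -> CSV conversion: pyexcel can also write
# the CSV itself through save_as. The file names are placeholders, and dest_delimiter
# sets the output separator.
import pyexcel

pyexcel.save_as(
    file_name="report.xlsx",      # hypothetical input workbook
    dest_file_name="report.csv",  # CSV written directly by pyexcel
    dest_delimiter=";",
)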
def upload_employeedata_step3(request):
    req = request.POST
    records = pe.get_records(file_name=req['file_name'])
    stat = True
    if req['data_type'] == 'Employee':
        ls = []
        for record in records:
            if record['Employee ID'] in ls:
                stat = False
            ls.append(record['Employee ID'])
    elif req['data_type'] == 'Expenses':
        ls = []
        for record in records:
            if record['Report Line ID'] in ls:
                stat = False
            ls.append(record['Report Line ID'])
    if stat == True:
        status = 'success'
        msg = 'All records are unique'
        AvLogDataUploads.objects.filter(data_upload_id=req['upload_id']).update(
            status='4 of 6 File Formate Check')
        utilb.logManage(request.user.company.id, request.user.company.short_name,
                        datetime.now(), 'Data Load', 'Duplicate Records',
                        'Information', 'Duplicate Records checked',
                        request.user.user_id, req['upload_id'], 'uploadData')
    else:
        status = 'danger'
        msg = 'Some records are duplicate'
        AvLogDataUploads.objects.filter(data_upload_id=req['upload_id']).update(
            status='2 of 6 File Formate Check')
        utilb.logManage(request.user.company.id, request.user.company.short_name,
                        datetime.now(), 'Data Load', 'Duplicate Records',
                        'Information', 'Duplicate Records checked failed',
                        request.user.user_id, req['upload_id'], 'uploadData')
    json_data = json.dumps({
        'status': status,
        'message': msg,
        'file_name': req['file_name'],
        'upload_id': req['upload_id']
    })
    return HttpResponse(json_data, content_type='application/json')
def init_connection():
    vg_data = pyexcel.get_records(file_name='vgsales.csv')
    client = pymysql.connect(
        host='localhost',
        port=3306,
        user='******',
        password='******',
        # turn each row into a dictionary / list of dictionaries
        cursorclass=pymysql.cursors.DictCursor
    )
    board = client.cursor()
    return [vg_data, board]
def get_data(self):
    '''Opens an .xlsx file and reads its two columns (user and date),
    collecting the before/during/after data for the event.'''
    data = excel.get_records(file_name=sys.argv[1])
    users, dates = ({} for i in range(2))
    metadata = {}
    tweets = {}
    for record in data:
        hashtag = record['Hashtag']
        if hashtag != 'none' and hashtag != '' and '#' in hashtag:
            if hashtag not in tweets:
                print(hashtag)
                t = self.get_tweets(hashtag)
                tweets.update({hashtag: t})
        else:
            pass
    print(tweets)
def test_issue_95_preserve_order_in_get_orders():
    test_data = [["a", "b", "c"], ["1", "2", "3"], ["4", "5", "6"]]
    records = p.get_records(array=test_data)
    result = []
    for record in records:
        for key, value in record.items():
            result.append([key, value])
    expected = [
        ["a", "1"],
        ["b", "2"],
        ["c", "3"],
        ["a", "4"],
        ["b", "5"],
        ["c", "6"],
    ]
    eq_(result, expected)
def upload_employeedata_step6(request):
    req = request.POST
    records = pe.get_records(file_name=req['file_name'])
    if req['data_type'] == 'Employee':
        AvLogDataUploads.objects.filter(data_upload_id=req['upload_id']).update(
            status='6 of 6 Audvantage Done')
        msg = '6 to 6 Audvantage Done'
    elif req['data_type'] == 'Expenses':
        sqlqry = "SELECT av_engine.av_check_exp_engine_fn(%s, '%s', '%s','%s')" % (
            req['comp_id'], req['comp_name'], request.user.user_id, req['upload_id'])
        ret_msg = custom_query(sqlqry)
        if ret_msg == 'SUCCESS':
            msg = '6 to 6 Audvantage Done'
            AvLogDataUploads.objects.filter(data_upload_id=req['upload_id']).update(
                status='6 of 6 Audvantage Done')
        else:
            AvLogDataUploads.objects.filter(data_upload_id=req['upload_id']).update(
                status='6 of 6 Audvantage Error')
            msg = '6 to 6 Audvantage Error'
    json_data = json.dumps({
        'status': 'success',
        'message': msg,
        'file_name': req['file_name'],
        'upload_id': req['upload_id'],
        'logData': getLogData(request.user.company.id)
    })
    utilb.logManage(request.user.company.id, request.user.company.short_name,
                    datetime.now(), 'Data Load', 'Load End', 'Information',
                    'Load End', request.user.user_id, req['upload_id'], 'uploadData')
    return HttpResponse(json_data, content_type='application/json')
def uploadEmployeeData(request):
    json_data = None
    DataType = AvMasterUploadDataType.objects.order_by('data_type')
    DataType_json = serializers.serialize('json', DataType)
    DataType_json_list = json.loads(DataType_json)
    cursor = connection.cursor()
    rolePermisn = utilb.userRolePermission(request.user.company.id, request.user.user_id)
    compData = ''
    if 'View All Companies' in rolePermisn:
        compData = AvMasterCompany.objects.values('id', 'short_name').order_by('short_name')
    elif 'View Company' in rolePermisn:
        compData = AvMasterCompany.objects.values('id', 'short_name').filter(
            id=request.user.company.id).order_by('short_name')
    if request.method == "POST":
        data = request.FILES['file']
        path = default_storage.save('tmp/import.xls', ContentFile(data.read()))
        tmp_file = os.path.join(settings.MEDIA_ROOT, path)
        records = pe.get_records(file_name=tmp_file)
        req = request.POST
        cursor.execute("SELECT nextval('av_log_data_upload_id_seq')")
        upload_id = cursor.fetchone()
        cursor.close()
        uploadID = int(''.join(map(str, upload_id)))
        c = AvLogDataUploads(data_upload_id=uploadID,
                             company_id=request.user.company.id,
                             company_name=request.user.company.short_name,
                             data_type=req['data_type'],
                             uploaded_by=request.user.user_id,
                             uploaded_date=datetime.now(),
                             no_of_records='0',
                             status='1 of 6 - File Upload',
                             upload_or_rollback='File Upload')
        c.save()
        json_data = json.dumps({
            'status': 'success',
            'message': 'File uploaded successfully.',
            'file_name': tmp_file,
            'upload_id': uploadID,
            'logData': getLogData(request.user.company.id)
        })
        utilb.logManage(request.user.company.id, request.user.company.short_name,
                        datetime.now(), 'Data Load', 'Load Start', 'Information',
                        'File Uploaded', request.user.user_id, uploadID, 'uploadData')
    else:
        json_data = json.dumps({
            'DataType': DataType_json_list,
            'compData': list(compData),
            'UserCompId': request.user.company.id,
            'logData': getLogData(request.user.company.id),
            'rolePermission': utilb.userRolePermission(request.user.company.id,
                                                       request.user.user_id)
        })
    return HttpResponse(json_data, content_type='application/json')
##from linecache import*
##x=open('filex.txt')
##a=[]
##c=[]
##a=x.readlines()
##for i in range(len(a)):
##    c.append(a[i].split(' '))
##d=[]
##for i in range (len(c)):
##    d.append(int(c[i][1])*int(c[i][2]))
##    print(c[i][0]+' get '+ str(d[i])+' this month')
##
##import webbrowser
##import time
##from turtle import*
##def getinternet(i):
##    lit=['https://www.facebook.com','https://www.youtube.com/','http://www.tutorialspoint.com/python/time_sleep.htm']
##    if i=='1':
##        webbrowser.open(lit[0])
##    elif i=='2':
##        webbrowser.open(lit[1])
##    else:
##        webbrowser.open(lit[2])
##i=input('number plz:')
##getinternet(i)

import pyexcel as pe
import pyexcel.ext.xls

records = pe.get_records(file_name='xxx.xls')
for record in records:
    print("%s is aged at %d" % (record['Name'], record['Age']))
import pyexcel as pe
import pyexcel.ext.xls
import pyexcel.ext.xlsx

records = pe.get_records(file_name="/Users/cyberprism/Desktop/CSE_S4.xlsx")
for record in records:
    print(record['firstname'], record['lastname'])
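# Note (based on pyexcel's plugin mechanism, not on the snippet above): the
# pyexcel.ext.xls / pyexcel.ext.xlsx imports were only required by old pyexcel
# releases; with current versions it is enough to install the format plugins
# (pyexcel-xls, pyexcel-xlsx), which register themselves automatically.
import pyexcel as pe

records = pe.get_records(file_name="students.xlsx")  # hypothetical file
for record in records:
    print(record)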
import datetime
import json
import urllib

import pymongo
import ujson
import pyexcel as pe
import pyexcel.ext.xlsx

client = pymongo.MongoClient()
today = datetime.date.today()
today = today.strftime("%Y%m%d")
# create database and collection
db1 = client['ACAHack']
col = 'formulary_' + today
# open file
records = pe.get_records(file_name="a.xlsx")
counter = 0
for record in records:
    url = record["URL Submitted"]
    if ".json" in url:
        response = urllib.urlopen(url)
        data = json.loads(response.read())
        for url2 in data["formulary_urls"]:
            print(url2)
            response2 = urllib.urlopen(url2)
            print(type(response2))
            dic = ujson.loads(response2.read())
            for item in dic:
                try:
def test_get_records(self):
    pe.get_records("something")

def test_get_records_from_sql(self):
    records = pe.get_records(session=Session(), table=Signature)
    assert records == [{"X": 1, "Y": 2, "Z": 3}, {"X": 4, "Y": 5, "Z": 6}]

def test_get_records(self):
    expected = pe.get_records("something")
    assert expected == None
import smtplib
import pyexcel as pe
import pyexcel.ext.xlsx


def send_email(message, rev):
    username = "******"
    password = "******"
    email_helper = smtplib.SMTP('smtp.gmail.com:587')
    email_helper.starttls()
    email_helper.login(username, password)
    email_helper.sendmail("*****@*****.**", rev, message)
    email_helper.quit()


records = pe.get_records(file_name="info.xlsx")
for record in records:
    n = record['Name']
    send_email("Subject :" + n + "oi\nTran Thanh Tu", record['Email'])

# class calculateDistance(city):
#     hanoi = {"name" : "Ha Noi", "longt: 50, "lat" : 75}
#     daiduong = {"name": "Hai Duong", "longt: 25, "lat" : 10}
#     def __init__(self, name, longt, lat ):
#         self.name = name
#         self.longt = longt
#         self.lat = lat
#     def distance(self):

# class City:
#     def __init__(self, name, longt, lat):
#         self.name = name
#         self.longt = longt
#         self.lat = lat
import sys
reload(sys)
sys.setdefaultencoding('utf-8')

import pyexcel as pe
import pyexcel.ext.xlsx

records = pe.get_records(file_name="yahoo.xlsx")
c = 0
# f = open("tickers.txt", "w")
for record in records:
    print("%s::%s" % (record['Ticker'].encode('utf-8'), record['Name'].encode('utf-8')))
    # f.write("%s::%s\n" % (record['Ticker'], record['Name']))
# f.close()