def manageShipping(request):
    """Library-staff dashboard for marking book orders as shipped.

    GET renders the shipping dashboard; POST marks the selected order as
    shipped and records a Shipping entry, then redirects back. Access is
    limited to users whose email appears in ShippingKey; all others 404.

    Fix: the original iterated ShippingKey rows and raised Http404 as soon
    as the FIRST row's email did not match, so only the first key ever
    granted access. Check membership over the whole table instead. Also
    renamed the local 'type' (shadowed the builtin).
    """
    allowed_emails = ShippingKey.objects.values_list('email', flat=True)
    if request.user.email not in allowed_emails:
        raise Http404

    if request.method == "POST":
        book = Book.objects.get(id=request.POST["book"])
        shipment_type = request.POST["type"]
        if shipment_type == "borrow":
            # An outstanding borrow has no return date yet.
            bookorder = BookOrder.objects.get(book=book, dateReturned=None)
        else:
            # A return shipment: take the last completed order for the book
            # (preserves the original "last one wins" loop semantics).
            bookorder = None
            for order in BookOrder.objects.filter(book=book).exclude(dateReturned=None):
                bookorder = order
        bookorder.shipped = True
        bookorder.save()
        Shipping(
            order=bookorder,
            type=shipment_type,
            shippedOn=datetime.now(),
            shippedBy=UserProfile.objects.get(user=request.user),
        ).save()
        return HttpResponseRedirect("%slibraryPortal/manageShipping" % settings.SITE_URL)

    # GET: dashboard lists pending outbound, pending return, and overdue orders.
    orderedBooks = BookOrder.objects.filter(shipped=False, dateReturned=None)
    returnedBooks = BookOrder.objects.filter(shipped=False).exclude(dateReturned=None)
    lateBooks = BookOrder.objects.filter(dueDate__lt=datetime.now(), dateReturned=None)
    return render_to_response('libraryPortal/shippingDashboard.html', locals(),
                              context_instance=global_context(request))
def get(self):
    """OAuth2 callback for Google sign-in.

    Exchanges the authorization code for an access token, fetches the
    Google+ profile, upserts the Account entity keyed by the account
    email, then sets session cookies and redirects home.

    Fix: the update path called guser.get() three separate times (set
    attribute, set attribute, put), relying on ndb's in-context cache
    returning the same instance; fetch the entity once.
    """
    args = dict(
        client_id='261144481778-2rppbrlfrr1uhkv6t1f0d27s7n2334rh.apps.googleusercontent.com',
        redirect_uri="http://pinnerspace.appspot.com/oauth2callback",
    )
    args["code"] = self.request.get("code")
    # NOTE(review): client secret is hard-coded in source; consider moving
    # it into configuration.
    args["client_secret"] = 'rXfCRY5tP8Q9OveqcnlPwJKe'
    args["grant_type"] = "authorization_code"
    request = urllib2.Request(url="https://accounts.google.com/o/oauth2/token",
                              data=urllib.urlencode(args))
    response = json.load(urllib2.urlopen(request))
    access_token = response["access_token"]
    profile = json.load(urllib.urlopen(
        "https://www.googleapis.com/plus/v1/people/me?" +
        urllib.urlencode(dict(access_token=access_token))))
    # Pick the address Google marks as the primary "account" email.
    emailacc = ""
    for email in profile["emails"]:
        if email["type"] == "account":
            emailacc = email["value"]
    guser = ndb.Key('Account', str(emailacc))
    account = guser.get()
    if account is None:
        account = Account(id=str(emailacc), accid=str(emailacc),
                          usernick=profile["displayName"], email=str(emailacc),
                          access_token=access_token, login_type="google")
    else:
        account.access_token = access_token
        account.login_type = "google"
    account.put()
    self.response.set_cookie("user", str(emailacc),
                             expires=datetime.now() + timedelta(days=1))
    self.response.set_cookie("token", str(access_token),
                             expires=datetime.now() + timedelta(days=1))
    self.redirect("/")
def loansReportView(request, template_name='loan/loansReport.html'):
    """Render loan statistics by borrower age group and by book
    classification for a date range taken from the query string.

    Defaults to the current month when no ``start`` parameter is given;
    ``_btn == 'Imprimir Reporte'`` switches to the printable template.

    Fix: use ``request.GET.get`` so requests without ``start``/``_btn``
    no longer raise MultiValueDictKeyError; the repeated filter chains
    are factored into two local helpers.
    """
    if request.GET.get('start', '') != '':
        fechaInicio = request.GET['start']
        fechaFin = request.GET['end']
    else:
        fechaInicio = datetime.now().strftime("%Y-%m-01 %H:%M")
        fechaFin = datetime.now().strftime("%Y-%m-%d %H:%M")
    if request.GET.get('_btn') == 'Imprimir Reporte':
        template_name = 'loan/loansReportR.html'
    fechaInicioO = datetime.strptime(fechaInicio, '%Y-%m-%d %H:%M')

    def _loans_by_age(min_age, max_age=None):
        # Loans in the date range whose borrower age is in [min_age, max_age).
        qs = Loan.objects.filter(user__age__gte=min_age,
                                 loan_date__gt=fechaInicio,
                                 loan_date__lte=fechaFin)
        if max_age is not None:
            qs = qs.filter(user__age__lt=max_age)
        return qs.count()

    def _book_loans(classification):
        # Book loans in the date range for one classification name.
        return BookLoan.objects.filter(
            book__book_classification__name=classification,
            loan__loan_date__gt=fechaInicio,
            loan__loan_date__lte=fechaFin).count()

    mayores = _loans_by_age(60)
    adultos = _loans_by_age(19, 60)
    jovenes = _loans_by_age(13, 19)
    ninos = _loans_by_age(6, 13)
    preescolar = _loans_by_age(3, 6)
    consulta = _book_loans('CONSULTA')
    general = _book_loans('GENERAL')
    # Trailing space kept deliberately: it matches the stored name.
    debilesvisuales = _book_loans('MATERIAL PARA DÉBILES VISUALES E INVIDENTES ')
    infantil = _book_loans('INFANTIL')
    audiovisual = _book_loans('MATERIAL AUDIO VISUAL')
    now = datetime.now()
    c = {'mayores': mayores, 'adultos': adultos, 'jovenes': jovenes,
         'ninos': ninos, 'preescolar': preescolar, 'consulta': consulta,
         'general': general, 'debilesvisuales': debilesvisuales,
         'infantil': infantil, 'audiovisual': audiovisual,
         'fechaActual': now.strftime("%d/%m/%Y a las %H:%M:%S"),
         'fechaInicio': fechaInicio, 'fechaFin': fechaFin,
         'fechaInicioO': fechaInicioO}
    return render_to_response(template_name, c,
                              context_instance=RequestContext(request))
def Bpp_callback(self, address, data, cb0=-1, cb1=-1):
    """Bus-protocol transfer callback: log each address/data pair and
    accumulate CDC register reads into one CSV row per measurement group.

    Registers 116/118/119/121 carry the first CDC's cmeas/cref/crev/cpar
    values and 122-125 the second CDC's. Address 116 opens a "group";
    address 125 closes it by writing the accumulated row.
    """
    # Console + logfile copies of the raw transfer (cb1 low == ACK).
    print("@" + str(self.count) + " Time: " + datetime.now().strftime("%Y-%m-%d %H:%M:%S.%f")[:-3] + " ADDR: 0x" + binascii.hexlify(address) + " DATA: 0x" + binascii.hexlify(data) + " (ACK: " + str(not cb1) + ")")
    print >> logfile, "@" + str(self.count) + " Time: " + datetime.now().strftime("%Y-%m-%d %H:%M:%S.%f")[:-3] + " ADDR: 0x" + binascii.hexlify(address) + " DATA: 0x" + binascii.hexlify(data) + " (ACK: " + str(not cb1) + ")"
    if (str(int(binascii.hexlify(address), 16)) == "116"):
        # Address 116 starts a new measurement group and stamps it.
        self.cdc_group = True
        self.cdc_cmeas = int(binascii.hexlify(data), 16)
        self.cdc_time = datetime.now().strftime("%H:%M:%S.%f")[:-3]
        self.cdc_date = datetime.now().strftime("%Y-%m-%d")
    elif (str(int(binascii.hexlify(address), 16)) == "118"):
        if self.cdc_group:
            self.cdc_cref = int(binascii.hexlify(data), 16)
    elif (str(int(binascii.hexlify(address), 16)) == "119"):
        if self.cdc_group:
            self.cdc_crev = int(binascii.hexlify(data), 16)
    elif (str(int(binascii.hexlify(address), 16)) == "121"):
        if self.cdc_group:
            self.cdc_cpar = int(binascii.hexlify(data), 16)
    elif (str(int(binascii.hexlify(address), 16)) == "122"):
        if self.cdc_group:
            self.cdc2_cmeas = int(binascii.hexlify(data), 16)
    elif (str(int(binascii.hexlify(address), 16)) == "123"):
        if self.cdc_group:
            self.cdc2_cref = int(binascii.hexlify(data), 16)
    elif (str(int(binascii.hexlify(address), 16)) == "124"):
        if self.cdc_group:
            self.cdc2_crev = int(binascii.hexlify(data), 16)
    elif (str(int(binascii.hexlify(address), 16)) == "125"):
        if self.cdc_group:
            self.cdc2_cpar = int(binascii.hexlify(data), 16)
            # assumes the row write, counter bump, group reset, and the
            # killcount check belong to the group-closing register (125)
            # -- TODO confirm nesting against the original source layout.
            wr.writerow([self.cdc_date, self.cdc_time, self.cdc_cmeas, self.cdc_cref, self.cdc_crev, self.cdc_cpar, self.cdc2_cmeas, self.cdc2_cref, self.cdc2_crev, self.cdc2_cpar])
            self.count += 1
            self.cdc_group = False
            # Stop capturing after the configured number of rows.
            if self.count > self.args.killcount:
                self.exit()
def log_fiskal(self):
    """Insert one fiskal_log row describing the just-finished fiscalization
    message: request/response payloads, error, timing, and the related
    business-space or invoice id.

    Fix: ``self.msgtype in ('echo')`` tested substring membership in the
    string 'echo' (parentheses alone do not make a tuple); use a real
    one-element tuple so only the exact message type matches.
    """
    fiskal_prostor_id = invoice_id = None
    if self.msgtype in ('echo',):
        fiskal_prostor_id = self.oe_id
    elif self.msgtype in ('prostor_prijava', 'prostor_odjava', 'PoslovniProstor'):
        fiskal_prostor_id = self.oe_id
    elif self.msgtype in ('racun', 'racun_ponovo', 'Racun'):
        invoice_id = self.oe_id
    # Fall back to company 1 when the originating object has no company.
    company_id = self.oe_obj and self.oe_obj.company_id and self.oe_obj.company_id.id or 1
    self.set_stop_time()
    # Wall-clock processing time, rendered as "seconds.microseconds s".
    t_obrada = self.stop_time['time_stamp'] - self.start_time['time_stamp']
    time_obr = '%s.%s s' % (t_obrada.seconds, t_obrada.microseconds)
    self.cr.execute("""
        INSERT INTO fiskal_log(
            user_id, create_uid, create_date,
            name, type, time_stamp,
            sadrzaj, odgovor, greska,
            fiskal_prostor_id, invoice_id, time_obr,
            company_id)
        VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s);
        """, (self.uid, self.uid, datetime.now(),
              self.zaglavlje.IdPoruke, self.msgtype, datetime.now(),
              str(self.poruka_zahtjev), str(self.poruka_odgovor), self.greska,
              fiskal_prostor_id, invoice_id, time_obr,
              company_id))
def add_todo(args):
    """Add a new item to the list of things todo.

    ``args`` may be a plain string, a non-empty sequence of words (joined
    with spaces), or empty (the user is prompted). Honors the PRE_DATE
    config flag by stamping today's date after any (A)-(X) priority tag.
    Returns the 1-based line number the item was written on.

    Fixes: regex patterns are now raw strings (the escapes were being
    interpreted by Python first), and the line count no longer uses a
    list comprehension that shadowed the local ``l``.
    """
    if str(args) == args:
        # Already a single string.
        line = args
    elif len(args) >= 1:
        line = concat(args, " ")
    else:
        line = prompt("Add:")
    prepend = CONFIG["PRE_DATE"]
    # Line number the new entry will occupy (existing items + 1).
    lineno = sum(1 for _ in iter_todos()) + 1
    pri_re = re.compile(r"(\([A-X]\))")
    if pri_re.match(line) and prepend:
        # Insert the date right after the priority tag.
        line = pri_re.sub(concat([r"\g<1>", datetime.now().strftime(" %Y-%m-%d ")]), line)
    elif prepend:
        line = concat([datetime.now().strftime("%Y-%m-%d "), line])
    with open(CONFIG["TODO_FILE"], "a") as fd:
        fd.write(concat([line, "\n"]))
    s = "TODO: '{0}' added on line {1}.".format(line, lineno)
    print(s)
    if CONFIG["USE_GIT"]:
        _git_commit([CONFIG["TODO_FILE"]], s)
    return lineno
def loansView(request):
    """Paginated list of loans in a date range from the query string;
    defaults to the current month when ``start`` is absent or empty.

    Fix: use ``request.GET.get`` so a request without ``start`` no longer
    raises MultiValueDictKeyError.
    """
    if request.GET.get('start', '') != '':
        fechaInicio = request.GET['start']
        fechaFin = request.GET['end']
    else:
        fechaInicio = datetime.now().strftime("%Y-%m-01 %H:%M")
        fechaFin = datetime.now().strftime("%Y-%m-%d %H:%M")
    loan_list = Loan.objects.filter(loan_date__gt=fechaInicio).filter(loan_date__lte=fechaFin)
    paginator = Paginator(loan_list, 10)  # 10 loans per page
    page = request.GET.get('page')
    try:
        loans = paginator.page(page)
    except PageNotAnInteger:
        # If page is not an integer, deliver first page.
        loans = paginator.page(1)
    except EmptyPage:
        # If page is out of range (e.g. 9999), deliver last page of results.
        loans = paginator.page(paginator.num_pages)
    c = {'Loans': loans, 'fechaInicio': fechaInicio, 'fechaFin': fechaFin}
    return render_to_response('loan/loans.html', c,
                              context_instance=RequestContext(request))
def post(self):
    """Receive a chat message and echo it to every connected client.

    Tags the message with a server-side sequence number (a shared
    memcache counter) and the server time, then fans it out over the
    Channel API, timing each send.
    """
    channel_id = self.request.get('channel_id');
    clientSeq = self.request.get('clientSeq');
    timestamp = self.request.get('timestamp');
    msg = self.request.get('msg');
    logging.info('Received MESSAGE [%s %s]' % (channel_id, clientSeq))
    # Global monotonically-increasing sequence shared by all instances;
    # incr can return None (e.g. memcache unavailable), so fall back to 0.
    sequence = memcache.incr("sequence", initial_value=0)
    if sequence is None:
        sequence = 0
    scene_k = ndb.Key('Scene', 'scene1')
    scene = scene_k.get()
    # echo message back to all users
    message = json.dumps({
        'type' : 'echo',
        'sequence' : sequence,
        'timestamp' : timestamp,
        'clientSeq' : int(clientSeq),
        'channel_id' : channel_id,
        'msg' : msg,
        'server_time' : int(time.time() * 1000)
    });
    tStart = datetime.now()
    channel.send_message(channel_id, message)
    tTotal = datetime.now() - tStart
    # NOTE(review): .microseconds only holds the sub-second component, so
    # sends taking longer than 1s under-report here -- confirm acceptable.
    logging.info(' responded to sender [%s] (%dms)' % (channel_id, tTotal.microseconds/1000))
    if len(scene.connections) > 1:
        logging.info(' broadcasting to %i clients' % (len(scene.connections)-1))
        for c in scene.connections:
            # Skip the original sender; it already got the echo above.
            if c.channel_id != channel_id:
                tStart = datetime.now()
                channel.send_message(c.channel_id, message)
                tTotal = datetime.now() - tStart
                logging.info(' broadcast to [%s] (%dms)' % (c.channel_id, tTotal.microseconds/1000))
def run(): global lasttweet print "Starting twitter monitoring" while True: inbounddirect = tweetReadDirect() if inbounddirect: dtn=datetime.now() datenow=dtn.strftime("%H:%M:%S %D ") print inbounddirect['sender'] and inbounddirect['body'] if inbounddirect['sender'] == SENDER and inbounddirect['body'] == "Upload": print("Picture request received") takepic() dtn=datetime.now() imagename='still' + dtn.strftime("%m%d%Y-%H%M%S") + '.jpg' flow = dropbox.client.DropboxOAuth2FlowNoRedirect(db_app_key, db_app_secret) client = dropbox.client.DropboxClient(db_access_token) f = open('/home/tomc/images/still.jpg') response = client.put_file(imagename, f) print "uploaded: ", response string2tweet=datenow + 'Picture uploaded' tweetDirect(string2tweet) elif inbounddirect['sender'] == SENDER and inbounddirect['body'] == "?": print("Syntax request message received") string2tweet=datenow + 'To upload picture send Upload' tweetDirect(string2tweet) time.sleep(300)
def time_since_last_timestamp(alert_str, attempt_alert_str):
    """Return minutes since the last sent (or attempted) alert email, by
    scanning the alert log from newest line to oldest.

    If neither marker string is found, returns ``timer + 100`` (a value
    guaranteed to exceed the configured email timer).

    Fixes: removed the unreachable ``break`` statements after ``return``,
    merged the two duplicated branch bodies, closed the log file (it was
    left open), and return the "not found" fallback only after the whole
    file has been scanned instead of on the first non-matching line.
    """
    from datetime import datetime
    try:
        timer = int(config.get("General_settings", "Email Timer"))
    except Exception:
        timer = 60  # default email timer in minutes
    with open("/var/www/alert_log.txt") as log_file:
        lines = log_file.readlines()
    for line in reversed(lines):
        if alert_str in line:
            print("Checking alert log for last time an email was sent...")
        elif attempt_alert_str in line:
            print("Checking alert log for last attempted email sent...")
        else:
            continue
        # Timestamp is always at the start of the log line, before the
        # first '|' delimiter.
        stamp = line.split("|")[0]
        d1 = datetime.strptime(stamp, "%Y-%m-%d %H:%M:%S ")
        return (datetime.now() - d1).total_seconds() / 60
    # Neither marker string found anywhere in the log.
    return timer + 100
def create_prices():
    """Create a new price set for every merchant range, void all other
    price sets, and purge sets more than a month old.

    Fix: all three statements now use DB-API parameter binding instead of
    Python %-string interpolation, which was SQL-injection-prone and
    fragile around quoting.
    """
    price_set = []
    ranges = get_ranges()
    con = db_con(local=True)
    #con = db_con(local=False)
    cur = con.cursor()
    # Turn off autocommit so all inserts commit at once at the end.
    cur.execute('SET autocommit=0;')
    # price_set_id is the unix timestamp, rounded to the second.
    price_set_id = int(round(time.time(), 0))
    time_start = datetime.now()
    # Valid for a month in case the server goes down.
    time_end = datetime.now() + relativedelta(months=1)
    # 1) Create and insert the prices.
    for r in ranges:
        price = make_price(r['min_price'], r['max_price'])
        cur.execute(
            """INSERT INTO price_sets
                   (merchant_id, price, price_set_id, time_start, time_end)
               VALUES (%s, %s, %s, %s, %s)""",
            (r['merchant_id'], price, price_set_id, time_start, time_end))
    # 2) Void other prices.
    cur.execute('UPDATE price_sets SET void=1 WHERE price_set_id NOT LIKE %s',
                (price_set_id,))
    # 3) Delete price_sets that are more than a month old.
    cur.execute("DELETE FROM price_sets WHERE time_start<%s",
                ((datetime.now() - relativedelta(months=1)).date(),))
    # Commit all inserts at once and close the connection.
    con.commit()
    con.close()
def __init__(self, bql_req=None, offset=DEFAULT_REQUEST_OFFSET, count=DEFAULT_REQUEST_COUNT, max_per_group=DEFAULT_REQUEST_MAX_PER_GROUP, facet_map=None):
    """Build a Sensei search request.

    When ``bql_req`` (a parsed BQL statement) is given, every request
    field -- query, paging, sorts, selections, filters, facets, group-by
    -- is copied from it, with the explicit keyword arguments used as
    fallbacks for offset/count/max_per_group. Otherwise an empty request
    is built from the defaults. ``facet_map`` is required with bql_req.
    """
    self.qParam = {}
    self.explain = False
    self.route_param = None
    self.prepare_time = 0       # Statement prepare time in milliseconds
    self.stmt_type = "unknown"
    if bql_req != None:
        assert(facet_map)
        time1 = datetime.now()
        # XXX need to move to SenseiClient
        # ok, msg = bql_req.merge_selections()
        # if not ok:
        #     raise SenseiClientError(msg)
        self.stmt_type = bql_req.get_stmt_type()
        if self.stmt_type == "desc":
            # DESCRIBE statements only carry the index name.
            self.index = bql_req.get_index()
        else:
            self.query = bql_req.get_query()
            self.offset = bql_req.get_offset() or offset
            self.count = bql_req.get_count() or count
            self.columns = bql_req.get_columns()
            self.sorts = bql_req.get_sorts()
            self.selections = bql_req.get_selections()
            self.filter = bql_req.get_filter()
            self.query_pred = bql_req.get_query_pred()
            self.facets = bql_req.get_facets()
            # PARAM_RESULT_HIT_STORED_FIELDS is a reserved column name. If this
            # column is selected, turn on fetch_stored flag automatically.
            if (PARAM_RESULT_HIT_STORED_FIELDS in self.columns or bql_req.get_fetching_stored()):
                self.fetch_stored = True
            else:
                self.fetch_stored = False
            self.groupby = bql_req.get_groupby()
            self.max_per_group = bql_req.get_max_per_group() or max_per_group
            self.facet_init_param_map = bql_req.get_facet_init_param_map()
        delta = datetime.now() - time1
        self.prepare_time = delta.seconds * 1000 + delta.microseconds / 1000
        logger.debug("Prepare time: %sms" % self.prepare_time)
    else:
        # No BQL statement: start from an empty request.
        self.query = None
        self.offset = offset
        self.count = count
        self.columns = []
        self.sorts = None
        self.selections = []
        self.filter = {}
        self.query_pred = {}
        self.facets = {}
        self.fetch_stored = False
        self.groupby = None
        self.max_per_group = max_per_group
        self.facet_init_param_map = {}
def _hitung_reimburse_obat(self, cr, uid, ids, wage, jatah_reimburse_pengobatan, arg, context=None):
    """Functional-field compute: prorated medical-reimbursement quota.

    Prorates ``type_id.reimburse_pengobatan * wage`` over the months of
    the current year the contract is active, capped by the (equally
    prorated) ``type_id.range_pengobatan`` ceiling.
    """
    # Values are read from the FIRST record only, even though the loop
    # below iterates all ids -- NOTE(review): presumably intended for a
    # single-record functional field; confirm.
    rec = self.browse(cr, uid, ids, context=context)[0]
    typ = rec.type_id.reimburse_pengobatan
    wag = rec.wage
    date_start = rec.date_start
    date_end = rec.date_end
    dt_yr = datetime.strptime(date_start, "%Y-%m-%d").year
    dt_bln = datetime.strptime(date_start, "%Y-%m-%d").month
    dten_yr = False
    dten_bln = False
    if date_end != False:
        dten_yr = datetime.strptime(date_end, "%Y-%m-%d").year
        dten_bln = datetime.strptime(date_end, "%Y-%m-%d").month
    year = datetime.now().year
    month = datetime.now().month
    result = {}
    # NOTE(review): 're' here shadows the imported regex module inside
    # this method.
    for re in self.browse(cr, uid, ids, context=context):
        # Number of months of the current year covered by the contract.
        if dt_yr == year:
            bulan = float(13 - dt_bln)
        # NOTE(review): this branch is unreachable -- dt_yr == year is
        # already caught by the first condition; confirm intent.
        elif dten_yr == year and dt_yr == year:
            bulan = float(dten_bln)
        elif date_end != False and dten_yr == year:
            bulan = float(dten_bln)
        else:
            bulan = float(12)
        #if wag:
        total = wag * typ
        jatah = float((wag * typ) * (bulan / 12))
        # Cap the prorated quota at the (prorated) reimbursement ceiling.
        if total >= rec.type_id.range_pengobatan:
            result[re.id] = float(rec.type_id.range_pengobatan * (bulan / 12))
        else:
            result[re.id] = jatah
    return result
def orders(request, **kwargs):
    """List recent orders/packages for the admin.

    The ``w`` query parameter selects how many weeks back to look
    (default 4). With ``?unpaid`` set, confirmed-but-unpaid Orders are
    shown; otherwise CustomerPackages filtered by ``kwargs``. The
    template context is built from locals(), so local names are part of
    the template contract.
    """
    weeks = int(request.GET['w']) if request.GET.get('w') else 4
    start_date = datetime.now() - timedelta(weeks=weeks)
    end_date = datetime.now()
    if request.GET.get('unpaid'):
        unpaid = True  # exposed to the template via locals()
        packages = (Order.objects
                    .filter(date_paid__isnull=True,
                            date_confirmed__range=(start_date, end_date))
                    .order_by('-date_confirmed'))
    else:
        packages = (CustomerPackage.objects
                    .filter(created__range=(start_date, end_date), **kwargs)
                    .order_by('-created'))
    return _render(request, 'my_admin/orders.html', locals())
def run(): global lasttweet if DEBUG: print "Starting twitter monitoring" while True: inbounddirect = tweetReadDirect() if inbounddirect: dtn=datetime.now() datenow=dtn.strftime("%H:%M:%S %D ") if DEBUG: print inbounddirect['sender'] and inbounddirect['body'] if inbounddirect['sender'] == SENDER and inbounddirect['body'] == "Upload": if DEBUG: print("Picture request received") takepic() dtn=datetime.now() imagename='/still' + dtn.strftime("%m%d%Y-%H%M%S") + '.jpg' client = dropbox.Dropbox(db_access_token) f = open(PICSAVELOCATION) data = f.read() uploadstatus = client.files_upload(data, imagename) if DEBUG: print uploadstatus string2tweet=datenow + 'Picture uploaded' tweetDirect(string2tweet) elif inbounddirect['sender'] == SENDER and inbounddirect['body'] == "?": if DEBUG: print("Syntax request message received") string2tweet=datenow + 'To upload picture send Upload' tweetDirect(string2tweet) time.sleep(300)
def update_positions(data, user):
    """Sync the user's LinkedIn positions into the Positions table,
    creating or updating one row per position in the API payload.

    Fix: the create branch read summary/title/isCurrent from the WHOLE
    payload (``data``) instead of the individual ``position`` dict -- a
    copy-paste bug that stored the wrong fields on newly created rows.
    """
    from datetime import datetime
    if data.get('positions'):
        for position in data.get('positions').get('values'):
            p_id = position.get('id')
            pos = Positions.query.get(p_id)
            # LinkedIn supplies only month+year; anchor them on "now".
            month = position.get('startDate').get('month')
            year = position.get('startDate').get('year')
            start_date = datetime.now().replace(month=month, year=year)
            company = position.get('company').get('name')
            is_current = position.get('isCurrent')
            end_date = None
            if not is_current:
                month = position.get('endDate').get('month')
                year = position.get('endDate').get('year')
                end_date = datetime.now().replace(month=month, year=year)
            if pos:
                # Update the existing row in place.
                pos.summary = position.get('summary')
                pos.title = position.get('title')
                pos.is_current = is_current
                pos.start_date = start_date
                pos.company = company
                if end_date:
                    pos.end_date = end_date
            else:
                pos = Positions(id=p_id,
                                summary=position.get('summary'),
                                title=position.get('title'),
                                is_current=is_current,
                                start_date=start_date,
                                company=company,
                                end_date=end_date)
                pos.user_id = user.user_id
                db.session.add(pos)
    else:
        pass
def admin_product(request, id):
    """Admin detail page for a product: total weight of everything sold,
    units sold in the last 52 weeks, and units currently in stock.

    Template context is built from locals(), so the local names here are
    part of the template contract.
    """
    product = get_object_or_404(Product, pk=id)
    sold_items = WarehouseItem.objects.filter(
        unique_product__parent_product=product,
        sold__isnull=False,
    )
    # Total weight of all units ever sold.
    total_weight = sum(x.unique_product.weight for x in sold_items)
    start_date = datetime.now() - timedelta(weeks=52)
    end_date = datetime.now()
    sold_this_year = WarehouseItem.objects.filter(
        unique_product__parent_product=product,
        sold__range=(start_date, end_date),
    )
    in_stock = WarehouseItem.objects.filter(
        unique_product__parent_product=product,
        sold__isnull=True,
    )
    # if there's a date range, filter by date range.
    return _render(request, 'my_admin/product.html', locals())
def createnew(self, name='burgerking'): no = DayOrder() no.order_id_alin = '0000' + str(self.__index) no.address = 'kb363' + str(self.__index) no.order_id_old = '0000' + str(self.__index) no.order_time = datetime.now() no.phone = '1300010' + str(self.__index) no.origin_price = 13.0 no.real_price = 10.0 no.send_time = datetime.now() no.order_id_old = '0000' + str(self.__index) no.platform = 1 no.promotion = 'nothing' no.merchant = Merchant.objects.filter(name = name)[0] no.save() print 'ordersavesuccess' dish = Dish() dish.dish_name = 'testdish' dish.dish_count = 3 dish.dish_price = 13.0 dish.order = no dish.save() self.__index += 1 return 'success'
def login(request):
    """Log a user in and initialize an empty cart, re-rendering the store
    front page with an error on bad credentials.

    Fixes: removed the dead ``of1``/``ofertas`` exclude chains (the
    result was immediately overwritten by ``productos_publicar()``),
    removed the duplicated 'ofertas' key in the context dict, and bound
    ``form`` on non-POST requests (it was previously referenced unbound).
    """
    if request.method == 'POST':
        form = LoginForm(request.POST)
        if form.is_valid():
            username = form.data['usuario']
            password = form.data['password']
            user = auth.authenticate(username=username, password=password)
            if user is not None and user.is_active:
                # Valid password and the user is marked active.
                auth.login(request, user)
                request.session['carrito'] = []
                return HttpResponseRedirect(reverse('inicio'))
            elif user is None or not user.is_active:
                form._errors[NON_FIELD_ERRORS] = form.error_class(
                    ["Verifique su usuario y/o password."])
    else:
        # GET: show an empty login form.
        form = LoginForm()
    categorias = Categoria.objects.all()
    productos, ofertas = productos_publicar()
    values = {
        'productos': productos,
        'ofertas': ofertas,
        'form': form,
        'categorias': categorias,
    }
    return render_to_response('internet/cuerpo.html', values,
                              context_instance=RequestContext(request))
def handle(self, *args, **options):
    """Rotate dump keys in the manifest bucket: delete anything older
    than two weeks unless it falls in the Monday-evening slot (kept as a
    weekly archive).

    Fix: the log calls passed the timestamp as a stray positional
    argument with no format placeholder (``log.debug("keep:", dt)``),
    which logging cannot format; use lazy %-style arguments.
    """
    dump = options['dump']
    manifest = self.get_manifest(dump)
    two_weeks_ago = datetime.now() - timedelta(days=14)
    # NOTE(review): the 3-month threshold is computed but never acted on
    # below -- confirm whether a 3-month purge rule was intended.
    three_months_ago = datetime.now() - timedelta(days=90)
    keys = manifest.bucket.get_all_keys()
    for key in keys:
        # Key names are parseable timestamps.
        dt = parser.parse(key.key)
        is_2_weeks_old = dt < two_weeks_ago
        is_3_months_old = dt < three_months_ago
        # '%w %H' == '1 18' is Monday 18:00; the name says 9PM --
        # presumably a timezone offset, TODO confirm.
        is_monday_9PM = dt.strftime("%w %H") == '1 18'
        if is_2_weeks_old:
            if is_monday_9PM:
                log.debug("keep: %s", dt)
            else:
                log.debug("delete: %s", dt)
                key.delete()
        else:
            log.info("keep: %s", dt)
    log.info("Log Rotation Complete.")
def freshtoken(picklefile):
    """Return a valid Google OAuth access token, refreshing (and
    re-pickling) it first when the cached one has expired.

    Fix: the refresh branch never returned the new token -- the function
    fell off the end and returned None right after refreshing. It now
    returns the fresh access token. File handles are also closed via
    with-blocks (the originals leaked).
    """
    import pickle
    from datetime import datetime, timedelta
    with open(picklefile, "rb") as fh:
        tdict = pickle.load(fh)
    token = tdict['access_token']
    expires = tdict['expires']
    thetime = datetime.now()
    if thetime > expires:
        endpoint = "https://www.googleapis.com/oauth2/v3/token"
        # Note to self: research the v3 endpoint.
        # Parse "KEY = 'VALUE'" lines from the config file.
        with open(globs.FILE, 'rb') as configfile:
            lines = configfile.readlines()
        config = {}
        for item in lines:
            parts = item.split()
            config[parts[0]] = parts[2][1:-1]
        postheaders = {
            'client_id': config['CLIENT_ID'],
            'client_secret': config['CLIENT_SECRET'],
            'refresh_token': config['REFRESH_TOKEN'],
            'grant_type': 'refresh_token'
        }
        import requests
        r = requests.post(endpoint, postheaders)
        rd = r.json()
        xseconds = rd['expires_in']
        expiresin = datetime.now() + timedelta(seconds=xseconds)
        pickleme = {'access_token': rd['access_token'], 'expires': expiresin}
        with open(globs.TOKEN, 'wb') as out:
            pickle.dump(pickleme, out)
        return rd['access_token']
    else:
        return token
def message(display_hash=None):
    """ Display a specific message.

    GET renders the thread and records when the viewer last saw it;
    POST appends a comment and bumps the thread's activity timestamp.
    Only the sender or a recipient may view the message.
    """
    if not display_hash:
        abort(404)  #TODO Flash error message
    message = Message.query.filter(Message.display_hash==display_hash).first()
    # Access check. NOTE(review): 'and' binds tighter than 'or', so this
    # reads "no message, OR (not the sender AND not a recipient)" --
    # presumably the intent, but confirm.
    if not message or message not in current_user.sent_messages and current_user not in message.recipients:
        return redirect(url_for('messages'))
    if request.method == 'POST':
        form = CommentForm(request.form)
        if form.validate():
            # Attach the comment and bump the thread's activity stamp.
            comment = form.populated_object()
            comment.sender = current_user
            comment.message = message
            message.last_updated = datetime.now()
            db.session.add(comment)
            db.session.commit()
            return redirect(url_for('message', display_hash=display_hash))
    if request.method == 'GET':
        # Record when each side last viewed the thread.
        if current_user == message.sender:
            message.sender_last_viewed = datetime.now()
            db.session.commit()
        else:
            user_message = UserMessage.query.filter(UserMessage.message==message, UserMessage.recipient==current_user).first()
            user_message.last_viewed = datetime.now()
            db.session.commit()
        form = CommentForm()
    return render_template('message.html', message=message, form=form)
def testCurrentProcesses(self):
    """The running-process registry grows as jobs execute and drains as
    each job's status is collected.
    """
    # First job: quick command with stdout capture.
    a = agentaction()
    a.JOBID = 10
    a.SCHDATE = datetime.now()
    a.CMDLINE = QUICK_CMD
    a.STDOUT = True
    a.execute()
    self.assert_(len(current_processes["running"]) == 1)
    self.assert_(a.JOBID in current_processes["running"])
    # Second concurrent job; registry should now track both.
    a2 = agentaction()
    a2.JOBID = 11
    a2.SCHDATE = datetime.now()
    a2.CMDLINE = QUICK_CMD
    a2.STDOUT = True
    a2.execute()
    self.assert_(len(current_processes["running"]) == 2)
    self.assert_(a2.JOBID in current_processes["running"])
    # Give the quick commands time to finish, then reap one at a time:
    # each check_status() should remove its own job from the registry.
    time.sleep(1)
    l = a.check_status()
    self.assert_(len(current_processes["running"]) == 1)
    self.assert_(a2.JOBID in current_processes["running"])
    l = a2.check_status()
    self.assert_(len(current_processes["running"]) == 0)
def runTask(self, func, day=0, hour=0, min=0, second=0):
    """Call ``func`` repeatedly at the given period, polling once per
    minute. (``min`` shadows the builtin but is kept for interface
    compatibility.)

    Fix: the original compared second-resolution timestamp STRINGS for
    exact equality while sleeping 60 seconds between checks, so the
    trigger instant was almost always skipped and ``func`` effectively
    never ran. Compare datetime objects with >= instead.
    """
    period = timedelta(days=day, hours=hour, minutes=min, seconds=second)
    # First scheduled run.
    next_time = datetime.now() + period
    while True:
        iter_now = datetime.now()
        if iter_now >= next_time:
            # Call task func, then schedule the next iteration relative
            # to this firing (same as the original's iter_now + period).
            func()
            next_time = iter_now + period
        # Poll once a minute.
        time.sleep(60)
def get_context_data(self, **kwargs):
    """Build the statement-page context: the user's transactions plus
    income and spending totals. The month/day bucketing and prints below
    look like in-progress debug work.
    """
    context = super(StatementView, self).get_context_data(**kwargs)
    context['transactions'] = Transaction.objects.filter(user=self.request.user)
    context['total_income'] = 0
    context['total_spent'] = 0
    # Type 1 = income, type 2 = expense (accumulated as a negative total).
    for i in Transaction.objects.filter(user=self.request.user):
        if i.transaction_type_id == 1:
            context['total_income'] += i.money
        if i.transaction_type_id == 2:
            context['total_spent'] -= i.money
    # Distinct months/days in which the user had transactions.
    months = []
    days = []
    for i in Transaction.objects.filter(user=self.request.user):
        new_month = datetime.date(i.datetime).timetuple().tm_mon
        if new_month not in months:
            months.append(new_month)
        new_day = datetime.date(i.datetime).timetuple().tm_mday
        if new_day not in days:
            days.append(new_day)
    print 'now:', datetime.now()
    all_transactions = Transaction.objects.all()
    # NOTE(review): slices the USER's transactions using the count of ALL
    # users' transactions; as soon as other users have rows this slice is
    # empty -- looks like a bug, confirm the intended "latest transaction"
    # semantics before relying on it.
    last_transaction = Transaction.objects.filter(user=self.request.user)[len(all_transactions)-1:]
    curr_day = datetime.date(datetime.now()).timetuple().tm_mday
    print curr_day
    for i in last_transaction:
        print 'last: ', i.datetime
        last_day = datetime.date(i.datetime).timetuple().tm_mday
        print 'last:', last_day
        # Days since the last transaction (same-month assumption).
        delta = curr_day - last_day
        print 'delta: ', delta
    print months  # months in which transactions occurred
    print days
    return context
def get_date():
    """Return yesterday's date as a 'YYYY-MM-DD' string.

    Fixes: the original hand-rolled the calendar arithmetic and (a)
    called the ``months`` dict as a function (``months(YesterMonth)`` --
    a TypeError on the first day of any month but January), (b) compared
    strftime strings against ints, and (c) ignored leap years. A single
    timedelta subtraction handles month/year boundaries and leap years
    correctly, and strftime does the zero-padding.
    """
    from datetime import timedelta
    yesterday = datetime.now() - timedelta(days=1)
    return yesterday.strftime('%Y-%m-%d')
def wait(*args): """Stops program and waits for keypress. start/return unpause. Esc/select quit.""" # Set a flag to know what to return flag = "thisisstupid" # note current time so you can subtract how much time wait takes from the Game Timer then = datetime.now() if args: time.sleep(args[0]) else: while flag == "thisisstupid": for event in pygame.event.get(): if event.type == QUIT: flag = False elif event.type == KEYDOWN: if event.key in [K_ESCAPE, ord('q')]: # pressing escape quits flag = False if event.key in [K_RETURN]: flag = True elif event.type == JOYBUTTONDOWN: if event.button == JOYBUTTONS['start']: flag = True if event.button == JOYBUTTONS['select']: flag = False now = datetime.now() # Set Game timer's subtract key to the time wait() has taken in milliseconds GAME_TIMER['subtract'] += (now - then).total_seconds() * 1000 return flag
def send_message():
    """ Send a message to a number of users.

    POST validates the form, stamps the activity fields, links a
    UserMessage per selected recipient hash, and persists the message.
    GET renders a blank compose form with all other users listed.
    """
    if request.method == 'POST':
        form = MessageForm(request.form)
        #TODO If not recipients, flash message. Put recipients in form.
        if form.validate() and 'recipients' in request.form:
            # Build the message and stamp both activity fields together.
            message = form.populated_object()
            message.sender = current_user
            message.last_updated = datetime.now()
            message.sender_last_viewed = message.last_updated
            # Attach one UserMessage link per recipient hash that resolves
            # to a real user; unknown hashes are silently skipped.
            for recip_hash in request.form.getlist('recipients'):
                recipient = User.query.filter(User.display_hash == recip_hash).first()
                if recipient is None:
                    continue
                link = UserMessage()
                link.recipient = recipient
                link.last_viewed = datetime.now()
                message.recipients.append(link)
            db.session.add(message)
            db.session.commit()
            return redirect(url_for('messages'))
    if request.method == 'GET':
        form = MessageForm()
    users = User.query.filter(User.id != current_user.id).all()
    return render_template('send_message.html', form=form, users=users)
def checkin(): form = CheckinForm(request.form) if request.method == 'POST' and form.validate(): res = db.findReservation(form.code.data) if res: print 'Reservation %s is already in the system...' % res.code return redirect(url_for('flight_status', code=res.code)) res = db.addReservation(form.firstname.data, form.lastname.data, form.code.data, form.email.data) if config["SEND_ADMIN_EMAIL"]: admin_message = "First: %s\nLast: %s\nEmail: %s\nConfirmation Number: %s\nTime: %s" % ( form.firstname.data, form.lastname.data, form.email.data, form.code.data, datetime.now()) send_email('An automatic southwest check in has been initiated', admin_message, boarding_pass=None, email=config["ADMIN_EMAIL"]) print 'Created', res if not res.active: return message('It looks like all of your flights have already taken off :(') success = getFlightTimes(res) if success: message = getFlightInfo(res, res.flights) if config["SEND_EMAIL"]: send_email('Waiting for SW flight', message, boarding_pass=None, email=res.email); scheduleAllFlights(res) print 'Current time: %s' % DateTimeToString(datetime.now(utc)) return redirect(url_for('flight_status', code=res.code)) else: db.isReservationActive(res) if not res.active: db.deleteReservation(res) return display_message("We can't find that reservation!") return render_template('index.html', form=form)
def render_html(stream):
    """Write the CMX registry to ``stream`` as an HTML page: one table per
    registered component, one row per exported value (name, value,
    absolute + relative update time, type).

    Fixes: "JSON formt" typo in the user-facing link text; the value text
    is now HTML-escaped so a value containing '</textarea>' (or other
    markup) cannot break out of its cell.
    """
    stream.write("""<!DOCTYPE html>
<html>
<head><title>Simple CMX Webinterface</title></head>
<body>
<pre>{file} running on {host}. Page generated at {ctime}</pre>
<a href="/json">view in JSON format</a>""".format(
        file=escape(__file__),
        host=escape(hostinfo),
        ctime=datetime.now().ctime()))
    for component in cmx.Registry.list():
        stream.write("""
<table border=2>
<tr><th colspan=4>{name} ({processId})</th></tr>
<tr><th>Name</th><th>Value</th><th>update</th><th>type</th></tr>""".format(
            name=escape(component.name()),
            processId=component.processId()))
        for value in component.list():
            # mtime() is in microseconds since the epoch.
            mtime = datetime.fromtimestamp(value.mtime() / 10.0**6)
            stream.write("""
<tr>
<td>{name}</td>
<td class="value"><textarea rows=2 cols=80>{value}</textarea></td>
<td>{update}<br />{updateRel}</td>
<td>{type}</td>
</tr>""".format(name=escape(value.name()),
                type=escape(value.__class__.__name__),
                value=escape(str(value.value())),
                update=mtime,
                updateRel=datetime.now() - mtime))
        stream.write("</table>\n<br/>\n")
    stream.write("</body>\n</html>")
def online_appointment_confirm(self, **post):
    """Validate the online-appointment form and create a registration.

    Validates name/email/phone from ``post``; on any error re-renders the
    form with the (Spanish) error messages.  For a logged-in user a
    ``s2u.appointment.registration`` record is created and the browser is
    redirected to the scheduled-appointment page.  Public users are allowed
    through only when the ``s2u_online_appointment`` config parameter is
    ``public`` (a public user that passes validation falls through and gets
    an empty response, as before).

    Cleanup: the large spans of commented-out doctor/slot/calendar-event
    code (and the ``now`` local that only served it) were removed; see VCS
    history if that flow is ever revived.
    """
    error = {}
    error_message = []
    # Public visitors are only allowed when explicitly enabled.
    if request.env.user._is_public():
        param = request.env['ir.config_parameter'].sudo().search([('key', '=', 's2u_online_appointment')], limit=1)
        if not param or param.value.lower() != 'public':
            return request.render('s2u_online_appointment.only_registered_users')
    # Field validation (user-facing messages are intentionally in Spanish).
    if not post.get('name', False):
        error['name'] = True
        error_message.append(_('Por favor, escriba su nombre.'))
    if not post.get('email', False):
        error['email'] = True
        error_message.append(_('Por favor, introduzca su dirección de correo electrónico.'))
    elif not functions.valid_email(post.get('email', '')):
        error['email'] = True
        error_message.append(_('Por favor, introduce una dirección de correo electrónico válida.'))
    if not post.get('phone', False):
        error['phone'] = True
        error_message.append(_('Por favor, introduzca su número de teléfono.'))
    values = self.prepare_values(form_data=post)
    if error_message:
        values['error'] = error
        values['error_message'] = error_message
        return request.render('s2u_online_appointment.make_appointment', values)
    # Registered user: persist the request so it shows up in their portal.
    if not request.env.user._is_public():
        vals = {
            'partner_id': request.env.user.partner_id.id,
            'descripcion': post.get('remarks'),
            'enfermedades': post.get('antecedentes'),
            'cirugias': post.get('procedimientos'),
            'state': 'pending',
        }
        registration = request.env['s2u.appointment.registration'].create(vals)
        print(registration)  # NOTE(review): leftover debug output
        return request.redirect('/online-appointment/appointment-scheduled?appointment=%d' % registration.id)
class Monster:
    """Selenium scraper for monster.com job listings.

    Workflow (driven by ``monster_get_jobs``): search a title/location pair,
    keep clicking "Load More", collect link/company/location/date elements,
    then open each posting and write one record per job to CSV via the
    shared ``jp_common`` helper object.

    Fixes vs. the previous revision: ``print("...".e)`` (attribute access on
    a string literal, which raised AttributeError inside an exception
    handler) is now ``print("...", e)``; screenshot paths use raw strings
    (same bytes, no accidental-escape hazard); dead commented-out
    experiment code was removed.
    """
    # NOTE(review): wrapping an entire class body in try/except is unusual;
    # kept because callers may rely on a partially-defined class surviving
    # a failure at definition time.
    try:
        # Class-level state shared by all instances (effectively singletons).
        monster_job_email = []
        monster_job_phoneNo = []
        driver = None
        start_time = datetime.now()
        url = ""
        wait = ''
        link_count = 0
        job_count = []
        # Template record; mutated in place and flushed to CSV per job.
        job_details = {'Job Category': '', 'Date&Time': '', 'Searched Job Title': '',
                       'Searched Job Location': '', 'Job Portal': 'Monster',
                       'Job Date Posted': '', 'Job Title': '', 'Job Company Name': '',
                       'Job Location': '', 'Job Phone No': '', 'Job Email': '',
                       'Job Link': '', 'Job Description': ''}

        def __init__(self, driver, url):
            """Bind the shared webdriver and search URL; set up logging."""
            try:
                print(self.start_time)
                self.driver = driver
                self.url = url
                self.wait = WebDriverWait(self.driver, 30)
                logging.basicConfig(filemode='a', format='%(name)s - %(levelname)s - %(message)s', level=logging.INFO)
                logging.info("######################################################################### \n"
                             "                                                                          \n"
                             "===========================Monster Job Search=============================\n"
                             "                                                                          \n"
                             "##########################################################################")
                logging.info(url)
            except Exception as e:
                print("Unknown Exception in Monster class __init__ ", e)
                logging.exception("Unknown Exception in Monster class __init__ ")
                logging.exception(e)

        # search jobs
        def monster_search_jobs(self, jp_common, job_title, job_location):
            """Type title/location into the search boxes and submit."""
            try:
                logging.info("In monster_search_jobs")
                print("In monster_search_jobs")
                # Job Title textbox.
                job_title_web_element = jp_common.find_web_element("//*[@id='keywords2']", "Job Title Textbox", "one", self.wait)
                jp_common.web_element_action(job_title_web_element, "send_keys", job_title, "Job Title Textbox")
                # Job Location textbox.
                job_location_web_element = jp_common.find_web_element("//*[@id='location']", "Job Location Textbox", "one", self.wait)
                jp_common.web_element_action(job_location_web_element, "send_keys", job_location, "Job Location Textbox")
                # Search button.
                search_web_element = jp_common.find_web_element("//*[@id='doQuickSearch']", "Search Button", "one", self.wait)
                jp_common.web_element_action(search_web_element, "click", "", "Search Button")
            except Exception as e:
                print("Unexpected error in monster_search_jobs", e)
                logging.exception("Unexpected exception in monster_search_jobs")
                logging.exception(e)
                self.driver.get_screenshot_as_file(r"Screenshots\monster_search_jobs_exception.png")

        def monster_valid_job(self, jp_common):
            """Return False when the results page says no jobs matched.

            NOTE(review): ``msg`` is currently hard-coded (the live-page
            lookup is disabled), so this always returns True; on an internal
            exception it implicitly returns None (falsy).
            """
            try:
                logging.info("In monster_valid_jobs")
                print("In monster_valid_jobs")
                msg = "New Jobs in U.S"
                logging.info(msg + "===================")
                if msg == "Sorry, we didn't find any jobs matching your criteria":
                    return False
                else:
                    return True
            except Exception as e:
                print("Unexpected error in monster_valid_jobs", e)
                logging.error("Unexpected exception in monster_valid_jobs")
                logging.exception(e)
                self.driver.get_screenshot_as_file(r"Screenshots\monster_valid_jobs_exception.png")

        # load more
        def monster_loadmore_jobs(self):
            """Click "Load More" until it disappears.

            Loop exit relies on ``find_element_by_xpath`` raising once the
            button is gone, which lands in the outer except.
            """
            logging.info("In monster_loadmore_jobs")
            print("In monster_loadmore_jobs")
            click = 0
            time.sleep(3)
            try:
                load_more_web_element = self.driver.find_element_by_xpath("//*[@id='loadMoreJobs'][@class='mux-btn btn-secondary load-more-btn ']")
                print(load_more_web_element, "load more found")
                while load_more_web_element:
                    click += 1
                    load_more_web_element = self.driver.find_element_by_xpath("//*[@id='loadMoreJobs'][@class='mux-btn btn-secondary load-more-btn ']")
                    print("Load More found and clicked", click, "time(s)")
                    try:
                        load_more_web_element.click()
                    except Exception as e:
                        print("Unexpected error when clicking load more button", e)
                        logging.exception("Unexpected error when clicking load more button")
                        logging.exception(e)
                    time.sleep(3)
            except Exception as e:
                print("Unexpected error in monster_loadmore_jobs", e)
                logging.error("Unexpected exception in monster_loadmore_jobs" + str(e))
                self.driver.get_screenshot_as_file(r"Screenshots\monster_load_more_jobs_exception.png")

        # Get list of job links populated
        def monster_get_job_links(self, jp_common):
            """Return the list of job-link elements (or None on failure)."""
            print("In monster_get_job_links")
            logging.info("In monster_get_job_links")
            try:
                job_links_web_element = jp_common.find_web_element("//*[@id='SearchResults']/section/div/div[2]/header/h2/a",
                                                                   "Job Links", "multiple", self.wait)
                return job_links_web_element
            except Exception as e:
                print("Unknown Exception in monster_get_jobs_links", e)
                logging.error("Unknown Exception in monster_get_jobs_links")
                logging.exception(e)
                self.driver.get_screenshot_as_file(r"Screenshots\monster_get_job_links_exception.png")

        # Get list of job company names
        def monster_get_job_company(self, jp_common):
            """Return the list of company-name elements (or None on failure)."""
            print("In monster_get_job_company")
            logging.info("In monster_get_job_company")
            try:
                job_company_web_element = jp_common.find_web_element("//*[@id='SearchResults']/section/div/div[2]/div[1]/span",
                                                                     "Job Company Name", "multiple", self.wait)
                return job_company_web_element
            except Exception as e:
                print("Unknown Exception in monster_get_job_company", e)
                logging.error("Unknown Exception in monster_get_job_company")
                logging.exception(e)
                self.driver.get_screenshot_as_file(r"Screenshots\monster_get_job_company.png")

        # Get names of job locations
        def monster_get_job_location(self, jp_common):
            """Return the list of location elements (or None on failure)."""
            print("In monster_get_job_location")
            logging.info("In monster_get_job_location")
            try:
                job_location_web_element = jp_common.find_web_element("//*[@id='SearchResults']/section/div/div[2]/div[2]/span",
                                                                      "Job Location", "multiple", self.wait)
                return job_location_web_element
            except Exception as e:
                # Fixed: was ``print("...".e)`` — attribute access on a string.
                print("Unknown Exception in get_jobs_location", e)
                logging.error("Unknown Exception in get_jobs_location")
                logging.exception(e)
                self.driver.get_screenshot_as_file(r"Screenshots\monster_get_job_location_exception.png")

        # Get Date-time of job posted
        def monster_get_job_posted_datetime(self, jp_common):
            """Return the list of posted-date elements (or None on failure)."""
            print("In monster_get_posted_datetime")
            logging.info("In monster_get_posted_datetime")
            try:
                job_posted_datetime_web_element = jp_common.find_web_element("//*[@id='SearchResults']/section/div/div[3]/time",
                                                                             "Job Date Posted", "multiple", self.wait)
                return job_posted_datetime_web_element
            except Exception as e:
                print("Unknown Exception in monster_get_jobs_poster_datetime")
                logging.error("Unknown Exception in monster_get_jobs_poster_datetime")
                logging.exception(e)
                self.driver.get_screenshot_as_file(r"Screenshots\monster_get_job_posted_datetime_exception.png")

        # Get Job description and scrape Email and Phone number
        def monster_get_job_desc(self, job_date_posted, job_title, job_loc, job_links, job_company, job_location, jp_common):
            """Open each posting, scrape description/email/phone, write CSV row."""
            print("In monster_get_jobs_desc")
            logging.info("In monster_get_jobs_desc")
            print(self.driver.page_source)
            if "//*[@id='JobBody']" in self.driver.page_source:
                print("yes")
            else:
                print("no")
            try:
                self.link_count = 0
                self.job_details['Searched Job Title'] = job_title
                self.job_details['Searched Job Location'] = job_loc
                for link in job_links:
                    job_desc = []
                    logging.info("job title: " + link.text)
                    logging.info("Monster, Link clicked :" + job_title + " " + job_loc + " " +
                                 str(self.link_count + 1) + " / " + str(len(job_links)))
                    logging.info("==============================================> " + str(self.link_count + 1))
                    self.job_details['Job Link'] = link.get_attribute("href")
                    self.job_details['Job Title'] = link.text
                    self.job_details['Job Company Name'] = job_company[self.link_count].text
                    self.job_details['Job Location'] = job_location[self.link_count].text
                    self.job_details['Job Date Posted'] = job_date_posted[self.link_count].text
                    try:
                        jp_common.web_element_action(link, "click", "", "Job link")
                        job_description_web_element = jp_common.find_web_element(
                            "//*[@id='JobBody']", "Job Description", "multiple", self.wait)
                        for element in job_description_web_element:
                            job_desc.append(element.text)
                    except Exception as e:
                        print("Unknown Exception occurred while clicking to get job description", e)
                        logging.error("Unknown Excecption occurred while clicking to get job description")
                        logging.exception(e)
                    else:
                        # Only record the row when the description was scraped.
                        job_desc = ' '.join(map(str, job_desc))
                        self.job_details['Job Description'] = job_desc
                        self.job_details['Job Email'] = jp_common.get_Email_desc(job_desc)
                        logging.info(self.job_details['Job Email'])
                        self.job_details['Job Phone No'] = jp_common.get_Phno_desc(job_desc)
                        logging.info(self.job_details['Job Phone No'])
                        self.job_details['Date&Time'] = datetime.now().strftime("%b-%d-%Y %H:%M:%S")
                        jp_common.write_to_csv(self.job_details)
                    self.link_count += 1
            except Exception as e:
                print("Unknown exception in monster_get_job_desc", e)
                logging.error("Unknown exception in monster_get_job_desc")
                logging.exception(e)
                self.driver.get_screenshot_as_file(r"Screenshots\monster_get_job_desc_exception.png")

        # To get jobs from the user given choices
        def monster_get_jobs(self, arr, jp_common):
            """Main driver: iterate (title, location) pairs from ``arr``."""
            print("In monster_get_jobs")
            logging.info("In monster_get_jobs")
            try:
                jp_common.get_url(self.driver, self.url)
                for title, loc in zip(arr[0], arr[1]):
                    logging.info(title + " " + loc)
                    job_title = title
                    job_loc = loc
                    self.monster_search_jobs(jp_common, job_title, job_loc)
                    if self.monster_valid_job(jp_common):
                        self.monster_loadmore_jobs()
                        job_links = self.monster_get_job_links(jp_common)
                        job_company = self.monster_get_job_company(jp_common)
                        job_location = self.monster_get_job_location(jp_common)
                        job_date_posted = self.monster_get_job_posted_datetime(jp_common)
                        self.job_details['Job Category'] = jp_common.set_job_category(job_title)
                        logging.info("Links Populated for Monster : " + job_title + " " + job_loc +
                                     " are : " + str(len(job_links)))
                        self.monster_get_job_desc(job_date_posted, job_title, job_loc, job_links,
                                                  job_company, job_location, jp_common)
                        self.job_count.append(self.link_count)
                        jp_common.get_all_phno()
                        jp_common.get_all_email()
                        self.monster_clear_search(job_title, job_loc)
                self.report(arr, "Sorry no jobs matching your search", jp_common)
                jp_common.time_to_execute()
            except Exception as e:
                print("Unknown exception in monster_get_jobs", e)
                logging.error("Unknown exception in monster_get_jobs")
                logging.exception(e)
                self.driver.get_screenshot_as_file(r"Screenshots\monster_get_jobs_exception.png")

        # clear search boxes
        def monster_clear_search(self, job_title, job_loc):
            """Backspace out the previous title/location before the next search."""
            print("In monster_clear_search")
            logging.info("monster_clear_search")
            try:
                for i in range(len(job_title)):
                    self.driver.find_element_by_xpath("//*[@id='keywords2']").send_keys(Keys.BACKSPACE)
                for i in range(len(job_loc)):
                    self.driver.find_element_by_xpath("//*[@id='location']").send_keys(Keys.BACKSPACE)
            except Exception as e:
                print("Unknown exception in monster_clear_search", e)
                logging.error("Unknown exception in monster_clear_search")
                logging.exception(e)
                self.driver.get_screenshot_as_file(r"Screenshots\monster_get_clear_search_exception.png")

        def report(self, arr, msg, jp_common):
            """Log a per-search summary of how many jobs were scraped."""
            try:
                logging.info("================================")
                logging.info("=======Monster Report===========")
                for title, loc, count in zip(arr[0], arr[1], range(len(self.job_count))):
                    if len(self.job_count) > 0:
                        logging.info(title + " " + loc + ":" + str(self.job_count[count]) + " Jobs")
                    else:
                        logging.info(title + " " + loc + " : " + msg)
                logging.info("Total Monster Execution :" + str(jp_common.time_to_execute()))
                logging.info("================================")
            except Exception as e:
                print("Unknown exception in monster_clear_search", e)
                logging.exception("Unknown exception in monster_clear_search")
                logging.exception(e)
                self.driver.get_screenshot_as_file(r"Screenshots\monster_get_clear_search_exception.png")
    except Exception as e:
        print("Unknown Exception occurred in Class Monster", e)
        logging.error("Unknown Exception occurred in Class Monster")
        logging.exception(e)
        # NOTE(review): ``driver`` is not defined at class scope; this line
        # only works if a module-level ``driver`` exists — confirm.
        driver.get_screenshot_as_file(r"Screenshots\monster_class_exception.png")
import plotly import plotly.plotly as py #for plotting import plotly.offline as offline import plotly.graph_objs as go import plotly.tools as tls import plotly.figure_factory as ff from fredapi import Fred import lightgbm as lgb from xgboost import XGBRegressor from sklearn.metrics import accuracy_score import credentials fred = credentials.fred #set script starting time start_time = datetime.now() indics = { 'INDPRO': 'IP', #Industrial Production 'NETEXP': 'Exports', #Net Exports of Goods and Services 'EXPGSC1': 'Real_Exports', #Real Exports of Goods and Services 'DGORDER': 'NewOrders', #Manufacturers' New Orders: Durable Goods 'NEWORDER': 'NewOrders_NoDef', #Manufacturers' New Orders: Nondefense Capital Goods Excluding Aircraft 'PCECC96': 'Real_PCE', #Real Personal Consumption Expenditures 'IMPGSC1': 'Real_Imports', #Real imports of goods and services 'IMPGS': 'Imports', #Imports of Goods and Services 'AWHMAN': 'Wk_hrs_manu', #Average Weekly Hours of Production and Nonsupervisory Employees: Manufacturing 'AWHNONAG': 'Wk_hrs_priv', #Average Weekly Hours of Production and Nonsupervisory Employees: Total private
def send_xml(request, transmissor_id, service):
    """Send an eSocial event batch, or poll its result, over the government
    SOAP webservice using curl.

    ``service`` is either ``WsEnviarLoteEventos`` (send) or
    ``WsConsultarLoteEventos`` (poll).  Builds header/request/response file
    paths from a timestamp, shells out to curl with client certificates,
    parses the response, flashes Django messages and updates the batch
    status on ``TransmissorLoteEsocial``.
    """
    from emensageriapro.settings import BASE_DIR
    from datetime import datetime
    # Timestamp made filename-safe; used to make the XML artifacts unique.
    data_atual = str(datetime.now()).replace(':', '-').replace(' ', '-').replace(
        '.', '-')
    import os
    from emensageriapro.settings import FORCE_PRODUCAO_RESTRITA, TP_AMB, CA_CERT_PEM_FILE, CERT_HOST, CERT_PASS, CERT_PEM_FILE, KEY_PEM_FILE
    CERT_HOST = BASE_DIR + '/' + CERT_HOST
    # NOTE(review): URL_WS/ACTION stay unbound if TP_AMB/service take an
    # unexpected value, which would raise NameError further down — confirm
    # callers always pass one of the two known services.
    if TP_AMB == '1':  # Production
        # NOTE(review): the production branch points at the
        # *producaorestrita* (restricted-production) host — confirm whether
        # this is intentional or a copy-paste leftover.
        if service == 'WsEnviarLoteEventos':
            URL_WS = "https://webservices.producaorestrita.esocial.gov.br/servicos/empregador/enviarloteeventos/WsEnviarLoteEventos.svc"
            ACTION = "http://www.esocial.gov.br/servicos/empregador/lote/eventos/envio/v1_1_0/ServicoEnviarLoteEventos/EnviarLoteEventos"
        elif service == 'WsConsultarLoteEventos':
            URL_WS = "https://webservices.producaorestrita.esocial.gov.br/servicos/empregador/consultarloteeventos/WsConsultarLoteEventos.svc"
            ACTION = "http://www.esocial.gov.br/servicos/empregador/lote/eventos/envio/consulta/retornoProcessamento/v1_1_0/ServicoConsultarLoteEventos/ConsultarLoteEventos"
    elif TP_AMB == '2':  # Restricted production
        if service == 'WsEnviarLoteEventos':
            URL_WS = "https://webservices.producaorestrita.esocial.gov.br/servicos/empregador/enviarloteeventos/WsEnviarLoteEventos.svc"
            ACTION = "http://www.esocial.gov.br/servicos/empregador/lote/eventos/envio/v1_1_0/ServicoEnviarLoteEventos/EnviarLoteEventos"
        elif service == 'WsConsultarLoteEventos':
            URL_WS = "https://webservices.producaorestrita.esocial.gov.br/servicos/empregador/consultarloteeventos/WsConsultarLoteEventos.svc"
            ACTION = "http://www.esocial.gov.br/servicos/empregador/lote/eventos/envio/consulta/retornoProcessamento/v1_1_0/ServicoConsultarLoteEventos/ConsultarLoteEventos"
    dados = {}
    name = get_transmissor_name(transmissor_id)
    transmissor_dados = {}
    tle = TransmissorLoteEsocial.objects.using('default').\
        get(id=transmissor_id)
    # tra = executar_sql("""
    #     SELECT te.empregador_tpinsc, te.empregador_nrinsc,
    #            t.transmissor_tpinsc, t.transmissor_nrinsc,
    #            t.esocial_lote_min, t.esocial_lote_max,
    #            t.esocial_timeout, t.esocial_certificado, t.esocial_senha
    #     FROM public.transmissor_lote_esocial te
    #     JOIN public.transmissores t ON t.id = te.transmissor_id
    #     WHERE te.id=%s;
    #     """ % transmissor_id, True)
    transmissor_dados['empregador_tpinsc'] = tle.empregador_tpinsc
    transmissor_dados['empregador_nrinsc'] = tle.empregador_nrinsc
    transmissor_dados[
        'transmissor_tpinsc'] = tle.transmissor.transmissor_tpinsc
    transmissor_dados[
        'transmissor_nrinsc'] = tle.transmissor.transmissor_nrinsc
    transmissor_dados['esocial_lote_min'] = tle.transmissor.esocial_lote_min
    transmissor_dados['esocial_lote_max'] = tle.transmissor.esocial_lote_max
    transmissor_dados['esocial_timeout'] = int(tle.transmissor.esocial_timeout)
    # transmissor_dados['esocial_certificado'] = BASE_DIR + 'uploads/' + tle.transmissor.esocial_certificado
    # transmissor_dados['esocial_senha'] = tle.transmissor.esocial_senha
    cert_pem_file = BASE_DIR + '/' + CERT_PEM_FILE
    key_pem_file = BASE_DIR + '/' + KEY_PEM_FILE
    # Lazily derive the PEM pair from the PKCS#12 host certificate.
    if not os.path.isfile(cert_pem_file):
        create_pem_files(CERT_HOST, CERT_PASS, cert_pem_file, key_pem_file)
    dados['transmissor_id'] = transmissor_id
    # Relative paths (stored in DB) and absolute paths (passed to curl).
    dados['header'] = 'arquivos/Comunicacao/%s/header/%s_%s.xml' % (
        service, name, data_atual)
    dados['request'] = 'arquivos/Comunicacao/%s/request/%s_%s.xml' % (
        service, name, data_atual)
    dados['response'] = 'arquivos/Comunicacao/%s/response/%s_%s.xml' % (
        service, name, data_atual)
    dados[
        'header_completo'] = '%s/arquivos/Comunicacao/%s/header/%s_%s.xml' % (
            BASE_DIR, service, name, data_atual)
    dados[
        'request_completo'] = '%s/arquivos/Comunicacao/%s/request/%s_%s.xml' % (
            BASE_DIR, service, name, data_atual)
    dados[
        'response_completo'] = '%s/arquivos/Comunicacao/%s/response/%s_%s.xml' % (
            BASE_DIR, service, name, data_atual)
    dados['service'] = service
    dados['url'] = URL_WS
    dados['cert'] = cert_pem_file
    dados[
        'cacert'] = '%s/certificados/webservicesproducaorestritaesocialgovbr.crt' % BASE_DIR
    dados['key'] = key_pem_file
    dados['action'] = ACTION
    dados['timeout'] = transmissor_dados['esocial_timeout']
    quant_eventos_validados = TransmissorEventosEsocial.objects.using('default'). \
        filter(transmissor_lote_esocial_id=transmissor_id, status=4).count()
    # qt_ev_validados = executar_sql("""
    #     SELECT count(*) FROM transmissor_eventos_esocial
    #     WHERE transmissor_lote_esocial_id=%s AND status=4""" % transmissor_id, True)
    # quant_eventos_validados = qt_ev_validados[0][0]
    if quant_eventos_validados or service == 'WsConsultarLoteEventos':
        # NOTE(review): this repeats the status=4 filter, while the
        # commented-out SQL below counted ALL events of the batch — confirm
        # which semantics 'quant_eventos' is meant to have.
        quant_eventos = TransmissorEventosEsocial.objects.using('default'). \
            filter(transmissor_lote_esocial_id=transmissor_id, status=4).count()
        # qt_ev = executar_sql("""
        #     SELECT count(*) FROM transmissor_eventos_esocial
        #     WHERE transmissor_lote_esocial_id=%s""" % transmissor_id, True)
        # quant_eventos = qt_ev[0][0]
        if (quant_eventos >= transmissor_dados['esocial_lote_min'] and \
            quant_eventos <= transmissor_dados['esocial_lote_max'] and \
            service == 'WsEnviarLoteEventos') or service == 'WsConsultarLoteEventos':
            create_request(dados, transmissor_dados)
            # Shell out to curl with mutual TLS.
            # NOTE(review): the command is interpolated into a shell string;
            # the values come from settings/DB, but subprocess.run with a
            # list would be safer — flagged, not changed.
            command = '''curl --connect-timeout %(timeout)s
                --cert %(cert)s
                --key %(key)s
                --cacert %(cacert)s
                -H "Content-Type: text/xml;charset=UTF-8"
                -H "SOAPAction:%(action)s"
                --dump-header %(header_completo)s
                --output %(response_completo)s
                -d@%(request_completo)s %(url)s''' % dados
            command = command.replace('\n', '')
            # Collapse runs of spaces left by the multi-line literal.
            for n in range(10):
                command = command.replace('  ', ' ')
            os.system(command)
            # No response file means curl timed out / failed to connect.
            if not os.path.isfile(BASE_DIR + '/' + dados['response']):
                messages.error(
                    request,
                    '''O servidor demorou mais que o esperado para efetuar a conexão!
                    Caso necessário solicite ao administrador do sistema para que
                    aumente o tempo do Timeout. Timeout atual %(timeout)s''' % dados)
                return None
            if service == 'WsEnviarLoteEventos':
                from emensageriapro.mensageiro.functions.funcoes_esocial_comunicacao import read_envioLoteEventos
                read_envioLoteEventos(dados['response'], transmissor_id)
                messages.success(request, 'Lote enviado com sucesso!')
            elif service == 'WsConsultarLoteEventos':
                from emensageriapro.mensageiro.functions.funcoes_esocial_comunicacao import read_consultaLoteEventos
                messages.success(request, 'Lote consultado com sucesso!')
                read_consultaLoteEventos(dados['response'], transmissor_id)
            # Register the produced XML artifacts.
            gravar_nome_arquivo(dados['header'], 0)
            gravar_nome_arquivo(dados['request'], 0)
            gravar_nome_arquivo(dados['response'], 0)
            if 'HTTP/1.1 200 OK' not in ler_arquivo(dados['header']):
                messages.warning(
                    request,
                    'Retorno do servidor: ' + ler_arquivo(dados['header']))
            # Advance the batch status: 7 = sent, 9 = polled.
            if service == 'WsEnviarLoteEventos':
                TransmissorLoteEsocial.objects.using('default').filter(
                    id=transmissor_id).update(status=7)
                # alterar_status_transmissor(transmissor_id, 7)
            elif service == 'WsConsultarLoteEventos':
                TransmissorLoteEsocial.objects.using('default').filter(
                    id=transmissor_id).update(status=9)
                # alterar_status_transmissor(transmissor_id, 9)
        elif (quant_eventos < transmissor_dados['esocial_lote_min'] and \
              service == 'WsEnviarLoteEventos'):
            messages.error(request, 'Lote com quantidade inferior a mínima permitida!')
            TransmissorLoteEsocial.objects.using('default').filter(
                id=transmissor_id).update(status=0)
            #alterar_status_transmissor(transmissor_id, 0)
        elif (quant_eventos > transmissor_dados['esocial_lote_max'] and \
              service == 'WsEnviarLoteEventos'):
            messages.error(
                request,
                'Lote com quantidade de eventos superior a máxima permitida!')
            TransmissorLoteEsocial.objects.using('default').filter(
                id=transmissor_id).update(status=0)
            #alterar_status_transmissor(transmissor_id, 0)
        else:
            messages.error(request, 'Erro ao enviar o lote!')
            # Failure statuses: 5 = send failed, 8 = poll failed.
            if service == 'WsEnviarLoteEventos':
                TransmissorLoteEsocial.objects.using('default').filter(
                    id=transmissor_id).update(status=5)
                # alterar_status_transmissor(transmissor_id, 5)
            elif service == 'WsConsultarLoteEventos':
                TransmissorLoteEsocial.objects.using('default').filter(
                    id=transmissor_id).update(status=8)
                # alterar_status_transmissor(transmissor_id, 8)
    else:
        messages.error(request,
                       'Não possuem eventos validados para envio neste lote!')
    atualizar_status_esocial()
if DEBUG == 1:
    # Discover this machine's outward-facing IP by "connecting" a UDP socket
    # to a public host (no packets are sent for UDP connect; requires
    # internet name resolution). Context managers guarantee the socket and
    # the file are closed even on error; getsockname() is now called once.
    with socket.socket(socket.AF_INET, socket.SOCK_DGRAM) as g:
        g.connect(("gmail.com", 80))
        ssdd = g.getsockname()[0]
        print(ssdd)
    # today/timel/log are left bound for use further down the script.
    today = datetime.time(datetime.now())
    timel = date.today()
    log = ('log' + str(timel) + '.log')
    print('Log file:', log)
    # Persist the detected IP for other components to read.
    with open('values/mip.txt', 'w') as ipfile:
        ipfile.write(str(ssdd))
    print('Ip writed to file!' + str(ssdd))
def retweet_priority_list(self):
    """Retweet recent posts from every member of a Twitter list, skipping
    IDs already recorded in RetweetedIds.txt.

    WARNING(review): a span of this function was destroyed by a secrets
    scrubber (the ``******`` below) — the code that selected ``tweet`` from
    ``CUT`` (presumably ``for tweet in CUT:`` plus already-retweeted checks
    against ``ids``) is missing, so the block is not runnable as-is and is
    preserved verbatim for reconstruction from VCS history.
    """
    #retweets anything posted by users in a given twitter list.
    oldIds = open("RetweetedIds.txt", 'r')
    ids = oldIds.readlines()
    tweetedAlreadyUser = False
    oldID = False
    #loop through everyone in a certain users list. the list name uses the lists url slug.
    for member in tweepy.Cursor(self.api.list_members, 'LIST ID', 'LIST ID', wait_on_rate_limit=True, wait_on_rate_limit_notify=True).items():
        try:
            tweetedAlreadyUser = False #reset for each new user, because we havent retweeted that person yet
            CUT = self.api.user_timeline( screen_name=member.screen_name, count=100, include_rts=False, exclude_replies=True) # current user timeline
            self.sleep(5) #limit server requests
            print("selected user: "******"RetweetedIds.txt", 'a')
            writeUsedID.write( str(tweet.id) + "\n") #write the ID to the file
            print("wrote ID") #wrote ID text
            tweet.retweet() #retweet
            print("retweeted " + (str(tweet.id))) #retweeted text
            tweetedAlreadyUser = True #this user has retweeted, switch to next
            self.sleep( 90 ) #wait a bit to ensure you dont break twitter request limit
        except tweepy.TweepError as e:
            print(e.reason)
            # Error 185 = daily status-update limit reached: stop entirely.
            if "185" in e.reason:
                break
            else:
                #Code used to save leftover tweets to another txt file but thats no longer required
                #tweetlater = open("tweetLater.txt",'a')
                #tweetlater.write(str(tweet.id)+"\n")#write the ID to the file
                print("will retweet later: " + str(tweet.id))
        except tweepy.TweepError as e:
            print(e.reason)
            continue
        except StopIteration:
            break
    # NOTE(review): the duplicated except clauses above/below suggest the
    # redacted span contained a nested try — indentation here is a best guess.
    except tweepy.TweepError as e:
        print(e.reason)
        continue
    except StopIteration:
        break
def croling_naver():
    """Crawl Naver news search results for the query '해양경찰' and dump each
    article's title/URL/description into text files under c:/aa/<date>/.

    Bug fixes vs. the previous revision:
    - the date window was computed as ``strftime(...) - timedelta`` (string
      minus timedelta → TypeError); the subtraction now happens on the
      datetime object before formatting;
    - ``datetime.timedelta`` (attribute of the datetime *class*) replaced
      by a proper ``timedelta`` import;
    - ``except exception`` (NameError) corrected to ``except Exception``.
    """
    from datetime import timedelta
    now = datetime.now()
    nowdate = now.strftime('%y-%m-%d')
    # Search window: from two days ago up to today.
    yesterdaydate = (now - timedelta(days=2)).strftime('%y-%m-%d')
    page = 1
    query = '해양경찰'  # url encoding handled via parse.quote(query) downstream
    s_date = yesterdaydate
    e_date = nowdate
    s_from = s_date.replace("-", "")
    e_to = e_date.replace("-", "")
    filepath = "c:/aa/" + str(e_to)
    if not (os.path.isdir(filepath)):
        os.makedirs(os.path.join(filepath))
    f = open(filepath + '/' + query + '_' + str(e_to) + '.txt', 'w', encoding='utf-8')
    while page < 500:
        cont = get_naver_news(query, s_date, e_date, s_from, e_to, page)
        soup = BeautifulSoup(cont, 'html.parser')
        print(soup)
        # Randomized delay to avoid hammering the server.
        time.sleep(2 + random.random() * 4)
        for urls in soup.select("._sp_each_url"):
            try:
                if urls["href"].startswith("http://"):
                    print("\n{}\n{}\n".format(urls["title"], urls["href"]))
                    cont_detail = get_detail_news(urls["href"])
                    soup_detail = BeautifulSoup(cont_detail, 'html.parser')
                    # NOTE(review): title/description are reset on every
                    # non-og meta tag, so only the LAST matching tag order
                    # wins — preserved as-is.
                    for tag in soup_detail.find_all("meta"):
                        if tag.get("property") == "og:title":
                            title = tag.get("content")
                        elif tag.get("property") == "og:description":
                            description = tag.get("content")
                        else:
                            title = urls["title"]
                            description = ''
                    print(title)
                    # Strip characters that are illegal in Windows filenames.
                    title = title.replace('"', '').replace('?', '').replace(
                        '\r', '').replace('\n', '').replace('<', '').replace(
                        '>', '').replace('/', '')
                    print(title)
                    f = open(filepath + "/" + title + '.txt', 'w', encoding='utf-8')
                    f.write("\n{}\n\n{}\n\n{}\n".format(
                        title, urls["href"], description))
                    f.close()
            except Exception as e:
                print(e)
                continue
        page += 10
def post(self):
    """Create a trip for the current traveller and notify matching buyers.

    Reads arrival_city, flight_number, arrival_date (YYYY-MM-DD) and size
    from the request body; inserts a 'trips' row and emails every active
    order request for the destination airport, then deactivates it.
    Writes 'success' on insert, 'err' when any field is missing.
    """
    destination = self.get_body_argument('arrival_city', None)
    flight_number = self.get_body_argument('flight_number', None)
    arrival_date = self.get_body_argument('arrival_date', None)
    size = self.get_body_argument('size', None)
    user = self.get_current_user()
    user_entry = datastore['users'].find_one(email=user)
    if destination is not None:
        # resolve the city name to its airport row
        destination = datastore['airports'].find_one(name=destination)
    from datetime import datetime
    # BUG FIX: the original tested `(a, b, c, d) is not None`, which is
    # always True because a tuple object itself is never None — missing
    # fields slipped through. Check each field individually.
    if all(v is not None for v in (destination, flight_number, arrival_date, size)):
        # add flight: convert the arrival date into a datetime object
        arrival = datetime.strptime('{}'.format(arrival_date), '%Y-%m-%d')
        datastore['trips'].insert({
            'destination_airport_id': destination['id'],
            'arrival_date': arrival,
            'flight_number': flight_number,
            'created_at': datetime.now(),
            'traveller_id': user_entry['id'],
            'size': size
        })
        # alert any buyers with active requests for this airport
        requests = datastore['order_requests'].find(
            destination_airport_id=destination['id'], status='active')
        for request in requests:
            dest_airport = datastore['airports'].find_one(
                id=destination['id'])
            if request['status'] == 'active' and request[
                    'email'] != user_entry['email']:
                # NOTE(review): the URL interpolates the whole airport row
                # (`destination`), not an id/slug — looks suspicious but kept
                # as-is; TODO confirm intended link format.
                send_simple_message(
                    subject='We found a traveller headed your way!',
                    text="""
                        Hey {},

                        We found a traveller headed to {} landing at {}. Buy your items here at http://getferry.com/buy?airport={}.

                        Thanks,
                        Friends at Ferry
                        """.format(request['email'], dest_airport['city_served'],
                                   dest_airport['name'], destination),
                    email=request['email'])
                datastore['order_requests'].update(
                    {
                        'id': request['id'],
                        'status': 'inactive'
                    }, ['id'])
        return self.write('success')
    return self.write('err')
#!/usr/bin/python #-*- coding: utf-8 -*- import sys import subprocess import datetime import MySQLdb from datetime import datetime, timedelta namedict = {'10.1.1.1':'ScoringBoard', '10.1.2.2':'UserRoute', '10.1.1.2':'UserRoute', '10.1.100.3':'FireWall', '10.1.2.3':'FireWall','10.1.10.3':'FireWall', '10.1.10.1':'WebSite', '10.1.100.150':'BigBoss', '10.1.100.1':'WebSiteInside', '10.1.100.50':'FileServer', '10.1.100.25':'Staff'} servicedict = {'21':'FTP', '23':'Telnet', '80':'HTTP', '443':'HTTPS', '445':'SMB', '8080':'HTTP'} now = datetime.now() - timedelta(seconds=90) nowdate = datetime.strftime(now, '%Y-%m-%d') nowtime = datetime.strftime(now, '%Y%m%d%H%M') flow = subprocess.Popen(args = 'nfdump -r /netflow/flow/'+nowdate+'/nfcapd.'+nowtime, shell = True, stdout = subprocess.PIPE) s = flow.stdout.read() s = s.strip() flows = s.splitlines() flows = flows[1:-4] db = MySQLdb.connect(host = '192.168.6.145', user = '******', passwd = 'bemo0GF!', db = 'iss') cursor = db.cursor() log = open('/tmp/log', 'a') for f in flows:
# the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, # MA 02110-1301, USA. # # import time import datetime from datetime import datetime while (1): year = datetime.now().year month = datetime.now().month day = datetime.now().day hour = datetime.now().hour minute = datetime.now().minute second = datetime.now().second print year, month, day, hour, minute, second time.sleep(1)
def main():
    """Paraphrase a data set via pivot translation and report quality metrics.

    Command-line flags:
        -f  data-set file name under dataset/ (required)
        -g  if set, use the official Google Translate API, not a wrapper
        -l  pivot language level (0, 1 or 2): single- or multi-pivot
        -p  "true" -> pretrained MarianMT translators (default),
            "false" -> online engines (Yandex/Google/DeepL/MyMemory)
        -c  cut-off criterion to stop paraphrasing; 0 means no cut-off
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('-f', required=True)  # data set file name
    parser.add_argument('-g')  # use google_translator.translate, not translate_wrapper
    parser.add_argument('-l')  # pivot language level, range 0..2
    parser.add_argument('-p')  # pretrained (MarianMT) vs online engines
    parser.add_argument('-c')  # cut-off criteria, default 0 (no cut-off)
    args = parser.parse_args()

    # load configs from config.ini file
    config = configparser.ConfigParser(inline_comment_prefixes="#")
    config.read(os.path.join(os.path.dirname(__file__), ".", "config.ini"))
    my_memory_config = config["MYMEMORY"]
    yandex_config = config["YANDEX"]
    google_config = config["GOOGLE"]
    deepl_config = config['DEEPL']
    try:
        if str(args.p) == "None":  # if -p not defined, default to "true"
            args.p = "true"
        if args.p == "false":
            # Online engines require API credentials from config.ini.
            if "email" not in my_memory_config or my_memory_config["email"] == "":
                raise Exception("Define a Valid email address for MyMemory API in config.ini")
            else:
                valid_mail = my_memory_config['email']
            if "api_key" not in yandex_config or yandex_config["api_key"] == "":
                raise Exception("Yandex Translate API token is not defined in config.ini")
            else:
                yandex_api_key = yandex_config["api_key"]
            if "api_key" not in deepl_config or deepl_config["api_key"] == "":
                raise Exception("DeepL API Authentication Key not defined in config.ini")
            else:
                deepl_api_key = deepl_config["api_key"]
            if args.g:  # -g: use the official Google Translate API, not a wrapper
                if "api_key" not in google_config or google_config["api_key"] == "":
                    raise Exception("Google Translate API token is not defined in config.ini")
                else:
                    google_api_key = google_config['api_key']
        if args.l:
            pivot_level = int(args.l)
            if pivot_level < 0 or pivot_level > 2:
                # BUG FIX: the original had a bare `raise` with the Exception
                # constructor orphaned on the next line — outside an `except`
                # block a bare `raise` is a RuntimeError. Raise explicitly.
                raise Exception("Pivot-level value should be 0,1 or 2")
        else:
            pivot_level = 0
        if args.c:
            cut_off = int(args.c)
            if cut_off < 0:
                raise Exception("Cut-off parameter value should be greater or equal to 0")
        else:
            cut_off = 0  # default value
    except Exception as e:
        print(str(e))
        exit()
    file_path = os.path.join(os.path.dirname(__file__), ".", "dataset/" + args.f)  # data to paraphrase
    t1 = time.time()  # to compute overall time execution
    now = datetime.now()
    start_time = now.strftime("%H:%M:%S")
    pr_green("Starting time: " + start_time)
    if args.p == "true":
        paraphrases = pretrained_transaltion(file_path, pivot_level, cut_off)
    else:
        paraphrases = online_transaltion(file_path, deepl_api_key, valid_mail, pivot_level, cut_off)
    # compute diversity metrics
    print("\nCompute Mean-TTR, Mean-PINC and DIV scores: ")
    diversity_score = diversity_metrics.main(paraphrases, cut_off)
    for k, v in diversity_score.items():
        print("\t============================================================")
        print("\t Cut_off parpameter = ", k, " ")
        print("\t============================================================")
        print("\t\tMean TTR: ", v[0]["Mean TTR"])
        print("\t\tMean PINC: ", v[1]["Mean PINC"])
        print("\t\tDiversity: ", v[2]['Diversity'])
    paraphrases = remove_cosine_score(paraphrases)
    # compute BLEU-Score of generated paraphrases
    print("\nCompute BLEU, GLEU and CHRF scores: ")
    bleu_score.main(paraphrases, cut_off)
    gleu_score.main(paraphrases, cut_off)
    chrf_score.main(paraphrases, cut_off)
    # BUG FIX: `datetime` in this module is the class (datetime.now() above),
    # so `datetime.timedelta` would raise AttributeError. Use timedelta.
    from datetime import timedelta
    t2 = "Overall elapsed time: " + str(timedelta(seconds=time.time() - t1))
    pr_green(t2)
def saveimage(frame, path='./resource/CAP_IMG_THREAD.jpg', size=(100, 75)):
    """Save a downscaled JPEG snapshot of *frame*.

    Args:
        frame: image array accepted by cv2.resize (e.g. a capture frame).
        path: output file path; default matches the original hard-coded value.
        size: (width, height) of the thumbnail; default matches the original.

    Backward compatible: existing single-argument callers are unaffected.
    """
    # BUG FIX: removed the unused local `dt = datetime.now()`.
    thumbnail = cv2.resize(frame, size)
    cv2.imwrite(path, thumbnail)
def format_tweets(tweets, user, _type, _query, geo_s): """Function that processes tweets to obtain specific information in a dataframe Args: tweets (tweepy.models.SearchResults): api.search or api.user_timeline result user (str): screen name of an user _type (str): mentions or timeline _query (str): query search geo_s (str): geocode "lat,long,radious" Returns: df_tw, df_rtw (tuple(pd.DataFrame)): dataframes with collected information from tweets and retweets respectively. """ tweets_lst = [] retweets_lst = [] for idx, tweet in enumerate(tweets): access_time = datetime.now().strftime("%Y %b %d %H:%M:%S") date = datetime.strptime(tweet._json["created_at"], '%a %b %d %H:%M:%S %z %Y') date_5 = date - timedelta(hours=5) date_format = date_5.strftime("%Y %b %d %H:%M:%S") if (_type == "mentions") and (tweet._json["user"]["screen_name"] == user) and (tweet._json["in_reply_to_screen_name"] == user): pass else: # Getting tweets if not (tweet._json["full_text"].startswith('RT @')): temp = [ tweet._json["id"], access_time, date_format, tweet._json["user"]["screen_name"], _query, False, None, _type, geo_s, tweet._json["geo"], tweet._json["user"]["location"], tweet._json["full_text"] ] tweets_lst.append(temp) # Getting retweets else: temp = [ tweet._json["id"], access_time, date_format, tweet._json["user"]["screen_name"], _query, True, tweet._json["retweeted_status"]["user"]["screen_name"], _type, geo_s, tweet._json["geo"], tweet._json["retweeted_status"]["user"]["location"], tweet._json["retweeted_status"]["full_text"] ] retweets_lst.append(temp) df_tw = pd.DataFrame(tweets_lst, columns=[ 'id', 'fecha_consulta', 'fecha_escritura', 'cuenta_origen', 'query_busqueda', 'retweet', 'retweeted_from', 'type', 'geo_search', 'geo', 'location', 'texto' ]) df_rtw = pd.DataFrame(retweets_lst, columns=[ 'id', 'fecha_consulta', 'fecha_escritura', 'cuenta_origen', 'query_busqueda', 'retweet', 'retweeted_from', 'type', 'geo_search', 'geo', 'location', 'texto' ]) return df_tw, df_rtw
import openerp.addons.decimal_precision as dp import netsvc import datetime import calendar from datetime import date import re import urllib import urllib2 import logging from datetime import date from datetime import datetime from datetime import timedelta from dateutil import relativedelta import calendar import base64 today = datetime.now() dt_time = time.strftime('%m/%d/%Y %H:%M:%S') class kg_debit_note(osv.osv): def _amount_line_tax(self, cr, uid, line, context=None): val = 0.0 qty = line.qty amt_to_per = 0 kg_discount_per = 0 tot_discount_per = 0 for c in self.pool.get('account.tax').compute_all( cr, uid, line.tax_id, line.price_unit * (1 - (tot_discount_per or 0.0) / 100.0), qty, line.product_id, line.header_id.supplier_id)['taxes']:
downloads_metadata = [[u['URL'] for u in r['umm']['RelatedUrls'] if u['Type']=="EXTENDED METADATA"] for r in results['items']] for f in downloads_data: downloads_all.append(f) for f in downloads_metadata: downloads_all.append(f) downloads = [item for sublist in downloads_all for item in sublist] # Finish by downloading the files to the data directory in a loop. success_cnt=failure_cnt=0 for f in downloads: try: for extension in extensions: if f.lower().endswith((extension)): urlretrieve(f, data+"/"+basename(f)) print(datetime.now()) print("SUCCESS: "+f+"\n\n") success_cnt=success_cnt+1 except Exception as e: print(datetime.now()) print("FAILURE: "+f+"\n\n") failure_cnt=failure_cnt+1 print(e) print("Downloaded: "+str(success_cnt)+" files\n") print("Files Failed to download:"+str(failure_cnt)+"\n") delete_token(token_url,token) print("END \n\n")
from dateutil.tz import tzutc
from dateutil import parser
# Audit IAM users: for each user collect groups, inline policies, MFA
# devices and access keys. (boto3 and datetime are presumably imported
# earlier in the file — not visible in this excerpt.)
iam = boto3.client('iam')
users = iam.list_users()
for i in users['Users']:
    groups = iam.list_groups_for_user(UserName=i['UserName'])
    policies = iam.list_user_policies(UserName=i['UserName'])
    mfadevices = iam.list_mfa_devices(UserName=i['UserName'])
    accesskeys = iam.list_access_keys(UserName=i['UserName'])
    if 'PasswordLastUsed' in i.keys():
        # NOTE(review): datetime.now(tz) returns the *current* time in the
        # PasswordLastUsed timezone, not the last-used timestamp itself —
        # suspected bug; probably meant i['PasswordLastUsed'] or an age
        # computation. TODO confirm intent before changing.
        passwordLastUsed = datetime.now(i['PasswordLastUsed'].tzinfo)
    else:
        passwordLastUsed = "NONE"
    # NOTE(review): boto3 list_* calls return dict responses, never None, so
    # these fallbacks look unreachable — presumably meant to test the inner
    # lists (e.g. groups['Groups']). TODO confirm.
    if groups is None:
        groups = "NO GROUPS"
    if policies is None:
        policies = "NO POLICIES"
    if mfadevices is None:
        mfadevices = "NO MFA DEVICES"
    if accesskeys is None:
        accesskeys = "NO KEYS"
def show_artist(artist_id):
    """Render the artist detail page, including past and upcoming shows.

    Args:
        artist_id: primary key of the Artist to display.

    Returns the 404 error page when no artist matches *artist_id*.
    """
    artist = Artist.query.get(artist_id)
    # Guard clause: unknown id -> 404 page.
    if artist is None:
        return render_template('errors/404.html')

    def venue_show_info(show):
        # Display info for one show's venue. One Venue lookup per show
        # (N+1 queries — acceptable at this data size).
        venue = Venue.query.get(show.venue_id)
        return {
            "venue_id": show.venue_id,
            "venue_name": venue.name,
            "venue_image_link": venue.image_link,
            "start_time": show.start_time.strftime("%Y-%m-%d %H:%M:%S"),
        }

    # BUG FIX: snapshot "now" once — the original read the clock separately
    # for the two queries, so a show starting at that instant could fall
    # into both buckets or neither.
    now = datetime.now()
    previous_shows = Show.query.filter(Show.artist_id == artist_id).filter(
        Show.start_time < now).all()
    future_shows = Show.query.filter(Show.artist_id == artist_id).filter(
        Show.start_time > now).all()

    past_shows_info = [venue_show_info(s) for s in previous_shows]
    upcoming_shows_info = [venue_show_info(s) for s in future_shows]

    artist_details = {
        "id": artist.id,
        "name": artist.name,
        "genres": artist.genres,
        "city": artist.city,
        "state": artist.state,
        "phone": artist.phone,
        "website": artist.website,
        "facebook_link": artist.facebook_link,
        "seeking_venue": artist.seeking_venue,
        "seeking_description": artist.seeking_description,
        "image_link": artist.image_link,
        "past_shows": past_shows_info,
        "upcoming_shows": upcoming_shows_info,
        "past_shows_count": len(past_shows_info),
        "upcoming_shows_count": len(upcoming_shows_info),
    }
    return render_template('pages/show_artist.html', artist=artist_details)
# Getting mentions # --------------- tw_m, rtw_m = get_tweets_mentions( _user, api=api, number_of_api_calls=number_of_api_calls_per_user, items_per_call=100) print((len(tw_m) + len(rtw_m)), "mentions collected for user: "******"tweets", "and", len(rtw), "retweets collected for user: "******"%Y_%b_%d_%H:%M:%S") out = access_time + "_cuentas.csv" all_dfs = pd.concat(dfs, ignore_index=True) # quoting=csv.QUOTE_ALL helps to preserve the original tweet in case that it # contains either commas or tabs all_dfs.to_csv(out, sep='\t', index=False, header=True, quoting=csv.QUOTE_ALL)
def show_venue(venue_id):
    """Render the venue detail page, including past and upcoming shows.

    Args:
        venue_id: primary key of the Venue to display.

    Returns the 404 error page when no venue matches *venue_id*.
    """
    venue = Venue.query.get(venue_id)
    # Guard clause: unknown id -> 404 page.
    if venue is None:
        return render_template('errors/404.html')

    def artist_show_info(show):
        # Display info for one show's artist. One Artist lookup per show
        # (N+1 queries — acceptable at this data size).
        artist = Artist.query.get(show.artist_id)
        return {
            "artist_id": show.artist_id,
            "artist_name": artist.name,
            "artist_image_link": artist.image_link,
            "start_time": show.start_time.strftime("%Y-%m-%d %H:%M:%S"),
        }

    # BUG FIX: snapshot "now" once — the original read the clock separately
    # for the two queries, so a show starting at that instant could fall
    # into both buckets or neither.
    now = datetime.now()
    previous_shows = Show.query.filter(Show.venue_id == venue_id).filter(
        Show.start_time < now).all()
    future_shows = Show.query.filter(Show.venue_id == venue_id).filter(
        Show.start_time > now).all()

    past_shows_info = [artist_show_info(s) for s in previous_shows]
    upcoming_shows_info = [artist_show_info(s) for s in future_shows]

    venue_details = {
        "id": venue.id,
        "name": venue.name,
        "genres": venue.genres,
        "address": venue.address,
        "city": venue.city,
        "state": venue.state,
        "phone": venue.phone,
        "website": venue.website,
        "facebook_link": venue.facebook_link,
        "seeking_talent": venue.seeking_talent,
        "seeking_description": venue.seeking_description,
        "image_link": venue.image_link,
        "past_shows": past_shows_info,
        "upcoming_shows": upcoming_shows_info,
        "past_shows_count": len(past_shows_info),
        "upcoming_shows_count": len(upcoming_shows_info),
    }
    return render_template('pages/show_venue.html', venue=venue_details)
def log(s, label='INFO'):
    """Write a timestamped message to stdout and flush immediately.

    Args:
        s: the message (converted with str()).
        label: severity tag prepended to the line, e.g. 'INFO' or 'ERROR'.
    """
    line = '{} [{}] {}\n'.format(label, datetime.now(), s)
    sys.stdout.write(line)
    sys.stdout.flush()
def venues():
    """Render the venues overview page, grouped by (city, state).

    Each area entry lists its venues together with the number of shows
    scheduled in the future at that venue.
    """
    # Distinct (city, state) pairs; the count column is computed by the
    # query but not consumed here.
    places = Venue.query.with_entities(
        Venue.city, Venue.state, func.count(Venue.id)
    ).group_by(Venue.city, Venue.state).all()

    areas = []
    for place in places:
        local_venues = Venue.query.filter_by(
            state=place.state).filter_by(city=place.city).all()
        summaries = [{
            "id": v.id,  # id of the venue
            "name": v.name,  # name of the venue
            "num_upcoming_shows": len(
                Show.query.filter(Show.venue_id == v.id)
                .filter(Show.start_time > datetime.now()).all()),
        } for v in local_venues]
        areas.append({
            "city": place.city,    # city where the venues are located
            "state": place.state,  # state where the venues are located
            "venues": summaries,
        })
    return render_template('pages/venues.html', areas=areas)
def search_artists():
    """Case-insensitive substring search over artist names.

    e.g. a search for "A" matches "Guns N Petals", "Matt Quevado" and
    "The Wild Sax Band"; "band" matches only "The Wild Sax Band".
    Renders the results page with each match and its upcoming-show count.
    """
    term = request.form['search_term']
    # ilike gives case-insensitive matching at the database level
    matches = Artist.query.filter(Artist.name.ilike('%' + term + '%')).all()

    found = []
    for candidate in matches:
        upcoming = Show.query.filter(Show.artist_id == candidate.id).filter(
            Show.start_time > datetime.now()).all()
        found.append({
            "id": candidate.id,
            "name": candidate.name,
            "num_upcoming_shows": len(upcoming),
        })

    payload = {"data": found, "count": len(found)}
    return render_template('pages/search_artists.html',
                           results=payload,
                           search_term=request.form.get('search_term', ''))
x = dfg.head(10) x = ''.join(map(str, x)) headerInfo = """--- layout: post title: "Magnaball Ticket Need Stats" date: 2015-08-18 05:36:07 categories: magnaball ---""" text_file = open( '/Users/danielmsheehan/GitHub/magnaball.github.com/_posts/2015-08-18-magnaball-stats.markdown', "w") x = x.replace('dtype: int64', '').replace('tickets', '') y = datetime.now().strftime('%Y-%m-%d %H:%M:%S') z = int(x) * 225 theText = headerInfo + '\n' + '\n' + 'The number of tickets people want is: <strong>' + x + '</strong>' + '\n' + '\n' + 'Ticket count may not updated as frequently as map. Updated as of ' + y + ' EST.' + '\n' + '\n' + 'At $225 a ticket, that would be <strong>$' + str( z) + '</strong> in GA tickets.' text_file.write(theText) text_file.close() print theText print i print datetime.now().strftime('%Y-%m-%d %H:%M:%S') time.sleep(pause)
def search_venues():
    """Case-insensitive substring search over venue names.

    Renders the results page with each matching venue and the number of
    shows scheduled there in the future.
    """
    term = request.form['search_term']
    # ilike gives case-insensitive matching at the database level
    matches = Venue.query.filter(Venue.name.ilike('%' + term + '%')).all()

    found = []
    for candidate in matches:
        upcoming = Show.query.filter(Show.venue_id == candidate.id).filter(
            Show.start_time > datetime.now()).all()
        found.append({
            "id": candidate.id,      # id of the venue
            "name": candidate.name,  # name of the venue
            "num_upcoming_shows": len(upcoming),
        })

    payload = {"data": found, "count": len(found)}
    return render_template('pages/search_venues.html',
                           results=payload,
                           search_term=request.form.get('search_term', ''))
#generating report_form report_form = { "Parameters.ReportId": str(i), "Parameters.From": FROM_form, "Parameters.To": TO_form, "Parameters.Category.Id": "", "Parameters.Category.Name": "", "Parameters.Category.isLeafValidaton": "", "Parameters.OutputFormat": "Csv" } # POST request r = s.post(WEB_MyReports, data=report_form) print(r.status_code == 200) tz = pytz.timezone('Europe/Warsaw') req_time = datetime.strftime(datetime.now(tz), '%H-%M') print("Time of request is " + req_time) ### DOWNLOADING and processing the reports ### #once the report is requested, we wait until it will be available on page, than it can be downloaded and processed All_rep = pd.DataFrame( [["Raport przejść", 4, "Raport_przejsc_" + FROM_un + "_" + TO_un + ".csv"], ["Zestawienie opinii o Sklepie", 5, "tbs"], ["Raport boksów reklamowych", 6, "tbs"], ["Dynamika popularności", 7, "tbs"], ["Popularność produktu Sklepu", 9, "tbs"], ["Raport podsumowujący", 10, "tbs"], ["Top 2000 rekomendowanych produktów", 11, "tbs"], ["Raport promowanych ofert na stronie kategorii", 4255, "tbs"], ["Raport obecności w Strefach polecanych ofert i produktów", 4311, "tbs"],
from datetime import datetime
from datetime import timedelta
from helpers import determine_user_level

# Load local environment variables when running outside the deployed host.
if os.path.exists("env.py"):
    import env

app = Flask(__name__)
app.config["MONGO_DBNAME"] = os.environ.get("MONGO_DBNAME")
app.config["MONGO_URI"] = os.environ.get("MONGO_URI")
app.secret_key = os.environ.get("SECRET_KEY")

mongo = PyMongo(app)
# SECURITY NOTE(review): the Google Maps API key is hardcoded here — it
# should be loaded from the environment like the Mongo settings above.
gmaps = googlemaps.Client(key="AIzaSyAh2mLYzYzAmAWbN_OUY9CksCHUJm82WWg")

# NOTE(review): these are evaluated once at import time, so for a
# long-running server they reflect startup time, not request time —
# TODO confirm that is intended.
timestamp = datetime.now()
createdon = timestamp.strftime("%d/%m/%Y, %H:%M:%S")
createdondate = timestamp.strftime("%d/%m/%Y")
days_in_year = 365.2425  # mean Gregorian year length


@app.route("/")
@app.route("/register", methods=["GET", "POST"])
def register():
    """ renders register page with input for e-mail address, a choose password input, and a confirm password input """
    if request.method == "POST":
        # Reject registration when the e-mail is already taken.
        existing_user = mongo.db.users.find_one(
            {"email": request.form.get("email").lower()})
        # (function body continues beyond this excerpt — truncated here)
        if existing_user:
rm_img(city[0], country) elif ar == 1: c_c_r = 0 if c_c_r == 0: cities = weather_codes.get_cities(countries) elif c_c_r == 1: cities = weather_codes.get_cities(cities) elif c_c_r == 2: cities = weather_codes.get_cities(1) for c in cities: c[0] = c[0].replace('/', ' ') print(c) #print(cities) get_data_multithread.fetch_data_multithread(cities, 1900, 2016) dt = datetime.now() start = (dt.minute * 60 + dt.second) * 1000000 + dt.microsecond ############################################ for city in cities: country = city[2] print(city[0]) table = [] flag = True count_to_flag = 0 year = 2015 while flag: url = "http://www.wunderground.com/history/airport/" + city[ 1] + "/" + str( year ) + "/1/1/CustomHistory.html?dayend=31&monthend=12&yearend=" + str(
# import sys # print(sys.copyright) # Python模块的版权 # print(sys.platform) # 当前操作系统的版本,一般为win32或win64 # print(sys.version) # Pyhton的版本 import datetime from datetime import datetime, timezone print(datetime.now()) # 日期时间
def __init__(self):
    """Initialize the instance with the current timestamp."""
    # Presumably the time of the last processed event/tick, used by other
    # methods of the enclosing class for elapsed-time checks — TODO confirm.
    self.last = datetime.now()
def generate_xlsx_report(self, workbook, data, partners):
    """Write the 'Sale report total' worksheet: one row per sale-order line.

    :param workbook: xlsxwriter Workbook the report is rendered into.
    :param data: report payload — unused in the visible portion of this method.
    :param partners: record carrying the ``date_from``/``date_to`` filter bounds
        (either may be unset/falsy).
    """
    # Timestamp shown in the top-right header cell (Thai: "report as of date ...").
    now = datetime.now()
    _now = 'รายงาน ณ วันที่ ' + format_date_time(now)
    output = io.BytesIO()  # NOTE(review): never used in the visible code — dead, or used past this chunk?
    date_from = partners.date_from
    date_to = partners.date_to
    # Pre-compute all four candidate record sets up front; only one of them is
    # iterated below, chosen by which of the two filter dates was supplied.
    get_both_date = self.env['sale.order'].search([
        ('create_date', '>=', date_from),
        ('create_date', '<=', date_to)
    ])
    get_date_from = self.env['sale.order'].search([('create_date', '>=', date_from)])
    get_date_to = self.env['sale.order'].search([('create_date', '<=', date_to)])
    get_sale_order = self.env['sale.order'].search([])
    #get_company = self.env['sale.order'].search([])
    # Cell formats.
    bold = workbook.add_format({
        'bold': True,
        'align': 'center',
        'valign': 'vcenter',
        'bg_color': 'red'
    })
    price_format = workbook.add_format({'num_format': '$#,##0.00'})
    # Worksheet and header-area layout.
    sheet = workbook.add_worksheet('Sale report total')
    sheet.set_column('K:N', 12)
    sheet.set_row(0, 30)
    sheet.merge_range('K1:N1', _now, bold)
    sheet.set_column('A:N', 12)
    sheet.set_row(1, 30)
    sheet.set_row(2, 30)
    sheet.set_row(3, 30)
    # NOTE(review): date_format is created but never applied to any cell below.
    date_format = workbook.add_format({
        'num_format': 'd mmm yyyy',
        'bold': True,
        'align': 'center',
        'valign': 'vcenter',
        'bg_color': 'red'
    })
    # Report title (Thai: "Sale Order revenue report").
    sheet.merge_range('A2:N2', 'รายงานยอดขายของใบส่งขาย(Sale Order)', bold)
    # Row 3 shows the effective date range ("from ... to ..."); a missing bound
    # falls back to the create_date of the oldest/newest sale order on record.
    if date_from == False and date_to == False:
        _date_from = self.env['sale.order'].search([], order='create_date asc', limit=1)
        _date_to = self.env['sale.order'].search([], order='create_date desc', limit=1)
        sheet.merge_range(
            'A3:N3', 'จากวันที่ ' +
            format_date_time(_date_from.create_date.date()) + ' ถึง ' +
            format_date_time(_date_to.create_date.date()), bold)
    elif date_from == False:
        _date_from_last = self.env['sale.order'].search(
            [('create_date', '<=', date_to)], order='create_date asc', limit=1)
        sheet.merge_range(
            'A3:N3', 'จากวันที่ ' +
            format_date_time(_date_from_last.create_date.date()) + ' ถึง '
            + format_date_time(date_to), bold)
    elif date_to == False:
        _date_to_last = self.env['sale.order'].search(
            [('create_date', '>=', date_from)], order='create_date desc', limit=1)
        sheet.merge_range(
            'A3:N3', 'จากวันที่ ' + format_date_time(date_from) + ' ถึง ' +
            format_date_time(_date_to_last.create_date.date()), bold)
    else:
        sheet.merge_range(
            'A3:N3', 'จากวันที่ ' + format_date_time(date_from) + ' ถึง ' +
            format_date_time(date_to), bold)
    # NOTE(review): head_cell_format is configured but never applied to a cell.
    head_cell_format = workbook.add_format()
    head_cell_format.set_align('vcenter')
    # Column header row (worksheet row index 5). Thai headers: branch, SO
    # number, SO date, customer, salesperson, product, description, qty,
    # untaxed amount, VAT, total, status.
    sheet.set_row(5, 20)
    sheet.set_column('A:N', 20)
    sheet.write(5, 0, 'No.', bold)
    sheet.write(5, 1, 'สาขา', bold)
    sheet.write(5, 2, 'เลขที่ใบสั่งขาย', bold)
    sheet.write(5, 3, 'วันที่ใบสั่งขาย', bold)
    sheet.write(5, 4, 'ชื่อลูกค้า', bold)
    sheet.write(5, 5, 'ชื่อผู้ขาย', bold)
    sheet.write(5, 6, 'ชื่อสินค้า', bold)
    sheet.write(5, 7, 'รายละเอียด', bold)
    sheet.write(5, 8, 'จำนวน', bold)
    sheet.write(5, 9, 'มูลค่าก่อนภาษี', bold)
    sheet.write(5, 10, 'ภาษีมูลค่าเพิ่ม', bold)
    sheet.write(5, 11, 'มูลค่ารวม', bold)
    sheet.write(5, 12, 'สถานะ', bold)
    row = 6
    count = 1
    # (A large commented-out earlier draft of the row-writing loops, looping
    # over a date range per order, was removed here.)
    # Data rows: one worksheet row per order line of each matching order.
    # NOTE(review): per-line amount columns (9-11) repeat the whole ORDER's
    # amount_untaxed/amount_tax/amount_total, not the line's — confirm intended.
    if date_to == False:
        for get_in_sale_order in get_date_from:
            for get_in_order_line_all in get_in_sale_order.order_line:
                sheet.write(row, 0, count)
                sheet.write(row, 1, get_in_sale_order.team_id.name)
                sheet.write(row, 2, get_in_sale_order.name)
                sheet.write(
                    row, 3,
                    format_date_time(get_in_sale_order.create_date.date()))
                sheet.write(row, 4, get_in_sale_order.partner_id.name)
                sheet.write(row, 5, get_in_sale_order.user_id.name)
                sheet.write(row, 6, get_in_order_line_all.product_id.name)
                sheet.write(row, 7, get_in_order_line_all.name)
                sheet.write(row, 8, get_in_order_line_all.product_uom_qty)
                sheet.write(row, 9, get_in_sale_order.amount_untaxed, price_format)
                sheet.write(row, 10, get_in_sale_order.amount_tax, price_format)
                sheet.write(row, 11, get_in_sale_order.amount_total, price_format)
                sheet.write(row, 12, get_in_sale_order.state)
                row += 1
                count += 1
    elif date_from == False:
        for get_in_sale_order in get_date_to:
            for get_in_order_line_all in get_in_sale_order.order_line:
                sheet.write(row, 0, count)
                sheet.write(row, 1, get_in_sale_order.team_id.name)
                sheet.write(row, 2, get_in_sale_order.name)
                sheet.write(
                    row, 3,
                    format_date_time(get_in_sale_order.create_date.date()))
                sheet.write(row, 4, get_in_sale_order.partner_id.name)
                sheet.write(row, 5, get_in_sale_order.user_id.name)
                sheet.write(row, 6, get_in_order_line_all.product_id.name)
                sheet.write(row, 7, get_in_order_line_all.name)
                sheet.write(row, 8, get_in_order_line_all.product_uom_qty)
                sheet.write(row, 9, get_in_sale_order.amount_untaxed, price_format)
                sheet.write(row, 10, get_in_sale_order.amount_tax, price_format)
                sheet.write(row, 11, get_in_sale_order.amount_total, price_format)
                sheet.write(row, 12, get_in_sale_order.state)
                row += 1
                count += 1
    elif date_from and date_to:
        for get_in_sale_order in get_both_date:
            for get_in_order_line_all in get_in_sale_order.order_line:
                sheet.write(row, 0, count)
                sheet.write(row, 1, get_in_sale_order.team_id.name)
                sheet.write(row, 2, get_in_sale_order.name)
                sheet.write(
                    row, 3,
                    format_date_time(get_in_sale_order.create_date.date()))
                sheet.write(row, 4, get_in_sale_order.partner_id.name)
                sheet.write(row, 5, get_in_sale_order.user_id.name)
                sheet.write(row, 6, get_in_order_line_all.product_id.name)
                sheet.write(row, 7, get_in_order_line_all.name)
                sheet.write(row, 8, get_in_order_line_all.product_uom_qty)
                sheet.write(row, 9, get_in_sale_order.amount_untaxed, price_format)
                sheet.write(row, 10, get_in_sale_order.amount_tax, price_format)
                sheet.write(row, 11, get_in_sale_order.amount_total, price_format)
                sheet.write(row, 12, get_in_sale_order.state)
                row += 1
                count += 1
    # NOTE(review): this is a separate `if`, NOT part of the `elif` chain above.
    # When BOTH dates are falsy, the first branch (`date_to == False`) has
    # already emitted rows from get_date_from, and this block emits every order
    # again — the report rows appear to be duplicated. Looks like a bug;
    # confirm the intended branch order (both-missing case should probably be
    # checked first or folded into the chain).
    if date_to == False and date_from == False:
        for get_in_sale_order in get_sale_order:
            for get_in_order_line_all in get_in_sale_order.order_line:
                sheet.write(row, 0, count)
                sheet.write(row, 1, get_in_sale_order.team_id.name)
                sheet.write(row, 2, get_in_sale_order.name)
                sheet.write(
                    row, 3,
                    format_date_time(get_in_sale_order.create_date.date()))
                sheet.write(row, 4, get_in_sale_order.partner_id.name)
                sheet.write(row, 5, get_in_sale_order.user_id.name)
                sheet.write(row, 6, get_in_order_line_all.product_id.name)
                sheet.write(row, 7, get_in_order_line_all.name)
                sheet.write(row, 8, get_in_order_line_all.product_uom_qty)
                sheet.write(row, 9, get_in_sale_order.amount_untaxed, price_format)
                sheet.write(row, 10, get_in_sale_order.amount_tax, price_format)
                sheet.write(row, 11, get_in_sale_order.amount_total, price_format)
                sheet.write(row, 12, get_in_sale_order.state)
                row += 1
                count += 1