def build_realtime_measurements(msm, transductor):
    """Create or update the RealTimeMeasurement row for ``transductor``.

    Args:
        msm: dict of readings (voltage_a/b/c, current_a/b/c,
            total_active_power, total_reactive_power, total_power_factor)
            plus a ``transductor_collection_date`` ISO-8601 string.
        transductor: transductor instance the measurement belongs to.
    """
    # update_or_create replaces the original filter()+get()/create()
    # sequence: one lookup instead of two and no gap between the
    # existence check and the fetch.
    RealTimeMeasurement.objects.update_or_create(
        transductor=transductor,
        defaults={
            'voltage_a': msm['voltage_a'],
            'voltage_b': msm['voltage_b'],
            'voltage_c': msm['voltage_c'],
            'current_a': msm['current_a'],
            'current_b': msm['current_b'],
            'current_c': msm['current_c'],
            'total_active_power': msm['total_active_power'],
            'total_reactive_power': msm['total_reactive_power'],
            'total_power_factor': msm['total_power_factor'],
            'collection_date': datetime.strptime(
                msm['transductor_collection_date'],
                '%Y-%m-%dT%H:%M:%S.%f'),
        })
def ReportView(request):
    """Cashier report view.

    AJAX requests return 'report_details.html' with transactions of the
    logged-in user filtered by the startDate/endDate GET params;
    non-AJAX requests render the full 'cashier/report.html' page with
    all of the user's transactions.
    """
    if request.is_ajax():
        endDate = None
        startDate = request.GET.get('startDate')
        # Missing/empty endDate defaults to now.
        # NOTE(review): this branch yields a datetime while the other
        # yields a date — confirm the __lte comparison is intended.
        if request.GET.get('endDate') == '' or request.GET.get('endDate') is None:
            endDateConverter = datetime.today()
        else:
            endDate = request.GET.get('endDate')
            endDateConverter = datetime.strptime(endDate, "%Y-%m-%d").date()
        startDateConverter = datetime.strptime(startDate, "%Y-%m-%d").date()
        # Transactions for this user within the requested window.
        from_user = DaftarTransaksi.objects.filter(
            user_id=request.user.id,
            created__date__gte=startDateConverter,
            created__date__lte=endDateConverter)
        daftar_barang = ListProductTransaksi.objects.filter(
            transaksi_id__in=from_user)
        return render(request, 'report_details.html', {
            'daftar_barang': daftar_barang,
            'num': startDateConverter
        })
    # Non-AJAX: show everything for this user.
    from_user = DaftarTransaksi.objects.filter(user_id=request.user.id)
    daftar_barang = ListProductTransaksi.objects.filter(
        transaksi_id__in=from_user)
    context = {
        'daftar_barang': daftar_barang,
        'from_user': from_user,
    }
    return render(request, 'cashier/report.html', context)
def get_kobo_forms(surveyID=None):
    """Return Kobo forms as (formid, "title (date)") tuples, sorted descending.

    When ``surveyID`` is given, forms whose locally stored survey maps to a
    *different* survey id are excluded; forms with no local survey record
    are always included (same behaviour as before, with the duplicated
    if/else bodies collapsed into one path).
    """
    base_response = requests.get(kobo_constants.kobo_form_link, headers={
        'Authorization': kobo_constants.authorization_token
    }).json()
    list_res = []
    for form in base_response:
        surveyObj = survey.objects.filter(
            kobo_form_id=form['formid']).first()
        # Skip forms linked to a survey other than the requested one.
        if surveyObj and surveyObj.survey_id != surveyID:
            continue
        print(form['formid'], " ", form['title'], " ", form['date_created'])
        # date_created carries extra text; extract just the YYYY-MM-DD part.
        match = re.search(r'\d{4}-\d{2}-\d{2}', form['date_created'])
        date = datetime.strptime(match.group(), '%Y-%m-%d').date()
        list_res.append(
            (form['formid'], form['title'] + " (" + str(date) + ")"))
    return sorted(tuple(list_res), reverse=True)
def _fetch_from_stock(symbol, outputsize):
    """Fetch the daily OHLC time series for ``symbol`` from Alpha Vantage.

    Returns:
        (last_updated, parities): tz-aware datetime of the feed's
        "Last Refreshed" field, and a list of dicts with keys
        date/open/high/low/close (rate values kept as the raw strings).
    """
    logger.info(f'fetching from stock')
    url = urljoin(ALPHA_VANTAGE_BASE_URL, 'query')
    params = {
        'function': 'TIME_SERIES_DAILY',
        'symbol': symbol,
        'outputsize': outputsize
    }
    data = do_request(url, params)
    time_series = data['Time Series (Daily)']
    parities = []
    for date, rates in time_series.items():
        parities.append({
            'date': make_aware(datetime.strptime(date, '%Y-%m-%d')),
            'open': rates['1. open'],
            'high': rates['2. high'],
            'low': rates['3. low'],
            'close': rates['4. close']
        })
    # "Last Refreshed" may include a time component or be date-only;
    # try the datetime form first, then fall back to the date form.
    try:
        last_updated = datetime.strptime(data['Meta Data']['3. Last Refreshed'],
                                         '%Y-%m-%d %H:%M:%S')
    except Exception:
        last_updated = datetime.strptime(data['Meta Data']['3. Last Refreshed'],
                                         '%Y-%m-%d')
    last_updated = make_aware(last_updated)
    return last_updated, parities
def documentos_ge(request):
    """Return the Ges_documental docs shared with the logged-in user,
    filtered by the optional POST params inicio/fin/texto/etiqueta.

    Missing or malformed POST values fall back to permissive defaults
    (open date range, empty text, no tag filter).  The bare ``except:``
    clauses are narrowed so control-C and real bugs are not swallowed.
    """
    g_e = request.session['gauser_extra']
    # Docs shared with any of the user's subentities, posts (cargos),
    # or with the user directly.
    qa = Q(subentidad__in=g_e.subentidades.all()) | Q(
        cargo__in=g_e.cargos.all()) | Q(gauser=g_e.gauser)
    qb = qa & Q(documento__entidad=g_e.ronda.entidad)
    docs_id = Compartir_Ges_documental.objects.filter(qb).values_list(
        'documento__id', flat=True)
    docs = Ges_documental.objects.filter(id__in=docs_id,
                                         borrado=False).distinct()
    try:
        inicio = datetime.strptime(request.POST['inicio'], '%Y-%m-%d').date()
    except (KeyError, ValueError, TypeError):
        # No/bad start date: open-ended lower bound.
        inicio = datetime.strptime('1900-1-1', '%Y-%m-%d').date()
    try:
        fin = datetime.strptime(request.POST['fin'], '%Y-%m-%d').date()
    except (KeyError, ValueError, TypeError):
        fin = datetime.now().date()
    try:
        texto = request.POST['texto']
    except KeyError:
        texto = ''
    try:
        etiqueta = Etiqueta_documental.objects.get(entidad=g_e.ronda.entidad,
                                                   id=request.POST['etiqueta'])
        qb = Q(creado__gte=inicio) & Q(creado__lte=fin) & Q(
            nombre__icontains=texto) & Q(etiquetas__in=etiqueta.hijos)
    except Exception:
        # Any failure (missing param, bad id, no such tag) means: no tag filter.
        qb = Q(creado__gte=inicio) & Q(creado__lte=fin) & Q(
            nombre__icontains=texto)
    return docs.filter(qb)
def search_matches(request):
    """Render the search page: all users' photo URLs, the logged-in user's
    approximate age, and the fixed search-filter option lists."""
    current_user = User.objects.get(id=request.session['user_id'])
    user_list = User.objects.all()
    # Map every picture id to its served URL, for all users.
    photo_url = {pic.id: 'dating' + str(pic.pictures.url)
                 for member in user_list
                 for pic in member.pictures.all()}
    # Approximate the current user's age in whole years.
    fmt = "%Y-%m-%d"
    now_day = datetime.strptime(str(datetime.now().date()), fmt)
    born_day = datetime.strptime(str(current_user.dob.date()), fmt)
    current_user.age = round((now_day - born_day).days / 365)
    print(current_user.age)
    context = {
        "photo_url": photo_url,
        "user": current_user,
        "age_range": ['25-35', '36-45', '46-55', '56-65', '66-75'],
        "ethnic_group": ['White or Caucasian', 'Native American', 'Asian',
                         'Native Hawaiian or Pacific islander',
                         'Black or African', 'Latino or Hispanic',
                         'Middle Eastern', 'Other'],
        "religion": ['Christian', 'Catholic', 'Spiritual but not religious',
                     'Protestant', 'Agnostic', 'Other'],
        "educational_level": ["High School", "Some College",
                              "Associates Degree", "Bachelors Degree",
                              "Graduate Degree", "Post Doctoral"]
    }
    return render(request, 'dating/search_matches.html', context)
def forwards(self, orm):
    """Backfill ``tweeted_at`` (UTC) on Visions and Replies from the raw
    Twitter 'created_at' string stored in their tweet data."""
    for vision in orm.Vision.objects.all():
        # Prefer the regular tweet; fall back to the app tweet.
        if vision.tweet_id:
            tweet = vision.tweet.tweet_data
        elif vision.app_tweet_id:
            tweet = vision.app_tweet.tweet_data
        else:
            continue
        if 'created_at' in tweet:
            # Twitter timestamps are fixed "+0000" (UTC) strings.
            vision.tweeted_at = datetime.strptime(
                tweet['created_at'], '%a %b %d %H:%M:%S +0000 %Y')
            vision.tweeted_at = vision.tweeted_at.replace(tzinfo=utc)
            vision.save()
    for reply in orm.Reply.objects.all():
        if reply.tweet_id:
            tweet = reply.tweet.tweet_data
        else:
            continue
        if 'created_at' in tweet:
            reply.tweeted_at = datetime.strptime(
                tweet['created_at'], '%a %b %d %H:%M:%S +0000 %Y')
            reply.tweeted_at = reply.tweeted_at.replace(tzinfo=utc)
            reply.save()
def messages_likes(request):
    """Render the messages/likes page for the logged-in user, including
    their approximate age and photo URLs."""
    current_user = User.objects.get(id=request.session['user_id'])
    # Approximate age in whole years from the date-of-birth delta.
    fmt = "%Y-%m-%d"
    now_day = datetime.strptime(str(datetime.now().date()), fmt)
    born_day = datetime.strptime(str(current_user.dob.date()), fmt)
    current_user.age = round((now_day - born_day).days / 365)
    current_user.dob.date()
    # Picture id -> served URL for the current user's photos.
    photo_url = {pic.id: 'dating' + str(pic.pictures.url)
                 for pic in current_user.pictures.all()}
    user = User.objects.get(id=request.session['user_id'])
    messages = Message.objects.filter(user_written_for=user)
    context = {
        "messages": messages,
        "user": current_user,
        "photo_url": photo_url
    }
    return render(request, 'dating/message_like.html', context)
def user_info_display(request, user_id):
    """Show ``user_id``'s profile to the logged-in user: the selected
    user's approximate age, photos of both users, and whether the viewer
    already liked them."""
    user = User.objects.get(id=request.session['user_id'])
    selected_user = User.objects.get(id=user_id)
    liked_user = UserLike.objects.filter(like_by=user).filter(
        liked=selected_user)
    # Approximate the selected user's age in whole years.
    fmt = "%Y-%m-%d"
    now_day = datetime.strptime(str(datetime.now().date()), fmt)
    born_day = datetime.strptime(str(selected_user.dob.date()), fmt)
    selected_user.age = round((now_day - born_day).days / 365)
    print(selected_user.age)
    # Picture id -> served URL for both the profile owner and the viewer.
    photo_url = {}
    for owner in (selected_user, user):
        for pic in owner.pictures.all():
            photo_url[pic.id] = 'dating' + str(pic.pictures.url)
    context = {
        "user": user,
        "selected_user": selected_user,
        "photo_url": photo_url,
        "liked_user": liked_user
    }
    return render(request, 'dating/user_info.html', context)
def edate_year_month(cls, y, m, dt=True):
    """Return the first day of the month *following* ``y``-``m``.

    Args:
        y: year, as int or str.
        m: month, as int or str ('1'..'12'; zero-padded accepted).
        dt: when True return a ``date``; otherwise a UTC-aware
            ``datetime`` at midnight.

    The original had three near-identical branches (December rollover,
    zero-padded months, plain months); plain integer arithmetic covers
    all of them.
    """
    year, month = int(y), int(m)
    # Advance one month, rolling over to January of the next year.
    if month == 12:
        year, month = year + 1, 1
    else:
        month += 1
    t = datetime(year, month, 1)
    if dt:
        return datetime.date(
            datetime(t.year, t.month, t.day, tzinfo=pytz.timezone('UTC')))
    else:
        return datetime(t.year, t.month, t.day, t.hour, t.minute, t.second,
                        t.microsecond, tzinfo=pytz.timezone('UTC'))
def list(self):
    """Return events from the user's "Default" calendar, optionally
    limited to [range_start, range_end] and/or a single event_id taken
    from the query string; raises BadRequest on malformed bounds."""
    errors = defaultdict(list)
    params = self.request.GET
    start = params.get('range_start', None)
    end = params.get('range_end', None)
    event_id = params.get('event_id', None)

    def parse_bound(raw, field, message):
        # None means the bound was not supplied at all.
        if raw is None:
            return None
        try:
            return datetime.strptime(raw, '%Y-%m-%d %H:%M')
        except ValueError:
            errors[field].append(message)

    range_start = parse_bound(start, 'start', "Start not in the correct format")
    range_end = parse_bound(end, 'end', "End not in the correct format")
    if errors:
        raise BadRequest(str(errors))
    calendar = self.request.user.profile.calendars.get(title="Default")
    query = calendar.events.get(id=event_id) if event_id else calendar
    return query.get_between(range_start, range_end)
def create_equipment(self, request, json_request, *args, **kwargs):
    """Create an Equipment row from the JSON request payload.

    Returns a dict echoing the request type with status "success" or
    "fail".  Any failure (missing payload key, bad date string, DB
    error) is reported uniformly as "fail" — the broad except is the
    designed error-reporting path, not an accident.
    """
    try:
        json_payload = json_request["payload"]
        newEquipment = Equipment(
            code=json_payload["code"],
            name=json_payload["name"],
            # New equipment always starts at the "Base" location.
            where=Place.objects.get(name="Base"),
            date_calibration=datetime.strptime(json_payload["calibrationdata"], "%Y-%m-%d").date(),
            validity_time=int(json_payload["months"]),
            in_calibration=0,
            is_active=json_payload["isactive"]
        )
        # Expiry date is optional (empty string means "not set").
        if (json_payload["validitydata"] != ""):
            newEquipment.date_validity = datetime.strptime(json_payload["validitydata"], "%Y-%m-%d").date()
        newEquipment.create(user=request.user)
        data = {
            "type": json_request["type"],
            "status": "success"
        }
    except:
        data = {
            "type": json_request["type"],
            "status": "fail"
        }
    return data
def get_dt_pattern(dt_string):
    """Return the first format in ``dt_patterns`` that successfully parses
    ``dt_string``; None when no pattern matches."""
    for pattern in dt_patterns.values():
        try:
            dt.strptime(dt_string, pattern)
        except Exception:
            continue
        # strptime succeeded: this is the feed's datetime format.
        return pattern
def schdl():
    """Run ``task()`` at each timestamp in ``date_time`` while today lies
    within [start_date, end_date].

    NOTE(review): relies on module globals ``start_date``, ``end_date``,
    ``date_time`` and ``task``.  ``datetime.now() == dt_obj`` is an
    exact equality down to microseconds and will almost never be true
    unless this function is polled extremely often — confirm the
    intended scheduler semantics.
    """
    start_dt = datetime.strptime(start_date, "%Y-%m-%d").date()
    end_dt = datetime.strptime(end_date, "%Y-%m-%d").date()
    # Only fire inside the configured date window.
    if date.today() <= end_dt and date.today() >= start_dt:
        for dt_time in date_time:
            dt_obj = datetime.strptime(dt_time, '%Y-%m-%d %H:%M:%S')
            if datetime.now() == dt_obj:
                task()
def devuelve_fecha(string):
    """Parse ``string`` with the supported day-first date formats.

    Returns the parsed datetime, or the 1900-01-01 sentinel when no
    format matches.  The bare ``except:`` is narrowed so signals such as
    KeyboardInterrupt are no longer swallowed.
    """
    DATE_FORMATS = ['%d/%m/%Y', '%d/%m/%y', '%d-%m-%Y', '%d-%m-%y']
    for date_format in DATE_FORMATS:
        try:
            return datetime.strptime(string, date_format)
        except (ValueError, TypeError):
            # This format doesn't match (or input isn't a string): try next.
            continue
    # Sentinel used by callers to mean "unknown date".
    return datetime.strptime('01/01/1900', '%d/%m/%Y')
def _strptime(timestamp):
    """Parse an ISO-8601 timestamp into a UTC-aware datetime.

    Tries the plain second-resolution format first (the common case),
    falling back to the microseconds+'Z' variant when strptime complains
    about unconverted trailing data.
    """
    try:
        # 99% of images use this Standard format
        return datetime.strptime(timestamp, '%Y-%m-%dT%H:%M:%S').replace(tzinfo=utc)
    except ValueError as bad_timestamp:
        # BUG FIX: the original referenced a misspelled name
        # (``bad_timstamp``) and the Py2-only ``.message`` attribute, so
        # the fallback raised NameError instead of parsing; ``as`` syntax
        # and str() work on both Python 2.6+ and 3.
        msg = str(bad_timestamp)
        if 'unconverted data' in msg and 'Z' in msg:
            # Include microseconds using ISO Standard
            return datetime.strptime(timestamp, '%Y-%m-%dT%H:%M:%S.%fZ').replace(tzinfo=utc)
        raise
def ajaxEditGraf(request):
    """AJAX endpoint: per-product sale counts and favourite counts for the
    logged-in vendor between the 'ini' and 'fin' GET dates (Y-m-d,
    '/' accepted as separator).  An empty bound defaults to a 4-day
    window ending today; an inverted range redirects back to the stats
    page."""
    user = request.user
    usuario = Usuario.objects.get(user=user)
    vendedor = usuario.get_vendedor()
    iniDate = request.GET.get('ini', None)
    print(iniDate)
    iniDate = iniDate.replace("/", "-")
    finDate = request.GET.get('fin', None)
    finDate = finDate.replace("/", "-")
    # BUG FIX: the original used four independent ``if`` statements with
    # the ``else`` bound only to the last one, so after the first or
    # second branch converted the values to datetimes, the trailing
    # ``else`` still ran strptime() on them and raised TypeError.  An
    # if/elif chain makes the four cases mutually exclusive.
    if iniDate == "" and finDate == "":
        finDate = datetime.today()
        iniDate = finDate - timedelta(days=4)
    elif iniDate == "" and finDate != "":
        finDate = datetime.strptime(finDate, '%Y-%m-%d')
        iniDate = finDate - timedelta(days=4)
    elif iniDate != "" and finDate == "":
        finDate = datetime.today()
        iniDate = datetime.strptime(iniDate, '%Y-%m-%d')
    else:
        iniDate = datetime.strptime(iniDate, '%Y-%m-%d')
        finDate = datetime.strptime(finDate, '%Y-%m-%d')
    print("alo")
    if iniDate.date() > finDate.date():
        return redirect('/main/estadisticas/')
    # Sales per product for this vendor within the window.
    trvend = Transacciones.objects.filter(vendedor=vendedor)
    trvend = trvend.filter(fecha__lte=finDate,
                           fecha__gte=iniDate).values('producto').annotate(
                               conteo=Count('producto'))
    listaTransac = list(trvend)
    productos = []
    cantidad = []
    for i in listaTransac:
        productos.append(Producto.objects.get(id=int(i['producto'])).nombre)
        cantidad.append(int(i['conteo']))
    # Favourite counts for this vendor within the window.
    favoritQS = Favoritos.objects.filter(
        vendedor=vendedor, fecha__lte=finDate,
        fecha__gte=iniDate).values('vendedor').annotate(
            conteo=Count('vendedor'))
    listaFav = list(favoritQS)
    favoritos = []
    for i in listaFav:
        favoritos.append(int(i['conteo']))
    print(productos)
    print(cantidad)
    print(favoritos)
    return JsonResponse({
        'productosGraficar': simplejson.dumps(productos),
        'cantidadGraficar': simplejson.dumps(cantidad),
        'numFav': simplejson.dumps(favoritos)
    })
def rss_xml_parser(url: str, user_id: int):
    """Fetch an RSS feed and store its channel and items for ``user_id``.

    The feed's datetime format is auto-detected once from lastBuildDate
    and reused for every item's pubDate.  When the stored channel's
    last_build is not older than the feed's lastBuildDate, nothing is
    created.
    """
    datetime_pattern = None
    r = requests.get(url)
    # print(r.text)
    # entire feed
    root = etree.fromstring(r.text)
    # get channel info
    channel = None
    for child in root:
        _channel_title = child.findtext("title")
        _channel_link = child.findtext("link")
        # Detect the feed's datetime format from lastBuildDate; abort the
        # whole parse when no known pattern matches.
        if datetime_pattern is None:
            datetime_pattern = get_dt_pattern(child.findtext("lastBuildDate"))
            if datetime_pattern is None:
                print(f"Error. Datetime pattern was not found. Url: {url}")
                return
        _last_build = dt.strptime(child.findtext("lastBuildDate"),
                                  datetime_pattern)
        channel = Channel.objects.filter(link=_channel_link,
                                         user__id=user_id).first()
        if channel:
            # do not create new feeds if url time build repeats
            if channel.last_build.timestamp() >= _last_build.timestamp():
                return
        if not channel:
            channel = Channel.objects.create(
                title=_channel_title,
                link=_channel_link,
                user_id=user_id,
                last_build=_last_build,
            )
        # Only the first child (the <channel> element) is processed.
        break
    items = root.findall("channel/item")
    feeds = []
    for entry in items:
        # get description, url, and thumbnail
        desc = entry.findtext("description")
        title = entry.findtext("title")
        pub_date_str = entry.findtext("pubDate")
        pub_date = dt.strptime(pub_date_str, datetime_pattern)
        feeds.append(
            Feed(
                channel=channel,
                title=title,
                description=desc,
                pub_date=pub_date,
            ))
    # One INSERT for all items instead of one per item.
    Feed.objects.bulk_create(feeds)
def _strptime(timestamp):
    """Parse an ISO-8601 timestamp into a UTC-aware datetime.

    Tries the plain second-resolution format first (the common case),
    falling back to the microseconds+'Z' variant when strptime complains
    about unconverted trailing data.
    """
    try:
        # 99% of images use this Standard format
        return datetime.strptime(
            timestamp, '%Y-%m-%dT%H:%M:%S').replace(tzinfo=utc)
    except ValueError as bad_timestamp:
        # BUG FIX: the original read ``bad_timstamp.message`` — a
        # misspelled name AND a Python-2-only attribute — so the fallback
        # path raised NameError instead of parsing.  str() is the
        # portable way to get the error text.
        msg = str(bad_timestamp)
        if 'unconverted data' in msg and 'Z' in msg:
            # Include microseconds using ISO Standard
            return datetime.strptime(
                timestamp, '%Y-%m-%dT%H:%M:%S.%fZ').replace(tzinfo=utc)
        raise
def relatorio(request):
    """Render or download the ticket (chamado) report for the session's
    sector.

    On a valid POST, filters tickets by requesting sector, open-date
    range and service group, then returns the rendered ODT→PDF file;
    otherwise re-renders the form page.
    """
    if request.method == 'POST':
        form = RelatorioSetorForm(request, request.POST)
        if form.is_valid():
            # Tickets belonging to the logged-in sector.
            chamados = Chamado.objects.filter(
                setor__setor_id=request.session['setor_id'])
            if form['setor'].value() != '':
                s_helper = ServiceHelper()
                setor = s_helper.get_setor(form['setor'].value())
                setor_solicitante = setor.set_nome
                chamados = chamados.filter(setor_solicitante=setor.set_id)
            else:
                setor_solicitante = 'TODOS OS SETORES'
            data_inicio = datetime.strptime(form['data_inicio'].value(),
                                            '%d/%m/%Y')
            chamados = chamados.filter(data_abertura__gte=data_inicio)
            if form['data_fim'].value() != '':
                data_fim = datetime.strptime(form['data_fim'].value(),
                                             '%d/%m/%Y')
                # Add a day so the whole end date is included.
                data_fim = data_fim + timedelta(days=1)
                # Keep open tickets (no close date) and those closed up to
                # data_fim; exclude() expresses this without OR-ing Q objects.
                #chamados = chamados.filter(Q(data_fechamento=None) or Q(data_fechamento__lte=data_fim))
                #chamados = chamados.filter(Q(data_fechamento__isnull=True) or Q(data_fechamento__lte=data_fim))
                chamados = chamados.exclude(data_fechamento__gt=data_fim)
            if form['grupo_servico'].value() != '':
                chamados = chamados.filter(
                    grupo_servico=form['grupo_servico'].value())
            chamados = chamados.order_by('data_abertura')
            context = {
                'chamados': chamados,
                'inicio': form['data_inicio'].value(),
                'fim': form['data_fim'].value(),
                'setor': request.session['setor_nome'],
                'setor_solicitante': setor_solicitante
            }
            # Fill the ODT template and convert it to PDF for download.
            filename = fill_template('relatorio2.odt', context,
                                     output_format='pdf')
            visible_filename = 'relatorio_chamados.pdf'
            return FileResponse(filename, visible_filename)
        else:
            return render(request, 'core/relatorio/chamado/index.html',
                          {'form': form})
    else:
        return render(request, 'core/relatorio/chamado/index.html',
                      {'form': form})
def clean_year_founded(string):
    """Extract a founding date from a Japanese 'YYYY年[MM月]' string.

    Returns a datetime at the first of the given month (January when no
    month is present), or None when no 4-digit year is found.
    """
    year = re.search(r"([0-9]{4})年", string)
    month = re.search(r"([0-9]{1,2})月", string)
    if not year:
        return None
    if month:
        return datetime.strptime(
            "-".join([year.group(1), month.group(1)]), "%Y-%m")
    return datetime.strptime(year.group(1), "%Y")
def handle(self, *args, **options):
    """Management command: audit inspectors' GPS tracks for one day.

    Pulls each on-duty inspector's track points for the target date
    (``-d`` option, default today), drops invalid points, then flags
    work-hour gaps with no points and points outside the fence.
    """
    log.info('-' * 30)
    log.info('开始检查监督员坐标')  # "start checking inspector coordinates"
    day = options.get('d')
    if not day:
        day = datetime.now()
    else:
        day = datetime.strptime(day, '%Y-%m-%d')
    log.info('数据日期为:%s' % day)  # "data date is"
    now = day
    checkDay = day
    today = now.date()
    # Query window: hour 00 through hour 23 of the target day
    # (provider expects YYYYMMDDHH strings).
    startDate = today.strftime('%Y%m%d00')
    # NOTE(review): ``tomorro`` is computed but never used — confirm
    # whether endDate was meant to be based on it.
    tomorro = now + timedelta(days=1)
    endDate = today.strftime('%Y%m%d23')
    log.info('today = %s' % today)
    try:
        todayWorkGroup = WorkInspector.objects.get(date=today)
        keepers = list(todayWorkGroup.inspector.all())
        log.info('上班人数:%s' % len(keepers))  # "number on duty"
        for keeper in keepers:
            log.info('开始拉取%s轨迹数据' % str(keeper))  # "pulling track data"
            tracks = getKeeperTrack(keeper.code, startDate, endDate)
            log.info('总共拉取了%s条' % len(tracks))  # "total records pulled"
            posList = []
            for track in tracks:
                # Convert provider coordinates to lon/lat points.
                x, y = cordToloc(track.get('coordx'), track.get('coordy'))
                pos = Point(float(x), float(y))
                posList.append({
                    'tracktime': datetime.strptime(track.get('tracktime'),
                                                   '%Y-%m-%d %H:%M:%S'),
                    'pos': pos,
                })
            log.info('开始去除无效轨迹点')  # "removing invalid track points"
            posList = removeInvalidPos(keeper, posList)
            log.info('开始检查是否有工作时间内没有轨迹点的情况')  # "checking for work-hour gaps"
            noPosCheck(keeper, posList, checkDay)
            log.info('开始检查轨迹点是否跑到围栏外')  # "checking points outside the fence"
            outBoxCheck(keeper, posList)
    except WorkInspector.DoesNotExist:
        log.info('未设置工作组')  # "no work group configured"
    #except Exception as e:
    #log.error(str(e))
    log.info('检查监督员完成')  # "inspector check finished"
def my_matches(request):
    """Render the matches page: users of the gender the current user is
    seeking, excluding the user themself and anyone they blocked, with
    ages, like-state and photo URLs."""
    if 'user' not in request.session:
        return redirect('../login/')
    current_user = User.objects.get(id=request.session['user_id'])
    blocked_user = UserBlock.objects.filter(block_by=current_user)
    user_list = User.objects.filter(gender=current_user.seeking_for).exclude(
        id=request.session['user_id']).exclude(blocked__in=blocked_user)
    liked_user = UserLike.objects.filter(like_by=current_user)
    # Picture id -> served URL for candidates and the current user.
    photo_url = {}
    for user in user_list:
        for pic in user.pictures.all():
            photo_url[pic.id] = 'dating' + str(pic.pictures.url)
    for pic in current_user.pictures.all():
        photo_url[pic.id] = 'dating' + str(pic.pictures.url)
    # ----- to pick liked users ------#
    liked_user_array = []
    for u in user_list:
        for y in liked_user:
            if u == y.liked:
                liked_user_array.append(y.liked)
    # ----- age calculation ----- #
    # Approximate each candidate's age in whole years.
    date_format = "%Y-%m-%d"
    today = datetime.strptime(str(datetime.now().date()), date_format)
    for user in user_list:
        dob = datetime.strptime(str(user.dob.date()), date_format)
        delta = today - dob
        user.age = round(delta.days / 365)
    # ----- age calculation ----- #
    # ----- current user age ----- #
    current_user_dob = datetime.strptime(str(current_user.dob.date()),
                                         date_format)
    delta = today - current_user_dob
    current_user.age = round(delta.days / 365)
    print(current_user.age)
    # ----- current user age ----- #
    context = {
        "user_list": user_list,
        "user": current_user,
        "liked_user": liked_user,
        "liked_user_array": liked_user_array,
        "photo_url": photo_url
    }
    return render(request, 'dating/my_matches.html', context)
def save(self, force_insert=False, force_update=False, using=None,
         update_fields=None):
    """Coerce ``created``/``lastSeen`` from ISO-8601 strings to datetimes.

    NOTE(review): Python 2 only (uses ``unicode``).  Also, this override
    never calls ``super().save()`` and returns ``self`` instead of
    persisting — confirm callers expect a pure normalisation step rather
    than a real save.
    """
    if not isinstance(self.created, datetime) and self.created is not None:
        self.created = datetime.strptime(unicode(self.created),
                                         '%Y-%m-%dT%H:%M:%S.%f')
    if not isinstance(self.lastSeen, datetime) and self.lastSeen is not None:
        self.lastSeen = datetime.strptime(unicode(self.lastSeen),
                                          '%Y-%m-%dT%H:%M:%S.%f')
    return self
def glance_timestamp(iso_8601_stamp):
    """Parse a glance ISO-8601 timestamp (with or without microseconds)
    into a UTC-aware datetime; returns None for falsy input and raises
    ValueError for unparseable strings."""
    if not iso_8601_stamp:
        return None
    # Try the microseconds variant first, then the plain one.
    for fmt in ('%Y-%m-%dT%H:%M:%S.%f', '%Y-%m-%dT%H:%M:%S'):
        try:
            datetime_obj = datetime.strptime(iso_8601_stamp, fmt)
            break
        except ValueError:
            continue
    else:
        raise ValueError("Expected ISO8601 Timestamp in Format: YYYY-MM-DDTHH:MM:SS[.sssss]")
    # All Dates are UTC relative
    return datetime_obj.replace(tzinfo=pytz.utc)
def get_posts(self):
    """Fetch and parse the feed at ``self.url`` (RSS or Atom) into a list
    of unsaved Post objects, capped at ``self.max_posts``."""
    req = urllib.request.Request(self.url, data=None,
                                 headers={'User-Agent': 'feed-reader'})
    site = urllib.request.urlopen(req).read()
    soup = BeautifulSoup(site, "xml")
    # RSS uses <item>; fall back to Atom's <entry>.
    items = soup.findAll("item")
    if len(items) == 0:
        items = soup.findAll("entry")
    posts = []
    for item in items[:self.max_posts]:
        title = item.find("title").text
        # The link may be element text (RSS), an href attribute (Atom),
        # or an enclosure URL (podcast feeds).
        link = item.find("link")
        if link is not None:
            link_ = link.text
            if link_ == "":
                link = link.get("href")
            else:
                link = link_
        else:
            link = item.find("enclosure").get("url")
        # Publication date: RSS pubDate or Atom published.
        post_date = item.find("pubDate")
        if post_date is not None:
            post_date = post_date.text.strip()
        else:
            post_date = item.find("published").text.strip()
        # Try RFC-822 with a numeric offset, then with a timezone name,
        # then generic ISO-8601 as a last resort.
        try:
            post_date = datetime.strptime(post_date,
                                          "%a, %d %b %Y %H:%M:%S %z")
        except ValueError:
            try:
                post_date = datetime.strptime(post_date,
                                              "%a, %d %b %Y %H:%M:%S %Z")
            except ValueError:
                post_date = iso8601.parse_date(post_date, )
        if is_naive(post_date):
            post_date = make_aware(post_date)
        post = Post(title=title, url=link, post_date=post_date,
                    add_date=now(), view=False)
        posts += [post]
    return posts
def periode(tahun: int) -> tuple:
    """
    input tahun
    :param tahun:
    :return:
    tuple 1 dengan 2 element range tanggal 'start gasal', 'end gasal'
    tuple 2 dengan 2 element range tanggal 'start genap', 'end genap'
    """
    tahun_ini = tahun
    tahun_lalu = tahun_ini - 1
    genap_start = datetime.strptime('27-01-{}'.format(tahun_ini), '%d-%m-%Y')
    genap_end = datetime.strptime('29-01-{}'.format(tahun_ini), '%d-%m-%Y')
    gasal_start = datetime.strptime('27-07-{}'.format(tahun_lalu), '%d-%m-%Y')
    # BUG FIX: gasal_end previously used ``tahun_lalu``, which placed the
    # odd-semester end (January) half a year *before* its July start; the
    # range must end in January of ``tahun_ini``.
    gasal_end = datetime.strptime('29-01-{}'.format(tahun_ini), '%d-%m-%Y')
    return (gasal_start, gasal_end), (genap_start, genap_end)
def get(self, request):
    """News management list view.

    Paginates NewsPub filtered by the optional GET params start_time /
    end_time (Y/m/d), title, author and tag_id; keeps the filter values
    in ``other_param`` so pagination links preserve them.
    """
    current_page = int(request.GET.get("p", 1))  # current page number from the client
    new_tags = NewsTag.objects.all()
    per_page_news = 2  # news items shown per page
    # NOTE(review): is_delete=True looks inverted (lists *deleted* news)
    # — confirm against the model's field semantics.
    newslist = NewsPub.objects.filter(is_delete=True).all()
    start_time = request.GET.get('start_time', '')
    end_time = request.GET.get('end_time', '')
    title = request.GET.get('title', '')
    author = request.GET.get('author', '')
    tag_id = int(request.GET.get('tag_id', 0))
    # urlencode builds a standard query string so the active filters
    # survive pagination links.
    other_param = urlencode({
        'start_time': start_time,
        'end_time': end_time,
        'title': title,
        'author': author,
        'tag_id': tag_id
    })
    # BUG FIX: each filter below previously restarted from
    # NewsPub.objects, discarding the filters applied before it; chaining
    # on ``newslist`` lets the filters combine.
    if start_time and end_time:
        start_data = datetime.strptime(start_time, '%Y/%m/%d')
        # Add one day so the entire end date is included in the range.
        end_data = datetime.strptime(end_time, '%Y/%m/%d') + timedelta(days=1)
        # make_aware avoids the naive-datetime timezone warning.
        newslist = newslist.filter(
            pub_time__range=(make_aware(start_data), make_aware(end_data)))
    if title:
        newslist = newslist.filter(title__contains=title)
        print(newslist)
    if author:
        newslist = newslist.filter(auth__username__contains=author)
    if tag_id:
        newslist = newslist.filter(tag_id=int(tag_id))
    paginator = Paginator(newslist, per_page_news)
    context = self.get_page_data(paginator, current_page)
    context.update({'news_tags': new_tags, 'other_param': other_param})
    return render(request, 'cms/news/news_manage.html', context=context)
def logparse(self, log):
    """
    Parse a E-Mail sendlog
    :param log: Log string
    :return:
    """
    line_regex = re.compile(
        r'(?P<timestamp>[\d+\:]{5,10}\.\d+)\s(?P<linetype>\w+)\:\s(?P<payload>.*)',
        re.MULTILINE)
    entries = []
    for match in line_regex.finditer(log):
        timestamp, linetype, payload = match.groups()
        # Skip completely empty records.
        if payload == "" and linetype == "":
            continue
        time = datetime.strptime(timestamp, "%H:%M:%S.%f")
        # Normalise byte payloads to text.
        if isinstance(linetype, bytes):
            linetype = linetype.decode("ascii")
        if isinstance(payload, bytes):
            payload = payload.decode("ascii")
        entries.append(MailLogLine(protocol=self, type=linetype,
                                   payload=payload, timestamp=time))
    # Single INSERT for all parsed lines.
    MailLogLine.objects.bulk_create(entries)
def dehydrate_members(self, bundle):
    """Serialise member score data for the API response.

    Reads an optional ``ranges`` GET param of the form
    "YYYYMM-YYYYMM[,YYYYMM-YYYYMM...]" (either bound may be empty) and
    returns one dict per member with score/rank/volume/numvotes — lists
    when multiple ranges were requested, scalars otherwise.

    NOTE(review): Python 2 semantics — ``map`` must yield lists for the
    JSON payload; under Python 3 these would be lazy iterators.
    """
    rangesString = bundle.request.GET.get('ranges', None)
    fullRange = rangesString is None
    if not fullRange:
        # Parse each "YYYYMM-YYYYMM" pair; an empty bound becomes None
        # (open-ended range).
        ranges = map(
            lambda rangeString: [datetime.strptime(val, "%Y%m") if val else None
                                 for val in rangeString.split('-')],
            rangesString.split(','))
        mks_values = dict(bundle.obj.get_mks_values(ranges))
    else:
        mks_values = dict(bundle.obj.get_mks_values())
    members = []
    for mk in Member.objects.filter(pk__in=mks_values.keys(),
                                    current_party__isnull=False).select_related('current_party'):
        # TODO: this sucks, performance wise
        current_party = mk.current_party
        mk_data = mks_values[mk.id]
        members.append(dict(
            id=mk.id,
            name=mk.name,
            score=map(itemgetter('score'), mk_data) if isinstance(mk_data, list) else mk_data['score'],
            rank=map(itemgetter('rank'), mk_data) if isinstance(mk_data, list) else mk_data['rank'],
            volume=map(itemgetter('volume'), mk_data) if isinstance(mk_data, list) else mk_data['volume'],
            numvotes=map(itemgetter('numvotes'), mk_data) if isinstance(mk_data, list) else mk_data['numvotes'],
            absolute_url=mk.get_absolute_url(),
            party=current_party.name,
            party_url=current_party.get_absolute_url(),
            party_id=current_party.pk
        ))
    return members
def _cal_prob(self, data):
    """Annotate the top check areas in ``data`` with inspection
    probabilities as 'NN%' strings.

    data[0] is the most recently checked area; its probability depends
    on how long ago it was checked, and the remainder is split over the
    next one or two areas (merged when they share a polygon).

    NOTE(review): the thresholds are tested from earliest (yesterday
    07:00) upward, so any last_time after yesterday 07:00 takes the
    first branch and the later branches are unreachable — confirm
    whether the comparisons were meant to run newest-first.
    """
    last = data[0]
    last_time = last['time']
    last_time = datetime.strptime(last_time, '%Y-%m-%d %H:%M:%S')
    now = datetime.now()
    one_day = timedelta(days=1)
    today_mid = now.replace(hour=13, minute=0)
    last_day_mid = (now - one_day).replace(hour=13, minute=0)
    last_day_morning = (now - one_day).replace(hour=7, minute=0)
    # Overall base rate.
    base = 0.8
    # Probability that the current area has NOT been re-checked,
    # keyed on how recent the last check was.
    if last_time > last_day_morning:
        last_pro = 0.02
    elif last_time > last_day_mid:
        last_pro = 0.1
    elif last_time > today_mid:
        last_pro = 0.35
    else:
        last_pro = 0.8
    last['probability'] = '%d%%' % round(100 * base * last_pro)
    left = 1 - last_pro
    if data[1]['polygon'] == data[2]['polygon']:
        # Two identical areas: give the remainder to one and drop the other.
        data[1]['probability'] = '%d%%' % round(100 * base * left)
        data.pop()
    else:
        # Split the remainder 90/10 between the next two areas.
        data[1]['probability'] = '%d%%' % round(100 * base * left * 0.9)
        data[2]['probability'] = '%d%%' % round(100 * base * left * 0.1)
    return data
def _send_instance_email(driverCls, provider, identity, instance_id):
    """Celery task: e-mail the owner when their instance comes up.

    Skips silently when the instance is already gone or the owner's
    profile opts out of e-mails; on any error the task retries itself.
    """
    try:
        logger.debug("_send_instance_email task started at %s." % datetime.now())
        driver = get_driver(driverCls, provider, identity)
        instance = driver.get_instance(instance_id)
        # Breakout if instance has been deleted at this point
        if not instance:
            logger.debug("Instance has been teminated: %s." % instance_id)
            return
        username = identity.user.username
        profile = UserProfile.objects.get(user__username=username)
        if profile.send_emails:
            # Only send emails if allowed by profile setting
            created = datetime.strptime(instance.extra['created'],
                                        "%Y-%m-%dT%H:%M:%SZ")
            send_instance_email(username, instance.id, instance.name,
                                instance.ip, created, username)
        else:
            logger.debug("User %s elected NOT to receive new instance emails"
                         % username)
        logger.debug("_send_instance_email task finished at %s." % datetime.now())
    except Exception as exc:
        logger.warn(exc)
        # Re-queue the task; celery applies the retry policy.
        _send_instance_email.retry(exc=exc)
def update(self, data):
    """
    Updates crawling parameters and status.
    It is called usually when user makes some changes in task data
    using GUI or API.
    """
    self.logger.debug('Updating task server: %s' % json.dumps(data))
    # Ignore updates while the server is transitioning.
    if self._get_status() in [Status.STOPPING, Status.STARTING]:
        return
    if data['finished']:
        self.stop()
        return
    # ``with`` guarantees the lock is released even when a key is missing
    # or a value fails to parse — the original acquire()/release() pair
    # leaked the lock on any exception in between.
    with self.data_lock:
        self.whitelist = data['whitelist']
        self.blacklist = data['blacklist']
        self.mime_type = data['mime_type']
        self.max_links = int(data['max_links'])
        self.expire_date = datetime.strptime(data['expire_date'], DATE_FORMAT)
    if data['active']:
        self.resume()
    else:
        self.pause()
def get_queryset(self, **kwargs):
    """Filter the sales queryset by the optional 'fecha' GET param
    (d-m-Y); defaults to the current date/time when absent or empty."""
    queryset = super(VentaListar, self).get_queryset()
    raw_fecha = self.request.GET.get('fecha')
    fecha = datetime.strptime(raw_fecha, '%d-%m-%Y') if raw_fecha else datetime.now()
    return queryset.filter(fecha=fecha)
def set_time_from_tweet(self, tweet):
    """Set ``tweeted_at`` (UTC) from the tweet's 'created_at' field;
    malformed values are ignored and leave the attribute untouched."""
    if 'created_at' not in tweet.tweet_data:
        return
    try:
        parsed = datetime.strptime(tweet.tweet_data['created_at'],
                                   '%a %b %d %H:%M:%S +0000 %Y')
    except ValueError:
        # Unparseable timestamp: keep whatever was there before.
        return
    self.tweeted_at = parsed.replace(tzinfo=utc)
def get_context_data(self):
    """Add the widget's 'VALUE' setting, parsed with ``self.time_format``,
    to the template context under 'value'."""
    context = super(DummyWidget, self).get_context_data()
    raw_value = self.get_setting('VALUE').value
    context['value'] = datetime.strptime(raw_value, self.time_format)
    return context
def get_birthday(self):
    """Extract the birth date from a Chinese ID number (digits 7-14,
    YYYYMMDD).

    Returns:
        datetime of the birthday, or None when the ID number is too
        short or does not contain a valid date.
    """
    try:
        return datetime.strptime(self.ID_number[6:14], "%Y%m%d")
    except ValueError:
        # BUG FIX: slicing a short string never raises IndexError — a
        # short or malformed ID makes strptime raise ValueError, which
        # the original except clause missed entirely.
        return None
def get_date(value):
    """
    Returns the given field as a DateTime object.

    This is necessary because Postgres and SQLite return different values
    for datetime columns (DateTime vs. string).
    """
    # Non-string values are already datetimes — pass them through.
    if not isinstance(value, string_types):
        return value
    return datetime.strptime(value, '%Y-%m-%d')
def get_queryset(self):
    """Events for the day named by the 'date' GET param (m/d/Y), from
    06:00 local time through 06:00 the next day; drafts are hidden from
    non-staff users.  Returns an empty queryset when no usable date is
    supplied."""
    raw_date = self.request.GET.get('date')
    if not raw_date or raw_date == "undefined":
        return Event.objects.none()
    # Anchor the day window at 06:00 in the active timezone.
    day_start = timezone.make_aware(
        datetime.strptime(raw_date, "%m/%d/%Y").replace(hour=6, minute=0),
        timezone.get_current_timezone())
    day_end = day_start + timedelta(days=1)
    events = Event.objects.filter(
        start__range=(day_start, day_end)).order_by('start')
    if not self.request.user.is_staff:
        events = events.exclude(state=Event.STATUS.Draft)
    return events
def glance_timestamp(iso_8601_stamp):
    """Parse a glance ISO-8601 timestamp (optionally 'Z'-suffixed, with
    or without microseconds) into a UTC-aware datetime.

    Non-string input yields None (with a debug log when it was truthy);
    an unparseable string raises ValueError.
    """
    if not isinstance(iso_8601_stamp, basestring):
        if iso_8601_stamp:
            logger.debug("Stamp %s could not be parsed" % iso_8601_stamp)
        return None
    # Keep a trailing 'Z' in the format string when the stamp carries one.
    append_char = "Z" if iso_8601_stamp.endswith("Z") else ""
    for fmt in ('%Y-%m-%dT%H:%M:%S.%f', '%Y-%m-%dT%H:%M:%S'):
        try:
            datetime_obj = datetime.strptime(iso_8601_stamp, fmt + append_char)
            break
        except ValueError:
            continue
    else:
        raise ValueError(
            "Expected ISO8601 Timestamp in Format:"
            " YYYY-MM-DDTHH:MM:SS[.sssss][Z]")
    # All Dates are UTC relative
    return datetime_obj.replace(tzinfo=pytz.utc)
def dehydrate_members(self, bundle):
    """Yield one API dict per member: id, name, urls, current-party info
    and the point metrics from ``self._get_points``.

    The optional ``ranges`` GET param has the form "YYYYMM-YYYYMM[,...]";
    an empty bound (default is "-") means an open-ended range.
    """
    ranges = [
        tuple(datetime.strptime(val, "%Y%m") if val else None
              for val in rangeString.split('-'))
        for rangeString in bundle.request.GET.get('ranges', '-').split(',')]
    mks_values = dict(bundle.obj.get_mks_values(ranges))
    # Only members with a current party are serialised.
    mks_list = Member.objects.filter(pk__in=set(mks_values.keys()),
                                     current_party__isnull=False).select_related('current_party')
    for mk in mks_list:
        yield dict(
            id=mk.id,
            name=mk.name,
            absolute_url=mk.get_absolute_url(),
            party=mk.current_party.name,
            party_url=mk.current_party.get_absolute_url(),
            party_id=mk.current_party.pk,
            **self._get_points(mks_values[mk.id]))
def get(cls, datetime_value, mask='%Y-%m-%d %H:%M:%S'):
    """
    Get datetime and return one default datetime string

    :mask: By default is %Y-%m-%d %H:%M:%S
    :datetime_value: str, datetime, tz.datetime

    NOTE(review): Python 2 only (``unicode``).  If ``datetime_value`` is
    none of the accepted types, ``_date``/``_time`` are never bound and
    the get_or_create call raises NameError — confirm inputs are
    validated upstream.
    """
    if isinstance(datetime_value, datetime):
        _date, _time = cls.convert_chrono_datetime(datetime_value)
    if isinstance(datetime_value, normal_datetime):
        _date, _time = cls.convert_chrono_datetime(datetime_value)
    if isinstance(datetime_value, str) or isinstance(datetime_value, unicode):
        # String inputs are parsed with ``mask`` before conversion.
        datetime_value = datetime.strptime(datetime_value, mask)
        _date, _time = cls.convert_chrono_datetime(datetime_value)
    # Reuse an existing Chrono row for this date/time pair when possible.
    chrono, _ = cls.objects.get_or_create(chrono_date=_date,
                                          chrono_time=_time)
    return chrono
def download_report_for_month(request, dt_string, group_pk=None):
    """Stream a CSV attendance report for the month of ``dt_string``
    ("YYYY-MM-DD") and the member group ``group_pk``.

    One row per member, one column per event in that month; "X" marks
    adequate attendance.  Redirects unauthenticated or unauthorised
    users away.
    """
    if not request.user.is_authenticated():
        return redirect("login")
    elif not has_permission(request.user, "cnto_view_reports"):
        return redirect("manage")
    group = MemberGroup.objects.get(pk=group_pk)
    dt = datetime.strptime(dt_string, "%Y-%m-%d")
    # All events in the report month, chronologically.
    events = Event.objects.filter(start_dt__year=dt.year,
                                  start_dt__month=dt.month).order_by("start_dt")
    members = Member.objects.filter(member_group=group).order_by("name")
    filename = "%s-%s.csv" % (dt.strftime("%Y-%m"), group.name.lower())
    # Create the HttpResponse object with the appropriate CSV header.
    response = HttpResponse(content_type='text/csv')
    response['Content-Disposition'] = 'attachment; filename="%s"' % (filename,)
    writer = csv.writer(response)
    # Header row: one column per event date.
    header_columns = ["Member"]
    for event in events:
        header_columns.append(event.start_dt.strftime("%Y-%m-%d"))
    writer.writerow(header_columns)
    for member in members:
        member_columns = [member.name]
        for event in events:
            # A missing Attendance row counts as "did not attend".
            was_adequate = False
            try:
                attendance = Attendance.objects.get(member=member, event=event)
                was_adequate = attendance.was_adequate()
            except Attendance.DoesNotExist:
                pass
            if was_adequate:
                member_columns.append("X")
            else:
                member_columns.append(" ")
        writer.writerow(member_columns)
    writer.writerow([])
    writer.writerow(["X = attended"])
    return response
def test_login_presistent(registered_customer, api_client):
    """Logging in with ``stay_logged_in`` must produce a persistent
    session cookie expiring ``SESSION_COOKIE_AGE`` seconds from now."""
    login_url = reverse('shop:login')
    data = {
        'form_data': {
            'username': registered_customer.email,
            'password': '******',
            'stay_logged_in': True
        }
    }
    response = api_client.post(login_url, data, format='json')
    tz_gmt = pytz.timezone('GMT')
    # Expected expiry; microseconds dropped to match the cookie's
    # whole-second timestamp format.
    shall_expire = datetime.now(tz=tz_gmt).replace(microsecond=0) + timedelta(seconds=settings.SESSION_COOKIE_AGE)
    assert response.status_code == 200
    session_cookie = response.cookies.get('sessionid')
    expires = datetime.strptime(session_cookie['expires'], '%a, %d-%b-%Y %H:%M:%S GMT').replace(tzinfo=tz_gmt)
    # 5-second tolerance covers time elapsed between request and check.
    assert abs(expires - shall_expire) < timedelta(seconds=5)
    assert session_cookie['max-age'] == settings.SESSION_COOKIE_AGE
def get_report_body_for_month(request, month_string):
    """Return the report context for the month given as "%Y-%m", covering
    the first through the last day of that month, as a JSON response."""
    if not request.user.is_authenticated():
        return redirect("login")
    elif not has_permission(request.user, "cnto_view_reports"):
        return redirect("manage")

    month_dt = datetime.strptime(month_string, "%Y-%m")
    year, month = month_dt.year, month_dt.month
    tz = timezone.get_default_timezone()

    # Range spans from midnight on the 1st to 23:59 on the month's last day.
    last_day = calendar.monthrange(year, month)[1]
    range_start = timezone.make_aware(datetime(year, month, 1, 0, 0), tz)
    range_end = timezone.make_aware(datetime(year, month, last_day, 23, 59), tz)

    context = get_report_context_for_date_range(range_start, range_end)
    return JsonResponse(context)
def should_update(self):
    """
    Checks if an update is needed.

    Compares the time elapsed since the stored ``LAST_UPDATE`` setting
    against ``self.update_interval`` (seconds).

    :returns: ``True`` when an update is due, ``False`` otherwise.
    """
    last_update = self.get_setting('LAST_UPDATE')
    if not last_update:
        # Never updated: pick a date far in the past so the first
        # check always reports that an update is due.
        last_update = datetime(1900, 1, 1)
    else:
        last_update = datetime.strptime(
            last_update.value, self.update_time_format)
    time_since = now() - last_update
    # BUG FIX: ``timedelta.seconds`` holds only the seconds component
    # (0..86399) and silently ignores whole days, so once more than a
    # day had passed the comparison could wrap around and updates would
    # be skipped. ``total_seconds()`` gives the full elapsed time.
    return time_since.total_seconds() >= self.update_interval
def parse_date(d, interactive=True):
    """Parse a mm/dd/yyyy (or mm/dd/yy) string into a ``date``.

    Empty/None values and the literal "not collected" yield ``None``.
    Unparseable values are handed to ``try_to_correct_value`` so the
    operator can fix them (re-entering this parser on retry).
    """
    if not d:
        return None
    if d.lower() in ("not collected",):
        return None

    candidate = d.strip()
    for fmt in ("%m/%d/%Y", "%m/%d/%y"):
        try:
            parsed = datetime.strptime(candidate, fmt)
        except ValueError:
            continue
        return parsed.date()

    # Neither format matched: escalate for manual correction.
    return try_to_correct_value(
        "Could not parse the date {!r}".format(d),
        lambda d: parse_date(d, interactive),
        interactive=interactive
    )
def save_event(request, event_type_name, dt_string, start_hour, end_hour):
    """Update an existing event on the given date with new start/end
    hours and event type, answering success/failure as JSON."""
    try:
        if not request.user.is_authenticated():
            return redirect("login")
        elif not has_permission(request.user, "cnto_edit_events"):
            return redirect("manage")

        event_type = EventType.objects.get(name__iexact=event_type_name)
        dt = datetime.strptime(dt_string, "%Y-%m-%d")

        start_dt = timezone.make_aware(datetime(dt.year, dt.month, dt.day, int(start_hour), 00, 00),
                                       timezone.get_default_timezone())
        # NOTE(review): the result of this call is discarded — looks like
        # dead code; confirm it can be removed.
        pytz.timezone("Europe/Stockholm")
        if int(end_hour) >= 24:
            # End hour past midnight: roll the end time into the next day.
            end_dt = timezone.make_aware(datetime(dt.year, dt.month, dt.day, 0, 0, 0),
                                         timezone.get_default_timezone())
            end_dt += timedelta(days=1, hours=int(end_hour) - 24)
        else:
            end_dt = timezone.make_aware(datetime(dt.year, dt.month, dt.day, int(end_hour), 00, 00),
                                         timezone.get_default_timezone())

        if end_dt < start_dt:
            # NOTE(review): 240 hours (10 days) looks like a typo for 24,
            # but the same constant appears in the scrape view — confirm
            # intent before changing either.
            end_dt += timedelta(hours=240)

        # Look the event up by calendar day only, then overwrite its fields.
        event = Event.objects.get(start_dt__year=start_dt.year, start_dt__month=start_dt.month,
                                  start_dt__day=start_dt.day)
        event.start_dt = start_dt
        event.end_dt = end_dt
        event.event_type = event_type
        event.duration_minutes = (end_dt - start_dt).total_seconds() / 60
        event.save()
    except Exception:
        # Broad catch is deliberate: any failure is reported back as JSON
        # with the full traceback rather than a 500 page.
        return JsonResponse({"success": False, "error": traceback.format_exc()})
    return JsonResponse({"success": True, "error": None})
def handle_enqueue_mails(self, publish_id, *args, **options):
    '''
    Enqueue the messages of one Publish, either synchronously or as a
    Celery task scheduled for the given ETA.
    '''
    # Python 2 code (print statements, ``except Exception, e``).
    eta_format = '%y-%m-%d %H:%M'
    if options['sync']:
        # Run in-process, blocking until done.
        print "Enqueue mails of a publish", publish_id, "Synchronously"
        enqueue_mails_for_publish('manage', publish_id, False)
    elif options['eta']:
        print "Enqueue mails of a publish", publish_id, "Asynchronously for ", options['eta']
        try:
            # Schedule the task for the requested wall-clock ETA.
            task = enqueue_mails_for_publish.apply_async(
                ('manage', publish_id,),
                eta=make_eta(datetime.strptime(options['eta'], eta_format)))
            print task
        except Exception, e:
            # Most likely a strptime failure; echo the expected format.
            print e, eta_format
def parse_csv_row(key_map, row):
    """Build and persist a ``Vehicle`` (plus its related rows) from one
    CSV row.

    :param key_map: mapping of CSV column name -> Vehicle field name for
        columns that can be copied across directly.
    :param row: dict for one CSV row (column name -> raw string value).
    :returns: the saved ``Vehicle`` instance.
    """
    vehicle = Vehicle()
    for key, val in row.items():
        mapped_field_name = key_map.get(key, None)
        if is_ignored_col(key):
            continue
        elif mapped_field_name is not None:  # idiom fix: was "not x is None"
            vehicle = handle_direct_field(vehicle, mapped_field_name, val)
            continue
        # Columns that need bespoke conversion rather than a direct copy.
        if key == 'Type':
            vehicle.is_new = vehicle_type_to_boolean(val)
        elif key == 'Certified':
            vehicle.certified = cert_to_boolean(val)
        elif key == 'Make':
            vehicle.make, is_new_make = VehicleMake.objects.get_or_create(name=val)
        elif key == 'EngineDisplacement':
            vehicle.displacement = number_displacement(val)
        elif key == 'DateInStock':
            vehicle.date_in_stock = datetime.strptime(val, '%m/%d/%Y')
        else:
            print('Unknown column in csv field ' + key)

    # body style
    vehicle.body, is_new_body = BodyStyle.objects.get_or_create(
        name=row['Body']
    )
    # vehicle model (depends on vehicle.make set in the loop above)
    vehicle.model, is_new_model = VehicleModel.objects.get_or_create(
        make=vehicle.make,
        number=row['ModelNumber'],
        name=row['Model'],
        doors=int(row['Doors']),
    )
    vehicle.save()

    vehicle_image_for_vehicle = partial(vehicle_image_obj_mkr, vehicle)
    # FIX: materialise the map() — on Python 3 ``map`` is lazy and some
    # Django versions require a real sequence for ``bulk_create``.
    bulk_list = list(map(vehicle_image_for_vehicle, row['ImageList'].split(',')))
    VehicleImage.objects.bulk_create(bulk_list)
    return vehicle
def dispatch(self, request, *args, **kwargs):
    """Resolve the target object from ``ctype_id``/``object_id`` URL
    kwargs, check permissions, and dispatch the request.

    Raises ``Http404`` when the content type/object does not exist or
    when no ``DatedValueType`` is configured for the content type.
    """
    try:
        self.ctype = ContentType.objects.get_for_id(
            kwargs.get('ctype_id'))
        self.object = self.ctype.get_all_objects_for_this_type().get(
            pk=kwargs.get('object_id'))
    except ObjectDoesNotExist:
        raise Http404
    if passes_test(request.user, obj=self.object):
        self.valuetypes = DatedValueType.objects.filter(ctype=self.ctype)
        if len(self.valuetypes) == 0:
            raise Http404
        # The report date may arrive via GET or POST; default to today.
        self.date_str = request.GET.get('date') or request.POST.get('date')
        if self.date_str:
            # NOTE(review): getattr without a default raises
            # AttributeError if settings.DATE_FORMAT is unset — confirm
            # it is always configured.
            date_fmt = getattr(settings, 'DATE_FORMAT')
            self.date = datetime.strptime(self.date_str, date_fmt)
        else:
            self.date = now().date()
        return super(ValuesManagementView, self).dispatch(
            request, *args, **kwargs)
    # User failed the test: delegate to permission_required so it can
    # produce the appropriate redirect/denial response.
    return permission_required(super(ValuesManagementView, self).dispatch,
                               test_to_pass=passes_test, obj=self.object)(
        request, *args, **kwargs)
def test_create_product_with_shop_product_and_attributes(admin_user):
    """POSTing a product with shop-product and attribute payloads to the
    API must create exactly one Product/ShopProduct pair and persist
    every attribute value correctly (string, translated, boolean,
    timedelta, numeric, datetime)."""
    shop = get_default_shop()
    client = _get_client(admin_user)
    supplier = get_default_supplier()
    cat = Category.objects.create(
        status=CategoryStatus.VISIBLE,
        visibility=CategoryVisibility.VISIBLE_TO_ALL,
        identifier="test_category",
        name="Test"
    )
    # NOTE(review): product_type is assigned but never used below.
    product_type = get_default_product_type()
    assert Attribute.objects.count() > 0
    assert Product.objects.count() == 0

    attributes_data = []
    # Values we expect to read back after the product is created.
    expected_values = {
        "untranslated_string_value": "test value",
        "numeric_value": 12,
        "boolean_value": True,
        "timedelta_value": "200",  # seconds
        "datetime_value": "2017-01-01 01:00:00",
        "translated_string_value": "translated string value"
    }
    # Build one payload entry per attribute spec, shaped by the
    # attribute's kind (stringy/numeric/temporal).
    for spec in ATTR_SPECS:
        attr = Attribute.objects.get(identifier=spec["identifier"])
        attr_data = {
            "numeric_value": None,
            "datetime_value": None,
            "untranslated_string_value": "",
            "attribute": attr.pk,
            # "product": product.pk
        }
        if attr.is_stringy:
            if attr.is_translated:
                attr_data["translations"] = {
                    "en": {
                        "translated_string_value": expected_values["translated_string_value"]
                    }
                }
            else:
                attr_data["untranslated_string_value"] = expected_values["untranslated_string_value"]
        elif attr.is_numeric:
            if attr.type == AttributeType.BOOLEAN:
                attr_data["numeric_value"] = int(expected_values["boolean_value"])
            elif attr.type == AttributeType.TIMEDELTA:
                attr_data["numeric_value"] = int(expected_values["timedelta_value"])
            else:
                attr_data["numeric_value"] = expected_values["numeric_value"]
        elif attr.is_temporal:
            attr_data["datetime_value"] = expected_values["datetime_value"]
        attributes_data.append(attr_data)

    data = _get_product_sample_data()
    data["shop_products"] = _get_sample_shop_product_data(shop, cat, supplier)
    data["attributes"] = attributes_data
    response = client.post("/api/shuup/product/", content_type="application/json", data=json.dumps(data))
    assert response.status_code == status.HTTP_201_CREATED

    # check all
    # Basic data must hold in every activated language.
    for lang in ("en", "pt-br"):
        activate(lang)
        product = Product.objects.first()
        _check_product_basic_data(product, data, lang)

    assert Product.objects.count() == 1
    assert ShopProduct.objects.count() == 1
    product = Product.objects.first()
    shop_product = ShopProduct.objects.first()
    assert product.get_shop_instance(shop) == shop_product
    assert supplier in shop_product.suppliers.all()
    assert cat in shop_product.categories.all()
    assert shop_product.primary_category == cat

    # validate attribute values
    for spec in ATTR_SPECS:
        attribute = Attribute.objects.get(identifier=spec["identifier"])
        attr = ProductAttribute.objects.get(product=product, attribute=attribute)
        if attribute.is_stringy:
            if attribute.is_translated:
                attr.set_current_language("en")
                assert attr.value == expected_values["translated_string_value"]
            else:
                assert attr.value == expected_values["untranslated_string_value"]
        elif attribute.is_numeric:
            if attribute.type == AttributeType.BOOLEAN:
                assert attr.value == expected_values["boolean_value"]
            elif attribute.type == AttributeType.TIMEDELTA:
                # The API stored seconds; the model exposes a timedelta.
                assert attr.value == datetime.timedelta(seconds=int(expected_values["timedelta_value"]))
            else:
                assert attr.value == expected_values["numeric_value"]
        elif attribute.is_temporal:
            dt_value = expected_values["datetime_value"]
            parsed_dt = dt.strptime(dt_value, "%Y-%m-%d %H:%M:%S")
            # Compare date parts only — presumably the stored value may
            # carry timezone-adjusted time; confirm against the API.
            assert attr.value.year == parsed_dt.year
            assert attr.value.month == parsed_dt.month
            assert attr.value.day == parsed_dt.day
def scrape(request, event_type_name, dt_string, start_hour, end_hour):
    """Scrape attendance for the given date/hour window, upsert the
    matching Event, and rebuild its Attendance rows; answers JSON."""
    # Python 2 code (``except Exception, e`` below).
    try:
        if not request.user.is_authenticated():
            return redirect("login")
        elif not has_permission(request.user, "cnto_edit_events"):
            return redirect("manage")

        event_type = EventType.objects.get(name__iexact=event_type_name)
        dt = datetime.strptime(dt_string, "%Y-%m-%d")

        start_dt = timezone.make_aware(
            datetime(dt.year, dt.month, dt.day, int(start_hour), 00, 00),
            timezone.get_default_timezone()
        )
        # NOTE(review): result discarded — looks like dead code.
        pytz.timezone("Europe/Stockholm")
        if int(end_hour) >= 24:
            # End hour past midnight: roll the end time into the next day.
            end_dt = timezone.make_aware(datetime(dt.year, dt.month, dt.day, 0, 0, 0),
                                         timezone.get_default_timezone())
            end_dt += timedelta(days=1, hours=int(end_hour) - 24)
        else:
            end_dt = timezone.make_aware(
                datetime(dt.year, dt.month, dt.day, int(end_hour), 00, 00),
                timezone.get_default_timezone()
            )

        if end_dt < start_dt:
            # NOTE(review): 240 hours (10 days) looks like a typo for 24,
            # but the same constant appears in save_event — confirm intent.
            end_dt += timedelta(hours=240)

        try:
            # Scrape in UTC; failures fall back to an empty result so the
            # event itself can still be saved.
            scrape_result, scrape_stats = get_all_event_attendances_between(
                start_dt.astimezone(pytz.utc), end_dt.astimezone(pytz.utc)
            )
        except ValueError:
            traceback.print_exc()
            scrape_result = {}
            scrape_stats = {"average_attendance": 0, "minutes": 0, "success": True}

        # scrape_result = {u'Spartak [CNTO - Gnt]': 1.0, u'Chypsa [CNTO - Gnt]': 1.0, u'Guilly': 0.42857142857142855,
        #                  u'Hellfire [CNTO - SPC]': 1.0, u'Cody [CNTO - Gnt]': 1.0,
        #                  u'Ozzie [CNTO - SPC]': 0.7142857142857143, u'Skywalker': 0.6397515527950312,
        #                  u'Obi [CNTO - JrNCO]': 0.7142857142857143, u'Zero': 1.0,
        #                  u'Chris [CNTO - SPC]': 0.14285714285714285, u'Hateborder [CNTO - Gnt]': 1.0,
        #                  u'Dusky [CNTO - Gnt]': 0.7142857142857143}
        # scrape_stats = {'average_attendance': 0.7795031055900622, 'minutes': 56.0}

        # Upsert the event for that calendar day.
        try:
            event = Event.objects.get(
                start_dt__year=start_dt.year, start_dt__month=start_dt.month, start_dt__day=start_dt.day
            )
            event.start_dt = start_dt
            event.end_dt = end_dt
            event.event_type = event_type
            event.duration_minutes = scrape_stats["minutes"]
            event.save()
        except Event.DoesNotExist:
            event = Event(
                start_dt=start_dt, end_dt=end_dt, duration_minutes=scrape_stats["minutes"], event_type=event_type
            )
            event.save()

        # Rebuild attendance from scratch for this event.
        previous_attendances = Attendance.objects.filter(event=event)
        previous_attendances.delete()

        for raw_username in scrape_result:
            # Scraped names look like "Name [CNTO - Rank]"; split out the
            # bare username and, when present, the rank token.
            username_parts = raw_username.split(" ")
            username = username_parts[0]

            if len(username) == 0:
                continue

            rank_str = RECRUIT_RANK
            if len(username_parts) > 3:
                # Strip the trailing "]" from the rank token.
                rank_str = username_parts[3][0:-1]

            attendance_value = scrape_result[raw_username]

            try:
                rank = Rank.objects.get(name__iexact=rank_str)
            except Rank.DoesNotExist:
                rank = Rank(name=rank_str)
                rank.save()

            try:
                member = Member.objects.get(name__iexact=username, discharged=False, deleted=False)
            except Member.DoesNotExist:
                member = Member(name=username, rank=rank)
                member.save()
            except MultipleObjectsReturned:
                raise ValueError("Multiple users found with name %s!" % (username,))

            # attendance_value is a fraction of the event's duration.
            attendance_seconds = (attendance_value * event.duration_minutes) * 60

            try:
                attendance = Attendance.objects.get(event=event, member=member)
                attendance.attendance_seconds = attendance_seconds
                attendance.save()
            except Attendance.DoesNotExist:
                attendance = Attendance(event=event, member=member, attendance_seconds=attendance_seconds)
                attendance.save()

        return JsonResponse({"attendance": scrape_result, "stats": scrape_stats, "success": True})
    except Exception, e:
        # Broad catch is deliberate: report any failure back as JSON.
        return JsonResponse({"success": False, "error": traceback.format_exc()})
# NOTE(review): fragment — the header of the enclosing function is outside
# this chunk. Python 2 code (``async`` is a reserved word on Python 3.7+).
if async in ['False', False]:
    async = False
if time is None:
    # No schedule requested: enqueue immediately.
    enqueue_mail(mail_obj=msg, async=async)
    return
if type(time) == str:
    # Relative spec like "30s"/"10m"/"2h": offset from now.
    m = re.search(r"^(?P<number>\d+)(?P<unit>[smh])$", time)
    if m:
        p = m.groupdict()
        # Map the unit letter to the timedelta keyword.
        d = {{'s': 'seconds', 'm': 'minutes', 'h': 'hours', }[p['unit']]: int(p['number'])}
        time = datetime.now() + timedelta(**d)
    else:
        # Otherwise treat it as an absolute local timestamp.
        time = make_aware(datetime.strptime(time, "%Y-%m-%d %H:%M:%S"))
enqueue_mail.apply_async((), {'mail_obj': msg}, eta_time=make_eta(time))


def handle_forward(self, *args, **options):
    '''
    Forward a message looked up by its numeric id.
    '''
    # Python 2 print statements.
    print ">>>>", args, options
    if options['id'] and options['id'].isdigit():
        msg = Message.objects.get(id=options['id'])
        print msg
def __init__(self, date):
    """Accept a date string in ISO ("%Y-%m-%d") format, falling back to
    day-first ("%d/%m/%Y"), and store the parsed datetime on ``self.date``."""
    try:
        parsed = datetime.strptime(date, "%Y-%m-%d")
    except ValueError:
        # Not ISO: retry with the day-first format (this raises
        # ValueError itself if the string matches neither).
        parsed = datetime.strptime(date, "%d/%m/%Y")
    self.date = parsed