def detail(request, offset):
    """Return a JSON-serialisable dict describing one University.

    :param request: HTTP request (unused beyond routing).
    :param offset: university primary key as a string; converted to int.
    :return: response dict on success, or ({'error_code': ...}, 404)
             when no university with that pk exists.
    """
    university_id = int(offset)
    try:
        university = University.objects.get(pk=university_id)
    except University.DoesNotExist:
        # NOTE(review): returning a (dict, status) tuple — presumably some
        # middleware/decorator unpacks this; confirm against the router.
        return {'error_code': 'UniversityNotFound'}, 404
    schedule_unit = university.scheduleunit_set.all()
    response = {
        'name': university.name,
        'support': {
            'import_course': university.support_import_course,
            'list_course': university.support_list_course,
            'ta': university.support_ta,
        },
        'lessons': {
            'detail': [{
                'number': item.number,
                # NOTE(review): time.strftime(value, "%H:%M") only works if
                # `time` here is the datetime.time class (unbound-method
                # call), not the stdlib time module — confirm the import.
                'start': time.strftime(item.start, "%H:%M"),
                'end': time.strftime(item.end, "%H:%M"),
            } for item in schedule_unit],
            'separators': listify_int(university.lessons_separator)
        }
    }
    return response
def InsertSingleEvent(calendar_client, title='bseu-api event',
                      content='study hard', where='in space',
                      start_time=None, end_time=None, ucalendar=None):
    """Insert one event into a Google Calendar via the gdata client.

    :param calendar_client: authenticated gdata calendar client.
    :param title/content/where: event fields.
    :param start_time/end_time: datetimes; when start_time is None the
        event starts now and lasts one hour.
    :param ucalendar: optional target calendar; default calendar if None.
    Failures are logged and swallowed (best-effort import).
    """
    event = gdata.calendar.data.CalendarEventEntry()
    event.title = atom.data.Title(text=title)
    event.content = atom.data.Content(text=content)
    event.where.append(gdata.calendar.data.CalendarWhere(value=where))
    if start_time is None:
        # Use current time for the start_time and have the event last 1 hour
        start_time = time.strftime('%Y-%m-%dT%H:%M:%S.000Z', time.gmtime())
        end_time = time.strftime('%Y-%m-%dT%H:%M:%S.000Z', time.gmtime(time.time() + 3600))
    else:
        # NOTE(review): subtracting 3 hours looks like a hard-coded UTC+3
        # local-time correction — confirm the intended timezone handling.
        start_time = (start_time - timedelta(hours=3)).strftime('%Y-%m-%dT%H:%M:%S.000Z')
        end_time = (end_time - timedelta(hours=3)).strftime('%Y-%m-%dT%H:%M:%S.000Z')
    event.when.append(gdata.calendar.data.When(start=start_time, end=end_time))
    try:
        if ucalendar is None:
            calendar_client.InsertEvent(event)
        else:
            calendar_client.InsertEvent(event, ucalendar)
    except Exception, e:  # Python 2 syntax; best-effort: log and skip
        logging.error('import was unsuccessful - skipping: %s' % e)
def generate_time_formats(time):
    """Render a time value in several compact digit formats.

    :param time: any object with ``strftime`` (e.g. datetime.time/datetime).
    :return: list of strings, one per pattern in the fixed order
             "hmmss", "hhmmss", "Hmmss", "HHmmss", where h/H select
             12/24-hour and the doubled letter means zero-padded.
    """
    hour_12_full = time.strftime("%I")
    hour_24_full = time.strftime("%H")
    hour_12_short = hour_12_full.lstrip("0")
    hour_24_short = hour_24_full.lstrip("0")
    min_full = time.strftime("%M")
    sec_full = time.strftime("%S")

    def _expand(pattern):
        # Order matters: double-letter tokens must be substituted before
        # the single-letter ones, exactly as in the original chain.
        return (pattern.replace("HH", hour_24_full)
                       .replace("hh", hour_12_full)
                       .replace("H", hour_24_short)
                       .replace("h", hour_12_short)
                       .replace("mm", min_full)
                       .replace("ss", sec_full))

    # The original deepcopy() was a no-op on immutable strings; a plain
    # comprehension expresses the same transformation directly.
    return [_expand(fmt) for fmt in ["hmmss", "hhmmss", "Hmmss", "HHmmss"]]
def display(adc_temp, adc_co2):
    """Scale raw ADC readings, publish them via module globals, and print
    one CSV-ish line with a timestamp."""
    global datetime, co2, temp

    temp = adc_temp * 5
    co2 = adc_co2 * 200
    # Timestamp rounded down to the minute, e.g. "2024-01-31 12:34:00".
    datetime = time.strftime("%Y-%m-%d ") + time.strftime("%H:%M:00")

    stamp = time.strftime("%H:%M:%S", time.localtime())
    print(stamp, ';', "{0:04f}".format(adc_temp), ';', temp, ';',
          "{0:04f}".format(adc_co2), ';', co2)
def now(self, time=None):
    """Return the current (or given) time as a spoken-style string.

    :param time: a datetime.time; defaults to datetime.now().time().
    :return: "HH MM", or "HH ноль M" when the minute is a single digit
             (so 09:05 reads as "09 ноль 5").
    """
    if time is None:
        time = datetime.now().time()
    if time.minute < 10:
        # Bug fix: the original formatted "%m", which is the MONTH of the
        # placeholder date (always "01"), not the minute value.
        return time.strftime("%H ноль ") + str(time.minute)
    return time.strftime("%H %M")
def PassSubmit():
    """Approve an expert: mark the record usable, issue a certificate id,
    and extend validity by roughly one year.

    Reads ``UserName`` from the request; returns JSON with the new
    validity date and certificate id.
    """
    expert_info=Expert_info.query.filter(Expert_info.UserName==request.values.get("UserName")).first()
    expert_info.Statue=u'可用'  # "usable"
    md=hashlib.md5()
    # Certificate id is deterministic per user: md5(username), chars 1-9.
    md.update(request.values.get("UserName"))
    i=str(md.hexdigest())[1:10]
    expert_info.ExpertCertificateID='zj-'+i
    # Valid for 31622400 s (366 days) from now.
    vt=time.localtime(time.time()+31622400)
    expert_info.ValidTime=time.strftime('%Y-%m-%d',vt)
    expert_info.save()
    return json.dumps({'time':time.strftime('%Y-%m-%d',vt),'ExpertCertificateID':'zj-'+i})
def update_profile(self):
    """ Update Developer Profile and basic information.

    Fetches the GitHub user record and repo list, updates the cached
    Profile counters (followers/following/stars/forks/open issues), and
    records API usage — but only for requests that actually hit the
    network (requests-cache).
    """
    profile, created = Profile.objects.get_or_create(dev_user=self)
    # User -------
    url = req['get_user_info'].format(self.githubuser, GITHUB1, GITHUB2)
    info = requests.get(url)
    if settings.DEBUG:
        print "Updating {}".format(profile)
    # Count against the API quota only when the response was not cached.
    if not info.from_cache:
        now = time.strftime('%Y-%m-%d')
        api, created = APIStats.objects.get_or_create(date=now)
        api.inc_call()
        api.save()
    info = info.json()
    profile.followers = info[u'followers']
    profile.following = info[u'following']
    self.repos = info[u'public_repos']
    self.save()
    # Repositories
    url = req['get_user_repos'].format(self.githubuser, GITHUB1, GITHUB2)
    repos = requests.get(url)
    if not repos.from_cache:
        now = time.strftime('%Y-%m-%d')
        api, created = APIStats.objects.get_or_create(date=now)
        api.inc_call()
        api.save()
    # Aggregate stars / forks / open issues across all public repos.
    stars = 0
    forks = 0
    solver = 0
    for repo in repos.json():
        stars += int(repo[u'stargazers_count'])
        forks += int(repo[u'forks_count'])
        solver += int(repo[u'open_issues_count'])
    profile.stars = stars
    profile.forks = forks
    profile.solver = solver
    profile.save()
def Abort():
    """Abort an expert: record (or update) the abort reason and invalidate
    the expert's certificate.

    The timestamp used throughout is "two hours ago" (now - 7200 s),
    formatted YYYY-MM-DD; it is also the return value.
    """
    t = time.localtime(time.time() - 7200)
    stamp = time.strftime('%Y-%m-%d', t)
    username = request.values.get("UserName")
    context = request.values.get("NotPassResult")

    reseaon = Reseaon.query.filter(Reseaon.UserName == username).first()
    if reseaon is None:
        reseaon = Reseaon(UserName=username, ReseaonContext=context,
                          CreateTime=stamp, Message="被中止")
    else:
        reseaon.CreateTime = stamp
        reseaon.Message = u"被中止"
        reseaon.ReseaonContext = context
    reseaon.save()

    expert_info = Expert_info.query.filter(Expert_info.UserName == username).first()
    expert_info.Statue = u'失效'
    expert_info.ValidTime = stamp
    expert_info.save()
    return stamp
async def update_url(*, projectName, urlID: str = Path(...), urlsItemInfo: UrlsItemInfo):
    """Update one item in the project's Urls collection by its ObjectId.

    NOTE(review): unlike the paginated variant of this handler, this one
    does not guard the ObjectId conversion and will raise on a malformed
    urlID — confirm whether that is intended.
    """
    # print(projectName, urlID, urlsItemInfo)
    urlsItemInfo = urlsItemInfo.dict()
    # Stamp the modification time.
    urlsItemInfo['modifiedTime'] = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
    # Resolve projectName -> projectId.
    projectId = await findProjectIdFromProjectName(
        dbPrefix, 'Project', queryDict={'projectName': projectName}, showDict={'_id': 1})
    #result1 = await createUrlItems(dbPrefix+'-'+projectId,'Urls',urlsItemInfos)
    # Build the query key from the url id.
    urlID = ObjectId(urlID)
    # print('urlID',urlID)
    result1 = await updateUrlItems(dbPrefix + '-' + projectId, 'Urls', queryDict={"_id": urlID}, setDict={"$set": urlsItemInfo})
    return (result1)
def getTime(y):
    """Return a rough human-readable "x unit(s) ago" string for y.

    :param y: value whose str() looks like "YYYY-MM-DD HH:MM:SS[.ffffff]"
              (e.g. a datetime); compared against the current local time.
    :return: e.g. "42 second(s) ago", "3 minute(s) ago", "2 day(s) ago".
    """
    today = date.today()
    timeNow = time.strftime("%H:%M:%S")
    when = ""
    x = str(y).split(" ")
    stoday = str(today)
    year = x[0].split("-")[0]
    month = x[0].split("-")[1]
    day = x[0].split("-")[2]
    if x[0] == stoday:
        # Same day: compare seconds-since-midnight.
        hour = x[1].split(":")[0]
        minu = x[1].split(":")[1]
        seco = x[1].split(":")[2].split(".")[0]
        totime = ((int(timeNow.split(":")[0])*3600)+(int(timeNow.split(":")[1])*60)+int(timeNow.split(":")[2]))-((int(hour)*3600)+(int(minu)*60)+int(seco))
        if timeNow.split(":")[0] == hour or (int(timeNow.split(":")[0]) - int(hour)) == 1:
            if timeNow.split(":")[1] == minu:
                when = str(round(totime))+" second(s) ago"
            elif int(timeNow.split(":")[1])-int(minu) == 1 and int(timeNow.split(":")[2]) < 50:
                # Just over a minute boundary: still report seconds.
                when = str(round(totime))+" second(s) ago"
            else:
                when = str(round(totime/60))+" minute(s) ago"
        else:
            when = str(round(totime/3600))+" hours ago"
    elif stoday.split("-")[0] == year:
        # Same year, different day.
        if stoday.split("-")[1] == month:
            when = str(int(stoday.split("-")[2])-int(day))+" day(s) ago"
        elif int(stoday.split("-")[1])-int(month) == 1 and int(stoday.split("-")[2]) < 24:
            # NOTE(review): previous-month case reports the current
            # day-of-month as the day count — an approximation at best;
            # confirm the intent.
            when = str(int(stoday.split("-")[2]))+" day(s) ago"
        else:
            when = str(int(stoday.split("-")[1])-int(month))+" month(s) ago"
    else:
        when = str(int(stoday.split("-")[0])-int(year))+" year(s) ago"
    return when
async def to_stop_word(*, projectName: str = Path(...), stopDictItemID: List, currentPage: int = 1, pageSize: int = 10): """ 用户词转停止词 """ # projectName 转 projectId projectId = await findProjectIdFromProjectName(dbPrefix, 'Project', queryDict={'projectName': projectName}, showDict={'_id': 1}) if not projectId: raise HTTPException(status_code=503, detail='projectNotExist') deleteDictList = [] targetDictList = [] #print(stopDictItemID,1111111111111) for item in stopDictItemID: try: oid = ObjectId(item['_id']['$oid']) except: raise HTTPException(status_code=503, detail='invalid ObjectID') else: deleteDict = {'_id': oid} deleteDictList.append(deleteDict) targetDictList.append({'word': item['word'], 'modifiedTime': time.strftime("%Y/%m/%d %H:%M:%S", time.localtime()), 'operator': item['operator'], 'source': '用户词','exStatus':''}) result = await deleteUserDictItems(dbPrefix+'-'+projectId, 'UserDict', 'StopDict', currentPage, pageSize, deleteDictList, targetDictList) if result == 'error': raise HTTPException(status_code=403, detail=result) else: return (result)
def writemdfile(lat, lon, fileitme):
    """Write a Jekyll sail-track post stub for one track.

    :param lat: map centre latitude.
    :param lon: map centre longitude.
    :param fileitme: track identifier; used in the post file name and as
                     the referenced CSV base name.
    """
    path = (repopath + "_posts/sailtrack/" +
            time.strftime("%Y-%m-%d-Sailtrack-") + fileitme + ".md")
    front_matter = (
        "---\n"
        "layout: track\n"
        "title: sail track " + fileitme + "\n"
        "categories: sailtrack\n"
        "date: " + time.strftime("%Y-%m-%d") + "\n"
        "published: false\n"
        "geo: " + fileitme + ".csv\n"
        "geocenterlon: " + str(lon) + "\n"
        "geocenterlat: " + str(lat) + "\n"
        "mapzoom: 11\n"
        "---\n\n"
    )
    # The context manager replaces the manual flush()/close() pair and
    # guarantees the handle is closed even if a write fails.
    with open(path, 'w') as targetfile:
        targetfile.write(front_matter)
    return
def log(self, text, color="green", type=1, status=1, download=""):
    """Emit a log line either to the attached test source or to stdout
    (ANSI-coloured with a HH:MM:SS prefix), then append it to the log file.
    """
    # ANSI foreground colour codes.
    color_hex = {
        'red': '91m',
        'green': '92m',
        'yellow': '93m',
        'blue': '94m',
        'cyan': '96m'
    }
    if self.test_source:
        self.test_source.send_log(text.decode('utf-8'), type, status, download)
    else:
        print(
            '[' + time.strftime("%H:%M:%S") + '] \033[{color} {string} \033[0m'.format(
                # time=dt.strftime(format1),
                color=color_hex[color],
                string=text.decode('utf-8')))
    try:
        # self.file is an os-level file descriptor.
        os.write(self.file, dt.strftime(format1) + ": " + text + "\n")
    except IOError as (errno, strerror):  # Python 2 tuple-unpacking form
        print "I/O error({0}): {1}".format(errno, strerror)
def stopProfiling(pr, profileName):
    """Stop a cProfile run and dump per-function stats to a tab-separated
    file under the add-on's profiles/ folder (Kodi/XBMC environment).

    :param pr: the active cProfile.Profile instance.
    :param profileName: base name for the output file.
    """
    pr.disable()
    ps = pstats.Stats(pr)
    profiles = xbmc.translatePath("%sprofiles/" % xbmcaddon.Addon().getAddonInfo('profile')).decode('utf-8')
    if not xbmcvfs.exists(profiles):
        # Create the profiles folder
        xbmcvfs.mkdir(profiles)
    timestamp = time.strftime("%Y-%m-%d %H-%M-%S")
    profile = "%s%s_profile_(%s).tab" % (profiles, profileName, timestamp)
    f = xbmcvfs.File(profile, 'w')
    f.write("NumbCalls\tTotalTime\tCumulativeTime\tFunctionName\tFileName\r\n")
    # ps.stats maps (filename, line, func) -> (cc, nc, tt, ct, callers).
    for (key, value) in ps.stats.items():
        (filename, count, func_name) = key
        (ccalls, ncalls, total_time, cumulative_time, callers) = value
        try:
            f.write(
                "%s\t%s\t%s\t%s\t%s\r\n" % (ncalls, "{:10.4f}".format(total_time),
                                            "{:10.4f}".format(cumulative_time),
                                            func_name, filename))
        except ValueError:
            # Fall back to plain formatting when the fixed-width float
            # conversion rejects the value.
            f.write(
                "%s\t%s\t%s\t%s\t%s\r\n" % (ncalls, "{0}".format(total_time),
                                            "{0}".format(cumulative_time),
                                            func_name, filename))
    f.close()
def get_human_readable(self):
    """Return (remind_on, remind_at): a human phrase for the reminder's
    day ("today", "tomorrow", "on Monday 3 May", ...) and its HH:MM time,
    both in the user's timezone.
    """
    start = self.localised_start()
    remind_on = 'on %s' % WEEKDAYS[start.weekday()]
    # NOTE(review): time.strftime(value, fmt) only works if `time` is the
    # datetime.time class here (unbound-method call) — confirm the import.
    remind_at = time.strftime(start.time(), '%H:%M')
    today = datetime.now(pytz.timezone(self.user.timezone.name))
    tomorrow = today + timedelta(days=1)
    if start.date() == today.date():
        remind_on = 'today'
    elif start.date() == tomorrow.date():
        remind_on = 'tomorrow'
    elif start.date().month > today.date().month \
            or start.date().year > today.year:
        # Later month (or next year): add the day and month name.
        remind_on += ' %s %s' % (
            start.date().day, MONTHS[start.date().month]
        )
    # if the week is not this week
    elif today.date().isocalendar()[1] < start.date().isocalendar()[1]:
        remind_on += ' %s %s' % (
            start.date().day, MONTHS[start.date().month]
        )
    elif start.date() == today.date() - timedelta(days=1):
        remind_on = 'yesterday'
    if start.date().year > today.year or start.date().year < today.year:
        # Different year in either direction: append it.
        remind_on += ' %s' % start.date().year
    return remind_on, remind_at
def group_get(request, id):
    """Return one Group as JSON: subject/course ids, contract id, HH:MM
    start and end times, and the mode code.
    """
    g = Group.objects.select_related("FacultyContract__Faculty", "Course",
                                     "Course__Course").get(id=id)
    data = {
        "id": g.id,
        "subject": g.course.id,
        # Bug fix: `<>` is Python-2-only syntax (removed in Python 3);
        # use an explicit identity check instead.
        "course": g.course.parent.id if g.course.parent is not None else None,
        "agreementId": g.facultycontract.id,
        "start": time.strftime(g.start, "%H:%M"),
        "end": time.strftime(g.end, "%H:%M"),
        "mode": g.mode
    }
    return HttpResponse(json.dumps(data), mimetype="application/json")
def convert_to_time(hex_value):
    '''
    Convert 16 bit hex value into time

    Fields are extracted with the module-level CONST_* masks/shifts;
    exits the process with CONST_EINVAL when a field is out of range.
    '''
    from datetime import time
    #extract values
    hour = hex_value & CONST_HOUR_MASK
    hour = hour >> CONST_HOUR_RSHIFT
    minute = hex_value & CONST_MINUTE_MASK
    minute = minute >> CONST_MINUTE_RSHIFT
    second = hex_value & CONST_SECOND_MASK
    second = second << CONST_SECOND_LSHIFT #multiply by 2 because of encoding
    try:
        # Note: the name `time` (the datetime.time class) is rebound to
        # an instance here.
        time = time(hour, minute, second)
    except ValueError as ve:
        print ve
        sys.exit(CONST_EINVAL)
    return "Time: " + time.strftime("%I:%M:%S %p")
def _compute_constract_id(self):
    """Compute a sequential contract id of the form
    DTD-<type-id><YYYYMMDD><NN> (Odoo compute method).

    NN restarts at 01 for each day/type prefix and is zero-padded to two
    digits while below 10.
    """
    if self.constract_type:
        max_constract_id = ''
        baseid = 'DTD-' + self.constract_type.name_id + time.strftime(
            "%Y%m%d", time.localtime())
        sql_baseid = baseid + "%"
        # Fetch the highest existing id with this prefix.
        self._cr.execute(
            "select constract_id from dtdream_contract where constract_id like '" + sql_baseid + "' order by id desc limit 1")
        for rec in self._cr.fetchall():
            max_constract_id = rec[0]
        if max_constract_id:
            # The numeric suffix starts after the fixed 15-char prefix.
            max_id = max_constract_id[15:]
            if int(max_id) < 9:
                # Next value still single-digit: keep the zero padding.
                self.constract_id_copy = baseid + '0' + str(
                    int(max_id) + 1)
                self.constract_id = baseid + '0' + str(int(max_id) + 1)
            else:
                self.constract_id_copy = baseid + str(int(max_id) + 1)
                self.constract_id = baseid + str(int(max_id) + 1)
        else:
            # First contract of the day for this type.
            self.constract_id_copy = baseid + '01'
            self.constract_id = baseid + '01'
def graph_seats_history(self, courses):
    """Print a JSON time series of total seat counts (one point per
    course-version hour), suitable for a JS charting library.

    Note: the loop variable `time` shadows any module-level time import
    inside this method; here it is a datetime from ``courses.datetimes``.
    """
    times = courses.datetimes('time_created', 'hour')
    seats = []
    self.stdout.write('Creating graphs for ' + str(len(times)) + ' versions')
    i = 0
    for time in times:
        i += 1
        self.stdout.write('-> Calculating seats for: ' + str(time))
        versions = CourseVersion.objects.filter(
            time_created__year=time.year,
            time_created__month=time.month,
            time_created__day=time.day,
            time_created__hour=time.hour)
        total = 0
        excluded_values = set([])
        for version in versions:
            value = version.field_list()['seats']
            total += value
            excluded_values.add(version.get_course().id)
        # Courses with no version in this hour contribute the seat count
        # of their most recent earlier version instead.
        others = Course.objects.exclude(id__in=excluded_values)
        for other in others:
            latest = self.get_last_version(other, time)
            if latest:
                value = latest.field_list()['seats']
                total += value
        # %s -> epoch seconds; x is milliseconds for the charting frontend.
        obj = {'x': str(int(time.strftime("%s")) * 1000), 'y': total}
        seats.append(obj)
        self.stdout.write('-> Seats: ' + str(total))
    self.stdout.write(json.dumps(seats))
def save_user(backend, user, response, *args, **kwargs):
    """
    When an user Login with GitHub, we create a Developer instance so we can
    have an associated Developer profile with that authenticated user.

    No-op when a Developer already exists for this user.
    """
    if backend.name == 'github':
        try:
            dev = Developer.objects.get(user=user)
            # If exists, we check for any new update in the repos. Check all the badges
            # FIXME In the future, this function will be called using Celery
            return
        except ObjectDoesNotExist:
            if settings.DEBUG:
                print "Creating new user {}".format(user)
            # Analytics stuff: bump per-day user and API counters.
            now = time.strftime('%Y-%m-%d')
            u, created = UserStats.objects.get_or_create(date=now)
            u.inc_user()
            u.save()
            api, created = APIStats.objects.get_or_create(date=now)
            api.inc_call()
            api.save()
            # Create user from the OAuth response payload.
            developer = Developer()
            developer.githubuser = response['login']
            developer.avatar = response['avatar_url']
            developer.repos = response['public_repos']
            developer.user = user
            developer.save()
async def create_project(project: Project, currentPage: int = 1, pageSize: int = 10):
    """Create a new project and return the paginated project list.

    Flow:
    1. stamp the payload with a creation time;
    2. write the project name into the keywordsManagement -> Project table;
    3. the project's category list goes into <projectName> -> Categories.
    Expected result shape:
    [{projectname: 'xx', creater: '', timestamp: '', categories: [1,2,3]}, ...]
    """
    # print(currentPage, pageSize)
    # 1. add the timestamp
    projectnew = project.dict()
    projectnew['timestamp'] = time.strftime("%Y/%m/%d %H:%M:%S", time.localtime())  # getBJTime()
    # print('projectsnew',projectnew)
    # 2. write to the database, catching every failure mode explicitly
    try:
        result = await createnewproject(dbPrefix, 'Project', projectnew, currentpage=currentPage, pagesize=pageSize)
    except pymongo.errors.DuplicateKeyError as e:
        # Duplicate project name: extract the offending name from the
        # driver's error message.
        errMsg = e.details['errmsg'].split(':')[-1].strip('}').strip().strip(
            '"')
        #print('errMsg',errMsg)
        raise HTTPException(status_code=503, detail=f'以下项目出现重复,创建失败! 项目名称: \'{errMsg}\'')
    except Exception as e:
        # Any other database error.
        raise HTTPException(status_code=503, detail=e.details)
    else:
        # Success: return the paginated result.
        return (result)
def CreateComment():
    """Create a comment on a dish (POST only).

    Expects token, DishID and Content in the request values; returns a
    JSON message describing success or the first validation failure.
    """
    if request.method=='GET':
        return json.dumps({'message':'Please use method POST!'})
    if request.method=='POST':
        token=request.values.get('token')
        if token is None:
            return json.dumps({'message':'Need Token!'})
        user=User.verify_auth_token(token)
        # verify_auth_token returns an error string on failure and a User
        # on success (Python 2 str type check).
        if type(user) is types.StringType:
            return json.dumps({'message':user})
        if request.values.get('DishID') is None:
            return json.dumps({'message':'Need DishID!'})
        if YMDish.query.filter(YMDish.DishID==request.values.get('DishID')).all() is None:
            return json.dumps({'message':'DishID is invalid!'})
        if request.values.get('Content') is None:
            return json.dumps({'message':'Need Content!'})
        else:
            ym_dish_comments=YMDishComment(UserName=user.UserName,DishID=request.values.get('DishID')
                ,Time=time.strftime('%Y-%m-%d %H:%M:%S',time.localtime(time.time()))
                ,Content=request.values.get('Content'))
            try:
                ym_dish_comments.save()
            except:
                # Unique-constraint violation: comment already stored.
                return json.dumps({'message':'Comment Existed!'})
            return json.dumps({'message':'Add Comment Success!'})
def dump_all_data(data, number_of_records=28):
    '''Split ``data`` into a serialised batch and the untouched remainder.

    Input: data filtered through all helper functions / test functions,
    and number_of_records, an int, number of records to save per batch.

    Bug fix: the remainder used to be ``data[number_of_records:-1]``,
    which silently dropped the final record; it is now everything past
    the batch.

    Note: additional fields that can be added are commented out for ease
    of changing at a later stage. These should not be removed.

    Returns: (output_data, cur_data) — the batch rendered as plain dicts,
    and the remaining records not yet dumped.
    '''
    if len(data) > number_of_records:
        data_to_dump = data[:number_of_records]
        cur_data = data[number_of_records:]
    else:
        data_to_dump = data
        cur_data = []
    output_data = []
    for entry in data_to_dump:
        output_data.append({
            "FullName": entry["FullName"],
            "AssignedToName": entry["AssignedToName"],
            "DueTime": str(time.strftime(entry["DTime"], "%H:%M")),
            # "StatusDesc": entry["StatusDesc"],
            # "CompleteDate": str(entry["CompleteDate"])
            # "EventId": entry["EventId"],
            # "CDate": date.strftime(entry["CDate"], "%m/%d/%Y"),
            # "CTime": time.strftime(entry["CTime"], "%H:%M"),
            # "CompanyName": entry["CompanyName"],
            # "EventCategoryDesc": entry["EventCategoryDesc"],
            # "DDate": date.strftime(entry["DDate"], "%m/%d/%Y"),
            # "ActivityId": entry["ActivityId"],
            # "EntityId": entry["EntityId"],
            # "DueDate": entry["DueDate"]
        })
    return (output_data, cur_data)
async def update_basicWords(*, projectName, basicWordItemId: str = Path(...), currentPage: Optional[int] = 1, pageSize: Optional[int] = 10, items2Update: Optional[Items2Update], flag: Optional[str] = 'id'):
    """Update a basicWords entry, addressed either by ObjectId (default)
    or by the word itself (flag='word').
    """
    items2Update = items2Update.dict()
    for ele in list(items2Update.keys()):
        # Drop every None field: an unset update item must never blank
        # out an existing value.
        if items2Update[ele] == None:
            items2Update.pop(ele)
    print('hahhah', basicWordItemId, items2Update)
    # Refresh the timestamp and, if the word changed, its length.
    items2Update['timestamp'] = time.strftime("%Y/%m/%d %H:%M:%S", time.localtime())
    if items2Update.get('word'):
        items2Update['Length'] = len(items2Update.get('word'))
    print(projectName, basicWordItemId, items2Update, currentPage, pageSize)
    # Resolve projectName -> projectId.
    projectId = await findProjectIdFromProjectName(
        dbPrefix, 'Project', queryDict={'projectName': projectName}, showDict={'_id': 1})
    if not projectId:
        raise HTTPException(status_code=503, detail='projectNotExist')
    # Perform the update.
    if flag == 'word':
        # Look up by the word itself instead of by id.
        result = await updateBasicWords(dbPrefix + '-' + projectId,
                                        'basicWords',
                                        queryDict={"word": basicWordItemId},
                                        setDict={"$set": items2Update},
                                        currentPage=currentPage,
                                        pageSize=pageSize)
        if isinstance(result, str):
            raise HTTPException(status_code=503, detail=result)
        else:
            return (result)
    else:
        # Validate the ObjectId before using it as the query key.
        try:
            oid = ObjectId(basicWordItemId)
        except:
            raise HTTPException(status_code=503, detail='invalid ObjectID')
        else:
            result = await updateBasicWords(dbPrefix + '-' + projectId,
                                            'basicWords',
                                            queryDict={"_id": oid},
                                            setDict={"$set": items2Update},
                                            currentPage=currentPage,
                                            pageSize=pageSize)
            if isinstance(result, str):
                raise HTTPException(status_code=503, detail=result)
            else:
                return (result)
def register(req):
    """Register a new user from the posted UserForm, log them in, and
    return a success response; re-render the form otherwise.
    """
    context = {}
    if req.method == 'POST':
        form = UserForm(req.POST)
        if form.is_valid():
            # Pull the validated form data.
            username = form.cleaned_data['username']
            password = form.cleaned_data['password']
            # # Check whether the user already exists
            # user = auth.authenticate(username = username,password = password)
            # if user:
            #     context['userExit']=True
            #     return render(req, 'register.html', context)
            # Persist the user (further field handling could go here).
            user = User.objects.create_user(username=username, password=password)
            now_time = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time()))
            user.last_login = now_time
            user.save()
            # Record the username in the session.
            req.session['username'] = username
            # Log in through the auth framework.
            auth.login(req, user)
            # Report success.
            return HttpResponse('register success !')
    else:
        context = {'isLogin':False}
    # Render the page with the request, template, and context dict.
    return render(req,'register.html',context)
def log_force(self, entry):
    """Append ``entry`` (timestamped) to log.txt, then redraw the whole
    terminal: the last 200 log lines, a separator, and the status line.
    Unix-specific: uses ``stty size`` for the terminal width.
    """
    myLog.currentTask = entry
    text = time.strftime("%Y/%m/%d %H:%M:%S %Z") + " " + entry
    #open the log.txt in append mode and write in the new log
    f = open('log.txt', 'a')
    f.write(text + "\n")
    f.close()
    #clear the terminal and statusline
    os.system('cls' if os.name == 'nt' else 'clear')
    myLog.statusline = ''
    #get the dimensions of terminal window
    rows, columns = os.popen('stty size', 'r').read().split()
    #print everything in the log.txt
    r = open('log.txt', 'r')
    old_log = r.read().split("\n")
    old_log = list(filter(None, old_log))
    # Keep only the most recent 200 entries on screen.
    if (len(old_log) > 200):
        old_log = old_log[-200:]
    r.close()
    for line in old_log:
        print(line)
    print("-" * int(columns))
    myLog.refresh_statusline(self)
def date_to_str(time):
    """Normalise a date-like value to its string form.

    None passes through unchanged, strings are returned untouched, and
    anything else is formatted with DATE_FMT via its strftime method.
    """
    if time is None:
        return None
    if is_str(time):
        return time
    return time.strftime(DATE_FMT)
def onchange_from_date(self, cr, uid, ids, from_date, context=None):
    """Onchange guard: reject a from_date earlier than today.

    Returns {'value': {}} when the date is acceptable; otherwise a
    warning plus cleared from_date/to_date values.
    """
    # Dates are compared as ISO strings, which sorts chronologically.
    today = time.strftime('%Y-%m-%d')
    if from_date < today:
        warning = {'title': 'warning', 'message': 'Please Enter Valid Date'}
        return {'warning': warning,
                'value': {'from_date': False, 'to_date': False}}
    return {'value': {}}
def article_comment(article_id):
    """Post a comment on an article after sensitive-word screening, then
    redirect back to the article page.
    """
    article = articleService.find_by_id(article_id)
    form = request.form
    email = form['email']
    # Reject the whole form if any field contains a sensitive word.
    for content in form:
        for word in sensitive_words:
            if word in form[content]:
                return 'contain inappropriate word(s)'
    user = userService.find_by_email(email)
    if user is None:
        # NOTE(review): this new User is never persisted here, so user.id
        # below is presumably unset — confirm against userService.
        user = User(email=email)
    comment = Comment(user_id=user.id,
                      email=email,
                      article_id=article_id,
                      content=form['content'],
                      postTime=time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()))
    commentService.insert(comment)
    articleService.addComment(article)
    # Track the commenting IP against this comment (once per pair).
    ip = ipService.find_ip_by_ip(request.remote_addr)
    cip = ipService.find_cip_by_both(comment.id, ip.id)
    if cip is None:
        ipService.insert(
            CommentIp(ip_id=ip.id, comment_id=comment.id, vote_state=0))
    return redirect('/article/' + article_id)
async def create_url(*,projectName: str = Path(...),currentPage: int = 1, pageSize: int =10, urlsItemInfos:List[UrlsItemInfo]):
    """Insert a batch of url items into the project's Urls collection,
    stamping each with a modification time; duplicates are reported back
    as HTTP 503 with the offending rootUrls listed.
    """
    # print(projectName, urlsItemInfos)
    urlsItemInfos = [urlsItem.dict() for urlsItem in urlsItemInfos]
    # Stamp each item with the modification time.
    for urlItem in urlsItemInfos:
        urlItem['modifiedTime'] = time.strftime("%Y/%m/%d %H:%M:%S", time.localtime())
    # Resolve projectName -> projectId.
    projectId = await findProjectIdFromProjectName(dbPrefix, 'Project', queryDict={'projectName': projectName}, showDict={'_id': 1})
    if not projectId:
        raise HTTPException(status_code=503, detail='projectNotExist')
    try:
        result1 = await createUrlItems(dbPrefix+'-'+projectId,'Urls',currentpage = currentPage, pagesize = pageSize,ItemInfos = urlsItemInfos)
    except pymongo.errors.BulkWriteError as e:
        # Duplicate-key failure: collect the duplicated rootUrls and
        # report them back to the caller.
        # print(e.details['writeErrors'])
        temp = e.details['writeErrors']
        result = []
        for ele in temp:
            result.append(ele['op']['rootUrl'])
        #print(result)
        raise HTTPException(status_code=503, detail="以下url重复,未插入,请修改后重试!" + str(result))
    except TypeError as e:
        # Empty payload.
        #print(e)
        raise HTTPException(status_code=503, detail='添加项不能为空')
    except Exception as e:
        # Any other database error.
        raise HTTPException(status_code=503, detail=json.loads(json_util.dumps(e.details)))
    else:
        return (result1)
async def update_url(*,projectName,urlID : str = Path(...),currentPage: int = 1, pageSize: int =10, urlsItemInfo:UrlsItemInfo):
    """Update one url item (by ObjectId) in the project's Urls collection
    and return the refreshed page of results.
    """
    # print(projectName, urlID, urlsItemInfo)
    urlsItemInfo = urlsItemInfo.dict()
    # Stamp the modification time.
    urlsItemInfo['modifiedTime'] = time.strftime("%Y/%m/%d %H:%M:%S", time.localtime())
    # Resolve projectName -> projectId.
    projectId = await findProjectIdFromProjectName(dbPrefix, 'Project', queryDict={'projectName': projectName}, showDict={'_id': 1})
    if not projectId:
        raise HTTPException(status_code=503, detail='projectNotExist')
    #result1 = await createUrlItems(dbPrefix+'-'+projectId,'Urls',urlsItemInfos)
    # Validate the ObjectId before using it as the query key.
    try:
        oid = ObjectId(urlID)
    except:
        raise HTTPException(status_code=503, detail='invalid ObjectID')
    else:
        urlID = oid
    # print('urlID',urlID)
    result1 = await updateUrlItems(dbPrefix+'-'+projectId,'Urls',currentPage = currentPage, pageSize = pageSize, queryDict={"_id":urlID},setDict={"$set":urlsItemInfo})
    #if 'error' in result1.lower():
    if isinstance(result1, str) and 'error' in result1.lower():
        raise HTTPException(status_code=503, detail=result1)
    else:
        return (result1)
def access_token(self, public_key=None, secret_key=None, *args, **kwargs):
    """OAuth-style token endpoint (CherryPy handler).

    Validates the client credentials, records a new access/refresh token
    pair in server_token_history, and returns the pair as JSON.  Error
    responses (missing params / bad credentials) carry HTTP 400.
    """
    global host
    global db_user
    global db_pass
    global db_name
    if public_key is None or secret_key is None:
        cherrypy.response.status = 400
        cherrypy.response.headers['Content-Type'] = "application/json"
        return """{"message": "Wrong number of parameters","error": "invalid_request","status": 400,"cause": []}"""
    PUBLIC_KEY = public_key
    PRIVATE_KEY = secret_key
    if not Valida_Credenciales(PUBLIC_KEY, PRIVATE_KEY):
        cherrypy.response.status = 400
        cherrypy.response.headers['Content-Type'] = "application/json"
        return '{"message": "invalid client_id or client_secret.","error": "invalid_client","status": 400,"cause": []}'
    # Token material: sha256 over public key + millisecond stamp + secret.
    ahora = datetime.now()
    fecha = str(ahora.year) + "-" + str(ahora.month) + "-" + str(ahora.day) + "T" + time.strftime("%H:%M:%S")
    milisegundo = str(int(round(time.time() * 1000)))
    concatenated = PUBLIC_KEY + milisegundo + PRIVATE_KEY
    signature = str(hashlib.sha256(concatenated).hexdigest())
    access_token = "API_ACCT-" + str(hashlib.sha256(fecha + signature).hexdigest())
    refresh_token = "API_RT-" + str(signature)
    # Connect only after validation so error paths no longer leak a
    # database connection, and always close it.
    db = MySQLdb.connect(host=host, user=db_user, passwd=db_pass, db=db_name)
    try:
        cur = db.cursor()
        # Security fix: the original interpolated PUBLIC_KEY (a
        # client-supplied value) straight into the SQL string — an
        # injection vector.  Values are now bound as parameters; only the
        # server-configured db_name remains in the identifier position.
        cur.execute(
            "INSERT INTO `" + str(db_name) + "`.`server_token_history` "
            "(`access_token`, `created`, `refresh_token`, `public_key`, `milisegundo`, `valido`) "
            "VALUES (%s, %s, %s, %s, %s, 'Si')",
            (access_token, fecha, refresh_token, PUBLIC_KEY, milisegundo))
        db.commit()
        cur.close()
    finally:
        db.close()
    cherrypy.response.status = 200
    cherrypy.response.headers['Content-Type'] = "application/json"
    return '{"access_token":"' + access_token + '","expires_in":10800,"refresh_token":"' + refresh_token + '","status":200}'
def tick():
    """Refresh the clock label when the displayed second changes, then
    reschedule this callback in 200 ms."""
    global time1
    now = time.strftime('%H:%M:%S')
    if now != time1:
        # Only touch the widget when the text would actually change.
        time1 = now
        clock.config(text=now)
    clock.after(200, tick)
def _membership_state(self, cr, uid, ids, name, args, context=None):
    """This Function return Membership State For Given Partner.
    @param self: The object pointer
    @param cr: the current row, from the database cursor,
    @param uid: the current user’s ID for security checks,
    @param ids: List of Partner IDs
    @param name: Field Name
    @param context: A standard dictionary for contextual values
    @param return: Dictionary of Membership state Value
    """
    res = {}
    for id in ids:
        res[id] = 'none'
    today = time.strftime('%Y-%m-%d')
    for id in ids:
        partner_data = self.browse(cr, uid, id, context=context)
        # Past cancel/stop dates short-circuit everything else.
        if partner_data.membership_cancel and today > partner_data.membership_cancel:
            res[id] = 'canceled'
            continue
        if partner_data.membership_stop and today > partner_data.membership_stop:
            res[id] = 'old'
            continue
        # s encodes the best (lowest) state seen across membership lines:
        # 0=paid, 1=invoiced, 2=canceled, 3=waiting, 4=no line state yet,
        # 5=old (expired line), 6=none.
        s = 4
        if partner_data.membership_ids:
            for mline in partner_data.membership_ids:
                if mline.state:
                    mstate = mline.state
                    if mstate == 'paid':
                        s = 0
                    elif mstate == 'invoiced' and s!=0:
                        s = 1
                    elif mstate == 'canceled' and s!=0 and s!=1:
                        s = 2
                    elif mstate == 'waiting' and s!=0 and s!=1:
                        s = 3
            if s==4:
                # No line carried a state: fall back to date-based old/none
                # (the last line examined wins).
                for mline in partner_data.membership_ids:
                    if mline.membership_start < today and mline.membership_end and mline.membership_end < today and mline.membership_start <= mline.membership_end:
                        s = 5
                    else:
                        s = 6
            if s==0:
                res[id] = 'paid'
            elif s==1:
                res[id] = 'invoiced'
            elif s==2:
                res[id] = 'canceled'
            elif s==3:
                res[id] = 'waiting'
            elif s==5:
                res[id] = 'old'
            elif s==6:
                res[id] = 'none'
        # Free members show as 'free' unless they have paid.
        if partner_data.free_member and s!=0:
            res[id] = 'free'
        if partner_data.associate_member:
            # Associate members inherit their parent partner's state.
            res_state = self._membership_state(cr, uid, [partner_data.associate_member.id], name, args, context=context)
            res[id] = res_state[partner_data.associate_member.id]
    return res
def intialise_proxy_manager(options):
    """ Proxy Manager initialization.

    :param dict options: Proxy manager configuration parameters.

    Loads proxies either from the miner or from a user-supplied list,
    optionally health-checks them in a subprocess, aborts when none are
    alive, and records the first usable proxy in options['OutboundProxy'].
    """
    proxy_manager = None
    if options['Botnet_mode'] is not None:
        proxy_manager = Proxy_manager()
        answer = "Yes"
        proxies = []
        if options['Botnet_mode'][0] == "miner":
            miner = Proxy_Miner()
            proxies = miner.start_miner()
        if options['Botnet_mode'][0] == "list":  # load proxies from list
            proxies = proxy_manager.load_proxy_list(
                options['Botnet_mode'][1])
            answer = raw_input(
                "[#] Do you want to check the proxy list? [Yes/no] : ")
        if answer.upper() in ["", "YES", "Y"]:
            # Health-check the proxies in a separate process.
            proxy_q = multiprocessing.Queue()
            proxy_checker = multiprocessing.Process(
                target=Proxy_Checker.check_proxies,
                args=(
                    proxy_q,
                    proxies,
                ))
            logging.info("Checking Proxies...")
            start_time = time.time()
            proxy_checker.start()
            proxies = proxy_q.get()
            proxy_checker.join()
        proxy_manager.proxies = proxies
        proxy_manager.number_of_proxies = len(proxies)
        if options['Botnet_mode'][0] == "miner":
            logging.info(
                "Writing proxies to disk(~/.owtf/proxy_miner/proxies.txt)")
            miner.export_proxies_to_file("proxies.txt", proxies)
        if answer.upper() in ["", "YES", "Y"]:
            logging.info(
                "Proxy Check Time: %s",
                time.strftime(
                    '%H:%M:%S',
                    time.localtime(time.time() - start_time - 3600)))
            cprint("Done")
        # Bug fix: `is 0` relied on CPython small-int interning; compare
        # by value instead of identity.
        if proxy_manager.number_of_proxies == 0:
            ServiceLocator.get_component("error_handler").FrameworkAbort(
                "No Alive proxies.")
        proxy = proxy_manager.get_next_available_proxy()
        # check proxy var... http:// sock://
        options['OutboundProxy'] = []
        options['OutboundProxy'].append(proxy["proxy"][0])
        options['OutboundProxy'].append(proxy["proxy"][1])
def intialise_proxy_manager(options):
    """ Proxy Manager initialization.

    :param dict options: Proxy manager configuration parameters.

    Loads proxies either from the miner or from a user-supplied list,
    optionally health-checks them in a subprocess, aborts when none are
    alive, and records the first usable proxy in options['OutboundProxy'].
    """
    proxy_manager = None
    if options['Botnet_mode'] is not None:
        proxy_manager = Proxy_manager()
        answer = "Yes"
        proxies = []
        if options['Botnet_mode'][0] == "miner":
            miner = Proxy_Miner()
            proxies = miner.start_miner()
        if options['Botnet_mode'][0] == "list":  # load proxies from list
            proxies = proxy_manager.load_proxy_list(
                options['Botnet_mode'][1]
            )
            answer = raw_input(
                "[#] Do you want to check the proxy list? [Yes/no] : "
            )
        if answer.upper() in ["", "YES", "Y"]:
            # Health-check the proxies in a separate process.
            proxy_q = multiprocessing.Queue()
            proxy_checker = multiprocessing.Process(
                target=Proxy_Checker.check_proxies,
                args=(proxy_q, proxies,)
            )
            logging.info("Checking Proxies...")
            start_time = time.time()
            proxy_checker.start()
            proxies = proxy_q.get()
            proxy_checker.join()
        proxy_manager.proxies = proxies
        proxy_manager.number_of_proxies = len(proxies)
        if options['Botnet_mode'][0] == "miner":
            logging.info("Writing proxies to disk(~/.owtf/proxy_miner/proxies.txt)")
            miner.export_proxies_to_file("proxies.txt", proxies)
        if answer.upper() in ["", "YES", "Y"]:
            logging.info(
                "Proxy Check Time: %s",
                time.strftime(
                    '%H:%M:%S',
                    time.localtime(time.time() - start_time - 3600)
                )
            )
            cprint("Done")
        # Bug fix: `is 0` relied on CPython small-int interning; compare
        # by value instead of identity.
        if proxy_manager.number_of_proxies == 0:
            ServiceLocator.get_component("error_handler").FrameworkAbort("No Alive proxies.")
        proxy = proxy_manager.get_next_available_proxy()
        # check proxy var... http:// sock://
        options['OutboundProxy'] = []
        options['OutboundProxy'].append(proxy["proxy"][0])
        options['OutboundProxy'].append(proxy["proxy"][1])
def upload():
    """Handle article upload: validate the form, screen for sensitive
    words, store the PDF, create the Article row, and notify the author
    by email.
    """
    form = request.form
    for blank in form:
        if form[blank] == '':
            return "Blank can't be empty!"
    # Bug fix: the pattern must be a raw string — in '\b[\w...' the '\b'
    # is a BACKSPACE character, so the anchored match failed for every
    # input and all addresses were rejected as malformed.
    if re.match(r'\b[\w\.-]+@[\w\.-]+\.\w{2,4}\b', form['email']) is None:
        return "Wrong email address format!"
    file = request.files['pdf'].read()
    filename = request.files['pdf'].filename
    split = filename.split('.')
    if len(split) != 2 or split[1] != 'pdf':
        return 'unsupported file type'
    # Screen every text field (except the file itself) for sensitive words.
    for content in form:
        #print(content)
        if content == 'pdf':
            continue
        for word in sensitive_words:
            if word in form[content]:
                return 'contain inappropriate word(s)'
    filename = request.files['pdf'].filename
    email = form['email']
    user = userService.find_by_email(email)
    if user is None:
        user = User(email=email)
        userService.insert(user)
    # TODO: check whether the user is blacklisted
    subject = subjectService.find_by_title(form['subject'])
    if subject is None:
        return '<h2>There is no such subject, ' \
               'please create the subject first.</h2><a href="/">back to home</a>'
    # Reserve the next article id so the stored PDF can be named after it.
    nextid = articleService.nextId()
    article = Article(title=form['title'],
                      postTime=time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()),
                      abstract=form['abstract'],
                      highlight_part=form['highlight'],
                      subject_id=subject.id,
                      user_id=user.id,
                      dl_link="static/" + str(nextid) + ".pdf")
    articleService.insert(article)
    new_filename = "static/" + str(nextid) + ".pdf"
    # Persist the uploaded PDF locally.
    newFile = open(new_filename, "wb")
    newFile.write(file)
    newFile.close()
    # Send the confirmation email.
    msg = MIMEText(
        'hanks for posting article.If you are sure that you haven\'t post any articles in OAPS, please contact <a href="mailto:[email protected]">[email protected]</a> to delete it.',
        'html', 'utf-8')
    msg['From'] = _format_addr('*****@*****.**')
    msg['To'] = _format_addr(email)
    msg['Subject'] = Header('[OAPS]', 'utf-8').encode()
    server = smtplib.SMTP('smtp.qq.com', 25)
    server.set_debuglevel(1)
    server.login('*****@*****.**', '***')  # TODO supply the real password
    server.sendmail('*****@*****.**', [email], msg.as_string())
    server.quit()
    return redirect('/article/' + str(article.id))
def test_filling_file_field(self):
    """A mommy-made instance's FileField gets a date-stamped /tmp path."""
    self.dummy = mommy.make_one(DummyFileFieldModel)
    model_field = DummyFileFieldModel._meta.get_field('file_field')
    self.assertTrue(isinstance(model_field, FileField))
    import time
    expected = "/tmp/%s/mock_file.txt" % time.strftime('%Y/%m/%d')
    self.assertEqual(self.dummy.file_field.path, expected)
def generate_order_sn(self):
    """Build an order serial number: timestamp + user id + 2-digit random."""
    from random import Random
    rng = Random()
    # Current time + userID + random suffix, concatenated in that order.
    timestamp = time.strftime("%Y%m%d%H%M%S")
    user_id = self.context["request"].user.id
    suffix = rng.randint(10, 99)
    return "{time_str}{userid}{ranstr}".format(
        time_str=timestamp, userid=user_id, ranstr=suffix)
def agreement_groups(request, id):
    """Return the contract's course groups as a JSON list of
    {id, subject, start, end, mode, type} dicts.
    """
    groups = Group.objects.select_related("Course").filter(
        facultycontract=Contract.objects.get(id=id))
    data = []
    for g in groups:
        data.append({
            "id": g.id,
            "subject": g.course.name,
            # FIX: call strftime on the time objects directly. The old
            # time.strftime(g.start, fmt) form only worked if the module-level
            # name `time` happened to be datetime.time; with the stdlib time
            # module it raises TypeError.
            "start": g.start.strftime("%H:%M"),
            "end": g.end.strftime("%H:%M"),
            "mode": g.mode,
            "type": g.get_mode_display()
        })
    return HttpResponse(json.dumps(data), mimetype="application/json")
def makePrePrintDic(ip):
    # Build the per-IP summary line and stash it in prePrintoutDic, keyed by
    # the session start timestamp plus its microsecond part (presumably for
    # stable ordering of entries that share the same second).
    # NOTE(review): relies on module-level dicts ipLastDic, ipStartDic,
    # ipRequestCount and prePrintoutDic, and assumes `time` is datetime.time
    # (time.strftime(time(...), "%f") is an unbound-method call) — confirm;
    # with the stdlib time module this would fail.
    # Session duration: last-seen minus first-seen timestamp.
    timelength = datetime.strptime(ipLastDic[ip], "%Y-%m-%d %H:%M:%S") - datetime.strptime(
        ipStartDic[ip][0], "%Y-%m-%d %H:%M:%S")
    # CSV-style summary: ip,start,end,duration-seconds(+1 for inclusive count),request-count
    outputString = ip + "," + ipStartDic[ip][0] + "," + ipLastDic[
        ip] + "," + str(timelength.seconds + 1) + "," + str(ipRequestCount[ip])
    prePrintoutDic[ipStartDic[ip][0] + " " + time.strftime(
        time(microsecond=ipStartDic[ip][1]), "%f")] = outputString
def test_filling_file_field(self):
    """File fields created with _create_files resolve to a dated temp path."""
    self.dummy = mommy.make(models.DummyFileFieldModel, _create_files=True)
    model_field = models.DummyFileFieldModel._meta.get_field('file_field')
    self.assertIsInstance(model_field, django_models.FileField)
    import time
    expected = "%s/%s/mock_file.txt" % (gettempdir(), time.strftime('%Y/%m/%d'))
    self.assertEqual(abspath(self.dummy.file_field.path), abspath(expected))
def get_str_from_minutes(minutes):
    """
    Useful for plotting
    :param minutes: Output of get_minutes_in_day
    :return: str ("11:59 PM")
    """
    # Split minutes-into-the-day into hour and minute components.
    hours, mins = divmod(int(minutes), 60)
    return time(hours, mins, 0).strftime("%I:%M %p")
def get_hours_info(self, date):
    """Return the hours taken by this advocate's consultations on `date`.

    (Original docstring, translated from Ukrainian: "Returns the hours
    present in consultations".)
    """
    # Declined/finished consultations no longer occupy their slot.
    active = Consultation.objects.filter(
        date=date,
        advocat=self,
    ).exclude(
        status__in=[Consultation.DECLINED, Consultation.FINISHED],
    )
    return [
        {
            "consultation_id": c.id,
            "start": time.strftime(c.start, "%H:%M"),
            "end": time.strftime(c.end, "%H:%M"),
        }
        for c in active
    ]
def local_time(self):
    """Return the current wall-clock time in TIME_ZONE as a labelled string."""
    local_tz = timezone(str(TIME_ZONE))
    # Renamed from `time` to avoid shadowing the stdlib time module.
    now = datetime.now(local_tz)
    format_time = now.strftime('%H:%M:%S')
    # BUG FIX: `output` was only assigned for US/Eastern and Europe/London,
    # so any other TIME_ZONE raised UnboundLocalError on return. Default to
    # a suffix-less label and keep the two special-cased zones.
    output = "Local time: {}".format(format_time)
    if TIME_ZONE == 'US/Eastern':
        output = "Local time: {} EST".format(format_time)
    if TIME_ZONE == 'Europe/London':
        output = "Local time: {} GMT".format(format_time)
    return output
def test_filling_file_field(self):
    """Baker-created file fields resolve to a date-stamped temp path."""
    dummy = baker.make(models.DummyFileFieldModel, _create_files=True)
    model_field = models.DummyFileFieldModel._meta.get_field('file_field')
    assert isinstance(model_field, FileField)
    import time
    expected = "%s/%s/mock_file.txt" % (gettempdir(), time.strftime('%Y/%m/%d'))
    assert abspath(expected) == abspath(dummy.file_field.path)
    dummy.file_field.delete()
def save(self, db_name=None):
    """Serialize this object's queries and files via the module-level save().

    :param db_name: target file name; defaults to a timestamped '<stamp>.db'.
    """
    if db_name is None:
        # Day-month-year-time stamp, e.g. '24122023153045.db'.
        db_name = time.strftime('%d%m%Y%H%M%S') + '.db'
    # First line: counts of queries and files, space-separated.
    header = ' '.join(map(str, (len(self.queryes), len(self.files))))
    lines = [header]
    lines.extend('\t'.join(q) for q in self.queryes)
    lines.extend(self.files)
    # `save` here resolves to the module-level save() helper, not this method.
    save(lines, db_name)
def save_note(self, request, pk=None):
    """Persist note content while the note is unsubmitted ('U');
    otherwise report that it was already submitted."""
    note = self.get_object()
    if note.status != 'U':
        status = 'Class note is already submitted'
    else:
        note.content = request.data.get('content')
        note.save()
        status = 'Saved at %s!' % time.strftime('%I:%M:%S %p')
    return Response({'status': status})
def writemdfile(lat, lon, fileitme):
    """Write a Jekyll front-matter stub post for a sail-track CSV file.

    :param lat: map center latitude
    :param lon: map center longitude
    :param fileitme: track identifier used in the post title and CSV name
    """
    target_path = (repopath + "_posts/sailtrack/"
                   + time.strftime("%Y-%m-%d-Sailtrack-") + fileitme + ".md")
    front_matter = [
        "---\n",
        "layout: track\n",
        "title: sail track " + fileitme + "\n",
        "categories: sailtrack\n",
        "date: " + time.strftime("%Y-%m-%d") + "\n",
        "published: false\n",
        "geo: " + fileitme + ".csv\n",
        "geocenterlon: " + str(lon) + "\n",
        "geocenterlat: " + str(lat) + "\n",
        "mapzoom: 11\n",
        "---\n\n",
    ]
    targetfile = open(target_path, 'w')
    targetfile.writelines(front_matter)
    targetfile.flush()
    targetfile.close()
    return
def _datestamp_to_srt(self, date_arr): ret_date_arr = list() for datestamp in date_arr: if datestamp is None: ret_date_arr.append('19900101') else: x = time.strftime('%Y%m%d', time.localtime(datestamp / 1000)) ret_date_arr.append(x) return ret_date_arr
def plotWindLogChart(data): """ Make a plot of the given wind log data as moving average. @param data - time after the cup anemometer passed the sensor in [ms] since 1.1.1970 """ # todo das muss ich noch richtig machen und überprüfen # Get the windlog data of weather station Berlin Tegel # tegelDaten = [x.split(';') for x in open(windlogsDir+'/tegelDaten.txt','r').readlines()] # Fetch time and date of the wind velocity data # xTegel = np.array([time.mktime(time.strptime(x[1],'%Y%m%d%H')) for x in tegelDaten],dtype=float) * 1000 # Fetch the wind velocity in convert them into km/h # yTegel = np.array([x[3] for x in tegelDaten],dtype=float) *3.6 # calculate the time difference between one cup anemometer rotation timeDiff = data[1:-1] - data[0:-2] # time between to time logs x = (data[1:-1] + data[0:-2]) / 2. # create a figure for plotting the data fig = plt.figure(figsize=(24, 12)) ax = plt.subplot(111) plt.title('Datei : ' + fname) plt.xlabel('Time') plt.ylabel('Wind velocity [km/h]') step = 1 r = 1 vAvg = v(timeDiff[::step]) * 3.6 vGMAvg = [] for i in range(vAvg.size): vGMAvg.append( sum(vAvg[max(0, i - r):min(vAvg.size, i + r)]) / (min(vAvg.size, i + r) - max(0, i - r))) ax.plot(x[:-1:step], vGMAvg[:-1], '-', label='Mittelwert - gleitendes Mittel r = ' + str(r), linewidth=1) # create the x axis (time axis) labels xlabels = np.arange(x[0], x[-1], (x[-1] - x[0]) / 24) labels = [] for xi in xlabels: labels.append(time.strftime('%H:%M Uhr', time.localtime(xi / 1000))) plt.xticks(xlabels, labels, rotation='vertical') plt.legend() plt.ylim(ymin=0, ymax=40) # format='png' plt.savefig('test.png', format='png', bbox_inches='tight') print('test.png', 'saved as png ...')
def get_destino(self, dia, horario, origem):
    # Build the list of possible destination spaces ({'key': id, 'value': name})
    # for this registration on day `dia`, given the slot-start string `horario`
    # ("%H:%M:%S") and the current location `origem` (either the literal
    # 'Check in' or a space object exposing `.id`).
    # NOTE(review): assumes `time` is datetime.time so that
    # time.strftime(obj, fmt) is an unbound-method call on the slot's start
    # time — confirm; with the stdlib time module this would raise TypeError.
    inscricao_sessoes = Inscricaosessao.objects.filter(
        inscricao=self, sessao__dia=dia).order_by('sessao__horarioid__inicio')
    destino = []
    if horario == time.strftime(
            inscricao_sessoes.first().sessao.horarioid.inicio, "%H:%M:%S"):
        # First slot of the day: offer every space whose session starts exactly
        # at `horario`, excluding the space the participant is already in.
        for local in inscricao_sessoes:
            if time.strftime(
                    local.sessao.horarioid.inicio, "%H:%M:%S") == horario and (
                    origem == 'Check in'
                    or origem.id != local.sessao.atividadeid.espacoid.id):
                destino.append({
                    'key': local.sessao.atividadeid.espacoid.id,
                    'value': local.sessao.atividadeid.espacoid.nome
                })
    else:
        # Later slots: consider only sessions starting at or after `horario`.
        inscricao_sessoes = Inscricaosessao.objects.filter(
            inscricao=self,
            sessao__dia=dia,
            sessao__horarioid__inicio__gte=horario).order_by(
                'sessao__horarioid__inicio')
        for local in inscricao_sessoes:
            if origem != 'Check in' and origem.id != local.sessao.atividadeid.espacoid.id:
                destino.append({
                    'key': local.sessao.atividadeid.espacoid.id,
                    'value': local.sessao.atividadeid.espacoid.nome
                })
        if len(destino) == 0:
            # Fallback: offer the last remaining session's space.
            # NOTE(review): `local` here is the leftover loop variable — if the
            # queryset above was empty this raises NameError, and .last() would
            # be None; confirm whether that path can occur.
            if origem != 'Check in' and origem.id != local.sessao.atividadeid.espacoid.id:
                destino.append({
                    'key': inscricao_sessoes.last().sessao.atividadeid.espacoid.id,
                    'value': inscricao_sessoes.last().sessao.atividadeid.espacoid.nome
                })
    return destino
def local_time(zone):
    """Return the current time in `zone` as a wisp-prefixed, labelled string.

    :param zone: a pytz-style timezone name (e.g. 'US/Eastern').
    """
    local_tz = timezone(str(zone))
    # Renamed from `time` to avoid shadowing the stdlib time module.
    now = datetime.now(local_tz)
    format_time = now.strftime('%H:%M:%S')
    # BUG FIX: `output` was only bound for the two known zones, so any other
    # zone raised UnboundLocalError on return. Provide a suffix-less default.
    output = "{} Local time: {}".format(wisp, format_time)
    if zone == 'US/Eastern':
        output = "{} Local time: {} EST".format(wisp, format_time)
    if zone == 'Europe/London':
        output = "{} Local time: {} GMT".format(wisp, format_time)
    return output
def test_filling_file_field(self):
    """On Django >= 1.4 a mommy-made file field resolves to a dated /tmp path."""
    self.dummy = mommy.make(DummyFileFieldModel)
    model_field = DummyFileFieldModel._meta.get_field('file_field')
    self.assertIsInstance(model_field, FileField)
    import time
    expected = "/tmp/%s/mock_file.txt" % time.strftime('%Y/%m/%d')
    from django import VERSION
    if VERSION[1] >= 4:
        self.assertEqual(self.dummy.file_field.path, expected)
def checker(x, y, f_name, temp=''):
    """Collect items of x that case-insensitively equal items of y; if any
    matched, write them to a timestamped CSV under workdir."""
    now = datetime.now()
    stamp = now.strftime(" (%d %m %Y - %H %M %S)")  # date-time suffix for the file name
    # Lowercase comparison; an item of x is appended once per matching item of y.
    temp = temp + ''.join(i for i in x for j in y if i.lower() == j.lower())
    if len(temp) > 0:
        # Write only when at least one match was found.
        with open(workdir + f_name + stamp + '.csv', 'w') as f:
            f.write(temp)
def active_membership_lines_find(self, cr, uid, id, date, context=None):
    """Find the active membership lines"""
    if not date:
        # Default to today's date when no reference date is supplied.
        date = time.strftime('%Y-%m-%d')
    line_obj = self.pool.get('membership.membership_line')
    # Paid, non-cancelled lines covering `date` for the given partner.
    line_ids = line_obj.search(cr, uid, [
        ('partner', '=', id),
        ('date_to', '>=', date),
        ('date_from', '<=', date),
        ('state', '=', 'paid'),
        ('membership_cancel_id', '=', False)])
    # Keep only lines whose product is flagged as a membership product.
    return [line.id for line in line_obj.browse(cr, uid, line_ids)
            if line.membership_id.membership]
def search_turnos_tecnico(request):
    """
    Get the schedule of a technician
    :param request:
    :return: json_tecnico
    """
    json_agendas = []
    if request.is_ajax() and request.method == 'POST':
        nombre = request.POST.get('sName')
        hoy = horario.spain_timezone()
        fecha = '%s-%s-%s' % (hoy.year, hoy.month, hoy.day)
        agenda = models.Agenda()
        result_agenda = agenda.get_tecnico_in_agenda(nombre=nombre, fecha=fecha)
        db_utils.flush_transaction()
        for result in result_agenda:
            json_agenda = {
                'tecnico_id': result.tecnico__id,
                'tecnico_nombre': result.tecnico__nombre + ' ' + result.tecnico__apellidos
            }
            result_turnos = agenda.get_turnos_by_tecnico(
                tecnico_id=result.tecnico__id, fecha=fecha)
            # enumerate replaces the manual i counter of the original.
            for i, turno in enumerate(result_turnos):
                json_agenda['turno_inicio%s' % i] = time.strftime(turno.turno__hora_inicio, '%H:%M')
                json_agenda['turno_fin%s' % i] = time.strftime(turno.turno__hora_fin, '%H:%M')
                json_agenda['turno_id%s' % i] = turno.tu_id
            json_agendas.append(json_agenda)
    return HttpResponse(json.dumps(json_agendas), content_type='application/json')
def _get_leave_status(self, cr, uid, ids, name, args, context=None):
    """Compute each employee's currently-active leave (state, type, dates)."""
    holidays_obj = self.pool.get('hr.holidays')
    # Leaves of type 'remove' that cover today and are not cancelled/refused.
    holidays_id = holidays_obj.search(
        cr, uid,
        [('employee_id', 'in', ids),
         ('date_from', '<=', time.strftime('%Y-%m-%d')),
         ('date_to', '>=', time.strftime('%Y-%m-%d')),
         ('type', '=', 'remove'),
         ('state', 'not in', ('cancel', 'refuse'))],
        context=context)
    # Default: no active leave for any requested employee.
    result = {
        emp_id: {
            'current_leave_state': False,
            'current_leave_id': False,
            'leave_date_from': False,
            'leave_date_to': False,
        }
        for emp_id in ids
    }
    for holiday in holidays_obj.browse(cr, uid, holidays_id, context=context):
        entry = result[holiday.employee_id.id]
        entry['leave_date_from'] = holiday.date_from
        entry['leave_date_to'] = holiday.date_to
        entry['current_leave_state'] = holiday.state
        entry['current_leave_id'] = holiday.holiday_status_id.id
    return result
def test_filling_image_file_field(self):
    """Image fields on a mommy-made instance resolve to a dated temp path
    and expose width/height."""
    self.dummy = mommy.make(models.DummyImageFieldModel, _create_files=True)
    model_field = models.DummyImageFieldModel._meta.get_field('image_field')
    self.assertIsInstance(model_field, django_models.ImageField)
    import time
    expected = "%s/%s/mock-img.jpeg" % (gettempdir(), time.strftime('%Y/%m/%d'))
    # These require the file to exist in earlier versions of Django
    self.assertEqual(abspath(self.dummy.image_field.path), abspath(expected))
    self.assertTrue(self.dummy.image_field.width)
    self.assertTrue(self.dummy.image_field.height)