def padding(request):
    tomorrow = moment.now().add(day=1)
    tomorrow_plan = Plan.objects.filter(created_at=tomorrow.date, user=request.user)
    if tomorrow_plan:
        return render(request, 'padding.html',
                      {'title': "padding", "plan_id": tomorrow_plan.first().id})
def createMsg(mtype, send_id, receive_id, post_id, content, mydb, create_time=None):
    msg = {}
    msg["type"] = mtype
    msg["post_id"] = post_id
    msg["sender_id"] = send_id
    msg["receiver_id"] = receive_id
    msg["content"] = content
    msg["create_time"] = moment.now().epoch()
    msg_id = 0
    res = mydb.selectCollection("xmateMessage")
    if res["status"]:
        return res
    match_list = {"sender_id": send_id, "receiver_id": receive_id, "post_id": post_id}
    res = mydb.getData(match_list)
    if res["status"]:
        return res
    cursor = list(res["content"])
    if len(cursor) > 0:
        msg_id = cursor[0]["_id"]
        res = mydb.updateData(match_list, {"create_time": msg["create_time"]})
        if res["status"]:
            return res
    else:
        res = mydb.insertData(msg)
        if res["status"]:
            return res
        msg_id = res["content"].inserted_id
    return returnHelper(content=msg_id)
def new_video():
    form = VideoForm()
    if form.validate_on_submit():
        now = moment.now().format('dddd, MMMM D YYYY')
        today = Day.query.filter_by(date=now).first()
        if today is not None:
            video = Video(title=form.title.data, description=form.description.data,
                          video_link=form.video_link.data, day_id=today.id)
            db.session.add(video)
            db.session.flush()
            video.generate_hash()
            video.generate_thumbnail(app.config["UPLOAD_FOLDER"], form.thumbnail.data,
                                     app.config["ALLOWED_EXTENSIONS"])
            db.session.add(video)
            db.session.commit()
            flash("Video Successfully Added")
            return redirect(url_for("index"))
        else:
            day = Day(date=now)
            db.session.add(day)
            db.session.flush()
            video = Video(title=form.title.data, description=form.description.data,
                          video_link=form.video_link.data, day_id=day.id)
            db.session.add(video)
            db.session.flush()
            video.generate_hash()
            video.generate_thumbnail(app.config["UPLOAD_FOLDER"], form.thumbnail.data,
                                     app.config["ALLOWED_EXTENSIONS"])
            db.session.add(video)
            db.session.commit()
            flash("Video Successfully Added")
            return redirect(url_for("index"))
    return render_template("videos/new.html", form=form)
def show(request):
    tomorrow = moment.now().add(day=1)
    if Plan.objects.filter(user=request.user, created_at=tomorrow.date):
        return redirect('/wechat/plan/padding')
    template_val = {}
    template_val['title'] = u'创建规划'
    format_date = tomorrow.format("YYYY.MM.DD")
    weekday = get_format_weekday(tomorrow.weekday)
    template_val['format_date'] = format_date
    template_val['weekday'] = weekday
    user_plans = Plan.objects.filter(user=request.user)
    if not user_plans:
        # New user: add three placeholder entries as a guide
        details = []
        details.append({'created_at': '', 'content': ''})
        # details.append({'created_at': '10:00-11:00', 'content': 'Swipe left to delete'})
        # details.append({'created_at': '11:00-12:00', 'content': 'Notes can be edited below'})
        template_val['details'] = details
    else:
        # Returning user: pre-fill with the previous plan's entries
        details = user_plans.last().details.all()
        template_val['details'] = details
    return render(request, 'show.html', template_val)
def test_now_function_with_current_date(self):
    d = moment.now().to_date()
    now = datetime.now()
    self.assertEquals(d.year, now.year)
    self.assertEquals(d.month, now.month)
    self.assertEquals(d.day, now.day)
    self.assertEquals(d.hour, now.hour)
    self.assertEquals(d.second, now.second)
def get_commits(bot_input, bot_output):
    gh = GithubHelper(bot_input, bot_output)
    team = gh.get_team()
    team_repos = team.get_repos()
    random_repo = team_repos[random.randrange(0, 3)]
    start_date = moment.now().add(key='days', amount=-30).date
    commits = random_repo.get_commits(since=start_date)
    return commits
def parse_dotamax_html(html):
    soup = BeautifulSoup(html, 'lxml')
    dls = soup.find_all('div', attrs={'class': 'live-box'})
    items = []
    for dl in dls:
        try:
            atag = dl.select('a')
            data = {}
            data['video_id'] = atag[0].attrs['href'].split('link=')[1]
            data['id'] = md5.new(data['video_id']).hexdigest()
            imgs = dl.find_all('img', attrs={'class': 'none-opacity-img'})
            data['img'] = imgs[0].attrs['src']
            texts = dl.find_all('div', attrs={'class': 'overflow-text'})
            data['title'] = texts[0].text.strip()
            arr = texts[1].text.strip().split()
            data['author'] = arr[0]
            if arr[2]:
                if arr[2].find('hour') >= 0:
                    data['created_at'] = moment.now().subtract(hours=int(arr[1])).format('YYYY-M-DThh:mm:ss')
                if arr[2].find('minute') >= 0:
                    data['created_at'] = moment.now().subtract(minutes=int(arr[1])).format('YYYY-M-DThh:mm:ss')
                if arr[2].find('year') >= 0:
                    data['created_at'] = moment.now().subtract(years=int(arr[1])).format('YYYY-M-DThh:mm:ss')
                if arr[2].find('month') >= 0:
                    data['created_at'] = moment.now().subtract(months=int(arr[1])).format('YYYY-M-DThh:mm:ss')
                if arr[2].find('day') >= 0:
                    data['created_at'] = moment.now().subtract(days=int(arr[1])).format('YYYY-M-DThh:mm:ss')
                if arr[2].find('second') >= 0:
                    data['created_at'] = moment.now().subtract(seconds=int(arr[1])).format('YYYY-M-DThh:mm:ss')
            data['update_at'] = moment.now().format('YYYY-M-DThh:mm:ss')
            items.append(data)
        except:
            pass
    return items
def today(request):
    today = moment.now().date
    today_plan = Plan.objects.filter(created_at=today, user=request.user)
    if today_plan:
        return redirect('/wechat/plan/' + str(today_plan.first().id) + '/details')
    else:
        return redirect('/wechat/plan/manage')
def parseTime(self, o):
    mT = re.search(r"(\d\d):(\d\d) ([A-Z]{2})", self.gameText)
    time = {"hour": mT.group(1), "minute": mT.group(2)}
    if mT.group(3) == "PM":
        time['hour'] = str(int(time['hour']) + 12)
    if o.find("ul", {"class": "game-details-list"}) == None:
        mD = re.search(r"\w{3}, (\d\d)-(\d\d) [A-Z]{3}", o.find("small").text)
    else:
        mD = re.search(r"\w{3}, (\d\d)/(\d\d) [A-Z]{3}",
                       o.find("ul", {"class": "game-details-list"}).findNext("li").text)
    date = (str(moment.now().year) + "-" + str(mD.group(1)) + "-" + str(mD.group(2)) +
            "T" + time['hour'] + ":" + time['minute'] + ":00" +
            moment.now().locale("US/Pacific").strftime('%z'))
    # print date
    return date
def get_scoregraph(show_admin=False):
    def convert_to_time(time):
        # return time
        m, s = divmod(time, 60)
        h, m = divmod(m, 60)
        return "%d:%02d:%02d" % (h, m, s)

    teams = get_all_team_scores(show_admin=False, show_observer=False)[:6]
    # print (teams)
    db = api.common.db_conn()
    indices = []
    for team in teams:
        submissions = list(db.submissions.find({
            "tid": team["tid"],
            "correct": True,
            "timestamp": {"$lt": api.config.end_time.timestamp()}
        }).sort("timestamp", 1))
        team["submissions"] = submissions
        team["points"] = 0
        for submission in submissions:
            index = submission["timestamp"] - api.config.start_time.timestamp()
            if not (index in indices):
                indices.append(index)
    indices.sort()
    names = [["Time"] + [team["teamname"] for team in teams]]
    names.append([convert_to_time(0)] + [0] * len(teams))
    counted = {}
    for index in indices:
        frame = [convert_to_time(index)]
        for team in teams:
            if len(team["submissions"]) > 0:
                submission = team["submissions"][0]
                time = submission["timestamp"] - api.config.start_time.timestamp()
                if team["tid"] not in counted.keys():
                    counted[team["tid"]] = []
                if time == index:
                    if not (submission["pid"] in counted[team["tid"]]) and not (submission["timestamp"] > api.config.end_time.timestamp()):
                        counted[team["tid"]].append(submission["pid"])
                        team["points"] += api.problem.get_problem_value(submission["pid"], submission["bonus_place"])
                    team["submissions"].pop(0)
            frame.append(team["points"])
        names.append(frame)
    last = [convert_to_time(min(moment.now().date.timestamp(), api.config.end_time.timestamp()) - api.config.start_time.timestamp())] + [team["points"] for team in teams]
    names.append(last)
    return {
        "points": names,
        "options": {
            "title": "YourCTF Score Progression",
            "height": 348,
            "width": "100%",
            "legend": {"position": "top"},
            "hAxis": {"textPosition": "none"},
            "vAxis": {
                "viewWindowMode": "explicit",
                "viewWindow": {"min": 0, "max": api.problem.get_max_points()}
            }
        }
    }
def get_team_score_progression(tid=None):
    def convert_to_time(time):
        # return time
        m, s = divmod(time, 60)
        h, m = divmod(m, 60)
        return "%d:%02d:%02d" % (h, m, s)

    db = api.common.db_conn()
    team = db.teams.find_one({"tid": tid})
    indices = []
    submissions = list(db.submissions.find({
        "tid": team["tid"],
        "correct": True,
        "timestamp": {"$lt": api.config.end_time.timestamp()}
    }).sort("timestamp", 1))
    team["submissions"] = submissions
    team["points"] = 0
    for submission in submissions:
        index = submission["timestamp"] - api.config.start_time.timestamp()
        if not (index in indices):
            indices.append(index)
    counted = []
    indices.sort()
    names = [["Time", team["teamname"]]]
    names.append([convert_to_time(0)] + [0])
    for index in indices:
        frame = [convert_to_time(index)]
        if len(team["submissions"]) > 0:
            submission = team["submissions"][0]
            time = submission["timestamp"] - api.config.start_time.timestamp()
            if time == index:
                if submission["pid"] not in counted and not (submission["timestamp"] > api.config.end_time.timestamp()):
                    counted.append(submission["pid"])
                    team["points"] += api.problem.get_problem_value(submission["pid"], submission["bonus_place"])
                team["submissions"].pop(0)
        frame.append(team["points"])
        names.append(frame)
    last = [convert_to_time(min(moment.now().date.timestamp(), api.config.end_time.timestamp()) - api.config.start_time.timestamp())] + [team["points"]]
    names.append(last)
    obj = {
        "points": names,
        "options": {
            "title": "%s Score Progression" % team["teamname"],
            "height": 348,
            "legend": {"position": "top"},
            "hAxis": {"textPosition": "none"}
        }
    }
    if "admin" in team and team["admin"] == True:
        obj["secret"] = "easyctf{h4xxing_th3_c0mpetition_s1t3}"
    return obj
def calendar_events():
    """Route controller fetches events for a calendar in Google Calendar."""
    # Get params from query string
    cal_id = request.args.get('id')
    timeMin = request.args.get('now')
    timeMax = moment.now().add('days', 1).replace(
        hours=0, minutes=0, seconds=0).format("YYYY-MM-DDTHH:mm:ssZ")
    username = session.get('username', None)
    assert username

    # Get calendar event items
    events = gc_helper.get_calendar_events(cal_id, timeMin, timeMax, username, Widgets.GCAL)

    # Return array of calendar events
    return jsonify(events)
def create_plan(request):
    plan_contents = json.loads(request.body)
    tomorrow = moment.now().add(day=1)
    plan = Plan()
    plan.user = request.user
    plan.note = plan_contents['note']
    plan.created_at = tomorrow.date
    plan.save()
    for entry in plan_contents['entries']:
        detail = PlanDetail()
        detail.plan = plan
        detail.created_at = entry['title']
        detail.content = entry['content']
        detail.save()
    return JsonResponse({'success': True})
def parse_date_or_days_ahead(datestr, config, quit_if_none=False):
    '''Returns a moment date corresponding to the given date string, or to a number of days back.

    quit_if_none: quit the program if no date could be parsed.

    parse_date_or_days_ahead('4/10/2014') should return the corresponding moment,
    if that format is defined in the config file.
    parse_date_or_days_ahead('1') should return the date of yesterday.
    '''
    # Try to find a formatted date
    date_formats = config.get('default', 'date_formats').split(',')
    date = parse_date(datestr, date_formats)
    if date:
        return date
    # It's not a date; maybe it is a number corresponding to some days ago
    if datestr.isdigit():
        # It's a number of days ago from today. Return that date
        return moment.now().subtract(days=int(datestr))
    if quit_if_none:
        quit_with_parse_date_error(datestr, date_formats)
    return None
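A minimal usage sketch for the helper above, assuming a ConfigParser-style config whose default section defines date_formats and that the project's own parse_date tries each listed format in turn; the section content and format strings here are only illustrative, not taken from the original project.

import configparser
import moment

# Hypothetical configuration; the format list is an example, not the project's real config.
config = configparser.ConfigParser()
config.read_dict({'default': {'date_formats': 'DD/MM/YYYY,YYYY-MM-DD'}})

# Expected behaviour, assuming parse_date understands the configured formats:
parse_date_or_days_ahead('4/10/2014', config)   # a moment for that date, if the format matches
parse_date_or_days_ahead('1', config)           # yesterday (1 day back from today)
parse_date_or_days_ahead('garbage', config)     # None, or exits when quit_if_none=True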
def today_timestamp():
    today_str = moment.now().format("YYYY-M-D")
    time_stamp = datestr_2_timestamp(today_str)
    return time_stamp
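datestr_2_timestamp itself is not shown in these snippets; since the callers add whole days as n * 24 * 60 * 60 * 1000, it presumably turns a "YYYY-M-D" string into an epoch timestamp in milliseconds. A possible sketch for that case, offered purely as an assumption:

from datetime import datetime

def datestr_2_timestamp(datestr):
    # Assumed helper, not shown in the original snippets: parse a "YYYY-M-D"
    # date string into epoch milliseconds at local midnight, which is how the
    # surrounding timestamp helpers treat its result.
    parsed = datetime.strptime(datestr, "%Y-%m-%d")
    return int(parsed.timestamp() * 1000)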
import allure
import moment
import os
import string
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.common.exceptions import TimeoutException
from allure.constants import AttachmentType

now = moment.now().strftime("%d-%m-%Y")
path = os.path.abspath(os.path.join(os.path.dirname(os.path.abspath(__file__)), os.pardir))
screen_dir = os.path.join(path, "screenshot", str(now))


def screen_path():
    global screen_dir
    if not os.path.exists(screen_dir):
        os.makedirs(screen_dir)
        os.chmod(screen_dir, 0755)
    return screen_dir


def remove_special_characters(text):
    return text.translate(string.maketrans('', ''), '\ / : * ? " < > |')


def save_screenshot(driver, name):
    _name = remove_special_characters(name)
    driver.get_screenshot_as_file(os.path.join(screen_path(), _name + '-' + now + ".png"))
    allure.attach(_name + "-" + now, driver.get_screenshot_as_png(), type=AttachmentType.PNG)
def checkMsg(mydb):
    res = mydb.selectCollection("xmateMessage")
    if res["status"]:
        return res
    # find out-of-date messages (more than 24 hours old)
    current_time = moment.now().epoch()
    st = current_time - 86400
    match_list = {"create_time": {"$lt": st}}
    res = mydb.getData(match_list)
    if res["status"]:
        return res
    cursor = list(res["content"])
    # delete out-of-date msg_ids in users' unprocessed msg lists
    user_msg = {}
    outoftime_msg = []
    related_userlist = set()
    if len(cursor) == 0:
        return returnHelper()
    for msg in cursor:
        outoftime_msg.append(msg["_id"])
        related_userlist.add(ObjectId(msg["receiver_id"]))
        if ObjectId(msg["receiver_id"]) in user_msg.keys():
            pass
        else:
            user_msg[ObjectId(msg["receiver_id"])] = []
        user_msg[ObjectId(msg["receiver_id"])].append(msg["_id"])
    related_userlist = list(related_userlist)
    res = mydb.selectCollection("xmateUser")
    if res["status"]:
        return res
    match_list = {"_id": {"$in": related_userlist}}
    res = mydb.getData(match_list)
    if res["status"]:
        return res
    cursor = res["content"]
    for users in cursor:
        uid = users["_id"]
        nlist = users["unprocessed_message"]
        for mid in user_msg[uid]:
            if mid in nlist:
                nlist.remove(mid)
        match_list = {"_id": uid}
        ndata = {"unprocessed_message": nlist}
        res = mydb.updateData(match_list, ndata)
        if res["status"]:
            return res
    # delete the messages in the database
    res = mydb.selectCollection("xmateMessage")
    if res["status"]:
        return res
    match_list = {"_id": {"$in": outoftime_msg}}
    res = mydb.removeData(match_list)
    if res["status"]:
        return res
    return returnHelper()
def verify(num, start=None, end=None, day=None):
    con = db.connect()
    cur = con.cursor()

    # default parameters
    # start date default
    if start is None:
        start = moment.now().subtract(days=1).format("YYYY/MM/DD")
        if end is None:
            end = moment.now().subtract(days=1).format("YYYY/MM/DD")
    # end date default
    if end is None:
        end = moment.now().format("YYYY/MM/DD")
    # day default: today
    if day is not None:
        start = moment.date(day).format("YYYY/MM/DD")
        end = moment.date(start).format("YYYY/MM/DD")

    # Query for verifying the quantity of SMS sent
    sql_sent = '''
        Select count(RECIP_ADDRESS_ADDRESS) from SMSC_CDR
        where MSG_ORIG_SUBM_TIME_DATE between TO_DATE('{start} 00:00', 'yyyy/mm/dd hh24:mi')
        and TO_DATE('{end} 23:59', 'yyyy/mm/dd hh24:mi')
        and ORIG_ADDRESS_ADDRESS = '6262'
        and ORIG_APPL_ID = 'VASGWCONECTA_TR'
        and RECIP_APPL_ID = 'tpgsm_0340_ifx_R'
        and RECIP_ADDRESS_ADDRESS like '%{num}'
        and MSG_STATUS = '0'
    '''.format(start=start, end=end, num=num)

    # Query for verifying the quantity of SMS responses
    sql_res = '''
        Select count(RECIP_ADDRESS_ADDRESS) from SMSC_CDR
        where MSG_ORIG_SUBM_TIME_DATE between TO_DATE('{start} 00:00', 'yyyy/mm/dd hh24:mi')
        and TO_DATE('{end} 23:59', 'yyyy/mm/dd hh24:mi')
        and ORIG_ADDRESS_ADDRESS like '%{num}'
        and RECIP_APPL_ID = 'VASGWCONECTA_R'
        and RECIP_ADDRESS_ADDRESS = '6262'
    '''.format(start=start, end=end, num=num)

    resul_sent = cur.execute(sql_sent)
    qtd_sent = 0
    qtd_res = 0
    for row in resul_sent:
        qtd_sent = row[0]
    if qtd_sent > 0:
        resul_res = cur.execute(sql_res)
        qtd_res = 0
        for row in resul_res:
            qtd_res = row[0]
    else:
        status = False
    if (qtd_res >= qtd_sent) and (qtd_sent != 0):
        status = True
    else:
        status = False

    res = {
        'number': int(num),
        'date_start': start,
        'date_end': end,
        'sent': qtd_sent,
        'res': qtd_res,
        'status': status,
    }
    return res
def test_yesterday(self):
    d = moment.date("yesterday").zero
    expecting = moment.now().zero.subtract(days=1)
    self.assertEqual(d.date, expecting.date)
import moment
import time
from datetime import datetime

x = moment.now()
x = moment.now().strftime("%d-%m-%Y_%H-%M-%S")
y = datetime.now().strftime("%d-%m-%Y_%H-%M-%S")
print(y)
print(x)
def GetDetails(self, url, muti):
    # time.sleep(2)
    resp = self.session.get(url)
    resp.encoding = "utf8"
    self.CheckIfNeedLogin(resp)
    jqdata = jq(resp.text)
    jqdetail = jqdata(".v_table_1")
    jqperson = jqdata(".v_table_2")
    try:
        uid = FixNums(jqperson("tr:nth-child(5) > td:nth-child(2)").text())
        sid = FixNums(jqdetail("tr:nth-child(3) > td:nth-child(2)").text())
        details = DarkNet_DataSale.select().where((DarkNet_DataSale.sid == sid))
        person = DarkNet_Saler.select().where((DarkNet_Saler.uid == uid))
        notice = DarkNet_Notice.select().where((DarkNet_Notice.sid == sid))
        img = DarkNet_IMGS.select().where((DarkNet_IMGS.sid == sid))
        personDatas = {
            "salenums": FixNums(jqperson("tr:nth-child(3) > td:nth-child(4)").text()),
            "totalsales": float(jqperson("tr:nth-child(5) > td:nth-child(4)").text()),
            "totalbuys": float(jqperson("tr:nth-child(7) > td:nth-child(4)").text()),
        }
        username = jqperson("tr:nth-child(3) > td:nth-child(2)").text()
        if not person:
            personDatas.update({
                "uid": uid,
                "user": username,
                "regtime": moment.date(
                    jqperson("tr:nth-child(7) > td:nth-child(2)").text()).format("YYYY-MM-DD"),
            })
            person = DarkNet_Saler.create(**personDatas)
        else:
            DarkNet_Saler.update(personDatas).where((DarkNet_Saler.uid == uid)).execute()
            person = person[0].uid
        if not notice:
            notice = DarkNet_Notice.create(**{"sid": sid})
        else:
            notice = notice[0].sid
        detailImages = None
        detailContent = " ".join(jqdata(".postbody .content").text().split())
        if not img:
            urls = [_.attr("src") for _ in jqdata(".postbody img").items()]
            img = DarkNet_IMGS.create(**{"sid": sid, "img": urls, "detail": detailContent})
            detailImages = self.SavePics(urls, sid)
        else:
            img = img[0].sid
        currentYear = moment.now().year
        soldNum = FixNums(jqdetail("tr:nth-child(7) > td:nth-child(4)").text(), to=99999)
        toCurrentYearDateTime = moment.date(
            f"{currentYear} " + jqdetail("tr:nth-child(3) > td:nth-child(6)").text())
        RealUpTimeJQ = jqdata(".author")
        RealUpTimeJQ.remove("a")
        RealUpTimeJQ.remove("span")
        RealUpTime = moment.date(RealUpTimeJQ.text().replace(
            "年", "").replace("月", "").replace("日", ""))
        RealUpTime = RealUpTime if RealUpTime._date else toCurrentYearDateTime
        detailsDatas = {
            "lasttime": moment.date(
                f"{currentYear} " + jqdetail("tr:nth-child(7) > td:nth-child(6)").text()
            ).format("YYYY-MM-DD HH:mm:ss"),
            "priceBTC": float(jqdetail("tr:nth-child(3) > td:nth-child(4) > span").text()),
            "priceUSDT": float(jqdetail("tr:nth-child(5) > td:nth-child(4)").text().split()[0]),
            "lines": muti["lines"],
            "uptime": RealUpTime.format("YYYY-MM-DD HH:mm:ss"),
            "hot": muti["hot"],
            "types": jqdetail("tr:nth-child(5) > td:nth-child(2)").text(),
            "status": jqdetail("tr:nth-child(7) > td:nth-child(2)").text(),
            "oversell": jqdetail("tr:nth-child(9) > td:nth-child(2)").text(),
            "sold": soldNum,
        }
        if not details:
            detailsDatas.update({
                "sid": sid,
                "user": person,
                "area": muti["area"],
                "title": muti["title"],
                "detailurl": url,
                "img": img,
                "notice": notice,
            })
            details = DarkNet_DataSale.create(**detailsDatas)
            self.MakeMsg(details, detailContent, detailImages, sid, username)
        else:
            self.warn(f'-{RealUpTime}- {muti["title"]}')
            DarkNet_DataSale.update(detailsDatas).where((DarkNet_DataSale.sid == sid)).execute()
    except Exception as e:
        self.error(f"GetDetails {e}")
        self.SaveError("error_264.html", resp)
        raise
    assert args['--user'].isdigit()
    user_id = args['--user']
else:
    print 'WARNING: you didn\'t specify a user id; deleting tasks for ALL users in Redmine !\n'

# Get preferred date format from config file to display dates
date_format = config.get('default', 'date_formats')
if date_format.find(',') != -1:
    # More than one format is defined, take the first
    date_format = (date_format.split(',')[0]).strip()

# print confirmation to user, to check dates
if from_date:
    if to_date is None:
        # implicitly takes today for to_date
        to_date = moment.now()
        question = "Delete tasks from {} to today (included) ?".format(from_date.format(date_format))
    else:
        question = "Delete tasks from {} to {} (included) ?".format(
            from_date.format(date_format), to_date.format(date_format)
        )
elif for_date:
    if args['<date>'] == '0':
        question = "Delete tasks for today ?"
    elif args['<date>'] == '1':
        question = "Delete tasks for yesterday ({}) ?".format(for_date.format(date_format))
    else:
        question = "Delete tasks for {} ?".format(for_date.format(date_format))

assert question
parser.add_argument("--year", "-y",
                    help="Enter the desired year (if no year is entered, the current year is automatically used)")
parser.add_argument("--output", "-o", help="Enter the desired output file name")
args = parser.parse_args()

try:
    if args.month is None:
        month = raw_input("Enter the desired month: ")
    else:
        month = args.month
    if args.output is None:
        output = raw_input("Enter the output file name: ")
    else:
        output = args.output
    if args.year is None:
        year = moment.now().year
    else:
        year = args.year
    startdate = moment.date(month + " 1, " + str(year))
    enddate = moment.date(month + " 1, " + str(year)).add(months=1).subtract(days=1)
except Exception as e:
    verified = False
    print(e)

if verified:
    APIRequest.requestData(_workspace_id, _api_token, startdate, enddate, year, output)
    print("Success")
else:
    print("Invalid option. Try --help for docs")
print('Insert Sub-Category: ')
sub = input()
print('Insert Type Category: ')
type_cat = input()
print('Accept? (!no or !yes)')
accept = input()

if accept != '!no':
    obj = {
        'title': title,
        'company': company(),
        'price': price(),
        'price_percentage': 0,
        'quantity': 100,
        'num_of_shares': 0,
        'images': images,
        'about': {
            'description': description(),
            'release_date': moment.now().format("D-M-YYYY"),
            'rating': rating()
        },
        'category': {
            'category_name': category,
            'sub_category': {
                'name': sub,
                'type': type_cat
            }
        }
    }
    response = requests.post(post_url, json=obj)
    print(response.text)
def current_year_start_timestamp():
    year_str = moment.now().format("YYYY")
    time_stamp = datestr_2_timestamp(year_str)
    return time_stamp
def current_year_end_timestamp():
    year_date = moment.now().date
    if calendar.isleap(year_date.year):
        return current_year_start_timestamp() + 366 * 24 * 60 * 60 * 1000
    else:
        return current_year_start_timestamp() + 365 * 24 * 60 * 60 * 1000
def test_2_weeks_ago(self):
    d = moment.date("2 weeks ago").zero
    expecting = moment.now().zero.subtract(weeks=2)
    self.assertEqual(d.date, expecting.date)
def test_future(self):
    d = moment.date("tomorrow").zero
    expecting = moment.now().zero.add(days=1)
    self.assertEqual(d.date, expecting.date)
def new_resource(self, id, data=None, errors=None, error_summary=None):
    '''
    FIXME: This is a temporary action to allow styling of the forms.
    '''
    if request.method == 'POST' and not data:
        save_action = request.params.get('save')
        data = data or clean_dict(
            dict_fns.unflatten(tuplize_dict(parse_params(request.POST))))
        # we don't want to include save as it is part of the form
        del data['save']
        resource_id = data['id']
        del data['id']

        # Save the issued and modified fields
        time_now = moment.now().isoformat()
        data['issued'] = time_now
        data['modified'] = time_now

        self._validate_resource(data)

        context = {
            'model': model,
            'session': model.Session,
            'user': c.user,
            'auth_user_obj': c.userobj
        }

        if save_action == 'go-dataset':
            # go to first stage of add dataset
            h.redirect_to(controller='package', action='edit', id=id)

        # see if we have any data that we are trying to save
        data_provided = False
        for key, value in data.iteritems():
            if ((value or isinstance(value, cgi.FieldStorage)) and
                    key not in ['resource_type', 'license_id', 'attributesDescription']):
                data_provided = True
                break

        if not data_provided and save_action != "go-dataset-complete":
            if save_action == 'go-dataset':
                # go to first stage of add dataset
                h.redirect_to(controller='package', action='edit', id=id)
            try:
                data_dict = get_action('package_show')(context, {'id': id})
            except NotAuthorized:
                abort(403, _('Unauthorized to update dataset'))
            except NotFound:
                abort(404, _('The dataset {id} could not be found.').format(id=id))
            if not len(data_dict['resources']):
                # no data so keep on page
                msg = _('You must add at least one data resource')
                # On new templates do not use flash message
                if asbool(config.get('ckan.legacy_templates')):
                    h.flash_error(msg)
                    h.redirect_to(controller='package', action='new_resource', id=id)
                else:
                    errors = {}
                    error_summary = {_('Error'): msg}
                    return self.new_resource(id, data, errors, error_summary)
            # XXX race condition if another user edits/deletes
            data_dict = get_action('package_show')(context, {'id': id})
            get_action('package_update')(dict(context, allow_state_change=True),
                                         dict(data_dict, state='active'))
            h.redirect_to(controller='package', action='read', id=id)

        data['package_id'] = id
        try:
            if resource_id:
                data['id'] = resource_id
                get_action('resource_update')(context, data)
            else:
                get_action('resource_create')(context, data)
        except ValidationError, e:
            errors = e.error_dict
            error_summary = e.error_summary
            return self.new_resource(id, data, errors, error_summary)
        except NotAuthorized:
            abort(403, _('Unauthorized to create a resource'))
def timeNow():
    return moment.now().timezone("Asia/Shanghai").format("YYYY-M-D h:m:s A")
def test_copy_existing_course_instructor(self):
    try:
        driver = self.driver
        login_instructor = LoginInstructor(driver)
        login_instructor.login_as_instructor()
        time.sleep(2)
        home_page = HomePage(driver)
        home_page.click_course_options()
        home_page.click_copy_course()
        exchange_page = ExchangePage(driver)
        time.sleep(2)
        exchange_page.click_next()
        exchange_page.check_online_course()
        exchange_page.click_course_start_date()
        exchange_page.select_current_date_box()
        exchange_page.click_course_end_date()
        exchange_page.click_next_calendar()
        exchange_page.click_next_calendar()
        exchange_page.click_next_calendar()
        exchange_page.click_next_calendar()
        exchange_page.select_28day_calendar()
        time.sleep(3)
        copy_course_name = CopyCourseName(driver)
        copy_course_name.get_copy_course_name()
        home_page = HomePage(driver)
        exchange_page.click_save()
        time.sleep(12)
        assert home_page.coach_mark_title() == "Done setting up your course dates and times?"
    except AssertionError as error:
        print("Assertion error occurred")
        print(error)
        curr_time = moment.now().strftime("_%m-%d-%Y_%H-%M-%S")
        test_name = utils.whoami()
        screenshot_name = str(test_name) + "" + curr_time
        allure.attach(self.driver.get_screenshot_as_png(),
                      name=screenshot_name,
                      attachment_type=allure.attachment_type.PNG)
        self.driver.get_screenshot_as_file(
            "/Users/vburiol/PycharmProjects/GLP_Test/Screenshots/" + screenshot_name + ".png")
        raise
    except:
        print("There was an exception")
        curr_time = moment.now().strftime("%m-%d-%Y_%H-%M-%S_")
        test_name = utils.whoami()
        screenshot_name = str(test_name) + "_" + curr_time
        allure.attach(self.driver.get_screenshot_as_png(),
                      name=screenshot_name,
                      attachment_type=allure.attachment_type.PNG)
        self.driver.get_screenshot_as_file(
            "/Users/vburiol/PycharmProjects/GLP_Test/Screenshots/" + screenshot_name + ".png")
        raise
    else:
        print("No exceptions occurred")
    finally:
        print("This block will always execute")
        time.sleep(2)

    home_page.click_link_got_it()
    time.sleep(2)

    try:
        driver = self.driver
        get_copy_course_name_csv = GetCopyCourseName(driver)
        home_page_copy_course = HomePageCopyCourse(driver)
        copy_course_name = "Copy of " + home_page_copy_course.get_text_copy_course()
        assert copy_course_name == get_copy_course_name_csv.get_copy_course_name()
    except AssertionError as error:
        print("Assertion error occurred")
        print(error)
        curr_time = moment.now().strftime("_%m-%d-%Y_%H-%M-%S")
        test_name = utils.whoami()
        screenshot_name = str(test_name) + "" + curr_time
        allure.attach(self.driver.get_screenshot_as_png(),
                      name=screenshot_name,
                      attachment_type=allure.attachment_type.PNG)
        self.driver.get_screenshot_as_file(
            "/Users/vburiol/PycharmProjects/GLP_Test/Screenshots/" + screenshot_name + ".png")
        raise
    except:
        print("There was an exception")
        curr_time = moment.now().strftime("%m-%d-%Y_%H-%M-%S_")
        test_name = utils.whoami()
        screenshot_name = str(test_name) + "_" + curr_time
        allure.attach(self.driver.get_screenshot_as_png(),
                      name=screenshot_name,
                      attachment_type=allure.attachment_type.PNG)
        self.driver.get_screenshot_as_file(
            "/Users/vburiol/PycharmProjects/GLP_Test/Screenshots/" + screenshot_name + ".png")
        raise
    else:
        print("No exceptions occurred")
    finally:
        print("This block will always execute")
        time.sleep(2)
def Reg(self):
    self.warn("Start Regging")
    headers = {
        "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8",
        "Accept-Encoding": "gzip, deflate",
        "Accept-Language": "zh-CN,zh;q=0.9,en;q=0.8",
        "Cache-Control": "no-cache",
        "Connection": "keep-alive",
        "Content-Type": "application/x-www-form-urlencoded",
        "Origin": f"http://{self.domain}",
        "Pragma": "no-cache",
        "Referer": f"http://{self.domain}/ucp.php?mode=register&sid={self.sid}",
        "Upgrade-Insecure-Requests": "1",
        "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/69.0.3497.100 Safari/537.36",
    }
    step1resp = self.session.get(f"http://{self.domain}/ucp.php?mode=register").text
    step1 = jq(step1resp)
    self.info(f'RegPage Confirmed. {"sid" in step1resp}')
    token = step1('input[name="form_token"]').attr("value")
    self.report(f"Got Token: {token}")
    creation_time = step1('input[name="creation_time"]').attr("value")
    self.report(f"Got Create_time: {creation_time}")
    url = f"http://{self.domain}/ucp.php?mode=register"
    step2resp = self.session.post(
        url,
        data={
            "agreed": "===好的,我已明白,请跳转到下一页继续注册====",
            "change_lang": "",
            "creation_time": creation_time,
            "form_token": token,
        },
        headers=headers,
    )
    self.SaveError("step2.html", step2resp)
    step2 = jq(step2resp.text)
    token = step2('input[name="form_token"]').attr("value")
    creation_time = step2('input[name="creation_time"]').attr("value")
    qa_answer = re.findall("请在右边框中输入: (.*?):</label>", step2resp.text)[0]
    self.report(f"Got answer: {qa_answer}")
    qa_confirm_id = step2("#qa_confirm_id").attr("value")
    self.usr = self.RandomKey(12)
    self.pwd = self.RandomKey()
    self.info(f"set Usr: {self.usr} ,Pwd: {self.pwd}")
    data = {
        "username": self.usr,
        "new_password": self.pwd,
        "password_confirm": self.pwd,
        "email": "*****@*****.**",
        "lang": "zh_cmn_hans",
        "tz_date": "UTC+08:00+-+Asia/Brunei+-+" + moment.now().format("DD+MM月+YYYY,+HH:mm"),
        "tz": "Asia/Hong_Kong",
        "agreed": "true",
        "change_lang": "0",
        "qa_answer": qa_answer,
        "qa_confirm_id": qa_confirm_id,
        "submit": " 用户名与密码已填好,+点此提交 ",
        "creation_time": creation_time,
        "form_token": token,
    }
    resp = self.session.post(url, data=data, headers=headers)
    try:
        assert "感谢注册" in resp.text
        self.report("Reg success!")
        DarkNet_User.create(**{"user": self.usr, "pwd": self.pwd})
    except AssertionError:
        self.error(jq(resp.text).text())
        self.SaveError("reg.html", resp)
def test_homework(self):
    try:
        driver = self.driver
        driver.get(utils.URL)
        homepage = HomePage(driver)
        homepage.navigate_our_jobs()
        time.sleep(2)
        homepage.navigate_our_customers()
        time.sleep(2)
        homepage.scroll_up()
        time.sleep(2)
        homepage.click_whatsapp()
        time.sleep(2)
        homepage.talk_to_us_test()

        # driver = self.driver
        # driver.get(utils.URL)
        #
        # login = LoginPage(driver)
        # homepage = HomePage(driver)
        # eventspage = EventsPage(driver)
        #
        # login.enter_username(utils.USERNAME)
        # login.enter_instance(utils.INSTANCE)
        # login.enter_password(utils.PASSWORD)
        # login.click_login()
        # homepage.navigate_events_screen()
        # eventspage.search_value("NewUser")
        # time.sleep(5)
        # eventspage.value_exists("NewUser")
        # time.sleep(5)
        # eventspage.search_value("Purchase")
        # time.sleep(5)
        # eventspage.value_exists("Purchase")
        # time.sleep(5)
    except AssertionError as error:
        print("Assertion error occurred")
        print(error)
        currTime = moment.now().strftime("%d-%m-%Y_%H-%M-%S")
        testName = utils.whoami()
        screenshotName = testName + "_" + currTime
        allure.attach(self.driver.get_screenshot_as_png(),
                      name=screenshotName,
                      attachment_type=allure.attachment_type.PNG)
        driver.get_screenshot_as_file(
            "C:/Users/Dror/Desktop/Automation/PythonAutomationFramework_1-master/screenshots/"
            + screenshotName + ".png")
        raise
    except:
        print("There was an exception")
        currTime = moment.now().strftime("%d-%m-%Y_%H-%M-%S")
        testName = utils.whoami()
        screenshotName = testName + "_" + currTime
        allure.attach(self.driver.get_screenshot_as_png(),
                      name=screenshotName,
                      attachment_type=allure.attachment_type.PNG)
        driver.get_screenshot_as_file(
            "C:/Users/Dror/Desktop/Automation/PythonAutomationFramework_1-master/screenshots/"
            + screenshotName + ".png")
        raise
import sys
from pathlib import Path

sys.path.append(str(Path(__file__).parent.parent.parent.parent))

from utils.common import is_expire
import moment

d1 = moment.now()
d2 = moment.now().subtract('day', 2)
d3 = moment.now().subtract('day', 0.4)
d4 = moment.now().subtract('hour', 48)


def test_is_expire():
    assert not is_expire(d1)
    assert is_expire(d2)
    assert not is_expire(d3, span=0.5)
    assert is_expire(d4, span=1.6)
    assert not is_expire(d3, span=10, unit='hour')
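is_expire itself lives in utils.common and is not shown in these snippets; from the assertions above it appears to take a moment, an expiry span defaulting to roughly one day, and a unit keyword. A sketch consistent with those tests, offered only as an assumption about the real helper:

import moment

def is_expire(m, span=1, unit='day'):
    # Assumed signature and defaults, inferred from the test cases above:
    # a timestamp counts as expired once it is more than `span` units in the past.
    unit_seconds = {'day': 86400, 'hour': 3600}[unit]
    age_seconds = moment.now().epoch() - m.epoch()
    return age_seconds > span * unit_seconds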
def test_moment_can_transfer_between_datetime_and_moment(self):
    d = moment.now().to_date()
    self.assertEquals(d, moment.date(d).to_date())
def test_2_weeks_from_now(self):
    d = moment.date("2 weeks from now").zero
    expecting = moment.now().zero.add(weeks=2)
    self.assertEqual(d, expecting)
def test_today(self):
    d = moment.date("today").zero
    now = moment.now().zero
    self.assertEqual(d.date, now.date)
def test_now_clone_repr_error(self):
    repr(moment.now().clone())
def today_end_timestamp():
    today_str = moment.now().format("YYYY-M-D")
    time_stamp = datestr_2_timestamp(today_str) + 24 * 60 * 60 * 1000
    return time_stamp
        self.rds.setListData(rdsKeyName=rdsKeyName, lists=self.dataToo.getLinkArr(catalogData))
        self.logger.info('数据处理完成,存储书籍 [ %s ] 本, 共 [ %s ] 个章节' % (len(bookId), linkCount))

    def saveAllBookListToRedis(self, rdsKeyName):
        bookId = self.getMySqlBookInfoDataToo.getBookId(environmentalType=self.b_environmentalType)
        self.saveBookListToRedis(bookId=bookId, rdsKeyName=rdsKeyName)


if __name__ == '__main__':
    b_title = 'SaveBookToRedisTool'
    b_second = 1
    b_timeStr = moment.now().format('YYYY-MM-DD-HH-mm-ss')
    rds = RedisTool()
    dataToo = DataTool(logName=b_title, second=b_second, timeStr=b_timeStr)
    logger = Logger(logname=dataToo.initLogName(), loglevel=1, logger=b_title).getlog()
    mySql = MySqlTool(logName=dataToo.initLogName())
    rdsKeyName = 'bookIdsList3'
    environmental = ['dev', 'test', 'online']
    print('可选环境:')
    for i in range(len(environmental)):
        print('\t\t%s : %s' % (i, environmental[i]))
    environmentalType = int(input("请输入0、1、2: >>"))
    print('参数确认: 环境 : %s \n' % (environmental[environmentalType]))
def index():
    """Route controller fetches shifts info."""
    # Get params from query string
    cal_id = gc_helper.SHIFTS_GCAL

    # Set default values
    timeMin = moment.now().replace(
        hours=0, minutes=0, seconds=0).format("YYYY-MM-DDTHH:mm:ssZ")
    timeMax = moment.now().add('days', 1).replace(
        hours=0, minutes=0, seconds=0).format("YYYY-MM-DDTHH:mm:ssZ")
    username = env.config['sys_user']
    assert username

    # Get calendar event items
    events = gc_helper.get_calendar_events(cal_id, timeMin, timeMax, username, Widgets.SHIFTS)
    if events.get('redirect'):
        return jsonify(events)
    events = events.get('items', {})
    events = dict(
        (event.get('description'),
         dict(start=event.get('start').get('dateTime'),
              end=event.get('end').get('dateTime')))
        for event in events
    )

    # changing from ISO8601 to timestamp, for Oracle
    timeMin = moment.date(timeMin, "YYYY-MM-DDTHH:mm:ssZ").strftime('%s')
    timeclockOracleQuery = tc_obj.TimeclockOracleQuery(
        username=None,
        time_in=timeMin,
        time_out=timeMin,
        job_ids=tc_depts.get_all_job_ids(),
        limit=0,
        clocked_in=True)
    tc_entries = (tc_helper
                  .try_get_timeclock_entries(timeclockOracleQuery)
                  .get('timeclock_entries'))
    logging.info('tc_entries : {0}'.format(tc_entries))

    scheduled = {}
    not_scheduled = {}
    if tc_entries:
        for entry in tc_entries:
            username = entry.get('username')
            note = entry.get('note')
            time_in = entry.get('time_in')
            dept = entry.get('dept')

            # Try to get event corresponding to timeclock entry
            event = events.get(username, None)

            # Flesh out event
            if event:
                event['note'] = note
                event['clocked_in'] = time_in
                event['dept'] = dept

                # Add event to `scheduled` dict
                scheduled[username] = event

                # Remove event
                events.pop(username)
            else:
                not_scheduled[username] = dict(note=note, clocked_in=time_in, dept=dept)

    # Any event remaining in events dict is not clocked in
    not_clocked_in = events

    # Returns array of shifts
    return jsonify({
        "shifts": dict(scheduled=scheduled,
                       not_clocked_in=not_clocked_in,
                       not_scheduled=not_scheduled)})
def gmail_screenshot_attach_allure(context, func_name):
    currTime = moment.now().strftime("%d-%m-%Y_%H-%M-%S")
    Snapshotfilename = func_name + "_" + currTime
    allure.attach(context.driver.get_screenshot_as_png(),
                  name=Snapshotfilename,
                  attachment_type=AttachmentType.PNG)
    print("Snapshot Attached in Allure Reports")
def index(action, idr):
    """Index."""
    alarms = Alarm.query.filter(Alarm.users == current_user.id).all()
    form = addAlarmForm(state=True)
    # form2 = addAlarmForm2(state=True)
    monalarme = {}
    if form.submit.data:
        # Fast Form
        # ---------------------------------------------------
        lastid = alarms[-1].id
        if lastid is None:
            monalarme['id'] = 1
        else:
            monalarme['id'] = lastid + 1
        monalarme['heure'] = form.heures.data
        monalarme['minute'] = form.minutes.data
        print form.Radio.data
        print type(form.Radio.data)
        # return music or radio url depending on choice in form
        if form.Radio.data != '0' and form.musics.data == '0':
            radio_choice = Music.query.filter(Music.id == form.Radio.data).first()
            monalarme['path'] = radio_choice.url
        elif form.Radio.data == '0' and form.musics.data != '0':
            music_choice = Music.query.filter(Music.id == form.musics.data).first()
            monalarme['path'] = music_choice.name
        else:
            flash('Choose a media (music or radio)')

        # Complete Form
        # ---------------------------------------------------
        if form.repetition.data:
            monalarme['repetition'] = form.repetition.data
        # else:
        #     monalarme['repetition'] = 0
        if form.jours.data:
            monalarme['jours'] = form.jours.data
        else:
            # tomorrow (if date > now --> today else tomorrow)
            monalarme['jours'] = moment.now().add(days=1).format("d")
        if form.name.data:
            monalarme['nom'] = form.name.data
        else:
            monalarme['nom'] = 'No-name' + str(monalarme['id'])

        # setting up crontab
        result = addcronenvoi(monalarme)

        # Add alarm in database
        if result == 0:
            alarme = Alarm(
                namealarme=monalarme['nom'],
                days=",".join([str(x) for x in monalarme['jours']]),
                startdate=str(monalarme['heure']) + ':' + str(monalarme['minute']),
                frequence='dows',
                users=current_user.id)
            db.session.add(alarme)
            try:
                db.session.commit()
                flash('Your alarm has been programed.')
            except Exception, e:
                errorstring = str(e)
                flash('Error adding your alarm in database. ' + errorstring)
        else:
            flash('Error adding your alarm.')
    return redirect(url_for('.index'))
def gmail_screenshot_save(context, func_name):
    currTime = moment.now().strftime("%d-%m-%Y_%H-%M-%S")
    Snapshotfilename = func_name + "_" + currTime
    context.driver.save_screenshot("./Screenshots/" + Snapshotfilename + ".png")
    print("Snapshot Saved in Specified Path")
import moment

x = moment.now().strftime("%H-%M-%S_%d-%m_%Y")
print(x)
def __reg(self):
    try:
        warning("Reg Confirm")
        resp = self.__refresh_new_target(
            self.session.get(
                self.__reg_url,
                headers={
                    "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8",
                    "Accept-Encoding": "gzip, deflate",
                    "Accept-Language": "en-US,en;q=0.5",
                    "Cache-Control": "no-cache",
                    "Connection": "keep-alive",
                    "Content-Type": "application/x-www-form-urlencoded",
                    "Pragma": "no-cache",
                    "Referer": self.__reg_url,
                    "Upgrade-Insecure-Requests": "1",
                    "User-Agent": "Mozilla/5.0 (Windows NT 6.1; rv:60.0) Gecko/20100101 Firefox/60.0",
                },
            ))
        token, creation_time = Parser.get_token_and_creation_time(resp)
        warning("Start Reg")
        resp = self.session.post(
            self.__reg_url,
            data={
                "agreed": "===好的,我已明白,请跳转到下一页继续注册====",
                "autim": self.__autim,
                "change_lang": "",
                "creation_time": creation_time,
                "form_token": token,
            },
            headers=self.__make_reg_headers(resp),
        )
        token, creation_time = Parser.get_token_and_creation_time(resp)
        confirm_code, confirm_id = Parser.get_captcha(self.__get_pic, resp)
        self.__create_random_author()
        data = {
            "agreed": "true",
            "autim": self.__autim,
            "change_lang": "0",
            "confirm_code": confirm_code,
            "confirm_id": [confirm_id, confirm_id],
            "creation_time": creation_time,
            "email": "*****@*****.**",
            "form_token": token,
            "lang": "zh_cmn_hans",
            "new_password": self.pwd,
            "password_confirm": self.pwd,
            "submit": " 用户名与密码已填好,+点此提交 ",
            "tz": "Asia/Hong_Kong",
            "tz_date": "UTC+08:00+-+Asia/Brunei+-+" + moment.now().format("DD+MM月+YYYY,+HH:mm"),
            "username": self.usr,
        }
        resp = self.session.post(self.__reg_url, data=data, headers=self.__make_reg_headers(resp))
        assert "感谢注册" in resp.text
        success("Reg success!")
        Cursor.create_new_user({"user": self.usr, "pwd": self.pwd})
    except KeyboardInterrupt:
        exit()
    except AssertionError as e:
        error("Reg failed!")
        error(self.__clean_log(resp))
        self.__save_error("__reg.html", resp)
        raise e
def __get_details(self, url, muti, name, page, index_str):
    resp = self.session.get(url)
    resp.encoding = "utf8"
    if not self.__check_if_need_relogin(resp):
        return
    bs_data = bs_4(resp.text, "lxml")
    uid, sid = Parser.get_uid_and_sid(bs_data)
    if not any((uid, sid)):
        return
    details, person, notice, img = Cursor.get_model_details(uid, sid)
    try:
        person_datas, username = Parser.get_person_data(bs_data)
        if not person:
            person_datas.update({
                "uid": uid,
                "user": username,
                "regtime": Parser.get_reg_date(bs_data, "1999-01-01"),
            })
            person = Cursor.create_person(person_datas)
        else:
            Cursor.update_person(person_datas, uid)
            person = person[0].uid
        if not notice:
            notice = Cursor.create_notice({"sid": sid})
        else:
            notice = notice[0].sid
        detailImages = None
        detailContent = Parser.get_detail_content(bs_data)
        if not img:
            urls = Parser.get_img_urls(bs_data)
            img = Cursor.create_img({"sid": sid, "img": urls, "detail": detailContent})
            detailImages = self.__save_pics(urls, sid)
        else:
            img = img[0].sid
        current_year = moment.now().year
        real_up_time = Parser.get_up_time(bs_data, current_year)
        details_datas = Parser.get_details(bs_data, current_year, real_up_time, muti)
        if not details:
            details_datas.update({
                "sid": sid,
                "user": person,
                "area": muti["area"],
                "title": muti["title"],
                "detailurl": url,
                "img": img,
                "notice": notice,
            })
            details = Cursor.create_details(details_datas)
            self.__make_msg(details, detailContent, detailImages, sid, username)
        else:
            Cursor.update_details(details_datas, sid)
        short_msg = f'[{name}:{page}:{index_str}]-{real_up_time}- {muti["title"]}'
        success(short_msg) if not details else warning(short_msg)
    except KeyboardInterrupt:
        exit()
    except Exception as e:
        error(f"[run-->__get_details]: {e}")
        self.__save_error("__get_details.html", resp)
def test_properties_after_chaining(self):
    d = moment.now().replace(years=1984, months=1, days=1)
    self.assertEquals(d.year, 1984)
def test_moment_can_transfer_between_datetime_and_moment(self):
    d = moment.now().date
    self.assertEquals(d, moment.date(d).date)
def index():
    """Tries to get timeclock entries from Oracle db

    Returns:
        An array of timeclock entries of the form:

        Example Response:
        [{
            'username': '******',
            'dept': 'om',
            'time_in': <timestamp>,
            'time_out': <timestamp>
        }]

        or

        { 'error': <some indicative error message> }

    Example route: 'http://localhost:3000/timeclock?username=aburkart'
    """
    # Default time range
    now_secs = str(moment.now().strftime('%s'))
    begin_of_month_secs = str(moment.now().replace(days=1).strftime('%s'))

    # Get named parameters from query, otherwise default
    username = request.args.get('username', None)
    time_in = request.args.get('time_in', begin_of_month_secs)
    time_out = request.args.get('time_out', now_secs)
    departments = request.args.get('depts', tc_depts.get_all_job_ids())
    limit = request.args.get('limit', '50')
    clocked_in = request.args.get('clocked_in', False)

    error_message = []

    # Parse clock_in
    clocked_in = False if not clocked_in else clocked_in.lower() == 'true'

    # Parse username
    unicode_username_error = tc_helper.check_for_unicode_username(username)
    if not (unicode_username_error == None):
        error_message.append(unicode_username_error)

    # Parse time_in
    if not time_in.isdigit():
        error_message.append("Invalid parameter: 'time_in'")

    # Parse time_out
    if clocked_in:
        time_out = None
        time_in = (str(moment
                       .now()
                       .replace(hours=0, minutes=0, seconds=0)
                       .strftime('%s')))
    elif not time_out.isdigit():
        error_message.append("Invalid parameter: 'time_out'")

    # Parse departments
    job_ids = []
    dept_errors = []
    if not isinstance(departments, basestring):
        job_ids = departments
    else:
        if tc_helper.is_departments_unicode(departments):
            error_message.append("No unicode allowed: 'depts'")
        else:
            # Remove parentheses from ends of array
            departments = departments[1:-1].split(',')
            # Map department names to their respective job_id
            for dept in departments:
                job_id = tc_depts.get_job_id(dept, None)
                if not job_id:
                    dept_errors.append(dept)
                job_ids.append(job_id)

    # Parse limit
    if not limit.isdigit():
        error_message.append("Invalid parameter: 'limit'")
    else:
        limit = 0 if clocked_in else abs(int(limit))

    # Add any dept_errors to error_message
    if dept_errors:
        dept_errors = str(dept_errors)
        error_message.append("Invalid parameter: 'depts': {0}".format(dept_errors))

    # If timeclock request has errors, return them to the user
    if len(error_message) > 0:
        err = {'error': error_message}
        return make_response(jsonify(err), 400)

    # Build an oracle query from the request
    timeclockOracleQuery = tc_obj.TimeclockOracleQuery(
        username, time_in, time_out, job_ids, limit, clocked_in)

    # Try to get timeclock entries with the timeclock oracle query
    tc_entries = tc_helper.try_get_timeclock_entries(timeclockOracleQuery)

    # Fetches timeclock entries from Oracle db
    return jsonify(tc_entries)
def _save_new(self, context, package_type=None):
    # The staged add dataset used the new functionality when the dataset is
    # partially created so we need to know if we actually are updating or
    # this is a real new.
    is_an_update = False
    ckan_phase = request.params.get('_ckan_phase')
    from ckan.lib.search import SearchIndexError

    def pop_groups_from_data_dict_and_get_package_name_and_group_name(a_data_dict):
        # pull the groups out so that CKAN's validations do not fail later on
        some_group_names = [
            group['name'] for group in (a_data_dict['groups'] if 'groups' in a_data_dict else [])
        ]
        a_data_dict['groups'] = []
        # The Name field uniquely identifies a Dataset
        a_package_name = a_data_dict['name']
        return a_package_name, some_group_names

    def update_package_group_relation(a_package_name, group_names_to_add):
        # get the package id from a_package_name
        package = model.Package.get(a_package_name)
        # We need to delete *all* `Member` objects relating `Group`s to `Package`s,
        # because those relations are rewritten below from `group_names_to_add`
        for group in model.Session.query(model.Group):
            # with the package ID, query the Members with table_id = package_id and delete them
            members_to_delete = model.Session.query(model.Member).filter(
                model.Member.group_id == group.id,
                model.Member.table_name == 'package',
                model.Member.table_id == package.id)
            for member in members_to_delete:
                model.Session.delete(member)
                model.Session.commit()  # Is the commit needed?
        # relate the dataset to the corresponding groups (as entered)
        for group_name in group_names_to_add:
            group = model.Group.get(group_name)
            group.add_package_by_name(a_package_name)
            group.save()

    try:
        data_dict = clean_dict(
            dict_fns.unflatten(tuplize_dict(parse_params(request.POST))))

        # Save the issued and modified fields as extras
        time_now = moment.now().isoformat()
        if 'extras' not in data_dict.keys():
            data_dict['extras'] = []
        self._add_or_replace_extra(key='issued', value=time_now, extras=data_dict['extras'])
        self._add_or_replace_extra(key='modified', value=time_now, extras=data_dict['extras'])

        superTheme = []
        for field in data_dict['extras']:
            if (field['key'] == 'superTheme' or field['key'] == 'globalGroups') and field['value'] != []:
                superTheme = field['value']
                break
        self._add_or_replace_extra(key='superTheme', value=superTheme, extras=data_dict['extras'])

        if ckan_phase:
            # prevent clearing of groups etc
            context['allow_partial_update'] = True
            # sort the tags
            if 'tag_string' in data_dict:
                data_dict['tags'] = self._tag_string_to_list(data_dict['tag_string'])

        self._validate_dataset(data_dict)

        # Clean the data_dict so the dataset can be saved even by a collaborator
        # who is not a member of the group
        package_name, group_names = pop_groups_from_data_dict_and_get_package_name_and_group_name(data_dict)

        if data_dict.get('pkg_name'):
            is_an_update = True
            # This is actually an update not a save
            data_dict['id'] = data_dict['pkg_name']
            del data_dict['pkg_name']
            # don't change the dataset state
            if data_dict.get('save', '') == u'go-metadata':
                data_dict['state'] = 'active'
            else:
                data_dict['state'] = 'draft'
            # this is actually an edit not a save
            pkg_dict = get_action('package_update')(context, data_dict)

            # Restore the groups assigned to the dataset (when it is an update)
            update_package_group_relation(package_name, group_names)

            if request.params['save'] == 'go-metadata':
                # redirect to add metadata
                url = h.url_for(controller='package', action='new_metadata', id=pkg_dict['name'])
            elif request.params['save'] == 'save-draft':
                url = h.url_for(controller='package', action='read', id=pkg_dict['name'])
            else:
                # redirect to add dataset resources
                url = h.url_for(controller='package', action='new_resource', id=pkg_dict['name'])
            raise exc(location=url).exception

        # Make sure we don't index this dataset
        if request.params['save'] not in ['go-resource', 'go-metadata']:
            data_dict['state'] = 'draft'
        # allow the state to be changed
        context['allow_state_change'] = True
        data_dict['type'] = package_type
        context['message'] = data_dict.get('log_message', '')
        self.__generate_spatial_extra_field(data_dict)
        pkg_dict = get_action('package_create')(context, data_dict)

        # Restore the groups assigned to the dataset (when it is an insert)
        update_package_group_relation(package_name, group_names)

        if ckan_phase and request.params['save'] != 'save-draft':
            url = h.url_for(controller='package', action='new_resource', id=pkg_dict['name'])
            raise exc(location=url).exception
        elif request.params['save'] == 'save-draft':
            url = h.url_for(controller='package', action='read', id=pkg_dict['name'])
            raise exc(location=url).exception

        self._form_save_redirect(pkg_dict['name'], 'new', package_type=package_type)
    except NotAuthorized:
        abort(401, _('Unauthorized to read package %s') % '')
    except NotFound, e:
        abort(404, _('Dataset not found'))
def checkSchedule(db):
    '''
    What needs to be done in this function
    Target: all schedules that end before the current time.
    Logic:
    1. Move this schedule to the historyPost collection
    2. Remove all related messages
        1). Remove all these messages from their receivers' unprocessed_list
    3. Move this schedule from schedule_list to history_schedule_list in all members' profiles
    4. Add this schedule to its owner's history_schedule_list
    5. Add members and owner to all members' and the owner's history_partner
    6. Update the owner's and members' conflict_list
    '''
    res = db.selectCollection("xmatePost")
    # handle db err
    if res["status"]:
        return res
    # find all outdated messages
    current_time = moment.now().epoch()
    match_list = {"time_range.time_end": {"$lt": current_time}}
    res = db.getData(match_list)
    # handle db err
    if res["status"]:
        return res
    cursor = list(res["content"])
    # move these schedules to historyPost
    tobe_moved_schedule = []
    tobe_moved_msg = []
    tobe_affected_user = []
    # no match, then return
    if len(cursor) == 0:
        return returnHelper()
    # found matches; handle them one by one instead of all at the end
    # if an error happens, then stop
    # handler logic order:
    # move to history post if not exist -> update unprocess_list if exist ->
    # update schedule_list and history_schedule_list -> update conflict_list (needs recompute) ->
    # update history_partner -> remove from current post
    for schedule in cursor:
        print ""
        print ""
        print ""
        print "##############"
        print schedule
        print "start copying to history"
        res = moveToHistoryPost(schedule, db)
        if res["status"]:
            # fail to move to history_post
            return res
        print "start updating msg"
        res = updateMsg(schedule, db)
        if res["status"]:
            # fail to handle the related msg
            return res
        print "start updating schedule"
        res = updateUserScheduleList(schedule, db)
        if res["status"]:
            # fail to handle the schedule_list update
            # or the conflict_list update
            return res
        print "start updating historypartner"
        res = updateUserHistoryPartner(schedule, db)
        if res["status"]:
            # fail to handle the history partner update
            return res
        print "start removing post"
        # delete the post from the current post collection as the last step
        res = removeFromPost(schedule, db)
        if res["status"]:
            # fail to handle the schedule_list update
            return res
    return returnHelper()
def Fetch(name):
    if name == "FEATURE_VECTOR":
        pass
    elif name == "TECHNICAL_INDICATORS":
        # {"name": "TECHNICAL_INDICATORS", "value": {
        #     "TECH1": {"value": 0, "signal": "BUY"},
        #     "TECH2": {"value": 1, "signal": "SELL"},
        #     ...
        # }}
        pass
    elif name == "NETWORK_ATTRIBUTES":
        # {"name": "NETWORK_ATTRIBUTES", "value": {
        #     "ATTR1": 0,
        #     "ATTR2": 0,
        #     ...
        # }}
        pass
    elif name == "TWITTER_STATS":
        # {"name": "TWITTER_STATS", "value": {
        #     "AVG_SENTIMENT": 0,
        #     "TWEET_VOLUME": 0,
        #     "TOPICS": ["...", ...]
        # }}
        pass
    elif name == "COINDESK_STATS":
        html = requests.get("https://www.coindesk.com/",
                            headers={"User-Agent": random.choice(USER_AGENTS)})
        soup = BeautifulSoup(html.text, "html.parser")
        featured_headline_containers = [
            feature.find_all('a', class_="fade")[0]
            for feature in soup.find_all("div", class_="article article-featured")
        ]
        featured_headlines = [
            (headline.text, headline.find_all("time")[0].datetime)
            for headline in featured_headline_containers
        ]
        other_headlines = [
            (headline.find_all("a", class_="fade")[0].title, headline.find_all("time")[0].datetime)
            for headline in soup.find_all("div", class_="post-info")
        ]
        # Keep only headlines dated today; the iterable (assumed to be the combined
        # featured and other headlines) and the .format() on the right-hand side are
        # filled in here so the comparison is string-to-string.
        all_headlines = list(filter(
            lambda headline: moment.date(headline[0]).format("YYYY-M-D") ==
            moment.now().locale("US/Pacific").timezone("US/Eastern").format("YYYY-M-D"),
            featured_headlines + other_headlines))
        # TODO: Replace the 0 with either a sentiment rating or # of tweets
        return {
            "name": "COINDESK_STATS",
            "value": {headline[0]: 0 for headline in all_headlines}
        }