def save_visit(user_data):
    """Record one page visit in the `stats` table and return today's distinct visitors.

    user_data: dict with keys 'ua', 'user', 'referrer', 'page', 'ip', 'mobile'.
    Returns a comma-separated string of today's distinct users (excluding 'Login').
    """
    # NOTE(review): this handle is never closed, and list() splits the line into
    # single characters, so today[0] is one char ('(' if the file holds a printed
    # tuple) and the comparison with the full ISO date below is presumably always
    # True -- looks like a bug; TODO confirm the intended file format.
    today = open('today.txt')
    today = list(today.readline())
    if today[0] != date.isoformat(date.today()):
        with open('today.txt', 'w') as today:
            print((date.isoformat(date.today()), int(time())), file=today)
        today = date.isoformat(date.today()), int(time())
    browser = get_browser(user_data['ua'])
    tables = ('user', 'browser', 'page', 'referrer', 'time', 'ip')  # NOTE(review): unused
    conn, cur = db_con()
    sql = 'INSERT INTO `stats` (`time`, `user`, `referrer`, `browser`, `page`, `ip`, `mobile`) VALUES (%s, %s, %s, %s, %s, %s, %s)'
    # NOTE(review): wrapping the IP in literal quotes is redundant with a
    # parameterized query -- the driver quotes values itself; the stored value
    # ends up with embedded double quotes.
    params = (today[1], user_data['user'], user_data['referrer'], browser[1], user_data['page'], '"' + user_data['ip'] + '"', user_data['mobile'])
    cur.execute(sql, params)
    conn.commit()
    # Return recent online User (this day)
    # NOTE(review): '%Y-%M-%D' -- in MySQL %M is the month *name* and %D the day
    # with a suffix; '%Y-%m-%d' was probably intended. TODO confirm.
    sql = 'SELECT distinct `user` FROM `stats` WHERE FROM_UNIXTIME(`time`, %s) = FROM_UNIXTIME(%s, %s)'
    params = ('%Y-%M-%D', today[1], '%Y-%M-%D')
    cur.execute(sql, params)
    recents = ''
    for r in cur.fetchall():
        if r[0] != 'Login':
            recents += r[0] + ', '
    recents = recents[0:-2]  # drop trailing ", "
    db_close(conn, cur)
    return recents
def __init__(self, szulo, kon):
    """Build the offer-entry form.

    szulo: parent tkinter widget, passed to the superclass.
    kon: data-access object kept for the selector widgets.
    """
    self._kon = kon
    super().__init__(szulo)
    # StringVars backing the free-text entry fields.
    self._erkezett = StringVar()    # date received
    self._hatarido = StringVar()    # submission deadline
    self._megjegyzes = StringVar()  # remark
    # Selector widgets; pack order below determines the on-screen layout.
    self._kontakt_valaszto = Valaszto("ajánlatkérő", self._kontaktszemelyek(), self)
    self._kontakt_valaszto.pack(ipadx=2, ipady=2)
    self._jelleg_valaszto = Valaszto("projekt", self._jellegek(), self)
    self._jelleg_valaszto.pack(ipadx=2, ipady=2)
    megjegyzes = LabelFrame(self, text="megjegyzés")
    Entry(megjegyzes, textvariable=self._megjegyzes, width=40).pack(ipadx=2, ipady=2, side=LEFT)
    megjegyzes.pack(ipadx=2, ipady=2, side=BOTTOM, fill=BOTH)
    self._temafelelos_valaszto = Valaszto("témafelelős", self._kontaktszemelyek(2), self)
    self._temafelelos_valaszto.pack(ipadx=2, ipady=2, side=BOTTOM)
    erkezett = LabelFrame(self, text="érkezett")
    Entry(erkezett, textvariable=self._erkezett, width=10).pack(ipadx=2, ipady=2)
    erkezett.pack(ipadx=2, ipady=2, side=LEFT)
    hatarido = LabelFrame(self, text="leadási határidő")
    Entry(hatarido, textvariable=self._hatarido, width=10).pack(ipadx=2, ipady=2)
    hatarido.pack(ipadx=2, ipady=2, side=LEFT)
    # Defaults: received today, deadline one week from today (ISO strings).
    ma = date.isoformat(date.today())
    egyhetmulva = date.isoformat(date.today() + timedelta(days=7))
    self._erkezett.set(ma)
    self._hatarido.set(egyhetmulva)
def _get_all_months(start_dt, end_dt):
    """
    Build the month-sized windows iterated while downloading quandl data.

    :param start_dt: fetch data from this date ('YYYY-MM-DD')
    :param end_dt: fetch data to this date, can be > today
    :return: list of (window_start, window_end, 'full'|'partial') tuples
    """
    today = datetime.now().date()
    window_start = datetime.strptime(start_dt, '%Y-%m-%d')
    end = datetime.strptime(end_dt, '%Y-%m-%d')
    window_end = window_start + relativedelta(months=1)
    api_dates: List[Tuple[str, str, str]] = []
    if window_start >= end:
        print(
            f'End date:{end} has to be greater than Start date:{window_start}, aborting operation'
        )
        return api_dates
    while window_start < end:
        # Windows touching the future can only hold partial data.
        if window_start.date() > today or window_end.date() > today:
            completion = 'partial'
        else:
            completion = 'full'
        api_dates.append(
            (date.isoformat(window_start), date.isoformat(window_end), completion))
        window_start += relativedelta(months=1)
        window_end += relativedelta(months=1)
    return api_dates
def checksite(message):
    """Render the URL in *message* with headless Chromium and send a screenshot back.

    On any NetworkError from the bot API an error message is sent instead.
    """
    bot.reply_to(message, "Proccessing...")
    url = message.text
    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)

    async def main(url):
        browser = await pyppeteer.launch(args=['--no-sandbox'],
                                         handleSIGINT=False,
                                         handleSIGTERM=False,
                                         handleSIGHUP=False)
        page = await browser.newPage()
        await page.goto(url)
        await page.setViewport(dict(width=1920, height=1080))
        # BUGFIX: time.sleep(2) blocked the event loop; await the async sleep
        # so the page can keep loading during the wait.
        await asyncio.sleep(2)
        await page.screenshot(path="screenshot.png", fullPage=True)
        await browser.close()

    try:
        loop.run_until_complete(main(url))
        # `with` guarantees the file handle is closed even if a send fails
        # (the old code leaked it on exceptions).
        with open("screenshot.png", "rb") as screen:
            bot.send_message(message.chat.id,
                             "And here you go \U0001F609".encode("utf-8"))
            bot.send_document(message.chat.id, screen)
            bot_stats.send_stats(message, statsData, "check",
                                 date.isoformat(date.today()), bot)
    except NetworkError:
        bot.send_message(message.chat.id, "ERROR: Invalid link.")
        bot_stats.send_stats(message, statsData, "invalid",
                             date.isoformat(date.today()), bot)
def update_device_description(self, device_addr, newDescription=""):
    '''
    Set a device's description and refresh its last_update timestamp.

    DEVICE SCHEMA (table `nodes`)
    id - UNIQUE IDENTIFIER
    device_addr - TEXT - unique identifier for the device
    device_id - TEXT - unique identifier for the device
    device_type - TEXT - unique identifier for the device
    subscription_id - TEXT - unique identifier of the subscription
    subscription_timeout - TEXT - timeout of the subscription
    description - TEXT - string value
    added_date - TEXT - ISO8601 string ("YYYY-MM-DD HH:MM:SS.SSS")
    last_update - TEXT - ISO8601 string ("YYYY-MM-DD HH:MM:SS.SSS")
    is_active - BOOL

    Returns 1 on success, 0 on any sqlite error.
    '''
    db = None
    try:
        db = sqlite3.connect(self.db)  # @UndefinedVariable
        cursor = db.cursor()
        # BUGFIX: the old code called date.isoformat("T") and discarded the
        # result, then passed the raw datetime and relied on sqlite3's
        # implicit datetime adapter (deprecated since Python 3.12).  Store an
        # explicit ISO8601 string matching the schema instead.
        now = datetime.now().isoformat(sep=' ')
        cursor.execute('''
            UPDATE nodes SET description= ?, last_update= ? WHERE device_addr = ?
        ''', (newDescription, now, device_addr))
        db.commit()
        return 1
    except sqlite3.Error as e:  # @UndefinedVariable
        self.d_log.error("Error updating device: {0}".format(e.args[0]))
        return 0
    finally:
        if db:
            db.close()
def time_entries(self, date, user=None, span='day'):
    """Fetch TargetProcess time entries for *user* on *date*.

    date: datetime.date anchoring the query.
    user: email of the user; defaults to the configured auth user.
    span: 'day' for a single day, 'week' for [date, date + 5 days).
    Returns the 'Items' list from the API response.
    Raises ValueError for an unknown span (the old code hit an
    UnboundLocalError on the request parameters instead).
    """
    if user is None:
        user = self.conf.auth_user
    if span == 'day':
        payload = {
            'format': 'json',
            'where': "(User.Email eq '%s') and (Date eq '%s')" % (user, date.isoformat())
        }
    elif span == 'week':
        payload = {
            'format': 'json',
            'orderby': 'Date',
            'where': "(User.Email eq '%s') and (Date gte '%s') "
                     "and (Date lt '%s')" % (user, date.isoformat(),
                                             (date + timedelta(5)).isoformat())
        }
    else:
        raise ValueError("span must be 'day' or 'week', got %r" % (span,))
    r = requests.get('https://%s/api/v1/Times' % (self.conf.tp_instance),
                     auth=(self.conf.auth_user, self.conf.auth_password),
                     params=payload)
    return r.json()['Items']
class Params:
    """Optional boolean params which change factory output when True.

    :param exception_service_added: Create a service with an add service exception.
    :param exception_service_removed: Create a service with a remove service exception.
    :param expired_service: Create a service that has expired.
    :param fare_service: Create a service with a fare payment.
    :param future_ride_service: Create a service with future rides enabled.
    :param hub_address: Create a service with a hub.
    :param recurring_ride_service: Create a service with recurring rides enabled.
    :example usage: ServiceFactory.create(expired_service=True)
    """

    # Service-addition exception active from 15 to 22 days in the future.
    exception_service_added: bool = Trait(exceptions=[
        {
            'end_time': f'{date.today() + timedelta(days=22)}T18:00:00.000Z',
            'message': 'Testing Service Addition.',
            'start_time': f'{date.today() + timedelta(days=15)}T09:30:00.000Z',
            'type': 'service_added',
        },
    ], )
    # Service-removal exception active from 3 to 7 days in the future.
    exception_service_removed: bool = Trait(exceptions=[
        {
            'end_time': f'{date.today() + timedelta(days=7)}T18:00:00.000Z',
            'message': 'Testing Service Removal.',
            'start_time': f'{date.today() + timedelta(days=3)}T09:30:00.000Z',
            'type': 'service_removed',
        },
    ], )
    # Service window that ended 5 days ago (started 10 days ago).
    expired_service: bool = Trait(
        end_date=date.isoformat(date.today() - timedelta(days=5)),
        start_date=date.isoformat(date.today() - timedelta(days=10)),
    )
    # Flat-fare paid service.
    fare_service: bool = Trait(
        fare_required=True,
        fare_price=2.0,
    )
    future_ride_service: bool = Trait(in_advance_enabled=True)
    hub_address: bool = Trait(addresses=[{
        'hub': True,
        'pickup': True,
        'dropoff': True,
        'address_id': '7539'
    }], )
    recurring_ride_service: bool = Trait(
        in_advance_enabled=True,
        recurring_rides_enabled=True,
    )
def GetExpensesFromCatInMonthBeforeDate(key_name, date):
    """Return the category's expenses dated in the 30 days before *date*.

    key_name: datastore key name of the Category.
    date: upper (exclusive) bound of the window.
    NOTE(review): despite its name, `end_date` is the *lower* bound.
    """
    cat = Category.get_by_key_name(key_name)
    end_date = date - timedelta(days=30)
    # Parent categories aggregate child expenses; leaves hold their own.
    #   (assumption from the relation names -- TODO confirm against the model)
    if cat.has_subcats is True:
        results = cat.expenses_from_parent.filter('date <', date.isoformat())
    else:
        results = cat.expenses_from_child.filter('date <', date.isoformat())
    return results.filter('date >=', end_date.isoformat())
def to_json(self):
    """Serialise this budget line to a JSON-compatible dict."""
    payload = {
        'id': self.id,
        'duration_start': date.isoformat(self.duration_start),
        'duration_end': date.isoformat(self.duration_end),
        'planned_amount': self.planned_amount,
        'category_id': self.category_id,
    }
    if self.category is not None:
        payload['category_name'] = self.category.name
    else:
        payload['category_name'] = 'no category'
    return payload
def test_build__params__expired_service(self) -> None:
    """Check that an expired service is built from the ServiceFactory."""
    expired_service: Service = ServiceFactory.build(expired_service=True)
    expected_end = date.isoformat(date.today() - timedelta(days=5))
    expected_start = date.isoformat(date.today() - timedelta(days=10))
    assert (expired_service.end_date == expected_end
            and expired_service.start_date == expected_start)
def getLastSunday():
    """Return the ISO date string of the most recent Sunday (today if Sunday).

    BUGFIX: the old code compared the bound method `today.isoweekday` (not its
    result) to 7, which is always False, so Sundays fell through to the final
    else branch and returned "Error".
    """
    today = date.today()
    weekday = today.isoweekday()  # Mon=1 .. Sun=7
    if weekday == 7:
        return date.isoformat(today)
    # Subtracting the ISO weekday lands exactly on the previous Sunday.
    return date.isoformat(date.fromordinal(today.toordinal() - weekday))
def create_row(self):
    """Return this policy's display row as a list of formatted values."""
    inception_iso = date.isoformat(self.inception)
    last_payment_iso = date.isoformat(self.last_payment)  # computed but unused, as in the original
    return [
        self.policy_type,
        str(self.policy_number),
        print_money(self.sum_assured),
        self.premium,
        inception_iso,
        self.nr_years,
        self.years_left,
        print_money(self.payment_left),
        print_money(self.payment_total),
        print_percent(self.payment_percent),
        print_percent(self.profit),
        print_money(self.bonus),
        print_money(self.net),
        print_percent(self.net_percent),
        print_percent(self.y2y),
    ]
def _alapertelmezes(self, event):
    """Reset the offer form to defaults: submitted today, valid for 30 days."""
    today_iso = date.isoformat(date.today())
    valid_until = date.isoformat(date.today() + timedelta(days=30))
    default_offer = Ajanlat(ajanlatiar="",
                            leadva=today_iso,
                            ervenyes=valid_until,
                            megjegyzes="")
    self._ajanlat_urlap.beallit(default_offer)
def printArticles(articlesData, fileName, display=False):
    """Print (title, url, date) triples to stdout, or append them to *fileName*."""
    if display:
        for title, url, published in articlesData:
            print(title, url, published.isoformat())
        return
    with open(fileName, 'a', encoding='utf-8') as out:
        for title, url, published in articlesData:
            out.write("%s, %s, %s\n" % (title, url, published.isoformat()))
def __init__(self, date=None, marvel=False):
    """Derive the index name from *date*, using the marvel pattern when requested."""
    suffix = date.isoformat().replace("-", ".")
    pattern = MARVEL_NAME_PATTERN if marvel else INDEX_NAME_PATTERN
    self.name = pattern.format(suffix)
    self._date = date
    self._exists = None
    self._url = None
    self._deleted = False
    self._stats = None
def __init__(self, date: date):
    """Initialise per-day storage; data/fix file names derive from the ISO date."""
    iso = date.isoformat()
    self.__rows = []
    self.__fix = []
    self.__names = []
    self.data = {}
    self.__date = date
    self.__data_file = iso + '.csv'
    self.__fix_file = iso + '-fix.csv'
    self.init()
def isoformat(date):
    """ISO-format a date or datetime.

    Plain dates format directly; datetimes are normalised to naive UTC and
    truncated to whole seconds.  Falsy input yields None.
    """
    if not date:
        return None
    if isinstance(date, datetime.date) and not isinstance(date, datetime.datetime):
        return date.isoformat()
    value = date
    if value.tzinfo:
        value = value.astimezone(datetime.timezone.utc).replace(tzinfo=None)
    return value.replace(microsecond=0).isoformat("T")
def _reszletek(self, event):
    """Show the editable details of the selected offer.

    Missing dates are defaulted: submission to today, validity to 30 days out.
    event: tkinter event parameter (unused here).
    """
    offer = self._ajanlat_valaszto.elem
    today = date.today()
    if not offer.leadva:
        offer.leadva = date.isoformat(today)
    if not offer.ervenyes:
        offer.ervenyes = date.isoformat(today + timedelta(days=30))
    self._ajanlat_urlap.beallit(offer)
def isoformat(date):
    """Serialise *date* to an ISO-8601 string; None/falsy passes through as None.

    Datetimes are converted to naive UTC with microseconds dropped; plain
    dates are formatted as-is.
    """
    if date:
        is_plain_date = (isinstance(date, datetime.date)
                         and not isinstance(date, datetime.datetime))
        if is_plain_date:
            return date.isoformat()
        if date.tzinfo:
            date = date.astimezone(datetime.timezone.utc).replace(tzinfo=None)
        return date.replace(microsecond=0).isoformat('T')
def verify_date(self):
    """Return True when the day/month/year spinboxes form a valid calendar date.

    Robustness fix: int() on non-numeric spinbox text used to raise a
    ValueError *outside* the try block; the conversions are now inside it, so
    any malformed input simply reports an invalid date.
    """
    try:
        day = int(self.spin_value_day.get())
        month = int(self.spin_digits_month.get())
        year = int(self.spin_digits_year.get())
        date(year, month, day)  # raises ValueError for impossible dates
    except ValueError:
        return False
    return True
def default(self, o):
    # recommendation: always use double quotes with JSON
    """Encode Festival objects (and anything else with __dict__) as tagged dicts."""
    if not isinstance(o, Festival):
        return {f"__{o.__class__.__name__}__": o.__dict__}
    payload = o.__dict__.copy()
    payload["lineups"] = json.dumps(payload["lineups"], cls=LineupEncoder, indent=4)
    payload["start"] = date.isoformat(payload["start"])
    payload["end"] = date.isoformat(payload["end"])
    return {"__Festival__": payload}
def pretty_date(input):
    """Normalise a '/'- or '-'-separated date string to ISO 'YYYY-MM-DD'.

    Accepts year-first input, or month/day-first input whose last field is a
    2- or 4-digit year (2-digit years are assumed to be 20xx).
    """
    separators = {ord('/'): ' ', ord('-'): ' '}
    parts = [int(piece) for piece in input.translate(separators).split()]
    if parts[0] > 99:
        # Year comes first: fields are already in date() order.
        return date.isoformat(date(*parts))
    if parts[2] < 100:
        parts[2] += 2000
    return date.isoformat(date(parts[2], *parts[:2]))
def wait_to_date(date_stamp):
    """Return seconds (plus a 2s cushion) to wait until *date_stamp*, or 0 if past."""
    date = parse(date_stamp)
    print date
    LOGGER.log_message(Message("date: {}".format(date.isoformat()), "INFO"))
    now = datetime.now(tzlocal())
    print now
    LOGGER.log_message(Message("now: {}".format(now.isoformat()), "INFO"))
    # NOTE(review): comparing ISO *strings* only orders correctly when both
    # timestamps carry the same UTC-offset text; comparing the datetime
    # objects directly would be safer if both are guaranteed tz-aware.
    if (date.isoformat() > now.isoformat()):
        wait_seconds = (date - now).total_seconds()
        wait_seconds += 2  # small cushion past the target moment
        print wait_seconds
        return wait_seconds
    else:
        return 0
def main(home_team, road_team, home_rolling=0, road_rolling=0, date=None):
    """Predict the spread and total for a matchup on *date*.

    home_team/road_team: tri-codes resolved via tri_to_team().
    home_rolling/road_rolling: rolling stats injected into the feature rows.
    date: game datetime; defaults to *now*.  BUGFIX: the old default
    `date=datetime.today()` was evaluated once at import time, so every call
    in a long-running process reused a stale date.
    Returns (spread, total).
    """
    if date is None:
        date = datetime.today()
    season = get_season(date)
    train, feats, df = feature_creation.main(range(2011, 2021))
    feats = feats.drop_duplicates(subset=['id'], keep='last')
    feats = feats.tail(30)
    tri_team = tri_to_team()
    home_id = str(season) + tri_team[home_team].lower().replace(" ", "")
    away_id = str(season) + tri_team[road_team].lower().replace(" ", "")
    feats = feats.set_index('id')
    feats_x = feats.loc[[home_id]].reset_index()
    feats_y = feats.loc[[away_id]].reset_index()
    feats_x = clean_df.append_cols(feats_x, "_x")
    feats_y = clean_df.append_cols(feats_y, "_y")
    feats_x['rolling_x'] = home_rolling
    feats_y['rolling_y'] = road_rolling
    # Shared key joins the home and away feature rows.
    game_id = date.isoformat()[:10] + home_id[4:] + away_id[4:]
    feats_x['game_id'] = game_id
    feats_y['game_id'] = game_id
    feats = feats_x.merge(feats_y, on="game_id")
    feats['DayOfSeason_x'] = add_dos(date)
    feats['at_home_x'] = 1
    print(feats)
    spread, total = predict(feats)
    if spread > 0:
        print(home_team + " wins by: " + str(spread))
    else:
        print(road_team + " wins by: " + str(spread))
    return spread, total
def queries():
    """Handle the query form: stash the filters in the session, then show results."""
    form = QueryForm()
    page = request.args.get('page')
    if form.is_submitted():
        datefrom = form.datefrom.data
        dateto = form.dateto.data
        session["datefrom"] = (date.isoformat(datefrom) + " 00:00:00.000000"
                               if datefrom is not None else None)
        session["dateto"] = (date.isoformat(dateto) + " 00:00:00.000000"
                             if dateto is not None else None)
        session["country"] = form.country.data
        session["textq"] = form.text.data
        return redirect(url_for('main.results'))
    return render_template('queries.html', form=form)
def update_new_balance(transactions, date):
    """Recompute and post the closing balance for *date*.

    Starts from the most recent balance before *date* (0 when none exists)
    and applies each transaction as a credit or debit.
    """
    conn = get_authenticated_connection()
    response = conn.balances.get(limit=1, date__lt=date.isoformat())
    results = response.get("results")
    balance = results[0]["closing_balance"] if results else 0
    for entry in transactions:
        if entry["category"] == "credit":
            balance += entry["amount"]
        elif entry["category"] == "debit":
            balance -= entry["amount"]
    conn.balances.post({"date": date.isoformat(), "closing_balance": balance})
def _parse_date(self, date_str): date_format = '%d %B %Y' # https://docs.python.org/3/library/datetime.html#strftime-strptime-behavior def parse_start_date(start_date_str, end_date): try: return datetime.strptime(start_date_str, date_format) except ValueError: pass try: # let's try some heuristics :) start_date_with_year = '{0} {1:%Y}'.format( start_date_str, end_date) return datetime.strptime(start_date_with_year, date_format) except ValueError: pass try: start_date_with_month_and_year = '{0} {1:%B} {1:%Y}'.format( start_date_str, end_date) return datetime.strptime(start_date_with_month_and_year, date_format) except ValueError: pass self.logger.error('Cannot understand date string: %s', start_date_str) return None date_list = date_str.split('–') if len(date_list) > 2: self.logger.error('Cannot understand date string: %s', date_str) return None, None try: end_date = datetime.strptime(date_list[-1].strip(), date_format) except ValueError: self.logger.error('Cannot understand date string: %s', date_list[-1]) return None, None if len(date_list) == 1: start_date = end_date else: start_date = parse_start_date(date_list[0].strip(), end_date) return (date.isoformat(start_date.date()) if start_date else None, date.isoformat(end_date.date()) if end_date else None)
def getOutstandingBugs(bzagent,someday):
    """Fetch UNCONFIRMED Untriaged bugs created on *someday* via the Bugzilla REST API.

    bzagent: unused here -- queries go straight to the REST endpoint.
    someday: date object; used as both ends of the creation window.
    Returns a list of bug dicts.
    """
    date_from = str(date.isoformat(someday)).encode("utf8")
    date_to = str(date.isoformat(someday)).encode("utf8")
    # NOTE: The reason it's laid out like this is because bztools doesn't
    # seem to work with the "product=foo,bar" syntax, despite what the docs say
    # NOTE(review): the 'core_untriaged' set queries product Toolkit and
    # 'toolkit_untriaged' queries Core -- the keys look swapped.
    option_sets = {
        'firefox_untriaged': {
            'changed_field':'[Bug creation]',
            'changed_after':date_from,
            'changed_before': date_to,
            'product': 'Firefox',
            'component':'Untriaged',
            'status': 'UNCONFIRMED'},
        'core_untriaged': {
            'changed_field':'[Bug creation]',
            'changed_after':date_from,
            'changed_before': date_to,
            'product': 'Toolkit',
            'component':'Untriaged',
            'status': 'UNCONFIRMED'},
        'toolkit_untriaged': {
            'changed_field':'[Bug creation]',
            'changed_after':date_from,
            'changed_before': date_to,
            'product': 'Core',
            'component':'Untriaged',
            'status': 'UNCONFIRMED'},
    }
    buglist = list()
    # Get the bugs from the bugzilla API
    for opt in option_sets:
        bug_url = ("https://bugzilla.mozilla.org/rest/bug?" + urllib.urlencode(option_sets[opt]) )
        # Undo the urlencode transformations the endpoint expects literally.
        bug_url = bug_url.replace("ug+creat","ug%20creat").replace("%5B","[").replace("%5D","]")
        print bug_url
        resp = urllib.urlopen(bug_url)
        bugdict = json.loads(resp.read())
        for c in bugdict['bugs']:
            buglist.append(c)
    #print date_to + " - " + str(len(buglist))
    return buglist
def isoformat(date):
    """Render a (possibly tz-aware) datetime as a naive-UTC ISO string, whole seconds.

    Falsy input returns None.  NOTE: unlike the sibling helpers, this variant
    does not special-case plain dates.
    """
    if not date:
        return None
    normalized = date
    if normalized.tzinfo:
        normalized = normalized.astimezone(datetime.timezone.utc).replace(tzinfo=None)
    return normalized.replace(microsecond=0).isoformat('T')
def landsat(self, report_id, id):
    """Return (JSON) metadata of the most recent LANDSAT7 scene covering the cell."""
    r = Report.get(Key(report_id))
    z, x, y = Cell.cell_id(id)
    cell = Cell.get_or_default(r, x, y, z)
    bounds = cell.bounds(amazon_bounds)
    # Reorders the corner pairs into the comma-separated form EELandsat.list
    # expects -- presumably "min_lat,min_lon,max_lat,max_lon"; TODO confirm.
    bounds = "%f,%f,%f,%f" % (bounds[1][1], bounds[1][0], bounds[0][1], bounds[0][0])
    ee = EELandsat(LANDSAT7)
    d = ee.list(bounds=bounds)
    data = {}
    if len(d) >= 1:
        x = d[-1]  # NOTE(review): shadows the tile x coordinate from above
        # Scene id layout after the 3-char prefix: PPPRRRYYYYDDD
        # (path, row, year, julian day).
        img_info = x.split("/")[2][3:]
        path = img_info[:3]
        row = img_info[3:6]
        year = int(img_info[6:10])
        julian_date = img_info[10:13]
        date = date_from_julian(int(julian_date), year)
        data = {
            "info": img_info,
            "path": path,
            "row": row,
            "year": year,
            "timestamp": timestamp(date),
            "date": date.isoformat(),
        }
    return Response(json.dumps(data), mimetype="application/json")
def feed_item(self, added_date=None):
    """Build a feed item for this user's playlist entry on *added_date*.

    added_date: ISO date string; defaults to today.  BUGFIX: the old default
    `added_date=date.isoformat(date.today())` was evaluated once at import
    time, freezing the date in long-running processes; it is now resolved at
    call time.
    Returns the feed dict, or None when there is no playlist entry.
    """
    if added_date is None:
        added_date = date.isoformat(date.today())
    playlist_entry = self.playlist_entry(added_date=added_date)
    feed_item = None
    if playlist_entry:
        feed_item = {'spotify_username': self._username}
        feed_item.update(playlist_entry.feed_item())
    return feed_item
def __init__(self, parent = None):
    """
    Constructor: set up the main window, channel-test grid and device image.
    """
    QMainWindow.__init__(self, parent)
    self.setupUi(self)
    self.label_version.setText(SoftwareVersion)
    #Update the Date
    i = datetime.date.today()
    ProductDate = date.isoformat(i)
    self.lineEdit_Today.setText(ProductDate)
    self.tabWidget.setEnabled(False)
    self.setWindowIcon(PyQt4.QtGui.QIcon("Data/Vaya.png"))
    #Set the channel number from 1 to 512 at Channel Test Tab
    for r in range(32):
        for c in range(16):
            t = r*16 + c + 1  # 1-based channel number, row-major
            item = PyQt4.QtGui.QTableWidgetItem(str(t))
            self.tableWidget_channeltest.setItem(r, c, item)
    #Show the SJP device picture but not connect
    pixmap = PyQt4.QtGui.QPixmap( 'Data//sjp_off.jpg' )
    PP = PyQt4.QtGui.QImage( pixmap )  # NOTE(review): unused
    self.label_SJP.setPixmap(pixmap)
    # Record History -- `f` is a module-level log file handle opened elsewhere.
    f.write(time.strftime('%Y/%m/%d_%H:%M:%S',time.localtime(time.time()))+' Software Startup. '+'\n')
    self.ConnectDeviceButton.setEnabled(True)
def _to_tuple(self, date): '''Convert the given date to the tuple according to what period type we want''' if self.granularity == 'days': return tuple(date.isoformat()[:10].split('-')) elif self.granularity == 'weeks': return date.isocalendar()[:2]
def feed_item(self):
    """Export this playlist entry as a feed dict (date, track, album, artists)."""
    track = self.track
    return {
        'date_posted': date.isoformat(self.added_date),
        'track': track.export_for_feed(),
        'album': track.album.export_for_feed(),
        'artists': [artist.export_for_feed() for artist in track.artists],
    }
def parse(self, file_path):
    """Parse a saved WHO outbreak-archive page into a list of entry dicts.

    file_path: path of the downloaded HTML file; its basename (minus
    extension) recreates the source URL via WhoFiles.url.
    Returns a list of {'year','date','link','disease','where'} dicts.
    """
    with open(file_path) as f:
        html = f.read()
    page = lxml.html.fromstring( html )
    l = []
    url = WhoFiles.url.format(os.path.basename(file_path).rpartition('.')[0])
    lis = page.cssselect('.auto_archive li')
    for li in lis:
        href = li.cssselect('a')[0]
        link = urlparse.urljoin( url, href.attrib.get('href') )
        year,date = self.parse_date( href.text_content() )
        info = li.cssselect('.link_info')[0].text_content()
        disease, where = self.parse_item(info)
        disease = disease.strip()
        where = where.strip()
        # Trim any trailing qualifier after a hyphen from the location.
        # NOTE(review): the u'-' check repeats the '-' check above.
        if '-' in where:
            where = where[0:where.find('-')].strip()
        if u'-' in where:
            where = where[0:where.find(u'-')].strip()
        for x in [',',';',':',u'\u2013' ]:
            if x in where:
                where = where[0:where.find(x)].strip()
                # NOTE(review): `where` was just truncated, so where.find(x)
                # is now -1 and this prepends the truncated location to the
                # disease name -- looks unintended; TODO confirm intent.
                disease = where[where.find(x)+1:].strip() + ' ' + disease
        d = { 'year': year, 'date':date.isoformat(), 'link':link, 'disease': disease.title(), 'where':where.title() }
        l.append(d)
    return l
def timestamp():
    """
    Current UTC date/time to minute resolution, formatted 'YYYY-MM-DDTHHMM'.
    """
    # datetime.utcnow() is deprecated (Python 3.12); an aware UTC now yields
    # the same wall-clock fields.  Local import keeps this block self-contained.
    from datetime import timezone
    now = datetime.now(timezone.utc)
    return "%sT%s" % (date.isoformat(now.date()), now.time().strftime("%H%M"))
def to_es(self, value):
    """Serialise a date/datetime for Elasticsearch; other values yield None.

    Datetimes are truncated to whole seconds before formatting.
    BUGFIX: the `date` branch called the unbound `date.isoformat()` with no
    argument (a TypeError at runtime) instead of formatting *value*.
    """
    if isinstance(value, datetime):
        if value.microsecond:
            value = value.replace(microsecond=0)
        return value.isoformat()
    elif isinstance(value, date):
        return value.isoformat()
def version_header(filename, version_number):
    """Return the standard Scikit-HEP data-file header line for *filename*."""
    base_name = os.path.basename(filename)
    VERSION = version_number  # version of CSV files
    today_iso = date.isoformat(date.today())
    template = "# (c) Scikit-HEP project - Particle package data file - {fname} - version {version} - {date}\n"
    return template.format(fname=base_name, version=VERSION, date=today_iso)
def change_request_as_source(id_, rows, ref_ids):
    """Build a ('misc', fields) source entry for an ISO 639-3 change request.

    id_: change-request number.
    rows: per-identifier rows of the request (share url/pdf/year metadata).
    ref_ids: optional mapping of request id to a glottolog_ref_id.
    Returns (id_, ('misc', fields)).
    """
    title = "Change Request Number {0}: ".format(id_)
    title += ", ".join(
        "{0} {1} [{2}]".format(r.Status.lower(), r.Change_Type.lower(), r.Affected_Identifier)
        for r in sorted(
            rows,
            key=lambda cr: (ChangeRequest.CHANGE_TYPES[cr.Change_Type], cr.Affected_Identifier)))
    # All rows of one request are expected to share a single effective date.
    date = None
    for row in rows:
        if row.Effective_Date:
            if date:
                assert date == row.Effective_Date  # pragma: no cover
            else:
                date = row.Effective_Date
    if date:
        title += ' ({0})'.format(date.isoformat())
    fields = {
        'number': id_,
        'title': title,
        'howpublished': rows[0].url,
        'address': "Dallas",
        'author': "ISO 639-3 Registration Authority",
        'publisher': "SIL International",
        'url': rows[0].pdf,
        'year': rows[0].year,
        'hhtype': "overview",
        'lgcode': ', '.join(
            "{0} [{1}]".format(r.Reference_Name, r.Affected_Identifier)
            for r in rows),
        'src': "iso6393",
    }
    if id_ in ref_ids and ref_ids[id_]:
        fields['glottolog_ref_id'] = ref_ids[id_]
    return id_, ('misc', fields)
def run(self):
    """Worker thread: drain urlqueue, parse each project page, insert into MySQL."""
    while urlqueue.empty()==False:
        job = urlqueue.get()
        today = datetime.date.today()
        text = urlopen(job).read()
        print self.name
        # Source sites use different encodings.
        # NOTE(review): `soup` stays unbound for any other thread name.
        if self.name == "most":
            soup = BeautifulSoup(text.decode('gb2312','ignore'))
        elif self.name == "nsfc":
            soup = BeautifulSoup(text,fromEncoding="gb18030")
        now_time = date.isoformat(date.today())
        if job:
            project = self.ProjectParser.projectparser(soup,job,now_time)
            if project:
                print project['pubdate']
                # classification
                nc = ProjectClassify()
                project['sort'] = nc.classify(project['title'])
                if project['sort'] == 'others':
                    pass
                else:
                    project_time = project['pubdate']
                    # refresh the news every hour
                    # SECURITY NOTE(review): SQL built by %-interpolation of
                    # scraped text -- injection/quoting hazard; use a
                    # parameterized query instead.
                    stmt = 'insert into project_info(ProjectTitle,ProjectContent,ProjectDate,ProjectSource,ProjectLogo,ProjectSort) values(\'%s\',\'%s\',\'%s\',\'%s\',\'%s\',\'%s\')' % (project['title'],project['content'],project['pubdate'],project['source'],project['logo'],project['sort'])
                    projectDAO(stmt)
                    print "insert success %s" % (job)
def gerarJSON(cidade):
    """Fit the growth model to a city's COVID series and emit JSON-ish strings.

    cidade: object with .dia (ISO date strings), .confirmed, .confirmed_in_date,
    .deaths, .deaths_in_date arrays.
    Returns a list of string fragments forming the output document.
    NOTE(review): the hand-built JSON is malformed (unquoted date values, a
    stray '{{' after "deaths", a missing quote in '"deaths:') -- consider
    building dicts and using json.dumps instead.
    """
    xdados = np.arange(1, len(cidade.dia)+1)  # x axis for the observed data
    diamax = len(cidade.dia)+100
    xmodelo = np.arange(1,diamax)  # x axis for the model fit
    x = [date.fromisoformat(cidade.dia[0])]  # x tick label dates
    for i in range(1, diamax):
        x.append(x[0]+timedelta(days=i))
    # CONFIRMED CASES
    dadosConfirmed = cidade.confirmed  # cumulative data
    poptConfirmed, _ = optimize.curve_fit(modelo, xdados, dadosConfirmed, maxfev=150000)  # model fit
    yConfirmed = modelo(xmodelo, *poptConfirmed)  # fitted values
    sigmaConfirmed = (np.mean((dadosConfirmed-modelo(xdados, *poptConfirmed))**2))**0.5  # RMS deviation of the data
    # CASES PER DAY
    dadosConfirmedInDate = cidade.confirmed_in_date  # daily data
    yConfirmedInDate = derivada(xmodelo, *poptConfirmed)  # daily model values
    sigmaConfirmedInDate = (np.mean((dadosConfirmedInDate-derivada(xdados, *poptConfirmed))**2))**0.5  # RMS deviation of the data
    # CONFIRMED DEATHS
    dadosDeaths = cidade.deaths  # cumulative data
    poptDeaths, _ = optimize.curve_fit(modelo, xdados, dadosDeaths, maxfev=150000)  # model fit
    yDeaths = modelo(xmodelo, *poptDeaths)  # fitted values
    sigmaDeaths = (np.mean((dadosDeaths-modelo(xdados, *poptDeaths))**2))**0.5  # RMS deviation of the data
    # DEATHS PER DAY
    dadosDeathsInDate = cidade.deaths_in_date  # daily data
    yDeathsInDate = derivada(xmodelo, *poptDeaths)  # daily model values
    sigmaDeathsInDate = (np.mean((dadosDeathsInDate-derivada(xdados, *poptDeaths))**2))**0.5  # RMS deviation of the data
    output = ['"fit":{"cases":{"a":' + str(poptConfirmed[0]) + ', "b":' + str(poptConfirmed[1]) + ', "c": ' + str(poptConfirmed[2]) + ', "d":' + str(poptConfirmed[3]) + '}, "deaths":{{"a":' + str(poptDeaths[0]) + ', "b":' + str(poptDeaths[1]) + ', "c": ' + str(poptDeaths[2]) + ', "d":' + str(poptDeaths[3]) + '}, "dados":']
    for dia in range(len(x)-1):
        output.append('{"date": ' + date.isoformat(x[dia]) + ':{"confirmed": ' + str(yConfirmed[dia]) + ', "confirmed_in_date": ' + str(yConfirmedInDate[dia]) + ', "deaths: ' + str(yDeaths[dia]) + ', "deaths_in_date": ' + str(yDeathsInDate[dia]) + '}')
    output.append('}')
    return output
def start(message):
    """Greet the user and log the /start event to the bot statistics."""
    greeting = "🌀 You can create your own z̮͈̍͒͆ͅạ͔̠̍̆̿l̖͚͕͂̄͝ĝ͔͉̠̭̍͒͗o̬͉̓͞ ̟̈́t̰̬̜̳̽̄͊̍ex̗͒ť̪ by easily entering your text."
    bot.send_message(message.chat.id, greeting)
    bot_stats.send_stats(message, statsData, "start", date.isoformat(date.today()), bot)
def _add_to_cal(cal_data, event, date): date_str = date.isoformat() if date_str not in cal_data: cal_data[date_str] = [event] else: cal_data[date_str].append(event) return cal_data
def landsat(self, report_id, id):
    """Return (JSON) metadata of the most recent LANDSAT7 scene covering the cell."""
    r = Report.get(Key(report_id))
    z, x, y = Cell.cell_id(id)
    cell = Cell.get_or_default(r, x, y, z)
    bounds = cell.bounds(amazon_bounds)
    # Reorders the corner pairs into the comma-separated form EELandsat.list
    # expects -- presumably "min_lat,min_lon,max_lat,max_lon"; TODO confirm.
    bounds = "%f,%f,%f,%f" % (bounds[1][1], bounds[1][0], bounds[0][1], bounds[0][0])
    ee = EELandsat(LANDSAT7)
    d = ee.list(bounds=bounds)
    data = {}
    if len(d) >= 1:
        x = d[-1]  # NOTE(review): shadows the tile x coordinate from above
        # Scene id layout after the 3-char prefix: PPPRRRYYYYDDD
        # (path, row, year, julian day).
        img_info = x.split('/')[2][3:]
        path = img_info[:3]
        row = img_info[3:6]
        year = int(img_info[6: 10])
        julian_date = img_info[10: 13]
        date = date_from_julian(int(julian_date), year)
        data = {
            'info': img_info,
            'path': path,
            'row': row,
            'year': year,
            'timestamp': timestamp(date),
            'date': date.isoformat()
        }
    return Response(json.dumps(data), mimetype='application/json')
def info_getter(driver, date):
    """Scrape ranked revenue rows from the grid via selenium and insert into MongoDB.

    driver: selenium WebDriver already on the revenue page.
    date: date object stored (as ISO string) with each row.
    Iterates rows from tr[2] until a cell XPath stops resolving.
    """
    client = MongoClient()
    DB = client.Mobile_revenue
    Collection = DB.revenue
    i = 2  # data rows start at tr[2] (tr[1] is presumably the header -- TODO confirm)
    while True:
        try:
            rank = driver.find_element_by_xpath(
                '//*[@id="__t_grid_0"]/tbody/tr[' + str(i) + ']/td[1]')
            game = driver.find_element_by_xpath(
                '//*[@id="__t_grid_0"]/tbody/tr[' + str(i) + ']/td[2]')
            revenue = driver.find_element_by_xpath(
                '//*[@id="__t_grid_0"]/tbody/tr[' + str(i) + ']/td[3]')
            paid_player = driver.find_element_by_xpath(
                '//*[@id="__t_grid_0"]/tbody/tr[' + str(i) + ']/td[4]')
            paid_count = driver.find_element_by_xpath(
                '//*[@id="__t_grid_0"]/tbody/tr[' + str(i) + ']/td[5]')
            arppu = driver.find_element_by_xpath(
                '//*[@id="__t_grid_0"]/tbody/tr[' + str(i) + ']/td[6]')
            post = {
                'rank': int(rank.text),
                'game': game.text,
                'revenue': float(revenue.text),
                'paid_player': int(paid_player.text),
                'paid_count': int(paid_count.text),
                'arppu': float(arppu.text),
                'date': date.isoformat()
            }
            Collection.insert_one(post)
            print(str(date) + ' data inserted')
            i += 1
        except NoSuchElementException:
            # Ran past the last populated row.
            break
def lambda_handler(event, context):
    """Record the calling user's logout time (US/Pacific) in the users table.

    Resolves the username from the bearer token via the authorize endpoint,
    then updates the matching item's lastLogout.  Raises on unknown users.
    """
    # check admin status
    token = event['params']['header']['Authorization'][7:]  # strip "Bearer "
    url = "https://tqud77gtrh.execute-api.us-west-2.amazonaws.com/default/authorize/?token=" + token
    headers = {"Authorization":event['params']['header']['Authorization']}
    response = requests.get(url, headers=headers)
    json_data = json.loads(response.text)
    username = json_data.get('username')
    # set pacific time
    date_format='%m/%d/%Y %H:%M:%S %Z'  # NOTE(review): unused
    date = datetime.now(tz=pytz.utc)
    date = date.astimezone(timezone('US/Pacific'))
    lastLogout = str(date.isoformat())
    # NOTE(review): full table scan on every logout -- a keyed query on email
    # would scale better.
    response = table.scan()
    for i in response['Items']:
        if username == i['email']:
            i['lastLogout'] = lastLogout
            table.put_item(Item = i)
            return {"Status":"200"}
    raise Exception({ "errorType" : "Exception", "httpStatus": 400 })
def get_events(day, service):
    """Fetch the day's Google Calendar events and announce them (Greek TTS).

    day: date object; the query spans that day's local midnight to midnight.
    service: authorised Calendar API service object.
    """
    date = datetime.datetime.combine(day, datetime.datetime.min.time())
    end_date = datetime.datetime.combine(day, datetime.datetime.max.time())
    utc = pytz.UTC
    # NOTE(review): astimezone() on a naive datetime assumes the *local* zone;
    # confirm that is the intended interpretation of `day`.
    date = date.astimezone(utc)
    end_date = end_date.astimezone(utc)
    events_result = service.events().list(calendarId='primary', timeMin=date.isoformat(), timeMax=end_date.isoformat(), singleEvents=True, orderBy='startTime').execute()
    events = events_result.get('items', [])
    if not events:
        speak('Δεν έχεις τίποτα προγραμματισμένο')
    else:
        speak(f"Έχεις {len(events)} συμβάντα.")
        for event in events:
            start = event['start'].get('dateTime', event['start'].get('date'))
            print(start, event['summary'])
            # NOTE(review): all-day events have a date-only `start` with no
            # 'T', so this split would raise IndexError.
            start_time = str(start.split("T")[1].split("+")[0])
            if int(start_time.split(":")[0]) < 12:
                start_time = start_time + "προ μεσημβρίαν"
            else:
                start_time = str(int(start_time.split(":")[0])-12) +" "+ start_time.split(":")[1]
                start_time = start_time + "μετά μεσημβρίαν"
            speak(event["summary"] + " στις " + start_time)
def get_member_details(self, id):
    """Fetch a Gmail message's Date/From headers for a member.

    id: Gmail message id.
    Returns {'time': iso_datetime, 'email': address}, '' when the subject
    does not contain self.subject, or None on an HttpError.
    """
    member = {}
    try:
        message = self.service.users().messages().get(
            userId='me', id=id, format='metadata').execute()
        header_data = message["payload"]["headers"]
        # Accept the message only when its Subject contains the expected text.
        correct_subject = False
        for data in header_data:
            if 'subject' == data['name'].lower() and self.subject in data['value']:
                correct_subject = True
        if not correct_subject:
            return ''
        for data in header_data:
            if "Date" == data["name"]:
                date = parsedate_to_datetime(data["value"])
                member["time"] = date.isoformat()
            if "From" == data["name"]:
                print(data["value"])
                email_id = data["value"]
                # Extract the bare address from a "Name <addr>" header.
                if '<' in email_id:
                    start = email_id.find('<')
                    end = email_id.find('>')
                    email_id = email_id[start + 1:end]
                member["email"] = email_id
        print(member)
        return member
    except errors.HttpError as error:
        print('An error occurred: %s' % error)
def janelaHistorico(self):
    """Open a scrollable Tk window listing the user's transactions grouped by day."""
    janelaHistorico = Tk()
    janelaHistorico.title("Histórico de transações")
    yscrollbar = Scrollbar(janelaHistorico)
    yscrollbar.pack(side=RIGHT, fill=Y)
    lista_transacoes = Listbox(janelaHistorico, width=73, height=30)
    lista_transacoes.place(x=0, y=0)
    # Keys of self.usuario.transacoes appear to be POSIX timestamps grouping
    # transactions per day (inferred from date.fromtimestamp -- TODO confirm).
    for key in self.usuario.transacoes:
        for transacao in self.usuario.transacoes[key]:
            data = date.isoformat(date.fromtimestamp(key))
            lista_transacoes.insert(END, " DATA: " + data)
            remetente = transacao['remetente']
            lista_transacoes.insert(END, " REMETENTE: " + remetente)
            destinatario = transacao['destinatario']
            lista_transacoes.insert(END, " DESTINATÁRIO: " + destinatario)
            quantia = str(transacao['quantia'])
            lista_transacoes.insert(END, " QUANTIA: " + quantia)
            lista_transacoes.insert(END, "\n")
    lista_transacoes.config(yscrollcommand=yscrollbar.set)
    yscrollbar.config(command=lista_transacoes.yview)
    janelaHistorico.resizable(0, 0)
    janelaHistorico.geometry("601x450+200+100")
    janelaHistorico.mainloop()
def landsat(self, report_id, id):
    """Return JSON metadata for the newest LANDSAT7 scene covering the requested cell."""
    report = Report.get(Key(report_id))
    z, x, y = Cell.cell_id(id)
    cell = Cell.get_or_default(report, x, y, z)
    cell_bounds = cell.bounds(amazon_bounds)
    bounds = '%f,%f,%f,%f' % (cell_bounds[1][1], cell_bounds[1][0],
                              cell_bounds[0][1], cell_bounds[0][0])
    scenes = EELandsat(LANDSAT7).list(bounds=bounds)
    data = {}
    if len(scenes) >= 1:
        # Scene id after the 3-char prefix: path(3) row(3) year(4) julian day(3).
        img_info = scenes[-1].split('/')[2][3:]
        year = int(img_info[6:10])
        julian_date = img_info[10:13]
        scene_date = date_from_julian(int(julian_date), year)
        data = {
            'info': img_info,
            'path': img_info[:3],
            'row': img_info[3:6],
            'year': year,
            'timestamp': timestamp(scene_date),
            'date': scene_date.isoformat()
        }
    return Response(json.dumps(data), mimetype='application/json')
def getNavWords(date):
    """Return the navigation words surrounding ``date``.

    Args:
        date: a datetime.date whose isoformat() bounds the queries.
            (Shadows the datetime ``date`` import inside this function;
            name kept for caller compatibility.)

    Returns:
        dict with keys "first", "prev", "next", "latest"; each value is the
        corresponding DB row, or False when that navigation slot does not
        apply (e.g. no previous word, or prev/next already IS the
        first/latest word).
    """
    first_word = False
    prev_word = False
    next_word = False
    latest_word = False
    # Newest word published strictly before `date`.
    # NOTE(review): the WHERE clause is built by concatenation; safe only
    # because date.isoformat() cannot contain quotes — db.select's `vars`
    # parameter would be more robust.
    prev_result = db.select(
        "palabros",
        what="*, UNIX_TIMESTAMP(publish) AS unix_publish",
        where='publish < "' + date.isoformat() + '"',
        order="publish DESC",
        limit=1,
    )
    if prev_result:
        prev_word = prev_result[0]
        first_result = db.select(
            "palabros",
            what="*, UNIX_TIMESTAMP(publish) AS unix_publish",
            order="publish ASC",
            limit=1,
        )[0]
        # Only expose "first" when it is a different row than "prev".
        if first_result["publish"] != prev_word["publish"]:
            first_word = first_result
    # Oldest word published after `date`, excluding unpublished future rows.
    next_result = db.select(
        "palabros",
        what="*, UNIX_TIMESTAMP(publish) AS unix_publish",
        where='publish > "' + date.isoformat() + '" AND publish <= DATE(NOW())',
        order="publish ASC",
        limit=1,
    )
    if next_result:
        next_word = next_result[0]
    latest_result = db.select(
        "palabros",
        what="*, UNIX_TIMESTAMP(publish) AS unix_publish",
        where="publish <= DATE(NOW())",
        order="publish DESC",
        limit=1,
    )[0]
    # BUG FIX: the original evaluated next_word["publish"] unconditionally,
    # raising TypeError ('bool' object is not subscriptable) whenever there
    # was no next word. When there is no next word, the latest word is
    # always a valid navigation target.
    if not next_word or latest_result["publish"] != next_word["publish"]:
        latest_word = latest_result
    return {"first": first_word, "prev": prev_word, "next": next_word, "latest": latest_word}
def update(self, date, pressure_list):
    """Store ``pressure_list`` for a given day, creating or overwriting it.

    Args:
        date: date object identifying the day to update.
        pressure_list: list of two pressure readings for that day.
    """
    key = date.isoformat()
    self.table[key] = pressure_list
def poll_all_inque():
    """Return all packaging jobs still 'In Queue' from the last three days.

    Returns:
        list of job dicts from the API's 'objects' field.
    """
    # BUG FIX: the original called date.isoformat(datetime_obj) — invoking
    # the unbound date-class method on a datetime instance, which only
    # worked because datetime subclasses date. Make the intent explicit:
    # take the calendar date and format it as 'YYYY-MM-DD'.
    date_floor = (datetime.now() - timedelta(days=3)).date().isoformat()
    payload = {"start_date": date_floor, "status": "In Queue"}
    # NOTE(review): a GET with a JSON body is unusual — confirm the API
    # actually reads it; many servers ignore GET bodies.
    r = client.get("http://ovspackager.indemand.com/api/jobs/",
                   data=json.dumps(payload))
    return r.json()['objects']  # list of dicts
def apply_allowance(self, date):
    """Record the weekly allowance for ``date`` as a new transaction.

    Args:
        date: date object stamping the allowance; also used to look up the
            allowance amount in effect on that day.
    """
    amount = self.weekly_allowance_at_date(date)
    note = ALLOWANCE_COMMENT.format(date.isoformat())
    allowance = Transaction(
        amount=amount,
        comment=note,
        kind=ALLOWANCE_NAME,
        time_stamp=date,
    )
    self.transactions.append(allowance)
def remove_device(self, device_id):
    """Delete the row for ``device_id`` from the ``nodes`` table.

    Args:
        device_id: identifier of the node to remove.
    """
    db = sqlite3.connect(self.db)  # @UndefinedVariable
    try:
        cursor = db.cursor()
        # Parameterized query — safe against SQL injection.
        cursor.execute('''
        DELETE FROM nodes WHERE device_id = ?
        ''', (device_id,))
        db.commit()
    finally:
        # FIX: the original leaked the connection when execute/commit
        # raised; close unconditionally. Also removed dead code that
        # computed datetime.now().isoformat("T") and discarded the result.
        db.close()
def add_device(self, device_id, subscription_id, subscription_timeout, description, is_active):
    """Insert a new node row with creation/update timestamps set to now.

    Args:
        device_id: identifier of the node.
        subscription_id: current subscription identifier.
        subscription_timeout: subscription timeout value.
        description: human-readable node description.
        is_active: active flag stored as-is.
    """
    db = sqlite3.connect(self.db)  # @UndefinedVariable
    try:
        cursor = db.cursor()
        # BUG FIX: the original called date.isoformat("T") and discarded
        # the return value, so a raw datetime object (not the intended
        # 'YYYY-MM-DDTHH:MM:SS' string) was bound into the row. Bind the
        # ISO string explicitly.
        timestamp = datetime.now().isoformat("T")
        cursor.execute('''
        INSERT INTO nodes (device_id, subscription_id, subscription_timeout, description, added_date, last_update, is_active)
        VALUES(?,?,?,?,?,?,?)
        ''', (device_id, subscription_id, subscription_timeout, description,
              timestamp, timestamp, is_active))
        db.commit()
    finally:
        # FIX: close the connection even when execute/commit raises.
        db.close()
def summarizeSection(url, htmlFile, genusNames, speciesNames):
    """Build a human-readable summary of a scraped section.

    Args:
        url: source URL of the section.
        htmlFile: path of the locally stored copy.
        genusNames: sequence of extracted genus names.
        speciesNames: sequence of extracted species names.

    Returns:
        Multi-line summary string ending with a trailing newline.
    """
    lines = [
        "Date: " + date.isoformat(datetime.today()),
        "Section URL: " + url,
        "Local copy stored as: " + htmlFile,
        "Number of genus names extracted: " + str(len(genusNames)),
        "Number of species names extracted: " + str(len(speciesNames)),
    ]
    return "\n".join(lines) + "\n"
def send_status_mail(self, statistics_summary):
    """Email the upgrade-status summary to the configured recipients.

    Args:
        statistics_summary: preformatted body text for the status email.

    Returns:
        -1 when no recipients are configured; None otherwise.
    """
    # Guard clause: nothing to do without configured recipients.
    if "status_recipients" not in settings:
        E(" Could not send status email, no recipients set!")
        return -1

    to_list = settings["status_recipients"].split()
    today = date.isoformat(date.today())
    # Layer mode prefixes the subject with the layer name.
    if self.opts['layer_mode'] == 'yes':
        subject = "[AUH] Upgrade status %s: %s" % (self.opts['layer_name'], today)
    else:
        subject = "[AUH] Upgrade status: " + today

    if self.statistics.total_attempted:
        self.email_handler.send_email(to_list, subject, statistics_summary)
    else:
        W("No recipes attempted, not sending status mail!")
def save_fig():
    """Save the current matplotlib figure to figs/<YYYY-MM-DD>.

    Resizes the current figure to 24x13.2 inches and writes it at 128 dpi.
    The filename has no extension, so matplotlib's default savefig format
    applies.
    """
    path = 'figs'
    # Robustness fix: create the output directory if missing instead of
    # letting savefig fail with FileNotFoundError.
    os.makedirs(path, exist_ok=True)
    filename = date.today().isoformat()
    fig = plt.gcf()
    fig.set_size_inches(24, 13.2)
    fig.savefig(os.path.join(path, filename), dpi=128)