async def main():
    """Exercise the RateLimiter cooldown/update cycle against a fake response."""
    from types import SimpleNamespace

    limiter = RateLimiter()
    # `self` is assumed to come from the enclosing test method's scope.
    self.assertIsInstance(limiter, RateLimiter)
    # Hack a fake response object.  A bare object() cannot take attribute
    # assignment, so use SimpleNamespace (the original `object()` raised
    # AttributeError on `response.headers = ...`).
    response = SimpleNamespace()
    response.headers = {
        'Date': format_datetime(datetime.now(timezone.utc)),
        'X-RateLimit-Remaining': randint(1, 10),
        # 2 minutes in the future.
        'X-RateLimit-Reset': datetime.now(timezone.utc).timestamp() + (2 * 60),
    }
    bucket = ('POST', '/random')
    while True:
        if await limiter.cooldown_bucket(bucket) > 0:
            break
        # The loop is supposed to be interrupted
        # before this is no longer true.
        self.assertGreaterEqual(limiter.buckets[bucket]._remaining, 0)
        # Update headers
        response.headers['Date'] = format_datetime(datetime.now(timezone.utc))
        response.headers['X-RateLimit-Remaining'] -= 1
        # Update the limiter
        await limiter.update_bucket(bucket, response)

anyio.run(main)
def results(cls, script_set):
    """Serialise every script result in *script_set* to a list of dicts."""
    serialised = []
    matching = filter_script_results(
        script_set, script_set.filters, script_set.hardware_type)
    for script_result in matching:
        entry = {
            "id": script_result.id,
            "created": format_datetime(script_result.created),
            "updated": format_datetime(script_result.updated),
            "name": script_result.name,
            "status": script_result.status,
            "status_name": script_result.status_name,
            "exit_status": script_result.exit_status,
            "started": fmt_time(script_result.started),
            "ended": fmt_time(script_result.ended),
            "runtime": script_result.runtime,
            "starttime": script_result.starttime,
            "endtime": script_result.endtime,
            "estimated_runtime": script_result.estimated_runtime,
            "parameters": script_result.parameters,
            "script_id": script_result.script_id,
            # API key is "revision" while the model attribute is "version".
            "script_revision_id": script_result.script_version_id,
            "suppressed": script_result.suppressed,
        }
        if script_set.include_output:
            # Raw output blobs are only shipped when asked for, base64-encoded.
            for key, blob in (("output", script_result.output),
                              ("stdout", script_result.stdout),
                              ("stderr", script_result.stderr),
                              ("result", script_result.result)):
                entry[key] = b64encode(blob)
        serialised.append(entry)
    return serialised
def results(cls, script_set):
    """Serialise the script results of *script_set* into plain dicts."""
    output = []
    for sr in filter_script_results(script_set, script_set.filters,
                                    script_set.hardware_type):
        item = dict(
            id=sr.id,
            created=format_datetime(sr.created),
            updated=format_datetime(sr.updated),
            name=sr.name,
            status=sr.status,
            status_name=sr.status_name,
            exit_status=sr.exit_status,
            started=fmt_time(sr.started),
            ended=fmt_time(sr.ended),
            runtime=sr.runtime,
            starttime=sr.starttime,
            endtime=sr.endtime,
            estimated_runtime=sr.estimated_runtime,
            parameters=sr.parameters,
            script_id=sr.script_id,
            # API key is "revision" while the model attribute is "version".
            script_revision_id=sr.script_version_id,
        )
        if script_set.include_output:
            # Output blobs are optional and transported base64-encoded.
            item['output'] = b64encode(sr.output)
            item['stdout'] = b64encode(sr.stdout)
            item['stderr'] = b64encode(sr.stderr)
            item['result'] = b64encode(sr.result)
        output.append(item)
    return output
def serve(self, last_modified, respond):
    """Serve self._fpath with Last-Modified based caching.

    last_modified -- the client's If-Modified-Since value (RFC 2822 string).
    respond -- WSGI start_response-like callable taking (status, headers).
    Returns a list of body chunks (possibly empty).
    """
    t = parsedate(last_modified)
    if t:
        t = time.mktime(t)
    if t is None:
        t = 0
    if not os.path.exists(self._fpath):
        respond("404 Not Found", [])
        return []
    st = os.stat(self._fpath)
    if st.st_mtime < t:
        # Not modified since the client's timestamp.  format_datetime()
        # requires a datetime — the original passed the raw float
        # st.st_mtime here, which raised AttributeError at runtime.
        respond("304 Not Modified",
                [("Last-Modified",
                  format_datetime(datetime.fromtimestamp(int(st.st_mtime))))])
        return []
    with open(self._fpath, "rb") as f:
        data = f.read()
    respond("200 OK",
            [("Content-Type", "application/octet-stream"),  # fixed "octect" typo
             ("Last-Modified",
              format_datetime(datetime.fromtimestamp(int(st.st_mtime)))),
             ("Content-Length", "{}".format(st.st_size))])
    return [data]
def _get_last_modified_header(redis_key=None): # Default last modified is the UTC time now last_modified = format_datetime( datetime.datetime.utcnow().replace(tzinfo=timezone.utc), usegmt=True) # If we haven't been passed a Redis key, we just return the # current timeztamp as a last modified header. if redis_key is None: return last_modified # We have been given a Redis key, so attempt to pull it from Redis r = redis.Redis(host=REDIS_UCLAPI_HOST) redis_key = "http:headers:Last-Modified:" + redis_key value = r.get(redis_key) if value: # Convert the Redis bytes response to a string. value = value.decode('utf-8') # We need the UTC timezone so that we can convert to it. utc_tz = pytz.timezone("UTC") # Parse the ISO 8601 timestamp from Redis and represent it as UTC utc_timestamp = ciso8601.parse_datetime(value).astimezone(utc_tz) # Format the datetime object as per the HTTP Header RFC. # We replace the inner tzinfo in the timestamp to force it to be a UTC # timestamp as opposed to a naive one; this is a requirement for the # format_datetime function. last_modified = format_datetime( utc_timestamp.replace(tzinfo=timezone.utc), usegmt=True) return last_modified
def test_head_messages_posted_since(self):
    """HEAD with a past If-Modified-Since must yield 200 and an empty body."""
    a_minute_ago = (datetime.datetime.now(tz=datetime.timezone.utc)
                    - datetime.timedelta(minutes=1))
    header_value = format_datetime(a_minute_ago, usegmt=True)
    print(header_value)
    response = self.app.head(
        '/abcdefghijklmnopqrstuvwxyzabcdefghijklmnopo',
        headers={'If-Modified-Since': header_value})
    assert response.status_code == status.HTTP_200_OK
    assert response.data == b''
def assert_datetime(self, expected, actual):
    """Compare two datetimes as RFC 2822 strings, normalising non-strings."""
    normalised = [
        value if isinstance(value, str) else format_datetime(value)
        for value in (expected, actual)
    ]
    # eq_ takes (actual, expected) in that order.
    return eq_(normalised[1], normalised[0])
def test_head_messages_posted_since(self):
    """HEAD with a past If-Modified-Since yields 200 and no body."""
    a_minute_ago = (datetime.datetime.now(tz=datetime.timezone.utc)
                    - datetime.timedelta(minutes=1))
    header_value = format_datetime(a_minute_ago, usegmt=True)
    print(header_value)
    response = self.app.head(
        "/abcdefghijklmnopqrstuvwxyzabcdefghijklmnopo",
        HTTP_IF_MODIFIED_SINCE=header_value,
    )
    assert response.status_code == status.HTTP_200_OK
    assert response.content == b""
def template_rss(posts: List[dict], config: Optional[dict] = None) -> str:
    """Render *posts* as an RSS 2.0 document string.

    Drafts and index pages are excluded; up to the 10 most recently
    modified posts are emitted, sorted by their 'last_modified' value.
    """
    config = config or {}
    # Drop drafts and index pages from the feed.
    posts = [
        post for post in posts
        if TAG_DRAFT not in post.get('tags', [])
        and TAG_INDEX not in post.get('tags', [])
    ]
    posts = sorted(posts, key=lambda post: post['last_modified']
                   )  # type: ignore # mypy fails to infer lambda type
    base_url = xmlescape(config.get('site', {}).get('base_url', ''))
    title = xmlescape(config.get('site', {}).get('title', ''))
    # Fall back to the base URL when no site title is configured.
    title = base_url if (title == '' and base_url != '') else title
    rss_xml = []
    rss_xml.append('''<?xml version="1.0" encoding="utf-8" standalone="yes"?> <rss version="2.0" xmlns:atom="http://www.w3.org/2005/Atom"> <channel>''')
    rss_xml.append(f''' <title>{title}</title>''')
    rss_xml.append(f''' <link>{base_url}</link>''')
    rss_xml.append(f''' <description></description>''')
    rss_xml.append(f''' <generator>Good Generator.py -- ggpy -- https://oliz.io/ggpy</generator>''')
    rss_xml.append(f''' <lastBuildDate>{utils.formatdate()}</lastBuildDate>''')
    # The self link is relative when no base URL is configured.
    rss_xml.append(f''' <atom:link href="{'rss.xml' if base_url == '' else f'{base_url}/rss.xml'}" rel="self" type="application/rss+xml" />''')
    for post in posts[-10:]:  # Limit to the latest 10 posts
        escaped_url = xmlescape(post.get('url', ''))
        escaped_title = xmlescape(post.get('title', ''))
        # A post with no title is labelled by its URL instead.
        escaped_title = escaped_url if (escaped_title == '' and escaped_url != '') else escaped_title
        # Prefer the explicit date, then last_modified, then "now".
        date_to_format = post.get('date', '')
        date_to_format = post.get('last_modified', '') if date_to_format == '' else date_to_format
        date_to_format = now_utc_formatted() if date_to_format == '' else date_to_format
        pub_date = ''
        # Dates may or may not carry a time component; try both formats.
        try:
            pub_date = utils.format_datetime(datetime.datetime.strptime(date_to_format, '%Y-%m-%dT%H:%M:%S%z'))
        except ValueError:
            pub_date = utils.format_datetime(datetime.datetime.strptime(date_to_format, '%Y-%m-%d'))
        rss_xml.append(f''' <item>''')
        rss_xml.append(f''' <title>{escaped_title}</title>''')
        rss_xml.append(f''' <link>{escaped_url}</link>''')
        rss_xml.append(f''' 
<pubDate>{xmlescape(pub_date)}</pubDate>''')
        rss_xml.append(f''' <guid>{escaped_url}</guid>''')
        rss_xml.append(f''' <description>{xmlescape(post.get('html_section', ''))}</description>''')
        rss_xml.append(f''' </item>''')
    rss_xml.append(''' </channel> </rss>\n''')
    return '\n'.join(rss_xml)
def test_acceptance_criteria_3(client: FlaskClient, session: Session):
    """Expiration dates that are not in the future are rejected on both
    product creation and product update."""
    def assert_expiration_rejected(response):
        # The API reports a single validation error on expiration_date.
        payload = json.loads(response.data)
        assert response.status_code == 400
        assert len(payload["errors"]) == 1
        assert payload["errors"][0]["loc"][0] == 'expiration_date'

    # create brand and category to add to new product
    brand = create_basic_db_brand()
    category = create_basic_db_category()
    session.add(brand)
    session.add(category)
    session.commit()
    now = datetime.utcnow()

    # Try to pass expiration date that is too early (creation)
    response = client.post('/products', data=json.dumps({
        "name": "test",
        "rating": 5,
        "brand": brand.id,
        "categories": [category.id],
        "expiration_date": email_utils.format_datetime(now),
        "items_in_stock": 1
    }), content_type='application/json')
    assert_expiration_rejected(response)

    # Try to pass expiration date that is too early (update)
    product = create_basic_db_product()
    session.add(product)
    session.commit()
    response = client.patch(f"/products/{product.id}", data=json.dumps({
        "expiration_date": email_utils.format_datetime(now),
    }), content_type='application/json')
    assert_expiration_rejected(response)
def get_feed(page_link, self_link, user_posts):
    """Generate RSS feed using as source user_root_raw and user_posts_raw

    :param page_link: Link to resource index
    :param self_link: link to this view
    :param user_posts: Posts for using as source
    """
    def add_text(parent, tag, text):
        # Small helper: create <tag> under parent and set its text.
        element = SubElement(parent, tag)
        element.text = text
        return element

    rss = Element('rss', {
        'version': '2.0',
        'xmlns:atom': 'http://www.w3.org/2005/Atom',
    })
    channel = SubElement(rss, 'channel')
    add_text(channel, 'title',
             '{} {} {}'.format(g.post_user.first_name,
                               g.post_user.last_name,
                               g.post_user.username).strip())
    add_text(channel, 'link', page_link)
    add_text(channel, 'description', g.post_user.about_plain())
    SubElement(channel, 'atom:link', {
        'href': self_link,
        'rel': 'self',
        'type': 'application/rss+xml'
    })
    if user_posts:
        # Newest post's edit time doubles as the channel build date.
        add_text(channel, 'lastBuildDate', format_datetime(user_posts[0].edited))
    for user_post in user_posts:
        item = SubElement(channel, 'item')
        post_url = url_for('post.post', username=user_post.user.username,
                           postname=user_post.url, _external=True)
        add_text(item, 'link', post_url)
        add_text(item, 'guid', post_url)
        add_text(item, 'title', user_post.title)
        add_text(item, 'description', user_post.content_plain())
        add_text(item, 'pubDate', format_datetime(user_post.edited))
    buffer_rss = BytesIO()
    ElementTree(rss).write(buffer_rss, 'UTF-8', True)
    return buffer_rss.getvalue().decode('utf8'), {'content-type': 'text/xml'}
def test_read_by_name(self):
    """A DHCP snippet is readable by name and includes its value history."""
    dhcp_snippet = factory.make_DHCPSnippet()
    # Generate some history
    dhcp_snippet.value = dhcp_snippet.value.update(factory.make_string())
    dhcp_snippet.save()
    response = self.client.get(
        '/api/2.0/dhcp-snippets/%s/' % dhcp_snippet.name)
    self.assertEqual(http.client.OK, response.status_code, response.content)
    parsed_dhcp_snippet = json.loads(response.content.decode())
    # History is newest-first: current value, then its previous version.
    history = [
        {
            'id': version.id,
            'value': version.data,
            'created': format_datetime(version.created),
        }
        for version in (dhcp_snippet.value,
                        dhcp_snippet.value.previous_version)
    ]
    expected = {
        'id': dhcp_snippet.id,
        'name': dhcp_snippet.name,
        'value': dhcp_snippet.value.data,
        'description': dhcp_snippet.description,
        'history': history,
        'enabled': dhcp_snippet.enabled,
        'node': None,
        'subnet': None,
        'global_snippet': True,
        'resource_uri': self.get_dhcp_snippet_uri(dhcp_snippet),
    }
    self.assertThat(parsed_dhcp_snippet, Equals(expected))
def serialize(self):
    """Serialise the site's newest pages as an RSS 2.0 document (bytes)."""
    E = objectify.ElementMaker(
        annotate=False, nsmap={"atom": "http://www.w3.org/2005/Atom"})
    channel = E.channel(
        E.title(self.metadata["site_title"]),
        E.link(self.metadata["site_url"]),
        E.description(self.metadata["site_description"]),
    )
    # The atom:self link needs the namespaced tag, hence raw etree here.
    channel.append(
        etree.Element(
            "{http://www.w3.org/2005/Atom}link",
            rel="self",
            href=urljoin(self.metadata["site_url"], "/feeds/all.atom"),
        )
    )
    # Newest-first by modification time, capped at ten entries.
    newest_first = sorted(self.pages, key=lambda entry: entry[0], reverse=True)
    for last_mod, url, page in newest_first[:10]:
        item = E.item(
            E.title(page.title), E.link(url), E.guid(url, isPermaLink="true"))
        if page.description:
            item.append(E.description(page.description))
        if page.date_modified:
            item.append(E.pubDate(format_datetime(page.date_modified)))
        channel.append(item)
    rss = E.rss(channel, version="2.0")
    return etree.tostring(rss, pretty_print=True, encoding="utf-8",
                          xml_declaration=True)
def msg_response(self, report=False):
    """Build a MIME reply for a match, or a report summary; None when
    there is neither a match nor a report to send."""
    msg = MIMEMultipart()
    msg['Date'] = format_datetime(localtime())
    if self.is_matched:
        msg.attach(MIMEText(self.string_respone()))
        if self.attachment:
            msg.attach(self.attachment)
            logging.debug('MATCHER - attachment appended')
    elif not report:
        # Neither a match nor a report: nothing worth sending.
        logging.warning('MATCHER - unmatched response')
        return None
    if not report:
        msg['subject'] = self.subj
    else:
        # Report mode: include a summary of this matcher and the payload,
        # and prefix the original subject with the match outcome.
        msg.attach(MIMEText(str(self)))
        msg.attach(MIMEText(self.payload))
        prefix = "[MATCH]" if self.is_matched else "[NO MATCH]"
        msg['subject'] = "{} {}".format(prefix, self.msg['subject'])
    return msg
def p(x: Any) -> DStep:
    """Encode datetimes as RFC 7231 GMT strings; anything else is an error."""
    if not isinstance(x, datetime):
        return False, EncodeError(path=(*path, tp), actual=x)
    # Force UTC so usegmt formatting is valid.
    return True, format_datetime(x.replace(tzinfo=timezone.utc), usegmt=True)
def signed_request(key, key_id, method, url, *args, signing_headers=(), headers=None, **kwargs):
    """Perform an HTTP request carrying an HTTP-signature Authorization.

    key/key_id -- RSA signing key and its public identifier.
    signing_headers -- extra header names to include in the signature,
    when present in *headers*.  Returns the requests Response.
    """
    if headers is None:
        headers = {}
    urlinfo = urlparse(url)
    headers["Date"] = format_datetime(datetime.now(timezone.utc), True)
    headers["Host"] = urlinfo.hostname
    # The pseudo-header TARGET plus Host and Date are always signed.
    used_headers = [
        (TARGET, f"{method.lower()} {urlinfo.path}"),
        ("Host", headers["Host"]),
        ("Date", headers["Date"]),
    ] + [(header, headers[header]) for header in signing_headers if header in headers]
    to_sign = "\n".join(
        [f"{header.lower()}: {header_value}" for header, header_value in used_headers]
    )
    signature = sign(key, to_sign.encode("ascii"))
    signature_parts = (
        ("keyId", key_id),
        ("algorithm", "rsa-sha256"),
        ("headers", " ".join(h for h, _ in used_headers)),
        ("signature", signature.decode("ascii")),
    )
    headers["Signature"] = ",".join(['{}="{}"'.format(k, v) for k, v in signature_parts])
    # NOTE: a leftover debug print(headers) was removed here — it leaked
    # the computed signature and headers to stdout on every request.
    return requests.request(method, url, *args, headers=headers, **kwargs)
def generate_entries():
    """Render each markdown post to HTML and collect per-post feed metadata."""
    extensions = ['extra', 'smarty', 'meta']
    env = jinja2.Environment(
        loader=jinja2.FileSystemLoader(searchpath="./"), autoescape=True)
    all_posts = []
    for post in POSTS_DIR.glob("*.md"):
        print("rendering {0}".format(post))
        url = Path("posts") / f"{post.stem}"
        url_html = f"{url}.html"
        target_file = TEMPLATE_DIR / url_html
        _md = markdown.Markdown(extensions=extensions, output_format='html5')
        with open(post) as post_f:
            content = post_f.read()
        html = _md.convert(content)
        doc = env.get_template(str(BLOG_TEMPLATE_FILE)).render(
            content=html, baseurl=BASE_URL, url=url, **_md.Meta)
        with open(target_file, "w") as post_html_f:
            post_html_f.write(doc)
        # 'published' metadata carries a human-readable date string.
        post_date = datetime.strptime(_md.Meta['published'][0], "%B %d, %Y")
        all_posts.append(dict(
            **_md.Meta,
            date=post_date,
            rfc2822_date=format_datetime(post_date),
            rel_link=f"/{url}",
            link="{0}{1}".format(BASE_URL, url),
        ))
    # Newest posts first.
    all_posts.sort(key=lambda item: item['date'], reverse=True)
def cache_response(self, url, response, force_cache):
    """Persist *response* in the web cache keyed by *url*.

    Respects Cache-Control: no-store and requires a validator (ETag or
    Last-Modified) unless *force_cache* overrides both.
    """
    cache_control = parse_cache_control(response)
    if cache_control.get('no-store'):
        if not force_cache:
            _logger.debug('Skipping cache: response has '
                          'Cache-Control: no-store')
            return
        _logger.debug('Caching: response has Cache-Control: no-store, '
                      'but force_cache is set')
    etag = response.headers.get('etag')
    last_modified = response.headers.get('last-modified')
    if etag is None and last_modified is None:
        if not force_cache:
            _logger.debug(f'Unable to cache {url}: '
                          'No ETag or Last-Modified header returned')
            return
        # Manufacture a validator so the entry can still be revalidated.
        last_modified = eut.format_datetime(
            datetime.datetime.now(datetime.timezone.utc))
        _logger.debug('Forcing URL into cache with '
                      f'last-modified={last_modified}')
    self.cursor.execute(
        'INSERT OR REPLACE INTO web_cache VALUES'
        ' (:url, :etag, :last_modified, :response)',
        {
            'url': url,
            'etag': etag,
            'last_modified': last_modified,
            # Pickled data is written and read locally only.
            'response': pickle.dumps(response),
        })
    self.conn.commit()
def test_get_no_messages_posted_since_gmt1(self):
    """A future If-Modified-Since (Berlin offset format) must yield 304."""
    berlin_now = datetime.datetime.now(tz=pytz.timezone("Europe/Berlin"))
    in_one_minute = berlin_now + datetime.timedelta(minutes=1)
    response = self.app.get(
        "/abcdefghijklmnopqrstuvwxyzabcdefghijklmnopo",
        HTTP_IF_MODIFIED_SINCE=format_datetime(in_one_minute),
    )
    assert response.status_code == status.HTTP_304_NOT_MODIFIED
    assert response.content == b""
def dehydrate_dhcp_snippet(self, dhcp_snippet):
    """Render a DHCP snippet as a plain dict for the websocket API."""
    # A snippet targets a subnet, else a node, else is global (both None).
    node_system_id = None
    subnet_id = None
    if dhcp_snippet.subnet is not None:
        subnet_id = dhcp_snippet.subnet.id
    elif dhcp_snippet.node is not None:
        node_system_id = dhcp_snippet.node.system_id
    history = [
        {
            'id': version.id,
            'value': version.data,
            'created': format_datetime(version.created),
        }
        for version in dhcp_snippet.value.previous_versions()
    ]
    return {
        'id': dhcp_snippet.id,
        'name': dhcp_snippet.name,
        'description': dhcp_snippet.description,
        'value': dhcp_snippet.value.data,
        'history': history,
        'enabled': dhcp_snippet.enabled,
        'node': node_system_id,
        'subnet': subnet_id,
        'updated': dehydrate_datetime(dhcp_snippet.updated),
        'created': dehydrate_datetime(dhcp_snippet.created),
    }
def dehydrate_dhcp_snippet(self, dhcp_snippet):
    """Serialise a DHCP snippet (value history included) to a dict."""
    # Subnet wins over node; when neither is set the snippet is global.
    subnet_id = None
    node_system_id = None
    if dhcp_snippet.subnet is not None:
        subnet_id = dhcp_snippet.subnet.id
    elif dhcp_snippet.node is not None:
        node_system_id = dhcp_snippet.node.system_id
    previous = [
        {
            "id": version.id,
            "value": version.data,
            "created": format_datetime(version.created),
        }
        for version in dhcp_snippet.value.previous_versions()
    ]
    return {
        "id": dhcp_snippet.id,
        "name": dhcp_snippet.name,
        "description": dhcp_snippet.description,
        "value": dhcp_snippet.value.data,
        "history": previous,
        "enabled": dhcp_snippet.enabled,
        "node": node_system_id,
        "subnet": subnet_id,
        "updated": dehydrate_datetime(dhcp_snippet.updated),
        "created": dehydrate_datetime(dhcp_snippet.created),
    }
def send_new_donation(user, donation):
    """Email the site owner about a new donation (no-op without SMTP config)."""
    import html  # ensure the top-level html module is bound for html.unescape

    if _cfg("smtp-host") == "":
        return
    smtp = smtplib.SMTP(_cfg("smtp-host"), _cfgi("smtp-port"))
    smtp.ehlo()
    smtp.starttls()
    smtp.login(_cfg("smtp-user"), _cfg("smtp-password"))
    with open("emails/new_donation") as f:
        # html.parser.HTMLParser().unescape() was removed in Python 3.9;
        # html.unescape() is the documented replacement.
        message = MIMEText(html.unescape(
            pystache.render(f.read(), {
                "user": user,
                "root": _cfg("protocol") + "://" + _cfg("domain"),
                "your_name": _cfg("your-name"),
                "amount": currency.amount("{:.2f}".format(
                    donation.amount / 100)),
                "frequency": (" per month"
                              if donation.type == DonationType.monthly
                              else ""),
                "comment": donation.comment or "",
            })))
    message['Subject'] = "New donation on fosspay!"
    message['From'] = _cfg("smtp-from")
    message['To'] = "{} <{}>".format(_cfg("your-name"), _cfg("your-email"))
    message['Date'] = format_datetime(localtime())
    smtp.sendmail(_cfg("smtp-from"), [_cfg('your-email')], message.as_string())
    smtp.quit()
def __call__(self, request):
    """Attach Date and NEATOAPP HMAC Authorization headers to *request*."""
    # The date must be formatted per RFC 2616
    # https://tools.ietf.org/html/rfc2616#section-14.18
    timestamp = format_datetime(datetime.now(timezone.utc), True)
    try:
        # Assume bytes were received and decode them.
        body_text = request.body.decode("utf8")
    except AttributeError:
        # Decode failed: request.body is already a str.
        body_text = request.body
    payload = "\n".join([self.serial.lower(), timestamp, body_text])
    digest = hmac.new(
        key=self.secret.encode("utf8"),
        msg=payload.encode("utf8"),
        digestmod=hashlib.sha256,
    )
    request.headers["Date"] = timestamp
    request.headers["Authorization"] = "NEATOAPP " + digest.hexdigest()
    return request
def wiki_commit(self, checkout_dir, filename, message, author=None, date=None):
    """git-add and git-commit *filename* within *checkout_dir*.

    Returns True when the commit succeeds, False otherwise.
    """
    if not os.path.exists(checkout_dir + '/.git'):
        raise Exception("Not a git working directory %r" % checkout_dir)
    rel_file = os.path.relpath(filename, checkout_dir)
    if subprocess.call(['git', '-C', checkout_dir, 'add', rel_file]) != 0:
        return False
    cmd = ['git']
    if author is not None:
        # The author string doubles as both git identity fields.
        cmd += ['-c', 'user.email=%s' % author, '-c', 'user.name=%s' % author]
    cmd += ['-C', checkout_dir, 'commit', '-m', message]
    if author is not None:
        cmd += ['--author', author]
    if date is not None:
        # datetimes are serialised to RFC 2822; anything else is stringified.
        cmd += ['--date',
                format_datetime(date) if isinstance(date, datetime) else str(date)]
    cmd.append(rel_file)
    return subprocess.call(cmd) == 0
def render_js(request, template_name, cache=True, *args, **kwargs):
    """Render a template as JavaScript, optionally with browser cache headers."""
    response = render(request, template_name, *args, **kwargs)
    response["Content-Type"] = "application/javascript; charset=UTF-8"
    if not cache:
        response["Pragma"] = "No-Cache"
        return response
    now = datetime.now(timezone.utc)
    response["Last-Modified"] = format_datetime(now, usegmt=True)
    # Let browsers cache for one month.
    response["Expires"] = format_datetime(now + timedelta(days=31), usegmt=True)
    return response
def make(args, blobs):
    """Build an RFC 5322 message string from a JMAP-style *args* dict.

    args  -- dict with from/to/cc/bcc/subject/msgdate and headers; may
             also carry replyTo, textBody/htmlBody and attachments.
    blobs -- blob store consulted by _makeatt for attachment payloads.
    Returns the serialised message (str).
    """
    msg = EmailMessage()
    msg['From'] = _mkemail(args['from'])
    msg['To'] = _mkemail(args['to'])
    msg['Cc'] = _mkemail(args['cc'])
    msg['Bcc'] = _mkemail(args['bcc'])
    msg['Subject'] = args['subject']
    msg['Date'] = format_datetime(args['msgdate'])
    for header, val in args['headers'].items():
        msg[header] = val
    if 'replyTo' in args:
        msg['replyTo'] = args['replyTo']
    # Prefer the explicit text body; otherwise derive one from the HTML.
    if 'textBody' in args:
        text = args['textBody']
    else:
        text = htmltotext(args['htmlBody'])
    msg.add_header('Content-Type', 'text/plain')
    msg.set_content(text)
    if 'htmlBody' in args:
        # NOTE(review): the HTML part is built as a separate message and
        # appended via make_alternative()/add_alternative(); presumably this
        # yields multipart/alternative text+html — confirm client rendering.
        htmlpart = EmailMessage()
        htmlpart.add_header('Content-Type', 'text/html')
        htmlpart.set_content(args['htmlBody'])
        msg.make_alternative()
        msg.add_alternative(htmlpart)
    for att in args.get('attachments', ()):
        msg.add_attachment(_makeatt(att, blobs))
    return msg.as_string()
def send_gmails(send_to, subject, body, files=None):
    """Send an email via Gmail SMTP with optional file attachments.

    send_to -- list of recipient addresses.
    files   -- optional iterable of file paths to attach.
    Credentials come from SENDER_EMAIL / SENDER_PASSWORD env vars.
    """
    assert isinstance(send_to, list)
    send_from = os.getenv("SENDER_EMAIL")
    spass = os.getenv("SENDER_PASSWORD")
    # Stamp the message with the current time in India Standard Time.
    IST = pytz.timezone("Asia/Kolkata")
    india_time = datetime.now().astimezone(IST)
    msg = MIMEMultipart()
    msg['From'] = send_from
    msg['To'] = COMMASPACE.join(send_to)
    msg['Date'] = format_datetime(india_time)
    msg['Subject'] = subject
    msg.attach(MIMEText(body))
    for f in files or []:
        with open(f, "rb") as fil:
            part = MIMEApplication(fil.read(), Name=basename(f))
        # After the file is closed
        part['Content-Disposition'] = 'attachment; filename="%s"' % basename(f)
        msg.attach(part)
    context = ssl.create_default_context()
    with smtplib.SMTP("smtp.gmail.com", 587) as server:  # 587 = STARTTLS port
        server.starttls(context=context)  # Secure the connection
        server.login(send_from, spass)
        # BUG FIX: the envelope sender was previously `spass` (the
        # password), which leaked the credential and gave a bogus sender.
        server.sendmail(send_from, send_to, msg.as_string())
def download_list(url):
    """Fetch *url* honouring an If-Modified-Since disk cache; fall back to
    the cached copy on request errors."""
    cache = Path(config['cache'], hashlib.sha1(url.encode()).hexdigest())
    headers = None
    if cache.is_file():
        # Advertise the cache file's mtime so the server can answer 304.
        cached_mtime = datetime.utcfromtimestamp(cache.stat().st_mtime)
        headers = {
            'If-modified-since': eut.format_datetime(cached_mtime),
            'User-Agent': 'Bind adblock zonfile updater v1.0 (https://github.com/Trellmor/bind-adblock)'
        }
    try:
        r = requests.get(url, headers=headers, timeout=config['req_timeout_s'])
        if r.status_code == 200:
            with cache.open('w', encoding='utf8') as f:
                f.write(r.text)
            if 'last-modified' in r.headers:
                # Mirror the server's timestamp onto the cache file.
                ts = eut.parsedate_to_datetime(
                    r.headers['last-modified']).timestamp()
                os.utime(str(cache), times=(ts, ts))
            return r.text
    except requests.exceptions.RequestException as e:
        print(e)
    if cache.is_file():
        with cache.open('r', encoding='utf8') as f:
            return f.read()
def transform_value(val):
    """Normalise *val* to a string; datetimes become RFC 7231 GMT stamps."""
    if isinstance(val, datetime):
        return format_datetime(val.astimezone(timezone.utc), usegmt=True)
    # Strings pass through untouched; everything else is stringified.
    return val if isinstance(val, str) else str(val)
async def async_throttle_delay(
        ip_address_str: str) -> Tuple[Optional[str], Optional[str]]:
    """Return (Retry-After seconds, HTTP-date of next retry), or
    (None, None) when the address is not throttled."""
    delay, next_retry = await _async_throttle_delay(ip_address_str)
    if delay <= 0:
        return None, None
    # Round the delay up to a whole number of seconds.
    seconds = str(int(delay + 0.999))
    return seconds, format_datetime(next_retry, usegmt=True)
def download_list(url):
    """Fetch *url* with an If-Modified-Since disk cache; fall back to the
    cached copy on errors or unexpected statuses."""
    cache_dir = Path('.cache', 'bind_adblock')
    if not cache_dir.is_dir():
        cache_dir.mkdir(parents=True)
    cache = Path(cache_dir, hashlib.sha1(url.encode()).hexdigest())
    headers = None
    if cache.is_file():
        # Advertise the cached copy's mtime so the server can answer 304.
        cached_mtime = datetime.utcfromtimestamp(cache.stat().st_mtime)
        headers = {
            'If-modified-since': eut.format_datetime(cached_mtime),
            'User-Agent': 'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.36'
        }
    try:
        r = requests.get(url, headers=headers, timeout=config['req_timeout_s'])
        if r.status_code == 200:
            with cache.open('w') as f:
                f.write(r.text)
            if 'last-modified' in r.headers:
                # Mirror the server's timestamp onto the cache file.
                ts = eut.parsedate_to_datetime(
                    r.headers['last-modified']).timestamp()
                os.utime(str(cache), times=(ts, ts))
            return r.text
        if r.status_code != 304:
            print("Error getting list at " + url + " HTTP STATUS:" + str(r.status_code))
    except requests.exceptions.RequestException as e:
        print(e)
    if cache.is_file():
        with cache.open() as f:
            return f.read()
def update_rss(rss_file, numero_id, title=''):
    """Update rss file.

    Parameters
    ----------
    rss_file : str
        rss file name
    numero_id : int
        numero of the episode
    title : str
    """
    anchor = 'type="application/rss+xml" />'
    with open(rss_file, 'r') as f:
        rss_data = f.read()
    stamp = utils.format_datetime(datetime.now(timezone.utc))
    rss_item = jinja2.Template(RSS_TEMPLATE).render(
        numero=numero_id, date=stamp, title=title)
    # Insert the freshly rendered item right after the channel's self link.
    rss_data = rss_data.replace(anchor, "{}\n{}".format(anchor, rss_item))
    with open(rss_file, 'w') as f:
        f.write(rss_data)
def make_session(self, username: str, long_term: bool = False) -> Optional[SimpleCookie]:
    """Create a session for *username* and return its cookie.

    Returns None when the user already has more than
    max_sessions_per_user live sessions.
    """
    self.garbage_collect_sessions()
    c = self.con.cursor()
    # NOTE(review): queries are built by string interpolation via escape();
    # prefer the driver's parameter binding to rule out SQL injection.
    c.execute("SELECT SessionID FROM Sessions WHERE Username=%s" % escape(username))
    if len(list(c.fetchall())) > self.max_sessions_per_user:
        return None
    sess_id = os.urandom(32).hex()
    # Long-term sessions get the extended timeout.
    if long_term:
        expires = datetime.now(timezone.utc) + self.long_term_session_timeout
    else:
        expires = datetime.now(timezone.utc) + self.short_term_session_timeout
    cookie = SimpleCookie()
    # (removed debug leftover: cookie["test_field"] = "lol")
    cookie["sess_id"] = sess_id
    if long_term:
        # Persistent cookie: expiry must be an RFC 7231 GMT date.
        cookie["sess_id"]["expires"] = format_datetime(expires, True)
    c.execute("INSERT INTO Sessions VALUES(%s, %s, %s)" %
              (escape(sess_id), escape(username),
               escape(utc_to_timestamp(expires))))
    self.con.commit()
    return cookie
def _generate_episode_xml(self, episode, template):
    """Fill *template* with this episode's metadata and return the XML."""
    # Description, summary and subtitle are the same.
    episode["summary"] = episode["description"]
    episode["subtitle"] = episode["description"]
    # Link, keywords and the default image come from the show's metadata.
    episode["link"] = self.metadata['link']
    episode["keywords"] = self.metadata["keywords"]
    episode["default-episode-image"] = self.metadata["default-episode-image"]
    # Local path of the episode's mp3 file.
    filepath = os.path.join(
        self.resource_dir, 'mp3s',
        f'{episode["episode"]}-{episode["title"].replace(" ", "_")}.mp3')
    # File size and playing length are read from the mp3 itself.
    episode["file-size"] = os.stat(filepath).st_size
    episode["duration"] = MP3(filepath).info.length
    # Public URL of the mp3 file.
    episode["file_path"] = os.path.join(self.root_url, filepath)
    # A per-episode pubDate (dd/mm/yyyy) wins over the show-level default.
    if 'pub-date' in episode:
        episode["pub-date"] = utils.format_datetime(
            datetime.strptime(episode["pub-date"], '%d/%m/%Y'))
    else:
        episode["pub-date"] = self.metadata['pub-date']
    # Display title is "<number> - <title>".
    episode["title"] = f'{episode["episode"]} - {episode["title"]}'
    return template % episode
def send_message(
    *,
    msg,
    smtp_host="localhost",
    smtp_port=25,
    use_tls=False,
    use_smtps=False,
    username=None,
    password=None,
):
    """Deliver *msg* over SMTP, defaulting the Date header to now (UTC).

    Raises WEmailDeliveryError when the server rejects the message data.
    """
    sender = msg.get("From")
    # Envelope recipients come from To + Cc + Bcc headers.
    recipients = getaddresses(chain(
        msg.get_all("To", []), msg.get_all("Cc", []), msg.get_all("Bcc", [])))
    if not msg.get("Date"):
        msg["Date"] = format_datetime(datetime.now(timezone.utc))
    smtp_cls = smtplib.SMTP_SSL if use_smtps else smtplib.SMTP
    with smtp_cls(host=smtp_host, port=smtp_port) as smtp:
        if use_tls:
            smtp.starttls()
            smtp.ehlo()
        if username or password:
            smtp.login(username, password)
        try:
            smtp.send_message(
                msg, from_addr=sender,
                to_addrs=[addr for _, addr in recipients if addr])
        except smtplib.SMTPDataError as e:
            raise WEmailDeliveryError(
                f"Failed to deliver {subjectify(msg=msg)!r} - {e.args[1].decode()!r}"
            ) from e
async def jsonify(ctx, response: Response):
    """ JSONify the response. """
    if not isinstance(response.response, dict):
        return response
    # An all-falsy payload is reported as "not found".
    status_code = response.status_code
    if not any(response.response.values()):
        status_code = 404
    wants_pretty = ctx.request.args.get("format", "json") in ["json_pretty", "pretty"]
    if wants_pretty:
        body = json.dumps(response.response, sort_keys=True, indent=4,
                          separators=(',', ': '))
    else:
        body = json.dumps(response.response)
    response.set_data(body)
    response.headers["Content-Type"] = "application/json"
    response.headers["Cache-Control"] = "public, max-age=300"
    # Expires mirrors the 300s max-age, as an aware UTC timestamp.
    expires = (datetime.datetime.utcnow()
               + datetime.timedelta(seconds=300)).replace(tzinfo=datetime.timezone.utc)
    response.headers["Expires"] = format_datetime(expires, usegmt=True)
    response.status_code = status_code
    return response
def test_get_messages_posted_since_gmt1(self):
    """A past If-Modified-Since in Berlin local time must return content."""
    a_minute_ago = (datetime.datetime.now(tz=pytz.timezone("Europe/Berlin"))
                    - datetime.timedelta(minutes=1))
    response = self.app.get(
        "/abcdefghijklmnopqrstuvwxyzabcdefghijklmnopo",
        HTTP_IF_MODIFIED_SINCE=format_datetime(a_minute_ago),
    )
    assert response.status_code == status.HTTP_200_OK
    assert "Hello World" in response.content.decode()
def get_rfc_date(isodate):
    """ Convert a string of YYYY-MM-DD HH:ii:ss to RFC-1123 format """
    parsed = datetime.strptime(isodate, "%Y-%m-%d %H:%M:%S")
    # Interpret the parsed wall-clock time as UTC rather than local time.
    return format_datetime(parsed.replace(tzinfo=timezone.utc), True)
def test_get_messages_posted_since_gmt1(self):
    """A one-minute-old Berlin-time If-Modified-Since still returns content."""
    a_minute_ago = (datetime.datetime.now(tz=pytz.timezone('Europe/Berlin'))
                    - datetime.timedelta(minutes=1))
    response = self.app.get(
        '/abcdefghijklmnopqrstuvwxyzabcdefghijklmnopo',
        headers={'If-Modified-Since': format_datetime(a_minute_ago)})
    assert response.status_code == status.HTTP_200_OK
    print(response.data.decode())
    assert 'Hello World' in response.data.decode()
def test_get_messages_posted_since(self):
    """Messages newer than If-Modified-Since are returned; older are not."""
    cutoff = (datetime.datetime.now(tz=datetime.timezone.utc)
              - datetime.timedelta(minutes=1))
    response = self.app.get(
        "/abcdefghijklmnopqrstuvwxyzabcdefghijklmnopo",
        HTTP_IF_MODIFIED_SINCE=format_datetime(cutoff, usegmt=True),
    )
    assert response.status_code == status.HTTP_200_OK
    body = response.content.decode()
    assert "Hello World" in body
    assert "Bar" not in body
def test_delta_modified(self):
    """Fetch the viewset with If-Modified-Since set to 10s before the
    draft model's update time; currently only exercises the request."""
    client = APIClient()
    update_since_header = format_datetime(self.draft_model.updated - timedelta(seconds=10))
    response = client.get('/SynchronizedViewSet/', None, **{"HTTP_IF_MODIFIED_SINCE": update_since_header})
    # NOTE(review): every assertion below is commented out, so this test
    # only verifies that the GET does not raise — re-enable or delete.
    # content = json.loads(response.content.decode())
    # self.assertEquals(3, len(content))
    # expected_titles = set(["published deleted", "published", "published draft"])
    # titles = set()
    # for o in content:
    #     titles.add(o["test_title"])
    # self.assertSetEqual(expected_titles, titles)
def get(self, task_id=None):
    """Return the current user's tasks as JSON: all of them, or a single
    task when *task_id* is given.

    Writes an empty JSON object when no user is authenticated.
    """
    if self.current_user:
        user = yield self.settings['db'].users.find_one({'email': self.current_user})
        if task_id is None:
            # List view: stream every task owned by this user.
            tasks_cursor = self.settings['db'].tasks.find({'user_id': user['_id']})
            data = []
            while (yield tasks_cursor.fetch_next):
                task = tasks_cursor.next_object()
                data.append({
                    'id': str(task['_id']),
                    'content': task['name'],
                    'comment': task['comment'],
                    'completed': task['completed'],
                    'start': format_datetime(task['start']),
                    # Still-running tasks (no end) report "now" as their end.
                    'end': format_datetime(datetime.utcnow()) if not task['end'] else format_datetime(task['end']),
                    'group': str(task['group_id'])})
        else:
            # Detail view: a single task plus its total running time.
            task = yield self.settings['db'].tasks.find_one({'user_id': user['_id'], '_id': ObjectId(task_id)})
            row_end_time = datetime.utcnow() if not task['end'] else task['end']
            row_start_time = task['start']
            if row_end_time < row_start_time:
                # Guard against clock skew: never report a negative runtime.
                total_time = str(timedelta())
            else:
                # NOTE(review): timedelta_to_int presumably converts the
                # elapsed timedelta into hours, and split(".") strips the
                # fractional-seconds suffix from str(timedelta) — confirm.
                total_time = str(timedelta(hours=timedelta_to_int(row_end_time - row_start_time))).split(".")[0]
            data = {
                'id': str(task['_id']),
                'content': task['name'],
                'comment': task['comment'],
                'completed': task['completed'],
                'start': format_datetime(row_start_time),
                'end': format_datetime(row_end_time),
                'total_time': total_time,
                'group': str(task['group_id'])
            }
        self.write(dumps(data))
    else:
        self.write(dumps({}))
def parse(cls, value, kwds):
    """Populate *kwds* (datetime / decoded / parse_tree) for a date header.

    An empty *value* records a HeaderMissingRequiredValue defect and fills
    in neutral placeholders; otherwise the value is normalised to a
    datetime and re-serialised in RFC 5322 form.
    """
    if value:
        # A string is parsed into a datetime; a datetime passes through.
        dt = utils.parsedate_to_datetime(value) if isinstance(value, str) else value
        kwds["datetime"] = dt
        kwds["decoded"] = utils.format_datetime(kwds["datetime"])
        kwds["parse_tree"] = cls.value_parser(kwds["decoded"])
        return
    # Missing value: flag the defect and leave empty placeholders.
    kwds["defects"].append(errors.HeaderMissingRequiredValue())
    kwds["datetime"] = None
    kwds["decoded"] = ""
    kwds["parse_tree"] = parser.TokenList()
def opml_subscriptions():
    """Render every feed subscription as an OPML document, grouped by category."""
    grouped = defaultdict(list)
    for sub in subscriptions.subscriptions():
        # Uncategorised subscriptions fall into the '' bucket.
        for cat in (sub.get('categories') or ['']):
            grouped[cat].append(sub)
    body = render_template('subscriptions.opml',
                           by_category=grouped,
                           now_rfc822=format_datetime(datetime.utcnow()))
    return (body, 200, {'Content-Type': 'application/xml'})
def post(self, *args, **kwargs):
    """Create a new, still-running task for the current user from the JSON body.

    Responds with the new task's id plus a formatted "now" timestamp the
    client can display as the provisional end time.
    """
    data = json_decode(self.request.body)
    user = yield self.settings['db'].users.find_one({'email': self.current_user})
    # New tasks start immediately and are open-ended: end=None, completed=False.
    object_id = yield self.settings['db'].tasks.insert(
        {'name': data['content'], 'comment': data['comment'],
         'start': datetime.strptime(data['start'], DATE_FORMAT), 'end': None,
         'completed': False, 'group_id': ObjectId(data['group']),
         'user_id': user['_id']})
    data = {'id': str(object_id), 'end': format_datetime(datetime.utcnow())}
    self.write(dumps(data))
def _get_db():
    """Fetch the database resource over HTTP and print the raw response (or error)."""
    # RFC 1123 date in GMT, as required by the x-ms-date header.
    request_date = format_datetime(datetime.now(timezone.utc), True)
    headers = {
        "authorization": get_authorization_string("get", "dbs", db_id, request_date, _master_key),
        "x-ms-date": request_date,
    }
    request = Request(_urlbase + "dbs/" + db_id, headers=headers)
    try:
        print(urlopen(request).read())
    except URLError as err:
        print(err)
def sendmail(topic, timestamp, from_address, to_address, mail_user, mail_password, mail_server, mail_port):
    """Email an alert that *topic* missed its check-in, last seen at *timestamp*.

    :param topic: name of the sensor topic being monitored.
    :param timestamp: datetime of the last observed check-in.
    :param from_address: sender address for the alert.
    :param to_address: recipient address.
    :param mail_user: SMTP login user.
    :param mail_password: SMTP login password.
    :param mail_server: SMTP host.
    :param mail_port: SMTP port (STARTTLS is negotiated after connect).
    """
    # localtime() returns an aware datetime: format_datetime() on a naive
    # value emits "-0000", which RFC 5322 defines as "timezone unknown".
    from email.utils import localtime

    msg = MIMEText("Topic {0} was last seen at {1}".format(topic, timestamp.isoformat()))
    msg['Subject'] = 'Sensor node missed check in'
    msg['From'] = from_address
    msg['To'] = to_address
    msg['Date'] = format_datetime(localtime())

    smtpserver = smtplib.SMTP(mail_server, mail_port)
    try:
        smtpserver.ehlo()
        smtpserver.starttls()
        smtpserver.ehlo()  # re-identify over the now-encrypted channel
        smtpserver.login(mail_user, mail_password)
        smtpserver.sendmail(msg['From'], msg['To'], msg.as_string())
    finally:
        # Always release the connection, even if login or send fails.
        smtpserver.close()
def test_modified_since(client, drop_messages, registry):
    """A future If-Modified-Since timestamp yields 304 and sends no drops."""
    with registry.assert_drop_sent(count=0):
        in_one_minute = datetime.datetime.now(tz=datetime.timezone.utc) + datetime.timedelta(minutes=1)
        header = format_datetime(in_one_minute, usegmt=True)
        response = client.get('/abcdefghijklmnopqrstuvwxyzabcdefghijklmnopo',
                              HTTP_IF_MODIFIED_SINCE=header)
        assert response.status_code == status.HTTP_304_NOT_MODIFIED
def test_modified_since(client, drop_messages, registry):
    """If-Modified-Since in the future must produce 304 Not Modified."""
    with registry.assert_get_request(304):
        future = datetime.datetime.now(tz=datetime.timezone.utc) + datetime.timedelta(minutes=1)
        request_headers = {'If-Modified-Since': format_datetime(future, usegmt=True)}
        response = client.get('/abcdefghijklmnopqrstuvwxyzabcdefghijklmnopo',
                              headers=request_headers)
        assert response.status_code == status.HTTP_304_NOT_MODIFIED
def test_get_no_messages_posted_since_gmt1(self):
    """A future timestamp expressed in a non-GMT zone (Berlin) still yields 304."""
    berlin_future = datetime.datetime.now(tz=pytz.timezone('Europe/Berlin')) + datetime.timedelta(minutes=1)
    response = self.app.get(
        '/abcdefghijklmnopqrstuvwxyzabcdefghijklmnopo',
        headers={'If-Modified-Since': format_datetime(berlin_future)},
    )
    assert response.status_code == status.HTTP_304_NOT_MODIFIED
    assert response.data == b''
def test_naive_datetime(self): self.assertEqual(utils.format_datetime(self.naive_dt), self.datestring + ' -0000')
def test_usegmt(self): utc_dt = datetime.datetime(*self.dateargs, tzinfo=datetime.timezone.utc) self.assertEqual(utils.format_datetime(utc_dt, usegmt=True), self.datestring + ' GMT')
def test_aware_datetime(self): self.assertEqual(utils.format_datetime(self.aware_dt), self.datestring + self.offsetstring)
def test_usegmt_with_non_utc_datetime_raises(self): with self.assertRaises(ValueError): utils.format_datetime(self.aware_dt, usegmt=True)
def test_not_modified(self):
    """If-Modified-Since after the last update -> empty 304 response."""
    api = APIClient()
    just_after_update = self.draft_model.updated + timedelta(seconds=10)
    response = api.get('/SynchronizedViewSet/', None,
                       **{"HTTP_IF_MODIFIED_SINCE": format_datetime(just_after_update)})
    self.assertEquals(304, response.status_code)
    self.assertEquals("", response.content.decode())
def _request_entries(url, etag=None, last_modified=None, fail_fast=False, loop=None):
    """Fetch feed entries from *url*, honouring HTTP caching validators.

    Returns ``None`` on 304 Not Modified, otherwise ``(headers, xml_text)``.
    On connection errors it degrades gracefully unless *fail_fast*: retry
    with the etag only, then Last-Modified only, then unconditionally.

    :param url: feed URL.
    :param etag: optional If-None-Match validator.
    :param last_modified: optional datetime for If-Modified-Since.
    :param fail_fast: raise on the first connection error instead of retrying.
    :param loop: event loop passed through to aiohttp.
    """
    headers = {'A-IM': 'feed'}  # RFC 3229 (delta encoding) support
    if etag:
        headers['If-None-Match'] = etag
    if last_modified:
        headers['If-Modified-Since'] = format_datetime(last_modified)
    try:
        response = yield from aiohttp.request(
            'get',
            url,
            headers=headers,
            loop=loop,
        )
        if response.status == 304:
            return None
        try:
            xml = yield from response.text()
        except Exception:
            # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
            # propagate. aiohttp's decoding failed (bad or missing charset):
            # fall back to decoding the raw body with the declared charset.
            content = yield from response.read()
            charset = 'utf-8'
            # Guard against a missing Content-Type header (None has no .split).
            content_type = response.headers.get('Content-Type') or ''
            parts = content_type.split(';')
            if len(parts) > 1:
                for part in parts[1:]:
                    if '=' in part:
                        key, value = part.split('=', 1)
                        if key.lower().strip() == 'charset':
                            charset = value
            xml = content.decode(charset)
        return response.headers, xml
    except (ConnectionError, HttpException):
        if fail_fast:
            raise
        if etag and last_modified:
            # Retry with the etag validator only.
            try:
                response = yield from _request_entries(
                    url, etag=etag, fail_fast=True, loop=loop
                )
            except (ConnectionError, HttpException):
                pass
            else:
                return response
            # Retry with the Last-Modified validator only.
            try:
                response = yield from _request_entries(
                    url, last_modified=last_modified, fail_fast=True, loop=loop
                )
            except (ConnectionError, HttpException):
                pass
            else:
                return response
        if etag or last_modified:
            # Last resort: an unconditional request (let errors propagate).
            response = yield from _request_entries(
                url, fail_fast=True, loop=loop
            )
            return response
        else:
            # The original request carried no validators; nothing to retry.
            raise
#!/usr/bin/env python3
"""Rewrite RFC 822 `Date:` headers read from stdin into the local timezone."""
import sys
import re
from email.utils import format_datetime, parsedate_to_datetime

in_headers = True
for line in sys.stdin.readlines():
    # A blank line terminates the header section; everything after it
    # (the message body) is passed through untouched.
    if line == "\n":
        in_headers = False
    date_match = re.match(r'^Date: (.+)', line)
    if in_headers and date_match:
        # Parse the original Date value and re-render it in local time
        # (requires parsedate_to_datetime, i.e. Python 3.3+).
        local_dt = parsedate_to_datetime(date_match.group(1)).astimezone(tz=None)
        print('Date:', format_datetime(local_dt))
    else:
        print(line, end="")
def enviar_correo(self, correo_dest, asunto, cuerpo, intento=0, *rutas_adjuntos):
    """Send an email under the mailer's configured sender name, over its
    established SMTP connection, built from the common message elements
    passed as arguments. On SMTPException, reconnects and retries up to
    ``mailer.REINTENTOS`` times.

    :param correo_dest: destination address (may include a display name).
    :param asunto: subject line.
    :param cuerpo: plain-text message body.
    :param intento: current retry number (incremented on reconnect-and-resend).
    :param rutas_adjuntos: filesystem paths of attachments.
    :return:
    """
    if True in (type(x) is not str for x in (correo_dest, asunto, cuerpo)):
        raise Exception(__name__ + '.sendMail params must be str')
    if self.smtp_host == 'smtp.sendgrid.net':
        origen = '*****@*****.**'
    else:
        origen = self.email_origen
    enviado = False
    # We could verify the connection is still up, but if it is not an
    # exception will be raised anyway.
    try:
        # Prepare the address headers.
        fromadrr = Address(self.nombre_origen, addr_spec=origen)
        name, addr = parseaddr(correo_dest)
        toaddr = Address(name, addr_spec=addr)
        # Wrap the message: multipart so MIME fragments can be combined.
        msg = MIMEMultipart()
        msg['From'] = formataddr((fromadrr.display_name, fromadrr.addr_spec))
        msg['To'] = formataddr((toaddr.display_name, toaddr.addr_spec))
        msg['Subject'] = asunto
        msg['Date'] = format_datetime(localtime())
        # Build the text MIME part and add it to the container.
        msg.attach(MIMEText(cuerpo, 'plain'))
        # Add the attachments to msg.
        for ruta in rutas_adjuntos:
            rutap = os.path.abspath(ruta)
            if not os.path.exists(rutap):
                # Report it somehow (currently only printed, not raised).
                print('{} error:\t fallo adjuntando {} para {}'.format(
                    __name__, rutap, origen))
                continue
            #with open(rutap) as fp:
            #    part = MIMEText(fp.read(), _subtype='plain')
            part = MIMEBase('application', "octet-stream")
            # NOTE(review): this file handle is never explicitly closed --
            # consider a `with open(...)` block.
            part.set_payload(open(rutap, "rb").read())
            encoders.encode_base64(part)
            # part.add_header('Content-Disposition',
            #                 'attachment; filename="{}"'.format(os.path.basename(rutap)))
            part.add_header('Content-Disposition',
                            'attachment; filename="{}"'.format(os.path.basename(rutap)))
            msg.attach(part)
        # server.sendmail(fromadrr.addr_spec, tomail, msg.as_string())
        self.smtpserver.send_message(msg)
        enviado = True
        self._incr_numenviados()
    except SMTPException as smtpe:
        # Reconnect and resend, up to mailer.REINTENTOS attempts.
        print('RECONECTADO y REENVIO POR EXCEPT')
        if intento < mailer.REINTENTOS and not enviado:
            self._conectar()
            self.enviar_correo(correo_dest, asunto, cuerpo, intento+1,
                               *rutas_adjuntos)
        else:
            raise
    except:
        # NOTE(review): bare except that only re-raises -- it has no effect
        # and could be removed.
        raise