def delete_post(board, thread_num, post_num, rape_msg):
    last_deletion = False
    th = Thread.get(db.Key.from_path("Board", board, "Thread", thread_num))
    [post] = [p for p in th.posts if p.get('post') == post_num]
    logging.info("found: %r" % post)
    key = post.get("key")
    if key:
        post.pop("key", None)
        post.pop("image", None)
        info = blobstore.BlobInfo.get(blobstore.BlobKey(key))
        info.delete()
        try:
            th.images.remove(key)
        except ValueError:
            pass
        logging.info("removed image %r" % post)
    else:
        last_deletion = True
        post['text'] = 'Fuuuuuu'
        post['text_html'] = 'Fuuuuuu'
        post['rainbow_html'] = u'<b>' + rape_msg + '</b>'
    th.put()
    Cache.delete((dict(Board=board),))
    r = Render(board, thread_num)
    # rebuild the rendered thread from scratch; could be cheaper
    r.create(th.posts[0])
    for a_post in th.posts[1:]:
        r.append(a_post)
    r.save()
    # FIXME: update records in memcache
    return last_deletion
def get_tz(location):
    cache = Cache()
    if location is None:
        return None
    timezone = cache.timezone_cached(location)
    if timezone:
        return timezone
    try:
        place, (lat, lng) = geo.geocode(location, timeout=10)
    except GeocoderTimedOut as e:
        print("Error: geocode failed on input %s with message %s" % (location, e.message))
        return None
    except TypeError:
        return None
    tz = geo.timezone((lat, lng))
    timezone = tz.zone
    cache.save_to_cache(location, timezone)
    return timezone
def save_batch(batch_number, path, correct_size, data, targets, num_classes):
    # transform the data into a form that the network can consume directly
    data_array = DataUtils.get_array(correct_size, data)
    target_array = DataUtils.get_target_array(num_classes, targets)
    cache = Cache(data_array, target_array)
    # serialize and write to disk
    DataUtils.pickle_data(cache, path, 'cache' + str(batch_number) + '.pkl')
    return cache
def delete_post(board, thread_num, post_num, rape_msg):
    last_deletion = False
    th = Thread.load(thread_num, board)
    [post] = [p for p in th.posts if p.get('post') == post_num]
    logging.info("found: %r" % post)
    key = post.get("key")
    if key:
        post.pop("key", None)
        post.pop("image", None)
        info = blobstore.BlobInfo.get(blobstore.BlobKey(key))
        if info:
            info.delete()
        try:
            th.images.remove(key)
        except ValueError:
            pass
        logging.info("removed image %r" % post)
    else:
        last_deletion = True
        post['text'] = 'Fuuuuuu'
        post['text_html'] = 'Fuuuuuu'
        post['rainbow_html'] = u'<b>' + rape_msg + '</b>'
    th.put()
    Cache.remove("board", board)
    r = Render(thread=th)
    # rebuild the rendered thread from scratch; could be cheaper
    r.create(th.posts[0])
    for a_post in th.posts[1:]:
        r.append(a_post)
    r.save()
    return last_deletion
def get_cached_profile(self, screen_name):
    cache = Cache.all()
    cache.filter('name =', 'profile:' + screen_name.lower())
    cache = cache.get()
    outdated = False
    # Let's see if it's outdated; give it 1 hour
    if cache:
        if cache.published + timedelta(hours=1) < datetime.now():
            outdated = True
    # Return a tuple. Beware ;)
    return (cache, outdated)
async def create_cache(cache_key, content, minutes=30):
    pickle_protocol = pickle.HIGHEST_PROTOCOL
    cache = Cache.filter(cache_key=cache_key)
    cache_exists = await cache.exists()
    if not cache_exists:
        expires = datetime.utcnow() + timedelta(minutes=minutes)
        pickled = pickle.dumps(content, pickle_protocol)
        b64encoded = base64.b64encode(pickled).decode('latin1')
        try:
            cache = Cache(cache_key=cache_key, content=b64encoded, expires=expires)
            await cache.save()
            return True
        except Exception:
            return False
    return False
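# The write path above pickles the payload and base64-encodes it so it can be
# stored in a text column. A matching read-path sketch, assuming the same
# ORM-style Cache model as above; the helper name and the expiry check are
# assumptions, not taken from the source:
async def get_cached(cache_key):
    # hypothetical inverse of create_cache: return the unpickled content,
    # or None when the row is missing or already expired
    cache = await Cache.filter(cache_key=cache_key).first()
    if cache is None or cache.expires < datetime.utcnow():
        return None
    pickled = base64.b64decode(cache.content.encode('latin1'))
    return pickle.loads(pickled)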
def create(self, op):
    data = {
        "op": op,
        "id": self.thread,
        "board": self.board,
        "subject": op.get("subject"),
    }
    self.cache = Cache.create("thread", self.board, self.thread)
    self.cache.data = render_template(
        "thread.html",
        thread=data,
        threads=[data],
        board=self.board,
        board_name=NAMES.get(self.board) or get_config("aib", "default_name"),
        boards=get_config("aib", "boardlist"),
        # context?
    )
def do_rebuild(cursor=None):
    cacheq = Cache.all()
    if cursor:
        cacheq.with_cursor(cursor)
    cache = cacheq.get()
    if not cache:
        import logging
        logging.info("stop rebuild")
        return
    if cache.data and not cache.comp:
        cache.comp = cache.data.encode('utf8')
        cache.data = None
        cache.put()
    deferred.defer(do_rebuild, cacheq.cursor())
def createApp():
    SETTINGS = {
        "static_path": config.STATIC_PATH,
        "template_path": config.TEMPLATE_PATH,
        "cookie_secret": config.COOKIE_SECRET,
        "login_url": "/login",
        # "xsrf_cookies": config[default].XSRF_COOKIES,
        "debug": config.DEBUG,
        "gzip": config.GZIP,
    }
    app = tornado.web.Application(handlers=HANDLERS, **SETTINGS)
    app.config = config
    app.remote = RpycController(app.config)
    app.db = SQLClient(config)
    app.cache = Cache(config)
    app.data = Data(app.db, app.cache, app.remote)
    app.static = static(app.config)
    MinitrillLog.log_init(app)
    return app
def clear_profile_cache(screen_name):
    q = Cache.all()
    q.filter('name =', 'profile:' + screen_name.lower())
    for c in q:
        c.delete()
def save(self):
    Cache.save(self.html, Board=self.board, Thread=self.thread)
def load(self):
    self.html = Cache.load(Board=self.board, Thread=self.thread)
    if self.html:
        self.html = self.html.decode("utf8")
def save_profile_cache(screen_name, cache_data):
    clear_profile_cache(screen_name)
    cache = Cache(name='profile:' + screen_name.lower(),
                  value=encoder.encode(cache_data))
    cache.put()
class CorrCli(npyscreen.NPSAppManaged):
    def __init__(self, config):
        super(CorrCli, self).__init__()
        self.config = config

    def onStart(self):
        if not self.config['offline_mode']:
            self.login()
            self.session.get_article = self.get_article
        self.cache = Cache(APPNAME, AUTHOR)
        if self.config['clear_cache']:
            self.cache.clear()
        if not self.config['offline_mode']:
            self.cache.fetch_new(self.session, APPNAME, AUTHOR)
        if self.config['update_only']:
            sys.exit(0)
        self.addForm('MAIN', ArticlePicker, name='Article Picker')
        self.addForm('READER', ArticleReader, name='Article Reader')

    def login(self):
        """Set the cookie for the session."""
        s = r.Session()
        name = self.config['name']
        email = self.config['email']
        password = self.config['password']
        try:
            page = s.post(base_url + 'api2/account/password-authenticate',
                          data={'emailAddress': email,
                                'password': password}).text
            if name not in page:
                raise KeyError('name')
        except KeyError:
            sys.stderr.write('Login failed.\n'
                             'Is there a typo in the config file?\n')
            sys.exit(1)
        self.session = s

    def get_article(self, article_id):
        response = self.session.get(base_url + str(article_id))
        if response.status_code == 404:
            return None
        article_page = response.text
        # DOTALL belongs in re.compile; passing it as re.sub's fourth
        # positional argument would set count, not flags
        html_cruft = re.compile(
            '<h1.*[\t\n]*(?P<title>[A-Z\u201c].*)[\n\t]*</h1>', re.DOTALL)
        article_title = re.search('<h1.*</h1>', article_page, re.DOTALL)[0]
        article_title = re.sub(html_cruft, r'\g<title>', article_title)
        html_cruft = re.compile(' ?</?p> ?')
        article_text = [
            re.sub(html_cruft, '\n', x)
            for x in re.findall('<p>.*</p>', article_page)
        ]
        article_text = '\n'.join(article_text).replace('\n\n\n', '\n\n')[1:]
        article = {
            'id': article_id,
            'title': article_title,
            'text': article_text
        }
        return article
def parse_yaml_config(config_file_path, with_notary, with_trivy, with_chartmuseum):
    '''
    :param config_file_path: path to the yaml config file
    :returns: dict of configs
    '''
    with open(config_file_path) as f:
        configs = yaml.safe_load(f)

    config_dict = {
        'portal_url': 'http://portal:8080',
        'registry_url': 'http://registry:5000',
        'registry_controller_url': 'http://registryctl:8080',
        'core_url': 'http://core:8080',
        'core_local_url': 'http://127.0.0.1:8080',
        'token_service_url': 'http://core:8080/service/token',
        'jobservice_url': 'http://jobservice:8080',
        'trivy_adapter_url': 'http://trivy-adapter:8080',
        'notary_url': 'http://notary-server:4443',
        'chart_repository_url': 'http://chartmuseum:9999'
    }

    config_dict['hostname'] = configs["hostname"]
    config_dict['protocol'] = 'http'
    http_config = configs.get('http') or {}
    config_dict['http_port'] = http_config.get('port', 80)

    https_config = configs.get('https')
    if https_config:
        config_dict['protocol'] = 'https'
        config_dict['https_port'] = https_config.get('port', 443)
        config_dict['cert_path'] = https_config["certificate"]
        config_dict['cert_key_path'] = https_config["private_key"]

    if configs.get('external_url'):
        config_dict['public_url'] = configs.get('external_url')
    else:
        if config_dict['protocol'] == 'https':
            if config_dict['https_port'] == 443:
                config_dict['public_url'] = '{protocol}://{hostname}'.format(**config_dict)
            else:
                config_dict['public_url'] = '{protocol}://{hostname}:{https_port}'.format(**config_dict)
        else:
            if config_dict['http_port'] == 80:
                config_dict['public_url'] = '{protocol}://{hostname}'.format(**config_dict)
            else:
                config_dict['public_url'] = '{protocol}://{hostname}:{http_port}'.format(**config_dict)

    # DB configs
    db_configs = configs.get('database')
    if db_configs:
        # harbor db
        config_dict['harbor_db_host'] = 'postgresql'
        config_dict['harbor_db_port'] = 5432
        config_dict['harbor_db_name'] = 'registry'
        config_dict['harbor_db_username'] = '******'
        config_dict['harbor_db_password'] = db_configs.get("password") or ''
        config_dict['harbor_db_sslmode'] = 'disable'
        config_dict['harbor_db_max_idle_conns'] = db_configs.get("max_idle_conns") or default_db_max_idle_conns
        config_dict['harbor_db_max_open_conns'] = db_configs.get("max_open_conns") or default_db_max_open_conns

        if with_notary:
            # notary signer
            config_dict['notary_signer_db_host'] = 'postgresql'
            config_dict['notary_signer_db_port'] = 5432
            config_dict['notary_signer_db_name'] = 'notarysigner'
            config_dict['notary_signer_db_username'] = '******'
            config_dict['notary_signer_db_password'] = '******'
            config_dict['notary_signer_db_sslmode'] = 'disable'
            # notary server
            config_dict['notary_server_db_host'] = 'postgresql'
            config_dict['notary_server_db_port'] = 5432
            config_dict['notary_server_db_name'] = 'notaryserver'
            config_dict['notary_server_db_username'] = '******'
            config_dict['notary_server_db_password'] = '******'
            config_dict['notary_server_db_sslmode'] = 'disable'

    # Data path volume
    config_dict['data_volume'] = configs['data_volume']

    # Initial Admin Password
    config_dict['harbor_admin_password'] = configs["harbor_admin_password"]

    # Registry storage configs
    storage_config = configs.get('storage_service') or {}
    config_dict['registry_custom_ca_bundle_path'] = storage_config.get('ca_bundle') or ''
    if storage_config.get('filesystem'):
        config_dict['storage_provider_name'] = 'filesystem'
        config_dict['storage_provider_config'] = storage_config['filesystem']
    elif storage_config.get('azure'):
        config_dict['storage_provider_name'] = 'azure'
        config_dict['storage_provider_config'] = storage_config['azure']
    elif storage_config.get('gcs'):
        config_dict['storage_provider_name'] = 'gcs'
        config_dict['storage_provider_config'] = storage_config['gcs']
    elif storage_config.get('s3'):
        config_dict['storage_provider_name'] = 's3'
        config_dict['storage_provider_config'] = storage_config['s3']
    elif storage_config.get('swift'):
        config_dict['storage_provider_name'] = 'swift'
        config_dict['storage_provider_config'] = storage_config['swift']
    elif storage_config.get('oss'):
        config_dict['storage_provider_name'] = 'oss'
        config_dict['storage_provider_config'] = storage_config['oss']
    else:
        config_dict['storage_provider_name'] = 'filesystem'
        config_dict['storage_provider_config'] = {}
    if storage_config.get('redirect'):
        config_dict['storage_redirect_disabled'] = storage_config['redirect']['disabled']

    # Global proxy configs
    proxy_config = configs.get('proxy') or {}
    proxy_components = proxy_config.get('components') or []
    no_proxy_config = proxy_config.get('no_proxy')
    all_no_proxy = INTERNAL_NO_PROXY_DN
    if no_proxy_config:
        all_no_proxy |= set(no_proxy_config.split(','))
    for proxy_component in proxy_components:
        config_dict[proxy_component + '_http_proxy'] = proxy_config.get('http_proxy') or ''
        config_dict[proxy_component + '_https_proxy'] = proxy_config.get('https_proxy') or ''
        config_dict[proxy_component + '_no_proxy'] = ','.join(all_no_proxy)

    # Trivy configs, optional
    trivy_configs = configs.get("trivy") or {}
    config_dict['trivy_github_token'] = trivy_configs.get("github_token") or ''
    config_dict['trivy_skip_update'] = trivy_configs.get("skip_update") or False
    config_dict['trivy_offline_scan'] = trivy_configs.get("offline_scan") or False
    config_dict['trivy_ignore_unfixed'] = trivy_configs.get("ignore_unfixed") or False
    config_dict['trivy_insecure'] = trivy_configs.get("insecure") or False
    config_dict['trivy_timeout'] = trivy_configs.get("timeout") or '5m0s'

    # Chart configs
    chart_configs = configs.get("chart") or {}
    if chart_configs.get('absolute_url') == 'enabled':
        config_dict['chart_absolute_url'] = True
    else:
        config_dict['chart_absolute_url'] = False

    # jobservice config
    js_config = configs.get('jobservice') or {}
    config_dict['max_job_workers'] = js_config["max_job_workers"]
    config_dict['jobservice_secret'] = generate_random_string(16)

    # notification config
    notification_config = configs.get('notification') or {}
    config_dict['notification_webhook_job_max_retry'] = notification_config["webhook_job_max_retry"]

    # Log configs
    allowed_levels = ['debug', 'info', 'warning', 'error', 'fatal']
    log_configs = configs.get('log') or {}
    log_level = log_configs['level']
    if log_level not in allowed_levels:
        raise Exception('log level must be one of debug, info, warning, error, fatal')
    config_dict['log_level'] = log_level.lower()

    # parse local log related configs
    local_logs = log_configs.get('local') or {}
    if local_logs:
        config_dict['log_location'] = local_logs.get('location') or '/var/log/harbor'
        config_dict['log_rotate_count'] = local_logs.get('rotate_count') or 50
        config_dict['log_rotate_size'] = local_logs.get('rotate_size') or '200M'

    # parse external log endpoint related configs
    if log_configs.get('external_endpoint'):
        config_dict['log_external'] = True
        config_dict['log_ep_protocol'] = log_configs['external_endpoint']['protocol']
        config_dict['log_ep_host'] = log_configs['external_endpoint']['host']
        config_dict['log_ep_port'] = log_configs['external_endpoint']['port']
    else:
        config_dict['log_external'] = False

    # external DB, optional; if external_database is set, it overrides the database config
    external_db_configs = configs.get('external_database') or {}
    if external_db_configs:
        config_dict['external_database'] = True
        # harbor db
        config_dict['harbor_db_host'] = external_db_configs['harbor']['host']
        config_dict['harbor_db_port'] = external_db_configs['harbor']['port']
        config_dict['harbor_db_name'] = external_db_configs['harbor']['db_name']
        config_dict['harbor_db_username'] = external_db_configs['harbor']['username']
        config_dict['harbor_db_password'] = external_db_configs['harbor']['password']
        config_dict['harbor_db_sslmode'] = external_db_configs['harbor']['ssl_mode']
        config_dict['harbor_db_max_idle_conns'] = external_db_configs['harbor'].get("max_idle_conns") or default_db_max_idle_conns
        config_dict['harbor_db_max_open_conns'] = external_db_configs['harbor'].get("max_open_conns") or default_db_max_open_conns
        if with_notary:
            # notary signer
            config_dict['notary_signer_db_host'] = external_db_configs['notary_signer']['host']
            config_dict['notary_signer_db_port'] = external_db_configs['notary_signer']['port']
            config_dict['notary_signer_db_name'] = external_db_configs['notary_signer']['db_name']
            config_dict['notary_signer_db_username'] = external_db_configs['notary_signer']['username']
            config_dict['notary_signer_db_password'] = external_db_configs['notary_signer']['password']
            config_dict['notary_signer_db_sslmode'] = external_db_configs['notary_signer']['ssl_mode']
            # notary server
            config_dict['notary_server_db_host'] = external_db_configs['notary_server']['host']
            config_dict['notary_server_db_port'] = external_db_configs['notary_server']['port']
            config_dict['notary_server_db_name'] = external_db_configs['notary_server']['db_name']
            config_dict['notary_server_db_username'] = external_db_configs['notary_server']['username']
            config_dict['notary_server_db_password'] = external_db_configs['notary_server']['password']
            config_dict['notary_server_db_sslmode'] = external_db_configs['notary_server']['ssl_mode']
    else:
        config_dict['external_database'] = False

    # update redis configs
    config_dict.update(get_redis_configs(configs.get("external_redis", None), with_trivy))

    # auto generated secret string for core
    config_dict['core_secret'] = generate_random_string(16)

    # UAA configs
    config_dict['uaa'] = configs.get('uaa') or {}

    config_dict['registry_username'] = REGISTRY_USER_NAME
    config_dict['registry_password'] = generate_random_string(32)

    # TLS related configs
    internal_tls_config = configs.get('internal_tls')
    if internal_tls_config and internal_tls_config.get('enabled'):
        config_dict['internal_tls'] = InternalTLS(
            internal_tls_config['enabled'],
            False,
            internal_tls_config['dir'],
            configs['data_volume'],
            with_notary=with_notary,
            with_trivy=with_trivy,
            with_chartmuseum=with_chartmuseum,
            external_database=config_dict['external_database'])
    else:
        config_dict['internal_tls'] = InternalTLS()

    # metric configs
    metric_config = configs.get('metric')
    if metric_config:
        config_dict['metric'] = Metric(metric_config['enabled'],
                                       metric_config['port'],
                                       metric_config['path'])
    else:
        config_dict['metric'] = Metric()

    # trace configs
    trace_config = configs.get('trace')
    config_dict['trace'] = Trace(trace_config or {})

    if config_dict['internal_tls'].enabled:
        config_dict['portal_url'] = 'https://portal:8443'
        config_dict['registry_url'] = 'https://registry:5443'
        config_dict['registry_controller_url'] = 'https://registryctl:8443'
        config_dict['core_url'] = 'https://core:8443'
        config_dict['core_local_url'] = 'https://core:8443'
        config_dict['token_service_url'] = 'https://core:8443/service/token'
        config_dict['jobservice_url'] = 'https://jobservice:8443'
        config_dict['trivy_adapter_url'] = 'https://trivy-adapter:8443'
        # config_dict['notary_url'] = 'http://notary-server:4443'
        config_dict['chart_repository_url'] = 'https://chartmuseum:9443'

    # purge upload configs
    purge_upload_config = configs.get('upload_purging')
    config_dict['purge_upload'] = PurgeUpload(purge_upload_config or {})

    # cache configs
    cache_config = configs.get('cache')
    config_dict['cache'] = Cache(cache_config or {})

    return config_dict
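# A minimal usage sketch for the parser above; the input path and the flag
# values are hypothetical, not taken from the source:
config = parse_yaml_config(
    '/input/harbor.yml',
    with_notary=False,
    with_trivy=True,
    with_chartmuseum=False,
)
print(config['public_url'])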
def test_default(self):
    cache = Cache(4)
    cache.add(1)
    cache.add('q')
    cache.add(0.99)
    cache.add('q')
    cache.add(0.988)
    self.assertEqual(cache.length, 4)
    cache.remove('q')
    self.assertEqual(cache.length, 2)

    cache = Cache(3)
    cache.add(1)
    cache.add('q')
    cache.add('a')
    cache.add('b')
    cache.add('c')
    self.assertEqual(cache.length, 3)
    values = cache.cache
    self.assertEqual(values[0], 'a')
    self.assertEqual(values[1], 'b')
    self.assertEqual(values[2], 'c')
    self.assertFalse(cache.exists(2))
    self.assertTrue(cache.exists('c'))
def test_size_zero_and_less(self):
    cache = Cache(0)
    cache.add(2)
    cache.add(3)
    cache.add(4)
    cache.add(5)
    self.assertFalse(cache.exists(2))

    cache = Cache(-8)
    cache.add(2)
    cache.add(3)
    cache.add(4)
    cache.add(5)
    self.assertFalse(cache.exists(2))
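# The two tests above pin down the implied contract: a fixed capacity set at
# construction (sizes <= 0 store nothing), FIFO eviction, a remove() that
# drops every occurrence of a value, a length property, an exists()
# membership check, and a cache attribute exposing the stored values in
# insertion order. A minimal sketch consistent with those assertions; this
# reconstruction is an assumption, not the project's actual implementation:
class FIFOCache:
    def __init__(self, size):
        self.size = max(size, 0)  # Cache(0) and Cache(-8) must store nothing
        self.cache = []

    def add(self, value):
        if self.size <= 0:
            return
        self.cache.append(value)
        # evict the oldest entries once capacity is exceeded
        while len(self.cache) > self.size:
            self.cache.pop(0)

    def remove(self, value):
        # drop every occurrence: remove('q') shrinks the first test's
        # cache from 4 entries to 2
        self.cache = [v for v in self.cache if v != value]

    def exists(self, value):
        return value in self.cache

    @property
    def length(self):
        return len(self.cache)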
def save_post(request, data, board, thread):
    board_db = Board.get_by_key_name(board)
    if not board_db:
        board_db = Board(key_name=board, thread=[])
    board_db.counter += 1

    # create new thread
    new = False
    if thread == 'new':
        new = True
        if data.get("sage"):
            raise NotFound()  # FIXME: move to form
        thread = board_db.counter
        posts = []
        thread_db = Thread.create(thread, board)
        thread_db.posts = []
        thread_db.subject = data.get("subject")[:SUBJECT_MAX]
    else:
        thread = int(thread)
        #if thread not in board_db.thread:
        #    raise NotFound()
        if thread in board_db.thread and not data.get("sage"):
            board_db.thread.remove(thread)
        thread_db = Thread.load(thread, board)
        if not thread_db:
            raise NotFound()

    if not data.get("sage"):
        board_db.thread.insert(0, thread)
    board_db.thread = board_db.thread[:THREAD_PER_PAGE * BOARD_PAGES]

    rb = rainbow.make_rainbow(request.remote_addr, board, thread)
    data['rainbow'] = rb
    data['rainbow_html'] = rainbow.rainbow(rb)
    data['text_html'] = markup(
        board=board,
        postid=board_db.counter,
        data=escape(data.get('text', '')),
    )
    # FIXME: move to field
    data['name'] = data.get("name") or "Anonymous"

    # save thread and post number
    data['post'] = board_db.counter
    data['thread'] = thread
    now = datetime.now()
    data['time'] = now.strftime("%Y-%m-%d, %H:%M")
    data['timestamp'] = int(now.strftime("%s"))

    img_key = data.get("key")
    if img_key:
        blob_key = blobstore.BlobKey(img_key)
        blob_info = blobstore.BlobInfo.get(blob_key)
        data['image'] = {
            "size": blob_info.size,
            "content_type": blob_info.content_type,
            "full": images.get_serving_url(img_key),
            "thumb": images.get_serving_url(img_key, 200),
        }

    for fname in board_options.get(board, []):
        func = globals().get('option_' + fname)
        if func:
            func(request, data)

    thread_db.posts.append(data)
    db.put((thread_db, board_db))
    Cache.delete((dict(Board=board),))
    memcache.set("threadlist-%s" % board, board_db.thread)
    memcache.set("post-%s-%d" % (board, board_db.counter), data)

    r = Render(board, thread)
    r.add(data, new)
    r.save()

    key = "update-thread-%s-%d" % (board, thread)
    if not new:
        send = {
            "html": r.post_html,
            "evt": "newpost",
            "count": len(thread_db.posts),
            "last": board_db.counter,
        }
        watchers = memcache.get(key) or []
        for person in watchers:
            logging.info("send data to key %s" % (person + key))
            channel.send_message(person + key, dumps(send))

    return board_db.counter, thread
def save_post(request, data, board, thread):
    board_db = Board.get_by_key_name(board)
    if not board_db:
        board_db = Board(key_name=board, thread=[])
    board_db.counter += 1

    # create new thread
    new = False
    if thread == 'new':
        new = True
        if data.get("sage"):
            raise NotFound()  # FIXME: move to form
        thread = board_db.counter
        posts = []
        thread_db = Thread.create(thread, board)
        thread_db.posts = []
        thread_db.subject = data.get("subject")[:SUBJECT_MAX]
    else:
        thread = int(thread)
        if thread in board_db.thread and not data.get("sage"):
            board_db.thread.remove(thread)
        thread_db = Thread.load(thread, board)
        if not thread_db:
            raise NotFound()

    if not data.get("sage"):
        board_db.thread.insert(0, thread)
    per_page = get_config('aib.ib', 'thread_per_page')
    pages = get_config('aib.ib', 'board_pages')
    board_db.thread = board_db.thread[:per_page * pages]

    rb = rainbow.make_rainbow(request.remote_addr, board, thread)
    data['rainbow'] = rb
    data['overlay'] = board in OVER
    data['text_html'] = markup(
        board=board,
        postid=board_db.counter,
        data=escape(data.get('text', '')),
    )

    # save thread and post number
    data['post'] = board_db.counter
    data['thread'] = thread
    now = datetime.now()
    data['time'] = now.strftime("%Y-%m-%d, %H:%M")
    data['timestamp'] = int(now.strftime("%s"))

    img_key = data.get("key")
    if img_key:
        blob_key = blobstore.BlobKey(img_key)
        blob_info = blobstore.BlobInfo.get(blob_key)
        data['image'] = {
            "size": blob_info.size,
            "content_type": blob_info.content_type,
            "full": images.get_serving_url(img_key),
            "thumb": images.get_serving_url(img_key, 200),
        }

    for fname in OPTIONS.get(board, []):
        func = globals().get('option_' + fname)
        if func:
            func(request, data)

    thread_db.posts.append(data)
    db.put((thread_db, board_db))
    Cache.remove("board", board)

    r = Render(board, thread)
    r.add(data, new)
    r.save()

    deferred.defer(rss.add, board, thread, board_db.counter, data.get("text_html"))

    if not new:
        deferred.defer(
            watchers_post_notify,
            board, thread,
            r.post_html,
            len(thread_db.posts),
            board_db.counter
        )

    return board_db.counter, thread
def load(self):
    self.cache = Cache.load("thread", self.board, self.thread)
    if not self.cache:
        self.cache = Cache.create("thread", self.board, self.thread)
def get(self, year="current"): """year can be 'all', 'current', '2002', '2011', etc.""" self.response.headers["Content-Type"] = "text/calendar; charset=utf-8" # find from cache q = Cache.gql("WHERE site = :site AND year = :year", site="nuscs", year=year) c = q.get() if c: self.response.out.write(c.data) return # generate now cal = Calendar() cal.add("prodid", "-//NUS CS Seminars//ronhuang.org//") cal.add("version", "2.0") cal.add("X-WR-CALNAME", "NUS CS Seminars") cal.add("X-WR-CALDESC", "Seminars are open to the public, and usually held in the School's Seminar Room.") q = Seminar.all().order("start") if year != "all": yn = None try: yn = int(year) except: pass if yn: # return events within that year q = q.filter("start >=", datetime(yn, 1, 1, tzinfo=SGT)) q = q.filter("start <", datetime(yn + 1, 1, 1, tzinfo=SGT)) else: # default # return a year of events. q = q.filter("start >=", datetime.now(SGT) - timedelta(days=366)) for s in q: event = Event() event["uid"] = s.url event.add("summary", s.title) event.add("dtstart", s.start) event.add("dtend", s.end) event.add("dtstamp", s.stamp) event.add("location", s.venue) event.add("url", s.url) event.add("description", s.speaker) event.add("categories", "seminar") event.add("class", "PUBLIC") cal.add_component(event) # generated data data = cal.as_string() encoding = chardet.detect(data)["encoding"] data = unicode(data, encoding) # store in datastore q = Cache.gql("WHERE site = :site AND year = :year", site="nuscs", year=year) c = q.get() if c: c.data = data else: c = Cache(site="nuscs", data=data, year=year) c.put() self.response.out.write(data)
def main():
    commands = {}
    config = None
    ignore_list = []
    settings = AttributeStore()
    tools = {}
    asset_folders = []

    p = argparse.ArgumentParser()
    p.add_argument(
        "-c", "--config",
        dest="config_path",
        metavar="CONFIG_FILE_PATH",
        help="Configuration file path to use when converting assets",
        required=True
    )
    p.add_argument("-p", "--platform", dest="platform")
    p.add_argument("-y", "--clear-cache", dest="clear_cache", action="store_true")
    p.add_argument("-s", "--source_root", dest="source_root")
    args = p.parse_args()

    config_cache = KeyValueCache()

    # load config
    config_data = load_config(args.config_path, config_cache)

    # the source_root can be specified on the command line;
    # this properly inserts it into the paths dict
    if "paths" in config_data:
        if "source_root" not in config_data["paths"]:
            if not args.source_root:
                raise Exception(
                    "source_root is missing. This should be defined"
                    " in a config file, or on the command line."
                )
            else:
                # this path SHOULD be an absolute path
                config_data["paths"]["source_root"] = args.source_root

    config = AttributeStore(config_data)

    if not args.platform:
        args.platform = get_platform()
    logging.info("Target Platform is \"%s\"" % args.platform)

    # load tools
    tools_path = os.path.abspath(
        os.path.join(
            WorkingDirectory.current_directory(),
            os.path.dirname(__file__),
            "tools.conf"
        )
    )

    # get cache path
    cache = Cache(args.config_path, remove=args.clear_cache)
    cache.load()

    # conform all paths
    if getattr(config, "paths", None):
        base_path = os.path.dirname(os.path.abspath(args.config_path))
        # setup environment variables, path, etc.
        config.paths = setup_environment(base_path, config.paths, args.platform)
        setattr(settings, "paths", AttributeStore(config.paths))

    # parse all tools
    Tool.load_tools(tools, tools_path, config.tools)
    logging.info("Loaded %i tools." % len(tools.items()))

    # parse asset folders
    for asset_glob in config.assets:
        data = dict(
            {u"glob": asset_glob}.items() + config.assets[asset_glob].items()
        )
        asset_folder = AssetFolderMask(**data)
        asset_folder.make_folders_absolute(
            settings.paths.source_root,
            settings.paths.destination_root
        )
        asset_folders.append(asset_folder)
    logging.info("Loaded %i asset folders." % len(asset_folders))

    # check if we need to enter monitoring mode
    monitor_mode = hasattr(config, "monitor")
    if monitor_mode:
        monitor = config.monitor
        if "url" not in monitor:
            raise Exception("Monitor block requires a \"url\" parameter")
        # run monitoring
        monitor_assets(
            cache, settings, asset_folders, tools,
            args.platform, monitor["url"]
        )
    else:
        # just run through all assets
        iterate_assets(cache, settings, asset_folders, tools, args.platform)

    # write cache to file
    cache.save()
import time

from settings import settings
from models import Runner, Cache
from utils import CSVReader, HttpClient

http_client = HttpClient(
    settings.API_ENDPOINTS,
    settings.STREAMELEMENT_ACCOUNT_ID,
    settings.STREAMELEMENT_JWT_TOKEN,
)
csv_reader = CSVReader(settings.FILENAME, settings.CSV_FORMAT)
cache = Cache(settings.CACHE_SIZE)
runner = Runner(
    csv_reader.get_contests(),
    cache,
    http_client,
    settings.CONTEST_MIN_BET,
    settings.CONTEST_MAX_BET,
    settings.CONTEST_DURATION_SECONDS,
)

nb_seconds_to_bet = round(settings.CONTEST_DURATION_SECONDS * 0.75)
nb_seconds_rest = settings.CONTEST_DURATION_SECONDS - nb_seconds_to_bet

runner.refund_contests()
while True:
    runner.next_contest()
def addto_cache(source, target, text, translated, msg_id, match):
    cache_row = Cache(source, target, text, translated, msg_id, match)
    db.session.add(cache_row)
    db.session.commit()