def __init__(self, session_start_ms, session_end_ms, cameraId, clipId):
    self.alias = ''
    self.clips = []
    clip_key = '/{0}/{1}.mp4'.format(cameraId, clipId)
    clip_url = utils.url_join(config.CLIP_URL, clip_key)
    thumbnail_key = '/{0}/{1}_{{size}}.jpg'.format(cameraId, clipId)
    thumbnail_url = utils.url_join(config.CLIP_URL, thumbnail_key)
    self.metadata = {
        "clipId": clipId,
        "cameraId": cameraId,
        "startTimestampInMs": session_start_ms,
        "endTimestampInMs": session_end_ms,
        "status": "CLIP_PENDING",
        "bucket": config.S3_BUCKET_DESTINATION,
        "clip": {
            "key": clip_key,
            "url": clip_url
        },
        "retryTimestamps": [],
        "thumbnail": {
            "key": thumbnail_key,
            "url": thumbnail_url,
            "sizes": list(map(int, config.THUMBNAIL_SIZES.split()))
        }
    }
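The snippets in this section all lean on a small url_join helper whose implementation is never shown. Two calling conventions appear: some call sites pass segments as separate arguments (url_join(a, b)), others pass a single list (url_join([a, b])). A minimal sketch that tolerates both, purely illustrative since each project's utils.url_join may differ:

def url_join(*parts):
    """Join URL segments with single slashes (hypothetical sketch)."""
    if len(parts) == 1 and isinstance(parts[0], (list, tuple)):
        parts = parts[0]  # accept the list-style calling convention too
    return '/'.join(str(p).strip('/') for p in parts)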
class BunnyPalette(object):
    """Bunny colours to choose from."""
    RED = '#c52828'
    ORANGE = '#e59100'
    YELLOW = '#e2e05d'
    GREEN = '#12751b'
    BLUE = '#214ddc'
    PURPLE = '#a41bf3'
    PINK = '#ff14f3'
    LGREEN = '#90ee90'
    BLACK = '#3a363b'
    HPINK = '#ff69b4'
    GREY = '#c0c0c0'
    RAINB = 'linear-gradient(to right, red,orange,yellow,green,blue,indigo,violet)'
    GBLUE = '#0d9a86'
    MARON = '#800000'
    COMM = 'url(' + url_join(WebPaths.IMAGES, "comm.png") + ')'
    ANAR = 'url(' + url_join(WebPaths.IMAGES, "anar.jpg") + ')'

    allColors = {'RED': RED, 'ORANGE': ORANGE, 'YELLOW': YELLOW,
                 'GREEN': GREEN, 'BLUE': BLUE, 'PURPLE': PURPLE,
                 'PINK': PINK, 'LGREEN': LGREEN, 'BLACK': BLACK,
                 'HPINK': HPINK, 'GREY': GREY, 'RAINB': RAINB,
                 'MARON': MARON, 'GBLUE': GBLUE, 'COMM': COMM, 'ANAR': ANAR}

    allCols = [['RED', 'ORANGE', 'YELLOW', 'GREEN'],
               ['BLUE', 'PURPLE', 'PINK', 'LGREEN'],
               ['BLACK', 'HPINK', 'GREY', 'RAINB'],
               ['MARON', 'GBLUE', 'COMM', 'ANAR']]

    @classmethod
    def is_colour(cls, cid):
        """Determines if the given colour id is valid."""
        return cid in cls.allColors
def __init__(self):
    config = utils.get_conf()
    manager_url = config["manager_url"]
    self.node_id = config["node"]["node_id"]
    self.node_key = config["node"]["node_key"]
    self.get_next_task_url = utils.url_join([manager_url, "get_next_task"])
    self.notify_task_state_url = utils.url_join(
        [manager_url, "notify_task_state"])
    self.auth_node_url = utils.url_join([manager_url, "auth_node"])
class Images(object):
    """Client-side paths to images."""
    BANNER = url_join(WebPaths.IMAGES, 'banner.png')
    BUNNY_READY = url_join(WebPaths.IMAGES, 'bunnyready.png')
    BUNNY_RUN = url_join(WebPaths.IMAGES, 'bunnyrun.png')
    THINKING = url_join(WebPaths.IMAGES, 'thinking.gif')
    CARD_BACK = url_join(WebPaths.IMAGES, 'cardback.png')
    VOTE_TOKEN = url_join(WebPaths.IMAGES, 'votetoken.png')
    YOUR_TURN = url_join(WebPaths.IMAGES, 'arrow.ico')
    ICON_ACTIVE = url_join(WebPaths.IMAGES, 'bunnyicongreen.png')
    ICON_AWAY = url_join(WebPaths.IMAGES, 'bunnyiconyellow.png')
    ICON_ASLEEP = url_join(WebPaths.IMAGES, 'bunnyicongrey.png')
def download_latest_build(self):
    """Downloads the latest OSS-Fuzz build from GCS.

    Returns:
      A path to where the OSS-Fuzz build was stored, or None if it wasn't.
    """
    if os.path.exists(self.workspace.clusterfuzz_build):
        # This function can be called multiple times, don't download the build
        # again.
        return self.workspace.clusterfuzz_build

    os.makedirs(self.workspace.clusterfuzz_build, exist_ok=True)
    latest_build_name = self.get_latest_build_name()
    if not latest_build_name:
        return None

    oss_fuzz_build_url = utils.url_join(utils.GCS_BASE_URL,
                                        self.CLUSTERFUZZ_BUILDS,
                                        self.config.project_name,
                                        latest_build_name)
    if http_utils.download_and_unpack_zip(oss_fuzz_build_url,
                                          self.workspace.clusterfuzz_build):
        return self.workspace.clusterfuzz_build
    return None
def download_corpus(self, target_name, parent_dir):
    """Downloads the latest OSS-Fuzz corpus for the target.

    Returns:
      The local path to the corpus, or None if the download failed.
    """
    corpus_dir = self.get_target_corpus_dir(target_name, parent_dir)
    os.makedirs(corpus_dir, exist_ok=True)
    # TODO(metzman): Clean up this code.
    project_qualified_fuzz_target_name = target_name
    qualified_name_prefix = self.config.project_name + '_'
    if not target_name.startswith(qualified_name_prefix):
        project_qualified_fuzz_target_name = (qualified_name_prefix +
                                              target_name)

    corpus_url = utils.url_join(
        utils.GCS_BASE_URL,
        '{0}-backup.clusterfuzz-external.appspot.com/corpus/libFuzzer/'.format(
            self.config.project_name),
        project_qualified_fuzz_target_name, self.CORPUS_ZIP_NAME)

    if download_and_unpack_zip(corpus_url, corpus_dir):
        return corpus_dir
    return None
def download_latest_corpus(self):
    """Downloads the latest OSS-Fuzz corpus for the target from Google Cloud.

    Returns:
      The local path to the corpus, or None if the download failed.
    """
    if not self.project_name:
        return None
    if not os.path.exists(self.out_dir):
        logging.error('Out directory %s does not exist.', self.out_dir)
        return None

    corpus_dir = os.path.join(self.out_dir, 'backup_corpus', self.target_name)
    os.makedirs(corpus_dir, exist_ok=True)
    project_qualified_fuzz_target_name = self.target_name
    qualified_name_prefix = '%s_' % self.project_name
    if not self.target_name.startswith(qualified_name_prefix):
        project_qualified_fuzz_target_name = (qualified_name_prefix +
                                              self.target_name)
    corpus_url = utils.url_join(
        utils.GCS_BASE_URL,
        '{0}-backup.clusterfuzz-external.appspot.com/corpus/libFuzzer/'.format(
            self.project_name),
        project_qualified_fuzz_target_name, CORPUS_ZIP_NAME)
    return download_and_unpack_zip(corpus_url, corpus_dir)
def download_latest_build(self):
    """Downloads the latest OSS-Fuzz build from GCS.

    Returns:
      A path to where the OSS-Fuzz build was stored, or None if it wasn't.
    """
    if os.path.exists(self.workspace.clusterfuzz_build):
        # This function can be called multiple times, don't download the build
        # again.
        return self.workspace.clusterfuzz_build

    _make_empty_dir_if_nonexistent(self.workspace.clusterfuzz_build)
    latest_build_name = self.get_latest_build_name()
    if not latest_build_name:
        return None

    logging.info('Downloading latest build.')
    oss_fuzz_build_url = utils.url_join(utils.GCS_BASE_URL,
                                        self.CLUSTERFUZZ_BUILDS,
                                        self.config.oss_fuzz_project_name,
                                        latest_build_name)
    if http_utils.download_and_unpack_zip(oss_fuzz_build_url,
                                          self.workspace.clusterfuzz_build):
        logging.info('Done downloading latest build.')
        return self.workspace.clusterfuzz_build
    return None
def load_pages(directory, output_path, kernel):
    """
    Find all .text files in the specified directory and return a map of
    Page objects, keyed by the url for the page.

    \param directory starting directory to search
    \param output_path the directory we'll be writing output to
    \param kernel the kernel object, carrying the config map used to look up
           the template for each page
    """
    page_map = {}
    length = len(directory)
    for root, name in utils.find_files(directory, False, '.text'):
        path = os.path.join(root, name)
        base_path = root[length:]
        if not base_path.startswith('/'):
            base_path = '/' + base_path
        name = name[0:-5]  # strip the '.text' extension
        url = utils.url_join(base_path, name)
        config = utils.find_config(kernel.configs, base_path)
        page = Page(kernel, path, output_path, url, config)
        page_map[url] = page
    return page_map
def download_oss_fuzz_build(self):
    """Downloads the latest OSS-Fuzz build from GCS.

    Returns:
      A path to where the OSS-Fuzz build is located, or None.
    """
    if not os.path.exists(self.out_dir):
        logging.error('Out directory %s does not exist.', self.out_dir)
        return None
    if not self.project_name:
        return None

    build_dir = os.path.join(self.out_dir, 'oss_fuzz_latest',
                             self.project_name)
    if os.path.exists(os.path.join(build_dir, self.target_name)):
        return build_dir

    os.makedirs(build_dir, exist_ok=True)
    latest_build_str = self.get_latest_build_version()
    if not latest_build_str:
        return None

    oss_fuzz_build_url = utils.url_join(utils.GCS_BASE_URL,
                                        CLUSTERFUZZ_BUILDS,
                                        self.project_name, latest_build_str)
    return download_and_unpack_zip(oss_fuzz_build_url, build_dir)
def find_cards(folder, suffixes=('.jpg', '.png')):
    """Returns all urls for a given folder, matching the given suffixes."""
    path = os.path.join(os.path.dirname(__file__),
                        display.WebPaths.CARDS, folder)
    return [
        url_join(display.WebPaths.CARDS, folder, name)
        for name in os.listdir(path)
        if has_suffix(name, suffixes)
    ]
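find_cards assumes a has_suffix helper that isn't defined in the snippet. Since str.endswith already accepts a tuple of suffixes, a plausible one-liner (an assumption, not the project's actual code) is:

def has_suffix(name, suffixes):
    """True if name ends with any of the given suffixes (hypothetical)."""
    return name.lower().endswith(tuple(suffixes))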
def get_url_list_from_date(date):
    opener = utils.get_caida_opener(utils.caida_trace_base_url)
    team_dir = ["team-1/daily/", "team-2/daily/", "team-3/daily/"]
    res = []
    for t in team_dir:
        f = opener.open(utils.url_join([utils.caida_trace_base_url, t]))
        text = f.read()
        parser = CaidaParser()
        parser.feed(text)
        target_year = date[:4]
        for e in parser.dir:
            if time_cmp(e.strip('/'), target_year) == 0:
                res.extend(get_url_list_from_year_dir(
                    date,
                    utils.url_join([utils.caida_trace_base_url, t, e]),
                    opener))
                break
    return res
def list_all_binary_packages_for_this_package_at_version(self, version):
    """
    URL: /mr/package/<package>/<version>/binpackages
    http status codes: 200 500 404 304
    summary: list all binary packages associated with this source package
             at that version

    :param version:
    :return: **temporary** return dict
    """
    url = url_join(
        BASE_URL,
        "mr/package/{package}/{version}/binpackages".format(
            package=self.package_name, version=version)
    )
    try:
        response = snapshot_get(url, self.timeout)
    except requests.exceptions.HTTPError:
        url = url_join(
            BASE_URL,
            "mr/package/{package}/{version}/binpackages".format(
                package=self.source_name, version=version)
        )
        response = snapshot_get(url, self.timeout)
    return response.json()["result"]
def get(self, app, url):
    backend_url = url_join(self.backend, app, url,
                           query_join(self.request.arguments))
    try:
        response = HTTPClient().fetch(HTTPRequest(url=backend_url,
                                                  method="GET"))
        self.set_header("Content-Type", "application/json")
        self.write(response.body)
    except HTTPError as e:
        self.set_status(e.code)
        self.write(e.message)
def get_latest_time_fromsite():
    opener = utils.get_caida_opener(utils.caida_trace_base_url)
    team_dir = ["team-1/daily/", "team-2/daily/", "team-3/daily/"]
    temp = []
    for t in team_dir:
        f = opener.open(utils.url_join([utils.caida_trace_base_url, t]))
        text = f.read()
        parser = CaidaParser()
        parser.feed(text)
        e = parser.dir[-1]
        temp.append(get_latest_date_from_year_dir(
            utils.url_join([utils.caida_trace_base_url, t, e]), opener))
    res = temp[0]
    for t in temp[1:]:
        if time_cmp(t, res) > 0:
            res = t
    return res
def convert(self, to, amount, date=None):
    logging.debug(f"Convert {locals()}")
    to = to if isinstance(to, str) else to.name
    if to == self.name:
        return amount

    price_data, requested_price, date = self.get_cached(to, date)
    if requested_price is None:
        if (date is None) or (date.normalize()
                              == pd.to_datetime("now").normalize()):
            save_file = f"{self.name}_{to}_latest.pkl"
            price_url = url_join(URL_MARKET_PRICE_FIAT, "latest")
            requested_price = requests.get(price_url, params={
                "base": self.name
            }).json()["rates"][to]
            requested_price = float(requested_price)
            save_data(requested_price, save_file, add_path_prefix=True)
        else:
            # ``to`` has already been normalized to a string above.
            save_file = f"{self.name}_{to}.pkl"
            date_query = date.strftime("%Y-%m-%d")
            price_url = url_join(URL_MARKET_PRICE_FIAT, date_query)
            # Decode the JSON body before indexing into the rates.
            requested_price = requests.get(price_url, params={
                "base": self.name
            }).json()["rates"][to]
            requested_price = float(requested_price)
            new_value = pd.Series(name=date, data={
                "price": requested_price
            }).to_frame()
            # Start a new cache frame when none exists; otherwise append.
            if price_data is None:
                price_data = new_value
            else:
                price_data = price_data.append(new_value)
            save_data(price_data, save_file, add_path_prefix=True)
    return requested_price * amount
def info_from_hash(self, version, arch=None):
    """
    URL: /mr/file/<hash>/info
    http status codes: 200 500 404 304

    :param version: the version of the package
    :param arch: optional architecture filter
    :return:
    """
    try:
        url = url_join(
            BASE_URL,
            "/mr/package/{package}/{version}/allfiles".format(
                package=self.package_name, version=version)
        )
        response = snapshot_get(url, self.timeout)
    except requests.exceptions.HTTPError:
        url = url_join(
            BASE_URL,
            "/mr/package/{package}/{version}/allfiles".format(
                package=self.source_name, version=version)
        )
        response = snapshot_get(url, self.timeout)
    the_hash = self.target_version_hash(response, version, arch)
    url = url_join(BASE_URL,
                   "/mr/file/{the_hash}/info".format(the_hash=the_hash))
    response = snapshot_get(url, self.timeout)
    return response.json()
def _get_oss_fuzz_latest_cov_report_info(oss_fuzz_project_name):
    """Gets and returns a dictionary containing the latest coverage report
    info for |oss_fuzz_project_name|."""
    latest_report_info_url = utils.url_join(
        utils.GCS_BASE_URL, OSS_FUZZ_LATEST_COVERAGE_INFO_PATH,
        oss_fuzz_project_name + '.json')
    latest_cov_info = http_utils.get_json_from_url(latest_report_info_url)
    if latest_cov_info is None:
        logging.error('Could not get the coverage report json from url: %s.',
                      latest_report_info_url)
        return None
    return latest_cov_info
def get(self, app, url):
    backend_url = url_join(self.backend, app, url,
                           query_join(self.request.arguments))
    try:
        response = HTTPClient().fetch(
            HTTPRequest(url=backend_url, method='GET'))
        self.set_header("Content-Type", "application/json")
        self.write(response.body)
    except HTTPError as e:
        self.set_status(e.code)
        self.write(e.message)
def _get_latest_cov_report_info(project_name):
    """Gets and returns a dictionary containing the latest coverage report
    info for |project_name|."""
    latest_report_info_url = utils.url_join(utils.GCS_BASE_URL,
                                            LATEST_REPORT_INFO_PATH,
                                            project_name + '.json')
    latest_cov_info = get_json_from_url(latest_report_info_url)
    # Only log and bail out when the fetch actually failed.
    if latest_cov_info is None:
        logging.error('Could not get the coverage report json from url: %s.',
                      latest_report_info_url)
        return None
    return latest_cov_info
def get_url_list_from_year_dir(date, url, opener):
    f = opener.open(url)
    text = f.read()
    parser = CaidaParser()
    parser.feed(text)
    for e in parser.dir:
        d = e.split('-')[1].strip('/')
        if time_cmp(d, date) == 0:
            return get_url_list_from_date_dir(utils.url_join([url, e]),
                                              opener)
    return []
def list_all_available_source_versions(self):
    """
    URL: /mr/package/<package>/
    http status codes: 200 500 404 304
    summary: list all available source versions for this package

    :return: list
    """
    try:
        response = snapshot_get(
            url_join(BASE_URL,
                     "/mr/package/{package}/".format(
                         package=self.package_name)),
            self.timeout
        )
    except requests.exceptions.HTTPError:
        self.loggerA.debug(
            "Making request with source_name:{source_name} instead of "
            "package_name:{package_name}".format(
                source_name=self.source_name,
                package_name=self.package_name
            )
        )
        response = snapshot_get(
            url_join(BASE_URL,
                     "/mr/package/{package}/".format(
                         package=self.source_name)),
            self.timeout
        )
    return [str(version["version"]) for version in response.json()["result"]]
def list_all_sources_for_this_package_at_version(self, version):
    """
    URL: /mr/package/<package>/<version>/srcfiles
    Options: fileinfo=1 includes fileinfo section
    http status codes: 200 500 404 304
    summary: list all source files associated with this package at that
             version

    :param version: the version of the package
    :return: the decoded JSON response (previously a list of hashes)
    """
    url = url_join(
        BASE_URL,
        "mr/package/{package}/{version}/srcfiles".format(
            package=self.package_name, version=version)
    )
    try:
        response = snapshot_get(url, self.timeout)
    except requests.exceptions.HTTPError:
        url = url_join(
            BASE_URL,
            "mr/package/{package}/{version}/srcfiles".format(
                package=self.source_name, version=version)
        )
        response = snapshot_get(url, self.timeout)
    # return [str(h['hash']) for h in response.json()['result']]
    return response.json()
def get_latest_time_fromsite():
    opener = utils.get_iplane_opener()
    f = opener.open(utils.iplane_base_url)
    text = f.read()
    f.close()
    parser = iPlaneParser()
    parser.feed(text)
    e = parser.dir[-1].strip('/')
    return get_latest_date_from_year_dir(
        utils.url_join([utils.iplane_base_url, e]), opener)
def list_all_files_associated_with_this_source_package_at_that_version(
        self, version, arch=None):
    """
    URL: /mr/package/<package>/<version>/allfiles
    Options: fileinfo=1 includes fileinfo section
    http status codes: 200 500 404 304
    summary: list all files associated with this source package at that
             version

    :param version: the version of the package
    :return:
    """
    url = url_join(
        BASE_URL,
        "/mr/package/{package}/{version}/allfiles".format(
            package=self.package_name, version=version)
    )
    response = snapshot_get(url, self.timeout)
    return response.json()["result"]
def get_target_coverage_report(self, target):
    """Get the coverage report for a specific fuzz target.

    Args:
      target: The name of the fuzz target whose coverage is requested.

    Returns:
      The target's coverage json dict or None on failure.
    """
    if not self.fuzzer_stats_url:
        return None
    target_url = utils.url_join(self.fuzzer_stats_url, target + '.json')
    return get_json_from_url(target_url)
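get_json_from_url is referenced here and in the coverage-report helpers above but never defined in these snippets. A minimal sketch of what such a helper might do, assuming it simply fetches the URL and decodes JSON (the real helper may add retries or richer error handling):

import json
import urllib.error
import urllib.request


def get_json_from_url(url):
    """Fetch url and return the decoded JSON, or None on failure (sketch)."""
    try:
        with urllib.request.urlopen(url) as response:
            return json.loads(response.read().decode())
    except (urllib.error.URLError, ValueError):
        return None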
class WebPaths(object):
    """Client-side paths to resource directories."""
    STATIC = 'static'
    IMAGES = url_join(STATIC, 'images')
    JS = url_join(STATIC, 'js')
    CSS = url_join(STATIC, 'css')
    CARDS = url_join(STATIC, 'cards')
    SMILIES = url_join(IMAGES, 'smilies')
    JQUERY_UI = url_join(JS, 'jquery-ui-1.10.4')
def delete(self, app, url):
    backend_url = url_join(self.backend, app, url)
    try:
        response = HTTPClient().fetch(
            HTTPRequest(url=backend_url, method="DELETE",
                        body=self.request.body,
                        allow_nonstandard_methods=True)
        )
        # TODO: reformat maybe?
        self.set_header("Content-Type", "application/json")
        self.write(response.body)
    except HTTPError as e:
        self.set_status(e.code)
        self.write(e.message)
    except Exception as e:
        self.set_status(INTERNAL_SERVER_ERROR)
        self.write(e.message)
def get_latest_build_name(self):
    """Gets the name of the latest OSS-Fuzz build of a project.

    Returns:
      A string with the latest build version or None.
    """
    version_file = (f'{self.config.project_name}-{self.config.sanitizer}'
                    '-latest.version')
    version_url = utils.url_join(utils.GCS_BASE_URL,
                                 self.CLUSTERFUZZ_BUILDS,
                                 self.config.project_name, version_file)
    try:
        response = urllib.request.urlopen(version_url)
    except urllib.error.HTTPError:
        logging.error('Error getting latest build version for %s from: %s.',
                      self.config.project_name, version_url)
        return None
    return response.read().decode()
def __init__(self, package_name, timeout=DEFAULT_TIMEOUT):
    self.loggerA = logging.getLogger(self.__class__.__name__)
    self.package_name = package_name
    self.timeout = timeout
    url = url_join(BASE_URL,
                   "/mr/binary/{binary}/".format(binary=self.package_name))
    self.initial_response = snapshot_get(url, self.timeout).json()
    self.source_name = self.initial_response["result"][0]["source"]
    self.binary_name = self.initial_response["result"][0]["name"]
    self.loggerA.debug(
        "\nsource_name:{s},\nbinary_name:{b}\npackage_name:{p}\n".format(
            s=self.source_name, b=self.binary_name, p=self.package_name
        )
    )
def delete(self, app, url):
    backend_url = url_join(self.backend, app, url)
    try:
        response = HTTPClient().fetch(
            HTTPRequest(
                url=backend_url,
                method='DELETE',
                body=self.request.body,
                allow_nonstandard_methods=True))
        # TODO: reformat maybe?
        self.set_header("Content-Type", "application/json")
        self.write(response.body)
    except HTTPError as e:
        self.set_status(e.code)
        self.write(e.message)
    except Exception as e:
        self.set_status(INTERNAL_SERVER_ERROR)
        self.write(e.message)
def list_all_files_associated_with_a_binary_package(self, version, binpkg,
                                                    binversion):
    """
    URL: /mr/package/<package>/<version>/binfiles/<binpkg>/<binversion>
    Options: fileinfo=1 includes fileinfo section
    http status codes: 200 500 404 304
    summary: list all files associated with a binary package

    :param version: the version of binpkg
    :param binpkg: binpackage you get from
        list_all_binary_packages_for_this_package_at_version(self, version)
    :param binversion: version of **binpkg**
    :return:
    """
    url = url_join(
        BASE_URL,
        "/mr/package/{package}/{version}/binfiles/{binpkg}/{binversion}".format(
            package=self.package_name, version=version, binpkg=binpkg,
            binversion=binversion
        ),
    )
    response = snapshot_get(url, self.timeout)
    return response.json()
def get_measurements_list(start_ts, stop_ts, mt_num):
    page_size = 500
    measurements_url = utils.url_join(
        [utils.ripeatlas_base_url, "/measurements/"])
    params = {}
    params["format"] = "json"
    params["page_size"] = str(page_size)
    params["is_public"] = "true"
    params["type"] = "traceroute"
    params["af"] = "4"
    params["start_time__gte"] = start_ts
    # Double underscore, matching the start_time__gte filter above.
    params["stop_time__lte"] = stop_ts
    # Resource list.
    resources = ['']
    result_list = []
    # First page.
    url = utils.construct_url(measurements_url, params)
    page = download_ripe_atlas_list_worker(url)
    page_num = int(math.ceil(float(page["count"]) / page_size))
    result_list.extend(page["results"])
    temp_list = ["" for i in range(page_num + 1)]
    # Build argv_list.
    argv = []
    #for i in range(2, page_num + 1):
    for i in range(2, 3):  # debug
        params["page"] = str(i)
        url = utils.construct_url(measurements_url, params)
        arg = (url, temp_list, i)
        argv.append(arg)
    # Run with multiple threads.
    multi_thread.run_with_multi_thread(download_ripe_atlas_list_wrapper,
                                       argv, resources, mt_num)
    for i in range(2, page_num + 1):
        result_list.extend(temp_list[i])
    return result_list
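The pagination arithmetic above deserves a quick check: page one is fetched synchronously, and the remaining pages are derived from the reported total. For example, assuming the endpoint reports count=1234 with page_size=500 (hypothetical numbers):

import math

page_size = 500
count = 1234  # hypothetical value reported by the first page
page_num = int(math.ceil(float(count) / page_size))
assert page_num == 3  # pages 2 and 3 remain after the first request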
def get_latest_build_version(self):
    """Gets the latest OSS-Fuzz build version for a project's fuzzers.

    Returns:
      A string with the latest build version or None.
    """
    if not self.project_name:
        return None
    version = VERSION_STRING.format(project_name=self.project_name,
                                    sanitizer=self.sanitizer)
    version_url = utils.url_join(utils.GCS_BASE_URL, CLUSTERFUZZ_BUILDS,
                                 self.project_name, version)
    try:
        response = urllib.request.urlopen(version_url)
    except urllib.error.HTTPError:
        logging.error(
            'Error getting latest build version for %s with url %s.',
            self.project_name, version_url)
        return None
    return response.read().decode()
def on_set_repositories(self, repositories):
    """ set repositories list """
    self.repositories = []
    for repo in repositories:
        try:
            rtype, url, dist, sections = repo.split(None, 3)
        except ValueError:
            raise InvalidRepository(
                "Repository is either invalid or not supported: %s" % repo)
        for section in sections.split():
            r = {}
            r["rtype"] = rtype
            r["dist"] = dist
            r["section"] = section
            r["url"] = url_join(url, "dists", dist, section)
            self.repositories.append(r)
def student_search():
    token = get_token()
    headers = {'Authorization': 'Bearer %s' % token}
    if len(sys.argv) < 2 or len(sys.argv) > 3:
        error('Invalid arguments')
    user_id = get_user_id(sys.argv[1], headers)
    if user_id is None:
        error('Not a valid user')
    if len(sys.argv) == 3:
        flag = sys.argv[2]
    else:
        flag = None
    if flag == 'staff':
        flag = 'staff?'
    endpoint = '/users/%s/' % user_id
    url = url_join(endpoint)
    response = requests.get(url, headers=headers)
    response = bytetodict(response.content)
    if response == {}:
        error('Login not found')
    df = pd.Series(response)
    print_infos(df, flag)
def download_latest_build(self, out_dir):
    """Downloads the latest OSS-Fuzz build from GCS.

    Returns:
      A path to where the OSS-Fuzz build was stored, or None if it wasn't.
    """
    build_dir = os.path.join(out_dir, self.BUILD_DIR_NAME)
    if os.path.exists(build_dir):
        return build_dir

    os.makedirs(build_dir, exist_ok=True)
    latest_build_name = self.get_latest_build_name()
    if not latest_build_name:
        return None

    oss_fuzz_build_url = utils.url_join(utils.GCS_BASE_URL,
                                        self.CLUSTERFUZZ_BUILDS,
                                        self.config.project_name,
                                        latest_build_name)
    if download_and_unpack_zip(oss_fuzz_build_url, build_dir):
        return build_dir
    return None
def blog_command(kernel, *args):
    """
    Create the directory, and an empty .text file, for today's blog entry.
    """
    config = utils.find_config(kernel.configs, '/')
    paths = config.get('blog', 'paths').split(',')
    path = paths[0]
    newpath = utils.url_join(kernel.options.output, path,
                             time.strftime('%Y/%m/%d'))
    if not os.path.exists(newpath):
        os.makedirs(newpath)
        print('Created new blog dir %s' % newpath)
    if args and len(*args) > 0:
        title = ' '.join(*args)
    else:
        title = 'Temporary post'
    filename = os.path.join(newpath,
                            title.lower().replace(' ', '-') + '.text')
    if not os.path.exists(filename):
        posted_on = time.strftime('%d %b, %Y')
        content = '''title: %(title)s
posted-on: %(posted-on)s
tags: %(title)s
%(dashes)s
''' % {
            'title': title,
            'posted-on': posted_on,
            'dashes': ('-' * len(title))
        }
        with open(filename, 'w') as blog_file:
            blog_file.write(content)
def find_cards(folder, suffixes=('.jpg',)):
    """Returns all urls for a given folder, matching the given suffixes."""
    path = os.path.join(
        os.path.dirname(__file__), display.WebPaths.CARDS, folder)
    return [url_join(display.WebPaths.CARDS, folder, name)
            for name in os.listdir(path) if has_suffix(name, suffixes)]
class Currency():
    URL_MARKET_PRICE = url_join(KRAKEN_PUBLIC_END_POINT, "Ticker")

    def __init__(self, ticker, base_currency_unit):
        self.ticker = ticker
        self.base_currency_unit = base_currency_unit
        self.value = 0
        self.currency_unit = None
        self.total_invested = 0
        self.total_invested_up_now = 0
        self.realized_profit = 0
        self.compute_avg_base_price()

    def compute_avg_base_price(self):
        if self.value != 0:
            self.avg_base_price = self.total_invested / self.value
        else:
            self.avg_base_price = 0

    def get_total_invested(self, currency_unit=None):
        if currency_unit is None:
            currency_unit = self.base_currency_unit
        return self.base_currency_unit.convert(currency_unit,
                                               self.total_invested)

    def get_all_return(self, currency_unit=None):
        ret = self.realized_profit + self.get_total_return()
        if currency_unit is not None:
            # Convert from the base unit when a target unit is given.
            ret = self.base_currency_unit.convert(currency_unit, ret)
        return ret

    def get_total_invested_up_now(self, currency_unit=None):
        if currency_unit is None:
            currency_unit = self.base_currency_unit
        return self.base_currency_unit.convert(currency_unit,
                                               self.total_invested_up_now)

    def get_realized_return(self, currency_unit=None):
        ret = self.realized_profit
        if currency_unit is not None:
            ret = self.base_currency_unit.convert(currency_unit, ret)
        return ret

    def get_all_return_rate(self):
        if self.total_invested_up_now == 0:
            return 0
        return ((self.realized_profit + self.get_total_return())
                / self.total_invested_up_now)

    def get_realized_return_rate(self):
        denom = self.total_invested_up_now - self.total_invested
        if denom == 0:
            return 0
        return self.realized_profit / denom

    def withdraw(self, value, fee=0, update_avg_price=True):
        logging.debug(f"Withdraw {self.ticker}")
        logging.debug(f"\t Withdraw Args: {locals()}")
        value_with_fee = value + fee
        if value_with_fee > self.value:
            raise ValueError(
                f"Not enough funds to withdraw {value} {self.ticker}")
        avg_value_with_fees = value_with_fee * self.avg_base_price
        self.value -= value_with_fee
        self.total_invested -= (value_with_fee * self.avg_base_price)
        if update_avg_price:
            self.compute_avg_base_price()
        logging.debug(f"Withdraw {self.ticker} finished")
        filtered_global_var = filter(
            lambda x: not (x.startswith("__") or callable(getattr(self, x))),
            dir(self))
        logging.debug(
            f"{self.ticker}_Portfolio State "
            f"{list(map(lambda x: (x, getattr(self, x)), filtered_global_var))}"
        )
        return (avg_value_with_fees, self.base_currency_unit)

    def sell(self, buy_currency, value_sold, value_bought, sell_fee=0,
             buy_fee=0, date=None):
        logging.debug(f"sell {self.ticker}")
        logging.debug(f"\t sell Args: {locals()}")
        avg_value_with_fees, _ = self.withdraw(value_sold, fee=sell_fee,
                                               update_avg_price=False)
        logging.debug(
            f"AVG VALUE WITH FEES {self.ticker + buy_currency.ticker}: "
            f"{avg_value_with_fees}")
        realized_profit = buy_currency.currency_unit.convert(
            self.base_currency_unit, value_bought, date=date)
        logging.debug(
            f"Price at SELL {self.ticker + buy_currency.ticker}: "
            f"{realized_profit}")
        realized_profit = realized_profit - avg_value_with_fees
        logging.debug(
            f"REALIZED PROFIT {self.ticker + buy_currency.ticker}: "
            f"{realized_profit}")
        self.realized_profit += realized_profit
        buy_rate = value_sold / value_bought
        buy_currency.buy(self, value_bought, buy_rate, buy_fee=buy_fee,
                         date=date)
        self.compute_avg_base_price()
        logging.debug(f"Sell {self.ticker} finished")
        filtered_global_var = filter(
            lambda x: not (x.startswith("__") or callable(getattr(self, x))),
            dir(self))
        logging.debug(
            f"{self.ticker}_Portfolio State "
            f"{list(map(lambda x: (x, getattr(self, x)), filtered_global_var))}"
        )
        return realized_profit, self.base_currency_unit

    def top_up(self, value, fee=0, avg_base_price=None,
               avg_base_price_currency_unit=None, update_avg_price=True,
               date=None):
        logging.debug(f"Top up with {self.ticker}")
        logging.debug(f"\t top up Args: {locals()}")
        if avg_base_price is None:
            avg_base_price = self.get_current_unit_value()
        elif avg_base_price_currency_unit is not None:
            avg_base_price = avg_base_price_currency_unit.convert(
                self.base_currency_unit, avg_base_price, date=date)
        value_with_fees = value - fee
        self.value += value_with_fees
        top_up_base = value * avg_base_price
        logging.debug(f"{self.ticker} Top up base value {top_up_base}")
        self.total_invested += top_up_base
        self.total_invested_up_now += top_up_base
        if update_avg_price:
            self.compute_avg_base_price()
        logging.debug(f"{self.ticker} Top up finished {self}")
        filtered_global_var = filter(
            lambda x: not (x.startswith("__") or callable(getattr(self, x))),
            dir(self))
        logging.debug(
            f"{self.ticker}_Portfolio State "
            f"{list(map(lambda x: (x, getattr(self, x)), filtered_global_var))}"
        )

    def buy(self, from_currency, value, buy_rate, buy_fee=0, date=None):
        logging.debug(f"Buy {self.ticker}")
        logging.debug(f"\t buy Args: {locals()}")
        self.top_up(
            value,
            fee=buy_fee,
            avg_base_price=from_currency.avg_base_price * buy_rate,
            avg_base_price_currency_unit=from_currency.base_currency_unit,
            update_avg_price=True,
            date=date)
        logging.debug(f"{self.ticker} Buy finished")
        filtered_global_var = filter(
            lambda x: not (x.startswith("__") or callable(getattr(self, x))),
            dir(self))
        logging.debug(
            f"{self.ticker}_Portfolio State "
            f"{list(map(lambda x: (x, getattr(self, x)), filtered_global_var))}"
        )

    def get_total_return(self, currency_unit=None, date=None):
        total_return = self.get_current_value(date=date) - self.total_invested
        if currency_unit is not None:
            total_return = self.base_currency_unit.convert(currency_unit,
                                                           total_return,
                                                           date=date)
        return total_return

    def get_return_rate(self, date=None):
        if self.total_invested == 0:
            return 0
        return self.get_total_return(date=date) / self.total_invested

    def get_current_value(self, currency_unit=None, date=None):
        if currency_unit is None:
            currency_unit = self.base_currency_unit
        logging.debug(f"CU: {currency_unit}")
        return self.currency_unit.convert(currency_unit, self.value,
                                          date=date)

    def get_current_unit_value(self, currency_unit=None):
        if currency_unit is None:
            currency_unit = self.base_currency_unit
        return self.currency_unit.convert(currency_unit, 1)

    def display(self, verbose=True, tabulation="", currency_unit=None,
                tabulation_char="\t"):
        # Default to the base currency unit so currency_unit.name below
        # is always valid.
        if currency_unit is None:
            currency_unit = self.base_currency_unit
        display_str = f"{tabulation}{self.ticker}:\n"
        current_val = self.get_current_value(currency_unit=currency_unit)
        total_invested = self.get_total_invested(currency_unit)
        total_invested_up_now = self.get_total_invested_up_now(currency_unit)
        display_str += (f"{tabulation}{tabulation_char}Current value: "
                        f"{current_val} {currency_unit.name}\n")
        display_str += (f"{tabulation}{tabulation_char}Invested: "
                        f"{total_invested} {currency_unit.name}\n")
        display_str += (f"{tabulation}{tabulation_char}Invested all to now: "
                        f"{total_invested_up_now} {currency_unit.name}\n")
        profit = self.get_total_return(currency_unit=currency_unit)
        # realized_profit = self.get_realized_return(currency_unit)
        display_str += (f"{tabulation}{tabulation_char}Unrealized Return: "
                        f"{profit} {currency_unit.name}\n")
        # display_str += f"{tabulation}{tabulation_char}Realized Return: {realized_profit} {currency_unit.name}\n"
        # total_profit = realized_profit + profit
        # display_str += f"{tabulation}{tabulation_char}Total Return: {total_profit} {currency_unit.name}\n"
        return_rate = self.get_return_rate()
        # realized_return_rate = self.get_realized_return_rate()
        # all_return_rate = self.get_all_return_rate()
        display_str += (f"{tabulation}{tabulation_char}Return rate: "
                        f"{return_rate * 100} %\n")
        # display_str += f"{tabulation}{tabulation_char}Realized Return rate: {realized_return_rate*100} %\n"
        # display_str += f"{tabulation}{tabulation_char}All Return rate: {all_return_rate*100} %\n"
        if verbose:
            print(display_str)
        return display_str

    def __str__(self):
        return (f"<Currency_Portfolio with {self.value:.4f} {self.ticker}: "
                f"invested -> {self.total_invested:.4f} "
                f"{self.base_currency_unit} >")

    def __repr__(self):
        return str(self)
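A hedged usage sketch of the Currency portfolio above. Everything here is an assumption: the snippet shows only this class, so the fiat unit's constructor and wiring are hypothetical.

# Hypothetical wiring -- constructor signatures are assumed, not shown above.
# eur = FiatCurrency("EUR")                      # assumed unit exposing .convert()
# btc = Currency("BTC", base_currency_unit=eur)  # portfolio tracked in EUR
# btc.currency_unit = eur                        # unit used to price holdings
# btc.top_up(0.5, fee=0.001)                     # deposit 0.5 BTC, fee in BTC
# btc.display(currency_unit=eur)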
    # Run with multiple threads.
    multi_thread.run_with_multi_thread(download_caida_restricted_wrapper,
                                       argv, resources, mt_num)
    # Assemble segments.
    assemble_segements(file_path)


def download_directory(url, directory, mt_num=-1):
    # Get the file list, retrying with a growing backoff until it succeeds.
    is_succeeded = False
    round_cnt = 1
    while not is_succeeded:
        try:
            file_list = get_file_list_from_directory(url)
            is_succeeded = True
        except Exception as e:
            utils.log(str(e))
            round_cnt = round_cnt + 1
            time.sleep(1 * round_cnt)
    utils.touch(directory + '/')
    # Resource list.
    resources = ['']
    for f in file_list:
        download_file(utils.url_join([url, f]),
                      utils.path_join([directory, f]), resources, mt_num)
def get_all_packages():
    with SnapConnection(url_join(BASE_URL, ALL_PACKAGES)) as response:
        return response
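SnapConnection is used as a context manager here but isn't defined in these snippets. One way it could plausibly work, yielding the decoded JSON body on entry (an assumption; the real class may behave differently):

import requests


class SnapConnection:
    """Context manager that GETs a URL and yields its JSON body (sketch)."""

    def __init__(self, url, timeout=30):
        self.url = url
        self.timeout = timeout

    def __enter__(self):
        response = requests.get(self.url, timeout=self.timeout)
        response.raise_for_status()
        return response.json()

    def __exit__(self, exc_type, exc_value, traceback):
        return False  # do not suppress exceptions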
return team+"."+suffix def download_date(date, directory, mt_num=-1): #get url list. is_succeeded = False round_cnt = 1 while(not is_succeeded): try: url_list = get_url_list_from_date(date) is_succeeded = True except Exception, e: utils.log(str(e)) is_succeed = False round_cnt = round_cnt + 1 time.sleep(1*round_cnt) utils.touch(directory+'/') #resource list resources = [''] #build argv_list argv = [] for url in url_list: file_path = utils.url_join([directory, get_caida_filename_from_url(url)]) arg = (url, file_path) argv.append(arg) #run with multi thread. multi_thread.run_with_multi_thread(download_caida_restricted_wrapper, argv, resources, mt_num)