def get_versions(self, fail_on_missing=True):
    """Return the package's version list, computing and caching it lazily.

    On first call (or while ``self.versions`` is empty/falsy) delegates to
    ``utils.get_versions`` with this package's full path and the version
    selection regex; subsequent calls return the cached result.

    :param fail_on_missing: forwarded to ``utils.get_versions``
    :return: the (possibly cached) list of versions
    """
    # Fast path: versions already resolved.
    if self.versions:
        return self.versions
    self.versions = utils.get_versions(
        self.package_fullpath,
        fail_on_missing=fail_on_missing,
        version_reg=self.get_version_selection_reg(),
    )
    return self.versions
def run():
    """Scan NVD CVE entries and emit YAML for CVEs matched to real packages.

    Reads ``nvdcve.json`` from the working directory, skips CVEs that are
    too old or still under analysis, turns CPE vendor/product data into
    package-name candidates, queries cpe2pkg, and writes a YAML record for
    the first candidate whose upstream versions overlap the versions named
    in the CVE.
    """
    # TODO: make configurable
    with open('nvdcve.json') as f:
        data = json.load(f)

    for item in data.get('CVE_Items'):
        cve = CVE.from_dict(item)
        logger.info('---')
        logger.info('Found {cve}'.format(cve=cve.cve_id))

        # TODO: make configurable
        if is_older_than(cve, 1):
            logger.info('The CVE is too old, skipping...')
            continue
        if not cve.configurations:
            logger.info('The vulnerability is still under analysis, skipping...')
            continue

        vendor, product, cpe_versions = get_vendor_product_versions(cve)
        candidates = get_package_name_candidates(cve)
        # De-duplicate while preserving first-seen order.
        product = list(OrderedDict.fromkeys(product + list(candidates)))
        vendor = list(OrderedDict.fromkeys(vendor))
        if not product or not vendor:
            continue

        query = construct_lucene_query(vendor, product)
        logger.info('Query: {q}'.format(q=query))
        results = run_cpe2pkg(query)

        # Try to exclude false positives; the winner is simply the first
        # match we found.
        winner = None
        for candidate in results:
            ga = candidate['ga']
            upstream_versions = get_versions(ga)
            # At least one version mentioned in the CVE must exist for the
            # given groupId:artifactId; otherwise it is a false positive.
            if cpe_versions & set(upstream_versions):
                logger.info('Hit for {ga}'.format(ga=ga))
                candidate['versions'] = upstream_versions
                winner = candidate
                break

        if winner:
            generate_yaml(cve, winner, results)
"u_snes_atol": 1e-6, "u_snes_rtol": 1e-6, "u_snes_max_it": 1000, "u_snes_monitor": '' } alt_min_parameters = { "max_it": 300, "tol": 1.e-5, "solver_alpha": "tao", "solver_u": petsc_options_u, # "solver_alpha_snes": petsc_options_alpha_snes "solver_alpha_tao": petsc_options_alpha_tao } versions = get_versions() versions.update({'filename': __file__}) parameters = { "alt_min": alt_min_parameters, # "solver_u": petsc_options_u, # "solver_alpha_tao": petsc_options_alpha_tao, "solver_alpha_snes": petsc_options_alpha_snes, "stability": stability_parameters, "time_stepping": timestepping_parameters, "material": {}, "geometry": {}, "experiment": {}, "code": versions } # constants ell = 0.1
def main(max_rows, cache, name, version, threshold):
    """Download a library archive by name and version from the package index.

    Parameters
    ----------
    max_rows : int
        pandas display limit for printed DataFrames.
    cache : int
        Number of days HTTP responses stay valid in the cached session.
    name : str
        Library name; a case-only mismatch is auto-corrected, other
        near-misses are suggested before exiting.
    version : str
        Version to download, or ``'latest'``.
    threshold : float
        Similarity threshold passed to the nearest-name search.
    """
    print("Download %r" % name)
    # "display.max_rows" is the full, unambiguous option key; the bare
    # "max_rows" prefix raises OptionError on modern pandas.
    pd.set_option("display.max_rows", max_rows)
    expire_after = datetime.timedelta(days=cache)
    session = init_session(expire_after)
    df = download_index(session)

    if name not in df['name'].values:
        print("%r is not a valid library name" % name)
        print()
        print("Possible library names are:")
        names = find_nearest_names(name, threshold=threshold, df=df)
        print(names[0:10])
        find_nearest_name = names.iloc[0]
        if find_nearest_name.score == 1:
            # Same name up to case: fix it automatically, but pause so the
            # user sees the correction.
            print()
            new_name = find_nearest_name['name']
            print("Case is important! Autofixing it as %r instead of %r" % (new_name, name))
            name = new_name
            input("")
        else:
            print()
            sys.exit("Please correct library name!")

    df_all_versions = df[df['name'].str.upper() == name.upper()]
    # Adopt the canonical casing recorded in the index.
    name = df_all_versions.name.unique()[0]
    print(df_all_versions.set_index('version'))
    print("")

    versions = get_versions(name, df)
    print("versions: %s" % versions.map(str).values)

    if version == 'latest':
        version = get_latest_version(name, df)
    print()
    print("version: %s" % str(version))

    url = get_archive_url(name, version, df)
    print()
    print("Downloading from %r" % url)
    # Explicit check instead of `assert`: asserts are stripped under -O.
    # Raising AssertionError keeps the exception type callers may expect.
    if not url.endswith('.zip'):
        raise AssertionError("URL must finish with .zip")
    print()
    show_licence_from_archive_url(url)
    print()
    print("(possible) repository url")
    print()
    try:
        repository_url = repository_url_from_archive_url(url)
        print(repository_url)
    except Exception:
        # Best-effort lookup: report and continue rather than abort the run.
        print("Can't find repository url")
        print()
        traceback.print_exc()
# MPI communicator size (``comm`` is created earlier in the file).
size = comm.Get_size()

from dolfin.cpp.log import log, LogLevel, set_log_level

# Suppress output from non-root ranks to avoid duplicated logs in
# parallel runs.
dolfin.parameters["std_out_all_processes"] = False

# Project-local solver and utility imports.
from solvers import EquilibriumAM
from solver_stability import StabilitySolver
from linsearch import LineSearch
from dolfin import *
import yaml
import mshr
from lib import create_output, compile_continuation_data, getDefaultParameters
from utils import get_versions

# Snapshot of code/package versions, recorded with the run's parameters.
code_parameters = get_versions()

set_log_level(LogLevel.INFO)

def perturbState(state, perturb):
    """
    Perturbs current state with perturbation

    Arguments
    ---------
    state: dict like {'u': Coefficient, 'alpha': Coefficient}
    perturb: dict like {'v': Coefficient, 'beta': Coefficient, 'h': Float}
    """
    u = state['u']
def get_versions(self, fail_on_missing=True):
    """Lazily resolve and cache this package's versions.

    When no versions are cached yet, queries ``utils.get_versions`` with
    the package's full path and its version-selection regex, stores the
    result on the instance, and returns it; later calls reuse the cache.

    :param fail_on_missing: forwarded to ``utils.get_versions``
    :return: list of versions for this package
    """
    cached = self.versions
    if not cached:
        cached = utils.get_versions(
            self.package_fullpath,
            fail_on_missing=fail_on_missing,
            version_reg=self.get_version_selection_reg(),
        )
        self.versions = cached
    return cached
def get_versions(path, limit=None, bellow=None):
    """Flask view: return the version records for *path* as JSON.

    Aborts with 404 when the lookup yields nothing.

    NOTE(review): ``bellow`` looks like a typo for ``below``, but renaming
    it would break any caller/route passing it by keyword — confirm before
    changing.
    """
    found = utils.get_versions(dbcon, '/' + path, limit, bellow)
    if found is None:
        flask.abort(404)
    payload = json.dumps(found)
    return flask.Response(payload, mimetype='application/json')