def key_values():
    """Return a dict of template values describing this CLI package.

    Keys: 'tapis_cli_name' (project name from About metadata) and
    'tapis_cli_version' (version string from pbr VersionInfo).
    """
    about = About(PKG_NAME)
    version = VersionInfo(PKG_NAME)
    return {
        'tapis_cli_name': about.project,
        'tapis_cli_version': version.version_string(),
    }
def get(self):
    """Retrieve the Storyboard system information.

    Example::

        curl https://my.example.org/api/v1/systeminfo
    """
    version = VersionInfo('storyboard').version_string()
    return wmodels.SystemInfo(version=version)
def test_render_template_passed_vals_supercede_builtin():
    """Values in a passed dict should override default/discovered values."""
    from pbr.version import VersionInfo
    from tapis_cli import PKG_NAME
    from tapis_cli.templating import render_template

    expected_version = VersionInfo(PKG_NAME).version_string()
    template = 'Tapis CLI version {{ tapis_cli_version }} is cool'

    # With an empty passed_vals dict, the discovered package version renders.
    assert expected_version in render_template(template, passed_vals={})

    # An explicit value in passed_vals takes precedence over the discovered one.
    override = {'tapis_cli_version': 9000}
    assert '9000' in render_template(template, passed_vals=override)
# -*- coding: utf-8 -*- # Copyright (C) 2004-2008 Tristan Seligmann and Jonathan Jacobs # Copyright (C) 2012-2014 Bastian Kleineidam # Copyright (C) 2015-2017 Tobias Gruetzmacher """ Automated comic downloader. Dosage traverses comic websites in order to download each strip of the comic. The intended use is for mirroring the strips locally for ease of viewing; redistribution of the downloaded strips may violate copyright, and is not advisable unless you have communicated with all of the relevant copyright holders, described your intentions, and received permission to distribute. The primary interface is the 'dosage' commandline script. Comic modules for each comic are located in L{dosagelib.plugins}. """ from __future__ import absolute_import, division, print_function from pbr.version import VersionInfo AppName = u'dosage' version_info = VersionInfo(AppName) __version__ = version_info.version_string() # PEP 396 AppVersion = version_info.release_string()
from os import getenv, path, environ
from pbr.version import VersionInfo
import platform

PROJECT_NAME = 'youcomment'
# Fallback version used when pbr cannot discover installed package metadata.
MANUAL_VERSION = '0.6.6'

try:
    info = VersionInfo(PROJECT_NAME)
except Exception:
    # pbr fails when the package is not installed; setting PBR_VERSION lets
    # VersionInfo succeed with the manually maintained number instead.
    environ['PBR_VERSION'] = MANUAL_VERSION
    info = VersionInfo(PROJECT_NAME)
VERSION = info.version_string()
VERSION_VCS = info.version_string_with_vcs()
PLATFORM = platform.system()

# Modifiable Settings
REDDIT_AUTHOR_USERNAME = '******'
DEFAULT_SUBREDDITS = ["you_comment_bot"]
DEFAULT_BOT_RUN_INTERVAL_MINS = 3
# Threshold in [0, 1] — presumably a text-similarity score cutoff; TODO confirm metric.
SIMILARITY_LIMIT = 0.75
YOUTUBE_COMMENTS_PER_PAGE = 100
YOUTUBE_COMMENTS_MAX_NUM = 500
YOUTUBE_LIKE_THRESHOLD = 3
YOUTUBE_NUM_TOP_COMMENTS = 50
REDDIT_MAX_POSTS = 50
REDDIT_COMMENTS_MAX_NUM = 25
REDDIT_NUM_RETRIES = 3
REDDIT_REPLY_INTERVAL = 60 * 10  # Default is usually 9 minutes, but 10 just in case.
DEV_MODE = 'DEV'
def get(self):
    """Retrieve the Storyboard system information."""
    storyboard_version = VersionInfo('storyboard')
    info = wmodels.SystemInfo(version=storyboard_version.version_string())
    return info
def masq(masq_args):
    """
    Main function for running masq.
    Accepts a dictionary of arguments from the argument parser
    and calls the appropriate module function.

    :param masq_args: Arguments.
    :return: None
    """
    if masq_args['version']:
        # NOTE(review): reports the 'anuran' package version — confirm this is
        # intentional and not a copy-paste from a sibling project.
        info = VersionInfo('anuran')
        logger.info('Version ' + info.version_string())
        sys.exit(0)
    # unpack args
    config = masq_args['config']
    # BUGFIX: previously read masq_args['config'] a second time, so the
    # database name was silently replaced by the config value.
    database = masq_args['database']
    networks = masq_args['networks']
    bioms = masq_args['bioms']
    username = masq_args['username']
    password = masq_args['password']
    host = masq_args['host']
    mapping = masq_args['mapping']
    sources = masq_args['sources']
    if masq_args['mapping']:
        try:
            with open(masq_args['mapping'], 'r') as file:
                contents = file.read()
            mapping = literal_eval(contents)
        except (ValueError, TypeError):
            logger.warning("Mapping file could not be imported,\n"
                           "and will be ignored. ")
    if masq_args['sources']:
        try:
            with open(masq_args['sources'], 'r') as file:
                contents = file.read()
            # BUGFIX: previously assigned to 'mapping', so the sources file
            # was read but its contents were never used.
            sources = literal_eval(contents)
        except (ValueError, TypeError):
            logger.warning("Source file could not be imported,\n"
                           "and will be ignored. ")
    if masq_args['create']:
        logger.info('Setting up tables in PostgreSQL database. ')
        setup_database(config=config, create=True, host=host,
                       database=database, username=username,
                       password=password)
    elif masq_args['delete']:
        setup_database(config=config, create=False, host=host,
                       database=database, username=username,
                       password=password)
        logger.info("Deleted tables in PostgreSQL database.")
    if masq_args['bioms']:
        logger.info('Importing BIOM files... ')
        for biom in bioms:
            import_biom(location=biom, mapping=mapping, config=config,
                        host=host, database=database,
                        username=username, password=password)
    if masq_args['networks']:
        logger.info('Importing network files...')
        for network in networks:
            import_networks(location=network, mapping=mapping,
                            sources=sources, config=config, host=host,
                            database=database, username=username,
                            password=password)
    logger.info('Completed tasks! ')
master_doc = 'index' # -- Project information ----------------------------------------------------- project = 'wexpect' copyright = '2020, Benedek Racz' author = 'Benedek Racz' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. package_name = 'wexpect' info = VersionInfo(package_name) version = info.version_string() # The full version, including alpha/beta/rc tags. release = version # -- General configuration --------------------------------------------------- # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx'] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # List of patterns, relative to source directory, that match files and
def main():
    """Command-line entry point for manta.

    Parses CLI arguments, imports a network file (or loads the bundled demo
    graph), optionally binarizes edge weights, runs flow-based clustering,
    optionally runs cluster-robustness permutations, and writes the clustered
    network to disk in the requested format.
    """
    args = set_manta().parse_args(sys.argv[1:])
    args = vars(args)
    if args['version']:
        info = VersionInfo('manta')
        logger.info('Version ' + info.version_string())
        exit(0)
    # BUGFIX: 'extension' was referenced in the direction check below even
    # when the demo graph was loaded (which never set it), raising NameError.
    extension = None
    if args['graph'] != 'demo':
        filename = args['graph'].split(sep=".")
        extension = filename[len(filename)-1]
        # see if the file can be detected
        # if not, try appending current working directory and then read.
        if not os.path.isfile(args['graph']):
            if os.path.isfile(os.getcwd() + '/' + args['graph']):
                # BUGFIX: previously assigned only os.getcwd() + '/' and
                # dropped the filename, so the import below always failed.
                args['graph'] = os.getcwd() + '/' + args['graph']
            else:
                logger.error('Could not find the specified file. Is your file path correct?')
                exit()
        try:
            if extension == 'graphml':
                network = nx.read_graphml(args['graph'])
            elif extension == 'txt':
                network = nx.read_weighted_edgelist(args['graph'])
            elif extension == 'gml':
                network = nx.read_gml(args['graph'])
            elif extension == 'cyjs':
                network = read_cyjson(args['graph'])
            else:
                logger.warning('Format not accepted. '
                               'Please specify the filename including extension (e.g. test.graphml).',
                               exc_info=True)
                exit()
        except Exception:
            logger.error('Could not import network file!', exc_info=True)
            exit()
        # first need to convert network to undirected
    elif args['graph'] == 'demo':
        path = os.path.dirname(manta.__file__)
        path = path + '//demo.graphml'
        network = nx.read_graphml(path)
    if args['direction']:
        if extension == 'txt':
            logger.warning('Directed networks from edge lists not supported, use graphml or cyjs! ')
            exit()
        else:
            network = nx.to_undirected(network)
    if args['bin']:
        orig_edges = dict()  # store original edges for export
        for edge in network.edges:
            orig_edges[edge] = network.edges[edge]['weight']
            network.edges[edge]['weight'] = np.sign(network.edges[edge]['weight'])
    # BUGFIX: np.any(...) over dict_values yields a single numpy bool, and
    # iterating over it raised TypeError; use a plain any() over the weights.
    # NOTE(review): only logs — clustering still proceeds; confirm whether an
    # exit was intended here.
    if any(value == 0 for value in nx.get_edge_attributes(network, 'weight').values()):
        logger.error("Some edges in the network have a weight of exactly 0. \n"
                     "Such edges cannot be clustered. Try converting weights to 1 and -1. \n")
    weight_properties = nx.get_edge_attributes(network, 'weight')
    if len(weight_properties) == 0:
        logger.error("The imported network has no 'weight' edge property. \n"
                     "Please make sure you are formatting the network correctly. ")
    results = cluster_graph(network, limit=args['limit'], max_clusters=args['max'],
                            min_clusters=args['min'], min_cluster_size=args['ms'],
                            iterations=args['iter'], subset=args['subset'],
                            ratio=args['ratio'], edgescale=args['edgescale'],
                            permutations=args['perm'], verbose=args['verbose'])
    graph = results[0]
    if args['cr']:
        perm_clusters(graph=graph, limit=args['limit'], max_clusters=args['max'],
                      min_clusters=args['min'], min_cluster_size=args['ms'],
                      iterations=args['iter'], ratio=args['ratio'],
                      partialperms=args['perm'], relperms=args['rel'],
                      subset=args['subset'], error=args['error'],
                      verbose=args['verbose'])
    layout = None
    if args['bin']:
        # restore the original (un-binarized) weights before export
        for edge in network.edges:
            network.edges[edge]['weight'] = orig_edges[edge]
    if args['layout']:
        layout = generate_layout(graph, args['tax'])
    if args['fp']:
        if args['f'] == 'graphml':
            nx.write_graphml(graph, args['fp'] + '.graphml')
        elif args['f'] == 'csv':
            # export every node attribute as a column of a CSV table
            node_keys = graph.nodes[list(graph.nodes)[0]].keys()
            properties = {}
            for key in node_keys:
                properties[key] = nx.get_node_attributes(graph, key)
            data = pd.DataFrame(properties)
            data.to_csv(args['fp'] + '.csv')
        elif args['f'] == 'gml':
            nx.write_gml(graph, args['fp'] + '.gml')
        elif args['f'] == 'cyjs':
            write_cyjson(graph=graph, filename=args['fp'] + '.cyjs', layout=layout)
        logger.info('Wrote clustered network to ' + args['fp'] + '.' + args['f'])
    else:
        logger.error('Could not write network to disk, no file path given.')
    exit(0)
def main():
    """Command-line entry point for manta.

    Parses CLI arguments, imports a network file (or loads the bundled demo
    graph), optionally converts edge weights to their signs, clusters the
    network, optionally assesses cluster robustness, and exports the result.
    """
    args = set_manta().parse_args(sys.argv[1:])
    args = vars(args)
    if args['version']:
        info = VersionInfo('manta')
        logger.info('Version ' + info.version_string())
        exit(0)
    # BUGFIX: 'extension' was referenced in the direction check below even
    # when the demo graph was loaded (which never set it), raising NameError.
    extension = None
    if args['graph'] != 'demo':
        filename = args['graph'].split(sep=".")
        extension = filename[len(filename) - 1]
        # see if the file can be detected
        # if not, try appending current working directory and then read.
        if not os.path.isfile(args['graph']):
            if os.path.isfile(os.getcwd() + '/' + args['graph']):
                # BUGFIX: previously assigned only os.getcwd() + '/' and
                # dropped the filename, so the import below always failed.
                args['graph'] = os.getcwd() + '/' + args['graph']
            else:
                logger.error(
                    'Could not find the specified file. Is your file path correct?'
                )
                exit()
        try:
            if extension == 'graphml':
                network = nx.read_graphml(args['graph'])
            elif extension == 'txt':
                network = nx.read_weighted_edgelist(args['graph'])
            elif extension == 'gml':
                network = nx.read_gml(args['graph'])
            elif extension == 'cyjs':
                network = read_cyjson(args['graph'])
            else:
                logger.warning(
                    'Format not accepted. '
                    'Please specify the filename including extension (e.g. test.graphml).',
                    exc_info=True)
                exit()
        except Exception:
            logger.error('Could not import network file!', exc_info=True)
            exit()
        # first need to convert network to undirected
    elif args['graph'] == 'demo':
        path = os.path.dirname(manta.__file__)
        path = path + '//demo.graphml'
        network = nx.read_graphml(path)
    if args['direction']:
        if extension == 'txt':
            logger.warning(
                'Directed networks from edge lists not supported, use graphml or cyjs! \n'
            )
            exit()
        else:
            network = nx.to_undirected(network)
    if args['bin']:
        orig_edges = dict()  # store original edges for export
        for edge in network.edges:
            orig_edges[edge] = network.edges[edge]['weight']
            network.edges[edge]['weight'] = np.sign(
                network.edges[edge]['weight'])
    results = cluster_graph(network,
                            limit=args['limit'],
                            max_clusters=args['max'],
                            min_clusters=args['min'],
                            min_cluster_size=args['ms'],
                            iterations=args['iter'],
                            subset=args['subset'],
                            ratio=args['ratio'],
                            edgescale=args['edgescale'],
                            permutations=args['perm'],
                            verbose=args['verbose'])
    graph = results[0]
    if args['cr']:
        perm_clusters(graph=graph,
                      limit=args['limit'],
                      max_clusters=args['max'],
                      min_clusters=args['min'],
                      min_cluster_size=args['ms'],
                      iterations=args['iter'],
                      ratio=args['ratio'],
                      partialperms=args['perm'],
                      relperms=args['rel'],
                      subset=args['subset'],
                      error=args['error'],
                      verbose=args['verbose'])
    layout = None
    if args['bin']:
        # restore the original (un-binarized) weights before export
        for edge in network.edges:
            network.edges[edge]['weight'] = orig_edges[edge]
    if args['layout']:
        layout = generate_layout(graph, args['tax'])
    if args['fp']:
        if args['f'] == 'graphml':
            nx.write_graphml(graph, args['fp'] + '.graphml')
        elif args['f'] == 'edgelist':
            nx.write_weighted_edgelist(graph, args['fp'] + '.txt')
        elif args['f'] == 'gml':
            nx.write_gml(graph, args['fp'] + '.gml')
        elif args['f'] == 'adj':
            nx.write_multiline_adjlist(graph, args['fp'] + '.txt')
        elif args['f'] == 'cyjs':
            write_cyjson(graph=graph,
                         filename=args['fp'] + '.cyjs',
                         layout=layout)
        logger.info('Wrote clustered network to ' + args['fp'] + '.' + args['f'])
    else:
        logger.error('Could not write network to disk, no file path given.')
    exit(0)
def main():
    """Command-line entry point for anuran.

    Imports one or more groups of networks from directories (or loads the
    bundled demo data), warns when a group is too small for robust
    statistics, runs the null-model calculations, and exits.
    """
    args = set_anuran().parse_args(sys.argv[1:])
    args = vars(args)
    if args['version']:
        info = VersionInfo('anuran')
        logger.info('Version ' + info.version_string())
        sys.exit(0)
    if not args['graph']:
        logger.info('Please give an input location.')
    if not args['fp']:
        logger.info('No file path given, writing to current directory.')
        args['fp'] = os.getcwd() + '/'
    if args['graph'] != ['demo']:
        networks = {}
        for x in args['graph']:
            # a trailing path separator yields an empty basename;
            # fall back to the group name 'anuran' in that case
            if len(os.path.basename(x)) == 0:
                name = 'anuran'
            else:
                name = os.path.basename(x)
            networks[name] = list()
        new_graph = []
        for location in args['graph']:
            if not os.path.isdir(location):
                if os.path.isdir(os.getcwd() + '/' + location):
                    new_graph.append(os.getcwd() + '/' + location)
                else:
                    logger.error('Could not find the specified directory. Is your file path correct?')
                    sys.exit()
            else:
                new_graph.append(location)
        args['graph'] = new_graph
        # code for importing from multiple folders
        for location in args['graph']:
            # BUGFIX: use the same empty-basename fallback as when the group
            # keys were created; the raw basename raised KeyError for
            # locations with a trailing separator.
            group = os.path.basename(location)
            if len(group) == 0:
                group = 'anuran'
            files = [f for f in glob.glob(location + "**/*.graphml", recursive=True)]
            files.extend([f for f in glob.glob(location + "**/*.txt", recursive=True)])
            files.extend([f for f in glob.glob(location + "**/*.gml", recursive=True)])
            for file in files:
                filename = file.split(sep=".")
                extension = filename[len(filename)-1]
                try:
                    if extension == 'graphml':
                        network = nx.read_graphml(file)
                    elif extension == 'txt':
                        network = nx.read_weighted_edgelist(file)
                    elif extension == 'gml':
                        network = nx.read_gml(file)
                    else:
                        logger.warning('Ignoring file with wrong format.', exc_info=True)
                        network = False
                    # need to make sure the graphml function does not arbitrarily assign node ID
                    if network:
                        try:
                            if 'name' in network.nodes[list(network.nodes)[0]]:
                                if network.nodes[list(network.nodes)[0]]['name'] != list(network.nodes)[0]:
                                    network = nx.relabel_nodes(network, nx.get_node_attributes(network, 'name'))
                        except IndexError:
                            logger.warning('One of the imported networks contains no nodes.', exc_info=True)
                        networks[group].append((os.path.basename(file), nx.to_undirected(network)))
                except Exception:
                    logger.error('Could not import network file!', exc_info=True)
                    sys.exit()
    elif args['graph'] == ['demo']:
        networks = {'demo': list()}
        path = os.path.dirname(anuran.__file__)
        networks['demo'].append(('conet_family_a.graphml',
                                 nx.read_graphml(path + '//data//conet_family_a.graphml')))
        networks['demo'].append(('conet_family_b.graphml',
                                 nx.read_graphml(path + '//data//conet_family_b.graphml')))
        networks['demo'].append(('conet_family_c.graphml',
                                 nx.read_graphml(path + '//data//conet_family_c.graphml')))
    logger.info('Imported ' + str(len(networks)) + ' group(s) of networks.')
    for network in networks:
        if len(networks[network]) < 20:
            logger.warning('One of the groups (' + network + ') does not contain enough networks '
                           'to generate robust tests for centralities or set sizes. \n'
                           'Suppressing warnings, but please be careful with the statistics! \n'
                           'Preferably use groups with at least 20 networks. ')
    model_calcs(networks, args)
    logger.info('anuran completed all tasks.')
    # CONSISTENCY FIX: use sys.exit like the rest of this function; the
    # builtin exit() is provided by the site module and not guaranteed in
    # non-interactive runs.
    sys.exit(0)
from pbr.version import VersionInfo

# Resolve the installed package version from pbr metadata and expose it as
# __version__ (PEP 396).
package_name = 'pymodsim'
info = VersionInfo(package_name)
__version__ = info.version_string()