def get_status_api_data():
    """Handle a download request for one or more files.

    Reads the file ids either from the JSON body ('ids') or from form
    data ('ids'), plus an optional 'downloadCookieKey' form field naming
    a UI cookie that must be cleared on the response.

    Returns:
        For a single id: a redirect response to the file's download URL.
        For multiple ids: a JSON response carrying a 'download_token'
        the UI later uses to check the download's status.
    """
    cookie = request.form.get('downloadCookieKey')
    # Prefer a JSON body; fall back to form-encoded data.
    # (Original used a bare `except:` — narrowed to the two errors a
    # missing body (None subscripted) or missing key actually raises.)
    try:
        ids = request.get_json()['ids']
    except (TypeError, KeyError):
        ids = request.form.getlist('ids')

    if len(ids) == 1:
        # Only 1 file to download: redirect straight to its URL.
        file_url = get_urls_for_download(ids)[0]
        # Wrap the redirect in a response to reset cookie
        # (prevents inappropriate UI error popup).
        response = make_response(redirect(file_url))
        if cookie:
            response.set_cookie(cookie, '', expires=0)
        return response

    # Multiple files to download.
    print("IDS: ", ids)
    # Get File download URLs.
    download_urls = get_urls_for_download(ids)
    # BUGFIX: was a Python 2 print statement (SyntaxError under Python 3).
    print("download_urls: ", download_urls)

    # Download token - use the request cookie or create uuid snippet
    # (last group of a fresh uuid). This is returned to UI and later
    # used to check the download's status.
    token = cookie if cookie else str(uuid.uuid4()).rsplit("-")[-1]
    print("TOKEN: ", token)
    print("Token '%s' sending request..." % token)

    # Send task to server.
    data = {'token': token, 'urls': download_urls}

    # Initialize the multifile downloader plugin. As currently
    # implemented, this will only run the QueueClient class, which
    # submits the token and download urls to the queue.
    from plugin_collection import PluginCollection
    mfd_plugin = PluginCollection('plugins.multifile_downloader')  # only load MFD plugin
    mfd_plugin.apply_all_plugins_on_value(data)
    print("Delivered token %s with %s urls." % (token, str(len(download_urls))))

    response = make_response(json.dumps({"data": {"download_token": token}}))
    # Reset cookie to satisfy UI side of downloader. Prevents the
    # inappropriate trigger of error alert.
    if cookie:
        response.set_cookie(cookie, '', expires=0)
    return response
def set_plugin_parameters(status: bool = False, alerts: bool = False):
    """Build and return a PluginCollection configured with the given flags.

    On first call, lazily initializes the module-level `default_args`,
    `pool_collection` and `node_info`; subsequent calls reuse them.
    A fresh args namespace is cloned from the cached defaults, then the
    requested 'status'/'alerts' values are applied before loading the
    plugins.
    """
    global default_args, pool_collection, node_info

    if not default_args:
        # First call: parse default args once and cache the shared helpers.
        default_monitor_plugins = PluginCollection('plugins')
        parser = argparse.ArgumentParser()
        verbose_default = os.environ.get('VERBOSE', 'False').lower() == 'true'
        parser.add_argument(
            "-v", "--verbose", default=verbose_default, action="store_true")
        default_monitor_plugins.get_parse_args(parser)
        default_args, unknown = parser.parse_known_args()
        enable_verbose(default_args.verbose)
        pool_collection = PoolCollection(default_args.verbose, Networks())
        node_info = FetchStatus(default_args.verbose, pool_collection)

    # Clone the cached defaults into a fresh namespace...
    api_args = argparse.Namespace()
    for name, value in default_args._get_kwargs():
        setattr(api_args, name, value)
    # ...then override with the values requested by this call.
    api_args.status = status
    api_args.alerts = alerts

    # Create and load plugins with the per-call namespace.
    monitor_plugins = PluginCollection('plugins')
    monitor_plugins.load_all_parse_args(api_args)
    return monitor_plugins
# NOTE(review): this chunk begins mid-way through a parser.add_argument()
# call (the genesis-file path option) whose opening is outside this view.
help=
"The path to the genesis file describing the ledger pool. Can be specified using the 'GENESIS_PATH' environment variable."
)
parser.add_argument(
    "-s",
    "--seed",
    default=os.environ.get('SEED'),
    help=
    "The privileged DID seed to use for the ledger requests. Can be specified using the 'SEED' environment variable. If DID seed is not given the request will run anonymously."
)
parser.add_argument("-v", "--verbose", action="store_true", help="Enable verbose logging.")

# Discover plugins and let each contribute its own CLI arguments before
# parsing; unknown args are tolerated via parse_known_args.
monitor_plugins = PluginCollection('plugins')
monitor_plugins.get_parse_args(parser)
args, unknown = parser.parse_known_args()
monitor_plugins.load_all_parse_args(args)
enable_verbose(args.verbose)

log("Starting from the command line ...")

if args.list_nets:
    # List-networks mode: dump known networks as JSON and stop.
    print(json.dumps(Networks.get_networks(), indent=2))
    exit()

log("indy-vdr version:", indy_vdr.version())
# Empty/None seed means the requests run anonymously (see --seed help).
did_seed = None if not args.seed else args.seed
ident = create_did(did_seed)
from plugin_collection import PluginCollection

# Discover every plugin under the 'plugins' package and run each one
# against the sample value 5.
collection = PluginCollection('plugins')
collection.apply_all_plugins_on_value(5)
import argparse
import json
from plugin_collection import PluginCollection
from jsonschema import validate, ValidationError


def get_available_plugins(plugins):
    """Return a comma-separated string of available plugin identifiers.

    Args:
        plugins: iterable of plugin objects exposing an `identifier`
            attribute (e.g. the `.plugins` list of a PluginCollection).

    Returns:
        A string like "keras, onnx"; empty string for an empty iterable.
    """
    # str.join replaces the original quadratic `+=` concatenation and
    # its manual trailing-", " trim (`[:-2]`); also safe for 0 plugins.
    return ', '.join(plugin.identifier for plugin in plugins)


#? Collecting the available plugins in the corresponding folders
frontend_plugins = PluginCollection('frontend')
conversion_plugins = PluginCollection('conversion')
backend_plugins = PluginCollection('backend')

#? Parsing command line arguments
parser = argparse.ArgumentParser(
    description=
    'Translates high-level neural network model to native code for specified backend'
)
parser.add_argument(
    '-f',
    '--frontend',
    type=str,
    required=True,
    # "momement" typo in the user-facing help text fixed to "moment".
    help='Frontend type of the input file, available at the moment: ' +
    get_available_plugins(frontend_plugins.plugins))
logging.basicConfig(level=logging.INFO)
log = logging.getLogger("Main")

if __name__ == "__main__":
    # Load config from file. OSError is the modern name for the
    # deprecated EnvironmentError alias (same class since Python 3.3).
    try:
        with open('config/settings.json', 'r') as settings_file:
            # json.load parses straight from the stream instead of
            # read()-then-loads; behavior is identical.
            settings = json.load(settings_file)
    except OSError:
        log.error("Could not open src/config/settings.json")
        log.error("Please fill in sample_settings.json to your needs and copy to src/config/settings.json or to your config volume!")
        sys.exit("Could not open settings.json")

    # Create plugin collection with all plugins found in the plugins folder
    # and install assets.
    plugin_collection = PluginCollection(
        settings['common']['pluginsPackage'],
        settings['common']['static-assets'],
        settings['common']['templates']
    )

    # Apply settings to plugins. There must be a top level key in the
    # settings dict named after the plugin class. Otherwise, the plugin
    # default settings will be used.
    plugin_collection.apply_settings(settings)

    try:
        WebServer(settings, plugin_collection).run()
    except KeyboardInterrupt:
        # Ctrl-C is the expected shutdown path; exit quietly.
        log.info("Bye bye!")
def main():
    """Entry point: run every discovered plugin on the sample value 5."""
    plugin_set = PluginCollection('plugins')
    plugin_set.apply_all_plugins_on_value(5)