def get_buyable_paths(*args, **kwargs):
    """Wrapper for ``MCTSTreeBuilder.get_buyable_paths`` function.

    Returns:
        tree_status ((int, int, dict)): Result of tree_status().
        trees (list of dict): List of dictionaries, where each dictionary
            defines a synthetic route.
    """
    run_async = kwargs.pop('run_async', False)
    print('Treebuilder MCTS coordinator was asked to expand {}'.format(args[0]))
    _id = get_buyable_paths.request.id
    try:
        status, paths = treeBuilder.get_buyable_paths(*args, **kwargs)
        graph = treeBuilder.return_chemical_results()
        result_doc = {'status': status, 'paths': paths, 'graph': graph}
    except:
        if run_async:
            update_result_state(_id, 'failed')
        raise
    if run_async:
        update_result_state(_id, 'completed')
        settings = {'smiles': args[0]}
        settings.update(kwargs)
        save_results(result_doc, settings, _id)
    print('Task completed, returning results.')
    return (status, paths)
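# A minimal, self-contained sketch of the kwargs.pop pattern used above:
# 'run_async' is consumed by the wrapper (with a default of False) and is
# never forwarded to treeBuilder.get_buyable_paths. Names below are
# illustrative only.
def _pop_run_async_demo(*args, **kwargs):
    run_async = kwargs.pop('run_async', False)  # consumed here, not passed on
    return args, kwargs, run_async

assert _pop_run_async_demo('CCO', run_async=True, max_depth=3) == \
    (('CCO',), {'max_depth': 3}, True)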
def settings(self):
    path = os.path.join(self.working_directory(), '.frigg.yml')
    # Default value for project .frigg.yml
    settings = {'webhooks': [], 'comment': True}
    with open(path) as f:
        settings.update(yaml.load(f))
    return settings
def update_settings(self, update_settings):
    settings = json.loads(self.settings)
    settings.update(update_settings)
    self.settings = json.dumps(settings, cls=LazyDateTimeEncoder)
    self.save()
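# A self-contained sketch of the JSON round-trip performed by
# update_settings: the stored JSON string is decoded, merged (new keys are
# added, existing keys overwritten), and re-encoded. LazyDateTimeEncoder is
# assumed to behave like the default encoder for plain values.
import json

stored = json.dumps({'notify': True, 'limit': 10})
merged = json.loads(stored)
merged.update({'limit': 20, 'theme': 'dark'})
assert json.loads(json.dumps(merged)) == \
    {'notify': True, 'limit': 20, 'theme': 'dark'}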
def ficuspumila(name, default=None):
    settings = get('FICUSPUMILA', {})
    settings.update(get('TASTYPIE_RPC_PROXY', {}))
    try:
        return settings[name]
    except KeyError:
        return default
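# A dict-level sketch of the lookup precedence in ficuspumila: values under
# TASTYPIE_RPC_PROXY overwrite those under FICUSPUMILA because they are
# merged second, and unknown names fall back to the default. The dicts
# below stand in for what get() would return.
ficuspumila_conf = {'timeout': 5, 'retries': 2}
rpc_proxy_conf = {'timeout': 30}

merged_conf = dict(ficuspumila_conf)
merged_conf.update(rpc_proxy_conf)  # TASTYPIE_RPC_PROXY wins on conflicts
assert merged_conf.get('timeout', 'fallback') == 30
assert merged_conf.get('missing', 'fallback') == 'fallback'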
def get_form_settings(self):
    # todo : Review
    form_class = self.get_form_class()
    if hasattr(form_class, 'initial_settings'):
        settings = form_class.initial_settings.copy()
    else:
        settings = {}
    settings.update(self.form_settings or {})
    return settings
def initialize_form(cls):
    form_class = cls.get_form_class()
    if hasattr(form_class, 'initial_settings'):
        settings = form_class.initial_settings.copy()
    else:
        settings = {}
    settings.update(cls.form_settings or {})
    settings['from_doc_date'] = cls.get_default_from_date()
    return form_class(**{'form_settings': settings})
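# A sketch of the layering the initialize_form variants implement:
# class-level initial_settings are the base, instance form_settings
# override them, and 'from_doc_date' is assigned last so it always wins.
# The values are illustrative only.
initial_settings = {'currency': 'USD', 'from_doc_date': None}
form_settings = {'currency': 'EUR'}

layered = initial_settings.copy()
layered.update(form_settings or {})
layered['from_doc_date'] = '2020-01-01'  # stands in for get_default_from_date()
assert layered == {'currency': 'EUR', 'from_doc_date': '2020-01-01'}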
def freeze(self):
    settings = {}
    for key, v in DEFAULTS.items():
        settings[key] = self._unserialize(v['default'], v['type'])
    if self._parent:
        settings.update(self._parent.settings.freeze())
    for key, value in self._cache().items():
        settings[key] = self.get(key)
    return settings
def post(self, request):
    """Save the Paypal settings of each provider"""
    try:
        template = "app/prosumers/provider-dashboard/payment/settings.html"
        payload = request.POST
        if payload.get('username', None) in [None, ""]:
            raise IntegrityError(_("Please enter the Paypal client ID of your application"))
        if payload.get('password', None) in [None, ""]:
            raise IntegrityError(_("Please enter the Paypal client secret of your application"))
        user = Users.objects.get(pk=request.session['id'])
        provider = Providers.objects.get(user_id=request.session['id'])
        settings = PaypalCredentials.objects.filter(provider=provider)
        insertion_flag = True
        if settings.count():
            insertion_flag = False
            settings.update(username=payload['username'], password=payload['password'])
        else:
            PaypalCredentials.objects.create(
                provider=provider,
                username=payload['username'],
                password=payload['password']
            )
        # Inform the provider
        subject = "[P4ALL] Payment settings"
        content = "Dear " + user.username + ",\n\n"
        if insertion_flag:
            content += "You have successfully inserted your payment settings in the AoD platform.\n\n"
        else:
            content += "You have modified your payment settings in the AoD platform.\n\n"
        content += "Let us know if this action has not been performed from your side!\n\n"
        content += "Sincerely,\nThe administration team"
        utilities.sendEmail([user.email], subject, content, False)
        retrieve_paypal_access_token(request.session['id'])
        messages.info(request, _("Your settings have been saved!"))
        return redirect(reverse('payment_settings'), permanent=True)
    except IntegrityError as ie:
        logger.exception(str(ie))
        messages.error(request, str(ie))
        return render(request, template, {
            'year': datetime.now().year,
            'payment_settings': payload
        })
    except Exception as ex:
        logger.exception(str(ex))
        return HttpResponseServerError(str(ex))
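# Note that 'settings' in the view above is a Django QuerySet, so
# settings.update(username=..., password=...) is a bulk SQL UPDATE against
# the matching PaypalCredentials rows, not dict.update. A rough sketch of
# the upsert logic (variable names here are hypothetical):
#
#   qs = PaypalCredentials.objects.filter(provider=provider)
#   if qs.count():
#       qs.update(username=client_id, password=client_secret)  # UPDATE ... WHERE
#   else:
#       PaypalCredentials.objects.create(
#           provider=provider, username=client_id, password=client_secret)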
def student_view(self, context=None):
    context_html = self.get_context_student()
    template = self.render_template('static/html/scormxblock.html', context_html)
    frag = Fragment(template)
    frag.add_css(self.resource_string("static/css/scormxblock.css"))
    frag.add_javascript(self.resource_string("static/js/src/scormxblock.js"))
    settings = {'version_scorm': self.version_scorm}
    settings.update(self.get_settings_student())
    frag.initialize_js('ScormXBlock', json_args=settings)
    return frag
def initialize_form(cls):
    # todo remove me
    form_class = cls.get_form_class()
    if hasattr(form_class, 'initial_settings'):
        settings = form_class.initial_settings.copy()
    else:
        settings = {}
    settings.update(cls.form_settings or {})
    # settings['from_doc_date'] = cls.get_default_from_date()
    # settings['to_doc_date'] = cls.get_default_to_date()
    # return form_class(support_doc_type=True, **{'form_settings': settings})
    return form_class()
def freeze(self) -> dict:
    """
    Returns a dictionary of all settings set for this object, including
    any default values of its parents or hardcoded in pretix.
    """
    settings = {}
    for key, v in DEFAULTS.items():
        settings[key] = self._unserialize(v['default'], v['type'])
    if self._parent:
        settings.update(self._parent.settings.freeze())
    for key in self._cache():
        settings[key] = self.get(key)
    return settings
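# The precedence that both freeze() variants produce, sketched with plain
# dicts: hardcoded defaults are filled in first, the parent's frozen
# settings override them, and locally cached (explicitly set) values
# override everything. Keys and values are illustrative.
defaults = {'theme': 'plain', 'locale': 'en'}
parent_frozen = {'locale': 'de'}
local_cache = {'theme': 'dark'}

frozen = dict(defaults)
frozen.update(parent_frozen)  # parent overrides defaults
frozen.update(local_cache)    # explicit local values win
assert frozen == {'theme': 'dark', 'locale': 'de'}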
def _prepare_cloned_data(self, original_asset, source_version, partial_update):
    """
    Some business rules must be applied when cloning an asset to another
    with a different type. It prepares the data to be cloned accordingly.

    It raises an exception if source and destination are not compatible
    for cloning.

    :param original_asset: Asset
    :param source_version: AssetVersion
    :param partial_update: Boolean
    :return: dict
    """
    if self._validate_destination_type(original_asset):
        # `to_clone_dict()` returns only `name`, `content`, `asset_type`,
        # and `tag_string`
        cloned_data = original_asset.to_clone_dict(version=source_version)

        # Allow the user's request data to override `cloned_data`
        cloned_data.update(self.request.data.items())

        if partial_update:
            # Because we're updating an asset from another which can have
            # another type, we need to remove `asset_type` from clone data
            # to ensure it's not updated when the serializer is initialized.
            cloned_data.pop("asset_type", None)
        else:
            # Change asset_type if needed.
            cloned_data["asset_type"] = self.request.data.get(
                ASSET_TYPE_ARG_NAME, original_asset.asset_type)

        cloned_asset_type = cloned_data.get("asset_type")

        # Settings are: Country, Description, Sector and Share-metadata.
        # Copy settings only when original_asset is `survey` or `template`
        # and the `asset_type` property of `cloned_data` is `survey` or
        # `template` or None (partial_update).
        if cloned_asset_type in [None, ASSET_TYPE_TEMPLATE, ASSET_TYPE_SURVEY] and \
                original_asset.asset_type in [ASSET_TYPE_TEMPLATE, ASSET_TYPE_SURVEY]:
            settings = original_asset.settings.copy()
            settings.pop("share-metadata", None)
            settings.update(cloned_data.get('settings', {}))
            cloned_data['settings'] = json.dumps(settings)

        # Until we get content passed as a dict, transform the content obj
        # to a str.
        # TODO: verify whether `Asset.content.settings.id_string` should be
        # cleared out.
        cloned_data["content"] = json.dumps(cloned_data.get("content"))
        return cloned_data
    else:
        raise BadAssetTypeException(
            "Destination type is not compatible with source type")
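# A dict-level sketch of the settings merge in _prepare_cloned_data: the
# source asset's settings are the base, 'share-metadata' is dropped, and
# any settings supplied in the cloning request override the rest. Keys and
# values below are illustrative.
source_settings = {'country': 'FR', 'sector': 'Health', 'share-metadata': True}
request_settings = {'country': 'DE'}

cloned = source_settings.copy()
cloned.pop('share-metadata', None)  # never carried over to the clone
cloned.update(request_settings)     # request data wins on conflicts
assert cloned == {'country': 'DE', 'sector': 'Health'}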
def __init__(self, config=None, **override_settings):
    config = config or getattr(self, 'using_storage', None)
    if type(config) in types.StringTypes:
        config = S3BotoStorageConfig(name=config)
    config = config or S3BotoStorageConfig()
    self.config = config
    settings = config.read_settings()
    settings.update(override_settings)
    # Implement our own property assignment, since the class we're
    # inheriting from didn't plan on being extended ...
    for name, value in settings.iteritems():
        if name in S3BotoStorageConfig.read_config:
            setattr(self, name, value)
    super(S3BotoMulticonfigStorage, self).__init__()
def get(self, request, project_id, model_id):
    HasModel.load(self, request, project_id, model_id)
    maboss_model = self.getMaBoSSModel()
    output_variables = {var: not value.is_internal
                        for var, value in maboss_model.network.items()}
    initial_states = maboss_model.network.get_istate()
    mutations = maboss_model.get_mutations()
    settings = {key: value for key, value in maboss_model.param.items()
                if not key.startswith("$")}
    if settings['use_physrandgen'] in [0, 1]:
        settings.update({'use_physrandgen': bool(settings['use_physrandgen'])})
    if settings['discrete_time'] in [0, 1]:
        settings.update({'discrete_time': bool(settings['discrete_time'])})
    return Response({
        'output_variables': OrderedDict(sorted(output_variables.items())),
        'initial_states': dumpIstates(initial_states),
        'mutations': OrderedDict(sorted(mutations)),
        'settings': settings
    })
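# A self-contained sketch of the 0/1 -> bool normalization applied to the
# MaBoSS parameters above; keys and values are illustrative only.
settings_demo = {'use_physrandgen': 0, 'discrete_time': 1, 'max_time': 100}
for flag in ('use_physrandgen', 'discrete_time'):
    if settings_demo[flag] in [0, 1]:
        settings_demo.update({flag: bool(settings_demo[flag])})
assert settings_demo == \
    {'use_physrandgen': False, 'discrete_time': True, 'max_time': 100}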
def get_settings(self):
    settings = {}
    for name, env_var in self.setting_env_map.items():
        settings[name] = self.lookup(env_var)
    settings.update(self.provided_settings)
    return settings
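# A runnable sketch of get_settings with the lookup backed by os.environ:
# env-derived values are collected first, then provided_settings override
# them. The mapping and variable names are hypothetical.
import os

setting_env_map = {'api_key': 'DEMO_API_KEY'}
provided_settings = {'timeout': 30}

os.environ['DEMO_API_KEY'] = 'secret'
resolved = {name: os.environ.get(var) for name, var in setting_env_map.items()}
resolved.update(provided_settings)  # explicitly provided values win
assert resolved == {'api_key': 'secret', 'timeout': 30}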
def __init__(self, acl='private', bucket=None, **settings):
    settings.update({'custom_domain': None})
    super().__init__(acl, bucket, **settings)
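# The __init__ above forces 'custom_domain' to None no matter what the
# caller passed, because the forced pair is merged into settings last.
# Sketched with a plain dict (keys are illustrative):
caller_settings = {'custom_domain': 'cdn.example.com', 'querystring_auth': False}
caller_settings.update({'custom_domain': None})
assert caller_settings == {'custom_domain': None, 'querystring_auth': False}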
def augment_settings(settings):
    default_installed_apps = [
        'django.contrib.contenttypes',
        'django.contrib.sessions',
        'django.contrib.messages',
        'django.contrib.staticfiles',
        'ptree.session',
    ]

    third_party_apps = ['data_exports', 'crispy_forms']

    # Order is important:
    # ptree unregisters User & Group, which are installed by auth.
    # ptree templates need to get loaded before the admin.
    # But ptree also unregisters data_exports, which comes afterwards?
    new_installed_apps = collapse_to_unique_list(
        ['django.contrib.auth', 'ptree', 'django.contrib.admin'],
        default_installed_apps,
        third_party_apps,
        settings['INSTALLED_APPS'],
        settings['INSTALLED_PTREE_APPS'])

    new_template_dirs = collapse_to_unique_list(
        [os.path.join(settings['BASE_DIR'], 'templates/')],
        settings.get('TEMPLATE_DIRS'))

    new_staticfiles_dirs = collapse_to_unique_list(
        settings.get('STATICFILES_DIRS'),
        [os.path.join(settings['BASE_DIR'], 'static')])

    new_middleware_classes = collapse_to_unique_list(
        [
            'django.contrib.sessions.middleware.SessionMiddleware',
            # 'django.middleware.locale.LocaleMiddleware',
            'django.middleware.common.CommonMiddleware',
            'django.contrib.auth.middleware.AuthenticationMiddleware',
            'django.contrib.messages.middleware.MessageMiddleware',
        ],
        settings.get('MIDDLEWARE_CLASSES'))

    augmented_settings = {
        'INSTALLED_APPS': new_installed_apps,
        'TEMPLATE_DIRS': new_template_dirs,
        'STATICFILES_DIRS': new_staticfiles_dirs,
        'MIDDLEWARE_CLASSES': new_middleware_classes,
    }

    overridable_settings = {
        'CRISPY_TEMPLATE_PACK': 'bootstrap3',
        # Pages with a time limit for the participant can have a grace
        # period to compensate for network latency. The timer is started
        # and stopped server-side, so this grace period should account for
        # time spent during download, upload, page rendering, etc.
        'TIME_LIMIT_GRACE_PERIOD_SECONDS': 5,
        'SESSION_SAVE_EVERY_REQUEST': True,
        'TEMPLATE_DEBUG': settings['DEBUG'],
        'STATIC_ROOT': 'staticfiles',
        'STATIC_URL': '/static/',
        'CURRENCY_CODE': 'USD',
        'CURRENCY_LOCALE': 'en_US',
        'CURRENCY_DECIMAL_PLACES': 2,
        'TIME_ZONE': 'UTC',
        'SESSION_SERIALIZER': 'django.contrib.sessions.serializers.PickleSerializer',
        'ALLOWED_HOSTS': ['*'],
        'PTREE_CHANGE_LIST_COLUMN_MIN_WIDTH': 50,  # in pixels
        'PTREE_CHANGE_LIST_UPDATE_INTERVAL': '10000',  # default to 10 seconds (10000 milliseconds)
        'TEMPLATE_CONTEXT_PROCESSORS': (
            global_settings.TEMPLATE_CONTEXT_PROCESSORS +
            ("django.core.context_processors.request",)),
    }

    settings.update(augmented_settings)
    # Overridable settings only apply if the project has not already
    # defined them.
    for k, v in overridable_settings.items():
        if k not in settings:
            settings[k] = v
def parse_storage_url(url, aws_setting_prefix='AWS_MEDIA_',
                      djeese_setting_prefix='DJEESE_',
                      storage_setting_name='DEFAULT_FILE_STORAGE'):
    settings = {}
    url = parse.urlparse(url)
    scheme = url.scheme.split('+', 1)
    if storage_setting_name:
        settings[storage_setting_name] = SCHEMES[scheme[0]]
    if scheme[0] == 's3':
        os.environ['S3_USE_SIGV4'] = 'True'
        config = {
            'ACCESS_KEY_ID': parse.unquote(url.username or ''),
            'SECRET_ACCESS_KEY': parse.unquote(url.password or ''),
            'STORAGE_BUCKET_NAME': url.hostname.split('.', 1)[0],
            'STORAGE_HOST': url.hostname.split('.', 1)[1],
            'BUCKET_PREFIX': url.path.lstrip('/'),
        }
        media_url = yurl.URL(
            scheme='https',
            host='.'.join([
                config['STORAGE_BUCKET_NAME'],
                config['STORAGE_HOST'],
            ]),
            path=config['BUCKET_PREFIX'],
        )
        settings['MEDIA_URL'] = media_url.as_string()
        settings.update({
            '{}{}'.format(aws_setting_prefix, key): value
            for key, value in config.items()
        })
    elif scheme[0] == 'djfs':
        hostname = ('{}:{}'.format(url.hostname, url.port)
                    if url.port else url.hostname)
        config = {
            'STORAGE_ID': url.username or '',
            'STORAGE_KEY': url.password or '',
            'STORAGE_HOST': parse.urlunparse((
                scheme[1],
                hostname,
                url.path,
                url.params,
                url.query,
                url.fragment,
            )),
        }
        media_url = yurl.URL(
            scheme=scheme[1],
            host=url.hostname,
            path=url.path,
            port=url.port or '',
        )
        settings['MEDIA_URL'] = media_url.as_string()
        settings.update({
            '{}{}'.format(djeese_setting_prefix, key): value
            for key, value in config.items()
        })
    if settings['MEDIA_URL'] and not settings['MEDIA_URL'].endswith('/'):
        # Django (or something else?) silently sets MEDIA_URL to an empty
        # string if it does not end with a '/'
        settings['MEDIA_URL'] = '{}/'.format(settings['MEDIA_URL'])
    return settings
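# Hypothetical invocation of parse_storage_url (bucket, credentials, and
# host are made up; assumes SCHEMES maps 's3' to a storage backend path and
# that yurl is importable). Roughly, for
#     's3://AKIA...:secret@my-bucket.s3.amazonaws.com/media'
# one would expect:
#     settings['AWS_MEDIA_STORAGE_BUCKET_NAME'] == 'my-bucket'
#     settings['AWS_MEDIA_STORAGE_HOST'] == 's3.amazonaws.com'
#     settings['AWS_MEDIA_BUCKET_PREFIX'] == 'media'
#     settings['MEDIA_URL'] == 'https://my-bucket.s3.amazonaws.com/media/'
# (the trailing '/' is appended by the final normalization step).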
def run(self, runjob_id):
    """
    Code here is run asynchronously in a Celery thread.

    To prevent re-creating a deleted object, any write to the database
    should use one of the following:
    + `queryset.update()`
    + `obj.save(update_fields=[...])`
    + `obj.file_field.save(..., save=False)` plus
      `obj.save(update_fields=['file_field'])`

    instead of:
    + `obj.save()`
    + `obj.file_field.save(..., save=True)`
    """
    runjob = RunJob.objects.get(uuid=runjob_id)
    settings = self._settings(runjob)
    inputs = self._inputs(runjob)

    with self.tempdir() as temp_dir:
        outputs = self._outputs(runjob)

        # Build the argument for run_my_task and a mapping dictionary that
        # retains where the originally assigned paths come from, to prevent
        # jobs from changing them.
        arg_outputs = {}
        temppath_map = {}
        for opt_name, output_list in outputs.iteritems():
            if opt_name not in arg_outputs:
                arg_outputs[opt_name] = []
            for output in output_list:
                if output['is_list'] is False:
                    output_res_tempname = str(uuid.uuid4())
                    output_res_temppath = os.path.join(temp_dir, output_res_tempname)
                    arg_outputs[opt_name].append({
                        'resource_path': output_res_temppath,
                        'resource_type': output['resource_type']
                    })
                    output['resource_temp_path'] = output_res_temppath
                    temppath_map[output_res_temppath] = output
                else:
                    # Create a folder for the resource list.
                    output_res_tempname = str(uuid.uuid4())
                    output_res_tempfolder = os.path.join(temp_dir, output_res_tempname) + os.sep
                    os.mkdir(output_res_tempfolder)
                    arg_outputs[opt_name].append({
                        'resource_folder': output_res_tempfolder,
                        'resource_type': output['resource_type']
                    })
                    output['resource_temp_folder'] = output_res_tempfolder
                    temppath_map[output_res_tempfolder] = output

        retval = self.run_my_task(inputs, settings, arg_outputs)

        if isinstance(retval, self.WAITING_FOR_INPUT):
            settings.update(retval.settings_update)
            runjob.status = task_status.WAITING_FOR_INPUT
            runjob.job_settings = settings
            runjob.error_summary = None
            runjob.error_details = None
            runjob.celery_task_id = None
            runjob.save(update_fields=['status', 'job_settings', 'error_summary',
                                       'error_details', 'celery_task_id'])

            # Send an email to the owner of the WorkflowRun.
            wfrun_id = RunJob.objects.filter(pk=runjob_id).values_list(
                'workflow_run__uuid', flat=True)[0]
            workflowrun = WorkflowRun.objects.get(uuid=wfrun_id)
            user = workflowrun.creator
            if not rodan_settings.TEST:
                if user.email and rodan_settings.EMAIL_USE and user.user_preference.send_email:
                    subject = "Workflow Run '{0}' is waiting for user input".format(workflowrun.name)
                    body = "A workflow run you started is waiting for user input.\n\n"
                    body = body + "Name: {0}\n".format(workflowrun.name)
                    body = body + "Description: {0}".format(workflowrun.description)
                    to = [user.email]
                    registry.tasks['rodan.core.send_email'].apply_async((subject, body, to))
            return 'WAITING FOR INPUT'
        else:
            # Ensure the runjob did not produce any error.
            try:
                if len(self.error_details) > 0:
                    raise RuntimeError(self.error_details)
            except AttributeError:
                pass

            # Ensure the job has produced all output files.
            for opt_name, output_list in outputs.iteritems():
                for output in output_list:
                    if output['is_list'] is False:
                        if not os.path.isfile(output['resource_temp_path']):
                            raise RuntimeError("The job did not produce the output file for {0}".format(opt_name))
                    else:
                        files = [f for f in os.listdir(output['resource_temp_folder'])
                                 if os.path.isfile(os.path.join(output['resource_temp_folder'], f))]
                        if len(files) == 0:
                            raise RuntimeError("The job did not produce any output files for the resource list for {0}".format(opt_name))

            # Save outputs.
            for temppath, output in temppath_map.iteritems():
                if output['is_list'] is False:
                    with open(temppath, 'rb') as f:
                        resource = Output.objects.get(uuid=output['uuid']).resource
                        # Django will resolve the path according to upload_to.
                        resource.resource_file.save(temppath, File(f), save=False)
                        resource.save(update_fields=['resource_file'])
                        #registry.tasks['rodan.core.create_thumbnails'].run(resource.uuid.hex)  # call synchronously
                        #registry.tasks['rodan.core.create_diva'].run(resource.uuid.hex)  # call synchronously
                else:
                    files = [ff for ff in os.listdir(output['resource_temp_folder'])
                             if os.path.isfile(os.path.join(output['resource_temp_folder'], ff))]
                    files.sort()  # alphabetical order
                    resourcelist = Output.objects.get(uuid=output['uuid']).resource_list
                    for index, ff in enumerate(files):
                        with open(os.path.join(output['resource_temp_folder'], ff), 'rb') as f:
                            resource = Resource(
                                project=resourcelist.project,
                                resource_type=resourcelist.resource_type,
                                name=ff,
                                description="Order #{0} in ResourceList {1}".format(index, resourcelist.name),
                                origin=resourcelist.origin
                            )
                            resource.save()
                            # Django will resolve the path according to upload_to.
                            resource.resource_file.save(ff, File(f), save=False)
                            resource.save(update_fields=['resource_file'])
                            #registry.tasks['rodan.core.create_thumbnails'].run(resource.uuid.hex)  # call synchronously
                            #registry.tasks['rodan.core.create_diva'].run(resource.uuid.hex)  # call synchronously
                            resourcelist.resources.add(resource)

            runjob.status = task_status.FINISHED
            runjob.error_summary = None
            runjob.error_details = None
            runjob.celery_task_id = None
            runjob.save(update_fields=['status', 'error_summary', 'error_details', 'celery_task_id'])

            # Call master task.
            master_task = registry.tasks['rodan.core.master_task']
            wfrun_id = str(runjob.workflow_run.uuid)
            mt_retval = master_task.run(wfrun_id)
            return "FINISHED | master_task: {0}".format(mt_retval)
def run(self, runjob_id):
    """
    Code here is run asynchronously in a Celery thread.

    To prevent re-creating a deleted object, any write to the database
    should use one of the following:
    + `queryset.update()`
    + `obj.save(update_fields=[...])`
    + `obj.file_field.save(..., save=False)` plus
      `obj.save(update_fields=['file_field'])`

    instead of:
    + `obj.save()`
    + `obj.file_field.save(..., save=True)`
    """
    runjob = RunJob.objects.get(uuid=runjob_id)
    settings = self._settings(runjob)
    inputs = self._inputs(runjob)

    with self.tempdir() as temp_dir:
        outputs = self._outputs(runjob)

        # Build the argument for run_my_task and a mapping dictionary that
        # retains where the originally assigned paths come from, to prevent
        # jobs from changing them.
        arg_outputs = {}
        temppath_map = {}
        for opt_name, output_list in outputs.items():
            if opt_name not in arg_outputs:
                arg_outputs[opt_name] = []
            for output in output_list:
                if output['is_list'] is False:
                    output_res_tempname = str(uuid.uuid4())
                    output_res_temppath = os.path.join(temp_dir, output_res_tempname)
                    arg_outputs[opt_name].append({
                        'resource_path': output_res_temppath,
                        'resource_type': output['resource_type']
                    })
                    output['resource_temp_path'] = output_res_temppath
                    temppath_map[output_res_temppath] = output
                else:
                    # Create a folder for the resource list.
                    output_res_tempname = str(uuid.uuid4())
                    output_res_tempfolder = os.path.join(temp_dir, output_res_tempname) + os.sep
                    os.mkdir(output_res_tempfolder)
                    arg_outputs[opt_name].append({
                        'resource_folder': output_res_tempfolder,
                        'resource_type': output['resource_type']
                    })
                    output['resource_temp_folder'] = output_res_tempfolder
                    temppath_map[output_res_tempfolder] = output

        retval = self.run_my_task(inputs, settings, arg_outputs)

        if isinstance(retval, self.WAITING_FOR_INPUT):
            settings.update(retval.settings_update)
            runjob.status = task_status.WAITING_FOR_INPUT
            runjob.job_settings = settings
            runjob.error_summary = None
            runjob.error_details = None
            runjob.celery_task_id = None
            runjob.save(update_fields=['status', 'job_settings', 'error_summary',
                                       'error_details', 'celery_task_id'])

            # Send an email to the owner of the WorkflowRun.
            wfrun_id = RunJob.objects.filter(pk=runjob_id).values_list(
                'workflow_run__uuid', flat=True)[0]
            workflowrun = WorkflowRun.objects.get(uuid=wfrun_id)
            user = workflowrun.creator
            if not rodan_settings.TEST:
                if user.email and rodan_settings.EMAIL_USE and user.user_preference.send_email:
                    subject = "Workflow Run '{0}' is waiting for user input".format(workflowrun.name)
                    body = "A workflow run you started is waiting for user input.\n\n"
                    body = body + "Name: {0}\n".format(workflowrun.name)
                    body = body + "Description: {0}".format(workflowrun.description)
                    to = [user.email]
                    registry.tasks['rodan.core.send_email'].apply_async((subject, body, to))
                    # registry.tasks['rodan.core.send_email'].apply_async((subject, body, to), queue="celery")
            return 'WAITING FOR INPUT'
        else:
            # Ensure the runjob did not produce any error.
            try:
                if len(self.error_details) > 0:
                    raise RuntimeError(self.error_details)
            except AttributeError:
                pass

            # Ensure the job has produced all output files.
            for opt_name, output_list in outputs.items():
                for output in output_list:
                    if output['is_list'] is False:
                        if not os.path.isfile(output['resource_temp_path']):
                            raise RuntimeError("The job did not produce the output file for {0}".format(opt_name))
                    else:
                        files = [f for f in os.listdir(output['resource_temp_folder'])
                                 if os.path.isfile(os.path.join(output['resource_temp_folder'], f))]
                        if len(files) == 0:
                            raise RuntimeError("The job did not produce any output files for the resource list for {0}".format(opt_name))

            # Save outputs.
            for temppath, output in temppath_map.items():
                if output['is_list'] is False:
                    with open(temppath, 'rb') as f:
                        resource = Output.objects.get(uuid=output['uuid']).resource
                        # Django will resolve the path according to upload_to.
                        resource.resource_file.save(temppath, File(f), save=False)
                        resource.save(update_fields=['resource_file'])
                        if resource.resource_type.mimetype.startswith('image'):
                            # registry.tasks['rodan.core.create_thumbnails'].run(resource.uuid.hex)  # call synchronously
                            # registry.tasks['rodan.core.create_diva'].run(resource.uuid.hex)  # call synchronously
                            registry.tasks['rodan.core.create_diva'].si(
                                resource.uuid.hex).apply_async(queue="celery")  # call asynchronously
                else:
                    files = [ff for ff in os.listdir(output['resource_temp_folder'])
                             if os.path.isfile(os.path.join(output['resource_temp_folder'], ff))]
                    files.sort()  # alphabetical order
                    resourcelist = Output.objects.get(uuid=output['uuid']).resource_list
                    for index, ff in enumerate(files):
                        with open(os.path.join(output['resource_temp_folder'], ff), 'rb') as f:
                            resource = Resource(
                                project=resourcelist.project,
                                resource_type=resourcelist.resource_type,
                                name=ff,
                                description="Order #{0} in ResourceList {1}".format(index, resourcelist.name),
                                origin=resourcelist.origin
                            )
                            resource.save()
                            # Django will resolve the path according to upload_to.
                            resource.resource_file.save(ff, File(f), save=False)
                            resource.save(update_fields=['resource_file'])
                            if resource.resource_type.mimetype.startswith('image'):
                                # registry.tasks['rodan.core.create_thumbnails'].run(resource.uuid.hex)  # call synchronously
                                registry.tasks['rodan.core.create_diva'].run(resource.uuid.hex)  # call synchronously
                                # registry.tasks['rodan.core.create_diva'].si(resource.uuid.hex).apply_async(queue="celery")
                            resourcelist.resources.add(resource)

            runjob.status = task_status.FINISHED
            runjob.error_summary = None
            runjob.error_details = None
            runjob.celery_task_id = None
            runjob.save(update_fields=['status', 'error_summary', 'error_details', 'celery_task_id'])

            # Call master task.
            master_task = registry.tasks['rodan.core.master_task']
            wfrun_id = str(runjob.workflow_run.uuid)
            mt_retval = master_task.si(wfrun_id).apply_async(queue="celery")
            return "FINISHED | master_task: {0}".format(mt_retval)
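# Both run() variants merge a paused task's partial settings back into the
# job settings before persisting (settings.update(retval.settings_update)).
# A dict-level sketch of that WAITING_FOR_INPUT handshake; keys and values
# are illustrative only.
job_settings = {'threshold': 0.5, 'interactive_point': None}
settings_update = {'interactive_point': (120, 80)}  # e.g. chosen by the user

job_settings.update(settings_update)  # partial update wins over stored value
assert job_settings == {'threshold': 0.5, 'interactive_point': (120, 80)}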