def start(scenario, settings, events, aggregator_device_mapping, saved_state):
    """RQ task entry point: persist the current job's meta, then delegate
    the whole simulation launch to the d3a RQ job handler.

    All parameters are passed through untouched; ``job.id`` is forwarded so
    the launcher can publish results under this job's id.
    """
    # Imported lazily so the worker module can be loaded without pulling
    # in the full d3a simulation stack.
    from d3a.d3a_core.rq_job_handler import launch_simulation_from_rq_job

    current_job = get_current_job()
    current_job.save_meta()
    launch_simulation_from_rq_job(
        scenario,
        settings,
        events,
        aggregator_device_mapping,
        saved_state,
        current_job.id,
    )
def start(scenario, settings):
    """RQ task entry point: build a ``SimulationConfig`` from the queued
    ``settings`` dict, start the REST/web endpoint for this worker and run
    the simulation to completion.

    ``scenario`` may be None (use the default setup), the name of a known
    setup module, or a JSON scenario description (assigned to
    ``config.area``).  ``settings`` may be None or a dict of overrides;
    duration/slot/tick values are assumed to be timedelta-like objects
    exposing ``.days`` / ``.seconds`` — TODO confirm against the enqueuer.
    """
    logging.getLogger().setLevel(logging.ERROR)
    # Where this worker's API listens; WORKER_HOST lets the externally
    # advertised host differ from the bind interface.
    interface = environ.get('WORKER_INTERFACE', "0.0.0.0")
    port = int(environ.get('WORKER_PORT', 5000))
    api_host = environ.get('WORKER_HOST', interface)
    api_url = "http://{}:{}/api".format(api_host, port)

    # Publish the API URL in the RQ job meta so clients polling the job
    # know where to reach this simulation.
    job = get_current_job()
    job.meta['api_url'] = api_url
    job.save_meta()

    if settings is None:
        settings = {}
    # advanced_settings arrives as a Python-literal string; literal_eval
    # parses it without executing arbitrary code.
    advanced_settings = settings.get('advanced_settings', None)
    if advanced_settings is not None:
        update_advanced_settings(ast.literal_eval(advanced_settings))

    # Fall back to built-in defaults (1 day / 15 min slots / 15 s ticks)
    # for any value the caller did not supply.
    config = SimulationConfig(
        duration=pendulum.duration(
            days=1 if 'duration' not in settings else settings['duration'].days),
        slot_length=pendulum.duration(
            seconds=15 * 60 if 'slot_length' not in settings
            else settings['slot_length'].seconds),
        tick_length=pendulum.duration(
            seconds=15 if 'tick_length' not in settings
            else settings['tick_length'].seconds),
        market_count=settings.get('market_count', 1),
        cloud_coverage=settings.get(
            'cloud_coverage', ConstSettings.PVSettings.DEFAULT_POWER_PROFILE),
        pv_user_profile=settings.get('pv_user_profile', None),
        market_maker_rate=settings.get(
            'market_maker_rate',
            str(ConstSettings.GeneralSettings.DEFAULT_MARKET_MAKER_RATE)),
        iaa_fee=settings.get('iaa_fee', ConstSettings.IAASettings.FEE_PERCENTAGE))

    # Resolve the setup module name; anything not in the known scenario
    # list is treated as an inline JSON scenario.
    if scenario is None:
        scenario_name = "default"
    elif scenario in available_simulation_scenarios:
        scenario_name = scenario
    else:
        scenario_name = 'json_arg'
        config.area = scenario

    simulation = Simulation(
        scenario_name,
        config,
        slowdown=settings.get('slowdown', 0),
        exit_on_finish=True,
        exit_on_finish_wait=pendulum.duration(seconds=10),
        api_url=api_url,
        redis_job_id=job.id)
    # NOTE(review): start_web presumably runs the API server alongside the
    # blocking simulation.run() — confirm it is non-blocking.
    start_web(interface, port, simulation)
    simulation.run()
def save_key_ttl(key):
    """Store the Redis TTL of ``key`` in the current RQ job's meta.

    The TTL (seconds, or -1/-2 for no-expiry/missing keys, per Redis
    semantics) is saved under ``meta['ttl']`` so it can be inspected by
    whoever polls the job.
    """
    job = get_current_job()
    ttl = job.connection.ttl(key)
    # Update the single key instead of replacing the whole meta dict:
    # wholesale assignment (job.meta = {...}) would silently discard any
    # meta previously saved on this job (e.g. 'api_url' set by other tasks).
    job.meta['ttl'] = ttl
    job.save_meta()
def _meta_counter(job, key):
    """Best-effort read of an integer progress counter from RQ job meta.

    Returns 0 when the key is absent or its value is not an integer, so a
    job that was never interrupted starts from scratch.
    """
    try:
        return int(job.meta[key])
    except (KeyError, TypeError, ValueError):
        return 0


def bulk_email(taskid, *args, **kwargs):
    """Send the email campaign described by the ``AsyncCronMail`` row
    ``taskid`` to every address in its CSV file, one row at a time.

    Progress (current row, sent count, error count) is checkpointed in the
    RQ job meta after each row so an interrupted job resumes where it left
    off instead of re-mailing earlier recipients.  Per-recipient outcomes
    are appended to the task's CSV log file; a summary report is written to
    the task row on completion.
    """
    task = AsyncCronMail.objects.get(pk=taskid)
    if task.log_file.name == "":
        # First run: create a uniquely named log file for this campaign.
        log_file_name = 'log_email_' + uuid.uuid4().hex + ".csv"
        task.log_file.name = 'emails/' + log_file_name
        task.save()

    with open(settings.MEDIA_ROOT + task.log_file.name, "a") as log_file:
        with open(task.csvfile.path, newline='') as csvfile:
            csvreader = list(csv.reader(csvfile, delimiter=' ', quotechar='|'))

            job = get_current_job()
            # Resume counters from a previous (interrupted) run, if any.
            row_id = _meta_counter(job, 'row_id')
            sent = _meta_counter(job, 'sent')
            errors = _meta_counter(job, 'errors')

            for i, row in enumerate(csvreader[row_id:], row_id):
                # Checkpoint the position before attempting the send so a
                # crash mid-send does not repeat this row on resume.
                job.meta['row_id'] = i
                job.save_meta()
                if len(row) < 1:
                    continue
                if i % 10 == 0:
                    print('Total ran: ', i)
                    # Throttle every 10 rows to avoid hammering the SMTP
                    # server — assumed intent; confirm sleep placement.
                    time.sleep(5)

                email = EmailMultiAlternatives(
                    task.subject,
                    task.message,
                    task.sender,
                    to=[row[0]],
                    headers={"Content-type": "text/html"},
                )
                try:
                    validate_email(row[0])
                    email.attach_alternative(task.message, "text/html")
                    email.send()
                    sent += 1
                    job.meta['sent'] = sent
                    job.save_meta()
                    log_file.write(str(row[0]) + ',' + str(1) + '\n')
                except ValidationError as mail_error:
                    log_file.write(
                        str(row[0]) + ',' + str(0) + ',' + str(mail_error) + '\n')
                    errors += 1
                    job.meta['errors'] = errors
                    job.save_meta()
                except SMTPException:
                    log_file.write(
                        str(row[0]) + ',' + str(0) + ','
                        + str('SMTP mail send error.') + '\n')
                    errors += 1
                    job.meta['errors'] = errors
                    job.save_meta()
                except BadHeaderError as header_error:
                    log_file.write(
                        str(row[0]) + ',' + str(0) + ',' + str(header_error) + '\n')
                    errors += 1
                    job.meta['errors'] = errors
                    job.save_meta()
                except ConnectionRefusedError:
                    log_file.write(
                        str(row[0]) + ',' + str(0) + ','
                        + str('Failed to connect to SMTP server.') + '\n')
                    errors += 1
                    # Fix: this branch previously skipped the meta update,
                    # so the error count was lost on resume.
                    job.meta['errors'] = errors
                    job.save_meta()
                except SMTPServerDisconnected:
                    log_file.write(
                        str(row[0]) + ',' + str(0) + ','
                        + str('Failed to connect to SMTP server.') + '\n')
                    errors += 1
                    job.meta['errors'] = errors
                    job.save_meta()
                except OSError:
                    log_file.write(
                        str(row[0]) + ',' + str(0) + ','
                        + str('Failed to connect to SMTP server.') + '\n')
                    errors += 1
                    job.meta['errors'] = errors
                    job.save_meta()

    # All rows processed: record the final summary on the task row.
    task.completed_at = timezone.now()
    task.report = "Total: " + str(sent + errors) + "\n" + "Sent: " \
        + str(sent) + "\n" + "Errors: " + str(errors)
    task.status = True
    task.save()
def start(scenario, settings, events, aggregator_device_mapping):
    """RQ task entry point: decode the queued scenario, build a validated
    ``SimulationConfig`` from ``settings`` and run the simulation.

    Any exception is caught at the top level so the failure is published to
    the Redis error channel under this job's id instead of only dying in
    the worker.

    ``settings`` values for duration/slot/tick are assumed to be
    timedelta-like objects exposing ``.days`` / ``.seconds``; ``events``
    and ``aggregator_device_mapping`` arrive as serialized strings —
    TODO confirm against the enqueuer.
    """
    logging.getLogger().setLevel(logging.ERROR)
    # Scenario is queued compressed/encoded; restore it first.
    scenario = decompress_and_decode_queued_strings(scenario)
    job = get_current_job()
    job.save_meta()
    try:
        if settings is None:
            settings = {}
        else:
            # Drop null-ish overrides (None or the string "None") so the
            # GlobalConfig defaults below take effect for them.
            settings = {
                k: v for k, v in settings.items()
                if v is not None and v != "None"
            }
        # advanced_settings arrives as a Python-literal string;
        # literal_eval parses it without executing arbitrary code.
        advanced_settings = settings.get('advanced_settings', None)
        if advanced_settings is not None:
            update_advanced_settings(ast.literal_eval(advanced_settings))
        aggregator_device_mapping = json.loads(aggregator_device_mapping)
        if events is not None:
            events = ast.literal_eval(events)

        # Every entry falls back to the corresponding GlobalConfig /
        # ConstSettings default when absent from `settings`.
        config_settings = {
            "start_date":
                instance(datetime.combine(settings.get('start_date'),
                                          datetime.min.time()))
                if 'start_date' in settings else GlobalConfig.start_date,
            "sim_duration":
                duration(days=settings['duration'].days)
                if 'duration' in settings else GlobalConfig.sim_duration,
            "slot_length":
                duration(seconds=settings['slot_length'].seconds)
                if 'slot_length' in settings else GlobalConfig.slot_length,
            "tick_length":
                duration(seconds=settings['tick_length'].seconds)
                if 'tick_length' in settings else GlobalConfig.tick_length,
            "market_maker_rate":
                settings.get(
                    'market_maker_rate',
                    str(ConstSettings.GeneralSettings.DEFAULT_MARKET_MAKER_RATE)),
            "market_count": settings.get('market_count', GlobalConfig.market_count),
            "cloud_coverage": settings.get(
                'cloud_coverage', GlobalConfig.cloud_coverage),
            "pv_user_profile": settings.get('pv_user_profile', None),
            "max_panel_power_W": settings.get(
                'max_panel_power_W', ConstSettings.PVSettings.MAX_PANEL_OUTPUT_W),
            "grid_fee_type": settings.get(
                'grid_fee_type', GlobalConfig.grid_fee_type),
            "external_connection_enabled": settings.get(
                'external_connection_enabled', False),
            "aggregator_device_mapping": aggregator_device_mapping
        }

        # Validate before constructing so bad settings fail fast with a
        # meaningful error published below.
        validate_global_settings(config_settings)
        config = SimulationConfig(**config_settings)

        spot_market_type = settings.get('spot_market_type', None)
        if spot_market_type is not None:
            # NOTE: mutates process-global settings for this worker.
            ConstSettings.IAASettings.MARKET_TYPE = spot_market_type

        # Resolve the setup module name; anything not in the known scenario
        # list is treated as an inline JSON scenario.
        if scenario is None:
            scenario_name = "default_2a"
        elif scenario in available_simulation_scenarios:
            scenario_name = scenario
        else:
            scenario_name = 'json_arg'
            config.area = scenario

        kwargs = {"no_export": True,
                  "pricing_scheme": 0,
                  "seed": settings.get('random_seed', 0)}

        # Env var takes precedence over the queued slowdown setting.
        slowdown_factor = environ.get('D3A_SLOWDOWN_FACTOR', None)
        if slowdown_factor is None:
            slowdown_factor = settings.get('slowdown', 0)
        else:
            slowdown_factor = float(slowdown_factor)

        run_simulation(setup_module_name=scenario_name,
                       simulation_config=config,
                       simulation_events=events,
                       slowdown=slowdown_factor,
                       redis_job_id=job.id,
                       kwargs=kwargs)
    except Exception:
        # Publish the traceback under the job id so the enqueuer sees the
        # failure, then log it locally as well.
        import traceback
        from d3a.d3a_core.redis_connections.redis_communication import (
            publish_job_error_output)
        publish_job_error_output(job.id, traceback.format_exc())
        logging.getLogger().error(
            f"Error on jobId {job.id}: {traceback.format_exc()}")
def save_key_ttl(key):
    """Store the Redis TTL of ``key`` in the current RQ job's meta.

    The TTL (seconds, or -1/-2 for no-expiry/missing keys, per Redis
    semantics) is saved under ``meta['ttl']`` so it can be inspected by
    whoever polls the job.
    """
    job = get_current_job()
    ttl = job.connection.ttl(key)
    # Update the single key instead of replacing the whole meta dict:
    # wholesale assignment (job.meta = {...}) would silently discard any
    # meta previously saved on this job.
    job.meta['ttl'] = ttl
    job.save_meta()