def main():
    """Build a small Alteryx workflow (input -> autofield -> select ->
    filter -> output), write it to disk, and run it with the Alteryx engine.
    """
    src_path = 'C:\\Program Files\\Alteryx\\Samples\\en\\SampleData\\Customers.csv'
    dst_path = 'C:\\Temp\\output.csv'

    # All columns of the sample customers file, in file order.
    column_names = [
        'Customer ID', 'Store Number', 'Customer Segment', 'Responder',
        'First Name', 'Last Name', 'Address', 'City', 'State', 'Zip',
        'Lat', 'Lon',
    ]

    # Input tool: read the sample CSV (first row is the header).
    source = 'File: ' + src_path
    reader = InputTool(
        '1',
        InputToolConfiguration(input_file_name=src_path, header_row=True),
        [Field(name=column, source=source) for column in column_names])
    reader.position = (78, 66)

    # Autofield tool: let Alteryx pick the narrowest type for each column.
    typer = AutofieldTool(
        '3', [AutofieldField(field=column) for column in column_names])
    typer.position = (196, 66)

    # Select tool: entries for Lat/Lon plus any unknown fields.
    chooser = SelectTool(
        '2',
        SelectToolConfiguration(),
        [SelectField(field='Lat'),
         SelectField(field='Lon'),
         SelectField(field='*Unknown', selected=True)])
    chooser.position = (313, 66)

    # Filter tool with a custom expression.
    # NOTE(review): Alteryx formulas normally use '=' for equality; confirm
    # that '==' is accepted by the target engine version.
    sieve = FilterTool(
        '4',
        FilterToolConfiguration(
            expression='[City] != "DENVER" AND [Responder] == "Yes"',
            filter_mode=FilterMode.CUSTOM))
    sieve.position = (407, 66)

    # Output tool: write the surviving records to a CSV file.
    writer = OutputTool(
        '6', OutputToolConfiguration(output_file_name=dst_path))
    writer.position = (510, 54)

    # Assemble the workflow, persist it, and hand it to the engine.
    workflow = Workflow('Simple', '2019.1')
    workflow \
        .add_tool(reader) \
        .add_tool(typer) \
        .add_tool(chooser) \
        .add_tool(sieve) \
        .add_tool(writer) \
        .add_connection(reader, 'Output', typer, 'Input') \
        .add_connection(typer, 'Output', chooser, 'Input') \
        .add_connection(chooser, 'Output', sieve, 'Input') \
        .add_connection(sieve, 'True', writer, 'Input') \
        .write() \
        .run('"C:\\Program Files\\Alteryx\\bin\\AlteryxEngineCmd.exe"')
"""Use case 05: match up HIRS N17 (primary) against HIRS N15 (secondary)."""
from workflow import Workflow

# Both sensors are matched over the same processing period.
START_DATE = '2002-07-10'
END_DATE = '2013-04-09'

flow = Workflow('usecase05_hirs_n17_n15', 7,
                '/group_workspaces/cems2/fiduceo/Software/mms/config')
flow.add_primary_sensor('hirs-n17', START_DATE, END_DATE, '1.0')
flow.add_secondary_sensor('hirs-n15', START_DATE, END_DATE, '1.0')
flow.set_usecase_config('usecase-05.xml')
flow.run_matchup(hosts=[('localhost', 24)])
arg=u'{deliver_status} {time} {message}'.format( deliver_status=deliver_status, time=item['time'], message=item['message']), valid=True, icon=icon) else: wf.add_item(title=u"快递助手", subtitle=u"无需输入公司,直接输入单号即可", arg="", valid=True, icon=package_icon) wf.send_feedback() if __name__ == u"__main__": wf = Workflow(update_settings={ 'github_slug': 'deamwork/kuaidi-workflow', 'frequency': 7 }) if wf.update_available: wf.add_item(title=u'发现新版本', subtitle=u'选中本条目开始更新', autocomplete='workflow:update', icon=ICON_INFO) sys.exit(wf.run(main))
"""Post-process use case 03 MMD files (AVHRR N16 vs. IASI Metop-A)."""
from period import Period
from workflow import Workflow

# Time range the post-processing covers.
processing_period = Period('2007-05-29', '2010-12-31')

flow = Workflow('post_process_uc03_avhrr_n16_iasi_ma', 7,
                '/group_workspaces/cems2/esacci_sst/mms_new/config',
                processing_period)
flow.set_input_dir(
    '/group_workspaces/cems2/fiduceo/Data/mms/mmd/mmd03/avhrr_n16_iasi_ma')
flow.set_usecase_config('usecase-03-pp.xml')
flow.run_post_processing(hosts=[('localhost', 48)])
"""Ingest AVHRR N11 data (version v01.2) into the MMS system."""
from workflow import Workflow

flow = Workflow('ingest_avhrr_n11', 7,
                '/group_workspaces/cems2/fiduceo/Software/mms/config')
flow.add_primary_sensor('avhrr-n11', '1988-11-08', '1994-12-31', 'v01.2')
flow.run_ingestion(hosts=[('localhost', 24)])
else: books = wf.filter(query, books, key=lambda book: u' '.join(book.title) + u' ' + u' '.join(book.author), match_on=MATCH_ALL ^ MATCH_ALLCHARS, min_score=30) for b in books: if b.genre == '': b.genre = 'No genre for this title available in Books' wf.add_item(type='file', title=b.title, valid=True, subtitle=b.author, arg=b.path, icon=b.path, icontype='fileicon', quicklookurl=b.path, largetext=b.title + u', by ' + b.author + u'\nGenre: ' + b.genre + u'\nCompleted: ' + b.read_pct + u'\nDescription:\n' + b.book_desc) wf.send_feedback() if __name__ == u"__main__": wf = Workflow(help_url='https://github.com/codycodes/alfred-books/issues', update_settings={'github_slug': 'codycodes/alfred-books'}) log = wf.logger sys.exit(wf.run(main))
def wf():
    """Return the module-level Workflow singleton, creating it on first use."""
    global _wf
    # Reuse the cached instance when one already exists.
    if _wf is not None:
        return _wf
    _wf = Workflow()
    return _wf
import os from workflow import Workflow from subprocess import call logger = Workflow().logger def external_trigger(name, argument): """ Call to external trigger in Alfred. This utilize apple script functionality to trigger in Alfred. Args: name (str): Name of the trigger. argument: Argument to the trigger. Returns: int: Return code from osascript exec """ major_version = os.environ['alfred_version'].split('.')[0] osascript = 'tell application "Alfred {version}" to run trigger ' \ '"{name}" in workflow "{uuid}" with argument "{arg}"' \ .format(version=major_version, name=name, uuid=os.environ['alfred_workflow_bundleid'], arg=argument) cmd = ['/usr/bin/osascript', '-e', osascript]
"OmniFocus direct and MAS db's found; using {0} as it's newer " "(Direct {1} vs. MAS {2})".format(db, db_mod, mas_mod)) log.debug(db) return db def mod_date(filename): mtime = os.path.getmtime(filename) return datetime.datetime.fromtimestamp(mtime) def run_query(sql): db_path = find_omnifocus_db() conn = sqlite3.connect(db_path) conn.row_factory = sqlite3.Row cursor = conn.cursor() log.debug(sql) cursor.execute(sql) results = cursor.fetchall() log.debug("Found {0} results".format(len(results))) cursor.close() return results if __name__ == '__main__': workflow = Workflow(update_settings=UPDATE_SETTINGS) log = workflow.logger sys.exit(workflow.run(main))
"""Use case 22: match up AMSU-B N17 (primary) against SSMT2 F12 (secondary)."""
from workflow import Workflow

# Both sensors are matched over the same processing period.
START_DATE = '2002-06-05'
END_DATE = '2002-07-30'

flow = Workflow('usecase22_amsub_n17_ssmt2_f12', 7,
                '/group_workspaces/cems2/fiduceo/Software/mms/config')
flow.add_primary_sensor('amsub-n17', START_DATE, END_DATE, 'v1.0')
flow.add_secondary_sensor('ssmt2-f12', START_DATE, END_DATE, 'v01')
flow.set_usecase_config('usecase-22.xml')
flow.run_matchup(hosts=[('localhost', 96)])
# Icon files used for the different download states shown in Alfred.
icon_active = 'active.png'
icon_paused = 'paused.png'
icon_waiting = 'waiting.png'
icon_complete = 'complete.png'
icon_deleted = 'deleted.png'
icon_removed = 'removed.png'
icon_error = 'error.png'
icon_download = 'download.png'
icon_upload = 'upload.png'
icon_stopped = 'stopped.png'

# Default workflow settings: the aria2 JSON-RPC endpoint and an empty secret.
defaults = {
    'rpc_path': 'http://localhost:6800/rpc',
    'secret': ''
}

# Self-update configuration for the Alfred-Workflow updater.
update_settings = {
    'github_slug': 'Wildog/Ariafred',
    'frequency': 1
}

wf = Workflow(default_settings=defaults, update_settings=update_settings)
# RPC server handle; presumably initialised later by main — TODO confirm.
server = None

# Make sure the 'secret' key exists before building the RPC token string.
if 'secret' not in wf.settings:
    wf.settings['secret'] = ''
secret = 'token:' + wf.settings['secret']

sys.exit(wf.run(main))
options['query']), valid=True, arg=engine.url_for(options['query']), icon=engine.icon) else: if display_text: length = sorted([len(r) for r in results])[-1] fmt = '{{0:{0}s}} {{1}}'.format(length) for phrase in results: print(fmt.format(phrase, engine.url_for(phrase))) else: for phrase in results: url = engine.url_for(phrase) wf.add_item(phrase, "Search {0} for '{1}'".format(engine.name, phrase), valid=True, autocomplete=phrase + ' ', uid=url, arg=url, icon=engine.icon) if not display_text: wf.send_feedback() if __name__ == '__main__': wf = Workflow(update_settings=UPDATE_SETTINGS, help_url=HELP_URL) log = wf.logger sys.exit(wf.run(main))
def wf():
    """Create a :class:`~workflow.Workflow` object."""
    # Keep the InfoPlist context active while the caller uses the yielded
    # Workflow instance (generator form — presumably consumed as a test
    # fixture or context manager; confirm at the call site).
    with InfoPlist():
        yield Workflow()
"""Ingest SSMT2 F14 data (version v01) into the MMS system."""
from workflow import Workflow

flow = Workflow('ingest_ssmt2-f14', 7,
                '/group_workspaces/cems2/fiduceo/Software/mms/config')
flow.add_primary_sensor('ssmt2-f14', '1997-04-28', '2006-10-30', 'v01')
flow.run_ingestion(hosts=[('localhost', 24)])
"""Use case 01: match up AATSR Envisat (primary) against AVHRR N17 (secondary)."""
from workflow import Workflow

# Both sensors are matched over the same processing period.
START_DATE = '2002-06-25'
END_DATE = '2010-12-31'

flow = Workflow('usecase01_aatsr_en_avhrr_n17', 7,
                '/group_workspaces/cems2/fiduceo/Software/mms/config')
flow.add_primary_sensor('aatsr-en', START_DATE, END_DATE, 'v3')
flow.add_secondary_sensor('avhrr-n17', START_DATE, END_DATE, 'v01.2')
flow.set_usecase_config('usecase-01.xml')
flow.run_matchup(hosts=[('localhost', 72)])
if len(actions) > 0: for action in actions: wf.add_item(action['name'], action['description'], uid=action['name'], autocomplete=action['autocomplete'], arg=action['arg'], valid=action['valid'], icon=helpers.get_icon(wf, 'chevron-right')) else: wf.add_item('No action found for "%s"' % query, autocomplete='', icon=helpers.get_icon(wf, 'info')) if len(wf._items) == 0: query_name = query[query.find(' ') + 1:] wf.add_item('No formula found for "%s"' % query_name, autocomplete='%s ' % query[:query.find(' ')], icon=helpers.get_icon(wf, 'info')) wf.send_feedback() # refresh cache cmd = ['/usr/bin/python', wf.workflowfile('brew_refresh.py')] run_in_background('brew_refresh', cmd) if __name__ == '__main__': wf = Workflow(update_settings={'github_slug': GITHUB_SLUG}) sys.exit(wf.run(main))
# NOTE(review): Period is imported but not used in the visible part of the
# script — possibly needed further down; confirm before removing.
from workflow import Period
from workflow import Workflow

# Use case mms10a: the ATSR series as primary sensors matched against the
# AVHRR series as secondary sensors.
usecase = 'mms10a'
mmdtype = 'mmd10'

w = Workflow(usecase)

# Primary sensor coverage.  The ATSR-1 record is split into several short
# periods (presumably gaps in the sensor record — TODO confirm).
w.add_primary_sensor('atsr.1', '1991-08-01', '1996-09-01')
w.add_primary_sensor('atsr.1', '1996-10-01', '1996-11-01')
w.add_primary_sensor('atsr.1', '1996-12-30', '1997-02-01')
w.add_primary_sensor('atsr.1', '1997-03-01', '1997-04-01')
w.add_primary_sensor('atsr.1', '1997-05-01', '1997-06-01')
w.add_primary_sensor('atsr.1', '1997-07-01', '1997-09-01')
w.add_primary_sensor('atsr.1', '1997-10-01', '1997-11-01')
w.add_primary_sensor('atsr.1', '1997-12-01', '1997-12-18')
w.add_primary_sensor('atsr.2', '1995-06-01', '1996-01-01')
w.add_primary_sensor('atsr.2', '1996-07-01', '2003-06-23')
w.add_primary_sensor('atsr.3', '2002-05-20', '2012-04-09')

# Secondary sensor coverage: one entry per AVHRR platform and period.
#w.add_secondary_sensor('avhrr.n10', '1986-11-17', '1991-09-17')
w.add_secondary_sensor('avhrr.n11', '1988-11-08', '1994-09-14')
w.add_secondary_sensor('avhrr.n12', '1991-09-16', '1998-12-15')
w.add_secondary_sensor('avhrr.n14', '1995-01-01', '2000-01-01')
w.add_secondary_sensor('avhrr.n15', '1998-10-26', '2003-04-09')
w.add_secondary_sensor('avhrr.n15', '2003-12-21', '2011-01-01')
w.add_secondary_sensor('avhrr.n16', '2001-01-01', '2003-04-09')
w.add_secondary_sensor('avhrr.n16', '2003-12-21', '2011-01-01')
w.add_secondary_sensor('avhrr.n17', '2002-07-10', '2003-04-09')
w.add_secondary_sensor('avhrr.n17', '2003-12-21', '2010-10-01')
w.add_secondary_sensor('avhrr.n18', '2005-06-05', '2014-01-01')
w.add_secondary_sensor('avhrr.n19', '2009-02-06', '2014-01-01')
w.add_secondary_sensor('avhrr.m02', '2006-11-21', '2014-01-01')
device = {} if j == "device_minorClassOfDevice_string": device['type'] = filter_type(i['dict']['string']) added_type = True if added_type: device['name'] = i['key'] index_of_connected = i['dict']['key'].index( 'device_isconnected') device['is_connected'] = "Connected" if i['dict']['string'][ index_of_connected] == "attrib_Yes" else "Not connected" devices.append(device) break """ @desc: Goes through a list of devices and populates alfred script filter list """ for i in devices: wf.add_item(title=i['name'], subtitle=i["type"] + " - " + i['is_connected'], arg=i['name'], valid=True, icon="icons/" + i['type'].lower() + ".ico") wf.send_feedback() if __name__ == u"__main__": wf = Workflow(libraries=['./lib']) sys.exit(wf.run(main))
if not fake_data: wf.add_item('No matching fake data', 'Try a different query', icon=ICON_WARNING) for name, data in fake_data: subtitle = data if count: example = data.split('\n')[0].strip() subtitle = '{} ✕ e.g. "{}"'.format(count, example) wf.add_item(name, subtitle, arg=data, autocomplete='{} {} '.format(name, DELIMITER), valid=True, largetext=data, copytext=data) wf.send_feedback() if __name__ == '__main__': wf = Workflow(default_settings=DEFAULT_SETTINGS, update_settings=UPDATE_SETTINGS, help_url=HELP_URL) log = wf.logger sys.exit(wf.run(main))
#!/usr/bin/python
# Separated unit for configuration, in case we have extra features in the future.
from workflow import Workflow3 as Workflow
from lib.config import Config
from workflow.notify import notify


def main(workflow):
    """Validate the user-supplied cheat-sheet path and persist it.

    Shows a success or error notification; always returns 0.
    """
    candidate = Config(workflow.args[0].strip())
    if not candidate.validate():
        notify(title="Error:(", text="The path doesn't exist")
        return 0
    # Behavior: overwrite existing data
    workflow.store_data("configuration", candidate)
    notify(title="Success!",
           text="Cheat sheets updated to {}".format(candidate.getPath()))
    return 0


if __name__ == "__main__":
    exit(Workflow().run(main))
"""Use case 02: match up AVHRR N08 (primary) against AVHRR N07 (secondary)."""
from workflow import Workflow

# Both sensors are matched over the same processing period.
START_DATE = '1983-05-04'
END_DATE = '1985-01-30'

flow = Workflow('usecase02_avhrr_n08_n07', 7,
                '/group_workspaces/cems2/fiduceo/Software/mms/config')
flow.add_primary_sensor('avhrr-n08', START_DATE, END_DATE, 'v01.2')
flow.add_secondary_sensor('avhrr-n07', START_DATE, END_DATE, 'v01.2')
flow.set_usecase_config('usecase-02.xml')
flow.run_matchup(hosts=[('localhost', 24)])
"""Ingest HIRS TIROS-N data (version 1.0) into the MMS system."""
from workflow import Workflow

flow = Workflow('ingest_hirs_tn', 7,
                '/group_workspaces/cems2/fiduceo/Software/mms/config')
flow.add_primary_sensor('hirs-tn', '1978-10-29', '1980-01-30', '1.0')
flow.run_ingestion(hosts=[('localhost', 24)])
import os.path import subprocess # Internal Dependencies import config from lib.docopt import docopt from lib import html2text, utils from zotero import api from backend import data # Alfred-Workflow from workflow import Workflow, ICON_WARNING from workflow.workflow import isascii # create global methods from `Workflow()` WF = Workflow() log = WF.logger decode = WF.decode fold = WF.fold_to_ascii #------------------------------------------------------------------------------ # :class:`ZotWorkflow` -------------------------------------------------------- #------------------------------------------------------------------------------ # TODO: break up codepaths into classes class ZotWorkflow(object): """Represents all the Alfred Workflow actions. :param wf: a :class:`Workflow` instance. :type wf: :class:`object`
actions_pairs_to_show = actions_pairs icon_path = getAppIconPath(app) for action, shortcut, _ in actions_pairs_to_show: addShortcut(action, shortcut, app, icon_path) return len(actions_pairs_to_show) def addShortcut(action, shortcut, app, icon_path): if (action.strip() and shortcut.strip()): wf.add_item(action, shortcut, icon=icon_path, largetext=action, copytext=shortcut, modifier_subtitles={u'ctrl': u'Customize this shortcut'}, arg=action) if __name__ == '__main__': wf = Workflow(libraries=['./lib'], update_settings=update_settings) wf.data_serializer = 'json' log = wf.logger custom = wf.stored_data('custom') if (custom == None): custom = {"custom_app_example": {"action": "shortcut"}} wf.store_data('custom', custom) apps = getApps() sys.exit(wf.run(main, text_errors='--commit' in wf.args))
"""Use case 17: match up MHS Metop-B (primary) against AMSU-B N15 (secondary)."""
from workflow import Workflow

# Both sensors are matched over the same processing period.
START_DATE = '2013-01-15'
END_DATE = '2016-03-07'

flow = Workflow('usecase17_mhs_mb_amsub_n15', 7,
                '/group_workspaces/cems2/fiduceo/Software/mms/config')
flow.add_primary_sensor('mhs-mb', START_DATE, END_DATE, 'v1.0')
flow.add_secondary_sensor('amsub-n15', START_DATE, END_DATE, 'v1.0')
flow.set_usecase_config('usecase-17.xml')
flow.run_matchup(hosts=[('localhost', 72)])
s = set() ar = [] for i in ea.archive: if tuple(i.fitness) not in s: s.add(tuple(i.fitness)) ar.append(i) return ar if __name__ == '__main__': from sys import argv from workflow import Workflow from pool import AWS import hotshot, hotshot.stats wf = Workflow(argv[1]) pool = AWS("aws.info") ar = nsga_2(*get_info(wf, pool)) # prof = hotshot.Profile("emo3.prof") # archive = prof.runcall(nsga_2, *get_info(wf, pool)) # prof.close() # prof.close() # stats = hotshot.stats.load("emo3.prof") # stats.strip_dirs() # stats.sort_stats('time', 'calls') # stats.print_stats() ar.sort(key=lambda x: x.fitness[0]) for i in range(len(ar)):
"""Post-process animal-SST / AMSR-E MMD files (use case 06)."""
from period import Period
from workflow import Workflow

# Workflow parameters: usecase name, days per time slot, configuration
# directory, and the processing period.
processing_period = Period('2004-01-08', '2011-10-07')
flow = Workflow('post_process_sst_animal', 7,
                '/group_workspaces/cems2/esacci_sst/mms_new/config',
                processing_period)
flow.set_input_dir(
    '/group_workspaces/cems2/esacci_sst/mms_new/mmd/mmd06c/animal-sst_amsre-aq'
)
flow.set_usecase_config('usecase-06-pp.xml')

# hosts: (host, number of tasks submitted to the scheduler at once);
# num_parallel_tasks: number of tasks executed in parallel.
flow.run_post_processing(hosts=[('localhost', 10)], num_parallel_tasks=24)
sub = r.get("Id") url = get_object_url(instance_url, r.get("Id"), use_classic) ico = './contract.png' elif r.get("attributes").get("type") == "SBQQ__Quote__c": title = r.get("Name") sub = r.get("Type") url = get_object_url(instance_url, r.get("Id"), use_classic) ico = './quote.png' wf.add_item(title=title, subtitle=sub, arg=url, valid=True, icon=ico) if (len(results.get('searchRecords', []))) == 0: wf.add_item("No result for: %s" % query) else: wf.add_item("Type at least two characters to search on Salesforce.") wf.send_feedback() return 0 if __name__ == u"__main__": wf = Workflow(update_settings={ 'github_slug': 'jereze/alfred-salesforce', 'frequency': 5, }) sys.exit(wf.run(main))
results = pool.map(req_hn_api, urls) pool.close() pool.join() return results def main(wf): posts = wf.cached_data('posts', multi_get_top_news, max_age=60 * 30) posts = reduce(lambda x, y: x + y, posts) # Loop through the returned posts and add an item for each to # the list of results for Alfred for post in posts: subtitle = "points: {points} | user: {user} | {time_ago} | comments:{comments_count} | {url}".format( points=post['points'], user=post['user'], time_ago=post['time_ago'], comments_count=post['comments_count'], url=post['url']) wf.add_item(title=post['title'], subtitle=subtitle, arg=post['url'], valid=True, icon='./icon.png') # Send the results to Alfred as XML wf.send_feedback() if __name__ == u"__main__": wf = Workflow() sys.exit(wf.run(main))
def setup_configuration(self):
    """
    Parse the command line arguments, initialize the configuration object
    and update configuration parameters with values passed via command line
    arguments.  Then create and start the workflow thread, wire up its
    signals, build the job list from the positional arguments, and trigger
    loading of master dark / flat calibration frames if they were given.

    Fixes relative to the previous version:
    - removed a duplicate "RGB" entry from the --debayering choices list;
    - corrected the typo "continune" -> "continue" in two error messages.

    :return: -
    """

    # ------------------------------------------------------------------
    # Command line interface.
    # ------------------------------------------------------------------
    parser = ArgumentParser()
    parser.add_argument("job_input", nargs='+',
                        help="input video files or still image folders")
    parser.add_argument("-p", "--protocol", action="store_true",
                        help="Store protocol with results")
    parser.add_argument("--protocol_detail", type=int, choices=[0, 1, 2],
                        default=1, help="Protocol detail level")
    parser.add_argument("-b", "--buffering_level", type=int,
                        choices=[0, 1, 2, 3, 4], default=2,
                        help="Buffering level")
    parser.add_argument("--out_format", choices=["png", "tiff", "fits"],
                        default="png", help="Image format for output")
    parser.add_argument(
        "--name_add_f", action="store_true",
        help="Add number of stacked frames to output file name")
    parser.add_argument(
        "--name_add_p", action="store_true",
        help="Add percentage of stacked frames to output file name")
    parser.add_argument(
        "--name_add_apb", action="store_true",
        help="Add alignment point box size (pixels) to output file name")
    parser.add_argument(
        "--name_add_apn", action="store_true",
        help="Add number of alignment points to output file name")
    parser.add_argument("--debayering", choices=[
        "Auto detect color", "Grayscale", "RGB", "BGR",
        "Force Bayer RGGB", "Force Bayer GRBG", "Force Bayer GBRG",
        "Force Bayer BGGR"
    ], default="Auto detect color", help="Debayering option")
    parser.add_argument("--noise", type=noise_type, default=7,
                        help="Noise level (add Gaussian blur)")
    parser.add_argument("-m", "--stab_mode", choices=["Surface", "Planet"],
                        default="Surface", help="Frame stabilization mode")
    parser.add_argument("--stab_size", type=stab_size_type, default=33,
                        help="Stabilization patch size (%% of frame)")
    parser.add_argument("--stab_sw", type=stab_sw_type, default=34,
                        help="Stabilization search width (pixels)")
    parser.add_argument(
        "--rf_percent", type=rf_percent_type, default=5,
        help="Percentage of best frames for reference frame computation")
    parser.add_argument("-d", "--dark",
                        help="Image file for dark frame correction")
    parser.add_argument("-f", "--flat",
                        help="Image file for flat frame correction")
    parser.add_argument("-a", "--align_box_width",
                        type=align_box_width_type, default=48,
                        help="Alignment point box width (pixels)")
    parser.add_argument("-w", "--align_search_width",
                        type=align_search_width_type, default=14,
                        help="Alignment point search width (pixels)")
    parser.add_argument("--align_min_struct", type=align_min_struct_type,
                        default=0.04,
                        help="Alignment point minimum structure")
    parser.add_argument("--align_min_bright", type=align_min_bright_type,
                        default=10,
                        help="Alignment point minimum brightness")
    parser.add_argument("-s", "--stack_percent", type=stack_percent_type,
                        default=10,
                        help="Percentage of best frames to be stacked")
    parser.add_argument("--stack_number", type=stack_number_type,
                        help="Number of best frames to be stacked")
    parser.add_argument("-n", "--normalize_bright", action="store_true",
                        help="Normalize frame brightness")
    parser.add_argument("--normalize_bco", type=normalize_bco_type,
                        default=15, help="Normalization black cut-off")

    arguments = parser.parse_args()
    # self.print_arguments(arguments)

    # Create and initialize the configuration object. The configuration
    # stored in the .ini file in the user's home directory is ignored in
    # this case. Modifications to standard values come as command line
    # arguments.
    self.configuration = Configuration()
    self.configuration.initialize_configuration(read_from_file=False)

    # Modify the standard configuration as specified in the command line
    # arguments.
    self.configuration.global_parameters_store_protocol_with_result = arguments.protocol
    self.configuration.global_parameters_protocol_level = arguments.protocol_detail
    self.configuration.global_parameters_buffering_level = arguments.buffering_level
    self.configuration.global_parameters_image_format = arguments.out_format
    self.configuration.global_parameters_stack_number_frames = arguments.name_add_f
    self.configuration.global_parameters_stack_percent_frames = arguments.name_add_p
    self.configuration.global_parameters_ap_box_size = arguments.name_add_apb
    self.configuration.global_parameters_ap_number = arguments.name_add_apn
    self.configuration.frames_debayering_default = arguments.debayering
    self.configuration.frames_gauss_width = arguments.noise
    self.configuration.align_frames_mode = arguments.stab_mode
    # stab_size is given in percent of the frame; the scale factor used
    # internally is its inverse (e.g. 33 -> ~3).
    self.configuration.align_frames_rectangle_scale_factor = 100. / arguments.stab_size
    self.configuration.align_frames_search_width = arguments.stab_sw
    self.configuration.align_frames_average_frame_percent = arguments.rf_percent
    self.configuration.alignment_points_half_box_width = int(
        round(arguments.align_box_width / 2))
    self.configuration.alignment_points_search_width = arguments.align_search_width
    self.configuration.alignment_points_structure_threshold = arguments.align_min_struct
    self.configuration.alignment_points_brightness_threshold = arguments.align_min_bright
    self.configuration.alignment_points_frame_percent = arguments.stack_percent
    # Add something here for "number of frames to be stacked"
    self.configuration.frames_normalization = arguments.normalize_bright
    self.configuration.frames_normalization_threshold = arguments.normalize_bco

    # Re-compute derived parameters after the configuration was changed.
    self.configuration.set_derived_parameters()

    # Create the workflow thread and start it.
    self.thread = QtCore.QThread()
    self.workflow = Workflow(self)
    self.workflow.moveToThread(self.thread)
    self.workflow.calibration.report_calibration_error_signal.connect(
        self.report_calibration_error)
    self.workflow.work_next_task_signal.connect(self.work_next_task)
    self.workflow.report_error_signal.connect(self.report_error)
    self.workflow.abort_job_signal.connect(self.next_job_after_error)
    self.thread.start()

    # Connect signals to start activities on the workflow thread (e.g. in
    # method "work_next_task").
    self.signal_load_master_dark.connect(
        self.workflow.calibration.load_master_dark)
    self.signal_load_master_flat.connect(
        self.workflow.calibration.load_master_flat)
    self.signal_frames.connect(self.workflow.execute_frames)
    self.signal_rank_frames.connect(self.workflow.execute_rank_frames)
    self.signal_align_frames.connect(self.workflow.execute_align_frames)
    self.signal_set_roi.connect(self.workflow.execute_set_roi)
    self.signal_set_alignment_points.connect(
        self.workflow.execute_set_alignment_points)
    self.signal_compute_frame_qualities.connect(
        self.workflow.execute_compute_frame_qualities)
    self.signal_stack_frames.connect(self.workflow.execute_stack_frames)
    self.signal_save_stacked_image.connect(
        self.workflow.execute_save_stacked_image)

    # Set "automatic" to True. There is no interactive mode in this case.
    self.automatic = True

    # Create the job objects using the names passed as positional
    # arguments; each name is expanded with shell-style globbing first.
    self.jobs = []
    for name in [f for name in arguments.job_input for f in glob(name)]:
        try:
            job = Job(name)
            # Test if the path specifies a stacking job.
            if job.type == 'video' or job.type == 'image':
                self.jobs.append(job)
            else:
                if self.configuration.global_parameters_protocol_level > 0:
                    Miscellaneous.protocol(
                        "Error: '" + name +
                        "' does not contain valid input for a stacking job,"
                        " continue with next job.\n",
                        self.workflow.attached_log_file)
        except InternalError:
            if self.configuration.global_parameters_protocol_level > 0:
                Miscellaneous.protocol(
                    "Error: '" + name +
                    "' does not contain valid input for a stacking job,"
                    " continue with next job.\n",
                    self.workflow.attached_log_file)

    self.job_number = len(self.jobs)
    if self.job_number == 0:
        if self.configuration.global_parameters_protocol_level > 0:
            Miscellaneous.protocol(
                "Error: No valid job specified, execution halted.",
                self.workflow.attached_log_file)
        self.stop_execution()
    self.job_index = 0

    # If a dark frame was specified, load it.
    if arguments.dark:
        if self.configuration.global_parameters_protocol_level > 0:
            Miscellaneous.protocol("+++ Loading master dark frame +++",
                                   self.workflow.attached_log_file)
        self.signal_load_master_dark.emit(arguments.dark)

    # If a flat frame was specified, load it.
    if arguments.flat:
        if self.configuration.global_parameters_protocol_level > 0:
            Miscellaneous.protocol("+++ Loading master flat frame +++",
                                   self.workflow.attached_log_file)
        self.signal_load_master_flat.emit(arguments.flat)