def generate_user_key(configuration, client_id, key_filename, truncate=False):
    """Generate a new ssh key pair and save it as key_filename in the user
    settings dir of client_id.

    Returns a (status, msg) tuple where msg holds the public key contents on
    success and an error message otherwise. Refuses to overwrite an existing
    key unless truncate is set.
    """
    # TODO: switch to paramiko key generation?
    logger = configuration.logger
    key_dir = os.path.join(configuration.user_settings,
                           client_id_dir(client_id), user_keys_dir)
    key_path = os.path.join(key_dir, key_filename)
    makedirs_rec(key_dir, configuration)
    if os.path.exists(key_path) and not truncate:
        logger.error("user key %s already exists!" % key_path)
        return (False, 'user key %s already exists!' % key_filename)
    logger.debug("generating user key %s" % key_path)
    gen_proc = subprocess_popen(['ssh-keygen', '-t', default_key_type,
                                 '-b', '%d' % default_key_bits,
                                 '-f', key_path, '-N', '', '-C',
                                 key_filename],
                                stdout=subprocess_pipe,
                                stderr=subprocess_pipe)
    # BUGFIX: call communicate() first and read returncode afterwards.
    # The original called wait() before communicate(), which can deadlock
    # if ssh-keygen fills a stdout/stderr pipe buffer before exiting.
    out, err = gen_proc.communicate()
    exit_code = gen_proc.returncode
    if exit_code != 0:
        logger.error("user key generation in %s failed: %s %s (%s)" %
                     (key_path, out, err, exit_code))
        return (False, "user key generation in %s failed!" % key_filename)
    logger.info('done generating user key %s: %s : %s (%s)' %
                (key_path, out, err, exit_code))
    pub_key = ''
    try:
        # with-statement guarantees the fd is closed even on read errors
        with open(key_path + '.pub') as pub_fd:
            pub_key = pub_fd.read()
    except Exception as exc:
        logger.error("user key generation %s did not create a pub key: %s" %
                     (key_path, exc))
        return (False, "user key generation in %s failed!" % key_filename)
    # BUGFIX: the original fell off the end and implicitly returned None on
    # success although every other exit returns a (status, msg) tuple
    return (True, pub_key)
def transfer_result(configuration, client_id, transfer_dict, exit_code,
                    out_msg, err_msg):
    """Update status file from transfer_dict with the result from transfer
    that returned exit_code, out_msg and err_msg.

    Appends a time-stamped entry to the per-transfer status, stdout and
    stderr files in the transfer status dir. Returns True if all three
    files were written, False otherwise.
    """
    time_stamp = datetime.datetime.now().ctime()
    transfer_id = transfer_dict['transfer_id']
    rel_src = transfer_dict.get("rel_src", False)
    if not rel_src:
        rel_src = ', '.join(transfer_dict['src'])
    res_dir = get_status_dir(configuration, client_id, transfer_id)
    makedirs_rec(res_dir, configuration)
    status_msg = '''%s: %s %s of %s in %s finished with status %s
''' % (time_stamp, transfer_dict['protocol'], transfer_dict['action'],
       rel_src, transfer_dict['transfer_id'], exit_code)
    out_msg = '%s:\n%s\n' % (time_stamp, out_msg)
    err_msg = '%s:\n%s\n' % (time_stamp, err_msg)
    status = True
    for (ext, msg) in [("status", status_msg), ("stdout", out_msg),
                       ("stderr", err_msg)]:
        path = os.path.join(res_dir, "%s.%s" % (transfer_id, ext))
        try:
            # NOTE: append mode transparently creates missing files, so the
            # old exists-check with a separate "w" mode branch was redundant.
            # with-statement closes the fd even if write fails.
            with open(path, "a") as status_fd:
                status_fd.write(msg)
        except Exception as exc:
            logger.error("writing status file %s for %s failed: %s" %
                         (path, blind_pw(transfer_dict), exc))
            status = False
    # BUGFIX: callers test the boolean result (see run_transfer), but the
    # original fell off the end and always returned None i.e. falsy
    return status
def list_frozen_archives(configuration, client_id):
    """Find all frozen_archives owned by user"""
    logger = configuration.logger
    owned = []
    try:
        entries = os.listdir(configuration.freeze_home)
    except Exception:
        # Freeze home is missing - try to create it on demand and continue
        # with an empty listing if that works out
        if not makedirs_rec(configuration.freeze_home, configuration):
            logger.error(
                'freezefunctions.py: not able to create directory %s'
                % configuration.freeze_home)
            return (False, "archive setup is broken")
        entries = []
    for entry in entries:
        # Skip dot files/dirs
        if entry.startswith('.'):
            continue
        if not is_frozen_archive(entry, configuration):
            logger.warning(
                '%s in %s is not a directory, move it?'
                % (entry, configuration.freeze_home))
            continue
        # entry is a frozen archive - check ownership
        (meta_status, meta_out) = get_frozen_meta(entry, configuration)
        if meta_status and meta_out['CREATOR'] == client_id:
            owned.append(entry)
    return (True, owned)
def modify_data_transfers(configuration, client_id, transfer_dict, action,
                          transfers=None):
    """Modify data transfers with given action and transfer_dict for
    client_id. In practice this a shared helper to add or remove transfers
    from the saved data transfers. The optional transfers argument can be
    used to pass an already loaded dictionary of saved transfers to avoid
    reloading.

    Returns a (status, out) tuple with the updated transfers dict on success
    and an error message string otherwise.
    """
    logger = configuration.logger
    transfer_id = transfer_dict['transfer_id']
    if transfers is None:
        (load_status, transfers) = load_data_transfers(configuration,
                                                       client_id)
        if not load_status:
            logger.error("modify_data_transfers failed in load: %s" %
                         transfers)
            return (load_status, transfers)
    if action == "create":
        # Stamp both timestamps on creation so modify can bump updated only
        now = datetime.datetime.now()
        transfer_dict.update({
            'created_timestamp': now,
            'updated_timestamp': now,
            'owner': client_id,
        })
        transfers[transfer_id] = transfer_dict
    elif action == "modify":
        transfer_dict['updated_timestamp'] = datetime.datetime.now()
        transfers[transfer_id].update(transfer_dict)
    elif action == "delete":
        del transfers[transfer_id]
    else:
        return (False, "Invalid action %s on data transfers" % action)
    try:
        transfers_path = os.path.join(configuration.user_settings,
                                      client_id_dir(client_id),
                                      datatransfers_filename)
        dump(transfers, transfers_path)
        # Make sure the per-transfer status dir exists for later results
        res_dir = get_status_dir(configuration, client_id, transfer_id)
        makedirs_rec(res_dir, configuration)
    except Exception as err:
        logger.error("modify_data_transfers failed: %s" % err)
        return (False, 'Error updating data transfers: %s' % err)
    # BUGFIX: the original fell off the end here and implicitly returned
    # None although all other exits return a (status, out) tuple
    return (True, transfers)
def setUp(self):
    """Prepare a clean workflow test fixture: configuration, required
    directories, a throw-away workflow session db and a fresh session id."""
    # Track workflows created by tests so tearDown can clean them up
    self.created_workflows = []
    self.username = '******'
    self.test_vgrid = default_vgrid
    # Fall back to the default server conf unless MIG_CONF is already set
    if not os.environ.get('MIG_CONF', False):
        os.environ['MIG_CONF'] = '/home/mig/mig/server/MiGserver.conf'
    self.configuration = get_configuration_object()
    self.logger = self.configuration.logger
    # Ensure that the vgrid_files_home exist
    vgrid_file_path = os.path.join(self.configuration.vgrid_files_home,
                                   self.test_vgrid)
    if not os.path.exists(vgrid_file_path):
        self.assertTrue(
            makedirs_rec(vgrid_file_path, self.configuration,
                         accept_existing=True))
    # Ensure that the mrsl_files home exists
    mrsl_file_path = os.path.join(self.configuration.mrsl_files_dir,
                                  self.username)
    if not os.path.exists(mrsl_file_path):
        self.assertTrue(
            makedirs_rec(mrsl_file_path, self.configuration,
                         accept_existing=True))
    self.assertTrue(os.path.exists(vgrid_file_path))
    # Redirect the workflow session db to disposable files next to the test
    # module so tests never touch the real db
    self.configuration.workflows_db_home = this_path
    self.configuration.workflows_db = \
        os.path.join(this_path, 'test_sessions_db.pickle')
    self.configuration.workflows_db_lock = \
        os.path.join(this_path, 'test_sessions_db.lock')
    self.assertTrue(
        reset_workflows(self.configuration, vgrid=self.test_vgrid))
    # force=True recreates the session db even if one is left over
    created = touch_workflow_sessions_db(self.configuration, force=True)
    self.assertTrue(created)
    self.session_id = create_workflow_session_id(self.configuration,
                                                 self.username)
    # create_workflow_session_id signals failure with False/None
    self.assertIsNot(self.session_id, False)
    self.assertIsNotNone(self.session_id)
    self.workflow_sessions_db = load_workflow_sessions_db(
        self.configuration)
    self.assertIn(self.session_id, self.workflow_sessions_db)
    self.workflow_session = self.workflow_sessions_db.get(
        self.session_id, None)
    self.assertIsNotNone(self.workflow_session)
def __transfer_log(configuration, client_id, msg, level='info'):
    """Wrapper to send a single msg to transfer log file of client_id"""
    log_path = os.path.join(get_status_dir(configuration, client_id),
                            configuration.site_transfer_log)
    makedirs_rec(os.path.dirname(log_path), configuration)
    bg_logger = logging.getLogger('background-transfer')
    bg_logger.setLevel(logging.INFO)
    # Attach a rotating handler just for this message and detach it again
    # afterwards so repeated calls never stack handlers
    handler = logging.handlers.RotatingFileHandler(
        log_path, maxBytes=transfers_log_size,
        backupCount=transfers_log_cnt - 1)
    handler.setFormatter(
        logging.Formatter('%(asctime)s %(levelname)s %(message)s'))
    bg_logger.addHandler(handler)
    # Dispatch on requested severity - anything unknown logs as info
    emit = {'error': bg_logger.error,
            'warning': bg_logger.warning}.get(level, bg_logger.info)
    emit(msg)
    handler.flush()
    handler.close()
    bg_logger.removeHandler(handler)
def main(client_id, user_arguments_dict):
    """Main function used by front end"""
    (configuration, logger, output_objects, op_name) = \
        initialize_main_variables(client_id, op_header=False)
    defaults = signature()[1]
    title_entry = find_entry(output_objects, 'title')
    label = "%s" % configuration.site_vgrid_label
    title_entry['text'] = '%s Workflows' % label
    # NOTE: Delay header entry here to include vgrid_name
    (validate_status, accepted) = validate_input_and_cert(
        user_arguments_dict,
        defaults,
        output_objects,
        client_id,
        configuration,
        allow_rejects=False,
    )
    if not validate_status:
        return (accepted, returnvalues.CLIENT_ERROR)
    vgrid_name = accepted['vgrid_name'][-1]
    operation = accepted['operation'][-1]
    flags = ''.join(accepted['flags'][-1])
    # Only vgrid owners and members may inspect or manage workflows
    if not vgrid_is_owner_or_member(vgrid_name, client_id, configuration):
        output_objects.append({
            'object_type': 'error_text',
            'text': '''You must be an owner or member of %s vgrid to
access the workflows.''' % vgrid_name
        })
        return (output_objects, returnvalues.CLIENT_ERROR)
    if not operation in allowed_operations:
        # NOTE(review): this rejection returns returnvalues.OK rather than
        # CLIENT_ERROR like the checks above - confirm that is intentional
        output_objects.append({
            'object_type': 'error_text',
            'text': '''Operation must be one of %s.''' %
            ', '.join(allowed_operations)
        })
        return (output_objects, returnvalues.OK)
    if operation in show_operations:
        # jquery support for tablesorter (and unused confirmation dialog)
        # table initially sorted by 0 (last update / date)
        refresh_call = 'ajax_workflowjobs("%s", "%s")' % (vgrid_name, flags)
        table_spec = {
            'table_id': 'workflowstable',
            'sort_order': '[[0,1]]',
            'refresh_call': refresh_call
        }
        (add_import, add_init, add_ready) = man_base_js(configuration,
                                                        [table_spec])
        if operation == "show":
            add_ready += '%s;' % refresh_call
        add_ready += '''
        /* Init variables helper as foldable but closed and with individual
        heights */
        $(".variables-accordion").accordion({
                                   collapsible: true,
                                   active: false,
                                   heightStyle: "content"
                                  });
        /* fix and reduce accordion spacing */
        $(".ui-accordion-header").css("padding-top", 0)
                                 .css("padding-bottom", 0).css("margin", 0);
        /* NOTE: requires managers CSS fix for proper tab bar height */
        $(".workflow-tabs").tabs();
        $("#logarea").scrollTop($("#logarea")[0].scrollHeight);
        '''
        title_entry['script']['advanced'] += add_import
        title_entry['script']['init'] += add_init
        title_entry['script']['ready'] += add_ready
        output_objects.append({
            'object_type': 'html_form',
            'text': man_base_html(configuration)
        })
        output_objects.append({
            'object_type': 'header',
            'text': '%s Workflows for %s' % (label, vgrid_name)
        })
    logger.info('vgridworkflows %s %s' % (vgrid_name, operation))
    # Iterate through jobs and list details for each
    trigger_jobs = []
    log_content = ''
    if operation in list_operations:
        trigger_job_dir = os.path.join(
            configuration.vgrid_home,
            os.path.join(vgrid_name,
                         '.%s.jobs' % configuration.vgrid_triggers))
        trigger_job_pending_dir = os.path.join(trigger_job_dir,
                                               'pending_states')
        trigger_job_final_dir = os.path.join(trigger_job_dir,
                                             'final_states')
        if makedirs_rec(trigger_job_pending_dir, configuration) \
                and makedirs_rec(trigger_job_final_dir, configuration):
            # Trailing slash so replace() below strips the full dir prefix
            abs_vgrid_dir = '%s/' \
                % os.path.abspath(os.path.join(
                    configuration.vgrid_files_home, vgrid_name))
            for filename in os.listdir(trigger_job_pending_dir):
                trigger_job_filepath = \
                    os.path.join(trigger_job_pending_dir, filename)
                # NOTE(review): unpickle returns False on failure and the
                # trigger_job['owner'] lookup below would then raise -
                # consider guarding like the serverjob check does
                trigger_job = unpickle(trigger_job_filepath, logger)
                serverjob_filepath = \
                    os.path.join(configuration.mrsl_files_dir,
                                 os.path.join(
                                     client_id_dir(trigger_job['owner']),
                                     '%s.mRSL' % trigger_job['jobid']))
                serverjob = unpickle(serverjob_filepath, logger)
                if serverjob:
                    if serverjob['STATUS'] in pending_states:
                        trigger_event = trigger_job['event']
                        trigger_rule = trigger_job['rule']
                        trigger_action = trigger_event['event_type']
                        trigger_time = time.ctime(
                            trigger_event['time_stamp'])
                        # Show paths relative to the vgrid dir only
                        trigger_path = '%s %s' % \
                            (trigger_event['src_path'].replace(
                                abs_vgrid_dir, ''),
                             trigger_event['dest_path'].replace(
                                 abs_vgrid_dir, ''))
                        job = {
                            'object_type': 'trigger_job',
                            'job_id': trigger_job['jobid'],
                            'rule_id': trigger_rule['rule_id'],
                            'path': trigger_path,
                            'action': trigger_action,
                            'time': trigger_time,
                            'status': serverjob['STATUS']
                        }
                        # Hide internal image trigger jobs unless verbose
                        if not job['rule_id'].startswith(
                                img_trigger_prefix) or verbose(flags):
                            trigger_jobs.append(job)
                    elif serverjob['STATUS'] in final_states:
                        # Archive finished jobs out of the pending dir
                        src_path = os.path.join(trigger_job_pending_dir,
                                                filename)
                        dest_path = os.path.join(trigger_job_final_dir,
                                                 filename)
                        move_file(src_path, dest_path, configuration)
                    else:
                        logger.error(
                            'Trigger job: %s, unknown state: %s' %
                            (trigger_job['jobid'], serverjob['STATUS']))
        log_content = read_trigger_log(configuration, vgrid_name, flags)
    if operation in show_operations:
        # Always run as rule creator to avoid users being able to act on behalf
        # of ANY other user using triggers (=exploit)
        extra_fields = [
            ('path', None),
            ('match_dirs', ['False', 'True']),
            ('match_recursive', ['False', 'True']),
            ('changes', [keyword_all] + valid_trigger_changes),
            ('action', [keyword_auto] + valid_trigger_actions),
            ('arguments', None),
            ('run_as', client_id),
        ]
        # NOTE: we do NOT show saved template contents - see addvgridtriggers
        optional_fields = [('rate_limit', None), ('settle_time', None)]
        # Only include system triggers in verbose mode
        if verbose(flags):
            system_filter = []
        else:
            system_filter = [('rule_id', '%s_.*' % img_trigger_prefix)]
        (init_status, oobjs) = vgrid_add_remove_table(
            client_id, vgrid_name, 'trigger', 'vgridtrigger', configuration,
            extra_fields + optional_fields, filter_items=system_filter)
        if not init_status:
            output_objects.append({
                'object_type': 'error_text',
                'text': 'failed to load triggers: %s' % oobjs
            })
            return (output_objects, returnvalues.SYSTEM_ERROR)
        # Generate variable helper values for a few concrete samples for help
        # text
        vars_html = ''
        dummy_rule = {'run_as': client_id, 'vgrid_name': vgrid_name}
        samples = [('input.txt', 'modified'),
                   ('input/image42.raw', 'changed')]
        for (path, change) in samples:
            vgrid_path = os.path.join(vgrid_name, path)
            vars_html += "<b>Expanded variables when %s is %s:</b><br/>" % \
                (vgrid_path, change)
            expanded = get_path_expand_map(vgrid_path, dummy_rule, change)
            for (key, val) in expanded.items():
                vars_html += " %s: %s<br/>" % (key, val)
        commands_html = ''
        commands = get_usage_map(configuration)
        for usage in commands.values():
            commands_html += " %s<br/>" % usage
        helper_html = """
<div class='variables-accordion'>
<h4>Help on available trigger variable names and values</h4>
<p>
Triggers can use a number of helper variables on the form +TRIGGERXYZ+ to
dynamically act on targets. Some of the values are bound to the rule owner
the %s while the remaining ones are automatically expanded for the particular
trigger target as shown in the following examples:<br/>
%s
</p>
<h4>Help on available trigger commands and arguments</h4>
<p>
It is possible to set up trigger rules that basically run any operation with a
side effect you could manually do on %s. I.e. like submitting/cancelling
a job, creating/moving/deleting a file or directory and so on. When you select
'command' as the action for a trigger rule, you have the following commands at
your disposal:<br/>
%s
</p>
</div>
""" % (label, vars_html, configuration.short_title, commands_html)
        # Make page with manage triggers tab and active jobs and log tab
        output_objects.append({
            'object_type': 'html_form',
            'text': '''
<div id="wrap-tabs" class="workflow-tabs">
<ul>
<li><a href="#manage-tab">Manage Triggers</a></li>
<li><a href="#jobs-tab">Active Trigger Jobs</a></li>
</ul>
'''
        })
        # Display existing triggers and form to add new ones
        output_objects.append({
            'object_type': 'html_form',
            'text': '''
<div id="manage-tab">
'''
        })
        output_objects.append({
            'object_type': 'sectionheader',
            'text': 'Manage Triggers'
        })
        output_objects.extend(oobjs)
        output_objects.append({
            'object_type': 'html_form',
            'text': helper_html
        })
        if configuration.site_enable_crontab:
            output_objects.append({
                'object_type': 'html_form',
                'text': '''
<p>You can combine these workflows with the personal '''
            })
            output_objects.append({
                'object_type': 'link',
                'destination': 'crontab.py',
                'class': 'crontablink iconspace',
                'text': 'schedule task'
            })
            output_objects.append({
                'object_type': 'html_form',
                'text': '''
facilities in case you want to trigger flows at given times rather than only
in reaction to file system events.</p>
'''
            })
        output_objects.append({
            'object_type': 'html_form',
            'text': '''
</div>
'''
        })
        # Display active trigger jobs and recent logs for this vgrid
        output_objects.append({
            'object_type': 'html_form',
            'text': '''
<div id="jobs-tab">
'''
        })
        output_objects.append({
            'object_type': 'sectionheader',
            'text': 'Active Trigger Jobs'
        })
        output_objects.append({
            'object_type': 'table_pager',
            'entry_name': 'job',
            'default_entries': default_pager_entries
        })
    # The raw job list and log objects are emitted for list operations too
    output_objects.append({
        'object_type': 'trigger_job_list',
        'trigger_jobs': trigger_jobs
    })
    if operation in show_operations:
        output_objects.append({
            'object_type': 'sectionheader',
            'text': 'Trigger Log'
        })
    output_objects.append({
        'object_type': 'trigger_log',
        'log_content': log_content
    })
    if operation in show_operations:
        # Close the jobs-tab and wrap-tabs divs opened above
        output_objects.append({
            'object_type': 'html_form',
            'text': '''
</div>
'''
        })
        output_objects.append({
            'object_type': 'html_form',
            'text': '''
</div>
'''
        })
    return (output_objects, returnvalues.OK)
'object_type': 'error_text', 'text': 'Errors parsing upload files: %s' % '\n '.join(["%s %s" % pair for pair in upload_rejected]) }) for (rel_path, err) in upload_rejected: uploaded.append({ 'object_type': 'uploadfile', 'name': rel_path, 'size': -1, "error": "upload rejected: %s" % err }) return (output_objects, returnvalues.CLIENT_ERROR) if not makedirs_rec(cache_dir, configuration): output_objects.append({ 'object_type': 'error_text', 'text': "Problem creating temporary upload dir" }) return (output_objects, returnvalues.SYSTEM_ERROR) if action == "status" and not upload_files: # Default to entire cache dir upload_files = [(os.path.join(upload_tmp_dir, i), '') for i in os.listdir(cache_dir)] elif not upload_files: logger.error('Rejecting upload with: %s' % upload_files) output_objects.append({ 'object_type': 'error_text', 'text': 'No files included to upload!'
# IMPORTANT: path must be expanded to abs for proper chrooting abs_dest = os.path.abspath(dest) # Don't use abs_path in output as it may expose underlying # fs layout. relative_dest = abs_dest.replace(dst_base, '') if not valid_user_path(configuration, abs_dest, dst_base, True): logger.warning('%s tried to %s restricted path %s ! (%s)' % (client_id, op_name, abs_dest, dst)) output_objects.append( {'object_type': 'error_text', 'text': "Invalid destination (%s expands to an illegal path)" % dst}) return (output_objects, returnvalues.CLIENT_ERROR) # We must make sure target dir exists if called in import X mode if (share_id or freeze_id) and not makedirs_rec(abs_dest, configuration): logger.error('could not create import destination dir: %s' % abs_dest) output_objects.append( {'object_type': 'error_text', 'text': 'cannot import to "%s" : file in the way?' % relative_dest}) return (output_objects, returnvalues.SYSTEM_ERROR) if not check_write_access(abs_dest, parent_dir=True): logger.warning('%s called without write access: %s' % (op_name, abs_dest)) output_objects.append( {'object_type': 'error_text', 'text': 'cannot copy to "%s": inside a read-only location!' % relative_dest}) return (output_objects, returnvalues.CLIENT_ERROR) if share_id and not force(flags) and not check_empty_dir(abs_dest): logger.warning('%s called %s sharelink import with non-empty dst: %s'
def main(client_id, user_arguments_dict):
    """Main function used by front end"""
    (configuration, logger, output_objects, op_name) = \
        initialize_main_variables(client_id, op_header=False)
    defaults = signature()[1]
    (validate_status, accepted) = validate_input_and_cert(
        user_arguments_dict,
        defaults,
        output_objects,
        client_id,
        configuration,
        allow_rejects=False,
    )
    if not validate_status:
        return (accepted, returnvalues.CLIENT_ERROR)
    vgrid_name = accepted['vgrid_name'][-1]
    # Only vgrid owners and members may inspect or manage workflows
    if not vgrid_is_owner_or_member(vgrid_name, client_id, configuration):
        output_objects.append({'object_type': 'error_text', 'text':
                               '''You must be an owner or member of %s vgrid
to access the workflows.''' % vgrid_name})
        return (output_objects, returnvalues.CLIENT_ERROR)
    title_entry = find_entry(output_objects, 'title')
    title_entry['text'] = '%s Workflows' \
        % configuration.site_vgrid_label
    title_entry['style'] = themed_styles(configuration)
    # jquery tablesorter + pager setup for the workflows table
    title_entry['javascript'] = \
        '''
<script type="text/javascript" src="/images/js/jquery.js"></script>
<script type="text/javascript" src="/images/js/jquery.tablesorter.js"></script>
<script type="text/javascript" src="/images/js/jquery.tablesorter.pager.js"></script>
<script type="text/javascript" src="/images/js/jquery.tablesorter.widgets.js"></script>
<script type="text/javascript" src="/images/js/jquery-ui.js"></script>
<script type="text/javascript">
$(document).ready(function() {
          $("#logarea").scrollTop($("#logarea")[0].scrollHeight);
          // table initially sorted by 0 (last update / date)
          var sortOrder = [[0,1]];
          // use image path for sorting if there is any inside
          var imgTitle = function(contents) {
              var key = $(contents).find("a").attr("class");
              if (key == null) {
                  key = $(contents).html();
              }
              return key;
          }
          $("#workflowstable").tablesorter({widgets: ["zebra", "saveSort"],
                                        sortList:sortOrder,
                                        textExtraction: imgTitle
                                        })
                               .tablesorterPager({ container: $("#pager"),
                                        size: %s
                                        });
     }
);
</script>
''' \
        % default_pager_entries
    output_objects.append({'object_type': 'html_form', 'text': '''
 <div id="confirm_dialog" title="Confirm" style="background:#fff;">
  <div id="confirm_text"><!-- filled by js --></div>
   <textarea cols="72" rows="10" id="confirm_input" style="display:none;"></textarea>
 </div>
'''})
    output_objects.append({'object_type': 'header',
                           'text': '%s Workflows for %s'
                           % (configuration.site_vgrid_label, vgrid_name)})
    logger.info('vgridworkflows %s' % vgrid_name)

    # Display active trigger jobs for this vgrid

    output_objects.append({'object_type': 'sectionheader',
                           'text': 'Active Trigger Jobs'})
    html = '<table><thead><tr>'
    html += '<th>Job ID</th>'
    html += '<th>Rule</th>'
    html += '<th>Path</th>'
    html += '<th>Change</th>'
    html += '<th>Time</th>'
    html += '<th>Status</th>'
    html += '</tr></thead>'
    html += '<tbody>'
    trigger_job_dir = os.path.join(configuration.vgrid_home,
                                   os.path.join(vgrid_name, '.%s.jobs'
                                                % configuration.vgrid_triggers))
    trigger_job_pending_dir = os.path.join(trigger_job_dir,
                                           'pending_states')
    trigger_job_final_dir = os.path.join(trigger_job_dir, 'final_states')
    # BUGFIX: makedirs_rec takes the configuration object as second arg -
    # the original passed logger here although every other call site in this
    # code base passes configuration
    if makedirs_rec(trigger_job_pending_dir, configuration) \
            and makedirs_rec(trigger_job_final_dir, configuration):
        # Trailing slash so the replace() calls strip the full dir prefix
        abs_vgrid_dir = '%s/' \
            % os.path.abspath(os.path.join(configuration.vgrid_files_home,
                                           vgrid_name))
        for filename in os.listdir(trigger_job_pending_dir):
            trigger_job_filepath = \
                os.path.join(trigger_job_pending_dir, filename)
            trigger_job = unpickle(trigger_job_filepath, logger)
            serverjob_filepath = \
                os.path.join(configuration.mrsl_files_dir,
                             os.path.join(
                                 client_id_dir(trigger_job['owner']),
                                 '%s.mRSL' % trigger_job['jobid']))
            serverjob = unpickle(serverjob_filepath, logger)
            if serverjob:
                if serverjob['STATUS'] in pending_states:
                    trigger_event = trigger_job['event']
                    trigger_rule = trigger_job['rule']
                    trigger_action = trigger_event['event_type']
                    trigger_time = time.ctime(trigger_event['time_stamp'])
                    # Show paths relative to the vgrid dir only
                    trigger_path = '%s %s' % \
                        (trigger_event['src_path'].replace(abs_vgrid_dir,
                                                           ''),
                         trigger_event['dest_path'].replace(abs_vgrid_dir,
                                                            ''))
                    # BUGFIX: the original row markup had a stray closing
                    # </td></td> and never closed the row with </tr>
                    html += \
                        '<tr><td>%s</td><td>%s</td><td>%s</td><td>%s</td>' \
                        '<td>%s</td><td>%s</td></tr>' \
                        % (trigger_job['jobid'],
                           trigger_rule['rule_id'],
                           trigger_path, trigger_action, trigger_time,
                           serverjob['STATUS'])
                elif serverjob['STATUS'] in final_states:
                    # Archive finished jobs out of the pending dir
                    src_path = os.path.join(trigger_job_pending_dir,
                                            filename)
                    dest_path = os.path.join(trigger_job_final_dir,
                                             filename)
                    move_file(src_path, dest_path, configuration)
                else:
                    logger.error('Trigger job: %s, unknown state: %s'
                                 % (trigger_job['jobid'],
                                    serverjob['STATUS']))
    html += '</tbody>'
    html += '</table>'
    output_objects.append({'object_type': 'html_form', 'text': html})

    # Display recent trigger log for this vgrid

    output_objects.append({'object_type': 'sectionheader',
                           'text': 'Trigger Log'})
    log_content = read_trigger_log(configuration, vgrid_name)
    output_objects.append({'object_type': 'html_form', 'text': '''
 <div class="form_container">
 <textarea id="logarea" rows=10 readonly="readonly">%s</textarea>
 </div>
 ''' % log_content})
    output_objects.append({'object_type': 'sectionheader',
                           'text': 'Manage Triggers'})

    # Always run as rule creator to avoid users being able to act on behalf
    # of ANY other user using triggers (=exploit)

    extra_fields = [
        ('path', None),
        ('match_dirs', ['False', 'True']),
        ('match_recursive', ['False', 'True']),
        ('changes', [keyword_all] + valid_trigger_changes),
        ('action', [keyword_auto] + valid_trigger_actions),
        ('arguments', None),
        ('run_as', client_id),
    ]

    # NOTE: we do NOT show saved template contents - see addvgridtriggers

    optional_fields = [('rate_limit', None), ('settle_time', None)]
    (status, oobjs) = vgrid_add_remove_table(
        client_id,
        vgrid_name,
        'trigger',
        'vgridtrigger',
        configuration,
        extra_fields,
        optional_fields,
    )
    output_objects.extend(oobjs)
    if not status:
        return (output_objects, returnvalues.SYSTEM_ERROR)
    return (output_objects, returnvalues.OK)
def run_transfer(configuration, client_id, transfer_dict):
    """Actual data transfer built from transfer_dict on behalf of client_id"""
    # NOTE: relies on a module-level logger (also used by transfer_result)
    logger.debug('run transfer for %s: %s' % (client_id,
                                              blind_pw(transfer_dict)))
    transfer_id = transfer_dict['transfer_id']
    action = transfer_dict['action']
    protocol = transfer_dict['protocol']
    status_dir = get_status_dir(configuration, client_id, transfer_id)
    cmd_map = get_cmd_map()
    if not protocol in cmd_map[action]:
        raise ValueError('unsupported protocol: %s' % protocol)
    client_dir = client_id_dir(client_id)
    makedirs_rec(status_dir, configuration)
    # Please note that base_dir must end in slash to avoid access to other
    # user dirs when own name is a prefix of another user name
    base_dir = os.path.abspath(
        os.path.join(configuration.user_home, client_dir)) + os.sep
    # TODO: we should refactor to move command extraction into one function
    command_pattern = cmd_map[action][protocol]
    target_helper_list = []
    key_path = transfer_dict.get("key", "")
    if key_path:
        # Use key with given name from settings dir
        settings_base_dir = os.path.abspath(
            os.path.join(configuration.user_settings, client_dir)) + os.sep
        key_path = os.path.join(settings_base_dir, user_keys_dir,
                                key_path.lstrip(os.sep))
        # IMPORTANT: path must be expanded to abs for proper chrooting
        key_path = os.path.abspath(key_path)
        if not valid_user_path(configuration, key_path, settings_base_dir):
            logger.error('rejecting illegal directory traversal for %s (%s)'
                         % (key_path, blind_pw(transfer_dict)))
            raise ValueError("user provided a key outside own settings!")
    rel_src_list = transfer_dict['src']
    rel_dst = transfer_dict['dst']
    compress = transfer_dict.get("compress", False)
    exclude = transfer_dict.get("exclude", [])
    if transfer_dict['action'] in ('import', ):
        logger.debug('setting abs dst for action %(action)s' % transfer_dict)
        src_path_list = transfer_dict['src']
        dst_path = os.path.join(base_dir, rel_dst.lstrip(os.sep))
        dst_path = os.path.abspath(dst_path)
        for src in rel_src_list:
            abs_dst = os.path.join(dst_path, src.lstrip(os.sep))
            # IMPORTANT: path must be expanded to abs for proper chrooting
            abs_dst = os.path.abspath(abs_dst)
            # Reject illegal directory traversal and hidden files
            if not valid_user_path(configuration, abs_dst, base_dir, True):
                logger.error(
                    'rejecting illegal directory traversal for %s (%s)'
                    % (abs_dst, blind_pw(transfer_dict)))
                raise ValueError("user provided a destination outside home!")
            # Trailing separator distinguishes dir (recursive) from single
            # file transfer when building the lftp/rsync target helpers
            if src.endswith(os.sep):
                target_helper_list.append(
                    (get_lftp_target(True, False, exclude),
                     get_rsync_target(True, False, exclude, compress)))
            else:
                target_helper_list.append(
                    (get_lftp_target(True, True, exclude),
                     get_rsync_target(True, True, exclude, compress)))
        makedirs_rec(dst_path, configuration)
    elif transfer_dict['action'] in ('export', ):
        logger.debug('setting abs src for action %(action)s' % transfer_dict)
        dst_path = transfer_dict['dst']
        src_path_list = []
        for src in rel_src_list:
            src_path = os.path.join(base_dir, src.lstrip(os.sep))
            # IMPORTANT: path must be expanded to abs for proper chrooting
            src_path = os.path.abspath(src_path)
            # Reject illegal directory traversal and hidden files
            if not valid_user_path(configuration, src_path, base_dir, True):
                logger.error(
                    'rejecting illegal directory traversal for %s (%s)'
                    % (src, blind_pw(transfer_dict)))
                raise ValueError("user provided a source outside home!")
            src_path_list.append(src_path)
            if src.endswith(os.sep) or os.path.isdir(src):
                target_helper_list.append(
                    (get_lftp_target(False, False, exclude),
                     get_rsync_target(False, False, exclude, compress)))
            else:
                target_helper_list.append(
                    (get_lftp_target(False, True, exclude),
                     get_rsync_target(False, True, exclude, compress)))
    else:
        raise ValueError('unsupported action for %(transfer_id)s: %(action)s'
                         % transfer_dict)
    run_dict = transfer_dict.copy()
    run_dict['log_path'] = os.path.join(status_dir, 'transfer.log')
    # Use private known hosts file for ssh transfers as explained above
    # NOTE: known_hosts containing '=' silently leads to rest getting
    # ignored! use /dev/null to skip host key verification completely for
    # now.
    #run_dict['known_hosts'] = os.path.join(base_dir, '.ssh', 'known_hosts')
    run_dict['known_hosts'] = '/dev/null'
    # Make sure password is set to empty string as default
    run_dict['password'] = run_dict.get('password', '')
    # TODO: this is a bogus cert path for now - we don't support ssl certs
    run_dict['cert'] = run_dict.get('cert', '')
    # IMPORTANT: must be implicit proto or 'ftp://' (not ftps://) and
    # similarly webdav(s) must use explicit http(s) instead. In both cases we
    # replace protocol between cmd selection and lftp path expansion
    if run_dict['protocol'] == 'ftps':
        run_dict['orig_proto'] = run_dict['protocol']
        run_dict['protocol'] = 'ftp'
        logger.info(
            'force %(orig_proto)s to %(protocol)s for %(transfer_id)s'
            % run_dict)
    elif run_dict['protocol'].startswith('webdav'):
        run_dict['orig_proto'] = run_dict['protocol']
        run_dict['protocol'] = run_dict['protocol'].replace('webdav', 'http')
        logger.info(
            'force %(orig_proto)s to %(protocol)s for %(transfer_id)s'
            % run_dict)
    if key_path:
        # Switch relative key/cert names to the validated absolute paths
        rel_key = run_dict['key']
        rel_cert = run_dict['cert']
        run_dict['key'] = key_path
        run_dict['cert'] = key_path.replace(rel_key, rel_cert)
        run_dict['ssh_auth'] = get_ssh_auth(True, run_dict)
        run_dict['ssl_auth'] = get_ssl_auth(True, run_dict)
    else:
        # Extract encrypted password
        password_digest = run_dict.get('password_digest', '')
        if password_digest:
            _, _, _, payload = password_digest.split("$")
            unscrambled = unscramble_digest(configuration.site_digest_salt,
                                            payload)
            _, _, password = unscrambled.split(":")
            run_dict['password'] = password
        run_dict['ssh_auth'] = get_ssh_auth(False, run_dict)
        run_dict['ssl_auth'] = get_ssl_auth(False, run_dict)
    run_dict['rel_dst'] = rel_dst
    run_dict['dst'] = dst_path
    run_dict['lftp_buf_size'] = run_dict.get('lftp_buf_size',
                                             lftp_buffer_bytes)
    # NOTE(review): 'sftp_sftp_block_size' looks like a typo for
    # 'lftp_sftp_block_size', so the default is likely always used - verify
    run_dict['lftp_sftp_block_size'] = run_dict.get('sftp_sftp_block_size',
                                                    lftp_sftp_block_bytes)
    status = 0
    for (src, rel_src, target_helper) in zip(src_path_list, rel_src_list,
                                             target_helper_list):
        (lftp_target, rsync_target) = target_helper
        logger.debug('setting up %(action)s for %(src)s' % run_dict)
        if run_dict['protocol'] == 'sftp' and not os.path.isabs(src):
            # NOTE: lftp interprets sftp://FQDN/SRC as absolute path /SRC
            # We force relative paths into user home with a tilde.
            # The resulting sftp://FQDN/~/SRC looks funky but works.
            run_dict['src'] = "~/%s" % src
        else:
            # All other paths are probably absolute or auto-chrooted anyway
            run_dict['src'] = src
        run_dict['rel_src'] = rel_src
        run_dict['lftp_args'] = ' '.join(lftp_target[0]) % run_dict
        run_dict['lftp_excludes'] = ' '.join(lftp_target[1])
        # src and dst may actually be reversed for lftp, but for symmetry ...
        run_dict['lftp_src'] = lftp_target[2][0] % run_dict
        run_dict['lftp_dst'] = lftp_target[2][1] % run_dict
        run_dict['rsync_args'] = ' '.join(rsync_target[0]) % run_dict
        # Preserve excludes on list form for rsync, where it matters
        run_dict[RSYNC_EXCLUDES_LIST] = rsync_target[1]
        run_dict['rsync_src'] = rsync_target[2][0] % run_dict
        run_dict['rsync_dst'] = rsync_target[2][1] % run_dict
        blind_dict = blind_pw(run_dict)
        logger.debug('expanded vars to %s' % blind_dict)
        # NOTE: Make sure NOT to break rsync excludes on list form as they
        # won't work if concatenated to a single string in command_list!
        command_list, blind_list = [], []
        for i in command_pattern:
            if i == RSYNC_EXCLUDES_LIST:
                command_list += run_dict[RSYNC_EXCLUDES_LIST]
                blind_list += run_dict[RSYNC_EXCLUDES_LIST]
            else:
                command_list.append(i % run_dict)
                blind_list.append(i % blind_dict)
        command_str = ' '.join(command_list)
        # blind variants mask out any password for safe logging
        blind_str = ' '.join(blind_list)
        logger.info('run %s on behalf of %s' % (blind_str, client_id))
        transfer_proc = subprocess_popen(command_list,
                                         stdout=subprocess_pipe,
                                         stderr=subprocess_pipe)
        # Save transfer_proc.pid for use in clean up during shutdown
        # in that way we can resume pretty smoothly in next run.
        sub_pid = transfer_proc.pid
        logger.info('%s %s running transfer process %s' % (client_id,
                                                           transfer_id,
                                                           sub_pid))
        add_sub_pid(configuration, sub_pid_map, client_id, transfer_id,
                    sub_pid)
        out, err = transfer_proc.communicate()
        exit_code = transfer_proc.wait()
        # Accumulate failures across all sources - any non-zero exit marks
        # the whole transfer FAILED below
        status |= exit_code
        del_sub_pid(configuration, sub_pid_map, client_id, transfer_id,
                    sub_pid)
        logger.info('done running transfer %s: %s' % (transfer_id,
                                                      blind_str))
        logger.debug('raw output is: %s' % out)
        logger.debug('raw error is: %s' % err)
        logger.debug('result was %s' % exit_code)
        # Strip the absolute home prefix from output to avoid exposing the
        # underlying fs layout to the user
        if not transfer_result(configuration, client_id, run_dict,
                               exit_code, out.replace(base_dir, ''),
                               err.replace(base_dir, '')):
            logger.error('writing transfer status for %s failed' %
                         transfer_id)
    logger.debug('done handling transfers in %(transfer_id)s'
                 % transfer_dict)
    transfer_dict['exit_code'] = status
    if status == 0:
        transfer_dict['status'] = 'DONE'
    else:
        transfer_dict['status'] = 'FAILED'
def main(client_id, user_arguments_dict):
    """Main function used by front end.

    Validates the CGI-style input in user_arguments_dict for client_id and
    dispatches on the 'action' argument (list / remove_dir / get_dir /
    put_dir, all guarded by flags == 'i') to manage per-directory image
    file metadata and the associated vgrid triggers.

    NOTE(review): the visible portion ends right after the final trigger
    touch; the function presumably continues (final return) past this chunk.
    """

    (configuration, logger, output_objects, op_name) = \
        initialize_main_variables(client_id, op_header=False)
    client_dir = client_id_dir(client_id)
    defaults = signature()[1]
    (validate_status, accepted) = validate_input_and_cert(
        user_arguments_dict,
        defaults,
        output_objects,
        client_id,
        configuration,
        allow_rejects=False,
        )
    if not validate_status:
        WARNING_MSG = str(accepted)
        output_objects.append({'object_type': 'warning',
                              'text': WARNING_MSG})
        return (accepted, returnvalues.CLIENT_ERROR)

    # Accepted values arrive as lists of strings; collapse to plain strings
    action = ''.join(accepted['action'])
    flags = ''.join(accepted['flags'])
    path = ''.join(accepted['path'])
    extension = ''.join(accepted['extension'])

    # Please note that base_dir must end in slash to avoid access to other
    # user dirs when own name is a prefix of another user name

    base_dir = os.path.abspath(os.path.join(configuration.user_home,
                               client_dir)) + os.sep
    # NOTE(review): no check that abs_path stays inside base_dir is visible
    # here (e.g. a valid_user_path style helper) - confirm that path
    # traversal is rejected by validate_input_and_cert or elsewhere.
    abs_path = os.path.join(base_dir, path)

    # NOTE(review): settings_dict is unused in the visible portion of this
    # function - possibly used further down or simply dead.
    settings_dict = load_settings(client_id, configuration)
    javascript = None

    title_entry = find_entry(output_objects, 'title')
    title_entry['text'] = 'FILEMETAIO Management'
    title_entry['javascript'] = javascript
    output_objects.append({'object_type': 'header',
                          'text': 'FILEMETAIO Management'})
    # Pessimistic default; each action branch flips this to OK on success
    status = returnvalues.ERROR

    if flags == 'i':
        if action == 'list':

            # List image settings for every registered extension under path

            image_meta = get_image_file_settings(logger, abs_path)
            if image_meta is not None:
                extension_list = []
                settings_status_list = []
                settings_progress_list = []
                image_count_list = []
                for entry in image_meta:
                    extension_list.append(entry['extension'])
                    settings_status_list.append(entry['settings_status'])
                    settings_progress_list.append(
                        entry['settings_update_progress'])
                    image_count_list.append(get_image_file_count(logger,
                            abs_path, entry['extension']))
                output_objects.append({
                    'object_type': 'image_settings_list',
                    'extension_list': extension_list,
                    'settings_status_list': settings_status_list,
                    'settings_progress_list': settings_progress_list,
                    'image_count_list': image_count_list,
                    })
                status = returnvalues.OK
            else:
                status = returnvalues.ERROR
                ERROR_MSG = "No image settings found for path: '%s'" \
                    % path
                output_objects.append({'object_type': 'text',
                        'text': ERROR_MSG})
                logger.error('filemetaio.py: %s -> %s' % (action,
                             ERROR_MSG))
        elif action == 'remove_dir':

            # Remove image settings (for one extension, or all when the
            # extension argument is empty) plus their triggers and marker
            # files

            remove_ext = None
            vgrid_name = path.split('/')[0]
            if extension != '':
                remove_ext = extension
            # NOTE(review): if remove_image_file_settings raises, `result`
            # and `removed_ext_list` are never assigned, so the references
            # below raise NameError - initialise them before the try, or
            # handle the failure inside the except clause.
            try:
                (result, removed_ext_list) = \
                    remove_image_file_settings(logger, abs_path,
                        remove_ext)
            except Exception, ex:
                logger.debug(str(traceback.format_exc()))
            if result is not None:
                result = returnvalues.OK
            else:
                result = returnvalues.ERROR
                ERROR_MSG = \
                    'Unable to remove image settings for path: %s' \
                    % path
                output_objects.append({'object_type': 'text',
                        'text': ERROR_MSG})
                logger.error('filemetaio.py: %s -> %s' % (action,
                             ERROR_MSG))
            for removed_ext in removed_ext_list:
                abs_last_modified_filepath = \
                    __get_image_settings_trigger_last_modified_filepath(logger,
                        abs_path, removed_ext)

                # Remove trigger

                if delete_file(abs_last_modified_filepath, logger):
                    # FYSIKER HACK: Sleep 1 to prevent trigger rule/event race
                    # TODO: Modify events handler to accept trigger action + delete
                    time.sleep(1)

                    # Remove old vgrid submit trigger for files

                    rule_id = __get_image_file_trigger_rule_id(logger,
                            path, removed_ext)
                    # NOTE(review): `extension` is passed here although the
                    # loop variable is `removed_ext`; for whole-directory
                    # removal extension is '' - looks like removed_ext was
                    # intended, confirm against the trigger helpers.
                    status = __remove_image_file_trigger(
                        configuration,
                        vgrid_name,
                        path,
                        extension,
                        rule_id,
                        output_objects,
                        )
                    if status != returnvalues.OK:
                        result = status

                    # Remove old vgrid submit trigger for settings

                    rule_id = \
                        __get_image_settings_trigger_rule_id(logger,
                            path, removed_ext)
                    status = __remove_image_settings_trigger(
                        configuration,
                        vgrid_name,
                        path,
                        extension,
                        rule_id,
                        output_objects,
                        )
                    if status != returnvalues.OK:
                        result = status
                else:
                    result = returnvalues.ERROR
                    ERROR_MSG = 'Unable to remove file: %s ' \
                        % abs_last_modified_filepath
                    output_objects.append({'object_type': 'text',
                            'text': ERROR_MSG})
                    logger.error('filemetaio.py: %s -> %s' % (action,
                                 ERROR_MSG))
        elif action == 'get_dir':

            # Return the stored image setting for one path + extension,
            # stringified for the output object

            image_count = get_image_file_count(logger, abs_path,
                    extension)
            image_meta = get_image_file_setting(logger, abs_path,
                    extension)
            if image_meta is not None:
                extension = str(image_meta['extension'])
                settings_status = str(image_meta['settings_status'])
                settings_update_progress = \
                    str(image_meta['settings_update_progress'])
                settings_recursive = str(image_meta['settings_recursive'])
                image_count = str(image_count)
                image_type = str(image_meta['image_type'])
                offset = str(image_meta['offset'])
                x_dimension = str(image_meta['x_dimension'])
                y_dimension = str(image_meta['y_dimension'])
                preview_image_extension = \
                    str(image_meta['preview_image_extension'])
                preview_x_dimension = \
                    str(image_meta['preview_x_dimension'])
                preview_y_dimension = \
                    str(image_meta['preview_y_dimension'])
                preview_cutoff_min = str(image_meta['preview_cutoff_min'])
                preview_cutoff_max = str(image_meta['preview_cutoff_max'])
                data_type = str(image_meta['data_type'])
                output_objects.append({
                    'object_type': 'image_setting',
                    'path': path,
                    'extension': extension,
                    'settings_status': settings_status,
                    'settings_update_progress': settings_update_progress,
                    'settings_recursive': settings_recursive,
                    'image_count': image_count,
                    'image_type': image_type,
                    'offset': offset,
                    'x_dimension': x_dimension,
                    'y_dimension': y_dimension,
                    'preview_image_extension': preview_image_extension,
                    'preview_x_dimension': preview_x_dimension,
                    'preview_y_dimension': preview_y_dimension,
                    'preview_cutoff_min': preview_cutoff_min,
                    'preview_cutoff_max': preview_cutoff_max,
                    'data_type': data_type,
                    })
                status = returnvalues.OK
            else:
                status = returnvalues.ERROR
                ERROR_MSG = \
                    "No image setting information for path: '%s', extension: '%s'" \
                    % (path, extension)
                output_objects.append({'object_type': 'text',
                        'text': ERROR_MSG})
                logger.error('filemetaio.py: %s -> %s' % (action,
                             ERROR_MSG))
        elif action == 'put_dir':

            # Create or update image settings for path + extension and
            # (re)install the matching vgrid triggers

            settings_status = ''.join(accepted['settings_status'])
            if ''.join(accepted['settings_recursive']) == 'True':
                settings_recursive = True
            else:
                settings_recursive = False
            image_type = ''.join(accepted['image_type'])
            data_type = ''.join(accepted['data_type'])
            # NOTE(review): int()/float() below raise ValueError on
            # malformed input unless the signature defaults already
            # guarantee numeric strings - confirm.
            offset = int(''.join(accepted['offset']))
            x_dimension = int(''.join(accepted['x_dimension']))
            y_dimension = int(''.join(accepted['y_dimension']))
            preview_image_extension = \
                ''.join(accepted['preview_image_extension'])
            preview_x_dimension = \
                int(''.join(accepted['preview_x_dimension']))
            preview_y_dimension = \
                int(''.join(accepted['preview_y_dimension']))
            preview_cutoff_min = \
                float(''.join(accepted['preview_cutoff_min']))
            preview_cutoff_max = \
                float(''.join(accepted['preview_cutoff_max']))
            # First path component is the vgrid, the rest is the data path
            path_array = path.split('/')
            vgrid_name = path_array[0]
            vgrid_data_path = '/'.join(path_array[1:])
            vgrid_meta_path = os.path.join(vgrid_data_path, __metapath)
            vgrid_image_meta_path = os.path.join(vgrid_data_path,
                    __image_metapath)
            OK_MSG = \
                "Created/updated settings for image extension: '%s' for path '%s'" \
                % (extension, path)
            ERROR_MSG = \
                "Failed to change settings for image extension: '%s' for path: '%s'" \
                % (extension, path)
            (is_valid, is_valid_msg) = __is_valid_image_settings_update(
                configuration,
                base_dir,
                vgrid_name,
                vgrid_data_path,
                extension,
                settings_recursive,
                )
            if is_valid:
                status = returnvalues.OK
            else:
                status = returnvalues.ERROR
                output_objects.append({'object_type': 'error_text',
                        'text': ERROR_MSG})
                output_objects.append({'object_type': 'error_text',
                        'text': is_valid_msg})
                logger.error('filemetaio.py: %s -> %s' % (action,
                             ERROR_MSG))
                logger.error('filemetaio.py: %s -> %s' % (action,
                             is_valid_msg))

            # Ensure meta path existence

            if status == returnvalues.OK:
                makedirs_rec(os.path.join(base_dir,
                             os.path.join(vgrid_name, vgrid_meta_path)),
                             configuration)

                # Ensure image meta path existence

                makedirs_rec(os.path.join(base_dir,
                             os.path.join(vgrid_name,
                             vgrid_image_meta_path)), configuration)
                try:
                    add_status = add_image_file_setting(
                        logger,
                        abs_path,
                        extension,
                        settings_status,
                        None,
                        settings_recursive,
                        image_type,
                        data_type,
                        offset,
                        x_dimension,
                        y_dimension,
                        preview_image_extension,
                        preview_x_dimension,
                        preview_y_dimension,
                        preview_cutoff_min,
                        preview_cutoff_max,
                        overwrite=True,
                        )
                except Exception, ex:
                    # Treat any helper failure as a plain add failure but
                    # keep the traceback in the debug log
                    add_status = False
                    logger.debug(str(traceback.format_exc()))
                if add_status:
                    status = returnvalues.OK
                    output_objects.append({'object_type': 'text',
                            'text': OK_MSG})
                else:
                    status = returnvalues.ERROR
                    output_objects.append({'object_type': 'error_text',
                            'text': ERROR_MSG})
                    logger.error('filemetaio.py: %s -> %s' % (action,
                                 ERROR_MSG))
            if status == returnvalues.OK:

                # Generate vgrid trigger for files

                if settings_recursive:
                    vgrid_trigger_path = os.path.join(vgrid_data_path,
                            '*/*.%s' % extension)
                else:
                    vgrid_trigger_path = os.path.join(vgrid_data_path,
                            '*.%s' % extension)
                rule_id = __get_image_file_trigger_rule_id(logger,
                        path, extension)
                rule_dict = {
                    'rule_id': rule_id,
                    'vgrid_name': vgrid_name,
                    'path': vgrid_trigger_path,
                    'changes': ['created', 'modified', 'deleted',
                                'moved'],
                    'run_as': client_id,
                    'action': 'submit',
                    'arguments': 'template_from_filemetaio.py',
                    'templates': [__get_image_update_preview_mrsl_template(path)],
                    'settle_time': '60s',
                    'rate_limit': '',
                    }

                # Remove old vgrid submit trigger for files

                status = __remove_image_file_trigger(
                    configuration,
                    vgrid_name,
                    path,
                    extension,
                    rule_id,
                    output_objects,
                    )
                if status == returnvalues.OK:

                    # Add generated vgrid submit trigger for files

                    (add_status, add_msg) = \
                        vgrid_add_triggers(configuration, vgrid_name,
                            [rule_dict])
                    if add_status:
                        status = returnvalues.OK
                        OK_MSG = \
                            "Created/updated image file trigger for extension: '%s', path '%s'" \
                            % (extension, path)
                        output_objects.append({'object_type': 'text',
                                'text': OK_MSG})
                    else:
                        status = returnvalues.ERROR
                        ERROR_MSG = \
                            "Failed change image file trigger for extension: '%s', path '%s'" \
                            % (extension, path)
                        ERROR_MSG2 = "Makes sure '%s' is a VGrid" \
                            % vgrid_name
                        output_objects.append({'object_type': 'error_text',
                                'text': ERROR_MSG})
                        output_objects.append({'object_type': 'error_text',
                                'text': ERROR_MSG2})
                        logger.error('filemetaio.py: %s -> %s'
                                     % (action, ERROR_MSG))
                        logger.error('filemetaio.py: %s -> %s'
                                     % (action, ERROR_MSG2))
            if status == returnvalues.OK:

                # Generate vgrid trigger for settings

                vgrid_path = '/'.join(path.split('/')[1:])
                vgrid_trigger_filepath = \
                    __get_image_settings_trigger_last_modified_filepath(logger,
                        vgrid_path, extension)
                rule_id = __get_image_settings_trigger_rule_id(logger,
                        path, extension)
                rule_dict = {
                    'rule_id': rule_id,
                    'vgrid_name': vgrid_name,
                    'path': vgrid_trigger_filepath,
                    'changes': ['modified', 'deleted'],
                    'run_as': client_id,
                    'action': 'submit',
                    'arguments': 'template_from_filemetaio.py',
                    'templates': [__get_image_create_previews_mrsl_template(path,
                            extension)],
                    'settle_time': '1s',
                    'rate_limit': '',
                    }

                # Remove old vgrid submit trigger for settings

                status = __remove_image_settings_trigger(
                    configuration,
                    vgrid_name,
                    path,
                    extension,
                    rule_id,
                    output_objects,
                    )
                if status == returnvalues.OK:

                    # Add generated vgrid submit trigger for settings

                    (add_status, add_msg) = \
                        vgrid_add_triggers(configuration, vgrid_name,
                            [rule_dict])
                    if add_status:
                        status = returnvalues.OK
                        OK_MSG = \
                            "Created/updated old image setting trigger for extension: '%s', path '%s'" \
                            % (extension, path)
                        output_objects.append({'object_type': 'text',
                                'text': OK_MSG})
                    else:
                        status = returnvalues.ERROR
                        ERROR_MSG = \
                            "Failed change old image setting trigger for extension: '%s', path '%s'" \
                            % (extension, path)
                        ERROR_MSG2 = "Makes sure '%s' is a VGrid" \
                            % vgrid_name
                        output_objects.append({'object_type': 'error_text',
                                'text': ERROR_MSG})
                        output_objects.append({'object_type': 'error_text',
                                'text': ERROR_MSG2})
                        logger.error('filemetaio.py: %s -> %s'
                                     % (action, ERROR_MSG))
                        logger.error('filemetaio.py: %s -> %s'
                                     % (action, ERROR_MSG2))
            if status == returnvalues.OK:

                # Trigger Trigger (Trigger Happy): touch the settings
                # marker file so the just-installed settings trigger fires
                # once immediately

                abs_vgrid_trigger_filepath = os.path.join(base_dir,
                        os.path.join(vgrid_name,
                        vgrid_trigger_filepath))

                # FYSIKER HACK: Sleep 1 to prevent trigger rule/event race
                # TODO: Modify events handler to accept add+trigger action

                time.sleep(1)
                timestamp = time.time()
                touch(abs_vgrid_trigger_filepath, timestamp)