Example #1
def enable():
    # this uses the most recent file timestamp.
    hutil.do_parse_context('Enable')

    freezer = FsFreezer(patching= MyPatching, logger = backup_logger)
    unfreeze_result = None
    snapshot_result = None
    freeze_result = None
    global_error_result = None
    para_parser = None
    run_result = 1
    error_msg = ''
    run_status = None
    # precheck
    freeze_called = False
    try:
        # we need to freeze the file system first
        backup_logger.log('starting to enable', True)

        # handle the restoring scenario.
        mi = MachineIdentity()
        stored_identity = mi.stored_identity()
        if(stored_identity is None):
            mi.save_identity()
        else:
            current_identity = mi.current_identity()
            if(current_identity != stored_identity):
                current_seq_no = -1
                backup_logger.log("machine identity not same, set current_seq_no to " + str(current_seq_no) + " " + str(stored_identity) + " " + str(current_identity), True)
                hutil.set_last_seq(current_seq_no)
                mi.save_identity()

        hutil.exit_if_same_seq()
        hutil.save_seq()

        """
        protectedSettings is the privateConfig passed from Powershell.
        WATCHOUT that, the _context_config are using the most freshest timestamp.
        if the time sync is alive, this should be right.
        """
        protected_settings = hutil._context._config['runtimeSettings'][0]['handlerSettings'].get('protectedSettings')
        public_settings = hutil._context._config['runtimeSettings'][0]['handlerSettings'].get('publicSettings')
        para_parser = ParameterParser(protected_settings, public_settings)

        if(para_parser.commandStartTimeUTCTicks is not None and para_parser.commandStartTimeUTCTicks != ""):
            utcTicksLong = long(para_parser.commandStartTimeUTCTicks)
            commandStartTime = convert_time(utcTicksLong)
            utcNow = datetime.datetime.utcnow()
            backup_logger.log('command start time is ' + str(commandStartTime) + " and utcNow is " + str(utcNow))
            timespan = utcNow - commandStartTime
            THIRTY_MINUTES = 30 * 60 # in seconds
            # handle the machine identity for the restoration scenario.
            total_span_in_seconds = timedelta_total_seconds(timespan)
            backup_logger.log('timespan is ' + str(timespan) + ' ' + str(total_span_in_seconds))
            if(abs(total_span_in_seconds) > THIRTY_MINUTES):
                error_msg = 'the call time stamp is out of date. so skip it.'
                exit_with_commit_log(error_msg, para_parser)

        if(para_parser.taskId is not None and para_parser.taskId != ""):
            taskIdentity = TaskIdentity()
            taskIdentity.save_identity(para_parser.taskId)
        commandToExecute = para_parser.commandToExecute
        # validate all the required parameters here
        if(commandToExecute.lower() == CommonVariables.iaas_install_command):
            backup_logger.log('install succeed.',True)
            run_status = 'success'
            error_msg = 'Install Succeeded'
            run_result = CommonVariables.success
            backup_logger.log(error_msg)
        elif(commandToExecute.lower() == CommonVariables.iaas_vmbackup_command):
            if(para_parser.backup_metadata is None or para_parser.public_config_obj is None or para_parser.private_config_obj is None):
                run_result = CommonVariables.error_parameter
                run_status = 'error'
                error_msg = 'required field empty or not correct'
                backup_logger.log(error_msg, False, 'Error')
            else:
                backup_logger.log('commandToExecute is ' + commandToExecute, True)
                """
                make sure the log is not doing when the file system is freezed.
                """
                backup_logger.log('doing freeze now...', True)
                freeze_called = True
                freeze_result = freezer.freezeall()
                backup_logger.log('freeze result ' + str(freeze_result))

                # check whether the freeze succeeded first
                if(freeze_result is not None and len(freeze_result.errors) > 0):
                    run_result = CommonVariables.error
                    run_status = 'error'
                    error_msg = 'Enable failed with error: ' + str(freeze_result)
                    backup_logger.log(error_msg, False, 'Warning')
                else:
                    backup_logger.log('doing snapshot now...')
                    snap_shotter = Snapshotter(backup_logger)
                    snapshot_result = snap_shotter.snapshotall(para_parser)
                    backup_logger.log('snapshotall ends...')
                    if(snapshot_result is not None and len(snapshot_result.errors) > 0):
                        error_msg = 'snapshot result: ' + str(snapshot_result)
                        run_result = CommonVariables.error
                        run_status = 'error'
                        backup_logger.log(error_msg, False, 'Error')
                    else:
                        run_result = CommonVariables.success
                        run_status = 'success'
                        error_msg = 'Enable Succeeded'
                        backup_logger.log(error_msg)
        else:
            run_status = 'error'
            run_result = CommonVariables.error_parameter
            error_msg = 'command is not correct'
            backup_logger.log(error_msg, False, 'Error')
    except Exception as e:
        errMsg = 'Failed to enable the extension with error: %s, stack trace: %s' % (str(e), traceback.format_exc())
        backup_logger.log(errMsg, False, 'Error')
        global_error_result = e
    finally:
        if(freeze_called):
            unfreeze_result = freezer.unfreezeall()
            backup_logger.log('unfreeze result ' + str(unfreeze_result))
            if(unfreeze_result is not None and len(unfreeze_result.errors) > 0):
                error_msg += ('Enable Succeeded with error: ' + str(unfreeze_result.errors))
                backup_logger.log(error_msg, False, 'Warning')
            backup_logger.log('unfreeze ends...')

    """
    we do the final report here to get rid of the complex logic to handle the logging when file system be freezed issue.
    """
    if(global_error_result is not None):
        if(hasattr(global_error_result,'errno') and global_error_result.errno == 2):
            run_result = CommonVariables.error_12
        elif(para_parser is None):
            run_result = CommonVariables.error_parameter
        else:
            run_result = CommonVariables.error
        run_status = 'error'
        error_msg  += ('Enable failed.' + str(global_error_result))

    status_report_msg = None
    if(para_parser is not None and para_parser.statusBlobUri is not None and para_parser.statusBlobUri != ""):
        status_report_msg = do_backup_status_report(operation='Enable',status = run_status,\
                                status_code=str(run_result), \
                                message=error_msg,\
                                taskId=para_parser.taskId,\
                                commandStartTimeUTCTicks=para_parser.commandStartTimeUTCTicks,\
                                blobUri=para_parser.statusBlobUri)
    if(status_report_msg is not None):
        backup_logger.log("status report message:")
        backup_logger.log(status_report_msg)
    else:
        backup_logger.log("status_report_msg is none")
    if(para_parser is not None and para_parser.logsBlobUri is not None and para_parser.logsBlobUri != ""):
        backup_logger.commit(para_parser.logsBlobUri)
    else:
        backup_logger.log("the logs blob uri is not there, so do not upload log.")
        backup_logger.commit_to_local()
    

    hutil.do_exit(0, 'Enable', run_status, str(run_result), error_msg)
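The staleness check in Example #1 relies on two small time helpers, convert_time and timedelta_total_seconds, that are defined elsewhere in the extension. The sketch below is a minimal, hypothetical version of them, assuming commandStartTimeUTCTicks is a .NET-style tick count (100-nanosecond intervals counted from 0001-01-01 UTC) and that timedelta.total_seconds() may be unavailable (it only exists on Python 2.7+).

import datetime

def convert_time(utc_ticks):
    # 1 tick = 100 ns, so ticks // 10 gives whole microseconds since year 1
    return datetime.datetime(1, 1, 1) + datetime.timedelta(microseconds=utc_ticks // 10)

def timedelta_total_seconds(delta):
    # manual equivalent of timedelta.total_seconds() for older Python versions
    return (delta.microseconds +
            (delta.seconds + delta.days * 24 * 3600) * 10**6) / float(10**6)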
Example #2
def daemon():
    global MyPatching, backup_logger, hutil, run_result, run_status, error_msg, freezer, para_parser, snapshot_done
    # this uses the most recent file timestamp.
    hutil.do_parse_context('Executing')
    freezer = FsFreezer(patching=MyPatching, logger=backup_logger)
    global_error_result = None
    # precheck
    freeze_called = False

    configfile = '/etc/azure/vmbackup.conf'
    thread_timeout = str(60)
    try:
        config = ConfigParser.ConfigParser()
        config.read(configfile)
        thread_timeout = config.get('SnapshotThread', 'timeout')
    except Exception as e:
        errMsg = 'cannot read config file or file not present'
        backup_logger.log(errMsg, False, 'Warning')
    backup_logger.log("final thread timeout" + thread_timeout, True)

    try:
        # we need to freeze the file system first
        backup_logger.log('starting to enable', True)

        # handle the restoring scenario.
        mi = MachineIdentity()
        stored_identity = mi.stored_identity()
        if (stored_identity is None):
            mi.save_identity()
        else:
            current_identity = mi.current_identity()
            if (current_identity != stored_identity):
                current_seq_no = -1
                backup_logger.log(
                    "machine identity not same, set current_seq_no to " +
                    str(current_seq_no) + " " + str(stored_identity) + " " +
                    str(current_identity), True)
                hutil.set_last_seq(current_seq_no)
                mi.save_identity()

        hutil.exit_if_same_seq()
        hutil.save_seq()
        """
        protectedSettings is the privateConfig passed from Powershell.
        WATCHOUT that, the _context_config are using the most freshest timestamp.
        if the time sync is alive, this should be right.
        """
        protected_settings = hutil._context._config['runtimeSettings'][0][
            'handlerSettings'].get('protectedSettings')
        public_settings = hutil._context._config['runtimeSettings'][0][
            'handlerSettings'].get('publicSettings')
        para_parser = ParameterParser(protected_settings, public_settings)

        if (para_parser.commandStartTimeUTCTicks is not None
                and para_parser.commandStartTimeUTCTicks != ""):
            utcTicksLong = long(para_parser.commandStartTimeUTCTicks)
            backup_logger.log('utcTicks in long format' + str(utcTicksLong),
                              True)
            commandStartTime = convert_time(utcTicksLong)
            utcNow = datetime.datetime.utcnow()
            backup_logger.log('command start time is ' +
                              str(commandStartTime) + " and utcNow is " +
                              str(utcNow))
            timespan = utcNow - commandStartTime
            THIRTY_MINUTES = 30 * 60  # in seconds
            # handle the machine identity for the restoration scenario.
            total_span_in_seconds = timedelta_total_seconds(timespan)
            backup_logger.log('timespan is ' + str(timespan) + ' ' +
                              str(total_span_in_seconds))
            if (abs(total_span_in_seconds) > THIRTY_MINUTES):
                error_msg = 'the call time stamp is out of date. so skip it.'
                exit_with_commit_log(error_msg, para_parser)

        if (para_parser.taskId is not None and para_parser.taskId != ""):
            taskIdentity = TaskIdentity()
            taskIdentity.save_identity(para_parser.taskId)
        commandToExecute = para_parser.commandToExecute
        # validate all the required parameters here
        if (commandToExecute.lower() == CommonVariables.iaas_install_command):
            backup_logger.log('install succeed.', True)
            run_status = 'success'
            error_msg = 'Install Succeeded'
            run_result = CommonVariables.success
            backup_logger.log(error_msg)
        elif (commandToExecute.lower() == CommonVariables.iaas_vmbackup_command
              ):
            if (para_parser.backup_metadata is None
                    or para_parser.public_config_obj is None
                    or para_parser.private_config_obj is None):
                run_result = CommonVariables.error_parameter
                run_status = 'error'
                error_msg = 'required field empty or not correct'
                backup_logger.log(error_msg, False, 'Error')
            else:
                backup_logger.log('commandToExecute is ' + commandToExecute,
                                  True)
                """
                make sure the log is not doing when the file system is freezed.
                """
                temp_status = 'transitioning'
                temp_result = CommonVariables.success
                temp_msg = 'Transitioning state in extension'
                trans_report_msg = None
                if (para_parser is not None
                        and para_parser.statusBlobUri is not None
                        and para_parser.statusBlobUri != ""):
                    trans_report_msg = do_backup_status_report(operation='Enable',status=temp_status,\
                                    status_code=str(temp_result),\
                                    message=temp_msg,\
                                    taskId=para_parser.taskId,\
                                    commandStartTimeUTCTicks=para_parser.commandStartTimeUTCTicks,\
                                    blobUri=para_parser.statusBlobUri)
                    if (trans_report_msg is not None):
                        backup_logger.log("trans status report message:")
                        backup_logger.log(trans_report_msg)
                    else:
                        backup_logger.log("trans_report_msg is none")
                hutil.do_status_report('Enable', temp_status, str(temp_result),
                                       temp_msg)
                backup_logger.log('doing freeze now...', True)
                snapshot_thread = Thread(target=snapshot)
                start_time = datetime.datetime.utcnow()
                snapshot_thread.start()
                snapshot_thread.join(float(thread_timeout))
                if not snapshot_done:
                    run_result = CommonVariables.error
                    run_status = 'error'
                    error_msg = 'T:W Snapshot timeout'
                    backup_logger.log(error_msg, False, 'Warning')

                end_time = datetime.datetime.utcnow()
                time_taken = end_time - start_time
                backup_logger.log('total time taken..' + str(time_taken))

                for i in range(0, 3):
                    unfreeze_result = freezer.unfreezeall()
                    backup_logger.log('unfreeze result ' +
                                      str(unfreeze_result))
                    if (unfreeze_result is not None):
                        if len(unfreeze_result.errors) > 0:
                            error_msg += ('unfreeze with error: ' +
                                          str(unfreeze_result.errors))
                            backup_logger.log(error_msg, False, 'Warning')
                        else:
                            backup_logger.log('unfreeze succeeded')
                            break
                backup_logger.log('unfreeze ends...')

        else:
            run_status = 'error'
            run_result = CommonVariables.error_parameter
            error_msg = 'command is not correct'
            backup_logger.log(error_msg, False, 'Error')
    except Exception as e:
        errMsg = 'Failed to enable the extension with error: %s, stack trace: %s' % (
            str(e), traceback.format_exc())
        backup_logger.log(errMsg, False, 'Error')
        global_error_result = e
    """
    we do the final report here to get rid of the complex logic to handle the logging when file system be freezed issue.
    """
    if (global_error_result is not None):
        if (hasattr(global_error_result, 'errno')
                and global_error_result.errno == 2):
            run_result = CommonVariables.error_12
        elif (para_parser is None):
            run_result = CommonVariables.error_parameter
        else:
            run_result = CommonVariables.error
        run_status = 'error'
        error_msg += ('Enable failed.' + str(global_error_result))
    status_report_msg = None
    if (para_parser is not None and para_parser.statusBlobUri is not None
            and para_parser.statusBlobUri != ""):
        status_report_msg = do_backup_status_report(operation='Enable',status=run_status,\
                                status_code=str(run_result),\
                                message=error_msg,\
                                taskId=para_parser.taskId,\
                                commandStartTimeUTCTicks=para_parser.commandStartTimeUTCTicks,\
                                blobUri=para_parser.statusBlobUri)
    if (status_report_msg is not None):
        backup_logger.log("status report message:")
        backup_logger.log(status_report_msg)
    else:
        backup_logger.log("status_report_msg is none")
    if (para_parser is not None and para_parser.logsBlobUri is not None
            and para_parser.logsBlobUri != ""):
        backup_logger.commit(para_parser.logsBlobUri)
    else:
        backup_logger.log(
            "the logs blob uri is not there, so do not upload log.")
        backup_logger.commit_to_local()

    hutil.do_exit(0, 'Enable', run_status, str(run_result), error_msg)
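Example #2 bounds the snapshot work with a worker thread and a join timeout instead of blocking the daemon indefinitely. Below is a stripped-down sketch of that pattern, with a placeholder do_snapshot() and a module-level completion flag standing in for the extension's own snapshot() and snapshot_done.

import time
from threading import Thread

snapshot_done = False

def do_snapshot():
    # placeholder for the real snapshot work
    global snapshot_done
    time.sleep(5)
    snapshot_done = True  # only set once the snapshot actually finished

def snapshot_with_timeout(thread_timeout=60):
    worker = Thread(target=do_snapshot)
    worker.start()
    worker.join(float(thread_timeout))  # returns after the timeout even if the thread is still running
    if not snapshot_done:
        return 'error', 'T:W Snapshot timeout'
    return 'success', 'snapshot completed within the timeout'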
Example #3
def enable():
    freezer = FsFreezer(backup_logger)
    unfreeze_result     = None
    snapshot_result     = None
    freeze_result       = None
    global_error_result = None
    para_parser         = None
    run_result          = 1
    error_msg           = ''
    run_status          = None
    # precheck 
    try:
        hutil.do_parse_context('Enable')
        hutil.exit_if_enabled()
        # we need to freeze the file system first
        backup_logger.log('starting to enable', True)
        """
        protectedSettings is the privateConfig passed from PowerShell.
        """
        protected_settings = hutil._context._config['runtimeSettings'][0]['handlerSettings'].get('protectedSettings')
        public_settings = hutil._context._config['runtimeSettings'][0]['handlerSettings'].get('publicSettings')
        para_parser = ParameterParser(protected_settings, public_settings)

        commandToExecute = para_parser.commandToExecute
        # validate all the required parameters here
        if(commandToExecute.lower() == CommonVariables.iaas_install_command):
            backup_logger.log("install succeed.",True)
            run_status = 'success'
            error_msg  = 'Install Succeeded'
            run_result = 0
            backup_logger.log(error_msg)
        elif(commandToExecute.lower() == CommonVariables.iaas_vmbackup_command):
            if(para_parser.backup_metadata is None or para_parser.public_config_obj is None or para_parser.private_config_obj is None):
                run_result = 11
                run_status = 'error'
                error_msg  = 'required field empty or not correct'
            else:
                backup_logger.log('commandToExecute is ' + commandToExecute, True)
                if(commandToExecute.lower() == CommonVariables.iaas_vmbackup_command):
                    """
                    make sure the log is not doing when the file system is freezed.
                    """
                    backup_logger.log("doing freeze now...", True)
                    freeze_result   = freezer.freezeall()
                    backup_logger.log("freeze result " + str(freeze_result))
                
                    backup_logger.log("doing snapshot now...")
                    snap_shotter    = Snapshotter(backup_logger)
                    snapshot_result = snap_shotter.snapshotall(para_parser)
                    backup_logger.log("snapshotall ends...")
                    if(snapshot_result != None and len(snapshot_result.errors) > 0):
                        error_msg  = "snapshot result: " + str(snapshot_result.errors)
                        run_result = 2
                        run_status = 'error'
                        backup_logger.log(error_msg, False, 'Error')
                    else:
                        if(freeze_result != None and len(freeze_result.errors) > 0 ):
                            run_result = 1
                            run_status = 'warning'
                            error_msg  = 'Enable Succeeded with error' + str(freeze_result.errors)
                            backup_logger.log(error_msg, False, 'Warning')
                        else:
                            run_result = 1
                            run_status = 'success'
                            error_msg  = 'Enable Succeeded'
                            backup_logger.log(error_msg)

    except Exception as e:
        errMsg = "Failed to enable the extension with error: %s, stack trace: %s" % (str(e), traceback.format_exc())
        backup_logger.log(errMsg, False, 'Error')
        global_error_result = e
    finally:
        backup_logger.log("doing unfreeze now...")
        unfreeze_result = freezer.unfreezeall()
        backup_logger.log("unfreeze result " + str(unfreeze_result))
        if(unfreeze_result is not None and len(unfreeze_result.errors) > 0):
            error_msg += ('Enable Succeeded with error: ' + str(unfreeze_result.errors))
            backup_logger.log(error_msg, False, 'Warning')
        backup_logger.log("unfreeze ends...")

    if(para_parser!= None):
        backup_logger.commit(para_parser.logsBlobUri)
    """
    we do the final report here to get rid of the complex logic to handle the logging when file system be freezed issue.
    """
    if(global_error_result is not None):
        if(hasattr(global_error_result, 'errno') and global_error_result.errno == 2):
            run_result = 12
        else:
            run_result = 2
        run_status = 'error'
        error_msg  += ('Enable failed.' + str(global_error_result))

    hutil.do_exit(run_result, 'Enable', run_status, str(run_result), error_msg)
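Example #3 unfreezes unconditionally in its finally block, even when the freeze itself never ran; Example #1 avoids that by tracking a freeze_called flag. A minimal sketch of that safer pairing, assuming FsFreezer/Snapshotter-like objects with the same freezeall/snapshotall/unfreezeall methods used above:

def freeze_and_snapshot(freezer, snap_shotter, para_parser, logger):
    freeze_called = False
    try:
        freeze_called = True
        freeze_result = freezer.freezeall()
        if freeze_result is not None and len(freeze_result.errors) > 0:
            return 'error', 'freeze failed: ' + str(freeze_result)
        snapshot_result = snap_shotter.snapshotall(para_parser)
        if snapshot_result is not None and len(snapshot_result.errors) > 0:
            return 'error', 'snapshot failed: ' + str(snapshot_result)
        return 'success', 'Enable Succeeded'
    finally:
        # always thaw the file systems, but only if a freeze was actually attempted
        if freeze_called:
            logger.log('unfreeze result ' + str(freezer.unfreezeall()))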
Example #4
def daemon():
    global MyPatching,backup_logger,hutil,run_result,run_status,error_msg,freezer,para_parser,snapshot_done,snapshot_info_array,g_fsfreeze_on,total_used_size
    # this uses the most recent file timestamp.
    hutil.do_parse_context('Executing')
    freezer = FsFreezer(patching= MyPatching, logger = backup_logger)
    global_error_result = None
    # precheck
    freeze_called = False
    configfile='/etc/azure/vmbackup.conf'
    thread_timeout=str(60)

    #Adding python version to the telemetry
    try:
        python_version_info = sys.version_info
        python_version = str(sys.version_info[0])+ '.'  + str(sys.version_info[1]) + '.'  + str(sys.version_info[2])
        HandlerUtil.HandlerUtility.add_to_telemetery_data("pythonVersion", python_version)
    except Exception as e:
        errMsg = 'Failed to do retrieve python version with error: %s, stack trace: %s' % (str(e), traceback.format_exc())
        backup_logger.log(errMsg, True, 'Error')

    try:
        if(freezer.mounts is not None):
            hutil.partitioncount = len(freezer.mounts.mounts)
        backup_logger.log(" configfile " + str(configfile), True)
        config = ConfigParsers.ConfigParser()
        config.read(configfile)
        if config.has_option('SnapshotThread','timeout'):
            thread_timeout= config.get('SnapshotThread','timeout')
    except Exception as e:
        errMsg='cannot read config file or file not present'
        backup_logger.log(errMsg, True, 'Warning')
    backup_logger.log("final thread timeout" + thread_timeout, True)
    
    snapshot_info_array = None

    try:
        # we need to freeze the file system first
        backup_logger.log('starting daemon', True)
        """
        protectedSettings is the privateConfig passed from Powershell.
        WATCHOUT that, the _context_config are using the most freshest timestamp.
        if the time sync is alive, this should be right.
        """
        if(hutil.is_prev_in_transition()):
            backup_logger.log('retrieving the previous logs for this again inside daemon', True)
            backup_logger.set_prev_log()

        protected_settings = hutil._context._config['runtimeSettings'][0]['handlerSettings'].get('protectedSettings')
        public_settings = hutil._context._config['runtimeSettings'][0]['handlerSettings'].get('publicSettings')
        para_parser = ParameterParser(protected_settings, public_settings)

        commandToExecute = para_parser.commandToExecute
        # validate all the required parameters here
        backup_logger.log(commandToExecute,True)
        if(CommonVariables.iaas_install_command in commandToExecute.lower()):
            backup_logger.log('install succeed.',True)
            run_status = 'success'
            error_msg = 'Install Succeeded'
            run_result = CommonVariables.success
            backup_logger.log(error_msg)
        elif(CommonVariables.iaas_vmbackup_command in commandToExecute.lower()):
            if(para_parser.backup_metadata is None or para_parser.public_config_obj is None or para_parser.private_config_obj is None):
                run_result = CommonVariables.error_parameter
                hutil.SetExtErrorCode(ExtensionErrorCodeHelper.ExtensionErrorCodeEnum.error_parameter)
                run_status = 'error'
                error_msg = 'required field empty or not correct'
                backup_logger.log(error_msg, True, 'Error')
            else:
                backup_logger.log('commandToExecute is ' + commandToExecute, True)
                """
                make sure the log is not doing when the file system is freezed.
                """
                temp_status= 'success'
                temp_result=CommonVariables.ExtensionTempTerminalState
                temp_msg='Transitioning state in extension'
                blob_report_msg, file_report_msg = get_status_to_report(temp_status, temp_result, temp_msg, None)
                if(hutil.is_status_file_exists()):
                    status_report_to_file(file_report_msg)
                status_report_to_blob(blob_report_msg)
                #partial logging before freeze
                if(para_parser is not None and para_parser.logsBlobUri is not None and para_parser.logsBlobUri != ""):
                    backup_logger.commit_to_blob(para_parser.logsBlobUri)
                else:
                    backup_logger.log("the logs blob uri is not there, so do not upload log.")
                backup_logger.log('commandToExecute is ' + commandToExecute, True)

                PluginHostObj = PluginHost(logger=backup_logger)
                PluginHostErrorCode,dobackup,g_fsfreeze_on = PluginHostObj.pre_check()
                doFsConsistentbackup = False
                appconsistentBackup = False

                if not (PluginHostErrorCode == CommonVariables.FailedPrepostPluginhostConfigParsing or
                        PluginHostErrorCode == CommonVariables.FailedPrepostPluginConfigParsing or
                        PluginHostErrorCode == CommonVariables.FailedPrepostPluginhostConfigNotFound or
                        PluginHostErrorCode == CommonVariables.FailedPrepostPluginhostConfigPermissionError or
                        PluginHostErrorCode == CommonVariables.FailedPrepostPluginConfigNotFound):
                    backup_logger.log('App Consistent Backup Enabled', True)
                    HandlerUtil.HandlerUtility.add_to_telemetery_data("isPrePostEnabled", "true")
                    appconsistentBackup = True

                if(PluginHostErrorCode != CommonVariables.PrePost_PluginStatus_Success):
                    backup_logger.log('Triggering File System Consistent Backup because of error code' + ExtensionErrorCodeHelper.ExtensionErrorCodeHelper.StatusCodeStringBuilder(PluginHostErrorCode), True)
                    doFsConsistentbackup = True

                preResult = PluginHostResult()
                postResult = PluginHostResult()

                if not doFsConsistentbackup:
                    preResult = PluginHostObj.pre_script()
                    dobackup = preResult.continueBackup

                    if(g_fsfreeze_on == False and preResult.anyScriptFailed):
                        dobackup = False

                if dobackup:
                    freeze_snapshot(thread_timeout)

                if not doFsConsistentbackup:
                    postResult = PluginHostObj.post_script()
                    if not postResult.continueBackup:
                        dobackup = False
                
                    if(g_fsfreeze_on == False and postResult.anyScriptFailed):
                        dobackup = False

                if not dobackup:
                    if run_result == CommonVariables.success and PluginHostErrorCode != CommonVariables.PrePost_PluginStatus_Success:
                        run_status = 'error'
                        run_result = PluginHostErrorCode
                        hutil.SetExtErrorCode(PluginHostErrorCode)
                        error_msg = 'Plugin Host Precheck Failed'
                        error_msg = error_msg + ExtensionErrorCodeHelper.ExtensionErrorCodeHelper.StatusCodeStringBuilder(hutil.ExtErrorCode)
                        backup_logger.log(error_msg, True)

                    if run_result == CommonVariables.success:
                        pre_plugin_errors = preResult.errors
                        for error in pre_plugin_errors:
                            if error.errorCode != CommonVariables.PrePost_PluginStatus_Success:
                                run_status = 'error'
                                run_result = error.errorCode
                                hutil.SetExtErrorCode(error.errorCode)
                                error_msg = 'PreScript failed for the plugin ' +  error.pluginName
                                error_msg = error_msg + ExtensionErrorCodeHelper.ExtensionErrorCodeHelper.StatusCodeStringBuilder(hutil.ExtErrorCode)
                                backup_logger.log(error_msg, True)
                                break

                    if run_result == CommonVariables.success:
                        post_plugin_errors = postResult.errors
                        for error in post_plugin_errors:
                            if error.errorCode != CommonVariables.PrePost_PluginStatus_Success:
                                run_status = 'error'
                                run_result = error.errorCode
                                hutil.SetExtErrorCode(error.errorCode)
                                error_msg = 'PostScript failed for the plugin ' +  error.pluginName
                                error_msg = error_msg + ExtensionErrorCodeHelper.ExtensionErrorCodeHelper.StatusCodeStringBuilder(hutil.ExtErrorCode)
                                backup_logger.log(error_msg, True)
                                break

                if appconsistentBackup:
                    if(PluginHostErrorCode != CommonVariables.PrePost_PluginStatus_Success):
                        hutil.SetExtErrorCode(PluginHostErrorCode)
                    pre_plugin_errors = preResult.errors
                    for error in pre_plugin_errors:
                        if error.errorCode != CommonVariables.PrePost_PluginStatus_Success:
                            hutil.SetExtErrorCode(error.errorCode)
                    post_plugin_errors = postResult.errors
                    for error in post_plugin_errors:
                        if error.errorCode != CommonVariables.PrePost_PluginStatus_Success:
                            hutil.SetExtErrorCode(error.errorCode)

                if run_result == CommonVariables.success and not doFsConsistentbackup and not (preResult.anyScriptFailed or postResult.anyScriptFailed):
                    run_status = 'success'
                    run_result = CommonVariables.success_appconsistent
                    hutil.SetExtErrorCode(ExtensionErrorCodeHelper.ExtensionErrorCodeEnum.success_appconsistent)
                    error_msg = 'Enable Succeeded with App Consistent Snapshot'
                    backup_logger.log(error_msg, True)

        else:
            run_status = 'error'
            run_result = CommonVariables.error_parameter
            hutil.SetExtErrorCode(ExtensionErrorCodeHelper.ExtensionErrorCodeEnum.error_parameter)
            error_msg = 'command is not correct'
            backup_logger.log(error_msg, True, 'Error')
    except Exception as e:
        errMsg = 'Failed to enable the extension with error: %s, stack trace: %s' % (str(e), traceback.format_exc())
        backup_logger.log(errMsg, True, 'Error')
        global_error_result = e

    """
    we do the final report here to get rid of the complex logic to handle the logging when file system be freezed issue.
    """
    try:
        if(global_error_result is not None):
            if(hasattr(global_error_result,'errno') and global_error_result.errno == 2):
                run_result = CommonVariables.error_12
                hutil.SetExtErrorCode(ExtensionErrorCodeHelper.ExtensionErrorCodeEnum.error_12)
            elif(para_parser is None):
                run_result = CommonVariables.error_parameter
                hutil.SetExtErrorCode(ExtensionErrorCodeHelper.ExtensionErrorCodeEnum.error_parameter)
            else:
                run_result = CommonVariables.error
                hutil.SetExtErrorCode(ExtensionErrorCodeHelper.ExtensionErrorCodeEnum.error)
            run_status = 'error'
            error_msg  += ('Enable failed.' + str(global_error_result))
        status_report_msg = None
        HandlerUtil.HandlerUtility.add_to_telemetery_data("extErrorCode", str(ExtensionErrorCodeHelper.ExtensionErrorCodeHelper.ExtensionErrorCodeNameDict[hutil.ExtErrorCode]))
        total_used_size = -1
        blob_report_msg, file_report_msg = get_status_to_report(run_status,run_result,error_msg, snapshot_info_array)
        if(hutil.is_status_file_exists()):
            status_report_to_file(file_report_msg)
        status_report_to_blob(blob_report_msg)
    except Exception as e:
        errMsg = 'Failed to log status in extension'
        backup_logger.log(errMsg, True, 'Error')
    if(para_parser is not None and para_parser.logsBlobUri is not None and para_parser.logsBlobUri != ""):
        backup_logger.commit(para_parser.logsBlobUri)
    else:
        backup_logger.log("the logs blob uri is not there, so do not upload log.")
        backup_logger.commit_to_local()

    sys.exit(0)
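Several of the daemon examples read the snapshot-thread timeout from /etc/azure/vmbackup.conf before doing any work. Below is a self-contained sketch of that lookup, keeping the same section/option names and falling back to the 60-second default when the file or option is missing; the try/except around the import is only there so the sketch runs on both Python 2 and 3.

try:
    import ConfigParser as config_parser   # Python 2
except ImportError:
    import configparser as config_parser   # Python 3

def read_thread_timeout(configfile='/etc/azure/vmbackup.conf', default=60):
    thread_timeout = str(default)
    try:
        config = config_parser.ConfigParser()
        config.read(configfile)             # a missing file is silently ignored
        if config.has_option('SnapshotThread', 'timeout'):
            thread_timeout = config.get('SnapshotThread', 'timeout')
    except Exception:
        pass                                # unreadable config: keep the default
    return thread_timeout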
Example #5
def daemon():
    global MyPatching, backup_logger, hutil, run_result, run_status, error_msg, freezer, para_parser, snapshot_done, snapshot_info_array, g_fsfreeze_on, total_used_size, patch_class_name, orig_distro, workload_patch
    # this uses the most recent file timestamp.
    hutil.do_parse_context('Executing')

    try:
        backup_logger.log('starting daemon', True)
        backup_logger.log(
            "patch_class_name: " + str(patch_class_name) +
            " and orig_distro: " + str(orig_distro), True)
        # handle the restoring scenario.
        mi = MachineIdentity()
        stored_identity = mi.stored_identity()
        if (stored_identity is None):
            mi.save_identity()
        else:
            current_identity = mi.current_identity()
            if (current_identity != stored_identity):
                current_seq_no = -1
                backup_logger.log(
                    "machine identity not same, set current_seq_no to " +
                    str(current_seq_no) + " " + str(stored_identity) + " " +
                    str(current_identity), True)
                hutil.set_last_seq(current_seq_no)
                mi.save_identity()
    except Exception as e:
        errMsg = 'Failed to validate sequence number with error: %s, stack trace: %s' % (
            str(e), traceback.format_exc())
        backup_logger.log(errMsg, True, 'Error')

    freezer = FsFreezer(patching=MyPatching, logger=backup_logger, hutil=hutil)
    global_error_result = None
    # precheck
    freeze_called = False
    configfile = '/etc/azure/vmbackup.conf'
    thread_timeout = str(60)
    snapshot_type = "appAndFileSystem"

    #Adding python version to the telemetry
    try:
        python_version_info = sys.version_info
        python_version = str(sys.version_info[0]) + '.' + str(
            sys.version_info[1]) + '.' + str(sys.version_info[2])
        HandlerUtil.HandlerUtility.add_to_telemetery_data(
            "pythonVersion", python_version)
    except Exception as e:
        errMsg = 'Failed to do retrieve python version with error: %s, stack trace: %s' % (
            str(e), traceback.format_exc())
        backup_logger.log(errMsg, True, 'Error')

    #fetching platform architecture
    try:
        architecture = platform.architecture()[0]
        HandlerUtil.HandlerUtility.add_to_telemetery_data(
            "platformArchitecture", architecture)
    except Exception as e:
        errMsg = 'Failed to do retrieve "platform architecture" with error: %s, stack trace: %s' % (
            str(e), traceback.format_exc())
        backup_logger.log(errMsg, True, 'Error')

    try:
        if (freezer.mounts is not None):
            hutil.partitioncount = len(freezer.mounts.mounts)
        backup_logger.log(" configfile " + str(configfile), True)
        config = ConfigParsers.ConfigParser()
        config.read(configfile)
        if config.has_option('SnapshotThread', 'timeout'):
            thread_timeout = config.get('SnapshotThread', 'timeout')
        if config.has_option('SnapshotThread', 'snapshot_type'):
            snapshot_type = config.get(
                'SnapshotThread',
                'snapshot_type')  #values - appOnly, appAndFileSystem
    except Exception as e:
        errMsg = 'cannot read config file or file not present'
        backup_logger.log(errMsg, True, 'Warning')
    backup_logger.log("final thread timeout" + thread_timeout, True)

    snapshot_info_array = None

    try:
        # we need to freeze the file system first
        backup_logger.log('starting daemon', True)
        """
        protectedSettings is the privateConfig passed from Powershell.
        WATCHOUT that, the _context_config are using the most freshest timestamp.
        if the time sync is alive, this should be right.
        """
        protected_settings = hutil._context._config['runtimeSettings'][0][
            'handlerSettings'].get('protectedSettings', {})
        public_settings = hutil._context._config['runtimeSettings'][0][
            'handlerSettings'].get('publicSettings')
        para_parser = ParameterParser(protected_settings, public_settings,
                                      backup_logger)
        hutil.update_settings_file()

        if (bool(public_settings) == False and not protected_settings):
            error_msg = "unable to load certificate"
            hutil.SetExtErrorCode(
                ExtensionErrorCodeHelper.ExtensionErrorCodeEnum.
                FailedHandlerGuestAgentCertificateNotFound)
            temp_result = CommonVariables.FailedHandlerGuestAgentCertificateNotFound
            temp_status = 'error'
            exit_with_commit_log(temp_status, temp_result, error_msg,
                                 para_parser)

        if (para_parser.commandStartTimeUTCTicks is not None
                and para_parser.commandStartTimeUTCTicks != ""):
            utcTicksLong = int(para_parser.commandStartTimeUTCTicks)
            backup_logger.log('utcTicks in long format' + str(utcTicksLong),
                              True)
            commandStartTime = convert_time(utcTicksLong)
            utcNow = datetime.datetime.utcnow()
            backup_logger.log(
                'command start time is ' + str(commandStartTime) +
                " and utcNow is " + str(utcNow), True)
            timespan = utcNow - commandStartTime
            MAX_TIMESPAN = 140 * 60  # in seconds
            # handle the machine identity for the restoration scenario.
            total_span_in_seconds = timedelta_total_seconds(timespan)
            backup_logger.log('timespan is ' + str(timespan) + ' ' +
                              str(total_span_in_seconds))

            if total_span_in_seconds > MAX_TIMESPAN:
                error_msg = "CRP timeout limit has reached, will not take snapshot."
                errMsg = error_msg
                hutil.SetExtErrorCode(
                    ExtensionErrorCodeHelper.ExtensionErrorCodeEnum.
                    FailedGuestAgentInvokedCommandTooLate)
                temp_result = CommonVariables.FailedGuestAgentInvokedCommandTooLate
                temp_status = 'error'
                exit_with_commit_log(temp_status, temp_result, error_msg,
                                     para_parser)

        if (para_parser.taskId is not None and para_parser.taskId != ""):
            backup_logger.log('taskId: ' + str(para_parser.taskId), True)
            exit_if_same_taskId(para_parser.taskId)
            taskIdentity = TaskIdentity()
            taskIdentity.save_identity(para_parser.taskId)

        hutil.save_seq()

        commandToExecute = para_parser.commandToExecute
        # validate all the required parameters here
        backup_logger.log(commandToExecute, True)
        if (CommonVariables.iaas_install_command in commandToExecute.lower()):
            backup_logger.log('install succeed.', True)
            run_status = 'success'
            error_msg = 'Install Succeeded'
            run_result = CommonVariables.success
            backup_logger.log(error_msg)
        elif (CommonVariables.iaas_vmbackup_command
              in commandToExecute.lower()):
            if (para_parser.backup_metadata is None
                    or para_parser.public_config_obj is None):
                run_result = CommonVariables.error_parameter
                hutil.SetExtErrorCode(ExtensionErrorCodeHelper.
                                      ExtensionErrorCodeEnum.error_parameter)
                run_status = 'error'
                error_msg = 'required field empty or not correct'
                backup_logger.log(error_msg, True, 'Error')
            else:
                backup_logger.log('commandToExecute is ' + commandToExecute,
                                  True)
                """
                make sure the log is not doing when the file system is freezed.
                """
                temp_status = 'success'
                temp_result = CommonVariables.ExtensionTempTerminalState
                temp_msg = 'Transitioning state in extension'
                blob_report_msg, file_report_msg = get_status_to_report(
                    temp_status, temp_result, temp_msg, None)
                status_report_to_file(file_report_msg)
                status_report_to_blob(blob_report_msg)
                #partial logging before freeze
                if (para_parser is not None
                        and para_parser.logsBlobUri is not None
                        and para_parser.logsBlobUri != ""):
                    backup_logger.commit_to_blob(para_parser.logsBlobUri)
                else:
                    backup_logger.log(
                        "the logs blob uri is not there, so do not upload log."
                    )
                backup_logger.log('commandToExecute is ' + commandToExecute,
                                  True)

                workload_patch = WorkloadPatch.WorkloadPatch(backup_logger)
                #new flow only if workload name is present in workload.conf
                if workload_patch.name != None:
                    backup_logger.log(
                        "workload backup enabled for workload: " +
                        workload_patch.name, True)
                    pre_skipped = False
                    if len(workload_patch.error_details) > 0:
                        backup_logger.log("skip pre and post")
                        pre_skipped = True
                    else:
                        workload_patch.pre()
                    if len(workload_patch.error_details) > 0:
                        backup_logger.log("file system consistent backup only")
                    #todo error handling
                    if snapshot_type == "appOnly":
                        g_fsfreeze_on = False
                    else:
                        g_fsfreeze_on = True
                    freeze_snapshot(thread_timeout)
                    if pre_skipped == False:
                        workload_patch.post()
                    workload_error = workload_patch.populateErrors()
                    if workload_error != None and g_fsfreeze_on == False:
                        run_status = 'error'
                        run_result = workload_error.errorCode
                        hutil.SetExtErrorCode(workload_error.errorCode)
                        error_msg = 'Workload Patch failed with error message: ' + workload_error.errorMsg
                        error_msg = error_msg + ExtensionErrorCodeHelper.ExtensionErrorCodeHelper.StatusCodeStringBuilder(
                            hutil.ExtErrorCode)
                        backup_logger.log(error_msg, True)
                    elif workload_error != None and g_fsfreeze_on == True:
                        hutil.SetExtErrorCode(workload_error.errorCode)
                        error_msg = 'Workload Patch failed with warning message: ' + workload_error.errorMsg
                        error_msg = error_msg + ExtensionErrorCodeHelper.ExtensionErrorCodeHelper.StatusCodeStringBuilder(
                            hutil.ExtErrorCode)
                        backup_logger.log(error_msg, True)
                    else:
                        if (run_status == CommonVariables.status_success):
                            run_status = 'success'
                            run_result = CommonVariables.success_appconsistent
                            hutil.SetExtErrorCode(
                                ExtensionErrorCodeHelper.
                                ExtensionErrorCodeEnum.success_appconsistent)
                            error_msg = 'Enable Succeeded with App Consistent Snapshot'
                            backup_logger.log(error_msg, True)
                        else:
                            error_msg = 'Enable failed in fsfreeze snapshot flow'
                            backup_logger.log(error_msg, True)
                else:
                    PluginHostObj = PluginHost(logger=backup_logger)
                    PluginHostErrorCode, dobackup, g_fsfreeze_on = PluginHostObj.pre_check(
                    )
                    doFsConsistentbackup = False
                    appconsistentBackup = False

                    if not (PluginHostErrorCode == CommonVariables.
                            FailedPrepostPluginhostConfigParsing
                            or PluginHostErrorCode
                            == CommonVariables.FailedPrepostPluginConfigParsing
                            or PluginHostErrorCode == CommonVariables.
                            FailedPrepostPluginhostConfigNotFound
                            or PluginHostErrorCode == CommonVariables.
                            FailedPrepostPluginhostConfigPermissionError
                            or PluginHostErrorCode ==
                            CommonVariables.FailedPrepostPluginConfigNotFound):
                        backup_logger.log(
                            'App Consistent Backup Enabled', True)
                        HandlerUtil.HandlerUtility.add_to_telemetery_data(
                            "isPrePostEnabled", "true")
                        appconsistentBackup = True

                    if (PluginHostErrorCode !=
                            CommonVariables.PrePost_PluginStatus_Success):
                        backup_logger.log(
                            'Triggering File System Consistent Backup because of error code'
                            +
                            ExtensionErrorCodeHelper.ExtensionErrorCodeHelper.
                            StatusCodeStringBuilder(PluginHostErrorCode), True)
                        doFsConsistentbackup = True

                    preResult = PluginHostResult()
                    postResult = PluginHostResult()

                    if not doFsConsistentbackup:
                        preResult = PluginHostObj.pre_script()
                        dobackup = preResult.continueBackup

                        if (g_fsfreeze_on == False
                                and preResult.anyScriptFailed):
                            dobackup = False

                    if dobackup:
                        freeze_snapshot(thread_timeout)

                    if not doFsConsistentbackup:
                        postResult = PluginHostObj.post_script()
                        if not postResult.continueBackup:
                            dobackup = False

                        if (g_fsfreeze_on == False
                                and postResult.anyScriptFailed):
                            dobackup = False

                    if not dobackup:
                        if run_result == CommonVariables.success and PluginHostErrorCode != CommonVariables.PrePost_PluginStatus_Success:
                            run_status = 'error'
                            run_result = PluginHostErrorCode
                            hutil.SetExtErrorCode(PluginHostErrorCode)
                            error_msg = 'Plugin Host Precheck Failed'
                            error_msg = error_msg + ExtensionErrorCodeHelper.ExtensionErrorCodeHelper.StatusCodeStringBuilder(
                                hutil.ExtErrorCode)
                            backup_logger.log(error_msg, True)

                        if run_result == CommonVariables.success:
                            pre_plugin_errors = preResult.errors
                            for error in pre_plugin_errors:
                                if error.errorCode != CommonVariables.PrePost_PluginStatus_Success:
                                    run_status = 'error'
                                    run_result = error.errorCode
                                    hutil.SetExtErrorCode(error.errorCode)
                                    error_msg = 'PreScript failed for the plugin ' + error.pluginName
                                    error_msg = error_msg + ExtensionErrorCodeHelper.ExtensionErrorCodeHelper.StatusCodeStringBuilder(
                                        hutil.ExtErrorCode)
                                    backup_logger.log(error_msg, True)
                                    break

                        if run_result == CommonVariables.success:
                            post_plugin_errors = postResult.errors
                            for error in post_plugin_errors:
                                if error.errorCode != CommonVariables.PrePost_PluginStatus_Success:
                                    run_status = 'error'
                                    run_result = error.errorCode
                                    hutil.SetExtErrorCode(error.errorCode)
                                    error_msg = 'PostScript failed for the plugin ' + error.pluginName
                                    error_msg = error_msg + ExtensionErrorCodeHelper.ExtensionErrorCodeHelper.StatusCodeStringBuilder(
                                        hutil.ExtErrorCode)
                                    backup_logger.log(error_msg, True)
                                    break

                    if appconsistentBackup:
                        if (PluginHostErrorCode !=
                                CommonVariables.PrePost_PluginStatus_Success):
                            hutil.SetExtErrorCode(PluginHostErrorCode)
                        pre_plugin_errors = preResult.errors
                        for error in pre_plugin_errors:
                            if error.errorCode != CommonVariables.PrePost_PluginStatus_Success:
                                hutil.SetExtErrorCode(error.errorCode)
                        post_plugin_errors = postResult.errors
                        for error in post_plugin_errors:
                            if error.errorCode != CommonVariables.PrePost_PluginStatus_Success:
                                hutil.SetExtErrorCode(error.errorCode)

                    if run_result == CommonVariables.success and not doFsConsistentbackup and not (
                            preResult.anyScriptFailed
                            or postResult.anyScriptFailed):
                        run_status = 'success'
                        run_result = CommonVariables.success_appconsistent
                        hutil.SetExtErrorCode(
                            ExtensionErrorCodeHelper.ExtensionErrorCodeEnum.
                            success_appconsistent)
                        error_msg = 'Enable Succeeded with App Consistent Snapshot'
                        backup_logger.log(error_msg, True)

        else:
            run_status = 'error'
            run_result = CommonVariables.error_parameter
            hutil.SetExtErrorCode(ExtensionErrorCodeHelper.
                                  ExtensionErrorCodeEnum.error_parameter)
            error_msg = 'command is not correct'
            backup_logger.log(error_msg, True, 'Error')
    except Exception as e:
        hutil.update_settings_file()
        errMsg = 'Failed to enable the extension with error: %s, stack trace: %s' % (
            str(e), traceback.format_exc())
        backup_logger.log(errMsg, True, 'Error')
        global_error_result = e
    """
    we do the final report here to get rid of the complex logic to handle the logging when file system be freezed issue.
    """
    try:
        if (global_error_result is not None):
            if (hasattr(global_error_result, 'errno')
                    and global_error_result.errno == 2):
                run_result = CommonVariables.error_12
                hutil.SetExtErrorCode(
                    ExtensionErrorCodeHelper.ExtensionErrorCodeEnum.error_12)
            elif (para_parser is None):
                run_result = CommonVariables.error_parameter
                hutil.SetExtErrorCode(ExtensionErrorCodeHelper.
                                      ExtensionErrorCodeEnum.error_parameter)
            else:
                run_result = CommonVariables.error
                hutil.SetExtErrorCode(
                    ExtensionErrorCodeHelper.ExtensionErrorCodeEnum.error)
            run_status = 'error'
            error_msg += ('Enable failed.' + str(global_error_result))
        status_report_msg = None
        hutil.SetExtErrorCode(
            run_result
        )  #setting extension errorcode at the end if missed somewhere
        HandlerUtil.HandlerUtility.add_to_telemetery_data(
            "extErrorCode",
            str(ExtensionErrorCodeHelper.ExtensionErrorCodeHelper.
                ExtensionErrorCodeNameDict[hutil.ExtErrorCode]))
        total_used_size = -1
        blob_report_msg, file_report_msg = get_status_to_report(
            run_status, run_result, error_msg, snapshot_info_array)
        if (hutil.is_status_file_exists()):
            status_report_to_file(file_report_msg)
        status_report_to_blob(blob_report_msg)
    except Exception as e:
        errMsg = 'Failed to log status in extension'
        backup_logger.log(errMsg, True, 'Error')
    if (para_parser is not None and para_parser.logsBlobUri is not None
            and para_parser.logsBlobUri != ""):
        backup_logger.commit(para_parser.logsBlobUri)
    else:
        backup_logger.log(
            "the logs blob uri is not there, so do not upload log.")
        backup_logger.commit_to_local()

    sys.exit(0)
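Several of the examples start by comparing a stored machine identity against the current one and resetting the handler's last sequence number when they differ, so that the backup command is reprocessed after a VM restore. The sketch below shows that idiom in isolation; the file path and the way the current identity is obtained are illustrative only, since the real MachineIdentity class encapsulates both.

import os

def check_for_restore(hutil, current_identity, identity_file='/etc/azure/machine_identity'):
    # identity_file is a hypothetical location; MachineIdentity manages its own store
    stored_identity = None
    if os.path.exists(identity_file):
        with open(identity_file) as f:
            stored_identity = f.read().strip()
    if stored_identity is None or stored_identity != current_identity:
        if stored_identity is not None:
            # identity changed: this VM was likely restored, so replay the last sequence number
            hutil.set_last_seq(-1)
        with open(identity_file, 'w') as f:
            f.write(current_identity)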
Example #6
def daemon():
    global MyPatching,backup_logger,hutil,run_result,run_status,error_msg,freezer,para_parser,snapshot_done
    # this uses the most recent file timestamp.
    hutil.do_parse_context('Executing')
    freezer = FsFreezer(patching= MyPatching, logger = backup_logger)
    global_error_result = None
    # precheck
    freeze_called = False
    configfile='/etc/azure/vmbackup.conf'
    thread_timeout=str(60)
    safe_freeze_on = True
    try:
        config = ConfigParser.ConfigParser()
        config.read(configfile)
        thread_timeout= config.get('SnapshotThread','timeout')
        safe_freeze_on=config.get('SnapshotThread','safefreeze')
    except Exception as e:
        errMsg='cannot read config file or file not present'
        backup_logger.log(errMsg, False, 'Warning')
    backup_logger.log("final thread timeout" + thread_timeout, True)
    backup_logger.log(" safe freeze flag " + str(safe_freeze_on), True)

    try:
        # we need to freeze the file system first
        backup_logger.log('starting daemon', True)
        """
        protectedSettings is the privateConfig passed from PowerShell.
        Note that _context._config uses the most recent timestamp;
        if time sync is working, this should be correct.
        """

        protected_settings = hutil._context._config['runtimeSettings'][0]['handlerSettings'].get('protectedSettings')
        public_settings = hutil._context._config['runtimeSettings'][0]['handlerSettings'].get('publicSettings')
        para_parser = ParameterParser(protected_settings, public_settings)

        commandToExecute = para_parser.commandToExecute
        #validate all the required parameters here
        if(commandToExecute.lower() == CommonVariables.iaas_install_command):
            backup_logger.log('install succeed.',True)
            run_status = 'success'
            error_msg = 'Install Succeeded'
            run_result = CommonVariables.success
            backup_logger.log(error_msg)
        elif(commandToExecute.lower() == CommonVariables.iaas_vmbackup_command):
            if(para_parser.backup_metadata is None or para_parser.public_config_obj is None or para_parser.private_config_obj is None):
                run_result = CommonVariables.error_parameter
                run_status = 'error'
                error_msg = 'required field empty or not correct'
                backup_logger.log(error_msg, False, 'Error')
            else:
                backup_logger.log('commandToExecute is ' + commandToExecute, True)
                """
                make sure we do not log while the file system is frozen.
                """
                if para_parser.vmType == CommonVariables.VmTypeV1 :
                    temp_status= 'success'
                    temp_result=CommonVariables.ExtensionTempTerminalState
                else :
                    temp_status= 'transitioning'
                    temp_result=CommonVariables.success
                temp_msg='Transitioning state in extension'
                status_report(temp_status,temp_result,temp_msg)
                hutil.do_status_report('Enable', temp_status, str(temp_result), temp_msg)
                backup_logger.log('doing freeze now...', True)
                if(safe_freeze_on==True):
                    freeze_snapshot(thread_timeout)
                else:
                    snapshot_thread = Thread(target = snapshot)
                    start_time=datetime.datetime.utcnow()
                    snapshot_thread.start()
                    snapshot_thread.join(float(thread_timeout))
                    if not snapshot_done:
                        run_result = CommonVariables.error
                        run_status = 'error'
                        error_msg = 'T:W Snapshot timeout'
                        backup_logger.log(error_msg, False, 'Warning')
                    end_time=datetime.datetime.utcnow()
                    time_taken=end_time-start_time
                    backup_logger.log('total time taken..' + str(time_taken))
                    for i in range(0,3):
                        unfreeze_result = freezer.unfreezeall()
                        backup_logger.log('unfreeze result ' + str(unfreeze_result))
                        if(unfreeze_result is not None):
                            if len(unfreeze_result.errors) > 0:
                                error_msg += ('unfreeze with error: ' + str(unfreeze_result.errors))
                                backup_logger.log(error_msg, False, 'Warning')
                            else:
                                backup_logger.log('unfreeze succeeded')
                                break
                    backup_logger.log('unfreeze ends...')
                
        else:
            run_status = 'error'
            run_result = CommonVariables.error_parameter
            error_msg = 'command is not correct'
            backup_logger.log(error_msg, False, 'Error')
    except Exception as e:
        errMsg = 'Failed to enable the extension with error: %s, stack trace: %s' % (str(e), traceback.format_exc())
        backup_logger.log(errMsg, False, 'Error')
        global_error_result = e

    """
    we do the final report here to avoid the complex logic of logging while the file system is frozen.
    """
    if(global_error_result is not None):
        if(hasattr(global_error_result,'errno') and global_error_result.errno == 2):
            run_result = CommonVariables.error_12
        elif(para_parser is None):
            run_result = CommonVariables.error_parameter
        else:
            run_result = CommonVariables.error
        run_status = 'error'
        error_msg  += ('Enable failed.' + str(global_error_result))
    status_report_msg = None
    status_report(run_status,run_result,error_msg)
    if(para_parser is not None and para_parser.logsBlobUri is not None and para_parser.logsBlobUri != ""):
        backup_logger.commit(para_parser.logsBlobUri)
    else:
        backup_logger.log("the logs blob uri is not there, so do not upload log.")
        backup_logger.commit_to_local()

    hutil.do_exit(0, 'Enable', run_status, str(run_result), error_msg)
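
The daemon above drives its non-safe-freeze path from two sources: a [SnapshotThread] section in /etc/azure/vmbackup.conf with timeout and safefreeze keys, and a snapshot run in a worker thread that is abandoned if it does not finish in time. The sketch below only illustrates that pattern and is not the extension's code; read_snapshot_settings, snapshot_with_timeout and do_snapshot are hypothetical names, and Python 2 is assumed to match the examples.

# Minimal sketch (assumed helper names, not the extension's code): read the
# SnapshotThread settings with safe defaults, then run a snapshot callable in a
# worker thread bounded by a join timeout.
import datetime
import threading
import ConfigParser  # Python 2 module name, matching the examples above

def read_snapshot_settings(configfile='/etc/azure/vmbackup.conf'):
    # defaults mirror the ones daemon() falls back to
    thread_timeout = 60.0
    safe_freeze_on = True
    try:
        config = ConfigParser.ConfigParser()
        config.read(configfile)
        thread_timeout = config.getfloat('SnapshotThread', 'timeout')
        safe_freeze_on = config.getboolean('SnapshotThread', 'safefreeze')
    except Exception:
        pass  # config file or keys missing: keep the defaults
    return thread_timeout, safe_freeze_on

def snapshot_with_timeout(do_snapshot, timeout_seconds):
    # do_snapshot stands in for Snapshotter(...).snapshotall(...)
    done = {'flag': False}
    def worker():
        do_snapshot()
        done['flag'] = True
    start = datetime.datetime.utcnow()
    worker_thread = threading.Thread(target=worker)
    worker_thread.start()
    worker_thread.join(timeout_seconds)
    elapsed = datetime.datetime.utcnow() - start
    return done['flag'], elapsed

If the returned flag is still False after the join, the caller treats the run as a snapshot timeout ('T:W Snapshot timeout' above) and still proceeds to unfreeze the file systems.
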
def enable():
    #this is using the most recent file timestamp.
    hutil.do_parse_context('Enable')

    freezer = FsFreezer(backup_logger)
    unfreeze_result = None
    snapshot_result = None
    freeze_result = None
    global_error_result = None
    para_parser = None
    run_result = 1
    error_msg = ''
    run_status = None
    # precheck
    freeze_called = False
    try:
        # we need to freeze the file system first
        backup_logger.log('starting to enable', True)

        # handle the restoring scenario.
        mi = MachineIdentity()
        stored_identity = mi.stored_identity()
        if (stored_identity is None):
            mi.save_identity()
        else:
            current_identity = mi.current_identity()
            if (current_identity != stored_identity):
                current_seq_no = -1
                backup_logger.log(
                    "machine identity not same, set current_seq_no to " +
                    str(current_seq_no) + " " + str(stored_identity) + " " +
                    str(current_identity), True)
                hutil.set_inused_config_seq(current_seq_no)
                mi.save_identity()

        hutil.save_seq()
        """
        protectedSettings is the privateConfig passed from PowerShell.
        Note that _context._config uses the most recent timestamp;
        if time sync is working, this should be correct.
        """
        protected_settings = hutil._context._config['runtimeSettings'][0][
            'handlerSettings'].get('protectedSettings')
        public_settings = hutil._context._config['runtimeSettings'][0][
            'handlerSettings'].get('publicSettings')
        para_parser = ParameterParser(protected_settings, public_settings)

        if (para_parser.commandStartTimeUTCTicks is not None
                and para_parser.commandStartTimeUTCTicks != ""):
            utcTicksLong = long(para_parser.commandStartTimeUTCTicks)
            commandStartTime = convert_time(utcTicksLong)
            utcNow = datetime.datetime.utcnow()
            backup_logger.log('command start time is ' +
                              str(commandStartTime) + " and utcNow is " +
                              str(utcNow))
            timespan = utcNow - commandStartTime
            THIRTY_MINUTES = 30 * 60  # in seconds
            # handle the machine identity for the restoration scenario.
            total_span_in_seconds = timespan.days * 24 * 60 * 60 + timespan.seconds
            backup_logger.log('timespan is ' + str(timespan) + ' ' +
                              str(total_span_in_seconds))
            if (abs(total_span_in_seconds) > THIRTY_MINUTES):
                error_msg = 'the call time stamp is out of date. so skip it.'
                exit_with_commit_log(error_msg, para_parser)

        if (para_parser.taskId is not None and para_parser.taskId != ""):
            taskIdentity = TaskIdentity()
            taskIdentity.save_identity(para_parser.taskId)
        commandToExecute = para_parser.commandToExecute
        #validate all the required parameters here
        if (commandToExecute.lower() == CommonVariables.iaas_install_command):
            backup_logger.log('install succeed.', True)
            run_status = 'success'
            error_msg = 'Install Succeeded'
            run_result = CommonVariables.success
            backup_logger.log(error_msg)
        elif (commandToExecute.lower() == CommonVariables.iaas_vmbackup_command):
            if (para_parser.backup_metadata is None
                    or para_parser.public_config_obj is None
                    or para_parser.private_config_obj is None):
                run_result = CommonVariables.error_parameter
                run_status = 'error'
                error_msg = 'required field empty or not correct'
                backup_logger.log(error_msg, False, 'Error')
            else:
                backup_logger.log('commandToExecute is ' + commandToExecute,
                                  True)
                """
                make sure we do not log while the file system is frozen.
                """
                backup_logger.log('doing freeze now...', True)
                freeze_called = True
                freeze_result = freezer.freezeall()
                backup_logger.log('freeze result ' + str(freeze_result))

                # check whether the freeze succeeded first
                if (freeze_result is not None
                        and len(freeze_result.errors) > 0):
                    run_result = CommonVariables.error
                    run_status = 'error'
                    error_msg = 'Enable failed with error: ' + str(
                        freeze_result)
                    backup_logger.log(error_msg, False, 'Warning')
                else:
                    backup_logger.log('doing snapshot now...')
                    snap_shotter = Snapshotter(backup_logger)
                    snapshot_result = snap_shotter.snapshotall(para_parser)
                    backup_logger.log('snapshotall ends...')
                    if (snapshot_result is not None
                            and len(snapshot_result.errors) > 0):
                        error_msg = 'snapshot result: ' + str(snapshot_result)
                        run_result = CommonVariables.error
                        run_status = 'error'
                        backup_logger.log(error_msg, False, 'Error')
                    else:
                        run_result = CommonVariables.success
                        run_status = 'success'
                        error_msg = 'Enable Succeeded'
                        backup_logger.log(error_msg)
        else:
            run_status = 'error'
            run_result = CommonVariables.error_parameter
            error_msg = 'command is not correct'
            backup_logger.log(error_msg, False, 'Error')
    except Exception as e:
        errMsg = 'Failed to enable the extension with error: %s, stack trace: %s' % (
            str(e), traceback.format_exc())
        backup_logger.log(errMsg, False, 'Error')
        global_error_result = e
    finally:
        backup_logger.log('doing unfreeze now...')
        if (freeze_called):
            unfreeze_result = freezer.unfreezeall()
            backup_logger.log('unfreeze result ' + str(unfreeze_result))
            if (unfreeze_result is not None
                    and len(unfreeze_result.errors) > 0):
                error_msg += ('Enable Succeeded with error: ' +
                              str(unfreeze_result.errors))
                backup_logger.log(error_msg, False, 'Warning')
            backup_logger.log('unfreeze ends...')

    if (para_parser is not None and para_parser.logsBlobUri is not None):
        backup_logger.commit(para_parser.logsBlobUri)
    else:
        backup_logger.commit_to_local()
    """
    we do the final report here to avoid the complex logic of logging while the file system is frozen.
    """
    if (global_error_result is not None):
        if (hasattr(global_error_result, 'errno')
                and global_error_result.errno == 2):
            run_result = CommonVariables.error_12
        elif (para_parser is None):
            run_result = CommonVariables.error_parameter
        else:
            run_result = CommonVariables.error
        run_status = 'error'
        error_msg += ('Enable failed.' + str(global_error_result))

    if (para_parser is not None and para_parser.statusBlobUri is not None):
        do_backup_status_report(operation='Enable',status = run_status,\
                                status_code=str(run_result), \
                                message=error_msg,\
                                taskId=para_parser.taskId,\
                                commandStartTimeUTCTicks=para_parser.commandStartTimeUTCTicks,\
                                blobUri=para_parser.statusBlobUri)

    hutil.do_exit(0, 'Enable', run_status, str(run_result), error_msg)
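
enable() above calls a convert_time helper whose body is not shown and then computes the elapsed seconds by hand, since timedelta.total_seconds() only exists from Python 2.7. The sketch below shows plausible implementations; the tick formula matches the inline version in Example #9 below and the seconds formula matches the inline computation above, but these helper bodies are assumptions, not the extension's code.

import datetime

def convert_time(utc_ticks):
    # .NET-style ticks are 100-nanosecond intervals since 0001-01-01 00:00:00 UTC,
    # so ticks / 10 gives microseconds (same formula as Example #9)
    return datetime.datetime(1, 1, 1) + datetime.timedelta(microseconds=utc_ticks / 10)

def timedelta_total_seconds(span):
    # timedelta.total_seconds() is unavailable before Python 2.7, so compute it by hand
    return span.days * 24 * 60 * 60 + span.seconds

# staleness check as used above (illustrative):
# span = datetime.datetime.utcnow() - convert_time(long(ticks))
# skip the request when abs(timedelta_total_seconds(span)) > 30 * 60
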
Example #8
def enable():
    freezer = FsFreezer(backup_logger)
    unfreeze_result = None
    snapshot_result = None
    freeze_result = None
    global_error_result = None
    para_parser = None
    run_result = 1
    error_msg = ''
    run_status = None
    # precheck
    freeze_called = False
    try:
        hutil.do_parse_context('Enable')

        # handle the restoring scenario.
        mi = MachineIdentity()
        stored_identity = mi.stored_identity()
        if (stored_identity is None):
            mi.save_identity()
            hutil.exit_if_enabled()
        else:
            current_identity = mi.current_identity()
            hutil.log(" current identity " + current_identity)
            if (current_identity != stored_identity):
                current_seq_no = hutil._get_current_seq_no(
                    hutil._context._config_dir)
                backup_logger.log(
                    "machine identity not same, set current_seq_no to " +
                    str(current_seq_no) + " " + str(stored_identity) + " " +
                    str(current_identity), True)
                #remove other .config files, or the waagent would report the 3
                #statuses...
                for subdir, dirs, files in os.walk(hutil._context._config_dir):
                    for file in files:
                        try:
                            cur_seq_no = int(
                                os.path.basename(file).split('.')[0])
                            if (cur_seq_no != current_seq_no):
                                os.remove(
                                    join(hutil._context._config_dir, file))
                        except ValueError:
                            continue
                hutil.set_inused_config_seq(current_seq_no)
                mi.save_identity()
            else:
                hutil.exit_if_enabled()

        # we need to freeze the file system first
        backup_logger.log('starting to enable', True)
        """
        protectedSettings is the privateConfig passed from PowerShell.
        """
        protected_settings = hutil._context._config['runtimeSettings'][0][
            'handlerSettings'].get('protectedSettings')
        public_settings = hutil._context._config['runtimeSettings'][0][
            'handlerSettings'].get('publicSettings')
        para_parser = ParameterParser(protected_settings, public_settings)

        commandToExecute = para_parser.commandToExecute
        #validate all the required parameters here
        if (commandToExecute.lower() == CommonVariables.iaas_install_command):
            backup_logger.log("install succeed.", True)
            run_status = 'success'
            error_msg = 'Install Succeeded'
            run_result = 0
            backup_logger.log(error_msg)
        elif (commandToExecute.lower() == CommonVariables.iaas_vmbackup_command):
            if (para_parser.backup_metadata is None
                    or para_parser.public_config_obj is None
                    or para_parser.private_config_obj is None):
                run_result = 11
                run_status = 'error'
                error_msg = 'required field empty or not correct'
                backup_logger.log(error_msg, False, 'Error')
            else:
                backup_logger.log('commandToExecute is ' + commandToExecute,
                                  True)
                """
                make sure we do not log while the file system is frozen.
                """
                backup_logger.log("doing freeze now...", True)
                freeze_called = True
                freeze_result = freezer.freezeall()
                backup_logger.log("freeze result " + str(freeze_result))

                # check whether the freeze succeeded first
                if (freeze_result is not None
                        and len(freeze_result.errors) > 0):
                    run_result = 2
                    run_status = 'error'
                    error_msg = 'Enable failed with error' + str(freeze_result)
                    backup_logger.log(error_msg, False, 'Warning')
                else:
                    backup_logger.log("doing snapshot now...")
                    snap_shotter = Snapshotter(backup_logger)
                    snapshot_result = snap_shotter.snapshotall(para_parser)
                    backup_logger.log("snapshotall ends...")
                    if (snapshot_result is not None
                            and len(snapshot_result.errors) > 0):
                        error_msg = "snapshot result: " + str(snapshot_result)
                        run_result = 2
                        run_status = 'error'
                        backup_logger.log(error_msg, False, 'Error')
                    else:
                        run_result = 1
                        run_status = 'success'
                        error_msg = 'Enable Succeeded'
                        backup_logger.log(error_msg)
        else:
            run_status = 'error'
            run_result = 11
            error_msg = 'command is not correct'
            backup_logger.log(error_msg, False, 'Error')
    except Exception as e:
        errMsg = "Failed to enable the extension with error: %s, stack trace: %s" % (
            str(e), traceback.format_exc())
        backup_logger.log(errMsg, False, 'Error')
        print(errMsg)
        global_error_result = e
    finally:
        backup_logger.log("doing unfreeze now...")
        if (freeze_called):
            unfreeze_result = freezer.unfreezeall()
            backup_logger.log("unfreeze result " + str(unfreeze_result))
            if (unfreeze_result is not None
                    and len(unfreeze_result.errors) > 0):
                error_msg += ('Enable Succeeded with error: ' +
                              str(unfreeze_result.errors))
                backup_logger.log(error_msg, False, 'Warning')
            backup_logger.log("unfreeze ends...")

    if (para_parser is not None):
        backup_logger.commit(para_parser.logsBlobUri)
    """
    we do the final report here to avoid the complex logic of logging while the file system is frozen.
    """
    if (global_error_result is not None):
        if (hasattr(global_error_result, 'errno')
                and global_error_result.errno == 2):
            run_result = 12
        elif (para_parser is None):
            run_result = 11
        else:
            run_result = 2
        run_status = 'error'
        error_msg += ('Enable failed.' + str(global_error_result))

    hutil.do_exit(0, 'Enable', run_status, str(run_result), error_msg)
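
Example #8 above and the two examples that follow share the same control-flow skeleton: freeze all file systems, take the snapshot, and guarantee the unfreeze in a finally block even when freezing or snapshotting raised. The condensed sketch below only restates that skeleton; freezer and snap_shotter are stand-ins exposing the freezeall/unfreezeall/snapshotall methods used above, and the return values are illustrative.

def freeze_snapshot_unfreeze(freezer, snap_shotter, para_parser, logger):
    # condensed version of the pattern in the surrounding examples
    freeze_called = False
    try:
        freeze_called = True  # set before freezing so partial freezes are undone too
        freeze_result = freezer.freezeall()
        if freeze_result is not None and len(freeze_result.errors) > 0:
            return 'error', 'freeze failed: ' + str(freeze_result.errors)
        snapshot_result = snap_shotter.snapshotall(para_parser)
        if snapshot_result is not None and len(snapshot_result.errors) > 0:
            return 'error', 'snapshot failed: ' + str(snapshot_result.errors)
        return 'success', 'Enable Succeeded'
    finally:
        # always unfreeze what was frozen; log problems without masking the primary result
        if freeze_called:
            unfreeze_result = freezer.unfreezeall()
            if unfreeze_result is not None and len(unfreeze_result.errors) > 0:
                logger.log('unfreeze errors: ' + str(unfreeze_result.errors),
                           False, 'Warning')
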
Example #9
def enable():
    freezer = FsFreezer(backup_logger)
    unfreeze_result = None
    snapshot_result = None
    freeze_result = None
    global_error_result = None
    para_parser = None
    run_result = 1
    error_msg = ''
    run_status = None
    # precheck
    freeze_called = False
    try:
        hutil.do_parse_context('Enable')

        # we need to freeze the file system first
        backup_logger.log('starting to enable', True)

        """
        protectedSettings is the privateConfig passed from PowerShell.
        Note that _context._config uses the most recent timestamp;
        if time sync is working, this should be correct.
        """
        protected_settings = hutil._context._config['runtimeSettings'][0]['handlerSettings'].get('protectedSettings')
        public_settings = hutil._context._config['runtimeSettings'][0]['handlerSettings'].get('publicSettings')
        para_parser = ParameterParser(protected_settings, public_settings)
        commandStartTime = datetime.datetime(1, 1, 1) + datetime.timedelta(microseconds = long(para_parser.commandStartTimeUTCTicks) / 10)
        
        utcNow = datetime.datetime.utcnow()
        backup_logger.log('command start time is ' + str(commandStartTime) + " and utcNow is " + str(utcNow))
        timespan = utcNow - commandStartTime
        TWENTY_MINUTES = 20 * 60
        taskIdentity = TaskIdentity()
        currentTaskIdentity = taskIdentity.stored_identity()
        # handle the machine identity for the restoration scenario.
        backup_logger.log('timespan is '+str(timespan))
        if(abs(timespan.total_seconds()) > TWENTY_MINUTES):
            error_msg = 'the call time stamp is out of date.'
            exit_with_commit_log(error_msg,para_parser)

        elif(para_parser.taskId == currentTaskIdentity):
            error_msg = 'the task id is handled.'
            exit_with_commit_log(error_msg,para_parser)
        else:
            taskIdentity.save_identity(para_parser.taskId)
            commandToExecute = para_parser.commandToExecute
            #validate all the required parameters here
            if(commandToExecute.lower() == CommonVariables.iaas_install_command):
                backup_logger.log("install succeed.",True)
                run_status = 'success'
                error_msg = 'Install Succeeded'
                run_result = CommonVariables.success
                backup_logger.log(error_msg)
            elif(commandToExecute.lower() == CommonVariables.iaas_vmbackup_command):
                if(para_parser.backup_metadata is None or para_parser.public_config_obj is None or para_parser.private_config_obj is None):
                    run_result = CommonVariables.parameter_error
                    run_status = 'error'
                    error_msg = 'required field empty or not correct'
                    backup_logger.log(error_msg, False, 'Error')
                else:
                    backup_logger.log('commandToExecute is ' + commandToExecute, True)
                    """
                    make sure we do not log while the file system is frozen.
                    """
                    backup_logger.log("doing freeze now...", True)
                    freeze_called = True
                    freeze_result = freezer.freezeall()
                    backup_logger.log("freeze result " + str(freeze_result))
                    
                    # check whether the freeze succeeded first
                    if(freeze_result is not None and len(freeze_result.errors) > 0):
                        run_result = CommonVariables.error
                        run_status = 'error'
                        error_msg = 'Enable failed with error' + str(freeze_result)
                        backup_logger.log(error_msg, False, 'Warning')
                    else:
                        backup_logger.log("doing snapshot now...")
                        snap_shotter = Snapshotter(backup_logger)
                        snapshot_result = snap_shotter.snapshotall(para_parser)
                        backup_logger.log("snapshotall ends...")
                        if(snapshot_result is not None and len(snapshot_result.errors) > 0):
                            error_msg = "snapshot result: " + str(snapshot_result)
                            run_result = CommonVariables.error
                            run_status = 'error'
                            backup_logger.log(error_msg, False, 'Error')
                        else:
                            run_result = CommonVariables.success
                            run_status = 'success'
                            error_msg = 'Enable Succeeded'
                            backup_logger.log(error_msg)
            else:
                run_status = 'error'
                run_result = CommonVariables.parameter_error
                error_msg = 'command is not correct'
                backup_logger.log(error_msg, False, 'Error')
    except Exception as e:
        errMsg = "Failed to enable the extension with error: %s, stack trace: %s" % (str(e), traceback.format_exc())
        backup_logger.log(errMsg, False, 'Error')
        global_error_result = e
    finally:
        backup_logger.log("doing unfreeze now...")
        if(freeze_called):
            unfreeze_result = freezer.unfreezeall()
            backup_logger.log("unfreeze result " + str(unfreeze_result))
            if(unfreeze_result is not None and len(unfreeze_result.errors) > 0):
                error_msg += ('Enable Succeeded with error: ' + str(unfreeze_result.errors))
                backup_logger.log(error_msg, False, 'Warning')
            backup_logger.log("unfreeze ends...")

    if(para_parser is not None):
        backup_logger.commit(para_parser.logsBlobUri)
    """
    we do the final report here to avoid the complex logic of logging while the file system is frozen.
    """
    if(global_error_result is not None):
        if(hasattr(global_error_result,'errno') and global_error_result.errno == 2):
            run_result = CommonVariables.error_12
        elif(para_parser is None):
            run_result = CommonVariables.parameter_error
        else:
            run_result = CommonVariables.error
        run_status = 'error'
        error_msg  += ('Enable failed.' + str(global_error_result))
    print("para_parser is "+str(para_parser))

    do_status_report(operation='Enable',status = run_status,\
                     status_code=str(run_result), \
                     message=error_msg,\
                     taskId=para_parser.taskId,\
                     commandStartTimeUTCTicks=para_parser.commandStartTimeUTCTicks,\
                     blobUri=para_parser.statusBlobUri)

    hutil.do_exit(0, 'Enable', run_status, str(run_result), error_msg)
Example #10
def enable():
    freezer = FsFreezer(backup_logger)
    unfreeze_result     = None
    snapshot_result     = None
    freeze_result       = None
    global_error_result = None
    para_parser         = None
    run_result          = 1
    error_msg           = ''
    run_status          = None
    # precheck 
    freeze_called       = False
    try:
        hutil.do_parse_context('Enable')

        # handle the restoring scenario.
        mi = MachineIdentity()
        stored_identity = mi.stored_identity()
        if(stored_identity is None):
            mi.save_identity()
            hutil.exit_if_enabled()
        else:
            current_identity = mi.current_identity()
            hutil.log(" current identity " + current_identity)
            if(current_identity != stored_identity):
                current_seq_no = hutil._get_current_seq_no(hutil._context._config_dir)
                backup_logger.log("machine identity not same, set current_seq_no to " + str(current_seq_no) + " " + str(stored_identity) + " " + str(current_identity), True)
                #remove other .config files, or the waagent would report the 3
                #statuses...
                
                for subdir, dirs, files in os.walk(hutil._context._config_dir):
                    for file in files:
                        try:
                            cur_seq_no = int(os.path.basename(file).split('.')[0])
                            if(cur_seq_no != current_seq_no):
                                os.remove(join(hutil._context._config_dir,file))
                        except ValueError:
                            continue
                hutil.set_inused_config_seq(current_seq_no)
                mi.save_identity()
            else:
                hutil.exit_if_enabled()

        # we need to freeze the file system first
        backup_logger.log('starting to enable', True)
        """
        protectedSettings is the privateConfig passed from PowerShell.
        """
        protected_settings = hutil._context._config['runtimeSettings'][0]['handlerSettings'].get('protectedSettings')
        public_settings = hutil._context._config['runtimeSettings'][0]['handlerSettings'].get('publicSettings')
        para_parser = ParameterParser(protected_settings, public_settings)

        commandToExecute = para_parser.commandToExecute
        #validate all the required parameters here
        if(commandToExecute.lower() == CommonVariables.iaas_install_command):
            backup_logger.log("install succeed.",True)
            run_status = 'success'
            error_msg  = 'Install Succeeded'
            run_result = 0
            backup_logger.log(error_msg)
        elif(commandToExecute.lower() == CommonVariables.iaas_vmbackup_command):
            if(para_parser.backup_metadata is None or para_parser.public_config_obj is None or para_parser.private_config_obj is None):
                run_result = 11
                run_status = 'error'
                error_msg  = 'required field empty or not correct'
                backup_logger.log(error_msg, False, 'Error')
            else:
                backup_logger.log('commandToExecute is ' + commandToExecute, True)
                """
                make sure we do not log while the file system is frozen.
                """
                backup_logger.log("doing freeze now...", True)
                freeze_called = True
                freeze_result = freezer.freezeall()
                backup_logger.log("freeze result " + str(freeze_result))
                    
                # check whether the freeze succeeded first
                if(freeze_result is not None and len(freeze_result.errors) > 0 ):
                    run_result = 2
                    run_status = 'error'
                    error_msg  = 'Enable failed with error' + str(freeze_result.errors)
                    backup_logger.log(error_msg, False, 'Warning')
                else:
                    backup_logger.log("doing snapshot now...")
                    snap_shotter    = Snapshotter(backup_logger)
                    snapshot_result = snap_shotter.snapshotall(para_parser)
                    backup_logger.log("snapshotall ends...")
                    if(snapshot_result is not None and len(snapshot_result.errors) > 0):
                        error_msg  = "snapshot result: " + str(snapshot_result.errors)
                        run_result = 2
                        run_status = 'error'
                        backup_logger.log(error_msg, False, 'Error')
                    else:
                        run_result = 1
                        run_status = 'success'
                        error_msg  = 'Enable Succeeded'
                        backup_logger.log(error_msg)
        else:
            run_status = 'error'
            run_result = 11
            error_msg = 'command is not correct'
            backup_logger.log(error_msg, False, 'Error')
    except Exception as e:
        errMsg = "Failed to enable the extension with error: %s, stack trace: %s" % (str(e), traceback.format_exc())
        backup_logger.log(errMsg, False, 'Error')
        print(errMsg)
        global_error_result = e
    finally:
        backup_logger.log("doing unfreeze now...")
        if(freeze_called):
            unfreeze_result = freezer.unfreezeall()
            backup_logger.log("unfreeze result " + str(unfreeze_result))
            if(unfreeze_result is not None and len(unfreeze_result.errors) > 0):
                error_msg += ('Enable Succeeded with error: ' + str(unfreeze_result.errors))
                backup_logger.log(error_msg, False, 'Warning')
            backup_logger.log("unfreeze ends...")

    if(para_parser is not None):
        backup_logger.commit(para_parser.logsBlobUri)
    """
    we do the final report here to avoid the complex logic of logging while the file system is frozen.
    """
    if(global_error_result  is not None):
        if(hasattr(global_error_result,'errno') and global_error_result.errno==2):
            run_result = 12
        elif(para_parser is None):
            run_result = 11
        else:
            run_result = 2
        run_status = 'error'
        error_msg  += ('Enable failed.' + str(global_error_result))

    hutil.do_exit(0, 'Enable', run_status, str(run_result), error_msg)