Example #1
 def __execute_command(self, command):
     gen_log.info(command)
     is_success, message = process_helper.ProcessHelper(
         command).execute_command()
     gen_log.info(message)
     if not is_success:
         raise BackupCommandExecuteException(message)
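
All of these snippets log through Tornado's gen_log, which is simply the standard logging.Logger named "tornado.general". A minimal sketch of how that logger could be made visible in a script, assuming a plain logging.basicConfig call rather than any project-specific setup (tornado.options.parse_command_line() would also enable Tornado's pretty logging):

import logging

from tornado.log import gen_log

# gen_log is logging.getLogger("tornado.general"); a basicConfig call
# is enough to route its records to stderr.
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s %(levelname)s %(name)s: %(message)s')

gen_log.info('logging configured')
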
Example #2
def full_backup(bucket):
    gen_log.info('full_amount_backup')
    full_amount_dir_path = DatabasesBackup().full_amount_backup()
    DatabasesBackup().full_file_transfer_oss(full_amount_dir_path, bucket)
    DatabasesBackup().remote_deleteout_file(bucket)
    DatabasesBackup().deleteout_file()
    FtpUploader.compress_full_file()
Example #3
 def close(self):
     self.unregister_commands()
     self.server.close_session(self.session_key)
     self.conn.on_close()
     self.stream.close()
     self.state = CLOSED
     gen_log.info("CONNECTION CLOSED: %s", self.session_key)
Example #4
    def incremental_backup(self):
        """
        进行增量备份
        :return: 
        """
        dir_name_list = os.listdir(self.__back_up_dir)
        if not dir_name_list:
            gen_log.info(u'No existing backup; incremental backup cannot be performed.')
            return
        dir_name_list.sort(reverse=True)
        dir_name = dir_name_list[0]
        incremental_backup_dir_path = os.path.join(
            self.__back_up_dir, dir_name, self.__incremental_backup_dir_name,
            self.__current_datetime_string)
        if not os.path.exists(incremental_backup_dir_path):
            os.makedirs(incremental_backup_dir_path)
        basedir = self.__full_amount_dir_path(dir_name)
        command = 'innobackupex --incremental %s --incremental-basedir=%s --user=%s --password=%s --host=%s --no-timestamp' % (
            incremental_backup_dir_path, basedir, self.__user_name,
            self.__password, "172.17.0.1")
        try:
            self.__execute_command(command)
            return incremental_backup_dir_path
        except BackupCommandExecuteException:
            if os.path.exists(incremental_backup_dir_path):
                shutil.rmtree(incremental_backup_dir_path)
            raise
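
The incremental command above needs a base directory created by an earlier full run. As a rough, hypothetical sketch (not this project's full_amount_backup), the corresponding full innobackupex invocation could be driven like this, here via subprocess instead of the class's private __execute_command helper:

import subprocess

def full_amount_backup_sketch(backup_dir, user, password, host='172.17.0.1'):
    # Hypothetical standalone helper: take a full (base) backup that a later
    # incremental run can point at via --incremental-basedir.
    command = [
        'innobackupex', '--user=%s' % user, '--password=%s' % password,
        '--host=%s' % host, '--no-timestamp', backup_dir,
    ]
    subprocess.check_call(command)
    return backup_dir
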
Example #5
 def terminal_message_flow(self, msg):
     r"""Sets message flow"""
     try:
         log, sack, from_buffer = super(
             TerminalSession, self).terminal_message_flow(msg)
     except MessageNotImplemented as e:  # silence exc
         gen_log.exception(e)
         return
     count_num = log.log.count_number
     if log.type == conf.ACK:
         if not log.header == conf.HEARTBEAT_ACK:
             self.unregister_command_on_ack(log.log)
         yield self.conn.on_ack(msg, log, sack)
     else:
         if self.is_pending_rto(log.header):
             self.make_rto_response(log)
         # bad GPS accuracy
         # maybe warn our team by email that GPS accuracy is weak
         # skip the message in that case
         if log.header == conf.FIXED_REPORT:
             gps_accuracy = int(log.log.gps_accuracy)
             if not gps_accuracy:    # or (20 < gps_accuracy <= 50):
                 self.skip_message = True
             else:
                 self.skip_message = False
         if getattr(self, 'skip_message', False):
             gen_log.info("Hey, GPS ACCURACY IS BAD")
             return
         yield self.conn.on_report(msg, log, sack, from_buffer=from_buffer)
     if not self.session_key and hasattr(log.log, 'unique_id'):
         self.session_key = log.log.unique_id
         self.state = OPEN
     raise gen.Return(count_num)
Example #6
    def _tail_messagebus(self):

        def job_complete(f):
            self.cnt_number = f.result()

        while True:
            if self.should_stop():
                break
            message = yield self.job_queue.get()
            schedule_at_loop(self.io_loop, self.terminal_message_flow(message),
                             job_complete)
            self.job_queue.task_done()
            gen_log.info("INCOMING MSG: %s", message)
Example #7
    def on_report(self, original_msg, response, sack, from_buffer=False):
        log = dict(response.log.__dict__)

        log_entry = LogEntry()

        log_entry.imei = log.get('unique_id', self.session_key)

        try:
            log_entry.gps_utc_time = time_utils.dt2ts(time_utils.to_dt(
                log.get('gps_utc_time')))
        except (ValueError, TypeError):
            return

        if response.header in (conf.FIXED_REPORT, conf.OBD_REPORT):
            log_entry.gps_accuracy = log.get('gps_accuracy', None)
            log_entry.speed = log.get('speed', None)
            log_entry.altitude = log.get('altitude', None)
            log_entry.longitude = log.get('longitude', None)
            log_entry.latitude = log.get('latitude', None)
            # mapped_log['rpm'] = log.get('rpm', None)
        else:
            gen_log.warning("Common Protocol hasn't conform to report %s",
                            response.header)
            raise gen.Return(None)
        session = self.backend.get_session()
        json_log = log_entry.json()
        try:
            # persist the log entry and re-serialise it for publishing
            session.add(log_entry)
            session.commit()
            json_log = log_entry.json()
        except Exception:
            session.rollback()
            gen_log.exception('failed to persist log entry')
        finally:
            session.close()

        my_data = {'d': {
            "lat": str(log.get('latitude', None)),
            "long": str(log.get('longitude', None))
        }}
        try:

            dev = ibmiotf.device.Client(options)
            dev.connect()
            dev.publishEvent("gps", "json", my_data)
            dev.disconnect()
        except Exception as ex:
            gen_log.info('failed to publish %s', ex)

        gen_log.info('MESSAGE PUBLISHED %s', json_log)
        raise gen.Return(None)
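
The options dict handed to ibmiotf.device.Client above is configured elsewhere. With the legacy ibmiotf SDK a device client is typically described by an organisation, device type, device id and auth token; the values below are placeholders and the exact keys should be double-checked against the SDK version in use:

import ibmiotf.device

# Placeholder credentials; real values come from the Watson IoT Platform dashboard.
options = {
    "org": "your-org-id",
    "type": "your-device-type",
    "id": "your-device-id",
    "auth-method": "token",
    "auth-token": "your-auth-token",
}

device = ibmiotf.device.Client(options)
device.connect()
# lat/long taken from the sample GTOBD report further down in these examples
device.publishEvent("gps", "json", {'d': {"lat": "43.226609", "long": "76.862894"}})
device.disconnect()
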
Example #8
 def open(self):
     """Opens the connect. According to the protocol,
     we should first configure the device and after than
     connection should be flagged as opened."""
     # unique_id = yield self.conn.configure()
     log = yield self.conn.verify_conn()
     unique_id = log.unique_id
     gen_log.info('CONNECTION OPENED WITH: %s' % unique_id)
     if unique_id:
         self.session_key = unique_id
         self.state = OPEN
         self.conn.on_open(unique_id)
         raise gen.Return(unique_id)
     raise gen.Return(conf.DISCONN_RESULT)
Example #9
    def deleteout_file(self):
        """
        删除本地过期文件,保留最近N天的文件
        :return: 
        """
        if not isinstance(self.__keep_days, int):
            raise Exception(u'wrong type keep days')

        path = self.__back_up_dir
        if self.__keep_days > len(os.listdir(path)):
            pass
        else:
            for i in range(len(os.listdir(path))):
                dlist = os.listdir(path)
                dlist.sort()
                dlist.reverse()
                d = os.path.join(path, dlist[self.__keep_days])
                timestamp = os.path.getmtime(d)
                gen_log.info(u'delete dir %s it timestamp --> %s' %
                             (d, timestamp))
                if os.path.isdir(d):
                    try:
                        gen_log.info(u'removing %s' % d)
                        shutil.rmtree(d)
                    except Exception, e:
                        gen_log.exception(u'删除过期备份文件出现异常:')
                    else:
                        gen_log.info(u'删除文件成功.')
Example #10
 def upload_backup_file(self):
     ftp = self.__ftp_login()
     if not os.path.exists(self.__upload_source_path):
         os.makedirs(self.__upload_source_path)
     output_file_path = os.path.join(
         self.__upload_source_path,
         self.__current_datetime_string + self.__file_extension)
     back_up_file_path = os.path.join(self.__back_up_path,
                                      os.listdir(self.__back_up_path)[-1])
     self.__make_tarfile(output_file_path, back_up_file_path)
     uploaded_file_path = self.__current_datetime_string + self.__file_extension
     file_object = open(output_file_path, 'rb')
     gen_log.info('start uploading file %s' % output_file_path)
     gen_log.info('remote file name is %s' % uploaded_file_path)
     ftp.storbinary('STOR ' + uploaded_file_path, file_object, 1024)
     ftp.close()
     file_object.close()
     gen_log.info('finished uploading file %s' % output_file_path)
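
__ftp_login and __make_tarfile are private helpers of the uploader that are not included here. A minimal sketch of standalone equivalents built on the standard ftplib and tarfile modules, with hypothetical host and credential parameters:

import ftplib
import os
import tarfile

def ftp_login_sketch(host, user, password):
    # Hypothetical stand-in for self.__ftp_login(): connect and authenticate.
    ftp = ftplib.FTP(host)
    ftp.login(user, password)
    return ftp

def make_tarfile_sketch(output_file_path, source_dir):
    # Hypothetical stand-in for self.__make_tarfile(): pack the chosen
    # backup directory into a gzipped tarball.
    tar = tarfile.open(output_file_path, 'w:gz')
    try:
        tar.add(source_dir, arcname=os.path.basename(source_dir))
    finally:
        tar.close()
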
Example #11
def incremental_backup(bucket):
    gen_log.info('incremental_backup')
    incremental_backup_dir_path = DatabasesBackup().incremental_backup()
    DatabasesBackup().inc_file_transfer_oss(incremental_backup_dir_path,
                                            bucket)
    DatabasesBackup().remote_deleteout_file(bucket)
Example #12
 def send_message(self, msg):
     self.stream.write(msg)
     gen_log.info("SACK: %s", msg)
Example #13
        self.dongles[unique_id] = session

    def close_session(self, unique_id):
        self.dongles.pop(unique_id, None)


def handle_stop(io_loop, obd_server, signum, stack):
    r"""Properly kills the process by interrupting it first."""
    obd_server.stop()
    io_loop.stop()
    io_loop.close()
    signal.signal(signal.SIGTERM, signal.SIG_DFL)
    os.kill(os.getpid(), signal.SIGTERM)


if __name__ == '__main__':
    io_loop = ioloop.IOLoop.instance()
    port = int(os.getenv('VCAP_APP_PORT', 9002))
    server = QueclinkServer(io_loop=io_loop, ipaddr='0.0.0.0',
                            port=port)
    server.listen(port)
    # register signal handlers
    handle_stop = functools.partial(handle_stop, io_loop, server)
    signal.signal(signal.SIGTERM, handle_stop)
    gen_log.info("Queclink Server is UP on port {}.".format(port))
    io_loop.start()



"""+RESP:GTOBD,1F0106,864251020002568,,gv500,0,70FFFF,,1,11814,983A8140,836,0,88,Inf,,1,0,1,0300,12,27,,0,0.0,316,843.0,76.862894,43.226609,20141120134941,0401,0001,08DE,9707,00,0.0,20141120194942,4DA1$"""
Example #14
def verify_remote_file(bucket):
    gen_log.info('verify_remote_file')
    DatabasesBackup().verify_remote_file(bucket)
Example #15
def error_listener(ev):
    if ev.exception:
        gen_log.exception("%s error", str(ev.job))
    else:
        gen_log.info('%s missed.', str(ev.job))
Example #16
def clear_ftp():
    gen_log.info('clear_ftp')
    FtpUploader().clear_ftp()
Example #17
def compress_full_file():
    gen_log.info('compress_full')
    FtpUploader().compress_full_file()
Example #18
def upload_backup_file():
    gen_log.info('upload_backup_file')
    FtpUploader().upload_backup_file()
Example #19
def remote_download_file(bucket):
    gen_log.info('download_file')
    DatabasesBackup().remote_download_file(bucket)
Example #20
def object_dir_detail(bucket):
    gen_log.info('show_backup')
    DatabasesBackup().object_dir_detail(bucket)
Example #21
 def on_ack(self, original_msg, msg, sack):
     gen_log.info("PROCESSED ACK: %s[ack-%s]", msg, sack)
     if self.config['sack_enable'] or (msg.type == conf.ACK and
                                       msg.header == conf.HEARTBEAT_ACK):
         self._session.send_message(sack)
     raise gen.Return(None)
Example #22
def show_bucket_file(bucket):
    gen_log.info('show_bucket_file')
    DatabasesBackup().show_bucket_file(bucket)