Example #1
    def install_update(self):
        if not self._update_ready or self._status == UPDATER_STATUS_INSTALLING:
            return False
        self._status = UPDATER_STATUS_INSTALLING
        self.emit_status()
        logger.info('Installing update')
        try:
            assert self._update_file_path and isfile(self._update_file_path)
            logger.debug("self._update_file_path %s", self._update_file_path)
            path, name = split(self._update_file_path)
            old_cwd = os.getcwd()
            os.chdir(path)
            system = get_platform()
            if system == 'Windows':
                from common.config import load_config

                config = load_config()
                root = config.sync_directory
                log_basename = time.strftime('%Y%m%d_%H%M%S.log')
                log_filename = get_bases_filename(root, log_basename)
                if not self._is_ascii(log_filename):
                    log_filename = log_basename
                args = [name, '/verysilent', '/Log={}'.format(log_filename)]
                if is_portable():
                    args.append('/PATH={}'.format(get_application_path()))
                subprocess.Popen(
                    args,
                    creationflags=0x00000200  # CREATE_NEW_PROCESS_GROUP
                    | 0x00000008,  # DETACHED_PROCESS
                    close_fds=True)
            elif system == 'Darwin':
                bundle_path = normpath(
                    join(get_application_path(), '..', '..', '..', '..'))
                logger.debug("bundle_path: %s", bundle_path)
                subprocess.call(
                    ['ditto', '-xk', self._update_file_path, bundle_path])
                subprocess.call(
                    ['xattr', '-d', '-r', 'com.apple.quarantine', bundle_path])
                logger.debug("Update completed, restart")
                remove_file(get_cfg_filename('lock'))
                if is_portable():
                    launcher_path = normpath(
                        join(bundle_path, "..", "Pvtbox-Mac.command"))
                else:
                    launcher_path = bundle_path
                subprocess.call(['open', launcher_path])
            os.chdir(old_cwd)
            Application.exit()
        except Exception as e:
            logger.warning("Can't install update. Reason: %s", e)
            self._status = UPDATER_STATUS_INSTALL_ERROR
            self.emit_status()
            return False

        self._status = UPDATER_STATUS_INSTALLED
        self.emit_status()
        return True
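
Every example on this page builds paths with get_bases_filename; the helper itself is not shown here. A minimal sketch of what such a helper might look like, assuming it only joins the sync directory root with the file name inside a hidden service folder (the '.pvtbox' folder name is hypothetical; the project's actual helper may do more):

import os.path

def get_bases_filename(root, filename):
    # Hypothetical sketch: keep service files (logs, databases) in a hidden
    # subfolder of the sync directory root; the real helper may differ.
    return os.path.join(root, '.pvtbox', filename)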
Example #2
    def _clear_old_logs(self):
        # Keep the newest self._logs_count timestamped logs, remove the rest
        old_logs = sorted(
            glob.glob(get_bases_filename(
                root_directory,
                self._file_name_prefix +
                '[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]'
                '_[0-9][0-9][0-9][0-9][0-9][0-9]*.log')),
            reverse=True)[self._logs_count:]
        try:
            list(map(os.remove, old_logs))
        except Exception:
            pass
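
The glob pattern above matches timestamped names such as copies_20240131_235959.log: eight digits for the date, an underscore, six digits for the time (the 'copies_' prefix here is the one used in Example #5; the actual prefix is whatever self._file_name_prefix holds). A quick sanity check with fnmatch, which glob uses for matching:

from fnmatch import fnmatch

pattern = ('copies_[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]'
           '_[0-9][0-9][0-9][0-9][0-9][0-9]*.log')
print(fnmatch('copies_20240131_235959.log', pattern))  # True
print(fnmatch('copies_latest.log', pattern))           # False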
Example #3
    def doRollover(self):
        old_filename = self.baseFilename
        self.baseFilename = get_bases_filename(
            root_directory,
            time.strftime(self._file_name_prefix + '%Y%m%d_%H%M%S.log'))
        if self.baseFilename == old_filename:
            # The log filled up within one second; avoid a name collision
            self.baseFilename = self.baseFilename[:-4] + '_1.log'
        try:
            # RotatingFileHandler can't rename the file because we changed
            # self.baseFilename; it only checks for file existence now.
            # The implementation may change, though, so wrap it in try/except.
            RotatingFileHandler.doRollover(self)
        except Exception:
            self.baseFilename = old_filename
        self._clear_old_logs()
Example #4
    def _on_start(self):
        if not self._address:
            return

        stats_db = get_bases_filename(self._root, self._db_name)
        self._init(stats_db, self._address, 2)
Example #5
def logging_setup(loglevel, logfilename=None, copies_logging=True):
    """
    Configures logging module

    @param loglevel Log level to be used [str]
    @param logfilename Name of file to save log into [str]
    @param copies_logging Whether to also set up the separate copies log handler [bool]
    """

    if set_verbose and loglevel == 'DEBUG':
        loglevel = VERBOSE

    config = load_config()
    set_root_directory(config.sync_directory)

    copies_file_prefix = 'copies_'
    if not logfilename:
        logfilename = time.strftime('%Y%m%d_%H%M%S.log')
        logfilename = get_bases_filename(root_directory, logfilename)

    copies_logs = sorted(
        glob.glob(get_bases_filename(
            root_directory,
            copies_file_prefix + '[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]'
            '_[0-9][0-9][0-9][0-9][0-9][0-9]*.log')),
        reverse=True)
    if copies_logs:
        copies_filename = copies_logs[0]
    else:
        log_dir, log_file = split(logfilename)
        copies_filename = join(log_dir, copies_file_prefix + log_file)

    cfg = {
        'version': 1,
        'disable_existing_loggers': False,
        'formatters': {
            'logfile': {
                'format':
                '[%(asctime)s %(levelname)s %(name)s:%(lineno)d] %(threadName)s(%(thread)d): %(message)s',  # noqa
            },
            'console': {
                'format':
                '[%(asctime)s %(levelname)s %(name)s:%(lineno)d] %(threadName)s(%(thread)d): %(message)s',  # noqa
            },
        },
        'handlers': {
            'console': {
                'class': 'logging.StreamHandler',
                'formatter': 'console',
                'stream': sys.stdout,
                'level': loglevel,
                'filters': ['console_filter'],
            },
            'file': {
                'formatter': 'logfile',
                'class': 'common.logging_setup.EconoRotatingFileHandler',
                'filename': logfilename,
                'logsCount': DEFAULT_LOGS_COUNT,
                'level': loglevel,
            },
        },
        'filters': {
            'console_filter': {
                '()': ConsoleFilter
            }
        },
        'loggers': {
            # for any logger
            '': {
                'handlers': [
                    'console',
                    'file',
                ],
                'level': loglevel,
            },
        },
    }

    if copies_logging:
        cfg['handlers']['copies_file'] = {
            'formatter': 'logfile',
            'class': 'common.logging_setup.EconoRotatingFileHandler',
            'filename': copies_filename,
            'logsCount': DEFAULT_COPIES_LOGS_COUNT,
            'file_name_prefix': copies_file_prefix,
            'level': loglevel,
        }
        cfg['loggers']['copies_logger'] = {
            'handlers': [
                'copies_file',
            ],
            'level': loglevel,
            'propagate': False,
        }

    make_dirs(logfilename, is_folder=False)

    logging.raiseExceptions = False

    logging.config.dictConfig(cfg)
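
A usage sketch under the configuration above (assuming the module's imports and helpers are in place and copies_logging is left at its default): call logging_setup() once at startup, then fetch loggers as usual. The dedicated 'copies_logger' writes only to the copies_* file, since propagate is disabled for it:

import logging

logging_setup('DEBUG')                           # console + timestamped log file
logging.getLogger(__name__).info('started')      # handled by the root '' logger

copies_logger = logging.getLogger('copies_logger')
copies_logger.debug('copy created')              # copies_* file only (propagate=False)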
Example #6
    def _open_stderr_log(self):
        config = load_config()
        root = config.sync_directory
        baseFilename = get_bases_filename(
            root, time.strftime('%Y%m%d_%H%M%S_stderr.log'))
        self._stderr_log = open(baseFilename, 'a')
Example #7
    def __init__(self,
                 config=None,
                 download_limiter=None,
                 tracker=None,
                 sync=None,
                 network_speed_calculator=None,
                 db=None,
                 filename='shares.db',
                 parent=None):
        """
        Constructor

        @param config Application configuration
        @param tracker Instance of stat_tracking.Tracker
        @param download_limiter Instance of Download limiter
        """

        self._cfg = config

        # Indicates that share downloading could be started
        self._enabled = Event()

        # Download limiter instance
        self._download_limiter = download_limiter
        # Tracker class instance
        self._tracker = tracker

        self._network_speed_calculator = network_speed_calculator
        # Signalling server client instance
        self._ss_client = SignalServerClient(parent, client_type='webshare')
        self._ss_addr = SERVER
        self._ss_port = PORT

        self._sync = sync
        self._db = db
        # Share processing queue
        self._queue = Queue()

        # share_hash <> its object IDs
        self._tasks = defaultdict(list)
        self._cancelled_tasks = defaultdict(set)
        self._downloaded_tasks = defaultdict(set)

        self._failed_downloads = defaultdict(list)
        self._retry_download_timer = None
        self._retry_download_timeout = RETRY_DOWNLOAD_TIMEOUT

        # Start thread
        self._share_processing_thread()
        self._stop = False

        # Hash of share being processed
        self._current_share_hash = None

        # Name of share being processed
        self._current_share_name = None

        self._is_folder = False
        self._is_deleting = False
        self._fullname = ""
        self._in_data_dir = False

        # Number of files
        self._num_files = 0

        # Destination directories dict
        self._dest_dirs = {}
        self._dest_uuids = {}

        # Initialize signalling server client
        self._connect_ss_slots()

        self._init_connectivity()

        # Signals to be emitted for current class instance
        self.signals = WebshareHandlerSignals()

        self.signals.download_success.connect(self._on_download_success,
                                              Qt.QueuedConnection)
        self.signals.download_failure.connect(self._on_download_failure,
                                              Qt.QueuedConnection)

        self._folder_uuid_ready = Event()
        self._folder_uuid_ready.set()
        self._special_event_no = 0
        self._special_event_lock = RLock()

        self._filename = get_bases_filename(self._cfg.sync_directory, filename)
        self._spec_files = dict()
        self._clean_spec_files()