Example #1
    def __init__(self, f=sys.stdout, level="info"):
        frmt = '{record.message}'
        if level == "info":
            self.debug = NullHandler(level=DEBUG)
            self.info = StreamHandler(f, level=INFO, format_string=frmt)
        else:
            self.debug = StreamHandler(f, level=DEBUG, format_string=frmt)
            self.info = None
Example #2
def inject_logging(quiet):
    """Injects logging"""
    null_handler = NullHandler(level='DEBUG')
    null_handler.push_application()  # Discard any message lesser than INFO
    log_handler = MonitoringFileHandler(os.path.join(LOG_DIR, 'thekraken.log'),
                                        level='INFO')
    log_handler.push_application()
    if not quiet:
        console_handler = StreamHandler(sys.stdout, level='DEBUG', bubble=True)
        console_handler.push_application()
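Handlers pushed later sit higher on logbook's stack, so a record is offered to the console handler first, then the monitoring file handler, and the bottom NullHandler silently discards whatever is left instead of letting it fall through to logbook's default stderr handler. A minimal sketch of the same layering with stock handlers (names are illustrative):

import sys
from logbook import Logger, NullHandler, StreamHandler

NullHandler(level='DEBUG').push_application()   # bottom of the stack: swallow leftovers
StreamHandler(sys.stdout, level='INFO', bubble=True).push_application()  # top: console

log = Logger('demo')
log.debug('filtered by the console level, then swallowed by the NullHandler')
log.info('printed to stdout')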
Example #3
def setup_logger(config):
    if config.has_option(SLACK_SECTION_NAME, "log_output"):
        output_path = config.get(SLACK_SECTION_NAME, "log_output")
        dir_path, file_name = os.path.split(output_path)
        if dir_path and not os.path.exists(dir_path):
            os.makedirs(dir_path)

        file_handler = RotatingFileHandler(output_path, backup_count=5)
        file_handler.push_application()
    else:
        stream_handler = StreamHandler(sys.stdout)
        stream_handler.push_application()
Example #4
def main():
    """
    The main routine which kicks everything off
    :return:
    """

    # Setup the command line arguments
    flags = argparse.ArgumentParser(description="Tool to validate and fix errors in CSV files for TADC imports")
    flags.add_argument('csv_file', type=str, help="Path to a CSV file to validate")
    flags.add_argument('header_rows', type=int, help="Number of header rows")
    flags.add_argument('--fix-missing', '-f', action='store_true', help="Fix missing fields by inserting the value 'unknown'")
    flags.add_argument('--output-dir', '-o', type=str, help='Where to put output files', default=os.getcwd())
    flags.add_argument('--log-dir', '-l', type=str, help='Where to put log files', default='/tmp')
    flags.add_argument('--log-level', type=str, help='Choose a log level', default='INFO')
    flags.add_argument('--old-date-format', type=str, help="the format of dates that will be fixed", default='%d/%m/%Y')
    args = flags.parse_args()

    log_filename = os.path.join(
        args.log_dir,
        'tadc_import_validator_{}.log'.format(time.strftime('%Y%m%d-%H%M%S'))
    )

    # register some logging handlers
    log_handler = FileHandler(
        log_filename,
        mode='w',
        level=args.log_level,
        bubble=True
    )
    stdout_handler = StreamHandler(sys.stdout, level=args.log_level, bubble=True)

    with stdout_handler.applicationbound():
        with log_handler.applicationbound():
            log.info("Arguments: {}".format(args))
            start = time.time()
            log.info("starting at {}".format(time.strftime('%l:%M%p %Z on %b %d, %Y')))

            with CSVFileValidator(
                    csv_file=args.csv_file,
                    header_rows=args.header_rows,
                    output_dir=args.output_dir,
                    old_date_format=args.old_date_format,
                    fix_missing=args.fix_missing) as validator:
                validator.validate_file()
                log.info("Running time: {}".format(str(datetime.timedelta(seconds=(round(time.time() - start, 3))))))
                log.info("Log written to {}:".format(log_filename))
                log.info("Fixed data is in: {}".format(validator.get_fixed_filename()))
Example #5
def run():
    f = NamedTemporaryFile()
    out = StringIO()
    with NullHandler():
        with StreamHandler(out, level=WARNING):
            with FileHandler(f.name, level=ERROR):
                for _ in range(100):
                    list(Handler.stack_manager.iter_context_objects())
Example #6
def make_logger(name, file_name, path):
    log_file_addr = os.path.join(path, file_name)
    new_logger = Logger(name)
    new_logger.handlers.append(StreamHandler(sys.stdout, bubble=True))
    new_logger.handlers.append(
        FileHandler(log_file_addr, bubble=True, mode='w'))

    return new_logger
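Because the handlers are appended to the logger's own handlers list rather than pushed onto the global stack, only loggers returned by make_logger write to them. A hypothetical call (path and names are illustrative):

log = make_logger('worker', 'worker.log', '/tmp')
log.info('written to both stdout and /tmp/worker.log')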
Example #7
    def __init__(self, log=None, ts_api_token=""):
        if log:
            self._log = log
        else:
            StreamHandler(sys.stdout).push_application()
            self._log = Logger(self.__class__.__name__)
        self._log.info("tushare version: " + ts.__version__)
        self._proapi = ts.pro_api(ts_api_token)
Example #8
    def set_debug(self, set_to=True):
        """
        Sets the capture to debug mode (or turns it off if specified).
        """
        if set_to:
            StreamHandler(sys.stdout).push_application()
            self._log.level = logbook.DEBUG
        self.debug = set_to
Example #9
def init_log(log_file=False, file_path=None):
    if log_file:
        file_path = os.path.join(_get_logs_dir(), config.log_name) if not file_path else file_path
        log_file_handler = FileHandler(file_path, format_string=log_format_string, bubble=True, mode='a')
        print(f"Session logs can be found here {file_path}")
        log.handlers.append(log_file_handler)
    log.handlers.append(StreamHandler(sys.stdout, level="DEBUG", format_string=log_format_string))
Example #10
def get_logger(format_string=None):
    """Returns a singleton instance of a LogBook Logger

    Args:
        format_string: specifies how the log messages should be formatted

    Returns:
        A logbook Logger
    """
    if format_string is None:
        format_string = (
            u'[{record.time:%Y-%m-%d %H:%M:%S.%f} pid({record.process})] ' +
            u'{record.level_name}: {record.module}::{record.func_name}:{record.lineno} {record.message}'
        )
    default_handler = StreamHandler(sys.stdout, format_string=format_string)
    default_handler.push_application()
    return LoggerSingle(__name__)
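LoggerSingle presumably caches the instance, though note that each call also pushes a fresh handler onto the application stack. A short usage sketch; the printed prefix is an example of what the default format_string produces:

log = get_logger()
log.info('service started')
# e.g. [2024-01-01 12:00:00.000000 pid(1234)] INFO: mymodule::main:2 service started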
Example #11
def main(botcls, config):
    if 'logfile' in config.data:
        handler = RotatingFileHandler(os.path.expanduser(config.logfile))
    else:
        handler = StreamHandler(sys.stdout)

    handler.push_application()

    bot = botcls(config)
    bot.run()
Example #12
    def __init__(self,
                 filename,
                 arena='backtest',
                 logname='Backtest',
                 level=NOTSET):
        super().__init__(logname, level)

        path, ext = os.path.splitext(filename)
        now = datetime.datetime.now()
        log_filename = path + '_' + now.strftime('%Y-%m-%d_%H%M') + ".log"
        file_handler = FileHandler(log_filename, level=DEBUG, bubble=True)
        file_handler.format_string = LOG_ENTRY_FMT
        self.handlers.append(file_handler)

        stream_handler = StreamHandler(sys.stdout, level=INFO)
        stream_handler.format_string = LOG_ENTRY_FMT
        self.handlers.append(stream_handler)

        self.arena = arena
Example #13
def main(docs, bundle, build_dir, tmp_dir, recreate, debug, w):
    '''
    annotatedocs analyzes your sphinx-based documentation and provides helpful
    feedback about the quality and possible improvements.

    The first argument should be the path to where your documentation lives
    (e.g. the one in which you usually call 'make html').

    If you leave the first argument empty it defaults to the current working
    directory.

    The build will usually be written to <your docs dir>/_build/annotatedhtml/.
    You can change the output directory with the -b option.
    '''

    # Restore the default SIGPIPE behavior so that piping works correctly.
    signal(SIGPIPE, SIG_DFL)

    if debug:
        log_level = 'DEBUG'
    else:
        log_level = 'INFO'

    null_handler = NullHandler(level='DEBUG')
    log_handler = StreamHandler(sys.stderr, level=log_level)
    with null_handler.applicationbound():
        with log_handler.applicationbound():

            loader = get_loader(docs,
                                build_dir=build_dir,
                                tmp_dir=tmp_dir)

            if recreate:
                loader.cleanup()
            loader.setup()
            confoverrides = {}
            if bundle:
                confoverrides['annotatedocs_bundle'] = bundle
            index_file = loader.build(confoverrides=confoverrides)

            if w:
                webbrowser.open(index_file)
Example #14
def logger(name, stream_level=DEBUG, file_level=DEBUG):
    set_datetime_format('local')
    StreamHandler(sys.stdout, level=stream_level,
                  bubble=True).push_application()
    FileHandler(
        f'logs/{name}_{datetime.today().strftime("%Y-%m-%d_%H-%M")}.log',
        bubble=True,
        level=file_level,
        delay=True).push_application()
    return Logger(name)
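Both handlers are pushed with bubble=True and the file handler ends up on top of the stack, so records below stream_level still reach the log file. A hypothetical call:

from logbook import INFO

log = logger('nightly', stream_level=INFO)   # creates logs/nightly_<timestamp>.log lazily (delay=True)
log.debug('file only: below stream_level')
log.info('console and file')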
Example #15
    def __init__(self):
        self.started = False
        self.services = {}
        self.pid = os.getpid()
        self.logger = ProcessLogger(self)

        # Configure the process logger
        self.file_handler = FileHandler(f"logs/{self.name}.log",
                encoding="utf-8", level="DEBUG", delay=True)
        self.file_handler.format_string = (
                "{record.time:%Y-%m-%d %H:%M:%S.%f%z} [{record.level_name}] "
                "{record.message}"
        )
        self.stream_handler = StreamHandler(sys.stdout, encoding="utf-8",
                level="INFO", bubble=True)
        self.stream_handler.format_string = (
                "[{record.level_name}] {record.channel}: {record.message}"
        )
        self.file_handler.push_application()
        self.stream_handler.push_application()
Example #16
def server():
    def handler(command: str) -> str:
        if command == "ls":
            return "file1\nfile2"
        elif command.startswith("echo"):
            return command[4:].strip()
        raise CommandFailure(f"Unknown command {command}")

    StreamHandler(sys.stdout).push_application()
    with Server(command_handler=handler) as server:
        yield server
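This reads like a pytest fixture: the pushed StreamHandler makes the server's log records visible in the test output, and the Server is torn down when the with block exits after the test. A hypothetical test against it (server.send is an assumed client call, not shown above):

def test_echo(server):
    assert server.send('echo hello') == 'hello'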
Example #17
    def __init__(
            self,
            log_level=LogLevel.INFO,
            format_str='[{record.time:%Y-%m-%d %H:%M:%S}] - {record.channel} - {record.level_name} '
                       '- {record.message}'):
        self.logger = Logger('WindAdapter')
        set_datetime_format('local')
        StreamHandler(sys.stdout, format_string=format_str).push_application()
        FileHandler('WindAdapter.log', bubble=True,
                    format_string=format_str).push_application()
        self.set_level(log_level)
Example #18
def basicConfig(level='INFO', redirectLogging=False, colorized=False):
    if not colorized:
        handler = StreamHandler(sys.stderr, level=level, bubble=True)
    else:
        handler = ColorizedHandler(level=level, bubble=True)
        handler.force_color()

    handler.format_string = mainFormatString
    handler.push_application()

    if redirectLogging:
        redirect_logging()
        redirect_warnings()
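With redirectLogging=True, records emitted through the stdlib logging module, as well as warnings, are rerouted into logbook and formatted by the handler above. A small sketch:

import logging

basicConfig(level='INFO', redirectLogging=True)
logging.getLogger('third_party').warning('formatted by the logbook handler')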
Example #19
def main(hash_algo, gather_time, pull_addr, pull_type, router_addr,
         router_type, verbose):
    StreamHandler(sys.stdout,
                  level='DEBUG' if verbose else 'INFO').push_application()
    merkle_processor = MerkleInputProcessor(pull_addr=pull_addr,
                                            pull_type=pull_type,
                                            router_addr=router_addr,
                                            router_type=router_type,
                                            gather_time=gather_time,
                                            hash_algo=hash_algo)

    merkle_processor.start_poll()
Example #20
def main(name):
    if HAVE_RELOADER:
        print('Live reload enabled')
        hupper.start_reloader('sud.cli.service.main')

    debug = config('DEBUG', default=False, cast=bool)
    loglevel = config('LOGLEVEL', default='DEBUG' if debug else 'INFO').upper()

    with StreamHandler(sys.stdout, level=loglevel).applicationbound():
        module_name = 'sud.services.{0}.server'.format(name.replace('-', '_'))
        module = import_module(module_name)
        module.main()
Example #21
def initialize(eventlog_file=None):
    """
    Initialize the analytics output. This will cause analytics events to be output to either a file or stdout.

    If this function is not called, analytics events will not be output. If it is called with a filename, the events
    will be output to that file. If it is called with 'STDOUT' or None, the events will be output to stdout.

    :param eventlog_file: The filename to output events to, 'STDOUT' to output to stdout, None to disable event logging
    :type eventlog_file: str | None
    """
    global _analytics_logger, _eventlog_file

    _eventlog_file = eventlog_file
    if not eventlog_file:
        _analytics_logger = None
        return

    if eventlog_file.upper() == 'STDOUT':
        event_handler = StreamHandler(sys.stdout)
    else:
        fs.create_dir(os.path.dirname(eventlog_file))
        previous_log_file_exists = os.path.exists(eventlog_file)

        event_handler = RotatingFileHandler(
            filename=eventlog_file,
            max_size=Configuration['max_eventlog_file_size'],
            backup_count=Configuration['max_eventlog_file_backups'],
        )
        if previous_log_file_exists:
            event_handler.perform_rollover()  # force starting a new eventlog file on application startup

    event_handler.format_string = '{record.message}'  # only output raw log message -- no timestamp or log level
    handler = TaggingHandler(
        {'event': event_handler},  # enable logging to the event_handler with the event() method
        bubble=True,
    )
    handler.push_application()

    _analytics_logger = TaggingLogger('analytics', ['event'])
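After initialize() runs, the TaggingLogger exposes one method per tag, so callers emit analytics records with _analytics_logger.event(...) and the TaggingHandler routes them to the event handler above. A sketch of module-internal usage (the event payload is illustrative):

initialize('STDOUT')
_analytics_logger.event('{"type": "BUILD_STARTED", "build_id": 42}')  # emitted as a raw line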
Example #22
    def __init__(self):
        StreamHandler(stdout).push_application()
        self.logger = Logger(self.__class__.__name__)
        set_datetime_format("local")

        try:
            with open('config.yaml', 'r') as stream:
                self.config = yaml.safe_load(stream)
        except yaml.YAMLError as e:
            self.logger.critical(e)
            raise SystemExit(1)
        except IOError as e:
            self.logger.critical(e)
            raise SystemExit(1)
Example #23
    def __init__(self):
        StreamHandler(sys.stdout).push_application()
        self._log = Logger(self.__class__.__name__)
        self._perf = pd.DataFrame()
        self._records = EnhancedOrderedDict()
        self._trade_context = TradeContext()
        self._sink_fn = self.sink
        self._sink_out_dict = EnhancedOrderedDict()
        # build bar_data
        self._bar_data = BarData()
        # build trade order
        self._trade_order = TradeOrder()
        # build multiple order position dict
        self._multiple_order_position_dict = EnhancedOrderedDict()
Example #24
class ConciseLog(object):
    def __init__(self, f=sys.stdout, level="info"):
        frmt = '{record.message}'
        if level == "info":
            self.debug = NullHandler(level=DEBUG)
            self.info = StreamHandler(f, level=INFO, format_string=frmt)
        else:
            self.debug = StreamHandler(f, level=DEBUG, format_string=frmt)
            self.info = None

    def __enter__(self):
        self.debug.__enter__()
        if self.info:
            self.info.__enter__()
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        if self.info:
            self.info.__exit__(exc_type, exc_value, traceback)
        self.debug.__exit__(exc_type, exc_value, traceback)
Example #25
    def __init__(self, render_modes, **kwargs):
        """
        Plotting controls, can be passed as kwargs.

        Args:
            render_state_as_image=True,
            render_state_channel=0,
            render_size_human=(6, 3.5),
            render_size_state=(7, 3.5),
            render_size_episode=(12,8),
            render_dpi=75,
            render_plotstyle='seaborn',
            render_cmap='PRGn',
            render_xlabel='Relative timesteps',
            render_ylabel='Value',
            render_title='local step: {}, state observation min: {:.4f}, max: {:.4f}',
            render_boxtext=dict(fontsize=12,
                                fontweight='bold',
                                color='w',
                                bbox={'facecolor': 'k', 'alpha': 0.3, 'pad': 3},
                                )
        """
        # Update parameters with relevant kwargs:
        for key, value in kwargs.items():
            if key in self.params.keys():
                self.params[key] = value

        # Unpack it as attributes:
        for key, value in self.params.items():
            setattr(self, key, value)

        # Logging:
        if not hasattr(self, 'log_level'):
            self.log_level = WARNING

        StreamHandler(sys.stdout).push_application()
        self.log = Logger('BTgymRenderer', level=self.log_level)

        #from matplotlib.backends.backend_agg import FigureCanvasAgg as FigureCanvas
        #self.FigureCanvas = FigureCanvas

        self.plt = None  # Will set it inside server process when calling initialize_pyplot().

        #self.plotter = BTgymPlotter() # Modified bt.Cerebro() plotter, to get episode renderings.

        # Set empty plugs for each render mode:
        self.render_modes = render_modes
        for mode in self.render_modes:
            self.rgb_dict[mode] = self.rgb_empty()
Example #26
    def __init__(
        self,
        env,
        policy,
        task,
        rollout_length,
        episode_summary_freq,
        env_render_freq,
        test,
        ep_summary,
        runner_fn_ref=BaseEnvRunnerFn,
        memory_config=None,
        log_level=WARNING,
    ):
        """

        Args:
            env:                    environment instance
            policy:                 policy instance
            task:                   int
            rollout_length:         int
            episode_summary_freq:   int
            env_render_freq:        int
            test:                   Atari or BTgym
            ep_summary:             tf.summary
            runner_fn_ref:          callable defining runner execution logic
            memory_config:          replay memory configuration dictionary
            log_level:              int, logbook.level
        """
        threading.Thread.__init__(self)
        self.queue = queue.Queue(5)
        self.rollout_length = rollout_length
        self.env = env
        self.last_features = None
        self.policy = policy
        self.runner_fn_ref = runner_fn_ref
        self.daemon = True
        self.sess = None
        self.summary_writer = None
        self.episode_summary_freq = episode_summary_freq
        self.env_render_freq = env_render_freq
        self.task = task
        self.test = test
        self.ep_summary = ep_summary
        self.memory_config = memory_config
        self.log_level = log_level
        StreamHandler(sys.stdout).push_application()
        self.log = Logger('ThreadRunner_{}'.format(self.task),
                          level=self.log_level)
Example #27
    def __init__(self, *args, **kwargs):
        super().__init__(command_prefix=when_mentioned_or(
            setup_file["discord"]["command_prefix"]),
                         description="A bot for weebs programmed by Recchan")

        # Set a custom user agent for Pixie
        self.http.user_agent = user_agent

        # Logging setup
        redirect_logging()
        StreamHandler(sys.stderr).push_application()
        self.logger = Logger("Pixie")
        self.logger.level = getattr(logbook,
                                    setup_file.get("log_level",
                                                   "INFO"), logbook.INFO)
        logging.root.setLevel(self.logger.level)
Example #28
def main(identity, dealer_connect, pub_addr, pub_type, sloth_bits,
         sloth_iterations, verbose):
    StreamHandler(sys.stdout,
                  level="DEBUG" if verbose else "INFO").push_application()

    sloth_compute = SlothComputation(
        dealer_connect=dealer_connect,
        pub_addr=pub_addr,
        pub_type=pub_type,
        timeout=10,
        identity=identity,
        sloth_bits=sloth_bits,
        sloth_iterations=sloth_iterations,
    )
    time.sleep(1)
    sloth_compute.start_compute_loop()
Example #29
class ConciseLog(object):
    def __init__(self, f=sys.stdout, level="info"):
        frmt = '{record.message}'
        if level == "info":
            self.debug = NullHandler(level=DEBUG)
            self.info = StreamHandler(f, level=INFO, format_string=frmt)
        else:
            self.debug = StreamHandler(f, level=DEBUG, format_string=frmt)
            self.info = None

    def __enter__(self):
        self.debug.__enter__()
        if self.info:
            self.info.__enter__()
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        if self.info:
            self.info.__exit__(exc_type, exc_value, traceback)
        self.debug.__exit__(exc_type, exc_value, traceback)
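Entering the context thread-binds the handlers, so only the wrapped block is affected. A usage sketch, assuming a logbook Logger already exists:

from logbook import Logger

log = Logger('demo')
with ConciseLog():               # "info" mode: DEBUG records are swallowed
    log.debug('hidden')
    log.info('shown, message text only')
with ConciseLog(level='debug'):  # debug mode: DEBUG records go to stdout
    log.debug('now shown')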
Example #30
def run(driver, email, password, keep_creds, output_dir, scrape_only, resume_only, website_only, profile_file, timeout,
        **kwargs):
    # Setup logging
    logbook.set_datetime_format('local')
    format_string = '[{record.time:%Y-%m-%d %H:%M:%S}] {record.level_name}: {record.message}'
    StreamHandler(sys.stdout, format_string=format_string).push_application()
    log = Logger()

    # Create output directory
    make_dir(output_dir)

    # Check if user has provided the profile json file
    if profile_file is None:
        if driver.lower() not in DRIVERS:
            raise ValueError(f'Browser driver has to be one of these: {", ".join(DRIVERS)}')

        # Check if credentials file exists
        credentials_file = os.path.expanduser(CREDENTIALS_FILE)
        if os.path.exists(credentials_file):
            with open(credentials_file) as f:
                credentials = json.load(f)
                email = credentials['email']
                password = credentials['password']
        else:
            if email is None:
                email = input('Enter your LinkedIn login email: ')
            if password is None:
                password = getpass('Enter your LinkedIn login password: ')

        log.notice('Scraping LinkedIn profile')
        log.notice('Please keep the browser window on top')
        profile = scrape(driver.lower(), email, password, output_dir, timeout)

        if keep_creds:
            store_creds(email, password, credentials_file)
    else:
        with open(profile_file) as f:
            profile = json.load(f)

    if not scrape_only:
        if resume_only:
            make_resume_files(profile, output_dir, timeout)
        elif website_only:
            make_website_files(profile, output_dir)
        else:
            make_resume_files(profile, output_dir, timeout)
            make_website_files(profile, output_dir)
Example #31
def create_app(config_class=DefaultConfig):
    app = connexion.FlaskApp(
        __name__, specification_dir='openapi/', options={"swagger_ui": False, "serve_spec": False}
    )
    app.app.config.from_object(config_class)

    # Show logging messages in the terminal
    StreamHandler(sys.stdout,
                  level=app.app.config['LOG_LEVEL']).push_application()

    log = Logger('logbook')
    log.info(app.app.config['LOG_LEVEL'])
    log.info('welcome to my application CHALLENGE CODE API MODE {}'.format(env('FLASK_ENV', 'development')))

    app.add_api("swagger.yaml", strict_validation=True)
    flask_app = app.app
    return flask_app
Example #32
    def __init__(self, stream, level):
        """ Virtually private constructor. """
        if UniLogger.__instance is not None:
            raise Exception("Logger has already been instantiated")

        UniLogger.__instance = self
        UniLogger.logger = Logger('uni-logger')

        handler = StreamHandler(stream)
        handler.level_name = level
        handler.formatter = self.json_formatter
        handler.push_application()
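logbook formatters are plain callables that receive (record, handler) and return the final string, so json_formatter is presumably a method along these lines (assumed, with a module-level import json; it is not part of the snippet above):

    def json_formatter(self, record, handler):
        # Assumed implementation: logbook calls formatter(record, handler)
        # and writes whatever string it returns.
        return json.dumps({
            'time': record.time.isoformat(),
            'level': record.level_name,
            'channel': record.channel,
            'message': record.message,
        })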
Example #33
def __init():
    driver = config.get('app.log.driver', 'stderr')
    level = config.get('app.log.level', 'DEBUG').upper()
    global __handler
    global __loggers
    if driver == 'stderr':
        __handler = StreamHandler(sys.stderr, level=level)
    elif driver == 'stdout':
        __handler = StreamHandler(sys.stdout, level=level)
    elif driver == 'file':
        __handler = FileHandler(filename=__get_log_file(), level=level)
    else:
        raise ValueError('Invalid log driver: {}'.format(driver))
    __handler.push_application()
    __loggers['core'] = Logger('Core')
    container.register('logger', __loggers['core'])
Example #34
    def __init__(self, timeout: float = None, log_level=logbook.INFO):
        """ ActionHandle constructor.

        :param timeout: the timeout in seconds to wait before the action handle considers the action
        complete. This is for handling actions on the robot that don't support triggers.
        :param log_level: the level for displaying and logging information, e.g. debugging information.
        """

        StreamHandler(sys.stdout).push_application()
        self._log = Logger('Robot')
        self._log.level = log_level

        self.id = generate_id()
        self.callbacks = []
        self.timeout = timeout
        self.event_ = threading.Event()
        if self.timeout is not None:
            self.timer_ = threading.Timer(self.timeout, self.done)
Example #35
    def __init__(self,
                 history_size,
                 max_sample_size,
                 priority_sample_size,
                 log_level=WARNING,
                 rollout_provider=None,
                 task=-1,
                 reward_threshold=0.1,
                 use_priority_sampling=False):
        """

        Args:
            history_size:           number of experiences stored;
            max_sample_size:        maximum allowed sample size (e.g. off-policy rollout length);
            priority_sample_size:   sample size of priority_sample() method
            log_level:              int, logbook.level;
            rollout_provider:       callable returning list of Rollouts NOT USED
            task:                   parent worker id;
            reward_threshold:       if |experience.reward| > reward_threshold: experience is saved as 'prioritized';
        """
        self._history_size = history_size
        self._frames = deque(maxlen=history_size)
        self.reward_threshold = reward_threshold
        self.max_sample_size = int(max_sample_size)
        self.priority_sample_size = int(priority_sample_size)
        self.rollout_provider = rollout_provider
        self.task = task
        self.log_level = log_level
        StreamHandler(sys.stdout).push_application()
        self.log = Logger('ReplayMemory_{}'.format(self.task),
                          level=self.log_level)
        self.use_priority_sampling = use_priority_sampling
        # Indices for non-priority frames:
        self._zero_reward_indices = deque()
        # Indices for priority frames:
        self._non_zero_reward_indices = deque()
        self._top_frame_index = 0

        if use_priority_sampling:
            self.sample_priority = self._sample_priority

        else:
            self.sample_priority = self._sample_dummy
Example #36
def main():
    """Shows basic usage of the Google Drive API.

    Creates a Google Drive API service object and outputs the names and IDs
    for up to 10 files.
    """

    log_filename = os.path.join(
        args.log_dir,
        'google-drive-to-s3-{}.log'.format(time.strftime('%Y%m%d-%H%M%S'))
    )

    # register some logging handlers
    log_handler = FileHandler(
        log_filename,
        mode='w',
        level=args.log_level,
        bubble=True
    )
    stdout_handler = StreamHandler(sys.stdout, level=args.log_level, bubble=True)

    with stdout_handler.applicationbound():
        with log_handler.applicationbound():
            log.info("Arguments: {}".format(args))
            start = time.time()
            log.info("starting at {}".format(time.strftime('%l:%M%p %Z on %b %d, %Y')))

            credentials = get_credentials()
            http = credentials.authorize(httplib2.Http())
            drive_service = discovery.build('drive', 'v3', http=http)

            s3 = boto3.resource('s3')

            # load up a match file if we have one.
            if args.match_file:
                with open(args.match_file, 'r') as f:
                    match_filenames = f.read().splitlines()
            else:
                match_filenames = None

            # get the files in the specified folder.
            files = drive_service.files()
            request = files.list(
                pageSize=args.page_size,
                q="'{}' in parents".format(args.folder_id),
                fields="nextPageToken, files(id, name)"
            )

            # make sure our S3 Key prefix has a trailing slash
            key_prefix = ensure_trailing_slash(args.key_prefix)

            page_counter = 0
            file_counter = 0
            while request is not None:
                file_page = request.execute(http=http)
                page_counter += 1
                page_file_counter = 0  # reset the paging file counter

                # determine the page at which to start processing.
                if page_counter >= args.start_page:
                    log.info(u"######## Page {} ########".format(page_counter))

                    for this_file in file_page['files']:
                        file_counter += 1
                        page_file_counter += 1
                        if we_should_process_this_file(this_file['name'], match_filenames):
                            log.info(u"#== Processing {} file number {} on page {}. {} files processed.".format(
                                this_file['name'],
                                page_file_counter,
                                page_counter,
                                file_counter
                            ))

                            # download the file
                            download_request = drive_service.files().get_media(fileId=this_file['id'])
                            fh = io.BytesIO()  # Using an in memory stream location
                            downloader = MediaIoBaseDownload(fh, download_request)
                            done = False
                            pbar = InitBar(this_file['name'])
                            while done is False:
                                status, done = downloader.next_chunk()
                                pbar(int(status.progress()*100))
                                # print("\rDownload {}%".format(int(status.progress() * 100)))
                            del pbar

                            # upload to bucket
                            log.info(u"Uploading to S3")
                            s3.Bucket(args.bucket).put_object(
                                Key="{}{}".format(key_prefix, this_file['name']),
                                Body=fh.getvalue(),
                                ACL='public-read'
                            )
                            log.info(u"Uploaded to S3")
                            fh.close()  # close the file handle to release memory
                        else:
                            log.info(u"Do not need to process {}".format(this_file['name']))

                # stop if we have come to the last user specified page
                if args.end_page and page_counter == args.end_page:
                    log.info(u"Finished paging at page {}".format(page_counter))
                    break
                # request the next page of files
                request = files.list_next(request, file_page)

            log.info("Running time: {}".format(str(datetime.timedelta(seconds=(round(time.time() - start, 3))))))
            log.info("Log written to {}:".format(log_filename))
Example #37
def get_logger(name, debug=True):
    logbook.set_datetime_format('local')
    handler = StreamHandler(sys.stdout) if debug else NullHandler()
    handler.push_application()
    return Logger(os.path.basename(name))
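A hypothetical call; with debug=False every record lands in the NullHandler and is discarded:

log = get_logger(__file__)   # channel is named after the file
log.info('printed to stdout with local timestamps')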
Example #38
    s = s[len(w):].strip()
    return Item1(s)

def level(s):
    return r_word.search(s).start() // 4

        
source, dest = [], []
current_level = -1

debugging = False
if debugging:
    dhandler = NullHandler(level=DEBUG)
    dhandler.format_string = '{record.message}'
    dhandler.push_application()
handler = StreamHandler(stdout, level=NOTICE)
handler.format_string = '{record.message}'
handler.push_application()


for s in open("todo.txt"):
    l = level(s)
    debug("levels {}, {}".format(current_level, l))
    s = s.strip()
    if not s: continue
    if l > current_level:
        d = join(downloads_home,  *dest)
        if not isdir(d): mkdir(d)
    if l <= current_level:  
        if current_level: store()
        source = source[:l]
Example #39
def main():
    """
    Copy a folder from Source to Target

    """

    log_filename = os.path.join(
        args.log_dir,
        'copy-google-drive-folder-{}.log'.format(os.path.basename(time.strftime('%Y%m%d-%H%M%S')))
    )

    # register some logging handlers
    log_handler = FileHandler(
        log_filename,
        mode='w',
        level=args.log_level,
        bubble=True
    )
    stdout_handler = StreamHandler(sys.stdout, level=args.log_level, bubble=True)

    with stdout_handler.applicationbound():
        with log_handler.applicationbound():
            log.info("Arguments: {}".format(args))
            start = time.time()
            log.info("starting at {}".format(time.strftime('%l:%M%p %Z on %b %d, %Y')))

            credentials = get_credentials()
            http = credentials.authorize(httplib2.Http())
            drive_service = discovery.build('drive', 'v3', http=http)

            # get the files in the specified folder.
            files = drive_service.files()
            request = files.list(
                pageSize=args.page_size,
                q="'{}' in parents".format(args.source_folder_id),
                fields="nextPageToken, files(id, name, mimeType)"
            )

            page_counter = 0
            file_counter = 0
            while request is not None:
                file_page = request.execute(http=http)
                page_counter += 1
                page_file_counter = 0  # reset the paging file counter

                # determine the page at which to start processing.
                if page_counter >= args.start_page:
                    log.info(u"######## Page {} ########".format(page_counter))

                    for this_file in file_page['files']:
                        file_counter += 1
                        page_file_counter += 1
                        log.info(u"#== Processing {} {} file number {} on page {}. {} files processed.".format(
                            this_file['mimeType'],
                            this_file['name'],
                            page_file_counter,
                            page_counter,
                            file_counter
                        ))

                        # if not a folder
                        if this_file['mimeType'] != 'application/vnd.google-apps.folder':
                            # Copy the file
                            new_file = {'title': this_file['name']}
                            copied_file = drive_service.files().copy(fileId=this_file['id'], body=new_file).execute()
                            # move it to its new location
                            drive_service.files().update(
                                fileId=copied_file['id'],
                                addParents=args.target_folder_id,
                                removeParents=args.source_folder_id
                            ).execute()
                        else:
                            log.info(u"Skipped Folder")

                else:
                    log.info(u"Skipping Page {}".format(page_counter))

                # stop if we have come to the last user specified page
                if args.end_page and page_counter == args.end_page:
                    log.info(u"Finished paging at page {}".format(page_counter))
                    break

                # request the next page of files
                request = files.list_next(request, file_page)

            log.info("Running time: {}".format(str(datetime.timedelta(seconds=(round(time.time() - start, 3))))))
            log.info("Log written to {}:".format(log_filename))