# IMPORTS ---------------------------------------------------------------------- from os import system from os import listdir as list_dir from os.path import basename as base_name from os.path import dirname as dir_name from os.path import realpath as real_path from os.path import relpath as rel_path from os.path import isfile as is_file from os.path import join as join_path from invoke import task # ------------------------------------------------------------------------------ # CONFIGURATION ---------------------------------------------------------------- ROOT_DIR = dir_name(real_path(__file__)) SHARED_DIR = join_path(ROOT_DIR, 'shared') TEST_DIR = join_path(ROOT_DIR, 'test') # ------------------------------------------------------------------------------ # TASKS ------------------------------------------------------------------------ @task(help={'ctf_name': 'CTF name', 'challenge_name': 'Challenge name'}) def create_challenge(ctf_name, challenge_name): """Prepare the storage for a CTF challenge.""" challenge_dir = join_path(ROOT_DIR, ctf_name, challenge_name) shared_link = rel_path(SHARED_DIR, challenge_dir) print('Preparing {storage} for challenge {challenge} of CTF {ctf}'.format( storage=challenge_dir, challenge=challenge_name, ctf=ctf_name))
""" """ # Prefer setuptools over distutils from setuptools import setup, find_packages from os import path # io.open is needed for projects supporting Python 2.7 # ensures open() deafults to text mode with universal newline #from io import open here = path.abspath(path.dir_name(__file__)) # Get long description from the README file with open(path.join(here, 'README.md'), encoding='utf-8') as f: long_description = f.read() setup( # Name of the project. Name registers when package is published name='BackupScraper', #required version='1.2.0', description='A Custom Backups Scraper', long_description=long_description, long_description_content_type='text/markdown', url='https://github.com/DavidECrowley', author='David E Crowley', author_email='*****@*****.**', )
def __init__(self, html_file): self.html_file = html_file self.json_file = path_join(dir_name(html_file), 'question.json') self.image_dir = dir_name(html_file)
# BUG FIX: `sys` is used below (sys.path.append) but was never imported,
# which made this module fail with NameError on import.
import sys

from concurrent.futures import ThreadPoolExecutor
from os.path import dirname as dir_name, realpath as real_path
from time import sleep
from unittest import TestCase, main

try:
    from asyncio import get_event_loop
except ImportError:
    # Python 2 compatibility
    try:
        from trollius import get_event_loop
    except ImportError:
        # Narrowed from a bare `except:` so unrelated errors (KeyboardInterrupt,
        # SystemExit, ...) are not silently swallowed.
        print('Python 2 is supported through the external package ' +
              '`trollius`: you need to install it (`pip install trollius`)')
        exit(-1)

# Make the repository root importable so the `shared` package resolves.
sys.path.append(dir_name(dir_name(real_path(__file__))))

from shared.irc import IRCClient
from shared.output import enable_verbose, print_debug
# ------------------------------------------------------------------------------

# UTILITIES --------------------------------------------------------------------
def _delayed_disconnect(client, delay=1):
    """Return a thunk that sleeps `delay` second(s) and then disconnects `client`.

    Intended to be run on a worker thread so the main thread can keep
    driving the event loop while the disconnect is pending.
    """
    def _inner():
        print_debug('Waiting {} second(s) before disconnect'.format(delay))
        sleep(delay)
        print_debug('Sending disconnect')
        client.disconnect()
    return _inner
from concurrent.futures import ThreadPoolExecutor from os.path import dirname as dir_name, realpath as real_path from time import sleep from unittest import TestCase, main try: from asyncio import get_event_loop except ImportError: # Python 2 compatibility try: from trollius import get_event_loop except: print('Python 2 is supported through the external package ' + '`trollius`: you need to install it (`pip install trollius`)') exit(-1) sys.path.append(dir_name(dir_name(real_path(__file__)))) from shared.irc import IRCClient from shared.output import enable_verbose, print_debug # ------------------------------------------------------------------------------ # UTILITIES -------------------------------------------------------------------- def _delayed_disconnect(client, delay=1): def _inner(): print_debug('Waiting {} second(s) before disconnect'.format(delay)) sleep(delay) print_debug('Sending disconnect') client.disconnect() return _inner def _spawn(func, max_workers=1):