def cmd():
    arguments = docopt(__doc__, version=version())
    if arguments.get('init'):
        init(arguments.get('--folder'))
    else:
        server(arguments.get('--port'), arguments.get('--host'))
def cmd():
    arguments = docopt(__doc__, version=version())
    if arguments.get('init'):
        # init folder
        init(arguments.get('--folder'))
    else:
        # Call django cmd
        server()
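# The two cmd() variants above rely on docopt parsing the module's __doc__
# usage string. A minimal sketch of such a docstring, assuming option names
# that match the arguments.get() keys used above (an illustration, not
# gerapy's actual docstring):
"""Gerapy.

Usage:
  gerapy init [--folder=<folder>]
  gerapy server [--host=<host>] [--port=<port>]

Options:
  --folder=<folder>  workspace folder to create
  --host=<host>      host to bind the server to
  --port=<port>      port to bind the server to
"""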
from gerapy import version
from gerapy.cmd.init import init
from gerapy.cmd.parse import parse
from gerapy.cmd.generate import generate
from gerapy.server.manage import manage
import argparse

parser = argparse.ArgumentParser()
parser.add_argument('-v', '--version', action='version', version=version(), help='get version')
subparsers = parser.add_subparsers(dest='command', title='available commands', metavar='')
# migrate
parser_migrate = subparsers.add_parser('migrate', help='migrate database')
# create superuser
parser_createsuperuser = subparsers.add_parser('createsuperuser', help='create superuser')
# makemigrations
parser_makemigrations = subparsers.add_parser('makemigrations', help='make migrations')
# runserver
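# Hypothetical dispatch sketch for the subcommands registered above: migrate,
# createsuperuser and makemigrations map onto Django management commands, so
# one plausible (assumed, not gerapy's actual) implementation forwards the
# parsed command to Django:
from django.core.management import execute_from_command_line

def run_django_command(command, *extra_args):
    # Equivalent to running `python manage.py <command> [extra_args ...]`;
    # assumes DJANGO_SETTINGS_MODULE has already been configured, e.g. by
    # gerapy.server.manage.
    execute_from_command_line(['manage.py', command, *extra_args])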
def package_files(directories):
    paths = []
    for item in directories:
        if isfile(item):
            paths.append(join('..', item))
            continue
        for (path, directories, filenames) in walk(item):
            for filename in filenames:
                paths.append(join('..', path, filename))
    return paths


setup(name='gerapy',
      version=gerapy.version(),
      description='distributed crawler',
      keywords=['gerapy', 'scrapy', 'distributed'],
      author='germey',
      author_email='*****@*****.**',
      url='http://pypi.python.org/pypi/gerapy/',
      license='MIT',
      install_requires=read_requirements('requirements.txt'),
      packages=find_packages(),
      entry_points={'console_scripts': ['gerapy = gerapy.cmd:cmd']},
      package_data={
          '': package_files([
              'gerapy/server/static',
              'gerapy/server/core/templates',
              'gerapy/templates',
              'gerapy/VERSION'
          ])
      })
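# read_requirements() is called by setup() above but is not defined in this
# snippet. A minimal sketch of such a helper, assuming a plain
# requirements.txt with one dependency per line (an illustration, not
# necessarily gerapy's implementation):
def read_requirements(path):
    with open(path) as f:
        return [line.strip() for line in f
                if line.strip() and not line.startswith('#')]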
if not item_help:
    return ''
# add the heading if the section was non-empty
if self.heading is not argparse.SUPPRESS and self.heading is not None:
    current_indent = self.formatter._current_indent
    if self.heading == optional_title:
        heading = '%*s%s:\n' % (current_indent, '', self.heading)
    else:
        heading = '%*s%s:' % (current_indent, '', self.heading)
else:
    heading = ''
return join(['\n', heading, item_help])


parser = argparse.ArgumentParser(description='Gerapy %s - Distributed Crawler Management Framework' % version(),
                                 formatter_class=CapitalisedHelpFormatter, add_help=False)
parser._optionals.title = optional_title
parser.add_argument('-v', '--version', action='version', version=version(), help='Get version of Gerapy')
parser.add_argument('-h', '--help', action='help', help='Show this help message and exit')
subparsers = parser.add_subparsers(dest='command', title='Available commands', metavar='')
# init
parser_init = subparsers.add_parser('init', help='Init workspace, default to gerapy')
parser_init.add_argument('folder', default='gerapy', nargs='?', type=str, help='Initial workspace folder')
# init admin
parser_initadmin = subparsers.add_parser('initadmin', help='Create default super user admin')
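# Example invocations for the parser defined above (values follow the
# add_argument defaults shown):
#   gerapy init            -> args.command == 'init', args.folder == 'gerapy'
#   gerapy init myproject  -> args.command == 'init', args.folder == 'myproject'
#   gerapy initadmin       -> args.command == 'initadmin'
# Dispatch sketch (assumes an init() helper like the one imported in the
# argparse-based module earlier; not gerapy's actual dispatch code):
args = parser.parse_args()
if args.command == 'init':
    init(args.folder)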