コード例 #1
0
ファイル: remote.py プロジェクト: olibrook/djangae
class Command(BaseCommand):

    def __init__(self, *args, **kwargs):
        from djangae.boot import setup_paths, setup_datastore_stubs
        setup_paths()
        super(Command, self).__init__(*args, **kwargs)

    def run_from_argv(self, argv):
        from google.appengine.ext.remote_api import remote_api_stub
        from google.appengine.tools import appengine_rpc
        import getpass
        from djangae.boot import find_project_root

        self.stdout = OutputWrapper(sys.stdout)

        def auth_func():
            return (raw_input('Google Account Login:'******'Password:'******'app.yaml')).read()

        app_id = app_yaml.split("application:")[1].lstrip().split()[0]

        self.stdout.write("Opening Remote API connection to {0}...\n".format(app_id))
        remote_api_stub.ConfigureRemoteApi(None,
            '/_ah/remote_api',
            auth_func,
            servername='{0}.appspot.com'.format(app_id),
            secure=True,
        )
        self.stdout.write("...Connection established...have a nice day :)\n".format(app_id))
        argv = argv[:1] + argv[2:]
        execute_from_command_line(argv)
コード例 #2
0
ファイル: base.py プロジェクト: antoniocdff/django-tenants
def run_migrations(args, options, executor_codename, schema_name, allow_atomic=True):
    from django.core.management import color
    from django.core.management.base import OutputWrapper
    from django.db import connection

    style = color.color_style()

    def style_func(msg):
        return '[%s:%s] %s' % (
            style.NOTICE(executor_codename),
            style.NOTICE(schema_name),
            msg
        )

    connection.set_schema(schema_name)
    stdout = OutputWrapper(sys.stdout)
    stdout.style_func = style_func
    stderr = OutputWrapper(sys.stderr)
    stderr.style_func = style_func
    if int(options.get('verbosity', 1)) >= 1:
        stdout.write(style.NOTICE("=== Starting migration"))
    MigrateCommand(stdout=stdout, stderr=stderr).execute(*args, **options)

    try:
        transaction.commit()
        connection.close()
        connection.connection = None
    except transaction.TransactionManagementError:
        if not allow_atomic:
            raise

        # We are in atomic transaction, don't close connections
        pass

    connection.set_schema_to_public()
コード例 #3
0
class Command(BaseCommand):
    def __init__(self, *args, **kwargs):
        from djangae.boot import setup_paths, setup_datastore_stubs
        setup_paths()
        super(Command, self).__init__(*args, **kwargs)

    def run_from_argv(self, argv):
        from google.appengine.ext.remote_api import remote_api_stub
        from google.appengine.tools import appengine_rpc
        import getpass
        from djangae.boot import find_project_root

        self.stdout = OutputWrapper(sys.stdout)

        def auth_func():
            return (raw_input('Google Account Login:'******'Password:'******'app.yaml')).read()

        app_id = app_yaml.split("application:")[1].lstrip().split()[0]

        self.stdout.write(
            "Opening Remote API connection to {0}...\n".format(app_id))
        remote_api_stub.ConfigureRemoteApi(
            None,
            '/_ah/remote_api',
            auth_func,
            servername='{0}.appspot.com'.format(app_id),
            secure=True,
        )
        self.stdout.write(
            "...Connection established...have a nice day :)\n".format(app_id))
        argv = argv[:1] + argv[2:]
        execute_from_command_line(argv)
コード例 #4
0
def multiprocess_reader(urls, stdout=None):
    stdout = OutputWrapper(stdout or sys.stdout)
    result = URLReader(urls=urls)()
    out = set()
    for built_result in result:
        out.add(built_result)
        stdout.write("Read {}".format(built_result.url))
    return out
コード例 #5
0
def write(msg, is_error=False):
    stdout = OutputWrapper(sys.stdout)
    style = color_style()
    if is_error:
        styling_msg = style.ERROR(msg)
    else:
        styling_msg = style.SUCCESS(msg)

    stdout.write(styling_msg)
コード例 #6
0
ファイル: base.py プロジェクト: tikservices/django-tenants
def run_migrations(args, options, executor_codename, schema_name, tenant_type='',
                   allow_atomic=True, idx=None, count=None):
    from django.core.management import color
    from django.core.management.base import OutputWrapper
    from django.db import connections
    style = color.color_style()

    def style_func(msg):
        percent_str = ''
        if idx is not None and count is not None and count > 0:
            percent_str = '%d/%d (%s%%) ' % (idx + 1, count, int(100 * (idx + 1) / count))

        message = '[%s%s:%s] %s' % (
            percent_str,
            style.NOTICE(executor_codename),
            style.NOTICE(schema_name),
            msg
        )
        signal_message = '[%s%s:%s] %s' % (
            percent_str,
            executor_codename,
            schema_name,
            msg
        )
        schema_migrate_message.send(run_migrations, message=signal_message)
        return message

    connection = connections[options.get('database', get_tenant_database_alias())]
    connection.set_schema(schema_name, tenant_type=tenant_type)

    # ensure that django_migrations table is created in the schema before migrations run, otherwise the migration
    # table in the public schema gets picked and no migrations are applied
    migration_recorder = MigrationRecorder(connection)
    migration_recorder.ensure_schema()

    stdout = OutputWrapper(sys.stdout)
    stdout.style_func = style_func
    stderr = OutputWrapper(sys.stderr)
    stderr.style_func = style_func
    if int(options.get('verbosity', 1)) >= 1:
        stdout.write(style.NOTICE("=== Starting migration"))
    MigrateCommand(stdout=stdout, stderr=stderr).execute(*args, **options)

    try:
        transaction.commit()
        connection.close()
        connection.connection = None
    except transaction.TransactionManagementError:
        if not allow_atomic:
            raise

        # We are in atomic transaction, don't close connections
        pass

    connection.set_schema_to_public()
    schema_migrated.send(run_migrations, schema_name=schema_name)
コード例 #7
0
 def join_individual(self, b: 'Individual', out: OutputWrapper = None):
     if out:
         out.write("Карт для переноса: %s" % Card.objects.filter(individual=b).count())
     slog.Log(key=str(self.pk), type=2002,
              body=simplejson.dumps({"Сохраняемая запись": str(self), "Объединяемая запись": str(b)}),
              user=None).save()
     for c in Card.objects.filter(individual=b):
         c.individual = self
         c.save()
     b.delete()
コード例 #8
0
def multiprocess_writer(data, stdout=None):
    stdout = OutputWrapper(stdout or sys.stdout)
    result = URLWriter(data=data)()
    out = set()
    for built_result in result:
        out.add(built_result)
        if built_result.created:
            stdout.write("Created {}".format(built_result.name))
        elif built_result.modified:
            stdout.write("Updated {}".format(built_result.name))
    return out
コード例 #9
0
class Log:
    def __init__(self):
        self._output = OutputWrapper(stdout)

    def print(self, msg, ending='\n', color_prefix=''):
        self._output.write(f'{color_prefix}{msg}\033[0m', ending=ending)

    def printerr(self, msg, ending='\n'):
        self.print(msg, ending, color_prefix='\033[91m')

    def printok(self, msg, ending='\n'):
        self.print(msg, ending, color_prefix='\033[92m')
コード例 #10
0
    def execute(self, *args, **options):
        try:
            super(Command, self).execute(*args, **options)
        except CommandError as e:
            if options.get('traceback', False):
                raise

            # self.stderr is not guaranteed to be set here
            stderr = getattr(self, 'stderr', None)
            if not stderr:
                stderr = OutputWrapper(sys.stderr, self.style.ERROR)
            stderr.write('%s: %s' % (e.__class__.__name__, e))
            sys.exit(2)
コード例 #11
0
def main():
    parser = OptionParser(usage=textwrap.dedent("""\
            %prog [options] <instance-name> [<target-directory>]

            Creates a new EOxServer instance with all necessary files and
            folder structure. Optionally, a SQLite database is initiated.
        """),
                          version=eoxserver.get_version())
    parser.add_option('-i',
                      '--init-spatialite',
                      '--init_spatialite',
                      dest='init_spatialite',
                      action='store_true',
                      default=False,
                      help='Flag to initialize the sqlite database.')
    parser.add_option('-v',
                      '--verbosity',
                      action='store',
                      dest='verbosity',
                      default='1',
                      type='choice',
                      choices=['0', '1', '2', '3'])
    parser.add_option('--traceback',
                      action='store_true',
                      help='Raise on exception')

    options, args = parser.parse_args()

    error_stream = OutputWrapper(sys.stderr, color_style().ERROR)

    if not args:
        error_stream.write("Mandatory argument 'instance-name' not given.\n")
        sys.exit(1)

    name = args[0]
    try:
        target = args[1]
    except IndexError:
        target = None

    try:
        create_instance(name, target, **options.__dict__)
    except Exception as e:
        if options.traceback:
            raise
        error_stream.write("%s: %s\n" % (e.__class__.__name__, e))
コード例 #12
0
ファイル: base.py プロジェクト: sguermond/django-tenants
def run_migrations(args,
                   options,
                   executor_codename,
                   schema_name,
                   allow_atomic=True,
                   idx=None,
                   count=None):
    from django.core.management import color
    from django.core.management.base import OutputWrapper
    from django.db import connections

    style = color.color_style()

    def style_func(msg):
        percent_str = ''
        if idx is not None and count is not None and count > 0:
            percent_str = '%d/%d (%s%%) ' % (idx + 1, count,
                                             int(100 * (idx + 1) / count))
        return '[%s%s:%s] %s' % (percent_str, style.NOTICE(executor_codename),
                                 style.NOTICE(schema_name), msg)

    include_public = True if (options.get('shared')
                              or schema_name == 'public') else False
    connection = connections[get_tenant_database_alias()]
    connection.set_schema(schema_name, include_public=include_public)

    stdout = OutputWrapper(sys.stdout)
    stdout.style_func = style_func
    stderr = OutputWrapper(sys.stderr)
    stderr.style_func = style_func
    if int(options.get('verbosity', 1)) >= 1:
        stdout.write(style.NOTICE("=== Starting migration"))
    MigrateCommand(stdout=stdout, stderr=stderr).execute(*args, **options)

    try:
        transaction.commit()
        connection.close()
        connection.connection = None
    except transaction.TransactionManagementError:
        if not allow_atomic:
            raise

        # We are in atomic transaction, don't close connections
        pass

    connection.set_schema_to_public()
コード例 #13
0
def run_migrations(args,
                   options,
                   executor_codename,
                   schema_name,
                   allow_atomic=True):
    from django.core.management import color
    from django.core.management.base import OutputWrapper
    from django.db import connections

    PUBLIC_SCHEMA_NAME = get_public_schema_name()

    options['database'] = settings.TENANT_DATABASE
    if schema_name == PUBLIC_SCHEMA_NAME:
        options['database'] = DEFAULT_DB_ALIAS

    style = color.color_style()

    def style_func(msg):
        return '[%s:%s:%s] %s' % (options['database'],
                                  style.NOTICE(executor_codename),
                                  style.NOTICE(schema_name), msg)

    connections[options['database']].set_schema(schema_name)

    stdout = OutputWrapper(sys.stdout)
    stdout.style_func = style_func
    stderr = OutputWrapper(sys.stderr)
    stderr.style_func = style_func
    if int(options.get('verbosity', 1)) >= 1:
        stdout.write(style.NOTICE("=== Starting migration"))
    MigrateCommand(stdout=stdout, stderr=stderr).execute(*args, **options)

    try:
        transaction.commit()
        connections[options['database']].close()
        connections[options['database']].connection = None
    except transaction.TransactionManagementError:
        if not allow_atomic:
            raise

        # We are in atomic transaction, don't close connections
        pass
コード例 #14
0
def main():
    parser = OptionParser(
        usage=textwrap.dedent("""\
            %prog [options] <instance-name> [<target-directory>]

            Creates a new EOxServer instance with all necessary files and
            folder structure. Optionally, a SQLite database is initiated.
        """),
        version=eoxserver.get_version()
    )
    parser.add_option('-i', '--init-spatialite', '--init_spatialite',
        dest='init_spatialite', action='store_true', default=False,
        help='Flag to initialize the sqlite database.'
    )
    parser.add_option('-v', '--verbosity',
        action='store', dest='verbosity', default='1',
        type='choice', choices=['0', '1', '2', '3']
    )
    parser.add_option('--traceback',
        action='store_true', help='Raise on exception'
    )

    options, args = parser.parse_args()

    error_stream = OutputWrapper(sys.stderr, color_style().ERROR)

    if not args:
        error_stream.write("Mandatory argument 'instance-name' not given.\n")
        sys.exit(1)

    name = args[0]
    try:
        target = args[1]
    except IndexError:
        target = None

    try:
        create_instance(name, target, **options.__dict__)
    except Exception as e:
        if options.traceback:
            raise
        error_stream.write("%s: %s\n" % (e.__class__.__name__, e))
コード例 #15
0
ファイル: base.py プロジェクト: tomturner/django-tenants
def run_migrations(args, options, executor_codename, schema_name, allow_atomic=True, idx=None, count=None):
    from django.core.management import color
    from django.core.management.base import OutputWrapper
    from django.db import connections

    style = color.color_style()

    def style_func(msg):
        percent_str = ''
        if idx is not None and count is not None and count > 0:
            percent_str = '%d/%d (%s%%) ' % (idx + 1, count, int(100 * (idx + 1) / count))
        return '[%s%s:%s] %s' % (
            percent_str,
            style.NOTICE(executor_codename),
            style.NOTICE(schema_name),
            msg
        )

    connection = connections[get_tenant_database_alias()]
    connection.set_schema(schema_name)

    stdout = OutputWrapper(sys.stdout)
    stdout.style_func = style_func
    stderr = OutputWrapper(sys.stderr)
    stderr.style_func = style_func
    if int(options.get('verbosity', 1)) >= 1:
        stdout.write(style.NOTICE("=== Starting migration"))
    MigrateCommand(stdout=stdout, stderr=stderr).execute(*args, **options)

    try:
        transaction.commit()
        connection.close()
        connection.connection = None
    except transaction.TransactionManagementError:
        if not allow_atomic:
            raise

        # We are in atomic transaction, don't close connections
        pass

    connection.set_schema_to_public()
コード例 #16
0
class ImportData:
    def __init__(self):
        self.stdout = OutputWrapper(sys.stdout)
        self.stderr = OutputWrapper(sys.stderr)

    def init_sampledataset(self, target_lists):
        """

        :param target_lists:
        :return:
        """

        self.stdout.write('Loading data...')
        for file in target_lists:
            self.stdout.write(f'file: {file}')

            with open(file) as f:
                reader = csv.reader(f)
                # skip header
                next(reader, None)
                for row in reader:
                    data = {
                        'date': datetime.strptime(row[0], '%d.%m.%Y').date(),
                        'channel': row[1],
                        'country': row[2],
                        'os': row[3],
                        'impressions': row[4],
                        'clicks': row[5],
                        'installs': row[6],
                        'spend': Decimal(str(row[7])),
                        'revenue': Decimal(str(row[8])),
                    }
                    SampleDataset.objects.update_or_create(**data)
        self.stdout.write('Done!')
コード例 #17
0
 def on_execute_command(self, tenant, args, options):
     style = color.color_style()
     stdout = OutputWrapper(sys.stdout)
     stdout.write(style.MIGRATE_HEADING("=".ljust(70, "=")))
     stdout.write(style.MIGRATE_HEADING("=== Starting collectstatic: {0} ".format(tenant.schema_name).ljust(70, "=")))
     stdout.write(style.MIGRATE_HEADING("=".ljust(70, "=")))
     options["interactive"] = False
     super(Command, self).on_execute_command(tenant, args, options)
コード例 #18
0
ファイル: base.py プロジェクト: estebanaldana/udemy-exame
def run_migrations(args,
                   options,
                   executor_codename,
                   schema_name,
                   allow_atomic=True):
    from django.core.management import color
    from django.core.management.base import OutputWrapper
    from django.db import connection

    style = color.color_style()

    def style_func(msg):
        return '[%s:%s] %s' % (style.NOTICE(executor_codename),
                               style.NOTICE(schema_name), msg)

    stdout = OutputWrapper(sys.stdout)
    stdout.style_func = style_func
    stderr = OutputWrapper(sys.stderr)
    stderr.style_func = style_func
    if int(options.get('verbosity', 1)) >= 1:
        stdout.write(
            style.NOTICE("=== Running migrate for schema %s" % schema_name))

    connection.set_schema(schema_name)
    MigrateCommand(stdout=stdout, stderr=stderr).execute(*args, **options)

    try:
        transaction.commit()
        connection.close()
        connection.connection = None
    except transaction.TransactionManagementError:
        if not allow_atomic:
            raise

        # We are in atomic transaction, don't close connections
        pass

    connection.set_schema_to_public()
コード例 #19
0
 def on_execute_command(self, tenant, args, options):
     style = color.color_style()
     stdout = OutputWrapper(sys.stdout)
     stdout.write(style.MIGRATE_HEADING("=".ljust(70, "=")))
     stdout.write(
         style.MIGRATE_HEADING("=== Starting collectstatic: {0} ".format(
             tenant.schema_name).ljust(70, "=")))
     stdout.write(style.MIGRATE_HEADING("=".ljust(70, "=")))
     options["interactive"] = False
     super(Command, self).on_execute_command(tenant, args, options)
コード例 #20
0
ファイル: reporting_orgs.py プロジェクト: punchagan/akvo-rsr
class ReportingOrgMaker(object):

    NO_CANDIDATE = 'no candidate'
    MULTIPLE_CANDIDATES = 'multiple candidates'
    SYNC_NOT_SUPPORT = 'sync not support'

    def __init__(self, options):
        self.stdout = OutputWrapper(options.get('stdout', sys.stdout))
        self.keyword_based_reporting_orgs = {
            'WASH Alliance': 8,
            'Connect4Change': 34,
            'SRHR Alliance': 1043,
            'WvW': 43,
            'wvw2014': 43,
            'wvw2015': 43,
            'wvw2016': 43,
            'WfW': 43,
            'wfw2014': 43,
            'wfw2015': 43,
            'wfw2016': 43,
        }
        self.keywords_set = set(self.keyword_based_reporting_orgs.keys())
        self.migrate = options['migrate']
        self.ok_list = []
        self.fix_list = []

    def add_to_ok(self, project, org):
        self.ok_list += [OKProject(project, org)]

    def add_to_fix(self, project, reason, partners, sync_owner=None):
        self.fix_list += [FixProject(project, reason, partners, sync_owner)]

    def find_reporting_org_for_projects(self):
        # loop over all projects, trying ot figure reporting-org for each.
        self.stdout.write('\nData gathering progress:')
        i = 1
        for project in Project.objects.published().prefetch_related(
                'partnerships', 'partnerships__organisation', 'keywords'):
            if not i % 100:
                self.stdout.write(str(i))
            else:
                self.stdout.write(".", ending='')
            i += 1
            self.stdout.flush()

            # first check if we have a keyword from the keyword_based_reporting_orgs.keys() list
            reporting_keyword = self.keywords_set.intersection(
                set(project.keywords.values_list('label', flat=True)))
            if reporting_keyword:
                # if we do, set the reporting-org to the org connected to the keyword
                self.add_to_ok(project, Organisation.objects.get(
                    pk=self.keyword_based_reporting_orgs[list(reporting_keyword)[0]]))
            else:
                # otherwise try to find the reporting org among sync_owner and accountable partners
                support_partners = project.partnerships.filter(
                    iati_organisation_role=Partnership.IATI_ACCOUNTABLE_PARTNER
                ).select_related('organisation')

                # If there's no support partner, we set the sync_owner as reporting-org,
                # if there is one. Otherwise we report the problem.
                if support_partners.count() == 0:
                    if project.sync_owner:
                        self.add_to_ok(project, project.sync_owner)
                    else:
                        self.add_to_fix(project, self.NO_CANDIDATE, [])

                # If we have exactly one support partner, then things are in order if either:
                # 1) the sync_owner matches the support partner
                #   2) there is no sync_owner
                # In both cases we should be fine to set the sync_owner/support partner as the
                # reporting-org.
                elif support_partners.count() == 1:
                    if project.sync_owner:
                        # 1)
                        if project.sync_owner == support_partners[0].organisation:
                            self.add_to_ok(project, project.sync_owner)
                        else:
                            self.add_to_fix(project, self.SYNC_NOT_SUPPORT, support_partners,
                                            project.sync_owner)
                    # 2)
                    else:
                        self.add_to_ok(project, support_partners[0].organisation)

                # If there are multiple support partners we check if one of the partners is sync_owner
                # we set that organisation to reporting. Otherwise we report the problem.
                else:
                    if project.sync_owner:
                        if project.sync_owner.id in [p.organisation.id for p in support_partners]:
                            self.add_to_ok(project, project.sync_owner)
                        else:
                            self.add_to_fix(project, self.MULTIPLE_CANDIDATES, support_partners)

    def create_reporting_orgs(self):
        try:
            reporting_org_choice = Partnership.IATI_REPORTING_ORGANISATION
            self.stdout.write(
                u"\n*** Assigning reporting-org partners to the following projects ***"
            )
            self.stdout.write(
                u"project ID, project title, organisation id, organisation name"
            )
            for data in self.ok_list:
                partner = Partnership(
                    organisation=data.organisation,
                    iati_organisation_role=reporting_org_choice)
                data.project.partnerships.add(partner)
                self.print_ok_data(data)
        except:
            self.stdout.write(
                u"\n*** Reporting organisation choice not available for Partnerships ***"
            )

    def print_ok_data(self, data):
        self.stdout.write(
            u'{},"{}",{},"{}"'.format(data.project.id, data.project.title, data.organisation.id,
                                      data.organisation.name))

    def print_fix_data(self, data, partner):
        self.stdout.write(
            u'{},"{}",{},"{}","{}",{},"{}"'.format(
                data.project.id,
                data.project.title,
                partner.organisation.id,
                partner.organisation.name,
                data.reason,
                data.sync_owner.id if data.sync_owner else '',
                data.sync_owner.name if data.sync_owner else ''))

    def output_ok_list(self):
        self.stdout.write(
            u"\n*** List of projects and the <reporting-org> partner they will get when migrating ***"
        )
        self.stdout.write(
            u"project ID, project title, organisation id, organisation name"
        )
        for data in self.ok_list:
            self.print_ok_data(data)

    def output_fix_list(self):
        self.stdout.write(
            u"\n*** List of projects where no clear-cut reporting-org candidate was found ***"
        )
        self.stdout.write(
            u"project ID, project title, support partner id, support partner name, type of problem, sync_owner id, sync_owner name"
        )
        for data in self.fix_list:
            for partner in data.partners:
                self.print_fix_data(data, partner)
コード例 #21
0
ファイル: reporting_orgs.py プロジェクト: nagyist/akvo-rsr
class ReportingOrgMaker(object):

    NO_CANDIDATE = 'no candidate'
    MULTIPLE_CANDIDATES = 'multiple candidates'
    SYNC_NOT_SUPPORT = 'sync not support'

    def __init__(self, options):
        self.stdout = OutputWrapper(options.get('stdout', sys.stdout))
        self.keyword_based_reporting_orgs = {
            'WASH Alliance': 8,
            'Connect4Change': 34,
            'SRHR Alliance': 1043,
            'WvW': 43,
            'wvw2014': 43,
            'wvw2015': 43,
            'wvw2016': 43,
            'WfW': 43,
            'wfw2014': 43,
            'wfw2015': 43,
            'wfw2016': 43,
        }
        self.keywords_set = set(self.keyword_based_reporting_orgs.keys())
        self.migrate = options['migrate']
        self.ok_list = []
        self.fix_list = []

    def add_to_ok(self, project, org):
        self.ok_list += [OKProject(project, org)]

    def add_to_fix(self, project, reason, partners, sync_owner=None):
        self.fix_list += [FixProject(project, reason, partners, sync_owner)]

    def find_reporting_org_for_projects(self):
        # loop over all projects, trying ot figure reporting-org for each.
        self.stdout.write('\nData gathering progress:')
        i = 1
        for project in Project.objects.published().prefetch_related(
                'partnerships', 'partnerships__organisation', 'keywords'):
            if not i % 100:
                self.stdout.write(str(i))
            else:
                self.stdout.write(".", ending='')
            i += 1
            self.stdout.flush()

            # first check if we have a keyword from the keyword_based_reporting_orgs.keys() list
            reporting_keyword = self.keywords_set.intersection(
                set(project.keywords.values_list('label', flat=True)))
            if reporting_keyword:
                # if we do, set the reporting-org to the org connected to the keyword
                self.add_to_ok(
                    project,
                    Organisation.objects.get(
                        pk=self.keyword_based_reporting_orgs[list(
                            reporting_keyword)[0]]))
            else:
                # otherwise try to find the reporting org among sync_owner and accountable partners
                support_partners = project.partnerships.filter(
                    iati_organisation_role=Partnership.IATI_ACCOUNTABLE_PARTNER
                ).select_related('organisation')

                # If there's no support partner, we set the sync_owner as reporting-org,
                # if there is one. Otherwise we report the problem.
                if support_partners.count() == 0:
                    if project.sync_owner:
                        self.add_to_ok(project, project.sync_owner)
                    else:
                        self.add_to_fix(project, self.NO_CANDIDATE, [])

                # If we have exactly one support partner, then things are in order if either:
                # 1) the sync_owner matches the support partner
                #   2) there is no sync_owner
                # In both cases we should be fine to set the sync_owner/support partner as the
                # reporting-org.
                elif support_partners.count() == 1:
                    if project.sync_owner:
                        # 1)
                        if project.sync_owner == support_partners[
                                0].organisation:
                            self.add_to_ok(project, project.sync_owner)
                        else:
                            self.add_to_fix(project, self.SYNC_NOT_SUPPORT,
                                            support_partners,
                                            project.sync_owner)
                    # 2)
                    else:
                        self.add_to_ok(project,
                                       support_partners[0].organisation)

                # If there are multiple support partners we check if one of the partners is sync_owner
                # we set that organisation to reporting. Otherwise we report the problem.
                else:
                    if project.sync_owner:
                        if project.sync_owner.id in [
                                p.organisation.id for p in support_partners
                        ]:
                            self.add_to_ok(project, project.sync_owner)
                        else:
                            self.add_to_fix(project, self.MULTIPLE_CANDIDATES,
                                            support_partners)

    def create_reporting_orgs(self):
        try:
            reporting_org_choice = Partnership.IATI_REPORTING_ORGANISATION
            self.stdout.write(
                u"\n*** Assigning reporting-org partners to the following projects ***"
            )
            self.stdout.write(
                u"project ID, project title, organisation id, organisation name"
            )
            for data in self.ok_list:
                partner = Partnership(
                    organisation=data.organisation,
                    iati_organisation_role=reporting_org_choice)
                data.project.partnerships.add(partner)
                self.print_ok_data(data)
        except:
            self.stdout.write(
                u"\n*** Reporting organisation choice not available for Partnerships ***"
            )

    def print_ok_data(self, data):
        self.stdout.write(u'{},"{}",{},"{}"'.format(data.project.id,
                                                    data.project.title,
                                                    data.organisation.id,
                                                    data.organisation.name))

    def print_fix_data(self, data, partner):
        self.stdout.write(u'{},"{}",{},"{}","{}",{},"{}"'.format(
            data.project.id, data.project.title, partner.organisation.id,
            partner.organisation.name, data.reason,
            data.sync_owner.id if data.sync_owner else '',
            data.sync_owner.name if data.sync_owner else ''))

    def output_ok_list(self):
        self.stdout.write(
            u"\n*** List of projects and the <reporting-org> partner they will get when migrating ***"
        )
        self.stdout.write(
            u"project ID, project title, organisation id, organisation name")
        for data in self.ok_list:
            self.print_ok_data(data)

    def output_fix_list(self):
        self.stdout.write(
            u"\n*** List of projects where no clear-cut reporting-org candidate was found ***"
        )
        self.stdout.write(
            u"project ID, project title, support partner id, support partner name, type of problem, sync_owner id, sync_owner name"
        )
        for data in self.fix_list:
            for partner in data.partners:
                self.print_fix_data(data, partner)
コード例 #22
0
ファイル: config.py プロジェクト: d9pouces/django-floor
class Command(BaseCommand):
    help = (
        "show the current configuration."
        'Can display as python file ("config python") or as .ini file ("config ini"). Use -v 2 to display more info.'
    )
    requires_system_checks = False
    options = {
        "python": "display the current config as Python module",
        "ini": "display the current config as .ini file",
        "heroku": "display a configuration valid to deploy on Heroku",
        "apache": "display an example of Apache config",
        "nginx": "display an example of Nginx config",
        "systemd": "display an example of systemd config",
        "supervisor": "display an example of Supervisor config",
        "social_authentications": "display configured social authentications",
    }
    if settings.USE_CELERY:
        options["signals"] = "show the defined signals and remote functions"

    def add_arguments(self, parser):
        assert isinstance(parser, ArgumentParser)
        parser.add_argument(
            "action",
            default="show",
            choices=self.options,
            help=",\n".join(['"%s": %s' % x for x in self.options.items()]),
        )
        parser.add_argument(
            "--filename", default=None, help="write output to this file"
        )
        remove_arguments_from_help(
            parser, {"--settings", "--traceback", "--pythonpath"}
        )

    def handle(self, *args, **options):
        try:
            self.handle_head(**options)
        except BrokenPipeError:
            pass

    def handle_head(self, **options):
        action = options["action"]
        verbosity = options["verbosity"]
        filename = options["filename"]
        if filename:
            self.stdout = OutputWrapper(open(filename, "w"))
            self.style = no_style()

        if action == "python":
            self.show_python_config(verbosity)
        elif action == "ini":
            self.show_ini_config(verbosity)
        elif action == "signals":
            self.show_signals_config()
        elif action == "heroku":
            self.show_heroku_config()
        elif action == "apache":
            self.show_external_config("djangofloor/config/apache.conf")
        elif action == "nginx":
            self.show_external_config("djangofloor/config/nginx.conf")
        elif action == "systemd":
            self.show_external_config("djangofloor/config/systemd.conf")
        elif action == "supervisor":
            self.show_external_config("djangofloor/config/supervisor.conf")
        elif action == "social_authentications":
            self.show_social_auth_config()

    def show_external_config(self, config):
        content = render_to_string(config, merger.settings)
        self.stdout.write(content)

    def show_signals_config(self):
        import_signals_and_functions()

        def display_callable(conn):
            fn = conn.function
            if getattr(fn, "__module__", None) and getattr(fn, "__name__", None):
                path = "%s.%s" % (fn.__module__, fn.__name__)
            elif getattr(fn, "__name__", None):
                path = fn.__name__
            else:
                path = str(fn)
            return path

        self.stdout.write(self.style.ERROR("Signals"))
        data = list(decorators.REGISTERED_SIGNALS.items())
        for name, connections in sorted(data, key=lambda x: x[0]):

            self.stdout.write(self.style.WARNING('    "%s"' % name))
            for connection in connections:
                self.stdout.write(
                    self.style.NOTICE("      -> %s" % display_callable(connection))
                )
        self.stdout.write(self.style.ERROR("Functions"))
        data = list(decorators.REGISTERED_FUNCTIONS.items())
        for name, connection in sorted(data, key=lambda x: x[0]):
            self.stdout.write(
                self.style.WARNING(
                    '    "%s" -> %s' % (name, display_callable(connection))
                )
            )

    def show_ini_config(self, verbosity):
        if verbosity >= 2:
            self.stdout.write(self.style.SUCCESS("# read configuration files:"))
        for provider in merger.providers:
            if not isinstance(provider, IniConfigProvider):
                continue
            elif provider.is_valid():
                self.stdout.write(
                    self.style.SUCCESS('    #  - %s "%s"' % (provider.name, provider))
                )
            elif verbosity >= 2:
                self.stdout.write(
                    self.style.ERROR(
                        '    #  - %s "%s" (not found)' % (provider.name, provider)
                    )
                )
        provider = IniConfigProvider()
        merger.write_provider(provider, include_doc=verbosity >= 2)
        self.stdout.write(provider.to_str())

    def show_python_config(self, verbosity):
        self.stdout.write(self.style.SUCCESS("# " + "-" * 80))
        self.stdout.write(
            self.style.SUCCESS(
                _("# Djangofloor version %(version)s") % {"version": version}
            )
        )
        self.stdout.write(
            self.style.SUCCESS(
                _("# %(project)s version %(version)s")
                % {
                    "version": guess_version(merger.settings),
                    "project": merger.settings["DF_PROJECT_NAME"],
                }
            )
        )
        self.stdout.write(self.style.SUCCESS("# Configuration providers:"))
        for provider in merger.providers:
            if provider.is_valid():
                self.stdout.write(
                    self.style.SUCCESS('#  - %s "%s"' % (provider.name, provider))
                )
            elif verbosity > 1:
                self.stdout.write(
                    self.style.ERROR(
                        '#  - %s "%s" (not found)' % (provider.name, provider)
                    )
                )
        self.stdout.write(self.style.SUCCESS("# " + "-" * 80))
        setting_names = list(merger.raw_settings)
        setting_names.sort()

        # first, compute all imports to do
        imports = {}

        def add_import(val):
            if not isinstance(val, type):
                val = val.__class__
            if val.__module__ != "builtins":
                imports.setdefault(val.__module__, set()).add(val.__name__)

        for setting_name in setting_names:
            if setting_name not in merger.settings:
                continue
            value = merger.settings[setting_name]
            add_import(value)
        if imports:
            self.stdout.write("\n")
            for module_name in sorted(imports):
                objects = ", ".join(sorted(imports[module_name]))
                self.stdout.write(
                    self.style.WARNING("from %s import %s" % (module_name, objects))
                )
            self.stdout.write("\n")

        for setting_name in setting_names:
            if setting_name not in merger.settings:
                continue
            value = merger.settings[setting_name]
            self.stdout.write(self.style.SUCCESS("%s = %r" % (setting_name, value)))
            if verbosity <= 1:
                continue
            for provider_name, raw_value in merger.raw_settings[setting_name].items():
                self.stdout.write(
                    self.style.WARNING(
                        "    #   %s -> %r" % (provider_name or "built-in", raw_value)
                    )
                )

    def show_heroku_config(self):
        # Pipfile
        # add extra packages (due to the config) to the Pipfile
        # requirements.txt
        # heroku addons:create heroku-postgresql:dev
        queues = get_expected_queues()
        self.stdout.write("web: %s-aiohttp" % settings.DF_MODULE_NAME)
        for queue in queues:
            self.stdout.write(
                "%s: %s-%s worker -Q %s"
                % (queue, settings.DF_MODULE_NAME, "celery", queue)
            )

    def show_social_auth_config(self):
        from djangofloor.management.commands.social_authentications import (
            Command as SACommand,
        )

        # noinspection PyCallByClass
        SACommand.show_config(self)
コード例 #23
0
ファイル: config.py プロジェクト: parcox/django-floor
class Command(BaseCommand):
    help = (
        "show the current configuration."
        'Can display as python file ("config python") or as .ini file ("config ini"). Use -v 2 to display more info.'
    )
    requires_system_checks = False
    options = {
        "python": "display the current config as Python module",
        "ini": "display the current config as .ini file",
        "heroku": "display a configuration valid to deploy on Heroku",
        "apache": "display an example of Apache config",
        "nginx": "display an example of Nginx config",
        "systemd": "display an example of systemd config",
        "supervisor": "display an example of Supervisor config",
        "social_authentications": "display configured social authentications",
    }
    if settings.USE_CELERY:
        options["signals"] = "show the defined signals and remote functions"

    def add_arguments(self, parser):
        assert isinstance(parser, ArgumentParser)
        parser.add_argument(
            "action",
            default="show",
            choices=self.options,
            help=",\n".join(['"%s": %s' % x for x in self.options.items()]),
        )
        parser.add_argument("--filename",
                            default=None,
                            help="write output to this file")
        remove_arguments_from_help(
            parser, {"--settings", "--traceback", "--pythonpath"})

    def handle(self, *args, **options):
        try:
            self.handle_head(**options)
        except BrokenPipeError:
            pass

    def handle_head(self, **options):
        action = options["action"]
        verbosity = options["verbosity"]
        filename = options["filename"]
        if filename:
            self.stdout = OutputWrapper(open(filename, "w"))
            self.style = no_style()

        if action == "python":
            self.show_python_config(verbosity)
        elif action == "ini":
            self.show_ini_config(verbosity)
        elif action == "signals":
            self.show_signals_config()
        elif action == "heroku":
            self.show_heroku_config()
        elif action == "apache":
            self.show_external_config("djangofloor/config/apache.conf")
        elif action == "nginx":
            self.show_external_config("djangofloor/config/nginx.conf")
        elif action == "systemd":
            self.show_external_config("djangofloor/config/systemd.conf")
        elif action == "supervisor":
            self.show_external_config("djangofloor/config/supervisor.conf")
        elif action == "social_authentications":
            self.show_social_auth_config()

    def show_external_config(self, config):
        content = render_to_string(config, merger.settings)
        self.stdout.write(content)

    def show_signals_config(self):
        import_signals_and_functions()

        def display_callable(conn):
            fn = conn.function
            if getattr(fn, "__module__", None) and getattr(
                    fn, "__name__", None):
                path = "%s.%s" % (fn.__module__, fn.__name__)
            elif getattr(fn, "__name__", None):
                path = fn.__name__
            else:
                path = str(fn)
            return path

        self.stdout.write(self.style.ERROR("Signals"))
        data = list(decorators.REGISTERED_SIGNALS.items())
        for name, connections in sorted(data, key=lambda x: x[0]):

            self.stdout.write(self.style.WARNING('    "%s"' % name))
            for connection in connections:
                self.stdout.write(
                    self.style.NOTICE("      -> %s" %
                                      display_callable(connection)))
        self.stdout.write(self.style.ERROR("Functions"))
        data = list(decorators.REGISTERED_FUNCTIONS.items())
        for name, connection in sorted(data, key=lambda x: x[0]):
            self.stdout.write(
                self.style.WARNING('    "%s" -> %s' %
                                   (name, display_callable(connection))))

    def show_ini_config(self, verbosity):
        if verbosity >= 2:
            self.stdout.write(
                self.style.SUCCESS("# read configuration files:"))
        for provider in merger.providers:
            if not isinstance(provider, IniConfigProvider):
                continue
            elif provider.is_valid():
                self.stdout.write(
                    self.style.SUCCESS('    #  - %s "%s"' %
                                       (provider.name, provider)))
            elif verbosity >= 2:
                self.stdout.write(
                    self.style.ERROR('    #  - %s "%s" (not found)' %
                                     (provider.name, provider)))
        provider = IniConfigProvider()
        merger.write_provider(provider, include_doc=verbosity >= 2)
        self.stdout.write(provider.to_str())

    def show_python_config(self, verbosity):
        self.stdout.write(self.style.SUCCESS("# " + "-" * 80))
        self.stdout.write(
            self.style.SUCCESS(
                _("# Djangofloor version %(version)s") % {"version": version}))
        self.stdout.write(
            self.style.SUCCESS(
                _("# %(project)s version %(version)s") % {
                    "version": guess_version(merger.settings),
                    "project": merger.settings["DF_PROJECT_NAME"],
                }))
        self.stdout.write(self.style.SUCCESS("# Configuration providers:"))
        for provider in merger.providers:
            if provider.is_valid():
                self.stdout.write(
                    self.style.SUCCESS('#  - %s "%s"' %
                                       (provider.name, provider)))
            elif verbosity > 1:
                self.stdout.write(
                    self.style.ERROR('#  - %s "%s" (not found)' %
                                     (provider.name, provider)))
        self.stdout.write(self.style.SUCCESS("# " + "-" * 80))
        setting_names = list(merger.raw_settings)
        setting_names.sort()

        # first, compute all imports to do
        imports = {}

        def add_import(val):
            if not isinstance(val, type):
                val = val.__class__
            if val.__module__ != "builtins":
                imports.setdefault(val.__module__, set()).add(val.__name__)

        for setting_name in setting_names:
            if setting_name not in merger.settings:
                continue
            value = merger.settings[setting_name]
            add_import(value)
        if imports:
            self.stdout.write("\n")
            for module_name in sorted(imports):
                objects = ", ".join(sorted(imports[module_name]))
                self.stdout.write(
                    self.style.WARNING("from %s import %s" %
                                       (module_name, objects)))
            self.stdout.write("\n")

        for setting_name in setting_names:
            if setting_name not in merger.settings:
                continue
            value = merger.settings[setting_name]
            self.stdout.write(
                self.style.SUCCESS("%s = %r" % (setting_name, value)))
            if verbosity <= 1:
                continue
            for provider_name, raw_value in merger.raw_settings[
                    setting_name].items():
                self.stdout.write(
                    self.style.WARNING(
                        "    #   %s -> %r" %
                        (provider_name or "built-in", raw_value)))

    def show_heroku_config(self):
        # Pipfile
        # add extra packages (due to the config) to the Pipfile
        # requirements.txt
        # heroku addons:create heroku-postgresql:dev
        queues = get_expected_queues()
        self.stdout.write("web: %s-aiohttp" % settings.DF_MODULE_NAME)
        for queue in queues:
            self.stdout.write(
                "%s: %s-%s worker -Q %s" %
                (queue, settings.DF_MODULE_NAME, "celery", queue))

    def show_social_auth_config(self):
        from djangofloor.management.commands.social_authentications import (
            Command as SACommand, )

        # noinspection PyCallByClass
        SACommand.show_config(self)
コード例 #24
0
ファイル: config.py プロジェクト: d9pouces/df_config
class Command(BaseCommand):
    help = (
        "show the current configuration."
        'Can display as python file ("config python") or as .ini file ("config ini"). Use -v 2 to display more info.'
    )
    requires_system_checks = False
    options = {
        "python": "display the current config as Python module",
        "ini": "display the current config as .ini file",
        "env": "display the current config as environment variables",
    }

    def add_arguments(self, parser: ArgumentParser):
        parser.add_argument(
            "action",
            default="show",
            choices=self.options,
            help=",\n".join(['"%s": %s' % x for x in self.options.items()]),
        )
        parser.add_argument("--filename",
                            default=None,
                            help="write output to this file")
        remove_arguments_from_help(
            parser, {"--settings", "--traceback", "--pythonpath"})

    def handle(self, *args, **options):
        try:
            self.handle_head(**options)
        except BrokenPipeError:
            pass

    def handle_head(self, **options):
        action = options["action"]
        verbosity = options["verbosity"]
        filename = options["filename"]
        fd = None
        if filename:
            fd = io.StringIO()
            self.stdout = OutputWrapper(fd)
            self.style = no_style()

        if action == "python":
            self.show_python_config(verbosity)
        elif action == "ini":
            self.show_ini_config(verbosity)
        elif action == "env":
            self.show_env_config(verbosity)

        if filename and action in {"python", "env"}:
            content = fd.getvalue()
            # noinspection PyBroadException
            if action == "python":
                try:
                    # noinspection PyPackageRequirements,PyUnresolvedReferences
                    import black

                    mode = black.FileMode()
                    # noinspection PyArgumentList
                    content = black.format_file_contents(content,
                                                         fast=False,
                                                         mode=mode)
                except Exception:
                    pass
            with open(filename, "w") as dst_fd:
                dst_fd.write(content)

    def show_external_config(self, config):
        content = render_to_string(config, merger.settings)
        self.stdout.write(content)

    def show_ini_config(self, verbosity):
        if verbosity >= 2:
            self.stdout.write(
                self.style.SUCCESS("# read configuration files:"))
        for provider in merger.providers:
            if not isinstance(provider, IniConfigProvider):
                continue
            elif provider.is_valid():
                self.stdout.write(
                    self.style.SUCCESS('    #  - %s "%s"' %
                                       (provider.name, provider)))
            elif verbosity >= 2:
                self.stdout.write(
                    self.style.ERROR('    #  - %s "%s" (not found)' %
                                     (provider.name, provider)))
        provider = IniConfigProvider()
        merger.write_provider(provider, include_doc=verbosity >= 2)
        self.stdout.write(provider.to_str())

    def show_env_config(self, verbosity):
        prefix = None
        for provider in merger.providers:
            if not isinstance(provider, EnvironmentConfigProvider):
                continue
            prefix = provider.prefix
        if not prefix:
            self.stderr.write("Environment variables are not used•")
            return
        if verbosity >= 2:
            self.stdout.write(
                self.style.SUCCESS("# read environment variables:"))
        provider = EnvironmentConfigProvider(prefix)
        merger.write_provider(provider, include_doc=verbosity >= 2)
        self.stdout.write(provider.to_str())

    def show_python_config(self, verbosity):
        self.stdout.write(self.style.SUCCESS("# " + "-" * 80))
        self.stdout.write(
            self.style.SUCCESS(
                _("# df_config version %(version)s") % {"version": version}))
        self.stdout.write(
            self.style.SUCCESS(
                _("# %(project)s version %(version)s") % {
                    "version": guess_version(merger.settings),
                    "project": merger.settings["DF_PROJECT_NAME"],
                }))
        self.stdout.write(self.style.SUCCESS("# Configuration providers:"))
        for provider in merger.providers:
            if provider.is_valid():
                self.stdout.write(
                    self.style.SUCCESS('#  - %s "%s"' %
                                       (provider.name, provider)))
            elif verbosity > 1:
                self.stdout.write(
                    self.style.ERROR('#  - %s "%s" (not found)' %
                                     (provider.name, provider)))
        self.stdout.write(self.style.SUCCESS("# " + "-" * 80))
        setting_names = list(merger.raw_settings)
        setting_names.sort()

        # first, compute all imports to do
        imports = {}

        def add_import(val):
            if not isinstance(val, type):
                val = val.__class__
            if val.__module__ != "builtins":
                imports.setdefault(val.__module__, set()).add(val.__name__)

        for setting_name in setting_names:
            if setting_name not in merger.settings:
                continue
            value = merger.settings[setting_name]
            add_import(value)
        if imports:
            self.stdout.write("\n")
            for module_name in sorted(imports):
                objects = ", ".join(sorted(imports[module_name]))
                self.stdout.write(
                    self.style.WARNING("from %s import %s" %
                                       (module_name, objects)))
            self.stdout.write("\n")

        for setting_name in setting_names:
            if setting_name not in merger.settings:
                continue
            value = merger.settings[setting_name]
            self.stdout.write(
                self.style.SUCCESS("%s = %r" % (setting_name, value)))
            if verbosity <= 1:
                continue
            for provider_name, raw_value in merger.raw_settings[
                    setting_name].items():
                self.stdout.write(
                    self.style.WARNING(
                        "    #   %s -> %r" %
                        (provider_name or "built-in", raw_value)))
コード例 #25
0
ファイル: loggingmixin.py プロジェクト: 2ps/djenga
class LoggingMixin:
    verbosity = 3 if settings.DEBUG else 1
    """@type: int"""
    indent = 0
    """@type: int"""
    logging_level = logging.DEBUG if settings.DEBUG else 1
    log_map = dict()
    logging_initialized = False
    print_level = True

    def set_verbosity(self, verbosity):
        LEVELS = {
            0: logging.CRITICAL,
            1: logging.ERROR,
            2: logging.WARNING,
            3: logging.DEBUG,
        }
        self.verbosity = verbosity
        self.logging_level = LEVELS[verbosity]

    def initialize_logging(self):
        if not self.logging_initialized:
            try:
                self.stdout = OutputWrapper(self.stdout._out, ending='')
            except AttributeError:
                self.stdout = OutputWrapper(sys.stdout, ending='')
            # self.stdout = codecs.getwriter('utf8')(self.stdout)
            self.logging_initialized = True

    def color_format(self, level, message):
        level_colors = {
            # Level and a pair of colors: first for the label,
            # the rest for the text;
            #   the bolder color label can make them easier to spot
            #   in the console log.
            logging.DEBUG: (33, 39),
            # logging.TRACE:        (147, 153),
            logging.INFO: (43, 49),
            logging.WARNING: (214, 226),
            logging.ERROR: (196, 197),
            logging.CRITICAL: (196, 197),
        }.get(level, (33, 39))
        # 256-color to give wider spectrum than just ANSI
        color = "\033[38;5;{:d}m"
        reset = "\033[0m"

        # Map each logging level to a short fixed-width label used as the
        # message prefix.
        mp_levels = {
            logging.INFO: u'INF',
            logging.WARNING: u'WRN',
            logging.ERROR: u'ERR',
            logging.DEBUG: u'DBG',
            logging.CRITICAL: u'CRT'
        }
        st_level = mp_levels[level]
        level_prefix = '%s[%s] ' % (color.format(level_colors[0]), st_level)
        return u'{level_prefix}{color_normal}{message}{reset}'.format(
            level_prefix=level_prefix if self.print_level else '',
            message=message,
            color_normal=color.format(level_colors[1]),
            reset=reset
        )

    def llog(self, logging_level, format_string, *args):
        """
        @param logging_level:
            50 = summary/critical
            40 = error
            30 = warning
            20 = info
            10 = debug
        @return:
        """
        LEVELS = {
            logging.CRITICAL,
            logging.ERROR,
            logging.WARNING,
            logging.INFO,
            logging.DEBUG
        }
        if logging_level not in LEVELS:
            logging_level = logging.DEBUG
        message = format_string % args
        if logging_level >= self.logging_level:
            if hasattr(self, 'stdout'):
                self.initialize_logging()
                self.stdout.write(u' ' * self.indent)
                if self.stdout.isatty():
                    self.stdout.write(self.color_format(
                        logging_level, message))
                else:
                    self.stdout.write(message)
                self.stdout.write('\n')
                self.log_map.setdefault(logging_level, []).append(message)

    def log(self, format_string, *args):
        message = format_string % args
        if hasattr(self, 'stdout'):
            self.initialize_logging()
            self.stdout.write(u' ' * self.indent)
            self.stdout.write(message)
            self.stdout.write('\n')

    def critical(self, format_string, *args):
        self.llog(logging.CRITICAL, format_string, *args)

    def debug(self, format_string, *args):
        self.llog(logging.DEBUG, format_string, *args)

    def info(self, format_string, *args):
        self.llog(logging.INFO, format_string, *args)

    def warning(self, format_string, *args):
        self.llog(logging.WARNING, format_string, *args)

    def error(self, format_string, *args):
        self.llog(logging.ERROR, format_string, *args)

    def exception(self, format_string, *args):
        p_type, p_exception, _ = sys.exc_info()
        self.llog(logging.ERROR, format_string, *args)
        self.llog(logging.ERROR, u'Exception message: %s', p_exception)
        self.llog(logging.ERROR, u'Exception type   : %s', p_type)
        self.llog(logging.ERROR, u'Traceback\n%s', format_exc())
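
A minimal usage sketch for the mixin above, assuming LoggingMixin is imported from the module shown here and mixed into a standard management command (the command body and messages are illustrative, not part of the djenga project):

from django.core.management.base import BaseCommand


class Command(LoggingMixin, BaseCommand):
    help = "Demonstrates the leveled logging helpers provided by LoggingMixin."

    def handle(self, *args, **options):
        # Django's built-in --verbosity option (0-3) maps directly onto the
        # mixin's levels: 0=CRITICAL, 1=ERROR, 2=WARNING, 3=DEBUG.
        self.set_verbosity(options['verbosity'])
        self.info("starting import")
        self.debug("only shown with --verbosity 3")
        self.error("shown unless --verbosity 0")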
コード例 #26
0
class BaseCommand:
    """
    The base class from which all management commands ultimately
    derive.

    Use this class if you want access to all of the mechanisms which
    parse the command-line arguments and work out what code to call in
    response; if you don't need to change any of that behavior,
    consider using one of the subclasses defined in this file.

    If you are interested in overriding/customizing various aspects of
    the command-parsing and -execution behavior, the normal flow works
    as follows:

    1. ``django-admin`` or ``manage.py`` loads the command class
       and calls its ``run_from_argv()`` method.

    2. The ``run_from_argv()`` method calls ``create_parser()`` to get
       an ``ArgumentParser`` for the arguments, parses them, performs
       any environment changes requested by options like
       ``pythonpath``, and then calls the ``execute()`` method,
       passing the parsed arguments.

    3. The ``execute()`` method attempts to carry out the command by
       calling the ``handle()`` method with the parsed arguments; any
       output produced by ``handle()`` will be printed to standard
       output and, if the command is intended to produce a block of
       SQL statements, will be wrapped in ``BEGIN`` and ``COMMIT``.

    4. If ``handle()`` or ``execute()`` raises any exception (e.g.
       ``CommandError``), ``run_from_argv()`` will instead print an error
       message to ``stderr``.

    Thus, the ``handle()`` method is typically the starting point for
    subclasses; many built-in commands and command types either place
    all of their logic in ``handle()``, or perform some additional
    parsing work in ``handle()`` and then delegate from it to more
    specialized methods as needed.

    Several attributes affect behavior at various steps along the way:

    ``help``
        A short description of the command, which will be printed in
        help messages.

    ``output_transaction``
        A boolean indicating whether the command outputs SQL
        statements; if ``True``, the output will automatically be
        wrapped with ``BEGIN;`` and ``COMMIT;``. Default value is
        ``False``.

    ``requires_migrations_checks``
        A boolean; if ``True``, the command prints a warning if the set of
        migrations on disk doesn't match the migrations in the database.

    ``requires_system_checks``
        A boolean; if ``True``, the entire Django project will be checked for
        errors prior to executing the command. Default value is ``True``.
        To validate an individual application's models rather than all
        applications' models, call ``self.check(app_configs)`` from
        ``handle()``, where ``app_configs`` is the list of application
        configurations provided by the app registry.

    ``stealth_options``
        A tuple of any options the command uses which aren't defined by the
        argument parser.
    """
    # Metadata about this command.
    help = ''

    # Configuration shortcuts that alter various logic.
    _called_from_command_line = False
    output_transaction = False  # Whether to wrap the output in a "BEGIN; COMMIT;"
    requires_migrations_checks = False
    requires_system_checks = True
    # Arguments, common to all commands, which aren't defined by the argument
    # parser.
    base_stealth_options = ('skip_checks', 'stderr', 'stdout')
    # Command-specific options not defined by the argument parser.
    stealth_options = ()

    def __init__(self, stdout=None, stderr=None, no_color=False):
        self.stdout = OutputWrapper(stdout or sys.stdout)
        self.stderr = OutputWrapper(stderr or sys.stderr)
        if no_color:
            self.style = no_style()
        else:
            self.style = color_style()
            self.stderr.style_func = self.style.ERROR

    def get_version(self):
        """
        Return the Django version, which should be correct for all built-in
        Django commands. User-supplied commands can override this method to
        return their own version.
        """
        return django.get_version()

    def create_parser(self, prog_name, subcommand):
        """
        Create and return the ``ArgumentParser`` which will be used to
        parse the arguments to this command.
        """
        parser = CommandParser(
            prog='%s %s' % (os.path.basename(prog_name), subcommand),
            description=self.help or None,
            formatter_class=DjangoHelpFormatter,
            missing_args_message=getattr(self, 'missing_args_message', None),
            called_from_command_line=getattr(self, '_called_from_command_line',
                                             None),
        )
        # parser.add_argument('--version', action='version', version=self.get_version())
        # parser.add_argument(
        #     '-v', '--verbosity', action='store', dest='verbosity', default=1,
        #     type=int, choices=[0, 1, 2, 3],
        #     help='Verbosity level; 0=minimal output, 1=normal output, 2=verbose output, 3=very verbose output',
        # )
        parser.add_argument(
            '--settings',
            help=('The Python path to a settings module, e.g. '
                  '"myproject.settings.main". If this isn\'t provided, the '
                  'DJANGO_SETTINGS_MODULE environment variable will be used.'),
        )
        parser.add_argument(
            '--pythonpath',
            help=
            'A directory to add to the Python path, e.g. "/home/djangoprojects/myproject".',
        )
        parser.add_argument('--traceback',
                            action='store_true',
                            help='Raise on CommandError exceptions')
        parser.add_argument(
            '--no-color',
            action='store_true',
            dest='no_color',
            help="Don't colorize the command output.",
        )
        self.add_arguments(parser)
        return parser

    def add_arguments(self, parser):
        """
        Entry point for subclassed commands to add custom arguments.
        """
        pass

    def print_help(self, prog_name, subcommand):
        """
        Print the help message for this command, derived from
        ``self.usage()``.
        """
        parser = self.create_parser(prog_name, subcommand)
        parser.print_help()

    def run_from_argv(self, argv):
        """
        Set up any environment changes requested (e.g., Python path
        and Django settings), then run this command. If the
        command raises a ``CommandError``, intercept it and print it sensibly
        to stderr. If the ``--traceback`` option is present or the raised
        ``Exception`` is not ``CommandError``, raise it.
        """
        self._called_from_command_line = True
        parser = self.create_parser(argv[0], argv[1])

        options = parser.parse_args(argv[2:])
        cmd_options = vars(options)
        # Move positional args out of options to mimic legacy optparse
        args = cmd_options.pop('args', ())
        handle_default_options(options)
        try:
            self.execute(*args, **cmd_options)
        except Exception as e:
            if options.traceback or not isinstance(e, CommandError):
                raise

            # SystemCheckError takes care of its own formatting.
            if isinstance(e, SystemCheckError):
                self.stderr.write(str(e), lambda x: x)
            else:
                self.stderr.write('%s: %s' % (e.__class__.__name__, e))
            sys.exit(1)
        finally:
            try:
                connections.close_all()
            except ImproperlyConfigured:
                # Ignore if connections aren't setup at this point (e.g. no
                # configured settings).
                pass

    def execute(self, *args, **options):
        """
        Try to execute this command, performing system checks if needed (as
        controlled by the ``requires_system_checks`` attribute, except if
        force-skipped).
        """
        if options['no_color']:
            self.style = no_style()
            self.stderr.style_func = None
        if options.get('stdout'):
            self.stdout = OutputWrapper(options['stdout'])
        if options.get('stderr'):
            self.stderr = OutputWrapper(options['stderr'],
                                        self.stderr.style_func)

        if self.requires_system_checks and not options.get('skip_checks'):
            self.check()
        if self.requires_migrations_checks:
            self.check_migrations()
        output = self.handle(*args, **options)
        if output:
            if self.output_transaction:
                connection = connections[options.get('database',
                                                     DEFAULT_DB_ALIAS)]
                output = '%s\n%s\n%s' % (
                    self.style.SQL_KEYWORD(
                        connection.ops.start_transaction_sql()),
                    output,
                    self.style.SQL_KEYWORD(
                        connection.ops.end_transaction_sql()),
                )
            self.stdout.write(output)
        return output

    def _run_checks(self, **kwargs):
        return checks.run_checks(**kwargs)

    def check(self,
              app_configs=None,
              tags=None,
              display_num_errors=False,
              include_deployment_checks=False,
              fail_level=checks.ERROR):
        """
        Use the system check framework to validate entire Django project.
        Raise CommandError for any serious message (error or critical errors).
        If there are only light messages (like warnings), print them to stderr
        and don't raise an exception.
        """
        all_issues = self._run_checks(
            app_configs=app_configs,
            tags=tags,
            include_deployment_checks=include_deployment_checks,
        )

        header, body, footer = "", "", ""
        visible_issue_count = 0  # excludes silenced warnings

        if all_issues:
            debugs = [
                e for e in all_issues
                if e.level < checks.INFO and not e.is_silenced()
            ]
            infos = [
                e for e in all_issues
                if checks.INFO <= e.level < checks.WARNING
                and not e.is_silenced()
            ]
            warnings = [
                e for e in all_issues
                if checks.WARNING <= e.level < checks.ERROR
                and not e.is_silenced()
            ]
            errors = [
                e for e in all_issues
                if checks.ERROR <= e.level < checks.CRITICAL
                and not e.is_silenced()
            ]
            criticals = [
                e for e in all_issues
                if checks.CRITICAL <= e.level and not e.is_silenced()
            ]
            sorted_issues = [
                (criticals, 'CRITICALS'),
                (errors, 'ERRORS'),
                (warnings, 'WARNINGS'),
                (infos, 'INFOS'),
                (debugs, 'DEBUGS'),
            ]

            for issues, group_name in sorted_issues:
                if issues:
                    visible_issue_count += len(issues)
                    formatted = (self.style.ERROR(str(e)) if e.is_serious()
                                 else self.style.WARNING(str(e))
                                 for e in issues)
                    formatted = "\n".join(sorted(formatted))
                    body += '\n%s:\n%s\n' % (group_name, formatted)

        if visible_issue_count:
            header = "System check identified some issues:\n"

        if display_num_errors:
            if visible_issue_count:
                footer += '\n'
            footer += "System check identified %s (%s silenced)." % (
                "no issues" if visible_issue_count == 0 else
                "1 issue" if visible_issue_count == 1 else "%s issues" %
                visible_issue_count,
                len(all_issues) - visible_issue_count,
            )

        if any(
                e.is_serious(fail_level) and not e.is_silenced()
                for e in all_issues):
            msg = self.style.ERROR(
                "SystemCheckError: %s" % header) + body + footer
            raise SystemCheckError(msg)
        else:
            msg = header + body + footer

        if msg:
            if visible_issue_count:
                self.stderr.write(msg, lambda x: x)
            else:
                self.stdout.write(msg)

    def handle(self, *args, **options):
        """
        The actual logic of the command. Subclasses must implement
        this method.
        """
        raise NotImplementedError(
            'subclasses of BaseCommand must provide a handle() method')
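
As a concrete illustration of the flow described in the docstring above, here is a hypothetical command (its module path and options are placeholders, not from any project) that only uses add_arguments(), handle(), self.stdout and self.style:

class Command(BaseCommand):
    # Would live in e.g. yourapp/management/commands/greet.py (path is illustrative).
    help = "Writes a greeting, optionally in upper case."

    def add_arguments(self, parser):
        parser.add_argument('name')
        parser.add_argument('--shout', action='store_true')

    def handle(self, *args, **options):
        message = 'Hello, %s!' % options['name']
        if options['shout']:
            message = message.upper()
        # Anything written to (or returned through) self.stdout is printed by
        # execute(); it would be wrapped in BEGIN/COMMIT if output_transaction
        # were True.
        self.stdout.write(self.style.SUCCESS(message))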
コード例 #27
0
    def sync_with_rmis(self, out: OutputWrapper = None, c=None):
        if out:
            out.write("Обновление данных для: %s" % self.fio(full=True))
        if c is None:
            from rmis_integration.client import Client
            c = Client()
        ok = False
        has_rmis = False
        rmis_uid = ""
        if Card.objects.filter(individual=self, base__is_rmis=True).exists():
            rmis_uid = Card.objects.filter(individual=self, base__is_rmis=True)[0].number
            ok = has_rmis = True
            if out:
                out.write("Есть РМИС запись: %s" % rmis_uid)

        if not ok:
            docs = Document.objects.filter(individual=self).exclude(document_type__check_priority=0).order_by(
                "-document_type__check_priority")
            for document in docs:
                s = c.patients.search_by_document(document)
                if len(s) > 0:
                    rmis_uid = s[0]
                    ok = True
                    if out:
                        out.write("Физ.лицо найдено по документу: %s -> %s" % (document, rmis_uid))
                    break

        if ok:
            data = c.patients.get_data(rmis_uid)
            upd = self.family != data["family"] or self.name != data["name"] or self.patronymic != data[
                "patronymic"] or (self.birthday != data["birthday"] and data["birthday"] is not None)

            if upd:
                prev = str(self)
                self.family = data["family"]
                self.name = data["name"]
                self.patronymic = data["patronymic"]
                if data["birthday"] is not None:
                    self.birthday = data["birthday"]
                self.sex = data["sex"]
                self.save()
                if out:
                    out.write("Обновление данных: %s" % self.fio(full=True))
                slog.Log(key=str(self.pk), type=2003,
                         body=simplejson.dumps({"Новые данные": str(self), "Не актуальные данные": prev}),
                         user=None).save()

        if not ok:
            query = {"surname": self.family, "name": self.name, "patrName": self.patronymic,
                     "birthDate": self.birthday.strftime("%Y-%m-%d")}
            rows = c.patients.client.searchIndividual(**query)
            if len(rows) == 1:
                rmis_uid = rows[0]
                ok = True
                if out:
                    out.write("Физ.лицо найдено по ФИО и д.р.: %s" % rmis_uid)

        if not has_rmis and rmis_uid and rmis_uid != '':
            ex = Card.objects.filter(number=rmis_uid, is_archive=False, base__is_rmis=True)
            if ex.exists():
                for e in ex:
                    self.join_individual(e.individual, out)
            s = str(c.patients.create_rmis_card(self, rmis_uid))
            if out:
                out.write("Добавление РМИС карты -> %s" % s)

        save_docs = []

        if ok and rmis_uid != "" and Card.objects.filter(individual=self, base__is_rmis=True, is_archive=False).exists():
            pat_data = c.patients.extended_data(rmis_uid)
            cards = Card.objects.filter(individual=self, base__is_rmis=True, is_archive=False)
            for card_i in cards:
                c.patients.sync_card_data(card_i, out)

            def get_key(d: dict, val):
                r = [key for key, v in d.items() if v == val]
                if len(r) > 0:
                    return r[0]
                return None

            if out:
                out.write("Типы документов: %s" % simplejson.dumps(c.patients.local_types))
            for document_object in pat_data["identifiers"] or []:
                k = get_key(c.patients.local_types, document_object["type"])
                if k and document_object["active"]:
                    if out:
                        out.write("Тип: %s -> %s (%s)" % (document_object["type"], k, document_object["active"]))
                    data = dict(document_type=DocumentType.objects.get(pk=k),
                                serial=document_object["series"] or "",
                                number=document_object["number"] or "",
                                date_start=document_object["issueDate"],
                                date_end=document_object["expiryDate"],
                                who_give=(document_object["issueOrganization"] or {"name": ""})["name"] or "",
                                individual=self,
                                is_active=True)
                    rowss = Document.objects.filter(document_type=data['document_type'], individual=self, from_rmis=True)
                    if rowss.exclude(serial=data["serial"]).exclude(number=data["number"]).filter(
                            card__isnull=True).exists():
                        Document.objects.filter(document_type=data['document_type'], individual=self, from_rmis=True).delete()
                    docs = Document.objects.filter(document_type=data['document_type'],
                                                   serial=data['serial'],
                                                   number=data['number'], from_rmis=True)
                    if not docs.exists():
                        doc = Document(**data)
                        doc.save()
                        if out:
                            out.write("Добавление докумена: %s" % doc)
                        kk = "%s_%s_%s" % (doc.document_type.pk, doc.serial, doc.number)
                        save_docs.append(kk)
                        continue
                    else:
                        to_delete = []
                        has = []
                        ndocs = {}
                        for d in docs:
                            kk = "%s_%s_%s" % (d.document_type.pk, d.serial, d.number)
                            if out:
                                out.write("Checking: %s" % kk)
                            if kk in has:
                                if out:
                                    out.write("to delete: %s" % d.pk)
                                to_delete.append(d.pk)
                                if Card.objects.filter(polis=d).exists():
                                    for card_with_polis in Card.objects.filter(polis=d):
                                        card_with_polis.polis = ndocs[kk]
                                        card_with_polis.save()
                            else:
                                if out:
                                    out.write("To has: %s" % d.pk)
                                has.append(kk)
                                save_docs.append(kk)
                                ndocs[kk] = d

                        Document.objects.filter(pk__in=to_delete).delete()
                        docs = Document.objects.filter(document_type=data['document_type'],
                                                       serial=data['serial'],
                                                       number=data['number'],
                                                       individual=self)
                        for d in docs:
                            if d.date_start != data["date_start"]:
                                d.date_start = data["date_start"]
                                d.save()
                                if out:
                                    out.write("Update date_start: %s" % d.date_start)
                            if d.date_end != data["date_end"]:
                                d.date_end = data["date_end"]
                                d.save()
                                if out:
                                    out.write("Update date_end: %s" % d.date_end)
                            if d.who_give != data["who_give"]:
                                d.who_give = data["who_give"]
                                d.save()
                                if out:
                                    out.write("Update who_give: %s" % d.who_give)

                        if out:
                            out.write("Данные для документов верны: %s" % [str(x) for x in docs])

                    docs = Document.objects.filter(document_type=data['document_type'],
                                                   document_type__title__in=['СНИЛС', 'Паспорт гражданина РФ',
                                                                             'Полис ОМС'],
                                                   serial=data['serial'],
                                                   number=data['number']).exclude(individual=self).exclude(number="")
                    if docs.exists():
                        if out:
                            out.write("Объединение записей физ.лиц")
                        for doc in docs:
                            self.join_individual(doc.individual, out)

            to_delete_pks = []
            for d in Document.objects.filter(individual=self, from_rmis=True):
                kk = "%s_%s_%s" % (d.document_type.pk, d.serial, d.number)
                if out:
                    out.write("TD %s %s %s" % (kk, kk not in save_docs, save_docs,))
                if kk not in save_docs:
                    to_delete_pks.append(d.pk)
            Document.objects.filter(pk__in=to_delete_pks).delete()
        else:
            if out:
                out.write("Физ.лицо не найдено в РМИС")
        return ok
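
A short driver sketch for the method above: a hypothetical management command that re-syncs every individual, reusing one RMIS client and passing the command's own OutputWrapper as the out argument (the Individual import path is a guess and must be adjusted to the actual app):

from django.core.management.base import BaseCommand

from clients.models import Individual  # import path is a guess
from rmis_integration.client import Client


class Command(BaseCommand):
    help = "Re-synchronise all individuals with RMIS."

    def handle(self, *args, **options):
        client = Client()  # reuse one client instead of creating one per call
        for individual in Individual.objects.all():
            # self.stdout is already an OutputWrapper, so it fits the ``out`` argument
            individual.sync_with_rmis(out=self.stdout, c=client)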
コード例 #28
0
class Command(ProgressBarMixin, BaseCommand):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

        # Country names mapping
        self.country_names = dict(Country.objects.values_list('name', 'pk'))

        # Partner entity id mapping
        self.entity_ids = dict(
            Partner.objects.annotate(entity_id=Subquery(
                EntityVersion.objects.filter(
                    entity__organization=OuterRef('organization_id'),
                    parent__isnull=True,
                ).order_by('-start_date').values('entity_id')[:1]),
                                     ).values_list('pk', 'entity_id'))

        # Existing partner addresses
        self.partners = self.get_existing_partner_addresses()

        # Geocoding url
        self.url = "{esb_api}/{endpoint}".format(
            esb_api=settings.ESB_API_URL,
            endpoint=settings.ESB_GEOCODING_ENDPOINT,
        )

        self.counts = defaultdict(int)
        self.delayed_io = io.StringIO()
        self.stdout_wrapper = OutputWrapper(self.delayed_io)

    def add_arguments(self, parser: argparse.ArgumentParser):
        parser.add_argument(
            'csv_file',
            type=argparse.FileType('r', encoding="utf-8-sig"),
        )

    @staticmethod
    def check_file(reader):
        expected_fields = (ID, ERASMUS, STREET_NUM, STREET, POSTAL_CODE, CITY,
                           COUNTRY)
        try:
            assert all(field in reader.fieldnames for field in expected_fields)
        except AssertionError:
            raise CommandError('Encoding must be UTF-8\n'
                               'Delimiter must be ";"\n'
                               'Column names must contain {}\n'
                               'Headers detected : {}'.format(
                                   ", ".join(expected_fields),
                                   reader.fieldnames))

    @staticmethod
    def get_existing_partner_addresses():
        end_date_condition = (Q(entity_version__end_date__isnull=True)
                              | Q(entity_version__end_date__gte=date.today()))
        queryset = EntityVersionAddress.objects.filter(
            end_date_condition,
            entity_version__entity__organization__partner__isnull=False,
        ).annotate(
            partner_id=F('entity_version__entity__organization__partner__id'),
        ).select_related(
            'country',
            'entity_version').order_by('-entity_version__start_date')
        partners = {}
        for record in queryset:
            if record.partner_id not in partners:
                partners[record.partner_id] = record

        return partners

    def handle(self, *args, **options):
        file: io.TextIOWrapper = options['csv_file']

        # Read number of lines
        total = sum(1 for _ in file) - 1
        file.seek(0)

        reader = csv.DictReader(file, delimiter=';')
        self.check_file(reader)

        for i, row in enumerate(reader, start=1):
            try:
                self.import_address(row, options)
            except AttributeError:
                self.stdout_wrapper.write(
                    self.style.ERROR(
                        'Conflicting entity version for partner {:>4} (entity {})'
                        .format(
                            row[ID],
                            self.entity_ids[int(row[ID])],
                        )))
                self.counts['skipped'] += 1
            self.print_progress_bar(i, total)
            self.counts['updated'] += 1

        self.print_progress_bar(total, total)
        self.delayed_io.seek(0)
        self.stdout.write(self.delayed_io.read())

        self.stdout.write(
            self.style.SUCCESS(
                'Updated: {}\nNot found: {}\nSkipped: {}\nWarnings: {}\nExisting: {}'
                .format(
                    self.counts['updated'],
                    self.counts['not_found'],
                    self.counts['skipped'],
                    self.counts['warning'],
                    self.counts['existing'],
                )))

    @transaction.atomic
    def import_address(self, row, options):
        # We need at least the street to update
        if not row[STREET]:
            if options['verbosity'] >= 2:
                self.stdout_wrapper.write(
                    self.style.WARNING(
                        'Skipping partner id {}: no address'.format(row[ID])))
            return

        # Country name must exist
        if row[COUNTRY] not in self.country_names:
            self.counts['skipped'] += 1
            self.stdout_wrapper.write(
                self.style.ERROR(
                    'Skipping partner id {}: country {} does not exist'.format(
                        row[ID],
                        row[COUNTRY],
                    )))
            return

        # Check that the address has changed
        existing_address = (self.partners[int(row[ID])]
                            if int(row[ID]) in self.partners else None)
        if self._is_address_unchanged(row, existing_address):
            if options['verbosity'] >= 2:
                self.stdout_wrapper.write(
                    self.style.WARNING(
                        'Skipping partner id {}: same address'.format(
                            row[ID])))
            self.counts['existing'] += 1
            return

        parts = [
            row[STREET_NUM],
            row[STREET],
            row[POSTAL_CODE],
            row[CITY],
            row[COUNTRY],
        ]
        search = ' '.join(filter(None, parts))
        response = requests.get(self.url, {'address': search},
                                headers={
                                    'Authorization':
                                    settings.ESB_AUTHORIZATION,
                                })
        results = response.json()['results']
        if not results:
            self.counts['not_found'] += 1
            self.stdout_wrapper.write(
                self.style.ERROR(
                    'Address not found for partner id {:>4}: {}'.
                    format(
                        row[ID],
                        search,
                    )))
            return

        if len(results) > 1:
            self.counts['warning'] += 1
            self.stdout_wrapper.write(
                self.style.WARNING('Multiple results for partner id {}'.format(
                    row[ID])))
        location = results[0]['geometry']['location']
        if options['verbosity'] >= 2:
            self.stdout_wrapper.write(
                self.style.SUCCESS(
                    'Address found for partner id {:>4} : {}, {}'.format(
                        row[ID],
                        location['lat'],
                        location['lng'],
                    )))

        # Handle entity version
        last_version = None
        if existing_address:
            last_version = self._override_current_version(existing_address)

        if not last_version:
            entity_id = self.entity_ids[int(row[ID])]
            entity = Entity.objects.select_related('organization').get(
                pk=entity_id)
            # Create a new entity version
            last_version = EntityVersion.objects.create(
                title=entity.organization.name,
                entity_id=entity_id,
                parent=None,
                start_date=date.today(),
                end_date=None,
            )

        # Create the address
        EntityVersionAddress.objects.create(
            street_number=row[STREET_NUM],
            street=row[STREET],
            postal_code=row[POSTAL_CODE],
            city=row[CITY],
            location=Point(
                location['lng'],
                location['lat'],
            ),
            country_id=self.country_names[row[COUNTRY]],
            entity_version=last_version,
        )

    @staticmethod
    def _override_current_version(existing_address):
        """
        Update a previous version of an address, or override an existing one

        :param existing_address: existing EntityVersionAddress
        :return: EntityVersion or None if a new version must be created
        """
        today = date.today()
        last_version = existing_address.entity_version

        if last_version.start_date != today:
            # End the previous version if start_date is changed
            last_version.end_date = today - timedelta(days=1)
            last_version.save()
            # We can safely create a new version
            last_version = None
        else:
            # Latest version date is today, delete the related address (if existing)
            last_version.entityversionaddress_set.all().delete()
        return last_version

    @staticmethod
    def _is_address_unchanged(row, address):
        """
        Check whether the imported address matches the existing one.

        :param row: dict address from the CSV file
        :param address: existing EntityVersionAddress or None
        :return: True if the two addresses are the same
        """
        return address and all([
            row[STREET_NUM] == address.street_number,
            row[STREET] == address.street,
            row[POSTAL_CODE] == address.postal_code,
            row[CITY] == address.city,
            row[COUNTRY] == (address.country and address.country.name),
        ])
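
The command above is normally run through manage.py with the CSV path as its single positional argument; it can also be driven from code or tests with call_command (the command name below is a guess, since it depends on the module the class lives in):

from django.core.management import call_command

call_command('import_partner_addresses', 'partner_addresses.csv', verbosity=2)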
コード例 #29
0
ファイル: base.py プロジェクト: wolever/dwdj
class BaseCommand(DjBaseCommand):
    """ A less terrible base class for Djagno management commands.

        Changes:

        * Tracebacks are turned on by default
        * Verbosity is controlled by a counter instead of a number (ex,
          ``-vvv`` instead of ``-v3``) and is also used to set up logging
          levels (disable by setting ``logging_enabled = False``).
        * Will override a named logger (see ``logging_handler_name``) with the
          level and formatter settings from the command line (or add a new
          stderr logger if no matching logger is found).
        * Subclasses can set ``option_list`` directly - the base options are
          stored in ``base_option_list`` and merged in by ``get_option_list()``.
        * The ``self.log`` attribute will be set to a logger configured to use
          the name from ``self.get_log_name()`` (which defaults to the fully
          qualified name of the module containing the command).
        * If ``handle`` returns an integer it will be treated as a numeric exit
          status. If a ``str`` or ``unicode`` is returned it will be treated
          "normally" and 0 will be returned.
        * If ``handle`` raises an exception that exception will be logged
          (unless ``self.log_exc`` is ``False``) and the command will exit
          with a status of 1.

        Example::

            from dwdj.management.base import BaseCommand, make_option

            class MyCommand(BaseCommand):
                option_list = [
                    make_option("-f", "--foo", action="store_true"),
                ]

                def handle(self, *args, **options):
                    if options.get("foo"):
                        return 1
                    return 0
    """

    # For now, tell Django to use the legacy optparse option parsing. At some
    # point we'll need to update all the option parsing so it's done the fancy
    # new way with argparse, but that day is not today.
    use_argparse = False

    base_option_list = [
        make_option('-q', '--quiet', action="store_const", const=-1, dest="verbosity"),
        make_option('-v', '--verbose', action="count", default=0, dest="verbosity"),
        make_option('--verbose-log', action="store_true", help=dedent("""
            Use a more verbose logging format.
        """)),
        make_option('--settings', help=dedent("""
            The Python path to a settings module, e.g.
            "myproject.settings.main". If this isn't provided, the
            DJANGO_SETTINGS_MODULE environment variable will be used.
        """)),
        make_option('--pythonpath', help=dedent("""
            A directory to add to the Python path, e.g.
            "/home/djangoprojects/myproject".
        """)),
        make_option('--traceback', action='store_true', default=True, help=dedent("""
            Log complete exception traceback, not just exception and message.
        """)),
        make_option('--no-traceback', action='store_false', dest="traceback", help=dedent("""
            Log only exception messages, not complete tracebacks.
        """))
    ]

    option_list = []

    logging_enabled = True
    logging_handler_name = "stderr"
    logging_format = '%(levelname)s [%(name)s]: %(message)s'
    logging_format_verbose = '%(asctime)s %(levelname)s [%(processName)s:%(threadName)s:%(name)s]: %(message)s'
    log_exc = True

    def get_option_list(self):
        return self.base_option_list + self.option_list

    def create_parser(self, prog_name, subcommand):
        """
        Create and return the ``OptionParser`` which will be used to
        parse the arguments to this command.

        """
        parser = OptionParser(prog=prog_name,
                              usage=self.usage(subcommand),
                              version=self.get_version(),
                              option_list=self.get_option_list())
        original_parse = parser.parse_args
        parser.parse_args = partial(self._override_parse_args, parser,
                                    original_parse)
        return parser

    def _override_parse_args(self, parser, original_parse, *argv, **kwargs):
        options, args = original_parse(*argv, **kwargs)
        self.logging_setup(options)
        if options.verbosity < 0:
            options.traceback = False
        return (options, args)

    def logging_setup(self, options):
        verbosity = options.verbosity
        level = (
            logging.CRITICAL if verbosity < 0 else
            logging.ERROR if verbosity == 0 else
            logging.WARNING if verbosity == 1 else
            logging.INFO if verbosity == 2 else
            logging.DEBUG
        )
        format = (
            self.logging_format if not options.verbose_log else
            self.logging_format_verbose
        )
        logger, handler = self.logging_get_handler()
        formatter = logging.Formatter(format)
        handler.setFormatter(formatter)
        if logger.level > level:
            logger.setLevel(level)
        handler.setLevel(level)

    def get_log_name(self):
        return type(self).__module__

    @cached_property
    def log(self):
        return logging.getLogger(self.get_log_name())

    def logging_get_handler(self):
        logger = logging.getLogger("")
        for handler in logger.handlers:
            if handler.name == self.logging_handler_name:
                return logger, handler
        handler = logging.StreamHandler(sys.stderr)
        handler.name = self.logging_handler_name
        logger.addHandler(handler)
        return logger, handler

    def run_from_argv(self, argv):
        """
        Set up any environment changes requested (e.g., Python path
        and Django settings), then run this command. If the
        command raises a ``CommandError``, intercept it and print it sensibly
        to stderr.
        """
        signal_pre_run.send(self, argv=argv)
        parser = self.create_parser(argv[0], argv[1])
        options, args = parser.parse_args(argv[2:])
        handle_default_options(options)
        try:
            result = self.execute(*args, **options.__dict__)
        except SystemExit as e:
            signal_post_run.send(self, status=e.code)
            raise
        except BaseException:
            result = self.handle_execute_exc(options)
            if result is None:
                result = 1
        status = result or 0
        signal_post_run.send(self, status=status)
        sys.exit(status)

    def handle_execute_exc(self, options):
        if not self.log_exc:
            return 1
        if options.traceback:
            self.log.exception("Exception running command:")
        else:
            exc_info = sys.exc_info()
            self.log.error("%s: %s", exc_info[0].__name__, exc_info[1])
        return 1

    def execute(self, *args, **options):
        """
        Try to execute this command, performing model validation if
        needed (as controlled by the attribute
        ``self.requires_model_validation``, except if force-skipped).
        """

        # Switch to English, because django-admin.py creates database content
        # like permissions, and those shouldn't contain any translations.
        # But only do this if we can assume we have a working settings file,
        # because django.utils.translation requires settings.
        saved_lang = None
        self.stdout = OutputWrapper(options.get('stdout', sys.stdout))
        self.stderr = OutputWrapper(options.get('stderr', sys.stderr), self.style.ERROR)

        if self.can_import_settings:
            from django.utils import translation
            saved_lang = translation.get_language()
            translation.activate('en-us')

        try:
            if getattr(self, "requires_model_validation", False) and not options.get('skip_validation'):
                self.validate()
            result = self.handle(*args, **options)
            if isinstance(result, basestring):
                if self.output_transaction:
                    # This needs to be imported here, because it relies on
                    # settings.
                    from django.db import connections, DEFAULT_DB_ALIAS
                    connection = connections[options.get('database', DEFAULT_DB_ALIAS)]
                    if connection.ops.start_transaction_sql():
                        self.stdout.write(self.style.SQL_KEYWORD(connection.ops.start_transaction_sql()))
                self.stdout.write(result)
                if self.output_transaction:
                    self.stdout.write('\n' + self.style.SQL_KEYWORD("COMMIT;"))
                result = 0
            return result
        finally:
            if saved_lang is not None:
                translation.activate(saved_lang)
コード例 #30
0
class Daemon(BaseDaemon):
    def __init__(self,
                 wait=2,
                 threshold=4,
                 stdout=None,
                 stderr=None,
                 no_color=False):
        """Task Daemon: Gets task and execute them.

        :arg wait : How many seconds should wait before check new records again. This should be lower than threshold
        :arg threshold :
        """
        self.wait = wait
        self.threshold = threshold
        self.threads = []
        self.queues = []
        self.stdout = OutputWrapper(stdout or sys.stdout)
        self.stderr = OutputWrapper(stderr or sys.stderr)
        if no_color:
            self.style = no_style()
        else:
            self.style = color_style()
            self.stderr.style_func = self.style.ERROR

        super(Daemon, self).__init__(name="Daemon",
                                     pidfile=_pidfile,
                                     runfile=_runfile,
                                     stoptimeout=10,
                                     debug=1)

    def start(self):
        # Check daemon is running
        pid = self.__getpid()
        if pid:
            logger.warning("Daemon: Already Running, can not start.")
            raise DaemonRunning()

        # Start Daemon
        logger.info("Daemon: Started.")
        self.daemonize()
        self.run()

    def stop(self):
        # Check alive daemon if not exists already
        pid = self.__getpid()
        if not pid:
            logger.warning("Daemon: Not Running, can not stop.")
            raise DaemonNotRunning()

        self.stdout.write(self.style.WARNING("Trying to stop..."))
        self.__cleanstop(pid)

        try:
            while True:
                os.kill(pid, signal.SIGTERM)
                time.sleep(0.1)
        except OSError as err:
            err = str(err)

            if err.find('No such process') > 0:
                if os.path.exists(self.pidfile):
                    os.remove(self.pidfile)
            else:
                raise DaemonError(err)

    def is_running(self):
        """Checks daemon is running"""
        return os.path.exists(self.runfile)

    @staticmethod
    def _is_queue_time_came(q: Queue):
        return q.timer <= timezone.now()

    def start_queue(self, q: Queue):
        if not self._is_queue_time_came(q):
            return

        logger.debug("Daemon: Start Queue<%d>" % q.id)
        try:
            thread = QueueThread(q.id)
            thread.start()
            self.threads.append(thread)
        except Exception:
            logger.exception("Daemon: Start Queue<%d> failed." % q.id)
            try:
                q.set_status_error()
            except:
                logger.exception(
                    "Daemon: Queue<%d> status can not set error." % q.id)

    def queue_timeout(self, q: Queue):
        try:
            logger.warning("Queue<%d> timeout, changing status." % q.id)
            q.status = QueueStatus.Timeout.value
            q.save()
        except Exception:
            logger.exception("Queue<%d> status can not changed to Timeout." %
                             q.id)

    @staticmethod
    def get_queues(stat: QueueStatus):
        return Queue.objects.all().filter(status=stat.value).order_by('timer')

    def _add_queue_list(self, q: Queue):
        if q.id not in self.queues:
            logger.debug("Daemon: Queue<%d> is adding to the queues." % q.id)
            self.queues.append(q.id)
        else:
            logger.debug("Daemon: Queue<%d> is already in queues." % q.id)

    def run(self):
        start_time = timezone.now()
        try:
            while self.is_running():
                for queue in self.get_queues(QueueStatus.Processing):
                    queue.calculate_queue_status()

                for queue in self.get_queues(QueueStatus.Created):
                    if queue.timer:
                        # Check is timeout
                        if queue.timer < timezone.now() - timezone.timedelta(
                                seconds=self.threshold):
                            self.queue_timeout(queue)
                        else:
                            self.start_queue(queue)
                    else:
                        self.start_queue(queue)

                # Log every 10 seconds
                passed = (timezone.now() - start_time).total_seconds()
                if int(passed) % 10 == 0:
                    logger.debug("Daemon: Running %d seconds." % passed)
                time.sleep(self.wait)
        except Exception:
            logger.exception("Daemon: Failed.")
            self.delrun()
            raise DaemonError()
        logger.warning("Daemon: Exiting.")
コード例 #31
0
ファイル: utils.py プロジェクト: Reception123/IRCLogBot
def parse_logs(qs, stdout=None):
    """
    Parse logs for kudos.
    """
    names = collections.deque(maxlen=200)
    unattributed = 0
    count = 0
    kudos = {}
    kudos_count = 0
    kudos_first = {}
    kudos_recent = {}

    if stdout and not isinstance(stdout, OutputWrapper):
        stdout = OutputWrapper(stdout)

    def set_thanked(nick):
        timestamp = log[3]
        kudos[nick] = kudos.get(nick, 0) + 1
        kudos_first.setdefault(nick, timestamp)
        kudos_recent[nick] = timestamp

    qs = qs.order_by('pk').filter(command='PRIVMSG')
    qs = qs.values_list('pk', 'nick', 'text', 'timestamp')
    for log in _iterate_log(qs):
        log_nick = log[1].lower()
        log_text = log[2]
        count += 1
        directed = directed_message(log_text)
        if directed:
            directed = directed.lower()
            if directed == log_nick:
                # Can't thank yourself :P
                directed = None
        if RE_KUDOS.search(log_text):
            kudos_count += 1
            attributed = False
            if directed:
                for nick, _ in names:
                    if nick == directed:
                        set_thanked(nick)
                        attributed = True
                        break
            if not attributed:
                lower_text = log_text.lower()
                for recent in (bits[0] for bits in names
                               if bits[0] != log_nick):
                    re_text = r'(?:^| )@?{}(?:$|\W)'.format(re.escape(recent))
                    if re.search(re_text, lower_text):
                        set_thanked(recent)
                        attributed = True
            if not attributed:
                for nick, target in names:
                    if target == log_nick:
                        set_thanked(nick)
                        attributed = True
                        break
            if not attributed:
                unattributed += 1
        names.append((log_nick, directed))
        if stdout and not count % 10000:
            stdout.write('.', ending='')
            stdout.flush()
    if stdout:
        stdout.write('')

    kudos_list = []
    for c, nick in sorted((c, nick) for nick, c in kudos.items()):
        kudos_list.append({
            'nick': nick,
            'count': c,
            'first': kudos_first[nick],
            'recent': kudos_recent[nick]
        })
    return {
        'kudos': kudos_list,
        'message_count': count,
        'kudos_given': kudos_count,
        'unattributed': unattributed,
    }
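
A usage sketch for the parser above: pass it a queryset of logged IRC lines (anything exposing pk, nick, text, timestamp and a PRIVMSG command field) plus a stream for the progress dots; LogLine is a placeholder model name:

import sys

stats = parse_logs(LogLine.objects.all(), stdout=sys.stdout)
print('%(kudos_given)s thanks across %(message_count)s messages, '
      '%(unattributed)s unattributed' % stats)
for entry in stats['kudos'][-5:]:  # the list is sorted ascending by count
    print(entry['nick'], entry['count'])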
コード例 #32
0
class SettingMerger:
    """Load different settings modules and config files and merge them.
    """

    def __init__(
        self,
        fields_provider,
        providers,
        extra_values=None,
        stdout=None,
        stderr=None,
        no_color=False,
    ):
        self.fields_provider = fields_provider or PythonConfigFieldsProvider(None)
        extra_values = extra_values or {}
        self.extra_values = extra_values
        self.providers = providers or []
        self.__formatter = string.Formatter()
        self.settings = {}
        self.config_values = []  # list of (ConfigValue, provider_name, setting_name, final_value)
        self.raw_settings = OrderedDict()
        for key, value in extra_values.items():
            self.raw_settings[key] = OrderedDict()
            self.raw_settings[key][None] = value
        # raw_settings[setting_name][str(provider) or None] = raw_value
        self.__working_stack = set()
        self.stdout = OutputWrapper(stdout or sys.stdout)
        self.stderr = OutputWrapper(stderr or sys.stderr)
        if no_color:
            self.style = no_style()
        else:
            self.style = color_style()
            self.stderr.style_func = self.style.ERROR

    def add_provider(self, provider):
        self.providers.append(provider)

    def process(self):
        self.load_raw_settings()
        self.load_settings()

    def load_raw_settings(self):
        # get all setting names and sort them
        all_settings_names_set = set()
        for field in self.fields_provider.get_config_fields():
            assert isinstance(field, ConfigField)
            all_settings_names_set.add(field.setting_name)
        for provider in self.providers:
            assert isinstance(provider, ConfigProvider)
            for setting_name, value in provider.get_extra_settings():
                all_settings_names_set.add(setting_name)
        all_settings_names = list(sorted(all_settings_names_set))
        # initialize all defined settings
        for setting_name in all_settings_names:
            self.raw_settings[setting_name] = OrderedDict()
        # fetch default values if they exist (possibly redundant)
        for field in self.fields_provider.get_config_fields():
            assert isinstance(field, ConfigField)
            self.raw_settings[field.setting_name][None] = field.value
        # read all providers (in the right order)
        for provider in self.providers:
            assert isinstance(provider, ConfigProvider)
            source_name = str(provider)
            for field in self.fields_provider.get_config_fields():
                assert isinstance(field, ConfigField)
                if provider.has_value(field):
                    value = provider.get_value(field)
                    # noinspection PyTypeChecker
                    self.raw_settings[field.setting_name][source_name] = value
            for setting_name, value in provider.get_extra_settings():
                self.raw_settings[setting_name][source_name] = value

    def has_setting_value(self, setting_name):
        return setting_name in self.raw_settings

    def get_setting_value(self, setting_name):
        if setting_name in self.settings:
            return self.settings[setting_name]
        elif setting_name in self.__working_stack:
            raise ValueError(
                "Invalid cyclic dependency between " + ", ".join(self.__working_stack)
            )
        elif setting_name not in self.raw_settings:
            raise ValueError("Invalid setting reference: %s" % setting_name)
        self.__working_stack.add(setting_name)
        provider_name, raw_value = None, None
        for provider_name, raw_value in self.raw_settings[setting_name].items():
            pass
        value = self.analyze_raw_value(raw_value, provider_name, setting_name)
        self.settings[setting_name] = value
        self.__working_stack.remove(setting_name)
        return value

    def load_settings(self):
        for setting_name in self.raw_settings:
            self.get_setting_value(setting_name)

    def call_method_on_config_values(self, method_name: str):
        """Scan all settings, looking for :class:`django.conf.config_values.ConfigValue` and calling one of their
        methods.

        :param method_name: 'pre_collectstatic', 'pre_migrate', 'post_collectstatic', or 'post_migrate'.
        """
        for raw_value, provider_name, setting_name, final_value in self.config_values:
            try:
                getattr(raw_value, method_name)(
                    self, provider_name, setting_name, final_value
                )
            except Exception as e:
                self.stdout.write(
                    self.style.ERROR(
                        'Invalid value "%s" in %s for %s (%s)'
                        % (raw_value, provider_name or "built-in", setting_name, e)
                    )
                )

    def analyze_raw_value(self, obj, provider_name, setting_name):
        """Parse the object for replacing variables by their values.

        If `obj` is a string like "THIS_IS_{TEXT}", the setting named "TEXT" is looked up and "{TEXT}" is replaced by
        its value (say, "VALUE"), so the returned object is "THIS_IS_VALUE".
        If `obj` is a list, a set, a tuple or a dict, its components are recursively parsed.
        If `obj` is an instance of :class:`djangofloor.conf.config_values.ConfigValue`, its value is computed on the fly.
        Otherwise, `obj` is returned as-is.

        :param obj: object to analyze
        :param provider_name: the name of the config file
        :param setting_name: the name of the setting containing this value
            but this value can be inside a dict or a list (like `SETTING = [Directory("/tmp"), ]`)
        :return: the parsed setting
        """
        if isinstance(obj, str):
            values = {}
            for (
                literal_text,
                field_name,
                format_spec,
                conversion,
            ) in self.__formatter.parse(obj):
                if field_name is not None:
                    values[field_name] = self.get_setting_value(field_name)
            return self.__formatter.format(obj, **values)
        elif isinstance(obj, ConfigValue):
            final_value = obj.get_value(self, provider_name, setting_name)
            self.config_values.append((obj, provider_name, setting_name, final_value))
            return final_value
        elif isinstance(obj, (list, tuple)):
            result = []
            for sub_obj in obj:
                if isinstance(sub_obj, ExpandIterable):
                    result += self.get_setting_value(sub_obj.value)
                else:
                    result.append(
                        self.analyze_raw_value(sub_obj, provider_name, setting_name)
                    )
            if isinstance(obj, tuple):
                return tuple(result)
            return result
        elif isinstance(obj, set):
            result = set()
            for sub_obj in obj:
                if isinstance(sub_obj, ExpandIterable):
                    result |= self.get_setting_value(sub_obj.value)
                else:
                    result.add(
                        self.analyze_raw_value(sub_obj, provider_name, setting_name)
                    )
            return result
        elif isinstance(obj, dict):
            result = obj.__class__()  # OrderedDict or plain dict
            for sub_key, sub_obj in obj.items():
                if isinstance(sub_obj, ExpandIterable):
                    result.update(self.get_setting_value(sub_obj.value))
                else:
                    value = self.analyze_raw_value(sub_obj, provider_name, setting_name)
                    key = self.analyze_raw_value(sub_key, provider_name, setting_name)
                    result[key] = value
            return result
        return obj

    def post_process(self):
        """Perform some cleaning on settings:

            * remove duplicates in `INSTALLED_APPS` (keeps only the first occurrence)
        """
        # remove duplicates in INSTALLED_APPS
        self.settings["INSTALLED_APPS"] = list(
            OrderedDict.fromkeys(self.settings["INSTALLED_APPS"])
        )
        django_version = get_version()
        # remove deprecated settings
        if LooseVersion(django_version) >= LooseVersion("1.8"):
            if "TEMPLATES" in self.settings:
                for key in (
                    "TEMPLATE_DIRS",
                    "TEMPLATE_CONTEXT_PROCESSORS",
                    "TEMPLATE_LOADERS",
                    "TEMPLATE_DEBUG",
                ):
                    if key in self.settings:
                        del self.settings[key]

    def write_provider(self, provider, include_doc=False):
        for config_field in sorted(
            self.fields_provider.get_config_fields(), key=lambda x: x.name
        ):
            assert isinstance(config_field, ConfigField)
            if config_field.setting_name not in self.settings:
                continue
            config_field.value = self.settings[config_field.setting_name]
            provider.set_value(config_field, include_doc=include_doc)
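
The merger above resolves settings lazily: get_setting_value() keeps the value from the highest-priority provider, and analyze_raw_value() expands "{OTHER_SETTING}" references through string.Formatter. Below is a minimal, standalone sketch of that interpolation idea, with a cycle guard similar to __working_stack; the `raw` dict and the `resolve` helper are hypothetical names for illustration only and are not part of djangofloor.

from string import Formatter

# hypothetical raw settings, as a flat name -> template mapping
raw = {
    "DATA_ROOT": "/var/data",
    "MEDIA_ROOT": "{DATA_ROOT}/media",
    "STATIC_ROOT": "{DATA_ROOT}/static",
}
_formatter = Formatter()
_resolved = {}

def resolve(name, _stack=()):
    """Return the final value of a setting, expanding {OTHER} references recursively."""
    if name in _resolved:
        return _resolved[name]
    if name in _stack:
        raise ValueError("cyclic dependency between " + ", ".join(_stack + (name,)))
    template = raw[name]
    values = {}
    # collect every referenced setting name and resolve it first
    for literal_text, field_name, format_spec, conversion in _formatter.parse(template):
        if field_name is not None:
            values[field_name] = resolve(field_name, _stack + (name,))
    _resolved[name] = _formatter.format(template, **values)
    return _resolved[name]

print(resolve("MEDIA_ROOT"))  # -> /var/data/media
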
Code example #33
File: utils.py Project: BotBotMe/botbot-web
def parse_logs(qs, stdout=None):
    """
    Parse logs for kudos.
    """
    names = collections.deque(maxlen=200)
    unattributed = 0
    count = 0
    kudos = {}
    kudos_count = 0
    kudos_first = {}
    kudos_recent = {}

    if stdout and not isinstance(stdout, OutputWrapper):
        stdout = OutputWrapper(stdout)

    def set_thanked(nick):
        timestamp = log[3]
        kudos[nick] = kudos.get(nick, 0) + 1
        kudos_first.setdefault(nick, timestamp)
        kudos_recent[nick] = timestamp

    qs = qs.order_by('pk').filter(command='PRIVMSG')
    qs = qs.values_list('pk', 'nick', 'text', 'timestamp')
    for log in _iterate_log(qs):
        log_nick = log[1].lower()
        log_text = log[2]
        count += 1
        directed = directed_message(log_text)
        if directed:
            directed = directed.lower()
            if directed == log_nick:
                # Can't thank yourself :P
                directed = None
        if RE_KUDOS.search(log_text):
            kudos_count += 1
            attributed = False
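            # attribution pass 1: the thanks was directed ("nick: thanks!") at a recently seen nick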
            if directed:
                for nick, _ in names:
                    if nick == directed:
                        set_thanked(nick)
                        attributed = True
                        break
            if not attributed:
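                # attribution pass 2: look for any recently seen nick mentioned in the message text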
                lower_text = log_text.lower()
                for recent in (
                        bits[0] for bits in names if bits[0] != log_nick):
                    re_text = r'(?:^| )@?{}(?:$|\W)'.format(re.escape(recent))
                    if re.search(re_text, lower_text):
                        set_thanked(recent)
                        attributed = True
            if not attributed:
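                # attribution pass 3: credit whoever last directed a message at the thanker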
                for nick, directed in names:
                    if directed == log_nick:
                        set_thanked(nick)
                        attributed = True
                        break
            if not attributed:
                unattributed += 1
        names.append((log_nick, directed))
        if stdout and not count % 10000:
            stdout.write('.', ending='')
            stdout.flush()
    if stdout:
        stdout.write('')

    kudos_list = []
    for c, nick in sorted((c, nick) for nick, c in kudos.items()):
        kudos_list.append({
            'nick': nick,
            'count': c,
            'first': kudos_first[nick],
            'recent': kudos_recent[nick]
        })
    return {
        'kudos': kudos_list,
        'message_count': count,
        'kudos_given': kudos_count,
        'unattributed': unattributed,
    }
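
A hypothetical way to drive parse_logs() from a management command, passing the command's own OutputWrapper-wrapped stdout so the '.' progress dots land on the console; the "logs" app label and the Log model are assumptions for illustration only, not the actual botbot-web layout.

from django.core.management.base import BaseCommand


class Command(BaseCommand):
    help = "Recompute kudos from logged PRIVMSG lines"

    def handle(self, *args, **options):
        from logs.models import Log  # assumed model holding the IRC log rows
        # self.stdout is already an OutputWrapper, so parse_logs() uses it as-is
        stats = parse_logs(Log.objects.all(), stdout=self.stdout)
        self.stdout.write(self.style.SUCCESS(
            "%(message_count)d messages scanned, %(kudos_given)d kudos given, "
            "%(unattributed)d unattributed" % stats
        ))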