def import_by_path(dotted_path, error_prefix=''):  # pragma: no cover
    """
    Backport of django.utils.module_loading.import_by_path, added in Django 1.6.

    Import a dotted module path and return the attribute/class designated by the
    last name in the path. Raise ImproperlyConfigured if something goes wrong.
    """
    from importlib import import_module
    import sys

    from django.core.exceptions import ImproperlyConfigured
    from django.utils import six

    try:
        module_path, class_name = dotted_path.rsplit('.', 1)
    except ValueError:
        raise ImproperlyConfigured("%s%s doesn't look like a module path" % (
            error_prefix, dotted_path))
    try:
        module = import_module(module_path)
    except ImportError as e:
        msg = '%sError importing module %s: "%s"' % (
            error_prefix, module_path, e)
        six.reraise(ImproperlyConfigured, ImproperlyConfigured(msg),
                    sys.exc_info()[2])
    try:
        attr = getattr(module, class_name)
    except AttributeError:
        raise ImproperlyConfigured('%sModule "%s" does not define a "%s" attribute/class' % (
            error_prefix, module_path, class_name))
    return attr
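A note for orientation, not part of the snippet above: the examples on this page revolve around six.reraise(tp, value, tb), which raises value with the supplied traceback so the original failure point is not lost. A minimal Python 3 sketch of the same idea, using a hypothetical ConfigError and load() helper:

import sys

class ConfigError(Exception):
    pass

def load(dotted_name):
    try:
        return __import__(dotted_name)
    except ImportError as exc:
        # Same effect as six.reraise(ConfigError, ConfigError(str(exc)), sys.exc_info()[2])
        raise ConfigError("could not import %s: %s" % (dotted_name, exc)).with_traceback(sys.exc_info()[2])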
Example #2
    def handle_noargs(self, **options):
        db = options.get('database')
        connection = connections[db]
        verbosity = int(options.get('verbosity'))
        interactive = options.get('interactive')
        # The following are stealth options used by Django's internals.
        reset_sequences = options.get('reset_sequences', True)
        allow_cascade = options.get('allow_cascade', False)
        inhibit_post_migrate = options.get('inhibit_post_migrate', False)

        self.style = no_style()

        # Import the 'management' module within each installed app, to register
        # dispatcher events.
        for app_name in settings.INSTALLED_APPS:
            try:
                import_module('.management', app_name)
            except ImportError:
                pass

        sql_list = sql_flush(self.style, connection, only_django=True,
                             reset_sequences=reset_sequences,
                             allow_cascade=allow_cascade)

        if interactive:
            confirm = input("""You have requested a flush of the database.
This will IRREVERSIBLY DESTROY all data currently in the %r database,
and return each table to a fresh state.
Are you sure you want to do this?

    Type 'yes' to continue, or 'no' to cancel: """ % connection.settings_dict['NAME'])
        else:
            confirm = 'yes'

        if confirm == 'yes':
            try:
                with transaction.commit_on_success_unless_managed():
                    cursor = connection.cursor()
                    for sql in sql_list:
                        cursor.execute(sql)
            except Exception as e:
                new_msg = (
                    "Database %s couldn't be flushed. Possible reasons:\n"
                    "  * The database isn't running or isn't configured correctly.\n"
                    "  * At least one of the expected database tables doesn't exist.\n"
                    "  * The SQL was invalid.\n"
                    "Hint: Look at the output of 'django-admin.py sqlflush'. That's the SQL this command wasn't able to run.\n"
                    "The full error: %s") % (connection.settings_dict['NAME'], e)
                six.reraise(CommandError, CommandError(new_msg), sys.exc_info()[2])

            if not inhibit_post_migrate:
                self.emit_post_migrate(verbosity, interactive, db)

            # Reinstall the initial_data fixture.
            if options.get('load_initial_data'):
                call_command('loaddata', 'initial_data', **options)

        else:
            self.stdout.write("Flush cancelled.\n")
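A hedged usage sketch, assuming a project whose settings are already configured: the same flush can be run programmatically through Django's public call_command API, which is also how internal callers pass the stealth options noted above.

from django.core.management import call_command

# Skips the confirmation prompt, equivalent to answering 'yes'.
call_command('flush', interactive=False, verbosity=0)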
Example #3
 def executemany(self, operation, seq_parameters):
     try:
         if operation.count("db2regexExtraField(%s)") > 0:
             raise ValueError("Regex not supported in this operation")
         if operation.count("%s") > 0:
             operation = operation % (tuple("?" * operation.count("%s")))
         if djangoVersion[0:2] >= (1, 4):
             seq_parameters = [self._format_parameters(parameters) for parameters in seq_parameters]

         if djangoVersion[0:2] <= (1, 1):
             return super(DB2CursorWrapper, self).executemany(operation, seq_parameters)
         else:
             try:
                 return super(DB2CursorWrapper, self).executemany(operation, seq_parameters)
             except IntegrityError as e:
                 if djangoVersion[0:2] >= (1, 5):
                     six.reraise(utils.IntegrityError, utils.IntegrityError(*tuple(six.PY3 and e.args or (e._message,))), sys.exc_info()[2])
                 else:
                     six.reraise(utils.IntegrityError, utils.IntegrityError(*tuple(e)), sys.exc_info()[2])
             except DatabaseError as e:
                 if djangoVersion[0:2] >= (1, 5):
                     six.reraise(utils.DatabaseError, utils.DatabaseError(*tuple(six.PY3 and e.args or (e._message,))), sys.exc_info()[2])
                 else:
                     six.reraise(utils.DatabaseError, utils.DatabaseError(*tuple(e)), sys.exc_info()[2])
Example #4
    def validate(self, value, model_instance):
        """
        Validates value and raises ValidationError. Here, value is an
        AudioFieldFile instance.

        TODO validate length of audio file as this is not processed or compressed (yet)
        """

        # Ensure that we are getting a File object
        assert value is not None and isinstance(value, FieldFile), 'Invalid arguments'

        # get the whitelist or defaults
        whitelist = getattr(settings, 'AUDIO_WHITELIST', DEFAULT_WHITELIST)
        whitelisted_mimetypes = whitelist.get('MIMETYPES', ())
        whitelisted_extensions = whitelist.get('EXTENSIONS', ())

        #validate mimetype
        mimetype, encoding = mimetypes.guess_type(value.name)

        if mimetype not in whitelisted_mimetypes:
            six.reraise(ValidationError,
                        ValidationError(self.error_messages['invalid_audio'], code='invalid_audio'),
                        sys.exc_info()[2])


        #validate extension
        file, ext = os.path.splitext(value.name)
        if ext not in whitelisted_extensions:
            six.reraise(ValidationError,
                        ValidationError(self.error_messages['invalid_audio'], code='invalid_audio'),
                        sys.exc_info()[2])

        super(AudioField, self).validate(value, model_instance)
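A standalone sketch of the whitelist check above; ALLOWED_MIMETYPES and ALLOWED_EXTENSIONS are illustrative stand-ins for the values the field reads from settings.AUDIO_WHITELIST.

import mimetypes
import os

ALLOWED_MIMETYPES = ('audio/mpeg', 'audio/ogg')
ALLOWED_EXTENSIONS = ('.mp3', '.ogg')

def looks_like_allowed_audio(filename):
    mimetype, _encoding = mimetypes.guess_type(filename)
    _root, ext = os.path.splitext(filename)
    return mimetype in ALLOWED_MIMETYPES and ext.lower() in ALLOWED_EXTENSIONS

print(looks_like_allowed_audio('song.mp3'))   # True
print(looks_like_allowed_audio('notes.txt'))  # False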
Example #5
 def executemany(self, query, args):
     try:
         return self.cursor.executemany(query, args)
     except Database.IntegrityError as e:
         six.reraise(utils.IntegrityError, utils.IntegrityError(*tuple(e.args)), sys.exc_info()[2])
     except Database.DatabaseError as e:
         six.reraise(utils.DatabaseError, utils.DatabaseError(*tuple(e.args)), sys.exc_info()[2])
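Examples #5 and #26-#28 apply the same idea: catch the database driver's exception and re-raise it as the framework's own exception class without losing the traceback. A generic sketch with made-up DriverError and FrameworkError classes standing in for the real ones:

import sys
import six

class DriverError(Exception):       # stands in for Database.DatabaseError
    pass

class FrameworkError(Exception):    # stands in for utils.DatabaseError
    pass

class CursorWrapper(object):
    def __init__(self, cursor):
        self.cursor = cursor

    def execute(self, query, args=None):
        try:
            return self.cursor.execute(query, args)
        except DriverError as e:
            six.reraise(FrameworkError, FrameworkError(*e.args), sys.exc_info()[2])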
Example #6
def parse_http_date(date):
    """
    Parses a date format as specified by HTTP RFC7231 section 7.1.1.1.

    The three formats allowed by the RFC are accepted, even if only the first
    one is still in widespread use.

    Returns an integer expressed in seconds since the epoch, in UTC.
    """
    # email.utils.parsedate does the job for RFC1123 dates; unfortunately
    # RFC7231 makes it mandatory to support RFC850 dates too. So we roll
    # our own RFC-compliant parsing.
    for regex in RFC1123_DATE, RFC850_DATE, ASCTIME_DATE:
        m = regex.match(date)
        if m is not None:
            break
    else:
        raise ValueError("%r is not in a valid HTTP date format" % date)
    try:
        year = int(m.group('year'))
        if year < 100:
            if year < 70:
                year += 2000
            else:
                year += 1900
        month = MONTHS.index(m.group('mon').lower()) + 1
        day = int(m.group('day'))
        hour = int(m.group('hour'))
        min = int(m.group('min'))
        sec = int(m.group('sec'))
        result = datetime.datetime(year, month, day, hour, min, sec)
        return calendar.timegm(result.utctimetuple())
    except Exception:
        six.reraise(ValueError, ValueError("%r is not a valid date" % date), sys.exc_info()[2])
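The regexes (RFC1123_DATE, RFC850_DATE, ASCTIME_DATE) and the MONTHS list live elsewhere in django.utils.http, so the function above is not self-contained. As a quick standalone cross-check, the standard library yields the same epoch value for an RFC 1123 date:

import calendar
from email.utils import parsedate

print(calendar.timegm(parsedate('Sun, 06 Nov 1994 08:49:37 GMT')))  # 784111777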
Example #7
    def get_foreign_field(self, model):
        path_items = self._field_path
        path_item_count = len(path_items)
        current_model = model

        for idx, cur_field_name in enumerate(path_items):
            fields = {f.name: f for f in current_model._meta.get_fields()}

            if idx < path_item_count-1:
                try:
                    current_field = fields[cur_field_name]
                except KeyError:
                    six.reraise(
                        KeyError,
                        KeyError(
                            "Field %s doesn't exist (model %s, path: %s)"
                            % (cur_field_name, current_model.__name__,
                               '__'.join(path_items[0:idx]))),
                        sys.exc_info()[2])

                try:
                    current_model = current_field.related_model
                except AttributeError:
                    six.reraise(
                        AttributeError,
                        AttributeError(
                            "Field %s is not a foreign key (model %s, path %s)"
                            % (cur_field_name, current_model.__name__,
                               '__'.join(path_items[0:idx]))),
                        sys.exc_info()[2])
            else:
                foreign_field = fields[cur_field_name]

        return foreign_field
Example #8
    def setUpClass(cls):
        connections_override = {}
        for conn in connections.all():
            # If using in-memory sqlite databases, pass the connections to
            # the server thread.
            if (conn.vendor == 'sqlite' and
                    conn.settings_dict['NAME'] == ':memory:'):
                # Explicitly enable thread-shareability for this connection
                conn.allow_thread_sharing = True
                connections_override[conn.alias] = conn

        # Launch the live server's thread
        specified_address = os.environ.get(
            'DJANGO_LIVE_TEST_SERVER_ADDRESS', 'localhost:8081')

        # The specified ports may be of the form '8000-8010,8080,9200-9300'
        # i.e. a comma-separated list of ports or ranges of ports, so we break
        # it down into a detailed list of all possible ports.
        possible_ports = []
        try:
            host, port_ranges = specified_address.split(':')
            for port_range in port_ranges.split(','):
                # A port range can be of either form: '8000' or '8000-8010'.
                extremes = list(map(int, port_range.split('-')))
                assert len(extremes) in [1, 2]
                if len(extremes) == 1:
                    # Port range of the form '8000'
                    possible_ports.append(extremes[0])
                else:
                    # Port range of the form '8000-8010'
                    for port in range(extremes[0], extremes[1] + 1):
                        possible_ports.append(port)
        except Exception:
            msg = 'Invalid address ("%s") for live server.' % specified_address
            six.reraise(
                ImproperlyConfigured,
                ImproperlyConfigured(msg),
                sys.exc_info()[2]
            )
        cls.server_thread = LiveTornadoThread(
            host,
            possible_ports,
            cls.static_handler,
            connections_override=connections_override
        )
        cls.server_thread.daemon = True
        cls.server_thread.start()

        # Wait for the live server to be ready
        cls.server_thread.is_ready.wait()
        if cls.server_thread.error:
            # Clean up behind ourselves, since tearDownClass won't get called
            # in case of errors.
            cls._tearDownClassInternal()
            raise cls.server_thread.error

        cls.live_server_url = 'http://%s:%s' % (
            cls.server_thread.host, cls.server_thread.port)

        super(LiveTornadoTestCase, cls).setUpClass()
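The address parsing above is self-contained enough to lift out; a standalone sketch of the same expansion, where 'host:8000-8010,8080' becomes every candidate port:

def expand_ports(specified_address):
    host, port_ranges = specified_address.split(':')
    ports = []
    for port_range in port_ranges.split(','):
        # A port range can be of either form: '8000' or '8000-8010'.
        extremes = list(map(int, port_range.split('-')))
        if len(extremes) == 1:
            ports.append(extremes[0])
        else:
            ports.extend(range(extremes[0], extremes[1] + 1))
    return host, ports

print(expand_ports('localhost:8000-8002,9200'))
# ('localhost', [8000, 8001, 8002, 9200])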
Example #9
 def get_template(self, template_name):
     try:
         return Template(self.env.get_template(template_name), engine=self)
     except jinja2.TemplateNotFound as exc:
         six.reraise(TemplateDoesNotExist, TemplateDoesNotExist(exc.args), sys.exc_info()[2])
     except jinja2.TemplateSyntaxError as exc:
         six.reraise(TemplateSyntaxError, TemplateSyntaxError(exc.args), sys.exc_info()[2])
Example #10
 def executemany(self, query, params=None):
     # cx_Oracle doesn't support iterators, convert them to lists
     if params is not None and not isinstance(params, (list, tuple)):
         params = list(params)
     try:
         args = [(':arg%d' % i) for i in range(len(params[0]))]
     except (IndexError, TypeError):
         # No params given, nothing to do
         return None
     # cx_Oracle wants no trailing ';' for SQL statements.  For PL/SQL, it
     # does want a trailing ';' but not a trailing '/'.  However, these
     # characters must be included in the original query in case the query
     # is being passed to SQL*Plus.
     if query.endswith(';') or query.endswith('/'):
         query = query[:-1]
     query = convert_unicode(query % tuple(args), self.charset)
     formatted = [self._format_params(i) for i in params]
     self._guess_input_sizes(formatted)
     try:
         return self.cursor.executemany(query,
                             [self._param_generator(p) for p in formatted])
     except Database.DatabaseError as e:
         # cx_Oracle <= 4.4.0 wrongly raises a DatabaseError for ORA-01400.
         if hasattr(e.args[0], 'code') and e.args[0].code == 1400 and not isinstance(e, IntegrityError):
             six.reraise(utils.IntegrityError, utils.IntegrityError(*tuple(e.args)), sys.exc_info()[2])
         raise
Example #11
def show_menu_item(context, menu_item):
    if not isinstance(menu_item, MenuItem):
        six.reraise(template.TemplateSyntaxError,
                    template.TemplateSyntaxError('Given argument must be a '
                                                 'MenuItem object.'))

    context['menu_item'] = menu_item
    return context
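Because the call above happens outside any except block, there is no traceback to carry over, so the two-argument six.reraise is equivalent to raising the instance directly. A minimal sketch of that equivalence with a generic type check:

def require_instance(obj, expected_type):
    if not isinstance(obj, expected_type):
        # Same effect here as six.reraise(TypeError, TypeError(...)) with no traceback.
        raise TypeError('Given argument must be a %s instance.' % expected_type.__name__)
    return obj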
Example #12
    def to_python(self, data):
        """
        Checks that the file-upload field data contains a valid image (GIF, JPG,
        PNG, possibly others -- whatever the Python Imaging Library supports).
        """
        f = super(ImageField, self).to_python(data)
        if f is None:
            return None

        from django.utils.image import Image

        # We need to get a file object for Pillow. We might have a path or we might
        # have to read the data into memory.
        if hasattr(data, 'temporary_file_path'):
            file = data.temporary_file_path()
        else:
            if hasattr(data, 'read'):
                file = BytesIO(data.read())
            else:
                file = BytesIO(data['content'])

        try:
            # load() could spot a truncated JPEG, but it loads the entire
            # image in memory, which is a DoS vector. See #3848 and #18520.
            # verify() must be called immediately after the constructor.
            Image.open(file).verify()
        except Exception:
            # Pillow (or PIL) doesn't recognize it as an image.
            six.reraise(ValidationError, ValidationError(self.error_messages['invalid_image']), sys.exc_info()[2])
        if hasattr(f, 'seek') and callable(f.seek):
            f.seek(0)
        return f
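A hedged standalone version of the Pillow check above; 'upload.png' is a hypothetical path, while Image.open() and verify() are the real Pillow calls used here.

from PIL import Image

def is_valid_image(path):
    try:
        with open(path, 'rb') as fh:
            # verify() raises if Pillow cannot recognise the data as an image.
            Image.open(fh).verify()
    except Exception:
        return False
    return True

# is_valid_image('upload.png')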
Example #13
    def get_geometry_type(self, table_name, geo_col):
        cursor = self.connection.cursor()
        try:
            # Querying USER_SDO_GEOM_METADATA to get the SRID and dimension information.
            try:
                cursor.execute(
                    'SELECT "DIMINFO", "SRID" FROM "USER_SDO_GEOM_METADATA" '
                    'WHERE "TABLE_NAME"=%s AND "COLUMN_NAME"=%s',
                    (table_name.upper(), geo_col.upper())
                )
                row = cursor.fetchone()
            except Exception as msg:
                new_msg = (
                    'Could not find entry in USER_SDO_GEOM_METADATA '
                    'corresponding to "%s"."%s"\n'
                    'Error message: %s.') % (table_name, geo_col, msg)
                six.reraise(Exception, Exception(new_msg), sys.exc_info()[2])

            # TODO: Research way to find a more specific geometry field type for
            # the column's contents.
            field_type = 'GeometryField'

            # Getting the field parameters.
            field_params = {}
            dim, srid = row
            if srid != 4326:
                field_params['srid'] = srid
            # Length of object array ( SDO_DIM_ARRAY ) is number of dimensions.
            dim = len(dim)
            if dim != 2:
                field_params['dim'] = dim
        finally:
            cursor.close()

        return field_type, field_params
Example #14
File: loader.py Project: 01-/django
        def _reraise_missing_dependency(migration, missing, exc):
            """
            Checks if ``missing`` could have been replaced by any squash
            migration but wasn't because the squash migration was partially
            applied before. In that case raise a more understandable exception.

            #23556
            """
            if missing in reverse_replacements:
                candidates = reverse_replacements.get(missing, set())
                is_replaced = any(candidate in self.graph.nodes for candidate in candidates)
                if not is_replaced:
                    tries = ', '.join('%s.%s' % c for c in candidates)
                    exc_value = NodeNotFoundError(
                        "Migration {0} depends on nonexistent node ('{1}', '{2}'). "
                        "Django tried to replace migration {1}.{2} with any of [{3}] "
                        "but wasn't able to because some of the replaced migrations "
                        "are already applied.".format(
                            migration, missing[0], missing[1], tries
                        ),
                        missing)
                    exc_value.__cause__ = exc
                    if not hasattr(exc, '__traceback__'):
                        exc.__traceback__ = sys.exc_info()[2]
                    six.reraise(NodeNotFoundError, exc_value, sys.exc_info()[2])
            raise exc
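The manual __cause__ and __traceback__ bookkeeping above is the six-era spelling of exception chaining; on Python 3 the same effect is written with raise ... from .... A minimal sketch using a made-up NodeNotFound class:

class NodeNotFound(Exception):
    pass

def require_node(node, known_nodes):
    try:
        return known_nodes[node]
    except KeyError as exc:
        # Chains the new exception to the original KeyError and keeps its traceback.
        raise NodeNotFound("Migration graph has no node %r" % (node,)) from exc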
Example #15
    def handle_uncaught_exception(self, request, resolver, exc_info):
        """
        Processing for any otherwise uncaught exceptions (those that will
        generate HTTP 500 responses). Can be overridden by subclasses who want
        customised 500 handling.

        Be *very* careful when overriding this because the error could be
        caused by anything, so assuming something like the database is always
        available would be an error.
        """
        if settings.DEBUG_PROPAGATE_EXCEPTIONS:
            raise

        logger.error('Internal Server Error: %s', request.path,
            exc_info=exc_info,
            extra={
                'status_code': 500,
                'request': request
            }
        )

        if settings.DEBUG:
            return debug.technical_500_response(request, *exc_info)

        # If Http500 handler is not installed, re-raise last exception
        if resolver.urlconf_module is None:
            six.reraise(*exc_info)
        # Return an HttpResponse that displays a friendly error message.
        callback, param_dict = resolver.resolve500()
        return callback(request, **param_dict)
Example #16
 def import_by_path(dotted_path, error_prefix=''):
     """
     Import a dotted module path and return the attribute/class designated
     by the last name in the path. Raise ImproperlyConfigured if something
     goes wrong.
     """
     try:
         module_path, class_name = dotted_path.rsplit('.', 1)
     except ValueError:
         raise ImproperlyConfigured("%s%s doesn't look like a module path" %
                                    (error_prefix, dotted_path))
     try:
         module = import_module(module_path)
     except ImportError as e:
         msg = '%sError importing module %s: "%s"' % (
             error_prefix, module_path, e)
         six.reraise(ImproperlyConfigured, ImproperlyConfigured(msg),
                     sys.exc_info()[2])
     try:
         attr = getattr(module, class_name)
     except AttributeError:
         raise ImproperlyConfigured(
             '%sModule "%s" does not define a "%s" attribute/class' %
             (error_prefix, module_path, class_name))
     return attr
Example #17
 def server_bind(self):
     """Override server_bind to store the server name."""
     try:
         super(WSGIServer, self).server_bind()
     except Exception as e:
         six.reraise(WSGIServerException, WSGIServerException(e), sys.exc_info()[2])
     self.setup_environ()
Example #18
def import_by_path(dotted_path, error_prefix=''):
    """
    Import a dotted module path and return the attribute/class designated by
    the last name in the path. Raise ImproperlyConfigured if something goes
    wrong.

    Backported from Django 1.6.
    """
    try:
        module_path, class_name = dotted_path.rsplit('.', 1)
    except ValueError:
        raise ImproperlyConfigured("{0}{1} doesn't look like "
                                   "a module path".format(error_prefix,
                                                          dotted_path))
    try:
        module = import_module(module_path)
    except ImportError as err:
        msg = '{0}Error importing module {1}: "{2}"'.format(error_prefix,
                                                            module_path,
                                                            err)
        six.reraise(ImproperlyConfigured, ImproperlyConfigured(msg),
                    sys.exc_info()[2])
    try:
        attr = getattr(module, class_name)
    except AttributeError:
        raise ImproperlyConfigured('{0}Module "{1}" does not define a '
                                   '"{2}" attribute/class'.format(error_prefix,
                                                                  module_path,
                                                                  class_name))
    return attr
Example #19
 def execute(self, operation, parameters=()):
     try:
         if operation.count("db2regexExtraField(%s)") > 0:
             operation = operation.replace("db2regexExtraField(%s)", "")
             operation = operation % parameters
             parameters = ()
         if operation.count("%s") > 0:
             operation = operation % (tuple("?" * operation.count("%s")))
         if djangoVersion[0:2] >= (1, 4):
             parameters = self._format_parameters(parameters)

         if djangoVersion[0:2] <= (1, 1):
             return super(DB2CursorWrapper, self).execute(operation, parameters)
         else:
             try:
                 return super(DB2CursorWrapper, self).execute(operation, parameters)
             except IntegrityError as e:
                 if djangoVersion[0:2] >= (1, 5):
                     six.reraise(utils.IntegrityError, utils.IntegrityError(*tuple(six.PY3 and e.args or (e._message,))), sys.exc_info()[2])
                 else:
                     six.reraise(utils.IntegrityError, utils.IntegrityError(*tuple(e)), sys.exc_info()[2])
             except ProgrammingError as e:
                 if djangoVersion[0:2] >= (1, 5):
                     six.reraise(utils.ProgrammingError, utils.ProgrammingError(*tuple(six.PY3 and e.args or (e._message,))), sys.exc_info()[2])
                 else:
                     six.reraise(utils.ProgrammingError, utils.ProgrammingError(*tuple(e)), sys.exc_info()[2])
Example #20
def get_or_create(model, manager, commit=True, **kwargs):
    assert kwargs, "get_or_create() must be passed at least one keyword argument"
    defaults = kwargs.pop("defaults", {})
    lookup = kwargs.copy()
    for f in model._meta.fields:
        if f.attname in lookup:
            lookup[f.name] = lookup.pop(f.attname)
    try:
        return get_object(model, **lookup), False
    except model.DoesNotExist:
        try:
            params = dict([(k, v) for k, v in kwargs.items() if "__" not in k])
            params.update(defaults)
            obj = model(**params)
            if commit:
                with atomic(using=manager.db):
                    obj.save(force_insert=True)
            return obj, True
        except IntegrityError:
            exc_info = sys.exc_info()
            try:
                return get_object(model, **lookup), False
            except model.DoesNotExist:
                # Re-raise the DatabaseError with its original traceback.
                six.reraise(*exc_info)
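The notable part above is the deferred re-raise: sys.exc_info() is captured when the IntegrityError occurs, a recovery path is attempted, and only if that also fails is the original error re-raised with its original traceback. A generic sketch with placeholder save and refetch callables:

import sys
import six

def save_or_refetch(save, refetch):
    try:
        return save()
    except Exception:
        exc_info = sys.exc_info()
        try:
            return refetch()
        except LookupError:
            # Recovery failed; re-raise the first error, original traceback included.
            six.reraise(*exc_info)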
Example #21
def Deserializer(stream_or_string, Model, **options):
    if not isinstance(stream_or_string, (bytes, six.string_types)):
        stream_or_string = stream_or_string.read()
    if isinstance(stream_or_string, bytes):
        stream_or_string = stream_or_string.decode('utf-8')
    foreign_keys = [field.name for field in Model._meta.fields if isinstance(field, ForeignKey)]
    try:
        data = json.loads(stream_or_string)
        objects = []
        for datum in data:
            pk_name = Model._meta.pk.name
            if pk_name in datum:
                pk = datum[pk_name]
                del datum[pk_name]
            else:
                pk = None
            for foreign_key in foreign_keys:
                if foreign_key in datum and datum[foreign_key] == 0:
                    datum[foreign_key] = None
            obj = {'model': "%s.%s" % (Model._meta.app_label, Model._meta.object_name),
                   'pk': pk,
                   'fields': datum,
                   }
            print(obj)
            objects.append(obj)
        for obj in PythonDeserializer(objects, **options):
            yield obj
    except GeneratorExit:
        raise
    except Exception as e:
        # Map to deserializer error
        six.reraise(DeserializationError, DeserializationError(e), sys.exc_info()[2])
        
Example #22
def get_internal_wsgi_application():
    """
    Loads and returns the WSGI application as configured by the user in
    ``settings.WSGI_APPLICATION``. With the default ``startproject`` layout,
    this will be the ``application`` object in ``projectname/wsgi.py``.

    This function, and the ``WSGI_APPLICATION`` setting itself, are only useful
    for Django's internal servers (runserver, runfcgi); external WSGI servers
    should just be configured to point to the correct application object
    directly.

    If settings.WSGI_APPLICATION is not set (is ``None``), we just return
    whatever ``django.core.wsgi.get_wsgi_application`` returns.

    """
    from django.conf import settings
    app_path = getattr(settings, 'WSGI_APPLICATION')
    if app_path is None:
        return get_wsgi_application()

    try:
        return import_string(app_path)
    except ImportError as e:
        msg = (
            "WSGI application '%(app_path)s' could not be loaded; "
            "Error importing module: '%(exception)s'" % ({
                'app_path': app_path,
                'exception': e,
            })
        )
        six.reraise(ImproperlyConfigured, ImproperlyConfigured(msg),
                    sys.exc_info()[2])
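A hedged usage note on the helper relied on above: import_string() (the Django 1.7+ replacement for import_by_path) simply resolves a dotted path to the object it names, so it works for any importable attribute, not only WSGI applications.

from django.utils.module_loading import import_string

sqrt = import_string('math.sqrt')
print(sqrt(16.0))  # 4.0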
Example #23
def Deserializer(stream_or_string, **options):
    """
    Deserialize a stream or string of JSON data.
    """
    if not isinstance(stream_or_string, (bytes, six.string_types)):
        stream_or_string = stream_or_string.read()
    if isinstance(stream_or_string, bytes):
        stream_or_string = stream_or_string.decode('utf-8')
    try:
        objects = json.loads(stream_or_string)
        for obj in objects:
            Model = _get_model(obj['model'])
            if isinstance(obj['pk'], (tuple, list)):
                try:
                    o = Model.objects.get_by_natural_key(*obj['pk'])
                except Model.DoesNotExist:
                    obj['pk'] = None
                else:
                    obj['pk'] = o.pk
        for obj in PythonDeserializer(objects, **options):
            yield obj
    except GeneratorExit:
        raise
    except Exception as e:
        # Map to deserializer error
        six.reraise(DeserializationError, DeserializationError(e), sys.exc_info()[2])
Example #24
    def handle(self, *args, **options):
        close_old_connections()

        db = options.get('database')
        connection = connections[db]
        interactive = options.get('interactive')
        settings = dict(connection.settings_dict)
        name = settings['NAME']
        if interactive and self.should_ask:
            msg = ''.join([self.message % {
                'name': name,
            }, self.ask_message])
            confirm = input(msg)
        else:
            confirm = self.default

        if confirm == 'yes':
            backend = get_backend(settings)
            backend.connect()
            try:
                self.execute_sql(backend, **options)
            except Exception as e:
                six.reraise(CommandError, CommandError(self.error_message % {
                    'name': name,
                    'error': e,
                }), sys.exc_info()[2])
            finally:
                backend.close()

            self.post_execute(**options)
Example #25
    def _destroy_test_db(self, test_database_name, verbosity=1):
        """
        Drop the test databases using a connection to database 'master'.
        """
        if not self._test_database_create(settings):
            if verbosity >= 1:
                six.print_("Skipping Test DB destruction")
            return

        for alias in connections:
            connections[alias].close()
        try:
            with self._nodb_connection.cursor() as cursor:
                qn_db_name = self.connection.ops.quote_name(test_database_name)
                # boot all other connections to the database, leaving only this
                # connection
                cursor.execute(
                    "ALTER DATABASE %s SET SINGLE_USER WITH ROLLBACK IMMEDIATE" % qn_db_name)
                time.sleep(1)
                # database is now clear to drop
                cursor.execute("DROP DATABASE %s" % qn_db_name)
        except Exception:
            # if 'it is currently in use' in str(e):
            #     six.print_('Cannot drop database %s because it is in use' % test_database_name)
            # else:
            six.reraise(*sys.exc_info())
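Inside an except block, six.reraise(*sys.exc_info()) has the same effect as a bare raise; a minimal sketch of the equivalence:

import sys
import six

def reraise_current():
    try:
        1 / 0
    except ZeroDivisionError:
        six.reraise(*sys.exc_info())   # same as a bare `raise` here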
Example #26
 def __exit__(self, exc_type, exc_value, traceback):
     if exc_type is None:
         return
     for dj_exc_type in (
             DataError,
             OperationalError,
             IntegrityError,
             InternalError,
             ProgrammingError,
             NotSupportedError,
             DatabaseError,
             InterfaceError,
             Error,
     ):
         db_exc_type = getattr(self.wrapper.Database, dj_exc_type.__name__)
         if issubclass(exc_type, db_exc_type):
             dj_exc_value = dj_exc_type(*exc_value.args)
             dj_exc_value.__cause__ = exc_value
             if not hasattr(exc_value, '__traceback__'):
                 exc_value.__traceback__ = traceback
             # Only set the 'errors_occurred' flag for errors that may make
             # the connection unusable.
             if dj_exc_type not in (DataError, IntegrityError):
                 self.wrapper.errors_occurred = True
             six.reraise(dj_exc_type, dj_exc_value, traceback)
Example #27
 def __exit__(self, exc_type, exc_value, traceback):
     if exc_type is None:
         return
     for dj_exc_type in (
         DataError,
         OperationalError,
         IntegrityError,
         InternalError,
         ProgrammingError,
         NotSupportedError,
         DatabaseError,
         InterfaceError,
         Error,
     ):
         db_exc_type = getattr(self.database, dj_exc_type.__name__)
         if issubclass(exc_type, db_exc_type):
             # Under Python 2.6, exc_value can still be a string.
             try:
                 args = tuple(exc_value.args)
             except AttributeError:
                 args = (exc_value,)
             dj_exc_value = dj_exc_type(*args)
             if six.PY3:
                 dj_exc_value.__cause__ = exc_value
             six.reraise(dj_exc_type, dj_exc_value, traceback)
Example #28
 def __exit__(self, exc_type, exc_value, traceback):
     if exc_type is None:
         return
     for dj_exc_type in (
             DataError,
             OperationalError,
             IntegrityError,
             InternalError,
             ProgrammingError,
             NotSupportedError,
             DatabaseError,
             InterfaceError,
             Error,
         ):
         db_exc_type = getattr(self.wrapper.Database, dj_exc_type.__name__)
         if issubclass(exc_type, db_exc_type):
             # Under Python 2.6, exc_value can still be a string.
             try:
                 args = tuple(exc_value.args)
             except AttributeError:
                 args = (exc_value,)
             dj_exc_value = dj_exc_type(*args)
             dj_exc_value.__cause__ = exc_value
             # Only set the 'errors_occurred' flag for errors that may make
             # the connection unusable.
             if dj_exc_type not in (DataError, IntegrityError):
                 self.wrapper.errors_occurred = True
             six.reraise(dj_exc_type, dj_exc_value, traceback)
Example #29
    def get_filters(self, request):
        lookup_params = self.get_filters_params()
        use_distinct = False

        # Normalize the types of keys
        for key, value in lookup_params.items():
            if not isinstance(key, str):
                # 'key' will be used as a keyword argument later, so Python
                # requires it to be a string.
                del lookup_params[key]
                lookup_params[force_str(key)] = value

            if not self.model_admin.lookup_allowed(key, value):
                raise DisallowedModelAdminLookup("Filtering by %s not allowed" % key)

        filter_specs = []
        if self.list_filter:
            for list_filter in self.list_filter:
                if callable(list_filter):
                    # This is simply a custom list filter class.
                    spec = list_filter(request, lookup_params,
                                       self.model, self.model_admin)
                else:
                    field_path = None
                    if isinstance(list_filter, (tuple, list)):
                        # This is a custom FieldListFilter class for a given field.
                        field, field_list_filter_class = list_filter
                    else:
                        # This is simply a field name, so use the default
                        # FieldListFilter class that has been registered for
                        # the type of the given field.
                        field, field_list_filter_class = list_filter, FieldListFilter.create
                    if not isinstance(field, models.Field):
                        field_path = field
                        field = get_fields_from_path(self.model, field_path)[-1]
                    spec = field_list_filter_class(field, request, lookup_params,
                                                   self.model, self.model_admin,
                                                   field_path=field_path)
                    # Check if we need to use distinct()
                    use_distinct = (use_distinct or
                                    lookup_needs_distinct(self.lookup_opts,
                                                          field_path))
                if spec and spec.has_output():
                    filter_specs.append(spec)

        # At this point, all the parameters used by the various ListFilters
        # have been removed from lookup_params, which now only contains other
        # parameters passed via the query string. We now loop through the
        # remaining parameters both to ensure that all the parameters are valid
        # fields and to determine if at least one of them needs distinct(). If
        # the lookup parameters aren't real fields, then bail out.
        try:
            for key, value in lookup_params.items():
                lookup_params[key] = prepare_lookup_value(key, value)
                use_distinct = (use_distinct or
                                lookup_needs_distinct(self.lookup_opts, key))
            return filter_specs, bool(filter_specs), lookup_params, use_distinct
        except FieldDoesNotExist as e:
            six.reraise(IncorrectLookupParameters, IncorrectLookupParameters(e), sys.exc_info()[2])
Example #30
File: flush.py Project: 01-/django
    def handle(self, **options):
        database = options['database']
        connection = connections[database]
        verbosity = options['verbosity']
        interactive = options['interactive']
        # The following are stealth options used by Django's internals.
        reset_sequences = options.get('reset_sequences', True)
        allow_cascade = options.get('allow_cascade', False)
        inhibit_post_migrate = options.get('inhibit_post_migrate', False)

        self.style = no_style()

        # Import the 'management' module within each installed app, to register
        # dispatcher events.
        for app_config in apps.get_app_configs():
            try:
                import_module('.management', app_config.name)
            except ImportError:
                pass

        sql_list = sql_flush(self.style, connection, only_django=True,
                             reset_sequences=reset_sequences,
                             allow_cascade=allow_cascade)

        if interactive:
            confirm = input("""You have requested a flush of the database.
This will IRREVERSIBLY DESTROY all data currently in the %r database,
and return each table to an empty state.
Are you sure you want to do this?

    Type 'yes' to continue, or 'no' to cancel: """ % connection.settings_dict['NAME'])
        else:
            confirm = 'yes'

        if confirm == 'yes':
            try:
                with transaction.atomic(using=database,
                                        savepoint=connection.features.can_rollback_ddl):
                    with connection.cursor() as cursor:
                        for sql in sql_list:
                            cursor.execute(sql)
            except Exception as e:
                new_msg = (
                    "Database %s couldn't be flushed. Possible reasons:\n"
                    "  * The database isn't running or isn't configured correctly.\n"
                    "  * At least one of the expected database tables doesn't exist.\n"
                    "  * The SQL was invalid.\n"
                    "Hint: Look at the output of 'django-admin sqlflush'. "
                    "That's the SQL this command wasn't able to run.\n"
                    "The full error: %s") % (connection.settings_dict['NAME'], e)
                six.reraise(CommandError, CommandError(new_msg), sys.exc_info()[2])

            # Empty sql_list may signify an empty database and post_migrate would then crash
            if sql_list and not inhibit_post_migrate:
                # Emit the post migrate signal. This allows individual applications to
                # respond as if the database had been migrated from scratch.
                emit_post_migrate_signal(verbosity, interactive, database)
        else:
            self.stdout.write("Flush cancelled.\n")
Example #32
def reraise(exc):
    """
    Reraise AWSBackendError while maintaining traceback.
    """
    six.reraise(AWSBackendError, exc, sys.exc_info()[2])
Example #33
 def _commit(self):
     if self.connection is not None:
         try:
             return self.connection.commit()
         except Database.IntegrityError as e:
             six.reraise(utils.IntegrityError, utils.IntegrityError(*tuple(e.args)), sys.exc_info()[2])
Example #34
def render(self, context):
    from django.core.urlresolvers import reverse, NoReverseMatch
    args = [arg.resolve(context) for arg in self.args]
    kwargs = {
        smart_text(k, 'ascii'): v.resolve(context)
        for k, v in self.kwargs.items()
    }

    view_name = self.view_name.resolve(context)

    try:
        current_app = context.request.current_app
    except AttributeError:
        # Change the fallback value to None when the deprecation path for
        # Context.current_app completes in Django 2.0.
        current_app = context.current_app

    # Try to look up the URL twice: once given the view name, and again
    # relative to what we guess is the "main" app. If they both fail,
    # re-raise the NoReverseMatch unless we're using the
    # {% url ... as var %} construct in which case return nothing.
    url = ''

    try:
        url = reverse(view_name,
                      args=args,
                      kwargs=kwargs,
                      current_app=current_app)
    except NoReverseMatch:
        # try external apps
        for urlconf, config in six.iteritems(
                ApplicationWidget._feincms_content_models[0].ALL_APPS_CONFIG):
            partials = view_name.split(':')[1:]
            try:
                url = do_app_reverse(':'.join(partials),
                                     urlconf,
                                     args=args,
                                     kwargs=kwargs,
                                     current_app=context.current_app)
            except NoReverseMatch:
                pass
            else:
                return url

        exc_info = sys.exc_info()
        if settings.SETTINGS_MODULE:
            project_name = settings.SETTINGS_MODULE.split('.')[0]
            try:
                url = reverse(project_name + '.' + view_name,
                              args=args,
                              kwargs=kwargs,
                              current_app=current_app)
            except NoReverseMatch:
                if self.asvar is None:
                    # Re-raise the original exception, not the one with
                    # the path relative to the project. This makes a
                    # better error message.
                    six.reraise(*exc_info)
        else:
            if self.asvar is None:
                raise

    if self.asvar:
        context[self.asvar] = url
        return ''
    else:
        return url
Example #35
 def build_graph(self):
     """
     Builds a migration dependency graph using both the disk and database.
     You'll need to rebuild the graph if you apply migrations. This isn't
     usually a problem as generally migration stuff runs in a one-shot process.
     """
     # Load disk data
     self.load_disk()
     # Load database data
     if self.connection is None:
         self.applied_migrations = set()
     else:
         recorder = MigrationRecorder(self.connection)
         self.applied_migrations = recorder.applied_migrations()
     # To start, populate the migration graph with nodes for ALL migrations
     # and their dependencies. Also make note of replacing migrations at this step.
     self.graph = MigrationGraph()
     self.replacements = {}
     for key, migration in self.disk_migrations.items():
         self.graph.add_node(key, migration)
         # Internal (aka same-app) dependencies.
         self.add_internal_dependencies(key, migration)
         # Replacing migrations.
         if migration.replaces:
             self.replacements[key] = migration
     # Add external dependencies now that the internal ones have been resolved.
     for key, migration in self.disk_migrations.items():
         self.add_external_dependencies(key, migration)
     # Carry out replacements where possible.
     for key, migration in self.replacements.items():
         # Get applied status of each of this migration's replacement targets.
         applied_statuses = [(target in self.applied_migrations) for target in migration.replaces]
         # Ensure the replacing migration is only marked as applied if all of
         # its replacement targets are.
         if all(applied_statuses):
             self.applied_migrations.add(key)
         else:
             self.applied_migrations.discard(key)
         # A replacing migration can be used if either all or none of its
         # replacement targets have been applied.
         if all(applied_statuses) or (not any(applied_statuses)):
             self.graph.remove_replaced_nodes(key, migration.replaces)
         else:
             # This replacing migration cannot be used because it is partially applied.
             # Remove it from the graph and remap dependencies to it (#25945).
             self.graph.remove_replacement_node(key, migration.replaces)
     # Ensure the graph is consistent.
     try:
         self.graph.validate_consistency()
     except NodeNotFoundError as exc:
         # Check if the missing node could have been replaced by any squash
         # migration but wasn't because the squash migration was partially
         # applied before. In that case raise a more understandable exception
         # (#23556).
         # Get reverse replacements.
         reverse_replacements = {}
         for key, migration in self.replacements.items():
             for replaced in migration.replaces:
                 reverse_replacements.setdefault(replaced, set()).add(key)
         # Try to reraise exception with more detail.
         if exc.node in reverse_replacements:
             candidates = reverse_replacements.get(exc.node, set())
             is_replaced = any(candidate in self.graph.nodes for candidate in candidates)
             if not is_replaced:
                 tries = ', '.join('%s.%s' % c for c in candidates)
                 exc_value = NodeNotFoundError(
                     "Migration {0} depends on nonexistent node ('{1}', '{2}'). "
                     "Django tried to replace migration {1}.{2} with any of [{3}] "
                     "but wasn't able to because some of the replaced migrations "
                     "are already applied.".format(
                         exc.origin, exc.node[0], exc.node[1], tries
                     ),
                     exc.node
                 )
                 exc_value.__cause__ = exc
                 if not hasattr(exc, '__traceback__'):
                     exc.__traceback__ = sys.exc_info()[2]
                 six.reraise(NodeNotFoundError, exc_value, sys.exc_info()[2])
         raise exc
Example #36
    def get_filters(self, request):
        lookup_params = self.get_filters_params()
        use_distinct = False

        for key, value in lookup_params.items():
            if not self.model_admin.lookup_allowed(key, value):
                raise DisallowedModelAdminLookup(
                    "Filtering by %s not allowed" % key)

        filter_specs = []
        if self.list_filter:
            for list_filter in self.list_filter:
                if callable(list_filter):
                    # This is simply a custom list filter class.
                    spec = list_filter(request, lookup_params, self.model,
                                       self.model_admin)
                else:
                    field_path = None
                    if isinstance(list_filter, (tuple, list)):
                        # This is a custom FieldListFilter class for a given field.
                        field, field_list_filter_class = list_filter
                    else:
                        # This is simply a field name, so use the default
                        # FieldListFilter class that has been registered for
                        # the type of the given field.
                        field, field_list_filter_class = list_filter, FieldListFilter.create
                    if not isinstance(field, models.Field):
                        field_path = field
                        field = get_fields_from_path(self.model,
                                                     field_path)[-1]

                    lookup_params_count = len(lookup_params)
                    spec = field_list_filter_class(field,
                                                   request,
                                                   lookup_params,
                                                   self.model,
                                                   self.model_admin,
                                                   field_path=field_path)
                    # field_list_filter_class removes any lookup_params it
                    # processes. If that happened, check if distinct() is
                    # needed to remove duplicate results.
                    if lookup_params_count > len(lookup_params):
                        use_distinct = use_distinct or lookup_needs_distinct(
                            self.lookup_opts, field_path)
                if spec and spec.has_output():
                    filter_specs.append(spec)

        # At this point, all the parameters used by the various ListFilters
        # have been removed from lookup_params, which now only contains other
        # parameters passed via the query string. We now loop through the
        # remaining parameters both to ensure that all the parameters are valid
        # fields and to determine if at least one of them needs distinct(). If
        # the lookup parameters aren't real fields, then bail out.
        try:
            for key, value in lookup_params.items():
                lookup_params[key] = prepare_lookup_value(key, value)
                use_distinct = use_distinct or lookup_needs_distinct(
                    self.lookup_opts, key)
            return filter_specs, bool(
                filter_specs), lookup_params, use_distinct
        except FieldDoesNotExist as e:
            six.reraise(IncorrectLookupParameters,
                        IncorrectLookupParameters(e),
                        sys.exc_info()[2])
Example #37
def raise_last_exception():
    global _exception
    if _exception is not None:
        six.reraise(*_exception)
Example #38
    def request(self, **request):
        """
        The master request method. Composes the environment dictionary
        and passes to the handler, returning the result of the handler.
        Assumes defaults for the query environment, which can be overridden
        using the arguments to the request.
        """
        environ = self._base_environ(**request)

        # Curry a data dictionary into an instance of the template renderer
        # callback function.
        data = {}
        on_template_render = curry(store_rendered_templates, data)
        signals.template_rendered.connect(on_template_render,
                                          dispatch_uid="template-render")
        # Capture exceptions created by the handler.
        got_request_exception.connect(self.store_exc_info,
                                      dispatch_uid="request-exception")
        try:

            try:
                response = self.handler(environ)
            except TemplateDoesNotExist as e:
                # If the view raises an exception, Django will attempt to show
                # the 500.html template. If that template is not available,
                # we should ignore the error in favor of re-raising the
                # underlying exception that caused the 500 error. Any other
                # template found to be missing during view error handling
                # should be reported as-is.
                if e.args != ('500.html', ):
                    raise

            # Look for a signalled exception, clear the current context
            # exception data, then re-raise the signalled exception.
            # Also make sure that the signalled exception is cleared from
            # the local cache!
            if self.exc_info:
                exc_info = self.exc_info
                self.exc_info = None
                six.reraise(*exc_info)

            # Save the client and request that stimulated the response.
            response.client = self
            response.request = request

            # Add any rendered template detail to the response.
            response.templates = data.get("templates", [])
            response.context = data.get("context")

            # Flatten a single context. Not really necessary anymore thanks to
            # the __getattr__ flattening in ContextList, but has some edge-case
            # backwards-compatibility implications.
            if response.context and len(response.context) == 1:
                response.context = response.context[0]

            # Update persistent cookie data.
            if response.cookies:
                self.cookies.update(response.cookies)

            return response
        finally:
            signals.template_rendered.disconnect(
                dispatch_uid="template-render")
            got_request_exception.disconnect(dispatch_uid="request-exception")
Example #39
def do_block_translate(parser, token):
    bits = token.split_contents()

    options = {}
    remaining_bits = bits[1:]
    while remaining_bits:
        option = remaining_bits.pop(0)
        if option in options:
            raise TemplateSyntaxError(
                'The %r option was specified more than once.' % option)
        if option == 'with':
            value = token_kwargs(remaining_bits, parser, support_legacy=True)
            if not value:
                raise TemplateSyntaxError(
                    '"with" in %r tag needs at least one keyword argument.' %
                    bits[0])
        elif option == 'count':
            value = token_kwargs(remaining_bits, parser, support_legacy=True)
            if len(value) != 1:
                raise TemplateSyntaxError(
                    '"count" in %r tag expected exactly one keyword argument.'
                    % bits[0])
        elif option == "context":
            try:
                value = remaining_bits.pop(0)
                value = parser.compile_filter(value)
            except Exception:
                msg = ('"context" in %r tag expected exactly one argument.'
                       ) % bits[0]
                six.reraise(TemplateSyntaxError, TemplateSyntaxError(msg),
                            sys.exc_info()[2])
        elif option == "trimmed":
            value = True
        else:
            raise TemplateSyntaxError('Unknown argument for %r tag: %r.' %
                                      (bits[0], option))
        options[option] = value

    trimmed = options.get("trimmed", False)

    if 'count' in options:
        countervar, counter = list(six.iteritems(options['count']))[0]
    else:
        countervar, counter = None, None
    if 'context' in options:
        message_context = options['context']
    else:
        message_context = None

    extra_context = options.get('with', {})

    singular = []
    plural = []
    while parser.tokens:
        token = parser.next_token()
        if token.token_type in (TOKEN_VAR, TOKEN_TEXT):
            singular.append(token)
        else:
            break
    if countervar and counter:
        if token.contents.strip() != 'plural':
            raise TemplateSyntaxError(
                "'blocktrans' doesn't allow other block tags inside it")
        while parser.tokens:
            token = parser.next_token()
            if token.token_type in (TOKEN_VAR, TOKEN_TEXT):
                plural.append(token)
            else:
                break
    if token.contents.strip() != 'endblocktrans':
        raise TemplateSyntaxError(
            "'blocktrans' doesn't allow other block tags (seen %r) inside it" %
            token.contents)

    if phrase_settings.PHRASE_ENABLED:
        node = PhraseBlockTranslateNode(extra_context, singular, plural,
                                        countervar, counter, message_context,
                                        trimmed)
    else:
        node = BlockTranslateNode(extra_context,
                                  singular,
                                  plural,
                                  countervar,
                                  counter,
                                  message_context,
                                  trimmed=trimmed)

    return node
Example #40
0
    def import_data(self,
                    dataset,
                    dry_run=False,
                    raise_errors=False,
                    use_transactions=None):
        """
        Imports data from ``dataset``.

        ``use_transactions``
            If ``True``, the import is run inside a transaction. If
            ``dry_run`` is set, or an error occurs, the transaction is rolled
            back.
        """
        result = Result()

        if use_transactions is None:
            use_transactions = self.get_use_transactions()

        if use_transactions is True:
            # when transactions are used we want to create/update/delete object
            # as transaction will be rolled back if dry_run is set
            real_dry_run = False
            transaction.enter_transaction_management()
            transaction.managed(True)
        else:
            real_dry_run = dry_run

        instance_loader = self._meta.instance_loader_class(self, dataset)

        try:
            self.before_import(dataset, real_dry_run)
        except Exception as e:
            tb_info = traceback.format_exc()
            result.base_errors.append(Error(repr(e), tb_info))
            if raise_errors:
                if use_transactions:
                    transaction.rollback()
                    transaction.leave_transaction_management()
                raise

        for row in dataset.dict:
            try:
                row_result = RowResult()
                instance, new = self.get_or_init_instance(instance_loader, row)
                if new:
                    row_result.import_type = RowResult.IMPORT_TYPE_NEW
                else:
                    row_result.import_type = RowResult.IMPORT_TYPE_UPDATE
                row_result.new_record = new
                original = deepcopy(instance)
                if self.for_delete(row, instance):
                    if new:
                        row_result.import_type = RowResult.IMPORT_TYPE_SKIP
                        row_result.diff = self.get_diff(
                            None, None, real_dry_run)
                    else:
                        row_result.import_type = RowResult.IMPORT_TYPE_DELETE
                        self.delete_instance(instance, real_dry_run)
                        row_result.diff = self.get_diff(
                            original, None, real_dry_run)
                else:
                    self.import_obj(instance, row, real_dry_run)
                    if self.skip_row(instance, original):
                        row_result.import_type = RowResult.IMPORT_TYPE_SKIP
                    else:
                        self.save_instance(instance, real_dry_run)
                        self.save_m2m(instance, row, real_dry_run)
                    row_result.diff = self.get_diff(original, instance,
                                                    real_dry_run)
            except Exception as e:
                tb_info = traceback.format_exc(2)
                row_result.errors.append(Error(repr(e), tb_info))
                if raise_errors:
                    if use_transactions:
                        transaction.rollback()
                        transaction.leave_transaction_management()
                    six.reraise(*sys.exc_info())
            if (row_result.import_type != RowResult.IMPORT_TYPE_SKIP
                    or self._meta.report_skipped):
                result.rows.append(row_result)

        if use_transactions:
            if dry_run or result.has_errors():
                transaction.rollback()
            else:
                transaction.commit()
            transaction.leave_transaction_management()

        return result
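A hedged usage sketch for the method above; BookResource and the myapp.models.Book import are hypothetical, and a configured Django project with django-import-export and tablib installed is assumed:

import tablib
from import_export import resources

from myapp.models import Book  # hypothetical model


class BookResource(resources.ModelResource):
    class Meta:
        model = Book


dataset = tablib.Dataset(headers=['id', 'name'])
dataset.append(['1', 'Example Book'])

# dry_run=True keeps the database untouched: with transactions enabled the
# work is rolled back, otherwise saves are skipped entirely. raise_errors=True
# re-raises the first row error with its traceback.
result = BookResource().import_data(dataset, dry_run=True, raise_errors=True)
print(result.has_errors())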
Example #41
0
def do_translate(parser, token):
    """
    This will mark a string for translation and will
    translate the string for the current language.
    Usage::
        {% trans "this is a test" %}
    This will mark the string for translation so it will
    be pulled out by make-messages.py into the .po files
    and will run the string through the translation engine.
    There is a second form::
        {% trans "this is a test" noop %}
    This will only mark for translation, but will return
    the string unchanged. Use it when you need to store
    values into forms that should be translated later on.
    You can use variables instead of constant strings
    to translate stuff you marked somewhere else::
        {% trans variable %}
    This will just try to translate the contents of
    the variable ``variable``. Make sure that the string
    in there is something that is in the .po file.
    It is possible to store the translated string into a variable::
        {% trans "this is a test" as var %}
        {{ var }}
    Contextual translations are also supported::
        {% trans "this is a test" context "greeting" %}
    This is equivalent to calling pgettext instead of (u)gettext.
    """
    bits = token.split_contents()
    if len(bits) < 2:
        raise TemplateSyntaxError("'%s' takes at least one argument" % bits[0])
    message_string = parser.compile_filter(bits[1])
    remaining = bits[2:]

    noop = False
    asvar = None
    message_context = None
    seen = set()
    invalid_context = {'as', 'noop'}

    while remaining:
        option = remaining.pop(0)
        if option in seen:
            raise TemplateSyntaxError(
                "The '%s' option was specified more than once." % option, )
        elif option == 'noop':
            noop = True
        elif option == 'context':
            try:
                value = remaining.pop(0)
            except IndexError:
                msg = "No argument provided to the '%s' tag for the context option." % bits[
                    0]
                six.reraise(TemplateSyntaxError, TemplateSyntaxError(msg),
                            sys.exc_info()[2])
            if value in invalid_context:
                raise TemplateSyntaxError(
                    "Invalid argument '%s' provided to the '%s' tag for the context option"
                    % (value, bits[0]), )
            message_context = parser.compile_filter(value)
        elif option == 'as':
            try:
                value = remaining.pop(0)
            except IndexError:
                msg = "No argument provided to the '%s' tag for the as option." % bits[
                    0]
                six.reraise(TemplateSyntaxError, TemplateSyntaxError(msg),
                            sys.exc_info()[2])
            asvar = value
        else:
            raise TemplateSyntaxError(
                "Unknown argument for '%s' tag: '%s'. The only options "
                "available are 'noop', 'context' \"xxx\", and 'as VAR'." % (
                    bits[0],
                    option,
                ))
        seen.add(option)

    if phrase_settings.PHRASE_ENABLED:
        return PhraseTranslateNode(message_string, noop, asvar,
                                   message_context)
    else:
        return TranslateNode(message_string, noop, asvar, message_context)
Example #42
0
    def parse(self):
        """
        Parse the POST data and break it into a FILES MultiValueDict and a POST
        MultiValueDict.

        Returns a tuple containing the POST and FILES dictionary, respectively.
        """
        # We have to import QueryDict down here to avoid a circular import.
        from django.http import QueryDict

        encoding = self._encoding
        handlers = self._upload_handlers

        # HTTP spec says that Content-Length >= 0 is valid
        # handling content-length == 0 before continuing
        if self._content_length == 0:
            return QueryDict('', encoding=self._encoding), MultiValueDict()

        # See if any of the handlers take care of the parsing.
        # This allows overriding everything if need be.
        for handler in handlers:
            result = handler.handle_raw_input(self._input_data, self._meta,
                                              self._content_length,
                                              self._boundary, encoding)
            # Check to see if it was handled
            if result is not None:
                return result[0], result[1]

        # Create the data structures to be used later.
        self._post = QueryDict('', mutable=True)
        self._files = MultiValueDict()

        # Instantiate the parser and stream:
        stream = LazyStream(ChunkIter(self._input_data, self._chunk_size))

        # Whether or not to signal a file-completion at the beginning of the loop.
        old_field_name = None
        counters = [0] * len(handlers)

        try:
            for item_type, meta_data, field_stream in Parser(
                    stream, self._boundary):
                if old_field_name:
                    # We run this at the beginning of the next loop
                    # since we cannot be sure a file is complete until
                    # we hit the next boundary/part of the multipart content.
                    self.handle_file_complete(old_field_name, counters)
                    old_field_name = None

                try:
                    disposition = meta_data['content-disposition'][1]
                    field_name = disposition['name'].strip()
                except (KeyError, IndexError, AttributeError):
                    continue

                transfer_encoding = meta_data.get('content-transfer-encoding')
                if transfer_encoding is not None:
                    transfer_encoding = transfer_encoding[0].strip()
                field_name = force_text(field_name, encoding, errors='replace')

                if item_type == FIELD:
                    # This is a post field, we can just set it in the post
                    if transfer_encoding == 'base64':
                        raw_data = field_stream.read()
                        try:
                            data = base64.b64decode(raw_data)
                        except _BASE64_DECODE_ERROR:
                            data = raw_data
                    else:
                        data = field_stream.read()

                    self._post.appendlist(
                        field_name, force_text(data,
                                               encoding,
                                               errors='replace'))
                elif item_type == FILE:
                    # This is a file, use the handler...
                    file_name = disposition.get('filename')
                    if file_name:
                        file_name = force_text(file_name,
                                               encoding,
                                               errors='replace')
                        file_name = self.IE_sanitize(
                            unescape_entities(file_name))
                    if not file_name:
                        continue

                    content_type, content_type_extra = meta_data.get(
                        'content-type', ('', {}))
                    content_type = content_type.strip()
                    charset = content_type_extra.get('charset')

                    try:
                        content_length = int(
                            meta_data.get('content-length')[0])
                    except (IndexError, TypeError, ValueError):
                        content_length = None

                    counters = [0] * len(handlers)
                    try:
                        for handler in handlers:
                            try:
                                handler.new_file(field_name, file_name,
                                                 content_type, content_length,
                                                 charset, content_type_extra)
                            except StopFutureHandlers:
                                break

                        for chunk in field_stream:
                            if transfer_encoding == 'base64':
                                # We only special-case base64 transfer encoding
                                # We should always decode base64 chunks by multiple of 4,
                                # ignoring whitespace.

                                stripped_chunk = b"".join(chunk.split())

                                remaining = len(stripped_chunk) % 4
                                while remaining != 0:
                                    over_chunk = field_stream.read(4 -
                                                                   remaining)
                                    stripped_chunk += b"".join(
                                        over_chunk.split())
                                    remaining = len(stripped_chunk) % 4

                                try:
                                    chunk = base64.b64decode(stripped_chunk)
                                except Exception as e:
                                    # Since this is only a chunk, any error is an unfixable error.
                                    msg = "Could not decode base64 data: %r" % e
                                    six.reraise(MultiPartParserError,
                                                MultiPartParserError(msg),
                                                sys.exc_info()[2])

                            for i, handler in enumerate(handlers):
                                chunk_length = len(chunk)
                                chunk = handler.receive_data_chunk(
                                    chunk, counters[i])
                                counters[i] += chunk_length
                                if chunk is None:
                                    # If the chunk received by the handler is None, then don't continue.
                                    break

                    except SkipFile:
                        self._close_files()
                        # Just use up the rest of this file...
                        exhaust(field_stream)
                    else:
                        # Handle file upload completions on next iteration.
                        old_field_name = field_name
                else:
                    # If this is neither a FIELD nor a FILE, just exhaust the stream.
                    exhaust(stream)
        except StopUpload as e:
            self._close_files()
            if not e.connection_reset:
                exhaust(self._input_data)
        else:
            # Make sure that the request data is all fed
            exhaust(self._input_data)

        # Signal that the upload has completed.
        for handler in handlers:
            retval = handler.upload_complete()
            if retval:
                break

        return self._post, self._files
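The base64 branch above is the subtlest part of the loop: chunks must be padded to a multiple of four bytes before decoding, and a failed decode is re-raised as a parser error with its traceback preserved. A self-contained sketch of that step, with a local stand-in exception class:

import base64
import binascii
import sys

import six


class MultiPartParserError(Exception):
    # Local stand-in so this sketch is self-contained; the parser above uses
    # Django's MultiPartParserError.
    pass


def decode_base64_chunk(chunk, read_more):
    # Illustrative version of the base64 branch above: strip whitespace, pad
    # the chunk to a multiple of four bytes by pulling extra data from
    # read_more(n), then decode. A failed decode is re-raised as a parser
    # error whose traceback still points at the b64decode() call.
    stripped = b"".join(chunk.split())
    remaining = len(stripped) % 4
    while remaining != 0:
        over_chunk = read_more(4 - remaining)
        if not over_chunk:
            break  # stream exhausted; let b64decode report the bad padding
        stripped += b"".join(over_chunk.split())
        remaining = len(stripped) % 4
    try:
        return base64.b64decode(stripped)
    except (binascii.Error, TypeError, ValueError) as e:
        msg = "Could not decode base64 data: %r" % e
        six.reraise(MultiPartParserError, MultiPartParserError(msg),
                    sys.exc_info()[2])

For example, decode_base64_chunk(b'aGVsbG8=', lambda n: b'') returns b'hello'.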
Example #43
0
    def readline(self, *args, **kwargs):
        self._read_started = True
        try:
            return self._stream.readline(*args, **kwargs)
        except IOError as e:
            six.reraise(UnreadablePostError, UnreadablePostError(*e.args),
                        sys.exc_info()[2])
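The readline() wrapper above has an obvious read() sibling in the same style; a hedged, self-contained sketch (SafeStream and the local UnreadablePostError are illustrative stand-ins, not Django's classes):

import sys

import six


class UnreadablePostError(IOError):
    # Local stand-in for Django's UnreadablePostError, so the sketch runs on
    # its own.
    pass


class SafeStream(object):
    # Illustrative wrapper: any IOError from the underlying stream is
    # re-raised as UnreadablePostError with the original traceback intact.
    def __init__(self, stream):
        self._stream = stream
        self._read_started = False

    def read(self, *args, **kwargs):
        self._read_started = True
        try:
            return self._stream.read(*args, **kwargs)
        except IOError as e:
            six.reraise(UnreadablePostError, UnreadablePostError(*e.args),
                        sys.exc_info()[2])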
Example #44
0
    def handle_noargs(self, **options):
        db = options.get('database')
        connection = connections[db]
        verbosity = int(options.get('verbosity'))
        interactive = options.get('interactive')
        # 'reset_sequences' is a stealth option
        reset_sequences = options.get('reset_sequences', True)

        self.style = no_style()

        # Import the 'management' module within each installed app, to register
        # dispatcher events.
        for app_name in settings.INSTALLED_APPS:
            try:
                import_module('.management', app_name)
            except ImportError:
                pass

        sql_list = sql_flush(self.style, connection, only_django=True, reset_sequences=reset_sequences)

        if interactive:
            confirm = input("""You have requested a flush of the database.
This will IRREVERSIBLY DESTROY all data currently in the %r database,
and return each table to the state it was in after syncdb.
Are you sure you want to do this?

    Type 'yes' to continue, or 'no' to cancel: """ % connection.settings_dict['NAME'])
        else:
            confirm = 'yes'

        if confirm == 'yes':
            try:
                with transaction.commit_on_success_unless_managed():
                    cursor = connection.cursor()
                    for sql in sql_list:
                        cursor.execute(sql)
            except Exception as e:
                new_msg = (
                    "Database %s couldn't be flushed. Possible reasons:\n"
                    "  * The database isn't running or isn't configured correctly.\n"
                    "  * At least one of the expected database tables doesn't exist.\n"
                    "  * The SQL was invalid.\n"
                    "Hint: Look at the output of 'django-admin.py sqlflush'. That's the SQL this command wasn't able to run.\n"
                    "The full error: %s") % (connection.settings_dict['NAME'], e)
                six.reraise(CommandError, CommandError(new_msg), sys.exc_info()[2])
            # Emit the post sync signal. This allows individual
            # applications to respond as if the database had been
            # sync'd from scratch.
            all_models = []
            for app in models.get_apps():
                all_models.extend([
                    m for m in models.get_models(app, include_auto_created=True)
                    if router.allow_syncdb(db, m)
                ])
            emit_post_sync_signal(set(all_models), verbosity, interactive, db)

            # Reinstall the initial_data fixture.
            if options.get('load_initial_data'):
                # Reinstall the initial_data fixture.
                call_command('loaddata', 'initial_data', **options)

        else:
            self.stdout.write("Flush cancelled.\n")
Example #45
0
def do_block_translate(parser, token):
    """
    This will translate a block of text with parameters.

    Usage::

        {% blocktrans with bar=foo|filter boo=baz|filter %}
        This is {{ bar }} and {{ boo }}.
        {% endblocktrans %}

    Additionally, this supports pluralization::

        {% blocktrans count count=var|length %}
        There is {{ count }} object.
        {% plural %}
        There are {{ count }} objects.
        {% endblocktrans %}

    This is much like ngettext, only in template syntax.

    The "var as value" legacy format is still supported::

        {% blocktrans with foo|filter as bar and baz|filter as boo %}
        {% blocktrans count var|length as count %}

    Contextual translations are supported::

        {% blocktrans with bar=foo|filter context "greeting" %}
            This is {{ bar }}.
        {% endblocktrans %}

    This is equivalent to calling pgettext/npgettext instead of
    (u)gettext/(u)ngettext.
    """
    bits = token.split_contents()

    options = {}
    remaining_bits = bits[1:]
    while remaining_bits:
        option = remaining_bits.pop(0)
        if option in options:
            raise TemplateSyntaxError('The %r option was specified more '
                                      'than once.' % option)
        if option == 'with':
            value = token_kwargs(remaining_bits, parser, support_legacy=True)
            if not value:
                raise TemplateSyntaxError('"with" in %r tag needs at least '
                                          'one keyword argument.' % bits[0])
        elif option == 'count':
            value = token_kwargs(remaining_bits, parser, support_legacy=True)
            if len(value) != 1:
                raise TemplateSyntaxError('"count" in %r tag expected exactly '
                                          'one keyword argument.' % bits[0])
        elif option == "context":
            try:
                value = remaining_bits.pop(0)
                value = parser.compile_filter(value)
            except Exception:
                msg = (
                    '"context" in %r tag expected '
                    'exactly one argument.') % bits[0]
                six.reraise(TemplateSyntaxError, TemplateSyntaxError(msg), sys.exc_info()[2])
        elif option == "trimmed":
            value = True
        else:
            raise TemplateSyntaxError('Unknown argument for %r tag: %r.' %
                                      (bits[0], option))
        options[option] = value

    if 'count' in options:
        countervar, counter = list(options['count'].items())[0]
    else:
        countervar, counter = None, None
    if 'context' in options:
        message_context = options['context']
    else:
        message_context = None
    extra_context = options.get('with', {})

    trimmed = options.get("trimmed", False)

    singular = []
    plural = []
    while parser.tokens:
        token = parser.next_token()
        if token.token_type in (TOKEN_VAR, TOKEN_TEXT):
            singular.append(token)
        else:
            break
    if countervar and counter:
        if token.contents.strip() != 'plural':
            raise TemplateSyntaxError("'blocktrans' doesn't allow other block tags inside it")
        while parser.tokens:
            token = parser.next_token()
            if token.token_type in (TOKEN_VAR, TOKEN_TEXT):
                plural.append(token)
            else:
                break
    if token.contents.strip() != 'endblocktrans':
        raise TemplateSyntaxError("'blocktrans' doesn't allow other block tags (seen %r) inside it" % token.contents)

    return BlockTranslateNode(extra_context, singular, plural, countervar,
                              counter, message_context, trimmed=trimmed)
Example #46
0
    def create_invoice(self, invoice):
        """
        Creates an invoice with its invoice items.
        https://developer.paypal.com/docs/api/invoicing/#definition-payment_term
        :param invoice: instance of Invoice class.
        :return: instance of Invoice with backend_id filled.
        """

        if invoice.backend_id:
            return

        if not invoice.items.count():
            raise PayPalError('"items" size must be between 1 and 100.')

        if not invoice.price:
            raise PayPalError('The total cost must not be zero.')

        phone = invoice.issuer_details.get('phone', {})
        if not phone:
            raise PayPalError('"phone" is a required attribute')

        if phone and 'country_code' not in phone:
            raise PayPalError('"phone"."country_code" is a required attribute')

        if phone and 'national_number' not in phone:
            raise PayPalError(
                '"phone"."national_number" is a required attribute')

        invoice_details = {
            'merchant_info': {
                'email': invoice.issuer_details.get('email'),
                'business_name': invoice.issuer_details.get('company'),
                'phone': {
                    'country_code': phone.get('country_code'),
                    'national_number': phone.get('national_number'),
                },
                'address': {
                    'line1': invoice.issuer_details.get('address'),
                    'city': invoice.issuer_details.get('city'),
                    'state': invoice.issuer_details.get('state'),
                    'postal_code': invoice.issuer_details.get('postal'),
                    'country_code': invoice.issuer_details.get('country_code')
                }
            },
            'items': [{
                'name': item.name,
                'unit_of_measure': item.unit_of_measure,
                'quantity': item.quantity,
                'date': self._format_date(item.start.date()),
                'unit_price': {
                    'currency': self.currency_name,
                    'value': self._format_decimal(item.unit_price),
                }
            } for item in invoice.items.iterator()],
            'tax_inclusive':
            False,
            'payment_term': {
                'due_date': self._format_date(invoice.end_date),
            },
            'total_amount': {
                'currency': self.currency_name,
                'value': self._format_decimal(invoice.total)
            }
            # 'logo_url': pass a logo URL if needed (250x90, HTTPS). The image is not displayed on the PDF at the moment.
        }

        if invoice.tax_percent and invoice.tax_percent > 0:
            for item in invoice_details['items']:
                item['tax'] = {
                    'name': 'VAT',
                    'percent': self._format_decimal(invoice.tax_percent),
                }

        invoice_details['billing_info'] = [{
            'email':
            invoice.customer.email,
            'business_name':
            invoice.customer.name,
        }]

        backend_invoice = paypal.Invoice(invoice_details)

        try:
            if backend_invoice.create():
                invoice.state = backend_invoice.status
                invoice.backend_id = backend_invoice.id
                invoice.number = backend_invoice.number
                invoice.save(update_fields=['state', 'backend_id', 'number'])

                return invoice
            else:
                raise PayPalError(backend_invoice.error)
        except paypal.exceptions.ConnectionError as e:
            six.reraise(PayPalError, PayPalError(e), sys.exc_info()[2])
Example #47
0
        def request(self,
                    url,
                    method='GET',
                    data=None,
                    verify=False,
                    **kwargs):
            response_types = {
                'application/json': 'json',
                'application/xml': 'xml'
            }
            headers = {
                'User-Agent': 'NodeConductor/%s' % __version__,
                'Accept': 'application/json',
                'X-Killbill-ApiKey': self.api_key,
                'X-Killbill-ApiSecret': self.api_secret
            }

            if method.upper() in ('POST', 'DELETE'):
                headers['Content-Type'] = self.type
                headers['X-Killbill-CreatedBy'] = 'NodeConductor'

            url = url if url.startswith(self.api_url) else self.api_url + url

            try:
                response = getattr(requests, method.lower())(url,
                                                             params=kwargs,
                                                             data=data,
                                                             auth=self.auth,
                                                             headers=headers,
                                                             verify=verify)
            except requests.ConnectionError as e:
                six.reraise(KillBillError, KillBillError(e), sys.exc_info()[2])

            codes = requests.status_codes.codes
            response_type = response_types.get(
                response.headers.get('content-type'), '')

            if response.status_code == codes.created:
                location = response.headers.get('location')
                if location:
                    return self.request(location)

            elif response.status_code != codes.ok:
                reason = response.reason
                if response_type == 'json':
                    try:
                        reason = response.json()['message']
                    except ValueError:
                        pass
                elif response.status_code == codes.server_error:
                    try:
                        txt = etree.fromstring(response.text)
                        reason = txt.xpath('.//pre/text()')[1].split('\n')[2]
                    except ValueError:
                        pass

                error_message = "%s. Request to Killbill backend failed: %s" % (
                    response.status_code, reason)
                if response.status_code == codes.not_found:
                    raise NotFoundKillBillError(error_message)
                raise KillBillError(error_message)

            try:
                if response_type == 'xml':
                    data = etree.fromstring(
                        response.text.encode('utf-8'),
                        etree.XMLParser(ns_clean=True,
                                        recover=True,
                                        encoding='utf-8'))

                elif response_type == 'json' and response.text:
                    data = response.json()

                else:
                    data = response.text

            except ValueError as e:
                raise KillBillError(
                    "Incorrect response from Killbill backend %s: %s" %
                    (url, e))

            return data
Example #48
0
    def create_plan(self, amount, tax, name, description, return_url,
                    cancel_url):
        """
        Create and activate a monthly billing plan.
        https://developer.paypal.com/docs/api/payments.billing-plans

        :param amount: Decimal value of plan payment for one month including tax.
        :param tax: Decimal value of VAT tax.
        :param name: Name of the billing plan.
        :param description: Description of the billing plan.
        :param return_url: Callback view URL for approved billing plan.
        :param cancel_url: Callback view URL for cancelled billing plan.
        :return: Billing plan ID.
        """
        if amount < tax:
            raise PayPalError('Plan price should be greater than tax.')

        plan = paypal.BillingPlan({
            'name':
            name,
            'description':
            description,
            'type':
            'INFINITE',
            'payment_definitions': [{
                'name':
                'Monthly payment for {}'.format(name),
                'type':
                'REGULAR',
                'frequency_interval':
                1,
                'frequency':
                'MONTH',
                'cycles':
                0,
                'amount': {
                    'currency': self.currency_name,
                    'value': self._format_decimal(amount - tax)
                },
                'charge_models': [{
                    'type': 'TAX',
                    'amount': {
                        'currency': self.currency_name,
                        'value': self._format_decimal(tax)
                    }
                }]
            }],
            'merchant_preferences': {
                'return_url': return_url,
                'cancel_url': cancel_url,
                'auto_bill_amount': 'YES',
            }
        })

        try:
            if plan.create() and plan.activate():
                return plan.id
            else:
                raise PayPalError(plan.error)
        except paypal.exceptions.ConnectionError as e:
            six.reraise(PayPalError, PayPalError(e), sys.exc_info()[2])
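A hedged call sketch for the method above; backend stands in for an instance of the surrounding PayPal backend class, and the URLs and plan details are placeholders:

from decimal import Decimal


def create_standard_plan(backend):
    # 'backend' is assumed to be an instance of the class defining
    # create_plan() above; the 10.00 total includes 2.00 of VAT.
    return backend.create_plan(
        amount=Decimal('10.00'),
        tax=Decimal('2.00'),
        name='Standard',
        description='Monthly subscription, billed until cancelled',
        return_url='https://example.com/billing/approved/',
        cancel_url='https://example.com/billing/cancelled/',
    )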
Example #49
0
    def get_strategy(self):
        try:
            return utils.get_object_backup_strategy(self.backup_source)
        except KeyError:
            six.reraise(exceptions.BackupStrategyNotFoundError,
                        exceptions.BackupStrategyNotFoundError())
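A hedged sketch of the lookup this wraps: a registry keyed by model class, where a KeyError miss is surfaced as a domain-specific error, here with the traceback forwarded explicitly (the names below are illustrative, not the project's real utils module):

import sys

import six


class BackupStrategyNotFoundError(Exception):
    # Local stand-in so the sketch is self-contained.
    pass


_strategies = {}  # maps a model class to its backup strategy class


def get_object_backup_strategy(obj):
    try:
        return _strategies[obj.__class__]
    except KeyError:
        msg = 'No backup strategy registered for %s' % obj.__class__.__name__
        six.reraise(BackupStrategyNotFoundError,
                    BackupStrategyNotFoundError(msg),
                    sys.exc_info()[2])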