Example #1
def _alter_field(editor, model, old_field, new_field, old_type, new_type,
                 old_db_params, new_db_params, strict=False):
    orig_alter_field(editor, model, old_field, new_field, old_type, new_type,
                     old_db_params, new_db_params, strict=strict)
    if isinstance(new_field, JSONField):
        old_index = getattr(old_field, "db_index_options", None)
        new_index = getattr(new_field, "db_index_options", None)
        if new_index != old_index:
            if old_index:
                # partition() returns (false_items, true_items): whole-column
                # indexes first, path-specific indexes second.
                normal_index, with_path_index = partition(
                    lambda index: bool(index.get("path")), old_index)
                index_names = (editor._constraint_names(model, [old_field.column], index=True)
                               if normal_index else [])
                if with_path_index:
                    all_indexes = editor._constraint_names(model, index=True)
                    # Path-specific index names end with a digest of the path.
                    for index_info in with_path_index:
                        path_hash = editor._digest(index_info["path"])
                        for i, index in enumerate(all_indexes):
                            if index.endswith(path_hash):
                                index_names.append(all_indexes.pop(i))
                                break

                for index_name in index_names:
                    editor.execute(editor._delete_constraint_sql(
                        editor.sql_delete_index, model, index_name))
            if new_index:
                for sql in editor._create_jsonb_index_sql(model, new_field):
                    editor.execute(sql)
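This wrapper presumably replaces the schema editor's original `_alter_field`; a minimal sketch of how such a monkey-patch might be installed, assuming `orig_alter_field` is captured from the PostgreSQL schema editor (the import path and capture order are assumptions, not shown in the snippet above):

# Hypothetical patch installation; DatabaseSchemaEditor is an assumed target.
from django.db.backends.postgresql.schema import DatabaseSchemaEditor

orig_alter_field = DatabaseSchemaEditor._alter_field
# The wrapper's `editor` parameter takes the place of `self` once assigned.
DatabaseSchemaEditor._alter_field = _alter_field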
Example #2
    def send_messages(self, email_messages):
        if not email_messages:
            return 0
        msg_count = 0
        try:
            client_created = self.open()
            prepared_messages = [
                self.prepare_message(message) for message in email_messages
            ]
            pre_send.send_robust(self.__class__, messages=prepared_messages)
            responses = self.client.emails.send_batch(
                *prepared_messages, TrackOpens=self.get_option("TRACK_OPENS"))
            post_send.send_robust(self.__class__,
                                  messages=prepared_messages,
                                  response=responses)
            # partition() returns (false_items, true_items): successful sends
            # (ErrorCode == 0) first, failures second.
            sent, not_sent = partition(lambda x: x["ErrorCode"] != 0,
                                       responses)
            msg_count = len(sent)
            if not_sent:
                self.raise_for_response(not_sent)
            if client_created:
                self.close()
        except Exception as exc:
            on_exception.send_robust(self.__class__,
                                     raw_messages=email_messages,
                                     exception=exc)
            if not self.fail_silently:
                raise
        return msg_count
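A hypothetical way to exercise this backend through Django's mail API (the backend dotted path is illustrative):

# Hypothetical usage; 'myapp.backends.PostmarkBackend' is an illustrative path.
from django.core.mail import EmailMessage, get_connection

connection = get_connection('myapp.backends.PostmarkBackend')
message = EmailMessage('Subject', 'Body', 'from@example.com', ['to@example.com'])
sent_count = connection.send_messages([message])  # number successfully sent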
Example #3
    def resolve_expression(
        self,
        query=None,
        allow_joins=True,
        reuse=None,
        summarize=False,
        for_save=False,
    ):
        expressions = list(self.flatten())
        # Split expressions and wrappers.
        index_expressions, wrappers = partition(
            lambda e: isinstance(e, self.wrapper_classes),
            expressions,
        )
        wrapper_types = [type(wrapper) for wrapper in wrappers]
        if len(wrapper_types) != len(set(wrapper_types)):
            raise ValueError(
                "Multiple references to %s can't be used in an indexed "
                "expression."
                % ", ".join(
                    [wrapper_cls.__qualname__ for wrapper_cls in self.wrapper_classes]
                )
            )
        if expressions[1 : len(wrappers) + 1] != wrappers:
            raise ValueError(
                "%s must be topmost expressions in an indexed expression."
                % ", ".join(
                    [wrapper_cls.__qualname__ for wrapper_cls in self.wrapper_classes]
                )
            )
        # Wrap expressions in parentheses if they are not column references.
        root_expression = index_expressions[1]
        resolve_root_expression = root_expression.resolve_expression(
            query,
            allow_joins,
            reuse,
            summarize,
            for_save,
        )
        if not isinstance(resolve_root_expression, Col):
            root_expression = Func(root_expression, template="(%(expressions)s)")

        if wrappers:
            # Order wrappers and set their expressions.
            wrappers = sorted(
                wrappers,
                key=lambda w: self.wrapper_classes.index(type(w)),
            )
            wrappers = [wrapper.copy() for wrapper in wrappers]
            for i, wrapper in enumerate(wrappers[:-1]):
                wrapper.set_source_expressions([wrappers[i + 1]])
            # Set the root expression on the deepest wrapper.
            wrappers[-1].set_source_expressions([root_expression])
            self.set_source_expressions([wrappers[0]])
        else:
            # Use the root expression, if there are no wrappers.
            self.set_source_expressions([root_expression])
        return super().resolve_expression(
            query, allow_joins, reuse, summarize, for_save
        )
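For context, this resolve_expression path runs whenever a functional index is declared on a model; a minimal sketch (model, field, and index names are illustrative):

# Hypothetical model with a functional index; names are illustrative.
from django.db import models
from django.db.models.functions import Lower

class Article(models.Model):
    title = models.CharField(max_length=200)

    class Meta:
        indexes = [models.Index(Lower('title'), name='article_title_lower_idx')]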
Example #4
    def list(self, request, *args, **kwargs):
        # cites_to can contain citations or IDs, so split out IDs into separate
        # cites_to_id parameter
        if 'cites_to' in request.query_params:
            request._request.GET = params = request._request.GET.copy()
            # partition() returns (false_items, true_items): textual citations
            # first, numeric IDs second.
            cites_to, cites_to_id = partition(lambda c: c.isdigit(),
                                              params.getlist('cites_to'))
            params.setlist('cites_to', cites_to)
            params.setlist('cites_to_id', cites_to_id)

        response = super().list(request, *args, **kwargs)

        if request.accepted_renderer.format == 'csv':
            response = self.bundle_csv_response(response)

        return response
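For instance, a request such as `?cites_to=1234&cites_to=410+U.S.+113` (values illustrative) would be rewritten so that `cites_to_id` holds `['1234']` and `cites_to` holds `['410 U.S. 113']` before the parent `list()` runs.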
Example #5
from django.db import connection
from django.utils.functional import partition


def bulk_update(model, instances, update_fields=None):
    # partition() returns (false_items, true_items): instances that already
    # have a pk first; unsaved instances (pk is None) are set aside and
    # ignored by the UPDATE below.
    instances, to_create = partition(lambda obj: obj.pk is None, instances)
    set_fields = ', '.join(
        ('%(field)s=update_{table}.column%(i)s' % {'field': field, 'i': i + 2})
        for i, field in enumerate(update_fields)
    )
    value_placeholder = '({})'.format(', '.join(['%s'] * (len(update_fields) + 1)))
    values = ','.join([value_placeholder] * len(instances))
    query = ' '.join([
        'UPDATE {table} SET ',
        set_fields,
        'FROM (VALUES ', values, ') update_{table}',
        'WHERE {table}.{pk} = update_{table}.column1'
    ]).format(table=model._meta.db_table, pk=model._meta.pk.get_attname_column()[1])
    params = []
    for instance in instances:
        params.append(instance.pk)
        for field in update_fields:
            params.append(getattr(instance, field))

    with connection.cursor() as cursor:
        cursor.execute(query, params)
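A hypothetical invocation, assuming a `Book` model with a numeric `price` field (both names are illustrative):

# Hypothetical usage; `Book` and `price` are illustrative names.
books = list(Book.objects.all())
for book in books:
    book.price = book.price * 2
bulk_update(Book, books, update_fields=['price'])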
Example #6
from django.test import TestCase

# Create your tests here.
from django.utils.functional import partition

k = partition(lambda a: a > 2, range(6))
print(k)  # ([0, 1, 2], [3, 4, 5]): false items first, then true items
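For reference, `django.utils.functional.partition` is essentially the following (paraphrased from Django's source; the predicate must return a value usable as a 0/1 list index):

def partition(predicate, values):
    # Index 0 collects items where predicate(item) is falsy,
    # index 1 collects items where it is truthy.
    results = ([], [])
    for item in values:
        results[predicate(item)].append(item)
    return results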
Example #7
    def handle(self, *args, **kwargs):
        print(datetime.today(), "Building dashboard...")
        # Note: rstrip('.md') strips characters, not a suffix, so use splitext
        # to drop the extension safely.
        pages = {
            os.path.splitext(os.path.basename(x))[0]: open(x, 'r').read()
            for x in glob.glob(Filepaths.BACKUP_DIR.value)
            if os.path.isfile(x)
        }
        graph = Graph()
        flashcards: List[AnkiFlashcard] = []
        full_deck_names = make_anki_request('deckNames')

        # Create nodes
        for page_title, page in pages.items():
            lines = squeeze(page.split('\n'))
            if not lines:
                continue

            for page_type in [Tags.QUESTION.value, Tags.REFERENCE.value, Tags.NOTE.value, Tags.POST.value]:
                matches = [x for x in lines if f'#{page_type}' in x]

                # partition() returns (false_items, true_items): plain content
                # lines first, '::'-delimited metadata lines second.
                content, metadata = partition(lambda x: '::' in x, matches)

                for line in content:
                    # This node is a single line.
                    graph.add_node(line.strip().lstrip('- '), type=page_type, word_count=len(line.split(' ')))

                if metadata:
                    # This node is a page.
                    graph.add_node(page_title, lines=lines, type=page_type)

            # Find flashcards
            bulleted_lines = get_bulleted_lines(lines)
            num_bulleted_lines = len(bulleted_lines)
            for i, line in enumerate(bulleted_lines):
                if f"#{Tags.FLASHCARD.value}" in line:
                    well_formatted_flashcard = re.match(FLASHCARD_FRONT_REGEX, line)
                    if not well_formatted_flashcard:
                        post_slack(f"Flashcard front improperly formatted: {line}")
                        continue

                    front_1, flashcard_uuid, front_2 = well_formatted_flashcard.groups()
                    front = front_1.strip() + " " + front_2.strip()

                    # Handle Cloze-style flashcards
                    is_cloze = re.search(CLOZE_REGEX, front)
                    if is_cloze:
                        front = re.sub(CLOZE_REGEX, lambda x: "{{c1::" + x.group(1) + "}}", front)
                        front = AnkiFlashcard.maybe_download_media_files_and_convert(front) + f"<hr/><div>{page_title}</div>"

                        maybe_deck_name, line = extract_tags(ANKI_DECK_TAG_REGEX, line)
                        deck_name = get_full_deck_name(full_deck_names, maybe_deck_name)
                        flashcards.append(ClozeFlashcard(uuid=flashcard_uuid, front=front, deck=deck_name))
                        continue

                    is_num_lines_long_enough = i + 1 < num_bulleted_lines
                    if not is_num_lines_long_enough:
                        post_slack(f"Flashcard is missing a back: {line}")
                        continue

                    # Handle two-sided flashcards
                    next_line = bulleted_lines[i + 1]
                    next_line_is_indented = re.match(BULLET_REGEX, line).span()[1] < re.match(BULLET_REGEX, next_line).span()[1]
                    if next_line_is_indented:
                        back = re.match(SANS_BULLET_REGEX, next_line).group(1).strip()
                        front = AnkiFlashcard.maybe_download_media_files_and_convert(front)
                        back = AnkiFlashcard.maybe_download_media_files_and_convert(back) + f"<hr/><div>{page_title}</div>"

                        maybe_deck_name, line = extract_tags(ANKI_DECK_TAG_REGEX, line)
                        deck_name = get_full_deck_name(full_deck_names, maybe_deck_name)
                        flashcards.append(TwoSidedFlashcard(uuid=flashcard_uuid, front=front, back=back, deck=deck_name))

                        continue

                    post_slack(f"Flashcard improperly formatted (next line not indented): {line}\n{next_line}")

        # Sync Anki, retrying up to two times before letting the error propagate.
        for attempt in range(3):
            try:
                make_anki_request('sync')
                break
            except Exception:
                if attempt == 2:
                    raise
                print("Waiting on sync...")
                time.sleep(5)

        # POST flashcards to AnkiConnect
        for flashcard in flashcards:
            query = f"deck:{flashcard.deck} ID:{flashcard.uuid}"
            anki_ids = make_anki_request('findNotes', query=query)
            if len(anki_ids) == 0:
                make_anki_request('addNote', note=flashcard.to_add_note_json())
            elif len(anki_ids) == 1:
                make_anki_request('updateNoteFields', note=flashcard.to_update_note_fields_json(anki_ids[0]))
            else:
                raise AssertionError(f"{len(anki_ids)} cards returned for query {query}")

        make_anki_request('sync')

        num_edges = 0
        word_count = 0
        num_shares = 0

        # Add information to nodes
        for title, data in graph.nodes(data=True):
            if 'lines' not in data:
                continue

            # Edges
            links = flatten([parse_links(x) for x in data['lines']])
            for link in [x for x in links if graph.has_node(x)]:
                graph.add_edge(link, title)
                num_edges += 1

            # Word count
            word_count += sum([len(x.split(' ')) for x in data['lines']])  # TODO FIXME

            # Shares
            num_shares += len(flatten([parse_shares(x) for x in data['lines'] if f"{Tags.SHARES.value}::" in x]))

        # Save metrics, appending today's value only if not already recorded.
        for filepath, value in [
            (Filepaths.NUM_EDGES_FILEPATH.value, num_edges),
            (Filepaths.WORD_COUNT_FILEPATH.value, word_count),
            (Filepaths.NUM_SHARES_FILEPATH.value, num_shares),
        ]:
            with open(filepath, "r+") as f:
                line = f"\n{datetime.today()} {value}"
                if line not in f.read():
                    f.write(line)

        # Count edges
        for title, data in graph.nodes(data=True):
            data['num_edges'] = len(graph[title])

        # Build index.html
        nodes = list(graph.nodes(data=True))
        questions = [(title, data) for title, data in nodes if data['type'] == Tags.QUESTION.value]
        references = [(title, data) for title, data in nodes if data['type'] == Tags.REFERENCE.value]
        notes = [(title, data) for title, data in nodes if data['type'] == Tags.NOTE.value]
        posts = [(title, data) for title, data in nodes if data['type'] == Tags.POST.value]

        template = Template(open(Filepaths.DASHBOARD_HTML.value).read())
        last_updated = datetime.fromtimestamp(os.path.getmtime(glob.glob(Filepaths.BACKUP_DIR.value)[0]))

        with open(Filepaths.INDEX_HTML.value, "w") as f:
            f.write(template.render(
                questions=questions,
                references=references,
                notes=notes,
                posts=posts,
                words_metric=get_words_metric(),
                connections_metric=get_connections_metric(),
                shares_metric=get_shares_metric(),
                last_updated=last_updated,
                last_built=open(Filepaths.LAST_BUILT.value).read()
            ))
        print(datetime.today(), "Done building dashboard!")
Example #8
    def bulk_upsert(self, objs, unique_constraint=None, update_fields=None, return_ids=True, batch_size=None):
        assert batch_size is None or batch_size > 0
        assert (not return_ids) or (unique_constraint and update_fields)
        assert (not update_fields) or (unique_constraint and update_fields)
        assert (not unique_constraint) or (isinstance(
            unique_constraint, str) or (isinstance(unique_constraint, tuple)))

        for parent in self.model._meta.get_parent_list():
            if parent._meta.concrete_model is not self.model._meta.concrete_model:
                raise ValueError(
                    "Can't bulk upsert a multi-table inherited model")

        if not objs:
            return objs

        self._for_write = True

        fields = self.model._meta.concrete_fields

        if unique_constraint:
            if isinstance(unique_constraint, str):
                # get_field_by_name() was removed in Django 1.10; on newer
                # versions this would be self.model._meta.get_field(...).
                unique_constraint = self.model._meta.get_field_by_name(unique_constraint)[
                    0]

                if not unique_constraint.unique:
                    raise ValueError(
                        "The unique_constraint argument must be the name of a unique=True field or a tuple of field names declared as unique_together")

                if isinstance(unique_constraint, AutoField):
                    raise ValueError(
                        "The unique_constraint argument cannot be an AutoField instance")

            else:
                if not self.model._meta.unique_together:
                    raise ValueError(
                        "The model hasn't declared any unique_together index")
                else:
                    unique_constraint = [
                        uc for uc in self.model._meta.unique_together if uc == unique_constraint]

                    if not unique_constraint:
                        raise ValueError(
                            "The unique_constraint tuple must be an exact match of an existing unique_together index")
                    else:
                        unique_constraint = unique_constraint[0]

        if update_fields:
            _up_fields = list()

            for upf in update_fields:
                upf = self.model._meta.get_field_by_name(upf)[0]
                if upf.unique:
                    raise ValueError(
                        "The update_fields argument must be a list of non unique/AutoField field names")

                _up_fields.append(upf)

            update_fields = _up_fields

        objs = list(objs)
        self._populate_pk_values(objs)

        with transaction.atomic(using=self.db, savepoint=False):
            # partition() returns (false_items, true_items): objects that
            # already have a pk first, objects without one second.
            objs_with_pk, objs_without_pk = partition(
                lambda o: o.pk is None, objs)
            if objs_with_pk:
                self._batched_update(objs_with_pk, fields, batch_size)
            if objs_without_pk:
                fields = [
                    f for f in fields if not isinstance(f, AutoField)]

                ids = self._batched_upsert(objs_without_pk, fields, unique_constraint=unique_constraint,
                                           update_fields=update_fields, batch_size=batch_size)

                if return_ids:
                    assert len(ids) == len(objs_without_pk)

                    for i, id in enumerate(ids):
                        objs_without_pk[i].pk = id

                    del ids

        return objs
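A hypothetical call, assuming an `Article` model with a unique `slug` field (model and field names are illustrative):

# Hypothetical usage; `Article`, `slug`, `title`, and `body` are illustrative.
articles = [Article(slug='a-1', title='One', body='...'),
            Article(slug='a-2', title='Two', body='...')]
Article.objects.bulk_upsert(
    articles,
    unique_constraint='slug',
    update_fields=['title', 'body'],
)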