Example #1
def log_stats(db, schema, values):
    """
    Inserts the row count, relation name, and timestamp
    for each collection transfer.

    Parameters
    ----------
    db : obj
          the database connection wrapper
    schema : str
          name of the schema that holds the stats table
    values : list
          rows to insert into purr_transfer_stats

    Returns
    -------
    -

    Example
    -------
    log_stats(pg, 'purr', [])

    """
    table_name = "purr_transfer_stats"
    # id is a SERIAL column, so we can skip it when inserting rows:
    attrs = table_desc[table_name]["attrs"][1:]
    try:
        row.insert_bulk(db, schema, table_name, attrs, values)
    except Exception as ex:
        logger.error(
            "[TRANSFER_INFO] Failed to insert logs into table %s: %s"
            % (table_name, ex))
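A minimal usage sketch of the call above; the exact column layout of purr_transfer_stats (row count, relation name, timestamp) is inferred from the docstring and is an assumption, not confirmed by the source.

from datetime import datetime, timezone

# Hypothetical payload: one row per transferred collection.
# Column order (number of rows, relation name, timestamp) is assumed.
values = [
    [5421, "employee", datetime.now(timezone.utc)],
    [87, "department", datetime.now(timezone.utc)],
]
log_stats(pg, "purr", values)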
Example #2
def log_rows(db, schema, values):
    """
    Inserts the operation, relation name, object id, and
    timestamp for each entry of the oplog.

    Parameters
    ----------
    db : obj
          the database connection wrapper
    schema : str
          name of the schema that holds the oplog table
    values : list
          rows to insert into purr_oplog

    Returns
    -------
    -

    Example
    -------
    log_rows(pg, 'purr', [])

    """
    table_name = "purr_oplog"
    # id is a SERIAL column, so we can skip it when inserting rows:
    attrs = table_desc[table_name]["attrs"][1:]
    try:
        row.insert_bulk(db, schema, table_name, attrs, values)
    except Exception as ex:
        logger.error(
            "[TRANSFER_INFO] Failed to insert logs into table %s: %s"
            % (table_name, ex))
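Similarly, a hedged sketch of a log_rows payload; the column order (operation, relation name, object id, timestamp) follows the docstring and is an assumption.

from datetime import datetime, timezone

# Hypothetical oplog entries: operation, relation name, object id, timestamp.
values = [
    ["i", "employee", "5f1d7f0c9b1e8a3d2c4b6a10", datetime.now(timezone.utc)],
    ["u", "employee", "5f1d7f0c9b1e8a3d2c4b6a11", datetime.now(timezone.utc)],
]
log_rows(pg, "purr", values)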
Example #3
 def test_upsert_bulk_tail(self):
     reset_dataset_pg()
     schema = 'public'
     table = 'employee'
     attrs = ["id", "first_name", "last_name", "hair"]
     constraint.add_pk(pg, schema, table, attrs[0])
     values = [["12345", "Purr", "Rito", "orange"],
               ["12346", "James", "Cat", "black and white"],
               ["12347", "Morgo", None, "black and white"],
               ["12348", "Voros", "Macska", "orange"],
               ["12349", "Szurke", "Macska", "grey"]]
     row.insert_bulk(pg, schema, table, attrs, values)
     values_new = [
         ["12346", "James", "Cat", "black and white"],
         ["12347", "Morgo", None, "black and white"],
         ["12348", "Zuta", "Macska", "yellow"],  # changed
         ["12349", "Szurke", "Cicus", "grey"],  # changed
         ["12350", "Fekete", "Cica", "black"]  # new
     ]
     row.upsert_bulk_tail(pg, schema, table, attrs, values_new)
     cmd = "select * from %s" % table
     cursor = pg.conn.cursor()
     cursor.execute(cmd)
     res = cursor.fetchall()
     mocked = [["12345", "Purr", "Rito", "orange"],
               ["12346", "James", "Cat", "black and white"],
               ["12347", "Morgo", None, "black and white"],
               ["12348", "Zuta", "Macska", "yellow"],
               ["12349", "Szurke", "Cicus", "grey"],
               ["12350", "Fekete", "Cica", "black"]]
     mocked = [tuple(x) for x in mocked]
     cursor.close()
     assert mocked == res
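The test exercises row.upsert_bulk_tail without showing its body. A plausible sketch, not the project's actual implementation, is an INSERT ... ON CONFLICT DO UPDATE built with psycopg2's execute_values, assuming the first attribute carries the primary key:

from psycopg2.extras import execute_values

def upsert_bulk_sketch(db, schema, table, attrs, values):
    """Hypothetical upsert: insert rows, overwrite non-key columns on conflict."""
    pk = attrs[0]  # assumes the first attribute is the primary key
    updates = ", ".join("%s = EXCLUDED.%s" % (a, a) for a in attrs[1:])
    cmd = "INSERT INTO %s.%s (%s) VALUES %%s ON CONFLICT (%s) DO UPDATE SET %s" % (
        schema, table, ", ".join(attrs), pk, updates)
    cursor = db.conn.cursor()
    execute_values(cursor, cmd, [tuple(v) for v in values])
    db.conn.commit()
    cursor.close()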
Example #4
 def test_insert_bulk(self):
     reset_dataset_pg()
     schema = 'public'
     table = 'employee'
     attrs = ["id", "first_name", "last_name", "hair"]
     values = [["12345", "Purr", "Rito", "orange"],
               ["12346", "James", "Cat", "black and white"],
               ["12347", "Morgo", None, "black and white"],
               ["12348", "Voros", "Macska", "orange"],
               ["12349", "Szurke", "Macska", "grey"]]
     row.insert_bulk(pg, schema, table, attrs, values)
     cmd = "select * from %s where id = '%s'" % (table, values[2][0])
     cursor = pg.conn.cursor()
     cursor.execute(cmd)
     res = cursor.fetchone()
     print(tuple(values[2]))
     print(res)
     cursor.close()
     assert tuple(values[2]) == res
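As with the upsert above, row.insert_bulk itself is not shown in this excerpt; a hedged sketch using execute_values that matches the call made by this test:

from psycopg2.extras import execute_values

def insert_bulk_sketch(db, schema, table, attrs, values):
    """Hypothetical bulk insert matching row.insert_bulk's call signature."""
    cmd = "INSERT INTO %s.%s (%s) VALUES %%s" % (schema, table, ", ".join(attrs))
    cursor = db.conn.cursor()
    execute_values(cursor, cmd, [tuple(v) for v in values])
    db.conn.commit()
    cursor.close()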
Example #5
    def insert_bulk_no_extra_props(self,
                                   docs,
                                   attrs,
                                   include_extra_props=True):
        """
        Transforms document and inserts it into the corresponding table.
        Parameters
        ----------
        docs : dict
              the documents we want to insert
         unset: string[]
              list of fields to unset
        """
        # This is needed because
        # sometimes there is no value for attributes (null)
        result = []
        if type(docs) is not list:
            docs = [docs]
        for doc in docs:
            attrs = init_values(attrs)
            attrs = set_values(attrs, doc)

            if len(docs) > 1:
                attrs_pg = [v["name_cm"] for k, v in attrs.items()]
                values = [v["value"] for k, v in attrs.items()]
            else:
                attrs_pg = [
                    v["name_cm"] for k, v in attrs.items() if k in doc.keys()
                ]
                values = [
                    v["value"] for k, v in attrs.items() if k in doc.keys()
                ]
            result.append(tuple(values))

        if self.created is True or len(docs) == 1:
            row.upsert_bulk(self.db, self.schema, self.relation_name, attrs_pg,
                            result)
        else:
            row.insert_bulk(self.db, self.schema, self.relation_name, attrs_pg,
                            result)
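init_values and set_values are not part of this excerpt. Judging from how the loop reads name_cm and value from each attribute entry, they presumably reset every attribute to a placeholder and then copy over the fields present in the document; a sketch under that assumption (the dict layout is inferred, not confirmed):

def init_values(attrs):
    # Hypothetical: give every known attribute a placeholder None value,
    # so columns missing from a document still produce a NULL cell.
    for name, meta in attrs.items():
        meta["value"] = None
    return attrs

def set_values(attrs, doc):
    # Hypothetical: overwrite the placeholders with the values the document carries.
    for name, meta in attrs.items():
        if name in doc:
            meta["value"] = doc[name]
    return attrs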
Example #6
    def insert_bulk(self, docs, attrs, include_extra_props=True):
        """
        Transforms document and inserts it into the corresponding table.
        Parameters
        ----------
        doc : dict
              the document we want to insert
        TODO add unset
        """
        # This is needed because
        # sometimes there is no value for attributes (null)
        result = []
        if type(docs) is not list:
            docs = [docs]
        for doc in docs:
            (attrs_pg, values) = prepare_row_for_insert(attrs, doc)
            result.append(tuple(values))

        if self.created is True:
            row.upsert_bulk(self.db, self.schema, self.relation_name, attrs_pg,
                            result)
        else:
            row.insert_bulk(self.db, self.schema, self.relation_name, attrs_pg,
                            result)
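prepare_row_for_insert is referenced but not defined in this excerpt. Based on the inlined logic of insert_bulk_no_extra_props above, it presumably fills the attribute map for one document and returns parallel lists of PostgreSQL column names and values; a sketch under that assumption:

def prepare_row_for_insert(attrs, doc):
    # Hypothetical helper mirroring the loop body of insert_bulk_no_extra_props:
    # reset placeholders, copy the document's values, then split the map into
    # column names (name_cm) and the matching values.
    attrs = init_values(attrs)
    attrs = set_values(attrs, doc)
    attrs_pg = [meta["name_cm"] for name, meta in attrs.items()]
    values = [meta["value"] for name, meta in attrs.items()]
    return (attrs_pg, values)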