Example #1
def insert_to_db(cursor, to_db):  # Write the top-user list data to the DB
    from psycopg2.extras import Json  # public home of Json (not psycopg2._json)
    for item in to_db:
        cursor.execute(
            "INSERT INTO USER_LIST(USER_ID, DATA) values(%s, %s);",
            (item['id'], Json(item)))
    print('\t - data written to the DB')
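
Json wraps any JSON-serializable Python object so that psycopg2 adapts it to a JSON literal on the wire. A minimal sketch of the mechanism (assuming an open connection conn; the table name t is illustrative):

    from psycopg2.extras import Json

    with conn.cursor() as cur:
        # mogrify() renders the bytes that would be sent to the server
        print(cur.mogrify("INSERT INTO t (doc) VALUES (%s)", [Json({"a": 1})]))
        # b'INSERT INTO t (doc) VALUES (\'{"a": 1}\')'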
Example #2
def _event_to_row(event: LogEvent):
    return {
        "id": str(event.event_id),
        "event_type": event.event_type.value,
        "event_subtype": event.event_subtype.value,
        # event_time is set by db upon creation, need not be passed in.
        "event_state": Json(event.event_state),
    }
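
Because the row dict maps column names to already-adapted values, it can be passed straight to a named-placeholder INSERT. A sketch (the table name log_events is an assumption):

    cursor.execute(
        "INSERT INTO log_events (id, event_type, event_subtype, event_state) "
        "VALUES (%(id)s, %(event_type)s, %(event_subtype)s, %(event_state)s)",
        _event_to_row(event))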
Example #3
def value_normalizer(value):
    if value is None:
        return 'NULL'
    if isinstance(value, dict):
        return str(Json(value))
    if isinstance(value, str):
        return f"'{value}'"
    return value


def redis_to_db_row(redis_entry):
    created_at_raw = redis_entry["created_at"]
    created_at = safe_get_time(created_at_raw).replace(tzinfo=datetime.timezone.utc)
    return (
        redis_entry["type"],
        redis_entry["user_id"],
        redis_entry["message"],
        created_at,
        Json(redis_entry["data"]),  # for usage with the JSONB column
    )
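
Hand-quoting strings the way value_normalizer does breaks on embedded quotes; when the rows end up in Postgres anyway, it is safer to let psycopg2 adapt the whole tuple. A sketch (table and column names are assumptions):

    from psycopg2.extras import execute_values

    rows = [redis_to_db_row(entry) for entry in redis_entries]
    execute_values(
        cursor,
        "INSERT INTO events (type, user_id, message, created_at, data) VALUES %s",
        rows)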
Example #5
    def insert_rows(self, archive_table_name: str, values: List[dict], tabs: str):
        column_names = ', '.join(values[0].keys())
        query = SQL(f'INSERT INTO {self.config.db_config.schema}.{archive_table_name} ({column_names}) VALUES %s')

        # Wrap dict values in Json so psycopg2 can adapt them to json/jsonb
        for row in values:
            for col_name, col_val in row.items():
                if isinstance(col_val, dict):
                    row[col_name] = Json(col_val)

        logging.debug(f"{tabs}INSERT INTO {archive_table_name} - {len(values)} rows")
        with self.conn.cursor(cursor_factory=DictCursor) as cursor:
            execute_values(cursor, query, values)
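
Interpolating the schema and table name with an f-string is only safe while those names are trusted; psycopg2.sql can compose identifiers with proper quoting instead. A sketch (all names illustrative):

    from psycopg2 import sql

    query = sql.SQL("INSERT INTO {}.{} ({}) VALUES %s").format(
        sql.Identifier("archive"),
        sql.Identifier("events_2021"),
        sql.SQL(", ").join(map(sql.Identifier, ["id", "payload"])),
    )

Recent psycopg2 releases accept the resulting Composable directly in execute_values.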
Example #6
    def save_unknown_group_message(self, cleaned_message):
        message_dict = cleaned_message.to_dict(recursive=True)
        db_connection = create_db_connection()

        try:
            cleaned_message_dict = self.__clean_message(message_dict)
            # print(datetime.time(datetime.now()), 'Message after cleaning', cleaned_message_dict, '\n')

            db_cursor = db_connection.cursor()
            db_cursor.execute(
                'INSERT into traced_messages (timestamp, chat_id, full_message) values (%s, %s, %s)',
                [
                    time(), cleaned_message.to_id.channel_id,
                    Json(cleaned_message_dict)
                ])
        except Exception as err:
            print(datetime.time(datetime.now()), err)
        finally:
            db_connection.commit()
            db_connection.close()
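
The try/finally bookkeeping can also be written with context managers; note that a psycopg2 connection used as a context manager commits or rolls back the transaction on exit but does not close the connection. A sketch:

    db_connection = create_db_connection()
    try:
        with db_connection, db_connection.cursor() as db_cursor:
            db_cursor.execute(
                'INSERT INTO traced_messages (timestamp, chat_id, full_message) '
                'VALUES (%s, %s, %s)',
                [time(), cleaned_message.to_id.channel_id, Json(cleaned_message_dict)])
    finally:
        db_connection.close()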
Example #7
    def _hash_old_changeset_values(
            self, changeset: Any,
            table_schema: TableSchema) -> Tuple[Digest, int]:
        """
        Since we're not storing the values of the deleted rows (and we don't have access to them in the staging table
        because they've been deleted), we have to hash them in Python. This involves mimicking the return value of
        `SELECT t::text FROM table t`.

        :param changeset: Map PK -> (upserted/deleted, Map col -> old val)
        :param table_schema: Table schema
        :return: `Digest` object and the number of deleted rows.
        """
        rows = self._extract_deleted_rows(changeset, table_schema)
        if not rows:
            return Digest.empty(), 0

        # Horror alert: we hash newly created tables by essentially calling digest(row::text) in Postgres and
        # we don't really know how it turns some types to strings. So instead we give Postgres all of its deleted
        # rows back and ask it to hash them for us in the same way.
        inner_tuple = "(" + ",".join("%s::" + c.pg_type
                                     for c in table_schema) + ")"
        query = ("SELECT digest(o::text, 'sha256') FROM (VALUES " +
                 ",".join(itertools.repeat(inner_tuple, len(rows))) + ") o")

        # By default (e.g. for changesets where nothing was deleted) we use a 0 hash (since adding it to any other
        # hash has no effect).
        digests = self.object_engine.run_sql(
            query,
            [
                o if not isinstance(o, dict) else Json(o) for row in rows
                for o in row
            ],
            return_shape=ResultShape.MANY_ONE,
        )
        return (
            reduce(operator.add, map(Digest.from_memoryview, digests),
                   Digest.empty()),
            len(digests),
        )
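
For concreteness: with two deleted rows in a table whose columns are (id integer, data jsonb), the code above assembles a query of this shape, and any dict value in the parameter list is wrapped in Json so psycopg2 can adapt it:

    query = ("SELECT digest(o::text, 'sha256') FROM (VALUES "
             "(%s::integer,%s::jsonb),(%s::integer,%s::jsonb)) o")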
Example #8
def update_db_execution_log(result_folder):
    raise_new_bug = True
    oDictAllIterationDetails = request.json
    conn = None
    try:

        conn = get_results_db_conn()
        c = conn.cursor()
        for _, iter in oDictAllIterationDetails.items():
            if 'FAIL' in iter['status'].upper():
                c.execute(
                    "SELECT fail_type, fail_description, jira_reference, comments, triage_workflow_status FROM triage WHERE "
                    + "test_id ='" + iter['testID'] + "' AND " +
                    "iteration_id ='" + iter['iterationID'] + "' AND " +
                    "step_id ='" + str(iter['stepNo']) + "';")
                rs = c.fetchall()
                if len(rs) > 0:
                    bugStatus = ''
                    bugID = rs[0][2].strip()
                    print("bugID", bugID)

                    if not bugID == "":
                        bugStatus, _ = __get_bug_status(bugID)
                        print("bugstatus", bugStatus)
                    print(iter['testID'], rs[0][4].upper())
                    if not 'DONE' in bugStatus.upper():
                        if not 'DONE' in rs[0][4].upper():
                            iter['failType'] = rs[0][0]
                            iter['failDescription'] = rs[0][1]
                            iter['triagedStatus'] = "Yes"
                            iter['jiraReference'] = rs[0][2]
                            iter['comments'] = rs[0][3]
                            raise_new_bug = False

            if raise_new_bug:
                if 'REGRESSION' in iter['executionPhase'].upper():
                    if 'APP' in iter['failType'].upper():
                        bug_details = {
                            "bugSummary": iter["testName"],
                            "bugDescription": iter["failDescription"],
                            "bugPriority": 'Minor',
                            "results": {
                                "resultFolder": result_folder,
                                "results": iter['results']
                            }
                        }
                        r = __create_bug(bug_details)
                        iter['jiraReference'] = r.json()["key"]

            c.execute(
                "INSERT INTO execution_logs (test_pack, test_id, test_name, iteration_id, step_id, status, fail_type, fail_description, test_triaged_status, time_stamp, duration, jira_reference, execution_id, execution_phase, execution_host, comments, results) values ('"
                + iter['testPack'] + "', '" + iter['testID'] + "', '" +
                iter['testName'] + "', '" + iter['iterationID'] + "', '" +
                str(iter['stepNo']) + "', '" + iter['status'] + "', '" +
                iter['failType'] + "', '" + iter['failDescription'] + "', '" +
                iter['triagedStatus'] + "', '" + iter['execTime'] + "', '" +
                iter['durationSec'] + "', '" + iter['jiraReference'] + "', '" +
                iter['executionID'] + "', '" + iter['executionPhase'] +
                "', '" + iter['executionHost'] + "', '" + iter['comments'] +
                "', " + "%s" + ")", [
                    Json({
                        "resultFolder": result_folder,
                        "results": iter['results']
                    })
                ])

    except Exception as e:
        print(e)
    finally:
        if conn:
            conn.commit()
            conn.close()

    return make_response(
        jsonify({"response":
                 "Successfully inserted/updated execution logs: "}), 200)
Example #9
def update_db_triage_execution_logs():
    selectedRows = request.get_json()
    conn = None  # ensure the name is bound if get_results_db_conn() raises
    try:
        conn = get_results_db_conn()

        for row in selectedRows:
            current_time_stamp = datetime.today().strftime("%d-%m-%Y %H:%M:%S")
            c = conn.cursor()
            c.execute("UPDATE execution_logs "
                      "SET "
                      "status = '" + row["status"] + "',"
                      "fail_type = '" + row["fail_type"] + "',"
                      "fail_description = '" + row["fail_description"] + "',"
                      "test_triaged_status = 'YES',"
                      "jira_reference = '" + row["jira_reference"] + "',"
                      "comments = '" + row["comments"] + "' "
                      " WHERE "
                      "sno = '" + str(row["sno"]) + "' AND "
                      "test_id = '" + row["test_id"] + "'"
                      ";")

            c.execute(
                "SELECT fail_type, fail_description, jira_reference, comments FROM triage WHERE "
                + "test_id ='" + row['test_id'] + "' AND " +
                "iteration_id ='" + row['iteration_id'] + "' AND " +
                "step_id ='" + str(row['step_id']) + "';")
            rs = c.fetchall()
            if len(rs) > 0:
                c.execute(
                    "UPDATE triage "
                    "SET "
                    "fail_type = '" + row["fail_type"] + "',"
                    "fail_description = '" + row["fail_description"] + "',"
                    "test_pack = '" + row["test_pack"] + "',"
                    "dev_triaged_status = 'NO',"
                    "triage_workflow_status = 'NEW',"
                    "last_updated = '" + current_time_stamp + "',"
                    "jira_reference = '" + row["jira_reference"] + "',"
                    "comments = '" + row["comments"] + "',"
                    "results = %s"
                    " WHERE "
                    "test_id ='" + row['test_id'] + "' AND " +
                    "iteration_id ='" + row['iteration_id'] + "' AND " +
                    "step_id ='" + str(row['step_id']) + "'",
                    [Json(row['results'])])
            else:
                c.execute(
                    "INSERT INTO triage (test_pack, test_id, test_name, iteration_id, step_id, fail_type, fail_description, dev_triaged_status, triage_workflow_status, last_updated, jira_reference, comments, results) values ('"
                    + row['test_pack'] + "', '" + row['test_id'] + "', '" +
                    row['test_name'] + "', '" + row['iteration_id'] + "', '" +
                    str(row['step_id']) + "', '" + row['fail_type'] + "', '" +
                    row['fail_description'] + "', '" + "NO', '" + "NEW', '" +
                    current_time_stamp + "', '" + row['jira_reference'] +
                    "', '" + row['comments'] + "', " + "%s" + ")",
                    [Json(row['results'])])
    except Exception as e:
        print(e)
        if conn:
            conn.commit()
            conn.close()
        return make_response(jsonify({"message": "Exception"}),
                             555)  #{"errorMessage": str(e)}
    finally:
        if conn:
            try:
                conn.commit()
                conn.close()
            except:
                pass
    return make_response(jsonify({"message": "Success"}), 200)
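
As in the previous example, the concatenated UPDATE and INSERT statements here are injection-prone; the fix is the same. A sketch for the first UPDATE:

    c.execute(
        "UPDATE execution_logs SET status = %s, fail_type = %s, "
        "fail_description = %s, test_triaged_status = 'YES', "
        "jira_reference = %s, comments = %s "
        "WHERE sno = %s AND test_id = %s",
        [row["status"], row["fail_type"], row["fail_description"],
         row["jira_reference"], row["comments"], str(row["sno"]), row["test_id"]])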