Code example #1
File: mysql_clients.py Project: costaafm/grr
    def WriteClientSnapshot(self, snapshot, cursor=None):
        """Write new client snapshot."""
        insert_history_query = (
            "INSERT INTO client_snapshot_history(client_id, timestamp, "
            "client_snapshot) VALUES (%s, FROM_UNIXTIME(%s), %s)")
        insert_startup_query = (
            "INSERT INTO client_startup_history(client_id, timestamp, "
            "startup_info) VALUES(%s, FROM_UNIXTIME(%s), %s)")

        now = rdfvalue.RDFDatetime.Now()

        current_timestamp = mysql_utils.RDFDatetimeToTimestamp(now)
        client_info = {
            "last_snapshot_timestamp": current_timestamp,
            "last_startup_timestamp": current_timestamp,
            "last_version_string": snapshot.GetGRRVersionString(),
            "last_platform": snapshot.knowledge_base.os,
            "last_platform_release": snapshot.Uname(),
        }
        update_clauses = [
            "last_snapshot_timestamp = FROM_UNIXTIME(%(last_snapshot_timestamp)s)",
            "last_startup_timestamp = FROM_UNIXTIME(%(last_startup_timestamp)s)",
            "last_version_string = %(last_version_string)s",
            "last_platform = %(last_platform)s",
            "last_platform_release = %(last_platform_release)s",
        ]

        update_query = (
            "UPDATE clients SET {} WHERE client_id = %(client_id)s".format(
                ", ".join(update_clauses)))

        int_client_id = db_utils.ClientIDToInt(snapshot.client_id)
        client_info["client_id"] = int_client_id

        startup_info = snapshot.startup_info
        snapshot.startup_info = None
        try:
            cursor.execute(insert_history_query,
                           (int_client_id, current_timestamp,
                            snapshot.SerializeToString()))
            cursor.execute(insert_startup_query,
                           (int_client_id, current_timestamp,
                            startup_info.SerializeToString()))
            cursor.execute(update_query, client_info)
        except MySQLdb.IntegrityError as e:
            raise db.UnknownClientError(snapshot.client_id, cause=e)
        finally:
            snapshot.startup_info = startup_info
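
Every write path on this page feeds the return value of mysql_utils.RDFDatetimeToTimestamp into MySQL's FROM_UNIXTIME(). Judging from that usage, the helper reduces to a seconds-since-epoch conversion along these lines (a sketch only; the AsMicrosecondsSinceEpoch accessor is assumed from recent GRR releases and is not shown in the snippets here):

# Sketch only: inferred from how callers pass the result to FROM_UNIXTIME(%s).
def RDFDatetimeToTimestamp(datetime):
    """Converts an RDFDatetime into a (possibly fractional) seconds value."""
    if datetime is None:
        return None
    # Assumption: RDFDatetime stores microseconds since the epoch and exposes
    # them via AsMicrosecondsSinceEpoch().
    return datetime.AsMicrosecondsSinceEpoch() / 1e6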
Code example #2
File: mysql_users.py Project: sperezintexas/grr
    def UpdateUserNotifications(self,
                                username,
                                timestamps,
                                state=None,
                                cursor=None):
        """Updates existing user notification objects."""

        query = ("UPDATE user_notification "
                 "SET notification_state = %s "
                 "WHERE username_hash = %s"
                 "AND UNIX_TIMESTAMP(timestamp) IN {}").format(
                     mysql_utils.Placeholders(len(timestamps)))

        args = [
            int(state),
            mysql_utils.Hash(username),
        ] + [mysql_utils.RDFDatetimeToTimestamp(t) for t in timestamps]
        cursor.execute(query, args)
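
The IN clause above is built with mysql_utils.Placeholders. Given the .format() call, the helper presumably just emits a parenthesised run of %s placeholders, roughly:

# Assumed behaviour, matching the "IN {}" template above; the real helper
# may accept additional arguments.
def Placeholders(num):
    """Returns a string like "(%s, %s, %s)" with `num` placeholders."""
    return "(" + ", ".join(["%s"] * num) + ")"

# Placeholders(2) -> "(%s, %s)", so the final query reads
# "... AND UNIX_TIMESTAMP(timestamp) IN (%s, %s)" and the two timestamps are
# appended to `args` as ordinary parameters.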
Code example #3
  def WriteForemanRule(self, rule, cursor=None):
    """Writes a foreman rule to the database."""
    query = (
        "INSERT INTO foreman_rules "
        "  (hunt_id, expiration_time, rule) "
        "VALUES (%(hunt_id)s, FROM_UNIXTIME(%(exp_time)s), %(rule_bytes)s) "
        "ON DUPLICATE KEY UPDATE "
        "  expiration_time=FROM_UNIXTIME(%(exp_time)s), rule=%(rule_bytes)s")

    cursor.execute(
        query, {
            "hunt_id":
                rule.hunt_id,
            "exp_time":
                mysql_utils.RDFDatetimeToTimestamp(rule.expiration_time),
            "rule_bytes":
                rule.SerializeToBytes()
        })
Code example #4
File: mysql_clients.py Project: jiandandan/grr
    def MultiReadClientFullInfo(self,
                                client_ids,
                                min_last_ping=None,
                                cursor=None):
        """Reads full client information for a list of clients."""
        if not client_ids:
            return {}

        query = (
            "SELECT "
            "c.client_id, c.fleetspeak_enabled, c.certificate, "
            "UNIX_TIMESTAMP(c.last_ping), UNIX_TIMESTAMP(c.last_clock), "
            "c.last_ip, UNIX_TIMESTAMP(c.last_foreman), "
            "UNIX_TIMESTAMP(c.first_seen), "
            "UNIX_TIMESTAMP(c.last_snapshot_timestamp), "
            "UNIX_TIMESTAMP(c.last_crash_timestamp), "
            "UNIX_TIMESTAMP(c.last_startup_timestamp), "
            "h.client_snapshot, s.startup_info, s_last.startup_info, "
            "l.owner_username, l.label "
            "FROM clients as c "
            "FORCE INDEX (PRIMARY) "
            "LEFT JOIN client_snapshot_history as h FORCE INDEX (PRIMARY) ON ( "
            "c.client_id = h.client_id AND "
            "h.timestamp = c.last_snapshot_timestamp) "
            "LEFT JOIN client_startup_history as s FORCE INDEX (PRIMARY) ON ( "
            "c.client_id = s.client_id AND "
            "s.timestamp = c.last_snapshot_timestamp) "
            "LEFT JOIN client_startup_history as s_last FORCE INDEX (PRIMARY) ON ( "
            "c.client_id = s_last.client_id "
            "AND s_last.timestamp = c.last_startup_timestamp) "
            "LEFT JOIN client_labels AS l FORCE INDEX (PRIMARY) "
            "ON (c.client_id = l.client_id) ")

        query += "WHERE c.client_id IN (%s) " % ", ".join(
            ["%s"] * len(client_ids))

        values = [db_utils.ClientIDToInt(cid) for cid in client_ids]
        if min_last_ping is not None:
            query += "AND c.last_ping >= FROM_UNIXTIME(%s)"
            values.append(mysql_utils.RDFDatetimeToTimestamp(min_last_ping))

        cursor.execute(query, values)
        return dict(self._ResponseToClientsFullInfo(cursor.fetchall()))
Code example #5
 def WriteAPIAuditEntry(self, entry, cursor=None):
     """Writes an audit entry to the database."""
     args = {
         "username":
         entry.username,
         "router_method_name":
         entry.router_method_name,
         "details":
         entry.SerializeToString(),
         "timestamp":
         mysql_utils.RDFDatetimeToTimestamp(rdfvalue.RDFDatetime.Now()),
     }
     query = """
 INSERT INTO api_audit_entry (username, router_method_name, details,
                              timestamp)
 VALUES (%(username)s, %(router_method_name)s, %(details)s,
         FROM_UNIXTIME(%(timestamp)s))
 """
     cursor.execute(query, args)
Code example #6
File: mysql_clients.py Project: skirankumar/grr
    def WriteClientStats(self, client_id, stats, cursor=None):
        """Stores a ClientStats instance."""

        try:
            cursor.execute(
                """
          INSERT INTO client_stats (client_id, payload, timestamp)
          VALUES (%s, %s, FROM_UNIXTIME(%s))
          ON DUPLICATE KEY UPDATE payload=VALUES(payload)
          """, [
                    db_utils.ClientIDToInt(client_id),
                    stats.SerializeToBytes(),
                    mysql_utils.RDFDatetimeToTimestamp(
                        rdfvalue.RDFDatetime.Now())
                ])
        except MySQLdb.IntegrityError as e:
            if e.args[0] == mysql_error_constants.NO_REFERENCED_ROW_2:
                raise db.UnknownClientError(client_id, cause=e)
            else:
                raise
Code example #7
File: mysql_clients.py Project: jiandandan/grr
    def ListClientsForKeywords(self, keywords, start_time=None, cursor=None):
        """Lists the clients associated with keywords."""
        keywords = set(keywords)
        hash_to_kw = {mysql_utils.Hash(kw): kw for kw in keywords}
        result = {kw: [] for kw in keywords}

        query = """
      SELECT keyword_hash, client_id
      FROM client_keywords
      FORCE INDEX (client_index_by_keyword_hash)
      WHERE keyword_hash IN ({})
    """.format(", ".join(["%s"] * len(result)))
        args = list(iterkeys(hash_to_kw))
        if start_time:
            query += " AND timestamp >= FROM_UNIXTIME(%s)"
            args.append(mysql_utils.RDFDatetimeToTimestamp(start_time))
        cursor.execute(query, args)

        for kw_hash, cid in cursor.fetchall():
            result[hash_to_kw[kw_hash]].append(db_utils.IntToClientID(cid))
        return result
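
Keyword and username lookups (examples #2, #7 and #9) key rows on mysql_utils.Hash. The *_hash column names and fixed-width key usage suggest a digest of the UTF-8 encoded string; the SHA-256 choice in this sketch is an assumption, not something the snippets confirm:

import hashlib


def Hash(value):
    """Returns a fixed-width digest suitable for a *_hash key column."""
    # SHA-256 over the UTF-8 encoding is an assumption based on how the hash
    # is used as an index key in the examples above.
    return hashlib.sha256(value.encode("utf-8")).digest()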
Code example #8
    def WriteCronJobRun(self, run_object, cursor=None):
        """Stores a cron job run object in the database."""
        query = ("INSERT INTO cron_job_runs "
                 "(job_id, run_id, write_time, run) "
                 "VALUES (%s, %s, FROM_UNIXTIME(%s), %s) "
                 "ON DUPLICATE KEY UPDATE "
                 "run=VALUES(run), write_time=VALUES(write_time)")

        write_time_str = mysql_utils.RDFDatetimeToTimestamp(
            rdfvalue.RDFDatetime.Now())
        try:
            cursor.execute(query, [
                run_object.cron_job_id,
                db_utils.CronJobRunIDToInt(run_object.run_id),
                write_time_str,
                run_object.SerializeToBytes(),
            ])
        except MySQLdb.IntegrityError as e:
            raise db.UnknownCronJobError("CronJob with id %s not found." %
                                         run_object.cron_job_id,
                                         cause=e)
Code example #9
File: mysql_users.py Project: sperezintexas/grr
    def WriteApprovalRequest(self, approval_request, cursor=None):
        """Writes an approval request object."""
        # Copy the approval_request to ensure we don't modify the source object.
        approval_request = approval_request.Copy()
        # Generate random approval id.
        approval_id_int = random.UInt64()

        grants = approval_request.grants
        approval_request.grants = None

        expiry_time = approval_request.expiration_time

        args = {
            "username_hash":
            mysql_utils.Hash(approval_request.requestor_username),
            "approval_type": int(approval_request.approval_type),
            "subject_id": approval_request.subject_id,
            "approval_id": approval_id_int,
            "expiration_time": mysql_utils.RDFDatetimeToTimestamp(expiry_time),
            "approval_request": approval_request.SerializeToString()
        }
        query = """
    INSERT INTO approval_request (username_hash, approval_type,
                                  subject_id, approval_id, expiration_time,
                                  approval_request)
    VALUES (%(username_hash)s, %(approval_type)s,
            %(subject_id)s, %(approval_id)s, FROM_UNIXTIME(%(expiration_time)s),
            %(approval_request)s)
    """
        cursor.execute(query, args)

        for grant in grants:
            self._GrantApproval(approval_request.requestor_username,
                                approval_id_int, grant.grantor_username,
                                cursor)

        return _IntToApprovalID(approval_id_int)
Code example #10
    def UpdateCronJob(self,
                      cronjob_id,
                      last_run_status=db.Database.unchanged,
                      last_run_time=db.Database.unchanged,
                      current_run_id=db.Database.unchanged,
                      state=db.Database.unchanged,
                      forced_run_requested=db.Database.unchanged,
                      cursor=None):
        """Updates run information for an existing cron job."""
        updates = []
        args = []
        if last_run_status != db.Database.unchanged:
            updates.append("last_run_status=%s")
            args.append(int(last_run_status))
        if last_run_time != db.Database.unchanged:
            updates.append("last_run_time=FROM_UNIXTIME(%s)")
            args.append(mysql_utils.RDFDatetimeToTimestamp(last_run_time))
        if current_run_id != db.Database.unchanged:
            updates.append("current_run_id=%s")
            args.append(db_utils.CronJobRunIDToInt(current_run_id))
        if state != db.Database.unchanged:
            updates.append("state=%s")
            args.append(state.SerializeToBytes())
        if forced_run_requested != db.Database.unchanged:
            updates.append("forced_run_requested=%s")
            args.append(forced_run_requested)

        if not updates:
            return

        query = "UPDATE cron_jobs SET "
        query += ", ".join(updates)
        query += " WHERE job_id=%s"
        res = cursor.execute(query, args + [cronjob_id])
        if res != 1:
            raise db.UnknownCronJobError("CronJob with id %s not found." %
                                         cronjob_id)
Code example #11
    def WriteClientGraphSeries(
        self,
        graph_series: rdf_stats.ClientGraphSeries,
        client_label: Text,
        timestamp: rdfvalue.RDFDatetime,
        cursor=None,
    ):
        """Writes the provided graphs to the DB with the given client label."""
        args = {
            "client_label": client_label,
            "report_type": graph_series.report_type.SerializeToWireFormat(),
            "timestamp": mysql_utils.RDFDatetimeToTimestamp(timestamp),
            "graph_series": graph_series.SerializeToBytes(),
        }

        query = """
      INSERT INTO client_report_graphs (client_label, report_type, timestamp,
                                        graph_series)
      VALUES (%(client_label)s, %(report_type)s, FROM_UNIXTIME(%(timestamp)s),
              %(graph_series)s)
      ON DUPLICATE KEY UPDATE graph_series = VALUES(graph_series)
    """

        cursor.execute(query, args)
Code example #12
File: mysql_paths.py Project: grrrrrrrrr/grr
    def ReadPathInfo(self,
                     client_id,
                     path_type,
                     components,
                     timestamp=None,
                     cursor=None):
        """Retrieves a path info record for a given path."""
        if timestamp is None:
            path_infos = self.ReadPathInfos(client_id, path_type, [components])

            path_info = path_infos[components]
            if path_info is None:
                raise db.UnknownPathError(client_id=client_id,
                                          path_type=path_type,
                                          components=components)

            return path_info

        query = """
    SELECT directory, UNIX_TIMESTAMP(p.timestamp),
           stat_entry, UNIX_TIMESTAMP(last_stat_entry_timestamp),
           hash_entry, UNIX_TIMESTAMP(last_hash_entry_timestamp)
      FROM client_paths as p
 LEFT JOIN (SELECT client_id, path_type, path_id, stat_entry
              FROM client_path_stat_entries
             WHERE client_id = %(client_id)s
               AND path_type = %(path_type)s
               AND path_id = %(path_id)s
               AND UNIX_TIMESTAMP(timestamp) <= %(timestamp)s
          ORDER BY timestamp DESC
             LIMIT 1) AS s
        ON p.client_id = s.client_id
       AND p.path_type = s.path_type
       AND p.path_id = s.path_id
 LEFT JOIN (SELECT client_id, path_type, path_id, hash_entry
              FROM client_path_hash_entries
             WHERE client_id = %(client_id)s
               AND path_type = %(path_type)s
               AND path_id = %(path_id)s
               AND UNIX_TIMESTAMP(timestamp) <= %(timestamp)s
          ORDER BY timestamp DESC
             LIMIT 1) AS h
        ON p.client_id = h.client_id
       AND p.path_type = h.path_type
       AND p.path_id = h.path_id
     WHERE p.client_id = %(client_id)s
       AND p.path_type = %(path_type)s
       AND p.path_id = %(path_id)s
    """
        values = {
            "client_id": db_utils.ClientIDToInt(client_id),
            "path_type": int(path_type),
            "path_id": rdf_objects.PathID.FromComponents(components).AsBytes(),
            "timestamp": mysql_utils.RDFDatetimeToTimestamp(timestamp),
        }

        cursor.execute(query, values)
        row = cursor.fetchone()
        if row is None:
            raise db.UnknownPathError(client_id=client_id,
                                      path_type=path_type,
                                      components=components)

        # pyformat: disable
        (directory, timestamp, stat_entry_bytes, last_stat_entry_timestamp,
         hash_entry_bytes, last_hash_entry_timestamp) = row
        # pyformat: enable

        if stat_entry_bytes is not None:
            stat_entry = rdf_client_fs.StatEntry.FromSerializedString(
                stat_entry_bytes)
        else:
            stat_entry = None

        if hash_entry_bytes is not None:
            hash_entry = rdf_crypto.Hash.FromSerializedString(hash_entry_bytes)
        else:
            hash_entry = None

        datetime = mysql_utils.TimestampToRDFDatetime
        return rdf_objects.PathInfo(
            path_type=path_type,
            components=components,
            timestamp=datetime(timestamp),
            last_stat_entry_timestamp=datetime(last_stat_entry_timestamp),
            last_hash_entry_timestamp=datetime(last_hash_entry_timestamp),
            directory=directory,
            stat_entry=stat_entry,
            hash_entry=hash_entry)
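
On the read side, example #12 selects UNIX_TIMESTAMP(...) columns and converts them back with mysql_utils.TimestampToRDFDatetime. The inverse presumably looks like the following sketch (the FromMicrosecondsSinceEpoch constructor is assumed from recent GRR releases; rdfvalue is the same module the snippets above rely on):

# Assumed inverse of RDFDatetimeToTimestamp; NULL columns come back as None.
def TimestampToRDFDatetime(timestamp):
    """Converts a UNIX_TIMESTAMP(...) result back into an RDFDatetime."""
    if timestamp is None:
        return None
    return rdfvalue.RDFDatetime.FromMicrosecondsSinceEpoch(
        int(1000000 * timestamp))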
Code example #13
 def RemoveExpiredForemanRules(self, cursor=None):
     now = rdfvalue.RDFDatetime.Now()
     cursor.execute(
         "DELETE FROM foreman_rules WHERE expiration_time < FROM_UNIXTIME(%s)",
         [mysql_utils.RDFDatetimeToTimestamp(now)])
Code example #14
 def DeleteOldCronJobRuns(self, cutoff_timestamp, cursor=None):
     """Deletes cron job runs that are older then the given timestamp."""
     query = "DELETE FROM cron_job_runs WHERE write_time < FROM_UNIXTIME(%s)"
     cursor.execute(query,
                    [mysql_utils.RDFDatetimeToTimestamp(cutoff_timestamp)])
Code example #15
File: mysql_paths.py Project: avmi/grr
    def WritePathInfos(
        self,
        client_id: str,
        path_infos: Sequence[rdf_objects.PathInfo],
        cursor: Optional[MySQLdb.cursors.Cursor] = None,
    ) -> None:
        """Writes a collection of path_info records for a client."""
        now = rdfvalue.RDFDatetime.Now()

        int_client_id = db_utils.ClientIDToInt(client_id)

        path_info_values = []
        parent_path_info_values = []

        stat_entry_keys = []
        stat_entry_values = []

        hash_entry_keys = []
        hash_entry_values = []

        for path_info in path_infos:
            path = mysql_utils.ComponentsToPath(path_info.components)

            key = (
                int_client_id,
                int(path_info.path_type),
                path_info.GetPathID().AsBytes(),
            )

            path_info_values.append(key +
                                    (mysql_utils.RDFDatetimeToTimestamp(now),
                                     path, bool(path_info.directory),
                                     len(path_info.components)))

            if path_info.HasField("stat_entry"):
                stat_entry_keys.extend(key)
                stat_entry_values.append(
                    key + (mysql_utils.RDFDatetimeToTimestamp(now),
                           path_info.stat_entry.SerializeToBytes()))

            if path_info.HasField("hash_entry"):
                hash_entry_keys.extend(key)
                hash_entry_values.append(
                    key + (mysql_utils.RDFDatetimeToTimestamp(now),
                           path_info.hash_entry.SerializeToBytes(),
                           path_info.hash_entry.sha256.AsBytes()))

            # TODO(hanuszczak): Implement a trie in order to avoid inserting
            # duplicated records.
            for parent_path_info in path_info.GetAncestors():
                path = mysql_utils.ComponentsToPath(
                    parent_path_info.components)
                parent_path_info_values.append((
                    int_client_id,
                    int(parent_path_info.path_type),
                    parent_path_info.GetPathID().AsBytes(),
                    path,
                    len(parent_path_info.components),
                ))

        if path_info_values:
            query = """
        INSERT INTO client_paths(client_id, path_type, path_id,
                                 timestamp,
                                 path, directory, depth)
        VALUES (%s, %s, %s, FROM_UNIXTIME(%s), %s, %s, %s)
        ON DUPLICATE KEY UPDATE
          timestamp = VALUES(timestamp),
          directory = directory OR VALUES(directory)
      """

            try:
                cursor.executemany(query, path_info_values)
            except MySQLdb.IntegrityError as error:
                raise db.UnknownClientError(client_id=client_id, cause=error)

        if parent_path_info_values:
            query = """
        INSERT INTO client_paths(client_id, path_type, path_id, path,
                                 directory, depth)
        VALUES (%s, %s, %s, %s, TRUE, %s)
        ON DUPLICATE KEY UPDATE
          directory = TRUE,
          timestamp = NOW(6)
      """
            cursor.executemany(query, parent_path_info_values)

        if stat_entry_values:
            query = """
        INSERT INTO client_path_stat_entries(client_id, path_type, path_id,
                                             timestamp,
                                             stat_entry)
        VALUES (%s, %s, %s, FROM_UNIXTIME(%s), %s)
      """
            cursor.executemany(query, stat_entry_values)

            condition = "(client_id = %s AND path_type = %s AND path_id = %s)"

            query = """
        UPDATE client_paths
        FORCE INDEX (PRIMARY)
        SET last_stat_entry_timestamp = FROM_UNIXTIME(%s)
        WHERE {}
      """.format(" OR ".join([condition] * len(stat_entry_values)))

            params = [mysql_utils.RDFDatetimeToTimestamp(now)
                      ] + stat_entry_keys
            cursor.execute(query, params)

        if hash_entry_values:
            query = """
        INSERT INTO client_path_hash_entries(client_id, path_type, path_id,
                                             timestamp,
                                             hash_entry, sha256)
        VALUES (%s, %s, %s, FROM_UNIXTIME(%s), %s, %s)
      """
            cursor.executemany(query, hash_entry_values)

            condition = "(client_id = %s AND path_type = %s AND path_id = %s)"

            query = """
        UPDATE client_paths
        FORCE INDEX (PRIMARY)
        SET last_hash_entry_timestamp = FROM_UNIXTIME(%s)
        WHERE {}
      """.format(" OR ".join([condition] * len(hash_entry_values)))

            params = [mysql_utils.RDFDatetimeToTimestamp(now)
                      ] + hash_entry_keys
            cursor.execute(query, params)
Code example #16
File: mysql_paths.py Project: grrrrrrrrr/grr
    def ListDescendentPathInfos(self,
                                client_id,
                                path_type,
                                components,
                                timestamp=None,
                                max_depth=None,
                                cursor=None):
        """Lists path info records that correspond to descendants of given path."""
        path_infos = []

        query = ""

        path = mysql_utils.ComponentsToPath(components)
        values = {
            "client_id": db_utils.ClientIDToInt(client_id),
            "path_type": int(path_type),
            "path": db_utils.EscapeWildcards(path),
        }

        query += """
    SELECT path, directory, UNIX_TIMESTAMP(p.timestamp),
           stat_entry, UNIX_TIMESTAMP(last_stat_entry_timestamp),
           hash_entry, UNIX_TIMESTAMP(last_hash_entry_timestamp)
      FROM client_paths AS p
    """
        if timestamp is None:
            query += """
      LEFT JOIN client_path_stat_entries AS s ON
                (p.client_id = s.client_id AND
                 p.path_type = s.path_type AND
                 p.path_id = s.path_id AND
                 p.last_stat_entry_timestamp = s.timestamp)
      LEFT JOIN client_path_hash_entries AS h ON
                (p.client_id = h.client_id AND
                 p.path_type = h.path_type AND
                 p.path_id = h.path_id AND
                 p.last_hash_entry_timestamp = h.timestamp)
      """
            only_explicit = False
        else:
            query += """
      LEFT JOIN (SELECT sr.client_id, sr.path_type, sr.path_id, sr.stat_entry
                   FROM client_path_stat_entries AS sr
             INNER JOIN (SELECT client_id, path_type, path_id,
                                MAX(timestamp) AS max_timestamp
                           FROM client_path_stat_entries
                          WHERE UNIX_TIMESTAMP(timestamp) <= %(timestamp)s
                       GROUP BY client_id, path_type, path_id) AS st
                     ON sr.client_id = st.client_id
                    AND sr.path_type = st.path_type
                    AND sr.path_id = st.path_id
                    AND sr.timestamp = st.max_timestamp) AS s
             ON (p.client_id = s.client_id AND
                 p.path_type = s.path_type AND
                 p.path_id = s.path_id)
      LEFT JOIN (SELECT hr.client_id, hr.path_type, hr.path_id, hr.hash_entry
                   FROM client_path_hash_entries AS hr
             INNER JOIN (SELECT client_id, path_type, path_id,
                                MAX(timestamp) AS max_timestamp
                           FROM client_path_hash_entries
                          WHERE UNIX_TIMESTAMP(timestamp) <= %(timestamp)s
                       GROUP BY client_id, path_type, path_id) AS ht
                     ON hr.client_id = ht.client_id
                    AND hr.path_type = ht.path_type
                    AND hr.path_id = ht.path_id
                    AND hr.timestamp = ht.max_timestamp) AS h
             ON (p.client_id = h.client_id AND
                 p.path_type = h.path_type AND
                 p.path_id = h.path_id)
      """
            values["timestamp"] = mysql_utils.RDFDatetimeToTimestamp(timestamp)
            only_explicit = True

        query += """
    WHERE p.client_id = %(client_id)s
      AND p.path_type = %(path_type)s
      AND path LIKE concat(%(path)s, '/%%')
    """

        if max_depth is not None:
            query += """
      AND depth <= %(depth)s
      """
            values["depth"] = len(components) + max_depth

        cursor.execute(query, values)
        for row in cursor.fetchall():
            # pyformat: disable
            (path, directory, timestamp, stat_entry_bytes,
             last_stat_entry_timestamp, hash_entry_bytes,
             last_hash_entry_timestamp) = row
            # pyformat: enable

            components = mysql_utils.PathToComponents(path)

            if stat_entry_bytes is not None:
                stat_entry = rdf_client_fs.StatEntry.FromSerializedString(
                    stat_entry_bytes)
            else:
                stat_entry = None

            if hash_entry_bytes is not None:
                hash_entry = rdf_crypto.Hash.FromSerializedString(
                    hash_entry_bytes)
            else:
                hash_entry = None

            datetime = mysql_utils.TimestampToRDFDatetime
            path_info = rdf_objects.PathInfo(
                path_type=path_type,
                components=components,
                timestamp=datetime(timestamp),
                last_stat_entry_timestamp=datetime(last_stat_entry_timestamp),
                last_hash_entry_timestamp=datetime(last_hash_entry_timestamp),
                directory=directory,
                stat_entry=stat_entry,
                hash_entry=hash_entry)

            path_infos.append(path_info)

        path_infos.sort(key=lambda _: tuple(_.components))

        # For specific timestamp, we return information only about explicit paths
        # (paths that have associated stat or hash entry or have an ancestor that is
        # explicit).
        if not only_explicit:
            return path_infos

        explicit_path_infos = []
        has_explicit_ancestor = set()

        # This list is sorted according to the keys component, so by traversing it
        # in the reverse order we make sure that we process deeper paths first.
        for path_info in reversed(path_infos):
            components = tuple(path_info.components)

            if (path_info.HasField("stat_entry")
                    or path_info.HasField("hash_entry")
                    or components in has_explicit_ancestor):
                explicit_path_infos.append(path_info)
                has_explicit_ancestor.add(components[:-1])

        # Since we collected explicit paths in reverse order, we need to reverse it
        # again to conform to the interface.
        return list(reversed(explicit_path_infos))
Code example #17
File: mysql_paths.py Project: threatintel-c/grr
    def ReadPathInfosHistories(self,
                               client_id,
                               path_type,
                               components_list,
                               cutoff=None,
                               cursor=None):
        """Reads a collection of hash and stat entries for given paths."""
        # MySQL does not handle well empty `IN` clauses so we guard against that.
        if not components_list:
            return {}

        path_infos = {components: [] for components in components_list}

        path_id_components = {}
        for components in components_list:
            path_id = rdf_objects.PathID.FromComponents(components)
            path_id_components[path_id] = components

        params = {
            "client_id": db_utils.ClientIDToInt(client_id),
            "path_type": int(path_type),
            "path_ids": [path_id.AsBytes() for path_id in path_id_components]
        }

        if cutoff is not None:
            stat_entry_timestamp_condition = """
      AND s.timestamp <= FROM_UNIXTIME(%(cutoff)s)
      """
            hash_entry_timestamp_condition = """
      AND h.timestamp <= FROM_UNIXTIME(%(cutoff)s)
      """
            params["cutoff"] = mysql_utils.RDFDatetimeToTimestamp(cutoff)
        else:
            stat_entry_timestamp_condition = ""
            hash_entry_timestamp_condition = ""

        # MySQL does not support full outer joins, so we emulate them with a union.
        query = """
    SELECT s.path_id, s.stat_entry, UNIX_TIMESTAMP(s.timestamp),
           h.path_id, h.hash_entry, UNIX_TIMESTAMP(h.timestamp)
      FROM client_path_stat_entries AS s
 LEFT JOIN client_path_hash_entries AS h
        ON s.client_id = h.client_id
       AND s.path_type = h.path_type
       AND s.path_id = h.path_id
       AND s.timestamp = h.timestamp
     WHERE s.client_id = %(client_id)s
       AND s.path_type = %(path_type)s
       AND s.path_id IN %(path_ids)s
       {stat_entry_timestamp_condition}
     UNION
    SELECT s.path_id, s.stat_entry, UNIX_TIMESTAMP(s.timestamp),
           h.path_id, h.hash_entry, UNIX_TIMESTAMP(h.timestamp)
      FROM client_path_hash_entries AS h
 LEFT JOIN client_path_stat_entries AS s
        ON h.client_id = s.client_id
       AND h.path_type = s.path_type
       AND h.path_id = s.path_id
       AND h.timestamp = s.timestamp
     WHERE h.client_id = %(client_id)s
       AND h.path_type = %(path_type)s
       AND h.path_id IN %(path_ids)s
       {hash_entry_timestamp_condition}
    """.format(stat_entry_timestamp_condition=stat_entry_timestamp_condition,
               hash_entry_timestamp_condition=hash_entry_timestamp_condition)

        cursor.execute(query, params)
        for row in cursor.fetchall():
            # pyformat: disable
            (stat_entry_path_id_bytes, stat_entry_bytes, stat_entry_timestamp,
             hash_entry_path_id_bytes, hash_entry_bytes,
             hash_entry_timestamp) = row
            # pyformat: enable

            path_id_bytes = stat_entry_path_id_bytes or hash_entry_path_id_bytes
            path_id = rdf_objects.PathID.FromSerializedBytes(path_id_bytes)
            components = path_id_components[path_id]

            timestamp = stat_entry_timestamp or hash_entry_timestamp

            if stat_entry_bytes is not None:
                stat_entry = rdf_client_fs.StatEntry.FromSerializedBytes(
                    stat_entry_bytes)
            else:
                stat_entry = None

            if hash_entry_bytes is not None:
                hash_entry = rdf_crypto.Hash.FromSerializedBytes(
                    hash_entry_bytes)
            else:
                hash_entry = None

            path_info = rdf_objects.PathInfo(
                path_type=path_type,
                components=components,
                stat_entry=stat_entry,
                hash_entry=hash_entry,
                timestamp=mysql_utils.TimestampToRDFDatetime(timestamp))

            path_infos[components].append(path_info)

        for components in components_list:
            path_infos[components].sort(
                key=lambda path_info: path_info.timestamp)

        return path_infos
Code example #18
File: mysql_clients.py Project: jiandandan/grr
    def WriteClientSnapshotHistory(self, clients, cursor=None):
        """Writes the full history for a particular client."""
        client_id = clients[0].client_id
        latest_timestamp = max(client.timestamp for client in clients)

        base_params = {
            "client_id":
            db_utils.ClientIDToInt(client_id),
            "latest_timestamp":
            mysql_utils.RDFDatetimeToTimestamp(latest_timestamp)
        }

        try:
            for client in clients:
                startup_info = client.startup_info
                client.startup_info = None

                params = base_params.copy()
                params.update({
                    "timestamp":
                    mysql_utils.RDFDatetimeToTimestamp(client.timestamp),
                    "client_snapshot":
                    client.SerializeToBytes(),
                    "startup_info":
                    startup_info.SerializeToBytes(),
                })

                cursor.execute(
                    """
        INSERT INTO client_snapshot_history (client_id, timestamp,
                                             client_snapshot)
        VALUES (%(client_id)s, FROM_UNIXTIME(%(timestamp)s),
                %(client_snapshot)s)
        """, params)

                cursor.execute(
                    """
        INSERT INTO client_startup_history (client_id, timestamp,
                                            startup_info)
        VALUES (%(client_id)s, FROM_UNIXTIME(%(timestamp)s),
                %(startup_info)s)
        """, params)

                client.startup_info = startup_info

            cursor.execute(
                """
      UPDATE clients
         SET last_snapshot_timestamp = FROM_UNIXTIME(%(latest_timestamp)s)
       WHERE client_id = %(client_id)s
         AND (last_snapshot_timestamp IS NULL OR
              last_snapshot_timestamp < FROM_UNIXTIME(%(latest_timestamp)s))
      """, base_params)

            cursor.execute(
                """
      UPDATE clients
         SET last_startup_timestamp = FROM_UNIXTIME(%(latest_timestamp)s)
       WHERE client_id = %(client_id)s
         AND (last_startup_timestamp IS NULL OR
              last_startup_timestamp < FROM_UNIXTIME(%(latest_timestamp)s))
      """, base_params)
        except MySQLdb.IntegrityError as error:
            raise db.UnknownClientError(client_id, cause=error)
Code example #19
File: mysql_hunts.py Project: threatintel-c/grr
    def ReadHuntResults(self,
                        hunt_id,
                        offset,
                        count,
                        with_tag=None,
                        with_type=None,
                        with_substring=None,
                        with_timestamp=None,
                        cursor=None):
        """Reads hunt results of a given hunt using given query options."""
        hunt_id_int = db_utils.HuntIDToInt(hunt_id)

        query = ("SELECT client_id, flow_id, hunt_id, payload, type, "
                 "UNIX_TIMESTAMP(timestamp), tag "
                 "FROM flow_results "
                 "FORCE INDEX(flow_results_hunt_id_flow_id_timestamp) "
                 "WHERE hunt_id = %s ")

        args = [hunt_id_int]

        if with_tag:
            query += "AND tag = %s "
            args.append(with_tag)

        if with_type:
            query += "AND type = %s "
            args.append(with_type)

        if with_substring:
            query += "AND payload LIKE %s "
            args.append("%" + db_utils.EscapeWildcards(with_substring) + "%")

        if with_timestamp:
            query += "AND timestamp = FROM_UNIXTIME(%s) "
            args.append(mysql_utils.RDFDatetimeToTimestamp(with_timestamp))

        query += "ORDER BY timestamp ASC LIMIT %s OFFSET %s"
        args.append(count)
        args.append(offset)

        cursor.execute(query, args)

        ret = []
        for (
                client_id_int,
                flow_id_int,
                hunt_id_int,
                serialized_payload,
                payload_type,
                timestamp,
                tag,
        ) in cursor.fetchall():
            if payload_type in rdfvalue.RDFValue.classes:
                payload = rdfvalue.RDFValue.classes[
                    payload_type].FromSerializedBytes(serialized_payload)
            else:
                payload = rdf_objects.SerializedValueOfUnrecognizedType(
                    type_name=payload_type, value=serialized_payload)

            result = rdf_flow_objects.FlowResult(
                client_id=db_utils.IntToClientID(client_id_int),
                flow_id=db_utils.IntToFlowID(flow_id_int),
                hunt_id=hunt_id,
                payload=payload,
                timestamp=mysql_utils.TimestampToRDFDatetime(timestamp))
            if tag is not None:
                result.tag = tag

            ret.append(result)

        return ret
Code example #20
File: mysql_hunts.py Project: threatintel-c/grr
    def ListHuntObjects(self,
                        offset,
                        count,
                        with_creator=None,
                        created_after=None,
                        with_description_match=None,
                        cursor=None):
        """Reads metadata for hunt objects from the database."""
        query = """
    SELECT
      hunt_id,
      UNIX_TIMESTAMP(create_timestamp),
      UNIX_TIMESTAMP(last_update_timestamp),
      creator,
      duration_micros,
      client_rate,
      client_limit,
      hunt_state,
      hunt_state_comment,
      UNIX_TIMESTAMP(init_start_time),
      UNIX_TIMESTAMP(last_start_time),
      description
    FROM hunts """
        args = []

        components = []
        if with_creator is not None:
            components.append("creator = %s ")
            args.append(with_creator)

        if created_after is not None:
            components.append("create_timestamp > FROM_UNIXTIME(%s) ")
            args.append(mysql_utils.RDFDatetimeToTimestamp(created_after))

        if with_description_match is not None:
            components.append("description LIKE %s")
            args.append("%" + with_description_match + "%")

        if components:
            query += "WHERE " + " AND ".join(components)

        query += " ORDER BY create_timestamp DESC LIMIT %s OFFSET %s"
        args.append(count)
        args.append(offset)

        cursor.execute(query, args)
        result = []
        for row in cursor.fetchall():
            (hunt_id, create_timestamp, last_update_timestamp, creator,
             duration_micros, client_rate, client_limit, hunt_state,
             hunt_state_comment, init_start_time, last_start_time,
             description) = row
            result.append(
                rdf_hunt_objects.HuntMetadata(
                    hunt_id=db_utils.IntToHuntID(hunt_id),
                    description=description or None,
                    create_time=mysql_utils.TimestampToRDFDatetime(
                        create_timestamp),
                    creator=creator,
                    duration=rdfvalue.Duration.From(duration_micros,
                                                    rdfvalue.MICROSECONDS),
                    client_rate=client_rate,
                    client_limit=client_limit,
                    hunt_state=hunt_state,
                    hunt_state_comment=hunt_state_comment or None,
                    last_update_time=mysql_utils.TimestampToRDFDatetime(
                        last_update_timestamp),
                    init_start_time=mysql_utils.TimestampToRDFDatetime(
                        init_start_time),
                    last_start_time=mysql_utils.TimestampToRDFDatetime(
                        last_start_time)))

        return result
Code example #21
File: mysql_paths.py Project: LubyRuffy/grr
    def _MultiWritePathInfos(self, path_infos, cursor=None):
        """Writes a collection of path info records for specified clients."""
        now = rdfvalue.RDFDatetime.Now()

        path_info_values = []
        parent_path_info_values = []

        stat_entry_keys = []
        stat_entry_values = []

        hash_entry_keys = []
        hash_entry_values = []

        for client_id, client_path_infos in iteritems(path_infos):
            for path_info in client_path_infos:
                path = mysql_utils.ComponentsToPath(path_info.components)

                key = (
                    db_utils.ClientIDToInt(client_id),
                    int(path_info.path_type),
                    path_info.GetPathID().AsBytes(),
                )

                path_info_values.append(key + (
                    mysql_utils.RDFDatetimeToTimestamp(now), path,
                    bool(path_info.directory), len(path_info.components)))

                if path_info.HasField("stat_entry"):
                    stat_entry_keys.extend(key)
                    stat_entry_values.append(
                        key + (mysql_utils.RDFDatetimeToTimestamp(now),
                               path_info.stat_entry.SerializeToBytes()))

                if path_info.HasField("hash_entry"):
                    hash_entry_keys.extend(key)
                    hash_entry_values.append(
                        key + (mysql_utils.RDFDatetimeToTimestamp(now),
                               path_info.hash_entry.SerializeToBytes(),
                               path_info.hash_entry.sha256.AsBytes()))

                # TODO(hanuszczak): Implement a trie in order to avoid inserting
                # duplicated records.
                for parent_path_info in path_info.GetAncestors():
                    path = mysql_utils.ComponentsToPath(
                        parent_path_info.components)
                    parent_path_info_values.append((
                        db_utils.ClientIDToInt(client_id),
                        int(parent_path_info.path_type),
                        parent_path_info.GetPathID().AsBytes(),
                        path,
                        len(parent_path_info.components),
                    ))

        if path_info_values:
            query = """
        INSERT INTO client_paths(client_id, path_type, path_id,
                                 timestamp,
                                 path, directory, depth)
        VALUES (%s, %s, %s, FROM_UNIXTIME(%s), %s, %s, %s)
        ON DUPLICATE KEY UPDATE
          timestamp = VALUES(timestamp),
          directory = directory OR VALUES(directory)
      """
            cursor.executemany(query, path_info_values)

        if parent_path_info_values:
            query = """
        INSERT INTO client_paths(client_id, path_type, path_id, path,
                                 directory, depth)
        VALUES (%s, %s, %s, %s, TRUE, %s)
        ON DUPLICATE KEY UPDATE
          directory = TRUE,
          timestamp = NOW(6)
      """
            cursor.executemany(query, parent_path_info_values)

        if stat_entry_values:
            query = """
        INSERT INTO client_path_stat_entries(client_id, path_type, path_id,
                                             timestamp,
                                             stat_entry)
        VALUES (%s, %s, %s, FROM_UNIXTIME(%s), %s)
      """
            cursor.executemany(query, stat_entry_values)

            condition = "(client_id = %s AND path_type = %s AND path_id = %s)"

            query = """
        UPDATE client_paths
        SET last_stat_entry_timestamp = FROM_UNIXTIME(%s)
        WHERE {}
      """.format(" OR ".join([condition] * len(stat_entry_values)))

            params = [mysql_utils.RDFDatetimeToTimestamp(now)
                      ] + stat_entry_keys
            cursor.execute(query, params)

        if hash_entry_values:
            query = """
        INSERT INTO client_path_hash_entries(client_id, path_type, path_id,
                                             timestamp,
                                             hash_entry, sha256)
        VALUES (%s, %s, %s, FROM_UNIXTIME(%s), %s, %s)
      """
            cursor.executemany(query, hash_entry_values)

            condition = "(client_id = %s AND path_type = %s AND path_id = %s)"

            query = """
        UPDATE client_paths
        SET last_hash_entry_timestamp = FROM_UNIXTIME(%s)
        WHERE {}
      """.format(" OR ".join([condition] * len(hash_entry_values)))

            params = [mysql_utils.RDFDatetimeToTimestamp(now)
                      ] + hash_entry_keys
            cursor.execute(query, params)
Code example #22
File: mysql_paths.py Project: grrrrrrrrr/grr
    def ReadLatestPathInfosWithHashBlobReferences(self,
                                                  client_paths,
                                                  max_timestamp=None,
                                                  cursor=None):
        """Returns PathInfos that have corresponding HashBlobReferences."""
        path_infos = {client_path: None for client_path in client_paths}

        path_id_components = {}
        for client_path in client_paths:
            path_id_components[client_path.path_id] = client_path.components

        params = []
        query = """
    SELECT t.client_id, t.path_type, t.path_id, UNIX_TIMESTAMP(t.timestamp),
           s.stat_entry, h.hash_entry
      FROM (SELECT h.client_id, h.path_type, h.path_id,
                   MAX(h.timestamp) AS timestamp
              FROM client_path_hash_entries AS h
        INNER JOIN hash_blob_references AS b
                ON b.hash_id = h.sha256
             WHERE {conditions}
          GROUP BY client_id, path_type, path_id) AS t
 LEFT JOIN client_path_stat_entries AS s
        ON s.client_id = t.client_id
       AND s.path_type = t.path_type
       AND s.path_id = t.path_id
       AND s.timestamp = t.timestamp
 LEFT JOIN client_path_hash_entries AS h
        ON h.client_id = t.client_id
       AND h.path_type = t.path_type
       AND h.path_id = t.path_id
       AND h.timestamp = t.timestamp
    """

        path_conditions = []

        for client_path in client_paths:
            path_conditions.append("""
      (client_id = %s AND path_type = %s AND path_id = %s)
      """)
            params.append(db_utils.ClientIDToInt(client_path.client_id))
            params.append(int(client_path.path_type))
            params.append(client_path.path_id.AsBytes())

        conditions = " OR ".join(path_conditions)
        if max_timestamp is not None:
            conditions = "({}) AND UNIX_TIMESTAMP(timestamp) <= %s".format(
                conditions)
            params.append(mysql_utils.RDFDatetimeToTimestamp(max_timestamp))

        cursor.execute(query.format(conditions=conditions), params)
        for row in cursor.fetchall():
            # pyformat: disable
            (client_id, path_type, path_id_bytes, timestamp, stat_entry_bytes,
             hash_entry_bytes) = row
            # pyformat: enable

            path_id = rdf_objects.PathID.FromBytes(path_id_bytes)
            components = path_id_components[path_id]

            if stat_entry_bytes is not None:
                stat_entry = rdf_client_fs.StatEntry.FromSerializedString(
                    stat_entry_bytes)
            else:
                stat_entry = None

            hash_entry = rdf_crypto.Hash.FromSerializedString(hash_entry_bytes)

            client_path = db.ClientPath(
                client_id=db_utils.IntToClientID(client_id),
                path_type=path_type,
                components=path_id_components[path_id])

            path_info = rdf_objects.PathInfo(
                path_type=path_type,
                components=components,
                stat_entry=stat_entry,
                hash_entry=hash_entry,
                timestamp=mysql_utils.TimestampToRDFDatetime(timestamp))

            path_infos[client_path] = path_info

        return path_infos
Code example #23
File: mysql_clients.py Project: jiandandan/grr
 def _DeleteClientStats(self, limit, retention_time, cursor=None):
     """Deletes up to `limit` ClientStats older than `retention_time`."""
     cursor.execute(
         "DELETE FROM client_stats WHERE timestamp < FROM_UNIXTIME(%s) LIMIT %s",
         [mysql_utils.RDFDatetimeToTimestamp(retention_time), limit])
     return cursor.rowcount
Code example #24
File: mysql_clients.py Project: jiandandan/grr
    def _CountClientStatisticByLabel(self, statistic, day_buckets, cursor):
        """Returns client-activity metrics for a given statistic.

    Args:
      statistic: The name of the statistic, which should also be a column in the
        'clients' table.
      day_buckets: A set of n-day-active buckets.
      cursor: MySQL cursor for executing queries.
    """
        day_buckets = sorted(day_buckets)
        sum_clauses = []
        ping_cast_clauses = []
        timestamp_buckets = []
        now = rdfvalue.RDFDatetime.Now()

        for day_bucket in day_buckets:
            column_name = "days_active_{}".format(day_bucket)
            sum_clauses.append(
                "CAST(SUM({0}) AS UNSIGNED) AS {0}".format(column_name))
            ping_cast_clauses.append(
                "CAST(c.last_ping > FROM_UNIXTIME(%s) AS UNSIGNED) AS {}".
                format(column_name))
            timestamp_bucket = now - rdfvalue.DurationSeconds.FromDays(
                day_bucket)
            timestamp_buckets.append(
                mysql_utils.RDFDatetimeToTimestamp(timestamp_bucket))

        # Count all clients with a label owned by 'GRR', aggregating by label.
        query = """
    SELECT j.{statistic}, j.label, {sum_clauses}
    FROM (
      SELECT c.{statistic} AS {statistic}, l.label AS label, {ping_cast_clauses}
      FROM clients c
      LEFT JOIN client_labels l USING(client_id)
      WHERE c.last_ping IS NOT NULL AND l.owner_username = 'GRR'
    ) AS j
    GROUP BY j.{statistic}, j.label
    """.format(statistic=statistic,
               sum_clauses=", ".join(sum_clauses),
               ping_cast_clauses=", ".join(ping_cast_clauses))

        cursor.execute(query, timestamp_buckets)

        fleet_stats_builder = fleet_utils.FleetStatsBuilder(day_buckets)
        for response_row in cursor.fetchall():
            statistic_value, client_label = response_row[:2]
            for i, num_actives in enumerate(response_row[2:]):
                if num_actives <= 0:
                    continue
                fleet_stats_builder.IncrementLabel(client_label,
                                                   statistic_value,
                                                   day_buckets[i],
                                                   delta=num_actives)

        # Get n-day-active totals for the statistic across all clients (including
        # those that do not have a 'GRR' label).
        query = """
    SELECT j.{statistic}, {sum_clauses}
    FROM (
      SELECT c.{statistic} AS {statistic}, {ping_cast_clauses}
      FROM clients c
      WHERE c.last_ping IS NOT NULL
    ) AS j
    GROUP BY j.{statistic}
    """.format(statistic=statistic,
               sum_clauses=", ".join(sum_clauses),
               ping_cast_clauses=", ".join(ping_cast_clauses))

        cursor.execute(query, timestamp_buckets)

        for response_row in cursor.fetchall():
            statistic_value = response_row[0]
            for i, num_actives in enumerate(response_row[1:]):
                if num_actives <= 0:
                    continue
                fleet_stats_builder.IncrementTotal(statistic_value,
                                                   day_buckets[i],
                                                   delta=num_actives)

        return fleet_stats_builder.Build()
Code example #25
File: mysql_clients.py Project: khanhgithead/grr
    def WriteClientMetadata(self,
                            client_id,
                            certificate=None,
                            fleetspeak_enabled=None,
                            first_seen=None,
                            last_ping=None,
                            last_clock=None,
                            last_ip=None,
                            last_foreman=None,
                            fleetspeak_validation_info=None,
                            cursor=None):
        """Write metadata about the client."""
        placeholders = []
        values = collections.OrderedDict()

        placeholders.append("%(client_id)s")
        values["client_id"] = db_utils.ClientIDToInt(client_id)

        if certificate:
            placeholders.append("%(certificate)s")
            values["certificate"] = certificate.SerializeToBytes()
        if fleetspeak_enabled is not None:
            placeholders.append("%(fleetspeak_enabled)s")
            values["fleetspeak_enabled"] = fleetspeak_enabled
        if first_seen is not None:
            placeholders.append("FROM_UNIXTIME(%(first_seen)s)")
            values["first_seen"] = mysql_utils.RDFDatetimeToTimestamp(
                first_seen)
        if last_ping is not None:
            placeholders.append("FROM_UNIXTIME(%(last_ping)s)")
            values["last_ping"] = mysql_utils.RDFDatetimeToTimestamp(last_ping)
        if last_clock:
            placeholders.append("FROM_UNIXTIME(%(last_clock)s)")
            values["last_clock"] = mysql_utils.RDFDatetimeToTimestamp(
                last_clock)
        if last_ip:
            placeholders.append("%(last_ip)s")
            values["last_ip"] = last_ip.SerializeToBytes()
        if last_foreman:
            placeholders.append("FROM_UNIXTIME(%(last_foreman)s)")
            values["last_foreman"] = mysql_utils.RDFDatetimeToTimestamp(
                last_foreman)

        placeholders.append("%(last_fleetspeak_validation_info)s")
        if fleetspeak_validation_info:
            pb = rdf_client.FleetspeakValidationInfo.FromStringDict(
                fleetspeak_validation_info)
            values["last_fleetspeak_validation_info"] = pb.SerializeToBytes()
        else:
            # Write null for empty or non-existent validation info.
            values["last_fleetspeak_validation_info"] = None

        updates = []
        for column in values.keys():
            updates.append("{column} = VALUES({column})".format(column=column))

        query = """
    INSERT INTO clients ({columns})
    VALUES ({placeholders})
    ON DUPLICATE KEY UPDATE {updates}
    """.format(columns=", ".join(values.keys()),
               placeholders=", ".join(placeholders),
               updates=", ".join(updates))

        cursor.execute(query, values)
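
Taken together, the pattern across these examples is: convert RDFDatetime values to seconds with RDFDatetimeToTimestamp and wrap them in FROM_UNIXTIME() on writes, then read them back via UNIX_TIMESTAMP() and TimestampToRDFDatetime. A minimal round-trip sketch, assuming the GRR import paths below and an illustrative `events` table that is not part of GRR:

from grr_response_core.lib import rdfvalue
from grr_response_server.databases import mysql_utils


def WriteAndReadEvent(cursor, event_id, payload_bytes):
    """Round-trips an RDFDatetime through a hypothetical `events` table."""
    now = rdfvalue.RDFDatetime.Now()
    # Write: pass seconds since the epoch into FROM_UNIXTIME().
    cursor.execute(
        "INSERT INTO events (event_id, payload, timestamp) "
        "VALUES (%s, %s, FROM_UNIXTIME(%s))",
        [event_id, payload_bytes, mysql_utils.RDFDatetimeToTimestamp(now)])

    # Read: select the column through UNIX_TIMESTAMP() and convert back.
    cursor.execute(
        "SELECT payload, UNIX_TIMESTAMP(timestamp) FROM events "
        "WHERE event_id = %s", [event_id])
    payload, timestamp = cursor.fetchone()
    return payload, mysql_utils.TimestampToRDFDatetime(timestamp)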