Example #1
def test():
    global MSG_QUEUE  # declared up front; reassigned below if the connection drops
    print('...')
    # log()
    # change to CUR_DIR
    os.chdir(CUR_DIR)
    data = json.loads(request.data)

    for event in data["events"]:
        # modifications to images are push events
        if event["action"] == "push":
            repository = event["target"]["repository"]
            url = event["target"]["url"]
            if "manifests" in url:
                # Get the image-blocks in this manifest
                image_blocks = []
                image_manifest = requests.get(url, verify=False, auth=auth)
                for entry in image_manifest.json()["layers"]:
                    image_blocks.append(entry["digest"])

                # Get all tags. Syntax: GET /v2/<name>/tags/list
                temp = url.split("manifests/")
                tags_url = temp[0] + "tags/list"
                tags = requests.get(tags_url, verify=False, auth=auth)

                # Iterate over each tag and get its blocks. If blocks of
                # tag matches with those of the manifest, then this tag
                # is the latest added/modified one. This is just a hack
                # since proper API is not available.
                # Syntax for fetching manifest: GET /v2/<name>/manifests/<tag>
                for tag in tags.json()["tags"]:
                    temp_req = temp[0] + "manifests/" + tag
                    tag_manifest = requests.get(temp_req,
                                                verify=False,
                                                auth=auth)

                    blocks = []
                    fsLayers = tag_manifest.json()["fsLayers"]
                    for layer in fsLayers:
                        blocks.append(layer["blobSum"])

                    if sorted(image_blocks) == sorted(blocks):
                        print "New image uploaded is: %s | tag: %s" % (
                            repository, tag)
                        host = temp[0].split("/")[2]
                        image = host + "/" + repository + ":" + tag
                        try:
                            MSG_QUEUE.send(image)
                        except pika.exceptions.ConnectionClosed:
                            print('RMQ connection closed. Re-establishing...')
                            MSG_QUEUE = MessageQueue('localhost',
                                                     'dockerqueue', None)
                            MSG_QUEUE.send(image)
                        except Exception as e:
                            print('MessageQueue error: ', type(e), e)
                            traceback.print_exc()
                        break
    return "Done", 200
Example #3
def process_image(imagename, short_imagename, base_image, operation,
                  elasticDB):
    print('Processing image: ', imagename, '. Operation is: ', operation)
    tmpname = imagename.replace(":", "_")
    imagetar = os.path.join(TEMP_DIR, tmpname, 'image.tar')
    imagedir = os.path.join(TEMP_DIR, tmpname, 'image')
    flat_imgdir = os.path.join(TEMP_DIR, tmpname, 'flat_image')
    dstdir = os.path.join(TEMP_DIR, tmpname, 'hashed_image')
    #make_dir(TEMP_DIR)
    exec_cmd(['sudo', 'rm', '-rf', TEMP_DIR])
    make_dir(imagedir)
    make_dir(flat_imgdir)
    make_dir(dstdir)
    make_dir("/tmp/files")  # for debugging purpose, will remove it

    pull_image(imagename)
    save_image(imagetar, imagename)
    untar_image(imagetar, imagedir)

    get_leaf_and_flatten(imagedir, flat_imgdir)

    msg_queue = MessageQueue('localhost', 'dockerqueue', elasticDB)
    process_sdhash(short_imagename, base_image, flat_imgdir, msg_queue,
                   operation)
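
The helpers process_image() relies on (exec_cmd, make_dir, pull_image, save_image, untar_image) are not shown in these examples; below is a hedged sketch of what they might look like, built on subprocess and the docker/tar CLIs. get_leaf_and_flatten and process_sdhash are sdhash-specific and are not reconstructed here.

import os
import subprocess

def exec_cmd(cmd):
    # cmd is a list of arguments, e.g. ['sudo', 'rm', '-rf', TEMP_DIR]
    subprocess.check_call(cmd)

def make_dir(path):
    # create the directory (and any parents) if it does not exist yet
    if not os.path.isdir(path):
        os.makedirs(path)

def pull_image(imagename):
    exec_cmd(['docker', 'pull', imagename])

def save_image(imagetar, imagename):
    # 'docker save -o <tar> <image>' exports the image as a tar archive
    exec_cmd(['docker', 'save', '-o', imagetar, imagename])

def untar_image(imagetar, imagedir):
    exec_cmd(['tar', '-xf', imagetar, '-C', imagedir])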
Example #4
def rmq_callback(ch, method, properties, body):
    print "rmq_callback"
    image = body
    print 'processing image ', image
    tag = image.split(':')[-1]
    operation = 'compare'
    if tag == "golden":
        operation = 'store'

    process_image(image, operation)
    

if __name__ == "__main__":
    global CUR_DIR
    CUR_DIR = os.getcwd()
    proc_thread1 = FileProcessor('Procthread1')
    proc_thread2 = FileProcessor('Procthread2')

    index_thread1 = IndexOrLookup('Idxthread1')
    index_thread2 = IndexOrLookup('Idxthread2')

    elasticDB = ElasticDatabase(EsCfg)
    # TODO: add queuename and host to config
    msg_queue = MessageQueue('localhost', 'dockerqueue', elasticDB)
    try:
        msg_queue.start_consuming(rmq_callback)
    except KeyboardInterrupt:
        msg_queue.close()

    print "Done"
Example #5
import json
import os
import sys
import traceback
import configparser

import pika          # used by the /test handler (see Example #1)
import requests      # used by the /test handler (see Example #1)
from flask import Flask, request

sys.path.append(os.getcwd() + "/../")
from scripts.messagequeue import MessageQueue

global CUR_DIR
CUR_DIR = ""

app = Flask(__name__)

APP_ROOT = os.path.dirname(os.path.abspath(__file__))
CONFIG_FILE = os.path.join(APP_ROOT, 'settings.ini')

config = configparser.ConfigParser()
config.read(CONFIG_FILE)
MSG_QUEUE = MessageQueue('localhost', 'dockerqueue', None)

username = config.get('registry', 'username')
password = config.get('registry', 'password')
auth = (username, password)


@app.route("/")
def registry_endpoint():
    return "Docker registry endpoint!\n"


@app.route("/test", methods=['POST'])
def test():
    print('...')
    #log()
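
The module above reads a [registry] section from settings.ini via config.get('registry', 'username') and config.get('registry', 'password'). The helper below is a small sketch that writes that expected layout; the section and key names come from the snippet, the values are placeholders.

import configparser

def write_example_settings(path="settings.ini"):
    # section/key names taken from the config.get() calls above;
    # the values are placeholders, not real credentials
    config = configparser.ConfigParser()
    config["registry"] = {
        "username": "REGISTRY_USER",
        "password": "REGISTRY_PASSWORD",
    }
    with open(path, "w") as fh:
        config.write(fh)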
Example #6
#####################################################################
# File: processor.py
# Author: Jeremy Mwenda <*****@*****.**>
# Desc: This file processes messages (sdhashes) from rabbitMQ.
#
#####################################################################
import os
import sys

sys.path.append(os.getcwd() + "/../")
from scripts.elasticdatabase import ElasticDatabase
from scripts.messagequeue import MessageQueue
from scripts.esCfg import EsCfg

if __name__ == "__main__":
    elasticDB = ElasticDatabase(EsCfg)
    # TODO: add queuename and host to config
    msg_queue = MessageQueue('localhost', 'dockerqueue', elasticDB)
    msg_queue.start_consuming()
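
To give this consumer something to process, a message just needs to land on the dockerqueue queue; the webhook in Example #1 does that via MSG_QUEUE.send(image). A hedged, pika-only sketch of publishing a test message by hand (the image name is a placeholder, and the queue declaration must match whatever scripts.messagequeue declares):

import pika

connection = pika.BlockingConnection(pika.ConnectionParameters("localhost"))
channel = connection.channel()
channel.queue_declare(queue="dockerqueue")
channel.basic_publish(exchange="", routing_key="dockerqueue",
                      body="registry.example.com:5000/myrepo/myimage:golden")
connection.close()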