Example #1
import json
import requests
from io import StringIO

from clint.textui import progress

def get_cc_index_records(url_query, archive_bucket, page=0, pageSize=1):
    payload = {'url': url_query, 'output': 'json', 'page': page,
               'filter': '!~filename:crawldiagnostics&mime:text/html&status:200',
               'pageSize': pageSize, 'from': '2016', 'to': '2017'}
    resp = requests.get('http://index.commoncrawl.org/' + archive_bucket, params=payload, stream=True)
    content = StringIO()
    for chunk in progress.dots(resp.iter_content(chunk_size=1024)):
        if chunk:
            content.write(chunk.decode('utf8'))
            content.flush()
    return [json.loads(x) for x in content.getvalue().strip().splitlines()]

def load_qld_data(qld_filename, dumpdir=None):
    from qld import parse_ahpi_xml

    qld_places = parse_ahpi_xml(qld_filename)

    for place in progress.dots(qld_places):
        if dumpdir:
            extra = qld_ehp_site_parser.load_extra(place['url'], dumpdir)
            place.update(extra)
        add_heritage_place(place)
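Note: the pattern in get_cc_index_records above, wrapping a streaming requests response in progress.dots so one dot is printed per downloaded chunk, reduces to a minimal sketch. Only requests and clint are assumed; the download function name and label are illustrative:

import requests
from clint.textui import progress

def download(url):
    # Stream the body in 1 KiB chunks, printing one dot per chunk.
    resp = requests.get(url, stream=True)
    body = bytearray()
    for chunk in progress.dots(resp.iter_content(chunk_size=1024), label='Downloading'):
        if chunk:
            body.extend(chunk)
    return bytes(body)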
Example #3
def update_progress3():
    from time import sleep
    from random import random
    from clint.textui import progress

    for i in progress.bar(range(100)):
        sleep(random() * 0.2)

    for i in progress.dots(range(100)):
        sleep(random() * 0.2)
Example #4
    def run(self, bucket, prefix='', show_progress=True):
        self.pre_run()

        bucket = self.get_bucket(bucket)
        keys = bucket.list(prefix)
        chunk_size = self.threads if self.threads is not None else cpu_count()
        it = self.pool.imap(self.analyse_key, keys, chunksize=chunk_size)
        if show_progress:
            list(progress.dots(it, label='Analysing bucket "%s"' % bucket.name))
        else:
            list(it)

        self.post_run()
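Note: progress.dots is as lazy as the pool iterator it wraps, which is why the run method above drains it with list(...). A minimal sketch of the same pattern, using a thread-backed pool and a toy analyse_key so it runs standalone:

from multiprocessing.dummy import Pool  # thread-backed pool, nothing to pickle
from clint.textui import progress

def analyse_key(key):
    return len(key)  # stand-in for the real per-key analysis

pool = Pool(4)
it = pool.imap(analyse_key, ['alpha', 'beta', 'gamma'], chunksize=1)
results = list(progress.dots(it, label='Analysing'))  # list() drains the iterator and draws the dots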
Example #5
def load_sites():
    """ Load the site information.
    """
    global offset
    global zoom

    if offset is None:
        calc_globals()

    fname = filefinder.legends_xml()
    logging.debug("Reading legends xml ({} Mb)".format(
        os.path.getsize(fname) // 1024 // 1024))

    sites = []
    sites_started = False
    with open(fname, "r", encoding="iso-8859-1") as xmlfile:
        for line in progress.dots(xmlfile, every=1000):
            if not sites_started:
                if line.startswith("<sites>"):
                    sites_started = True
                continue
            try:
                if not add_to_sites(sites, line.strip()):
                    break
            except Exception as e:
                print(e)
                print("Line: '{}'".format(line))
                break

    print()  # A very nice linebreak after all the dots generated by the loop

    # iterate over the sites and do some filtering
    show_vaults = conf.getboolean("Map", "show_spoilers", fallback=True)
    result = []
    for site in sites:
        if site["type"] == "vault" and not show_vaults:
            continue
        result.append(site)

    with open(os.path.join(build_dir, "sites.json"), "w") as sitesjson:
        sitesjson.write(json.dumps(result))
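Note: the every=1000 argument used above rate-limits the dots, printing one per 1000 lines instead of one per line, which keeps the output readable for a multi-megabyte XML file. A tiny standalone illustration:

from clint.textui import progress

# With every=1000, one dot is printed per 1000 items rather than per item.
for i in progress.dots(range(100000), every=1000):
    pass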
Example #7
    def login(self):
        print("Logging in...")

        self.browser_visit('login')

        for i in progress.bar(range(60)):
            sleep(random() * 0.2)

        for i in progress.dots(range(60)):
            sleep(random() * 0.2)

        self.browser.find_by_name('username').type(self._username)
        self.browser.find_by_name('password').type("{0}\t".format(self._password))

        button = self.browser.find_by_id('login')
        button.click()

        link_elem = wait_visible(self.browser.driver, "//a[@title='Close']", timeout=30)
        if link_elem:
            logging.info("close button ={0}".format(link_elem))
            link_elem.click()
        else:
            logging.info("No close button.")
            self.login()
Example #8
    except Exception:
        pass

if __name__ == '__main__':
    assertRaises(progress.bar, TypeError)
    assertRaises(progress.mill, TypeError)

    with progress.bar(expected_size=200) as bar:
        for i in range(200):
            bar.update(i+1)
            sleep(0.05)

    for i in progress.bar(range(100)):
        sleep(random() * 0.2)

    for i in progress.dots(range(100)):
        sleep(random() * 0.2)

    with progress.dots() as bar:
        for i in range(200):
            bar.update()
            sleep(0.05)

    for i in progress.mill(range(100)):
        sleep(random() * 0.2)

    with progress.mill(expected_size=200) as bar:
        for i in range(200):
            bar.update(i+1)
            sleep(0.05)
Example #10
def main():
    # welcome to the danger zone

    parser = argparse.ArgumentParser(
        # random comment here for no reason ;)
        formatter_class=argparse.RawTextHelpFormatter,
        prog='spry',
        description='++++++++++++++++++++++++++++\n+++ SPRY +++++++++++++++++++\n+++ s0c1@l m3d1a sc@nn3r +++\n++++++++++++++++++++++++++++',
        epilog = '''EXAMPLE: \n check instagram \n spry jamesanthonycampbell \n ''')

    parser.add_argument('username', help='specific username, like realdonaldtrump')

    parser.add_argument('-p', '--proxy', help='proxy in the form of 127.0.0.1:8118',
                        nargs=1, dest='setproxy', required=False)

    parser.add_argument('-w', '--wait',
                        help='max random wait time in seconds, \n5 second default (randomly wait 1-5 seconds)',
                        dest='setwait', nargs='?', const=3, type=int, default=3)
    parser.add_argument('-u', '--user-agent',
                        help='override random user-agent\n(by default randomly selects between \n+8500 different user agent strings)',
                        dest='useragent', nargs='?', const='u', default='u')
    parser.add_argument('--report', dest='reporting', action='store_true')
    parser.add_argument('-v', '--verbose-useragent', dest='vu', action='store_true')
    parser.add_argument('--version', action='version',
                        version='%(prog)s {version}'.format(version='Version: ' + __version__))
    parser.set_defaults(reporting=False, vu=False)
    args = parser.parse_args()
    cprint(welcomer,'red')
    # args strings
    username = args.username
    setproxy = args.setproxy
    # note, the correct way to check if variable is NoneType
    if setproxy != '' and setproxy is not None:
        proxyoverride = True
        if '9050' in setproxy[0] or '9150' in setproxy[0] or 'tor' in setproxy[0]:
            usingtor = True
        else:
            usingtor = False
    else:
        proxyoverride = False
    setwait = args.setwait
    reporting = args.reporting
    useragent = args.useragent
    vu = args.vu
    if useragent == 'u':
        overrideuseragent = False
        useragent = random.choice(useragents) # if user agent override not set, select random from list
    else:
        overrideuseragent = True
    if vu:
        cprint('\nUseragent set as %s\n' % (useragent,),'blue')
    headers = {'User-Agent': useragent}
    i = 0 # counter for how many are 200's
    social_networks_list = [
        'https://twitter.com/', 'https://www.instagram.com/',
        'https://www.linkedin.com/in/', 'https://foursquare.com/',
        'https://www.flickr.com/photos/', 'https://www.facebook.com/',
        'https://www.reddit.com/user/', 'https://new.vk.com/',
        'https://github.com/', 'https://ok.ru/', 'https://www.twitch.tv/',
        'https://venmo.com/', 'http://www.goodreads.com/',
        'http://www.last.fm/user/', 'https://api.spotify.com/v1/users/',
        'https://www.pinterest.com/', 'https://keybase.io/',
        'https://bitbucket.org/', 'https://pinboard.in/u:',
        'https://disqus.com/by/', 'https://badoo.com/profile/',
        'http://steamcommunity.com/id/', 'http://us.viadeo.com/en/profile/',
        'https://www.periscope.tv/', 'https://www.researchgate.net/profile/',
        'https://www.etsy.com/people/', 'https://myspace.com/',
        'http://del.icio.us/', 'https://my.mail.ru/community/',
        'https://www.xing.com/profile/']
    totalnetworks = len(social_networks_list) # get the total networks to check
    print('\n\n[*] Starting to process list of {} social networks now [*]\n\n'.format(totalnetworks))
    for soc in social_networks_list:
        # get domain name
        domainname = urlparse(soc).netloc
        domainnamelist = domainname.split('.')
        for domainer in domainnamelist:
            if len(domainer) > 3 and domainer != 'vk' and domainer != 'ok' and domainer != 'last' and domainer != 'mail':
                realdomain = domainer
            elif domainer == 'vk':
                realdomain = domainer
            elif domainer == 'ok':
                realdomain = domainer+'.ru'
            elif domainer == 'last':
                realdomain = domainer+'.fm'
            elif domainer == 'mail':
                realdomain = domainer+'.ru'
        # get proxy settings if any
        if proxyoverride:
            if usingtor:
                socks_proxy = "socks5://" + setproxy[0]
                proxyDict = {"http": socks_proxy}
            else:
                #print(setproxy)
                http_proxy = "http://" + setproxy[0]
                https_proxy = "https://" + setproxy[0]
                proxyDict = {
                    "http": http_proxy,
                    "https": https_proxy,
                }
        sleep(randint(1,setwait))
        sys.stdout.flush()
        # try to load the social network for the respective user name
        # make sure to load proxy if proxy set otherwise don't pass a proxy arg
        # DONT FORGET TO HANDLE LOAD TIMEOUT ERRORS! - ADDED exception handlers finally 2-5-2017 JC
        if proxyoverride:
            try:
                r = requests.get(soc + username, stream=True, headers=headers, proxies=proxyDict)
            except requests.Timeout as err:
                print(err)
                continue
            except requests.RequestException as err:
                print(err)
                continue
        else:
            try:
                r = requests.get(soc + username, stream=True, headers=headers)
            except requests.Timeout as err:
                print(err)
                continue
            except requests.RequestException as err:
                print(err)
                continue
        # switch user agents again my friend
        if not overrideuseragent:
            useragent = random.choice(useragents)
            # if user agent override not set, select random from list
        if vu: # if verbose output then print the user agent string
            cprint('\nUseragent set as %s\n' % (useragent,),'blue')
        if soc == 'https://www.instagram.com/' and r.status_code == 200:
            #print(r.text)
            soup = BeautifulSoup(r.content, 'html.parser')
            aa = soup.find("meta", {"property": "og:image"})
            # test instagram profile image print
            #print (aa['content']) # this is the instagram profile image
            instagram_profile_img = requests.get(aa['content']) # get instagram profile pic
            with open('./' + username + '.jpg', 'wb') as img_file:
                img_file.write(instagram_profile_img.content)
            #exit()
        try:
            total_length = int(r.headers.get('content-length'))
        except (TypeError, ValueError):
            total_length = 102399
        for chunk in progress.dots(r.iter_content(chunk_size=1024), label='Loading ' + realdomain):
            sleep(random.random() * 0.2)
            if chunk:
                #sys.stdout.write(str(chunk))
                sys.stdout.flush()
        sys.stdout.flush()
        #print(r.text)
        if r.status_code == 200:
            cprint("user found @ {}".format(soc + username), 'green')
            i = i + 1
        else:
            cprint("Status code: {} no user found".format(r.status_code), 'red')
    print('\n\n[*] Total networks with username found: {} [*]\n'.format(i))
    if reporting: # if pdf reporting is turned on (default on)
        create_pdf(username, i)
        cprint("Report saved as {}-report.pdf. \nTo turn off this feature don't pass in the --report flag.\n".format(username), 'yellow')
Example #11
#!/usr/bin/env python
# -*- coding: utf-8 -*-

import sys
import os

sys.path.insert(0, os.path.abspath('..'))

from time import sleep
from random import random
from clint.textui import progress

if __name__ == '__main__':
    for i in progress.bar(range(100)):
        sleep(random() * 0.2)

    for i in progress.dots(range(100)):
        sleep(random() * 0.2)
Example #12
def load():

    STRUCTS = {
        (128, 128, 128): "castle",
        (255, 255, 255): "village",
        (255, 128, 0): "crops",
        (255, 160, 0): "crops",
        (255, 192, 0): "crops",
        (0, 255, 0): "pasture",
        (64, 255, 0): "meadow",
        (0, 160, 0): "orchard",
        (20, 20, 20): "tunnel",
        (224, 224, 224): "stone_bridge",
        (180, 167, 20): "other_bridge",
        (192, 192, 192): "stone_road",
        (150, 127, 20): "other_road",
        (96, 96, 96): "stone_wall",
        (160, 127, 20): "other_wall",
        (0, 96, 255): "lake",
        # The following are not really 'structures'
        #        (255,255,192): "mountain",
        #        (128,64,32)  : "land",# remove this?
        #        (0,64,255)   : "ocean",# remove this?
        #
    }

    RIVERS = {
        (0, 224, 255): "river",
        (0, 255, 255): "river",
        (0, 112, 255): "river",
    }

    structs = {}

    struct_image = Image.open(filefinder.struct_map())
    struct_pixels = struct_image.load()
    world_size = struct_image.size[0]
    del struct_image
    hydro_image = Image.open(filefinder.hydro_map())
    hydro_pixels = hydro_image.load()
    del hydro_image
    for (x, y) in progress.dots(itertools.product(range(world_size), repeat=2),
                                every=20000):
        try:
            structs[(x, y)] = STRUCTS[struct_pixels[(x, y)]]
        except KeyError:
            # We are not interested in this structure
            structs[(x, y)] = ""
        # Check if there is a river
        try:
            river = RIVERS[hydro_pixels[(x, y)]]
            if structs[(x, y)] != "" and structs[(x, y)] != "river":
                #print("Overwriting {} with river.".format(structs[(x,y)]))
                pass
            structs[(x, y)] = "river"
        except KeyError:
            pass

    final_tiles = {}
    # Now pass over all structures and see where tiles of the same type
    # neighbour each other
    for (x, y) in progress.dots(itertools.product(range(world_size), repeat=2),
                                every=20000):
        suffixes = ""
        if same_type(structs, (x, y), (0, -1)):
            suffixes += "n"
        if same_type(structs, (x, y), (-1, 0)):
            suffixes += "w"
        if same_type(structs, (x, y), (0, 1)):
            suffixes += "s"
        if same_type(structs, (x, y), (1, 0)):
            suffixes += "e"

        if suffixes:
            tile_type = "{}_{}".format(structs[(x, y)], suffixes)
            try:
                final_tiles[x][y] = tile_type
            except KeyError:
                final_tiles[x] = {y: tile_type}

    result = {"worldsize": world_size, "map": final_tiles}

    build_dir = conf["Paths"]["build"]
    if not os.path.exists(build_dir):
        os.makedirs(build_dir)
    with open("{}/structs.json".format(build_dir), "w") as heightjson:
        heightjson.write(json.dumps(result))
        logging.debug(
            "Dumped structs into {}/structs.json".format(build_dir))
def make_groups():
    # load structures json into a map
    with open(os.path.join(conf["Paths"]["build"], "structs.json")) as structjs:
        structures = json.loads(structjs.read())

    # Maps { x -> y -> group_index }
    # Better would be {(x,y) -> group_index} but json cannot use tuples as keys!
    groups = {}

    # Maps { group_index -> struct type}
    group_defs = {}

    next_grp_index = 0

    world_size = structures["worldsize"]

    # These structures should not be grouped (usually because there is no marker for them)
    blacklist = ["river", "meadow", "crops", "orchard", "pasture"]

    # first step is to grow groups on the map
    for (x, y) in progress.dots(itertools.product(range(world_size), repeat=2), every=20000):
        this_type = get_type(structures["map"], x, y)

        # Skip this tile when there's no structure
        if this_type == "" or this_type in blacklist:
            continue

        current_grp = get_grp(groups, x, y)
        if current_grp == "":
            current_grp = next_grp_index
            group_defs[current_grp] = this_type
            next_grp_index += 1
            # use set_grp so the row dict for x is created when missing
            set_grp(groups, x, y, current_grp)

        allow_x = False
        if x < world_size:
            allow_x = True
            right = get_type(structures["map"], x+1, y)
            if right == this_type:
                set_grp(groups, x+1, y, current_grp)

        allow_y = False
        if y < world_size:
            allow_y = True
            below = get_type(structures["map"], x, y+1)
            if below == this_type:
                set_grp(groups, x, y+1, current_grp)

        if allow_x and allow_y:
            right_below = get_type(structures["map"], x+1, y+1)
            if right_below == this_type:
                set_grp(groups, x+1, y+1, current_grp)

        # for each tile, get the type of the one above and to the right
        # to check if we have a new group here. write the grp index
        # on these cells

    # second step is to go over them and merge neighbouring groups
    # of the same type
    for (x, y) in progress.dots(itertools.product(range(world_size), repeat=2), every=20000):
        this_grp = get_grp(groups, x, y)

        # skip non-groups
        if this_grp == "":
            continue

        allow_x = False
        if x < world_size:
            allow_x = True
            right = get_grp(groups, x + 1, y)
            if (right != "" and right != this_grp
                    and group_defs[this_grp] == group_defs[right]):
                replace_grp(groups, right, this_grp)
                del group_defs[right]

        allow_y = False
        if y < world_size:
            allow_y = True
            below = get_grp(groups, x, y + 1)
            if (below != "" and below != this_grp
                    and group_defs[this_grp] == group_defs[below]):
                replace_grp(groups, below, this_grp)
                del group_defs[below]

        if allow_x and allow_y:
            right_below = get_grp(groups, x + 1, y + 1)
            if (right_below != "" and right_below != this_grp
                    and group_defs[this_grp] == group_defs[right_below]):
                replace_grp(groups, right_below, this_grp)
                del group_defs[right_below]

    # Remove empty x-coordinates from the map
    groups_final = {k: v for k, v in groups.items() if v}


    result = {"groups": groups_final, "defs": group_defs}
    with open(os.path.join(build_dir, "groups.json"), "w") as groupjs:
        groupjs.write(json.dumps(result))
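Note: both passes above wrap an itertools.product grid scan in progress.dots; with every=20000 the display overhead stays negligible even over world_size² tiles. A standalone reduction of that pattern:

import itertools
from clint.textui import progress

world_size = 400
# 160,000 (x, y) pairs, one dot per 20,000 of them.
for (x, y) in progress.dots(itertools.product(range(world_size), repeat=2), every=20000):
    pass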
Example #14
def main():
    # welcome to the danger zone

    parser = argparse.ArgumentParser(
        # random comment here for no reason ;)
        formatter_class=argparse.RawTextHelpFormatter,
        prog='spry',
        description=
        '++++++++++++++++++++++++++++\n+++ SPRY +++++++++++++++++++\n+++ s0c1@l m3d1a sc@nn3r +++\n++++++++++++++++++++++++++++',
        epilog=
        '''EXAMPLE: \n check instagram \n spry jamesanthonycampbell \n ''')

    parser.add_argument('username',
                        help='specific username, like realdonaldtrump')

    parser.add_argument('-p',
                        '--proxy',
                        help='proxy in the form of 127.0.0.1:8118',
                        nargs=1,
                        dest='setproxy',
                        required=False)

    parser.add_argument(
        '-w',
        '--wait',
        help=
        'max random wait time in seconds, \n5 second default (randomly wait 1-5 seconds)',
        dest='setwait',
        nargs='?',
        const=3,
        type=int,
        default=3)
    parser.add_argument(
        '-u',
        '--user-agent',
        help=
        'override random user-agent\n(by default randomly selects between \n+8500 different user agent strings)',
        dest='useragent',
        nargs='?',
        const='u',
        default='u')
    parser.add_argument('--no-report', dest='reporting', action='store_false')
    parser.add_argument('-v',
                        '--verbose-useragent',
                        dest='vu',
                        action='store_true')
    parser.add_argument(
        '--version',
        action='version',
        version='%(prog)s {version}'.format(version='Version: ' + __version__))
    parser.set_defaults(reporting=True, vu=False)
    args = parser.parse_args()
    cprint(welcomer, 'red')
    # args strings
    username = args.username
    setproxy = args.setproxy
    # note, the correct way to check if variable is NoneType
    if setproxy != '' and setproxy is not None:
        proxyoverride = True
        if '9050' in setproxy[0] or '9150' in setproxy[0]:
            usingtor = True
        else:
            usingtor = False
    else:
        proxyoverride = False
    setwait = args.setwait
    reporting = args.reporting
    useragent = args.useragent
    vu = args.vu
    if useragent == 'u':
        overrideuseragent = False
        useragent = random.choice(
            useragents
        )  # if user agent override not set, select random from list
    else:
        overrideuseragent = True
    if vu:
        cprint('\nUseragent set as %s\n' % (useragent, ), 'blue')
    headers = {'User-Agent': useragent}
    i = 0  # counter for how many are 200's
    social_networks_list = [
        'https://twitter.com/', 'https://www.instagram.com/',
        'https://www.linkedin.com/in/', 'https://foursquare.com/',
        'https://www.flickr.com/photos/', 'https://www.facebook.com/',
        'https://www.reddit.com/user/', 'https://new.vk.com/',
        'https://github.com/', 'https://ok.ru/', 'https://www.twitch.tv/',
        'https://venmo.com/', 'http://www.goodreads.com/',
        'http://www.last.fm/user/', 'https://api.spotify.com/v1/users/',
        'https://www.pinterest.com/', 'https://keybase.io/',
        'https://bitbucket.org/', 'https://pinboard.in/u:',
        'https://disqus.com/by/', 'https://badoo.com/profile/',
        'http://steamcommunity.com/id/', 'http://us.viadeo.com/en/profile/',
        'https://www.periscope.tv/', 'https://www.researchgate.net/profile/',
        'https://www.etsy.com/people/', 'https://myspace.com/',
        'http://del.icio.us/', 'https://my.mail.ru/community/',
        'https://www.xing.com/profile/'
    ]
    totalnetworks = len(
        social_networks_list)  # get the total networks to check
    print('\n\n[*] Starting to process list of {} social networks now [*]\n\n'.
          format(totalnetworks))
    for soc in social_networks_list:
        # get domain name
        domainname = urlparse(soc).netloc
        domainnamelist = domainname.split('.')
        for domainer in domainnamelist:
            if len(
                    domainer
            ) > 3 and domainer != 'vk' and domainer != 'ok' and domainer != 'last' and domainer != 'mail':
                realdomain = domainer
            elif domainer == 'vk':
                realdomain = domainer
            elif domainer == 'ok':
                realdomain = domainer + '.ru'
            elif domainer == 'last':
                realdomain = domainer + '.fm'
            elif domainer == 'mail':
                realdomain = domainer + '.ru'
        # get proxy settings if any
        if proxyoverride:
            if usingtor:
                socks_proxy = "socks5://" + setproxy[0]
                proxyDict = {"http": socks_proxy}
            else:
                #print(setproxy)
                http_proxy = "http://" + setproxy[0]
                https_proxy = "https://" + setproxy[0]
                proxyDict = {"http": http_proxy, "https": https_proxy}
        sleep(randint(1, setwait))
        sys.stdout.flush()
        # try to load the social network for the respective user name
        # make sure to load proxy if proxy set otherwise don't pass a proxy arg
        if proxyoverride:
            r = requests.get(soc + username,
                             stream=True,
                             headers=headers,
                             proxies=proxyDict)
        else:
            r = requests.get(soc + username, stream=True, headers=headers)
        # switch user agents again my friend
        if not overrideuseragent:
            useragent = random.choice(useragents)
            # if user agent override not set, select random from list
        if vu:  # if verbose output then print the user agent string
            cprint('\nUseragent set as %s\n' % (useragent, ), 'blue')
        if soc == 'https://www.instagram.com/' and r.status_code == 200:
            #print(r.text)
            soup = BeautifulSoup(r.content, 'html.parser')
            aa = soup.find("meta", {"property": "og:image"})
            # test instagram profile image print
            #print (aa['content']) # this is the instagram profile image
            instagram_profile_img = requests.get(
                aa['content'])  # get instagram profile pic
            with open('./' + username + '.jpg', 'wb') as img_file:
                img_file.write(instagram_profile_img.content)
            #exit()
        try:
            total_length = int(r.headers.get('content-length'))
        except (TypeError, ValueError):
            total_length = 102399
        for chunk in progress.dots(r.iter_content(chunk_size=1024),
                                   label='Loading ' + realdomain):
            sleep(random.random() * 0.2)
            if chunk:
                #sys.stdout.write(str(chunk))
                sys.stdout.flush()
        sys.stdout.flush()
        #print(r.text)
        if r.status_code == 200:
            cprint("user found", 'green')
            i = i + 1
        else:
            cprint("Status code: {} no user found".format(r.status_code),
                   'red')
    print('\n\n[*] Total networks with username found: {} [*]\n'.format(i))
    if reporting:  # if pdf reporting is turned on (default on)
        create_pdf(username)
        cprint(
            'Report saved as {}-report.pdf. \nTo turn off this feature use the --no-report flag.\n'
            .format(username), 'yellow')
Example #15
#!/usr/bin/env python
# -*- coding: utf-8 -*-

import sys
import os

sys.path.insert(0, os.path.abspath('..'))

from time import sleep
from random import random
from clint.textui import progress


if __name__ == '__main__':
    for i in progress.bar(range(100)):
        sleep(random() * 0.2)

    for i in progress.dots(range(100)):
        sleep(random() * 0.2)
    
    for i in progress.mill(range(100)):
        sleep(random() * 0.2)


def complete_upload(data_config):
    data_endpoint = data_config.data_endpoint
    data_id = data_config.data_id
    tarball_path = data_config.tarball_path

    if not data_id:
        floyd_logger.error("Corrupted upload state, please start a new one.")
        sys.exit(1)

    # check for tarball upload, upload to server if not done
    if not data_config.resource_id and (tarball_path and data_endpoint):
        floyd_logger.debug("Getting fresh upload credentials")
        creds = DataClient().new_tus_credentials(data_id)
        if not creds:
            sys.exit(1)

        file_size = os.path.getsize(tarball_path)
        # check for upload limit dimension
        if file_size > MAX_UPLOAD_SIZE:
            try:
                floyd_logger.info("Removing compressed data...")
                rmtree(os.path.dirname(tarball_path))
            except (OSError, TypeError):
                pass

            sys.exit(("Data size too large to upload, please keep it under %s.\n") %
                     (sizeof_fmt(MAX_UPLOAD_SIZE)))

        floyd_logger.info("Uploading compressed data. Total upload size: %s",
                          sizeof_fmt(file_size))
        tus_client = TusDataClient()
        if not tus_client.resume_upload(tarball_path, data_endpoint, auth=creds):
            floyd_logger.error("Failed to finish upload!")
            return

        try:
            floyd_logger.info("Removing compressed data...")
            rmtree(os.path.dirname(tarball_path))
        except (OSError, TypeError):
            pass

        floyd_logger.debug("Created data with id : %s", data_id)
        floyd_logger.info("Upload finished.")

        # Update data config
        data_config.set_tarball_path(None)
        data_config.set_data_endpoint(None)
        data_source = DataClient().get(data_id)
        data_config.set_resource_id(data_source.resource_id)
        DataConfigManager.set_config(data_config)

    # data tarball uploaded, check for server untar
    if data_config.resource_id:
        floyd_logger.info(
            "Waiting for server to unpack data.\n"
            "You can exit at any time and come back to check the status with:\n"
            "\tfloyd data upload -r")
        try:
            for i in dots(ResourceWaitIter(data_config.resource_id),
                          label='Waiting for unpack...'):
                pass
        except WaitTimeoutException:
            clint_STREAM.write('\n')
            clint_STREAM.flush()
            floyd_logger.info(
                "Looks like it is going to take longer for Floydhub to unpack "
                "your data. Please check back later.")
            sys.exit(1)
        else:
            data_config.set_resource_id(None)
            data_config.set_tarball_path(None)
            data_config.set_data_endpoint(None)
            data_config.set_data_id(None)
            DataConfigManager.set_config(data_config)

    # Print output
    table_output = [["NAME"],
                    [normalize_data_name(data_config.data_name)]]
    floyd_logger.info('')
    floyd_logger.info(tabulate(table_output, headers="firstrow"))
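Note: complete_upload above also uses dots as a spinner over a polling iterator (ResourceWaitIter), where each yield prints one dot until the server finishes unpacking. A sketch of that pattern, with a hypothetical wait_iter standing in for the real waiter:

import time
from clint.textui import progress

def wait_iter(polls=20, delay=0.1):
    # Hypothetical stand-in for ResourceWaitIter: yields once per poll
    # until the resource is ready (the real waiter raises on timeout).
    for _ in range(polls):
        time.sleep(delay)
        yield None

for _ in progress.dots(wait_iter(), label='Waiting for unpack...'):
    pass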