Example 1
def getTextSizeFromImage(image):
    width, height = image.size
    colors = image.getcolors(width * height)
    background_color = 255
    if colors:
        colors.sort()
        background_color = colors[-1][1]
    text_sizes = []
    for i in xrange(1, height):
        blank_line = True
        for j in range(0, width, 3):
            color = image.getpixel((j, i - 1))
            if colorsContrast(color, background_color):
                blank_line = False
                break
        if blank_line:
            if text_sizes and text_sizes[-1]:
                text_sizes.append(0)
        else:
            if text_sizes and text_sizes[-1]:
                text_sizes[-1] += 1
            else:
                text_sizes.append(1)
    text_sizes.sort()
    text_sizes = [i for i in text_sizes if i != 0]
    text_size = 0
    if text_sizes:
        text_sizes_avg = sum(text_sizes) / len(text_sizes)
        for i in text_sizes:
            if i > text_sizes_avg:
                text_size = math.floor(i)
                break
        text_size = max(text_sizes)
    debug('Text Size: ', text_size)
    return text_size
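colorsContrast is not defined in this excerpt; a minimal sketch of a compatible helper, assuming single-channel (grayscale) pixel values and an arbitrary contrast threshold, might look like this:

def colorsContrast(color, background_color, threshold=30):
    # Hypothetical helper: treat both values as grayscale intensities and
    # report a contrast whenever their difference exceeds the threshold.
    return abs(color - background_color) > threshold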
Example 2
def convertMultiImage(image_path, temp_dir):
    converted_paths = []
    if imghdr.what(image_path) != 'tiff':
        return [image_path]

    debug('Checking for multiple images in TIFF')
    i = 0
    base_name = os.path.basename(image_path)
    name, extension = os.path.splitext(base_name)
    image = Image.open(image_path)
    try:
        while True:
            image.seek(i)
            file_name = os.path.join(temp_dir, name + ' #' + str(i + 1) + \
                                     extension)
            image_name = getNonExistingFileName(file_name)
            image.save(image_name, format='TIFF')
            debug('Saved %s' % image_name)
            converted_paths.append(image_name)
            i += 1
    except EOFError:
        # No more images in the file
        pass

    return converted_paths
Example 3
def manual_provision_vms(vs_config,
                         groupname,
                         groupdef,
                         clustername,
                         score,
                         client,
                         sl_storage,
                         configuration,
                         containername):
    configs = []
    count = int(groupdef['count'])
    for i in range(count):
        vscopy = vs_config.copy()
        vscopy['hostname'] = vscopy['hostname']+'-'+str(i)
        configs.append(vscopy)
    lib.debug(json.dumps(configs, indent=4, sort_keys=True))

    vs_manager = SoftLayer.VSManager(client)

    vms = lib.sl_retry(vs_manager.create_instances, configs)

    for vm in vms:
        lib.save_state(sl_storage, containername,
                       "serverinstances/{}/vms/{}/id".format(
                            groupname, vm['hostname']),
                       vm['id'])

    for vm in vms:
        lib.sl_retry(vs_manager.wait_for_ready, vm['id'], 600)

    groupdef['vms'] = []
    for vm in vms:
        groupdef['vms'].append(vs_manager.get_instance(vm['id']))
Example 4
def get_code(url, size=(640, 480), title="Google authentication"):
    if WEBKIT_BACKEND:
        lib.debug("Using webkit backend: " + WEBKIT_BACKEND)
        with lib.default_sigint():
            return backend.get_code(url, size=size, title=title)
    else:
        raise NotImplementedError("GUI auth requires pywebkitgtk or qtwebkit")
Example 5
def add_video_to_playlist(youtube, video_id, title, privacy="public"):
    """Add video to playlist (by title) and return the full response."""
    playlist_id = get_playlist(youtube, title) or \
        create_playlist(youtube, title, privacy)
    if playlist_id:
        return add_video_to_existing_playlist(youtube, playlist_id, video_id)
    else:
        debug("Error adding video to playlist")
Example 6
    def get(self):
        debug("MyRequestHandler get")
        if not self.request.params:
            self.write(self.script, self.html)
            return

        json_rpc = JsonRpc(self)
        json_rpc.write()
        return
Example 7
 def run(self):
     if self.canceled:
         return
     results = error = None
     try:
         results = self.target_method(*self.target_method_args)
     except Exception, exception:
         debug(str(exception))
         error = exception
Example 8
def do_create(args, client, sl_storage, configuration):

    if args['-v']:
        DebugLevel.set_level('verbose')
    else:
        DebugLevel.set_level('progress')

    containername = args['<clustername>']
    if args['<clustername>'] in clusters(sl_storage):
        error('cluster {} already exists'.format(args['<clustername>']))

    scoretext = open(args['<score.yaml>'], 'r').read()
    score = yaml.load(scoretext)

    score['clustername'] = args['<clustername>']
    dirname = os.path.dirname(args['<score.yaml>'])
    if dirname == "":
        dirname = "."
    score['path'] = dirname+"/"

    # setup environment for scripts in score to run properly. Change to
    #  the score directory and add . to the path
    os.chdir(score['path'])
    os.environ['PATH'] = ':'.join([os.environ['PATH'], './'])

    if 'parameters' in score:
        parmvalues = score['parameters']
    else:
        parmvalues = {}

    parameters = args['<key=value>']
    for param in parameters:
        splits = param.split('=', 1)
        if len(splits) != 2:
            raise Exception("{} is not a key=value pair".format(param))
        parmvalues[splits[0]] = splits[1]
    score['parameters'] = parmvalues
    scoretext = yaml.dump(score, indent=4)

    msg = validate_provision_parms_passed(scoretext, parmvalues)
    debug(msg)
    if msg:
        error(msg)

    state_container_create(sl_storage, containername)
    try:
        # save score for later operations
        save_state(sl_storage, containername, 'score', scoretext)
        provision(args['<clustername>'], containername, score,
                  configuration, client, sl_storage)
    except Exception, e:
        debug(traceback.format_exc())
        resources = get_resources(sl_storage, containername)
        del resources['score']
        if deletable(resources):
            state_container_clean(sl_storage, containername)
        error(e.message)
Example 9
def get_templated_string(templatestring, score):
    lib.debug(templatestring)
    regex = re.compile(r'{{(\\\n  \\)*\s*([a-zA-Z0-9-_\.]+)(\\\n\s*\\)*\s*}}',
                       re.MULTILINE)
    templatestring = re.sub(regex, subfunc, templatestring)
    lib.debug(templatestring)
    env = Environment(autoescape=False,
                      loader=DictLoader({'templatestring': templatestring}),
                      trim_blocks=False)
    return env.get_template('templatestring').render(score)
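The function above relies on jinja2's DictLoader to render the templated score text; a minimal, self-contained sketch of that same pattern (the template string and context values here are made up) looks like this:

from jinja2 import Environment, DictLoader

env = Environment(autoescape=False,
                  loader=DictLoader({'templatestring': 'host: {{ parameters.host }}'}),
                  trim_blocks=False)
print(env.get_template('templatestring').render({'parameters': {'host': 'example.com'}}))
# -> host: example.com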
Example 10
    def stop(self):
        # Check if there is a running application
        if self.proc is None or self.proc.poll() is not None:
            warn("No process running for %s" % self.prog)
            self.proc = None
            return

        # Terminate the application
        debug("Stopping %s %s" % (self.prog, self.app_args))
        self.proc.terminate()
        self.proc = None
Example 11
def create_playlist(youtube, title, privacy):
    """Create a playlist by title and return its ID"""
    debug("Creating playlist: {0}".format(title))
    response = youtube.playlists().insert(part="snippet,status", body={
        "snippet": {
            "title": title,
        },
        "status": {
            "privacyStatus": privacy,
        }
    }).execute()
    return response.get("id")
Example 12
def add_video_to_existing_playlist(youtube, playlist_id, video_id):
    """Add video to playlist (by identifier) and return the playlist ID."""
    debug("Adding video to playlist: {0}".format(playlist_id))
    return youtube.playlistItems().insert(part="snippet", body={
        "snippet": {
            "playlistId": playlist_id,
            "resourceId": {
                "kind": "youtube#video",
                "videoId": video_id,
            }
        }
    }).execute()
Example 13
def resolveDatacenter(client, dc):
    dc = lib.findInList(client['Location'].getDatacenters(mask="groups"),
                        'name', dc)
    lib.debug(json.dumps(dc))
    dc['locationGroupIDs'] = [(i['id']) for i in dc['groups'] if
                              i['locationGroupType']['name'] == 'PRICING']
    regionalGroups = [(i['id']) for i in dc['groups'] if
                      i['locationGroupType']['name'] == 'REGIONAL']
    if len(regionalGroups) != 0:
        dc['regionalGroupID'] = regionalGroups[0]
    del dc['groups']
    return dc
Example 14
def print_output(score):
    # check if no outputs requested
    if 'output' not in score:
        return

    output = get_templated_string(score['output']['template'], score)

    lib.debug(output)
    if 'result' in score['output']:
        with open(score['output']['result'], 'w') as f:
            f.write(output)
    else:
        print(output)
Example 15
 def run(self):
     while not self.stopped:
         if self.queue.empty():
             self.stop()
             break
         try:
             self.async_item = self.queue.get()
             self.item_number += 1
             self.async_item.run()
             self.queue.task_done()
             self.async_item = None
         except Exception, exception:
             debug(str(exception))
             self.stop()
Example 16
def validate_provision_parms_passed(scoretext, parmvalues):
    regex = re.compile(r'{{\s*parameters\.([a-zA-Z0-9-_]+)', re.MULTILINE)
    parmnames = re.findall(regex, scoretext)
    lib.debug(parmnames)
    lib.debug(parmvalues)
    missingparms = []
    for parm in parmnames:
        if parm not in parmvalues:
            missingparms.append(parm)

    if len(missingparms):
        return "missing inputs: "+str(missingparms)

    return None
Example 17
    def handle_msg(self, msg):
        debug(msg)

        # Determine the message nature
        # TODO: Create message constants
        if msg == "restart":
            self.restart()
        elif msg == "stop":
            self.stop()
        elif msg == "start":
            self.start()
        elif msg == "ping":
            self.ping()
        else:
            warn("Unknown command: %s" % msg)
Example 18
def run():
    # Create and read all the settings
    settings = parse_settings(sys.argv[1:])
    debug(settings)

    colour_print(BLUE_COLOUR, "[pymon] Pymon %s" % settings["version"])
    colour_print(BLUE_COLOUR, "[pymon] Watching %s" % settings["path"])

    # Create an event handler for file system changes
    event_handler = PymonEventHandler(
            settings["regexes"], 
            settings["ignores"], 
            True, False)

    # Create a file system observer
    observer = Observer()
    observer.schedule(
            event_handler, 
            settings["path"], 
            recursive=True)
    observer.start()

    # Create and register a listener for the user's application
    listener = PymonListener(
            settings["prog"], 
            settings["app_args"])
    transport.add_listener(listener)
    listener.start()

    try:
        # Loop for user input
        while True:
            user_input = raw_input("")

            if user_input == "rs" or user_input == "restart":
                transport.emit("restart")
            elif user_input == "stop":
                raise Exception

    except (KeyboardInterrupt, Exception) as e:
        # Terminate all applications and exit
        transport.emit("stop")
        observer.stop()
        observer.join()
Example 19
 def popup(self, node_id: str, ref_id: str):
     """ ! This method only deletes ONE ref relation in _ref_by_dict. """
     # remove one ref_by in ref_by_dict.
     ref_dict = self._ref_by_dict.get(node_id)
     # only if there's one item in that dict,
     # the ref_id can be set to None.
     # if ref_id is None, then del its whole dict.
     if ref_id is None:
         ref_id, ref_item = ref_dict.popitem()
     # else remove that ref item in dict.
     else:
         ref_item = ref_dict.get(ref_id)
         ref_dict.pop(ref_id)
     del ref_item.ref_to
     debug(f'[DEL REF] at node: {node_id} arg: {ref_id}')
     # check dict empty.
     if len(ref_dict) == 0:
         self._ref_by_dict.pop(node_id)
         self._ref_by_vn_dict.pop(node_id)
Example 20
    def after_first_redo(self):
        # delete the io & ref relation here.
        edge_type = self.edge.gr_edge.type
        dst_gr_node_id = self.edge.end_item.gr_node.id_str
        src_gr_node_id = self.edge.start_item.gr_node.id_str
        dst_model = self.args.get(dst_gr_node_id)
        src_model = self.args.get(src_gr_node_id)

        if edge_type == EDGE_DIRECT:
            if self.edge.end_item.gr_name == 'Model':
                dst_model.io_semaphore.popup(src_gr_node_id)
            else:
                pass
        elif edge_type == EDGE_CURVES:
            dst_item = dst_model.item(self.edge.ref_box, 1)
            src_model.rb_semaphore.popup(dst_gr_node_id, dst_item.id_str)

        self.gr_scene.removeItem(self.edge.gr_edge)
        self.src.pop(self.edge.id)
        debug(f"*[EDGE {self.src} DEL] < {self.edge}")
Example 21
def waitForOrderCompletion(orderid, client):
    ''' returns id of billingitem for the finished order '''
    boclient = client['Billing_Order']
    mask = "orderTopLevelItems.billingItem.provisionTransaction"
    while True:
        order = boclient.getObject(id=orderid, mask=mask)
        lib.debug(order)
        # if transaction finished, break
        pt = 'provisionTransaction'
        otli = 'orderTopLevelItems'
        bi = 'billingItem'
        ts = 'transactionStatus'
        if otli in order and bi in order[otli][0]:
            if pt in order[otli][0][bi]:
                if order[otli][0][bi][pt][ts]["name"] == 'COMPLETE':
                    break
        # else retry in 30s
        lib.debug("sleeping 30s")
        time.sleep(30)

    return order['orderTopLevelItems'][0]['billingItem']['id']
Example 22
def fetchPropDataGenerator(props_df):
    """
    A generator that returns (yields) all valid properties row by row.
    """
    for index, row in props_df.iterrows():
        try:
            # There are some properties missing property codes :/
            code = row['propertyCode']
            # Missing data
            if not code:
                code = ''

            code = code.strip()
            if code != '':
                yield fetchPropData(row)
            else:
                debug('Missing property code:', row )
        except IOError:
            # Some properties return 404 :)
            debug('Not found: ', code)
            pass
Example 23
def get_seasonal_anomalities():
    # Since it takes quite a while to generate this, cache the result once it's done
    try:
        df = pd.read_json(open_cached(CACHE_SEASONAL))
        if config.DEBUG:
            debug('Returning cached seasonal data from',
                  cached_name(CACHE_SEASONAL))
        return df
    except FileNotFoundError:
        # Ok, need to do the gathering
        pass

    # Text file containing file names for the temperature prognosis images.
    # The format is 'SeasonalAnomalies_T2m_<YYYYMMDD>_m<N>.png', where YYYYMMDD is the
    # prognosis date and N is the month number: N=1 is the prognosis date's own month,
    # so for a 20200901 prognosis N=1 is September and N=7 is March of the next year.
    f = open_url(f'{SEASONAL_BASE_URL}/T2m_index.txt')
    lines = f.read().split('\n')
    lines.remove('')  # Last entry
    anomalities = dict()  # date -> list

    def image_data(fn):
        m = RE_IMAGE.match(fn)
        if m:
            day = datetime.datetime.strptime(m.group('date'), '%Y%m%d')
            n = int(m.group('n')) - 1  # indexing starts from 1 in file :/
            if day not in anomalities:
                anomalities[day] = [np.NaN for _ in range(7)]
            return (day, n, fn)

    # Run in parallel
    pool = mp.Pool()
    temps = pool.map(process_image, [image_data(fn) for fn in lines])
    # Remap
    for (d, n, t) in temps:
        anomalities[d][n] = t
    df = pd.DataFrame([(k, ) + tuple(v) for (k, v) in anomalities.items()],
                      columns=['date'] + [f'month{n}' for n in range(7)])
    df.set_index('date', inplace=True)
    df.to_json(open_cached(CACHE_SEASONAL, mode='w'))
    return df
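RE_IMAGE is not shown in this excerpt; a pattern consistent with the filename format described in the comment above, and with the named groups the code reads ('date' and 'n'), could be sketched as:

import re

RE_IMAGE = re.compile(r'SeasonalAnomalies_T2m_(?P<date>\d{8})_m(?P<n>\d+)\.png')

m = RE_IMAGE.match('SeasonalAnomalies_T2m_20200901_m1.png')
print(m.group('date'), m.group('n'))  # -> 20200901 1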
Example 24
    def create_genesis_block(self):
        txin = []
        prev = OutPoint(b"", 0xFFFFFFFF)
        script = CompressInt(486604799).tobytes() + CompressInt(4).tobytes(
        ) + VarStr(
            b"The Times 03/Jan/2009 Chancellor on brink of second bailout for banks"
        ).tobytes()
        txin.append(TxIn(prev, VarStr(script), 0xFFFFFFFF))

        txout = []
        script = VarStr(
            lib.hexstr2bytes(
                "04678afdb0fe5548271967f1a67130b7105cd6a828e03909a67962e0ea1f61deb649f6bc3f4cef38c4f35504e51ec112de5c384df7ba0b8d578a4c702b6bf11d5f"
            )).tobytes() + struct.pack("<B", 0xAC)
        txout.append(TxOut(50 * config.coin, VarStr(script)))

        txs = []
        txs.append(Tx(1, 0, txin, txout, [], 0))

        merkle = lib.merkle_root(txs)
        if merkle != lib.hexstr2bytes(
                "4a5e1e4baab89f3a32518a88c31bc87f618f76673e2cc77ab2127b7afdeda33b"
        )[::-1]:
            lib.err("create genesis block failed: merkle root err {}",
                    merkle.hex())
            return False

        header = BlockHeader(1, b"", merkle, 1231006505, 0x1d00ffff,
                             2083236893)
        key = lib.double_hash(header.tobytes())
        if key != lib.hexstr2bytes(
                "000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f"
        )[::-1]:
            lib.err("create genesis block failed: hash err {}", key.hex())
            return False

        b = Block(header, txs)
        self.db.add(key, b.tobytes())
        lib.debug("genesis block created")
        return True
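lib.double_hash and lib.merkle_root are not shown here; for a block containing a single transaction the merkle root is simply the double SHA-256 of that transaction's serialization, so a generic stand-in (an assumption, not the project's own helper) would be:

import hashlib

def double_sha256(data):
    # Bitcoin-style double SHA-256 over raw bytes.
    return hashlib.sha256(hashlib.sha256(data).digest()).digest()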
Example 25
def dns_update(score, client, sl_storage, containername, configuration):

    dns_m = SoftLayer.DNSManager(client)
    zones = dns_m.list_zones()
    score['resources']['dns'] = normalize(score['resources']['dns'], score)
    lib.debug(score['resources']['dns'])
    for domain, zonedef in score['resources']['dns'].iteritems():
        zone = lib.findInList(zones, 'name', domain)
        if not zone:
            raise Exception("no zone found for {}".format(domain))
        for entry in zonedef:
            group = entry.split(".")[1]
            lib.debug("reg vms in group {} to dns zone {}".format(group,
                                                                  domain))
            for vm in score['resources']['serverinstances'][group]['vms']:

                if not vm['domain'].endswith(domain):
                    sys.stderr.write("{}.{} not in zone {}\n".format(
                            vm['hostname'], vm['domain'], domain))
                    break

                # strip out root domain to register with DNS as a host
                #   record w/in the root domain

                record = "{}.{}".format(vm['hostname'], vm['domain'])
                record = record[:-(len(domain)+1)]
                lib.debug(dns_m.create_record(zone_id=zone['id'],
                                              record=record,
                                              record_type='A',
                                              data=vm['primaryIpAddress'],
                                              ttl=900))
                lib.save_state(sl_storage, containername,
                               "dns/{}/{}".format(domain, record),
                               vm['primaryIpAddress'])
Example 26
    def after_first_redo(self):
        edge_type = self.edge.gr_edge.type
        dst_gr_node_id = self.edge.end_item.gr_node.id_str
        src_gr_node_id = self.edge.start_item.gr_node.id_str
        dst_model = self.args.get(dst_gr_node_id)
        src_model = self.args.get(src_gr_node_id)

        if edge_type == EDGE_DIRECT:
            if self.edge.end_item.gr_name == 'Model':
                dst_model.io = (src_gr_node_id, self.edge.io_type,
                                src_model.var_name_item)
            else:
                pass
        elif edge_type == EDGE_CURVES:
            dst_item = dst_model.item(self.edge.ref_box, 1)
            dst_item.ref_to = (src_gr_node_id, src_model.var_name_item)
            src_model.ref_by = (dst_gr_node_id, dst_item,
                                dst_model.var_name_item)

        self.gr_scene.addItem(self.edge.gr_edge)
        self.src[self.edge.id] = self.edge
        debug(f"*[EDGE {self.src} ADD] < {self.edge}")
Example 27
def order_groups(score):
    roots = []
    branches = {}
    for groupname, grpdef in score['resources']['serverinstances'].iteritems():
        if 'dependson' not in grpdef:
            roots.append(groupname)
        else:
            branchroot = grpdef['dependson']
            if branchroot in branches:
                branch = branches[branchroot]
            else:
                branch = []
                branches[branchroot] = branch
            branch.append(groupname)

    orderedlist = []
    for root in roots:
        orderedlist = orderedlist + get_nodes_till_leaf(branches, root)

    lib.debug(orderedlist)

    return orderedlist
Example 28
    def undo(self):
        # reconnect the io & ref relation here.
        edge_type = self.edge.gr_edge.type
        dst_gr_node_id = self.edge.end_item.gr_node.id_str
        src_gr_node_id = self.edge.start_item.gr_node.id_str
        dst_model = self.args.get(dst_gr_node_id)
        src_model = self.args.get(src_gr_node_id)

        if edge_type == EDGE_DIRECT:
            if self.edge.end_item.gr_name == 'Model':
                dst_model.io = (src_gr_node_id, self.edge.io_type,
                                src_model.var_name_item)
            else:
                pass
        elif edge_type == EDGE_CURVES:
            dst_item = dst_model.item(self.edge.ref_box, 1)
            dst_item.ref_to = (src_gr_node_id, src_model.var_name_item)
            src_model.ref_by = (dst_gr_node_id, dst_item,
                                dst_model.var_name_item)

        self.gr_scene.addItem(self.edge.gr_edge)
        self.src[self.edge.id] = self.edge
        debug(f"*[EDGE {self.src} DEL] > {self.edge}")
Example 29
def get_monthly_energy_data(buildingCode, reporting_group, start_time,
                            end_time):
    """
  Reporting groups: 'Electricity', 'Heat', 'Water', 'DistrictCooling'
  throws: HTTPError if resource does not exist
  """

    if reporting_group not in VALID_REPORTING_GROUPS:
        raise ValueError(
            f'reporting_group should be one of {" ".join(VALID_REPORTING_GROUPS)}'
        )
    resource = 'https://helsinki-openapi.nuuka.cloud/api/v1.0/EnergyData/Monthly/ListByProperty?Record=LocationName'

    params = urlparse.urlencode({
        'Record': 'BuildingCode',
        'SearchString': buildingCode,
        'ReportingGroup': reporting_group,
        'StartTime': start_time,
        'EndTime': end_time
    })
    search_url = f'{ENERGY_RESOURCE}?{params}'
    debug(f'Requesting {search_url}')
    ret = pd.read_json(open_url(search_url))
    return ret
Example 30
    def reset_goal(self, record):
        """
        Reset a user's goal, inserting the current values into the history table and updating their current record.
        @param record:
        @return:
        """
        # Add the current values to a new record in the history table.
        self.__db.insert('user_goals_history', {
            'user': record['user'],
            'type': record['type'],
            'date': self.get_previous_goal_date(record['type']),
            'goal': record['goal'],
            'result': record['current'],
            'completed': record['completed']
        })

        # Calculate the next reset time for this goal.
        next = self.calculate_user_reset_time(record['type'])

        # Print out to the bot log what is happening.
        lib.debug('Setting next ' + record['type'] + ' goal reset time for ' + str(record['user']) + ' to: ' + str(next))

        # Update the goal record with the new reset time, resetting the completed and current values to 0.
        self.__db.update('user_goals', {'completed': 0, 'current': 0, 'reset': next}, {'id': record['id']})
Example 31
def main():

    # delete user if it exists.
    for user in cm.identity.users.list():
        if user.name == Q1.username:
            debug("Deleting user: {0}".format(user))
            user.delete()

    # delete project if it exists.
    for project in cm.identity.projects.list():
        if project.name == Q1.project:
            debug("Deleting project: {0}".format(project))
            project.delete()

    # delete role if it exists.
    for role in cm.identity.roles.list():
        if role.name == Q1.role:
            debug("Deleting role: {0}".format(role))
            role.delete()
Example 32
 def edge_drag_end(self, item, event):
     debug(f"[stop dragging edge] => {self.drag_edge} at {item}")
     new_edge = KMBEdge(self.gr_scene.scene, self.drag_start_item.node,
                        item.node, self.edge_type)
     # remove the dragging dash edge.
     self.drag_edge.remove()
     self.drag_edge = None
     # saving for the new edge.
     saving_state = new_edge.store()
     # -1 (Invalid), 1 (Valid and display)
     if saving_state == -1:  # fail to add new edge.
         self.gr_scene.removeItem(new_edge.gr_edge)
         debug("[dropped] invalid connection.")
     else:  # add new edge successfully.
         debug(f"[connect] {self.drag_start_item} ~ {item} => {new_edge}")
         # only ref edge is able to pop up right menu of the end item,
         # so now you're able to pick up which arg it ref to.
         if self.edge_type == EDGE_CURVES:
             self._curve_edge_drag_end(event, new_edge)
         # for Model, show its input and output in right menu.
         if self.edge_type == EDGE_DIRECT:
             self._direct_edge_drag_end(event, item, new_edge)
Example 33
def get_seasonal_temp_anomaly_from(fn):
    """
    Fetch the given filename from the repository and figure out the seasonal anomaly temperature
    for Helsinki.
    """
    with open_url(f'{SEASONAL_BASE_URL}/{fn}', 'rb') as f:
        img = mpimg.imread(f)
        if img.shape != (4810, 6260, 4):
            raise Exception('Image shape has changed, must re-calibrate')

        tempmap = get_temp_map(img)
        rgb = to_rgb(img[HELSINKI_COORD[1], HELSINKI_COORD[0]])
        if rgb in tempmap:
            return tempmap[rgb]
        if rgb not in tempmap:
            # Maps can have a gray border around differing temperature zones, fuzz around it before giving up
            for fuzz_x in range(
                    1, 21
            ):  # Borders are about size 10; so this should be enough (+-20)
                for fuzz_y in range(0, 21):
                    # Search in all directions
                    for mult in [(1, 1), (1, -1), (-1, 1), (-1, -1)]:
                        rgb = to_rgb(
                            img[HELSINKI_COORD[1] + (mult[0] * fuzz_x),
                                HELSINKI_COORD[0] + (mult[1] * fuzz_y)])
                        if rgb in tempmap:
                            return tempmap[rgb]
                        elif rgb in TEMP_MAP:
                            return TEMP_MAP[rgb]

        debug('Temperature not found in tempmap?')
        debug('Color map:', TEMP_MAP)
        if config.DEBUG:
            dbgname = 'debug_anomality_image.png'
            debug(f'Saving a copy of the problematic image as "{dbgname}"')
            mpimg.imsave(dbgname, img)

        raise Exception(f'Color {rgb} not found in mapping?')
    raise Exception('Fail')
Example 34
 def set_select_mode(self):
     self.mode = MOUSE_SELECT
     self.setDragMode(QGraphicsView.RubberBandDrag)
     self.setCursor(Qt.ArrowCursor)
     debug("Now is <select> mode")
Example 35
 def create_edge(self, edge):
     self.dump_edge(edge)
     self.push(
         CreateEdgeCmd(edge, self._edges, self.args_menu.edit_model,
                       self.gr_scene))
     debug(f"*[EDGE {len(self._edges)}] + {edge}")
Example 36
def deploy_group(groupname, groupdef, clustername, score, client, sl_storage,
                 configuration, containername):
    vs_config = groupdef.copy()
    del vs_config['count']
    del vs_config['servertype']

    if 'vlan' in vs_config:
        del vs_config['vlan']

    if 'dependson' in vs_config:
        del vs_config['dependson']

    if 'keyname' in vs_config:
        sshkey_manager = SoftLayer.SshKeyManager(client)
        keys = sshkey_manager.list_keys(vs_config['keyname'])
        if len(keys) == 0:
            raise Exception("Key {} not found".format(vs_config['keyname']))
        vs_config['ssh_keys'] = [keys[0]['id']]
        del vs_config['keyname']

    vs_config['datacenter'] = score['datacenter']['name']

    vs_config.update(normalize(
            score['mappings']['servertypes'][groupdef['servertype']],
            score))

    if 'script' in vs_config or 'userdata' in vs_config:
        vs_config['post_uri'] = "https://gist.githubusercontent.com/suppandi/"\
                "92160b055d74662a1deb/raw/"\
                "5770507d3bb0a3f05acd9a5bb2b03fe65bb00c5d/script.sh"

        newuserdata = ""
        if 'userdata' in vs_config:
            newuserdata = vs_config['userdata']

        if 'script' in vs_config:
            regex = re.compile("^(http|https)://.*")
            result = regex.match(vs_config['script'])
            # if a url, just use it
            if result:
                vs_config['post_uri'] = vs_config['script']
            else:
                scripttext = open(vs_config['script'], "r").read()
                lib.save_state_script(sl_storage, containername,
                                      vs_config['script'],
                                      scripttext)
                newuserdata = newuserdata + \
                    "\nSCRIPTSTARTSCRIPTSTARTSCRIPTSTART\n" + scripttext

            del vs_config['script']

        vs_config['userdata'] = newuserdata

    if 'vlan' in groupdef:
        for vlanname in groupdef['vlan']:
            vlan = score['resources']['vlans'][vlanname]
            if vlan['type'] == 'public':
                vs_config['public_vlan'] = vlan['id']
            else:
                vs_config['private_vlan'] = vlan['id']

    if 'autoscale' in groupdef:
        autoscale_provision_vms(vs_config, groupname, groupdef, clustername,
                                score, client, sl_storage, configuration,
                                containername)
    else:
        manual_provision_vms(vs_config, groupname, groupdef, clustername,
                             score, client, sl_storage, configuration,
                             containername)
    lib.debug(groupdef)
Example 37
def findPriceIdsForDatacenter(prices, datacenter):
    lib.debug(prices)
    lib.debug(datacenter)
    return lib.findInList(prices, 'locationGroupId',
                          datacenter['locationGroupIDs'])
Example 38
 def create_node(self, node):
     self.dump_node(node)
     self.counter.update(node.gr_name)
     self.push(CreateNodeCmd(node, self._nodes, self.gr_scene))
     debug(f"*[NODE {len(self._nodes)}] + {node}")
Example 39
 def set_note_mode(self):
     self.mode = MOUSE_NOTE
     self.has_finished_editing = False
     self.setCursor(Qt.IBeamCursor)
     debug("Now is <note> mode")
Example 40
 def set_edge_direct_mode(self):
     self.mode = NODE_CONNECT
     self.edge_type = EDGE_DIRECT
     self.setCursor(Qt.CrossCursor)
     debug("Now is <connect-direct> mode")
Example 41
# config
debug = 0 # 1 = output logs. 0 = do not output logs. 

# import things
import sys, lib, os
lib.debug(debug)

# take in input
category = sys.argv[1]
content = sys.argv[2]

if len(category) == 0:
	print "Category missing. Usage: \npython categorize.py [category] [content]"
	sys.exit()
elif len(content) == 0:
	print "Content missing. Usage: \npython categorize.py [category] [content]"
	sys.exit()

# get the files for the type of categorization they want
categoriesRaw = {}
for file in [f for f in os.listdir("categories/" + category) if (os.path.isfile("categories/" + category + "/" + f) and f[-4:] == ".txt" and f != "stopwords.txt")]:
	categoriesRaw[file[0:-4]] = open("categories/" + category + "/" + file).read()

# build out categories as bag of words
categories = lib.prepareCategories(categoriesRaw)

# apply stop words
f = open("categories/" + category + "/stopwords.txt", "r")
if f:
	stopwords = set(lib.tokenize(f.read()))
	for key in categories:
Example 42
def main():

    # Create project.
    try:
        project = cm.identity.projects.create(name=Q1.project, domain='Default')
    except keystoneclient.exceptions.Conflict as e:
        project = None
        if e.response.status_code == 409:
            for _project in cm.identity.projects.list():
                if _project.name == Q1.project:
                    project = _project
                    break
        else:
            raise
    finally:
        if project is None:
            raise Exception('Unable to find/create project.')
        else:
            debug("Found/created project: {0}".format(project))


    # Create user.
    try:
        user = cm.identity.users.create(name=Q1.username,
                                        email=Q1.email,
                                        project=project, domain='Default')
    except keystoneclient.exceptions.Conflict as e:
        user = None
        if e.response.status_code == 409:
            for _user in cm.identity.users.list():
                if _user.name == Q1.username and \
                    _user.email == Q1.email:
                    user = _user
                    break
        else:
            raise
    finally:
        if user is None:
            raise Exception('Unable to find/create user.')
        else:
            debug("Found/created user: {0}".format(user))

    # Create role.
    try:
        role = cm.identity.roles.create(name=Q1.role)
    except keystoneclient.exceptions.Conflict as e:
        role = None
        if e.response.status_code == 409:
            for _role in cm.identity.roles.list():
                if _role.name == Q1.role:
                    role = _role
                    break
        else:
            raise
    finally:
        if role is None:
            raise Exception('Unable to find/create role.')

    # Add user and project to role.
    role_grant = cm.identity.roles.grant(role=role, user=user, project=project)
    for ra in cm.identity.role_assignments.list(role=role):
        try:
            if ra.user['id'] == user.id and \
                ra.scope['project']['id'] == project.id:
                debug("Role granted access to project: {0}".format(ra))
                break
        except (KeyError, AttributeError):
            pass
Example 43
 def undo(self):
     self.gr_scene.addItem(self.node.gr_node)
     self.src[self.node.id] = self.node
     debug(f"*[NODE {len(self.src)} DEL] > {self.node}")
Example 44
 def after_first_redo(self):
     self.gr_scene.removeItem(self.node.gr_node)
     self.src.pop(self.node.id)
     debug(f"*[NODE {len(self.src)} DEL] < {self.node}")
Example 45
 def undo(self):
     self.gr_scene.removeItem(self.node.gr_node)
     self.src.pop(self.node.id)
     debug(f"*[NODE {len(self.src)} ADD] > {self.node}")
Example 46
 def after_first_redo(self):
     self.gr_scene.addItem(self.node.gr_node)
     self.src[self.node.id] = self.node
     debug(f"*[NODE {len(self.src)} ADD] < {self.node}")
Example 47
 def undo(self):
     self.gr_scene.addItem(self.note)
     self.src[self.note.id] = self.note
     debug(f"*[NOTE {len(self.src)} DEL] > {self.note}")
Example 48
 def set_movable_mode(self):
     self.mode = MOUSE_MOVE
     self.setDragMode(QGraphicsView.ScrollHandDrag)
     debug("Now is <move> mode")
Example 49
 def set_delete_mode(self):
     self.mode = NODE_DELETE
     del_icon = QPixmap(icon['TRASH']).scaled(32, 32)
     self.setCursor(QCursor(del_icon))
     debug("Now is <delete> mode")
Example 50
def provision_loadbalancers(score, client, sl_storage, containername,
                            configuration):

    if 'loadbalancers' not in score['resources']:
        return

    lbmgr = SoftLayer.LoadBalancerManager(client)
    all_pkgs = lbmgr.get_lb_pkgs()
    # lib.debug([ (i['capacity']) for i in all_pkgs ])

    for lbname, lbconfig in score['resources']['loadbalancers'].iteritems():
        lbconfig = normalize(lbconfig, score)
        score['resources']['loadbalancers'][lbname] = lbconfig
        # first find lb packages with given connection support
        lbs_available = lib.findInList(all_pkgs,
                                       'capacity',
                                       str(lbconfig['connections']))
        if lbs_available is None:
            msg = 'no loadbalancer option found with capacity {}'
            raise Exception(msg.format(lbconfig['connections']))

        # if only one option available use it...
        #  otherwise do some more filtering
        if isinstance(lbs_available, list):
            # find the requested ssl support
            if 'ssl-offload' in lbconfig and lbconfig['ssl-offload']:
                lbs_available = lib.findInList(
                                        lbs_available, 'keyName', 'SSL',
                                        (lambda field, val: val in field))
            else:
                lbs_available = lib.findInList(
                                        lbs_available, 'keyName', 'SSL',
                                        (lambda field, v: v not in field))

            # lib.debug(lbs_available)

        # build a list to walk through
        if not isinstance(lbs_available, list):
            lbs_available = [lbs_available]

        # find prices for the current datacenter
        priceitems = []
        for lbitem in lbs_available:
            lib.debug(lbitem)
            priceitems.append(findPriceIdsForDatacenter(lbitem['prices'],
                                                        score['datacenter']))

        # sort the priceitems and pick the inexpensive one
        priceitems = sorted(priceitems, key=lambda p: float(p['recurringFee']))

        lib.debug(json.dumps(priceitems, indent=4))
        # do the create now
        lib.debug(priceitems[0])
        lib.debug(priceitems[0]['id'])
        order = lbmgr.add_local_lb(priceitems[0]['id'],
                                   score['datacenter']['name'])
        lib.debug(order)
        # wait for some time for order to be fulfilled
        billingItem = waitForOrderCompletion(order['orderId'], client)
        lib.debug(billingItem)
        # now list all load balancers
        all_lbs = client['Account'].getAdcLoadBalancers(mask='billingItem')
        provisioned_lb = lib.findInList(all_lbs, 'billingItem', billingItem,
                                        (lambda field,
                                            val: field['id'] == val))
        lib.debug(provisioned_lb)
        lib.save_state(sl_storage, containername,
                       "loadbalancers/{}/id".format(lbname),
                       provisioned_lb['id'])
        lbconfig['id'] = provisioned_lb['id']
        objtype = 'Network_Application_Delivery_Controller_LoadBalancer_'\
                  'Routing_Type'
        routing_types = client[objtype].getAllObjects()
        objtype = 'Network_Application_Delivery_Controller_LoadBalancer_'\
                  'Routing_Method'
        routing_methods = client[objtype].getAllObjects()
        for groupname, groupconfig in lbconfig['service-groups'].iteritems():
            lib.debug(groupconfig)
            routingtype = lib.findInList(routing_types, 'name',
                                         groupconfig['type'].upper())
            lib.debug(routingtype)
            routingmethod = lib.findInList(routing_methods, 'keyname',
                                           groupconfig['method'].upper())
            lib.debug(routingmethod)
            lib.debug(lbmgr.add_service_group(provisioned_lb['id'],
                                              groupconfig['allocation%'],
                                              groupconfig['port'],
                                              routingtype['id'],
                                              routingmethod['id']))
            # refresh lb info
            objtype = 'Network_Application_Delivery_Controller_LoadBalancer'\
                      '_VirtualIpAddress'
            lb = client[objtype].getObject(id=provisioned_lb['id'],
                                           mask="virtualServers.serviceGroups")
            groupconfig['id'] = lib.findInList(lb['virtualServers'], 'port',
                                               groupconfig['port'])['id']
Example 51
 def set_edge_curve_mode(self):
     self.mode = NODE_CONNECT
     self.edge_type = EDGE_CURVES
     self.setCursor(Qt.CrossCursor)
     debug("Now is <connect-curve> mode")
Example 52
def normalize(aDict, score):
    str_form = yaml.dump(aDict, default_flow_style=False)
    lib.debug(str_form)
    return yaml.load(get_templated_string(str_form, score))
Example 53
 def create_note(self, note):
     self.dump_note(note)
     self.push(CreateNoteCmd(note, self._notes, self.gr_scene))
     debug(f"*[NOTE {len(self._notes)}] + {note}")
Example 54
 def on_any_event(self, event):
     # Send restart message whenever file system changes
     debug("Change detected")
     transport.emit("restart")
Example 55
 def remove_note(self, note):
     self._notes.pop(note.id)
     self.push(DeleteNoteCmd(note, self._notes, self.gr_scene))
     debug(f"*[NOTE {len(self._notes)}] - {note}")

Example 56
def fromRfcFormat(rfc_format):
    if rfc_format is None:
        dt = dateutil.parser.parse(_ANCIENT)
        assert isinstance(dt, datetime.datetime)
        assert dt.tzinfo is not None
        return dt
    else:
        assert isinstance(rfc_format, str)
        dt = dateutil.parser.parse(rfc_format)
        assert isinstance(dt, datetime.datetime)
        assert dt.tzinfo is not None
        return dt


def toRfcFormat(dt):
    if dt is None:
        return _ANCIENT
    assert isinstance(dt, datetime.datetime)
    assert dt.tzinfo is not None
    return dt.strftime("%a, %d %b %Y %H:%M:%S %Z")


if __name__ == "__main__":
    before = getNow()
    rfc_format = toRfcFormat(before)
    after = fromRfcFormat(rfc_format)
    assert before == after
    lib.debug("test finished")