def prepare_articles(names):
    """Tag articles for the given entity names and store them in the cache.

    Clears the raw-article scratch directory, writes one raw article file per
    name, runs the external NLP tools over them, then pickles the processed
    articles into the article cache.

    :param names: list of entity names to prepare articles for
    """
    # wipe leftover raw article files from a previous run
    for f in glob.glob(join(raw_articles_path, "*.txt*")):
        os.remove(f)
    found = False
    link_dictionaries = {}
    for i, name in enumerate(names):
        try:
            # probe the cache; raises ArticleNotFoundError on a miss
            get_article(name)
        except ArticleNotFoundError:
            try:
                article, link_dictionary = get_raw_article(name)
                link_dictionaries[i] = link_dictionary
            except ArticleNotFoundError:
                continue
        # NOTE(review): when get_article() succeeds (cache hit) `article` is
        # either unbound (first iteration) or stale from a previous iteration;
        # confirm cached names are really meant to fall through to here.
        found = True
        # truncate to the configured number of sentences (one per line)
        article = '\n'.join(article.split('\n')[:article_sentence_limit])
        out = copen(join(raw_articles_path, '%d.txt' % i), 'w', 'utf-8')
        print >> out, article
    if found:
        articles = lt.run_nlptools(link_dictionaries)
        # raw files are no longer needed once nlptools has consumed them
        for f in glob.glob(join(raw_articles_path, "*.txt*")):
            os.remove(f)
        # save processed articles
        for i, article in articles.iteritems():
            Pickler.store(article, articles_cache_path % names[i])
def prepare_articles(names):
    """Tag articles for the given entity names and save them in the cache.

    Empties the raw-article directory, writes a raw article file for each
    name, runs the external NLP pipeline over the files and pickles each
    processed article under its entity name.

    :param names: list of entity names to prepare articles for
    """
    # remove stale raw article files before writing new ones
    for f in glob.glob(join(raw_articles_path, "*.txt*")):
        os.remove(f)
    found = False
    link_dictionaries = {}
    for i, name in enumerate(names):
        try:
            # cache probe; ArticleNotFoundError signals a miss
            get_article(name)
        except ArticleNotFoundError:
            try:
                article, link_dictionary = get_raw_article(name)
                link_dictionaries[i] = link_dictionary
            except ArticleNotFoundError:
                continue
        # NOTE(review): on a cache hit `article` is unbound or left over from
        # an earlier iteration of this loop -- verify this path is intended.
        found = True
        # keep only the first article_sentence_limit sentences (one per line)
        article = '\n'.join(article.split('\n')[: article_sentence_limit])
        out = copen(join(raw_articles_path, '%d.txt' % i), 'w', 'utf-8')
        print >>out, article
    if found:
        articles = lt.run_nlptools(link_dictionaries)
        # scratch files can be deleted once nlptools has run
        for f in glob.glob(join(raw_articles_path, "*.txt*")):
            os.remove(f)
        # save processed articles
        for i, article in articles.iteritems():
            Pickler.store(article, articles_cache_path % names[i])
def __init__(self, addon_name, params):
    """Initialise the parameter parser with its URL vocabulary.

    Defines the keyword, action and window-property name constants used to
    build and parse plugin URLs, then parses *params* into ``self.params``.

    :param str addon_name: The name of the add-on
    :param str params:     The parameters used to start the ParameterParser
    """
    Logger.debug("Parsing parameters from: %s", params)

    # Url Keywords
    self.keywordPickle = "pickle"                          # : Used for the pickle item
    self.keywordAction = "action"                          # : Used for specifying the action
    self.keywordChannel = "channel"                        # : Used for the channel
    self.keywordChannelCode = "channelcode"                # : Used for the channelcode
    self.keywordCategory = "category"                      # : Used for the category
    self.keywordRandomLive = "rnd"                         # : Used for randomizing live items
    self.keywordSettingId = "settingid"                    # : Used for setting an encrypted setting
    self.keywordSettingActionId = "settingactionid"        # : Used for passing the actionid for the encryption
    self.keywordSettingName = "settingname"                # : Used for setting an encrypted settings display name
    self.keywordSettingTabFocus = "tabfocus"               # : Used for setting the tabcontrol to focus after changing a setting
    self.keywordSettingSettingFocus = "settingfocus"       # : Used for setting the setting control to focus after changing a setting
    self.keywordLanguage = "lang"                          # : Used for the 2 char language information
    self.keywordProxy = "proxy"                            # : Used so set the proxy index
    self.keywordLocalIP = "localip"                        # : Used to set the local ip index

    # Url Actions
    self.actionFavourites = "favourites"                   # : Used to show favorites for a channel
    self.actionAllFavourites = "allfavourites"             # : Used to show all favorites
    self.actionRemoveFavourite = "removefromfavourites"    # : Used to remove items from favorites
    self.actionAddFavourite = "addtofavourites"            # : Used to add items to favorites
    self.actionDownloadVideo = "downloadVideo"             # : Used to download a video item
    self.actionPlayVideo = "playvideo"                     # : Used to play a video item
    self.actionUpdateChannels = "updatechannels"           # : Used to update channels
    self.actionListFolder = "listfolder"                   # : Used to list a folder
    self.actionListCategory = "listcategory"               # : Used to show the channels from a category
    self.actionConfigureChannel = "configurechannel"       # : Used to configure a channel
    self.actionSetEncryptionPin = "changepin"              # : Used for setting an application pin
    self.actionSetEncryptedValue = "encryptsetting"        # : Used for setting an application pin
    self.actionResetVault = "resetvault"                   # : Used for resetting the vault
    self.actionPostLog = "postlog"                         # : Used for sending log files to pastebin.com
    self.actionProxy = "setproxy"                          # : Used for setting a proxy

    # Window-property names used to pass state between plugin windows
    self.propertyRetrospect = "Retrospect"
    self.propertyRetrospectChannel = "RetrospectChannel"
    self.propertyRetrospectChannelSetting = "RetrospectChannelSettings"
    self.propertyRetrospectFolder = "RetrospectFolder"
    self.propertyRetrospectVideo = "RetrospectVideo"
    self.propertyRetrospectCloaked = "RetrospectCloaked"
    self.propertyRetrospectCategory = "RetrospectCategory"
    self.propertyRetrospectFavorite = "RetrospectFavorite"
    self.propertyRetrospectAdaptive = "RetrospectAdaptive"

    # determine the query parameters
    self.params = self.__get_parameters(params)
    self.pluginName = addon_name
    # We need a pickler for this instance
    self._pickler = Pickler()
def savelog(self):
    '''(BTool) -> List of String

    Return the current list of backups on the local machine.
    Raises whatever exception Pickler.load() raises if no saves exist.
    '''
    p = Pickler("savelog")
    try:
        return p.load()
    except Exception, err:
        # deliberate pass-through: the caller handles a missing savelog
        raise
def chksave(self):
    """BEditor -> int

    Check for saved user data.

    :return: 1 if a save file exists and can be loaded, 0 otherwise.
    """
    pickle = Pickler(self._pickle_save)
    try:
        # if this succeeds then a save file exists
        pickle.load()
        return 1
    except Exception:
        # narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # still propagate; any load failure means "no usable save"
        return 0
def __init__(self, path):
    """Create a Favourites store rooted at *path*.

    :param str path: The directory in which favourite files are stored.
    """
    self.FavouriteFolder = path
    self.__pickler = Pickler()
    # favourite files are named "<channel-guid>-<item-guid>.xotfav"
    self.__filePattern = "%s-%s.xotfav"
def __init__(self, predicate, sentence_limit=None, confidence_level=0.7):
    """Build and train a sentence classifier for *predicate*.

    :param predicate: predicate (relation) name, possibly URL-quoted camelCase
    :param sentence_limit: max sentences considered per article (None = all)
    :param confidence_level: minimum classifier confidence threshold
    """
    self.predicate = predicate
    # lowercase words of the predicate, e.g. "birthPlace" -> ["birth", "place"]
    self.predicate_words = map(lambda w: w.lower(), split_camelcase(unquote(predicate)))
    self.confidence_level = confidence_level
    self.sentence_limit = sentence_limit
    self.train()
    if save_to_cache:
        # pickle can't save a function, so it's removed before saving
        self.classifier.set_params(v__analyzer=None)
        Pickler.store(self, models_cache_path % ("svmmodel-%s.pkl" % predicate))
        # restore the identity analyzer after the model has been pickled
        self.classifier.set_params(v__analyzer=lambda x: x)
def restore_library(self): #untested ''' restore entire library to its state before changes were made this process can be slow ''' #get data from pickle @lib_backup picklesave = 'lib_backup' pickle = Pickler(picklesave) data = pickle.load() #update each item for item in data: self._zot.update_item(item)
def make_pickle(self):
    """Serialise the collected data and mirror it to 'result.json'.

    Prints a message instead of raising when the data dictionary is empty
    or the output location is missing.
    """
    serializer = Pickler()
    try:
        # an empty dictionary means nothing has been extracted yet
        assert len(self._dict_of_everything.keys()) > 0
        serializer.serialise(self._dict_of_everything)
        with open('result.json', 'w', encoding='utf8') as out_file:
            json.dump(self._dict_of_everything, out_file)
    except FileNotFoundError as err:
        print(err)
    except AssertionError:
        print(
            'Dictionary is empty, try loading then extracting data first')
def __init__(self, predicate, sentence_limit=None, confidence_level=.7):
    """Train a sentence classifier for *predicate* and optionally cache it.

    :param predicate: predicate (relation) name, possibly URL-quoted camelCase
    :param sentence_limit: max sentences considered per article (None = all)
    :param confidence_level: minimum classifier confidence threshold
    """
    self.predicate = predicate
    # split the (unquoted) camelCase predicate into lowercase words
    self.predicate_words = map(lambda w: w.lower(), split_camelcase(unquote(predicate)))
    self.confidence_level = confidence_level
    self.sentence_limit = sentence_limit
    self.train()
    if save_to_cache:
        # pickle can't save a function, so it's removed before saving
        self.classifier.set_params(v__analyzer=None)
        Pickler.store(self, models_cache_path % ('svmmodel-%s.pkl' % predicate))
        # put the identity analyzer back once the model is on disk
        self.classifier.set_params(v__analyzer=lambda x: x)
def dumps(value, **kwargs):
    """Returns a JSON formatted representation of value, a Python object.

    Optionally takes a keyword argument unpicklable. If set to False,
    the output does not contain the information necessary to turn the
    JSON back into Python.

    >>> dumps('my string')
    '"my string"'
    >>> dumps(36)
    '36'
    """
    j = Pickler(unpicklable=__isunpicklable(kwargs))
    return json.dumps(j.flatten(value))
def collect_entities():
    """Map each entity name to the indices of its types in ``entities_types``.

    Loads the mapping from the pickle cache when available; otherwise builds
    it by querying each type and stores it back. Entities with a parenthesised
    disambiguation suffix, e.g. "Foo_(bar)", are also indexed under "Foo".

    :return: defaultdict mapping entity name -> list of type indices
    """
    try:
        return Pickler.load(entities_path)
    except IOError:
        pass  # cache miss -- rebuild below
    entities = defaultdict(list)
    # `type_name` instead of `type` to avoid shadowing the builtin
    for i, type_name in enumerate(entities_types):
        entities_of_type = select_entities_of_type(full_type_name(type_name))
        for entity in entities_of_type:
            entities[entity].append(i)
            if "_(" in entity:
                entities[entity.split("_(")[0]].append(i)
    Pickler.store(entities, entities_path)
    return entities
def collect_entities():
    """Build (or load from cache) the entity-name -> type-index mapping.

    On a cache hit the pickled mapping is returned directly. Otherwise every
    type in ``entities_types`` is queried and each of its entities is indexed
    by the type's position; names containing '_(' are additionally indexed
    under the bare name before the parenthesis.

    :return: defaultdict mapping entity name -> list of type indices
    """
    try:
        return Pickler.load(entities_path)
    except IOError:
        pass  # no cache yet -- build it
    entities = defaultdict(list)
    # renamed from `type` so the builtin is not shadowed
    for i, entity_type in enumerate(entities_types):
        entities_of_type = select_entities_of_type(full_type_name(entity_type))
        for entity in entities_of_type:
            entities[entity].append(i)
            if '_(' in entity:
                entities[entity.split('_(')[0]].append(i)
    Pickler.store(entities, entities_path)
    return entities
def encode(value, **kwargs):
    """Returns a JSON formatted representation of value, a Python object.

    Optionally takes a keyword argument unpicklable. If set to False, the
    output does not contain the information necessary to turn the json
    back into Python.

    >>> encode('my string')
    '"my string"'
    >>> encode(36)
    '36'
    """
    pickler = Pickler(unpicklable=__isunpicklable(kwargs))
    flattened = pickler.flatten(value)
    # second argument False: don't convert the output to ASCII (JL)
    return json.encode(flattened, False)
def OnActionFromContextMenu(self, action):
    """Performs the action from a custom contextmenu

    Arguments:
    action : String - The name of the channel method to call with the
                      currently pickled media item.
    """
    Logger.Debug("Performing Custom Contextmenu command: %s", action)
    item = Pickler.DePickleMediaItem(self.params[self.keywordPickle])
    if not item.complete:
        # context actions need a fully resolved item, so update it first
        Logger.Debug("The contextmenu action requires a completed item. Updating %s", item)
        item = self.channelObject.ProcessVideoItem(item)
        if not item.complete:
            Logger.Warning("UpdateVideoItem returned an item that had item.complete = False:\n%s", item)
    # invoke
    # NOTE(review): `exec` on a string built from `action` -- safe only as
    # long as `action` comes from our own context-menu definitions, never
    # from untrusted input.
    functionString = "returnItem = self.channelObject.%s(item)" % (action,)
    Logger.Debug("Calling '%s'", functionString)
    try:
        exec functionString
    except:
        Logger.Error("OnActionFromContextMenu :: Cannot execute '%s'.", functionString, exc_info=True)
    return
def Add(self, channel, item, actionUrl):
    """ Adds a favourite for a specific channel.

    @param channel:   The channel
    @param item:      The mediaitem
    @param actionUrl: The mediaitem's actionUrl

    The favourite is written as a 4-line text file:
    channel name, item name, action URL (with the pickle replaced by "%s")
    and the pickled item. Returns nothing; re-raises any write error after
    logging it and closing the file.
    """
    Logger.Debug("Adding item %s\nfor channel %s\n%s", item, channel, actionUrl)
    fileName = self.__filePattern % (channel.guid, item.guid)
    filePath = os.path.join(self.FavouriteFolder, fileName)
    pickle = Pickler.PickleMediaItem(item)

    # Just double check for folder existence
    if not os.path.isdir(self.FavouriteFolder):
        os.makedirs(self.FavouriteFolder)

    # replacing the pickle in the actionUrl with "%s" to save space
    actionUrl = actionUrl.replace(pickle, "%s")

    fileHandle = None
    try:
        fileHandle = open(filePath, mode='w')
        fileHandle.write("%s\n%s\n%s\n%s" % (channel.channelName, item.name, actionUrl, pickle))
        fileHandle.close()
    except:
        # log, make sure the handle is closed, then propagate to the caller
        Logger.Error("Error saving favourite", exc_info=True)
        if fileHandle and not fileHandle.closed:
            fileHandle.close()
        raise
    return
def get_sentence_classifier(predicate, sentence_limit=None):
    """Return a sentence classifier for *predicate*, preferring the cache.

    On a cache hit the pickled model is loaded and the analyzer that was
    stripped before pickling is re-attached; on a miss a new classifier
    is trained.
    """
    cache_file = models_cache_path % ("svmmodel-%s.pkl" % predicate)
    try:
        model = Pickler.load(cache_file)
        # re-attach the identity analyzer removed before pickling
        model.classifier.set_params(v__analyzer=lambda x: x)
        return model
    except IOError:
        # no cached model -- train from scratch
        return SentenceClassifier(predicate, sentence_limit)
def get_sentence_classifier(predicate, sentence_limit=None):
    """Fetch a cached sentence classifier for *predicate* or train a new one.

    A cached model has its pickling-stripped analyzer restored before it
    is returned.
    """
    try:
        cached = Pickler.load(models_cache_path % ('svmmodel-%s.pkl' % predicate))
        # the analyzer function is dropped before pickling; put it back
        cached.classifier.set_params(v__analyzer=lambda x: x)
        return cached
    except IOError:
        # cache miss: train a fresh classifier
        return SentenceClassifier(predicate, sentence_limit)
def backup(self):
    '''(BTool) -> NoneType

    Save the entire remote library to a pickle and record the backup
    name in the savelog.
    '''
    # open the connection to the library
    client = zotero.Zotero(self._userData['user_id'],
                           self._userData['user_type'],
                           self._userData['api_key'])
    # fetch everything to back up; the save name is user_id plus the
    # latest item version, which keeps successive backups distinct
    library_items = client.items()
    version = self._userData["user_id"] + str(library_items[-1]["version"])
    # persist the data and register the new backup in the savelog
    Pickler(version).save(library_items)
    self.update_savelog("add", version)
def RemoveFavourite(self):
    """Removes the pickled item in the parameters from the favourites
    and refreshes the favourites list."""
    # remove the item
    item = Pickler.DePickleMediaItem(self.params[self.keywordPickle])
    Logger.Debug("Removing favourite: %s", item)
    f = Favourites(Config.favouriteDir)
    f.Remove(self.channelObject, item)

    # refresh the list (dead trailing `pass` removed)
    self.ShowFavourites(self.channelObject, replaceExisting=True)
def get_candidates(predicate):
    """Return candidate entities for *predicate*, using the pickle cache.

    On a cache miss, picks the most specific predominant type of the
    predicate, selects entities of that type not already in the relation,
    applies predicate-specific exclusion filters, caches and returns the
    result. Returns [] when no type can be determined.
    """
    try:
        return Pickler.load(candidates_cache_path % predicate)
    except IOError:
        pass  # cache miss -- compute below
    types = CandidatesSelector.get_most_specific_types(
        CandidatesSelector.get_predominant_types(predicate))
    if types:
        candidates = select_entities_of_type_not_in_relation(
            types[0], predicate)
        # per-predicate exclusions (administrative-unit predicates)
        if predicate == 'gmina':
            candidates = filter(lambda e: 'Gmina' not in e, candidates)
        if predicate == 'powiat':
            candidates = filter(lambda e: 'Powiat' not in e, candidates)
        if predicate == 'hrabstwo':
            candidates = filter(
                lambda e: 'hrabstwo_miejskie' not in e and 'Hrabstwo' not in e,
                candidates)
        Pickler.store(candidates, candidates_cache_path % predicate)
        return candidates
    else:
        return []
def get_candidates(predicate):
    """Return (and cache) candidate entities for *predicate*.

    Falls back to computing candidates when no cached pickle exists:
    the most specific predominant type is used to select entities not
    yet in the relation, minus predicate-specific exclusions. Returns []
    when the predicate has no usable type.
    """
    try:
        return Pickler.load(candidates_cache_path % predicate)
    except IOError:
        pass  # nothing cached yet
    types = CandidatesSelector.get_most_specific_types(
        CandidatesSelector.get_predominant_types(predicate)
    )
    if types:
        candidates = select_entities_of_type_not_in_relation(
            types[0], predicate
        )
        # predicate-specific exclusion filters
        if predicate == 'gmina':
            candidates = filter(lambda e: 'Gmina' not in e, candidates)
        if predicate == 'powiat':
            candidates = filter(lambda e: 'Powiat' not in e, candidates)
        if predicate == 'hrabstwo':
            candidates = filter(lambda e: 'hrabstwo_miejskie' not in e
                                and 'Hrabstwo' not in e, candidates)
        Pickler.store(candidates, candidates_cache_path % predicate)
        return candidates
    else:
        return []
def restore(self, version):
    '''(BTool, str) -> NoneType

    Replace the entire remote library with the pickled backup *version*.

    WARNING: destructive -- every current item is deleted before the
    backup is uploaded.
    '''
    # NOTE: a pre-restore backup used to be taken here (self.backup())
    # delete all library data
    zot = zotero.Zotero(self._userData['user_id'],
                        self._userData['user_type'],
                        self._userData['api_key'])
    for item in zot.items():
        zot.delete_item(item)
    # add old library data
    p = Pickler(version)
    for item in p.load():
        # reset server-side bookkeeping so the item is accepted as new
        item["data"]["version"] = 0
        item["key"] = ''
        # unused result binding `r = ...` removed
        zot.create_items([item])
def test_unpickle_dict(self):
    """Round-trip a nested dict through the pickler, the local DB and the
    unpickler, and check it comes back unchanged.

    The inconsistent ID field names ("1ID", "IhD", "IjD", "IgD") are part
    of the original fixture data.
    """
    expected = {
        1: {"1ID": "A23", "Gender": "Male", "Age": 22, "Sales": 2445,
            "BMI": "normal", "salary": 20, "Birthday": "24/06/1995"},
        2: {"IhD": "A2f3", "Gender": "Male", "Age": 23, "Sales": 2565,
            "BMI": "normal", "salary": 20, "Birthday": "24/06/1995"},
        3: {"IjD": "Aa23", "Gender": "Female", "Age": 25, "Sales": 25,
            "BMI": "normal", "salary": 20, "Birthday": "24/06/1995"},
        4: {"IgD": "A23", "Gender": "Female", "Age": 26, "Sales": 225,
            "BMI": "normal", "salary": 20, "Birthday": "24/06/1995"}
    }
    pickle = Pickler.pickle_dictionary_values(expected)
    self.local.insert_dictionary(pickle)
    result = Unpickler.unpickle_dictionary(self.local.get_db())
    self.assertEqual(expected, result)
def AddFavourite(self): """Adds an item to the favourites""" # remove the item item = Pickler.DePickleMediaItem(self.params[self.keywordPickle]) # no need for dates in the favourites # item.ClearDate() Logger.Debug("Adding favourite: %s", item) f = Favourites(Config.favouriteDir) if item.IsPlayable(): action = self.actionPlayVideo else: action = self.actionListFolder # add the favourite f.Add(self.channelObject, item, self.__CreateActionUrl(self.channelObject, action, item)) # we are finished, so just return return self.ShowFavourites(self.channelObject)
def __CreateActionUrl(self, channel, action, item=None, category=None):
    """Creates an URL that includes an action

    Arguments:
    channel : Channel - The channel object to use for the URL
    action  : string  - Action to create an url for

    Keyword Arguments:
    item     : MediaItem - The media item to add (pickled into the URL)
    category : string    - Optional category keyword

    Returns the plugin URL "<pluginName>?k=v&..." for the given action.
    Raises when *action* is None.
    """
    if action is None:
        raise Exception("action is required")

    params = dict()
    if channel:
        params[self.keywordChannel] = channel.moduleName
        if channel.channelCode:
            params[self.keywordChannelCode] = channel.channelCode
    params[self.keywordAction] = action

    # it might have an item or not
    if item is not None:
        params[self.keywordPickle] = Pickler.PickleMediaItem(item)
        # live streams get a random component so Kodi does not cache them
        if action == self.actionPlayVideo and item.isLive:
            params[self.keywordRandomLive] = random.randint(10000, 99999)

    if category:
        params[self.keywordCategory] = category

    url = "%s?" % (self.pluginName, )
    for k in params.keys():
        url = "%s%s=%s&" % (url, k, params[k])
    # drop the trailing '&' (also removes the '?' when params is empty)
    url = url.strip('&')
    # Logger.Trace("Created url: '%s'", url)
    return url
def update_savelog(self, mode, item):
    '''(BTool, string, string) -> NoneType

    Update the savelog: append *item* when mode == "add", otherwise
    remove it (removing a missing item is a no-op).

    *** should only be used by backup() and restore() methods in this
    class to ensure save corruption doesn't occur. ***
    '''
    p = Pickler("savelog")
    # Only the load is guarded now: the old bare `except:` around the
    # whole body turned a failed remove() into "create savelog [item]".
    try:
        data = p.load()
    except Exception:
        # no savelog yet -- start with an empty one
        data = []
    if mode == "add":
        data.append(item)
    elif item in data:
        data.remove(item)
    p.save(data)
def get_article(name):
    """Return the cached tagged article for *name*, truncated to the
    configured sentence limit.

    :raises ArticleNotFoundError: when no cached article exists
    """
    try:
        article = Pickler.load(articles_cache_path % name)
    except IOError:
        # a missing cache file means the article was never prepared
        raise ArticleNotFoundError(name)
    return article[:article_sentence_limit]
def dump(value, file, **kwargs):
    """Saves a JSON formatted representation of value into file.

    Honours the optional `unpicklable` keyword argument the same way
    dumps() does.
    """
    pickler = Pickler(unpicklable=__isunpicklable(kwargs))
    flattened = pickler.flatten(value)
    json.dump(flattened, file)
def charmony_run(color_matching_method, clothing_image_path, closet_image_path):
    """Find garments in a closet photo that colour-match a clothing photo.

    :param color_matching_method: 'complement' (180 deg) or 'triad' (120 deg)
    :param clothing_image_path:   path to the image of the garment to match
    :param closet_image_path:     path to the image of the candidate garments
    :return: (detected colour of the garment, list of matching colour names,
              closet image with matching segments highlighted)
    """
    # Import the packages we'll need
    import numpy as np
    from PIL import Image
    from skimage.segmentation import slic
    from cv2 import COLOR_BGR2RGB, imread, cvtColor
    from sklearn.neighbors import KNeighborsClassifier
    from pickler import Pickler
    from color_correction import fix_color
    from color_wheel_rotator import rotate_colors
    from mode_function import get_mode
    from resize_image import resize_image

    ################################################################
    ### IMPORTING USER DEFINED VALUES AND PREPARING LOOKUP DICTS ###
    ################################################################
    # unpickling the model
    # NOTE(review): opened in text mode 'r'; pickles normally need 'rb' --
    # confirm Pickler.load_pickle expects a text-mode handle.
    color_detector_pickled = open('color_detector.pkl', 'r')
    color_detector = Pickler.load_pickle(color_detector_pickled)
    color_detector_pickled.close()
    # define the Red Green Blue profile centers for each possible color
    color_dict = {
        'red': (255, 0, 0),
        'yellow': (255, 255, 0),
        'green': (0, 255, 0),
        'cyan': (0, 255, 255),
        'blue': (0, 0, 255),
        'magenta': (255, 0, 255)
    }
    # import user defined "How to match the color": 'complement' (180deg) or
    # 'triad' (120deg)
    how_to_match_colors = color_matching_method
    # define what degree rotation corresponds to each method of matching colors
    match_rotation_dict = {'complement': 180, 'triad': 120}
    # import the image path of the clothing you'd like to match with
    image_to_match_to_path = clothing_image_path
    # import the image path of the mix of clothing to match to
    image_of_possible_matches_path = closet_image_path

    ###################################
    ### LOADING AND CLEANING IMAGES ###
    ###################################
    # load image from image_to_match_to_path and convert to RGB from BGR
    image_to_match_to = imread(image_to_match_to_path)
    image_to_match_to = cvtColor(image_to_match_to, COLOR_BGR2RGB)
    # Reduce the image size by defining a width, and correct the color balance
    # by defining the low and high percentile value
    image_to_match_to = resize_image(image_to_match_to, new_basewidth=600)
    image_to_match_to = fix_color(image_to_match_to, percentile_correction=10)
    # load image from image_of_possible_matches_path and
    # convert to RGB from BGR
    image_of_possible_matches = imread(image_of_possible_matches_path)
    image_of_possible_matches = cvtColor(image_of_possible_matches, COLOR_BGR2RGB)
    # Reduce the image size by defining a width, and correct the color balance
    # by defining the low and high percentile value
    image_of_possible_matches = resize_image(image_of_possible_matches, new_basewidth=600)
    image_of_possible_matches = fix_color(image_of_possible_matches, percentile_correction=10)

    ##########################################################
    ### DETERMINING THE COLOR OF THE CLOTHING TO MATCH TO  ###
    ##########################################################
    # Determine the pixel boundaries of image_to_match_to
    max_dim1 = image_to_match_to.shape[0]
    max_dim2 = image_to_match_to.shape[1]
    max_dim1_percent = max_dim1 / 100.0
    max_dim2_percent = max_dim2 / 100.0
    # Use the aforementioned boundaries, and select 400 random points from
    # the center (40%-60% band in each dimension) of the image
    dim1_coords = (np.random.choice(
        np.arange(max_dim1_percent * 40, max_dim1_percent * 60, 1),
        400)).astype(int)
    dim2_coords = (np.random.choice(
        np.arange(max_dim2_percent * 40, max_dim2_percent * 60, 1),
        400)).astype(int)
    # Iterate through each random point and predict the color
    coords_colors = []
    for coord in np.arange(len(dim1_coords)):
        coord_color = color_detector.predict(
            np.array(image_to_match_to[dim1_coords[coord],
                                       dim2_coords[coord], :]).reshape(1, -1))
        coords_colors.append(coord_color)
    coords_colors = np.vstack(coords_colors).flatten()
    # find the most frequent color among all the random points, and assign
    # that to image_to_match_to_color
    most_frequent_color = get_mode(coords_colors)
    image_to_match_to_color = most_frequent_color

    ###################################################
    ### DETERMINING THE APPROPRIATE MATCHING COLOR  ###
    ### Note: only supports matching to one color   ###
    ###################################################
    # If image_to_match_to_color is a neutral color, return all possible
    # non-neutral colors
    if image_to_match_to_color in ['black', 'brown', 'grey', 'white']:
        matching_colors = ['red', 'yellow', 'green', 'cyan', 'blue', 'magenta']
    # else, use a function to rotate the color wheel, based on the current
    # Red Green Blue profile (looked up in color_dict) and the degree rotation
    # from how_to_match_colors (looked up in match_rotation_dict)
    else:
        color_rbg_profile = color_dict[image_to_match_to_color]
        matching_rotation = match_rotation_dict[how_to_match_colors]
        matching_color_list = rotate_colors(color_rbg_profile, matching_rotation)
        # ensure the matching_color_list outputs (lists formatted as
        # Red Green Blue profiles) are all unique
        unique_matching_color_values = list()
        for sublist in matching_color_list:
            if sublist not in unique_matching_color_values:
                unique_matching_color_values.append(sublist)
        # convert tuples in Red Green Blue color format back to color
        # names, and store the color names in matching_colors
        matching_colors = [
            color for color in color_dict
            if color_dict[color] in tuple(
                tuple(rbg) for rbg in unique_matching_color_values)
        ]

    #################################################################
    ### SEGMENT THE IMAGE OF MULTIPLE GARMENTS IN A CLOSET IMAGE  ###
    #################################################################
    ## Use Simple Linear Iterative Clustering to segment the closet image
    segmented_possible_matches = slic(image_of_possible_matches,
                                      n_segments=350, compactness=10, sigma=1)
    # Get the unique segment labels
    unique_segments = np.unique(segmented_possible_matches)
    # randomly select 1/5 of the pixel data within each segment to summarize
    # color detection over. First, create a list to hold each segment's data
    unique_segments_summary = []
    for segment in unique_segments:
        # get the pixel-by-pixel Red Green Blue data for a segment
        segment_pixels_data = image_of_possible_matches[
            segmented_possible_matches == segment]
        # randomly choose 1/5 of the pixel data for that segment
        # NOTE(review): `pixel_count / 5` as a size argument implies Python 2
        # integer division -- on Python 3 this would be a float; confirm.
        pixel_count = len(segment_pixels_data)
        pixel_subset = np.random.choice(np.arange(pixel_count),
                                        (pixel_count / 5), replace=False)
        segment_pixels_data_subset = [
            segment_pixels_data[pixel] for pixel in pixel_subset
        ]
        # place the subset data into unique_segments_summary
        unique_segments_summary.append(segment_pixels_data_subset)

    #################################################################
    ### DETERMINE THE COLOR FOR EACH SEGMENT IN THE CLOSET IMAGE  ###
    #################################################################
    # Using the subset of pixel data for each segment, iterate over each
    # segment and predict its color. Keep track of each segment's color in
    # the list unique_segments_colors
    unique_segments_colors = []
    for segment_data in unique_segments_summary:
        # if the segment data has only one dimension then...
        if len(np.array(segment_data).shape) == 1:
            # if there is no data in it, just return no data
            # NOTE(review): this bare expression is a no-op, so `colors_seen`
            # keeps its value from the previous iteration -- confirm intended.
            if np.array(segment_data).size == 0:
                segment_data
            # if there is data in it, reshape the array and detect the color
            else:
                colors_seen = color_detector.predict(
                    np.array(segment_data).reshape(1, -1))
        # if the data is greater than 1 dimension, detect the color as usual
        else:
            colors_seen = color_detector.predict(segment_data)
        # find the most frequent color detected within the segment, and record
        # the color in unique_segments_colors
        most_frequent_color = get_mode(colors_seen)
        unique_segments_colors.append(most_frequent_color)

    ########################################################################
    ### FIND THE SEGMENTS IN THE CLOSET THAT MATCH THE CLOTHING IN COLOR ###
    ########################################################################
    # create a boolean to identify which segments are color matches
    which_segments_to_highlight = np.reshape(
        [(val in matching_colors) for val in unique_segments_colors],
        np.array(unique_segments_colors).shape)
    # create a list of the matching segments, that will eventually be used
    # to highlight the correct portions of the image
    unique_segments_to_highlight = unique_segments[which_segments_to_highlight]

    ###########################################################
    ### HIGHLIGHT THE MATCHING SEGMENTS IN THE CLOSET IMAGE ###
    ###########################################################
    # Create a blank mask the same size as the image_of_possible_matches,
    # where each pixel is black (red = 0, green = 0, blue = 0)
    highlighting_mask = np.zeros(image_of_possible_matches.shape,
                                 dtype=np.uint8)
    # for each y and x pixel coordinate in the image, if the corresponding
    # segment is going to be highlighted, change the corresponding mask pixel
    # color to white (red = 255, green = 255, blue = 255)
    for y in np.arange(segmented_possible_matches.shape[0]):
        for x in np.arange(segmented_possible_matches.shape[1]):
            if segmented_possible_matches[y, x] in unique_segments_to_highlight:
                highlighting_mask[y, x, 0] = 255
                highlighting_mask[y, x, 1] = 255
                highlighting_mask[y, x, 2] = 255
    # ensure that image_of_possible_matches is a 'uint8' data type
    image_of_possible_matches = Image.fromarray(
        np.uint8(image_of_possible_matches))
    # turn the mask into a 'uint8' data type
    highlighting_mask = Image.fromarray(np.uint8(highlighting_mask))
    # convert image_of_possible_matches and the mask to red, green, blue,
    # alpha images (RGBA), so that we can control the alpha levels to
    # highlight with
    orignal_image_alphas = image_of_possible_matches.convert("RGBA")
    mask_alpha_levels = highlighting_mask.convert("RGBA")
    # extract the RGBA data from the RGBA mask
    mask_alpha_levels_data = mask_alpha_levels.getdata()
    # for each pixel in the mask...
    mask_alpha_levels_newData = []
    for pixel in mask_alpha_levels_data:
        # if the RGB profile is white (i.e., should be highlighted), then turn
        # the mask's alpha level to 0 (clear) in that place
        if pixel[0] == 255 and pixel[1] == 255 and pixel[2] == 255:
            mask_alpha_levels_newData.append((255, 255, 255, 0))
        # if the RGB profile is black (i.e., should not be highlighted), then
        # turn the mask's alpha level to 255 (opaque/shaded) in that place
        else:
            mask_alpha_levels_newData.append((0, 0, 0, 255))
    # update the data in the RGBA mask object with the new alpha values
    mask_alpha_levels.putdata(mask_alpha_levels_newData)
    # apply the mask alpha values to the original alpha values
    orignal_image_alphas.paste(mask_alpha_levels, mask=mask_alpha_levels)
    # convert the alpha data into an RGBA image
    orignal_image_alphas = orignal_image_alphas.convert("RGBA")
    # blend the alpha mask image with the original image_of_possible_matches
    # with an interpolation alpha factor of 85%
    image_of_highlighted_matches = Image.blend(
        image_of_possible_matches.convert("RGBA"),
        orignal_image_alphas, alpha=0.85)
    # convert the image back to an RGB format
    image_of_highlighted_matches = image_of_highlighted_matches.convert("RGB")

    ###########################################################################
    ### RETURN THE COLOR MATCHED TO, MATCHING COLORS, AND HIGHLIGHTED IMAGE ###
    ###########################################################################
    return image_to_match_to_color, matching_colors, image_of_highlighted_matches
def get_article(name):
    """Return the cached tagged article for *name*, truncated to the
    configured sentence limit.

    :raises ArticleNotFoundError: when no cached article file exists
    """
    try:
        return Pickler.load(articles_cache_path % name)[: article_sentence_limit]
    except IOError:
        # a missing cache file means the article was never prepared
        raise ArticleNotFoundError(name)
def ProcessFolderList(self):
    """Wraps the channel.ProcessFolderList

    Unpickles the optionally selected item, asks the channel for its folder
    contents, converts each media item into a Kodi directory entry (with
    context menu and action URL) and hands the list to Kodi. Any failure is
    logged, registered in the statistics and ends the directory with ok=False.
    """
    Logger.Info("Plugin::ProcessFolderList Doing ProcessFolderList")
    try:
        ok = True

        selectedItem = None
        if self.keywordPickle in self.params:
            selectedItem = Pickler.DePickleMediaItem(self.params[self.keywordPickle])

        watcher = stopwatch.StopWatch("Plugin ProcessFolderList", Logger.Instance())
        episodeItems = self.channelObject.ProcessFolderList(selectedItem)
        watcher.Lap("Class ProcessFolderList finished")

        if len(episodeItems) == 0:
            Logger.Warning("ProcessFolderList returned %s items", len(episodeItems))
            ok = self.__ShowEmptyInformation(episodeItems)
        else:
            Logger.Debug("ProcessFolderList returned %s items", len(episodeItems))

        xbmcItems = []
        for episodeItem in episodeItems:
            # fall back to channel artwork when the item has none
            if episodeItem.thumb == "":
                episodeItem.thumb = self.channelObject.noImage
            if episodeItem.fanart == "":
                episodeItem.fanart = self.channelObject.fanart

            # folder-like items get a list action, playable items a play
            # action; anything else is skipped
            if episodeItem.type == 'folder' or episodeItem.type == 'append' or episodeItem.type == "page":
                action = self.actionListFolder
                folder = True
            elif episodeItem.IsPlayable():
                action = self.actionPlayVideo
                folder = False
            else:
                Logger.Critical("Plugin::ProcessFolderList: Cannot determine what to add")
                continue

            # Get the XBMC item
            item = episodeItem.GetXBMCItem()
            # Get the context menu items
            contextMenuItems = self.__GetContextMenuItems(self.channelObject, item=episodeItem)
            item.addContextMenuItems(contextMenuItems)
            # Get the action URL
            url = self.__CreateActionUrl(self.channelObject, action=action, item=episodeItem)
            # Add them to the list of XBMC items
            xbmcItems.append((url, item, folder))

        watcher.Lap("Kodi Items generated")
        # add items but if OK was False, keep it like that
        ok = ok and xbmcplugin.addDirectoryItems(self.handle, xbmcItems, len(xbmcItems))
        watcher.Lap("items send to Kodi")

        if selectedItem is None:
            # mainlist item: register channel open in the statistics
            Statistics.RegisterChannelOpen(self.channelObject, Initializer.StartTime)
            watcher.Lap("Statistics send")

        watcher.Stop()
        self.__AddSortMethodToHandle(self.handle, episodeItems)

        # set the content
        xbmcplugin.setContent(handle=self.handle, content=self.contentType)
        xbmcplugin.endOfDirectory(self.handle, ok)
    except:
        # top-level plugin boundary: report, notify the user, close the dir
        Statistics.RegisterError(self.channelObject)
        XbmcWrapper.ShowNotification(LanguageHelper.GetLocalizedString(LanguageHelper.ErrorId),
                                     LanguageHelper.GetLocalizedString(LanguageHelper.ErrorList),
                                     XbmcWrapper.Error, 4000)
        Logger.Error("Plugin::Error Processing FolderList", exc_info=True)
        xbmcplugin.endOfDirectory(self.handle, False)
def setUp(self):
    """Create a fresh Pickler instance before each test."""
    self.pickler = Pickler()
def PlayVideoItem(self):
    """Starts the videoitem using a playlist.

    De-pickles the MediaItem from the URL parameters, optionally shows a DRM
    warning, lets the channel complete the item if needed, then either
    resolves the first playlist entry via setResolvedUrl or starts the whole
    playlist with a Kodi player.  Subtitles are attached after playback has
    actually started.  Errors are logged, counted in the statistics and shown
    as a notification.
    """
    Logger.Debug("Playing videoitem using PlayListMethod")
    item = None
    try:
        item = Pickler.DePickleMediaItem(self.params[self.keywordPickle])
        if item.isDrmProtected and AddonSettings.ShowDrmWarning():
            Logger.Debug("Showing DRM Warning message")
            title = LanguageHelper.GetLocalizedString(LanguageHelper.DrmTitle)
            message = LanguageHelper.GetLocalizedString(LanguageHelper.DrmText)
            XbmcWrapper.ShowDialog(title, message)
        elif item.isDrmProtected:
            Logger.Debug("DRM Warning message disabled by settings")
        if not item.complete:
            # Give the channel a chance to fetch the actual stream info.
            item = self.channelObject.ProcessVideoItem(item)
        # validated the updated item
        if not item.complete or not item.HasMediaItemParts():
            Logger.Warning("UpdateVideoItem returned an item that had item.complete = False:\n%s", item)
            Statistics.RegisterError(self.channelObject, item=item)
        if not item.HasMediaItemParts():
            # the update failed or no items where found. Don't play
            XbmcWrapper.ShowNotification(LanguageHelper.GetLocalizedString(LanguageHelper.ErrorId),
                                         LanguageHelper.GetLocalizedString(LanguageHelper.NoStreamsId),
                                         XbmcWrapper.Error)
            Logger.Warning("Could not start playback due to missing streams. Item:\n%s", item)
            return
        playData = self.channelObject.PlayVideoItem(item)
        Logger.Debug("Continuing playback in plugin.py")
        if not playData:
            Logger.Warning("PlayVideoItem did not return valid playdata")
            return
        else:
            # playData is a (playlist, subtitle-path) pair.
            playList, srt = playData
        # Get the Kodi Player instance (let Kodi decide what player, see
        # http://forum.kodi.tv/showthread.php?tid=173887&pid=1516662#pid1516662)
        xbmcPlayer = xbmc.Player()
        # now we force the busy dialog to close, else the video will not play and the
        # setResolved will not work.
        xbmc.executebuiltin("Dialog.Close(busydialog)")
        resolvedUrl = None
        if item.IsResolvable():
            # now set the resolve to the first URL
            startIndex = playList.getposition()  # the current location
            if startIndex < 0:
                startIndex = 0
            Logger.Info("Playing stream @ playlist index %s using setResolvedUrl method", startIndex)
            resolvedUrl = playList[startIndex].getfilename()
            xbmcplugin.setResolvedUrl(self.handle, True, playList[startIndex])
        else:
            # playlist do not use the setResolvedUrl
            Logger.Info("Playing stream using Playlist method")
            xbmcPlayer.play(playList)
        # the set the subtitles
        showSubs = AddonSettings.UseSubtitle()
        if srt and (srt != ""):
            Logger.Info("Adding subtitle: %s and setting showSubtitles to %s", srt, showSubs)
            # Subtitles can only be attached once the player is active.
            XbmcWrapper.WaitForPlayerToStart(xbmcPlayer, logger=Logger.Instance(), url=resolvedUrl)
            xbmcPlayer.setSubtitles(srt)
            xbmcPlayer.showSubtitles(showSubs)
    except:
        # NOTE(review): bare except acts as the plugin's top-level boundary;
        # the error is logged with exc_info and reported via notification.
        if item:
            Statistics.RegisterError(self.channelObject, item=item)
        else:
            Statistics.RegisterError(self.channelObject)
        XbmcWrapper.ShowNotification(LanguageHelper.GetLocalizedString(LanguageHelper.ErrorId),
                                     LanguageHelper.GetLocalizedString(LanguageHelper.NoPlaybackId),
                                     XbmcWrapper.Error)
        Logger.Critical("Could not playback the url", exc_info=True)
    return
class Favourites:
    """Stores, lists and removes channel favourites as small text files.

    Each favourite is a ``<channel-guid>-<item-guid>.xotfav`` file holding
    four lines: channel name, item name, action URL and the pickled item.
    """

    def __init__(self, path):
        """ Initializes a Favourites class that can be used to show, add and delete favourites.

        :param str path: The path to store the favourites file
        """
        # Filename template: "<channel.guid>-<item.guid>.xotfav"
        self.__filePattern = "%s-%s.xotfav"
        self.__pickler = Pickler()
        self.FavouriteFolder = path

    def add(self, channel, item, action_url):
        """ Adds a favourite for a specific channel.

        :param channel: The channel
        :param item: The mediaitem
        :param str action_url: The mediaitem's actionUrl

        :raises Exception: re-raises any error that occurs while writing the file.
        """
        Logger.debug("Adding item %s\nfor channel %s\n%s", item, channel, action_url)
        file_name = self.__filePattern % (channel.guid, item.guid)
        file_path = os.path.join(self.FavouriteFolder, file_name)
        pickle = self.__pickler.pickle_media_item(item)
        # Just double check for folder existence
        if not os.path.isdir(self.FavouriteFolder):
            os.makedirs(self.FavouriteFolder)
        # replacing to pickle in the actionUrl to save space
        action_url = self.__remove_pickle(action_url)
        try:
            with io.open(file_path, mode='w', encoding='utf-8') as file_handle:
                file_handle.write(
                    "%s\n%s\n%s\n%s" % (channel.channelName, item.name, action_url, pickle))
        except:
            Logger.error("Error saving favourite", exc_info=True)
            raise
        return

    # noinspection PyUnusedLocal
    def remove(self, item):
        """ Removes all favourite files for a specific media item (any channel).

        :param item: The mediaitem
        """
        # The wildcard matches the favourite for this item in every channel.
        path_mask = os.path.join(self.FavouriteFolder, "*-%s.xotfav" % (item.guid, ))
        Logger.debug("Removing favourites for mask: %s", path_mask)
        for fav in glob.glob(path_mask):
            Logger.trace("Removing item %s\nFileName: %s", item, fav)
            os.remove(fav)
        return

    def list(self, channel=None):
        """ Lists favourites. If a channel was specified it will limit them to that.

        Corrupt or invalid favourite files found along the way are deleted;
        un-depicklable ones are only skipped.

        :param channel: The channel to limit the favourites to.

        :return: A list of de-pickled MediaItems (actionUrl filled in).
        :rtype: list
        """
        favs = []
        if channel:
            path_mask = os.path.join(self.FavouriteFolder, "%s-*.xotfav" % (channel.guid, ))
        else:
            path_mask = os.path.join(self.FavouriteFolder, "*.xotfav")
        Logger.debug("Fetching favourites for mask: %s", path_mask)
        for fav in glob.glob(path_mask):
            Logger.trace("Fetching %s", fav)
            try:
                with io.open(fav, mode='r', encoding='utf-8') as file_handle:
                    channel_name = file_handle.readline().rstrip()
                    name = file_handle.readline().rstrip()
                    action_url = file_handle.readline().rstrip()
                    if "pickle=" in action_url and "pickle=%s" not in action_url:
                        # see issue https://bitbucket.org/basrieter/xbmc-online-tv/issues/1037
                        Logger.debug(
                            "Found favourite with full pickle, removing the pickle as we should use the one from the file."
                        )
                        action_url = self.__remove_pickle(action_url)
                    pickle = file_handle.readline()
            except:
                Logger.error("Error fetching favourite", exc_info=True)
                raise
            if channel_name == "" or name == "" or action_url == "" or pickle == "":
                Logger.error(
                    "Apparently the file had too few lines, corrupt Favourite, removing it:\n"
                    "Pickle: %s\n"
                    "Channel: %s\n"
                    "Item: %s\n"
                    "ActionUrl: %s\n"
                    "Pickle: %s", fav, channel_name, name, action_url, pickle)
                # Remove the invalid favourite
                os.remove(fav)
                continue
            Logger.debug("Found favourite: %s", name)
            try:
                item = self.__pickler.de_pickle_media_item(pickle)
            except Exception:
                Logger.error("Cannot depickle item.", exc_info=True)
                # Let's not remove them for now. Just ignore.
                # os.remove(fav)
                continue
            validation_error = self.__pickler.validate(
                item, logger=Logger.instance())
            if validation_error:
                Logger.error(
                    "Invalid Pickled Item: %s\nRemoving favourite: %s",
                    validation_error, fav)
                # Remove the invalid favourite
                os.remove(fav)
                continue
            # add the channel name
            if channel is None:
                item.name = "%s [%s]" % (item.name, channel_name)
            item.clear_date()
            # Re-insert the stored pickle into the "%s" slot of the URL.
            item.actionUrl = action_url % (pickle, )
            favs.append(item)
        return favs

    def __remove_pickle(self, action_url):
        """Replace the pickle value inside *action_url* with a '%s' placeholder.

        :param str action_url: URL possibly containing 'pickle=<value>'.
        :return: the URL with the pickle value swapped for '%s' (or unchanged
                 if no pickle parameter is present).
        :rtype: str
        """
        pickle = Regexer.do_regex("pickle=([^&]+)", action_url)
        if not pickle:
            return action_url
        return action_url.replace(pickle[0], "%s")
def insert_remote_dict(self, dictionary): """Insert values into both the local and remote""" pickled = Pickler.pickle_dictionary_values(dictionary) self.remote.insert_dictionary(pickled)
class ParameterParser(object):
    """Parses plugin URL query strings and builds action URLs for the add-on.

    Holds the canonical keyword/action/property names used in plugin:// URLs
    so all URL construction and parsing uses one shared vocabulary.
    """

    def __init__(self, addon_name, params):
        """
        :param str addon_name: The name of the add-on
        :param str params: The parameters used to start the ParameterParser
        """
        Logger.debug("Parsing parameters from: %s", params)

        # Url Keywords
        self.keywordPickle = "pickle"                    # : Used for the pickle item
        self.keywordAction = "action"                    # : Used for specifying the action
        self.keywordChannel = "channel"                  # : Used for the channel
        self.keywordChannelCode = "channelcode"          # : Used for the channelcode
        self.keywordCategory = "category"                # : Used for the category
        self.keywordRandomLive = "rnd"                   # : Used for randomizing live items
        self.keywordSettingId = "settingid"              # : Used for setting an encrypted setting
        self.keywordSettingActionId = "settingactionid"  # : Used for passing the actionid for the encryption
        self.keywordSettingName = "settingname"          # : Used for setting an encrypted settings display name
        self.keywordSettingTabFocus = "tabfocus"         # : Used for setting the tabcontrol to focus after changing a setting
        self.keywordSettingSettingFocus = "settingfocus" # : Used for setting the setting control to focus after changing a setting
        self.keywordLanguage = "lang"                    # : Used for the 2 char language information
        self.keywordProxy = "proxy"                      # : Used so set the proxy index
        self.keywordLocalIP = "localip"                  # : Used to set the local ip index

        # Url Actions
        self.actionFavourites = "favourites"             # : Used to show favorites for a channel
        self.actionAllFavourites = "allfavourites"       # : Used to show all favorites
        self.actionRemoveFavourite = "removefromfavourites"  # : Used to remove items from favorites
        self.actionAddFavourite = "addtofavourites"      # : Used to add items to favorites
        self.actionDownloadVideo = "downloadVideo"       # : Used to download a video item
        self.actionPlayVideo = "playvideo"               # : Used to play a video item
        self.actionUpdateChannels = "updatechannels"     # : Used to update channels
        self.actionListFolder = "listfolder"             # : Used to list a folder
        self.actionListCategory = "listcategory"         # : Used to show the channels from a category
        self.actionConfigureChannel = "configurechannel" # : Used to configure a channel
        self.actionSetEncryptionPin = "changepin"        # : Used for setting an application pin
        self.actionSetEncryptedValue = "encryptsetting"  # : Used for setting an application pin
        self.actionResetVault = "resetvault"             # : Used for resetting the vault
        self.actionPostLog = "postlog"                   # : Used for sending log files to pastebin.com
        self.actionProxy = "setproxy"                    # : Used for setting a proxy

        # Kodi window-property names used by the skin integration.
        self.propertyRetrospect = "Retrospect"
        self.propertyRetrospectChannel = "RetrospectChannel"
        self.propertyRetrospectChannelSetting = "RetrospectChannelSettings"
        self.propertyRetrospectFolder = "RetrospectFolder"
        self.propertyRetrospectVideo = "RetrospectVideo"
        self.propertyRetrospectCloaked = "RetrospectCloaked"
        self.propertyRetrospectCategory = "RetrospectCategory"
        self.propertyRetrospectFavorite = "RetrospectFavorite"
        self.propertyRetrospectAdaptive = "RetrospectAdaptive"

        # determine the query parameters
        self.params = self.__get_parameters(params)
        self.pluginName = addon_name
        # We need a pickler for this instance
        self._pickler = Pickler()

    def _create_action_url(self, channel, action, item=None, category=None):
        """ Creates an URL that includes an action.

        :param ChannelInfo|Channel channel: The channel object to use for the URL.
        :param str action: Action to create an url for
        :param MediaItem item: The media item to add
        :param str category: The category to use.

        :return: a complete action url with all keywords and values
        :rtype: str|unicode

        :raises Exception: if *action* is None.
        """
        if action is None:
            raise Exception("action is required")

        # catch the plugin:// url's for items and channels.
        if item is not None and item.url and item.url.startswith("plugin://"):
            return item.url
        if item is None and channel is not None and channel.uses_external_addon:
            return channel.addonUrl

        params = dict()
        if channel:
            params[self.keywordChannel] = channel.moduleName
            if channel.channelCode:
                params[self.keywordChannelCode] = channel.channelCode
        params[self.keywordAction] = action

        # it might have an item or not
        if item is not None:
            params[self.keywordPickle] = self._pickler.pickle_media_item(item)
            if action == self.actionPlayVideo and item.isLive:
                # Random parameter defeats Kodi's URL caching for live items.
                params[self.keywordRandomLive] = random.randint(10000, 99999)

        if category:
            params[self.keywordCategory] = category

        url = "%s?" % (self.pluginName, )
        for k in params.keys():
            url = "%s%s=%s&" % (url, k, params[k])
        url = url.strip('&')
        # Logger.Trace("Created url: '%s'", url)
        return url

    def __get_parameters(self, query_string):
        """ Extracts the actual parameters as a dictionary from the passed in
        querystring. This method takes the self.quotedPlus into account.

        :param str query_string: The querystring

        :return: dict() of keywords and values.
        :rtype: dict[str,str|None]

        :raises Exception: re-raises any parsing error after logging it.
        """
        result = dict()
        query_string = query_string.strip('?')
        if query_string == '':
            return result
        try:
            for pair in query_string.split("&"):
                (k, v) = pair.split("=")
                result[k] = v
            # if the channelcode was empty, it was stripped, add it again.
            if self.keywordChannelCode not in result:
                Logger.debug(
                    "Adding ChannelCode=None as it was missing from the dict: %s", result)
                result[self.keywordChannelCode] = None
        except:
            Logger.critical("Cannot determine query strings from %s",
                            query_string, exc_info=True)
            raise
        return result
def backup_library(self): #untested ''' backup entire library, this overwrites any previous backup''' #initialize a pickle and save @lib_backup.p picklesave = 'lib_backup' pickle = Pickler(picklesave) pickle.save(self._zot.items())
def List(self, channel=None): """ Lists favourites. If a channel was specified it will limit them to that. @param channel: The channel to limit the favourites to. Returns a list of tupples (actionUrl, pickle) """ favs = [] if channel: pathMask = os.path.join(self.FavouriteFolder, "%s-*.xotfav" % (channel.guid,)) else: pathMask = os.path.join(self.FavouriteFolder, "*.xotfav") Logger.Debug("Fetching favourites for mask: %s", pathMask) for fav in glob.glob(pathMask): Logger.Trace("Fetching %s", fav) fileHandle = None try: fileHandle = open(fav) channelName = fileHandle.readline().rstrip() name = fileHandle.readline().rstrip() actionUrl = fileHandle.readline().rstrip() pickle = fileHandle.readline() fileHandle.close() except: Logger.Error("Error fetching favourite", exc_info=True) if fileHandle and not fileHandle.closed: fileHandle.close() raise if channelName == "" or name == "" or actionUrl == "" or pickle == "": Logger.Error("Apparently the file had too few lines, corrupt Favourite, removing it:\n" "Pickle: %s\n" "Channel: %s\n" "Item: %s\n" "ActionUrl: %s\n" "Pickle: %s", fav, channelName, name, actionUrl, pickle) # Remove the invalid favourite os.remove(fav) continue Logger.Debug("Found favourite: %s", name) item = Pickler.DePickleMediaItem(pickle) validationError = Pickler.Validate(item, logger=Logger.Instance()) if validationError: Logger.Error("Invalid Pickled Item: %s\nRemoving favourite: %s", validationError, fav) # Remove the invalid favourite os.remove(fav) continue # add the channel name if channel is None: item.name = "%s [%s]" % (item.name, channelName) item.ClearDate() favs.append((actionUrl % (pickle,), item)) return favs
def do_delete_pickle(self, arg): my_pickler = Pickler() my_pickler.delete_pickle(arg)
class TestPicklerSetUp(TestCase):
    """Unit tests for Pickler.pickle_dictionary_values (author: Wesley).

    The original file repeated the same four-record fixture literal in every
    test; it is now built once by _sample_records().  Commented-out tests for
    a nonexistent pickle_record_values API were removed as dead code.
    """

    @staticmethod
    def _sample_records():
        """Return a fresh four-record fixture dict keyed 0..3.

        A new copy is built per call so a test that mutates the pickled
        result cannot leak state into another test.
        """
        return {
            0: {"1ID": "A23", "Gender": "Male", "Age": 22, "Sales": 245,
                "BMI": "normal", "salary": 20, "Birthday": "24/06/1995"},
            1: {"IhD": "A2f3", "Gender": "Male", "Age": 22, "Sales": 245,
                "BMI": "normal", "salary": 20, "Birthday": "24/06/1995"},
            2: {"IjD": "Aa23", "Genkder": "Male", "Age": 22, "Sales": 245,
                "BMI": "normal", "salary": 20, "Birthday": "24/06/1995"},
            3: {"IgD": "A23", "Gender": "Male", "Age": 22, "Sales": 245,
                "BMI": "normal", "salary": 20, "Birthday": "24/06/1995"},
        }

    # Wesley
    def setUp(self):
        """Create a fresh Pickler before each test."""
        self.pickler = Pickler()

    # Wesley
    def tearDown(self):
        """Drop the Pickler reference after each test."""
        self.pickler = None

    # Wesley
    def test_pickle_dictionary_type_byte(self):
        """True if all values in dictionary are of type 'byte'"""
        data = self.pickler.pickle_dictionary_values(self._sample_records())
        # Exact-type check (not isinstance) matches the original intent.
        self.assertTrue(all(type(value) is bytes for value in data.values()))

    # Wesley
    def test_pickle_dictionary_type_string(self):
        """False if any values in dictionary are of type 'string'"""
        data = self.pickler.pickle_dictionary_values(self._sample_records())
        data[2] = "This is a string"  # corrupt one entry on purpose
        self.assertFalse(all(type(value) is bytes for value in data.values()))

    # Wesley
    def test_pickle_dictionary_type_string_true(self):
        """True if a value in dictionary are of type 'string'"""
        data = self.pickler.pickle_dictionary_values(self._sample_records())
        data[2] = "This is a string"  # corrupt one entry on purpose
        self.assertTrue(any(type(value) is str for value in data.values()))