    # NOTE(review): these two statements are the tail of a function whose `def`
    # line is outside this view (they close a report/transcript file opened
    # earlier) — indentation reconstructed as function-body level; confirm
    # against the full file.
    output_file.write("=" * 120)
    output_file.close()


###########################################################################################################
### Run! ###

# Parse the CMD args.
# `parser`, `util`, `train` and `chat` are defined earlier in the file (not
# visible here).
results = parser.parse_args()
# Reject an empty invocation: at least one of --train/--chat/--data is required.
# NOTE(review): `raise Warning(...)` aborts with a traceback rather than
# printing a usage message — `parser.error(...)` would be the conventional
# argparse way; left as-is here.
if not results.train and not results.chat and not results.data:
    raise Warning(
        "Invalid arguments: no arguments found, execute with [-h] for options."
    )
# --train and --chat are mutually exclusive because training never returns.
if results.train and results.chat:
    raise Warning(
        "Training runs indefinitely, --chat will only run without --train, please identify -c or -t. [-h] for options"
    )

print("\nRunning model with {}\n".format(results))
# Order matters: data preparation first, then (exclusively) train or chat.
if results.data:
    util.prepare()
if results.train:
    train(results.messages)
if results.chat:
    chat()
def feedme(feed="", type=""):
    """Main dispatch for this Kodi video plugin.

    Reads the plugin invocation parameters (via ``util.parseParameters()``),
    decides which "mode" is being requested, and either builds a directory
    listing (``util.addMenuItems``) or resolves and plays/downloads a video.

    Modes (taken from the ``mode`` URL parameter):
      None/0 : list the sites (and folders of sites) found in the feed file
      1      : first level inside a site — Latest/Search/Tags entries
      2      : scrape one listing level of a site and show the results
      3      : prompt for a search term and show search results
      4      : show video results for a site "tag"
      5      : no-op
      111    : find a playable source in a page and play it
      112    : find a source and download it
      113    : list all resolvable sources found in a page ("multiple")
      114    : resolve one previously-listed source URL and play it

    :param feed: location of the sites JSON/XML feed; may be overridden by a
        ``site_xml`` entry smuggled in through the ``extras`` parameter.
    :param type: how to fetch ``feed`` ("url" when overridden); passed through
        to ``util.getFile``.

    NOTE(review): Python 2 only — uses ``HTMLParser.HTMLParser``,
    ``dict.iterkeys`` and ``urllib.quote_plus``.
    NOTE(review): this function was recovered from whitespace-stripped source;
    the nesting of a few statements (placement of ``util.addMenuItems`` calls
    and the "next page" blocks) is inferred — confirm against the original.
    """
    h = HTMLParser.HTMLParser()
    # NOTE(review): `colour` is never used in this function (presumably
    # consumed elsewhere, or dead); "yello" looks like a typo for "yellow" —
    # confirm before fixing.
    colour = [
        "black", "white", "gray", "blue", "teal", "fuchsia", "indigo",
        "turquoise", "cyan", "greenyellow", "lime", "green", "olive", "gold",
        "yello", "lavender", "pink", "magenta", "purple", "maroon",
        "chocolate", "orange", "red", "brown"
    ]
    parameters = util.parseParameters()
    #util.logError(str(parameters))
    # Mode defaults to None (root listing) when absent or non-numeric.
    try:
        mode = int(parameters["mode"])
    except:
        mode = None
    # An off-site feed may be requested via extras={"site_xml": <url>}; when
    # present it replaces the `feed`/`type` arguments.
    try:
        offsite = ast.literal_eval(parameters['extras'])
        #util.logError(str(offsite))
        if "site_xml" in offsite:
            feed = offsite['site_xml']
            type = "url"
    except:
        #not set, dont worry about it
        pass

    if mode == None or mode == 0:
        # if we get here list the sites found in the json file
        menu = []
        bits = util.getFile(feed, type)
        counter = 0
        if str(len(bits['sites'])) == "1" and 'folder' not in bits['sites']:
            # Single site and no folders: skip straight to its first level
            # (deliberate fall-through into the `if mode == 1:` block below).
            mode = 1
            parameters['extras'] = str({"site": 0})
        else:
            try:
                # If extras carries a 'folder' name, list only the sites
                # belonging to that folder.
                folder = ast.literal_eval(parameters['extras'])
                folder = folder['folder']
                for site in bits['sites']:
                    try:
                        if site['folder'].lower() == folder.lower():
                            extras = {}
                            try:
                                extras['site_xml'] = offsite['site_xml']
                            except:
                                pass
                            # `site` index is the position in bits['sites'].
                            extras['site'] = counter
                            menu.append({
                                "title": site['name'],
                                "url": site['name'],
                                "mode": "1",
                                "poster": site['poster'],
                                "icon": site['poster'],
                                "fanart": site['fanart'],
                                "type": ADDON_TYPE,
                                "plot": "",
                                "isFolder": True,
                                "extras": extras
                            })
                    except:
                        # site not in a folder
                        pass
                    counter = counter + 1
            except:
                # No folder selected: list folders first, then un-foldered
                # sites.
                if "folders" in bits:
                    for site in bits['folders']:
                        extras = {}
                        try:
                            extras['site_xml'] = offsite['site_xml']
                        except:
                            pass
                        extras['site'] = counter
                        folder_extras = {}
                        folder_extras['folder'] = site['name']
                        # A folder with its own "url" is an off-site feed link
                        # rather than a local grouping.
                        if "url" in site:
                            folder_extras['site_xml'] = site['url']
                            del (folder_extras['folder'])
                        menu.append({
                            "title": site['name'],
                            "url": site['name'],
                            "mode": "0",
                            "poster": site['poster'],
                            "icon": site['poster'],
                            "fanart": site['fanart'],
                            "type": ADDON_TYPE,
                            "plot": "",
                            "isFolder": True,
                            "extras": folder_extras
                        })
                for site in bits['sites']:
                    if "folder" not in site:
                        extras = {}
                        try:
                            extras['site_xml'] = offsite['site_xml']
                        except:
                            pass
                        extras['site'] = counter
                        menu.append({
                            "title": site['name'],
                            "url": site['name'],
                            "mode": "1",
                            "poster": site['poster'],
                            "icon": site['poster'],
                            "fanart": site['fanart'],
                            "type": ADDON_TYPE,
                            "plot": "",
                            "isFolder": True,
                            "extras": extras
                        })
                    counter = counter + 1
        util.addMenuItems(menu)

    # Plain `if` (not elif) on purpose: mode 0 can promote itself to mode 1.
    if mode == 1:
        # first level within a site, show Latest, Search and any Tags within the specified site
        menu = []
        extras = ast.literal_eval(parameters['extras'])
        try:
            extras['site_xml'] = offsite['site_xml']
        except:
            pass
        bits = util.getFile(feed, type)
        site = bits['sites'][extras['site']]
        if "search_url" not in site and "tags" not in site and len(
                site['items']) == 1:
            # Only one listing level and nothing else to show: fall through
            # to mode 2 using the first item's site_url.
            mode = 2
            for item in site['items']:
                parameters['url'] = site['items'][item][0]['site_url']
                break
        else:
            for item in site['items'].iterkeys():
                if item.lower() != "search":
                    # Poster/fanart: explicit parameter wins, then the item's
                    # folder_* art (made absolute against HOME when it is not
                    # an http(s) URL), else empty.
                    try:
                        poster = parameters['poster']
                    except:
                        try:
                            poster = site['items'][item][0]['folder_poster']
                            if "http" not in poster and "https" not in poster:
                                poster = os.path.join(HOME, '', poster)
                        except:
                            poster = ""
                    try:
                        fanart = parameters['fanart']
                    except:
                        try:
                            fanart = site['items'][item][0]['folder_fanart']
                            if "http" not in fanart and "https" not in fanart:
                                fanart = os.path.join(HOME, '', fanart)
                        except:
                            fanart = ""
                    extras['level'] = item
                    menu.append({
                        "title": item,
                        "url": urllib.quote_plus(site['items'][item][0]['site_url']),
                        "mode": "2",
                        "poster": poster,
                        "icon": poster,
                        "fanart": fanart,
                        "type": ADDON_TYPE,
                        "plot": "",
                        "isFolder": True,
                        "extras": str(extras)
                    })
            # Optional per-site tags become mode-4 entries.
            try:
                counter = 0
                for tag in site['tags']:
                    try:
                        poster = parameters['poster']
                    except:
                        poster = ""
                    try:
                        fanart = parameters['fanart']
                    except:
                        fanart = ""
                    extras['tag'] = counter
                    menu.append({
                        "title": tag['name'],
                        "url": tag['url'],
                        "mode": "4",
                        "poster": poster,
                        "icon": poster,
                        "fanart": fanart,
                        "type": ADDON_TYPE,
                        "plot": "",
                        "isFolder": True,
                        "extras": str(extras)
                    })
                    counter = counter + 1
            except:
                pass
            # Sites that declare a search_url get a "Search" (mode 3) entry.
            if "search_url" in site:
                try:
                    poster = parameters['poster']
                except:
                    poster = ""
                try:
                    fanart = parameters['fanart']
                except:
                    fanart = ""
                menu.append({
                    "title": "Search",
                    "url": "",
                    "mode": "3",
                    "poster": poster,
                    "icon": poster,
                    "fanart": fanart,
                    "type": ADDON_TYPE,
                    "plot": "",
                    "isFolder": True,
                    "extras": str(extras)
                })
        util.addMenuItems(menu)

    if mode == 2:
        # load the first level of relevant video information
        menu = []
        extras = ast.literal_eval(parameters['extras'])
        try:
            extras['site_xml'] = offsite['site_xml']
        except:
            pass
        bits = util.getFile(feed, type)
        site = bits['sites'][extras['site']]
        # `pos` = index into the list of scrape levels for this item chain.
        if 'pos' in extras:
            pos = extras['pos']
        else:
            pos = 0
        # `level` = which item group we are scraping; default to the first.
        if 'level' in extras:
            level = extras['level']
        else:
            for item in site['items']:
                level = item
                break
        if len(site['items'][level]) > pos + 1:
            # another level is needed
            extras['pos'] = pos + 1
            newMode = "2"
            isFolder = True
        else:
            # on a level where next move is to check for sources
            try:
                if site['items'][level][pos]['play_media'] == "multiple":
                    newMode = "113"
                    isFolder = True
                else:
                    newMode = "111"  # find source
                    isFolder = False
            except:
                # default to play first found
                newMode = "111"  # find source
                isFolder = False
        #util.alert(newMode)
        page = util.get(h.unescape(parameters['url']))
        # Keep the raw page for the "next page" pattern; `page` itself may be
        # narrowed by the site's 'global' regex below.
        next = page
        """if parameters['name']=="Next Page >":
            util.logError(str(next))"""
        try:
            if site['items'][level][pos]['global'] != "":
                regex = util.prepare(site['items'][level][pos]['global'])
                matches = re.findall(regex, page)
                if matches:
                    page = matches[0]
        except:
            pass
        regex = util.prepare(site['items'][level][pos]['pattern'])
        matches = re.findall(regex, page)
        if matches:
            counter = 0
            for match in matches:
                try:
                    title = h.unescape(
                        util.replaceParts(
                            site['items'][level][pos]['name'],
                            matches[counter]).replace('\n', '').replace(
                                '\t', '').replace("\\", "").lstrip())
                except:
                    title = ""
                #try:
                #    util.alert(site['items'][level][pos]['url'])
                url = urllib.quote_plus(
                    util.replaceParts(site['items'][level][pos]['url'],
                                      matches[counter]))
                #    util.alert(">>"+url)
                #except:
                #    url=""
                try:
                    poster = util.replaceParts(
                        site['items'][level][pos]['poster'],
                        matches[counter]).encode('utf-8')
                except:
                    poster = ""
                try:
                    fanart = util.replaceParts(
                        site['items'][level][pos]['fanart'],
                        matches[counter]).encode('utf-8')
                except:
                    fanart = ""
                try:
                    plot = util.replaceParts(site['items'][level][pos]['plot'],
                                             matches[counter]).encode('utf-8')
                except:
                    plot = ""
                if isFolder:
                    menu.append({
                        "title": title,
                        "url": url,
                        "mode": newMode,
                        "poster": poster,
                        "icon": poster,
                        "fanart": fanart,
                        "type": ADDON_TYPE,
                        "plot": plot,
                        "isFolder": isFolder,
                        "extras": str(extras)
                    })
                else:
                    # Leaf entries are marked playable so Kodi resolves them.
                    menu.append({
                        "title": title,
                        "url": url,
                        "mode": newMode,
                        "poster": poster,
                        "icon": poster,
                        "fanart": fanart,
                        "type": ADDON_TYPE,
                        "plot": plot,
                        "isFolder": isFolder,
                        "isPlayable": "True",
                        "extras": str(extras)
                    })
                counter = counter + 1
            # Pagination: scrape the (unnarrowed) page for a next-page link and
            # append a "Next Page >" entry that re-enters mode 2 at the same pos.
            try:
                regex = util.prepare(site['items'][level][pos]['next_pattern'])
                matches = re.findall(regex, next)
                if matches:
                    parts = []
                    if len(matches) > 1:
                        for match in matches:
                            parts.append(match)
                    else:
                        match = matches
                    #nextlink=util.execPy(util.replaceParts(site['items'][level][pos]['next_url'], match))
                    nextlink = util.replaceParts(
                        site['items'][level][pos]['next_url'], match)
                    extras['pos'] = pos
                    menu.append({
                        "title": "Next Page >",
                        "url": urllib.quote_plus(nextlink),
                        "mode": "2",
                        "poster": "",
                        "icon": "",
                        "fanart": "",
                        "type": ADDON_TYPE,
                        "plot": plot,
                        "isFolder": True,
                        "extras": str(extras)
                    })
            except Exception as e:
                util.logError(str(e))
                pass
        util.addMenuItems(menu)

    elif mode == 3:
        # display the Search dialog and build search results
        menu = []
        extras = ast.literal_eval(parameters['extras'])
        try:
            extras['site_xml'] = offsite['site_xml']
        except:
            pass
        term = util.searchDialog()
        if term:
            bits = util.getFile(feed, type)
            site = bits['sites'][extras['site']]
            pos = 0
            # Search always starts from the first item level.
            for item in site['items']:
                level = item
                extras['level'] = level
                break
            if len(site['items'][extras['level']]) > pos + 1:
                # another level is needed
                extras['pos'] = 1
                newMode = "2"
                isFolder = True
                isPlayable = True
            else:
                # on a level where next move is to check for sources
                if site['items'][
                        extras['level']][pos]['play_media'] == "multiple":
                    newMode = "113"
                    isFolder = True
                    isPlayable = False
                else:
                    newMode = "111"  # find source
                    isFolder = False
                    isPlayable = True
            # "{{...}}" in search_url marks embedded python to execute;
            # "{%}" is replaced by the search term in either case.
            if "{{" in site['search_url'] and "}}" in site['search_url']:
                url = util.execPy(site['search_url'].replace("{%}", term))
            else:
                url = site['search_url'].replace("{%}", term)
            util.logError(url)
            page = util.get(url)
            next = page
            try:
                if site['item']['global'] != "":
                    regex = util.prepare(site['item']['global'])
                    matches = re.findall(regex, page)
                    if matches:
                        page = matches[0]
            except:
                pass
            regex = util.prepare(site['items'][level][pos]['pattern'])
            matches = re.findall(regex, page)
            if matches:
                counter = 0
                for match in matches:
                    try:
                        title = h.unescape(
                            util.replaceParts(
                                site['items'][level][pos]['name'],
                                matches[counter]).replace('\n', '').replace(
                                    '\t', '').lstrip().encode('utf-8'))
                    except:
                        title = ""
                    try:
                        url = util.replaceParts(
                            site['items'][level][pos]['url'],
                            matches[counter]).encode('utf-8')
                        #util.logError(url)
                    except:
                        url = ""
                    try:
                        poster = util.replaceParts(
                            site['items'][level][pos]['poster'],
                            matches[counter]).encode('utf-8')
                    except:
                        poster = ""
                    try:
                        fanart = util.replaceParts(
                            site['items'][level][pos]['fanart'],
                            matches[counter]).encode('utf-8')
                    except:
                        fanart = ""
                    try:
                        plot = util.replaceParts(
                            site['items'][level][pos]['plot'],
                            matches[counter]).encode('utf-8')
                    except:
                        plot = ""
                    if isFolder:
                        menu.append({
                            "title": title,
                            "url": url,
                            "mode": newMode,
                            "poster": poster,
                            "icon": poster,
                            "fanart": fanart,
                            "type": ADDON_TYPE,
                            "plot": plot,
                            "isFolder": isFolder,
                            "extras": str(extras)
                        })
                    else:
                        menu.append({
                            "title": title,
                            "url": url,
                            "mode": newMode,
                            "poster": poster,
                            "icon": poster,
                            "fanart": fanart,
                            "type": ADDON_TYPE,
                            "plot": plot,
                            "isFolder": isFolder,
                            "isPlayable": "True",
                            "extras": str(extras)
                        })
                    counter = counter + 1
                # Pagination for search results (same shape as mode 2, but the
                # next link is not URL-quoted here).
                try:
                    regex = util.prepare(
                        site['items'][level][pos]['next_pattern'])
                    matches = re.findall(regex, next)
                    if matches:
                        parts = []
                        """for match in matches:
                            parts.append(match)"""
                        if len(matches) > 1:
                            for match in matches:
                                parts.append(match)
                        else:
                            match = matches
                        #nextlink=util.execPy(util.replaceParts(site['items'][level][pos]['next_url'], match))
                        nextlink = util.replaceParts(
                            site['items'][level][pos]['next_url'], match)
                        menu.append({
                            "title": "Next Page >",
                            "url": nextlink,
                            "mode": "2",
                            "poster": "",
                            "icon": "",
                            "fanart": "",
                            "type": ADDON_TYPE,
                            "plot": plot,
                            "isFolder": True,
                            "extras": str(extras)
                        })
                except:
                    pass
            util.addMenuItems(menu)
        else:
            # Search dialog cancelled.
            return False

    elif mode == 4:
        # show relevant Tag video results
        menu = []
        extras = ast.literal_eval(parameters['extras'])
        try:
            extras['site_xml'] = offsite['site_xml']
        except:
            pass
        bits = util.getFile(feed, type)
        # Here `site` is the tag definition, not the whole site entry.
        site = bits['sites'][extras['site']]['tags'][extras['tag']]
        page = util.get(parameters['url'])
        next = page
        try:
            if site['item']['global'] != "":
                regex = util.prepare(site['item']['global'])
                matches = re.findall(regex, page)
                if matches:
                    page = matches[0]
        except:
            pass
        regex = util.prepare(site['item']['pattern'])
        matches = re.findall(regex, page)
        if matches:
            counter = 0
            for match in matches:
                try:
                    title = h.unescape(
                        util.replaceParts(site['item']['name'],
                                          matches[counter]).encode('utf-8'))
                except:
                    title = ""
                try:
                    url = util.replaceParts(site['item']['url'],
                                            matches[counter]).encode('utf-8')
                except:
                    url = ""
                try:
                    poster = util.replaceParts(
                        site['item']['poster'],
                        matches[counter]).encode('utf-8')
                except:
                    poster = ""
                try:
                    fanart = util.replaceParts(
                        site['item']['fanart'],
                        matches[counter]).encode('utf-8')
                except:
                    fanart = ""
                try:
                    plot = util.replaceParts(site['item']['plot'],
                                             matches[counter]).encode('utf-8')
                except:
                    plot = ""
                # NOTE(review): unlike modes 2/3, "extras" here is the dict
                # itself rather than str(extras) — confirm downstream handling.
                menu.append({
                    "title": title,
                    "url": url,
                    "mode": "2",
                    "poster": poster,
                    "icon": poster,
                    "fanart": fanart,
                    "type": ADDON_TYPE,
                    "plot": plot,
                    "isFolder": True,
                    "extras": extras
                })
                counter = counter + 1
        util.addMenuItems(menu)

    elif mode == 5:
        # Reserved / intentionally does nothing.
        pass

    elif mode == 111:
        # find playable sources in url
        #util.alert(parameters['url'])
        extras = ast.literal_eval(parameters['extras'])
        bits = util.getFile(feed, type)
        site = bits['sites'][extras['site']]
        try:
            pos = extras['pos']
        except:
            pos = 0
        # play_media may hold a 1-based index of which found source to play.
        try:
            selected_video = int(
                site['items'][extras['level']][pos]['play_media']) - 1
        except:
            selected_video = 0
        page = util.get(parameters['url'])
        link = False
        # First try resolving the page URL itself.
        try:
            link = urlresolver.resolve(parameters['url'])
        except Exception as e:
            if str(e).lower() == "sign in to confirm your age":
                util.notify("YouTube Error: Login to confirm age.")
                return False
            else:
                util.notify(str(e))
                return False
        if link:
            # play if url resolver reports true
            util.playMedia(parameters['name'],
                           parameters['poster'],
                           link,
                           force=True)
        elif any(ext in parameters['url'] for ext in filetypes):
            # play if url has a video extension
            util.playMedia(parameters['name'],
                           parameters['poster'],
                           parameters['url'],
                           force=True)
        else:
            #search for video urls
            if "urlresolver" in site and site['urlresolver'].lower(
            ) == "false":
                # Site opted out of urlresolver: look for direct file links.
                # NOTE(review): "(:?" looks like a typo for "(?:" — as written
                # it matches a literal ':' — confirm before fixing.
                regex = "\"([^\s]*?\.(:?" + "|".join(filetypes) + "))\""
                matches = re.findall(regex, page)
            else:
                # Collect embed/watch iframes plus anything urlresolver can
                # scrape from the page.
                regex = "(\/\/.*?\/embed.*?)[\?\"]"
                matches = re.findall(regex, page)
                regex = "\"((?:http:|https:)?\/\/.*?\/watch.*?)[\"]"
                matches = matches + re.findall(regex, page)
                matches2 = urlresolver.scrape_supported(page)
                #util.alert(str(matches))
                """regex="\"(https?://("+"|".join(supports)+")\..*?)\""
                matches2 = re.findall(regex, page)
                regex="\"((?:http:|https:)?\/\/.*?\/watch.*?)[\"]"
                matches3 = re.findall(regex, page)
                regex = 'https?://(.*?(?:\.googlevideo|(?:plus|drive|get|docs)\.google|google(?:usercontent|drive|apis))\.com)/(.*?(?:videoplayback\?|[\?&]authkey|host/)*.+)'
                matches4 = re.findall(regex, page)
                matches2=[ x for x in matches2 if any(sup in x for sup in supports) ]
                matches3=[ x for x in matches3 if any(sup in x for sup in supports) ]"""
                matches = matches + matches2
            util.logError(
                "''''''''''''''''''''''''''''''''''''''''''''''''''''''")
            util.logError(">>>>" + str(matches))
            # NOTE(review): IndexError if fewer sources than selected_video
            # were found (swallowed nowhere) — confirm intended.
            if isinstance(matches[selected_video], tuple):
                url = matches[selected_video][0]
            else:
                url = matches[selected_video]
            #util.alert(url)
            # Protocol-relative links need a scheme before resolving.
            if "http" not in url:
                url = "http:" + url
            link = urlresolver.resolve(url)
            if link == False:
                # Fall back to the raw URL when the resolver declines it.
                link = url
            util.playMedia(parameters['name'], parameters['poster'], link)

    elif mode == 112:
        # Download (rather than play) a found source.
        extras = ast.literal_eval(parameters['extras'])
        bits = util.getFile(feed, type)
        site = bits['sites'][extras['site']]
        page = util.get(parameters['url'])
        """if "urlresolver" in site and site['urlresolver'].lower()=="false":
            regex="\"(.*?\.mp4)\""
            matches = re.findall(regex, page)
            if matches:
                link=matches[0]
            else:"""
        regex = "\"(//\S*?(:?" + ("|".join(filetypes)) + ")\S*?)\""
        matches = re.findall(regex, page)
        if matches:
            # NOTE(review): `selected_video` is never assigned in this branch
            # (only in mode 111) — this line raises NameError as written.
            url = matches[selected_video][0]
            # NOTE(review): no else here — if url already contains "http",
            # `link` stays unbound and the `if link:` below raises NameError.
            if "http" not in url:
                link = "http:" + url
        else:
            link = urlresolver.resolve(parameters['url'])
            if not link:
                # Last resort: scrape embed/watch/google-video URLs and try to
                # resolve the first supported one.
                try:
                    regex = "(\/\/.*?\/embed.*?)[\?\"]"
                    matches = re.findall(regex, page)
                    regex = "\"((?:http:|https:)?\/\/.*?\/watch.*?)[\"]"
                    matches = matches + re.findall(regex, page)
                    regex = 'https?://(.*?(?:\.googlevideo|(?:plus|drive|get|docs)\.google|google(?:usercontent|drive|apis))\.com)/(.*?(?:videoplayback\?|[\?&]authkey|host/)*.+)'
                    matches = matches + re.findall(regex, page)
                    if matches:
                        matches = [
                            x for x in matches
                            if any(sup in x for sup in supports)
                        ]
                        if matches:
                            link = urlresolver.resolve("http:" + matches[0])
                except Exception as e:
                    util.notify(str(e))
        if link:
            import downloader
            # Saved into the user-configured download folder as <name>.mp4.
            downloader.download(
                link,
                os.path.join(xbmcaddon.Addon().getSetting('folder'),
                             parameters['name'] + ".mp4"))
        else:
            util.notify("No video found")

    elif mode == 113:
        # List every resolvable source found in the page (play_media ==
        # "multiple"); each entry re-enters as mode 114.
        menu = []
        extras = ast.literal_eval(parameters['extras'])
        bits = util.getFile(feed, type)
        site = bits['sites'][extras['site']]
        page = util.get(parameters['url'])
        matches = urlresolver.scrape_supported(page)
        #regex="(//\S*?(:?"+("|".join(filetypes))+")\S*?)"
        #matches2 = re.findall(regex, page)
        """regex="(\/\/.*?\/embed.*?)[\?\"]"
        matches2 = re.findall(regex, page)
        regex="\"(https?://("+"|".join(supports)+")\..*?)\""
        matches3 = re.findall(regex, page)
        regex = 'https?://(.*?(?:\.googlevideo|(?:plus|drive|get|docs)\.google|google(?:usercontent|drive|apis))\.com)/(.*?(?:videoplayback\?|[\?&]authkey|host/)*.+)'
        matches4 = re.findall(regex, page)
        matches2=[ x for x in matches2 if any(sup in x for sup in supports) ]
        matches3=[ x for x in matches3 if any(sup in x for sup in supports) ]
        matches=matches+matches2+matches3+matches4"""
        # De-duplicate; regex results may be tuples (group captures).
        unique = []
        for match in matches:  #+matches2:
            if isinstance(match, tuple):
                unique.append(match[0])
            else:
                unique.append(match)
        matches = list(set(unique))
        if matches:
            for match in matches:
                if "http" not in match:
                    rl = "http:" + match
                else:
                    rl = match
                menu.append({
                    "title": rl,
                    "url": rl,
                    "mode": "114",
                    "poster": parameters['poster'],
                    "icon": parameters['icon'],
                    "fanart": parameters['fanart'],
                    "type": "",
                    "plot": "",
                    "isFolder": False,
                    "isPlayable": False,
                    "extras": str(extras)
                })
        util.addMenuItems(menu)

    elif mode == 114:
        # find playable sources in url
        #util.alert(parameters['url'])
        urlresolver.relevant_resolvers()
        try:
            link = urlresolver.resolve(str(parameters['url']))
        except Exception as e:
            util.notify(str(e))
            exit()
        if link:
            # Play the resolved link; fall back to the raw URL if playback of
            # the resolved one throws.
            try:
                util.playMedia(parameters['name'], parameters['poster'], link)
            except:
                util.playMedia(parameters['name'], parameters['poster'],
                               parameters['url'])
    # NOTE(review): this chunk starts inside an `if` branch whose header is
    # outside this view (the `elif` below proves it) — the leading statements
    # presumably run when len(datasetFiles) >= 3 (train/dev/test); confirm
    # against the full file. Indentation reconstructed.
    devSentences = util.readCoNLL(datasetFiles[1], dataColumns, word2Idx,
                                  labelKey, label2Idx)
    testSentences = util.readCoNLL(datasetFiles[2], dataColumns, word2Idx,
                                   labelKey, label2Idx)
elif len(datasetFiles) == 2:
    # Only train + dev files supplied; dev doubles as the evaluation set.
    devSentences = util.readCoNLL(datasetFiles[1], dataColumns, word2Idx,
                                  labelKey, label2Idx)

print("embeddings", embeddings.shape[0],
      embeddings.shape[1])  # 11210,200 (the number of words, embedding size)
print("Train Sentences:", len(trainSentences))
# NOTE(review): label says "Test Sentences" but prints len(devSentences).
print("Test Sentences:", len(devSentences))

# Pad/truncate each sentence to num_steps and map labels to ids.
X_train, y_train = util.prepare(dataset=trainSentences,
                                labelKey=labelKey,
                                seq_max_len=num_steps)
X_dev, y_dev = util.prepare(dataset=devSentences,
                            labelKey=labelKey,
                            seq_max_len=num_steps)

# Vocabulary and label-set sizes drive the model dimensions below.
num_chars = len(word2Idx.keys())
num_classes = len(label2Idx.keys())
print(num_chars, num_classes)

######################################################
#
# Model Running
#
######################################################
        # NOTE(review): this chunk is the tail of load_submit(); its `def`,
        # `try:` and the `with ... as fin:` opening the submission file are
        # outside this view — indentation reconstructed, confirm against the
        # full file.
        upload = json.load(fin)
        # `msg` is pre-set before each step that can raise, so the except
        # block reports which step failed.
        msg = f"{submit_name} find no solution element."
        upload = upload.get("solution")
        msg = f"{submit_name} can not convert to dataframe."
        return pd.DataFrame.from_dict(upload)
    except Exception as e:
        # Report the failing step and the underlying error; falls through
        # returning None.
        print(msg)
        print(str(e))


if __name__ == '__main__':
    # Load the job-shop environment definition.
    ok, data_total = validator.import_data('jobs.json')
    if not ok:
        print("load environment setting failed.")
        sys.exit(-1)
    js = validator.JobShop(data_total)
    # Submission file defaults to submit.json when not given on the CLI.
    submit_name = sys.argv[1] if len(sys.argv) > 1 else "submit.json"
    df_up = load_submit(submit_name)
    # Bind the submitted schedule to the environment, then validate it.
    ok, msg = validator.prepare(js, df_up)
    if not ok:
        print(msg)
        sys.exit(-1)
    ok, msg = validator.check(js)
    if not ok:
        # On failure `msg` is a list of violation lines.
        for l in msg:
            print(l)
    else:
        print('check ok.')