def displayNews():
    """Display the currently extracted and archived news in the user's
    default browser, and log the event when event logging is enabled.

    Side effects: updates the ``instruct``/``track`` labels, may open a
    browser tab, and may write one row to the Event_log table.
    """
    if str(dayChoice()) == '':
        # The user has not selected a date: show an error and exit.
        instruct['text'] = 'PLEASE CHOOSE A DATE...'
        return
    elif track['text'] == '':
        # The user has not pressed the extract button: show an error and exit.
        instruct['text'] = 'PLEASE EXTRACT NEWS FIRST...'
        return
    else:
        # Otherwise, display the archived page in a new browser tab.
        archiveLink = 'BBCNewsArchive.html'
        new = 2
        webopen(archiveLink, new)
        instruct['text'] = 'EXTRACTED NEWS DISPLAYED IN BROWSER:'
        track['text'] = dayChoice()
        # Informs the user that they have currently displayed their
        # selected day's news in their browser.
    # If the user is logging events, log the news being displayed.
    global eventNumber
    if logCheck() == 1:
        # Add 1 to the eventNumber counter.
        eventNumber = eventNumber + 1
        # Parameterized statement instead of string concatenation:
        # safer and avoids quoting bugs.
        eventLogDB.execute(
            "INSERT INTO Event_log VALUES (?, ?)",
            (str(eventNumber), 'Extracted news displayed in browser'))
        # Commit the change to the database.
        connection.commit()
def makeHTML(self, filename):
    """Build the report, write it to an HTML file under the user's
    PIEthon reports directory, then open that file in the browser.

    The original built the write path with '/' and the open path with
    '\\'; both now use the same path so the browser always opens the
    file that was just written, on any platform.
    """
    self.build()
    # Compute the destination once and reuse it for write and open.
    path = expanduser('~/Documents/PIEthon/reports/') + str(filename) + ".html"
    # 'with' guarantees the handle is closed even if write() raises.
    with open(path, "w", encoding='utf-8') as fh:
        fh.write(self.html)
    webopen('file://' + path)
def cb_help_dialog():
    """Callback from the Help button.

    Displays a dialog with the option to view Help in browser.
    """
    # NOTE: the docstring is now the first statement; in the original it
    # followed logging.info() and was therefore a no-op string expression.
    logging.info('Displaying help dialog')
    dialog = DialogHelp()
    if dialog.exec_():
        webopen('https://github.com/Merkwurdichliebe/Epidemic/wiki')
def main():
    """Event loop for the distance display window.

    Reads events until the user quits; handles the Settings and
    More-Info actions and refreshes the distance list on every pass
    when a zipcode is configured.
    """
    settings = load_settings()
    window = create_window()
    distances = distance_list(settings, window)
    update_display(window, distances, settings)
    timeout_ms = REFRESH_RATE_IN_MINUTES * 60 * 1000
    while True:
        event, values = window.read(timeout=timeout_ms)
        if event in (None, 'Exit', '-QUIT-'):
            break
        if event == '-SETTINGS-':
            settings = change_settings(settings)
            save_settings(settings)
        elif event == '-MOREINFO-':
            webopen(
                r'https://www.arcgis.com/apps/opsdashboard/index.html#/bda7594740fd40299423467b48e9ecf6'
            )
        # Runs on every event (including timeouts) when configured.
        if settings['zipcode']:
            distances = distance_list(settings, window)
            update_display(window, distances, settings)
    window.close()
def main(namespace: Namespace) -> None:
    """Main function: setup the server and run it.

    Starts a pyinotify watchdog over the served root, binds the first
    free TCP port at or above ``namespace.baseport``, opens the URL in
    a browser and enters the listen loop.
    """
    listen_address = "127.0.0.1" if namespace.local else "0.0.0.0"
    lan_ip = get_lan_ip()
    baseport = namespace.baseport
    root = namespace.root
    # Setup the watchdog
    watchdog = pyinotify.WatchManager()
    mask = pyinotify.IN_MODIFY | pyinotify.IN_CREATE | pyinotify.IN_DELETE
    notifier = pyinotify.ThreadedNotifier(watchdog, EventHandler(root))
    notifier.start()
    watchdog.add_watch(root, mask, rec=True)
    # Setup the server
    http_manager = HttpManager(root)
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
        bound = False
        while not bound:
            try:
                sock.bind((listen_address, baseport))
                bound = True
            except OSError:
                # Port in use / not permitted: probe the next port.
                # (Narrowed from 'except Exception' so real programming
                # errors are not silently retried forever.)
                baseport += 1
        print(f"Connect to http://{lan_ip}:{baseport}")
        webopen(f"http://{lan_ip}:{baseport}")
        listen_loop(sock, notifier, http_manager)
def run_OnButtonClick(self):
    """Handle the Run button: analyse the collected md5s on VirusTotal,
    save the results to the output log and open it.

    Warnings and errors raised by the scan are rendered into the
    console widget instead of propagating.
    """
    try:
        if not self.md5s_dict:
            # Nothing to scan: put focus back on the file chooser and warn.
            self.file_dialog_button.focus_set()
            raise ScriptWarning(ErrorsCodes.input_file_no_md5)
        self.results = run_vt_analyse(self.md5s_dict,
                                      retrieve_apikey(self.config),
                                      self.language)
        output_file = get_output_file(self.config,
                                      self.input_file_string.get())
        # Create the output log.
        save_results(output_file, self.input_file_string.get(),
                     self.file_type, self.md5s_dict, self.results,
                     self.language)
        # Report completion and open the log.
        self.console.insert(tk.END, get_string(VariousCodes.scan_complete,
                                               self.language))
        self.console.see(tk.END)
        webopen(output_file)
    except ScriptWarning as e:
        self.console.insert(
            tk.END,
            get_string(VariousCodes.warning,
                       self.language).format(message=e.message(self.language)))
        self.console.see(tk.END)
    except ScriptError as e:
        self.console.insert(
            tk.END,
            get_string(VariousCodes.error,
                       self.language).format(message=e.message(self.language)))
        self.console.see(tk.END)
def return_error_message(message):
    """As this script is also used on Windows as an exe without cmd, I need
    something more than a print to inform the user of the exit status."""
    # The page declares UTF-8, so write the file with a matching encoding
    # (the original relied on the platform default, which can garble
    # non-ASCII messages on Windows).
    with open(log_path, 'w', encoding='utf-8') as f:
        f.write('<meta charset="UTF-8">\n')
        f.write(message)
    webopen(log_path)
    _exit(1)
def open(self, filename):
    """Open the given presentation file in a new browser tab.

    Does nothing when *filename* is empty.
    """
    if not filename:
        return
    logging.info('presentation available at {}'.format(filename))
    target = "file:///" + os.path.realpath(filename)
    logging.info('opening presentation at url {}'.format(target))
    # new=2 requests a new browser tab.
    webopen(target, new=2)
def check_web():
    """Search the machine's last output idiom on Baidu.

    Shows an info box and does nothing when there is no idiom yet.
    """
    if word_result == '':
        showinfo('网络查询', '机器没有输出任何成语。该功能只针对电脑输出的成语进行查询。')
        return
    query_url = ('https://www.baidu.com/s?ie=utf-8&f=8&rsv_bp=1&rsv_idx=1&ch=3&tn=98012088_4_dg&wd='
                 + word_result)
    webopen(query_url)
    return
def open_file(filepath):
    """Open *filepath* with the platform's preferred mechanism.

    Uses xdg-open on Linux, open on macOS, os.startfile on Windows,
    and falls back to the webbrowser module elsewhere.
    """
    if platform.startswith('win'):
        os.startfile(filepath)
    elif platform.startswith('darwin'):
        Popen(['open', filepath])
    elif platform.startswith('linux'):
        Popen(['xdg-open', filepath])
    else:
        # Unknown platform: let the browser machinery figure it out.
        webopen(filepath)
def show_auth_page():
    """Open the Google OAuth consent page in the default browser.

    Builds the auth URL from the client settings in ``cs`` with a
    localhost redirect and the read-only YouTube scope.
    """
    parts = list(urlparse(cs['auth_uri']))
    query = {
        'client_id': cs['client_id'],
        'redirect_uri': 'http://localhost:10000',
        'response_type': 'code',
        'scope': 'https://www.googleapis.com/auth/youtube.readonly',
    }
    # Index 4 of the parsed tuple is the query string.
    parts[4] = urlencode(query)
    webopen(urlunparse(parts))
def authorize(self):
    """Perform full OAuth 1.0 authentication and authorization against
    Bitbucket.

    Return:
        tuple (oauth_token, oauth_token_secret) for the access token,
        or None when the verifier or the access token could not be
        obtained.
    """
    host = get_local_host('bitbucket.org')
    # Local HTTP server that will receive the OAuth callback redirect.
    httpd, port = open_server_on_high_port(AuthenticatorHTTPServer)
    oauth_api_uri = 'https://bitbucket.org/!api/1.0/oauth'
    oauth_callback = 'http://{host}:{port}'.format(host=host, port=port)
    request_token_url = (
        '{0}/request_token?oauth_callback={oauth_callback}').format(
            oauth_api_uri, oauth_callback=oauth_callback)
    authorize_url = '{0}/authenticate'.format(oauth_api_uri)
    access_token_url = '{0}/access_token'.format(oauth_api_uri)
    consumer = self.get_consumer()
    client = oauth.Client(consumer)
    # Step 1: obtain an unauthorized request token.
    resp, content = client.request(request_token_url, 'POST')
    if resp['status'] != '200':
        print resp
        raise Exception('Invalid response {0}.'.format(resp['status']))
    request_token = dict(parse_qsl(content))
    # Step 2: Send user to the provider to authenticated and authorize.
    authorize_url = '{0}?oauth_token={1}'.format(
        authorize_url, request_token['oauth_token'])
    print "Visit in your browser and click Grant Access:"
    print authorize_url
    webopen(authorize_url)
    # Block until the provider redirects back to the local server.
    httpd.handle_request()
    token = oauth.Token(
        request_token['oauth_token'], request_token['oauth_token_secret'])
    try:
        token.set_verifier(httpd.oauth_verifier)
    except AttributeError:
        # The callback never delivered a verifier; abort quietly.
        log.error(u'Did not get OAuth verifier.')
        return
    # Step 3: trade the verified request token for an access token.
    client = oauth.Client(consumer, token)
    resp, content = client.request(access_token_url, "POST")
    if resp['status'] != '200':
        log.error(u'Unable to get access token.')
        log.debug(resp)
        log.debug(content)
        return
    access_token = dict(parse_qsl(content))
    return (access_token['oauth_token'],
            access_token['oauth_token_secret'])
def open(self, args):
    """Open the requested incident in a browser.

    With ``args.id == -1`` the newest incident's site landing page
    (scheme + host only) is opened; otherwise the incident's own URL.
    """
    if args.id != -1:
        incident = self.client.show(args.id)
        target = incident.url
    else:
        # No explicit id: derive the landing page from the latest incident.
        incident = self.client.show(1)
        parsed = urlparse(incident.url)
        target = "{}://{}".format(parsed.scheme, parsed.netloc)
    webopen(target)
def main():
    """Draw a giveaway winner from the saved comment-id list and record
    the result back into meta.json.

    Reads meta.json for the source file name and winning hash, picks
    the winner by ``hash mod participants`` and optionally opens the
    winning comment in a browser.
    """
    with open('meta.json', 'r') as f:
        meta = json.load(f)
    file_name = meta['CID_Filename']
    if meta['WinnerFromFile'] == "Truncated":  # Defaults to original file
        # BUGFIX: str.rstrip('.txt') strips *characters* (t, x, .), not
        # the suffix, and could mangle names like 'att.txt'; remove the
        # extension properly instead.
        if file_name.endswith('.txt'):
            file_name = file_name[:-4]
        file_name = file_name + '_Truncated.txt'
    print("Drawing winner from {}\nAbort if this is not correct!\n".format(
        file_name))
    with open(file_name, 'r') as f:
        comment_ids = []
        authors = []
        # Each line is 'comment_id:author'.
        for line in f:
            parts = line.strip().split(':')
            comment_ids.append(parts[0])
            authors.append(parts[1])
    # Set winning hash, obtained from API if it is not provided.
    win_hash = get_win_hash(
        meta) if meta['Win_Hash'] == '' else meta['Win_Hash']
    meta['Win_Hash'] = win_hash
    # Get winner and his/her details.
    total = (len(comment_ids))
    winner_no = (1 + (int(win_hash, 16) % total))
    winner_id = comment_ids[winner_no - 1]
    winner_link = ''.join((find_winner_thread(meta, winner_no), winner_id))
    winner = authors[winner_no - 1]
    # Print winner details.
    print("Using {} comment list!\n".format(meta['WinnerFromFile']))
    print("Total Participants: {}\nWinner: {}\nHash: {}\n".format(
        total, winner_no, win_hash))
    print("Winner Comment ID: {}".format(winner_id))
    print("Winning Comment URL: {}".format(winner_link))
    print("Winner: {}".format(winner))
    # Save winner details to meta.
    meta['Total Participants'] = total
    meta['Winner_Number'] = winner_no
    meta['Winner_ID'] = winner_id
    meta['Winner_Link'] = winner_link
    meta['Winner'] = winner
    with open('meta.json', 'w') as outfile:
        json.dump(meta, outfile, indent=4)
    # Opens link to comment if user chooses so.
    x = input("\nEnter Y/y to open winning comment...")
    if x.upper() == "Y":
        webopen(winner_link)
    x = input("Draw complete! Press Enter to exit...")
    return
def MainEventHandler(self, event):
    """Dispatch a pygame mouse event to the matching handler.

    Any event type other than the three mouse events triggers the
    easter-egg image in a new browser tab.
    """
    if event.type == MOUSEBUTTONDOWN:
        self.OnClick(event)
    elif event.type == MOUSEBUTTONUP:
        self.OnRelease(event)
    elif event.type == MOUSEMOTION:
        # Only treat motion as a drag while a button is held down.
        if self.mouseDown:
            self.OnDrag(event)
    else:
        from webbrowser import open as webopen
        webopen("https://reposti.com/i/m/bMm.jpg", new=2)
def anysoft(qwq):
    """Launch the helper tool selected by menu choice *qwq*.

    '1' runs the bundled ranked-maps downloader, '2' and '3' open the
    related GitHub projects; anything else is ignored.
    """
    if qwq == '1':
        exe_path = getdir() + '/data/ranked_download/RankedMapsDownloader.exe'
        sysrun(exe_path)
        input('回车返回')
    elif qwq == '2':
        webopen('https://github.com/wasupandceacar/osu-vs-player/')
    elif qwq == '3':
        webopen('https://github.com/veritas501/Osu-Ingame-Downloader')
    return
def view_html():
    """Open the freshly created HTML extract in the browser and, when
    the event-log checkbox is ticked, record the event in SQLite."""
    # View HTML just created. Build the path with '/' + normpath so it
    # is portable; the original hard-coded '\Extracted.html', whose
    # '\E' is a deprecated escape sequence and is wrong on non-Windows.
    general_path = normpath(getcwd())
    webopen(normpath(general_path + '/Extracted.html'))
    ###Part B###
    # Get general path for db.
    if state.get() == 1:
        conn = connect(normpath(general_path + '/event_log.db'))
        try:
            sql = "INSERT INTO Event_Log(Description) VALUES('Extracted news displayed in web browser')"
            conn.execute(sql)
            conn.commit()
        finally:
            # Close the connection even if the insert/commit fails.
            conn.close()
def scrapeYoutube(self):
    """Collect the YouTube links scraped from the page and open the
    first one in a new browser tab.

    Requires ``self.go_no_go == 'go'`` (i.e. a working connection);
    otherwise a spoken error is produced.
    """
    if self.go_no_go != 'go':
        speak('No internet connection couldn\'t access')
        return
    # Announce what is about to be opened.
    speak('Opening first video for ' + self.search_query + ' on YouTube',
          self.speak_type)
    for v in self.vids:  # every video anchor found in the html
        if not self.is_absolute(v['href']):
            # Relative link: prefix the site root and keep it.
            self.videolist.append('https://www.youtube.com' + v['href'])
    webopen(self.videolist[0], new=2)  # open the first collected url
    print('Done!')  # finish message
def DrawWidgets(self):
    """Lay out the login form: id/password entries, a login button and
    a link button that opens the Boini site."""
    Label(self, text="아이디").grid(row=0, column=0)
    Entry(self, textvariable=self.ID).grid(row=0, column=1)
    Label(self, text="비밀번호").grid(row=1, column=0)
    # Mask password input with '*'.
    Entry(self, textvariable=self.PW, show="*").grid(row=1, column=1)
    Button(self, text="로그인", command=self.Login_Handler).grid(row=2, column=1)
    # Opens the site in the default browser.
    Button(self, text="보인아이",
           command=lambda: webopen("https://boini.net")).grid(row=2, column=0)
def menu():
    """Render the main menu, read a choice and dispatch it.

    Choices 1 and 2 hand off to the checker/settings; choice 3 opens
    the Discord invite and redraws the menu; anything else redraws.
    """
    set_title(f"Ahsokify Spotify Checker - Geographs#0501 & Grogu#0501")
    title()
    for num, label in (("1", "Spotify Checker"),
                       ("2", "Settings"),
                       ("3", "Join Discord Server")):
        print(f" {theme2}({theme1}{num}{theme2}) {label}")
    choice = input(f"{theme2}>{theme1} ")
    if choice == "1":
        Spotify().start()
    elif choice == "2":
        settings().getSettings()
    else:
        if choice == "3":
            webopen(discord)
        # Redraw the menu after opening the invite or on bad input.
        menu()
def show(self, *args, **kwargs):
    """Open this chart's URL in a web browser.

    Any extra arguments are forwarded to ``webbrowser.open``; its
    boolean result is returned.
    """
    chart_url = str(self)
    return webopen(chart_url, *args, **kwargs)
def show(self, *args, **kwargs):
    """Open this chart's URL in a web browser.

    Any extra arguments are forwarded to ``webbrowser.open``; its
    boolean result is returned.
    """
    from webbrowser import open as webopen
    chart_url = unicode(self)
    return webopen(chart_url, *args, **kwargs)
def __init__(self):
    """Build and run the themed updater window for F_Reference_H."""
    ThemedTk.__init__(self, theme='black')
    self.title('Обновление F_Reference_H')
    self.geometry('500x140')
    # Center the window on the screen, shifted 150px to the left.
    x = (self.winfo_screenwidth() - self.winfo_reqwidth()) / 2
    y = (self.winfo_screenheight() - self.winfo_reqheight()) / 2
    self.wm_geometry("+%d+%d" % (x - 150, y))
    self.resizable(width=False, height=False)
    self.iconphoto(True, PhotoImage(file='settings/ico/ico_main.png'))
    flow_hack_png = Image.open(f'settings/ico/mini_flowhack.png')
    flow_hack_png = ImageTk.PhotoImage(flow_hack_png)
    self.frame = Frame(self)
    self.frame.place(relwidth=1, relheight=1)
    # Two clickable logo labels in the corners linking to the VK page.
    flow_1 = Label(self.frame, image=flow_hack_png, cursor='heart')
    flow_1.bind('<Button-1>', lambda no_matter: webopen(VK))
    flow_1.place(relx=.09, rely=.085, anchor='center')
    flow_2 = Label(self.frame, image=flow_hack_png, cursor='heart')
    flow_2.bind('<Button-1>', lambda no_matter: webopen(VK))
    flow_2.place(relx=.91, rely=.085, anchor='center')
    self.lbl_done = Label(self.frame, text='ОБНОВЛЕНИЕ',
                          font=('Times New Roman', 12, 'bold italic'))
    self.lbl_done.place(relx=.5, rely=.1, anchor='c')
    self.lable_second = Label(
        self.frame,
        text='Нам понадобится интернет!\nМы всё сделаем сами, это не '
        'займё много времени!',
        font=('Times New Roman', 10, 'bold italic'),
        justify='center')
    self.lable_second.place(relx=.5, rely=.35, anchor='c')
    self.btn_update = Button(self.frame, text='Обновить', cursor='hand1',
                             command=self.updater_window)
    self.btn_update.place(relx=.5, rely=.65, anchor='c')
    # Clicking the license notice opens the site with the agreement.
    self.license = Label(
        self.frame, cursor='hand1',
        text='Нажимая "Обновить" вы принимаете лицензионное соглашение',
        font=('Times New Roman', 10, 'bold italic'),
        foreground='black')
    self.license.bind('<Button-1>', lambda no_matter: webopen(SAIT))
    self.license.place(relx=.5, rely=.92, anchor='c')
    # Blocks until the window is closed.
    self.mainloop()
def show(self, *args, **kwargs):
    """Open this chart's URL in a web browser.

    Any extra arguments are forwarded to ``webbrowser.open``; its
    boolean result is returned.
    """
    from webbrowser import open as webopen
    chart_url = str(self)
    return webopen(chart_url, *args, **kwargs)
def display():
    """Show the extracted news page in the browser and optionally log
    the event to the SQLite event database."""
    # Imported 'exists' function from os.path.
    fullpath = getcwd() + normpath('/RTNews.html')
    if exists(fullpath):
        webopen('file://' + fullpath)
        messenger.config(text='Extracted News Displayed!')
    else:
        messenger.config(text='Cannot find Extracted News!')
    # If the event log button is turned on, record that the display
    # button was pressed.
    try:
        if check.get() == 1:
            sql_statement = template.replace('DESCRIPTION', log_description[1])
            event_db.execute(sql_statement)
            connection.commit()
    except:
        # Any database failure is reported in the status label.
        messenger.config(text=sqlite_error)
def run_OnButtonClick(self):
    """Handle the Run button: analyse the collected md5s on VirusTotal,
    save the results and open the log; render warnings/errors into the
    console widget."""
    try:
        if len(self.md5s_dict.keys()) > 0:
            self.results = run_vt_analyse(self.md5s_dict,
                                          retrieve_apikey(self.config),
                                          self.language)
            output_file = get_output_file(self.config,
                                          self.input_file_string.get())
            # Create the output log.
            save_results(output_file, self.input_file_string.get(),
                         self.file_type, self.md5s_dict, self.results,
                         self.language)
            # Report completion and open the log.
            self.console.insert(
                tk.END, get_string(VariousCodes.scan_complete, self.language))
            self.console.see(tk.END)
            webopen(output_file)
        else:
            # No md5s collected: point the user back at the file chooser.
            self.file_dialog_button.focus_set()
            raise ScriptWarning(ErrorsCodes.input_file_no_md5)
    except ScriptWarning as e:
        self.console.insert(
            tk.END,
            get_string(
                VariousCodes.warning,
                self.language).format(message=e.message(self.language)))
        self.console.see(tk.END)
    except ScriptError as e:
        self.console.insert(
            tk.END,
            get_string(
                VariousCodes.error,
                self.language).format(message=e.message(self.language)))
        self.console.see(tk.END)
def display_news():
    """Open the saved WSJ page in the browser and, when the event
    logger is active, record the event in the SQLite log."""
    # Forward slash keeps the literal portable; normpath converts it to
    # the platform separator. (The original '\WSJ.html' relied on the
    # deprecated '\W' escape and is wrong on non-Windows systems.)
    fullpath = getcwd() + normpath('/WSJ.html')
    # import exists from os.path
    from os.path import exists
    if exists(fullpath):
        webopen('file://' + fullpath)
        messenger.config(text=displaying_webpage)
    else:
        messenger.config(text=display_error)
    # If Event logger is activated and news displayed:
    try:
        if check.get() == 1:
            connection = connect(database='event_log.db')
            log_db = connection.cursor()
            event_executed = log_events[3]
            sql_query = sqlite3_template.replace('EVENT', event_executed)
            log_db.execute(sql_query)
            connection.commit()
    except:
        # Any database failure is reported in the status label.
        messenger.config(text=sqlite3_error)
def run_server(config: Config) -> None:
    """Start the threaded HTTP server, advertise its URLs (console,
    optional browser tab and QR codes) and serve until Ctrl-C."""
    CustomHTTPRequestHandler.initialize(config)
    server = ThreadingHTTPServer(("", config.base_port),
                                 CustomHTTPRequestHandler)
    try:
        addresses = list_ips()
        if not addresses:
            print(
                "Couldn't find any ip for this device, are you connected to the network?"
            )
        else:
            urls = [f"http://{ip}:{config.base_port}" for ip in addresses]
            print("Connect to:")
            if config.open_browser:
                # Open the first reachable URL automatically.
                webopen(urls[0])
            for url in urls:
                print(f"- {url}")
            if config.show_qr:
                show_qrs(urls)
        server.serve_forever()
    except KeyboardInterrupt:
        print()
        print("Goodbye!")
def display_news():
    """Open the extracted archive in the browser and, when the log
    checkbox is ticked, append a row to the Event_Log table."""
    # allow access to these global variables
    global has_archive
    global event_checkbox
    global log_counter
    global connection
    # Check if it has any extracted archives.
    if (not has_archive):
        instruction_label['text'] = "Please choose a date to archive..."
        return
    # Define path to extracted archive.
    path = normpath(getcwd() + '/extracted_archive.html')
    # Open Extracted archive.
    webopen(path)
    # Change instruction label.
    instruction_label[
        'text'] = "Choose another day's news you would like to extract..."
    # If the checkbox is ticked, add an entry to the SQL database.
    if (event_checkbox.get() == 1):
        log_counter += 1
        # Parameterized statement instead of string concatenation:
        # safer and avoids quoting bugs. (The logged text keeps the
        # original spelling for compatibility with existing rows.)
        event_log.execute(
            "INSERT INTO Event_Log VALUES(?, 'Extracted news displayed in web broswer')",
            (log_counter,))
        connection.commit()
def main():
    """Draw the giveaway winner from the truncated comment list using
    the hash saved in meta.json, and write the result back."""
    with open('meta.json', 'r') as f:
        meta = json.load(f)
    file_name = meta['CID_Filename']
    win_hash = meta['Win_Hash']
    # BUGFIX: str.rstrip('.txt') strips *characters* (t, x, .), not the
    # suffix, and could mangle names like 'att.txt'; remove the
    # extension properly instead.
    base_name = file_name[:-4] if file_name.endswith('.txt') else file_name
    with open(base_name + '_Truncated.txt', 'r') as f:
        comment_ids = [line.strip() for line in f]
    if win_hash == '':
        x = input("Winning hash has not been entered into JSON!")
        exit(1)
    total = (len(comment_ids))
    winner_no = (1 + (int(win_hash, 16) % total))
    winner_id = comment_ids[winner_no - 1]
    winner_link = ''.join((find_winner_thread(meta, winner_no), winner_id))
    winner = get_winner_name(reddit=init_reddit(), cid=winner_id)
    print("Total Participants: {}\nWinner: {}\nHash: {}".format(
        total, winner_no, win_hash))
    print("Winner Comment ID: {}".format(winner_id))
    print("Winning Comment URL: {}".format(winner_link))
    print("Winner: {}".format(winner))
    # Save winner details to meta.
    meta['Total Participants'] = total
    meta['Winner_Number'] = winner_no
    meta['Winner_ID'] = winner_id
    meta['Winner_Link'] = winner_link
    meta['Winner'] = winner
    with open('meta.json', 'w') as outfile:
        json.dump(meta, outfile, indent=4)
    # Open the winning comment if the user chooses to.
    x = input("Enter Y to open winning comment...")
    if x.upper() == "Y":
        webopen(winner_link)
def entry_add(self, title, date, summary, url):
    """Display entry and return the toggleframe and htmlframe."""

    def unwrap(event):
        # Grow the html widget to fit its rendered content when the
        # toggled frame is opened.
        l.update_idletasks()
        try:
            h = l.html.bbox()[-1]
        except TclError:
            # bbox can fail while the widget is not yet realized.
            pass
        else:
            l.configure(height=h + 2)

    def resize(event):
        # Keep the html widget sized to its content while visible.
        if l.winfo_viewable():
            try:
                h = l.html.bbox()[-1]
            except TclError:
                pass
            else:
                l.configure(height=h + 2)

    # convert date to locale time
    formatted_date = format_datetime(datetime.strptime(
        date, '%Y-%m-%d %H:%M').astimezone(tz=None),
        'short', locale=getlocale()[0])
    tf = ToggledFrame(self.display,
                      text="{} - {}".format(title, formatted_date),
                      style='widget.TFrame')
    # HTML summary rendered inside the collapsible frame.
    l = HtmlFrame(tf.interior, height=50, style='widget.interior.TFrame')
    l.set_content(summary)
    l.set_style(self._stylesheet)
    l.set_font_size(self._font_size)
    tf.interior.configure(style='widget.interior.TFrame')
    tf.interior.rowconfigure(0, weight=1)
    tf.interior.columnconfigure(0, weight=1)
    l.grid(padx=4, sticky='eswn')
    # 'Open' launches the entry's URL in the default browser.
    Button(tf.interior, text='Open', style='widget.TButton',
           command=lambda: webopen(url)).grid(pady=4, padx=6, sticky='e')
    tf.grid(sticky='we', row=len(self.entries), pady=2, padx=(8, 4))
    tf.bind("<<ToggledFrameOpen>>", unwrap)
    l.bind("<Configure>", resize)
    return tf, l
self.send_header('Access-Control-Allow-Origin', origin) self.send_header('Access-Control-Allow-Methods', 'GET,POST') self.send_header('Access-Control-Max-Age', '86400') self.end_headers() def _sendHttpBody(self, data, contentType='application/json'): self._sendHttpHeader(contentType) body = b'' if isinstance(data, bytes): body = data elif isinstance(data, str): body = data.encode('utf-8', errors='ignore') else: body = json.dumps(data).encode('utf-8', errors='ignore') self.wfile.write(body) if __name__ == "__main__": freeze_support() try: server_address = ('127.0.0.1', PORT) with HTTPServer(server_address, MyHTTPRequestHandler) as httpd: httpd.socket = ssl.wrap_socket(httpd.socket, certfile=GUI_DIR + '/server.key', server_side=True) print("HTTP server is starting at port " + repr(PORT) + '...') print("Press ^C to quit") print("https://127.0.0.1:8080") webopen('https://127.0.0.1:8080', new=0, autoraise=True) httpd.serve_forever() except KeyboardInterrupt: print("Shutting down server...")
def openurl(url):
    """Normalize *url* via parse_fullurl and open its lowercase form
    in the default browser."""
    from webbrowser import open as webopen
    full_url = parse_fullurl(url)
    webopen(full_url.lower())
def main(refresh_minutes):
    """COVID dashboard event loop.

    Builds the window from saved settings, then reacts to settings /
    location / forecast / animation events until the user quits,
    refreshing the data on every timeout when not animating.
    """
    refresh_time_milliseconds = refresh_minutes * 60 * 1000
    settings = load_settings()
    sg.theme(settings['theme'])
    # Pick the CSV source according to the 'data source' setting.
    data_link = LINK_CONFIRMED_DATA if settings.get(
        'data source', 'confirmed') == 'confirmed' else LINK_DEATHS_DATA
    loc_data_dict = prepare_data(data_link)
    num_data_points = len(loc_data_dict[("Worldwide", "Total")])
    keys = loc_data_dict.keys()
    countries = set([k[0] for k in keys])
    chosen_locations = settings.get('locations', [])
    if not chosen_locations:
        # First run: ask the user which locations to track.
        chosen_locations = choose_locations(countries, [])
        save_settings(settings, chosen_locations)
    window = create_window(settings)
    window['-SLIDER-'].update(range=(0, num_data_points - 1))
    window['-REWIND MESSAGE-'].update(f'Rewind up to {num_data_points-1} days')
    update_window(window, loc_data_dict, chosen_locations, settings, 0, 1,
                  DEFAULT_GROWTH_RATE)
    animating, animation_refresh_time = False, 1.0
    while True:  # Event Loop
        # Short timeout while animating so the slider keeps advancing.
        timeout = animation_refresh_time if animating else refresh_time_milliseconds
        event, values = window.read(timeout=timeout)
        if event in (None, 'Exit', '-QUIT-'):
            break
        if event == '-SETTINGS-':  # "Settings" at bottom of window
            settings = change_settings(settings)
            save_settings(settings, chosen_locations)
            sg.theme(
                settings['theme'] if settings.get('theme') else sg.theme())
            new_data_link = LINK_CONFIRMED_DATA if settings.get(
                'data source',
                'confirmed') == 'confirmed' else LINK_DEATHS_DATA
            if new_data_link != data_link:
                # Data source changed: re-download before rebuilding.
                data_link = new_data_link
                loc_data_dict = prepare_data(data_link)
            window.close()
            window = create_window(settings)
            window['-SLIDER-'].update(range=(0, num_data_points - 1))
            window['-REWIND MESSAGE-'].update(
                f'Rewind up to {num_data_points-1} days')
        elif event == '-LOCATIONS-':  # "Location" text at bottom of window
            chosen_locations = choose_locations(countries, chosen_locations)
            save_settings(settings, chosen_locations)
        elif event == '-FORECAST-':  # Changed Forecast checkbox
            # Double/halve the row count to make room for forecast plots.
            settings['rows'] = settings['rows'] * 2 if values[
                '-FORECAST-'] else settings['rows'] // 2
            settings['forecasting'] = values['-FORECAST-']
            save_settings(settings, chosen_locations)
            window.close()
            window = create_window(settings)
            window['-SLIDER-'].update(range=(0, num_data_points - 1))
            window['-REWIND MESSAGE-'].update(
                f'Rewind up to {num_data_points - 1} days')
        elif event == '-SOURCE LINK-':  # Clicked on data text, open browser
            webopen(
                r'https://github.com/CSSEGISandData/COVID-19/tree/master/csse_covid_19_data/csse_covid_19_time_series'
            )
        elif event == '-RAW DATA-':
            sg.Print(loc_data_dict[("Worldwide", "Total")])
        elif event == '-ANIMATE-':
            animating = values['-ANIMATE-']
            animation_refresh_time = int(values['-ANIMATION SPEED-'])
        if animating:
            # Step the rewind slider backwards, wrapping to the end.
            new_slider = values['-SLIDER-'] - 1 if values[
                '-SLIDER-'] else num_data_points
            window['-SLIDER-'].update(new_slider)
        if event in (sg.TIMEOUT_KEY, '-REFRESH-') and not animating:
            sg.popup_quick_message('Updating data', font='Any 20')
            loc_data_dict = prepare_data(data_link)
            num_data_points = len(loc_data_dict[("Worldwide", "Total")])
        if values['-FORECAST-']:
            try:
                growth_rate = float(values['-GROWTH RATE-'])
            except:
                # Non-numeric input: reset to the neutral growth rate.
                growth_rate = 1.0
                window['-GROWTH RATE-'](1.0)
            future_days = int(values['-FUTURE SLIDER-'])
        else:
            growth_rate = future_days = 0
        update_window(window, loc_data_dict, chosen_locations, settings,
                      int(values['-SLIDER-']), future_days, growth_rate)
    window.close()
def main():
    """Command-line entry point: scan the md5s found in a log file
    against VirusTotal and open an HTML report of the results."""
    parser = ArgumentParser(usage="usage: %(prog)s -f path_to_file [options]")
    parser.add_argument("-f", "--file", help="file to use", action="store",
                        dest="path_to_file", required=True)
    # Keep -k option for retro compatibility
    parser.add_argument("-k", "--key", help="Only used for retrocompatibility",
                        action="store", dest="dummy")
    args = parser.parse_args()
    config = {}
    results = {}
    # Get the files paths
    input_file = args.path_to_file.strip()
    # Init output file to "/tmp/errors_vt_scan.html" and it's windows equivalent
    output_file = get_output_file({}, "errors.txt")
    # Retrieve language based on locale
    config["language"] = get_language_from_locale()
    print(get_string(VariousCodes.vt_scan_title,
                     config["language"]).format(version=VERSION))
    try:
        # Load config (stored next to the script itself).
        config_file = join(dirname(abspath(expanduser(sys.argv[0]))),
                           config_file_name)
        config = load_config(config_file)
        # Use locale language if no config language
        if "language" not in config.keys():
            config["language"] = get_language_from_locale()
        # Retrieve apikey and check its validity
        apikey = retrieve_apikey(config)
        # Get the report lines
        print(get_string(VariousCodes.file_opening,
                         config["language"]).format(file=input_file))
        report_content = get_report_content(input_file)
        line_list = report_content.split("\n")
        # Detect the logFile type from its first line.
        file_type = get_file_type(line_list[0])
        print(get_string(VariousCodes.file_type,
                         config["language"]).format(type=file_type))
        output_file = get_output_file(config, input_file)
        # Find the md5s in the file
        md5s_dict = find_md5_in_file(report_content, file_type)
        md5_number = len(md5s_dict.keys())
        print(get_string(VariousCodes.file_md5s_nb,
                         config["language"]).format(nb_md5s=md5_number))
        if md5_number != 0:
            # Search on VT for each md5 and store the results
            results = run_vt_analyse(md5s_dict, apikey, config["language"])
            # Create the output log
            save_results(output_file, input_file, file_type, md5s_dict,
                         results, config["language"])
            print(get_string(VariousCodes.scan_complete,
                             config["language"]))
    except ScriptError as e:
        # Render the error both to the console and to the HTML report.
        error_message = get_string(
            VariousCodes.error,
            config["language"]).format(message=e.message(config["language"]))
        print(error_message)
        with open(output_file, 'w', encoding='utf-8') as f:
            f.write('<meta charset="UTF-8">\n')
            f.write(error_message)
    except ScriptWarning as e:
        error_message = get_string(
            VariousCodes.warning,
            config["language"]).format(message=e.message(config["language"]))
        print(error_message)
        with open(output_file, 'w', encoding='utf-8') as f:
            f.write('<meta charset="UTF-8">\n')
            f.write(error_message)
    # Open the log
    webopen(output_file)
def on_help(self, widget):
    """Open the project's HOWTO page in the default browser."""
    from webbrowser import open as webopen
    howto_url = 'https://github.com/simukis/Manga-Fox-Grabber/blob/master/HOWTO'
    webopen(howto_url)
def OnClicked(notification, signal_text):
    """Notification click handler: open the video URL, dismiss the
    notification and stop the main loop."""
    global loop
    webopen(ytUrl)
    notification.close()
    loop.quit()
# Do not edit this value! ver = "1.4.0" print "Batch Whitelist Editor v"+ver+" by GarethPW" usuc = not check_for_updates if check_for_updates: info("Checking for a new version...") try: if ver == urllib2.urlopen("http://garethpw.net/dev/bwe/currentver.txt").read(): usuc = True info("Version is up to date!") else: info("Version is outdated! Please download the latest version from the website.") sleep(0.5) webopen("http://garethpw.net/repo/nav/misc/Batch%20Whitelist%20Editor/") except urllib2.HTTPError: info("Failed to check for an update!",1) usuc = True else: info("Update checking is disabled! This is not recommended.",1) sleep(0.5) if usuc: fp = [] wl = [] info("Loading whitelist.json...") while True: try:
def run(options):
    """Scan the md5s found in a log file against VirusTotal and open an
    HTML report of the results in the browser."""
    # Get the input file
    path_to_file = options.path_to_file.replace("\n", "")
    print("The input file is %s" % path_to_file)
    # Get the apikey
    apikeys = get_apikeys(options.path_to_apikey, log_path)
    # Handle issues with files encoding
    # OTL logs files comes formatted in utf-16-le encoding...
    try:
        with open(path_to_file, 'r') as f:
            content = f.read()
    except UnicodeDecodeError:
        # Retry with the OTL encoding.
        with open(path_to_file, 'r', encoding='utf-16-le') as f:
            content = f.read()
    except:
        # Anything else (missing file, permissions...): report and exit.
        return_error_message("Error while opening file: %s" % path_to_file)
    line_list = content.split("\n")
    # Detect the logFile type from its first line.
    file_type = get_file_type(line_list[0])
    print("The input file is detected as a %s log." % file_type)
    # Find the md5s in the file
    md5s_list = find_md5_in_file(line_list, file_type)
    md5_number = len(md5s_list)
    if md5_number == 0:
        print("Found 0 md5 in %s" % path_to_file)
        with open(log_path, 'w') as f:
            f.write("<h2>VT_Scan by Chapi:</h2></br>")
            f.write("Found <b>0 different md5s</b> in %s.</br>" % path_to_file)
    else:
        print("Found %s different md5s in %s." % (md5_number, path_to_file))
        # Search on VT for each md5 and store the results
        results = {"unknows": [], "negatives": [], "positives": []}
        i = 0
        apikeys_number = len(apikeys)
        # Search on VT for each md5 by group of 4, rotating api keys.
        while len(md5s_list) >= 4:
            run_vt_analyse(md5s_list[0:4], apikeys[i % apikeys_number],
                           results, log_path)
            md5s_list = md5s_list[4:]
            # The VirusTotal public API allow 4 request each minute,
            # therefore we should wait 15sec between each request.
            i += 1
        # Analyse the remaining (fewer than 4) md5s.
        run_vt_analyse(md5s_list, apikeys[i % apikeys_number], results,
                       log_path)
        # Create the output log: one HTML table per result category.
        with open(log_path, 'w') as f:
            f.write('<meta charset="UTF-8">\n')
            f.write('<style>\ntable, th, td {\n border: 1px solid black;\n border-collapse: collapse;\n}\nth, td {\n padding: 5px;\n}\n</style>\n')
            f.write("<h2>VT_Scan by Chapi:</h2></br>\n")
            f.write("The input file is <b>%s</b></br>\n" % path_to_file)
            f.write("The input file is detected as a <b>%s</b> log.</br>\n" % file_type)
            f.write("Found <b>%s different md5s</b>.</br>\n" % md5_number)
            f.write("<h4></br>VirusTotal nonzero detections (%s)</br></h4>\n" % len(results["positives"]))
            f.write(' <table>\n <tr>\n <th>Result</th>\n <th>Filename</th>\n <th>MD5</th>\n </tr>\n')
            for result in results["positives"]:
                f.write('<tr><td>%s/%s</td><td><a href=%s target="_blank">%s</a></td><td>%s</td></tr>\n' % result)
            f.write('</table>\n')
            f.write("<h4></br>VirusTotal unknown files (%s)</br></h4>\n" % len(results["unknows"]))
            f.write(' <table>\n <tr>\n <th>Filename</th>\n <th>MD5</th>\n </tr>\n')
            for result in results["unknows"]:
                f.write("<tr><td>%s</td><td>%s</td></tr>\n" % result)
            f.write('</table>\n')
            f.write("<h4></br>VirusTotal negative results (%s)</br></h4>\n" % len(results["negatives"]))
            f.write(' <table>\n <tr>\n <th>Result</th>\n <th>Filename</th>\n <th>MD5</th>\n </tr>\n')
            for result in results["negatives"]:
                f.write('<tr><td>%s/%s</td><td><a href=%s target="_blank">%s</a></td><td>%s</td></tr>\n' % result)
            f.write('</table>\n')
            f.write("</br></br>\nEnd of analysis.")
    print("### End of analysis.")
    # Open the log
    webopen(log_path)
return count def retweetCount(tweet): return tweet.retweet_count #Consumer token and secret are specific to the application #Can be found in dev.twitter.com/apps consumer_token = "pDeVZbNzK6HXocUuhwLqBg" consumer_secret = "Itj45FWSmMr0VmrNPJuO2KIaIt3hzayY2ywVteh2M" #To get authorization from user using OAuth auth = tweepy.OAuthHandler(consumer_token,consumer_secret) auth_url = auth.get_authorization_url(signin_with_twitter=True) # print "Authorize: " + auth_url webopen(auth_url) verifier = raw_input('PIN: ').strip() auth.get_access_token(verifier) access_token = auth.access_token.key access_secret = auth.access_token.secret # access_token = "45142783-KrrjDjWf0M3OKuq1Ckb9Bi6WpNX9ZQnhurPM1wxDA" # access_secret = "b6a6OHD39u1GNJMfkTp5LGRWCk7WWgTZgPkE6sgp8wo" #Access the user's account using the OAuth credentials auth.set_access_token(access_token,access_secret) api = tweepy.API(auth) #authenticated api module if api.me().name is not None: print "Successfully authenticated"