def comment(self, roll=1):
    """Maybe post one generated comment on a random hot submission.

    roll: probability (0-1) fed to chance() gating whether we act at all.
    Lazily initializes the comment engine on first use. Reply errors from
    the Reddit API are logged and swallowed.
    """
    if not self.ready:
        log.info("comments need to be initialized")
        self.init()
    if chance(roll):
        log.info("going to make a comment")
        # keep searching posts until we find one with comments
        post_with_comments = False
        while not post_with_comments:
            # pick a subreddit to comment on
            subreddit = get_subreddit(getsubclass=True)
            # get a random hot post from the subreddit
            post = random.choice(list(subreddit.hot()))
            # replace the "MoreReplies" with all of the submission replies
            post.comments.replace_more(limit=0)
            if len(post.comments.list()) > 0:
                post_with_comments = True
        try:
            # choose if we're replying to the post or to a comment
            # NOTE(review): this branch looks inverted — the config key is
            # named reddit_reply_to_comment, yet a successful roll replies
            # to the POST. Confirm intent against the config docs.
            if chance(self.config.get('reddit_reply_to_comment')):
                # reply to the post with a response based on the post title
                log.info('replying directly to post')
                post.reply(self.comments.get_reply(post.title))
            else:
                # get a random comment from the post
                comment = random.choice(post.comments.list())
                # reply to the comment
                log.info('replying to comment')
                comment.reply(self.comments.get_reply(comment.body))
        except APIException as e:
            log.info(f"error commenting: {e}")
def createAccessPoint(interface, ssid):
    """Broadcast a WPA2 beacon frame advertising `ssid` on `interface`.

    Blocks waiting for an Enter keypress, then transmits the beacon in a
    loop every 0.10s until interrupted.
    NOTE(review): raw_input implies this script targets Python 2.
    """
    # 802.11 management frame (type=0), subtype 8 = beacon, broadcast dest
    mgmt = Dot11(type=0, subtype=8,
                 addr1='ff:ff:ff:ff:ff:ff',
                 addr2='22:22:22:22:22:22',
                 addr3='33:33:33:33:33:33')
    beacon = Dot11Beacon(cap='ESS+privacy')
    essid_elt = Dot11Elt(ID='SSID', info=ssid, len=len(ssid))
    # RSN information element advertising WPA2-PSK
    rsn_payload = ('\x01\x00'          # RSN version 1
                   '\x00\x0f\xac\x02'  # group cipher suite
                   '\x02\x00'          # 2 pairwise cipher suites follow
                   '\x00\x0f\xac\x04'
                   '\x00\x0f\xac\x02'
                   '\x01\x00'          # 1 AKM suite follows
                   '\x00\x0f\xac\x02'
                   '\x00\x00')         # RSN capabilities
    rsn_elt = Dot11Elt(ID='RSNinfo', info=rsn_payload)
    frame = RadioTap() / mgmt / beacon / essid_elt / rsn_elt
    log.info("Beacon Frame created with SSID: '" + ssid + "'")
    log.info("Transmitting Beacon through interface '" + interface + "'")
    raw_input("\nPress Enter to start Access Point '" + ssid + "'\n")
    log.success("Access Point Created!")
    sendp(frame, iface=interface, inter=0.10, loop=1)
def _ps_search(self, subreddit, before=None, after=None, score=None, limit=1):
    """Query the pushshift submission-search API.

    before/after: unix-timestamp bounds for the search window; when omitted,
    they default to a one-day window roughly a year ago. score: minimum
    score filter (default 5000). limit: maximum number of results.
    Returns the list of result dicts, or None on any request/parse error.
    """
    cur_time = int(time.time())
    # BUG FIX: these previously read `default if X is None else None`,
    # which threw away any explicitly-passed argument. Defaults now apply
    # only when the caller omitted the argument.
    after = (cur_time - YEAR) if after is None else after
    before = (cur_time - (YEAR - DAY)) if before is None else before
    score = 5000 if score is None else score
    url = f"https://api.pushshift.io/reddit/search/submission/?subreddit={subreddit}"
    url = url + (f"&before={before}" if before else "")
    url = url + (f"&after={after}" if after else "")
    url = url + (f"&score>={score}" if score else "")
    url = url + (f"&limit={limit}" if limit else "")
    url = url + (f"&author!=[deleted]&selftext:not=[deleted]")  # avoids deleted posts
    log.info(f"pushshift-url: {url}")
    try:
        return requests.get(url).json().get("data", [])
    except Exception:
        # unable to get data from pushshift — treat as "no results"
        return None
def prefer_envar(configs: dict) -> dict:
    """Overlay config values with matching environment variables.

    For each key, looks up ENVAR_PREFIX + key (lowercased) in the
    environment and, if set to a non-empty value, replaces the config
    entry. Mutates and returns `configs`.
    """
    for config in list(configs):
        config_envar = f"{ENVAR_PREFIX}{config}".lower()
        # FIX: read the environment once instead of twice (avoids a
        # redundant lookup and a theoretical read-read race).
        value = os.environ.get(config_envar)
        if value:
            configs[config] = value
            log.info(f"loading {config_envar} from envar. Value: {configs.get(config)}")
        else:
            log.debug(f"no environment config for: {config_envar}")
    return configs
def tick(self):
    """One scheduler tick: roll each action's probability and run winners.

    Prints the live roll on a carriage-return line for a console ticker.
    Does nothing while the bot's sleep schedule says to sleep.
    """
    if not should_we_sleep():
        # FIX: removed the unused `report = f""` local (dead code).
        for action in self.actions:
            roll = random.random()
            result = roll < self.config[action.name]
            print(
                f"{roll} < {self.config[action.name]} = {result} ",
                end="\r")
            if result:
                log.info(f"\nrunning action: {action.name}")
                action.call()
def should_we_sleep(): CHECKS = [ True for schedule in BOT_SCHEDULE if is_time_between(schedule[0], schedule[1]) ] # check if any of the time between checks returned true. # if there's a True in the list, it means we're between one of the scheduled times # and so this function returns False so the bot doesn't sleep if True in CHECKS or not CONFIG.get('reddit_sleep_schedule'): # no need to sleep - the bot is within one of the time ranges return False else: log.info("it's sleepy time.. zzzzz :snore: zzzz") whats_left = [] TIME_LEFT = [schedule[0] for schedule in BOT_SCHEDULE] for time_stamp in TIME_LEFT: # log.info(time_stamp) next_start = datetime.datetime.combine(datetime.date.today(), time_stamp) # log.info(f"next start: {next_start}") ts = int(next_start.timestamp()) # if this goes negative then the next start is probably tomorrow if ts < int(time.time()): next_start = datetime.datetime.combine( (datetime.date.today() + datetime.timedelta(days=1)), time_stamp) ts = next_start.timestamp() # collect all the seconds left for each time schedule to start # log.info(f"ts: {ts}") # log.info(f"time: {int(time.time())}") whats_left.append(ts - int(time.time())) #remove negative values and # get the shortest duration of time left before starting # log.info(whats_left) whats_left = [item for item in whats_left if item >= 0] # log.info(whats_left) time_left = int(min(whats_left)) if time_left > 600: log.info( f"waking up in: {datetime.timedelta(seconds=time_left)} at {next_start}" ) sleep_time = int(time_left / 3) # have the bot sleep for a short while instead of tons of messages every second time.sleep(sleep_time) return True
def _init(self):
    """Verify Reddit credentials, run a shadowban check, and mark ready."""
    # check if account is set; sys.exit raises, so no else-branch is needed
    user = self.api.user.me()
    if user is None:
        log.info("User auth failed, Reddit bot shutting down")
        sys.exit()
    log.info(f"running as user: {user}")
    # check if account is shadowbanned
    self.cleanup.shadow_check()
    self.user = parse_user(user)
    log.info(f"account info:\n{log_json(self.user)}")
    self.ready = True
    log.info("The bot is now running. It has a chance to perform an action every second. Be patient")
def shadow_check(self, roll=1):
    """Probe reddit's public about.json for this account; exit if the 404
    response indicates a shadowban. roll gates how often the check runs."""
    if not chance(roll):
        return
    log.info("performing a shadowban check")
    about_url = f"https://www.reddit.com/user/{self.username}/about.json"
    headers = {'User-agent': f"hiiii its {self.username}"}
    response = requests.get(about_url, headers=headers).json()
    if "error" not in response:
        log.info(f"{self.username} is not shadowbanned! We think..")
        return
    if response["error"] == 404:
        # a 404 on our own profile means the account is shadowbanned
        log.info(
            f"account {self.username} is shadowbanned. poor bot :( shutting down the script..."
        )
        sys.exit()
    log.info(response)
def repost(self, roll=1, subreddit=None):
    """Maybe repost a found submission, lightly editing self-post text.

    roll: probability (0-1) gating whether we act. subreddit: optional sub
    to source the post from (passed through to get_post).
    NOTE(review): this function deliberately relies on `params` being
    unbound when a self-post is skipped — the later submit() raises
    UnboundLocalError, which is caught and ignored. Fragile but intentional.
    """
    if chance(roll):
        log.info("running repost")
        # log.info("running _repost")
        post = self.get_post(subreddit=subreddit)
        if not post:
            return
        # verify the post's URL is still reachable before reposting
        api_call = requests.get(post.url).status_code
        if api_call != 200:
            if api_call == 429:
                print('too many requests to pushshift')
                # NOTE(review): `s` is presumably a sleep alias imported
                # elsewhere in this module — confirm.
                s(random.uniform(3, 8))
            else:
                print('pushshift http error: ' + str(api_call))
            return
        else:
            log.info(f"reposting post: {post.id}")
            if post.is_self:
                # skip removed/deleted text and posts containing a year
                # (20xx) or a v.redd.it link, which date the content
                if post.selftext not in ('[removed]', '[deleted]') and bool(
                        re.findall(
                            r'20[0-9][0-9]|v.redd.it', post.selftext)) == False:
                    params = {
                        "title": edit_text(post.title, 'title'),
                        "selftext": edit_text(post.selftext, 'body')
                    }
                else:
                    print(
                        'Info: skipping post; it was malformed or date indicated'
                    )
                    # print(post.selftext)
            else:
                params = {
                    "title": edit_text(post.title, 'title'),
                    "url": post.url
                }
            sub = post.subreddit
            # randomly choose a potential subreddit to cross post
            if CONFIG['reddit_crosspost_enabled']:
                sub = self.rapi.subreddit(self.crosspost(sub.display_name))
            try:
                self.rapi.subreddit(sub.display_name).submit(**params)
                return
            except (UnboundLocalError, TypeError):
                # params was never assigned (post was skipped above)
                pass
            except APIException as e:
                log.info(f"REPOST ERROR: {e}")
            return
    else:
        pass
def get_subreddit(nsfw=False, getsubclass=False):
    """Pick a subreddit to work in.

    Uses the approved list when configured, otherwise draws random
    subreddits until one is found that is not on the avoid list.
    Returns the praw Subreddit when getsubclass is True, else its name.
    """
    # if the subreddit list is being used just return one from there
    if REDDIT_APPROVED_SUBS:
        log.info(f"picking subreddit from approved list")
        subreddit = reddit_api.subreddit(random.choice(REDDIT_APPROVED_SUBS).strip())
        log.info(f"using subreddit: {subreddit.display_name}")
    else:
        log.info(f"picking a random subreddit")
        # otherwise keep drawing random subreddits until one passes the filter
        while True:
            subreddit = reddit_api.random_subreddit(nsfw=nsfw)
            log.info(f"checking subreddit: {subreddit.display_name}")
            # make sure the random sub isn't in the avoid sub list
            if subreddit.display_name not in AVOID_SUBS:
                break
    return subreddit if getsubclass else subreddit.display_name
def repost(self, roll=1, subreddit=None):
    """Maybe repost a found submission verbatim, optionally cross-posting.

    roll: probability (0-1) gating whether we act. subreddit: optional sub
    to source the post from (passed through to get_post).
    """
    if not chance(roll):
        return
    log.info("running repost")
    # log.info("running _repost")
    post = self.get_post(subreddit=subreddit)
    log.info(f"reposting post: {post.id}")
    # self-posts carry their text; link posts carry their URL
    if post.is_self:
        params = {"title": post.title, "selftext": post.selftext}
    else:
        params = {"title": post.title, "url": post.url}
    sub = post.subreddit
    # randomly choose a potential subreddit to cross post
    if CONFIG['reddit_crosspost_enabled']:
        sub = self.rapi.subreddit(self.crosspost(sub.display_name))
    try:
        self.rapi.subreddit(sub.display_name).submit(**params)
    except APIException as e:
        log.info(f"REPOST ERROR: {e}")
def remove_low_scores(self, roll=1):
    """Delete this account's recent comments/posts at or below the
    configured score threshold. roll gates how often the sweep runs.

    Counts items as "removed" even when the delete call fails, matching
    the original tally behavior.
    """
    comment_count = 0
    post_count = 0
    if chance(roll):
        log.info("checking for low score content to remove")
        threshold = reddit_config.CONFIG["reddit_low_score_threshold"]
        for i in self.rapi.redditor(self.username).new(limit=500):
            if i.score <= threshold:
                if isinstance(i, praw.models.Comment):
                    log.info(
                        f"deleting comment(id={i.id}, body={i.body}, score={i.score}, subreddit={i.subreddit_name_prefixed}|{i.subreddit_id})"
                    )
                    try:
                        i.delete()
                    except Exception as e:
                        # BUG FIX: Exception has no `.message` attribute in
                        # Python 3 — that raised AttributeError here.
                        log.info(
                            f"unable to delete comment(id={i.id}), skip...\n{e}"
                        )
                    comment_count += 1
                else:
                    log.info(
                        f"deleting post(id={i.id}, score={i.score}, subreddit={i.subreddit_name_prefixed}|{i.subreddit_id})"
                    )
                    try:
                        i.delete()
                    except Exception as e:
                        # BUG FIX: same `.message` issue as above
                        log.info(
                            f"unable to delete post(id={i.id}), skip...\n{e}"
                        )
                    post_count += 1
        log.info(
            f'removed {comment_count + post_count} item(s). removed {comment_count} comment(s), {post_count} post(s) with less than {threshold} score'
        )
        # GOOD BOT
        if (comment_count + post_count) == 0:
            log.info(
                "no low score content to clean up. I'm a good bot! :^)")
def init(self):
    """Grow the cobe brain database by learning reddit comments until it
    reaches the configured minimum size, then mark the engine ready.

    Exits the process if the database file was never created.
    """
    log.info("using cobe to generate comments")
    main_db = self.config.get("cobe_main_db")
    # make sure db was initialized correctly
    if os.path.isfile(main_db):
        # set the initial size
        self.size = os.path.getsize(main_db)
    else:
        log.info(f"cobe db failed to initialize. exiting")
        sys.exit()
    log.debug('filling cobe database for commenting')
    # loop through learning comments until we reach the min db size
    while self.size <= tobytes(self.config.get("cobe_min_db_size")):
        log.info(f"cobe db size is: {str(bytesto(self.size, 'm'))}mb, need {self.config.get('cobe_min_db_size')} - learning...")
        # just learn from random subreddits for now
        subreddit = get_subreddit(getsubclass=True)
        log.info(f"learning from /r/{subreddit}")
        # get the comment generator function from pushshift
        comments = self.psapi.get_comments(subreddit)
        # go through 500 comments per subreddit
        for x in range(500):
            # get the comment from the generator function
            try:
                comment = next(comments)
            except StopIteration:
                log.info(f"end of comments")
                # BUG FIX: previously execution fell through here, so the
                # remaining iterations re-learned the previous comment (or
                # raised NameError when the generator was empty). Move to
                # the next subreddit instead.
                break
            # bot responses are better when it learns from short comments
            if len(comment.body) < 240:
                log.debug(f"learning comment: {comment.body.encode('utf8')}")
                # only learn comments that don't contain an avoid word
                if not any(word in comment.body for word in AVOID_WORDS):
                    self.brain.learn(comment.body.encode("utf8"))
        # update the class size variable so the while loop
        # knows when to break
        self.size = os.path.getsize(main_db)
    log.info(f"database min size ({self.config.get('cobe_min_db_size')}) reached")
    self.ready = True
import os
from logs.logger import log
from pathlib import Path

# Prefix that the bot uses to discover envars settings for the bots
ENVAR_PREFIX="BOT_"

# Filesystem anchors, all derived from this config module's own location:
# CONFIG_ROOT is this directory, REPO_ROOT is two levels up, and the
# source tree and .env file hang off the repo root.
CONFIG_ROOT = os.path.dirname(os.path.abspath(__file__))
config_root = Path(CONFIG_ROOT)
REPO_ROOT = config_root.parents[1].absolute()
SRC_ROOT = os.path.join(REPO_ROOT, "src")
ENV_FILE= os.path.join(REPO_ROOT, ".env")

log.info(f"config root: {CONFIG_ROOT}")
log.info(f"repo root: {REPO_ROOT}")
log.info(f"src root: {SRC_ROOT}")

# Common Values
DAY = 86400 # POSIX day (exact value in seconds)
MINUTE = 60 # seconds in a minute
AUTH = prefer_envar({ # app creds "reddit_client_id": "", "reddit_client_secret": "", # reddit account creds "reddit_username": "", "reddit_password": "", }) for key in AUTH: if AUTH[key] == "": # reddit auth not configured correctly. # instruct user to generate a .env file config_gen() log.info(f"REDDIT AUTH CONFIG:\n {log_json(AUTH)}") CONFIG = prefer_envar({ "reddit_crosspost_enabled": False, # the chance the bot will repost a post "reddit_post_chance": 0.005, # the chance the bot will make a comment "reddit_comment_chance": 0.005, # the chance the bot will reply to a comment # otherwise it will reply to a post "reddit_reply_to_comment": 0.002, # chance the bot will remove poor performing # posts and comments "reddit_remove_low_scores": 0.002, # posts/comments that get downvoted to this score will be deleted "reddit_low_score_threshold": 0,
def _logADSBSignal(bits):
    """Log the decoded fields of one extracted ADS-B bit string."""
    log.success("Extracted Signal -> " + bits)
    log.info("Preamble => " + bits[0:4])
    log.info("DF => " + bits[4:9])
    log.info("CA => " + bits[9:12])
    log.info("ICAO => " + bits[12:36] + " " + "\033[91m(" +
             str(hex(int(bits[12:36], 2))) + ")\033[0m")
    log.info("TYPE => " + bits[36:41])
    log.info("DATA => " + bits[41:92])
    log.info("Interrogator ID => " + bits[92:116] + "\n")


def plotADSB(filename):
    """Read 32-bit sample words from `filename`, extract and log two ADS-B
    frames' bit fields, and plot the recovered binary signal."""
    try:
        with open(filename, 'rb') as f:
            msg = f.read()
    except Exception as e:
        log.err(str(e) + "\n")
        return
    # unpack the capture as 32-bit ints, 4 bytes at a time
    m = []
    i = 0
    size_m = len(msg)
    # FIX: `while i in range(size_m)` did a linear membership scan per
    # iteration on Python 2; `i < size_m` is equivalent since i >= 0.
    while i < size_m:
        m.append(struct.unpack('i', msg[i:i + 4])[0])
        updateProgressBar(i, size_m)
        i += 4
    # FIX: `print "\n"` was a Python 2 print statement; the call form
    # behaves identically on both Python 2 and 3.
    print("\n")
    signal = []
    signal_bin = ""
    signal_bin1 = ""
    mark = 0
    flag = False
    for i in range(len(m)):
        # the two magic sample values map to bit levels 0 and 1;
        # anything else is treated as 0
        if m[i] == 2139029504:
            signal.append(0)
        elif m[i] == 32639:
            signal.append(1)
        else:
            signal.append(0)
        # collect the first frame's bits
        # NOTE(review): `<= 116` lets the string grow to 117 chars; kept
        # as-is to preserve the original logged output.
        if len(signal_bin) <= 116:
            if m[i] == 2139029504:
                signal_bin += "0"
            elif m[i] == 32639:
                signal_bin += "1"
            if len(signal_bin) == 116:
                mark = i
                flag = True
        # then collect the second frame's bits after the first completes
        if len(signal_bin1) <= 116 and i > mark and flag:
            if m[i] == 2139029504:
                signal_bin1 += "0"
            elif m[i] == 32639:
                signal_bin1 += "1"
    # duplicated field-logging factored into a helper
    _logADSBSignal(signal_bin)
    _logADSBSignal(signal_bin1)
    plt.ylim([-2, 2])
    plt.xlim([0, len(signal)])
    plt.plot(range(len(signal)), signal)
    log.success("Plot Created")
    plt.show()
def tick(self):
    """One scheduler tick: give every configured action its chance to run.

    Skips entirely while the bot's sleep schedule says to sleep.
    """
    if should_we_sleep():
        return
    for action in self.actions:
        if chance(self.config[action.name]):
            log.info(f"running action: {action.name}")
            action.call()
EASY_SCHEDULES = { 1: ((7, 00), (10, 00)), 2: ((10, 00), (14, 00)), 3: ((14, 00), (18, 00)), 4: ((18, 00), (22, 00)), 5: ((22, 00), (2, 00)), } # convert the easy schedules to the tuple values BOT_SCHEDULE = [ EASY_SCHEDULES.get(schedule) for schedule in CONFIG['reddit_sleep_schedule'] ] log.info(f"using schedules: {BOT_SCHEDULE}") # transform the schedule with datetime formatting updated_schedules = [((datetime.time(schedule[0][0], schedule[0][1])), (datetime.time(schedule[1][0], schedule[1][1]))) for schedule in BOT_SCHEDULE] BOT_SCHEDULE = updated_schedules def is_time_between(begin_time, end_time, check_time=None): # If check time is not given, default to current UTC time check_time = check_time or datetime.datetime.utcnow().time() if begin_time < end_time: return check_time >= begin_time and check_time <= end_time else: # crosses midnight
#!/usr/bin/env python3
# -*- coding: UTF-8 -*-
import sys

from logs.logger import log
from utils import check_internet, get_public_ip

import bot

if __name__ == "__main__":
    # refuse to start without connectivity
    if check_internet() is not True:
        log.info('Please check your internet connection')
        sys.exit()
    try:
        log.info(f'Internet connection found : {get_public_ip()}')
        bot.run()
    except KeyboardInterrupt:
        # quit
        sys.exit()
import sys

from logs.logger import log
from utils import check_internet

import bot

if __name__ == "__main__":
    # refuse to start without connectivity
    if check_internet() is not True:
        log.info('Please check your internet connection')
        sys.exit()
    try:
        log.info('Internet connection found')
        bot.run()
    except KeyboardInterrupt:
        # quit
        sys.exit()
#!/usr/bin/env python3
# -*- coding: UTF-8 -*-
from utils import prefer_envar
from pathlib import Path
from logs.logger import log
from logs.log_utils import log_json
from .common_config import SRC_ROOT
import os

# Directory layout for the cobe comment "brain" database.
BASE_DIR = os.path.join(SRC_ROOT, 'bots/reddit/actions/comments')
DB_DIR = os.path.join(BASE_DIR, "brains")
MAIN_DB = os.path.join(DB_DIR, "brain.db")

# FIX: makedirs(..., exist_ok=True) already tolerates an existing
# directory, so the former `if not os.path.exists(DB_DIR)` guard was
# redundant and a TOCTOU race; call it unconditionally.
os.makedirs(DB_DIR, exist_ok=True)

CONFIG = prefer_envar({
    # cobe config
    "cobe_base_dir": BASE_DIR,
    "cobe_db_dir": DB_DIR,
    "cobe_main_db": MAIN_DB,
    "cobe_min_db_size": "50mb",
    "cobe_max_db_size": "300mb",
})

log.info(f"COBE CONFIG:\n {log_json(CONFIG)}")
def get_post(self, subreddit=None):
    """Find a candidate submission to repost.

    subreddit: optional sub name to search; otherwise one is picked from
    the configured sub list or drawn at random. Returns a praw submission,
    or None after too many failed attempts.
    """
    log.info(f"finding a post to re-post")
    got_post = False
    attempts = 0
    while not got_post:
        # use the supplied subreddit
        # otherwise choose one randomy
        if subreddit:
            log.info(f"searching post in sub: {subreddit}")
            sub = self.rapi.subreddit(subreddit)
        else:
            # if there are subreddits in the subreddit list pull randomly from that
            # otherwise pull a totally random subreddit
            sub = self.rapi.subreddit(
                random.choice(CONFIG['reddit_sub_list'])
            ) if CONFIG['reddit_sub_list'] else get_subreddit(
                getsubclass=True)
            log.info(f"searching post in sub: {sub.display_name}")
        try:
            candidate = self.psapi.get_posts(sub.display_name)[0]
            post_id = candidate['id']
            # don't use posts that have avoid words in the title.
            # BUG FIX: this previously tested the undefined name
            # `comment.body` — the NameError was swallowed by the broad
            # except below, so this accept-path could never trigger.
            # Check the candidate's title instead (assumes pushshift
            # results carry a 'title' field — TODO confirm).
            if not any(word in candidate.get('title', '')
                       for word in AVOID_WORDS):
                got_post = True
        except Exception as e:
            log.info(f"couldn't find post in {sub}")
            # sub = self.rapi.random_subreddit(nsfw=False)
            # log.info(f"trying in: {subreddit}")
            attempts += 1
            log.info(f"repost attempts: {attempts}")
            if attempts > 3:
                log.info(
                    f"couldn't find any posts - skipping reposting for now"
                )
                return
    return self.rapi.submission(id=post_id)
def onExit(self, button):
    """Menu exit handler: tear down the UI and terminate the process.

    button: the UI widget that triggered this callback (unused).
    """
    self.ui.end("Exit")
    log.info("Exiting Karma Bot Menu: Bye! :D")
    sys.exit()
def init(self):
    """Initialize the comment engine, holding `ready` False for the
    duration of the (potentially long) underlying init."""
    # FIX: corrected "intiializing" typo in the log message
    log.info("initializing comments")
    self.ready = False
    self.comments.init()
    self.ready = True
    log.info("commenting ready")
def get_reply(self, replyto: str = ''):
    """Generate a reply to `replyto` via the cobe brain.

    Returns the generated text, or None (after logging) when the engine
    has not been initialized yet.
    """
    if not self.ready:
        log.info(f"cobe not initialized, run init")
        return None
    return self.brain.reply(replyto)