def network_connect():
    """Bring up Wi-Fi in station mode; on failure, open a config portal.

    Tries to join the AP saved in the 'wifi' section of ConfigFile. If that
    fails (or no SSID is saved), starts a soft-AP named 'micropython_ap' and
    serves a small MicroWebSrv app that lets a phone/browser pick a network.

    NOTE(review): nesting of the route handlers / server start under the
    "not connected" branch is reconstructed from mangled source — confirm
    against the original file. Relies on module-level `network`, `machine`,
    `json`, `ConfigFile`, `MicroWebSrv` and `wifi_connected` (defined
    elsewhere).
    """
    sta_if = network.WLAN(network.STA_IF)
    sta_if.active(True)
    ssid = ConfigFile.get('wifi', {}).get('ssid')
    if ssid:
        # Attempt to join the previously saved network.
        sta_if.connect(ssid, ConfigFile.get('wifi', {}).get('pwd'))
    if not wifi_connected(sta_if):
        ap = network.WLAN(network.AP_IF)  # create access-point interface
        ap.active(True)  # activate the interface
        ap.config(essid='micropython_ap')  # set the ESSID of the access point

        @MicroWebSrv.route('/aps', 'GET')
        def scan_ap(http_client, http_response):
            # GET /aps -> JSON list of visible SSIDs (first field of scan()).
            sta_if.active(True)
            ap_list = sta_if.scan()
            http_response.WriteResponseJSONOk([ap[0] for ap in ap_list])

        @MicroWebSrv.route('/connect', 'POST')
        def connect(http_client, http_response):
            # POST /connect with {"ssid": ..., "pwd": ...}: try to join,
            # persist credentials on success and reboot the board.
            params = json.loads(http_client.ReadRequestContent())
            ssid = params.get('ssid')
            if not ssid:
                http_response.WriteResponseJSONOk({
                    'Success': False,
                    'Message': 'ssid不能为空!'
                })
                return
            sta_if = network.WLAN(network.STA_IF)
            sta_if.active(True)
            sta_if.connect(ssid, params.get('pwd'))  # Connect to an AP
            if wifi_connected(sta_if):
                print('connect success!')
                wifi_config = ConfigFile.get('wifi', {})
                wifi_config['ssid'] = ssid
                wifi_config['pwd'] = params.get('pwd')
                ConfigFile.set('wifi', wifi_config)
                # NOTE(review): reset() reboots before any HTTP response is
                # written, so the client never sees a success reply.
                machine.reset()
            http_response.WriteResponseJSONOk({
                'Success': False,
                'Message': '连接失败!'
            })

        # Serve the captive-portal pages from /templates/.
        srv = MicroWebSrv(webPath='/templates/')
        srv.MaxWebSocketRecvLen = 256
        srv.WebSocketThreaded = False
        srv.Start()
def connect(http_client, http_response):
    """HTTP handler: join the Wi-Fi network named in the JSON request body.

    Expects {"ssid": str, "pwd": str}. On success, persists the credentials
    to the 'wifi' section of ConfigFile, replies with a success JSON, then
    reboots the board. On failure (or a missing ssid) replies with a
    failure JSON and returns.

    Bug fix vs. original: the original called machine.reset() *before*
    writing any response, so a successful connection never produced an HTTP
    reply (the only WriteResponseJSONOk after reset was the unreachable
    failure message). We now reply before resetting.
    """
    params = json.loads(http_client.ReadRequestContent())
    ssid = params.get('ssid')
    if not ssid:
        # Reject empty SSID up front.
        http_response.WriteResponseJSONOk({
            'Success': False,
            'Message': 'ssid不能为空!'
        })
        return
    sta_if = network.WLAN(network.STA_IF)
    sta_if.active(True)
    sta_if.connect(ssid, params.get('pwd'))  # Connect to an AP
    if wifi_connected(sta_if):
        print('connect success!')
        wifi_config = ConfigFile.get('wifi', {})
        wifi_config['ssid'] = ssid
        wifi_config['pwd'] = params.get('pwd')
        ConfigFile.set('wifi', wifi_config)
        # Tell the client we succeeded BEFORE rebooting the device.
        http_response.WriteResponseJSONOk({
            'Success': True,
            'Message': '连接成功!'
        })
        machine.reset()
        return
    http_response.WriteResponseJSONOk({
        'Success': False,
        'Message': '连接失败!'
    })
class InstagramBot:
    """Automates one Instagram account: uploads scraped media, grows the
    account by hashtag-driven follow/like activity, and unfollows stale
    follows.

    Collaborators defined elsewhere in the project: ConfigFile, dbManager,
    InstagramAPI, VideoFileClip, and `r` — presumably the `random` module
    imported under that alias (TODO confirm).
    """

    def __init__(self, account):
        # Per-account config file holds credentials and tuning knobs.
        self.config_file = ConfigFile(account)
        self.account = account.strip()
        self.password = self.config_file.get("Instagram", "password").strip()
        self.db = dbManager()
        # Presumably a mapping of subreddit -> iterable of hashtags
        # (see genCaption / followRandom) — TODO confirm schema.
        self.hashtags = self.config_file.get("Hashtags")
        self.titles = self.config_file.get("Instagram", "Titles")
        self.follow_amount_min = int(
            self.config_file.get("Instagram", "followamountmin"))
        self.follow_amount_max = int(
            self.config_file.get("Instagram", "followamountmax"))
        self.unfollow_days = int(self.config_file.get("Time", "unfollowdays"))
        self.like_ammount_max = int(
            self.config_file.get("Instagram", "likeamountmax"))
        self.like_ammount_min = int(
            self.config_file.get("Instagram", "likeamountmin"))
        print("Created Instagram Bot!")

    def upload(self, pic_info):
        """Upload one scraped post described by pic_info.

        pic_info keys used here: 'Path', 'Title', 'Subreddit', 'FileFormat',
        'ID'. Success is detected by comparing the feed length before and
        after the upload. Returns True on success, False otherwise; on
        failure the Reddit row's Path is nulled so the post is retried with
        fresh media later.
        """
        try:
            api = InstagramAPI(self.account, self.password)
            if (api.login()):
                # Feed length before upload; used below to detect success.
                startCount = len(api.getTotalSelfUserFeed(self.account))
                photo_path = pic_info['Path']
                caption = self.genCaption(pic_info["Title"],
                                          pic_info["Subreddit"])
                if caption is None:
                    # genCaption failed; fall back to a canned title + tags.
                    caption = self.titles[r.randint(
                        0, len(self.titles) - 1
                    )] + "#lol #lmao #funny #rofl #meme #error #404 #human #notabot"
                if pic_info['FileFormat'] not in ['jpg', 'png']:
                    # Anything that is not a still image is treated as video;
                    # grab a frame for the required thumbnail.
                    clip = VideoFileClip(photo_path)
                    clip.save_frame("images/thumbnail.jpg")
                    api.uploadVideo(photo_path,
                                    "images/thumbnail.jpg",
                                    caption=caption)
                    statusCode = api.LastResponse.status_code
                    if statusCode == 500:
                        # One retry on a server-side error.
                        print("Retrying to upload video in 10s")
                        time.sleep(10)
                        api.uploadVideo(photo_path,
                                        "images/thumbnail.jpg",
                                        caption=caption)
                else:
                    api.uploadPhoto(photo=photo_path, caption=caption)
                if len(api.getTotalSelfUserFeed(
                        self.account)) - startCount >= 1:
                    # Feed grew -> record the post and its Reddit origin.
                    iID = self.db.insert("Instagram", [caption, self.account])
                    self.db.insert("Posted", [pic_info["ID"], iID])
                    print("Uploaded Post!")
                    return True
                else:
                    print("Didn't upload post :(")
                    self.db.update("Reddit", {"Path": "null"},
                                   "Reddit.ID == {0}".format(pic_info["ID"]))
                    return False
            else:
                print("Can't login!")
                return False
        except Exception as e:
            print("ERROR WHILE UPLOADING: ", e)
            self.db.update("Reddit", {"Path": "null"},
                           "Reddit.ID == {0}".format(pic_info["ID"]))
            return False

    def genCaption(self, title, subreddit):
        """Build a caption: the post title (or a canned title) plus 7–12
        random hashtags drawn from the subreddit's tag list.

        A canned title replaces the real one when the title contains
        Reddit-revealing words (pronouns, 'reddit', 'karma', ...) or, with
        1-in-10 odds, at random. Returns None if anything raises.
        """
        try:
            tagList = list(self.hashtags[subreddit])
            capList = []
            # Sample without replacement: pick then remove from the pool.
            for i in range(
                    0, min(r.randint(7, 12), len(self.hashtags[subreddit]))):
                tag = tagList[r.randint(0, len(tagList) - 1)]
                capList.append(tag)
                tagList.remove(tag)
            pronouns = [
                "i", "me", "my", "our", "ours", "myself", "ourself", "we",
                "us", "i'm", "reddit", "sub", "karma", "upvote"
            ]
            wordList = title.lower().split()
            ran = r.randint(0, 9)
            if any(word in pronouns for word in wordList):
                return self.titles[r.randint(
                    0, len(self.titles) - 1)] + " #" + " #".join(capList)
            if ran in range(0, 9):
                # 9-in-10 chance: keep the original title.
                return title + " #" + " #".join(capList)
            else:
                return self.titles[r.randint(
                    0, len(self.titles) - 1)] + " #" + " #".join(capList)
        except Exception as e:
            print("ERROR DURING genCaption: ", e)

    def getNumberOfPosts(self):
        """Return the number of posts on the account's feed, or 0 on any
        failure (login refused or an exception)."""
        try:
            api = InstagramAPI(self.account, self.password)
            if (api.login()):
                return len(api.getTotalSelfUserFeed())
            else:
                return 0
                # NOTE(review): unreachable — placed after `return 0`.
                print("ERROR COULDN'T DETERMINE NUMBER OF POSTS")
        except Exception as e:
            return 0
            # NOTE(review): unreachable — placed after `return 0`, so the
            # exception is silently swallowed.
            print("ERROR DURING getNumberOfPosts: ", e)

    def getUsernameID(self, username, api):
        """Look up a username and return its numeric pk (as a string) by
        regex-scraping api.LastJson. Returns None on failure."""
        try:
            api.searchUsername(
                username)  # the InstagramUserID you want to know the followers
            usernameJson = api.LastJson
            # NOTE(review): non-raw pattern with \' escapes — works, but
            # should be a raw string.
            return re.search(
                "(?<=pk\'\: )[0-9]*",
                str(usernameJson)).group()  # finds the pk numeral cod
        except Exception as e:
            print("ERROR DURING getUsernameID: ", e)

    def getFollowers(self, api, username=None):
        """Return the follower list (list of user dicts from the API JSON)
        for `username`, defaulting to this bot's own account. None on error."""
        try:
            if username is None:
                username = self.account
            pkString = self.getUsernameID(username, api)
            api.getUserFollowers(int(pkString))
            followers = api.LastJson
            #followersNamesList = re.findall("(?<=username\'\: \').*?(?=\',)", str(followers)) # finds the UserID of the followers and creates a list
            #followersPksList = re.findall("(?<=pk\'\: )[0-9]*", str(followers)) # finds the UserID of the followers and creates a list
            return followers['users']
        except Exception as e:
            print("ERROR DURING getFollowers: ", e)

    def getHashtagFeed(self, hashtag):
        """Return a list of media ids ("<pk>_<userpk>") for a hashtag feed,
        scraped by regex from the raw JSON. None on error."""
        try:
            api = InstagramAPI(self.account, self.password)
            if (api.login()):
                api.getHashtagFeed(hashtag)
                tagJson = api.LastJson
                mediaIdList = re.findall("(?<=\'id\'\: \')[0-9]*_[0-9]*(?=\')",
                                         str(tagJson))
                return mediaIdList
        except Exception as e:
            print("ERROR DURING getHashtagFeed: ", e)

    def getMediaLikers(self, mediaId, api):
        """Return the pks (strings) of users who liked mediaId, scraped by
        regex from the raw JSON. None on error."""
        try:
            api.getMediaLikers(mediaId)
            likerJson = api.LastJson
            return re.findall("(?<=pk\'\: )[0-9]*", str(likerJson))
        except Exception as e:
            print("ERROR DURING getMediaLikers: ", e)

    # Will choose a random hashtag for list of hashtags
    ## Get the feed of that hashtag and pick a picture
    ## Like the picture, then follow random people who have also
    ## liked the picture
    async def followRandom(self):
        """Like random posts under a random configured hashtag and follow a
        random sample of each post's likers, sleeping 5–120s between follows.
        Total follows per call is follow_amount_min..follow_amount_max."""
        try:
            api = InstagramAPI(self.account, self.password)
            if (api.login()):
                amountToFollow = r.randint(self.follow_amount_min,
                                           self.follow_amount_max)
                hashValues = list(self.hashtags.values())
                hashList = r.choice(hashValues)
                hashtag = r.choice(hashList)
                mediaList = self.getHashtagFeed(hashtag)
                amountFollowed = 0
                while mediaList and amountFollowed < amountToFollow:
                    mediaId = mediaList.pop(r.randint(0, len(mediaList) - 1))
                    api.like(mediaId)
                    self.db.insert("Like", [self.account, mediaId])
                    likersList = self.getMediaLikers(mediaId, api)
                    if not likersList:
                        continue
                    r.shuffle(likersList)
                    for i in range(
                            0,
                            min(amountToFollow - amountFollowed,
                                len(likersList))):
                        print("trying to follow")
                        try:
                            user_pk = likersList[i]
                            # NOTE(review): fetchall() returns row tuples,
                            # so `user_pk not in followers` compares a pk
                            # against tuples and is likely always True —
                            # verify intended dedup behavior.
                            followers = self.db.c.execute(
                                "SELECT PK from FOLLOWING").fetchall()
                            if user_pk not in followers:
                                api.follow(user_pk)
                                self.db.insert("Following",
                                               [self.account, user_pk])
                        except Exception as e:
                            print(
                                "ERRROR DURING followRandom while trying to follow"
                            )
                        finally:
                            # Throttle and count every attempt, even failures.
                            await asyncio.sleep(r.randint(5, 120))
                            amountFollowed += 1
        except Exception as e:
            print("ERROR DURING followRandom: ", e)

    async def unfollow(self):
        """Unfollow accounts followed more than unfollow_days ago (rows whose
        DateFollowed is set), up to a random per-call cap, sleeping 5–120s
        between unfollows. The row's DateFollowed is nulled, not deleted."""
        try:
            api = InstagramAPI(self.account, self.password)
            if api.login():
                amountToUnfollow = r.randint(self.follow_amount_min,
                                             self.follow_amount_max)
                expiredSelection = self.db.c.execute(
                    "SELECT PK From Following WHERE DateFollowed IS NOT NULL AND julianday('now') - julianday(DateFollowed) >= "
                    + str(self.unfollow_days))
                expiredList = expiredSelection.fetchall()
                amountUnfollowed = 0
                for pk in expiredList:
                    print("trying to unfollow")
                    api.unfollow(pk[0])
                    self.db.update("Following", {"DateFollowed": "null"},
                                   "PK = {0}".format(pk[0]))
                    await asyncio.sleep(r.randint(5, 120))
                    amountUnfollowed += 1
                    if amountUnfollowed >= amountToUnfollow:
                        return
        except Exception as e:
            print("ERROR DURING unfollow: ", e)

    async def likeTimelineRandom(self):
        """Like a random selection of not-yet-liked posts from the home
        timeline (like_ammount_min..like_ammount_max of them), sleeping
        3–40s between likes."""
        try:
            api = InstagramAPI(self.account, self.password)
            if api.login():
                if not api.getTimeline():
                    raise Exception("Couldn't get timeline!")
                amountToLike = r.randint(self.like_ammount_min,
                                         self.like_ammount_max)
                amountLiked = 0
                mediaList = re.findall("(?<=\'id\'\: \')[0-9]*_[0-9]*(?=\')",
                                       str(api.LastJson))
                # NOTE(review): fetchall() rows are tuples; `mediaId not in
                # liked` compares a string to tuples — likely always True.
                liked = self.db.c.execute(
                    "SELECT MediaID from Like").fetchall()
                while mediaList and amountLiked < amountToLike:
                    mediaId = mediaList.pop(r.randint(0, len(mediaList) - 1))
                    if mediaId not in liked:
                        print("trying to like")
                        #Still need to do check
                        api.like(mediaId)
                        amountLiked += 1
                        self.db.insert("Like", [self.account, mediaId])
                        await asyncio.sleep(r.randint(
                            3, 40))  #You should have a like wait time min/max
        except Exception as e:
            print("ERROR DURING likeRandom: ", e)

    def directMessage(self):
        """Work-in-progress: compute the accounts we follow that do not
        follow us back. The actual DM call is still commented out."""
        try:
            api = InstagramAPI(self.account, self.password)
            if api.login():
                followersInfo = self.getFollowers(api)
                followingPks = self.db.c.execute(
                    "SELECT PK FROM Following").fetchall()
                followingPks = [pk[0] for pk in followingPks]
                print(type(followingPks[0]))
                #Get all pks that are in followingPks but not in followersPks
                ## i.e., Get all people you are following but are not following you
                # NOTE(review): followersInfo is a list of user dicts (see
                # getFollowers), so `pk not in followersInfo` compares a pk
                # to dicts and is likely always True — confirm intent.
                nonFollowersPks = [
                    pk for pk in followingPks if pk not in followersInfo
                ]
                # api.direct_message("hey whats up", 'ravioliraviolirobot')
        except Exception as e:
            print("ERROR DURING directMessage", e)
class Bot:
    """Orchestrator that runs the Reddit scraper and the Instagram bot as
    concurrent asyncio tasks (scrape / upload / expire / follow / like).

    Collaborators defined elsewhere: ConfigFile, InstagramBot, RedditBot,
    osManager, dbManager.

    Fixes vs. original:
      * like() now draws its wait time from like_time_MIN/MAX — the original
        used follow_time_MIN/MAX, so the configured like timing was ignored.
      * follow()/like() error logs no longer say "expire" (copy-paste label).
      * waitTime is computed before the try block in follow()/like(), so the
        finally-sleep can never hit an unbound name.
    """

    def __init__(self, account):
        # Account and password are instagram account and password;
        # the password itself lives in the per-account config file.
        self.config_file = ConfigFile(account)
        self.sub_list = self.config_file.get("Reddit", "Subreddits")
        self.account = account
        self.ibot = InstagramBot(account)
        self.rbot = RedditBot(account)
        self.obot = osManager(int(self.config_file.get("Time",
                                                       "imagelifetime")))
        self.db = dbManager()
        self.numPosts = int(self.ibot.getNumberOfPosts())
        # TODO(review): consider keeping the parsed dict itself instead of
        # one attribute per key.
        times = self.config_file.get("Time")
        for key, value in times.items():
            times[key] = int(value)
        self.scrape_time = times["scrapetime"]
        self.upload_time_MAX = times["uploadtimemax"]
        self.upload_time_MIN = times["uploadtimemin"]
        self.expire_time = times["expiretime"]
        self.retry_upload_delay = times["retryuploaddelay"]
        # Slow hours: a daily window during which extra delay is added.
        self.slow_hours_start = datetime.time(times["slowhourstart"], 0, 0)
        self.slow_hours_end = datetime.time(times["slowhourend"], 0, 0)
        self.slow_hours_delay = times["slowhourdelay"]
        self.follow_time_MAX = times["followtimemax"]
        self.follow_time_MIN = times["followtimemin"]
        self.like_time_MAX = times["liketimemax"]
        self.like_time_MIN = times["liketimemin"]

    def start(self):
        """Create the database schema and run all periodic tasks forever."""
        self.db.createDatabase()
        loop = asyncio.get_event_loop()
        loop.create_task(self.scrape())
        loop.create_task(self.upload())
        loop.create_task(self.expire())
        loop.create_task(self.follow())
        loop.create_task(self.like())
        loop.run_forever()
        loop.close()

    def getPost(self):
        """Pick a random not-yet-posted scraped item from a random subreddit.

        Returns a dict with keys ID/Title/Path/Subreddit/FileFormat, or None
        when no candidate exists (or on error). Each subreddit is tried at
        most once per call.
        """
        try:
            temp_sub_list = list(self.sub_list)
            while temp_sub_list:
                try:
                    sub = temp_sub_list[random.randint(
                        0, len(temp_sub_list) - 1)]
                    pic = self.db.c.execute(
                        "SELECT DISTINCT Reddit.ID, Reddit.Title, Reddit.Path, "
                        "Reddit.Subreddit, Reddit.FileFormat FROM Reddit "
                        "WHERE Reddit.Subreddit == (?) "
                        "AND Reddit.Path is NOT NULL "
                        "AND REDDIT.ID NOT IN (SELECT redditID FROM POSTED) "
                        "GROUP BY Reddit.Path HAVING COUNT(Reddit.Path) = 1",
                        (sub,)).fetchall()
                    temp_sub_list.remove(sub)  # each subreddit tried once
                    if pic:
                        picList = pic[random.randint(0, len(pic) - 1)]
                        pic_info = {
                            "ID": picList[0],
                            "Title": picList[1],
                            "Path": picList[2],
                            "Subreddit": picList[3],
                            "FileFormat": picList[4]
                        }
                        # Legacy rows may lack FileFormat; derive it from
                        # the file path (will not need this in the future!).
                        if not pic_info['FileFormat']:
                            filename, file_extension = os.path.splitext(
                                pic_info['Path'])
                            pic_info['FileFormat'] = file_extension.split(
                                '.')[1]
                        return pic_info
                except Exception as e:
                    print(
                        "ERROR DURING getPost while trying to get path to image: ",
                        e)
                    return None
        except Exception as e:
            print("ERROR DURING getPost: ", e)

    def getNumPosts(self):
        """Return the Instagram feed length via the Instagram bot."""
        try:
            return self.ibot.getNumberOfPosts()
        except Exception as e:
            print("ERROR DURING getnumPosts: ", e)

    async def scrape(self):
        """Periodically scrape every configured subreddit for new media."""
        while True:
            try:
                for sub in self.sub_list:
                    self.rbot.scrapeImages(sub)
            except Exception as e:
                print("ERROR DURING scape in bot: ", e)
            finally:
                await asyncio.sleep(self.scrape_time)

    async def upload(self):
        """Periodically upload one post; retry sooner after a failure."""
        while True:
            waitTime = random.randint(self.upload_time_MIN,
                                      self.upload_time_MAX)
            if self.isSlowHours():
                waitTime += self.slow_hours_delay
            try:
                succeed = self.ibot.upload(self.getPost())
                if not succeed:
                    # If it did not succeed, only wait the short retry
                    # delay before trying to post again.
                    waitTime = self.retry_upload_delay
            except Exception as e:
                print("ERROR DURING upload in bot: ", e)
                waitTime = self.retry_upload_delay
            finally:
                await asyncio.sleep(waitTime)

    async def expire(self):
        """Periodically delete media files older than their lifetime."""
        while True:
            try:
                self.obot.checkExpired()
            except Exception as e:
                print("ERROR DURING expire in bot: ", e)
            finally:
                await asyncio.sleep(self.expire_time)

    async def follow(self):
        """Periodically run a follow pass and an unfollow pass."""
        while True:
            # Computed before try so the finally-sleep always has a value.
            waitTime = random.randint(self.follow_time_MIN,
                                      self.follow_time_MAX)
            if self.isSlowHours():
                waitTime += self.slow_hours_delay
            try:
                await self.ibot.followRandom()
                await self.ibot.unfollow()
            except Exception as e:
                print("ERROR DURING follow in bot: ", e)
            finally:
                await asyncio.sleep(waitTime)

    async def like(self):
        """Periodically like random timeline posts."""
        while True:
            # Bug fix: use the like_* bounds (original used follow_*).
            waitTime = random.randint(self.like_time_MIN, self.like_time_MAX)
            if self.isSlowHours():
                waitTime += self.slow_hours_delay
            try:
                await self.ibot.likeTimelineRandom()
            except Exception as e:
                print("ERROR DURING like in bot: ", e)
            finally:
                await asyncio.sleep(waitTime)

    def isSlowHours(self):
        """True when the current local time is inside the configured slow
        window; correctly handles windows that wrap past midnight."""
        currentTime = datetime.datetime.now().time()
        if self.slow_hours_start <= self.slow_hours_end:
            return self.slow_hours_start <= currentTime <= self.slow_hours_end
        else:
            return (self.slow_hours_start <= currentTime
                    or currentTime <= self.slow_hours_end)
class RedditBot:
    """Scrapes image/video posts from subreddits via PRAW, downloads the
    media (direct links, Imgur pages, reddit-hosted video) and records each
    item in the Reddit table.

    Collaborators defined elsewhere: ConfigFile, dbManager, osManager,
    praw, requests, BeautifulSoup, mp (presumably moviepy — TODO confirm).
    """

    def __init__(self, iusername):
        # Reddit API credentials come from the per-account config file.
        self.config_file = ConfigFile(iusername)
        self.reddit = praw.Reddit(
            client_id=self.config_file.get("Reddit", "client_id"),
            client_secret=self.config_file.get("Reddit", "client_secret"),
            password=self.config_file.get("Reddit", "password"),
            user_agent=self.config_file.get("Reddit", "user_agent"),
            username=self.config_file.get("Reddit", "username"))
        self.redditURL = 'www.reddit.com/r/'
        self.db = dbManager()
        self.osMan = osManager()
        # Videos longer than this (seconds, presumably) are skipped.
        self.videoDurationMax = int(
            self.config_file.get("Time", "videodurationmax"))

    def getImage(self, url, title):
        """Resolve a submission URL to a streamed response + file extension.

        Handles: direct reddit gif links, v.redd.it video pages, Imgur gif
        pages (<source> tags), direct .jpg/.png links, and generic pages
        (<img alt=title>). Returns (response, extension) on success,
        (None, None) on failure.

        NOTE(review): the v.redd.it branch returns a 3-tuple
        (doc, ext, True) while every other path returns 2 values — callers
        that unpack two values will raise on that branch; confirm which
        caller is expected to hit it.
        """
        try:
            imgExtensions = [".jpg", ".png"]
            vidExtensions = [".mp4", ".gif", ".gifv"]
            doc = requests.get(url, stream=True)
            filename, file_extension = os.path.splitext(url)
            #Reddit gifs can be downloaded directly
            ##So no need for further parsing
            if re.search("redd[.]?it/.*.gif[v]?", url):
                return doc, file_extension
            # Matches an optional scheme and/or protocol-relative "//"
            # prefix on scraped src attributes.
            x = re.compile("(\s?https?:)?(/{2})?")
            soup = BeautifulSoup(doc.text, 'html.parser')
            if re.search("v.redd[.]?it/.*", url):
                print(re.search('fallback_url', soup.prettify()))
                return doc, file_extension, True
            #If the image is a Imgur gif
            if file_extension.__contains__("gif"):
                #Imgur and reddit structure gifs differently in HTML, so
                ## The source is for Imgur
                for video in (soup.find_all('source')):
                    if video.has_attr('src'):
                        src = video['src']
                        filename, file_extension = os.path.splitext(src)
                        if file_extension not in vidExtensions:
                            return None, None
                        # Normalise scheme-less / protocol-relative URLs.
                        prefix = x.match(src)
                        if not prefix:
                            return requests.get("http://" + src,
                                                stream=True), file_extension
                        elif prefix.group(0) == "//":
                            return requests.get("http:" + src,
                                                stream=True), file_extension
                        else:
                            return requests.get(src,
                                                stream=True), file_extension
                return None, None  #This happens if it wasn't able to find the gif
            # If this url is already an image
            if file_extension in imgExtensions:
                return doc, file_extension
            # If not, then will need to extract the image from the URL
            for img in soup.find_all('img'):
                if img.has_attr('alt') and img[
                        'alt'] == title:  #There may nor be an alt
                    src = img['src']
                    filename, file_extension = os.path.splitext(src)
                    prefix = x.match(src)
                    if not prefix:
                        return requests.get("http://" + src,
                                            stream=True), file_extension
                    elif prefix.group(0) == "//":
                        return requests.get("http:" + src,
                                            stream=True), file_extension
                    else:
                        return requests.get(src, stream=True), file_extension
            return None, None
        except Exception as e:
            print("ERROR WHILE PARSING: ", e)
            return None, None

    def download(self, submission, subreddit):
        # TODO: make this method call downloadVideo and downloadImage.
        pass

    def downloadVideo(self, isGif, filepath, submission, subreddit):
        """Download a reddit-hosted video (or silent gif) for `submission`
        into `filepath` and record it in the Reddit table.

        Non-gif videos: downloads the separate audio and video streams,
        remuxes them with moviepy into <title>_video.mp4, then deletes the
        temporary stream files. Gifs are written directly. Videos longer
        than videoDurationMax are skipped.
        """
        try:
            duration = submission.media['reddit_video']["duration"]
            if duration > self.videoDurationMax:
                return
            doc = requests.get(
                submission.media['reddit_video']['fallback_url'], stream=True)
            if not doc:
                raise Exception("COULDN'T RETRIEVE THE VIDEO")
            if not isGif:
                audioLocation = os.path.join(filepath,
                                             submission.title + "_audio")
                videoLocation = os.path.join(filepath,
                                             submission.title + "_video")
                # Reddit serves the audio track at <post-url>/audio.
                audioPacket = requests.get(submission.url + "/audio",
                                           stream=True)
                if not audioPacket:
                    raise Exception("COULDN'T RETRIEVE THE AUDIO")
                self.writeFile(audioLocation, audioPacket)
                self.writeFile(videoLocation, doc)
                #audio = mp.AudioFileClip(audioLocation)
                video = mp.VideoFileClip(videoLocation)
                #video_with_audio = video.set_audio(audio)
                video_new_loc = videoLocation + ".mp4"
                video.write_videofile(video_new_loc, audio=audioLocation)
                self.db.insert("Reddit", [
                    submission.title, submission.url, video_new_loc,
                    subreddit, "mp4"
                ])
                # Remove the raw stream files; only the muxed mp4 is kept.
                self.osMan.deleteFile(audioLocation)
                self.osMan.deleteFile(videoLocation)
            else:
                imgLocation = os.path.join(filepath, submission.title)
                isWritten = self.writeFile(imgLocation, doc)
                if isWritten:
                    self.db.insert("Reddit", [
                        submission.title, submission.url, imgLocation,
                        subreddit, "gif"
                    ])
        except Exception as e:
            print("ERROR WHILE downloadVideo: ", e)
            return

    def downloadImage(self, submission, subreddit):
        """Download the media for one submission unless its URL was already
        stored; dispatches reddit-hosted video to downloadVideo.

        Returns False on duplicates/failure; NOTE(review): the video path
        returns None instead of False, and success returns None — callers
        should treat any non-False value consistently. Edit so it may only
        take in image.
        """
        try:
            url = submission.url  #"https://v.redd.it/qahcxxgdvha11"
            title = submission.title
            #https://v.redd.it/xglnc6dzlia11/HLSPlaylist.m3u8
            videoData = submission.media
            # NOTE(review): SQL is built with str.format — use a
            # parameterized query ("... URL == ?", (url,)) instead.
            duplicateAmount = len(
                self.db.c.execute(
                    "SELECT URL FROM Reddit WHERE URL == '{0}'".format(
                        str(url))).fetchall())
            if duplicateAmount >= 1:
                return False
            filepath = self.osMan.createDir(
                subreddit
            )  # Create the directory in which to store the (change this later so it wont be called in this function
            if isinstance(videoData, dict) and 'reddit_video' in videoData:
                self.downloadVideo(videoData['reddit_video']['is_gif'],
                                   filepath, submission, subreddit)
                return
            else:
                img, file_extension = self.getImage(url, title)
                # NOTE(review): prefer `is None` over `== None`.
                if img == None or file_extension == None:
                    return False
                imgLocation = os.path.join(filepath, title + file_extension)
                isWritten = self.writeFile(imgLocation, img)
                if isWritten:
                    print(title + " URL: " + url)
                    self.db.insert("Reddit", [
                        title, url, imgLocation, subreddit,
                        file_extension.split('.')[1]
                    ])
        except Exception as e:
            print("ERROR WHILE DOWNLOADING: ", e)
            return False

    def writeFile(self, toFile, content):
        """Stream `content` (a requests response) to `toFile` in 1 KiB
        chunks. Returns True on success, False on any error."""
        try:
            with open(toFile, 'wb') as f:
                for chunk in content.iter_content(
                        chunk_size=1024
                ):  #iter_content allows you to write the image by chunks
                    if chunk:
                        f.write(chunk)
            return True
        except Exception as e:
            print("ERROR WHILE writeFile: ", e)
            return False

    def scrapeImages(self, subreddit_id):
        """Download media for the top 25 'hot' submissions of a subreddit."""
        try:
            print("DOWNLOADING IMAGES...")
            for submission in self.reddit.subreddit(subreddit_id).hot(
                    limit=25):
                # NOTE(review): result is unused.
                result = self.downloadImage(submission, subreddit_id)
        except Exception as e:
            print("ERROR WHILE REQUESTING: ", e)