def get_writing(self, string_to_write):
    payload = {'text': string_to_write, 'bias': 0.15, 'samples': 1}
    try:
        response = requests.get(
            'http://www.cs.toronto.edu/~graves/handwriting.cgi',
            params=payload)
        if response.status_code != 200:
            return "there was an error creating the image. code: %s" % response.status_code
        html = response.text
        soup = BeautifulSoup(html, 'html.parser')
        # The last <img> on the page carries the rendered handwriting as a
        # base64 data URI; everything after the first comma is the payload.
        image = soup.find_all("img")[-1]
        image_src = image['src']
        b64 = image_src[(image_src.index(',') + 1):]
        file = tempfile.NamedTemporaryFile(suffix=".png")
        file.write(base64.b64decode(b64))
        file.flush()
        img = Imgur()
        url = img.save_from_file(file.name)
        file.close()
        return url
    except Exception as e:
        logging.debug("there was an error: %s", e)
        logging.debug(sys.exc_info()[0])
        return "there was an error writing!"
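# Usage sketch (assumption: get_writing is a method of the Writer class that the
# bot's "write" command constructs with no arguments, as handle_command below does):
writer = Writer()
handwriting_url = writer.get_writing("hello from slurms")
print(handwriting_url)  # an Imgur link on success, or an error string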
def __init__(self, imgur_id):
    """Initializes the Imgur client and the MongoDB connection.

    Args:
        imgur_id: client id used to authorize the Imgur API.
    """
    self._imgur_client_id = imgur_id
    self._imgur = Imgur(self._imgur_client_id)
    self._client = MongoClient('mongodb://127.0.0.1:27017/')
    self._db = self._client.hotornot
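# Usage sketch (the enclosing class name is not shown in this snippet; "Store" is
# a hypothetical stand-in. A local MongoDB instance and an Imgur client id are assumed):
store = Store(imgur_id="your-imgur-client-id")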
def __init__(self):
    print(_('TV-Maxe Channel Editor is starting...'))
    self.gui = gtk.Builder()
    self.gui.add_from_file('ceditor.glade')
    self.gui.connect_signals({
        "quit": self.quit,
        "deschide": self.deschide,  # open
        "openFile": self.openFile,
        "openURL": self.openURL,
        "hideOpenURL": self.hideOpenURL,
        "goURL": self.goURL,
        "saveList": self.saveList,
        "showAddWindow": self.showAddWindow,
        "hideAddWindow": self.hideAddWindow,
        "addChannel": self.addChannel,
        "editChannel": self.editChannel,
        "hideChannelEditor": self.hideChannelEditor,
        "saveChannel": self.saveChannel,
        "deleteChannel": self.deleteChannel,
        "uploadImage": self.uploadImage,
        "salveaza": self.salveaza,  # save
        "addChannelURL": self.addChannelURL,
        "addChannelEdit": self.addChannelEdit,
        "removeChannelURL": self.removeChannelURL,
        "addAudio": self.addAudio,
        "addAudioEdit": self.addAudioEdit,
        "removeAudio": self.removeAudio,
        "saveNew": self.saveNew,
        "saveInfo": self.saveNew2,
        "hideInfo": self.hideInfo,
        "on_entry2_changed": self.on_entry2_changed,
        "hideLogoWin": self.hideLogoWin,
        "selectIcon": self.selectIcon
    })
    self.imgur = Imgur()
    self.imageFinder = imageFinder(self)
    self.db = DBManager(':memory:')
    self.db.build()
    self.gui.get_object('cellrenderertext12').set_property('editable', True)
    self.gui.get_object('cellrenderertext10').set_property('editable', True)
    self.gui.get_object('cellrenderertext8').set_property('editable', True)
    self.gui.get_object('cellrenderertext9').set_property('editable', True)
    self.icons = {}
def main():
    path = sys.argv[1]
    language = sys.argv[2]

    # Google Sheet key we want to use to update the Data Studio report
    sheet_key = 'your-sheet-key'
    g_sheet = gSheet(sheet_key)

    with open('imgur_credentials.json', 'r') as f:
        imgur_credentials = json.load(f)
    imgur_conn = Imgur(imgur_credentials)

    print("Cleaning preexisting images...")
    res = imgur_conn.clean_user_images()
    if not res:
        print("Error on cleaning user images!")

    df_posts, caption_text, img_link = assemble_info(path, imgur_conn, language)
    df_hash = generate_top_hashes(df_posts)
    df_day = generate_df_per_day(df_posts, language)
    df_data = df_posts[['media_code', 'media_type', 'likes', 'comments']]

    metrics = assemble_metrics(df_posts)
    mean_likes = metrics['mean_likes']
    mean_comments = metrics['mean_comments']
    likes_image = metrics['likes_image']
    likes_video = metrics['likes_video']
    likes_carousel = metrics['likes_carousel']

    g_sheet.update_data_sheet(df_data)
    g_sheet.update_wordcloud(caption_text, path, imgur_conn, language)
    g_sheet.update_mainmetrics(mean_likes, mean_comments)
    g_sheet.update_top_hashes(df_hash)
    g_sheet.update_media_metrics(likes_image, likes_video, likes_carousel)
    g_sheet.update_day_metrics(df_day)
    g_sheet.update_profile_info(img_link, path)
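# Usage sketch (assumption: this module is run as a script and does not already
# call main() elsewhere; "report.py" is a hypothetical file name). main() reads
# the data path and the report language from sys.argv, in that order, e.g.:
#
#   python report.py ./data/myprofile en
#
if __name__ == '__main__':
    main()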
def handle_command(command, details, channel, respond=True):
    """
    Receives commands directed at the bot and determines if they are valid
    commands. If so, then acts on the commands. If not, returns back what it
    needs for clarification.
    """
    response = False
    if command == "learn":
        learner = Learner()
        response = learner.learn(details[0], " ".join(details[1:]))
    elif command == "unlearn":
        learner = Learner()
        content = None
        if len(details) > 1:
            content = " ".join(details[1:])
        response = learner.unlearn(details[0], content)
    elif command == "commands":
        learner = Learner()
        response = learner.list_commands()
    elif command == "list":
        learner = Learner()
        response = learner.list(details[0])
    elif command == "cowsay":
        out = subprocess.check_output(['cowsay', " ".join(details)]).decode()
        response = "```" + out + "```"
    elif command == "meme":
        memer = Memer()
        if not details:
            response = memer.list_templates()
        else:
            template = details.pop(0).strip()
            parts = [x.strip() for x in " ".join(details).split(",")]
            top = parts[0] if len(parts) > 0 else None
            bottom = parts[1] if len(parts) > 1 else None
            response = memer.get_meme(template, top, bottom)
    elif command == "hostname":
        response = "slurms coming to you live from: `%s (%s)`" % (
            subprocess.check_output("hostname -A", shell=True).decode().strip(),
            subprocess.check_output("hostname -i", shell=True).decode().strip())
    elif command == "write":
        writer = Writer()
        response = writer.get_writing(" ".join(details))
    elif command == "imglearn":
        learner = Learner()
        imgur = Imgur()
        image_url = imgur.save_from_url(" ".join(details[1:]))
        response = learner.learn(details[0], image_url)
    elif command == "++" or command == "endorse":
        plusser = Plusser()
        reason = ""
        if len(details) > 1:
            reason = " ".join(details[1:])
        response = plusser.plus(details[0], reason)
    elif command == "plusses":
        plusser = Plusser()
        response = plusser.get(details[0])
    elif command == "leaders" or command == "leader_board":
        plusser = Plusser()
        response = plusser.leader_board()
    elif command == "monthly_leaders" or command == "monthly_leader_board":
        plusser = Plusser()
        months_ago = 0
        if details:
            months_ago = details[0]
        response = plusser.monthly_leader_board(months_ago)
    elif command == "youtube":
        query = " ".join(details)
        videos = youtube.youtube_search(query)
        if len(videos) > 0:
            response = videos[-1]
        else:
            response = "sorry, couldn't find any videos for %s" % query
    elif command == "echo":
        response = " ".join(details)
    elif command == "pipe":
        pipe(command, details, channel)
    elif command == "doom":
        doom = Doom()
        response = doom.doom(details)
    else:
        # See if a randomly entered command is something that was
        # previously learned.
        learner = Learner()
        response = learner.get(command)

    if response and respond:
        slack_client.api_call("chat.postMessage", channel=channel,
                              text=response, as_user=True)
    elif not respond:
        return response
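# Usage sketch (assumption: called from the bot's event loop after a Slack message
# has been parsed into a command word, its argument list, and the source channel):
handle_command("write", ["hello", "world"], "#general")

# With respond=False the reply text is returned instead of being posted to Slack:
text = handle_command("plusses", ["alice"], "#general", respond=False)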
import logging

from exchanges import Exchanges
from imgur import Imgur
from slack import Slack
from firestore import FireStore
from dotenv import load_dotenv

load_dotenv()

logging.basicConfig(format='%(asctime)s %(message)s',
                    level=logging.INFO,
                    datefmt='%m/%d/%Y %I:%M:%S %p')
logger = logging.getLogger(__name__)

if __name__ == '__main__':
    logger.info('Starting crypto-balance-check worker')

    exchanges = Exchanges()
    slack = Slack()
    firestore = FireStore()
    imgur = Imgur()

    positions_by_exchange = exchanges.get_all_positions_by_exchange()
    slack.publish_all_positions_by_exchange(positions_by_exchange)

    firestore.update_historic_balances(positions_by_exchange)
    balances = firestore.get_historic_balances()

    # Split the stored balance rows into their first and second fields for the graph.
    url = imgur.send_graph([b[0] for b in balances], [b[1] for b in balances])
    slack.publish_url(url)

    logger.info('Exiting crypto-balance-check worker')
def create_imgur_album(self, image_urls):
    album_link = Imgur().upload_images_to_album(image_urls)
    return album_link
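# Usage sketch ("poster" is a hypothetical instance of whatever class defines
# create_imgur_album; the URLs are placeholders):
album_link = poster.create_imgur_album([
    "https://example.com/one.png",
    "https://example.com/two.png",
])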
import pyimgur, tweepy, sys, pymysql
from datetime import date

from imgur import Imgur
from database import Database
from twitter import Twitter
from fractal import Fractal

# Generate the Fractal of the Day
fract = Fractal()
fract.generate(fotd=True)

# Upload
im = Imgur()
image = im.api.upload_image("fotd.png",
                            title="Fractal of the day (1080p) - " + str(date.today()))
print(image.title)
print(image.link)
print(image.size)
print(image.type)
print("deletion link: imgur.com/delete/" + str(image._delete_or_id_hash))

if image.link == "":
    print("no link")
    sys.exit(0)

# Log the Fractal of the Day
db = Database()
db.log_fotd(image.link, image._delete_or_id_hash, image.size)

# Tweet
def posts():
    form = QuickPost()
    url = form.link.data
    random = form.random.data
    user = current_user.get_id()
    collection = app.config['POSTS_COLLECTION']
    print(url)
    regex_http = re.compile(r'^(?:http|ftp)s?://')

    if request.method == 'POST' and request.files['file']:
        print('upload to imgur')
        file = request.files['file']
        if file and allowed_file(file.filename):
            collection = app.config['POSTS_COLLECTION']
            filename = secure_filename(file.filename)
            g = os.path.join(app.config['DIR_PATH'], app.config['UPLOAD_FOLDER'])
            file.save(os.path.join(g, filename))
            user = current_user.get_id()
            _id = app.config['IMGUR_ID']
            _secret = app.config['IMGUR_SECRET']
            tags = ['External', 'Imgur']
            try:
                img = Imgur(_id, _secret)
                f = img.Image_Upload(os.path.join(g, filename))
                entry_make = "<img src=%s><br><a href='%s' target='_blank'>%s</a>" % (
                    f['link'], f['link'], f['link'])
                try:
                    collection.insert({
                        "title": f['id'],
                        "entry": entry_make,
                        "user": user,
                        "created_on": datetime.datetime.now(),
                        "tags": tags
                    })
                    os.remove(os.path.join(g, filename))
                    print('file deleted')
                    return redirect(url_for('posts'))
                except:
                    flash("Something went bad....", category='error')
            except:
                flash("Something went bad...on imgur", category='error')

    if request.method == 'POST' and regex_http.findall(url):
        print('checking if user is OK')
        if current_user.is_authenticated() and url is not None:
            print('User is OK')
            handle_posts(url)

    if request.method == 'POST' and random:
        entry_make = random
        tags = ['Random', 'Quick']
        try:
            collection.insert({
                "title": 'Random',
                "entry": entry_make,
                "user": user,
                "created_on": datetime.datetime.now(),
                "tags": tags
            })
            return redirect(url_for('posts'))
        except:
            flash("Something went bad....", category='error')

    page, per_page, offset = get_page_items()
    count = app.config['POSTS_COLLECTION'].find().sort(u'_id', -1).count()
    p = app.config['POSTS_COLLECTION'].find().sort(
        u'_id', -1).limit(per_page).skip(offset)
    pagination = get_pagination(page=page,
                                per_page=per_page,
                                total=count,
                                record_name='posts')
    return render_template('posts.html',
                           posts=p,
                           pagination=pagination,
                           form=form)