def makeTweets(tweetBodies):
    """Post every tweet body in order, retrying after rate limits.

    Args:
        tweetBodies: sequence of tweet strings to send via ``post.post``.
    """
    i = 0
    while i < len(tweetBodies):
        try:
            post.post(tweetBodies[i])
            i += 1
        except tweepy.RateLimitError:
            # BUG FIX: the message claimed "1s" but the sleep is 60 seconds;
            # keep the log consistent with the actual back-off.
            print("Ratelimited - sleeping for 60s")
            time.sleep(60)
def testPostWithInvalidArguments(self):
    '''Add posts whose post_text/subject arguments are an int and a list.

    Both should be coerced to str and stored without error.'''
    p.post("a", 78, 69, -1, self.postn, self.root)
    p.post("a", ["hello", "world"], ["sub", "ject"], -1, self.postn, self.root)
    index = b.get_index("a", self.root)
    first, second = index[0], index[1]
    self.assertEqual(first[1], "69")                      # subject
    self.assertEqual(first[2][2], "78")                   # post
    self.assertEqual(second[1], "['sub', 'ject']")        # subject
    self.assertEqual(second[2][2], "['hello', 'world']")  # post
def on_any_event(event):
    """Watchdog callback: upload ``post.txt`` when a file is created.

    Directory events are ignored; modifications are only logged.
    """
    if event.is_directory:
        return None
    kind = event.event_type
    if kind == 'created':
        # A brand-new file triggers an upload of post.txt.
        print("Received created event - %s." % event.src_path)
        post.post('post.txt')
    elif kind == 'modified':
        # Modifications are logged but not re-posted.
        print("Received modified event - %s." % event.src_path)
def get_all_user_posts(username):
    """Return every stored post for *username*, joined with its best ranks.

    Args:
        username: reddit username to filter on.

    Returns:
        list of ``post`` objects, one per matching database row.
    """
    post_list = []
    cur_connection = DatabaseManager.get_connection()
    cursor = cur_connection.cursor()
    # BUG FIX: the WHERE clause contained the literal '******' so
    # .format() never substituted the username; restore the placeholder.
    # NOTE(review): formatting a value into SQL is injection-prone — switch
    # to a parameterized query if _execute_robust supports bind parameters.
    result = DatabaseManager._execute_robust(
        cursor, '''
        SELECT posts.post_id, posts.username, posts.subreddit,
        posts.post_karma, posts.post_date, maximum_ranks.max_hot_sub_rank,
        maximum_ranks.max_hot_all_rank
        FROM posts LEFT JOIN maximum_ranks
        ON posts.post_id = maximum_ranks.post_id
        WHERE username='{username}';
        '''.format(username=username))
    for post_result in result:
        new_post = post(post_id=post_result[0],
                        username=post_result[1],
                        subreddit=post_result[2],
                        post_karma=post_result[3],
                        post_date=post_result[4],
                        max_sub_rank=post_result[5],
                        max_all_rank=post_result[6])
        post_list.append(new_post)
    return post_list
def postinfo(info):
    """Build a ``post`` object from one parsed list-item node.

    *info* is a BeautifulSoup-style tag exposing .get/.find/.find_all.
    """
    # Post id.
    infoid = info.get('info-id')
    # Publication time: epoch seconds formatted as a local timestamp.
    raw_sec = info.find('a', class_='TCSayList_li_time').get('data-time')
    sec = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(int(raw_sec)))
    # Author id and display name (same anchor node for both).
    author = info.find('a', class_='TCSayList_li_author')
    userid = author.get('data-tcuserpanel-uid')
    username = author.text
    handlers = info.find('div', class_='TCSayList_li_handlers')
    # Like count; non-numeric text means no likes yet.
    up = handlers.find_all('span')[2].text
    if not up.isdigit():
        up = 0
    # Comment count; same fallback as likes.
    comment = handlers.find_all('span')[4].get_text()
    if not comment.isdigit():
        comment = 0
    return post(sec, infoid, userid, username, up, comment)
def fetch_top(n=20):
    """ Fetches the n most recent posts in reverse chronological order. """
    if n < 0:
        raise IndexError
    feed = u.get_user_feed('user/feed.xml')
    items = [post(item) for item in feed.xpath('//item')]
    # Newest first, keyed on publication date.
    items.sort(key=lambda entry: entry['pubdate'], reverse=True)
    return items[:n]
def asr_thread():
    """Worker loop: pull audio chunks from ``q2``, downsample, run ASR,
    and post any recognized text."""
    while True:
        try:
            now, file_name, data, speaker = q2.get()
            # 16 kHz int16 PCM -> 8 kHz for the ASR backend.
            D = np.frombuffer(data, dtype=np.int16)
            data = librosa.core.resample(
                1.0 * D, orig_sr=16000,
                target_sr=8000).astype(dtype=np.int16).tobytes()
            speech = asr(data)
            if speech:
                # Normalize any alias of the wake word.
                if speech in CESLeA:
                    speech = "CESLeA"
                # print(speaker, speech)
                post.post(createdAt=now, speaker=speaker,
                          speakerId=speaker, content=speech)
            else:
                # SYNTAX FIX: the original `else:` body was only a comment,
                # which is a SyntaxError; empty transcriptions are ignored.
                pass
        except queue.Empty:
            # NOTE(review): a blocking q2.get() never raises Empty — this
            # handler is harmless but effectively dead code.
            continue
def application(env: tp.Dict[str, tp.Any],
                start_response: tp.Callable[..., None]) -> tp.List[tp.Any]:
    """
    uWSGI entry point

    Manages HTTP request and calls specific functions for [GET, POST]

    Args:
        env: HTTP request environment - dict
        start_response: HTTP response headers function

    Returns:
        data: which will be transmitted
    """
    # Query string -> plain dict.
    query = dict(urllib.parse.parse_qsl(env['QUERY_STRING']))

    # Cookie header -> plain dict. SimpleCookie stores Morsel objects,
    # which `requests` cannot consume, so extract the raw values.
    cookie_obj = SimpleCookie()
    cookie_obj.load(env.get('HTTP_COOKIE', ''))
    cookie = {name: morsel.value for name, morsel in cookie_obj.items()}

    status = '200 OK'
    headers = []
    data = []

    method = env['REQUEST_METHOD']
    if env['PATH_INFO'][:6] == '/admin':
        # Admin actions take precedence over plain method dispatch.
        status, headers, data = admin(env, query, cookie)
    elif method == 'GET':
        status, headers, data = get(env, query, cookie)
    elif method == 'POST':
        status, headers, data = post(env, query, cookie)
    elif method == 'OPTIONS':
        # CORS preflight: advertise supported methods, no body.
        status = '200 OK'
        headers = [
            ('Access-Control-Allow-Origin', '*'),
            ('Access-Control-Allow-Methods', 'GET, POST, HEAD, OPTIONS'),
            ('Access-Control-Allow-Headers', '*'),
            ('Allow', 'GET, POST, HEAD, OPTIONS')
            # TODO: Add content application/json
        ]

    # Emit status line and headers, then hand the body back to uWSGI.
    start_response(status, headers)
    return data
def asr_thread():
    """Worker loop: pull utterances from ``q2``, run Google STT on the
    saved audio file, and post any recognized text."""
    # FIX: removed `global d` — `d` was never referenced in this function.
    while True:
        try:
            now, file_name, data, speaker = q2.get()
            speech = google_stt(file_name)
            if speech:
                # Normalize any alias of the wake word.
                if speech in CESLeA:
                    speech = "CESLeA"
                print(speaker, speech)
                post.post(createdAt=now, speaker=speaker,
                          speakerId=speaker, content=speech)
        except queue.Empty:
            # NOTE(review): a blocking q2.get() never raises Empty — this
            # handler is harmless but effectively dead code.
            continue
def post(self):
    """Handle a photo upload: ensure the MyUser entity exists, append the
    uploaded blob's serving URL and caption to the user's ``post``
    collection entity, then redirect home and render the template.
    """
    self.response.headers['Content-Type'] = 'text/html'
    url_string = ''
    url = ''
    # BUG FIX: collection_key is read in template_values below even when
    # there is no logged-in user; initialize it so that path cannot raise
    # NameError.
    collection_key = None
    user = users.get_current_user()
    if user:
        url = users.create_logout_url(self.request.uri)
        url_string = 'logout'
        # Lazily create the MyUser record on first visit.
        myuser_details = ndb.Key('MyUser', user.email())
        myuser = myuser_details.get()
        if myuser == None:
            myuser = MyUser(id=user.email())
            myuser.email_address = user.email()
            myuser.userId = user.nickname()
            welcome = 'Welcome to the application'
            myuser.put()
        # Resolve the uploaded blob to a serving URL.
        upload = self.get_uploads()[0]
        blobinfo = blobstore.BlobInfo(upload.key())
        image_url = get_serving_url(blobinfo)
        caption = self.request.get('caption')
        # Fetch (or create) the per-user photo collection and append.
        collection_key = ndb.Key('post', user.email()).get()
        if collection_key == None:
            collection_key = post(id=user.email())
        collection_key.photo_url.append(image_url)
        collection_key.email_address = user.email()
        collection_key.caption.append(caption)
        collection_key.put()
        self.redirect('/')
    else:
        url = users.create_login_url(self.request.uri)
        url_string = 'login'
        self.redirect('/')
    template_values = {
        'url': url,
        'url_string': url_string,
        'user': user,
        'collection_key': collection_key,
    }
    template = JINJA_ENVIRONMENT.get_template('photocomment.html')
    self.response.write(template.render(template_values))
def main():
    """Interactive entry point: log into LinkedIn with Selenium and either
    post all queued content or share all queued content."""
    print("What do you want to do?:\n")
    print(" 1)Post all content in posts")
    print(" 2)Share all content in shares")
    print("\n:> ", end='')
    mode = int(input())
    browser = webdriver.Chrome(executable_path=input(
        "Insert the path of the chrome driver for Selenium: ").replace(
            "/", "\\"))
    try:
        if mode not in (1, 2):
            raise ValueError("Invalid option selected")
        # Credentials and page selectors come from the local config helpers.
        email = lib.find_data(lib.PSEC, "linkedin", lib.EPATTERN)
        password = lib.find_data(lib.PSEC, "linkedin", lib.PPATTERN)
        url = lib.find_field("linkedin", "url")
        email_xpath = lib.find_field("linkedin", "email_xpath")
        password_xpath = lib.find_field("linkedin", "password_xpath")
        submit_xpath = lib.find_field("linkedin", "submit_xpath_1")
        try:
            browser.get(url)
            browser.find_element_by_xpath(email_xpath).send_keys(email)
            browser.find_element_by_xpath(password_xpath).send_keys(password)
            if mode == 1:
                post.post(browser)
            else:
                share.share(browser)
        except common.exceptions.NoSuchElementException:
            print_exc()
        except common.exceptions.TimeoutException:
            print("Something went wrong during finding a clickable element")
            print_exc()
    except ValueError:
        print_exc()
    except Exception:
        # FIX: a bare `except:` also swallowed SystemExit/KeyboardInterrupt;
        # catch Exception so Ctrl-C still works.
        print_exc()
        sleep(30)
    finally:
        browser.quit()
def gathertolist(lim):
    # Fetch the `lim` newest submissions from the loseit subreddit and wrap
    # each one in a `post` record of (author, short link, title + selftext).
    # NOTE(review): this looks like Python 2 code — .encode('utf-8') must
    # yield `str` for the `+ ' ' +` concatenation below to work; on
    # Python 3 it would raise TypeError (bytes + str). Confirm interpreter.
    post_list = []
    for submission in loseit_subreddit.get_new(limit = lim):
        newpost = post.post(str(submission.author).encode('utf-8'),
                            submission.short_link.encode('utf-8'),
                            submission.title.encode('utf-8') + ' ' +
                            submission.selftext.encode('utf-8'))
        post_list.append(newpost)
    return post_list
def refresh(self):
    """Reload self.posts from the database, newest first."""
    self.db.get_cursor().execute(
        "SELECT * FROM posts ORDER BY created DESC")
    rows = self.db.get_cursor().fetchall()
    # Build the new list fully before swapping it in.
    self.posts = [
        post(self.db, row["id"], row["title"], row["content"],
             row["created"], row["author_id"], row["image_path"])
        for row in rows
    ]
def get_all_posts(dateLimit=None, subreddit=None):
    """Fetch posts joined with their best hot ranks.

    Args:
        dateLimit: if given, only posts with post_date strictly greater.
        subreddit: if given, only posts from this subreddit.

    Returns:
        list of ``post`` objects, one per matching row.
    """
    cur_connection = DatabaseManager.get_connection()
    cursor = cur_connection.cursor()
    main_query = """
    SELECT posts.post_id, posts.username, posts.subreddit, posts.post_karma,
    posts.post_date, maximum_ranks.max_hot_sub_rank,
    maximum_ranks.max_hot_all_rank FROM posts LEFT JOIN maximum_ranks ON
    posts.post_id = maximum_ranks.post_id
    """
    # Append the WHERE clause matching whichever filters were supplied.
    if subreddit is None and dateLimit is None:
        final_query = main_query
    elif dateLimit is None:
        final_query = main_query + """WHERE subreddit='{subreddit}'""".format(
            subreddit=subreddit)
    elif subreddit is None:
        final_query = main_query + """WHERE post_date > {dateLimit}""".format(
            dateLimit=str(dateLimit))
    else:
        final_query = (main_query +
                       """WHERE subreddit='{subreddit}' and post_date > {dateLimit}""".format(
                           subreddit=subreddit, dateLimit=str(dateLimit)))
    result = DatabaseManager._execute_robust(cursor, final_query)
    return [
        post(post_id=row[0], username=row[1], subreddit=row[2],
             post_karma=row[3], post_date=row[4],
             max_sub_rank=row[5], max_all_rank=row[6])
        for row in result
    ]
def __init__(self, db):
    """Cache every post from *db* in self.posts, newest first."""
    self.db = db
    self.db.get_cursor().execute(
        "SELECT * FROM posts ORDER BY creation_date DESC")
    rows = self.db.get_cursor().fetchall()
    # Materialize the full list before assigning the attribute.
    self.posts = [
        post(db, row["id"], row["title"], row["content"],
             row["creation_date"], row["author_id"], row["image_path"])
        for row in rows
    ]
def admin_api_grab_post():
    """Admin endpoint: return one post as JSON, looked up by the
    form-posted ``post_id``.  Guard clauses handle the failure modes."""
    if not admin_check():
        return "admin_fail"
    if request.method != "POST":
        return None
    post_id = int(request.form["post_id"])
    if post_id <= 0:
        # Non-positive ids are rejected with a sentinel body.
        return '0'
    return post(db, post_id).jsonify()
def req(self, url, values=None):
    """POST *values* (plus stored credentials) to *url* and return the
    parsed JSON response.  Retries forever on malformed JSON.

    Args:
        url: endpoint to post to.
        values: optional dict of form fields; credentials are added when
            it is non-empty.
    """
    # FIX: the default `values={}` was a shared mutable dict that the
    # user/pass assignments below mutated across calls; use None sentinel.
    if values is None:
        values = {}
    while True:
        if values != {}:
            values['user'] = self.user
            values['pass'] = self.passwd
        post_data = urllib.urlencode(values)
        js = post.post(url, post_data)
        try:
            dt = json.loads(js)
        except ValueError:
            # FIX: narrowed the bare except — json.loads raises ValueError
            # on malformed payloads.
            logger.info('Got invalid JSON; retrying')
            logger.debug(js)
            continue
        return dt
def view_post(id):
    """Render a single post page, including viewer info when the session
    is authenticated."""
    db.connect()
    current = post(db, id)
    all_posts = posts(db)
    # Anonymous viewers get no client object.
    if session.get('authenticated'):
        client = user(session['user_id'], db)
    else:
        client = None
    return render_template('post.html',
                           page_title=site_name,
                           p=current,
                           client=client,
                           ago=timeago,
                           date=datetime,
                           recent_posts=all_posts.most_recent(current.id))
def _subtopost(submission):
    """Convert a PRAW submission into a local ``post`` record."""
    return post(
        post_id=submission.id,
        username=str(submission.author),
        subreddit=str(submission.subreddit),
        post_karma=submission.score,
        post_date=submission.created_utc,
    )
def readPostsCSV(posts):
    """Append one ``post`` per data row of AnnotatedPosts.csv to *posts*.

    The header line is skipped; column 8 of each row is unused.
    """
    path = 'AnnotatedPosts.csv'
    with open(path, "rt", encoding='utf8') as f:
        reader = csv.reader(f, delimiter=',')
        next(f)  # skip the header row before the reader consumes it
        for row in reader:
            # Columns: text, six integer annotation counts, rating,
            # (unused), drug name.
            posts.append(post(
                row[0],
                int(row[1]), int(row[2]), int(row[3]),
                int(row[4]), int(row[5]), int(row[6]),
                int(row[7]),
                row[9],
            ))
def get(self):
    # Render the photo-comment page.  For a logged-in user this lazily
    # creates their MyUser entity and their 'post' collection entity and
    # builds a blobstore upload URL.  NOTE(review): anonymous visitors get
    # a redirect AND the template is still rendered afterwards — likely an
    # oversight, but preserved here.
    self.response.headers['Content-Type'] = 'text/html'
    url_string = ''
    url = ''
    collection_key = []
    user = users.get_current_user()
    upload_url = ""
    if user:
        url = users.create_logout_url(self.request.uri)
        url_string = 'logout'
        myuser_details = ndb.Key('MyUser', user.email())
        myuser = myuser_details.get()
        # Target URL for the multipart photo upload POST (/photocomment).
        upload_url = blobstore.create_upload_url('/photocomment')
        if myuser == None:
            # First visit: create the user record.
            myuser = MyUser(id=user.email())
            myuser.email_address = user.email()
            myuser.userId = user.nickname()
            welcome = 'Welcome to the application'
            myuser.put()
        collection_key = ndb.Key('post', user.email())
        collection_key = collection_key.get()
        if collection_key == None:
            # No photo collection yet: create an empty one keyed by email.
            collection_key = post(id=user.email())
            collection_key.put()
    else:
        url = users.create_login_url(self.request.uri)
        url_string = 'login'
        self.redirect('/')
    template_values = {
        'url': url,
        'url_string': url_string,
        'user': user,
        'collection_key': collection_key,
        'upload_url': upload_url,
    }
    template = JINJA_ENVIRONMENT.get_template('photocomment.html')
    self.response.write(template.render(template_values))
def fetch(start, n=0):
    """ Starting at the starting post id, fetches n posts (assuming the
    posts are ordered). Positive n for posts since start, negative n for
    previous posts. Zero (or nothing) for only the post with the given
    id. """
    # Get the tree, extract the starting point.
    tree = u.get_user_feed('user/feed.xml')
    stati = [post(status) for status in tree.xpath('//channel/item')]
    stati.sort(key=lambda x: x['pubdate'], reverse=True)
    starting = stati.index(
        [status for status in stati if status['guid'] == start][0])
    # Get only the single post.
    if n == 0:
        # BUG FIX: previously returned the integer index `starting`
        # instead of the post itself; return the post as documented.
        return stati[starting]
    # Get n posts; clamp when fewer than |n| remain after the start.
    if len(stati[starting:]) < abs(n):
        if n > 0:
            return stati[:starting]
        if n < 0:
            return stati[starting:]
    return stati[starting:n]
def processa(raw):
    '''
    Decrypt an AES-ECB payload, strip padding, and forward the JSON text.

    "raw": bytes object (encrypted)
    Returns the result of p.post(...) on success, False on failure.
    '''
    print(time.strftime("%c"), "|", len(raw), "bytes received |", end=' ')
    # Remove trailing \r\n bytes, if present.
    if raw[-1] == 10 and raw[-2] == 13:
        raw = raw[0:len(raw) - 2]
    # Decrypt with the fixed shared key.
    try:
        key = "libeliumlibelium"
        decrypted = AES.new(key, AES.MODE_ECB).decrypt(raw)
    except Exception as e:
        print(e)
        return False
    # Strip AES padding bytes: "{json_string}\x01\x01\x05\x03" — keep
    # everything up to and including the last '}'.
    # BUG FIX: the original computed find(1 + ord('}')), i.e. searched for
    # byte 0x7e ('~'), instead of slicing just past the closing brace.
    if decrypted[-1] != ord('}'):
        decrypted = decrypted[0:decrypted.rfind(ord('}')) + 1]
    # Bytes -> str.
    decrypted_string = decrypted.decode('utf-8')
    # NOTE(review): json.dumps RE-serializes the string; if the intent
    # (per the original comment) is to parse/validate the payload, this
    # should probably be json.loads — confirm with the consumer of p.post.
    try:
        decrypted_string = json.dumps(decrypted_string)
    except Exception as e:
        print(e)
        return False
    # Forward the decrypted message to the remote server.
    post_result = p.post(decrypted_string)
    return post_result
def subButtonMouseClicked(self, evt):
    # Refresh the post table with posts from the subreddit selected in the
    # table (column 1), by shelling out to getPosts.py and re-reading the
    # CSV it writes.  (Jython/Swing event handler — presumably; note the
    # java-style table API.)
    selectedSub = self.postTable.getValueAt(
        self.postTable.getSelectedRow(), 1)
    print("Collecting posts...")
    # NOTE(review): shell=True with an interpolated value — acceptable for
    # a local tool, but unsafe if the subreddit name is untrusted.
    subprocess.call("python3 getPosts.py " + selectedSub, shell=True)
    print("Done!")
    with open("subPosts.csv", "r") as f:
        reader = csv.reader(f)
        tempList = list(reader)
    # Rebuild the module-level postList in place so other references see it.
    del postList[:]
    for item in tempList:
        postList.append(
            post(item[0], item[1], item[2], item[3], item[4], item[5]))
    self.postTable.getModel().setRowCount(0)
    for idx, item in enumerate(postList):
        dt = datetime.utcfromtimestamp(float(item.timePosted))
        postTime = item.title + " (" + self.timeAgo(dt) + ")"
        self.postTable.getModel().addRow([postTime, item.sub, item.postID])
def req(self, url, values=None):
    """POST a signed request to the exchange API and return parsed JSON.

    Retries forever on malformed JSON or on an API-reported error.

    Args:
        url: endpoint to post to.
        values: optional dict of request fields; a fresh nonce is set on
            every attempt.
    """
    # FIX: the default `values={'nonce': None}` was a shared mutable dict
    # that every call mutated; use None and build a fresh dict per call.
    if values is None:
        values = {}
    while True:
        values['nonce'] = _get_nonce()
        post_data = urllib.urlencode(values)
        # HMAC-sign the body with the account secret.
        headers = {
            'Rest-Key': self.key,
            'Rest-Sign': b64encode(
                _hmac_digest(post_data, b64decode(self.sec)))
        }
        js = post.post(url, post_data, headers)
        try:
            dt = json.loads(js)
        except ValueError:
            # FIX: narrowed the bare except — json.loads raises ValueError.
            logger.info('Got invalid JSON; retrying')
            logger.debug(js)
            continue
        if 'error' in dt:
            logger.info('MtGox reported an error; retrying')
            logger.debug(str(dt))
            continue
        return dt
def refreshPosts(self):
    # Re-run the scraper, reload the table of hot posts from its CSV
    # output, then stamp the refresh time.  (Jython/Swing handler —
    # presumably, given SimpleDateFormat/Calendar.)
    print("Collecting posts...")
    subprocess.call("python3 getPosts.py", shell=True)
    print("Done!")
    with open("hotPosts.csv", "r") as f:
        reader = csv.reader(f)
        tempList = list(reader)
    # Rebuild the module-level postList in place so other references see it.
    del postList[:]
    for item in tempList:
        postList.append(
            post(item[0], item[1], item[2], item[3], item[4], item[5]))
    self.postTable.getModel().setRowCount(0)
    for idx, item in enumerate(postList):
        dt = datetime.utcfromtimestamp(float(item.timePosted))
        postTime = item.title + " (" + self.timeAgo(dt) + ")"
        self.postTable.getModel().addRow([postTime, item.sub, item.postID])
    self.timeLabel.setText("Last Refreshed: " + SimpleDateFormat(
        "hh:mm a z").format(Calendar.getInstance().getTime()))
# NOTE(review): Python 2 script fragment (print statements, urllib.urlopen);
# downloads a forecast image, samples a crop's average color as a wind-speed
# proxy, and posts the reading.  `ura`/`link` are defined above this view.
check = urllib.urlopen(link)
urlcheck = check.getcode()
if urlcheck == 200:
    urllib.urlretrieve(link, str(ura) + ".png")
    img = Image.open(str(ura) + ".png")
    img = img.convert('RGB')
    # Preluka crop: average color encodes the wind speed.
    img_pre = img.crop((163, 63, 163 + 92, 63 + 71))
    hitrost_pre = round(compute_average_image_color(img_pre))
    mydata = [('cas', ura), ('var', 'bora_o'), ('pre', hitrost_pre)]
    from post import post
    post(mydata)
    print mydata
## medium cloud cover ("srednja oblacnost")
ura = -3
obl_me_pre = 0
# NOTE(review): `list` as a loop variable shadows the builtin.
for list in my_list:
    ura = ura + 3
    link = "http://bora.gekom.hr/png_wrf/Kvarner/clfmi_" + list + ".png"
    print link
    check = urllib.urlopen(link)
    urlcheck = check.getcode()
    if urlcheck == 200:
        urllib.urlretrieve(link, str(ura) + ".png")
#!/usr/bin/env python3 # -*- coding:utf-8 -*- from collector import collect_lists from filter_illust import filter_illust from post import post if __name__ == "__main__": collect_lists() filter_illust() post()
print("You must specify either the -get or -post option") else: #GET request if args.get: #If httpc.py -get -help if help: print(GET_HELP) else: response = get.get(host, path, header) #POST request elif args.post: #If httpc.py -post -help if help: print(POST_HELP) else: response = post.post(host, path, data, header) #If httpc.py -help print general help message elif help: parser.print_usage() print(HELP) #Output the response if not help: #If -o is specified, output response to file if(args.o): #output verbose data if args.v: output.write(response.get_header() + "\r\n\r\n") output.write(response.get_body()) output.close() #Output reponse to console
from os import mkdir from sys import argv from random import random as rand from post import post N = int(argv[1]) for t in range(N): sec="" pas="" L = 32 for i in range(L): sec += chr(ord('a')+int(rand()*26)) pas += chr(ord('a')+int(rand()*26)) sec = popen("echo -n %s | md5sum"%sec).read().split(' ')[0] pas = popen("echo -n %s | md5sum"%pas).read().split(' ')[0] rep = "cat pwn100.c | sed 's/1337/%d/'"%(1200+t) source= popen(rep).read() post(t+1, 8, sec) print "posted",t+1,8,sec mkdir('team%d'%t) f = file('team%d/pwn100.c'%t, 'w+') f.write(source) f = file('team%d/flag.txt'%t, 'w+') f.write(sec) f = file('team%d/pass.txt'%t, 'w+') f.write(pas) popen("echo 'INSERT INTO `flags` (`teamID`, `taskID`, `flag`) VALUES (%d, 6, '\"'\"%s\"'\"');\' >> pwn100.sql"%((t+1), sec))
def post(self, request):
    # Delegate HTTP POST handling to the module-level post() helper.
    # NOTE: inside this body, `post` resolves to the global, not this
    # method — the method name only shadows it in the class namespace.
    return post(request)
def aladin(danes, ura, kater):
    # Download one ALADIN wind-forecast image for date `danes`, hour `ura`
    # and region code `kater`, sample the average color of fixed crops
    # (one per location) as a wind-speed proxy, and post the readings.
    # NOTE(review): Python 2 (print statements, urllib.urlopen); `am_pm`
    # is a module-level global defined outside this view.
    link = "http://meteo.arso.gov.si/uploads/probase/www/model/aladin/field/ad_"+danes+"-"+am_pm+"00_"+kater+ura+".png"
    check=urllib.urlopen(link)
    urlcheck=check.getcode()
    if urlcheck == 200 :
        urllib.urlretrieve (link, ura+".png")
        img = Image.open(ura+".png")
        img = img.convert('RGB')
        # Istria region.
        if kater == "vm-va10m_hr-w_" :
            # Liznjan.
            img_liz=img.crop((198, 411, 57+198, 52+411))
            hitrost_liz = compute_average_image_color(img_liz)
            # Krk.
            img_krk=img.crop((453, 304, 54+453, 38+304))
            hitrost_krk = compute_average_image_color(img_krk)
            # Preluka.
            img_pre=img.crop((337, 121, 62+337, 45+121))
            hitrost_pre = compute_average_image_color(img_pre)
            # Savudrija.
            img_sav=img.crop((19, 41, 19+21, 41+30))
            hitrost_sav = compute_average_image_color(img_sav)
            # Umag.
            img_umag=img.crop((24, 72, 24+26, 28+72))
            hitrost_umag = compute_average_image_color(img_umag)
            # Novigrad.
            img_nov=img.crop((33, 135, 33+23, 135+22))
            hitrost_nov = compute_average_image_color(img_nov)
            mydata=[('cas',ura),('var','istra'),('liz',hitrost_liz),('krk',hitrost_krk),('pre',hitrost_pre),('sav',hitrost_sav),('umag',hitrost_umag),('nov',hitrost_nov)]
            print mydata
        # Primorska region.
        elif kater == "vm-va10m_si-sw_" :
            # MJ.
            img_mj=img.crop((126, 142, 126+69, 142+55))
            hitrost_mj = compute_average_image_color(img_mj)
            # Barcola.
            img_bar=img.crop((204, 184, 204+42, 184+36))
            hitrost_bar = compute_average_image_color(img_bar)
            # Portoroz.
            img_por=img.crop((147, 316, 147+35, 316+33))
            hitrost_por = compute_average_image_color(img_por)
            mydata=[('cas',ura),('var','pri'),('mj',hitrost_mj),('bar',hitrost_bar),('por',hitrost_por)]
            print mydata
        # Send the collected readings to the website.
        from post import post
        post(mydata)
    return
from post import post
import datetime

# Build a post interactively, then echo its fields back.
postObject = post(
    input("Enter post title"),
    input("Enter post dicription"),
    input("Enter post author name"),
    # BUG FIX: datetime.datetime.now was passed WITHOUT calling it, so
    # PublishDate held the bound method and .strftime("%A") below failed.
    datetime.datetime.now(),
    input("Enter post picture Url"))
print(postObject.Title)
print(postObject.Author)
print(postObject.PublishDate.strftime("%A"))
print(postObject.Discription)
#!/usr/bin/env python
import RPi.GPIO as GPIO
from mfrc522 import SimpleMFRC522
import post

reader = SimpleMFRC522()

# Poll the RFID reader forever; forward any non-empty tag text.
while True:
    tag_id, text = reader.read()
    # Drop the stored newline, then trim surrounding whitespace.
    cleaned = text.replace('\n', '').strip()
    if cleaned:
        post.post(cleaned, cleaned)
from texting import texting from post import post import sys #import argparse url = "http://ec2-34-207-101-233.compute-1.amazonaws.com" #url = "http://localhost" p = post() key = 0 def main2(): running = True command = "null" play = True; while (running): running = False #command = raw_input("CNTLR~") if (sys.argv[1] == "battery"): battery() elif (sys.argv[1] =="chat"): chat() elif (sys.argv[1] == "texting" or sys.argv[1] == "text"): message ="" for i in range(3,len(sys.argv)): message +=sys.argv[i]+" " text(sys.argv[2],message) elif (str(sys.argv[1])[0:6] == "volume"):
if len(sys.argv) != 2: print 'Usage: dumpWork.py <dump>' sys.exit(1) date = datetime.datetime.now() logger = logging.getLogger('ArkbotLogger') formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s', '%H:%M:%S') logger.setLevel(logging.INFO) bot = arkbot.Arkbot(arkbot._botName, arkbot._wiki, logger) try: if not debug: bot.login(getpass.getpass('Bot password ? ')) post.post(bot, 'data/pagesEnImpasse-{}.txt'.format(sys.argv[1]), dump, 1, debug) post.post(bot, 'data/pagesVides-{}.txt'.format(sys.argv[1]), dump, 2, debug) post.post(bot, 'data/frwiki-ns_redirects-{}.txt'.format(sys.argv[1]), dump, 3, debug) noportal.noportal(bot, 'data/lastEdit-{}.txt'.format(sys.argv[1]), dump, 5, debug) #noportal.noportal(bot, 'data/mostEdit-{}.txt'.format(sys.argv[1]), dump, 6, debug) #noportal.noportal(bot, 'data/articlesSansPortail-musique-{}.txt'.format(sys.argv[1]), dump, 2, debug) #noportal.noportal(bot, 'data/articlesSansPortail-acteurs-{}.txt'.format(sys.argv[1]), dump, 3, debug) #noportal.noportal(bot, 'data/articlesSansPortail-h**o-{}.txt'.format(sys.argv[1]), dump, 4, debug) noportal.noportal(bot, 'data/articlesSansPortail-{}.txt'.format(sys.argv[1]), dump, 1, debug) noportal.noportal(bot, 'data/articlesSansInfobox-musique-{}.txt'.format(sys.argv[1]), dump, 8, debug) noportal.noportal(bot, 'data/articlesSansInfobox-acteurs-{}.txt'.format(sys.argv[1]), dump, 9, debug) noportal.noportal(bot, 'data/articlesSansInfobox-{}.txt'.format(sys.argv[1]), dump, 7, debug)
def aladin(danes, ura, kater):
    # Download one ALADIN wind-forecast image for date `danes`, hour `ura`
    # and region code `kater`; for each location sample the average color
    # of a fixed crop (wind speed proxy) and derive a direction from a
    # second crop via direction2(), then post all readings.
    # NOTE(review): Python 2 (print statements, urllib.urlopen); `am_pm`
    # is a module-level global defined outside this view.
    link = "http://meteo.arso.gov.si/uploads/probase/www/model/aladin/field/ad_" + danes + "-" + am_pm + "00_" + kater + ura + ".png"
    check = urllib.urlopen(link)
    urlcheck = check.getcode()
    if urlcheck == 200:
        urllib.urlretrieve(link, ura + ".png")
        img = Image.open(ura + ".png")
        img = img.convert('RGB')
        # Istria region.
        if kater == "vm-va10m_hr-w_":
            # Liznjan.
            img_liz = img.crop((198, 411, 57 + 198, 52 + 411))
            hitrost_liz = compute_average_image_color(img_liz)
            dir_liz = direction2(
                np.array(img.crop((205, 415, 205 + 56, 54 + 415))))
            # Krk.
            img_krk = img.crop((453, 304, 54 + 453, 38 + 304))
            hitrost_krk = compute_average_image_color(img_krk)
            dir_krk = direction2(
                np.array(img.crop((441, 292, 441 + 69, 292 + 65))))
            # Preluka.
            img_pre = img.crop((337, 121, 62 + 337, 45 + 121))
            hitrost_pre = compute_average_image_color(img_pre)
            dir_pre = direction2(
                np.array(img.crop((332, 105, 332 + 77, 105 + 73))))
            # Savudrija.
            img_sav = img.crop((19, 41, 19 + 21, 41 + 30))
            hitrost_sav = compute_average_image_color(img_sav)
            dir_sav = direction2(np.array(img.crop((3, 36, 3 + 57, 36 + 64))))
            # Novigrad.
            img_nov = img.crop((33, 135, 33 + 23, 135 + 22))
            hitrost_nov = compute_average_image_color(img_nov)
            dir_nov = direction2(
                np.array(img.crop((32, 225, 32 + 74, 273 + 58))))
            # Premantura.
            img_prm = img.crop((199, 446, 199 + 22, 446 + 23))
            hitrost_prm = compute_average_image_color(img_prm)
            dir_prm = direction2(
                np.array(img.crop((173, 429, 173 + 54, 429 + 51))))
            # Losinj.
            img_los = img.crop((362, 433, 362 + 48, 433 + 18))
            hitrost_los = compute_average_image_color(img_los)
            dir_los = direction2(
                np.array(img.crop((354, 408, 354 + 66, 408 + 43))))
            mydata = [('cas', ura), ('var', 'istra'), ('liz', hitrost_liz),
                      ('krk', hitrost_krk), ('pre', hitrost_pre),
                      ('sav', hitrost_sav), ('prm', hitrost_prm),
                      ('los', hitrost_los), ('nov', hitrost_nov),
                      ('smer_liz', dir_liz), ('smer_krk', dir_krk),
                      ('smer_pre', dir_pre), ('smer_sav', dir_sav),
                      ('smer_nov', dir_nov), ('smer_los', dir_los),
                      ('smer_prm', dir_prm)]
            print mydata
        # Primorska region.
        elif kater == "vm-va10m_si-sw_":
            # MJ.
            img_mj = img.crop((126, 142, 126 + 69, 142 + 55))
            hitrost_mj = compute_average_image_color(img_mj)
            dir_mj = direction2(
                np.array(img.crop((118, 132, 118 + 89, 132 + 66))))
            # Barcola.
            img_bar = img.crop((204, 184, 204 + 42, 184 + 36))
            hitrost_bar = compute_average_image_color(img_bar)
            dir_bar = direction2(
                np.array(img.crop((202, 182, 202 + 66, 182 + 58))))
            # Portoroz.
            img_por = img.crop((147, 316, 147 + 35, 316 + 33))
            hitrost_por = compute_average_image_color(img_por)
            dir_por = direction2(
                np.array(img.crop((127, 316, 127 + 78, 316 + 44))))
            # Lignano.
            img_lig = img.crop((0, 177, 0 + 24, 177 + 30))
            hitrost_lig = compute_average_image_color(img_lig)
            dir_lig = direction2(np.array(img.crop(
                (0, 177, 0 + 42, 177 + 43))))
            # Grado.
            img_grado = img.crop((75, 206, 75 + 36, 206 + 23))
            hitrost_grado = compute_average_image_color(img_grado)
            dir_grado = direction2(
                np.array(img.crop((75, 201, 75 + 56, 201 + 42))))
            # Umag.
            img_umag = img.crop((125, 360, 125 + 29, 360 + 17))
            hitrost_umag = compute_average_image_color(img_umag)
            dir_umag = direction2(
                np.array(img.crop((102, 342, 102 + 56, 342 + 50))))
            mydata = [('cas', ura), ('var', 'pri'), ('mj', hitrost_mj),
                      ('bar', hitrost_bar), ('por', hitrost_por),
                      ('lig', hitrost_lig), ('umag', hitrost_umag),
                      ('grado', hitrost_grado), ('smer_mj', dir_mj),
                      ('smer_bar', dir_bar), ('smer_por', dir_por),
                      ('smer_umag', dir_umag), ('smer_lig', dir_lig),
                      ('smer_grado', dir_grado)]
            print mydata
        # Gorenjska region.
        elif kater == "vm-va10m_si-nw_":
            # Bohinj.
            img_boh = img.crop((266, 285, 266 + 20, 285 + 11))
            hitrost_boh = compute_average_image_color(img_boh)
            dir_boh = direction2(
                np.array(img.crop((212, 247, 212 + 139, 247 + 104))))
            mydata = [('cas', ura), ('var', 'gor'), ('boh', hitrost_boh),
                      ('smer_boh', dir_boh)]
            print mydata
        # Stajerska region.
        elif kater == "vm-va10m_si-ne_":
            # Murska Sobota.
            img_mur = img.crop((466, 210, 466 + 17, 210 + 11))
            hitrost_mur = compute_average_image_color(img_mur)
            dir_mur = direction2(
                np.array(img.crop((439, 171, 439 + 75, 171 + 69))))
            mydata = [('cas', ura), ('var', 'sta'), ('mur', hitrost_mur),
                      ('smer_mur', dir_mur)]
            print mydata
        # Send the collected readings to the website.
        from post import post
        post(mydata)
    return
# NOTE(review): Python 2 script chunk (print statements, file()); `mkdir`,
# `argv`, `popen` are presumably imported above this view — verify.
# Generates N per-team copies of the pwn100 challenge, each with a unique
# listening port, flag and password, and appends SQL insert lines.
from random import random as rand
from post import post

N = int(argv[1])
for t in range(N):
    # Random 32-char lowercase seeds for the flag (sec) and password (pas).
    sec = ""
    pas = ""
    L = 32
    for i in range(L):
        sec += chr(ord('a') + int(rand() * 26))
        pas += chr(ord('a') + int(rand() * 26))
    # The md5 hex digest of each seed becomes the actual secret.
    sec = popen("echo -n %s | md5sum" % sec).read().split(' ')[0]
    pas = popen("echo -n %s | md5sum" % pas).read().split(' ')[0]
    # Patch the per-team port into the challenge source.
    rep = "cat pwn100.c | sed 's/1337/%d/'" % (1200 + t)
    source = popen(rep).read()
    # Register the flag with the scoring server (task id 8).
    post(t + 1, 8, sec)
    print "posted", t + 1, 8, sec
    mkdir('team%d' % t)
    f = file('team%d/pwn100.c' % t, 'w+')
    f.write(source)
    f = file('team%d/flag.txt' % t, 'w+')
    f.write(sec)
    f = file('team%d/pass.txt' % t, 'w+')
    f.write(pas)
    # NOTE(review): task id here is 6 while post() above used 8 — confirm.
    popen(
        "echo 'INSERT INTO `flags` (`teamID`, `taskID`, `flag`) VALUES (%d, 6, '\"'\"%s\"'\"');\' >> pwn100.sql"
        % ((t + 1), sec))