def __init__(self, ID, type_graph=None, name=None, Elements=None,
             plan_elm=None, Edges=None, Restrictions=None):
    if type_graph is None:
        type_graph = 'PlanElementGraph'
    if Elements is None:
        Elements = set()
    if Edges is None:
        Edges = set()
    if Restrictions is None:
        Restrictions = set()

    self.OrderingGraph = OrderingGraph(ID=uid(5))
    self.CausalLinkGraph = CausalLinkGraph(ID=uid(6))
    self.flaws = FlawLib()
    self.solved = False
    self.initial_dummy_step = None
    self.final_dummy_step = None

    if plan_elm is None:
        plan_elm = PlanElement(uid=ID, typ=type_graph, name=name)

    super(PlanElementGraph, self).__init__(ID, type_graph, name, Elements,
                                           plan_elm, Edges, Restrictions)

def get(self, image_id=''):
    self.set_json_output()
    download = self.get_argument('download', '')
    if download:
        response = yield Task(
            self.api_call,
            url=self.settings['API_URL'] + '/images?download={}'.format(download),
            method='GET')
        if response and response.code in [200, 201]:
            respdata = loads(response.body.decode('utf-8'))
            links = respdata['data']
            folder = self.settings['static_path'] + '/' + str(uid())
            mkdir(folder)
            for link in links:
                info('Downloading: ' + link['url'])
                with open(folder + '/' + link['filename'], 'wb') as f:
                    c = pycurl.Curl()
                    c.setopt(pycurl.USERAGENT, 'Mozilla/5.0 (Windows; U; Windows NT 6.1; it; rv:1.9.2.3) Gecko/20100401 Firefox/3.6.3 (.NET CLR 3.5.30729)')
                    c.setopt(c.URL, link['url'])
                    c.setopt(c.WRITEDATA, f)
                    c.perform()
                    c.close()
            curpath = dirname(realpath(curdir))
            chdir(folder)
            info('Creating zip file: ' + folder + '.zip')
            with ZipFile(folder + '.zip', 'w') as myzip:
                for link in links:
                    myzip.write(link['filename'])
            chdir(curpath)
            rmtree(folder)
            dtexec = datetime.now() + timedelta(hours=1)
            jobid = str(uid())
            self.settings['scheduler'].add_job(
                remove_file,
                trigger='date',
                name='Remove file ' + folder + '.zip at ' + str(dtexec),
                run_date=dtexec,
                args=[self.settings['scheduler'], folder + '.zip', jobid],
                coalesce=True,
                id=jobid)
            self.set_header('Content-Type', 'application/octet-stream')
            self.set_header('Content-Disposition',
                            'attachment; filename=' + folder.split('/')[-1] + '.zip')
            with open(folder + '.zip', 'rb') as f:
                self.write(f.read())
            self.finish()
        else:
            self.response(500, 'Fail to get urls to download the images.')
        return
    else:
        response = yield Task(
            self.api_call,
            url=self.settings['API_URL'] + '/images/{}'.format(image_id),
            method='GET')
        self.set_status(response.code)
        if response.code in [200, 201]:
            self.finish(response.body)
        else:
            self.finish({'status': 'error', 'message': 'Fail to get images data.'})

def arg_to_elm(i, arg):
    if arg.typ == 'character' or arg.typ == 'actor':
        elm = Actor(ID=uid(i), name=arg.name, typ='character', arg_name=arg.arg_name)
    elif arg.typ == 'arg' or arg.typ == 'item' or arg.typ == 'place':
        elm = Argument(ID=uid(i), name=arg.name, typ=arg.typ, arg_name=arg.arg_name)
    elif arg.typ == 'step':
        elm = Operator(ID=uid(i), name=arg.name, typ='Action', arg_name=arg.arg_name)
    elif arg.typ == 'literal' or arg.typ == 'lit':
        elm = Literal(ID=uid(i), name=arg.name, typ='Condition', arg_name=arg.arg_name)
    else:
        raise ValueError('whose typ is this anyway? {}'.format(arg.typ))
    return elm

def Actions_2_Plan(cls, Actions):
    # Used by Plannify
    elements = set().union(*[A.elements for A in Actions])
    edges = set().union(*[A.edges for A in Actions])
    Plan = cls(uid(2), name='Action_2_Plan', Elements=elements, Edges=edges)
    for edge in Plan.edges:
        if edge.label == 'effect-of':
            elm = Plan.getElementById(edge.sink.ID)
            elm.replaced_ID = edge.sink.replaced_ID
    Plan.OrderingGraph = OrderingGraph(ID=uid(5))
    Plan.CausalLinkGraph = CausalLinkGraph(ID=uid(6))
    # Plan.Steps = [A.root for A in Actions]
    return Plan

def create_user_match():
    if request.method == 'POST':
        item = request.json
        # create the base match record with default fields
        new_item = {
            'id': str(uid()),
            'current': 'pending',
            'user_id': current_user.get_id(),
            'create_time': date.today().strftime('%Y/%m/%d'),
            'members': [current_user.get_id()],
            'completed_time': None,
            'winner': None,
            'players': 2,
            'cur_players': 1
        }
        # copy the posted form fields into the record
        # (loop while size > 0 so item[-1] is not applied twice)
        size = len(item)
        while size > 0:
            new_item[item[size - 1]['name']] = item[size - 1]['value']
            size -= 1
        # add to database
        db.UserMatches.insert_one(new_item)
        # notify RabbitMQ
        send_task('Create', new_item['id'], current_user.get_id())
        flash('Item Added.')
    return jsonify(success=1)

def main(inputs, table):
    create_Table()
    session.execute("""TRUNCATE nasalogs;""")
    insert_log = session.prepare(
        "INSERT INTO " + table + " (host,datetime,path,bytes,id) VALUES (?,?,?,?,?)")
    batch = BatchStatement(consistency_level=ConsistencyLevel.ONE)
    c = 0
    for g_file in os.listdir(inputs):
        with gzip.open(os.path.join(inputs, g_file), 'rt', encoding='utf-8') as logfile:
            for line in logfile:
                w = get_words(line)
                if len(w) > 4:
                    c += 1
                    batch.add(
                        insert_log,
                        (w[1],
                         datetime.datetime.strptime(w[2], '%d/%b/%Y:%H:%M:%S'),
                         w[3], int(w[4]), uid()))
                    if c == 400:
                        session.execute(batch)
                        batch.clear()
                        c = 0
    session.execute(batch)
    cluster.shutdown()

def createThread(self, category_id, author, title, body, **kwargs):
    '''
    Creates board Threads for social.html. Returns True if valid.

    category_id: id of category thread (str)
    author: Author username (str)
    title: Name of thread (str)
    body: Text Body (str)

    Optional:
    hashtag = List of hashtags for message (list)
    image = object (obj)
    '''
    # check for an existing thread with this title
    if self.getTID(title):
        raise NameError("A thread already exists with this name.")
    thread = {
        'id': str(uid()),
        'author': author,
        'category': category_id,
        'title': title,
        'body': body,
        'hashtag': [],
        'messages': [],
        'likes': [],
        'image': ''
    }
    # insert hashtags
    if 'hashtag' in kwargs:
        hashtag = kwargs.get('hashtag')
        if not isinstance(hashtag, list):
            raise TypeError('hashtag parameter must be a list or blank.')
        for x in hashtag:
            thread['hashtag'].append(x)
    # send image to the Content Delivery Network for storage
    # (only set thread['image'] when an image was actually passed)
    if 'image' in kwargs:
        img = kwargs.get('image')
        # sendCDN(img)
        thread['image'] = img
    # process new entries
    tmap = self.getTMAP()
    threads = self.getThreads()
    threads[thread['id']] = thread
    tmap[thread['title']] = thread['id']
    # store entries
    r.set('tmap', json.dumps(tmap))
    r.set('threads', json.dumps(threads))
    # update category with the new thread
    Cat = Category()
    Cat.updateCategory(category_id, thread_id=thread['id'])
    return True

def __init__(self, instructorName):
    assert isinstance(instructorName, str)
    if instructorName == "":
        # use a uuid when no name is given
        instructorName = str(uid())
    self.name = instructorName.strip().upper()

def smart_order(side, qty, symbol=ordersym, close=False):
    (bid, ask, last) = get_wsbidasklast()
    ocoorders = []
    # if bid is 7000 ask is 7005
    # to buy, bid 7004.5, hope it moves down
    # if next trade moves up, market buy
    if side == 'Buy':
        limitprice = ask - 1
        stopprice = ask + 2.
    if side == 'Sell':
        limitprice = bid + 1
        stopprice = bid - 2.
    # print "bid %f, ask %f, last %f, limit %f, stop %f" % (bid, ask, last, limitprice, stopprice)
    ocoid = uid().hex
    ordertext = 'smart_order'
    orderObj = {
        'orders': [{
            'clOrdLinkID': ocoid,
            'contingencyType': 'OneCancelsTheOther',
            'symbol': possym,
            'ordType': 'Stop',
            'side': side,
            'stopPx': stopprice,
            'orderQty': qty,
            'text': ordertext,
            'execInst': 'LastPrice'
        }, {
            'clOrdLinkID': ocoid,
            'contingencyType': 'OneCancelsTheOther',
            'symbol': possym,
            'ordType': 'Limit',
            'side': side,
            'price': limitprice,
            'orderQty': qty,
            'text': ordertext
        }]
    }
    if close:
        orderObj['orders'][0]['execInst'] += ',Close'
        orderObj['orders'][1]['execInst'] = 'ReduceOnly'
    result = None
    apitry = 0
    while not result and apitry < apitrylimit:
        try:
            # result = requests.post(bitmex.urls['api'], json=[limitOrder, stopOrder])
            result = bitmex.private_post_order_bulk(orderObj)
            log.debug(result)
        except Exception as err:
            result = None
            log.warning("Failed to place smart order, trying again")
            log.warning(err)
            time.sleep(apisleep)
        apitry = apitry + 1
    return result

def test_mark_as_sold_valid_item_does_not_exist(self):
    payload = {'id': uid()}
    res = self.client.post(MARK_AS_SOLD_URL, payload)
    self.assertEqual(res.status_code, status.HTTP_404_NOT_FOUND)

def testSomeModified(self):
    old = getRandomDict(4)
    new = dict(old)
    # dict.keys() is not subscriptable in Python 3, so materialize it first
    modified = list(old.keys())[:2]
    for k in modified:
        new[k] = uid().int
    expected = set(modified), set(), set()
    self.diff(old, new, expected)

def dl(url, folder):
    try:
        path = urlparse(url).path
        url_fname = os.path.split(path)[-1]
        fname = uid().hex[:8] + '-' + url_fname
        with urllib.request.urlopen(url, timeout=10) as u:
            data = u.read()
        with open(os.path.join(folder, fname), 'wb') as f:
            f.write(data)
        return True
    except Exception as e:
        print('Problem with {}: {}'.format(url, e))
        return False

def get_ref_id():
    ref_id = str(uid()).replace('-', '').lower()
    try:
        # try to retrieve a Join using the randomly generated ref_id
        _ = Join.objects.get(ref_id=ref_id)
        # if it exists, start over; return the recursive result so the
        # caller still receives a ref_id
        print("16-bit UUID Match Error")
        return get_ref_id()
    # otherwise the ref_id is unused, so return it
    except ObjectDoesNotExist:
        print("16-bit UUID Accepted")
        return ref_id

def createGames(count):
    games = []
    while count > 0:
        game = {
            'game_id': str(uid()),
            'title': 'Game ' + str(count),
            'date': datetime.now()
        }
        games.append(game)
        count -= 1
    for x in games:
        db.Games.insert_one(x)
    return

def __init__(self, ID, type_graph, name=None, Elements=None,
             root_element=None, Edges=None):
    if Edges is None:
        Edges = set()
    if root_element is None:
        root_element = Operator(uid(200), typ='Action')
    if Elements is None:
        Elements = {root_element}

    self.nonequals = set()
    super(Action, self).__init__(ID, type_graph, name, Elements, root_element, Edges)

def report_user_match():
    if request.method == 'POST':
        item = request.json
        # create the base match record with default fields
        new_item = {
            'id': str(uid()),
            'current': 'pending',
            'user_id': current_user.get_id(),
            'create_time': date.today().strftime('%Y/%m/%d'),
            'members': [current_user.get_id()],
            # date has no .now(); assumes datetime is imported in this module
            'completed_time': datetime.now(),
            'players': 2,
            'cur_players': 2
        }
        # copy the posted form fields into the record
        size = len(item)
        while size > 0:
            new_item[item[size - 1]['name']] = item[size - 1]['value']
            size -= 1
        # add to database
        db.UserMatches.insert_one(new_item)
        # get the fields needed to calculate the Elo rating
        game = new_item['game']
        league = new_item['league']
        player_two = new_item['playertwo']
        winner = new_item['winner']
        # get both players' current Elo ratings
        player1_rating = get_player_rating(game, league, current_user.get_id())
        player2_rating = get_player_rating(game, league, player_two)
        # calculate the new ratings
        d = 1 if winner == current_user.get_id() else -1
        new_ratings = EloRatingCal(player1_rating, player2_rating, ELO_CONSTANT, d)
        # get the Elo rating indexes
        player1_index = elo_ratings.getERID(game, league, current_user.get_id())
        player2_index = elo_ratings.getERID(game, league, player_two)
        # update the ratings
        elo_ratings.updateEloRating(player1_index, new_ratings[0])
        elo_ratings.updateEloRating(player2_index, new_ratings[1])
    return jsonify(success=1)

async def start(request):
    log.info('*** starting new process manager ***')
    process_type = request.query.get('type')
    read_dir = request.query.get('read_dir') or 'opt'
    manager_id = str(uid())
    app['managers'][manager_id] = ipm(
        manager_id,
        process_type=process_type,
        read_dir=read_dir,
        opt_path_prefix=app['opt_file_path_prefix'])
    context = dict(manager_info_map=app['managers'], manager_id=manager_id)
    executor_pool.map(delegate_manager, [context])
    return web.json_response(
        dict(message='PROCESS_INITIATED',
             process_id=manager_id,
             logfile=app['log_file_path']))

def addUnit(libId, unitId, addr):
    connector = HWL.hwLibrary.find('./emt:Unit[@id="%s"]/emt:Connect' % libId, ns)
    unit = hwConfig.find('.//emt:Unit[@id="%s"]' % unitId, ns)
    slot = HWL.hwLibrary.find(
        './emt:Unit[@id="%s"]/emt:Slot[@addr="%s"]' % (unit.get('lib'), addr), ns)
    if not MU.is_compatible(conn=connector, slot=slot):
        raise AddError('Library unit incompatible to slot')
    lib = HWL.hwLibrary.find('./emt:Unit[@id="%s"]' % libId, ns)
    old_unit = unit.find('./emt:Unit[@addr="%s"]' % addr, ns)
    # Create the unit and do NOT copy all the data from the library
    new_unit = ET.SubElement(unit, nsURI + 'Unit', nsmap=nsMap)
    new_unit.attrib.update({
        'id': str(uid()),
        'lib': libId,
        'addr': addr,
        'tag': '',
        'alias': ''
    })
    # Move the old unit's children, if present
    if old_unit is not None:
        new_unit.set('alias', old_unit.get('alias'))
        for child in old_unit.findall('./emt:Unit', ns):
            child_conn = get_conn(child)
            # prefer a compatible free slot with the same addr
            new_slot = next(
                (slot for slot in HWL.hwLibrary.findall(
                    './emt:Unit[@id="%s"]/emt:Slot[@addr="%s"]'
                    % (new_unit.get('lib'), child.get('addr')), ns)
                 if new_unit.find('./emt:Unit[@addr="%s"]' % slot.get('addr'), ns) is None
                 and MU.is_compatible(conn=child_conn, slot=slot)),
                None)
            if new_slot is None:
                # otherwise find any compatible free slot
                new_slot = next(
                    (slot for slot in HWL.hwLibrary.findall(
                        './emt:Unit[@id="%s"]/emt:Slot' % new_unit.get('lib'), ns)
                     if new_unit.find('./emt:Unit[@addr="%s"]' % slot.get('addr'), ns) is None
                     and MU.is_compatible(conn=child_conn, slot=slot)),
                    None)
            if new_slot is not None:
                new_unit.append(child)
                child.set('addr', new_slot.get('addr'))
        unit.remove(old_unit)
    return getUnit(new_unit.get('id'), None)

def createCategory(self, title):
    '''
    Creates board Categories for social.html. Returns True if valid.

    title: Name of board (string)
    '''
    if self.findCMAP(title):
        raise NameError("Category name already exists.")
    category = {'id': str(uid()), 'title': title, 'threads': []}
    # store values
    convert = r.get('categories')
    categories = json.loads(convert)
    categories[category['id']] = category
    r.set('categories', json.dumps(categories))
    # store id lookup
    cmap = self.getCMAP()
    cmap[category['title']] = category['id']
    r.set('cmap', json.dumps(cmap))
    return True

def main(inputs, key_space, table):
    cluster = Cluster(['199.60.17.188', '199.60.17.216'])
    session = cluster.connect(key_space)
    session.execute("""
        CREATE TABLE IF NOT EXISTS nasalogs (
            host TEXT,
            datetime TIMESTAMP,
            path TEXT,
            bytes INT,
            recId UUID,
            PRIMARY KEY (host, recId)
        )
    """)
    session.execute("""TRUNCATE nasalogs;""")
    insert_log = session.prepare(
        "INSERT INTO " + table + " (host,datetime,path,bytes,recId) VALUES (?,?,?,?,?)")
    batch = BatchStatement(consistency_level=ConsistencyLevel.ONE)
    c = 0
    for g_file in os.listdir(inputs):
        with gzip.open(os.path.join(inputs, g_file), 'rt', encoding='utf-8') as logfile:
            for line in logfile:
                w = get_words(line)
                if len(w) > 4:
                    c += 1
                    batch.add(
                        insert_log,
                        (w[1],
                         datetime.datetime.strptime(w[2], '%d/%b/%Y:%H:%M:%S'),
                         w[3], int(w[4]), uid()))
                    if c == 400:
                        session.execute(batch)
                        batch.clear()
                        c = 0
    session.execute(batch)
    cluster.shutdown()

def createPlayerQueued(self, game_id, league_id, wager_amount, player_id, state):
    '''
    Creates PlayerQueued. Returns True if valid.

    game_id = Game Id (UID)
    league_id = League Id (UID)
    player_id = Player Id (UID)
    wager_amount = Money for wager (Integer)
    state = status of Queued
    '''
    if self.findPQMAP(game_id, league_id, wager_amount, player_id, state):
        raise NameError("Duplicated Queued already exists.")
    player_queued = {
        'id': str(uid()),
        'game_id': game_id,
        'league_id': league_id,
        'wager_amount': wager_amount,
        'player_id': player_id,
        'created_time': str(datetime.now()),
        'state': state
    }
    # store values
    convert = r.get('playerqueued')
    playerqueued = json.loads(convert)
    playerqueued[player_queued['id']] = player_queued
    r.set('playerqueued', json.dumps(playerqueued))
    # store id lookup
    pqmap = self.getPQMAP()
    # update the PQMap
    nested_set(pqmap, [game_id, league_id, wager_amount, state, player_id],
               player_queued['id'])
    r.set('pqmap', json.dumps(pqmap))
    return True

def createMatches(count):
    '''
    Generates a Match
    '''
    matches = []
    users = list(db.Users.find({}))
    while count > 0:
        players = []
        for user in users:
            if random.randint(0, 1) == 1:
                players.append(user['id'])
        match = {
            'match_id': str(uid()),
            'title': 'Match ' + str(count),
            'players': players,
            'observers': [],
            'create_time': datetime.now(),
            'completed_time': None,
            'value': random.randint(1, 100),
            'winner': None,
        }
        matches.append(match)
        count -= 1
    for x in matches:
        db.Matches.insert_one(x)
    return

def add(self, path=None, binary=False, stage=True):
    itemId = str(uid())
    path = path or itemId
    if binary:
        content = r'\0binary file ' + itemId
    else:
        content = 'text file ' + itemId
    opts = {'path': path, 'content': content}
    cmd = '''
        mkdir -p `dirname "%(path)s"`
        printf '%(content)s\n' > "%(path)s"
    '''
    if stage:
        cmd += '''
        git add -v "%(path)s"
        '''
    shell(cmd % opts)
    return path

def createMessage(self, thread_id, author, message):
    '''
    Creates messages for threads in social.html. Returns True if valid.

    thread_id: id of category thread (str)
    author: Author username (str)
    message: Text Body (str)
    '''
    messages = self.getMessages()
    message = {
        'id': str(uid()),
        'thread_id': thread_id,
        'author': author,
        'message': message,
        # date has no .now(); assumes datetime is imported in this module
        'time': str(datetime.now())
    }
    messages[message['id']] = message
    r.set('tmessages', json.dumps(messages))
    # update the thread with the new message id
    thread = Thread()
    thread.updateThread(message['thread_id'], message_id=message['id'])
    return True

def createEloRating(self, game_id, league_id, player_id, rating):
    '''
    Creates an Elo Rating. Returns True if valid.

    game_id = Id of Game
    league_id = Id of League
    player_id = Id of Player
    '''
    if self.findERMAP(game_id, league_id, player_id):
        raise NameError("Elo Rating already exists.")
    new_elo_rating = {
        'id': str(uid()),
        'game': game_id,
        'league': league_id,
        'player': player_id,
        'updated_time': str(datetime.now()),
        'rating': rating
    }
    # store values
    convert = r.get('elo_ratings')
    elo_ratings = json.loads(convert)
    elo_ratings[new_elo_rating['id']] = new_elo_rating
    r.set('elo_ratings', json.dumps(elo_ratings))
    # store id lookup
    ermap = self.getERMAP()
    # update the Elo rating hashmap
    nested_set(ermap, [game_id, league_id, player_id], new_elo_rating['id'])
    r.set('ermap', json.dumps(ermap))
    return True

def createMatchQueues(self, player1_queue, player2_queue, state):
    '''
    Creates Match Queues. Returns True if valid.

    player1_queue = Id of First PlayerQueue
    player2_queue = Id of Second PlayerQueue
    state = State of Match Queues
    '''
    if self.findMQMAP(player1_queue, player2_queue, state):
        raise NameError("Match Queue already exists.")
    new_match_queue = {
        'id': str(uid()),
        'player1': player1_queue,
        'player2': player2_queue,
        'created_time': str(datetime.now()),
        'state': state
    }
    # store values
    convert = r.get('matchqueues')
    matchqueues = json.loads(convert)
    matchqueues[new_match_queue['id']] = new_match_queue
    r.set('matchqueues', json.dumps(matchqueues))
    # store id lookup
    mqmap = self.getMQMAP()
    # update the MQMap
    nested_set(mqmap, [state, player1_queue, player2_queue], new_match_queue['id'])
    r.set('mqmap', json.dumps(mqmap))
    return True

def search(port, query, maxnum=None):
    """
    Large images: tbs=isz:l
    Medium images: tbs=isz:m
    Icon sized images: tbs=isz:i
    Image size larger than 400×300: tbs=isz:lt,islt:qsvga
    Image size larger than 640×480: tbs=isz:lt,islt:vga
    Image size larger than 800×600: tbs=isz:lt,islt:svga
    Image size larger than 1024×768: tbs=isz:lt,islt:xga
    Image size larger than 1600×1200: tbs=isz:lt,islt:2mp
    Image size larger than 2272×1704: tbs=isz:lt,islt:4mp
    Image sized exactly 1000×1000: tbs=isz:ex,iszw:1000,iszh:1000
    Images in full color: tbs=ic:color
    Images in black and white: tbs=ic:gray
    Images that are red: tbs=ic:specific,isc:red
        [orange, yellow, green, teal, blue, purple, pink, white, gray, black, brown]
    Image type Face: tbs=itp:face
    Image type Photo: tbs=itp:photo
    Image type Clipart: tbs=itp:clipart
    Image type Line drawing: tbs=itp:lineart
    Group images by subject: tbs=isg:to
    Show image sizes in search results: tbs=imgo:1
    """
    chrome = Chromote(port=port)
    tab = chrome.tabs[0]
    flags = dict(
        tbm='isch',
        # tbs='ic:color,isz:l',
        tbs='isz:lt,islt:svga',
    )
    tmpl = 'http://www.google.com/search?q={query}&{flags}'
    search_url = tmpl.format(
        query='+'.join(query.split(' ')),
        flags='&'.join([k + '=' + v for k, v in flags.items()]))
    print('Search url:', search_url)
    print(tab.set_url(search_url))
    time.sleep(1)
    # tab.reload()
    time.sleep(1)
    for i in range(5):
        tab.evaluate('window.scrollTo(0,document.body.scrollHeight);')
        print('scrolled a bit...')
        time.sleep(1)
    time.sleep(10)
    code = '''
    Array.prototype.slice.call(
        document.querySelectorAll("a.rg_l")
    ).map(
        function(elem) { return elem.href; }
    ).join(",");
    '''
    out = tab.evaluate(code.replace('\n', ''))
    print(len(out))
    time.sleep(1)
    print(len(out))
    pprint(out)
    urls = json.loads(out)['result']['result']['value']
    pprint(urls)
    imgurls = []
    for url in urls.split(',')[:maxnum]:
        try:
            parsed_url = urlparse(url)
            if not parsed_url.path:
                continue
            # print(parsed_url)
            opts = parse_qs(parsed_url.query)
            # print(opts)
            imgurls.append(
                dict(url=opts['imgurl'][0], height=opts['h'], width=opts['w']))
        except Exception as e:
            # print('Problem with {}: {}'.format(url, e))
            pass

    def dl(url, folder):
        try:
            path = urlparse(url).path
            url_fname = os.path.split(path)[-1]
            fname = uid().hex[:8] + '-' + url_fname
            with urllib.request.urlopen(url, timeout=10) as u:
                data = u.read()
            with open(os.path.join(folder, fname), 'wb') as f:
                f.write(data)
            return True
        except Exception as e:
            print('Problem with {}: {}'.format(url, e))
            return False

    folder = slugify(query) + '-' + uid().hex[:8]
    if not os.path.exists(folder):
        os.mkdir(folder)
    with ThreadPoolExecutor(max_workers=8) as exe:
        jobs = [exe.submit(dl, x['url'], folder) for x in imgurls]
        result = [j.result() for j in jobs]
    return imgurls, tab

def __init__(self, left, right):
    self.tag = str(uid())
    self.left = left
    self.right = right
    self.order = ""
    self.direction = ""

def setUID():
    temp = uid()
    return str(temp)

def generate_uid():
    id = uid()
    return str(id.hex)

def smart_order(side, qty, symbol=ordersym, close=False):
    bid, ask, last = get_bidasklast()
    ocoorders = []
    # if bid is 7000 ask is 7005
    # to buy, bid 7004.5, hope it moves down
    # if next trade moves up, market buy
    if side == 'Buy':
        limitprice = ask - 1
        stopprice = ask + 2.
    if side == 'Sell':
        limitprice = bid + 1
        stopprice = bid - 2.
    # print "bid %f, ask %f, last %f, limit %f, stop %f" % (bid, ask, last, limitprice, stopprice)
    ocoid = uid().hex
    ordertext = 'smart_order'
    orderObj = {
        'orders': [{
            'clOrdLinkID': ocoid,
            'contingencyType': 'OneCancelsTheOther',
            'symbol': possym,
            'ordType': 'Stop',
            'side': side,
            'stopPx': stopprice,
            'orderQty': qty,
            'text': ordertext,
            'execInst': 'LastPrice'
        }, {
            'clOrdLinkID': ocoid,
            'contingencyType': 'OneCancelsTheOther',
            'symbol': possym,
            'ordType': 'Limit',
            'side': side,
            'price': limitprice,
            'orderQty': qty,
            'text': ordertext
        }]
    }
    if close:
        orderObj['orders'][0]['execInst'] += ',Close'
        orderObj['orders'][1]['execInst'] = 'ReduceOnly'
    result = None
    apitry = 0
    while not result and apitry < apitrylimit * 10:
        try:
            # result = requests.post(binance.urls['api'], json=[limitOrder, stopOrder])
            # binance.fapiPrivate_post_leverage({
            #     'symbol': binance.market(ordersym)['id'],
            #     'leverage': '5',
            # })
            print('**********', qty * 3)
            result = binance.create_order(symbol, 'MARKET', side, None,
                                          params={
                                              'quoteOrderQty': qty,
                                              'type': 'margin',
                                              'isIsolated': True
                                          })
            log.debug(result)
        except Exception as err:
            result = None
            log.warning("Failed to place smart order, trying again")
            log.warning(err)
            time.sleep(0.1)
        apitry = apitry + 1
    return result

def deepcopy(self):
    new_self = copy.deepcopy(self)
    new_self.ID = uid(21)
    return new_self

def deepcopy(self):
    new_self = copy.deepcopy(self)
    new_self.S.ID = uid(21)
    new_self.D.ID = uid(22)
    return new_self

global FONTSIZE
global THICKNESS

try:
    img = json.load(sys.stdin)
except KeyboardInterrupt:
    _die('\nabort due to user interrupt')

if 'objects' not in img:
    _die('image has no objects section')

_log('creating new image...')

RADIUS = img['radius']
FONTSIZE = img['fontsize']
THICKNESS = img['thickness']

nodes = []
canvas = dict(id=uid(), type='canvas',
              width=img['size'][0], height=img['size'][1])
newImg = dict(objects=[canvas], states=[])

for o in img['objects']:
    t = o['type']
    if t == 'compound':
        c = o['children']
        if 'meta' in o:
            m = o['meta']
            valueLabels = _delType(_getValueLabels(c, m))
            titleLabels = _delType(_getTitleLabels(c, m))
            otherElems = _factorElems(_getOtherElems(c))

def create_private_key(self, salt):
    from uuid import uuid4 as uid
    # hash the salt plus a random UUID; the input string is encoded and the
    # base64 output decoded so the byte-oriented hashlib/b64encode APIs work
    digest = hashlib.sha3_512(("%s%s" % (salt, uid())).encode()).digest()
    return b64encode(digest).decode().rstrip('=').replace("/", "")

def _replaceInternals(self):
    self.ID = uid(self.root.stepnumber)
    for elm in self.elements:
        if not isinstance(elm, Argument):
            elm.replaced_ID = uid(self.root.stepnumber)

import json
from uuid import uuid4 as uid

from src.constants import CALENDAR_ID_CREATED_EVENT, CALENDAR_ID_DELETED_EVENT, \
    EVENT_CREATED_EVENT, EVENT_MODIFIED_EVENT, EVENT_DELETED_EVENT, RECURRENCE_DELETED_EVENT, \
    USER_CALENDARS_DELETED_EVENT

"""
Here are defined the events used for event sourcing by this microservice.
"""


def serialize_rule(x):
    return x.__dict__


generate_uuid = lambda: str(uid())


class Event(object):

    def __init__(self, type):
        self.uuid = generate_uuid()
        self.type = type

    def toJSON(self):
        return json.dumps(self, default=serialize_rule, indent=2)


class EventCreatedEvent(Event):

    def __init__(self, user_id, calendar_id, id, name, location, start_time, end_time,
                 next_is_base, recurrence_rule, until, flex, flex_duration):

def getRandomDict(numItems=1):
    return {u.hex: u.int for u in [uid() for i in range(numItems)]}