def __init__(self):
    """Initialize the JHS group-item worker: crawler, timing and DB accessors."""
    # jhs group item type
    self.worker_type = Config.JHS_GroupItem
    self.jhs_type = Config.JHS_TYPE        # queue type
    # message
    self.message = Message()
    # JSON page fetcher
    self.jsonpage = Jsonpage()
    # crawler settings
    self.crawler = TBCrawler()
    # crawl timing
    self.crawling_time = Common.now()      # timestamp of the current crawl
    self.begin_time = Common.now()
    self.begin_date = Common.today_s()
    self.begin_hour = Common.nowhour_s()
    # DB
    # mysql access
    self.mysqlAccess = MysqlAccess()
    # redis queue
    self.redisQueue = RedisQueue()
    # redis access
    self.redisAccess = RedisAccess()
    # mongodb fs access
    self.mongofsAccess = MongofsAccess()
def storyObject(pirEle, cursor, actionType):
    """Insert or update an rs_story row from the <action> element of *pirEle*.

    actionType "add" inserts a new story and back-fills the generated
    object_id into the XML; "update" rewrites the row named by object_id.

    Security fix: values originate from untrusted XML, so they are now passed
    as DB-API parameters instead of being interpolated (after manual
    escaping) into the SQL text.  With parameter binding the driver escapes,
    so the previous _mysql.escape_string calls are dropped — the stored
    values are unchanged.
    """
    actionEle = pirEle.getElementsByTagName("action")[0]
    story_text = urllib.unquote(Common.checkXMLValue("story_text", actionEle))
    title = urllib.unquote(Common.checkXMLValue("title", actionEle))
    startsOn = Common.checkXMLValue("starts_on", actionEle)
    if startsOn:
        # normalize m/d/yyyy -> yyyy-mm-dd
        month, day, year = startsOn.split('/')
        startsOn = "%s-%s-%s" % (year, month.rjust(2, '0'), day.rjust(2, '0'))
    endsOn = Common.checkXMLValue("ends_on", actionEle)
    if endsOn:
        month, day, year = endsOn.split('/')
        endsOn = "%s-%s-%s" % (year, month.rjust(2, '0'), day.rjust(2, '0'))
    address, city, state, country, zipcode = getLocation(actionEle)
    if actionType == "add":
        cursor.execute(
            """insert into rs_story (story_text, title, starts_on, ends_on,
               address, city, state, country, zipcode, enterer_id)
               values (%s,%s,%s,%s,%s,%s,%s,%s,%s,1)""",
            (story_text, title, startsOn, endsOn, address, city, state,
             country, zipcode))
        object_id = cursor.lastrowid
        # propagate the generated id back into the XML for the caller
        Common.addXMLValue(actionEle, "object_id", str(object_id))
        for objects in pirEle.getElementsByTagName("objects"):
            Common.addXMLValue(objects, "object_id", str(object_id))
        if Common.checkXMLValue("connect", actionEle):
            connectObjects(actionEle, cursor)
    elif actionType == "update":
        storyId = Common.checkXMLValue("object_id", actionEle)
        cursor.execute(
            """update rs_story set story_text = %s, title = %s,
               starts_on = %s, ends_on = %s, address = %s, city = %s,
               state = %s, country = %s, zipcode = %s where id = %s""",
            (story_text, title, startsOn, endsOn, address, city, state,
             country, zipcode, storyId))
def scanAliveItems(self):
    """Return group-buy items that have started but not yet ended."""
    # Window: (now, now - 1 hour).  NOTE(review): the original comment said
    # "one hour AFTER the end time" but add_hours is called with -1 —
    # confirm the intended direction against selectJhsGroupItemAlive's SQL.
    val = (Common.time_s(self.crawling_time), Common.add_hours(self.crawling_time, -1))
    _items = self.mysqlAccess.selectJhsGroupItemAlive(val)
    print "# hour all item nums:",len(_items)
    return _items
def __init__(self):
    """Initialize the Ctrip (xiecheng) channel crawler state."""
    # crawler settings
    self.crawler = XCCrawler()
    self.retrycrawler = RetryCrawler()
    self.crawling_time = Common.now()   # timestamp of the current crawl
    self.crawling_time_s = Common.time_s(self.crawling_time)
    self.crawling_begintime = ''        # start time of this crawl run
    self.crawling_beginDate = ''        # date of this crawl run
    self.crawling_beginHour = ''        # hour of this crawl run
    # channel info
    self.platform = '携程-pc'           # platform the brand channel lives on
    self.channel_id = ''                # channel id
    self.channel_url = ''               # channel link
    self.channel_name = ''              # channel name
    self.channel_type = ''              # channel type
    # geographic info the channel belongs to
    self.province_id = 0                # province/state id
    self.province_name = ''             # province/state name
    # raw page data
    self.channel_page = ''              # channel page html content
    self.channel_pages = {}             # extra request payloads for the page
    # channel items
    self.channel_items = []
    # channel list
    self.channel_list = []
def initialize_conditions(segment, state):
    """Sets the specified conditions which are given for the segment type.

    Assumptions:
    Builds on the initialize conditions for common

    Source:
    N/A

    Inputs:
    segment.throttle                                         [unitless]
    segment.analyses.weights.vehicle.mass_properties.landing [kilogram]

    Outputs:
    conditions.weights.total_mass  [kilogram]
    conditions.propulsion.throttle [unitless]

    Properties Used:
    N/A
    """
    # shared initialization first
    Common.initialize_conditions(segment, state)
    conditions = state.conditions

    # segment-specific values
    throttle_setting = segment.throttle
    landing_mass = segment.analyses.weights.vehicle.mass_properties.landing

    # seed the first control point
    conditions.weights.total_mass[:, 0] = landing_mass
    conditions.propulsion.throttle[:, 0] = throttle_setting

    return conditions
def mergeAct(self, act, prev_act):
    """Back-fill empty fields of *act* from the previously crawled record."""
    if prev_act:
        # union of item ids from this crawl and the previous one
        prev_item_ids = prev_act["item_ids"]
        act.brandact_itemids = Common.unionSet(act.brandact_itemids, prev_item_ids)
        # keep the activity's first-crawl timestamp
        act.crawling_time = Common.str2timestamp(prev_act["crawl_time"])
        if not act.brandact_name or act.brandact_name == '':
            act.brandact_name = prev_act["act_name"]
        if not act.brandact_url or act.brandact_url == '':
            act.brandact_url = prev_act["act_url"]
        if not act.brandact_position or str(act.brandact_position) == '0':
            act.brandact_position = prev_act["act_position"]
        if not act.brandact_enterpic_url or act.brandact_enterpic_url == '':
            act.brandact_enterpic_url = prev_act["act_enterpic_url"]
        if not act.brandact_remindNum or str(act.brandact_remindNum) == '0':
            act.brandact_remindNum = prev_act["act_remindnum"]
        if not act.brandact_coupons or act.brandact_coupons == []:
            # NOTE(review): this branch also fills brandact_coupon (singular)
            # — verify against the writer side that this pairing is intended.
            act.brandact_coupon = prev_act["act_coupon"]
            act.brandact_coupons = prev_act["act_coupons"].split(Config.sep)
        if not act.brandact_starttime or act.brandact_starttime == 0.0:
            act.brandact_starttime = Common.str2timestamp(prev_act["start_time"])
        if not act.brandact_endtime or act.brandact_endtime == 0.0:
            act.brandact_endtime = Common.str2timestamp(prev_act["end_time"])
        if not act.brandact_other_ids or act.brandact_other_ids == '':
            act.brandact_other_ids = prev_act["_act_ids"]
def flush_db(self, _db=0): try: _pool= self.getPool(_db) r = redis.Redis(connection_pool=_pool) r.flushdb() except Exception, e: Common.log('# RedisPool flush db exception: %s' % e)
def isInCheck(self):
    """Return True when the active player's king is attacked.

    NOTE: temporarily flips self.activePlayer to the opponent while probing
    getAttackingSquares, then restores it — the statement order matters.
    """
    us = self.activePlayer
    them = Common.togglePlayer(us)
    coloredKing = Common.casePieceByPlayer('k', us)
    # find the square our king stands on
    sqrKing = ''
    for sqr, piece in self.squares.iteritems():
        if piece == coloredKing:
            sqrKing = sqr
            break
    if not sqrKing:
        raise Exception("could not find the -%s- king -%s-!" % (us, coloredKing))
    # for all types of pieces, see if there are any attacking squares
    for piece in 'QKRBNP':
        # temporarily switch active player to see if they can attack us
        self.activePlayer = them
        attackSquares = self.getAttackingSquares(sqrKing, piece)
        self.activePlayer = us
        if attackSquares:
            return True
    # none found? not in check
    return False
def insert_scores(path):
    '''Insert scores from cydime.scores into cydime db score table.

    :param path: full path to daily build directory
    :type path: str
    :raises CydimeDatabaseException: when the delete/insert fails at the DB layer
    '''
    engine, table = get_cydime_db_engine()
    conn = engine.connect()
    try:
        label_dict = Com.get_label_dict()
        add_list = [{'ip_addr': i, 'score': label_dict[i]} for i in label_dict]
        add_list = Com.get_add_list(add_list, label_dict, path)
        add_count = len(add_list)
        if add_count > 0:
            try:
                # replace the score table wholesale: delete then bulk insert
                conn.execute(table.delete())
                conn.execute(table.insert(), add_list)
                logging.info('Made predictions for {0} IPs'.format(add_count))
            except SQLAlchemyError as e:
                logging.error(e)
                logging.error("Type of Exception : SQLAlchemyError")
                logging.error(traceback.format_exc())
                raise CydimeDatabaseException(e.message)
            except Exception as e:
                logging.error(e)
                logging.error("Type of Exception : {0}".format(type(e).__name__))
                logging.error(traceback.format_exc())
    finally:
        # previously the connection leaked whenever CydimeDatabaseException
        # (or any error before the close) propagated
        conn.close()
def itemByBrandPageType1(self, itemdata, position):
    """Parse one juhuasuan item snippet (HTML variant) and delegate to return_val."""
    # basic info
    item_ju_url, item_id, item_juId = '', '', ''
    m = re.search(r'<a.+?href="(.+?)".+?>', itemdata, flags=re.S)
    if m:
        # juhuasuan item link
        item_ju_url = Common.fix_url(m.group(1).replace('amp;',''))
        if item_ju_url:
            # pull item_id / juId out of the link's query string
            ids_list = item_ju_url.split('&')
            for ids in ids_list:
                if ids.find('item_id=') != -1:
                    # item id
                    item_id = ids.split('=')[1]
                elif ids.find('id=') != -1:
                    # juhuasuan id
                    item_juId = ids.split('=')[1]
    # display picture: direct src first, then two lazy-load fallbacks
    item_juPic_url = ''
    m = re.search(r'<img class="item-pic" src="(.+?)"', itemdata, flags=re.S)
    if m:
        item_juPic_url = Common.fix_url(m.group(1))
    else:
        m = re.search(r'<img class="item-pic" data-ks-lazyload="(.+?)"', itemdata, flags=re.S)
        if m:
            item_juPic_url = Common.fix_url(m.group(1))
        else:
            m = re.search(r'<img.+?data-ks-lazyload="(.+?)"', itemdata, flags=re.S)
            if m:
                item_juPic_url = Common.fix_url(m.group(1))
    # hand off to the common juhuasuan item parser
    return self.return_val((itemdata, self.brandact_id, self.brandact_name, self.brandact_url, position, item_ju_url, item_id, item_juId, item_juPic_url))
def __init__(self, port, mode, fsprivate_key, fspublic_key, fcpublic_key):
    """
    Initializing the Server Socket and binding it on the port.
    param:port The port number on which the server is listening.
    param:mode The mode(Trusted/Untrusted) of Server.
    """
    self.host = "localhost"
    self.port = port
    self.mode = mode
    self.fsprivate_key = fsprivate_key
    self.fspublic_key = fspublic_key
    self.fcpublic_key = fcpublic_key
    serversocket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    serversocket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    try:
        serversocket.bind((self.host, self.port))
    except socket.error as error:
        # BUG FIX: the old print("... %s", error) printed a 2-tuple instead
        # of interpolating the error into the message
        print("Socket binding failed with %s" % error)
        exit()
    # Allowing only one(1) Client to connect.
    count = 0
    while True:
        serversocket.listen(1)
        if count == 0:
            count += 1
            Common.gen_rsa('server')  # Generate the RSA Keys (if not exists)
            (clientsocket, clientaddress) = serversocket.accept()
            self.recv_data(clientsocket)
        elif count == 1:
            self.normallisten(serversocket)
            break
def write_jmact(self, keys, val):
    """Serialize a JM activity record to JSON and store it in redis under *keys*.

    *val* is either an already-built dict or the positional tuple produced by
    the crawler; in the latter case it is mapped into the named fields below.
    """
    try:
        if type(val) is dict:
            act_dict = val
        else:
            # positional tuple from the crawler — unpack into named fields
            crawl_time, category_id, act_id, act_name, act_url, act_position, act_enterpic_url, act_remindnum, act_coupon, act_coupons, act_sign, _act_ids, start_time, end_time, item_ids = (
                val
            )
            act_dict = {}
            act_dict["crawl_time"] = str(crawl_time)
            act_dict["category_id"] = str(category_id)
            act_dict["act_id"] = str(act_id)
            act_dict["act_name"] = act_name
            act_dict["act_url"] = act_url
            act_dict["act_position"] = str(act_position)
            act_dict["act_enterpic_url"] = act_enterpic_url
            act_dict["act_remindnum"] = str(act_remindnum)
            act_dict["act_coupon"] = str(act_coupon)
            act_dict["act_coupons"] = act_coupons
            act_dict["_act_ids"] = str(_act_ids)
            act_dict["act_sign"] = str(act_sign)
            act_dict["start_time"] = str(start_time)
            act_dict["end_time"] = str(end_time)
            act_dict["item_ids"] = item_ids
        act_json = json.dumps(act_dict)
        self.redis_pool.write(keys, act_json, self.JM_ACT_DB)
    except Exception, e:
        Common.log("# Redis access write jm activity exception: %s" % e)
def mergeAct(self, item, prev_item):
    """Back-fill empty fields of *item* from the previously crawled record."""
    if prev_item:
        if not item.item_position or item.item_position == 0:
            item.item_position = prev_item["item_position"]
        if not item.item_juName or item.item_juName == '':
            item.item_juName = prev_item["item_juname"]
        if not item.item_juDesc or item.item_juDesc == '':
            item.item_juDesc = prev_item["item_judesc"]
        if not item.item_juPic_url or item.item_juPic_url == '':
            item.item_juPic_url = prev_item["item_jupic_url"]
        if not item.item_url or item.item_url == '':
            item.item_url = prev_item["item_url"]
        if not item.item_oriPrice or item.item_oriPrice == '':
            item.item_oriPrice = prev_item["item_oriprice"]
        if not item.item_actPrice or item.item_actPrice == '':
            item.item_actPrice = prev_item["item_actprice"]
        if not item.item_discount or item.item_discount == '':
            item.item_discount = prev_item["item_discount"]
        # stored list fields are Config.sep-joined strings
        if not item.item_coupons or item.item_coupons == []:
            item.item_coupons = prev_item["item_coupons"].split(Config.sep)
        if not item.item_promotions or item.item_promotions == []:
            item.item_promotions = prev_item["item_promotions"].split(Config.sep)
        if not item.item_remindNum or item.item_remindNum == '':
            item.item_remindNum = prev_item["item_remindnum"]
        if not item.item_isLock_time or item.item_isLock_time == '':
            if prev_item["item_islock_time"] and prev_item["item_islock_time"] != '':
                item.item_isLock_time = Common.str2timestamp(prev_item["item_islock_time"])
                item.item_isLock = prev_item["item_islock"]
        # timestamps: only adopt previous values that are non-empty
        if not item.item_starttime or item.item_starttime == 0.0:
            if prev_item["start_time"] and prev_item["start_time"] != '':
                item.item_starttime = Common.str2timestamp(prev_item["start_time"])
        if not item.item_endtime or item.item_endtime == 0.0:
            if prev_item["end_time"] and prev_item["end_time"] != '':
                item.item_endtime = Common.str2timestamp(prev_item["end_time"])
def testOwnerChanging(self):
    """Changing file owner"""
    expected_owner = SetUp.usernames[0]
    # change on the server side, verify through the client mount
    Common.changeOwner(expected_owner, self.file_path_server)
    actual_owner = Common.getOwner(self.file_path_client)
    SetUp.logger.info('Current owner: %s, default owner: %s' % (actual_owner, self.d_owner))
    self.assertEqual(expected_owner, actual_owner,
                     self.testOwnerChanging.__doc__ + ' FAILED')
    SetUp.logger.info(self.testOwnerChanging.__doc__ + ' has been finished')
def action(pirEle, cursor):
    """Dispatch one XML <action> element to its object-type handler."""
    actionEle = pirEle.getElementsByTagName("action")[0]
    objectType = Common.checkXMLValue("object_type", actionEle)
    actionType = Common.checkXMLValue("action_type", actionEle)
    if (actionType == "delete"):
        # NOTE(review): object type/id are interpolated straight into SQL —
        # assumes they are validated/numeric upstream; confirm.
        objectId = Common.checkXMLValue("object_id", actionEle)
        cursor.execute("""delete from rs_%s where id = %s""" % \
            (objectType, objectId))
        cursor.execute("""delete from rs_person_story_photo_rel where %s_id = %s""" \
            % (objectType, objectId))
        # detach any children that referenced the deleted person
        cursor.execute("""update rs_person set father_id = 0 where father_id = %s""" % (objectId))
        cursor.execute("""update rs_person set mother_id = 0 where mother_id = %s""" % (objectId))
        spouseInfo = Common.getSpouseInfo(cursor, objectId)
        if (spouseInfo):
            cursor.execute("""delete from rs_marriage where id = %s""" % (spouseInfo))
    elif (actionType == "connect_objects"):
        connectObjects(actionEle, cursor)
    elif (objectType == "person"):
        personObject(actionEle, cursor, actionType)
    elif (objectType == "story"):
        storyObject(pirEle, cursor, actionType)
    elif (objectType == "photo"):
        photoObject(pirEle, cursor, actionType)
    elif (objectType == "relative"):
        relativeAction(actionEle, cursor, actionType)
def play(url=None):
    """Resolve and play the video found on *url* (or the addon's url argument)."""
    if not url:
        url = Common.args.url
    data = Common.getURL(url)
    if 'nayakhabar' in url:
        # nayakhabar pages link out to canadanepal — follow that link first
        travlink = re.compile('a href="(http://www.canadanepal.+?)".+?<strong>').findall(data)
        if travlink:
            travlink = travlink[0]
            data = Common.getURL(travlink)
    post = common.parseDOM(data, "div", attrs={ "class": "post" })[0]
    Title = common.parseDOM(post, "h2", attrs={ "class": "title"})[0]
    Title = common.replaceHTMLCodes(Title)
    Title = Title.encode('utf-8')
    entry = common.parseDOM(post, "div", attrs={ "class": "entry"})[0]
    # Resolve media url using videohosts
    videoUrl = None
    videoUrl = hosts.resolve(entry)
    if not videoUrl:
        dialog = xbmcgui.Dialog()
        dialog.ok('Nothing to play', 'A playable url could not be found.')
        return
    else:
        playList = xbmc.PlayList(xbmc.PLAYLIST_VIDEO)
        playList.clear()
        count = 1
        # multi-part videos: queue every part with a numbered title
        for PlayItem in videoUrl:
            Title = Title + ' Part ' + str(count)
            listitem = ListItem(Title, iconImage='', thumbnailImage='')
            listitem.setInfo('video', { 'Title': Title})
            playList.add(url=PlayItem, listitem=listitem)
            count = count + 1
        xbmcPlayer = xbmc.Player()
        xbmcPlayer.play(playList)
def response_process(self, response, result):
    """Abort the whole run when the HTTP status is anything but 200."""
    if response == 200:
        return
    Common.plog_info("We failed with " + str(response))
    sys.exit()
def getLocation(pirEle):
    """Extract the location fields from *pirEle* as (address, city, state, country, zipcode)."""
    # the free-text address line is URL-decoded and SQL-escaped
    address_l1 = _mysql.escape_string(urllib.unquote(Common.checkXMLValue("address_l1", pirEle)))
    city, state, country, zipcode = [
        Common.checkXMLValue(field, pirEle)
        for field in ("city", "state", "country", "zipcode")
    ]
    return address_l1, city, state, country, zipcode
def config(self):
    """Fetch the channel page, then dispatch on the detected channel type."""
    self.channelPage()
    if self.channel_type != 1:
        # only the scenic-spot channel type is handled today
        Common.log('# not find this channel type...')
        return
    self.spot()
def itemTicket(self):
    """Fetch the price frame for this scenery item and collect Ticket rows."""
    t_url = 'http://www.ly.com/scenery/AjaxHelper/SceneryPriceFrame.aspx?action=GETNEWPRICEFRAMEFORLAST&ids=%d&isSimple=1&isShowAppThree=0&widthtype=1&isGrap=1&nobookid=&isyry=1&YpState=1&lon=null&lat=null' % int(self.item_id)
    result = self.crawler.getData(t_url, self.item_url)
    if result:
        try:
            scenery = json.loads(result)
            if scenery.has_key('SceneryPrices'):
                scenery_list = scenery['SceneryPrices']
                for destination in scenery_list:
                    # only the destination that matches this item
                    if destination.has_key('DestinationId') and int(destination['DestinationId']) == int(self.item_id):
                        if destination.has_key('ChannelPriceModelEntityList'):
                            for pricemodel in destination['ChannelPriceModelEntityList']:
                                if pricemodel.has_key('ConsumersTypeId') and pricemodel.has_key('ConsumersTypeName') and pricemodel.has_key('ChannelPriceEntityList'):
                                    consumer_type = pricemodel['ConsumersTypeId']
                                    consumer_type_name = pricemodel['ConsumersTypeName']
                                    # one Ticket row per price entry, indexed by t_i
                                    t_i = 1
                                    for t_data in pricemodel['ChannelPriceEntityList']:
                                        val = (self.item_id, self.item_name, self.channel_type, consumer_type, consumer_type_name, t_i, t_data, self.crawling_begintime)
                                        t = Ticket()
                                        t.antPage(val)
                                        self.item_tickets.append(t.outSql())
                                        t_i += 1
        except Exception as e:
            Common.log('# itemTicket,exception err in load json: %s' % e)
            Common.traceback_log()
def __init__(self):
    """Initialize the JHS brand worker: DB accessors, crawler, templates, timing."""
    # jhs brand type
    self.worker_type = Config.JHS_Brand
    # DB
    self.jhs_type = Config.JHS_TYPE            # queue type
    self.mysqlAccess = MysqlAccess()           # mysql access
    self.redisQueue = RedisQueue()             # redis queue
    self.redisAccess = RedisAccess()           # redis db
    self.mongofsAccess = MongofsAccess()       # mongodb fs access
    # JSON page fetcher
    self.jsonpage = Jsonpage()
    # crawler settings
    self.crawler = TBCrawler()
    # brand page template parser
    self.brand_temp = JHSBrandTEMP()
    # message
    self.message = Message()
    # crawl timing
    self.crawling_time = Common.now()          # timestamp of the current crawl
    self.begin_time = Common.now()
    self.begin_date = Common.today_s()
    self.begin_hour = Common.nowhour_s()
def action(request): web_domain = settings.WEB_DOMAIN#域名 categories = Common.cate()#文章分类 captions = Common.caption()#文章标题 linkValues = Common.links()#友情链接 tagValues = Common.tag()#文章标签 id = int(request.GET.get('action')) article = Models.Article.objects.filter(id = id).values('times','cate__cateName', 'degree','caption','content') caption_one = article[0]['caption'] cateName_one = article[0]['cate__cateName'] degree_one = article[0]['degree'] degreeGo = int(degree_one) + 1 Models.Article.objects.filter(id = id).update(degree = degreeGo) times = time.strftime('%Y-%m-%d',time.localtime(article[0]['times'])) content_one = article[0]['content'] #下一篇文章 next = Models.Article.objects.order_by('id').filter(id__gt = id).values('id', 'caption')[:1] if next: next_id = next[0]['id'] next_caption = next[0]['caption'] #上一篇文章 previous = Models.Article.objects.order_by('-id').filter(id__lt = id).values('id', 'caption')[:1] if previous: previous_id = previous[0]['id'] previous_caption = previous[0]['caption'] return render_to_response('articleList.html',locals())
def photoList(request): web_domain = settings.WEB_DOMAIN#域名 categories = Common.cate()#文章分类 captions = Common.caption()#文章标题 linkValues = Common.links()#友情链接 tagValues = Common.tag()#文章标签 id = int(request.GET.get('cateId')) cate = Models.Photocategories.objects.filter(id = id).values('cateName') cateName = cate[0]['cateName'] photoList = Models.Photo.objects.order_by('-id').filter(cate = id).all() after_range_num = 2 before_range_num = 9 try: page = int(request.GET.get('page',1)) if page < 1: page = 1 except ValueError: page = 1 paginator = Paginator(photoList,18) try: photo = paginator.page(page) except(EmptyPage,InvalidPage,PageNotAnInteger): photo = paginator.page(1) if page >= after_range_num: page_range = paginator.page_range[page - after_range_num:page + before_range_num] else: page_range = paginator.page_range[0:int(page) + before_range_num] action = '&cateId=%d'%id return render_to_response('photoList.html',locals())
def photo(request): web_domain = settings.WEB_DOMAIN#域名 web_name = settings.WEB_NAME#程序名称 categories = Common.cate()#文章分类 captions = Common.caption()#文章标题 linkValues = Common.links()#友情链接 tagValues = Common.tag()#文章标签 photoCate = Models.Photocategories.objects.order_by('-orderId').all() print photoCate after_range_num = 2 before_range_num = 9 try: page = int(request.GET.get('page',1)) if page < 1: page = 1 except ValueError: page = 1 paginator = Paginator(photoCate,18) try: photoCategories = paginator.page(page) except(EmptyPage,InvalidPage,PageNotAnInteger): photoCategories = paginator.page(1) if page >= after_range_num: page_range = paginator.page_range[page - after_range_num:page + before_range_num] else: page_range = paginator.page_range[0:int(page) + before_range_num] return render_to_response('photo.html',locals())
def put_q(self, _key, _val):
    """Push *_val* onto the queue registered under *_key*.

    Unknown keys are silently ignored; any error is logged, never raised.
    """
    try:
        # membership test replaces the deprecated dict.has_key()
        if _key in self.q_dict:
            self.q_dict[_key].put(_val)
    except Exception as e:
        Common.log('# put_q exception: %s' % e)
def read_jmact(self, keys): try: val = self.redis_pool.read(keys, self.JM_ACT_DB) return json.loads(val) if val else None except Exception, e: Common.log("# Redis access read jm activity exception: %s" % e) return None
def browselive(url=None):
    """Build the live-TV directory from the #rvid icon list on the page."""
    if not url:
        url = Common.args.url
    print 'npvideo browselive'
    data = Common.getURL(url)
    livetvicons = common.parseDOM(data, "div", attrs={ "id": "rvid"})[0]
    items = common.parseDOM(livetvicons, "li")
    MediaItems = []
    for item in items:
        Mediaitem = MediaItem()
        Title = common.parseDOM(item, "a")
        if not Title:
            print 'no title'
            continue
        Title = Title[0]
        Title = common.replaceHTMLCodes(Title)
        Title = Title.encode('utf-8')
        Href = common.parseDOM(item, "a", ret="href")
        if not Href:
            print 'no href'
            continue
        Href = Href[0]
        Url = urllib.quote_plus(BASE_URL % Href)
        Mediaitem.Mode = 'playlive'
        Mediaitem.ListItem.setInfo('video', { 'Title': Title})
        Mediaitem.ListItem.setLabel(Title)
        Mediaitem.Image = Common.video_thumb
        Mediaitem.ListItem.setThumbnailImage(Mediaitem.Image)
        # plugin callback url carrying site/mode/url/name parameters
        Mediaitem.Url = sys.argv[0] + '?site="' + site + '"&mode="' + Mediaitem.Mode
        Mediaitem.Url += '"&url="' + Url + '"&name="' + Title + '"'
        Mediaitem.ListItem.setProperty('IsPlayable', 'true')
        MediaItems.append(Mediaitem)
    Common.addDir(MediaItems)
def brandHourList(self):
    """Build the hourly stats work list and push it onto the redis item queue."""
    # time window for this hourly pass
    val = (Common.add_hours(self.begin_time), Common.add_hours(self.begin_time, -1))
    print '# hour crawler time:',val
    # default item info lists
    all_item_num = 0
    hour_val_list = []
    act_items = {}
    item_results = self.mysqlAccess.selectJhsItemsHouralive(val)
    if item_results:
        # group rows by activity id: {act_id: {'act_name':..., 'items':[...]}}
        for item in item_results:
            if act_items.has_key(str(item[0])):
                act_items[str(item[0])]["items"].append(item[2:])
            else:
                act_items[str(item[0])] = {'act_name':item[1],'items':[]}
                act_items[str(item[0])]["items"].append(item[2:])
            all_item_num += 1
        for key in act_items.keys():
            hour_val_list.append((key,act_items[key]["act_name"],act_items[key]["items"]))
    else:
        print '# not find need hour items...'
    print '# hour all item nums:',all_item_num
    print '# hour all acts nums:',len(hour_val_list)
    # reset then refill the hourly redis queue
    self.item_queue.clearQ()
    self.item_queue.putlistQ(hour_val_list)
    print '# item queue end:',time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time()))
def itemByBrandPageType2(self, itemdata, position):
    """Parse one juhuasuan item record (JSON 'baseinfo' variant) and delegate to return_val."""
    # basic info
    item_juPic_url, item_ju_url, item_id, item_juId = '', '', '', ''
    if itemdata.has_key('baseinfo'):
        item_baseinfo = itemdata['baseinfo']
        # item id
        if item_baseinfo.has_key('itemId') and item_baseinfo['itemId'] != '':
            item_id = item_baseinfo['itemId']
        # juhuasuan id
        if item_baseinfo.has_key('juId') and item_baseinfo['juId'] != '':
            item_juId = item_baseinfo['juId']
        # display picture: picUrl preferred, picUrlM as fallback
        if item_baseinfo.has_key('picUrl') and item_baseinfo['picUrl'] != '':
            item_juPic_url = Common.fix_url(item_baseinfo['picUrl'])
        elif item_baseinfo.has_key('picUrlM') and item_baseinfo['picUrlM'] != '':
            item_juPic_url = Common.fix_url(item_baseinfo['picUrlM'])
        # juhuasuan item link; ids in the query string only fill gaps
        if item_baseinfo.has_key('itemUrl') and item_baseinfo['itemUrl'] != '':
            item_ju_url = Common.fix_url(item_baseinfo['itemUrl'])
            ids_list = item_ju_url.split('&')
            for ids in ids_list:
                if ids.find('item_id=') != -1:
                    if item_id == '':
                        item_id = ids.split('=')[1]
                elif ids.find('id=') != -1:
                    if item_juId == '':
                        item_juId = ids.split('=')[1]
    # hand off to the common juhuasuan item parser
    return self.return_val((itemdata, self.brandact_id, self.brandact_name, self.brandact_url, position, item_ju_url, item_id, item_juId, item_juPic_url))
def testPermissionsChanging(self):
    """Changing file permissions"""
    # apply the target permissions, then read them back from the same path
    Common.setPermission(self.permissions_to_set, self.file_path)
    actual = Common.getPermissions(self.file_path)
    SetUp.logger.info('Current permissions: %s, default permissions: %s, permissions to set: %s'
                      % (actual, self.d_permissions, self.permissions_to_set))
    self.assertEqual(self.permissions_to_set, actual,
                     self.testPermissionsChanging.__doc__ + ' FAILED')
    SetUp.logger.info(self.testPermissionsChanging.__doc__ + ' has been finished')
def getMoviesByDate(input_date): date = datetime.strptime(input_date, Movie.DATE_FORMAT) today = date.today() offset = date.day - today.day if offset > 6: print 'Cannot get movies more than a week later: ' + input_date movies = [] soup = Common.getPageSoup('http://metrograph.com/film?d=' + input_date) if soup is None: print 'Cannot find movie for Metrograph on {0}'.format(input_date) return movies for movie_div in soup.find_all('div', {'class': 'film'}): if movie_div is not None: movies.append(parseMovie(input_date, movie_div)) print 'Found {0} movies from {1} on {2}'.format(len(movies), 'Metrograph', input_date) return movies
def get_test_report(self, prefix='', ignore_not_found=False):
    """Generate the IxLoad PDF report and copy it into the result folder.

    :param prefix: prepended to the destination file name
    :param ignore_not_found: when True, a failed file retrieval is skipped
        instead of raised.  BUG FIX: this flag used to be referenced without
        ever being defined, so any retrieval failure turned into a NameError;
        it is now a real (backward-compatible) keyword parameter.
    Completion/errors are reported through self.result_queue.
    """
    self.controller.generateReport(detailedReport=1, format="PDF")
    ixload_tmp_dir = self._ixload_tmp_dir()
    result_folder = Common.get_result_path()
    file_list = ['IxLoad Detailed Report.pdf']
    try:
        for item in file_list:
            # sanitize the destination name: no dashes, spaces -> underscores
            dst = item.replace('-', '')
            dst = dst.replace(' ', '_')
            dst = dst.replace('__', '_')
            try:
                self.ix.retrieveFileCopy(
                    "%s/%s" % (ixload_tmp_dir, item),
                    "%s/%s%s" % (result_folder, prefix, dst))
            except Exception as err:
                if not ignore_not_found:
                    raise err
        self.result_queue.put(["ixload::ok"])
    except Exception as err:
        self.result_queue.put(err)
        self.result_queue.put(["ixload::ok"])
    self.task_queue.task_done()
def load_config(self, config_name='', force=True):
    """ Loads test configuration

    config_name is defined in ``local.yaml`` or specific by user in the main
    scenario.

    :param config_name: configuration file name; when empty it is taken from
        ``local.yaml`` for the current tester.
    :param force: passed to the upload service to overwrite an existing config.
    :raises Exception: when the upload request does not return HTTP 200.
    """
    BuiltIn().log("Load test configuration")
    cli = self._clients[self._cur_name]
    ix = cli['connection']
    if config_name == '':
        config_name = Common.LOCAL['tester'][self._cur_name]['config']
    config_path = Common.get_item_config_path() + '/' + config_name
    service = self._base_url + '/bps/upload'
    # BUG FIX: the uploaded file handle was never closed — use a context
    # manager so it is released even when the request fails
    with open(config_path, 'rb') as cfg_file:
        files = {'file': (config_name, cfg_file, 'application/xml')}
        jdata = {'force': force}
        result = ix.post(service, files=files, data=jdata, verify=False)
    if result.status_code != requests.codes.ok:
        BuiltIn().log(result.text)
        # BUG FIX: message previously said "could not logout" (copy/paste)
        raise Exception('ERROR: could not load configuration `%s`' % config_name)
    self._config_path = config_path
    BuiltIn().log("Loaded configuration file `%s`" % config_path)
def computeCore(self, data, tRange):
    """Brier score of the forecast probability that obs falls in tRange,
    computed per probability bin defined by self._edges.

    NOTE(review): assumes at least one bound of tRange is finite — with both
    infinite, `obs` is never assigned.  Confirm callers guarantee this.
    """
    # Compute probabilities based on thresholds
    p0 = 0
    p1 = 1
    if (tRange[0] != -np.inf and tRange[1] != np.inf):
        var0 = data.getPvar(tRange[0])
        var1 = data.getPvar(tRange[1])
        [obs, p0, p1] = data.getScores(["obs", var0, var1])
    elif (tRange[0] != -np.inf):
        var0 = data.getPvar(tRange[0])
        [obs, p0] = data.getScores(["obs", var0])
    elif (tRange[1] != np.inf):
        var1 = data.getPvar(tRange[1])
        [obs, p1] = data.getScores(["obs", var1])
    obsP = self.within(obs, tRange)
    p = p1 - p0  # Prob of obs within range
    bs = np.nan * np.zeros(len(p), 'float')
    # Split into bins and compute Brier score on each bin
    for i in range(0, len(self._edges) - 1):
        I = np.where((p >= self._edges[i]) & (p < self._edges[i + 1]))[0]
        # empty bins keep their nan initialization and are skipped by the
        # nan-aware mean below
        bs[I] = (np.mean(p[I]) - obsP[I])**2
    return Common.nanmean(bs)
def play(url=None):
    """Resolve a playable media url from the page and hand it to XBMC."""
    if not url:
        url = Common.args.url
    data = Common.getURL(url)
    playerholder = common.parseDOM(data, "object")
    # Resolve media url using videohosts
    videoUrl = None
    if playerholder:
        playerholder = playerholder[0]
        videoUrl = hosts.resolve(playerholder)
    if not videoUrl:
        # fallback: a direct flv download link inside the downloadTxt div
        flvdiv = common.parseDOM(data, "div", {"class": "downloadTxt"})
        if flvdiv:
            flvdiv = flvdiv[0]
            flvlink = common.parseDOM(flvdiv, "a", ret="href")
            if flvlink:
                videoUrl = []
                videoUrl.append(flvlink[0].replace(' ', '%20'))
    if videoUrl:
        # play the first resolved candidate
        Url = videoUrl[0]
        xbmcplugin.setResolvedUrl(pluginhandle, True, xbmcgui.ListItem(path=Url))
def __init__(self, m_type):
    """Initialize the spot-channel dispatcher."""
    # DB
    self.mysqlAccess = MysqlAccess()  # mysql access
    # channel queue
    self.chan_queue = XCQ('channel', 'spot')
    self.work = XCWorker()
    # default channel list: (channel_id, url, channel_type)
    self.channel_list = [( 1, 'http://www.ly.com/scenery/scenerysearchlist_22_295__0_0_0_0_0_0_0.html', 1)]
    # page
    self.site_page = None
    # crawl start time
    self.begin_time = Common.now()
    # distributed host flag
    self.m_type = m_type
def init_crawl(self, _obj, _crawl_type):
    """Reset per-run crawl state for (_obj, _crawl_type) and open its log."""
    self._obj = _obj
    self._crawl_type = _crawl_type
    # dial client
    self.dial_client = DialClient()
    # local ip
    self._ip = Common.local_ip()
    # router tag
    self._router_tag = 'ikuai'
    #self._router_tag = 'tpent'
    # items collected so far
    self.items = []
    # items given up after retries
    self.giveup_items = []
    # message payload for given-up items
    self.giveup_val = None
    self.init_log(_obj, _crawl_type)
def execute(self, GridTimeRange, Topo, varDict):
    "This tool accesses T grids directly"
    self._common = Common.Common(self._dbss)
    model = varDict["Model:"]
    # Convert Topo to meters
    topo_M = self._common._convertFtToM(Topo)
    # Make a sounding cubes for T
    # Height will increase in the sounding and be the
    # first dimension
    levels = ["MB1000", "MB850", "MB700", "MB500"]
    gh_Cube, t_Cube = self.makeNumericSounding(model, "t", levels, GridTimeRange)
    print "Cube shapes ", gh_Cube.shape, t_Cube.shape
    # Make an initial T grid with values of -200
    # This is an out-of-range value to help us identify values that
    # have already been set.
    T = (Topo * 0) - 200
    # Work "upward" in the cubes to assign T
    # We will only set the value once, i.e. the first time the
    # gh height is greater than the Topo
    # For each level
    for i in xrange(gh_Cube.shape[0]):
        # where ( gh > topo and T == -200),
        # set to t_Cube value, otherwise keep value already set))
        T = where(logical_and(greater(gh_Cube[i], topo_M), equal(T, -200)), t_Cube[i], T)
    # Convert from K to F
    T_F = self.convertKtoF(T)
    return T_F
def plot(self, ax):
    """Scatter daily bias change vs distance from the climatological mean,
    one color per input file; marker size scales with MAE."""
    NF = len(self.files)
    # climatology proxy: forecasts from the first file
    clim = self.files[0].getScores('fcst')
    for nf in range(0,NF):
        file = self.files[nf]
        lineColor = self.getColor(nf, NF)
        dates = Common.convertDates(file.getDates())
        fcst = file.getScores('fcst')
        # NOTE(review): `obs` is filled from 'fcst', not 'obs', and is unused
        # below — looks like a leftover; confirm before removing.
        obs = file.getScores('fcst')
        mae = file.getScores('mae')
        bias = file.getScores('bias')
        NO = len(fcst[0,:,0])
        NL = len(fcst[0,0,:])
        # day-over-day change in bias
        diff = abs(bias[range(1, len(bias[:,0,0])),:,:] - bias[range(0, len(bias[:,0,0])-1),:,:])
        mfcst = np.ma.masked_array(fcst,np.isnan(fcst))
        # distance of each forecast from climatology (aligned to diff's dates)
        extr = abs(fcst - clim)
        extr = extr[range(1, len(extr[:,0,0])),:,:]
        mdiff = np.ma.masked_array(diff,np.isnan(diff))
        mextr = np.ma.masked_array(extr,np.isnan(extr))
        # average over locations and lead times
        x = np.mean(np.mean(mdiff, axis=2), axis=1).flatten()
        y = np.mean(np.mean(mextr, axis=2), axis=1).flatten()
        size = mae[range(1,len(extr[:,0,0])),0,0]
        mpl.scatter(x, y, s=size*6, color=lineColor)
    ax.set_xlabel("Daily bias change " + file.getUnitsString())
    ax.set_ylabel("Distance from climatological mean " + file.getUnitsString())
    # clamp both axes to start at zero
    xlim = mpl.gca().get_xlim()
    ylim = mpl.gca().get_ylim()
    mpl.gca().set_xlim([0,xlim[1]])
    mpl.gca().set_ylim([0,ylim[1]])
def SelectMatchingRecipes(global_state, previous_recipes, current_restrictions, current_chefs, ingredients_to_use, banned_ingredients, history=Common.History()): ''' Pick recipes to match previous recipes Parameters: number_needed : int Number of days food is needed for Returns: Recipe [] List of matching recipes not including previous ''' new_recipes = previous_recipes num_filled = sum([int(r.num_servings) for r in previous_recipes]) while num_filled < global_state.NumServings(): weights = [(WeightMatchingRecipe(global_state, r, new_recipes, current_restrictions, current_chefs, ingredients_to_use, banned_ingredients, history)) for r in global_state.all_recipes] weight_sum = sum(weights) if weight_sum == 0: print "Warning: Matching recipes are over-constrained. Returning existing list." return new_recipes weights = [float(w / float(weight_sum)) for w in weights] msg = "Weights for the " + str(num_filled + 1) + "th recipe" LogProbabilities(msg, global_state.all_recipes, weights) ch = choice(len(global_state.all_recipes), 1, p=weights) new_recipes.append(global_state.all_recipes[ch]) num_filled = sum([int(r.num_servings) for r in new_recipes]) return new_recipes
def __init__(self, _q_type, thread_num=10, a_val=None):
    """Crawler worker thread.

    :param _q_type: queue type tag, e.g. 'new' for newly added items
    :param thread_num: worker thread count passed to MyThread
    :param a_val: optional appendix value used by crawl jobs
    """
    # parent construct
    MyThread.__init__(self, thread_num)
    # thread lock
    self.mutex = threading.Lock()
    self.worker_type = Config.XC_Spot
    # message
    self.message = Message()
    # db
    self.mysqlAccess = MysqlAccess()  # mysql access
    self.mongofsAccess = MongofsAccess()  # mongodb fs access
    # xc queue type
    self._q_type = _q_type  # 'new': newly added items
    # appendix val
    self.a_val = a_val
    # activity items
    self.items = []
    # dial client
    self.dial_client = DialClient()
    # local ip
    self._ip = Common.local_ip()
    # router tag
    self._tag = 'ikuai'
    #self._tag = 'tpent'
    # give up item, retry too many times
    self.giveup_items = []
def parentProjectPath(self):
    """Return the absolute path of the parent CMT project, caching the result.

    If no parent project object is known, runs 'cmt show projects' in this
    project's directory and parses the line flagged '(current)'.
    NOTE(review): os.chdir() changes the process-wide cwd and is not restored.
    """
    log = logging.getLogger()
    if self._parentprojectpath is None:
        if self._parentproject:
            # Fast path: the parent project object already knows its location.
            self._parentprojectpath = self._parentproject.fullLocation()
        else:
            wdir = self.fullLocation()
            os.chdir(wdir)
            env = Env.getDefaultEnv()
            env["PWD"] = wdir
            p = Popen(["cmt", "show", "projects"], stdout=PIPE, stderr=PIPE, close_fds=True)
            for line in p.stdout:
                line = line[:-1]  # strip trailing newline
                if line.find("(current)") != -1:
                    # Expected form: "... (in /abs/path) (current)"
                    m = re.compile(".+\(in\s+(.+)\s*\)\s*\(\s*current\s*\)" ).search(line)
                    if m:
                        self._parentprojectpath = os.path.realpath( m.group(1))
                    # words = line.split()
                    # self._parentprojectpath = os.path.realpath(words[-2].replace(")",""))
            # Forward cmt's stderr to the project log.
            for line in p.stderr:
                Common.CMTLog(line[:-1])
            # Reap the child and check its exit status.
            retcode = os.waitpid(p.pid, 0)[1]
            if retcode != 0:
                log.warning( "return code of 'cmt show project' in %s is %s", wdir, retcode)
    return self._parentprojectpath
def getMoviesByDate(input_date): date = datetime.strptime(input_date, Movie.DATE_FORMAT) today = date.today() delta = date.date() - today.date() offset = delta.days if offset > 6: print 'Cannot get movies more than a week later: ' + input_date week_day_name = str(date.strftime('%a')).lower() movies = [] for movie_soup in Common.getPageSoup(HOME_PAGE_URL).find('div', {'class': week_day_name})\ .find('ul').findChildren(recursive=False): try: movie = parseMovie(input_date, movie_soup) movies.append(movie) except TypeError: print 'Parse movie error' print 'Found {0} movies from {1} on {2}'.format(len(movies), 'IFC', input_date) return movies
def get_nodes_from_tree_topo(self, topo, exceptroot="0"):
    '''
    Build the node descriptor list for a tree-level topology.

    @parameter topo: either an integer (depth; fanout defaults to 2) or a
                     string like "(a,b)" where a is depth and b is fanout
    @parameter exceptroot: when truthy (as int), drop the root node
    @return list of dicts with 'id' (OpenFlow DPID) and 'type' per node,
            or None when topo is malformed
    '''
    depth, fanout = 0, 2
    if isinstance(topo, (str, unicode)):
        numbers = tuple(int(tok) for tok in re.findall("[0-9]+", topo))
        if len(numbers) == 1:
            depth = numbers[0]
        elif len(numbers) == 2:
            depth, fanout = numbers
        else:
            # topology string must carry exactly one or two integers
            return None
    elif isinstance(topo, int):
        depth = topo
    else:
        # topo parameter is not given in a supported form
        return None
    total = Common.num_of_nodes(depth, fanout)
    # DPIDs are sequential, rendered as the last byte of an all-zero address.
    nodelist = [{"id": "00:00:00:00:00:00:00:%s" % format(idx, '02x'),
                 "type": "OF"}
                for idx in xrange(1, total + 1)]
    if int(exceptroot):
        del nodelist[0]
    return nodelist
def __init__(self, numDrifters, observation_variance=0.1, boundaryConditions=Common.BoundaryConditions(), domain_size_x=1.0, domain_size_y=1.0): """ Creates a GlobalParticles object for drift trajectory ensemble. numDrifters: number of drifters in the collection, not included the observation observation_variance: uncertainty of observation position boundaryConditions: BoundaryConditions object, relevant during re-initialization of particles. """ # Call parent constructor super(CPUDrifterCollection, self).__init__(numDrifters, observation_variance=observation_variance, boundaryConditions=boundaryConditions, domain_size_x=domain_size_x, domain_size_y=domain_size_y) # One position for every particle plus observation self.positions = np.zeros((self.numDrifters + 1, 2))
def test_hotTrader_sumProfit(self):
    '''Verify the home-page "hot traders" totals: for each non-screened
    trader, recompute follow profit, follower count, P/L points and last-day
    profit from the statistics backend and compare (within loose deltas)
    against the API response.  (Original docstring: 获取首页的热门交易员总获利信息)'''
    hotTrader = Common.getHomeHotTrader(webAPIData['hostName'] + commonData['getHomeHotTrader_url'],printLogs=1)
    self.assertEqual(hotTrader.status_code, webAPIData['status_code_200'])
    print("hotTrader_sumProfit:")
    # Columns (user-facing, Chinese): expected/actual, nickname, user id,
    # MT4 account, account index, follow profit, follower count, P/L points,
    # last-day profit.  Runtime strings kept verbatim.
    table = PrettyTable(["预期/实际","NickName","UserID","MT4account","AccountIndex", "跟随获利","被跟随人数","盈亏点数","近一日收益"])
    try:
        for item in json.loads(hotTrader.text)["data"]["Items"]:
            # ScreenTime == 0: trader is not screened out; only these are checked.
            if item["ScreenTime"] == 0:
                mt4Account = Statistic.getMt4Account(userID=str(item["UserId"]),accountIndex=str(item["AccountCurrentIndex"]))
                # Recompute the expected figures from raw statistics.
                moneyFollowCloseSum = Statistic.moneyFollowCloseSum(mt4Account=mt4Account,brokerID=item["BrokerId"])
                befollowedCount = Statistic.befollowedCount(mt4Account=mt4Account,brokerID=item["BrokerId"])
                pointCloseSum = Statistic.pointCloseSum(mt4Account=mt4Account,brokerID=item["BrokerId"])
                moneyFollowCloseDay = Statistic.moneyFollowCloseDay(mt4Account=mt4Account,brokerID=item["BrokerId"])
                table.add_row(["预期结果",item["NickName"],item["UserId"],item["MT4Account"],item["AccountCurrentIndex"],moneyFollowCloseSum,befollowedCount,pointCloseSum,moneyFollowCloseDay])
                table.add_row(["实际结果",item["NickName"],item["UserId"],item["MT4Account"],item["AccountCurrentIndex"],item["FollowProfit"],item["FOLLOWEDLOGIN"],item["Point"],item["NearProfit"]])
                table.add_row(["","","","","","","","",""])
                # NOTE(review): deltas are very wide (1e5-1e7) -- these assert
                # order-of-magnitude agreement only; confirm tolerance intent.
                self.assertAlmostEqual(moneyFollowCloseSum,float(item["FollowProfit"]),delta = 10000000)
                self.assertAlmostEqual(befollowedCount,float(item["FOLLOWEDLOGIN"]),delta = 100000)
                self.assertAlmostEqual(pointCloseSum,float(item["Point"]),delta = 100000)
                self.assertAlmostEqual(moneyFollowCloseDay,float(item["NearProfit"]),delta = 100000)
    finally:
        # Always dump the comparison table, even when an assertion failed.
        table.reversesort = True
        print(table)
def parseMovie(input_date, movie_soup):
    """Build a Movie from one listing entry on the IFC schedule page.

    input_date: show date string (Movie.DATE_FORMAT)
    movie_soup: soup node for one movie's listing block

    Fetches the movie's own detail page to fill in year and directors.
    """
    movie = Movie.Movie()
    movie.showdate = input_date
    # title and show link
    title_info = movie_soup.find('div', {'class': 'details'}).find('h3')
    movie.show_url = title_info.a['href']
    movie.setTitle(title_info.a.text.encode('utf-8'))
    movie.theater = 'IFC'
    # showtime: one <li> per screening time
    for showtime_li in movie_soup.find('ul', {'class': 'times'}).findChildren(recursive=False):
        movie.addShowTime(movie.showdate, showtime_li.a.text)
    # director, year (hard to find this one) -- scraped from the detail page
    details_soup = Common.getPageSoup(movie.show_url)
    for detail_li in details_soup.find('ul', {'class': 'film-details'}).findChildren(recursive=False):
        label = detail_li.find('strong').text
        if label.lower() == 'year':
            # Strip the "Year" label word, keep the remaining value.
            movie.year = re.sub('year', '', detail_li.text, flags=re.IGNORECASE).strip()
        if label.lower() == 'director':
            movie.addDirectors(re.sub('director', '', detail_li.text, flags=re.IGNORECASE).strip())
    return movie
def UpdateAvailableCount(Action):
    """Randomly stock or consume one ingredient and verify the CSV round-trip.

    Action: "Stock" adds a random amount (1..50) to a random ingredient's
    count; "Consume" subtracts it; anything else raises AssertionError.

    Reads Ingredients1.csv, writes the mutated table to InventoryList_1.csv,
    re-reads it and prints "Test passed"/"Test failed" depending on whether
    the stored count equals the expected value.
    """
    # NOTE(review): H is never used below -- kept in case Common.Helper()'s
    # constructor has required side effects; confirm and remove if not.
    H = Common.Helper()
    # Initial List
    Ingredientdf = pd.read_csv("Ingredients1.csv")
    Ingredientlist = Ingredientdf.values
    num_ing = len(Ingredientlist)
    row = random.randint(0, num_ing - 1)
    Ingredient = Ingredientlist[row][0]
    Change = random.randint(1, 50)

    # Update values
    if Action == "Stock":
        Ingredientlist[row][1] += Change
    elif Action == "Consume":
        Ingredientlist[row][1] -= Change
    else:
        # Bug fix: was 'assert (0)', which is silently stripped under
        # python -O; raise explicitly instead (same exception type).
        raise AssertionError("Unknown action: %s" % Action)
    expected_value = Ingredientlist[row][1]

    Ingredientdf = pd.DataFrame(data=Ingredientlist,
                                index=Ingredientdf.index,
                                columns=Ingredientdf.columns)
    Ingredientdf.to_csv("InventoryList_1.csv", encoding='utf-8')

    # Read new list and compare the count.  The column index shifts from 1
    # to 2 because to_csv wrote the DataFrame index as an extra leading
    # column -- TODO confirm this offset is intended.
    Ingredientdf1 = pd.read_csv("InventoryList_1.csv")
    Ingredientlist_new = Ingredientdf1.values
    if Ingredientlist_new[row][2] == expected_value:
        print("Test passed")
    else:
        print("Test failed")
def ModifyRevitLinkData(doc, revitFilePath, linkData):
    """Apply workset changes to Revit links for the current file.

    Finds the entry in linkData whose file-name prefix matches this Revit
    file, then moves each named link instance and link type to its new
    workset, accumulating the outcomes in a single Result.
    """
    result = res.Result()
    found = False
    try:
        currentFileName = com.GetRevitFileName(revitFilePath)
        for fileName, worksetData in linkData:
            if not currentFileName.startswith(fileName):
                continue
            found = True
            # Change both the link instance and its type for every entry.
            for linkName, newWorksetName in worksetData:
                result.Update(ModifyRevitLinkInstanceWorkset(
                    doc, linkName, newWorksetName))
                result.Update(ModifyRevitLinkTypeWorksetName(
                    doc, linkName, newWorksetName))
            break
        if not found:
            Output('Failed to find current Revit file link workset data!')
    except Exception as e:
        result.UpdateSep(
            False,
            'Failed to modify revit link instances with exception: ' + str(e))
    return result
def __init__(self, filename, size):
    """Load a sprite sheet and compute its tile grid.

    :param filename: path of the image file to load
    :param size: (tile_width, tile_height) pair
    Sets self.valid True only when the image loaded and yields more than a
    2x2 tile grid.
    """
    self.filename = filename
    self.image = None
    try:
        self.image = pygame.image.load(filename)
    except Exception as err:
        # Best-effort load: report and continue with an invalid sheet.
        print(str(err))
    self.lastRenderTime = 0
    self.sizeX = size[0]
    self.sizeY = size[1]
    self.fps = Common.getFPS()
    self.valid = False
    self.cur = 0  # index of the current tile/frame
    self.tilesX = 0
    self.tilesY = 0
    # Bug fix: the original called self.image.get_size() unconditionally,
    # raising AttributeError when the load above failed (the None check
    # came only after the crash point).
    if self.image is not None:
        ix, iy = self.image.get_size()
        self.tilesX = ix / self.sizeX
        self.tilesY = iy / self.sizeY
        if self.tilesX > 2 and self.tilesY > 2:
            self.valid = True
def __init__(self, key, q_type, thread_num=10, a_val=None):
    """Queue-driven crawler worker.

    :param key: redis queue key to consume from
    :param q_type: item queue type tag
    :param thread_num: worker thread count passed to MyThread
    :param a_val: optional appendix value used by crawl jobs
    """
    # parent construct
    MyThread.__init__(self, thread_num)
    # thread lock
    self.mutex = threading.Lock()
    self.xc_type = Config.XC_TYPE  # xc type
    #self.item_type = q_type # item queue type
    # db
    self.mysqlAccess = MysqlAccess()  # mysql access
    self.redisQueue = RedisQueue()  # redis queue
    self.mongofsAccess = MongofsAccess()  # mongodb fs access
    # xc queue type
    self.xc_queue_type = q_type  # new...
    self._key = key  # redis queue key
    # appendix val
    self.a_val = a_val
    # return items
    self.items = []
    # dial client
    self.dial_client = DialClient()
    # local ip
    self._ip = Common.local_ip()
    # router tag
    self._tag = 'ikuai'
    #self._tag = 'tpent'
    # give up item, retry too many times
    self.giveup_items = []
def loadAttachments(self, data):
    """Dispatch every attachment in *data* to its processX handler.

    Collects both the plural 'attachments' list and the singular
    'attachment' entry, recurses into reposts ('copy_history'), and calls
    self.process<Type>() for each attachment; unknown types are logged.
    """
    collected = list(data.get('attachments', []))
    if 'attachment' in data:
        collected.append(data['attachment'])
    # Reposts carry their own attachments -- walk them recursively.
    for repost in data.get('copy_history', []):
        self.loadAttachments(repost)
    for attach in collected:
        kind = attach['type']
        c.log('debug', 'Processing %s' % kind)
        funcname = 'process' + kind.title()
        if funcname in dir(self):
            getattr(self, funcname)(attach[kind])
        else:
            c.log(
                'error',
                ' media processing function "Media.%s" is not implemented' % funcname)
            c.log('debug', str(attach))
def __init__(self):
    """Open the AutoGemm kernel-binaries header and source files, write the
    shared AutoGemm header into each, and stage the boilerplate preambles in
    self.incStr / self.cppStr for later writing."""
    # Header file: include guard plus <cstddef>.
    self.incFileName = Common.getIncludePath() + "AutoGemmKernelBinaries.h"
    self.incFile = open(self.incFileName, "w")
    self.incFile.write(Common.getAutoGemmHeader())
    self.incStr = "".join([
        "#include <cstddef>\n",
        "\n#ifndef AUTOGEMM_KERNEL_BINARIES_H\n",
        "#define AUTOGEMM_KERNEL_BINARIES_H\n",
        "\n",
    ])
    # Source file: pulls in the header above and, when pre-compiled kernels
    # are enabled, the pre-compiled binaries header.
    self.cppFileName = Common.getIncludePath() + "AutoGemmKernelBinaries.cpp"
    self.cppFile = open(self.cppFileName, "w")
    self.cppFile.write(Common.getAutoGemmHeader())
    self.cppStr = "".join([
        "#include \"%sAutoGemmKernelBinaries.h\"\n" % Common.getRelativeIncludePath(),
        "\n",
        "#ifdef AUTOGEMM_USE_PRE_COMPILED_KERNELS\n",
        "#include \"%sAutoGemmKernelBinariesPreCompiled.h\"\n" % Common.getRelativeKernelBinaryPath(),
        "#endif\n",
        "\n",
    ])
def getInputFiles():
    """Gather the MC cut-flow ROOT files for the current era.

    Returns a list of three file groups, in plotting order:
    [ttbar, DY+jets, merged minor backgrounds (W+jets, VH, VV, W+gamma,
    single top)].
    """
    def _files(cutFlowDir):
        # Resolve one sample's ROOT files for the configured era/suffix.
        return Common.getRootFiles(cutFlowDir[era], suffix, rootFileName)

    filesWJetsToLNu = _files(Details.cutFlowDir_WJetsToLNu)
    filesDYJetsToLL = _files(Details.cutFlowDir_DYJetsToLL)
    filesVH = _files(Details.cutFlowDir_VH)
    filesVV = _files(Details.cutFlowDir_VV)
    filesWG = _files(Details.cutFlowDir_WG)
    filesTTbar = _files(Details.cutFlowDir_ttbar)
    filesSingleTop = _files(Details.cutFlowDir_singleTop)

    return [
        filesTTbar,
        filesDYJetsToLL,
        (filesWJetsToLNu + filesVH + filesVV + filesWG + filesSingleTop),
    ]
def request(method, params):
    """Call VK API *method* with *params*, throttled and retried.

    Adds the access token and API version to params, waits between calls to
    respect the rate limit, retries up to 3 times on transient failures and
    returns the 'response' payload, or None when the call cannot succeed.
    """
    global _TOKEN, _LAST_API_CALL
    # Throttle successive calls to stay under the API rate limit.
    diff = time.time() - _LAST_API_CALL
    if diff < 0.4:
        time.sleep(0.4)
    _LAST_API_CALL = time.time()
    # Bug fix: 'data' was unbound (NameError) at the final check when all
    # three attempts raised before the json assignment.
    data = None
    for retry in xrange(3):
        try:
            params['access_token'] = _TOKEN
            params['v'] = '5.25'
            url = "https://api.vk.com/method/%s?%s" % (method,
                                                       urlencode(params))
            data = json.loads(urllib2.urlopen(url, None, 30).read())
            if 'response' not in data:
                if 'error' in data:
                    c.log(
                        'warning',
                        'Api responded error: %s' % data['error']['error_msg'])
                    # Treated as fatal by this client: give up silently.
                    if data['error']['error_code'] in [7, 15, 212]:
                        return
                    # Treated as transient: retry without backoff.
                    elif data['error']['error_code'] in [10]:
                        continue
                    else:
                        raise Exception(
                            'unknown error code %i, "%s", data: %s' %
                            (data['error']['error_code'], method, data))
                else:
                    raise Exception(
                        'no correct response while calling api method "%s", data: %s'
                        % (method, data))
            break
        except Exception as e:
            c.log('warning', 'Retry request %i (3): %s' % (retry, str(e)))
            time.sleep(2.0 * (retry + 1))
    if data is None or 'response' not in data:
        c.log('error', 'Unable to process request')
        return None
    return data['response']
def Activated(self):
    """Vertically centre the selected objects (all but the last) on the mid
    height between the bounding boxes of the last two selected objects.

    Runs inside a FreeCAD transaction so the move is undoable.
    """
    FreeCAD.ActiveDocument.openTransaction(self.__str__())
    if Common.localMode:
        # Local mode: work in each object's own placement path, converting
        # through global coordinates for the bounding-box math.
        Path = Common.GetSelectedLowerObjectsPath()
        objs = Common.GetSelectedLowerObjects()
        i = objs.__len__() - 2  # index of the second-to-last selection
        if i > 0:
            bb1 = Common.GetBoundBoxFromGlobalCoordinates(Path[i], objs[i])
            bb2 = Common.GetBoundBoxFromGlobalCoordinates(
                Path[i + 1], objs[i + 1])
            # Target Z: midpoint between bb1's bottom and bb2's top.
            a = (bb2.ZMax - bb1.ZMin) / 2.0 + bb1.ZMin
            j = 0
            while j < i:
                bb_j = Common.GetBoundBoxFromGlobalCoordinates(
                    Path[j], objs[j])
                base_j = Common.toGlobalCoordinates(
                    Path[j], objs[j].Placement.Base)
                # Shift so the object's box centre lands on the target Z.
                base_j.z = base_j.z + a - (bb_j.ZMin +
                                           ((bb_j.ZMax - bb_j.ZMin) / 2.0))
                objs[j].Placement.Base = Common.toLocalCoordinates(
                    Path[j], base_j)
                j = j + 1
    else:
        # NOTE(review): GetSelectedUpperObjects / GetObjectBoundBox are used
        # here without the Common. prefix -- confirm they are imported
        # directly, otherwise this branch raises NameError.
        objs = GetSelectedUpperObjects()
        i = objs.__len__() - 2
        if i > 0:
            bb = GetObjectBoundBox(objs[i])
            a = (GetObjectBoundBox(objs[i + 1]).ZMax -
                 bb.ZMin) / 2.0 + bb.ZMin
            j = 0
            while j < i:
                bb = GetObjectBoundBox(objs[j])
                objs[j].Placement.Base.z = objs[j].Placement.Base.z + a - (
                    bb.ZMin + ((bb.ZMax - bb.ZMin) / 2.0))
                j = j + 1
    return
def PTPGroup(self, lock):
    """Point-to-point move: translate the selected upper objects by the
    vector between the last two selected points.

    lock restricts the translation to one axis: 1 -> X only, 2 -> Y only,
    3 -> Z only (the other components of both points are zeroed before the
    delta is computed).  Runs inside a FreeCAD transaction.
    """
    SelList = Common.getParcedSelectionList()
    # Need at least two picked elements (source point and target point).
    if SelList.__len__() < 2:
        return
    sel1 = SelList[SelList.__len__() - 2]
    sel2 = SelList[SelList.__len__() - 1]
    # Bail out when either selection has no sub-element name.
    if sel1[sel1.__len__() - 1] == "" or sel2[sel2.__len__() - 1] == "":
        return
    # Resolve the first picked point and lift it to global coordinates;
    # the two pops strip the object/sub-element entries, leaving the path.
    P1 = Common.GetSelectedPoint(sel1[sel1.__len__() - 2],
                                 sel1[sel1.__len__() - 1])
    sel1.pop()
    sel1.pop()
    P1 = Common.toGlobalCoordinates(sel1, P1)
    # Same for the second picked point.
    P2 = Common.GetSelectedPoint(sel2[sel2.__len__() - 2],
                                 sel2[sel2.__len__() - 1])
    sel2.pop()
    sel2.pop()
    P2 = Common.toGlobalCoordinates(sel2, P2)
    # Project both points onto the locked axis.
    if lock == 1:
        P1.y = 0
        P2.y = 0
        P1.z = 0
        P2.z = 0
    if lock == 2:
        P1.x = 0
        P2.x = 0
        P1.z = 0
        P2.z = 0
    if lock == 3:
        P1.y = 0
        P2.y = 0
        P1.x = 0
        P2.x = 0
    u = Common.GetSelectedUpperObjects()
    i = 0
    FreeCAD.ActiveDocument.openTransaction(self.__str__())
    dP = P2 - P1
    # Move every selected upper object except the last by the delta.
    while i < u.__len__() - 1:
        u[i].Placement.Base = u[i].Placement.Base + dP
        i = i + 1
    return
def main():
    """Interactive menu loop for hiding files inside PNG images.

    Option 1 encodes an arbitrary file's bytes into a carrier image and
    writes a PNG; option 2 extracts the embedded file from such a PNG;
    Q quits.
    """
    run = True
    while run:
        print("CSV to Image Encoding and decoding")
        mode = input("Choose an Option: \n 1.Encode a file in to a image \n 2.Decode an image into a file \n Q.Close program \n")
        if mode == "1":
            outputLocation = "output.png"
            fileLocation = input("Enter file location: ")
            while not Common.ValidFileName(fileLocation):
                fileLocation = input("File name entered is invalid \n Enter file location: ")
            image_location = input("Enter input Image file location: ")
            while not Common.ValidFileName(image_location):
                image_location = input("Input image file name entered is invalid \n Enter input Image file location: ")
            # Message fix: prompt/messages had typos ("defults", "Invaled",
            # "defult"); wording corrected, behavior unchanged.
            outputLocation = input("Enter output .png location (if left blank defaults to output.png): ")
            if outputLocation == "":
                outputLocation = "output.png"
                print("No output filename given, using default output.png")
            binAray = fileToBinAray(fileLocation)
            # Verify the carrier image has room for the payload.
            binAray = usageCheck(binAray, image_location)
            BinArayToImg(image_location, binAray, outputLocation)
        elif mode == "2":
            image_location = input("Enter encoded .PNG file location\n")
            while not Common.ValidFileName(image_location):
                image_location = input("PNG file name entered is invalid \n Enter encoded .PNG file location: ")
            fileName, fileSize, row, coloum, channel = retriveFileInformation(image_location)
            # Message fix: was "will be outputed to".
            outputLocation = Common.dose_file_already_exsist(fileName)
            print("The file will be output to: " + outputLocation)
            outputBinaryAray = imgToBinAray(image_location, fileSize, row, coloum, channel)
            Common.binArayToFile(outputBinaryAray, outputLocation)
        elif mode.lower() == "q":
            run = False
        else:
            print("select valid option")
def requestMessages(request, msgs_data):
    """Page through messages.getHistory, appending new messages to
    msgs_data['log'] in chronological order.

    First run (empty log): reads from the oldest message forward using
    rev=1.  Subsequent runs: resumes after the last stored message id.
    Stops when the API returns no or fewer than a full page of messages.
    """
    request['count'] = 200
    request['offset'] = -200
    if len(msgs_data['log']) == 0:
        # Empty log: fetch history oldest-first from the beginning.
        request['rev'] = 1
        request['offset'] = 0
    else:
        # Resume after the newest message we already have.
        request['start_message_id'] = msgs_data['log'][-1]['id']
    while True:
        data = Api.request('messages.getHistory', request)
        if data == None:
            return
        count = data['count']  # total messages in the conversation
        data = data['items']
        if len(data) == 0:
            c.log('info', ' no new messages %i (%i)' % (len(msgs_data['log']), count))
            break
        # Switch to get history by message id
        if 'start_message_id' not in request:
            request['offset'] = -200
            request.pop('rev', None)
        else:
            # id-based pages arrive newest-first; flip to chronological.
            data.reverse()
        processMessages(data)
        msgs_data['log'].extend(data)
        request['start_message_id'] = data[-1]['id']
        c.log(
            'info', ' loaded %i, stored %i (%i)' %
            (len(data), len(msgs_data['log']), count))
        if len(data) < 200:
            # Short page means we reached the newest message.
            c.log('info', ' done')
            break
def set_deal_data_to_redis(redis, p_no=None):
    """Aggregate today's deals for product *p_no* per minute and cache the
    chart series (time, last price, average price, volume) in redis.

    Minutes with no deals are filled with the previous price and zero
    volume; future minutes (after 'now') are omitted.
    """
    session = Session()
    # Today's time window as unix timestamps [00:00 today, 00:00 tomorrow).
    day = date.today()
    end_day = day + timedelta(days=1)
    from_timestamp = time.mktime(day.timetuple())
    end_timestamp = time.mktime(end_day.timetuple())
    # Deals of the day, oldest first; deal time truncated to 'HH:MM'.
    results = session.query(OrderDeal.id, func.from_unixtime(OrderDeal.deal_time, '%H:%i'), OrderDeal.price, OrderDeal.volume) \
        .filter(OrderDeal.p_no == p_no, OrderDeal.deleted == 0, OrderDeal.deal_time.between(from_timestamp, end_timestamp)) \
        .order_by(OrderDeal.id.asc()).all()
    # Per-minute accumulation: t = deal count, p = price sum, v = volume sum.
    dr = {}
    t = 0
    p = 0
    v = 0
    for id, dt_m, price, volume in results:
        if dt_m not in dr.keys():
            # New minute bucket: reset the accumulators.
            dr[dt_m] = {}
            t = 0
            p = 0
            v = 0
        dr[dt_m]['price'] = price  # last traded price within the minute
        t = t + 1
        p = p + price
        v = v + volume
        dr[dt_m]['times'] = t
        dr[dt_m]['avg_price'] = round(p / t, 2)
        dr[dt_m]['volume'] = v
    ret = []
    # Seed the fill-forward values with yesterday's closing price.
    prev_price = Common.get_last_price(session, p_no)
    prev_avg_price = prev_price
    # All minute slots the product trades in, e.g. session opening hours.
    x_times = ProductHandler.get_transaction_times(p_no)
    now = datetime.datetime.now().strftime('%H:%M')
    for t in x_times:
        if t not in dr.keys():
            # NOTE(review): string comparison of 'HH:MM' works because the
            # format is zero-padded and fixed-width.
            if operator.gt(t, now):
                break
            # No deals this minute: carry the previous prices forward.
            ret.append(tuple((t, prev_price, prev_avg_price, 0)))
        else:
            ret.append(
                tuple((t, dr[t]['price'], dr[t]['avg_price'],
                       dr[t]['volume'])))
            prev_price = dr[t]['price']
            prev_avg_price = dr[t]['avg_price']
    data = ret
    key = "{}{}".format('REDIS_KEY_FOR_CHART_TIME_DATA_', p_no)
    redis.set(key, json.JSONEncoder().encode(data))
    session.close()