def get_pushers(self, settings, defaults):
    """Build a Push instance for every enabled destination channel.

    settings - iterable of destination dicts (priority, enabled, host,
               endpoints, subscriberName, optional https/alias/... keys)
    defaults - dict of fall-back option values (debug, timeout)

    Returns a list of configured Push objects ordered by priority.
    Raises ImproperlyConfigured when the message type is not registered
    for a destination, or when namespace/dossier are unset.
    """
    push_list = []
    for destination in sorted(settings, key=lambda k: k['priority']):
        # Robustness: treat any falsy `enabled` (False, 0, None) as disabled,
        # instead of the original `== False` comparison.
        if not destination['enabled']:
            continue
        if self.message_type is None or self.message_type not in destination['endpoints']:
            raise ImproperlyConfigured("Endpoint type isn't registered")
        if not self.namespace:
            raise ImproperlyConfigured("Namespace isn't configured")
        if not self.dossier:
            raise ImproperlyConfigured("Dossier isn't configured")
        endpoint = destination['endpoints'][self.message_type]
        p = Push(destination['host'],
                 endpoint['path'],
                 self.namespace,
                 self.dossier,
                 destination['subscriberName'],
                 # Idiom: dict.get replaces the `x if 'x' in d else False` form.
                 destination.get('https', False))
        p.alias = destination.get('alias', destination['host'])
        p.fail_on_failure = destination.get('failOnFailure', True)
        p.debug = destination.get('debug', defaults.get('debug', False))
        p.timeout = destination.get('timeout', defaults.get('timeout', False))
        push_list.append(p)
    return push_list
def push_thread_function():
    """Background worker: every 10s, look up price alerts that have been
    triggered on each exchange and deliver an APNs push for each, then
    delete the satisfied alert row.

    Reads the module-global `_cache` (platform -> latest price).
    Runs forever; intended to be started in a daemon thread.
    """
    from push import Push
    global _cache
    push_handle = Push()
    database = DB()
    # APNs payload template; the alert text is a %-format string filled in
    # per notification (platform, price, direction, threshold).
    PayLoad = {
        'aps': {
            'alert': '%s当前价格为:%s, 已经%s您设定的%s.',
            'sound': 'default',
        }
    }
    data = json.dumps(PayLoad)
    # Hoisted out of the loop: the platform list never changes.
    platforms = ['okcoin', 'chbtc', 'btctrade', 'fxbtc', 'mtgox', 'btc100', 'btcchina']
    while True:
        time.sleep(10)
        for platform in platforms:
            # 'b' rows: alerts that fire when the price rose above the threshold.
            for one_push in database.find(platform, _cache[platform], 'b'):
                msg = data % (platform, "%0.2f" % _cache[platform], "大于", str(one_push[3]))
                push_handle.send_message(one_push[0], msg)
                database.delete(one_push[0], platform, 'high')
        for platform in platforms:
            # 's' rows: alerts that fire when the price dropped below the threshold.
            for one_push in database.find(platform, _cache[platform], 's'):
                msg = data % (platform, "%0.2f" % _cache[platform], "小于", str(one_push[2]))
                push_handle.send_message(one_push[0], msg)
                database.delete(one_push[0], platform, 'low')
def get(self):
    """Fetch the current weather report and push it to every WX user."""
    users = WXUser.all()
    p = Push()
    if not users.count():
        return
    opener = poster.streaminghttp.register_openers()
    weatherinfo = json.loads(
        opener.open(settings.weather1_url % settings.weather_city, timeout=5).read()
    )['weatherinfo']
    logging.info(weatherinfo)
    city = weatherinfo['city']
    temp = weatherinfo['temp']
    wd = weatherinfo['WD']
    ws = weatherinfo['WS']
    # BUG FIX: humidity was read from 'WS' (wind speed); the humidity key in
    # this weather API is 'SD'.
    sd = weatherinfo['SD']
    time = weatherinfo['time']
    args = (to_unicode(city), temp, to_unicode(wd), to_unicode(ws), sd, time)
    # BUG FIX: the logging *module* was being called directly (TypeError);
    # log through logging.info instead.
    logging.info(str(args))
    # Hoisted out of the user loop: the message is identical for every user.
    msg = ''' 城市:%s 温度:%s 摄氏度 风向:%s 风力:%s 湿度:%s 发布时间:%s''' % args
    for user in users:
        logging.info(msg)
        p.send_txt_msg(user.fake_id, msg)
def push_foreach_match(list_of_match):
    """Assert Push.matches() PUTs each match under /matches/ keyed by match_id."""
    fake_endpoint = Mock()
    pusher = Push(fake_endpoint)
    pusher.matches(list_of_match)
    for match in list_of_match:
        fake_endpoint.put.assert_any_call('/matches/', match['match_id'], match)
def push_player_stats(player_stats):
    """Assert Push.player_stats() PUTs the stats under /stats/ keyed by date."""
    fake_endpoint = Mock()
    pusher = Push(fake_endpoint)
    pusher.player_stats(player_stats)
    date_key = player_stats['date']
    fake_endpoint.put.assert_any_call('/stats/', date_key, player_stats)
def index():
    """Webhook endpoint: parse the request payload and relay the resulting
    message to the configured Telegram chat.

    Responds 400 (via abort) on any malformed payload or processing error.
    """
    try:
        payload = loads(request.data)
        text = Push(payload)
        text.process()
        Bot.sendMessage(CHAT_ID, text.message)
    except Exception:
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt are
        # no longer swallowed; any handling failure still maps to HTTP 400.
        abort(400)
    return "Ok!"
def __init__(self, touser, fromuser, create_time, msg_id, event, event_key):
    """Build an event message; on a new subscription, refresh the
    ungrouped contact list via the pusher."""
    super(EventMessage, self).__init__(touser, fromuser, create_time, msg_id)
    self.msg_type = "event"
    self.event = event
    self.event_key = event_key
    if event == "subscribe":
        pusher = Push()
        pusher.login_unless_not()
        pusher.get_contact_by_group(settings.wx_ungrouped)
    elif event == "unsubscribe":
        # No action is taken on unsubscribe.
        pass
def test_valueChain(self):
    """Exercise the whole chain: cache -> fetch -> load bots -> deposit
    calculation -> push warnings."""
    from longbot import LongBot
    from push import Push
    LongBot.cacheData()
    daily_count = LongBot.fetch("daily")
    self.assertTrue(daily_count > 0)
    if not LongBot.fetch("hourly"):
        print("Unable to fetch")
    else:
        for bot in LongBot.loadBots("0"):
            res, warnings = bot.calcDepo()
            for warning in warnings:
                Push.send(warning)
    LongBot.closeCache()
def publish():
    """Push one hard-coded sample post to the Firebase application."""
    credentials = firebase.FirebaseAuthentication(vars.SECRET, vars.EMAIL)
    app = firebase.FirebaseApplication(vars.FB_URL)
    app.authentication = credentials
    sample_post = {
        'embed_url': 'https://www.youtube.com/embed/UVsIGAEnK_4',
        'post_id': '2',
        'published': '2015-03-14 19:10',
    }
    Push(app).post(sample_post)
def __init__(self):
    """Initialize shell state and register all recognized command handlers."""
    self.closed = False
    self.command = ""
    self.args = defaultdict(list)
    self.command_history = []
    # Command-name -> handler table; "purgue" is the registered spelling
    # users type, so it is kept verbatim.
    self.valid_commands = {
        "init": Init(),
        "push": Push(),
        "pull": Pull(),
        "config": Config(),
        "dir": Dir(),
        "purgue": Purgue(),
    }
def players():
    """Poll player stats for the configured users and mirror them to Firebase."""
    log('players', "fetching player_stats for %d users" % len(vars.USERS))
    stats = Poll().player_stats(vars.USERS)
    if not stats:
        log('players', 'no stats, exiting')
        return
    log('players', "recieved %d stats entries" % len(stats))
    credentials = firebase.FirebaseAuthentication(vars.SECRET, vars.EMAIL)
    app = firebase.FirebaseApplication(vars.FB_URL)
    app.authentication = credentials
    Push(app).player_stats(stats)
    log('players', "pushed %d stats entries to firebase" % len(stats))
def run_main(events):
    """Loop forever; each cycle, push a notification for every event that
    starts within the next hour (or a "no events" notice when none do).

    events - list of dicts with at least 'time' (a datetime comparable to
             GetTime().get_current_time()) and 'name' keys.
    """
    while True:
        current_time = GetTime().get_current_time()
        # BUG FIX: collect into a local list; the original appended to an
        # `incoming_events` name that was never defined in this scope.
        incoming_events = [
            event for event in events
            if current_time < event['time'] < current_time + datetime.timedelta(hours=1)
        ]
        if not incoming_events:
            push = Push(str(current_time), 'No incoming events within an hour')
            push.auto_select_os()
        else:
            for incoming_event in incoming_events:
                push = Push(f"Incoming event at {str(incoming_event['time'])}", incoming_event['name'])
                push.auto_select_os()
        # Sleep less than one hour so no event window can be skipped.
        time.sleep(3200)
def matches():
    """Poll match tokens, then matches, for the configured users and
    mirror the results to Firebase."""
    log('matches', 'fetching stats for %d users' % len(vars.USERS))
    tokens = Poll().match_tokens(vars.USERS)
    if not tokens:
        log('matches', 'no tokens, exiting')
        return
    matches = Poll().matches(tokens)
    if not matches:
        log('matches', 'no matches, exiting')
        return
    log('matches', "recieved %d match tokens" % len(matches))
    credentials = firebase.FirebaseAuthentication(vars.SECRET, vars.EMAIL)
    app = firebase.FirebaseApplication(vars.FB_URL)
    app.authentication = credentials
    Push(app).matches(matches)
    log('matches', "pushed %d matches to firebase" % len(matches))
def query_pushes_by_revision_range(repo_url, from_revision, to_revision, version=VERSION,
                                   tipsonly=True, return_revision_list=False):
    """
    Return an ordered list of pushes (by date - oldest (starting) first).

    repo_url               - represents the URL to clone a repo
    from_revision          - from which revision to start with (oldest)
    to_revision            - from which revision to end with (newest)
    version                - version of json-pushes to use (see docs)
    tipsonly               - only return the tip most push been returned if it's True
    return_revision_list   - return a list of revisions if it's True
    """
    push_list = []
    url = "%s?fromchange=%s&tochange=%s&version=%d" % (
        JSON_PUSHES % {"repo_url": repo_url},
        from_revision,
        to_revision,
        version)
    if tipsonly:
        url += '&tipsonly=1'
    LOG.debug("About to fetch %s" % url)
    req = retry(requests.get, args=(url, ))
    pushes = req.json()["pushes"]
    # json-pushes does not include the starting revision
    push_list.append(query_push_by_revision(repo_url, from_revision))
    # BUG FIX: push IDs are numeric strings, so sort numerically; a plain
    # lexicographic sort misorders them (e.g. '10' sorts before '9').
    for push_id in sorted(pushes.keys(), key=int):
        # Querying by push ID is preferred because date ordering is
        # not guaranteed (due to system clock skew)
        # We can interact with self-serve with the full char representation
        push_list.append(Push(push_id=push_id, push_info=pushes[push_id]))
    if return_revision_list:
        return _pushes_to_list(push_list)
    return push_list
def query_push_by_revision(repo_url, revision, full=False, return_revision_list=False):
    """
    Return a dictionary with meta-data about a push including:

        * changesets
        * date
        * user

    repo_url               - represents the URL to clone a rep
    revision               - the revision used to set the query range
    full                   - query whole information of a push if it's True
    return_revision_list   - return a list of revisions if it's True
    """
    url = "%s?changeset=%s&tipsonly=1" % (JSON_PUSHES % {"repo_url": repo_url}, revision)
    if full:
        url += "&full=1"
    LOG.debug("About to fetch %s" % url)
    req = retry(requests.get, args=(url, ))
    data = req.json()
    assert len(data) == 1, "We should only have information about one push"
    if not full:
        LOG.debug("Push info: %s" % str(data))
    else:
        LOG.debug(
            "Requesting the info with full=1 can yield too much unnecessary output "
            "to debug anything properly")
    # BUG FIX: the Push was only constructed in the `not full` branch, so a
    # full=True call raised NameError on `push` at the return below.
    push_id, push_info = data.popitem()
    push = Push(push_id=push_id, push_info=push_info)
    if return_revision_list:
        return push.changesets[0].node
    return push
def query_pushes_by_pushid_range(repo_url, start_id, end_id, version=VERSION,
                                 return_revision_list=False):
    """
    Return an ordered list of pushes (oldest first).

    repo_url               - represents the URL to clone a repo
    start_id               - from which pushid to start with (oldest)
    end_id                 - from which pushid to end with (most recent)
    version                - version of json-pushes to use (see docs)
    return_revision_list   - return a list of revisions if it's True
    """
    push_list = []
    url = "%s?startID=%s&endID=%s&version=%s&tipsonly=1" % (
        JSON_PUSHES % {"repo_url": repo_url},
        start_id - 1,  # off by one to compensate for pushlog as it skips start_id
        end_id,
        version)
    LOG.debug("About to fetch %s" % url)
    req = retry(requests.get, args=(url, ))
    pushes = req.json()["pushes"]
    # BUG FIX: push IDs are numeric strings, so sort numerically; a plain
    # lexicographic sort misorders them (e.g. '10' sorts before '9').
    for push_id in sorted(pushes.keys(), key=int):
        # Querying by push ID is preferred because date ordering is
        # not guaranteed (due to system clock skew)
        # We can interact with self-serve with the 12 char representation
        push_list.append(Push(push_id=push_id, push_info=pushes[push_id]))
    if return_revision_list:
        return _pushes_to_list(push_list)
    return push_list
import logging, logging.config import json import config from message import Message from push import Push from weibo import Weibo import traceback import random import time from api import app conf = json.loads(config.LoggerJsonConfig) logging.config.dictConfig(conf) log = logging.getLogger('main') push = Push(config.DingTalkWebHookToken) def callback(path): url = config.Url + path + '?rand=%d' % random.randrange(10000) title = '微博登陆提醒' text = '![.](%s)' % url push.push(title, text, config.DingTalkWebHookAtPhone) def main(): queue = Message(config.Redis, config.RedisKey) weibo = Weibo(config.ChromeDriver, callback) while True: try: msg = queue.getMessage()
def push_post(a_post):
    """Assert Push.post() PUTs the post under /posts/ keyed by post_id."""
    fake_endpoint = Mock()
    pusher = Push(fake_endpoint)
    pusher.post(a_post)
    fake_endpoint.put.assert_any_call('/posts/', a_post['post_id'], a_post)
def get(self):
    """Log in to WX and push the current timestamp as a text message."""
    pusher = Push(settings.wx_email, settings.wx_password)
    pusher.login()
    now = time.strftime('%Y-%m-%d %H:%M:%S')
    pusher.send_txt_msg("5636455", now)
def get(self):
    """Ensure the pusher is logged in, then refresh the default contact group."""
    pusher = Push()
    pusher.login_unless_not()
    pusher.get_contact_by_group()
def collect(project):
    """Collect git statistics (pushes, branches, revisions, files, tags) for
    every project registered on the configured servers.

    Flow per project: skip if the pushlog shows nothing new; otherwise wipe
    any stale clone, re-clone, chdir into it, gather per-branch data, write
    the results back, then chdir out and delete the clone.

    NOTE(review): the `project` parameter is immediately shadowed by the
    `for project in project_list:` loop below and is never read — confirm
    whether callers still need to pass it.
    """
    #1.Server
    server_list=Server.get_server_list()
    #2.Project_Insert new projects
    new_project_list=Project.get_new_project_list(server_list)
    if len(new_project_list) >0:
        Project.insert_new_project_list(new_project_list)
    #2.Project_Get all projects
    serial_number=0
    #Collect data by server_name: an optional CLI argument narrows the run
    #to a single server.
    server_name=""
    if len(sys.argv)>1:
        server_name=sys.argv[1]
    if server_name=="":
        project_list=Project.get_project_list()
    else:
        project_list=Project.get_project_list(server_name)
    for project in project_list:
        serial_number=serial_number+1
        print (">>>>>>No%s.Git project url: %s " %(len(project_list)-serial_number, project.project_repository_url))
        print (">>>>>>0_Collecting push records")
        # Skip this project when the pushlog has nothing new since last run.
        is_have_new_push=Push.collect_push_list(project)
        if is_have_new_push==0:
            print (">>>>>>There is nothing new in repository \n")
            continue
        # clean workspace: remove any leftover clone from a previous run
        git_home=os.getcwd()
        git_path=git_home+"/"+project.project_name
        if os.path.isdir(git_path):
            Util.getpipeoutput(["rm -rf %s " % git_path ])
        print (">>>>>>1_Git path: %s" % git_path)
        print (">>>>>>2_Clone git repository")
        Util.getpipeoutput(["git clone %s " % project.project_repository_url+project.project_name ])
        print (">>>>>>3_Collecting git data")
        # Only proceed if the clone actually produced a working directory.
        if os.path.isdir(git_path):
            os.chdir(git_path)
            #########Begin to collect
            #Collect new branchs
            Branch.collect_branch_list(project.project_id)
            #Query all branchs from database
            all_branch_list=Branch.get_branch_list(project.project_id)
            branch_list=[]
            for branch in all_branch_list:
                # NOTE(review): revision_list is assigned but never used in
                # this view — confirm it is dead code before removing.
                revision_list=[]
                print("    >>>>>>>>Branch Name:"+branch.branch_name)
                current_branch=Util.getpipeoutput(["git rev-parse --abbrev-ref HEAD"])
                if current_branch!=branch.branch_name:
                    Util.getpipeoutput(["git checkout %s" % branch.branch_name])
                # if last_commit_id is empty ,it means that it's a new branch
                latest_commit_id=Util.getpipeoutput(["git rev-parse HEAD"])
                # Only re-collect when the branch tip moved since last run.
                if branch.branch_last_commit_id!=latest_commit_id:
                    #Collect all the Revisions(all commits)
                    branch_total_line=Revision.collect_revision_list(branch,latest_commit_id)
                    #Collect all the files
                    local_file_change_list=File.collect_file_list(branch,latest_commit_id)
                    #Collect all the link
                    Revision_File_Link.collect_link_list(local_file_change_list,branch,latest_commit_id)
                    #Update branch info
                    branch.branch_type="update"
                    branch.branch_total_line=branch_total_line
                    branch.branch_last_commit_id=latest_commit_id
                    branch.branch_contributor_counts = int(Util.getpipeoutput(["git shortlog -s %s" % Util.getlogrange(), "wc -l"]))
                    branch.branch_file_counts=int(Util.getpipeoutput(["git ls-files | wc -l"]))
                    branch_list.append(branch)
            # Persist every branch whose tip moved, then tags.
            Branch.update_branch_list(branch_list)
            Tag.collect_tag_list(project.project_id)
            # Merge Request
            # Project LOC
            #########End
            os.chdir(git_home)
        print (">>>>>>4.Delete the git repository diretory\n")
        if os.path.isdir(git_path):
            Util.getpipeoutput(["rm -rf %s " % git_path ])
time.sleep(20) except Exception as e: log.error("error: %s", traceback.format_exc()) if __name__ == '__main__': # use github action to run? if len(sys.argv) > 1: usrAction = True log.info("use github action") else: usrAction = False push = Push(token=config.PushToken, keyWord=config.PushKeyWord, weiboSCF=config.WeiboSCFUrl, weiboRef=config.WeiboRef, weiboCookie=config.WeiboCookie, weixinToken=config.WeixinToken) useMirror = False if config.TelegramMirror is None else True if not useMirror: spider = Spider() else: spider = SpiderMirror(config.TelegramMirror) cityFilter = config.City if usrAction: state = State(config.Redis) spider.postId = state.getPostId()
def test_pushover(self):
    """Smoke-test the push integration with one fixed message."""
    from push import Push
    # Uncomment to send push message, will only send once if cached
    Push.send("Test message 123")
def query_repo_tip(repo_url):
    """Return the tip of a branch URL as a Push object."""
    url = "%s?tipsonly=1" % (JSON_PUSHES % {"repo_url": repo_url})
    recent_commits = retry(requests.get, args=(url, )).json()
    # BUG FIX: push IDs are numeric strings; `sorted(keys)[-1]` picked the
    # lexicographic maximum (e.g. '9' over '10'), so compare numerically.
    tip_id = max(recent_commits.keys(), key=int)
    return Push(push_id=tip_id, push_info=recent_commits[tip_id])
#!/usr/bin/python
# Entry point: when invoked with a timer argument, fetch both feeds, run
# every bot's deposit calculation, and push any warnings produced.
from longbot import LongBot
from push import Push
import sys

if len(sys.argv) <= 1:
    print("No timer argument - exit")
else:
    print("TEST123" + str(sys.argv))
    daily_count = LongBot.fetch("daily")
    hourly_count = LongBot.fetch("hourly")
    if daily_count > 0 and hourly_count > 0:
        for bot in LongBot.loadBots("0"):
            result, warnings = bot.calcDepo()
            print(str(result))
            for warning in warnings:
                print("Issue push " + str(warning))
                Push.send(warning)
    else:
        print("ERROR\tProblems with fetch")
def setup(app):
    """Register the Push plugin instance and hook its commit renderer."""
    plugin = Push(app)
    app.register_class('Push.instance', plugin)
    app.add_action('details_push_commit', plugin.render_commit_message)
def setup(app):
    """Register the Push plugin instance and hook its commit renderer."""
    push = Push(app)
    app.register_class('Push.instance', push)
    app.add_action('details_push_commit', push.render_commit_message)


if __name__ == '__main__':
    # Standalone mode: extend sys.path so the plugin and its config can be
    # imported when this file is executed directly from its directory.
    base_dir = os.path.dirname(os.path.realpath(__file__))
    sys.path.append('%s/../../' % base_dir)
    sys.path.append('%s/../../plugins/config/' % base_dir)
    from push import Push
    from config import Config
    push = Push(None)
    conf = Config(None)
    parser = argparse.ArgumentParser()
    parser.add_argument('-d', '--docs', help='Shows docs for this process', default='', nargs='?')
    args = parser.parse_args()
    docs = getattr(args, 'docs')
    # With nargs='?': '' means the flag is absent, None means a bare -d.
    # FIX (idiom): compare against None with `is`, not `==`.
    if docs or docs is None:
        help(Push)
    else:
        push.term_push(conf.get_data())