Example #1
def reset_queue(redis, key, *fields):
    """
    Reset a queue-based key: drain the list, then seed it with a blank record.
    """
    # Drain any existing entries from the list.
    while redis.llen(key) > 0:
        redis.lpop(key)
    # Push a single JSON record with every field set to None.
    blank = {field: None for field in fields}
    redis.rpush(key, json.dumps(blank))
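A quick usage sketch for reset_queue (a sketch under assumptions: the key name and field names below are hypothetical, and a local Redis server is assumed). After the call the list holds exactly one JSON record with every field set to null:

import json
import redis as redis_lib

r = redis_lib.Redis()  # assumes a local Redis server
reset_queue(r, "map_queue", "data", "id", "width")  # hypothetical key and field names
print(r.lrange("map_queue", 0, -1))
# -> [b'{"data": null, "id": null, "width": null}']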
Example #2
def main():
    print(f"Starting worker on queue: {queue}")
    print(f"ES Index: {index}")
    print(f"{queue} size:{redis.llen(queue)}")

    processed = 0
    skipped = 0
    while True:
        print("processed: {} // skipped: {} // {} size:{}".format(
            processed, skipped, queue, redis.llen(queue)))
        video_id = redis.lpop(queue)
        if video_id is None:
            continue
        video_id = video_id.decode("utf-8")
        try:
            transcript = YouTubeTranscriptApi.get_transcript(video_id)
            # [{'text': 'is that a million yeah oh this is a', 'start': 0.06, 'duration': 5.04}]
            if not should_process(transcript, video_id=video_id):
                skipped += 1
                continue
            processed += 1
            write_to_es(transcript, video_id)
        except KeyboardInterrupt:
            print('SIGINT or CTRL-C detected. Exiting gracefully')
            exit(0)
        except Exception as ex:
            print(f"Something went wrong. Keep going! Err -> {ex}")
Example #3
def get_from_queue(name: str):
    # LPOP removes and returns the first element of the list
    if redis.llen(name) > 0:
        file = json.loads(redis.lpop(name))
        return file
    else:
        return False
Example #4
 def parse(self, response):
     redis = self.redis
     re_url = re.compile(r'https://www.tianyancha.com/company/\d+')
     code = response.meta['code']
     url = re_url.findall(response.text)
     if len(url) > 0:
         url = url[0]
         redis.lpush('lawcourt', code)
         province = response.xpath(
             '/html/body/div[2]/div/div[1]/div[2]/div[2]/div[1]/div/span/text()'
         ).extract_first()
         score = response.xpath(
             '/html/body/div[2]/div/div[1]/div[2]/div[2]/div[1]/div/div[4]/span[1]/text()'
         ).extract_first()
         yield scrapy.Request(url,
                              callback=self.parse_second,
                              meta={
                                  'url_link': url,
                                  'code': code,
                                  'province': province,
                                  'score': score
                              })
     else:
         redis.rpush('base', code)
         code = redis.lpop('base')
         print(self.redis.llen('base'))
         url = 'https://www.tianyancha.com/search?key={}'.format(code)
         yield scrapy.Request(url,
                              callback=self.parse,
                              meta={'code': code},
                              dont_filter=True)
Example #5
def setTurnouts(output, redis):
    message = redis.lpop('turnout_action')
    if message:
        bits = message.split('",')
        long_id = bits[0].replace('"', '', 1)
        position = bits[1]
        output[long_id].setPosition(position)
        logging.debug(long_id + " to " + position)
Example #6
def setSignals(output, redis):
    message = redis.lpop('signal_action')
    if message:
        bits = message.split('",')
        long_id = bits[0].replace('"', '', 1)
        color = bits[1]
        output[long_id].setColor(color)
        logging.debug(long_id + " to " + color)
Example #9
def muxer_worker():
    global redis
    global onset_dicts, onset_dir, X, Y, Z

    while True:
        try:
            yt_url = redis.lpop('yturls')
            if yt_url is None:
                time.sleep(10)
            else:
                m = Muxer(yt_url=yt_url)
                m.download_video()
                with open(
                    '{output_dir}/{ytid}.status.json'.format(output_dir=output_dir, ytid=m.ytid), 'w'
                ) as f:
                    f.write('{"status": "processing", "stage": 1}')
                m.demux()
                with open(
                    '{output_dir}/{ytid}.status.json'.format(output_dir=output_dir, ytid=m.ytid), 'w'
                ) as f:
                    f.write('{"status": "processing", "stage": 2}')
                m.convert_to_wav()
                with open(
                    '{output_dir}/{ytid}.status.json'.format(output_dir=output_dir, ytid=m.ytid), 'w'
                ) as f:
                    f.write('{"status": "processing", "stage": 3}')
                # XXX: Call meshuggahfier here, and use its output in place of m.get_audio_file()
                meshuggahfied_file = '{output_path}/{ytid}mm.wav'.format(
                    output_path=m.output_dir, ytid=m.ytid
                )
                if not os.path.exists(meshuggahfied_file):
                    meshuggahme(
                        m.get_audio_file(),
                        X, improve_func=improve_log_no_loudness,
                        onset_dicts=onset_dicts, onset_dir=onset_dir,
                        metric='correlation', output_file=meshuggahfied_file,
                        original_w=10
                    )
                with open(
                    '{output_dir}/{ytid}.status.json'.format(output_dir=output_dir, ytid=m.ytid), 'w'
                ) as f:
                    f.write('{"status": "processing", "stage": 4}')
                meshuggahfied_file = m.compress_wav(meshuggahfied_file)
                with open(
                    '{output_dir}/{ytid}.status.json'.format(output_dir=output_dir, ytid=m.ytid), 'w'
                ) as f:
                    f.write('{"status": "processing", "stage": 5}')
                m.remux(meshuggahfied_file).split('/')[-1]
                with open(
                    '{output_dir}/{ytid}.status.json'.format(output_dir=output_dir, ytid=m.ytid), 'w'
                ) as f:
                    f.write('{"status": "complete"}')
        except Exception as e:
            print(repr(e))
Example #10
def worker():
    while True:
        item = redis.lpop("queue")

        if item is None:
            break

        try:
            process_links(*json.loads(item))
        except Exception:
            traceback.print_exc()
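For context, a minimal sketch of the producer side this worker implies (the "queue" key matches the example; the payload shape, the enqueue_links helper, and the process_links signature are assumptions): each item is a JSON-encoded argument list pushed with RPUSH, which the worker decodes and unpacks.

import json
import redis as redis_lib

redis = redis_lib.Redis()  # assumes a local Redis server

def enqueue_links(*args):
    # Hypothetical helper: the worker json.loads() each popped item and unpacks
    # it as *args for process_links, so push a JSON-encoded list of arguments.
    redis.rpush("queue", json.dumps(list(args)))

enqueue_links("https://example.com/page", 0)  # e.g. (url, depth); payload shape is an assumption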
Example #11
def post_to_facebook():
    """Post new articles to facebook"""
    if redis.llen('articles') > 0:
        article = Article.objects.get(
            article_id=redis.lpop('articles').decode())
        try:
            status = graph_api.put_object("me",
                                          "feed",
                                          message=article.title,
                                          link=article.url)
        except facebook.GraphAPIError as er:
            print("There is a problem ", str(er))
Example #12
def post_to_facebook():
    """Post new articles to facebook"""

    for i in range(5):
        if redis.llen('articles') > 0:
            article = Article.objects.get(article_id=redis.lpop('articles'))

            attachment = {"name": article.title, "link": article.url, "description": article.description}
            try:
                status = api.put_wall_post(article.title, attachment)
            except facebook.GraphAPIError as er:
                print("There is a problem ", str(er))
Example #13
def post_video_to_facebook():
    """Post new videos to facebook"""
    for i in range(1):
        if redis.llen('videos') > 0:
            # get the first element
            video = YoutubeVideo.objects.get(video_id=redis.lpop('videos'))

            attachment = {"name": video.title, "link": video.url, "description": video.description}
            try:
                status = api.put_wall_post(video.title, attachment)
            except facebook.GraphAPIError as er:
                print("There is a problem ", str(er))
Example #14
async def qrzLookupQueue():
    '''Pop callsigns from the qrzLookupQueue list and run a lookup for each.'''
    while True:
        try:
            call = redis.lpop('qrzLookupQueue')
            if call is not None:
                session.default_buffer.text = "lookup " + call
                session.default_buffer.validate_and_handle()
                await asyncio.sleep(1)
            else:
                if oneshot is True:
                    await shutdown()
                await asyncio.sleep(1)
        except asyncio.CancelledError:
            await shutdown()
Example #15
    def get_message(self):
        """
        Get a message and process it
        :return: None
        """
        message = redis.lpop('messages')
        if not message:
            return

        message = message.decode('utf-8')
        # 5% chance that the message contains an error
        is_message_correct = random.choices((True, False), [0.95, 0.05])[0]
        if not is_message_correct:
            self.send_message('errors', message)
            return

        print(message)
        return
Example #16
def loop(redis):
    while True:
        name = redis.lpop('http_post_list')
        if name:
            log.debug("processing {}".format(name))
            d = redis.hgetall(name)
            url = d['url']
            data = dict(data=d['data'])
            try:
                r = requests.post(url, data=data, timeout=args.timeout)
            except requests.exceptions.RequestException as e:
                log.info("err while report to {}: {}".format(url, e))
            else:
                log.info("reported ({}) to {}".format(r.status_code, url))
            finally:
                redis.delete(name)
        else:
            log.debug("nothing in http_post_list")
            time.sleep(1)
Example #17
 def draw_cards(self):
     hand_length = redis.llen(self.key(':hand'))
     for i in range(4 - hand_length):
         card = redis.lpop('games:{}:deck'.format(self.gid))
         if card:
             redis.rpush(self.key(':hand'), card)
Example #18
def get_one_proxy(redis_conn):
    '''
    Fetch one proxy from Redis.
    '''
    proxy = redis_conn.lpop("ip_list")
    return proxy
Example #19
def delete_all():
    '''Debug:  clears the redis queue'''
    res = redis.ltrim(trace_list, 0, 0)
    res = redis.lpop(trace_list)
    count = redis.llen(trace_list)
    return jsonify({'count': count})
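A note on the design: trimming to a single element and then popping it does empty the list, but a single DELETE has the same end state (a sketch reusing the example's trace_list name):

redis.delete(trace_list)
count = redis.llen(trace_list)  # 0 once the key has been removed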
Example #20
def delete_route():
    '''Deletes the trace at the front of the queue and returns the trace *after* it'''
    val = redis.lpop(trace_list)
    return get_route()
Example #21
        s_in["currentReverseSection"] = section[
            s_in["id"]].getCurrentReverseSectionId()
        s_in["currentDirection"] = section[s_in["id"]].getCurrentDirection()
        s_in["currentMaxSpeed"] = section[s_in["id"]].getMaxSpeed()

    for t_in in data["turnouts"]:
        t_in["connected"] = turnout[t_in["id"]].getConnected()

    with open('config.json', 'w') as outfile:
        json.dump(data, outfile, indent=4)


updateSignals()

while 1:
    message = redis.lpop('sensors')
    if (message):
        logging.debug("Got a message " + message)
        bits = message.split(',')
        address = bits[0] + "," + bits[1] + "," + bits[2]
        try:
            sensor[address]
        except KeyError:
            logging.debug(
                "Received a message from a sensor that didn't exist in the config!"
            )
        else:
            handleSensorUpdate(message, sensor[address])
            rewriteConfig(data)
Example #22
try:
    conn = psycopg2.connect("dbname='postgres'")
except:
    print("Can't connect to PSQL!")
try:
    redis = redis.StrictRedis(host='localhost', port=6379, db=0)
except:
    print("Can't connect to Redis!")

cur = conn.cursor()

i = redis.llen('event')

while i > 0:

    data = json.loads(redis.lpop('event')) 

    policeTweet = False
    if str(data['username']) == 'pdxpolicelog':
        policeTweet = True

    # Check if the retweet indicator is in the text body. Do not process retweets. 
    isRetweet = False
    if 'RT @pdxpolicelog: ' in str(data['text']):
        isRetweet = True

    if not isRetweet:

        # Tweets are usually formatted like this: ROBBERY - COLD at 1200 SW ALDER ST, PORT [id]
        # So everything before the 'at' is the incident type, and everything between the 'at' and the '[' is the address.
        at = 'at'
Example #23
	message = bytearray('212100'.decode('hex'))
	ret = send2(message)
	version = ret[0]
	xor = ret[2]
	print version.encode('hex')
	print
	message = bytearray('8d00'.decode('hex'))
	edb = 0
	for byte in message:
		edb ^= byte
	print edb

def test(message):
	for c in message:
		print(c)

while 1:
	message = redis.lpop('traincon')
	if message:
		bits = message.split(',')
		train_id = int(bits[0])
		direction = bits[1]
		speed = int(bits[2])
		print("setting speed of " + str(train_id) + " in direction " + direction + " to speed " + str(speed))
		setThrottle(train_id, direction, speed)
		try:
			getFirmwareVersion()
		except:
			pass

#print "Loco: " + sys.argv[1]
#print "Direction: " + sys.argv[2]
#print "Speed: " + sys.argv[3]
Example #24
print("Lpush")
redis.lpush("mylist","a","b","c")
#lrange
print("lrange")
lrange=redis.lrange("mylist",0,-1)
print(lrange)
#RPUSH
print("Rpush")
redis.rpush("mylist","a","b","c")
#lrange
print("lrange")
lrange=redis.lrange("mylist",0,-1)
print(lrange)
#LPOP
print("Lpop")
lpop=redis.lpop("mylist")
print(lpop)
#lrange
print("lrange")
lrange=redis.lrange("mylist",0,-1)
print(lrange)
#RPOP
print("Rpop")
rpop=redis.rpop("mylist")
print(rpop)
#lrange
print("lrange")
lrange=redis.lrange("mylist",0,-1)
print(lrange)
#LSET
print("Lset")
Example #25
tablename = args.query

cur = conn.cursor()
cur.execute(
    sql.SQL(
        "CREATE TABLE IF NOT EXISTS twitter.{} (id BIGINT, createdate TIMESTAMP, body VARCHAR, username VARCHAR, url VARCHAR, location VARCHAR, address VARCHAR, incident_type VARCHAR)"
    ).format(sql.Identifier(tablename)))

i = redis.llen(tablename)

logging.debug("Writing %s tweets to database twitter.%s" % (i, tablename))

while i > 0:

    data = json.loads(redis.lpop(tablename))

    tweetBody = data['text']

    at = 'at'
    startBracket = '['

    atIndex = tweetBody.find(at)
    startBracketIndex = tweetBody.find(startBracket)

    address = tweetBody[atIndex + 2:startBracketIndex].strip()
    incidentType = tweetBody[:atIndex - 1].strip()

    cur.execute(
        sql.SQL(
            "INSERT INTO twitter.{} (id, createdate, body, username, url, location, address, incident_type) VALUES (%s, %s, %s, %s, %s, %s, %s, %s)"
Example #26
redis.decr("counter")
print(redis.get("counter")) #1

# List : possible to duplicate values
redis.rpush("members", "r1") 
redis.rpush("members", "r2")
redis.lpush("members", "l1")
redis.lpush("members", "l2")
print(redis.lrange("members", 0, 0))
print(redis.lrange("members", 0, 1))
print(redis.lrange("members", 0, 2))
print(redis.llen("members"))
print(redis.lrange("members",0, redis.llen("members")-1))
print(redis.lindex("members",3))
print(redis.rpop("members"))
print(redis.lpop("members"))
print(redis.llen("members"))
print(redis.lrange("members",0, redis.llen("members")-1))
redis.delete("members") 

#Sets  : impossible to duplicate values
redis.sadd("members", "s1")
redis.sadd("members", "s1")
redis.sadd("members", "s2")
redis.sadd("members", "s3")
redis.sadd("members", "s4")
redis.sadd("members", "s5")
print(redis.smembers("members"))
redis.delete("members")

# Using JSON Format
from psycopg2.extensions import AsIs
import datetime

try:
    redis = redis.StrictRedis(host='localhost', port=6379, db=0)
except:
    print "Can't connect to Redis!"

try:
    conn = psycopg2.connect("dbname='postgres'")
except:
    print "Can't connect to PSQL!"

subreddits = ['politics','worldnews','news']
date = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')

cur = conn.cursor()

for subreddit in subreddits:
    i = redis.llen(subreddit)
    while i > 0:
        word = redis.lpop(subreddit)
        cur.execute("INSERT INTO reddit.%s AS s (word, count, full_date, day) VALUES (%s, %s, %s, date_trunc('day', %s::date)) ON CONFLICT ON CONSTRAINT %s_word_day DO UPDATE SET count = s.count + 1",
                    (AsIs(subreddit), word, 1, date, date, AsIs(subreddit)))
        i = i - 1

conn.commit()
cur.close()
conn.close()

Example #28
 def parse_second(self, response):
     infor = SsbaseItem()
     code = response.meta['code']
     url = response.meta['url_link']
     score = response.meta['score']
     province = response.meta['province']
     infor['toTime'] = response.xpath(
         '//*[@id = "_container_baseInfo"]/table[2]/tbody/tr[7]/td[2]/span/text()'
     ).extract_first()
     # infor['toTime'] = '2014-10-1'
     infor['estiblishTime'] = response.xpath(
         '//*[@id = "_container_baseInfo"]/table[2]/tbody/tr[2]/td[2]/div/text()'
     ).extract_first()
     infor['companyOrgType'] = response.xpath(
         '//*[@id = "_container_baseInfo"]/table[2]/tbody/tr[5]/td[2]/text()'
     ).extract_first()
     infor['regCapital'] = response.xpath(
         '//*[@id = "_container_baseInfo"]/table[2]/tbody/tr[1]/td[2]/div/text()'
     ).extract_first()
     infor['legalPersonName'] = response.xpath(
         '//*[@id = "_container_baseInfo"]/table[1]/tbody/tr[1]/td[1]/div/div[1]/div[2]/div[1]/a/text()'
     ).extract_first()
     infor['regLocation'] = response.xpath(
         '//*[@id = "_container_baseInfo"]/table[2]/tbody/tr[10]/td[2]/text()'
     ).extract_first()
     infor['regInstitute'] = response.xpath(
         '//*[@id = "_container_baseInfo"]/table[2]/tbody/tr[6]/td[4]/text()'
     ).extract_first()
     infor['regStatus'] = response.xpath(
         '//*[@id = "_container_baseInfo"]/table[2]/tbody/tr[2]/td[4]/text()'
     ).extract_first()
     infor['email'] = response.xpath(
         '//*[@class = "email"]/text()').extract_first()
     infor['creditCode'] = response.xpath(
         '//*[@id = "_container_baseInfo"]/table[2]/tbody/tr[3]/td[2]/text()'
     ).extract_first()
     infor['businessScope'] = response.xpath(
         '//*[@id = "_container_baseInfo"]/table[2]/tbody/tr[11]/td[2]//text()'
     ).extract_first()
     infor['approvedTime'] = response.xpath(
         '//*[@id = "_container_baseInfo"]/table[2]/tbody/tr[6]/td[2]/text()'
     ).extract_first()
     infor['regNumber'] = response.xpath(
         '//*[@id = "_container_baseInfo"]/table[2]/tbody/tr[3]/td[4]/text()'
     ).extract_first()
     infor['score'] = score
     infor['phoneNumber'] = response.xpath(
         '//div[@class = "detail"]/div[1]/span[2]/text()').extract_first()
     infor['gettime'] = time.strftime('%Y-%m-%d',
                                      time.localtime(time.time()))
     infor['name'] = response.xpath(
         '//h1[@class ="name"]//text()').extract_first()
     yield infor
     redis = self.redis
     code = redis.lpop('base')
     print(self.redis.llen('base'))
     url = 'https://www.tianyancha.com/search?key={}'.format(code)
     yield scrapy.Request(url,
                          callback=self.parse,
                          meta={'code': code},
                          dont_filter=True)
Example #29
def status_update():
	global player
	if player is None:
		return
	redis.set("musicastatus", json.dumps({"paused": player.paused, "time": player.time_pos or 0, "length": player.length or 0}))

while True:
	if player is not None and player.filename is not None and player.paused != should_be_paused:
		player.pause()
	status_update()
	p.get_message()
	quent = redis.lindex("musicaqueue", 0)
	removed_uuid = check_finished_uuid()
	if removed_uuid and quent and removed_uuid == json.loads(quent.decode())["uuid"]:
		print("DEQUEUE")
		ent = redis.lpop("musicaqueue")
		redis.set("musicatime.%s" % json.loads(quent.decode())["ytid"], time.time())
		redis.rpush("musicaudit", "dequeued entry %s at %s because process ended" % (ent, time.ctime()));
		quent = redis.lindex("musicaqueue", 0)
	if quent:
		quent = json.loads(quent.decode())
		if quent["uuid"] != current_uuid:
			redis.set("musicatime.%s" % quent["ytid"], time.time())
			start_playing(quent["uuid"], quent["ytid"])
	else:
		if current_uuid is not None:
			stop_playing()
		if player is not None:
			player.quit()
			player = None
	time.sleep(0.5)
Example #30
File: qr.py Project: tavisrudd/qr
 def pop_back(self):
     """Pop an element from the back of the deque"""
     popped = redis.lpop(self.key)
     log.debug('Popped ** %s ** from key ** %s **' % (popped, self.key))
     return popped 
Example #31
File: qr.py Project: tavisrudd/qr
 def pop(self):
     """Pop an element"""
     popped = redis.lpop(self.key)
     log.debug('Popped ** %s ** from key ** %s **' % (popped, self.key))
     return popped 
Example #32
    queue_size = rospy.get_param("redis_qs", 5)
    map_sub = rospy.Subscriber("/map", OccupancyGrid, map_cb)
    bt.establish_reset(redis, redis_key, bt.reset_queue, None, "data", "id",
                       "Line_count", "width", "height")
    bt.establish_pulse()
    map_shift = [0, 0, 0]
    map_rot = [0, 0, 0]
    listener = tf.TransformListener()
    tf_present = False
    rate = rospy.Rate(5)
    # main loop: get tf from odom to map
    while not rospy.is_shutdown():
        try:
            map_shift, map_rot = listener.lookupTransform(
                "odom", "map", rospy.Time(0)
            )  # gives us the tf from odom to map. values inverted for tf from map to odom.
            map_rot = euler_from_quaternion(map_rot)
            if not tf_present:
                rospy.loginfo_once("MAP_BRIDGE: Odom to map tf found")
                tf_present = True
        except (tf.LookupException, tf.ConnectivityException,
                tf.ExtrapolationException):
            if tf_present:
                rospy.loginfo_once("MAP_BRIDGE: no transform from odom to map")
                tf_present = False
            continue
        # trim queue size
        while redis.llen(redis_key) > queue_size:
            redis.lpop(redis_key)
        rate.sleep()
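As an aside, the queue-trimming loop above can also be written as a single LTRIM, which keeps only the newest queue_size entries at the tail of the list (a sketch reusing the example's redis, redis_key and queue_size names, and assuming queue_size > 0):

# Keep only the last `queue_size` elements of the list in one call.
redis.ltrim(redis_key, -queue_size, -1)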
Example #33
	img_array = np.asarray(bytearray(imgdata), dtype=np.uint8)
	image = cv2.imdecode(img_array, cv2.CV_LOAD_IMAGE_UNCHANGED)
	rs = (detect_label(model, image), imgdata)
	tdiff = int(time.time() - t)
	dbprint('image process time: %d:%02d' % (tdiff//60, tdiff%60))
	return rs

#00D6FB009223(n800sau)_1_20160516142921_30928.jpg
failed_file = ''
ftp_h = None
models = None
r = re.compile('^[0-9A-F]+\(.*\)_\d_(\d+)_\d+\.jpg')
try:
	redis = redis.Redis()
	for i in range(2):
		fpath = redis.lpop(REDIS_INPUT_LIST)
		if fpath is None:
			print 'End of files'
			break
		bname = os.path.basename(fpath)
		dbprint('popped %s' % fpath)
		m = r.match(bname)
		if m:
			dt = datetime.strptime(m.groups()[0], '%Y%m%d%H%M%S')
			ts = time.mktime(dt.timetuple())
			try:
				if ftp_h is None:
					ftp_h = FTP('192.168.1.1', timeout=30)
					ftp_h.login('writer', 'pfgbcm')
				if models is None:
					t = time.time()
Example #34
    return logger

# try to connect redis server
redis = redis.StrictRedis( host='106.187.34.51', port=6379, password='******' )

#some test data
#i = 5
#while i>0:
#    redis.rpush( 'image_to_upload' , "544:test.jpg" )
#    i = i - 1

logger = initlog()

# read image infomation that to upload
while True:
    image_info = redis.lpop( "image_to_upload" )
    if not image_info:
        print "no job to do."
        time.sleep( 5 )
        continue
    [user_id, image_name] = image_info.split( ":" )
    try:
        img_fp = open( "./temp/" + image_name , "rb" )
    except Exception , data:
        logger.error( "image file is not exist : " + user_id + ":" + image_name )
        break

    # do upload
    # max try 5 times, if all fail I will push image_info back to the stack of image_to_upload
    while True:
        try_count = 0
Example #35
 def remove_top(email, json):
     redis.lpop(redis_book_key_from_email(email))
     return {'result': 'removed top'}
Example #36
def lpop(key):
    # Pop one item and JSON-decode it; returns None if the list is empty or decoding fails.
    try:
        item = None
        item = loads(r.lpop(key))
    finally:
        return item
Example #37
    scan_sub = rospy.Subscriber("/scan", LaserScan, scan_cb)
    cmd_pub = rospy.Publisher("/cmd_vel", Twist, queue_size=3)
    bt.establish_reset(redis, redis_key)
    bt.establish_pulse()

    pose = None
    flags = {"moving": False, "forward": False, "stalled": False, "stopped": False}
    safe_thresh = 0.25
    current_twist = Twist()
    current_cmd = {}
    next_cmd = {}
    expiration_time = None
    rate = rospy.Rate(10)
    while not rospy.is_shutdown():
        if redis.llen(redis_key) > 0 and not next_cmd:
            next_cmd = json.loads(redis.lpop(redis_key))  # if next command slot is empty, then get a new commadn from redis
        if not current_cmd and next_cmd:  # if current command is empty and next command is not, then move the command to the current slot
            current_cmd = next_cmd
            next_cmd = {}
            if "cmd" not in current_cmd.keys():  # check to make sure the json at least has a cmd key
                rospy.logerr("[cmd_feedback invalid] cmd not given. No action available. Aborting.")
                current_cmd = {}
            else:
                execute_command(current_cmd)
        if current_cmd != None and next_cmd.get("cmd", "").strip().lower() == "stop":  # if there is a current command, but the next command is a stop, then stop the current action
            flags["stopped"] = True  # set the stopped flag
            expiration_time = rospy.get_time()  # 
            next_cmd = {}  # empty next command slot
        if expiration_time:
            if rospy.get_time() < expiration_time and not flags["stalled"]:
                cmd_pub.publish(current_twist)
Example #38
import logging
import csv
import io
from util import encode

logging.basicConfig(format='%(levelname)s:%(message)s', level=logging.DEBUG)

try:
    redis = redis.StrictRedis(host='localhost', port=6379, db=0)
except:
    print("Can't connect to Redis!")

parser = argparse.ArgumentParser()
parser.add_argument("-q",
                    "--queries",
                    dest="query",
                    help="Give names of queries stored in Redis.",
                    default='-')
args = parser.parse_args()

with io.open('data/%s.json' % args.query, 'a', encoding='utf-8') as outfile:
    i = redis.llen(args.query)
    logging.debug("Writing %s tweets to data/%s.json" % (i, args.query))
    while i > 0:
        data = json.loads(redis.lpop(args.query))
        outfile.write(
            unicode(
                json.dumps(data, ensure_ascii=False, indent=4,
                           sort_keys=True)))

        i = i - 1
Example #39
 def pull(redis):
     jobkey = redis.lpop('jobs:queue')
     jobdescr = redis.hgetall(jobkey)
     return Job(jobkey=jobkey, **jobdescr)
Example #40
import json
import os
import subprocess

from musicautils import *

YOUTUBE_DL = os.path.join(os.getenv("HOME"), ".local/bin/youtube-dl")

if not os.path.isdir(DATA_DIR):
	os.mkdir(DATA_DIR)

redis = redis.Redis()

# refresh the loading queue

while redis.lpop("musicaload") is not None:
	pass

for ent in redis.lrange("musicaqueue", 0, -1):
	redis.rpush("musicaload", json.loads(ent.decode())["ytid"])

def gen_cmdline(ytid, for_title=False):
	return [YOUTUBE_DL, "--no-playlist", "--id", "--no-progress", "--format", "mp4"] + (["--get-title"] if for_title else []) + ["--", sanitize(ytid)]

def get_title(ytid):
	return subprocess.check_output(gen_cmdline(ytid, for_title=True))

# "mplayer -fs"

while True:
	_, to_load = redis.blpop("musicaload")
Example #41
#                     "disinfect":true,  \
#                     "oxygen_supply":true, \
#                     "heater":true }}'
# content = '{"reported":{"temperature":13}}'
def on_connect(client, userdata, rc):
    print('Connected. Client id is: ' + clientid)
    #
    # client.publish(topic,content )
    # print('MQTT message published.',content)


client = mqtt.Client(clientid)
client.on_connect = on_connect
client.username_pw_set(username, password)
client.connect(host=broker, port=port)
client.loop_start()
while True:
    sleep(0.2)
    var_name = redis.lpop("set_value")
    var_value = redis.lpop("set_value")
    if var_name is None:
        continue
    else:
        if var_value == '1':
            var_value = 'true'
        else:
            var_value = 'false'

        context = '{"reported": {"' + var_name + '":' + var_value + '}}'
        print(context)
        client.publish(topic_update, context)