Example #1
    def _scan(self, domain):
        api = StackOverflow.Api(domain)
        questions = api.questions()

        if questions is not None:
            settings = QuestionsScanner.get_by_key_name(domain)

            tags = {}
            logging.debug('scanning %d questions on domain %s' % (
                len(questions),
                domain,
            ))
            for question in questions:
                logging.debug('question: %d' % question['question_id'])
                if question['question_id'] == settings.last_question:
                    break

                logging.info('question: %d is new on %s' % (
                    question['question_id'],
                    domain,
                ))

                Publisher._append_tags(tags, question)

            if len(tags) > 0:
                settings.last_question = questions[0]['question_id']
                settings.put()

                Publisher._publish_tags(domain, tags)
            else:
                logging.info('Nothing to publish')
        else:
            logging.error('no questions!?')
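The snippet depends on a Publisher._append_tags helper that is not shown. A minimal sketch of what it plausibly does, assuming each question dict carries a 'tags' list (the helper name comes from the call above; the body is an assumption):

    @staticmethod
    def _append_tags(tags, question):
        # Hypothetical: accumulate per-tag occurrence counts for new questions.
        for tag in question.get('tags', []):
            tags[tag] = tags.get(tag, 0) + 1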
Example #2
def execute(*args, **kwargs):
    time.sleep(10)
    logger.debug('a execute:%s, %s', args, kwargs)
    publisher = Publisher()
    publisher.publish('B')

    return None
Example #3
 def setUp(self):
     broker_address = cfg.BROKER_ADDRESS
     broker_exchange_name = cfg.EXCHANGE_NAME
     broker_queue_name = cfg.QUEUE_NAME
     broker_client_obj = BrokerClient(broker_address, broker_exchange_name,
                                      broker_queue_name)
     self._publisher = Publisher(broker_client_obj)
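For context, the cfg module referenced here only needs three constants; an illustrative (assumed) version:

    # cfg.py -- hypothetical values for the setUp above
    BROKER_ADDRESS = 'localhost'
    EXCHANGE_NAME = 'test_exchange'
    QUEUE_NAME = 'test_queue'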
Example #4
class Publication(MDO):

    sql_dict={"get_all":"SELECT sid, publisher_sid, uuid, publishing_date, title, publication_url, publication_text FROM fc_publication",
              "delete":"DELETE FROM fc_publication WHERE sid=%s",
              "insert":"INSERT INTO fc_publication(publisher_sid,publishing_date,title,publication_url,publication_text) VALUES(%s,%s,%s,%s,%s) RETURNING sid",
              "load":"SELECT sid, publisher_sid, uuid, publishing_date, title, publication_url, publication_text FROM fc_publication WHERE sid=%s"}
    
    def __init__(self, sid=None, uuid=None, publisher_sid=None, publishing_date=None, title=None, publication_url=None, publication_text=None):
        super(Publication, self).__init__(Publication.sql_dict,sid,uuid)
        self.publisher_sid=publisher_sid
        self.publishing_date=publishing_date
        self.title=title
        self.publication_text=publication_text
        self.publication_url=publication_url
        self.publisher=Publisher(self.publisher_sid)
        self.publisher.load()
        

    def load_object_from_db(self,rec):
        self.publisher_sid=rec.publisher_sid
        self.uuid=rec.uuid
        self.publishing_date=rec.publishing_date
        self.title=rec.title
        self.publication_text=rec.publication_text
        self.publication_url=rec.publication_url
        
    def get_insert_data(self):
        return (self.publisher_sid,self.publishing_date,self.title,self.publication_url,self.publication_text,)
    
    def fabric_method(self,rec):
        return Publication(rec.sid, rec.uuid, rec.publisher_sid, rec.publishing_date, rec.title, rec.publication_url, rec.publication_text)
Example #5
 def test_publisher_attach_send(self):
     p = Publisher('localhost', '5556')
     p.context = zmq.Context()
     p.socket = p.context.socket(zmq.PUB)
     p.socket.connect(f"tcp://localhost:5556")
     # If the send fails, an exception is returned. None returned if successful
     self.assertEqual(p.socket.send_string("I can send!"), None)
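For context, a minimal subscriber that would receive this message. This is a sketch only; since the test's PUB socket connects, it assumes the subscriber side binds on port 5556:

    import zmq

    context = zmq.Context()
    sub = context.socket(zmq.SUB)
    sub.bind('tcp://*:5556')                  # counterpart to the connect above
    sub.setsockopt_string(zmq.SUBSCRIBE, '')  # no topic filter
    print(sub.recv_string())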
Example #6
    def __init__(self, rabbit_cfg, rmq_ws_cfg, db_cfg, redis_cfg, worker_type):
        """
        :param dict rabbit_cfg: config rabbit
        :param dict rmq_ws_cfg: config rmq_ws
        :param dict db_cfg: config db
        :param dict redis_cfg: config redis
        :param BaseEtlListener worker_type: Type of worker (listener)
        """
        self.worker_type = worker_type
        self._rmq_user = rabbit_cfg["user"]
        self._rmq_pwd = rabbit_cfg["password"]

        self._queue = rabbit_cfg['{}_queue'.format(self.worker_type)]
        self._ack = 'auto'
        self._db_conn = psycopg2.connect(**db_cfg)

        self.rmq_ws_publisher = Publisher(user=rmq_ws_cfg["user"],
                                          password=rmq_ws_cfg["password"],
                                          host=rmq_ws_cfg["host"])
        # self.rmq_ws_channel = self.rmq_ws_connection.channel()
        rmq_host = (rabbit_cfg["host"], rabbit_cfg["port"])
        self._conn = stomp.Connection([rmq_host])
        self.redis_storage_conn = ConnectionPool.from_url(
            "redis://{}:{}/{}".format(redis_cfg["host"], redis_cfg["port"],
                                      redis_cfg['db']))
        listener = LISTENER_MAP[self.worker_type]
        self._conn.set_listener(
            self.worker_type,
            listener(db_conn=self._db_conn,
                     redis_storage_conn=self.redis_storage_conn,
                     rmq_ws_publisher=self.rmq_ws_publisher))
        self._subscribe_and_connect()
Example #7
def print_report(date):
    pb = Publisher()
    report = pb.generate_report(date)
    if report:
        print(report['body'])
    else:
        print('\n'.join(pb.get_all_points()))
Example #8
    def __init__(self, worker=None, interval=1):
        self.worker = worker
        self.cpu_count = Monitor.get_cpu_count()
        self.interval = interval
        self.publish_report_interval = __publish_report_interval_default__
        self.system = platform.system()
        self.platform = platform.platform()
        self.architecture = platform.architecture()
        self.memory_total = psutil.virtual_memory().total
        self.user = psutil.users()[0].name
        self.cpu_name = None
        self.sys_caption = None
        self.sys_path = None
        self.sys_serial = None
        self.disk_caption = None
        self.fan_status = None
        if self.system == 'Windows':
            try:
                import wmi
                w = wmi.WMI()
                self.user = w.Win32_ComputerSystem()[0].UserName
                self.cpu_name = w.Win32_Processor()[0].Name
                self.sys_caption = w.Win32_OperatingSystem()[0].Caption
                self.sys_path = w.Win32_OperatingSystem()[0].WindowsDirectory
                self.sys_serial = w.Win32_OperatingSystem()[0].SerialNumber
                self.disk_caption = w.Win32_DiskDrive()[0].Caption
                self.fan_status = w.Win32_Fan()[0].status
            except Exception as e:
                print(e)

        if self.worker is None:
            self.worker = self.user
        self.publish = Publisher(self.worker)
        self.publish_timer = None
Example #9
    def add_publisher(self, name_, topics):
        name_found = any(
            pub.get_name() == name_ for pub in self.publishers.values())

        if not name_found:
            p = Publisher(self.pub_id, name_)
            self.publishers[p.get_id()] = p
            self.pub_id += 1

            for topic in topics:
                if topic not in self.topic_manager.topics:
                    self.topic_manager.add_pub(
                        p, self.topic_manager.create_topic(topic))
                else:
                    self.topic_manager.add_pub(
                        p, self.topic_manager.find_topic(topic))
            return p
        else:
            raise Exception(
                "There is already a publisher named '{0}' registered to "
                "topics '{1}'!".format(name_, topics))
Example #10
    def __init__(self, parent, *args, **kwargs):
        tk.Frame.__init__(self, parent, *args, **kwargs)
        self.parent = parent

        # Set Eth node endpoint.
        if self.LIVE:
            self.endpoint = self.MAINNET
        else:
            self.endpoint = self.TESTNET

        # Load data.json signal dict.
        with open("data.json") as file:
            self.data = json.load(file)

        # Init component modules.
        self.auditor = Auditor(self.endpoint, self.data,
                               self.ETHERSCAN_API_TOKEN, self.LIVE)
        self.publisher = Publisher(self.endpoint, self.pub_k, self.pvt_k,
                                   self.data)

        self.init_gui()

        # Check if address and key are valid
        self.output.insert(
            self.output.size() + 1,
            "Address check: " + str(self.auditor.validate(self.pub_k)))

        self.output.insert(
            self.output.size() + 1,
            "Private key check: " + str(self.auditor.validate(self.pvt_k)))
Example #11
def handler(data, context):
    """
    Handler method that calculates the difference of a dataset
    and sends messages to Google Cloud Pub/Sub.

    :param: data    Dictionary like object that holds trigger information.
    :param: context Google Cloud Function context.
    """

    try:
        bucket_name = data["bucket"]
        file_name = data["name"]

        # Exit when file does not need to be processed
        if not file_name.startswith(config.prefix_filter):
            logging.info("Do not process file, exiting...")
            return "OK", 204

        file = GoogleCloudStorage().read(file_name, bucket_name)
        file.top_level_attribute = config.top_level_attribute
        file.csv_dialect_parameters = config.csv_dialect_parameters

        records = file.to_json(Formatter(config.template))

        if not config.full_load:
            if config.state.type == "datastore":
                records = GoogleCloudDatastore().difference(
                    records, config.state.kind, config.state.property)
            else:
                raise NotImplementedError("Unknown state type!")

        # Exit when no new records exist
        if not len(records):
            logging.info("No new records found, exiting...")
            return "OK", 204

        metadata = Gobits.from_context(context=context)
        publisher = Publisher(config.topic.batch_settings)
        publisher.publish(
            config.topic.project_id,
            config.topic.id,
            records,
            metadata.to_json(),
            config.topic.batch_size,
            config.topic.subject,
        )

        # Store the new state records
        if not config.full_load:
            if config.state.type == "datastore":
                logging.info("Adding new items to state")
                GoogleCloudDatastore().put_multi(records, config.state.kind,
                                                 config.state.property)

    except Exception as e:
        logging.exception(e)
        return "Bad Request", 400

    return "OK", 204
Example #12
    def _create_and_execute_publisher(query):
        # create Publisher object and run the main method
        publisher_app = Publisher(PR_BASE_DIR, df_collections, df_tiles, query=query)
        publisher_app.main()

        # wait N seconds for the task to save the data in the database
        # before checking that the data has been inserted correctly
        sleep(test_delay_secs)
Example #13
	def run(self):
		syslog.syslog("fail2ban-zmq-tools Publisher starting")
		signal.signal(signal.SIGTERM,self.__sigTERMhandler)
		signal.signal(signal.SIGINT,self.__sigTERMhandler)
		self.publisher = Publisher(publisherconfig=publisherconfig)
		self.publisher.start()
		syslog.syslog("fail2ban-zmq-tools Publisher running. Main process waiting for termination signal. Threads working.")
		signal.pause()
		syslog.syslog("fail2ban-zmq-tools Publisher exiting.")
Example #14
 def addPublisher(self, publisherName):
     """This function shouldn't really be here, imo."""
     publisher = Publisher.query.filter(Publisher.name == publisherName).first()
     if publisher:
         self.publisher.append(publisher)
     else:
         publisher = Publisher()
         publisher.name = publisherName
         self.publisher.append(publisher)
Example #15
 def __init__(self, sid=None, uuid=None, publisher_sid=None, publishing_date=None, title=None, publication_url=None, publication_text=None):
     super(Publication, self).__init__(Publication.sql_dict,sid,uuid)
     self.publisher_sid=publisher_sid
     self.publishing_date=publishing_date
     self.title=title
     self.publication_text=publication_text
     self.publication_url=publication_url
     self.publisher=Publisher(self.publisher_sid)
     self.publisher.load()
Example #16
 def __init__(self, base_dir):
     self.publisher = Publisher()
     self.publisher.declare_queue('hello')
     self.base_dir = base_dir
     self.csv_filename = 'screenshot_list.csv'
     self.face_detector = FaceDetector()
     # self.cap0 = cv2.VideoCapture()
     # self.cap0.open(0)
     self.cap = cv2.VideoCapture(1)  # default is 0
     self.eyegaze_process = None
Example #17
 def publish_project(self, definition, identifier, username, pwd):
     """
     Prepare and render the project.
     :param definition: the project definition to render and publish
     :return: the result returned by the Publisher run
     """
     renderer = Publisher(username, pwd)
     result = renderer.run(definition)
     #self.store_project(user, name, definition, identifier)
     return result
Example #18
def publish():
    bs = request.args.get('bs')
    topic = request.args.get('topic')

    try:
        p = Publisher(bs)
        result = p.publish(topic, request.get_data())
    except Exception as e:
        result = '{0}'.format(e)

    return '{0}'.format(result)
Example #19
def publish():
    observer.received_msgs.inc()

    payload = request.get_json()
    img_url = payload["url"]
    queue_name = payload.get("queue_name", "")
    publisher = Publisher({"rabbitmq_hostname": rabbitmq_hostname})
    publisher.publish(img_url, queue_name)

    observer.processed_msgs.inc()
    return f"Sent {img_url} to {('exchange ' +  publisher.exchange_name) if not queue_name else queue_name}"
Example #20
    def test_github_ip_check(self):
        publish = Publisher('192.168.0.1', "dontmatter", "")
        self.assertFalse(publish.in_ip_address_range())
        jsonResponse = requests.get("https://api.github.com/meta", auth=("KentonCountyLibrary-Cincypy", "CincyPyCoders2000"))
        ipranges = jsonResponse.json()["hooks"]

        ipranges = [list(ip_network(ip).hosts()) for ip in ipranges]

        flat_range = list(chain.from_iterable(ipranges))

        publish = Publisher(str(flat_range[0]), "dontmatter", "")
        self.assertTrue(publish.in_ip_address_range())
Example #21
class Site(object):
    def __init__(self, site_id, site_number):
        """Parse parameters and create objects shared between processes."""
        if isinstance(site_id, str):
            site_id = int(site_id)
        if isinstance(site_number, str):
            site_number = int(site_number)
        self._site_id = site_id
        self._its_queue_name = "Q{!s}".format(site_id)
        self._its_exchange_name = "X{!s}".format(site_id)
        exchange_names = []
        for i in range(1, site_number + 1):
            if i != site_id:
                exchange_names.append("X{!s}".format(i))
        self._binding_exchange_names = exchange_names
        print(self._its_queue_name, self._its_exchange_name,
              self._binding_exchange_names)
        # declare sharing object
        requestQManager.register(
            'RequestQueue',
            RequestQueue,
            exposed=['add_request', 'pop_request', 'peek_request', 'size'])
        self._lock = Lock()
        self._logical_time = Value('i', 0)
        self._replys = Array('i', [0 for i in range(site_number)],
                             lock=self._lock)
        self._mymanager = requestQManager()
        self._mymanager.start()
        self._requestQ = self._mymanager.RequestQueue()

    def start_consumer(self):
        example = ExampleConsumer(self._its_queue_name,
                                  self._binding_exchange_names, self._lock,
                                  self._logical_time, self._requestQ,
                                  self._replys)
        try:
            example.run()
        except KeyboardInterrupt:
            example.stop()

    def run_consumer_process(self):
        self._p = Process(target=self.start_consumer, args=())
        self._p.start()

    def start_publisher(self):
        self._publisher = Publisher(self._its_exchange_name,
                                    self._its_queue_name)

    def request_for_critical_section(self):
        self._requestQ.add_request(self._site_id, self._logical_time.value)
        self._publisher.send_REQUEST(self._logical_time.value)
        self._logical_time.value += 1
Example #22
class EtlEventWorker(object):
    def __init__(self, rabbit_cfg, rmq_ws_cfg, db_cfg, redis_cfg, worker_type):
        """
        :param dict rabbit_cfg: config rabbit
        :param dict rmq_ws_cfg: config rmq_ws
        :param dict db_cfg: config db
        :param dict redis_cfg: config redis
        :param BaseEtlListener worker_type: Type of worker (listener)
        """
        self.worker_type = worker_type
        self._rmq_user = rabbit_cfg["user"]
        self._rmq_pwd = rabbit_cfg["password"]

        self._queue = rabbit_cfg['{}_queue'.format(self.worker_type)]
        self._ack = 'auto'
        self._db_conn = psycopg2.connect(**db_cfg)

        self.rmq_ws_publisher = Publisher(user=rmq_ws_cfg["user"],
                                          password=rmq_ws_cfg["password"],
                                          host=rmq_ws_cfg["host"])
        # self.rmq_ws_channel = self.rmq_ws_connection.channel()
        rmq_host = (rabbit_cfg["host"], rabbit_cfg["port"])
        self._conn = stomp.Connection([rmq_host])
        self.redis_storage_conn = ConnectionPool.from_url(
            "redis://{}:{}/{}".format(redis_cfg["host"], redis_cfg["port"],
                                      redis_cfg['db']))
        listener = LISTENER_MAP[self.worker_type]
        self._conn.set_listener(
            self.worker_type,
            listener(db_conn=self._db_conn,
                     redis_storage_conn=self.redis_storage_conn,
                     rmq_ws_publisher=self.rmq_ws_publisher))
        self._subscribe_and_connect()

    def run(self):
        while self._conn.is_connected():
            time.sleep(RECONNECT_TIMEOUT)

        self._subscribe_and_connect()

    def stop(self):
        self._conn.disconnect()
        self.redis_storage_conn.disconnect()

        self.rmq_ws_publisher.close()

    def _subscribe_and_connect(self):
        self._conn.start()
        self._conn.connect(self._rmq_user, self._rmq_pwd, wait=True)
        self._conn.subscribe(destination=self._queue,
                             id=self.worker_type,
                             ack=self._ack)
Example #23
    def pull_books(self, N_BOOKS=None):
        from itertools import chain

        if N_BOOKS is None:
            books = []
            for p in Publisher.enlist():
                books += list(p.books)
            self.shelf = Warehouse(books * 5)
        else:
            all_books = list(
                chain.from_iterable(list(p.books) for p in Publisher.enlist()))
            self.shelf = Warehouse(
                [random.choice(all_books) for _ in range(N_BOOKS)])
Example #24
class OdometryHandler(object):
    def __init__(self):
        self.arduino_data_processor = DataProcessor("arduino_data", 0.025,
                                                    0.210)
        self.pub = Publisher('odom', "odom", "base_link")

    def main(self):
        current_time = self.pub.getCurrentTime()

        x_y_theta_t, vx_vth = self.arduino_data_processor.getPublisherInfo()
        if self.arduino_data_processor.arduino_data is not None:
            self.pub.publishMessage(
                self.pub.createNavMsg(current_time, x_y_theta_t, vx_vth),
                self.pub.createTF(current_time, x_y_theta_t))
Example #25
def publish(source, destination=""):
    """
    Publishes all the posts found in _posts to destination
    """
    config = Config(source, destination).data()

    posts = make_posts(config['source'], config['posts_prefix'],
                       extensions=list(markup.extensions()))

    documents = posts + [HomePage(posts), Feed(posts), Archive(posts)]

    publisher = Publisher(documents, config['publish_dir'], config['templates_dir'], config)
    publisher.publish()
    copy_supporting_files(config['source'], config['destination'])
    return True
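A hedged usage sketch (both paths are assumptions; per the docstring, publish() looks for posts under the source's _posts directory):

    publish('./site', destination='./build')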
Example #26
def track_click_and_get_url(project_id, pubsub_client, article_id, articles,
                            user_id):
    """Tracks a link click based on article ID and returns the article URL

    Args:
        project_id: GCP project ID
        pubsub_client: Pubsub publisher client
        article_id: article ID which can be mapped to article metadata
        articles: list of dictionaries with article metadata
        user_id: ID of the user
    """

    # find the dictionary for the article clicked in the list of article dictionaries
    article_dict = next(item for item in articles
                        if item['article_id'] == article_id)
    article_url = article_dict['url']

    # populate click tracker dictionary
    click_tracker = {}
    click_tracker['user_id'] = user_id
    click_tracker['click_timestamp'] = datetime.datetime.now().strftime(
        '%Y-%m-%d %H:%M:%S')

    columns_to_track = ['article_id', 'title', 'publishedAt', 'sort']
    article_click_tracking = {}
    for field in article_dict:
        if field in columns_to_track:
            article_click_tracking[field] = article_dict[field]

    click_tracker['article_clicked'] = article_click_tracking

    print("""
    *********************
    click tracker: {}
    *********************
    """.format(json.dumps(click_tracker)))
    logger.info("""
    *********************
    click tracker: {}
    *********************
    """.format(json.dumps(click_tracker)))

    # publish message to pubsub topic
    publisher = Publisher(pubsub_client)
    publisher_path = pubsub_client.topic_path(project_id, 'news_clicks')
    publisher.pubsub_publish(publisher_path, json.dumps(click_tracker))

    return article_url
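The published message is the click_tracker dict serialized with json.dumps; its shape, with hypothetical values, looks like:

    {
        'user_id': 'u-123',
        'click_timestamp': '2020-01-01 12:00:00',
        'article_clicked': {
            'article_id': 'a-1',
            'title': 'Some headline',
            'publishedAt': '2020-01-01T08:00:00Z',
            'sort': 1,
        },
    }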
Example #27
 def add_feed_media(cls, collection_id, feed_category=None, approve_all=False):
   collection = Collection.get_by_id(int(collection_id))
   medias = []
   if feed_category:
     uri = Collection.YOUTUBE_CATEGORY_FEED % ((collection.region or 'US'), collection.feed_id,
                                               feed_category)
   else:
     uri = Collection.YOUTUBE_FEED % ((collection.region or 'US'), collection.feed_id)
   response = urlfetch.fetch(uri)
   if response.status_code == 200:
     data = simplejson.loads(response.content) or {}
     entries = data['feed']['entry'] if data.get('feed') else []
     ids = ''
     publisher_map = {}
     for entry in entries:
       id = re.search('video:(.*)', entry['id']['$t']).group(1)
       publisher = Publisher.add(host=MediaHost.YOUTUBE, host_id=entry['author'][0]['yt$userId']['$t'])
       deferred.defer(Publisher.fetch_details, publisher.id,
                      _name='publisher-' + publisher.id + '-' + str(uuid.uuid1()),
                      _queue='youtube')
       ids += id + ','
       publisher_map[id] = publisher
     if len(ids):
       youtube3 = get_youtube3_service()
       videos_response = youtube3.videos().list(
         id=ids,
         part='id,snippet,topicDetails,contentDetails,statistics'
       ).execute()
       logging.info('ADD FEED VIDEOS: ' + ids)
       medias = Media.add_from_snippet(videos_response.get('items', []), collection=collection,
                                       publisher=publisher, approve=approve_all)
   return medias
Example #28
 def _read(self):
     result = self.sheets_service.spreadsheets().values().get(
         spreadsheetId=SPREADSHEET_ID, range='data').execute()
     values = result.get('values')
     data = {}
     first_row = None
     for row, value in enumerate(values):
         if first_row is None:
             first_row = value
             self.service_state_col = {}
             for i in range(2, len(first_row), 2):
                 self.service_state_col[value[i]] = chr(ord('@') + i + 2)
             continue
         value.extend([None] * (len(first_row) - len(value)))
         services = []
         for i in range(2, len(first_row), 2):
             if not value[i]:
                 continue
             try:
                 state = json.loads(value[i + 1])
             except (TypeError, ValueError):
                 state = None
             services.append(Service(first_row[i], value[i], state))
         p = Publisher(row + 1, value[0], value[1], services)
         data[p.name] = p
     self.data = data
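The sheet layout this reader assumes: the header row names a service in every other column starting at C, and each data row holds the publisher name (column A), a channel (column B, per Example #29), then username/state-JSON pairs. An illustrative row (values hypothetical):

    # header: name  | channel  | twitter | <state>   | rss   | <state>
    # row:    alice | #general | @alice  | {"id": 5} | feed1 | {"etag": "x"}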
Example #29
    def update_state(self, name, service, state):
        p = self.data[name]

        updated_json = json.dumps(state)
        original_json = None
        for s in p.services:
            if s.name == service:
                original_json = json.dumps(s.state)

        if updated_json == original_json:
            return

        body = {'values': [[updated_json]]}
        cell_range = self.service_state_col[service] + str(p.row)
        self.sheets_service.spreadsheets().values().update(
            spreadsheetId=SPREADSHEET_ID,
            range=cell_range,
            valueInputOption='RAW',
            body=body).execute()
        services = []
        for s in p.services:
            if s.name == service:
                services.append(Service(service, s.username, state))
            else:
                services.append(s)
        self.data[p.name] = Publisher(p.row, p.name, p.channel, services)
Example #30
    def random(books=None, DEF_N=20, GENERATE=True):
        names = ['AstPress', 'NoShitPress', 'Factoid']

        p = Publisher(random.choice(names), [])

        if books is None:
            books = list(WarehouseGenerator.random(DEF_N,
                                                   p)) if GENERATE else []

        else:
            for i in range(len(books)):
                books[i].publisher = p

        p.books = set(books)

        return p
Example #31
    def __init__(self):

        self.classnames = [
            "background", "person", "crutches", "walking_frame", "wheelchair",
            "push_wheelchair"
        ]

        #read rosparams
        config_file = rospy.get_param('~model_config', "")
        self.fixed_frame = rospy.get_param('~fixed_frame', 'odom')
        self.tracking = rospy.get_param('~tracking', True)
        self.filter_detections = rospy.get_param('~filter_inside_boxes', True)
        self.inside_box_ratio = rospy.get_param('~inside_box_ratio', 0.8)
        camera_topic = rospy.get_param('~camera_topic',
                                       '/kinect2/qhd/image_color_rect')
        camera_info_topic = rospy.get_param('~camera_info_topic',
                                            '/kinect2/qhd/camera_info')

        #initialize subscribers
        rospy.Subscriber(camera_topic,
                         Image,
                         self.image_callback,
                         queue_size=1)
        rospy.Subscriber(camera_info_topic,
                         CameraInfo,
                         self.cam_info_callback,
                         queue_size=1)

        #detection model and tracker
        self.setup_model_and_tracker(config_file)

        #image queues
        self.last_received_image = None  #set from image topic
        self.last_processed_image = None  #set from image topic
        self.new_image = False

        self.cam_calib = None  #set from camera info
        self.camera_frame = None  #set from camera info

        #helpers
        Server(TrackingParamsConfig, self.reconfigure_callback)
        bridge = CvBridge()
        self.viz_helper = Visualizer(len(self.classnames))
        self.publisher = Publisher(self.classnames, bridge)
        self.image_handler = ImageHandler(bridge, cfg.TEST.MAX_SIZE,
                                          cfg.TEST.SCALE)
        self.tfl = tf.TransformListener()
Example #32
def main():
    publisher = Publisher()
    with open(TRIPDATA_PATH, "r") as trip_data:
        reader = csv.DictReader(trip_data)
        for line in reader:
            publisher.publish(json.dumps(line), '/queue/source')

    publisher.publish(str('exit'), '/queue/source')

    publisher.disconnect()
Example #33
    def test_get_daily(self):
        with open('./input_data/db_range.json') as sample:
            data = json.load(sample)
        pb = Publisher({
            "reward": {
                "medals": [
                    "http://i.imgur.com/hv0zL8U.png",
                    "http://i.imgur.com/xe9CD0S.png",
                    "http://i.imgur.com/50zpz2p.png"
                ],
                "pool":
                10
            }
        })

        print(pb.get_rank(data))
        print('\n'.join(pb.generate_report(data)))

Example #34
class f2bPublisherDaemon(daemon):
	def __sigTERMhandler(self,signum,frame):
		syslog.syslog("fail2ban-zmq-tools Publisher: Caught signal %d. Initiating shutdown..." % signum)
		self.quit()

	def run(self):
		syslog.syslog("fail2ban-zmq-tools Publisher starting")
		signal.signal(signal.SIGTERM,self.__sigTERMhandler)
		signal.signal(signal.SIGINT,self.__sigTERMhandler)
		self.publisher = Publisher(publisherconfig=publisherconfig)
		self.publisher.start()
		syslog.syslog("fail2ban-zmq-tools Publisher running. Main process waiting for termination signal. Threads working.")
		signal.pause()
		syslog.syslog("fail2ban-zmq-tools Publisher exiting.")
		
	def quit(self):
		signal.signal(signal.SIGTERM,signal.SIG_IGN)
		syslog.syslog("fail2ban-zmq-tools Publisher: Stopping threads...")
		self.publisher.join()
		syslog.syslog("fail2ban-zmq-tools Publisher says Bye")
Example #35
  def add_from_snippet(cls, items, collection=None, publisher=None, enforce_category=False, approve=False, dont_fetch_publisher=False):
    from publisher import Publisher
    from publisher import PublisherMedia
    from collection import CollectionMedia
    from topic import Topic
    from topic import TopicMedia
    from topic import TopicCollectionMedia

    medias = []
    for item in items:
      id = item['id']
      media = Media.get_by_key_name(MediaHost.YOUTUBE + id)
      if not media:
        duration = re.search(r'PT((\d*)H)?((\d*)M)?((\d*)S)?', item['contentDetails']['duration']).groups()
        duration = float(3600*float(duration[1] or 0) + 60*float(duration[3] or 0) + float(duration[5] or 0))
        media = cls(key_name=(MediaHost.YOUTUBE + id),
                    type=MediaType.VIDEO,
                    host_id=id,
                    name=db.Text(item['snippet']['title']),
                    published=iso8601.parse_date(item['snippet']['publishedAt']).replace(tzinfo=None),
                    duration=duration,
                    description=db.Text(item['snippet']['description'].replace("\n", r" ") if item['snippet']['description'] else ''),
                    host_views=int(item['statistics']['viewCount']) if item['statistics']['viewCount'] else 0)
        media.put()
        logging.info('FETCHED: ' + media.name)

      collection_media = None
      if collection and \
          (not enforce_category or (item['snippet'].get('categoryId') in collection.categories)):
        collection_media = CollectionMedia.add(collection, media, publisher=publisher,
                                               approved=(True if approve else None))

      if not dont_fetch_publisher and (not publisher or publisher.channel_id != item['snippet']['channelId']):
        publisher = Publisher.get_by_channel_id(item['snippet']['channelId'], item['snippet']['channelTitle'])
      if publisher and item['snippet']['channelId'] == publisher.channel_id:
        pm = media.publisherMedias.get()
        if pm and pm.publisher.channel_id != item['snippet']['channelId']:
          # Check if there was an incorrect publisher assigned before
          pm.delete()
        pm = PublisherMedia.add(publisher=publisher, media=media)
        logging.info(pm.publisher.name)

      # if item.get('topicDetails'):
      #   for topic_id in item['topicDetails']['topicIds']:
      #     topic = Topic.add(topic_id)
      #     TopicMedia.add(topic, media)
      #     if collection_media:
      #       TopicCollectionMedia.add(topic, collection_media)

      medias.append(media)
    return medias
Example #36
    def __init__(self):
        """ Initialize the speech pipeline components. """
        # configure pipeline
        Publisher.__init__(self)
        self.pipeline = gst.parse_launch('gconfaudiosrc ! audioconvert ! audioresample '
                                         + '! vader name=vad auto-threshold=true '
                                         + '! pocketsphinx name=asr ! fakesink')

        asr = self.pipeline.get_by_name('asr')
        asr.connect('partial_result', self.asr_partial_result)
        asr.connect('result', self.asr_result)
        asr.set_property('configured', True)
        asr.set_property('dsratio', 1)

        # parameters for grammar and dic
        grammar = os.path.dirname(os.getcwd()) + '/conf/grammar/callcenter.fsg'
        dic = os.path.dirname(os.getcwd()) + '/conf/dic/callcenter.dic'
        asr.set_property('fsg', grammar)
        asr.set_property('dict', dic)

        bus = self.pipeline.get_bus()
        bus.add_signal_watch()
        bus.connect('message::application', self.application_message)
Example #37
  def add_from_entry(cls, entries, fetch_publisher=False, approve=False, collection=None):
    from publisher import Publisher
    from publisher import PublisherMedia
    from collection import CollectionMedia

    medias = []
    for entry in [e for e in entries if e.media.player and not e.noembed]:
      content_url = urlparse.urlparse(entry.media.player.url)
      id = urlparse.parse_qs(content_url.query)['v'][0]
      media = Media.get_by_key_name(MediaHost.YOUTUBE + id)
      publisher = None
      if not media:
        name = entry.media.title.text.decode('utf-8')
        desc = entry.media.description.text
        desc = desc.decode('utf-8').replace("\n", r" ") if desc else None
        category = entry.media.category[0].text
        category = category.decode('utf-8') if category else None
        media = cls(key_name=(MediaHost.YOUTUBE + id),
                    type=MediaType.VIDEO,
                    host_id=id,
                    name=db.Text(name),
                    published=iso8601.parse_date(entry.published.text).replace(tzinfo=None),
                    duration=float(entry.media.duration.seconds),
                    description=desc,
                    host_views=int(entry.statistics.view_count) if entry.statistics else 0,
                    category=category)

        publisher_name = entry.author[0].name.text.lower()
        publisher = Publisher.add(MediaHost.YOUTUBE, publisher_name, fetch_details=fetch_publisher)
        PublisherMedia.add(publisher, media)
        media.put()
      medias.append(media)

      publisher = publisher or (media.publisherMedias.get().publisher if media.publisherMedias.get() else None)
      collection_media = None
      if collection:
        collection_media = CollectionMedia.add(collection, media, publisher=publisher, approved=(True if approve else None))
    return medias
Example #38
 def fetch(self, approve_all=False, max=50, collection=None):
   medias = []
   logging.info('Fetching playlist')
   max = 200 if not self.last_fetch else max
   yt_service = get_youtube_service()
   offset = 1
   while offset <= max:
     feed = yt_service.GetYouTubePlaylistVideoFeed(
         uri=Playlist.YT_PLAYLIST % (self.host_id, offset))
     if not self.name:
       self.name = feed.title.text
       self.publisher = Publisher.add('youtube', feed.author[0].name.text)
       self.put()
     if len(feed.entry) == 0:
       break
     medias = Media.add_from_entry(feed.entry, collection=collection, approve=approve_all, fetch_publisher=True)
     for media in medias:
       PlaylistMedia.add(playlist=self, media=media,
                           approved=(True if approve_all else None))
     offset += len(medias)
   self.last_fetch = datetime.datetime.now()
   self.put()
   return medias
Example #39
__all__ = [
    # publisher stuff:
    'Publisher', 
    'subscribe', 'unsubscribe', 'isSubscribed', 'unsubAll', 
    'sendMessage', 'SenderMissingReqdArgs', 'SenderUnknownOptArgs',
    'getListenerExcHandler', 'setListenerExcHandler',
    'addNotificationHandler', 'setNotificationFlags', 'clearNotificationHandlers',
    'setTopicUnspecifiedFatal',
    
    # misc:
    'PUBSUB_VERSION',
    ]


# ---------------------------------------------

_publisher = Publisher()

subscribe   = _publisher.subscribe    
unsubscribe = _publisher.unsubscribe
unsubAll    = _publisher.unsubAll
sendMessage = _publisher.sendMessage

getListenerExcHandler     = _publisher.getListenerExcHandler
setListenerExcHandler     = _publisher.setListenerExcHandler
addNotificationHandler    = _publisher.addNotificationHandler
clearNotificationHandlers = _publisher.clearNotificationHandlers
setNotificationFlags      = _publisher.setNotificationFlags
getNotificationFlags      = _publisher.getNotificationFlags

setTopicUnspecifiedFatal = _publisher.setTopicUnspecifiedFatal
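A hedged usage sketch of these module-level aliases, assuming the listener-first signature of the legacy pubsub1-style API (the exact signatures are an assumption):

    # Assumes listeners receive a message object exposing .topic and .data.
    def on_update(msg):
        print(msg.topic, msg.data)

    subscribe(on_update, 'some.topic')
    sendMessage('some.topic', data=42)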
    
Example #40
 def __init__(self):
     self.ICPD = ICPDataFile()
     Publisher.__init__(self)
Example #41
 def __init__(self, data=None, subs=None):
     Publisher.__init__(self, subs)
     self.data = data or {}
Example #42
def test_subscribe():
    p = Publisher()
    result = []
    def callback(data):
        result.append(data)
    p.subscribe('test', callback)
    p.publish('test', 4)
    assert result[0] == 4
    p.publish('test', 4)
    assert result[1] == 4
    p.publish('test2', 4)
    assert len(result) == 2
    p.unsubscribe('test', callback)
    p.publish('test', 4)
    assert len(result) == 2
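A minimal Publisher sketch that would satisfy this test; an illustration, not the project's actual class:

    class Publisher:
        def __init__(self):
            self._subscribers = {}

        def subscribe(self, topic, callback):
            self._subscribers.setdefault(topic, []).append(callback)

        def unsubscribe(self, topic, callback):
            self._subscribers.get(topic, []).remove(callback)

        def publish(self, topic, data):
            for callback in self._subscribers.get(topic, []):
                callback(data)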
Example #43
 def add_publisher_media(cls, collection_id, publisher_id, approve_all):
   collection = Collection.get_by_id(int(collection_id))
   publisher = Publisher.get_by_key_name(publisher_id)
   publisher_medias = publisher.fetch(collection=collection, approve_all=approve_all)
Example #44
import time
from broker_connection import Config
from publisher import Publisher


def publish_loop(publisher):
    publish_val = 0
    while True:
        try:
            publisher.publish(publish_val)
            publish_val += 1
        except IOError:
            return
        time.sleep(1)

uuid = 'publisheruuid1'
config = Config('127.0.0.1', 4444, '127.0.0.1', 5505)
simple_publisher = Publisher(uuid, publish_loop, config)
simple_publisher.add_metadata({'Room': '410'})
simple_publisher.start()

time.sleep(60)
Example #45
	def publishers(self):
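		# Imported inside the method, presumably to avoid a circular import.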
		from publisher import Publisher
		return Publisher.get_for_agency(self)
Example #46
 def test_hello(self):
     pp = Publisher(self.personal_cloud)
     result = pp.hello()
     self.assertEqual(result, 0)
Example #47
 def test_publish(self):
     pp = Publisher(self.personal_cloud)
     result = pp.publish(self.test_file_path, self.remote_file_path)
     self.assertEqual(result, 0)  # 0 means published successfully
Example #48
	def __init__(self):
		Publisher.__init__(self)
		self.history = {}
Example #49
	def __init__(self, twitter):
		Publisher.__init__(self)
		self.twitter = twitter
		self.last_ids = {'home': -1, 'mentions': -1}
		self.timelines_fetchers = {'home': self.twitter.home_timeline, 'mentions': self.twitter.mentions_timeline}
Example #50
	def publisher(self):
		from publisher import Publisher
		return Publisher.get(self['publisherId'])
Example #51
def publish():
    ip = request.remote_addr
    publish = Publisher(ip, "publisher", request.json)
    if publish.in_ip_address_range():
        return "Yup"
    abort(403)