Example #1
class CustomStreamListener(tweepy.StreamListener):
  def __init__(self, socketio, track):
    super(CustomStreamListener, self).__init__()
    self.socketio = socketio
    self.room = track
    self.session = FuturesSession()

  def get_geonames_username(self):
    return "yasyf{}".format(random.randint(1,5))

  def on_status(self, status):
    if status.coordinates or status.author.location:
      data = {'text': status.text.encode('utf-8')}
      data.update({k:getattr(status.author, k) for k in ['time_zone', 'location']})
      data.update({k:getattr(status, k) for k in ['lang', 'coordinates']})

      def add_sentiment(session, response):
        data['sentiment'] = response.json()['results']
        self.socketio.emit('status', data, room=self.room)

      def add_country_code(session, response):
        try:
          json = response.json()
          if json['totalResultsCount'] > 0:
            result = json['geonames'][0]
            data['country'] = result['countryCode']
            data['coordinates'] = {'coordinates': [float(result['lng']), float(result['lat'])]}
          else:
            return
        except (ValueError, KeyError, IndexError):  # bad JSON or unexpected shape
          data['country'] = response.text.strip()

        if TEST_MODE:
          data['sentiment'] = random.random()
          self.socketio.emit('status', data, room=self.room)
        else:
          url = "http://apiv2.indico.io/sentiment"
          args = {'key': os.getenv('INDICOIO_API_KEY')}
          self.session.post(url, data={'data': data['text']}, params=args, background_callback=add_sentiment)

      if status.coordinates:
        url = "http://ws.geonames.org/countryCode"
        args = {'lat': status.coordinates['coordinates'][1], 'lng': status.coordinates['coordinates'][0],
                'username': self.get_geonames_username()}
        self.session.get(url, params=args, background_callback=add_country_code)
      else:
        url = "http://api.geonames.org/search"
        args = {'q': status.author.location, 'username': self.get_geonames_username(),
                'maxRows': 1, 'type': 'json'}
        self.session.get(url, params=args, background_callback=add_country_code)
    return True

  def on_error(self, status_code):
    print('Encountered error with status code:', status_code)
    self.socketio.emit('error', {'status_code': status_code}, room=self.room)
    return True

  def on_timeout(self):
    print('Timeout...')
    return True
Example #2
class APNsClient(object):
    def __init__(self, cert_file, use_sandbox=False, use_alternative_port=False):
        server = 'api.development.push.apple.com' if use_sandbox else 'api.push.apple.com'
        port = 2197 if use_alternative_port else 443
        self.cert = cert_file
        self.server = "https://{}:{}".format(server, port)
        self.__connection = FuturesSession()
        self.__connection.mount('https://', HTTP20Adapter())

    def send_notification(self, tokens, notification, priority=NotificationPriority.Immediate, topic=None):
        # print(notification.dict())
        json_payload = json.dumps(notification.dict(), ensure_ascii=False, separators=(',', ':')).encode('utf-8')

        headers = {
            'apns-priority': priority.value
        }
        if topic:
            headers['apns-topic'] = topic

        if not isinstance(tokens, list):
            tokens = [tokens]

        for token in tokens:
            url = '{}/3/device/{}'.format(self.server, token)
            # req_callback is assumed to be defined elsewhere in this module.
            self.__connection.post(url, json_payload, headers=headers, cert=self.cert, background_callback=req_callback)
Example #3
def crowdsource_undetected(related_list, files_path, instructions, data_for):
    # if no files found then return zero
    if not os.listdir(files_path):
        return 0

    # Remove trailing slashes
    files_path = os.path.normpath(files_path)
    # Get an api crowd user
    api_user = get_api_user()
    crowd_user_id = 0
    if api_user and 'Id' in api_user:
        crowd_user_id = api_user['Id']

    # get a crowd job
    crowd_job_id = 0
    if crowd_user_id > 0:
        crowd_job_id = create_api_job(crowd_user_id, os.path.basename(files_path), instructions)
    zip_path = None
    if crowd_job_id > 0:
        # save json object to json file
        if related_list is not None and len(related_list) > 0:
            sio = StringIO()
            json.dump(related_list, sio)
            with open(os.path.join(files_path,'%s.json'%data_for), "w") as fjson:
                fjson.write(sio.getvalue())
        # compress all files in files_path directory
        zip_path = os.path.join(files_path, '%s.zip'%data_for)
        buff = BytesIO()  # the zip archive is binary, so a bytes buffer is needed
        with zipfile.ZipFile(buff, 'w', zipfile.ZIP_DEFLATED) as zipf:
            print('zipping ' + zip_path)
            zipdir(files_path, zipf)
            print('zipped ' + zip_path)

        session = FuturesSession()
        # api_uri = 'http://api.opescode.com/api/UserData?id=%s' %str(job_api_id)
        api_uri = '{0}/api/UserData?id={1}'.format(service_base_uri, str(crowd_job_id))
        logger.info('Calling web api {0} for {1}'.format(api_uri, zip_path))

        def bg_cb(sess, resp):
            print(zip_path, resp.status_code)
            # if failed then save the files to the recording physical folder
            if resp.status_code != 200:
                print('Post file {0} failed with stc={1}'.format(zip_path, str(resp.status_code)))
                # For now, I will not log this until I find a better way to pass logger to the callback method. Note: callback method has no access to self
                logger.error('Post file {0} failed with stc={1}'.format(zip_path, str(resp.status_code)))
            else:
                logger.info('%s posted successfully'%zip_path)
        try:
            with open(zip_path, "wb") as f:  # use `wb` mode
                print('saving zip ' + zip_path)
                f.write(buff.getvalue())
                print('zip saved ' + zip_path)
            if not archive_only:
                print('posting ' + zip_path)
                session.post(api_uri, files={"archive": buff.getvalue()}, background_callback=bg_cb)
                print('posted ' + zip_path)
            logger.info('posted %s and awaiting api response.'%zip_path)
        except Exception as ex:
            logger.error('Exception occurred while calling web api: %s' % ex)
    return crowd_job_id
Example #4
class HttpClient(ClientBase):

    def __init__(self, host='localhost', port=8094, tags=None):
        # only import HttpClient's dependencies if using HttpClient
        # if they're not found, inform the user how to install them
        try:
            from requests_futures.sessions import FuturesSession
        except ImportError:
            raise ImportError('pytelegraf[http] must be installed to use HTTP transport')

        super(HttpClient, self).__init__(host, port, tags)

        # the default url path for writing metrics to Telegraf is /write
        self.url = 'http://{host}:{port}/write'.format(host=self.host, port=self.port)

        # create a session to reuse the TCP socket when possible
        self.future_session = FuturesSession()

    def send(self, data):
        """
        Send the data in a separate thread via HTTP POST.
        HTTP introduces some overhead, so to avoid blocking the main thread,
        this issues the request in the background.
        """
        self.future_session.post(url=self.url, data=data)
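The send() docstring above describes the whole contract: the POST is handed to the session's worker thread and the caller moves on. A minimal usage sketch, assuming a local Telegraf agent with its HTTP listener enabled (the host, port, and metric line below are illustrative):

# Hypothetical usage of the HttpClient above.
client = HttpClient(host='localhost', port=8094)
# Telegraf accepts InfluxDB line protocol; this returns before the POST completes.
client.send('cpu_load,host=web01 value=0.64')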
Example #5
def send_ga_event(event, user):
    session = FuturesSession()
    payload = {
        'v': 1,
        'tid': settings.GOOGLE_TRACKING_ID,
        'uid': google_user_id(user),
        't': 'event',
        'ec': 'email',
        'ea': event.event_type,
        'cm': 'email',
    }
    if event.esp_event:
        payload['ua'] = event.esp_event.get('user-agent')
        payload['dt'] = event.esp_event.get('subject', [None])[0]
        payload['cn'] = event.esp_event.get('campaign_name', None)
        payload['cs'] = event.esp_event.get('campaign_source', None)
        payload['cc'] = payload['el'] = event.esp_event.get(
            'email_id', None)
        payload['dp'] = "%s/%s" % (
            payload['cc'], event.event_type)
    else:
        logger.warning("No ESP event found for event: %s" % event.__dict__)
    logger.info("Sending mail event data Analytics: %s" % payload)
    session.post(
        'https://www.google-analytics.com/collect', data=payload)
Example #6
File: views.py Project: zvovov/totext
def check_result(request):
	"""
		This is NOT A VIEW.
		Returns the job status after querying asynchronously. If finished, returns result.
	"""
	API_KEY = gimmeapikey(request)
	jobid = request.session['jobid']
	payload = {'apikey':API_KEY}
	session = FuturesSession()
	
	try:
		future = session.post('https://api.havenondemand.com/1/job/status/'+jobid, data = payload)
		r = future.result()
	except Exception as e:    # This is the correct syntax
		return 0
		
	# check if response is valid, else return an error.
	
	# r.content is a byte array. To cure that, decode utf-8 is used.
	
	response = r.content.decode('utf-8')
	json_data = json.loads(response)
	
	if 'status' in json_data:
		if json_data['status'] == 'finished':
			request.session['extracted_text'] = json_data['actions'][0]['result']['document'][0]['content']
		return json_data['status']
	else:
		return 0
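Note that check_result() calls future.result() immediately after posting, so the background thread adds no overlap here. A sketch of how the same session could poll several jobs concurrently (the job IDs and API_KEY below are placeholders):

session = FuturesSession()
# Fire all status requests first, then block on each result.
futures = {
    jobid: session.post('https://api.havenondemand.com/1/job/status/' + jobid,
                        data={'apikey': API_KEY})
    for jobid in ('job-1', 'job-2')
}
statuses = {jobid: f.result().json().get('status') for jobid, f in futures.items()}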
Example #7
    def send_single(receiver, body):
        self = agent_repo.get_byname('~self')

        url = 'http://{0}:8888/incoming'.format(receiver.hostname)
        session = FuturesSession()
        response_future = session.post(url, data={'sender': self.hostname, 'content': body})

        # wait for the response to come in
        response_future.result()
Example #8
    def embed_query(self, batch_query):
        emb_session = FuturesSession()
        r = emb_session.post(self.get_address(self.query_port) + '/batch_api',
                             data={'query': json.dumps(batch_query)})

        def map_():
            result = r.result()
            emb = result.json()
            return emb, result.elapsed.total_seconds() * 1000

        return map_
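Because embed_query() returns the map_ closure rather than the response, the HTTP round-trip overlaps whatever the caller does before invoking it. A usage sketch (the embedder instance and batch contents are hypothetical):

# The POST is already in flight when embed_query() returns.
resolve = embedder.embed_query([{'query': 'example'}])
# ... other work here overlaps with the request ...
emb, elapsed_ms = resolve()  # blocks only for whatever time remains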
Example #9
def send_ga_event(event, user):
    session = FuturesSession()
    payload = {
        'v': 1,
        'tid': settings.GOOGLE_TRACKING_ID,
        't': 'event',
        'ec': 'email',
        'ea': event.event_type,
        'cm': 'email',
    }
    if event.esp_event:
        payload['ua'] = event.esp_event.get('user-agent')
        payload['dt'] = event.esp_event.get('subject', [None])[0]
        payload['cn'] = event.esp_event.get('campaign_name', None)
        payload['cs'] = event.esp_event.get('campaign_source', None)
        payload['cc'] = payload['el'] = event.esp_event.get('email_id', None)
        payload['dp'] = "%s/%s" % (payload['cc'], event.event_type)
    else:
        logger.warning("No ESP event found for event: %s" % event.__dict__)
    logger.info("Sending mail event data Analytics: %s" % payload)
    session.post('https://www.google-analytics.com/collect', data=payload)
Example #10
File: test.py Project: Sathvika/Remember
 def testMakePosts(self):
     s = FuturesSession()
     new_post = json.dumps({'content': 'testing'})
     print(new_post)
     p = s.post('https://cs242project.herokuapp.com/submitPost', data=new_post)
     res = p.result()
     print(res)
     print(res.content)
     r = s.get('https://cs242project.herokuapp.com/getPosts')
     res2 = r.result()
     print(res2.content)
     self.assertEqual("test", "test")
Example #12
class LogglyHandler(logging.Handler):

    def __init__(self, token, host, tags=None, fmt=None,
                 resp_callback=None, exc_callback=None):

        logging.Handler.__init__(self)

        tags = tags or 'pyloggly'
        fmt = fmt or DEFAULT_MESSAGE_FORMAT

        self.url = INPUT_URL_FORMAT.format(
            host=host,
            token=token,
            tags=quote(tags, safe=',')
        )

        self.session = FuturesSession()
        self.formatter = jsonlogger.JsonFormatter(fmt)
        self.setFormatter(self.formatter)

        if resp_callback is not None:
            self.resp_callback = resp_callback

        if exc_callback is not None:
            self.exc_callback = exc_callback

    def resp_callback(self, session, resp):
        pass

    def exc_callback(self, exc):
        raise exc

    def emit(self, record):
        try:
            self.session.post(self.url,
                              data=self.format(record),
                              background_callback=self.resp_callback)
        except RequestException as e:
            self.exc_callback(e)
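A sketch of wiring the handler into the logging module, assuming INPUT_URL_FORMAT and DEFAULT_MESSAGE_FORMAT are defined alongside the class (the token and host values are placeholders):

import logging

handler = LogglyHandler(token='YOUR-LOGGLY-TOKEN', host='logs-01.loggly.com')
log = logging.getLogger('myapp')
log.addHandler(handler)
log.info('shipped to Loggly in the background')  # emit() does not block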
Example #14
def start_summarization_jobs(sentence_count, articles, nj_phrases):
    result_lock = multiprocessing.Lock()
    endpoint_index = 0
    endpoints = config['summarization_endpoints']
    endpoints_count = len(endpoints)

    tokenizer = Tokenizer()

    body = {
        'noun_phrases': list(nj_phrases['noun_phrases']),
        'adjective_phrases': list(nj_phrases['adjective_phrases']),
        'nouns': list(nj_phrases['nouns']),
        'adjectives': list(nj_phrases['adjectives'])
    }

    max_workers_count = len(articles) * SENTENCES_PER_REQUEST
    request_session = FuturesSession(max_workers=max_workers_count)
    sent_requests = Queue()

    for article in articles:
        page_sentences = tokenizer.tokenize_sentences(article['text'])
        body['title'] = article['title']

        page_sentences_count = len(page_sentences)
        parts_count = int(math.ceil(page_sentences_count / SENTENCES_PER_REQUEST))

        for i in range(parts_count):
            endpoint = endpoints[endpoint_index]
            begin_index = math.ceil(i * page_sentences_count / parts_count)
            end_index = math.ceil((i + 1) * page_sentences_count / parts_count)

            body['text'] = ' '.join(page_sentences[begin_index: end_index])
            # snapshot the payload per request: the worker thread serializes it,
            # and this dict is mutated again on the next loop iteration
            page_part_request = request_session.post(
                endpoint,
                json=dict(body),
                background_callback=append_result)

            sent_requests.put({
                'future': page_part_request,
                'body': dict(body),
                'callback': append_result
            })

            endpoint_index += 1

            if endpoint_index >= endpoints_count:
                endpoint_index = 0

    save_results(request_session, result_lock, sent_requests)

    return get_best_sentences(sentence_count)
Example #15
    def binary_data_http_instance_generator(self, action, instance_times,
                                            blocking_cli, data_file):
        """
      TODO: Automate content type
      """
        url = f"{self.base_gust_url}{action}?{self.runid}"
        session = FuturesSession(max_workers=100)
        if len(instance_times) == 0:
            return False
        after_time, before_time = 0, 0

        futures = []

        if data_file not in self.binary_data_cache:
            with open(data_file, 'rb') as f:  # close the file once it is cached
                data = f.read()
            self.binary_data_cache[data_file] = {}
            self.binary_data_cache[data_file]["body"] = data
            self.binary_data_cache[data_file]["mime"] = MimeTypes().guess_type(
                data_file)[0]

        file_body = self.binary_data_cache[data_file]["body"]
        file_mime = self.binary_data_cache[data_file]["mime"]

        for t in instance_times:
            st = t - (after_time - before_time)
            if st > 0:
                time.sleep(st)
            before_time = time.time()
            #self.logger.info("Url " + url)
            assert self.runid
            future = session.post(url=url,
                                  headers={'Content-Type': file_mime},
                                  params={
                                      'blocking': blocking_cli,
                                      'result': self.RESULT,
                                      'payload': {
                                          'testid': self.runid
                                      }
                                  },
                                  data=file_body,
                                  auth=(self.user_pass[0], self.user_pass[1]),
                                  verify=False)
            futures.append(future)
            after_time = time.time()

        (successes, failures) = self.handle_futures(futures)

        with self.tally_lock:
            self.invocation_success_tally += successes
            self.invocation_failure_tally += failures
            self.invocation_expected_tally += len(instance_times)
Example #16
class Connection:
    ''' Connects to league client and communicates with it '''

    def __init__(self):
        self.kwargs = None
        self.url = None
        self.session = FuturesSession()

    def get_connection(self, settings):
        ''' Parses connection url and port from lockfile '''
        raise NotImplementedError('Please implement this method')

    def get(self, url, *args, **kwargs):
        ''' Wrapper around requests get method '''
        return requests.get('{}{}'.format(self.url, url), *args, **kwargs, **self.kwargs)

    def post(self, url, *args, **kwargs):
        ''' Wrapper around requests post method '''
        return requests.post('{}{}'.format(self.url, url), *args, **kwargs, **self.kwargs)

    def patch(self, url, *args, **kwargs):
        ''' Wrapper around requests patch method '''
        return requests.patch('{}{}'.format(self.url, url), *args, **kwargs, **self.kwargs)

    def put(self, url, *args, **kwargs):
        ''' Wrapper around requests put method '''
        return requests.put('{}{}'.format(self.url, url), *args, **kwargs, **self.kwargs)

    def delete(self, url, *args, **kwargs):
        ''' Wrapper around requests delete method '''
        return requests.delete('{}{}'.format(self.url, url), *args, **kwargs, **self.kwargs)

    def async_get(self, url, *args, **kwargs):
        ''' Wrapper around requests get method '''
        return self.session.get('{}{}'.format(self.url, url), *args, **kwargs, **self.kwargs)

    def async_post(self, url, *args, **kwargs):
        ''' Wrapper around requests post method '''
        return self.session.post('{}{}'.format(self.url, url), *args, **kwargs, **self.kwargs)

    def async_patch(self, url, *args, **kwargs):
        ''' Wrapper around requests patch method '''
        return self.session.patch('{}{}'.format(self.url, url), *args, **kwargs, **self.kwargs)

    def async_put(self, url, *args, **kwargs):
        ''' Wrapper around requests put method '''
        return self.session.put('{}{}'.format(self.url, url), *args, **kwargs, **self.kwargs)

    def async_delete(self, url, *args, **kwargs):
        ''' Wrapper around requests delete method '''
        return self.session.delete('{}{}'.format(self.url, url), *args, **kwargs, **self.kwargs)
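get_connection() is deliberately abstract: a subclass must set self.url and self.kwargs before any wrapper is used. A hypothetical subclass that hard-codes what a real implementation would parse from the lockfile:

class StaticConnection(Connection):
    def get_connection(self, settings):
        # Values a real implementation would read from the client lockfile.
        self.url = 'https://127.0.0.1:2999'
        self.kwargs = {'verify': False}  # merged into every request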
Example #17
    def call_webhook(self, original_validated_data, instance=None):
        user = self.context.get('request').user
        # Get the webhook of the user.
        webhook = user.webhooks.first()
        # For now we only want to send a POST request for the following events.
        events = ['account', 'case', 'contact', 'deal']
        # Get the class name of the instance.
        model = self.Meta.model._meta.model_name

        if webhook and model in events:
            if instance:
                # Create, so we don't have the instance ID available in the data.
                original_validated_data.update({
                    'id': instance.id,
                })

                data = {
                    'type': model,
                    'data': original_validated_data,
                    'object': self.data,
                    'event': 'create',
                }
            else:
                data = {
                    'type': model,
                    'data': original_validated_data,
                    'object': self.data,
                    'event': 'update',
                }

            data = json.dumps(data, sort_keys=True, default=lambda x: str(x))

            headers = {
                'Content-Type': 'application/json',
            }

            session = FuturesSession()
            session.post(webhook.url, data=data, headers=headers)
Example #19
def main():
    utils.setup_logging()
    args = argparse.ArgumentParser()
    args.add_argument('property_files', help='path to the config.yml file')
    args.add_argument('directory_json_files',
                      help='path to directory that contains json files')
    args.add_argument(
        'parallel_threads',
        nargs='?',
        help='number representing how many times to send the static events',
        type=int,
        default=1)
    parse_args = args.parse_args()
    properties_filename = parse_args.property_files
    json_files_dir = parse_args.directory_json_files
    no_threads = parse_args.parallel_threads
    session = FuturesSession(max_workers=no_threads)
    try:
        config_yml_stream = open(properties_filename, 'rb')
    except IOError as e:
        logging.error('Problem reading the file %s due to %s',
                      properties_filename, e)
        sys.exit(1)
    with config_yml_stream:
        try:
            config_yml_obj = yaml.safe_load(config_yml_stream)
        except YAMLError as e:
            logging.error('Problem loading the file %s due to %s',
                          properties_filename, e)
            sys.exit(1)
    files = os.listdir(json_files_dir)
    file = list(filter(lambda file: file.endswith(".json"), files))[0]
    path_to_file = json_files_dir + os.sep + file
    data = read_json(path_to_file)
    json_event_transformed = get_transformed_event(config_yml_obj, data)
    logging.info('Json data to be sent {} '.format(json_event_transformed))
    endpoint = config_yml_obj[utils.PROPS_ENDPOINT]
    url = endpoint[utils.PROPS_URL]
    token = endpoint[utils.PROPS_TOKEN]
    content_type = 'Content-type'
    authorization = 'Authorization'
    mime_type_json = 'application/json'
    bearer = 'Bearer ' + token
    headers = {content_type: mime_type_json, authorization: bearer}
    fire_requests = [
        session.post(url, json=json_event_transformed, headers=headers)
        for x in range(no_threads)
    ]
    responses = [item.result() for item in fire_requests]
    logging.info(responses)
Example #20
def makeAsyncLoop(links):  # Image dispatcher
    video_capture = cv2.VideoCapture(0)
    sess = FuturesSession()
    counter = 0
    scale = 1
    print(str(links) + ' - links are looped')
    while True:
        if len(links) == 0:
            return 0
        ret, frame = video_capture.read()  # Read a frame from the camera

        small_frame = cv2.resize(frame, (0, 0), fx=scale, fy=scale)  # Downscale the frame if needed

        img = pickle.dumps(small_frame)  # Serialize the image
        # try:
        sess.post(links[counter] + 'handle', data=img)  # Try to send the next frame
        # except:
        #     links.remove(links[counter])  # Remove the link from the list if it is unreachable
        counter += 1
        if counter > len(links) - 1:
            counter = 0
            sess.close()
            sess = FuturesSession()
Example #21
def handler(event, context):

    body = json.loads(event['body'])

    # Generate a random key
    tag = random_id(25)
    body["db_tag"] = tag

    # Insert a new row into dynamo db
    dynamo_db = boto3.resource("dynamodb")
    table = dynamo_db.Table("reddit_to_wordcloud")
    table.put_item(Item={
        "tag": tag,
        "status": "working",
        "image": "placeholder"
    })

    # Trigger execution lambda
    session = FuturesSession()
    session.post(
        "https://qirlhy4te6.execute-api.eu-west-2.amazonaws.com/production/reddit-to-wordcloud-execute",
        json=body)

    time.sleep(3)  # Wait for the request to successfully transmit

    response = {
        "statusCode": 200,
        "headers": {
            "Content-Type": "application/json"
        },
        "body": json.dumps({
            "status": "working",
            "db_tag": tag
        })
    }

    return response
Example #22
def av_processing_view(request):
    """
		Inputs a audio link, stores jobid to dict, redirects to processing page.
	"""
    ip = gimmeip(request)

    # no need to store linkid when jobid is required in every step. store only jobid instead.
    #latest_link_id = Link.objects.filter(linkip=ip, linktype='a').order_by('-created')[0].id
    latestlink = Link.objects.filter(linkip=ip).exclude(
        linktype='i').order_by('-created')[0].linktext

    #check if latest_link exists
    if latestlink:

        API_KEY = gimmeapikey(request)

        latest_link = latestlink.strip()
        payload = {'apikey': API_KEY, 'url': latest_link}

        session = FuturesSession()
        try:
            future = session.post(
                'https://api.havenondemand.com/1/api/async/recognizespeech/v1',
                data=payload)
            r = future.result()
        except Exception as e:  # This is the correct syntax
            return 0

        # r.content is a byte array. To cure that, decode utf-8 is used.

        response = r.content.decode('utf-8')
        #this works too, to get jobID string
        #response = str(r.result().content)[2:-1]

        json_data = json.loads(response)

        if 'jobID' in json_data:
            jobid = json_data['jobID']
            # setting jobid and linktext as session variables.
            request.session['jobid'] = jobid
            request.session['linktext'] = latest_link

        return render(request, 'apicall/av_processing.html', {
            'link': latest_link,
            'status': 'Processing...'
        })

    else:
        return HttpResponseRedirect(reverse('error'))
Example #23
def try_forgetful_futures():
    """
    Tests the workaround using ForgetfulCookieJar with requests_futures.
    """

    from requests_toolbelt.cookies.forgetful import ForgetfulCookieJar
    from requests_futures.sessions import FuturesSession

    session = FuturesSession()
    jar = ForgetfulCookieJar()
    session.cookies = jar

    payload1 = construct_payload()
    req1 = session.post(balance_url, payload1)
    resp1 = req1.result().text

    payload2 = construct_payload()
    req2 = session.post(balance_url, payload2)
    resp2 = req2.result().text

    test_response(resp1, 'Forgetful Futures req1')
    test_response(resp2, 'Forgetful Futures req2')

    return session
Example #24
def get_logged_in_future_sesh():
    sesh = FuturesSession()

    # get a fresh authenticity token
    response = sesh.get(
        'https://archivesupport.zendesk.com/auth/v2/login/signin').result()
    auth_token_pattern = re.compile(
        b'<input[^<^>]*?name="authenticity_token"[^<^>]*?value="(.*?)".*?/>')
    m = auth_token_pattern.search(response.content)
    auth_token = m.group(1).decode()

    login_form_data = {
        'utf8': '✓',
        'authenticity_token': auth_token,
        'return_to_on_failure': '/auth/v2/login/signin',
        'return_to': 'https://help.archive.org/auth/v2/login/signed_in',
        'brand_id': '360000261412',
        'form_origin': 'no_return',
        'user[email]': env['ZENDESK_AGENT_ACCOUNT'],
        'user[password]': env['ZENDESK_AGENT_PASSWORD']
    }
    sesh.post('https://archivesupport.zendesk.com/access/login',
              data=login_form_data).result()
    return sesh
Example #25
def create_names_future(ids):
    session = FuturesSession(max_workers=200)
    if len(ids) <= 1000:
        url = 'https://esi.evetech.net/latest/universe/names/?datasource=tranquility'
        header = {
            'accept': 'application/json',
            'Content-Type': 'application/json',
            'Cache-Control': 'no-cache'
        }
        future = session.post(url, json=ids, headers=header)
        return future
    answer = []
    for i in range(0, len(ids), 1000):
        answer.append(ids[i:1000 + i])
    futures = []
    for id_segment in answer:
        url = 'https://esi.evetech.net/latest/universe/names/?datasource=tranquility'
        header = {
            'accept': 'application/json',
            'Content-Type': 'application/json',
            'Cache-Control': 'no-cache'
        }
        futures.append(session.post(url, json=id_segment, headers=header))
    return futures
Example #26
def get_api_call_future(api_call: ApiCall):
    if api_call.method:
        session = FuturesSession()
        if api_call.method == 'GET':
            return session.get(url=api_call.url)
        elif api_call.method == 'POST':
            return session.post(url=api_call.url, data=api_call.body)
        elif api_call.method == 'PUT':
            return session.put(url=api_call.url, data=api_call.body)
        elif api_call.method == 'DELETE':
            return session.delete(url=api_call.url)
        else:
            raise ValueError('Invalid method type: {}'.format(api_call.method))
    else:
        raise ValueError('No API method defined')
Example #27
def HTTPInstanceGenerator(action, instance_times, blocking_cli, param_file=None):
    if len(instance_times) == 0:
        return False
    session = FuturesSession(max_workers=15)
    url = base_url + action
    parameters = {'blocking': blocking_cli, 'result': RESULT}
    authentication = (user_pass[0], user_pass[1])
    after_time, before_time = 0, 0

    if param_file is None:
        st = 0
        for t in instance_times:
            st = st + t - (after_time - before_time)
            before_time = time.time()
            if st > 0:
                time.sleep(st)
            future = session.post(url, params=parameters, auth=authentication, verify=False)
            after_time = time.time()
    else:   # if a parameter file is provided
        try:
            param_file_body = param_file_cache[param_file]
        except KeyError:
            with open(param_file, 'r') as f:
                param_file_body = json.load(f)
                param_file_cache[param_file] = param_file_body

        for t in instance_times:
            st = t - (after_time - before_time)
            if st > 0:
                time.sleep(st)
            before_time = time.time()
            future = session.post(url, params=parameters, auth=authentication,
                                  json=param_file_body, verify=False)
            after_time = time.time()

    return True
Example #28
def post_request(url,
                 body,
                 headers=None,
                 params=None,
                 cookies=None,
                 use_gzip=False,
                 session=None):
    """Perform a POST request with a predetermined number of retries."""
    is_async = session is not None  # `async` is a reserved word in Python 3.7+
    if not is_async:
        session = FuturesSession(executor=ThreadPoolExecutor(max_workers=1))

    _log_request("POST",
                 url,
                 body=body,
                 params=params,
                 headers=headers,
                 cookies=cookies)

    for number_of_tries in range(config.get_number_of_retries() + 1):
        try:
            if use_gzip:
                if headers:
                    headers["Content-Encoding"] = "gzip"
                else:
                    headers = {"Content-Encoding": "gzip"}
                data = gzip.compress(json.dumps(body).encode("utf-8"))
            else:
                data = json.dumps(body)
            future = session.post(url,
                                  data=data,
                                  headers=headers,
                                  params=params,
                                  cookies=cookies)
            if is_async:
                return future
            res = future.result()
            if res.status_code == 200:
                return res
        except Exception as e:
            if number_of_tries == config.get_number_of_retries():
                raise APIError(e)
    try:
        err_mess = res.json()["error"].__str__()
    except (ValueError, KeyError):
        err_mess = res.content.__str__()
    err_mess += "\nX-Request_id: {}".format(res.headers.get("X-Request-Id"))
    raise APIError(err_mess)
Example #29
def boys_go_deliver(good_stuff, audience):
    accesscode = os.environ['accesscode']
    try:
        session = FuturesSession()
        url = f"https://api.telegram.org/bot{accesscode}/sendPhoto"
        logger.info(f"url formed is {url}")
        for chat_id in audience:
            for stuff in good_stuff:
                response = session.post(url,
                                        data={
                                            'chat_id': chat_id,
                                            'photo': stuff
                                        })
        logger.info(f"Successfully sent message! {good_stuff}")
    except Exception:
        logger.error("Couldn't send reply")
Example #30
    def orders_remove_button_clicked(self):
        session = FuturesSession()

        if len(self.orders_table.selectionModel().selectedRows()) < 1:
            # Icelandic: "Error" / "No order is selected"
            QMessageBox.about(self, "Villa", "Engin pöntun er valin")
            return

        id = self.orders_table.item(
            self.orders_table.selectionModel().selectedRows()[0].row(),
            11).text()

        global token

        rq = session.post("https://leiga.fisedush.com/api/order/remove/" + id,
                          data="token=" + token,
                          background_callback=self.on_order_removal)
Example #31
def schedule_3d_coords_gen(mol, user):
    mol['generating_3d_coords'] = True

    base_url = openbabel_base_url()
    path = 'convert'
    output_format = 'sdf'

    url = '/'.join([base_url, path, output_format])

    data = {'format': 'smi', 'data': mol['smiles'], 'gen3d': True}

    session = FuturesSession()
    future = session.post(url, json=data)

    inchikey = mol['inchikey']
    future.add_done_callback(
        functools.partial(_finish_3d_coords_gen, inchikey, user))
Example #32
def schedule_orbital_gen(cjson, mo, id, orig_mo, user):
    cjson['generating_orbital'] = True

    base_url = avogadro_base_url()
    path = 'calculate-mo'
    url = '/'.join([base_url, path])

    data = {
        'cjson': cjson,
        'mo': mo,
    }

    session = FuturesSession()
    future = session.post(url, json=data)

    future.add_done_callback(
        functools.partial(_finish_orbital_gen, mo, id, user, orig_mo))
Example #33
 def broadcast_block(self, mined_block, transaction):
     session = FuturesSession()
     future = []
     for node in self.ring:
         future.append(
             session.post('http://' + HOST + ':' + node['port'] +
                          '/get_mined_block',
                          json={
                              'block': mined_block,
                              'transaction': transaction
                          },
                          hooks={'response': self.response_hook}))
     for fut in future:
         response = fut.result()
         if (response.status_code != 200):
             return -1
     return 0
Example #35
 def broadcast_transaction(self, transaction, signature, outputs):
     session = FuturesSession()
     future = []
     for node in self.ring:
         future.append(
             session.post('http://' + HOST + ':' + node['port'] +
                          '/validate_transaction',
                          json={
                              'transaction': transaction,
                              'signature': signature,
                              'outputs': outputs
                          },
                          hooks={'response': self.response_hook}))
     for fut in future:
         response = fut.result()
         if (response.status_code != 200):
             return -1
     return 0
Example #36
def gen_names(n_workers=4):
    template = BeautifulSoup(requests.get(NAME_URL).text, 'lxml')
    regions = [option['value'] for option in template.select('input[name="Languages"]')]
    genders = [option['value'] for option in template.select('input[name="Gender"]')]

    all_names = []
    session = FuturesSession(max_workers=n_workers)
    futures = [(session.post(NAME_URL, data={'generate': 'Generate',
                                             'Gender': gender,
                                             'Languages': region}),
                gender, region)
               for gender, region in itertools.product(genders, regions)]

    for future, gender, region in futures:
        names = list(BeautifulSoup(future.result().text, 'lxml').select('div#mainContent')[0].stripped_strings)
        all_names.extend(zip(names, [gender] * len(names), [region] * len(names)))

    return all_names
Example #37
    def connect(self):
        """
        Connect to the G-Node REST API via HTTP. Note: almost all methods throw an HTTPError or
        URLError if the communication fails.
        """
        url = urlparse.urljoin(self.location, RestStore.URL_LOGIN)

        session = FuturesSession(max_workers=20)

        future = session.post(url, {'username': self.user, 'password': self.password})
        response = future.result()
        self.raise_for_status(response)

        if not session.cookies:
            raise RuntimeError("Unable to authenticate for user '%s' (status: %d)!"
                               % (self.user, response.status_code))

        self.__session = session
Example #38
    def send_market(self, data):
        session = FuturesSession()
        log.info('EDDN Dispatcher sending Market payload.')
        log.debug('EDDN Dispatcher Raw Market Data: {}'.format(str(data)))

        payload = {
            "$schemaRef": "https://eddn.edcd.io/schemas/commodity/3/test",
            "header": {
                "uploaderID": "Anon",
                "softwareName": "BGS Companion",
                "softwareVersion": self.bgs_version
            },
            "message": {
                "systemName": data['StarSystem'],
                "stationName": data['StationName'],
                "timestamp": data['timestamp'],
                "commodities": []
            }
        }

        # Build and append one dict per commodity without a legality flag.
        for commodity in data['Items']:
            if 'legality' not in commodity:  ##REVIEW
                payload['message']['commodities'].append({
                    'name': commodity['Name'],
                    'meanPrice': commodity['MeanPrice'],
                    'buyPrice': commodity['BuyPrice'],
                    'stock': commodity['Stock'],
                    'stockBracket': commodity['StockBracket'],
                    'sellPrice': commodity['SellPrice'],
                    'demand': commodity['Demand'],
                    'demandBracket': commodity['DemandBracket']
                })

        payload = json.dumps(payload)
        log.debug('EDDN Dispatcher Market Data Payload: {}'.format(
            str(payload)))
        r = session.post('https://eddn.edcd.io:4430/upload/',
                         data=payload,
                         background_callback=self.receive_response)
        r.result()
Example #39
def create_api_job(user_id, title, instructions):
    session = FuturesSession()
    member_job = {'UserId': user_id, 'Id': 0, 'Title': title, 'Instructions': instructions}
    headers = {'Content-Type': 'application/json'}
    # future = session.post('http://api.opescode.com/api/MemberJobs', json=member_job, headers=headers)
    future = session.post('{0}/api/MemberJobs'.format(service_base_uri), json=member_job, headers=headers)
    response = future.result()
    stc = response.status_code
    id = 0
    if stc != 200:
        print('get_api_job -> Job not created - Request failed with stc=%s' % str(stc))
        logger.error('get_api_job -> Job not created - Request failed with stc=%s' % str(stc))
    else:
        jsn = response.json()
        print('get_api_job -> created job:', jsn)
        id = jsn  # this api method returns integer job id
        logger.info('get_api_job -> Job created - id=%s' % str(id))

    return id
Example #40
class Endpoint:
    def __init__(self, host=None):
        self.host = host or self.default_host
        self.session = FuturesSession()
        self.request_precision = 3

    default_host = "http://api.mdrft.com"
    ising_solver_path = "/apiv1/ising"

    def dispatch(self, solver, path=ising_solver_path, callback=None):
        path = path or self.ising_solver_path
        if solver.ising_interactions.shape[0] != 0:
            mat = self._build_matrix_for_params(solver.ising_interactions)
            params = {'hami': np.round(mat, self.request_precision).tolist()}
        else:
            raise ValueError(
                'No valid qubo nor ising interactions in the solver.')

        def handle_result(sess, resp):
            if callback is not None:
                if resp.status_code != 200:
                    print("Server responded: {}".format(resp.content))
                    callback([])
                else:
                    callback(
                        solver.adjust_solutions_from_ising_spins(
                            np.array(resp.json())))

        request = self.session.post(
            url=self.host + path,
            headers={'Content-Type': 'application/json'},
            data=json.dumps(params, separators=(',', ':'), cls=CompactEncoder),
            background_callback=handle_result)

        return request

    def _build_matrix_for_params(self, matrix, strip=False):
        n = matrix.shape[0]
        rows = matrix.tolist()  # avoid shadowing the `list` builtin
        if strip:
            for i in range(n):
                rows[i] = rows[i][i:]
        return rows
Example #41
    def post_async(self, iterable):
        session = FuturesSession(executor=ThreadPoolExecutor(
            max_workers=self.max_workers))
        fs = [
            session.post(self.URL, data=self.get_payload(*i)) for i in iterable
        ]
        fs = concurrent.futures.as_completed(fs)

        valores = []

        for future in fs:
            resp = future.result()
            body = resp.request.body
            data = [unquote(i.split('=')[1]) for i in body.split('&')]
            soup = BeautifulSoup(resp.content, 'lxml')
            check_for_error(soup)
            valor_corrigido = parse_valor_corrigido(soup)
            valores.append([data[4], data[2], data[3], valor_corrigido])
        return valores
Example #42
    def process(self, message, **kwargs):
        # type: (Message, **Any) -> None

        session = FuturesSession()
        if not message.params: message.params = {}
        if message.params.get('nolog', 'false') in ['true', '1']:
            return

        output = self._message_dict(message)
        for k, v in self.component_config.get('params').items():
            output[k] = v
        output['modelId'] = self.component_config.get('model_id')

        future = session.post(self.component_config.get('url'), json=output)
        response = future.result()
        if response.status_code != 200:
            logger.error('{} Error from API: {}'.format(
                str(response.status_code),
                json.loads(response.content)['error']))
Example #43
    def async_requests(self, url_list, request, endpoint_path,
                       endpoint_payload, endpoint_service, header):
        """Send requests to each CanDIG node in the network asynchronously using FutureSession. The
        futures are returned back to and handled by handle_peer_requests()


        :param url_list: List of
        :param request: The type of HTTP request to federate, either GET or POST. PUT TBD
        :type request: str
        :param endpoint_path: Specific API endpoint of CanDIG service to be queried, may contain query string if GET
        :type endpoint_path: str
        :param endpoint_payload: Query string or data needed by endpoint specified in endpoint_path
        :type endpoint_payload: object, {param0=value0, paramN=valueN} for GET, JSON struct dependent on service endpoint for POST
        :param endpoint_service: Specific microservice name, should match a service listed in services.json config
        :type endpoint_service: str
        :param header: Request headers defined in self.headers
        :type header: object
        :return: List of Futures
        """

        args = {
            "request_type": request,
            "endpoint_path": endpoint_path,
            "endpoint_payload": endpoint_payload,
            "endpoint_service": endpoint_service
        }
        async_session = FuturesSession(max_workers=10)  # capping max threads
        responses = []

        for url in url_list:
            try:
                # self.announce_fed_out(request_type, url, endpoint_path, endpoint_payload)
                responses.append(
                    async_session.post(url,
                                       json=args,
                                       headers=header,
                                       timeout=self.timeout))
            except (requests.exceptions.ConnectionError,
                    requests.exceptions.Timeout) as e:
                responses.append(e)

        return responses
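The returned list mixes Future objects with caught exception instances, so the consumer must distinguish the two. A sketch of how a caller such as handle_peer_requests() (not shown in this example) might drain it:

results = []
for item in responses:
    if isinstance(item, Exception):
        results.append({'error': str(item)})  # peer was unreachable or timed out
    else:
        results.append(item.result().json())  # block for this peer's response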
Example #44
class Tracker:

    def __init__(self, app_id: str) -> None:
        self._app_id = app_id
        self._session = FuturesSession()

    def send_async(self, teacher: Teacher) -> Future:
        payload = {
            "v": 1,
            "tid": "UA-2241989-17",
            "cid": 555,
            "t": "pageview",
            "dh": self._app_id,
            "dp": teacher.id,
            "dt": teacher.name,
        }
        return self._session.post("http://www.google-analytics.com/collect", payload)

    def close(self) -> None:
        self._session.close()
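send_async() hands the Future back to the caller, which decides whether to block. A usage sketch (the app id and the teacher object are placeholders; teacher must provide id and name):

tracker = Tracker('example-app-id')
future = tracker.send_async(teacher)    # returns immediately
status = future.result().status_code    # optional: block for delivery status
tracker.close()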
Example #45
def postLogs(logcache):
    #post logs asynchronously with requests workers and check on the results
    #expects a queue object from the multiprocessing library
    #setting this within the process to allow requests to establish a keep alive session with async workers.
    httpsession = FuturesSession(max_workers=2)
    httpsession.trust_env=False #turns off needless and repetitive .netrc check for creds
    canQuit=False
    logger.info('started posting process')
    def backgroundcallback(session, response):
        #release the connection back to the pool
        try:
            response.close()
        except Exception as e:
            logger.error('Exception while posting message: %r'%e)

    while True:
        try:
            #see if we have anything to post
            #waiting a bit to not end until we are told we can stop.
            postdata=logcache.get(False,30)
            if postdata is None:
                #signalled from parent process that it's ok to stop.
                logcache.task_done()
                canQuit=True
                
            elif len(postdata)>0:
                url=random.choice(options.urls)
                r=httpsession.post(url,data=postdata,stream=False,background_callback=backgroundcallback)
                logcache.task_done()
        except Empty as e:
            if canQuit:
                logger.info('signaling shutdown for threadpool executor')
                httpsession.executor.shutdown(wait=True)
                break

    logger.info('{0} done'.format('log posting task'))
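postLogs() treats a None item as its shutdown sentinel and calls task_done() on every get, so the producer side needs a JoinableQueue. A sketch of that wiring, assuming the module-level logger and options.urls that postLogs() uses are configured:

import multiprocessing

logcache = multiprocessing.JoinableQueue()
poster = multiprocessing.Process(target=postLogs, args=(logcache,))
poster.start()
logcache.put('{"summary": "example event"}')  # payload shape is illustrative
logcache.put(None)  # sentinel: lets postLogs() drain and shut down its pool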
Example #46
def get_api_user():
    mac = get_mac()
    # use hex mac address as the username for this device on the api
    username = format(mac, 'x')
    session = FuturesSession()
    # future = session.get('http://api.opescode.com/api/Users?username=' + username)
    future = session.get('{0}/api/Users?username={1}'.format(service_base_uri, username))
    response = future.result()
    print('get_api_user response ->', response.status_code)
    logger.info('get_api_user response %s.' % str(response.status_code))
    if response.status_code == 404:
        print('get_api_user -> user not found. Requesting a new user')
        logger.error('get_api_user -> user not found. Requesting a new user')
        user = {'Username': username, 'Name': username, 'Title': '', 'Email': '*****@*****.**'}
        headers = {'Content-Type': 'application/json'}
        # future = session.post('http://api.opescode.com/api/Users', json=user, headers=headers)
        future = session.post('{0}/api/Users'.format(service_base_uri), json=user, headers=headers)
        response = future.result()
        if response.status_code != 200:
            print('get_api_user -> user could not be created')
            logger.error('get_api_user -> user could not be created - Request failed with stc=%s' % str(response.status_code))
            return None
        logger.info('get_api_user -> new user (%s) created' % username)
        print('get_api_user -> created new user %s' % username)

    stc = response.status_code
    id = 0
    if stc == 200:
        jsn = response.json()
        id = jsn['Id']
        logger.info('get_api_user -> user id: %s'%str(id))
        print(jsn)
    else:
        logger.error('get_api_user response -> : %s'%str(stc))
    print('crowd eye user id:', id)
    return response.json()
Example #47
if user_key != '':
    api_suffix = '?user_key={}'.format(user_key)

session = FuturesSession(max_workers=total_threads)


def close_fps(fp_arr):
    """Close all previously open files."""
    if len(fp_arr) >= 50:
        for fp in fp_arr:
            fp.close()


fp_array = []

for i in range(0, total_threads):
    fp1 = open('data/pom-effective.xml', 'r')
    print('Run %d for %s is in progress' % (i, 'data/pom-effective.xml'))
    try:
        resp = session.post('{}/api/v1/stack-analyses{}'.format(api_url, api_suffix),
                            files={'manifest[]': ('pom.xml', fp1)},
                            data={'filePath[]': '/home/JohnDoe'},
                            headers={'Authorization':
                                     'Bearer {}'.format(os.getenv('RECOMMENDER_API_TOKEN'))})
    except Exception as e:
        print(e)
        pass

    close_fps(fp_array)
    fp_array.append(fp1)
Example #48
        # Parse ECG measurements
        lead1 = row[1]
        lead2 = row[2]

        # Buffer lines for influxdb
        # E.g: ecg,stream_id=janakaOptiPlex9020 lead1=-0.576,lead2=0.012 1452486486726298112
        #
        # Measurement: ecg
        # Tags: stream_id
        # Values: lead1, lead2

        # Writing directly in nanoseconds
        #s += 'ecg,stream_id=' + STREAM_ID + ' lead1=' + str(lead1) + ",lead2=" + str(lead2) + " " + str(nanotime.now().nanoseconds()) + "\n"

        # Milliseconds converted to nanoseconds
        s += 'ecg,stream_id=' + STREAM_ID + ' lead1=' + str(lead1) + ",lead2=" + str(lead2) + " " + str(int(nanotime.now().milliseconds() * 1000000)) + "\n"

        # Sampling rate as per csv file
        time.sleep(0.003)

        # POST to influxdb when batch threshold is reached
        if count > BATCH_AMOUNT:

            t = nanotime.now().milliseconds()

            # POST operation is async so it wont delay the timestamping
            future = session.post(INFLUX_URL + ':' + INFLUX_PORT + '/write?db=' + INFLUX_DB_NAME, data=s, background_callback=bg_cb)

            s = ''
            count = 0
Example #49
class RestApiHandler(logging.Handler):
    """
    A handler which does an HTTP POST for each logging event.
    """

    def __init__(self, endpoint, content_type='json',
                 ignored_record_keys=None):
        """
        endpoint: define the fully qualified RESTful API endpoint to POST to.
        content_type: only supports JSON currently
        """
        self.endpoint = endpoint
        self.content_type = content_type
        self.session = FuturesSession(max_workers=32)
        self.ignored_record_keys = (ignored_record_keys if ignored_record_keys
                                    else DEFAULT_IGNORED_KEYS)
        foo = TOP_KEYS.union(META_KEYS)
        self.detail_ignore_set = self.ignored_record_keys.union(foo)

        logging.Handler.__init__(self)

    def _getTraceback(self, record):
        """
        Format the traceback of the record, if exists.
        """
        if record.exc_info:
            return traceback.format_exc()
        return None

    def _getEndpoint(self):
        """
        Build RESTful API endpoint.
        Can override in child classes to add parameters.
        """
        return self.endpoint

    def _getPayload(self, record):
        """
        The data that will be sent to the RESTful API
        """

        try:
            # top level payload items
            d = record.__dict__
            pid = d.pop('process', 'nopid')
            tid = d.pop('thread', 'notid')

            payload = {
                k: v for (k, v) in d.items()
                if k in TOP_KEYS
            }

            # logging meta attributes
            payload['meta'] = {
                k: v for (k, v) in d.items()
                if k in META_KEYS
            }

            # everything else goes in details
            payload['details'] = {
                k: simple_json(v) for (k, v) in d.items()
                if k not in self.detail_ignore_set
            }

            payload['log'] = payload.pop('name', 'n/a')
            payload['level'] = payload.pop('levelname', 'n/a')
            payload['meta']['line'] = payload['meta'].pop('lineno', 'n/a')

            payload['message'] = record.getMessage()
            tb = self._getTraceback(record)
            if tb:
                payload['traceback'] = tb

        except Exception as e:
            payload = {
                'level': 'ERROR',
                'message': 'could not format',
                'exception': repr(e),
            }
        payload['pid'] = 'p-{}'.format(pid)
        payload['tid'] = 't-{}'.format(tid)
        return payload

    def _prepPayload(self, record):
        """
        record: generated from logger module
        This preps the payload to be formatted in whatever content-type is
        expected from the RESTful API.

        returns: a tuple of the data and the http content-type
        """
        payload = self._getPayload(record)
        json_data = json.dumps(payload, default=serialize)

        return {
            'json': (json_data, 'application/json')
        }.get(self.content_type, (json_data, 'text/plain'))

    def emit(self, record):
        """
        Override emit() method in handler parent for sending log to RESTful API
        """
        # avoid infinite recursion
        if record.name.startswith('requests'):
            return

        data, header = self._prepPayload(record)

        try:
            self.session.post(self._getEndpoint(),
                              data=data,
                              headers={'content-type': header})
        except Exception:
            self.handleError(record)
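# A hedged usage sketch (editor's addition; the endpoint URL is a placeholder):
# attach the handler to a stdlib logger and each record is POSTed
# asynchronously as JSON.
if __name__ == '__main__':
    logger = logging.getLogger('myapp')
    logger.addHandler(RestApiHandler('http://localhost:8000/api/logs'))
    logger.error('something went wrong')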
Example #50
class Main(object):
    def __init__(self, args):
        log.info('Started main procedure')
        self.args = args
        self.wd = os.path.normpath(os.path.abspath(os.path.split(__file__)[0]))
        self.icon_path = os.path.join(self.wd, 'icons')
        if not self.args.text_only:
            from gi.repository import AppIndicator3 as appindicator
            self.ind = appindicator.Indicator.new_with_path(
                'syncthing-indicator',
                'syncthing-client-idle',
                appindicator.IndicatorCategory.APPLICATION_STATUS,
                self.icon_path)
            self.ind.set_status(appindicator.IndicatorStatus.ACTIVE)

        self.state = {'update_folders': True,
                      'update_devices': True,
                      'update_files': True,
                      'update_st_running': False,
                      'set_icon': 'paused'}
        self.set_icon()
        self.create_menu()

        self.downloading_files = []
        self.downloading_files_extra = {} # map: file_details -> file_details_extra
        self.recent_files = []
        self.folders = []
        self.devices = []
        self.errors = []

        self.last_ping = None
        self.system_data = {}
        self.syncthing_base = 'http://localhost:8080'
        self.syncthing_version = ''
        self.device_name = ''
        self.last_seen_id = 0
        self.timeout_counter = 0
        self.count_connection_error = 0
        self.session = FuturesSession()

        GLib.idle_add(self.load_config_begin)

    def create_menu(self):
        self.menu = Gtk.Menu()

        self.title_menu = Gtk.MenuItem('Syncthing')
        self.title_menu.show()
        self.title_menu.set_sensitive(False)
        self.menu.append(self.title_menu)

        self.syncthing_upgrade_menu = Gtk.MenuItem('Upgrade check')
        self.syncthing_upgrade_menu.connect('activate', self.open_releases_page)
        self.menu.append(self.syncthing_upgrade_menu)

        self.mi_errors = Gtk.MenuItem('Errors: open web interface')
        self.mi_errors.connect('activate', self.open_web_ui)
        self.menu.append(self.mi_errors)

        sep = Gtk.SeparatorMenuItem()
        sep.show()
        self.menu.append(sep)

        self.devices_menu = Gtk.MenuItem('Devices')
        self.devices_menu.show()
        self.devices_menu.set_sensitive(False)
        self.menu.append(self.devices_menu)
        self.devices_submenu = Gtk.Menu()
        self.devices_menu.set_submenu(self.devices_submenu)

        self.folder_menu = Gtk.MenuItem('Folders')
        self.folder_menu.show()
        self.folder_menu.set_sensitive(False)
        self.menu.append(self.folder_menu)
        self.folder_menu_submenu = Gtk.Menu()
        self.folder_menu.set_submenu(self.folder_menu_submenu)

        sep = Gtk.SeparatorMenuItem()
        sep.show()
        self.menu.append(sep)

        self.current_files_menu = Gtk.MenuItem('Downloading files')
        self.current_files_menu.show()
        self.current_files_menu.set_sensitive(False)
        self.menu.append(self.current_files_menu)
        self.current_files_submenu = Gtk.Menu()
        self.current_files_menu.set_submenu(self.current_files_submenu)

        self.recent_files_menu = Gtk.MenuItem('Recently updated')
        self.recent_files_menu.show()
        self.recent_files_menu.set_sensitive(False)
        self.menu.append(self.recent_files_menu)
        self.recent_files_submenu = Gtk.Menu()
        self.recent_files_menu.set_submenu(self.recent_files_submenu)

        sep = Gtk.SeparatorMenuItem()
        sep.show()
        self.menu.append(sep)

        open_web_ui = Gtk.MenuItem('Open web interface')
        open_web_ui.connect('activate', self.open_web_ui)
        open_web_ui.show()
        self.menu.append(open_web_ui)

        self.more_menu = Gtk.MenuItem('More')
        self.more_menu.show()
        self.menu.append(self.more_menu)

        self.more_submenu = Gtk.Menu()
        self.more_menu.set_submenu(self.more_submenu)

        self.mi_start_syncthing = Gtk.MenuItem('Start Syncthing')
        self.mi_start_syncthing.connect('activate', self.syncthing_start)
        self.mi_start_syncthing.set_sensitive(False)
        self.more_submenu.append(self.mi_start_syncthing)

        self.mi_restart_syncthing = Gtk.MenuItem('Restart Syncthing')
        self.mi_restart_syncthing.connect('activate', self.syncthing_restart)
        self.mi_restart_syncthing.set_sensitive(False)
        self.more_submenu.append(self.mi_restart_syncthing)

        self.mi_shutdown_syncthing = Gtk.MenuItem('Shutdown Syncthing')
        self.mi_shutdown_syncthing.connect('activate', self.syncthing_shutdown)
        self.mi_shutdown_syncthing.set_sensitive(False)
        self.more_submenu.append(self.mi_shutdown_syncthing)

        sep = Gtk.SeparatorMenuItem()
        self.more_submenu.append(sep)

        if not self.args.no_shutdown:
            self.mi_start_syncthing.show()
            self.mi_restart_syncthing.show()
            self.mi_shutdown_syncthing.show()
            sep.show()

        self.about_menu = Gtk.MenuItem('About Indicator')
        self.about_menu.connect('activate', self.show_about)
        self.about_menu.show()
        self.more_submenu.append(self.about_menu)

        self.quit_button = Gtk.MenuItem('Quit Indicator')
        self.quit_button.connect('activate', self.leave)
        self.quit_button.show()
        self.more_submenu.append(self.quit_button)

        if not self.args.text_only:
            self.ind.set_menu(self.menu)

    def load_config_begin(self):
        ''' Read needed values from config file '''
        confdir = GLib.get_user_config_dir()
        if not confdir:
            confdir = os.path.expanduser('~/.config')
        conffile = os.path.join(confdir, 'syncthing', 'config.xml')
        if not os.path.isfile(conffile):
            log.error("load_config_begin: Couldn't find config file {}".format(
                conffile))
        f = Gio.file_new_for_path(conffile)
        f.load_contents_async(None, self.load_config_finish)
        return False

    def load_config_finish(self, fp, async_result):
        try:
            success, data, etag = fp.load_contents_finish(async_result)

            dom = minidom.parseString(data)

            conf = dom.getElementsByTagName('configuration')
            if not conf:
                raise Exception('No configuration element in config')

            gui = conf[0].getElementsByTagName('gui')
            if not gui:
                raise Exception('No gui element in config')

            # Find the local syncthing address
            address = gui[0].getElementsByTagName('address')
            if not address:
                raise Exception('No address element in config')
            if not address[0].hasChildNodes():
                raise Exception('No address specified in config')

            self.syncthing_base = 'http://%s' % address[0].firstChild.nodeValue

            # Find and fetch the api key
            api_key = gui[0].getElementsByTagName('apikey')
            if not api_key:
                raise Exception('No api-key element in config')
            if not api_key[0].hasChildNodes():
                raise Exception('No api-key specified in config, please create one via the web interface')
            self.api_key = api_key[0].firstChild.nodeValue

            # Read folders and devices from config
            for elem in conf[0].childNodes:
                if elem.nodeType != minidom.Node.ELEMENT_NODE:
                    continue
                if elem.tagName == 'device':
                    self.devices.append({
                        'id': elem.getAttribute('id'),
                        'name': elem.getAttribute('name'),
                        'state': '',
                        'connected': False
                        })
                elif elem.tagName == 'folder':
                    self.folders.append({
                        'id': elem.getAttribute('id'),
                        'path': elem.getAttribute('path'),
                        'state': 'unknown',
                        })
            if not self.devices:
                raise Exception('No devices in config')
            if not self.folders:
                raise Exception('No folders in config')
        except Exception as e:
            log.error('Error parsing config file: {}'.format(e))
            self.leave()
            return

        # Start processes
        GLib.idle_add(self.rest_get, '/rest/system/version')
        GLib.idle_add(self.rest_get, '/rest/system/connections')
        GLib.idle_add(self.rest_get, '/rest/system/status')
        GLib.idle_add(self.rest_get, '/rest/system/upgrade')
        GLib.idle_add(self.rest_get, '/rest/system/error')
        GLib.idle_add(self.rest_get, '/rest/events')
        GLib.timeout_add_seconds(self.args.timeout_gui, self.update)
        GLib.timeout_add_seconds(self.args.timeout_rest, self.timeout_rest)
        GLib.timeout_add_seconds(self.args.timeout_event, self.timeout_events)

    def syncthing_url(self, url):
        ''' Creates a url from given values and the address read from file '''
        return urlparse.urljoin(self.syncthing_base, url)

    def open_web_ui(self, *args):
        webbrowser.open(self.syncthing_url(''))

    def open_releases_page(self, *args):
        webbrowser.open('https://github.com/syncthing/syncthing/releases')

    def rest_post(self, rest_path):
        log.debug('rest_post {}'.format(rest_path))
        headers = {'X-API-Key': self.api_key}
        if rest_path in ['/rest/system/restart', '/rest/system/shutdown']:
            self.session.post(
                self.syncthing_url(rest_path), headers=headers)
        return False

    def rest_get(self, rest_path):
        params = ''
        if rest_path == '/rest/events':
            params = {'since': self.last_seen_id}

        log.info('rest_get {} {}'.format(rest_path, params))
        # url for the included testserver: http://localhost:5115
        headers = {'X-API-Key': self.api_key}
        f = self.session.get(self.syncthing_url(rest_path),
                             params=params,
                             headers=headers,
                             timeout=9)
        f.add_done_callback(self.rest_receive_data)
        return False

    def rest_receive_data(self, future):
        try:
            r = future.result()
        except requests.exceptions.ConnectionError:
            log.error(
                "Couldn't connect to Syncthing REST interface at {}".format(
                    self.syncthing_base))
            self.count_connection_error += 1
            log.info('count_connection_error: {}'.format(self.count_connection_error))
            if self.count_connection_error > 1:
                self.state['update_st_running'] = True
                self.set_state('paused')
            return
        except (requests.exceptions.Timeout, socket.timeout):
            log.debug('Timeout')
            # Timeout may be because Syncthing restarted and event ID reset.
            GLib.idle_add(self.rest_get, '/rest/system/status')
            return
        except Exception as e:
            log.error('exception: {}'.format(e))
            return

        rest_path = urlparse.urlparse(r.url).path
        rest_query = urlparse.urlparse(r.url).query
        if r.status_code != 200:
            log.warning('rest_receive_data: {0} failed ({1})'.format(
                rest_path, r.status_code))
            if rest_path == '/rest/system/upgrade':
                # Debian/Ubuntu Syncthing packages disable upgrade check
                pass
            else:
                self.set_state('error')
            if rest_path == '/rest/system/ping':
                # Basic version check: try the old REST path
                GLib.idle_add(self.rest_get, '/rest/ping')
            return

        try:
            json_data = r.json()
        except ValueError:
            log.warning('rest_receive_data: Cannot process REST data')
            self.set_state('error')
            return

        # Receiving data appears to have succeeded
        self.count_connection_error = 0
        self.set_state('idle')  # TODO: fix this
        log.debug('rest_receive_data: {} {}'.format(rest_path, rest_query))
        if rest_path == '/rest/events':
            try:
                for qitem in json_data:
                    self.process_event(qitem)
            except Exception as e:
                log.warning(
                    'rest_receive_data: error processing event ({})'.format(e))
                log.debug(qitem)
                self.set_state('error')
        else:
            # dispatch to the matching handler, e.g.
            # /rest/system/status -> process_rest_system_status
            getattr(
                self,
                'process_{}'.format(rest_path.strip('/').replace('/', '_'))
                )(json_data)

    # processing of the events coming from the event interface
    def process_event(self, event):
        if self.args.log_events:
            log.debug('EVENT: {}: {}'.format(event['type'], json.dumps(event)))

        t = event.get('type').lower()
        if hasattr(self, 'event_{}'.format(t)):
            log.debug('received event: {} {}'.format(
                event.get('id'), event.get('type')))
        else:
            log.debug('ignoring event: {} {}'.format(
                event.get('id'), event.get('type')))

        # dispatch to the handler, falling back to event_unknown_event
        getattr(self, 'event_{}'.format(t), self.event_unknown_event)(event)
        self.update_last_seen_id(event.get('id', 0))


    def event_downloadprogress(self, event):
        try:
            e = event['data'].values()
            e = e[0].keys()[0]
        except (ValueError, KeyError, IndexError):
            e = ""

        log.debug(u'download in progress: {}'.format(e))
        for folder_name in event['data'].keys():
            for filename in event['data'][folder_name]:
                file_details = json.dumps({'folder': folder_name,
                                'file': filename,
                                'type': 'file',
                                'direction': 'down'})

                must_be_added = False
                try:
                    v = self.downloading_files_extra[file_details]
                except KeyError:
                    v = {}
                    must_be_added = True # not yet present in downloading_files_extra

                file = event["data"][folder_name][filename]
                if file["bytesTotal"] == 0:
                    pct = 0.0
                else:
                    # float division; plain / would truncate under Python 2
                    pct = 100.0 * file["bytesDone"] / file["bytesTotal"]
                # TODO: convert bytes to kb, mb etc
                v["progress"] = " ({}/{}) - {:.2f}%".format(file["bytesDone"], file["bytesTotal"], pct)
                if must_be_added:
                    self.downloading_files_extra[file_details] = v

            for elm in self.folders:
                if elm['id'] == folder_name:
                    elm['state'] = 'syncing'  # TODO: this is slow!
        self.state['update_files'] = True

    def event_unknown_event(self, event):
        pass

    def event_statechanged(self, event):
        for elem in self.folders:
            if elem['id'] == event['data']['folder']:
                elem['state'] = event['data']['to']
        self.state['update_folders'] = True
        self.set_state()

    def event_foldersummary(self, event):
        for elem in self.folders:
            if elem['id'] == event['data']['folder']:
                elem.update(event['data']['summary'])
        self.state['update_folders'] = True

    def event_foldercompletion(self, event):
        for dev in self.devices:
            if dev['id'] == event['data']['device']:
                if event['data']['completion'] < 100:
                    dev['state'] = 'syncing'
                else:
                    dev['state'] = ''
        self.state['update_devices'] = True

    def event_starting(self, event):
        self.set_state('paused')
        log.info('Syncthing is starting (at %s)' % event['time'])
        # Check for added/removed devices or folders.
        GLib.idle_add(self.rest_get, '/rest/system/config')
        GLib.idle_add(self.rest_get, '/rest/system/version')

    def event_startupcomplete(self, event):
        self.set_state('idle')
        log.info('Syncthing startup complete at %s' %
            self.convert_time(event['time']))

    def event_ping(self, event):
        self.last_ping = dateutil.parser.parse(event['time'])

    def event_devicediscovered(self, event):
        found = False
        for elm in self.devices:
            if elm['id'] == event['data']['device']:
                elm['state'] = 'discovered'
                found = True
        if not found:
            log.warning('unknown device discovered')
            self.devices.append({
                'id': event['data']['device'],
                'name': 'new unknown device',
                'address': event['data']['addrs'],
                'state': 'unknown',
                })
        self.state['update_devices'] = True

    def event_deviceconnected(self, event):
        for dev in self.devices:
            if event['data']['id'] == dev['id']:
                dev['connected'] = True
                log.info('Device connected: %s' % dev['name'])
        self.state['update_devices'] = True

    def event_devicedisconnected(self, event):
        for dev in self.devices:
            if event['data']['id'] == dev['id']:
                dev['connected'] = False
                log.info('Device disconnected: %s' % dev['name'])
        self.state['update_devices'] = True

    def event_itemstarted(self, event):
        log.debug(u'item started: {}'.format(event['data']['item']))
        file_details = {'folder': event['data']['folder'],
                        'file': event['data']['item'],
                        'type': event['data']['type'],
                        'direction': 'down'}
        try:
            del self.downloading_files_extra[json.dumps(file_details)]
        except KeyError:
            pass

        if file_details not in self.downloading_files:
            self.downloading_files.append(file_details)
        for elm in self.folders:
            if elm['id'] == event['data']['folder']:
                elm['state'] = 'syncing'
        self.set_state()
        self.state['update_files'] = True

    def event_itemfinished(self, event):
        # TODO: test whether 'error' is null
        log.debug(u'item finished: {}'.format(event['data']['item']))
        file_details = {'folder': event['data']['folder'],
                        'file': event['data']['item'],
                        'type': event['data']['type'],
                        'direction': 'down'}

        try:
            del self.downloading_files_extra[json.dumps(file_details)]
        except KeyError:
            pass

        try:
            self.downloading_files.remove(file_details)
            #action: update, delete, or metadata.
            #versioning:
            #For the first hour, the most recent version is kept every 30 seconds.
            #For the first day, the most recent version is kept every hour.
            #For the first 30 days, the most recent version is kept every day.
            log.debug('file locally updated: %s (%s) at %s' % (file_details['file'], event['data']['action'], event['time']))
        except ValueError:
            log.debug('Completed a file we didn\'t know about: {}'.format(
                event['data']['item']))
        file_details['time'] = event['time']
        file_details['action'] = event['data']['action']
        self.recent_files.insert(0, file_details)
        self.recent_files = self.recent_files[:self.args.nb_recent_files]
        self.state['update_files'] = True
    # end of the event processing dings

    # begin REST processing functions
    def process_rest_system_connections(self, data):
        for elem in data['connections']:
            for dev in self.devices:
                if dev['id'] == elem:
                    dev['connected'] = True
        self.state['update_devices'] = True

    def process_rest_system_config(self, data):
        log.info('Processing /rest/system/config')
        self.api_key = data['gui']['apiKey']

        newfolders = []
        for elem in data['folders']:
            newfolders.append({
                'id': elem['id'],
                'path': elem['path'],
                'state': 'unknown',
                })

        newdevices = []
        for elem in data['devices']:
            newdevices.append({
                'id': elem['deviceID'],
                'name': elem['name'],
                'state': '',
                'connected': False,
                })

        self.folders = newfolders
        self.devices = newdevices

    def process_rest_system_status(self, data):
        if data['uptime'] < self.system_data.get('uptime', 0):
            # Means that Syncthing restarted
            self.last_seen_id = 0
            GLib.idle_add(self.rest_get, '/rest/system/version')
        self.system_data = data
        # TODO: check status of global announce
        self.state['update_st_running'] = True

    def process_rest_system_upgrade(self, data):
        self.syncthing_version = data['running']
        if data['newer']:
            self.syncthing_upgrade_menu.set_label(
                'New version available: %s' % data['latest'])
            self.syncthing_upgrade_menu.show()
        else:
            self.syncthing_upgrade_menu.hide()
        self.state['update_st_running'] = True

    def process_rest_system_version(self, data):
        self.syncthing_version = data['version']
        self.state['update_st_running'] = True

    def process_rest_system_ping(self, data):
        if data['ping'] == 'pong':
            log.info('Connected to Syncthing REST interface at {}'.format(
                self.syncthing_url('')))

    def process_rest_ping(self, data):
        if data['ping'] == 'pong':
            # Basic version check
            log.error('Detected running Syncthing version < v0.11')
            log.error('Syncthing v0.11 (or higher) required. Exiting.')
            self.leave()

    def process_rest_system_error(self, data):
        self.errors = data['errors']
        if self.errors:
            log.info('{}'.format(data['errors']))
            self.mi_errors.show()
            self.set_state('error')
        else:
            self.mi_errors.hide()
    # end of the REST processing functions

    def update(self):
        for func in self.state:
            if self.state[func]:
                log.debug('self.update {}'.format(func))
                getattr(self, func)()
        return True

    def update_last_checked(self, isotime):
        #dt = dateutil.parser.parse(isotime)
        #self.last_checked_menu.set_label('Last checked: %s' % (dt.strftime('%H:%M'),))
        pass

    def update_last_seen_id(self, lsi):
        if lsi > self.last_seen_id:
            self.last_seen_id = lsi

    def update_devices(self):
        if self.devices:
            # TODO: set icon if zero devices are connected
            self.devices_menu.set_label('Devices ({}/{})'.format(
                self.count_connected(), len(self.devices) - 1))
            self.devices_menu.set_sensitive(True)

            if len(self.devices_submenu) == len(self.devices) - 1:
                # Update the devices menu
                for mi in self.devices_submenu:
                    for elm in self.devices:
                        if mi.get_label().split(' ')[0] == elm['name']:
                            mi.set_label(elm['name'])
                            mi.set_sensitive(elm['connected'])
            else:
                # Repopulate the devices menu
                for child in self.devices_submenu.get_children():
                    self.devices_submenu.remove(child)

                for elm in sorted(self.devices, key=lambda elm: elm['name']):
                    if elm['id'] == self.system_data.get('myID', None):
                        self.device_name = elm['name']
                        self.state['update_st_running'] = True
                    else:
                        mi = Gtk.MenuItem(elm['name'])
                        mi.set_sensitive(elm['connected'])
                        self.devices_submenu.append(mi)
                        mi.show()
        else:
            self.devices_menu.set_label('No devices')
            self.devices_menu.set_sensitive(False)
        self.state['update_devices'] = False

    def update_files(self):
        self.current_files_menu.set_label(u'Downloading %s files' % (
            len(self.downloading_files)))

        if not self.downloading_files:
            self.current_files_menu.set_sensitive(False)
            #self.set_state('idle')
        else:
            # Repopulate the current files menu
            self.current_files_menu.set_sensitive(True)
            self.set_state('syncing')
            for child in self.current_files_submenu.get_children():
                self.current_files_submenu.remove(child)
            for f in self.downloading_files:
                fj = json.dumps(f)
                progress = self.downloading_files_extra.get(fj, {}).get("progress", "")
                mi = Gtk.MenuItem(u'\u2193 [{}] {}{}'.format(
                    f['folder'],
                    shorten_path(f['file']),
                    progress))
                self.current_files_submenu.append(mi)
                mi.connect(
                    'activate',
                    self.open_file_browser,
                    os.path.split(
                        self.get_full_path(f['folder'], f['file']))[0])
                mi.show()
            self.current_files_menu.show()

        # Repopulate the recent files menu
        if not self.recent_files:
            self.recent_files_menu.set_sensitive(False)
        else:
            self.recent_files_menu.set_sensitive(True)
            for child in self.recent_files_submenu.get_children():
                self.recent_files_submenu.remove(child)
            icons = {'delete': u'\u2612',    # [x]
                     'update': u'\u2193',    # down arrow
                     'dir': u'\U0001f4c1',   # folder
                     'file': u'\U0001f4c4',  # file
                     }
            for f in self.recent_files:
                mi = Gtk.MenuItem(
                    u'{icon} {time} [{folder}] {item}'.format(
                        icon=icons.get(f['action'], 'unknown'),
                        folder=f['folder'],
                        item=shorten_path(f['file']),
                        time=self.convert_time(f['time'])
                        )
                    )
                self.recent_files_submenu.append(mi)
                mi.connect(
                    'activate',
                    self.open_file_browser,
                    os.path.split(
                        self.get_full_path(f['folder'], f['file']))[0])
                mi.show()
            self.recent_files_menu.show()
        self.state['update_files'] = False

    def update_folders(self):
        if self.folders:
            self.folder_menu.set_sensitive(True)
            folder_maxlength = 0
            if len(self.folders) == len(self.folder_menu_submenu):
                for mi in self.folder_menu_submenu:
                    for elm in self.folders:
                        folder_maxlength = max(folder_maxlength, len(elm['id']))
                        if str(mi.get_label()).split(' ', 1)[0] == elm['id']:
                            if elm['state'] == 'scanning':
                                mi.set_label('{} (scanning)'.format(elm['id']))
                            elif elm['state'] == 'syncing':
                                if elm.get('needFiles') > 1:
                                    lbltext = '{fid} (syncing {num} files)'
                                elif elm.get('needFiles') == 1:
                                    lbltext = '{fid} (syncing {num} file)'
                                else:
                                    lbltext = '{fid} (syncing)'
                                mi.set_label(lbltext.format(
                                    fid=elm['id'], num=elm.get('needFiles')))
                            else:
                                mi.set_label(elm['id'].ljust(folder_maxlength + 20))
            else:
                for child in self.folder_menu_submenu.get_children():
                    self.folder_menu_submenu.remove(child)
                for elm in self.folders:
                    folder_maxlength = max(folder_maxlength, len(elm['id']))
                    mi = Gtk.MenuItem(elm['id'].ljust(folder_maxlength + 20))
                    mi.connect('activate', self.open_file_browser, elm['path'])
                    self.folder_menu_submenu.append(mi)
                    mi.show()
        else:
            self.folder_menu.set_sensitive(False)
        self.state['update_folders'] = False

    def update_st_running(self):
        if self.count_connection_error <= 1:
            if self.syncthing_version and self.device_name:
                self.title_menu.set_label(u'Syncthing {0}  \u2022  {1}'.format(
                    self.syncthing_version, self.device_name))
            else:
                self.title_menu.set_label(u'Syncthing')
            self.mi_start_syncthing.set_sensitive(False)
            self.mi_restart_syncthing.set_sensitive(True)
            self.mi_shutdown_syncthing.set_sensitive(True)
        else:
            self.title_menu.set_label('Could not connect to Syncthing')
            for dev in self.devices:
                dev['connected'] = False
            self.state['update_devices'] = True
            for f in self.folders:
                f['state'] = 'unknown'
            self.state['update_folders'] = True
            self.errors = []
            self.mi_errors.hide()
            self.set_state()
            self.mi_start_syncthing.set_sensitive(True)
            self.mi_restart_syncthing.set_sensitive(False)
            self.mi_shutdown_syncthing.set_sensitive(False)

    def count_connected(self):
        return len([e for e in self.devices if e['connected']])

    def syncthing_start(self, *args):
        cmd = [os.path.join(self.wd, 'start-syncthing.sh')]
        log.info('Starting {}'.format(cmd))
        try:
            proc = subprocess.Popen(cmd)
        except Exception as e:
            log.error("Couldn't run {}: {}".format(cmd, e))
            return
        GLib.idle_add(self.rest_get, '/rest/system/status')
        GLib.idle_add(self.rest_get, '/rest/system/version')
        self.state['update_st_running'] = True

    def syncthing_restart(self, *args):
        self.rest_post('/rest/system/restart')
        GLib.idle_add(self.rest_get, '/rest/system/status')

    def syncthing_shutdown(self, *args):
        self.rest_post('/rest/system/shutdown')
        GLib.idle_add(self.rest_get, '/rest/system/status')

    def convert_time(self, time):
        return dateutil.parser.parse(time).strftime('%x %X')

    def calc_speed(self, old, new):
        return old / (new * 10)

    def license(self):
        with open(os.path.join(self.wd, 'LICENSE'), 'r') as f:
            lic = f.read()
        return lic

    def show_about(self, widget):
        dialog = Gtk.AboutDialog()
        dialog.set_default_icon_from_file(
            os.path.join(self.icon_path, 'syncthing-client-idle.svg'))
        dialog.set_logo(None)
        dialog.set_program_name('Syncthing Ubuntu Indicator')
        dialog.set_version(VERSION)
        dialog.set_website('https://github.com/icaruseffect/syncthing-ubuntu-indicator')
        dialog.set_comments('This menu applet for systems supporting AppIndicator'
            '\ncan show the status of a Syncthing instance')
        dialog.set_license(self.license())
        dialog.run()
        dialog.destroy()

    def set_state(self, s=None):
        if not s:
            s = self.state['set_icon']

        if (s == 'error') or self.errors:
            self.state['set_icon'] = 'error'
        elif self.count_connection_error > 1:
            self.state['set_icon'] = 'paused'
        else:
            self.state['set_icon'] = self.folder_check_state()

    def folder_check_state(self):
        state = {'syncing': 0, 'idle': 0, 'cleaning': 0, 'scanning': 0,
                 'unknown': 0}
        for elem in self.folders:
            if elem['state'] in state:
                state[elem['state']] += 1

        if state['syncing'] > 0:
            return 'syncing'
        elif state['scanning'] > 0 or state['cleaning'] > 0:
            return 'scanning'
        else:
            return 'idle'

    def set_icon(self):
        icon = {
            'updating': {'name': 'syncthing-client-updating', 'descr': 'Updating'},
            'idle': {'name': 'syncthing-client-idle', 'descr': 'Nothing to do'},
            'syncing': {'name': 'syncthing-client-updown', 'descr': 'Transferring Data'},
            'error': {'name': 'syncthing-client-error', 'descr': 'Scotty, We Have A Problem!'},
            'paused': {'name': 'syncthing-client-paused', 'descr': 'Paused'},
            'scanning': {'name': 'syncthing-client-scanning', 'descr': 'Scanning Directories'},
            'cleaning': {'name': 'syncthing-client-scanning', 'descr': 'Cleaning Directories'},
        }

        if not self.args.text_only:
            self.ind.set_attention_icon(icon[self.state['set_icon']]['name'])
            self.ind.set_icon_full(icon[self.state['set_icon']]['name'],
                                   icon[self.state['set_icon']]['descr'])

    def leave(self, widget=None):
        # widget defaults to None so internal callers can invoke leave()
        # without a menu-item argument
        Gtk.main_quit()

    def timeout_rest(self):
        self.timeout_counter = (self.timeout_counter + 1) % 10
        if self.count_connection_error == 0:
            GLib.idle_add(self.rest_get, '/rest/system/connections')
            GLib.idle_add(self.rest_get, '/rest/system/status')
            GLib.idle_add(self.rest_get, '/rest/system/error')
            if self.timeout_counter == 0 or not self.syncthing_version:
                GLib.idle_add(self.rest_get, '/rest/system/upgrade')
                GLib.idle_add(self.rest_get, '/rest/system/version')
        else:
            GLib.idle_add(self.rest_get, '/rest/system/status')
        return True

    def timeout_events(self):
        if self.count_connection_error == 0:
            GLib.idle_add(self.rest_get, '/rest/events')
        return True

    def open_file_browser(self, menuitem, path):
        if not os.path.isdir(path):
            log.debug('Not a directory, or does not exist: {}'.format(path))
            return
        try:
            proc = subprocess.Popen(['xdg-open', path])
        except Exception as e:
            log.error("Couldn't open file browser for {} ({})".format(path, e))

    def get_full_path(self, folder, item):
        path = ''
        for elem in self.folders:
            if elem['id'] == folder:
                path = elem['path']
        return os.path.join(path, item)
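# A self-contained sketch (editor's addition; names are illustrative) of the
# FuturesSession + add_done_callback pattern that rest_get()/rest_receive_data()
# above rely on: the GET runs on a worker thread and the callback fires with
# the finished future.
from requests_futures.sessions import FuturesSession

def on_done(future):
    try:
        r = future.result()
    except Exception as e:
        print('request failed: {}'.format(e))
        return
    print('{} -> {}'.format(r.url, r.status_code))

session = FuturesSession()
f = session.get('http://localhost:8080/rest/system/status',
                headers={'X-API-Key': 'changeme'}, timeout=9)
f.add_done_callback(on_done)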
Example #51
# handle card data0 low (function head reconstructed; the snippet was truncated)
def cdata0_low(channel):
  global card, cbits
  card <<= 1
  cbits += 1


# handle card data1 low
def cdata1_low(channel):
  global card, cbits
  card = (card << 1) | 1
  cbits += 1


# set card data handlers
GPIO.add_event_detect(pdata0, GPIO.FALLING, callback=cdata0_low)
GPIO.add_event_detect(pdata1, GPIO.FALLING, callback=cdata1_low)


# card read code
print "[reader] ready!"
try:
  while True:
    time.sleep(dread)
    if cbits > 0:
      time.sleep(dread)
      print "[%d bit] - %d" % (cbits, card)
      payload = {"cbits": cbits, "card": card}
      session.post("http://localhost/api/reader/card", payload)
      card = 1
      cbits = 0
except KeyboardInterrupt:
  GPIO.cleanup()
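# Editor's sketch (hedged): session.post() above returns a future whose result
# is never inspected. A done-callback surfaces failed card posts without
# blocking the GPIO read loop:
def report_post(future):
  try:
    r = future.result()
    if not r.ok:
      print('[reader] post failed: %d' % r.status_code)
  except Exception as e:
    print('[reader] post error: %s' % e)

# usage:
#   session.post("http://localhost/api/reader/card", data=payload).add_done_callback(report_post)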
Example #52
class SubmissionMaker:
    def __init__(self, contest_id, username, password):
        # Save parameters
        self.contest_id = contest_id
        self.username = username
        self.password = password
        # Setup URLs
        self.login_url = "http://codeforces.com/enter"
        self.submit_url = "http://codeforces.com/contest/%s/submit" % contest_id
        # Initiate future with a request to the login page (in order to get its CSRF token)
        self.session = FuturesSession()
        self.future = self.session.get(self.login_url, background_callback=self.background_callback)

    def background_callback(self, session, response):
        # Make sure that the original request was successful
        if response.status_code != 200:
            response.submit_csrf_token = None
            return
        # Retrieve the CSRF token from the login page
        try:
            login_csrf_token = utils.retrieve_csrf_token(response.text, {'id': 'enterForm'})
        except Exception:
            response.submit_csrf_token = None
            return
        # Perform a second request to really authenticate
        response2 = session.post(self.login_url, data={
            'action': 'enter',
            'handle': self.username,
            'password': self.password,
            'csrf_token': login_csrf_token,
        }).result()
        # Check whether the authentication was successful
        if response2.status_code != 200:
            response.submit_csrf_token = None
            return
        elif 'Invalid handle or password' in response2.text:
            response.submit_csrf_token = None
            return
        elif 'personal-sidebar' not in response2.text:
            #TODO: log this event, as it probably indicates a change in the codeforces website
            response.submit_csrf_token = None
            return
        # Perform a third request in order to obtain the CSRF token from the submit page
        response3 = session.get(self.submit_url).result()
        if response3.status_code != 200:
            response.submit_csrf_token = None
            return
        # Retrieve the CSRF token from the submit page
        try:
            response.submit_csrf_token = utils.retrieve_csrf_token(response3.text)
        except Exception:
            response.submit_csrf_token = None
            return

    def submit(self, problem_letter, language_id, source_code):
        submit_csrf_token = self.future.result().submit_csrf_token
        if submit_csrf_token is None:
            return {'success': False, 'reason': None}
        response = self.session.post(self.submit_url, data={
            'action': 'submitSolutionFormSubmitted',
            'submittedProblemIndex': problem_letter,
            'programTypeId': language_id,
            'source': source_code,
            'csrf_token': submit_csrf_token,
        }).result()
        if response.status_code != 200:
            return {'success': False, 'reason': None}
        elif 'You have submitted exactly the same code before' in response.text:
            return {'success': False, 'reason': 'you have already sent this code'}
        elif 'submit-form' in response.text:
            return {'success': False, 'reason': None}
        else:
            return {'success': True}
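# A hedged usage sketch (editor's addition; the contest id, credentials, and
# language id are placeholders): the login/CSRF handshake starts in the
# background at construction, and submit() blocks on that future.
if __name__ == '__main__':
    maker = SubmissionMaker('1234', 'my_handle', 'my_password')
    result = maker.submit('A', '31', 'print("hello")')
    print(result)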
Example #53
File: releases.py  Project: jestory/pynab
def process():
    """Helper function to begin processing binaries. Checks
    for 100% completion and will create NZBs/releases for
    each complete release. Will also categorise releases,
    and delete old binaries."""

    # TODO: optimise query usage in this, it's using like 10-15 per release

    binary_count = 0
    added_count = 0

    if config.scan.get('publish', False):
        request_session = FuturesSession()
    else:
        request_session = None

    start = time.time()

    with db_session() as db:
        binary_query = """
            SELECT
                binaries.id, binaries.name, binaries.posted, binaries.total_parts
            FROM binaries
            INNER JOIN (
                SELECT
                    parts.id, parts.binary_id, parts.total_segments, count(*) as available_segments
                FROM parts
                    INNER JOIN segments ON parts.id = segments.part_id
                GROUP BY parts.id
                ) as parts
                ON binaries.id = parts.binary_id
            GROUP BY binaries.id
            HAVING count(*) >= binaries.total_parts AND (sum(parts.available_segments) / sum(parts.total_segments)) * 100 >= {}
            ORDER BY binaries.posted DESC
        """.format(config.postprocess.get('min_completion', 100))

        # pre-cache blacklists and group them
        blacklists = db.query(Blacklist).filter(Blacklist.status == True).all()
        for blacklist in blacklists:
            db.expunge(blacklist)

        # cache categories
        parent_categories = {}
        for category in db.query(Category).all():
            parent_categories[category.id] = category.parent.name if category.parent else category.name

        # for interest's sakes, memory usage:
        # 38,000 releases uses 8.9mb of memory here
        # no real need to batch it, since this will mostly be run with
        # < 1000 releases per run
        for completed_binary in engine.execute(binary_query).fetchall():
            # some optimisations here. we used to take the binary id and load it
            # then compare binary.name and .posted to any releases
            # in doing so, we loaded the binary into the session
            # this meant that when we deleted it, it didn't cascade
            # we had to submit many, many delete queries - one per segment/part
            # by including name/posted in the big query, we don't load that much data
            # but it lets us check for a release without another query, and means
            # that we cascade delete when we clear the binary

            # first we check if the release already exists
            r = db.query(Release).filter(Release.name == completed_binary[1]).filter(
                Release.posted == completed_binary[2]
            ).first()
            if r:
                # if it does, we have a duplicate - delete the binary
                db.query(Binary).filter(Binary.id == completed_binary[0]).delete()
            else:
                # get an approx size for the binary without loading everything
                # if it's a really big file, we want to deal with it differently
                binary = db.query(Binary).filter(Binary.id == completed_binary[0]).first()

                # this is an estimate, so it doesn't matter too much
                # 1 part nfo, 1 part sfv or something similar, so ignore two parts
                # take an estimate from the middle parts, since the first/last
                # have a good chance of being something tiny
                # we only care if it's a really big file
                # abs in case it's a 1 part release (abs(1 - 2) = 1)
                # int(/2) works fine (int(1/2) = 0, array is 0-indexed)
                est_size = (abs(binary.total_parts - 2) *
                            binary.parts[int(binary.total_parts / 2)].total_segments *
                            binary.parts[int(binary.total_parts / 2)].segments[0].size)

                oversized = est_size > config.postprocess.get('max_process_size', 10 * 1024 * 1024 * 1024)

                if oversized and not config.postprocess.get('max_process_anyway', True):
                    log.debug('release: [{}] - removed (oversized)'.format(binary.name))
                    db.query(Binary).filter(Binary.id == completed_binary[0]).delete()
                    db.commit()
                    continue

                if oversized:
                    # for giant binaries, we do it differently
                    # lazyload the segments in parts and expunge when done
                    # this way we only have to store binary+parts
                    # and one section of segments at one time
                    binary = db.query(Binary).options(
                        subqueryload('parts'),
                        lazyload('parts.segments'),
                    ).filter(Binary.id == completed_binary[0]).first()
                else:
                    # otherwise, start loading all the binary details
                    binary = db.query(Binary).options(
                        subqueryload('parts'),
                        subqueryload('parts.segments'),
                        Load(Part).load_only(Part.id, Part.subject, Part.segments),
                    ).filter(Binary.id == completed_binary[0]).first()

                blacklisted = False
                for blacklist in blacklists:
                    if regex.search(blacklist.group_name, binary.group_name):
                        # we're operating on binaries, not releases
                        field = 'name' if blacklist.field == 'subject' else blacklist.field
                        if regex.search(blacklist.regex, getattr(binary, field)):
                            log.debug('release: [{}] - removed (blacklisted: {})'.format(binary.name, blacklist.id))
                            db.query(Binary).filter(Binary.id == binary.id).delete()
                            db.commit()
                            blacklisted = True
                            break

                if blacklisted:
                    continue

                binary_count += 1

                release = Release()
                release.name = binary.name
                release.posted = binary.posted
                release.posted_by = binary.posted_by
                release.regex_id = binary.regex_id
                release.grabs = 0

                # this counts segment sizes, so we can't use it for large releases
                # use the estimate for min_size and firm it up later during postproc
                if oversized:
                    release.size = est_size
                else:
                    release.size = binary.size()

                # check against minimum size for this group
                undersized = False
                for size, groups in config.postprocess.get('min_size', {}).items():
                    if binary.group_name in groups:
                        if release.size < size:
                            undersized = True
                            break

                if undersized:
                    log.debug('release: [{}] - removed (smaller than minimum size for group)'.format(
                        binary.name
                    ))
                    db.query(Binary).filter(Binary.id == binary.id).delete()
                    db.commit()
                    continue

                # check to make sure we have over the configured minimum files
                # this one's okay for big releases, since we're only looking at part-level
                rars = []
                rar_count = 0
                zip_count = 0
                nzb_count = 0

                for part in binary.parts:
                    if pynab.nzbs.rar_part_regex.search(part.subject):
                        rar_count += 1
                    if pynab.nzbs.rar_regex.search(part.subject) and not pynab.nzbs.metadata_regex.search(part.subject):
                        rars.append(part)
                    if pynab.nzbs.zip_regex.search(part.subject) and not pynab.nzbs.metadata_regex.search(part.subject):
                        zip_count += 1
                    if pynab.nzbs.nzb_regex.search(part.subject):
                        nzb_count += 1

                # handle min_archives
                # keep, nzb, under
                status = 'keep'
                archive_rules = config.postprocess.get('min_archives', 1)
                if isinstance(archive_rules, dict):
                    # it's a dict
                    if binary.group_name in archive_rules:
                        group = binary.group_name
                    else:
                        group = '*'

                    # make sure the catchall exists
                    if group not in archive_rules:
                        archive_rules[group] = 1

                    # found a special rule
                    if rar_count + zip_count < archive_rules[group]:
                        if nzb_count > 0:
                            status = 'nzb'
                        else:
                            status = 'under'
                else:
                    # it's an integer, globalise that shit yo
                    if rar_count + zip_count < archive_rules:
                        if nzb_count > 0:
                            status = 'nzb'
                        else:
                            status = 'under'

                # if it's an nzb or we're under, kill it
                if status in ['nzb', 'under']:
                    if status == 'nzb':
                        log.debug('release: [{}] - removed (nzb only)'.format(binary.name))
                    elif status == 'under':
                        log.debug('release: [{}] - removed (less than minimum archives)'.format(binary.name))

                    db.query(Binary).filter(Binary.id == binary.id).delete()
                    db.commit()
                    continue

                # clean the name for searches
                release.search_name = clean_release_name(binary.name)

                # assign the release group
                release.group = db.query(Group).filter(Group.name == binary.group_name).one()

                # give the release a category
                release.category_id = pynab.categories.determine_category(binary.name, binary.group_name)

                # create the nzb, store it and link it here
                # no need to do anything special for big releases here
                # if it's set to lazyload, it'll kill rows as they're used
                # if it's a small release, it'll go straight from memory
                nzb = pynab.nzbs.create(release.search_name, parent_categories[release.category_id], binary)

                if nzb:
                    added_count += 1

                    log.info('release: [{}]: added release ({} rars, {} rarparts)'.format(
                        release.search_name,
                        len(rars),
                        rar_count
                    ))

                    release.nzb = nzb

                    # save the release
                    db.add(release)

                    try:
                        db.flush()
                    except Exception as e:
                        # this sometimes raises if we get a duplicate
                        # this requires a post of the same name at exactly the same time (down to the second)
                        # pretty unlikely, but there we go
                        log.debug('release: [{}]: duplicate release, discarded'.format(release.search_name))
                        db.rollback()

                    # delete processed binaries
                    db.query(Binary).filter(Binary.id == binary.id).delete()

                    # publish processed releases? (fire-and-forget: the POST
                    # futures are intentionally not awaited)
                    if config.scan.get('publish', False):
                        futures = [request_session.post(host, data=to_json(release)) for host in
                                   config.scan.get('publish_hosts')]

            db.commit()

    end = time.time()
    log.info('release: added {} out of {} binaries in {:.2f}s'.format(
        added_count,
        binary_count,
        end - start
    ))
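# Editor's sketch (hedged, not from the original source): the publish step in
# process() discards its futures. A variant that logs failed publishes without
# blocking the scan loop:
def publish_release(request_session, hosts, payload):
    def done(future):
        try:
            r = future.result()
            if not r.ok:
                log.warning('publish failed: {} {}'.format(r.url, r.status_code))
        except Exception as e:
            log.warning('publish error: {}'.format(e))

    for host in hosts:
        request_session.post(host, data=payload).add_done_callback(done)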
Example #54
class MozDefMessage(object):
    # Supported message types
    MSGTYPE_NONE            = 0
    MSGTYPE_EVENT           = 1
    MSGTYPE_COMPLIANCE      = 2
    MSGTYPE_VULNERABILITY   = 3
    MSGTYPE_ASSETHINT       = 4
    MSGTYPE_RRA             = 5

    def __init__(self, url):
        self._msgtype = self.MSGTYPE_NONE

        self.log = {}
        self._sendlog = {}

        self._httpsession = Session()
        self._httpsession.trust_env = False
        self._url = url

        # Set some default options
        self._send_to_syslog = False
        self._send_to_sqs = False
        self._syslog_only = False
        self._fire_and_forget = False
        self._verify_certificate = False
        self._verify_path = None

    def validate(self):
        return True

    def validate_log(self):
        return True

    def set_verify(self, f):
        self._verify_certificate = f

    def set_verify_path(self, p):
        self._verify_path = p

    def set_fire_and_forget(self, f):
        self._fire_and_forget = f

    def set_sqs_queue_name(self, f):
        self._sqs_queue_name = f

    def set_sqs_aws_account_id(self, f):
        self._sqs_aws_account_id = f

    def set_sqs_region(self, f):
        self._sqs_region = f

    def set_send_to_sqs(self, f):
        self._send_to_sqs = f

    def set_send_to_syslog(self, f, only_syslog=False):
        self._send_to_syslog = f
        self._syslog_only = only_syslog

    def syslog_convert(self):
        raise MozDefError('message type does not support syslog conversion')

    def construct(self):
        raise MozDefError('subclass of MozDefMessage must override construct()')

    def _httpsession_cb(self, session, response):
        # requests-futures invokes background_callback with the finished
        # Response object, not a future, so read status_code directly
        if response.status_code != 200:
            if not self._fire_and_forget:
                raise MozDefError('POST failed with code %r' %
                                  response.status_code)

    def send_syslog(self):
        raise MozDefError('message type does not support syslog submission')

    def send(self):
        if not self.validate():
            raise MozDefError('message failed validation')
        self.construct()
        if not self.validate_log():
            raise MozDefError('message failed post construct validation')

        if self._send_to_syslog:
            self.send_syslog()
            if self._syslog_only:
                return

        if self._send_to_sqs:
            self.send_sqs()
            return

        vflag = self._verify_certificate
        if vflag:
            if self._verify_path != None:
                vflag = self._verify_path

        buf = json.dumps(self._sendlog, sort_keys=True, indent=4)
# Compatibility notes:
# When updating either path (futures_loaded or not loaded) please ensure both have the same functionality
# future_loaded is used by Python 2, the non-loaded version if for Python 3
        if futures_loaded:
            self._httpsession.post(self._url, buf,
                verify=vflag,
                background_callback=self._httpsession_cb)
        else:
            response = self._httpsession.post(self._url, buf,
                verify=vflag)
            if not response.ok:
                if not self._fire_and_forget:
                    raise MozDefError('POST failed with code %r msg %s' % \
                        (response.status_code, response.text))
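
Because construct() raises in the base class, MozDefMessage is only usable through a subclass. A minimal hypothetical sketch of how the hooks fit together; the MyEvent class and its summary field are illustrative, not part of the real mozdef_client API:

class MyEvent(MozDefMessage):  # hypothetical subclass, for illustration only
    def __init__(self, url, summary):
        MozDefMessage.__init__(self, url)
        self._msgtype = self.MSGTYPE_EVENT
        self.log['summary'] = summary

    def construct(self):
        # Copy the user-facing log dict into the payload that send() serializes.
        self._sendlog = dict(self.log)

msg = MyEvent('https://mozdef.example.com/events', 'test event')  # placeholder URL
msg.set_verify(True)
msg.set_fire_and_forget(True)
msg.send()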
Example #55
        """ Function create
        Create an new object

        @param obj: object name ('hosts', 'puppetclasses'...)
        @param payload: the dict of the payload
        @param async: should this request be async, if true use
                        return.result() to get the response
        @return RETURN: the server response
        """
        self.url = self.base_url + obj
        self.method = 'POST'
        self.payload = json.dumps(payload)
        if async:
            self.method = 'POST(Async)'
            session = FuturesSession()
            self.resp = session.post(url=self.url, auth=self.auth,
                                headers=self.headers, data=self.payload)
            return self.resp
        else:
            self.resp = requests.post(url=self.url, auth=self.auth,
                                      headers=self.headers,
                                      data=self.payload)
            return self.__process_resp__(obj)

    @log
    def delete(self, obj, id):
        """ Function delete
        Delete an object by id

        @param obj: object name ('hosts', 'puppetclasses'...)
        @param id: the id of the object (name or id)
        @return RETURN: the server response
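
Going by the create docstring above, the async path hands back a future that the caller resolves itself. A hypothetical usage sketch; the ForemanAPI wrapper name and its constructor arguments are assumptions, and 'async' as a keyword reflects the py2-era API (it became a reserved word in Python 3.7):

api = ForemanAPI('https://foreman.example.com/api/', ('admin', 'secret'))  # hypothetical wrapper
future = api.create('hosts', {'host': {'name': 'node1.example.com'}}, async=True)
response = future.result()   # block only when the answer is actually needed
print(response.status_code)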
Example #56
class MaltASync(controller.Master):
    def __init__(self, server, workers):
        controller.Master.__init__(self, server)
        self.letsGo = True
        self.Session = FuturesSession(max_workers=workers)  # honor the workers argument instead of a hardcoded 20
        self.futures = {}
        #self.manager = multiprocessing.Manager()
        #self.futures = self.manager.dict()

    def run(self):
        try:
            threading.Thread(target=self.qWatcher).start()
            return controller.Master.run(self)
        except KeyboardInterrupt:
            self.letsGo = False
            self.shutdown()

    def qWatcher(self):
        lastL, lastD = -1, -1
        print "[+] Starting queue monitor:"
        while self.letsGo:
            time.sleep(4)
            n = 0 
            l = len(self.futures)
            for k,v in self.futures.iteritems():
                if v.done():
                    n+=1
            if lastL != l or lastD != n:
                print "    Transforms in Queue: %d. Completed Transforms: %d" %(l, n)
                lastL = l
                lastD = n

    def handle_request(self, flow):
        if "TransformToRun" in  flow.request.url:
            req_headers = dict(flow.request.headers.fields)
            req_url = flow.request.url
            req_data = flow.request.data.content
        
            m = MaltegoMsg(req_data)
            TRX = MaltegoTransform()
            if not m.getProperty("_uid"):
                uid = str(uuid.uuid4())
                NewEnt = TRX.addEntity(m.Type, m.Value)
                for k,v in m.Properties.iteritems():
                    NewEnt.addProperty(k, k, "nostrict", v)
                NewEnt.addProperty("_uid", "_uid", "nostrict", uid)
                #NewEnt.setNote(uid)
                data = TRX.returnOutput()
    
                #Add to Queue
                future = self.Session.post(req_url, headers=req_headers, data=req_data)
                self.futures[uid] = future
            else:
                #Check status of request
                uid = m.getProperty("_uid")
                futReq = self.futures.get(uid)
                if futReq and futReq.done():
                    del self.futures[uid]
                    data = futReq.result().text
                else:
                    data = TRX.returnOutput()

            resp = HTTPResponse(
                    "HTTP/1.1", 200, "OK",
                    Headers(Content_Type="text/xml;charset=UTF-8"),
                    data)
            flow.reply(resp)
        else:
            # Pass non-transform traffic through untouched; in the original,
            # 'data' was undefined on this path and raised a NameError.
            flow.reply()
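
The idea worth isolating from this proxy is non-blocking polling: qWatcher and handle_request only ever call done(), which inspects state, and defer result(), which blocks, until a future has finished. A condensed standalone sketch of the same pattern (the URL is a placeholder):

from requests_futures.sessions import FuturesSession

session = FuturesSession(max_workers=4)
pending = {'job-1': session.get('http://example.com/slow')}

# Harvest only what has finished; result() returns instantly once done() is True.
finished = {uid: fut.result() for uid, fut in list(pending.items()) if fut.done()}
for uid in finished:
    del pending[uid]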
Example #57
File: client.py  Project: xme/DoFler
class DoflerClient(object):
    """
    DoFler API Client Class.  This class handles all client-side API calls to 
    the DoFler service, regardless of whether the service is remote or local.
    """

    def __init__(self, host, port, username, password, ssl=False, anon=True):
        self.host = host
        self.port = port
        self.ssl = ssl
        self.anonymize = anon
        self.username = username
        self.opener = FuturesSession(max_workers=10)
        self.login(username, password)
        if self.host in ["localhost", "127.0.0.1"]:
            self.engine = create_engine(config.config.get("Database", "db"), echo=False)
            self.Session = sessionmaker(bind=self.engine)

    def call(self, url, data, files={}):
        """
        This is the core function that calls the API.  All API calls route
        through here.

        :param url: URL of Call
        :param data: Data to be sent with call

        :type url: str 
        :type data: dictionary, str 

        :return: Future wrapping the requests Response object
        """
        ssl = {True: "https://", False: "http://"}
        location = "%s%s:%s%s" % (ssl[self.ssl], self.host, self.port, url)
        log.debug("CLIENT: %s" % location)
        return self.opener.post(location, data=data, files=files)

    def login(self, username, password):
        """
        Login Function.

        :param username: username/sensorname
        :param password: username/sensorname password

        :type username: str
        :type password: str

        :return: None
        """
        self.call("/post/login", {"username": username, "password": password}).result()

    def account(self, username, password, info, proto, parser):
        """
        Account API call.  This function handles adding accounts into the
        database.

        :param username: Account Username
        :param password: Account Password
        :param info: General Information Field
        :param proto: Discovered Protocol
        :param parser: Parser/Agent that discovered the account

        :type username: str 
        :type password: str 
        :type info: str 
        :type proto: str 
        :type parser: str

        :return: None
        """
        # If the anonymization bit is set, then we need to hide the password.
        # We will still display the first 3 characters, however will replace
        # the rest of the password with a random number of asterisks.
        if self.anonymize:
            if len(password) >= 3:
                password = "******" % (password[:3], "*" * (randint(2, 8)))
        if self.host in ["localhost", "127.0.0.1"]:
            s = self.Session()
            # Check if the account already exists
            # Tip: don't check for the password (if anonymized)
            try:
                account = s.query(Account).filter_by(username=username, proto=proto, info=info).one()
            except:
                s.add(Account(username, password, info, proto, parser))
                s.commit()
                log.debug("DATABASE: Added Account: %s:%s:%s:%s:%s" % (username, password, info, proto, parser))
        else:
            self.call(
                "/post/account",
                {"username": username, "password": password, "info": info, "proto": proto, "parser": parser},
            )

    def image(self, filename):
        """
        Image API Call.  Uploads the image into the database.

        :param filename: Path of the image file to upload; its extension is
                         stored as the file type.

        :type filename: str

        :return: None
        """
        if os.path.exists(filename):
            if self.host in ["localhost", "127.0.0.1"]:
                with open(filename, "rb") as imagefile:
                    data = imagefile.read()
                md5 = md5hash(data)
                s = self.Session()
                if s.query(Image).filter_by(md5sum=md5).count() > 0:
                    image = s.query(Image).filter_by(md5sum=md5).one()
                    image.timestamp = int(time.time())
                    image.count += 1
                    s.merge(image)
                    log.debug("DATABASE: Updated Image %s" % image.md5sum)
                else:
                    ftype = filename.split(".")[-1]
                    image = Image(int(time.time()), ftype, data, md5)
                    s.add(image)
                    log.debug("DATABASE: Added Image %s" % image.md5sum)
                s.commit()
                s.close()
            else:
                try:
                    self.call("/post/image", {"filetype": filename.split(".")[-1]}, {"file": open(filename, "rb")})
                except:
                    log.error("API: Upload Failed. %s=%skb" % (filename, os.path.getsize(filename) / 1024))
        else:
            log.error("API: %s doesnt exist" % filename)

    def stat(self, proto, count):
        """
        Statistical API call.  Sends the 1 minute count of packets for a given
        protocol to the backend database.

        :param proto: Protocol name
        :param count: Packet count

        :type proto: str 
        :type count: int 

        :return: None
        """
        if self.host in ["localhost", "127.0.0.1"]:
            s = self.Session()
            s.add(Stat(proto, self.username, count))
            s.commit()
            s.close()
            log.debug("DATABASE: Added Stat %s:%s:%s" % (proto, count, self.username))
        else:
            self.call("/post/stat", {"proto": proto, "count": count, "username": self.username})

    def reset(self, env):
        """
        Reset API call.  Sends a reset code to the API for the given type of
        data. 

        :param env: Environment Type.  Valid types are: images, accounts
        :type env: str 
        :return: None 
        """
        self.call("/post/reset", {"type": env})

    def services(self):
        """
        Gets the current service statuses. 
        """
        return json.loads(self.call("/post/services", {"action": "none", "parser": "none"}).result().content)

    def start(self, name):
        """
        Starts the defined service. 
        """
        return json.loads(self.call("/post/services", {"action": "Start", "parser": name}).result().content)

    def stop(self, name):
        """
        Stops the defined service. 
        """
        return json.loads(self.call("/post/services", {"action": "Stop", "parser": name}).result().content)
Example #58
File: client.py  Project: blha303/DoFler
class DoflerClient(object):
    '''
    DoFler API Client Class.  This class handles all client-side API calls to 
    the DoFler service, regardless of whether the service is remote or local.
    '''
    def __init__(self, host, port, username, password, ssl=False, anon=True):
        self.host = host
        self.port = port
        self.ssl = ssl
        self.anonymize = anon
        self.username = username
        self.opener = FuturesSession(max_workers=10)
        self.login(username, password)
        if self.host in ['localhost', '127.0.0.1']:
            self.engine = create_engine(config.config.get('Database', 'db'), echo=False)
            self.Session = sessionmaker(bind=self.engine)


    def call(self, url, data, files={}):
        '''
        This is the core function that calls the API.  All API calls route
        through here.

        :param url: URL of Call
        :param data: Data to be sent with call

        :type url: str 
        :type data: dictionary, str 

        :return: Future wrapping the requests Response object
        '''
        ssl = {
            True: 'https://',
            False: 'http://'
        }
        location = '%s%s:%s%s' % (ssl[self.ssl], self.host, self.port, url)
        log.debug('CLIENT: %s' % location)
        return self.opener.post(location, data=data, files=files)


    def login(self, username, password):
        '''
        Login Function.

        :param username: username/sensorname
        :param password: username/sensorname password

        :type username: str
        :type password: str

        :return: None
        '''
        self.call('/post/login', {
            'username': username,
            'password': password
        }).result()


    def account(self, username, password, info, proto, parser):
        '''
        Account API call.  This function handles adding accounts into the
        database.

        :param username: Account Username
        :param password: Account Password
        :param info: General Information Field
        :param proto: Discovered Protocol
        :param parser: Parser/Agent that discovered the account

        :type username: str 
        :type password: str 
        :type info: str 
        :type proto: str 
        :type parser: str

        :return: None
        '''
        # If the anonymization bit is set, then we need to hide the password.
        # We will still display the first 3 characters, but replace the rest
        # of the password with asterisks.
        if self.anonymize:
            if len(password) >= 3:
                password = '%s%s' % (password[:3], '*' * (len(password) - 3))
        if self.host in ['localhost', '127.0.0.1']:
            s = self.Session()
            s.add(Account(username, password, info, proto, parser))
            s.commit()
            log.debug('DATABASE: Added Account: %s:%s:%s:%s:%s' % (username,
                password, info, proto, parser))
        else:
            self.call('/post/account', {
                'username': username,
                'password': password,
                'info': info,
                'proto': proto,
                'parser': parser,
            })


    def image(self, filename):
        '''
        Image API Call.  Uploads the image into the database.

        :param filename: Path of the image file to upload; its extension is
                         stored as the file type.

        :type filename: str

        :return: None
        '''
        if os.path.exists(filename):
            if self.host in ['localhost', '127.0.0.1']:
                with open(filename, 'rb') as imagefile:
                    data = imagefile.read()
                md5 = md5hash(data)
                s = self.Session()
                if s.query(Image).filter_by(md5sum=md5).count() > 0:
                    image = s.query(Image).filter_by(md5sum=md5).one()
                    image.timestamp = int(time.time())
                    image.count += 1
                    s.merge(image)
                    log.debug('DATABASE: Updated Image %s' % image.md5sum)
                else:
                    ftype = filename.split('.')[-1]
                    image = Image(int(time.time()), ftype, data, md5)
                    s.add(image)
                    log.debug('DATABASE: Added Image %s' % image.md5sum)
                s.commit()
                s.close()
            else:
                try:
                    self.call('/post/image', {'filetype': filename.split('.')[-1]},
                                             {'file': open(filename, 'rb')})
                except:
                    log.error('API: Upload Failed. %s=%skb' % (filename, 
                                                os.path.getsize(filename) / 1024))
        else:
            log.error("API: %s doesn't exist" % filename)


    def stat(self, proto, count):
        '''
        Statistical API call.  Sends the 1 minute count of packets for a given
        protocol to the backend database.

        :param proto: Protocol name
        :param count: Packet count

        :type proto: str 
        :type count: int 

        :return: None
        '''
        if self.host in ['localhost', '127.0.0.1']:
            s = self.Session()
            s.add(Stat(proto, self.username, count))
            s.commit()
            s.close()
            log.debug('DATABASE: Added Stat %s:%s:%s' % (proto, count, self.username))
        else:
            self.call('/post/stat', {
                'proto': proto, 
                'count': count, 
                'username': self.username
            })


    def reset(self, env):
        '''
        Reset API call.  Sends a reset code to the API for the given type of
        data. 

        :param env: Environment Type.  Valid types are: images, accounts
        :type env: str 
        :return: None 
        '''
        self.call('/post/reset', {'type': env})


    def services(self):
        '''
        Gets the current service statuses. 
        '''
        return json.loads(self.call('/post/services', {
                'action': 'none',
                'parser': 'none',
        }).result().content)


    def start(self, name):
        '''
        Starts the defined service. 
        '''
        return json.loads(self.call('/post/services', {
                'action': 'Start', 
                'parser': name
        }).result().content)


    def stop(self, name):
        '''
        Stops the defined service. 
        '''
        return json.loads(self.call('/post/services', {
            'action': 'Stop', 
            'parser': name
        }).result().content)
Example #59
class TweetStream(StreamListener):
    def __init__(self, api, cfg):
        super(TweetStream, self).__init__()
        self.gsession = FuturesSession(max_workers=10)

        self.mapi = api
        self.cfg = cfg
        self.install_hooks()

        self.auth = authenticate(cfg)
        self.twapi = API(auth_handler=self.auth)
        self.id_cache = {}

        self.load_following()
        self.database = connect_db()
        self.db_lock = threading.RLock()

        self.sthread = None
        self.filter_others = 1
        self.restart_stream()

    def install_hooks(self):
        self.mapi.hook_raw("KICK", self.on_kick)
        self.mapi.hook_command(midori.CONTEXT_CHANNEL, self.api_follow,
                              predicate=lambda cmd: cmd.message.startswith("*follow"))
        self.mapi.hook_command(midori.CONTEXT_CHANNEL, self.api_unfollow,
                              predicate=lambda cmd: cmd.message.startswith("*ufollow"))
        self.mapi.hook_command(midori.CONTEXT_CHANNEL, self.api_silence,
                              predicate=lambda cmd: cmd.message.startswith("*silence"))
        self.mapi.hook_command(midori.CONTEXT_CHANNEL, self.api_usilence,
                              predicate=lambda cmd: cmd.message.startswith("*usilence"))
        self.mapi.hook_command(midori.CONTEXT_CHANNEL, self.api_spamon,
                              predicate=lambda cmd: cmd.message.startswith("*nofilter"))
        self.mapi.hook_command(midori.CONTEXT_CHANNEL, self.api_spamoff,
                              predicate=lambda cmd: cmd.message.startswith("*yesfilter"))
        self.mapi.hook_command(midori.CONTEXT_CHANNEL, self.api_arc,
                              predicate=lambda cmd: cmd.message.startswith("*arc"))
        self.mapi.hook_command(midori.CONTEXT_CHANNEL, self.api_diagnostic,
                              predicate=lambda cmd: cmd.message.startswith("*diagnostics"))
        self.mapi.hook_command(midori.CONTEXT_CHANNEL, self.api_helpinfo,
                              predicate=lambda cmd: cmd.message.startswith("*help"))
        self.mapi.hook_command(midori.CONTEXT_CHANNEL, self.api_get_tweet)

    def load_following(self):
        try:
            with open("follows.json", "r") as f:
                self.follow_ids = json.load(f)
        except:
            print("No following.")
            self.follow_ids = []

        try:
            with open("silence.json", "r") as f:
                self.silenced_ids = json.load(f)
        except:
            print("No silenced.")
            self.silenced_ids = []

    def save_follows(self):
        with open("follows.json", "w") as f:
            json.dump(self.follow_ids, f)
        with open("silence.json", "w") as f:
            json.dump(self.silenced_ids, f)

    def restart_stream(self):
        if self.sthread:
            self.sthread.stop()
        self.sthread = StreamThread(self)
        self.sthread.daemon = 1
        self.sthread.start()

    def api_helpinfo(self, cmd):
        self.mapi.privmsg(cmd.channel, "To show twitter user's tweets: *follow user | Top stop showing twitter user's tweets *ufollow user | To archive.today (and attempt waybacking behind the scenes ) something *arc URL")

    def api_follow(self, cmd):
        user = self.m_get_userid(cmd.message[8:].strip())
        if not user: #or cmd.channel.name != "#nasa_surveilance_van_no.7":
            self.mapi.privmsg(cmd.channel, "An argument is required (*follow user)")
        else:
            self.follow_ids.append(user)
            self.save_follows()
            self.mapi.privmsg(cmd.channel, "Added to the stalking list. Restarting stream...")
            self.restart_stream()

    def api_unfollow(self, cmd):
        user = self.m_get_userid(cmd.message[9:].strip())
        if not user: #or cmd.channel.name != "#nasa_surveilance_van_no.7":
            self.mapi.privmsg(cmd.channel, "An argument is required (*ufollow user)")
        else:
            try:
                int(user)
                self.follow_ids.remove(user)
            except ValueError:
                self.mapi.privmsg(cmd.channel, "Not in list.")
            else:
                self.save_follows()
                self.mapi.privmsg(cmd.channel, "Removed from the stalking list. Restarting stream...")
                self.restart_stream()

    def api_silence(self, cmd):
        user = self.m_get_userid(cmd.message[9:].strip())
        if not user: #or cmd.channel.name != "#nasa_surveilance_van_no.7":
            self.mapi.privmsg(cmd.channel, "An argument is required (*silence user)")
        else:
            self.silenced_ids.append(user)
            self.save_follows()
            self.mapi.privmsg(cmd.channel, "Silenced. Use '*usilence <name>' to un-silence later.")
            self.restart_stream()

    def api_usilence(self, cmd):
        user = self.m_get_userid(cmd.message[10:].strip())
        if not user: #or cmd.channel.name != "#nasa_surveilance_van_no.7":
            self.mapi.privmsg(cmd.channel, "An argument is required (*usilence user)")
        else:
            try:
                int(user)
                self.silenced_ids.remove(user)
            except ValueError:
                self.mapi.privmsg(cmd.channel, "Not in list.")
            else:
                self.save_follows()
                self.mapi.privmsg(cmd.channel, "Un-silenced.")
                self.restart_stream()

    def api_spamon(self, cmd):
        self.filter_others = 0

    def api_spamoff(self, cmd):
        self.filter_others = 1

    def api_get_tweet(self, cmd):
        statuses = re.findall("http(?:s)?://twitter.com/[a-z0-9\\-_]+/status(?:es)?/([0-9]+)",
                              cmd.message.lower())
        if not statuses:
            return
        statuses = set(statuses)

        for id_ in statuses:
            try:
                tweet = self.twapi.get_status(id=id_)
            except:
                continue

            the_url = "https://twitter.com/{0}/status/{1}".format(tweet.author.screen_name, tweet.id_str)
            urls = self.m_tweet_archive_sync(the_url)
            self.midori_push(tweet, urls, cmd.channel)

    def api_arc(self, cmd):
        to_arc = cmd.message[4:].strip()
        to_arc = to_arc.split(" ", 1)[0]

        if re.match("http(?:s)?://twitter.com/[a-z0-9\\-_]+/status(?:es)?/([0-9]+)", to_arc):
            self.mapi.privmsg(cmd.channel, "Simply linking a tweet is enough to get it archived.")
            return

        if not to_arc:
            self.mapi.privmsg(cmd.channel, "An argument is required. (*arc https://example.com...)")
            return

        links = self.m_archive_sync(to_arc)
        if links:
            self.mapi.privmsg(cmd.channel, "{0}: {1}".format(cmd.sender.nick, ", ".join(links)))
        else:
            self.mapi.privmsg(cmd.channel, "Archive failed; probably an invalid URL.")

    def api_diagnostic(self, cmd):
        if cmd.sender.hostmask != self.cfg["owner_host"]:
            self.mapi.notice(cmd.sender, "You need to authenticate with your NASA employee ID and passphrase before doing that.")
        else:
            self.mapi.notice(cmd.sender, "{0}".format(str(self.follow_ids)))
            ul = self.m_convert_ids_to_users(self.follow_ids)
            self.mapi.notice(cmd.sender, "{0}".format(str([u.screen_name for u in ul])))

            self.mapi.notice(cmd.sender, "{0}".format(str(self.silenced_ids)))
            ul = self.m_convert_ids_to_users(self.silenced_ids)
            self.mapi.notice(cmd.sender, "{0}".format(str([u.screen_name for u in ul])))

    def on_kick(self, command):
        if command.args[1] == self.mapi.nick and command.args[0] == self.cfg["channel"]:
            self.mapi.join(self.cfg["channel"])

    def m_archive_sync(self, url):
        future_at = self.gsession.post(ARCHIVE_TODAY_URL, data={
            "url": url,
        }, headers={
            "Referer": "https://archive.is",
            "Connection": "close",
        }, timeout=30, verify=True, allow_redirects=False)

        #future_pe = self.gsession.post("http://www.peeep.us/upload.php", data={
        #    "r_url": url,
        #}, headers={
        #    "Content-Type": "application/x-www-form-urlencoded",
        #}, timeout=30, allow_redirects=False)

        # blackhole archive; todo: put it in db
        self.gsession.head("http://web.archive.org/save/{0}".format(url), headers={
            "Referer": "https://archive.org/web/"
        }, timeout=30)

        response_at = future_at.result()
        #response_pe = future_pe.result()

        arc_url = []
        if "Refresh" in response_at.headers:
            arc_url.append(response_at.headers["Refresh"][6:])
            with self.db_lock:
                self.database.execute("INSERT INTO links VALUES (?, ?)", (url, response_at.headers["Refresh"][6:]))
                self.database.commit()
        elif "Location" in response_at.headers:
            arc_url.append(response_at.headers["Location"])
            with self.db_lock:
                self.database.execute("INSERT INTO links VALUES (?, ?)", (url, response_at.headers["Location"]))
                self.database.commit()

        #if "Location" in response_pe.headers:
        #    arc_url.append("http://peeep.us" + response_pe.headers["Location"])
        #    with self.db_lock:
        #        self.database.execute("INSERT INTO links VALUES (?, ?)", (url, "http://peeep.us" + response_pe.headers["Location"]))
        #        self.database.commit()

        return arc_url or None

    def m_tweet_archive_sync(self, url):
        future_at = self.gsession.post(ARCHIVE_TODAY_URL, data={
            "url": url,
        }, headers={
            "Referer": "https://archive.is",
            "Connection": "close",
        }, timeout=30, verify=True, allow_redirects=False)

        future_ts = self.gsession.get("http://tweetsave.com/api.php?mode=save&tweet=", params={
            "mode": "save",
            "tweet": url
        }, timeout=30)

        response = future_ts.result()
        response_at = future_at.result()

        arc_url = []
        try:
            payload = response.json()
            if "redirect" in payload and payload["status"] == "OK":
                arc_url.append(payload["redirect"])
                with self.db_lock:
                    self.database.execute("INSERT INTO links VALUES (?, ?)", (url, payload["redirect"]))
                    self.database.commit()
        except:
            pass

        if "Refresh" in response_at.headers:
            arc_url.append(response_at.headers["Refresh"][6:])
            with self.db_lock:
                self.database.execute("INSERT INTO links VALUES (?, ?)", (url, response_at.headers["Refresh"][6:]))
                self.database.commit()
        elif "Location" in response_at.headers:
            arc_url.append(response_at.headers["Location"])
            with self.db_lock:
                self.database.execute("INSERT INTO links VALUES (?, ?)", (url, response_at.headers["Location"]))
                self.database.commit()

        return arc_url

    def m_convert_ids_to_users(self, l):
        return self.twapi.lookup_users(user_ids=l)

    def m_get_userid(self, name):
        name = name.lower()
        if name in self.id_cache:
            return self.id_cache[name]
        else:
            try:
                user = self.twapi.get_user(screen_name=name)
            except:
                return None
            self.id_cache[name] = user.id_str
            return user.id_str

    def midori_push(self, tweet, arc, channel, the_url=None):
        tw = HTML_PARSER.unescape(tweet.text).replace("\n", " ")
        for url in tweet.entities["urls"]:
            tw = tw.replace(url["url"], url["expanded_url"])
        if arc:
            text = (u"@{0}: \"{1}\" {3}({2})".format(
                    tweet.author.screen_name, tw, ", ".join(arc),
                    "({0}) ".format(the_url) if the_url else ""))
        else:
            text = (u"@{0}: \"{1}\" ({2})".format(
                    tweet.author.screen_name, tw, the_url))
        self.mapi.privmsg(channel, text)

    def on_status(self, tweet):
        if self.filter_others and tweet.author.id_str not in self.follow_ids:
            return True
        the_url = "https://twitter.com/{0}/status/{1}".format(tweet.author.screen_name, tweet.id_str)
        if tweet.author.id_str not in self.silenced_ids:
            self.midori_push(tweet, None, self.cfg["channel"], the_url)
        links = self.m_tweet_archive_sync(the_url)
        if tweet.author.id_str not in self.silenced_ids:
            self.mapi.privmsg(self.cfg["channel"], "-> {0}".format(", ".join(links)))
        return True

    def on_disconnect(self, status):
        print(status)
        print("disconnected :^(")
        self.mapiobj.privmsg(CHANNEL, "Lost connection.")
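
Stripped of the IRC plumbing, m_archive_sync and m_tweet_archive_sync are a scatter-gather: every archive request is started before any result() is awaited, so the total latency is that of the slowest service rather than the sum. A minimal standalone sketch of that pattern (the delay URLs are placeholders):

from requests_futures.sessions import FuturesSession

session = FuturesSession(max_workers=4)
targets = ['https://httpbin.org/delay/2', 'https://httpbin.org/delay/3']

futures = [session.get(u, timeout=30) for u in targets]  # both requests start now
responses = [f.result() for f in futures]                # waits ~3s total, not ~5s
print([r.status_code for r in responses])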
Example #60
    def ask_multiple(self, input_list):
        """Sends multiple requests to AVS concurrently.

        Args:
            input_list (list): A list of input audio filenames to send
                               to AVS. The list elements can also be a
                               tuple, (in_filename, out_filename) to
                               specify where to save the response audio.
                               Otherwise the responses will be saved to
                               the temporary directory.

        Returns:
            List of paths where the responses were saved.
        """
        session = FuturesSession(max_workers=len(input_list))
        # Keep a list of file handlers to close. The input file handlers
        # need to be kept open while requests_futures is sending the
        # requests concurrently in the background.
        files_to_close = []
        # List of saved files to return
        saved_filenames = []
        # List of future tuples, (future, output_filename)
        futures = []

        try:
            # Refresh token to prevent HTTP 403
            self.get_token(refresh=True)
            for inp in input_list:
                # Check if input is a tuple
                if isinstance(inp, tuple):
                    name_in = inp[0]
                    name_out = inp[1]
                else:
                    name_in = inp
                    name_out = None

                # Open the input file
                in_f = open(name_in, 'rb')  # binary mode: raw audio bytes
                files_to_close.append(in_f)

                # Setup request parameters
                url, headers, request_data = self.get_request_params()
                files = [
                    (
                        'file',
                        (
                            'request', json.dumps(request_data),
                            'application/json; charset=UTF-8',
                        )
                    ),
                    (
                        'file',
                        ('audio', in_f, 'audio/L16; rate=16000; channels=1')
                    )
                ]

                # Use request_futures session to send the request
                future = session.post(url, headers=headers, files=files)
                futures.append((future, name_out))

            # Get the response from each future and save the audio
            for future, name_out in futures:
                res = future.result()
                save_to = self.save_response_audio(res, name_out)
                saved_filenames.append(save_to)
            return saved_filenames
        finally:
            # Close all file handlers
            for f in files_to_close:
                f.close()
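
A hypothetical call to the method above, where avs is an instance of the surrounding class and the filenames are placeholders; mixing bare inputs with (in, out) tuples follows the docstring:

saved = avs.ask_multiple([
    'what_time_is_it.wav',                      # response saved to the temp dir
    ('weather_today.wav', '/tmp/weather.mp3'),  # response saved to an explicit path
])
print(saved)   # paths where each response audio was written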