Example #1
 def _set_info(self):
     """Look up this user's id and enabled flag, then cache service endpoints."""
     if self.token is None:
         return None
     result = requests.get(url=self.__users_url,
                           data=None,
                           headers=self.headers)
     dict_result = json.loads(result.content)
     for user in dict_result['users']:
         if user['name'] == self.name:
             self.identification = user['id']
             break
     else:
         # Record error info: no user with this name was found, so the
         # detail request below cannot be made.
         self.identification = None
         warnings.warn('No such user!', Warning)
         return None
     result = requests.get(url=self.__users_url + '/' + self.identification,
                           data=None,
                           headers=self.headers)
     dict_result = json.loads(result.content)
     self.enable = dict_result['user']['enabled']
     self.endpoint["keystone"] = utils.get_endpoint(self.token, "keystone")
     self.endpoint["nova"] = utils.get_endpoint(self.token, "nova")
     self.endpoint["cinder"] = utils.get_endpoint(self.token, "cinder")
     self.endpoint["glance"] = utils.get_endpoint(self.token, "glance")
     self.endpoint["neutron"] = utils.get_endpoint(self.token, "neutron")
Example #2
def image_content_batch(token, urls, categories=None, threshold=None):
    endpoint = utils.get_endpoint(ais.AisService.MODERATION_SERVICE)
    _url = 'https://%s/v1.0/moderation/image/batch' % endpoint

    _data = {
        "urls": urls,
        "categories": categories,
        "threshold": threshold,
    }

    kreq = urllib2.Request(url=_url)
    kreq.add_header('Content-Type', 'application/json')
    kreq.add_header('X-Auth-Token', token)
    kreq.add_data(json.dumps(_data))

    resp = None
    status_code = None
    try:
        #
        # We use an unverified SSL context because client-side CA
        # validation has known problems in FunctionStage.
        #
        _context = ssl._create_unverified_context()
        r = urllib2.urlopen(kreq, context=_context)

    #
    # We catch HTTPError (and URLError) because urllib2 does not handle
    # 4XX and 5XX responses inside urlopen itself. A higher-level HTTP
    # client such as requests does not have this problem.
    #
    except HTTPError as e:
        resp = e.read()
        status_code = e.code
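As the comment above says, the same call is simpler with requests. A minimal sketch (the function name is ours; it assumes the same utils/ais helpers and token):

import requests

def image_content_batch_requests(token, urls, categories=None, threshold=None):
    # Sketch only: requests surfaces 4XX/5XX through raise_for_status(),
    # so no separate urllib2 error path is needed. verify=False mirrors
    # the unverified SSL context above; prefer verification when possible.
    endpoint = utils.get_endpoint(ais.AisService.MODERATION_SERVICE)
    url = 'https://%s/v1.0/moderation/image/batch' % endpoint
    data = {"urls": urls, "categories": categories, "threshold": threshold}
    r = requests.post(url, json=data,
                      headers={'X-Auth-Token': token}, verify=False)
    return r.status_code, r.text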
Example #3
def dark_enhance(token, image, brightness=0.9):
    endpoint = utils.get_endpoint(ais.AisService.IMAGE_SERVICE)
    _url = 'https://%s/v1.0/vision/dark-enhance' % endpoint

    _data = {"image": image, "brightness": brightness}

    kreq = urllib2.Request(url=_url)
    kreq.add_header('Content-Type', 'application/json')
    kreq.add_header('X-Auth-Token', token)
    kreq.add_data(json.dumps(_data))

    resp = None
    status_code = None
    try:
        #
        # We use an unverified SSL context because client-side CA
        # validation has known problems in FunctionStage.
        #
        _context = ssl._create_unverified_context()
        r = urllib2.urlopen(kreq, context=_context)

    #
    # We catch HTTPError (and URLError) because urllib2 does not handle
    # 4XX and 5XX responses inside urlopen itself. A higher-level HTTP
    # client such as requests does not have this problem.
    #
    except HTTPError as e:
        resp = e.read()
        status_code = e.code
Example #4
def image_batch_jobs(token, urls, categories=['politics', 'terrorism', 'p**n']):
    endpoint = utils.get_endpoint(ais.AisService.MODERATION_SERVICE)
    status, r = _image_batch_jobs(endpoint, token, urls, categories)

    if status != 200:
        return r

    submit_result = json.loads(r)
    job_id = submit_result['result'].get('job_id', '')
    # print "Process job id is :", job_id
    time.sleep(1.0)
    try:
        while True:
            status, r = _get_result(endpoint, token, job_id)
            if status != 200:
                return r

            rec_result = json.loads(r)

            process_status = rec_result["result"].get('status')
            if process_status == 'failed':
                return r

            elif process_status == 'finish':
                return r

            #
            # Any other status means the job is still queued or running:
            # wait and poll again.
            #
            else:
                time.sleep(2.0)
                continue

    except Exception:
        return ''
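The loop above polls indefinitely if the job never reaches 'failed' or 'finish'. A sketch of the same loop with a deadline (the timeout handling is our addition, assuming the same _get_result helper):

import json
import time

def wait_for_job(endpoint, token, job_id, timeout=300.0, poll_interval=2.0):
    # Sketch only: poll _get_result until the job fails, finishes,
    # or the deadline passes.
    deadline = time.time() + timeout
    while time.time() < deadline:
        status, r = _get_result(endpoint, token, job_id)
        if status != 200:
            return r
        process_status = json.loads(r)["result"].get('status')
        if process_status in ('failed', 'finish'):
            return r
        time.sleep(poll_interval)
    raise RuntimeError('job %s did not finish within %s seconds'
                       % (job_id, timeout))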
Example #5
 def __init__(self):
     self.token = utils.get_token()
     self.enable = True
     self.__headers = {
         'User-Agent': utils.init_data().get('agent', 'ALL'),
         'X-Auth-Token': self.token,
         "Content-type": "application/json",
         "Accept": "application/json"
     }
     self.__roles_url = utils.get_endpoint(self.token,
                                           'keystone') + '/roles'
Example #6
def image_tagging_aksk(_ak,
                       _sk,
                       image,
                       url,
                       language,
                       limit=-1,
                       threshold=0.0):
    endpoint = utils.get_endpoint(ais.AisService.IMAGE_SERVICE)
    _url = 'https://%s/v1.0/image/tagging' % endpoint

    sig = signer.Signer()
    sig.AppKey = _ak
    sig.AppSecret = _sk

    _data = {
        "image": image,
        "url": url,
        "language": languzge,
        "limit": limit,
        "threshold": threshold
    }

    kreq = signer.HttpRequest()
    kreq.scheme = "https"
    kreq.host = endpoint
    kreq.uri = "/v1.0/image/tagging"
    kreq.method = "POST"
    kreq.headers = {"Content-Type": "application/json"}
    kreq.body = json.dumps(_data)

    resp = None
    status_code = None
    try:
        sig.Sign(kreq)
        #
        # We use an unverified SSL context because client-side CA
        # validation has known problems in FunctionStage.
        #
        _context = ssl._create_unverified_context()
        req = urllib2.Request(url=_url, data=kreq.body, headers=kreq.headers)
        r = urllib2.urlopen(req, context=_context)

    #
    # We catch HTTPError (and URLError) because urllib2 does not handle
    # 4XX and 5XX responses inside urlopen itself. A higher-level HTTP
    # client such as requests does not have this problem.
    #
    except HTTPError as e:
        resp = e.read()
        status_code = e.code
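A hypothetical invocation (the key, secret, and file name are placeholders, not from the source):

import base64

# Sketch only: tag a local image using the AK/SK-signed endpoint.
with open('demo.jpg', 'rb') as f:
    image_b64 = base64.b64encode(f.read())
image_tagging_aksk('my-app-key', 'my-app-secret', image_b64, '', 'en',
                   limit=5, threshold=30.0)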
Example #7
def moderation_text_aksk(
        _ak,
        _sk,
        text,
        type='content',
        categories=["ad", "politics", "p**n", "abuse", "contraband", "flood"]):
    endpoint = utils.get_endpoint(ais.AisService.MODERATION_SERVICE)
    _url = 'https://%s/v1.0/moderation/text' % endpoint

    sig = signer.Signer()
    sig.AppKey = _ak
    sig.AppSecret = _sk

    _data = {
        # Detection scenarios (array): politics, p**n (adult content),
        # ad (advertising), abuse, contraband, flood (spam).
        "categories": categories,
        # items: the list of texts to check; each entry holds the text
        # itself and its type.
        "items": [{
            "text": text,
            "type": type
        }]
    }

    kreq = signer.HttpRequest()
    kreq.scheme = "https"
    kreq.host = endpoint
    kreq.uri = "/v1.0/moderation/text"
    kreq.method = "POST"
    kreq.headers = {"Content-Type": "application/json"}
    kreq.body = json.dumps(_data)

    resp = None
    status_code = None
    try:
        sig.Sign(kreq)
        #
        # We use an unverified SSL context because client-side CA
        # validation has known problems in FunctionStage.
        #
        _context = ssl._create_unverified_context()
        req = urllib2.Request(url=_url, data=kreq.body, headers=kreq.headers)
        r = urllib2.urlopen(req, context=_context)
    #
    # We catch HTTPError (and URLError) because urllib2 does not handle
    # 4XX and 5XX responses inside urlopen itself. A higher-level HTTP
    # client such as requests does not have this problem.
    #
    except HTTPError as e:
        resp = e.read()
        status_code = e.code
Example #8
def moderation_video_aksk(_ak,
                          _sk,
                          url,
                          frame_interval=5,
                          categories=['politics', 'terrorism']):
    sig = signer.Signer()
    sig.AppKey = _ak
    sig.AppSecret = _sk

    endpoint = utils.get_endpoint(ais.AisService.MODERATION_SERVICE)

    status, r = _moderation_video_aksk(endpoint, sig, url, frame_interval,
                                       categories)

    if status != 200:
        return r

    submit_result = json.loads(r)
    job_id = submit_result['result'].get('job_id', '')
    # print "Process job id is :", job_id
    time.sleep(1.0)
    try:
        while True:
            status, r = _get_result_aksk(endpoint, sig, job_id)
            if status != 200:
                return r

            rec_result = json.loads(r)

            process_status = rec_result["result"].get('status')
            if process_status == 'failed':
                return r

            elif process_status == 'finish':
                return r

            #
            # Any other status means the job is still queued or running:
            # wait and poll again.
            #
            else:
                time.sleep(2.0)
                continue

    except Exception:
        return ''
Example #9
def moderation_text(
        token,
        text,
        type='content',
        categories=["ad", "politics", "p**n", "abuse", "contraband", "flood"]):
    endpoint = utils.get_endpoint(ais.AisService.MODERATION_SERVICE)
    _url = 'https://%s/v1.0/moderation/text' % endpoint

    _data = {
        # Detection scenarios (array): politics, p**n (adult content),
        # ad (advertising), abuse, contraband, flood (spam).
        "categories": categories,
        # items: the list of texts to check; each entry holds the text
        # itself and its type.
        "items": [{
            "text": text,
            "type": type
        }]
    }
    kreq = urllib2.Request(url=_url)
    kreq.add_header('Content-Type', 'application/json')
    kreq.add_header('X-Auth-Token', token)
    kreq.add_data(json.dumps(_data))

    resp = None
    status_code = None
    try:
        #
        # We use an unverified SSL context because client-side CA
        # validation has known problems in FunctionStage.
        #
        _context = ssl._create_unverified_context()
        r = urllib2.urlopen(kreq, context=_context)

    #
    # We catch HTTPError (and URLError) because urllib2 does not handle
    # 4XX and 5XX responses inside urlopen itself. A higher-level HTTP
    # client such as requests does not have this problem.
    #
    except HTTPError as e:
        resp = e.read()
        status_code = e.code
Example #10
 def create(self, project, name, passwd, role, **kwargs):
     params = {
         "user": {
             "name": name,
             "password": passwd,
             "default_project_id": project.identification,
             "enabled": True,
         }
     }
     # TODO(dc): description is not handled yet
     # if kwargs.get("description"):
     #     params["user"]["description"] = kwargs.get("description")
     if kwargs.get("domain_id"):
         params["user"]["domain_id"] = kwargs.get("domain_id")
     user_id = json.loads(
         requests.post(url=self.__users_url,
                       data=json.dumps(params),
                       headers=self.__headers).content)["user"]["id"]
     # TODO(dc): domain is not handled yet
     requests.put(url=utils.get_endpoint(self.token, 'keystone') +
                  '/projects/' + project.identification + '/users/' +
                  user_id + '/roles/' + role.identification,
                  headers=self.__headers)
     return self.get(name=name, passwd=passwd, project=project)
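A hypothetical call (the names are placeholders; the project and role objects come from elsewhere in this codebase):

# Sketch only: create a user in a project and grant it a role.
user = manager.create(project=my_project,
                      name='alice',
                      passwd='s3cret',
                      role=member_role,
                      domain_id='default')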
Example #11
import ConfigParser
import logging
import os.path
import re
import sys
import time
import traceback
import urllib2
from datetime import datetime

import requests
from bs4 import BeautifulSoup
from lxml import etree
from lxml.etree import tostring

from utils import send_to_slack, notify_users_about_article, get_endpoint, is_production, write_news

ENDPOINT = get_endpoint()
logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)
logging.getLogger("requests").setLevel(logging.WARNING)

if os.path.isfile("config.ini"):
    config = ConfigParser.RawConfigParser(allow_no_value=True)
    config.read('config.ini')
    APPSPOT_KEY = config.get("appspot", "X-Secret-Key")
else:
    print "config.ini is needed"
    sys.exit(1)


class NewsParser:
    TIMEOUT = 15
    RE_POST_ID = re.compile(ur'((?<=")post-\d+(?="))')
Example #12
 def feeler_coords(self, angle, length):
     '''Coordinates of feeler with given angle and length.'''
     end_x, end_y = utils.get_endpoint(self.critter.coords[0], self.critter.coords[1],
                                       (self.critter.heading + angle) % 360, length)
     return self.critter.coords[0], self.critter.coords[1], end_x, end_y
Example #13
 def mouth_end(self):
     '''Coordinates of the point where the mouth opens.'''
     return utils.get_endpoint(self.coords[0], self.coords[1], self.heading, Thing.radius)
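In these two snippets, utils.get_endpoint(x, y, heading, length) is a 2-D geometry helper rather than a URL resolver. A hypothetical implementation consistent with how it is called (ours, not the project's) could be:

import math

def get_endpoint(x, y, angle, length):
    # Hypothetical sketch: project from (x, y) along `angle` (in degrees)
    # for `length` units and return the endpoint coordinates.
    rad = math.radians(angle)
    return x + length * math.cos(rad), y + length * math.sin(rad)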
Example #14
#!/usr/bin/python
# -*- coding: iso-8859-15 -*-
import codecs
import ConfigParser
import json
import os
import re
import requests
import urllib2
from utils import get_endpoint, notify_users, read_json, write_json, write_news

ENDPOINT = get_endpoint()

if os.path.isfile("config.ini"):
    config = ConfigParser.RawConfigParser(allow_no_value=True)
    config.read('config.ini')
    APPSPOT_KEY = config.get("appspot", "X-Secret-Key")


class BuliParser:
    def __init__(self,
                 season,
                 league,
                 relay,
                 push_descr,
                 fragment_id,
                 leage_relay="LEAGUE_RELAY"):
        self.league = league
        self.relay = relay
        self.iat_season_base = "https://www.iat.uni-leipzig.de/datenbanken/blgew{0}/".format(
            season)
Example #15
import os
import argparse
import traceback
from parser.buli_parser import BuliParser
from parser.news_parser import BVDGParser, SpeyerParser, SchwedtParser, MutterstadtParser, RodingParser
from utils import update_readme, commit_changes, send_to_slack, get_endpoint, is_production, NEWS_FILE

if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description="Send new articles or competition results to the server")
    parser.add_argument('--notify', action='store_true')
    args = parser.parse_args()
    print "Endpoint: " + get_endpoint()

    SEASON = "1718"
    BuliParser1A = BuliParser(SEASON,
                              "1",
                              "Gruppe+A",
                              "1. Bundesliga - Staffel A",
                              "4",
                              leage_relay="1a")
    BuliParser1B = BuliParser(SEASON,
                              "1",
                              "Gruppe+B",
                              "1. Bundesliga - Staffel B",
                              "5",
                              leage_relay="1b")
    BuliParser2A = BuliParser(SEASON,
                              "2",
                              "Gruppe+A",
                              "2. Bundesliga - Staffel A",
                              "7",
                              leage_relay="2a")
Example #16
    def get_values(self, points, calculationtype=None, starttime='*-1d',
                   endtime='*', boundary=None, boundarytype=None,
                   maxcount='1000', desiredunits=None, interval=None,
                   intervals='24', retrievalmode='Auto', summarytype='Total',
                   calculationbasis='TimeWeighted', timetype='Auto',
                   summaryduration=None, sampletype='ExpressionRecordedValues',
                   sampleinterval=None, time=None, filterexpression=None,
                   includefilteredvalues=False, sortorder='Ascending',
                   append=False, overwrite=False):
        # starttime / endtime are PI time strings
        # interval is an AFTimeSpan
        # desiredunits is a unit of measure; cannot be specified for PI points
        # filterexpression filters results, e.g. '*' or 'SINU*'
        # includefilteredvalues (bool): 'true' returns values that fail the
        #   filter at the times they occurred, marked with a 'Filtered'
        #   enumeration value and bad status; repeated consecutive failures
        #   are omitted
        # sortorder defaults to 'Ascending'
        # summaryduration: the duration of each summary interval. If given in
        #   hours, minutes, seconds, or milliseconds, summary durations are
        #   evenly spaced UTC intervals; longer interval types follow
        #   wall-clock rules and are time-zone dependent
        # TODO: add starttime parameter
        # TODO: add endtime parameter
        # TODO: add boundary parameter
        # TODO: add interval parameter
        if calculationtype is None:
            raise ValueError('calculationtype is required')
        calctype = calculationtype.lower()

        is_single_value = calctype in ('current', 'end')

        log.debug('Calculation type: %s, Single value: %s', calctype,
                  is_single_value)

        for point in iterfy(points):
            log.debug('Retrieving %s data for %s...', calctype, point.name)

            if calctype == 'current':
                payload = {'time': time}
            elif calctype == 'interpolated':
                payload = {'startTime': starttime, 'endTime': endtime,
                           'interval': interval,
                           'filterExpression': filterexpression,
                           'includeFilteredValues': includefilteredvalues}
            elif calctype == 'interpolatedattimes':
                payload = {'time': time, 'filterExpression': filterexpression,
                           'includeFilteredValues': includefilteredvalues,
                           'sortOrder': sortorder}
            elif calctype == 'recorded':
                payload = {'startTime': starttime, 'endTime': endtime,
                           'boundaryType': boundarytype,
                           'filterExpression': filterexpression,
                           'includeFilteredValues': includefilteredvalues,
                           'maxCount': maxcount}
            elif calctype == 'recordedattime':
                payload = {'time': time, 'retrievalMode': retrievalmode}
            elif calctype == 'plot':
                payload = {'startTime': starttime, 'endTime': endtime,
                           'intervals': intervals}
            elif calctype == 'summary':
                payload = {'startTime': starttime, 'endTime': endtime,
                           'summaryType': summarytype,
                           'calculationBasis': calculationbasis,
                           'timeType': timetype,
                           'summaryDuration': summaryduration,
                           'sampleType': sampletype,
                           'sampleInterval': sampleinterval,
                           'filterExpression': filterexpression}
            elif calctype == 'end':
                payload = {}
            else:
                # No URL parameters for this calculation type; requests
                # accepts params=None.
                payload = None
                log.debug('This %s request has no URL parameters', calctype)

            endpoint = get_endpoint(self.url, point, calctype)

            # TODO: add queryParamater generator function here?
            try:
                log.debug('Instantiating %s request for PI point %s to '
                          'endpoint %s with the following parameters: %s',
                          calctype, point.name, endpoint, payload)

                r = self.session.get(endpoint, params=payload)
                if r.status_code != requests.codes.ok:
                    r.raise_for_status()
            except OSIsoftPyException as e:
                log.error('Exception while retrieving %s values from %s '
                          'for %s.', calctype, endpoint, point.name,
                          exc_info=True)
                continue

            data = r.json()
            log.debug('HTTP %s - Instantiating OSIsoftPy.Values()',
                      r.status_code)
            log.debug('Staging PI point value for '
                      'instantiation...')
            try:
                new_values = get_point_values(point, calctype, data)
                log.debug('%s %s value(s) were instantiated for %s.',
                          get_count(new_values), calctype, point.name)
            except OSIsoftPyException as e:
                log.error('Exception while instantiating PI Point value(s) '
                          'for %s. Raw JSON: %s', point.name, data,
                          exc_info=True)
                continue
            current_values = TypedList(validtypes=Value)

            if is_single_value:
                try:
                    value = getattr(point, get_attribute(calctype))
                    log.debug('Storing %s value.', calctype)
                    current_values.append(value)
                except TypeError as e:
                    log.warning('TypeError encountered - the attribute %s is '
                                'empty for %s, which will raise an '
                                'exception when trying to iterate.',
                                get_attribute(calctype), point.name,
                                exc_info=False)
            else:
                try:
                    for value in getattr(point, get_attribute(calctype)):
                        log.debug(
                            'Storing %s value for PI point %s, attribute: %s',
                            calctype, point.name, get_attribute(calctype))
                        current_values.append(value)
                except TypeError as e:
                    log.warning('TypeError encountered - the attribute %s is '
                                'empty for %s, which will raise an '
                                'exception when trying to iterate.',
                                get_attribute(calctype), point.name,
                                exc_info=False)

            log.debug('PI point %s currently has %s %s values.', point.name,
                      get_count(current_values), calctype)

            if is_single_value and overwrite:
                log.debug('Single point value - overwriting existing %s '
                          'value, Single value: %s.', calctype,
                          is_single_value)
                setattr(point, get_attribute(calctype), new_values[0])
            elif is_single_value and append:
                log.debug('Single point value - append is true but cannot '
                          'append...overwriting existing %s '
                          'value, Single value: %s.', calctype,
                          is_single_value)
                setattr(point, get_attribute(calctype), new_values[0])
            elif not is_single_value and overwrite:
                log.debug('Multiple point values - overwriting %s existing '
                          '%s values, Single value: %s.',
                          get_count(current_values), calctype, is_single_value)
                setattr(point, get_attribute(calctype), new_values)
            elif not is_single_value and append:
                for new_value in new_values:
                    current_values.append(new_value)
                setattr(point, get_attribute(calctype), current_values)
            else:
                # TODO: allow both to be false if no data exists.
                log.error('Error saving %s new %s point value(s) for PI '
                          'point %s. Single value: %s, Overwrite: %s, Append: '
                          '%s.', get_count(new_values), calctype, point.name,
                          is_single_value, overwrite, append)
        return points
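The if/elif chain in get_values() is essentially a lookup from calculation type to query parameters. A table-driven sketch of part of that mapping (our refactoring idea, not the library's API):

def build_payload(calctype, **kw):
    # Sketch only: map each calculation type to the query parameters it
    # needs, mirroring a few branches of the if/elif chain above.
    builders = {
        'current': lambda: {'time': kw.get('time')},
        'recordedattime': lambda: {'time': kw.get('time'),
                                   'retrievalMode': kw.get('retrievalmode')},
        'plot': lambda: {'startTime': kw.get('starttime'),
                         'endTime': kw.get('endtime'),
                         'intervals': kw.get('intervals')},
        'end': lambda: {},
    }
    builder = builders.get(calctype)
    return builder() if builder else None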
Example #17
import os
import argparse
import traceback
from parser.buli_parser import BuliParser
from parser.news_parser import BVDGParser, SpeyerParser, SchwedtParser, MutterstadtParser, RodingParser
from utils import update_readme, commit_changes, send_to_slack, get_endpoint, is_production, NEWS_FILE

if __name__ == '__main__':
    parser = argparse.ArgumentParser(description="Send new articles or competition results to the server")
    parser.add_argument('--notify', action='store_true')
    args = parser.parse_args()
    print "Endpoint: " + get_endpoint()

    SEASON = "1718"
    BuliParser1A = BuliParser(SEASON, "1", "Gruppe+A", "1. Bundesliga - Staffel A", "4", leage_relay="1a")
    BuliParser1B = BuliParser(SEASON, "1", "Gruppe+B", "1. Bundesliga - Staffel B", "5", leage_relay="1b")
    BuliParser2A = BuliParser(SEASON, "2", "Gruppe+A", "2. Bundesliga - Staffel A", "7", leage_relay="2a")
    BuliParser2B = BuliParser(SEASON, "2", "Gruppe+B", "2. Bundesliga - Staffel B", "8", leage_relay="2b")

    blog_parsers_instances = [BVDGParser(), SpeyerParser(), SchwedtParser(), MutterstadtParser(), RodingParser()]

    try:
        for parser in [BuliParser1A, BuliParser1B, BuliParser2A, BuliParser2B]:
            parser.update_buli(args.notify)

        for blog_parser_instance in blog_parsers_instances:
            blog_parser_instance.parse_articles(args.notify)

        update_readme(blog_parsers_instances)

        if is_production():