Example #1
    def handle_trent_request(self):
        target = request.headers.get("X-Amz-Target")
        data = request.get_data()
        verifier = AWSSigV4Verifier(
            request_method=request.method, uri_path=request.path,
            query_string=request.query_string, headers=request.headers,
            body=data, region="us-west-2", service="kms",
            key_mapping=self.keymap)
        try:
            verifier.verify()
        except InvalidSignatureError as e:
            # Uncomment if debugging signature issues.
            # print_exc()
            return make_response(json_dumps(
                {"__type": "AuthFailure",
                 "message": "Invalid signature"}), UNAUTHORIZED)

        try:
            params = json_loads(data)
        except ValueError as e:
            return make_response(json_dumps(
                {"__type": "MalformedQueryString",
                 "message": "Could not decode JSON data"}), BAD_REQUEST)

        if target == "TrentService.GenerateDataKey":
            return self.handle_trent_generate_data_key_request(params)
        elif target == "TrentService.Decrypt":
            return self.handle_trent_decrypt_request(params)

        print("Unknown action %s: params=%r" % (target, params), file=stderr)
        
        return make_response(json_dumps(
            {"__type": "InvalidAction",
             "message": "Unknown action %s" % target}), BAD_REQUEST)
Example #2
    def handle_trent_generate_data_key_request(self, params):
        try:
            key_id = params["KeyId"]
            context = params.get("EncryptionContext", {})
            key_spec = params.get("KeySpec", "AES_256")
        except KeyError as e:
            return make_response(json_dumps(
                {"__type": "MissingParameter",
                 "message": "Missing parameter: %s" % e.args[0]}), BAD_REQUEST)

        if key_id != self.kms_key_id:
            return make_response(json_dumps(
                {"__type": "InvalidParameterValue",
                 "message": "Unknown key"}), BAD_REQUEST)

        # Random key
        if key_spec == "AES_256":
            plaintext = urandom(32)
        elif key_spec == "AES_128":
            plaintext = urandom(16)
        else:
            return make_response(json_dumps(
                {"__type": "InvalidParameter",
                 "message": "Invalid KeySpec %r" % key_spec}), BAD_REQUEST)

        ciphertext_blob = b64encode(json_dumps(
            {"KeyId": key_id,
             "EncryptionContext": context,
             "Plaintext": b64encode(plaintext)}))

        return make_response(json_dumps(
            {'KeyId': key_id,
             'CiphertextBlob': ciphertext_blob,
             'Plaintext': b64encode(plaintext)}))
Example #3
    def handle_trent_decrypt_request(self, params):
        try:
            ciphertext_blob = params["CiphertextBlob"]
            encryption_context = params.get("EncryptionContext", {})
        except KeyError as e:
            return make_response(json_dumps(
                {"__type": "MissingParameter",
                 "message": "Missing parameter: %s" % e.args[0]}), BAD_REQUEST)

        try:
            encrypt_params = json_loads(b64decode(ciphertext_blob))
        except ValueError as e:
            return make_response(json_dumps(
                {"__type": "InvalidParameterValue",
                 "message": "Invalid ciphertext blob"}), BAD_REQUEST)

        try:
            key_id = encrypt_params["KeyId"]
            encryption_context = encrypt_params["EncryptionContext"]
            plaintext = encrypt_params["Plaintext"]
        except KeyError as e:
            return make_response(json_dumps(
                {"__type": "MissingParameter",
                 "message": "Missing parameter: %s" % e.args[0]}), BAD_REQUEST)

        # Plaintext is already base64 encoded.
        return make_response(json_dumps(
            {"KeyId": key_id, "Plaintext": plaintext}), OK)
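Taken together, Examples #1-#3 implement a toy KMS endpoint: GenerateDataKey hands back a "ciphertext" that is merely base64-wrapped JSON carrying the plaintext, and Decrypt simply unwraps it. A minimal standalone sketch of that blob format (Python 3, Flask and the signature verifier omitted; the function names here are illustrative only):

from base64 import b64encode, b64decode
from json import dumps as json_dumps, loads as json_loads
from os import urandom

def fake_generate_data_key(key_id, context=None, key_spec="AES_256"):
    # Mirrors handle_trent_generate_data_key_request: the "ciphertext" is just
    # base64-wrapped JSON that carries the plaintext itself.
    plaintext = urandom(32 if key_spec == "AES_256" else 16)
    blob = b64encode(json_dumps(
        {"KeyId": key_id,
         "EncryptionContext": context or {},
         "Plaintext": b64encode(plaintext).decode()}).encode())
    return plaintext, blob

def fake_decrypt(blob):
    # Mirrors handle_trent_decrypt_request: unwrap the blob and return what it carries.
    params = json_loads(b64decode(blob))
    return params["KeyId"], b64decode(params["Plaintext"])

plaintext, blob = fake_generate_data_key("alias/test-key")
key_id, recovered = fake_decrypt(blob)
assert key_id == "alias/test-key" and recovered == plaintext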
Example #4
    def index(self):
        if "latitude" in request.params and "longitude" in request.params:
            latitude = float(request.params["latitude"])
            longitude = float(request.params["longitude"])
        if "easting" in request.params:
            easting = float(request.params["easting"])
        if "northing" in request.params:
            northing = float(request.params["northing"])

        queryString = "http://wms.geo.admin.ch/?SERVICE=WMS&VERSION=1.1.1&REQUEST=GetFeatureInfo&SRS=EPSG:21781"
        queryString = (
            queryString
            + "&BBOX="
            + str(easting - 1.0)
            + ","
            + str(northing - 1.0)
            + ","
            + str(easting + 1.0)
            + ","
            + str(northing + 1.0)
        )
        queryString = (
            queryString
            + "&WIDTH=2&HEIGHT=2&QUERY_LAYERS=ch.bfs.gebaeude_wohnungs_register&LAYERS=ch.bfs.gebaeude_wohnungs_register&X=1&Y=1&uuid="
            + str(uuid.uuid1())
        )
        response = urllib2.urlopen(queryString)
        responseText = response.read()
        if responseText.rfind("Search returned no results") == -1:
            responseElements = responseText.split("\n")
            # Default the fields so a partial response cannot raise UnboundLocalError below.
            street = postcode = housenumber = city = ""
            for element in responseElements:
                if element.rfind("strname1") > -1:
                    strname1_s = element.split("=")
                    street = strname1_s[1].lstrip().lstrip("'").rstrip().rstrip("'")
                if element.rfind("plz4") > -1:
                    plz4_s = element.split("=")
                    postcode = plz4_s[1].lstrip().lstrip("'").rstrip().rstrip("'")
                if element.rfind("deinr") > -1:
                    deinr_s = element.split("=")
                    housenumber = deinr_s[1].lstrip().lstrip("'").rstrip().rstrip("'")
                if element.rfind("plzname") > -1:
                    plzname_s = element.split("=")
                    city = plzname_s[1].lstrip().lstrip("'").rstrip().rstrip("'")
            swissBuildingArray = [{"housenumber": housenumber, "street": street, "postcode": postcode, "city": city}]
            if "callback" in request.params:
                response.headers["Content-Type"] = "text/javascript; charset=utf-8"
                return request.params["callback"] + "(" + json_dumps(swissBuildingArray) + ");"
            else:
                response.headers["Content-Type"] = "application/json"
                return json_dumps(swissBuildingArray)
        else:
            if "callback" in request.params:
                response.headers["Content-Type"] = "text/javascript; charset=utf-8"
                return request.params["callback"] + "(" + json_dumps([{"result": "Search returned no results"}]) + ");"
            else:
                response.headers["Content-Type"] = "application/json"
                return json_dumps([{"result": "Search returned no results"}])
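The handler above assembles the GetFeatureInfo URL by hand-concatenating strings, and easting/northing are only bound when those request parameters are present. A hedged sketch of building the same query with urllib.parse.urlencode from the standard library (same endpoint and parameters as above):

from urllib.parse import urlencode
import uuid

def build_gfi_query(easting, northing):
    # Same parameters as the concatenated string above, assembled with urlencode.
    params = {
        "SERVICE": "WMS",
        "VERSION": "1.1.1",
        "REQUEST": "GetFeatureInfo",
        "SRS": "EPSG:21781",
        "BBOX": "%s,%s,%s,%s" % (easting - 1.0, northing - 1.0,
                                 easting + 1.0, northing + 1.0),
        "WIDTH": 2, "HEIGHT": 2,
        "QUERY_LAYERS": "ch.bfs.gebaeude_wohnungs_register",
        "LAYERS": "ch.bfs.gebaeude_wohnungs_register",
        "X": 1, "Y": 1,
        "uuid": str(uuid.uuid1()),
    }
    # safe=',' keeps the commas in BBOX readable instead of percent-encoding them.
    return "http://wms.geo.admin.ch/?" + urlencode(params, safe=',')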
Example #5
 def add_statement(self, statement, parameters, records=None):
     # appends the RUN and stream requests; returns the matching (head, tail) responses
     self.requests.append(Request("RUN %s %s" % (json_dumps(statement), json_dumps(parameters)),
                          CLIENT["RUN"], statement, parameters))
     head = QueryResponse()
     if records is None:
         self.requests.append(DISCARD_ALL_REQUEST)
         tail = QueryResponse()
     else:
         self.requests.append(PULL_ALL_REQUEST)
         tail = QueryStreamResponse(records)
     self.responses.extend([head, tail])
     return head, tail
Example #6
 def on_error(self, status_code, data):
     # @TO-DO: handle Twython errors - see https://twython.readthedocs.org/en/latest/api.html#twython.TwythonStreamer.disconnect
     print('\n\n' + ('*'*70))
     print('Twython error')
     print('*'*70)
     print(status_code, data)
     msg = '{0}\nTwython Error:\n{1}\n{0}\n\n'.format(
         '*'*70, json_dumps(data))
     e = TwythonError(msg, status_code)
     self.log.error('twythonError', '{}:{}'.format(
         status_code,
         json_dumps(data)))
     raise e
Example #7
def fprint(content):
    """ 
    Pretty-print `content` as indented JSON (Google-style docstring).

    Args:
        content: any JSON-serializable object.

    Raises:
        TypeError: if `content` cannot be serialized to JSON.
    """
    print json_dumps(content, indent=1)
Example #8
 def f(request, *args, **kwargs):
     # 1. check origin
     origin = request.META.get('HTTP_ORIGIN')
     if origin is None:
         origin = request.META.get('HTTP_REFERER')
         if origin:
             origin = cors.make_origin(origin)
     if not cors.check_origin(request, origin):
         return HttpResponseForbidden('bad origin')
     # 2. build response
     result = func(request, *args, **kwargs)
     json_str = json_dumps(result)
     response = HttpResponse(content_type='application/json')
     for variable in ('jsonpCallback', 'callback'):
         if variable in request.GET:
             identifier = request.GET[variable]
             if not re.match(r'^[$a-zA-Z_][0-9a-zA-Z_$]*$', identifier):
                 return HttpResponseBadRequest('invalid JSONP callback name')
             json_str = '%s(%s);' % (identifier, json_str)
             break
     else:
         response['Access-Control-Allow-Origin'] = origin
         response['Access-Control-Allow-Credentials'] = 'true'
         response['Access-Control-Allow-Headers'] = 'x-requested-with'
     response.write(json_str)
     return response
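The regular expression above is the usual guard against script injection through a JSONP callback name. A quick standalone check of the same pattern (the sample names are made up):

import re

CALLBACK_RE = re.compile(r'^[$a-zA-Z_][0-9a-zA-Z_$]*$')

for name in ('handleData', '_cb1', '$fn',           # accepted
             'alert(1)//', 'foo.bar', '1bad'):      # rejected
    print(name, bool(CALLBACK_RE.match(name)))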
Example #9
    def create_message(self, url):
        # Get prop_html
        resp = req_get(url)
        prop_html = resp.text

        timestamp = time()
        return json_dumps([url, timestamp, prop_html])
Example #10
    def stream(self):
        self.log.info('StreamWorker {} starting stream with:\n    {}'.format(
            self.pid,
            json_dumps({str(k): str(v) for k, v in self.params.items()})))

        # Get new StreamSession from DB
        session = get_session(self.db_path)
        ss = session.merge(StreamSession(starttime=dt.now()))
        session.commit()
        self.streamsession_id = ss.id
        session.close()

        # create streamer
        self.streamer = FanTwython(self.q, self.msgq, self.log, self.countq, self.streamsession_id, self.credentials, chunk_size=self.chunk_size)

        # stream tweets
        try:
            self.streamer.statuses.filter(**self.params)

        except (IncompleteRead, ChunkedEncodingError) as e:
            self.log.exception(e)
            self.restart_stream()

        except ValueError:
            # Must come before the generic handler, otherwise it is unreachable.
            raise

        except Exception as e:
            tb = traceback.format_exc()
            print('ERROR STREAMING') #@DEBUG
            print(tb)
            print(e)
            self.log.exception(e)
            self.restart_stream()
Example #11
def _parseConfigfileCache(cache_dict, dirpath):
    """ Used by parseConfigfile() to parse just the cache parts of a config.
    """
    if cache_dict.has_key('name'):
        _class = Caches.getCacheByName(cache_dict['name'])
        kwargs = {}
        
        if _class is Caches.Test:
            if cache_dict.get('verbose', False):
                kwargs['logfunc'] = lambda msg: stderr.write(msg + '\n')
    
        elif _class is Caches.Disk:
            kwargs['path'] = enforcedLocalPath(cache_dict['path'], dirpath, 'Disk cache path')
            
            if cache_dict.has_key('umask'):
                kwargs['umask'] = int(cache_dict['umask'], 8)
            
            for key in ('dirs', 'gzip'):
                if cache_dict.has_key(key):
                    kwargs[key] = cache_dict[key]
    
        else:
            raise Exception('Unknown cache: %s' % cache_dict['name'])
        
    elif cache_dict.has_key('class'):
        _class = loadClassPath(cache_dict['class'])
        kwargs = cache_dict.get('kwargs', {})
        kwargs = dict( [(str(k), v) for (k, v) in kwargs.items()] )

    else:
        raise Exception('Missing required cache name or class: %s' % json_dumps(cache_dict))

    cache = _class(**kwargs)

    return cache
Example #12
    def save(self, *args, **kwargs):
        if self.pk is None:
            self.salt = uuid.uuid4().hex
            self.created_at = datetime.datetime.now()

            # do this now instead of in AbstractVersionedEntity.save() so we can use it for image name
            if self.entity_id is None:
                self.entity_id = generate_entity_uri()

            if not self.image:
                badgeclass_name, ext = os.path.splitext(self.badgeclass.image.file.name)
                new_image = StringIO.StringIO()
                bake(image_file=self.cached_badgeclass.image.file,
                     assertion_json_string=json_dumps(self.get_json(obi_version=UNVERSIONED_BAKED_VERSION), indent=2),
                     output_file=new_image)
                self.image.save(name='assertion-{id}{ext}'.format(id=self.entity_id, ext=ext),
                                content=ContentFile(new_image.read()),
                                save=False)

            try:
                from badgeuser.models import CachedEmailAddress
                existing_email = CachedEmailAddress.cached.get(email=self.recipient_identifier)
                if self.recipient_identifier != existing_email.email and \
                        self.recipient_identifier not in [e.email for e in existing_email.cached_variants()]:
                    existing_email.add_variant(self.recipient_identifier)
            except CachedEmailAddress.DoesNotExist:
                pass

        if self.revoked is False:
            self.revocation_reason = None

        super(BadgeInstance, self).save(*args, **kwargs)
Example #13
    def send_discuss_msg(self, msg_content, target_did, clientid=0, psessionid="", face=525, msg_id=72690003):
        """
        Send a message to a discussion group.
        """
        rsp_json = self._request_and_parse(
            method='POST',
            url='http://d1.web2.qq.com/channel/send_discu_msg2',
            data_dict={
                'clientid': clientid if clientid else self.clientid,
                'content': json_dumps(  # annoyingly, the content needs to be pre-dumped once
                    [
                        msg_content,  # the message body
                        ["font", {"name": "宋体",
                                  "size": 10,
                                  "style": [0, 0, 0],
                                  "color": "000000"}
                         ]
                    ]
                ),
                'face': face,
                'msg_id': msg_id + randint(0, 12120002),
                'psessionid': psessionid if psessionid else self.psessionid,
                'did': target_did  # the recipient discussion group
            }
        )

        if 'errCode' in rsp_json and rsp_json['errCode'] == 0 or rsp_json['retcode'] == 1202:
            return True
        else:
            errprint('Failed to send message:', rsp_json)
            return False
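Note that the content field is JSON-encoded on its own before the surrounding form data is sent, so the server receives a JSON string inside a single form field. A tiny sketch of that double encoding (placeholder values, no HTTP involved):

from json import dumps as json_dumps

msg_content = "hello"
content = json_dumps([
    msg_content,
    ["font", {"name": "宋体", "size": 10, "style": [0, 0, 0], "color": "000000"}],
])
# `content` is itself a JSON string and becomes one value of the form payload.
data_dict = {"clientid": 0, "content": content, "did": 12345}
print(data_dict["content"])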
Example #14
def error404(error):
    bottle.response.content_type = 'application/json'
    try:
        (code, message) = error.body
        return json_dumps({"schema":"api_error1", "code":code, "message":message})
    except:
        return error.body
Example #15
    def get_baked_image_url(self, obi_version=CURRENT_OBI_VERSION):
        if obi_version == UNVERSIONED_BAKED_VERSION:
            # requested version is the one referenced in assertion.image
            return self.image.url

        try:
            baked_image = BadgeInstanceBakedImage.cached.get(badgeinstance=self, obi_version=obi_version)
        except BadgeInstanceBakedImage.DoesNotExist:
            # rebake
            baked_image = BadgeInstanceBakedImage(badgeinstance=self, obi_version=obi_version)

            json_to_bake = self.get_json(
                obi_version=obi_version,
                expand_issuer=True,
                expand_badgeclass=True,
                include_extra=True
            )
            badgeclass_name, ext = os.path.splitext(self.badgeclass.image.file.name)
            new_image = StringIO.StringIO()
            bake(image_file=self.cached_badgeclass.image.file,
                 assertion_json_string=json_dumps(json_to_bake, indent=2),
                 output_file=new_image)
            baked_image.image.save(
                name='assertion-{id}-{version}{ext}'.format(id=self.entity_id, ext=ext, version=obi_version),
                content=ContentFile(new_image.read()),
                save=False
            )
            baked_image.save()

        return baked_image.image.url
Example #16
 def cast(self, out):
     """
     Cast the output to an iterable of strings or something WSGI can handle.
     Set Content-Type and Content-Length when possible. Then clear output
     on HEAD requests.
     Supports: False, str, unicode, list(unicode), dict(), open()
     """
     if not out:
         out = []
         response.header['Content-Length'] = '0'
     elif isinstance(out, types.StringType):
         out = [out]
     elif isinstance(out, unicode):
         out = [out.encode(response.charset)]
     elif isinstance(out, list) and isinstance(out[0], unicode):
         out = map(lambda x: x.encode(response.charset), out)
     elif self.autojson and json_dumps and isinstance(out, dict):
         out = [json_dumps(out)]
         response.content_type = 'application/json'
     elif hasattr(out, 'read'):
         out = request.environ.get('wsgi.file_wrapper',
               lambda x: iter(lambda: x.read(8192), ''))(out)
     if isinstance(out, list) and len(out) == 1:
         response.header['Content-Length'] = str(len(out[0]))
     if not hasattr(out, '__iter__'):
         raise TypeError('Request handler for route "%s" returned [%s] '
         'which is not iterable.' % (request.path, type(out).__name__))
     return out
Example #17
    def _request(self, method, url, **kwargs):
        """
        Wrapper for requests.request()

        Performs the following additional functions:
            - Automatically includes required auth headers
            - Raises an exception for 4xx/5xx HTTP status codes
            - Handles invalid JSON responses

        Arguments:
            method (string): HTTP method
            url (string): URL to make the request to
            **kwargs: any kwargs allowed by requests.request()
        """
        logging.debug("{} {}".format(method, url))
        logging.debug(kwargs)
        logging.debug(self.session.headers)

        response = self.session.request(method, url, **kwargs)

        try:
            json = response.json()
        except JSONDecodeError:
            logging.warning("Failed to decode JSON from response")
            json = {}

        logging.debug(json_dumps(json, indent=4))
        response.raise_for_status()
        return json
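A hedged usage sketch of the same pattern with a bare requests.Session (the class plumbing above is omitted). response.json() raises a JSONDecodeError that subclasses ValueError, so catching ValueError works across requests versions; the URL below is hypothetical:

import logging
import requests

def get_json(session, url, **kwargs):
    # Same flow as _request(): log, perform the request, tolerate an invalid
    # JSON body, then raise for 4xx/5xx statuses.
    logging.debug("GET %s", url)
    response = session.request("GET", url, **kwargs)
    try:
        payload = response.json()
    except ValueError:  # json.JSONDecodeError is a ValueError subclass
        logging.warning("Failed to decode JSON from response")
        payload = {}
    response.raise_for_status()
    return payload

# payload = get_json(requests.Session(), "https://api.example.com/items")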
Example #18
def json(article):
    date = strftime(DATE_FORMAT, article.date)
    obj = {
        'url': article.url, 'date': date, 'title': article.title,
        'author': article.author, 'text': article.text
    }
    return json_dumps(obj)
Example #19
def update_beliefs(agent, cmd, args):
    """Update beliefs concerning the agent
    This function should verify signatures for some belief updates
    to avoid naive UDP spoofing corrupting the beliefs."""
    args_l = args.split(',')
    args_len = len(args_l)
    log.debug("updating beliefs")
    log.debug(args)
    if args_len == 2:
        #TODO tests (udp spoofing)
        log.debug("Agent " + agent['name'] + " is updating : ")
        agent[args_l[0]] = args_l[1]
        log.debug(json_dumps(agent, indent=2, sort_keys=True))
    elif args_len == 3:
        # Go with belief creator (type specification)
        a_hash = agent['ip_hash']
        args_l = args.split(',')
        attr = args_l[0]
        if attr == "address":
            address = {}
            for a_attr in args_l[1].split('/'):
                key, value = a_attr.split(':')
                address[key] = value

            if not address['host']:
                address['host'] = agent['ip_src']
            
            address_s = "/".join(["%s:%s" % (x, address[x]) for x in address.keys()])
            args_l[1] = address_s
            args = ",".join(args_l)
            
        command_queue.put_str("add_belief::friends.%s.%s::" % (a_hash, args))
Example #20
def cache_next(request):
    bibliotik_id = request.GET['bibliotik_id']
    bibliotik_client = BibliotikClient(bibliotik_id)
    last_id = BibliotikTorrentPageCache.objects.aggregate(Max('id'))['id__max'] or 0
    next_id = last_id + 1
    response = bibliotik_client.session.get(
        BIBLIOTIK_GET_TORRENT_URL.format(next_id), allow_redirects=False)
    if response.status_code == 200:
        pass
    elif response.status_code == 302:
        location = response.headers['location']
        if location.startswith('http://bibliotik.org/log/'):
            pass
        else:
            return {'success': False, 'location': location}
    else:
        return {'success': False, 'status_code': response.status_code}
    item = BibliotikTorrentPageCache(id=next_id, status_code=response.status_code,
                                     headers=json_dumps(dict(response.headers)),
                                     body=response.text)
    item.save()
    res = {'success': True, 'id': item.id, 'status_code': item.status_code,
           'body_length': len(item.body)}
    if 'location' in response.headers:
        res['location'] = response.headers['location']
    return res
Example #21
def api(request):
    reply = {}
    if request.GET:
        try:
            if not request.GET.has_key("url"):
                raise ValueError("GET argument 'url' required")

            url = request.GET["url"]
            if not newsline.CheckUri(url):
                raise ValueError("'url' not from recognized news site")

            data = newsline.NewsLine(url, is_html=True)
            # json understands dicts
            reply["data"] = map(lambda x: x.toDict(), data)
            #reply["data"] = [{"date":"2008-09-10", "title": "test", "url":"http://localhost/"}, {"date":"2008-09-10", "title": "test", "url":"http://localhost/"}, {"date":"2008-09-10", "title": "test", "url":"http://localhost/"}] 
        except StandardError as e:
            reply["error"] = e.message
    else:
        reply["pong"] = ""

    data = json_dumps(reply)
    print data

    # Wrap response in a function callback (jsonp)
    if request.GET and request.GET.has_key("callback"):
        data = "%s(%s)"%(request.GET["callback"],data)

    response = HttpResponse(data, mimetype="application/json")

    return response
Example #22
 def records():
     ids = {}
     for row in cur:
         nno, sid, seq, subtype, ab, values = row[:6]
         values_ = {}
         for kv in values.split(','):
             k, v = kv.split(':')
             try:
                 v_ = float(v.strip().lstrip('<>'))
             except ValueError:
                 continue
             if k not in values_:
                 values_[k] = []
             values_[k].append(v_)
         if len(values_) == 0:
             warn("skipping sequence '%s', invalid values '%s'" % (sid, values))
             continue
         record = SeqRecord(
             Seq(OrfList(seq, include_stops=False)[0], DNAAlphabet),
             id=sid,
             description=json_dumps({
                 'subtype': '' if subtype is None else subtype,
                 'ab': ab,
                 'values': values_
                 }),
             annotations={'antibody': values_, 'subtype': subtype}
             )
         if sid in ids:
             record.id += str(-ids[sid])
             ids[sid] += 1
         else:
             ids[sid] = 1
         yield record
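The inner loop that turns a "key:value,key:value" string into lists of floats can be exercised on its own; a small sketch of just that parsing step (the sample input is made up):

def parse_values(values):
    # Mirrors the loop above: split pairs, strip comparison prefixes, keep only floats.
    values_ = {}
    for kv in values.split(','):
        k, v = kv.split(':')
        try:
            v_ = float(v.strip().lstrip('<>'))
        except ValueError:
            continue
        values_.setdefault(k, []).append(v_)
    return values_

print(parse_values("IC50:>50,IC50:12.5,ID50:<0.1,note:n/a"))
# {'IC50': [50.0, 12.5], 'ID50': [0.1]}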
Example #23
    def __init__(self, key_to_list_mapping):
        if isinstance(key_to_list_mapping, QueryDict):
            to_iterate, key_to_list_mapping = key_to_list_mapping, {}
            for key, value in to_iterate.iteritems():
                key_to_list_mapping[key.lower()] = [i.lower() for i in to_iterate.getlist(key)]
        if isinstance(key_to_list_mapping, basestring):
            # TODO: XML Request
            from xmltodict import parse as xml_to_dict
            from json import dumps as json_dumps, loads as json_loads
            data = xml_to_dict(key_to_list_mapping)
            dct = json_loads(json_dumps(data))

            def iterate_dict(nested):
                for key, value in nested.iteritems():
                    if isinstance(value, dict):
                        for inner_key, inner_value in iterate_dict(value):
                            yield inner_key, inner_value
                    else:
                        yield key, value

            list_elements = list(iterate_dict(dct))
            k = dct.keys()[0].lower()
            request = k if ':' not in k else k.split(':')[1]
            key_to_list_mapping = {'request': [request]}
            for element in list_elements:
                if element[0].lower() == "@service" or element[0].lower() == "@version":
                    key_to_list_mapping[element[0][1:].lower()] = [element[1].lower()]
                elif not element[0].startswith('@'):
                    e = element[0].lower()
                    if ':' in e:
                        e = e.split(':')[1].lower()
                    key_to_list_mapping[e] = [element[1].lower()]
        super(OWSDict, self).__init__(key_to_list_mapping)
        self._is_valid_ows_request()
        self.coverage_id_formatter()
Example #24
def create(ctx, iface, resource_config, **_):
    '''Creates an AWS IAM Policy'''
    # Build API params
    params = \
        utils.clean_params(
            dict() if not resource_config else resource_config.copy())
    resource_id = \
        utils.get_resource_id(
            ctx.node,
            ctx.instance,
            params.get(RESOURCE_NAME),
            use_instance_id=True
        ) or iface.resource_id
    params[RESOURCE_NAME] = resource_id
    utils.update_resource_id(ctx.instance, resource_id)

    if 'PolicyDocument' in params and \
            isinstance(params['PolicyDocument'], dict):
        params['PolicyDocument'] = json_dumps(params['PolicyDocument'])
    # Actually create the resource
    create_response = iface.create(params)
    resource_id = create_response['Policy']['PolicyName']
    iface.update_resource_id(resource_id)
    utils.update_resource_id(ctx.instance, resource_id)
    utils.update_resource_arn(ctx.instance, create_response['Policy']['Arn'])
Example #25
def json(data):
    """
    Safely JSON-encode an object

    To protect against XSS attacks, HTML special characters (``<``, ``>``, ``&``) and unicode newlines are
    replaced by escaped unicode characters. Django does not escape these characters by default.

    Output of this method is not marked as HTML safe. If you use it inside an HTML attribute, it must be
    escaped like regular data::

        <div data-user="******">

    If you use it inside a ``<script>`` tag, then the output does not need to be escaped, so you can mark it
    as safe::

        <script>
            var user = {{ data|json|safe }};
        </script>

    Escaped characters taken from Rails ``json_escape()`` helper:
    https://github.com/rails/rails/blob/v4.2.5/activesupport/lib/active_support/core_ext/string/output_safety.rb#L60-L113
    """

    unsafe_chars = {'&': '\\u0026',
                    '<': '\\u003c',
                    '>': '\\u003e',
                    '\u2028': '\\u2028',
                    '\u2029': '\\u2029'}

    json_str = json_dumps(data, cls=DjangoJSONEncoder)

    for (c, d) in unsafe_chars.items():
        json_str = json_str.replace(c, d)

    return json_str
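The same replacement table works with the standard json module (DjangoJSONEncoder only adds support for dates, decimals and similar types); a short sketch showing how a closing script tag gets neutralised:

from json import dumps as json_dumps

UNSAFE_CHARS = {'&': '\\u0026', '<': '\\u003c', '>': '\\u003e',
                '\u2028': '\\u2028', '\u2029': '\\u2029'}

def safe_json(data):
    json_str = json_dumps(data)
    for c, replacement in UNSAFE_CHARS.items():
        json_str = json_str.replace(c, replacement)
    return json_str

print(safe_json({"bio": "</script><script>alert(1)</script>"}))
# {"bio": "\u003c/script\u003e\u003cscript\u003ealert(1)\u003c/script\u003e"}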
Example #26
    def update_data(self, **kwargs):
        kwargs = SerializableModelMixin.dict_keys_camel_to_snake(kwargs)
        editable_column_list = self.get_editable_column_names()
        # if not set([key for key in kwargs]) <= set(editable_column_list):
        #     invalid_attr_str = str(set([key for key in kwargs]) - set(editable_column_list))
        #     raise AttributeError(
        #             'object has not update attr' + invalid_attr_str)

        for column_name in editable_column_list:
            if column_name not in kwargs:
                continue

            column = self.__table__.columns.get(column_name)
            value = kwargs.get(column_name)

            if isinstance(column.type, db.DateTime):
                pass
                # print value

            if value is not None and hasattr(self.__class__, '__json_column_names__'):
                if column.name in self.__class__.__json_column_names__:
                    value = json_dumps(value)

            if isinstance(column.type, db.Boolean):
                value = SerializableModelMixin.to_boolean_value(value)

            setattr(self, column_name, value)

        return self
Example #27
def get_mst():

    points = map(tuple, json_loads(request.form.get('data')))

    return json_dumps(
        mst.mst(
            mst.fullmap_of_pointslist(points)))
Example #28
def sfn_cit_ref_to_json(response) -> str:
    """Generate api JSON response containing sfn, cite and ref."""
    return json_dumps({
        'reference_tag': response.ref,
        'citation_template': response.cite,
        'shortened_footnote': response.sfn,
    })
Example #29
def getdescendants(request, code):
    params = {}
    results = {}
    
    language = request.LANGUAGE_CODE.lower()
    if language == 'pt-br':
        language = 'pt'

    for lang in DECS_LANGS:
        params[lang] = urllib.urlencode({
            'tree_id': code or '',
            'lang': lang,
            })

        resource = urllib.urlopen(settings.DECS_SERVICE, params[lang])

        tree = ElementTree()
        tree.parse(resource)

        descendants = tree.findall('decsws_response/tree/descendants/term_list[@lang="%s"]/term' % lang)
        for d in descendants:
            if d.attrib['tree_id'] in results:
                results[ d.attrib['tree_id'] ] += ',"%s":"%s"' % (lang,d.text.capitalize())
            else:
                results[ d.attrib['tree_id'] ] = '"%s":"%s"' % (lang,d.text.capitalize())

    json = '[%s]' % ','.join((JSON_MULTILINGUAL_TERM % (id,desc) for desc,id in results.items()))
    json_response = json_loads(json)
    json_response.sort(key=lambda x: x['fields']['description'][language])
        
    return HttpResponse(json_dumps(json_response), mimetype='application/json')
Example #30
def sendSubmission(address, submission, filepath, filename):

	logger.info("# sending new submission")

	# send data
	try:
		headers = {'Content-type': 'application/json', 'Accept': 'text/plain'}
		res = requests.post(address + POST_SUBMISSIONS_ROUTE, data=json_dumps(submission), headers=headers)
	except Exception as err:
		logger.error(err)
		return

	#upload file
	try:
		json = res.json()
		submissionId = json['_id']
	except Exception:
		logger.error('Could not get submissionId for uploading image file.')
		return

	try:
		#headers = {'Content-type': 'application/json', 'Accept': 'text/plain'}
		res = requests.post(address + POST_FILE_ROUTE + submissionId, files={ 'file' : (filename, open( filepath, 'rb'), "image/jpeg" ) })
	except Exception as err:
		logger.error(err)
		return

	logger.info("# submission uploaded")
	
	# delete file
	if os.path.isfile(filepath):
		os.remove(filepath)
Example #31
                extra.x1 = extra.x0
                extra.y1 = extra.y0
                first = False

            for p in o['points']:
                x = p[0]
                y = p[1]

                if x < extra.x0: extra.x0 = x
                if y < extra.y0: extra.y0 = y
                if x > extra.x1: extra.x1 = x
                if y > extra.y1: extra.y1 = y

    # apply translation and scaling to all points, fontsizes, radiuses, etc...
    for o in objects:
        if o['type'] == 'polyline':
            for p in o['points']:
                p[0] -= extra.x0
                p[1] -= extra.y0
        elif o['type'] == 'text':
            o['position'][0] -= extra.x0
            o['position'][1] -= extra.y0

    doc = {}
    doc['info'] = {}
    doc['info']['width'] = extra.x1 - extra.x0
    doc['info']['height'] = extra.y1 - extra.y0
    doc['objects'] = objects

    print(json_dumps(doc))
Example #32
def log_json(data):
    logger.info(json_dumps(data, indent=4, sort_keys=True))
Example #33
def print_colors(config: Config, colors: Iterable[Color]):
    if config.always_output_json:
        print(json_dumps([c.get_dict(config.json_keys) for c in colors]))
    else:
        for color in colors:
            print(format_list_view(config, color))
Example #34
    async def login(self):
        """Log in HydroQuebec website.

        Hydroquebec is using ForgeRock solution for authentication.
        """
        # Reset cache
        self.reset()

        # Get http session
        self._get_httpsession()
        self.logger.info("Log in using %s", self.username)

        # Get the callback template
        headers = {
            "Content-Type": "application/json",
            "X-NoSession": "true",
            "X-Password": "******",
            "X-Requested-With": "XMLHttpRequest",
            "X-Username": "******"
        }
        res = await self.http_request(LOGIN_URL_3, "post", headers=headers)
        data = await res.json()

        # Check if we are already logged in
        if 'tokenId' not in data:
            # Fill the callback template
            data['callbacks'][0]['input'][0]['value'] = self.username
            data['callbacks'][1]['input'][0]['value'] = self.password

            data = json_dumps(data)

            # TODO catch error
            try:
                res = await self.http_request(LOGIN_URL_3,
                                              "post",
                                              data=data,
                                              headers=headers)
            except PyHydroQuebecHTTPError:
                self.logger.critical(
                    'Unable to connect. Check your credentials')
                return
            json_res = await res.json()

            if 'tokenId' not in json_res:
                self.logger.error(
                    "Unable to authenticate."
                    "You can retry and/or check your credentials.")
                return

        # Find settings for the authorize
        res = await self.http_request(LOGIN_URL_4, "get")

        sec_config = await res.json()
        oauth2_config = sec_config['oauth2'][0]

        client_id = oauth2_config['clientId']
        redirect_uri = oauth2_config['redirectUri']
        scope = oauth2_config['scope']
        # Generate some random strings
        state = "".join(
            random.choice(string.digits + string.ascii_letters)
            for i in range(40))
        nonce = state
        # TODO find where this setting comes from
        response_type = "id_token token"

        # Get bearer token
        params = {
            "response_type": response_type,
            "client_id": client_id,
            "state": state,
            "redirect_uri": redirect_uri,
            "scope": scope,
            "nonce": nonce,
            "locale": "en"
        }
        res = await self.http_request(LOGIN_URL_5,
                                      "get",
                                      params=params,
                                      status=302)

        # Go to Callback URL
        callback_url = res.headers['Location']
        await self.http_request(callback_url, "get")

        raw_callback_params = callback_url.split('/callback#',
                                                 1)[-1].split("&")
        callback_params = dict([p.split("=", 1) for p in raw_callback_params])

        # Check if we have the access token
        if 'access_token' not in callback_params or not callback_params[
                'access_token']:
            self.logger.critical("Access token not found")
            return

        self.access_token = callback_params['access_token']

        headers = {
            "Content-Type": "application/json",
            "Authorization": "Bearer " + self.access_token
        }
        await self.http_request(LOGIN_URL_6, "get", headers=headers)

        ####
        # Get customers
        self.logger.info("fetching customers")

        res = await self.http_request(LOGIN_URL_7, "get", headers=headers)
        json_res = await res.json()

        for account in json_res:
            account_id = account['noPartenaireDemandeur']
            customer_id = account['noPartenaireTitulaire']

            customer_logger = self.logger.getChild('customer')
            customer = Customer(self, account_id, customer_id, self._timeout,
                                customer_logger)
            self._customers.append(customer)
            await customer.fetch_summary()
            if customer.contract_id is None:
                del self._customers[-1]
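The access token is recovered from the fragment of the OAuth callback URL. A standalone sketch of that fragment parsing using the same split logic (the URL below is made up):

callback_url = ("https://example.com/callback#"
                "access_token=abc123&token_type=Bearer&state=xyz")

raw_callback_params = callback_url.split('/callback#', 1)[-1].split("&")
callback_params = dict(p.split("=", 1) for p in raw_callback_params)

print(callback_params.get('access_token'))  # abc123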
Example #35
        if (not self._config["ep"]["socket_emit"]
                or isinstance(message, bytes) or not self.is_ready()):
            return

        await self.wss.broadcast(message)

        channel = self._get_socket_channel()

        if channel is None:
            return

        await asyncio.sleep(1)

        if (message["t"] == "MESSAGE_CREATE" and
            (message_id := int(message["d"]["id"])) in self.__socket_noloop):
            self.__socket_noloop.remove(message_id)
            return

        data = json_dumps(message)

        if len(data) >= 1900:
            return  # TODO: Impl paging objects

        try:
            message = await channel.send(codeblock(data, style="json"))
        except Exception as err:
            self.logger.error(err)
            raise
        else:
            self.__socket_noloop.add(message.id)
Example #36
def fprint(content):
    print json_dumps(content, indent=1)
Example #37
def do_emit__json():
    dump_json = json_dumps(result_native_type, indent=3)
Example #38
def do_emit__json_ordered():
    dump_json = json_dumps(result_ordered_native_type, indent=3)
Example #39
def send_notification(notification, endpoint, client_encoded_public_key, auth):
    # Parse endpoint
    endpoint_url = urlparse(endpoint)

    # Get the auth JWT IDing server
    jwt = create_webpush_jwt(endpoint_url)

    payload = json_dumps(notification.to_push_json()).encode('utf8')

    server_public_key = get_public_key()
    client_public_key = urlsafe_b64decode(client_encoded_public_key)

    # Encrypt the payload
    encrypted_payload, local_public_key, salt = encrypt_payload(
        payload, client_public_key, auth)
    encoded_salt = urlsafe_b64encode(salt).decode('utf8')

    # Get pub key
    server_encoded_public_key = urlsafe_b64encode(server_public_key).decode(
        'utf8')
    encoded_local_key = urlsafe_b64encode(local_public_key).decode('utf8')

    headers = {
        'Authorization': f"WebPush {jwt.strip('=')}",
        'Encryption': f"salt={encoded_salt.strip('=')}",
        'Content-Length': str(len(encrypted_payload)),
        'Content-Type': "application/octet-stream",
        'Content-Encoding': "aesgcm",
        'Crypto-Key':
        f"p256ecdsa={server_encoded_public_key.strip('=')};dh={encoded_local_key.strip('=')}",
        'TTL': str(int(WEBPUSH_EXPIRATION)),
        'Urgency': "normal"
    }

    # Display topic if applicable
    if notification.is_overwriting():
        headers[
            'Topic'] = f"{notification.get_target_descriptor()}-{notification.source_id}"

    r = requests.post(endpoint, data=encrypted_payload, headers=headers)

    # If the status code is NOT 2xx
    # then we'll report the error
    if r.status_code == 410:
        # Code of 410 means the user has unsubscribed from notifs. This means we
        # must remove the device
        PushDevice.query.\
            filter_by(endpoint=endpoint).\
            delete()

        db.session.commit()
    elif r.status_code // 100 != 2:
        try:
            rejection_response = r.text
        except:
            rejection_response = 'Error reading rejection response'

        if bugsnag.configuration.api_key is not None:
            bugsnag.notify(
                Exception("Web Push dispatch error"),
                meta_data={
                    'webpush_request': {
                        'endpoint':
                        f'{endpoint_url.scheme}://{endpoint_url.netloc}',
                        'status_code': r.status_code
                    },
                    'webpush_response': {
                        'response': rejection_response
                    }
                })

        server.logger.error(
            f'Notification (Web Push) rejected {notification.uuid} -> {endpoint_url.netloc}:\n{rejection_response}'
        )
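Browsers deliver subscription keys as unpadded base64url, while urlsafe_b64decode requires correct padding, so client_encoded_public_key may need its '=' padding restored first. A small helper along these lines is common (the name is illustrative, not part of the code above):

from base64 import urlsafe_b64decode, urlsafe_b64encode

def b64url_decode(value: str) -> bytes:
    # Restore the '=' padding that Web Push clients strip off.
    return urlsafe_b64decode(value + "=" * (-len(value) % 4))

key = urlsafe_b64encode(b"\x01" * 65).decode().rstrip("=")
assert b64url_decode(key) == b"\x01" * 65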
Example #40
 def __init__(self, params, demand=0):
     self.id = hash_dict(params).hexdigest()
     self.params = params
     self.params_string = json_dumps(self.params)
     self.demand = demand
     self.last_occurence = None
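hash_dict is not shown in this example; one plausible implementation, consistent with how params_string is built just below it, hashes a canonical JSON rendering of the dict. This is an assumption about the helper, not its actual source:

import hashlib
from json import dumps as json_dumps

def hash_dict(params):
    # Assumed helper: stable hash over a canonical, key-sorted JSON rendering.
    canonical = json_dumps(params, sort_keys=True, separators=(',', ':'))
    return hashlib.sha1(canonical.encode('utf-8'))

print(hash_dict({"track": "python", "lang": "en"}).hexdigest())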
Example #41
def _parseConfigfileLayer(layer_dict, config, dirpath):
    """ Used by parseConfigfile() to parse just the layer parts of a config.
    """
    projection = layer_dict.get('projection', 'spherical mercator')
    projection = Geography.getProjectionByName(projection)
    
    #
    # Add cache lock timeouts and preview arguments
    #
    
    layer_kwargs = {}
    
    if 'cache lifespan' in layer_dict:
        layer_kwargs['cache_lifespan'] = int(layer_dict['cache lifespan'])
    
    if 'stale lock timeout' in layer_dict:
        layer_kwargs['stale_lock_timeout'] = int(layer_dict['stale lock timeout'])
    
    if 'write cache' in layer_dict:
        layer_kwargs['write_cache'] = bool(layer_dict['write cache'])
    
    if 'allowed origin' in layer_dict:
        layer_kwargs['allowed_origin'] = str(layer_dict['allowed origin'])
    
    if 'maximum cache age' in layer_dict:
        layer_kwargs['max_cache_age'] = int(layer_dict['maximum cache age'])
    
    if 'redirects' in layer_dict:
        layer_kwargs['redirects'] = dict(layer_dict['redirects'])
    
    if 'tile height' in layer_dict:
        layer_kwargs['tile_height'] = int(layer_dict['tile height'])
    
    if 'preview' in layer_dict:
        preview_dict = layer_dict['preview']
        
        for (key, func) in zip(('lat', 'lon', 'zoom', 'ext'), (float, float, int, str)):
            if key in preview_dict:
                layer_kwargs['preview_' + key] = func(preview_dict[key])
    
    #
    # Do the bounds
    #
    
    if 'bounds' in layer_dict:
        if type(layer_dict['bounds']) is dict:
            layer_kwargs['bounds'] = _parseLayerBounds(layer_dict['bounds'], projection)
    
        elif type(layer_dict['bounds']) is list:
            bounds = [_parseLayerBounds(b, projection) for b in layer_dict['bounds']]
            layer_kwargs['bounds'] = BoundsList(bounds)
    
        else:
            raise Core.KnownUnknown('Layer bounds must be a dictionary, not: ' + json_dumps(layer_dict['bounds']))
    
    #
    # Do the metatile
    #

    meta_dict = layer_dict.get('metatile', {})
    metatile_kwargs = {}

    for k in ('buffer', 'rows', 'columns'):
        if k in meta_dict:
            metatile_kwargs[k] = int(meta_dict[k])
    
    metatile = Core.Metatile(**metatile_kwargs)
    
    #
    # Do the per-format options
    #
    
    jpeg_kwargs = {}
    png_kwargs = {}

    if 'jpeg options' in layer_dict:
        jpeg_kwargs = dict([(str(k), v) for (k, v) in layer_dict['jpeg options'].items()])

    if 'png options' in layer_dict:
        png_kwargs = dict([(str(k), v) for (k, v) in layer_dict['png options'].items()])

    #
    # Do the provider
    #

    provider_dict = layer_dict['provider']

    if 'name' in provider_dict:
        _class = Providers.getProviderByName(provider_dict['name'])
        provider_kwargs = _class.prepareKeywordArgs(provider_dict)
        
    elif 'class' in provider_dict:
        _class = loadClassPath(provider_dict['class'])
        provider_kwargs = provider_dict.get('kwargs', {})
        provider_kwargs = dict( [(str(k), v) for (k, v) in provider_kwargs.items()] )

    else:
        raise Exception('Missing required provider name or class: %s' % json_dumps(provider_dict))
    
    #
    # Finish him!
    #

    layer = Core.Layer(config, projection, metatile, **layer_kwargs)
    layer.provider = _class(layer, **provider_kwargs)
    layer.setSaveOptionsJPEG(**jpeg_kwargs)
    layer.setSaveOptionsPNG(**png_kwargs)
    
    return layer
Example #42
 def json(self, indent=None, separators=None, **kwargs):
     return json_dumps(self, indent=indent, separators=separators, cls=DumpEncoder, **kwargs)
Example #43
def requestHandler2(config_hint, path_info, query_string=None, script_name=''):
    """ Generate a set of headers and response body for a given request.

        TODO: Replace requestHandler() with this function in TileStache 2.0.0.

        Requires a configuration and PATH_INFO (e.g. "/example/0/0/0.png").

        Config_hint parameter can be a path string for a JSON configuration file
        or a configuration object with 'cache', 'layers', and 'dirpath' properties.

        Query string is optional, currently used for JSON callbacks.

        Calls Layer.getTileResponse() to render actual tiles, and getPreview() to render preview.html.
    """
    headers = Headers([])

    try:
        # ensure that path_info is at least a single "/"
        path_info = '/' + (path_info or '').lstrip('/')

        layer = requestLayer(config_hint, path_info)
        query = parse_qs(query_string or '')
        try:
            callback = query['callback'][0]
        except KeyError:
            callback = None

        #
        # Special case for index page.
        #
        if path_info == '/':
            mimetype, content = getattr(
                layer.config, 'index',
                ('text/plain', 'TileStache says hello.'))
            return 200, Headers([('Content-Type', mimetype)]), content
        elif path_info == '/health':
            data = {
                'ok': True,
                'balancer': {
                    'status': 'connected'
                },
                'version': '1.0',
                'host': 'host',
                'uptime': 1,
                'uptime_s': 1
            }

            return 200, Headers([('Content-Type', 'application/json')
                                 ]), json_dumps(data)

        coord, extension = splitPathInfo(path_info)[1:]

        if extension == 'html' and coord is None:
            status_code, headers, content = getPreview(layer)

        elif extension.lower() in layer.redirects:
            other_extension = layer.redirects[extension.lower()]

            redirect_uri = script_name
            redirect_uri += mergePathInfo(layer.name(), coord, other_extension)

            if query_string:
                redirect_uri += '?' + query_string

            headers['Location'] = redirect_uri
            headers['Content-Type'] = 'text/plain'

            return 302, headers, 'You are being redirected to %s\n' % redirect_uri

        else:
            status_code, headers, content = layer.getTileResponse(
                coord, extension)

        if layer.allowed_origin:
            headers.setdefault('Access-Control-Allow-Origin',
                               layer.allowed_origin)

        if callback and 'json' in headers['Content-Type']:
            headers['Content-Type'] = 'application/javascript; charset=utf-8'
            content = '%s(%s)' % (callback, content)

        if layer.max_cache_age is not None:
            expires = datetime.utcnow() + timedelta(
                seconds=layer.max_cache_age)
            headers.setdefault('Expires',
                               expires.strftime('%a, %d %b %Y %H:%M:%S GMT'))
            headers.setdefault('Cache-Control',
                               'public, max-age=%d' % layer.max_cache_age)

    except Core.KnownUnknown as e:
        out = StringIO()

        print >> out, 'Known unknown!'
        print >> out, e
        print >> out, ''
        print >> out, '\n'.join(Core._rummy())

        headers['Content-Type'] = 'text/plain'
        status_code, content = 500, out.getvalue()

    return status_code, headers, content
Example #44
def hash_rows(stop_flag):
    latest_hashed = get_last_sent_id()

    print(latest_hashed)

    have_new_rows = True
    hashed_rows = 0

    model = get_vehicle_model()
    serializer = get_vehicle_serializer()

    while not stop_flag[0] and have_new_rows:
        latest_id = get_latest_id(model)
        new_rows = get_new_rows(model, latest_hashed,
                                settings.max_size_hashed_batch)
        records = []

        if not len(new_rows):
            have_new_rows = False

        for new_row in new_rows:
            if new_row['vin'] is None:
                continue

            if len(new_row['vin']) > 17:
                continue

            # if new_row['create_date'] > datetime.now() - timedelta(days=3):
            #     continue

            if not re.match(r'^[a-zA-Z0-9\-]+$', new_row['vin']):
                continue

            latest_hashed = new_row[settings.vehicle_model_primary_key]
            hashed_rows += 1

            records.append({
                'uuid':
                new_row[settings.vehicle_model_primary_key],
                'vin':
                new_row[settings.vehicle_model_vin_key],
                'standard_version':
                settings.vindb_hash_functions,
                'hash':
                hash_functions[settings.vindb_hash_functions](
                    serializer(new_row))
            })

        if len(records):
            blockchain = VinChain(node=settings.vinchain_node,
                                  blocking=True,
                                  debug=False,
                                  known_chains={
                                      'VIN': {
                                          'chain_id':
                                          settings.vinchain_chain_id,
                                          'core_symbol': 'VIN',
                                          'prefix': 'VIN'
                                      },
                                  })
            blockchain.wallet.unlock(settings.vinchain_wallet_password)

            payload = {
                'signature':
                blockchain.get_message(
                    datetime.now().strftime('%Y-%m-%dT%H:%M:%S')).sign(
                        settings.vindb_hasher if settings.
                        vindb_use_hasher else settings.vindb_data_source),
                'data_source':
                settings.vindb_data_source,
                'hashes':
                records
            }

            if settings.vindb_use_hasher:
                payload['hasher'] = settings.vindb_hasher

            start_time = time.time()

            response = requests_post(
                '{}/vindb/vin_records/create/'.format(settings.vindb_host),
                data=json_dumps(payload),
                headers={'Content-Type': 'application/json'},
                timeout=120)

            extra = {
                'data_source': settings.vindb_data_source,
                'hash_functions': settings.vindb_hash_functions,
                'latest_hashed_id': latest_hashed,
                'latest_id': latest_id,
                'success': response.status_code == 201,
            }

            if response.status_code != 201:  # error
                extra['result'] = json_dumps({
                    'status_code': response.status_code,
                    'response': response.text
                })
                _logger.error(
                    '%s:  %d rows processed unsuccessfully (ids %s-%s). Status code: %s. Error: "%s"',
                    settings.app_name,
                    len(records),
                    records[0]['uuid'],
                    records[-1]['uuid'],
                    response.status_code,
                    response.text,
                    extra=extra)
                raise Exception(
                    'Rows have not been stored in DB. Status code: {}. Error: "{}"'
                    .format(response.status_code, response.text))

            # success
            hashed_records = response.json()['records']
            # check if all records stored in DB
            rs = len(hashed_records) == len(records)
            extra.update({
                'success':
                rs,
                'hashed_rows':
                len(hashed_records),
                'hashed_rows_ids': [r['uuid'] for r in hashed_records],
                'tried_hash_rows_ids':
                [r['uuid'] for r in records] if not rs else None,
                'result':
                json_dumps({'status_code': response.status_code}),
            })
            if rs:
                _logger.info('%s: %d rows processed successfully (ids %s-%s)',
                             settings.app_name,
                             len(hashed_records),
                             hashed_records[0]['uuid'],
                             hashed_records[-1]['uuid'],
                             extra=extra)
            else:
                if len(hashed_records):
                    _logger.info(
                        '%s: %d of %d rows processed successfully (ids %s-%s)',
                        settings.app_name,
                        len(hashed_records),
                        len(records),
                        hashed_records[0]['uuid'],
                        hashed_records[-1]['uuid'],
                        extra=extra)
                # _logger.error('%s: Not all rows have been stored in DB. '
                #               'Only %d from %d rows processed successfully (ids %s-%s)',
                #               settings.app_name, len(hashed_records), len(records),
                #               hashed_records[0]['uuid'], hashed_records[-1]['uuid'], extra=extra)
                # raise Exception('Not all rows have been created. Status code: {}. Hashed rows ids: "{}". '
                #                 'Tried to hash rows ids: "{}"'.format(response.status_code, extra['hashed_rows_ids'],
                #                                                       extra['tried_hash_rows_ids']))

            _logger.info('--- %s seconds ---', (time.time() - start_time),
                         extra=extra)

    return hashed_rows
Example #45
def save(slotname, extra_info='', mutate_flag=False):
    """
    :doc: loadsave
    :args: (filename, extra_info='')

    Saves the game state to a save slot.

    `filename`
        A string giving the name of a save slot. Despite the variable name,
        this corresponds only loosely to filenames.

    `extra_info`
        An additional string that should be saved to the save file. Usually,
        this is the value of :var:`save_name`.

    :func:`renpy.take_screenshot` should be called before this function.
    """

    # Update persistent file, if needed. This is for the web and mobile
    # platforms, to make sure the persistent file is updated whenever the
    # game is saved. (But not auto-saved, for performance reasons.)
    if not mutate_flag:
        renpy.persistent.update()

    if mutate_flag:
        renpy.revertable.mutate_flag = False

    roots = renpy.game.log.freeze(None)

    if renpy.config.save_dump:
        save_dump(roots, renpy.game.log)

    logf = io.BytesIO()
    try:
        dump((roots, renpy.game.log), logf)
    except Exception:

        t, e, tb = sys.exc_info()

        if mutate_flag:
            reraise(t, e, tb)

        try:
            bad = find_bad_reduction(roots, renpy.game.log)
        except Exception:
            reraise(t, e, tb)

        if bad is None:
            reraise(t, e, tb)

        if e.args:
            e.args = (e.args[0] + ' (perhaps {})'.format(bad), ) + e.args[1:]

        reraise(t, e, tb)

    if mutate_flag and renpy.revertable.mutate_flag:
        raise SaveAbort()

    screenshot = renpy.game.interface.get_screenshot()

    json = {
        "_save_name": extra_info,
        "_renpy_version": list(renpy.version_tuple),
        "_version": renpy.config.version
    }

    for i in renpy.config.save_json_callbacks:
        i(json)

    json = json_dumps(json)

    sr = SaveRecord(screenshot, extra_info, json, logf.getvalue())
    location.save(slotname, sr)

    location.scan()
    clear_slot(slotname)
Example #46
    def rootPage(self, *args, **kwargs):
        self.response.content_type = 'application/json'  # normal responses forced to be json

        common_headers = self.common_headers()
        if common_headers:
            for kk, vv in common_headers.items():
                self.response.add_header(kk, vv)

        if self.request.method == 'OPTIONS':
            return '{}'
        try:
            method_name = 'json_%s' % args[0] if args else 'index'
            try:
                # getPublicMethod allow to manage tags
                method = self.getPublicMethod('rpc', method_name)
            except (GnrUserNotAllowed, GnrBasicAuthenticationError) as err:
                if self._debug:
                    raise
                return JsonError(code=401, msg=err, headers=common_headers)

            if method is None:
                return JsonError(code=501,
                                 msg="Method method '%s' does not exist" %
                                 method_name[len("json_"):],
                                 headers=common_headers)
            try:
                body = self.request._request.body
                if not body:  # handles both an empty str and empty bytes body
                    #print("NULL")
                    body = "{}"
                body_json = json_loads(body)
            except Exception as err:
                if self._debug:
                    raise
                raise JsonError(
                    code=400,
                    msg="Unable to deserialize input data "
                        "(malformed json request?)\n%s" % err,
                    headers=common_headers)
            result = method(body_json)
            if self.response.content_type == 'application/json':
                jresult = json_dumps(result,
                                     default=default)  #, cls=_DecimalEncoder)
                return jresult
            else:
                # for other response types
                return result

            # # trying to limit response length
            # max_mb = 4
            # if len(jresult)>1024*1024*max_mb:
            #     # max 5mb
            #     raise exc.HTTPInternalServerError('Response is too big (>%sMb)' % max_mb)

        except exc.WSGIHTTPException as err:
            if self._debug:
                raise
            # we can raise every webob.exc HTTP error
            #print("ERR1", err, file=sys.stderr)
            return JsonError(code=err.code,
                             msg=err.body,
                             headers=common_headers)
        except JsonError as err:
            if self._debug:
                raise
            return err
        except Exception as err:
            if self._debug:
                raise
            return JsonError(code=500, msg=err, headers=common_headers)
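Given the dispatch above ('json_' plus the first path argument), a handler only needs to accept the parsed body and return something JSON-serializable; the method below is a hypothetical illustration, not part of the original class:

    def json_echo(self, body_json):
        # Resolved for a request whose first path argument is "echo";
        # body_json is the dict parsed from the request body by rootPage.
        return {"echo": body_json}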
Example #47
0
def conver_to_file(fn, data):
    # Write the given object to fn as JSON ("data" avoids shadowing the dict builtin).
    with open(fn, 'w+') as fd:
        fd.write(json_dumps(data))
    print('Successfully wrote file:', fn)
Example #48
0
def environment_properties(request, template_name='environment/property.html'):
    """
    Edit environment properties and the values that belong to them.
    """

    # Initialize the ajax response
    ajax_response = {'rc': 0, 'response': 'ok'}
    message = ''

    has_perm = request.user.has_perm
    user_action = request.GET.get('action')

    # Action: create a new property
    if user_action == 'add':
        if not has_perm('management.add_envproperty'):
            return JsonResponse({'rc': 1, 'response': 'Permission denied'})

        property_name = request.GET.get('name')

        if not property_name:
            return JsonResponse({
                'rc': 1,
                'response': 'Property name is required'
            })

        if EnvProperty.objects.filter(name=property_name).exists():
            resp_msg = "Environment property named '{}' already exists, " \
                       "please select another name.".format(property_name)
            return JsonResponse({'rc': 1, 'response': resp_msg})

        new_property = EnvProperty.objects.create(name=property_name)

        return JsonResponse({
            'rc': 0,
            'response': 'ok',
            'id': new_property.pk,
            'name': new_property.name
        })

    # Action: edit an existing property
    if user_action == 'edit':
        if not has_perm('management.change_envproperty'):
            return JsonResponse({'rc': 1, 'response': 'Permission denied'})

        if not request.GET.get('id'):
            return JsonResponse({'rc': 1, 'response': 'ID is required'})

        try:
            property_id = request.GET['id']
            env_property = EnvProperty.objects.get(id=int(property_id))
        except ValueError:
            return JsonResponse({
                'rc': 1,
                'response': 'ID {} is not a valid integer.'.format(property_id)
            })
        except EnvProperty.DoesNotExist:
            return JsonResponse({'rc': 1, 'response': 'ID does not exist.'})

        new_name = request.GET.get('name', env_property.name)
        env_property.name = new_name
        try:
            env_property.save(update_fields=['name'])
        except Exception:
            return JsonResponse({'rc': 1, 'response': 'Cannot save property.'})

        return JsonResponse({'rc': 0, 'response': 'ok'})

    # Action: modify property status (activate/deactivate)
    if user_action == 'modify':
        if not has_perm('management.change_envproperty'):
            message = 'Permission denied'

        property_ids = request.GET.getlist('id')

        if has_perm('management.change_envproperty') and property_ids:
            env_properties = EnvProperty.objects.filter(id__in=property_ids)

            if request.GET.get('status') in ['0', '1']:
                for env_property in env_properties:
                    env_property.is_active = int(request.GET['status'])
                    env_property.save()

                property_values = "', '".join(
                    env_properties.values_list('name', flat=True))
                message = "Modify test properties status '%s' successfully." % property_values

                if not env_property.is_active:
                    EnvGroupPropertyMap.objects.filter(
                        property__id__in=property_ids).delete()
            else:
                message = 'Argument illegal'

    if request.is_ajax():
        ajax_response['rc'] = 1
        ajax_response['response'] = 'Unknown action'
        return HttpResponse(json_dumps(ajax_response))

    context_data = {
        'message': message,
        'properties': EnvProperty.objects.all().order_by('-is_active')
    }
    return render(request, template_name, context_data)
Example #49
0
def pretty_format(json):
    return json_dumps(json, indent=2)
Example #50
0
def build_topo_from_json(tgen, topo):
    """
    Reads configuration from JSON file. Adds routers, creates interface
    names dynamically and link routers as defined in JSON to create
    topology. Assigns IPs dynamically to all interfaces of each router.

    * `tgen`: Topogen object
    * `topo`: json file data
    """

    ROUTER_LIST = sorted(topo["routers"].keys(),
                         key=lambda x: int(re_search("\d+", x).group(0)))

    # ROUTER_LIST is copied once here; appending inside the loop would duplicate every router.
    listRouters = ROUTER_LIST[:]
    for routerN in ROUTER_LIST:
        logger.info("Topo: Add router {}".format(routerN))
        tgen.add_router(routerN)

    if "ipv4base" in topo:
        ipv4Next = ipaddr.IPv4Address(topo["link_ip_start"]["ipv4"])
        ipv4Step = 2**(32 - topo["link_ip_start"]["v4mask"])
        if topo["link_ip_start"]["v4mask"] < 32:
            ipv4Next += 1
    if "ipv6base" in topo:
        ipv6Next = ipaddr.IPv6Address(topo["link_ip_start"]["ipv6"])
        ipv6Step = 2**(128 - topo["link_ip_start"]["v6mask"])
        if topo["link_ip_start"]["v6mask"] < 127:
            ipv6Next += 1
    for router in listRouters:
        topo["routers"][router]["nextIfname"] = 0

    while listRouters != []:
        curRouter = listRouters.pop(0)
        # Physical Interfaces
        if "links" in topo["routers"][curRouter]:

            def link_sort(x):
                if x == "lo":
                    return 0
                elif "link" in x:
                    return int(x.split("-link")[1])
                else:
                    return int(re_search("\d+", x).group(0))

            for destRouterLink, data in sorted(
                    topo["routers"][curRouter]["links"].iteritems(),
                    key=lambda x: link_sort(x[0]),
            ):
                currRouter_lo_json = topo["routers"][curRouter]["links"][
                    destRouterLink]
                # Loopback interfaces
                if "type" in data and data["type"] == "loopback":
                    if ("ipv4" in currRouter_lo_json
                            and currRouter_lo_json["ipv4"] == "auto"):
                        currRouter_lo_json["ipv4"] = "{}{}.{}/{}".format(
                            topo["lo_prefix"]["ipv4"],
                            number_to_row(curRouter),
                            number_to_column(curRouter),
                            topo["lo_prefix"]["v4mask"],
                        )
                    if ("ipv6" in currRouter_lo_json
                            and currRouter_lo_json["ipv6"] == "auto"):
                        currRouter_lo_json["ipv6"] = "{}{}:{}/{}".format(
                            topo["lo_prefix"]["ipv6"],
                            number_to_row(curRouter),
                            number_to_column(curRouter),
                            topo["lo_prefix"]["v6mask"],
                        )

                if "-" in destRouterLink:
                    # Splitting and storing destRouterLink data in tempList
                    tempList = destRouterLink.split("-")

                    # destRouter
                    destRouter = tempList.pop(0)

                    # Current Router Link
                    tempList.insert(0, curRouter)
                    curRouterLink = "-".join(tempList)
                else:
                    destRouter = destRouterLink
                    curRouterLink = curRouter

                if destRouter in listRouters:
                    currRouter_link_json = topo["routers"][curRouter]["links"][
                        destRouterLink]
                    destRouter_link_json = topo["routers"][destRouter][
                        "links"][curRouterLink]

                    # Assigning name to interfaces
                    currRouter_link_json["interface"] = "{}-{}-eth{}".format(
                        curRouter, destRouter,
                        topo["routers"][curRouter]["nextIfname"])
                    destRouter_link_json["interface"] = "{}-{}-eth{}".format(
                        destRouter, curRouter,
                        topo["routers"][destRouter]["nextIfname"])

                    topo["routers"][curRouter]["nextIfname"] += 1
                    topo["routers"][destRouter]["nextIfname"] += 1

                    # Linking routers to each other as defined in JSON file
                    tgen.gears[curRouter].add_link(
                        tgen.gears[destRouter],
                        topo["routers"][curRouter]["links"][destRouterLink]
                        ["interface"],
                        topo["routers"][destRouter]["links"][curRouterLink]
                        ["interface"],
                    )

                    # IPv4
                    if "ipv4" in currRouter_link_json:
                        if currRouter_link_json["ipv4"] == "auto":
                            currRouter_link_json["ipv4"] = "{}/{}".format(
                                ipv4Next, topo["link_ip_start"]["v4mask"])
                            destRouter_link_json["ipv4"] = "{}/{}".format(
                                ipv4Next + 1, topo["link_ip_start"]["v4mask"])
                            ipv4Next += ipv4Step
                    # IPv6
                    if "ipv6" in currRouter_link_json:
                        if currRouter_link_json["ipv6"] == "auto":
                            currRouter_link_json["ipv6"] = "{}/{}".format(
                                ipv6Next, topo["link_ip_start"]["v6mask"])
                            destRouter_link_json["ipv6"] = "{}/{}".format(
                                ipv6Next + 1, topo["link_ip_start"]["v6mask"])
                            ipv6Next = ipaddr.IPv6Address(
                                int(ipv6Next) + ipv6Step)

            logger.debug(
                "Generated link data for router: %s\n%s",
                curRouter,
                json_dumps(topo["routers"][curRouter]["links"],
                           indent=4,
                           sort_keys=True),
            )
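A minimal sketch of the topology dictionary this function consumes, reconstructed only from the keys accessed above; the router names, prefixes, and masks are illustrative values:

# Illustrative topology data (values are made up; only the key names come from the code above):
example_topo = {
    "ipv4base": "10.0.0.0",
    "ipv6base": "fd00::",
    "link_ip_start": {"ipv4": "10.0.0.0", "v4mask": 30,
                      "ipv6": "fd00::", "v6mask": 64},
    "lo_prefix": {"ipv4": "1.0.", "v4mask": 32,
                  "ipv6": "2001:db8:f::", "v6mask": 128},
    "routers": {
        "r1": {"links": {"lo": {"type": "loopback", "ipv4": "auto"},
                         "r2": {"ipv4": "auto"}}},
        "r2": {"links": {"lo": {"type": "loopback", "ipv4": "auto"},
                         "r1": {"ipv4": "auto"}}},
    },
}
# build_topo_from_json(tgen, example_topo)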
Example #51
0
def dumps(data):
    """helper for jinja2"""
    return json_dumps(data, sort_keys=True, indent=2)
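One plausible way to wire this helper into a Jinja2 environment, shown as a sketch; the filter name "to_json" is illustrative:

from jinja2 import Environment

env = Environment()
env.filters['to_json'] = dumps  # template usage: {{ payload | to_json }}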
Example #52
0
    async def connection(self, websocket, _):
        print("Initializing overlay...")
        self.active = self.active + 1

        #
        # Reset overlay to Idle on initialization
        #
        if not self.sendQueue:
            # Get default mascot image
            mascot_idle_image = self.settings.mascotImages['Idle']['Image']
            if not path.isfile(mascot_idle_image):
                mascot_idle_image = ""

            # Load Idle pose mapping if available
            if 'Idle' in self.settings.PoseMapping:
                # Reset Image to Idle
                if 'Image' in self.settings.PoseMapping['Idle'] and \
                        self.settings.PoseMapping['Idle']['Image'] in self.settings.mascotImages:
                    tmp = self.settings.mascotImages[self.settings.PoseMapping['Idle']['Image']]['Image']
                    if path.isfile(tmp):
                        mascot_idle_image = tmp

                # Reset Nanoleaf to Idle
                if 'Nanoleaf' in self.settings.PoseMapping['Idle']:
                    self.nanoleaf.scene(self.settings.PoseMapping['Idle']['Nanoleaf'])

                # Reset Hue to Idle
                if 'Hue' in self.settings.PoseMapping['Idle']:
                    for device in self.settings.PoseMapping['Idle']['Hue']:
                        pose_light = self.settings.PoseMapping['Idle']['Hue'][device]
                        if 'Brightness' in pose_light and \
                                pose_light['Brightness'] >= 1 and \
                                'Color' in pose_light and 6 <= len(pose_light['Color']) <= 7:
                            self.hue.state(device=device,
                                           bri=pose_light['Brightness'],
                                           col=pose_light['Color'])

                # Reset Yeelight to Idle
                if 'Yeelight' in self.settings.PoseMapping['Idle']:
                    for device in self.settings.PoseMapping['Idle']['Yeelight']:
                        pose_light = self.settings.PoseMapping['Idle']['Yeelight'][device]
                        if 'Brightness' in pose_light and \
                                pose_light['Brightness'] >= 1 and 'Color' in \
                                pose_light and 6 <= len(pose_light['Color']) <= 7 and \
                                isinstance(pose_light['TransitionTime'], int):
                            self.yeelight.state(device=device,
                                                brightness=pose_light['Brightness'],
                                                color=pose_light['Color'],
                                                transition=pose_light['Transition'],
                                                transitionTime=pose_light['TransitionTime'])

            # Send Idle payload
            json_data = {
                "mascot": mascot_idle_image
            }
            self.send(event="EVENT_WOOFERBOT", json_data=json_data, init=1)

        #
        # Overlay loop
        #
        ping_send = 0
        while True:
            ping_send = ping_send + 1
            # Queue is not empty, process
            if self.sendQueue:
                json_data_raw = self.sendQueue
                try:
                    # Process message
                    if 'message' in json_data_raw['data']:
                        # Process inline randomizer
                        while json_data_raw['data']['message'].find("[") >= 0:
                            tmp = json_data_raw['data']['message'][slice(json_data_raw['data']['message'].find("[") + 1,
                                                                         json_data_raw['data']['message'].find("]"))]
                            json_data_raw['data']['message'] = json_data_raw['data']['message'][slice(0, json_data_raw['data']['message'].find("["))] + \
                                                               SystemRandom().choice(tmp.split(";")) + \
                                                               json_data_raw['data']['message'][slice(json_data_raw['data']['message'].find("]") + 1, 9999)]

                        chatbot_msg = json_data_raw['data']['message']
                        # Process substrings for chatbot
                        if chatbot_msg.find("{") >= 0:
                            while chatbot_msg.find("{") >= 0:
                                tmp = chatbot_msg[slice(chatbot_msg.find("{") + 1, chatbot_msg.find("}"))]
                                tmp2 = ""
                                if tmp in json_data_raw['data']:
                                    tmp2 = json_data_raw['data'][tmp]

                                chatbot_msg = chatbot_msg[slice(0, chatbot_msg.find("{"))] + tmp2 + chatbot_msg[
                                    slice(chatbot_msg.find("}") + 1, 9999)]

                        # Send message to chat
                        self.chatbot.send(chatbot_msg)

                    # Send message to overlay
                    await websocket.send(json_dumps(json_data_raw))
                except websockets_exceptions.ConnectionClosed:
                    # Connection failed
                    print("Connection closed by overlay...")
                    self.active = self.active - 1
                    break
                else:
                    ping_send = 0
                    self.sendQueue = None
            # Queue empty, send keepalive
            else:
                if ping_send >= 40:
                    json_data_raw = json_dumps({
                        "event": "EVENT_PING",
                        "data": ""
                    })
                    try:
                        await websocket.send(json_data_raw)
                    except websockets_exceptions.ConnectionClosed:
                        # Connection failed
                        self.active = self.active - 1
                        if self.active == 0:
                            print("Connection closed by overlay...")
                        break
                    else:
                        ping_send = 0

            await asyncio_sleep(0.5)
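The inline randomizer loop above (every "[option1;option2;...]" span replaced by one random choice) is easier to follow in isolation; this standalone sketch mirrors that logic and is not part of the original class:

from random import SystemRandom

def expand_randomizer(message):
    # Replace each "[a;b;c]" span with one randomly chosen option
    # (assumes well-formed brackets, as the original loop does).
    while message.find("[") >= 0:
        start, end = message.find("["), message.find("]")
        options = message[start + 1:end].split(";")
        message = message[:start] + SystemRandom().choice(options) + message[end + 1:]
    return message

# expand_randomizer("You are [awesome;great]!")  ->  "You are awesome!" or "You are great!"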
Example #53
0
def render_json(value):
    """
    Render a dictionary as formatted JSON.
    """
    return json_dumps(value, indent=4, sort_keys=True)
Example #54
0
def print_config(config: Config, sort: bool = True, indent: int = 2):
    print(json_dumps(config.dump(), sort_keys=sort, indent=indent))
Example #55
0
def save_config(cfg, filename=None):
    s = json_dumps(cfg, indent=2)
    with open(filename, 'w') as f:
        f.write(s)
Example #56
0
RECORDS = []


def search(path, prefix):
    loader = TestLoader()
    for _, name, is_pkg in iter_modules(path):
        full_name = '{}.{}'.format(prefix, name)
        module_path = os.path.join(path[0], name)

        if is_pkg:
            search([module_path], full_name)

        if not is_pkg and name.startswith('test'):
            test_module = import_module(full_name)
            for suite in loader.loadTestsFromModule(test_module):
                for test in suite._tests:  # pylint: disable=protected-access
                    RECORDS.append({
                        'module': full_name,
                        'class': test.__class__.__name__,
                        'method': test._testMethodName,  # pylint: disable=protected-access
                        'type': get_test_type(test),
                        'path': '{}.{}.{}'.format(full_name, test.__class__.__name__,
                                                  test._testMethodName)
                    })  # pylint: disable=protected-access


search(azure.cli.__path__, 'azure.cli')

print(json_dumps(RECORDS))
Example #57
0
def _parseConfigCache(cache_dict, dirpath):
    """ Used by parseConfig() to parse just the cache parts of a config.
    """
    if 'name' in cache_dict:
        _class = Caches.getCacheByName(cache_dict['name'])
        kwargs = {}

        def add_kwargs(*keys):
            """ Populate named keys in kwargs from cache_dict.
            """
            for key in keys:
                if key in cache_dict:
                    kwargs[key] = cache_dict[key]

        if _class is Caches.Test:
            if cache_dict.get('verbose', False):
                kwargs['logfunc'] = lambda msg: stderr.write(msg + '\n')

        elif _class is Caches.Disk:
            kwargs['path'] = enforcedLocalPath(cache_dict['path'], dirpath,
                                               'Disk cache path')

            if 'umask' in cache_dict:
                kwargs['umask'] = int(cache_dict['umask'], 8)

            add_kwargs('dirs', 'gzip')

        elif _class is Caches.Multi:
            kwargs['tiers'] = [
                _parseConfigCache(tier_dict, dirpath)
                for tier_dict in cache_dict['tiers']
            ]

        elif _class is Caches.Memcache.Cache:
            if 'key prefix' in cache_dict:
                kwargs['key_prefix'] = cache_dict['key prefix']

            add_kwargs('servers', 'lifespan', 'revision')

        elif _class is Caches.Redis.Cache:
            if 'key prefix' in cache_dict:
                kwargs['key_prefix'] = cache_dict['key prefix']

            add_kwargs('host', 'port', 'db')

        elif _class is Caches.S3.Cache:
            add_kwargs('bucket', 'access', 'secret', 'use_locks', 'path',
                       'reduced_redundancy', 'policy')

        else:
            raise Exception('Unknown cache: %s' % cache_dict['name'])

    elif 'class' in cache_dict:
        _class = Core.loadClassPath(cache_dict['class'])
        kwargs = cache_dict.get('kwargs', {})
        kwargs = dict([(str(k), v) for (k, v) in kwargs.items()])

    else:
        raise Exception('Missing required cache name or class: %s' %
                        json_dumps(cache_dict))

    cache = _class(**kwargs)

    return cache
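For illustration, a configuration dictionary the parser above would accept for a disk-backed cache; only the key names come from the code, the values are made up:

example_cache_config = {
    "name": "Disk",
    "path": "/tmp/stache",
    "umask": "0000",
    "dirs": "safe",
    "gzip": ["xml", "json"],
}
# cache = _parseConfigCache(example_cache_config, dirpath="/etc/tilestache")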
Example #58
0
def edit(_user):
    """
    Apply the changes from the user edit form. This updates such varied things
    as the profile photo and bio, the email address, name, password and
    interests.
    """
    from pyaspora.post.models import Post

    p = Post(author=_user.contact)
    changed = []
    order = 0

    notif_freq = post_param(
        'notification_frequency_hours',
        template='users_edit.tpl',
        optional=True
    )
    _user.notification_hours = int(notif_freq) if notif_freq else None

    email = post_param('email', optional=True)
    if email and email != _user.email:
        _user.email = email

    old_pw = post_param('current_password', optional=True)
    new_pw1 = post_param('new_password', optional=True)
    new_pw2 = post_param('new_password2', optional=True)
    if old_pw and new_pw1 and new_pw2:
        if new_pw1 != new_pw2:
            abort(400, 'New passwords do not match')
        try:
            _user.change_password(old_pw, new_pw1)
        except ValueError:
            abort(400, 'Old password is incorrect')
    db.session.add(_user)

    attachment = request.files.get('avatar', None)
    if attachment and attachment.filename:
        changed.append('avatar')
        order += 1
        check_attachment_is_safe(attachment)

        if not renderer_exists(attachment.mimetype) or \
                not attachment.mimetype.startswith('image/'):
            abort(400, 'Avatar format unsupported')

        attachment_part = MimePart(
            type=attachment.mimetype,
            body=attachment.stream.read(),
            text_preview=attachment.filename
        )

        p.add_part(attachment_part, order=order, inline=True)
        _user.contact.avatar = attachment_part

    name = post_param('name', template='users_edit.tpl', optional=True)
    if name and name != _user.contact.realname:
        _user.contact.realname = name
        changed.append('name')

    bio = post_param('bio', template='users_edit.tpl', optional=True)
    if bio:
        bio = bio.encode('utf-8')
    else:
        bio = b''
    if bio and (not _user.contact.bio or _user.contact.bio.body != bio):
        changed.append('bio')
        order += 1
        bio_part = MimePart(body=bio, type='text/plain', text_preview=None)
        p.add_part(
            order=order,
            inline=True,
            mime_part=bio_part
        )
        _user.contact.bio = bio_part

    tags = post_param('tags', optional=True)
    if tags is not None:
        tag_objects = Tag.parse_line(tags, create=True)
        old_tags = set([t.id for t in _user.contact.interests])
        new_tags = set([t.id for t in tag_objects])
        if old_tags != new_tags:
            changed.append('tags')
            _user.contact.interests = tag_objects

    p.add_part(
        order=0,
        inline=True,
        mime_part=MimePart(
            body=json_dumps({
                'fields_changed': changed
            }).encode('utf-8'),
            type='application/x-pyaspora-profile-update',
            text_preview='updated their profile'
        )
    )

    if changed:
        db.session.add(p)
        db.session.add(_user.contact)
        p.share_with([_user.contact])
        p.thread_modified()

    db.session.commit()

    return redirect(url_for('contacts.profile', contact_id=_user.contact.id))
Example #59
0
def result(request, result_uuid):
    val_run = get_object_or_404(ValidationRun, pk=result_uuid)
    current_user = request.user

    copied_runs = current_user.copiedvalidations_set.all(
    ) if current_user.username else CopiedValidations.objects.none()
    is_copied = val_run.id in copied_runs.values_list('copied_run', flat=True)

    if is_copied and val_run.doi == '':
        original_start = copied_runs.get(
            copied_run=val_run).original_run.start_time
        original_end = copied_runs.get(
            copied_run=val_run).original_run.end_time
    else:
        original_start = None
        original_end = None

    if (request.method == 'DELETE'):
        ## make sure only the owner of a validation can delete it (others are allowed to GET it, though)
        if (val_run.user != request.user):
            return HttpResponse(status=403)

        ## check that our validation can be deleted; it can't if it already has a DOI
        if (not val_run.is_unpublished):
            return HttpResponse(status=405)  #405

        val_run.delete()
        return HttpResponse("Deleted.", status=200)

    elif request.method == 'POST':
        post_params = QueryDict(request.body)
        user = request.user
        if 'add_validation' in post_params and post_params[
                'add_validation'] == 'true':
            if val_run not in user.copied_runs.all():
                valrun_user = CopiedValidations(used_by_user=user,
                                                original_run=val_run,
                                                copied_run=val_run)
                valrun_user.save()
                response = HttpResponse("Validation added to your list",
                                        status=200)
            else:
                response = HttpResponse(
                    "You have already added this validation to your list",
                    status=200)
        elif 'remove_validation' in post_params and post_params[
                'remove_validation'] == 'true':
            user.copied_runs.remove(val_run)
            response = HttpResponse(
                "Validation has been removed from your list", status=200)

        elif 'copy_validation' in post_params and post_params[
                'copy_validation'] == 'true':
            resp = _copy_validationrun(val_run, request.user)
            response = JsonResponse(resp)

        else:
            response = HttpResponse("Wrong action parameter.", status=400)

        return response

    elif (request.method == 'PATCH'):
        ## make sure only the owner of a validation can change it (others are allowed to GET it, though)

        if (val_run.user != request.user):
            return HttpResponse(status=403)

        patch_params = QueryDict(request.body)

        if 'save_name' in patch_params:
            ## check that our validation's name can be changed; it can't if it already has a DOI
            if (not val_run.is_unpublished):
                return HttpResponse('Validation has been published',
                                    status=405)

            save_mode = patch_params['save_name']

            if save_mode != 'true':
                return HttpResponse("Wrong action parameter.", status=400)

            val_run.name_tag = patch_params['new_name']
            val_run.save()

            return HttpResponse("Changed.", status=200)

        if 'archive' in patch_params:
            archive_mode = patch_params['archive']

            if not ((archive_mode == 'true') or (archive_mode == 'false')):
                return HttpResponse("Wrong action parameter.", status=400)

            val_run.archive(unarchive=(archive_mode == 'false'))
            return HttpResponse("Changed.", status=200)

        if 'extend' in patch_params:
            extend = patch_params['extend']

            if extend != 'true':
                return HttpResponse("Wrong action parameter.", status=400)

            val_run.extend_lifespan()
            return HttpResponse(val_run.expiry_date, status=200)

        if 'publish' in patch_params:
            publish = patch_params['publish']

            # check we've got the action set correctly
            if publish != 'true':
                return HttpResponse("Wrong action parameter.", status=400)

            # check that the publication parameters are valid
            pub_form = PublishingForm(data=patch_params, validation=val_run)
            if not pub_form.is_valid():
                # if not, send back an updated publication form with errors set and http code 420 (picked up in javascript)
                return render(request,
                              'validator/publishing_dialog.html', {
                                  'publishing_form': pub_form,
                                  'val': val_run
                              },
                              status=420)

            try:
                get_doi_for_validation(val_run, pub_form.pub_metadata)
            except Exception as e:
                m = getattr(e, 'message', repr(e))
                return HttpResponse(m, status=400)

            return HttpResponse("Published.", status=200)

        return HttpResponse("Wrong action parameter.", status=400)

    # by default, show page
    else:
        ## tell template whether it's the owner of the validation - to show action buttons
        is_owner = (val_run.user == request.user)

        ## TODO: get time in format like '2 minutes', '5 hours'
        run_time = None
        if val_run.end_time is not None:
            run_time = val_run.end_time - val_run.start_time
            run_time = (run_time.days * 1440) + (run_time.seconds // 60)

        error_rate = 1
        if val_run.total_points != 0:
            error_rate = (val_run.total_points -
                          val_run.ok_points) / val_run.total_points

        pairs, triples, metrics, ref0_config = get_dataset_combis_and_metrics_from_files(
            val_run)
        combis = OrderedDict(sorted({**pairs, **triples}.items()))
        # the publication form is only needed by the owner; if we're displaying for another user, avoid leaking user data
        pub_form = PublishingForm(validation=val_run) if is_owner else None

        metrics = OrderedDict(sorted([(v, k) for k, v in metrics.items()]))

        inspection_table = get_inspection_table(val_run)

        context = {
            'current_user': current_user.username,
            'is_owner': is_owner,
            'val': val_run,
            'is_copied': is_copied,
            'original_start': original_start,
            'original_end': original_end,
            'error_rate': error_rate,
            'run_time': run_time,
            'metrics': metrics,
            'combis': combis,
            'json_metrics': json_dumps(METRICS),
            'publishing_form': pub_form,
            'inspection_table': inspection_table,
        }

        return render(request, 'validator/result.html', context)
Example #60
0
def wss_send(params):
    query = json_dumps(
        {"method": "call", "params": params, "jsonrpc": "2.0", "id": 1}
    )
    print(query)
    return query
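A usage sketch; the params value is purely illustrative of a JSON-RPC "call" payload:

# Illustrative only: params here are an example, not a value required by the function.
query = wss_send(["database", "get_objects", [["2.0.0"]]])
# query == '{"method": "call", "params": ["database", "get_objects", [["2.0.0"]]], "jsonrpc": "2.0", "id": 1}'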