Example 1
 def to_json(self) -> str:
     """
     Recursively dumps the entire class structure into JSON.
     :returns: JSON string, encoded with jsonpickle
     """
     jsonpickle.set_encoder_options('json', sort_keys=True, indent=4)
     return jsonpickle.encode(self)
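A minimal, self-contained sketch (the Point class and its fields are invented for illustration) of how encoder options like the ones above change the output of later jsonpickle.encode calls; options are stored per backend and apply process-wide:

import jsonpickle

class Point:
    def __init__(self, x, y):
        self.x = x
        self.y = y

# Keyword arguments are forwarded to the backend's dumps(); here they request
# stable key ordering and pretty-printed output.
jsonpickle.set_encoder_options('json', sort_keys=True, indent=4)
print(jsonpickle.encode(Point(1, 2)))
# Expected shape of the output (multi-line, sorted keys), e.g.:
# {
#     "py/object": "__main__.Point",
#     "x": 1,
#     "y": 2
# }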
Example 2
def to_json(params):
    jsonpickle.load_backend('json', 'dumps', 'loads', ValueError)
    jsonpickle.set_preferred_backend('json')
    jsonpickle.set_encoder_options('json', ensure_ascii=False)
    out = jsonpickle.encode(params, unpicklable=False)
    out = out.replace(': None', ': null')
    return out
Example 3
 def run(self, results, objfile):
     host = self.options.get("host", "hpfriends.honeycloud.net")
     port = self.options.get("port", 20000)
     ident = self.options.get("ident")
     secret = self.options.get("secret")
     channel_reports = self.options.get("channel_reports")
     channel_files = self.options.get("channel_files")
     
     if not ident or not secret:
         raise Exception("HPFriends Identifier and Secret not configurated")
     
     try:
         # Connect to HPFriends
         hpc = hpfeeds.HPC(host, port, ident, secret, timeout=60)
         
         if channel_reports:
             # publish JSON-Report on the HPFriends channel
             log.info("publish JSON-Report on the HPFriends channel %s" % channel_reports)
             
             jsonpickle.set_encoder_options('simplejson', indent=4) 
             jsonpickle.handlers.registry.register(datetime.datetime, DatetimeHandler)
             hpc.publish(channel_reports, jsonpickle.encode(results))
         
         if channel_files:
             # publish RAW-File as BASE64 on the HPFriends channel
             log.info("publish BASE64 on the HPFriends channel %s" % channel_files)
             hpc.publish(channel_files, json.dumps(objfile.file.get_fileB64encode(), sort_keys=False, indent=4))
     except hpfeeds.FeedException as e:
         raise Exception("publish on the channel failed: %s" % e)
     finally:
         hpc.close()
Example 4
    def create_object_factory_call(self, var_in):
        """ Returns the string needed for an object_factory call """

        # Detect slots objects
        if hasattr(var_in, "__slots__"):

            # Try and use repr to recreate
            repr_result = self.repr_encode(var_in)

            if repr_result:
                return repr_result
            else:
                # TODO should this be fatal?
                logging.warning("Cannot pickle as object uses __slots__")
                logging.warning("repr result = {}".format(self.repr_encode(var_in)))
                return "None"

        else:
            # Pickle the object - set the options to indent lines
            jsonpickle.set_encoder_options('simplejson', sort_keys=True, indent=self.INDENT_SIZE)
            j = jsonpickle.pickler.Pickler()
            encoded_obj = jsonpickle.encode(j.flatten(var_in))

            encoded_obj = encoded_obj.replace("'", "\\'")
            encoded_obj = encoded_obj.replace('\\"', '\\\\"')

            return "object_factory(\"\"\"{}\"\"\")".format(encoded_obj)
Example 5
    def test_decimal(self):
        # Default behavior: Decimal is preserved
        obj = decimal.Decimal(0.5)
        as_json = jsonpickle.dumps(obj)
        clone = jsonpickle.loads(as_json)
        self.assertTrue(isinstance(clone, decimal.Decimal))
        self.assertEqual(obj, clone)

        # Custom behavior: we want to use simplejson's Decimal support.
        jsonpickle.set_encoder_options('simplejson',
                                       use_decimal=True, sort_keys=True)

        jsonpickle.set_decoder_options('simplejson',
                                       use_decimal=True)

        # use_decimal mode allows Decimal objects to pass-through to simplejson.
        # The end result is we get a simple '0.5' value as our json string.
        as_json = jsonpickle.dumps(obj, unpicklable=True, use_decimal=True)
        self.assertEqual(as_json, '0.5')
        # But when loading we get back a Decimal.
        clone = jsonpickle.loads(as_json)
        self.assertTrue(isinstance(clone, decimal.Decimal))

        # side-effect: floats become decimals too!
        obj = 0.5
        as_json = jsonpickle.dumps(obj)
        clone = jsonpickle.loads(as_json)
        self.assertTrue(isinstance(clone, decimal.Decimal))
Example 6
 def parseJsonString(self,jsonstring):
     jsonpickle.set_preferred_backend('json')
     jsonpickle.set_encoder_options('json',ensure_ascii=False,separators=(',', ': '))
     obj = jsonpickle.decode(jsonstring)
     # obj=json.loads(jsonstring)
     # assert obj.name == result['name'] == 'Awesome'
     return obj
Example 7
def produce_json (filename, data, compact = True):
    """Produce JSON content (data) into a file (filename).

    Parameters
    ----------

    filename: string
       Name of file to write the content.
    data: any
       Content to write in JSON format. It has to be ready to pack using
       jsonpickle.encode.

    """

    if compact:
        # Produce compact JSON output
        set_encoder_options('json', separators=(',', ': '),
                            ensure_ascii=False,
                            encoding="utf8")
    else:
        # Produce pretty JSON output
        set_encoder_options('json', sort_keys=True, indent=4,
                            separators=(',', ': '),
                            ensure_ascii=False,
                            encoding="utf8")
    data_json = encode(data, unpicklable=False)
    with codecs.open(filename, "w", "utf-8") as file:
        file.write(data_json)
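For illustration, a hypothetical call (file names and data invented) showing both output modes of produce_json:

produce_json("stats.json", {"answer": 42, "items": [1, 2, 3]})                         # compact, single line
produce_json("stats_pretty.json", {"answer": 42, "items": [1, 2, 3]}, compact=False)   # sorted keys, 4-space indent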
Example 8
 def parseJsonAll(self,obj):
     jsonpickle.set_preferred_backend('json')
     jsonpickle.set_encoder_options('json', ensure_ascii=False,separators=(',', ': '))
     frozen = jsonpickle.encode(obj,unpicklable=True)
     # self.finish(frozen)
     # jsonstr=json.dumps(obj,default=self.__default,ensure_ascii=False,separators=(',',':')) #cls=DecimalEncoder
     return frozen
Example 9
 def save_to_uri(self, bytes, uri, save_metadata=True):
     # Have to use a two-step process to write to the file: open the
     # filesystem, then open the file.  Have to open the filesystem
     # as writeable in case this is a virtual filesystem (like ZipFS),
     # otherwise the write to the actual file will fail with a read-
     # only filesystem error.
     if uri.startswith("file://"):
         # FIXME: workaround to allow opening of file:// URLs with the
         # ! character
         uri = uri.replace("file://", "")
     fs, relpath = opener.parse(uri, writeable=True)
     fh = fs.open(relpath, 'wb')
     log.debug("saving to %s" % uri)
     fh.write(bytes)
     fh.close()
     
     if save_metadata:
         metadata_dict = dict()
         self.get_extra_metadata(metadata_dict)
         if metadata_dict:
             relpath += ".omnivore"
             log.debug("saving extra metadata to %s" % relpath)
             jsonpickle.set_encoder_options("json", sort_keys=True, indent=4)
             bytes = jsonpickle.dumps(metadata_dict)
             text = jsonutil.collapse_json(bytes)
             header = self.get_extra_metadata_header()
             fh = fs.open(relpath, 'wb')
             fh.write(header)
             fh.write(text)
             fh.close()
             self.metadata_dirty = False
     
     fs.close()
Example 10
def main():
    dataset  = {}
    outfiles = {}
    datas    = transcript.transcriptdict()
    namesA   = [ x[0] for x in sampleFolders ]
    namesA.sort()

    for sampleFolder in sampleFolders:
        sampleName, sampleDirectory = sampleFolder
        dataset[sampleName] = {
            'expression' : os.path.join(base, sampleDirectory, expressionName),
            'exons'      : os.path.join(base, sampleDirectory, gtfName       ),
            'name'       : sampleName,
         }

        expfile = dataset[sampleName]['expression' ]
        gtffile = dataset[sampleName]['exons'      ]

        loadExpFile(expfile, sampleName, datas)
        loadGtfFile(gtffile, sampleName, datas)

    jsonpickle.set_preferred_backend('simplejson')
    jsonpickle.set_encoder_options('simplejson', sort_keys=True, indent=1)
    jsonp = jsonpickle.encode([datas, transcript.transcriptdata.headersPos, transcript.transcriptdata.keys])

    with open(dbfile, 'w') as f:
        f.write(jsonp)
Example 11
def get_count_unread(request):
    jsonpickle.set_preferred_backend('demjson')
    jsonpickle.set_encoder_options('json', cls=JSONDateTimeEncoder)
    res = DBSession.query(Comment) \
        .filter(Comment.user_id == RequestGetUserId(request)) \
        .filter(Comment.is_read == False) \
        .count()
    return jsonpickle.encode(res, unpicklable=False, max_depth=5)
Example 12
def output_response(status_code=STATUS_CODE.OK, data={}, errmsg=None):
    result = {}
    result['status_code'] = status_code
    result['data'] = data
    if errmsg is not None:
        result['errmsg'] = errmsg

    if settings.DEBUG:
        jsonpickle.set_encoder_options('simplejson', sort_keys=True, indent=4)

    return HttpResponse(jsonpickle.encode(result, unpicklable=False), mimetype='text/json')
Example 13
def main():
    jsonpickle.set_preferred_backend('simplejson')
    jsonpickle.set_encoder_options('simplejson', sort_keys=True, indent=1)

    print "loading data %s" % dbfile    
    datas, transcript.transcriptdata.headersPos, transcript.transcriptdata.keys = jsonpickle.decode(open(dbfile, 'r').read())

    print "creating index %s" % indexfile
    jsonq = jsonpickle.encode(transcript.getIndex(datas))
    with open(indexfile, 'w') as f:
        f.write(jsonq)
Example 14
 def save(self, filename):
     """Saves a model to a file.
     """
     fh = open(filename, 'w')
     jsonpickle.set_encoder_options('simplejson', indent=4)
     try:
         core.write_nointr(fh, jsonpickle.encode(self))
     except:
         pass
     fh.close()
     jsonpickle.set_encoder_options('simplejson', indent=None)
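Encoder options are global state per backend, which is why the example above resets indent=None when it is done. A sketch (the helper name and the restore convention are my own, not part of jsonpickle) of a small context manager that scopes such overrides:

import contextlib
import jsonpickle

@contextlib.contextmanager
def encoder_options(backend, restore=None, **options):
    # Apply temporary encoder options, then re-apply caller-supplied defaults
    # on exit; jsonpickle offers no way to read the current options back, so
    # the caller has to say what to restore.
    jsonpickle.set_encoder_options(backend, **options)
    try:
        yield
    finally:
        jsonpickle.set_encoder_options(backend, **(restore or {}))

# usage: pretty-printed inside the block, reset to indent=None afterwards
with encoder_options('json', restore={'indent': None}, indent=4):
    pretty = jsonpickle.encode({'a': 1})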
Example 15
 def save(self, file=None):
     if file is None:
         file = self.save_file
     if isinstance(click.get_current_context().obj, proscli.utils.State) and click.get_current_context().obj.debug:
         proscli.utils.debug('Pretty Formatting {} File'.format(self.__class__.__name__))
         jsonpickle.set_encoder_options('json', sort_keys=True, indent=4)
     else:
         jsonpickle.set_encoder_options('json', sort_keys=True)
     if os.path.dirname(file):
         os.makedirs(os.path.dirname(file), exist_ok=True)
     with open(file, 'w') as f:
         f.write(jsonpickle.encode(self))
Example 16
    def list(cls, name, obj, indent=4, file=None):
        """
        List object attributes recursively as expanded JSON.

        name = Name of object (ie "myobject.subobject")
        obj = Object to dump
        indent = Number of characters to indent (default is 4)
        file = Optional output file to append
        """
        jsonpickle.set_encoder_options('json', indent=indent, sort_keys=True)
        cls.show(name + ' = ' + jsonpickle.encode(
            obj, unpicklable=False), file=file)
Example 17
def generate_json(n,f):
    testdata = [Group(name=common.random_ascii_string(), header=common.random_digits(),
                      footer=common.random_string())
                for i in range(n)
                ]

    filename = os.path.join(os.path.abspath(os.path.dirname(__file__)), f)
    # format the JSON
    jsonpickle.set_encoder_options('json', sort_keys=True, indent=2)
    # save the test data as JSON
    with open(filename, "w") as f:
    #  f.write(json.dumps(testdata,default=lambda x: x.__dict__, indent = 2))
        f.write(jsonpickle.encode(testdata))
Example 18
def to_json(obj):
    """Utility method for converting an object to a json encoded string. 
    
    Takes care of all the necessary setup.
    
    Args:
        obj (object): The object that should be encoded.
        
    Returns:
        str: The encoded version of the given object.
    """
    import jsonpickle
    jsonpickle.set_encoder_options('simplejson', sort_keys=True)
    return jsonpickle.encode(obj, unpicklable=False)
Example 19
def init_json():
    """Initialize JSON encoder.

    """

    # Register datetime flattener for jsonpickle
    jsonpickle.handlers.registry.register(datetime, DatetimeHandler)
    jsonpickle.handlers.registry.register(timedelta, TimedeltaHandler)
    # Select json module
    jsonpickle.set_preferred_backend('json')
    # Options for producing nice JSON
    jsonpickle.set_encoder_options('json', sort_keys=True, indent=4,
                                   separators=(',', ': '),
                                   ensure_ascii=False,
                                   encoding="utf8")
Example 20
def writeJsonReportFile(exportDir, dbresults, fileName):
    try:
        jsonpickle.set_encoder_options('simplejson', indent=4)
        jsonpickle.handlers.registry.register(datetime.datetime, DatetimeHandler)
        jsonpickle.handlers.registry.register(uuid.UUID, UUIDHandler)
        jsonReport = jsonpickle.encode(dbresults)
    except (UnicodeError, TypeError):
        jsonReport = jsonpickle.encode(convertDirtyDict2ASCII(dbresults))
    try:
        if not os.path.exists(os.path.join(exportDir, fileName)):
            report = codecs.open(os.path.join(exportDir, fileName), "w", "utf-8")
            report.write(jsonReport)
            report.close()
    except (TypeError, IOError) as e:
        raise Exception("Failed to generate JSON report: %s" % e)
Example 21
def loadSettings(settingsFile=None):
    if not settingsFile:
        settingsFile = "settings.json"
    s = Settings()
    try:
        with open(settingsFile, "r") as f:
            j = f.read()
            s = jsonpickle.decode(j)
    except Exception, ex:
        print ex
        print "Settings file not found. Creating default template file. Add port there."
        with open(settingsFile, "w") as f:
            jsonpickle.set_encoder_options('json', indent=4)
            f.write(jsonpickle.encode(s))
        exit(2)
Example 22
    def saveJSONdata(self):
        matches = dict()
        match_q = object_session(self).query(Match).filter(Match.tourney_id == self.id)
        for m in match_q:
            matches[m.id] = m
            if not m.next:
                final = m
                # print final
        tree = JSONnode(final, matches)
        import jsonpickle

        jsonpickle.set_encoder_options("json", sort_keys=True, indent=4)
        with open("tourneys/%s.js" % self.id, "w") as tourney_js:
            tourney_js.write("var tree = %s;\n" % jsonpickle.encode(tree, unpicklable=False))
            # tourney_js.write( 'var edges = %s;\n' % jsonpickle.encode( edges , unpicklable=False ) )
            tourney_js.write("var final_id = %s;\n" % jsonpickle.encode(final.id, unpicklable=False))
Example 23
def produce_json (filename, data, compact = True):

    if compact:
        # Produce compact JSON output
        set_encoder_options('json', separators=(',', ': '),
                            ensure_ascii=False,
                            encoding="utf8")
    else:
        # Produce pretty JSON output
        set_encoder_options('json', sort_keys=True, indent=4,
                            separators=(',', ': '),
                            ensure_ascii=False,
                            encoding="utf8")
    data_json = encode(data, unpicklable=False)
    with codecs.open(filename, "w", "utf-8") as file:
        file.write(data_json)
Example 24
def generate_json(n,f):
    testdata = [Contact(address=common.random_string(10), middlename=common.random_ascii_string(10), lastname=common.random_ascii_string(10),
                        nickname=common.random_ascii_string(10), byear="1988", ayear="2000", email = "*****@*****.**", firstname = "",
                        title=common.random_string(10), company=common.random_string(10), home=common.random_digits(5), mobile="+7", work=common.random_digits(5), fax=common.random_digits(11),
                        email2="*****@*****.**", email3="*****@*****.**", homepage="www.my.org", address2="Samara",
                        photo= os.path.join(os.path.abspath(os.path.dirname(__file__)), "../resources/avatar.png"), phone2=common.random_digits(5), notes="++++++++++", bday="4", aday="14",
                        amonth= "July", bmonth= "May", group=None)
                for i in range(n)
                ]

    filename = os.path.join(os.path.abspath(os.path.dirname(__file__)), f)
    # format the JSON
    jsonpickle.set_encoder_options('json', sort_keys=True, indent=2)
    # save the test data as JSON
    with open(filename, "w") as f:
        f.write(jsonpickle.encode(testdata))
Example 25
def run():
    # Safely load tree from disk (or recreate)
    import pickle

    try:
        tree = pickle.load(open('tree.pickle', 'rb'))
    except:
        matrix = generator.recipe_data(Normalization.EXACT_AMOUNTS)
        tree = build_hierarchy(matrix)
        pickle.dump(tree, open('tree.pickle', 'wb'))

    # Save it as JSON
    import jsonpickle

    jsonpickle.set_encoder_options('simplejson', sort_keys=True, indent=2)
    f = open('tree.json', 'wb')
    f.write(jsonpickle.encode(tree))
Example 26
def init_db(dbfile, indexfile):
    with app.app_context():
        print "initializing db"

        if not os.path.exists(dbfile):
            print "NO DATABASE FILE %s" % dbfile
            sys.exit(1)

        if not os.path.exists(indexfile):
            print "NO INDEX FILE %s" % indexfile
            sys.exit(1)

        global db
        global headers
        global queries

        jsonpickle.set_preferred_backend('simplejson')
        jsonpickle.set_encoder_options('simplejson', sort_keys=True, indent=1)
        dataDb = open(dbfile,    'r').read()
        dataIn = open(indexfile, 'r').read()


        db, lHeadersPos, lKeys = jsonpickle.decode(dataDb)
        headers                = jsonpickle.decode(dataIn)
        transcript.transcriptdata.headersPos, transcript.transcriptdata.keys = lHeadersPos, lKeys


        if db is None:
            print "no data in database"
            sys.exit(1)

        if len(db) == 0:
            print "database is empty"
            sys.exit(1)

        if headers is None:
            print "no data in index"
            sys.exit(1)

        if len(headers) == 0:
            print "index is empty"
            sys.exit(1)

        print "db loaded. %d entries" % len(db)
Example 27
    def save_metadata(self):
        """
        Saves this node's metadata.
        """
        if self.model_item.text() != self.name:
            self.metadata["name"] = self.model_item.text()
        else:
            self.metadata.pop("name", None)

        if self.user_sort_order and self.user_sort_order != "0".zfill(7):
            self.metadata["user_sort"] = self.user_sort_order.zfill(7)
        else:
            self.metadata.pop("user_sort", None)

        if self.hidden_children:
            self.metadata["hidden_nodes"] = []
            for element in self.hidden_children:
                objectify.deannotate(element, cleanup_namespaces=True)
                node_string = etree.tostring(element, pretty_print=False, encoding="unicode")
                node_string = node_string.replace("<!--", "<!- -").replace("-->", "- ->")
                self.metadata["hidden_nodes"].append(node_string)
        else:
            self.metadata.pop("hidden_nodes", None)

        if not self.allowed_children and "<node_text>" not in self.properties.keys():
            return
        else:
            meta_comment = None
            set_encoder_options("json", separators=(',', ':'))
            for child in self:
                if type(child) is NodeComment:
                    if child.text.startswith("<designer.metadata.do.not.edit>"):
                        meta_comment = child
                        if self.metadata:
                            child.text = "<designer.metadata.do.not.edit> " + encode(self.metadata)
                        else:
                            self.remove(child)

            if meta_comment is None and self.metadata:
                meta_comment = NodeComment()
                meta_comment.properties["<node_text>"].set_value("<designer.metadata.do.not.edit> " + encode(self.metadata))
                self.add_child(meta_comment)
Example 28
def get(request):
    try:
        run_id = int(request.matchdict['run_id'])
        is_superuser = RequestCheckUserCapability(request, 'moodle/ejudge_submits:comment')
        user_id = RequestGetUserId(request)

        comment_q = DBSession.query(Comment) \
            .filter(Comment.py_run_id == run_id)
        if not is_superuser:
            comment_q.filter(or_(Comment.author_user_id == user_id,
                                 Comment.user_id == user_id))
        comments = comment_q.all()

        jsonpickle.set_preferred_backend('demjson')
        jsonpickle.set_encoder_options('json', cls=JSONDateTimeEncoder)

        return jsonpickle.encode(comments, unpicklable=False, max_depth=5)

    except Exception as e:
        return json.dumps(
            {"result": "error", "message": e.__str__(), "stack": traceback.format_exc()})
Example 29
def writeConfig():
    """
    Writes the global configuration.
    """
    global Configuration
    global ConfigFilename
    print("Storing configuration...")

    jsonpickle.set_encoder_options('simplejson', sort_keys=True, indent=4)
    jsonpickle.set_encoder_options('json', sort_keys=True, indent=4)
    jsonpickle.set_preferred_backend('json')
    try:
        configfile = open(ConfigFilename, "w")
        #json.dump(Configuration, configfile, indent=True)
        configfile.write(jsonpickle.encode(Configuration))
        configfile.close()
        print("Configuration successfully stored.")

        return True
    except Exception as error:
        # TODO: Handle this better, friendlier
        print("Configuration error: %s" % error)
Example 30
    def run(self, results, objfile):
        """Writes report.
        @param results: results dict.
        @param objfile: file object
        @raise Exception: if fails to write report.
        """
        dumpdir = self.options.get("dumpdir", None)

        if not dumpdir:
            raise Exception("dumpdir not configured, skip")
        
        try:
            if not os.path.exists(dumpdir):
                os.makedirs(dumpdir)  
            d = tempfile.mkdtemp(dir=dumpdir)
        except Exception as e:
            raise Exception('Could not open %s for writing (%s)' % (dumpdir, e))
        else:
            os.rmdir(d)
        
        url_md5 = results["Info"]["url"]["md5"]
        file_md5 = results["Info"]["file"]["md5"]
        jfile = url_md5 + "_" + file_md5 + ".json"

        try:
            jsonpickle.set_encoder_options('simplejson', indent=4) 
            jsonpickle.handlers.registry.register(datetime.datetime, DatetimeHandler)
            jsonpickle.handlers.registry.register(uuid.UUID, UUIDHandler)
            jsonReport = jsonpickle.encode(results)
        except (UnicodeError, TypeError):
            jsonReport = jsonpickle.encode(convertDirtyDict2ASCII(results))
        
        try:  
            if not os.path.exists(os.path.join(dumpdir, jfile)):
                report = codecs.open(os.path.join(dumpdir, jfile), "w", "utf-8")      
                report.write(jsonReport)
                report.close()
        except (TypeError, IOError) as e:
            raise Exception("Failed to generate JSON report: %s" % e)    
Example 31
# modified from https://www.quickprogrammingtips.com/python/aes-256-encryption-and-decryption-in-python.html
BLOCK_SIZE = 16
pad = lambda s: s + (BLOCK_SIZE - len(s) % BLOCK_SIZE) * chr(BLOCK_SIZE - len(
    s) % BLOCK_SIZE)


def encrypt(raw, key):
    raw = pad(raw)
    iv = Random.new().read(AES.block_size)
    cipher = AES.new(key, AES.MODE_CBC, iv)
    return base64.b64encode(iv + cipher.encrypt(raw))


# main code...
'''
# magic JSON incantation (I forget why, might not even be needed here:-)
jsonpickle.set_encoder_options('json', sort_keys=True, indent=2)
jsonpickle.set_encoder_options('simplejson', sort_keys=True, indent=2)

# defaults for some command line arguments (CLAs)
depth=10 # level of nesting
minppl=10 # ppl = passwords per level - we'll randomly select in this range, unless CLA overrides
maxppl=20
skips=0 # how many passwords from file to skip

# usage
def usage():
    print >>sys.stderr, "Usage: " + sys.argv[0] + " -u <username> -p <pwdfile> [-D <destdir>] [-s <skips>] [-m min] [-M max] [-l levels]" 
    sys.exit(1)
Example 32
from model.group import Group
import random
import string
import os.path
import jsonpickle
import getopt
import sys
try:
    opts, args = getopt.getopt(sys.argv[1:], "n:f:",
                               ["number of groups", "file"])
except getopt.GetoptError as err:
    getopt.usage()
    sys.exit(2)
n = 3
f = "data/groups.json"
for o, a in opts:
    if o == "-n":
        n = int(a)
    elif o == "-f":
        f = a

def random_string(prefix, maxlen):
    symbols = string.ascii_letters + string.digits + string.punctuation + " " * 10
    return prefix + "".join(
        [random.choice(symbols) for i in range(random.randrange(maxlen))])

testdata = [Group(name="TestClient")
            ] + [Group(name=random_string("contact", 10)) for i in range(n)]
file = os.path.join(os.path.dirname(os.path.abspath(__file__)), "..", f)
with open(file, "w") as out:
Example 33
for o, a in opts:
    if o == "-n":
        n = int(a)
    elif o == "-f":
        f = a

# randomly generated data
def random_string(prefix, maxlen):
    symbols = string.ascii_letters + string.digits + " "*5
    return prefix + "".join([random.choice(symbols) for i in range (random.randrange(maxlen))])

def random_numbers(prefix, maxlen):
    #numbers = string.digits + string.punctuation + " "*10
    return prefix + "".join([random.choice(string.digits) for i in range (random.randrange(maxlen))])

def random_email(prefix, maxlen):
    symbols = string.ascii_letters + string.digits + "@"*10 + "."
    return prefix + "".join([random.choice(symbols) for i in range (random.randrange(maxlen))])

testdata =[Contact(contact_firstname="", contact_lastname="", contact_address="",contact_homephone="", contact_email="")] + [
        Contact(contact_firstname=random_string("firstname", 15), contact_lastname=random_string("lastname", 20), contact_address=random_string("address", 10), contact_homephone=random_numbers("tel", 12), contact_email=random_email("email", 30))
        for i in range(n)
]

file = os.path.join(os.path.dirname(os.path.abspath(__file__)), "../", f)

with open(file, "w") as out:
    jsonpickle.set_encoder_options("json", indent=2)  # параметры форматирования для нагладности в json файле
    out.write(jsonpickle.encode(testdata))
    #out.write(json.dumps(testdata, default=lambda x: x.__dict__, indent=2))
Example 34
    print >> sys.stderr, "Doing a " + country + " run"

if args.index is not None:
    index = args.index

if args.fp_index is not None:
    fp_index = args.fp_index

# this is an array to hold the set of keys we find
fingerprints = []
overallcount = 0
badcount = 0
goodcount = 0

# encoder options
jsonpickle.set_encoder_options('json', sort_keys=True, indent=2)
jsonpickle.set_encoder_options('simplejson', sort_keys=True, indent=2)

# it can happen that we run out of memory after we've done all of the
# dns stuff, in such a case, it's nice to be able to re-start from the
# fingerprints.json file to save the time of re-doing all those dns
# queries, this branch does that
if args.fpfile is not None:
    pass
# TODO: Update
# # read fingerprints from fpfile
# fpf=open(args.fpfile,"r")
# f=getnextfprint(fpf)
# print f
# fpcount=0
# while f:
Example 35
 def getJson(self):
     self.load(None)
     jsonpickle.set_encoder_options('json', sort_keys=True, indent=3)
     json = jsonpickle.encode(self.data, unpicklable=False)
     log.L.debug('Encoded SDP JSON: %s' % json)
     return json
Example 36
 def to_json(self, sort=False):
     """
     Get JSON representation of an object
     """
     jsonpickle.set_encoder_options('json', sort_keys=sort)
     return jsonpickle.encode(self._data())
Example 37
import jsonpickle as jp
from utils import open_file, write_file, collator

jp.set_encoder_options('simplejson',
                       sort_keys=True,
                       indent=4,
                       ensure_ascii=False)

content = open_file('input/monlam_verbs.json')
json = jp.decode(content)
dadrag = open_file('input/dadrag_syllables.txt').strip().split('\n')

entries = []
for inflected, context in json.items():
    # a few entries don't have any content in monlam_verbs.json and are filtered here
    # like : ལྷོགས་ | ༡བྱ་ཚིག 1. ༡བརྡ་རྙིང་། རློགས། 2. ཀློགས། that parses into "ལྷོགས": []
    if context == []:
        continue

    possible_verbs = []
    for verb in context:
        # inflected verbs
        if 'བྱ་ཚིག' in verb.keys():
            possible_verbs.append(verb['བྱ་ཚིག'])
        # non-inflected verbs (གཟུགས་མི་འགྱུར་བ།)
        else:
            possible_verbs.append(inflected)

    # de-duplicate the verbs
    possible_verbs = list(set(possible_verbs))
Example 38
    cmd = 'pahole --hex -c 10000000 {} -y {} -E'.format(args.elf_file_path,
                                                        args.struct_name)

    if args.verbose:
        print(cmd)

    raw_pahole_output = subprocess.check_output(cmd,
                                                shell=True)

    raw_pahole_output = ''.join([chr(el) for el in raw_pahole_output])

    dump = open(args.dump_path, 'rb').read()
    struct_layout = PaholeOutputParser.parse_raw_pahole_output(raw_pahole_output, args.struct_name)

    cpp_struct = CppStruct(struct_layout, dump, args.offset, args.endiannes)

    with open('{}.txt'.format(args.out_file), 'w') as out_file:

        def my_print(*print_args):
            out_file.write(*print_args)
            out_file.write('\n')

        cpp_struct.print_struct(my_print)

    import jsonpickle
    jsonpickle.set_preferred_backend('json')
    jsonpickle.set_encoder_options('json', indent=4)

    with open('{}.jsonpickle'.format(args.out_file), 'w') as out_file:
        out_file.write(jsonpickle.encode(cpp_struct))
Example 39
def serialize_and_write(file, test_data):
    with open(file, "w") as f:
        jsonpickle.set_encoder_options('json', sort_keys=True, indent=2)
        test_data_serialised = jsonpickle.encode(test_data)
        f.write(test_data_serialised)
Example 40
 def getTreeData(self):
     jsonpickle.set_preferred_backend('json')
     jsonpickle.set_encoder_options('json', sort_keys=False, indent=4)
     json_dump = json.dumps(self.model.json())
     return jsonpickle.decode(json_dump)
Example 41
 def putTreeData(self, myObject):
     jsonpickle.set_preferred_backend('json')
     jsonpickle.set_encoder_options('json', sort_keys=False, indent=4)
     self.jp = jsonpickle.encode(myObject)
     self.document = json.loads(self.jp)
     self.model.load(self.document)
Example 42
import xml.etree.ElementTree as ET
tree = ET.parse('mokshopaya2.xml')
root = tree.getroot()
import re
from yv_verse import YvVerse
from os import listdir
from os.path import isfile, join
import jsonpickle
import xmltodict
from indic_transliteration import sanscript
from indic_transliteration.sanscript import SchemeMap, SCHEMES, transliterate

jsonpickle.set_encoder_options('json',
                               sort_keys=True,
                               indent=2,
                               ensure_ascii=False)


def convert_to_dev(text):
    return transliterate(text, sanscript.IAST, sanscript.DEVANAGARI)


def save(data, filename):
    with open(filename, 'w', encoding='utf-8') as f:
        f.write(jsonpickle.encode(data, max_depth=3))


tag_prefix = '{http://www.tei-c.org/ns/1.0}'
id_attrib = '{http://www.w3.org/XML/1998/namespace}id'
verses = []
buffer = []
Example 43
 def json_pickle_result(self, result):
     jsonpickle.set_preferred_backend('simplejson')
     jsonpickle.set_encoder_options('simplejson', sort_keys=True, indent=4)
     json = jsonpickle.encode(result)
     return json
Example 44
def find_repeat_detections(inputFilename, outputFilename=None, options=None):

    ##%% Input handling

    if options is None:
        options = RepeatDetectionOptions()

    toReturn = RepeatDetectionResults()

    # Check early to avoid problems with the output folder

    if options.bWriteFilteringFolder or options.bRenderHtml:
        assert options.outputBase is not None and len(options.outputBase) > 0
        os.makedirs(options.outputBase, exist_ok=True)

    # Load file

    detectionResults, otherFields = load_api_results(
        inputFilename,
        normalize_paths=True,
        filename_replacements=options.filenameReplacements)
    toReturn.detectionResults = detectionResults
    toReturn.otherFields = otherFields

    # Before doing any real work, make sure we can *probably* access images
    # This is just a cursory check on the first image, but it heads off most
    # problems related to incorrect mount points, etc.  Better to do this before
    # spending 20 minutes finding repeat detections.
    if options.bWriteFilteringFolder or options.bRenderHtml:
        if not is_sas_url(options.imageBase):
            row = detectionResults.iloc[0]
            relativePath = row['file']
            for s in options.filenameReplacements.keys():
                relativePath = relativePath.replace(
                    s, options.filenameReplacements[s])
            assert os.path.isfile(os.path.join(options.imageBase,
                                               relativePath))

    ##%% Separate files into directories

    # This will be a map from a directory name to smaller data frames
    rowsByDirectory = {}

    # This is a mapping back into the rows of the original table
    filenameToRow = {}

    # TODO: in the case where we're loading an existing set of FPs after manual filtering,
    # we should load these data frames too, rather than re-building them from the input.

    print('Separating files into directories...')

    # iRow = 0; row = detectionResults.iloc[0]
    for iRow, row in detectionResults.iterrows():
        relativePath = row['file']
        dirName = os.path.dirname(relativePath)

        if len(dirName) == 0:
            assert options.nDirLevelsFromLeaf == 0, "Can't use the dirLevelsFromLeaf option with flat filenames"
        else:
            if options.nDirLevelsFromLeaf > 0:
                iLevel = 0
                while (iLevel < options.nDirLevelsFromLeaf):
                    iLevel += 1
                    dirName = os.path.dirname(dirName)
            assert len(dirName) > 0

        if not dirName in rowsByDirectory:
            # Create a new DataFrame with just this row
            # rowsByDirectory[dirName] = pd.DataFrame(row)
            rowsByDirectory[dirName] = []

        rowsByDirectory[dirName].append(row)

        assert relativePath not in filenameToRow
        filenameToRow[relativePath] = iRow

    # Convert lists of rows to proper DataFrames
    dirs = list(rowsByDirectory.keys())
    for d in dirs:
        rowsByDirectory[d] = pd.DataFrame(rowsByDirectory[d])

    toReturn.rowsByDirectory = rowsByDirectory
    toReturn.filenameToRow = filenameToRow

    print('Finished separating {} files into {} directories'.format(
        len(detectionResults), len(rowsByDirectory)))

    ##% Look for matches (or load them from file)

    dirsToSearch = list(rowsByDirectory.keys())
    if options.debugMaxDir > 0:
        dirsToSearch = dirsToSearch[0:options.debugMaxDir]

    # length-nDirs list of lists of DetectionLocation objects
    suspiciousDetections = [None] * len(dirsToSearch)

    # Are we actually looking for matches, or just loading from a file?
    if len(options.filterFileToLoad) == 0:

        # We're actually looking for matches...
        print('Finding similar detections...')

        allCandidateDetections = [None] * len(dirsToSearch)

        if not options.bParallelizeComparisons:

            options.pbar = None
            # iDir = 0; dirName = dirsToSearch[iDir]
            for iDir, dirName in enumerate(tqdm(dirsToSearch)):
                allCandidateDetections[iDir] = find_matches_in_directory(
                    dirName, options, rowsByDirectory)

        else:

            options.pbar = tqdm(total=len(dirsToSearch))
            allCandidateDetections = Parallel(
                n_jobs=options.nWorkers,
                prefer='threads')(delayed(find_matches_in_directory)(
                    dirName, options, rowsByDirectory)
                                  for dirName in tqdm(dirsToSearch))

        print('\nFinished looking for similar bounding boxes')

        ##%% Find suspicious locations based on match results

        print('Filtering out repeat detections...')

        nImagesWithSuspiciousDetections = 0
        nSuspiciousDetections = 0

        # For each directory
        #
        # iDir = 51
        for iDir in range(len(dirsToSearch)):

            # A list of DetectionLocation objects
            suspiciousDetectionsThisDir = []

            # A list of DetectionLocation objects
            candidateDetectionsThisDir = allCandidateDetections[iDir]

            for iLocation, candidateLocation in enumerate(
                    candidateDetectionsThisDir):

                # occurrenceList is a list of file/detection pairs
                nOccurrences = len(candidateLocation.instances)

                if nOccurrences < options.occurrenceThreshold:
                    continue

                nImagesWithSuspiciousDetections += nOccurrences
                nSuspiciousDetections += 1

                suspiciousDetectionsThisDir.append(candidateLocation)
                # Find the images corresponding to this bounding box, render boxes

            suspiciousDetections[iDir] = suspiciousDetectionsThisDir

        print(
            'Finished searching for repeat detections\nFound {} unique detections on {} images that are suspicious'
            .format(nSuspiciousDetections, nImagesWithSuspiciousDetections))

    else:

        print('Bypassing detection-finding, loading from {}'.format(
            options.filterFileToLoad))

        # Load the filtering file
        detectionIndexFileName = options.filterFileToLoad
        sIn = open(detectionIndexFileName, 'r').read()
        suspiciousDetections = jsonpickle.decode(sIn)
        filteringBaseDir = os.path.dirname(options.filterFileToLoad)
        assert len(suspiciousDetections) == len(dirsToSearch)

        nDetectionsRemoved = 0
        nDetectionsLoaded = 0

        # We're skipping detection-finding, but to see which images are actually legit false
        # positives, we may be looking for physical files or loading from a text file.
        fileList = None
        if options.filteredFileListToLoad is not None:
            with open(options.filteredFileListToLoad) as f:
                fileList = f.readlines()
                fileList = [x.strip() for x in fileList]
            nSuspiciousDetections = sum([len(x) for x in suspiciousDetections])
            print(
                'Loaded false positive list from file, will remove {} of {} suspicious detections'
                .format(len(fileList), nSuspiciousDetections))

        # For each directory
        # iDir = 0; detections = suspiciousDetections[0]
        #
        # suspiciousDetections is an array of DetectionLocation objects,
        # one per directory.
        for iDir, detections in enumerate(suspiciousDetections):

            bValidDetection = [True] * len(detections)
            nDetectionsLoaded += len(detections)

            # For each detection that was present before filtering
            # iDetection = 0; detection = detections[iDetection]
            for iDetection, detection in enumerate(detections):

                # Are we checking the directory to see whether detections were actually false
                # positives, or reading from a list?
                if fileList is None:

                    # Is the image still there?
                    imageFullPath = os.path.join(
                        filteringBaseDir,
                        detection.sampleImageRelativeFileName)

                    # If not, remove this from the list of suspicious detections
                    if not os.path.isfile(imageFullPath):
                        nDetectionsRemoved += 1
                        bValidDetection[iDetection] = False

                else:

                    if detection.sampleImageRelativeFileName not in fileList:
                        nDetectionsRemoved += 1
                        bValidDetection[iDetection] = False

            # ...for each detection

            nRemovedThisDir = len(bValidDetection) - sum(bValidDetection)
            if nRemovedThisDir > 0:
                print('Removed {} of {} detections from directory {}'.format(
                    nRemovedThisDir, len(detections), iDir))

            detectionsFiltered = list(compress(detections, bValidDetection))
            suspiciousDetections[iDir] = detectionsFiltered

        # ...for each directory

        print('Removed {} of {} total detections via manual filtering'.format(
            nDetectionsRemoved, nDetectionsLoaded))

    # ...if we are/aren't finding detections (vs. loading from file)

    toReturn.suspiciousDetections = suspiciousDetections

    if options.bRenderHtml:

        # Render problematic locations with html (loop)

        print('Rendering html')

        nDirs = len(dirsToSearch)
        directoryHtmlFiles = [None] * nDirs

        if options.bParallelizeRendering:

            # options.pbar = tqdm(total=nDirs)
            options.pbar = None

            directoryHtmlFiles = Parallel(
                n_jobs=options.nWorkers,
                prefer='threads')(delayed(render_images_for_directory)(
                    iDir, directoryHtmlFiles, suspiciousDetections, options)
                                  for iDir in tqdm(range(nDirs)))

        else:

            options.pbar = None

            # For each directory
            # iDir = 51
            for iDir in range(nDirs):
                # Add this directory to the master list of html files
                directoryHtmlFiles[iDir] = render_images_for_directory(
                    iDir, directoryHtmlFiles, suspiciousDetections, options)

            # ...for each directory

        # Write master html file

        masterHtmlFile = os.path.join(options.outputBase, 'index.html')
        os.makedirs(options.outputBase, exist_ok=True)
        toReturn.masterHtmlFile = masterHtmlFile

        with open(masterHtmlFile, 'w') as fHtml:

            fHtml.write('<html><body>\n')
            fHtml.write(
                '<h2><b>Repeat detections by directory</b></h2></br>\n')

            for iDir, dirHtmlFile in enumerate(directoryHtmlFiles):

                if dirHtmlFile is None:
                    continue

                relPath = os.path.relpath(dirHtmlFile, options.outputBase)
                dirName = dirsToSearch[iDir]

                # Remove unicode characters before formatting
                relPath = relPath.encode('ascii', 'ignore').decode('ascii')
                dirName = dirName.encode('ascii', 'ignore').decode('ascii')

                fHtml.write('<a href={}>{}</a><br/>\n'.format(
                    relPath, dirName))

            fHtml.write('</body></html>\n')

    # ...if we're rendering html

    toReturn.allRowsFiltered = update_detection_table(toReturn, options,
                                                      outputFilename)

    # Create filtering directory
    if options.bWriteFilteringFolder:

        print('Creating filtering folder...')

        dateString = datetime.now().strftime('%Y.%m.%d.%H.%M.%S')
        filteringDir = os.path.join(options.outputBase,
                                    'filtering_' + dateString)
        os.makedirs(filteringDir, exist_ok=True)

        # iDir = 0; suspiciousDetectionsThisDir = suspiciousDetections[iDir]
        for iDir, suspiciousDetectionsThisDir in enumerate(
                tqdm(suspiciousDetections)):

            # suspiciousDetectionsThisDir is a list of DetectionLocation objects
            # iDetection = 0; detection = suspiciousDetectionsThisDir[0]
            for iDetection, detection in enumerate(
                    suspiciousDetectionsThisDir):

                instance = detection.instances[0]
                relativePath = instance.filename
                outputRelativePath = 'dir{:0>4d}_det{:0>4d}_n{:0>4d}.jpg'.format(
                    iDir, iDetection, len(detection.instances))
                outputFullPath = os.path.join(filteringDir, outputRelativePath)

                if is_sas_url(options.imageBase):
                    inputFullPath = relative_sas_url(options.imageBase,
                                                     relativePath)
                else:
                    inputFullPath = os.path.join(options.imageBase,
                                                 relativePath)
                    assert (os.path.isfile(inputFullPath)
                            ), 'Not a file: {}'.format(inputFullPath)

                try:
                    render_bounding_box(detection,
                                        inputFullPath,
                                        outputFullPath,
                                        lineWidth=options.lineThickness,
                                        expansion=options.boxExpansion)
                except Exception as e:
                    print(
                        'Warning: error rendering bounding box from {} to {}: {}'
                        .format(inputFullPath, outputFullPath, e))
                    if options.bFailOnRenderError:
                        raise
                detection.sampleImageRelativeFileName = outputRelativePath

        # Write out the detection index
        detectionIndexFileName = os.path.join(filteringDir,
                                              DETECTION_INDEX_FILE_NAME)
        jsonpickle.set_encoder_options('json', sort_keys=True, indent=4)
        s = jsonpickle.encode(suspiciousDetections)
        with open(detectionIndexFileName, 'w') as f:
            f.write(s)
        toReturn.filterFile = detectionIndexFileName

        print('Done')

    # ...if we're writing filtering info

    return toReturn
Example 45
 def serialize(self):
     jsonpickle.set_encoder_options('simplejson', sort_keys=True, indent=4)
     self.json = jsonpickle.encode(self)
     return
Example 46
import jsonpickle
import requests

from common.alert import message_box_error
from common.web_helper import firefox_quick_get_url
from ip.constants import ClientConstant
from ip.st import ClientOperBuy, EntrustType, BuyResult, SellResult, ClientOperSell, \
    ClientOperQuery, AccountInfo, QueryResult, ShareItem, EntrustItem, ClientOperCancel, \
    EntrustWay, ErrorResult
from project_config.logbook_logger import mylog
from trading.client_access_formatter import format_operation

jsonpickle.load_backend('simplejson')
jsonpickle.set_encoder_options('simplejson',
                               sort_keys=True,
                               ensure_ascii=False)

_c = ClientConstant

stock_server_address = 'http://127.0.0.1:8866'
stock_server_operation_address = 'http://127.0.0.1:8866/operation'


def _visit_client_server(url_args, headers, timeout=5):
    append_str = ''
    for i, k in enumerate(url_args):
        if not i:
            tmp = '?'
        else:
            tmp = '&'
Example 47
 def getJson(self):
     jsonpickle.set_encoder_options('json', sort_keys=True, indent=3)
     json = jsonpickle.encode(self.data, unpicklable=False)
     log.L.info('Encoded SDP Service JSON: %s' % json)
     return json
Example 48
    def loadState(cls, filename=None, validateHash=True):
        """ Loads a :class:`LabState` object from a file.

        It loads and instantiates a copy of every object serialized
        with ``lab.saveState(filename)``. The objects are saved with
        :mod:`jsonpickle`, and must be hashable and contain no
        C-object references. For convenience, lab objects are inherited
        from :class:`lightlab.laboratory.Hashable`.

        By default, the sha256 hash is verified at import time to prevent
        instantiating objects from a corrupted file.

        A file version is also compared to the code version. If a new
        version of this class is present, but your ``json`` file is older,
        a ``RuntimeWarning`` is issued.

        Todo:
            When importing older ``json`` files, know what to do to
            upgrade it without bugs.

        Args:
            filename (str or Path): file to load from.
            validateHash (bool): whether to check the hash, default True.

        Raises:
            RuntimeWarning: if file version is older than lightlab.
            RuntimeError: if file version is newer than lightlab.
            JSONDecodeError: if there is any problem decoding the .json file.
            JSONDecodeError: if the hash file inside the .json file does not
                match the computed hash during import.
            OSError: if there is any problem loading the file.

        """
        if filename is None:
            filename = _filename

        with open(filename, 'r') as file:
            frozen_json = file.read()
        json_state = json.decode(frozen_json)

        user = json_state.pop("__user__")
        datetime_json = json_state.pop("__datetime__")

        # Check integrity of stored version
        sha256 = json_state.pop("__sha256__")
        jsonpickle.set_encoder_options('json', sort_keys=True, indent=4)
        if validateHash and sha256 != hash_sha256(json.encode(json_state)):
            raise JSONDecodeError(
                "Labstate is corrupted. expected: {} vs actual: {}.".format(
                    sha256, hash_sha256(json.encode(json_state))),
                str(filename), 0)

        # Compare versions of file vs. class
        version = json_state.pop("__version__")
        if version < cls.__version__:
            logger.warning("Loading older version of Labstate.")
        elif version > cls.__version__:
            raise RuntimeError(
                "Stored Labstate version is newer than current software. Update package lightlab."
            )

        context = jsonpickle.unpickler.Unpickler(backend=json,
                                                 safe=True,
                                                 keys=True)

        restored_object = context.restore(json_state, reset=True)
        restored_object.__sha256__ = sha256
        restored_object.__version__ = version
        restored_object.filename = filename
        restored_object.__user__ = user
        restored_object.__datetime__ = datetime_json

        try:
            for i in range(version, cls.__version__):
                logger.warning("Attempting patch %s -> %s", i, cls.__version__)
                restored_object = patch_labstate(i, restored_object)
        except NotImplementedError as e:
            logger.exception(e)

        return restored_object
Example 49
f = "data/groups.json"

#  a tuple of option names and values; for the options passed in, n and f are checked for use further down in the code
for o, a in opts:
    if a == "-n":
        n = int(a)
    elif o == "-f":
        f = a


def random_string(prefix, maxlen):
    symbols = string.ascii_letters + string.digits + " " * 10 + string.punctuation
    return prefix + "".join(
        random.choice(symbols) for i in range(random.randrange(maxlen)))


testdata = [Group(group_name="", group_header="", group_footer="")] + [
    Group(group_name=random_string("name", 10),
          group_header=random_string("header", 20),
          group_footer=random_string("footer", 13)) for i in range(n)
]

# join the parent directory of the current file with the name of the file the test data will be written to; the nested call resolves the absolute path of the current file
path = os.path.join(os.path.dirname(os.path.abspath(__file__)), "..", f)

# open the file at path for writing and write the generated data into it
with open(path, "w") as out:
    jsonpickle.set_encoder_options("json", indent=2)
    # the indent parameter puts the list objects on separate lines
    out.write(jsonpickle.encode(testdata))
Example 50
 def _to_json(self):
     jsonpickle.set_preferred_backend('json')
     jsonpickle.set_encoder_options('json', ensure_ascii=False)
     return jsonpickle.encode(self.companies, unpicklable=False)
Example 51
import json
import os

import jsonpickle

from IO.paths import saves_dir

sort_keys = True
indent = 1
jsonpickle.set_encoder_options('simplejson', sort_keys=sort_keys, indent=indent)


def remove_comments(s):
    """
    Parameters:
        s (str) : The string to remove comments from
        
    Returns:
        a string that is s but with all the comments of the form //, /**/, and # removed
    """
    in_comment_single = False
    in_comment_multi = False
    in_string = False

    to_return = []
    length = len(s)

    i = 0
    from_index = 0
    while i < length:
        c = s[i]
Example 52
import jsonpickle

jsonpickle.set_encoder_options('simplejson',
                               sort_keys=True,
                               indent=4,
                               encoding='utf-8',
                               ensure_ascii=False,
                               unpicklable=False,
                               use_decimal=True)

jsonpickle.set_decoder_options('simplejson',
                               encoding='utf-8',
                               use_decimal=True)

jsonpickle.set_preferred_backend('simplejson')
Example 53
    elif o == "-f":
        f = a


def random_string(prefix, maxlen):
    symbols = ascii_letters + digits + punctuation + " " * 10
    return prefix + "".join(
        [choice(symbols) for i in range(randrange(maxlen))])


test_data = [
    Group(name="Friends", header="Mine", footer="Dear ones"),
    Group(name="", header="", footer="")
] + [
    Group(name=random_string("name", 10),
          header=random_string("header", 20),
          footer=random_string("footer", 20)) for i in range(n)
]

test_data_file = join(dirname(abspath(__file__)), "../data/", f)

with open(test_data_file, mode="w") as file:
    set_encoder_options("json", indent=2)
    file.write(encode(test_data))
# test_data = [
#     Group(name=name, header=header, footer=footer)
#     for name in ["", random_string("name", 10)]
#     for header in ["", random_string("header", 20)]
#     for footer in ["", random_string("footer", 20)]
# ]
Example 54
 def test_int_dict_keys_with_numeric_keys(self):
     jsonpickle.set_encoder_options('demjson', strict=False)
     int_dict = {1000: [1, 2]}
     pickle = jsonpickle.encode(int_dict, numeric_keys=True)
     actual = jsonpickle.decode(pickle)
     self.assertEqual(actual[1000], [1, 2])
Example 55
def random_string(
    prefix, maxlen
):  # generate random test data (prefix is the word the string starts with, maxlen is the maximum string length)
    symbols = string.ascii_letters + string.digits + " " * 10  # the characters that may appear in the randomly generated string
    return prefix + "".join(
        [random.choice(symbols) for i in range(random.randrange(maxlen))]
    )  # random.choice picks a random character; random.randrange(maxlen) gives a random length not exceeding the maximum; "".join then glues the list together


testdata = [
    Group(name="", header="", footer="")
] + [  # test data: one group with empty fields and several with non-empty ones
    Group(name=random_string("name", 10),
          header=random_string("header", 20),
          footer=random_string("footer", 20)) for i in range(n)
]  # a Group object with random data is generated n times, and the list is built from these objects

# save the generated data to a file
file = os.path.join(os.path.dirname(os.path.abspath(__file__)), "..",
                    f)  # path to the file

with open(file, "w") as out:  #открываем его на запись
    jsonpickle.set_encoder_options(
        "json", indent=2)  # параметры форматирования представления данных
    out.write(jsonpickle.encode(testdata))
"""testdata =[     # описываем тестовые данные  в виде генерации комбинаций
    Group(name=name, header=header, footer=footer) # строим обект типа групп , в котором параметры получаются из циклов
    for name in ["",random_string("name",10)] # переменная name пробегает по двум возможным значениям пустое или случайное, берутся комбинации типа пустое name, случайный header, пустой футер
    for header in ["", random_string("header", 20)]
    for footer in ["", random_string("footer", 20)]
]"""
Esempio n. 56
0
class MsbClient(websocket.WebSocketApp):
    """Definition of the msb client to handle the creation of the self-description
     and communication with the msb websocket interface.
    """
    def __init__(self,
                 service_type=None,
                 uuid=None,
                 name=None,
                 description=None,
                 token=None,
                 applicationPropertiesCustomPath=None):
        """Initializes a new msb client.

        If no parameters are provided an application.properties file with the main configuration needs to be present.
        Otherwise the config data can be provided as constructor parameters

        Args:
            service_type (str): The service type of the service ('Application' or 'SmartObject')
            uuid (str): The uuid of the service as valid V4 UUID
            name (str): The name of the service
            description (str): The description of the service
            token (str): The token of the service used to verify service via MSB GUI or Rest
        Returns:
            MsbClient: The msb client object to specify the service and handle MSB connection
        """

        self.msb_url = ""
        self.msb_url_with_wspath = ""
        self.applicationPropertiesCustomPath = applicationPropertiesCustomPath

        # debugging
        self.debug = False
        self.trace = False
        self.dataFormatValidation = True

        # connection params
        self.connected = False
        self.registered = False
        self.autoReconnect = True
        self.reconnecting = False
        self.userDisconnect = False
        self.reconnectInterval = 10

        # client-side heartbeats
        self.keepAlive = False
        self.heartbeat_interval = 8

        # sockJs framing
        self.sockJsFraming = True

        # event caching
        self.eventCache = []
        self.eventCacheEnabled = True
        self.eventCacheSize = 1000
        self.maxMessageSize = 1000000

        # smart object definition
        self.functions = {}
        self.events = {}
        self.configuration = {}
        self.configuration["parameters"] = {}

        # socket
        self.ws = None
        self.hostnameVerification = False
        self.threadAsDaemonEnabled = False

        # check if all params are present or if the application.properties file will be used
        if (service_type or uuid or name or description or token) is not None:
            self.service_type = service_type
            self.uuid = uuid
            self.name = name
            self.description = description
            self.token = token
        else:
            self.readConfig()

    # used for serialization and deserialization of complex Python objects
    jsonpickle.set_encoder_options("json", sort_keys=False, indent=4)
    jsonpickle.set_preferred_backend("json")

    # list of all valid MSB message types
    MSBMessageTypes = [
        "IO", "NIO", "IO_CONNECTED", "IO_REGISTERED", "IO_PUBLISHED",
        "NIO_ALREADY_CONNECTED", "NIO_REGISTRATION_ERROR",
        "NIO_UNEXPECTED_REGISTRATION_ERROR", "NIO_UNAUTHORIZED_CONNECTION",
        "NIO_EVENT_FORWARDING_ERROR", "NIO_UNEXPECTED_EVENT_FORWARDING_ERROR",
        "ping"
    ]

    def sendBuf(self):
        for idx, msg in enumerate(self.eventCache):
            try:
                if self.connected and self.registered:
                    logging.debug("SENDING (BUF): " + msg)
                    if self.sockJsFraming:
                        _msg = self.objectToJson(msg).replace("\\n", "")
                        self.ws.send('["E ' + _msg[1:-1] + '"]')
                    else:
                        self.ws.send("E " + msg)
                    self.eventCache.pop(idx)
            except Exception:
                pass

    def on_message(self, message):
        if self.sockJsFraming:
            if self.debug and message.startswith("h"):
                logging.debug("♥")
            message = message[3:-2]
        if message in self.MSBMessageTypes:
            logging.info(message)
            if message == "IO_CONNECTED":
                if self.reconnecting:
                    self.reconnecting = False
                    if self.sockJsFraming:
                        _selfd = json.dumps(
                            self.objectToJson(
                                self.getSelfDescription())).replace("\\n", "")
                        self.ws.send('["R ' + _selfd[1:-1] + '"]')
                    else:
                        self.ws.send(
                            "R " +
                            self.objectToJson(self.getSelfDescription()))
            if message == "IO_REGISTERED":
                self.registered = True
                if self.eventCacheEnabled:
                    self.connected = True
                    self.sendBuf()
            elif message == "NIO_ALREADY_CONNECTED":
                if self.connected:
                    try:
                        self.ws.close()
                    except Exception:
                        pass
            elif message == "NIO_UNEXPECTED_REGISTRATION_ERROR":
                if self.connected:
                    try:
                        self.ws.close()
                    except Exception:
                        pass
            elif message == "NIO_UNAUTHORIZED_CONNECTION":
                if self.connected:
                    try:
                        self.ws.close()
                    except Exception:
                        pass
            elif message == 'ping':
                if self.sockJsFraming:
                    self.ws.send('["pong"]')
                else:
                    self.ws.send('pong')
        if message.startswith("C"):
            jmsg = message.replace('\\"', '"')
            jmsg = json.loads(jmsg[2:])
            logging.info(str(jmsg))
            if jmsg["functionId"] not in self.functions:
                if jmsg["functionId"].startswith(
                        "/") and not jmsg["functionId"].startswith("//"):
                    jmsg["functionId"] = jmsg["functionId"][1:]
            if jmsg["functionId"] in self.functions:
                if "correlationId" in jmsg:
                    jmsg["functionParameters"]["correlationId"] = jmsg[
                        "correlationId"]
                else:
                    logging.debug(
                        "correlationid could not be found. Does the websocket interface version support it?"
                    )
                self.functions[jmsg["functionId"]].implementation(
                    jmsg["functionParameters"])
            else:
                logging.warning("Function could not be found: " +
                                jmsg["functionId"])
        elif message.startswith("K"):
            jmsg = message.replace('\\"', '"')
            jmsg = json.loads(jmsg[2:])
            logging.info(str(jmsg))
            logging.debug("CONFIGURATION: " + str(jmsg))
            if jmsg["uuid"] == self.uuid:
                for key in jmsg["params"]:
                    if key in self.configuration["parameters"]:
                        self.changeConfigParameter(key, jmsg["params"][key])
                self.reRegister()

    def on_error(self, error):
        logging.error(error)

    def on_close(self, code, reason):
        logging.debug("DISCONNECTED")
        logging.debug("Websocket Close Status Code: (" + str(code) +
                      "); Reason: (" + str(reason) + ")")
        self.connected = False
        self.registered = False
        if self.autoReconnect and not self.userDisconnect:
            logging.info("### closed, waiting " + str(self.reconnectInterval) +
                         " seconds before reconnect. ###")
            time.sleep(self.reconnectInterval)
            self.reconnecting = True
            logging.info("Start reconnecting to msb url: >" + self.msb_url +
                         "<")
            self.connect(self.msb_url)

    def on_open(self):
        logging.debug("Socket open")
        self.connected = True

    def enableDebug(self, debug=True):
        """Enables or disables the debug logging for the msb client.

        Args:
            debug (bool): Used to either enable (true) or disable (false) debug logging.
        """
        if debug:
            logging.basicConfig(
                format="[%(asctime)s] %(module)s %(name)s.%(funcName)s" +
                " +%(lineno)s: %(levelname)-8s [%(process)d] %(message)s")
            logging.getLogger().setLevel(logging.DEBUG)
        else:
            logging.basicConfig(format="[%(asctime)s] %(message)s")
            logging.getLogger().setLevel(logging.INFO)
        self.debug = debug

    def enableTrace(self, trace=True):
        """Enables or disables the websocket trace.

        Args:
            trace (bool): Used to either enable (true) or disable (false) websocket trace
        """
        self.trace = trace
        websocket.enableTrace(trace)

    def enableDataFormatValidation(self, dataFormatValidation=True):
        """Enables or disables data format and message format validation.

        (Mainly for development, can be disabled in production to improve performance)

        Args:
            dataFormatValidation (bool): Used to either enable (true) or disable (false) format validation
        """
        self.dataFormatValidation = dataFormatValidation

    def disableAutoReconnect(self, autoReconnect=True):
        """Disables or enables auto reconnect for the client if connection to MSB gets lost.

        Args:
            autoReconnect (bool): Used to either disable (true) or enable (false) auto reconnect
        """
        self.autoReconnect = not autoReconnect

    def setReconnectInterval(self, interval=10000):
        """Set the interval in ms for automatic reconnects if connection to MSB gets lost.

        Args:
            interval (int):  The interval value in ms (>=3000) for automatic reconnections
        """
        if interval <= 3000:
            interval = 3000
        self.reconnectInterval = interval / 1000

    def setKeepAlive(self, keepAlive=True, heartbeat_interval=8000):
        """Sets the keepalive interval for the client-side heartbeat in ms for the WS connection.

        This is required if there is no server-side heartbeat.

        Args:
            keepAlive (bool):  Used to enable (true) or disable (false) the keep alive functionality
            heartbeat_interval (int):  Client-side heartbeat interval value in ms
        """
        self.keepAlive = keepAlive
        if heartbeat_interval < 8000:
            heartbeat_interval = 8000
        self.heartbeat_interval = heartbeat_interval / 1000

    def disableSockJsFraming(self, sockJsFraming=True):
        """Disables or enables the sockJs framing.

        Args:
            sockJsFraming (bool): Used to either disable (true) or enable (false) sockJs framing
        """
        self.sockJsFraming = not sockJsFraming

    def disableHostnameVerification(self, hostnameVerification=True):
        """Disables or enables checking for self-signed SSL certificates (disable it e.g. for development)

        Args:
            hostnameVerification (bool): Used to either disable (true) or enable (false) ssl checks
        """
        self.hostnameVerification = not hostnameVerification

    def disableEventCache(self, disableEventCache=True):
        """Disables or enables the event cache, which will save sent events if no active MSB connection is present.

        Args:
            disableEventCache (bool): Used to either disable (true) or enable (false) event cache
        """
        self.eventCacheEnabled = not disableEventCache

    def setEventCacheSize(self, eventCacheSize=1000):
        """Sets the size (max number of events) of the event cahe.

        If the max is reached, oldest entry gets dismissed.

        Args:
            eventCacheSize (int): The size of the event cache (event entries)
        """
        self.eventCacheSize = eventCacheSize

    def enableThreadAsDaemon(self, threadAsDaemonEnabled=True):
        """Enable the msb client thread to run as daemon.

        This will run the websocket thread as daemon to be independent from the user threads.

        Args:
            threadAsDaemonEnabled (bool): Used to either enable (true) or disable (false) the thread to run as daemon
        """
        self.threadAsDaemonEnabled = threadAsDaemonEnabled

    def _checkUrl(self, msb_url=None):
        """Checks and transforms the msb url into a valid websocket format

        Args:
            msb_url (str): The url of the MSB (http(s)://host:port or ws(s)://host:port)
        """
        server_id = str(randint(100, 999))
        session_id = str(uuid.uuid4()).replace("-", "")
        if msb_url is not None:
            self.msb_url = msb_url
        if "http://" in self.msb_url:
            self.msb_url = self.msb_url.replace("http://", "ws://")
        elif "https://" in self.msb_url:
            self.msb_url = self.msb_url.replace("https://", "wss://")
        if not (self.msb_url.startswith("ws://")
                or self.msb_url.startswith("wss://")):
            logging.error("WRONG MSB URL FORMAT: " + str(self.msb_url))
        if self.sockJsFraming:
            self.msb_url_with_wspath = (self.msb_url + "/websocket/data/" +
                                        server_id + "/" + session_id +
                                        "/websocket")
        else:
            self.msb_url_with_wspath = self.msb_url + "/websocket/data/websocket"

    def connect(self, msb_url=None):
        """Connects the client to the MSB WebSocket interface.

        Args:
            msb_url (str): The url of the MSB (http(s)://host:port or ws(s)://host:port)
        """
        self.userDisconnect = False

        # check and update the url format
        self._checkUrl(msb_url)
        # init the websocket app and register own listeners
        ws = websocket.WebSocketApp(
            self.msb_url_with_wspath,
            on_message=self.on_message,
            on_error=self.on_error,
            on_close=self.on_close,
        )
        self.ws = ws
        ws.on_open = self.on_open

        # prepare and start socket
        def runf():
            try:
                if not self.hostnameVerification:
                    if self.keepAlive:
                        ws.run_forever(ping_interval=self.heartbeat_interval,
                                       ping_timeout=self.heartbeat_interval -
                                       5,
                                       sslopt={
                                           "cert_reqs": ssl.CERT_NONE,
                                           "check_hostname": False,
                                       },
                                       suppress_origin=True)
                    else:
                        ws.run_forever(sslopt={
                            "cert_reqs": ssl.CERT_NONE,
                            "check_hostname": False,
                        },
                                       suppress_origin=True)
                else:
                    if self.keepAlive:
                        ws.run_forever(
                            ping_interval=self.heartbeat_interval,
                            ping_timeout=self.heartbeat_interval - 3,
                        )
                    else:
                        ws.run_forever()
            except Exception:
                pass

        logging.info("Connecting to MSB @ " + self.msb_url)
        wst = threading.Thread(target=runf)
        if self.threadAsDaemonEnabled:
            wst.setDaemon(True)
        wst.start()

    def disconnect(self):
        """Disconnects the client from the MSB WebSocket interface."""
        self.userDisconnect = True
        logging.debug("Disconnect requested by msb client api")
        self.ws.close()

    def register(self):
        """Sends registration message to the MSB."""
        def _sendReg():
            if self.sockJsFraming:
                _selfd = json.dumps(
                    self.objectToJson(self.getSelfDescription())).replace(
                        "\\n", "")
                _selfd = _selfd[1:-1]
                self.ws.send('["R ' + _selfd + '"]')
            else:
                self.ws.send("R " +
                             self.objectToJson(self.getSelfDescription()))

        def _set_interval(func, sec):
            def func_wrapper():
                if self.connected:
                    func()
                else:
                    _set_interval(func, sec)

            t = threading.Timer(sec, func_wrapper)
            t.start()
            return t

        _set_interval(_sendReg, 0.1)

    def addEvent(
        self,
        event,
        event_name=None,
        event_description=None,
        event_dataformat=None,
        event_priority=0,
        isArray=None,
    ):
        """Adds an event to the self-description.

        Args:
            event (:obj:Event, str): The event object or the event id
            event_name (str): The name of the event
            event_description (str): The description of the event
            event_dataformat (:obj:): The data type of the event (of class DataFormat, DataType or ComplexDataFormat)
            event_priority (str, int): The priority of the event (LOW,MEDIUM,HIGH) or (0,1,2)
            isArray (bool): Specifies if the event handles an object array or just an object of the data
        """
        # create event object by single params
        if not isinstance(event, Event):
            event = Event(
                event,
                event_name,
                event_description,
                event_dataformat,
                event_priority,
                isArray,
            )
        # for complex objects, update dataformat
        if event.dataFormat is not None:
            # if array of complex objects, change dataformat to type array
            if event.isArray:
                if "$ref" in event.dataFormat["dataObject"]:
                    event.dataFormat["dataObject"]["type"] = "array"
                    event.dataFormat["dataObject"]["items"] = {}
                    event.dataFormat["dataObject"]["items"]["$ref"] = {}
                    event.dataFormat["dataObject"]["items"][
                        "$ref"] = event.dataFormat["dataObject"]["$ref"]
                    del event.dataFormat["dataObject"]["$ref"]
            # if not an array of complex objects, change dataformat to type object
            elif not event.isArray:
                if "$ref" in event.dataFormat["dataObject"]:
                    event.dataFormat["dataObject"]["type"] = "object"
        # logging.debug(str(event.dataFormat))
        # validate data format and add event
        if vadilateEventDataFormat(event.dataFormat):
            event.id = len(self.events) + 1
            if event.eventId not in self.events:
                self.events[event.eventId] = event
            else:
                logging.error(
                    str(event.eventId) +
                    " already in events, change event id!")
                raise Exception("Event with this ID already present: " +
                                str(event.eventId))

    def addFunction(
        self,
        function,
        function_name=None,
        function_description=None,
        function_dataformat=None,
        fnpointer=None,
        isArray=False,
        responseEvents=None,
    ):
        """Adds a function to the self-description.

        Args:
            function (:obj:Function, str): The function object or the function id
            function_name (str): The name of the function
            function_description (str): The description of the function
            function_dataformat (:obj:): The data type of the function (of class DataFormat or ComplexDataFormat)
            fnpointer (:func:): The function implementation to be called for incoming events
            isArray (bool): Specifies if the function handles an object array or just an object of the data
            responseEvents (:obj: list of event ids): The list of event IDs to be sent as response events
        """
        # create function object by single params
        if not isinstance(function, Function):
            function = Function(
                function,
                function_name,
                function_description,
                function_dataformat,
                fnpointer,
                isArray,
                responseEvents,
            )
        # check if defined reponseEvents are valid (exist)
        if function.responseEvents is not None:
            for responseEvent in function.responseEvents:
                if responseEvent not in self.events:
                    logging.error("Event not found for id " + responseEvent)
                    raise Exception("Event not found for id " + responseEvent)
        # for complex objects, update dataformat
        if function.dataFormat is not None:
            # if array of complex objects, change dataformat to type array
            if function.isArray:
                if "$ref" in function.dataFormat["dataObject"]:
                    function.dataFormat["dataObject"]["type"] = "array"
                    function.dataFormat["dataObject"]["items"] = {}
                    function.dataFormat["dataObject"]["items"]["$ref"] = {}
                    function.dataFormat["dataObject"]["items"][
                        "$ref"] = function.dataFormat["dataObject"]["$ref"]
                    del function.dataFormat["dataObject"]["$ref"]
            # if not an array of complex objects, change dataformat to type object
            elif not function.isArray:
                if "$ref" in function.dataFormat["dataObject"]:
                    function.dataFormat["dataObject"]["type"] = "object"
        # logging.debug(str(function.dataFormat))
        # validate data format and add function
        if vadilateFunctionDataFormat(function.dataFormat):
            if function.functionId not in self.functions:
                self.functions[function.functionId] = function
            else:
                logging.error(
                    str(function.functionId) +
                    " already in functions, change function id!")
                raise Exception("Function with this ID already present: " +
                                str(function.functionId))

    def setEventValue(self, eventId, eventValue):
        """Sets the value for an event

        Args:
            eventId (str): The event id
            eventValue (str): The value of the event
        """
        if eventId in self.events:
            self.events[eventId].dataObject = eventValue

    def publish(
        self,
        eventId,
        dataObject=None,
        priority=None,
        cached=False,
        postDate=None,
        correlationId=None,
    ):
        """This function sends the event of the provided event ID.

        Optionally the value can be provided, otherwise the last set value will be used.
        The priority can also be set, otherwise the standard value for the event's priority will be used.
        A postDate can be optionally provided, otherwise the current timestamp will be used.

        Args:
            eventId (str): The event id
            dataObject (:obj:): The value to be published
            priority (str, int): The priority of the event (LOW,MEDIUM,HIGH) or (0,1,2)
            cached (bool): Specifies whether this event will be added to the cache if MSB is currently not reachable
            postDate (datetime): the post date of the event (e.g. datetime.datetime.utcnow().isoformat()[:-3] + "Z")
            correlationId (str): The correlation id of the event used to identify events in multi-step flows
        """
        event = {}
        event["uuid"] = self.uuid
        event["eventId"] = eventId
        # update the event value
        if dataObject is not None:
            self.events[eventId].dataObject = dataObject
            event["dataObject"] = self.events[eventId].dataObject
        if priority is not None:
            self.events[eventId].priority = priority
        event["priority"] = self.events[eventId].priority
        if postDate is None:
            event["postDate"] = datetime.datetime.utcnow().isoformat(
            )[:-3] + "Z"
        if correlationId is not None:
            event["correlationId"] = correlationId

        # validate event value
        if self.dataFormatValidation and dataObject is not None:
            self.validateValueForDataFormat(
                event["dataObject"],
                self.events[eventId].df,
                self.events[eventId].dataFormat,
                self.events[eventId].isArray,
            )
        msg = self.objectToJson(event)

        # send event
        if self.connected and self.registered:
            try:
                if self.sockJsFraming:
                    _msg = self.objectToJson(msg).replace("\\n", "")
                    self.ws.send('["E ' + _msg[1:-1] + '"]')
                else:
                    self.ws.send("E " + msg)
                logging.debug("SENDING: " + msg)
            except Exception:
                logging.exception(self, "Error, could not send message...")
                pass
        else:
            # or cache event if not connected
            if self.eventCacheEnabled and cached:
                logging.debug(
                    "Not connected and/or registered, putting event in cache.")
                if len(self.eventCache) < self.eventCacheSize:
                    self.eventCache.append(msg)
                else:
                    self.eventCache.pop(0)
                    self.eventCache.append(msg)
            elif cached and not self.eventCacheEnabled:
                logging.debug(
                    "Global cache disabled, message cache flag overridden and discarded."
                )
            else:
                logging.debug("Caching disabled, message discarded.")

    @staticmethod
    def validateValueForDataFormat(value, df, dataFormat, isArray):
        """Validate the event value to match the specified data format

        Args:
            value (:obj:): The value of the event to be validated
            df (:obj:): The (short) data format of the event
            dataFormat (:obj:): The (complex) data format of the event
            isArray (bool): Specifies whether the value is an object array or just a single object
        """
        if isinstance(df, ComplexDataFormat):
            if validateValueForComplexDataformat(
                    value,
                    dataFormat,
                    isArray,
            ):
                return True
            else:
                return False
        else:
            if validateValueForSimpleDataformat(
                    value,
                    df,
                    isArray,
            ):
                return True
            else:
                return False

    def addConfigParameter(self, key, value, type):
        """Add a new configuration parameter to the client.

        Configuration parameters can be used to change client behaviour by changing their values via the MSB GUI.

        Args:
            key (str): The key (name) of the configuration parameter
            value (:obj:): The initial value of the configuration parameter
            type (:obj:DataType): The simple data format of the configuration parameter
        """
        newParam = getDataType(type)
        newParam["type"] = newParam["type"].upper()
        if "format" in newParam:
            newParam["format"] = newParam["format"].upper()
        newParam["value"] = value
        self.configuration["parameters"][key] = newParam

    def getConfigParameter(self, key):
        """Get the value of a configuration parameter.

        Args:
            key (str): The key (name) of the configuration parameter
        """
        if key in self.configuration["parameters"]:
            return self.configuration["parameters"][key]["value"]
        else:
            logging.warning("Cannot get config param for unknown key: " +
                            str(key))
            raise Exception("Cannot get config param for unknown key: " +
                            str(key))

    def changeConfigParameter(self, key, value):
        """Change the value of a configuration parameter.

        Args:
            key (str): The key (name) of the configuration parameter
            value (:obj:): The new value of the configuration parameter
        """
        if key in self.configuration["parameters"]:
            oldValue = self.configuration["parameters"][key]["value"]
            if oldValue != value:
                self.configuration["parameters"][key]["value"] = value
                if self.connected and self.registered:
                    self.reRegister()
            else:
                logging.warning(
                    "Cannot change config param. Value is already set!")
        else:
            logging.warning("Cannot change config param for unknown key: " +
                            str(key))

    def reRegister(self):
        """Performs a new registration to update the self-description on MSB."""
        logging.debug("Reregistering after configuration parameter change...")
        if self.sockJsFraming:
            _selfd = json.dumps(self.objectToJson(
                self.getSelfDescription())).replace("\\n", "")
            self.ws.send('["R ' + _selfd[1:-1] + '"]')
        else:
            self.ws.send("R " + self.objectToJson(self.getSelfDescription()))

    def objectToJson(self, object):
        """Converts a python object into a json ovject.

         Returns:
            json object: The resulting json object
        """
        return jsonpickle.encode(object, unpicklable=False)

    def getSelfDescription(self):
        """Generate the self description JSON object of the application or smart object."""
        self_description = {}
        self_description["@class"] = self.service_type
        self_description["uuid"] = self.uuid
        self_description["name"] = self.name
        self_description["description"] = self.description
        self_description["token"] = self.token
        _ev = []
        e_props = ["@id", "id", "dataFormat", "description", "eventId", "name"]
        for event in self.events:
            current_e_props = []
            e = jsonpickle.decode(
                jsonpickle.encode(self.events[event], unpicklable=False))
            for key in list(e.keys()):
                if key == "id":
                    e["@id"] = e["id"]
                    del e[key]
            del e["priority"]
            del e["df"]
            if e["dataFormat"] is None:
                del e["dataFormat"]
            del e["isArray"]
            for key in list(e.keys()):
                current_e_props.append(key)
            for key in current_e_props:
                if key not in e_props:
                    # logging.warning(self, 'Remove key from event if invalid in self description: ' + key)
                    try:
                        del e[key]
                    except Exception:
                        logging.exception(self, "Key not found: " + key)
            _ev.append(e)
        self_description["events"] = _ev
        _fu = []
        for function in self.functions:
            f = jsonpickle.decode(
                jsonpickle.encode(self.functions[function], unpicklable=False))
            if f["responseEvents"] and len(f["responseEvents"]) > 0:
                _re = []
                for idx, re in enumerate(f["responseEvents"]):
                    _re.append(self.events[re].id)
                f["responseEvents"] = _re
            else:
                del f["responseEvents"]
            del f["isArray"]
            if "implementation" in f:
                del f["implementation"]
            if f["dataFormat"] is None:
                del f["dataFormat"]
            _fu.append(f)
        self_description["functions"] = _fu
        self_description["configuration"] = self.configuration
        return self_description

    def readConfig(self):
        """Helper function to parse main configuration param by param name from the application.properties file"""
        logging.info("Reading configuration from application.properties file")
        config = None
        if self.applicationPropertiesCustomPath is None:
            config = open("application.properties", "r")
        else:
            config = open(str(self.applicationPropertiesCustomPath), "r")
        if config is not None:
            for line in config:
                configparam = line.split("=")
                if configparam[0] == "msb.type":
                    self.service_type = configparam[1].rstrip()
                elif configparam[0] == "msb.name":
                    self.name = configparam[1].rstrip()
                elif configparam[0] == "msb.uuid":
                    self.uuid = configparam[1].rstrip()
                elif configparam[0] == "msb.token":
                    self.token = configparam[1].rstrip()
                elif configparam[0] == "msb.url":
                    self.msb_url = configparam[1].rstrip()
                elif configparam[0] == "msb.description":
                    self.description = configparam[1].rstrip()
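
A hedged usage sketch for the client above; the UUID, URL, and event id are placeholders, and it assumes the Event helper referenced by addEvent accepts an event without a data format:

# hypothetical wiring, not part of the original snippet
client = MsbClient(
    service_type="SmartObject",
    uuid="76499d88-0000-4000-8000-000000000000",   # placeholder V4 UUID
    name="DemoObject",
    description="Demo smart object",
    token="demo-token",
)
client.enableDebug(True)
client.addEvent("STATUS", "Status", "Signals that the device is alive")  # no payload data format
client.connect("wss://msb.example.org")   # placeholder MSB url
client.register()
client.publish("STATUS", cached=True)     # cached until connected and registered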
Esempio n. 57
0
# -*- coding: utf-8 -*-
__author__ = 'Dos Santos Julien'
from config import connexion, curseur
from datetime import datetime
import calendar
import jsonpickle
import MySQLdb #http://www.mikusa.com/python-mysql-docs/index.html
from jsonpickle import handlers


class DatetimeHandler(handlers.BaseHandler):
    def flatten(self, obj, data):
        return calendar.timegm(obj.timetuple())

handlers.registry.register(datetime, DatetimeHandler)
jsonpickle.set_encoder_options('simplejson', sort_keys=True)

class Error(Exception):
    def __init__(self, code, value):
        self.value = value
        self.code = code

    def __str__(self):
        return repr(self.value)


class Entity(object):

    def __init__(self,table):
        self.__table = table
        self.__columns = []
        self.__hasOne = []
Esempio n. 58
0
    def optimizeModels(self):
        """To optimize moiety models based on the optimization settings.

        :return: None
        :rtype: :py:obj:`None`
        """

        logger = self._initLogging()

        for optimizationMethod in self.optimizations:

            for setting in self.optimizations[optimizationMethod]:

                optimizationParameters = setting

                if not os.path.exists(
                        self.path + '/' +
                        optimizationParameters['optimizationSetting']):
                    os.mkdir(self.path + '/' +
                             optimizationParameters['optimizationSetting'])
                path = self.path + '/' + optimizationParameters[
                    'optimizationSetting'] + '/'

                for model in self.models:

                    datasets = []
                    datasetName = ''

                    for dataset in self.datasets:
                        # datasets can contain more molecules than model.

                        if set([molecule.name for molecule in model.molecules
                                ]).issubset(set(dataset.keys())):
                            datasetName += dataset.datasetName + "_"
                            datasets.append(dataset)

                    if datasets:
                        logger.info(
                            "Performing {0} optimization on {1} with {2}dataset"
                            .format(
                                optimizationParameters['optimizationSetting'],
                                model.name, datasetName))

                        if optimizationMethod == 'SAGA':
                            if self.split:
                                optimization = SAGAseparateOptimization(
                                    model, datasets, path,
                                    optimizationParameters['methodParameters'],
                                    optimizationParameters[
                                        'optimizationSetting'],
                                    self.energyFunction,
                                    optimizationParameters[
                                        'noPrintBestResults'],
                                    optimizationParameters['noPrintAllResults']
                                )
                            else:
                                optimization = SAGAoptimization(
                                    model, datasets, path,
                                    optimizationParameters['methodParameters'],
                                    optimizationParameters[
                                        'optimizationSetting'],
                                    self.energyFunction,
                                    optimizationParameters[
                                        'noPrintBestResults'],
                                    optimizationParameters['noPrintAllResults']
                                )
                            optimization.creatSubdir()
                        elif optimizationMethod in [
                                'L-BFGS-B', 'TNC', 'SLSQP'
                        ]:
                            if self.split:
                                optimization = ScipySeparateOptimization(
                                    model, datasets, path,
                                    optimizationParameters['methodParameters'],
                                    optimizationParameters[
                                        'optimizationSetting'],
                                    self.energyFunction, optimizationMethod)
                            else:
                                optimization = ScipyOptimization(
                                    model, datasets, path,
                                    optimizationParameters['methodParameters'],
                                    optimizationParameters[
                                        'optimizationSetting'],
                                    self.energyFunction, optimizationMethod)
                        else:
                            logger.warning(
                                "The optimization optimizationMethod does not exist for {0} with {1}."
                                .format(
                                    model.name, optimizationParameters[
                                        'optimizationSetting']))
                            if self.force:
                                continue
                            else:
                                sys.exit("Optimization stops with error.")

                        if self.multiprocess:
                            try:
                                with multiprocessing.Pool() as pool:
                                    optimization.bestGuesses = pool.map(
                                        optimization.optimizeSingle,
                                        (i + 1 for i in range(self.times)))
                            except Exception:
                                logger.exception(
                                    "{0} with {1} optimization setting fails at multiprocessing"
                                    .format(
                                        model.name, optimizationParameters[
                                            'optimizationSetting']))
                                if self.force:
                                    continue
                                else:
                                    sys.exit("Optimization stops with error.")
                        else:
                            for i in range(self.times):
                                try:
                                    optimization.bestGuesses.append(
                                        optimization.optimizeSingle(i))
                                except Exception:
                                    logger.exception(
                                        "{0} with {1} optimization setting fails at {2} iteration"
                                        .format(
                                            model.name, optimizationParameters[
                                                'optimizationSetting'], i))
                                    if self.force:
                                        continue
                                    else:
                                        sys.exit(
                                            "Optimization stops with error.")

                        # to compress the SAGA optimization results
                        if optimizationMethod == 'SAGA':
                            if os.path.exists(optimization.path + model.name +
                                              '_all'):
                                shutil.make_archive(
                                    optimization.path + model.name + '_all',
                                    'zip',
                                    optimization.path + model.name + '_all')
                                shutil.rmtree(optimization.path + model.name +
                                              '_all')
                            if os.path.exists(optimization.path + model.name +
                                              '_best'):
                                shutil.make_archive(
                                    optimization.path + model.name + '_best',
                                    'zip',
                                    optimization.path + model.name + '_best')
                                shutil.rmtree(optimization.path + model.name +
                                              '_best')

                        if self.printOptimizationScript:
                            optimization.optimizationScripts()

                        jsonpickle.set_encoder_options('json',
                                                       sort_keys=True,
                                                       indent=4)
                        fileName = '{0}{1}_{2}{3}.json'.format(
                            path, model.name, datasetName,
                            optimization.optimizationSetting)
                        with open(fileName, 'w') as outFile:
                            outFile.write(
                                jsonpickle.encode({
                                    'model':
                                    model,
                                    'datasets':
                                    datasets,
                                    'bestGuesses':
                                    optimization.bestGuesses,
                                    'optimizationSetting':
                                    optimization.optimizationSetting,
                                    'energyFunction':
                                    self.energyFunction
                                }))

                # to store the paths to the optimization results files.
                with open(
                        self.path + '/{0}_{1}.txt'.format(
                            optimizationParameters['optimizationSetting'],
                            self.energyFunction), 'w') as resultsFile:
                    for dirpath, _, filenames in os.walk(path):
                        for f in filenames:
                            if f.endswith('{0}.json'.format(
                                    optimizationParameters[
                                        'optimizationSetting'])):
                                resultsFile.write(
                                    os.path.abspath(os.path.join(dirpath, f)) +
                                    '\n')
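
The result files written above can be loaded back with jsonpickle; a minimal sketch, assuming the encoded model and dataset classes are importable when decoding:

import jsonpickle

def load_optimization_result(fileName):
    # returns the dict written above: model, datasets, bestGuesses,
    # optimizationSetting and energyFunction
    with open(fileName) as inFile:
        return jsonpickle.decode(inFile.read())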
Esempio n. 59
0
def find_suspicious_detections(inputCsvFilename,
                               outputCsvFilename,
                               options=None):

    ##%% Input handling

    if options is None:

        options = SuspiciousDetectionOptions()

    toReturn = SuspiciousDetectionResults()

    ##%% Load file

    detection_results = load_api_results(
        inputCsvFilename,
        normalize_paths=True,
        filename_replacements=options.filenameReplacements)
    toReturn.detectionResults = detection_results

    # [ymin, xmin, ymax, xmax, confidence], where (xmin, ymin) is the upper-left

    ##%% Separate files into directories

    # This will be a map from a directory name to smaller data frames
    rowsByDirectory = {}

    # This is a mapping back into the rows of the original table
    filenameToRow = {}

    print('Separating files into directories...')

    # iRow = 0; row = detection_results.iloc[0]
    for iRow, row in detection_results.iterrows():

        relativePath = row['image_path']
        dirName = os.path.dirname(relativePath)

        if dirName not in rowsByDirectory:
            # Create a new DataFrame with just this row
            # rowsByDirectory[dirName] = pd.DataFrame(row)
            rowsByDirectory[dirName] = []

        rowsByDirectory[dirName].append(row)

        assert relativePath not in filenameToRow
        filenameToRow[relativePath] = iRow

    print('Finished separating {} files into {} directories'.format(
        len(detection_results), len(rowsByDirectory)))

    # Convert lists of rows to proper DataFrames
    dirs = list(rowsByDirectory.keys())
    for d in dirs:
        rowsByDirectory[d] = pd.DataFrame(rowsByDirectory[d])

    toReturn.rowsByDirectory = rowsByDirectory
    toReturn.filenameToRow = filenameToRow

    # Look for matches

    print('Finding similar detections...')

    # For each directory

    dirsToSearch = list(rowsByDirectory.keys())[0:options.debugMaxDir]

    # length-nDirs list of lists of DetectionLocation objects
    suspiciousDetections = [None] * len(dirsToSearch)

    if len(options.filterFileToLoad) == 0:

        allCandidateDetections = [None] * len(dirsToSearch)

        if not options.bParallelizeComparisons:

            options.pbar = None
            # iDir = 0; dirName = dirsToSearch[iDir]
            for iDir, dirName in enumerate(tqdm(dirsToSearch)):
                allCandidateDetections[iDir] = find_matches_in_directory(
                    dirName, options, rowsByDirectory)

        else:

            options.pbar = tqdm(total=len(dirsToSearch))
            allCandidateDetections = Parallel(
                n_jobs=options.nWorkers,
                prefer='threads')(delayed(find_matches_in_directory)(
                    dirName, options, rowsByDirectory)
                                  for dirName in tqdm(dirsToSearch))

        print('\nFinished looking for similar bounding boxes')

        ##%% Find suspicious locations based on match results

        print('Filtering out suspicious detections...')

        nImagesWithSuspiciousDetections = 0
        nSuspiciousDetections = 0

        # For each directory
        #
        # iDir = 51
        for iDir in range(len(dirsToSearch)):

            # A list of DetectionLocation objects
            suspiciousDetectionsThisDir = []

            # A list of DetectionLocation objects
            candidateDetectionsThisDir = allCandidateDetections[iDir]

            for iLocation, candidateLocation in enumerate(
                    candidateDetectionsThisDir):

                # occurrenceList is a list of file/detection pairs
                nOccurrences = len(candidateLocation.instances)

                if nOccurrences < options.occurrenceThreshold:
                    continue

                nImagesWithSuspiciousDetections += nOccurrences
                nSuspiciousDetections += 1

                suspiciousDetectionsThisDir.append(candidateLocation)
                # Find the images corresponding to this bounding box, render boxes

            suspiciousDetections[iDir] = suspiciousDetectionsThisDir

        print(
            'Finished searching for problematic detections\nFound {} unique detections on {} images that are suspicious'
            .format(nSuspiciousDetections, nImagesWithSuspiciousDetections))

    else:

        print('Bypassing detection-finding, loading from {}'.format(
            options.filterFileToLoad))

        # Load the filtering file
        detectionIndexFileName = options.filterFileToLoad
        with open(detectionIndexFileName, 'r') as fIn:
            sIn = fIn.read()
        suspiciousDetections = jsonpickle.decode(sIn)
        filteringBaseDir = os.path.dirname(options.filterFileToLoad)
        assert len(suspiciousDetections) == len(dirsToSearch)

        nDetectionsRemoved = 0
        nDetectionsLoaded = 0

        # For each directory
        # iDir = 0; detections = suspiciousDetectionsBeforeFiltering[0]
        for iDir, detections in enumerate(suspiciousDetections):

            bValidDetection = [True] * len(detections)
            nDetectionsLoaded += len(detections)

            # For each detection that was present before filtering
            for iDetection, detection in enumerate(detections):

                # Is the image still there?
                imageFullPath = os.path.join(
                    filteringBaseDir, detection.sampleImageRelativeFileName)

                # If not, remove this from the list of suspicious detections
                if not os.path.isfile(imageFullPath):
                    nDetectionsRemoved += 1
                    bValidDetection[iDetection] = False

            nRemovedThisDir = len(bValidDetection) - sum(bValidDetection)
            if nRemovedThisDir > 0:
                print('Removed {} of {} detections from directory {}'.format(
                    nRemovedThisDir, len(detections), iDir))

            # ...for each detection

            detectionsFiltered = list(compress(detections, bValidDetection))
            suspiciousDetections[iDir] = detectionsFiltered

        # ...for each directory

        print('Removed {} of {} total detections via manual filtering'.format(
            nDetectionsRemoved, nDetectionsLoaded))

    # ...if we are/aren't finding detections (vs. loading from file)

    toReturn.suspiciousDetections = suspiciousDetections

    if options.bRenderHtml:

        # Render problematic locations with html (loop)

        print('Rendering html')

        nDirs = len(dirsToSearch)
        directoryHtmlFiles = [None] * nDirs

        if options.bParallelizeRendering:

            # options.pbar = tqdm(total=nDirs)
            options.pbar = None

            directoryHtmlFiles = Parallel(
                n_jobs=options.nWorkers,
                prefer='threads')(delayed(render_images_for_directory)(
                    iDir, directoryHtmlFiles, suspiciousDetections, options)
                                  for iDir in tqdm(range(nDirs)))

        else:

            options.pbar = None

            # For each directory
            # iDir = 51
            for iDir in range(nDirs):

                # Add this directory to the master list of html files
                directoryHtmlFiles[iDir] = render_images_for_directory(
                    iDir, directoryHtmlFiles, suspiciousDetections, options)

            # ...for each directory

        # Write master html file

        masterHtmlFile = os.path.join(options.outputBase, 'index.html')
        os.makedirs(options.outputBase, exist_ok=True)
        toReturn.masterHtmlFile = masterHtmlFile

        with open(masterHtmlFile, 'w') as fHtml:

            fHtml.write('<html><body>\n')
            fHtml.write(
                '<h2><b>Suspicious detections by directory</b></h2></br>\n')
            for iDir, dirHtmlFile in enumerate(directoryHtmlFiles):
                if dirHtmlFile is None:
                    continue
                relPath = os.path.relpath(dirHtmlFile, options.outputBase)
                dirName = dirsToSearch[iDir]
                fHtml.write('<a href={}>{}</a><br/>\n'.format(
                    relPath, dirName))
            fHtml.write('</body></html>\n')

    # ...if we're rendering html

    toReturn.allRowsFiltered = update_detection_table(toReturn, options,
                                                      outputCsvFilename)

    # Create filtering directory
    print('Creating filtering folder...')

    dateString = datetime.now().strftime('%Y.%m.%d.%H.%M.%S')
    filteringDir = os.path.join(options.outputBase, 'filtering_' + dateString)
    os.makedirs(filteringDir, exist_ok=True)

    # iDir = 0; suspiciousDetectionsThisDir = suspiciousDetections[iDir]
    for iDir, suspiciousDetectionsThisDir in enumerate(suspiciousDetections):

        # suspiciousDetectionsThisDir is a list of DetectionLocation objects
        for iDetection, detection in enumerate(suspiciousDetectionsThisDir):

            bbox = detection.bbox
            instance = detection.instances[0]
            relativePath = instance.filename
            inputFullPath = os.path.join(options.imageBase, relativePath)
            assert (os.path.isfile(inputFullPath))
            outputRelativePath = 'dir{:0>4d}_det{:0>4d}.jpg'.format(
                iDir, iDetection)
            outputFullPath = os.path.join(filteringDir, outputRelativePath)
            render_bounding_box(bbox, inputFullPath, outputFullPath, 15)
            detection.sampleImageRelativeFileName = outputRelativePath

    # Write out the detection index
    detectionIndexFileName = os.path.join(filteringDir, 'detectionIndex.json')
    jsonpickle.set_encoder_options('json', sort_keys=True, indent=4)
    s = jsonpickle.encode(suspiciousDetections)
    with open(detectionIndexFileName, 'w') as f:
        f.write(s)

    toReturn.filterFile = detectionIndexFileName

    return toReturn
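
A hedged calling sketch; the option attributes set here are the ones the function reads above, and the paths are placeholders:

# hypothetical driver, not part of the original snippet
options = SuspiciousDetectionOptions()
options.imageBase = '/data/camera-traps'        # placeholder
options.outputBase = '/data/suspicious-output'  # placeholder
options.occurrenceThreshold = 10
options.bRenderHtml = False

results = find_suspicious_detections('detections.csv', 'detections_filtered.csv', options)
print('Filtering index written to {}'.format(results.filterFile))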
Esempio n. 60
0
def to_json(obj):
    jsonpickle.set_encoder_options('json',
                                   sort_keys=True,
                                   indent=4,
                                   separators=(',', ': '))
    return jsonpickle.encode(obj)
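
Called on an ordinary object this returns a stable, pretty-printed string; a trivial sketch:

print(to_json({"b": 2, "a": 1}))   # keys come out sorted and indented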