Example #1
def test_get_file_info():
    s = '\n'

    path = util.get_relative_path(__file__, 'a.txt')
    s += 'get_file_info(\'' + path + '\') = ' + util.to_json(
        util.get_file_info(path)) + '\n'

    s += '\n'
    path = util.get_relative_path(__file__, 'dir1')
    s += 'get_file_info(\'' + path + '\') = ' + util.to_json(
        util.get_file_info(path)) + '\n'

    s += '\n'
    path = util.get_relative_path(__file__, 'dir2')
    s += 'get_file_info(\'' + path + '\') = ' + util.to_json(
        util.get_file_info(path)) + '\n'

    s += '\n'
    path = util.get_relative_path(__file__, 'dir2/')
    s += 'get_file_info(\'' + path + '\') = ' + util.to_json(
        util.get_file_info(path)) + '\n'

    #path = util.get_relative_path(__file__, 'notexist')
    #s += 'get_file_info(\'' + path + '\') = ' + util.to_json(util.get_file_info(path)) + '\n'

    return s
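
The four nearly identical blocks above differ only in the target name; a compact variant that drives them from a loop (a sketch against the same util module; the function name is hypothetical and the output formatting is close but not byte-for-byte identical):

def test_get_file_info_compact():
    s = '\n'
    for name in ('a.txt', 'dir1', 'dir2', 'dir2/'):
        path = util.get_relative_path(__file__, name)
        # one "get_file_info(...) = ..." line per target, separated by blank lines
        s += "get_file_info('%s') = %s\n\n" % (path, util.to_json(util.get_file_info(path)))
    return s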
Example #2
def mine():
    # check that there are pending transactions
    if not block_chain.current_transactions:
        return msg_resp('No transactions', 400)

    # do the proof-of-work
    proof = block_chain.proof_of_work(block_chain.last_proof())

    # get a reward; sender "0" marks a newly mined coin
    block_chain.new_transaction("0", node_identifier, 1)

    # build a block
    previous_hash = block_chain.hash(block_chain.last_block)

    # add to chain
    block = block_chain.new_block(proof, previous_hash)

    # add start
    block_chain_json = {
        'chain': block_chain.chain,
        'length': len(block_chain.chain),
    }
    # pub msg to other nodes
    pub_msg(TOPIC_RESOLVE, util.to_json(block_chain_json))
    # add end

    resp = util.to_json(block)
    return json_resp(resp)
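
The msg_resp and json_resp helpers are not shown in this example. A minimal sketch of what they might look like, assuming a Flask app (the names come from the example above, but the bodies here are assumptions, not the original implementations):

from flask import Response

def msg_resp(message, status):
    # hypothetical helper: plain-text message plus an HTTP status code
    return Response(response=message, status=status, mimetype='text/plain')

def json_resp(json_str, status=200):
    # hypothetical helper: pre-serialized JSON body
    return Response(response=json_str, status=status, mimetype='application/json')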
Example #3
        def get(self):
            # Retrieve the original url from the 'url' parameter or the Referer header
            if 'url' in self.request.params:
                original_url = self.request.params['url']
            else:
                original_url = self.request.headers.get('referer')
            if original_url and 'agendamlg-api' in original_url:
                original_url = '/'

            self.response.status_int = 301
            if decorator.has_credentials():
                # The user is already logged in, so store information about them in ndb
                try:
                    # Retrieve the original url from memcache, falling back to the value above
                    come_from = memcache.get('come_from')
                    original_url = come_from if come_from else original_url
                    if isinstance(original_url, unicode):
                        original_url = original_url.encode('utf-8')
                    memcache.delete('come_from')
                    # Get user's info
                    http = decorator.http()
                    user = service.people().get(userId="me").execute(http=http)
                    user_id = user[u'id']
                    # Register or modify the user in ndb
                    user_or_not = Usuario.query(Usuario.idGoogle == user_id).fetch()
                    if not user_or_not:
                        Usuario(
                            idGoogle=user[u'id'],
                            tipo=1,
                            preferencias=[],
                            extra=to_json(user),
                            parent=agenda_key()
                        ).put()
                        newcomer = True
                    else:
                        user_or_not[0].extra = to_json(user)
                        user_or_not[0].put()
                        newcomer = False

                    jwt_token = create_jwt_token(user_id)
                    if original_url:
                        self.redirect('{}?token={}&newcomer={}'.format(original_url, jwt_token, newcomer))
                    else:
                        newcomer = str(newcomer).lower()
                        self.response.headers['Content-Type'] = 'application/json; charset=utf8'
                        self.response.write(u'{{"token": "{}", "newcomer": {}}}'.format(jwt_token, newcomer))
                except client.AccessTokenRefreshError:
                    self.redirect(self.request.path_url + '?' + urlencode({'url': original_url}))
            else:
                # Redirect to Google Authentication page
                # TODO Find a better way to store original_url that is unique per login
                if original_url:
                    memcache.set('come_from', original_url)
                self.redirect(decorator.authorize_url())
Example #4
def test_grep():
    ret = ''
    ret += util.to_json(util.grep('dir2', '2019')) + '\n\n'
    ret += util.to_json(util.grep('dir2', r'\d')) + '\n\n'
    ret += util.to_json(util.grep('dir2/file1.txt', 'b')) + '\n\n'
    ret += util.to_json(util.grep('dir2/file1.txt', 'a|b')) + '\n\n'
    ret += util.to_json(util.grep('dir2/file1.txt', 'z')) + '\n\n'
    #ret += util.to_json(util.grep('C:/Program Files', r'\d')) + '\n\n'
    ret += util.grep('dir2', r'\d', output='text') + '\n\n'

    ret += '\\.log$\n'
    ret += util.grep('dir2', r'\d', filename=r'\.log$', output='text') + '\n\n'

    return ret
Example #5
def dump_source_to_meta(source_id_to_meta, format="text"):
    if format == "text":
        for source_meta in source_id_to_meta.values():
            print("-"*10, source_meta["source_id"])
            for place in source_meta["places"].values():
                print(f"  {place['city_name']:25} {place['place_name']:50} {(place['num_all'] or ''):6} ({place['place_id']})")

    elif format == "json":
        print(to_json(source_id_to_meta, indent=2))

    elif format == "csv":
        keys = ["place_id", "place_name", "city_name", "num_all", "address", "latitude", "longitude",
                "place_url", "source_id", "source_web_url"]

        rows = []
        for source_meta in source_id_to_meta.values():
            for place in source_meta["places"].values():
                place = place.copy()
                if place["address"]:
                    place["address"] = "\n".join(place["address"])
                if place["coordinates"]:
                    place["latitude"] = place["coordinates"][0]
                    place["longitude"] = place["coordinates"][1]
                place.pop("coordinates", None)
                place["source_id"] = source_meta["source_id"]
                place["source_web_url"] = source_meta["source_web_url"]

                rows.append(place)

        with StringIO() as fp:
            writer = csv.DictWriter(fp, keys)
            writer.writeheader()
            writer.writerows(rows)
            fp.seek(0)
            print(fp.read())
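
The nested shape that dump_source_to_meta expects can be read off the code above; a minimal hand-built input for a quick test (all identifiers and values are made up for illustration):

source_id_to_meta = {
    "example-source": {
        "source_id": "example-source",
        "source_web_url": "https://example.org/parking",
        "places": {
            "example-source-1": {
                "place_id": "example-source-1",
                "place_name": "Garage A",
                "city_name": "Exampletown",
                "num_all": 120,
                "address": ["Main St 1", "12345 Exampletown"],
                "coordinates": [52.52, 13.41],
                "place_url": None,
            },
        },
    },
}
dump_source_to_meta(source_id_to_meta, format="csv")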
Example #6
def dump_place_id_to_timestamps(place_id_to_timestamps, place_id_filters=None, format="text"):
    filtered_data = dict()
    for place_id in sorted(place_id_to_timestamps):
        if place_id_filters and not place_id_filters.matches(place_id):
            continue
        filtered_data[place_id] = place_id_to_timestamps[place_id]

    if format == "text":
        for place_id in sorted(filtered_data):
            print(place_id)
            for value in sorted(filtered_data[place_id], key=lambda v: v["timestamp"]):
                print("  ", value["timestamp"].isoformat(), ":", value["num_free"])

    elif format == "json":
        print(to_json(filtered_data))

    elif format == "csv":
        io = StringIO()
        writer = csv.DictWriter(io, fieldnames=("place_id", "timestamp", "num_free"))
        writer.writeheader()
        for place_id in sorted(filtered_data):
            for ts in sorted(place_id_to_timestamps[place_id], key=lambda v: v["timestamp"]):
                ts = deepcopy(ts)
                ts["place_id"] = place_id
                writer.writerow(ts)
        io.seek(0)
        print(io.read())
Example #7
      def __call__(self, environ, start_response):
          try:
              # create http request
              request = HttpRequest(environ)

              (re_url, url, func_callback) = self.__find_mapped_func(request)

              resp_mimetype = func_callback.produces
              if "application/json" == resp_mimetype.lower():
                  response = HttpJSonResponse(output={}, content_type=resp_mimetype)
              else:
                  response = HttpResponse(content_type=resp_mimetype)

              opt_func_out = func_callback(request, response)  # call the web-mapped function
              # the developer can manipulate the response directly, or return an object|string as output

              if opt_func_out:
                  if isinstance(opt_func_out, basestring):
                      response.set_output_body(str(opt_func_out))

                  elif util.is_collection(opt_func_out):
                      # serialize the collection itself, not its string repr
                      response.set_output_body(util.to_json(opt_func_out))

              return response.send(start_response)

          except (KeyboardInterrupt, SystemExit, MemoryError):
              raise InternalServerError("Request failed due to an Internal Server Error.")

          except Exception, e:
              return self.__handle_error(e, (environ, start_response))
Example #8
 def __unicode__(self):
     return to_json({
         'default_lang': self.default_lang,
         'id': self.pid,
         'version': self.version,
         'name': self.name,
         'version_name': self.version_name,
         'recent_change': self.recent_change,
         'description': self.description,
         'app_id': self.app_id,
         'category': self.category,
         'min_app_vc': self.min_app_vc,
         'max_app_vc': self.max_app_vc,
         'supported_mod': self.supported_mod,
         'author_email': self.author_email,
         'author_name': self.author_name,
         'author_phone': self.author_phone,
         'author_website': self.author_website,
         'logo': self.logo,
         'cover': self.cover,
         'screenshot1': self.screenshot1,
         'screenshot2': self.screenshot2,
         'screenshot3': self.screenshot3,
         'screenshot4': self.screenshot4,
         'screenshot5': self.screenshot5,
         'dependencies': self.dependencies,
     })
Example #9
 def hash(block):
     # block_string = json.dumps(block, sort_keys=True).encode()
     if not block:
         block_str = "root"
     else:
         block_str = util.to_json(block)
     return hashlib.sha256(block_str.encode()).hexdigest()
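
Block hashing only works if the serialization is deterministic, which is what the commented-out json.dumps(..., sort_keys=True) line hints at. A standalone equivalent using only the standard library, assuming util.to_json produces a canonical (key-sorted) encoding:

import hashlib
import json

def hash_block(block):
    # "root" stands in for the missing predecessor of the genesis block;
    # otherwise hash the key-sorted JSON encoding of the block
    block_str = "root" if not block else json.dumps(block, sort_keys=True)
    return hashlib.sha256(block_str.encode()).hexdigest()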
Example #10
def test_to_json():
    data = {
        'key1': 'val1',
        'key2': 'val2',
        'key3': [1, 2, 3],
        'key4': {
            'key4-1': 1,
            'key4-2': 2,
            'key4-3': 3
        }
    }

    s = util.to_json(data) + '\n'
    s += util.to_json(data, indent=2) + '\n'

    return s
Example #11
def main():
    args = parse_args()
    config = util.load_yaml(args.config)
    files = util.input_files(args.files)
    lang = language.get(args.lang, None)["TypeMap"]

    files.extend([f"{PRESTO_HOME}/{file}" for file in config.JavaClasses])

    classes = defaultdict(util.attrdict)
    depends = defaultdict(set)

    subclasses = {}
    for abstract_name, abstract_value in config.AbstractClasses.items():

        classes[abstract_name].class_name = abstract_name
        classes[abstract_name].field_name = member_name(abstract_name)
        classes[abstract_name].abstract = True
        classes[abstract_name].super_class = abstract_value.super
        if "comparable" in abstract_value:
            classes[abstract_name].comparable = True
        classes[abstract_name].subclasses = []

        for subclass in abstract_value.subclasses:
            subclasses[subclass.name] = util.attrdict(super=abstract_name,
                                                      key=subclass.key)

            classes[abstract_name].subclasses.append(
                util.attrdict(
                    type=subclass.name,
                    name=member_name(subclass.name),
                    key=subclass.key,
                ))
            classes[abstract_name].subclasses[-1]._N = len(
                classes[abstract_name].subclasses)

        classes[abstract_name].subclasses[-1]._last = True

        if "source" in abstract_value:
            file = abstract_value.source
            process_file(f"{PRESTO_HOME}/{file}", config, lang, subclasses,
                         classes, depends)
        else:
            classes[abstract_name].fields = []
            add_extra(abstract_name, abstract_name, config, lang, classes,
                      depends)

    for file in files:
        process_file(file, config, lang, subclasses, classes, depends)

    depends = list(topological({k: list(v) for k, v in depends.items()}))[::-1]

    # the marker is split so this generator script is not itself flagged as @generated
    comment = "// This file is generated DO NOT EDIT @" + "generated"
    result = [{"comment": comment}]
    result += [classes[name] for name in depends if name in classes]
    result += [classes[name] for name in config.AddToOutput]

    if args.json:
        print(util.to_json(result))
Example #12
    def initialize_model(self):
        """
        Initialize the corpus features and the model parameters.
        """
        train, label = self.corpus.generator()
        self.train_matrix = xgb.DMatrix(train, label=label)

        # read params from config
        self.max_depth = to_json(self.config.get('model', 'max_depth'))
        self.eta = to_json(self.config.get('model', 'eta'))
        self.subsample = to_json(self.config.get('model', 'subsample'))
        self.objective = to_json(self.config.get('model', 'objective'))
        self.silent = to_json(self.config.get('model', 'silent'))
        self.num_boost_round = int(self.config.get('model', 'num_boost_round'))
        self.nfold = int(self.config.get('model', 'nfold'))
        self.stratified = bool(int(self.config.get('model', 'stratified')))
        self.metrics = self.config.get('model', 'metrics')
        self.early_stopping_rounds = int(self.config.get('model', 'early_stopping_rounds'))
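
A sketch of how these attributes would typically be fed to xgboost afterwards; the surrounding method is an assumption, while xgb.cv and its keyword arguments are real xgboost API:

    def cross_validate(self):
        params = {
            'max_depth': self.max_depth,
            'eta': self.eta,
            'subsample': self.subsample,
            'objective': self.objective,
            'silent': self.silent,
        }
        # run k-fold cross-validation on the DMatrix built in initialize_model
        return xgb.cv(params, self.train_matrix,
                      num_boost_round=self.num_boost_round,
                      nfold=self.nfold,
                      stratified=self.stratified,
                      metrics=self.metrics,
                      early_stopping_rounds=self.early_stopping_rounds)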
Example #13
def app(environ, start_response):
    status = "200 OK"
    headers = [("Content-type", "application/octet-stream")]
    if environ["REQUEST_METHOD"] == "GET":
        body = util.to_json(environ).encode("utf-8")
    else:
        body = environ["wsgi.input"].read(int(environ["CONTENT_LENGTH"]))
    start_response(status, headers)
    return [body]  # WSGI expects an iterable of bytes, not a bare string
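
A quick way to exercise this app locally with only the standard library (assuming util.to_json can serialize the environ dict):

from wsgiref.simple_server import make_server

# GET returns the JSON-encoded WSGI environ; other methods echo the request body back
make_server('', 8000, app).serve_forever()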
Example #14
def new_transaction():
    values = request.get_json()
    index = add_transaction(values)
    if index < 0:
        return msg_resp('Missing values', 400)
    # pub transaction to other nodes
    pub_msg(TOPIC_NEW_TRANSACTION, util.to_json(values))
    # do response
    return msg_resp(f'Transaction will be added to Block {index}', 201)
Example #15
def eventos_json(eventos, version_corta=True):
    """
    Given a list of events, return the corresponding JSON to be sent to the browser
    :param eventos: list of events to be converted to JSON
    :param version_corta: bool, whether to render the short version of the events
    :return:
    """

    eventos_dic = lista_eventos_diccionario(eventos, version_corta)

    return util.to_json(eventos_dic)
Example #16
 def get(self, path):
     if not path:
         path = "."
     if os.path.isdir(path):
         p = lambda i: os.path.join(path, i)
         self.render("dir.html", ls=[
             (i + "/" * os.path.isdir(p(i)), os.stat(p(i)).st_size)
             for i in os.listdir(path)
         ])
     else:
         environ = tornado.wsgi.WSGIContainer.environ(self.request)
         self.set_header("Content-type", "text/plain")
         self.write(util.to_json(environ))
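
A minimal sketch of how such a handler is usually wired into a Tornado application (the handler class name and the route are assumptions):

import tornado.web

# the capture group in the path regex becomes the `path` argument of get()
application = tornado.web.Application([
    (r"/(.*)", FileBrowserHandler),
])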
Example #17
def backup_tmux(tmux_id):
    """Capture the current tmux sessions and save them as JSON."""

    LOG.info('backing up the current tmux sessions')
    # the id is a timestamp
    parent_dir = path.join(config.BACKUP_PATH, tmux_id)

    tmux = tmux_obj.Tmux(tmux_id)
    tmux.create_time = datetime.datetime.fromtimestamp(
        time.time()).strftime('%Y-%m-%d %H:%M:%S')
    tmux.sessions = load_sessions()

    util.to_json(tmux, parent_dir, tmux_id + '.json')

    # if tmux and not os.path.exists(parent_dir):
    #     os.makedirs(parent_dir)
    for s in tmux.sessions:
        for w in s.windows:
            for p in w.panes:
                cmd.capture_pane(p.idstr(), path.join(parent_dir, p.idstr()))

    LOG.info('Backup of sessions was saved under %s' % parent_dir)
Example #18
def open_forms(entity, action, key=None):

    data = None
    if key:
        # keep the entity name for the template lookup; fetch the record separately
        record = ENTITY_DICT[entity][key]
        data = to_json(record.to_dict(related_objects=True))
        print(data)
    tmpl = templates[action + "_" + entity]
    if "focus_patient" in app.config:
        patient_id = app.config["focus_patient"]

        return template(tmpl,
                        data=data,
                        username=app.config['username'],
                        patient_id=patient_id)
    return template(tmpl, username=app.config['username'])
Example #19
    def __comentarios_a_json(comentarios):
        def creador(usuario_key):
            usuario = usuario_key.get()
            usuario_info = from_json(usuario.extra)
            return {
                u'id': usuario.idGoogle,
                u'nombre': usuario_info[u'displayName'],
                u'image': usuario_info[u'image'][u'url'][0:-6]  # drop the 6-char size suffix (e.g. '?sz=50')
            }

        return to_json([{
            u'texto': c.texto,
            u'fecha': u'{}Z'.format(c.fecha.isoformat()),
            u'creador': creador(c.creador),
            u'id': c.key.urlsafe()
        } for c in comentarios])
Example #20
def parse():
    source = request.form.get('source', "")
    if not source.endswith("\n"):
        source += "\n"
    return jsonify(tree=to_json(parse_string(source)))
Example #21
def test_to_json():
    data = {
        'key1': 'val1',
        'key2': 'val2',
    }
    return util.to_json(data)
Example #22
 def decorated_function(*args, **kwargs):
     result = f(*args, **kwargs)  # call function
     json_result = util.to_json(result)
     return Response(response=json_result,
                     status=200,
                     mimetype='application/json')
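
This is the inner function of a response-serializing decorator; a minimal sketch of the full wrapper it would sit in, assuming Flask (the decorator name is an assumption):

from functools import wraps
from flask import Response

def returns_json(f):
    @wraps(f)
    def decorated_function(*args, **kwargs):
        result = f(*args, **kwargs)  # call the wrapped view function
        json_result = util.to_json(result)
        return Response(response=json_result,
                        status=200,
                        mimetype='application/json')
    return decorated_function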
Example #23
def main():
    parser = argparse.ArgumentParser(description='Confluence Secret Finder')
    parser.add_argument('--domain',
                        '-d',
                        action="store",
                        dest='domain',
                        help="Confluence domain.",
                        required=True)
    parser.add_argument('--user',
                        '-u',
                        action="store",
                        dest='user',
                        help="Confluence user.",
                        required=True)
    parser.add_argument('--token',
                        '-t',
                        action="store",
                        dest='token',
                        help="API token for the user.",
                        required=True)
    parser.add_argument(
        '--start-date',
        '-s',
        action="store",
        dest='start_date',
        help="Date (YYYY-MM-DD) from which to start crawling. Otherwise, the "
        "script defaults to the oldest content creation date or resumes where "
        "it last stopped.",
        required=False)
    parser.add_argument(
        '--max-attachment-size',
        '-m',
        action="store",
        dest='max_attachment_size',
        default=10,
        help="Max attachment size to download in MB. Defaults to 10MB.",
        required=False)
    parser.add_argument('--blacklist',
                        '-b',
                        action='store',
                        dest='blacklist_file',
                        default=None,
                        help='File containing regexes to blacklist secrets.')
    parser.add_argument(
        '--cache-location',
        '-c',
        action='store',
        dest='cache_location',
        default=None,
        help='Specifies where the cache sqlite file will be saved.')
    parser.add_argument('-v',
                        action="store_true",
                        dest='verbose',
                        default=False,
                        help="Increases output verbosity.")
    parser.add_argument('-vv',
                        action="store_true",
                        dest='verbose_debug',
                        default=False,
                        help="Increases output verbosity even more.")
    parser.add_argument('--json',
                        '-j',
                        action="store_true",
                        dest='json',
                        default=False,
                        help="Outputs the results as json.")

    args = parser.parse_args()

    start_date = None
    if args.start_date:
        start_date = dateutil.parser.parse(args.start_date).date()

    if args.verbose or args.verbose_debug:
        logging.getLogger("sqlitedict").setLevel(logging.ERROR)
        logging.getLogger("chardet.charsetprober").setLevel(logging.ERROR)
        logging.getLogger().setLevel(
            logging.DEBUG if args.verbose_debug else logging.INFO)

    with App(args.domain, args.user, args.token, args.blacklist_file,
             args.max_attachment_size, args.cache_location, start_date) as app:
        for s in app.find_secrets():
            if args.json:
                j = to_json(s)
                print(j)
            else:
                print(f"{s.content.space}: {s.version}: {s.secrets}")
Example #24
def main():
    args = parse_args()
    config = util.load_yaml(args.config)
    thrift = util.load_yaml(args.thrift)
    protocol = util.load_yaml(args.protocol)

    pmap = {}
    for item in protocol:
        if "class_name" in item:
            pmap[item.class_name] = item

    # the marker is split so this generator script is not itself flagged as @generated
    comment = "// This file is generated DO NOT EDIT @" + "generated"
    result = [{"comment": comment}]

    for thrift_item in thrift:
        config_item = None
        if "class_name" in thrift_item and thrift_item.class_name in pmap:
            protocol_item = pmap[thrift_item.class_name]

            special = False
            if "struct" in thrift_item:
                if thrift_item.class_name in config.StructMap:
                    config_item = config.StructMap[thrift_item.class_name]
                    thrift_item["proto_name"] = config_item.class_name
                    special = True

                for field in thrift_item.fields:
                    if (
                        config_item is not None
                        and field.field_name in config_item.fields
                    ):
                        field["proto_name"] = config_item.fields[
                            field.field_name
                        ].field_name
                    else:
                        field["proto_name"] = field.field_name

                if "struct" in protocol_item:
                    thrift_field_set = {t.proto_name for t in thrift_item.fields}
                    protocol_field_set = {p.field_name for p in protocol_item.fields}
                    valid_fields = thrift_field_set.intersection(protocol_field_set)

                    for field in thrift_item.fields:
                        if field.field_name in valid_fields:
                            field["convert"] = True

                    if thrift_field_set - protocol_field_set:
                        eprint(
                            "Missing protocol fields: "
                            + thrift_item.class_name
                            + " "
                            + str(thrift_field_set - protocol_field_set)
                        )

                    if protocol_field_set - thrift_field_set:
                        eprint(
                            "Missing thrift fields: "
                            + thrift_item.class_name
                            + " "
                            + str(protocol_field_set - thrift_field_set)
                        )
                else:
                    hfile = "./special/" + thrift_item.class_name + ".hpp.inc"
                    special = special_file(hfile, special, thrift_item, "hinc")

                    cfile = "./special/" + thrift_item.class_name + ".cpp.inc"
                    special = special_file(cfile, special, thrift_item, "cinc")

                    if not special:
                        eprint(
                            "Thrift struct missing from presto_protocol: "
                            + thrift_item.class_name
                        )
        else:
            eprint("Thrift item missing from presto_protocol: " + thrift_item.class_name)

    result.extend(thrift)
    print(util.to_json(result))
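
The eprint helper used above is not shown; a minimal sketch of the conventional stderr-printing definition it presumably corresponds to:

import sys

def eprint(*args, **kwargs):
    # print diagnostics to stderr so they don't pollute the JSON on stdout
    print(*args, file=sys.stderr, **kwargs)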
Example #25
def dump_stats(place_arrays, place_id_filters=None, format="text"):
    max_place_id_length = max(len(place["place_id"]) for place in place_arrays)

    stats_list = []
    for place in place_arrays:
        if place_id_filters and not place_id_filters.matches(place["place_id"]):
            continue

        num_changes = 0
        last_value = "---"
        for value in place["y"]:
            if value != last_value:
                last_value = value
                num_changes += 1

        true_y = [v for v in place["y"] if v is not None]

        abs_change = 0
        if true_y:
            last_v = true_y[0]
            for v in true_y[1:]:
                abs_change += abs(v - last_v)
                last_v = v
            abs_change /= len(true_y)  # average absolute change per sample

        stats = {
            "place_id": place["place_id"],
            "num_timestamps": len(place["x"]),
            "num_changes": num_changes,
            "abs_changes": abs_change,
            "min_timestamp": place["x"][0],
            "max_timestamp": place["x"][-1],
        }

        for key in ("average", "min", "max", "median", "mean", "std", "var"):
            if true_y:
                stats[key] = round(getattr(np, key)(true_y), 1)
            else:
                stats[key] = ""

        stats_list.append(stats)

    if format == "text":
        for place in stats_list:
            print(
                f"{place['place_id']:{max_place_id_length}}"
                f" {place['min_timestamp']} first ts"
                f" {place['num_timestamps']:5} snapshots"
                f" {place['num_changes']:5} changes"
                f" {place['average']:7} average"
                f" {place['min']:7} min"
                f" {place['max']:7} max"
                f" {place['std']:7} std"
                f" {place['var']:7} var"
                f" {place['abs_changes']:7} abs-changes"
            )
        print(f"num places: {len(place_arrays)}")

    elif format == "json":
        print(to_json(stats_list))

    elif format == "csv":
        with StringIO() as fp:
            writer = csv.DictWriter(fp, stats_list[0].keys())
            writer.writeheader()
            writer.writerows(stats_list)
            fp.seek(0)
            print(fp.read())
Example #26
def full_chain():
    resp = {
        'chain': block_chain.chain,
        'length': len(block_chain.chain),
    }
    return json_resp(util.to_json(resp))
Example #27
 def render(self):
     if self.model is None:
         return "null"
     else:
         return to_json(self.model)
Example #28
def main():

    args = parse_args()

    sources = DataSources()

    if args.include:
        sources = sources.filtered(*args.include)

    if args.exclude:
        sources = sources.excluded(*args.exclude)

    if not sources.sources:
        print("No data sources matching the filter")
        exit(1)

    min_date, max_date = None, None

    try:
        min_date = datetime.datetime.strptime(args.date, "%Y-%m-%d")
    except (ValueError, TypeError):
        if args.date is None:
            pass
        elif args.date == "today":
            min_date = datetime.datetime.now().replace(hour=0, minute=0, second=0, microsecond=0)
        elif args.date == "yesterday":
            min_date = datetime.datetime.now().replace(hour=0, minute=0, second=0, microsecond=0)
            min_date -= datetime.timedelta(days=1)
        else:
            print(f"Invalid date '{args.date}'")
            exit(2)

    if min_date:
        max_date = min_date + datetime.timedelta(days=1) - datetime.timedelta(seconds=1)

    place_id_filters = args.include_id
    if place_id_filters:
        place_id_filters = RegexFilter(*place_id_filters)

    if args.command == "list":
        for source in sources.sources:
            print(source)

    elif args.command in ("dump", "test"):
        all_data = download_sources(sources, use_cache=args.cache)
        if args.command == "dump":
            dump_raw_data(all_data, place_id_filters=place_id_filters)

    elif args.command in ("dump-meta", "test-meta"):
        all_data = download_sources(sources, use_cache=args.cache, meta=True)
        if args.command == "dump-meta":
            dump_raw_data(all_data, place_id_filters=place_id_filters)

    elif args.command == "store":
        download_sources(sources, use_cache=args.cache, do_store=True)

    elif args.command == "store-meta":
        download_sources(sources, use_cache=args.cache, do_store=True, meta=True)

    elif args.command == "load":
        place_id_to_timestamps = Storage().load_sources(sources, min_timestamp=min_date, max_timestamp=max_date)
        dump_place_id_to_timestamps(place_id_to_timestamps, place_id_filters=place_id_filters, format=args.format)

    elif args.command == "load-meta":
        source_id_to_meta = Storage().load_sources_meta(sources, min_timestamp=min_date, max_timestamp=max_date)
        dump_source_to_meta(source_id_to_meta, format=args.format)

    elif args.command == "load-stats":
        place_arrays = Storage().load_sources_arrays(sources, min_timestamp=min_date, max_timestamp=max_date)
        dump_stats(place_arrays, place_id_filters=place_id_filters, format=args.format)

    elif args.command == "export-csv":
        export_csv(sources, min_date, max_date, place_id_filters, args.csv_path)

    elif args.command == "export":
        place_id_to_timestamps = Storage().load_sources(sources, min_timestamp=min_date, max_timestamp=max_date)
        source_id_to_meta = Storage().load_sources_meta(sources)
        data = convert_place_id_to_timestamps_to_export(place_id_to_timestamps, source_id_to_meta, place_id_filters=place_id_filters)
        print(to_json(data, indent=2))

    elif args.command == "to-influxdb":
        place_id_to_timestamps = Storage().load_sources(sources, min_timestamp=min_date)
        source_id_to_meta = Storage().load_sources_meta(sources)
        export_place_id_to_timestamps_influx(place_id_to_timestamps, source_id_to_meta, place_id_filters=place_id_filters)

    else:
        print(f"Unknown command '{args.command}'")
        exit(2)
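
RegexFilter is referenced here and in the dump helpers above but not shown; a minimal sketch consistent with how it is called (matching any of the given patterns; this is an assumption, not the original class):

import re

class RegexFilter:
    def __init__(self, *patterns):
        self.regexes = [re.compile(p) for p in patterns]

    def matches(self, text):
        # True if any of the configured patterns matches somewhere in the text
        return any(r.search(text) for r in self.regexes)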
Example #29
    m, n = 3, 3
    tf_range = linspace(800, 1200, m)
    samples_range = floor(linspace(50, 70, n))
    biases = zeros(shape=(m, n, 3))
    stderr = zeros(shape=(m, n, 3))
    mc = MonteCarlo(runs=200)
    for i, tf in enumerate(tf_range):
        for j, samples in enumerate(samples_range):
            estimates = mc.simulate(
                a=xt[0], b=xt[1], c=xt[2],
                sigma=sigma, n=samples, tf=tf
            )
            biases[i, j, :] = median(estimates, axis=0) - xt
            stderr[i, j, :] = std(estimates, axis=0)

    to_json(biases, "biases.json")
    to_json(stderr, "stderr.json")
    xlab = "Total Time of Observation"
    ylab = "Total Number of Measurements"
    xx, yy = meshgrid(tf_range, samples_range, indexing="ij")
    plot_contour(
        xx, yy, stderr[:, :, 2],
        xlab=xlab,
        ylab=ylab,
        title="Std Err (c)",
        fname="fig3_stderr_c.png")
    plot_contour(
        xx, yy, stderr[:, :, 1],
        xlab=xlab,
        ylab=ylab,
        title="Std Err (b)",
Example #30
def test_get_dir_info():
    s = '\n'

    path = util.get_relative_path(__file__, 'a.txt')
    s += 'get_dir_info(\'' + path + '\') = ' + util.to_json(
        util.get_dir_info(path)) + '\n'

    s += '\n'
    path = util.get_relative_path(__file__, 'dir1')
    s += 'get_dir_info(\'' + path + '\') = ' + util.to_json(
        util.get_dir_info(path)) + '\n'

    s += '\n'
    path = util.get_relative_path(__file__, 'dir2')
    s += 'get_dir_info(\'' + path + '\') = ' + util.to_json(
        util.get_dir_info(path)) + '\n'

    s += '\n'
    path = util.get_relative_path(__file__, 'dir2')
    s += 'get_dir_info(\'' + path + '\', recursive=0) = ' + util.to_json(
        util.get_dir_info(path, recursive=0)) + '\n'

    s += '\n'
    path = util.get_relative_path(__file__, 'dir2')
    s += 'get_dir_info(\'' + path + '\', recursive=2) = ' + util.to_json(
        util.get_dir_info(path, recursive=2)) + '\n'

    s += '\n'
    path = util.get_relative_path(__file__, 'dir2')
    s += 'get_dir_info(\'' + path + '\', pattern=r\'.+\.log\', recursive=0) = ' + util.to_json(
        util.get_dir_info(path, pattern=r'.+\.log', recursive=0)) + '\n'

    s += '\n'
    path = util.get_relative_path(__file__, 'dir2')
    s += 'get_dir_info(\'' + path + '\', pattern=r\'.*log.*\', recursive=0) = ' + util.to_json(
        util.get_dir_info(path, pattern=r'.*log.*', recursive=0)) + '\n'

    s += '\n'
    path = util.get_relative_path(__file__, 'dir2')
    s += 'get_dir_info(\'' + path + '\', recursive=0, depth=1) = ' + util.to_json(
        util.get_dir_info(path, recursive=0, depth=1)) + '\n'

    s += '\n'
    path = util.get_relative_path(__file__, 'dir2')
    s += 'get_dir_info(\'' + path + '\', recursive=0, depth=2) = ' + util.to_json(
        util.get_dir_info(path, recursive=0, depth=2)) + '\n'

    s += '\n'
    path = util.get_relative_path(__file__, 'dir2/')
    s += 'get_dir_info(\'' + path + '\', recursive=0, depth=2) = ' + util.to_json(
        util.get_dir_info(path, recursive=0, depth=2)) + '\n'

    s += '\n'
    path = util.get_relative_path(__file__, 'dir2/aaa.txt')
    s += 'get_dir_info(\'' + path + '\') = ' + util.to_json(
        util.get_dir_info(path)) + '\n'

    #s += '\n'
    #path ='C:/Program Files/'
    #s += 'get_dir_info(\'' + path + '\', recursive=0, depth=1) = ' + util.to_json(util.get_dir_info(path, recursive=0, depth=1)) + '\n'

    s += '\n'
    path = 'C:/notexist/'
    try:
        s += 'get_dir_info(\'' + path + '\', recursive=0, depth=1) = ' + util.to_json(
            util.get_dir_info(path, recursive=0, depth=1)) + '\n'
    except FileNotFoundError as e:
        s += str(e)

    return s