Example no. 1
def resolve_flat(self, info, **kwargs):
    # merge the list of parameter dicts (later entries win), then flatten
    # nested keys into dotted paths before returning them to the client
    value = reduce(assign,
                   read_json(join(Args.logdir, self.id[1:])) or [{}])
    return dot_flatten(value)
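The resolver relies on two project helpers, assign and dot_flatten, that are not shown in the snippet. A minimal sketch of what they are assumed to do (hypothetical implementations, not the project's own code):

from functools import reduce

def assign(acc, other):
    # hypothetical helper: shallow-merge `other` into the accumulator,
    # so later dicts in the reduced list override earlier ones
    acc.update(other or {})
    return acc

def dot_flatten(d, prefix=""):
    # hypothetical helper: collapse nested dicts into dotted keys,
    # e.g. {"a": {"b": 1}} -> {"a.b": 1}
    flat = {}
    for k, v in d.items():
        key = f"{prefix}.{k}" if prefix else k
        if isinstance(v, dict):
            flat.update(dot_flatten(v, key))
        else:
            flat[key] = v
    return flat

# usage: reduce(assign, [{"lr": 0.1}, {"lr": 0.01, "seed": 1}]) == {"lr": 0.01, "seed": 1}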
Example no. 2
                        "date": {"type": "date",
                                 "format": "yyyy-MM-dd'T'HH:mm:ss.SSSSSS"},
                        "long": {"type": "long"},
                        "float": {"type": "float"},
                        "null": {"type": "boolean", "null_value": False}
                    }
                }
            }
        )))

    for chunk in tqdm(chunked(parameter_files, 1000), desc="Uploading..."):
        # merge each parameter file's list of dicts into a single record,
        # tagged with the directory it came from
        parameters = [reduce(assign, [
            *(read_json(join(cwd, f['path'])) or []), {"dir": f['dir']}
        ]) for f in chunk]
        # bulk-API action lines, keyed by the run directory
        actions = [{"index": dict(_id=p['dir'])} for p in parameters]
        # each document keeps the raw parameters plus a nested `index` field of
        # {key, typed value} entries built from the dot-flattened record
        documents = [dict(index=[dict(key=k, **v) for k, v in typify(dot_flatten(p)).items()], **p)
                     for p in parameters]

        # documents[0]

        # https://stackoverflow.com/questions/20288770/how-to-use-bulk-api-to-store-the-keywords-in-es-by-using-python
        response = es.bulk(index='ml-dash', body=interleave(actions, documents))

        if response['errors']:
            # report the first failed action/document pair, then stop
            for i, item in enumerate(response['items']):
                if item['index']['status'] >= 300:
                    print(item['index'])
                    print(documents[i])
                    break

    cprint('finished', 'green')
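The bulk body handed to es.bulk must alternate action metadata and document source entries; interleave(actions, documents) produces exactly that ordering, and chunked/interleave behave like the more_itertools utilities of the same names. typify is also not shown; a rough sketch, under the assumption that it wraps each flattened value in the typed sub-field declared in the nested mapping above (the field names used for strings are guesses, since the top of the mapping is cut off):

from itertools import chain

def interleave(*iterables):
    # hypothetical stand-in: round-robin the inputs, yielding
    # action_0, doc_0, action_1, doc_1, ...
    return list(chain.from_iterable(zip(*iterables)))

def typify(flat):
    # hypothetical: route each flattened value into the typed sub-field it
    # should be indexed under (bool check first, since bool is a subclass of int)
    out = {}
    for k, v in flat.items():
        if isinstance(v, bool):
            out[k] = {"null": v}
        elif isinstance(v, int):
            out[k] = {"long": v}
        elif isinstance(v, float):
            out[k] = {"float": v}
        else:
            out[k] = {"string": str(v)}
    return out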
Example no. 3
def resolve_flat(self, info, **kwargs):
    # note: this always gives a truncated some-folder/parameter.pkl path.
    value = reduce(
        assign,
        read_pickle_for_json(pJoin(Args.logdir, self.id[1:])) or [{}])
    return dot_flatten(value)
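This variant reads the run's parameters from a pickle stream rather than a JSON log. pJoin looks like an alias for os.path.join, and read_pickle_for_json is assumed to return a list of JSON-serializable dicts, or None when the file is missing so the `or [{}]` fallback applies. A hypothetical sketch:

import pickle

def read_pickle_for_json(path):
    # hypothetical: load every record appended to the pickle file and return
    # them as a list; return None on a missing file so the caller's fallback applies
    try:
        with open(path, "rb") as f:
            records = []
            while True:
                try:
                    records.append(pickle.load(f))
                except EOFError:
                    return records or None
    except FileNotFoundError:
        return None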
Example no. 4
                    }
                }
            })))

    for chunk in tqdm(chunked(parameter_files, 1000), desc="Uploading..."):
        parameters = [
            reduce(assign, [
                *(read_pickle_for_json(join(cwd, f['path'])) or []), {
                    "dir": f['dir']
                }
            ]) for f in chunk
        ]
        actions = [{"index": dict(_id=p['dir'])} for p in parameters]
        documents = [
            dict(index=[
                dict(key=k, **v) for k, v in typify(dot_flatten(p)).items()
            ],
                 **p) for p in parameters
        ]

        # documents[0]

        # https://stackoverflow.com/questions/20288770/how-to-use-bulk-api-to-store-the-keywords-in-es-by-using-python
        response = es.bulk(index='ml-dash',
                           body=interleave(actions, documents))

        if response['errors']:
            for i, item in enumerate(response['items']):
                if item['index']['status'] >= 300:
                    print(item['index'])
                    print(documents[i])