Example #1
0
def load_simdjson(file, data):
    """Parse JSON using simdjson, either from an already-open *file* object
    or, when *file* is None, from the filesystem path *data*."""
    import simdjson
    if file is None:
        with open(data, 'r') as handle:
            return simdjson.load(handle)
    return simdjson.load(file)
Example #2
0
def load_simdjson(file, data, verbose=False):
    """Parse JSON using simdjson, either from an already-open *file* object
    or, when *file* is None, from the filesystem path *data*.

    Args:
        file: an open file object, or None.
        data: path to a JSON file; used only when *file* is None.
        verbose: if True, print a progress message.

    NOTE(review): the original read ``args['verbose']`` -- ``args`` is not
    defined anywhere in this function, so any call with a non-None *file*
    raised NameError. Replaced with an explicit keyword parameter
    (default False, backward compatible).
    """
    import simdjson
    if file is not None:
        if verbose:
            # Was an f-string with no placeholders; plain literal suffices.
            print("Loading file using simdjson")
        return simdjson.load(file)

    with open(data, 'r') as f:
        return simdjson.load(f)
Example #3
0
 def _get_pred_attr_cache(self, gid):
     """Run a PRED_ATTR_BY_GID report op against the graph and return a
     ``(gid, pred_attr_cache)`` pair decoded from the resulting archive."""
     report_op = dag_utils.report_graph(
         self._graph, types_pb2.PRED_ATTR_BY_GID, gid=gid)
     archive = report_op.eval()
     # Order matters: the uint64 gid precedes the JSON payload in the archive.
     result_gid = archive.get_uint64()
     payload = io.BytesIO(archive.get_bytes())
     cache = simdjson.load(payload)
     return result_gid, cache
Example #4
0
def _test_load():
    """Ensure basic usage of load is the same."""
    # We don't use a binary file here because pre-py3.6 the built-in couldn't
    # handle bytes.
    path = 'jsonexamples/canada.json'

    with open(path, 'r') as handle:
        expected = json.load(handle)

    with open(path, 'rb') as handle:
        actual = simdjson.load(handle)

    assert expected == actual
Example #5
0
def load_p4(p4_file, p4_version=16, add_attrs=True):
    """Compile *p4_file* to JSON and build its HLIR representation.

    Args:
        p4_file: path to the P4 source (environment variables are expanded).
        p4_version: P4 language version; unused in the visible portion --
            TODO confirm it is consumed downstream.
        add_attrs: unused in the visible portion -- TODO confirm.

    Exits the process with the error code when walk_json_from_top does not
    produce a P4Node.
    """
    p4_file = os.path.expandvars(p4_file)
    json_filename = hlir16.hlir.p4_to_json(p4_file)

    import simdjson
    # Handle is named `json_file`, not `json`: the original shadowed the
    # stdlib `json` module name inside this function.
    with open(json_filename, 'r') as json_file:
        json_contents = simdjson.load(json_file)

    hlir = hlir16.hlir.walk_json_from_top(json_contents)
    if type(hlir) is not P4Node:
        # On failure walk_json_from_top returns an error code, not a P4Node.
        # (Plain assignment replaces the original's no-op walrus alias.)
        error_code = hlir
        print(f"Could not load P4 file {p4_file}, error code: {error_code}")
        sys.exit(error_code)
Example #6
0
def read_simdjson(filepath: str):
    """Parse the JSON file at *filepath* with simdjson.

    Returns the decoded document, or None if the file cannot be opened or
    does not parse.
    """
    try:
        with open(filepath) as fp:
            return simdjson.load(fp)
    except (OSError, ValueError):
        # Missing/unreadable file or malformed JSON -> best-effort None.
        # The original bare `except:` also swallowed KeyboardInterrupt,
        # SystemExit and programming errors such as NameError.
        return None
Example #7
0
def read_json(path: Union[str, Path]):
    """Load and return the JSON document stored at *path* using simdjson."""
    with open(path) as source:
        document = simdjson.load(source)
    return document
Example #8
0
def auth_cloud(gcs=None, s3=None):
    """Authenticate Google Cloud Storage and/or S3 clients.

    Args:
        gcs: path to a GCS service-account JSON file.
        s3: either a path (str) to an AWS shared-credentials file, or a dict
            containing key id/secret (lower- or upper-case keys) or an
            'AWS_SHARED_CREDENTIALS_FILE' entry.

    Side effects: sets the module globals _gcs_available, _s3_available,
    s3_client, gcp_client and gcp_storage_client, and may set the
    AWS_SHARED_CREDENTIALS_FILE environment variable.

    NOTE(review): the environment-variable fallback (the trailing `else`)
    only runs when *s3* is falsy -- including the GCS env fallback; confirm
    that coupling is intended.
    """
    global _gcs_available, _s3_available, s3_client, gcp_client, gcp_storage_client

    def _auth_gcs(credentials_path):
        # One place for GCS auth; closes the credentials file deterministically
        # (the original used json.load(open(...)) and leaked the handle).
        global gcp_client, gcp_storage_client, _gcs_available
        try:
            with open(credentials_path, 'r') as fh:
                # NOTE(review): this chained assignment also sets a *class*
                # attribute on service_account.Credentials; preserved as-is,
                # but it looks unintentional -- confirm.
                gcp_client = service_account.Credentials.service_account_info = json.load(fh)
            gcp_storage_client = storage.Client.from_service_account_json(credentials_path)
            _gcs_available = True
        except Exception as e:
            print(f'Unable to Authenticate GCS: {str(e)}')
            _gcs_available = False

    def _try_s3_session(**session_kwargs):
        # One place for boto3 session creation + availability bookkeeping.
        global s3_client, _s3_available
        try:
            s3_client = boto3.Session(**session_kwargs)
            _s3_available = True
        except Exception as e:
            print(f'Unable to Authenticate S3: {str(e)}')
            _s3_available = False

    if gcs:
        _auth_gcs(gcs)

    if s3:
        if _io_type(s3) == 'str' and os.path.exists(s3):
            os.environ['AWS_SHARED_CREDENTIALS_FILE'] = s3
            _try_s3_session()
        elif _io_type(s3) == 'dict':
            if 'aws_access_key_id' in s3 and 'aws_secret_access_key' in s3:
                _try_s3_session(aws_access_key_id=s3['aws_access_key_id'],
                                aws_secret_access_key=s3['aws_secret_access_key'])
            elif 'AWS_ACCESS_KEY_ID' in s3 and 'AWS_SECRET_ACCESS_KEY' in s3:
                _try_s3_session(aws_access_key_id=s3['AWS_ACCESS_KEY_ID'],
                                aws_secret_access_key=s3['AWS_SECRET_ACCESS_KEY'])
            elif 'AWS_SHARED_CREDENTIALS_FILE' in s3:
                # BUG FIX: the original assigned the whole dict `s3` into
                # os.environ, which raises TypeError; store the contained
                # path instead.
                os.environ['AWS_SHARED_CREDENTIALS_FILE'] = s3['AWS_SHARED_CREDENTIALS_FILE']
                _try_s3_session()
    else:
        _S3_ID = os.environ.get("AWS_ACCESS_KEY_ID", None)
        _S3_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY", None)
        _S3_PATH = os.environ.get("AWS_SHARED_CREDENTIALS_FILE", None)
        _GCS_PATH = os.environ.get("GOOGLE_APPLICATION_CREDENTIALS", None)
        if not _s3_available:
            if _S3_ID and _S3_KEY:
                _try_s3_session(aws_access_key_id=_S3_ID, aws_secret_access_key=_S3_KEY)
            elif _S3_PATH:
                os.environ['AWS_SHARED_CREDENTIALS_FILE'] = _S3_PATH
                _try_s3_session()
        if not _gcs_available and _GCS_PATH:
            _auth_gcs(_GCS_PATH)

    if _gcs_available:
        print('Authenticated GCS')
    else:
        gcp_client = None
        gcp_storage_client = None

    if _s3_available:
        print('Authenticated S3')
    else:
        s3_client = None
Example #9
0
    _tqdm_available = False
# Feature-availability probes: optional dependencies are imported inside
# try/except so the module still loads without them.
try:
    import numpy as np
    _numpy_available = True
except ImportError:
    _numpy_available = False



try:
    from google.cloud import storage
    from google.oauth2 import service_account
    # Path to a service-account JSON file, if configured in the environment.
    USE_GCS = os.environ.get("GOOGLE_APPLICATION_CREDENTIALS", None)
    if USE_GCS:
        if os.path.exists(USE_GCS):
            # Close the credentials file deterministically; the original
            # json.load(open(...)) leaked the handle.
            with open(USE_GCS, 'r') as _gcs_fh:
                # NOTE(review): this chained assignment also sets a *class*
                # attribute on service_account.Credentials; preserved as-is,
                # but it looks unintentional -- confirm.
                gcp_client = service_account.Credentials.service_account_info = json.load(_gcs_fh)
            gcp_storage_client = storage.Client.from_service_account_json(USE_GCS)
            _gcs_available = True
        else:
            gcp_client, gcp_storage_client = None, None
            _gcs_available = False
    else:
        _gcs_available = False
except ImportError:
    gcp_client, gcp_storage_client = None, None
    _gcs_available = False
try:
    import boto3
    _S3_ID = os.environ.get("AWS_ACCESS_KEY_ID", None)
    _S3_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY", None)
Example #10
0
 def jsonload(cls, filename):
     """Open *filename* via gfile and return its contents parsed as JSON.

     NOTE(review): the gfile handle is never closed explicitly, relying on
     garbage collection; if gfile supports the context-manager protocol,
     wrap it in `with` -- confirm before changing.
     """
     return json.load(gfile(filename, 'r'))
Example #11
0
def read_simdjson(filepath: str):
    """Parse the JSON file at *filepath* with simdjson and return the result."""
    with open(filepath) as source:
        document = simdjson.load(source)
        return document