# Example #1  (score: 0)
# File: api.py   Project: eotp/xray
def to_netcdf(dataset,
              path=None,
              mode='w',
              format=None,
              group=None,
              engine=None,
              writer=None,
              encoding=None):
    """Write ``dataset`` as a netCDF file to ``path``, or to bytes.

    This function creates an appropriate datastore for writing a dataset to
    disk as a netCDF file.  See `Dataset.to_netcdf` for full API docs.

    The ``writer`` argument is only for the private use of save_mfdataset.

    Returns the serialized bytes when ``path`` is None, the open store when
    a ``writer`` is supplied (asynchronous write), and None otherwise.
    """
    if encoding is None:
        encoding = {}
    if path is None:
        # No target path: serialize into an in-memory buffer.  Only the
        # scipy engine can write to a file-like object.
        path = BytesIO()
        if engine is None:
            engine = 'scipy'
        elif engine != 'scipy':
            # BUG FIX: the original condition (`engine is not None`) raised
            # for ANY explicit engine — including the supported 'scipy',
            # contradicting this very error message.
            raise ValueError('invalid engine for creating bytes with '
                             'to_netcdf: %r. Only the default engine '
                             "or engine='scipy' is supported" % engine)
    else:
        if engine is None:
            engine = _get_default_engine(path)
        path = _normalize_path(path)

    # validate Dataset keys, DataArray names, and attr keys/values
    _validate_dataset_names(dataset)
    _validate_attrs(dataset)

    try:
        store_cls = WRITEABLE_STORES[engine]
    except KeyError:
        raise ValueError('unrecognized engine for to_netcdf: %r' % engine)

    if format is not None:
        format = format.upper()

    # if a writer is provided, store asynchronously
    sync = writer is None

    store = store_cls(path, mode, format, group, writer)
    try:
        dataset.dump_to_store(store, sync=sync, encoding=encoding)
        if isinstance(path, BytesIO):
            return path.getvalue()
    finally:
        # Synchronous writes are closed here; asynchronous writes are
        # closed by the caller (save_mfdataset) once the writer runs.
        if sync:
            store.close()

    if not sync:
        return store
# Example #2  (score: 0)
def to_netcdf(dataset, path=None, mode='w', format=None, group=None,
              engine=None, writer=None, encoding=None):
    """Write ``dataset`` as a netCDF file to ``path``, or to bytes.

    This function creates an appropriate datastore for writing a dataset to
    disk as a netCDF file.  See `Dataset.to_netcdf` for full API docs.

    The ``writer`` argument is only for the private use of save_mfdataset.

    Returns the serialized bytes when ``path`` is None, the open store when
    a ``writer`` is supplied (asynchronous write), and None otherwise.
    """
    if encoding is None:
        encoding = {}
    if path is None:
        # In-memory target: only the scipy engine supports writing to a
        # file-like object.
        path = BytesIO()
        if engine is None:
            engine = 'scipy'
        elif engine != 'scipy':
            # BUG FIX: originally `elif engine is not None`, which rejected
            # even the explicitly supported engine='scipy'.
            raise ValueError('invalid engine for creating bytes with '
                             'to_netcdf: %r. Only the default engine '
                             "or engine='scipy' is supported" % engine)
    else:
        if engine is None:
            engine = _get_default_engine(path)
        path = _normalize_path(path)

    # validate Dataset keys, DataArray names, and attr keys/values
    _validate_dataset_names(dataset)
    _validate_attrs(dataset)

    try:
        store_cls = WRITEABLE_STORES[engine]
    except KeyError:
        raise ValueError('unrecognized engine for to_netcdf: %r' % engine)

    if format is not None:
        format = format.upper()

    # if a writer is provided, store asynchronously
    sync = writer is None

    store = store_cls(path, mode, format, group, writer)
    try:
        dataset.dump_to_store(store, sync=sync, encoding=encoding)
        if isinstance(path, BytesIO):
            return path.getvalue()
    finally:
        # Close only for synchronous writes; async writes are closed by
        # the caller after the queued writer executes.
        if sync:
            store.close()

    if not sync:
        return store
# Example #3  (score: 0)
            method_csv = 2
        if method_csv == 'DELETE':
            method_csv = 3
        if method_csv == 'PUT':
            method_csv = 4
        if method_csv == 'HEAD':
            method_csv = 5
        if method_csv == 'OPTIONS':
            method_csv = 6
        if method_csv == 'TRACE':
            method_csv = 7
        # print method_csv

        path = StringIO.StringIO()
        pprint(log_line_data['request_url'], path)
        path_csv = path.getvalue().replace("\n", "").replace("'", "")
        # path_csv = '/dvwa-1.9/dvwa/css/\"980610%40'
        # print path_csv
        list = '%, ", ?, _, $, &, *, /, \\, \., \|'
        symbol_in_request = 0
        for i in range(0, len(path_csv)):
            if path_csv[i] in list:
                symbol_in_request += 1
                x = float(len(path_csv))
        p_symbol = symbol_in_request / x
        status = StringIO.StringIO()
        pprint(log_line_data['status'], status)
        status_csv = status.getvalue().replace("\n", "").replace("'", "")
        status_csv = float(status_csv) / 500
        # print status_csv
        outputWriter.writerow(
# Example #4  (score: 0)
                        {'name': nname, 'scaling': self.sensor_scalings[sensor_id], 'triggers': ntp}))

        except Exception, e:
            print 'Error writing current profile'
            print e
            return False

        try:
            if not is_a_string_buffer:
                path = open(path, 'w')
            path.write(
                '# This file contains a fan profile for tpfancod')
            path.write('\n\n\n')
            current_profile.write(path)
            if is_a_string_buffer:
                self.profile_as_string = path.getvalue()

        except Exception, e:
            print 'Error writing profile file: %s' % path
            print e
            return False
        return True

    def load_config(self, settings_from_config):
        """apply settings from a config"""
        self.verify_config(settings_from_config)
        if settings_from_config['status']:
            self.enabled = settings_from_config['enabled']
            self.override_profile = settings_from_config['override_profile']
            self.current_profile = settings_from_config['current_profile']