def write_posterior_to_hdf(posterior_path, headers, posterior, metadata,
                           run_identifier):
    """Write *posterior* samples to the HDF5 file at *posterior_path*.

    Samples are stored under the group
    ``/lalinference/<run_identifier>/posterior_samples`` with *metadata*
    attached.  ``headers`` is accepted for interface compatibility but is
    not used here — presumably consumed by other writers; TODO confirm.
    """
    # Imported lazily so merely importing this module does not require
    # lalinference to be installed.
    from lalinference.io import write_samples

    group_path = '/'.join(
        ['', 'lalinference', run_identifier, 'posterior_samples'])
    write_samples(posterior, posterior_path, path=group_path,
                  metadata=metadata)
	# NOTE(review): `parser`, `multipleFileCB`, `downsample_and_evidence`,
	# `weight_and_combine` and `write_samples` are defined elsewhere in
	# this file (the enclosing function header is not visible here).
	parser.add_option(
		'-b', '--fixedBurnin', dest='fixedBurnin', action="callback",
		callback=multipleFileCB, help='Fixed number of iteration for burnin.')
	opts, args = parser.parse_args()

	# Collect input chain files from both positional arguments and --data.
	datafiles = []
	if args:
		datafiles += args
	if opts.data:
		datafiles += opts.data

	# Resolve one burn-in value per chain: a single --fixedBurnin value is
	# applied to every chain, otherwise values are taken one per chain;
	# with no option given, no fixed burn-in is used.
	if opts.fixedBurnin:
		if len(opts.fixedBurnin) == 1:
			fixedBurnins = [int(opts.fixedBurnin[0])] * len(datafiles)
		else:
			fixedBurnins = [int(burnin) for burnin in opts.fixedBurnin]
	else:
		fixedBurnins = [None] * len(datafiles)

	# Downsample each chain and compute its evidence.
	chain_posteriors = [
		downsample_and_evidence(datafile,
			deltaLogP=opts.deltaLogP,
			fixedBurnin=burnin,
			nDownsample=opts.downsample,
			verbose=opts.verbose)
		for datafile, burnin in zip(datafiles, fixedBurnins)]

	# Combine the chains into a single evidence-weighted posterior.
	final_posterior, metadata = weight_and_combine(
		chain_posteriors, verbose=opts.verbose)

	# Store the combined samples at the standard lalinference_mcmc group.
	write_samples(final_posterior, opts.pos,
		path='/'.join(['', 'lalinference', 'lalinference_mcmc', 'posterior_samples']),
		metadata=metadata)
def write_posterior_to_hdf(posterior_path, headers, posterior, metadata,
                           run_identifier):
    """Write *posterior* samples to *posterior_path*, replacing any
    existing file.

    Samples go under ``/lalinference/<run_identifier>/posterior_samples``
    with *metadata* attached; ``overwrite=True`` clobbers a pre-existing
    output file.  ``headers`` is accepted but unused here — TODO confirm
    whether any caller relies on it.
    """
    # Lazy import keeps lalinference optional at module-import time.
    from lalinference.io import write_samples

    sample_group = '/'.join(
        ['', 'lalinference', run_identifier, 'posterior_samples'])
    write_samples(posterior, posterior_path, path=sample_group,
                  metadata=metadata, overwrite=True)
# Example #4
# 0
        verbose=opts.verbose,
        evidence_weighting=not opts.equal_weighting,
        combine_only=opts.combine_only)

    # NOTE(review): `datafiles`, `final_posterior`, `metadata`, `opts`,
    # `extract_metadata` and `write_samples` are bound earlier in this
    # function (the opening of the fragment is not visible here).
    # The identifier of the last file processed wins; extract_metadata
    # also accumulates each file's headers into the shared `metadata` dict.
    for path in datafiles:
        run_identifier = extract_metadata(path, metadata)

    # Drop per-column metadata duplicated under the combined samples group,
    # since the columns themselves carry that information.
    path_to_samples = '/'.join(
        ['', 'lalinference', run_identifier, 'posterior_samples'])
    if path_to_samples in metadata:
        samples_meta = metadata[path_to_samples]
        for colname in final_posterior.columns:
            samples_meta.pop(colname, None)

    # Collapse list-valued string metadata (one entry per input file) down
    # to the first entry, warning that the rest remain in the per-file
    # HDF5 headers.
    for level in metadata:
        for key in metadata[level]:
            value = metadata[level][key]
            if isinstance(value, list) and all(
                    isinstance(item, str) for item in value):
                print(
                    "Warning: only printing the first of the %d entries found for metadata %s/%s. You can find the whole list in the headers of individual hdf5 output files\n"
                    % (len(value), level, key))
                metadata[level][key] = value[0]

    write_samples(final_posterior,
                  opts.pos,
                  path=path_to_samples,
                  metadata=metadata)
# Example #5
# 0
	# NOTE(review): `parser`, `multipleFileCB`, `downsample_and_evidence`,
	# `weight_and_combine` and `write_samples` are defined elsewhere in
	# this file (the enclosing function header is not visible here).
	parser.add_option(
		'-b','--fixedBurnin',dest='fixedBurnin',action="callback",
		callback=multipleFileCB,help='Fixed number of iteration for burnin.')
	opts, args = parser.parse_args()

	# Collect input chain files from both positional arguments and --data.
	datafiles=[]
	if args:
		datafiles = datafiles + args
	if opts.data:
		datafiles = datafiles + opts.data

	# Resolve one burn-in value per chain; no option means no fixed burn-in.
	if opts.fixedBurnin:
	# If only one value for multiple chains, assume it's to be applied to all chains
		if len(opts.fixedBurnin) == 1:
			fixedBurnins = [int(opts.fixedBurnin[0]) for df in datafiles]
		else:
			fixedBurnins = [int(fixedBurnin) for fixedBurnin in opts.fixedBurnin]
	else:
		fixedBurnins = [None]*len(datafiles)

	chain_posteriors = []

	# Downsample each chain and compute its evidence.
	for i in range(len(datafiles)):
		chain_posteriors.append(downsample_and_evidence(datafiles[i],
			deltaLogP=opts.deltaLogP, fixedBurnin=fixedBurnins[i], nDownsample=opts.downsample, verbose=opts.verbose))

	# Combine the chains into a single evidence-weighted posterior.
	final_posterior, metadata = weight_and_combine(chain_posteriors, verbose=opts.verbose)

	# Store the combined samples at the standard lalinference_mcmc group.
	write_samples(final_posterior, opts.pos,
		path='/'.join(['','lalinference','lalinference_mcmc','posterior_samples']), metadata=metadata)