def main():
    # set up logging
    logger = logging.getLogger('store_data_locally')
    logger.setLevel(logging.DEBUG)
    fh = logging.FileHandler('store_data_locally.log')
    fh.setLevel(logging.DEBUG)
    formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    fh.setFormatter(formatter)
    logger.addHandler(fh)

    # get arguments
    parser = argparse.ArgumentParser(
        description='Query sensor readings from the IDEAL database '
        'and store locally.')
    parser.add_argument('--dataset_path',
                        help='directory of the original IDEAL dataset')
    parser.add_argument('--data_path',
                        default=LOCAL_DATA_DIR,
                        help='directory to store data')

    args = parser.parse_args()

    # store metadata locally
    converter = IdealCSV2Hdf5(args.dataset_path, data_dir=args.data_path)
    converter.store_metadata()

    with MetaDataStore(data_dir=args.data_path) as s:
        metadata = MetaData(s)

    # get relevant sensorids
    sensors = metadata.sensor_merged()
    indices = pd.Series([False] * sensors.shape[0], index=sensors.index.copy())
    indices = indices | sensors.sensorid.isin(metadata.electric_sensors())
    indices = indices & sensors.homeid.astype(int).isin(metadata.gold_homes())
    sensorids = sensors.sensorid[indices]
    sensorids_to_store = sensorids

    print('Query and store readings from {0} sensors'.format(
        len(sensorids_to_store)))

    for idx, sensorid in enumerate(sensorids_to_store):
        converter = IdealCSV2Hdf5(args.dataset_path, data_dir=args.data_path)

        logger.info('({0}/{1}) Sensorid: {2}'.format(idx + 1,
                                                     len(sensorids_to_store),
                                                     sensorid))

        converter.store_readings(sensorid)

    # try and read stored data
    readings_store = ReadingDataStore(data_dir=args.data_path)
    readings_count = 0

    for idx, sensorid in enumerate(sensorids):
        readings = readings_store.get_sensor_readings(sensorid)
        readings_count += len(readings)

    logger.info('Total readings : {0}'.format(readings_count))
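
A minimal, hypothetical entry-point sketch for the script above; the module/file name and the command line shown in the comment are assumptions, not part of the original listing.

if __name__ == '__main__':
    # Hypothetical invocation (paths are placeholders):
    #   python store_data_locally.py --dataset_path /path/to/ideal --data_path ./data
    main()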
Example #2
def test_is_equation_valid(equation, is_expected_valid):
    # Set up validation object
    metadata = MetaData(OPERATORS_DICTIONARY)
    # handle the legal white spaces
    metadata.equation_string = remove_white_spaces(equation,
                                                   LEGAL_WHITE_SPACES)
    sv = StringValidator(metadata)
    assert sv.is_equation_valid() == is_expected_valid
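
The test above takes (equation, is_expected_valid) pairs, which suggests it is driven by a parametrize decorator; a sketch of how that might look with pytest. The concrete equations and expected results below are hypothetical and depend on the project's validation rules.

import pytest

@pytest.mark.parametrize("equation, is_expected_valid", [
    ("1+2*3", True),   # assumed to be a legal equation
    ("1+*3", False),   # assumed to be rejected by the validator
])
def test_is_equation_valid_example(equation, is_expected_valid):
    metadata = MetaData(OPERATORS_DICTIONARY)
    metadata.equation_string = remove_white_spaces(equation, LEGAL_WHITE_SPACES)
    assert StringValidator(metadata).is_equation_valid() == is_expected_valid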
Example #3
    def main(self):
        self.define_parser()
        args = self.parser.parse_args()
        self.validate(args)

        md = MetaData(args.i)

        if md.version == "3.1":
            iLabels = md.getLabels("data_optics")
        else:
            self.error("Relion 3.1 star file is needed as input.")

        if "rlnEvenZernike" not in iLabels:
            self.error(
                "Zernike 4th order polynomials are not present in the STAR file. Please do a 4th order aberration CTF refinement first."
            )

        # create output header
        print("| Optics group | Apparent Cs [mm] | realApix [A] |")
        print("|------------------------------------------------|")

        apixSum = 0.0
        csSum = 0.0
        opticsGroupNr = 0

        for optic_group in md.data_optics:
            z40 = float(optic_group.rlnEvenZernike.split(",")[6])
            csTrue = optic_group.rlnSphericalAberration
            nomPixSize = optic_group.rlnImagePixelSize

            # note wavelength is for relativistically corrected accelerating voltage (i.e. 388.06 kV, 239.14 kV and 109.78 kV)
            if optic_group.rlnVoltage == 300:
                waveLength = 0.019687
            elif optic_group.rlnVoltage == 200:
                waveLength = 0.025079
            elif optic_group.rlnVoltage == 100:
                waveLength = 0.037014
            else:
                self.error(
                    "Only 100, 200 and 300 kV acceleration voltages are supported."
                )

            csApparent = csTrue + (12 * z40 * 1e-7) / (pi * waveLength**3)

            realPixSize = nomPixSize * (csTrue / csApparent)**0.25

            print("| %2d           |             %0.2f |        %0.3f |" %
                  (optic_group.rlnOpticsGroup, csApparent, realPixSize))

            opticsGroupNr += 1
            apixSum += realPixSize
            csSum += csApparent

        # Show average values
        print("|------------------------------------------------|")
        print("| Average      |             %0.2f |        %0.3f |" %
              (csSum / opticsGroupNr, apixSum / opticsGroupNr))
        print("|------------------------------------------------|")
Example #4
def main():
    # create instance
    oodt = OODTWorkFlowWrapper("http://localhost:9200")
    # get event info
    events = oodt.getEventNames()
    # create metadata object to invoke an event
    met = MetaData()
    met.addMetaData("hello", "world")
    # print available events
    print('available events:', events)
 def append_terminate(self, path: Path, uid_state_map, resulting_end_state):
     assert path.nodes
     last_node = path.nodes[-1]
     last_node_o = uid_state_map[last_node]
     assert last_node_o is not None, f"Did not find an object for node {last_node_o}"
     resulting_end_state_o = uid_state_map[resulting_end_state]
     assert resulting_end_state_o is not None, f"Did not find an object for node {resulting_end_state}"
     trans = Transition.construct_terminate_transition(source_state_o=last_node_o, resulting_state_o=resulting_end_state_o)
     meta_data = [MetaData(data={metadata.META_DATA_TRANSITION: trans}), MetaData()]
     path.append_path(nodes=[last_node, resulting_end_state], meta_data_elements=meta_data, count_dist=False)
     return path
Example #7
def lambda_handler(event, contex):

    if event:
        meta = MetaData(event=event)
        meta.metadata()

        response = json.loads(meta.to_json())

        if response.get('output'):
            return str(response)
        else:
            response['output'] = 1
            return str(response)
def test_create_organize_list_convertor(equation, expected):
    # Set up the ConvertListToOrganizedList

    # create the metadata
    metadata = MetaData(OPERATORS_DICTIONARY)

    # convert the string to list
    convertor_string_to_list = StringToListConverter()
    metadata.equation_list = convertor_string_to_list.convert_string_to_list(
        equation)

    # create main convertor
    convertor = ConvertListToOrganizedList(metadata)
    assert convertor.create_organize_list() == expected, \
        "failed to check item: " + equation
def test_create_postfix_list_convertor(equation, expected):
    # Set up the InfixToPostFixConvertor

    # create the metadata
    metadata = MetaData(OPERATORS_DICTIONARY)

    # convert the string to list
    convertor_string_to_list = StringToListConverter()
    metadata.equation_list = convertor_string_to_list.convert_string_to_list(
        equation)

    # create main convertor
    convertor = InfixToPostfixConvertor(metadata.equation_list,
                                        OPERATORS_DICTIONARY)
    assert convertor.infix_to_postfix() == expected, "failed item: " + equation
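
For context, the conversion being tested is the classic shunting-yard algorithm; a minimal, self-contained sketch for the four binary operators and parentheses, independent of the project's InfixToPostfixConvertor and OPERATORS_DICTIONARY.

def infix_to_postfix(tokens):
    # e.g. ['1', '+', '2', '*', '3'] -> ['1', '2', '3', '*', '+']
    precedence = {'+': 1, '-': 1, '*': 2, '/': 2}
    output, stack = [], []
    for token in tokens:
        if token in precedence:
            while stack and stack[-1] != '(' and precedence[stack[-1]] >= precedence[token]:
                output.append(stack.pop())
            stack.append(token)
        elif token == '(':
            stack.append(token)
        elif token == ')':
            while stack and stack[-1] != '(':
                output.append(stack.pop())
            stack.pop()  # discard the '('
        else:
            output.append(token)  # operand
    while stack:
        output.append(stack.pop())
    return output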
Example #10
def screen_record():
    x, y, w, h = 0, 50, 1024, 768
    meta = MetaData.from_screen(w - x, h - y)
    arrow = Arrow(meta.width)
    last_time = 0
    with mss() as sct:
        # Part of the screen to capture
        monitor = {"top": y, "left": x, "width": w, "height": h}
        while True:
            frame = numpy.array(sct.grab(monitor))
            try:
                processed_img = process_img(frame, meta)
                lines = find_lines(processed_img)
                p = separate_lines(lines)
                cv2.circle(frame, (int(p.x), int(p.y)), 2, [0, 255, 255], 10)
                arrow.add_point(int(p.x))
            except:
                pass

            draw_arrow(frame, arrow, meta.width)
            cv2.imshow("OpenCV/Numpy normal", frame)

            fps = "fps: {}".format(1 / (time.time() - last_time))
            last_time = time.time()
            print(fps)

            # Press "q" to quit
            if cv2.waitKey(50) & 0xFF == ord("q"):
                cv2.destroyAllWindows()
                break
Example #11
def video_record(video):
    meta = MetaData.from_video(video)
    cap = cv2.VideoCapture(video)
    arrow = Arrow(meta.width)
    cpt_frame = 0
    while cap.isOpened():
        ret, frame = cap.read()
        cpt_frame += 1
        if ret:
            try:
                processed_img = process_img(frame, meta)
                lines = find_lines(processed_img)
                l1, l2 = separate_lines(lines)
                p = intersect_droit(l1, l2)
                arrow.add_point(int(p.x))

                processed_img = cv2.cvtColor(processed_img, cv2.COLOR_GRAY2BGR)

                cv2.imshow('window', processed_img)
                draw_infos(frame, p, l1, l2)
                draw_arrow(frame, arrow, meta.width)
                cv2.imshow('window', frame)

            except:
                pass
        # draw_arrow(frame, arrow, meta.width)

        # cv2.imshow('window', frame)
        if cv2.waitKey(25) & 0xFF == ord('q'):
            break

    cap.release()
    cv2.destroyAllWindows()
Example #12
    def __init__(self, file_name=None, file_type=None):
        self.__dataset = np.ndarray((0, 0), dtype=float)
        self.__metadata = MetaData()
        self.__header = {}

        if file_name is not None:
            self.load(file_name, file_type)
Example #13
    def __init__(self, file_name=None, file_type=None):
        """
        Initializer of a BData instance

        Parameters
        ----------
        file_name : str, optional
            File which contains BData (default: None)
        file_type : {'Npy', 'Matlab', 'HDF5'}
            File type (default: None)
        """

        self.dataSet = np.ndarray((0, 0), dtype=float)
        self.metaData = MetaData()

        if file_name is not None:
            self.load(file_name, file_type)
Example #14
    def main(self):
        self.define_parser()
        args = self.parser.parse_args()
        self.validate(args)

        md = MetaData(args.i)

        if args.lb == "ALL":
            if md.version == "3.1":
                iLabels = md.getLabels("data_particles")
            else:
                iLabels = md.getLabels("data_")
        else:
            iLabels = args.lb.split(" ")

        particles = self.get_particles(md)

        self.statsOnParticles(particles, iLabels)
def test_solve_equation(equation, expected):
    # Set up the solver

    # create the metadata
    metadata = MetaData(OPERATORS_DICTIONARY)

    # clean the white spaces from equation
    equation = remove_white_spaces(equation, LEGAL_WHITE_SPACES)

    # convert the string to list
    convertor_string_to_list = StringToListConverter()
    metadata.equation_list = \
        convertor_string_to_list.convert_string_to_list(equation)

    # create solver
    model = Model()
    assert model.solve_equation(metadata).result == expected, \
        "failed to check item: " + equation
Example #16
 def __init__(self, nodes=None, meta_data_elements: List[MetaData] = None):
     if nodes is None:
         nodes = []
         assert meta_data_elements is None
         meta_data_elements = [MetaData() for node in nodes]
     else:
         assert meta_data_elements is not None
         assert len(nodes) == len(meta_data_elements)
     self.nodes: List[str] = []
     self.actual_length = 0
     self.meta_data: List[MetaData] = []
     self.append_path(nodes=nodes, meta_data_elements=meta_data_elements)
     self.trace = None
Example #17
 def prepend_path(self,
                  nodes: List[str],
                  meta_data_elements: List[MetaData] = None):
     """
     Do not count the distance.
     """
     assert self.nodes
     assert self.nodes[0] == nodes[-1]
     len_nodes = len(nodes)
     self.nodes = nodes[:len_nodes - 1] + self.nodes
     if meta_data_elements is None:
         meta_data_elements = [MetaData() for node in nodes]
     assert len(nodes) == len(meta_data_elements)
     self.meta_data = meta_data_elements[:len_nodes - 1] + self.meta_data
Example #18
    def main(self):
        self.define_parser()
        args = self.parser.parse_args()

        self.validate(args)

        md = MetaData(args.i)

        print("Reading in input star file.....")

        if not os.path.exists(args.o_dir):
            os.makedirs(args.o_dir)

        for i, particle in enumerate(md, start=1):
            outputImageName = '%s/%s_%06d.mrc' % (args.o_dir, args.o_pref, i)
            self.splitMrcStack(particle.rlnImageName, outputImageName)
            particle.rlnImageName = outputImageName
            particle.rlnMicrographName = outputImageName

        md.write(args.o_pref + ".star")

        print("Total %s images created from MRC stacks." % str(i))

        print("New star file %s.star created. Have fun!" % args.o_pref)
Example #19
    def get_flatted_representation(self, meta_data=MetaData()):
        """
        Trans 1: A -> B
        Trans 2: B -> C
        Trans 3 Q-Start: C -> D
        Trans 4: C -> D
        Trans 5: C -> D
        Trans 6 Q-End: C -> D
        Trans 7: D -> E

        :returns:
        states = [A, B, C, D, D, D, D, E]
        transitions = [A -> B, B -> C, C -> D, D -> D, D -> D, D -> D, D -> E]
        """
        transitions = []
        states = []
        flatted_meta_data = MetaData()
        for seq_elem in self.seq_elements:
            tmp_states, tmp_transitions, tmp_meta_data = seq_elem.get_flatted_representation(
                meta_data=meta_data)
            states.extend(tmp_states)
            transitions.extend(tmp_transitions)
            flatted_meta_data.merge(tmp_meta_data)
        return states, transitions, flatted_meta_data
Example #20
def generate_cache_data(data_name, algorithm_name):
    print(f'Preprocessing the data {data_name} - {algorithm_name}...')
    print("graph data loading... [0/6]")
    graph_object, label_dict_set, labels = load_data_from_text(
        data_name=data_name)
    print("done.")
    print("The node size " + str(len(graph_object.nodes)))
    print("The edge size " + str(len(graph_object.edges)))
    meta_data = MetaData(graph_object=graph_object,
                         label_dict_set=label_dict_set,
                         algorithm_name=algorithm_name,
                         labels=labels)

    ###################################################################
    # overview data file format
    ###################################################################
    overview_data = []
    print("data caching... [6/6]")
    for perturbation in meta_data.perturbations:
        overview_data_item = {
            "remove_id": perturbation["remove_id"],
            "vul_percentile": perturbation["vul_percentile"],
            "rank": perturbation["rank"],
            "node_influence": perturbation["node_influence"],
            "label": perturbation["label"],
            "label_influence": perturbation["label_influence"]
        }
        overview_data.append(overview_data_item)
        with open(
            "../cached_data/" + data_name + "_" + algorithm_name + "_detail_" + str(perturbation["remove_id"]) + ".json",
            "w") as jf:
            json.dump(perturbation, jf, cls=MetaDataEncoder)

    with open(
            "../cached_data/" + data_name + "_" + algorithm_name + "_overview.json",
            "w") as jf:
        json.dump({"perturbations": overview_data,
                   "nodes": meta_data.nodes,
                   "labels": meta_data.labels,
                   "labelNames": meta_data.labelNames,
                   "vulnerabilityList": meta_data.vulnerabilityList,
                   "perturbationSummary": meta_data.perturbationSummary}, jf, cls=MetaDataEncoder)

    print("data cached.")
Example #21
    def main(self):
        self.define_parser()
        args = self.parser.parse_args()

        self.validate(args)

        print(
            "Extracting coordinated of particles per micrograph and storing as box files."
        )

        md = MetaData(args.i)

        particles = self.get_particles(md)

        boxes = self.getBoxes(particles)

        if not os.path.exists(args.o):
            os.makedirs(args.o)

        micrographName = ""
        boxFile = open("%s/%s" % (args.o, "dummy"), 'w')

        for box in boxes:
            if not box[0] == micrographName:
                boxFile.close()
                boxFile = open("%s/%s" % (args.o, box[0]), 'w')
                micrographName = box[0]
                boxFile.write(
                    "%d %d %s %s\n" %
                    (box[1] - int(args.box_size / 2), box[2] -
                     int(args.box_size / 2), args.box_size, args.box_size))
            else:
                boxFile.write(
                    "%d %d %s %s\n" %
                    (box[1] - int(args.box_size / 2), box[2] -
                     int(args.box_size / 2), args.box_size, args.box_size))
        boxFile.close()

        os.remove("%s/%s" % (args.o, "dummy"))

        print("Box-files written out. Have fun!")
Example #22
    def main(self):
        self.define_parser()
        args = self.parser.parse_args()
        self.validate(args)

        print("Removing columns fromstar file....")

        md = MetaData(args.i)
        md.removeLabels("data_particles", "rlnImageName", "rlnMicrographName",
                        "rlnMagnification", "rlnDetectorPixelSize",
                        "rlnCtfFigureOfMerit", "rlnVoltage", "rlnDefocusU",
                        "rlnDefocusV", "rlnDefocusAngle",
                        "rlnSphericalAberration", "rlnCtfBfactor",
                        "rlnCtfScalefactor", "rlnPhaseShift",
                        "rlnAmplitudeContrast")

        md.write(args.o)

        print("New star file " + args.o + " created. Have fun!")
					max = min + item('uniqueness')
			else:
				min = 0
				max = 128
			source[table][column]['min'], source[table][column]['max'] = min, max
if __name__ == '__main__':
	config_items = {
					'meta_file':			{'value':None}, \
					'size_file':			{'value':None,'default':None}, \
					'index_file':			{'value':None, 'default':None}, \
					}
	parser = argparse.ArgumentParser()
	for k, v in sorted(config_items.iteritems()):
			parser.add_argument("--" + k)
	args = parser.parse_args()

	for k, v in config_items.iteritems():
		if eval(str("args."+k)) == None:
			if 'default' in config_items[k].keys():
				config_items[k]['value'] = config_items[k]['default']
			else: 
				config_items[k]['value'] = raw_input("Enter " + k + ":")
		else:
			config_items[k]['value'] = eval(str("args."+k))
	json_file = config_items['meta_file']['value']
	m = MetaData()
	m.import_from_file(json_file)
	update(m,config_items['size_file']['value'],config_items['index_file']['value'])
	m.export_to_file(json_file + ".updated.json")
	m.export_to_stdout()
Example #24
    def main(self):
        self.define_parser()
        args = self.parser.parse_args()

        self.validate(args)

        md = MetaData(args.i)

        print("Reading in input star file.....")

        micrographs = self.get_micrographs(md)

        print(
            "Total %s micrographs in input star file. \nRenaming to micXXX convention."
            % str(len(micrographs)))

        self.renameMicrographs(micrographs, args.mic_dir)

        mdOut = MetaData()
        if md.version == "3.1":
            mdOut.version = "3.1"
            mdOut.addDataTable("data_optics")
            mdOut.addLabels("data_optics", md.getLabels("data_optics"))
            mdOut.addData("data_optics", getattr(md, "data_optics"))
            particleTableName = "data_particles"
        else:
            particleTableName = "data_"

        mdOut.addDataTable(particleTableName)
        mdOut.addLabels(particleTableName, md.getLabels(particleTableName))
        mdOut.addData(particleTableName, micrographs)

        mdOut.write(args.o)

        print("New star file %s created. Have fun!" % args.o)
        ['custom_fun', [], [], [], '', False, False, False, False],
        ['_custom_fun', [], [], [], '', False, False, False, False],
        ['custom_func1', [], [], [], '', False, False, False, False],
        ['custom_func2', [], [], [], '', False, False, False, False],
        ['set_returning_func', ['x', 'y'], ['integer', 'integer'],
            ['b', 'b'], '', False, False, True, False]],
    'datatypes': ['custom_type1', 'custom_type2'],
    'foreignkeys': [
        ('public', 'users', 'id', 'public', 'users', 'parentid'),
        ('public', 'users', 'id', 'public', 'Users', 'userid')
    ],
}

metadata = dict((k, {'public': v}) for k, v in metadata.items())

testdata = MetaData(metadata)

cased_users_col_names = ['ID', 'PARENTID', 'Email', 'First_Name', 'last_name']
cased_users2_col_names = ['UserID', 'UserName']
cased_func_names = [
    'Custom_Fun', '_custom_fun', 'Custom_Func1', 'custom_func2', 'set_returning_func'
]
cased_tbls = ['Users', 'Orders']
cased_views = ['User_Emails', 'Functions']
casing = (
    ['SELECT', 'PUBLIC'] + cased_func_names + cased_tbls + cased_views
    + cased_users_col_names + cased_users2_col_names
)
# Lists for use in assertions
cased_funcs = [
    function(f) for f in ('Custom_Fun()', '_custom_fun()', 'Custom_Func1()', 'custom_func2()')
Example #26
    def main(self):
        self.define_parser()
        args = self.parser.parse_args()

        self.validate(args)

        print("Binning correct input star file. Using binning factor %s." % str(args.bin_factor))

        md = MetaData(args.i)

        new_particles = []

        particles = self.get_particles(md)

        if (hasattr(particles[0], 'rlnOriginX')) and (hasattr(particles[0], 'rlnOriginY')):
            correctOrigin = True
        else:
            print("Note: rlnOriginX or rlnOriginY not found in input star file. Not correcting for particle shift.")
            correctOrigin = False

        if hasattr(particles[0], 'rlnDetectorPixelSize'):
            correctApix = True
        else:
            print("Note: rlnDetectorPixelSize not found in input star file. Not correcting for pixel size.")
            correctApix = False

        new_particles.extend(
            self.binParticles(particles, args.bin_factor, correctOrigin, correctApix, args.suf_orig, args.suf_new))

        mdOut = MetaData()
        if md.version == "3.1":
            mdOut.version = "3.1"
            mdOut.addDataTable("data_optics")
            mdOut.addLabels("data_optics", md.getLabels("data_optics"))
            mdOut.addData("data_optics", getattr(md, "data_optics"))
            particleTableName = "data_particles"
        else:
            particleTableName = "data_"

        mdOut.addDataTable(particleTableName)
        mdOut.addLabels(particleTableName, md.getLabels(particleTableName))
        mdOut.addData(particleTableName, new_particles)

        mdOut.write(args.o)

        print("New star file %s created. Have fun!" % args.o)
def analyzeBlock(block):
    func_hash = ""
    path_condition_meta = 0
    arithmetic_meta = 0
    logic_meta = 0
    env_meta = 0
    chain_meta = 0
    stack_meta = 0
    memory_meta = 0
    storage_meta = 0
    call_meta = 0

    memory_def = False  # aliu: True - has memory def; False - no memory def

    KallQ = []  # aliu: for update-call, use-call
    Qs = {}  # aliu: queue for all storage addresses, {storage_addr, []}

    single_storage_def = {}  # aliu: {storage_addr, single-def count}
    single_storage_use = {}  # aliu: {storage_addr, single-use count}
    single_call = 0  # aliu: single-call count
    storage_def_use = {}  # aliu: {storage_addr, def-use count}
    storage_use_update = {}  # aliu: {storage_addr, use-update count}
    update_call = 0  # aliu: update-call count
    use_call = 0  # aliu: use-call count
    call_finalize = 0  # aliu: call-finalize count

    if hasattr(block, 'func_hash'):
        func_hash = block.get_function_hash()
    if hasattr(block, 'path_condition'):
        path_condition_meta = 1
    try:
        block_ins = block.get_instructions()
    except KeyError:
        log.debug(
            "This path results in an exception, possibly an invalid jump address"
        )
        return ["ERROR"]

    for instr in block_ins:
        instruction = instr.get_inst_str()
        instr_parts = str.split(instruction, ' ')
        mnemonic = instr_parts[0]
        instr_type = getTypeOfInstruction(mnemonic)

        if instr_type == "UNSUPPORTED":
            continue

        if instr_type == ARITHMETIC_OP_TYPE:
            arithmetic_meta += 1

        if instr_type == LOGIC_OP_TYPE:
            logic_meta += 1

        if instr_type == ENV_OP_TYPE:
            env_meta += 1

        if instr_type == CHAIN_OP_TYPE:
            chain_meta += 1

        if instr_type == STACK_OP_TYPE:
            stack_meta += 1

        if instr_type == MEMORY_OP_TYPE:
            if mnemonic == 'MSTORE' or mnemonic == 'MSTORE8':
                memory_def = True
            if mnemonic == 'MLOAD' and memory_def == True:
                memory_meta += 1
                memory_def = False

        if instr_type == STORAGE_OP_TYPE:
            if not hasattr(instr, 'storage'):
                continue
            storage_addr = str(instr.get_storage_access()["storage"])
            if storage_addr in Qs:  # Qs is a dict; test key membership, not attributes
                queue = Qs[storage_addr]
            else:
                queue = []
                Qs[storage_addr] = queue

            if mnemonic == 'SSTORE':
                KallQ.append('DEF')  # aliu: update KallQ

                if queue and queue[-1] == 'DEF':  # aliu: count a single def
                    if storage_addr in single_storage_def:
                        old_single_def = single_storage_def[storage_addr]
                        single_storage_def[storage_addr] = old_single_def + 1
                    else:
                        single_storage_def[storage_addr] = 1

                use_update_count = 0  # use-update
                call_finalize_count = 0  # call-finalize
                for e in reversed(queue):
                    if e == 'DEF':
                        break
                    if e == 'USE':
                        use_update_count += 1  # aliu: count a use-update
                    if e == 'CALL':
                        call_finalize_count += 1  # aliu: count a call-finalize
                # aliu: update global use-update
                if storage_addr in storage_use_update:
                    old_use_update_count = storage_use_update[storage_addr]
                    storage_use_update[
                        storage_addr] = old_use_update_count + use_update_count
                else:
                    storage_use_update[storage_addr] = use_update_count
                # aliu: update global call-finalize
                call_finalize += call_finalize_count

                queue.append('DEF')
                Qs[storage_addr] = queue  # aliu: update the queue

            if mnemonic == 'SLOAD':
                KallQ.append('USE')  # aliu: update KallQ

                if queue and queue[-1] == 'USE':  # aliu: count a single use
                    if storage_addr in single_storage_use:
                        old_single_use = single_storage_use[storage_addr]
                        single_storage_use[storage_addr] = old_single_use + 1
                    else:
                        single_storage_use[storage_addr] = 1

                def_use_count = 0  # def-use
                for e in reversed(queue):
                    if e == 'DEF':
                        def_use_count += 1  # aliu: count a def-use
                        break
                if storage_addr in storage_def_use:
                    old_def_use_count = storage_def_use[storage_addr]
                    storage_def_use[
                        storage_addr] = old_def_use_count + def_use_count
                else:
                    storage_def_use[storage_addr] = def_use_count

                queue.append('USE')
                Qs[storage_addr] = queue  # aliu: update the queue

        if instr_type == CALL_OP_TYPE:
            # aliu: add CALL to all the queues
            for v in Qs.values():
                v.append('CALL')  # list.append mutates in place and returns None

            #if reversed(KallQ)[0] == 'CALL': # aliu: count a single call
            single_call += 1

            update_call_count = 0
            use_call_count = 0
            for e in reversed(KallQ):
                if e == 'DEF':
                    update_call_count += 1
                    break
            for e in reversed(KallQ):
                if e == 'USE':
                    use_call_count += 1
                    break
            update_call += update_call_count
            use_call += use_call_count

    # aliu: generate meta data
    meta_data = MetaData(arithmetic_meta, logic_meta, env_meta, chain_meta,
                         stack_meta, memory_meta)

    # aliu: generate birthmark
    pc_cantor = 5  # aliu: TODO

    def_count = len(single_storage_def)
    if len(single_storage_def.values()) == 0:
        def_max = 0
    else:
        def_max = max(single_storage_def.values())
    def_cantor = compute_cantor(def_count, def_max)

    use_count = len(single_storage_use)
    if len(single_storage_use.values()) == 0:
        use_max = 0
    else:
        use_max = max(single_storage_use.values())
    use_cantor = compute_cantor(use_count, use_max)

    call_count = single_call

    du_count = len(storage_def_use)
    if len(storage_def_use.values()) == 0:
        du_max = 0
    else:
        du_max = max(storage_def_use.values())
    du_cantor = compute_cantor(du_count, du_max)

    uu_count = len(storage_use_update)
    if len(storage_use_update.values()) == 0:
        uu_max = 0
    else:
        uu_max = max(storage_use_update.values())
    uu_cantor = compute_cantor(uu_count, uu_max)

    birth_mark = BirthMark(pc_cantor, def_cantor, use_cantor, call_count,
                           du_cantor, uu_cantor, update_call, use_call,
                           call_finalize)

    return {
        "func_hash": func_hash,
        "metadata": meta_data,
        "birthmark": birth_mark
    }
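
compute_cantor is called above but not defined in this snippet. If it is the standard Cantor pairing function (an assumption about the missing helper), a sketch would be:

def compute_cantor(a, b):
    # Cantor pairing: maps a pair of non-negative integers to a unique integer.
    return (a + b) * (a + b + 1) // 2 + b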
class QueryGenerator(multiprocessing.Process):

	#join_types = ['INNER','LEFT','RIGHT']
	join_types = ['INNER','LEFT']
	aggregate_functions = ['MIN', 'MAX', 'AVG', 'COUNT', 'SUM']

	def __init__(self, pipe_home, mysqluser, mysqlpass, host, port, socket, database, table, duration, profile = "Random", write_sql_to_disk = False):
		multiprocessing.Process.__init__(self)
		self.pipe_home 		= pipe_home

		self.savepoint_list					= []
		self.savepoint_counter 				= 0
		self.potential_tables 				= []
		self.__begin_sql_insert	 			= {}
		self.__insert_sql 					= {}
		self.transaction_size 				= 1
		self.iterations_instead_of_duration  = True
		self.write_sql_to_disk 				= write_sql_to_disk
		self.update_min_max_counter			= 0
		self.tables_in_transaction 			= []
		self.num_inserts 					= 0
		self.delta 							= 0.0
		self.start_time 					= 0.0
		self.poison_pill 					= False
		self.where_clause_gen_having_trouble = 0
		self.where_clause_gen_trying_desc = 0

		if os.path.isfile(profile):
			self.__metadata 	= MetaData(profile)
			self.__metadata.import_from_file(profile)
			self.profilefilename = os.path.splitext(os.path.basename(profile))[0]
			#print self.name + " loaded " + profile + " from disk"
			# loaded Profile from disk.
		else:
			self.__metadata 	= MetaData(profile)
			self.__metadata.load(mysqluser, mysqlpass, host, port, socket, database, True)
			self.__metadata.export_to_file(os.path.dirname(os.path.realpath(__file__)) + "/meta_data/" + database + "." + profile + ".json")
			self.profilefilename = profile
			#print "creating profile on the fly..." 
		self.profile			= profile
		self.write_sql_to_file	= os.path.dirname(os.path.realpath(__file__)) + "/meta_data/" + self.name + self.profilefilename + ".sql"

		duration = str(duration)
		if duration[-1] == 's':
			self.iterations_instead_of_duration = False
			duration = duration[:-1]
			self._duration 	= float(duration)
		else:
			self.iterations_instead_of_duration = True
			self._duration 	= int(duration)

		self.__mysqluser 	= mysqluser
		self.__mysqlpass 	= mysqlpass
		self.__host			= host
		self.__port			= port
		self.__socket		= socket
		self.__database		= database
		self.__table		= table
		
		self.auto_increment_increment 	= 1  # number of clients
		self.auto_increment_offset 		= 1  # client number
		self.statistics = ClientQueryStats(self.name)
		#self.random_worker = RandomWorker()
		self.generated_queries = {}

		self.connect_to_mysql()

		self.__initialize_things()

	def connect_to_mysql(self):
		try:
			if self.__socket == None:
				self.__db = MySQLdb.connect(host=self.__host, port=self.__port, user=self.__mysqluser, passwd=self.__mysqlpass, db=self.__database)
			else:
				self.__db = MySQLdb.connect(unix_socket=self.__socket, user=self.__mysqluser, passwd=self.__mysqlpass, db=self.__database)
			self.__db.autocommit(1)
			self.__cur = self.__db.cursor()
			self.__cur.execute("SET @@session.time_zone='+00:00'")
			#self.__cur.execute('set unique_checks=0')
		except MySQLdb.Error, e:
			print "An Error occured in " + self.__class__.__name__ + " %s" %e
			exit(1)		
import os

from metadata import MetaData

data_dir = '../data/'

mData = MetaData()
for each in os.walk(data_dir):
	dirname, dirs, files = each
	if not files:
		continue
	for f in files:
		if f.endswith('.jpg') and not f.endswith('_contour.jpg'):
			mData.add(os.path.join(dirname, f))
mData.save()
print('database saved')
            source[table][column]['min'], source[table][column][
                'max'] = min, max


if __name__ == '__main__':
    config_items = {
        'meta_file':   {'value':None}, \
        'size_file':   {'value':None,'default':None}, \
        'index_file':   {'value':None, 'default':None}, \
        }
    parser = argparse.ArgumentParser()
    for k, v in sorted(config_items.iteritems()):
        parser.add_argument("--" + k)
    args = parser.parse_args()

    for k, v in config_items.iteritems():
        if eval(str("args." + k)) == None:
            if 'default' in config_items[k].keys():
                config_items[k]['value'] = config_items[k]['default']
            else:
                config_items[k]['value'] = raw_input("Enter " + k + ":")
        else:
            config_items[k]['value'] = eval(str("args." + k))
    json_file = config_items['meta_file']['value']
    m = MetaData()
    m.import_from_file(json_file)
    update(m, config_items['size_file']['value'],
           config_items['index_file']['value'])
    m.export_to_file(json_file + ".updated.json")
    m.export_to_stdout()
Example #31
    def main(self):
        self.define_parser()
        args = self.parser.parse_args()

        self.validate(args)

        print("Selecting particles/micrographs from star file...")

        md = MetaData(args.i)

        if md.version == "3.1":
            ilabels = md.getLabels("data_particles")
        else:
            ilabels = md.getLabels("data_")

        if ("rlnDefocusU" not in ilabels) or ("rlnDefocusV" not in ilabels):
            self.error(
                "No labels rlnDefocusU or rlnDefocusV found in Input file.")
        if ("rlnFinalResolution" not in ilabels) and (args.res > 0):
            print(
                "No label rlnFinalResolution found in input file. Switching off resolution filtering..."
            )
            args.res = 0

        mdOut = MetaData()

        new_particles = []

        particles = self.get_particles(md)

        new_particles.extend(
            self.selParticles(
                particles,
                args.astg,
                args.res,
            ))

        if md.version == "3.1":
            mdOut.version = "3.1"
            mdOut.addDataTable("data_optics")
            mdOut.addLabels("data_optics", md.getLabels("data_optics"))
            mdOut.addData("data_optics", getattr(md, "data_optics"))
            particleTableName = "data_particles"
        else:
            particleTableName = "data_"

        mdOut.addDataTable(particleTableName)
        mdOut.addLabels(particleTableName, md.getLabels(particleTableName))
        mdOut.addData(particleTableName, new_particles)
        mdOut.write(args.o)

        print("New star file %s created. Have fun!" % args.o)
def main():
	print("Welcome to Deep Data Bench. version: %s" % __version__)
	config_items = OrderedDict()

	config_items['source_mysql_user'] 			= {'section':'MySQL',	'value':None, 'default' : None} 
	config_items['source_mysql_password'] 		= {'section':'MySQL',	'value':None, 'default' : None}
	config_items['source_mysql_socket'] 		= {'section':'MySQL',	'value':None, 'default' : None}
	config_items['source_mysql_host'] 			= {'section':'MySQL',	'value':None, 'default' : None}
	config_items['source_mysql_port'] 			= {'section':'MySQL',	'value':None, 'default' : 3306}
	config_items['source_database'] 			= {'section':'MySQL',	'value':None, 'default' : None}
	config_items['destination_mysql_user'] 		= {'section':'MySQL',	'value':None}
	config_items['destination_mysql_password'] 	= {'section':'MySQL',	'value':None}
	config_items['destination_mysql_socket'] 	= {'section':'MySQL',	'value':None, 'default' : None}
	config_items['destination_mysql_host'] 		= {'section':'MySQL',	'value':None}
	config_items['destination_mysql_port'] 		= {'section':'MySQL',	'value':None, 'default' : 3306}
	config_items['destination_database'] 		= {'section':'MySQL',	'value':None}
	config_items['pillar_durations'] 			= {'section':'Pillar',  'value':None, 'default' : "60s,60s,60s"}
	config_items['pillars'] 					= {'section':'Pillar',  'value':None, 'default' : "PureLoad,EvenCRUD,Analytics"}
	config_items['num_clients'] 				= {'section':'Pillar',  'value':None, 'default' : "8,8,8"}
	config_items['show_stats_frequency'] 		= {'section':'Pillar',  'value':0, 'default':0}
	config_items['tables'] 						= {'section':'MySQL',   'value':None, 'default':'*'}
	config_items['collect_stats'] 				= {'section':'MySQL',   'value':None, 'default':False} #TODO: Make this a flag and cleanup metadata.py
	config_items['retain_destination_database'] = {'section':'MySQL',   'value':None, 'default':False} #TODO: Make this a flag
	config_items['write_sql_to_disk'] 			= {'section':'Pillar',   'value':None, 'default':False} #TODO: Make this a flag
	config_items['repeat_x_times'] 				= {'section':'Pillar',  'value':1, 'default':1}
	config_items['report_name'] 				= {'section':'Pillar',  'value':None, 'default':None}
	config_items['destination_mysql_engine'] 	= {'section':'MySQL',   'value':None, 'default':None}
	
	parser = argparse.ArgumentParser()
	config = ConfigParser.ConfigParser()
	parser.add_argument("--config",help="config containing all arguments")
	for k, v in sorted(config_items.iteritems()):
			parser.add_argument("--" + k)
	args = parser.parse_args()
	if (args.config != None):
		if os.path.isfile(args.config) and os.access(args.config, os.R_OK):
			#print "config " + args.config + " exists and is readable"	
			config.read(args.config)
			for k, v in config_items.iteritems():
				if not(config.has_section(v['section']) and config.has_option(v['section'], k)):
					if eval(str("args."+k)) != None:
						config_items[k]['value'] = eval(str("args."+k))
					elif 'default' in config_items[k].keys():
						#print "The config does not contain " + k + " in section " + v['section'] + ". Using default: " + str(config_items[k]['default'])
						config_items[k]['value'] = config_items[k]['default']
					else:	
						print "The config does not contain " + k + " in section " + v['section']
						sys.exit(1)
				else:
					#make command line override config file
					if eval(str("args."+k)) != None:
						config_items[k]['value'] = eval(str("args."+k))
					else:
						config_items[k]['value'] = config.get(v['section'], k)
		else:
			print args.config + " is either missing or is not readable."
			sys.exit(1)
	else:
		for k, v in config_items.iteritems():
			if eval(str("args."+k)) == None:
				if 'default' in config_items[k].keys():
					config_items[k]['value'] = config_items[k]['default']
				else:
					config_items[k]['value'] = raw_input("Enter " + k + ":")
			else:
				config_items[k]['value'] = eval(str("args."+k))
			if not config.has_section(config_items[k]['section']):
				config.add_section(config_items[k]['section'])		
		
		for k, v in config_items.iteritems():
			if config_items[k]['value'] is not None:
				config.set(v['section'], k, config_items[k]['value'])
		# Writing our configuration file to
		with open('my.ini', 'wb') as configfile:
		    config.write(configfile)
	for k, v in config_items.iteritems():
		print k + ":" + str(v['value'])	

	profiles_to_run = config_items['pillars']['value'].split(",")
	pillar_durations = config_items['pillar_durations']['value'].split(",")

	if config_items['source_database']['value'] is not None:
		source_databases = config_items['source_database']['value'].split(",")
	else:
		source_databases = ['placeholder']
	
	destination_databases = config_items['destination_database']['value'].split(",")
	num_clients = config_items['num_clients']['value'].split(",")

	if config_items['source_database']['value'] is not None:
		if not (len(source_databases) == len(destination_databases)):
			print "the source and destination databases must have the same number of comma separated items."
			sys.exit(1)

	if not (len(profiles_to_run) == len(pillar_durations) == len(num_clients)):
		print "pillars, pillar_durations and num_clients must have the same number of comma separated items."
		sys.exit(1)		

	#if len(profiles_to_run) < len(destination_databases):
	#	destination_databases = []
	#	for d in destination_databases:
	#		destination_databases.append(config_items['destination_database']['value'])

	todo = zip(profiles_to_run, pillar_durations,num_clients)

	for k in range(int(config_items['repeat_x_times']['value'])):

		print "starting iteration " + str(k) 
		for index, source_database in enumerate(source_databases):

			print "Source DB: " + source_database + ", Destination DB: " + destination_databases[index]

			meta = MetaData(profiles_to_run[0])
			
			if os.path.isfile(profiles_to_run[0].split(" ")[0]):
				meta.import_from_file(profiles_to_run[0].split(" ")[0])
				print "imported " + str(profiles_to_run[0].split(" ")[0])
			else:
				meta.load(config_items['source_mysql_user']['value'], config_items['source_mysql_password']['value'], config_items['source_mysql_host']['value'], int(config_items['source_mysql_port']['value']), config_items['source_mysql_socket']['value'], source_database, False)

			if not config_items['retain_destination_database']['value']:
				print "dumping into " + destination_databases[index]
				meta.dump(config_items['destination_mysql_user']['value'], config_items['destination_mysql_password']['value'], config_items['destination_mysql_host']['value'], int(config_items['destination_mysql_port']['value']), config_items['destination_mysql_socket']['value'], destination_databases[index],config_items['destination_mysql_engine']['value'])

			dict_of_profiles_to_run = {}
			
			final_report = PillarReport()
			final_report.num_tables = meta.getNumberOfTables()
			final_report.num_columns = meta.getTotalNumberOfColumns()
			final_report.num_indexes = meta.getTotalNumberOfIndexes()
			final_report.mysql_variables = meta.mysql_variables
			final_report.create_tables = meta.create_tables
			
			for pillar in todo:

				profile_file_list = []
				for profile in pillar[0].split(" "):
					m = MetaData(profile)
					if os.path.isfile(profile.strip()):
						m.import_from_file(profile)
						profile_file_list.append(profile)
					else:
						if profile == "PureLoad":
							m.load(config_items['source_mysql_user']['value'], config_items['source_mysql_password']['value'], config_items['source_mysql_host']['value'], int(config_items['source_mysql_port']['value']), config_items['source_mysql_socket']['value'], source_database, config_items['collect_stats']['value'])
						else:
							m.load(config_items['source_mysql_user']['value'], config_items['source_mysql_password']['value'], config_items['source_mysql_host']['value'], int(config_items['source_mysql_port']['value']), config_items['source_mysql_socket']['value'], source_database,False)
						f = os.path.dirname(os.path.realpath(__file__)) + "/meta_data/" + profile + ".json"  #TODO  add tables somehow to the name?  maybe just random name
						m.export_to_file(f)
						profile_file_list.append(f)
					#check for no tables
					if len(m.meta_data.keys()) == 0:
					 	print "profile: " + profile + " has no tables?  exiting..."
					 	sys.exit(1)	

				pillar_name = pillar[0].replace(" ","_")
				
				dict_of_profiles_to_run[pillar_name] = DBAppConductor(pillar[2].split(" "), 
												pillar[1],
												config_items['destination_mysql_user']['value'],
												config_items['destination_mysql_password']['value'],
												config_items['destination_mysql_host']['value'],
												int(config_items['destination_mysql_port']['value']),
												config_items['destination_mysql_socket']['value'],
												destination_databases[index],
												profile_file_list,
												config_items['tables']['value'].split(" ")
												)
				print "Running Profile: " + pillar_name + " on database " + destination_databases[index] + " ..."
				dict_of_profiles_to_run[pillar_name].setShowStatsFrequency(config_items['show_stats_frequency']['value'])
				dict_of_profiles_to_run[pillar_name].write_sql_to_disk = config_items['write_sql_to_disk']['value']
				dict_of_profiles_to_run[pillar_name].go()
				final_report.feedClientStats(pillar_name,dict_of_profiles_to_run[pillar_name].getClientStats())

			final_report.printFullReport()
			if config_items['report_name']['value'] == None:
				#f = tempfile.NamedTemporaryFile(mode='wb', delete=False, dir=os.path.dirname(os.path.realpath(__file__)) + "/reports/", suffix='.dump', prefix=source_database+'_report')
				f = tempfile.NamedTemporaryFile(mode='wb', delete=False, dir=os.getcwd(), suffix='.dump', prefix=destination_databases[index] + '_report')
			else:
				#f = open(os.path.dirname(os.path.realpath(__file__)) + "/reports/" + config_items['report_name']['value'] + "_" + str(k) + ".dump",'wb')
				f = open(os.getcwd() + "/" + config_items['report_name']['value'] + "_" + str(k) + ".dump",'wb')
			pickle.dump(final_report, f)
			f.close()
        'public': ['typ1', 'typ2'],
        'custom': ['typ3', 'typ4'],
     },
    'foreignkeys': {
        'custom': [
            ('public', 'users', 'id', 'custom', 'shipments', 'user_id')
        ],
        'blog': [
            ('blog', 'entries', 'entryid', 'blog', 'entacclog', 'entryid'),
            ('blog', 'entries', 'entryid', 'blog', 'entrytags', 'entryid'),
            ('blog', 'tags', 'tagid', 'blog', 'entrytags', 'tagid'),
        ],
    },
}

testdata = MetaData(metadata)
cased_schemas = [schema(x) for x in ('public', 'blog', 'CUSTOM', '"Custom"')]

@pytest.fixture
def completer():
    return testdata.completer

casing = ('SELECT', 'Orders', 'User_Emails', 'CUSTOM', 'Func1', 'Entries',
          'Tags', 'EntryTags', 'EntAccLog',
          'EntryID', 'EntryTitle', 'EntryText')

@pytest.fixture
def completer_with_casing():
    return testdata.get_completer(casing=casing)

@pytest.fixture
Example #35
    def main(self):
        self.define_parser()
        args = self.parser.parse_args()
        self.validate(args)

        print("Performing rotation of particles from star file...")

        md = MetaData(args.i)

        new_particles = []

        particles = self.get_particles(md)

        new_particles.extend(
            self.rotateParticles(particles, rotValue, tiltValue, psiValue,
                                 xValue, yValue, zValue, md.version))
        mdOut = MetaData()

        if md.version == "3.1":
            mdOut.version = "3.1"
            mdOut.addDataTable("data_optics")
            mdOut.addLabels("data_optics", md.getLabels("data_optics"))
            mdOut.addData("data_optics", getattr(md, "data_optics"))
            particleTableName = "data_particles"
        else:
            particleTableName = "data_"

        mdOut.addDataTable(particleTableName)
        mdOut.addLabels(particleTableName, md.getLabels(particleTableName))
        mdOut.addData(particleTableName, new_particles)

        mdOut.write(args.o)

        print("New star file %s created. Have fun!" % args.o)
Example #36
    def main(self):
        self.define_parser()
        args = self.parser.parse_args()

        self.validate(args)

        md = MetaData(args.i)
        md.addLabels("data_particles", "rlnBeamTiltClass")

        print("Reading in input star file.....")

        particles = self.get_particles(md)

        print(
            "Total %s particles in input star file. \nAdding rlnBeamTiltClass."
            % str(len(particles)))

        self.addBeamTiltClass(particles)

        mdOut = MetaData()
        mdOut.addDataTable("data_")
        mdOut.addLabels("data_", md.getLabels("data_"))
        mdOut.addData("data_", particles)
        mdOut.write(args.o)

        print("New star file %s created. Have fun!" % args.o)