Example #1
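# Assumed context for this snippet: pandas as pd, libsbml, sympy as sp,
# OrderedDict from collections, List from typing, and the PEtab column
# constants OBSERVABLE_FORMULA / NOISE_FORMULA (e.g. from petab.C).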
def get_output_parameters(observable_df: pd.DataFrame,
                          sbml_model: libsbml.Model) -> List[str]:
    """Get output parameters

    Returns IDs of parameters used in observable and noise formulas that are
    not defined in the SBML model.

    Arguments:
        observable_df: PEtab observable table
        sbml_model: SBML model

    Returns:
        List of output parameter IDs
    """
    formulas = list(observable_df[OBSERVABLE_FORMULA])
    if NOISE_FORMULA in observable_df:
        formulas.extend(observable_df[NOISE_FORMULA])
    output_parameters = OrderedDict()

    for formula in formulas:
        free_syms = sorted(sp.sympify(formula).free_symbols,
                           key=lambda symbol: symbol.name)
        for free_sym in free_syms:
            sym = str(free_sym)
            if sbml_model.getElementBySId(sym) is None and sym != 'time':
                output_parameters[sym] = None

    return list(output_parameters.keys())
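
A minimal usage sketch (assumes petab's column constants are importable from petab.C; the tiny model below exists only so that the symbol k1 resolves in it):

import libsbml
import pandas as pd
from petab.C import OBSERVABLE_FORMULA

doc = libsbml.SBMLDocument(3, 1)
model = doc.createModel()
model.createParameter().setId('k1')

obs_df = pd.DataFrame({OBSERVABLE_FORMULA: ['k1 * A + offset']})
print(get_output_parameters(obs_df, model))  # ['A', 'offset']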
Example #2
    async def do_menu(self, ctx, c):
        emoji_to_embed = OrderedDict()
        for idx, image_info in enumerate(c['images']):
            emoji = char_to_emoji(str(idx))
            emoji_to_embed[emoji] = make_card_embed(c, idx)
        starting_menu_emoji = list(emoji_to_embed.keys())[0]
        return await self._do_menu(ctx, starting_menu_emoji, emoji_to_embed)
Example #3
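    # (Crops the image/annotation to each disease bounding box, expands the
    #  box toward a multiple of self.cube_size, splits it along each axis
    #  into roughly cube-sized chunks, and emits one feature dict per cube.)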
    def parse(self, input_features):
        boxes_key = 'bounding_boxes_{}'.format(self.disease_annotation)
        volumes_key = 'voxel_volumes_{}'.format(self.disease_annotation)
        if boxes_key not in input_features:
            Add_Bounding_Box = Add_Bounding_Box_Indexes([self.disease_annotation], add_to_dictionary=False)
            input_features = Add_Bounding_Box.parse(input_features)
        if boxes_key in input_features:
            bounding_boxes = input_features[boxes_key]
            voxel_volumes = input_features[volumes_key]
            del input_features[volumes_key]
            del input_features[boxes_key]
            image_base = input_features['image']
            annotation_base = input_features['annotation']
            out_features = OrderedDict()
            for cube_index, [box, voxels] in enumerate(zip(bounding_boxes, voxel_volumes)):
                if voxels < self.min_voxel_volume or voxels > self.max_voxels:
                    continue
                z_start, z_stop, r_start, r_stop, c_start, c_stop = add_bounding_box_to_dict(box, return_indexes=True)
                box_size = [z_stop - z_start, r_stop - r_start, c_stop - c_start]
                remainders = np.asarray([self.cube_size[i] - box_size[i] % self.cube_size[i]
                                         if box_size[i] % self.cube_size[i] != 0 else 0 for i in range(3)])
                z_start, z_stop, r_start, r_stop, c_start, c_stop = expand_box_indexes(
                    z_start, z_stop, r_start, r_stop, c_start, c_stop,
                    annotation_shape=annotation_base.shape,
                    bounding_box_expansion=remainders // 2 + 1)
                image = image_base[z_start:z_stop, r_start:r_stop, c_start:c_stop]
                annotation = annotation_base[z_start:z_stop, r_start:r_stop, c_start:c_stop]

                stack_image, stack_annotation = [image[None,...]], [annotation[None,...]]
                for axis in range(3):
                    output_images = []
                    output_annotations = []
                    for i in stack_image:
                        split = i.shape[axis+1] // self.cube_size[axis]
                        if split > 1:
                            output_images += np.array_split(i, split, axis=axis+1)
                        else:
                            output_images += [i]
                    for i in stack_annotation:
                        split = i.shape[axis+1] // self.cube_size[axis]
                        if split > 1:
                            output_annotations += np.array_split(i, split, axis=axis+1)
                        else:
                            output_annotations += [i]
                    stack_image = output_images
                    stack_annotation = output_annotations
                for box_index, [image_cube, annotation_cube] in enumerate(zip(stack_image, stack_annotation)):
                    temp_feature = OrderedDict()
                    image_cube, annotation_cube = image_cube[0], annotation_cube[0]
                    temp_feature['image'] = image_cube[:self.cube_size[0]]
                    temp_feature['annotation'] = annotation_cube[:self.cube_size[0]]
                    for key in input_features:  # Bring along anything else we care about
                        if key not in temp_feature.keys():
                            temp_feature[key] = input_features[key]
                    out_features['Disease_Box_{}_{}'.format(cube_index, box_index)] = temp_feature
            return out_features
        return input_features
Example #4
from collections import OrderedDict


def f_rec(n):
    # Sliding window over the last three values of the recurrence
    # f(n) = f(n - 1) + f(n - 3), with base cases f(0..2) = 0, 1, 2.
    window = OrderedDict({0: 0, 1: 1, 2: 2})
    if n in window:
        return window[n]
    for i in range(3, n + 1):
        current_value = window[i - 1] + window[i - 3]
        window.popitem(last=False)  # evict the oldest entry
        window[i] = current_value
    return window[n]
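
A quick self-check against the naive recursion (hypothetical, for illustration only):

def f_naive(n):
    return n if n < 3 else f_naive(n - 1) + f_naive(n - 3)

assert all(f_rec(n) == f_naive(n) for n in range(25))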
Example #5
class CppCompleter(CompleterBase):
    # XML file with C++ live templates
    live_template_file = os.path.join(util.get_resources_dir(),
                                      "live_templates_cpp.xml")

    # TODO: Support clang completer
    def setCompletionPrefix(self, text: str):
        temp_file_name = util.get_temp_file("cpp")
        with codecs.open(temp_file_name, "w", "utf-8") as f:
            f.write(self.parent.toPlainText())
        src_line_num = str(self.parent.textCursor().blockNumber() + 1)
        src_char_num = str(self.parent.textCursor().columnNumber())

        try:
            out = subprocess.check_output(
                # read all header is too slow
                # "clang -fsyntax-only -Xclang -code-completion-at=%s:%s:%s %s"
                "clang -cc1 -fsyntax-only -code-completion-at=%s:%s:%s %s" %
                (temp_file_name, src_line_num, src_char_num, temp_file_name),
                shell=True,
            ).decode()
        except subprocess.CalledProcessError as e:
            out = e.output.decode()

        self.candidates_dict = OrderedDict()
        for line in out.split("\n"):
            if line.startswith("COMPLETION:"):
                cand = line.split(" ")[1]
                if text not in cand:
                    continue
                if cand not in self.ng_words:
                    self.candidates_dict[cand] = -1

        for live_template in self.live_templates:
            if live_template.name.startswith(text):
                self.candidates_dict[
                    live_template.template] = live_template.rpos
        if len(self.candidates_dict) >= 10 or text in self.candidates_dict:
            self.candidates_dict = {}
        self.setModel(
            QtCore.QStringListModel(list(self.candidates_dict.keys())))
        super().setCompletionPrefix(text)
Example #6
from collections import OrderedDict


def commentCountDistribution(streamComments):
    # Map comment count -> list of users with that many comments
    commentCount = OrderedDict()
    for user in streamComments:
        count = len(streamComments[user])
        if count not in commentCount:
            commentCount[count] = []
        commentCount[count].append(user)
    commentDistribution = {}
    for count in sorted(commentCount.keys()):
        commentDistribution[count] = len(commentCount[count])
    return commentDistribution
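
For instance, with a hypothetical mapping of users to their comment lists:

comments = {'alice': ['c1', 'c2'], 'bob': ['c3'], 'carol': ['c4', 'c5']}
print(commentCountDistribution(comments))  # {1: 1, 2: 2}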
Example #7
from collections import OrderedDict


def get_features(image_path, annotation_path, image_processors=None, record_writer=None):
    features = OrderedDict()
    features['image_path'] = image_path
    features['annotation_path'] = annotation_path
    if image_processors is not None:
        for image_processor in image_processors:
            features, _ = down_dictionary(features, OrderedDict(), 0)
            for key in features.keys():
                features[key] = image_processor.parse(features[key])
        features, _ = down_dictionary(features, OrderedDict(), 0)
    if record_writer is not None:  # guard: the default is None
        record_writer.parse(features)
Example #8
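    # (Slices the 3D image/annotation stack into z-slabs of at most
    #  self.max_z slices, mirror-padding slabs shorter than self.min_z when
    #  self.mirror_small_bits is set; returns one feature dict per slab.)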
    def parse(self, input_features):
        out_features = OrderedDict()
        start_chop = 0
        image_base = input_features['image']
        annotation_base = input_features['annotation']
        image_path = input_features['image_path']
        spacing = input_features['spacing']
        z_images_base, rows, cols = image_base.shape
        if self.max_rows != np.inf:
            rows = min([rows, self.max_rows])
        if self.max_cols != np.inf:
            cols = min([cols, self.max_cols])
        image_base, annotation_base = image_base[:, :rows, :cols], annotation_base[:, :rows, :cols]
        step = min([self.max_z, z_images_base])
        for index in range(z_images_base // step + 1):
            image_features = OrderedDict()
            if start_chop >= z_images_base:
                continue
            image = image_base[start_chop:start_chop + step, ...]
            annotation = annotation_base[start_chop:start_chop + step, ...]
            start_chop += step  # advance to the next z-slab
            if image.shape[0] < max([step, self.min_z]):
                if self.mirror_small_bits:
                    while image.shape[0] < max([step, self.min_z]):
                        mirror_image = np.flip(image, axis=0)
                        mirror_annotation = np.flip(annotation, axis=0)
                        image = np.concatenate([image, mirror_image], axis=0)
                        annotation = np.concatenate([annotation, mirror_annotation], axis=0)
                    image = image[:max([step, self.min_z])]
                    annotation = annotation[:max([step, self.min_z])]
            start, stop = get_start_stop(annotation, extension=0)
            image_features['image_path'] = image_path
            image_features['image'] = image
            image_features['annotation'] = annotation
            image_features['start'] = start
            image_features['stop'] = stop
            image_features['z_images'] = image.shape[0]
            image_features['rows'] = image.shape[1]
            image_features['cols'] = image.shape[2]
            image_features['spacing'] = spacing
            for key in input_features.keys():
                if key not in image_features.keys():
                    image_features[key] = input_features[key]  # Pass along all other keys... be careful
            out_features['Image_{}'.format(index)] = image_features
        return out_features
Example #9
from collections import OrderedDict, defaultdict

from Bio import SeqIO


def createFasta(input_file, append_file, order):
    fastaInDict = dict()
    orderDictSc = OrderedDict()
    with open(input_file) as FASTAIN, open(append_file,
                                           "a") as APP, open(order) as ORD:
        fastaParse = SeqIO.parse(FASTAIN, "fasta")
        fastaOutDict = defaultdict(list)
        for fastaSeq in fastaParse:
            s = str(fastaSeq.seq)
            idFasta = fastaSeq.id
            fastaInDict[idFasta] = s

        keyMap = {
            "Smic.scaffold9__1420062__1920061":
            "Smic.scaffold9__1420062__2138115",
            "Smic.scaffold236__1__500000": "Smic.scaffold236__1__795886",
            "Smic.scaffold338__1__500000": "Smic.scaffold338__1__646490",
            "Smic.scaffold458__1__500000": "Smic.scaffold458__1__544999"
        }

        for line in ORD:
            line = line.rstrip("\n")
            val = line.split("\t")

            k = val[0]
            if (k in keyMap):
                k = keyMap[k]
                seq = fastaInDict[k][0:500000]
            else:
                seq = fastaInDict[k]

            fastaOutDict[val[1]].append(seq)
            orderDictSc[val[1]] = len(seq)

        for keys in orderDictSc.keys():
            chrom = "cluster" + keys
            print("Writing Chromosome " + str(chrom))
            APP.write(">" + chrom + "\n")
            APP.write("".join(fastaOutDict[keys]))
            APP.write("\n")
Example #10
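# (Tkinter GUI for a lab exercise: the message is split into block_size
#  columns, each column is frequency-analyzed to guess a per-column Caesar
#  shift over a 33-character Russian alphabet, and the shifts can then be
#  corrected manually with a slider.)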
class App:
    def __init__(self):
        self.root = Tk()
        self.block_size = IntVar()
        self.current_column_step = IntVar()
        self.block_size.set(3)
        self.isDecode = False

    """Увеличиаем размер блока"""

    def update_block_size(self):
        self.block_size.set(self.spin.get())

    """Выделяем блок мышкой"""

    def markArea(self, event):
        # Only if a table has been built
        if (self.isDecode and event.x > 30):
            self.firstCanvas.delete("hello")
            # Highlight the area that was clicked
            column_size = 270 // self.block_size.get()
            # determine which column we are in
            our_column = event.x // column_size
            # highlight it
            self.firstCanvas.create_rectangle(40 + our_column * column_size,
                                              25,
                                              column_size +
                                              column_size * our_column + 20,
                                              30 + 15 * len(self.table[0]),
                                              outline="#ff0000",
                                              tags="hello")
            self.marked = our_column
            # Pass along the data from the frequency analysis
            self.draw_data(self.marked)
            if (our_column < self.block_size.get()):
                # push the value into the slider
                self.current_column_step.set(self.steps[self.marked])

    """"""

    def peredecode(self, event):
        if (self.isDecode and self.marked < self.block_size.get()):
            self.steps[self.marked] = self.current_column_step.get()
            # Now redraw
            self.create_message(self.steps, self.table)

    def draw_data(self, item):
        self.fq.delete("all")
        column = 0
        j = 0
        for (i, letter) in enumerate(self.super_dicts[item].keys()):
            if (10 + 15 * j > 350):
                column += 1
                j = 0
            self.fq.create_text(20 + 30 * column,
                                10 + 15 * j,
                                text=letter + ": " +
                                str(self.super_dicts[item].get(letter)))
            j += 1

    def makeWidgets(self):
        self.firstFrame = Frame(self.root)
        self.firstFrame.pack(side=LEFT, anchor="nw", padx=10, pady=10)
        Label(self.firstFrame, text="Таблица для исходного сообщения").pack()
        self.firstAlterFrame = Frame(self.firstFrame)
        self.firstAlterFrame.pack()
        self.firstCanvas = Canvas(self.firstAlterFrame,
                                  width=300,
                                  height=500,
                                  bg="#ffffff")
        self.firstCanvas.bind("<Button-1>", self.markArea)
        self.firstCanvas.pack(side=LEFT)
        self.firstCanvasScroll = Scrollbar(self.firstAlterFrame,
                                           orient='vertical',
                                           command=self.firstCanvas.yview)
        self.firstCanvasScroll.pack(side=RIGHT, fill=Y, expand=True)
        self.firstCanvas.configure(yscrollcommand=self.firstCanvasScroll.set)
        Label(self.firstFrame, text="Введите исходное сообщение").pack(pady=10)
        self.firstText = Text(self.firstFrame, width=37, height=10)
        self.firstText.pack()

        self.secondFrame = Frame(self.root)
        self.secondFrame.pack(side=LEFT, anchor="n", padx=10, pady=10)
        Label(self.secondFrame, text="Таблица для конечного сообщения").pack()
        self.secondAlterFrame = Frame(self.secondFrame)
        self.secondAlterFrame.pack()
        self.secondCanvas = Canvas(self.secondAlterFrame,
                                   width=300,
                                   height=500,
                                   bg="#ffffff")
        self.secondCanvas.pack(side=LEFT)
        self.secondCanvasScroll = Scrollbar(self.secondAlterFrame,
                                            orient='vertical',
                                            command=self.secondCanvas.yview)
        self.secondCanvasScroll.pack(side=RIGHT, fill=Y, expand=True)
        self.secondCanvas.configure(yscrollcommand=self.secondCanvasScroll.set)
        Label(self.secondFrame, text="Конечный результат").pack(pady=10)
        self.secondText = Text(self.secondFrame, width=37, height=10)
        self.secondText.pack()

        self.lastFrame = Frame(self.root)
        self.lastFrame.pack(side=LEFT, anchor="n", padx=10, pady=10)
        Label(self.lastFrame, text="Панель управления").pack()
        Label(self.lastFrame, text="Размер блока").pack(pady=10)
        self.spin = Spinbox(self.lastFrame,
                            from_=0,
                            textvariable=self.block_size,
                            to=10,
                            command=self.update_block_size)
        self.spin.pack()
        Label(self.lastFrame, text="Частотный анализ").pack(pady=10)
        self.fq = Canvas(self.lastFrame, width=150, height=400, bg="#ffffff")
        self.fq.pack()
        # Our slider
        Label(self.lastFrame, text="Ручная корректировка").pack(pady=10)
        self.scale = Scale(self.lastFrame,
                           from_=0,
                           to=33,
                           length=150,
                           tickinterval=6,
                           orient='horizontal',
                           showvalue=YES,
                           variable=self.current_column_step,
                           command=self.peredecode)
        self.scale.pack()
        Button(self.lastFrame,
               text="Расшифровать!",
               width=15,
               height=5,
               command=self.decode).pack(pady=20)

    """Инициализирукм гуй"""

    def start(self):
        self.root.title("Лабораторная работа номер 2")
        # self.root.geometry("900x500")
        self.makeWidgets()
        self.root.mainloop()

    def decode(self):
        self.isDecode = True
        self.super_dicts = []
        # Take the message from the text box and fold it into block_size columns
        self.table = ['' for i in range(self.block_size.get())]
        message = self.firstText.get("1.0", 'end-1c')
        for (i, letter) in enumerate(message):
            self.table[i % self.block_size.get()] += letter
        self.draw(self.firstCanvas, self.table)
        # Now build a frequency-analysis dictionary for each column
        self.steps = [
            self.analis(self.table[i]) for i in range(self.block_size.get())
        ]
        # We now have an array of shifts; each column must be shifted by its value.
        # Besides the shifts we also keep the frequency-analysis dictionaries.
        # Each column is shifted accordingly in a separate function.
        self.create_message(self.steps, self.table)

    """Собираем исходное сообщение"""

    def create_message(self, steps, table):
        self.alterTable = ['' for i in table]
        for i in range(len(table)):
            self.alterTable[i] = App.caesar(table[i], 33 - steps[i])
        self.draw(self.secondCanvas, self.alterTable)
        out = ''
        for i in range(len(self.alterTable[0])):
            for j in range(self.block_size.get()):
                try:
                    out += self.alterTable[j][i]
                except IndexError:
                    out += ''
        self.secondText.delete('1.0', "end")
        self.secondText.insert('1.0', out)

    """Функция для сдвига по шифру цезаря Можно конечно в одну строчку через bytes.translate, но этот вариант мне больше нравится"""

    def caesar(message, step):
        alpha = [
            ' ', 'а', 'б', 'в', 'г', 'д', 'е', 'ж', 'з', 'и', 'й', 'к', 'л',
            'м', 'н', 'о', 'п', 'р', 'с', 'т', 'у', 'ф', 'х', 'ц', 'ч', 'ш',
            'щ', 'ъ', 'ы', 'ь', 'э', 'ю', 'я'
        ]
        res = []
        for item in range(len(alpha)):
            if item + step >= len(alpha):
                res += alpha[item + step - len(alpha)]
            elif item + step < 0:
                res += alpha[item + step + len(alpha)]
            else:
                res += alpha[item + step]
        wow = list(zip(alpha, res))
        msq = ''
        for letter in message:
            for item in wow:
                if letter == item[0]:
                    msq += item[1]
                    break
            else:
                msq += letter
        return msq

    """Анализ частотностит символов"""

    def analis(self, column):
        alphabet = " абвгдежзийклмнопрстуфхцчшщъыьэюя"
        self.dicts = {letter: 0 for letter in alphabet}
        dict_step = {letter: i for (i, letter) in enumerate(alphabet)}
        for i in column:
            self.dicts[i] += 1
        self.dicts = OrderedDict(
            sorted(self.dicts.items(), key=lambda t: -t[1]))
        self.super_dicts.append(self.dicts)
        azaza = list(self.dicts.keys())
        return dict_step.get(azaza[0])

    """Заполнянм кавас столбиками с буквами"""

    def draw(self, obj, table):
        # the source table is ready; now fill in the canvas
        obj.delete("all")
        obj.create_line(30, 0, 30, 30 + len(table[0]) * 15, fill="#000000")
        obj.create_line(30, 20, 300, 20, fill="#000000")
        # Divide the remaining space to lay out the columns
        column_size = 270 // self.block_size.get()
        # now place the column numbers and the message in a loop
        for i in range(self.block_size.get()):
            obj.create_text(column_size / 1.2 + column_size * i,
                            10,
                            text=str(i + 1))
            for (j, letter) in enumerate(table[i]):
                obj.create_text(column_size / 1.2 + column_size * i,
                                30 + j * 15,
                                text=letter)
                # Row numbering
        for i in range(len(table[0])):
            obj.create_text(10, 30 + i * 15, text=str(i + 1))
        obj.configure(scrollregion=obj.bbox("all"))
Example #11
    def build_compute_landscape(self, world_map):
        '''
        # Using the world_map.json config file, build 
        # a dict self.gpu_landscape like this:
        #
        #    {'machine_name1' : {'start_rank'    : <int>,
        #                        'num_gpus'      : <int>,
        #                        'gpu_device_ids': [<int>,<int>,...]
        #    {'machine_name2' : {'start_rank'    : <int>,
        #                        'num_gpus'      : <int>,
        #                        'gpu_device_ids': [<int>,<int>,...]
        #    } 
        #
        # Also sets 
        #     o self.master_hostname, the hostname
        #       running the one process that coordinates all others.
        #     o self.WORLD_SIZE, number of GPUs used across all machines
        #     o self.my_gpus, the number of GPUs on this machine
        
        :param world_map:
        :type world_map:
        :return: information about how many GPUs are
            on each node
        :rtype: OrderedDict
        '''
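
        # For reference, a minimal world_map of the assumed shape
        # (machine names are hypothetical):
        #
        #    {"machine_name1": {"master": 1, "gpus": 2, "devices": [0, 1]},
        #     "machine_name2": {"gpus": 4}}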

        if self.hostname not in world_map:
            raise ConfigError(
                f"World map does not contain an entry for this machine {self.hostname}"
            )

        # World size is the number of training script processes,
        # which is equal to number of GPUs used on all participating
        # machines combined:

        # Number of GPUs across all machines:
        self.WORLD_SIZE = 0

        self.master_hostname = None

        # Go through the world map, machine (a.k.a. node)
        # one at a time, in alpha order of the machine
        # names to ensure all copies of this script
        # come to the same conclusions about ranks

        # Build gpu_landscape:
        #
        #    {'machine_name1' : {'start_rank'    : <int>,
        #                        'num_gpus'      : <int>,
        #                        'gpu_device_ids': [<int>,<int>,...]
        #    {'machine_name2' : {'start_rank'    : <int>,
        #                        'num_gpus'      : <int>,
        #                        'gpu_device_ids': [<int>,<int>,...]
        #    }
        #
        # The structure is an OrderedDict(), containing
        # machines alphabetically by name. This discipline
        # is required so that all copies of this launch script
        # (one copy per machine) arrive at the same ordering of
        # GPUs:

        gpu_landscape = OrderedDict()

        for machine_name in sorted(world_map.keys()):

            # Get dict of info about the machine:

            machine_info = world_map[machine_name]

            try:
                machine_gpus = machine_info['gpus']
            except KeyError:
                raise ConfigError(
                    "World map must include a 'gpus' entry; the value may be 0"
                )

            gpu_landscape[machine_name] = {}
            gpu_landscape[machine_name]['num_gpus'] = machine_gpus

            # List of GPU numbers to use is optional
            # in world_maps:

            machine_gpus_to_use = machine_info.get('devices', None)
            if machine_gpus_to_use is None:
                # Use all GPUs on that machine:
                machine_gpus_to_use = list(range(machine_gpus))

            gpu_landscape[machine_name]['gpu_device_ids'] = machine_gpus_to_use

            # Accept all kinds of affirmatives as values:
            # for identification of the master node entry:

            is_master_node = machine_info.get('master', False) \
                in [1, 'True', 'true', 'Yes', 'yes']

            if is_master_node:
                self.master_hostname = machine_name
                if machine_name == self.hostname:
                    self.am_master_node = True
                try:
                    self.MASTER_ADDR = socket.gethostbyname(machine_name)
                except socket.gaierror:
                    # For machines that have no
                    # findable IP address:
                    self.MASTER_ADDR = '127.0.0.1'

            self.WORLD_SIZE += machine_gpus

        # Go through the machine entries in gpu_landscape, and
        # assign rank ranges to each. Must start with
        # the master node, because it must start with rank 0.
        # The master node itself may have no GPUs.

        master_info = gpu_landscape[self.master_hostname]
        master_info['rank_range'] = list(range(master_info['num_gpus']))
        master_info['start_rank'] = 0
        if len(master_info['rank_range']) == 0:
            # Master has no GPUs; it still gets rank 0:
            master_info['rank_range'] = [0]

        # Start assigning more ranks after
        # the GPUs of the master:

        running_rank = master_info['rank_range'][-1] + 1

        for machine_name in gpu_landscape.keys():
            if machine_name == self.master_hostname:
                # We already did the master node
                continue
            mach_info = gpu_landscape[machine_name]
            mach_info['start_rank'] = running_rank
            num_gpus = mach_info['num_gpus']
            range_bound = running_rank + (num_gpus if num_gpus > 0 else 1)
            mach_info['rank_range'] = list(range(running_rank, range_bound))
            running_rank += (num_gpus if num_gpus > 0 else 1)

        self.my_gpus = gpu_landscape[self.hostname]['num_gpus']
        self.gpu_landscape = gpu_landscape
        return gpu_landscape
Example #12
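# (RIAPS controller GUI client: builds the Gtk UI from a Glade file, receives
#  log messages from the controller over a ZMQ PULL socket, and maintains a
#  status grid with nodes as columns and apps as rows.)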
class ControlGUIClient(object):
    '''
    Controller GUI class
    '''
    def __init__(self, port, controller):
        '''
        Builds the GUI, connects it to the server (thread). The GUI is just another client of
        the service.
        '''
        global guiClient
        guiClient = self
        self.logger = logging.getLogger(__name__)
        self.port = port
        self.controller = controller
        self.context = controller.context
        GObject.threads_init()
        self.builder = Gtk.Builder()
        riaps_folder = os.getenv('RIAPSHOME', './')
        try:
            self.builder.add_from_file(
                join(riaps_folder, "etc/riaps-ctrl.glade"))  # GUI construction
        except RuntimeError:
            self.logger.error('Cannot find GUI configuration file')
            raise
        self.builder.connect_signals({
            "onDeleteWindow": self.on_Quit,
            "onConsoleEntryActivate": self.on_ConsoleEntry,
            "onSelectApplication": self.on_SelectApplication,
            "onSelectDeployment": self.on_SelectDeployment,
            "onFolderEntryActivate": self.on_folderEntryActivate,
            "onResetAll": self.on_resetAll,
            "onHaltAll": self.on_haltAll,
            "onQuit": self.on_Quit,
            "onLoadApplication": self.on_loadApplication,
            "onViewApplication": self.on_viewApplication,
            "onLogChanged": self.on_LogChanged
        })

        #keyFile = controller.keyFile
        #certFile = controller.certFile
        self.socket = self.context.socket(zmq.PULL)
        self.socket.bind(self.controller.endpoint)
        GLib.io_add_watch(self.socket.fileno(), 1, GLib.IO_IN,
                          self.on_serverMessage)

        # self.conn = rpyc.connect(self.controller.hostAddress, port)  # Local connection to the service
        # GLib.io_add_watch(self.conn, 1, GLib.IO_IN, self.bg_server)  # Register the callback with the service
        # self.conn.root.login("*gui*", self.on_serverMessage)  # Log in to the service

        self.mainWindow = self.builder.get_object("window1")
        self.messages = self.builder.get_object("messageTextBuffer")
        self.logWindow = self.builder.get_object("scrolledwindow1")
        self.consoleIn = self.builder.get_object("consoleEntryBuffer")
        self.appNameEntry = self.builder.get_object("appNameEntry")
        self.deplNameEntry = self.builder.get_object("deplNameEntry")
        self.folderEntry = self.builder.get_object("folderEntry")
        #self.launchButton = self.builder.get_object("launchButton")
        #self.launchButton.set_sensitive(False)
        #self.stopButton = self.builder.get_object("stopButton")
        #self.stopButton.set_sensitive(False)
        #self.removeButton = self.builder.get_object("removeButton")
        #self.removeButton.set_sensitive(False)
        self.appLaunched = False
        self.appDownLoaded = False
        '''
        Status Table Additions
        '''
        self.cellTextPlaceHolder = '                '
        self.column_cur_size = 12
        self.row_cur_size = 16
        self.appToLoad = None
        self.appSelected = None
        self.gridScrollWindow = self.builder.get_object('scrolledwindow2')
        self.gridTable = Gtk.Grid()
        self.gridScrollWindow.add_with_viewport(self.gridTable)
        self.nodeIDDict = OrderedDict()
        self.appStatusDict = OrderedDict()
        self.init_GridTable()

        self.mainWindow.show_all()

    def run(self):
        self.messages.insert(self.messages.get_end_iter(), " " * 256 + "\n")
        Gtk.main()

#     def bg_server(self, source=None, cond=None):
#         '''
#         Check if there is something pending from the server thread. Called by the main GUI loop
#         '''
#         if self.conn:
#             self.conn.poll_all()
#             return True
#         else:
#             return False

    def log(self, text, prompt='> '):
        global guiLock
        with guiLock:
            end = self.messages.get_end_iter()
            text = prompt + text + '\n'
            self.messages.insert(end, text)
        self.updateStatus(text)

    def on_LogChanged(self, *_args):
        with guiLock:
            adj = self.logWindow.get_vadjustment()
            upper, page = adj.get_upper(), adj.get_page_size()
            adj.set_value(upper - page)

    def on_serverMessage(self, _channel=None, _cond=None):
        '''
        Callback used by the service thread(s): it prints a log message.
        '''
        while True:
            try:
                text = self.socket.recv_pyobj(flags=zmq.NOBLOCK)
                self.log(text)
            except zmq.error.ZMQError:
                break
        return True

    def isIPaddress(self, addr):
        try:
            socket.inet_aton(addr)
            return True
        except socket.error:
            return False

    def getIPaddress(self, hName):
        if self.isIPaddress(hName):
            return hName
        else:
            try:
                ipAddr = socket.gethostbyname(hName)
                return ipAddr
            except socket.error:
                return hName

    def on_ConsoleEntry(self, *args):
        '''
        Called when the console entry receives an 'activate' event
        '''
        global guiLock
        fabcmd = self.consoleIn.get_text()
        if len(fabcmd) == 0: fabcmd = "help"
        fcmd = "fab"
        fflag = "-f"
        fpath = self.controller.fabModule
        hosts = self.controller.getClients()
        tPath = None
        if len(hosts) == 0:
            self.log('? No hosts connected - using default')
            cmd = str.join(' ', (fcmd, fflag, fpath, fabcmd))
        else:
            cHost = self.getIPaddress(self.controller.nodeName)
            hNames = [
                self.getIPaddress(socket.getfqdn(host)) for host in hosts
            ]
            hConf = {'RIAPS': {'nodes': hNames, 'control': cHost}}
            fAppsFolder = ""
            if cHost in hNames:
                appsFolder = os.getenv('riapsApps', None)
                fAppsFolder = "--set RIAPSAPPS=%s" % appsFolder if appsFolder else ""
            _drop, tPath = tempfile.mkstemp(text=True)
            with open(tPath, "w") as tFd:
                toml.dump(hConf, tFd)
            fhostsFile = ("--set hostsFile=" + tPath)
            cmd = str.join(
                ' ', (fcmd, fflag, fpath, fabcmd, fhostsFile, fAppsFolder))
        self.log(cmd)
        proc = subprocess.run(shlex.split(cmd),
                              stdout=subprocess.PIPE,
                              stderr=subprocess.STDOUT)
        resp = proc.stdout.decode('utf-8')
        if tPath: os.unlink(tPath)
        # print(resp)
        # self.log(resp,': ')
        for line in resp.split('\n'):
            if len(line) > 0:
                self.log(line, ': ')
        self.consoleIn.delete_text(0, -1)

    def selectFile(self, title, patterns):
        '''
        File selection dialog
        '''
        self.fcd = Gtk.FileChooserDialog(
            "Select " + str(title), None, Gtk.FileChooserAction.OPEN,
            (Gtk.STOCK_CANCEL, Gtk.ResponseType.CANCEL, Gtk.STOCK_OPEN,
             Gtk.ResponseType.OK))
        for pattern in patterns:
            filterR = Gtk.FileFilter()
            filterR.set_name("%s" % pattern)
            filterR.add_pattern(pattern)
            self.fcd.add_filter(filterR)

        filterA = Gtk.FileFilter()
        filterA.set_name("All files")
        filterA.add_pattern("*")
        self.fcd.add_filter(filterA)

        self.fcd.set_transient_for(self.mainWindow)

        self.response = self.fcd.run()
        fileName = None
        if self.response == Gtk.ResponseType.OK:
            fileName = self.fcd.get_filename()
        self.fcd.destroy()
        return fileName

    def selectFolder(self, title):
        '''
        Folder selection dialog
        '''
        self.fcd = Gtk.FileChooserDialog(
            "Select " + str(title), None, Gtk.FileChooserAction.SELECT_FOLDER,
            (Gtk.STOCK_CANCEL, Gtk.ResponseType.CANCEL, "Select",
             Gtk.ResponseType.OK))

        self.fcd.set_transient_for(self.mainWindow)

        self.response = self.fcd.run()
        folderName = None
        if self.response == Gtk.ResponseType.OK:
            folderName = self.fcd.get_filename()
        self.fcd.destroy()
        return folderName

    def isAppOK(self):
        aName = self.appNameEntry.get_text()
        dName = self.deplNameEntry.get_text()
        return (aName != None and aName != '' and dName != None
                and dName != '')

    def on_SelectApplication(self, *args):
        '''
        App selection. Sets the app entry and calls the controller to compile the app model.
        '''
        fileName = self.selectFile("application model", ["*.riaps", "*.json"])
        if fileName != None:
            self.appNameEntry.set_text(os.path.basename(fileName))
            self.controller.compileApplication(fileName,
                                               self.folderEntry.get_text())
            #if self.isAppOK():
            #    self.launchButton.set_sensitive(True)
            #    self.removeButton.set_sensitive(True)

    def clearApplication(self):
        '''
        Clears the app entry.
        '''
        self.appNameEntry.set_text('')

    def on_SelectDeployment(self, *args):
        '''
        Deployment selection. Sets the deployment entry and calls the controller
        to compile the deployment model.
        '''
        fileName = self.selectFile("deployment", ["*.depl", "*.json"])
        if fileName != None:
            self.deplNameEntry.set_text(os.path.basename(fileName))
            self.appToLoad = self.controller.compileDeployment(fileName)
            #if self.isAppOK():
            #    self.launchButton.set_sensitive(True)
            #    self.removeButton.set_sensitive(True)

    def clearDeployment(self):
        '''
        Clears the deployment entry
        '''
        self.deplNameEntry.set_text('')

    def on_folderEntryActivate(self, *args):
        '''
        App folder selection. Called when the folder entry or the folder button is activated.
        '''
        folderName = self.selectFolder("application directory")
        if folderName != None:
            self.folderEntry.set_text(folderName)
            self.controller.setAppFolder(folderName)

    def on_haltAll(self, *args):
        '''
        Reset and halt all connected clients. Deplos may be restarted automatically.
        '''
        self.controller.cleanAll()
        self.controller.killAll()

    def on_resetAll(self, *args):
        '''
        Clean all connected deplos (stop/remove apps)
        '''
        self.controller.cleanAll()

    def on_Quit(self, *args):
        '''
        Quit the app. Forces a return from the GUI loop
        '''
        # self.conn.close()
        self.socket.close()
        Gtk.main_quit()

    """
    Begin Status Table Additions
    """

    def updateStatus(self, text):
        '''
        Server message parser and dispatcher. Updates the status grid GUI based on the log message received.
        To be deprecated once the server calls the status grid update functions directly.
        '''
        statusList = text.split(' ')
        if statusList[0] == ('>'):
            ip = re.findall(r'[0-9]+(?:\.[0-9]+){3}', text)
            #if len(ip) == 0:
            #    return
            ip = ''.join(ip)
            if statusList[1] == ('+'):  # node connected
                self.update_node_connected_status(ip)
                return
            elif statusList[1] == '-':  # node disconnected
                self.update_node_disconnected_status(ip)
                return
            elif statusList[1] == 'R':  # remove status
                self.update_remove_status(statusList[2])
                return
            elif statusList[1] == 'H':  # halt status
                self.update_halt_status(statusList[2], statusList[3])
                return
            elif statusList[1] == 'L':  # launch status
                self.update_launch_status(statusList)

    """ Grid table update functions - to be used as entry points for status grid gui updates  """

    def update_node_connected_status(self, ip):
        '''
        A new node connected to the controller
        '''
        self.node_connected(ip)

    def update_node_disconnected_status(self, ip):
        '''
        A node disconnected from the controller
        '''
        self.node_disconnected(ip)

    def update_remove_status(self, app):
        ''' 
        An app has been removed
        '''
        # '> R DistributedAverager'
        self.remove_app(app)

    def update_halt_status(self, node, app):
        '''
        An app has been halted on a node
        '''
        #'> H 129.59.105.70 DistributedAverager Averager'
        self.halt_app(node, app)

    def update_launch_status(self, info):
        ''' 
        An app has been launched on a node
        '''
        # '> L 129.59.105.70 DistributedAverager Averager []'
        node = info[2]
        app = info[3]
        actor = info[4]
        self.launch_app(node, app, actor)

    def update_node_apps(self, clientName, data):
        '''
        Update the gui with list of apps running on the client 
        '''
        global guiLock
        with guiLock:
            if not data: return
            for item in data:
                appName, actors = item[0], item[1]
                self.add_app(appName)
                for actorName in actors:
                    self.launch_app(clientName, appName, actorName)
                    self.controller.addToLaunchList(clientName, appName,
                                                    actorName)

    """ Status grid gui update functions - these are the actual update functions """

    def node_connected(self, ip):
        '''
        A node connected to the controller
        '''
        self.nodeIDDict[ip] = True
        num_nodes = len(self.nodeIDDict)
        num_node_cols = self.column_cur_size - 1

        if num_nodes > num_node_cols:
            for i in range(self.column_cur_size, num_nodes + 1):
                self.add_table_column(i)
                self.column_cur_size = self.column_cur_size + 1

        keys = list(self.nodeIDDict.keys())
        col_id = keys.index(ip) + 1
        cell = self.gridTable.get_child_at(col_id, 0)
        if cell is not None:
            self.modify_text_cell_color(cell, 'black', 'white')
            self.modify_text_cell_text(cell, ip)

        self.gridTable.show_all()

    def node_disconnected(self, ip):
        '''
        A node disconnected from the controller
        '''
        if ip not in self.nodeIDDict:
            return

        self.nodeIDDict[ip] = False
        col_keys = list(self.nodeIDDict.keys())
        col_idx = col_keys.index(ip) + 1
        c_cell = self.gridTable.get_child_at(col_idx, 0)

        if c_cell is not None:
            self.modify_text_cell_color(c_cell, 'black', 'black')
            self.modify_text_cell_text(c_cell, '')

            # modify data - reset the data at a particular (col_idx)
            for row_idx, key in enumerate(self.appStatusDict, 1):
                self.appStatusDict[key][col_idx] = ''
                r_cell = self.gridTable.get_child_at(col_idx, row_idx)
                if r_cell is not None:
                    self.modify_text_cell_color(r_cell, 'white')
                    self.modify_text_cell_text(r_cell,
                                               self.cellTextPlaceHolder)

        self.gridTable.show_all()

    def remove_app(self, app):
        ''' 
        An app has been removed from the system
        '''
        # modify gui
        index = list(self.appStatusDict.keys()).index(app) + 1
        self.remove_table_row(index)
        self.row_cur_size = self.row_cur_size - 1

        # modify data
        del self.appStatusDict[app]

        self.create_table_row(self.row_cur_size, self.column_cur_size)
        self.row_cur_size = self.row_cur_size + 1
        self.gridTable.show_all()

    def halt_app(self, node, app):
        '''
        An app has been halted
        '''
        if node not in self.nodeIDDict:
            return

        if self.nodeIDDict[node] is True:
            col_map = list(self.nodeIDDict.keys())
            col_id = col_map.index(node) + 1
            row_map = list(self.appStatusDict.keys())
            row_id = row_map.index(app) + 1
            cell = self.gridTable.get_child_at(col_id, row_id)
            self.modify_text_cell_color(cell, 'grey')

    def launch_app(self, node, app, actor):
        '''
        An app has been launched
        '''
        if node not in self.nodeIDDict:
            return

        col_map = list(self.nodeIDDict.keys())
        col_id = col_map.index(node) + 1
        row_map = list(self.appStatusDict.keys())
        row_id = row_map.index(app) + 1

        existing_actor = self.appStatusDict[app][col_id - 1]
        if existing_actor == 'None' or existing_actor == '':
            self.appStatusDict[app][col_id - 1] = actor
        elif actor not in existing_actor:
            self.appStatusDict[app][col_id - 1] = existing_actor + '\n' + actor

        cell = self.gridTable.get_child_at(col_id, row_id)
        if cell is None:
            self.gridTable.attach(
                self.create_table_cell(self.appStatusDict[app][col_id - 1],
                                       'black', 'lime'), col_id, row_id, 1, 1)
        else:
            self.modify_text_cell_color(cell, 'lime', 'black')
            child_list = cell.get_children()
            child_list[0].set_label(self.appStatusDict[app][col_id - 1])

        self.gridTable.show_all()  # not sure if it's necessary here

    def add_app(self, app):
        if app in self.appStatusDict:
            return

        self.appStatusDict[app] = [''] * self.column_cur_size
        num_apps = len(self.appStatusDict)
        num_app_rows = self.row_cur_size - 1
        if (num_apps > num_app_rows):
            for i in range(self.row_cur_size, num_apps + 1):
                self.create_table_row(i, self.column_cur_size)
                self.row_cur_size = self.row_cur_size + 1

        cell = self.gridTable.get_child_at(0, len(self.appStatusDict))
        cell.destroy()
        self.gridTable.attach(self.create_app_menu_button(app), 0,
                              len(self.appStatusDict), 1, 1)

        self.gridScrollWindow.show_all()

    def on_loadApplication(self, widget):
        '''
        Load the selected application onto to the network
        '''
        # add a row in the table for the application
        if self.appToLoad is None:
            return

        self.add_app(self.appToLoad)
        self.clearApplication()
        self.clearDeployment()
        self.appToLoad = None

    def on_viewApplication(self, widget):
        '''
        View the selected application as to be deployed
        '''
        model = self.appNameEntry.get_text()
        deplo = self.deplNameEntry.get_text()
        try:
            fileName = gviz(model, deplo) + '.dot'
            subprocess.Popen(['xdot', fileName])
        except Exception:
            pass

    ''' Event handlers for widgets(buttons etc) '''

    def on_show_app_ctrl_options(self, widget):
        # set selected app name
        self.appSelected = widget.get_label()
        widget.get_popup().show_all()

    def on_launch_app_press(self, widget):
        appSelected = self.appSelected
        self.appSelected = ''
        self.controller.launchByName(appSelected)

    def on_stop_app_press(self, widget):
        appSelected = self.appSelected
        self.appSelected = ''
        self.controller.haltByName(appSelected)

    def on_remove_app_press(self, widget):
        appSelected = self.appSelected
        self.appSelected = ''
        self.controller.removeAppByName(appSelected)

    ''' Initialize grid table with blank cells '''

    def init_GridTable(self):
        # Add 1st button
        for c in range(self.row_cur_size):
            self.create_table_row(c, self.column_cur_size)

    def create_table_row(self, row_index, col_length):
        for i in range(col_length):
            cell = self.create_table_cell(self.cellTextPlaceHolder)
            self.gridTable.attach(cell, i, row_index, 1, 1)
            if row_index == 0:
                self.modify_text_cell_color(cell, 'black', 'white')
                if i == 0:
                    self.modify_text_cell_text(cell, "App \\ Node")

    ''' Utility functions '''

    def create_app_menu_button(self, text):
        menu_button = Gtk.MenuButton(text)
        menu_button.connect('pressed', self.on_show_app_ctrl_options)

        menu = Gtk.Menu()
        menu_button.set_popup(menu)

        item = Gtk.ImageMenuItem.new_from_stock(Gtk.STOCK_MEDIA_PLAY)
        item.get_child().set_text('Launch')
        item.connect('activate', self.on_launch_app_press)
        menu.append(item)
        item = Gtk.ImageMenuItem.new_from_stock(Gtk.STOCK_MEDIA_STOP)
        item.connect('activate', self.on_stop_app_press)
        menu.append(item)
        menu.append(Gtk.SeparatorMenuItem())
        item = Gtk.ImageMenuItem.new_from_stock(Gtk.STOCK_REMOVE)
        item.connect('activate', self.on_remove_app_press)
        menu.append(item)

        return menu_button

    def create_table_cell(self, text, fg_color='blue', bg_color='white'):
        try:
            frame_box = Gtk.Frame()
            frame_box.modify_fg(Gtk.StateType.NORMAL,
                                Gdk.color_parse(fg_color))
            frame_box.modify_bg(Gtk.StateType.NORMAL,
                                Gdk.color_parse(bg_color))

            text_label = Gtk.Label()
            text_content = '<b>' + ''.join(text) + '</b>'
            text_label.set_markup(text_content)
            text_label.set_justify(Gtk.Justification.CENTER)
            frame_box.add(text_label)
            return frame_box
        except Exception:
            return None

    def add_table_column(self, col_index):
        for i in range(self.row_cur_size):
            self.gridTable.attach(
                self.create_table_cell(self.cellTextPlaceHolder, 'black'),
                col_index, i, 1, 1)

        self.gridTable.show_all()

    def remove_table_row(self, index):
        self.gridTable.remove_row(index)

    ''' Cell helper functions'''

    def modify_text_cell_color(self, cell, bg='', fg=''):
        if cell is not None:
            if bg != '':
                cell.modify_bg(Gtk.StateType.NORMAL, Gdk.color_parse(bg))

            if fg != '':
                cell.modify_fg(Gtk.StateType.NORMAL, Gdk.color_parse(fg))

    def modify_text_cell_text(self, cell, text=''):
        if cell is not None:
            children = cell.get_children()
            if len(children) > 0:
                children[0].set_label(text)

    def set_cell_bg_color(self, text):
        bg_color = 'white'  # assumed default for text matching neither case below
        if 'Actor' in text:
            bg_color = 'lime'
        if 'Stopped' in text:
            bg_color = 'grey'
        return bg_color

Example #13
'''
Created on 16-Aug-2019

@author: Sanjay Ghosh
'''
from collections import OrderedDict

mydict = OrderedDict()
T = int(input())
for loop in range(T):
    myarr = input().split()
    key = " ".join(myarr[:-1])
    value = int(myarr[-1])
    if key not in mydict:
        mydict[key] = value
    else:
        mydict[key] = mydict[key] + value
for key in mydict.keys():
    print(key + " " + str(mydict[key]))
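
Sample run (a count T, then T lines of "item name price"; totals are printed per item in first-seen order):

# Input:
#   3
#   BANANA FRIES 12
#   POTATO CHIPS 30
#   BANANA FRIES 5
# Output:
#   BANANA FRIES 17
#   POTATO CHIPS 30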
Example #14
from collections import Counter, OrderedDict, defaultdict, deque

# deque
q = deque([1, 2, 3])
q.append('a')
q.append('abc')
q.appendleft('first')
oLast = q.pop()  # pop() removes from the right end, so this is 'abc', not 'first'
print(oLast)

#defaultdict
d = defaultdict(lambda: None)
d['key1'] = 'abc'
print(d['key1'])
print(d['key2'])
# OrderedDict
od = OrderedDict([('b', 1), ('a', 2), ('c', 3)])
print(od)
print(od.keys())

# Counter
c = Counter()
for ch in "programming":
    c[ch] += 1
print(c)
print(list(c.elements()))  # elements() returns an iterator, so materialize it
print(c.most_common(3))
print(sum(c.values()))

c1 = Counter({'g': 2, 'm': 2, 'r': 2, 'a': 1, 'i': 1, 'o': 1, 'n': 1, 'p': 1})
print(c1)
Example #15
def import_model(sbml_model: Union[str, 'libsbml.Model'],
                 condition_table: Optional[Union[str, pd.DataFrame]] = None,
                 observable_table: Optional[Union[str, pd.DataFrame]] = None,
                 measurement_table: Optional[Union[str, pd.DataFrame]] = None,
                 model_name: Optional[str] = None,
                 model_output_dir: Optional[str] = None,
                 verbose: Optional[Union[bool, int]] = True,
                 allow_reinit_fixpar_initcond: bool = True,
                 **kwargs) -> None:
    """
    Create AMICI model from PEtab problem

    :param sbml_model:
        PEtab SBML model or SBML file name.

    :param condition_table:
        PEtab condition table. If provided, parameters from there will be
        turned into AMICI constant parameters (i.e. parameters w.r.t. which
        no sensitivities will be computed).

    :param observable_table:
        PEtab observable table.

    :param measurement_table:
        PEtab measurement table.

    :param model_name:
        Name of the generated model. If model file name was provided,
        this defaults to the file name without extension, otherwise
        the SBML model ID will be used.

    :param model_output_dir:
        Directory to write the model code to. Will be created if it doesn't
        exist. Defaults to the current directory.

    :param verbose:
        Print/log extra information.

    :param allow_reinit_fixpar_initcond:
        See :class:`amici.ode_export.ODEExporter`. Must be enabled if initial
        states are to be reset after preequilibration.

    :param kwargs:
        Additional keyword arguments to be passed to
        :meth:`amici.sbml_import.SbmlImporter.sbml2amici`.
    """

    set_log_level(logger, verbose)

    logger.info("Importing model ...")

    # Get PEtab tables
    observable_df = petab.get_observable_df(observable_table)
    # to determine fixed parameters
    condition_df = petab.get_condition_df(condition_table)

    if observable_df is None:
        raise NotImplementedError("PEtab import without observables table "
                                  "is currently not supported.")

    # Model name from SBML ID or filename
    if model_name is None:
        if isinstance(sbml_model, libsbml.Model):
            model_name = sbml_model.getId()
        else:
            model_name = os.path.splitext(os.path.split(sbml_model)[-1])[0]

    if model_output_dir is None:
        model_output_dir = os.path.join(os.getcwd(), model_name)

    logger.info(f"Model name is '{model_name}'. "
                f"Writing model code to '{model_output_dir}'.")

    # Load model
    if isinstance(sbml_model, str):
        # from file
        sbml_reader = libsbml.SBMLReader()
        sbml_doc = sbml_reader.readSBMLFromFile(sbml_model)
        sbml_model = sbml_doc.getModel()
    else:
        # Create a copy, because it will be modified by SbmlImporter
        sbml_doc = sbml_model.getSBMLDocument().clone()
        sbml_model = sbml_doc.getModel()

    show_model_info(sbml_model)

    sbml_importer = amici.SbmlImporter(sbml_model)
    sbml_model = sbml_importer.sbml

    if observable_df is not None:
        observables, noise_distrs, sigmas = \
            get_observation_model(observable_df)

    logger.info(f'Observables: {len(observables)}')
    logger.info(f'Sigmas: {len(sigmas)}')

    if len(sigmas) != len(observables):
        raise AssertionError(
            f'Number of provided observables ({len(observables)}) and sigmas '
            f'({len(sigmas)}) do not match.')

    # TODO: adding extra output parameters is currently not supported,
    #  so we add any output parameters to the SBML model.
    #  this should be changed to something more elegant
    # <BeginWorkAround>
    formulas = chain((val['formula'] for val in observables.values()),
                     sigmas.values())
    output_parameters = OrderedDict()
    for formula in formulas:
        # we want reproducible parameter ordering upon repeated import
        free_syms = sorted(sp.sympify(formula).free_symbols,
                           key=lambda symbol: symbol.name)
        for free_sym in free_syms:
            sym = str(free_sym)
            if sbml_model.getElementBySId(sym) is None:
                output_parameters[sym] = None
    logger.debug(f"Adding output parameters to model: {output_parameters}")
    for par in output_parameters.keys():
        petab.add_global_parameter(sbml_model, par)
    # <EndWorkAround>

    # TODO: to parameterize initial states or compartment sizes, we currently
    #  need initial assignments. if they occur in the condition table, we
    #  create a new parameter initial_${speciesOrCompartmentID}.
    #  feels dirty and should be changed (see also #924)
    # <BeginWorkAround>
    initial_states = [
        col for col in condition_df if sbml_model.getSpecies(col) is not None
    ]
    initial_sizes = [
        col for col in condition_df
        if sbml_model.getCompartment(col) is not None
    ]
    fixed_parameters = []
    if len(initial_states) or len(initial_sizes):
        # add preequilibration indicator variable
        # NOTE: would only be required if we actually have preequilibration
        #  adding it anyway; can be optimized out later
        if sbml_model.getParameter(PREEQ_INDICATOR_ID) is not None:
            raise AssertionError("Model already has a parameter with ID "
                                 f"{PREEQ_INDICATOR_ID}. Cannot handle "
                                 "species and compartments in condition table "
                                 "then.")
        indicator = sbml_model.createParameter()
        indicator.setId(PREEQ_INDICATOR_ID)
        indicator.setName(PREEQ_INDICATOR_ID)
        # Can only reset parameters after preequilibration if they are fixed.
        fixed_parameters.append(PREEQ_INDICATOR_ID)

    for assignee_id in initial_sizes + initial_states:
        init_par_id_preeq = f"initial_{assignee_id}_preeq"
        init_par_id_sim = f"initial_{assignee_id}_sim"
        for init_par_id in [init_par_id_preeq, init_par_id_sim]:
            if sbml_model.getElementBySId(init_par_id) is not None:
                raise ValueError(
                    "Cannot create parameter for initial assignment "
                    f"for {assignee_id} because an entity named "
                    f"{init_par_id} exists already in the model.")
            init_par = sbml_model.createParameter()
            init_par.setId(init_par_id)
            init_par.setName(init_par_id)
        assignment = sbml_model.createInitialAssignment()
        assignment.setSymbol(assignee_id)
        formula = f'{PREEQ_INDICATOR_ID} * {init_par_id_preeq} '\
                  f'+ (1 - {PREEQ_INDICATOR_ID}) * {init_par_id_sim}'
        math_ast = libsbml.parseL3Formula(formula)
        assignment.setMath(math_ast)
    # <EndWorkAround>

    fixed_parameters.extend(
        get_fixed_parameters(sbml_model=sbml_model, condition_df=condition_df))

    logger.debug(f"Fixed parameters are {fixed_parameters}")
    logger.info(f"Overall fixed parameters: {len(fixed_parameters)}")
    logger.info(
        "Variable parameters: " +
        str(len(sbml_model.getListOfParameters()) - len(fixed_parameters)))

    # Create Python module from SBML model
    sbml_importer.sbml2amici(
        model_name=model_name,
        output_dir=model_output_dir,
        observables=observables,
        constant_parameters=fixed_parameters,
        sigmas=sigmas,
        allow_reinit_fixpar_initcond=allow_reinit_fixpar_initcond,
        noise_distributions=noise_distrs,
        verbose=verbose,
        **kwargs)
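
# Usage sketch for the import function above (assuming it is exposed as
# AMICI's petab_import.import_model; the file names below are placeholders):
from amici.petab_import import import_model

import_model(sbml_model='model.xml',             # SBML file or libsbml.Model
             condition_table='conditions.tsv',   # PEtab condition table
             observable_table='observables.tsv', # PEtab observable table
             model_name='my_model',
             model_output_dir='amici_models/my_model',
             verbose=True)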
Example #16
class TrainScriptLauncher:

    #------------------------------------
    # Constructor
    #-------------------

    # Use distributed torch default port:
    COMM_PORT = '5678'

    def __init__(self, unittesting=False):

        self.hostname = socket.getfqdn()
        if unittesting:
            # Let unittests create an instance
            # and call individual methods:
            return

        # Logging to console during launch:
        self.log = LoggingService()

        # Convenience: directory of this
        # script, and project root directory
        curr_dir = Path(__file__).parent
        proj_root = curr_dir.joinpath('../..').resolve()
        self.curr_dir = str(curr_dir)
        self.proj_root = str(proj_root)

        args_parser = BirdsTrainingArgumentsParser(
            formatter_class=BirdsTrainingArgumentsParser.
            BlankLinesHelpFormatter,
            description="PyTorch distributed training launch "
            "helper to spawn multiple distributed "
            "birds_train_parallel.py processes")

        all_args = args_parser.parse_args()
        # Separate the args for this launch script
        # from the args destined for the copies of
        # the train script:
        self.launch_args = all_args['launch_args']
        self.script_args = all_args['script_args']

        # Build the gpu_landscape dict:
        self.gather_world_layout(self.launch_args)

        self.GPUS_USED_THIS_MACHINE = self.gpu_landscape[
            self.hostname]['num_gpus']

    #------------------------------------
    # gather_world_layout
    #-------------------

    def gather_world_layout(self, launch_args):
        '''
        Build the gpu_landscape dict by computing a unique
        rank for each GPU across the group of nodes (machines),
        starting with the master node's first GPU as rank 0
        (if the master node has a GPU). The resulting GPU
        layout is assigned to self.gpu_landscape.

        :param launch_args: arguments destined for this launch script
        :type launch_args: {str : Any}
        '''

        try:
            config_file = launch_args['config']
            if not os.path.exists(config_file):
                raise ConfigError(
                    f"Configuration file {config_file} that was provided as command line arg does not exist."
                )
        except KeyError:
            raise RuntimeError(
                "Error: launch args must include a config file. See config.cfg.Example in project root"
            )

        self.config = DottableConfigParser(config_file)

        # Ensure that the launch_args contains
        # the path to the training script. It
        # will be there if provided on the cmd line.
        # But it may instead be under Path:train_script
        # in the configuration:

        try:
            self.launch_args['training_script']
        except KeyError:
            # The training script was not specified
            # on the command line. Is it in the config
            # file:
            try:
                self.launch_args['training_script'] = self.config.getpath(
                    'Paths', 'train_script', relative_to=self.curr_dir)
            except KeyError:
                raise ValueError(
                    "No training script specified on command line or in config file"
                )

        try:
            self.world_map_path = self.config.getpath(
                'Paths', 'world_map', relative_to=self.curr_dir)
        except KeyError:
            raise RuntimeError(
                f"Could not find entry for 'world_map' in config file {config_file}"
            )

        self.world_map = self.read_world_map(self.world_map_path)
        # Ensure that this machine has an
        # entry in the world_map:
        try:
            # Get this machine's info (sub)dict:
            _my_world_info = self.world_map[self.hostname]
        except KeyError:
            raise ConfigError(
                f"World map file does not contain entry for this machine ({self.hostname})"
            )

        self.compute_landscape = {}

        # Whether or not machine running this
        # code is the master node:
        self.am_master_node = False

        # Build gpu_landscape, which maps
        # machine names to the rank range
        # that they occupy via the number of
        # their GPUs
        #
        #    {machine_name1 : [1],
        #     machine_name2 : [0],
        #     machine_name3 : [1,2,3],
        #    }

        self.gpu_landscape = self.build_compute_landscape(self.world_map)

        if self.master_hostname is None:
            raise ConfigError(
                f'No master machine in {self.world_map_path}; one entry needs to be "master" : 1'
            )

        # Common pytorch port is either in the config file,
        # or we use the pytorch default
        self.MASTER_PORT = self.config.getint('Parallelism', 'master_port',
                                              self.COMM_PORT)

        # Handle special case: no GPUs anywhere, and
        # we are on node 0: in that case start a single
        # copy of the training script. If it is written
        # properly, it will detect the absence of a GPU,
        # and use the CPU. This happens during debugging
        # on a laptop:

        if self.WORLD_SIZE == 0 and self.am_master_node:
            self.WORLD_SIZE += 1

        # If trying to launch on a node without GPUs,
        # when GPUs are available elsewhere, refuse to
        # start the script (is this needed?):
        if not TESTING:
            if self.my_gpus == 0 and self.WORLD_SIZE > 0:
                raise RuntimeError(
                    "This machine does not have any GPU, but others do; training script not started."
                )

    #------------------------------------
    # launch_scripts
    #-------------------

    def launch_scripts(self):
        '''
        Launch (possibly) multiple copies of
        the training script. Use world_map.json
        to know how many, and which GPUs this
        machine is to use.
        
        Each copy is told:
        
            o MASTER_ADDR  # Where to reach the coordinating process
            o MASTER_PORT  # Corresponding port
            o RANK         # The copy's sequence number, which is
                           # unique across all participating machines
            o LOCAL_RANK   # Which of this machine's GPU to use (0-origin)
            o WORLD_SIZE   # How many GPUs are used on all machines together
            o GPUS_USED_THIS_MACHINE # Number of GPUs *used* on this
                                     # machine, according to the world_map.

        '''

        # gather_world_layout() already computed a unique
        # rank for each GPU across the group of nodes
        # (starting with the master node's first GPU as 0)
        # and stored the layout in self.gpu_landscape:
        #
        #     {<machine_name> : {'start_rank'    : <int>,
        #                        'num_gpus'      : <int>,
        #                        'gpu_device_ids': [<int>, ...]}}

        # This machine's range of ranks:
        rank_range = self.gpu_landscape[self.hostname]['rank_range']
        this_machine_gpu_ids = self.gpu_landscape[
            self.hostname]['gpu_device_ids']
        min_rank_this_machine = self.gpu_landscape[self.hostname]['start_rank']

        local_rank = 0
        # Map from process object to rank (for debug msgs):
        self.who_is_who = OrderedDict()
        for rank in rank_range:

            cmd = self.training_script_start_cmd(
                rank, len(this_machine_gpu_ids), local_rank,
                min_rank_this_machine, self.launch_args, self.script_args)

            # Copy stdin, and give the copy to the subprocess.
            # This enables the subprocess to ask user whether
            # to save training state in case of a cnt-C:
            newstdin = os.fdopen(os.dup(sys.stdin.fileno()))

            # Spawn one training script.

            process = subprocess.Popen(
                cmd,
                stdin=newstdin,
                stdout=None,  # Script inherits this launch
                stderr=None  # ... script's stdout/stderr  
            )
            self.who_is_who[process] = rank
            local_rank += 1

        if not self.launch_args['quiet']:
            print(
                f"Node {self.hostname} {os.path.basename(sys.argv[0])}: Num processes launched: {len(self.who_is_who)}"
            )
            if self.am_master_node:
                print(f"Awaiting {self.WORLD_SIZE} process(es) to finish...")
            else:
                print(f"Awaiting {self.my_gpus} process(es) to finish...")

        failed_processes = []
        try:
            for process in self.who_is_who.keys():
                process.wait()
                if process.returncode != 0:
                    failed_processes.append(process)
        except KeyboardInterrupt:
            # Gently kill the training scripts:
            self.handle_cnt_c()
            pass  # See which processes get the interrupt

        num_failed = len(failed_processes)
        if num_failed > 0:
            print(f"Number of failed training scripts: {num_failed}")
            for failed_process in failed_processes:
                train_script = self.launch_args['training_script']
                script_rank = self.who_is_who[failed_process]
                msg = (
                    f"Training script {train_script} (rank {script_rank}) encountered error(s); see logfile"
                )
                print(msg)

    #------------------------------------
    # training_script_start_cmd
    #-------------------

    def training_script_start_cmd(self, rank, gpus_used_this_machine,
                                  local_rank, min_rank_this_machine,
                                  launch_args, script_args):
        '''
        From provided information, creates a legal 
        command string for starting the training script.
        
        :param rank: rank of the script; i.e. its process's place
            in the sequence of all train script processes
            across all machines
        :type rank: int
        :param gpus_used_this_machine: number of GPU devices to 
            be used, according to the world_map; may be less than
            number of available GPUs
        :type gpus_used_this_machine: int
        :param local_rank: index into the local sequence of GPUs
            for the GPU that the script is to use
        :type local_rank: int
        :param min_rank_this_machine: the lowest of the ranks among
            the training scripts on this machine
        :type min_rank_this_machine: int
        :param launch_args: command line arguments intended for the
            launch script, as opposed to being destined for the 
            train script
        :type launch_args: {str : Any}
        :param script_args: additional args for the train script
        :type script_args: {str : Any}
        '''

        # Build the shell command line,
        # starting with 'python -u':
        cmd = [sys.executable, "-u"]

        cmd.append(launch_args['training_script'])

        # Add the args for the script that were
        # in the command line:
        for arg_name, script_arg_val in script_args.items():
            if script_arg_val is None or arg_name == 'config':
                # Skip over non-specified CLI args (the config file
                # is added as the positional arg below):
                continue
            cmd.append(f"--{arg_name}={script_arg_val}")

        # Add the 'secret' args that tell the training
        # script all the communication parameters:

        cmd.extend([
            f"--MASTER_ADDR={self.MASTER_ADDR}",
            f"--MASTER_PORT={self.MASTER_PORT}", f"--RANK={rank}",
            f"--LOCAL_RANK={local_rank}",
            f"--MIN_RANK_THIS_MACHINE={min_rank_this_machine}",
            f"--WORLD_SIZE={self.WORLD_SIZE}",
            f"--GPUS_USED_THIS_MACHINE={gpus_used_this_machine}"
        ])

        # Finally, the obligatory non-option arg
        # to the training script: the configuration
        # file:

        config_file_name = script_args['config']
        cmd.append(config_file_name)

        self.log.debug(f"****** Launch: the cmd is {cmd}")
        return cmd

    #------------------------------------
    # read_world_map
    #-------------------

    def read_world_map(self, path):
        '''
        Read the JSON5 world map file, and 
        return a corresponding dict. JSON5
        allows something like:
        
        /*
            This is a block comment.
            Notice the missing quote
            chars around the keys below;
            they are optional in JSON5.
            
        */
        
        {quintus.stanford.edu : {
            "master" : Yes
            "gpus" : 2
         },
        
         quatro.stanford.edu  : {
             "gpus" : 2,
             "devices" : [1,2]
         }
        }
        
        BUT: JSON5 gets angry at dots in the 
             keys. 
        So we first read the file, and try to find 
        the machine names. We temporarily replace
        them with an acceptable marker, and then 
        convert back.
                
        :param path: path to world map file
        :type path: string
        '''
        dot_substitute = '___'

        try:
            # Read all the world map file lines:
            with open(path, 'r') as world_map_fd:
                tmp_world_map = world_map_fd.readlines()
        except IOError as e:
            raise IOError(f"World map file at {path} not found") from e

        # Replace occurrences of '.' with dot_substitute:
        new_text = []
        for line in tmp_world_map:
            new_text.append(line.replace('.', dot_substitute))

        # ... and make one string from all the lines:
        json_str = '\n'.join(new_text)

        try:
            # Hopefully, JSON5 will eat it now:
            world_map_almost = json5.loads(json_str)
        except JSONError as e:
            raise JSONError(
                f"World map file at {path} contains bad JSON") from e

        # Need to fix all the dot substitutions.
        # At this point the data structure is
        #    { <machine_name> : {spec_attr1 : val1,
        #                        spec_attr2 : val2,
        #                       }
        #    }

        # Fix the machine names first:
        mach_names_fixed = [
            machine_name.replace(dot_substitute, '.')
            for machine_name in world_map_almost.keys()
        ]

        machine_specs_fixed = []

        # Now dig into each of the nested machine spec
        # dicts, and fix attrs and values there:
        for spec in world_map_almost.values():
            # Spec is a dict nested inside the outer one:
            spec_fixed = {
                key.replace(dot_substitute, '.'): val.replace(
                    dot_substitute, '.') if isinstance(val, str) else val
                for key, val in spec.items()
            }
            machine_specs_fixed.append(spec_fixed)

        # Put it all together:
        world_map = {
            machine_name: spec_dict
            for machine_name, spec_dict in zip(mach_names_fixed,
                                               machine_specs_fixed)
        }

        return world_map

    #------------------------------------
    # build_compute_landscape
    #-------------------

    def build_compute_landscape(self, world_map):
        '''
        # Using the world_map.json config file, build 
        # a dict self.gpu_landscape like this:
        #
        #    {'machine_name1' : {'start_rank'    : <int>,
        #                        'num_gpus'      : <int>,
        #                        'gpu_device_ids': [<int>,<int>,...]
        #    {'machine_name2' : {'start_rank'    : <int>,
        #                        'num_gpus'      : <int>,
        #                        'gpu_device_ids': [<int>,<int>,...]
        #    } 
        #
        # Also sets 
        #     o self.master_hostname, the hostname
        #       running the one process that coordinates all others.
        #     o self.WORLD_SIZE, number of GPUs used across all machines
        #     o self.my_gpus, the number of GPUs on this machine
        
        :param world_map:
        :type world_map:
        :return: information about how many GPUs are
            on each node
        :rtype: OrderedDict
        '''

        if self.hostname not in world_map:
            raise ConfigError(
                f"World map does not contain an entry for this machine {self.hostname}"
            )

        # World size is the number of training script processes,
        # which is equal to number of GPUs used on all participating
        # machines combined:

        # Number of GPUs across all machines:
        self.WORLD_SIZE = 0

        self.master_hostname = None

        # Go through the world map, machine (a.k.a. node)
        # one at a time, in alpha order of the machine
        # names to ensure all copies of this script
        # come to the same conclusions about ranks

        # Build gpu_landscape:
        #
        #    {'machine_name1' : {'start_rank'    : <int>,
        #                        'num_gpus'      : <int>,
        #                        'gpu_device_ids': [<int>,<int>,...]
        #    {'machine_name2' : {'start_rank'    : <int>,
        #                        'num_gpus'      : <int>,
        #                        'gpu_device_ids': [<int>,<int>,...]
        #    }
        #
        # The structure is an OrderedDict(), containing
        # machines alphabetically by name. This discipline
        # is required so that all copies of this launch script
        # (one copy per machine) arrive at the same ordering of
        # GPUs:

        gpu_landscape = OrderedDict({})

        for machine_name in sorted(world_map.keys()):

            # Get dict of info about the machine:

            machine_info = world_map[machine_name]

            try:
                machine_gpus = machine_info['gpus']
            except KeyError:
                raise ConfigError(
                    "World map must include a 'gpus' entry for each machine; "
                    "the value may be 0"
                )

            gpu_landscape[machine_name] = {}
            gpu_landscape[machine_name]['num_gpus'] = machine_gpus

            # List of GPU numbers to use is optional
            # in world_maps:

            machine_gpus_to_use = machine_info.get('devices', None)
            if machine_gpus_to_use is None:
                # Use all GPUs on that machine:
                machine_gpus_to_use = list(range(machine_gpus))

            gpu_landscape[machine_name]['gpu_device_ids'] = machine_gpus_to_use

            # Accept all kinds of affirmatives as values:
            # for identification of the master node entry:

            is_master_node = machine_info.get('master', False) \
                in [1, 'True', 'true', 'Yes', 'yes']

            if is_master_node:
                self.master_hostname = machine_name
                if machine_name == self.hostname:
                    self.am_master_node = True
                try:
                    self.MASTER_ADDR = socket.gethostbyname(machine_name)
                except socket.gaierror:
                    # For machines that have no
                    # findable IP address:
                    self.MASTER_ADDR = '127.0.0.1'

            self.WORLD_SIZE += machine_gpus

        # Go through the machine entries in gpu_landscape, and
        # assign rank ranges to each. Must start with
        # the master node, b/c it must start with rank 0.
        # For the master node, it is possible that it has
        # no GPUs

        master_info = gpu_landscape[self.master_hostname]
        master_info['rank_range'] = list(range(master_info['num_gpus']))
        master_info['start_rank'] = 0
        if len(master_info['rank_range']) == 0:
            # Master has no GPUs; it still gets rank 0:
            master_info['rank_range'] = [0]

        # Start assigning more ranks after
        # the GPUs of the master:

        running_rank = master_info['rank_range'][-1] + 1

        for machine_name in gpu_landscape.keys():
            if machine_name == self.master_hostname:
                # We already did the master node
                continue
            mach_info = gpu_landscape[machine_name]
            mach_info['start_rank'] = running_rank
            num_gpus = mach_info['num_gpus']
            range_bound = running_rank + (num_gpus if num_gpus > 0 else 1)
            mach_info['rank_range'] = list(range(running_rank, range_bound))
            running_rank += (num_gpus if num_gpus > 0 else 1)

        self.my_gpus = gpu_landscape[self.hostname]['num_gpus']
        self.gpu_landscape = gpu_landscape
        return gpu_landscape

    #------------------------------------
    # handle_cnt_c
    #-------------------

    def handle_cnt_c(self):
        '''
        Stop the training script processes listed in
        self.who_is_who by signaling each of them,
        highest rank first, master process last.
        '''
        # Line processes up, highest rank first,
        # master process last:

        procs_terminate = sorted([proc for proc in self.who_is_who.keys()],
                                 key=lambda obj: self.who_is_who[obj],
                                 reverse=True)

        for process in procs_terminate:
            # If process is no longer running,
            # forget about it:
            if process.poll() is not None:
                # Process dead:
                continue
            process.send_signal(signal.SIGTERM)
            process.wait()
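
# Minimal usage sketch for the launcher above, assuming the expected
# command line arguments, config file, and world_map file are in place:
if __name__ == '__main__':
    launcher = TrainScriptLauncher()
    launcher.launch_scripts()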
Example #17
# Imports needed by this snippet; the project-specific helpers and constants
# (FileLocationManager, SqlController, NumpyToNeuroglancer, MD589_centers,
# get_structure_number, get_segment_properties, align_point_sets, ROOT_DIR,
# DATA_PATH, and the module-level 'start' time) come from the surrounding
# codebase:
import os
import shutil
from collections import OrderedDict
from pprint import pprint
from timeit import default_timer as timer  # assumption: 'timer' is timeit's default_timer

import numpy as np


def create_atlas(animal, create):

    fileLocationManager = FileLocationManager(animal)
    atlas_name = 'atlasV7'
    THUMBNAIL_DIR = os.path.join(ROOT_DIR, animal, 'preps', 'CH1', 'thumbnail')
    ATLAS_PATH = os.path.join(DATA_PATH, 'atlas_data', atlas_name)
    ORIGIN_PATH = os.path.join(ATLAS_PATH, 'origin')
    VOLUME_PATH = os.path.join(ATLAS_PATH, 'structure')
    OUTPUT_DIR = os.path.join(fileLocationManager.neuroglancer_data, 'atlas')
    if os.path.exists(OUTPUT_DIR):
        shutil.rmtree(OUTPUT_DIR)
    os.makedirs(OUTPUT_DIR, exist_ok=True)
    origin_files = sorted(os.listdir(ORIGIN_PATH))
    volume_files = sorted(os.listdir(VOLUME_PATH))
    sqlController = SqlController(animal)
    resolution = sqlController.scan_run.resolution
    surface_threshold = 0.8
    SCALE = (10 / resolution)

    structure_volume_origin = {}
    for volume_filename, origin_filename in zip(volume_files, origin_files):
        structure = os.path.splitext(volume_filename)[0]
        if structure not in origin_filename:
            print(structure, origin_filename)
            break

        color = get_structure_number(structure.replace('_L', '').replace('_R', ''))

        origin = np.loadtxt(os.path.join(ORIGIN_PATH, origin_filename))
        volume = np.load(os.path.join(VOLUME_PATH, volume_filename))

        volume = np.rot90(volume, axes=(0, 1))
        volume = np.flip(volume, axis=0)
        volume[volume > surface_threshold] = color
        volume = volume.astype(np.uint8)

        structure_volume_origin[structure] = (volume, origin)

    col_length = sqlController.scan_run.width/SCALE
    row_length = sqlController.scan_run.height/SCALE
    z_length = len(os.listdir(THUMBNAIL_DIR))
    atlasV7_volume = np.zeros((int(row_length), int(col_length), z_length), dtype=np.uint8)
    print('atlas volume shape', atlasV7_volume.shape)

    ##### actual data for both sets of points, pixel coordinates
    centers = OrderedDict(MD589_centers)
    centers_list = []
    for value in centers.values():
        centers_list.append((value[1]/SCALE, value[0]/SCALE, value[2]))
    COM = np.array(centers_list)
    atlas_com_centers = OrderedDict()
    atlas_all_centers = {}
    for structure, (volume, origin) in sorted(structure_volume_origin.items()):
        midcol, midrow, midz = origin
        row_start = midrow + row_length / 2
        col_start = midcol + col_length / 2
        z_start = midz / 2 + z_length / 2
        row_end = row_start + volume.shape[0]
        col_end = col_start + volume.shape[1]
        z_end = z_start + (volume.shape[2] + 1) / 2
        midcol = (col_end + col_start) / 2
        midrow = (row_end + row_start) / 2
        midz = (z_end + z_start) / 2
        if structure in centers.keys():
            atlas_com_centers[structure] = [midrow, midcol, midz]
        atlas_all_centers[structure] = [midrow, midcol, midz]
    ATLAS_centers = OrderedDict(atlas_com_centers)
    ATLAS = np.array(list(ATLAS_centers.values()))
    #### both sets of data are scaled to stack of DK52
    pprint(COM)
    pprint(ATLAS)
    #####Transform to auto align
    r_auto, t_auto = align_point_sets(ATLAS.T, COM.T)

    rotationpath = os.path.join(ATLAS_PATH, f'atlas2{animal}.rotation.npy')
    np.save(rotationpath, r_auto)
    translatepath = os.path.join(ATLAS_PATH, f'atlas2{animal}.translation.npy')
    np.save(translatepath, t_auto)


    # Litao, look at the start and end for these structures, the x and y look good
    # but the z (section) is off
    debug = True
    for structure, (volume, origin) in sorted(structure_volume_origin.items()):
        print(str(structure).ljust(7),end=": ")

        source_point = np.array(atlas_all_centers[structure]) # get adjusted x,y,z from above loop
        results = (r_auto @ source_point + t_auto.T).reshape(1,3) # transform to fit
        x = results[0][1] # new x
        y = results[0][0] # new y
        z = results[0][2] # z
        x = x - volume.shape[0]/2
        y = y - volume.shape[1]/2
        x_start = int(round(x))
        y_start = int(round(y))
        z_start = int(z - volume.shape[2] / 4)

        x_end = int(round(x_start + volume.shape[0]))
        y_end = int(round(y_start + volume.shape[1]))
        z_end = int(round(z_start + (volume.shape[2] + 1) // 2))

        if debug:
            #print('volume shape', volume.shape, end=" ")
            print('COM row',
                  str(int(y)).rjust(4),
                  'mid col',
                  str(int(x)).rjust(4),
                  'mid z',
                  str(int(z)).rjust(4),
                  end=" ")
            print('Row range',
                  str(y_start).rjust(4),
                  str(y_end).rjust(4),
                  'col range',
                  str(x_start).rjust(4),
                  str(x_end).rjust(4),
                  'z range',
                  str(z_start).rjust(4),
                  str(z_end).rjust(4),
                  end=" ")

        if structure in centers.keys():
            xo,yo,zo = MD589_centers[structure]
            print('COM off by:',
                  round(x*SCALE - xo, 2),
                  round(y*SCALE - yo, 2),
                  round(z - zo, 2),
                  end=" ")

        z_indices = [z for z in range(volume.shape[2]) if z % 2 == 0]
        volume = volume[:, :, z_indices]
        volume = np.swapaxes(volume, 0, 1)
        try:
            atlasV7_volume[y_start:y_end, x_start:x_end, z_start:z_end] += volume
        except ValueError:
            print('Bad fit', end=" ")

        print()

    resolution = int(resolution * 1000 * SCALE)
    print('Shape of downsampled atlas volume', atlasV7_volume.shape)

    print('Resolution at', resolution)

    if create:
        atlasV7_volume = np.rot90(atlasV7_volume, axes=(0, 1))
        atlasV7_volume = np.fliplr(atlasV7_volume)
        atlasV7_volume = np.flipud(atlasV7_volume)
        atlasV7_volume = np.fliplr(atlasV7_volume)

        offset = [0,0,0]
        ng = NumpyToNeuroglancer(atlasV7_volume, [resolution, resolution, 20000], offset=offset)
        ng.init_precomputed(OUTPUT_DIR)
        ng.add_segment_properties(get_segment_properties())
        ng.add_downsampled_volumes()
        ng.add_segmentation_mesh()

        #outpath = os.path.join(ATLAS_PATH, f'{atlas_name}.tif')
        #io.imsave(outpath, atlasV7_volume.astype(np.uint8))
    end = timer()
    print(f'Finito! Program took {end - start} seconds')
Example #18
from collections import OrderedDict

# print("Before:\n")
# od = OrderedDict()
# od['a'] = 1
# od['b'] = 2
# od['c'] = 3
# od['d'] = 4
# for key, value in od.items():
#     print(key, value)
#
# print("\nAfter:\n")
# od['c'] = 5
# for key, value in od.items():
#     print(key, value)

input_string = 'aabbccddef'
ordered_dict = OrderedDict()

# Count occurrences of each character, preserving first-seen order
# ('char' avoids shadowing the built-in name 'input'):
for char in input_string:
    if char not in ordered_dict:
        ordered_dict[char] = 1
    else:
        ordered_dict[char] += 1

out_string = ""
for key, value in ordered_dict.items():
    out_string += str(key) + str(value)

print(out_string)
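# Output: a2b2c2d2e1f1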
Example #19
    def _report_textual_results(self, tally_coll, res_dir):
        '''
        Given a sequence of tallies with results
        from a series of batches, create long
        predictions and labels lists from all tallies.

        Computes information retrieval type values:
             precision (macro/micro/weighted/by-class)
             recall    (macro/micro/weighted/by-class)
             f1        (macro/micro/weighted/by-class)
             accuracy
             balanced_accuracy

        Combines these results into a Pandas Series,
        and writes them to a csv file whose path is constructed
        from the passed-in res_dir, appended with 'ir_results.csv'.

        Finally, constructs Github flavored tables from the
        above results, and posts them to the 'text' tab of
        tensorboard.

        Returns the result measures Series.
        
        :param tally_coll: collect of tallies from batches
        :type tally_coll: ResultCollection
        :param res_dir: directory where all .csv and other 
            result files are to be written
        :type res_dir: str
        :return: results of information retrieval-like measures
        :rtype: pandas.Series
        '''

        all_preds = []
        all_labels = []

        for tally in tally_coll.tallies(phase=LearningPhase.TESTING):
            all_preds.extend(tally.preds)
            all_labels.extend(tally.labels)

        res = OrderedDict({})
        res['prec_macro'] = precision_score(all_labels,
                                            all_preds,
                                            average='macro',
                                            zero_division=0)
        res['prec_micro'] = precision_score(all_labels,
                                            all_preds,
                                            average='micro',
                                            zero_division=0)
        res['prec_weighted'] = precision_score(all_labels,
                                               all_preds,
                                               average='weighted',
                                               zero_division=0)
        res['prec_by_class'] = precision_score(all_labels,
                                               all_preds,
                                               average=None,
                                               zero_division=0)

        res['recall_macro'] = recall_score(all_labels,
                                           all_preds,
                                           average='macro',
                                           zero_division=0)
        res['recall_micro'] = recall_score(all_labels,
                                           all_preds,
                                           average='micro',
                                           zero_division=0)
        res['recall_weighted'] = recall_score(all_labels,
                                              all_preds,
                                              average='weighted',
                                              zero_division=0)
        res['recall_by_class'] = recall_score(all_labels,
                                              all_preds,
                                              average=None,
                                              zero_division=0)

        res['f1_macro'] = f1_score(all_labels,
                                   all_preds,
                                   average='macro',
                                   zero_division=0)
        res['f1_micro'] = f1_score(all_labels,
                                   all_preds,
                                   average='micro',
                                   zero_division=0)
        res['f1_weighted'] = f1_score(all_labels,
                                      all_preds,
                                      average='weighted',
                                      zero_division=0)
        res['f1_by_class'] = f1_score(all_labels,
                                      all_preds,
                                      average=None,
                                      zero_division=0)

        res['accuracy'] = accuracy_score(all_labels, all_preds)
        res['balanced_accuracy'] = balanced_accuracy_score(
            all_labels, all_preds)

        res_series = pd.Series(list(res.values()), index=list(res.keys()))

        # Write information retrieval type results
        # to a one-line .csv file, using pandas Series
        # as convenient intermediary:
        res_csv_path = os.path.join(res_dir, 'ir_results.csv')
        res_series.to_csv(res_csv_path)

        res_rnd = {}
        for meas_nm, meas_val in res.items():

            # Measure results are either floats (precision, recall, etc.),
            # or np arrays (e.g. precision-per-class). For both
            # cases, round each measure to one digit:

            res_rnd[meas_nm] = round(meas_val, 1) if isinstance(meas_val, float) \
                else meas_val.round(1)

        ir_measures_skel = {
            'col_header': ['precision', 'recall', 'f1'],
            'row_labels': ['macro', 'micro', 'weighted'],
            'rows': [
                [res_rnd['prec_macro'], res_rnd['recall_macro'],
                 res_rnd['f1_macro']],
                [res_rnd['prec_micro'], res_rnd['recall_micro'],
                 res_rnd['f1_micro']],
                [res_rnd['prec_weighted'], res_rnd['recall_weighted'],
                 res_rnd['f1_weighted']]
            ]
        }

        ir_per_class_rows = [
            [prec_class, recall_class, f1_class]
            for prec_class, recall_class, f1_class
            in zip(res_rnd['prec_by_class'],
                   res_rnd['recall_by_class'],
                   res_rnd['f1_by_class'])
        ]
        ir_by_class_skel = {
            'col_header': ['precision', 'recall', 'f1'],
            'row_labels': self.class_names,
            'rows': ir_per_class_rows
        }

        accuracy_skel = {
            'col_header': ['accuracy', 'balanced_accuracy'],
            'row_labels': ['Overall'],
            'rows': [[res_rnd['accuracy'], res_rnd['balanced_accuracy']]]
        }

        ir_measures_tbl = GithubTableMaker.make_table(ir_measures_skel,
                                                      sep_lines=False)
        ir_by_class_tbl = GithubTableMaker.make_table(ir_by_class_skel,
                                                      sep_lines=False)
        accuracy_tbl = GithubTableMaker.make_table(accuracy_skel,
                                                   sep_lines=False)

        # Write the markup tables to Tensorboard:
        self.writer.add_text('Information retrieval measures',
                             ir_measures_tbl,
                             global_step=0)
        self.writer.add_text('Per class measures',
                             ir_by_class_tbl,
                             global_step=0)
        self.writer.add_text('Accuracy', accuracy_tbl, global_step=0)

        return res_series
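
# Standalone sketch (not part of the class above) illustrating the
# macro vs. micro averaging used by the measures:
from sklearn.metrics import precision_score

toy_labels = [0, 0, 1, 2]
toy_preds = [0, 1, 1, 2]
# macro: unweighted mean of per-class precisions -> (1.0 + 0.5 + 1.0) / 3
print(precision_score(toy_labels, toy_preds, average='macro', zero_division=0))
# micro: precision over all decisions pooled -> 3 / 4
print(precision_score(toy_labels, toy_preds, average='micro', zero_division=0))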
Example #20
class Transect(sm.CustomObject):
    base_type = "transect"
    type = "transect"
    datum = "top of face"

    def __init__(self, **kwargs):
        super(Transect, self).__init__()
        self._locs = OrderedDict()
        self.name = kwargs.get("name", None)
        start = kwargs.get("start", (0, 0))  # coords (lat, long)
        end = kwargs.get("end", (0, 0))
        self.s_coords = Coords(lat=start[0], lon=start[1])
        self.e_coords = Coords(lat=end[0], lon=end[1])
        self.ug_values = []
        self.ug_xs = []
        self.h_face = kwargs.get("h_face", None)
        self.av_ground_slope = kwargs.get("av_ground_slope", None)
        self._extra_class_inputs = [
            "locs", "start", "end", "ug_values", "ug_xs", "h_face",
            "av_ground_slope", "datum"
        ]
        self.inputs = self.inputs + self._extra_class_inputs

    def add_cpt_by_coords(self, cpt, coords, **kwargs):

        esp = kwargs.get("esp", None)
        loc = Loc(cpt=cpt, name=cpt.file_name, esp=esp)
        loc.coords = coords
        return self.add_loc_by_coords(coords, loc)

    def add_cpt(self, cpt, x, **kwargs):
        offset = kwargs.get("offset", None)
        off_dir = kwargs.get("off_dir", "-")
        esp = kwargs.get("esp", None)
        loc = Loc(cpt=cpt,
                  name=cpt.file_name,
                  offset=offset,
                  off_dir=off_dir,
                  esp=esp)
        return self.add_loc(x, loc)

    def get_cpt_names(self):
        _cpts = []
        for x in self.locs:
            _cpts.append(self.locs[x].cpt_file_name)
        return _cpts

    def set_ids(self):
        for i, loc_name in enumerate(self.locs):
            self.locs[loc_name].id = i + 1
            if self.locs[loc_name].soil_profile is not None:
                self.locs[loc_name].soil_profile.id = i + 1

    def to_dict(self, extra=(), **kwargs):
        outputs = OrderedDict()
        skip_list = ["locs"]
        if hasattr(self, "inputs"):
            full_inputs = list(self.inputs) + list(extra)
        else:
            full_inputs = list(extra)
        for item in full_inputs:
            if item not in skip_list:
                value = self.__getattribute__(item)
                outputs[item] = sf.collect_serial_value(value)
        return outputs

    def add_to_dict(self, models_dict, **kwargs):
        if self.base_type not in models_dict:
            models_dict[self.base_type] = OrderedDict()
        outputs = self.to_dict(**kwargs)
        models_dict[self.base_type][self.unique_hash] = outputs
        for loc_num in self.locs:
            self.locs[loc_num].add_to_dict(
                models_dict,
                parent_dict=models_dict[self.base_type][self.unique_hash])

    def reset_cpt_folder_paths(self, folder_path):
        for loc_name in self.locs:
            self.locs[loc_name].cpt_folder_path = folder_path

    @property
    def tran_line(self):
        try:
            from liquepy.spatial.map_coords import Line
            return Line(self.s_coords, self.e_coords)
        except ImportError as e:
            warnings.warn('Need to import spatial packages', stacklevel=3)
            warnings.warn(str(e), stacklevel=3)
            return None

    @property
    def x_end(self):
        return self.tran_line.dist

    @property
    def locs(self):
        return self._locs

    def add_loc(self, x: float, loc):
        loc.x = x
        self._locs[x] = loc
        self._sort_locs()
        return self._locs[x]

    def add_loc_by_coords(self, coords, loc):
        from liquepy.spatial import map_coords
        if not sum(self.start) or not sum(self.end):
            raise ValueError("start and end coordinates must be set")
        loc.x = map_coords.calc_proj_line_dist(self.tran_line, coords)
        loc.offset = map_coords.calc_line_offset(self.tran_line, coords)
        loc.off_dir = map_coords.calc_line_off_dir(self.tran_line, coords)
        self._locs[loc.x] = loc
        self._sort_locs()
        return self._locs[loc.x]

    @locs.setter
    def locs(self, locs):
        for loc_id in locs:
            loc_dist = locs[loc_id]["x"]
            self.locs[loc_dist] = Loc()
            sm.add_to_obj(self.locs[loc_dist], locs[loc_id])

    def _sort_locs(self):
        """
        Sort the locs by distance.
        :return:
        """
        self._locs = OrderedDict(sorted(self._locs.items(),
                                        key=lambda t: t[0]))

    def get_loc_by_name(self, name):
        for x in self.locs:
            if self.locs[x].name == name:
                return self.locs[x]

    def get_loc_by_dist(self, dist):
        return self.locs[dist]

    def loc(self, index):
        index = int(index)
        if index == 0:
            raise KeyError("index=%i, but must be 1 or greater." % index)
        return list(self._locs.values())[index - 1]

    def remove_loc(self, loc_int):
        key = list(self._locs.keys())[loc_int - 1]
        del self._locs[key]

    def replace_loc(self, loc_int, soil):
        key = list(self._locs.keys())[loc_int - 1]
        self._locs[key] = soil

    @property
    def start(self):
        return self.s_coords.as_tuple

    @property
    def end(self):
        return self.e_coords.as_tuple

    @start.setter
    def start(self, values):
        self.s_coords = Coords(lat=values[0], lon=values[1])

    @end.setter
    def end(self, values):
        self.e_coords = Coords(lat=values[0], lon=values[1])
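
# Minimal usage sketch (hypothetical coordinates; Coords, Loc, cpt objects
# and the sm/sf helpers come from the surrounding package):
transect = Transect(name="T1", start=(34.05, -118.25), end=(34.06, -118.24))
print(transect.start, transect.end)  # prints the lat/lon tuples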
Example #21
def build_experience_buffer(grid, Vx_rzns, Vy_rzns, paths, sampling_interval,
                            num_of_paths, num_actions):
    exp_buffer_all_trajs = []
    for k in range(num_of_paths):
        exp_buffer_kth_traj = []
        Vxt = Vx_rzns[k, :, :]
        Vyt = Vy_rzns[k, :, :]
        trajectory = paths[0, k]

        # append starting point to traj
        coord_traj = [(trajectory[0][0], trajectory[0][1])]
        s_i, s_j = compute_cell(grid, trajectory[0])
        state_traj = [(s_i, s_j)]

        # Build a dict mapping states to coords, and choose the middle
        # coord of each state to append to the traj:
        traj_dict = OrderedDict()
        for j in range(0, len(trajectory)):
            s_i, s_j = compute_cell(grid, trajectory[j])
            s = (s_i, s_j)
            c = (trajectory[j][0], trajectory[j][1])
            if not traj_dict.get(s):
                traj_dict[s] = [c]
            else:
                traj_dict[s].append(c)
        # Drop the first and last keys (states); they are
        # appended separately:
        keys = list(traj_dict.keys())[1:-1]

        for s in keys:
            state_traj.append(s)
            n_coords = len(traj_dict[s])
            coord_traj.append(traj_dict[s][n_coords // 2])

        coord_traj.append((trajectory[-1][0], trajectory[-1][1]))
        s_i, s_j = compute_cell(grid, trajectory[-1])
        state_traj.append((s_i, s_j))

        state_traj.reverse()
        coord_traj.reverse()

        #build buffer
        print("check warning: ", k)
        # print("s1, p1, p2, Vxt, Vyt")
        for i in range(len(state_traj) - 1):
            s1 = state_traj[i + 1]
            s2 = state_traj[i]
            # t ,m,n=s1
            m, n = s1
            p1 = coord_traj[i + 1]
            p2 = coord_traj[i]
            """COMMENTING THIS STATEMENT BELOW"""
            # if (s1[1],s1[2])!=(s2[1],s2[2]):
            #vx=Vxt[t,i,j]
            vx = Vxt[m, n]
            vy = Vyt[m, n]
            # print(s1,p1,p2, vx, vy)
            a1 = Calculate_action(s1, p1, p2, vx, vy, grid)
            r1 = grid.move_exact(a1, vx, vy)
            exp_buffer_kth_traj.append([s1, a1, r1, s2])

        #append kth-traj-list to master list
        exp_buffer_all_trajs.append(exp_buffer_kth_traj)

    return exp_buffer_all_trajs
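
# Each entry appended to the buffer is a standard RL experience tuple:
#   [s1, a1, r1, s2] -> [state, action, reward, next_state]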