Example 1
def round(value, decimal=7, digits=None):
    """
    ROUND TO GIVEN NUMBER OF DIGITS, OR GIVEN NUMBER OF DECIMAL PLACES
    decimal - NUMBER OF DIGITS AFTER DECIMAL POINT (NEGATIVE IS VALID)
    digits - NUMBER OF SIGNIFICANT DIGITS (LESS THAN 1 IS INVALID)
    """
    if value is None:
        return None
    value = float(value)

    if digits is not None:
        if digits <= 0:
            if value == 0:
                return int(_round(value, digits))
            try:
                m = pow(10, math_ceil(math_log10(abs(value))))
                return int(_round(value / m, digits) * m)
            except Exception as e:
                from mo_logs import Log

                Log.error("not expected", e)
        else:
            if value == 0:
                return _round(value, digits)
            try:
                m = pow(10, math_ceil(math_log10(abs(value))))
                return _round(value / m, digits) * m
            except Exception as e:
                from mo_logs import Log
                Log.error("not expected", e)
    elif decimal <= 0:
        return int(_round(value, decimal))
    else:
        return _round(value, decimal)
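
A quick usage sketch of the significant-digits branch above (a sketch, assuming _round aliases Python's built-in round, as the surrounding imports suggest): the value is scaled into [0.1, 1), rounded, then scaled back.

from math import ceil as math_ceil, log10 as math_log10
_round = round  # assumed alias for the built-in, mirroring the source's naming

value, digits = 123.456, 2
m = pow(10, math_ceil(math_log10(abs(value))))  # m = 10**3 = 1000
print(_round(value / m, digits) * m)            # 120.0: two significant digits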
Example 2
        def showDataTableCB(result):
            if result:
                total_items = int(result.get('total_items'))
                self.m_max_page_items = int(result.get('max_page_items'))
                self.m_last_page = self.m_max_page_items and int(
                    math_ceil(
                        float(total_items) /
                        float(self.m_max_page_items))) or 0

                cur_page = result.get('cur_page') and int(
                    result.get('cur_page')) or self.scepgList.getPage()
                selected_item = result.get('selected_item') and int(
                    result.get('selected_item')) - 1 or index

                self.scepgList.clear(cur_page, self.m_last_page)
                if cur_page == -1:
                    print("[StalkerClient] got no event.")
                    return

                data = result.get('data')
                for events in data:
                    d = StalkerServiceEvent(events)
                    self.scepgList.addItem(d)
                self.scepgList.updateList(selected_item)
            else:
                print("[StalkerClient] got no event.")
Example 3
 def ceil(self):
     """
     Returns:
     ceil: Unit
         The unit rounded up to the nearest integer
     """
     return Unit(math_ceil(self.magnitude), self.unit)
Example 4
    def list(self, tag_slug, cur_p=''):
        '''
        Based on def view_cat_new(self, cat_slug, cur_p='') in cat_handler.py.
        :param tag_slug:
        :return:
        '''
        if cur_p == '' or cur_p == '-1':
            current_page_number = 1
        else:
            current_page_number = int(cur_p)
        taginfo = self.mcat.get_by_slug(tag_slug)
        if taginfo.kind == self.kind:
            pass
        else:
            return False

        num_of_tag = self.mapp2tag.count_of_certain_category(taginfo.uid)
        page_num = math_ceil(num_of_tag / config.page_num) 
        tag_name = taginfo.name

        kwd = {
            'tag_name': tag_name,
            'tag_slug': tag_slug,
            'title': tag_name,
        }

        self.render('infor/tag/list.html',
                    tag_name=tag_name,
                    infos=self.mapp2tag.query_pager_by_slug(tag_slug, current_page_number),
                    unescape=tornado.escape.xhtml_unescape,
                    kwd=kwd,
                    pager=self.gen_pager_bootstrap(tag_slug, page_num, current_page_number),
                    userinfo=self.userinfo,
                    )
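
The page-count idiom here, math_ceil(item_count / page_size), recurs throughout these examples; a minimal standalone sketch (Python 3 true division assumed):

from math import ceil as math_ceil

def page_count(total_items, per_page):
    # Any partial final page still occupies a full page.
    return math_ceil(total_items / per_page)

assert page_count(0, 20) == 0
assert page_count(41, 20) == 3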
Example 5
 async def command_error(self, ctx, error):
     if isinstance(error, commands.CommandOnCooldown):
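         # i.e. "You can only share a song again in {}." (Portuguese in the original)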
         msg = "Só podes voltar a partilhar uma música em {}.".format(
             timedelta(seconds=(math_ceil(error.retry_after))))
         embed = Embed(title="Uh oh!", description=msg, colour=0xbf0000)
         embed.set_author(icon_url=ctx.author.avatar_url, name=ctx.author)
         return await ctx.send(embed=embed)
Example 6
def updateFuture(result):
    chid, t = saveEvents(result)

    if result:
        max_page_items = 0
        after_items = 0
        cur_page = 1
        try:
            total_items = int(result.get('total_items'))
            max_page_items = int(result.get('max_page_items'))
            cur_page = result.get('cur_page') and int(
                result.get('cur_page')) or 1
            after_items = total_items - ((cur_page - 1) * max_page_items +
                                         int(result.get('selected_item')))
        except Exception as e:
            print("[StalkerClient]", e)
            cur_page = 1
            max_page_items = 0

        last_page = max_page_items and int(
            math_ceil(float(after_items) / float(max_page_items))) or 0
        last_page += 1

        for p in range(cur_page, last_page):
            result = stalker.getSimpleDataTable(
                str(chid), str(strftime('%Y-%m-%d', localtime(t))), str(p + 1))
            saveEvents(result)
Example 7
 def doGetCB(result):
     Log.i("Loading the services of %s (page %s/%s) took %s" %
           (genre.name, genre.currentPage, genre.lastPage,
            time.time() - self.begin))
     genre.channels.extend(result["js"]["data"])
     if genre.currentPage == 1:
         total_items = float(result["js"]["total_items"])
         max_page_items = float(result["js"]["max_page_items"])
         genre.lastPage = int(math_ceil(total_items / max_page_items))
     if genre.currentPage < genre.lastPage:
         genre.currentPage += 1
         doGet(genre, genre.currentPage)
         genre.callback(genre, genre.channels, isFinished=False)
     else:
         genre.callback(genre, genre.channels)
Example 8
def stft(signal, len_each_section, frac_overlap, padding, window=None):
    if not torch_is_tensor(signal):
        signal = torch_from_numpy(signal).double()
    if not signal.is_contiguous():
        LOGGER.debug('stft: signal is not contiguous')
        signal = signal.contiguous()
    if window is None:
        window = torch_ones(len_each_section, dtype=torch_float64)
    else:
        raise NotImplementedError('stft: window function {} has not been implemented'.format(window))
    shift_length = round(len_each_section * (1. - frac_overlap)) # shift_length = 2

    y, num_elements, num_beams = signal.shape

    num_frames = math_ceil((y - len_each_section + 1) / shift_length)

    startLocs = torch_arange(0, num_frames*shift_length, shift_length)

    num_elements_beams = num_elements*num_beams
    freq = torch_arange(padding)/padding
    # CHANGED: Recast stft result to float
    signal = signal.double()
    signal_stft = torch_stft(signal.view(y, num_elements_beams).permute(1, 0),
                             len_each_section, window=window,
                             hop_length=shift_length, center=False,
                             onesided=False, normalized=False,
                             pad_mode='constant') \
                             .float() \
                             .permute(1, 2, 0, 3) \
                             .view(len_each_section, num_frames, num_elements, num_beams, 2)

    del signal
    return {
        'stft': signal_stft,
        'freqs': freq,
        'startOffsets': startLocs,
        'len_each_section': len_each_section,
        'padding': padding,
        'win_info': window,
        'frac_overlap': frac_overlap,
        'shift_length': shift_length,
    }
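
The num_frames formula above counts hop-spaced windows that fit entirely inside the signal; a small sanity check with made-up dimensions:

from math import ceil as math_ceil

y, win_len, hop = 16, 4, 2
# Window starts 0, 2, ..., 12 all keep the window inside the signal.
assert math_ceil((y - win_len + 1) / hop) == 7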
Example 9
def tasks():
    page = request.args.get('page', '1')
    try:
        page = int(page)
    except ValueError:
        return redirect(url_for('tasks.tasks', page=1))
    if page < 1:
        return redirect(url_for('tasks.tasks', page=1))
    nb = 20
    offset = (page - 1) * nb
    tasks = get_tasks(nb, offset)
    if page != 1 and not tasks:
        return redirect(url_for('tasks.tasks', page=1))
    nb_pages = math_ceil(get_nb_tasks() / nb)
    return render_template('tasks/tasks.html',
                           page=page,
                           nbPages=nb_pages,
                           tasks=tasks,
                           isLogged=True)
Example 10
    def render(self, *args, **kwargs):
        tag_slug = args[0]
        current = int(args[1])
        taginfo = MCategory.get_by_slug(tag_slug)
        num_of_tag = MPost2Catalog.count_of_certain_category(taginfo.uid)
        page_num = math_ceil(num_of_tag / config.CMS_CFG['list_num'])

        kwd = {
            'page_home': current > 1,
            'page_end': current < page_num,
            'page_pre': current > 1,
            'page_next': current < page_num,
        }

        return self.render_string('modules/post/tag_pager.html',
                                  kwd=kwd,
                                  cat_slug=tag_slug,
                                  pager_num=page_num,
                                  page_current=current)
Example 11
def list_all(host, token, api):
    """
        Yields all projects, groups, users, etc.
        You will need to provide the GL host, access token, and specific api url.
    """

    count = get_count(host, token, api)

    PER_PAGE = 20
    start_at = 0
    end_at = count

    total_work = end_at - start_at
    total_pages = total_work / PER_PAGE
    start_page = (start_at / PER_PAGE) + 1  # pages are 1-indexed
    end_page = int(math_ceil(float(end_at) / float(PER_PAGE)))

    #logging.info("List projects from page %d to page %d.", start_page, end_page)

    current_page = start_page

    while current_page <= end_page:
        #logging.info("Listing page %d" % current_page)
        print "Retrieving %d %s" % (PER_PAGE * current_page, api)
        query = {"page": current_page, "per_page": PER_PAGE}

        query_params = urllib.urlencode(query)

        response = generate_get_request(host, token,
                                        "%s?%s" % (api, query_params))

        data = json_loads(response.read())

        for project in data:
            # Do something with this project
            #logging.info("I have found project %s" % project["name_with_namespace"])
            yield project

        if len(data) < PER_PAGE:
            break

        current_page += 1
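
Note that this example is Python 2 (print statements, urllib.urlencode); under Python 3 the prints need parentheses and the encoding call becomes urllib.parse.urlencode.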
Example 12
        def showOrderedListCB(result):
            items = None
            if result:
                try:
                    items = result.get('data')
                    total_items = float(result.get('total_items'))

                    self.m_max_page_items = int(result.get('max_page_items'))
                    self.m_last_page = int(
                        math_ceil(total_items / float(self.m_max_page_items)))
                except Exception as e:
                    print("[StalkerClient]", e)
                    items = None

            if items:
                self.scListMode = self.MODE_SERVICE
                self.scListService[0] = id
                need2reload = False

                self.scList.clear(page, self.m_last_page)
                for service in items:
                    if isinstance(service, dict):
                        s = StalkerService(service)
                        self.scList.addItem(s)
                        if s.name == '':
                            need2reload = True
                    else:
                        need2reload = True

                if page == self.m_last_page and len(items) != int(
                        total_items % self.m_max_page_items):
                    need2reload = True

                if need2reload:
                    self.m_need2reload_queue.append((id, page))

                self.scListService[1] = page
                self.scListService[2] = index
                self.scList.updateList(index)
            else:
                self.m_need2reload_queue.append((id, page))
            self.updateInfomation()
Example 13
def parse(startups_variables):
    from jobs.models import Job
    page = 1
    current_page = get_page_with_startups(page, startups_variables)
    startups_count = current_page['data']['talent']['jobSearchResults'][
        'totalStartupCount']
    page_count = math_ceil(startups_count / 10.0)
    while page <= page_count:
        current_page = get_page_with_startups(page, startups_variables)
        startups = current_page['data']['talent']['jobSearchResults'][
            'startups']['edges']
        for startup in startups:
            jobs = startup['node']['highlightedJobListings']
            startup_slug = startup['node']['slug']
            startup_name = startup['node']['name']
            for job in jobs:
                job_posted_at = dt.date.fromtimestamp(job['liveStartAt'])
                job = get_job_details(job['id'])['data']['jobListing']

                job_skills = ','.join(
                    list(map(lambda el: el['displayName'], job['skills'])))

                job_url = f"https://angel.co/company/{startup_slug}/jobs/{job['id']}-{job['slug']}"
                job_slug = generate_slug("angel", job['id'], startup_slug,
                                         job['slug'])
                new_job, created = Job.objects.get_or_create(
                    company_name=startup_name,
                    title=job['title'],
                    source='angel',
                    description=job['description'],
                    url=job_url,
                    remote=job['remote'],
                    posted_at=job_posted_at,
                    job_type=job['jobType'],
                    salary=job['compensation'],
                    locations=','.join(job['locationNames']),
                    experience=job['yearsExperienceMin']
                    if job['yearsExperienceMin'] else 0,
                    skills=job_skills,
                    slug=job_slug)
        page += 1
Example 14
    def give_troops_to_deploy(self):
        if self.curr_phase != "deploy":
            raise ValueError('Unable to provide troops. Not the deploy phase')
        bonus_troops = 0

        i = self.players.index(self.curr_player)
        num = len([1 for r in self.curr_gamestate if r[0] == i])
        bonus_troops = min(3, int(math_ceil(num / 3)))

        continents = self.map.get('continents')
        territories = self.map.get('territories')
        '''for i,p in enumerate(self.players):
            for n,c in continents.items():
                
                territories_in_c = [t for n,t in territories.items() if continents.get(t['continent'])['id']==c['id']]
                territories_in_c_under_player_p = [t for t in territories_in_c if self.curr_gamestate[t['id']][0]==p.id]

                if(len(territories_in_c)==len(territories_in_c_under_player_p)):
                    bonus_troops+=int(c['value'])'''

        for n, c in continents.items():

            territories_in_c = [
                t for n, t in territories.items()
                if continents.get(t['continent'])['id'] == c['id']
            ]
            territories_in_c_under_curr_player = [
                t for t in territories_in_c
                if self.curr_gamestate[t['id']][0] == self.curr_player.id
            ]

            if (len(territories_in_c) == len(
                    territories_in_c_under_curr_player)):
                bonus_troops += int(c['value'])

        self.curr_player.curr_troops_num += bonus_troops

        self.update_logs(
            message="{0} received {1} new troops to deploy".format(
                self.curr_player.name, bonus_troops))
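
A quick check of the base-award arithmetic as written above (Python 3 division; under Python 2, num / 3 would truncate before the ceil):

from math import ceil as math_ceil

assert min(3, int(math_ceil(5 / 3))) == 2    # 5 territories -> 2 troops
assert min(3, int(math_ceil(11 / 3))) == 3   # larger holdings are capped at 3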
Example 15
def turn2clips_byfilesize(vid_setting):
    vid = VideoAccess(input_setting=vid_setting)
    output_pattern = vid_setting['input'].replace('.mp4', '.%02d.mp4')

    sample_url = output_pattern % 0
    vid.output(sample_url)

    vid_prop = vid.vid_prop
    sample_prop = VideoAccess.get_video_prop(sample_url)

    # 20<<20 = 20 * 1024**2, file size limit of wechat
    if int(sample_prop['file_size']) > 20 << 20:
        raise Exception('please change target file size and try again later')

    start_frame = 0
    total_frame = vid_prop['frame_count']
    frames = sample_prop['frame_count']
    number_of_clips = math_ceil(total_frame / frames)
    clips_produced = [output_pattern % 0]

    if number_of_clips < 2:
        return

    for i in range(1, number_of_clips):
        start_frame += frames
        if start_frame > total_frame:
            start_frame -= frames
            frames = total_frame - start_frame

        print('total_frame %d, start_frame %d, frames %d' %
              (total_frame, start_frame, frames))

        vid.set_output_frame(start_frame=start_frame, frames=frames)

        try:
            vid.output(output_pattern % i)
            clips_produced.append(output_pattern % i)
        except Exception:
            print('ERROR occurred while producing clip', i)
Example 16
def ceil(number):
    """Round up to the next integer.

    Arguments:
        number (float): The number to round.

    Returns:
        int: The next integer up from the argument.

    Examples:

        >>> print(ceil(1))
        1

        >>> print(ceil(1.4))
        2

        >>> print(ceil(1.5))
        2
    """
    from math import ceil as math_ceil
    return math_ceil(number)
Example 17
	def setup_canvas(self,canvas,exposure_frame_count=12,exposure_frame_weight=None,step=1,**kwargs):
		
		self.canvas_dims = canvas if isinstance(canvas,tuple) else (canvas.shape if canvas is not None else None)
		self.initialised = canvas is not None


		self.exposure_frame_count = exposure_frame_count
		
		# exposure_frame_weight is a weighting added to the frame, dependent on when it was added in the exposure period
		if exposure_frame_weight is None or len(exposure_frame_weight) < exposure_frame_count:
			print("no weights provided" if exposure_frame_weight is None else "not enough weights provided" )
			self.exposure_frame_weight = None
		else:
			e_max = max(exposure_frame_weight)
			e_min = min(exposure_frame_weight)
			if e_max == e_min:
				print("min/max weights the same")
				self.exposure_frame_weight = None
			else:
				e_mod = max(e_max,abs(e_min))
				if e_mod>1:
					self.exposure_frame_weight = [e/e_mod for e in exposure_frame_weight]
				else:
					self.exposure_frame_weight = [e for e in exposure_frame_weight]
		

		self.step = 1 if step < 1 else int(step)
		self.step_counter = -1
		self.current_canvas = -1

		#self.required_canvases = 1 + self.exposure_frame_count - self.step
		self.required_canvases = math_ceil(self.exposure_frame_count / self.step)
		print("required_canvases",self.required_canvases)

		self.canvases = [None] * self.required_canvases
		self.canvas_tracker = [0] * self.required_canvases

		if self.initialised:
			self.initialize_canvases()
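
The required_canvases computation divides the exposure window by the step size; a one-line check with assumed numbers:

from math import ceil as math_ceil

# e.g. a 12-frame exposure advancing 5 frames per step keeps
# ceil(12 / 5) = 3 canvases in flight at once.
assert math_ceil(12 / 5) == 3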
Example 18
        def showSimpleDataTableCB(result):
            if result:
                self.scepgList.clear()
                updateEvent(result)

                max_page_items = 0
                after_items = 0
                try:
                    total_items = int(result.get('total_items'))
                    max_page_items = int(result.get('max_page_items'))
                    cur_page = int(result.get('cur_page'))
                    after_items = total_items - (
                        (cur_page - 1) * max_page_items +
                        int(result.get('selected_item')))
                except Exception as e:
                    print("[StalkerClient]", e)
                    cur_page = 1
                    max_page_items = 0

                last_page = max_page_items and int(
                    math_ceil(float(after_items) / float(max_page_items))) or 0
                last_page += 1

                for p in range(cur_page, last_page):
                    self.thread.addTask(updateEvent,
                                        stalker.getSimpleDataTable,
                                        self.m_channel[0],
                                        str(strftime('%Y-%m-%d', today)),
                                        str(p + 1))

                # after 3:00 PM
                if int(today[3]) > 15:
                    tomorrow = localtime(time() + 60 * 60 * 24)
                    self.thread.addTask(updateEvent,
                                        stalker.getSimpleDataTable,
                                        self.m_channel[0],
                                        str(strftime('%Y-%m-%d',
                                                     tomorrow)), "0")
Example 19
def add_opt_in(request):
    '''
    The view function for opting in a user or admin flowspace into an experiment.
    FOR USER OPTIN:
    request.POST should contain:
    key: experiment: database id of the experiment to opt into
    FOR ADMIN OPTIN:
    request.POST should contain:
    key: experiment: database id of the experiment to opt into
    key: all flowspace fields as specified in AdminOptInForm
    '''
    profile = UserProfile.get_or_create_profile(request.user)
    
    ############################
    #      Admin Opt-In        #
    ############################   
    if (profile.is_net_admin):
        
        # find all experiments that the admin can opt into.
        all_exps = Experiment.objects.all().order_by('project_name','slice_name')
        admin_fs = AdminFlowSpace.objects.filter(user=request.user)
        exps = []

        for exp in all_exps:
            exp_fs = ExperimentFLowSpace.objects.filter(exp=exp)
            intersection = multi_fs_intersect(exp_fs,admin_fs,FlowSpace)
            if (len(intersection)>0):
                exps.append(exp)

        ######## XXX Experimental: Show allocated VLANs ######
        allocated_vlans = vlanController.get_allocated_vlans()
        requested_vlans = vlanController.get_requested_vlans_by_all_experiments()
        ########################################################################################

        assigned_priority = profile.max_priority_level - Priority.Strict_Priority_Offset - 1
        error_msg = []
        if (request.method == "POST"):
            form = AdminOptInForm(request.POST)
            if form.is_valid():
                all_this_admin_opts = UserOpts.objects.filter(user=request.user,nice=True)
                for admin_opt in all_this_admin_opts:
                    if admin_opt.priority <= assigned_priority:
                        assigned_priority = admin_opt.priority - 1
                        
                if assigned_priority <= 0:
                    error_msg.append("Too many opt-ins")
         
                # check if the selected experiment is valid:
                selected_exp_id = request.POST['experiment']
                try:
                    selexp = Experiment.objects.get(id = selected_exp_id)
                except:
                    error_msg.append("Invalid experiment selected!")
                if len(error_msg)==0:
                    requested_opt = form.get_flowspace(FlowSpace)
                    adminFS = AdminFlowSpace.objects.filter(user = request.user)
                    
                    intersected_flowspace = multi_fs_intersect([requested_opt],adminFS,FlowSpace)
                    #for fs in intersected_flowspace:
                    #    print "\n\nFLOWSPACE"
                    #    print fs.stringify()
                    #    print fs.__unicode__()
                    if len(intersected_flowspace) == 0:
                        error_msg.append("Selected flowspace doesn't have any intersection with admin FS")
                if len(error_msg)==0:
                    try:
                        [fv_args,match_list] = opt_fs_into_exp(intersected_flowspace,
                                selexp,request.user,assigned_priority,True)
                        fv = FVServerProxy.objects.all()[0]
                        try:
                            if len(fv_args) > 0:
                                returned_ids = fv.proxy.api.changeFlowSpace(fv_args)
                                for i in range(len(match_list)):
                                    match_list[i].fv_id = returned_ids[i]
                                    match_list[i].save()
                            try:
                                allopts = UserOpts.objects.filter(user = request.user).order_by('-priority')
                                for opt in allopts:
                                    this_opt_fses = opt.optsflowspace_set.all()
                                    fs_project = opt.experiment.project_name or ""
                                    fs_slice = opt.experiment.slice_name or ""
                                    fs_description = ""
                                    for fs in this_opt_fses:
                                        if fs_description != "":
                                            fs_description = fs_description + "\n%s"%fs
                                        else:
                                            fs_description = "%s"%fs
                                site_domain_url = " You may access your slice page at Expedient's site to check the granted Flowspace."
                                send_mail(
                                         settings.EMAIL_SUBJECT_PREFIX + "Your Flowspace request has been attended",
                                         "Your Flowspace request has been attended.%s\n\nProject: %s\nSlice: %s\nFlowspace granted:\n\n%s" % (site_domain_url, fs_project, fs_slice, fs_description),
                                         from_email=settings.DEFAULT_FROM_EMAIL,
                                         recipient_list= [selexp.owner_email],
                                         #recipient_list=[settings.ROOT_EMAIL],
                                 )
                            except Exception as e:
                                print "User email notification could not be sent. Exception: %s" % str(e)

                            return simple.direct_to_template(request, 
                                template ="openflow/optin_manager/opts/opt_in_successful_admin.html",
                                extra_context = {
                                    'expname':"%s:%s"%(selexp.project_name,selexp.slice_name),
                                },
                            )
                        except Exception,e:
                            import traceback
                            traceback.print_exc()
                            transaction.rollback()
                            error_msg.append("Couldn't opt into the requested experiment, Flowvisor error: %s"%str(e))
                    except Exception,e:
                        import traceback
                        traceback.print_exc()
                        transaction.rollback()
                        error_msg.append("Flowvisor not set: %s"%str(e))
                
        else:
            form = AdminOptInForm()
                        
        # if not a post request, we will start from here            
        if (len(exps)>0):
            exp_exist = True
            first_exp = exps[0].id
        else:
            exp_exist = False
            first_exp = 0
           
        upload_form = UploadFileForm()
        return simple.direct_to_template(request, 
                        template = 'openflow/optin_manager/opts/admin_opt_in.html', 
                        extra_context = {
                                'user':request.user,
                                'experiments':exps,
                                'error_msg':error_msg,
                                'exp_exist':exp_exist,
                                'first_exp':first_exp,
                                'form':form,
                                'upload_form':upload_form,
                                'requested_vlans':requested_vlans,
                                # Carolina: ceil to account for the 0-indexed range [0,4095],
                                # which has one extra element that would spill into another
                                # column (no longer 5 even columns); displace one per column to fit.
                                'vlan_list_length': math_ceil(len(allocated_vlans)/5.0),
                                'allocated_vlans': allocated_vlans,
                            },
                    )  
Example 20
def first_star(my_ts, buses_keys) -> int:
    divs = [my_ts / bus for bus in buses_keys]
    my_bus_id = buses_keys[divs.index(min(divs))]
    return ((math_ceil(min(divs)) * my_bus_id) - my_ts) * my_bus_id
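
This looks like Advent of Code 2020, day 13; a check of the departure arithmetic with that puzzle's published sample (ts=939, bus 59):

from math import ceil as math_ceil

ts, bus = 939, 59
wait = math_ceil(ts / bus) * bus - ts   # next departure at 944, 5 minutes away
assert (wait, wait * bus) == (5, 295)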
Example 21
def feature_extract(inputs, complex_graphs, test_complex_graphs, G):
    G_nodes = G.nodes()
    n_feats = inputs['feats']
    out_comp_nm = inputs['dir_nm'] + inputs['out_comp_nm']
    mode = inputs['mode']
    # mode = "non_gen" # Change to gen if you want to generate matrices

    # n_pos = len(complex_graphs)
    sizes = [len(comp) for comp in complex_graphs]

    # get quartiles
    q1 = np_percentile(sizes, 25)
    q3 = np_percentile(sizes, 75)
    max_wo_outliers = math_ceil(q3 + 4.5 *
                                (q3 - q1))  # Maximum after removing outliers

    max_size_train = max(sizes)
    recommended_max_size = min(max_size_train, max_wo_outliers)

    max_sizeF = inputs['dir_nm'] + inputs[
        'train_test_files_dir'] + "/res_max_size_search"
    with open(max_sizeF, 'wb') as f:
        pickle_dump(recommended_max_size, f)

    # n_pos_test = len(test_complex_graphs)
    sizes_test = [len(comp) for comp in test_complex_graphs]
    max_size_test = max(sizes_test)

    fig = plt.figure()
    # Plot box plot of sizes to know the outliers (for setting step size in sampling)
    sns_boxplot(sizes)
    plt.xlabel("Size")
    plt.title("Size distribution of training complexes")
    plt.savefig(out_comp_nm + "_known_train_size_dist_box_plot")
    plt.close(fig)

    fig = plt.figure()
    # Plot box plot of sizes to know the outliers (for setting step size in sampling)
    sns_boxplot(sizes + sizes_test)
    plt.xlabel("Size")
    plt.title("Size distribution of known complexes")
    plt.savefig(out_comp_nm + "_known_size_dist_box_plot")
    plt.close(fig)

    if inputs[
            'model_type'] == "tpot" and mode == "non_gen":  # CHANGE X_POS, Y_POS later !!!!
        logging_info("Reading labeled feature matrix from file...")
        # Read X,y from csv file

        y, X, X_pos, y_pos, X_neg, y_neg = read_from_csv(
            inputs['train_feat_mat'])

        y_test, X_test, X_pos_test, y_pos_test, X_neg_test, y_neg_test = read_from_csv(
            inputs['test_feat_mat'])

        logging_info("Finished reading feature matrix")
    else:

        logging_info("Feature extraction...")

        feat_list = [
            "dens", "nodes", "degree_max", "degree_mean", "degree_median",
            "degree_var", "CC_max", "CC_mean", "CC_var", "edge_wt_mean",
            "edge_wt_max", "edge_wt_var", "DC_mean", "DC_var", "DC_max", "sv1",
            "sv2", "sv3", "complex"
        ]

        X_pos = create_feat_mat(complex_graphs, n_feats)
        X_pos_test = create_feat_mat(test_complex_graphs, n_feats)

        X_allpos = np_vstack((X_pos, X_pos_test))
        n_allpos = len(X_allpos)
        y, X, X_pos, y_pos, X_neg, y_neg = extract_features(
            out_comp_nm, 'train', max_size_train, inputs, G_nodes, feat_list,
            X_pos, X_allpos, n_allpos, sizes)
        y_test, X_test, X_pos_test, y_pos_test, X_neg_test, y_neg_test = extract_features(
            out_comp_nm, 'test', max_size_test, inputs, G_nodes, feat_list,
            X_pos_test, X_allpos, n_allpos, sizes_test)

        logging_info("Finished Feature extraction")
    return max_size_train, max_size_test, X_pos_test, X_neg_test, X_test, y_test, X_pos, y_pos, X, y, X_neg, y_neg
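
The outlier cutoff used here (q3 + 4.5 * IQR, a wider fence than the textbook 1.5 * IQR) is easy to sanity-check; a sketch assuming numpy's default linear percentile interpolation:

from math import ceil as math_ceil
from numpy import percentile as np_percentile

sizes = [3, 4, 4, 5, 6, 7, 30]                # one obvious outlier
q1, q3 = np_percentile(sizes, 25), np_percentile(sizes, 75)
assert math_ceil(q3 + 4.5 * (q3 - q1)) == 18  # the bound excludes the 30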
Example 22
def main():
    parser = argparse_ArgumentParser("Input parameters")
    parser.add_argument("--input_file_name", default="input_toy.yaml", help="Input parameters file name")
    parser.add_argument("--out_dir_name", default="/results", help="Output directory name")
    parser.add_argument("--train_test_files_dir", default="", help="Train test file path")    
    parser.add_argument("--graph_files_dir", default="", help="Graph files' folder path") 
    parser.add_argument("--seed_mode", help="Seed mode - specify 'cliques' for the cliques algo")
    parser.add_argument("--max_size_thres", help="Max size threshold")    
    parser.add_argument("--n_pts", default=1, help="number of partitions (computers)")
    args = parser.parse_args()

    with open(args.input_file_name, 'r') as f:
        inputs = yaml_load(f, yaml_Loader)

    if args.seed_mode:
        inputs['seed_mode'] = args.seed_mode
    if args.max_size_thres:
        inputs['max_size_thres'] = int(args.max_size_thres)        

    # Override output directory name if same as gen
    if args.out_dir_name or inputs['out_comp_nm'] == "/results/res":
        if not os_path.exists(inputs['dir_nm'] + args.out_dir_name):
            os_mkdir(inputs['dir_nm'] + args.out_dir_name)
        inputs['out_comp_nm'] = args.out_dir_name + "/res"
        
    inputs['train_test_files_dir'] = ''
    if args.train_test_files_dir:
        if not os_path.exists(inputs['dir_nm'] + args.train_test_files_dir):
            os_mkdir(inputs['dir_nm'] + args.train_test_files_dir)
        inputs['train_test_files_dir'] = args.train_test_files_dir    

    inputs['graph_files_dir'] = ''
    if args.graph_files_dir:
        if not os_path.exists(inputs['dir_nm'] + args.graph_files_dir):
            os_mkdir(inputs['dir_nm'] + args.graph_files_dir)
        inputs['graph_files_dir'] = args.graph_files_dir             

    with open(inputs['dir_nm'] + inputs['out_comp_nm'] + "_input_sample_partition.yaml", 'w') as outfile:
        yaml_dump(inputs, outfile, default_flow_style=False)

    logging_basicConfig(filename=inputs['dir_nm'] + inputs['out_comp_nm'] + "_logs.yaml", level=logging_INFO)
        
    neig_dicts_folder = inputs['dir_nm'] +inputs['graph_files_dir']+ "/neig_dicts"

    num_comp = inputs['num_comp']
    max_size_thres = inputs['max_size_thres']
    max_size_trainF = inputs['dir_nm'] + inputs['train_test_files_dir']+ "/res_max_size_train"
    with open(max_size_trainF, 'rb') as f:
        max_size_train = pickle_load(f)

    max_size = max_size_train
    
    max_sizeF_feat = inputs['dir_nm'] + inputs['train_test_files_dir']+ "/res_max_size_search"  
    if os_path.exists(max_sizeF_feat):
        with open(max_sizeF_feat, 'rb') as f:
            max_size = pickle_load(f)
    else:            
        with open(inputs['dir_nm'] + inputs['comf_nm']) as f:
            sizes = [len(line.rstrip().split()) for line in f.readlines()]    
        max_size = max(sizes)
        q1 = np_percentile(sizes, 25)
        q3 = np_percentile(sizes, 75)
        max_wo_outliers = math_ceil(q3 + 4.5*(q3-q1))  # Maximum after removing outliers    
        max_size = min(max_size,max_wo_outliers)
        
        
    if max_size >= max_size_thres:
        max_size = max_size_thres
        
    out_comp_nm = inputs['dir_nm'] + inputs['out_comp_nm']

    with open(out_comp_nm + '_metrics.out', "a") as fid:
        print("Max number of steps for complex growth = ", max_size, file=fid)  # NOT actual max size since you merge later
    
    max_sizeF = inputs['dir_nm'] + inputs['train_test_files_dir']+ "/res_max_size_search_par"
    
    with open(max_sizeF, 'wb') as f:
        pickle_dump(max_size, f)

    seed_mode = inputs['seed_mode']

    if seed_mode == "all_nodes":
        #graph_nodes = list(myGraph.nodes())
        seed_nodes = rand_perm(os_listdir(neig_dicts_folder))
    elif seed_mode == "n_nodes":
        seed_nodes = rand_perm(os_listdir(neig_dicts_folder))[:num_comp]
    elif seed_mode == "all_nodes_known_comp":
        protlistfname = inputs['dir_nm']+ inputs['train_test_files_dir'] + "/res_protlist"
        with open(protlistfname, 'rb') as f:
            prot_list = pickle_load(f)        
        seed_nodes = list(prot_list)
    elif seed_mode == "cliques":
        myGraphName = inputs['dir_nm'] + inputs['graph_files_dir']+ "/res_myGraph"
        with open(myGraphName, 'rb') as f:
            myGraph = pickle_load(f)        
        clique_list = list(nx_find_cliques(myGraph))
        to_rem = []
        # Removing 2 node and big complexes
        for comp in clique_list:
            if len(comp) <= 2 or len(comp) >= max_size:
                to_rem.append(comp)

        for comp in to_rem:
            clique_list.remove(comp)

        seed_nodes = clique_list  # Remove duplicates later.

    # partition
    ptns = int(args.n_pts)

    nc = len(seed_nodes)
    if seed_mode == 'n_nodes':
        seed_nodes_F = out_comp_nm + "_seed_nodes"
        each_ptn = nc // ptns
        for i in range(ptns - 1):
            with open(seed_nodes_F + str(i), 'wb') as f:
                pickle_dump(seed_nodes[i * each_ptn:(i + 1) * each_ptn], f)
        with open(seed_nodes_F + str(ptns - 1), 'wb') as f:
            pickle_dump(seed_nodes[(ptns - 1) * each_ptn:], f)
    else:
        seed_nodes_dir =  inputs['dir_nm'] + inputs['graph_files_dir']+ "/" + seed_mode + "_n_pts_" + str(ptns)

        if not os_path.exists(seed_nodes_dir):
            os_mkdir(seed_nodes_dir)
            seed_nodes_F = seed_nodes_dir + "/res_seed_nodes"
            each_ptn = nc // ptns
            for i in range(ptns - 1):
                with open(seed_nodes_F + str(i), 'wb') as f:
                    pickle_dump(seed_nodes[i * each_ptn:(i + 1) * each_ptn], f)

            with open(seed_nodes_F + str(ptns - 1), 'wb') as f:
                pickle_dump(seed_nodes[(ptns - 1) * each_ptn:], f)
Example 23
def ceil(value):
    """Round value up to the nearest integer."""
    return math_ceil(float(value))
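
Because of the float() cast, this wrapper also accepts numeric strings:

assert ceil("2.1") == 3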