# Assumed module-level imports for this method: numpy as np and
# itertools.combinations imported as itercomb.
def first_combination(self, btc, s_date, e_date):
    first_selections = []
    # baseline (non-offer) workload already committed to each production line
    wl_base_L1 = btc[(btc.opportunity_kind != 'offer') &
                     (btc.cable_production_line == 'Line1')].workload.sum()
    wl_base_L2 = btc[(btc.opportunity_kind != 'offer') &
                     (btc.cable_production_line == 'Line2')].workload.sum()

    ###################################
    ### MIDDLE SIZE OFFER SELECTION ###
    ###################################
    offers = btc[(btc.opportunity_kind == 'offer') &
                 (btc.opportunity_revenue >= self.MID_SIZE_LEVEL)]
    # keep only offers with both revenue and margin populated
    offers = offers[offers.opportunity_revenue.notnull()]
    offers = offers[offers.opportunity_margin.notnull()]
    if len(offers) < 1:
        return btc[btc.opportunity_kind != 'offer'].opportunity_id.tolist()

    opportunity_ids = list(set(offers.opportunity_id.tolist()))
    proj_id = btc[btc.opportunity_kind != 'offer'].opportunity_id.tolist()
    days = (e_date - s_date).days
    for i in np.arange(0, len(opportunity_ids) + 1):
        combinations = list(itercomb(opportunity_ids, i))
        for element in combinations:
            dfa = btc[btc.opportunity_id.isin(list(element))]
            # Line 1 takes low-voltage, small-area cables; higher voltage,
            # larger area, or segmented ('SEG') cables go to Line 2
            sel_off_L1 = (dfa.cable_voltage <= self.MAX_VOLTAGE_L1) & \
                         (dfa.cable_area <= self.MAX_AREA_L1)
            sel_off_L2 = (dfa.cable_voltage > self.MAX_VOLTAGE_L1) | \
                         (dfa.cable_area > self.MAX_AREA_L1) | \
                         (dfa.cable_kind == 'SEG')
            off_L1_workload = dfa[sel_off_L1].workload.sum()
            off_L2_workload = dfa[sel_off_L2].workload.sum()
            L1_workload = off_L1_workload + wl_base_L1
            L2_workload = off_L2_workload + wl_base_L2
            # keep the combination only if each line and their sum stay
            # below the available capacity (days * load * overbooking factor)
            if (L1_workload < days * self.MAX_LOAD_L1 * self.OVER_FACTOR) and \
               (L2_workload < days * self.MAX_LOAD_L2 * self.OVER_FACTOR) and \
               (L1_workload + L2_workload) < days * self.OVER_FACTOR * \
                   (self.MAX_LOAD_L1 + self.MAX_LOAD_L2):
                first_selections.append(list(element) + proj_id)
    return first_selections
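# A minimal call sketch (not from the original source): since the enclosing
# class is not shown, a stand-in namespace carries only the attributes that
# first_combination reads, and the method is called as a plain function.
# All names and values below are illustrative assumptions.
from datetime import date
from types import SimpleNamespace

import pandas as pd

planner = SimpleNamespace(MID_SIZE_LEVEL=1e5, MAX_VOLTAGE_L1=150,
                          MAX_AREA_L1=1200, MAX_LOAD_L1=8.0,
                          MAX_LOAD_L2=8.0, OVER_FACTOR=1.1)

btc = pd.DataFrame([
    {'opportunity_id': 'P1', 'opportunity_kind': 'project',
     'cable_production_line': 'Line1', 'workload': 40,
     'opportunity_revenue': 2.0e6, 'opportunity_margin': 0.20,
     'cable_voltage': 110, 'cable_area': 800, 'cable_kind': 'RND'},
    {'opportunity_id': 'O1', 'opportunity_kind': 'offer',
     'cable_production_line': 'Line2', 'workload': 25,
     'opportunity_revenue': 5.0e5, 'opportunity_margin': 0.15,
     'cable_voltage': 220, 'cable_area': 1600, 'cable_kind': 'SEG'},
])

selections = first_combination(planner, btc,
                               s_date=date(2024, 1, 1),
                               e_date=date(2024, 1, 31))
print(selections)  # e.g. [['P1'], ['O1', 'P1']]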
import numpy as np
import pandas as pd
from itertools import combinations as itercomb

# MAX_VOLTAGE_L1, MAX_AREA_L1, MAX_LOAD_L1, MAX_LOAD_L2 and OVER_FACTOR are
# assumed to be module-level constants.


def first_selection(batches, clusterdays):
    '''
    Arguments
    ---------
    batches : a list of dictionaries containing the first batches to be
        produced in a given cluster; dictionary keys are: 'id', 'workload',
        'opp_kind', 'cable_kind', 'area', 'voltage', 'line', 'revenue',
        'margin'
    clusterdays : the number of days in the given cluster

    Returns
    -------
    first_selections : list of lists of ids
    '''
    first_selections = []
    df = pd.DataFrame(batches)
    print(df.revenue.min())

    # check on the margin value: drop offers with a missing margin
    df = df[~(df.margin.isnull() & (df.opp_kind == 'offer'))]
    sel_proj = df.opp_kind.isin(['project', 'internal'])
    sel_proj_L1 = sel_proj & (df.line == 'Line1')
    sel_proj_L2 = sel_proj & (df.line == 'Line2')
    proj_L1_tot_workload = np.sum(df[sel_proj_L1]['workload'])
    proj_L2_tot_workload = np.sum(df[sel_proj_L2]['workload'])
    proj_id = df.id[sel_proj_L1].tolist() + df.id[sel_proj_L2].tolist()
    offers = df.id[df.opp_kind == 'offer']

    for IOffers in np.arange(len(offers), 0, -1):
        combinations = list(itercomb(offers, IOffers))
        for element in combinations:
            dfa = df[df.id.isin(element)]
            sel_off_L1 = (dfa.opp_kind == 'offer') & \
                         (dfa.voltage <= MAX_VOLTAGE_L1) & \
                         (dfa.area <= MAX_AREA_L1)
            sel_off_L2 = (dfa.opp_kind == 'offer') & \
                         ((dfa.voltage > MAX_VOLTAGE_L1) |
                          (dfa.area > MAX_AREA_L1) |
                          (dfa.cable_kind == 'SEG'))
            off_L1_tot_workload = np.sum(dfa.workload[sel_off_L1])
            off_L2_tot_workload = np.sum(dfa.workload[sel_off_L2])
            L1_tot_workload = off_L1_tot_workload + proj_L1_tot_workload
            L2_tot_workload = off_L2_tot_workload + proj_L2_tot_workload
            # keep the combination only if each line and their sum stay
            # below the cluster capacity (days * load * overbooking factor)
            if (L1_tot_workload < clusterdays * MAX_LOAD_L1 * OVER_FACTOR) and \
               (L2_tot_workload < clusterdays * MAX_LOAD_L2 * OVER_FACTOR) and \
               (L1_tot_workload + L2_tot_workload) < clusterdays * OVER_FACTOR \
                   * (MAX_LOAD_L1 + MAX_LOAD_L2):
                first_selections.append(dfa['id'].tolist() + proj_id)
    return first_selections
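# A minimal call sketch (not from the original source), assuming it lives in
# the same module as first_selection. The threshold values are illustrative
# stand-ins for the module-level constants, and the two batch records are
# made up to cover the dictionary keys the function uses.
MAX_VOLTAGE_L1, MAX_AREA_L1 = 150, 1200
MAX_LOAD_L1, MAX_LOAD_L2, OVER_FACTOR = 8.0, 8.0, 1.1

example_batches = [
    {'id': 1, 'workload': 40, 'opp_kind': 'project', 'cable_kind': 'RND',
     'area': 800, 'voltage': 110, 'line': 'Line1', 'revenue': 1.0e6,
     'margin': 0.20},
    {'id': 2, 'workload': 25, 'opp_kind': 'offer', 'cable_kind': 'SEG',
     'area': 1600, 'voltage': 220, 'line': 'Line2', 'revenue': 5.0e5,
     'margin': 0.15},
]
print(first_selection(example_batches, clusterdays=30))  # e.g. [[2, 1]]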
# Assumed module-level imports/helpers: numpy as np, matplotlib.pyplot as plt,
# a stereonet library bound to st (e.g. mplstereonet), itertools.combinations
# as itercomb, plus the local env module and the line_above_plane and
# line_in_cone helpers.
def wedgeFailure(sstr, sdip, jfriction, jstr, jdip, to_plot=False):
    """
    Evaluates wedge failure of joints vis-a-vis a slope face with a given
    strike and dip, such that a line defined by the intersection of two
    joints plots 1) on the convex side of the wedge daylight envelope, and
    2) within the wedge friction envelope. For each line, it conservatively
    uses the smaller friction angle (jfriction).

    Parameters
    ----------
    sstr : int or float
        The strike of the slope face in degrees, with dip direction
        indicated by the azimuth (e.g. 315 vs. 135) specified following
        the "right hand rule".
    sdip : int or float
        The dip of the slope face in degrees.
    jfriction : int, float, or array of int or float
        The friction angle of the joint plane in degrees.
    jstr : int, float, or array of int or float
        The strike of the joint plane in degrees, with dip direction
        indicated by the azimuth (e.g. 315 vs. 135) specified following
        the "right hand rule".
    jdip : int, float, or array of int or float
        The dip of the joint plane in degrees.

    Returns
    -------
    wedgeFail : boolean array with one entry per unique pair of joints
        Indicates if the corresponding joint-pair intersection will allow
        wedge failure.
    """
    # ensure jstr, jdip, and jfriction are 1-d arrays
    jstr, jdip = np.atleast_1d(jstr, jdip)
    try:
        len(jfriction)
        uniformFriction = False
    except TypeError:
        jfriction = jfriction * np.ones(len(jstr))
        uniformFriction = True

    # get plunge and bearing of unique joint pair intersections
    c = np.array(list(itercomb(range(len(jstr)), 2)))
    wl_plunge, wl_bearing = st.plane_intersection(jstr[c[:, 0]], jdip[c[:, 0]],
                                                  jstr[c[:, 1]], jdip[c[:, 1]])

    # get minimum jfriction for each joint pair
    wl_friction = np.min(np.vstack([jfriction[c[:, 0]], jfriction[c[:, 1]]]),
                         axis=0)

    # determine daylight and friction envelopes
    wde_strike, wde_dip = env.wedge_daylight(sstr, sdip, False)
    wfe_plunge, wfe_bearing, wfe_angle = env.wedge_friction(wl_friction, False)

    # evaluate if wedge lines are within daylight and friction envelopes (cones)
    convexDaylight = np.empty(len(wl_plunge))
    inFriction = np.empty(len(wl_plunge))
    for a in range(len(wl_plunge)):
        convexDaylight[a] = line_above_plane(wde_strike, wde_dip,
                                             wl_plunge[a], wl_bearing[a])
        inFriction[a] = line_in_cone(wfe_plunge[a], wfe_bearing[a],
                                     wfe_angle[a], wl_plunge[a], wl_bearing[a])
    wedgeFail = (convexDaylight == True) & (inFriction == True)

    # plotting results
    if uniformFriction and to_plot:
        env.setup_axes(sstr, sdip, jfriction[0], failure='wedge', to_plot=True)
        plt.gca().line(wl_plunge[~wedgeFail], wl_bearing[~wedgeFail],
                       color='0.5', marker='.')
        plt.gca().line(wl_plunge[wedgeFail], wl_bearing[wedgeFail],
                       color='r', marker='.')
    return wedgeFail
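# A minimal call sketch (not from the original source), assuming the module's
# own imports and local helpers listed above are available. Three joint sets
# are checked against a slope face striking 090 and dipping 60 degrees, with
# a single 30-degree friction angle applied to every joint; all values are
# illustrative only.
fails = wedgeFailure(sstr=90, sdip=60, jfriction=30,
                     jstr=[120, 200, 350], jdip=[45, 50, 35])
print(fails)  # one boolean per unique joint pair (3 pairs for 3 joints)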
# Assumed module-level imports/helpers for this method: pandas as pd,
# numpy as np, copy.copy, dateutil.relativedelta.relativedelta,
# itertools.combinations as itercomb, and a diff_month(end, start) helper.
def filling(self, baseline):
    batches = self.get_batches()
    line_1, line_2 = self.preliminary_schedule(baseline, batches=batches)
    line_1 = pd.DataFrame(line_1)
    line_2 = pd.DataFrame(line_2)
    init_start = min(line_1.start_date.tolist() + line_2.start_date.tolist())
    init_end = max(line_1.end_date.tolist() + line_2.end_date.tolist())
    n_months = diff_month(init_end, init_start)
    start = copy(init_start)

    # for i_month in range(0, n_months):
    for i_month in range(0, self.N_CLUSTERS):
        # 1. initialize new step
        baseline_update = []
        end = start + relativedelta(months=1)

        # 2. fillers selection for month i_month
        select = (batches.opportunity_revenue <= self.MID_SIZE_LEVEL) & \
                 (batches.opportunity_kind == 'offer') & \
                 (batches.delivery_date >= start) & \
                 (batches.delivery_date < end) & \
                 (batches.number == 0)
        batch = batches[select]
        print('Number of Fillers:', len(set(batch.opportunity_id.tolist())))
        start = copy(end)

        # 3. loop over the baselines
        if not isinstance(baseline[0], list):
            baseline = [baseline]
        for bsl in baseline:
            baseline_update.append(bsl)
            line_1, line_2 = self.preliminary_schedule(bsl, batches=batches)
            line_1 = pd.DataFrame(line_1)
            line_2 = pd.DataFrame(line_2)
            al1 = line_1[line_1.month == i_month].delay.values
            al2 = line_2[line_2.month == i_month].delay.values
            avail_time = 0
            if al1 < 0:
                avail_time += abs(al1)
            if al2 < 0:
                avail_time += abs(al2)

            # 3.5 list for the primary key
            ids = list(set(batch.opportunity_id.tolist()))

            # 4. combinations -> start from zero
            if len(ids) <= 1:
                baseline_update.append(bsl + ids)
            else:
                wkl = []
                for i in ids:
                    wkl.append(
                        batch[batch.opportunity_id == i].workload.sum())
                wkl = np.cumsum(np.sort(wkl))
                up_bound = np.sum(wkl < avail_time) + 1
                lo_bound = 0 if len(ids) - 1 < 0 else len(ids) - 1
                for i in range(lo_bound, up_bound):
                    comb_ids = list(itercomb(ids, i))
                    for el in comb_ids:
                        btc = batch[batch.opportunity_id.isin(el)]
                        btc_L1 = btc[(btc.cable_voltage <= self.MAX_VOLTAGE_L1) &
                                     (btc.cable_area <= self.MAX_AREA_L1)]
                        btc_L2 = btc[(btc.cable_voltage > self.MAX_VOLTAGE_L1) |
                                     (btc.cable_area > self.MAX_AREA_L1) |
                                     (btc.cable_kind == 'SEG')]
                        # a combination is kept for a line if its workload
                        # fits into that line's spare time for the month
                        if btc_L1.workload.sum() <= \
                                abs(line_1[line_1.month == i_month].delay.values) \
                                * self.OVER_FACTOR:
                            baseline_update.append(
                                bsl + btc_L1.opportunity_id.tolist())
                        if btc_L2.workload.sum() <= \
                                abs(line_2[line_2.month == i_month].delay.values) \
                                * self.OVER_FACTOR:
                            baseline_update.append(
                                bsl + btc_L2.opportunity_id.tolist())
        # drop duplicate candidate baselines before the next month
        baseline_update = [list(x) for x in set(tuple(x)
                                                for x in baseline_update)]
        baseline = copy(baseline_update)

    # TODO: iterative selection on the low workload combination
    if not isinstance(baseline[0], list):
        baseline = [baseline]
    return baseline
from itertools import combinations as itercomb
from typing import Generator, Iterable, Tuple, TypeVar

T = TypeVar('T')


def combinations(xs: Iterable[T]) -> Generator[Tuple[T, ...], None, None]:
    """
    Yield all combinations of the given items, in order of increasing size
    (from the empty tuple up to the full tuple).
    """
    xs = list(xs)
    for i in range(len(xs) + 1):
        for comb in itercomb(xs, i):
            yield comb
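# Quick usage check (not from the original source): the combinations are
# yielded by increasing size, starting with the empty tuple.
assert list(combinations([1, 2, 3])) == [
    (), (1,), (2,), (3,), (1, 2), (1, 3), (2, 3), (1, 2, 3),
]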