Example No. 1
    def add_unavailability(self, solver, unavailability_map):
        """
            This constraint ensures that workers cannot be assigned to tasks scheduled within their given unavailability time spans
            unavailability_map : { [worker_id]: { range: { start_time, end_time } } }
        """
        for worker in self.workers:
            if str(worker.id) in unavailability_map:
                unavailable_range = utils.get_range(unavailability_map[str(worker.id)]['range'])
                tasks_in_range = utils.get_tasks_in_range(self.tasks, unavailable_range)

                for task in tasks_in_range:
                    solver.Add(self.assignments[worker.index][task.index] == 0)
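This snippet relies on two project-local helpers, utils.get_range and utils.get_tasks_in_range, that are not shown. A minimal sketch of what they might look like, assuming the map values carry start_time/end_time keys and that tasks expose start_time/end_time attributes (both assumptions, not confirmed by the source):

def get_range(range_dict):
    # Assumption: the map value looks like {'start_time': ..., 'end_time': ...}.
    return range_dict['start_time'], range_dict['end_time']


def get_tasks_in_range(tasks, time_range):
    start_time, end_time = time_range
    # Keep every task that overlaps the unavailability window (attribute names assumed).
    return [task for task in tasks
            if task.start_time < end_time and task.end_time > start_time]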
Example No. 2
def get_work(factory_id, worker_id):
    w_id = db.assign_work(factory_id, worker_id).get('work_id')
    if w_id is None:
        return {'isWork': 0}

    f_info = db.get_factory_info(factory_id)
    f_size = f_info['file_size']
    w_size = f_info['work_size']
    a, b = utils.get_range(f_size, w_size, w_id)

    print('[INTERFACE GET WORK] done.')
    return {'isWork': 1, 'work_id': w_id, 'range_start': a, 'range_end': b}
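Here utils.get_range appears to map a work unit onto a byte range of the file. A minimal sketch under the assumption that w_id is a 0-based chunk index and the file is split into work_size-byte pieces (neither detail is confirmed by the source):

def get_range(file_size, work_size, work_id):
    # Assumption: chunks are laid out back to back and the returned bounds are inclusive.
    start = work_id * work_size
    end = min(start + work_size, file_size) - 1
    return start, end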
Example No. 3
def matching_files_by_size(torrent, exist_files):
    tor_name = torrent_utils.get_torrent_name(torrent)
    files = []

    if torrent_utils.is_single_file_torrent(torrent):
        size = torrent_utils.get_payload_size(torrent)
        files.append(TFile('', tor_name, size, 0))
    else:
        tor_files = torrent['info']['files']
        stream_offset = 0
        for file in tor_files:
            fsize = file['length']
            matched_files = get_range(exist_files, fsize, lambda x: x['len'])
            fpath = os.path.join(tor_name, *file['path'][:-1])
            fname = file['path'][-1]
            files.append(TFile(fpath, fname, fsize, stream_offset, matched_files))
            stream_offset += fsize

    return files
Example No. 4
dataloader = DataLoader(dataset, batch_size=batch_size)

net = models.UNET_1D(input_channels, depth_step, kernel_size)

writer = SummaryWriter(log_dir=f'./logs/{time}')

weight = torch.tensor(weight)

loss_fn = nn.NLLLoss(weight)
optimizer = optim.Adam(net.parameters(), lr=lr)

max_acc = 0

l, v = dataset.get_length()
num_batches = get_range(l, mini_batch)
val_batches = get_range(v, mini_batch)

for i in range(num_epochs):
    """ Set variables to zero """
    batch_losses, counter, batch_accuracy = 0, 0, 0
    recall, precision, val_recall, val_precision = 0, 0, 0, 0
    epoch_losses, epoch_counter, epoch_accuracy = 0, 0, 0

    for data in dataloader:
        """ Training """
        net.train()
        targets = data['training']['labels']
        inputs = data['training']['sequence']

        for k in num_batches:
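Both this training loop and the evaluation script in a later example iterate over num_batches = get_range(l, mini_batch). The helper itself is not shown; one plausible reading, consistent with for k in num_batches, is that it enumerates the start offset of each mini-batch:

def get_range(length, mini_batch):
    # Assumption: yields 0, mini_batch, 2*mini_batch, ... so k can be used to slice the data.
    return range(0, length, mini_batch)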
Example No. 5
import os

from sqlalchemy import create_engine

SYNAPSE_USER = os.environ["SYNAPSE_USER"]
SYNAPSE_PW = os.environ["SYNAPSE_PW"]
# The host is assumed to be read from the environment like the credentials above.
SYNAPSE_HOST = os.environ["SYNAPSE_HOST"]

driver = "ODBC+Driver+17+for+SQL+Server"
db_name = "loan"
table_name = "cleanLoan"
port = "1433"

engine = create_engine(
    f"mssql+pyodbc://{SYNAPSE_USER}:{SYNAPSE_PW}@{SYNAPSE_HOST}:{port}/{db_name}?driver={driver}"
)

connection = engine.connect()

# Get distincts and range
loan_min, loan_max = utils.get_range(connection, db_name, table_name,
                                     "loan_amnt")
inc_min, inc_max = utils.get_range(connection, db_name, table_name,
                                   "annual_inc")
app_types = utils.get_unique(connection, db_name, table_name,
                             "application_type")
purposes = utils.get_unique(connection, db_name, table_name, "purpose")
ownerships = utils.get_unique(connection, db_name, table_name,
                              "home_ownership")[1:]

# Close connection
connection.close()

# Make some calculations based on value range retrieved
loan_marks = loan_max // 4
loan_min //= loan_marks
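utils.get_range and utils.get_unique are project-local and not shown. A minimal sketch, assuming they issue simple aggregate queries over the connected database (db_name is ignored here because the engine URL already selects it):

from sqlalchemy import text

def get_range(connection, db_name, table_name, column):
    # Assumption: returns the (min, max) of a numeric column.
    row = connection.execute(
        text(f"SELECT MIN({column}), MAX({column}) FROM {table_name}")).fetchone()
    return row[0], row[1]

def get_unique(connection, db_name, table_name, column):
    # Assumption: returns the distinct values of a column as a plain list.
    rows = connection.execute(
        text(f"SELECT DISTINCT {column} FROM {table_name}")).fetchall()
    return [row[0] for row in rows]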
Example No. 6
import json
from os.path import dirname

import torch
from torch.utils.data import DataLoader

#net = torch.load('./trained_models/best_model_weight_3.pt')
path = 'logs/2012012257/best_performance.pt'
root = dirname(path)
net = torch.load(path)
net.eval()

dataset = SyntheticData('./test_data', 1)
dataloader = DataLoader(dataset, batch_size=1)

mini_batch = 512

l, v = dataset.get_length()
num_batches = get_range(l, mini_batch)

N = len(dataset)

test_results = {'PNR': [], 'Counts': [], 'TP': [], 'TC': [], 'FP': []}
for i, data in enumerate(dataloader):

    targets = data['training']['labels']
    inputs = data['training']['sequence']
    counts, ground = 0, 0
    epoch_losses, epoch_counter, epoch_accuracy, recall, precision = 0, 0, 0, 0, 0
    true_positive, total_count, false_positive, total_count_p = 0, 0, 0, 0
    for k in num_batches:
        with torch.no_grad():
            x, t = get_chunks(inputs, targets, k, mini_batch)
Example No. 7
 def check_range(self, sub_fighter, obj_fighter):
     if self.max_range > utils.get_range(
             sub_fighter.fcs.platform,
             obj_fighter.fcs.platform) > self.min_range:
         self.increase_fitness(1)
Example No. 8
 def compute_range_error_score(self, blue_plant, red_plant):
     distance_to_red = utils.get_range(blue_plant.platform, red_plant.platform)
     # Safety distance is set to 500m
     engagement_range = distance_to_red - 500.0
     # "Optimal" range is set to 800m
     return 1.0 - np.exp(-1.0 * np.power(engagement_range - 800.0, 2.0))
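In the two fighter-combat examples above, utils.get_range(a.platform, b.platform) evidently returns a distance in metres. A minimal sketch, assuming each platform exposes a position vector (the attribute name is a guess, not taken from the source):

import numpy as np

def get_range(platform_a, platform_b):
    position_a = np.asarray(platform_a.position, dtype=float)
    position_b = np.asarray(platform_b.position, dtype=float)
    # Straight-line distance between the two platforms, in the same units as the
    # positions (the 500 m / 800 m constants above suggest metres).
    return float(np.linalg.norm(position_a - position_b))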
Example No. 9
 def save(self, *args, **kwargs):
     if self.ip:
         self.range_from, self.range_to = get_range(self.ip)
         self.is_network = (self.range_to - self.range_from) > 0
     super(Address, self).save(*args, **kwargs)
Example No. 10
 def matching(self, ip):
     ip_from, ip_to = get_range(ip)
     where = "(ip = %s) OR (%s BETWEEN range_from AND range_to) OR (%s BETWEEN range_from AND range_to) OR (%s <= range_from AND %s >= range_to)"
     params = [str(ip), ip_from, ip_to, ip_from, ip_to]
     return self.get_query_set().extra(where=[where], params=params)
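In the last two examples, get_range(ip) returns (range_from, range_to) bounds that are compared and subtracted, so they are presumably stored as integers. A minimal sketch, assuming ip may be either a single address or a CIDR network:

import ipaddress

def get_range(ip):
    # strict=False lets a plain host address through as a /32 (or /128) network,
    # in which case range_from == range_to and is_network ends up False.
    network = ipaddress.ip_network(str(ip), strict=False)
    return int(network.network_address), int(network.broadcast_address)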