def test_to_JSON():
    target = TargetSize(length=300, quantity=4)
    assert target.json() == '{"length": 300, "quantity": 4}'

    job = Job(max_length=1200,
              target_sizes=(target, TargetSize(length=200, quantity=3)),
              cut_width=0)
    assert job.json() == '{"max_length": 1200, "target_sizes": [{"length": 300, "quantity": 4}, ' \
                         '{"length": 200, "quantity": 3}], "cut_width": 0}'
def test_compress():
    job = Job(max_length=100,
              target_sizes=(TargetSize(length=100, quantity=2),
                            TargetSize(length=100, quantity=3)),
              cut_width=0)
    job.compress()

    compressed_job = Job(max_length=100,
                         target_sizes=[TargetSize(length=100, quantity=5)],
                         cut_width=0)
    assert job == compressed_job
def test_job_generator():
    job = Job(max_length=1550,
              target_sizes=(TargetSize(length=500, quantity=4),
                            TargetSize(length=200, quantity=3),
                            TargetSize(length=100, quantity=2)),
              cut_width=5)

    resulting_list = list(job.get_sizes())
    assert resulting_list == [500, 500, 500, 500, 200, 200, 200, 100, 100]
def test_job_dunders():
    job1 = Job(max_length=100,
               target_sizes=(TargetSize(length=100, quantity=2),
                             TargetSize(length=200, quantity=1)),
               cut_width=0)
    job2 = Job(max_length=100,
               target_sizes=(TargetSize(length=100, quantity=2),
                             TargetSize(length=200, quantity=1)),
               cut_width=0)

    assert job1 == job2
    # __len__ counts individual target pieces (2 + 1), not distinct sizes
    assert len(job1) == 3
def post_solve(job: Job):
    assert job.__class__ == Job
    assert job.valid()

    solved: Result = distribute(job)
    assert solved.valid()
    return solved
def test_from_JSON():
    json_file = Path("./tests/data/in/testjob.json")
    assert json_file.exists()

    with open(json_file, "r") as encoded_job:
        job = Job.parse_raw(encoded_job.read())

    assert job.__class__ == Job
    assert len(job) > 0
def distribute(job: Job) -> Result:
    # optimize before distributing
    job.compress()

    result: Result

    time: float = perf_counter()
    if len(job) <= n_max_precise:
        result = _solve_bruteforce(job)
    elif len(job) <= n_max_good:
        result = _solve_FFD(job)
    elif len(job) <= n_max:
        result = _solve_gapfill(job)
    else:
        raise OverflowError("Input too large")
    result.time_us = int((perf_counter() - time) * 1000 * 1000)

    return result
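# Hedged usage sketch (not part of the original suite): exercising distribute()
# on a tiny job, which should fall through to the bruteforce branch above
# (assuming n_max_precise is at least a few pieces) and record a runtime.
# Assumes Result.valid() and Result.time_us behave as they are used elsewhere
# in this file.
def test_distribute_small_job():
    job = Job(max_length=100,
              target_sizes=(TargetSize(length=50, quantity=2),),
              cut_width=0)
    result = distribute(job)
    assert result.valid()
    assert result.time_us >= 0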
def random_job() -> Job:
    max_length = random.randint(1000, 2000)
    cut_width = random.randint(0, 10)

    n_sizes = random.randint(5, 10)
    sizes = []
    for _ in range(n_sizes):
        sizes.append(TargetSize(length=random.randint(10, 1000),
                                quantity=random.randint(1, 20)))

    return Job(max_length=max_length, target_sizes=sizes, cut_width=cut_width)
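# Hedged sketch (assumption, not original code): random_job() should always
# produce jobs that pass their own validity check, mirroring test_invalid() below.
def test_random_job_valid():
    for _ in range(10):
        assert random_job().valid()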
def test_full_model():
    json_job = Path("./tests/data/in/testjob.json")
    assert json_job.exists()
    json_result = Path("./tests/data/out/testresult.json")

    with open(json_job, "r") as encoded_job:
        job = Job.parse_raw(encoded_job.read())

    solved = distribute(job)
    encoded_solved = solved.json()
    assert len(encoded_solved) > 20

    with open(json_result, "r") as encoded_result:
        result = Result.parse_raw(encoded_result.read())
    assert solved == result
def test_benchmark():
    job = Job(max_length=1200,
              target_sizes=(TargetSize(length=300, quantity=3),
                            TargetSize(length=200, quantity=3),
                            TargetSize(length=100, quantity=3)),
              cut_width=0)

    start = time.perf_counter()
    solved_bruteforce = _solve_bruteforce(job)
    t_bruteforce = time.perf_counter() - start

    start = time.perf_counter()
    solved_gapfill = _solve_gapfill(job)
    t_gapfill = time.perf_counter() - start

    start = time.perf_counter()
    solved_FFD = _solve_FFD(job)
    t_FFD = time.perf_counter() - start

    # bruteforce should find the better result at the cost of increased runtime
    print(
        f"[Runtime] Bruteforce: {t_bruteforce:.2f}s, Gapfill: {t_gapfill:.2f}s, FFD: {t_FFD:.2f}s"
    )
def _solve_bruteforce(job: Job) -> Result:
    # failsafe
    if len(job) > 12:
        raise OverflowError("Input too large")

    # find every possible ordering (n! elements)
    all_orderings = permutations(job.get_sizes())
    # TODO: remove duplicates (due to "quantity")

    # "infinity"
    min_trimmings = len(job) * job.max_length
    min_stocks: List[List[int]] = []

    # possible improvement: distribute combinations to multiprocessing worker threads
    for combination in all_orderings:
        stocks, trimmings = _split_combination(combination, job.max_length, job.cut_width)
        if trimmings < min_trimmings:
            min_stocks = stocks
            min_trimmings = trimmings

    return Result(solver_type=SolverType.bruteforce, lengths=min_stocks)
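# Hedged sketch (assumption, not original code): the failsafe above caps the
# input because the number of orderings grows factorially; 12 pieces already
# mean 12! ≈ 4.8e8 permutations. A job with 13 total pieces should therefore
# be rejected. Requires pytest; the test name is hypothetical.
import pytest

def test_bruteforce_failsafe():
    too_big = Job(max_length=100,
                  target_sizes=(TargetSize(length=10, quantity=13),),
                  cut_width=0)
    with pytest.raises(OverflowError):
        _solve_bruteforce(too_big)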
def generate_testjob():
    json_job = Path("./tests/data/in/testjob.json")
    assert json_job.exists()

    with open(json_job, "r") as encoded_job:
        return Job.parse_raw(encoded_job.read())
def test_invalid():
    job1 = Job(max_length=0,
               target_sizes=(TargetSize(length=100, quantity=2),
                             TargetSize(length=200, quantity=1)),
               cut_width=0)
    assert not job1.valid()