def check(tops_filename, pants_filename, skirts_filename):
    file_all_recorder = open(
        'datasets/final-rank/' + account.get_signal() + '/download/recorder.csv',
        'r', newline='')
    recorder_to_read = csv.reader(file_all_recorder)
    all_recorder = [row[0] for row in recorder_to_read][1:]
    file_all_recorder.close()

    file_recorder_tops = open(
        'results/' + account.get_signal() +
        '/prediction_length_results_classification/prediction_length_coat.csv',
        'r', newline='')
    recorder_to_read = csv.reader(file_recorder_tops)
    tops_recorder = [row[0].split('/')[-1] for row in recorder_to_read]
    file_recorder_tops.close()

    file_pants_recorder = open(
        'results/' + account.get_signal() +
        '/prediction_length_results_classification/prediction_length_pant.csv',
        'r', newline='')
    recorder_to_read = csv.reader(file_pants_recorder)
    pants_recorder = [row[0].split('/')[-1] for row in recorder_to_read]
    file_pants_recorder.close()

    file_skirts_recorder = open(
        'results/' + account.get_signal() +
        '/prediction_length_results_classification/prediction_length_skirt.csv',
        'r', newline='')
    recorder_to_read = csv.reader(file_skirts_recorder)
    skirts_recorder = [row[0].split('/')[-1] for row in recorder_to_read]
    file_skirts_recorder.close()

    # Newly uploaded images
    add_tops = check_add(all_recorder, tops_filename)
    add_pants = check_add(all_recorder, pants_filename)
    add_skirts = check_add(all_recorder, skirts_filename)

    # Find the image names that need to be deleted
    all_delete = []
    all_filename = tops_filename + pants_filename + skirts_filename
    for item in all_recorder:
        if item not in all_filename:
            all_delete.append(item)

    # Classify the images to delete by garment type
    delete_tops = classify_delete(all_delete, tops_recorder)
    delete_pants = classify_delete(all_delete, pants_recorder)
    delete_skirts = classify_delete(all_delete, skirts_recorder)

    return add_tops, add_pants, add_skirts, delete_tops, delete_pants, delete_skirts
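# The helpers below are referenced by check() but not shown in this section.
# These are minimal sketches of the assumed behaviour, inferred from how they
# are called above: check_add() returns the server filenames not yet in the
# local recorder, and classify_delete() keeps only the stale names belonging
# to a given category recorder. The real implementations may differ.
def check_add(all_recorder, filenames):
    # Filenames present on the server but missing from the local recorder
    return [name for name in filenames if name not in all_recorder]


def classify_delete(all_delete, category_recorder):
    # Stale filenames that belong to the given category (coat/pant/skirt)
    return [name for name in all_delete if name in category_recorder]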
def get_color_labels(address_list):
    # Collected [top_color, bottom_color] pairs, one per outfit in address_list
    color_list = []

    # Open the classification result files
    input_file_0 = open(
        './results/' + account.get_signal() +
        '/prediction_length_results_classification/prediction_length_coat.csv', 'r')
    file_to_read_0 = list(csv.reader(input_file_0))
    input_file_0.close()
    input_file_1 = open(
        './results/' + account.get_signal() +
        '/prediction_length_results_classification/prediction_length_pant.csv', 'r')
    file_to_read_1 = list(csv.reader(input_file_1))
    input_file_1.close()
    input_file_2 = open(
        './results/' + account.get_signal() +
        '/prediction_length_results_classification/prediction_length_skirt.csv', 'r')
    file_to_read_2 = list(csv.reader(input_file_2))
    input_file_2.close()

    for sublist in address_list:
        top_color = ''
        down_color = ''
        for eachline in file_to_read_0:
            if sublist[0] == eachline[0]:
                top_color = eachline[2]
                break
        if sublist[1].split('/')[3] == 'pant_length_labels':
            for eachline in file_to_read_1:
                if sublist[1] == eachline[0]:
                    down_color = eachline[2]
                    break
        if sublist[1].split('/')[3] == 'skirt_length_labels':
            for eachline in file_to_read_2:
                if sublist[1] == eachline[0]:
                    down_color = eachline[2]
                    break
        sub_color_list = [top_color, down_color]
        color_list.append(sub_color_list)
    return color_list
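# Illustrative call to get_color_labels(); the paths and colours below are
# hypothetical and only show the expected shape of the input. Each entry of
# address_list is a [top, bottom] pair of image paths as recorded in the
# prediction CSVs; the bottom path carries either 'pant_length_labels' or
# 'skirt_length_labels' as its fourth path segment.
# outfits = [['datasets/final-rank/<signal>/coat_length_labels/a.jpg',
#             'datasets/final-rank/<signal>/pant_length_labels/b.jpg']]
# colors = get_color_labels(outfits)   # e.g. [['black', 'blue']]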
def image_composite(imageList):
    baseimg = Image.open(imageList[0])
    baseimg = baseimg.convert('RGBA')
    sz = baseimg.size
    basemat = np.atleast_2d(baseimg)
    for file in imageList[1:]:
        im = Image.open(file, 'r')
        im = im.convert('RGBA')
        # Resize to the same width as the base image, preserving aspect ratio,
        # so the pixel rows can be stacked vertically
        sz2 = im.size
        if sz2 != sz:
            im = im.resize((sz[0], round(sz[0] / sz2[0] * sz2[1])),
                           Image.ANTIALIAS)
        mat = np.atleast_2d(im)
        basemat = np.append(basemat, mat, axis=0)
    report_img = Image.fromarray(basemat)
    # report_img.show()
    filename = str(uuid.uuid1())
    report_img.save('results/' + account.get_signal() + '/matching_results/' +
                    filename + '.png')
    PNG_JPG('results/' + account.get_signal() + '/matching_results/' +
            filename + '.png')
    return filename + '.jpg'
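# PNG_JPG() is called above but not defined in this section. A minimal sketch
# of the assumed behaviour, using Pillow: convert the saved PNG composite to a
# JPG with the same base name (flattened to RGB first, since JPEG has no alpha
# channel). The real helper may behave differently.
def PNG_JPG(png_path):
    img = Image.open(png_path)
    jpg_path = png_path[:-4] + '.jpg'
    img.convert('RGB').save(jpg_path, 'JPEG')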
def get_required_skirt(arrvalue):
    try:
        required_skirt_list = []
        input_file_all_skirt = open(
            'results/' + account.get_signal() +
            '/prediction_length_results_classification/prediction_length_skirt.csv', 'r')
        file_to_read_all_skirt = csv.reader(input_file_all_skirt)
        all_skirt_rows = [row for row in file_to_read_all_skirt]
        input_file_all_skirt.close()
        for row in all_skirt_rows:
            if float(row[3]) == arrvalue:
                required_skirt_list.append(row[0])
        return required_skirt_list
    except Exception:
        print('File Warning')
        return []
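# get_required_coat() and get_required_pant() are used by start_matching()
# below but not shown in this section; they presumably mirror
# get_required_skirt() over their own CSV files. A hedged sketch of the
# assumed pattern for the pant case:
def get_required_pant(arrvalue):
    try:
        required_pant_list = []
        input_file_all_pant = open(
            'results/' + account.get_signal() +
            '/prediction_length_results_classification/prediction_length_pant.csv', 'r')
        all_pant_rows = [row for row in csv.reader(input_file_all_pant)]
        input_file_all_pant.close()
        for row in all_pant_rows:
            if float(row[3]) == arrvalue:
                required_pant_list.append(row[0])
        return required_pant_list
    except Exception:
        print('File Warning')
        return []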
def prediction_length_classification():
    # Raw length-prediction results
    input_file_0 = open('results/test_length.csv', 'r')
    file_to_read_0 = csv.reader(input_file_0)

    # prediction_length_coat_temp.csv
    output_file_coat = open(
        'results/' + account.get_signal() +
        '/prediction_length_results_classification/prediction_length_coat_temp.csv',
        'a', newline='')
    file_to_write_coat = csv.writer(output_file_coat, dialect='excel')
    # prediction_length_sleeve_temp.csv
    output_file_sleeve = open(
        'results/' + account.get_signal() +
        '/prediction_length_results_classification/prediction_length_sleeve_temp.csv',
        'a', newline='')
    file_to_write_sleeve = csv.writer(output_file_sleeve, dialect='excel')
    # prediction_length_pant.csv
    output_file_pant = open(
        'results/' + account.get_signal() +
        '/prediction_length_results_classification/prediction_length_pant.csv',
        'a', newline='')
    file_to_write_pant = csv.writer(output_file_pant, dialect='excel')
    # prediction_length_skirt.csv
    output_file_skirt = open(
        'results/' + account.get_signal() +
        '/prediction_length_results_classification/prediction_length_skirt.csv',
        'a', newline='')
    file_to_write_skirt = csv.writer(output_file_skirt, dialect='excel')

    for eachline in file_to_read_0:
        result_length = []
        result_length.append('datasets/final-rank/' + eachline[0])
        result_length.append(eachline[1])
        # Scores are a ';'-separated string; compare them numerically,
        # not lexicographically
        var = [float(x) for x in eachline[2].split(';')]
        index = var.index(max(var))
        if eachline[1] == 'coat_length_labels':
            color = color_detect.return_color_2('datasets/final-rank/' + eachline[0])
            result_length.append(color)
            if index <= 1:
                result_length.append(1)
            elif 1 < index <= 3:
                result_length.append(2)
            elif 3 < index <= 5:
                result_length.append(3)
            elif 5 < index <= 7:
                result_length.append(4)
            file_to_write_coat.writerow(result_length)
        elif eachline[1] == 'sleeve_length_labels':
            result_length.append('')
            if index == 0:
                result_length.append(0)
            elif 0 < index <= 2:
                result_length.append(1)
            elif 2 < index <= 4:
                result_length.append(2)
            elif 4 < index <= 6:
                result_length.append(3)
            elif 6 < index <= 8:
                result_length.append(4)
            file_to_write_sleeve.writerow(result_length)
        elif eachline[1] == 'pant_length_labels':
            color = color_detect.return_color_2('datasets/final-rank/' + eachline[0])
            result_length.append(color)
            if index <= 1:
                result_length.append(1)
            elif 1 < index <= 3:
                result_length.append(2)
            elif 3 < index <= 5:
                result_length.append(3)
            file_to_write_pant.writerow(result_length)
        elif eachline[1] == 'skirt_length_labels':
            color = color_detect.return_color_2('datasets/final-rank/' + eachline[0])
            result_length.append(color)
            if index <= 1:
                result_length.append(1)
            elif 1 < index <= 3:
                result_length.append(2)
            elif 3 < index <= 5:
                result_length.append(3)
            file_to_write_skirt.writerow(result_length)

    input_file_0.close()
    output_file_coat.close()
    output_file_sleeve.close()
    output_file_pant.close()
    output_file_skirt.close()

    # Merge the coat and sleeve values into the final coat file
    input_file_coat_temp = open(
        'results/' + account.get_signal() +
        '/prediction_length_results_classification/prediction_length_coat_temp.csv', 'r')
    file_to_read_coat_temp = csv.reader(input_file_coat_temp)
    input_file_sleeve_temp = open(
        'results/' + account.get_signal() +
        '/prediction_length_results_classification/prediction_length_sleeve_temp.csv', 'r')
    file_to_read_sleeve_temp = csv.reader(input_file_sleeve_temp)
    output_file_coat = open(
        'results/' + account.get_signal() +
        '/prediction_length_results_classification/prediction_length_coat.csv',
        'a', newline='')
    file_to_write_coat = csv.writer(output_file_coat, dialect='excel')

    rows_coat = [row for row in file_to_read_coat_temp]
    rows_sleeve = [row for row in file_to_read_sleeve_temp]
    for i in range(len(rows_coat)):
        file_to_write_coat.writerow([
            rows_coat[i][0], 'coat_length_labels', rows_coat[i][2],
            (int(rows_coat[i][3]) + int(rows_sleeve[i][3])) / 2
        ])

    input_file_sleeve_temp.close()
    input_file_coat_temp.close()
    output_file_coat.close()
    os.remove(
        'results/' + account.get_signal() +
        '/prediction_length_results_classification/prediction_length_coat_temp.csv')
    os.remove(
        'results/' + account.get_signal() +
        '/prediction_length_results_classification/prediction_length_sleeve_temp.csv')
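# For reference, prediction_length_classification() assumes each row of
# results/test_length.csv has the form (inferred from the parsing above; the
# concrete values are illustrative only):
#   <relative image path>, <label category>, <';'-joined prediction scores>
# e.g.
#   <signal>/coat_length_labels/a.jpg,coat_length_labels,0.01;0.02;0.90;0.03;0.01;0.01;0.01;0.01
# The index of the highest score is mapped to a coarse length value (1-4 for
# coats, 0-4 for sleeves, 1-3 for pants and skirts), and the coat and sleeve
# values are then averaged into prediction_length_coat.csv.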
def prehanding():
    # Directories where downloaded images are saved
    sourDir = 'datasets/final-rank/' + account.get_signal() + '/download/'
    destDirCoat = [
        'datasets/final-rank/' + account.get_signal() + '/coat_length_labels/',
        'datasets/final-rank/' + account.get_signal() + '/sleeve_length_labels/',
        'datasets/final-rank/' + account.get_signal() + '/collar_design_labels/',
        'datasets/final-rank/' + account.get_signal() + '/lapel_design_labels/',
        'datasets/final-rank/' + account.get_signal() + '/neck_design_labels/',
        'datasets/final-rank/' + account.get_signal() + '/neckline_design_labels/'
    ]
    destDirPant = 'datasets/final-rank/' + account.get_signal() + '/pant_length_labels/'
    destDirSkirt = 'datasets/final-rank/' + account.get_signal() + '/skirt_length_labels/'

    responseTops = urllib.request.urlopen(urlTops + 'GetTopTXT?phone=' +
                                          account.get_encode_signal())
    responsePants = urllib.request.urlopen(urlPants + 'GetPantTXT?phone=' +
                                           account.get_encode_signal())
    responseSkirts = urllib.request.urlopen(urlSkirts + 'GetSkirtTXT?phone=' +
                                            account.get_encode_signal())
    print(account.get_encode_signal())
    print(account.get_signal())

    topsfilename = (responseTops.read().decode()).split('\r\n')[:-1]
    pantsfilename = (responsePants.read().decode()).split('\r\n')[:-1]
    skirtsfilename = (responseSkirts.read().decode()).split('\r\n')[:-1]
    print('TopsFilename: {0} {1}'.format(len(topsfilename), topsfilename))
    print('PantsFilename: {0} {1}'.format(len(pantsfilename), pantsfilename))
    print('SkirtsFilename: {0} {1}'.format(len(skirtsfilename), skirtsfilename))

    add_tops, add_pants, add_skirts, delete_tops, delete_pants, delete_skirts = check_duplication.check(
        topsfilename, pantsfilename, skirtsfilename)
    print('TopsAdd: {0} {1}'.format(len(add_tops), add_tops))
    print('PantsAdd: {0} {1}'.format(len(add_pants), add_pants))
    print('SkirtsAdd: {0} {1}'.format(len(add_skirts), add_skirts))
    print('TopsDelete: {0} {1}'.format(len(delete_tops), delete_tops))
    print('PantsDelete: {0} {1}'.format(len(delete_pants), delete_pants))
    print('SkirtsDelete: {0} {1}'.format(len(delete_skirts), delete_skirts))

    # Delete images that are no longer on the server
    delete_all = delete_tops + delete_pants + delete_skirts
    if not delete_all:
        print('no images need to be deleted')
    else:
        # Delete the entries from the recorder
        check_duplication.delete_item(
            'datasets/final-rank/' + account.get_signal() + '/download/recorder.csv',
            delete_all)
        # Delete the entries from the three result CSVs
        check_duplication.delete_item(
            'results/' + account.get_signal() +
            '/prediction_length_results_classification/prediction_length_coat.csv',
            delete_all)
        check_duplication.delete_item(
            'results/' + account.get_signal() +
            '/prediction_length_results_classification/prediction_length_pant.csv',
            delete_all)
        check_duplication.delete_item(
            'results/' + account.get_signal() +
            '/prediction_length_results_classification/prediction_length_skirt.csv',
            delete_all)
        # Delete the image files in the download and label directories
        for each_to_delete in delete_all:
            check_duplication.delete_image('datasets/final-rank/' + account.get_signal() +
                                           '/download/' + each_to_delete)
            for each_dir in destDirCoat + [destDirPant, destDirSkirt]:
                check_duplication.delete_image(each_dir + each_to_delete)

    # Record the newly added images that still need prediction
    add_all = add_tops + add_pants + add_skirts
    if not add_all:
        print('all images have already been downloaded')
    else:
        write_recorder(
            add_all,
            'datasets/final-rank/' + account.get_signal() + '/download/recorder.csv', 'a')
        # [20:] strips the 'datasets/final-rank/' prefix; [35:-1] assumes a
        # fixed-length account signal when extracting the label-folder name
        for eachDir in destDirCoat:
            write_question(eachDir[20:], add_tops, eachDir[35:-1],
                           'datasets/final-rank/Tests/question.csv', 'a')
        write_question(destDirPant[20:], add_pants, destDirPant[35:-1],
                       'datasets/final-rank/Tests/question.csv', 'a')
        write_question(destDirSkirt[20:], add_skirts, destDirSkirt[35:-1],
                       'datasets/final-rank/Tests/question.csv', 'a')

    if add_tops:
        for item in add_tops:
            item_url = (urlTops + 'GetTopPhoto?phone=' + account.get_encode_signal() +
                        '&path=' + item)
            response = urllib.request.urlopen(item_url)
            pic = response.read()
            with open(sourDir + item, 'wb') as f:
                f.write(pic)
            for eachDir in destDirCoat:
                shutil.copyfile(sourDir + item, eachDir + item)
                # write_question_csv(eachDir[20:] + item, eachDir[40:-1])
    if add_pants:
        for item in add_pants:
            item_url = (urlPants + 'GetPantPhoto?phone=' + account.get_encode_signal() +
                        '&path=' + item)
            response = urllib.request.urlopen(item_url)
            pic = response.read()
            with open(sourDir + item, 'wb') as f:
                f.write(pic)
            shutil.copyfile(sourDir + item, destDirPant + item)
            # write_question_csv(destDirPant[20:] + item, destDirPant[40:-1])
    if add_skirts:
        for item in add_skirts:
            item_url = (urlSkirts + 'GetSkirtPhoto?phone=' + account.get_encode_signal() +
                        '&path=' + item)
            response = urllib.request.urlopen(item_url)
            pic = response.read()
            with open(sourDir + item, 'wb') as f:
                f.write(pic)
            shutil.copyfile(sourDir + item, destDirSkirt + item)
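# write_recorder() and write_question() are called by prehanding() but not
# shown in this section. Minimal sketches of the assumed behaviour, inferred
# from the call sites above; the column layout of question.csv is an assumption:
def write_recorder(filenames, recorder_csv, mode):
    # Append each new filename as one row of the recorder CSV
    with open(recorder_csv, mode, newline='') as f:
        writer = csv.writer(f, dialect='excel')
        for name in filenames:
            writer.writerow([name])


def write_question(dir_prefix, filenames, label, question_csv, mode):
    # Append one "<relative path>,<label>" row per image to the question CSV
    with open(question_csv, mode, newline='') as f:
        writer = csv.writer(f, dialect='excel')
        for name in filenames:
            writer.writerow([dir_prefix + name, label])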
def start_matching(username, times):
    try:
        print('start matching...')
        match_results_filename = []
        weather_list = getWeatherAttribures(weather_url)
        if not weather_list[2]:
            weather_list[2] = 0
        weather_value = weather_analysis(float(weather_list[1]), float(weather_list[2]))
        print('Today Weather: weather = {0[0]}, temperature = {0[1]}, windScale = {0[2]}, '
              'humidity = {0[3]}, Weather Value = {1}'.format(weather_list, weather_value))

        # Split the overall weather value between top and bottom garments
        basic_top_value = int(weather_value / 2)
        basic_bottom_value = weather_value - basic_top_value

        matching_up = []
        matching_bottom = []
        for i in [basic_top_value - 0.5, basic_top_value,
                  basic_top_value + 0.5, basic_top_value + 1]:
            if get_required_coat(i) != []:
                matching_up = get_required_coat(i)
        for j in [basic_bottom_value - 1, basic_bottom_value - 0.5,
                  basic_bottom_value, basic_bottom_value + 0.5]:
            if get_required_pant(j) != []:
                matching_bottom = get_required_pant(j)
            if get_required_skirt(j) != []:
                matching_bottom = matching_bottom + get_required_skirt(j)
        print('Top Candidate: ', matching_up)
        print('Bottom Candidate: ', matching_bottom)

        matching_results_color_detect = []
        for run in range(times):
            temp = matching(matching_up, matching_bottom)
            # print('matching result {0}: {1}'.format(run, temp))
            matching_results_color_detect.append(temp)

        # Deduplicate the candidate outfits before colour filtering
        beforeInColor = [
            list(t) for t in set(tuple(_) for _ in matching_results_color_detect)
        ]
        print('before: {0} {1}'.format(len(beforeInColor), beforeInColor))

        print('start detecting color matching...')
        final_results = color_filter.color_matching(beforeInColor, 4)
        for k in range(len(final_results)):
            print('composite image {0}: {1}'.format(k, final_results[k]))
            match_results_filename.append(image_composite(final_results[k]))
        print('matching completed')
        print('=======================================================================================================')

        print('start uploading...')
        image_upload.upload_filename(match_results_filename, username)
        for eachpic in match_results_filename:
            image_upload.upload_image(
                'results/' + account.get_signal() + '/matching_results/' + eachpic,
                eachpic.split('.')[0], username)
        print('uploading completed')
        print('=======================================================================================================')
    except Exception as e:
        print('=======================================================================================================')
        print('error:', e)
        image_upload.upload_filename(['error_image.jpg'], username)
        image_upload.upload_image('results/error_image.jpg', 'error_image', username)
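# A hedged sketch of how these pieces appear to fit together as a pipeline.
# The entry-point guard, the username value, and the times value are
# illustrative only, and the model-inference step that produces
# results/test_length.csv is assumed to run between prehanding() and
# prediction_length_classification().
if __name__ == '__main__':
    prehanding()                                     # sync images with the server
    prediction_length_classification()               # split raw predictions per garment
    start_matching(account.get_signal(), times=20)   # build and upload outfit composites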