def conversion():
    """Fixture-style setup/teardown: build numpy arrays from the test images,
    hand control to the test, then remove the generated arrays."""
    # Drive convert's own CLI parser so the test exercises the real entry point.
    cli_args = convert.parser().parse_args(['-i', IMG_PATH, '-o', ARR_PATH, 'numpy'])
    convert.main(cli_args)

    yield None

    # Teardown: drop everything the conversion wrote under ARR_PATH.
    shutil.rmtree(ARR_PATH)
def make_slide(slide_number, markdown_file_name, slide_count):
    """Render one slide: convert its markdown, then fill in the HTML template.

    Navigation wraps around: the last slide links forward to slide 1 and the
    first slide links back to the last one.
    """
    convert.main(markdown_file_name, 'content.html')

    slide_file = make_slide_file_name(slide_number)
    # Wrap-around neighbours for the prev/next navigation links.
    next_file = make_slide_file_name(
        slide_number + 1 if slide_number < slide_count else 1)
    prev_file = make_slide_file_name(
        slide_number - 1 if slide_number > 1 else slide_count)

    title = parse_slide_title(markdown_file_name)
    template.main('template.html', slide_file, title, next_file, prev_file)
def make_slide(slide_number, markdown_file_name, slide_count):
    """Generate the HTML page for one slide of the deck.

    The previous/next links wrap: slide 1 points back to the final slide and
    the final slide points forward to slide 1.
    """
    content_file_name = 'content.html'
    convert.main(markdown_file_name, content_file_name)

    slide_file_name = make_slide_file_name(slide_number)

    if slide_number < slide_count:
        next_slide_number = slide_number + 1
    else:
        next_slide_number = 1
    next_slide_file_name = make_slide_file_name(next_slide_number)

    if slide_number > 1:
        previous_slide_number = slide_number - 1
    else:
        previous_slide_number = slide_count
    previous_slide_file_name = make_slide_file_name(previous_slide_number)

    title = parse_slide_title(markdown_file_name)
    template.main('template.html', slide_file_name, title,
                  next_slide_file_name, previous_slide_file_name)
def test_lammps(args, indir, outdir):
    """Convert every .lmp file under *indir* and collect per-format diffs.

    Returns (input files, diff results); a failed conversion contributes the
    exception once per output format so result counts stay aligned.
    """
    lmp_files = sorted(glob.glob('%s/*/*.lmp' % indir))
    results = []
    basedir = '%s/FromLammps' % outdir
    if not os.path.isdir(basedir):
        os.mkdir(basedir)
    for lmp in lmp_files:
        prefix = lmp[lmp.rfind('/') + 1:-4]  # file stem names the output dir
        odir = '%s/%s' % (basedir, prefix)
        if not os.path.isdir(odir):
            os.mkdir(odir)
        h1, h2 = add_handler(odir)
        flags = add_flags(args, ['--lmp_in', lmp, '--desmond', '--gromacs',
                                 '--lammps', '--odir', odir])
        logger.info('Converting %s with command:\n python convert.py %s\n'
                    % (lmp, ' '.join(flags)))
        try:
            diff = convert.main(flags)  # reuses code from convert.py
            assert len(diff) == N_FORMATS
            results += diff
        except Exception as e:
            logger.exception(e)
            results += [e] * N_FORMATS
        remove_handler(h1, h2)
    return lmp_files, results
def main(statement):
    """Build a Karnaugh map for the boolean *statement* entered by the user.

    The statement is converted, normalized to canonical DNF via the
    equivalency checker, turned into an expression tree, and the satisfying
    assignments are plotted on a Karnaugh map (printed as a side effect).

    Returns:
        (karnaugh_map, variables, statement)
    """
    input_value = convert.main(statement)

    # Convert to CDNF form; the checker compares the statement to itself and
    # reports the normal form plus whether it is a contradiction.
    equiv_result = equivCheck.generate_equivalency(
        str(input_value), str(input_value), True)
    result_normal_form = equiv_result[1]
    is_contradiction = equiv_result[3]

    root = buildExpressionTreeData(result_normal_form)
    variables = root.getDistinctVariables()

    kmap = KarnaughMap(root.getDistinctVariables())
    kmap.setOneValues(root.getSatisfyingValues(is_contradiction))
    kmap.printMatrix()

    return kmap, variables, statement
def test_gromacs(args, indir, outdir):
    """Convert every .gro/.top pair under *indir* and collect per-format diffs.

    Returns (input .gro files, diff results); a failed conversion contributes
    the exception once per output format so counts stay aligned.
    """
    gro_files = sorted(glob.glob('%s/*/*.gro' % indir))
    # Skip converter outputs left over from previous runs.
    gro_files = [x for x in gro_files if not x.endswith('out.gro')]
    top_files = sorted(glob.glob('%s/*/*.top' % indir))
    results = []
    basedir = '%s/FromGromacs' % outdir
    if not os.path.isdir(basedir):
        os.mkdir(basedir)
    for gro, top in zip(gro_files, top_files):
        prefix = gro[gro.rfind('/') + 1:-4]  # file stem names the output dir
        odir = '%s/%s' % (basedir, prefix)
        if not os.path.isdir(odir):
            os.mkdir(odir)
        h1, h2 = add_handler(odir)
        flags = add_flags(args, ['--gro_in', gro, top, '--desmond',
                                 '--gromacs', '--lammps', '--odir', odir])
        logger.info('Converting %s, %s with command:\n python convert.py %s\n'
                    % (gro, top, ' '.join(flags)))
        try:
            diff = convert.main(flags)  # reuses code from convert.py
            assert len(diff) == N_FORMATS
            results += diff
        except Exception as e:
            logger.exception(e)
            results += [e] * N_FORMATS
        remove_handler(h1, h2)
    return gro_files, results
def test_lammps(args, indir, outdir):
    """Round-trip every LAMMPS input file found under *indir*.

    Each conversion writes into its own subdirectory of <outdir>/FromLammps;
    exceptions are logged and recorded once per output format.
    """
    files = sorted(glob.glob('%s/*/*.lmp' % indir))  # one .lmp per system dir
    basedir = '%s/FromLammps' % outdir
    if not os.path.isdir(basedir):
        os.mkdir(basedir)
    results = []
    for f in files:
        prefix = f[f.rfind('/') + 1:-4]  # strip directory and '.lmp' suffix
        odir = '%s/%s' % (basedir, prefix)
        if not os.path.isdir(odir):
            os.mkdir(odir)
        h1, h2 = add_handler(odir)
        flags = ['--lmp_in', f, '--desmond', '--gromacs', '--lammps',
                 '--odir', odir]
        flags = add_flags(args, flags)
        logger.info('Converting %s with command:\n python convert.py %s\n'
                    % (f, ' '.join(flags)))
        try:
            diff = convert.main(flags)  # reuses code from convert.py
            assert len(diff) == N_FORMATS
            results += diff
        except Exception as e:
            logger.exception(e)
            results += [e] * N_FORMATS
        remove_handler(h1, h2)
    return files, results
def test_gromacs(args, indir, outdir):
    """Convert each GROMACS .gro/.top pair and gather the resulting diffs."""
    gro_files = sorted(glob.glob('%s/*/*.gro' % indir))  # coordinate files
    gro_files = [x for x in gro_files if not x.endswith('out.gro')]  # drop prior outputs
    top_files = sorted(glob.glob('%s/*/*.top' % indir))  # topology files
    basedir = '%s/FromGromacs' % outdir
    if not os.path.isdir(basedir):
        os.mkdir(basedir)
    results = []
    for g, t in zip(gro_files, top_files):
        prefix = g[g.rfind('/') + 1:-4]
        odir = '%s/%s' % (basedir, prefix)
        if not os.path.isdir(odir):
            os.mkdir(odir)
        h1, h2 = add_handler(odir)
        flags = ['--gro_in', g, t, '--desmond', '--gromacs', '--lammps',
                 '--odir', odir]
        flags = add_flags(args, flags)
        logger.info('Converting %s, %s with command:\n python convert.py %s\n'
                    % (g, t, ' '.join(flags)))
        try:
            diff = convert.main(flags)  # reuses code from convert.py
            assert len(diff) == N_FORMATS
            results += diff
        except Exception as e:
            logger.exception(e)
            results += [e] * N_FORMATS
        remove_handler(h1, h2)
    return gro_files, results
def main(path, offx=0, offy=0, stdout=False):
    """Split an animated image into frames and convert each to text art.

    Uses ImageMagick to coalesce the animation into per-frame PNGs under a
    temporary directory, converts each frame, then removes the directory.
    """
    call(['mkdir', 'tmpdir'])
    # ImageMagick expands %d, writing tmpdir/out0.png, out1.png, ...
    call(["convert", path, '-coalesce', 'tmpdir/out%d.png'])
    framelist = [
        convert.main(Image.open('tmpdir/' + str(name)).convert('RGBA'),
                     offx, offy)
        for name in sorted(os.listdir('tmpdir'))
    ]
    call(['rm', '-R', 'tmpdir'])
    if stdout:
        print('\n'.join('\n'.join(frame) for frame in framelist))
    return framelist
def main(path, offx=0, offy=0, stdout=False):
    """Explode the animation at *path* into frames and text-convert each one."""
    call(['mkdir', 'tmpdir'])
    # ImageMagick names each coalesced frame tmpdir/out<N>.png.
    call(["convert", path, '-coalesce', 'tmpdir/out%d.png'])
    frames = []
    for frame_name in sorted(os.listdir('tmpdir')):
        rgba = Image.open('tmpdir/' + str(frame_name)).convert('RGBA')
        frames.append(convert.main(rgba, offx, offy))
    call(['rm', '-R', 'tmpdir'])
    if stdout:
        print('\n'.join('\n'.join(rows) for rows in frames))
    return frames
def scriptfilter(items, query):
    """Handle a script-filter *query*: load the unit table and run conversion.

    The unit table is cached as a pickle; on any failure to load it (or when
    DEBUG forces a rebuild via the assert) it is re-parsed from XML and the
    cache is refreshed.
    """
    import convert
    import cPickle as pickle
    try:
        # DEBUG deliberately trips this assert to force a cache rebuild.
        assert not DEBUG
        with open(constants.UNITS_PICKLE_FILE, 'rb') as fh:
            units = pickle.load(fh)
    # Fixed: bare `except:` also swallowed SystemExit/KeyboardInterrupt;
    # `except Exception` still catches the deliberate AssertionError above
    # plus any IO/unpickling error.
    except Exception:  # pragma: no cover
        units = convert.Units()
        units.load(constants.UNITS_XML_FILE)
        with open(constants.UNITS_PICKLE_FILE, 'wb') as fh:
            pickle.dump(units, fh, -1)
        with open(constants.UNITS_PICKLE_FILE, 'rb') as fh:
            units = pickle.load(fh)
    if DEBUG:
        import pprint
        items = list(convert.main(units, query, debug_item_creator))
        pprint.pprint(items)
    else:
        list(convert.main(units, query, item_creator(items)))
def __init__(self, phone_numbers, route_costs):
    """Load phone numbers and route costs, then compute the best prices.

    *phone_numbers* and *route_costs* name text inputs; convert.main turns
    them into importable modules, each exposing a `dictionary` attribute.
    """
    # Regenerate the importable data files from the raw text inputs.
    convert.main(phone_numbers, route_costs)

    phone_module = __import__(phone_numbers)
    route_module = __import__(route_costs)

    # {route prefix: lowest price}
    self.route_dict = route_module.dictionary
    # {phone number: None}
    self.phone_dict = phone_module.dictionary
    # Filled by get_prices(): {phone number: lowest price}
    self.price_dict = {}

    # Combine the two inputs into per-number prices.
    self.get_prices()
def runn(mode, character, crop_mode):
    """Dispatch *mode* to the matching pipeline step for *character*.

    Unknown modes are a no-op, matching the original if-chain behavior.
    *crop_mode* is only consulted for the 'crop' step.
    """
    # Dispatch table replaces eight sequential equality tests; the lambdas
    # defer both the name lookup and the call until a mode actually matches.
    actions = {
        'vert': lambda: out_vert.main(character),
        'repose': lambda: repose.main(character),
        'mesh': lambda: mesh.main(character),
        'crop': lambda: extractor.main(character, crop_mode),
        'convert': lambda: convert.main(character),
        'comp': lambda: replacer.main(character),
        'swap': lambda: fswap.main(character),
        'train': lambda: train.main(character),
    }
    action = actions.get(mode)
    if action is not None:
        action()
def resultados(request):
    """Run the SLP2015 optimization over all DB tables and render the results."""
    # Pull every table the model needs.
    central = Central.objects.all()
    centralov = CentralOv.objects.all()
    centrales = Centrales.objects.all()
    ofererc = Ofererc.objects.all()
    paqgen = Paqgen.objects.all()
    paquetes = Paquetes.objects.all()
    paquetes2 = Paquetes2.objects.all()
    nodoof = Nodoof.objects.all()
    regionof = Regionof.objects.all()
    sistemainter = Sistemainter.objects.all()
    zonaof = Zonaof.objects.all()
    paqin = Paqin.objects.all()
    paqexc = Paqexc.objects.all()
    conpaqexc = Conpaqexc.objects.all()
    ofertas = Ofertas.objects.all()

    # Normalize inputs, solve the LP, then summarize per-package results.
    paqin2, paqexc2, centralov2 = convert.main(
        paqin, paqexc, centralov, centrales, paqgen)
    slp2015, Up, Uc, DemP, DemC, DemE = SLP2015.main(
        paqgen, ofererc, central, centrales, centralov2, paqin2, paqexc2,
        paquetes, paquetes2, ofertas, conpaqexc, nodoof, regionof, zonaof,
        sistemainter)
    Upaq, Compra, Ucen = resul.main(
        paqgen, ofererc, centrales, Up, Uc, DemP, DemC, DemE)

    context = {
        'slp2015': slp2015,
        'fob': value(slp2015.objective),        # objective function value
        'status': LpStatus[slp2015.status],     # human-readable solver status
        'paqgen': paqgen,
        'Up': Upaq,
        'Uc': Ucen,
        'Compra': Compra,
    }
    return render(request, 'SLP15/resultados.html', context)
def _Test(self, name, expected_return_code=0):
    """Verifies that the input file is converted as expected.

    Copies <name>-input.py to a temp file, converts it in place, and compares
    the result to <name>-expected.py both line by line and by checksum.
    """
    # Copy the contents of the input file to a temporary file.
    with open(os.path.join(self.TESTDATA, '{0}-input.py'.format(name))) as f:
        self.temp_file.write(f.read())
    self.temp_file.close()

    # Convert the temporary file in-place and check the return code.
    return_code = convert.main(['convert', self.temp_file.name])
    AssertThat(return_code).IsEqualTo(expected_return_code)

    # Check the contents line by line.
    # This is not strictly necessary given the SHA-512 verification, but it
    # makes debugging test failures easier.
    # Fixed: use enumerate instead of a hand-rolled counter, and stop
    # shadowing the `name` parameter inside the loop.
    expected_path = os.path.join(self.TESTDATA, '{0}-expected.py'.format(name))
    with open(self.temp_file.name) as converted_file:
        with open(expected_path) as expected_file:
            for line_number, converted_line in enumerate(converted_file, 1):
                expected_line = expected_file.readline()
                AssertThat(converted_line).Named(
                    'at line {0}'.format(line_number)).IsEqualTo(expected_line)

    # Verify the contents are exactly identical.
    actual_hash = self._Checksum(self.temp_file.name)
    expected_hash = self._Checksum(expected_path)
    AssertThat(actual_hash).IsEqualTo(expected_hash)
def main():
    """Run the full pipeline: convert the data, load it, then build the model."""
    for step in (convert.main, data.main, initiateModel.main):
        step()
def testNoFilesGiven(self):
    """convert.main must fail (nonzero) when invoked with no input files."""
    return_code = convert.main(['convert'])
    AssertThat(return_code).IsNonZero()
def testCheckFails(self, mock_check):
    """When the check step fails, convert.main returns nonzero after exactly
    one call to the (mocked) check."""
    result = convert.main(['convert', self.INPUT_PY])
    AssertThat(result).IsNonZero()
    AssertThat(mock_check).WasCalled().Once()
def _cmp(x): import re if len(re.findall(r'.*?(\d+).*(.mp3|.m4v|.M4V|.mp4|.MP4|.mov)$', x)) == 0: return 0 else: return int(re.findall(r'.*?(\d+).*(.mp3|.m4v|.M4V|.mp4|.MP4|.mov)$', x)[0][0]) dir = sys.argv[1] print(os.listdir(dir)) url = sys.argv[2] myDir = os.path.abspath('.') os.chdir(dir) convert.main(dir) listDir = list(filter(lambda x: path.isdir(x), os.listdir(dir))) if listDir: print(listDir) lm = list(map(lambda x: list(map(lambda k: x + '/' + k, os.listdir(x))), listDir)) print("lm:{}\n *********************".format(lm)) listDir = reduce(lambda x, y: x + y, lm) files = list(filter(lambda x: path.splitext(x)[1] in [".MP3",".mp3",".M4V", ".m4v", '.mp4',".MP4", ".mov"], listDir + os.listdir(dir))) files.sort(key=_cmp) myItems = [(rss.RSSItem( title=n, description='', enclosure=rss.Enclosure(url+':'+str(port) + '/' + esc(n), 0, "audio/mpeg") )) for n in files] myItems.reverse() feed = rss.RSS2(
# Console handler: only CRITICAL messages reach the terminal, with a
# simplified message-only format, attached to the root logger.
console.setLevel(logging.CRITICAL)
console_format = logging.Formatter('%(message)s')
console.setFormatter(console_format)
logging.getLogger('').addHandler(console)

# Module logger for this script.
log = logging.getLogger(__name__)

#
# RUN SELECTED COMMAND
#
if cliArgs.command == "simulate":
    simulate.main(cliArgs)      # simulate.py
elif cliArgs.command == "list":
    list.main()                 # list.py
elif cliArgs.command == "convert":
    convert.main(cliArgs)       # convert.py
elif cliArgs.command == "extract":
    extract.main(cliArgs)       # extract.py
def generateFrame(path, offx, offy):
    """Load *path* as RGBA and convert it into a single text-art frame."""
    image = Image.open(path).convert('RGBA')
    return convert.main(image, offx, offy)
import os
import sys

# Make sibling modules importable when this file is run as a script.
sys.path.append(os.path.dirname(sys.argv[0]))

import load_dataset
import convert
import separate
import dataset_postprocessing

# Earlier hard-coded pipeline, kept commented out for reference.
#load_dataset.main(global_path=os.path.dirname(sys.argv[0]), dataset_name='obj_detection')
#convert.main(global_path=os.path.dirname(sys.argv[0]), dataset_name='obj_detection')
#separate.main(global_path=os.path.dirname(sys.argv[0]))
#execution_path = os.path.dirname(sys.argv[0])
#print "the end!"
#print sys.argv[0]
#print sys.argv[2]

# Usage (inferred from the reads below): script.py <dataset_name> <stage>
if sys.argv[2] == 'labeling':
    load_dataset.main(global_path=os.path.dirname(sys.argv[0]), dataset_name=sys.argv[1])
# NOTE(review): the elif guards below are commented out, so the remaining
# steps run unconditionally for every stage value — confirm this is intended.
#elif sys.argv[2] == 'convert':
convert.main(global_path=os.path.dirname(sys.argv[0]), dataset_name=sys.argv[1])
#elif sys.argv[2] == 'separate':
separate.main(global_path=os.path.dirname(sys.argv[0]))
# Post-processing always targets 'obj_detection' regardless of sys.argv[1].
dataset_postprocessing.main(global_path=os.path.dirname(sys.argv[0]), dataset_name='obj_detection', batch_size=64, subdivisions=8)
def _cmp(x): import re if len(re.findall(r'.*?(\d+).*(.mp3|.m4v|.M4V|.mp4|.MP4|.mov)$', x)) == 0: return 0 else: return int( re.findall(r'.*?(\d+).*(.mp3|.m4v|.M4V|.mp4|.MP4|.mov)$', x)[0][0]) dir = sys.argv[1] print(os.listdir(dir)) url = sys.argv[2] myDir = os.path.abspath('.') os.chdir(dir) convert.main(dir) listDir = list(filter(lambda x: path.isdir(x), os.listdir(dir))) if listDir: print(listDir) lm = list( map(lambda x: list(map(lambda k: x + '/' + k, os.listdir(x))), listDir)) print("lm:{}\n *********************".format(lm)) listDir = reduce(lambda x, y: x + y, lm) files = list( filter( lambda x: path.splitext(x)[ 1] in [".MP3", ".mp3", ".M4V", ".m4v", '.mp4', ".MP4", ".mov"], listDir + os.listdir(dir))) files.sort(key=_cmp) myItems = [
def converting(defaultConvertFile):
    """Convert the given file via convert.main.

    The original bound the result to an unused local; the result is simply
    discarded (the function still returns None, as before).
    """
    convert.main(defaultConvertFile)
output_path= '/home/osm/Schreibtisch/01_Datasets/Master_Diekel/00_split_70_30_reduced', rearrange_ids=False, rel_output_path=None, remap_labels=True, set_sizes=[70.0, 30.0], sets=['train', 'val'], show_not_verified=False, shuffle=True, skip_images_without_label=False, stats=False, stats_img=False, stats_label=False, tablefmt='psql', target_format='csv', year=2019) current_min_delta = 15 dst_name = '00_split_70_30_reduced({})' while True: max_delta = round(main(deepcopy(args)), 2) if max_delta >= 0 and max_delta < current_min_delta: current_min_delta = max_delta src = args.output_path dst = os.path.join(os.path.dirname(args.output_path), dst_name.format(max_delta)) os.rename(src, dst)
def prove(self, testname, solution):
    """Parse the named test file, run the prover on its sentences, and
    assert the prover's output equals *solution*."""
    testfile_path = "../testFiles/" + testname
    sentences = convert.main(testfile_path).sentences
    output = prove.main(sentences)
    self.assertEqual(solution, output)
gyro.set_title('angular acceleration', fontsize=22) gyro.plot(t, data['gx'], color=c['blue']) gyro.plot(t, data['gy'], color=c['lred']) gyro.plot(t, data['gz'], color=c['green']) gyro.legend(['gx', 'gy', 'gz'], frameon=False) gyro.yaxis.tick_right() gyro.yaxis.set_label_position("right") gyro.spines['top'].set_visible(False) gyro.spines['left'].set_visible(False) gyro.spines['bottom'].set_visible(False) gyro.tick_params(bottom='off', top='off', right='off') gyro.set_ylabel('Hz/s', rotation='horizontal') gyro.set_xlabel('s') gyro.plot(t_trigger, data['gz'][t_trigger_i], color=c['red'], marker='o') gyro.axhline(color='k') gyro.axvline(t_hmax, color=c['lturqois']) # savefig config sc = 1 fig.set_size_inches(16*sc,9*sc) plt.tight_layout() plt.savefig(filename[:-4]+'.png', dpi=120, format='png', transparent=False, frameon=False) # plt.show() if __name__ == "__main__": import convert convert.main(sys.argv[1]) main(sys.argv[1][:-4]+'.csv')
h5gr_new.attrs[hdf5_io.ATTR_FORMAT] = type_repr for _model in [ ('models.model', 'model'), # base class # and derived classes defined in tenpy ('models.bhf', 'bhf_model'), ('models.boson', 'boson_model'), ('models.boson2d', 'boson2d_model'), ('models.double_model', 'double_model'), ('models.dual_ising', 'dual_ising_model'), ('models.fermions_chain', 'sf_model'), ('models.fermions_hubbard', 'fh_model'), ('models.fermions_ladder', 'fermionic_model'), ('models.height_models', 'height_model'), ('models.levingu', 'levingu_model'), ('models.long_range_spin_chain', 'spin_chain_model'), ('models.majorana_island', 'majorana_island_model'), ('models.multilayer_qh', 'QH_model'), ('models.potts', 'potts_model'), ('models.quantum_hall', 'QH_model'), ('models.spin_chain', 'spin_chain_model'), ('models.xxz_tfi', 'xxz_tfi_model'), ]: mappings[_model] = (('tenpy.models.model', 'MPOModel'), convert_model) del _model if __name__ == "__main__": args = convert.parse_args(converter_cls=Converter) convert.main(args)
arguments = parser.parse_args()
args = vars(arguments)
VERBOSE = args.get("verbose", False)

log = Logger("workflow.log", "workflow.py", True, True)
utils.log_header(log, DESCRIPTION, VERBOSE)

try:
    log.debug("Debug mode activated.", VERBOSE)
    log.debug("Args: ", VERBOSE)
    for key in args:
        log.debug(key + ": " + str(args[key]), VERBOSE)

    # --test skips every step that has side effects.
    if not args.get("test", False):
        download.main(args, logger=log)

    # Fixed: compare to None with `is not None`, not `!= None` (PEP 8).
    if args.get("facerec", None) is not None and not args.get("test", False):
        if "all" in args.get("facerec", []):
            # pass all videos from utils.getVideos() to facerec
            facerec.main({"files": utils.getVideos()})
        else:
            # pass all videos from args to facerec
            facerec.main({"files": args.get("facerec", [])})

    if not args.get("test", False):
        convert.main(args, log)
    if not args.get("test", False):
        transfer.main(args, log)

    log.success("Workflow routine finished!", not args.get("silent", False))
except KeyboardInterrupt:
    log.context = "workflow.py"
    log.warning("exiting...", True)
    exit(0)
kernel_initializer="uniform")) # Adding the second hidden layer classifier.add( Dense(activation='relu', units=397, kernel_initializer="uniform")) # Adding the output layer classifier.add( Dense(activation='softmax', units=10, kernel_initializer="uniform")) # Compiling the ANN classifier.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy']) # Fitting the ANN to the training set classifier.fit(X_train, y, epochs=5) return classifier, sc_X, X_train running = True if __name__ == "__main__": clf, sc_X, X_train = main2() while running: num = paint.main() degrade.main() convert.main() running = ANN.main(clf, sc_X, X_train, num)
def generateFrame(path, offx, offy):
    """Render the image at *path* (as RGBA) into a frame via convert.main."""
    rgba = Image.open(path).convert('RGBA')
    return convert.main(rgba, offx, offy)