def main():
    thislocation = os.path.dirname(os.path.realpath(__file__))
    if args.jobfile:
        jobfile = os.path.join(thislocation, "testjobs", args.jobfile)
        print(jobfile)
        with open(jobfile) as fp:
            job = fp.read()
        job = jobimport.convert(job, tolerance=float(args.tolerance),
                                optimize=not args.nooptimize)
        # stats
        total_points = 0
        for path in job['vector']['paths']:
            for polyline in path:
                for point in polyline:
                    total_points += 1
        print("STATS:")
        print("\ttotal points: %s" % total_points)
        if 'vector' in job and 'optimized' in job['vector']:
            print("\ttolerance: %s" % job['vector']['optimized'])
    else:
        jobpath = os.path.join(thislocation, "testjobs")
        cwd_temp = os.getcwd()
        os.chdir(jobpath)
        files = glob.glob("*.*")
        os.chdir(cwd_temp)
        print("Name one of the following files:")
        for file_ in files:
            print(file_)
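# A minimal sketch (illustrative values, not from the source) of the nested
# structure the stats loop above walks: job['vector']['paths'] is a list of
# paths, each path a list of polylines, each polyline a list of points. The
# code above prints job['vector']['optimized'] as the tolerance when present.
example_job = {
    'vector': {
        'paths': [
            [  # one path
                [[0.0, 0.0], [10.0, 0.0], [10.0, 10.0]],  # polyline, 3 points
                [[20.0, 0.0], [30.0, 0.0]],               # polyline, 2 points
            ],
        ],
        'optimized': 0.08,  # tolerance recorded when optimization ran
    },
}

example_total = sum(len(polyline)
                    for path in example_job['vector']['paths']
                    for polyline in path)
assert example_total == 5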
def open_file(self, jobfile, optimize=True, tolerance=None):
    """Load and convert a job file locally.

    Args:
        jobfile: Path to job file (dba, svg, dxf, or ngc).
        optimize: Flag for optimizing path tolerances.
        tolerance: Tolerance used in convert/optimization.

    Returns:
        A parsed .dba job.
    """
    import jobimport  # dependency only when actually needed
    name_f = os.path.basename(jobfile)
    with open(jobfile) as fp:
        job = fp.read()
    name_f, ext = os.path.splitext(name_f)
    if tolerance:
        job = jobimport.convert(job, optimize=optimize, tolerance=tolerance)
    else:
        job = jobimport.convert(job, optimize=optimize)
    return job
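# Hedged usage sketch for open_file() above: `importer` stands in for whatever
# object actually defines the method, and the directory and file names are
# illustrative assumptions, not taken from the source.
import os


def demo_open_file(importer, directory="testjobs", filename="example.svg"):
    path = os.path.join(directory, filename)
    # Pass an explicit tolerance; leave it at None to use jobimport's default.
    job = importer.open_file(path, optimize=True, tolerance=0.08)
    print("imported paths: %s" % len(job.get('vector', {}).get('paths', [])))
    return job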
def load(): """Load a dba, svg, dxf, or gcode job. Args: (Args come in through the POST request.) job: Parsed dba or job string (dba, svg, dxf, or gcode). name: name of the job (string) optimize: flag whether to optimize (bool) overwrite: flag whether to overwite file if present (bool) matrix: alignment matrix to apply to dba (3x3 list of lists of float) """ load_request = json.loads(bottle.request.forms.get('load_request')) job = load_request.get('job') # always a string if job == 'upload': # data was passed as gzip file upload upload = bottle.request.files.get('job', None) job = gzip.GzipFile(fileobj=upload.file, mode='rb').read() name = load_request.get('name') # optimize defaults if 'optimize' in load_request: optimize = load_request['optimize'] else: optimize = True # overwrite defaults if 'overwrite' in load_request: overwrite = load_request['overwrite'] else: overwrite = False # alignment matrix if 'matrix' in load_request: matrix = load_request['matrix'] else: matrix = None # sanity check if job is None or name is None: bottle.abort(400, "Invalid request data.") # convert try: job = jobimport.convert(job, optimize=optimize, matrix=matrix) except TypeError: if DEBUG: traceback.print_exc() bottle.abort(400, "Invalid file type.") if not overwrite: name = _unique_name(name) _add(json.dumps(job), name) return json.dumps(name)
def load(): """Load a dba, svg, dxf, or gcode job. Args: (Args come in through the POST request.) job: Parsed dba or job string (dba, svg, dxf, or gcode). name: name of the job (string) optimize: flag whether to optimize (bool) overwrite: flag whether to overwite file if present (bool) """ load_request = json.loads(bottle.request.forms.get('load_request')) job = load_request.get('job') # always a string if job == 'upload': # data was passed as gzip file upload upload = bottle.request.files.get('job', None) job = gzip.GzipFile(fileobj=upload.file, mode='rb').read() name = load_request.get('name') # optimize defaults if 'optimize' in load_request: optimize = load_request['optimize'] else: optimize = True # overwrite defaults if 'overwrite' in load_request: overwrite = load_request['overwrite'] else: overwrite = False # sanity check if job is None or name is None: bottle.abort(400, "Invalid request data.") # convert try: job = jobimport.convert(job, optimize=optimize) except TypeError: if DEBUG: traceback.print_exc() bottle.abort(400, "Invalid file type.") if not overwrite: name = _unique_name(name) _add(json.dumps(job), name) return json.dumps(name)
def load(): """Load a dba, svg, dxf, or gcode job. Args: (Args come in through the POST request.) job: Parsed dba or job string (dba, svg, dxf, or ngc). name: name of the job (string) optimize: flag whether to optimize (bool) overwrite: flag whether to overwite file if present (bool) """ load_request = json.loads(bottle.request.forms.get('load_request')) job = load_request.get('job') # always a string name = load_request.get('name') # optimize defaults if 'optimize' in load_request: optimize = load_request['optimize'] else: optimize = True # overwrite defaults if 'overwrite' in load_request: overwrite = load_request['overwrite'] else: overwrite = False # sanity check if job is None or name is None: bottle.abort(400, "Invalid request data.") # convert try: job = jobimport.convert(job, optimize=optimize) except TypeError: if DEBUG: traceback.print_exc() bottle.abort(400, "Invalid file type.") if not overwrite: name = _unique_name(name) _add(json.dumps(job), name) return json.dumps(name)
argparser.add_argument('-a', '--animate', dest='animate', action='store_true',
                       default=False, help='animate job')
argparser.add_argument('-f', '--fast', dest='fast', action='store_true',
                       default=False, help='animate fast')
argparser.add_argument('-n', '--nooptimize', dest='nooptimize', action='store_true',
                       default=False, help='do not optimize geometry')
argparser.add_argument('-t', '--tolerance', dest='tolerance', default=0.08,
                       help='tolerance in mm')
args = argparser.parse_args()

thislocation = os.path.dirname(os.path.realpath(__file__))

if args.jobfile:
    jobfile = os.path.join(thislocation, "testjobs", args.jobfile)
    with open(jobfile) as fp:
        job = fp.read()
    job = jobimport.convert(job, tolerance=float(args.tolerance),
                            optimize=not args.nooptimize)
    # stats
    total_points = 0
    for path in job['vector']['paths']:
        for polyline in path:
            for point in polyline:
                total_points += 1
    print("STATS:")
    print("\ttotal points: %s" % total_points)
    if 'vector' in job and 'optimized' in job['vector']:
        print("\ttolerance: %s" % job['vector']['optimized'])

    # run gtk window
    PyApp()
    gtk.main()
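# Hedged invocation sketch for the test script above. The positional `jobfile`
# argument is referenced via args.jobfile but its add_argument call is not
# shown here, and the script name is an assumption, so the exact command line
# may differ:
#
#   python test_jobimport.py example.svg --tolerance 0.05 --animate
#
# With --nooptimize the convert step skips path optimization, so the
# "tolerance" line of the stats output is not printed.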