def confirm(app_name, repo="hbb1.oscwii.org"):
    """Fetch an app's meta.xml from the repository and ask the user to confirm.

    Exits the process (status 1) when the app cannot be found on the server or
    when the user declines; returns normally only after an explicit 'y'.

    :param app_name: application slug as used in the repository URL layout.
    :param repo: repository host name.
    """
    # example layout:
    # https://hbb1.oscwii.org/unzipped_apps/wiixplorer/apps/wiixplorer/
    with Halo(text="Loading Metadata..", color="white"):
        xml = requests.get("https://" + repo + "/unzipped_apps/" + app_name +
                           "/apps/" + app_name + "/meta.xml").text
        # remove unicode declaration
        xml = xml.split("\n", 1)[1]
        # get information from XML
        root = lxml.etree.fromstring(xml)
        try:
            display_name = root.find('name').text
        except AttributeError:
            print(
                "[Error D:002] Could not find application on the server. Cannot continue."
            )
            exit(1)
    metadata.get(app_name)
    # BUGFIX: an unrecognised answer used to print the hint and then fall
    # through, so the caller continued with the download anyway. Re-prompt
    # until the user gives an explicit 'y' or 'n'.
    while True:
        answer = input('Continue with download of "' + display_name + '"? (y/n) > ')
        if answer == "y":
            return
        elif answer == "n":
            print(FAIL + "Cancelled download operation. Exiting.")
            exit(1)
        else:
            print("Please reply with 'y' to continue or 'n' to cancel.")
def everything(output, extract=False, repo="hbb1.oscwii.org"):
    """Download every app listed in the repository contents.

    For each entry, fetch its metadata and then the app itself, printing a
    progress line after each download. ``output`` is accepted but not yet
    honoured (TODO).
    """
    catalogue = parsecontents.get_list(repo=repo)
    amount = len(catalogue.keys())
    for progress, key in enumerate(catalogue.keys(), start=1):
        metadata.get(app_name=key, type="default", repo=repo)
        get(app_name=key, extract=extract, repo=repo)
        # TODO: honour the `output` argument
        print("[Progress] Downloaded " + str(progress) + " out of " + str(amount) + " apps.")
def visit_Subscript(self, node):
    """Combine the type of a subscript expression with its base value.

    Builds a unary type-transformation `f` depending on the kind of
    subscript, then merges it into the inference result via self.combine.
    """
    self.visit(node.value)
    # attribute pseudo-subscript (tagged via metadata): fixed-position access
    if metadata.get(node, metadata.Attribute):
        f = lambda t: AttributeType(node.slice.value.n, t)
    # extended slice: strip one ContentType per plain Index dimension
    elif isinstance(node.slice, ast.ExtSlice):
        d = sum(int(type(dim) is ast.Index) for dim in node.slice.dims)
        f = lambda t: reduce(lambda x, y: ContentType(x), range(d), t)
    # plain slice: type is the expression `value[slice]` itself
    elif isinstance(node.slice, ast.Slice):
        self.visit(node.slice)
        f = lambda x: ExpressionType(
            lambda a, b: "{0}[{1}]".format(a, b),
            [x, self.result[node.slice]]
            )
    # non-negative literal index: element type at a known position
    elif isinstance(node.slice.value, ast.Num) and node.slice.value.n >= 0:
        f = lambda t: ElementType(node.slice.value.n, t)
    # tuple index: peel one ContentType per tuple element
    elif isinstance(node.slice.value, ast.Tuple):
        f = lambda t: reduce(lambda x, y: ContentType(x), node.slice.value.elts, t)
    # fallback: dynamic index, same expression-based type as the slice case
    else:
        self.visit(node.slice)
        f = lambda x: ExpressionType(
            lambda a, b: "{0}[{1}]".format(a, b),
            [x, self.result[node.slice]]
            )
    # `f` is always truthy here; the guard just mirrors the defensive style
    f and self.combine(node, node.value, unary_op=f)
def attach_data(self, node):
    '''Generic method called for visit_XXXX() with XXXX in
    GatherOMPData.statements list

    Attaches the OpenMP directives collected so far to `node`, pads
    statement lists so trailing directives have a statement to bind to,
    and wraps the node in a trivial `if 1:` when a scoping directive
    needs a scope the statement itself does not provide.
    '''
    # flush pending directives onto this node
    if self.current:
        for curr in self.current:
            md = OMPDirective(curr)
            metadata.add(node, md)
        self.current = list()
    # add a Pass to hold some directives: a directive that is the last
    # statement of a body would otherwise have nothing to annotate
    for field_name, field in ast.iter_fields(node):
        if field_name in GatherOMPData.statement_lists:
            if (field
                    and isinstance(field[-1], ast.Expr)
                    and self.isompdirective(field[-1].value)):
                field.append(ast.Pass())
    self.generic_visit(node)
    # add an If to hold scoping OpenMP directives
    directives = metadata.get(node, OMPDirective)
    field_names = {n for n, _ in ast.iter_fields(node)}
    has_no_scope = field_names.isdisjoint(GatherOMPData.statement_lists)
    if directives and has_no_scope:
        # some directives create a scope, but the holding stmt may not
        # artificially create one here if needed
        sdirective = ''.join(d.s for d in directives)
        scoping = ('parallel', 'task', 'section')
        if any(s in sdirective for s in scoping):
            node = ast.If(ast.Num(1), [node], [])
    return node
def visit_Subscript(self, node): value = self.visit(node.value) # attribute case if metadata.get(node, metadata.Attribute): return "getattr<{0}>({1})".format(node.slice.value.n, value) # positive static index case elif (isinstance(node.slice, ast.Index) and isinstance(node.slice.value, ast.Num) and (node.slice.value.n >= 0) and any(isinstance(node.slice.value.n, t) for t in (int, long))): return "std::get<{0}>({1})".format(node.slice.value.n, value) # slice optimization case elif (isinstance(node.slice, ast.Slice) and (isinstance(node.ctx, ast.Store) or node not in self.bounded_expressions)): slice = self.visit(node.slice) return "{1}({0})".format(slice, value) # extended slice case elif isinstance(node.slice, ast.ExtSlice): slice = self.visit(node.slice) return "{1}({0})".format(','.join(slice), value) # standard case else: slice = self.visit(node.slice) return "{1}[{0}]".format(slice, value)
def run(self, *args):
    """
    Basic run method. This method should be called from EgadsAlgorithm
    children, passing along the correct inputs to the _call_algorithm
    method.

    :param *args:
        Parameters to pass into algorithm in the order specified in
        algorithm metadata.

    NOTE: Python 2 code (dict.iteritems).
    """
    logging.debug('egads - egads_core.py - EgadsAlgorithm - run - name ' + self.name + ', args ' + str(args))
    # normalise output_metadata to a list so single- and multi-output
    # algorithms share one code path
    if not isinstance(self.output_metadata, list):
        output_metadata = self.output_metadata
        self.output_metadata = []
        self.output_metadata.append(output_metadata)
    for metadata in self.output_metadata:
        for key, value in metadata.iteritems():
            # substitute every 'inputN' placeholder in the metadata value
            # with the corresponding argument's metadata (or '' if the
            # argument is not an EgadsData)
            try:
                match = re.compile('input[0-9]+').search(value)
                while match:
                    input_seq = metadata.get(key)[match.start():match.end()]
                    input_index = int(input_seq.strip('input'))
                    if isinstance(args[input_index], EgadsData):
                        metadata[key] = metadata[key].replace(
                            input_seq,
                            args[input_index].metadata.get(key, ''))
                    else:
                        metadata[key] = metadata[key].replace(input_seq, '')
                    match = re.compile('input[0-9]+').search(metadata[key])
            except TypeError:
                # non-string values cannot be searched; skip substitution
                match = None
            try:
                # empty Category defaults to the union of input categories
                if key == 'Category':
                    if value == ['']:
                        out_category = []
                        for arg in args:
                            if isinstance(arg, EgadsData):
                                out_category.append(arg.metadata[key])
                        metadata[key] = out_category
            except KeyError:
                pass
    output = self._call_algorithm(*args)
    # wrap each output with its (parented) metadata; tuple for multi-output
    if len(self.metadata['Outputs']) > 1:
        result = []
        for i, value in enumerate(output):
            self.output_metadata[i].set_parent(self.metadata)
            result.append(
                self._return_result(value, self.output_metadata[i]))
        result = tuple(result)
    else:
        self.output_metadata[0].set_parent(self.metadata)
        result = self._return_result(output, self.output_metadata[0])
    return result
def reduce(iter, params):
    """Resolve each key (presumably a DOI) to its metadata record.

    Yields (doi, record) pairs; failures are downgraded to ('error', msg)
    pairs so the job keeps running. Python 2 except syntax.
    """
    for doi, nones in kvgroup(iter):
        try:
            # fast path: record already available locally
            yield doi, metadata.get(doi)
        except db.NotFound:
            try:
                # fall back to fetching from the remote service
                yield doi, metadata.fetch(doi)
            except CommError, exc:
                yield 'error', str(exc)  # CommError has useless repr
            except Exception, exc:
                yield 'error', repr(exc)
def dl_list(file, display=False, repo="hbb1.oscwii.org"):
    """Download every app named in a newline-separated list file.

    :param file: path to a text file with one app name per line.
    :param display: when truthy, only print the names instead of downloading.
                    (BUGFIX: previously defaulted to the *string* "False" and
                    was compared with ``is True``, so string arguments never
                    hit the display branch; now a real boolean.)
    :param repo: Open Shop Channel repository host.
    """
    # BUGFIX: use a context manager so the file is closed when done
    with open(file) as listing:
        for line in listing:
            # BUGFIX: one rstrip over both CR and LF replaces the two
            # try/except-wrapped calls (str.rstrip cannot raise here)
            name = line.rstrip("\r\n")
            # BUGFIX: was `line is ""` — identity comparison on strings
            if not name:
                continue
            if display:
                print(name)
            elif parsecontents.query(term=name, repo=repo) is True:
                metadata.get(app_name=name, type="default", repo=repo)
                download.get(app_name=name, repo=repo)
def visit_Assign(self, node):
    """Lower a Python assignment to a C++ Assign statement.

    Rejects targets that are neither names nor subscripts; prefixes the
    joined target list with ``auto`` when any target is a declared local.
    """
    bad = [n for n in node.targets
           if not isinstance(n, (ast.Name, ast.Subscript))]
    if bad:
        raise PythranSyntaxError(
            "Must assign to an identifier or a subscript",
            node)
    rhs = self.visit(node.value)
    lhs_parts = [self.visit(target) for target in node.targets]
    lhs = "= ".join(lhs_parts)
    needs_decl = any(metadata.get(target, metadata.LocalVariable)
                     for target in node.targets)
    if needs_decl:
        lhs = "auto {0}".format(lhs)
    return self.process_omp_attachements(node, Assign(lhs, rhs))
def visit_Subscript(self, node):
    """Combine the type of a subscript expression with its base value.

    Chooses a unary type transformation depending on the subscript kind
    and merges it into the inference result via self.combine.
    """
    self.visit(node.value)
    # attribute pseudo-subscript (tagged via metadata): fixed position
    if metadata.get(node, metadata.Attribute):
        f = lambda t: AttributeType(node.slice.value.n, t)
    # plain slice: slicing preserves the container type
    elif isinstance(node.slice, ast.Slice):
        f = lambda t: t
    # literal index: element type at a known position
    elif isinstance(node.slice.value, ast.Num):
        f = lambda t: ElementType(node.slice.value.n, t)
    # tuple index: peel one ContentType per tuple element
    elif isinstance(node.slice.value, ast.Tuple):
        f = lambda t: reduce(lambda x, y: ContentType(x),
                             node.slice.value.elts, t)
    # fallback: dynamic index -> generic content type
    else:
        f = ContentType
    self.combine(node, node.value, unary_op=f)
def run(self, *args):
    """
    Basic run method. This method should be called from EgadsAlgorithm
    children, passing along the correct inputs to the _call_algorithm
    method.

    :param *args:
        Parameters to pass into algorithm in the order specified in
        algorithm metadata.

    NOTE: Python 2 code (dict.iteritems).
    """
    # normalise output_metadata to a list so single- and multi-output
    # algorithms share one code path
    if not isinstance(self.output_metadata, list):
        output_metadata = self.output_metadata
        self.output_metadata = []
        self.output_metadata.append(output_metadata)
    for metadata in self.output_metadata:
        for key, value in metadata.iteritems():
            # substitute every 'inputN' placeholder in the metadata value
            # with the corresponding argument's metadata (or '' when the
            # argument is not an EgadsData)
            try:
                match = re.compile('input[0-9]+').search(value)
                while match:
                    input = metadata.get(key)[match.start():match.end()]
                    input_index = int(input.strip('input'))
                    if isinstance(args[input_index], EgadsData):
                        metadata[key] = metadata[key].replace(input,
                            args[input_index].metadata.get(key, ''))
                    else:
                        metadata[key] = metadata[key].replace(input, '')
                    match = re.compile('input[0-9]+').search(metadata[key])
            except TypeError:
                # non-string values cannot be searched; skip substitution
                match = None
    output = self._call_algorithm(*args)
    # wrap each output with its (parented) metadata; tuple for multi-output
    if len(self.metadata['Outputs']) > 1:
        result = []
        for i, value in enumerate(output):
            self.output_metadata[i].set_parent(self.metadata)
            result.append(self._return_result(value,
                                              self.output_metadata[i]))
        result = tuple(result)
    else:
        self.output_metadata[0].set_parent(self.metadata)
        result = self._return_result(output, self.output_metadata[0])
    return result
def dispatch(self, tree): "Dispatcher function, dispatching tree type T to method _T." #display omp directive in python dump for omp in metadata.get(tree, openmp.OMPDirective): deps = list() for dep in omp.deps: old_file = self.f self.f = cStringIO.StringIO() self.dispatch(dep) deps.append(self.f.getvalue()) self.f = old_file directive = omp.s.format(*deps) self._Expr(ast.Expr(ast.Str(s=directive))) if isinstance(tree, list): for t in tree: self.dispatch(t) return meth = getattr(self, "_" + tree.__class__.__name__) meth(tree)
def process_omp_attachements(self, node, stmt, index=None):
    """Wrap `stmt` (or `stmt[index]`) in an AnnotatedStatement carrying
    the OpenMP directives attached to `node`.

    NOTE: mutates the directives in place (appends a private clause for
    the loop index of an `omp for` without a default clause).
    """
    l = metadata.get(node, OMPDirective)
    if l:
        directives = list()
        for directive in reversed(l):
            # special hook for default for index scope: make the loop
            # index private unless the directive already scopes it
            if isinstance(node, ast.For):
                target = node.target
                hasfor = 'for' in directive.s
                nodefault = 'default' not in directive.s
                noindexref = all(x.id != target.id
                                 for x in directive.deps)
                if hasfor and nodefault and noindexref:
                    directive.s += ' private({})'
                    directive.deps.append(ast.Name(target.id, ast.Param()))
            directives.append(directive)
        if index is None:
            stmt = AnnotatedStatement(stmt, directives)
        else:
            stmt[index] = AnnotatedStatement(stmt[index], directives)
    return stmt
def visit_Assign(self, node):
    """Lower a Python assignment to a C++ Assign statement.

    When the (single, named) target is local to this scope and the
    function has no yields, emit a typed declaration instead of a bare
    assignment and drop the target from the pending local declarations.
    """
    if not all(isinstance(n, ast.Name) or isinstance(n, ast.Subscript)
               for n in node.targets):
        raise PythranSyntaxError(
            "Must assign to an identifier or a subscript",
            node)
    value = self.visit(node.value)
    targets = [self.visit(t) for t in node.targets]
    alltargets = "= ".join(targets)
    islocal = any(metadata.get(t, metadata.LocalVariable)
                  for t in node.targets)
    if len(targets) == 1 and isinstance(node.targets[0], ast.Name):
        islocal |= node.targets[0].id in self.scope[node]
    if islocal and not self.yields:
        # remove this decl from local decls
        tdecls = {t.id for t in node.targets}
        self.ldecls = {d for d in self.ldecls if d.id not in tdecls}
        # add a local declaration
        alltargets = '{} {}'.format(self.local_types[node.targets[0]],
                                    alltargets)
    stmt = Assign(alltargets, value)
    return self.process_omp_attachements(node, stmt)
def do_call(connection_jar, transaction_id):
    """Check job metadata for the astore process pipelines.

    Indexes the metadata bodies by originalName, prints their navigator
    URLs, then asserts the expected counters for the repair, batch and
    stats jobs. Returns 0 on success, 1 on failure (shell convention).

    NOTE: Python 2 code (dict.iteritems). The lambda comparisons are
    *string* comparisons (e.g. x > "0"), presumably because the counter
    values arrive as strings — verify against assert_metadata.
    """
    # index by originalName; bodies without one collapse onto the '' key
    metadata_bodies = dict([
        (metadata_body['originalName']
         if 'originalName' in metadata_body and
            metadata_body['originalName'] is not None
         else '', metadata_body)
        for metadata_body in metadata.get(connection_jar, transaction_id)
    ])
    print("Found job metadata:")
    for name, metadata_body in metadata_bodies.iteritems():
        print("\t{}: {}".format(name, metadata_body['navigatorUrl']))
    success = \
        assert_metadata(metadata_bodies, 'asystem-astore-process-repair') and \
        assert_metadata(metadata_bodies, 'asystem-astore-process-repair', 'System_Exit') and \
        assert_metadata(metadata_bodies, 'asystem-astore-process-repair', 'STAGED_FILES_PURE', False, lambda x: x > "0") and \
        assert_metadata(metadata_bodies, 'asystem-astore-process-repair', 'STAGED_FILES_FAIL', False) and \
        assert_metadata(metadata_bodies, 'asystem-astore-process-repair', 'PROCESSED_FILES_FAIL', False) and \
        assert_metadata(metadata_bodies, 'asystem-astore-process-repair', 'PROCESSED_FILES_PURE', False, lambda x: x >= "0") and \
        assert_metadata(metadata_bodies, 'asystem-astore-process-batch') and \
        assert_metadata(metadata_bodies, 'asystem-astore-process-batch', 'System_Exit') and \
        assert_metadata(metadata_bodies, 'asystem-astore-process-batch', 'STAGED_FILES_PURE', False, lambda x: x > "0") and \
        assert_metadata(metadata_bodies, 'asystem-astore-process-batch', 'STAGED_FILES_FAIL', False) and \
        assert_metadata(metadata_bodies, 'asystem-astore-process-batch', 'STAGED_FILES_TEMP', False) and \
        assert_metadata(metadata_bodies, 'asystem-astore-process-batch', 'STAGED_PARTITIONS_TEMP', False) and \
        assert_metadata(metadata_bodies, 'asystem-astore-process-batch', 'PROCESSED_FILES_FAIL', False) and \
        assert_metadata(metadata_bodies, 'asystem-astore-process-batch', 'PROCESSED_FILES_PURE', False, lambda x: x >= "0") and \
        assert_metadata(metadata_bodies, 'asystem-astore-process-stats') and \
        assert_metadata(metadata_bodies, 'asystem-astore-process-stats', 'System_Exit') and \
        assert_metadata(metadata_bodies, 'asystem-astore-process-stats', 'STAGED_FILES_PURE', False, lambda x: x > "0") and \
        assert_metadata(metadata_bodies, 'asystem-astore-process-stats', 'STAGED_FILES_FAIL', False) and \
        assert_metadata(metadata_bodies, 'asystem-astore-process-stats', 'STAGED_FILES_TEMP', False) and \
        assert_metadata(metadata_bodies, 'asystem-astore-process-stats', 'STAGED_PARTITIONS_TEMP', False) and \
        assert_metadata(metadata_bodies, 'asystem-astore-process-stats', 'STAGED_PARTITIONS_DONE', False, lambda x: x >= "0") and \
        assert_metadata(metadata_bodies, 'asystem-astore-process-stats', 'STAGED_PARTITIONS_REDO', False) and \
        assert_metadata(metadata_bodies, 'asystem-astore-process-stats', 'PROCESSED_FILES_FAIL', False) and \
        assert_metadata(metadata_bodies, 'asystem-astore-process-stats', 'PROCESSED_PARTITIONS_DONE', False, lambda x: x >= "0") and \
        assert_metadata(metadata_bodies, 'asystem-astore-process-stats', 'PROCESSED_PARTITIONS_REDO', False) and \
        assert_metadata(metadata_bodies, 'asystem-astore-process-stats', 'PROCESSED_FILES_PURE', False, lambda x: x > "0")
    return 0 if success else 1
def visit_For(self, node):
    """Fully unroll a for-loop over a literal list when it is safe and
    small enough; otherwise return the node unchanged."""
    # first unroll children if needed or possible
    self.generic_visit(node)
    # if the user added some OpenMP directive, trust him and no unroll
    has_omp = metadata.get(node, OMPDirective)
    # a break or continue in the loop prevents unrolling too
    has_break = any(self.passmanager.gather(HasBreak, n, self.ctx)
                    for n in node.body)
    has_cont = any(self.passmanager.gather(HasContinue, n, self.ctx)
                   for n in node.body)
    # do not unroll too much to prevent code growth
    node_count = self.passmanager.gather(NodeCount, node, self.ctx)
    if type(node.iter) is ast.List:
        isvalid = not(has_omp or has_break or has_cont)
        total_count = node_count * len(node.iter.elts)
        issmall = total_count < LoopFullUnrolling.MAX_NODE_COUNT
        if isvalid and issmall:
            # each iteration becomes `target = elt` followed by a deep
            # copy of the loop body
            def unroll(elt):
                return ([ast.Assign([deepcopy(node.target)], elt)] +
                        deepcopy(node.body))
            return reduce(list.__add__, map(unroll, node.iter.elts))
    return node
# NOTE(review): fragment of a CLI command dispatcher; the argparse setup
# and the preceding command branch are outside the visible chunk.
if args.verify is False:
    parsecontents.query(args.name, repo=args.host)
else:
    parsecontents.query_verify(args.name, repo=args.host)

# get metadata command
if args.cmd == 'meta':
    # default host/type when not supplied on the command line
    if args.host is None:
        args.host = "hbb1.oscwii.org"
    if args.type is None:
        args.type = "default"
    appmeta = metadata.get(app_name=args.name, type=args.type, repo=args.host)
    if appmeta is not None:
        print(appmeta)

# get list of repos on server command
if args.cmd == 'repo-list':
    parsecontents.repository_list()

# get the entire repo command
if args.cmd == 'get-all':
    args.output = "default"
    if args.host is None:
        args.host = "hbb1.oscwii.org"
# NOTE(review): fragment — this if/else sits inside a flag-parsing loop
# whose header is outside the visible chunk, and the trailing `else:`
# body continues beyond it.
if ARGS[index] in COMMAND_LINE_FLAGS:
    ARGUMENTS[ARGS[index]] = ARGS[index+1]
else:
    # unknown flag: record a non-fatal warning in the JSON output
    OUTPUT_DICTIONARY[gb.ERRS].append({gb.ERR_TYPE: 'Warning', gb.ERR_STR: 'nonfatal error: \
unrecognized flag: ' + ARGS[index] + ', this flag will not be excluded. Refer to ' + \
NLP_ENGINE_PATH + 'COMMAND_LINE_FLAGS.txt for a complete list and description of command line flags'})

## build the dictionary for the json output ##
OUTPUT_DICTIONARY[gb.CNTL] = {}
OUTPUT_DICTIONARY[gb.CNTL]["engineVersion"] = __version__
OUTPUT_DICTIONARY[gb.CNTL]["referenceId"] = "12345"
OUTPUT_DICTIONARY[gb.CNTL]["docVersion"] = "document version"
OUTPUT_DICTIONARY[gb.CNTL]["source"] = "document source"
OUTPUT_DICTIONARY[gb.CNTL]["docDate"] = "doc date"
OUTPUT_DICTIONARY[gb.CNTL]["processDate"] = str(datetime.today().isoformat())
# NOTE(review): rebinds the imported `metadata` module name to its
# result; the module is unusable afterwards — presumably intentional
metadata = metadata.get(NLP_ENGINE_PATH, ARGUMENTS)
OUTPUT_DICTIONARY[gb.CNTL]["metadata"] = metadata
OUTPUT_DICTIONARY[gb.REPORTS] = []
## add in flag info to the json output dictionary
OUTPUT_DICTIONARY[gb.CNTL]["docName"] = ARGUMENTS.get('-f')
OUTPUT_DICTIONARY[gb.CNTL]["docType"] = ARGUMENTS.get('-t')
OUTPUT_DICTIONARY[gb.CNTL]["diseaseGroup"] = ARGUMENTS.get('-g')
## ERR out for missing flags that are required ##
MISSING_FLAGS = REQUIRED_FLAGS-set(ARGUMENTS.keys())
if len(MISSING_FLAGS) > 0:
    for each_flag in MISSING_FLAGS:
        sys.stderr.write('FATAL ERROR: missing required flag: ' + each_flag + ' ' + COMMAND_LINE_FLAGS[each_flag][1])
    sys.exit(1)
else:
    # NOTE(review): fragment cut mid-statement — the else body continues
    # beyond the visible chunk
# NOTE(review): fragment (near-duplicate of the previous snippet, with
# PEP 8 spacing) — the enclosing flag-parsing loop header is outside the
# visible chunk and the trailing `else:` body continues beyond it.
if ARGS[index] in COMMAND_LINE_FLAGS:
    ARGUMENTS[ARGS[index]] = ARGS[index + 1]
else:
    # unknown flag: record a non-fatal warning in the JSON output
    OUTPUT_DICTIONARY[gb.ERRS].append({gb.ERR_TYPE: 'Warning', gb.ERR_STR: 'nonfatal error: \
unrecognized flag: ' + ARGS[index] + ', this flag will not be excluded. Refer to ' + \
NLP_ENGINE_PATH + 'COMMAND_LINE_FLAGS.txt for a complete list and description of command line flags'})

## build the dictionary for the json output ##
OUTPUT_DICTIONARY[gb.CNTL] = {}
OUTPUT_DICTIONARY[gb.CNTL]["engineVersion"] = __version__
OUTPUT_DICTIONARY[gb.CNTL]["referenceId"] = "12345"
OUTPUT_DICTIONARY[gb.CNTL]["docVersion"] = "document version"
OUTPUT_DICTIONARY[gb.CNTL]["source"] = "document source"
OUTPUT_DICTIONARY[gb.CNTL]["docDate"] = "doc date"
OUTPUT_DICTIONARY[gb.CNTL]["processDate"] = str(datetime.today().isoformat())
# NOTE(review): rebinds the imported `metadata` module name to its
# result; the module is unusable afterwards — presumably intentional
metadata = metadata.get(NLP_ENGINE_PATH, ARGUMENTS)
OUTPUT_DICTIONARY[gb.CNTL]["metadata"] = metadata
OUTPUT_DICTIONARY[gb.REPORTS] = []
## add in flag info to the json output dictionary
OUTPUT_DICTIONARY[gb.CNTL]["docName"] = ARGUMENTS.get('-f')
OUTPUT_DICTIONARY[gb.CNTL]["docType"] = ARGUMENTS.get('-t')
OUTPUT_DICTIONARY[gb.CNTL]["diseaseGroup"] = ARGUMENTS.get('-g')
## ERR out for missing flags that are required ##
MISSING_FLAGS = REQUIRED_FLAGS - set(ARGUMENTS.keys())
if len(MISSING_FLAGS) > 0:
    for each_flag in MISSING_FLAGS:
        sys.stderr.write('FATAL ERROR: missing required flag: ' + each_flag + ' ' + COMMAND_LINE_FLAGS[each_flag][1])
    sys.exit(1)
else:
    # NOTE(review): fragment cut mid-statement — the else body continues
    # beyond the visible chunk
def visit_Assign(self, node):
    """Collect every plain-name assignment target that is not marked as a
    local variable."""
    for target in node.targets:
        assert isinstance(target, (ast.Name, ast.Subscript))
        keep = (isinstance(target, ast.Name)
                and not md.get(target, md.LocalVariable))
        if keep:
            self.result.add(target)
def visit_For(self, node):
    """Lower a Python for-loop to a C++ iterator-based For statement.

    Also handles: generator functions (declarations hoisted into
    extra_declarations), OpenMP parallel directives (iterator added to a
    shared clause), comprehension reserve hints, and for/else via a
    jump label.
    """
    if not isinstance(node.target, ast.Name):
        raise PythranSyntaxError(
            "Using something other than an identifier as loop target",
            node.target)
    iter = self.visit(node.iter)
    target = self.visit(node.target)

    # for/else needs a label to jump to when no break occurred
    if node.orelse:
        break_handler = "__no_breaking{0}".format(len(self.break_handler))
    else:
        break_handler = None
    self.break_handler.append(break_handler)

    # fresh names for the materialised iterable and its iterator
    local_iter = "__iter{0}".format(len(self.break_handler))
    local_target = "__target{0}".format(len(self.break_handler))

    local_iter_decl = Assignable(DeclType(iter))
    local_target_decl = NamedType("{0}::iterator".format(local_iter_decl))
    if self.yields:
        # generator: declarations live in the generator state object
        self.extra_declarations.append((local_iter, local_iter_decl,))
        self.extra_declarations.append((local_target, local_target_decl,))
        local_target_decl = ""
        local_iter_decl = ""
        target_decl = ""
    else:
        target_decl = ("auto"
                       if metadata.get(node.target, metadata.LocalVariable)
                       else "")

    loop_body = [self.visit(n) for n in node.body]

    self.break_handler.pop()

    # eventually add local_iter in a shared clause
    omp = metadata.get(node, OMPDirective)
    if omp:
        for directive in omp:
            if 'parallel' in directive.s:
                directive.s += ' shared({})'
                directive.deps.append(ast.Name(local_iter, ast.Param()))

    prelude = Statement("{0} {1} = {2}".format(
        local_iter_decl, local_iter, iter)
        )
    # bind the loop target from the iterator at the top of each iteration
    loop_body_prelude = Statement(
        "{0} {1}= *{2}".format(
            target_decl,
            target,
            local_target)
        )
    loop = For(
        "{0} {1} = {2}.begin()".format(
            local_target_decl,
            local_target,
            local_iter),
        "{0} < {1}.end()".format(
            local_target,
            local_iter),
        "++{0}".format(local_target),
        Block([loop_body_prelude] + loop_body))
    stmts = [prelude, loop]

    # in that case when can proceed to a reserve
    for comp in metadata.get(node, metadata.Comprehension):
        stmts.insert(1, Statement("pythonic::reserve({0},{1})".format(
            comp.target,
            local_iter)))

    if break_handler:
        orelse = map(self.visit, node.orelse)
        orelse_label = Statement("{0}:".format(break_handler))
        stmts.append(Block(orelse + [orelse_label]))

    return Block(self.process_omp_attachements(node, stmts, 1))
def visit_For(self, node):
    """Lower a Python for-loop to a C++ For (or range-based AutoFor).

    AutoFor is used when the target is a genuine local, the function has
    no yields and no OpenMP directives apply. Also handles generator
    declaration hoisting, OpenMP shared clauses, comprehension reserve
    hints, and for/else via a jump label.
    """
    if not isinstance(node.target, ast.Name):
        raise PythranSyntaxError(
            "Using something other than an identifier as loop target",
            node.target)
    iter = self.visit(node.iter)
    target = self.visit(node.target)

    # for/else needs a label to jump to when no break occurred
    if node.orelse:
        break_handler = "__no_breaking{0}".format(len(self.break_handlers))
    else:
        break_handler = None
    self.break_handlers.append(break_handler)

    # fresh names for the materialised iterable and its iterator
    local_iter = "__iter{0}".format(len(self.break_handlers))
    local_target = "__target{0}".format(len(self.break_handlers))

    local_iter_decl = Assignable(DeclType(iter))
    local_target_decl = NamedType("{0}::iterator".format(local_iter_decl))
    if self.yields:
        # generator: declarations live in the generator state object
        self.extra_declarations.append((local_iter, local_iter_decl,))
        self.extra_declarations.append((local_target, local_target_decl,))
        local_target_decl = ""
        local_iter_decl = ""

    loop_body = Block(map(self.visit, node.body))

    self.break_handlers.pop()

    # eventually add local_iter in a shared clause
    omp = metadata.get(node, OMPDirective)
    if omp:
        for directive in omp:
            if 'parallel' in directive.s:
                directive.s += ' shared({})'
                directive.deps.append(ast.Name(local_iter, ast.Param()))

    prelude = Statement("{0} {1} = {2}".format(
        local_iter_decl, local_iter, iter)
        )

    # range-based for is only valid for a plain local target, outside
    # generators and OpenMP regions
    auto_for = bool(metadata.get(node.target, metadata.LocalVariable))
    auto_for |= (type(node.target) is ast.Name
                 and node.target.id in self.scope[node])
    auto_for &= not self.yields and not omp

    loop_body = self.process_locals(node, loop_body, node.target.id)

    if auto_for:
        # target declared by the range-for itself: drop its pending decl
        self.ldecls = {d for d in self.ldecls if d.id != node.target.id}
        loop = AutoFor(target, local_iter, loop_body)
    else:
        if node.target.id in self.scope[node] and not self.yields:
            self.ldecls = {d for d in self.ldecls
                           if d.id != node.target.id}
            local_type = "typename decltype({})::reference ".format(
                local_target)
        else:
            local_type = ""
        # bind the loop target from the iterator each iteration
        loop_body_prelude = Statement("{} {}= *{}".format(local_type,
                                                          target,
                                                          local_target))
        loop = For(
            "{0} {1} = {2}.begin()".format(
                local_target_decl,
                local_target,
                local_iter),
            "{0} < {1}.end()".format(
                local_target,
                local_iter),
            "++{0}".format(local_target),
            Block([loop_body_prelude, loop_body])
            )
    stmts = [prelude, loop]

    # in that case when can proceed to a reserve
    for comp in metadata.get(node, metadata.Comprehension):
        stmts.insert(1,
                     Statement("pythonic::utils::reserve({0},{1})".format(
                         comp.target,
                         local_iter)))

    if break_handler:
        orelse = map(self.visit, node.orelse)
        orelse_label = Statement("{0}:".format(break_handler))
        stmts.append(Block(orelse + [orelse_label]))

    return Block(self.process_omp_attachements(node, stmts, 1))
# NOTE(review): fragment — the opening of verify_arguments() (its `def`
# line and first check) lies outside the visible chunk; indentation of
# the leading returns is reconstructed.
        return False
    if not os.path.isdir(sys.argv[1]):
        return False
    if os.path.isfile(sys.argv[2]) or os.path.isdir(sys.argv[2]):
        return False
    return True

assert verify_arguments(), "Usage: combine_gridded_database.py db_folder output.db"

db_folder = sys.argv[1]
conn = sqlite3.connect(sys.argv[2])
# allow sqlite to memory-map up to ~5 GB of the database file
conn.execute('pragma mmap_size=5000000000;')
md = metadata.get(db_folder)
create_table_columns = ""
all_columns = md["columns"]
all_columns_data_types = md["columns_datatypes"]
# create the columns string you'd send to a CREATE TABLE command in sql
for i in range(0, len(all_columns)):
    col_title = all_columns[i]
    col_data_type = all_columns_data_types[i]
    create_table_columns += col_title + " " + col_data_type + ","
# strip the trailing comma before closing the column list
create_table_str = "CREATE TABLE gaia (" + create_table_columns[:-1] + ")"
conn.execute(create_table_str)