def basic_varchar(table, attr):
    if not attr.parameters:
        msg = ("Internal error: Parameter of attribute " + attr.name
               + "'s data type disappeared. Sorry for that.\n")
        errprint(msg, ERRCODE["INTERNAL"])
    max_range = attr.parameters[0]  # the only parameter states the maximum length
    min_range = 2  # to prevent provoking users
    if max_range == 1:
        min_range = 1
    # adjusting the max range
    if max_range > 10:
        max_range = 10
    # pronounceable consonant-vowel syllable pattern
    regex = r'[BCDFGHJKLMNPQRSTVWXZ][aeiouy]([bcdfghjklmnpqrstvwxz][aeiouy][bcdfghjklmnpqrstvwxz]?)+'
    string = exrex.getone(regex)
    x = random.randint(min_range, max_range)  # random varchar length for this iteration
    value = string[:x]  # cuts the obtained string
    # remove a trailing whitespace if present
    if value.endswith(' '):
        value = value[:-1]
    return "'" + value + "'"  # quoted, as string values require
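# A minimal standalone sketch of the syllable generator above, for illustration.
# '_demo_varchar_sample' is a hypothetical helper, not part of the seeder; it
# assumes only the third-party 'exrex' package used throughout this module.
def _demo_varchar_sample(max_len=10):
    pattern = r'[BCDFGHJKLMNPQRSTVWXZ][aeiouy]([bcdfghjklmnpqrstvwxz][aeiouy][bcdfghjklmnpqrstvwxz]?)+'
    word = exrex.getone(pattern)  # one random match, e.g. "Tavelo"
    return word[:random.randint(2, max_len)]  # cut to a random length, like basic_varchar does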
def dsl_generator(table_list, DEST, fill_cnt):
    # initiates the generator
    initiate_gen(table_list)
    if DEST == "file":
        home = os.path.expanduser('~')  # home directory for this user
        pth = home + "/dsl.txt"  # the path representing ~/dsl.txt
        try:
            fd = open(pth, 'w')
        except IOError:
            msg = "Runtime error: Did not manage to create a destination file '~/dsl.txt'."
            errprint(msg, ERRCODE["RUNTIME"])
    else:
        fd = sys.stdout
    for table in table_list:
        fd.write("TABLE:" + table.name + "(" + str(fill_cnt) + ")\n")
        for attr in table.attr_list:
            fd.write("\t::" + attr.name + "\n")
            fd.write(get_dtype_line(attr) + "\n")
            fd.write(get_fill_line(attr) + "\n")
            fd.write(get_constr_line(attr) + "\n")
        fd.write("\n")
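# For illustration, one table block emitted by dsl_generator has roughly this
# shape (the exact attribute lines come from get_dtype_line/get_fill_line/
# get_constr_line, so the placeholders below are assumptions):
#
#   TABLE:users(10)
#       ::id
#   <data type line>
#   <fill method line>
#   <constraint line>
#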
def table_filler(table):
    if not table_check(table):  # checks if we know enough to fill this table
        return True  # means there is an unfinished table
    for i in range(0, table.fill_count):
        flag = True
        timeout = 0
        while flag and timeout < 100:
            flag = False
            result = get_values(table)
            values = result['values']
            unique_values = result['unique_values']
            if len(unique_values) > 0 and not check_unique_group(table, unique_values):
                flag = True  # this set of values has already been used; we generate again
                remove_last_saved(table)  # removes all values possibly saved during this round
                timeout += 1
            elif len(unique_values) > 0:  # the group is unique, as checked in the branch above
                table.unique_values.append(unique_values)  # store it so this combo cannot be used again
        if timeout >= 100:
            msg = ("Runtime error: The timeout for finding a new unique value combination for table '"
                   + table.name + "' exceeded.\n"
                   "Tip: Check if the given fill method offers enough unique combinations.\n")
            errprint(msg, ERRCODE["RUNTIME"])
        string = "INSERT INTO " + table.name + "\n" + "VALUES (" + values + ");\n"
        print(string)
    textbank_close(table)  # closes the file if it's opened
    table.solved = True
    return False  # no problems with this table
def p_multi_1param(p):
    '''multi_1param : idOrDtypeName COMMA
                    | idOrDtypeName'''
    debug("multi_1param")
    global alter_attr
    global alter_table
    global TO_SET_CONSTR
    global PARAM_COUNTER
    alter_attr = None
    # finding the attribute to be altered
    for attr in alter_table.attr_list:
        if attr.name == p[1]:  # p[1] stands for the attribute name in both cases
            alter_attr = attr
            break
    # did we find anything?
    if alter_attr is None:
        msg = ("Input error: Couldn't find the given attribute name '" + p[1]
               + "' in the list of attributes of table '" + alter_table.name
               + "' while processing a multiparameter ADD CONSTRAINT.\n")
        errprint(msg, ERRCODE["INPUT"])
    alter_attr.constraint_flag = True
    alter_attr.constraint_cnt += 1  # increments the counter
    TO_SET_CONSTR.append(alter_attr)  # we pass the attribute to alter
def check_regex_compatibility(attr):
    if attr.data_type not in ("VARCHAR", "CHAR", "INT"):
        msg = ("Semantic Error: The given fill method '" + attr.fill_method
               + "' is incompatible with the given data type '" + attr.data_type
               + "' in table '" + new_table.name
               + "', attribute '" + attr.name + "'.\n")
        errprint(msg, ERRCODE["SEMANTIC"])
def check_collision():
    global new_attribute
    if new_attribute.serial:
        if new_attribute.null or new_attribute.foreign_key:
            msg = ("Semantic Error: Attribute '" + new_attribute.name + "' has a constraint incompatible "
                   + "with its data type '" + new_attribute.data_type + "'. Colliding constraint: "
                   + new_attribute.constraint_type + ".\n")
            errprint(msg, ERRCODE["SEMANTIC"])
def p_alterHeader(p):
    'alterHeader : ALTER TABLE ONLY IDENTIFIER'
    debug("alterHeader")
    global alter_table
    if p[4] not in name_dict:
        msg = ("Input error: Table '" + p[4]
               + "' given in the ALTER part couldn't be found.\n")
        errprint(msg, ERRCODE["INPUT"])
    alter_table = name_dict[p[4]]
def create_table_dict(table_list):
    global TABLE_DICT
    for table in table_list:
        if table.name in TABLE_DICT:
            msg = "Semantic error: Duplicate table names found!\n"
            errprint(msg, ERRCODE["SEMANTIC"])
        TABLE_DICT[table.name] = table
def on_stop_check():
    global checker_thread, thread_lock
    with thread_lock:
        if checker_thread is not None:
            printer.errprint('The client stopped the self-check\n')
            stop_thread(checker_thread)
            checker_thread = None
            socketio.emit('checker_state', 0)
            progressbar.update(0, 0)
        else:
            printer.errprint('No self-check is running in the background\n')
def check_valid(attr):
    method = attr.fill_method
    global new_table
    if method == 'fm_regex':
        # check_regex_compatibility(attr) is skipped here; regex filling is
        # left available for all data types, on the user's responsibility
        try:
            re.compile(str(attr.fill_parameters[0]))  # check that the given parameter is a valid regex
        except re.error:
            msg = ("Semantic Error: Wrong parameter given to fill method '" + attr.fill_method
                   + "' in table '" + new_table.name
                   + "', attribute '" + attr.name + "'.\n")
            errprint(msg, ERRCODE["SEMANTIC"])
    elif method == 'fm_textbank':
        if attr.data_type not in ("VARCHAR", "CHAR", "TEXT"):
            msg = ("Semantic Error: The given fill method '" + attr.fill_method
                   + "' is incompatible with the given data type '" + attr.data_type
                   + "' in table '" + new_table.name
                   + "', attribute '" + attr.name + "'.\n")
            errprint(msg, ERRCODE["SEMANTIC"])
    elif method == 'fm_reference':
        string = attr.fill_parameters[0]
        pos = string.find(":")
        if pos == -1:  # didn't find the colon
            msg = ("Semantic Error: Wrong parameter given to fill method '" + attr.fill_method
                   + "' in table '" + new_table.name
                   + "', attribute '" + attr.name + "'.\n")
            errprint(msg, ERRCODE["SEMANTIC"])
        new_table.fk = True  # sets the flag that the table contains a foreign key
        attr.foreign_key = True  # this flag allows us to fill this attr properly
        attr.fk_table = string[0:pos]  # what is before the colon
        attr.fk_attribute = string[(pos + 1):]
        if new_table.name == attr.fk_table:
            msg = ("Semantic Error: Foreign key referencing the same table not supported. Table: '"
                   + new_table.name + "', attribute: '" + attr.name + "'.\n")
            errprint(msg, ERRCODE["SEMANTIC"])
    elif method == 'fm_default':
        if not attr.default:
            msg = "Semantic Error: Fill method fm_default stated but no DEFAULT value given.\n"
            errprint(msg, ERRCODE["SEMANTIC"])
def background_thread_check():
    global checker_thread, dic_min_error_rev, thread_lock
    try:
        dic_min_error_rev = main.do_auto_check(dic_min_error_rev)
        file = open("./dic_min_error_rev", "w")
        file.write(json.dumps(dic_min_error_rev))
        file.close()
    except Exception as ex:
        # get the current system exception
        ex_type, ex_value, ex_traceback = sys.exc_info()
        # extract unformatted stack traces as tuples
        trace_back = traceback.extract_tb(ex_traceback)
        # format the stacktrace
        stack_trace = list()
        for trace in trace_back:
            stack_trace.append("File : %s , Line : %d, Func.Name : %s, Message : %s"
                               % (trace[0], trace[1], trace[2], trace[3]))
        printer.errprint("Exception type : %s " % ex_type.__name__)
        printer.errprint("Exception message : %s" % ex_value)
        printer.errprint("Stack trace : %s" % stack_trace)
        printer.errprint("Check ended unexpectedly")
        progressbar.update(0, 0)
    with thread_lock:
        print("Thread finished")
        checker_thread = None
        socketio.emit('checker_state', 0)
def get_foreign(attr):
    # special case: the referenced table is prefilled in the database already,
    # or the foreign key is filled by inserting DEFAULT
    if attr.fk_table.fill_count == "FILLED" or attr.fk_attribute.serial:
        value = ("(select " + attr.fk_attribute.name + " from " + attr.fk_table.name
                 + " offset random() * (select count(*) from "
                 + attr.fk_table.name + ") limit 1)")
        return value
    if not attr.fk_assigned:  # we encounter this attr for the first time
        if (attr.unique or attr.primary_key) and not attr.unique_group:
            attr.fk_values = attr.fk_attribute.values_list[:]  # duplicates the values list
        else:
            attr.fk_values = attr.fk_attribute.values_list  # only assigns the existing list
        attr.fk_assigned = True  # sets the flag - the list of values has been set
    # get the list of the desired values to work with easily
    val_list = attr.fk_values
    length = len(val_list)
    if length == 0:
        i = 0  # we have one item left (unique-fk combo issue only)
    else:
        i = random.randint(0, length - 1)  # randomly chooses one index
    # the branch below applies only if the attr is not part of a group;
    # the combo of a single foreign key and group unique/pk is unusual, but just to be sure
    if (attr.unique or attr.primary_key) and not attr.unique_group:
        if len(val_list) != 0:  # we have something to take from
            value = val_list[i]
            del val_list[i]  # removes the value so we can't use it again
        else:
            msg = ("Input error: Unique foreign key attribute '" + attr.name + "' cannot be filled "
                   + "as the source attribute '" + attr.fk_attribute.name
                   + "' doesn't offer enough unique values.\n"
                   + "NOTE: Seeder can only work with values it's generating in this run - not with any others.\n")
            errprint(msg, ERRCODE["INPUT"])
    else:
        value = val_list[i]  # we don't care if it's repeated
    return value
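# For illustration: with a prefilled source table, the fragment returned by
# get_foreign for a hypothetical fk_table "users" / fk_attribute "id" is
#
#   (select id from users offset random() * (select count(*) from users) limit 1)
#
# so PostgreSQL picks one existing row at random at INSERT time, instead of
# this script tracking the already-inserted values itself.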
def get_result(self, offset, count, search) -> list:
    rev_list = []
    db = EasySqlite('rfp.db')
    sql = "SELECT * FROM " + self.CONST_TABLE_NAME + " LIMIT ?, ?"
    if search != "":
        db_res = db.execute(
            "select file_path from " + self.CONST_TABLE_NAME
            + "_fts where body like '%{0}%'".format(search), [], False, False)
        condition = '\'' + '\',\''.join(fp[0] for fp in db_res) + '\''
        sql = ("SELECT * FROM " + self.CONST_TABLE_NAME
               + " WHERE file_path in ({0}) LIMIT ?, ?".format(condition))
    for row in db.execute(sql, (offset, count), False, False):
        project = row[0]
        file_name = row[1]
        file_path = row[2]
        time = row[3]
        report_path = row[4]
        log = row[5]
        html_file_path = ""
        if report_path != "":
            if not os.path.exists(report_path):
                printer.errprint("cannot find report file: " + report_path
                                 + ", src_file: " + file_path)
                continue
            report_filename = os.path.splitext(report_path)[0].split("\\")[-1]
            html_file_path = "{0}\\{1}\\index.html".format(
                config.get_dir_pvs_report(), report_filename)
            # create an HTML report if one doesn't exist yet
            if not os.path.exists(html_file_path):
                self.__convert_to_html(report_path)
        rev_list.append({
            "project": project,
            "file": file_name,
            "file_path": file_path,
            "time": time,
            "html_path": html_file_path,
            "report_path": "download\\" + report_path,
            "log": [log]
        })
    return rev_list
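# A minimal sketch of an injection-safe variant of the FTS lookup above,
# assuming EasySqlite.execute forwards its second argument as DB-API bind
# parameters (as the "LIMIT ?, ?" call suggests); '_search_fts_safe' is a
# hypothetical helper, shown only to illustrate binding the LIKE pattern
# instead of formatting user input into the SQL string.
def _search_fts_safe(db, table_name, search):
    sql = "select file_path from " + table_name + "_fts where body like ?"
    return db.execute(sql, ('%' + search + '%',), False, False)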
def basic_bit(table, attr):
    if not attr.parameters:
        msg = ("Internal error: Parameter of attribute " + attr.name
               + "'s data type disappeared. Sorry for that.\n")
        errprint(msg, ERRCODE["INTERNAL"])
    length = attr.parameters[0]
    regex = '[01]{' + str(length) + '}'
    value = exrex.getone(regex)
    if attr.data_type == "VARBIT":
        # randomly choose a length between 1 and the maximum
        x = random.randint(1, length)
        value = value[:x]
    return value
def initiate_gen(table_list):
    for table in table_list:
        for attr in table.attr_list:
            if attr.constraint_flag and attr.constraint_cnt > 1:
                # TODO: check if unique and primary key can be together
                check1 = attr.unique and attr.primary_key
                check2 = attr.null and attr.primary_key
                check3 = attr.null and attr.not_null
                check = check1 or check2 or check3
                if check:
                    msg = ("Input error: Incompatible constraints used for attribute '"
                           + attr.name + "', table '" + table.name + "'.\n")
                    errprint(msg, ERRCODE["INPUT"])
def basic_char(table, attr):
    if not attr.parameters:
        msg = ("Internal error: Parameter of attribute " + attr.name
               + "'s data type disappeared. Sorry for that.\n")
        errprint(msg, ERRCODE["INTERNAL"])
    length = attr.parameters[0]
    regex = r'[a-zA-Z0-9_]+'
    string = exrex.getone(regex)
    # if the generated string was too short, extend it
    while len(string) < length:
        string = string + exrex.getone(regex)
    value = string[:length]  # cuts the obtained string
    return "'" + value + "'"
def p_sequenceBlock(p):
    'sequenceBlock : SEQUENCE IDENTIFIER OWNED BY IDENTIFIER PERIOD IDENTIFIER SEMICOLON'
    debug("sequenceBlock")
    # find the table
    if p[5] not in name_dict:
        msg = ("Semantic error: Table '" + p[5]
               + "' given in the ALTER SEQUENCE part couldn't be found.\n")
        errprint(msg, ERRCODE["SEMANTIC"])
    seq_table = name_dict[p[5]]
    name = p[7]  # name of the attribute
    seq_attr = None
    # search for the attribute
    for attr in seq_table.attr_list:
        if attr.name == name:
            seq_attr = attr
            break
    # if we didn't find it, there is a problem (shouldn't happen with an
    # unedited dump, but you never know)
    if seq_attr is None:
        msg = ("Semantic error: Couldn't find the attribute which SEQUENCE '" + p[2]
               + "' refers to in table '" + p[5]
               + "' while processing an ALTER SEQUENCE statement.\n")
        errprint(msg, ERRCODE["SEMANTIC"])
    # now we simply change the data type to the corresponding serial type
    if seq_attr.data_type == "integer":
        seq_attr.data_type = "serial"
    elif seq_attr.data_type == "bigint":
        seq_attr.data_type = "bigserial"
    else:
        # any other type that has a filling sequence gets the serial dtype
        # and will get 'DEFAULT' as its insert value
        seq_attr.data_type = "serial"
def p_alterBody(p):
    '''alterBody : ADD CONSTRAINT IDENTIFIER PRIMARY KEY LPAREN idOrDtypeName RPAREN
                 | ADD CONSTRAINT IDENTIFIER FOREIGN KEY LPAREN idOrDtypeName RPAREN REFERENCES IDENTIFIER LPAREN IDENTIFIER RPAREN
                 | multi_attr_constr'''
    debug("alterBody")
    global alter_attr
    global alter_table
    alter_attr = None
    # the multi_attr constraint has been solved already;
    # this branch goes just for the KEYs
    if len(p) != 2:
        constr = p[4] + " " + p[5]
        # finding the attribute to be altered
        for attr in alter_table.attr_list:
            if attr.name == p[7]:  # p[7] stands for the attribute name in both cases
                alter_attr = attr
                break
        # did we find anything?
        if alter_attr is None:
            msg = ("Input error: Couldn't find the given attribute name '" + p[7]
                   + "' in the list of attributes of table '" + alter_table.name
                   + "' while processing a " + constr + ".\n")
            errprint(msg, ERRCODE["INPUT"])
        alter_attr.constraint_flag = True
        alter_attr.constraint_cnt += 1  # increments the counter
        alter_attr.set_constraint(constr)
        # for a foreign key
        if p[4] == 'FOREIGN':
            # existence-of-foreign-table check
            if p[10] not in name_dict:
                msg = "Input error: Foreign table '" + p[10] + "' couldn't be found.\n"
                errprint(msg, ERRCODE["INPUT"])
            alter_attr.fk_table = name_dict[p[10]]  # gets the foreign table object
            for attr in alter_attr.fk_table.attr_list:
                if attr.name == p[12]:
                    alter_attr.fk_attribute = attr
                    break
            # existence-of-foreign-attribute check
            if alter_attr.fk_attribute is None:
                msg = ("Input error: Foreign attribute '" + p[12]
                       + "' couldn't be found in table '" + p[10]
                       + "' while processing a " + constr + ".\n")
                errprint(msg, ERRCODE["INPUT"])
def fm_textbank(table, attr):
    # opening the file
    if not attr.textbank_open:
        path = attr.fill_parameters[0]
        try:
            attr.textbank_fd = open(path, 'r')
        except IOError:
            msg = "Input error: The given file of path '" + path + "' cannot be opened."
            errprint(msg, ERRCODE["INPUT"])
        # getting the line count
        for i, l in enumerate(attr.textbank_fd):
            pass
        attr.textbank_linecnt = i  # ignores the last line, assuming it's empty (else it would be i + 1)
        attr.textbank_open = True  # sets the flag
    x = True  # more-cycles flag
    timeout = 0
    while x:
        if timeout >= 100:
            msg = ("Runtime error: The timeout for finding a unique value in the given textbank '"
                   + attr.fill_parameters[0] + "' exceeded.")
            errprint(msg, ERRCODE["RUNTIME"])
        x = False
        attr.textbank_fd.seek(0)  # sets position at the beginning
        num = random.randint(1, attr.textbank_linecnt)  # random line number from 1 to the line count
        i = 1  # start index
        # cycle to find the right line
        for line in attr.textbank_fd:
            if i == num:  # once we get to the wanted line,
                value = line.rstrip()  # it contains the value we want (cuts the newline)
                break
            i = i + 1  # increments the index
        # some data-type-specific checking
        if attr.data_type == "CHAR" or attr.data_type == "VARCHAR":
            length = attr.parameters[0]
            # NOTE: CHAR speciality. The textbank is not checked for CHAR length
            # validity. If an unsuitable value is received, the received string
            # is either cut or repeatedly concatenated and then cut.
            # The provided textbank is fully at the user's responsibility.
            if attr.data_type == "CHAR":
                while len(value) < length:
                    value = value + value
            value = value[:length]  # cuts to the desired size
        # now checking for UNIQUE/PRIMARY KEY: if this constraint is present, we
        # check the retrieved value for presence in attribute.values_list. If it's
        # not there yet, the value is stored in the list and approved - else we
        # repeat all of the above. This might get very time-expensive with the
        # textbank method, so there is a 100-iteration timeout.
        if attr.constraint_type == "primary_key" or attr.constraint_type == "unique":
            if value in attr.values_list:
                x = True
                timeout += 1
            else:
                break
    attr.textbank_fd.seek(0)  # for potential other attributes using the same textbank
    return "'" + value + "'"
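# For comparison, a minimal sketch of a simpler line picker: read the bank once
# and sample with random.choice. It assumes the whole textbank fits in memory,
# which fm_textbank above deliberately avoids assuming; '_pick_random_line' is
# a hypothetical helper, not used by the seeder.
def _pick_random_line(path):
    with open(path, 'r') as fd:
        lines = [line.rstrip('\n') for line in fd if line.strip()]
    return random.choice(lines)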
def iniciate_fk(table_list):
    global TABLE_DICT
    create_table_dict(table_list)
    for table in table_list:
        if table.fill_count == "FILLED":  # this table is prefilled in the database
            table.solved = True  # so we are not filling it here
        if table.fk:  # if this table contains foreign keys
            for attr in table.attr_list:  # we cycle over its attributes
                # first, a compatibility-of-constraints check
                if attr.constraint_flag and attr.constraint_cnt > 1:
                    # TODO: check if unique and primary key can be together
                    check1 = attr.unique and attr.primary_key
                    check2 = attr.null and attr.primary_key
                    check3 = attr.null and attr.not_null
                    check = check1 or check2 or check3
                    if check:
                        msg = ("Input error: Incompatible constraints used for attribute '"
                               + attr.name + "', table '" + table.name + "'.\n")
                        errprint(msg, ERRCODE["INPUT"])
                # and the main part: resolving the foreign key dependencies
                if attr.foreign_key:
                    ftable_name = attr.fk_table  # the name of the foreign table
                    fattr_name = attr.fk_attribute  # and of its attribute
                    # check that this table actually exists in our dictionary
                    if ftable_name in TABLE_DICT:
                        ftable = TABLE_DICT[ftable_name]  # foreign table object
                    else:
                        msg = "Input error: The given foreign table '" + ftable_name + "' doesn't exist."
                        errprint(msg, ERRCODE["INPUT"])
                    # we know the table exists; now let's try to find the attribute
                    fattr = None
                    for f_attribute in ftable.attr_list:
                        if f_attribute.name == fattr_name:  # this is the attribute the foreign key points to
                            fattr = f_attribute  # foreign attribute object
                            break
                    if fattr is None:
                        msg = ("Semantic error: The given foreign attribute '" + fattr_name
                               + "' doesn't exist in table '" + ftable_name + "'.")
                        errprint(msg, ERRCODE["SEMANTIC"])
                    # incompatible types
                    if fattr.data_type != attr.data_type and COMPATIBLE[fattr.data_type] != attr.data_type:
                        msg = ("Semantic error: The foreign-key-type attribute '" + attr.name
                               + "'s data type doesn't correspond with the data type of the attribute "
                               "it references. Table: '" + table.name + "'\n")
                        errprint(msg, ERRCODE["SEMANTIC"])
                    # data types might be the same, but if one is an array and the
                    # other is not, it would mean trouble
                    elif fattr.data_type == attr.data_type and not (fattr.array_flag == attr.array_flag):
                        msg = ("Semantic error: The foreign-key-type attribute '" + attr.name
                               + "'s data type doesn't correspond with the data type of the attribute "
                               "it references. One is an array, one is not. Table: '" + table.name + "'\n")
                        errprint(msg, ERRCODE["SEMANTIC"])
                    # check that an array of arrays of arrays doesn't refer to only an array
                    elif fattr.data_type == attr.data_type and fattr.array_flag and attr.array_flag:
                        if fattr.array_dim_cnt != attr.array_dim_cnt:
                            msg = ("Semantic error: The foreign-key-type attribute '" + attr.name
                                   + "' has a different count of dimensions than the array attribute "
                                   "it references. Table: '" + table.name + "'\n")
                            errprint(msg, ERRCODE["SEMANTIC"])
                    # NOTE: As stated in the PostgreSQL docs, all arrays behave as unbounded,
                    # whether a max size is given or not. So for now there is no reason to
                    # check the compatibility of individual array sizes. May change.
                    # now we have both objects; we can store them in the variables
                    attr.fk_table = ftable
                    attr.fk_attribute = fattr
                    attr.fk_attribute.fk_pointed = True
                    attr.fk_attribute.fk_times += 1  # increments the count of "how many point at me"
def get_values(table):
    unique_values = []  # stores all values that create a unique/pk group
    values = ""
    for attr in table.attr_list:
        new_val = None
        if attr.foreign_key:  # a foreign key is filled from existing values
            new_val = get_foreign(attr)
        elif attr.serial:
            new_val = "DEFAULT"  # next value of the sequence
        elif attr.array_flag:  # an array needs special treatment
            new_val = get_array(table, attr)
        else:
            try:
                fill = globals()[attr.fill_method]  # resolve the fill method by name
                new_val = fill(table, attr)
            except KeyError:
                msg = ("Internal error. Trouble using '" + attr.fill_method + "' method on attribute '"
                       + attr.name + "', table '" + table.name + "'.\n")
                errprint(msg, ERRCODE["INTERNAL"])
        # these combinations are filled with an embedded SELECT query, so no checking for them
        test0 = attr.unique and attr.foreign_key and not attr.fk_attribute.serial
        test1 = attr.unique and attr.foreign_key and not attr.fk_table.fill_count == "FILLED"
        # serial is unique by its sequence; if it overflows because of low capacity, it's the user's problem
        test2 = attr.unique and not attr.serial
        test3 = attr.primary_key and not attr.serial
        # if one test is true, we must ensure the inserted value hasn't been used yet
        if test0 or test1 or test2 or test3:
            timeout = 0
            while not check_unique(attr, new_val):  # validity check for the unique/primary key constraint
                if timeout >= 100:
                    msg = ("Runtime error: The timeout for finding a new unique value for attribute '"
                           + attr.name + "' exceeded.\n"
                           "Tip: Check if the given fill method offers enough unique values.\n")
                    errprint(msg, ERRCODE["RUNTIME"])
                if attr.foreign_key:
                    new_val = get_foreign(attr)
                else:
                    new_val = fill(table, attr)  # we call the fill method again (and again)
                timeout += 1
        # DEFAULT appearance chance
        if attr.default:
            chance = random.randint(0, 100)
            if chance < attr.default_value:
                new_val = 'DEFAULT'
        # NULL appearance chance
        if attr.null:
            chance = random.randint(0, 100)
            if chance < attr.constraint_parameters:
                new_val = 'NULL'
        values = values + str(new_val) + ", "  # appends the new value, comma-separated
        if attr.fk_pointed or attr.unique or attr.primary_key:  # we will need these values later
            if attr.unique_group and not attr.fk_pointed:  # this value has meaning only in the group
                unique_values.append(new_val)
            elif attr.unique_group and attr.fk_pointed:  # important alone and in the group as well
                attr.values_list.append(new_val)
                unique_values.append(new_val)
            else:
                attr.values_list.append(new_val)  # value important alone
    values = values[:-2]  # removes the trailing ', ' once we end
    return {'values': values, 'unique_values': unique_values}
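# The dispatch above resolves a fill-method name to a module-level function at
# runtime. A minimal standalone sketch of that pattern ('_dispatch_fill' is a
# hypothetical helper, shown only for illustration):
def _dispatch_fill(name, table, attr):
    try:
        fill = globals()[name]  # e.g. 'basic_char' -> the basic_char function
    except KeyError:
        raise RuntimeError("unknown fill method: " + name)
    return fill(table, attr)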
def get_array(table, attr):
    # as stated in the docs, only the BOX type uses a semicolon as the delimiter character
    if attr.data_type == "BOX":
        delim = ";"
    else:
        delim = ","
    cnt = attr.array_dim_cnt
    val_list = []
    total = 1
    # for [x][y][z] we compute x*y*z to know how many final values we want
    for x in attr.array_dim_size:
        total = total * x
    try:
        fill = globals()[attr.fill_method]  # resolve the fill method by name
    except KeyError:
        msg = ("Internal error. Trouble using '" + attr.fill_method + "' method on attribute '"
               + attr.name + "', table '" + table.name + "'.\n")
        errprint(msg, ERRCODE["INTERNAL"])
    # now we generate the values and store them
    for i in range(0, total):
        new_val = fill(table, attr)
        if new_val[0] == "'" and new_val[-1] == "'":
            new_val = new_val[1:-1]  # cuts both quotes
        # double-quote in case the value contains commas or curly braces
        new_val = "\"" + new_val + "\""
        val_list.append(new_val)  # append the newly received value to the list
    # now we put the array of cnt dimensions together (= the final value to fill the db with)
    old_list = val_list
    new_list = []
    # cycles dimension-times
    for i in range(0, cnt):
        j = cnt - (i + 1)  # reverse the indexing so we go from the back
        size = attr.array_dim_size[j]
        total = total // size  # 'total' groups of 'size' values each
        # for [3][2][2] this happens 3*2 times, next cycle 3 times, etc.
        for x in range(0, total):
            new_val = "{"
            # cycles size-times over the currently last dimension
            for y in range(0, size):
                val = old_list.pop()
                new_val = new_val + val + delim
            new_val = new_val[:-1] + "}"  # cut the last delimiter and add the closing '}'
            new_list.append(new_val)
        # all old_list values are now used and stored, concatenated, in new_list;
        # we replace old_list with new_list and start a fresh new_list
        old_list = new_list
        new_list = []
    # once we finally get out of this cycle, old_list contains a single item
    # made of all the previous items
    new_val = "'" + old_list.pop() + "'"
    return new_val
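# Worked example of the folding loop above, assuming an INT array declared as
# [2][2]: total = 4 values are generated, say 1, 2, 3, 4, each double-quoted
# into val_list as "1".."4". Pass 1 (innermost dimension, size 2) pops pairs
# from the back into {"4","3"} and {"2","1"}; pass 2 (outer dimension, size 2)
# folds those into {{"2","1"},{"4","3"}}, which is finally single-quoted into
# '{{"2","1"},{"4","3"}}' - a valid PostgreSQL array literal. The reversed
# order comes from list.pop() taking items from the end.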
else:  # stdout
    DEST = "stdout"
if args.count:
    fill_cnt = args.count
else:
    fill_cnt = 10  # default
# opening the given file
try:
    f = open(args.src, 'r')
except IOError:
    msg = "Input error: The given file of path '" + args.src + "' cannot be opened.\n"
    errprint(msg, ERRCODE["INPUT"])
# opening a temp file
try:
    tempf = tempfile.TemporaryFile()
except IOError:
    msg = "Runtime error: Did not manage to create a temporary file.\n"
    errprint(msg, ERRCODE["RUNTIME"])
# filters the dump so we get only the queries we are interested in
preparse(f, tempf)
# close the dump file
f.close()
def __gen_plog(self, max_rev):
    # the earliest revision that contains an error; used as the starting
    # point for future checks
    min_rev_has_error = max_rev
    os.system("if not exist {0} mkdir {0}".format(config.get_dir_pvs_plogs()))
    db = EasySqlite('rfp.db')
    # pvs-studio_cmd.exe returns a bitmask; map each error bit to its meaning
    ret_messages = [
        (1, "PVS-Studio: error (crash) during analysis of some source file(s)"),
        (2, "PVS-Studio: GeneralException"),
        (4, "PVS-Studio: some of the command line arguments passed to the tool were incorrect"),
        (8, "PVS-Studio: specified project, solution or analyzer settings file were not found"),
        (16, "PVS-Studio: specified configuration and (or) platform were not found in a solution file"),
        (32, "PVS-Studio: solution file or project is not supported or contains errors"),
        (64, "PVS-Studio: incorrect extension of analyzed project or solution"),
        (128, "PVS-Studio: incorrect or out-of-date analyzer license"),
        (512, "PVS-Studio: some issues were encountered while performing analyzer message suppression"),
        (1024, "PVS-Studio: indicates that analyzer license will expire in less than a month"),
    ]
    for parent, dirnames, filenames in os.walk("temp", followlinks=True):
        progressbar.set_total(len(filenames))
        for file in filenames:
            file_path = os.path.join(parent, file)
            filename = os.path.splitext(file)[0]
            output_file_path = "{0}\\{1}.plog".format(config.get_dir_pvs_plogs(), filename)
            if os.path.exists(output_file_path):
                os.remove(output_file_path)
            xmlRoot = etree.parse(file_path)
            pathEle = xmlRoot.find('./SourceFiles/Path')
            src_path = pathEle.text
            printer.aprint(self.get_name() + ' check of file {0} started'.format(src_path))
            cmd = ('pvs-studio_cmd.exe --target "{0}" --output "{1}" --platform "x64" '
                   '--configuration "Release" --sourceFiles "{2}" --settings "{3}" '
                   '--excludeProjects {4} 2>>pvs_err.log').format(
                       config.get_dir_sln(), output_file_path, file_path,
                       config.get_path_pvs_setting(), config.get_exclude_projects())
            ret = os.system(cmd)
            has_error = False
            for mask, message in ret_messages:
                if ret & mask:
                    printer.errprint(message)
            if ret & 256:
                has_error = True
                printer.warnprint("PVS-Studio: some issues were found in the source code")
            print("pvs-studio ret: " + str(ret))
            cur_file_max_rev = 0
            logs_json = []
            logsRoot = xmlRoot.find('logs')
            for log in logsRoot.iter('log'):
                log_json = {
                    "rev": log.attrib["rev"],
                    "author": log.attrib["author"],
                    "msg": log.attrib["msg"]
                }
                logs_json.append(log_json)
                revision = int(log.attrib["rev"])
                if cur_file_max_rev < revision:
                    cur_file_max_rev = revision
            db.execute("delete from " + self.CONST_TABLE_NAME + " where file_path = ?",
                       [src_path], False, True)
            db.execute("delete from " + self.CONST_TABLE_NAME + "_fts where file_path = ?",
                       [src_path], False, True)
            # update the progress bar
            progressbar.add(1)
            if has_error:
                plogXmlRoot = etree.parse(output_file_path)
                analysisLogs = plogXmlRoot.findall('PVS-Studio_Analysis_Log')
                ignoreCnt = 0
                for analysisLog in analysisLogs:
                    errCode = analysisLog.find('ErrorCode').text
                    if errCode is None:
                        continue
                    if errCode.lower() in (code.lower() for code in self.excludedCodes):
                        plogXmlRoot.getroot().remove(analysisLog)
                        ignoreCnt = ignoreCnt + 1
                if len(analysisLogs) == ignoreCnt:
                    os.remove(output_file_path)
                    continue
                plogXmlRoot.write(output_file_path)
                project = analysisLogs[0].find('Project').text
                filename = analysisLogs[0].find('ShortFile').text
                log_str = json.dumps(log_json)
                res_ignore = db.execute(
                    "select * from " + self.CONST_TABLE_NAME + "_ignore"
                    + " where rev=? and project=? and file=?",
                    (cur_file_max_rev, project, filename), False, False)
                if filename == 'Dump.cpp':
                    if len(res_ignore) == 0:
                        print("Dump cannot find", cur_file_max_rev, project, filename)
                if len(res_ignore) != 0:
                    os.remove(output_file_path)
                    continue
                if min_rev_has_error < cur_file_max_rev:
                    min_rev_has_error = cur_file_max_rev
                db.execute(
                    "insert into " + self.CONST_TABLE_NAME
                    + " values (?, ?, ?, current_timestamp, ?, ?);",
                    (project, filename, src_path, output_file_path, log_str), False, True)
                body_str = '{0} {1} {2}'.format(project, filename, log_str)
                db.execute(
                    "insert into " + self.CONST_TABLE_NAME + "_fts values (?, ?);",
                    (src_path, body_str), False, True)
                printer.aprint(self.get_name() + ' generating the HTML report for {0}...'.format(src_path))
                self.__convert_to_html(output_file_path)
            printer.aprint(self.get_name() + ' check of file {0} finished'.format(src_path))
    return min_rev_has_error
args = arg_parser.parse_args()
if args.source:  # we've been given a custom source path
    src = args.source
else:
    home = os.path.expanduser('~')  # home directory for this user
    src = home + "/dsl.txt"  # the path representing ~/dsl.txt
# opening the file
try:
    f = open(src, 'r')
except IOError:
    msg = "Input error: The source file couldn't be opened."
    errprint(msg, ERRCODE["INPUT"])
# reading and parsing the file
table_list = parser.dsl_parser(f)
# close the file
f.close()
# calls the db filler
db_filler(table_list)
def p_error(p):
    global err
    if p is None:  # PLY calls p_error(None) on an unexpected end of input
        msg = "Syntax error: Unexpected end of input.\n"
    else:
        msg = ("Syntax error. Trouble with " + repr(str(p.value))
               + ". Line: " + str(p.lineno) + ".\n")
    errprint(msg, ERRCODE["SYNTACTIC"])
    err = True  # sets the flag