def check_randomised(self, iterations, stats=False, out_file=None):
    generator_obj = Generator(self.input_fn)
    failed_count = 0
    folder_name = 'failed_testcases'
    os.makedirs(folder_name, exist_ok=True)
    if out_file:
        # truncate any previous summary so results from earlier runs don't linger
        with open(out_file, 'w') as f:
            pass
    # TODO: optimize this routine using multithreading
    for t in range(iterations):
        file_name = os.path.join(folder_name, 'tc' + str(t))
        generator_obj.generate(2, file_path=file_name)
        if not self.check_one(file_name, stats, out_file):
            failed_count += 1
        else:
            delete_file(file_name)
    if failed_count == 0:
        print('Success! All test cases passed!')
    else:
        print('Failed! One or more test cases failed!')
    print('✅: {} ❌: {}'.format(iterations - failed_count, failed_count))
    if failed_count and out_file:
        print('The complete summary can be found in', out_file)
        print('The failed test cases can be found in', folder_name, 'folder')
    return failed_count == 0
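# A hedged sketch (not part of the original source) of one way to address the
# TODO above with a thread pool. The name check_randomised_parallel, the
# max_workers parameter, and the run_case helper are illustrative, and it
# assumes Generator.generate(), check_one() and delete_file() are safe to call
# concurrently; other names mirror check_randomised().
def check_randomised_parallel(self, iterations, stats=False, out_file=None, max_workers=4):
    from concurrent.futures import ThreadPoolExecutor

    generator_obj = Generator(self.input_fn)
    folder_name = 'failed_testcases'
    os.makedirs(folder_name, exist_ok=True)

    def run_case(t):
        file_name = os.path.join(folder_name, 'tc' + str(t))
        generator_obj.generate(2, file_path=file_name)
        if self.check_one(file_name, stats, out_file):
            delete_file(file_name)  # passed: no need to keep the test case
            return True
        return False  # failed: keep the file for inspection

    with ThreadPoolExecutor(max_workers=max_workers) as pool:
        results = list(pool.map(run_case, range(iterations)))

    failed_count = results.count(False)
    print('✅: {} ❌: {}'.format(iterations - failed_count, failed_count))
    return failed_count == 0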
def main():
    logger = create_logger(PurePath(__file__).stem)
    timezone = getenv('TIMEZONE', None)
    if not timezone:
        try:
            # canhazip returns the address with a trailing newline, so strip it
            ip_address = urlopen('https://canhazip.com/').read().decode('utf-8').strip()
            web_text = urlopen(f'http://ip-api.com/json/{ip_address}').read().decode('utf-8')
            # json_load is assumed to parse a JSON string (i.e. json.loads)
            timezone = json_load(web_text).get('timezone', 'Etc/UTC')
        except:
            logger.debug('Unable to automatically divine timezone, leaving set to UTC')
            sys_exit()
    if timezone:
        timezone_path = f'/usr/share/zoneinfo/{timezone}'
        if not path_exists(timezone_path):
            logger.debug(f'Timezone {timezone} does not seem to be installed on this system.')
            sys_exit()
        local_time_path = '/etc/localtime'
        if path_exists(local_time_path):
            delete_file(local_time_path)
        symlink(timezone_path, local_time_path)
        logger.info(f'Timezone {timezone} has been set successfully.')
def test_unsuccessful_provider_removal():
    """Here we give the module a text file without the provider written in it;
    it should tell us that it couldn't find the provider we gave it as an argument."""
    remove_provider = remove.RemoveProviderR()
    remove.web = WebDummy()  # override the web variable in remove.py
    test_provider = "PROV"
    # what we expect to see in providers.txt after GET
    expected_providers_contents = ['What:\n', 'NOTPROV:\n', 'Test:']
    # create the file
    with open(PROVIDERS_FILE_PATH, 'w') as f:
        f.writelines(["What:", "\n", "NOTPROV:", "\n", "Test:"])
    assert remove_provider.GET(test_provider) == test_provider + " not found, couldn't remove"
    # read the file and see if it's the same
    with open(PROVIDERS_FILE_PATH, 'r') as f:
        provider_contents = f.readlines()
    delete_file(PROVIDERS_FILE_PATH)  # delete the file
    assert provider_contents == expected_providers_contents
def test_file_to_string():
    test1 = 'This is a quote'
    test2 = 'Something else'
    filename = 'test-emotion-dict.txt'
    with open(filename, 'x+') as file:
        file.write(test1)
    assert file_to_string(filename) == test1
    assert file_to_string(filename) != test2
    delete_file(filename)
def backup(self):
    self.opml = requests.get(url=self.url, headers=self.headers).text
    with open(self.filename, "a") as tmpfile:
        tmpfile.write(self.opml)
    try:
        self.exporter.upload_file(self.filename, self.bucket, self.objectname)
    except S3Error as e:
        print(e)
    delete_file(self.filename)
def test_file_to_dict():
    check = {'key': 'value'}
    check2 = {'key1': 'value1'}
    test = json.dumps({'key': 'value'})
    filename = 'test-emotion-dict.json'
    with open(filename, 'x+') as file:
        file.write(test)
    assert file_to_dict(filename) == check
    assert file_to_dict(filename) != check2
    delete_file(filename)
def _next_filename(self, count, extension):
    # Shift existing archives up one slot (base.1.gz -> base.2.gz, etc.),
    # dropping the oldest if it would exceed backupCount.
    for i in range(self.backupCount - 1, 0, -1):
        sfn = '{}.{}.gz'.format(self.baseFilename, i)
        dfn = '{}.{}.gz'.format(self.baseFilename, i + 1)
        if exists(sfn):
            if exists(dfn):
                delete_file(dfn)
            rename_file(sfn, dfn)
    # the '.1.gz' slot is now free for the newest archive
    dfn = self.baseFilename + '.1.gz'
    return dfn  # assumed return value; the original snippet ended at this assignment
def finally_clean_up(self):
    """Clean up all temporary files at the end of the code execution.

    TaskExecutor runs this method after each finished task.
    """
    for path in self.assigned:
        try:
            delete_file(path)
        except Exception:
            pass
    self.assigned = []
def test_save_macro():
    '''Feeds save_macro a valid filename and dictionary,
    and tests if it writes to a file correctly.'''
    test_dict = {'m': 'macro', 'n': 'macro2'}
    test_filename = 'macros'
    save_macro(test_dict, test_filename)
    with open(f'{test_filename}.json') as file:
        data = file.read().strip()
    data_dict = json.loads(data)
    assert data_dict == test_dict
    delete_file(f'{test_filename}.json')
def unzip(path_in, path_out):
    """Extract the archive at path_in into path_out, delete the archive,
    then strip the '-master' suffix from the newly created folder."""
    old_content = set(listdir(path_out))
    with ZipFile(path_in, 'r') as file:
        file.extractall(path_out)
    delete_file(path_in)
    new_content = set(listdir(path_out))
    new_folder = list(new_content - old_content)[0]
    new_folder = path_out + '/' + new_folder
    rename(new_folder, new_folder.replace('-master', ''))
def run(self, **kwargs):
    """
    @filepath: Path to the temporary uploaded file.
    @filename: Original name of the file.
    """
    logger = self.get_logger()
    # Before we go any further, first verify that the file actually exists.
    filepath = kwargs.get('filepath')
    try:
        with open(filepath) as f:
            pass
    except IOError:
        logger.error("Couldn't find file to upload, tried %s" % filepath)
        return None
    else:
        UPLOADIT_PROCESS_FILE(**kwargs)
        # Remove tmp file :)
        delete_file(filepath)
    return
def setup_access_file(*args):
    if utl_file_dh.get() and utl_file_cs.get():
        filename1 = './e2Boston_RsrPlus-empty.mdb'
        time = datetime.now().strftime("%Y%m%d_%H%M_%S")
        filename2 = "./utilization_for_E2Boston" + time + ".mdb"
        copy_file(filename1, filename2)
        global log  # I think this should fix the bug of having the old log in the current error msg
        log = []
        try:
            write_access_file(filename2)
        except:
            delete_file(filename2)
            open_error_log_window()
            raise
    else:
        messagebox.showinfo(message='Choose both CS utilization file and DH utilization file.')
def _save_archive(self, rollover_filename):
    archive_filename = rollover_filename + '.bz2'
    backup_filename = rollover_filename + '.bak'
    bzip_file = None
    try:
        if exists(archive_filename):
            delete_file(archive_filename)
        with open(backup_filename, 'rb') as log_file:
            bzip_file = bz2.BZ2File(archive_filename, 'wb')
            bzip_file.writelines(log_file)
    except:
        pass
    else:
        delete_file(backup_filename)
    finally:
        if bzip_file:
            bzip_file.close()
    # Clean up oldest files
    self._cleanup_oldest()
def test_successful_provider_removal():
    """Here we give the module a text file with PROVIDER: written in it;
    it should remove that line in the file."""
    remove_provider = remove.RemoveProviderR()
    remove.web = WebDummy()  # override the web variable in remove.py
    test_provider = "PROV"
    # what we expect to see in providers.txt after we call GET
    expected_providers_contents = ['What:\n', 'Test:']
    # create the file
    with open(PROVIDERS_FILE_PATH, 'w') as f:
        f.writelines(["What:", "\n", test_provider + ":", "\n", "Test:"])
    assert remove_provider.GET(test_provider) == "removed " + test_provider
    # read the file and see if it has removed the line with the test_provider
    with open(PROVIDERS_FILE_PATH, 'r') as f:
        provider_contents = f.readlines()
    delete_file(PROVIDERS_FILE_PATH)  # delete the file
    assert provider_contents == expected_providers_contents
            layers_found += 1
    return layers_found


if __name__ == '__main__':
    filename = get_filename_from_commannd_line()
    backupname = f"{filename}.bak"
    # Count the layers
    layercount = count_layers(filename)
    # Create a backup
    copy_file(filename, backupname)
    # Process file
    with open(backupname, "r") as src:
        with open(filename, "w") as dest:
            for line in src:
                match = LAYER_REGEX.match(line)
                if match:
                    z_height = float(match.group(1))
                    z_layer = int(match.group(2)) + 1
                    percent = round(z_layer / layercount * 100, 1)
                    dest.write(f"M117 {z_layer}/{layercount} ({percent}%) {z_height}mm\n")  # noqa: E501
                dest.write(line)
    # Delete backup
    delete_file(backupname)
def cleanup(self):
    """Delete the files at the specified paths."""
    for input_file in self.input_files:
        delete_file(input_file)
def tearDown(self):
    dir_path = self.dir_path
    if exists(dir_path):
        for filename in listdir(dir_path):
            delete_file(join(dir_path, filename))
def generate_png(self, user):
    from os.path import join

    import pygraphviz as pgv

    # NB: to work with utf8 labels in nodes, all nodes must be added explicitly
    #     with a unicode label; when edges are created, nodes are identified by
    #     their labels encoded as strings.
    graph = pgv.AGraph(directed=True)

    # NB: "self.roots.all()" causes a strange additional query
    #     (retrieving of the base CremeEntity!)...
    has_perm_to_view = user.has_perm_to_view
    roots = [
        root
        for root in RootNode.objects.filter(graph=self.id).select_related('entity')
        if not root.entity.is_deleted and has_perm_to_view(root.entity)
    ]

    add_node = graph.add_node
    add_edge = graph.add_edge

    # TODO: entity cache? regroup relations by type? ...
    CremeEntity.populate_real_entities([root.entity for root in roots])  # small optimisation

    for root in roots:
        add_node(str(root.entity), shape='box')
        # add_node('filled box', shape='box', style='filled', color='#FF00FF')
        # add_node('filled box v2', shape='box', style='filled', fillcolor='#FF0000',
        #          color='#0000FF', penwidth='2.0')  # default pensize="1.0"

    orbital_nodes = {}  # cache

    for root in roots:
        subject = root.entity
        str_subject = str(subject)
        relations = subject.relations.filter(type__in=root.relation_types.all()) \
                                     .select_related('object_entity', 'type')
        Relation.populate_real_object_entities(relations)  # small optimisation

        for relation in relations:
            object_ = relation.object_entity
            if not user.has_perm_to_view(object_):
                continue

            str_object = str(object_)
            orbital_node = orbital_nodes.get(object_.id)
            if not orbital_node:
                add_node(str_object)
                orbital_nodes[object_.id] = str_object

            add_edge(str_subject, str_object, label=str(relation.type.predicate))
            # add_edge('b', 'd', color='#FF0000', fontcolor='#00FF00', label='foobar', style='dashed')

    orbital_rtypes = self.orbital_relation_types.all()
    if orbital_rtypes:
        orbital_ids = orbital_nodes.keys()
        for relation in Relation.objects.filter(subject_entity__in=orbital_ids,
                                                object_entity__in=orbital_ids,
                                                type__in=orbital_rtypes).select_related('type'):
            add_edge(orbital_nodes[relation.subject_entity_id],
                     orbital_nodes[relation.object_entity_id],
                     label=str(relation.type.predicate),
                     style='dashed')

    # print(graph.string())
    graph.layout(prog='dot')  # Algo: neato dot twopi circo fdp nop

    img_format = 'png'  # Format: pdf svg
    img_basename = 'graph_{}.{}'.format(self.id, img_format)

    try:
        path = FileCreator(join(settings.MEDIA_ROOT, 'upload', 'graphs'), img_basename).create()
    except FileCreator.Error as e:
        raise self.GraphException(e) from e

    try:
        # graph.draw(join(dir_path, filename), format='png')  # Format: pdf svg
        graph.draw(path, format=img_format)  # Format: pdf svg
    except IOError as e:
        delete_file(path)
        raise self.GraphException(str(e)) from e

    fileref = FileRef.objects.create(
        # user=request.user,  TODO
        filedata='upload/graphs/' + basename(path),
        basename=img_basename,
    )

    return HttpResponseRedirect(
        reverse('creme_core__dl_file', args=(fileref.filedata,)),
    )
def clear(path='.#'):
    delete_file(path + '.#')
def _execute(self, job):
    delay = self.get_delay(job)
    if delay is None:
        JobResult.objects.create(
            job=job,
            messages=[
                _(u"The configured delay is invalid. "
                  u"Edit the job's configuration to fix it."),
            ],
        )
    else:
        for temp_file in FileRef.objects.filter(temporary=True,
                                                created__lt=now() - delay.as_timedelta()):
            full_path = temp_file.filedata.path
            if exists(full_path):
                try:
                    delete_file(full_path)
                except Exception as e:
                    JobResult.objects.create(
                        job=job,
                        messages=[
                            _(u'An error occurred while deleting the '
                              u'temporary file «{}»').format(full_path),
                            _(u'Original error: {}').format(e),
                        ],
                    )
                    continue
            else:
                logger.warning('_TempFilesCleanerType: the file %s has already been deleted.',
                               full_path)

            try:
                temp_file.delete()
            except ProtectedError as e:
                logger.warning('The FileRef(id=%s) cannot be deleted because of its dependencies: %s',
                               temp_file.id, e.args[1])
                JobResult.objects.create(
                    job=job,
                    messages=[
                        _(u'The temporary file with id={} cannot be '
                          u'deleted because of its dependencies.').format(temp_file.id),
                    ],
                )
            except Exception as e:
                logger.exception('Error when trying to delete the FileRef(id=%s)', temp_file.id)
                JobResult.objects.create(
                    job=job,
                    messages=[
                        _(u'The temporary file with id={} cannot be '
                          u'deleted because of an unexpected error.').format(temp_file.id),
                        _(u'Original error: {}').format(e),
                    ],
                )
                        help='Summary of failing test cases will be stored in this file',
                        default='edge_cases.txt')
    args = parser.parse_args()
    return args


if __name__ == '__main__':
    args = parse_args()
    try:
        with open(args.input_fn) as f:
            file_content = f.read()
        with open('temp.py', 'w') as f:
            f.write(file_content)
        input_fn = __import__('temp').input_fn
    except Exception as e:
        print('Error importing input_fn')
        print(e)
        exit(0)
    finally:
        if 'temp.py' in listdir():
            delete_file('temp.py')

    try:
        checker = Checker(args.correct_soln, args.soln, input_fn)
        checker.check_randomised(args.iterations, stats=args.verbose, out_file=args.output_file)
    except Exception as e:
        print('Some error occurred with the execution of the files!')
        print(e)
        exit(0)
def cleanup():
    for f in delete_after_run:
        delete_file(f)
def remove_temp_files(dirOutput, name):
    # LaTeX auxiliary files to clean up after a build
    # (note: latexmk's database file is '.fdb_latexmk'; the original listed '.fdf_latexmk')
    extensions = ('aux', 'log', 'thm', 'fls', 'fdb_latexmk', 'toc', 'bbl', 'blg')
    for ext in extensions:
        try:
            delete_file(os.path.join(dirOutput, "{}.{}".format(name, ext)))
        except:
            pass
def delete_note(self, identifier):
    """Deletes a note via its ID.

    Returns True if found and deleted, False otherwise."""
    if identifier in self._notes:
        delete_file(self._notes[identifier].path)
        del self._notes[identifier]
        return True
    return False
def _cleanup_oldest(self):
    if self.backupCount > 0:
        for filename in self.getFilesToDelete():
            delete_file(filename)