def get(self, project_id, job_id):
    ''' Clear job's cache '''
    # Look up the job by id within the project; the build/commit joins
    # mirror the access pattern used elsewhere for job queries.
    row = g.db.execute_one_dict('''
        SELECT j.name, branch from job j
        INNER JOIN build b
            ON b.id = j.build_id
            AND j.project_id = b.project_id
        LEFT OUTER JOIN "commit" c
            ON b.commit_id = c.id
            AND c.project_id = b.project_id
        WHERE j.id = %s
            AND j.project_id = %s
    ''', [job_id, project_id])

    # Unknown job id (or wrong project) -> 404.
    if not row:
        abort(404)

    # Cache objects are keyed by project id + job name.
    cache_key = 'project_%s_job_%s.tar.snappy' % (project_id, row['name'])
    storage.delete_cache(cache_key)

    return OK('Cleared cache')
def check_storage(self):
    """Round-trip health check of the storage backend.

    Writes a small random file to /tmp, uploads it to the cache,
    downloads it back, and cleans everything up. On any failure the
    exception is logged and ``self.check_result`` is set to False.
    """
    self.logger.debug("check storage")

    # Random payload so each check uploads distinct content.
    contents = ''.join(random.choice(string.ascii_letters + string.digits)
                       for _ in range(15))
    file_name = "checker_test_" + str(uuid.uuid4())
    path = "/tmp/" + file_name
    download_path = ""

    with open(path, 'w') as f:
        f.write(contents)

    try:
        with open(path, 'rb') as f:
            storage.upload_cache(f, file_name)
        # Lazy logger args instead of eager %-formatting.
        self.logger.debug("upload file %s", path)

        download_path = storage.download_cache(file_name)
        self.logger.debug("download file %s", download_path)
    except Exception:
        self.logger.exception('Got exception on check storage')
        self.check_result = False
    finally:
        self.logger.debug("check api result: %s, retry times %s",
                          self.check_result, self.retry_times)
        # Guard against None (download_cache may not return a path on
        # failure); os.path.exists("") is False, so the initial empty
        # download_path is skipped harmlessly.
        for f in [path, download_path]:
            if f is not None and os.path.exists(f):
                os.remove(f)
        storage.delete_cache(file_name)
def _gc_storage_job_cache(self, db):
    # Garbage-collect job caches: select (project_id, name) pairs that
    # appear in the 14-day window but NOT in the 7-day window, i.e. jobs
    # whose most recent run was between 7 and 14 days ago.
    stale_jobs = db.execute_many_dict('''
        SELECT DISTINCT project_id, name FROM job
        WHERE created_at > NOW() - INTERVAL '14 days'
        EXCEPT
        SELECT DISTINCT project_id, name from job
        where created_at > NOW() - INTERVAL '7 days'
    ''')

    logger.info('Deleting caches of %s jobs', len(stale_jobs))

    for job in stale_jobs:
        logger.info('Deleting cache %s/%s', job['project_id'], job['name'])
        # Cache objects are keyed by project id + job name.
        cache_key = 'project_%s_job_%s.tar.snappy' % (job['project_id'],
                                                      job['name'])
        storage.delete_cache(cache_key)
def get(self, project_id, build_id):
    # Clear the cache of every job belonging to the given build.
    build_jobs = g.db.execute_many_dict('''
        SELECT j.name, branch from job j
        INNER JOIN build b
            ON b.id = j.build_id
            AND j.project_id = b.project_id
        LEFT OUTER JOIN "commit" c
            ON b.commit_id = c.id
            AND c.project_id = b.project_id
        WHERE b.id = %s
            AND j.project_id = %s
    ''', [build_id, project_id])

    # Cache objects are keyed by project id + job name; delete each one.
    for job in build_jobs:
        cache_key = 'project_%s_job_%s.tar.snappy' % (project_id,
                                                      job['name'])
        storage.delete_cache(cache_key)

    return OK('Cleared cache')
def _check_storage(self):
    """Round-trip health check of the storage backend with metrics.

    Uploads a random local file, downloads it back, and cleans up both
    the local files and the cache entry.

    Returns:
        True on a successful upload/download round trip, False on any
        exception (also increments ``self.storage_checker_errors``).
    """
    file_name = "checker_test_" + str(uuid.uuid4())
    # Reuse file_name for the local file instead of generating a second,
    # unrelated uuid — keeps the local name and the cache key in sync.
    file_path = self._create_random_file(file_name)
    download_path = ""

    try:
        with open(file_path, 'rb') as f:
            self._storage_upload_with_metrics(f, file_name)
        # Lazy logger args instead of eager %-formatting.
        self.logger.debug("Storage checking - Upload file %s", file_path)

        download_path = self._storage_download_with_metrics(file_name)
        self.logger.debug("Storage checking - Download file %s",
                          download_path)
        return True
    except Exception:
        self.logger.exception('Got exception on check storage')
        self.storage_checker_errors.inc()
        return False
    finally:
        # download_path may still be "" (exists() is False) or None
        # (guarded explicitly) if the download never happened.
        for f in [file_path, download_path]:
            if f is not None and os.path.exists(f):
                os.remove(f)
        storage.delete_cache(file_name)