def delete_old_artifacts(list_of_artifacts, chart_name):
    """Keep only the newest NUMBER_OF_CHARTS uploads of *chart_name*.

    :param list_of_artifacts: iterable of tuples shaped
        ``(chart_name, upload_date_str, artifact_path)`` — assumed from the
        indexing below; dates are in ``'%d-%b-%Y %H:%M'`` format.
    :param chart_name: chart whose old versions should be pruned.
    :returns: None. Deletes old artifacts from Artifactory as a side effect.
    """
    artifacts = [a for a in list_of_artifacts if a[0] == chart_name]
    # Nothing to prune when we hold NUMBER_OF_CHARTS versions or fewer.
    if len(artifacts) <= NUMBER_OF_CHARTS:
        return
    # Sort oldest-first so the artifacts to delete sit at the front.
    artifacts.sort(key=lambda a: time.strptime(a[1], '%d-%b-%Y %H:%M'))
    # BUG FIX: the original slice was [:-(NUMBER_OF_CHARTS + 1)], which kept
    # NUMBER_OF_CHARTS + 1 versions instead of NUMBER_OF_CHARTS as documented.
    for artifact in artifacts[:-NUMBER_OF_CHARTS]:
        artifact_path = ArtifactoryPath(
            full_artifactory_url + artifact[2],
            auth=credentials,
        )
        if artifact_path.exists():
            print("Deleting artifact " + artifact[2])
            artifact_path.unlink()
class ArtifWarehouse(FileStoreWarehouse):
    """File-store warehouse backed by an Artifactory repository.

    Wraps an ``ArtifactoryPath`` client and translates its failures into
    ``NhaStorageError`` so callers get a uniform storage exception type.
    """

    # Compass class used to resolve credentials/connection settings.
    compass_cls = ArtifCompass

    def connect(self):
        """Create the ArtifactoryPath client for ``<address>/artifactory/<repo>``."""
        self.client = ArtifactoryPath(
            os.path.join(self.address, 'artifactory', self.repo),
            auth=(self.compass.user, self.compass.pswd),
            verify=self.compass.check_certificate
        )

    def assert_repo_exists(self):
        """Fail fast if the configured repository is absent.

        NOTE(review): ``assert`` is stripped under ``python -O``, so this
        check silently disappears in optimized runs — confirm that is OK.
        """
        assert self.client.exists(), NhaStorageError("""The {} repository does not exist""".format(self.repo))

    def format_artif_path(self, path):
        """Join *path* under this warehouse's section inside the repository."""
        return self.client.joinpath(self.section, path)

    def upload(self, path_to, path_from=None, content=None):
        """Upload a file (or inline *content*) to *path_to* in the repository.

        When *content* is given, it is first written to a temp workpath and
        that file is deployed; the workpath is always disposed afterwards.

        :raises NhaStorageError: on any failure during deployment.
        """
        work = None
        try:
            if content is not None:
                # Materialize the inline content as a temp file to deploy.
                work = Workpath.get_tmp()
                file_name = os.path.basename(path_to)
                work.deploy_text_file(name=file_name, content=content)
                path_from = work.join(file_name)
            dest_path = self.format_artif_path(path_to)
            dest_path.deploy_file(path_from)
        except Exception as e:
            raise NhaStorageError("Upload failed. Check if the artifact´s path is correct") from e
        finally:
            # Clean up the temp workpath even when the deploy fails.
            if work is not None:
                work.dispose()

    def download(self, path_from, path_to):
        """Download the remote artifact *path_from* into local file *path_to*.

        :raises NhaStorageError: if the remote artifact cannot be read.
        """
        uri = self.format_artif_path(path_from)
        try:
            with uri.open() as src:
                with open(path_to, "wb") as out:
                    # NOTE(review): reads the whole artifact into memory;
                    # fine for small files, confirm for large blobs.
                    out.write(src.read())
        except Exception as e:
            raise NhaStorageError("Download failed. Check if the remote artifact exists in the repository") from e

    def delete(self, hierarchy: StoreHierarchy, ignore=False):
        """Delete the file or directory addressed by *hierarchy*.

        :param hierarchy: store hierarchy whose joined path is deleted.
        :param ignore: when True, a missing path only logs a warning and
            returns False instead of raising.
        :returns: True on successful deletion, False when missing and ignored.
        :raises NhaStorageError: when the path is missing and *ignore* is False.
        """
        path = hierarchy.join_as_path()
        uri = self.format_artif_path(path)
        try:
            if uri.is_dir():
                uri.rmdir()
            else:
                uri.unlink()
            return True
        except FileNotFoundError as e:
            # NOTE(review): this message was line-wrapped in the original
            # source; reconstructed here with an explicit newline.
            message = "Delete from Artifactory failed. \nCheck if the path exists: {}".format(uri)
            if ignore:
                # NOTE(review): logger ``warn`` is a deprecated alias of
                # ``warning`` on stdlib loggers — confirm for self.LOG.
                self.LOG.warn(message)
                return False
            else:
                raise NhaStorageError(message) from e

    def get_download_cmd(self, path_from, path_to, on_board_perspective=True):
        """Build a shell one-liner that downloads *path_from* to *path_to*.

        Produces ``curl ... && mkdir -p ... && mv ...`` using credentials from
        the on-board compass (default) or this warehouse's own compass.
        NOTE(review): the credentials are embedded in the returned command
        string — confirm it is never logged verbatim.
        """
        if on_board_perspective:
            compass = self.compass_cls(perspective=Perspective.ON_BOARD)
        else:
            compass = self.compass
        curl = "curl {security} -O -u {user}:{pswd} {url}".format(
            security='' if compass.check_certificate else '--insecure',
            user=compass.user,
            pswd=compass.pswd,
            url=self.format_artif_path(path_from)
        )
        # curl -O saves into the CWD under the remote basename; move that
        # file into the target directory.
        move = "mkdir -p {dir} && mv {file} {path_to}".format(
            dir=os.path.dirname(path_to),
            file=os.path.basename(path_to),
            path_to=os.path.dirname(path_to)
        )
        return ' && '.join([curl, move])

    def lyst(self, path):
        """List the names of the files (not subdirectories) under *path*."""
        path = self.format_artif_path(path)
        return [x.name for x in path.iterdir() if not x.is_dir()]
"""Delete one artifact from Artifactory, configured via environment variables."""
import os

from artifactory import ArtifactoryPath

# All three settings are mandatory; a missing variable raises KeyError here,
# in the same order as the original assignments.
JF_USER, JF_USER_TOKEN, ARTIFACT_TO_REMOVE = (
    os.environ[key] for key in ('JF_USER', 'JF_USER_TOKEN', 'ARTIFACT_TO_REMOVE')
)

path = ArtifactoryPath(ARTIFACT_TO_REMOVE, auth=(JF_USER, JF_USER_TOKEN))

# Only attempt the delete when the artifact is actually present.
if path.exists():
    path.unlink()
def delete_artifact(artifact_url):
    """Delete the artifact at *artifact_url* if it exists.

    Authenticates with the module-level ``api_key``; a missing artifact is
    treated as a no-op rather than an error.
    """
    target = ArtifactoryPath(artifact_url, apikey=api_key)
    if not target.exists():
        return
    target.unlink()
# Walk the artifact list, deleting (or dry-running) files older than `days`,
# and log every action. Expects log_name, artifacts_list, repo, path, days,
# art_url, art_apikey, delete, verbose, total_file_count and run_start to be
# defined earlier in the file.
# NOTE(review): log_file is opened without a context manager and is not
# closed in this chunk — presumably closed later; confirm.
log_file = open(log_name, 'w')
log_msg = '-'
print("Processing list....")
for item in artifacts_list:
    # Build and run an AQL query for this artifact name.
    file_args = aql_search.file_search(repo, path, item["name"], days)
    file_list = aql.aql(*file_args)
    # Skip this item entirely when check_file_list vetoes it.
    flag = aql_search.check_file_list(file_list, days)
    if flag:
        continue
    for file in file_list:
        file_url = art_url + file['repo'] + "/" + file['path'] + "/" + file[
            'name']
        full_path = ArtifactoryPath(file_url, apikey=art_apikey)
        total_file_count += 1
        if delete:
            if full_path.exists():
                log_msg = "deleting: " + file_url
                full_path.unlink()
            # NOTE(review): if the path no longer exists, log_msg keeps its
            # previous value and a stale line is written below — confirm
            # this is intended.
        else:
            log_msg = "dry run: " + file_url
        # Append the last-download timestamp when the stats carry one.
        if 'downloaded' in file['stats'][0]:
            log_msg += "\n Last downloaded: " + file['stats'][0][
                'downloaded']
        log_file.write(log_msg + "\n")
        if verbose:
            print(log_msg)
# Total wall-clock time for the run (run_start set before this chunk).
run_stop = timeit.default_timer()
running_time = run_stop - run_start
type=str, help="URL to custom BlobConverter URL to be used for conversion", required=False) args = parser.parse_args() if None in (args.username, args.password): parser.print_help() sys.exit(1) if args.blobconverter_url is not None: blobconverter.set_defaults(url=args.blobconverter_url) path = ArtifactoryPath( "https://artifacts.luxonis.com/artifactory/blobconverter-backup/blobs", auth=(args.username, args.password)) if not path.exists(): path.mkdir() priority_models = [ "mobilenet-ssd", "efficientnet-b0", "vehicle-license-plate-detection-barrier-0106", "vehicle-detection-adas-0002", "license-plate-recognition-barrier-0007" "vehicle-attributes-recognition-barrier-0039", "face-detection-retail-0004", "landmarks-regression-retail-0009" ] backup_shaves = range(1, 17) for model_name, shaves in itertools.product(priority_models, backup_shaves): print("Deploying {} with {} shaves...".format(model_name, shaves)) try: path.deploy_file(blobconverter.from_zoo(model_name, shaves=shaves))