import os
from artifactory import ArtifactoryPath
from datetime import datetime, timedelta

# Required settings come from the environment (raises KeyError when unset).
artifactory_url = os.environ['artifactory_url']    # e.g. http://artifact.corp.continuum.net:8081
repo_name = os.environ['repo_name']                # e.g. 'dt-dev_its-portal-net,dt_dev_yash'
retention_period = os.environ['retention_period']  # e.g. '8mo'
dry_run = os.environ['dry_run']                    # dry run to list the delete info

# Cut-off date used by the AQL query below.
ret_time = "2018-08-03"

# NOTE(review): credentials are committed in source — move them to the
# environment or a secret store.
aql = ArtifactoryPath("{}/artifactory/".format(artifactory_url),
                      auth=('repluser', 'AP49A5SMDpZuQb7e9g7Tn5c45fbUfJkZMzmUSM'))

# Builds of dev_its-portal-net created before the cut-off that were never
# promoted to "Released".
criteria = {
    "name": {"$match": "dev_its-portal-net"},
    "created": {"$lt": "{}".format(ret_time)},
    "promotion.status": {"$ne": "Released"},
}
artifacts = aql.aql("builds.find", criteria, ".include", ["promotion.status"])
print(artifacts)
def artifactory_token(artifactory_server, artifactory_auth_token):
    """Yield an ArtifactoryPath client authenticated with an access token.

    Written as a generator so it can serve as a (pytest-style) fixture.
    """
    yield ArtifactoryPath(artifactory_server, token=artifactory_auth_token)
def get_packages(self, source, parser, downloader, packages_matches, property_validate=True):
    """Search Artifactory for packages matching *packages_matches* and bundle them.

    :param source: source adapter providing auth params and search paths
    :param parser: path parser for this source
    :param downloader: downloader whose config supplies trigger packages
    :param packages_matches: requested package descriptors (each has .package_name)
    :param property_validate: kept for interface compatibility (unused here)
    :return: OrderedDict mapping package name -> Package (or None when not found)
    """
    # TODO: move requests/http debug setup into a command-line option
    _art_auth_etc = source.get_auth_params()
    _packages_found = OrderedDict()
    self._log.info('parser: {}'.format(parser._name))
    repo_returned_packages_all = []
    for _path in source.get_paths(packages_matches):
        _tmp_params = Dict(_path.params)
        self._log.info('repo: {}'.format(_tmp_params.repo))
        session.auth = _art_auth_etc['auth']
        aql = ArtifactoryPath(f"{_tmp_params.server}", session=session)
        _path_fixed, _path_pattern, _file_name_pattern = parser.split_fixed_pattern_with_file_name(_path.path)
        _package_versions_with_contracts, packages_with_invalid_naming_convention = \
            self.find_package_versions(_file_name_pattern, _path_pattern, aql, _tmp_params.repo)
        if _package_versions_with_contracts:
            # Keep only versions carrying every contract requested for this path.
            _package_versions_with_all_contracts, package_versions_with_missing_contracts = \
                remove_package_versions_with_missing_contracts(
                    _package_versions_with_contracts, _path.params['contracts']
                )
            for p, missing_contracts in package_versions_with_missing_contracts.items():
                self._log.info(f"Skip {p} - missing contracts {missing_contracts}")
            if _package_versions_with_all_contracts:
                repo_returned_packages_all += _package_versions_with_all_contracts
    package_names = [x.package_name for x in packages_matches]
    bundle = Bundle(package_names, repo_returned_packages_all, downloader._config.trigger_packages,
                    enable_tp_hides_higher_version=False)
    bundle_packages = bundle.calculate().values()
    self._log.info('Final bundle packages with contracts:')
    print_packages_by_contracts_scheme(self._log, bundle_packages)
    for p in bundle_packages:
        _packages_found[p.name] = Package(p.name, p.art_path, p, {}, downloader, self, parser, {}, {})
    # Mark requested packages that were not resolved.
    for p in package_names:
        if p not in _packages_found:
            # BUG FIX: was `_packages_found[p.name] = None` — `p` is a str here
            # (from package_names), so `.name` raised AttributeError.
            _packages_found[p] = None
    return _packages_found
""" Create AQL query from string or list or dict arguments """ aql_query_text = "" for arg in args: if isinstance(arg, dict): arg = "({})".format(json.dumps(arg)) elif isinstance(arg, list): arg = "({})".format(json.dumps(arg)).replace("[", "").replace("]", "") aql_query_text += arg return aql_query_text # API_KEY aql = ArtifactoryPath("http://" + jfrogURL + "/artifactory/api/search/aql", apikey=apiKey) #Forming Arguments for search. args = [ "items.find", { "$and": [{ "repo": { "$eq": repoName }, "name": { "$match": "*." + artType }, "stat.downloads": { "$gt": "0" } }]
def get_usedby(self, source, parser, downloader, list_or_file_path, property_validate=True):
    """Find packages that use the given ones, via Artifactory AQL property search.

    :param source: source adapter (args supply auth/verify settings)
    :param parser: path parser; provides the used-by AQL fragment and param extraction
    :param downloader: downloader passed through to created Package objects
    :param list_or_file_path: packages list or deps file to search used-by for
    :param property_validate: for `root` packages we need check property, bad
        if we find packages from `lock` file, we can skip validate part
    :return: OrderedDict mapping repo path (str) -> Package
    :raises CrosspmException: when no auth parameter is configured
    """
    _auth_type = source.args['auth_type'].lower() if 'auth_type' in source.args else 'simple'
    _art_auth_etc = {}
    if 'auth' in source.args:
        self.search_auth(list_or_file_path, source)
        if _auth_type == 'simple':
            _art_auth_etc['auth'] = HTTPBasicAuth(*tuple(source.args['auth']))
            session.auth = _art_auth_etc['auth']
        # elif _auth_type == 'cert':
        #     _art_auth_etc['cert'] = os.path.realpath(os.path.expanduser(source.args['auth']))
    if 'auth' not in _art_auth_etc:
        msg = 'You have to set auth parameter for sources with artifactory-aql adapter'
        raise CrosspmException(CROSSPM_ERRORCODE_ADAPTER_ERROR, msg)
    if 'verify' in source.args:
        # BUG FIX: was `source.args['verify'].lower in [...]` — the method object
        # itself was tested for membership, so 'verify' was always False.
        _art_auth_etc['verify'] = source.args['verify'].lower() in ['true', 'yes', '1']
    else:
        _art_auth_etc['verify'] = False
    _secret_variables = self._config.secret_variables
    _pkg_name_col = self._config.name_column
    _packages_found = OrderedDict()
    _pkg_name_old = ""
    for _paths in parser.get_paths(list_or_file_path, source):
        _packages = []
        _params_found = {}
        _params_found_raw = {}
        last_error = ''
        _pkg_name = _paths['params'][_pkg_name_col]
        if _pkg_name != _pkg_name_old:
            _pkg_name_old = _pkg_name
            # Log package params once per package, hiding secrets and service columns.
            self._log.info('{}: {}'.format(
                _pkg_name, {
                    k: v
                    for k, v in _paths['params'].items()
                    if (k not in (_pkg_name_col, 'repo') and k not in _secret_variables)
                }))
        for _sub_paths in _paths['paths']:
            _tmp_params = dict(_paths['params'])
            self._log.info('repo: {}'.format(_sub_paths['repo']))
            _tmp_params['repo'] = _sub_paths['repo']
            try:
                _artifactory_server = _tmp_params['server']
                _search_repo = _tmp_params['repo']
                # TODO: try lru_cache to cache the large number of requests
                _aql_query_url = '{}/api/search/aql'.format(_artifactory_server)
                _aql_query_dict = {
                    "repo": {
                        "$eq": _search_repo,
                    },
                }
                _usedby_aql = parser.get_usedby_aql(_tmp_params)
                if _usedby_aql is None:
                    continue
                _aql_query_dict.update(_usedby_aql)
                query = 'items.find({query_dict}).include("*", "property")'.format(
                    query_dict=json.dumps(_aql_query_dict))
                session.auth = _art_auth_etc['auth']
                r = session.post(_aql_query_url, data=query, verify=_art_auth_etc['verify'])
                r.raise_for_status()
                _found_paths = r.json()
                for _found in _found_paths['results']:
                    _repo_path = "{artifactory}/{repo}/{path}/{file_name}".format(
                        artifactory=_artifactory_server,
                        repo=_found['repo'],
                        path=_found['path'],
                        file_name=_found['name'])
                    _repo_path = ArtifactoryPath(_repo_path, **_art_auth_etc)
                    _found_properties = {
                        x['key']: x.get('value', '')
                        for x in _found['properties']
                    }
                    _matched, _params, _params_raw = parser.validate_path(
                        str(_repo_path), _tmp_params)
                    _params_found[_repo_path] = {k: v for k, v in _params.items()}
                    _params_found_raw[_repo_path] = {k: v for k, v in _params_raw.items()}
                    _params = _tmp_params
                    _packages += [_repo_path]
                    _params_found[_repo_path].update({k: v for k, v in _params.items()})
                    _params_found[_repo_path]['filename'] = str(_repo_path.name)
                    _params_raw = _params_found_raw.get(_repo_path, {})
                    params_found = {}
                    # TODO: take params from config.yaml usedby section
                    params = parser.get_params_from_properties(_found_properties)
                    params.update(parser.get_params_from_path(str(_repo_path)))
                    _package = Package(params[_pkg_name_col], _repo_path, params, downloader,
                                       self, parser, params_found, _params_raw)
                    # Recurse into used-by without property validation.
                    _package.find_usedby(None, property_validate=False)
                    _packages_found[str(_repo_path)] = _package
                    # _package.find_dependencies(_deps_file, property_validate=False)
                    _mark = 'chosen'
                    self._log.info(' {}: {}'.format(_mark, str(_repo_path)))
            except RuntimeError as e:
                try:
                    err = json.loads(e.args[0])
                except Exception:
                    err = {}
                if isinstance(err, dict):
                    # Check errors
                    # :e.args[0]: {"errors": [{"status": 404, "message": "Not Found"}]}
                    for error in err.get('errors', []):
                        err_status = error.get('status', -1)
                        err_msg = error.get('message', '')
                        if err_status == 401:
                            msg = 'Authentication error[{}]{}'.format(
                                err_status, (': {}'.format(err_msg)) if err_msg else '')
                        elif err_status == 404:
                            msg = last_error
                        else:
                            msg = 'Error[{}]{}'.format(
                                err_status, (': {}'.format(err_msg)) if err_msg else '')
                        # Avoid logging the same error repeatedly.
                        if last_error != msg:
                            self._log.error(msg)
                        last_error = msg
    return _packages_found
def artifactory(art_uri, art_auth):
    """Yield an ArtifactoryPath client for *art_uri* using basic-auth *art_auth*."""
    client = ArtifactoryPath(art_uri, auth=art_auth)
    yield client
def get_artifacts_urls(artifactory):
    """Return an ArtifactoryPath for *artifactory*, authenticated with the
    module-level api_key."""
    return ArtifactoryPath(artifactory, apikey=api_key)
def test_stat(self):
    """
    Test file stat. Check that stat(ArtifactoryPath) can take argument
    :return:
    """
    accessor = self.cls()

    # --- regular file ---------------------------------------------------
    file_path = ArtifactoryPath(
        "http://artifactory.local/artifactory/ext-release-local/org/company/tool/1.0/tool-1.0.tar.gz"
    )
    # stat() is served by the /api/storage endpoint for the same path.
    responses.add(
        responses.GET,
        "http://artifactory.local/artifactory"
        "/api/storage"
        "/ext-release-local/org/company/tool/1.0/tool-1.0.tar.gz",
        status=200,
        json=self.file_stat,
    )
    file_stats = accessor.stat(file_path)
    self.assertEqual(file_stats.ctime,
                     dateutil.parser.parse("2014-02-24T21:20:59.999+04:00"))
    self.assertEqual(file_stats.mtime,
                     dateutil.parser.parse("2014-02-24T21:20:36.000+04:00"))
    self.assertEqual(file_stats.created_by, "someuser")
    self.assertEqual(file_stats.modified_by, "anotheruser")
    self.assertEqual(file_stats.mime_type, "application/octet-stream")
    self.assertEqual(file_stats.size, 26776462)
    self.assertEqual(file_stats.sha1, "fc6c9e8ba6eaca4fa97868ac900570282133c095")
    self.assertEqual(
        file_stats.sha256,
        "fc6c9e8ba6eaca4fa97868ac900570282133c095fc6c9e8ba6eaca4fa97868ac900570282133c095",
    )
    self.assertEqual(file_stats.md5, "2af7d54a09e9c36d704cb3a2de28aff3")
    self.assertEqual(file_stats.is_dir, False)

    # --- directory ------------------------------------------------------
    dir_path = ArtifactoryPath(
        "http://artifactory.local/artifactory/libs-release-local")
    responses.add(
        responses.GET,
        "http://artifactory.local/artifactory"
        "/api/storage"
        "/libs-release-local",
        status=200,
        json=self.dir_stat,
    )
    dir_stats = accessor.stat(dir_path)
    self.assertEqual(dir_stats.ctime,
                     dateutil.parser.parse("2014-02-18T15:35:29.361+04:00"))
    self.assertEqual(dir_stats.mtime,
                     dateutil.parser.parse("2014-02-18T15:35:29.361+04:00"))
    # Directories carry no file-level metadata.
    self.assertEqual(dir_stats.created_by, None)
    self.assertEqual(dir_stats.modified_by, None)
    self.assertEqual(dir_stats.mime_type, None)
    self.assertEqual(dir_stats.size, 0)
    self.assertEqual(dir_stats.sha1, None)
    self.assertEqual(dir_stats.sha256, None)
    self.assertEqual(dir_stats.md5, None)
    self.assertEqual(dir_stats.is_dir, True)
def md5(self):
    """Return the package md5: from Artifactory stat when available, otherwise
    computed locally from the packed file."""
    try:
        checksum = ArtifactoryPath.stat(self.pkg).md5
    except AttributeError:
        # self.pkg is not stat-able as an ArtifactoryPath — hash the local file.
        checksum = md5sum(self.packed_path)
    return checksum
def delete_artifact(artifact_url):
    """Delete *artifact_url* from Artifactory when it exists (no-op otherwise).

    Authenticates with the module-level api_key.
    """
    target = ArtifactoryPath(artifact_url, apikey=api_key)
    if not target.exists():
        return
    target.unlink()
unciphered_text = "thisismycreds" try: reponame = sys.argv[1] #daysdownloaded = sys.argv[2] #dayscreated = int(sys.argv[3]) #print("repo: %s downloaded archive days: %s Non-downloaded Artifacts: %s" % (reponame, daysdownloaded, dayscreated)) except: print("No arguments passed or argument incomplete or invalid argument.") sys.exit() aql = ArtifactoryPath( "http://artifactory-test-am2.devops.aig.net/artifactory/", auth=("demouser", unciphered_text), auth_type=HTTPBasicAuth, ) args = [ "items.find", { "repo": reponame, "$and": [ { "type": "file" }, # {"created_by":"abanzon"}, # {"size":{"$gt":"0"}} # {"created": {"$gt":"2020-05-05"}}
def get_packages(self, source, parser, downloader, list_or_file_path):
    """Search Artifactory (glob-based) for each requested package and register it.

    :param source: source adapter; args supply auth_type/auth/verify
    :param parser: path parser (patterns, validation, filtering)
    :param downloader: downloader collecting the resolved packages
    :param list_or_file_path: packages list or deps file to resolve
    :return: dict mapping package name -> Package (or None when allowed to fail)
    :raises CrosspmException: when several package instances match one request
    """
    _auth_type = source.args['auth_type'].lower() if 'auth_type' in source.args else 'simple'
    _art_auth = {}
    if 'auth' in source.args:
        if _auth_type == 'simple':
            _art_auth['auth'] = tuple(source.args['auth'])
        elif _auth_type == 'cert':
            _art_auth['cert'] = os.path.realpath(source.args['auth'])
    if 'verify' in source.args:
        # BUG FIX: was `source.args['verify'].lower in [...]` — tested the bound
        # method object for membership, so 'verify' was always False.
        _art_auth['verify'] = source.args['verify'].lower() in ['true', 'yes', '1']
    _pkg_name_col = self._config.name_column
    _packages_found = {}
    _pkg_name_old = ""
    for _paths in parser.get_paths(list_or_file_path, source):
        _packages = []
        _pkg_name = _paths['params'][_pkg_name_col]
        if _pkg_name != _pkg_name_old:
            _pkg_name_old = _pkg_name
            print_stdout('{}: {}'.format(
                _pkg_name,
                {k: v for k, v in _paths['params'].items() if k != _pkg_name_col}))
        for _path in _paths['paths']:
            _path_fixed, _path_pattern = parser.split_fixed_pattern(_path)
            _repo_paths = ArtifactoryPath(_path_fixed, **_art_auth)
            for _repo_path in _repo_paths.glob(_path_pattern):
                # found -> match (path ok) -> valid (properties ok)
                _mark = 'found'
                if parser.validate_path(str(_repo_path), _paths['params']):
                    _mark = 'match'
                    if parser.validate(_repo_path.properties, 'properties', _paths['params']):
                        _mark = 'valid'
                        _packages += [_repo_path]
                print_stdout(' {}: {}'.format(_mark, str(_repo_path)))
        _package = None
        if _packages:
            _packages = parser.filter_one(_packages, _paths['params'])
            if type(_packages) is dict:
                _packages = [_packages]
            if len(_packages) == 1:
                # one package found: ok!
                _package = Package(_pkg_name, _packages[0]['path'], _paths['params'],
                                   downloader, self, parser, _packages[0]['params'])
                _mark = 'chosen'
                print_stdout(' {}: {}'.format(_mark, str(_packages[0]['path'])))
            elif len(_packages) > 1:
                # BUG FIX: message previously read "...[{}] not found." which
                # contradicted the error (multiple instances WERE found).
                raise CrosspmException(
                    CROSSPM_ERRORCODE_MULTIPLE_DEPS,
                    'Multiple instances found for package [{}].'.format(_pkg_name))
            else:
                # Package not found: may be error, but it could be in other source.
                pass
        else:
            # Package not found: may be error, but it could be in other source.
            pass
        if (_package is not None) or (not self._config.no_fails):
            _added, _package = downloader.add_package(_pkg_name, _package)
        else:
            _added = False
        if _package is not None:
            _pkg_name = _package.get_name_and_path(True)
        if _added or (_package is not None):
            if (_package is not None) or (not self._config.no_fails):
                if (_package is not None) or (_packages_found.get(_pkg_name, None) is None):
                    _packages_found[_pkg_name] = _package
        if _added and (_package is not None):
            if downloader.do_load:
                _package.download(downloader.packed_path)
                _deps_file = _package.get_file(self._config.deps_lock_file_name,
                                               downloader.temp_path)
                if _deps_file:
                    _package.find_dependencies(_deps_file)
    return _packages_found
#artifactory_url = 'http://artifact.corp.continuum.net:8081' artifactory_url = os.environ['artifactory_url'] #repo_name = ['dt-dev_its-portal-net','dt_dev_yash'] repo_name = os.environ['repo_name'] #retention_period = '8mo' retention_period = os.environ['retention_period'] ## to validate the retention period variable if "mo" in retention_period or "w" in retention_period: print() else: print("Exiting due to variable declaration issue") sys.exit() repo_name = repo_name.split(',') aql = ArtifactoryPath("{}/artifactory/".format(artifactory_url), auth=('repluser', 'AP49A5SMDpZuQb7e9g7Tn5c45fbUfJkZMzmUSM')) # multiple repo list for r in repo_name: build_numbers = [] artifacts = aql.aql("items.find", { "type": "folder", "repo": "{}".format(r) }) build_info = [] for a in range(len(artifacts)): if (artifacts[a]["name"] != "."): build_info.append(int(artifacts[a]["name"])) build_max_no = max(build_info) for b in range(len(artifacts)):
help='Show files which are/could be deleted') parser.add_argument('--show', action='store_true', help='Show only result') try: options = parser.parse_args() except: sys.exit(0) repo = options.repo path = options.path days = options.time delete = options.delete verbose = options.verbose timesetting = options.timesetting aql = ArtifactoryPath(art_url, apikey=art_apikey) print("Searching....") args = aql_search.package_search(repo, path, days, timesetting) artifacts_list = aql.aql(*args) if options.show: print(len(artifacts_list)) exit() total_file_count = 0 if delete: log_name = log_path + "/log-deleting-" + datetime.now().strftime( "%Y%m%d-%H%M%S") + ".log" else: log_name = log_path + "/log-dry_run-" + datetime.now().strftime( "%Y%m%d-%H%M%S") + ".log"
import os
from artifactory import ArtifactoryPath

# Credentials and the artifact URL come from the environment (KeyError if unset).
JF_USER = os.environ['JF_USER']
JF_USER_TOKEN = os.environ['JF_USER_TOKEN']
ARTIFACT_TO_REMOVE = os.environ['ARTIFACT_TO_REMOVE']

# Remove the artifact when present; do nothing otherwise.
credentials = (JF_USER, JF_USER_TOKEN)
target = ArtifactoryPath(ARTIFACT_TO_REMOVE, auth=credentials)
if target.exists():
    target.unlink()
class TestArtifactoryPathGetAll(unittest.TestCase):
    """Tests for get_users/get_groups/get_projects against mocked REST endpoints."""

    # TODO: test repositories and permissions

    def setUp(self):
        self.arti = ArtifactoryPath("http://b.com/artifactory")

        # Users: listing endpoint plus per-user detail payloads.
        self.users_request_url = f"{self.arti.drive}/api/security/users"
        self.users = [
            {
                "name": "user_1",
                "uri": "http://b.com/artifactory/api/security/users/user_1",
                "realm": "internal",
            },
            {
                "name": "user_2",
                "uri": "http://b.com/artifactory/api/security/users/user_2",
                "realm": "internal",
            },
        ]
        self.user_1 = {"name": "user_1", "email": "*****@*****.**"}
        self.user_2 = {"name": "user_2", "email": "*****@*****.**"}

        # Groups: listing endpoint plus per-group detail payloads.
        self.groups_request_url = f"{self.arti.drive}/api/security/groups"
        self.groups = [
            {
                "name": "group_1",
                "uri": "http://b.com/artifactory/api/security/groups/group_1",
            },
            {
                "name": "group_2",
                "uri": "http://b.com/artifactory/api/security/groups/group_2",
            },
        ]
        self.group_1 = {
            "name": "group_1",
            "realm": "internal",
        }
        self.group_2 = {
            "name": "group_2",
            "realm": "internal",
        }

        # Projects live under /access at the server root, not under /artifactory.
        # BUG FIX: the original used `self.arti.drive.rstrip('/artifactory')`,
        # but str.rstrip strips a trailing *character set* ('/','a','r','t','i',
        # 'f','c','o','y'), not the literal suffix — it only worked here by
        # accident. Strip the exact suffix instead.
        access_root = self.arti.drive
        if access_root.endswith("/artifactory"):
            access_root = access_root[: -len("/artifactory")]
        self.projects_request_url = f"{access_root}/access/api/v1/projects"
        self.projects = [
            {
                "project_key": "project_key_1",
                "description": "description_1",
            },
            {
                "project_key": "project_key_2",
                "description": "description_2",
            },
        ]
        self.project_1 = {
            "project_key": "project_key_1",
            "description": "description_1",
            "admin_privileges": {},
        }
        self.project_2 = {
            "project_key": "project_key_2",
            "description": "description_2",
            "admin_privileges": {},
        }

    def test_get_users(self):
        """Eager fetch: one list call plus one detail call per user."""
        with responses.RequestsMock() as rsps:
            rsps.add(responses.GET, self.users_request_url, json=self.users, status=200)
            rsps.add(
                responses.GET,
                f"{self.users_request_url}/user_1",
                json=self.user_1,
                status=200,
            )
            rsps.add(
                responses.GET,
                f"{self.users_request_url}/user_2",
                json=self.user_2,
                status=200,
            )
            results = self.arti.get_users(lazy=False)
            for user in results:
                self.assertIsInstance(user, User)
            self.assertEqual(results[0].name, "user_1")
            self.assertEqual(results[0].email, "*****@*****.**")
            self.assertEqual(results[1].name, "user_2")
            self.assertEqual(results[1].email, "*****@*****.**")
            self.assertEqual(len(rsps.calls), 3)
            self.assertEqual(rsps.calls[0].request.url, self.users_request_url)
            self.assertEqual(rsps.calls[1].request.url, f"{self.users_request_url}/user_1")
            self.assertEqual(rsps.calls[2].request.url, f"{self.users_request_url}/user_2")

    def test_get_users_lazy(self):
        """Lazy fetch: only the list call; detail fields stay unset."""
        with responses.RequestsMock() as rsps:
            rsps.add(responses.GET, self.users_request_url, json=self.users, status=200)
            results = self.arti.get_users(lazy=True)
            for user in results:
                self.assertIsInstance(user, User)
            self.assertEqual(results[0].name, "user_1")
            self.assertIsNone(results[0].email)
            self.assertEqual(results[1].name, "user_2")
            self.assertIsNone(results[1].email)
            self.assertEqual(len(rsps.calls), 1)
            self.assertEqual(rsps.calls[0].request.url, self.users_request_url)

    def test_get_groups(self):
        """Eager fetch: one list call plus one detail call per group."""
        with responses.RequestsMock() as rsps:
            rsps.add(responses.GET, self.groups_request_url, json=self.groups, status=200)
            rsps.add(
                responses.GET,
                f"{self.groups_request_url}/group_1",
                json=self.group_1,
                status=200,
            )
            rsps.add(
                responses.GET,
                f"{self.groups_request_url}/group_2",
                json=self.group_2,
                status=200,
            )
            results = self.arti.get_groups(lazy=False)
            for group in results:
                self.assertIsInstance(group, Group)
            self.assertEqual(results[0].name, "group_1")
            self.assertEqual(results[0].realm, "internal")
            self.assertEqual(results[1].name, "group_2")
            self.assertEqual(results[1].realm, "internal")
            self.assertEqual(len(rsps.calls), 3)
            self.assertEqual(rsps.calls[0].request.url, self.groups_request_url)
            self.assertEqual(rsps.calls[1].request.url, f"{self.groups_request_url}/group_1")
            self.assertEqual(rsps.calls[2].request.url, f"{self.groups_request_url}/group_2")

    def test_get_groups_lazy(self):
        """Lazy fetch: only the list call; realm falls back to the default
        ("artifactory") because the detail endpoint is never hit."""
        with responses.RequestsMock() as rsps:
            rsps.add(responses.GET, self.groups_request_url, json=self.groups, status=200)
            results = self.arti.get_groups(lazy=True)
            for group in results:
                self.assertIsInstance(group, Group)
            self.assertEqual(results[0].name, "group_1")
            self.assertEqual(results[0].realm, "artifactory")
            self.assertEqual(results[1].name, "group_2")
            self.assertEqual(results[1].realm, "artifactory")
            self.assertEqual(len(rsps.calls), 1)
            self.assertEqual(rsps.calls[0].request.url, self.groups_request_url)

    def test_get_projects(self):
        """Eager fetch: one list call plus one detail call per project."""
        with responses.RequestsMock() as rsps:
            rsps.add(responses.GET, self.projects_request_url, json=self.projects, status=200)
            rsps.add(
                responses.GET,
                f"{self.projects_request_url}/project_key_1",
                json=self.project_1,
                status=200,
            )
            rsps.add(
                responses.GET,
                f"{self.projects_request_url}/project_key_2",
                json=self.project_2,
                status=200,
            )
            results = self.arti.get_projects(lazy=False)
            for project in results:
                self.assertIsInstance(project, Project)
            self.assertEqual(results[0].project_key, "project_key_1")
            self.assertEqual(results[0].description, "description_1")
            self.assertEqual(results[1].project_key, "project_key_2")
            self.assertEqual(results[1].description, "description_2")
            self.assertEqual(len(rsps.calls), 3)
            self.assertEqual(rsps.calls[0].request.url, self.projects_request_url)
            self.assertEqual(rsps.calls[1].request.url,
                             f"{self.projects_request_url}/project_key_1")
            self.assertEqual(rsps.calls[2].request.url,
                             f"{self.projects_request_url}/project_key_2")

    def test_get_projects_lazy(self):
        """Lazy fetch: only the list call; description stays empty."""
        with responses.RequestsMock() as rsps:
            rsps.add(responses.GET, self.projects_request_url, json=self.projects, status=200)
            results = self.arti.get_projects(lazy=True)
            for project in results:
                self.assertIsInstance(project, Project)
            self.assertEqual(results[0].project_key, "project_key_1")
            self.assertEqual(results[0].description, "")
            self.assertEqual(results[1].project_key, "project_key_2")
            self.assertEqual(results[1].description, "")
            self.assertEqual(len(rsps.calls), 1)
            self.assertEqual(rsps.calls[0].request.url, self.projects_request_url)
def program_esp32(wd):
    """Fetch ESP32 firmware images from Artifactory into ``wd/espfw`` and flash
    them over every responsive serial port using esptool.

    :param wd: working directory; images are stored in ``wd/espfw``.
    """
    import sys
    import glob
    import serial

    def serial_ports():
        """ Lists serial port names

            :raises EnvironmentError:
                On unsupported or unknown platforms
            :returns:
                A list of the serial ports available on the system
        """
        if sys.platform.startswith('win'):
            ports = ['COM%s' % (i + 1) for i in range(256)]
        elif sys.platform.startswith('linux') or sys.platform.startswith('cygwin'):
            # this excludes your current terminal "/dev/tty"
            ports = glob.glob('/dev/tty[A-Za-z]*')
        elif sys.platform.startswith('darwin'):
            ports = glob.glob('/dev/tty.*')
        else:
            raise EnvironmentError('Unsupported platform')
        result = []
        for port in ports:
            try:
                s = serial.Serial(port)
                s.close()
                result.append(port)
            except (OSError, serial.SerialException):
                pass
        return result

    # get files from artifactory to espfw
    # (BUG FIX: this import appeared twice in the original; once is enough)
    from artifactory import ArtifactoryPath
    path = ArtifactoryPath(
        "http://artifactory:8081/artifactory/SircApplicationMain/ImageInjectTools/build_output/")
    # create esp32 dir
    espfw_dir = str(Path(wd) / 'espfw')
    Path(espfw_dir).mkdir(parents=True, exist_ok=True)
    # create file for every artifact
    for file in path:
        with file.open() as fd:
            with open(Path(espfw_dir) / file.name, "wb") as out:
                out.write(fd.read())
        print(f'{file.name} was copied to {espfw_dir}')

    # find available com ports
    # (BUG FIX: the original rebound the name `serial_ports` to the result,
    # shadowing the helper function)
    available_ports = serial_ports()
    print(f'available serial ports: {available_ports}')
    for serial_port in available_ports:
        # Build the command before the try so it is always defined if logging
        # from the handler (the original could hit NameError there).
        program_cmd = (
            f'python {espfw_dir}/esptool.py -p {serial_port} -b 460800 '
            f'--after hard_reset write_flash --flash_mode dio --flash_size detect '
            f'--flash_freq 40m 0x1000 {espfw_dir}/bootloader.bin '
            f'0x8000 {espfw_dir}/partition-table.bin '
            f'0x10000 {espfw_dir}/spi-and-eth.bin')
        try:
            # program flash
            print(f'Program ESP32 wait...')
            os.system(program_cmd)
            print(f'Success program esp32 with {serial_port}')
        except Exception:
            # BUG FIX: was a bare `except:`, which also swallowed
            # KeyboardInterrupt/SystemExit.
            print(f'failed to program esp32 with com: {serial_port} cmd: {program_cmd}')
def get_packages(self, source, parser, downloader, list_or_file_path, property_validate=True):
    """Search Artifactory via AQL for each requested package and register it.

    :param source: source adapter; args supply auth_type/auth/verify
    :param parser: path parser (patterns, validation, filtering)
    :param downloader: downloader collecting the resolved packages
    :param list_or_file_path: packages list or deps file to resolve
    :param property_validate: for `root` packages we need check property, bad
        if we find packages from `lock` file, we can skip validate part
    :return: OrderedDict mapping package name -> Package (or None when not found)
    :raises CrosspmException: missing auth, or multiple instances for one package
    """
    _auth_type = source.args['auth_type'].lower() if 'auth_type' in source.args else 'simple'
    _art_auth_etc = {}
    if 'auth' in source.args:
        self.search_auth(list_or_file_path, source)
        if _auth_type == 'simple':
            _art_auth_etc['auth'] = HTTPBasicAuth(*tuple(source.args['auth']))
            session.auth = _art_auth_etc['auth']
        # elif _auth_type == 'cert':
        #     _art_auth_etc['cert'] = os.path.realpath(os.path.expanduser(source.args['auth']))
    if 'auth' not in _art_auth_etc:
        msg = 'You have to set auth parameter for sources with artifactory-aql adapter'
        raise CrosspmException(CROSSPM_ERRORCODE_ADAPTER_ERROR, msg)
    if 'verify' in source.args:
        # BUG FIX: was `source.args['verify'].lower in [...]` — the method object
        # itself was tested for membership, so 'verify' was always False.
        _art_auth_etc['verify'] = source.args['verify'].lower() in ['true', 'yes', '1']
    else:
        _art_auth_etc['verify'] = False
    _pkg_name_column = self._config.name_column
    _secret_variables = self._config.secret_variables
    _packages_found = OrderedDict()
    _pkg_name_old = ""
    _packed_exist = False
    _packed_cache_params = None
    self._log.info('parser: {}'.format(parser._name))
    for _paths in parser.get_paths(list_or_file_path, source):
        # If "parser"-column specified - find only in this parser
        parser_names = _paths['params'].get('parser')
        if parser_names and parser_names != "*":
            self._log.info("Specified parsers: {}".format(parser_names))
            parsers = parser_names.split(',')
            if parser._name not in parsers:
                self._log.info("Skip parser: {}".format(parser._name))
                continue
        _packages = []
        _params_found = {}
        _params_found_raw = {}
        last_error = ''
        _pkg_name = _paths['params'][_pkg_name_column]
        if _pkg_name != _pkg_name_old:
            _pkg_name_old = _pkg_name
            # Log package params once per package, hiding secrets and service columns.
            self._log.info('{}: {}'.format(
                _pkg_name, {
                    k: v
                    for k, v in _paths['params'].items()
                    if (k not in (_pkg_name_column, 'repo') and k not in _secret_variables)
                }))
        for _sub_paths in _paths['paths']:
            _tmp_params = dict(_paths['params'])
            self._log.info('repo: {}'.format(_sub_paths['repo']))
            for _path in _sub_paths['paths']:
                _tmp_params['repo'] = _sub_paths['repo']
                # ------ START ----
                # HACK for prefer-local
                if self._config.prefer_local and not parser.has_rule('properties'):
                    params = parser.get_params_with_extra('path', _paths['params'])
                    for param in params:
                        param['repo'] = _tmp_params['repo']
                        _path_packed = downloader.cache.path_packed(None, param)
                        _packed_exist = os.path.isfile(_path_packed)
                        if _packed_exist:
                            self._log.info(
                                "Skip searching, use package cache in path {}".format(_path_packed))
                            _packed_cache_params = param
                            break  # break check local cache
                    if _packed_exist:
                        break  # break connect to artifactory
                # ------ END ----
                _path_fixed, _path_pattern, _file_name_pattern = \
                    parser.split_fixed_pattern_with_file_name(_path)
                try:
                    _artifactory_server = _tmp_params['server']
                    _search_repo = _tmp_params['repo']
                    # Get AQL path pattern, with fixed part path, without
                    # artifactory url and repository name
                    _aql_path_pattern = _path_fixed[len(_artifactory_server) + 1 +
                                                    len(_search_repo) + 1:]
                    if _path_pattern:
                        _aql_path_pattern = _aql_path_pattern + "/" + _path_pattern
                    _aql_query_url = '{}/api/search/aql'.format(_artifactory_server)
                    _aql_query_dict = {
                        "repo": {
                            "$eq": _search_repo,
                        },
                        "path": {
                            "$match": _aql_path_pattern,
                        },
                        "name": {
                            "$match": _file_name_pattern,
                        },
                    }
                    # Remove path if is empty string
                    if not _aql_path_pattern:
                        _aql_query_dict.pop('path')
                    query = 'items.find({query_dict}).include("*", "property")'.format(
                        query_dict=json.dumps(_aql_query_dict))
                    session.auth = _art_auth_etc['auth']
                    r = session.post(_aql_query_url, data=query, verify=_art_auth_etc['verify'])
                    r.raise_for_status()
                    _found_paths = r.json()
                    for _found in _found_paths['results']:
                        _repo_path = "{artifactory}/{repo}/{path}/{file_name}".format(
                            artifactory=_artifactory_server,
                            repo=_found['repo'],
                            path=_found['path'],
                            file_name=_found['name'])
                        _repo_path = ArtifactoryPath(_repo_path, **_art_auth_etc)
                        # found -> match (path ok) -> valid (properties ok)
                        _mark = 'found'
                        _matched, _params, _params_raw = parser.validate_path(
                            str(_repo_path), _tmp_params)
                        if _matched:
                            _params_found[_repo_path] = {k: v for k, v in _params.items()}
                            _params_found_raw[_repo_path] = {k: v for k, v in _params_raw.items()}
                            _mark = 'match'
                            # Check if it's `root` packages or from `lock` file
                            # ALSO, if from `lock` and have * in name - validate with property
                            property_validate_tmp = property_validate or '*' in _file_name_pattern
                            # If have not rule in config, skip this part
                            if parser.has_rule('properties') and property_validate_tmp:
                                _found_properties = {
                                    x['key']: x.get('value', '')
                                    for x in _found['properties']
                                }
                                _valid, _params = parser.validate(
                                    _found_properties, 'properties', _tmp_params,
                                    return_params=True)
                            else:
                                _valid, _params = True, {}
                            if _valid:
                                _mark = 'valid'
                                _packages += [_repo_path]
                                _params_found[_repo_path].update(
                                    {k: v for k, v in _params.items()})
                                _params_found[_repo_path]['filename'] = str(_repo_path.name)
                                _params_found[_repo_path]['parser'] = parser._name
                        self._log.debug(' {}: {}'.format(_mark, str(_repo_path)))
                except RuntimeError as e:
                    try:
                        err = json.loads(e.args[0])
                    except Exception:
                        err = {}
                    if isinstance(err, dict):
                        # Check errors
                        # :e.args[0]: {"errors": [{"status": 404, "message": "Not Found"}]}
                        for error in err.get('errors', []):
                            err_status = error.get('status', -1)
                            err_msg = error.get('message', '')
                            if err_status == 401:
                                msg = 'Authentication error[{}]{}'.format(
                                    err_status, (': {}'.format(err_msg)) if err_msg else '')
                            elif err_status == 404:
                                msg = last_error
                            else:
                                msg = 'Error[{}]{}'.format(
                                    err_status, (': {}'.format(err_msg)) if err_msg else '')
                            # Avoid logging the same error repeatedly.
                            if last_error != msg:
                                self._log.error(msg)
                            last_error = msg
        _package = None
        # HACK for prefer-local
        if _packed_exist:
            # HACK - Normalize params for cached archive
            for key, value in _packed_cache_params.items():
                if isinstance(value, list):
                    value = ['' if x is None else x for x in value]
                    _packed_cache_params[key] = value
            _package = Package(_pkg_name, None, _paths['params'], downloader, self, parser,
                               _packed_cache_params, list_or_file_path['raw'], {},
                               in_cache=True)
        # END HACK
        if _packages:
            _tmp = copy.deepcopy(_params_found)
            _packages = parser.filter_one(_packages, _paths['params'], _tmp)
            if isinstance(_packages, dict):
                _packages = [_packages]
            if len(_packages) == 1:
                _stat_pkg = self.pkg_stat(_packages[0]['path'])
                _params_raw = _params_found_raw.get(_packages[0]['path'], {})
                _params_tmp = _params_found.get(_packages[0]['path'], {})
                _params_tmp.update({
                    k: v
                    for k, v in _packages[0]['params'].items() if k not in _params_tmp
                })
                _package = Package(_pkg_name, _packages[0]['path'], _paths['params'],
                                   downloader, self, parser, _params_tmp, _params_raw,
                                   _stat_pkg)
                _mark = 'chosen'
                self._log.info(' {}: {}'.format(_mark, str(_packages[0]['path'])))
            elif len(_packages) > 1:
                # BUG FIX: message previously read "...[{}] not found." which
                # contradicted the error (multiple instances WERE found).
                raise CrosspmException(
                    CROSSPM_ERRORCODE_MULTIPLE_DEPS,
                    'Multiple instances found for package [{}].'.format(_pkg_name))
            else:
                # Package not found: may be error, but it could be in other source.
                pass
        else:
            # Package not found: may be error, but it could be in other source.
            pass
        if (_package is not None) or (not self._config.no_fails):
            _added, _package = downloader.add_package(_pkg_name, _package)
        else:
            _added = False
        if _package is not None:
            _pkg_name = _package.name
        if _added or (_package is not None):
            if (_package is not None) or (not self._config.no_fails):
                if (_package is not None) or (_packages_found.get(_pkg_name, None) is None):
                    _packages_found[_pkg_name] = _package
        if _added and (_package is not None):
            if downloader.do_load:
                _package.download()
                _deps_file = _package.get_file(self._config.deps_lock_file_name)
                if downloader.recursive:
                    if _deps_file:
                        _package.find_dependencies(_deps_file, property_validate=False)
                elif self._config.deps_file_name:
                    _deps_file = _package.get_file(self._config.deps_file_name)
                    if _deps_file and os.path.isfile(_deps_file):
                        _package.find_dependencies(_deps_file, property_validate=False)
    # HACK for not found packages
    _package_names = [x[self._config.name_column] for x in list_or_file_path['raw']]
    # BUG FIX: skip None values — with no_fails a missing package may be stored
    # as None, and `.name` on it raised AttributeError here.
    _packages_found_names = [x.name for x in _packages_found.values() if x is not None]
    for package in _package_names:
        if package not in _packages_found_names:
            _packages_found[package] = None
    return _packages_found
p = Properties()
# Load build metadata produced by the Bamboo plan.
with open("bambo-test.properties", "rb") as f:
    p.load(f)

# NOTE(review): key spelling "BUILD_NUMMBER" is assumed to match the
# .properties file — confirm against the producer.
build = p["BUILD_NUMMBER"].data
planName = p["REPO_NAME"].data
print("BUILD NUMBER IS =====> " + build)
print("REPO NAME IS =====> " + planName)

war_name = "{0}-0.0.1-SNAPSHOT.war".format(planName)

# Compose the download URL from the Bamboo-provided environment.
url = os.environ['bamboo_artifactory_url'] + "com/services/{0}/{1}-0.0.1-SNAPSHOT.war".format(
    build, planName)
print("Artifactory URL ====> " + url)

auth = (os.environ['bamboo_artifactory_username'],
        os.environ['bamboo_artifactory_password'])
path = ArtifactoryPath(url, auth=auth)

# Download the WAR next to this script.
with path.open() as fd, open(war_name, "wb") as out:
    out.write(fd.read())

# Drop the WAR into the local Tomcat webapps directory.
src = war_name
dst = "C:\\apache-tomcat-9.0.44\\webapps\\{0}-0.0.1-SNAPSHOT.war".format(planName)
shutil.copyfile(
    src,
    dst,
)

print(" Deployment Sucessful")
print("==================API Deployment Ends ====================")
def _find_artifact_url(self, artifactory_url, file_prefix):
    """Return the artifact path matching *file_prefix* in the newest build.

    Lists the builds under *artifactory_url*, selects the one with the
    highest build number, then locates the matching artifact inside it.
    """
    builds = ArtifactoryPath(artifactory_url)
    newest_build = max(builds, key=self._extract_build_number)
    return self._find_artifact(newest_build, file_prefix)
def get_packages(self, source, parser, downloader, list_or_file_path):
    """Resolve the requested packages against an Artifactory source.

    For every path pattern produced by *parser*, globs the repository,
    validates each candidate against the request parameters, chooses exactly
    one package per request, optionally downloads it and walks its dependency
    lock file.

    Returns a dict mapping package name -> Package (the value may be None
    when a package was not found and ``no_fails`` is configured).

    Raises CrosspmException (CROSSPM_ERRORCODE_MULTIPLE_DEPS) when more than
    one candidate survives filtering for a single request.
    """
    _auth_type = source.args['auth_type'].lower() if 'auth_type' in source.args else 'simple'
    _art_auth = {}
    if 'auth' in source.args:
        if _auth_type == 'simple':
            _art_auth['auth'] = tuple(source.args['auth'])
        elif _auth_type == 'cert':
            _art_auth['cert'] = os.path.realpath(source.args['auth'])
    if 'verify' in source.args:
        # BUGFIX: was ``source.args['verify'].lower in [...]`` — that tests the
        # bound method object itself, which is never in the list, so SSL
        # verification was silently always False.  The method must be called.
        _art_auth['verify'] = source.args['verify'].lower() in ['true', 'yes', '1']
    _pkg_name_col = self._config.name_column
    _packages_found = {}
    _pkg_name_old = ""
    for _paths in parser.get_paths(list_or_file_path, source):
        _packages = []
        _pkg_name = _paths['params'][_pkg_name_col]
        if _pkg_name != _pkg_name_old:
            _pkg_name_old = _pkg_name
            print_stdout(
                '{}: {}'.format(_pkg_name,
                                {k: v for k, v in _paths['params'].items() if k != _pkg_name_col}))
        for _path in _paths['paths']:
            _path_fixed, _path_pattern = parser.split_fixed_pattern(_path)
            _repo_paths = ArtifactoryPath(_path_fixed, **_art_auth)
            for _repo_path in _repo_paths.glob(_path_pattern):
                # Progressively upgrade the mark as each validation stage passes.
                _mark = 'found'
                if parser.validate_path(str(_repo_path), _paths['params']):
                    _mark = 'match'
                    if parser.validate(_repo_path.properties, 'properties', _paths['params']):
                        _mark = 'valid'
                        _packages.append(_repo_path)
                print_stdout(' {}: {}'.format(_mark, str(_repo_path)))
        _package = None
        if _packages:
            _packages = parser.filter_one(_packages, _paths['params'])
            if isinstance(_packages, dict):
                _packages = [_packages]
            if len(_packages) == 1:
                # Exactly one package survived filtering: the expected case.
                _package = Package(_pkg_name, _packages[0]['path'], _paths['params'],
                                   downloader, self, parser, _packages[0]['params'])
                _mark = 'chosen'
                print_stdout(' {}: {}'.format(_mark, str(_packages[0]['path'])))
            elif len(_packages) > 1:
                # BUGFIX: the old message read "Multiple instances found for
                # package [...] not found." — contradictory; trimmed.
                raise CrosspmException(
                    CROSSPM_ERRORCODE_MULTIPLE_DEPS,
                    'Multiple instances found for package [{}].'.format(_pkg_name)
                )
            # else: package not found here — it may exist in another source.
        # else: no candidates at all — same "maybe another source" situation.
        if (_package is not None) or (not self._config.no_fails):
            _added, _package = downloader.add_package(_pkg_name, _package)
        else:
            _added = False
        if _package is not None:
            _pkg_name = _package.get_name_and_path(True)
        if _added or (_package is not None):
            if (_package is not None) or (not self._config.no_fails):
                # Record misses (None) only once; never overwrite a real hit.
                if (_package is not None) or (_packages_found.get(_pkg_name, None) is None):
                    _packages_found[_pkg_name] = _package
        if _added and (_package is not None):
            if downloader.do_load:
                _package.download(downloader.packed_path)
                _deps_file = _package.get_file(self._config.deps_lock_file_name,
                                               downloader.temp_path)
                if _deps_file:
                    _package.find_dependencies(_deps_file)
    return _packages_found
def main():
    """Ansible module entry point: redeploy a Spring Boot microservice jar.

    Flow: stop the running service, refresh its properties file, rotate the
    backup jar, download the requested jar version from Artifactory, start
    it, and fall back to the backup jar if the new one fails to come up.
    Exits via module.exit_json()/fail_json(), so it never returns normally.
    """
    # creating AnsibleModule object with acceptable attributes
    module = AnsibleModule(argument_spec=dict(
        name=dict(required=True, type='str'),
        jar_version=dict(required=True, type='str'),
        environment=dict(required=True, type='str'),
    ), supports_check_mode=True)
    # condition for check mode/dry run: report "no change" and stop
    if module.check_mode:
        module.exit_json(changed=False)
    # initialising the passed attributes as variables
    name = module.params['name']
    jar_version = module.params['jar_version']
    environment = module.params['environment']
    # killing the microservice if it is running on the server
    if is_running(name):
        # NOTE(review): demolish() is called without a target — presumably it
        # resolves the process itself; confirm it stops *name* specifically.
        pid = demolish()
        time.sleep(2)  # give the process a moment to die before re-checking
        if is_running(name):
            msg = "Program could not be stopped. Please check the host."
            module.fail_json(msg=msg)
        else:
            program_status = "Program '{0}' having PID '{1}' has been killed on this host.".format(
                name, pid)
    else:
        program_status = "Program '{0}' is not running on this host.".format(
            name)
    # copying the properties file and timestamping the previous property file. Creating the property file path if it does not exist.
    property_dir = "/home/ngaurav/{0}/resources/".format(name)
    property_file = "/home/ngaurav/{0}/resources/application-{1}.properties".format(
        name, environment)
    property_src = "/root/test/application-{0}.properties".format(environment)
    if filepath_exists(property_file):
        timestamp(property_file)
        copy_reply = copy(property_src, property_file)
        property_file_status = "Property file has been timestamped. New property {0}.".format(
            copy_reply)
    elif filepath_exists(property_dir):
        # Directory exists but no previous property file: plain copy.
        file_copy_status = copy(property_src, property_file)
        property_file_status = "There exists no previous property file. {0} ".format(
            file_copy_status)
    else:
        path_create(property_dir)
        property_file_status = "File path has been created."
    # deleting previous backup jar and creating the backup directory if it does not exist.
    backup_jar = "/dev/shm/eSource/Backup_Deployment/Artifacts/{0}-{1}.jar".format(
        name, jar_version)
    backup_dir = "/dev/shm/eSource/Backup_Deployment/Artifacts/"
    current_jar = "/dev/shm/eSource/Current_Deployment/Artifacts/{0}-{1}.jar".format(
        name, jar_version)
    current_dir = "/dev/shm/eSource/Current_Deployment/Artifacts/"
    if filepath_exists(backup_jar):
        delete(backup_jar)
        backup_jar_status = "Backup jar has been deleted."
    elif filepath_exists(backup_dir):
        backup_jar_status = "Backup directory exists but there is no backup jar yet."
    else:
        path_create(backup_dir)
        backup_jar_status = "Backup directory did not exist previously. It has been created."
    # backing up the current jar to backup directory. Creating the current deployment directory if it does not exist.
    if filepath_exists(current_dir):
        copyjar_reply = copy(current_jar, backup_dir)
        current_jar_status = "The current jar {0} for backup".format(
            copyjar_reply)
    else:
        path_create(current_dir)
        current_jar_status = "The directory for current deployments did not exist. It has been created."
    if filepath_exists(current_jar):
        delete(current_jar)
    # downloading the latest jar and moving it to the current deployment path
    # NOTE(review): credentials and host are hard-coded below — move them to
    # module parameters or a vault before shipping.
    path = ArtifactoryPath(
        "http://ec2-34-210-28-18.us-west-2.compute.amazonaws.com:8081/artifactory/libs-release-local/com/wlt/cel/{0}/{1}/{0}-{1}.jar"
        .format(name, jar_version),
        auth=('admin', 'password'))
    with path.open() as fd:
        with open("{0}-{1}.jar".format(name, jar_version), "wb") as out:
            out.write(fd.read())
    # The download lands in the module's working directory; move it into place.
    here = "/root/module/{0}-{1}.jar".format(name, jar_version)
    move(here, current_dir)
    jar_download_status = " Downloaded latest jar."
    # starting up jar and checking if it is running with backup steps in case new jar does not start
    delete_current_jar = '{0}{1}-{2}.jar'.format(current_dir, name, jar_version)
    p = subprocess.Popen(
        'java -jar -Dspring.profiles.active={0} -Dspring.config.location={1} {2}{3}-{4}.jar'
        .format(environment, property_dir, current_dir, name, jar_version),
        shell=True,
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT)
    if is_running(name):
        running_jar_status = 'New jar has been started.'
    else:
        # New jar failed to start: remove it, restore the backup, retry once.
        delete(delete_current_jar)
        move(backup_jar, current_dir)
        p = subprocess.Popen(
            'java -jar -Dspring.profiles.active={0} -Dspring.config.location={1} {2}{3}-{4}.jar'
            .format(environment, property_dir, current_dir, name, jar_version),
            shell=True,
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT)
        if is_running(name):
            running_jar_status = "Failed jar has been removed. The backup jar has been started."
        else:
            running_jar_status = "Failed jar has been removed. The backup jar could not be started. Please check the host manually."
    # Report every step's outcome back to Ansible in one shot.
    module.exit_json(program_status=program_status,
                     property_file_status=property_file_status,
                     backup_jar_status=backup_jar_status,
                     current_jar_status=current_jar_status,
                     jar_download_status=jar_download_status,
                     running_jar_status=running_jar_status)
def get_path(self, path):
    """Return an ArtifactoryPath rooted at this server for the given path."""
    url = self.server + '/' + path + "/"
    credentials = self.get_auth()
    return ArtifactoryPath(url, auth=credentials, verify=self.ssl_verify)
import yaml import sys from artifactory import ArtifactoryPath from requests.auth import HTTPBasicAuth import re ## required variables #artifactory_url = 'http://artifact.corp.continuum.net:8081' artifactory_url = os.environ['artifactory_url'] # artifactory user name artifactory_username = '******' # artifactoy password artifactory_password = os.environ['artifactory_password'] aql = ArtifactoryPath("{}/artifactory/".format(artifactory_url), auth=('{}'.format(artifactory_username), '{}'.format(artifactory_password))) f = open("manifest_source_file", 'w') global_manifest = {} global_manifest['packages'] = [] class windows_binary_version: def __init__(self, repo_name, max_build_no): self.repo_name = repo_name self.max_build_no = max_build_no def windows_binary_version_artifactory_call(self): build_info = requests.get("{}/artifactory/api/build/dev_{}/{}".format( artifactory_url, self.repo_name, self.max_build_no),
#!/usr/bin/env python
# List every entry under an Artifactory storage folder.

pxe_tool_url = "http://"
projectxxx_url = "http://artifacts.companyxxx.com.cn/artifactory/api/storage/zxproject0xxx-release-local/project0xxx2.0/projectxxx/Bugfix_P8B2/"

from artifactory import ArtifactoryPath

# path = ArtifactoryPath(pxe_tool_url)
path = ArtifactoryPath(projectxxx_url)

for p in path:
    # BUGFIX: replaced the Python 2 ``print p`` statement with the print()
    # function so the script also runs under Python 3.
    print(p)
import sys
import json
from artifactory import ArtifactoryPath
from pprint import pprint

# Count how many artifacts in the instance look like jars.
art_url = "http://jfrog.local/artifactory"
key = "AKCp5ekmesKJi3ccYGVL5EAxGTtwvQyHwKrkQ9jH3D4KFdL91BLERDphTXjSAPJMp2x3zwXTC"
# Path to artifactory, NO repo.
aql = ArtifactoryPath("http://35.193.65.132/artifactory",
                      apikey="AKCp5ekmesKJi3ccYGVL5EAxGTtwvQyHwKrkQ9jH3D4KFdL91BLERDphTXjSAPJMp2x3zwXTC")
artifacts = aql.aql("items.find()", ".include", ["name", "stat.downloads"])

# Keep only artifacts whose name mentions "jar".
# BUGFIXES vs the original loop:
#  * the inner loop variable no longer shadows the module-level ``key``;
#  * the unused ``count = 0`` assignment is gone;
#  * non-string field values (e.g. stat.downloads counts) are skipped, since
#    ``"jar" in <int>`` raises TypeError;
#  * each artifact is appended at most once (``break`` after the first match)
#    instead of once per matching field.
jarlistall = []
for artifactdetails in artifacts:
    for field_value in artifactdetails.values():
        if isinstance(field_value, str) and "jar" in field_value:
            jarlistall.append(artifactdetails)
            break

total_jars = len(jarlistall)
print("total_jars=", total_jars)
password = os.getenv('HELM_CHART_MUSEUM_TOKEN') if not password: exit_environment_not_set('HELM_CHART_MUSEUM_TOKEN') artifactory_url = os.getenv('HELM_CHART_ARTIFACTORY_URL') if not artifactory_url: exit_environment_not_set('HELM_CHART_ARTIFACTORY_URL') helm_repo = os.getenv('HELM_CHART_MUSEUM_REPO') if not helm_repo: exit_environment_not_set('HELM_CHART_MUSEUM_REPO') # Login to artifactory credentials = (username, password) full_artifactory_url = artifactory_url + helm_repo + '/' artifactory_path = ArtifactoryPath( full_artifactory_url, auth=credentials, ) artifactory_path.touch() # List from html website as GET API does not provide creation timestamp r = requests.get(full_artifactory_url) # Split response body per artifact artifact_list = re.search("<a href(.*)</pre>", str(r.content)).group(0).split('<a href') artifact_list_trimmed = list(filter(lambda x: x != "", artifact_list)) # Extract artifact name and date clean_artifact_map = map( lambda p: re.search(">(.*)</a>[ ]+(.*) ", str(p)).group(1, 2), artifact_list_trimmed,
def artifactory(artifactory_server, artifactory_auth):
    """Yield an ArtifactoryPath client for *artifactory_server* (fixture-style)."""
    yield ArtifactoryPath(artifactory_server, auth=artifactory_auth)
def setUp(self):
    """Create the ArtifactoryPath instance the test cases operate on."""
    base_url = "http://b/artifactory"
    self.aql = ArtifactoryPath(base_url)
from artifactory import ArtifactoryPath

# Print every entry in the deployment repository.
# NOTE(review): the credentials below are hard-coded — consider moving them
# to environment variables or a secrets store.
repo = ArtifactoryPath(
    "https://na.artifactory.swg-devops.com/artifactory/txo-dswim-esb-deployment-generic-virtual",
    auth=('*****@*****.**', 'AP6LE3b5ML1LjbFAWiXjZtnpsUr'))

for entry in repo:
    print(entry)
'-url', '--blobconverter-url', type=str, help="URL to custom BlobConverter URL to be used for conversion", required=False) args = parser.parse_args() if None in (args.username, args.password): parser.print_help() sys.exit(1) if args.blobconverter_url is not None: blobconverter.set_defaults(url=args.blobconverter_url) path = ArtifactoryPath( "https://artifacts.luxonis.com/artifactory/blobconverter-backup/blobs", auth=(args.username, args.password)) if not path.exists(): path.mkdir() priority_models = [ "mobilenet-ssd", "efficientnet-b0", "vehicle-license-plate-detection-barrier-0106", "vehicle-detection-adas-0002", "license-plate-recognition-barrier-0007" "vehicle-attributes-recognition-barrier-0039", "face-detection-retail-0004", "landmarks-regression-retail-0009" ] backup_shaves = range(1, 17) for model_name, shaves in itertools.product(priority_models, backup_shaves): print("Deploying {} with {} shaves...".format(model_name, shaves))