def results(self):
    """Return the Geopackage artifact(s) produced by this export.

    One artifact per theme when ``per_theme`` is set, otherwise a single
    artifact wrapping the combined output geopackage.
    """
    if not self.per_theme:
        # Single geopackage covering every theme.
        return [Artifact([self.output_gpkg], Geopackage.name)]
    per_theme_artifacts = []
    for theme in self.feature_selection.themes:
        gpkg_path = os.path.join(self.stage_dir, slugify(theme)) + ".gpkg"
        per_theme_artifacts.append(
            Artifact([gpkg_path], Geopackage.name, theme=theme))
    return per_theme_artifacts
def __init__(self, extention):
    '''
    Initialise a source-archive artifact.

    :param extention: The extention (sic) of the source archive, e.g. "tar.gz".
    :type extention: str
    '''
    archive_name = 'archive.{}'.format(extention)
    category = 'Source code ({})'.format(extention)
    Artifact.__init__(self, archive_name, category, '', '', 'compression')
def add_artifact(self, collections, choice=0, lock=0, fake=None):
    """Create an Artifact and register it with the local request service.

    :param collections: collections the artifact belongs to
    :param choice: forwarded to ``Artifact``
    :param lock: forwarded to ``Artifact``
    :param fake: ``Faker`` instance used for generated fields; a fresh one is
        created per call when not supplied
    """
    # BUG FIX: the caller-supplied ``fake`` was previously ignored — a brand
    # new Faker() was always passed to Artifact.  The default was also a
    # mutable-style default evaluated once at def time; create it lazily.
    if fake is None:
        fake = Faker()
    artifact = Artifact(self, collections, choice=choice, lock=lock, fake=fake)
    r = requests.post("http://127.0.0.1:5300/request/add-artifact",
                      json=artifact.__dict__())
    print(r.text)
def results(self):
    """Return one Geopackage artifact per selected theme."""
    return [
        Artifact([os.path.join(self.stage_dir, theme) + ".gpkg"],
                 Geopackage.name, theme=theme)
        for theme in self.feature_selection.themes
    ]
def results(self):
    """KML artifacts: one per theme/geometry pair, or one combined set.

    With ``per_theme`` set each .kml file becomes its own artifact; otherwise
    all file paths are collected into a single Artifact named "kml".
    """
    artifacts = []
    combined = []
    for theme in self.feature_selection.themes:
        for geom_type in self.feature_selection.geom_types(theme):
            stem = slugify(theme) + "_" + geom_type
            kml_path = os.path.join(self.output_dir, stem) + ".kml"
            if self.per_theme:
                artifacts.append(Artifact([kml_path], KML.name, theme=theme))
            else:
                combined.append(kml_path)
    if not self.per_theme:
        artifacts.append(Artifact(combined, KML.name, basename="kml"))
    return artifacts
def results(self):
    """Return one KML artifact for each theme/geometry-type pair."""
    return [
        Artifact(
            [os.path.join(self.output_dir, theme + "_" + geom_type) + ".kml"],
            KML.name, theme=theme)
        for theme in self.feature_selection.themes
        for geom_type in self.feature_selection.geom_types(theme)
    ]
def load(self, artifact_id):
    """
    Load artifacts index for specified artifact id from storage

    :param artifact_id: artifact id to load
    :return: None
    """
    s = self.driver.read_index(artifact_id)
    # BUG FIX: json.loads() no longer accepts an ``encoding`` argument — it
    # was deprecated in 3.1 and removed in Python 3.9 (TypeError).  loads()
    # already handles str/UTF-8 bytes input.
    xs = json.loads(s) if s else []
    self.artifacts[artifact_id] = [Artifact.from_dict(x) for x in xs]
def __init__(self, folder, desc_file, desc_ext):
    '''
    :param folder: The folder where files can be found.
    :param desc_file: The name of the description file.
    :param desc_ext: The extension of the description file.
    :type folder: str
    :type desc_file: str
    :type desc_ext: str
    '''
    try:
        # Use a context manager so the descriptor file handle is always
        # closed (the previous bare open() leaked it).
        with open(desc_file) as f:
            description = json.load(f)
    except json.decoder.JSONDecodeError:
        print('CRITICAL Description file {} could not be read'.format(
            desc_file))
        exit(1)
    self.tag = description['version']
    self.job = description['job']
    # Strip the extension to recover the artifact's base file name.
    file_name = desc_file[:-len(desc_ext)]
    Artifact.__init__(self, file_name, description['category'],
                      description['arch'], description['type'], 'package')
def results(self):
    """Shapefile artifacts, grouped per theme or as one combined set.

    Only geometry outputs whose .shp file actually exists are included.
    """
    artifacts = []
    combined_parts = []
    sidecar_exts = (".shp", ".cpg", ".dbf", ".prj", ".shx")
    for theme in self.feature_selection.themes:
        for geom_type in self.feature_selection.geom_types(theme):
            base = os.path.join(self.output_dir,
                                slugify(theme) + "_" + geom_type)
            if not os.path.isfile(base + ".shp"):
                continue
            shpset = [base + ext for ext in sidecar_exts]
            if self.per_theme:
                artifacts.append(Artifact(shpset, Shapefile.name, theme=theme,
                                          basename=base + ".shp"))
            else:
                combined_parts.extend(shpset)
    if not self.per_theme:
        artifacts.append(
            Artifact(combined_parts, Shapefile.name, basename="shp"))
    return artifacts
def _generate_ads_file(self):
    '''
    Create the artifact deployer script file.

    Gathers artifacts from the local deployment repository, renders one
    ``artifact "..."`` group section per GAV key, substitutes them into the
    export.ads template and writes the result to ``self._ads``.
    '''
    # Retrieve artifacts from local deployment repo
    artifacts = Artifact.gather_artifacts(self._localDeploymentPath)
    group_section_list = []
    for key in artifacts:
        values = artifacts[key]
        gav = key.split(':')
        group = gav[0]
        aid = gav[1]
        version = gav[2]
        # Strip any SNAPSHOT/RELEASE/MILESTONE qualifier, case-insensitively.
        # BUG FIX: the inline ``(?i)`` flag was embedded mid-pattern, which is
        # a DeprecationWarning since Python 3.6 and a re.error from 3.11 —
        # pass re.IGNORECASE via the ``flags`` argument instead.
        strippedVersion = re.sub(r'\-(SNAPSHOT|RELEASE|MILESTONE)$', '',
                                 version, flags=re.IGNORECASE)
        # NOTE(review): appending the project version here was rolled back —
        # it caused the version to be concatenated twice (1.42.2-1.42.2).
        group_section_list.append(
            'artifact "%s", group:"%s", version:"%s" , { '
            % (aid, group, strippedVersion))
        for artifactObj in values:
            log.info('artifact to deploy ' + artifactObj.path)
            # ADS syntax wants forward slashes even on Windows.
            fileLine = '\t\t file "%s"' % artifactObj.path.replace('\\', '/')
            if not artifactObj.classifier == '':
                fileLine = fileLine + ', classifier:"%s"' % (artifactObj.classifier)
            if not artifactObj.extension == '':
                fileLine = fileLine + ', extension:"%s"' % (artifactObj.extension)
            group_section_list.append(fileLine)
        # NOTE(review): the sub-module POM version equality check that used to
        # live here was removed per BESTL-8564.
        group_section_list.append('\n\t}')
    export_ads_template_file = join(inst.get_installation_dir(), 'xmake',
                                    'template', 'maven', 'export.ads')
    with open(export_ads_template_file, 'r') as f:
        export_ads = f.read()
    export_ads = Template(export_ads).substitute(
        groupList='\n\t'.join(group_section_list))
    with open(self._ads, 'w') as f:
        f.write(export_ads)
def results(self):
    """Shapefile artifacts for every theme whose .shp output exists."""
    artifacts = []
    for theme in self.feature_selection.themes:
        for geom_type in self.feature_selection.geom_types(theme):
            base = os.path.join(self.output_dir, theme + "_" + geom_type)
            if os.path.isfile(base + ".shp"):
                parts = [base + ext
                         for ext in (".shp", ".cpg", ".dbf", ".prj", ".shx")]
                artifacts.append(Artifact(parts, Shapefile.name, theme=theme))
    return artifacts
def run(self, results_list):
    """Zip each artifact (plus the boundary file) into the target dir.

    Returns the list of final zip paths; also records each zip as an
    Artifact on ``self._zipped_resources`` (side effect).
    """
    zip_paths = []
    for artifact in results_list:
        # the created zipfile must end with only .zip for the HDX geopreview to work
        first_part = os.path.basename(artifact.parts[0]).replace('.', '_')
        zipfile_name = self.job_name + "_" + first_part + ".zip"
        staging_zip = os.path.join(self.stage_dir, zipfile_name)
        with zipfile.ZipFile(staging_zip, 'w', zipfile.ZIP_DEFLATED) as z:
            for filename in artifact.parts:
                z.write(filename,
                        self.job_name + "_" + os.path.basename(filename))
            z.write(self.boundary_path, "boundary.geojson")
        target_path = os.path.join(self.target_dir, zipfile_name)
        shutil.move(staging_zip, target_path)
        zip_paths.append(target_path)
        # side effect
        self._zipped_resources.append(
            Artifact([target_path], artifact.format_name, theme=artifact.theme))
    return zip_paths
def upload(self, local_path, artifact=None, force=False, print_only=False):
    """
    Upload local artifact and update index

    :param local_path: source file path to upload
    :param artifact: artifact object to upload
           If artifact is None, generate artifact object from local_path.
           Revision will be updated.
    :param force: upload even if an identical (size+md5) file is indexed
    :param print_only: log what would be uploaded without uploading
    :return: None
    """
    # Deep-copy so the caller's object is never mutated by the revision bump.
    art = deepcopy(artifact) if artifact else Artifact.from_path(self.group_id, local_path)
    bi = art.basic_info
    fi = art.file_info

    # check if the artifact is already in index (matched on size + md5)
    revisions = self._get_artifacts(bi.artifact_id, bi.version, bi.packaging)
    xs = [x for x in revisions
          if (x.file_info.size, x.file_info.md5) == (fi.size, fi.md5)]
    if xs and not force:
        # BUG FIX: logging.warn() is a deprecated alias — use warning().
        logging.warning('Already uploaded as:\n%s' % xs[0])
        return

    # increment revision
    latest = self._get_latest_artifact(bi.artifact_id, bi.version, bi.packaging)
    current_revision = latest[0].basic_info.revision if latest else 0
    bi.revision = current_revision + 1

    # upload file
    if print_only:
        logging.info('Would upload artifact: \n\n%s\n' % art)
        return
    logging.info('Uploading artifact: \n%s\n' % art)
    self.driver.upload(local_path, bi.s3_path(), fi.md5)

    # update index
    self.artifacts[bi.artifact_id].append(art)
def _store_build_dependencies(self):
    '''
    Store build dependencies in this format [group:artifact:version:type::classifier]
    ie: log4j-1.2.12-debug.jar --> log4j:log4j:1.2.12:jar::debug
    The file will be saved in [component_dir]/gen/tmp/dependencies
    '''
    artifacts = Artifact.gather_artifacts(self._maven_repository_dir)
    lines = []
    for key, values in artifacts.items():
        for artifact in values:
            entry = ':'.join([key, artifact.extension])
            if artifact.classifier:
                # Classifier is appended with a double-colon separator.
                entry = '::'.join([entry, artifact.classifier])
            lines.append(entry)
    with open(self._maven_build_dependencies_file, 'w') as f:
        f.writelines(['%s\n' % line for line in lines])
    self.build_cfg.add_metadata_file(self._maven_build_dependencies_file)
    log.info('found ' + str(len(lines)) + ' dependencies')
def generateEncounter():
    """Randomly assemble an encounter dict.

    Keys: "gold" (Gold or None), "item" (Weapon/Potion/Artifact or None),
    and exactly one of "monster"/"trap" set (the other is None).
    """
    encounter = {}
    # 50/50 chance of gold being present.
    encounter["gold"] = Gold() if random.choice([True, False]) else None
    # 50/50 chance of an item; the candidates are only built when needed.
    if random.choice([True, False]):
        encounter["item"] = random.choice([Weapon(), Potion(), Artifact()])
    else:
        encounter["item"] = None
    # An encounter always has either a monster or a trap, never both.
    if random.choice([True, False]):
        encounter["monster"] = Monster()
        encounter["trap"] = None
    else:
        encounter["trap"] = Trap()
        encounter["monster"] = None
    return encounter
else: options = dict(opts) base = options.get("-m") if not base: base = options.get("--maven-repo") if not base: base = "https://repo1.maven.org/maven2" username = options.get("-u") if not username: username = options.get("--username") password = options.get("-p") if not password: options.get("--password") dl = Downloader(base, username, password) artifact = Artifact.parse(args[0]) filename = None if len(args) == 2: filename = args[1] try: if dl.download(artifact, filename): sys.exit(0) else: usage() sys.exit(1) except RequestException, e: print e.msg sys.exit(1)
def results(self):
    """Return the generated Garmin IMG file as a single artifact."""
    # os.path.join instead of string concatenation, for portability and
    # consistency with the other results() methods in this project.
    return [Artifact([os.path.join(self.work_dir, "gmapsupp.img")],
                     GarminIMG.name)]
def results(self):
    """Return the OSM XML output wrapped as a single artifact."""
    xml_artifact = Artifact([self.output_xml], OSM_XML.name)
    return [xml_artifact]
def results(self):
    """Return the OSM PBF output wrapped as a single artifact."""
    pbf_artifact = Artifact([self.output_pbf], OSM_PBF.name)
    return [pbf_artifact]
print(f'✅ {libssl}\n') install_name_tool('-change', libcrypto, libssl) print(f'✅ {libusbmuxd}\n') install_name_tool('-change', libplist, libusbmuxd) print(f'✅ {libimobiledevice}\n') for library in [libssl, libcrypto, libplist, libusbmuxd]: install_name_tool('-change', library, libimobiledevice) # RUN SCRIPT if __name__ == "__main__": artifact = None try: artifact = Artifact(bucket_name='ios-native-qustodio-dev') dependencies_hash = artifact.get_hash_from( file_content=DEPENDENCIES_FILE) artifact_name = f'{dependencies_hash}.zip' print('depenencies hash: ', dependencies_hash) print('ROOT_PATH: ', ROOT_PATH) if not os.path.isdir(INSTALL_DIR): if artifact.try_download_and_unzip_artifact( artifact_name, LIBIMOBILEDEVICE_NODE_ARTIFACT, unzip_path=get_relative_path('../')): exit(0)
class Ruin(object):
    """A procedurally generated ruin.

    Builds a name, several flavour-text descriptions, an artifact, an
    occupying race, a villain, and a connected graph of rooms; render()
    writes the whole thing out through ``md_writer``.
    """

    def __init__(self):
        # Difficulty rating; also drives the race lookup further below.
        self.challenge_rating = random.randint(2, 10)
        # Name: an "old language" word, a pair of them, or "The <adj> <noun>".
        self.name = (templates.Template("{{output}}")
                     .render(output="{{old1}}|{{old1}} {{old2}}|The {{adj}} {{noun}}",
                             old1=old_language_generator.random_word(),
                             old2=old_language_generator.random_word(),
                             adj=vocab.get_adj(),
                             noun=vocab.get_noun()).title())
        # Where the ruin sits (template picks one alternative per "|" group).
        self.location_description = templates.Template('{{sentence}}').render(
            sentence="{{name}} is {{locationphrase}} {{placement}}.",
            name=self.name.title(),
            locationphrase="located in|located on|constructed on|located under",
            placement="a_or_an {{adj}} tree|a_or_an {{adj}} plain|a_or_an {{adj}} city|a_or_an {{adj}} rift|a_or_an {{adj}} mountain",
            adj="alien|obsidion|crystal|spikey|giant|flooded|ruined|volcanic|cursed|poisoned|haunted|broken")
        # Condition of parts of the ruin.
        self.parts_description = (templates.Template('{{sentence}}')
                                  .render(sentence="{{segment}} of {{name}} are {{state}}.",
                                          segment="Parts|Some areas|Regions|Some rooms",
                                          name=self.name.title()+"|it",
                                          state="cursed|corrupted|flooded|{{adj}} hot|{{adj}} cold|frozen|foggy|inaccessible|flooded",
                                          adj="incredibly|somewhat|unbearably"))
        # Something dramatic happening outside or to the ruin itself.
        self.circumstances_description = (templates.Template('{{sentence}}')
                                          .render(sentence="A_or_an {{outside_thing}} is happening outside.|The ruin is {{ruin_becoming}}.",
                                                  outside_thing="massive storm|battle between raiders|solar eclipse|massive flood|windstorm|blizzard|lunar eclipse",
                                                  ruin_becoming="flooding|coming to life|sinking into the earth|collapsing slowly|burning|larger on the inside than the outside"))
        # The treasure hidden somewhere in the ruin (placed in a room below).
        self.artifact = Artifact()
        # Occupying race scaled to the challenge rating.
        self.race = monsters.get_race(self.challenge_rating)
        self.race_description = (templates.Template('{{sentence}}')
                                 .render(sentence="It is occupied by {{plural_race}}.",
                                         plural_race=self.race))
        self.villain = Villain(self)
        self.villain_sentence = (templates.Template('{{sentence}}')
                                 .render(sentence="{{villain}}, a_or_an {{villain_type}} is here.",
                                         villain=md_writer.phrase_with_anchor(self.villain.__str__()),
                                         villain_type=self.villain.monster.name))
        self.race_villain_relation_sentence = (templates.Template("{{sentence}}")
                                               .render(sentence="The {{race_name}} {{relation}} {{villain}}.",
                                                       race_name=self.race,
                                                       relation="are the slaves of|have been charmed by|are ruled by|worship|are the minions of|are the soldiers of|are battling",
                                                       villain=self.villain.__str__()))
        # Build the room graph starting from a southern entrance.
        self.entrance = Room(self)
        self.entrance.set_connection('south', 'entrance')
        self.rooms = [self.entrance]
        rooms_to_build = random.randint(5, 15)
        while len(self.rooms) < rooms_to_build:
            random_room = choice(self.rooms)
            new_room = random_room.add_connected_room()
            if new_room is not None:
                self.rooms.append(new_room)
        # NOTE(review): placement happens after generation — the artifact goes
        # into the last room picked above, the villain into a fresh random
        # pick (possibly the same room) — confirm this is intended.
        random_room.artifact = self.artifact
        random_room = choice(self.rooms)
        random_room.villain = self.villain

    def room_in_position(self, pos):
        # True when pos is already occupied by a room.
        # NOTE(review): the (0, -1) check is independent of ``room`` yet sits
        # inside the loop — presumably it reserves the entrance approach;
        # verify, as it could be hoisted out of the loop unchanged.
        for room in self.rooms:
            if pos[0] == 0 and pos[1] == -1:
                return True
            if pos[0] == room.pos[0] and pos[1] == room.pos[1]:
                return True
        return False

    def render(self):
        """Write the full ruin write-up (map image + chapters) via md_writer."""
        ruin_save_name = self.name.replace(" ", "-")
        ruin_map_file_name = ruin_save_name + ".png"
        grapher.save_graph(self, ruin_map_file_name)
        md_writer.print_title("Ruin Dogs")
        md_writer.print_sub_title(self.name)
        md_writer.print_chapter_heading("Overview")
        md_writer.print_chapter_sentence(self.location_description)
        md_writer.print_chapter_sentence(self.parts_description)
        md_writer.print_chapter_sentence(self.circumstances_description)
        md_writer.print_chapter_sentence(self.race_description)
        md_writer.print_chapter_sentence(self.villain_sentence)
        md_writer.print_chapter_sentence(self.race_villain_relation_sentence)
        md_writer.print_chapter_sentence(self.villain.motivation_description)
        md_writer.end_paragraph()
        md_writer.end_chapter()
        md_writer.print_chapter_heading("Artifact")
        self.artifact.render()
        md_writer.end_paragraph()
        md_writer.end_chapter()
        md_writer.print_chapter_heading("Locations")
        md_writer.end_chapter()
        md_writer.insert_image('../'+md_writer.output_folder+'/images/' + ruin_map_file_name, 'layout')
        for room in self.rooms:
            room.render()
        md_writer.end_paragraph()
        md_writer.end_novel(css='http://mattfister.github.io/ruindogs/base.css')
def results(self):
    """Return the generated OsmAnd OBF file as a single artifact."""
    # os.path.join instead of string concatenation, for portability and
    # consistency with the other results() methods in this project.
    return [Artifact([os.path.join(self.work_dir, "Osmand_2.obf")],
                     OsmAndOBF.name)]
def __init__(self):
    """Procedurally generate the ruin: names, descriptions, artifact,
    occupying race, villain, and a connected graph of rooms."""
    # Difficulty rating; also drives the race lookup further below.
    self.challenge_rating = random.randint(2, 10)
    # Name: an "old language" word, a pair of them, or "The <adj> <noun>".
    self.name = (templates.Template("{{output}}")
                 .render(output="{{old1}}|{{old1}} {{old2}}|The {{adj}} {{noun}}",
                         old1=old_language_generator.random_word(),
                         old2=old_language_generator.random_word(),
                         adj=vocab.get_adj(),
                         noun=vocab.get_noun()).title())
    # Where the ruin sits (template picks one alternative per "|" group).
    self.location_description = templates.Template('{{sentence}}').render(
        sentence="{{name}} is {{locationphrase}} {{placement}}.",
        name=self.name.title(),
        locationphrase="located in|located on|constructed on|located under",
        placement="a_or_an {{adj}} tree|a_or_an {{adj}} plain|a_or_an {{adj}} city|a_or_an {{adj}} rift|a_or_an {{adj}} mountain",
        adj="alien|obsidion|crystal|spikey|giant|flooded|ruined|volcanic|cursed|poisoned|haunted|broken")
    # Condition of parts of the ruin.
    self.parts_description = (templates.Template('{{sentence}}')
                              .render(sentence="{{segment}} of {{name}} are {{state}}.",
                                      segment="Parts|Some areas|Regions|Some rooms",
                                      name=self.name.title()+"|it",
                                      state="cursed|corrupted|flooded|{{adj}} hot|{{adj}} cold|frozen|foggy|inaccessible|flooded",
                                      adj="incredibly|somewhat|unbearably"))
    # Something dramatic happening outside or to the ruin itself.
    self.circumstances_description = (templates.Template('{{sentence}}')
                                      .render(sentence="A_or_an {{outside_thing}} is happening outside.|The ruin is {{ruin_becoming}}.",
                                              outside_thing="massive storm|battle between raiders|solar eclipse|massive flood|windstorm|blizzard|lunar eclipse",
                                              ruin_becoming="flooding|coming to life|sinking into the earth|collapsing slowly|burning|larger on the inside than the outside"))
    # The treasure hidden somewhere in the ruin (placed in a room below).
    self.artifact = Artifact()
    # Occupying race scaled to the challenge rating.
    self.race = monsters.get_race(self.challenge_rating)
    self.race_description = (templates.Template('{{sentence}}')
                             .render(sentence="It is occupied by {{plural_race}}.",
                                     plural_race=self.race))
    self.villain = Villain(self)
    self.villain_sentence = (templates.Template('{{sentence}}')
                             .render(sentence="{{villain}}, a_or_an {{villain_type}} is here.",
                                     villain=md_writer.phrase_with_anchor(self.villain.__str__()),
                                     villain_type=self.villain.monster.name))
    self.race_villain_relation_sentence = (templates.Template("{{sentence}}")
                                           .render(sentence="The {{race_name}} {{relation}} {{villain}}.",
                                                   race_name=self.race,
                                                   relation="are the slaves of|have been charmed by|are ruled by|worship|are the minions of|are the soldiers of|are battling",
                                                   villain=self.villain.__str__()))
    # Build the room graph starting from a southern entrance.
    self.entrance = Room(self)
    self.entrance.set_connection('south', 'entrance')
    self.rooms = [self.entrance]
    rooms_to_build = random.randint(5, 15)
    while len(self.rooms) < rooms_to_build:
        random_room = choice(self.rooms)
        new_room = random_room.add_connected_room()
        if new_room is not None:
            self.rooms.append(new_room)
    # NOTE(review): placement happens after generation — the artifact goes
    # into the last room picked above, the villain into a fresh random pick
    # (possibly the same room) — confirm this is intended.
    random_room.artifact = self.artifact
    random_room = choice(self.rooms)
    random_room.villain = self.villain
def execute_deployment(build_cfg):
    '''performs the xmake DEPLOY phase (deploys contents exported in the EXPORT
    phase to the specified maven repository)'''
    # Guard clauses: nothing to do unless a deploy was requested, is not
    # custom-handled elsewhere, and an export file exists.
    if not build_cfg.do_deploy():
        log.info(
            "skipping deployment, because the according option '-d' was not set"
        )
        return
    if build_cfg.do_custom_deploy():
        log.info("skipping nexus deployment, because of custom deployment")
        return
    if not os.path.exists(build_cfg.export_file()):
        log.warning("no export file found at: " + build_cfg.export_file())
        return  # TODO: consider breaking w/ err code

    isDiskDeployment = build_cfg.export_repo().startswith("file://")
    isStaging = build_cfg.is_release()

    # Deploy to nexus — the simple case: straight remote deployment.
    if not isDiskDeployment and not isStaging:
        _nexus_deployment(build_cfg, build_cfg.export_repo())
        return

    # Deploy to disk first (staging and file-share deployments both go
    # through a local on-disk repository).
    diskDeploymentPath = join(build_cfg.temp_dir(), 'stagingDeployment')
    # Python 2 stdlib: urlparse.urljoin / urllib.pathname2url build the
    # file:// URL for the local repo.
    local_repo_url = urlparse.urljoin('file:',
                                      urllib.pathname2url(diskDeploymentPath))
    log.info('deploying on disk {}'.format(diskDeploymentPath))
    _disk_deployment(build_cfg, local_repo_url)

    # Find artifacts in deployment folder
    artifacts = Artifact.gather_artifacts(diskDeploymentPath)
    if isStaging:
        # Release build: create a nexus staging repo and push into it.
        (jsonUserAgent, nexusApi, repoDescription) = _nexus_staging_init(build_cfg)
        repoId = _nexus_staging_create(build_cfg, jsonUserAgent, nexusApi,
                                       repoDescription)
        status = _nexus_staging_push(build_cfg, jsonUserAgent, nexusApi,
                                     repoId, diskDeploymentPath)
        # If staging repo id set on commmand line, do not close it, except if
        # promoting is requested in the same life cycle
        if not build_cfg.get_staging_repoid_parameter(
        ) or build_cfg.do_promote():
            status = _nexus_staging_close(build_cfg, nexusApi, repoId)
        # Create deployment descriptor file
        log.info("creating deploy.json file")
        if status and 'repositoryURI' in status:
            _create_deployment_file_descriptor(build_cfg, artifacts,
                                               local_repo_url,
                                               status['repositoryURI'])
        else:
            log.error(
                'cannot create deployment file. Staging repository status '
                'returned by nexus does not contain the repository URI'
            )
    else:
        log.info("deployment in a file share. Creating deploy.json file")
        # Create deployment descriptor file
        _create_deployment_file_descriptor(build_cfg, artifacts,
                                           local_repo_url,
                                           build_cfg.export_repo())
        # Deploy really to the definive disk place
        _disk_deployment(build_cfg, build_cfg.export_repo())