def loop(func, info=''):
    """Run ``func`` once per step of a ``processSize``-long progress bar.

    func -- zero-argument callable invoked once per iteration
    info -- extra text placed between the bar widgets

    NOTE(review): ``processSize`` is a module-level global — confirm it is
    defined before this is called.
    """
    #print info
    #widgets = ['Processed: ', Counter(), ' lines (', Timer(), ')']
    # Bug fix: the original assigned `widgets` twice in a row; the first
    # (ETA-based) list was dead code and has been removed.
    widgets = [Percentage(), Bar('>'), info, ReverseBar('<'), Timer()]
    pbar = ProgressBar(widgets=widgets, maxval=processSize)
    for x in pbar(i for i in range(processSize)):
        func()
def __init__(self, H, D, psi=None):
    """Set up the sweep state for Hamiltonian MPO ``H``.

    H   -- Hamiltonian MPO (periodic_MPO or open_MPO)
    D   -- bond dimension used when a random trial state must be built
    psi -- optional trial MPS; deep-copied so the caller's state is
           never mutated. If None, a random MPS matching H's boundary
           conditions is generated.
    """
    self.psi = copy.deepcopy(psi)
    self.H = H
    self.D = D
    #no trial given use random MPS
    if psi is None:
        if type(H) is periodic_MPO:
            self.psi = mps.random(self.H.length, np.size(self.H.node[0].tensor, axis=0), D, boundary="periodic")
        elif type(H) is open_MPO:
            self.psi = mps.random(self.H.length, np.size(self.H.node[0].tensor, axis=0), D, boundary="open")
    self.length = self.psi.length
    #values to track
    self.energy = None
    self.energy_vals = []
    self.e_diff = None
    self.variance = None
    self.variance_vals = []
    # H^2 network is used for variance <psi|H^2|psi> estimates.
    self.H2 = self.H.dot(self.H)
    self.var_network = rail_network(self.psi, self.psi, self.H2)
    print("Right Normalize trial state")
    self.psi.right_normalize(norm=True)
    self.network = rail_network(self.psi, self.psi, self.H)
    #combine left/right sides of <psi|psi>, update iteratively to update info
    self.R = dict()
    self.L = dict()
    self.var_R = dict()
    self.var_L = dict()
    print("Build initial right blocks")
    self.R[self.network.length - 1] = collapsed_layer.factory(
        layer(self.network, self.network.length - 1))
    # Bug fix: the original also assigned an identical `temp = layer(...)`
    # here that was never read; the dead statement has been removed.
    pbar = ProgressBar(widgets=['Energy Network', ReverseBar()])
    for n in pbar(range(self.length - 2, 0, -1)):
        self.R[n] = combine_clayer_layer.new_collapsed_layer(
            layer(self.network, n), self.R[n + 1])
    self.var_R[self.network.length - 1] = collapsed_layer.factory(
        layer(self.var_network, self.var_network.length - 1))
    pbar = ProgressBar(widgets=['Variance Network', ReverseBar()])
    for n in pbar(range(self.length - 2, 0, -1)):
        self.var_R[n] = combine_clayer_layer.new_collapsed_layer(
            layer(self.var_network, n), self.var_R[n + 1])
def loop_with_param_clean(func, clean, params=None, info=''):
    """Run ``func(params)`` across a ``processSize``-long progress bar,
    calling ``clean(params)`` every ``processSlide`` steps.

    func   -- callable taking the shared ``params`` mapping
    clean  -- periodic maintenance callable, also given ``params``
    params -- optional shared state dict (a fresh dict per call if omitted)
    info   -- extra text placed between the bar widgets

    NOTE(review): ``processSize``/``processSlide`` are module-level
    globals — confirm they are defined before this is called.
    """
    # Bug fix: the original used the mutable default `params={}`, which is
    # created once at definition time and shared (with all its mutations)
    # across every call that omits the argument.
    if params is None:
        params = {}
    widgets = [Percentage(), Bar('>'), info, ReverseBar('<'), Timer()]
    pbar = ProgressBar(widgets=widgets, maxval=processSize)
    for x in pbar(i for i in range(processSize)):
        func(params)
        if x % processSlide == 0:
            clean(params)
def hosts_disable_all():
    """
    status de host 0 = enabled
    status de host 1 = disabled

    Disable every enabled host, updating them in blocks of 1000.
    """
    logger.info('Disabling all hosts, in blocks of 1000')
    hosts = zapi.host.get(output=['hostid'], search={'status': 0})
    # Bug fix: the original computed int(ceil(len(hosts)) / 1000 + 1) —
    # ceil() was applied to the (already integral) length instead of to
    # the division, so partial blocks were covered only by the spurious
    # "+ 1", which also always ran one extra empty iteration.
    maxval = int(ceil(len(hosts) / 1000.0)) or 1
    bar = ProgressBar(
        maxval=maxval,
        widgets=[Percentage(), ReverseBar(), ETA(), RotatingMarker(),
                 Timer()]).start()
    for block_num in range(maxval):
        # Consume the next block of up to 1000 host dicts off the front.
        block = hosts[:1000]
        del hosts[:1000]
        result = zapi.host.massupdate(hosts=block, status=1)
        # Bug fix: the original reused `i` both as the xrange loop variable
        # and as the bar counter, incrementing it inside the loop only to
        # have xrange overwrite it — a distinct counter is used instead.
        bar.update(block_num + 1)
    bar.finish()
    logger.info('Done')
    return
def print_publish_status(image_object, source, image, published_image, builder, account_name):
    """Poll the publish webservice every 2 s until the job completes,
    errors, or is cancelled, driving a progress bar, then print the
    outcome (including the Cloud ID on success)."""
    status = published_image.status
    statusWidget = progressbar_widget.Status()
    statusWidget.status = status
    widgets = [Bar('>'), ' ', statusWidget, ' ', ReverseBar('<')]
    progress = ProgressBar(widgets=widgets, maxval=100).start()
    while not (status.complete or status.error or status.cancelled):
        statusWidget.status = status
        progress.update(status.percentage)
        status = call_status_publish_webservice(image_object, source, image,
                                                published_image)
        time.sleep(2)
    statusWidget.status = status
    progress.finish()
    if status.error:
        printer.out(
            "Publication to '" + builder["account"]["name"] + "' error: " +
            status.message + "\n" + status.errorMessage, printer.ERROR)
        if status.detailedError:
            printer.out(status.detailedErrorMsg)
    elif status.cancelled:
        # Bug fix: the original read `status.message.printer.WARNING`
        # (missing comma), raising AttributeError on the cancelled path
        # instead of printing a warning.
        printer.out(
            "\nPublication to '" + builder["account"]["name"] +
            "' canceled: " + status.message, printer.WARNING)
    else:
        printer.out("Publication to " + account_name + " is ok", printer.OK)
        published_image = image_object.get_publish_image_from_publish_id(
            published_image.dbId)
        if published_image.cloudId is not None and published_image.cloudId != "":
            printer.out("Cloud ID : " + published_image.cloudId)
def proxy_passive_to_active():
    """
    status de prxy 5 = active
    status de prxy 6 = passive

    Switch every passive proxy (status 6) to active (status 5).
    """
    logger.info('Change all proxys to active')
    # NOTE(review): output=['shorten', 'host'] — 'shorten' looks like a
    # typo for 'extend' (cf. proxy_passive_to_localhost); left unchanged
    # since the API call evidently works as-is. TODO confirm.
    proxys = zapi.proxy.get(output=['shorten', 'host'], filter={'status': 6})
    if len(proxys) == 0:
        logger.info('Done')
        return
    bar = ProgressBar(
        maxval=len(proxys),
        widgets=[Percentage(), ReverseBar(), ETA(), RotatingMarker(),
                 Timer()]).start()
    # Idiom fix: replaced the `proxys.__len__()` calls with len() and the
    # manual counter with enumerate().
    for i, x in enumerate(proxys, start=1):
        proxyid = x['proxyid']
        result = zapi.proxy.update(proxyid=proxyid, status=5)
        # Silence logger echo while the bar redraws the line.
        logger.echo = False
        logger.debug('Changed from passive to active proxy: %s' % (x['host']))
        bar.update(i)
    bar.finish()
    logger.echo = True
    logger.info('Done')
    return
def do_download(self):
    """Fetch artist and song metadata, then download every song in
    self.musics that is not already on disk, streaming each file in
    8 KiB chunks behind a progress bar and optionally tagging it with
    ID3 metadata afterwards."""
    self.__sprint("Fetching artist info...")
    self.fetch_info()
    self.__sprint(" done\n")
    self.__sprint("Fetching song info...")
    self.fetch_songs()
    self.__sprint(" done\n")
    self.build_music_list()
    for music in self.musics:
        # Skip songs already downloaded (resume-friendly).
        if os.path.isfile(music.dest_name):
            continue
        with open(music.dest_name, 'wb') as mp3:
            resp = request.urlopen(music.url)
            total_size = int(resp.headers['Content-Length'].strip())
            print(music.dest_name)
            widgets = [Bar('>'), ' ', ETA(), ' ', ReverseBar('<')]
            self.pbar = ProgressBar(widgets=widgets, maxval=total_size).start()
            # Stream in 8 KiB chunks; an empty read signals EOF.
            while True:
                read = resp.read(8192)
                if (not read):
                    break
                mp3.write(read)
                # mp3.tell() is the number of bytes written so far.
                self.__update_progress(mp3.tell(), total_size)
            self.pbar.finish()
            self.__sprint('\n')
        # Tag after the file handle is closed.
        if self.args.write_id3:
            self.__sprint("Writing ID3 Metadata...")
            self.do_write_id3(music)
            self.__sprint(" done\n")
def deleteHostsByHostgroup(groupname):
    """Remove the monitors of every host belonging to hostgroup
    ``groupname`` (only when --run was given; otherwise just list them).

    groupname -- exact hostgroup name to look up via the Zabbix API
    """
    hostgroup = zapi.hostgroup.get(output=['groupid'], filter={'name': groupname})
    if len(hostgroup) != 1:
        logger.error('Hostgroup not found: %s\n\tFound this: %s' % (groupname, hostgroup))
        # Bug fix: the original fell through after logging this error and
        # crashed on hostgroup[0] below; bail out instead.
        return
    groupid = int(hostgroup[0]['groupid'])
    hosts = zapi.host.get(output=['name', 'hostid'], groupids=groupid)
    total = len(hosts)
    logger.info('Hosts found: %d' % (total))
    if (args.run):
        x = 0
        bar = ProgressBar(maxval=total, widgets=[
            Percentage(), ReverseBar(), ETA(), RotatingMarker(), Timer()
        ]).start()
        # Silence logger echo so the bar is not interleaved with log lines.
        logger.echo = False
        for host in hosts:
            x = x + 1
            bar.update(x)
            logger.debug('(%d/%d) >> Removing >> %s' % (x, total, host))
            out = zapi.globo.deleteMonitors(host['name'])
        bar.finish()
        logger.echo = True
    else:
        logger.info('No host removed due to --no-run arg. Full list of hosts:')
        for host in hosts:
            logger.info('%s' % host['name'])
    return
def main(): ''' Controls general flow of operations ''' # If it exists, use the cached data of hosts and items if (os.path.isfile(move_items_file)): with open(move_items_file) as infile: hosts = json.load(infile) logger.info('Cache loaded from file (%s)' % move_items_file) else: hosts = getItems() with open(move_items_file, 'w') as outfile: json.dump(hosts, outfile) logger.info('Cache written to file (%s)' % move_items_file) for host in hosts: logger.info('Geting trends data of host: %s' % host['name']) host['trends'] = list() host['trends_uint'] = list() if host['itens'].__len__() > 0: bar = ProgressBar(maxval=host['itens'].__len__(), widgets=[ Percentage(), ReverseBar(), ETA(), RotatingMarker(), Timer() ]).start() i = 0 for item in host['itens']: temp = getTrends(hostname=host['name'], item=item) i += 1 if args.loglevel.upper() != 'DEBUG': bar.update(i) if temp['table'] == 'trends': for value in temp['values']: host['trends'].append( '(%d, %d, %d, %d, %d, %d)' % (int(item['itemid']), int(value[1]), int(value[2]), int(value[3]), int(value[4]), int(value[5]))) elif temp['table'] == 'trends_uint': for value in temp['values']: host['trends_uint'].append( '(%d, %d, %d, %d, %d, %d)' % (int(item['itemid']), int(value[1]), int(value[2]), int(value[3]), int(value[4]), int(value[5]))) else: logger.warning('Unknown value type: %s' % temp['table']) bar.finish() ''' Now, we send in blocks of up to ~1M values to generate the SQL files ''' if host['trends'].__len__() > 0: createSQL(table='trends', values=host['trends'], name=host['name']) elif host['trends_uint'].__len__() > 0: createSQL(table='trends_uint', values=host['trends_uint'], name=host['name']) else: logger.warning('No data from %s found to be sent.' % host['name'])
def example3():
    """Demo: forward/reverse bars with an ETA, driven by manual update()."""
    bar_widgets = [Bar('>'), ' ', ETA(), ' ', ReverseBar('<')]
    bar = ProgressBar(widgets=bar_widgets, maxval=10000000).start()
    for step in range(1000000):
        # do something
        bar.update(step * 10 + 1)
    bar.finish()
def __init__(self, closeup_image_info_list, save_path, min_size=25000):
    """Initialize the image updater.

    closeup_image_info_list -- image-info records to process
    save_path -- destination path for saved images
    min_size -- size threshold kept for later filtering (default 25000)
    """
    self._closeup_image_info_list = closeup_image_info_list
    self._save_path = save_path
    self._min_size = min_size
    # Widget layout shared by the progress bars this object creates.
    self._widgets = [Bar('>'), ' ', ETA(), ' ', ReverseBar('<')]
def rerun_pixie(waitTime=wait, frequency=freq):
    # Restart the pixie acquisition process and let it run for `waitTime`
    # seconds, ticking a reverse progress bar every `frequency` seconds.
    # NOTE: the defaults bind the module-level `wait`/`freq` values at
    # import time.
    p = pxp.spawn('pixie_ldf_c_slim junk')
    # Enter shared-memory mode and start the run.
    p.send('shm\ngo\n')
    print('sleeping')
    pBar = ProgressBar(widgets=[ReverseBar('<')], maxval=waitTime).start()
    for i in range(0, round(waitTime / frequency)):
        pxp.time.sleep(frequency)
        pBar.update(frequency * (i + 1))
    # Return the spawned process handle so the caller can interact with it.
    return (p)
def blitzkrieg(f, inputs, it):
    """Fill form ``f`` with ``inputs`` and submit it ``it`` times behind a
    progress bar.

    f      -- form object passed to setInputValues
    inputs -- values to set on the form
    it     -- number of submissions
    """
    setInputValues(f, inputs)
    # Fix: the original used the Python 2 `print "..."` statement, a
    # SyntaxError under Python 3; the parenthesised form behaves the same
    # on both for a single argument.
    print("Submitting...")
    widgets = [Bar('>'), Percentage(), ' - ', ETA(), ' ', ReverseBar('<')]
    pbar = ProgressBar(widgets=widgets, maxval=it)
    pbar.start()
    for i in range(it):
        # NOTE(review): this posts the module-level `form`, not the `f`
        # argument — looks like a bug but preserved; confirm with callers.
        post(form)
        pbar.update(i + 1)
    pbar.finish()
def create_progress_bar_openstack(bar_status):
    """Create and start a progress bar for an OpenStack retrieval job.

    bar_status -- mutable status object; its message/percentage fields
                  are initialised here and rendered by the Status widget.
    Returns the started ProgressBar, already advanced to 10%.
    """
    bar_status.message = "Retrieving information from OpenStack"
    bar_status.percentage = 0
    statusWidget = progressbar_widget.Status()
    statusWidget.status = bar_status
    widgets = [Bar('>'), ' ', statusWidget, ' ', ReverseBar('<')]
    # Bug fix: the original called start() twice — once chained on the
    # constructor and once again on the next line; one call suffices.
    progress = ProgressBar(widgets=widgets, maxval=100).start()
    bar_status.percentage = 10
    progress.update(bar_status.percentage)
    return progress
def inference(self, file_name_list):
    """Run the model over every image path in ``file_name_list`` in
    batches of self.batch_size and return the concatenated CPU feature
    tensor (and, when self.output_color is set, a color tensor too)."""
    self.model.cuda()
    feature_list = []
    color_list = []
    batch_list = []
    widgets = [
        Timer(format='ET: %(elapsed)s'),
        Bar('>'), ' ', 'Inferencing: ',
        Percentage(), ' ',
        ReverseBar('<'),
        ETA()
    ]
    pbar = ProgressBar(widgets=widgets, max_value=len(file_name_list))
    for i, name in enumerate(file_name_list):
        img = Image.open(name)
        # self.compose presumably applies the preprocessing transform
        # pipeline — TODO confirm.
        img = self.compose(img)
        #print(img)
        batch_list.append(img)
        # Flush a full batch through the model.
        if (i + 1) % self.batch_size == 0:
            if self.output_color == False:
                features = self.model(
                    Variable(torch.stack(batch_list)).cuda())
                feature_list.append(features.cpu().data)
            else:
                features, colors = self.model(
                    Variable(torch.stack(batch_list)).cuda())
                feature_list.append(features.cpu().data)
                color_list.append(colors.cpu().data)
            batch_list = []
        pbar.update(i)
    pbar.finish()
    # Flush the final partial batch, if any.
    if len(batch_list) > 0:
        if self.output_color == False:
            features = self.model(Variable(torch.stack(batch_list)).cuda())
            feature_list.append(features.cpu().data)
        else:
            features, colors = self.model(
                Variable(torch.stack(batch_list)).cuda())
            feature_list.append(features.cpu().data)
            color_list.append(colors.cpu().data)
        batch_list = []
    # Move the model back off the GPU when done.
    self.model.cpu()
    if self.output_color == False:
        feature_list = torch.cat(feature_list, dim=0)
        return feature_list
    else:
        feature_list = torch.cat(feature_list, dim=0)
        color_list = torch.cat(color_list, dim=0)
        return feature_list, color_list
def sleep_bar(secs):
    """Sleep for ``secs`` seconds, showing a progress bar when the
    ``progressbar`` package is installed and a plain sleep otherwise."""
    try:
        # From progressbar example #3, https://github.com/niltonvolpato/python-progressbar/blob/master/examples.py#L67
        from progressbar import Bar, ETA, ProgressBar, ReverseBar
        widgets = [Bar('>'), ' ', ETA(), ' ', ReverseBar('<')]
        pbar = ProgressBar(widgets=widgets, maxval=100).start()
        for i in range(100):
            # Bug fix: the original slept secs/110 per step over 100
            # steps, totalling only ~91% of the requested time (while the
            # ImportError fallback slept the full amount). 100 steps of
            # secs/100 sleeps the full duration.
            time.sleep(secs / 100.)
            pbar.update(i)
        pbar.finish()
    except ImportError:
        time.sleep(secs)
def topicSelect_mongo():
    # Scan up to `progressSize` tweets from the module-level Mongo
    # collection, tally every #hashtag occurrence into the module-level
    # `topicDict`, periodically calling __clean, then dump the counts to
    # the module-level `output` file. (Python 2 code: print statements,
    # dict.iteritems.)
    widgets = [
        Percentage(),
        Bar('>'), 'select topics',
        ReverseBar('<'),
        Timer()
    ]
    progressSize = mongo_tweets.count()
    #progressSize = 10000000
    progressSlide = progressSize / 100
    with ProgressBar(widgets=widgets, maxval=progressSize) as progress:
        counter = 0
        #tweets = mongo_tweets.limit(progressSize)
        for tweet in mongo_tweets.find().limit(progressSize):
            text = tweet['text']
            #print text
            regex = r'#\w+'
            matchs = re.findall(regex, text)
            # Tweets without hashtags still tick the bar, but do not
            # advance `counter`.
            if not matchs:
                progress.update()
                continue
            #print text
            for words in matchs:
                words = words.lower()
                topicDict.update({words: topicDict.get(words, 0) + 1})
            counter += 1
            progress.update(counter)
            # Every `progressSlide` hashtag-bearing tweets, prune rare
            # entries — presumably drops counts below 2; TODO confirm
            # __clean's semantics.
            if counter % progressSlide == 0:
                __clean([
                    2,
                ])
            if counter == progressSize:
                break
    cnt = 0
    sum = 0
    print 'start output'
    for key, value in topicDict.iteritems():
        output.write('%s\t:%d\n' % (key, value))
        cnt += 1
        sum += value
    log.write('%d hashtags with %d displays' % (cnt, sum))
    print '%d hashtags with %d displays' % (cnt, sum)
    pass
def createSQL(table, values, name='insert'):
    '''
    Generate the SQL insert line, breaking each insert to up to ~1k values
    and up to ~1k insert's (~1M values total for each SQL file)

    table  -- target table name interpolated into the INSERT statement
    values -- list of pre-rendered "(v1, v2, ...)" value tuples
    name   -- basename for the generated <name>.sql.<z> files
    '''
    logger.info('Generating SQL file')
    queryInsert = 'INSERT INTO %s (itemid,clock,num,value_min,value_avg,value_max) VALUES' % table
    i = 0  # Controls the progress bar
    x = 0  # Controls number of inserts in one line
    y = 0  # Controls number of lines in one file
    z = 0  # Controls number of file name
    valuesLen = values.__len__()
    sqlFile = '%s.sql.%d' % (name, z)
    logger.debug('Total itens for %s: %d' % (name, valuesLen))
    if valuesLen > 0:
        bar = ProgressBar(maxval=valuesLen, widgets=[
            Percentage(), ReverseBar(), ETA(), RotatingMarker(), Timer()
        ]).start()
        for value in values:
            i += 1
            x += 1
            # Accumulate values into one comma-separated INSERT payload.
            if x != 1:  # First line only
                sqlInsert = '%s,%s' % (sqlInsert, value)
            else:
                sqlInsert = value
            if y >= 1000:  # If there is more than 1k lines, write to new file
                z += 1
                y = 0
            if x >= 1000 or i == valuesLen:  # If there is more than 1k values or we finished our list, write to file
                sqlFile = '%s.sql.%d' % (name, z)
                fileAppend(f=sqlFile, content='%s %s;\n' % (queryInsert, sqlInsert))
                x = 0
                y += 1
                sqlInsert = ''
            if args.loglevel.upper(
            ) != 'DEBUG':  # Dont print progressbar if in debug mode
                bar.update(i)
        bar.finish()
    else:
        logger.warning('No values received')
def parse_pin_list(self, images_uri_list):
    """Fetch each Pinterest closeup page in ``images_uri_list``, parse it
    into an image record, and append it to self.images, showing progress."""
    # Counter starts at 1 and is incremented before each update, hence the
    # maxval of len + 1 below.
    i = 1
    widgets = [Bar('>'), ' ', ETA(), ' ', ReverseBar('<')]
    pbar = ProgressBar(widgets=widgets,
                       maxval=len(images_uri_list) + 1).start()
    for image_uri in images_uri_list:
        i += 1
        # `cookies` is presumably a module-level cookie jar for the
        # authenticated session — TODO confirm.
        r = requests.get("http://pinterest.com" + image_uri,
                         headers=self.headers,
                         cookies=cookies)
        closeup_image_html = r.text
        img = self.parse_closeup_image(closeup_image_html)
        img.uri = image_uri
        self.images.append(img)
        pbar.update(i)
    pbar.finish()
def discovery_disable_all(status=0):
    """
    Alterar status de todos os discoveries *auto*
    Status 0 = enable
    Status 1 = disable

    NOTE(review): the ``status`` parameter is only interpolated into log
    messages; the update below always sets status=1 (disable).
    """
    logger.info('Disabling all network discoveries')
    druleids = zapi.drule.get(
        output=['druleid', 'iprange', 'name', 'proxy_hostid', 'status'],
        selectDChecks='extend',
        filter={'status': 0})
    if len(druleids) == 0:
        logger.info('Done')
        return
    bar = ProgressBar(
        maxval=len(druleids),
        widgets=[Percentage(), ReverseBar(), ETA(), RotatingMarker(),
                 Timer()]).start()
    i = 0
    for x in druleids:
        params_disable = {
            'druleid': x['druleid'],
            'iprange': x['iprange'],
            'name': x['name'],
            'dchecks': x['dchecks'],
            'status': 1
        }
        out = zapi.drule.update(**params_disable)
        logger.echo = False
        if out:
            logger.debug('\tNew status: %s (%s) --> %d' %
                         (x['name'], out['druleids'], status))
        else:
            # Bug fix: the original indexed out['druleids'] here, but this
            # branch runs exactly when `out` is falsy, so it raised instead
            # of logging; report the id from the request instead.
            logger.warning('\tFAILED to change status: %s (%s) --> %d' %
                           (x['name'], x['druleid'], status))
        i += 1
        bar.update(i)
    logger.echo = True
    bar.finish()
    logger.info('Done')
    return
def handle_scan_run_status(self, my_scanned_instance, running):
    # Poll the first in-flight scan of `my_scanned_instance` until it
    # reaches a terminal state (complete/error/cancelled), showing a
    # progress bar, then print its result and clear the `running` flag.
    # Returns the (possibly updated) flag.
    for current_scan in my_scanned_instance.scans.scan:
        if (not current_scan.status.complete
                and not current_scan.status.error
                and not current_scan.status.cancelled):
            scan_status = current_scan.status
            status_widget = progressbar_widget.Status()
            status_widget.status = scan_status
            widgets = [Bar('>'), ' ', status_widget, ' ', ReverseBar('<')]
            progress = ProgressBar(widgets=widgets, maxval=100).start()
            while not (scan_status.complete or scan_status.error
                       or scan_status.cancelled):
                # Refreshes the status from the server and advances the bar.
                scan_status = self.update_scan_run_status(
                    status_widget, scan_status, progress,
                    my_scanned_instance, current_scan)
            status_widget.status = scan_status
            progress.finish()
            self.print_scan_run_result_status(scan_status)
            running = False
            # Only the first running scan is handled.
            break
        else:
            pass
    return running
def infer(self, img):
    """Run the model on a single image array.

    img -- image as an array convertible via Image.fromarray

    Returns the CPU feature tensor, or a (features, colors) pair when
    self.output_color is set.
    """
    self.model.cuda()
    feature_list = []
    color_list = []
    batch_list = []
    widgets = [
        Timer(format='ET: %(elapsed)s'),
        Bar('>'), ' ', 'Inferencing: ',
        Percentage(), ' ',
        ReverseBar('<'),
        ETA()
    ]
    #pbar = ProgressBar(widgets=widgets, max_value=len(file_name_list))
    #for i,name in enumerate(file_name_list):
    #img = Image.open(name)
    img = Image.fromarray(img)
    img = self.compose(img)
    img_list = [img]
    # NOTE(review): img_list always holds exactly one element here, so
    # the else branch below is unreachable in practice.
    if len(img_list) > 0:
        if self.output_color == False:
            features = self.model(Variable(torch.stack(img_list)).cuda())
            feature_list.append(features.cpu().data)
        else:
            features, colors = self.model(
                Variable(torch.stack(img_list)).cuda())
            feature_list.append(features.cpu().data)
            color_list.append(colors.cpu().data)
        self.model.cpu()
        if self.output_color == False:
            feature_list = torch.cat(feature_list, dim=0)
            return feature_list
        else:
            feature_list = torch.cat(feature_list, dim=0)
            color_list = torch.cat(color_list, dim=0)
            return feature_list, color_list
    else:
        # Bug fix: the original raised `Excpetion` (a typo), which itself
        # raised NameError instead of the intended exception.
        raise Exception('Invalid detection')
def desabilitaItensNaoSuportados():
    # Interactively disable (status=1) all monitored items in "not
    # supported" state (state=1), optionally filtered by a key_ substring
    # and a result limit. Prompts are in Portuguese; on completion or bad
    # input the code re-enters the module-level main() menu.
    query = {"output": "extend", "filter": {"state": 1}, "monitored": True}
    filtro = input('Qual a busca para key_? [NULL = ENTER]')
    if filtro.__len__() > 0:
        query['search'] = {'key_': filtro}
    limite = input('Qual o limite de itens? [NULL = ENTER]')
    if limite.__len__() > 0:
        try:
            query['limit'] = int(limite)
        except:
            print('Limite invalido')
            input("Pressione ENTER para voltar")
            # Back to the menu on invalid limit.
            main()
    opcao = input("Confirma operação? [s/n]")
    if opcao == 's' or opcao == 'S':
        itens = zapi.item.get(query)
        print('Encontramos {} itens'.format(itens.__len__()))
        bar = ProgressBar(maxval=itens.__len__(), widgets=[
            Percentage(), ReverseBar(), ETA(), RotatingMarker(), Timer()
        ]).start()
        i = 0
        for x in itens:
            zapi.item.update({"itemid": x['itemid'], "status": 1})
            i += 1
            bar.update(i)
        bar.finish()
        print("Itens desabilitados!!!")
        print()
    input("Pressione ENTER para continuar")
    main()
def proxy_passive_to_localhost():
    """Repoint every passive proxy's (status 6) interface to localhost
    (dns 'localhost', ip 127.0.0.1), keeping the proxy passive."""
    logger.info('Change all passive proxys to localhost')
    proxys = zapi.proxy.get(output=['extend', 'host'],
                            filter={'status': 6},
                            selectInterface='extend')
    if not proxys:
        logger.info('Done')
        return
    bar = ProgressBar(
        maxval=len(proxys),
        widgets=[Percentage(), ReverseBar(), ETA(), RotatingMarker(),
                 Timer()]).start()
    for count, proxy in enumerate(proxys, start=1):
        update_args = {
            'proxyid': proxy['proxyid'],
            'status': 6,
            'interface': {
                'interfaceid': proxy['interface']['interfaceid'],
                'dns': 'localhost',
                'ip': '127.0.0.1'
            }
        }
        result = zapi.proxy.update(**update_args)
        logger.debug('Proxy changed to localhost: %s' % (proxy['host']))
        # Suppress logger echo while the bar redraws.
        logger.echo = False
        bar.update(count)
        logger.echo = True
    bar.finish()
    logger.info('Done')
    return
tVec_d = tVec_d_from_old_parfile(geomParams, det_origin) detector_params = np.hstack( [geomParams[3:6, 0], tVec_d.flatten(), 0., np.zeros(3)]) use_tth_max = parser.get('pull_spots', 'use_tth_max') if use_tth_max.strip() == '1' or use_tth_max.strip().lower() == 'true': excl = np.zeros_like(pd.exclusions, dtype=bool) pd.exclusions = excl excl = pd.getTTh() > detector.getTThMax() pd.exclusions = excl pass phi, n = rot.angleAxisOfRotMat(rot.rotMatOfQuat(qbar)) if have_progBar: widgets = [Bar('>'), ' ', ETA(), ' ', ReverseBar('<')] pbar = ProgressBar(widgets=widgets, maxval=len(qbar.T)).start() pass print "pulling spots for %d orientations..." % len(qbar.T) for iq, quat in enumerate(qbar.T): if have_progBar: pbar.update(iq) exp_map = phi[iq] * n[:, iq] grain_params = np.hstack( [exp_map.flatten(), 0., 0., 0., 1., 1., 1., 0., 0., 0.]) sd = xrdutil.pullSpots(pd, detector_params, grain_params, reader, filename=pull_filename % iq, eta_range=etaRange,
host['timediff'] = timediff hosts_exclude.append(host) else: logger.debug("No matches for host: %s" % host) hosts_no_match.append(host) """ Perform (or not >> --no-run) the removal of preveously identified hosts """ total = hosts_exclude.__len__() logger.info("Hosts to remove: %d" % total) if args.run and total > 0: x = 0 bar = ProgressBar( maxval=total, widgets=[Percentage(), ReverseBar(), ETA(), RotatingMarker(), Timer()]).start() logger.echo = False for host in hosts_exclude: x += 1 bar.update(x) logger.debug("(%d/%d) >> Removing >> %s" % (x, total, host)) out = zapi.host.delete(host['hostid']) bar.finish() logger.echo = args.verbose total = hosts_no_match.__len__() logger.info("Other hosts without timestamp to remove: %d" % total) if args.run and total > 0 and args.matches:
def do_import(self, args):
    """Import a completed scan (by dbId from the command line) as a new
    appliance, polling the import job every 2 s with a progress bar and
    supporting Ctrl+C cancellation of the server-side job."""
    try:
        # add arguments
        doParser = self.arg_import()
        try:
            doArgs = doParser.parse_args(shlex.split(args))
        except SystemExit as e:
            # argparse exits on -h/bad args; swallow and return to the shell.
            return
        printer.out("Import scan id [" + doArgs.id + "] ...")
        myScannedInstances = self.api.Users(
            self.login).Scannedinstances.Getall(Includescans="true")
        if myScannedInstances is None or not hasattr(
                myScannedInstances, 'scannedInstances'):
            printer.out("scan not found", printer.ERROR)
            return
        else:
            # Locate the scan with the requested dbId (and remember its
            # parent scanned instance).
            myScan = None
            for myScannedInstance in myScannedInstances.scannedInstances.scannedInstance:
                for scan in myScannedInstance.scans.scan:
                    if str(scan.dbId) == doArgs.id:
                        myScan = scan
                        myRScannedInstance = myScannedInstance
                        break
                if myScan is not None:
                    break
            # Only import scans that finished cleanly.
            if myScan is not None and myScan.status.complete and not myScan.status.error and not myScan.status.cancelled:
                myScanImport = scanImport()
                myScanImport.applianceName = doArgs.name
                myScanImport.applianceVersion = doArgs.version
                myScanImport.orgUri = (self.api.Users(
                    self.login).Orgs().Getall()).orgs.org[0].uri
                rScanImport = self.api.Users(self.login).Scannedinstances(
                    myRScannedInstance.dbId).Scans(
                        myScan.dbId).Imports().Import(myScanImport)
                status = rScanImport.status
                statusWidget = progressbar_widget.Status()
                statusWidget.status = status
                widgets = [Bar('>'), ' ', statusWidget, ' ', ReverseBar('<')]
                progress = ProgressBar(widgets=widgets, maxval=100).start()
                # Poll until the server reports a terminal state.
                while not (status.complete or status.error or status.cancelled):
                    statusWidget.status = status
                    progress.update(status.percentage)
                    status = (self.api.Users(self.login).Scannedinstances(
                        myRScannedInstance.dbId).Scans(
                            myScan.dbId).Imports().Status.Get(
                                I=rScanImport.uri)).statuses.status[0]
                    time.sleep(2)
                statusWidget.status = status
                progress.finish()
                if status.error:
                    printer.out(
                        "Importing error: " + status.message + "\n" +
                        status.errorMessage, printer.ERROR)
                    if status.detailedError:
                        printer.out(status.detailedErrorMsg)
                elif status.cancelled:
                    printer.out("Importing canceled: " + status.message,
                                printer.WARNING)
                else:
                    printer.out("Importing ok", printer.OK)
    except KeyboardInterrupt:
        printer.out("\n")
        if generics_utils.query_yes_no("Do you want to cancel the job ?"):
            # Only cancel when the import was actually started.
            if 'myRScannedInstance' in locals() and 'myScan' in locals() and 'rScanImport' in locals() \
                    and hasattr(myRScannedInstance, 'dbId') and hasattr(myScan, 'dbId') and hasattr(rScanImport, 'dbId'):
                self.api.Users(self.login).Scannedinstances(
                    myRScannedInstance.dbId).Scans(myScan.dbId).Imports(
                        rScanImport.dbId).Status.Cancel()
        else:
            printer.out("Exiting command")
    except ArgumentParserError as e:
        printer.out("ERROR: In Arguments: " + str(e), printer.ERROR)
        self.help_import()
    except Exception as e:
        return handle_uforge_exception(e)
def do_build(self, args):
    """Generate one image per builder entry of a validated template file
    (or an appliance selected by --id), polling each generation job with
    a progress bar, optionally writing a JUnit report (--junit), and
    supporting Ctrl+C cancellation of the in-flight job."""
    try:
        #add arguments
        doParser = self.arg_build()
        doArgs = doParser.parse_args(shlex.split(args))
        #if the help command is called, parse_args returns None object
        if not doArgs:
            return 2
        #--
        template = validate(doArgs.file)
        if template is None:
            return 2
        if doArgs.id:
            myAppliance = self.api.Users(
                self.login).Appliances().Getall(Query="dbId==" + doArgs.id)
            myAppliance = myAppliance.appliances.appliance
        else:
            #Get template which correpond to the template file
            myAppliance = self.api.Users(self.login).Appliances().Getall(
                Query="name=='" + template["stack"]["name"] +
                "';version=='" + template["stack"]["version"] + "'")
            myAppliance = myAppliance.appliances.appliance
        if myAppliance is None or len(myAppliance) != 1:
            printer.out("No template found on the plateform")
            return 0
        myAppliance = myAppliance[0]
        rInstallProfile = self.api.Users(self.login).Appliances(
            myAppliance.dbId).Installprofile("").Getdeprecated()
        if rInstallProfile is None:
            printer.out(
                "No installation found on the template '" +
                template["stack"]["name"] + "'", printer.ERROR)
            return 0
        try:
            i = 1
            if doArgs.junit is not None:
                test_results = []
            for builder in template["builders"]:
                try:
                    printer.out("Generating '" + builder["type"] +
                                "' image (" + str(i) + "/" +
                                str(len(template["builders"])) + ")")
                    if doArgs.junit is not None:
                        test = TestCase('Generation ' + builder["type"])
                        test_results.append(test)
                        start_time = time.time()
                    format_type = builder["type"]
                    targetFormat = generate_utils.get_target_format_object(
                        self.api, self.login, format_type)
                    if targetFormat is None:
                        printer.out("Builder type unknown: " + format_type,
                                    printer.ERROR)
                        return 2
                    myimage = image()
                    myinstallProfile = installProfile()
                    # Per-builder overrides of swap/disk size; fall back to
                    # the template's deprecated install profile.
                    if rInstallProfile.partitionAuto:
                        if "installation" in builder:
                            if "swapSize" in builder["installation"]:
                                myinstallProfile.swapSize = builder[
                                    "installation"]["swapSize"]
                            if "diskSize" in builder["installation"]:
                                myinstallProfile.diskSize = builder[
                                    "installation"]["diskSize"]
                        else:
                            myinstallProfile.swapSize = rInstallProfile.swapSize
                            myinstallProfile.diskSize = rInstallProfile.partitionTable.disks.disk[
                                0].size
                    # Dispatch to the format-specific generate_<format>
                    # helper by name.
                    func = getattr(
                        generate_utils, "generate_" +
                        generics_utils.remove_special_chars(
                            targetFormat.format.name), None)
                    if func:
                        myimage, myinstallProfile = func(
                            myimage, builder, myinstallProfile, self.api,
                            self.login)
                    else:
                        printer.out("Builder type unknown: " + format_type,
                                    printer.ERROR)
                        return 2
                    if myimage is None:
                        return 2
                    myimage.targetFormat = targetFormat
                    myimage.installProfile = myinstallProfile
                    if doArgs.simulated is not None and doArgs.simulated:
                        myimage.simulated = True
                    if doArgs.forced is not None and doArgs.forced:
                        myimage.forceCheckingDeps = True
                    rImage = self.api.Users(self.login).Appliances(
                        myAppliance.dbId).Images().Generate(myimage)
                    status = rImage.status
                    statusWidget = progressbar_widget.Status()
                    statusWidget.status = status
                    widgets = [
                        Bar('>'), ' ', statusWidget, ' ',
                        ReverseBar('<')
                    ]
                    progress = ProgressBar(widgets=widgets,
                                           maxval=100).start()
                    # Poll the generation job every 2 s until terminal.
                    while not (status.complete or status.error
                               or status.cancelled):
                        statusWidget.status = status
                        progress.update(status.percentage)
                        status = self.api.Users(self.login).Appliances(
                            myAppliance.dbId).Images(
                                rImage.dbId).Status.Get()
                        time.sleep(2)
                    statusWidget.status = status
                    progress.finish()
                    if status.error:
                        printer.out(
                            "Generation '" + builder["type"] +
                            "' error: " + status.message + "\n" +
                            status.errorMessage, printer.ERROR)
                        if status.detailedError:
                            printer.out(status.detailedErrorMsg)
                        if doArgs.junit is not None:
                            test.elapsed_sec = time.time() - start_time
                            test.add_error_info(
                                "Error",
                                status.message + "\n" + status.errorMessage)
                    elif status.cancelled:
                        printer.out(
                            "Generation '" + builder["type"] +
                            "' canceled: " + status.message,
                            printer.WARNING)
                        if doArgs.junit is not None:
                            test.elapsed_sec = time.time() - start_time
                            test.add_failure_info("Canceled", status.message)
                    else:
                        printer.out(
                            "Generation '" + builder["type"] + "' ok",
                            printer.OK)
                        printer.out("Image URI: " + rImage.uri)
                        printer.out("Image Id : " +
                                    generics_utils.extract_id(rImage.uri))
                        if doArgs.junit is not None:
                            test.elapsed_sec = time.time() - start_time
                            #the downloadUri already contains downloadKey at the end
                            if rImage.downloadUri is not None:
                                test.stdout = self.api.getUrl(
                                ) + "/" + rImage.downloadUri
                    i += 1
                except Exception as e:
                    # uForge API errors are reported (and recorded in the
                    # JUnit results) without aborting the remaining builders.
                    if is_uforge_exception(e):
                        print_uforge_exception(e)
                        if doArgs.junit is not None and "test_results" in locals(
                        ) and len(test_results) > 0:
                            test = test_results[len(test_results) - 1]
                            test.elapsed_sec = time.time() - start_time
                            test.add_error_info("Error",
                                                get_uforge_exception(e))
                    else:
                        raise
            if doArgs.junit is not None:
                testName = myAppliance.distributionName + " " + myAppliance.archName
                ts = TestSuite("Generation " + testName, test_results)
                with open(doArgs.junit, 'w') as f:
                    TestSuite.to_file(f, [ts], prettyprint=False)
            return 0
        except KeyError as e:
            printer.out("unknown error in template file", printer.ERROR)
    except ArgumentParserError as e:
        printer.out("ERROR: In Arguments: " + str(e), printer.ERROR)
        self.help_build()
    except KeyboardInterrupt:
        printer.out("\n")
        if generics_utils.query_yes_no("Do you want to cancel the job ?"):
            if 'myAppliance' in locals() and 'rImage' in locals(
            ) and hasattr(myAppliance, 'dbId') and hasattr(rImage, 'dbId'):
                self.api.Users(self.login).Appliances(
                    myAppliance.dbId).Images(rImage.dbId).Status.Cancel()
            else:
                printer.out("Impossible to cancel", printer.WARNING)
        else:
            printer.out("Exiting command")
    except Exception as e:
        print_uforge_exception(e)
        if doArgs.junit is not None and "test_results" in locals(
        ) and len(test_results) > 0:
            test = test_results[len(test_results) - 1]
            if "start_time" in locals():
                elapse = time.time() - start_time
            else:
                elapse = 0
            test.elapsed_sec = elapse
            test.add_error_info("Error", get_uforge_exception(e))
        else:
            return 2
    finally:
        # Always flush the JUnit report, even on error/interrupt paths.
        if "doArgs" in locals(
        ) and doArgs.junit is not None and "test_results" in locals(
        ) and len(test_results) > 0:
            if "myAppliance" in locals():
                testName = myAppliance.distributionName + " " + myAppliance.archName
            else:
                testName = ""
            ts = TestSuite("Generation " + testName, test_results)
            with open(doArgs.junit, 'w') as f:
                TestSuite.to_file(f, [ts], prettyprint=False)
def publish_builder(self, builder, template, appliance, rInstallProfile, i, comliantImage):
    """Publish a generated image to the cloud account described by
    ``builder``, polling the publish job every 2 s with a progress bar.

    builder       -- builder dict from the template (type, account, ...)
    template      -- parsed template (used to locate the image if needed)
    appliance     -- appliance owning the image
    rInstallProfile -- install profile (passed through; unused here)
    i             -- builder index, used when resolving the image
    comliantImage -- image to publish; looked up when None

    Returns 0 on success/handled failure, 2 on configuration errors.
    Re-raises KeyboardInterrupt after offering to cancel the job.
    """
    try:
        if comliantImage is None:
            comliantImage = self.get_image_to_publish(
                builder, template, appliance, i)
        # get target format to define publish method
        format_type = comliantImage.targetFormat.format.name
        publishMethod = getattr(
            publish_utils,
            "publish_" + generics_utils.remove_special_chars(format_type),
            None)
        if publishMethod:
            mypImage = publishMethod(builder)
            if mypImage is None:
                return 2
        else:
            printer.out("Builder type unknown: " + format_type,
                        printer.ERROR)
            return 2
        mypImage.imageUri = comliantImage.uri
        mypImage.applianceUri = appliance.uri
        mypImage.credAccount = self.get_account_to_publish(builder)
        account_name = mypImage.credAccount.name
        rpImage = self.api.Users(self.login).Appliances(
            appliance.dbId).Images(comliantImage.dbId).Pimages().Publish(
                body=mypImage, element_name="ns1:publishImage")
        status = rpImage.status
        statusWidget = progressbar_widget.Status()
        statusWidget.status = status
        widgets = [Bar('>'), ' ', statusWidget, ' ', ReverseBar('<')]
        progress = ProgressBar(widgets=widgets, maxval=100).start()
        # Poll until the publish job reaches a terminal state.
        while not (status.complete or status.error or status.cancelled):
            statusWidget.status = status
            progress.update(status.percentage)
            status = self.api.Users(self.login).Appliances(
                appliance.dbId).Images(comliantImage.dbId).Pimages(
                    rpImage.dbId).Status.Get()
            time.sleep(2)
        statusWidget.status = status
        progress.finish()
        if status.error:
            printer.out(
                "Publication to '" + builder["account"]["name"] +
                "' error: " + status.message + "\n" + status.errorMessage,
                printer.ERROR)
            if status.detailedError:
                printer.out(status.detailedErrorMsg)
        elif status.cancelled:
            # Bug fix: the original read `status.message.printer.WARNING`
            # (missing comma), raising AttributeError on the cancelled path.
            printer.out(
                "\nPublication to '" + builder["account"]["name"] +
                "' canceled: " + status.message, printer.WARNING)
        else:
            printer.out("Publication to " + account_name + " is ok",
                        printer.OK)
            rpImage = self.api.Users(self.login).Appliances(
                appliance.dbId).Images(comliantImage.dbId).Pimages(
                    rpImage.dbId).Get()
            if rpImage.cloudId is not None and rpImage.cloudId != "":
                printer.out("Cloud ID : " + rpImage.cloudId)
        return 0
    except KeyboardInterrupt:
        printer.out("\n")
        if generics_utils.query_yes_no("Do you want to cancel the job ?"):
            # Only cancel when the publish was actually started.
            if 'appliance' in locals() and 'comliantImage' in locals() and 'rpImage' in locals() \
                    and hasattr(appliance, 'dbId') and hasattr(comliantImage, 'dbId') and hasattr(rpImage, 'dbId'):
                self.api.Users(self.login).Appliances(
                    appliance.dbId).Images(comliantImage.dbId).Pimages(
                        rpImage.dbId).Cancel.Cancel()
            else:
                printer.out("Impossible to cancel", printer.WARNING)
        else:
            printer.out("Exiting command")
        raise KeyboardInterrupt
def main():
    # Pomodoro-style task timer (Python 2: raw_input/xrange): parse the
    # duration and "Project:Type:Comment" description, run a progress bar
    # for the requested time (Ctrl+C ends early), optionally trigger
    # start/end actions, and append the task record to a JSON-array log
    # file in-place.
    parser = argparse.ArgumentParser()
    parser.add_argument('-t', '--time', default='25m')
    parser.add_argument('comment',
                        nargs="?",
                        default="No Comment",
                        help="Project?:Type?:Comment")
    parser.add_argument('-v',
                        '--verbose',
                        action='store_const',
                        const=logging.INFO,
                        dest='loglevel',
                        help='increase output verbosity.')
    parser.add_argument('-d',
                        '--debug',
                        action='store_const',
                        const=logging.DEBUG,
                        dest='loglevel',
                        default=logging.WARNING,
                        help='show debug output (even more than -v).')
    parser.add_argument('-s',
                        '--song',
                        default='Bit Rush League of Legends',
                        help='spotify search query.')
    parser.add_argument('-k',
                        '--keepsong',
                        action='store_true',
                        help='Farfignewton')
    parser.add_argument('-l',
                        '--logpath',
                        default=os.path.expanduser('~/Dropbox/tasks.log'))
    parser.add_argument('-x', '--disable', action='store_true')
    args = parser.parse_args()
    start = datetime.datetime.now()
    # Split "Project:Type:Comment"; 2 parts means Project:Comment.
    parts = args.comment.split(':')
    project = "Unknown"
    task_type = "??"
    comment = args.comment
    if len(parts) == 2:
        project, comment = parts
    elif len(parts) == 3:
        project, task_type, comment = parts
    task = {"Type": task_type, "Project": project, "Comment": comment,
            "Start": str(start)}
    logging.basicConfig(level=args.loglevel)
    # get_time presumably converts strings like '25m' to seconds — TODO
    # confirm.
    seconds = get_time(args.time)
    task_start = datetime.datetime.now()
    logging.info('Task start: %s' % task_start)
    logging.info('Starting a timer for %s seconds' % seconds)
    logging.info('Task comment: %s' % args.comment)
    print('Press Ctrl+C to end early')
    widgets = [Bar('>'), ' ', ETA(), ' ', ReverseBar('<')]
    progress = ProgressBar(widgets=widgets)
    broken = False
    try:
        for i in progress(xrange(seconds)):
            time.sleep(1)
            #This will make sure we break out properly if the computer goes to sleep while
            #a task is running. Give it a small buffer (10 s) so that we don't always hit this
            #break point
            if (datetime.datetime.now() - task_start).total_seconds() > (seconds + 10):
                break
    except KeyboardInterrupt:
        logging.debug("breaking progress loop (keyboard interrupt)")
        broken = True
    # don't execute the actions if they are disable by the user
    # or we receive a sigint
    if args.disable or broken:
        actions = []
    else:
        # I am disabling the spotify action until we add a better way
        # to configure which actions do and don't execute.
        #actions = [Spotify(args.song, args.keepsong), DimScreen()]
        actions = [DimScreen()]
    for action in actions:
        action.start()
    planned_end = datetime.datetime.now()
    task["Planned End"] = str(planned_end)
    logging.info('Task end (planned): %s' % planned_end)
    raw_input('Press enter to end...')
    for action in actions:
        action.end()
    actual_end = datetime.datetime.now()
    task["Actual End"] = str(actual_end)
    task["Duration"] = str(actual_end - start)
    logging.info('Task end (actual): %s' % actual_end)
    log_exists = os.path.isfile(args.logpath)
    with open(args.logpath, 'a+') as f:
        if log_exists:
            #The whole point of this code is allow us to
            #append another JSON dictionary (the task) to
            #the end of the list (with reading everything in).
            #Go to the end of the file
            f.seek(-1, os.SEEK_END)
            pos = f.tell()
            #Work backward until the last "]" is found.
            while pos > 0 and f.read(1) != "]":
                pos -= 1
                f.seek(pos, os.SEEK_SET)
            #If we aren't at the start of the file, remove the
            #"]" and everything after.
            if pos > 0:
                f.seek(pos, os.SEEK_SET)
                f.truncate()
                f.write(",")
        else:
            f.write("[")
        json.dump(task, f, indent=4)
        f.write("]")