def generate_list(**kwargs):
    """
    :kwargs: startweight (initial weight handed to Apply_strategy),
             exceptionals (output filename for the pickled list)
    """
    list_of_exceptionals = []
    initial_strategy = Quasicone.Strategy.initial()

    for mu, quasicone in enumerate(Quasicone.Iterator.iterator()):
        new_instance = Apply_strategy(quasicone, initial_strategy,
                                      startweight=kwargs['startweight'])
        new_instance.enumerator.append(mu)
        _logger.debug('new_instance.successful: {} -> \n {}'.format(
            new_instance.successful, new_instance._C
        ))
        # keep only the instances the initial strategy could not solve
        if not new_instance.successful:
            list_of_exceptionals.append(new_instance)

    from utils import to_file
    to_file(list_of_exceptionals, kwargs['exceptionals'])

    import TeX
    filename = 'quasicones_rank{}.tex'.format(Quasicone.Iterator.n - 1)
    TeX.to_file(TeX.Quasicones_to_TeX(list_of_exceptionals), filename)
def finish(buckets, num_bytes, X, V, row_reorder, col_reorder, filename,
           recons):
    """Reconstruct the compressed matrix, report stats, and write the result to file."""
    compressed_X = _reconstruct(X, buckets, row_reorder, col_reorder, recons)

    # Print stats and send to file.
    utils.print_stats(X, compressed_X, num_bytes)
    utils.to_file(filename, V, compressed_X)
Example #3
    def main(self):
        self.get_argparse()
        args = self.args
        if args.sub == "invoke":
            self.set_provider()
            args.params = json.loads(args.params)
            result = self.elastic_invoke()
            output_fname = self.gen_filename(self.function_name)
        elif args.sub == "log":
            result = self.log_with_result()
            output_fname = self.gen_filename(args.res_fname, "log.combined")
        else:
            self.parser.print_help()
            sys.exit(-2)

        utils.to_file(output_fname, result)
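In the invoke branch above, args.params arrives as a raw string and json.loads turns it into a Python object before the call is made. A tiny standalone illustration of that step (the example values are made up):

import json

params = '{"n": 3, "payload": "hello"}'   # e.g. what --params might carry
parsed = json.loads(params)
print(parsed["n"], parsed["payload"])     # -> 3 hello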
Example #4
    def _ibm_lwr(self, invoked_list, log_data):
        from ibm import get_activation as ibm_log
        res = []
        rdict = {}
        num = 0
        for i in invoked_list:
            i_size = int(i['invoke_size'])
            for k, v in i['result'].items():
                if k == 'client_info':
                    continue
                # fetch the activation record for each invocation id
                key = v['activationId']
                tmp = ibm_log.read_activation_through_rest([key])
                rdict[key] = tmp[key]
                msec = tmp[key]['duration']
                if msec:
                    res.append([num, msec, i_size])
                num += 1
        utils.to_file(
            "{}.{}.activation".format(self.args.target, self.args.res_fname),
            rdict)
        return res
def generate_list(**kwargs):
    T = MapTree(**kwargs)
    T.run()
    # (optional: render T.Tree as a graph via Tree_to_TikZ_Graph / Output_as_Graph)

    # collect every initial quasicone as an array
    list_of_unsolved = []
    for C_init in MapTree.list_of_C_init:
        list_of_unsolved.append(p.array(C_init))

    list_of_unsolved_array = [p.array(C) for C in list_of_unsolved]

    # as a file containing the Python list
    from utils import to_file
    outputfile = 'unsolved_after_TreeMap.pi'
    to_file(list_of_unsolved_array, outputfile)

    # as TeX-formatted output
    import TeX
    filename = 'unsolved_after_TreeMap.tex'
    TeX.to_file(TeX.Quasicones_to_TeX(list_of_unsolved_array), filename)
if inputfile:
    with open(inputfile, "rb") as fh:
        list_of_exceptionals = pickle.load(fh)
else:
    raise SystemExit("no inputfile indicated; use option -i [filename]")


non_success_counter = 0
list_of_extraexceptionals = []

for mu, C in enumerate(list_of_exceptionals):
    sublist = []
    for nu, strg in enumerate(Quasicone.Strategy.iterator(n)):
        new_instance = Apply_strategy(startweight=startweight,
                                      quasicone=C._C,
                                      list_of_operators=strg)
        new_instance.enumerator += [mu, nu]
        if new_instance.successful:
            break
        sublist.append(new_instance)
    else:   # runs only when no strategy succeeded (the inner loop never hit break)
        non_success_counter += 1
        list_of_extraexceptionals = p.concatenate((list_of_extraexceptionals, sublist))

from utils import to_file
to_file(list_of_extraexceptionals, outputfile)

import TeX
print(TeX.Output(TeX.Quasicones_to_TeX(list_of_extraexceptionals)))
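The for/else construct above is easy to misread: the else block runs only when the inner loop finishes without hitting break, i.e. only when every strategy fails for that quasicone. A minimal standalone illustration of that control flow (the names here are invented for the example):

def first_success(candidates, works):
    for c in candidates:
        if works(c):
            print("solved by", c)
            break
    else:  # no break happened: every candidate failed
        print("all candidates failed")

first_success([1, 2, 3], lambda c: c == 2)   # -> solved by 2
first_success([1, 2, 3], lambda c: c == 9)   # -> all candidates failed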
Example #7
def main():
    logging.basicConfig(level=logging.INFO)
    parser = ArgumentParser(description='Monitor GPU used.')
    parser.add_argument('--monitor-interval',
                        type=int,
                        help='monitoring interval in seconds; default is 60.',
                        default=60)
    parser.add_argument('--log-dir',
                        help='directory for log files; logging once a day',
                        default='log')
    parser.add_argument(
        '--log-time-unit',
        help='time unit for the logging summary file; default is hour',
        choices=['day', 'hour', 'minute', 'second'],
        default='hour')
    parser.add_argument(
        '--save-interval',
        type=int,
        help='interval (in days) between saving monitor logs to file; default is 1.',
        default=1)
    parser.add_argument('--log-storage-date',
                        type=int,
                        help='max number of days to keep logs; default is 90.',
                        default=90)
    # Note: the boolean options below keep default=True without a type
    # converter, so any value given on the command line arrives as a truthy
    # string; see the str2bool sketch after this function.
    parser.add_argument('--delete-summary',
                        help='delete the summary or not; default is True.',
                        default=True)
    parser.add_argument('--check-docker',
                        help='check pids that run inside docker.',
                        default=True)
    parser.add_argument(
        '--docker-as-user',
        help='whether to use the docker name as the user name; '
             'if true, the docker name must be ${USER}_xxx',
        default=True)
    parser.add_argument('--print-log',
                        help='print log to the console',
                        default=True)

    args = parser.parse_args()
    is_console = args.print_log
    docker_as_user = args.docker_as_user
    check_docker = args.check_docker
    log_dir = args.log_dir
    log_time_unit = args.log_time_unit
    log_storage_date = args.log_storage_date
    delete_summary = args.delete_summary
    # if os.path.exists(log_dir):
    #     if is_console:
    #         logging.warn('log_dir exists')
    os.makedirs(log_dir, exist_ok=True)
    today = datetime.now()
    if is_console:
        logging.info('Process start time:{}'.format(today))
    # start=time.clock()
    # monitor_nowtime(is_console)
    # monitor_used_time=time.clock()-start
    # if is_console:
    #     logging.info('monitor used time:{}'.format(monitor_used_time))
    monitor_interval = args.monitor_interval
    while True:
        start = perf_counter()

        now_time = datetime.now()
        if is_console:
            logging.info('Start monitor time:{}'.format(now_time))
        # compare calendar days via a timedelta so the check also works across month boundaries
        if (now_time.date() - today.date()).days >= args.save_interval:
            # a new day: write the accumulated log to file
            if is_console:
                logging.info('Date {},writing to file'.format(
                    today.strftime('%Y-%m-%d')))

            to_file(monitor_interval=monitor_interval,
                    log_dir=log_dir,
                    date=today.strftime('%Y-%m-%d'),
                    log_time_unit=log_time_unit,
                    docker_as_user=docker_as_user)
            today = now_time
            delete_file(log_dir, today + relativedelta(days=-log_storage_date),
                        delete_summary)

        monitor_nowtime(log_dir,
                        now_time.strftime('%Y-%m-%d'),
                        now_time.strftime('%H:%M'),
                        is_console=is_console,
                        check_docker=check_docker)

        # never pass a negative value to sleep if an iteration ran longer than the interval
        time.sleep(max(0, monitor_interval - (perf_counter() - start)))
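A note on the boolean options in the argument parser above: argparse leaves default=True untouched when a flag is absent, but any value typed on the command line (including "False") arrives as a non-empty, and therefore truthy, string. A small converter is one common workaround; the str2bool below is only a sketch and not part of the original script:

def str2bool(value):
    # interpret common textual spellings of true/false
    if isinstance(value, bool):
        return value
    return value.strip().lower() in ('true', '1', 'yes', 'y')

# usage sketch: pass it as the type converter, e.g.
# parser.add_argument('--print-log', type=str2bool, default=True,
#                     help='print log to the console')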
Example #9
def create_zipf_resume(min_year, max_year, total_words=200):
    print('Creating Zipf summary by year....')
    zipfRunner = zf.ZipfRunner()
    res = zipfRunner.get_all_years(min_year, max_year, total_words)
    utils.to_file(res, 'years_zipf.json')
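Every example on this page delegates the actual write to a project-specific utils.to_file helper whose definition is not shown here, and the call signatures differ from project to project (some pass the data first, some the filename first, some extra objects). As a rough, purely illustrative sketch, and not the implementation used by any project above, such a helper might dispatch on the file extension:

import json
import pickle

def to_file(data, filename):
    # hypothetical helper: choose the format from the file extension
    if filename.endswith('.json'):
        with open(filename, 'w') as f:
            json.dump(data, f, indent=2)
    else:
        # fall back to pickle for arbitrary Python objects
        with open(filename, 'wb') as f:
            pickle.dump(data, f)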