def do_all_filtering(data_file, sent_filename, exclude_filename, months, years):
    log.info("FILTERING with months=" + " ".join(months) + " and years=" + " ".join(years))
    log.info("for data file " + data_file)
    all_records = get_isi_all_fields(data_file)
    log.info("STARTING with n=" + str(len(all_records)))
    unique = get_unique_items(all_records)
    articles = get_filtered_dict(unique, lambda k, v: k == "type" and v == "Article")
    articles_of_months = get_filtered_dict(articles, lambda k, v: k == "data_month" and v in months)
    articles_of_years = get_filtered_dict(articles_of_months, lambda k, v: k == "year" and v in years)
    one_email_per_row = get_one_email_per_row(articles_of_years)
    log.info("AFTER FILTERING for months and years, ONE EMAIL PER ROW, n=" + str(len(one_email_per_row)))
    (first_occurrence, dupes) = email_first_occurrence(one_email_per_row, True)
    log.info("ELIMINATED because not first occurrence, n=" + str(len(dupes)))
    log.info(list_of_emails(dupes))
    (first_occurrence2, dupes2) = email_not_in_already_sent(first_occurrence, sent_filename)
    log.info("ELIMINATED because already sent, n=" + str(len(dupes2)))
    log.info(list_of_emails(dupes2))
    (first_occurrence3, dupes3) = filter_unsubscribe_list(first_occurrence2)
    log.info("ELIMINATED because unsubscribe, n=" + str(len(dupes3)))
    log.info(list_of_emails(dupes3))
    (first_occurrence4, dupes4) = filter_exclude_list(first_occurrence3, exclude_filename)
    log.info("ELIMINATED because on exclude list, n=" + str(len(dupes4)))
    log.info(list_of_emails(dupes4))
    all_dupes = dupes + dupes2 + dupes3 + dupes4
    keepers = first_occurrence4
    log.info("KEEPING these, n=" + str(len(keepers)))
    log.debug(list_of_emails(keepers))
    return(keepers, all_dupes)
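# For context on the filtering calls above: get_filtered_dict takes a (key, value)
# predicate. A minimal sketch of the assumed behavior -- keep every record that has
# at least one item satisfying the predicate -- not necessarily the project's
# actual implementation:
def get_filtered_dict_sketch(records, predicate):
    return [record for record in records
            if any(predicate(k, v) for k, v in record.items())]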

Example #2
 def test_prep_email(self):
     test_dict = {'journal': 'Am. Nat.', 'volume_issue': '176_6', 'emails': ['*****@*****.**'], 'pretty_month': 'OCT', 'year': '2010', 'type': 'Article', 'data_month': 'OCT'}
     test_dict["q"] = "1"
     (survey_url, shortened_url) = get_survey_url(test_dict)
     test_dict["url"] = shortened_url
     email_html_body_template = open(email_template_initial, "r").read()
     email_body = contact_corresponding.get_email_text(email_html_body_template, test_dict)
     log.debug(email_body)
Example #3
    def genny(name, t):
        if t.kind == TypeKind.TYPEDEF:
            t = t.get_canonical()

        log.debug(f'name {name} type kind {t.kind}')

        if t.kind == TypeKind.POINTER:
            name = f'*{name}'
        yield f'print {name}'
Example #4
 def send_cmd(self, cmd):
     """Send a command and return its output."""
     try:
         stdin, stdout, stderr = self.client.exec_command(cmd)
         log.debug(f'{self.user} ran "{cmd}" on {self.host}')
         print(stdout, stderr)
         return stdout
     except SSHException as e:
         log.debug(f'{self.user} failed to run "{cmd}" on {self.host}: {e}')
         print(e)
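# Hypothetical usage of MySSH.send_cmd (host, user and password are placeholders).
# exec_command returns file-like channels, so the caller reads stdout itself:
ssh = MySSH(host='192.168.23.129', password='secret', user='root')
if ssh.client:
    out = ssh.send_cmd('uname -a')
    if out is not None:
        print(out.read().decode())
    ssh.close()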
Example #5
def get_clang_flags(args):
    """
    Aggregate clang flags from args and Makefile if specified
    """
    clang_flags = args.clang_flags[0].split() if args.clang_flags else []
    if args.directory:
        makefile = args.directory / 'Makefile'
        log.debug(f'path to Makefile: {makefile}')
        assert (makefile.is_file())
        clang_flags += [f'-I{args.directory.absolute()}']
        clang_flags += re.findall(r'-I[^\s]+', open(makefile, 'r').read())
    return clang_flags
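# Hypothetical invocation of get_clang_flags, assuming an argparse namespace shaped
# like this project's parser: clang_flags is a single-element list and directory is
# a pathlib.Path whose Makefile exists.
from argparse import Namespace
from pathlib import Path

args = Namespace(clang_flags=['-DDEBUG -O0'], directory=Path('/path/to/project'))
flags = get_clang_flags(args)
# e.g. ['-DDEBUG', '-O0', '-I/path/to/project', '-Iinclude', ...]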
Example #6
def gen_patch(file, line, parms, array_expressions):
    stmts = []

    stmts.append(f'b {file.split("/")[-1]}:{line}')

    arrays = dict(a.split(':') for a in array_expressions)
    log.debug(f'{len(parms)} parameters')
    printfs = list(gen_printfs(parms, arrays))
    log.debug(printfs)
    stmts += printfs

    return stmts
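# Hypothetical usage of gen_patch: build gdb-style statements (a breakpoint plus the
# print commands produced by gen_printfs) for a traced source line; the path, line
# number and array expression are placeholders.
stmts = gen_patch('src/main.c', 42, parms=[], array_expressions=['buf:len'])
print('\n'.join(stmts))
# b main.c:42
# ...followed by whatever gen_printfs yields for the parameters...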
Example #7
    def solve(self, from_pos):
        '''@param
            from_pos: a tuple (x, y) representing the path's start pos.
            to_area: a list of tuples (x, y) representing the path's possible end pos.
        @return
            If the path is found, return the path as a list of tuples (x,y).
            Otherwise, return None
        '''

        #result = bfs_path.bfs(from_pos[0],from_pos[1], self.to_area, self.map)
        result = self.a_star_solver.solve(from_pos)
        log.debug("Path: " + str(result))
        return result
Example #8
def do_reminder_filtering(sent_filename, exclude_filename, months, years):
    # do_reminder_filtering_part1 is assumed to return (keepers that have not yet
    # been sent a reminder, records that already received one)
    (hasnt_been_sent_reminder, already_sent_reminder) = do_reminder_filtering_part1(sent_filename, exclude_filename, months, years, "REMINDER")
    
    (first_occurrence3, dupes3) = filter_unsubscribe_list(hasnt_been_sent_reminder)
    log.info("ELIMINATED because unsubscribe, n=" + str(len(dupes3)))
    log.info(list_of_emails(dupes3))
    (first_occurrence4, dupes4) = filter_exclude_list(first_occurrence3, exclude_filename)
    log.info("ELIMINATED because on exclude list, n=" + str(len(dupes4)))
    log.info(list_of_emails(dupes4))
    all_dupes = already_sent_reminder + dupes3 + dupes4
    keepers = first_occurrence4
    log.info("KEEPING these, n=" + str(len(keepers)))
    log.debug(list_of_emails(keepers))
    return(keepers, all_dupes)
Example #9
 def __init__(self, host, password, user):
     self.host = host
     self.password = password
     self.user = user
     try:
         self.client = SSHClient()
         # On the first connection, paramiko checks the host against the known_hosts file; if the key is missing, handle it as follows
         self.client.set_missing_host_key_policy(AcceptPolicy())
         # Set the policy to use when connecting to servers without a known host key
         self.client.connect(hostname=host,
                             username=user,
                             password=password,
                             timeout=10)
         log.debug(f'{user} logged in to {host} successfully')
     except Exception as e:
         print(e)
         log.debug(f'{user} failed to log in to {host}: {e}')
         self.client = None
Example #10
    def do_wizard(_, args, install_sh):
        """
        Get the Pin installation at args.pin_root.
        If Pin is not at the expected location, run the interactive wizard with install_sh.
        """
        root = args.pin_root
        pin = Pin(args)
        if not pin.is_valid():
            log.warn(f'{root} is not a valid Pin installation.')
            if not install_sh.is_file():
                log.error(f'Could not execute {install_sh}.')
                exit(1)
            else:
                log.warn(
                    f'See {install_sh} for the recommended method for installing Pin.'
                )
                yn = input(
                    f'Should I install it at {root}? [type y to install, anything else to quit]: '
                )
                if yn == 'y':
                    cmd = f'bash {install_sh.absolute()} {root.name}'
                    log.debug(
                        f'Running Bash script install.sh with "{cmd}" in directory "{root}"'
                    )
                    proc = subprocess.Popen(cmd.split(),
                                            cwd=root.parent,
                                            stdout=subprocess.PIPE,
                                            stderr=subprocess.STDOUT)
                    stdout, _ = proc.communicate()
                    for l in stdout.decode().splitlines():
                        log.info(f'**[{install_sh.name}]** {l}')
                    if proc.returncode == 0:
                        log.info(f'Ran {install_sh} successfully.')
                    else:
                        log.error(f'Could not execute {install_sh}.')
                        exit(1)
                else:
                    exit(1)

        pin = Pin(args)
        if not pin.is_valid():
            log.error(f'Something is wrong with the Pin environment at {root}')

        return pin
Example #11
def parse(dirname, buggy_dirname, assertion):
    dirname = os.path.abspath(dirname)
    m = re.match(
        r'([^,]+),\s*(before|after)\s*line\s*([0-9]+)\s*\((.*)\),\s*(assert\(.*\);)',
        assertion)
    file_path = os.path.join(dirname, m.group(1))
    buggy_file_path = os.path.join(dirname, m.group(1))
    before_after = m.group(2)
    line_no = int(m.group(3))
    expr = m.group(4).strip()
    assert_stmt = m.group(5)
    assert_args = re.match(r'assert\((.*)\);', assert_stmt).group(1)
    my_assert_stmt = f'if (!{assert_args}) {{*((int*)0) = 0;}} // my_assert'
    my_assert_stmt += '\n'

    log.info(f'{before_after} {file_path}:{line_no} "{my_assert_stmt}"')

    fromlines = open(file_path, 'r').readlines()

    matchto = [l.strip() for l in fromlines[line_no - 2:line_no + 2]]
    matches = difflib.get_close_matches(expr, matchto)
    log.debug(f'close matching "{expr}"')
    assert (len(matches) > 0)
    new_line_no = line_no - 2 + matchto.index(matches[0]) + 1
    if new_line_no != line_no:
        log.warn(f'switched line number to {new_line_no}')
        line_no = new_line_no
    log.debug(f'close matched {file_path}:{line_no} "{fromlines[line_no-1]}"')

    if before_after == 'before':
        tolines = fromlines[:line_no - 1] + [my_assert_stmt
                                             ] + fromlines[line_no - 1:]
    elif before_after == 'after':
        tolines = fromlines[:line_no] + [my_assert_stmt] + fromlines[line_no:]
    else:
        log.critical(f'before_after is not valid: {before_after}')
        return

    unidiff = difflib.unified_diff(fromlines,
                                   tolines,
                                   fromfile=buggy_file_path,
                                   tofile=buggy_file_path)
    patch = ''.join(unidiff)
    return patch
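# For reference, a hypothetical assertion specification in the format the regular
# expression above expects (file, before/after, line number, expression, assert statement):
import re

sample = 'src/foo.c, before line 42 (x > 0), assert(x > 0);'
m = re.match(
    r'([^,]+),\s*(before|after)\s*line\s*([0-9]+)\s*\((.*)\),\s*(assert\(.*\);)',
    sample)
print(m.groups())
# ('src/foo.c', 'before', '42', 'x > 0', 'assert(x > 0);')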
Example #12
def stmtgen(parameters):
    """
    Get declaration and initializer statements for the given parameters
    """
    decls = []
    inits = []

    for i, parm in enumerate(parameters):
        stmts = list(stmts_for_param(parm.type, parm.displayname))
        parm_decls, parm_inits = zip(*stmts)
        log.info(
            f'parameter {pp(parm)}({i}) produces {len(parm_decls)} local variable declarations and {len(parm_inits)} initializer statements'
        )
        for parm_decl, parm_init in stmts:
            log.debug(f'local variable {parm_decl} has initializer(s) {parm_init}')
        decls += (d for dlist in parm_decls for d in dlist)
        inits += (s for slist in parm_inits for s in slist)

    return decls, inits
Example #13
def upload_many(client):
    """Batch upload: every file in the local upload directory is uploaded."""
    filedir = os.path.dirname(os.path.abspath(__file__))
    filepath = os.path.join(filedir, 'upload')
    # os.walk yields (dirpath, dirnames, filenames); keep the top-level entry so the
    # files directly under the upload directory are uploaded
    base_path, u_list = filepath, []
    for dirpath, _, filenames in os.walk(filepath):
        base_path, u_list = dirpath, filenames
        break
    try:
        ret = {'status': 0, 'msg': 'ok'}
        if client:
            ftp_client = client.open_sftp()
            for local_file in u_list:
                local_file_path = os.path.join(base_path, local_file)
                remote_file_path = 'upload/' + local_file
                ftp_client.put(local_file_path, remote_file_path)
                log.debug(f'Uploaded {local_file_path} to {remote_file_path} on the remote server')
                print('Upload succeeded')
            ftp_client.close()

        else:
            ret['status'] = 1
            ret['msg'] = 'error'
            log.debug('Upload failed: the SSH connection was not established')
    except Exception as e:
        ret['status'] = 1
        ret['msg'] = e
        log.debug(f'Error during upload: {e}')
    return ret
Example #14
def find(node, selector, verbose=False):
    """
    Return node and all of its descendants that match the selector
    (either a CursorKind or a callable predicate)
    """

    if verbose:
        log.debug(f'find: walked node {pp(node)}')

    found = []

    if isinstance(selector, CursorKind):
        if node.kind == selector:
            found.append(node)
    elif callable(selector):
        if selector(node):
            found.append(node)

    # Recurse for children of this node
    for child in node.get_children():
        found += find(child, selector, verbose)

    return found
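# Hypothetical usage of find(), assuming a translation unit parsed with the libclang
# Python bindings; 'example.c' is a placeholder file name.
from clang.cindex import Index, CursorKind

tu = Index.create().parse('example.c', args=['-I.'])
funcs = find(tu.cursor, CursorKind.FUNCTION_DECL)                   # select by kind
rets = find(tu.cursor, lambda n: n.kind == CursorKind.RETURN_STMT)  # select by predicate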
Example #15
def parse_args():
    parser = argparse.ArgumentParser()
    parser.add_argument('filter',
                        nargs='?',
                        help='Filter bugs to a certain filter')
    parser.add_argument(
        '-l',
        '--log-level',
        help='Display logs at a certain level (DEBUG, INFO, ERROR)')
    parser.add_argument('-v',
                        '--verbose',
                        action='store_true',
                        help='Display verbose logs in -lDEBUG')
    arguments = parser.parse_args()

    if arguments.log_level:
        log.setLevel(logging.getLevelName(arguments.log_level))
    if arguments.verbose:
        global verbose
        verbose = True
        log.debug(f'verbose logging enabled')

    return arguments
Example #16
def download(client, remote_file_path, local_file_path):
    """get file from remote server"""
    try:
        ret = {'status': 0, 'msg': 'ok'}
        if client:
            ftp_client = client.open_sftp()
            ftp_client.get(remote_file_path, local_file_path)
            log.debug(f'Downloaded remote file {remote_file_path} to local path {local_file_path}')
            print('Download succeeded')
            ftp_client.close()

        else:
            ret['status'] = 1
            ret['msg'] = 'error'
            log.debug('Download failed: the SSH connection was not established')
    except Exception as e:
        ret['status'] = 1
        ret['msg'] = e
        log.debug(f'Error during download: {e}')
    return ret
Example #17
def upload(client, local_file_path, remote_file_path):
    """put local file to remote server"""
    try:
        ret = {'status': 0, 'msg': 'ok'}
        if client:
            ftp_client = client.open_sftp()
            ftp_client.put(local_file_path, remote_file_path)
            log.debug(f'Uploaded {local_file_path} to {remote_file_path} on the remote server')
            print('Upload succeeded')
            ftp_client.close()

        else:
            ret['status'] = 1
            ret['msg'] = 'error'
            log.debug('Upload failed: the SSH connection to the server was not established')
    except Exception as e:
        ret['status'] = 1
        ret['msg'] = e
        log.debug(f'Error during upload: {e}')
    return ret
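# Hypothetical usage of upload()/download(), reusing the MySSH wrapper from the other
# examples; the paths and credentials are placeholders.
ssh = MySSH(host='192.168.23.129', password='secret', user='root')
if ssh.client:
    print(upload(ssh.client, '/tmp/report.txt', 'upload/report.txt'))
    print(download(ssh.client, 'upload/report.txt', '/tmp/report_copy.txt'))
    ssh.close()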
Example #18
def main():
    global args
    args = parse_args()

    target = Path(args.target[0])
    target_args = args.target[1:]
    try:
        dynamic_locations = args.pin.run(target, target_args)
    except Exception as e:
        log.error(e)
        log.error(traceback.format_exc())
        return -1
    log.debug(f'{len(dynamic_locations)} logs')

    for l in dynamic_locations:
        if Path(l.filepath).exists():
            log.debug(f'dynamic location {l}')
        elif args.verbose:
            log.debug(f'dynamic location {l}')
            log.debug(f'^^^ file does not exist ^^^')

    dynamic_locations = [
        d for d in dynamic_locations if Path(d.filepath).exists()
    ]

    static_locations = []
    clang_include_paths = [f'-I{p}' for p in args.clang_include_paths]
    static_locations = get_static_locations(dynamic_locations,
                                            clang_include_paths)

    # Store only filepath and lineno and dedup
    all_locations = slim(dynamic_locations, args.include_column,
                         args.include_code)
    if args.include_static:
        all_locations += slim(static_locations, args.include_column,
                              args.include_code)

    if len(all_locations) == 0:
        log.error('No traces generated. Check if the source file was moved.')
        return 1

    # Output trace locations to file
    if args.output_file:
        output_stream = open(args.output_file, 'w')
    else:
        output_stream = sys.stdout
    for l in all_locations:
        s = f'{l.filepath}:{l.lineno}'
        if args.include_column:
            s += f':{l.column}'
        if args.include_code:
            s += f':{l.code}'
        s += '\n'
        output_stream.write(s)
    if output_stream is not sys.stdout:
        output_stream.close()

    debug_info = debug_print_code(all_locations)
    for filepath, content in debug_info.items():
        log.debug(filepath)
        for lineno, text in content:
            log.debug(f'{lineno:4} {text}')
    return 0
Example #19
def main():
    log.setLevel(logging.INFO)

    args = parse_args()

    if args.log_level:
        log.setLevel(args.log_level)
        log.debug(f'setting log level to {args.log_level}')

    seg_c = args.segment_file
    orig_dir = args.original_file
    log.debug(f'segment: {seg_c}, original: {orig_dir}')

    clang_args = args.clang_args.split()
    if clang_args:
        log.debug(f' provided clang args: {clang_args}')

    if args.verbose:
        global verbose
        verbose = True
        log.debug(f'verbose logging enabled')

    seg_cur = parse(seg_c, clang_args)
    seg_target = select_target(seg_cur, target_name=args.target)
    parms = list(seg_target.get_arguments())

    log.debug(f'target: {pp(seg_target)}')
    import re
    target_name = re.match(r'helium_(.*)', seg_target.spelling).group(1)
    
    from pathlib import Path
    for orig_c in Path(orig_dir).glob('**/*.c'):
        orig_cur = parse(orig_c)
        orig_funcdecls = find(orig_cur, CursorKind.FUNCTION_DECL, verbose)
        orig_target = next((f.get_definition() for f in orig_funcdecls if is_the_same(f, seg_target) and f.get_definition() is not None), None)
        if orig_target is not None:
            break
    log.debug(f'target: {pp(orig_target)}')
    orig_body = find(orig_target, lambda c: c is not None and c.kind.is_statement(), verbose=verbose)
    first_stmt = next(iter(orig_body))
    first_stmt_file, first_stmt_line = first_stmt.location.file.name, first_stmt.location.line

    diff = gen_patch(first_stmt_file, first_stmt_line, parms, args.array)
    print('\n'.join(diff))
Example #20
def send_to_contact_list(fake_or_real, contact_list, subject, email_template, q, years, contact_note_append=""):
    ### DOESN'T HANDLE FILTERING BY YEARS YET!!!    
    log.debug("SENDING EMAIL TO GROUPS")
    for journal in set([d["journal"] for d in contact_list]):
        log.debug("JOURNAL: " + journal)
        journals_records = contact_corresponding.get_filtered_dict(contact_list, lambda k, v: k == "journal" and v == journal)
        for month in set([d["data_month"] for d in contact_list]):
            log.debug("MONTH: " + month)
            month_records = contact_corresponding.get_filtered_dict(journals_records, lambda k, v: k == "data_month" and v == month)
            log.debug("n = " + str(len(month_records)))
            representative_sample = month_records[0]
            representative_sample["q"] = q
            (survey_url, shortened_url) = get_survey_url(representative_sample)
            representative_sample["url"] = shortened_url
            email_html_body_template = open(email_template, "r").read()
            email_body = contact_corresponding.get_email_text(email_html_body_template, representative_sample)
            log.debug(email_body[0:200])
            bcc_list = [d["single_email"] for d in month_records]
            log.debug("Length of bcc list:" + str(len(bcc_list)))
            log.debug("BCC list:")
            log.debug(bcc_list)
            if fake_or_real == "REAL":
                success = send_it_already(subject, email_body, bcc_list)
                if success:
                    log.info("******* SENT FOR REAL ***********")
                else:
                    log.info("******* FAILED TO SEND ***********")
            else:
                success = send_it_already(subject, email_body, ["*****@*****.**"])
                log.info("--- just sent it to myself--------")
            if success:
                update_sent_file(sent_filename, bcc_list, "\t".join([" ".join(years), month, journal, contact_note_append]))
Example #21
 def close(self):
     """Close the connection."""
     self.client.close()
     log.debug(f'Disconnected {self.user} from {self.host}')
Example #22
def main():
    cli = control_line.ControlLine()
    cli.run()
    cfg = config.Config()
    config_path = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                               'ssh_login.cfg')

    # Log in to servers
    if cli.args.shell:
        ips = cli.args.filename
        for host_ip in ips:
            if cfg.read_config(config_path, host_ip):
                dic = dict(cfg.read_config(config_path, host_ip))
                user = dic['user']
                password = dic['password']
                host = host_ip
                ssh = MySSH(host=host, password=password, user=user)
                if ssh.client:
                    print('Login succeeded')
                else:
                    print('Login failed')
            else:
                print(f'The config file has no entry for host {host_ip}')
                log.debug(f'The config file has no entry for host {host_ip}')

    # Upload files
    elif cli.args.upload:
        if len(cli.args.filename) == 2:
            try:
                local_file = cli.args.filename[0]
                remote_server_file = os.path.join('download/',
                                                  cli.args.filename[1])
            except Exception as e:
                print(e)
                return None

            ssh = MySSH('192.168.23.129', '2587539619', 'root')
            if ssh.client:
                ret = upload(ssh.client, local_file, remote_server_file)
                print(ret)
        elif len(cli.args.filename) == 0:
            ssh = MySSH('192.168.23.129', '2587539619', 'root')
            if ssh.client:
                upload_many(ssh.client)
        else:
            print('Invalid arguments')
            print('Argument 1 is the local file, argument 2 is the remote file')
            log.debug('Invalid arguments for file upload')
    # Download files
    elif cli.args.download:
        if len(cli.args.filename) == 2:
            try:
                remote_server_file = cli.args.filename[0]
                local_file = cli.args.filename[1]
            except Exception as e:
                print(e)
                return None
            ssh = MySSH('192.168.23.129', '2587539619', 'root')
            if ssh.client:
                ret = download(ssh.client, remote_server_file, local_file)
                print(ret)
        else:
            log.debug('Invalid arguments for file download')
            print('Invalid arguments')
            print('Argument 1 is the remote file, argument 2 is the local file')

    # Interactive console
    elif cli.args.console:
        ssh = MySSH('192.168.23.129', '2587539619', 'root')
        log.debug('root logged in to 192.168.23.129 for interactive console use')
        while True:
            cmd = input('Enter a shell command ([q] to quit): ')
            if cmd == 'q':
                ssh.close()
                break
            log.debug(f'Command entered: {cmd}')
            stdout = ssh.send_cmd(cmd)
            print(stdout.read().decode())
Example #23
def stmts_for_param(type, varname, stack=[]):
    """
    Yield (declarations, initializers) that build an input variable of the given
    type, recursing through fields down to primitive types
    """

    type = type.get_canonical()

    decls = []
    inits = []
    shift_argv = 'shift_argi()'

    log.debug(f'variable {varname} type {type.spelling} (kind {type.kind})')

    if not (type.kind == TypeKind.FUNCTIONPROTO or
            (type.kind == TypeKind.POINTER
             and type.get_pointee().kind == TypeKind.FUNCTIONPROTO)):
        decls.append(f'{type.spelling} {varname.replace(".", "_")};')

    if type.kind == TypeKind.ELABORATED or type.kind == TypeKind.RECORD:
        td = type.get_declaration()
        children = list(td.get_children())
        inits.append(f'// assign fields for {varname}')
        if any(children):
            for child in children:
                child_varname = f'{varname}.{child.spelling}'
                if child.kind == CursorKind.UNION_DECL:
                    pass
                elif child.type.get_declaration(
                ).kind == CursorKind.UNION_DECL:
                    inits.append(
                        f'// TODO union {child_varname} = <{", ".join(c.spelling for c in child.type.get_declaration().get_children())}>;'
                    )
                elif child.type.kind == TypeKind.POINTER:
                    if child.type.spelling in (
                            s.spelling for s in stack
                    ) or child.type.get_pointee() == type:
                        inits.append(
                            f'// TODO recursive {child_varname} = <{type.spelling}>;'
                        )
                    else:
                        if child.type.get_pointee().kind == TypeKind.CHAR_S:
                            inits.append(f'{child_varname} = {shift_argv};')
                        elif child.type.spelling in (s.spelling
                                                     for s in stack):
                            pass
                        else:
                            valname = f'{child.spelling.replace(".", "_")}_v'
                            yield from stmts_for_param(
                                child.type.get_pointee(),
                                valname,
                                stack=stack + [child.type])
                            inits.append(f'{child_varname} = &{valname};')
                else:
                    child_inits = zip(*stmts_for_param(child.type,
                                                       f'{child_varname}',
                                                       stack=stack +
                                                       [child.type]))
                    yield from (([], c) for l in child_inits for c in l)
        else:
            log.warning(
                f'no fields found for type {type.spelling} (kind {type.kind})')
    elif type.kind == TypeKind.POINTER:
        if type.get_pointee().kind == TypeKind.CHAR_S:
            inits.append(f'{varname} = {shift_argv};')
        elif type.get_pointee().kind == TypeKind.FUNCTIONPROTO:
            inits.append(f'// TODO functionptr {varname} = <{type.spelling}>;')
        else:
            valname = f'{varname}_v'
            yield from stmts_for_param(type.get_pointee(),
                                       valname,
                                       stack=stack + [type])
            if type.get_pointee().kind != TypeKind.FUNCTIONPROTO:
                inits.append(f'{varname} = &{valname};')
    elif type.kind == TypeKind.INT or \
        type.kind == TypeKind.SHORT or \
        type.kind == TypeKind.LONG or \
        type.kind == TypeKind.LONGLONG or \
        type.kind == TypeKind.INT128 or \
        type.kind == TypeKind.ENUM:
        inits.append(f'{varname} = atoi({shift_argv});')
    elif type.kind == TypeKind.UINT or \
        type.kind == TypeKind.ULONG or \
        type.kind == TypeKind.ULONGLONG or \
        type.kind == TypeKind.UINT128:
        inits.append(f'{varname} = strtoul({shift_argv}, NULL, 10);')
    elif type.kind == TypeKind.DOUBLE or type.kind == TypeKind.LONGDOUBLE:
        inits.append(f'{varname} = strtod({shift_argv}, NULL);')
    elif type.kind == TypeKind.FLOAT:
        inits.append(f'{varname} = atof({shift_argv});')
    elif type.kind == TypeKind.CHAR_S:
        inits.append(f'{varname} = {shift_argv}[0];')
    elif type.kind == TypeKind.FUNCTIONPROTO:
        pass
    else:
        inits.append(f'// TODO unhandled {varname} = <{type.spelling}>;')

    yield decls, inits
Example #24
def parse_args(argv=sys.argv, do_wizard=True):
    file_dir = Path(__file__).parent
    default_pinroot = str(file_dir / 'pin-3.16')

    if '--' in argv:
        after_dash = argv[argv.index('--') + 1:]
        argv = argv[:argv.index('--')]
    else:
        after_dash = None

    parser = argparse.ArgumentParser()
    parser.add_argument(
        '-l',
        '--log-level',
        help='Display logs at a certain level (DEBUG, INFO, ERROR)',
        default='WARN')
    parser.add_argument('-v',
                        '--verbose',
                        action='store_true',
                        help='Display verbose logs in -lDEBUG')
    parser.add_argument('-k',
                        '--keep-logfile',
                        action='store_true',
                        help='Keep the log file after running Pin')
    parser.add_argument('-s',
                        '--include_static',
                        action='store_true',
                        help='Output static trace')
    parser.add_argument('--include_code',
                        action='store_true',
                        help='Output code statements')
    parser.add_argument('--include_column',
                        action='store_true',
                        help='Output column numbers')
    parser.add_argument(
        '-p',
        '--pin-root',
        type=str,
        help=f'Use an alternative path to Pin root. Default: {default_pinroot}',
        default=default_pinroot)
    parser.add_argument('-o',
                        '--output-file',
                        type=str,
                        help='Output to a file')
    parser.add_argument(
        '-I',
        default=[],
        dest='clang_include_paths',
        action='append',
        help='Include paths to pass to Clang (same as clang\'s -I flag)')
    parser.add_argument('--clang_library_file',
                        type=str,
                        help='Library file to load for Libclang stuff')
    arguments = parser.parse_args(argv[1:])

    if after_dash is None:
        raise argparse.ArgumentTypeError(
            'A delimiter -- before the command is required')

    # arguments.target = arguments.target.split()
    arguments.target = after_dash

    if arguments.log_level:
        log.setLevel(logging.getLevelName(arguments.log_level))

    if arguments.verbose:
        global verbose
        verbose = True
        log.debug(f'verbose logging enabled')

    if arguments.pin_root:
        arguments.pin_root = Path.cwd() / arguments.pin_root

    if do_wizard:
        arguments.pin = Pin.do_wizard(arguments, file_dir / 'install.sh')
    else:
        arguments.pin = Pin(arguments)

    if arguments.clang_library_file:
        log.debug(
            f'Setting clang library file to {arguments.clang_library_file}')
        Config.set_library_file(arguments.clang_library_file)

    log.debug(f'arguments: {arguments}')

    return arguments
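# Hypothetical invocation: everything after '--' becomes the target command and its
# arguments, while the option parser only sees the flags before it ('trace.py' and
# the file names are placeholders).
argv = ['trace.py', '-l', 'DEBUG', '--include_code', '-o', 'trace.txt',
        '--', './a.out', 'input.txt']
arguments = parse_args(argv, do_wizard=False)
print(arguments.target)  # ['./a.out', 'input.txt']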
Example #25
    def tick(self, ped_list, time_tick):
        '''[IN]
            ped_list: a list of current pedestrians
            time_tick: how much time to pass
        [EFFECT]
            Update ped_list to the state after time_tick has passed.
        '''
        # cal desire force
        desire_list = []
        for ped in ped_list:
            desire_list.append(self.calDesire(ped))

        log.debug("Desire forces: " + str(desire_list))

        # cal force comes from wall
        wall_list = []
        for ped in ped_list:
            ped_vel = np.array(ped.vel)
            wall_area = self.map.getWallArea()
            wall_joint = np.array((0, 0))
            for wall in wall_area:
                dist = calDistance(wall, ped.pos)/self.map.scale
                rad_diff = ped.radius - dist
                unit_vec = calUnitVector(ped.pos, wall)
                orth_vec = calOrthogonalVector(unit_vec)

                # cal social force
                wall_joint = wall_joint + self.calSocial(unit_vec, rad_diff, ped.A, ped.B)

                # cal physical force
                if rad_diff > 0:
                    # touched
                    wall_joint = wall_joint + self.calPhysical(unit_vec, orth_vec, rad_diff, ped_vel)
            wall_list.append(wall_joint)

        log.debug("Wall forces: " + str(wall_list))

        # cal force comes from other pedestrians
        others_list = []
        for ped_i in ped_list:
            ped_vel_i = np.array(ped_i.vel)
            others_joint = np.array((0,0))
            for ped_j in ped_list:
                if ped_i == ped_j:
                    continue
                ped_vel_j = np.array(ped_j.vel)
                dist = calDistance(ped_j.pos, ped_i.pos)
                rad_diff = ped_i.radius + ped_j.radius - dist
                unit_vec = calUnitVector(ped_i.pos, ped_j.pos)
                orth_vec = calOrthogonalVector(unit_vec)

                # cal social force
                others_joint = others_joint + self.calSocial(unit_vec, rad_diff, ped_i.A, ped_i.B)

                # cal physical force
                if rad_diff > 0:
                    others_joint = others_joint + self.calPhysical(unit_vec, orth_vec, rad_diff, ped_vel_j, ped_vel_i)
            others_list.append(others_joint)

        log.debug("Others forces: " + str(others_list))

        # cal joint force
        joint_list = []
        for desire, wall, others in zip(desire_list, wall_list, others_list):
            joint = np.array((0, 0))
            joint = joint + desire
            joint = joint + wall
            joint = joint + others
            joint_list.append(joint)
        
        # cal new position(using old velocity)
        for ped in ped_list:
            pos = np.array(ped.pos)
            vel = np.array(ped.vel)
            pos = pos + vel* self.map.scale*time_tick
            ped.pos = (int(pos[0]), int(pos[1]))

        # cal new velocity
        for ped, joint in zip(ped_list, joint_list):
            vel = np.array(ped.vel)
            vel = vel + joint*time_tick
            ped.vel = (float(vel[0]), float(vel[1]))

        # update experiment's time
        self.time = self.time + time_tick
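# The tick() loop above relies on a few geometric helpers that are not shown in this
# example. A minimal sketch of what they are assumed to compute (Euclidean distance,
# unit direction vector, and a perpendicular vector):
import numpy as np

def calDistance(a, b):
    # Euclidean distance between two (x, y) points
    return float(np.linalg.norm(np.array(a, dtype=float) - np.array(b, dtype=float)))

def calUnitVector(src, dst):
    # Unit vector pointing from src towards dst (zero vector if the points coincide)
    diff = np.array(dst, dtype=float) - np.array(src, dtype=float)
    norm = np.linalg.norm(diff)
    return diff / norm if norm > 0 else np.zeros(2)

def calOrthogonalVector(vec):
    # Vector perpendicular to vec (a 90-degree rotation)
    return np.array((-vec[1], vec[0]))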
Example #26
def get_static_locations(dynamic_locations, clang_include_paths):
    """
    Get locations for certain constructs which are only available statically.
    - Variable declarations without any executable code "int i;"
    - Case statements "case foo:"
    - Default statements "default: "
    """
    static_locations = []

    def ancestor_node(n):
        """
        Get the nearest significant ancestor.
        """
        if n.kind == CursorKind.FUNCTION_DECL:
            return n
        else:
            if n.semantic_parent is None:
                return n
            else:
                return ancestor_node(n.semantic_parent)

    def good(n):
        """
        Node should be added to the trace.
        """
        if n.kind in (CursorKind.VAR_DECL, CursorKind.CASE_STMT,
                      CursorKind.DEFAULT_STMT):
            return True
        else:
            return False

    filepaths = defaultdict(list)
    for l in dynamic_locations:
        filepaths[l.filepath].append(l)
    for filepath, locations in filepaths.items():
        log.debug(
            f'Parsing source file {filepath} with args {clang_include_paths}')
        root = nodeutils.parse(filepath, clang_include_paths)
        ancestors = []
        file = File.from_name(root.translation_unit, filepath)
        for l in locations:
            source_location = SourceLocation.from_position(
                root.translation_unit, file, l.lineno, l.column)
            node = Cursor.from_location(root.translation_unit, source_location)
            l.node = node
            if node.kind.is_invalid():
                continue
            ancestor = ancestor_node(node)
            if ancestor not in ancestors:
                log.debug(
                    f'node {nodeutils.pp(node)} has ancestor {nodeutils.pp(ancestor)}'
                )
                ancestors.append(ancestor)
        for a in ancestors:
            if a.kind.is_translation_unit():
                continue  # Do not include global constructs
            else:
                nodes = nodeutils.find(a, good)
                locations = [
                    Location(n.location.file.name, n.location.line,
                             n.location.column, n) for n in nodes
                ]
                for l in locations:
                    log.debug(f'static location {l}')
                static_locations += locations

    return static_locations