Example #1
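# Assumed setup for this excerpt (not part of the original snippet); the exact
# import path and the value of _debug may differ in the real netrepl sources:
import sys

from netrepl import Netrepl_Parser  # assumption: netrepl's parser/connection helper

_debug = "netrepl cmd:"  # set to None to silence the debug output below
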
def main():
    parser = Netrepl_Parser('Connect to netrepl and run '
                            'a command.',
                            debug=_debug)
    parser.parser.add_argument('-t', '--timeout',
                               type=int, required=False, default=60,
                               help='Seconds to wait for command '
                                    'to finish (default 60)')
    parser.parser.add_argument('-c', '--command',
                               type=str, nargs="+", required=True,
                               help='A command to execute remotely.')

    con = parser.connect()
    data = con.repl_command(" ".join(parser.args.command),
                            timeoutms=parser.args.timeout * 1000,
                            interrupt=True)
    if data is None:
        if _debug: print(_debug, 'Timeout occurred, data discarded.')
    else:
        if _debug: print(_debug, 'Data successfully received.')
    if _debug: print("{} Closing connection.".format(_debug))
    con.repl_normal()  # normal repl
    con.close(report=True)
    if _debug: print("{} Connection closed.".format(_debug))
    if data is None:
        sys.exit(1)  # not successful
    else:
        if _debug: print(_debug, "Output follows starting from next line.")
        try:
            sys.stdout.write(data.decode())
        except:
            if _debug:
                print("{} Got some weird data of len "
                      "{}: >>{}<<".format(_debug, len(data), data))
        sys.stdout.flush()
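
# Hypothetical invocation (the script name is assumed; connection options such
# as host and port are added by Netrepl_Parser and are not shown here):
#   python3 cmd.py -t 30 -c "print(2 + 2)"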
Example #2
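# Assumed setup for this excerpt (not part of the original snippet); the exact
# import path and the value of _debug may differ in the real netrepl sources:
import binascii
import os
import time
from hashlib import sha256

from netrepl import Netrepl_Parser  # assumption: netrepl's parser/connection helper

_debug = "netrepl copy:"  # set to None to silence the debug output below
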
def main():
    parser = Netrepl_Parser('Recursively copy a local file or directory to a '
                            'netrepl-enabled device.',
                            debug=_debug)
    parser.parser.add_argument('-t', '--timeout', nargs="?",
                               type=int, required=False, default=600,
                               help='seconds to wait for process '
                                    'to finish (default 600s)')
    parser.parser.add_argument('-s', '--src', '--source', nargs="?",
                               type=str, required=False, default="./",
                               help='source file or directory. '
                                    'Default: current directory (./). '
                                    'A source directory must end '
                                    'with a slash (/).')
    parser.parser.add_argument('-d', '--dest', '--destination',
                               type=str, required=False, nargs="?", default="/",
                               help='destination directory on the netrepl '
                                    'device. Default: netrepl root (/). '
                                    'If this ends with a slash (/) and the '
                                    'source is a directory, copy only the '
                                    'content of the source into the '
                                    'destination; without the slash, create '
                                    'the source directory inside the '
                                    'destination. A / is always prepended, '
                                    'so the path is absolute on the remote.')
    parser.parser.add_argument('--delete', '--del',
                               required=False, action="store_true",
                               help='if given, also delete files on the '
                                    'destination that are absent locally.')
    parser.parser.add_argument('-y', '--sync',
                               required=False, action="store_true",
                               help='if given, use list of hash codes '
                                    'to determine which files have to '
                                    'be copied and which can be skipped.')
    parser.parser.add_argument('-n', '--dry', '--dryrun',
                               required=False, action="store_true",
                               help='if given, only show what would be done; '
                                    'do not actually copy or delete anything.')
    parser.parser.add_argument('--reset', '--reboot',
                               required=False, action="store_true",
                               help='if given, reset node after copy.')
    parser.parser.add_argument('-x', '--exclude', '--ignore',
                               required=False, type=str, nargs="+", default="",
                               help='files or directories to exclude. '
                                    'Tries to match path or filename '
                                    '(like rsync). '
                                    'Multiple files and directories can be '
                                    'specified here.')
    parser.parser.add_argument('-o', '--only-create', '--create-only',
                               required=False, type=str, nargs="+", default="",
                               help='files or directories to create only '
                                    'when they are not yet present in the '
                                    'destination. '
                                    'A trailing slash is ignored. '
                                    'Multiple files and directories can be '
                                    'specified here.')
    # TODO: what about symlinks?

    con = parser.connect()
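    # Split source and destination into a directory part and a basename part;
    # a trailing slash on --src leaves src_file empty, meaning a whole
    # directory tree is copied instead of a single file.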
    source = parser.args.src
    src_dir = os.path.dirname(source)
    if len(src_dir) > 0 and not src_dir.endswith("/"):
        src_dir += "/"
    src_file = os.path.basename(source)
    dest = parser.args.dest
    dest_base_dir = os.path.dirname(dest)
    if not dest_base_dir.endswith("/"):
        dest_base_dir += "/"
    if not dest_base_dir.startswith("/"):
        dest_base_dir = "/" + dest_base_dir
    dest_prefix_dir = os.path.basename(dest)
    if dest_prefix_dir:
        dest_prefix_dir += "/"

    # create source list
    if src_file:  # got a (single) file
        src_list = [src_file]
    else:
        # just got a directory, so let's generate it
        src_list = []
        for root, dirs, files in os.walk(src_dir):
            if not root.endswith("/"):
                root += "/"
            root = root[len(src_dir):]  # make root relative to src_dir
            for d in dirs:
                src_list.append(root + d + "/")
            for filename in files:
                path = root + filename  # path relative to src_dir
                src_list.append(path)

    # interrupt what is running
    con.repl_interrupt()
    time.sleep(1)  # wait a bit

    # get destination file-list with hashes
    if src_file and len(src_list) == 1:  # if we only copy one file
        # (only small hashlist necessary)
        search_path = "/" + src_list[0]
    else:
        search_path = dest_base_dir + dest_prefix_dir

    # if sync requested get hash-files
    dest_hashes = {}
    if parser.args.sync:
        print("Connecting and requesting destination hash list.")
        data = con.repl_command("import hashlist; "
                                "hashlist.hashlist(root=\"{}\")"
                                .format(search_path),
                                timeoutms=parser.args.timeout * 1000)
        if data is None:  # timeout
            print("Problem with initial connection - "
                  "timeout occurred, aborting.")
        else:
            dest_hashes = {}
            for line in data.split(b"\n"):
                line = line[:-1]
                parts = line.split(b" ")
                if len(parts) and parts[0]:
                    key = b" ".join(parts[1:]).decode()
                    key = key[len(dest_base_dir + dest_prefix_dir):]
                    if key:
                        dest_hashes[key] = parts[0].decode()

            if _debug: print(_debug, "src_list:", src_list)  # debug
            if _debug: print(_debug, "dest_hashes:", dest_hashes)  # debug

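    # dest_hashes maps a destination-relative path to its sha256 hex digest,
    # or (as the directory checks below assume) to the marker "<dir>".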
    # Compute copy, delete, exclude, create only lists
    exclude_list = parser.args.exclude
    only_create_list = parser.args.only_create
    # create copy_list
    copy_list = []
    for f in src_list:
        # check if file needs to be excluded due to being on exclude list
        match = False
        for e in exclude_list:
            if e.startswith("/"):  # only allow path matches
                e = e[1:]
                if e.endswith("/"):
                    if f.startswith(e):
                        match = True
                        break
                else:
                    if f.startswith(e + "/") or f == e:
                        match = True
                        break
            else:  # allow also file match
                if e.endswith("/"):
                    if f.startswith(e):
                        match = True
                        break
                else:
                    if f.startswith(e + "/"):
                        match = True
                        break
                    if os.path.basename(f) == e or f == e:
                        match = True
                        break
        if match: continue  # don't need to look further
        # check if file needs to be excluded due to being in only-create
        match = False
        for o in only_create_list:
            if o.startswith("/"):
                if o[1:] == f:
                    match = True
                    break
            if f == o or os.path.basename(f) == o:
                match = True
                break
        if match:  # in only-create list: skip if already at destination
            if f in dest_hashes:
                continue  # then we don't need to add it
        # nothing matches, so add it
        copy_list.append(f)

    # create delete list
    delete_list = []
    if parser.args.delete:  # only when option set
        for f in dest_hashes:
            # might be a directory
            if f not in src_list:
                if not f.endswith("/"):
                    if f + '/' not in src_list:
                        delete_list.append(f)
                else:
                    delete_list.append(f)

    not_dryrun = not parser.args.dry
    # process delete list
    for f in delete_list:
        print("Deleting {}.".format(f))
        if not_dryrun:  # TODO: check that deleting directories works
            if f.endswith("/"):  # dir
                con.rmdir(dest_base_dir + dest_prefix_dir + f[:-1])
            else:  # file
                con.rm(dest_base_dir + dest_prefix_dir + f)
    # process copy list
    if dest_base_dir and dest_base_dir != "/":  # if there is one
        # always try to create it
        if not_dryrun:
            con.mkdir(dest_base_dir[:-1], nofail=True)
    if dest_prefix_dir:  # need to prefixdir?
        if not_dryrun:
            con.mkdir(dest_base_dir + dest_prefix_dir[:-1], nofail=True)
    for f in copy_list:
        if f.endswith("/"):  # directory
            if f[:-1] in dest_hashes: f = f[:-1]
            if f in dest_hashes:
                if dest_hashes[f] == "<dir>":
                    print("Skipping directory {} as it's already"
                          " on remote.".format(f))
                else:
                    print("Caution, remote has a file where"
                          " there should be a "
                          "directory ({}). Aborting.".format(f))
                    break
            else:
                print("Creating directory {}.".format(f))
                if not_dryrun:
                    con.mkdir(dest_base_dir + dest_prefix_dir + f)
        else:  # it is a file
            # hash the local file to compare against the remote hash list
            h = sha256()
            with open(src_dir + f, "rb") as hf:
                while True:
                    b = hf.read(1024)
                    if len(b) == 0:
                        break
                    h.update(b)
            digest = binascii.hexlify(h.digest()).decode()
            if f in dest_hashes and digest == dest_hashes[f]:
                print("Skipping {} due to equal hash.".format(f))
            else:
                print("Copying {}.".format(f))
                if not_dryrun:
                    con.upload(src_dir + f, dest_base_dir
                               + dest_prefix_dir + f,
                               f in dest_hashes)

    if not_dryrun:
        if parser.args.reset:
            print("Resetting device.")
            con.reset()
    else:
        print("This was a dry run, no actual changes were made.")

    if _debug: print("{} Closing connection.".format(_debug))
    con.repl_normal()  # normal repl
    con.close(report=True)
    if _debug: print("{} Connection closed.".format(_debug))
Example #3
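# Assumed setup for this excerpt (not part of the original snippet); names and
# values may differ in the real netrepl sources. char_reader is assumed to be
# a function that runs in a background thread, appending raw keystrokes to
# input_buffer (guarded by input_buffer_lock) and setting quit_flag when the
# user presses ctrl-].
import select
import sys
import termios
import threading
import tty

from netrepl import Netrepl_Parser  # assumption: netrepl's parser/connection helper

_debug = "netrepl terminal:"  # set to None to silence the debug output below
quit_flag = False
input_buffer = ""
input_buffer_lock = threading.Lock()
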
def main():
    global quit_flag, input_buffer_lock, input_buffer

    parser = Netrepl_Parser(
        'Connect to netrepl and open an '
        'interactive terminal.', debug=_debug)

    con = parser.connect()
    if _debug is not None:
        print(_debug, 'Press ctrl-] to quit.\n')
        print()
        print("Try typing help to get an initial help screen.")
    # Do not request help-screen, gets too confusing later
    #if _debug: print(_debug,'Requesting startscreen.')
    #con.send(b"\r\nhelp\r\n")

    # put the local terminal into raw mode so single keystrokes are read
    # immediately instead of line-buffered
    fd = sys.stdin.fileno()
    old_settings = termios.tcgetattr(fd)
    tty.setraw(fd)

    # background thread that collects keystrokes into input_buffer
    input_thread = threading.Thread(target=char_reader)
    input_thread.start()

    while not quit_flag:
        # Check if we received anything via network
        # Get the list sockets which are readable
        read_sockets, write_sockets, error_sockets = \
            select.select([con.socket], [], [], 0.01)

        for sock in read_sockets:
            # incoming message from remote server
            if sock == con.socket:
                data = con.receive()
                l = len(data)

                # TODO: figure out how to detect connection close
                # #print("recvd:",data)
                # if not data:
                #     print('\nterminal: Connection closed.')
                #     sys.exit()
                # else:
                if l > 0:
                    # print data
                    try:
                        sys.stdout.write(bytes(data[0:l]).decode())
                    except:
                        if _debug:
                            print("\r\n{} Got some weird data of len "
                                  "{}: >>{}<<\r\n".format(
                                      _debug, len(data), data))
                    #print("data:", str(data[0:l]))
                    sys.stdout.flush()

        if len(input_buffer) > 0:
            # user entered a message
            input_buffer_lock.acquire()
            send_buffer = input_buffer.encode()
            input_buffer = ""
            input_buffer_lock.release()
            #msg = sys.stdin.readline().strip()+'\r\n'
            #print("\r\nmsg {} <<\r\n".format(send_buffer))
            con.send(send_buffer)
            #cs.send(send_buffer+b'\r\n')
            #cs.send(send_buffer+b'!')

    input_thread.join()  # finish input_thread
    if _debug: print("\r\n{} Closing connection.\r".format(_debug))
    con.repl_normal()  # normal repl
    con.close(report=True)
    termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)
    if _debug: print("\r\n{} Connection closed.\r\n".format(_debug))