Example #1
    def process(self):
        while True:
            start_time = time.time()
            # Capture frame-by-frame
            self.extFrames.clear()
            for cam in self.extCams:
                ret, frame = cam.read()
                frame = self.f.unwarp(frame)
                self.extFrames.append(frame)
            # Optionally stitch the captured frames and display the results
            if self.stitch:
                # s = CVStitcher(args.debug)
                s = Stitcher(self.debug)
                pano = s.stitch(self.extFrames)
                # cv2.resize(pano, (320, 180), interpolation=cv2.INTER_AREA)
                cv2.imshow("pano", pano)
            if self.debug:
                for i in range(len(self.extCams)):
                    cv2.imshow(
                        'frame' + str(i),
                        cv2.resize(self.extFrames[i], (480, 270),
                                   interpolation=cv2.INTER_AREA))
                for cam in self.extCams:
                    print(self.decode_fourcc(cam.get(cv2.CAP_PROP_FOURCC)))
            if cv2.waitKey(1) & 0xFF == ord('q'):
                break
            fps = 1.0 / (time.time() - start_time)
            print("[INFO] approx. FPS: {:.2f}".format(fps))

        # Cleanly exit.
        for cam in self.extCams:
            cam.release()
        cv2.destroyAllWindows()
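The decode_fourcc helper called in the debug branch is not part of this excerpt; a minimal sketch of the usual implementation (the method name and the standard 32-bit FOURCC packing are assumptions) is:

    def decode_fourcc(self, value):
        # Unpack the 32-bit integer returned by cv2.CAP_PROP_FOURCC into a 4-character codec string.
        value = int(value)
        return "".join(chr((value >> 8 * i) & 0xFF) for i in range(4))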
Example #2
class Video2PDF:
    '''Convert an MP4 video to a PDF.'''
    def __init__(self):
        self.quilt = None

    def random_string(self, length):
        letters = string.ascii_letters + string.digits
        return ''.join(random.choice(letters) for i in range(length))

    def run(self, path):
        self.stitcher = Stitcher(path)
        self.stitcher.stitch()
        self.pdfifier = PDFifier()
        self.quilt = self.stitcher.get_quilt()

        name = self.random_string(16)
        self.pdfifier.makePDF(self.quilt, name)

        filename = name

        if (os.path.isfile(path + ".mp3")):
            os.remove(path + ".mp3")
        if (os.path.isdir("audio_segments")):
            shutil.rmtree("audio_segments")
        if (os.path.isdir("slides")):
            shutil.rmtree("slides")

        return filename
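A hypothetical call, assuming Stitcher and PDFifier are importable and that an MP4 named lecture.mp4 sits next to the script (the file name is made up for illustration):

converter = Video2PDF()
pdf_name = converter.run("lecture.mp4")  # returns the random base name handed to PDFifier.makePDF
print(pdf_name)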
Example #3
def main():
    stitcher = Stitcher()
    # Open the cameras once; re-opening them on every iteration leaks device handles.
    leftCam = cv2.VideoCapture(2)
    rearCam = cv2.VideoCapture(1)
    rightCam = cv2.VideoCapture(0)
    while True:

        #ap = argparse.ArgumentParser()
        #ap.add_argument('-i', '--input-dir', required=True, help='Path to image input directory')
        #args = vars(ap.parse_args())

        #dir = args['input_dir'].strip(' ')
        #input_dir = os.path.join(os.getcwd(), dir)
        #image_files = sorted(os.listdir(input_dir))
        #image_files = [os.path.join(input_dir, x) for x in image_files]
        #images = [imutils.resize(cv2.imread(x), width=400) for x in image_files]
        images = []
        images.append(leftCam.read()[1])
        images.append(rearCam.read()[1])
        images.append(rightCam.read()[1])
        images = [imutils.resize(cv2.flip(x, 1), width=400) for x in images]

        try:
            result = stitcher.stitch(images)
            cv2.imshow("Stitched", result)
        except RuntimeError:
            cv2.waitKey(100)
            continue

        cv2.waitKey(100)
Example #4
def main():
    parser = argparse.ArgumentParser(
        description='A small utility that creates a big map.')

    parser.add_argument(
        '--start',
        required=True,
        help=
        'top-left point of the region of interest (comma-separated lat/lng pair)'
    )
    parser.add_argument(
        '--end',
        required=True,
        help=
        'bottom-right point of the region of interest (comma-separated lat/lng pair)'
    )
    parser.add_argument('--zoom', type=int, required=True, help='zoom level')
    parser.add_argument('--out', required=True, help='output directory')
    parser.add_argument('--transit',
                        help='enable transit layer',
                        action='store_true')

    args = parser.parse_args()
    start = tuple(float(x) for x in args.start.split(','))
    end = tuple(float(x) for x in args.end.split(','))
    tile_size_px = 500

    print('[bigmapmaker] Starting.')
    screenshotter = Screenshotter(start, end, args.zoom, args.out,
                                  args.transit, tile_size_px)
    screenshotter.fetch_tiles()
    print('[bigmapmaker] Done with fetching, moving on to stitching.')
    stitcher = Stitcher(args.out, tile_size_px)
    stitcher.stitch()
    print('[bigmapmaker] Done.')
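A hypothetical invocation, shown as a comment (the script name and all coordinate values are made up for illustration):

# python bigmapmaker.py --start 48.87,2.29 --end 48.85,2.31 --zoom 15 --out tiles --transit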
Example #5
    def stitchFolder(self, targetFolder):
        stitcherHandler = Stitcher()
        filesToStitch = []
        onlyFiles = [
            f for f in os.listdir(targetFolder)
            if isfile(join(targetFolder, f))
        ]
        for fileName in onlyFiles:
            if fileName.endswith(".jpg"):
                filesToStitch.append(os.path.join(targetFolder, fileName))

        if (len(filesToStitch) < 2):
            return
        parentName = os.path.split(os.path.dirname(filesToStitch[0]))[1]
        outputFile = os.path.join(os.path.dirname(filesToStitch[0]),
                                  parentName + ".tiff")
        logFile = os.path.join(os.path.dirname(filesToStitch[0]),
                               parentName + "_log.txt")
        filesToStitch.sort()
        print(filesToStitch)
        print(outputFile)
        print("Logging to: " + logFile)
        if (outputFile is None):
            exit()

        stitcherHandler.stitchFileList(filesToStitch, outputFile, logFile,
                                       self.progressCallback,
                                       self.maskBox.IsChecked(),
                                       self.scalePath,
                                       self.verticalCore.IsChecked())

        if (self.archiveImages.IsChecked()):
            self.ArchiveQueue.put(targetFolder)
Example #6
def main(_):
    assert FLAGS.out
    assert FLAGS.db and os.path.exists(FLAGS.db)

    picpac_config = dict(
        seed=2016,
        #loop=True,
        shuffle=True,
        reshuffle=True,
        #resize_width=256,
        #resize_height=256,
        round_div=FLAGS.stride,
        batch=1,
        split=1,
        split_fold=0,
        annotate='json',
        channels=FLAGS.channels,
        stratify=True,
        pert_color1=20,
        pert_angle=20,
        pert_min_scale=0.8,
        pert_max_scale=1.2,
        #pad=False,
        pert_hflip=True,
        pert_vflip=True,
        channel_first=False  # this is tensorflow specific
        # Caffe's dimension order is different.
    )

    stream = picpac.ImageStream(FLAGS.db,
                                perturb=False,
                                loop=False,
                                **picpac_config)

    gal = Gallery(FLAGS.out)
    cc = 0
    with Model(FLAGS.model, name=FLAGS.name, prob=True) as model:
        for images, _, _ in stream:
            #images *= 600.0/1500
            #images -= 800
            #images *= 3000 /(2000-800)
            _, H, W, _ = images.shape
            if FLAGS.max_size:
                if max(H, W) > FLAGS.max_size:
                    continue
            if FLAGS.patch:
                stch = Stitcher(images, FLAGS.patch)
                probs = stch.stitch(model.apply(stch.split()))
            else:
                probs = model.apply(images)
            cc += 1
            save(gal.next(), images, probs)
            if FLAGS.max and cc >= FLAGS.max:
                break
    gal.flush()
Example #7
    def __init__(self, bot):
        self.bot = bot
        self.sql_query = SQLQuery(initialize_connection())
        self.manager = Manager()
        self.stitcher = Stitcher()
        self.dbl_token = config('DBL_TOKEN')
        self.webhook_auth_token = config('ALICE_WEBHOOK_AUTH_TOKEN')
        self.dblpy = dbl.DBLClient(self.bot,
                                   self.dbl_token,
                                   autopost=True,
                                   webhook_path='/dblwebhook',
                                   webhook_auth=self.webhook_auth_token,
                                   webhook_port=environ.get("PORT", 8000))
Example #8
    def startNoah(self, path):
        # fix_text returns one cleaned-up string per script segment
        self.stitcher = Stitcher(path)
        self.stitcher.stitch()
        self.pdfifier = PDFifier()
        self.quilt = self.stitcher.get_quilt()

        self.image_num = 0
        image_map = QPixmap(self.quilt[0].slide_path)
        data = self.fix_text(self.quilt[0].script)
        self.slideImage.setPixmap(image_map)
        self.textArea.setText('\n'.join(data))
        self.repaint()
Example #9
def stitch(img1, img2, sigma=2.0, levels=None, flag_fast=False):
    if flag_fast:
        # Cap the working width at 400 px to speed up feature detection and matching.
        width = 400
        if img1.shape[1] > width:
            img1 = cv2.resize(
                img1,
                (width, int(img1.shape[0] * float(width) / img1.shape[1])))
        if img2.shape[1] > width:
            img2 = cv2.resize(
                img2,
                (width, int(img2.shape[0] * float(width) / img2.shape[1])))

    # stitch the images together to create a panorama
    stitcher = Stitcher()
    return stitcher.stitch([img1, img2], sigma=sigma, levels=levels)
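A hypothetical call, assuming two overlapping photos on disk and that Stitcher.stitch returns the blended panorama (the file names are made up for illustration):

left = cv2.imread("left.jpg")
right = cv2.imread("right.jpg")
panorama = stitch(left, right, sigma=2.0, flag_fast=True)
cv2.imwrite("panorama.jpg", panorama)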
Example #10
def template(directory: str,
             in_file: str,
             out_file: str = None,
             passes: int = -1,
             logger=None):
    OUT_PREFIX = 'out_'
    FEEDBACK = 'Finished stitching in {0}'
    ERROR = 'Error: {0}!'

    if out_file is None:
        out_file = OUT_PREFIX + in_file
    try:
        Stitcher(directory, in_file, out_file, passes, logger)
        if logger is None:
            print(FEEDBACK.format(os.path.join(directory, out_file)))
        else:
            logger.info(FEEDBACK.format(os.path.join(directory, out_file)))
    except Exception as e:
        if logger is None:
            print(ERROR.format(e))
        else:
            logger.error(e)
        raise
    print()
    return
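A hypothetical call, assuming a directory ./captures containing scan.png and that the Stitcher constructor does the actual work as shown above (all names are made up for illustration):

import logging

template('./captures', 'scan.png')  # on success prints 'Finished stitching in ./captures/out_scan.png'
template('./captures', 'scan.png', passes=3, logger=logging.getLogger(__name__))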
Example #11
    def write(self, name='fullfile.csv'):
        stitch = Stitcher(self.fpath)
        files = len(self.successful) - 1
        try:
            stitch.import_vf(files)
        except Exception as e:
            print("VF files failed. Reason: {}".format(e))
        try:
            stitch.import_franklin()
        except Exception as e:
            print("Whoops, no franklin: {}".format(e))
        try:
            stitch.import_hamilton()
        except Exception as e:
            print("Whoops, no Hamilton: {}".format(e))
        stitch.write(fname=name)
Example #12
    def __init__(self, path=None):
        # Initialize stitcher object and pdf object
        self.path = path
        if path is None:
            self.stitcher = None
        else:
            self.stitcher = Stitcher(self.path)
        self.quilt = None
Example #13
def homography_matrices(video):
    stitcher = Stitcher()

    initialized = False
    for i, frame in enumerate(video):
        stdout.write('{}\r'.format(i))
        stdout.flush()

        if initialized is False:
            initialized = True
            homography_matrices = np.array([np.identity(3)])
            prev_frame = frame
            continue

        if i % 20 == 0:
            homography_matrices = np.append(homography_matrices,
                                            [np.identity(3)],
                                            axis=0)
            prev_frame = frame
            continue

        H = stitcher.find_homography(frame, prev_frame)

        homography_matrices = np.append(homography_matrices, [H], axis=0)

    initialized = False
    for i, frame in enumerate(video):
        stdout.write('{}\r'.format(i))
        stdout.flush()

        if initialized is False:
            initialized = True
            reduced_H = np.identity(3)
            prev_frame = frame
            continue

        if i % 20 == 0:
            H = stitcher.find_homography(frame, prev_frame)
            reduced_H = reduced_H @ H
            prev_frame = frame

        # H = reduced_H @ H
        homography_matrices[i] = reduced_H @ homography_matrices[i]

    return homography_matrices
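The Stitcher.find_homography used above is not shown; a minimal sketch of one common way to implement it (ORB features plus RANSAC, with all names assumed rather than taken from the original class) is:

import cv2
import numpy as np

def find_homography(src_frame, dst_frame):
    # Estimate the 3x3 homography that maps src_frame onto dst_frame.
    gray1 = cv2.cvtColor(src_frame, cv2.COLOR_BGR2GRAY)
    gray2 = cv2.cvtColor(dst_frame, cv2.COLOR_BGR2GRAY)
    orb = cv2.ORB_create(2000)
    kp1, des1 = orb.detectAndCompute(gray1, None)
    kp2, des2 = orb.detectAndCompute(gray2, None)
    matcher = cv2.BFMatcher(cv2.NORM_HAMMING, crossCheck=True)
    matches = sorted(matcher.match(des1, des2), key=lambda m: m.distance)[:500]
    src_pts = np.float32([kp1[m.queryIdx].pt for m in matches]).reshape(-1, 1, 2)
    dst_pts = np.float32([kp2[m.trainIdx].pt for m in matches]).reshape(-1, 1, 2)
    H, _ = cv2.findHomography(src_pts, dst_pts, cv2.RANSAC, 5.0)
    return H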
Example #14
    def run(self, path):
        self.stitcher = Stitcher(path)
        self.stitcher.stitch()
        self.pdfifier = PDFifier()
        self.quilt = self.stitcher.get_quilt()

        name = self.random_string(16)
        self.pdfifier.makePDF(self.quilt, name)

        filename = name

        if (os.path.isfile(path + ".mp3")):
            os.remove(path + ".mp3")
        if (os.path.isdir("audio_segments")):
            shutil.rmtree("audio_segments")
        if (os.path.isdir("slides")):
            shutil.rmtree("slides")

        return filename
Example #15
    def __init__(self, logger, ip, port, configfps, ingestproxy, mcip):

        self._logger = logger
        self._ip = ip
        self._port = port

        # read all config files and add channels
        for configfp in configfps:
            Channel.append(configfp)

        # create shared memdb
        self._memdb = MemDB()

        # handler class for responding to http requests
        self._myhandler = makehandlerclass(self._logger.getChild("HTTPServer"), ingestproxy, mcip, self._memdb)
#        self._myhandler.protocol_version = "HTTP/1.1"  #-->Do not use HTTP1.1 because handler does not support 206 and byte requests easily.
        self._myhandler.server_version = "m2u"
        self._httpd = None

        # stitcher for concatenating RTP slices into fragments
        self._stitcher = Stitcher(name="stitcher", args=(self._logger.getChild("Stitcher"), self._memdb))
Example #16
    def __init__(self, torrent):
        self.torrent = torrent
        self.torrent_state = 'random'
        self.reactor = Reactor()
        self.reactor_activated = False
        self.peer_id = '-TZ-0000-00000000000'
        self.peers = []
        self.decode_torrent_and_setup_pieces()
        self.handshake = self.build_handshake()
        self.setup_tracker()
        self.stitcher = Stitcher(self)
        self.setup_peers()
Example #17
    def stitch(self):
        stitcherHandler = Stitcher()
        print(self.fileList)
        print(self.outputFile)
        if (self.outputFile is None):
            exit()

        parentName = os.path.split(os.path.dirname(self.fileList[0]))[1]
        outputFile = os.path.join(
            os.path.dirname(self.fileList[0]),
            parentName + str(len(self.fileList)) + ".tiff")
        self.finishedFiles = []
        self.finishedFiles.append(outputFile)

        _thread.start_new_thread(
            stitcherHandler.stitchFileList,
            (self.fileList, outputFile, self.progressCallback,
             self.maskImages.get()))
Example #18
class DASHProxy:

    def __init__(self, logger, ip, port, configfps, ingestproxy, mcip):

        self._logger = logger
        self._ip = ip
        self._port = port

        # read all config files and add channels
        for configfp in configfps:
            Channel.append(configfp)

        # create shared memdb
        self._memdb = MemDB()

        # handler class for responding to http requests
        self._myhandler = makehandlerclass(self._logger.getChild("HTTPServer"), ingestproxy, mcip, self._memdb)
#        self._myhandler.protocol_version = "HTTP/1.1"  #-->Do not use HTTP1.1 because handler does not support 206 and byte requests easily.
        self._myhandler.server_version = "m2u"
        self._httpd = None

        # stitcher for concatenating RTP slices into fragments
        self._stitcher = Stitcher(name="stitcher", args=(self._logger.getChild("Stitcher"), self._memdb))


    def serve_requests(self):

        try:
            # start stitcher
            self._stitcher.start()

            # start HTTP server
            self._httpd = HTTPServer((self._ip, self._port), self._myhandler)
            self._logger.info("Handling requests on %s:%d" % (self._ip, self._port))

            # This will block and periodically check the shutdown signal
            self._httpd.serve_forever()
        except:
            self.stop()
            raise


    def stop(self):
        if self._httpd is not None:
            self._httpd.shutdown()
            self._httpd = None

        if self._stitcher is not None:
            self._stitcher.stop()
            self._stitcher.join(1)
            self._stitcher = None
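The excerpt does not show how the proxy is started; a hypothetical bootstrap, where the logger name, port, config file and multicast IP are all made up for illustration, might look like:

if __name__ == "__main__":
    import logging
    logging.basicConfig(level=logging.INFO)
    proxy = DASHProxy(logging.getLogger("m2u"), "0.0.0.0", 8080,
                      ["channel0.cfg"], None, "0.0.0.0")
    try:
        proxy.serve_requests()          # blocks; serve_requests() already calls stop() on errors
    except KeyboardInterrupt:
        pass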
Example #19
    def stitchFolder(self, targetFolder):
        stitcherHandler = Stitcher()
        filesToStitch = []
        onlyFiles = [
            f for f in os.listdir(targetFolder)
            if isfile(join(targetFolder, f))
        ]
        for fileName in onlyFiles:
            if fileName.endswith(".jpg"):
                filesToStitch.append(os.path.join(targetFolder, fileName))

        # Guard against folders with nothing to stitch (filesToStitch[0] would raise otherwise).
        if len(filesToStitch) < 2:
            return

        parentName = os.path.split(os.path.dirname(filesToStitch[0]))[1]
        outputFile = os.path.join(os.path.dirname(filesToStitch[0]),
                                  parentName + ".tiff")

        print(filesToStitch)
        print(outputFile)
        if (outputFile is None):
            exit()

        _thread.start_new_thread(
            stitcherHandler.stitchFileList,
            (filesToStitch, outputFile, "test.txt", self.progressCallback,
             self.maskImages.get(), None))
Example #20
class MyWindow(QtWidgets.QMainWindow, Ui_MainWindow):
    def __init__(self):
        QtWidgets.QMainWindow.__init__(self)
        Ui_MainWindow.__init__(self)
        self.setupUi(self)

        self.startButton.clicked.connect(self.openFileNameDialog)
        self.nextButton.clicked.connect(self.nextSlide)
        self.previousButton.clicked.connect(self.prevSlide)
        self.pdfButton.clicked.connect(self.genPDF)

        self.quilt = None
        self.image_num = 0

    def startNoah(self, path):
        # fix_text returns one cleaned-up string per script segment
        self.stitcher = Stitcher(path)
        self.stitcher.stitch()
        self.pdfifier = PDFifier()
        self.quilt = self.stitcher.get_quilt()

        self.image_num = 0
        image_map = QPixmap(self.quilt[0].slide_path)
        data = self.fix_text(self.quilt[0].script)
        self.slideImage.setPixmap(image_map)
        self.textArea.setText('\n'.join(data))
        self.repaint()

    def nextSlide(self):
        if self.quilt is not None:
            if (self.image_num < len(self.quilt) - 1):
                self.image_num += 1
                image_map = QPixmap(self.quilt[self.image_num].slide_path)
                self.slideImage.setPixmap(image_map)
                data = self.fix_text(self.quilt[self.image_num].script)
                self.textArea.setText('\n'.join(data))
                self.repaint()

    def prevSlide(self):
        if self.quilt is not None:
            if (self.image_num > 0):
                self.image_num -= 1
                image_map = QPixmap(self.quilt[self.image_num].slide_path)
                self.slideImage.setPixmap(image_map)
                data = self.fix_text(self.quilt[self.image_num].script)
                self.textArea.setText('\n'.join(data))
                self.repaint()

    def fix_text(self, script):
        first = True
        fixed = list()
        for segment in script:
            new_text = ""
            for text in segment:
                for word in text:
                    if (first):
                        first = False
                        new_text += word
                    else:
                        new_text += (" " + word)
            fixed.append(new_text)
        return fixed

    def openFileNameDialog(self):
        options = QFileDialog.Options()
        options |= QFileDialog.DontUseNativeDialog
        path, _ = QFileDialog.getOpenFileName(
            self,
            "QFileDialog.getOpenFileName()",
            "",
            "All Files (*);;Python Files (*.py)",
            options=options)
        if path:
            self.startNoah(os.path.basename(path))

    def genPDF(self):
        print("Generating a pdf...")
        if self.quilt is not None:
            self.pdfifier.makePDF(self.quilt)
        print("Done!")
Example #21
    "Stack several image files to create digital long exposure photographies")
parser.add_argument("--align",
                    action="store_true",
                    help="run only the aligner, do not compress")
parser.add_argument("--transform",
                    action="store_true",
                    help="run only the aligner and transform, do not compress")
parser.add_argument("--stitch",
                    action="store_true",
                    help="stitch images for panoramic formats")
args = parser.parse_args()

# --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- ---

aligner = Aligner()
stitcher = Stitcher()
stacker = Stacker(aligner)
input_images_aligner = []
input_images_stitcher = []
input_images_stacker = []

# transform to absolute paths
BASE_DIR = os.path.dirname(os.path.realpath(__file__))

# --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- ---

# init aligner

if args.align or args.transform:

    # expand all paths
Example #22
# ny, nx and the per-video frame counts nF are defined earlier in the script (not shown in this excerpt).
u0 = np.zeros((ny, nx, nF[0]))
v0 = np.zeros((ny, nx, nF[0]))
for i in range(nF[0].astype(int)):
    u0[:, :, i] = i / 5.
    v0[:, :, i] = 2 * i / 5.

#Initialize data
us = np.zeros((ny, nx, np.sum(nF)))
vs = np.zeros((ny, nx, np.sum(nF)))
us[:, :, 0:nF[0]] = u0
vs[:, :, 0:nF[0]] = v0

#Load in optic flow data
vidx = 1
#Load MFSF data
u1 = np.zeros((ny, nx, nF[vidx]))
v1 = np.zeros((ny, nx, nF[vidx]))
for i in range(nF[0].astype(int)):
    v1[:, :, i] = -i / 5.
#Make a Stitcher
thestitch = Stitcher(u1, v1)
self = thestitch
(u, v) = thestitch.run(u0, v0)

us[:, :, np.sum(nF[0:vidx]):np.sum(nF[0:vidx + 1])] = u
vs[:, :, np.sum(nF[0:vidx]):np.sum(nF[0:vidx + 1])] = v

#Save output matrix
#mdict = {'u':us, 'v':vs, 'parmsOF':params, 'info':info}
#savemat(args.fn_out, mdict)
Example #23
from stitcher import Stitcher
import argparse
import imutils
import cv2

ap = argparse.ArgumentParser()
ap.add_argument("-f", "--first", required=False,
                help="path to the first image")
ap.add_argument("-s", "--second", required=False, help="path to the second "
                                                       "image")
args = vars(ap.parse_args())

imageA = cv2.imread(args["first"] if args["first"] else
                    "../../../data/input/church_1.jpg")
imageB = cv2.imread(args["second"] if args["second"] else
                    "../../../data/input/church_2.jpg")

stitcher = Stitcher()
result, matches = stitcher.stitch([imageA, imageB], show_matches=True)

cv2.imwrite("../../../data/output/keypoints_matches.png", matches)
cv2.imwrite("../../../data/output/stitch_result.png", result)
Example #24
        except ValueError as e:
            return 0


# --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- ---

parser = argparse.ArgumentParser(description="Stack several image files to create digital long exposure photographies")
parser.add_argument("--align", action="store_true", help="run only the aligner, do not compress")
parser.add_argument("--transform", action="store_true", help="run only the aligner and transform, do not compress")
parser.add_argument("--stitch", action="store_true", help="stitch images for panoramic formats")
args = parser.parse_args()

# --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- ---

aligner = Aligner()
stitcher = Stitcher()
stacker = Stacker(aligner)
input_images_aligner = []
input_images_stitcher = []
input_images_stacker = []

# transform to absolute paths
BASE_DIR = os.path.dirname(os.path.realpath(__file__))

# --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- ---

# init aligner

if args.align or args.transform:

    # expand all paths
Example #25
  def process(self):
    '''
    Starts loading the next section.
    '''
    # do nothing while workers are not available
    if self._active_workers.full():
      return

    #
    # here we have at least 1 worker available
    #

    #
    # viewing has higher priority, so check if we have anything
    # in the viewing queue
    #
    if len(self._viewing_queue) != 0:

      for view in self._viewing_queue:

        # check if we have the tiles required for this view
        allLoaded = True

        for tile in view._tiles:
          if tile._status.isVirgin():
            # we need to load this tile
            tile._status.loading()
            self._loading_queue.append(tile)
            allLoaded = False
            print 'We need tile', tile
          elif tile._status.isLoading():
            # the tile is still loading
            allLoaded = False
            

        if allLoaded:
          #
          # we have all the tiles and
          # now we can stitch the view
          #
          self._viewing_queue.remove(view)
          view._status.loading()
          print 'Stitching', view

          # now it is time to calculate the bounding box for this view
          bbox = View.calculateBB(view._tiles, view._zoomlevel)
          # print bbox
          view._bbox = bbox # re-attach the bounding box (since something could have changed)

          # allocate shared mem for view
          memory = mp.RawArray(ctypes.c_ubyte, bbox[1]*bbox[3])
          view._memory = memory # we need to keep a reference
          view._imagedata = Stitcher.shmem_as_ndarray(memory)

          # start worker
          args = (self, view)
          worker = mp.Process(target=Stitcher.run, args=args)
          self._active_workers.put(1) # increase worker counter
          worker.start()


    #
    # loading has lower priority
    # check if we have anything in the loading queue
    #
    if len(self._loading_queue) != 0:
      tile = self._loading_queue.pop(0)

      #zoomlevels = [0, 1, 2, 3, 4, 5] # TODO dynamically

      # allocate shared mem for tile and for each zoom level
      for z in self._zoomlevels:
        divisor = 2**z
        tile_width = tile._bbox[1] / divisor
        tile_height = tile._bbox[3] / divisor # TODO maybe int?
        memory = mp.RawArray(ctypes.c_ubyte, tile_width*tile_height)
        imagedata = Loader.shmem_as_ndarray(memory)
        tile._levels.append(Level(memory, imagedata))

      # start worker
      args = (self, tile)
      worker = mp.Process(target=Loader.run, args=args)
      self._active_workers.put(1) # increase worker counter
      worker.start()
      return # jump out
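Both Stitcher.shmem_as_ndarray and Loader.shmem_as_ndarray are referenced above but not shown; a minimal sketch of the usual way to wrap a multiprocessing.RawArray as a NumPy view (the function name and uint8 dtype are assumptions) is:

import numpy as np

def shmem_as_ndarray(raw_array):
    # View the shared ctypes buffer (RawArray of c_ubyte) as a flat uint8 array without copying.
    return np.frombuffer(raw_array, dtype=np.uint8)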
Example #26
    def run(self):
        self._run = True

        # joining MC group
        self._sock.bind(('', self._mcast_port))  # may causes issues in windows
        self._sock.setsockopt(
            socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP,
            socket.inet_aton(self._mcast_grp) + socket.inet_aton(self._mcip))

        #        self._sock.setsockopt(socket.SOL_IP, socket.IP_MULTICAST_IF, socket.INADDR_ANY if self._mcip == '0.0.0.0' else socket.inet_aton(self._mcip))
        #        mreq = struct.pack('4sl', socket.inet_aton(self._mcast_grp), socket.INADDR_ANY if self._mcip == '0.0.0.0' else socket.inet_aton(self._mcip))

        self._logger.debug(
            "Receiver thread started for %s:%d (ssrc: %d) on %s" %
            (self._mcast_grp, self._mcast_port, self._ssrc, self._mcip))

        while self._run:
            try:
                data, addr = self._sock.recvfrom(1500)
                rtp_pkt = RTP()
                rtp_pkt.unpack(data)

                # drop malformed packets
                if rtp_pkt.version != 2:
                    self._logger.warning('invalid RTP format')
                    continue

                if int(rtp_pkt.ssrc) != self._ssrc:
                    self._logger.warning(
                        'Foreign RTP stream (ssrc=%d, but expecting %d)' %
                        (int(rtp_pkt.ssrc),
                         self._ssrc))  # TODO: implement received from...
                    continue

                if rtp_pkt.x == 1:
                    rtp_pkt = RTPEXT()
                    rtp_pkt.unpack(data)

                    if rtp_pkt.id == RTPMABRDATA.ID:
                        rtp_pkt = RTPMABRDATA()
                        rtp_pkt.unpack(data)
                    elif rtp_pkt.id == RTPMABRSTITCHER.ID:
                        rtp_pkt = RTPMABRSTITCHER()
                        rtp_pkt.unpack(data)
                    else:
                        self._logger.warning('Non MABR RTP packet (id=%02x)' %
                                             rtp_pkt.id)
                        continue
                else:
                    self._logger.warning(
                        'Non MABR RTP packet (RTP extension is missing)')
                    continue

                # store data
                if not self._memdb.set(
                        Slice.getmemcachedkey(rtp_pkt.ssrc, rtp_pkt.seq),
                        rtp_pkt.data):
                    self._logger.warning(
                        'ssrc: %s, seq: %d, cannot store RTP packet ' %
                        (rtp_pkt.ssrc, rtp_pkt.seq))
#                else:
#                    self._logger.debug('RTP packet stored: ssrc=%s, seq=%d' % (rtp_pkt.ssrc, rtp_pkt.seq))

                # trigger stitcher
                if rtp_pkt.id == RTPMABRSTITCHER.ID:
                    Stitcher.stitch(rtp_pkt.ssrc, rtp_pkt.burstseqfirst,
                                    rtp_pkt.burstseqlast, rtp_pkt.chunknumber,
                                    rtp_pkt.checksum, self._logger)

            except socket.timeout:
                pass
            except Exception as e:
                self._logger.warning("Oops: %s" % e.message)
                self._logger.debug(traceback.format_exc())
Example #27
from stitcher import Stitcher

image_list = ["cam4.png", "cam8.png"]  #,"images/image_3.png"]

stitcher = Stitcher(image_list)
stitcher.process_images()
Example #28
from stitcher import Stitcher
import cv2

# Read the two images to stitch
imageA = cv2.imread("../pic/left_01.png")
imageB = cv2.imread("../pic/right_01.png")

# Stitch the images into a panorama
stitcher = Stitcher()
(result, vis) = stitcher.stitch([imageA, imageB], showMatches=True)

# Show all the images
cv2.imshow("Image A", imageA)
cv2.imshow("Image B", imageB)
cv2.imshow("Keypoint Matches", vis)
cv2.imshow("Result", result)
cv2.waitKey(0)
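For comparison, recent OpenCV builds (3.4+) ship a ready-made high-level stitcher; a minimal sketch using it on the same two images is:

stitcher_cv = cv2.Stitcher_create()           # panorama mode by default
status, pano = stitcher_cv.stitch([imageA, imageB])
if status == cv2.Stitcher_OK:                 # Stitcher_OK == 0
    cv2.imshow("OpenCV built-in result", pano)
    cv2.waitKey(0)
else:
    print("Built-in stitcher failed with status {}".format(status))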
Example #29
from stitcher import Stitcher

#l = cv2.imread('../images/image_5.png',0)
#r = cv2.imread('../images/image_6.png',0)
image_list = ["images/image_5.png","images/image_4.png","images/image_6.png"]#,"images/image_3.png"]

stitcher = Stitcher(image_list)
stitcher.process_images()


#matches = matcher.match_keypoints(l_kp,r_kp)
Example #30
class TopGG(commands.Cog):
    def __init__(self, bot):
        self.bot = bot
        self.sql_query = SQLQuery(initialize_connection())
        self.manager = Manager()
        self.stitcher = Stitcher()
        self.dbl_token = config('DBL_TOKEN')
        self.webhook_auth_token = config('ALICE_WEBHOOK_AUTH_TOKEN')
        self.dblpy = dbl.DBLClient(self.bot,
                                   self.dbl_token,
                                   autopost=True,
                                   webhook_path='/dblwebhook',
                                   webhook_auth=self.webhook_auth_token,
                                   webhook_port=environ.get("PORT", 8000))

    @commands.Cog.listener()
    async def on_dbl_vote(self, data):
        user_id = int(data['user'])
        user = await self.bot.fetch_user(user_id)
        thumbnail_data = self.stitcher.stitch_images(
            f'https://cdn.discordapp.com/avatars/{user.id}/{user.avatar}.png?size=1024',
            './static/images/medal.png')

        try:
            query_string_params = data['query'][1:].split('&')
            param_dict = {
                param.split('=')[0]: int(param.split('=')[1])
                for param in query_string_params
            }
        except Exception as e:
            param_dict = {}
            print(e, param_dict)

        try:
            if param_dict:
                guild_triggered_in = [
                    guild for guild in self.bot.guilds
                    if guild.id == param_dict.get('guild')
                ][0]
                channel_trigged_in = guild_triggered_in.get_channel(
                    param_dict.get('channel'))
                embed = self.manager.create_embed(
                    f'{user.name} has voted!',
                    'Thank you valued patron for supporting alice.'
                    ' Your contribution will not go in vain as I award you the highest prestige I can bestow.'
                    ' Your name will echo through the decades to come, enscribed with the flow of light on alloy whose origins stem from the creation of Earth.'
                    ' Now come hero, accept your award and be off to privilege our world with more of your good deeds.',
                    0xFFA500,
                    'attachment://user_awarded.png', ['Award'],
                    ['You got a one of a kind medallion!'],
                    footer=[
                        f'{user.name}  \u2022  {self.manager.current_time()}',
                        user.avatar_url
                    ])
                await channel_trigged_in.send(embed=embed,
                                              file=discord.File(
                                                  thumbnail_data,
                                                  'user_awarded.png'))

                self.sql_query.update_by_increment('guilds', ['vote_count'],
                                                   ['guild_id'],
                                                   [[param_dict.get('guild')]])

            else:
                embed = self.manager.create_embed(
                    'You voted!',
                    'Thank you valued patron for supporting alice.'
                    ' Your contribution will not go in vain as I award you the highest prestige I can bestow.'
                    ' Your name will echo through the decades to come, enscribed with the flow of light on alloy whose origins stem from the creation of Earth.'
                    ' Now come hero, accept your award and be off to privilege our world with more of your good deeds.',
                    0xFFA500,
                    'attachment://user_awarded.png', ['Award'],
                    ['You got a one of a kind medallion!'],
                    footer=[
                        f'{user.name}  \u2022  {self.manager.current_time()}',
                        user.avatar_url
                    ])
                await user.send(embed=embed,
                                file=discord.File(thumbnail_data,
                                                  'user_awarded.png'))

        except Exception as e:
            print('dbl_vote ' + str(e))

    @commands.Cog.listener()
    async def on_dbl_test(self, data):
        print(data)
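As a cog, this class still needs the standard extension hook before the bot can load it; a minimal sketch (assuming discord.py 1.x, where bot.load_extension calls a module-level setup) is:

def setup(bot):
    bot.add_cog(TopGG(bot))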
Example #31
class Client(object):
    def __init__(self, torrent):
        self.torrent = torrent
        self.torrent_state = 'random'
        self.reactor = Reactor()
        self.reactor_activated = False
        self.peer_id = '-TZ-0000-00000000000'
        self.peers = [] 
        self.decode_torrent_and_setup_pieces()
        self.handshake = self.build_handshake()
        self.setup_tracker()
        self.stitcher = Stitcher(self)
        self.setup_peers()

    def decode_torrent_and_setup_pieces(self):
        f = open(self.torrent, 'r')
        metainfo = B.bdecode(f.read())
        data = metainfo['info']  # Un-bencoded dictionary
        self.info_hash = H.sha1(B.bencode(data)).digest()
        self.announce_url = self.find_http_announce_url(metainfo)
        #self.announce_url = 'http://tracker.ccc.de:6969/announce'
        self.file_name = data['name'] # Dir name if multi, otherwise file name
        self.piece_length = data['piece length']
        if 'files' in data: # Multifile torrent
            self.setup_multi_file_info(data)
        else:
            self.setup_single_file_info(data)
        self.setup_download_directory()
        self.check_if_dload_file_exists()
        self.setup_pieces(self.piece_length, data['pieces'])

    def find_http_announce_url(self, metainfo):
        print metainfo.keys()
#        print metainfo['announce-list']

        if self.is_http_url(metainfo['announce']):
            return metainfo['announce']
        elif 'announce-list' in metainfo.keys():
            for url in metainfo['announce-list']:
                url = url[0]
                if self.is_http_url(url):
                    print url
                    return url
        raise SystemExit('UDP announce urls are not supported. Currently only HTTP is supported.')

    def is_http_url(self, url):
        return 'http://' in url

    def setup_multi_file_info(self, metainfo):
        self.is_multi_file = True
        self.files = metainfo['files'] # dictionary of file lengths + paths
        self.file_length = sum([file_dict['length'] for file_dict in self.files]) # file_length = total # bytes to dload

    def setup_single_file_info(self, metainfo):
        self.is_multi_file = False
        self.file_length = metainfo['length']

    def build_handshake(self):
        logging.info('Building handshake')
        pstr = 'BitTorrent protocol'
        handshake = struct.pack('B' + str(len(pstr)) + 's8x20s20s',
                                # 8x => reserved null bytes
                                len(pstr),
                                pstr,
                                self.info_hash,
                                self.peer_id
                                )
        assert handshake is not None
        assert len(handshake) == 49 + len(pstr)
        logging.info('Handshake constructed.')
        return handshake

    def setup_tracker(self):
        self.tracker = Tracker(self, self.announce_url)

    def setup_peers(self):
        peer_ips = self.tracker.send_request_and_parse_response()
        self.connect_to_peers(peer_ips)

    def connect_to_peers(self, peer_tuples):
        peers = [Peer(ip, port, self) for ip, port in peer_tuples]
        logging.debug('Attempting to connect to peers %s', peer_tuples)
        for peer in peers:
            try:
                if peer.ip == self.get_self_ip():
                    logging.info('Skipping peer; cannot connect to self')
                    continue
                peer.connect()
                peer_handshake = peer.send_and_receive_handshake(self.handshake)
                logging.debug('Handshake returned.')
                if peer.verify_handshake(peer_handshake, self.info_hash):
                    logging.debug('Handshake verified. Adding peer to peer list')
                    self.add_peer(peer)
                    if not self.reactor_activated:
                        self.activate_reactor()
                        self.reactor_activated = True
            except IOError as e:
                logging.warning('Error in construct_peers! %s', e)
        self.manage_requests(5)

    def get_self_ip(self):
        # http://stackoverflow.com/questions/166506/finding-local-ip-addresses-using-pythons-stdlib/166520#166520
        s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        s.connect(('8.8.8.8', 80))
        ip = s.getsockname()[0]
        s.close()
        return ip

    def add_peer(self, peer):
        logging.info('Adding peer %s to peer list (in add_peer)', peer)
        self.peers.append(peer)
        self.reactor.add_peer_socket(peer)

    def activate_reactor(self):
        logging.debug('Activating reactor.')
        self.reactor.get_data()

    def process_raw_hash_list(self, hash_list, size):
        tmp_hash = ''
        piece_hashes = []
        for char in hash_list:
            if len(tmp_hash) < size:
                tmp_hash = tmp_hash + char
            else:
                piece_hashes.append(tmp_hash)
                tmp_hash = char
        piece_hashes.append(tmp_hash)
        return piece_hashes

    def setup_pieces(self, length, hash_bytes):
        hash_list = self.process_raw_hash_list(hash_bytes, 20)
        logging.info('setting up pieces for file length, %s',  length)
        pieces = []
        self.num_pieces = len(hash_list)
        logging.info('dividing up file into %s pieces', self.num_pieces)
        self.bitfield = BitArray(self.num_pieces)
        last_piece_length = self.file_length - (self.num_pieces - 1) * length
        for i in range(self.num_pieces):
            if i == self.num_pieces - 1:
                length = last_piece_length
            pieces.append(Piece(self, i, length, hash_list[i], self.dload_dir))
        self.pieces = pieces
        self.piece_queue = PieceQueue(pieces)

    def setup_download_directory(self):
        dir_name = self.torrent
        if dir_name.endswith('.torrent'):
            dir_name = dir_name[:-8]
        self.dload_dir = os.path.join(os.path.abspath(os.curdir), dir_name)
        try:
            os.makedirs(self.dload_dir)
        except OSError:
            if not os.path.isdir(self.dload_dir):
                raise SystemExit('Cannot create directory to download torrent files into. Please check if a file named ' + dir_name + ' exists') 
                # raise OSError('Cannot create directory to download torrent files to.')

    def check_if_dload_file_exists(self):
        file_path = os.path.join(self.dload_dir, self.file_name)
        if os.path.exists(file_path):
            raise SystemExit('This file has already been downloaded.')
            # Do something to cancel the rest of the setup

    def add_piece_to_queue(self, piece):
        self.piece_queue.put(piece)

    def add_piece_to_bitfield(self, index):
        if not self.bitfield[index]:
            self.bitfield.invert(index)
            self.manage_requests()
        else:
            logging.warning('Should never get save same piece more than once!')

    def add_peer_to_piece_peer_list(self, piece_index, peer):
        # print 'Adding piece', piece_index, 'to peer', peer
        self.pieces[piece_index].add_peer_to_peer_list(peer)

    def manage_requests(self, num_pieces=1):
        logging.info('Sending more piece requests')
        logging.info('Piece queue has %s pieces', self.piece_queue.length())
        if not self.piece_queue.empty():
            self.manage_piece_queue_state()
            for i in xrange(num_pieces):
                self.request_next_piece()
            logging.info('Cleaning up piece queue')
        else:
            # Count outstanding requests to decide when to go into endgame
            self.torrent_state = 'endgame'
            self.start_endgame()

    def start_endgame(self):
        self.blasted_requests = []
        for i in xrange(ENDGAME_MAX_BLASTS):
            self.send_endgame_request()
            
    def send_endgame_request(self):
        block_info = self.select_outstanding_request()
        if block_info:
            self.blasted_requests.append(block_info)
            self.pieces[block_info[0]].request_block_endgame(block_info)

    def select_outstanding_request(self):
        # TODO: Use filter instead of picking at random
        peers_with_requests = filter(lambda peer: len(peer.outstanding_requests) > 0, self.peers)
        if len(peers_with_requests):
            peer = random.choice(peers_with_requests)
            block_info = random.choice(peer.outstanding_requests)
            return block_info

    def manage_piece_queue_state(self):
        # This should probably only get called occasionally
        logging.debug('Have received %s pieces, need %s more', self.bitfield.count(1), self.bitfield.count(0))
        if self.bitfield.count(1) > PIECE_THRESHOLD and self.piece_queue.length() > PIECE_THRESHOLD:
            self.piece_queue.update_piece_order()
            self.torrent_state = 'rarest_first'

    # DISPATCHES TO PIECE
    def request_block(self, block_info):
        piece_index = block_info[0]
        self.pieces[piece_index].request_block(block_info)

    def request_next_piece(self):
        next_piece = self.piece_queue.get_next_piece(self.torrent_state)
        logging.info('Requesting piece %s', next_piece)
        if next_piece:
            try:
                next_piece.request_all_blocks()
            except IndexError as e:
                self.piece_queue.put(next_piece)
                logging.error(e)

    def add_block(self, block_info, block):
        (piece_index, begin, block_length) = block_info
        logging.info('Writing block of length %s at index %s for piece %s',
                block_length, begin, piece_index)
        piece = self.pieces[piece_index]
        logging.info('Piece has index %s', piece.index)
        piece.add_block(begin, block)
        self.tracker.update_download_stats(block_length)
        if self.torrent_state == 'endgame' and block_info in self.blasted_requests:
            piece = self.pieces[block_info[0]]
            piece.cancel_block(block_info, self)
            if self.bitfield.count(0) > 0:
                self.send_endgame_request()
        if self.num_pieces - self.bitfield.count(1) == 0:
            self.finalize_download()

    def finalize_download(self):
        logging.info('Finalizing download')
        if not self.tracker.is_download_complete():
            raise SystemExit('Download didnt complete. Shutting down.')
        self.stitch_files()
        self.tracker.send_completed_msg_to_tracker_server()
        logging.info('Shutting down connection with peers')
        for peer in self.peers:
            peer.close()
        print 'Quitting client'
        logging.info('Download completed. Quitting client.')
        sys.exit()

    def stitch_files(self):
        print 'stitching...'
        logging.info('Wrote all pieces, stitching them together')
        self.stitcher.stitch()
        logging.info('Stitching completed.')

    def finalize_piece(self, piece):
        if piece.check_info_hash():
            logging.debug('Yay! Correct info hash!')
            self.add_piece_to_bitfield(piece.index)
        else:
            logging.debug('Incorrect infohash, starting over with piece %s', piece.index)
            piece.reset()