Exemple #1
0
def hash_aa(unique_id: Union[int, float, str],
            log: logging.Logger = None) -> Optional[str]:
    """Return hashed string code for the double unique ids from aa - zz.

  The unique id is fetched from the database and should range from
  1 to 676 values. The hashing is done purely on the ideology of
  python dictionaries.
  This function is suitable for hashing values in range of 00-99.

  Args:
    unique_id: Integer, float or string value from database.
    log: Logger object (default: None, creates one via _log).

  Returns:
    Hashed string from h_676 dictionary.

  Notes:
    Values greater than 676 (or equal to 0) will return None.
  """
    log = _log(__file__) if log is None else log
    # Normalize first: the signature admits str/float, and comparing a raw
    # str against an int raises TypeError in Python 3. float() handles both
    # '12' and '12.0' style database values before truncating to int.
    unique_id = int(float(unique_id))
    try:
        if unique_id > 676:
            raise exceptions.HashValueLimitExceedError
        if unique_id == 0:
            raise exceptions.HashValueHasZeroError
    except exceptions.HashValueLimitExceedError as error:
        log.error(f'HashValueLimitExceedError: {error}')
    except exceptions.HashValueHasZeroError as error:
        log.error(f'HashValueHasZeroError: {error}')
    finally:
        # The return in finally runs even after a logged error; out-of-range
        # keys are absent from h_676 so the lookup yields None.
        return h_676.get(unique_id, None)
Exemple #2
0
def hash_aaa(unique_id: Union[int, float, str],
             log: logging.Logger = None) -> Optional[str]:
    """Return hashed string code for single unique id from aaa - zzz.

  The unique id is fetched from the database and should range from
  1 to 17576 values. Similar to `hash_aa()`, the hashing is done
  purely on the ideology of python dictionaries.
  This function is ideal for covering almost all the possible ranges
  for the customers.

  Args:
    unique_id: Integer, float or string value from database.
    log: Logger object (default: None, creates one via _log).

  Returns:
    Hashed string from h_17k dictionary.

  Notes:
    Values greater than 17576 (or equal to 0) will return None.
  """
    log = _log(__file__) if log is None else log
    # Normalize first: the signature admits str/float, and comparing a raw
    # str against an int raises TypeError in Python 3.
    unique_id = int(float(unique_id))
    try:
        if unique_id > 17576:
            raise exceptions.HashValueLimitExceedError
        if unique_id == 0:
            raise exceptions.HashValueHasZeroError
    except exceptions.HashValueLimitExceedError as error:
        log.error(f'HashValueLimitExceedError: {error}')
    except exceptions.HashValueHasZeroError as error:
        log.error(f'HashValueHasZeroError: {error}')
    finally:
        # The return in finally runs even after a logged error; out-of-range
        # keys are absent from h_17k so the lookup yields None.
        return h_17k.get(unique_id, None)
def trigger_utc_capture(bucket_name: str,
                        order_name: str,
                        start_time: str,
                        end_time: str,
                        camera_timezone: str,
                        camera_address: str,
                        camera_username: str = 'xames3',
                        camera_password: str = 'iamironman',
                        camera_port: Union[int, str] = 554,
                        camera_timeout: Union[float, int] = 30.0,
                        timestamp_format: str = '%H:%M:%S',
                        log: logging.Logger = None) -> str:
  """Starts video recording as per the triggering point.

  Polls once per second until the camera-timezone start time (converted
  to UTC) is reached, then records via start_live_recording().

  Args:
    bucket_name: S3 bucket name.
    order_name: Order name.
    start_time: Recording start time ('%H:%M:%S').
    end_time: Recording end time ('%H:%M:%S').
    camera_timezone: Timezone the start/end times are expressed in.
    camera_address: Camera's IP address.
    camera_username: Camera username.
    camera_password: Camera password.
    camera_port: Camera port number.
    camera_timeout: Maximum time to wait until disconnection occurs.
    timestamp_format: Timestamp for checking the recording start time.
    log: Logger object (default: None).

  Returns:
    Path of the recorded file, or 'RecordingError' when recording failed.
  """
  log = _log(__file__) if log is None else log
  run_date = datetime.now().strftime('%Y-%m-%d')
  _start_time = f'{run_date} {start_time}'
  _start_time = datetime_to_utc(_start_time,
                                camera_timezone,
                                '%Y-%m-%d %H:%M:%S')
  log.info('Video processing engine is scheduled to start '
           f'recording at {_start_time}.')
  while True:
    # '>=' rather than '==': an exact-match comparison can skip the trigger
    # second entirely when the loop drifts (sleep jitter), leaving this poll
    # spinning forever. ISO-formatted timestamps order correctly as strings,
    # matching the check used by trigger_live_capture().
    if str(now()) >= str(_start_time):
      log.info('Video processing engine has started recording.')
      recorded_file = start_live_recording(bucket_name, order_name, start_time,
                                           end_time, camera_address,
                                           camera_username, camera_password,
                                           camera_port, camera_timeout,
                                           timestamp_format, log)
      log.info('Video processing engine has stopped recording.')
      if recorded_file is None:
        return 'RecordingError'
      return recorded_file
    time.sleep(1.0)
Exemple #4
0
def unhash_timestamp(hashed_timestamp: str,
                     timestamp_format: str = '%m%d%y%H%M%S',
                     unix_time: bool = False,
                     log: logging.Logger = None) -> Union[datetime, float]:
    """Returns unhashed timestamp value.

  Returns the unhashed timestamp as per requirement.

  Args:
    hashed_timestamp: Hashed timestamp to unhash.
    timestamp_format: The format of hashed timestamp.
    unix_time: Boolean (default: False) value if unix time to be used.
    log: Logger object (default: None).

  Returns:
    Datetime object or a Unix time (float) value of the hashed time.
  """
    log = _log(__file__) if log is None else log
    # Substitute the hashed (letter) month back into its numeric form.
    # NOTE(review): str.replace substitutes EVERY occurrence of the first
    # character, not just position 0 — if the month letter also appears as
    # the hour hash later in the string, both get replaced. Verify inputs
    # cannot collide, or switch to slicing.
    temp = hashed_timestamp.replace(hashed_timestamp[0],
                                    unhash_a(hashed_timestamp[0], log))
    # Undo the `hour + 1` offset applied by hash_timestamp(); same caveat
    # about replace() acting on all matching characters. NOTE(review): also
    # assumes index 5 still points at the hour hash after the first
    # substitution — confirm the month always unhashes to one character.
    temp = temp.replace(temp[5],
                        str(int(unhash_a(hashed_timestamp[5], log)) - 1))
    if unix_time:
        # mktime interprets the struct_time in the LOCAL timezone.
        return time.mktime(
            datetime.strptime(temp, timestamp_format).timetuple())
    else:
        return datetime.strptime(temp, timestamp_format)
Exemple #5
0
def order_name(store_id: Union[int, float, str],
               area_code: str,
               camera_id: Union[int, float, str],
               timestamp: Optional[datetime] = None,
               log: logging.Logger = None) -> str:
    """Generate an unique order name.

  Builds a unique string out of the order details: zero-padded store id,
  area code, zero-padded camera id and a hashed timestamp.

  Args:
    store_id: Store Id from store_id table from 1 - 99999.
    area_code: Area code from area_id table (p -> Parking lot).
    camera_id: Camera Id from camera_id table from 1 - 99.
    timestamp: Current timestamp (default: None).
    log: Logger object (default: None).

  Returns:
    Unique string based on the order details, or 'ordererror' when an id
    is zero.

  Raises:
    TypeError: If any positional arguments are skipped.
  """
    log = _log(__file__) if log is None else log
    try:
        if store_id == 0 or camera_id == 0:
            raise exceptions.OrderNameZeroError
    except exceptions.OrderNameZeroError as error:
        log.error(f'OrderNameZeroError: {error}')
        return 'ordererror'
    hashed_time = hash_timestamp(timestamp, log)
    return f'{int(store_id):0>5}{area_code}{int(camera_id):0>2}{hashed_time}'
Exemple #6
0
def hash_a(unique_id: Union[int, float, str],
           log: logging.Logger = None) -> Optional[str]:
    """Return hashed string code for single unique id from a - z.

  The unique id is fetched from the database and should range from
  1 to 26 values.
  This function is ideal for hashing hours & months in a timestamp.

  Args:
    unique_id: Integer, float or string value from database.
    log: Logger object (default: None, creates one via _log).

  Returns:
    Hashed string from h_26 dictionary.

  Notes:
    Values greater than 26 (or equal to 0) will return None.
  """
    log = _log(__file__) if log is None else log
    # Normalize first: the signature admits str/float, and comparing a raw
    # str against an int raises TypeError in Python 3.
    unique_id = int(float(unique_id))
    try:
        if unique_id > 26:
            raise exceptions.HashValueLimitExceedError
        if unique_id == 0:
            raise exceptions.HashValueHasZeroError
    except exceptions.HashValueLimitExceedError as error:
        log.error(f'HashValueLimitExceedError: {error}')
    except exceptions.HashValueHasZeroError as error:
        log.error(f'HashValueHasZeroError: {error}')
    finally:
        # The return in finally runs even after a logged error; out-of-range
        # keys are absent from h_26 so the lookup yields None.
        return h_26.get(unique_id, None)
Exemple #7
0
def bucket_name(country_code: str,
                customer_id: Union[int, float, str],
                contract_id: Union[int, float, str],
                order_id: Union[int, float, str],
                log: logging.Logger = None) -> str:
    """Generate an unique bucket name.

  The generated name represents the hierarchy of the stored video.

  Args:
    country_code: 2 letter country code (eg: India -> IN).
    customer_id: Customer Id from customer_id table from 1 - 9999.
    contract_id: Contract Id from contract_id table from 1 - 99.
    order_id: Order Id from order_id table from 1 - 99.
    log: Logger object (default: None).

  Returns:
    Unique string name for S3 bucket, or 'bucketerror' when an id is 0.

  Raises:
    TypeError: If any positional arguments are skipped.
  """
    log = _log(__file__) if log is None else log
    try:
        if customer_id == 0 or contract_id == 0 or order_id == 0:
            raise exceptions.BucketNameZeroError
    except exceptions.BucketNameZeroError as error:
        # Label matches the exception class name (was 'BucketNameError'),
        # consistent with order_name()'s OrderNameZeroError logging.
        log.error(f'BucketNameZeroError: {error}')
        return 'bucketerror'
    else:
        return '{}{:0>4}{:0>2}{:0>2}'.format(hash_country_code(country_code),
                                             int(customer_id),
                                             int(contract_id), int(order_id))
Exemple #8
0
def unhash_country_code(hashed_code: str,
                        log: logging.Logger = None) -> Optional[str]:
    """Return unhashed country code for a hashed value, or None."""
    log = _log(__file__) if log is None else log
    try:
        # Invert the h_country mapping and look up the hashed code in it.
        inverse_lookup = {code: country for country, code in h_country.items()}
        return inverse_lookup[hashed_code]
    except (KeyError, ValueError):
        log.error('KeyError or ValueError was raised.')
        return None
def start_live_recording(bucket_name: str,
                         order_name: str,
                         start_time: str,
                         end_time: str,
                         camera_address: str,
                         camera_username: str = 'xames3',
                         camera_password: str = 'iamironman',
                         camera_port: Union[int, str] = 554,
                         camera_timeout: Union[float, int] = 30.0,
                         timestamp_format: str = '%H:%M:%S',
                         log: logging.Logger = None) -> Optional[str]:
  """Saves videos based on time duration.

  Records the camera feed through ffmpeg until the computed duration (or
  the end-time deadline) is exhausted, compensating for time lost while
  the camera is unreachable, then concatenates the partial files.

  Args:
    bucket_name: S3 bucket name (used in the file/directory names).
    order_name: Order name (used in the file/directory names).
    start_time: Recording start time ('%H:%M:%S').
    end_time: Recording end time ('%H:%M:%S').
    camera_address: Camera's IP address.
    camera_username: Camera username.
    camera_password: Camera password.
    camera_port: Camera port number.
    camera_timeout: Maximum time to wait until disconnection occurs.
    timestamp_format: Format of `start_time` and `end_time`.
    log: Logger object (default: None).

  Returns:
    Path of the concatenated recording, or None when the duration is zero
    or an exception occurred.
  """
  log = _log(__file__) if log is None else log
  run_date = datetime.now().strftime('%Y-%m-%d')
  start_time, end_time = f'{run_date} {start_time}', f'{run_date} {end_time}'
  duration = calculate_duration(start_time, end_time, timestamp_format, True)
  # Absolute UTC deadline after which recording must stop.
  force_close = datetime.strptime(
    end_time, '%Y-%m-%d %H:%M:%S').replace(tzinfo=timezone.utc).timestamp()
  vid_type = video_type(True, True, True)
  temp_path = os.path.join(live,
                           f'{bucket_name}{order_name}_{timestamp_dirname()}')
  if not os.path.isdir(temp_path):
    os.mkdir(temp_path)
  temp_file = os.path.join(temp_path,
                           f'{bucket_name}{order_name}{vid_type}.mp4')
  url = configure_camera_url(camera_address, camera_username,
                             camera_password, int(camera_port))
  slept_duration, idx = 0, 1
  if duration != 0:
    try:
      while True:
        if camera_live(camera_address, camera_port, camera_timeout, log):
          file = filename(temp_file, idx)
          log.info('Recording started for selected camera.')
          # Blocks until ffmpeg finishes recording this segment.
          os.system(ffmpeg_str(url, file, duration, camera_timeout))
          stop_utc = now().replace(tzinfo=timezone.utc).timestamp()
          stop_secs = now().second
          # NOTE(review): '300.0 bytes' looks like the size of an empty
          # ffmpeg stub file (no media recorded) — confirm before relying
          # on this heuristic.
          _old_file = file_size(file)
          old_duration = stop_secs if _old_file == '300.0 bytes' else drn(file)
          # Deduct the already-recorded time plus any time lost while the
          # camera was unreachable.
          duration = duration - old_duration - slept_duration
          slept_duration = 0
          idx += 1
          if (force_close <= stop_utc) or (duration <= 0):
            output = concate_videos(temp_path, delete_old_files=True)
            if output:
              return output
        else:
          log.warning('Unable to record because of poor network connectivity.')
          # Track lost time so it can be trimmed from the remaining duration
          # once the camera comes back online.
          slept_duration += camera_timeout
          log.warning('Compensating lost time & attempting after 30 secs.')
          time.sleep(camera_timeout)
    except Exception as error:
      log.critical(f'Something went wrong because of {error}')
Exemple #10
0
def check_internet(timeout: Union[float, int] = 10.0,
                   log: logging.Logger = None) -> bool:
    """Check the internet connectivity.

  Attempts a TCP connection to the configured ping host.

  Args:
    timeout: Maximum seconds to wait for the connection (default: 10.0).
    log: Logger object (default: None).

  Returns:
    True when the connection succeeded, otherwise False.
  """
    # You can find the reference code here:
    # https://gist.github.com/yasinkuyu/aa505c1f4bbb4016281d7167b8fa2fc2
    log = _log(__file__) if log is None else log
    try:
        # Close the probe connection instead of leaking the socket; the
        # original reference snippet left it open.
        with socket.create_connection((dev.PING_URL, dev.PING_PORT),
                                      timeout=timeout):
            pass
        log.info('Internet connection available.')
        return True
    except OSError:
        pass
    log.warning('Internet connection unavailable.')
    return False
def camera_live(camera_address: str,
                camera_port: Union[int, str] = 554,
                timeout: Union[float, int, str] = 10.0,
                log: logging.Logger = None) -> bool:
    """Check if any camera connectivity is available.

  Attempts a TCP connection to the camera and reports reachability.

  Args:
    camera_address: Camera's IP address.
    camera_port: Camera port number (default: 554).
    timeout: Maximum seconds to wait for the connection (default: 10.0).
    log: Logger object (default: None).

  Returns:
    True when the camera accepted the connection, otherwise False.
  """
    # You can find the reference code here:
    # https://gist.github.com/yasinkuyu/aa505c1f4bbb4016281d7167b8fa2fc2
    log = _log(__file__) if log is None else log
    try:
        timeout = float(timeout)
        camera_port = int(camera_port)
        # Close the probe connection instead of leaking the socket; the
        # original reference snippet left it open.
        with socket.create_connection((camera_address, camera_port),
                                      timeout=timeout):
            pass
        log.info('Camera connected to the network.')
        return True
    except OSError:
        pass
    log.warning('Camera not connected to any network.')
    return False
Exemple #12
0
def hash_timestamp(now: datetime = None, log: logging.Logger = None) -> str:
    """Return converted timestamp.

  Generate 'hashed' timestamp for provided instance in 'MMDDYYHHmmSS',
  where the month and hour fields are hashed to single letters.

  Args:
    now: Current timestamp (default: None).
    log: Logger object (default: None).

  Returns:
    Hashed timestamp in MMDDYYHHmmSS.
  """
    log = _log(__file__) if log is None else log
    if now is None:
        now = datetime.now().replace(microsecond=0)
    month_code = hash_a(now.month, log)
    # Hours run 0-23 but the hash table starts at 1, hence the +1 offset.
    hour_code = hash_a(now.hour + 1, log)
    return (f'{month_code}{now.day:0>2}{str(now.year)[2:]:0>2}'
            f'{hour_code}{now.minute:0>2}{now.second:0>2}')
Exemple #13
0
def unhash_aaa(value: str, log: logging.Logger = None) -> Optional[str]:
    """Return unhashed number from range 1 - 17576.

  Similar to unhash_a(), this function converts the `hashed string`
  value back to it's numeric form.

  Args:
    value: String to be unhashed.
    log: Logger object (default: None).

  Returns:
    Unhashed number as a string, or None on lookup failure.

  Raises:
    KeyError: If an invalid value is passed for unhashing.
    ValueError: If the value to be unhashed is greater than the range.
  """
    log = _log(__file__) if log is None else log
    try:
        # Invert the h_17k mapping and resolve the hashed value through it.
        reverse_lookup = {hashed: number for number, hashed in h_17k.items()}
        return str(reverse_lookup[value])
    except (KeyError, ValueError):
        log.error('KeyError or ValueError was raised.')
        return None
Exemple #14
0
def hash_area_code(area: str, log: logging.Logger = None) -> Optional[str]:
    """Return hashed string code.

  Args:
    area: Area to be hashed.
    log: Logger object (default: None).

  Returns:
    Character representing the area, or None on lookup failure.

  Notes:
    Refer documentation for the area code hashes.

  Raises:
    KeyError: If the key is not found.
    ValueError: If the value is not found.
  """
    log = _log(__file__) if log is None else log
    try:
        # Invert the h_area mapping so the area name resolves to its code.
        reverse_lookup = {name: code for code, name in h_area.items()}
        return reverse_lookup[area]
    except (KeyError, ValueError):
        log.error('KeyError or ValueError was raised.')
        return None
def trigger_live_capture(bucket_name: str,
                         order_name: str,
                         start_time: str,
                         end_time: str,
                         camera_address: str,
                         camera_username: str = 'xames3',
                         camera_password: str = 'iamironman',
                         camera_port: Union[int, str] = 554,
                         camera_timeout: Union[float, int] = 30.0,
                         timestamp_format: str = '%H:%M:%S',
                         log: logging.Logger = None) -> Optional[str]:
  """Starts video recording as per the triggering point."""
  log = _log(__file__) if log is None else log
  scheduled = f"{datetime.now().strftime('%Y-%m-%d')} {start_time}"
  # Poll once per second until the scheduled timestamp is reached; ISO
  # formatted timestamps compare correctly as strings.
  while str(now()) < scheduled:
    time.sleep(1.0)
  return start_live_recording(bucket_name, order_name, start_time, end_time,
                              camera_address, camera_username, camera_password,
                              camera_port, camera_timeout, timestamp_format,
                              log)
def redact_faces(file: str,
                 use_ml_model: bool = True,
                 smooth_blur: bool = True,
                 resize: bool = True,
                 resize_width: int = 640,
                 debug_mode: bool = True,
                 log: logging.Logger = None) -> Optional[str]:
    """Apply face redaction in video using CaffeModel.

    Walks the video frame-by-frame, detects faces (the ML detector when
    `use_ml_model` is True, otherwise an OpenCV Haar cascade), obscures
    each detection with a Gaussian blur or pixelation, writes per-second
    detection counts to a CSV file and re-encodes the output with libx264.

    Args:
      file: Path of the video to redact.
      use_ml_model: Use the ML face detector instead of the Haar cascade.
      smooth_blur: Gaussian-blur detected faces; pixelate them when False.
      resize: Rescale each frame to `resize_width` before processing.
      resize_width: Target frame width in pixels when `resize` is True.
      debug_mode: Draw bounding boxes and show a live preview window.
      log: Logger object (default: None).

    Returns:
      Path of the redacted video file, or None if an exception occurred.
    """
    log = _log(__file__) if log is None else log

    x0, y0, x1, y1 = 0, 0, 0, 0
    boxes, temp_csv_entries = [], []
    # Maps a second-of-video timestamp to the detection counts seen during
    # that second.
    face_count = {}

    # Working directory named after the source file, created alongside it.
    directory = os.path.join(os.path.dirname(file), f'{Path(file).stem}')

    if not os.path.isdir(directory):
        os.mkdir(directory)

    temp_file = os.path.join(directory, f'{Path(file).stem}_redact.mp4')

    if debug_mode:
        log.info('Debug mode - Enabled.')

    log.info(f'Redacting faces from "{os.path.basename(file)}".')

    try:
        stream = cv2.VideoCapture(file)
        fps = stream.get(cv2.CAP_PROP_FPS)
        width, height = (int(stream.get(cv2.CAP_PROP_FRAME_WIDTH)),
                         int(stream.get(cv2.CAP_PROP_FRAME_HEIGHT)))

        if resize:
            # Preserve the aspect ratio while clamping the width.
            width, height = resize_width, int(height *
                                              (resize_width / float(width)))

        save = cv2.VideoWriter(filename(temp_file, 1),
                               cv2.VideoWriter_fourcc(*'mp4v'), fps,
                               (width, height))

        while True:
            valid_frame, frame = stream.read()

            if not valid_frame:
                break

            if frame is None:
                break

            if resize:
                frame = rescale(frame, resize_width)

            height, width = frame.shape[:2]

            if use_ml_model:
                # The detector expects RGB input; OpenCV decodes frames BGR.
                rgb = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
                faces = face_detector.detect_faces(rgb)

                for face_idx in faces:
                    # Considering detections which have confidence score higher than the
                    # set threshold.
                    if face_idx['confidence'] > 0.75:
                        # 'box' is (x, y, width, height); convert it to
                        # corner coordinates, clamping negatives to 0.
                        x0, y0, x1, y1 = face_idx['box']
                        x0, y0 = abs(x0), abs(y0)
                        x1, y1 = x0 + x1, y0 + y1

                        face = frame[y0:y1, x0:x1]

                        if debug_mode:
                            draw_bounding_box(frame, (x0, y0), (x1, y1),
                                              color.red)
                        try:
                            if smooth_blur:
                                frame[y0:y1, x0:x1] = cv2.GaussianBlur(
                                    frame[y0:y1, x0:x1], (21, 21), 0)
                            else:
                                frame[y0:y1, x0:x1] = pixelate(face)
                        except Exception:
                            # Degenerate/empty ROIs can make the blur fail;
                            # skip redaction for that detection.
                            pass

                    boxes.append([x1, y1])
                    face_occurence = s2d(
                        int(stream.get(cv2.CAP_PROP_POS_MSEC) / 1000))

                    if face_occurence not in face_count.keys():
                        face_count[face_occurence] = []

                    face_count[face_occurence].append(len(boxes))
            else:
                face_cascade = cv2.CascadeClassifier(frontal_haar)
                gray_frame = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
                faces = face_cascade.detectMultiScale(gray_frame, 1.3, 5)

                # detectMultiScale returns (x, y, width, height) rectangles,
                # hence the (x0 + x1, y0 + y1) corner arithmetic below.
                for (x0, y0, x1, y1) in faces:
                    if debug_mode:
                        draw_bounding_box(frame, (x0, y0), (x0 + x1, y0 + y1),
                                          color.red)
                    try:
                        if smooth_blur:
                            frame[y0:(y0 + y1),
                                  x0:(x0 + x1)] = cv2.GaussianBlur(
                                      frame[y0:(y0 + y1), x0:(x0 + x1)],
                                      (21, 21), 0)
                        else:
                            frame[y0:(y0 + y1),
                                  x0:(x0 + x1)] = pixelate(frame[y0:(y0 + y1),
                                                                 x0:(x0 + x1)])
                    except Exception:
                        # Degenerate/empty ROIs can make the blur fail;
                        # skip redaction for that detection.
                        pass
                    boxes.append([x1, y1])
                    face_occurence = s2d(
                        int(stream.get(cv2.CAP_PROP_POS_MSEC) / 1000))

                    if face_occurence not in face_count.keys():
                        face_count[face_occurence] = []

                    face_count[face_occurence].append(len(boxes))

            # Reset the per-frame detections once accounted for.
            boxes = []
            save.write(frame)

            if debug_mode:
                cv2.imshow('Video Processing Engine - Redaction', frame)

            # 27 == ESC key; lets the operator abort the preview loop.
            if cv2.waitKey(1) & 0xFF == int(27):
                break

        stream.release()
        save.release()
        cv2.destroyAllWindows()

        with open(os.path.join(directory, f'{Path(file).stem}.csv'),
                  'a',
                  encoding=dev.DEF_CHARSET) as csv_file:
            log.info('Logging detections into a CSV file.')
            _file = csv.writer(csv_file, quoting=csv.QUOTE_MINIMAL)
            _file.writerow(['Max no. of detections per second', 'Time frame'])
            # One row per second of video: the peak detection count.
            temp_csv_entries = [(max(v), k) for k, v in face_count.items()]
            _file.writerows(temp_csv_entries)

        log.info('Applying H264 encoding for bypassing browser issues.')
        os.system(
            f'ffmpeg -loglevel error -y -i {filename(temp_file, 1)} -vcodec '
            f'libx264 {temp_file}')

        return temp_file
    except Exception as error:
        log.critical(f'Something went wrong because of {error}')
Exemple #17
0
def live(bucket_name: str,
         order_name: str,
         run_date: str,
         start_time: str,
         end_time: str,
         camera_address: str,
         camera_username: str = 'xames3',
         camera_password: str = 'iamironman',
         camera_port: Union[int, str] = 554,
         camera_timeout: Union[float, int, str] = 30.0,
         timestamp_format: str = '%H:%M:%S',
         log: logging.Logger = None) -> Optional[str]:
    """Record live videos based on time duration using FFMPEG.

  Args:
    bucket_name: S3 bucket name.
    order_name: Order name.
    run_date: Date when to record the video.
    start_time: Time when to start recording the video.
    end_time: Time when to stop recording the video.
    camera_address: Camera's IP address.
    camera_username: Camera username.
    camera_password: Camera password.
    camera_port: Camera port number.
    camera_timeout: Maximum time to wait until disconnection occurs.
    timestamp_format: Timestamp for checking the recording start time.
    log: Logger object.

  Returns:
    Path of the concatenated recording, or None when the duration is zero
    or an exception occurred.
  """
    log = _log(__file__) if log is None else log

    camera_port = int(camera_port)
    camera_timeout = float(camera_timeout)

    start_time, end_time = f'{run_date} {start_time}', f'{run_date} {end_time}'
    duration = calculate_duration(start_time, end_time, timestamp_format, True)
    # Absolute UTC deadline after which recording must stop.
    force_close = datetime.strptime(end_time, '%Y-%m-%d %H:%M:%S')
    force_close = force_close.replace(tzinfo=timezone.utc).timestamp()

    vid_type = video_type(True, True, True)
    temp = os.path.join(_lr, f'{bucket_name}{order_name}')

    if not os.path.isdir(temp):
        os.mkdir(temp)
    temp_file = os.path.join(temp, f'{bucket_name}{order_name}{vid_type}.mp4')

    url = configure_camera_url(camera_address, camera_username,
                               camera_password, camera_port)
    slept_duration, idx = 0, 1

    if duration != 0:
        try:
            while True:
                if camera_live(camera_address, camera_port, camera_timeout,
                               log):
                    file = filename(temp_file, idx)
                    log.info('Recording started for selected camera.')
                    # Blocks until ffmpeg finishes recording this segment.
                    os.system(ffmpeg_str(url, file, duration, camera_timeout))

                    stop_utc = now().replace(tzinfo=timezone.utc).timestamp()
                    stop_secs = now().second

                    # NOTE(review): '300.0 bytes' looks like the size of an
                    # empty ffmpeg stub file (no media recorded) — confirm
                    # before relying on this heuristic.
                    _old_file = file_size(file)
                    old_duration = stop_secs if _old_file == '300.0 bytes' else drn(
                        file)
                    # Deduct the already-recorded time plus any time lost
                    # while the camera was unreachable.
                    duration = duration - old_duration - slept_duration

                    slept_duration = 0
                    idx += 1
                    if (force_close <= stop_utc) or (duration <= 0):
                        output = concate_videos(temp, delete_old_files=True)
                        if output:
                            return output
                else:
                    log.warning(
                        'Unable to record because of poor network connectivity.'
                    )
                    # Track lost time so it can be trimmed from the remaining
                    # duration once the camera comes back online.
                    slept_duration += camera_timeout
                    log.warning(
                        'Compensating lost time & attempting after 30 secs.')
                    time.sleep(camera_timeout)
        except Exception as error:
            log.critical(f'Something went wrong because of {error}')
Exemple #18
0
def track_motion(file: str,
                 precision: int = 1500,
                 resize: bool = True,
                 resize_width: int = 640,
                 debug_mode: bool = True,
                 log: logging.Logger = None) -> Optional[str]:
    """Track motion in the video using Background Subtraction method.

    Compares every frame against the first captured frame, records clips
    around detected motion with a KeyClipWriter, concatenates the clips,
    logs detection metadata to a CSV file and re-encodes with libx264.

    Args:
      file: Path of the video to analyze.
      precision: Minimum contour area (pixels) treated as real motion.
      resize: Rescale frames to `resize_width` before processing.
      resize_width: Target frame width in pixels when `resize` is True.
      debug_mode: Draw bounding boxes and show a live preview window.
      log: Logger object (default: None).

    Returns:
      Path of the motion-only video, the original file when no motion
      clips were produced, or None if an exception occurred.
    """
    log = _log(__file__) if log is None else log
    # Buffers 32 frames so each clip keeps context before the motion starts.
    kcw = KeyClipWriter(bufSize=32)
    consec_frames, x0, y0, x1, y1 = 0, 0, 0, 0, 0
    boxes, temp_csv_entries = [], []
    directory = os.path.join(os.path.dirname(file), f'{Path(file).stem}')
    if not os.path.isdir(directory):
        os.mkdir(directory)
    temp_file = os.path.join(directory, f'{Path(file).stem}_motion.mp4')
    idx = 1
    if debug_mode:
        log.info('Debug mode - Enabled.')
    log.info(f'Analyzing motion for "{os.path.basename(file)}".')
    try:
        stream = cv2.VideoCapture(file)
        fps = stream.get(cv2.CAP_PROP_FPS)
        first_frame = None
        while True:
            valid_frame, frame = stream.read()
            if not valid_frame:
                break
            if frame is None:
                break
            if resize:
                frame = rescale(frame, resize_width)
            update_frame = True
            # Grayscale + blur suppresses pixel noise before differencing.
            gray_frame = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
            gray_frame = cv2.GaussianBlur(gray_frame, (21, 21), 0)
            if first_frame is None:
                # The first frame acts as the static background reference.
                first_frame = gray_frame
                continue
            frame_delta = cv2.absdiff(first_frame, gray_frame)
            threshold = cv2.threshold(frame_delta, 25, 255,
                                      cv2.THRESH_BINARY)[1]
            threshold = cv2.dilate(threshold, None, iterations=2)
            contours = cv2.findContours(threshold.copy(), cv2.RETR_EXTERNAL,
                                        cv2.CHAIN_APPROX_SIMPLE)
            contours = imutils.grab_contours(contours)
            for contour in contours:
                # Ignore contours smaller than the precision threshold.
                if cv2.contourArea(contour) < precision:
                    continue
                if debug_mode:
                    (x0, y0, x1, y1) = cv2.boundingRect(contour)
                    draw_bounding_box(frame, (x0, y0), (x0 + x1, y0 + y1))
                consec_frames = 0
                if not kcw.recording:
                    kcw.start(filename(temp_file, idx),
                              cv2.VideoWriter_fourcc(*'mp4v'), fps)
                    idx += 1
                boxes.append([x1, y1])
                status = motion_meta(len(boxes),
                                     stream.get(cv2.CAP_PROP_POS_MSEC))
                # log.info(status)
                temp_csv_entries.append(status)
            boxes = []
            if update_frame:
                consec_frames += 1
            kcw.update(frame)
            # Stop the clip once a full buffer of motionless frames passed.
            if kcw.recording and consec_frames == 32:
                log.info(
                    'Extracting buffered portion of video with detected motion.'
                )
                kcw.finish()
            if debug_mode:
                cv2.imshow('Video Processing Engine - Motion Detection', frame)
            # 27 == ESC key; disconnects the stream and ends the preview.
            if cv2.waitKey(1) & 0xFF == int(27):
                disconnect(stream)
        if kcw.recording:
            kcw.finish()
        if len(os.listdir(directory)) < 1:
            # No motion clips were produced; fall back to the original file.
            return file
        concate_temp = concate_videos(directory, delete_old_files=True)
        with open(os.path.join(directory, f'{Path(file).stem}.csv'),
                  'a',
                  encoding=dev.DEF_CHARSET) as csv_file:
            log.info('Logging detections into a CSV file.')
            _file = csv.writer(csv_file,
                               delimiter='\n',
                               quoting=csv.QUOTE_MINIMAL)
            _file.writerow(temp_csv_entries)
        if concate_temp:
            if os.path.isfile(concate_temp):
                log.info(
                    'Applying H264 encoding for bypassing browser issues.')
                os.system(
                    f'ffmpeg -loglevel error -y -i {concate_temp} -vcodec '
                    f'libx264 {temp_file}')
                log.info('Cleaning up archived files.')
                os.remove(concate_temp)
                return temp_file
    except Exception as error:
        log.critical(f'Something went wrong because of {error}')
Exemple #19
0
import time
from datetime import datetime, timezone
from typing import Union

import pika

from video_processing_engine.utils.boto_wrap import access_file_update
from video_processing_engine.utils.common import datetime_to_utc, now
from video_processing_engine.utils.common import seconds_to_datetime as s2d
from video_processing_engine.utils.fetch import (download_from_azure,
                                                 download_from_google_drive,
                                                 download_using_ftp)
from video_processing_engine.utils.logs import log as _log
from video_processing_engine.utils.paths import downloads

# Module-level fallback logger shared by the functions in this module.
log = _log(__file__)


def pika_connect():
    """Open a blocking RabbitMQ connection and declare the transfer queue.

    Returns:
      Channel with the 'uat_file-transfer-Q' queue declared.
    """
    import os  # Local import; keeps the module's top-level imports untouched.

    # SECURITY: broker credentials and host were hard-coded. Allow
    # environment-variable overrides while keeping the previous values as
    # defaults, so existing deployments behave identically.
    credentials = pika.PlainCredentials(
        os.environ.get('PIKA_USERNAME', 'test'),
        os.environ.get('PIKA_PASSWORD', 'inpoint20200318'))
    connection = pika.BlockingConnection(
        pika.ConnectionParameters(host=os.environ.get('PIKA_HOST',
                                                      '159.89.52.183'),
                                  credentials=credentials,
                                  virtual_host='testvm'))
    channel = connection.channel()
    channel.queue_declare(queue='uat_file-transfer-Q')
    return channel


def compute(json_obj: Union[bytes, str]):
    try: