Example #1
def main(args):
    if not os.path.exists(args.server_stream):
        Log.error("server_stream file does not exist.")
        return

    Log.info("Start parsing iSmartAlarm diagnotics stream...")

    isap = ISADiagnoticsStreamParser(args.server_stream)
    unstructured_log = isap.get_unstructured_log()
    sensor_log = isap.get_sensor_log()

    with Elastic(index='unstructured_log',
                 doc_type='unstructured_log') as elastic:
        # keep only entries that carry a datetime payload
        datetime_log = [
            log for log in unstructured_log
            if log.get('data_type') == 'datetime'
        ]
        elastic.upload(datetime_log, 'data')

    with Elastic(index='sensor_log', doc_type='sensor_log') as elastic:
        elastic.upload(sensor_log, 'datetime')

    Log.info("Successfully upload server_stream data.")

    del isap
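
The Elastic context manager used in these examples is not part of the excerpts. Below is a minimal sketch of what it might look like, assuming a pre-7.x elasticsearch Python client (where doc_type is still accepted) and assuming that the second argument to upload() names the document field holding the event time; the class body and that convention are guesses, not the project's actual implementation.

from elasticsearch import Elasticsearch, helpers


class Elastic:
    """Hypothetical sketch of the context manager used in these examples."""

    def __init__(self, index, doc_type):
        self.index = index
        self.doc_type = doc_type
        self.es = None

    def __enter__(self):
        self.es = Elasticsearch()  # assumes a local cluster on localhost:9200
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        self.es = None

    def upload(self, documents, timestamp_field):
        # Bulk-index the documents; timestamp_field is assumed to name the
        # field carrying the event time used for the timeline view.
        actions = [{
            '_index': self.index,
            '_type': self.doc_type,
            '_source': doc
        } for doc in documents if doc.get(timestamp_field) is not None]
        helpers.bulk(self.es, actions)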
Example #2
    def parse(self):
        """Parse timeline from database."""
        Log.debug("Extracting activities from database...")

        with sqlite3.connect(self.database) as con:
            con.row_factory = dict_factory
            cur = con.cursor()

            cur.execute("SELECT * FROM TIMELINE")
            activities = cur.fetchall()

        documents = []

        for activity in activities:
            documents.append({
                'time': self.convert_time(activity['date'], activity['time']),
                'user': activity['user'],
                'short': activity['short'],
                'desc': activity['desc'],
                'notes': activity['notes']
            })

        Log.info("Successfully parsed data from database.")

        self.save(documents)
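
The dict_factory row factory set on the connection above is not shown; Example #2 appears to rely on the standard sqlite3 pattern from the Python documentation, which returns each row as a dict keyed by column name:

def dict_factory(cursor, row):
    """Map a sqlite3 row to a dict keyed by column name."""
    return {column[0]: value for column, value in zip(cursor.description, row)}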
Example #3
    def save(self, tables):
        """Save history into elasticsearch."""
        for key, table in tables.items():
            data = table['data']
            if data:
                for entry in data:
                    entry['date'] = to_datetime(entry['date'])

                with Elastic(index=key.lower(),
                             doc_type=key.lower()) as elastic:
                    elastic.upload(data, 'date')

                Log.info(f"Successfully uploaded {key} data into elasticsearch.")
Example #4
    def save(self, data):
        """Save activity into elasticsearch."""
        activities = [json.loads(activity['Json']) for activity in data]

        for activity in activities:
            activity['created_at'] = to_datetime(activity['created_at'])

        with Elastic(index='wink', doc_type='activity') as elastic:
            elastic.upload(activities, 'created_at')

        Log.info(
            "Successfully uploaded wink activity data into elasticsearch.")
Example #5
    def save(self, merge):
        """Convert and save into playable video."""
        Log.info("Converting video file codec format...")

        for video in self.rawvideos:
            os.system(f"ffmpeg -f h264 -r 10 -i {video} -c copy {video.split('.')[0]}.mp4")

            # remove original file
            if os.path.exists(video):
                os.remove(video)

        Log.info("Successfully convert the video file codec.")

        if merge:
            Log.info("Merging videos..")

            videos = '|'.join([video.split('.')[0] + ".mp4" for video in self.rawvideos])
            os.system(f"ffmpeg -f concat -i \"concat:{videos}\" -c copy video.mp4")

            for video in self.rawvideos:
                os.remove(f"{video.split('.')[0]}.mp4")

            Log.info(f"Successfully merged {len(self.rawvideos)} videos.")
Example #6
    def extract(self, merge, frame, add_timeline):
        """Extract frames from database."""
        Log.debug("Extracting videos from database...")

        if frame:
            with sqlite3.connect(self.database) as con:
                cur = con.cursor()
                cur.execute("SELECT frame_time, gop_start_rowid, sps_bytes, pps_bytes, frame_bytes, chunk_complete FROM frame_raw_data_table")
                rows = cur.fetchall()
            sps_bytes = None
            pps_bytes = None
            videobuf = None
            count = 0

            timestamps_by_video = {}
            frames_by_video = {}

            for row in rows:
                (frame_time, gop_start_rowid, _sps_bytes, _pps_bytes,
                 frame_bytes, chunk_complete) = row

                if gop_start_rowid == -1:
                    # set new sps and pps bytes
                    sps_bytes = _sps_bytes
                    pps_bytes = _pps_bytes
                    videobuf = pps_bytes + sps_bytes + frame_bytes
                    timestamps_by_video[count] = [frame_time]
                else:
                    videobuf = videobuf + frame_bytes
                    timestamps_by_video[count].append(frame_time)

                if chunk_complete == 1:
                    frames_by_video[count] = videobuf
                    sps_bytes = None
                    pps_bytes = None
                    videobuf = None
                    count += 1

            if videobuf:
                frames_by_video[count] = videobuf

            for key, buf in frames_by_video.items():
                # save h264 file
                with open(os.path.join(self.output, f'{key}.h264'), 'wb') as f:
                    f.write(buf)

                for i, timestamp in enumerate(timestamps_by_video[key]):
                    os.system(f'ffmpeg -i {self.output}/{key}.h264 -c:v libx264 -filter:v "select=gte(n\\,{i})" -frames:v 1 -f h264 {self.output}/{key}_{i}.h264')
                    os.system(f'ffmpeg -i {self.output}/{key}_{i}.h264 -frames:v 1 -f image2 {self.output}/{self._gen_filename(timestamp)}.png')
                    os.remove(f'{self.output}/{key}_{i}.h264')

                os.remove(f'{self.output}/{key}.h264')
            Log.info(f"Successfully saved image by frame.")

        else:
            with sqlite3.connect(self.database) as con:
                cur = con.cursor()
                cur.execute("SELECT * FROM frame_raw_data_table")
                rows = cur.fetchall()

            videobuf = ""  # temporary buffer for constructing video
            videoname = ""  # name of video file
            count = 0  # video file counter

            for row in rows:
                if row[4]:
                    if videoname:
                        with open(videoname, "wb") as f:
                            f.write(videobuf)
                        self.rawvideos.append(videoname)

                    videobuf = row[5]
                    videobuf += row[4]
                    videobuf += row[6]

                    videoname = os.path.join(self.output, f"{count}.tmp")
                    self.videotimes[videoname] = [row[0]]

                    count += 1
                else:
                    videobuf = videobuf + row[6]

                    if row[0] not in self.videotimes[videoname]:
                        self.videotimes[videoname].append(row[0])

            if videobuf:
                with open(videoname, "wb") as f:
                    f.write(videobuf)
                self.rawvideos.append(videoname)

            Log.info(f"Successfully extrated {count} video files.")

            self.save(merge)

            documents = []

            for filename, runtime in self.videotimes.items():
                start, end = to_datetime(runtime[0]), to_datetime(runtime[-1])
                filename = os.path.basename(filename).replace('tmp', 'mp4')

                documents.append({
                    'start_time': start,
                    'end_time': end,
                    'filename': filename
                })

            # write history as file
            with open(os.path.join(self.output, 'video_list.txt'), 'w') as f:
                for document in documents:
                    f.write(f"{document['filename']}: {document['start_time']} - {document['end_time']}\n")

            # upload to elasticsearch to add timeline entries
            if add_timeline:
                with Elastic(index='nest', doc_type='video') as elastic:
                    elastic.upload(documents, 'start_time')
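
self._gen_filename(timestamp) in the frame branch above is likewise not shown; it presumably turns a frame timestamp into a filesystem-safe image name. A hypothetical standalone stand-in (the real one is a method), reusing the epoch-seconds assumption from the to_datetime sketch:

from datetime import datetime, timezone


def gen_filename(timestamp):
    """Build a filesystem-safe PNG base name from a frame timestamp (assumed epoch seconds)."""
    dt = datetime.fromtimestamp(float(timestamp), tz=timezone.utc)
    return dt.strftime('%Y%m%d_%H%M%S_%f')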
Example #7
    def save(self, documents):
        """Save activity documents into elasticsearch."""
        with Elastic(index='alexa', doc_type='activity') as elastic:
            elastic.upload(documents, 'time')

        Log.info("Successfully uploaded data into elasticsearch.")