def getGCSMp4(googleServices, settings, hpwrenSource, qNum):
    """Extract images from Q MP4 video into GCS folder

    Args:
        googleServices (): Google services and credentials
        settings (): settings module
        hpwrenSource (dict): Dictionary containing various HPWREN source information
        qNum (int): Q number (1-8) where each Q represents 3 hour period

    Returns:
        list of files in GCS bucket with metadata
    """
    parsedFfmpegDir = goog_helper.parseGCSPath(settings.ffmpegFolder)
    bucket = parsedFfmpegDir['bucket']
    folderName = '{}__{}Q{}'.format(hpwrenSource['cameraID'], hpwrenSource['dateDirName'], qNum)
    folderPath = parsedFfmpegDir['name'] + '/' + folderName

    files = goog_helper.listBucketEntries(bucket, prefix=(folderPath + '/'))
    logging.warning('Found %d GCS files', len(files))
    if not files:
        # Nothing extracted yet: invoke the ffmpeg Cloud Function to decode
        # the MP4 into individual frames, then list the folder again.
        logging.warning('Calling Cloud Function for folder %s', folderName)
        uploadDir = goog_helper.repackGCSPath(bucket, folderPath)
        gcfRes = gcfFfmpeg(settings.ffmpegUrl, googleServices, hpwrenSource, qNum, uploadDir)
        logging.warning('Cloud function result %s', gcfRes)
        files = goog_helper.listBucketEntries(bucket, prefix=(folderPath + '/'))

    def entryForFile(filePath):
        # One metadata record per extracted frame: capture time parsed from
        # the filename, full GCS path, and bare filename.
        fileName = filePath.split('/')[-1]
        return {
            'time': parseFilename(fileName)['unixTime'],
            'id': goog_helper.repackGCSPath(bucket, filePath),
            'name': fileName
        }

    return [entryForFile(filePath) for filePath in files]
def loadModel(modelPath):
    """Load from given keras model

    Args:
        modelPath (str): path to model dir

    Returns:
        Model object
    """
    parsedGCS = goog_helper.parseGCSPath(modelPath)
    if not parsedGCS:
        # Plain local path: load directly.
        return tf.keras.models.load_model(modelPath)
    # Model lives on GCS: mirror the directory into a local temp dir first.
    # NOTE(review): the temp dir is deleted once tmpDir is garbage-collected
    # after this function returns — assumes load_model reads everything
    # eagerly; confirm if lazy loading is ever used.
    tmpDir = tempfile.TemporaryDirectory()
    goog_helper.downloadBucketDir(parsedGCS['bucket'], parsedGCS['name'], tmpDir.name)
    return tf.keras.models.load_model(tmpDir.name)
def downloadGCSFileAtTime(outputDir, closestEntry):
    """Download HPWREN image from GCS folder from ffmpeg Google Cloud Function

    Args:
        outputDir (str): Output directory path
        closestEntry (dict): Desired timestamp and GCS file

    Returns:
        Local filesystem path to downloaded image
    """
    localPath = os.path.join(outputDir, closestEntry['name'])
    logging.warning('Local file %s', localPath)
    # Skip the network fetch when the file is already cached locally.
    if os.path.isfile(localPath):
        logging.warning('File %s already downloaded', localPath)
        return localPath
    gcsLoc = goog_helper.parseGCSPath(closestEntry['id'])
    goog_helper.downloadBucketFile(gcsLoc['bucket'], gcsLoc['name'], localPath)
    return localPath
def __init__(self, args, dbManager, minusMinutes, stateless, modelLocation=None):
    """Initialize detector state and load the inference model.

    Args:
        args: parsed command-line arguments
        dbManager: database access object
        minusMinutes: time offset for background-subtraction frames
        stateless: whether detection runs without persisted state
        modelLocation (str): optional model path/GCS URL; falls back to
            settings.model_file when not given
    """
    self.args = args
    self.dbManager = dbManager
    self.minusMinutes = minusMinutes
    self.stateless = stateless
    modelLocation = modelLocation or settings.model_file
    # Model ID is the last two path components of the model location.
    self.modelId = '/'.join(modelLocation.split('/')[-2:])
    # If the model is stored on GCS, mirror it into a local temp dir first.
    parsedGCS = goog_helper.parseGCSPath(modelLocation)
    if parsedGCS:
        tmpDir = tempfile.TemporaryDirectory()
        # NOTE(review): tmpDir is a local, so the directory is removed when it
        # is garbage-collected after __init__ returns — assumes the load below
        # reads the model eagerly; confirm.
        goog_helper.downloadBucketDir(parsedGCS['bucket'], parsedGCS['name'], tmpDir.name)
        modelLocation = tmpDir.name
    # testMode / useFrozen are presumably module-level flags; not visible in
    # this chunk — verify against the rest of the file.
    if testMode:
        self.model = None
    elif useFrozen:
        self.model = tf_helper.loadFrozenModelTf2(modelLocation)
    else:
        self.model = tf_helper.loadModel(modelLocation)