Example #1
def uploadFiles(filenames):
  photo_ids = set()
  for filename in filenames:
    # prefix with the run timestamp (module-level ts) so server-side names stay unique
    tempname = ts + '_' + os.path.basename(filename)
    logger.info(tempname)
    # get sha1 to also send for verification
    sha1 = util.hashfile(filename)
    fields = {'files': (tempname, open(filename, 'rb'))}
    fields['sha1'] = sha1
    if args.tags:
      fields['tags'] = args.tags
    if args.photoset:
      fields['photoset'] = args.photoset
    if args.privacy:
      fields['privacy'] = args.privacy
    m = MultipartEncoder(fields=fields)
    r = requests.post(args.apiurl, data=m, headers={'Content-Type': m.content_type})
    logger.info(filename + ' -> ' + tempname + ' finished! ({0} {1})'.format(r.status_code, r.reason))
    photo_ids.add(r.json()['photo_ids'][0])
  return photo_ids
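Every example on this page calls util.hashfile without showing it. A minimal sketch of such a helper, assuming it returns a hex SHA-1 digest (which the sha1 variable names in Examples #1 and #6 suggest) and reads the file in fixed-size chunks to keep memory use flat:

import hashlib

def hashfile(path, blocksize=65536):
    """Hex SHA-1 digest of a file, read in fixed-size chunks."""
    hasher = hashlib.sha1()
    with open(path, 'rb') as f:
        for block in iter(lambda: f.read(blocksize), b''):
            hasher.update(block)
    return hasher.hexdigest()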
Example #2
def main():
  """Main program"""
  logger.info('Starting Import')
  if args.photoset:
    photoset_id = photosetsCreate(args.photoset)

  for filename in args.files:
    # log what we're doing
    logger.info('Processing file %s', filename)

    # set some variables
    dateTaken = getDateTaken(filename)
    fileType = getfileType(os.path.basename(filename))
    sha1 = util.hashfile(filename)

    # insert pic into db
    photo_id = addPhotoToDB(sha1=sha1, fileType=fileType, dateTaken=dateTaken)

    # set photo privacy
    if args.privacy:
      setPhotoPrivacy(photo_id=photo_id, privacy=args.privacy)

    # archive the photo
    archivedPhoto = archivePhoto(filename, sha1, fileType, localArchivePath, args.S3, photo_id)

    # generate thumbnails
    thumbFilenames = util.genThumbnails(sha1, fileType, app.config, regen=args.regen)

    # send thumbnails to S3, unless they were already imported there
    S3success = False
    if args.S3:
      if not checkImportStatusS3(photo_id):
        for thumbFilename in thumbFilenames:
          S3success = aws.uploadToS3(localArchivePath + '/' + thumbFilename, thumbFilename,
                                     app.config, regen=args.regen, policy='public-read')

    # save import meta
    saveImportMeta(photo_id, filename, importSource=args.importsource, S3=S3success)

    # add tags
    if args.tags:
      for tag in args.tags.split(','):
        photosAddTag(photo_id, tag)

    # add dirtags
    if args.dirtags:
      ignoreTags = app.config['IGNORETAGS']
      dirTags(photo_id, filename, ignoreTags)

    # add parent dir tag
    if args.parentdirphotoset:
      parentDirPhotoSet(photo_id, filename)

    # add to photoset
    if args.photoset:
      photosetsAddPhoto(photoset_id, photo_id)

  logger.info('Import Finished')
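Example #2's helpers (getDateTaken, getfileType, addPhotoToDB, and the rest) are defined elsewhere in the script. For illustration only, a plausible getDateTaken built on Pillow's EXIF reader; the real implementation is not shown on this page:

from PIL import Image

def getDateTaken(filename):
    """Best-effort EXIF DateTime ('YYYY:MM:DD HH:MM:SS'); None if unreadable."""
    try:
        exif = Image.open(filename).getexif()
    except OSError:
        return None
    return exif.get(306)  # EXIF tag 306 = DateTime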
Example #3
    def __init__(self, item, event, result_type):
        self.item = item
        self.event = event
        self.result_type = result_type

        abspath = item.path
        if os.path.isfile(abspath):
            h = util.hashfile(abspath)
        elif os.path.isdir(abspath):
            h = util.hashfiles([
                os.path.join(abspath, f) for f in os.listdir(abspath)
                if os.path.isfile(os.path.join(abspath, f))
            ])
        else:
            # path no longer exists (or is a special file); fall back to a sentinel
            h = 0
        self.hash = h
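Examples #3 and #5 also rely on util.hashfiles to digest a set of files at once. A minimal sketch, assuming it folds every file into a single hex SHA-1 and sorts the paths first so os.listdir ordering cannot change the result:

import hashlib

def hashfiles(paths, blocksize=65536):
    """One hex SHA-1 digest covering all the given files, order-independent."""
    hasher = hashlib.sha1()
    for path in sorted(paths):
        with open(path, 'rb') as f:
            for block in iter(lambda: f.read(blocksize), b''):
                hasher.update(block)
    return hasher.hexdigest()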
Example #4
 def sync(self, sock):
     util.send(sock, "ls -l")
     remote_data = util.recv(sock)
     remote_data = json.loads(remote_data)
     client_data = commands.ls(self.basedir, long=True)
     client_data = json.loads(client_data)
     client_filelist = [f["name"] for f in client_data]
     remote_filelist = [f["name"] for f in remote_data]
     cue_download = []
     cue_upload = []
     for file in remote_data:
         if file["name"] not in client_filelist:
             cue_download.append(file["name"])
     for file in client_data:
         if file["name"] not in remote_filelist:
             cue_upload.append(file["name"])
     for file in remote_data:
         if file["name"] not in client_filelist:
             continue
         # do not request hash if file was modified before previous sync
         if file['name'] in self.last_synced and file["mtime"] < self.last_synced[file['name']]:
             continue
         util.send(sock, f"hash {file['name']}")
         hsh = util.recv(sock)
         local_hsh = util.hashfile(os.path.join(self.basedir, file['name']))
         if hsh != local_hsh:  # hashes differ; the newer mtime decides direction
             local_file = [f for f in client_data if f.get('name') == file['name']][0]
             if file['mtime'] > local_file['mtime']:
                 # server updated file after client
                 cue_download.append(file['name'])
             else:
                 # client updated file after server
                 cue_upload.append(file['name'])
         else: # mtime differs but file is in sync as compared by hash
             self.last_synced[file['name']] = time.time()
     for filename in cue_upload:
         util.send(sock, f"upload {filename}")
         file_transfer.upload_file(sock, self.basedir, filename)
         self.last_synced[filename] = time.time()
     for filename in cue_download:
         util.send(sock, f"download {filename}")
         file_transfer.download_file(sock, self.basedir, filename)
         self.last_synced[filename] = time.time()
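The server side of this protocol is not shown anywhere on this page. A sketch of the dispatch loop the client implies, assuming util.send/util.recv exchange whole framed messages and that file_transfer's upload/download helpers mirror each other across the socket:

import os

def serve(sock, basedir):
    # hypothetical counterpart to Example #4's client loop
    while True:
        cmd = util.recv(sock)
        if cmd == "ls -l":
            util.send(sock, commands.ls(basedir, long=True))
        elif cmd.startswith("hash "):
            name = cmd[len("hash "):]
            util.send(sock, util.hashfile(os.path.join(basedir, name)))
        elif cmd.startswith("upload "):
            # the client uploads, so the server receives
            file_transfer.download_file(sock, basedir, cmd[len("upload "):])
        elif cmd.startswith("download "):
            # the client downloads, so the server sends
            file_transfer.upload_file(sock, basedir, cmd[len("download "):])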
Example #5
    def handle_command(self):
        import util
        import glob

        interesting = {
            "-1": "reads1",
            "-2": "reads2",
            "-x": "btindex",
            "--un": "unaligned",
            "-S": "out",
            "-U": "reads",
        }
        meta = {"leftover": []}
        skip = False
        fields = self.cmd_tokens
        for field_i, field in enumerate(fields):
            if skip:
                skip = False
                continue

            if field in interesting:
                try:
                    if field == "-x":
                        # index flag: hash every file in the index set
                        h = util.hashfiles(glob.glob(fields[field_i + 1] + "*"))
                    else:
                        h = util.hashfile(fields[field_i + 1])
                except Exception:
                    # missing value or unreadable file; fall back to a sentinel
                    h = 0
                meta[interesting[field]] = "%s (%s)" % (fields[field_i + 1], h)
                skip = True
                continue

            meta["leftover"].append(field)
        return meta
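The flag set (-1/-2/-x/-S/-U) matches a bowtie2-style invocation. As a usage illustration with hypothetical file names, tokenizing bowtie2 -x idx -1 r1.fq -2 r2.fq -S out.sam would produce roughly:

# self.cmd_tokens = ["bowtie2", "-x", "idx", "-1", "r1.fq", "-2", "r2.fq", "-S", "out.sam"]
# handle_command() -> {
#     "leftover": ["bowtie2"],
#     "btindex":  "idx (<digest of idx*>)",
#     "reads1":   "r1.fq (<digest>)",
#     "reads2":   "r2.fq (<digest>)",
#     "out":      "out.sam (<digest, or 0 if the file does not exist yet>)",
# }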
Example #6
def processPhoto(filename,localSha1='0'):
  # log what we're doing
  logger.info('Processing file %s', filename)

  # set some variables
  dateTaken = getDateTaken(filename)
  fileType = getfileType(os.path.basename(filename))
  sha1 = util.hashfile(filename)

  # check the client-supplied sha1 against the one computed here
  if localSha1 == '0':
    logger.info('no SHA1 sent; skipping verification')
  elif localSha1 != sha1:
    logger.error('SHA1 signatures DO NOT MATCH!')
  else:
    logger.info('SHA1 verified.')

  # insert pic into db
  photo_id = addPhotoToDB(sha1=sha1, fileType=fileType, dateTaken=dateTaken)

  # archive the photo
  archivedPhoto = archivePhoto(filename, sha1, fileType, localArchivePath, uploadToS3, photo_id)

  # generate thumbnails
  thumbFilenames = util.genThumbnails(sha1, fileType, app.config)

  # send thumbnails to S3, unless they were already imported there
  S3success = False
  if not checkImportStatusS3(photo_id):
    for thumbFilename in thumbFilenames:
      S3success = aws.uploadToS3(localArchivePath + '/' + thumbFilename, thumbFilename,
                                 app.config, regen=True, policy='public-read')

  # save import meta
  saveImportMeta(photo_id, filename, importSource=os.uname()[1], S3=S3success, sha1=sha1)
  return photo_id
Example #7
def hashfile(base_dir, filename):
    # resolve relative to base_dir, hash once, log, and return the digest
    filename = os.path.join(base_dir, filename)
    digest = util.hashfile(filename)
    print(digest)
    return digest