def putGeneralInfo(photo_id, metadata, info = None):
    """Fill in the Exif artist and user-comment fields of *metadata*.

    photo_id -- flickr id of the photo (for API lookup and log messages)
    metadata -- Exif metadata object read with contains()/writeMetadata()
    info     -- photo info dict as returned by API.getPhotoInfo();
                fetched from the API when not provided

    Returns the (possibly updated) metadata object.
    """
    owner = contains(metadata, 'Exif.Image.Artist')
    comment = contains(metadata, 'Exif.Photo.UserComment')
    if owner and comment:
        # both fields are already filled in: nothing to do
        Logger().debug("%s already have a owner and a comment" % (photo_id))
        return metadata

    if not info:
        # deferred import: avoids a circular dependency with the api module
        from flickr_download_helper.api import API
        info = API(False).getPhotoInfo(photo_id)

    if not info:
        Logger().warn("Couldn't get info for photo %s" % (photo_id))
        return metadata

    if 'owner' in info and metadata and not owner:
        owner_info = info['owner']
        if 'username' in owner_info and 'realname' in owner_info:
            metadata = writeMetadata(metadata, 'Exif.Image.Artist',
                "%s (%s)" % (encode(owner_info['username']),
                encode(owner_info['realname'])))
        elif 'username' in owner_info:
            metadata = writeMetadata(metadata, 'Exif.Image.Artist',
                "%s" % (encode(owner_info['username'])))
        elif 'realname' in owner_info:
            metadata = writeMetadata(metadata, 'Exif.Image.Artist',
                "(%s)" % (encode(owner_info['realname'])))

    a_userComment = []
    if info.get('title'):
        a_userComment.append('<title>%s</title>' % (encode(info['title'])))

    if info.get('description'):
        a_userComment.append('<description>%s</description>' % (encode(info['description'])))

    if len(info.get('tags', {}).get('tag', [])):
        a_userComment.append('<tags>%s</tags>' % (', '.join(map(encode, info['tags']['tag']))))

    # BUGFIX: the condition was "and comment", which only (re)wrote the user
    # comment when one already existed and never filled in a missing one —
    # the opposite of the early-return logic at the top of this function.
    if a_userComment and not comment:
        # BUGFIX: the fragments were already encode()d when they were built
        # above; encoding them a second time could corrupt non-ascii text.
        metadata = writeMetadata(metadata, 'Exif.Photo.UserComment',
            '<xml>%s</xml>' % ('\n'.join(a_userComment)))

    return metadata
    def fill_opt(self):
        """Parse the requested URL and set the matching OPT.* download options."""
        flickr_api = API()
        Logger().info("\n== retrieve from URL")
        parsed = self.parse()
        kind = parsed[0]

        # parsed[1] is either a flickr user nsid (contains '@') or a raw URL
        if '@' in parsed[1]:
            OPT.user_id = parsed[1]
        else:
            OPT.url = parsed[1]

        if kind == FDHPR.USER:
            # nothing more to configure: user_id/url were set above
            pass
        elif kind == FDHPR.TAG:
            # NOTE(review): the parentheses do not build a tuple here — OPT.tags
            # receives parsed[2] unchanged; confirm whether a tuple was intended.
            OPT.tags = (parsed[2])
            user = flickr_api.getUserFromAll(OPT.url)
            OPT.user_id = user['id']
            OPT.url = None
        elif kind == FDHPR.SET:
            OPT.photoset_id = parsed[2]
        elif kind == FDHPR.COLLECTION:
            OPT.collection_id = parsed[2]
        elif kind == FDHPR.PHOTO:
            OPT.url = OPT.user_id = None
            OPT.photo_ids = (parsed[1])
        elif kind == FDHPR.PROFILE:
            OPT.url = OPT.user_id = None
            Logger().warn("I don't know what to do with that! %s" % (OPT.get_url))
        elif kind == FDHPR.PHOTOSETS:
            OPT.sort_by_photoset = True
        elif kind == FDHPR.GROUP:
            Logger().error("Don't know how to get group")
        elif kind == FDHPR.INGROUP:
            group = flickr_api.searchGroup(parsed[2])
            OPT.group_id = group['id']
        elif kind == FDHPR.SEARCH:
            OPT.search = parsed[2]
        elif kind in (FDHPR.ERROR, FDHPR.ERROR_NOURL, FDHPR.ERROR_NOTFLICKR):
            OPT.url = OPT.user_id = None
            Logger().error("error parsing OPT.get_url : %s" % (kind))
def getContactsPhotos():
    """Download recent photos from all of the user's contacts.

    Builds the contact list (smart mode, explicit OPT.contact_ids, or the
    full flickr contact list), optionally scans shared groups, and calls
    getContactPhotos() once per contact.  Per-user counters are accumulated
    in OPT.has_been_download and summarized at the end.
    """
    api = API()
    Logger().debug("#######################################")

    # get the list of favorites
    setattr(OPT, 'has_been_download', {})
    contacts = []
    no_static_contacts = False

    if OPT.smart:
        # only look at contacts that uploaded something recently
        contacts = [{'nsid': nsid} for nsid in \
            flickr_download_helper.getRecentlyUploadedContacts()]
    elif len(OPT.contact_ids):
        contacts = [{'nsid': nsid} for nsid in OPT.contact_ids]
        no_static_contacts = True
    else:
        contacts = api.getContactList()
        # TODO should keep new added contacts

        if OPT.check_old_contacts:
            # drop contacts previously cached as inactive
            import pickle
            # BUGFIX: use a context manager so the file is closed even if
            # pickle.load raises (the old explicit close() was skipped then).
            with open(OPT.contact_to_remove, 'rb') as f:
                to_remove = pickle.load(f)

            contacts = [contact['nsid'] for contact in contacts]
            contacts = list(set(contacts) - set(to_remove))
            contacts = [{'nsid': nsid} for nsid in contacts]

    Logger().info("will look at %d contacts" % len(contacts))
    INS['failure_level'] = 10

    static_ids = []
    contacts_ids = []
    if not no_static_contacts:
        contacts_ids = getStaticContactList()
        static_ids = list(contacts_ids)
        Logger().info("static contacts %s" % (str(contacts_ids)))
        INS['failure_level'] += len(contacts_ids)

    for c in contacts:
        if OPT.only_collect:
            if c['nsid'] in OPT.only_collect:
                contacts_ids.append(c['nsid'])
        elif c['nsid'] != '52256782@N02': # TODO put the rejected in the conf file
            contacts_ids.append(c['nsid'])

    if OPT.scan_groups:
        INS['put_group_in_session'] = True
        groups = api.getUserGroups(OPT.my_id, page = 1)

        for i, group in enumerate(groups):
            if group['nsid'] in OPT.skiped_group:
                continue

            Logger().warn("scan_group %d/%d" % (i, len(groups)))

            INS['groups'] = {}
            INS['temp_groups'] = {}
            OPT.group_id = group['nsid']

            for contacts_id in contacts_ids:
                OPT.user_id = contacts_id
                ret = getContactPhotos()
                if not ret:
                    # stop scanning this group on the first failure
                    break

            del INS['groups']
            del INS['temp_groups']

    else:
        for contacts_id in contacts_ids:
            OPT.user_id = contacts_id
            ret = getContactPhotos()

            # static contacts may fail without aborting the whole run
            if not ret and contacts_id not in static_ids:
                break

    users = ', '.join(OPT.has_been_download.keys())

    # per-user pair of counters (presumably files downloaded / photos seen —
    # see getContactPhotos), summed over all users
    totals = [0, 0]

    for t in OPT.has_been_download.values():
        totals[0] += t[0]
        totals[1] += t[1]

    if totals[0]:
        Logger().warn("got %i files (%i) for users : %s" % (
            totals[0], totals[1], users))
    else:
        Logger().warn("didn't download anything")

    Logger().debug("#######################################")
This small program can be used to sort a local flickr download directory according to the flickr photoset folders.
"""

import sys
import os
import os.path
import shutil
import traceback
from flickr_download_helper.logger import Logger
from flickr_download_helper.api import API
from flickr_download_helper.config import OPT
from flickr_download_helper.existing import Existing

if __name__ == "__main__":
    # NOTE(review): the matching except/finally for this try is not visible
    # in this fragment — it appears to have been truncated.
    try:
        api = API()

        # the authenticated user whose photos are being sorted
        user = api.getUser()
        user_id = user['id']

        Logger().info("\n== getting user (%s) photoset" % user_id)
        OPT.sort_by_user = True

        photosets = api.getUserPhotosets(user_id)

        existing = Existing(user_id, user['username'])
        photo_dir = os.path.join(OPT.photo_dir, user['username'])
        files = os.listdir(photo_dir)
        # map photo id (filename prefix before the first '_') -> full path
        cache = dict([
            (p.split('_')[0], os.path.join(photo_dir, p)) for p in files])
#!/usr/bin/python

import os

from flickr_download_helper.config import OPT
from flickr_download_helper.api import API

api = API(False)

groups = api.getUserGroups(OPT.my_id, page=1)

to_remove = []
for group in groups:
    total = api.countGroupPhotos(group["nsid"])
    gpath = os.path.join(OPT.groups_full_content_dir, group["nsid"])
    if not os.path.exists(gpath) and total > 20000:
        to_remove.append(group["nsid"])

print ":".join(to_remove)
#!/usr/bin/python

from flickr_download_helper.config import OPT, INS
from flickr_download_helper.logger import Logger

from flickr_download_helper.api import API

import os
import shutil

api = API(False)

INS['put_group_in_session'] = True
groups = api.getUserGroups(OPT.my_id, page = 1)
for pos, grp in enumerate(groups):
    gid = grp['nsid']
    if gid in OPT.skiped_group:
        continue

    Logger().warn("scan_group %d/%d" % (pos, len(groups)))
    api.groupFromScratch(gid)
    # groupFromScratch writes "<group_id>_0"; rename it to its final name
    destination = os.path.join(OPT.groups_full_content_dir, gid)
    shutil.move("%s_0" % destination, destination)
#!/usr/bin/python

from flickr_download_helper.api import API
from flickr_download_helper.config import OPT


# init of the flickr api
api = API()


def encode(string):
    """Best-effort byte encoding: try latin1 first, fall back to utf8.

    Non-string values are stringified with str().
    """
    if not isinstance(string, (str, unicode)):
        return str(string)
    try:
        return string.encode('latin1')
    # BUGFIX: narrowed from a bare except, which also swallowed
    # KeyboardInterrupt/SystemExit and hid unrelated programming errors.
    except UnicodeError:
        return string.encode('utf8')


line = []

c = api.getUserFromID(OPT.user_id, True)

if OPT.getContactFields:
    for field in OPT.getContactFields:
        if field in c:
            line.append(c[field].strip())


print "\t".join(map(encode, line))
Example #8
0
from flickr_download_helper.config import OptConfigReader, OPT
from flickr_download_helper.logger import Logger
from flickr_download_helper.api import API

from flickr_download_helper.html_widget import FDH_page
import cgi

config = OptConfigReader()
config.setup()

## start the logger
Logger().setup()

# init of the flickr api
api = API(False)

###########################################

# read the requested user from the CGI query string, with a default nsid
form = cgi.FieldStorage()
if 'user_id' in form:
    user_id = form['user_id'].value
else:
    user_id = '53753127@N03'

user = api.getUserFromAll(user_id)
username = user['username']

page = FDH_page()
# NOTE(review): ('pouet.css') is just a parenthesized string, not a tuple;
# if init() expects a sequence of stylesheets this should be ('pouet.css',).
page.init(css = ('pouet.css'))
#!/usr/bin/python
"""
Generate the cache of old contacts to be able to only look at active contacts.

"""


import pickle
from datetime import datetime, timedelta
from flickr_download_helper.config import OPT
from flickr_download_helper.api import API

api = API(False)
contacts = api.getContactList()
now = datetime.now()

to_remove = {}

# Cutoff timestamps for each look-back window, keyed by the window length
# in days.  NOTE(review): strftime('%s') (epoch seconds) is a glibc
# extension, not portable — confirm this only runs on Linux.
deltas = {days: (now - timedelta(days=days)).strftime('%s')
          for days in (30, 60, 90, 180)}


def has_photos(nsid, delta):
    """Return True when this contact uploaded at least one photo since *delta*."""
    recent = api.getUserPhotos(nsid, delta, limit=2)
    return len(recent) != 0


for contact in [contact['nsid'] for contact in contacts]:
    for delta in (180, 90, 60, 30):
# read the optional 'user' parameter from the CGI query string
form = cgi.FieldStorage()
user = None
if 'user' in form:
    user = form['user'].value

## Load configuration from file
config = OptConfigReader()
config.setup()

## start the logger
Logger().setup()

###########################

# init of the flickr api
api = API()

if user:
    # BUGFIX: the form value was silently dropped — getUserFromAll() was
    # called without an argument, although the same call elsewhere in the
    # project takes the user id.
    user = api.getUserFromAll(user)
    page.h3(user['username'], onclick='alert("'+user['id']+'");')
    page.a('rss', href='rss.py?user_id=%s'%(user['id']),)
    page.br()
    photos = api.getUserPhotos(user['id'])
    existing = Existing().grepPhotosExists(photos)
    existing_ids = [e['id'] for e in existing]
    urls = api.getPhotoURLFlickr(photos, True, True)
    for id in urls:
        # red border marks photos that already exist locally, blue the rest
        if id in existing_ids:
            style = 'border:1px red solid;'
        else:
            style = 'border:1px blue solid;'
        # NOTE(review): fragment looks truncated — 'style' is never used in
        # the visible code.
#!/usr/bin/python

from flickr_download_helper.api import API
import flickr_download_helper


api = API(False)

# hard-coded sample photo used by this test script
photo_id = '5084025880'

def display(w):
    # w is expected to be an exif tag dict with 'tagspace', 'label' and 'raw'
    print "%s:%s => %s" % (w['tagspace'], w['label'], w['raw'])


# NOTE(review): 'token' is never defined in this fragment, and passing the
# api object as an argument to its own method looks like a leftover from an
# older module-level function signature — as written this raises NameError.
exif = api.getPhotoExif(api, token, photo_id)
print exif



#!/usr/bin/python

from flickr_download_helper.api import API
from flickr_download_helper.config import OptConfigReader, OPT, OptReader
from flickr_download_helper.logger import Logger
from flickr_download_helper.proxy import FDHProxySettings
import os

# init of the flickr api
api = API(False)


def encode(string):
    """Best-effort byte encoding: try latin1 first, fall back to utf8."""
    if not isinstance(string, (str, unicode)):
        return str(string)
    try:
        return string.encode('latin1')
    # BUGFIX: narrowed from a bare except, which also swallowed
    # KeyboardInterrupt/SystemExit and hid unrelated programming errors.
    except UnicodeError:
        return string.encode('utf8')


contacts = api.getContactList()


if OPT.check_old_contacts:
    # drop the contacts previously cached as inactive
    import pickle
    # BUGFIX: use a context manager so the file is closed even if
    # pickle.load raises (there was no close on the error path).
    with open(OPT.contact_to_remove, 'rb') as f:
        to_remove = pickle.load(f)

    # list comprehensions instead of map(lambda ...): same result, clearer
    contacts = [c['nsid'] for c in contacts]
    contacts = list(set(contacts) - set(to_remove))
    contacts = [{'nsid': nsid} for nsid in contacts]
#!/usr/bin/python

from flickr_download_helper.api import API
from flickr_download_helper.config import OPT


api = API(False)

# rebuild the requested group's content from scratch when a group id was
# configured (presumably re-fetches everything — see API.groupFromScratch)
if OPT.group_id:
    api.groupFromScratch(OPT.group_id)