def main():
    """Save the crawl URL set to an access log, then fan the first 80
    logged URLs out to a multiprocessing worker pool that reports back
    through a single manager queue consumed by a listener task.
    """
    accesslog_path = "/tmp/sohu.log"

    # Load the pending URL set from the project-local queue module.
    e = queue.loadDataSpring()
    urlset = e.job_exectt()

    # Persist every URL, one per line, so the worker stage below can
    # re-read them.  `with` guarantees the handle is closed even when a
    # write raises (the original open()/close() pair leaked it on error).
    with open(accesslog_path, 'wb') as f:
        for i in urlset:
            uri = "http://image.ganjistatic1.com" + i
            f.write(str(uri) + '\n')
            f.flush()

    # must use Manager queue here, or will not work
    manager = mp.Manager()
    q = manager.Queue()
    pool = mp.Pool(mp.cpu_count() + 2)

    # put listener to work first
    watcher = pool.apply_async(listener, (q,))

    # fire off workers
    jobs = []
    try:
        with open(accesslog_path, 'rb') as f:
            for i in range(80):
                sohu_uri = f.readline()
                job = pool.apply_async(worker2, (i, sohu_uri, q))
                jobs.append(job)

            # collect results from the workers through the pool result queue
            for job in jobs:
                job.get()
    except Exception as inst:
        # print() form is valid in both Python 2 and 3 (original used the
        # 2-only statement form).
        print(inst)
        # Legacy sentinel the original also sent on failure -- presumably
        # the listener distinguishes it from 'kill'; TODO confirm.
        q.put('kill123')
    finally:
        # now we are done, kill the listener and reap the pool; join() was
        # missing in the original, which could orphan the listener task.
        q.put('kill')
        pool.close()
        pool.join()
#!/usr/local/bin/python2.7
# encoding: utf-8
'''
Created on 2015年7月12日

@author: a11
'''
import HttpHelper
import queue


if __name__ == '__main__':
    accesslog_path = "/tmp/sohu.log"

    # Load the pending URL set from the project-local queue module.
    e = queue.loadDataSpring()
    urlset = e.job_exectt()

    # Append every URL, one per line, to the access log.  `with`
    # guarantees the handle is closed even when a write raises; the
    # original open()/close() pair leaked the descriptor on error.
    # Note: 'ab' append mode is preserved from the original.
    with open(accesslog_path, 'ab') as f:
        for i in urlset:
            uri = "http://image.ganjistatic1.com" + i
            # uri is already a str, so the original str(uri) was a no-op.
            f.write(uri + '\n')
            f.flush()
    
    # HTTP client for the fetch loop that follows (continues past this view).
    client = HttpHelper.HttpHelper()
    # NOTE(review): this handle is never read -- the same path is re-opened
    # below via `with open(accesslog_path)`, which rebinds `f` and leaks
    # this descriptor.  Confirm and remove this line.
    f = open(accesslog_path, 'rb') 
    
    
    with open(accesslog_path) as f:
        for ganji_uri in f: