Exemplo n.º 1
0
def upload_photo():
    """Upload a photo to Qiniu and return a private (signed) download URL."""
    from qiniu import Auth
    from qiniu import put_file

    global config
    # Progress callback required by put_file's signature; progress is not tracked here.
    progress_handler = lambda progress, total: progress

    # NOTE(review): .content is the raw response body; treating it as a local
    # file path below assumes the endpoint returns the shot photo's path — TODO confirm.
    photo_path = http_get("http://127.0.0.1:9876/photo/shot").content
    # Upload to qiniu
    mime_type = "image/jpeg"
    auth = Auth(str(config["qiniu"]["api_key"]), str(config["qiniu"]["secret"]))
    print auth
    filename = os.path.basename(photo_path)
    print "filename: ", filename, type(filename)
    token = auth.upload_token(str(config["qiniu"]["bucket"]))
    print token
    ret, info = put_file(token, filename, photo_path, {}, mime_type, progress_handler=progress_handler)
    print "uploaded: ", ret, info
    # Best-effort removal of the local copy once uploaded; failure is ignored.
    try:
        os.remove(photo_path)
    except Exception:
        pass

    # Return URL
    base_url = "{}/{}".format(str(config["qiniu"]["domain"]), filename)
    return auth.private_download_url(base_url, expires=3600)
Exemplo n.º 2
0
class QiniuPro():
    """Thin wrapper around the Qiniu Auth client for signing private URLs."""

    def __init__(self):
        # Credentials come from the module-level ACCESS_KEY / SECRET_KEY.
        self.auth = Auth(ACCESS_KEY, SECRET_KEY)

    def download_private_url(self, key, expires=7200):
        """Return a signed download URL for *key*, valid for *expires* seconds."""
        return self.auth.private_download_url('%s/%s' % (DOMAIN, key), expires)
Exemplo n.º 3
0
def get_download_file_url(file_name):
    """Return a signed download URL for *file_name*, cached for one day."""

    cache_key = '_'.join((settings.CACHE_PREFIX, file_name))
    cached = cache.get(cache_key)
    if cached:
        return cached

    signer = Auth(settings.QINIU_KEY, settings.QINIU_TOKEN)
    private_url = signer.private_download_url(
        'http://%s/%s' % (settings.QINIU_HOST, file_name), expires=3600*24)

    # Cache for the same period the URL is valid.
    cache.set(cache_key, private_url, 3600*24)

    return private_url
Exemplo n.º 4
0
def GetQiniuDlToken(key):
    """Return (True, signed_url) for *key*, or (False, error message) on failure."""

    # NOTE(review): `is ''` is an identity test that only works by accident of
    # CPython string interning; equality (`==`) would be the safe check.
    if key is '' or key is None:
        return False,'文件名为空'
    # Authorize with the developer account credentials (module-level keys).
    q = Auth(access_key, secret_key)
    private_url = ''
    # Build the base URL of the private resource.
    base_url = 'http://%s/%s' % (bucket_url, key)
    try:
        # Sign the URL (valid for one hour).
        private_url = q.private_download_url(base_url, 3600)
        return True,private_url
    except:
        print '第三方获取上传凭证函数异常'
        return False,private_url
Exemplo n.º 5
0
def test_fetch():
    """Stat an object, download it via a signed URL, then fetch it back into the bucket."""
    auth = Auth(access_key, secret_key)
    manager = BucketManager(auth)

    ret, info = manager.stat(bucket, mykey)
    print(info)
    print(ret)

    bucket_domain = '7xpb8s.com1.z0.glb.clouddn.com'
    signed_url = auth.private_download_url(
        'http://%s/%s' % (bucket_domain, mykey), expires=3600)
    print(signed_url)
    response = requests.get(signed_url)
    assert response.status_code == 200

    # Re-fetch the signed URL back into the bucket under the same key.
    ret, info = manager.fetch(signed_url, bucket, mykey)
    print(info)
    print(ret)
Exemplo n.º 6
0
def list_file(username, path): # ok
    """Return a JSON map of file name -> signed download URL for *username* under *path*."""
    matches = User.objects.filter(username=username)
    if not bool(matches):
        return False
    user = matches[0]

    signer = Auth(settings.QINIU_ACCESS_KEY, settings.QINIU_SECRET_KEY)  # authorize
    listing = {}
    for entry in FileInfo.objects.filter(owner=user, file_path=path):
        # Directories get no download URL (sub-directory listings are fetched separately).
        if entry.file_type == 'dir':
            listing[entry.file_name] = ""
            continue
        if path == '/':
            path = ""
        target = ''.join([settings.QINIU_DOMAIN, username, path, '/', entry.file_name])
        listing[entry.file_name] = signer.private_download_url(target, expires=3600)
    return json.dumps(listing, ensure_ascii=False, encoding="utf-8", sort_keys=True)
Exemplo n.º 7
0
def qiniu_private(base_url):
    """
    Return a signed URL for a private Qiniu resource, with caching.
    """
    cache_key = 'st:qiniu:' + base_url

    hit = cache.get(cache_key)
    if hit:
        return hit

    signer = Auth(get_qiniu_config('QINIU_ACCESS_KEY'),
                  get_qiniu_config('QINIU_SECRET_KEY'))
    # Expiry is configurable via settings.QINIU_PREVIEW_EXPIRE (default one hour).
    expire = getattr(settings, 'QINIU_PREVIEW_EXPIRE', 3600)
    private_url = signer.private_download_url(base_url, expires=expire)

    # Cache slightly shorter than the URL's lifetime so a stale link is never served.
    cache.set(cache_key, private_url, timeout=max(10, expire - 10))

    return private_url
Exemplo n.º 8
0
    def content_process(self, content):
        """Render raw post content into HTML-ish markup.

        Expands gist links into script tags, inlines sinaimg images, signs and
        inlines private Qiniu images, linkifies @mentions, and embeds youku
        videos.  Returns the transformed content string.
        """
        # render content included gist
        content = re.sub(r'http(s)?:\/\/gist.github.com\/(\d+)(.js)?', r'<script src="http://gist.github.com/\2.js"></script>', content)
        # render sinaimg pictures
        content = re.sub(r'(http:\/\/\w+.sinaimg.cn\/.*?\.(jpg|gif|png))', r'<img src="\1" />', content)
        # render qiniu private image link
        qiniu_re = re.compile(r'(%s/.*?\.(?:jpg|jpeg|gif|png))' % bucket_domain)
        image_urls = qiniu_re.findall(content)
        q = Auth(access_key, secret_key)
        for image_url in image_urls:
            private_url = q.private_download_url(image_url, expires=10)
            # BUG FIX: the URL was previously passed to re.sub unescaped, so
            # regex metacharacters in it ('.', '?', '+') could mis-match or
            # raise; escape it so it is replaced literally.
            content = re.sub(re.escape(image_url), r'<img src="%s" />' % private_url, content)

        # render @ mention links
        content = re.sub(r'@(\w+)(\s|)', r'@<a href="/u/\1">\1</a> ', content)
        # render youku videos
        content = re.sub(r'http://v.youku.com/v_show/id_(\w+).html', r'<iframe height=498 width=510 src="http://player.youku.com/embed/\1" frameborder=0 allowfullscreen style="width:100%;max-width:510px;"></iframe>', content)
        return content
Exemplo n.º 9
0
def GetQiniuDlToken(key,url='7xouyw.com1.z0.glb.clouddn.com',commonFlag=False):
    """Return (True, download_url) for *key*, or (False, error info) on failure.

    :param key: object key in the bucket; may already be a full source URL.
    :param url: bucket domain used to build the download URL.
    :param commonFlag: True for a public bucket (no signed token needed).
    """
    # BUG FIX: `key is ''` compared identity, which only works by accident of
    # CPython string interning (and is a SyntaxWarning on modern Pythons);
    # use equality instead.
    if key is None or key == '':
        return False,'文件名为空'

    # Already a full source URL -- return it unchanged.
    if isSrc(key):
        return True,key

    # Public bucket: no download token is required.
    if commonFlag:
        return True,'http://%s/%s' % (url, key)

    # Authorize with the developer account credentials.
    q = Auth(access_key, secret_key)
    private_url = ''
    # Build the base URL of the private resource.
    base_url = 'http://%s/%s' % (url, key)
    try:
        # Sign the URL (valid for one hour).
        private_url = q.private_download_url(base_url, 3600)
        return True,private_url
    except Exception:
        # BUG FIX: the bare `except:` also swallowed SystemExit/KeyboardInterrupt.
        debug( '第三方获取上传凭证函数异常')
        return False,private_url
Exemplo n.º 10
0
class QiniuStorage(object):
    '''
    Upload, resolve, list and delete files on Qiniu cloud storage.
    @auth:ZWJ
    '''
    def __init__(self, access_key, secret_key, bucket_name, bucket_domain):
        """
        :param access_key: public key
        :param secret_key: private key
        :param bucket_name: bucket to upload into
        :param bucket_domain: private domain used when building file URLs
        """
        self.auth = Auth(access_key, secret_key)
        self.bucket_name = bucket_name
        self.bucket_domain = bucket_domain
        self.bucket_manager = BucketManager(self.auth)

    def put_data(self, name, data):
        """Upload a binary stream and return its URL.

        Keys in the bucket must be unique, so the stored key is derived from
        *name* via _newname.

        :param name: original file name
        :param data: bytes, or a Django-style upload object exposing chunks()
        :return: URL of the uploaded object
        """
        key = self._newname(name)
        # Upload token; an expiry could also be set here.
        token = self.auth.upload_token(self.bucket_name, key)
        if hasattr(data, 'chunks'):
            # Flatten chunked upload objects into a single bytes value.
            data = b''.join(c for c in data.chunks())
        ret, info = put_data(token, key, data)
        if ret is None or ret['key'] != key:
            raise QiniuError(info)
        return self.get_url(key)

    def _newname(self, name):
        '''Append a 6-digit date and 6-digit time tag: PG.jpg --> PG_170211_044217.jpg'''
        root, ext = splitext(basename(name))
        stamp = datetime.now().strftime('_%y%m%d_%H%M%S')
        return '{}{}{}'.format(root, stamp, ext)

    def get_url(self, key):
        '''Return the URL of *key* on the bucket domain.

        For a public bucket this URL is directly accessible; a private bucket
        additionally requires private_download_url.
        '''
        return 'http://{}/{}'.format(self.bucket_domain, key)

    def private_download_url(self, url, expires=7200):
        """Sign *url* for a private resource.

        :param url: raw URL of the private resource
        :param expires: token validity in seconds (default 7200)
        :return: signed download URL
        """
        return self.auth.private_download_url(url, expires)

    def put_file(self, filePath):
        """Upload a local file and return its URL.

        :param filePath: absolute path of the file on disk
        :return: URL of the uploaded object
        """
        key = self._newname(filePath)
        token = self.auth.upload_token(self.bucket_name, key)
        ret, info = put_file(token, key, filePath)
        if ret is None or ret['key'] != key:
            raise QiniuError(info)
        return self.get_url(key)

    def exists(self, key):
        '''Return True if a file named *key* exists in the bucket.'''
        ret, info = self.bucket_manager.stat(self.bucket_name, key.split('/')[-1])
        return ret is not None

    def delete(self, key):
        '''Delete the file named *key*; return a status message or the error info.'''
        if not self.exists(key):
            return '{} not exist in qiniu_cloud'.format(key)
        ret, info = self.bucket_manager.delete(self.bucket_name, key.split('/')[-1])
        if ret == {}:
            return 'success to delete {} in qiniu_cloud'.format(key)
        return info

    def ls_files(self, prefix="", limit=None):
        """Return the set of file names stored in the bucket.

        See http://developer.qiniu.com/docs/v6/api/reference/rs/list.html

        :param prefix: listing prefix
        :param limit: per-request listing cap
        :return: set() of keys
        """
        return {item['key']
                for item in bucket_lister(self.bucket_manager, self.bucket_name,
                                          prefix, limit)}
Exemplo n.º 11
0
def get_download_file_url(file_name):
    """Return a signed, day-long download URL for *file_name*."""
    signer = Auth(settings.QINIU_KEY, settings.QINIU_TOKEN)
    return signer.private_download_url(
        'http://%s/%s' % (settings.QINIU_HOST, file_name), expires=3600 * 24)
Exemplo n.º 12
0
class EntryService(object):
    """EntryService.

    Loads blog entries and pages from disk (syncing missing markdown files
    from a Qiniu bucket), then maintains in-memory indexes by tag, category
    and month, plus the template params used by the views.
    """

    def __init__(self):
        # url -> entry / page objects
        self.entries = {}
        self.pages = {}
        self.urls = []
        # secondary indexes
        self.by_tags = {}
        self.by_categories = {}
        self.by_months = {}
        self.models = Models()
        self.types = self.models.types()
        self.params = self.models.params()

        # Qiniu bucket holding the raw markdown sources under "raw/".
        self.qiniu = Auth(config.qiniu_ak, config.qiniu_sk)
        self.blog_bucket = BucketManager(self.qiniu)
        self.blog_prefix = "raw/"
        self._init_blog()

    def _init_blog(self):
        """
        Initialize blog
            - all entries in entry_dir
            - all pages in page_dir
            - others
        """
        cloud_list = {}
        ret, _, info = self.blog_bucket.list(config.qiniu_bucket, prefix = "raw/")
        # NOTE(review): eval() on an HTTP response body executes arbitrary code
        # if the endpoint is compromised; json.loads would be the safe parser.
        file_list = eval(info.text_body)["items"]
        for f in file_list:
            if f["key"].startswith(self.blog_prefix) and f["mimeType"] == "text/markdown":
                file_name = f["key"][len(self.blog_prefix):]
                print "Find qiniu file %s" %  file_name
                cloud_list[file_name] = (f["key"], f["fsize"])
        print cloud_list

        # Anything already on disk is removed from cloud_list; the rest is downloaded.
        for root, _, files in os.walk(config.entry_dir):
            for f in files:
                if f in cloud_list:
                    print "Find local file %s/%s" % (root, f)
                    del cloud_list[f]
                self.add_entry(False, root + '/' + f)

        # NOTE(review): `root` below is left over from the walk loop above; if
        # entry_dir yielded no directories this raises NameError — confirm.
        for f in cloud_list:
            info = cloud_list[f]
            self.download_file(f, info[0])
            print "Download file %s/%s" % (root,f)
            self.add_entry(False, root + '/' + f)

        for root, _, files in os.walk(config.page_dir):
            for f in files:
                self._add_page(root + '/' + f)
        self._init_miscellaneous(self.types.add, self.entries.values())

    def download_file(self, f, key):
        """Download bucket object *key* into entry_dir as local file *f* via a signed URL."""
        base_url = 'http://%s/%s' % (config.qiniu_bucket_domain, key)
        private_url = self.qiniu.private_download_url(base_url, expires=3600)
        urllib.urlretrieve(private_url, "%s/%s" % (config.entry_dir, f))
        print(private_url)

    def add_entry(self, inotified, path):
        """
        Add entry
        """
        entry = self._init_entry(self.types.entry, path)
        if entry is not None:
            self.entries[entry.url] = entry
            # Re-index immediately only for filesystem-notification driven adds.
            if inotified:
                self._init_miscellaneous(self.types.add, [entry])

    def delete_entry(self, path):
        """
        Delete entry
        """
        for entry in self.entries.values():
            if path == os.path.abspath(entry.path):
                print "delete success!"
                self.entries.pop(entry.url)
                self._init_miscellaneous(self.types.delete, [entry])

    def _add_page(self, path):
        """
        Add page
        """
        page = self._init_entry(self.types.page, path)
        if page is not None:
            self.pages[page.url] = page

    def _init_entry(self, entry_type, path):
        """
        initialize single entry

        Returns a populated entry model, or None when the file is unreadable/empty.
        """
        url, raw_url, name, date, time, content = self._init_file(path, entry_type)
        if url is not None:
            entry = self.models.entry(entry_type)
            entry.path = path
            entry.name = name
            entry.url = url
            entry.raw_url = raw_url
            entry.date = date
            entry.time = time
            header, title, categories, tags = extract.parse(entry)
            entry.content = content
            # Strip the metadata header before rendering to HTML.
            content = content.replace(header, '')
            entry.html = markdown.markdown(content)
            # FIXME How to extract the excerpt of an entry
            entry.excerpt = content[:200] + ' ... ...'
            entry.categories = categories
            entry.tags = tags
            return entry
        return None

    def _init_file(self, file_path, entry_type):
        """
        Initialize single file

        Returns (url, raw_url, name, date, time, content) or six Nones on failure.
        """
        # FIXME: how to determine the publish time of an entry
        content, nones = None, [None for _ in xrange(6)]
        try:
            content = codecs.open(file_path, mode='r', encoding='utf-8').read()
        except:
            return nones
        if content is None or len(content.strip()) == 0:
            return nones
        date, mtime = None, None
        name, _ = os.path.splitext(os.path.basename(file_path))
        chars = ['_', '-', '~']
        # A leading yyyy-m-d in the file name fixes the publication date.
        pattern = r'\d{4}-\d{1,2}-\d{1,2}'
        match = re.search(pattern, name)
        if match:
            y, m, d = match.group().split('-')
            try:
                date = datetime.date(int(y), int(m), int(d))
            except:
                print traceback.format_exc()
                print file_path
            name = name[len(match.group()):]
            for c in chars:
                if name.startswith(c):
                    name = name[1:]
        stat = os.stat(file_path)
        mtime = datetime.datetime.fromtimestamp(stat.st_mtime)
        # Fall back to the file's modification time when no date is embedded.
        if date is None:
            date = mtime
        prefix, url_prefix, raw_prefix = date.strftime(config.url_date_fmt), '', ''
        if entry_type == self.types.entry:
            url_prefix = config.entry_url + '/' + prefix + '/'
            raw_prefix = config.raw_url + '/' + prefix + '/'
        elif entry_type == self.types.page:
            url_prefix = '/'
            raw_prefix = config.raw_url + '/'
        date = date.strftime(config.date_fmt)
        time = date + mtime.strftime(config.time_fmt)[len('yyyy-mm-dd'):]
        url = url_prefix + name + config.url_suffix
        raw_url = raw_prefix + name + config.raw_suffix
        for c in chars:
            name = name.replace(c, ' ')
        return url, raw_url, name, date, time, content

    def _init_miscellaneous(self, init_type, entries):
        """
        Initialize miscellaneous
            - tags
            - categories
            - archives
            - all urls
        """
        for entry in entries:
            self._init_tag(init_type, entry.url, entry.tags)
            self._init_category(init_type, entry.url, entry.categories)
            self._init_monthly_archive(init_type, entry.url)
        self.urls = sorted(self.entries.keys(), reverse=True)
        self._init_params()

    def _init_subscribe(self):
        """
        Initialize subscriptions
        """
        if not self.urls:
            time = datetime.datetime.now().strftime(config.time_fmt)
        else:
            time = self.entries[self.urls[0]].time
        return self.models.subscribe(time)

    def _init_tag(self, init_type, url, tags):
        """
        Initialize tags
        """
        for tag in tags:
            if tag not in self.by_tags:
                if init_type == self.types.add:
                    self.by_tags[tag] = self.models.tag(tag, url)
                if init_type == self.types.delete:
                    pass
            else:
                if init_type == self.types.add:
                    self.by_tags[tag].urls.insert(0, url)
                    self.by_tags[tag].count += 1
                if init_type == self.types.delete:
                    self.by_tags[tag].count -= 1
                    self.by_tags[tag].urls.remove(url)
                    # Drop the tag entirely once its last entry is gone.
                    if self.by_tags[tag].count == 0:
                        self.by_tags.pop(tag)

    def _init_category(self, init_type, url, categories):
        """
        Initialize categories
        """
        for category in categories:
            if category not in self.by_categories:
                if init_type == self.types.add:
                    self.by_categories[category] = \
                        self.models.category(category, url)
                if init_type == self.types.delete:
                    pass
            else:
                if init_type == self.types.add:
                    self.by_categories[category].urls.insert(0, url)
                    self.by_categories[category].count += 1
                if init_type == self.types.delete:
                    self.by_categories[category].count -= 1
                    self.by_categories[category].urls.remove(url)
                    # Drop the category entirely once its last entry is gone.
                    if self.by_categories[category].count == 0:
                        self.by_categories.pop(category)

    def _init_monthly_archive(self, init_type, url):
        """
        Initialize archives
        """
        # Extract the '/yyyy/mm' segment following the entry-url prefix.
        start = len(config.entry_url) + 1
        end = start + len('/yyyy/mm')
        month = url[start:end]
        if month not in self.by_months:
            if init_type == self.types.add:
                self.by_months[month] = \
                    self.models.monthly_archive(self.types.entry, month, url)
            if init_type == self.types.delete:
                pass
        else:
            if init_type == self.types.add:
                self.by_months[month].urls.insert(0, url)
                self.by_months[month].count += 1
            else:
                self.by_months[month].count -= 1
                self.by_months[month].urls.remove(url)
                if self.by_months[month].count == 0:
                    self.by_months.pop(month)

    def _init_params(self):
        """
        Initialize global params
        :return:
        """
        self.params.subscribe = self._init_subscribe()
        self.params.primary.tags = self._init_tags_widget()
        self.params.primary.recently_entries = self._init_recently_entries_widget()
        self.params.secondary.categories = self._init_categories_widget()
        self.params.secondary.calendar = self._init_calendar_widget()
        self.params.secondary.archive = self._init_archive_widget()

    def _init_related_entries(self, url):
        """
        Initialize related entries

        Picks up to 10 random other entries as "related" content.
        """
        # FIXME: related entries
        try:
            index = self.urls.index(url)
        except:
            print traceback.format_exc()
            return None
        urls = self.urls[:index]
        urls.extend(self.urls[index + 1:])
        urls = random.sample(urls, min(len(urls), 10))
        return [self.entries.get(url) for url in sorted(urls, reverse=True)]

    def _init_abouts_widget(self, about_types=None, url=None):
        """
        Initialize abouts widget
        :param about_types:
        :param url:
        :return: list of prev/next navigation models
        """
        about_types = about_types or []
        abouts = []
        for about_type in about_types:
            about = self.models.about(about_type)
            if about_type == self.types.entry and url is not None:
                try:
                    i = self.urls.index(url)
                    # urls are sorted descending, so "previous" is the next index.
                    p, n = i + 1, i - 1
                except:
                    print traceback.format_exc()
                    p, n = 999999999, -1
                if p < len(self.urls):
                    url = self.urls[p]
                    about.prev_url = url
                    about.prev_name = self.entries[url].name
                if n >= 0:
                    url = self.urls[n]
                    about.next_url = url
                    about.next_name = self.entries[url].name
            if about_type == self.types.archive:
                about.prev_url = '/'
                about.prev_name = 'main index'
            elif about_type == self.types.blog:
                about.prev_url = '/'
                about.prev_name = 'main  index'
                about.next_url = config.archive_url
                about.next_name = 'archives'
            abouts.append(about)
        return abouts

    def _init_tags_widget(self):
        """
        Initialize tags widget

        Assigns each tag a rank bucket (1..config.ranks) by usage count.
        """
        # FIXME: calculate tags' rank
        tags = sorted(self.by_tags.values(), key=lambda v: v.count, reverse=True)
        ranks = config.ranks
        div, mod = divmod(len(tags), ranks)
        if div == 0:
            ranks, div = mod, 1
        for r in range(ranks):
            s, e = r * div, (r + 1) * div
            for tag in tags[s:e]:
                tag.rank = r + 1
        return tags

    def _init_recently_entries_widget(self):
        """
        Initialize recently entries widget
        :return:
        """
        return [self.entries[url] for url in self.urls[:config.recently]]

    def _init_calendar_widget(self):
        """
        Initialize calender widget
        :return: calendar model with per-day entry counts and links
        """
        date = datetime.datetime.today().strftime(config.date_fmt)
        if len(self.urls) > 0:
            date = self.entries[self.urls[0]].date
        calendar = self.models.calendar(date)
        y, m = calendar.month.split('-')
        for url in self.urls:
            # url layout: /<entry_url>/yyyy/mm/dd/name<suffix>
            _, _, _, _, d, _ = url.split('/')
            prefix = config.entry_url + '/' + y + '/' + m + '/' + d
            d = int(d)
            if url.startswith(prefix):
                calendar.counts[d] += 1
                # Multiple entries on one day link to the day's archive instead.
                if calendar.counts[d] > 1:
                    start = len(config.entry_url)
                    end = start + len('/yyyy/mm/dd')
                    calendar.urls[d] = config.archive_url + url[start:end]
                else:
                    calendar.urls[d] = url
            else:
                break
        return calendar

    def _init_categories_widget(self):
        """
        Initialize categories widget
        :return:
        """
        return sorted(self.by_categories.values(), key=lambda c: c.name)

    def _init_archive_widget(self):
        """
        Initialize archive widget
        :return:
        """
        return sorted(self.by_months.values(), key=lambda m: m.url, reverse=True)

    def _find_by_query(self, query, start, limit):
        """
        Find by query
        :param query:
        :param start:
        :param limit:
        :return: (entries, total) for entries whose content contains every term
        """
        # FIXME: how to search in the content of entries
        queries = [q.lower() for q in query.split(' ')]
        urls = []
        for query in queries:
            for entry in self.entries.values():
                try:
                    entry.content.index(query)
                    urls.append(entry.url)
                except:
                    print
        return self._find_by_page(sorted(urls), start, limit)

    def _find_by_page(self, urls, start, limit):
        """
        Find by page
        :param urls:
        :param start: 1-based page number
        :param limit: page size
        :return: (entries for the page, total count)
        """
        if urls is None or start < 0 or limit <= 0:
            return [], 0
        total = len(urls)
        urls = sorted(urls, reverse=True)
        s, e = (start - 1) * limit, start * limit
        if s > total or s < 0:
            return [], 0
        return [self.entries[url] for url in urls[s:e]], total

    def _paginate(self, pager_type, value, total, start, limit):
        """
        Pagination
        :param pager_type:
        :param value:
        :param total:
        :param start:
        :param limit:
        :return:
        """
        if limit <= 0:
            return self.models.pager(pager_type, value, total, 0, start, limit)
        pages, mod = divmod(total, limit)
        if mod > 0:
            pages += 1
        return self.models.pager(pager_type, value, total, pages, start, limit)

    def find_by_url(self, entry_type, url):
        """
        Find content by url
        :param entry_type:
        :param url:
        :return: self.params populated for the matched entry/page
        """
        entry, abouts = None, [self.types.blog]
        if entry_type == self.types.entry:
            entry = self.entries.get(url)
            abouts.insert(0, self.types.entry)
        elif entry_type == self.types.page:
            entry = self.pages.get(url)
        self.params.entry = entry
        self.params.entries = self._init_related_entries(url)
        self.params.error = self.models.error(url=url)
        self.params.primary.abouts = self._init_abouts_widget(abouts, url)
        return self.params

    def find_raw(self, raw_url):
        """
        Find the raw content by raw_url
        :param raw_url:
        :return: raw markdown content, or None when nothing matches
        """
        page_url = raw_url.replace(config.raw_url, '').replace(config.raw_suffix, config.url_suffix)
        page = self.find_by_url(self.types.page, page_url).entry
        if page is not None and page.raw_url == raw_url:
            return page.content

        entry_url = raw_url.replace(config.raw_url, config.entry_url).replace(config.raw_suffix, config.url_suffix)
        entry = self.find_by_url(self.types.entry, entry_url).entry
        if entry is not None and entry.raw_url == raw_url:
            return entry.content
        return None

    def archive(self, archive_type, url, start=1, limit=999999999):
        """
        Archives
        :param archive_type:
        :param url:
        :param start:
        :param limit:
        :return: self.params populated with the archived entries
        """
        self.params.error = self.models.error(url=url)

        if archive_type == self.types.raw:
            url = url.replace(config.raw_url, config.archive_url)

        entries, count, = [], 0
        archive_url = url.replace(config.archive_url, '').strip('/')
        prefix = url.replace(config.archive_url, config.entry_url)
        # Accept yyyy, yyyy/mm, or yyyy/mm/dd archive paths (or the root).
        pattern = r'\d{4}/\d{2}/\d{2}|\d{4}/\d{2}|\d{4}'
        match = re.search(pattern, archive_url)
        if match and match.group() == archive_url or archive_url == '':
            urls = [url for url in self.urls if url.startswith(prefix)]
            entries, _ = self._find_by_page(urls, start, limit)
            count = len(entries)
        else:
            entries = None
        if archive_url == '':
            archive_url = self.types.all

        self.params.entries = entries
        self.params.archive = self.models.archive(archive_type, url, archive_url, url, count)
        self.params.primary.abouts = self._init_abouts_widget([self.types.archive])
        return self.params

    def search(self, search_type, url, value='', start=config.start, limit=config.limit):
        """
        Search the site
        :param search_type:
        :param url:
        :param value:
        :param start:
        :param limit:
        :return: self.params populated with the search results and pager
        """
        entries, total, abouts = None, 0, [self.types.blog]
        if search_type == self.types.query:
            entries, total = self._find_by_query(value, start, limit)
        elif search_type == self.types.tag:
            if self.by_tags.get(value) is None:
                entries = None
            else:
                entries, total = self._find_by_page(self.by_tags.get(value).urls, start, limit)
        elif search_type == self.types.category:
            if self.by_categories.get(value) is None:
                entries = None
            else:
                entries, total = self._find_by_page(self.by_categories.get(value).urls, start, limit)
        elif search_type == self.types.index:
            entries, total = self._find_by_page(self.urls, start, limit)
            abouts = []
        self.params.error = self.models.error(url=url)
        self.params.entries = entries
        self.params.search = self.models.search(search_type, value, total)
        self.params.pager = self._paginate(search_type, value, total, start, limit)
        self.params.primary.abouts = self._init_abouts_widget(abouts)
        return self.params

    def error(self, url):
        """
        Error params
        :param url:
        :return:
        """
        self.params.error = self.models.error(url=url)
        self.params.primary.abouts = self._init_abouts_widget([self.types.blog])
        return self.params
path = os.path.join(path, r'dest_photo')




# Walk dest_photo and download a restored copy of each photo from Qiniu into
# ./restore_photo via signed URLs.
# NOTE(review): `q`, `url` and `para` must be defined earlier in the script;
# the "\%s" separators are a literal backslash + name, i.e. Windows-style paths.
for root, dirs, files in os.walk(path):
    print(files)
    fileLength = len(files)
    # filename = os.path.split(path)[1]

    for i in range(fileLength):
        localfile = path + "\%s" % files[i]  # photo path (unused below)
        # file_name= os.path.splitext(files[i])  # file name
        img_url = url + files[i] + para
        # the token expiry time can be configured here
        private_url = q.private_download_url(img_url, expires=3600)
        r = requests.get(private_url)
        img_name = os.getcwd()
        img_name = os.path.join(img_name, r'restore_photo')
        if not os.path.isdir(img_name):
            os.mkdir(img_name)
        img_name =img_name + "\%s" % files[i]
        print(img_name)
        # Stream the response body to disk in 1 KiB chunks.
        with open(img_name, 'wb') as f:
            for chunk in r.iter_content(chunk_size=1024):
                f.write(chunk)



Exemplo n.º 14
0
def qiniu_download(address, file_path):
    """Download a private Qiniu resource at *address* into *file_path*.

    :param address: resource URL, optionally prefixed with 'qiniu:'.
    :param file_path: destination file; if a directory, a random name is used.
    :return: the path written to, or None on failure.
    """
    # SECURITY: hard-coded credentials; these should be loaded from
    # configuration/environment, and the leaked pair should be rotated.
    access_key = 'ZSC-X2p4HG5uvEtfmn5fsTZ5nqB3h54oKjHt0tU6'
    secret_key = 'Ya8qYwIDXZn6jSJDMz_ottWWOZqlbV8bDTNfCGO0'
    q = Auth(access_key, secret_key)
    if address.startswith('qiniu:'):
        address = address.replace('qiniu:', '')
    private_url = q.private_download_url(address, expires=3600)

    # Writing into a directory: pick a random file name inside it.
    if os.path.isdir(file_path):
        file_path = os.path.join(file_path, str(uuid.uuid4()))

    try:
        fpath, _ = urllib.request.urlretrieve(private_url, file_path)
        statinfo = os.stat(fpath)
        size = statinfo.st_size

        # An empty file means the download silently failed.
        if size == 0:
            logger.error('couldnt download data')
            return None

        return file_path
    except Exception:
        # BUG FIX: the bare `except:` also swallowed SystemExit and
        # KeyboardInterrupt; catch Exception so interrupts propagate.
        logger.error('couldnt download data')
        return None


# import tarfile
# import subprocess
#
# def _recursive_tar(root_path, path, tar, ignore=None):
#   if path.split('/')[-1][0] == '.':
#     return
#
#   if os.path.isdir(path):
#     for sub_path in os.listdir(path):
#       _recursive_tar(root_path, os.path.join(path, sub_path), tar)
#   else:
#     if ignore is not None:
#       if path.split('/')[-1] == ignore:
#         return
#     arcname = os.path.relpath(path, root_path)
#     tar.add(path, arcname=arcname)
#
# random_code_package_name = str(uuid.uuid4())
# code_tar_path = os.path.join('/Users/Jian/Downloads/aaa', '%s_code.tar.gz' % random_code_package_name)
# tar = tarfile.open(code_tar_path, 'w:gz')
# for sub_path in os.listdir('/Users/Jian/Downloads/aaa'):
#   _recursive_tar('/Users/Jian/Downloads/aaa',
#                       os.path.join('/Users/Jian/Downloads/aaa', sub_path),
#                       tar,
#                       ignore='%s_code.tar.gz' % random_code_package_name)
# tar.close()
#
# crypto_code = str(uuid.uuid4())
#
# crypto_shell = 'openssl enc -e -aes256 -in %s -out %s -k %s' % (
# '%s_code.tar.gz' % random_code_package_name,
# '%s_code_ssl.tar.gz' % random_code_package_name,
# crypto_code)
# subprocess.call(crypto_shell, shell=True, cwd='/Users/Jian/Downloads/aaa')
#
# # 解密
# decrypto_shell = 'openssl enc -d -aes256 -in %s -out %s -k %s'%('%s_code_ssl.tar.gz' % random_code_package_name,
#                                                                               '%s_code.tar.gz' % random_code_package_name,
#                                                                               crypto_code)
# subprocess.call(decrypto_shell, shell=True, cwd='/Users/Jian/Downloads/aaa')

# # qiniu_address = qiniu_upload(code_tar_path, bucket='mltalker', max_size=10)
# # print(qiniu_address)
#
# qiniu_download('http://pbzz7zw0y.bkt.clouddn.com/14697433-5bd9-4b3c-a392-ba72172c666e_code.tar.gz', '/Users/Jian/Downloads/zj')
Exemplo n.º 15
0
Arquivo: lib.py Projeto: yubang/blog
def get_download_file_url(file_name):
    """Return a signed (private) download URL for *file_name*.

    The URL is built from the configured Qiniu host and signed with the
    project credentials; the signature stays valid for 24 hours.
    """
    auth = Auth(settings.QINIU_KEY, settings.QINIU_TOKEN)
    resource_url = 'http://{0}/{1}'.format(settings.QINIU_HOST, file_name)
    return auth.private_download_url(resource_url, expires=24 * 60 * 60)
Exemplo n.º 16
0
def get_private_url(username, path, filename):
    """Return a one-hour signed download URL for a user's file.

    The object URL is ``<QINIU_DOMAIN><username><path>/<filename>``; note
    that no separator is inserted between domain, username and path, so
    those values must already carry their own slashes where needed.
    """
    auth = Auth(settings.QINIU_ACCESS_KEY, settings.QINIU_SECRET_KEY)
    resource_url = settings.QINIU_DOMAIN + username + path + '/' + filename
    return auth.private_download_url(resource_url, expires=3600)
Exemplo n.º 17
0
    def schedule_download_files(self,
                                order_id,
                                code_address,
                                code_address_code=None):
        """Fetch and unpack an order's code bundle into its workspace.

        Parameters:
            order_id: order identifier; also names the workspace sub-dir.
            code_address: where to fetch the bundle from.  Supported
                schemes are ``ipfs:<hash>`` and ``qiniu:<url>``; an empty
                value means there is nothing to download.
            code_address_code: optional AES-256 passphrase; when given,
                the downloaded ``*_ssl`` archive is decrypted via openssl
                before extraction.

        Side effects: writes an empty ``download.success`` or
        ``download.fail`` marker file into the workspace so other
        components can poll for the outcome.  Failures are logged rather
        than raised.
        """
        workspace = os.path.join(self.workspace, order_id)
        if not os.path.exists(workspace):
            os.makedirs(workspace)

        def _touch(marker):
            # Drop an empty marker file that pollers use to detect completion.
            with open(os.path.join(workspace, marker), 'w'):
                pass

        # Nothing to download: report immediate success.
        if code_address is None or code_address == '':
            _touch('download.success')
            return

        if code_address.startswith('ipfs:'):
            # Download from IPFS.  BUGFIX: str.replace() requires both the
            # old and the new substring; the original call passed only
            # 'ipfs:' and raised TypeError.
            response = ipfs_download(code_address.replace('ipfs:', ''),
                                     workspace)
            if response:
                self.log.info(
                    'IPFS_DONWLOAD_SUCCESS: download dependent files for order %s'
                    % order_id)
                try:
                    with tarfile.open(os.path.join(workspace, 'code.tar.gz'),
                                      'r:gz') as tar:
                        tar.extractall(workspace)
                    _touch('download.success')
                except Exception:
                    # Clear incomplete data before reporting failure.
                    if os.path.exists(os.path.join(workspace, 'code.tar.gz')):
                        os.remove(os.path.join(workspace, 'code.tar.gz'))
                    self.log.error('IPFS_DOWNLOAD_ERROR: for order %s' %
                                   order_id)
                    _touch('download.fail')
            else:
                # Clear incomplete data before reporting failure.
                if os.path.exists(os.path.join(workspace, 'code.tar.gz')):
                    os.remove(os.path.join(workspace, 'code.tar.gz'))
                self.log.error('IPFS_DOWNLOAD_ERROR: for order %s' % order_id)
                _touch('download.fail')
        elif code_address.startswith('qiniu:'):
            # SECURITY NOTE(review): credentials are hard-coded here; they
            # should be moved to configuration / environment variables.
            access_key = 'ZSC-X2p4HG5uvEtfmn5fsTZ5nqB3h54oKjHt0tU6'
            secret_key = 'Ya8qYwIDXZn6jSJDMz_ottWWOZqlbV8bDTNfCGO0'
            q = Auth(access_key, secret_key)
            base_url = code_address.replace('qiniu:', '')
            private_url = q.private_download_url(base_url, expires=3600)
            key = base_url.split('/')[-1]  # e.g. code.tar.gz / code_ssl.tar.gz

            try:
                fpath, _ = urllib.request.urlretrieve(
                    private_url, os.path.join(workspace, key))
                size = os.stat(fpath).st_size

                if size == 0:
                    self.log.error(
                        'FILE_DOWNLOAD_ERROR: empty file for order %s' %
                        order_id)
                else:
                    try:
                        plain_key = key.replace('_ssl', '')
                        if code_address_code is not None:
                            # Decrypt the archive with the supplied passphrase.
                            decrypto_shell = 'openssl enc -d -aes256 -in %s -out %s -k %s' % (
                                key, plain_key, code_address_code)
                            subprocess.call(decrypto_shell,
                                            shell=True,
                                            cwd=workspace)

                        with tarfile.open(
                                os.path.join(workspace, plain_key),
                                'r:gz') as tar:
                            tar.extractall(workspace)

                        if os.path.getsize(
                                os.path.join(workspace, plain_key)) == 0:
                            self.log.error(
                                'FILE_DOWNLOAD_ERROR: for order %s' % order_id)
                            _touch('download.fail')
                            return

                        # Clear the tar file(s).  BUGFIX: when the bundle is
                        # not encrypted, plain_key == key; the original
                        # removed the same path twice, raised OSError, and
                        # wrongly marked the download as failed.
                        os.remove(os.path.join(workspace, plain_key))
                        if plain_key != key:
                            os.remove(os.path.join(workspace, key))

                        self.log.info(
                            'FILE_DONWLOAD_SUCCESS: download dependent files for order %s'
                            % order_id)
                        _touch('download.success')
                    except Exception:
                        self.log.error('FILE_DOWNLOAD_ERROR: for order %s' %
                                       order_id)
                        _touch('download.fail')
            except Exception:
                self.log.error('FILE_DOWNLOAD_ERROR: for order %s' % order_id)
                _touch('download.fail')
Exemplo n.º 18
0
class qiniu():
    """Thin wrapper around the qiniu SDK: upload, list, download, delete.

    NOTE(review): access_key/secret_key/mdomain are placeholders and must
    be filled with real values before use.
    """
    def __init__(self):
        self.access_key = "AK"
        self.secret_key = "SK"
        self.mdomain = "YourDomain"
        self.mauth = Auth(self.access_key, self.secret_key)

    def _put_one(self, upload_bucket, file_name):
        # Upload one local file, keyed by its local path, and verify the
        # returned key/hash against the local file.
        token = self.mauth.upload_token(upload_bucket, file_name)
        ret, info = put_file(token, file_name, file_name)
        assert ret['key'] == file_name
        assert ret['hash'] == etag(file_name)
        print(ret)

    def upload(self, upload_dir, upload_bucket):
        """Upload a single file, or every file under a directory, to the bucket.

        BUGFIX: the original had an unreachable inner
        `elif os.path.isfile(upload_dir)` branch (inside the isdir branch)
        that referenced an undefined `file_name`; it has been removed,
        along with a redundant nested isdir check.
        """
        if os.path.isdir(upload_dir):
            up_file_list = []
            for root, dirs, files in os.walk(upload_dir, topdown=True):
                for v_file in files:
                    up_file_list.append(os.path.join(root, v_file))
            for file_name in up_file_list:
                self._put_one(upload_bucket, file_name)
        elif os.path.isfile(upload_dir):
            self._put_one(upload_bucket, upload_dir)

    def list(self, bucket_name, prefix, limit=5):
        """Print name/size/hash for up to `limit` keys under `prefix`."""
        bucket = BucketManager(self.mauth)
        delimiter = None
        marker = None
        ret, eof, info = bucket.list(bucket_name, prefix, marker, limit, delimiter)
        for item in ret.get('items'):
            print("{Name: %s Size:%s Hash:%s}" % (item['key'], item['fsize'], item['hash']))

    def download(self, filename, output_dir):
        """Stream `filename` from the bucket domain into `output_dir`,
        printing a simple progress indicator."""
        base_url = 'http://%s/%s' % (self.mdomain, filename)
        private_url = self.mauth.private_download_url(base_url)
        print(private_url)
        if not os.path.exists(output_dir):
            os.makedirs(output_dir)
        os.chdir(output_dir)
        res = requests.get(private_url, stream=True)
        assert res.status_code == 200
        print(res.status_code)
        with closing(res) as r:
            accepts = 0
            chunk_size = 512
            with open(os.path.basename(filename), "wb") as code:
                # Stream in chunks instead of loading the whole body at once.
                for chunk in r.iter_content(chunk_size=chunk_size):
                    if chunk:  # filter out keep-alive new chunks
                        code.write(chunk)
                        accepts += len(chunk)
                        progress = round(float(accepts) / int(r.headers['Content-Length']), 4) * 100
                        sys.stdout.write('\r' + 'Now downlaod ' + str(progress) + '%')
                        sys.stdout.flush()
                print('\n')

    def delete(self, bucket_name, del_list):
        """Batch-delete the keys in `del_list` from `bucket_name`."""
        mbucket = BucketManager(self.mauth)
        del_item = build_batch_delete(bucket_name, del_list)
        ret, info = mbucket.batch(del_item)
        print(info)
Exemplo n.º 19
0
def sort_out_list(request, data):
    """Decorate raw product rows with signed image URLs, the seller's
    avatar, the viewer's collection state and a formatted timestamp.

    Parameters:
        request: current request; ``request.user`` identifies the viewer.
        data: iterable of product dicts with keys goods_img1..goods_img4,
            username, goods_price, title, description, c_time, id.

    Returns: {'stateCode': 200, 'list': [decorated dicts]}.
    """
    # Build the signing object once and reuse it for every URL.
    q = Auth(configs.get('qiniu').get('AK'), configs.get('qiniu').get('SK'))
    new_data = []
    for obj in data:

        # Collect signed product-image links (img2-4 are optional).
        imgs = []
        for img_field in ('goods_img1', 'goods_img2', 'goods_img3',
                          'goods_img4'):
            img_key = obj.get(img_field)
            if img_key:
                imgs.append(q.private_download_url(img_key, expires=3600))

        # Resolve the seller and a signed avatar URL.
        product_username = obj.get('username')
        try:
            product_user = User.objects.get(username__exact=product_username)
        except User.DoesNotExist:
            product_user = None
        # BUGFIX: the original dereferenced product_user unconditionally
        # and crashed with AttributeError when the seller no longer exists.
        if product_user is not None and product_user.user_image_url:
            product_avatar_url = q.private_download_url(
                product_user.user_image_url, expires=3600)
        else:
            product_avatar_url = None

        # Has the current viewer collected this product?
        try:
            c = Collection.objects.get(username=str(request.user),
                                       product_id=obj.get('id'))
        except Collection.DoesNotExist:
            c = None
        collect_state = c is not None

        # Split "YYYY-MM-DDTHH:MM:SS.micro" into date and time parts.
        date = obj.get('c_time').split(".")[0]
        year = date.split("T")[0]
        time = date.split("T")[1]

        new_obj = {
            'product_id': obj.get('id'),
            'user_id': product_user.id if product_user is not None else None,
            'username': obj.get('username'),
            'avatar_url': product_avatar_url,
            'goods_price': obj.get('goods_price'),
            'goods_img_url': imgs,
            'collect_state': collect_state,
            'title': obj.get('title'),
            'description': obj.get('description'),
            'time': year + " " + time
        }
        new_data.append(new_obj)

    return {'stateCode': 200, 'list': new_data}
Exemplo n.º 20
0
# -*- coding: utf-8 -*-
# flake8: noqa

import os

from qiniu import Auth

# Generate a private (signed) download URL for a Qiniu resource.

# The Access Key and Secret Key of the target bucket are read from
# environment variables.
access_key = os.getenv('QINIU_ACCESS_KEY')
secret_key = os.getenv('QINIU_SECRET_KEY')

# Build the authentication/signing object.
q = Auth(access_key, secret_key)

# Public base URL of the object and the signature lifetime in seconds.
url = 'http://crawler-private.iamlj.com/1.png'
time = 3600
# Sign and print the private URL.
private_url = q.private_download_url(url, time)

print private_url
Exemplo n.º 21
0
class qiniu():
    """Thin wrapper around the qiniu SDK: upload, list, download, delete.

    NOTE(review): access_key/secret_key/mdomain are placeholders and must
    be filled with real values before use.
    """
    def __init__(self):
        self.access_key = "AK"
        self.secret_key = "SK"
        self.mdomain = "YourDomain"
        self.mauth = Auth(self.access_key, self.secret_key)

    def _put_one(self, upload_bucket, file_name):
        # Upload one local file, keyed by its local path, and verify the
        # returned key/hash against the local file.
        token = self.mauth.upload_token(upload_bucket, file_name)
        ret, info = put_file(token, file_name, file_name)
        assert ret['key'] == file_name
        assert ret['hash'] == etag(file_name)
        print(ret)

    def upload(self, upload_dir, upload_bucket):
        """Upload a single file, or every file under a directory, to the bucket.

        BUGFIX: the original had an unreachable inner
        `elif os.path.isfile(upload_dir)` branch (inside the isdir branch)
        that referenced an undefined `file_name`; it has been removed,
        along with a redundant nested isdir check.
        """
        if os.path.isdir(upload_dir):
            up_file_list = []
            for root, dirs, files in os.walk(upload_dir, topdown=True):
                for v_file in files:
                    up_file_list.append(os.path.join(root, v_file))
            for file_name in up_file_list:
                self._put_one(upload_bucket, file_name)
        elif os.path.isfile(upload_dir):
            self._put_one(upload_bucket, upload_dir)

    def list(self, bucket_name, prefix, limit=5):
        """Print name/size/hash for up to `limit` keys under `prefix`."""
        bucket = BucketManager(self.mauth)
        delimiter = None
        marker = None
        ret, eof, info = bucket.list(bucket_name, prefix, marker, limit,
                                     delimiter)
        for item in ret.get('items'):
            print("{Name: %s Size:%s Hash:%s}" % (item['key'], item['fsize'],
                                                  item['hash']))

    def download(self, filename, output_dir):
        """Stream `filename` from the bucket domain into `output_dir`,
        printing a simple progress indicator."""
        base_url = 'http://%s/%s' % (self.mdomain, filename)
        private_url = self.mauth.private_download_url(base_url)
        print(private_url)
        if not os.path.exists(output_dir):
            os.makedirs(output_dir)
        os.chdir(output_dir)
        res = requests.get(private_url, stream=True)
        assert res.status_code == 200
        print(res.status_code)
        with closing(res) as r:
            accepts = 0
            chunk_size = 512
            with open(os.path.basename(filename), "wb") as code:
                # Stream in chunks instead of loading the whole body at once.
                for chunk in r.iter_content(chunk_size=chunk_size):
                    if chunk:  # filter out keep-alive new chunks
                        code.write(chunk)
                        accepts += len(chunk)
                        progress = round(
                            float(accepts) / int(r.headers['Content-Length']),
                            4) * 100
                        sys.stdout.write('\r' + 'Now downlaod ' +
                                         str(progress) + '%')
                        sys.stdout.flush()
                print('\n')

    def delete(self, bucket_name, del_list):
        """Batch-delete the keys in `del_list` from `bucket_name`."""
        mbucket = BucketManager(self.mauth)
        del_item = build_batch_delete(bucket_name, del_list)
        ret, info = mbucket.batch(del_item)
        print(info)
Exemplo n.º 22
0
class Logs():
    """Query and build signed download links for Qiniu CDN log files."""

    def __init__(self):
        # Build the auth/bucket handles once and reuse them.
        # BUGFIX: the original referenced the misspelled name `secrek_ley`
        # (a NameError at runtime); the module-level `secret_key` was
        # presumably intended -- TODO confirm against the module constants.
        self.au = Auth(access_key, secret_key)
        self.bucket = BucketManager(self.au)

    def time_format(self, date):
        '''
        Validate the date format (log keys look like domain_YYYY-MM-DD.gz).

        :param date: date string to check
        :return: str(date) when it matches, otherwise False
        '''
        # NOTE(review): this is a loose prefix match -- e.g. "2021-19-39"
        # also passes; tighten the pattern if strict validation is needed.
        reg = re.compile('[0-9]{4}-(0|1)[0-9]{1}-[0-3]{1}[0-9]{1}')
        if reg.match(date):
            return str(date)
        else:
            return False

    def log_list(self, prefix):
        '''
        List log entries whose keys start with `prefix`.

        :param prefix: key prefix to query
        :return: the raw result tuple from BucketManager.list
        '''
        res = self.bucket.list(bucket, prefix=prefix)
        return res

    def get_log_url(self, domain, date):
        '''
        Build signed download links for one domain's logs on one day.

        :param domain: the domain whose logs are wanted
        :param date: date string, YYYY-MM-DD
        :return: list of signed URLs, each valid for 3600 seconds
        '''
        if domain is None:
            raise ValueError("Please Import Doamin!!!")

        if self.time_format(str(date)):
            prefix = domain + "_" + date
        else:
            raise ValueError("Date Format:YYYY-MM-DD")

        urls = []
        try:
            res = self.log_list(prefix)
            items = res[0]["items"]
            for item in items:
                key = item["key"]
                base_url = 'http://%s/%s' % (bucket_domain, key)
                private_url = self.au.private_download_url(base_url, 3600)
                urls.append(private_url)
            return urls
        except Exception as exc:
            # BUGFIX: the original bare `except:` printed "ERROR:"+res,
            # which itself raised NameError when log_list failed before
            # `res` was bound (and TypeError otherwise, res being a tuple).
            print("ERROR: %s" % exc)
Exemplo n.º 23
0
class QiniuStorageAdapter(StorageAbstractAdapter):
    """Storage adapter backed by the Qiniu object-storage service.

    Wraps upload-token generation, private download-URL signing, object
    move/copy/delete, and a small in-memory cache of `stat` metadata
    keyed by ``"<bucket>:<file_name>"``.
    """
    __slots__ = ('__access_key', '__secret_key', '__auth', '__bucket',
                 '__file_info_cache', '__bucket_manager', '__domain')

    def __init__(self, config, *args, **kw):
        # config must provide bucket/access_key/secret_key/domain strings.
        assert isinstance(config, dict)
        assert isinstance(config.get("bucket"), str)
        assert isinstance(config.get("access_key"), str)
        assert isinstance(config.get("secret_key"), str)
        assert isinstance(config.get("domain"), str)
        self.__access_key = config.get("access_key")
        self.__secret_key = config.get("secret_key")
        self.__bucket = config.get("bucket")
        self.__domain = config.get("domain")
        self.__auth = Auth(self.__access_key, self.__secret_key)
        self.__bucket_manager = BucketManager(self.__auth)
        # Cache of stat() results; see __cache_file_info and friends.
        self.__file_info_cache = {}

    def __gen_upload_token(self, file_name, expire=3600, policy=None):
        # Create an upload token, optionally constrained by a policy.
        assert isinstance(file_name, str)
        assert isinstance(expire, int)
        assert isinstance(policy, (str, dict, type(None)))
        if policy:
            token = self.__auth.upload_token(
                self.__bucket, file_name, expire, policy)
        else:
            token = self.__auth.upload_token(self.__bucket, file_name, expire)
        return token

    def token(self, file_name, **kw):
        """Return an upload token for `file_name` (kw: expire, policy)."""
        expire = kw.get("expire", 3600)
        policy = kw.get("policy")
        return self.__gen_upload_token(file_name, expire, policy)

    def put(self, file_name, local_file, **kw):
        """Upload `local_file` under the key `file_name` (kw: token)."""
        token = kw.get('token')
        assert token, "token can't be empty"
        ret, info = put_file(token, file_name, local_file)
        assert ret['key'] == file_name
        assert ret['hash'] == etag(local_file)

    def __gen_download_url(self, bucket_domain, file_name, expire=3600):
        # Sign a private download URL on the given domain.
        assert isinstance(bucket_domain, str)
        assert isinstance(file_name, str)
        assert isinstance(expire, int)
        base_url = 'http://%s/%s' % (bucket_domain, file_name)
        private_url = self.__auth.private_download_url(
            base_url, expires=expire)
        return private_url

    def get(self, file_name, **kw):
        # Not implemented: callers download via the URL from get_url().
        pass

    def get_url(self, file_name, **kw):
        """Return a signed download URL for `file_name` (kw: expire)."""
        bucket_domain = self.__domain
        expire = kw.get("expire", 3600)
        return self.__gen_download_url(bucket_domain, file_name, expire)

    def move(self, src, dest):
        """Move src to dest (both "<bucket><:><key>"); True on success."""
        src_bucket, src_key = src.split("<:>")
        dest_bucket, dest_key = dest.split("<:>")
        ret, info = self.__bucket_manager.move(
            src_bucket, src_key, dest_bucket, dest_key)
        if ret != {}:
            return False
        # BUGFIX: drop any cached stat for the moved-away source key so
        # later file_info() calls do not serve stale metadata.
        self.__file_info_cache.pop(src_bucket + ':' + src_key, None)
        return True

    def copy(self, src, dest):
        """Copy src to dest (both "<bucket><:><key>"); True on success."""
        src_bucket, src_key = src.split("<:>")
        dest_bucket, dest_key = dest.split("<:>")
        ret, info = self.__bucket_manager.copy(
            src_bucket, src_key, dest_bucket, dest_key)
        if ret != {}:
            return False
        return True

    def delete(self, file_name):
        """Delete `file_name` from the configured bucket; True on success."""
        ret, info = self.__bucket_manager.delete(self.__bucket, file_name)
        if ret != {}:
            return False
        # BUGFIX: invalidate the cached stat entry for the deleted object;
        # the original kept serving its metadata after deletion.
        self.__file_info_cache.pop(self.__bucket + ':' + file_name, None)
        return True

    def __cache_file_info(self, file_name, content):
        # Remember a stat() result under "<bucket>:<file_name>".
        self.__file_info_cache[self.__bucket + ':' + file_name] = content

    def __get_file_info_from_cache(self, file_name, item=None):
        # Return the whole cached stat dict, or one field of it.
        if item:
            return self.__file_info_cache[self.__bucket + ':' + file_name].get(item)
        else:
            return self.__file_info_cache[self.__bucket + ':' + file_name]

    def __check_file_info_cache(self, file_name):
        # True when a stat result for file_name is already cached.
        if self.__bucket + ':' + file_name in self.__file_info_cache:
            return True
        return False

    def __get_file_info_item(self, file_name, item):
        # Fetch one stat field, hitting the service only on a cache miss.
        if self.__check_file_info_cache(file_name):
            return self.__get_file_info_from_cache(file_name, item)
        ret, info = self.__bucket_manager.stat(self.__bucket, file_name)
        if ret:
            self.__cache_file_info(file_name, ret)
            return self.__get_file_info_from_cache(file_name, item)
        return None

    def file_info(self, file_name):
        """Return the full stat dict for `file_name` (None if missing)."""
        return self.__get_file_info_item(file_name, None)

    def file_size(self, file_name):
        """Return the object size in bytes."""
        return self.__get_file_info_item(file_name, 'fsize')

    def file_hash(self, file_name):
        """Return the object's qiniu etag hash."""
        return self.__get_file_info_item(file_name, 'hash')

    def file_mime(self, file_name):
        """Return the object's MIME type."""
        return self.__get_file_info_item(file_name, 'mimeType')

    def file_create_time(self, file_name):
        """Return the object's upload timestamp (qiniu putTime)."""
        return self.__get_file_info_item(file_name, 'putTime')
Exemplo n.º 24
0
# Listing parameters for BucketManager.list (see the qiniu SDK docs).
# Key prefix filter (None = all keys).
prefix = None
# Page size per listing request.
limit = 1000
# No delimiter: list every key instead of grouping by '/' prefixes.
delimiter = None
# Pagination marker; refreshed from each response until eof.
marker = None

with open('wyeth_qiniu_filepath_new.txt', mode='w+', encoding='utf-8') as f:
    while True:
        ret, eof, info = bucket.list(bucket_name, prefix, marker, limit, delimiter)
        text_body = json.loads(info.text_body)
        marker = text_body.get('marker', '')
        print('marker=' + marker)
        for i in ret.get('items', []):
            path = i['key']
            # Normalise keys: strip a single leading '/', skip empty keys.
            if path.startswith('/'):
                path = path[1:]
            if not path:
                continue
            # BUGFIX: this assignment was accidentally duplicated.
            base_url = 'http://wyeth-course.nibaguai.com/' + path
            f.write('%s\n' % base_url)
            # For a private bucket (or timestamp hotlink protection) a
            # signed URL can be generated like this:
            private_url = q.private_download_url(base_url, expires=100)
            # print('private_url' + private_url)
        if eof:
            break
print('success')
Exemplo n.º 25
0
class WebConnection():
    """Background Qiniu sync client for a photo application.

    Maintains login state, a worker thread that drains a queue of
    upload/download/list/delete/update orders, and per-user key prefixes
    of the form ``<user>&&<password>&&``.

    NOTE(review): the AK/SK credentials are hard-coded below and user
    passwords are embedded in object keys -- this should be reworked
    before any real deployment.
    """
    def __init__(self,init_path):
        """Connect to Qiniu and start the background order-processing thread."""
        self.path = os.path.realpath(init_path)

        # Qiniu access credentials, bucket name and bucket domain.
        self.AK = '72UZoe-AJao4469EfG0kSZ1B4CBzmn_WO6wd2fdQ'
        self.SK = 'Bn6vHxfwuX5IKiruRJsBLNQCmJwzLWr_9CCN89-2'
        self.BN = 'xiaoyphoto'
        self.BD = '7xpg1h.com1.z0.glb.clouddn.com'
        self.wait_time = 3600

        # Current user's key prefix and credentials.
        self.pre = ''
        self.user = ''
        self.passwd = ''

        # Locks guarding: connection info, the order queue, the Qiniu
        # handles, and the cached file list, respectively.
        self.inflock = thread.allocate_lock()
        self.ordlock = thread.allocate_lock()
        self.datlock = thread.allocate_lock()
        self.lislock = thread.allocate_lock()

        # Pending orders, each a tuple such as ('up', photo); see exeord().
        self.order = []

        # 0 = logged out, 1 = logged in, 2 = login failed, 4 = stopping.
        self.log_state = 0

        self.filelist = []

        # photo objects fetched from the cloud by __download_file().
        self.clouds = []

        # Placeholder file uploaded alongside '.inf' metadata keys.
        self.static_file = '..\\map\\log.txt'
        # Maps file key -> [mood, comment], parsed from '.inf' keys.
        self.inf = dict()

        self.__connect()
        thread.start_new_thread(self.__run,())

    def stop(self):
        """Ask the worker thread to exit on its next loop iteration."""
        self.log_state = 4
        
    def login(self,username,passwd):
        """Log in: succeeds only if the user's prefix already has objects."""
        self.user = username
        self.passwd = passwd
        marker = None
        self.pre = username + '&&' +passwd + '&&'
        ret, eof, info = self.bucket.list(self.BN,\
                                          prefix=self.pre, marker=marker,\
                                          limit=1)
        if len(ret['items']) == 0:
            self.log_state = 2
        else:
            self.log_state = 1
            self.clouds = []
            print 'Success!'

    def signin(self,username,passwd):
        """Register: succeeds only if the username is unused; then uploads
        the static placeholder file under the new user's prefix."""
        self.user = username
        self.passwd = passwd
        marker = None
        self.pre = username + '&&' +passwd + '&&'
        ret, eof, info = self.bucket.list(self.BN,\
                                          prefix=self.user, marker=marker,\
                                          limit=1)
        
        if len(ret['items']) > 0:
            self.log_state = 0
        else:
            self.log_state = 1
            self.clouds = []

            # Upload the static placeholder file for the new account.
            localfile = self.static_file
            key = self.pre + localfile.split('\\')[-1]
            token = self.q.upload_token(self.BN, key)
            mime_type = self.__get_mime_type(localfile)
            params = {'x:a': 'a'}
            progress_handler = lambda progress, total: progress
            ret, info = qiniu.put_file(token, key, localfile, params,\
                                   mime_type, progress_handler=progress_handler)

    def logout(self):
        """Clear the current user's session state."""
        self.user = ''
        self.passwd = ''
        self.pre = ''
        self.log_state = 0

    def getlist(self):
        """Return the most recently fetched cloud file list."""
        return self.filelist
        

    def setinf(self,ak,sk,bn,bd):
        """Replace credentials/bucket info and reconnect."""
        self.inflock.acquire()
        self.AK = ak
        self.SK = sk
        self.BN = bn
        self.BD = bd
        self.inflock.release()
        self.__connect()

    def setpath(self,path):
        """Change the local directory used for downloads."""
        self.inflock.acquire()
        self.path = path
        self.inflock.release()

    #order must be a tuple such as ('up','...')
    def exeord(self,str):
        """Queue an order tuple for the worker thread, e.g. ('up', photo)."""
        self.ordlock.acquire()
        self.order.append(str)
        self.ordlock.release()

    def __connect(self):
        # (Re)create the Auth and BucketManager handles under datlock.
        self.datlock.acquire()
        self.q = Auth(self.AK,self.SK)
        self.bucket = BucketManager(self.q)
        self.datlock.release()

    def __run(self):
        # Worker loop: while logged in and not stopping, pop the oldest
        # order and dispatch it to the matching private handler.
        while True:
            if self.log_state == 4:
                break
            elif self.log_state != 1 or len(self.order)==0:
                time.sleep(0.1)
            else:
                self.ordlock.acquire()
                tord = self.order[0]
                self.ordlock.release()
                print tord
                if tord[0] == 'up':
                    self.__upload_file(tord[1])
                elif tord[0] == 'down':
                    self.__download_file(tord[1])
                elif tord[0] == 'dir':
                    self.__list_all()
                elif tord[0] == 'del':
                    self.__delete_file(tord[1])
                elif tord[0] == 'upd':
                    self.__update_file(tord[1])
                self.ordlock.acquire()
                del self.order[0]
                self.ordlock.release()

    def __get_mime_type(self,__path):
        # All uploads are sent as plain text regardless of extension.
        mime_type = "text/plain"
        return mime_type

    def __list_all(self):
        # Refresh self.filelist (and self.inf metadata) from the bucket,
        # paging with the listing marker until eof.
        marker = None
        limit = None
        eof = False
        self.lislock.acquire()
        self.filelist = []
        self.lislock.release()
        temp = []
        while eof is False:
            ret, eof, info = self.bucket.list(self.BN, prefix=self.pre, marker=marker, limit=limit)
            marker = ret.get('marker', None)
            for item in ret['items']:
                if item['key'][-4:] == '.txt':
                    continue
                if item['key'][-4:] == '.inf':
                    # Metadata key layout: user&&pass&&name&&mood&&comment&&.inf
                    info = item['key'].split('&&')
                    filename = info[0]+'&&'+info[1]+'&&'+info[2]
                    mood = int(info[3])
                    pl = info[4]
                    self.inf[filename] = [mood,pl]
                    continue
                temp.append(item['key'].split(self.pre)[1])  
                
        self.lislock.acquire()
        self.filelist = temp
        self.lislock.release()

    def __delete_file(self,key):
        # Delete a single object under the current user's prefix.
        ret ,info = self.bucket.delete(self.BN,self.pre+key)

    def __update_file(self,localphoto):
        # Re-upload only the '.inf' metadata key for an existing photo.
        # NOTE(review): stores the bound method `getcomment` (no call) in
        # self.inf -- likely meant getcomment(); confirm against callers.
        localfile = localphoto.getpath()
        key = self.pre + localfile.split('\\')[-1]
        inf_key = key + '&&' + str(localphoto.getmood())\
                  +'&&'+localphoto.getcomment()+'&&.inf'
        self.inf[key] = [localphoto.getmood(),localphoto.getcomment]
        token1 = self.q.upload_token(self.BN,inf_key)
        mime_type = self.__get_mime_type(localfile)
        params = {'x:a': 'a'}
        progress_handler = lambda progress, total: progress
        ret1, info1 = qiniu.put_file(token1, inf_key, self.static_file,params,\
                                   mime_type, progress_handler=progress_handler)

    #now it need a photo object
    def __upload_file(self,localphoto):
        # Upload the photo file plus its '.inf' metadata key.
        # NOTE(review): same unbound `getcomment` issue as __update_file.
        localfile = localphoto.getpath()
        key = self.pre + localfile.split('\\')[-1]
        inf_key = key + '&&' + str(localphoto.getmood())\
                  +'&&'+localphoto.getcomment()+'&&.inf'
        self.inf[key] = [localphoto.getmood(),localphoto.getcomment]
        token1 = self.q.upload_token(self.BN,inf_key)
        token = self.q.upload_token(self.BN, key)
        mime_type = self.__get_mime_type(localfile)
        params = {'x:a': 'a'}
        progress_handler = lambda progress, total: progress
        ret1, info1 = qiniu.put_file(token1, inf_key, self.static_file,params,\
                                   mime_type, progress_handler=progress_handler)
        ret, info = qiniu.put_file(token, key, localfile, params,\
                                   mime_type, progress_handler=progress_handler)

    def __download_file(self,key):
        # Fetch one object into self.path, wrap it in a photo object and
        # restore mood/comment from the cached '.inf' metadata.
        filename = self.pre + key
        base_url = 'http://%s/%s' % (self.BD,filename)
        private_url = self.q.private_download_url(base_url,expires=3600)
        r = requests.get(private_url)
        with open(self.path+'\\'+key,'wb') as code:
            code.write(r.content)
        p = photo(self.path + '\\' +key)
        if filename in self.inf:
            p.setmood(self.inf[filename][0])
            p.setcomment(self.inf[filename][1])
        p.setcloud(1)
        self.clouds.append(p)
Exemplo n.º 26
0
from django.test import TestCase

# Create your tests here.
import requests
from qiniu import Auth
from swiper import config
qn = Auth(config.QN_ACCESS_KEY, config.QN_SECRET_KEY)
# There are two ways to construct the base_url.

# Alternatively, download by passing the full object URL directly.
base_url = 'http://qaezcmqwq.bkt.clouddn.com/Avatar-2.jpg'
# The signed URL's expiry time (in seconds) can be configured here.
private_url = qn.private_download_url(base_url, expires=3600)
print(private_url)
r = requests.get(private_url)
print(r)
assert r.status_code == 200
Exemplo n.º 27
0
class QiniuStorage(Storage):
    """
    Qiniu Storage Service

    Django storage backend for a qiniu bucket: saves via upload tokens,
    stats/lists/deletes via BucketManager, and returns public or signed
    private download URLs depending on configuration.
    """
    # Prefix under which all names are resolved; "" means the bucket root.
    location = ""

    def __init__(
            self,
            access_key=QINIU_ACCESS_KEY,
            secret_key=QINIU_SECRET_KEY,
            bucket_name=QINIU_BUCKET_NAME,
            bucket_domain=QINIU_BUCKET_DOMAIN,
            secure_url=QINIU_SECURE_URL,
            private_url=QINIU_PRIVATE_URL,
            private_url_expires=QINIU_PRIVATE_URL_EXPIRES):
        # All settings default to module-level QINIU_* constants.
        self.auth = Auth(access_key, secret_key)
        self.bucket_name = bucket_name
        self.bucket_domain = bucket_domain
        self.bucket_manager = BucketManager(self.auth)
        self.secure_url = secure_url  # True -> build https:// URLs
        self.private_url = private_url  # True -> sign URLs in url()
        self.private_url_expires = private_url_expires

    def _clean_name(self, name):
        """
        Cleans the name so that Windows style paths work
        """
        # Normalize Windows style paths
        clean_name = posixpath.normpath(name).replace('\\', '/')

        # os.path.normpath() can strip trailing slashes so we implement
        # a workaround here.
        if name.endswith('/') and not clean_name.endswith('/'):
            # Add a trailing slash as it was stripped.
            return clean_name + '/'
        else:
            return clean_name

    def _normalize_name(self, name):
        """
        Normalizes the name so that paths like /path/to/ignored/../foo.txt
        work. We check to make sure that the path pointed to is not outside
        the directory specified by the LOCATION setting.
        """

        base_path = force_text(self.location)
        base_path = base_path.rstrip('/')

        final_path = urljoin(base_path.rstrip('/') + "/", name)

        base_path_len = len(base_path)
        if (not final_path.startswith(base_path) or
                final_path[base_path_len:base_path_len + 1] not in ('', '/')):
            raise SuspiciousOperation("Attempted access to '%s' denied." %
                                      name)
        return final_path.lstrip('/')

    def _open(self, name, mode='rb'):
        """Return a lazy QiniuFile wrapper for *name*."""
        return QiniuFile(name, self, mode)

    def _save(self, name, content):
        """Read *content* fully into memory, upload it under the normalized
        name, and return the cleaned name per Django storage convention."""
        cleaned_name = self._clean_name(name)
        name = self._normalize_name(cleaned_name)

        if hasattr(content, 'open'):
            # Since Django 1.6, content should be a instance
            # of `django.core.files.File`
            content.open()

        if hasattr(content, 'chunks'):
            content_str = b''.join(chunk for chunk in content.chunks())
        else:
            content_str = content.read()

        self._put_file(name, content_str)
        content.close()
        return cleaned_name

    def _put_file(self, name, content):
        """Upload raw bytes; raise QiniuError if qiniu rejected the put."""
        token = self.auth.upload_token(self.bucket_name)
        ret, info = put_data(token, name, content)
        if ret is None or ret['key'] != name:
            raise QiniuError(info)

    def _read(self, name):
        """Download the object's bytes via its (possibly signed) URL."""
        return requests.get(self.url(name)).content

    def delete(self, name):
        """Delete *name* from the bucket."""
        name = self._normalize_name(self._clean_name(name))
        if six.PY2:
            name = name.encode('utf-8')
        ret, info = self.bucket_manager.delete(self.bucket_name, name)

        # NOTE(review): status 612 is qiniu's "no such file"; raising on it
        # makes deleting a missing object an error -- confirm intended.
        if ret is None or info.status_code == 612:
            raise QiniuError(info)

    def _file_stat(self, name, silent=False):
        """Return qiniu stat metadata for *name*; raise unless *silent*."""
        name = self._normalize_name(self._clean_name(name))
        if six.PY2:
            name = name.encode('utf-8')
        ret, info = self.bucket_manager.stat(self.bucket_name, name)
        if ret is None and not silent:
            raise QiniuError(info)
        return ret

    def exists(self, name):
        """True when the object can be stat'ed."""
        stats = self._file_stat(name, silent=True)
        return True if stats else False

    def size(self, name):
        """Object size in bytes (qiniu 'fsize' field)."""
        stats = self._file_stat(name)
        return stats['fsize']

    def modified_time(self, name):
        """Upload time as a naive local datetime.

        qiniu 'putTime' is in 100-nanosecond units, hence the 1e7 divisor.
        """
        stats = self._file_stat(name)
        time_stamp = float(stats['putTime']) / 10000000
        return datetime.datetime.fromtimestamp(time_stamp)

    def listdir(self, name):
        """Return ([subdirectories], [files]) directly under *name*."""
        name = self._normalize_name(self._clean_name(name))
        if name and not name.endswith('/'):
            name += '/'

        dirlist = bucket_lister(self.bucket_manager, self.bucket_name,
                                prefix=name)
        files = []
        dirs = set()
        base_parts = name.split("/")[:-1]
        for item in dirlist:
            parts = item['key'].split("/")
            parts = parts[len(base_parts):]
            if len(parts) == 1:
                # File
                files.append(parts[0])
            elif len(parts) > 1:
                # Directory
                dirs.add(parts[0])
        return list(dirs), files

    def url(self, name):
        """Public download URL, signed when the bucket is private."""
        name = self._normalize_name(self._clean_name(name))
        name = filepath_to_uri(name)
        protocol = u'https://' if self.secure_url else u'http://'
        url = urljoin(protocol + self.bucket_domain, name)
        if self.private_url:
            return self.auth.private_download_url(url, self.private_url_expires)
        else:
            return url
Exemplo n.º 28
0
class QiniuStorageService(storage_service.StorageService):
    """StorageService backed by a private qiniu bucket.

    Read-only: download() and list() work; upload()/exists() are not
    implemented. Python 2 code (relies on `unicode` and `urllib.quote`).
    """

    def __init__(self, *args, **kwargs):
        # Required kwargs: accesskeyid, accesskeysecret, bucket, domain_url.
        accesskeyid = kwargs['accesskeyid']
        accesskeysecret = kwargs['accesskeysecret']
        self._bucket = kwargs['bucket']
        self._auth = Auth(accesskeyid, accesskeysecret)
        self._domain = kwargs['domain_url']
        self._qiniu_api = BucketManager(self._auth)

    def download(self, cos_path, local_path):
        """Download *cos_path* from the bucket to *local_path* on disk.

        Raises SystemError on any non-200 HTTP response.
        """
        if isinstance(local_path, str):
            local_path = local_path.decode('utf-8')
        if cos_path.startswith('/'):
            cos_path = cos_path[1:]

        # Percent-encode non-ASCII keys before building the URL.
        if isinstance(cos_path, unicode):
            cos_path = cos_path.encode('utf-8')
            from urllib import quote
            cos_path = quote(cos_path)

        base_url = 'http://%s/%s' % (self._domain, cos_path)
        # print base_url
        private_url = self._auth.private_download_url(base_url, expires=3600)
        # print private_url
        logger.debug("private url: " + private_url)

        ret = requests.get(private_url)

        if ret.status_code != 200:
            raise SystemError("download file from qiniu failed")
        # print local_path.encode('utf-8')
        with open(local_path.encode('utf-8'), 'wb') as fd:
            for chunk in ret.iter_content(1024):
                fd.write(chunk)

    def upload(self, cos_path, local_path):
        raise NotImplementedError

    def list(self):
        """Yield every object key in the bucket, paging with the marker."""
        prefix = None
        limit = 100
        delimiter = None
        marker = None

        eof = False

        while not eof:
            try:
                ret, eof, info = self._qiniu_api.list(self._bucket, prefix, marker, limit, delimiter)
                if ret is None:
                    logger.warn("ret is None")
                    if info.error == 'bad token':
                        raise TokenException
                    else:
                        logger.warn(info.text_body)
                        raise IOError(info.error)

                for i in ret['items']:
                    logger.info("yield new object: {}".format(i['key']))
                    yield i['key']

                # eof True: the while condition ends the generator next pass.
                if eof is True:
                    logger.info("eof is {}".format(eof))
                    continue

                if not eof and 'marker' in ret:
                    marker = ret['marker']
                else:
                    eof = True
            except TokenException as e:
                eof = True
                logger.warn("Your accessid/accesskey is incorrect, Please double check your configures")
            except Exception as e:
                logger.exception("list exception: " + str(e))

    def exists(self, _path):
        raise NotImplementedError
Exemplo n.º 29
0
class QiniuStorage(object):
    """Per-user (or shared system-bucket) qiniu storage helper."""
    storage_type = 'qiniu'

    def __init__(self, user_id, dbQiniuStorage, use_system=False):
        """use_system=True selects a built-in shared bucket; otherwise the
        credentials come from the user's dbQiniuStorage record."""
        if use_system is True:
            # NOTE(security): real-looking credentials are hard-coded here;
            # they belong in configuration, not source control.
            self.access_key = 'lnjEMrmedpqOhO2IAw4WtyGNbNVQrFPGEVLQuyFp'
            self.secret_key = 'Nw_4Cn0L0Z3LLOL-t-OvGOVVV2kSxY3_my85UJwG'
            self.bucket_name = 'czlib-system'
            self.download_url = '7xjrbh.com1.z0.glb.clouddn.com'
            self.q = Auth(self.access_key, self.secret_key)
        else:
            self.access_key = dbQiniuStorage.access_key.encode('utf-8')  # unicode to str
            self.secret_key = dbQiniuStorage.secret_key.encode('utf-8')
            self.bucket_name = dbQiniuStorage.bucket_name.encode('utf-8')
            self.download_url = dbQiniuStorage.download_url.encode('utf-8')
            self.q = Auth(self.access_key, self.secret_key)
            # print('QiniuStorage - __init__: user_id: %s' % user_id)
            # print('QiniuStorage - __init__: ak:%s, sk:%s, b_name:%s, d_url:%s' % (self.access_key, self.secret_key, self.bucket_name, self.download_url))

    def upload_stream(self, key, input_stream, data_size):
        """Upload a readable stream of data_size bytes; return its public URL."""
        # print('upload_stream: key=%s, data_size=%s' % (key, data_size))
        # key = ''
        # input_stream=self.fileobj.stream
        # print('upload_stream: 1')
        token = self.q.upload_token(self.bucket_name, key)
        # print('upload_stream: 2')
        ret, info = put_stream(token, key, input_stream, data_size)
        # print(info)
        assert ret['key'] == key
        return self.get_download_url(key)

    # 直接上传二进制流
    def upload_binary_stream(self, key, data):
        """Upload raw bytes with CRC checking; return the public URL."""
        # key = ''
        # data = 'hello bubby!'
        token = self.q.upload_token(self.bucket_name, key)
        ret, info = put_data(token, key, data, mime_type="application/octet-stream", check_crc=True)
        # print(info)
        assert ret['key'] == key
        return self.get_download_url(key)

    # 上传本地文件
    def upload_file(self, key, localfile):
        """Upload a local file; asserts the returned key and etag match."""
        # key = 'home/carlo/test_file.py'
        mime_type = "text/plain"
        params = {'x:a': 'a'}  # note: not actually passed to put_file below

        token = self.q.upload_token(self.bucket_name, key)
        ret, info = put_file(token, key, localfile, mime_type=mime_type, check_crc=True)
        # print(info)
        assert ret['key'] == key
        assert ret['hash'] == etag(localfile)

    #断点续上传、分块并行上传
    def upload_big_file(self, key, localfile):
        """Chunked/resumable upload for large local files."""
        mime_type = "text/plain"
        params = {'x:a': 'a'}
        # key = 'big'

        token = self.q.upload_token(self.bucket_name, key)
        progress_handler = lambda progress, total: progress
        ret, info = put_file(token, key, localfile, params, mime_type, progress_handler=progress_handler)
        # print(info)
        assert ret['key'] == key

    def single_delete(self, key):
        """Delete one object; 612 (not found) is accepted as success."""
        bucket = BucketManager(self.q)
        ret, info = bucket.delete(self.bucket_name, key)
        # print(info)
        assert info.status_code == 200 or info.status_code == 612

    # 批量删除文件
    def multi_delete(self, key_list):
        """Batch-delete keys; per-item 200 or 612 (already gone) accepted."""
        # key_list: [key1, key2, key3, ...]
        from qiniu import build_batch_delete
        bucket = BucketManager(self.q)
        ops = build_batch_delete(self.bucket_name, key_list)
        ret, info = bucket.batch(ops)
        # print('QiniuStorage - multi_delete: %s' % info)

        json_info = json.loads(info.text_body)
        for m_info in json_info:
            # "code":612,"data":{"error":"no such file or directory"
            assert m_info[u'code'] == 200 or m_info[u'code'] == 612

    def get_private_download_url(self, key):
        # bucket is private, http://<domain>/<key>?e=<deadline>&token=<dntoken>
        # NOTE(review): host is built from bucket_name + '.qiniudn.com'
        # instead of self.download_url -- confirm that is intentional.
        base_url = 'http://%s/%s' % (self.bucket_name + '.qiniudn.com', key)
        download_url = self.q.private_download_url(base_url, expires=3600)
        assert isinstance(download_url, object)
        return download_url

    def get_download_url(self, key):
        # bucket is public, http://<domain>/<key>
        download_url = 'http://%s/%s' % (self.download_url, key)
        return download_url
Exemplo n.º 30
0
class QiniuStorage(object):
    """Wrapper for a private qiniu bucket: uploads data under random keys,
    builds signed view/download URLs, verifies upload callbacks, and runs
    persistent fops (server-side transforms)."""

    def __init__(self):  #instanciate a Auth object with AK,SK
        self.q = Auth(settings.QINIU_ACCESS_KEY, settings.QINIU_SECRET_KEY)
        self.bucket = settings.QINIU_PRIVATE_BUCKET_NAME
        self.domain = settings.QINIU_PRIVATE_DOMAIN
        self.key = None  # last uploaded/selected object key

    def upload_token(
        self,
        ie=False
    ):  #got a token coding with policy and SK,when updating with policy ,it must get a corresponding token
        """Generate a random key (prefixed 'ie_' when ie=True) and its token."""
        key = str(uuid.uuid4()).replace('-', '')
        key = 'ie_' + key if ie else key
        return key, self.q.upload_token(self.bucket, key=key)

    def upload_data(self,
                    data,
                    mime_type='application/octet-stream',
                    ie=True):  #using it to upload a data
        """Upload raw data under a fresh key; return the key or raise."""
        key, token = self.upload_token(ie)
        ret, info = put_data(token, key, data, mime_type=mime_type)
        self.key = key
        if ret['key'] != key:  # key is the key of the updated data
            raise Code.file_update_fail
        return key

    def view_url(self, key, style=None):
        """Unsigned view URL; *style* appends a '!style' image-style suffix."""
        if style:
            base_url = '{}{}!{}'.format(self.domain, key, style)
        else:
            base_url = '{}{}'.format(self.domain, key)
        # return self.q.private_download_url(base_url)
        return base_url

    def down_url(self, key, file_name=None):
        """Signed download URL; *file_name* forces an attachment filename.

        NOTE(review): with file_name set, the path becomes "?<key>?<attname>"
        with no leading slash -- looks malformed; confirm against callers.
        """
        file_name = "?attname=" + urllib.quote(
            file_name.encode('utf8')) if file_name else ''
        url = '%s%s' % ("?{}".format(key), "?{}".format(file_name)
                        ) if file_name else "/{}".format(key)

        base_url = 'http://' + self.domain + url
        return self.q.private_download_url(base_url)

    def download(self, key):
        """Fetch the object's bytes; raises on non-200."""
        url = self.down_url(key)
        r = requests.get(url)
        if r.status_code != 200:
            raise Code.file_update_fail
        return r.content

    def verify_callback(self, request):
        """Verify a qiniu upload-callback request's Authorization header."""
        auth = request.META.get('HTTP_AUTHORIZATION', None)
        return self.q.verify_callback(auth, settings.QINIU_CALLBACK,
                                      request.body)

    def persistent_with_pfop(
        self, perstr, fops
    ):  #persistenet operations with fop sentence.the result save as 'origin key+perstr'
        """Run one persistent fop on self.key, saving as self.key + perstr."""
        auth = self.q
        bucket_name = self.bucket
        saveas_key = self.key + perstr
        entryuri = settings.QINIU_PRIVATE_BUCKET_NAME + ':' + saveas_key
        entryuri = urlsafe_base64_encode(entryuri)
        fops = fops + '|saveas/{}'.format(entryuri)

        pfop = PersistentFop(auth,
                             bucket_name,
                             pipeline=settings.QINIU_PIPELINE)
        ret, info = pfop.execute(self.key, [fops], 1)
        return [ret, info]

    def persistent_with_pfop_nfop(
            self, perstr,
            fops):  #persistent fop operation with nums of fop execute sentence
        """Run several persistent fops; fops[i] saves as self.key + perstr[i]."""
        auth = self.q
        bucket_name = self.bucket
        foplist = []
        for i in range(len(fops)):
            saveas_key = self.key + perstr[i]
            entryuri = settings.QINIU_PRIVATE_BUCKET_NAME + ':' + saveas_key
            entryuri = urlsafe_base64_encode(entryuri)
            fop = fops[i] + '|saveas/{}'.format(entryuri)
            foplist.append(fop)
        pfop = PersistentFop(auth,
                             bucket_name,
                             pipeline=settings.QINIU_PIPELINE)
        ret, info = pfop.execute(self.key, foplist, 1)
        return [ret, info]

    def get_key(self, key):
        """Setter: remember *key* as the current object for later fops."""
        self.key = key
Exemplo n.º 31
0
class qiniuHelper:
    """Thin convenience wrapper around the qiniu SDK: listing, storage-class
    changes, signed download URLs and uploads.

    NOTE(security): real-looking access/secret keys are hard-coded as
    defaults below; they should live in configuration, not source.
    """
    def __init__(self,access_key="HNkFJ3P5_5UpP5BOaXDFWTjZ6ZK9NIAEhafchRMh",secret_key="uwxfBCn-4Rc4M6B1awZx-084Kt-bMXYJbufZNKLk"):
        self.access_key = access_key
        self.secret_key = secret_key

    def __GetBucket(self):
        """Build and return a BucketManager for the configured credentials."""
        self.q = Auth(self.access_key, self.secret_key)
        bucket = BucketManager(self.q)
        if not bucket:
            # Fix: the original `raise(NameError,"...")` raises a tuple,
            # which is a TypeError on Python 3; raise a proper instance.
            raise NameError("连接七牛失败")
        else:
            return bucket

    def getListFile(self, bucket_name="bimuptest", prefix=None):
        """List all objects in *bucket_name* (optionally under *prefix*),
        following the continuation marker until the listing is exhausted."""
        # Page size per request.
        limit = 100
        # No delimiter: list every key rather than one directory level.
        delimiter = None
        # Continuation marker returned by the previous page.
        marker = None

        bucket = self.__GetBucket()

        listfile = []
        while True:
            ret, eof, info = bucket.list(bucket_name, prefix, marker, limit, delimiter)

            for item in ret["items"]:
                listfile.append(item)

            if "marker" in ret.keys():
                marker = ret["marker"]
            else:
                return listfile

    def changeSaveType(self, bucket_name="bimuptest", key="", saveType=1):
        """Change an object's storage class: 1 = infrequent access, 0 = standard."""
        bucket = self.__GetBucket()
        ret, info = bucket.change_type(bucket_name, key, saveType)  # 1表示低频存储,0是标准存储
        print(info)
        return info

    def getDownUrl(self, url, expires=3600):
        """Return a signed private download URL valid for *expires* seconds."""
        self.q = Auth(self.access_key, self.secret_key)
        downUrl = self.q.private_download_url(url, expires)
        print(downUrl)
        return downUrl

    def upLoadFile(self, bucket_name="bimuptest", keyName="ExamCode/SafeCode/QuestionID.anst", filePath=""):
        """Upload *filePath* under *keyName* and verify the returned key/etag."""
        self.q = Auth(self.access_key, self.secret_key)

        # 生成上传 Token,可以指定过期时间等
        token = self.q.upload_token(bucket_name, keyName, 3600)

        ret, info = put_file(token, keyName, filePath)
        print(info)
        assert ret['key'] == keyName
        assert ret['hash'] == etag(filePath)
Exemplo n.º 32
0
class Logs():
    """Builds signed download URLs for CDN access-log archives in qiniu.

    Python 2 code (uses a `print` statement). Relies on module-level
    access_key/secrek_ley/bucket/bucket_domain constants.
    """
    def __init__(self):
        #初始化Auth状态
        # NOTE(review): `secrek_ley` reads like a typo for a secret-key
        # constant defined elsewhere in this module -- confirm the name.
        self.au = Auth(access_key, secrek_ley)
        self.bucket = BucketManager(self.au)

    def time_format(self, date):
        '''
        校验日期格式 (validate a YYYY-MM-DD date string)
        :param date:
        :return: the date string on match, otherwise False
        '''
        # domain_YYYY-MM-DD.gz
        # prefix=domain_YYYY-MM-DD
        # 正则校验日期格式
        # NOTE(review): pattern is loose (accepts e.g. month "19", day "39")
        # and match() only anchors the start -- confirm inputs are trusted.
        reg = re.compile('[0-9]{4}-(0|1)[0-9]{1}-[0-3]{1}[0-9]{1}')
        if reg.match(date):
            return str(date)
        else:
            return False

    def log_list(self, prefix):
        '''
        根据前缀查询日志列表 (list log objects by key prefix)
        :param prefix: 查询前缀
        :return:返回查询结果
        '''
        res = self.bucket.list(bucket, prefix=prefix)
        return res

    def get_log_url(self, domain, date):
        '''
        生成日志下载链接 (build signed download links for a domain's logs)
        :param domain: 需要下载日志的域名
        :param date: 时间:YYYY-MM-DD
        :return:返回日志下载链接,有效期3600
        '''

        #生成查询前缀
        if domain is None:
            raise ValueError("Please Import Doamin!!!")

        prefix = ""
        if self.time_format(str(date)):
            prefix = domain + "_" + date
        else:
            raise ValueError("Date Format:YYYY-MM-DD")

        #生成日志下载url
        urls = []
        try:
            res = self.log_list(prefix)
            items = res[0]["items"]
            for item in items:
                key = item["key"]
                base_url = 'http://%s/%s' % (bucket_domain, key)
                private_url = self.au.private_download_url(base_url, 3600)
                urls.append(private_url)
            return urls
        except:
            # NOTE(review): bare except; `res` is unbound if log_list itself
            # raised, and concatenating the tuple will error in turn.
            print "ERROR:" + res
Exemplo n.º 33
0
def private_url(base_url: str, duration_sec: int = URL_DURATION):
    """Sign *base_url* for private-bucket download, valid *duration_sec* seconds."""
    qiniu_cfg = Facade.config["qiniu"]
    auth = Auth(qiniu_cfg["access_key"], qiniu_cfg["secret_key"])
    return auth.private_download_url(base_url, expires=duration_sec)
Exemplo n.º 34
0
class qiniuBackup:
    """Back up web sites and MySQL databases to a qiniu bucket (Python 2).

    Credentials/config are read from data/qiniuAs.conf in the form
    'AK|SK|bucket_name|bucket_domain'.
    """
    __qiniu = None
    __bucket_name = None
    __bucket_domain = None
    __error_msg = "ERROR: 无法连接到七牛云服务器,请检查[AK/SK/存储空间]设置是否正确!"

    def __init__(self):
        #获取七牛秘钥
        fp = open('data/qiniuAs.conf', 'r')
        if not fp:
            print 'ERROR: 请检查qiniuAs.conf文件中是否有七牛Key相关信息!'
            exit()
        keys = fp.read().split('|')
        if len(keys) < 4:
            print 'ERROR: 请检查qiniuAs.conf文件中的七牛Key信息是否完整!'
            exit()

        self.__bucket_name = keys[2]
        self.__bucket_domain = keys[3]

        #构建鉴权对象
        self.__qiniu = Auth(keys[0], keys[1])

    #上传文件
    def upload_file(self, filename):
        """Upload *filename*; its basename becomes the key. None on error."""
        try:
            from qiniu import put_file, etag, urlsafe_base64_encode
            #上传到七牛后保存的文件名
            key = filename.split('/')[-1]

            #生成上传 Token,可以指定过期时间等
            token = self.__qiniu.upload_token(self.__bucket_name, key,
                                              3600 * 2)
            result = put_file(token, key, filename)
            return result[0]
        except:
            print self.__error_msg
            return None

    #取回文件信息
    def get_files(self, filename):
        """Return qiniu stat metadata for *filename*, or None on error."""
        try:
            from qiniu import BucketManager
            bucket = BucketManager(self.__qiniu)
            result = bucket.stat(self.__bucket_name, filename)
            return result[0]
        except:
            print self.__error_msg
            return None

    #取回文件列表
    def get_list(self):
        """List bucket items; returns a placeholder entry when empty."""
        try:
            from qiniu import BucketManager
            bucket = BucketManager(self.__qiniu)
            result = bucket.list(self.__bucket_name)
            if not len(result[0]['items']):
                return [{
                    "mimeType": "application/test",
                    "fsize": 0,
                    "hash": "",
                    "key": "没有文件",
                    "putTime": 14845314157209192
                }]
            return result[0]['items']
        except:
            print self.__error_msg
            return None

    #下载文件
    def download_file(self, filename):
        """Return a signed private download URL (1h expiry), or None."""
        try:
            base_url = 'http://%s/%s' % (self.__bucket_domain, filename)
            private_url = self.__qiniu.private_download_url(base_url,
                                                            expires=3600)
            return private_url
        except:
            print self.__error_msg
            return None

    #删除文件
    def delete_file(self, filename):
        """Delete *filename* from the bucket, or None on error."""
        try:
            from qiniu import BucketManager
            bucket = BucketManager(self.__qiniu)
            result = bucket.delete(self.__bucket_name, filename)
            return result[0]
        except:
            print self.__error_msg
            return None

    #备份网站
    def backupSite(self, name, count):
        """Tar-gzip site *name*, upload it, record it, prune old backups.

        *count* is the number of backups to keep (string, used in both
        int() and string concatenation below).
        """
        sql = db.Sql()
        path = sql.table('sites').where('name=?', (name, )).getField('path')
        startTime = time.time()
        if not path:
            endDate = time.strftime('%Y/%m/%d %X', time.localtime())
            log = "网站[" + name + "]不存在!"
            print "★[" + endDate + "] " + log
            print "----------------------------------------------------------------------------"
            return

        backup_path = sql.table('config').where(
            "id=?", (1, )).getField('backup_path') + '/site'
        if not os.path.exists(backup_path):
            public.ExecShell("mkdir -p " + backup_path)

        filename = backup_path + "/Web_" + name + "_" + time.strftime(
            '%Y%m%d_%H%M%S', time.localtime()) + '.tar.gz'
        public.ExecShell("cd " + os.path.dirname(path) + " && tar zcvf '" +
                         filename + "' '" + os.path.basename(path) +
                         "' > /dev/null")
        endDate = time.strftime('%Y/%m/%d %X', time.localtime())

        if not os.path.exists(filename):
            log = "网站[" + name + "]备份失败!"
            print "★[" + endDate + "] " + log
            print "----------------------------------------------------------------------------"
            return

        #上传到七牛
        self.upload_file(filename)

        outTime = time.time() - startTime
        pid = sql.table('sites').where('name=?', (name, )).getField('id')
        sql.table('backup').add('type,name,pid,filename,addtime,size',
                                ('0', os.path.basename(filename), pid, 'qiniu',
                                 endDate, os.path.getsize(filename)))
        log = "网站[" + name + "]已成功备份到七牛云,用时[" + str(round(outTime, 2)) + "]秒"
        public.WriteLog('计划任务', log)
        print "★[" + endDate + "] " + log
        print "|---保留最新的[" + count + "]份备份"
        print "|---文件名:" + os.path.basename(filename)

        #清理本地文件
        public.ExecShell("rm -f " + filename)

        #清理多余备份
        backups = sql.table('backup').where(
            'type=? and pid=?', ('0', pid)).field('id,name,filename').select()

        num = len(backups) - int(count)
        if num > 0:
            for backup in backups:
                if os.path.exists(backup['filename']):
                    public.ExecShell("rm -f " + backup['filename'])
                self.delete_file(backup['name'])
                sql.table('backup').where('id=?', (backup['id'], )).delete()
                num -= 1
                print "|---已清理过期备份文件:" + backup['name']
                if num < 1: break
        return None

    #备份数据库
    def backupDatabase(self, name, count):
        """Dump database *name* via mysqldump, upload it, prune old backups."""
        sql = db.Sql()
        path = sql.table('databases').where('name=?',
                                            (name, )).getField('path')
        startTime = time.time()
        if not path:
            endDate = time.strftime('%Y/%m/%d %X', time.localtime())
            log = "数据库[" + name + "]不存在!"
            print "★[" + endDate + "] " + log
            print "----------------------------------------------------------------------------"
            return

        backup_path = sql.table('config').where(
            "id=?", (1, )).getField('backup_path') + '/database'
        if not os.path.exists(backup_path):
            public.ExecShell("mkdir -p " + backup_path)

        filename = backup_path + "/Db_" + name + "_" + time.strftime(
            '%Y%m%d_%H%M%S', time.localtime()) + ".sql.gz"

        import re
        mysql_root = sql.table('config').where("id=?",
                                               (1, )).getField('mysql_root')
        mycnf = public.readFile('/etc/my.cnf')
        rep = "\[mysqldump\]\nuser=root"
        sea = '[mysqldump]\n'
        # NOTE(review): the "******" below is a redaction artifact; as
        # written this line is not valid Python -- the original presumably
        # appended mysql_root (the root password) here.
        subStr = sea + "user=root\npassword="******"\n"
        mycnf = mycnf.replace(sea, subStr)
        if len(mycnf) > 100:
            public.writeFile('/etc/my.cnf', mycnf)

        public.ExecShell(
            "/www/server/mysql/bin/mysqldump --opt --default-character-set=utf8 "
            + name + " | gzip > " + filename)

        if not os.path.exists(filename):
            endDate = time.strftime('%Y/%m/%d %X', time.localtime())
            log = "数据库[" + name + "]备份失败!"
            print "★[" + endDate + "] " + log
            print "----------------------------------------------------------------------------"
            return

        mycnf = public.readFile('/etc/my.cnf')
        mycnf = mycnf.replace(subStr, sea)
        if len(mycnf) > 100:
            public.writeFile('/etc/my.cnf', mycnf)

        #上传到七牛
        self.upload_file(filename)

        endDate = time.strftime('%Y/%m/%d %X', time.localtime())
        outTime = time.time() - startTime
        pid = sql.table('databases').where('name=?', (name, )).getField('id')

        sql.table('backup').add('type,name,pid,filename,addtime,size',
                                (1, os.path.basename(filename), pid, 'qiniu',
                                 endDate, os.path.getsize(filename)))
        log = "数据库[" + name + "]已成功备份到七牛云,用时[" + str(round(outTime, 2)) + "]秒"
        public.WriteLog('计划任务', log)
        print "★[" + endDate + "] " + log
        print "|---保留最新的[" + count + "]份备份"
        print "|---文件名:" + os.path.basename(filename)

        #清理本地文件
        public.ExecShell("rm -f " + filename)

        #清理多余备份
        backups = sql.table('backup').where(
            'type=? and pid=?', ('1', pid)).field('id,name,filename').select()

        num = len(backups) - int(count)
        if num > 0:
            for backup in backups:
                if os.path.exists(backup['filename']):
                    public.ExecShell("rm -f " + backup['filename'])

                self.delete_file(backup['name'])
                sql.table('backup').where('id=?', (backup['id'], )).delete()
                num -= 1
                print "|---已清理过期备份文件:" + backup['name']
                if num < 1: break
        return None
Exemplo n.º 35
0
    token = q.upload_token('wm-test', None, 7200, {'callbackUrl':"http://298074.cicp.net:59295/upload/callback/", 'callbackBody':"name=$(key)&fname=$(fname)&hash=$(etag)&size=$(imageInfo.width)x$(imageInfo.height)", \
                                                   'saveKey':'aaa$(year)$(mon)$(day)$(hour)$(min)$(sec).jpg', 
                                                   'mimeType':'image/jpg'})
    return token

def upload():
    """Placeholder upload entry point; intentionally a no-op."""
    pass


# def get_dnurl(key):
#     base_url = qiniu.rs.make_base_url('wm-test.u.qiniudn.com', key)
 
if __name__ == '__main__':
#     u_t = upload_token()
#     print u_t
#     key = '1.jpg'
#     ret, info = put_file(u_t, None, '/home/liuxue/Downloads/mahua-logo.jpg', check_crc=True)
#     print(info)
#     print ret

    import requests

    # Sign and fetch a private URL that also applies a qiniu imageView2
    # thumbnail transform (mode 1, 200x200, interlaced) to the object.
    q = Auth(ACCESS_KEY, SECRET_KEY)

    bucket = 'wm-test'
    key = '1.jpg'
    base_url = 'http://%s/%s?imageView2/1/w/200/h/200/interlace/1' % (bucket + '.u.qiniudn.com', key)
    private_url = q.private_download_url(base_url, expires=3600)
    print(private_url)
    r = requests.get(private_url)
    print  r.status_code
Exemplo n.º 36
0
class FileService(BaseService):
    """Tornado service over the 'filehub' table: qiniu upload tokens, signed
    download URLs, hash-deduplicated uploads, and folder listing queries."""
    table = 'filehub'
    fields = 'id, filename, size, qiniu_id, owner, mime, hash, type, pid, lord, form, dir'

    def __init__(self, ak, sk):
        super().__init__()
        self.qiniu = Auth(access_key=ak, secret_key=sk)

    @run_on_executor
    def upload_token(self, key):
        """Build an upload token whose persistentOps saves a thumbnail as *key*."""
        saveas = '{bucket}:{key}'.format(bucket=settings['qiniu_file_bucket'],
                                         key=key)
        saveas_key = urlsafe_base64_encode(saveas)
        policy = QINIU_POLICY.copy()
        policy['persistentOps'] = QINIU_THUMB + '|saveas/' + saveas_key
        token = self.qiniu.upload_token(
            bucket=settings['qiniu_file_bucket'],
            expires=settings['qiniu_token_timeout'],
            policy=policy)
        return token

    @run_on_executor
    def private_download_url(self, qiniu_id):
        """Signed, time-limited download URL for the stored object."""
        url = settings['qiniu_file_bucket_url'] + '/' + qiniu_id
        expires = settings['qiniu_token_timeout']
        download_url = self.qiniu.private_download_url(url=url,
                                                       expires=expires)
        return download_url

    @coroutine
    def check_file_exist(self, hash):
        """Return the oldest non-empty-filename row with this hash, or None."""
        sql = """
              SELECT filename, size, qiniu_id, mime 
              FROM {table} 
              WHERE hash=%s and filename <> ''
              ORDER BY update_time LIMIT 1
              """.format(table=self.table)
        cur = yield self.db.execute(sql, [hash])
        return cur.fetchone()

    @coroutine
    def batch_upload(self, params):
        """Create a filehub row for an upload request.

        If a row with the same hash and filename exists, reuse its qiniu
        object (file_status=1); otherwise hand back an upload token.
        """
        arg = {
            'filename': params['filename'],
            'size': 0,
            'qiniu_id': '',
            'owner': params['owner'],
            'mime': '',
            'hash': params['hash'],
            'type': 0,
            'pid': params['pid'],
            'lord': params['lord'],
            'form': params['form']
        }
        resp = {'file_status': 0, 'token': '', 'file_id': ''}
        data = yield self.check_file_exist(params['hash'])
        if data and arg['filename'] == data['filename']:
            resp['file_status'] = 1
            arg['size'] = data['size']
            arg['qiniu_id'] = data['qiniu_id']
            arg['mime'] = data['mime']
        else:
            resp['token'] = yield self.upload_token(params['hash'])
        add_result = yield self.add(arg)

        # Fetch the parent row's absolute path to build this file's full path.
        pdata = yield self.select(conds={'id': params['pid']}, one=True)
        pdir = (pdata.get('dir') if pdata else '/0') + '/' + str(
            add_result['id'])
        yield self.update(sets={'dir': pdir}, conds={'id': add_result['id']})

        resp['file_id'] = add_result['id']
        return resp

    @coroutine
    def seg_page(self, params):
        """List a folder's children joined with owner names and download URLs."""
        sql = """
                SELECT f.id, f.filename, f.size, f.qiniu_id, u.name, f.mime, f.hash, f.type, f.pid, f.dir,
                CONCAT('{uri}', f.qiniu_id) as url, CONCAT('{uri}', f.hash) as thumb,
                DATE_FORMAT(f.create_time, %s) as create_time, DATE_FORMAT(f.update_time, %s) as update_time 
                FROM {filehub} as f, {user} as u
                WHERE f.pid = %s AND f.form = %s AND f.lord = %s AND f.owner = u.id
                ORDER BY f.create_time DESC
              """.format(filehub=self.table,
                         user='******',  # NOTE(review): looks redacted; probably the user table name
                         uri=DISK_DOWNLOAD_URL)
        arg = [
            FULL_DATE_FORMAT, FULL_DATE_FORMAT, params['file_id'],
            params['form'], params['lord']
        ]
        cur = yield self.db.execute(sql, arg)
        data = cur.fetchall()
        return data

    @coroutine
    def total_pages(self, params):
        """Count children under pid for the given form/lord scope."""
        sql = "SELECT count(*) as number FROM {table} WHERE pid = %s AND form = %s AND lord=%s".format(
            table=self.table)
        cur = yield self.db.execute(
            sql, [params['pid'], params['form'], params['lord']])
        data = cur.fetchone()
        return data['number']
Exemplo n.º 37
0
class __ImageTouken:
    """Qiniu image-token helper configured from an XML file.

    Holds account credentials, bucket/domain settings and an optional
    upload callback URL, and issues upload/download tokens through a
    qiniu ``Auth`` instance.
    """

    def __init__(self):
        # Credentials and bucket settings; populated by loadConfig().
        self.__acckey = ""
        self.__secretkey = ""
        self.__domain = ""
        self.__bucket = ""
        self.__callbackUrl = ""
        self.__expires = 3600  # default token lifetime in seconds

        self.__auth = None  # created once the keys are known

    def loadConfig(self):
        """Read imagetoken.xml (relative to this module) and build the Auth."""
        base_dir = os.path.split(__file__)[0]
        config_path = os.path.join(base_dir, "../../staticdata/utils/imagetoken.xml")

        root = xmlParser.parse(config_path).getroot()
        for node in root:
            if node.tag == "acckey":
                self.__acckey = node.text
            elif node.tag == "secretkey":
                self.__secretkey = node.text
            elif node.tag == "domain":
                self.__domain = node.text
            elif node.tag == "bucket":
                self.__bucket = node.text
            elif node.tag == "callbackUrl":
                self.__callbackUrl = node.text
            elif node.tag == "expires":
                self.__expires = int(node.text)

        self.__createAuth()

    def __createAuth(self):
        self.__auth = Auth(self.__acckey, self.__secretkey)

    def getExpires(self):
        """Return the configured token lifetime in seconds."""
        return self.__expires

    def uploadToken(self, key):
        """Issue an upload token for *key*, with a callback policy if configured."""
        policy = None
        if self.__callbackUrl:
            policy = {
                "callbackUrl": self.__callbackUrl,
                "callbackBody": "key=$(key)&hash=$(etag)",
                "returnBody": "key=$(key)&hash=$(etag)&name=$(fname)",
            }

        return self.__auth.upload_token(self.__bucket, key, self.__expires, policy)

    def downloadToken(self, key, imageFormat=""):
        """Return a signed private download URL for *key*.

        ``imageFormat`` is appended as a query string (image-processing
        directives) before signing.
        """
        base_url = "http://{}/{}".format(self.__domain, key)
        if imageFormat:
            base_url = "{}?{}".format(base_url, imageFormat)

        return self.__auth.private_download_url(base_url, self.__expires)
Exemplo n.º 38
0
class QiNiuFile(File):
    def __init__(self,
                 name,
                 bucket_name,
                 domain,
                 access_key,
                 secret_key,
                 mode='rb'):
        """Read-only file object backed by a Qiniu bucket entry.

        :param name: object key inside the bucket
        :param bucket_name: Qiniu bucket name
        :param domain: download domain the key is joined onto
        :param access_key: Qiniu access key
        :param secret_key: Qiniu secret key
        :param mode: must be a read mode; any 'w'/'a' mode raises
        :raises ValueError: if a write/append mode is requested
        """
        # Validate the mode before building any auth state.  (The original
        # code had a second combined read+write check after this one, but it
        # was unreachable: any mode containing 'w' or 'a' already raised here.)
        if 'w' in mode or 'a' in mode:
            raise ValueError('qi niu file can only read.')
        self.name = name
        self.mode = mode
        self.bucket_name = bucket_name
        self.domain = domain
        self.access_key = access_key
        self.secret_key = secret_key
        self.auth = Auth(self.access_key, self.secret_key)
        # Goes through the `file` property setter, i.e. sets self._file = None
        # so the first `.file` access triggers the lazy download.
        self.file = None

    def _get_file(self):
        """Lazily pull the remote content into an in-memory buffer."""
        if self._file is None:
            self._file = io.BytesIO(self._read())
        return self._file

    def _set_file(self, value):
        self._file = value

    # `file` reads like an attribute but downloads the object on first access.
    file = property(_get_file, _set_file)

    def _read(self):
        """Download the object bytes via a signed URL; empty bytes on failure."""
        signed_url = self.auth.private_download_url(
            urljoin(self.domain, self.name))
        response = requests.get(signed_url)
        if response.status_code != 200:
            return b''
        return response.content

    def __len__(self):
        """Size of the remote object in bytes, from a bucket stat call."""
        stat_ret, stat_info = BucketManager(self.auth).stat(
            self.bucket_name, self.name)
        if stat_ret is None:
            raise QiNiuError(stat_info)
        return stat_ret.get('fsize', 0)

    @cached_property
    def size(self):
        """Remote object size in bytes (stat result cached after first call)."""
        manager = BucketManager(self.auth)
        stat_ret, stat_info = manager.stat(self.bucket_name, self.name)
        if stat_ret is None:
            raise QiNiuError(stat_info)
        return stat_ret.get('fsize', 0)

    def read(self, num_bytes=None):
        # Delegate to the buffer behind the `file` property; the first access
        # triggers the lazy download of the remote object.
        return self.file.read(num_bytes)

    def close(self):
        # Close the in-memory buffer; the remote side needs no teardown.
        self.file.close()