示例#1
0
        survival = survivalLab.find('p').string
        temp.append(survival.strip())  # 添加存活时间
        if index == 1:
            solo.append(temp)
        elif index == 2:
            double.append(temp)
        elif index == 3:
            squad.append(temp)
        index += 1
    final.append(temp)
    print('getData success')
    return final


if __name__ == '__main__':
    # Entry point: scrape PUBG stats for each player from dak.gg and
    # collect per-mode rows, then prepare the INSERT statements below.
    common = Common()
    # Profile pages live at hostUrl + <player name> + endUrl.
    hostUrl = 'https://dak.gg/profile/'
    endUrl = '/2018-08/as'
    # NOTE(review): this is a set literal, so iteration order is
    # unspecified — players may be processed in any order. Confirm
    # downstream code does not rely on ordering.
    nameList = {'HRHLXL520', 'zhufuhengniubi', 'LittleNice_', 'Z_W_W_Z_S'}
    solo = []    # accumulated rows for solo-queue stats
    double = []  # accumulated rows for duo-queue stats
    squad = []   # accumulated rows for squad stats
    for name in nameList:
        url = hostUrl + name + endUrl
        html = common.getUrlContent(url)
        # getData parses the page and appends this player's rows into the
        # three per-mode lists passed in, returning the parsed rows too.
        result = getData(html, name, solo, double, squad)
        # common.writeData(result, 'D:/workSpace/pySpace/venv/Include/CJ/runned.csv')

    # Parameterized INSERT statements, one per game mode (10 value
    # placeholders each; the uuid column is auto-generated via null).
    # Presumably executed by a batch-insert helper further below — not
    # visible in this chunk.
    solosql = "insert into solo(solo_uuid, level, user_name, KD, win_rate, top_ten_rate, average_demage, count_game, most_kill, head_shot_rate, survived) value(null, %s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
    dousql = "insert into doub(doub_uuid, level, user_name, KD, win_rate, top_ten_rate, average_demage, count_game, most_kill, head_shot_rate, survived) value(null, %s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
    squadsql = "insert into squad(squad_uuid, level, user_name, KD, win_rate, top_ten_rate, average_demage, count_game, most_kill, head_shot_rate, survived) value(null, %s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
示例#2
0
# -*- coding:utf-8 -*-
# Author: zww

from Include.commons.common import Common
from bs4 import BeautifulSoup
import json
import re
import pymysql

common = Common()

# Fetch the product ids listed on a Tmall search-result page.
def getProductIdList(keyword='内衣'):
    """Return the product ids found on Tmall's search page for *keyword*.

    The default keyword preserves the original hard-coded query ('内衣',
    underwear), so existing callers are unaffected. Each id is taken from
    the ``data-id`` attribute of the ``div.product`` elements on the page.
    """
    url = 'https://list.tmall.com/search_product.htm?q=' + keyword  # q = search keyword
    html = common.getUrlContent(url)  # download the result page
    soup = BeautifulSoup(html, 'html.parser')
    # Every product card is a <div class="product" data-id="...">.
    productList = soup.find_all('div', {'class': 'product'})
    idList = [product['data-id'] for product in productList]
    return idList

# 获取商品评论数据
def getCommentDetail(itemId,currentPage):
    url = 'https://rate.tmall.com/list_detail_rate.htm?itemId=' + str(
        itemId) + '&sellerId=2451699564&order=3&currentPage=' + str(currentPage) + '&append=0callback=jsonp336'
    # itemId 产品id ; sellerId 店铺id 字段必须有值,但随意值就行
    html = common.getUrlContent(url)  # 获取网页信息
    # 删掉返回的多余信息
    html = html.replace('jsonp128(','') #需要确定是不是 jsonp128
示例#3
0
# encoding: utf-8
# author zww

from Include.commons.common import Common
from pyecharts import Bar

common = Common()


def showSize():
    """Render a pie chart of bra-cup-size counts to ``size.html``.

    Queries the count of each cup size (A–G) from the ``bra`` table and
    plots them with pyecharts.
    """
    # BUG FIX: the file only imports Bar from pyecharts at the top, but
    # this function uses Pie — without this import it raises NameError.
    from pyecharts import Pie

    results = common.queryData(
        """select count(*) from bra where bra_size like '%A' 
                union all select count(*) from bra where bra_size like '%B' 
                union all select count(*) from bra where bra_size like '%C' 
                union all select count(*) from bra where bra_size like '%D' 
                union all select count(*) from bra where bra_size like '%E' 
                union all select count(*) from bra where bra_size like '%F' 
                union all select count(*) from bra where bra_size like '%G'"""
    )  # one count row per cup size, in A..G order
    # Labels pair with values positionally; G is deliberately second in
    # both lists (it lines up with results[6]).
    attr = ["A罩杯", 'G罩杯', "B罩杯", "C罩杯", "D罩杯", "E罩杯", "F罩杯"]
    v1 = [
        results[0][0], results[6][0], results[1][0], results[2][0],
        results[3][0], results[4][0], results[5][0]
    ]
    pie = Pie("内衣罩杯", width=1300, height=620)
    pie.add("", attr, v1, is_label_show=True)
    pie.render('size.html')
    print('success')


def showColor():
示例#4
0
    data = body.find('div',{'id': '7d'})
    ul = data.find('ul')
    li = ul.find_all('li')

    for day in li:
        temp = []
        date = day.find('h1').string
        temp.append(date) #添加日期
        inf = day.find_all('p')
        weather = inf[0].string #天气
        temp.append(weather)
        temperature_highest = inf[1].find('span').string #最高温度
        temperature_low = inf[1].find('i').string  # 最低温度
        temp.append(temperature_highest)
        temp.append(temperature_low)
        final.append(temp)
    print('getDate success')
    return final

if __name__ == '__main__':
    # Entry point: scrape the 7-day forecast, save it to CSV, then batch
    # insert it into the WEATHER table.
    common = Common()
    # BUG FIX: the active URL had been swapped for a Tmall review
    # endpoint (left over from another script), but getData() parses the
    # weather.com.cn page structure ('7d' div / ul / li) — restore the
    # weather URL that was commented out.
    url = 'http://www.weather.com.cn/weather/101210101.shtml'
    html = common.getUrlContent(url)    # download the page
    result = getData(html)  # parse into rows of (date, weather, high, low)
    common.writeData(result, 'D:/py_work/venv/Include/amusement/weather.csv')  # dump rows to CSV
    # createTable()  # the table only needs to be created once
    # BUG FIX: getData appends the HIGH temperature before the LOW one,
    # so the column list must name w_temperature_high before
    # w_temperature_low or the two values land in swapped columns.
    sql = 'insert into WEATHER(w_id, w_date, w_detail, w_temperature_high, w_temperature_low) value(null, %s,%s,%s,%s)'
    common.patchInsertData(sql, result)  # batch insert all rows
    print('my first python file')  # typo 'frist' fixed
示例#5
0
# -*- codeing = utf-8 -*-
# @Time     : 11/3 9:59
# @Author   : River
# @File     : mycharts.py
# @Software : PyCharm

from pyecharts import Pie
from Include.commons.common import Common

if __name__ == '__main__':
    common = Common()
    results = common.queryData(
        """select count(*) from bra where bra_size like '%A' 
            union all select count(*) from bra where bra_size like '%B' 
            union all select count(*) from bra where bra_size like '%C' 
            union all select count(*) from bra where bra_size like '%D' 
            union all select count(*) from bra where bra_size like '%E' 
            union all select count(*) from bra where bra_size like '%F' 
            union all select count(*) from bra where bra_size like '%G'"""
    )  # 获取每个罩杯数量
    attr = ["A罩杯", 'G罩杯', "B罩杯", "C罩杯", "D罩杯", "E罩杯", "F罩杯"]
    v1 = [
        results[0][0], results[6][0], results[1][0], results[2][0],
        results[3][0], results[4][0], results[5][0]
    ]
    pie = Pie("内衣罩杯", width=1300, height=620)
    pie.add("", attr, v1, is_label_show=True)
    pie.render('size.html')
    print('success')

    results = common.queryData(
示例#6
0
# -*- coding:utf-8 -*-
# Author: zww
from Include.commons.common import Common
import json
import datetime
import urllib.request
import PIL
from PIL import Image
from PIL import ImageDraw
from PIL import ImageFont

common = Common()  # author's own wrapper/utility class (HTTP + misc helpers)
# API keys for the two weather providers used by DataUtil below.
key = 'cc186c9881b94b42b886a6d634c63002'  # avatardata.cn API key
key_jh = '777d35900bffe58af88f56069b12785c'  # juhe.cn API key
# First day of the story/reading schedule (used as the epoch for day counts).
readBookStartDay = datetime.datetime(2019, 2, 17)


class DataUtil():

    # Fetch weather info for a city.
    def getWeatherData(self, cityname):
        """Query the avatardata.cn weather API for *cityname* and return
        the parsed text summary.

        Calls ``common.get`` on the avatardata endpoint and runs the raw
        response through ``self.parseInfo_afd``. The juhe.cn endpoint is
        kept below as a documented alternative but is never called.
        """
        # BUG FIX: the original URL string began with a stray leading
        # space (' http://...'), which can break the request.
        url = 'http://api.avatardata.cn/Weather/Query?key=' + key + '&cityname=' + cityname
        # Alternative provider (unused dead local removed):
        #   'http://v.juhe.cn/weather/index?key=' + key_jh + '&cityname=' + cityname
        results = common.get(url)
        text = self.parseInfo_afd(results)
        print(text)
        return text
示例#7
0
# encoding: utf-8
# author zww

from pyecharts import Radar
from Include.commons.common import Common

if __name__ == '__main__':
    common = Common()
    results = common.queryData("select * from squad")  #获取四排战绩

    # 初始化雷达图
    schema = [("KD", 1.6), ("吃鸡率", 6), ("Top10", 45), ("场均伤害", 220),
              ("最多击杀", 9), ("爆头率", 35)]
    radar = Radar(width=1300, height=620)
    radar.config(schema)
    # 设置样例颜色
    range_color = [
        '#313695', '#a50026', '#74add1', '#fdae61', '#e0f3f8', '#ffffbf',
        '#fee090', '#f46d43'
    ]
    index = 0
    for result in results:
        data = [[
            str(result[3]),
            str(result[4]),
            str(result[5]),
            str(result[6]),
            str(result[8]),
            str(result[9])
        ]]
        radar.add(result[2], data, item_color=range_color[index])