Esempio n. 1
0
	def getMoreImages(search_term, search_index):
		"""Return a response of images for *search_term* starting at *search_index*.

		Serves previously-retrieved images from the on-disk cache when
		available; otherwise fetches a new batch from Google, persists the
		batch (summary plus one file per image), and returns the response.
		"""
		ipAddress = environ.get('REMOTE_ADDR', '127.0.0.1')

		# Record the search term (usage tracking).
		FS.recordSearchTerm(search_term)

		# Serve from the on-disk cache if we've already retrieved them.
		response = FS.getStoredImages(search_term, search_index)
		if response.images:
			return response

		# Cache miss: fetch a new batch of images.
		response = Google.searchImages(search_term,
		                               search_index,
		                               source_ip=ipAddress,
		                               safe="off")

		if response.error is not None:
			# Search failed: propagate the error response without caching.
			return response

		directory = FS.getImageDir(search_term)

		# Persist a summary so the next request can be served from disk.
		FS.saveSummaryOfImages(response.images,
		                       directory,
		                       response.exhausted,
		                       search_index + FS.BATCH_SIZE)

		# Save each image's info in its own file.
		for image in response.images:
			FS.saveImageToFile(image, directory)

		return response
Esempio n. 2
0
def get_price_1y(item_name, i):
    """Fetch a year of daily prices for *item_name* (row *i* of code_df),
    write them to 'as.json', and insert that document into MongoDB (mycol).

    The document holds OHLCV rows plus code, name, market price, and id.
    """
    code = code_df.code[i]
    url = get_url(item_name, code_df)
    m_price = 0
    try:
        # Best-effort market price via the financial-statement helper;
        # on any failure we keep the default of 0.
        a = FS(code)
        m_price = a.SC()
    except Exception:
        pass
    response = urlopen(url).read()
    a = BeautifulSoup(response, 'lxml-xml')
    # Fields per row: date / open / high / low / close / volume
    df_col = ['date', 's', 'e', 'u', 'r', 'q']
    rows = []
    file_data = {}
    daterows = []

    for node in a.find_all('item'):
        rows.append(node['data'].split("|"))

    with open('as.json', 'w', encoding="utf-8") as make_file:
        for line in rows:
            # A zero open price marks a day with no trades: backfill
            # open/high/low from the close.
            if line[1] == "0":
                line[1], line[2], line[3] = line[4], line[4], line[4]
            # YYYYMMDD -> YYYY-MM-DD
            date = line[0][:4] + '-' + line[0][4:6] + '-' + line[0][6:]
            daterows.append({"time": date, "open": line[1], "high": line[2],
                             "low": line[3], "close": line[4],
                             "volume": line[5]})
        file_data['data'] = daterows
        file_data["code"] = code
        file_data["name"] = item_name
        file_data['market_price'] = m_price
        file_data['id'] = i
        json.dump(file_data, make_file, ensure_ascii=False, indent="\t")
    # Round-trip through the file so MongoDB receives exactly what was saved.
    with open('as.json', encoding="utf-8") as json_file:
        json_data = json.load(json_file)
    mycol.insert_one(json_data)
Esempio n. 3
0
            return response

        from Image import Image
        images = []
        for (current_index,
             json_image) in enumerate(json['responseData']['results']):
            imageUrl = json_image['unescapedUrl']
            meta = httpy.get_meta(imageUrl, timeout=5)
            if 'Content-type' not in meta or 'image' not in meta[
                    'Content-Type']:
                # Image is not an image.
                continue
            try:
                image = Image(json=json_image)
                image.imageIndex = start_index + current_index + 1
                images.append(image)
            except Exception, e:
                # Don't fail completely when deserializing single images
                pass
        response.images = images
        return response


if __name__ == '__main__':
    # Smoke test (Python 2 syntax): fetch the first "butterfly" batch,
    # print each image as JSON, then persist a summary via FS.
    response = Google.searchImages("butterfly", 0)
    for image in response.images:
        print image.toJSON()
    from FS import FS
    FS.saveSummaryOfImages(response.images, 'butterfly', response.exhausted,
                           len(response.images))
Esempio n. 4
0
		if 'results' not in json['responseData']:
			response.error = 'invalid response from google while searching "%s":\n%s' \
				% (search_text, response)
			response.retryableError = False
			return response

		from Image import Image
		images = []
		for (current_index, json_image) in enumerate(json['responseData']['results']):
			imageUrl = json_image['unescapedUrl']
			meta = httpy.get_meta(imageUrl, timeout=5)
			if 'Content-type' not in meta or 'image' not in meta['Content-Type']:
				# Image is not an image.
				continue
			try:
				image = Image(json=json_image)
				image.imageIndex = start_index + current_index + 1
				images.append(image)
			except Exception, e:
				# Don't fail completely when deserializing single images
				pass
		response.images = images
		return response

if __name__ == '__main__':
	# Smoke test (Python 2 syntax): fetch the first "butterfly" batch,
	# print each image as JSON, then persist a summary via FS.
	response = Google.searchImages("butterfly", 0)
	for image in response.images:
		print image.toJSON()
	from FS import FS
	FS.saveSummaryOfImages(response.images, 'butterfly', response.exhausted, len(response.images))
    'http://kind.krx.co.kr/corpgeneral/corpList.do?method=download&searchType=13',
    header=0)[0]
# Zero-pad stock codes to six digits, then keep and rename the
# company-name (회사명) and stock-code (종목코드) columns to name/code.
code_df.종목코드 = code_df.종목코드.map('{:06d}'.format)
code_df = code_df[['회사명', '종목코드']]
code_df = code_df.rename(columns={'회사명': 'name', '종목코드': 'code'})

# One row per ticker: name (종목), code, and three valuation ratios.
value_df = pd.DataFrame(columns=['종목', 'code', 'PER', 'PBR', 'PSR'])

# Screen every listed ticker: skip small caps and rows with missing
# ratios, and record PER/PBR/PSR for the rest in value_df.
for cnt in range(len(code_df)):
    item_name = code_df.loc[cnt, 'name']
    code = code_df.loc[cnt, 'code']

    try:
        fs = FS(code)
        # Skip small caps (SC below 10,000) and any ticker missing a ratio.
        if fs.get_SC() < 10000:
            continue
        if math.isnan(fs.get_PER()) or math.isnan(fs.get_PBR()) or math.isnan(
                fs.get_PSR()):
            continue
        # Rows are written at cnt + 1; the loop variable itself is rebound
        # by range() on the next iteration, so this only shifts the index.
        cnt += 1
        print(item_name)
        value_df.loc[cnt, ['종목']] = item_name
        value_df.loc[cnt, ['code']] = code
        value_df.loc[cnt, ['PER']] = fs.get_PER()
        value_df.loc[cnt, ['PBR']] = fs.get_PBR()
        value_df.loc[cnt, ['PSR']] = fs.get_PSR()
    except Exception:
        # Best-effort per ticker: any scrape/parse failure skips the row.
        continue
Esempio n. 6
0
    header=0)[0]
# Zero-pad stock codes to six digits, then keep and rename the
# company-name (회사명) and stock-code (종목코드) columns to name/code.
code_df.종목코드 = code_df.종목코드.map('{:06d}'.format)
code_df = code_df[['회사명', '종목코드']]
code_df = code_df.rename(columns={'회사명': 'name', '종목코드': 'code'})

# One binary signal per column (1 = favorable); SUM presumably holds the
# total score — TODO confirm where SUM is filled in.
value_df = pd.DataFrame(columns=[
    '종목', 'ROA', 'CFO', 'ROA_DIFF', 'ACCRUAL', 'LIQUID', 'MARGIN', 'TURN',
    'SUM'
])

# for cnt in range(len(code_df)):
# Demo-sized run over the first five tickers. Each fundamental is scored
# as a 1/0 signal (looks Piotroski-style — confirm against FS definitions).
for cnt in range(5):
    item_name = code_df.loc[cnt, 'name']
    code = code_df.loc[cnt, 'code']
    try:
        fs = FS(code)
    except KeyError:
        # No financial statements available for this code.
        continue
    # Rows are written at cnt + 1; the loop variable is rebound by range()
    # on the next iteration, so this only shifts the index.
    cnt += 1
    value_df.loc[cnt, ['종목']] = item_name
    value_df.loc[cnt, ['ROA']] = 1 if fs.get_ROA() > 0 else 0
    value_df.loc[cnt, ['CFO']] = 1 if fs.get_CFO() > 0 else 0
    value_df.loc[cnt, ['ROA_DIFF']] = 1 if fs.get_ROA_DIFF() > 0 else 0
    value_df.loc[cnt, ['ACCRUAL']] = 1 if fs.get_CFO() > fs.get_ROA() else 0
    value_df.loc[cnt, ['LIQUID']] = 1 if fs.get_LIQUID_cur(
    ) - fs.get_LIQUID_past() > 0 else 0
    value_df.loc[cnt, ['MARGIN']] = 1 if fs.get_MARGIN_cur(
    ) - fs.get_MARGIN_past() > 0 else 0
    value_df.loc[
        cnt, ['TURN']] = 1 if fs.get_TURN_cur() - fs.get_TURN_past() > 0 else 0
Esempio n. 7
0
def savefile():
    """Persist the raw request body as a file for the given user.

    Query params: uid (user id) and fname (target file name); the request
    body is the file content. Always returns the literal string "success".
    """
    user_id = request.args.get("uid")
    file_name = request.args.get("fname")
    body = request.get_data()
    store = FS(user_id)
    store.save_file(file_name, body)
    return "success"
Esempio n. 8
0
def retfile():
    """Return the content of one stored file for the given user.

    NOTE(review): reads the query param "name" here, while savefile() uses
    "fname" — confirm whether this asymmetry is intentional.
    """
    uid = request.args.get("uid")
    fname = request.args.get("name")
    return FS(uid).get_file_content(fname)
Esempio n. 9
0
def retfiles():
    """Return the list of stored files for the user given by the uid param."""
    user_id = request.args.get("uid")
    store = FS(user_id)
    return store.get_file_list()
from bs4 import BeautifulSoup
from tabulate import tabulate
from FS import FS

import requests
import pandas as pd

# for fs_list in fs_lists2:
#     print(fs_list)
#     print('')
#     print('')

# Demo: print every financial metric for one ticker (005930).
code = '005930'
fs = FS(code)

# Same getters, same order as before — just driven by a loop.
for metric in (fs.get_PER, fs.get_PBR, fs.get_PSR, fs.get_ROA, fs.get_CFO,
               fs.get_ROA_DIFF, fs.get_LIQUID, fs.get_MARGIN, fs.get_TURN):
    print(metric())
Esempio n. 11
0
from bs4 import BeautifulSoup
from tabulate import tabulate

# Download the full KRX listed-company table (company name / stock code).
code_df = pd.read_html(
    'http://kind.krx.co.kr/corpgeneral/corpList.do?method=download&searchType=13',
    header=0)[0]
# Zero-pad stock codes to six digits; keep and rename name/code columns.
code_df.종목코드 = code_df.종목코드.map('{:06d}'.format)
code_df = code_df[['회사명', '종목코드']]
code_df = code_df.rename(columns={'회사명': 'name', '종목코드': 'code'})

# One row per ticker: name (종목) plus three valuation ratios.
value_df = pd.DataFrame(columns=['종목', 'PER', 'PBR', 'PSR'])

# Demo-sized run over the first five tickers only.
for cnt in range(5):
    item_name = code_df.loc[cnt, 'name']
    code = code_df.loc[cnt, 'code']
    fs = FS(code)
    # Rows are written at cnt + 1; the loop variable is rebound by range()
    # on the next iteration, so this only shifts the index.
    cnt += 1
    value_df.loc[cnt, ['종목']] = item_name
    value_df.loc[cnt, ['PER']] = fs.get_PER()
    value_df.loc[cnt, ['PBR']] = fs.get_PBR()
    value_df.loc[cnt, ['PSR']] = fs.get_PSR()

# Rank each ratio ascending (lower = cheaper), sum the three ranks into
# RANK, and sort so the best-ranked tickers come first.
value_df['PERRANK'] = value_df['PER'].rank(axis=0)
value_df['PBRRANK'] = value_df['PBR'].rank(axis=0)
value_df['PSRRANK'] = value_df['PSR'].rank(axis=0)
# print(tabulate(value_df, headers='keys', tablefmt='psql'))

value_df['RANK'] = value_df.apply(
    lambda row: (row['PERRANK'] + row['PBRRANK'] + row['PSRRANK']), axis=1)
# value_df['RANK'] = value_df[['PER', 'PBR', 'PSR']].sum(axis=1)
value_df = value_df.sort_values(by=["RANK"], ascending=[True])