Notice
Recent Posts
Recent Comments
Link
일 | 월 | 화 | 수 | 목 | 금 | 토 |
---|---|---|---|---|---|---|
1 | 2 | 3 | 4 | 5 | 6 | 7 |
8 | 9 | 10 | 11 | 12 | 13 | 14 |
15 | 16 | 17 | 18 | 19 | 20 | 21 |
22 | 23 | 24 | 25 | 26 | 27 | 28 |
29 | 30 | 31 |
Tags
- DAPP
- 스마트컨트랙트
- 블록체인개발
- 공개키
- smart contract
- solidity
- js
- python
- node js
- 문자열
- 마스터링 이더리움
- pythonic
- 알고리즘
- 마스터링비트코인
- keras
- 파이썬
- Redis
- 솔리디티
- 개발
- Ethereum
- 개인키
- 블록체인
- 레디스
- 비트코인
- 마스터링 비트코인
- 백서
- 암호화폐
- 이더리움
- 주소
- javascript
Archives
- Today
- Total
개발이야기
[인프런 강의] Python lab_10 stock_data_crawler 본문
드디어 강의 마지막 과제다.
구글에서 제공하는 주식데이터 중 삼성데이터를 이용한다.
csv 형식으로 데이터를 받아 데이터를 처리한다
# -*- coding: utf8 -*-
import urllib.request
import csv
def get_stock_data(url_address):
    """Download CSV stock data from *url_address* and parse it into rows.

    Returns a list of lists of strings: row 0 is the CSV header, each
    following row is one day of stock data split on commas.
    """
    # Use the response as a context manager so the connection is closed
    # (the original leaked the urlopen response).
    with urllib.request.urlopen(url_address) as response:
        # strip() is required: a trailing newline would otherwise yield a
        # bogus empty last row after splitting.
        stock_data_string = response.read().decode("utf8").strip()
    # One row per line, one cell per comma-separated field.
    return [line.split(",") for line in stock_data_string.split("\n")]
def get_header_data(stock_data):
    """Return the header row (column names) of the parsed stock data."""
    return stock_data[0]
def get_attribute_data(stock_data, attribue, year=None, month=None):
    """Extract [date, value] pairs for one attribute, optionally filtered.

    Parameters
    ----------
    stock_data : list[list[str]] -- parsed CSV, row 0 is the header,
        column 0 holds dates formatted like "30-Dec-14".
    attribue : str -- header name of the column to extract
        (parameter name kept as-is for caller compatibility).
    year : int or str, optional -- e.g. 2014 or "2014"; only its last two
        digits are compared against the date string.
    month : int, optional -- 1..12; ignored when *year* is None
        (original behaviour preserved).

    Returns a list whose first element is [date_header, attribute_header]
    followed by the matching [date, value] rows.
    """
    monthletter_dict = {
        "Dec": 12, "Nov": 11, "Oct": 10, "Sep": 9, "Aug": 8,
        "Jul": 7, "Jun": 6, "May": 5, "Apr": 4, "Mar": 3, "Feb": 2, "Jan": 1}
    header_row = stock_data[0]
    # Locate the requested column.  As in the original: the LAST matching
    # header wins, and an unknown attribute silently falls back to
    # column 0 (the date column).
    idx = 0
    for pos, column_name in enumerate(header_row):
        if column_name == attribue:
            idx = pos
    result = [[header_row[0], header_row[idx]]]
    # Month number -> three-letter month name used inside the date strings.
    # Stays "" when no month filter applies (or the month is unknown),
    # which makes the substring test below match everything.
    target = ""
    if month is not None:
        for name, number in monthletter_dict.items():
            if number == month:
                target = name
                break
    year_suffix = str(year)[2:]  # "2014" -> "14"
    for row in stock_data[1:]:
        date = row[0]
        if year is None:
            # No filtering at all; a month without a year is ignored.
            result.append([date, row[idx]])
        elif month is None:
            # date[3:] is the "Mon-YY" part of "DD-Mon-YY".
            if year_suffix in date[3:]:
                result.append([date, row[idx]])
        else:
            if target in date and year_suffix in date[3:]:
                result.append([date, row[idx]])
    return result
def get_average_value_of_attribute(stock_data, attribue, year=None, month=None):
    """Return the arithmetic mean of *attribue* over the filtered rows.

    Filtering by *year*/*month* follows get_attribute_data.  Raises
    ZeroDivisionError when no rows match the filter (the original's
    behaviour, preserved so callers see the same failure mode).
    """
    data = get_attribute_data(stock_data, attribue, year, month)
    # data[0] is the header pair; the attribute value is the second cell.
    values = [float(row[1]) for row in data[1:]]
    return sum(values) / len(values)
def write_csv_file_by_result(stock_data, filename):
    """Write *stock_data* (list of rows of strings) to *filename* as CSV.

    Fixes two defects in the original: the file handle was never closed,
    and the comma-placement test (`if not i == data[-1]`) dropped a comma
    after ANY cell whose value equalled the row's last cell.
    """
    with open(filename, "w", encoding="utf-8") as f:
        for row in stock_data:
            f.write(",".join(row) + "\n")
def separate_user_query(user_input):
    """Split a comma-separated query string into trimmed fields.

    Example: "SAMSUNG, 2014-12, Open, ALL"
             -> ["SAMSUNG", "2014-12", "Open", "ALL"]

    The original removed exactly one leading character from every field
    after the first, so "A,B" (no space after the comma) lost the first
    character of "B"; stripping whitespace handles both spacings.
    """
    return [field.strip() for field in user_input.split(",")]
def main():
    """Interactive query loop over Samsung (KRX:005930) daily stock data.

    Query format: "<name>, <YYYY-MM>, <attribute>, <command>[, <filename>]"
    where command is ALL (print rows), MEAN (print average) or
    FILE (write CSV to <filename>).  Entering 0 exits the loop.
    """
    print("Stock Data Crawler Program!!")
    url = 'http://finance.google.com/finance/historical?q=KRX:005930&startdate=2013-01-01&enddate=2015-12-30&output=csv'
    stock_data = get_stock_data(url)
    while True:
        raw = input("Insert Query Command - ex) SAMSUNG, 2014-12, Open, ALL : ")
        # A plain integer 0 is the exit command; non-numeric input is a
        # query.  (Original used a bare except and a dead `flag = True`.)
        try:
            if int(raw) == 0:
                print("Good Bye")
                break
        except ValueError:
            pass
        query = separate_user_query(raw)
        year = query[1][:4]            # "2014-12" -> "2014" (string is fine
        month = int(query[1][5:])      # downstream); "12" -> 12
        command = query[3].upper()
        if command == "ALL":
            for row in get_attribute_data(stock_data, query[2], year, month):
                print(' '.join(row))
        elif command == "MEAN":
            print(get_average_value_of_attribute(stock_data, query[2], year, month))
        elif command == "FILE":
            result = get_attribute_data(stock_data, query[2], year, month)
            write_csv_file_by_result(result, query[4])
            print(query[4] + " file created")
# ==================================
# Reconstructed entry-point guard (the source was garbled here): only run
# the interactive loop when executed as a script, not on import.
if __name__ == "__main__":
    main()
Comments