# datacrawling.py
# -*- coding: utf-8 -*-
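"""Crawl daily wholesale-market auction records from the public open API
(dataset Grid_20141119000000000012_1).

makecsvfile(day) saves one day's auction rows to '<day>data.csv';
checkdatanum(day) returns the total record count reported for that day.
"""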

from bs4 import BeautifulSoup
from urllib.request import urlopen
import pandas as pd
import requests
import os

url = 'http://211.237.50.150:7080/openapi/5e5e94876b673efe7161d3240516d65476da16210a391a9d6f31224c54a1fdaf/xml/Grid_20141119000000000012_1/'

# running total of records across all checked days
number = 0

def makecsvfile(day):

    # file name setting
    output_file = str(day)+'data.csv'

    # create the dataframe with the API's column names
    df = pd.DataFrame(columns=['row_num', 'aucng_de', 'pblmng_whsal_mrkt_nm', 'pblmng_whsal_mrkt_cd',
                               'cpr_nm', 'cpr_cd', 'prdlst_nm', 'prdlst_cd', 'spcies_nm', 'spcies_cd',
                               'grad', 'grad_cd', 'delngbundle_qy', 'stndrd', 'stndrd_cd', 'delng_qy',
                               'mumm_amt', 'avrg_amt', 'mxmm_amt', 'auc_co'])
    
    # per-day counters
    i = 0       # row index within this day's dataframe
    number = 0  # page counter; the API is queried 1000 rows at a time
    
    while True:

        # build the request URL for rows number*1000+1 .. (number+1)*1000 and fetch the XML
        myurl = url + str(number*1000+1) + '/' + str((number+1)*1000) + '?AUCNG_DE=' + str(day)
        data = urlopen(myurl).read()
        soup = BeautifulSoup(data, 'html.parser')

        # check the result code; anything other than 'INFO-000' means an error
        result_code = soup.find('result').code.string
        if result_code != 'INFO-000':
            print(result_code)
            break
        
        # check whether all rows for the day have been fetched
        start_num = int(soup.find('startrow').string)
        total_num = int(soup.find('totalcnt').string)
        print(start_num)
        if total_num < start_num:
            print('find all')
            break

        # append every <row> element to the dataframe
        items = soup.find_all('row')
        for item in items:
            # the column names in df.columns match the tag names inside each <row>
            df.loc[i] = [item.find(col).string for col in df.columns]
            i += 1
        
        # move on to the next 1000 rows
        number += 1

    # print the day's total row count as a sanity check
    print(str(day), ' : ', str(i))
    # export the day's rows to a csv file
    df.to_csv(os.path.join('./', output_file), encoding='euc-kr', index=False)

def checkdatanum(day):
    # request a single row just to read the day's total record count (totalcnt)
    myurl = url + '1/1?AUCNG_DE=' + str(day)

    req = requests.get(myurl)
    html = req.text
    soup = BeautifulSoup(html, 'html.parser')
    product_num = int(soup.find('totalcnt').string)
    print(day, ':', product_num)
    return product_num


# dates (YYYYMMDD) to check; sum the record counts for the whole week
days = [20200413, 20200414, 20200415, 20200416, 20200417, 20200418, 20200412]

for day in days:
    number += checkdatanum(day)
    
print('week : ', number)
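
# Note: makecsvfile() is defined above but never called; this script only prints the
# weekly record count. A minimal sketch of how it could be used (an assumption, not
# part of the original flow) is to export one CSV per day:
#
#     for day in days:
#         makecsvfile(day)  # writes e.g. '20200413data.csv' to the current directory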