提交 a33c4898 作者: LiJunMing

新三板雪球网财务数据脚本维护

上级 01e8140c
......@@ -26,7 +26,7 @@ from DBUtils.PooledDB import PooledDB
# sys.path.append('D://zzsn_spider//base//fdfs_client')
from fdfs_client.client import get_tracker_conf, Fdfs_client
tracker_conf = get_tracker_conf('E:\\kkwork\\zzsn_spider\\base\\client.conf')
tracker_conf = get_tracker_conf('D:\\kkwork\\zzsn_spider\\base\\client.conf')
client = Fdfs_client(tracker_conf)
# 注意 程序退出前 调用BaseCore.close() 关闭相关资源
......@@ -267,6 +267,20 @@ class BaseCore:
charset='utf8mb4'
)
self.pool_11 = PooledDB(
creator=pymysql,
maxconnections=5,
mincached=2,
maxcached=5,
blocking=True,
host='114.116.44.11',
port=3306,
user='caiji',
password='f7s0&7qqtK',
database='clb_project',
charset='utf8mb4'
)
def close(self):
try:
self.cursor.close()
......@@ -472,13 +486,13 @@ class BaseCore:
return driver
# 根据社会信用代码获取企业信息
def getInfomation(self, gpdm):
def getInfomation(self, social_code):
data = []
try:
sql = f"SELECT * FROM NQEnterprise WHERE gpdm = '{gpdm}'"
sql = f"SELECT * FROM sys_base_enterprise_ipo WHERE social_credit_code = '{social_code}'and securities_type='新三板' and listed='1' "
# self.cursor.execute(sql)
# data = self.cursor.fetchone()
conn = self.pool_caiji.connection()
conn = self.pool_11.connection()
cursor = conn.cursor()
cursor.execute(sql)
data = cursor.fetchone()
......
......@@ -120,13 +120,13 @@ def getinfo(com_code,social_code):
for nnn in range(0, 3):
try:
panduan = check_date(com_code, report_date)
if panduan:
return dic_info
else:
pass
break
except:
time.sleep(1)
if panduan:
log.info(f'{report_date}----已采集过')
continue
else:
pass
log.info(f'======正在采集:{com_code}---{report_date}=======')
#利润表
list_Lrb = getdetail(reportLrbdata,lrb_name_map,listLrb,lrb_name)
......@@ -156,15 +156,14 @@ def getinfo(com_code,social_code):
# 调凯歌接口存储数据
data = json.dumps(dic_info)
# print(data)
url_baocun = 'http://114.115.236.206:8088/sync/finance/df'
url_baocun = 'http://114.115.236.206:8088/sync/finance/xq'
for nnn in range(0, 3):
try:
res_baocun = requests.post(url_baocun, data=data)
break
except:
time.sleep(1)
print(res_baocun.text)
log.info('------------数据发送接口完毕------------')
log.info(f'----{com_code}--{report_date}--------数据发送接口完毕------------')
for nnn in range(0, 3):
try:
add_date(com_code, report_date)
......@@ -173,15 +172,28 @@ def getinfo(com_code,social_code):
time.sleep(1)
else:
log.error(f'---{com_code}--{report_date}--')
return dic_info
if __name__ == '__main__':
info_date_list = []
try:
chromedriver = "D:/chrome/chromedriver.exe"
browser = webdriver.Chrome(chromedriver)
except Exception as e:
print(e)
# try:
# chromedriver = "D:/chrome/chromedriver.exe"
# browser = webdriver.Chrome(chromedriver)
# except Exception as e:
# print(e)
opt = webdriver.ChromeOptions()
opt.add_argument(
'user-agent=Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.198 Safari/537.36')
opt.add_argument("--ignore-certificate-errors")
opt.add_argument("--ignore-ssl-errors")
opt.add_experimental_option("excludeSwitches", ["enable-automation"])
opt.add_experimental_option('excludeSwitches', ['enable-logging'])
opt.add_experimental_option('useAutomationExtension', False)
opt.binary_location = r'D:/Google/Chrome/Application/chrome.exe'
# chromedriver = r'C:\Users\WIN10\DataspellProjects\crawlerProjectDemo\tmpcrawler\cmd100\chromedriver.exe'
chromedriver = r'D:/cmd100/chromedriver.exe'
browser = webdriver.Chrome(chrome_options=opt, executable_path=chromedriver)
headers = {
'authority': 'stock.xueqiu.com',
'method': 'GET',
......@@ -370,37 +382,21 @@ if __name__ == '__main__':
'期末现金及现金等价物余额':'final_balance_of_cce'
}
table_type = ['income','balance']
flag = 0
while True:
# com_code = baseCore.redicPullData('NQEnterprise:nq_finance')
com_code = baseCore.redicPullData('NQEnterprise:nq_finance_test')
if com_code is None:
if flag==0:
social_code = baseCore.redicPullData('NQEnterprise:nq_finance')
# social_code = baseCore.redicPullData('NQEnterprise:nq_finance_test')
if social_code is None:
log.info('已没有数据----------等待')
time.sleep(20)
continue
elif flag==1:
log.info('=============调用对比指标接口======')
time.sleep(5400)
url_ = 'http://114.115.236.206:8088/sync/calculateIndex?type=1'
for nnn in range(0, 3):
try:
res_ = requests.get(url_)
break
except:
time.sleep(1)
print(res_.text)
log.info('-----------数据触发对比指标接口完毕----------')
flag = 0
continue
log.info(f'========正在采集{com_code}===========')
data = baseCore.getInfomation(com_code)
social_code = data[1]
short_name = data[3]
log.info(f'========正在采集{social_code}===========')
data = baseCore.getInfomation(social_code)
# social_code = data[1]
com_code = data[3]
start = time.time()
com_code = 'NQ' + com_code
dic_info = getinfo(com_code,social_code)
flag =1
break
......
Markdown 格式
0%
您添加了 0 人到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 登录 后发表评论