# -*- encoding:utf-8 -*-
"""Run a trained Keras model over a day's feature log and print sell alerts for
watchlist stocks; includes helpers for comparing and back-checking daily
prediction files stored under D:\\data\\quantization\\predict\\."""
import random

import numpy as np
import pymongo
from keras.models import load_model

from util.mongodb import get_mongo_table_instance

# Hand-picked watchlist (zixuan), grouped by theme.
zixuan_stock_list = [
    # Healthcare
    '000150.SZ', '300300.SZ', '603990.SH', '300759.SZ', '300347.SZ',
    '300003.SZ', '300253.SZ', '002421.SZ', '300168.SZ', '002432.SZ',
    # 5G
    '300698.SZ', '600498.SH', '300310.SZ', '600353.SH', '603912.SH',
    '603220.SH', '300602.SZ', '600260.SH', '002463.SZ', '300738.SZ',
    # Internet of Vehicles
    '002369.SZ', '002920.SZ', '300020.SZ', '002373.SZ', '002869.SZ',
    '300098.SZ', '300048.SZ', '002401.SZ',
    # Industrial internet
    '002184.SZ', '002364.SZ', '300310.SZ', '300670.SZ', '300166.SZ',
    '002169.SZ', '002380.SZ', '002421.SZ',
    # Ultra-high voltage (UHV)
    '300341.SZ', '300670.SZ', '300018.SZ', '600268.SH', '002879.SZ', '002028.SZ',
    # Infrastructure
    '300041.SZ', '603568.SH', '000967.SZ', '603018.SH', '002062.SZ',
    # Huawei
    '300687.SZ', '002316.SZ', '300339.SZ', '300378.SZ', '300020.SZ',
    '300634.SZ', '002570.SZ', '600801.SH', '300113.SZ', '002555.SZ',
    '002174.SZ', '600585.SH', '600276.SH', '002415.SZ', '000651.SZ',
    '300074.SZ',
]

# Stocks screened by return on equity (ROE).
ROE_stock_list = [
    '002976.SZ', '002847.SZ', '002597.SZ', '300686.SZ', '000708.SZ', '603948.SH', '600507.SH', '300401.SZ', '002714.SZ', '600732.SH',
    '300033.SZ', '300822.SZ', '300821.SZ', '002458.SZ', '000708.SZ', '600732.SH', '603719.SH', '300821.SZ', '300800.SZ', '300816.SZ',
    '300812.SZ', '603195.SH', '300815.SZ', '603053.SH', '603551.SH', '002975.SZ', '603949.SH', '002970.SZ', '300809.SZ', '002968.SZ',
    '300559.SZ', '002512.SZ', '300783.SZ', '300003.SZ', '603489.SH', '300564.SZ', '600802.SH', '002600.SZ', '000933.SZ', '601918.SH',
    '000651.SZ', '002916.SZ', '000568.SZ', '000717.SZ', '600452.SH', '603589.SH', '600690.SH', '603886.SH', '300117.SZ', '000858.SZ',
    '002102.SZ', '300136.SZ', '600801.SH', '600436.SH', '300401.SZ', '002190.SZ', '300122.SZ', '002299.SZ', '603610.SH', '002963.SZ',
    '600486.SH', '300601.SZ', '300682.SZ', '300771.SZ', '000868.SZ', '002607.SZ', '603068.SH', '603508.SH', '603658.SH', '300571.SZ',
    '603868.SH', '600768.SH', '300760.SZ', '002901.SZ', '603638.SH', '601100.SH', '002032.SZ', '600083.SH', '600507.SH', '603288.SH',
    '002304.SZ', '000963.SZ', '300572.SZ', '000885.SZ', '600995.SH', '300080.SZ', '601888.SH', '000048.SZ', '000333.SZ', '300529.SZ',
    '000537.SZ', '002869.SZ', '600217.SH', '000526.SZ', '600887.SH', '002161.SZ', '600267.SH', '600668.SH', '600052.SH', '002379.SZ',
    '603369.SH', '601360.SH', '002833.SZ', '002035.SZ', '600031.SH', '600678.SH', '600398.SH', '600587.SH', '600763.SH', '002016.SZ',
    '603816.SH', '000031.SZ', '002555.SZ', '603983.SH', '002746.SZ', '603899.SH', '300595.SZ', '300632.SZ', '600809.SH', '002507.SZ',
    '300198.SZ', '600779.SH', '603568.SH', '300638.SZ', '002011.SZ', '603517.SH', '000661.SZ', '300630.SZ', '000895.SZ', '002841.SZ',
    '300602.SZ', '300418.SZ', '603737.SH', '002755.SZ', '002803.SZ', '002182.SZ', '600132.SH', '300725.SZ', '600346.SH', '300015.SZ',
    '300014.SZ', '300628.SZ', '000789.SZ', '600368.SH', '300776.SZ', '600570.SH', '000509.SZ', '600338.SH', '300770.SZ', '600309.SH',
    '000596.SZ', '300702.SZ', '002271.SZ', '300782.SZ', '300577.SZ', '603505.SH', '603160.SH', '300761.SZ', '603327.SH', '002458.SZ',
    '300146.SZ', '002463.SZ', '300417.SZ', '600566.SH', '002372.SZ', '600585.SH', '000848.SZ', '600519.SH', '000672.SZ', '300357.SZ',
    '002234.SZ', '603444.SH', '300236.SZ', '603360.SH', '002677.SZ', '300487.SZ', '600319.SH', '002415.SZ', '000403.SZ', '600340.SH',
    '601318.SH',
]

# Stocks currently held.
holder_stock_list = [
    '600498.SH', '002223.SZ', '300136.SZ', '300559.SZ', '600496.SH', '300682.SZ'
]

code_table = get_mongo_table_instance('tushare_code')
k_table = get_mongo_table_instance('stock_day_k')
stock_concept_table = get_mongo_table_instance('tushare_concept_detail')
all_concept_code_list = list(get_mongo_table_instance('tushare_concept').find({}))

# Industry names as stored in the tushare data (kept in Chinese to match the DB):
# household appliances, electronic components, IT equipment, auto services, auto parts,
# software services, internet, textiles, plastics, semiconductors.
industry = ['家用电器', '元器件', 'IT设备', '汽车服务', '汽车配件', '软件服务', '互联网', '纺织', '塑料', '半导体']

gainian_map = {}  # concept -> stocks
hangye_map = {}   # industry -> stocks
Z_list = []  # watchlist (zixuan)
R_list = []  # ROE list
O_list = []  # others
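
# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the original pipeline): a hypothetical
# helper showing the record layout that predict_today() below appears to
# assume when it eval()s each line of the prediction log -- a flat list of
# 28*20 "image-like" features followed by some extra features, with the final
# element holding stock metadata whose first field is the ts_code. The number
# of extra features and the exact metadata fields are assumptions made only
# for illustration.
def _example_record(n_extra_features=5):
    """Build a dummy record shaped like the lines predict_today() reads."""
    features = [0.0] * (28 * 20 + n_extra_features)  # hypothetical feature values
    meta = ('000001.SZ',)                            # predict_today() reads line[-1][0] as ts_code
    return features + [meta]
# ---------------------------------------------------------------------------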

def predict_today(file, day, model='10_18d', log=True):
    """Run the trained model over one day's prediction log.

    Each line of `file` is a Python literal: the feature vector plus a final
    metadata element whose first field is the ts_code. When the model's
    class-2 or class-3 score exceeds 0.5 (and classes 0/1 are not dominant),
    stocks on the watchlist or holder list are flagged for selling.
    `day` and `log` are currently unused.
    """
    lines = []
    with open(file) as f:
        for line in f.readlines():
            line = eval(line.strip())  # each line is a Python list literal
            lines.append(line)

    size = len(lines[0])
    model = load_model(model)  # `model` is the path to a saved Keras model

    for line in lines:
        train_x = np.array([line[:size - 1]])  # drop the metadata element
        train_x_tmp = train_x[:, :28 * 20]
        train_x_a = train_x_tmp.reshape(train_x.shape[0], 28, 20, 1)  # 28x20 "image" input
        # train_x_b = train_x_tmp.reshape(train_x.shape[0], 18, 24)
        train_x_c = train_x[:, 28 * 20:]       # remaining indicator features

        result = model.predict([train_x_c, train_x_a, ])
        # print(result, line[-1])
        stock = code_table.find_one({'ts_code': line[-1][0]})

        if result[0][0] > 0.6:
            pass
        elif result[0][1] > 0.5:
            pass
        elif result[0][2] > 0.5 or result[0][3] > 0.5:
            # Classes 2/3 are treated as a sell signal for stocks we hold or watch.
            if stock['ts_code'] in holder_stock_list or stock['ts_code'] in zixuan_stock_list:
                print(stock['ts_code'], stock['name'], '赶紧卖出')  # "sell immediately"
            else:
                pass

    # print(gainian_map)
    # print(hangye_map)


def _read_pfile_map(path):
    """Read a prediction file and return its raw lines."""
    s_list = []
    with open(path, encoding='utf-8') as f:
        for line in f.readlines():
            s_list.append(line)
    return s_list


def join_two_day(a, b):
    """Print the stocks that appear in both day `a`'s and day `b`'s prediction files."""
    a_list = _read_pfile_map('D:\\data\\quantization\\predict\\' + str(a) + '.txt')
    b_list = _read_pfile_map('D:\\data\\quantization\\predict\\dmi_' + str(b) + '.txt')
    for a in a_list:
        for b in b_list:
            if a[2:11] == b[2:11]:  # characters 2..10 of each line hold the stock code
                print(a)


def check_everyday(day, today):
    """Check how the stocks predicted on `day` have performed up to `today`."""
    a_list = _read_pfile_map('D:\\data\\quantization\\predict\\' + str(day) + '.txt')
    x = 0
    for a in a_list:
        print(a[:-1])
        k_day_list = list(k_table.find({'code': a[2:11], 'tradeDate': {'$lte': int(today)}})
                          .sort('tradeDate', pymongo.DESCENDING).limit(5))
        if k_day_list:
            k_day = k_day_list[0]       # most recent trading day
            k_day_0 = k_day_list[-1]    # oldest of the five days
            k_day_last = k_day_list[1]  # previous trading day
            if (k_day_last['close'] - k_day_0['pre_close']) / k_day_0['pre_close'] < 0.2:
                print(k_day['open'], k_day['close'],
                      100 * (k_day['close'] - k_day_last['close']) / k_day_last['close'])
                x = x + 100 * (k_day['close'] - k_day_last['close']) / k_day_last['close']

    print(x / len(a_list))  # average one-day percentage change


if __name__ == '__main__':
    # predict(file_path='D:\\data\\quantization\\stock6_5_test.log', model_path='5d_dnn_seq.h5')
    # predict(file_path='D:\\data\\quantization\\stock6_test.log', model_path='15m_dnn_seq.h5')
    # multi_predict()
    predict_today("D:\\data\\quantization\\stock500_28d_20200403.log", 20200403,
                  model='500_28d_mix_5D_ma5_s_seq.h5', log=True)

    # join_two_day(20200305, 20200305)
    # check_everyday(20200311, 20200312)
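

# ---------------------------------------------------------------------------
# Illustrative sketch (assumption, not original code): a minimal shape check
# that mirrors the slicing done in predict_today(), useful for verifying a
# record before running the full pipeline. It assumes, as the predict() call
# above does, that the model takes two inputs in the order
# [tail features, 28x20x1 block]. `_check_record_shapes` is a hypothetical
# helper and is not called anywhere in this script.
def _check_record_shapes(record):
    """Return the (tail, block) input shapes predict_today() would feed to the model."""
    features = np.array([record[:-1]])  # drop the metadata element, keep a batch axis
    block = features[:, :28 * 20].reshape(features.shape[0], 28, 20, 1)
    tail = features[:, 28 * 20:]
    return tail.shape, block.shape
# Example (using the hypothetical _example_record() defined above):
#     tail_shape, block_shape = _check_record_shapes(_example_record())
#     # -> ((1, 5), (1, 28, 20, 1))
# ---------------------------------------------------------------------------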