If is_fivemodels is true, do not modify the best models

This commit is contained in:
workpc 2024-12-20 10:44:15 +08:00
parent 2f8381a0c5
commit 2896312a48
3 changed files with 540 additions and 132 deletions

View File

@@ -1,10 +1,14 @@
-from fastapi import FastAPI, HTTPException, Body
-from fastapi.middleware.cors import CORSMiddleware
 import requests
+import json
+import functools
+from fastapi import FastAPI, HTTPException, Body,Request
+from fastapi.middleware.cors import CORSMiddleware
 from requests_ntlm import HttpNtlmAuth
 import urllib3
 urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+from typing import Dict
 '''
 sql:
@@ -40,7 +44,7 @@ from datetime import datetime
 # host = 'rm-2zehj3r1n60ttz9x5.mysql.rds.aliyuncs.com'  # used when accessing from the server
 # database = 'jingbo_test'  # used when accessing from the server
 host = 'rm-2zehj3r1n60ttz9x5ko.mysql.rds.aliyuncs.com'  # used when accessing from Beijing
-database = 'jingbo_test'  # used when accessing from Beijing
+database = 'jingbo-test'  # used when accessing from Beijing
@@ -49,26 +53,42 @@ config = {
     "user": "jingbo",
     "password": "shihua@123",
     "host": host,
-    "database": "jingbo_test"
+    "database": database
 }
+'''
+`ID` varchar(128) NOT NULL COMMENT 'ID',
+`REQUEST_METHOD` varchar(128) DEFAULT NULL COMMENT '方法名称',
+`REQUEST_TIME` datetime DEFAULT NULL COMMENT '请求时间',
+`REQUEST_URL` varchar(256) DEFAULT NULL COMMENT '请求URL',
+`USING_FLAG` varchar(1) DEFAULT NULL COMMENT '启用状态',
+`REQUEST_PARAMS` text COMMENT '接收到的请求参数',
+`RESPONSE_CONTENT` text COMMENT '响应内容',
+`RESPONSE_TIME` datetime DEFAULT NULL COMMENT '响应时间',
+'''
-def insert_api_log(request_time, request_ip, request_url, request_method, request_params, response_status_code, response_content, response_time):
+def insert_api_log(request_time, request_url, request_method, request_params, response_content, response_time):
     try:
         # open a database connection
         cnx = mysql.connector.connect(**config)
         cursor = cnx.cursor()
+        # count the existing rows to generate the ID for the new record
+        count_query = "SELECT COUNT(*) FROM v_tbl_aup_api_log"
+        cursor.execute(count_query)
+        result = cursor.fetchone()
+        new_id = result[0] + 1 if result else 1  # ID is 1 if the table is empty, otherwise count + 1
         # SQL statement for the insert
         insert_query = """
-        INSERT INTO pims_api_log (request_time, request_ip, request_url, request_method, request_params, response_status_code, response_content, response_time)
-        VALUES (%s, %s, %s, %s, %s, %s, %s, %s)
+        INSERT INTO v_tbl_aup_api_log (ID,REQUEST_TIME, REQUEST_URL, REQUEST_METHOD, REQUEST_PARAMS, RESPONSE_CONTENT, RESPONSE_TIME)
+        VALUES (%s,%s, %s, %s, %s, %s, %s)
         """
         # prepare the data to insert (the order must match the placeholders in the SQL statement)
-        data = (request_time, request_ip, request_url, request_method, request_params, response_status_code, response_content, response_time)
+        data = (new_id,request_time, request_url, request_method, request_params, response_content, response_time)
         # execute the insert
         cursor.execute(insert_query, data)
         # commit the transaction so the insert takes effect
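
Note on the hunk above: generating the ID with a separate SELECT COUNT(*) round-trip can race when two requests are logged concurrently, and it miscounts once rows are deleted. A minimal alternative sketch, not part of this commit, assuming the ID column of v_tbl_aup_api_log holds numeric strings:

    # Sketch: compute the next ID atomically inside the INSERT itself.
    insert_query = """
    INSERT INTO v_tbl_aup_api_log
        (ID, REQUEST_TIME, REQUEST_URL, REQUEST_METHOD, REQUEST_PARAMS, RESPONSE_CONTENT, RESPONSE_TIME)
    SELECT COALESCE(MAX(CAST(ID AS UNSIGNED)), 0) + 1, %s, %s, %s, %s, %s, %s
    FROM v_tbl_aup_api_log
    """
    data = (request_time, request_url, request_method, request_params, response_content, response_time)
    cursor.execute(insert_query, data)

An AUTO_INCREMENT primary key would remove the application-side ID bookkeeping altogether.
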
@@ -98,14 +118,36 @@ app.add_middleware(
 headers = {'content-type': 'application/json;charset=UTF-8'}
 # your GraphQL API, e.g. url = 'http://10.88.14.86/AspenTech/AspenUnified/api/v1/model/Chambroad20241205/graphql'
-graphql_host = '10.88.14.86'
+graphql_host = 'http://10.88.14.86'
 graphql_path = '/AspenTech/AspenUnified/api/v1/model/Chambroad20241205/graphql'
+url = graphql_host + graphql_path
 query = """
 mutation{
   purchases{
-    update(inputs:[%s
+    update(inputs:[{
+      name:"11月度计划"
+      inputs:[
+        {
+          name:"CWT"
+          inputs:[
+            {
+              field:Cost
+              periodName:"1"
+              value: 3100
+            }
+          ]
+        },
+        {
+          name:"CWT"
+          inputs:[
+            {
+              field:Cost
+              periodName:"1"
+              value: 3100
+            }
+          ]
+        },
     ]
 }])
 }
@@ -120,6 +162,7 @@ mutation{
       {name: "焦化加工油浆"}
       {name: "焦化加工低硫原油"}
       {name: "焦化加工低硫渣油"}
     ]
   }
 )
@@ -146,6 +189,29 @@ payload_json = {
     "operationName": ""
 }
+query2 = '''
+query
+{
+  cases
+  {
+    items
+    {
+      name
+    }
+  }
+}
+'''
+payload_json2 = {
+    "query": query2,
+    "operationName": ""
+}
 graphql_username = "bw19382"
 graphql_password = "Fudong3!"
 auth = HttpNtlmAuth(f'{graphql_username}', f'{graphql_password}')
@@ -176,57 +242,306 @@ example_query = '''
 },
 '''
+def log_api_call(func):
+    @functools.wraps(func)
+    def wrapper(*args, **kwargs):
+        request_time = datetime.now()
+        request_url = None
+        request_method = 'post'
+        request_params = None
+        try:
+            # call the decorated function and capture its response
+            request_time = datetime.now()
+            response = func(*args, **kwargs)
+            response_time = datetime.now()
+            # serialize the request params and response content to strings suitable for logging (e.g. JSON)
+            request_params_str = json.dumps(request_params) if request_params else None
+            response_content = response.text if hasattr(response, 'text') else None
+            # write the record to the database (assumes insert_api_log is defined and reachable)
+            insert_api_log(
+                request_time,
+                request_url,
+                request_method,
+                request_params_str,
+                response_content,
+                response_time
+            )
+            return response
+        except Exception as e:
+            print(f"Error occurred during API call: {e}")
+            raise
+    return wrapper
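
The log_api_call decorator added above is defined but never applied in this commit; the endpoints below log inline instead. A hypothetical usage sketch, for reference; note that request_url and request_params stay None unless the wrapper extracts them from args/kwargs:

    # Hypothetical: wrap a plain requests call so every invocation is logged.
    @log_api_call
    def run_query(payload):
        return requests.post(url=url, headers=headers, json=payload, auth=auth, verify=False)
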
@app.post("/graphql") @app.post("/graphql")
async def execute_graphql_query( async def post_execute_graphql_query(request: Request,
query: str = Body(..., example=example_query) # 使用Body和example参数添加示例 query:str = Body(query,example_query=query)
): ):
payload_json = {
"query": query
}
request_time = datetime.now()
full_path = str(request.url.path)
session = requests.Session() session = requests.Session()
response = session.post(url=url, headers=headers, json=payload_json, auth=auth, verify=False) response = session.post(url=url, headers=headers, json=payload_json, auth=auth, verify=False)
response_time = datetime.now()
# 调用插入日志的函数将相关信息记录到数据库中假设insert_api_log函数已正确定义且可访问
insert_api_log(
request_time,
full_path,
'POST',
json.dumps(payload_json),
json.dumps(response.json()),
response_time
)
if response.status_code!= 200: if response.status_code!= 200:
raise HTTPException(status_code=response.status_code, detail=response.text) raise HTTPException(status_code=response.status_code, detail=response.text)
return response.json() return response.json()
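
A client-side sketch for the endpoint above, assuming the service runs on 127.0.0.1:8001 as configured at the bottom of this file. With a single non-embedded Body parameter, FastAPI expects the request body to be the bare JSON string:

    # Hypothetical client call; the body is a JSON-encoded string, not an object.
    import requests
    r = requests.post("http://127.0.0.1:8001/graphql",
                      json="query { cases { items { name } } }")
    print(r.status_code, r.json())
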
-query2 = '''
-query
-{
-  cases
-  {
-    items
-    {
-      name
-    }
-  }
-}
-'''
+# def insert_api_log(request_time, request_url, request_method, request_params, response_content, response_time):
+@app.post("/cases")
+async def post_cases_query_async(request: Request):
     payload_json2 = {
-        "query": query2,
-        "operationName": ""
+        "query": query2
     }
-@app.get("/cases")
-async def get_cases_query_async():
+    full_path = str(request.url.path)
+    request_time = datetime.now()
     session = requests.Session()
     response = session.post(url=url, headers=headers, json=payload_json2, auth=auth, verify=False)
+    response_time = datetime.now()
+    # write the record to the database (assumes insert_api_log is defined and reachable)
     insert_api_log(
-        datetime.now(),
-        'IP_ADDRESS',
-        'URL_ADDRESS''http://127.0.0.1:8000/cases',
-        'GET',
-        '',
-        response.status_code,
-        response.text,
-        datetime.now()
+        request_time,
+        full_path,
+        'POST',
+        json.dumps(payload_json),
+        json.dumps(response.json()),
+        response_time
     )
     if response.status_code!= 200:
         raise HTTPException(status_code=response.status_code, detail=response.text)
     return response.json()
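
Note: the logging call in /cases serializes the module-level payload_json (the mutation payload used by /graphql) rather than the payload_json2 it actually posts. A sketch of the presumably intended call, assuming this is a copy-paste slip:

    insert_api_log(
        request_time,
        full_path,
        'POST',
        json.dumps(payload_json2),  # log the payload this endpoint actually sent
        json.dumps(response.json()),
        response_time
    )
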
+# build a customized GraphQL query string, with parameter type checks
+def generate_custom_graphql_query(
+    purchase_inputs=None,
+    case_execution_input=None,
+    wait_for_case_stack_job_name=None
+):
+    base_query = """
+mutation{
+  purchases{
+    update(inputs:[{
+      name:"11月度计划"
+      inputs:[
+        {
+          name:"CWT"
+          inputs:[
+            {
+              field:Cost
+              periodName:"1"
+              value: 3100
+            }
+          ]
+        },
+        {
+          name:"CWT"
+          inputs:[
+            {
+              field:Cost
+              periodName:"1"
+              value: 3100
+            }
+          ]
+        },
+      ]
+    }])
+  }
+  caseExecution {
+    submitCaseStack(
+      input:{
+        name: "Job2"
+        cases: [
+          {name: "11月度计划"}
+          {name: "二催开工"}
+          {name: "一焦化停工"}
+          {name: "焦化加工油浆"}
+          {name: "焦化加工低硫原油"}
+          {name: "焦化加工低硫渣油"}
+        ]
+      }
+    )
+    {id}
+    waitForCaseStackJob(name: "Job2")
+    {
+      started
+      submitted
+      finished
+      executionStatus
+      cases{
+        items{
+          name
+          objectiveValue
+        }
+      }
+    }
+  }
+}
+"""
+    # purchase_inputs, if given, must be a list whose elements are dicts
+    if purchase_inputs is not None:
+        if not isinstance(purchase_inputs, list):
+            raise TypeError("purchase_inputs should be a list or None.")
+        for input_data in purchase_inputs:
+            if not isinstance(input_data, dict):
+                raise TypeError("Elements in purchase_inputs should be dictionaries.")
+    # case_execution_input, if given, must be a dict
+    if case_execution_input is not None:
+        if not isinstance(case_execution_input, dict):
+            raise TypeError("case_execution_input should be a dictionary or None.")
+    # wait_for_case_stack_job_name, if given, must be a string
+    if wait_for_case_stack_job_name is not None:
+        if not isinstance(wait_for_case_stack_job_name, str):
+            raise TypeError("wait_for_case_stack_job_name should be a string or None.")
+    if purchase_inputs:
+        # template for the purchases inputs section
+        purchase_inputs_template = """
+      name:"11月度计划"
+      inputs:[
+        {
+          name:"CWT"
+          inputs:[
+            {
+              field:Cost
+              periodName:"1"
+              value: 3100
+            }
+          ]
+        },
+        {
+          name:"CWT"
+          inputs:[
+            {
+              field:Cost
+              periodName:"1"
+              value: 3100
+            }
+          ]
+        },
+      ]
+"""
+        new_purchase_inputs_str = ""
+        for input_data in purchase_inputs:
+            input_str = f"""
+            name: "{input_data['name']}"
+            inputs: [
+            """
+            inner_inputs = input_data.get('inputs', [])
+            for inner_input in inner_inputs:
+                inner_str = f"""
+                {{
+                  field: "{inner_input['field']}"
+                  periodName: "{inner_input['periodName']}"
+                  value: {inner_input['value']}
+                }}
+                """
+                input_str += inner_str
+            input_str += " ]"
+            new_purchase_inputs_str += input_str
+        base_query = base_query.replace(purchase_inputs_template, new_purchase_inputs_str)
+    if case_execution_input:
+        # template for the caseExecution input section
+        case_execution_input_template = """
+        name: "Job2"
+        cases: [
+          {name: "11月度计划"}
+          {name: "二催开工"}
+          {name: "一焦化停工"}
+          {name: "焦化加工油浆"}
+          {name: "焦化加工低硫原油"}
+          {name: "焦化加工低硫渣油"}
+        ]
+"""
+        input_dict_str = f"""
+        name: "{case_execution_input['name']}"
+        cases: [
+        """
+        for case in case_execution_input['cases']:
+            case_str = f"""
+            {{name: "{case['name']}"}}
+            """
+            input_dict_str += case_str
+        input_dict_str += " ]"
+        base_query = base_query.replace(case_execution_input_template, input_dict_str)
+    if wait_for_case_stack_job_name:
+        # template for the waitForCaseStackJob section
+        wait_for_case_stack_job_template = "waitForCaseStackJob(name: \"Job2\")"
+        new_wait_for_case_stack_job_str = f"waitForCaseStackJob(name: \"{wait_for_case_stack_job_name}\")"
+        base_query = base_query.replace(wait_for_case_stack_job_template, new_wait_for_case_stack_job_str)
+    return base_query
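
A hypothetical invocation of generate_custom_graphql_query, using the shapes the type checks above expect (not part of this commit). One caveat of the string-replacement approach: each template literal must match the corresponding section of base_query byte for byte, whitespace included, or str.replace silently leaves the query unchanged:

    custom_query = generate_custom_graphql_query(
        purchase_inputs=[{
            "name": "11月度计划",
            "inputs": [{"field": "Cost", "periodName": "1", "value": 3100}],
        }],
        case_execution_input={"name": "Job2", "cases": [{"name": "11月度计划"}]},
        wait_for_case_stack_job_name="Job2",
    )
    print(custom_query)
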
+# POST endpoint that receives the parameters and generates the GraphQL query
+@app.post("/generate_graphql_query")
+async def generate_graphql_query(
+    request = Request,
+    purchase_inputs: list[dict] = Body(None, embed=True),
+    case_execution_input: dict = Body(None, embed=True),
+    wait_for_case_stack_job_name: str = Body(None, embed=True)
+):
+    try:
+        custom_query = generate_custom_graphql_query(purchase_inputs, case_execution_input, wait_for_case_stack_job_name)
+        payload_json = {
+            "query": custom_query
+        }
+        request_time = datetime.now()
+        full_path = str(request.url.path)
+        session = requests.Session()
+        response = session.post(url=url, headers=headers, json=payload_json, auth=auth, verify=False)
+        response_time = datetime.now()
+        # write the record to the database (assumes insert_api_log is defined and reachable)
+        insert_api_log(
+            request_time,
+            full_path,
+            'POST',
+            json.dumps(payload_json),
+            json.dumps(response.json()),
+            response_time
+        )
+        if response.status_code!= 200:
+            raise HTTPException(status_code=response.status_code, detail=response.text)
+        return response.json()
+    except TypeError as e:
+        return {"error": str(e)}
 if __name__ == "__main__":
     import uvicorn
-    uvicorn.run(app, host="127.0.0.1", port=8000)
+    uvicorn.run(app, host="127.0.0.1", port=8001)

View File

@@ -678,6 +678,9 @@ def model_losss_juxiting(sqlitedb):
     # save the names of the 5 best models
     if len(modelnames) > 5:
         modelnames = modelnames[0:5]
+    if is_fivemodels:
+        pass
+    else:
         with open(os.path.join(dataset,"best_modelnames.txt"), 'w') as f:
             f.write(','.join(modelnames) + '\n')
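
For reference, the guard added above can be written without the empty branch; an equivalent form with unchanged behavior:

    # write best_modelnames.txt only when is_fivemodels is false
    if not is_fivemodels:
        with open(os.path.join(dataset, "best_modelnames.txt"), 'w') as f:
            f.write(','.join(modelnames) + '\n')
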

View File

@@ -2,7 +2,7 @@
 "cells": [
  {
   "cell_type": "code",
-  "execution_count": 13,
+  "execution_count": 9,
   "id": "9daadf20-caa6-4b25-901c-6cc3ef563f58",
   "metadata": {},
   "outputs": [
@@ -10,65 +10,65 @@
 "name": "stdout",
 "output_type": "stream",
 "text": [
-"(85, 28)\n",
-"(22, 4)\n",
-"(85, 31)\n",
+"(255, 28)\n",
+"(78, 4)\n",
+"(255, 31)\n",
 " ds NHITS Informer LSTM iTransformer TSMixer \\\n",
-"0 2024-11-25 75.714300 75.523370 73.614220 75.27068 75.03936 \n",
-"1 2024-11-26 76.039635 75.558270 73.692310 75.04110 74.60100 \n",
-"2 2024-11-27 77.375790 75.361885 73.826140 74.99121 74.37731 \n",
-"3 2024-11-28 78.872400 76.339920 73.883484 75.79425 74.04826 \n",
-"4 2024-11-29 79.576970 76.333170 73.876396 75.89008 74.07330 \n",
+"0 2024-10-08 79.76823 80.197660 79.802414 78.391460 80.200510 \n",
+"1 2024-10-09 78.75903 80.235740 79.844154 78.888565 79.861180 \n",
+"2 2024-10-10 79.43970 80.186230 79.885100 79.488700 79.483086 \n",
+"3 2024-10-11 79.62268 80.502975 79.878560 79.406670 79.313965 \n",
+"4 2024-10-14 79.91698 80.931946 79.936270 79.758575 79.197430 \n",
 "\n",
 " TSMixerx PatchTST RNN GRU ... y \\\n",
-"0 74.581190 75.70277 74.721280 74.512060 ... 73.010002 \n",
-"1 73.496025 75.97611 74.588060 74.713425 ... 72.809998 \n",
-"2 73.522026 76.48628 74.486400 74.946010 ... 72.830002 \n",
-"3 73.416306 76.38267 75.195710 74.946014 ... 73.279999 \n",
-"4 73.521570 76.20661 75.089966 74.935165 ... 72.940002 \n",
+"0 79.243256 80.883450 80.836105 81.276060 ... 77.180000 \n",
+"1 78.068150 80.950096 80.917860 81.303505 ... 76.580002 \n",
+"2 77.189064 80.347400 80.866040 81.798050 ... 79.400002 \n",
+"3 77.840096 80.545296 81.167710 81.552810 ... 79.040001 \n",
+"4 77.904300 81.432976 81.144210 81.483215 ... 77.459999 \n",
 "\n",
 " min_within_quantile max_within_quantile id CREAT_DATE min_price \\\n",
-"0 74.41491 75.29100 1 2024-11-22 74.414910 \n",
-"1 74.11780 74.95678 2 2024-11-22 73.496025 \n",
-"2 73.93820 74.50395 3 2024-11-22 73.522026 \n",
-"3 73.85808 74.46382 4 2024-11-22 73.416306 \n",
-"4 73.96690 74.81860 5 2024-11-22 73.521570 \n",
+"0 80.200510 81.163630 51 2024-10-07 79.848624 \n",
+"1 79.861180 81.757850 52 2024-10-07 79.981211 \n",
+"2 79.483086 81.190400 53 2024-10-07 79.398409 \n",
+"3 79.313965 81.371100 54 2024-10-07 79.394607 \n",
+"4 79.197430 81.432976 55 2024-10-07 79.351007 \n",
 "\n",
 " max_price 序号 LOW_PRICE HIGH_PRICE \n",
-"0 75.959854 15.0 72.30 74.83 \n",
-"1 77.182580 14.0 71.63 73.80 \n",
-"2 78.378624 13.0 71.71 72.85 \n",
-"3 79.415400 12.0 71.85 72.96 \n",
-"4 79.576970 11.0 71.75 73.34 \n",
+"0 81.848624 52.0 76.36 81.14 \n",
+"1 81.981211 51.0 75.15 78.02 \n",
+"2 81.398409 50.0 76.72 79.72 \n",
+"3 81.394607 49.0 78.04 79.50 \n",
+"4 81.351007 48.0 74.86 78.55 \n",
 "\n",
 "[5 rows x 31 columns]\n",
 " ds NHITS Informer LSTM iTransformer TSMixer \\\n",
-"80 2024-12-16 74.53431 73.944080 71.68200 74.022340 74.295820 \n",
-"81 2024-12-17 74.81450 73.830450 71.95232 74.314950 74.167290 \n",
-"82 2024-12-18 75.55861 73.525100 72.00824 74.441380 74.212180 \n",
-"83 2024-12-19 75.36518 74.012215 72.20199 74.397190 74.330130 \n",
-"84 2024-12-20 74.78187 73.929596 72.23908 74.510895 74.208084 \n",
+"250 2024-12-16 74.268654 73.333750 73.090164 74.007034 74.36094 \n",
+"251 2024-12-17 74.724630 73.373810 73.242540 74.285530 74.24597 \n",
+"252 2024-12-18 74.948860 73.505330 73.400400 74.260290 74.06419 \n",
+"253 2024-12-19 74.396740 73.934380 73.764320 74.429800 74.18059 \n",
+"254 2024-12-20 73.882930 73.700935 73.769050 73.977585 73.97370 \n",
 "\n",
 " TSMixerx PatchTST RNN GRU ... y min_within_quantile \\\n",
-"80 74.41700 74.587390 73.607780 73.747700 ... NaN 74.231680 \n",
-"81 74.36576 74.363060 73.688736 73.833950 ... NaN 73.735420 \n",
-"82 74.29719 74.073555 73.456700 74.146034 ... NaN 74.073555 \n",
-"83 73.79145 74.529945 74.230125 74.144520 ... NaN 74.330130 \n",
-"84 74.59672 74.231255 74.201860 73.996100 ... NaN 74.083810 \n",
+"250 74.67781 74.475680 75.63023 74.853800 ... NaN 74.157196 \n",
+"251 74.46460 74.628000 75.22519 74.957530 ... NaN 73.711680 \n",
+"252 74.32628 74.656815 75.49716 74.890236 ... NaN 74.064190 \n",
+"253 74.41026 74.698875 75.87007 75.118866 ... NaN 74.148070 \n",
+"254 74.49235 74.345410 75.88466 75.186325 ... NaN 73.816990 \n",
 "\n",
-" max_within_quantile id CREAT_DATE min_price max_price 序号 LOW_PRICE \\\n",
-"80 74.621160 81 2024-12-16 72.75007 74.62116 NaN NaN \n",
-"81 74.682365 82 2024-12-16 72.72196 74.81450 NaN NaN \n",
-"82 75.157074 83 2024-12-16 73.12483 75.55861 NaN NaN \n",
-"83 75.339240 84 2024-12-16 73.07359 75.36518 NaN NaN \n",
-"84 74.604610 85 2024-12-16 72.93583 74.78187 NaN NaN \n",
+" max_within_quantile id CREAT_DATE min_price max_price 序号 \\\n",
+"250 74.576454 301 2024-12-16 73.416857 75.416857 3.0 \n",
+"251 74.948060 302 2024-12-16 73.434301 75.434301 2.0 \n",
+"252 75.200980 303 2024-12-16 73.707471 75.707471 1.0 \n",
+"253 75.395440 304 2024-12-16 73.639791 75.639791 NaN \n",
+"254 74.345410 305 2024-12-16 73.067399 75.067399 NaN \n",
 "\n",
-" HIGH_PRICE \n",
-"80 NaN \n",
-"81 NaN \n",
-"82 NaN \n",
-"83 NaN \n",
-"84 NaN \n",
+" LOW_PRICE HIGH_PRICE \n",
+"250 72.53 73.28 \n",
+"251 72.48 74.18 \n",
+"252 72.80 74.20 \n",
+"253 NaN NaN \n",
+"254 NaN NaN \n",
 "\n",
 "[5 rows x 31 columns]\n"
 ]
@@ -79,9 +79,9 @@
 "import os\n",
 "import pandas as pd\n",
 "\n",
-"# dataset = r'D:\\liurui\\dev\\code\\PriceForecast\\yuanyoudataset'\n",
+"dataset = r'yuanyoudataset'\n",
 "\n",
-"dataset = r'C:\\Users\\Administrator\\Desktop' \n",
+"# dataset = r'C:\\Users\\Administrator\\Desktop' \n",
 "\n",
 "# forecast price data\n",
 "# dbfilename = os.path.join(r'D:\\code\\PriceForecast\\yuanyoudataset','jbsh_yuanyou.db')\n",
@@ -93,7 +93,7 @@
 "# print(df1.shape)\n",
 "\n",
 "# forecast price data\n",
-"dfcsvfilename = os.path.join(dataset,'accuracy_ten.csv')\n",
+"dfcsvfilename = os.path.join(dataset,'accuracy_five_mean.csv')\n",
 "df1 = pd.read_csv(dfcsvfilename)\n",
 "print(df1.shape)\n",
 "\n",
@@ -126,19 +126,19 @@
 },
 {
  "cell_type": "code",
- "execution_count": 27,
+ "execution_count": 10,
  "id": "0d77ab7d",
  "metadata": {},
  "outputs": [],
  "source": [
  "# mean of the top five models from the evaluation \n",
- "df['min_price'] = df.iloc[:,1:11].mean(axis=1) -2\n",
- "df['max_price'] = df.iloc[:,1:11].mean(axis=1) +2"
+ "df['min_price'] = df.iloc[:,1:6].mean(axis=1) -1.5\n",
+ "df['max_price'] = df.iloc[:,1:6].mean(axis=1) +1.5"
  ]
 },
 {
  "cell_type": "code",
- "execution_count": 28,
+ "execution_count": 11,
  "id": "e51c3fd0-6bff-45de-b8b6-971e7986c7a7",
  "metadata": {},
  "outputs": [
@@ -147,39 +147,107 @@
 "output_type": "stream",
 "text": [
 " 开始日期 结束日期 准确率\n",
-"0 2024-11-08 2024-11-15 0\n",
+"0 2024-09-27 2024-10-04 0\n",
 " 开始日期 结束日期 准确率\n",
-"0 2024-11-15 2024-11-22 0\n",
+"0 2024-09-27 2024-10-04 0\n",
 " 开始日期 结束日期 准确率\n",
-"0 2024-11-15 2024-11-22 0\n",
+"0 2024-09-27 2024-10-04 0\n",
 " 开始日期 结束日期 准确率\n",
-"0 2024-11-15 2024-11-22 0\n",
+"0 2024-09-27 2024-10-04 0\n",
 " 开始日期 结束日期 准确率\n",
-"0 2024-11-15 2024-11-22 0\n",
+"0 2024-09-27 2024-10-04 0\n",
 " 开始日期 结束日期 准确率\n",
-"0 2024-11-15 2024-11-22 0\n",
+"0 2024-10-04 2024-10-11 0.495046\n",
 " 开始日期 结束日期 准确率\n",
-"0 2024-11-22 2024-11-29 0.808456\n",
+"0 2024-10-04 2024-10-11 0.495046\n",
 " 开始日期 结束日期 准确率\n",
-"0 2024-11-22 2024-11-29 0.808456\n",
+"0 2024-10-04 2024-10-11 0.495046\n",
 " 开始日期 结束日期 准确率\n",
-"0 2024-11-22 2024-11-29 0.808456\n",
+"0 2024-10-04 2024-10-11 0.495046\n",
 " 开始日期 结束日期 准确率\n",
-"0 2024-11-22 2024-11-29 0.808456\n",
+"0 2024-10-04 2024-10-11 0.495046\n",
 " 开始日期 结束日期 准确率\n",
-"0 2024-11-22 2024-11-29 0.808456\n",
+"0 2024-10-11 2024-10-18 0.449368\n",
 " 开始日期 结束日期 准确率\n",
-"0 2024-11-29 2024-12-06 0.955061\n",
+"0 2024-10-11 2024-10-18 0.449368\n",
 " 开始日期 结束日期 准确率\n",
-"0 2024-11-29 2024-12-06 0.955061\n",
+"0 2024-10-11 2024-10-18 0.449368\n",
 " 开始日期 结束日期 准确率\n",
-"0 2024-11-29 2024-12-06 0.955061\n",
+"0 2024-10-11 2024-10-18 0.449368\n",
 " 开始日期 结束日期 准确率\n",
-"0 2024-11-29 2024-12-06 0.955061\n",
+"0 2024-10-11 2024-10-18 0.449368\n",
 " 开始日期 结束日期 准确率\n",
-"0 2024-11-29 2024-12-06 0.955061\n",
+"0 2024-10-18 2024-10-25 0.814057\n",
 " 开始日期 结束日期 准确率\n",
-"0 2024-12-06 2024-12-13 0.905554\n"
+"0 2024-10-18 2024-10-25 0.814057\n",
+" 开始日期 结束日期 准确率\n",
+"0 2024-10-18 2024-10-25 0.814057\n",
+" 开始日期 结束日期 准确率\n",
+"0 2024-10-18 2024-10-25 0.814057\n",
+" 开始日期 结束日期 准确率\n",
+"0 2024-10-18 2024-10-25 0.814057\n",
+" 开始日期 结束日期 准确率\n",
+"0 2024-10-25 2024-11-01 0.433599\n",
+" 开始日期 结束日期 准确率\n",
+"0 2024-10-25 2024-11-01 0.433599\n",
+" 开始日期 结束日期 准确率\n",
+"0 2024-10-25 2024-11-01 0.433599\n",
+" 开始日期 结束日期 准确率\n",
+"0 2024-10-25 2024-11-01 0.433599\n",
+" 开始日期 结束日期 准确率\n",
+"0 2024-10-25 2024-11-01 0.433599\n",
+" 开始日期 结束日期 准确率\n",
+"0 2024-11-01 2024-11-08 0.894767\n",
+" 开始日期 结束日期 准确率\n",
+"0 2024-11-01 2024-11-08 0.894767\n",
+" 开始日期 结束日期 准确率\n",
+"0 2024-11-01 2024-11-08 0.894767\n",
+" 开始日期 结束日期 准确率\n",
+"0 2024-11-01 2024-11-08 0.894767\n",
+" 开始日期 结束日期 准确率\n",
+"0 2024-11-01 2024-11-08 0.894767\n",
+" 开始日期 结束日期 准确率\n",
+"0 2024-11-08 2024-11-15 0.915721\n",
+" 开始日期 结束日期 准确率\n",
+"0 2024-11-08 2024-11-15 0.915721\n",
+" 开始日期 结束日期 准确率\n",
+"0 2024-11-08 2024-11-15 0.915721\n",
+" 开始日期 结束日期 准确率\n",
+"0 2024-11-08 2024-11-15 0.915721\n",
+" 开始日期 结束日期 准确率\n",
+"0 2024-11-08 2024-11-15 0.915721\n",
+" 开始日期 结束日期 准确率\n",
+"0 2024-11-15 2024-11-22 0.835755\n",
+" 开始日期 结束日期 准确率\n",
+"0 2024-11-15 2024-11-22 0.835755\n",
+" 开始日期 结束日期 准确率\n",
+"0 2024-11-15 2024-11-22 0.835755\n",
+" 开始日期 结束日期 准确率\n",
+"0 2024-11-15 2024-11-22 0.835755\n",
+" 开始日期 结束日期 准确率\n",
+"0 2024-11-15 2024-11-22 0.835755\n",
+" 开始日期 结束日期 准确率\n",
+"0 2024-11-22 2024-11-29 0.718009\n",
+" 开始日期 结束日期 准确率\n",
+"0 2024-11-22 2024-11-29 0.718009\n",
+" 开始日期 结束日期 准确率\n",
+"0 2024-11-22 2024-11-29 0.718009\n",
+" 开始日期 结束日期 准确率\n",
+"0 2024-11-22 2024-11-29 0.718009\n",
+" 开始日期 结束日期 准确率\n",
+"0 2024-11-22 2024-11-29 0.718009\n",
+" 开始日期 结束日期 准确率\n",
+"0 2024-11-29 2024-12-06 0.948363\n",
+" 开始日期 结束日期 准确率\n",
+"0 2024-11-29 2024-12-06 0.948363\n",
+" 开始日期 结束日期 准确率\n",
+"0 2024-11-29 2024-12-06 0.948363\n",
+" 开始日期 结束日期 准确率\n",
+"0 2024-11-29 2024-12-06 0.948363\n",
+" 开始日期 结束日期 准确率\n",
+"0 2024-11-29 2024-12-06 0.948363\n",
+" 开始日期 结束日期 准确率\n",
+"0 2024-12-06 2024-12-13 0.947006\n"
 ]
 }
 ],
@@ -239,7 +307,7 @@
 "end_times = df['CREAT_DATE'].unique()\n",
 "for endtime in end_times:\n",
 "    up_week_dates = get_week_date(endtime)\n",
-"    _get_accuracy_rate(df,up_week_dates,end_time)\n",
+"    _get_accuracy_rate(df,up_week_dates,endtime)\n",
 "\n",
 "# print the results\n",
 "\n"
@@ -247,10 +315,32 @@
 },
 {
  "cell_type": "code",
- "execution_count": null,
+ "execution_count": 12,
  "id": "0f942c69",
  "metadata": {},
- "outputs": [],
+ "outputs": [
+  {
+   "ename": "KeyError",
+   "evalue": "'PREDICT_DATE'",
+   "output_type": "error",
+   "traceback": [
+    "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m",
+    "\u001b[1;31mKeyError\u001b[0m Traceback (most recent call last)",
+    "File \u001b[1;32md:\\ProgramData\\anaconda3\\Lib\\site-packages\\pandas\\core\\indexes\\base.py:3791\u001b[0m, in \u001b[0;36mIndex.get_loc\u001b[1;34m(self, key)\u001b[0m\n\u001b[0;32m 3790\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m-> 3791\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_engine\u001b[38;5;241m.\u001b[39mget_loc(casted_key)\n\u001b[0;32m 3792\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mKeyError\u001b[39;00m \u001b[38;5;28;01mas\u001b[39;00m err:\n",
+    "File \u001b[1;32mindex.pyx:152\u001b[0m, in \u001b[0;36mpandas._libs.index.IndexEngine.get_loc\u001b[1;34m()\u001b[0m\n",
+    "File \u001b[1;32mindex.pyx:181\u001b[0m, in \u001b[0;36mpandas._libs.index.IndexEngine.get_loc\u001b[1;34m()\u001b[0m\n",
+    "File \u001b[1;32mpandas\\_libs\\hashtable_class_helper.pxi:7080\u001b[0m, in \u001b[0;36mpandas._libs.hashtable.PyObjectHashTable.get_item\u001b[1;34m()\u001b[0m\n",
+    "File \u001b[1;32mpandas\\_libs\\hashtable_class_helper.pxi:7088\u001b[0m, in \u001b[0;36mpandas._libs.hashtable.PyObjectHashTable.get_item\u001b[1;34m()\u001b[0m\n",
+    "\u001b[1;31mKeyError\u001b[0m: 'PREDICT_DATE'",
+    "\nThe above exception was the direct cause of the following exception:\n",
+    "\u001b[1;31mKeyError\u001b[0m Traceback (most recent call last)",
+    "Cell \u001b[1;32mIn[12], line 4\u001b[0m\n\u001b[0;32m 2\u001b[0m \u001b[38;5;66;03m# ds 按周取\u001b[39;00m\n\u001b[0;32m 3\u001b[0m df[\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mDs_Week\u001b[39m\u001b[38;5;124m'\u001b[39m] \u001b[38;5;241m=\u001b[39m df[\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mds\u001b[39m\u001b[38;5;124m'\u001b[39m]\u001b[38;5;241m.\u001b[39mapply(\u001b[38;5;28;01mlambda\u001b[39;00m x: x\u001b[38;5;241m.\u001b[39mstrftime(\u001b[38;5;124m'\u001b[39m\u001b[38;5;124m%\u001b[39m\u001b[38;5;124mU\u001b[39m\u001b[38;5;124m'\u001b[39m))\n\u001b[1;32m----> 4\u001b[0m df[\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mPre_Week\u001b[39m\u001b[38;5;124m'\u001b[39m] \u001b[38;5;241m=\u001b[39m df[\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mPREDICT_DATE\u001b[39m\u001b[38;5;124m'\u001b[39m]\u001b[38;5;241m.\u001b[39mapply(\u001b[38;5;28;01mlambda\u001b[39;00m x: x\u001b[38;5;241m.\u001b[39mstrftime(\u001b[38;5;124m'\u001b[39m\u001b[38;5;124m%\u001b[39m\u001b[38;5;124mU\u001b[39m\u001b[38;5;124m'\u001b[39m))\n",
+    "File \u001b[1;32md:\\ProgramData\\anaconda3\\Lib\\site-packages\\pandas\\core\\frame.py:3893\u001b[0m, in \u001b[0;36mDataFrame.__getitem__\u001b[1;34m(self, key)\u001b[0m\n\u001b[0;32m 3891\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mcolumns\u001b[38;5;241m.\u001b[39mnlevels \u001b[38;5;241m>\u001b[39m \u001b[38;5;241m1\u001b[39m:\n\u001b[0;32m 3892\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_getitem_multilevel(key)\n\u001b[1;32m-> 3893\u001b[0m indexer \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mcolumns\u001b[38;5;241m.\u001b[39mget_loc(key)\n\u001b[0;32m 3894\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m is_integer(indexer):\n\u001b[0;32m 3895\u001b[0m indexer \u001b[38;5;241m=\u001b[39m [indexer]\n",
+    "File \u001b[1;32md:\\ProgramData\\anaconda3\\Lib\\site-packages\\pandas\\core\\indexes\\base.py:3798\u001b[0m, in \u001b[0;36mIndex.get_loc\u001b[1;34m(self, key)\u001b[0m\n\u001b[0;32m 3793\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28misinstance\u001b[39m(casted_key, \u001b[38;5;28mslice\u001b[39m) \u001b[38;5;129;01mor\u001b[39;00m (\n\u001b[0;32m 3794\u001b[0m \u001b[38;5;28misinstance\u001b[39m(casted_key, abc\u001b[38;5;241m.\u001b[39mIterable)\n\u001b[0;32m 3795\u001b[0m \u001b[38;5;129;01mand\u001b[39;00m \u001b[38;5;28many\u001b[39m(\u001b[38;5;28misinstance\u001b[39m(x, \u001b[38;5;28mslice\u001b[39m) \u001b[38;5;28;01mfor\u001b[39;00m x \u001b[38;5;129;01min\u001b[39;00m casted_key)\n\u001b[0;32m 3796\u001b[0m ):\n\u001b[0;32m 3797\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m InvalidIndexError(key)\n\u001b[1;32m-> 3798\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mKeyError\u001b[39;00m(key) \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01merr\u001b[39;00m\n\u001b[0;32m 3799\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mTypeError\u001b[39;00m:\n\u001b[0;32m 3800\u001b[0m \u001b[38;5;66;03m# If we have a listlike key, _check_indexing_error will raise\u001b[39;00m\n\u001b[0;32m 3801\u001b[0m \u001b[38;5;66;03m# InvalidIndexError. Otherwise we fall through and re-raise\u001b[39;00m\n\u001b[0;32m 3802\u001b[0m \u001b[38;5;66;03m# the TypeError.\u001b[39;00m\n\u001b[0;32m 3803\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_check_indexing_error(key)\n",
+    "\u001b[1;31mKeyError\u001b[0m: 'PREDICT_DATE'"
+   ]
+  }
+ ],
 "source": [
 "import datetime\n",
 "# take ds by week\n",