From debc56290664327b67739824570998b3aec86dd3 Mon Sep 17 00:00:00 2001
From: workpc
Date: Fri, 20 Dec 2024 17:44:41 +0800
Subject: [PATCH] auptest v0.0.1 API documentation
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
---
 auptest.py                   | 674 +++++++++++++++++++----------------
 原油价格预测准确率计算.ipynb | 110 +++---
 2 files changed, 418 insertions(+), 366 deletions(-)

diff --git a/auptest.py b/auptest.py
index 3980936..543054e 100644
--- a/auptest.py
+++ b/auptest.py
@@ -6,37 +6,7 @@ from fastapi.middleware.cors import CORSMiddleware
 from requests_ntlm import HttpNtlmAuth
 import urllib3
 urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
-
 from typing import Dict
-
-
-'''
-sql:
-
--- Create the table pims_api_log
-CREATE TABLE pims_api_log (
-    -- Auto-incrementing unique primary key
-    id INT AUTO_INCREMENT PRIMARY KEY,
-    -- Time the request reached the server; DATETIME for easy reading
-    request_time DATETIME NOT NULL,
-    -- Requesting IP address, identifies the request source; VARCHAR sized for an IP
-    request_ip VARCHAR(15) NOT NULL,
-    -- Requested URL, i.e. which endpoint path was called; VARCHAR sized for the longest expected path
-    request_url VARCHAR(255) NOT NULL,
-    -- Request method such as GET or POST; short names stored as VARCHAR
-    request_method VARCHAR(10) NOT NULL,
-    -- Received request parameters, stored as JSON text to handle complex structures; TEXT holds long content
-    request_params TEXT,
-    -- HTTP status code returned to the client; INT
-    response_status_code INT NOT NULL,
-    -- Response content, also stored as JSON text so any response format can be kept; TEXT
-    response_content TEXT,
-    -- Time the endpoint finished processing and responded; DATETIME
-    response_time DATETIME NOT NULL
-);
-'''
-
-
 import mysql.connector
 from datetime import datetime
@@ -46,8 +16,6 @@ from datetime import datetime
 host = 'rm-2zehj3r1n60ttz9x5ko.mysql.rds.aliyuncs.com'  # used for access from Beijing
 database = 'jingbo-test'  # used for access from Beijing
-
-
 # Database connection settings; adjust for your environment
 config = {
     "user": "jingbo",
@@ -56,45 +24,50 @@ config = {
     "database": database
 }
 
-'''
-  `ID` varchar(128) NOT NULL COMMENT 'ID',
-  `REQUEST_METHOD` varchar(128) DEFAULT NULL COMMENT 'method name',
-  `REQUEST_TIME` datetime DEFAULT NULL COMMENT 'request time',
-  `REQUEST_URL` varchar(256) DEFAULT NULL COMMENT 'request URL',
-  `USING_FLAG` varchar(1) DEFAULT NULL COMMENT 'enabled flag',
-  `REQUEST_PARAMS` text COMMENT 'received request parameters',
-  `RESPONSE_CONTENT` text COMMENT 'response content',
-  `RESPONSE_TIME` datetime DEFAULT NULL COMMENT 'response time',
-
-'''
-
-
+# GraphQL API, e.g. url = 'http://10.88.14.86/AspenTech/AspenUnified/api/v1/model/Chambroad20241205/graphql'
+graphql_host = 'http://10.88.14.86'
+graphql_path = '/AspenTech/AspenUnified/api/v1/model/Chambroad20241205/graphql'
+url = graphql_host + graphql_path
+graphql_username = "bw19382"
+graphql_password = "Fudong3!"
+auth = HttpNtlmAuth(f'{graphql_username}', f'{graphql_password}')
+# Request headers
+headers = {'content-type': 'application/json;charset=UTF-8'}
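(Illustrative sketch, editorial and not part of the patch: the configuration above is
everything needed to reach the Aspen Unified GraphQL endpoint directly. The snippet
assumes the intranet host is reachable and the NTLM credentials are valid; the `cases`
query is the same one this module registers later. The timeout value is a hypothetical
choice, not taken from the patch.)

    import requests
    from requests_ntlm import HttpNtlmAuth

    resp = requests.post(
        url,  # graphql_host + graphql_path from the configuration above
        json={"query": "query { cases { items { name } } }"},
        auth=HttpNtlmAuth(graphql_username, graphql_password),
        headers=headers,
        verify=False,  # certificate checks are disabled for this intranet host
        timeout=30,    # hypothetical timeout
    )
    resp.raise_for_status()
    print(resp.json()["data"]["cases"]["items"])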
 def insert_api_log(request_time, request_url, request_method, request_params, response_content, response_time):
+    '''
+    Write one row to the request-log table v_tbl_aup_api_log.
+    '''
     try:
         # Open the database connection
         cnx = mysql.connector.connect(**config)
         cursor = cnx.cursor()
         # Previously the new record's ID was derived from the existing rows
-        count_query = "SELECT COUNT(*) FROM v_tbl_aup_api_log"
-        cursor.execute(count_query)
-        result = cursor.fetchone()
-        new_id = result[0] + 1 if result else 1  # 1 for an empty table, otherwise count + 1
+        # count_query = "SELECT max(ID) FROM v_tbl_aup_api_log"
+        # cursor.execute(count_query)
+        # result = cursor.fetchone()
+        # new_id = int(result[0]) + 1 if result else 1  # 1 for an empty table, otherwise max + 1
         # SQL INSERT statement
+        # insert_query = """
+        #     INSERT INTO v_tbl_aup_api_log (ID, REQUEST_TIME, REQUEST_URL, REQUEST_METHOD, REQUEST_PARAMS, RESPONSE_CONTENT, RESPONSE_TIME)
+        #     VALUES (%s, %s, %s, %s, %s, %s, %s)
+        # """
         insert_query = """
-        INSERT INTO v_tbl_aup_api_log (ID,REQUEST_TIME, REQUEST_URL, REQUEST_METHOD, REQUEST_PARAMS, RESPONSE_CONTENT, RESPONSE_TIME)
-        VALUES (%s,%s, %s, %s, %s, %s, %s)
+            INSERT INTO v_tbl_aup_api_log (REQUEST_TIME, REQUEST_URL, REQUEST_METHOD, REQUEST_PARAMS, RESPONSE_CONTENT, RESPONSE_TIME)
+            VALUES (%s, %s, %s, %s, %s, %s)
         """
         # Data to insert; the order must match the placeholders in the SQL statement
-        data = (new_id, request_time, request_url, request_method, request_params, response_content, response_time)
+        # data = (new_id, request_time, request_url, request_method, request_params, response_content, response_time)
+        data = (request_time, request_url, request_method, request_params.encode('utf-8'), response_content.encode('utf-8'), response_time)
         # Run the insert
         cursor.execute(insert_query, data)
         # Commit so the insert takes effect
         cnx.commit()
     except mysql.connector.Error as err:
         print(f"Error: {err}")
+    except UnboundLocalError as err:
+        print(f"Error: {err}")
     finally:
         # Close the cursor and connection
         if cursor:
@@ -105,7 +78,23 @@
 
-app = FastAPI(docs_url="/docs")
+tags_metadata = [
+    {
+        "name": "get_cases",
+        "description": "Fetch all cases",
+    },
+    {
+        "name": "generate_graphql_query",
+        "description": "Generate a GraphQL query and receive the query result",
+    },
+]
+app = FastAPI(
+    title="AUP Data Integration API",
+    version="0.0.1",
+    openapi_tags=tags_metadata,
+    # openapi_url=""
+)
 
 # Allow cross-origin requests
 app.add_middleware(
@@ -115,17 +104,14 @@
     allow_headers=["*"],
 )
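(Illustrative sketch, editorial and not part of the patch: a direct call to the
insert_api_log helper defined above, assuming the v_tbl_aup_api_log table exists with
the columns named in the INSERT statement. Both payload strings here are made-up
sample values.)

    import json
    from datetime import datetime

    started = datetime.now()
    insert_api_log(
        request_time=started,
        request_url='/get_cases',
        request_method='GET',
        request_params=json.dumps({"query": "query { cases { items { name } } }"}),
        response_content=json.dumps({"data": {"cases": {"items": []}}}),
        response_time=datetime.now(),
    )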
"waitForCaseStackJob(name: \"Job2\")" + self.case_qurey = ''' + query + { + cases + { + items + { + name + } + } + } + ''' + # 参数示例 + self.purchase_inputs = ''' + [{ + "name": "11月度计划", + "inputs": [ + { + "name": "CWT", + "inputs": [ + { + "field": "Cost", + "periodName": "1", + "value": 3100 + } + ] + }, + { + "name": "CWT", + "inputs": [ + { + "field": "Cost", + "periodName": "1", + "value": 3100 + } + ] + } + ] + }] + ''' + self.case_execution_input = ''' + { + "name": "Job2", + "cases": [ + { + "name": "11月度计划" + }, + { + "name": "二催开工" + }, + { + "name": "一焦化停工" + }, + { + "name": "焦化加工油浆" + }, + { + "name": "焦化加工低硫原油" + }, + { + "name": "焦化加工低硫渣油" + } + ] } - } - } -} -} -""" + ''' + self.wait_for_case_stack_job_name = 'Job2' -payload_json = { - "query": query, - "operationName": "" -} +templates = GraphqlQueryTemplates() - -query2 = ''' -query -{ - cases - { - items - { - name - } - } -} -''' - -payload_json2 = { - "query": query2, - "operationName": "" -} - - - - -graphql_username = "bw19382" -graphql_password = "Fudong3!" -auth = HttpNtlmAuth(f'{graphql_username}', f'{graphql_password}') - -example_query = ''' - 'inputs':{ - name:"11月度计划" - inputs:[ - { - name:"CWT" - inputs:[ - { - field:Cost - periodName:"1" - value: 3100 - } - ] - }, - { - name:"VRa" - inputs:[ - { - field:Cost - periodName:"1" - value: 3333 - } - ] - }, - ''' - - -def log_api_call(func): - @functools.wraps(func) - def wrapper(*args, **kwargs): - request_time = datetime.now() - request_url = None - request_method = 'post' - request_params = None - - try: - # 执行被装饰的函数,获取其返回的响应 - request_time = datetime.now() - response = func(*args, **kwargs) - response_time = datetime.now() - - # 准备请求参数和响应内容,转换为合适的字符串格式用于记录(例如JSON格式字符串) - request_params_str = json.dumps(request_params) if request_params else None - response_content = response.text if hasattr(response, 'text') else None - - # 调用插入日志的函数,将相关信息记录到数据库中(假设insert_api_log函数已正确定义且可访问) - insert_api_log( - request_time, - request_url, - request_method, - request_params_str, - response_content, - response_time - ) - - return response - except Exception as e: - print(f"Error occurred during API call: {e}") - raise - - return wrapper - -@app.post("/graphql") -async def post_execute_graphql_query(request: Request, - query:str = Body(query,example_query=query) - ): - payload_json = { - "query": query - } - request_time = datetime.now() - full_path = str(request.url.path) - session = requests.Session() - response = session.post(url=url, headers=headers, json=payload_json, auth=auth, verify=False) - response_time = datetime.now() - - # 调用插入日志的函数,将相关信息记录到数据库中(假设insert_api_log函数已正确定义且可访问) - insert_api_log( - request_time, - full_path, - 'POST', - json.dumps(payload_json), - json.dumps(response.json()), - response_time - ) - - - if response.status_code!= 200: - raise HTTPException(status_code=response.status_code, detail=response.text) - return response.json() - -# def insert_api_log(request_time, request_url, request_method, request_params, response_content, response_time): - -@app.post("/cases") -async def post_cases_query_async(request: Request): - payload_json2 = { - "query": query2 - } - full_path = str(request.url.path) - request_time = datetime.now() - session = requests.Session() - response = session.post(url=url, headers=headers, json=payload_json2, auth=auth, verify=False) - response_time = datetime.now() - - # 调用插入日志的函数,将相关信息记录到数据库中(假设insert_api_log函数已正确定义且可访问) - insert_api_log( - request_time, - full_path, - 'POST', - json.dumps(payload_json), - 
 def generate_custom_graphql_query(
         purchase_inputs=None,
         case_execution_input=None,
@@ -423,33 +305,7 @@ mutation{
         raise TypeError("wait_for_case_stack_job_name should be a string or None.")
 
     if purchase_inputs:
-        # Template for the purchases inputs section
-        purchase_inputs_template = """
-            name:"11月度计划"
-            inputs:[
-                {
-                    name:"CWT"
-                    inputs:[
-                        {
-                            field:Cost
-                            periodName:"1"
-                            value: 3100
-                        }
-                    ]
-                },
-                {
-                    name:"CWT"
-                    inputs:[
-                        {
-                            field:Cost
-                            periodName:"1"
-                            value: 3100
-                        }
-                    ]
-                },
-            ]
-        """
-        new_purchase_inputs_str = ""
+        new_purchase_inputs_str = "["
         for input_data in purchase_inputs:
             input_str = f"""
                 name: "{input_data['name']}"
@@ -458,32 +314,26 @@ mutation{
             inner_inputs = input_data.get('inputs', [])
             for inner_input in inner_inputs:
                 inner_str = f"""
-                    {{
-                        field: "{inner_input['field']}"
-                        periodName: "{inner_input['periodName']}"
-                        value: {inner_input['value']}
-                    }}
-                """
+                    name: "{inner_input['name']}"
+                    inputs: [
+                """
                 input_str += inner_str
+                # Renamed from `input` to avoid shadowing the builtin
+                for field_input in inner_input['inputs']:
+                    inner_str = f"""
+                        {{
+                            field: "{field_input['field']}"
+                            periodName: "{field_input['periodName']}"
+                            value: {field_input['value']}
+                        }}
+                    """
+                    input_str += inner_str
+                input_str += " ]"
             input_str += " ]"
             new_purchase_inputs_str += input_str
-        base_query = base_query.replace(purchase_inputs_template, new_purchase_inputs_str)
+        base_query = base_query.replace(templates.purchase_inputs_template, new_purchase_inputs_str)
 
     if case_execution_input:
-        # Template for the caseExecution input section
-        case_execution_input_template = """
-            name: "Job2"
-            cases: [
-                {name: "11月度计划"}
-                {name: "二催开工"}
-                {name: "一焦化停工"}
-                {name: "焦化加工油浆"}
-                {name: "焦化加工低硫原油"}
-                {name: "焦化加工低硫渣油"}
-
-            ]
-        """
         input_dict_str = f"""
             name: "{case_execution_input['name']}"
            cases: [
        """
         for case in case_execution_input['cases']:
             case_str = f"""
                {{name: "{case['name']}"}}
            """
@@ -495,24 +345,20 @@ mutation{
             input_dict_str += case_str
         input_dict_str += " ]"
 
-        base_query = base_query.replace(case_execution_input_template, input_dict_str)
+        base_query = base_query.replace(templates.case_execution_input_template, input_dict_str)
 
     if wait_for_case_stack_job_name:
-        # Template for the waitForCaseStackJob section
-        wait_for_case_stack_job_template = "waitForCaseStackJob(name: \"Job2\")"
         new_wait_for_case_stack_job_str = f"waitForCaseStackJob(name: \"{wait_for_case_stack_job_name}\")"
-        base_query = base_query.replace(wait_for_case_stack_job_template, new_wait_for_case_stack_job_str)
+        base_query = base_query.replace(templates.wait_for_case_stack_job_template, new_wait_for_case_stack_job_str)
 
     return base_query
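(Illustrative sketch, editorial and not part of the patch: a direct call to
generate_custom_graphql_query above, using the nested parameter shapes the loops
expect. The values echo the samples in GraphqlQueryTemplates; it assumes the template
fragments actually occur verbatim inside base_query, since substitution is plain
string replacement.)

    sample_purchases = [{
        "name": "11月度计划",
        "inputs": [
            {"name": "CWT", "inputs": [
                {"field": "Cost", "periodName": "1", "value": 3100},
            ]},
        ],
    }]
    sample_job = {"name": "Job2", "cases": [{"name": "11月度计划"}, {"name": "二催开工"}]}

    custom_query = generate_custom_graphql_query(
        purchase_inputs=sample_purchases,
        case_execution_input=sample_job,
        wait_for_case_stack_job_name="Job2",
    )
    print(custom_query)  # the mutation text with all three template sections replaced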
-# POST endpoint that accepts parameters and builds the GraphQL query
-@app.post("/generate_graphql_query")
+@app.post("/generate_graphql_query", tags=['generate_graphql_query'])
 async def generate_graphql_query(
-        request = Request,
-        purchase_inputs: list[dict] = Body(None, embed=True),
-        case_execution_input: dict = Body(None, embed=True),
-        wait_for_case_stack_job_name: str = Body(None, embed=True)
+        request: Request,
+        # Defaults and OpenAPI examples come from the sample payloads in GraphqlQueryTemplates
+        purchase_inputs: list[dict] = Body(json.loads(templates.purchase_inputs), embed=True, example=json.loads(templates.purchase_inputs)),
+        case_execution_input: dict = Body(json.loads(templates.case_execution_input), embed=True, example=json.loads(templates.case_execution_input)),
+        wait_for_case_stack_job_name: str = Body(templates.wait_for_case_stack_job_name, embed=True, example=templates.wait_for_case_stack_job_name),
 ):
     try:
         custom_query = generate_custom_graphql_query(purchase_inputs, case_execution_input, wait_for_case_stack_job_name)
@@ -522,19 +368,43 @@ async def generate_graphql_query(
         payload_json = {
             "query": custom_query
         }
         request_time = datetime.now()
         full_path = str(request.url.path)
         session = requests.Session()
-        response = session.post(url=url, headers=headers, json=payload_json, auth=auth, verify=False)
-        response_time = datetime.now()
-
-        # Log the call to the database (assumes insert_api_log is defined and reachable)
-        insert_api_log(
-            request_time,
-            full_path,
-            'POST',
-            json.dumps(payload_json),
-            json.dumps(response.json()),
-            response_time
-        )
-
+        try:
+            # requests is synchronous, so the call is not awaited
+            response = session.post(url=url, headers=headers, json=payload_json, auth=auth, verify=False, timeout=300)
+        except requests.exceptions.ConnectTimeout as e:
+            # Build a response payload describing the error
+            error_response_data = {
+                "errors": [{"message": "Connection timed out; check the network and retry later"}],
+                "data": {},
+                "status_code": 503  # 503 = service temporarily unavailable; adjust to the specific error type
+            }
+            response = error_response_data
+            raise HTTPException(status_code=503, detail=response)  # raise with the matching status code
+        except requests.exceptions.RequestException as e:
+            # Catch all other request-related errors in one place
+            error_response_data = {
+                "errors": [{"message": "The request failed for another reason; contact the administrator"}],
+                "data": {},
+                "status_code": 500
+            }
+            response = error_response_data
+            raise HTTPException(status_code=500, detail=response)
+        finally:
+            response_time = datetime.now()
+            try:
+                res = response.json()
+            except (UnboundLocalError, AttributeError):
+                res = response
+            # Log the call to the database (assumes insert_api_log is defined and reachable)
+            insert_api_log(
+                request_time,
+                full_path,
+                request.method,
+                json.dumps(payload_json),
+                json.dumps(res),
+                response_time
+            )
 
         if response.status_code != 200:
             raise HTTPException(status_code=response.status_code, detail=response.text)
         return response.json()
     except TypeError as e:
         return {"error": str(e)}
 
@@ -542,6 +412,182 @@ async def generate_graphql_query(
+@app.get("/get_cases", tags=['get_cases'])
+async def post_cases_query_async(request: Request):
+    payload_json2 = {
+        "query": templates.case_query
+    }
+    full_path = str(request.url.path)
+    request_time = datetime.now()
+    session = requests.Session()
+    try:
+        # requests is synchronous, so the call is not awaited
+        response = session.post(url=url, headers=headers, json=payload_json2, auth=auth, verify=False)
+    except requests.exceptions.ConnectTimeout as e:
+        # Build a response payload describing the error
+        error_response_data = {
+            "errors": [{"message": "Connection timed out; check the network and retry later"}],
+            "data": {},
+            "status_code": 503  # 503 = service temporarily unavailable; adjust to the specific error type
+        }
+        response = error_response_data
+        raise HTTPException(status_code=503, detail=response)  # raise with the matching status code
+    except requests.exceptions.RequestException as e:
+        # Catch all other request-related errors in one place
+        error_response_data = {
+            "errors": [{"message": "The request failed for another reason; contact the administrator"}],
+            "data": {},
+            "status_code": 500
+        }
+        response = error_response_data
+        raise HTTPException(status_code=500, detail=response)
+    finally:
+        response_time = datetime.now()
+        try:
+            res = response.json()
+        except (UnboundLocalError, AttributeError):
+            res = response
+        # Log the call to the database (assumes insert_api_log is defined and reachable)
+        insert_api_log(
+            request_time,
+            full_path,
+            request.method,
+            json.dumps(payload_json2),
+            json.dumps(res),
+            response_time
+        )
+
+    if response.status_code != 200:
+        raise HTTPException(status_code=response.status_code, detail=response.text)
+
+    # Parse the JSON response into a dict
+    data_dict = response.json()
+    # Extract the list of case names
+    name_list = [item["name"] for item in data_dict["data"]["cases"]["items"]]
+    data_dict['name_list'] = name_list
+    # Return the dict directly; FastAPI serializes it (json.dumps here would double-encode)
+    return data_dict
 
 if __name__ == "__main__":
     import uvicorn
-    uvicorn.run(app, host="127.0.0.1", port=8001)
+    uvicorn.run(app, host="127.0.0.1", port=8003)
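(Illustrative sketch, editorial and not part of the patch: exercising the
/generate_graphql_query endpoint with the service running locally on port 8003.
Because the parameters are declared with embed=True, each one is a top-level key in
the JSON body; the values mirror the samples in GraphqlQueryTemplates.)

    import json
    import requests

    body = {
        "purchase_inputs": json.loads(templates.purchase_inputs),
        "case_execution_input": json.loads(templates.case_execution_input),
        "wait_for_case_stack_job_name": "Job2",
    }
    r = requests.post("http://127.0.0.1:8003/generate_graphql_query", json=body, timeout=310)
    print(r.status_code, r.json())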
host="127.0.0.1", port=8003) + + +# query = """ +# mutation{ +# purchases{ +# update(inputs:[{ +# name:"11月度计划" +# inputs:[ +# { +# name:"CWT" +# inputs:[ +# { +# field:Cost +# periodName:"1" +# value: 3100 +# } +# ] +# }, +# { +# name:"CWT" +# inputs:[ +# { +# field:Cost +# periodName:"1" +# value: 3100 +# } +# ] +# }, +# ] +# }]) +# } +# caseExecution { +# submitCaseStack( +# input:{ +# name: "Job2" +# cases: [ +# {name: "11月度计划"} +# {name: "二催开工"} +# {name: "一焦化停工"} +# {name: "焦化加工油浆"} +# {name: "焦化加工低硫原油"} +# {name: "焦化加工低硫渣油"} + +# ] +# } +# ) +# {id} +# waitForCaseStackJob(name: "Job2") +# { +# started +# submitted +# finished +# executionStatus +# cases{ +# items{ +# name +# objectiveValue +# } +# } +# } +# } +# } +# """ + +# payload_json = { +# "query": query, +# "operationName": "" +# } + + + +# query2 = ''' +# query +# { +# cases +# { +# items +# { +# name +# } +# } +# } +# ''' + +# payload_json2 = { +# "query": query2, +# "operationName": "" +# } + + + + +# @app.post("/graphql") +# async def post_execute_graphql_query(request: Request, +# query:str = Body(query,example_query=query) +# ): +# payload_json = { +# "query": query +# } +# request_time = datetime.now() +# full_path = str(request.url.path) +# session = requests.Session() +# response = session.post(url=url, headers=headers, json=payload_json, auth=auth, verify=False) +# response_time = datetime.now() + +# # 调用插入日志的函数,将相关信息记录到数据库中(假设insert_api_log函数已正确定义且可访问) +# insert_api_log( +# request_time, +# full_path, +# 'POST', +# json.dumps(payload_json), +# json.dumps(response.json()), +# response_time +# ) + + +# if response.status_code!= 200: +# raise HTTPException(status_code=response.status_code, detail=response.text) +# return response.json() + +# def insert_api_log(request_time, request_url, request_method, request_params, response_content, response_time): diff --git a/原油价格预测准确率计算.ipynb b/原油价格预测准确率计算.ipynb index e2f3285..4f6a04b 100644 --- a/原油价格预测准确率计算.ipynb +++ b/原油价格预测准确率计算.ipynb @@ -2,7 +2,7 @@ "cells": [ { "cell_type": "code", - "execution_count": 9, + "execution_count": 13, "id": "9daadf20-caa6-4b25-901c-6cc3ef563f58", "metadata": {}, "outputs": [ @@ -126,7 +126,7 @@ }, { "cell_type": "code", - "execution_count": 10, + "execution_count": 14, "id": "0d77ab7d", "metadata": {}, "outputs": [], @@ -138,7 +138,7 @@ }, { "cell_type": "code", - "execution_count": 11, + "execution_count": 15, "id": "e51c3fd0-6bff-45de-b8b6-971e7986c7a7", "metadata": {}, "outputs": [ @@ -151,103 +151,109 @@ " 开始日期 结束日期 准确率\n", "0 2024-09-27 2024-10-04 0\n", " 开始日期 结束日期 准确率\n", - "0 2024-09-27 2024-10-04 0\n", + "0 2024-09-27 2024-10-04 0\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ " 开始日期 结束日期 准确率\n", "0 2024-09-27 2024-10-04 0\n", " 开始日期 结束日期 准确率\n", "0 2024-09-27 2024-10-04 0\n", " 开始日期 结束日期 准确率\n", - "0 2024-10-04 2024-10-11 0.495046\n", + "0 2024-10-04 2024-10-11 0.433988\n", " 开始日期 结束日期 准确率\n", - "0 2024-10-04 2024-10-11 0.495046\n", + "0 2024-10-04 2024-10-11 0.433988\n", " 开始日期 结束日期 准确率\n", - "0 2024-10-04 2024-10-11 0.495046\n", + "0 2024-10-04 2024-10-11 0.433988\n", " 开始日期 结束日期 准确率\n", - "0 2024-10-04 2024-10-11 0.495046\n", + "0 2024-10-04 2024-10-11 0.433988\n", " 开始日期 结束日期 准确率\n", - "0 2024-10-04 2024-10-11 0.495046\n", + "0 2024-10-04 2024-10-11 0.433988\n", " 开始日期 结束日期 准确率\n", - "0 2024-10-11 2024-10-18 0.449368\n", + "0 2024-10-11 2024-10-18 0.367557\n", " 开始日期 结束日期 准确率\n", - "0 2024-10-11 2024-10-18 0.449368\n", + "0 2024-10-11 2024-10-18 0.367557\n", " 开始日期 结束日期 准确率\n", - "0 2024-10-11 2024-10-18 
     "  开始日期        结束日期       准确率\n",
-    "0 2024-10-11 2024-10-18  0.449368\n",
+    "0 2024-10-11 2024-10-18  0.367557\n",
     "  开始日期        结束日期       准确率\n",
-    "0 2024-10-11 2024-10-18  0.449368\n",
+    "0 2024-10-11 2024-10-18  0.367557\n",
     "  开始日期        结束日期       准确率\n",
-    "0 2024-10-11 2024-10-18  0.449368\n",
+    "0 2024-10-11 2024-10-18  0.367557\n",
     "  开始日期        结束日期       准确率\n",
-    "0 2024-10-11 2024-10-18  0.449368\n",
+    "0 2024-10-11 2024-10-18  0.367557\n",
     "  开始日期        结束日期       准确率\n",
-    "0 2024-10-11 2024-10-18  0.449368\n",
+    "0 2024-10-11 2024-10-18  0.367557\n",
     "  开始日期        结束日期       准确率\n",
-    "0 2024-10-18 2024-10-25  0.814057\n",
+    "0 2024-10-18 2024-10-25  0.342808\n",
     "  开始日期        结束日期       准确率\n",
-    "0 2024-10-18 2024-10-25  0.814057\n",
+    "0 2024-10-18 2024-10-25  0.342808\n",
     "  开始日期        结束日期       准确率\n",
-    "0 2024-10-18 2024-10-25  0.814057\n",
+    "0 2024-10-18 2024-10-25  0.342808\n",
     "  开始日期        结束日期       准确率\n",
-    "0 2024-10-18 2024-10-25  0.814057\n",
+    "0 2024-10-18 2024-10-25  0.342808\n",
     "  开始日期        结束日期       准确率\n",
-    "0 2024-10-18 2024-10-25  0.814057\n",
+    "0 2024-10-18 2024-10-25  0.342808\n",
     "  开始日期        结束日期       准确率\n",
-    "0 2024-10-25 2024-11-01  0.433599\n",
+    "0 2024-10-25 2024-11-01  0.397058\n",
     "  开始日期        结束日期       准确率\n",
-    "0 2024-10-25 2024-11-01  0.433599\n",
+    "0 2024-10-25 2024-11-01  0.397058\n",
     "  开始日期        结束日期       准确率\n",
-    "0 2024-10-25 2024-11-01  0.433599\n",
+    "0 2024-10-25 2024-11-01  0.397058\n",
     "  开始日期        结束日期       准确率\n",
-    "0 2024-10-25 2024-11-01  0.433599\n",
+    "0 2024-10-25 2024-11-01  0.397058\n",
     "  开始日期        结束日期       准确率\n",
-    "0 2024-10-25 2024-11-01  0.433599\n",
+    "0 2024-10-25 2024-11-01  0.397058\n",
     "  开始日期        结束日期       准确率\n",
-    "0 2024-11-01 2024-11-08  0.894767\n",
+    "0 2024-11-01 2024-11-08  0.666605\n",
     "  开始日期        结束日期       准确率\n",
-    "0 2024-11-01 2024-11-08  0.894767\n",
+    "0 2024-11-01 2024-11-08  0.666605\n",
     "  开始日期        结束日期       准确率\n",
-    "0 2024-11-01 2024-11-08  0.894767\n",
+    "0 2024-11-01 2024-11-08  0.666605\n",
     "  开始日期        结束日期       准确率\n",
-    "0 2024-11-01 2024-11-08  0.894767\n",
+    "0 2024-11-01 2024-11-08  0.666605\n",
     "  开始日期        结束日期       准确率\n",
-    "0 2024-11-01 2024-11-08  0.894767\n",
+    "0 2024-11-01 2024-11-08  0.666605\n",
     "  开始日期        结束日期       准确率\n",
-    "0 2024-11-08 2024-11-15  0.915721\n",
+    "0 2024-11-08 2024-11-15  0.805488\n",
     "  开始日期        结束日期       准确率\n",
-    "0 2024-11-08 2024-11-15  0.915721\n",
+    "0 2024-11-08 2024-11-15  0.805488\n",
     "  开始日期        结束日期       准确率\n",
-    "0 2024-11-08 2024-11-15  0.915721\n",
+    "0 2024-11-08 2024-11-15  0.805488\n",
     "  开始日期        结束日期       准确率\n",
-    "0 2024-11-08 2024-11-15  0.915721\n",
+    "0 2024-11-08 2024-11-15  0.805488\n",
     "  开始日期        结束日期       准确率\n",
-    "0 2024-11-08 2024-11-15  0.915721\n",
+    "0 2024-11-08 2024-11-15  0.805488\n",
     "  开始日期        结束日期       准确率\n",
-    "0 2024-11-15 2024-11-22  0.835755\n",
+    "0 2024-11-15 2024-11-22  0.744558\n",
     "  开始日期        结束日期       准确率\n",
-    "0 2024-11-15 2024-11-22  0.835755\n",
+    "0 2024-11-15 2024-11-22  0.744558\n",
     "  开始日期        结束日期       准确率\n",
-    "0 2024-11-15 2024-11-22  0.835755\n",
+    "0 2024-11-15 2024-11-22  0.744558\n",
     "  开始日期        结束日期       准确率\n",
-    "0 2024-11-15 2024-11-22  0.835755\n",
+    "0 2024-11-15 2024-11-22  0.744558\n",
     "  开始日期        结束日期       准确率\n",
-    "0 2024-11-15 2024-11-22  0.835755\n",
+    "0 2024-11-15 2024-11-22  0.744558\n",
     "  开始日期        结束日期       准确率\n",
-    "0 2024-11-22 2024-11-29  0.718009\n",
+    "0 2024-11-22 2024-11-29  0.351228\n",
     "  开始日期        结束日期       准确率\n",
-    "0 2024-11-22 2024-11-29  0.718009\n",
+    "0 2024-11-22 2024-11-29  0.351228\n",
     "  开始日期        结束日期       准确率\n",
-    "0 2024-11-22 2024-11-29  0.718009\n",
+    "0 2024-11-22 2024-11-29  0.351228\n",
     "  开始日期        结束日期       准确率\n",
-    "0 2024-11-22 2024-11-29  0.718009\n",
+    "0 2024-11-22 2024-11-29  0.351228\n",
     "  开始日期        结束日期       准确率\n",
-    "0 2024-11-22 2024-11-29  0.718009\n",
+    "0 2024-11-22 2024-11-29  0.351228\n",
     "  开始日期        结束日期       准确率\n",
-    "0 2024-11-29 2024-12-06  0.948363\n",
+    "0 2024-11-29 2024-12-06  0.727334\n",
     "  开始日期        结束日期       准确率\n",
-    "0 2024-11-29 2024-12-06  0.948363\n",
+    "0 2024-11-29 2024-12-06  0.727334\n",
     "  开始日期        结束日期       准确率\n",
-    "0 2024-11-29 2024-12-06  0.948363\n",
+    "0 2024-11-29 2024-12-06  0.727334\n",
     "  开始日期        结束日期       准确率\n",
-    "0 2024-11-29 2024-12-06  0.948363\n",
+    "0 2024-11-29 2024-12-06  0.727334\n",
     "  开始日期        结束日期       准确率\n",
-    "0 2024-11-29 2024-12-06  0.948363\n",
+    "0 2024-11-29 2024-12-06  0.727334\n",
     "  开始日期        结束日期       准确率\n",
-    "0 2024-12-06 2024-12-13  0.947006\n"
+    "0 2024-12-06 2024-12-13  0.835391\n"
    ]
   }
  ],
@@ -315,7 +321,7 @@
  },
  {
   "cell_type": "code",
-  "execution_count": 12,
+  "execution_count": 16,
   "id": "0f942c69",
   "metadata": {},
   "outputs": [
    "\u001b[1;31mKeyError\u001b[0m: 'PREDICT_DATE'",
    "\nThe above exception was the direct cause of the following exception:\n",
    "\u001b[1;31mKeyError\u001b[0m                                  Traceback (most recent call last)",
-    "Cell \u001b[1;32mIn[12], line 4\u001b[0m\n\u001b[0;32m      2\u001b[0m \u001b[38;5;66;03m# ds 按周取\u001b[39;00m\n\u001b[0;32m      3\u001b[0m df[\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mDs_Week\u001b[39m\u001b[38;5;124m'\u001b[39m] \u001b[38;5;241m=\u001b[39m df[\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mds\u001b[39m\u001b[38;5;124m'\u001b[39m]\u001b[38;5;241m.\u001b[39mapply(\u001b[38;5;28;01mlambda\u001b[39;00m x: x\u001b[38;5;241m.\u001b[39mstrftime(\u001b[38;5;124m'\u001b[39m\u001b[38;5;124m%\u001b[39m\u001b[38;5;124mU\u001b[39m\u001b[38;5;124m'\u001b[39m))\n\u001b[1;32m----> 4\u001b[0m df[\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mPre_Week\u001b[39m\u001b[38;5;124m'\u001b[39m] \u001b[38;5;241m=\u001b[39m df[\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mPREDICT_DATE\u001b[39m\u001b[38;5;124m'\u001b[39m]\u001b[38;5;241m.\u001b[39mapply(\u001b[38;5;28;01mlambda\u001b[39;00m x: x\u001b[38;5;241m.\u001b[39mstrftime(\u001b[38;5;124m'\u001b[39m\u001b[38;5;124m%\u001b[39m\u001b[38;5;124mU\u001b[39m\u001b[38;5;124m'\u001b[39m))\n",
+    "Cell \u001b[1;32mIn[16], line 4\u001b[0m\n\u001b[0;32m      2\u001b[0m \u001b[38;5;66;03m# ds 按周取\u001b[39;00m\n\u001b[0;32m      3\u001b[0m df[\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mDs_Week\u001b[39m\u001b[38;5;124m'\u001b[39m] \u001b[38;5;241m=\u001b[39m df[\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mds\u001b[39m\u001b[38;5;124m'\u001b[39m]\u001b[38;5;241m.\u001b[39mapply(\u001b[38;5;28;01mlambda\u001b[39;00m x: x\u001b[38;5;241m.\u001b[39mstrftime(\u001b[38;5;124m'\u001b[39m\u001b[38;5;124m%\u001b[39m\u001b[38;5;124mU\u001b[39m\u001b[38;5;124m'\u001b[39m))\n\u001b[1;32m----> 4\u001b[0m df[\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mPre_Week\u001b[39m\u001b[38;5;124m'\u001b[39m] \u001b[38;5;241m=\u001b[39m df[\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mPREDICT_DATE\u001b[39m\u001b[38;5;124m'\u001b[39m]\u001b[38;5;241m.\u001b[39mapply(\u001b[38;5;28;01mlambda\u001b[39;00m x: x\u001b[38;5;241m.\u001b[39mstrftime(\u001b[38;5;124m'\u001b[39m\u001b[38;5;124m%\u001b[39m\u001b[38;5;124mU\u001b[39m\u001b[38;5;124m'\u001b[39m))\n",
    "File \u001b[1;32md:\\ProgramData\\anaconda3\\Lib\\site-packages\\pandas\\core\\frame.py:3893\u001b[0m, in \u001b[0;36mDataFrame.__getitem__\u001b[1;34m(self, key)\u001b[0m\n\u001b[0;32m   3891\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mcolumns\u001b[38;5;241m.\u001b[39mnlevels \u001b[38;5;241m>\u001b[39m \u001b[38;5;241m1\u001b[39m:\n\u001b[0;32m   3892\u001b[0m     \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_getitem_multilevel(key)\n\u001b[1;32m-> 3893\u001b[0m indexer \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mcolumns\u001b[38;5;241m.\u001b[39mget_loc(key)\n\u001b[0;32m   3894\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m is_integer(indexer):\n\u001b[0;32m   3895\u001b[0m     indexer \u001b[38;5;241m=\u001b[39m [indexer]\n",
    "File \u001b[1;32md:\\ProgramData\\anaconda3\\Lib\\site-packages\\pandas\\core\\indexes\\base.py:3798\u001b[0m, in \u001b[0;36mIndex.get_loc\u001b[1;34m(self, key)\u001b[0m\n\u001b[0;32m   3793\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28misinstance\u001b[39m(casted_key, \u001b[38;5;28mslice\u001b[39m) \u001b[38;5;129;01mor\u001b[39;00m (\n\u001b[0;32m   3794\u001b[0m     \u001b[38;5;28misinstance\u001b[39m(casted_key, abc\u001b[38;5;241m.\u001b[39mIterable)\n\u001b[0;32m   3795\u001b[0m     \u001b[38;5;129;01mand\u001b[39;00m \u001b[38;5;28many\u001b[39m(\u001b[38;5;28misinstance\u001b[39m(x, \u001b[38;5;28mslice\u001b[39m) \u001b[38;5;28;01mfor\u001b[39;00m x \u001b[38;5;129;01min\u001b[39;00m casted_key)\n\u001b[0;32m   3796\u001b[0m ):\n\u001b[0;32m   3797\u001b[0m     \u001b[38;5;28;01mraise\u001b[39;00m InvalidIndexError(key)\n\u001b[1;32m-> 3798\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mKeyError\u001b[39;00m(key) \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01merr\u001b[39;00m\n\u001b[0;32m   3799\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mTypeError\u001b[39;00m:\n\u001b[0;32m   3800\u001b[0m     \u001b[38;5;66;03m# If we have a listlike key, _check_indexing_error will raise\u001b[39;00m\n\u001b[0;32m   3801\u001b[0m     \u001b[38;5;66;03m#  InvalidIndexError. Otherwise we fall through and re-raise\u001b[39;00m\n\u001b[0;32m   3802\u001b[0m     \u001b[38;5;66;03m#  the TypeError.\u001b[39;00m\n\u001b[0;32m   3803\u001b[0m     \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_check_indexing_error(key)\n",
    "\u001b[1;31mKeyError\u001b[0m: 'PREDICT_DATE'"
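(Illustrative sketch, editorial and not part of the patch: the traceback above shows
the notebook failing with KeyError because the frame no longer carries a PREDICT_DATE
column when cell 4 runs. A defensive guard like the one below surfaces the problem
clearly; the column names come from the traceback, while the stand-in frame and the
rest of the snippet are hypothetical.)

    import pandas as pd

    # Hypothetical frame standing in for the notebook's df
    df = pd.DataFrame({'ds': pd.to_datetime(['2024-12-06', '2024-12-13'])})

    # Week number of each observation date, as in the notebook
    df['Ds_Week'] = df['ds'].apply(lambda x: x.strftime('%U'))
    # Guard against the missing column before deriving the prediction week
    if 'PREDICT_DATE' in df.columns:
        df['Pre_Week'] = df['PREDICT_DATE'].apply(lambda x: x.strftime('%U'))
    else:
        print(f"PREDICT_DATE not found; available columns: {list(df.columns)}")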