diff --git a/auptest.py b/auptest.py
new file mode 100644
index 0000000..d697635
--- /dev/null
+++ b/auptest.py
@@ -0,0 +1,219 @@
+from fastapi import FastAPI, HTTPException, Body
+from fastapi.middleware.cors import CORSMiddleware
+import requests
+from requests_ntlm import HttpNtlmAuth
+import urllib3
+urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+
+
+'''
+sql:
+
+-- Create the pims_api_log table
+CREATE TABLE pims_api_log (
+    -- Auto-increment primary key
+    id INT AUTO_INCREMENT PRIMARY KEY,
+    -- Time the request reached the server (DATETIME for easy reading)
+    request_time DATETIME NOT NULL,
+    -- Client IP address identifying the request origin (VARCHAR sized for IPv4)
+    request_ip VARCHAR(15) NOT NULL,
+    -- Requested URL, i.e. which endpoint path was called (size to the longest expected path)
+    request_url VARCHAR(255) NOT NULL,
+    -- Request method, e.g. GET or POST
+    request_method VARCHAR(10) NOT NULL,
+    -- Request parameters received, stored as JSON text to handle nested structures
+    request_params TEXT,
+    -- HTTP status code returned to the client
+    response_status_code INT NOT NULL,
+    -- Response content, also stored as JSON text
+    response_content TEXT,
+    -- Time the response was returned
+    response_time DATETIME NOT NULL
+);
+'''
+
+
+import mysql.connector
+from datetime import datetime
+
+# Database connection settings; adjust to your environment
+config = {
+    "user": "your_username",
+    "password": "your_password",
+    "host": "your_host",
+    "database": "your_database"
+}
+
+def insert_api_log(request_time, request_ip, request_url, request_method, request_params, response_status_code, response_content, response_time):
+    # Initialize so the finally block is safe even if the connection fails
+    cnx = None
+    cursor = None
+    try:
+        # Open the database connection
+        cnx = mysql.connector.connect(**config)
+        cursor = cnx.cursor()
+        # SQL statement for inserting one log row
+        insert_query = """
+        INSERT INTO pims_api_log (request_time, request_ip, request_url, request_method, request_params, response_status_code, response_content, response_time)
+        VALUES (%s, %s, %s, %s, %s, %s, %s, %s)
+        """
+        # Values must be in the same order as the placeholders in the SQL statement
+        data = (request_time, request_ip, request_url, request_method, request_params, response_status_code, response_content, response_time)
+        # Run the insert
+        cursor.execute(insert_query, data)
+        # Commit the transaction so the insert takes effect
+        cnx.commit()
+    except mysql.connector.Error as err:
+        print(f"Error: {err}")
+    finally:
+        # Close the cursor and connection
+        if cursor:
+            cursor.close()
+        if cnx:
+            cnx.close()
+
+
+
+
+app = FastAPI(docs_url="/docs")
+
+# Allow cross-origin requests
+app.add_middleware(
+    CORSMiddleware,
+    allow_origins=["*"],
+    allow_methods=["*"],
+    allow_headers=["*"],
+)
+
+headers = {'content-type': 'application/json;charset=UTF-8'}
+
+# Your GraphQL API, e.g. url = 'http://10.88.14.86/AspenTech/AspenUnified/api/v1/model/Chambroad20241205/graphql'
+graphql_host = '10.88.14.86'
+graphql_path = '/AspenTech/AspenUnified/api/v1/model/Chambroad20241205/graphql'
+# Full endpoint URL used by the request handlers below
+url = f'http://{graphql_host}{graphql_path}'
+
+
+query = """
+mutation{
+  purchases{
+    update(inputs:[%s
+    ])
+  }
+  caseExecution {
+    submitCaseStack(
+      input:{
+        name: "Job2"
+        cases: [
+          {name: "11月度计划"}
+          {name: "二催开工"}
+          {name: "一焦化停工"}
+          {name: "焦化加工油浆"}
+          {name: "焦化加工低硫原油"}
+          {name: "焦化加工低硫渣油"}
+        ]
+      }
+    )
+    {id}
+    waitForCaseStackJob(name: "Job2")
+    {
+      started
+      submitted
+      finished
+      executionStatus
+      cases{
+        items{
+          name
+          objectiveValue
+        }
+      }
+    }
+  }
+}
+"""
+
+payload_json = {
+    "query": query,
+    "operationName": ""
+}
+
+graphql_username = "bw19382"
+graphql_password = "Fudong3!"
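+
+# A minimal sketch (an assumption, not confirmed by the Aspen Unified API docs):
+# the %s placeholder in the mutation template above appears intended to take an
+# "inputs" fragment such as `example_query` defined below. Names here are
+# illustrative only, e.g.:
+#
+#     filled_query = query % example_query
+#     payload = {"query": filled_query, "operationName": ""}
+#
+# The exact shape of the fragment depends on the target GraphQL schema.
+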
+auth = HttpNtlmAuth(f'{graphql_username}', f'{graphql_password}')
+
+example_query = '''
+    'inputs':{
+        name:"11月度计划"
+        inputs:[
+            {
+                name:"CWT"
+                inputs:[
+                    {
+                        field:Cost
+                        periodName:"1"
+                        value: 3100
+                    }
+                ]
+            },
+            {
+                name:"VRa"
+                inputs:[
+                    {
+                        field:Cost
+                        periodName:"1"
+                        value: 3333
+                    }
+                ]
+            },
+    '''
+
+@app.post("/graphql")
+async def execute_graphql_query(
+    query: str = Body(..., example=example_query)  # use Body with the example parameter to show a sample value in the docs
+):
+    # NOTE: the posted `query` body is currently only used for documentation;
+    # the hard-coded payload_json template defined above is what actually gets sent.
+    session = requests.Session()
+    response = session.post(url=url, headers=headers, json=payload_json, auth=auth, verify=False)
+    if response.status_code != 200:
+        raise HTTPException(status_code=response.status_code, detail=response.text)
+    return response.json()
+
+query2 = '''
+query
+{
+  cases
+  {
+    items
+    {
+      name
+    }
+  }
+}
+'''
+
+payload_json2 = {
+    "query": query2,
+    "operationName": ""
+}
+
+@app.get("/cases")
+async def get_cases_query_async():
+    session = requests.Session()
+    response = session.post(url=url, headers=headers, json=payload_json2, auth=auth, verify=False)
+    insert_api_log(
+        datetime.now(),
+        'IP_ADDRESS',  # placeholder for the client IP of the incoming request
+        'http://127.0.0.1:8000/cases',
+        'GET',
+        '',
+        response.status_code,
+        response.text,
+        datetime.now()
+    )
+    if response.status_code != 200:
+        raise HTTPException(status_code=response.status_code, detail=response.text)
+
+    return response.json()
+
+
+
+if __name__ == "__main__":
+    import uvicorn
+    uvicorn.run(app, host="127.0.0.1", port=8000)
diff --git a/原油价格预测准确率计算.ipynb b/原油价格预测准确率计算.ipynb
index 6bace69..6c6bfe6 100644
--- a/原油价格预测准确率计算.ipynb
+++ b/原油价格预测准确率计算.ipynb
@@ -2,7 +2,7 @@
  "cells": [
   {
    "cell_type": "code",
-   "execution_count": 136,
+   "execution_count": 2,
    "id": "9daadf20-caa6-4b25-901c-6cc3ef563f58",
    "metadata": {},
    "outputs": [
@@ -10,35 +10,67 @@
      "name": "stdout",
      "output_type": "stream",
      "text": [
-     "(75, 9)\n",
+     "(85, 28)\n",
      "(20, 4)\n",
-     "(75, 12)\n",
-     " id PREDICT_DATE CREAT_DATE MIN_PRICE MAX_PRICE HIGH_PRICE_x LOW_PRICE_x \\\n",
-     "0 1 2024-11-26 2024-11-25 71.071556 76.006900 \n",
-     "1 2 2024-11-27 2024-11-25 71.003624 75.580560 \n",
-     "2 3 2024-11-28 2024-11-25 72.083850 76.204260 \n",
-     "3 4 2024-11-29 2024-11-25 71.329730 75.703950 \n",
-     "4 5 2024-12-02 2024-11-25 71.720825 76.264275 \n",
+     "(85, 31)\n",
+     " ds NHITS Informer LSTM iTransformer TSMixer \\\n",
+     "0 2024-11-25 75.714300 75.523370 73.614220 75.27068 75.03936 \n",
+     "1 2024-11-26 76.039635 75.558270 73.692310 75.04110 74.60100 \n",
+     "2 2024-11-27 77.375790 75.361885 73.826140 74.99121 74.37731 \n",
+     "3 2024-11-28 78.872400 76.339920 73.883484 75.79425 74.04826 \n",
+     "4 2024-11-29 79.576970 76.333170 73.876396 75.89008 74.07330 \n",
      "\n",
-     " RIGHT_ROTE ds 序号 LOW_PRICE_y HIGH_PRICE_y \n",
-     "0 2024-11-26 9.0 71.63 73.80 \n",
-     "1 2024-11-27 8.0 71.71 72.85 \n",
-     "2 2024-11-28 7.0 71.85 72.96 \n",
-     "3 2024-11-29 6.0 71.75 73.34 \n",
-     "4 2024-12-02 5.0 71.52 72.89 \n",
-     " id PREDICT_DATE CREAT_DATE MIN_PRICE MAX_PRICE HIGH_PRICE_x \\\n",
-     "70 71 2024-11-25 2024-11-22 74.53063 76.673140 \n",
-     "71 72 2024-11-26 2024-11-22 74.44043 76.874565 \n",
-     "72 73 2024-11-27 2024-11-22 74.66318 76.734130 \n",
-     "73 74 2024-11-28 2024-11-22 74.70841 77.141050 \n",
-     "74 75 2024-11-29 2024-11-22 74.70321 77.746170 \n",
+     " TSMixerx PatchTST RNN GRU ... y \\\n",
+     "0 74.581190 75.70277 74.721280 74.512060 ... 73.010002 \n",
+     "1 73.496025 75.97611 74.588060 74.713425 ... 72.809998 \n",
+     "2 73.522026 76.48628 74.486400 74.946010 ... 72.830002 \n",
+     "3 73.416306 76.38267 75.195710 74.946014 ... 
73.279999 \n", + "4 73.521570 76.20661 75.089966 74.935165 ... 72.940002 \n", "\n", - " LOW_PRICE_x RIGHT_ROTE ds 序号 LOW_PRICE_y HIGH_PRICE_y \n", - "70 2024-11-25 10.0 72.30 74.83 \n", - "71 2024-11-26 9.0 71.63 73.80 \n", - "72 2024-11-27 8.0 71.71 72.85 \n", - "73 2024-11-28 7.0 71.85 72.96 \n", - "74 2024-11-29 6.0 71.75 73.34 \n" + " min_within_quantile max_within_quantile id CREAT_DATE min_price \\\n", + "0 74.41491 75.29100 1 2024-11-22 74.414910 \n", + "1 74.11780 74.95678 2 2024-11-22 73.496025 \n", + "2 73.93820 74.50395 3 2024-11-22 73.522026 \n", + "3 73.85808 74.46382 4 2024-11-22 73.416306 \n", + "4 73.96690 74.81860 5 2024-11-22 73.521570 \n", + "\n", + " max_price 序号 LOW_PRICE HIGH_PRICE \n", + "0 75.959854 10.0 72.30 74.83 \n", + "1 77.182580 9.0 71.63 73.80 \n", + "2 78.378624 8.0 71.71 72.85 \n", + "3 79.415400 7.0 71.85 72.96 \n", + "4 79.576970 6.0 71.75 73.34 \n", + "\n", + "[5 rows x 31 columns]\n", + " ds NHITS Informer LSTM iTransformer TSMixer \\\n", + "80 2024-12-16 74.53431 73.944080 71.68200 74.022340 74.295820 \n", + "81 2024-12-17 74.81450 73.830450 71.95232 74.314950 74.167290 \n", + "82 2024-12-18 75.55861 73.525100 72.00824 74.441380 74.212180 \n", + "83 2024-12-19 75.36518 74.012215 72.20199 74.397190 74.330130 \n", + "84 2024-12-20 74.78187 73.929596 72.23908 74.510895 74.208084 \n", + "\n", + " TSMixerx PatchTST RNN GRU ... y min_within_quantile \\\n", + "80 74.41700 74.587390 73.607780 73.747700 ... NaN 74.231680 \n", + "81 74.36576 74.363060 73.688736 73.833950 ... NaN 73.735420 \n", + "82 74.29719 74.073555 73.456700 74.146034 ... NaN 74.073555 \n", + "83 73.79145 74.529945 74.230125 74.144520 ... NaN 74.330130 \n", + "84 74.59672 74.231255 74.201860 73.996100 ... NaN 74.083810 \n", + "\n", + " max_within_quantile id CREAT_DATE min_price max_price 序号 LOW_PRICE \\\n", + "80 74.621160 81 2024-12-16 72.75007 74.62116 NaN NaN \n", + "81 74.682365 82 2024-12-16 72.72196 74.81450 NaN NaN \n", + "82 75.157074 83 2024-12-16 73.12483 75.55861 NaN NaN \n", + "83 75.339240 84 2024-12-16 73.07359 75.36518 NaN NaN \n", + "84 74.604610 85 2024-12-16 72.93583 74.78187 NaN NaN \n", + "\n", + " HIGH_PRICE \n", + "80 NaN \n", + "81 NaN \n", + "82 NaN \n", + "83 NaN \n", + "84 NaN \n", + "\n", + "[5 rows x 31 columns]\n" ] } ], @@ -48,17 +80,19 @@ "import pandas as pd\n", "\n", "# 预测价格数据\n", - "dbfilename = os.path.join(r'D:\\code\\PriceForecast\\yuanyoudataset','jbsh_yuanyou.db')\n", - "conn = sqlite3.connect(dbfilename)\n", - "query = 'SELECT * FROM accuracy'\n", - "df1 = pd.read_sql_query(query, conn)\n", - "df1['ds'] = df1['PREDICT_DATE']\n", - "conn.close()\n", + "# dbfilename = os.path.join(r'D:\\code\\PriceForecast\\yuanyoudataset','jbsh_yuanyou.db')\n", + "# conn = sqlite3.connect(dbfilename)\n", + "# query = 'SELECT * FROM accuracy'\n", + "# df1 = pd.read_sql_query(query, conn)\n", + "# df1['ds'] = df1['PREDICT_DATE']\n", + "# conn.close()\n", + "# print(df1.shape)\n", + "\n", + "# 预测价格数据\n", + "dfcsvfilename = os.path.join(r'D:\\code\\PriceForecast\\yuanyoudataset','accuracy_ten.csv')\n", + "df1 = pd.read_csv(dfcsvfilename)\n", "print(df1.shape)\n", "\n", - "\n", - "\n", - "\n", "# 最高最低价\n", "xlsfilename = os.path.join(r'D:\\code\\PriceForecast\\yuanyoudataset','数据项下载.xls')\n", "df2 = pd.read_excel(xlsfilename)[5:]\n", @@ -70,7 +104,7 @@ "df = pd.merge(df1,df2,on=['ds'],how='left')\n", "\n", "df['ds'] = pd.to_datetime(df['ds'])\n", - "df['PREDICT_DATE'] = pd.to_datetime(df['PREDICT_DATE'])\n", + "# df['PREDICT_DATE'] = pd.to_datetime(df['PREDICT_DATE'])\n", 
"df = df.reindex()\n", "\n", "print(df.shape)\n", @@ -87,28 +121,22 @@ }, { "cell_type": "code", - "execution_count": 137, + "execution_count": 3, "id": "e51c3fd0-6bff-45de-b8b6-971e7986c7a7", "metadata": {}, "outputs": [ { - "name": "stdout", - "output_type": "stream", - "text": [ - " ds ACCURACY HIGH_PRICE_y LOW_PRICE_y MIN_PRICE MAX_PRICE\n", - "0 2024-11-26 1.000000 73.80 71.63 71.071556 76.006900\n", - "1 2024-11-27 1.000000 72.85 71.71 71.003624 75.580560\n", - "2 2024-11-28 0.789324 72.96 71.85 72.083850 76.204260\n", - "3 2024-11-29 1.000000 73.34 71.75 71.329730 75.703950\n", - "4 2024-12-02 0.853412 72.89 71.52 71.720825 76.264275\n", - ".. ... ... ... ... ... ...\n", - "70 2024-11-25 0.118328 74.83 72.30 74.530630 76.673140\n", - "71 2024-11-26 0.000000 73.80 71.63 74.440430 76.874565\n", - "72 2024-11-27 0.000000 72.85 71.71 74.663180 76.734130\n", - "73 2024-11-28 0.000000 72.96 71.85 74.708410 77.141050\n", - "74 2024-11-29 0.000000 73.34 71.75 74.703210 77.746170\n", - "\n", - "[75 rows x 6 columns]\n" + "ename": "KeyError", + "evalue": "\"None of [Index(['HIGH_PRICE_y', 'LOW_PRICE_y', 'MIN_PRICE', 'MAX_PRICE'], dtype='object')] are in the [columns]\"", + "output_type": "error", + "traceback": [ + "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[1;31mKeyError\u001b[0m Traceback (most recent call last)", + "Cell \u001b[1;32mIn[3], line 15\u001b[0m\n\u001b[0;32m 12\u001b[0m \u001b[38;5;66;03m# 使用 apply 函数来应用计算准确率的函数\u001b[39;00m\n\u001b[0;32m 14\u001b[0m columns \u001b[38;5;241m=\u001b[39m [\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mHIGH_PRICE_y\u001b[39m\u001b[38;5;124m'\u001b[39m,\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mLOW_PRICE_y\u001b[39m\u001b[38;5;124m'\u001b[39m,\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mMIN_PRICE\u001b[39m\u001b[38;5;124m'\u001b[39m,\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mMAX_PRICE\u001b[39m\u001b[38;5;124m'\u001b[39m]\n\u001b[1;32m---> 15\u001b[0m df[columns] \u001b[38;5;241m=\u001b[39m df[columns]\u001b[38;5;241m.\u001b[39mastype(\u001b[38;5;28mfloat\u001b[39m)\n\u001b[0;32m 16\u001b[0m df[\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mACCURACY\u001b[39m\u001b[38;5;124m'\u001b[39m] \u001b[38;5;241m=\u001b[39m df\u001b[38;5;241m.\u001b[39mapply(calculate_accuracy, axis\u001b[38;5;241m=\u001b[39m\u001b[38;5;241m1\u001b[39m)\n\u001b[0;32m 19\u001b[0m \u001b[38;5;66;03m# 打印结果\u001b[39;00m\n", + "File \u001b[1;32md:\\ProgramData\\anaconda3\\Lib\\site-packages\\pandas\\core\\frame.py:3899\u001b[0m, in \u001b[0;36mDataFrame.__getitem__\u001b[1;34m(self, key)\u001b[0m\n\u001b[0;32m 3897\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m is_iterator(key):\n\u001b[0;32m 3898\u001b[0m key \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mlist\u001b[39m(key)\n\u001b[1;32m-> 3899\u001b[0m indexer \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mcolumns\u001b[38;5;241m.\u001b[39m_get_indexer_strict(key, \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mcolumns\u001b[39m\u001b[38;5;124m\"\u001b[39m)[\u001b[38;5;241m1\u001b[39m]\n\u001b[0;32m 3901\u001b[0m \u001b[38;5;66;03m# take() does not accept boolean indexers\u001b[39;00m\n\u001b[0;32m 3902\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mgetattr\u001b[39m(indexer, \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mdtype\u001b[39m\u001b[38;5;124m\"\u001b[39m, \u001b[38;5;28;01mNone\u001b[39;00m) \u001b[38;5;241m==\u001b[39m \u001b[38;5;28mbool\u001b[39m:\n", + "File 
\u001b[1;32md:\\ProgramData\\anaconda3\\Lib\\site-packages\\pandas\\core\\indexes\\base.py:6115\u001b[0m, in \u001b[0;36mIndex._get_indexer_strict\u001b[1;34m(self, key, axis_name)\u001b[0m\n\u001b[0;32m 6112\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m 6113\u001b[0m keyarr, indexer, new_indexer \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_reindex_non_unique(keyarr)\n\u001b[1;32m-> 6115\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_raise_if_missing(keyarr, indexer, axis_name)\n\u001b[0;32m 6117\u001b[0m keyarr \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mtake(indexer)\n\u001b[0;32m 6118\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28misinstance\u001b[39m(key, Index):\n\u001b[0;32m 6119\u001b[0m \u001b[38;5;66;03m# GH 42790 - Preserve name from an Index\u001b[39;00m\n", + "File \u001b[1;32md:\\ProgramData\\anaconda3\\Lib\\site-packages\\pandas\\core\\indexes\\base.py:6176\u001b[0m, in \u001b[0;36mIndex._raise_if_missing\u001b[1;34m(self, key, indexer, axis_name)\u001b[0m\n\u001b[0;32m 6174\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m use_interval_msg:\n\u001b[0;32m 6175\u001b[0m key \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mlist\u001b[39m(key)\n\u001b[1;32m-> 6176\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mKeyError\u001b[39;00m(\u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mNone of [\u001b[39m\u001b[38;5;132;01m{\u001b[39;00mkey\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m] are in the [\u001b[39m\u001b[38;5;132;01m{\u001b[39;00maxis_name\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m]\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n\u001b[0;32m 6178\u001b[0m not_found \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mlist\u001b[39m(ensure_index(key)[missing_mask\u001b[38;5;241m.\u001b[39mnonzero()[\u001b[38;5;241m0\u001b[39m]]\u001b[38;5;241m.\u001b[39munique())\n\u001b[0;32m 6179\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mKeyError\u001b[39;00m(\u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;132;01m{\u001b[39;00mnot_found\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m not in index\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n", + "\u001b[1;31mKeyError\u001b[0m: \"None of [Index(['HIGH_PRICE_y', 'LOW_PRICE_y', 'MIN_PRICE', 'MAX_PRICE'], dtype='object')] are in the [columns]\"" ] } ],