diff --git a/src/.vuepress/sidebar/V1.3.3/en.ts b/src/.vuepress/sidebar/V1.3.3/en.ts
index ee0360ed5..867de90ab 100644
--- a/src/.vuepress/sidebar/V1.3.3/en.ts
+++ b/src/.vuepress/sidebar/V1.3.3/en.ts
@@ -60,6 +60,7 @@ export const enSidebar = {
},
{ text: 'Cluster Deployment', link: 'Cluster-Deployment_apache' },
{ text: 'Docker Deployment', link: 'Docker-Deployment_apache' },
+ { text: 'AINode Deployment', link: 'AINode_Deployment_apache' },
],
},
{
@@ -98,6 +99,7 @@ export const enSidebar = {
children: [
{ text: 'Data Sync', link: 'Data-Sync_apache' },
{ text: 'Data Subscription', link: 'Data-subscription' },
+ { text: 'AI Capability', link: 'AINode_apache' },
{
text: 'Security Management',
collapsible: true,
diff --git a/src/.vuepress/sidebar/V1.3.3/zh.ts b/src/.vuepress/sidebar/V1.3.3/zh.ts
index be404b53e..dd6974d83 100644
--- a/src/.vuepress/sidebar/V1.3.3/zh.ts
+++ b/src/.vuepress/sidebar/V1.3.3/zh.ts
@@ -58,6 +58,7 @@ export const zhSidebar = {
{ text: '单机版部署指导', link: 'Stand-Alone-Deployment_apache' },
{ text: '集群版部署指导', link: 'Cluster-Deployment_apache' },
{ text: 'Docker部署指导', link: 'Docker-Deployment_apache' },
+ { text: 'AINode部署', link: 'AINode_Deployment_apache' },
],
},
{
@@ -87,6 +88,7 @@ export const zhSidebar = {
children: [
{ text: '数据同步', link: 'Data-Sync_apache' },
{ text: '数据订阅', link: 'Data-subscription' },
+ { text: 'AI能力', link: 'AINode_apache' },
{
text: '安全管理',
collapsible: true,
diff --git a/src/.vuepress/sidebar/V2.0.x/zh-Tree.ts b/src/.vuepress/sidebar/V2.0.x/zh-Tree.ts
index 966e0f56a..a7d0ae41e 100644
--- a/src/.vuepress/sidebar/V2.0.x/zh-Tree.ts
+++ b/src/.vuepress/sidebar/V2.0.x/zh-Tree.ts
@@ -61,6 +61,7 @@ export const zhSidebar = {
{ text: '单机版部署指导', link: 'Stand-Alone-Deployment_apache' },
{ text: '集群版部署指导', link: 'Cluster-Deployment_apache' },
{ text: 'Docker部署指导', link: 'Docker-Deployment_apache' },
+ { text: 'AINode部署', link: 'AINode_Deployment_apache' },
],
},
{
@@ -82,6 +83,7 @@ export const zhSidebar = {
children: [
{ text: '数据同步', link: 'Data-Sync_apache' },
{ text: '数据订阅', link: 'Data-subscription' },
+ { text: 'AI能力', link: 'AINode_apache' },
{
text: '安全管理',
collapsible: true,
diff --git a/src/UserGuide/Master/Tree/Deployment-and-Maintenance/AINode_Deployment_apache.md b/src/UserGuide/Master/Tree/Deployment-and-Maintenance/AINode_Deployment_apache.md
new file mode 100644
index 000000000..610ec0b72
--- /dev/null
+++ b/src/UserGuide/Master/Tree/Deployment-and-Maintenance/AINode_Deployment_apache.md
@@ -0,0 +1,522 @@
+
+# AINode Deployment
+
+## AINode Introduction
+
+### Capability Introduction
+
+ AINode is the third type of native node provided by IoTDB, alongside the ConfigNode and DataNode. It extends IoTDB's ability to perform machine learning analysis on time series by interacting with the DataNodes and ConfigNodes of the IoTDB cluster. Existing machine learning models can be imported from external sources and registered, and the registered models can then be invoked through simple SQL statements to complete time series analysis tasks on specified data. Model creation, management, and inference are integrated into the database engine. Machine learning algorithms and self-developed models are currently available for common time series analysis scenarios, such as forecasting and anomaly detection.
+
+### Delivery Method
+ AINode is delivered as an additional package outside the IoTDB cluster, installed and activated independently (to try or use it, please contact Timecho business or technical support).
+
+### Deployment mode
+
+

+

+
+
+## Installation preparation
+
+### Get installation package
+
+ Users can download the AINode software installation package and unzip it to complete the installation.
+
+ Unzip the installation package
+ `(iotdb-enterprise-ainode-.zip)`. The directory structure after unpacking is as follows:
+| **Directory** | **Type** | **Description** |
+| ------------ | -------- | ------------------------------------------------ |
+| lib | folder | Compiled binary executables of AINode and related code dependencies |
+| sbin | folder | Scripts for running AINode, used to start, remove, and stop AINode |
+| conf | folder | Configuration files of AINode, including the configuration items listed below |
+| LICENSE | file | License |
+| NOTICE | file | Notice |
+| README_ZH.md | file | Chinese README in markdown format |
+| `README.md` | file | README |
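+
+A minimal sketch of unpacking and inspecting the package (the version number in the file name is illustrative):
+
+```shell
+ unzip iotdb-enterprise-ainode-1.3.3.2.zip
+ ls iotdb-enterprise-ainode-1.3.3.2
+ # conf  lib  LICENSE  NOTICE  README.md  README_ZH.md  sbin
+ ```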
+
+### Environment preparation
+- Recommended operating systems: Ubuntu, CentOS, MacOS
+
+- Runtime Environment
+ - In a networked environment, any Python from 3.8 to 3.14 with the pip and venv tools is sufficient. In a non-networked environment, Python 3.8 is required; download the zip package for the corresponding operating system from [here](https://cloud.tsinghua.edu.cn/d/4c1342f6c272439aa96c/?p=%2Flibs&mode=list) (when downloading dependencies, select the zip file in the libs folder, as shown in the figure below), copy all files in that folder to the `lib` folder under the `iotdb-enterprise-ainode-` folder, and follow the steps below to start AINode.
+
+
+
+ - There must be a Python interpreter in the environment variables that can be invoked directly with the `python` command.
+ - It is recommended to create a venv virtual environment inside the `iotdb-enterprise-ainode-` folder. For example, to create one with Python 3.8.0:
+ ```shell
+ # Create a virtual environment named `venv` with Python 3.8.0
+ ../Python-3.8.0/python -m venv venv
+ ```
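+
+ For the non-networked case, a minimal sketch of staging the downloaded offline dependency files into the installation directory (the zip and folder names below are illustrative; use the file you actually downloaded for your platform):
+
+ ```shell
+ # Unpack the offline dependency bundle downloaded from the libs folder
+ unzip libs-linux-x86_64.zip -d offline-libs
+ # Copy all of its files into the lib folder of the AINode installation
+ cp offline-libs/* iotdb-enterprise-ainode-1.3.3.2/lib/
+ ```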
+
+## Installation steps
+
+### Install AINode
+
+
+ 1. Check the kernel architecture of Linux
+```shell
+ uname -m
+ ```
+
+ 2. Import a Python environment ([download Miniconda](https://repo.anaconda.com/miniconda/))
+
+ It is recommended to download the py311 installer and place it in a dedicated iotdb folder in the user's home directory, for example:
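+
+ A minimal sketch of downloading the installer with `wget` (the exact file name depends on the Miniconda release chosen):
+
+```shell
+ wget https://repo.anaconda.com/miniconda/Miniconda3-py311_24.5.0-0-Linux-x86_64.sh
+ ```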
+
+ 3. Switch to the iotdb dedicated folder to install the Python environment
+
+ Taking Miniconda3-py311_24.5.0-0-Linux-x86_64 as an example:
+
+```shell
+ bash ./Miniconda3-py311_24.5.0-0-Linux-x86_64.sh
+ ```
+> Follow the prompts: press Enter, hold Space to scroll through the license, press Enter, then answer "yes" twice
+> Close the current SSH window and reconnect
+
+ 4. Create a dedicated environment
+
+```shell
+ conda create -n ainode_py python=3.11.9
+ ```
+
+ Type 'y' according to the prompt
+
+ 5. Activate dedicated environment
+
+```shell
+ conda activate ainode_py
+ ```
+
+ 6. Verify Python version
+
+```shell
+ python --version
+ ```
+ 7. Download the AINode installation package into the dedicated folder, switch to that folder, and extract the package
+
+```shell
+ unzip iotdb-enterprise-ainode-1.3.3.2.zip
+ ```
+
+ 8. Configuration item modification
+
+```shell
+ vi iotdb-enterprise-ainode-1.3.3.2/conf/iotdb-ainode.properties
+ ```
+ For details of the configuration items, see [Configuration item modification](#configuration-item-modification)
+
+> ain_seed_config_node=iotdb-1:10710 (IP:port of the cluster's communication node)
+> ain_inference_rpc_address=iotdb-3 (IP address of the server running AINode)
+
+ 9. Replace the pip package source (the example below uses the Aliyun mirror)
+
+```shell
+ pip config set global.index-url https://mirrors.aliyun.com/pypi/simple/
+ ```
+
+ 10. Start the AINode node
+
+```shell
+ nohup bash iotdb-enterprise-ainode-1.3.3.2/sbin/start-ainode.sh > myout.file 2>&1 &
+ ```
+> To return to the system's default environment, run `conda deactivate`
+
+ ### Configuration item modification
+
+AINode supports modifying some necessary parameters. You can find the following parameters in the `conf/iotdb-ainode.properties` file and modify them persistently:
+
+| **Name** | **Description** | **Type** | **Default value** | **Effective after modification** |
+| :----------------------------- | ------------------------------------------------------------ | ------- | ------------------ | ---------------------------- |
+| cluster_name | Identifier of the cluster that AINode joins | String | defaultCluster | Modifiable only before the first startup |
+| ain_seed_config_node | ConfigNode address that AINode registers with at startup | String | 127.0.0.1:10710 | Modifiable only before the first startup |
+| ain_inference_rpc_address | Address on which AINode provides services and internal communication | String | 127.0.0.1 | Modifiable only before the first startup |
+| ain_inference_rpc_port | Port on which AINode provides services and communication | String | 10810 | Modifiable only before the first startup |
+| ain_system_dir | AINode metadata storage path; the base directory of a relative path depends on the operating system, so an absolute path is recommended | String | data/AINode/system | Modifiable only before the first startup |
+| ain_models_dir | Path where AINode stores model files; the base directory of a relative path depends on the operating system, so an absolute path is recommended | String | data/AINode/models | Modifiable only before the first startup |
+| ain_logs_dir | Path where AINode stores logs; the base directory of a relative path depends on the operating system, so an absolute path is recommended | String | logs/AINode | Effective after restart |
+| ain_thrift_compression_enabled | Whether AINode enables Thrift compression: 0 - disabled, 1 - enabled | Boolean | 0 | Effective after restart |
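+
+As a concrete sketch, the key items might look like this in `conf/iotdb-ainode.properties` after editing (the host names are illustrative and taken from the example in step 8 above):
+
+```shell
+# Seed ConfigNode of the IoTDB cluster that this AINode joins (ip:port)
+ain_seed_config_node=iotdb-1:10710
+# Address of the server where this AINode runs
+ain_inference_rpc_address=iotdb-3
+# Port on which this AINode serves requests (default 10810)
+ain_inference_rpc_port=10810
+```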
+
+### Start AINode
+
+ After the Seed ConfigNode has been deployed, model registration and inference can be supported by adding AINode nodes. After specifying the IoTDB cluster information in the configuration file, execute the corresponding command to start AINode and join it to the IoTDB cluster.
+
+#### Networking environment startup
+
+##### Start command
+
+```shell
+ # Start command
+ # Linux and MacOS systems
+ bash sbin/start-ainode.sh
+
+ # Windows systems
+ sbin\start-ainode.bat
+
+ # Background startup command (recommended for long-term running)
+ # Linux and MacOS systems
+ nohup bash sbin/start-ainode.sh > myout.file 2>&1 &
+
+ # Windows systems (Windows has no nohup; run the script directly)
+ sbin\start-ainode.bat
+ ```
+
+#### Detailed Syntax
+
+```shell
+ # Start command
+ # Linux and MacOS systems
+ bash sbin/start-ainode.sh -i <ain_interpreter_dir> -r -n
+
+ # Windows systems
+ sbin\start-ainode.bat -i <ain_interpreter_dir> -r -n
+ ```
+
+##### Parameter introduction:
+
+| **Name** | **Flag** | **Description** | **Required** | **Type** | **Default value** | **Input method** |
+| ------------------- | ---- | ------------------------------------------------------------ | -------- | ------ | ---------------- | ---------------------- |
+| ain_interpreter_dir | -i | Interpreter path of the virtual environment in which AINode is installed; an absolute path is required | No | String | Read from environment variables | Passed at invocation or modified persistently |
+| ain_force_reinstall | -r | Whether the script checks the AINode version when checking its installation status; if enabled, the whl package under lib is forcibly reinstalled when the version is incorrect | No | Bool | false | Passed at invocation |
+| ain_no_dependencies | -n | Whether to skip installing dependencies when installing AINode; if enabled, only the AINode main program is installed without dependencies | No | Bool | false | Passed at invocation |
+
+ If you don't want to specify the corresponding parameters every time you start, you can also persist them in the `ainode-env.sh` and `ainode-env.bat` scripts in the `conf` folder (currently only the ain_interpreter_dir parameter supports persistent modification).
+
+ `ainode-env.sh` :
+ ```shell
+ # The default venv environment is used if ain_interpreter_dir is not set. Please use an absolute path without quotation marks
+ # ain_interpreter_dir=
+ ```
+ `ainode-env.bat` :
+```shell
+ @REM The default venv environment is used if ain_interpreter_dir is not set. Please use an absolute path without quotation marks
+ @REM set ain_interpreter_dir=
+ ```
+ After writing the parameter value, uncomment the corresponding line and save it to take effect on the next script execution.
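+
+ For example, to persist an interpreter path in `ainode-env.sh` (the path below is illustrative), the line would be uncommented and set as follows:
+
+ ```shell
+ # Illustrative: use the interpreter of an existing virtual environment
+ ain_interpreter_dir=/home/iotdb/iotdb-enterprise-ainode-1.3.3.2/venv/bin/python
+ ```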
+
+
+#### Example
+
+##### Directly start:
+
+```shell
+ # Start command
+ # Linux and MacOS systems
+ bash sbin/start-ainode.sh
+ # Windows systems
+ sbin\start-ainode.bat
+
+
+ # Background startup command (recommended for long-term running)
+ # Linux and MacOS systems
+ nohup bash sbin/start-ainode.sh > myout.file 2>&1 &
+ # Windows systems (Windows has no nohup; run the script directly)
+ sbin\start-ainode.bat
+ ```
+
+##### Update Start:
+If the AINode version has been updated (for example, the `lib` folder has been replaced), this command can be used. First make sure that AINode has stopped running, then restart it with the `-r` parameter, which reinstalls AINode from the files under `lib`.
+
+
+```shell
+ # Update startup command
+ # Linux and MacOS systems
+ bash sbin/start-ainode.sh -r
+ # Windows systems
+ sbin\start-ainode.bat -r
+
+
+ # Background startup command (recommended for long-term running)
+ # Linux and MacOS systems
+ nohup bash sbin/start-ainode.sh -r > myout.file 2>&1 &
+ # Windows systems (Windows has no nohup; run the script directly)
+ sbin\start-ainode.bat -r
+ ```
+#### Non-networked environment startup
+
+##### Start command
+
+```shell
+ # Start command
+ # Linux and MacOS systems
+ bash sbin/start-ainode.sh
+
+ # Windows systems
+ sbin\start-ainode.bat
+
+ # Background startup command (recommended for long-term running)
+ # Linux and MacOS systems
+ nohup bash sbin/start-ainode.sh > myout.file 2>&1 &
+
+ # Windows systems (Windows has no nohup; run the script directly)
+ sbin\start-ainode.bat
+ ```
+
+#### Detailed Syntax
+
+```shell
+ # Start command
+ # Linux and MacOS systems
+ bash sbin/start-ainode.sh -i <ain_interpreter_dir> -r
+
+ # Windows systems
+ sbin\start-ainode.bat -i <ain_interpreter_dir> -r
+ ```
+
+##### Parameter introduction:
+
+| **Name** | **Flag** | **Description** | **Required** | **Type** | **Default value** | **Input method** |
+| ------------------- | ---- | ------------------------------------------------------------ | -------- | ------ | ---------------- | ---------------------- |
+| ain_interpreter_dir | -i | Interpreter path of the virtual environment in which AINode is installed; an absolute path is required | No | String | Read from environment variables | Passed at invocation or modified persistently |
+| ain_force_reinstall | -r | Whether the script checks the AINode version when checking its installation status; if enabled, the whl package under lib is forcibly reinstalled when the version is incorrect | No | Bool | false | Passed at invocation |
+
+> Note: when installation fails in a non-networked environment, first check that the installation package matching the platform was selected, and then confirm that the Python version is 3.8 (the downloaded installation packages only support Python 3.8; 3.7, 3.9, and other versions are not allowed)
+
+#### Example
+
+##### Directly start:
+
+```shell
+ # Start command
+ # Linux and MacOS systems
+ bash sbin/start-ainode.sh
+ # Windows systems
+ sbin\start-ainode.bat
+
+ # Background startup command (recommended for long-term running)
+ # Linux and MacOS systems
+ nohup bash sbin/start-ainode.sh > myout.file 2>&1 &
+ # Windows systems (Windows has no nohup; run the script directly)
+ sbin\start-ainode.bat
+ ```
+
+### Detecting the status of AINode nodes
+
+During startup, the new AINode is automatically added to the IoTDB cluster. After starting AINode, you can enter SQL on the command line to check: if an AINode node appears in the cluster with running status Running (as shown below), it has joined successfully.
+
+
+```shell
+IoTDB> show cluster
++------+----------+-------+---------------+------------+-------+-----------+
+|NodeID| NodeType| Status|InternalAddress|InternalPort|Version| BuildInfo|
++------+----------+-------+---------------+------------+-------+-----------+
+| 0|ConfigNode|Running| 127.0.0.1| 10710|UNKNOWN|190e303-dev|
+| 1| DataNode|Running| 127.0.0.1| 10730|UNKNOWN|190e303-dev|
+| 2| AINode|Running| 127.0.0.1| 10810|UNKNOWN|190e303-dev|
++------+----------+-------+---------------+------------+-------+-----------+
+```
+
+### Stop AINode
+
+If you need to stop a running AINode node, execute the corresponding shutdown script.
+
+#### Stop command
+
+```shell
+ # Linux / MacOS
+ bash sbin/stop-ainode.sh
+
+ #Windows
+ sbin\stop-ainode.bat
+ ```
+
+
+#### Detailed Syntax
+
+```shell
+ # Linux / MacOS
+ bash sbin/stop-ainode.sh -t <Node-id>/<ip>:<port>
+
+ # Windows
+ sbin\stop-ainode.bat -t <Node-id>/<ip>:<port>
+ ```
+
+##### Parameter introduction:
+
+| **Name** | **Flag** | **Description** | **Required** | **Type** | **Default value** | **Input method** |
+| ----------------- | ---- | ------------------------------------------------------------ | -------- | ------ | ------ | ---------- |
+| ain_remove_target | -t | When stopping AINode, the target AINode to remove can be specified by its Node ID, address, and port, in the format `<Node-id>/<ip>:<port>` | No | String | None | Passed at invocation |
+
+#### Example
+
+```shell
+ # Linux / MacOS
+ bash sbin/stop-ainode.sh
+
+ # Windows
+ sbin\stop-ainode.bat
+ ```
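+
+If a specific node should be removed while stopping, the `-t` option can be passed. A hedged example, assuming the `<Node-id>/<ip>:<port>` format described above and the AINode from the earlier cluster listing (Node ID 2 at 127.0.0.1:10810):
+
+```shell
+ # Linux / MacOS
+ bash sbin/stop-ainode.sh -t 2/127.0.0.1:10810
+
+ # Windows
+ sbin\stop-ainode.bat -t 2/127.0.0.1:10810
+```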
+After stopping AINode, the AINode node can still be seen in the cluster with running status UNKNOWN (as shown below); AINode functions cannot be used in this state.
+
+ ```shell
+IoTDB> show cluster
++------+----------+-------+---------------+------------+-------+-----------+
+|NodeID| NodeType| Status|InternalAddress|InternalPort|Version| BuildInfo|
++------+----------+-------+---------------+------------+-------+-----------+
+| 0|ConfigNode|Running| 127.0.0.1| 10710|UNKNOWN|190e303-dev|
+| 1| DataNode|Running| 127.0.0.1| 10730|UNKNOWN|190e303-dev|
+| 2| AINode|UNKNOWN| 127.0.0.1| 10790|UNKNOWN|190e303-dev|
++------+----------+-------+---------------+------------+-------+-----------+
+```
+If you need to restart the node, you need to execute the startup script again.
+
+### Remove AINode
+
+When an AINode node needs to be removed from the cluster, the removal script can be executed. The difference between removing and stopping is that stopping keeps the AINode node in the cluster but stops the AINode service, while removing deletes the AINode node from the cluster.
+
+#### Remove command
+
+
+```shell
+ # Linux / MacOS
+ bash sbin/remove-ainode.sh
+
+ # Windows
+ sbin\remove-ainode.bat
+ ```
+
+#### Detailed Syntax
+
+```shell
+ # Linux / MacOS
+ bash sbin/remove-ainode.sh -i <ain_interpreter_dir> -t <Node-id>/<ip>:<port> -r -n
+
+ # Windows
+ sbin\remove-ainode.bat -i <ain_interpreter_dir> -t <Node-id>/<ip>:<port> -r -n
+ ```
+
+##### Parameter introduction:
+
+ | **Name** | **Flag** | **Description** | **Required** | **Type** | **Default value** | **Input method** |
+| ------------------- | ---- | ------------------------------------------------------------ | -------- | ------ | ---------------- | --------------------- |
+| ain_interpreter_dir | -i | Interpreter path of the virtual environment in which AINode is installed; an absolute path is required | No | String | Read from environment variables | Passed at invocation or modified persistently |
+| ain_remove_target | -t | The target AINode to remove can be specified by its Node ID, address, and port, in the format `<Node-id>/<ip>:<port>` | No | String | None | Passed at invocation |
+| ain_force_reinstall | -r | Whether the script checks the AINode version when checking its installation status; if enabled, the whl package under lib is forcibly reinstalled when the version is incorrect | No | Bool | false | Passed at invocation |
+| ain_no_dependencies | -n | Whether to skip installing dependencies when installing AINode; if enabled, only the AINode main program is installed without dependencies | No | Bool | false | Passed at invocation |
+
+ If you don't want to specify the corresponding parameters every time, you can also persist them in the `ainode-env.sh` and `ainode-env.bat` scripts in the `conf` folder (currently only the ain_interpreter_dir parameter supports persistent modification).
+
+ `ainode-env.sh` :
+ ```shell
+ # The default venv environment is used if ain_interpreter_dir is not set. Please use an absolute path without quotation marks
+ # ain_interpreter_dir=
+ ```
+ `ainode-env.bat` :
+```shell
+ @REM The default venv environment is used if ain_interpreter_dir is not set. Please use an absolute path without quotation marks
+ @REM set ain_interpreter_dir=
+ ```
+ After writing the parameter value, uncomment the corresponding line and save it to take effect on the next script execution.
+
+#### Example
+
+##### Directly remove:
+
+ ```shell
+ # Linux / MacOS
+ bash sbin/remove-ainode.sh
+
+ # Windows
+ sbin\remove-ainode.bat
+ ```
+ After removing the node, relevant information about the node cannot be queried.
+
+ ```shell
+IoTDB> show cluster
++------+----------+-------+---------------+------------+-------+-----------+
+|NodeID| NodeType| Status|InternalAddress|InternalPort|Version| BuildInfo|
++------+----------+-------+---------------+------------+-------+-----------+
+| 0|ConfigNode|Running| 127.0.0.1| 10710|UNKNOWN|190e303-dev|
+| 1| DataNode|Running| 127.0.0.1| 10730|UNKNOWN|190e303-dev|
++------+----------+-------+---------------+------------+-------+-----------+
+```
+##### Specify removal:
+
+If files in the data folder have been lost, AINode may be unable to remove itself locally, and the node must be removed by specifying its node number, address, and port. In this case, the parameters can be passed as follows for deletion.
+
+ ```shell
+ # Linux / MacOS
+ bash sbin/remove-ainode.sh -t <Node-id>/<ip>:<port>
+
+ # Windows
+ sbin\remove-ainode.bat -t <Node-id>/<ip>:<port>
+ ```
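+
+A hedged concrete example, assuming the AINode to remove is the node with ID 2 at 127.0.0.1:10810 from the earlier cluster listing:
+
+```shell
+ # Linux / MacOS
+ bash sbin/remove-ainode.sh -t 2/127.0.0.1:10810
+
+ # Windows
+ sbin\remove-ainode.bat -t 2/127.0.0.1:10810
+```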
+
+## Common problems
+
+### An error occurs when starting AINode stating that the venv module cannot be found
+
+ When AINode is started in the default way, a Python virtual environment is created in the installation package directory and dependencies are installed, so the venv module is required. Generally, Python 3.8 and later ship with venv, but some systems with a built-in Python environment may not meet this requirement. When this error occurs there are two solutions (choose either one):
+
+ Install the venv module locally. Taking Ubuntu as an example, you can run the following command to install Python's built-in venv module, or install a Python version that ships with venv from the Python official website.
+
+ ```shell
+apt-get install python3.8-venv
+```
+Then create a venv virtual environment with Python 3.8.0 under the AINode path.
+
+ ```shell
+../Python-3.8.0/python -m venv venv  # "venv" is the name of the created folder
+```
+ Alternatively, when running the startup script, use `-i` to specify the path of an existing Python interpreter as the runtime environment for AINode, which eliminates the need to create a new virtual environment, for example:
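+
+A minimal sketch of this second option (the interpreter path is illustrative):
+
+```shell
+bash sbin/start-ainode.sh -i /usr/local/python3/bin/python3
+```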
+
+ ### The SSL module in Python is not properly installed and configured to handle HTTPS resources
+WARNING: pip is configured with locations that require TLS/SSL, however the ssl module in Python is not available.
+You can install OpenSSL and then rebuild Python to solve this problem.
+> Currently Python versions 3.6 to 3.9 are compatible with OpenSSL 1.0.2, 1.1.0, and 1.1.1.
+
+ Python requires OpenSSL to be installed on the system; the specific installation method can be found in this [link](https://stackoverflow.com/questions/56552390/how-to-fix-ssl-module-in-python-is-not-available-in-centos)
+
+ ```shell
+sudo apt-get install build-essential libssl-dev zlib1g-dev libbz2-dev libreadline-dev libsqlite3-dev wget curl llvm libncurses5-dev libncursesw5-dev xz-utils tk-dev libffi-dev liblzma-dev uuid-dev lzma-dev liblzma-dev
+sudo -E ./configure --with-ssl
+make
+sudo make install
+```
+
+ ### Pip or setuptools version is too low
+
+ A compilation issue similar to "error: Microsoft Visual C++ 14.0 or greater is required..." appears on Windows
+
+The corresponding error occurs during installation and compilation, usually because the Microsoft Visual C++ toolchain or the pip/setuptools version is too old. You can upgrade them with:
+
+ ```shell
+./python -m pip install --upgrade pip
+./python -m pip install --upgrade setuptools
+```
+
+
+ ### Install and compile Python
+
+ Use the following instructions to download the installation package from the official website and extract it:
+ ```shell
+wget https://www.python.org/ftp/python/3.8.0/Python-3.8.0.tar.xz
+tar -xJf Python-3.8.0.tar.xz
+```
+ Compile and install the corresponding Python package:
+ ```shell
+cd Python-3.8.0
+./configure --prefix=/usr/local/python3
+make
+sudo make install
+python3 --version
+```
\ No newline at end of file
diff --git a/src/UserGuide/Master/Tree/User-Manual/AINode_apache.md b/src/UserGuide/Master/Tree/User-Manual/AINode_apache.md
new file mode 100644
index 000000000..f0c6647ec
--- /dev/null
+++ b/src/UserGuide/Master/Tree/User-Manual/AINode_apache.md
@@ -0,0 +1,654 @@
+
+
+# AI Capability(AINode)
+
+AINode is the third type of internal node in Apache IoTDB, after the ConfigNode and DataNode. It extends IoTDB's capability for machine learning analysis of time series by interacting with the DataNodes and ConfigNodes of the IoTDB cluster. Pre-existing machine learning models can be imported from outside and registered, and the registered models can then be used, through simple SQL statements, to complete time series analysis tasks on specified data; model creation, management, and inference are integrated into the database engine. At present, machine learning algorithms and self-developed models are provided for common time series analysis scenarios (e.g. forecasting and anomaly detection).
+
+The system architecture is shown below:
+::: center
+
+:::
+The responsibilities of the three nodes are as follows:
+
+- **ConfigNode**: responsible for storing and managing the meta-information of the model; responsible for distributed node management.
+- **DataNode**: responsible for receiving and parsing SQL requests from users; responsible for storing time-series data; responsible for preprocessing computation of data.
+- **AINode**: responsible for model file import, creation, and model inference.
+
+## Advantageous features
+
+Compared with building a machine learning service alone, it has the following advantages:
+
+- **Simple and easy to use**: no Python or Java programming is needed; the complete process of machine learning model management and inference can be completed with SQL statements. A model is created with the CREATE MODEL statement and used for inference with the CALL INFERENCE(...) statement, making it simpler and more convenient to use.
+
+- **Avoid Data Migration**: With IoTDB native machine learning, data stored in IoTDB can be directly applied to the inference of machine learning models without having to move the data to a separate machine learning service platform, which accelerates data processing, improves security, and reduces costs.
+
+
+
+- **Built-in Advanced Algorithms**: supports industry-leading machine learning analysis algorithms covering typical time series analysis tasks, giving the time series database native data analysis capabilities. For example:
+ - **Time Series Forecasting**: learns patterns of change from past time series and outputs the most likely prediction of the future series based on observations up to a given time.
+ - **Anomaly Detection for Time Series**: detects and identifies outliers in a given time series, helping to discover anomalous behaviour in the series.
+ - **Annotation for Time Series**: adds additional information or markers, such as event occurrences, outliers, or trend changes, to each data point or specific time period to better understand and analyse the data.
+
+
+
+## Basic Concepts
+
+- **Model**: a machine learning model that takes time-series data as input and outputs the results or decisions of an analysis task. Model is the basic management unit of AINode, which supports adding (registration), deleting, checking, and using (inference) of models.
+- **Create**: Load externally designed or trained model files or algorithms into AINode for unified management and use by IoTDB.
+- **Inference**: The process of using the created model to complete the time series analysis task applicable to that model on the specified time series data.
+- **Built-in capabilities**: AINode comes with machine learning algorithms and self-developed models for common time series analysis scenarios (e.g., forecasting and anomaly detection).
+
+::: center
+
+:::
+
+## Installation and Deployment
+
+The deployment of AINode can be found in the document [Deployment Guidelines](../Deployment-and-Maintenance/AINode_Deployment_apache.md#ainode-deployment) .
+
+
+## Usage Guidelines
+
+AINode provides a model creation and deletion workflow for deep learning models on time series data. Built-in models do not need to be created or deleted and can be used directly; built-in model instances created during inference are destroyed automatically after inference completes.
+
+### Registering Models
+
+A trained deep learning model can be registered by specifying the vector dimensions of its input and output; a registered model can then be used for model inference.
+
+Models that meet the following criteria can be registered in AINode:
+1. AINode supports models trained with PyTorch 2.1.0 or 2.2.0; features introduced after version 2.2.0 should be avoided.
+2. AINode supports models stored using PyTorch JIT, and the model file needs to include the parameters and structure of the model.
+3. The input sequence of the model can contain one or more columns, and if there are multiple columns, they need to correspond to the model capability and model configuration file.
+4. The input and output dimensions of the model must be clearly defined in the `config.yaml` configuration file. When using the model, it is necessary to strictly follow the input-output dimensions defined in the `config.yaml` configuration file. If the number of input and output columns does not match the configuration file, it will result in errors.
+
+The following is the SQL syntax definition for model registration.
+
+```SQL
+create model <model_name> using uri <uri>
+```
+
+The specific meanings of the parameters in the SQL are as follows:
+
+- model_name: a globally unique identifier for the model, which cannot be repeated. The model name has the following constraints:
+
+ - Identifiers [ 0-9 a-z A-Z _ ] (letters, numbers, underscores) are allowed.
+ - Length is limited to 2-64 characters
+ - Case sensitive
+
+- uri: resource path to the model registration file, which should contain the **model weights model.pt file and the model's metadata description file config.yaml**.
+
+ - Model weight file: the weight file obtained when training of the deep learning model is completed; currently .pt files produced by PyTorch training are supported
+
+ - yaml metadata description file: parameters related to the model structure that must be provided when the model is registered, which must contain the input and output dimensions of the model for inference:
+
+ - | **Parameter name** | **Parameter description** | **Example** |
+ | ------------ | ---------------------------- | -------- |
+ | input_shape | Rows and columns of model inputs for model inference | [96,2] |
+ | output_shape | rows and columns of model outputs, for model inference | [48,2] |
+
+ - In addition to model inference, the data types of model input and output can be specified:
+
+ - | **Parameter name** | **Parameter description** | **Example** |
+ | ----------- | ------------------ | --------------------- |
+ | input_type | model input data type | ['float32','float32'] |
+ | output_type | data type of the model output | ['float32','float32'] |
+
+ - In addition to this, additional notes can be specified for display during model management
+
+ - | **Parameter name** | **Parameter description** | **Examples** |
+ | ---------- | ---------------------------------------------- | ------------------------------------------- |
+ | attributes | optional, user-defined model notes for model display | 'model_type': 'dlinear','kernel_size': '25' |
+
+
+In addition to registration of local model files, registration can also be done by specifying remote resource paths via URIs, using open source model repositories (e.g. HuggingFace).
+
+#### Example
+
+The example folder contains the model.pt and config.yaml files; model.pt is produced by training, and the content of config.yaml is as follows:
+
+```YAML
+configs:
+  # Required
+  input_shape: [96, 2] # The model receives 96 rows x 2 columns as input.
+  output_shape: [48, 2] # The model outputs 48 rows x 2 columns.
+
+  # Optional; defaults to float32 for every column, and the number of entries must match the number of columns in the shape.
+  input_type: ["int64", "int64"] # Input data types; must match the number of input columns.
+  output_type: ["text", "int64"] # Output data types; must match the number of output columns.
+
+attributes: # Optional; user-defined notes for display.
+ 'model_type': 'dlinear'
+ 'kernel_size': '25'
+```
+
+Specify this folder as the load path to register the model.
+
+```SQL
+IoTDB> create model dlinear_example using uri "file://./example"
+```
+
+Alternatively, you can download the corresponding model file from huggingFace and register it.
+
+```SQL
+IoTDB> create model dlinear_example using uri "https://huggingface.com/IoTDBML/dlinear/"
+```
+
+After the SQL is executed, registration proceeds asynchronously. You can check the registration status of the model with the model display command (see the Viewing Models section); the time needed for a successful registration mainly depends on the size of the model file.
+
+Once the model registration is complete, you can call specific functions and perform model inference by using normal queries.
+
+### Viewing Models
+
+Successfully registered models can be queried for model-specific information through the show models command. The SQL definition is as follows:
+
+```SQL
+show models
+
+show models <model_id>
+```
+
+In addition to displaying information about all models directly, you can specify a model id to view information about a specific model. The results of the model show contain the following information:
+
+| **ModelId** | **State** | **Configs** | **Attributes** |
+| ------------ | ------------------------------------- | ---------------------------------------------- | -------------- |
+| Unique model identifier | Model registration status (LOADING, ACTIVE, DROPPING, UNAVAILABLE) | inputShape, outputShape, inputTypes, outputTypes | Model notes |
+
+State shows the current registration state of the model, which includes the following stages:
+
+- **LOADING**: The model's meta information has been added to the ConfigNode, and the model file is being transferred to the AINode node.
+- **ACTIVE**: The model has been registered and is in the available state.
+- **DROPPING**: Model deletion is in progress; model-related information is being deleted from the ConfigNode and AINode.
+- **UNAVAILABLE**: Model creation failed; the failed model_name can be deleted with drop model.
+
+#### Example
+
+```SQL
+IoTDB> show models
+
+
++---------------------+--------------------------+-----------+----------------------------+-----------------------+
+| ModelId| ModelType| State| Configs| Notes|
++---------------------+--------------------------+-----------+----------------------------+-----------------------+
+| dlinear_example| USER_DEFINED| ACTIVE| inputShape:[96,2]| |
+| | | | outputShape:[48,2]| |
+| | | | inputDataType:[float,float]| |
+| | | |outputDataType:[float,float]| |
+| _STLForecaster| BUILT_IN_FORECAST| ACTIVE| |Built-in model in IoTDB|
+| _NaiveForecaster| BUILT_IN_FORECAST| ACTIVE| |Built-in model in IoTDB|
+| _ARIMA| BUILT_IN_FORECAST| ACTIVE| |Built-in model in IoTDB|
+|_ExponentialSmoothing| BUILT_IN_FORECAST| ACTIVE| |Built-in model in IoTDB|
+| _GaussianHMM|BUILT_IN_ANOMALY_DETECTION| ACTIVE| |Built-in model in IoTDB|
+| _GMMHMM|BUILT_IN_ANOMALY_DETECTION| ACTIVE| |Built-in model in IoTDB|
+| _Stray|BUILT_IN_ANOMALY_DETECTION| ACTIVE| |Built-in model in IoTDB|
++---------------------+--------------------------+-----------+----------------------------+-----------------------+
+```
+
+We registered the corresponding model earlier; you can check its state with the command above, and ACTIVE indicates that the model was registered successfully and can be used for inference.
+
+### Delete Model
+
+For a successfully registered model, the user can delete it via SQL. In addition to deleting the meta information on the configNode, this operation also deletes all the related model files under the AINode. The SQL is as follows:
+
+```SQL
+drop model <model_name>
+```
+
+You need to specify the model_name of a successfully registered model to delete it. Since model deletion involves data on multiple nodes, the operation does not complete immediately; the model's state is DROPPING during this time, and a model in this state cannot be used for inference.
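+
+For example, the model registered earlier could be deleted like this:
+
+```shell
+IoTDB> drop model dlinear_example
+```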
+
+### Inference with Built-in Models
+
+The SQL syntax is as follows:
+
+
+```SQL
+call inference(<built_in_model_name>, sql [, <parameterName>=<parameterValue>])
+```
+
+Built-in model inference does not require a registration process. Inference is invoked with the `call inference` statement; the corresponding parameters are described as follows:
+
+- **built_in_model_name**: built-in model name
+- **parameterName**: parameter name
+- **parameterValue**: parameter value
+
+#### Built-in Models and Parameter Descriptions
+
+The following machine learning models are currently built-in, please refer to the following links for detailed parameter descriptions.
+
+| Model | built_in_model_name | Task type | Parameter description |
+| -------------------- | --------------------- | -------- | ------------------------------------------------------------ |
+| Arima | _Arima | Forecast | [Arima Parameter description](https://www.sktime.net/en/latest/api_reference/auto_generated/sktime.forecasting.arima.ARIMA.html?highlight=Arima) |
+| STLForecaster | _STLForecaster | Forecast | [STLForecaster Parameter description](https://www.sktime.net/en/latest/api_reference/auto_generated/sktime.forecasting.trend.STLForecaster.html#sktime.forecasting.trend.STLForecaster) |
+| NaiveForecaster | _NaiveForecaster | Forecast | [NaiveForecaster Parameter description](https://www.sktime.net/en/latest/api_reference/auto_generated/sktime.forecasting.naive.NaiveForecaster.html#naiveforecaster) |
+| ExponentialSmoothing | _ExponentialSmoothing | Forecast | [ExponentialSmoothing Parameter description](https://www.sktime.net/en/latest/api_reference/auto_generated/sktime.forecasting.exp_smoothing.ExponentialSmoothing.html) |
+| GaussianHMM | _GaussianHMM | Annotation | [GaussianHMM Parameter description](https://www.sktime.net/en/latest/api_reference/auto_generated/sktime.annotation.hmm_learn.gaussian.GaussianHMM.html) |
+| GMMHMM | _GMMHMM | Annotation | [GMMHMM Parameter description](https://www.sktime.net/en/latest/api_reference/auto_generated/sktime.annotation.hmm_learn.gmm.GMMHMM.html) |
+| Stray | _Stray | Anomaly detection | [Stray Parameter description](https://www.sktime.net/en/latest/api_reference/auto_generated/sktime.annotation.stray.STRAY.html) |
+
+
+#### Example
+
+The following is an example of inference using a built-in model. The built-in Stray model is an anomaly detection algorithm; its input is `[144,1]` and its output is `[144,1]`. We use it for inference through SQL.
+
+```SQL
+IoTDB> select * from root.eg.airline
++-----------------------------+------------------+
+| Time|root.eg.airline.s0|
++-----------------------------+------------------+
+|1949-01-31T00:00:00.000+08:00| 224.0|
+|1949-02-28T00:00:00.000+08:00| 118.0|
+|1949-03-31T00:00:00.000+08:00| 132.0|
+|1949-04-30T00:00:00.000+08:00| 129.0|
+......
+|1960-09-30T00:00:00.000+08:00| 508.0|
+|1960-10-31T00:00:00.000+08:00| 461.0|
+|1960-11-30T00:00:00.000+08:00| 390.0|
+|1960-12-31T00:00:00.000+08:00| 432.0|
++-----------------------------+------------------+
+Total line number = 144
+
+IoTDB> call inference(_Stray, "select s0 from root.eg.airline", k=2)
++-------+
+|output0|
++-------+
+| 0|
+| 0|
+| 0|
+| 0|
+......
+| 1|
+| 1|
+| 0|
+| 0|
+| 0|
+| 0|
++-------+
+Total line number = 144
+```
+
+### Inference with Deep Learning Models
+
+The SQL syntax is as follows:
+
+```SQL
+call inference(<model_name>, sql [, window=<window_function>])
+
+
+window_function:
+ head(window_size)
+ tail(window_size)
+ count(window_size,sliding_step)
+```
+
+After the model has been registered, inference is invoked with the `call inference` statement; the corresponding parameters are described as follows:
+
+- **model_name**: corresponds to a registered model
+- **sql**: the SQL query whose result is used as input to the model for inference. The row and column dimensions of the query result need to match the size specified in the model's config. (It is not recommended to use `SELECT *` here: in IoTDB, `*` does not sort the columns, so the column order is undefined; use `SELECT s0,s1` to ensure the column order matches what the model input expects.)
+- **window_function**: Window functions that can be used in the inference process, there are currently three types of window functions provided to assist in model inference:
+ - **head(window_size)**: Gets the first window_size rows of the data for model inference; this window can be used for data cropping.
+ 
+
+ - **tail(window_size)**: Gets the last window_size rows of the data for model inference; this window can be used for data cropping.
+ 
+
+ - **count(window_size, sliding_step)**: A sliding window based on the number of rows; the data in each window is run through the model separately. As shown in the example below, with window_size 2 the input dataset is split into three windows, and inference is performed on each window to produce its own result. This window can be used for continuous inference.
+ 
+
+**Explanation 1**: window can be used to solve the problem of cropping rows when the result of the SQL query does not match the model's input row requirement. Note that when the number of columns does not match, or the number of rows is smaller than the model requires, inference cannot proceed and an error message is returned.
+
+**Explanation 2**: In deep learning applications, timestamp-derived features (the time column in the data) are often used as covariates in generative tasks and are fed into the model together with the data to enhance it, but the time column is generally not included in the model's output. To keep the implementation general, the inference results correspond only to the model's real output; if the model does not output a time column, it is not included in the results.
+
+
+#### Example
+
+The following is an example of inference using a deep learning model, for the `dlinear_example` prediction model with input `[96,2]` and output `[48,2]` registered above, which we invoke via SQL.
+
+```Shell
+IoTDB> select s0,s1 from root.**
++-----------------------------+-------------------+-------------------+
+| Time| root.eg.etth.s0| root.eg.etth.s1|
++-----------------------------+-------------------+-------------------+
+|1990-01-01T00:00:00.000+08:00| 0.7855| 1.611|
+|1990-01-02T00:00:00.000+08:00| 0.7818| 1.61|
+|1990-01-03T00:00:00.000+08:00| 0.7867| 1.6293|
+|1990-01-04T00:00:00.000+08:00| 0.786| 1.637|
+|1990-01-05T00:00:00.000+08:00| 0.7849| 1.653|
+|1990-01-06T00:00:00.000+08:00| 0.7866| 1.6537|
+|1990-01-07T00:00:00.000+08:00| 0.7886| 1.662|
+......
+|1990-03-31T00:00:00.000+08:00| 0.7585| 1.678|
+|1990-04-01T00:00:00.000+08:00| 0.7587| 1.6763|
+|1990-04-02T00:00:00.000+08:00| 0.76| 1.6813|
+|1990-04-03T00:00:00.000+08:00| 0.7669| 1.684|
+|1990-04-04T00:00:00.000+08:00| 0.7645| 1.677|
+|1990-04-05T00:00:00.000+08:00| 0.7625| 1.68|
+|1990-04-06T00:00:00.000+08:00| 0.7617| 1.6917|
++-----------------------------+-------------------+-------------------+
+Total line number = 96
+
+IoTDB> call inference(dlinear_example,"select s0,s1 from root.**")
++--------------------------------------------+-----------------------------+
+| _result_0| _result_1|
++--------------------------------------------+-----------------------------+
+| 0.726302981376648| 1.6549958229064941|
+| 0.7354921698570251| 1.6482787370681763|
+| 0.7238251566886902| 1.6278168201446533|
+......
+| 0.7692174911499023| 1.654654049873352|
+| 0.7685555815696716| 1.6625318765640259|
+| 0.7856493592262268| 1.6508299350738525|
++--------------------------------------------+-----------------------------+
+Total line number = 48
+```
+
+#### Example of using the tail/head window function
+
+When the amount of data varies and you want to take the latest 96 rows for inference, the corresponding window function tail can be used. The head function is used in a similar way, except that it takes the earliest 96 rows.
+
+```Shell
+IoTDB> select s0,s1 from root.**
++-----------------------------+-------------------+-------------------+
+| Time| root.eg.etth.s0| root.eg.etth.s1|
++-----------------------------+-------------------+-------------------+
+|1988-01-01T00:00:00.000+08:00| 0.7355| 1.211|
+......
+|1990-01-01T00:00:00.000+08:00| 0.7855| 1.611|
+|1990-01-02T00:00:00.000+08:00| 0.7818| 1.61|
+|1990-01-03T00:00:00.000+08:00| 0.7867| 1.6293|
+|1990-01-04T00:00:00.000+08:00| 0.786| 1.637|
+|1990-01-05T00:00:00.000+08:00| 0.7849| 1.653|
+|1990-01-06T00:00:00.000+08:00| 0.7866| 1.6537|
+|1990-01-07T00:00:00.000+08:00| 0.7886| 1.662|
+......
+|1990-03-31T00:00:00.000+08:00| 0.7585| 1.678|
+|1990-04-01T00:00:00.000+08:00| 0.7587| 1.6763|
+|1990-04-02T00:00:00.000+08:00| 0.76| 1.6813|
+|1990-04-03T00:00:00.000+08:00| 0.7669| 1.684|
+|1990-04-04T00:00:00.000+08:00| 0.7645| 1.677|
+|1990-04-05T00:00:00.000+08:00| 0.7625| 1.68|
+|1990-04-06T00:00:00.000+08:00| 0.7617| 1.6917|
++-----------------------------+-------------------+-------------------+
+Total line number = 996
+
+IoTDB> call inference(dlinear_example,"select s0,s1 from root.**",window=tail(96))
++--------------------------------------------+-----------------------------+
+| _result_0| _result_1|
++--------------------------------------------+-----------------------------+
+| 0.726302981376648| 1.6549958229064941|
+| 0.7354921698570251| 1.6482787370681763|
+| 0.7238251566886902| 1.6278168201446533|
+......
+| 0.7692174911499023| 1.654654049873352|
+| 0.7685555815696716| 1.6625318765640259|
+| 0.7856493592262268| 1.6508299350738525|
++--------------------------------------------+-----------------------------+
+Total line number = 48
+```
+
+#### Example of using the count window function
+
+This window is mainly used for computational tasks. When the model corresponding to the task can only handle a fixed number of rows at a time, but several sets of results are ultimately desired, this window function can be used to perform continuous inference with a point-count-based sliding window. Suppose we have an anomaly detection model `anomaly_example(input: [24,2], output: [1,1])` that generates a 0/1 label for every 24 rows of data. An example of its use is as follows:
+
+```Shell
+IoTDB> select s0,s1 from root.**
++-----------------------------+-------------------+-------------------+
+| Time| root.eg.etth.s0| root.eg.etth.s1|
++-----------------------------+-------------------+-------------------+
+|1990-01-01T00:00:00.000+08:00| 0.7855| 1.611|
+|1990-01-02T00:00:00.000+08:00| 0.7818| 1.61|
+|1990-01-03T00:00:00.000+08:00| 0.7867| 1.6293|
+|1990-01-04T00:00:00.000+08:00| 0.786| 1.637|
+|1990-01-05T00:00:00.000+08:00| 0.7849| 1.653|
+|1990-01-06T00:00:00.000+08:00| 0.7866| 1.6537|
+|1990-01-07T00:00:00.000+08:00| 0.7886| 1.662|
+......
+|1990-03-31T00:00:00.000+08:00| 0.7585| 1.678|
+|1990-04-01T00:00:00.000+08:00| 0.7587| 1.6763|
+|1990-04-02T00:00:00.000+08:00| 0.76| 1.6813|
+|1990-04-03T00:00:00.000+08:00| 0.7669| 1.684|
+|1990-04-04T00:00:00.000+08:00| 0.7645| 1.677|
+|1990-04-05T00:00:00.000+08:00| 0.7625| 1.68|
+|1990-04-06T00:00:00.000+08:00| 0.7617| 1.6917|
++-----------------------------+-------------------+-------------------+
+Total line number = 96
+
+IoTDB> call inference(anomaly_example,"select s0,s1 from root.**",window=count(24,24))
++-------------------------+
+| _result_0|
++-------------------------+
+| 0|
+| 1|
+| 1|
+| 0|
++-------------------------+
+Total line number = 4
+```
+
+In the result set, each row's label corresponds to the output of the anomaly detection model after inputting each group of 24 rows of data.
+
+## Privilege Management
+
+When using AINode-related functions, IoTDB's own authentication can be used for permission management. A user can use the model management functions only with the USE_MODEL privilege, and when using the inference function the user also needs permission to access the source series referenced by the SQL that feeds the model.
+
+| Privilege Name | Privilege Scope | Administrator User (default ROOT) | Normal User | Path Related |
+| --------- | --------------------------------- | ---------------------- | -------- | -------- |
+| USE_MODEL | create model/show models/drop model | √ | √ | x |
+| READ_DATA | call inference | √ | √ | √ |
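+
+As a sketch of how these privileges might be granted (assuming IoTDB's standard `GRANT ... ON ... TO USER` syntax applies here; the user name and path are illustrative):
+
+```shell
+IoTDB> GRANT USE_MODEL ON root.** TO USER analyst
+IoTDB> GRANT READ_DATA ON root.eg.** TO USER analyst
+```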
+
+## Practical Examples
+
+### Power Load Prediction
+
+In some industrial scenarios, there is a need to predict power loads, which can be used to optimise power supply, conserve energy and resources, support planning and expansion, and enhance power system reliability.
+
+We use the test set of the ETTh1 dataset: [ETTh1](https://alioss.timecho.com/docs/img/ETTh1.csv).
+
+
+It contains power data collected at 1 h intervals; each record consists of the load features and the oil temperature: High UseFul Load, High UseLess Load, Middle UseFul Load, Middle UseLess Load, Low UseFul Load, Low UseLess Load, and Oil Temperature.
+
+On this dataset, the model inference function of IoTDB-ML can predict the oil temperature over a future period from the relationship between past values of the high, middle, and low use loads and the oil temperature at the corresponding timestamps, empowering automatic regulation and monitoring of grid transformers.
+
+#### Step 1: Data Import
+
+Users can import the ETT dataset into IoTDB using `import-csv.sh` in the tools folder
+
+```Bash
+bash ./import-csv.sh -h 127.0.0.1 -p 6667 -u root -pw root -f ../../ETTh1.csv
+```
+
+#### Step 2: Model Import
+
+We can enter the following SQL in iotdb-cli to pull a trained model from huggingface for registration for subsequent inference.
+
+```SQL
+create model dlinear using uri 'https://huggingface.co/hvlgo/dlinear/tree/main'
+```
+
+This model is based on the lightweight deep model DLinear, which captures trends within a sequence and relationships between variables as well as possible while keeping inference relatively fast, making it more suitable for fast real-time prediction than deeper models.
+
+#### Step 3: Model inference
+
+```Shell
+IoTDB> select s0,s1,s2,s3,s4,s5,s6 from root.eg.etth LIMIT 96
++-----------------------------+---------------+---------------+---------------+---------------+---------------+---------------+---------------+
+| Time|root.eg.etth.s0|root.eg.etth.s1|root.eg.etth.s2|root.eg.etth.s3|root.eg.etth.s4|root.eg.etth.s5|root.eg.etth.s6|
++-----------------------------+---------------+---------------+---------------+---------------+---------------+---------------+---------------+
+|2017-10-20T00:00:00.000+08:00| 10.449| 3.885| 8.706| 2.025| 2.041| 0.944| 8.864|
+|2017-10-20T01:00:00.000+08:00| 11.119| 3.952| 8.813| 2.31| 2.071| 1.005| 8.442|
+|2017-10-20T02:00:00.000+08:00| 9.511| 2.88| 7.533| 1.564| 1.949| 0.883| 8.16|
+|2017-10-20T03:00:00.000+08:00| 9.645| 2.21| 7.249| 1.066| 1.828| 0.914| 7.949|
+......
+|2017-10-23T20:00:00.000+08:00| 8.105| 0.938| 4.371| -0.569| 3.533| 1.279| 9.708|
+|2017-10-23T21:00:00.000+08:00| 7.167| 1.206| 4.087| -0.462| 3.107| 1.432| 8.723|
+|2017-10-23T22:00:00.000+08:00| 7.1| 1.34| 4.015| -0.32| 2.772| 1.31| 8.864|
+|2017-10-23T23:00:00.000+08:00| 9.176| 2.746| 7.107| 1.635| 2.65| 1.097| 9.004|
++-----------------------------+---------------+---------------+---------------+---------------+---------------+---------------+---------------+
+Total line number = 96
+
+IoTDB> call inference(dlinear, "select s0,s1,s2,s3,s4,s5,s6 from root.eg.etth", window=head(96))
++-----------+----------+----------+------------+---------+----------+----------+
+| output0| output1| output2| output3| output4| output5| output6|
++-----------+----------+----------+------------+---------+----------+----------+
+| 10.319546| 3.1450553| 7.877341| 1.5723765|2.7303758| 1.1362307| 8.867775|
+| 10.443649| 3.3286757| 7.8593454| 1.7675098| 2.560634| 1.1177158| 8.920919|
+| 10.883752| 3.2341104| 8.47036| 1.6116762|2.4874182| 1.1760603| 8.798939|
+......
+| 8.0115595| 1.2995274| 6.9900327|-0.098746896| 3.04923| 1.176214| 9.548782|
+| 8.612427| 2.5036244| 5.6790237| 0.66474205|2.8870275| 1.2051733| 9.330128|
+| 10.096699| 3.399722| 6.9909| 1.7478468|2.7642853| 1.1119363| 9.541455|
++-----------+----------+----------+------------+---------+----------+----------+
+Total line number = 48
+```
+
+Comparing the predicted oil temperature with the real values, we obtain the following image.
+
+The data before 10/24 00:00 represents the past data input to the model, the blue line after 10/24 00:00 is the oil temperature forecast result given by the model, and the red line is the actual oil temperature data from the dataset (used for comparison).
+
+
+
+As can be seen, we used the relationship between the six load series and the corresponding oil temperatures over the past 96 hours (4 days), together with the inter-series relationships learned beforehand, to model the likely changes in oil temperature over the next 48 hours (2 days); after visualisation, the predicted curve stays highly consistent in trend with the actual results.
+
+### Power Prediction
+
+Power monitoring of current, voltage and power data is required in substations for detecting potential grid problems, identifying faults in the power system, effectively managing grid loads and analysing power system performance and trends.
+
+We formed a dataset from the current, voltage, and power data of a substation in a real scenario. The dataset consists of data such as A-phase voltage, B-phase voltage, and C-phase voltage, collected every 5-6 s over a time span of nearly four months in the substation.
+
+The test set is available here: [data](https://alioss.timecho.com/docs/img/data.csv).
+
+On this dataset, the model inference function of IoTDB-ML can predict the C-phase voltage in the future period through the previous values and corresponding timestamps of A-phase voltage, B-phase voltage and C-phase voltage, empowering the monitoring management of the substation.
+
+#### Step 1: Data Import
+
+Users can import the dataset using `import-csv.sh` in the tools folder
+
+```Bash
+bash ./import-csv.sh -h 127.0.0.1 -p 6667 -u root -pw root -f ../../data.csv
+```
+
+#### Step 2: Model Import
+
+We can select built-in models or registered models in IoTDB CLI for subsequent inference.
+
+We use the built-in model STLForecaster for prediction. STLForecaster is a time series forecasting method based on the STL implementation in the statsmodels library.
+
+#### Step 3: Model Inference
+
+```Shell
+IoTDB> select * from root.eg.voltage limit 96
++-----------------------------+------------------+------------------+------------------+
+| Time|root.eg.voltage.s0|root.eg.voltage.s1|root.eg.voltage.s2|
++-----------------------------+------------------+------------------+------------------+
+|2023-02-14T20:38:32.000+08:00| 2038.0| 2028.0| 2041.0|
+|2023-02-14T20:38:38.000+08:00| 2014.0| 2005.0| 2018.0|
+|2023-02-14T20:38:44.000+08:00| 2014.0| 2005.0| 2018.0|
+......
+|2023-02-14T20:47:52.000+08:00| 2024.0| 2016.0| 2027.0|
+|2023-02-14T20:47:57.000+08:00| 2024.0| 2016.0| 2027.0|
+|2023-02-14T20:48:03.000+08:00| 2024.0| 2016.0| 2027.0|
++-----------------------------+------------------+------------------+------------------+
+Total line number = 96
+
+IoTDB> call inference(_STLForecaster, "select s0,s1,s2 from root.eg.voltage", window=head(96),predict_length=48)
++---------+---------+---------+
+| output0| output1| output2|
++---------+---------+---------+
+|2026.3601|2018.2953|2029.4257|
+|2019.1538|2011.4361|2022.0888|
+|2025.5074|2017.4522|2028.5199|
+......
+
+|2022.2336|2015.0290|2025.1023|
+|2015.7241|2008.8975|2018.5085|
+|2022.0777|2014.9136|2024.9396|
+|2015.5682|2008.7821|2018.3458|
++---------+---------+---------+
+Total line number = 48
+```
+
+Comparing the predicted C-phase voltage with the actual values gives the following image.
+
+The data before 02/14 20:48 is the historical input to the model; the blue line after 02/14 20:48 is the phase C voltage forecast produced by the model, and the red line is the actual phase C voltage from the dataset (shown for comparison).
+
+
+
+It can be seen that we used the voltage data from the past 10 minutes and, based on the previously learned inter-sequence relationships, modeled the possible changes in the phase C voltage data for the next 5 minutes. The visualized forecast curve shows a certain degree of synchronicity with the actual results in terms of trend.
+
+### Anomaly Detection
+
+In the civil aviation and transport industry, there is a need to detect anomalies in the number of passengers travelling on aircraft. The results of anomaly detection can be used to guide adjustments to flight scheduling and make operations more efficient.
+
+Airline Passengers is a time-series dataset that records the number of international air passengers between 1949 and 1960, sampled at one-month intervals. The dataset contains a total of one time series. The dataset is [airline](https://alioss.timecho.com/docs/img/airline.csv).
+On this dataset, the model inference function of IoTDB-ML can empower the transport industry by capturing the changing patterns of the sequence in order to detect anomalies at the sequence time points.
+
+#### Step 1: Data Import
+
+Users can import the dataset using `import-csv.sh` in the tools folder
+
+```Bash
+bash ./import-csv.sh -h 127.0.0.1 -p 6667 -u root -pw root -f ../../airline.csv
+```
+
+#### Step 2: Model Inference
+
+IoTDB has some built-in machine learning algorithms that can be used directly. A sample prediction using one of the anomaly detection algorithms is shown below:
+
+```Shell
+IoTDB> select * from root.eg.airline
++-----------------------------+------------------+
+| Time|root.eg.airline.s0|
++-----------------------------+------------------+
+|1949-01-31T00:00:00.000+08:00| 224.0|
+|1949-02-28T00:00:00.000+08:00| 118.0|
+|1949-03-31T00:00:00.000+08:00| 132.0|
+|1949-04-30T00:00:00.000+08:00| 129.0|
+......
+|1960-09-30T00:00:00.000+08:00| 508.0|
+|1960-10-31T00:00:00.000+08:00| 461.0|
+|1960-11-30T00:00:00.000+08:00| 390.0|
+|1960-12-31T00:00:00.000+08:00| 432.0|
++-----------------------------+------------------+
+Total line number = 144
+
+IoTDB> call inference(_Stray, "select s0 from root.eg.airline", k=2)
++-------+
+|output0|
++-------+
+| 0|
+| 0|
+| 0|
+| 0|
+......
+| 1|
+| 1|
+| 0|
+| 0|
+| 0|
+| 0|
++-------+
+Total line number = 144
+```
+
+Plotting the detection results gives the following image, where the blue curve is the original time series and the points marked with red dots are the time points the algorithm flags as anomalies.
+
+
+
+It can be seen that the Stray model has modelled the input sequence changes and successfully detected the time points where anomalies occur.
\ No newline at end of file
diff --git a/src/UserGuide/V1.3.3/Deployment-and-Maintenance/AINode_Deployment_apache.md b/src/UserGuide/V1.3.3/Deployment-and-Maintenance/AINode_Deployment_apache.md
new file mode 100644
index 000000000..610ec0b72
--- /dev/null
+++ b/src/UserGuide/V1.3.3/Deployment-and-Maintenance/AINode_Deployment_apache.md
@@ -0,0 +1,522 @@
+
+# AINode Deployment
+
+## AINode Introduction
+
+### Capability Introduction
+
+ AINode is the third type of endogenous node provided by IoTDB after the Configurable Node and DataNode. This node extends its ability to perform machine learning analysis on time series by interacting with the DataNode and Configurable Node of the IoTDB cluster. It supports the introduction of existing machine learning models from external sources for registration and the use of registered models to complete time series analysis tasks on specified time series data through simple SQL statements. The creation, management, and inference of models are integrated into the database engine. Currently, machine learning algorithms or self-developed models are available for common time series analysis scenarios, such as prediction and anomaly detection.
+
+### Delivery Method
+ It is an additional package outside the IoTDB cluster, with independent installation and activation (if you need to try or use it, please contact Timecho Technology Business or Technical Support).
+
+### Deployment mode
+
+

+

+
+
+## Installation preparation
+
+### Get installation package
+
+ Users can download the AINode installation package and unzip it to complete the installation of AINode.
+
+ Unzip the installation package
+ (`iotdb-enterprise-ainode-<version>.zip`). The directory structure after unpacking is as follows:
+| **Directory** | **Type** | **Description** |
+| ------------ | -------- | ------------------------------------------------ |
+| lib | folder | Compiled AINode binaries and related code dependencies |
+| sbin | folder | AINode run scripts, used to start, remove, and stop AINode |
+| conf | folder | Configuration files for AINode, including the configuration items listed below |
+| LICENSE | file | License file |
+| NOTICE | file | Notice file |
+| README_ZH.md | file | Chinese README in markdown format |
+| `README.md` | file | README (instructions) |
+
+### Environment preparation
+- Suggested operating environment: Ubuntu, CentOS, MacOS
+
+- Runtime Environment
+ - In a networked environment, any Python version from 3.8 to 3.14 that ships with the pip and venv tools is sufficient. In a non-networked environment, Python 3.8 is required: download the zip package for the corresponding operating system from [here](https://cloud.tsinghua.edu.cn/d/4c1342f6c272439aa96c/?p=%2Flibs&mode=list) (note that when downloading dependencies you need to select the zip file in the libs folder, as shown in the figure below), copy all files in that folder to the `lib` folder inside the `iotdb-enterprise-ainode-<version>` folder, and then follow the steps below to start AINode.
+
+
+
+ - A Python interpreter must be available in the environment variables so that it can be invoked directly with the `python` command.
+ - It is recommended to create a Python venv virtual environment inside the `iotdb-enterprise-ainode-<version>` folder. To create a virtual environment with Python 3.8.0, the command is as follows:
+ ```shell
+ # Use Python 3.8.0 to create a virtual environment in a folder named venv.
+ ../Python-3.8.0/python -m venv venv
+ ```
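+
+ After it has been created, the virtual environment can be activated and checked before AINode is started; this is standard venv usage rather than an AINode-specific step:
+
+ ```shell
+ # Activate the virtual environment and confirm the interpreter version
+ source venv/bin/activate
+ python --version
+ ```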
+
+## Installation steps
+
+### Install AINode
+
+
+ 1. Check the kernel architecture of Linux
+```shell
+ uname -m
+ ```
+
+ 2. Obtain a Python environment ([Miniconda download](https://repo.anaconda.com/miniconda/))
+
+ We recommend downloading the py311 installer and copying it into the dedicated iotdb folder under the user's home directory
+
+ 3. Switch to the iotdb dedicated folder to install the Python environment
+
+ Taking Miniconda3-py311_24.5.0-0-Linux-x86_64 as an example:
+
+```shell
+ bash ./Miniconda3-py311_24.5.0-0-Linux-x86_64.sh
+ ```
+> Type "Enter", "Long press space", "Enter", "Yes", "Yes" according to the prompt
+> Close the current SSH window and reconnect
+
+ 4. Create a dedicated environment
+
+```shell
+ conda create -n ainode_py python=3.11.9
+ ```
+
+ Type 'y' according to the prompt
+
+ 5. Activate dedicated environment
+
+```shell
+ conda activate ainode_py
+ ```
+
+ 6. Verify Python version
+
+```shell
+ python --version
+ ```
+ 7. Download the AINode package into the dedicated folder, switch to that folder, and extract the installation package
+
+```shell
+ unzip iotdb-enterprise-ainode-1.3.3.2.zip
+ ```
+
+ 8. Configuration item modification
+
+```shell
+ vi iotdb-enterprise-ainode-1.3.3.2/conf/iotdb-ainode.properties
+ ```
+ Configuration items to modify ([detailed information](#configuration-item-modification)):
+
+> ain_seed_config_node=iotdb-1:10710 (Cluster communication node IP: communication node port)
+> ain_inference_rpc_address=iotdb-3 (IP address of the server running AINode)
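+
+ To double-check the edit, the two modified settings can be printed back from the properties file (the file path and values below follow the example in this step):
+
+```shell
+ # Print the two settings modified above
+ grep -E '^(ain_seed_config_node|ain_inference_rpc_address)' iotdb-enterprise-ainode-1.3.3.2/conf/iotdb-ainode.properties
+ ```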
+
+ 9. Replace the pip package source
+
+```shell
+ pip config set global.index-url https://mirrors.aliyun.com/pypi/simple/
+ ```
+
+ 10. Start the AINode node
+
+```shell
+ nohup bash iotdb-enterprise-ainode-1.3.3.2/sbin/start-ainode.sh > myout.file 2>&1 &
+ ```
+> To return to the system's default environment, run: conda deactivate
+
+ ### Configuration item modification
+
+AINode supports modifying some necessary parameters. You can find the following parameters in the `conf/iotdb-ainode.properties` file and modify them persistently:
+
+| **Name** | **Description** | **Type** | **Default value** | **When the modification takes effect** |
+| :----------------------------- | ------------------------------------------------------------ | ------- | ------------------ | ---------------------------- |
+| cluster_name | Identifier of the cluster that AINode joins | String | defaultCluster | Can only be modified before the first service startup |
+| ain_seed_config_node | The ConfigNode address that AINode registers with at startup | String | 127.0.0.1:10710 | Can only be modified before the first service startup |
+| ain_inference_rpc_address | The address on which AINode provides services and communicates (internal service communication interface) | String | 127.0.0.1 | Can only be modified before the first service startup |
+| ain_inference_rpc_port | The port on which AINode provides services and communicates | String | 10810 | Can only be modified before the first service startup |
+| ain_system_dir | AINode metadata storage path; the starting directory of a relative path depends on the operating system, so an absolute path is recommended | String | data/AINode/system | Can only be modified before the first service startup |
+| ain_models_dir | Path where AINode stores model files; the starting directory of a relative path depends on the operating system, so an absolute path is recommended | String | data/AINode/models | Can only be modified before the first service startup |
+| ain_logs_dir | Path where AINode stores logs; the starting directory of a relative path depends on the operating system, so an absolute path is recommended | String | logs/AINode | Takes effect after restart |
+| ain_thrift_compression_enabled | Whether AINode enables Thrift compression (0 = disabled, 1 = enabled) | Boolean | 0 | Takes effect after restart |
+
+### Start AINode
+
+ After the Seed ConfigNode has been deployed, model registration and inference can be supported by adding AINode nodes. After specifying the IoTDB cluster information in the configuration file, the corresponding command can be executed to start AINode and add it to the IoTDB cluster.
+
+#### Networked environment startup
+
+##### Start command
+
+```shell
+ # Start command
+ # Linux and MacOS systems
+ bash sbin/start-ainode.sh
+
+ # Windows systems
+ sbin\start-ainode.bat
+
+ # Background startup command (recommended for long-term running)
+ # Linux and MacOS systems
+ nohup bash sbin/start-ainode.sh > myout.file 2>&1 &
+
+ # Windows systems
+ nohup bash sbin\start-ainode.bat > myout.file 2>&1 &
+ ```
+
+#### Detailed Syntax
+
+```shell
+ # Start command
+ # Linux and MacOS systems
+ bash sbin/start-ainode.sh -i <ain_interpreter_dir> -r -n
+
+ # Windows systems
+ sbin\start-ainode.bat -i <ain_interpreter_dir> -r -n
+ ```
+
+##### Parameter introduction:
+
+| **Name** | **Flag** | **Description** | **Required** | **Type** | **Default value** | **Input method** |
+| ------------------- | ---- | ------------------------------------------------------------ | -------- | ------ | ---------------- | ---------------------- |
+| ain_interpreter_dir | -i | Interpreter path of the virtual environment in which AINode is installed; an absolute path is required. | No | String | Read from environment variables by default | Passed at invocation, or persisted in the env scripts |
+| ain_force_reinstall | -r | Whether the script checks the version when verifying the AINode installation; if enabled and the version is incorrect, the whl package under `lib` is force-reinstalled. | No | Bool | false | Passed at invocation |
+| ain_no_dependencies | -n | Whether to skip installing dependencies when installing AINode; if specified, only the AINode main program is installed without its dependencies. | No | Bool | false | Passed at invocation |
+
+ If you do not want to specify these parameters every time you start AINode, you can also persist them in the `ainode-env.sh` and `ainode-env.bat` scripts in the `conf` folder (currently only the ain_interpreter_dir parameter can be persisted).
+
+ `ainode-env.sh` :
+ ```shell
+ # The default venv environment is used if ain_interpreter_dir is not set. Please use an absolute path without quotation marks
+ # ain_interpreter_dir=
+ ```
+ `ainode-env.bat` :
+```shell
+ @REM The default venv environment is used if ain_interpreter_dir is not set. Please use an absolute path without quotation marks
+ @REM set ain_interpreter_dir=
+ ```
+ After writing the parameter value, uncomment the corresponding line and save it to take effect on the next script execution.
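+
+ For example, after persisting a (hypothetical) interpreter path, the relevant line in `conf/ainode-env.sh` would look like this:
+
+ ```shell
+ # Hypothetical example: point AINode at the interpreter of an existing virtual environment
+ ain_interpreter_dir=/data/iotdb/venv/bin/python
+ ```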
+
+
+#### Example
+
+##### Directly start:
+
+```shell
+ # Start command
+ # Linux and MacOS systems
+ bash sbin/start-ainode.sh
+ # Windows systems
+ sbin\start-ainode.bat
+
+
+ # Background startup command (recommended for long-term running)
+ # Linux and MacOS systems
+ nohup bash sbin/start-ainode.sh > myout.file 2>&1 &
+ # Windows systems
+ nohup bash sbin\start-ainode.bat > myout.file 2>&1 &
+ ```
+
+##### Update Start:
+If the AINode version has been updated (for example, the `lib` folder has been updated), this command can be used. First make sure AINode has stopped running, then restart it with the `-r` parameter, which reinstalls AINode based on the files under `lib`.
+
+
+```shell
+ # Update startup command
+ # Linux and MacOS systems
+ bash sbin/start-ainode.sh -r
+ # Windows systems
+ sbin\start-ainode.bat -r
+
+
+ # Background startup command (recommended for long-term running)
+ # Linux and MacOS systems
+ nohup bash sbin/start-ainode.sh -r > myout.file 2>&1 &
+ # Windows systems
+ nohup bash sbin\start-ainode.bat -r > myout.file 2>&1 &
+ ```
+#### Non-networked environment startup
+
+##### Start command
+
+```shell
+ # Start command
+ # Linux and MacOS systems
+ bash sbin/start-ainode.sh
+
+ # Windows systems
+ sbin\start-ainode.bat
+
+ # Background startup command (recommended for long-term running)
+ # Linux and MacOS systems
+ nohup bash sbin/start-ainode.sh > myout.file 2>&1 &
+
+ # Windows systems
+ nohup bash sbin\start-ainode.bat > myout.file 2>&1 &
+ ```
+
+#### Detailed Syntax
+
+```shell
+ # Start command
+ # Linux and MacOS systems
+ bash sbin/start-ainode.sh -i <ain_interpreter_dir> -r -n
+
+ # Windows systems
+ sbin\start-ainode.bat -i <ain_interpreter_dir> -r -n
+ ```
+
+##### Parameter introduction:
+
+| **Name** | **Flag** | **Description** | **Required** | **Type** | **Default value** | **Input method** |
+| ------------------- | ---- | ------------------------------------------------------------ | -------- | ------ | ---------------- | ---------------------- |
+| ain_interpreter_dir | -i | Interpreter path of the virtual environment in which AINode is installed; an absolute path is required | No | String | Read from environment variables by default | Passed at invocation, or persisted in the env scripts |
+| ain_force_reinstall | -r | Whether the script checks the version when verifying the AINode installation; if enabled and the version is incorrect, the whl package under `lib` is force-reinstalled | No | Bool | false | Passed at invocation |
+
+> Note: If installation fails in a non-networked environment, first check that the installation package matching the platform was selected, and then confirm that the Python version is 3.8 (because of the Python version restrictions of the downloaded packages, 3.7, 3.9, and other versions are not supported)
+
+#### Example
+
+##### Directly start:
+
+```shell
+ # Start command
+ # Linux and MacOS systems
+ bash sbin/start-ainode.sh
+ # Windows systems
+ sbin\start-ainode.bat
+
+ # Background startup command (recommended for long-term running)
+ # Linux and MacOS systems
+ nohup bash sbin/start-ainode.sh > myout.file 2>&1 &
+ # Windows systems
+ nohup bash sbin\start-ainode.bat > myout.file 2>&1 &
+ ```
+
+### Detecting the status of AINode nodes
+
+During startup, the new AINode is automatically added to the IoTDB cluster. After starting AINode, you can enter SQL on the command line to query the cluster; if an AINode node appears with the running status Running (as shown below), it has joined successfully.
+
+
+```shell
+IoTDB> show cluster
++------+----------+-------+---------------+------------+-------+-----------+
+|NodeID| NodeType| Status|InternalAddress|InternalPort|Version| BuildInfo|
++------+----------+-------+---------------+------------+-------+-----------+
+| 0|ConfigNode|Running| 127.0.0.1| 10710|UNKNOWN|190e303-dev|
+| 1| DataNode|Running| 127.0.0.1| 10730|UNKNOWN|190e303-dev|
+| 2| AINode|Running| 127.0.0.1| 10810|UNKNOWN|190e303-dev|
++------+----------+-------+---------------+------------+-------+-----------+
+```
+
+### Stop AINode
+
+If you need to stop a running AINode node, execute the corresponding shutdown script.
+
+#### Stop command
+
+```shell
+ # Linux / MacOS
+ bash sbin/stop-ainode.sh
+
+ #Windows
+ sbin\stop-ainode.bat
+ ```
+
+
+#### Detailed Syntax
+
+```shell
+ # Linux / MacOS
+ bash sbin/stop-ainode.sh -t <AINode-id>/<ip>:<rpc-port>
+
+ # Windows
+ sbin\stop-ainode.bat -t <AINode-id>/<ip>:<rpc-port>
+ ```
+
+##### Parameter introduction:
+
+| **Name** | **Flag** | **Description** | **Required** | **Type** | **Default value** | **Input method** |
+| ----------------- | ---- | ------------------------------------------------------------ | -------- | ------ | ------ | ---------- |
+| ain_remove_target | -t | When stopping AINode, the target AINode to be removed can be specified by its Node ID, address, and port number, in the format `<AINode-id>/<ip>:<rpc-port>` | No | String | None | Passed at invocation |
+
+#### Example
+
+```shell
+ # Linux / MacOS
+ bash sbin/stop-ainode.sh
+
+ # Windows
+ sbin\stop-ainode.bat
+ ```
+After AINode is stopped, the AINode node can still be seen in the cluster with the running status UNKNOWN (as shown below); AINode functionality cannot be used in this state.
+
+ ```shell
+IoTDB> show cluster
++------+----------+-------+---------------+------------+-------+-----------+
+|NodeID| NodeType| Status|InternalAddress|InternalPort|Version| BuildInfo|
++------+----------+-------+---------------+------------+-------+-----------+
+| 0|ConfigNode|Running| 127.0.0.1| 10710|UNKNOWN|190e303-dev|
+| 1| DataNode|Running| 127.0.0.1| 10730|UNKNOWN|190e303-dev|
+| 2| AINode|UNKNOWN| 127.0.0.1| 10790|UNKNOWN|190e303-dev|
++------+----------+-------+---------------+------------+-------+-----------+
+```
+If you need to restart the node, you need to execute the startup script again.
+
+### Remove AINode
+
+When it is necessary to remove an AINode node from the cluster, a removal script can be executed. The difference between removing and stopping scripts is that stopping retains the AINode node in the cluster but stops the AINode service, while removing removes the AINode node from the cluster.
+
+#### Remove command
+
+
+```shell
+ # Linux / MacOS
+ bash sbin/remove-ainode.sh
+
+ # Windows
+ sbin\remove-ainode.bat
+ ```
+
+#### Detailed Syntax
+
+```shell
+ # Linux / MacOS
+ bash sbin/remove-ainode.sh -i <ain_interpreter_dir> -t <AINode-id>/<ip>:<rpc-port> -r -n
+
+ # Windows
+ sbin\remove-ainode.bat -i <ain_interpreter_dir> -t <AINode-id>/<ip>:<rpc-port> -r -n
+ ```
+
+##### Parameter introduction:
+
+| **Name** | **Flag** | **Description** | **Required** | **Type** | **Default value** | **Input method** |
+| ------------------- | ---- | ------------------------------------------------------------ | -------- | ------ | ---------------- | --------------------- |
+| ain_interpreter_dir | -i | Interpreter path of the virtual environment in which AINode is installed; an absolute path is required | No | String | Read from environment variables by default | Passed at invocation, or persisted in the env scripts |
+| ain_remove_target | -t | The target AINode to be removed can be specified by its Node ID, address, and port number, in the format `<AINode-id>/<ip>:<rpc-port>` | No | String | None | Passed at invocation |
+| ain_force_reinstall | -r | Whether the script checks the version when verifying the AINode installation; if enabled and the version is incorrect, the whl package under `lib` is force-reinstalled | No | Bool | false | Passed at invocation |
+| ain_no_dependencies | -n | Whether to skip installing dependencies when installing AINode; if specified, only the AINode main program is installed without its dependencies | No | Bool | false | Passed at invocation |
+
+ If you do not want to specify these parameters every time you run the script, you can also persist them in the `ainode-env.sh` and `ainode-env.bat` scripts in the `conf` folder (currently only the ain_interpreter_dir parameter can be persisted).
+
+ `ainode-env.sh` :
+ ```shell
+ # The default venv environment is used if ain_interpreter_dir is not set. Please use an absolute path without quotation marks
+ # ain_interpreter_dir=
+ ```
+ `ainode-env.bat` :
+```shell
+ @REM The default venv environment is used if ain_interpreter_dir is not set. Please use an absolute path without quotation marks
+ @REM set ain_interpreter_dir=
+ ```
+ After writing the parameter value, uncomment the corresponding line and save it to take effect on the next script execution.
+
+#### Example
+
+##### Directly remove:
+
+ ```shell
+ # Linux / MacOS
+ bash sbin/remove-ainode.sh
+
+ # Windows
+ sbin\remove-ainode.bat
+ ```
+ After removing the node, relevant information about the node cannot be queried.
+
+ ```shell
+IoTDB> show cluster
++------+----------+-------+---------------+------------+-------+-----------+
+|NodeID| NodeType| Status|InternalAddress|InternalPort|Version| BuildInfo|
++------+----------+-------+---------------+------------+-------+-----------+
+| 0|ConfigNode|Running| 127.0.0.1| 10710|UNKNOWN|190e303-dev|
+| 1| DataNode|Running| 127.0.0.1| 10730|UNKNOWN|190e303-dev|
++------+----------+-------+---------------+------------+-------+-----------+
+```
+##### Specify removal:
+
+If files under the data folder are lost, AINode may not be able to remove itself locally, and the node must be removed by specifying its node ID, address, and port number. In this case, the parameters can be passed as follows to delete the node.
+
+ ```shell
+ # Linux / MacOS
+ bash sbin/remove-ainode.sh -t <AINode-id>/<ip>:<rpc-port>
+
+ # Windows
+ sbin\remove-ainode.bat -t <AINode-id>/<ip>:<rpc-port>
+ ```
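+
+ As a concrete sketch, assuming the `<AINode-id>/<ip>:<rpc-port>` format above and the AINode shown earlier in `show cluster` (NodeID 2 at 127.0.0.1:10810; substitute your own values):
+
+ ```shell
+ # Remove the AINode with NodeID 2 listening on 127.0.0.1:10810 (assumed argument format)
+ bash sbin/remove-ainode.sh -t 2/127.0.0.1:10810
+ ```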
+
+## Common Problems
+
+### An error occurs when starting AINode stating that the venv module cannot be found
+
+ When AINode is started in the default way, a Python virtual environment is created in the installation package directory and dependencies are installed, so the venv module is required. Generally speaking, Python 3.8 and later come with venv built in, but some systems with a built-in Python environment may not meet this requirement. There are two solutions when this error occurs (choose one):
+
+ Install the venv module locally. Taking Ubuntu as an example, you can run the following command to install Python's built-in venv module, or install a Python build that ships with venv from the Python official website.
+
+ ```shell
+apt-get install python3.8-venv
+```
+Then create a version 3.8.0 venv virtual environment under the AINode path:
+
+ ```shell
+../Python-3.8.0/python -m venv venv  # the trailing "venv" is the folder name
+```
+ Alternatively, when running the startup script, use `-i` to specify an existing Python interpreter path as the running environment for AINode, which removes the need to create a new virtual environment.
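+
+ For example (the interpreter path below is hypothetical; substitute the path to your own interpreter):
+
+ ```shell
+ # Start AINode using an existing interpreter instead of creating a new virtual environment
+ bash sbin/start-ainode.sh -i /usr/bin/python3.8
+ ```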
+
+ ### The SSL module in Python is not properly installed and configured to handle HTTPS resources
+WARNING: pip is configured with locations that require TLS/SSL, however the ssl module in Python is not available.
+You can install OpenSSL and then rebuild Python to solve this problem.
+> Currently Python versions 3.6 to 3.9 are compatible with OpenSSL 1.0.2, 1.1.0, and 1.1.1.
+
+ Python requires OpenSSL to be installed on the system; a specific installation method can be found in this [link](https://stackoverflow.com/questions/56552390/how-to-fix-ssl-module-in-python-is-not-available-in-centos).
+
+ ```shell
+sudo apt-get install build-essential libssl-dev zlib1g-dev libbz2-dev libreadline-dev libsqlite3-dev wget curl llvm libncurses5-dev libncursesw5-dev xz-utils tk-dev libffi-dev liblzma-dev uuid-dev lzma-dev liblzma-dev
+sudo -E ./configure --with-ssl
+make
+sudo make install
+```
+
+ ### Pip version is too low
+
+ A compilation error similar to "error: Microsoft Visual C++ 14.0 or greater is required..." appears on Windows.
+
+The error occurs during installation and compilation, usually because the C++ build tools or setuptools version is too old. You can upgrade pip and setuptools with the following commands:
+
+ ```shell
+./python -m pip install --upgrade pip
+./python -m pip install --upgrade setuptools
+```
+
+
+ ### Install and compile Python
+
+ Use the following instructions to download the installation package from the official website and extract it:
+ ```shell
+wget https://www.python.org/ftp/python/3.8.0/Python-3.8.0.tar.xz
+tar Jxf Python-3.8.0.tar.xz
+```
+ Compile and install the corresponding Python package:
+ ```shell
+cd Python-3.8.0
+./configure prefix=/usr/local/python3
+make
+sudo make install
+python3 --version
+```
\ No newline at end of file
diff --git a/src/UserGuide/V1.3.3/User-Manual/AINode_apache.md b/src/UserGuide/V1.3.3/User-Manual/AINode_apache.md
new file mode 100644
index 000000000..f0c6647ec
--- /dev/null
+++ b/src/UserGuide/V1.3.3/User-Manual/AINode_apache.md
@@ -0,0 +1,654 @@
+
+
+# AI Capability (AINode)
+
+AINode is the third type of internal node in Apache IoTDB, after the ConfigNode and DataNode. It extends the capability of machine learning analysis on time series by interacting with the DataNodes and ConfigNodes of the IoTDB cluster: existing machine learning models can be imported from outside and registered, and the registered models can then be used, through simple SQL statements, to complete time series analysis tasks on specified time series data, integrating model creation, management, and inference in the database engine. At present, machine learning algorithms and self-developed models are provided for common time series analysis scenarios (e.g. prediction and anomaly detection).
+
+The system architecture is shown below:
+::: center
+
+:::
+The responsibilities of the three nodes are as follows:
+
+- **ConfigNode**: responsible for storing and managing the meta-information of the model; responsible for distributed node management.
+- **DataNode**: responsible for receiving and parsing SQL requests from users; responsible for storing time-series data; responsible for preprocessing computation of data.
+- **AINode**: responsible for model file import, model creation, and model inference.
+
+## Advantageous features
+
+Compared with building a machine learning service alone, it has the following advantages:
+
+- **Simple and easy to use**: no Python or Java programming is required; the complete process of machine learning model management and inference can be completed with SQL statements. Creating a model is done with the CREATE MODEL statement, and running inference with a model is done with the CALL INFERENCE(...) statement, which makes usage simpler and more convenient.
+
+- **Avoid Data Migration**: With IoTDB native machine learning, data stored in IoTDB can be directly applied to the inference of machine learning models without having to move the data to a separate machine learning service platform, which accelerates data processing, improves security, and reduces costs.
+
+
+
+- **Built-in Advanced Algorithms**: supports industry-leading machine learning analytics algorithms covering typical timing analysis tasks, empowering the timing database with native data analysis capabilities. Such as:
+ - **Time Series Forecasting**: learns patterns of change from past time series; thus outputs the most likely prediction of future series based on observations at a given past time.
+ - **Anomaly Detection for Time Series**: detects and identifies outliers in a given time series data, helping to discover anomalous behaviour in the time series.
+ - **Time Series Annotation**: adds additional information or markers, such as event occurrences, outliers, or trend changes, to each data point or specific time period to better understand and analyse the data.
+
+
+
+## Basic Concepts
+
+- **Model**: a machine learning model that takes time-series data as input and outputs the results or decisions of an analysis task. Model is the basic management unit of AINode, which supports adding (registration), deleting, checking, and using (inference) of models.
+- **Create**: Load externally designed or trained model files or algorithms into AINode for unified management and use by IoTDB.
+- **Inference**: The process of using the created model to complete the timing analysis task applicable to the model on the specified timing data.
+- **Built-in capabilities**: AINode comes with machine learning algorithms or home-grown models for common timing analysis scenarios (e.g., prediction and anomaly detection).
+
+::: center
+
+:::
+
+## Installation and Deployment
+
+The deployment of AINode can be found in the document [Deployment Guidelines](../Deployment-and-Maintenance/AINode_Deployment_apache.md#ainode-deployment) .
+
+
+## Usage Guidelines
+
+AINode provides a creation and deletion workflow for deep learning models on time series data. Built-in models do not need to be created or deleted; they can be used directly, and the built-in model instances created for an inference call are destroyed automatically once the inference completes.
+
+### Registering Models
+
+A trained deep learning model can be registered by specifying the vector dimensions of the model's inputs and outputs, which can be used for model inference.
+
+Models that meet the following criteria can be registered in AINode:
+1. AINode supports models trained with PyTorch 2.1.0 and 2.2.0; avoid using features introduced in versions above 2.2.0.
+2. AINode supports models stored using PyTorch JIT, and the model file needs to include the parameters and structure of the model.
+3. The input sequence of the model can contain one or more columns, and if there are multiple columns, they need to correspond to the model capability and model configuration file.
+4. The input and output dimensions of the model must be clearly defined in the `config.yaml` configuration file. When using the model, it is necessary to strictly follow the input-output dimensions defined in the `config.yaml` configuration file. If the number of input and output columns does not match the configuration file, it will result in errors.
+
+The following is the SQL syntax definition for model registration.
+
+```SQL
+create model <model_name> using uri <uri>
+```
+
+The specific meanings of the parameters in the SQL are as follows:
+
+- model_name: a globally unique identifier for the model, which cannot be repeated. The model name has the following constraints:
+
+ - Identifiers [ 0-9 a-z A-Z _ ] (letters, numbers, underscores) are allowed.
+ - Length is limited to 2-64 characters
+ - Case sensitive
+
+- uri: resource path to the model registration file, which should contain the **model weights model.pt file and the model's metadata description file config.yaml**.
+
+  - Model weight file: the weight file obtained after training of the deep learning model has completed; currently, .pt files produced by PyTorch training are supported
+
+ - yaml metadata description file: parameters related to the model structure that need to be provided when the model is registered, which must contain the input and output dimensions of the model for model inference:
+
+ - | **Parameter name** | **Parameter description** | **Example** |
+ | ------------ | ---------------------------- | -------- |
+ | input_shape | Rows and columns of model inputs for model inference | [96,2] |
+ | output_shape | rows and columns of model outputs, for model inference | [48,2] |
+
+ - In addition to model inference, the data types of model input and output can be specified:
+
+ - | **Parameter name** | **Parameter description** | **Example** |
+ | ----------- | ------------------ | --------------------- |
+ | input_type | model input data type | ['float32','float32'] |
+ | output_type | data type of the model output | ['float32','float32'] |
+
+ - In addition to this, additional notes can be specified for display during model management
+
+ - | **Parameter name** | **Parameter description** | **Examples** |
+ | ---------- | ---------------------------------------------- | ------------------------------------------- |
+ | attributes | optional, user-defined model notes for model display | 'model_type': 'dlinear','kernel_size': '25' |
+
+
+In addition to registration of local model files, registration can also be done by specifying remote resource paths via URIs, using open source model repositories (e.g. HuggingFace).
+
+#### Example
+
+The current example folder contains the model.pt and config.yaml files; model.pt is obtained from training, and the content of config.yaml is as follows:
+
+```YAML
+configs:
+  # Required options
+  input_shape: [96, 2] # The model receives data in 96 rows x 2 columns.
+  output_shape: [48, 2] # The model outputs data in 48 rows x 2 columns.
+
+  # Optional. Defaults to float32 for all columns; the number of entries must match the number of columns in the shape.
+  input_type: ["int64", "int64"] # Input data types, must match the number of columns.
+  output_type: ["text", "int64"] # Output data types, must match the number of columns.
+
+attributes: # Optional, user-defined notes used for model display.
+ 'model_type': 'dlinear'
+ 'kernel_size': '25'
+```
+
+Specify this folder as the load path to register the model.
+
+```SQL
+IoTDB> create model dlinear_example using uri "file://./example"
+```
+
+Alternatively, you can download the corresponding model file from huggingFace and register it.
+
+```SQL
+IoTDB> create model dlinear_example using uri "https://huggingface.com/IoTDBML/dlinear/"
+```
+
+After the SQL is executed, registration proceeds asynchronously. You can check the registration status of the model with show models (see the Viewing Models section); the time required for a successful registration mainly depends on the size of the model file.
+
+Once the model registration is complete, you can call specific functions and perform model inference by using normal queries.
+
+### Viewing Models
+
+Successfully registered models can be queried for model-specific information through the show models command. The SQL definition is as follows:
+
+```SQL
+show models
+
+show models <model_id>
+```
+
+In addition to displaying information about all models directly, you can specify a model id to view information about a specific model. The results of the model show contain the following information:
+
+| **ModelId** | **State** | **Configs** | **Attributes** |
+| ------------ | ------------------------------------- | ---------------------------------------------- | -------------- |
+| Unique model identifier | Model registration status (LOADING, ACTIVE, DROPPING) | inputShape, outputShape, inputTypes, outputTypes | Model notes |
+
+State is used to show the current registration status of the model, which includes the following states:
+
+- **LOADING**: The corresponding model meta information has been added to the ConfigNode, and the model file is being transferred to the AINode node.
+- **ACTIVE**: The model has been set up and is in the available state.
+- **DROPPING**: Model deletion is in progress; model-related information is being deleted from the ConfigNode and AINode.
+- **UNAVAILABLE**: Model creation failed; the failed model can be deleted by its model_name with drop model.
+
+#### Example
+
+```SQL
+IoTDB> show models
+
+
++---------------------+--------------------------+-----------+----------------------------+-----------------------+
+| ModelId| ModelType| State| Configs| Notes|
++---------------------+--------------------------+-----------+----------------------------+-----------------------+
+| dlinear_example| USER_DEFINED| ACTIVE| inputShape:[96,2]| |
+| | | | outputShape:[48,2]| |
+| | | | inputDataType:[float,float]| |
+| | | |outputDataType:[float,float]| |
+| _STLForecaster| BUILT_IN_FORECAST| ACTIVE| |Built-in model in IoTDB|
+| _NaiveForecaster| BUILT_IN_FORECAST| ACTIVE| |Built-in model in IoTDB|
+| _ARIMA| BUILT_IN_FORECAST| ACTIVE| |Built-in model in IoTDB|
+|_ExponentialSmoothing| BUILT_IN_FORECAST| ACTIVE| |Built-in model in IoTDB|
+| _GaussianHMM|BUILT_IN_ANOMALY_DETECTION| ACTIVE| |Built-in model in IoTDB|
+| _GMMHMM|BUILT_IN_ANOMALY_DETECTION| ACTIVE| |Built-in model in IoTDB|
+| _Stray|BUILT_IN_ANOMALY_DETECTION| ACTIVE| |Built-in model in IoTDB|
++---------------------+--------------------------+-----------+------------------------------------------------------------+-----------------------+
+```
+
+Since we registered the corresponding model earlier, we can check its status here; ACTIVE indicates that the model was registered successfully and can be used for inference.
+
+### Delete Model
+
+For a successfully registered model, the user can delete it via SQL. In addition to deleting the meta information on the configNode, this operation also deletes all the related model files under the AINode. The SQL is as follows:
+
+```SQL
+drop model <model_name>
+```
+
+You need to specify the model_name of a successfully registered model to delete it. Since model deletion involves removing data on multiple nodes, the operation does not complete immediately; the model state is DROPPING during this period, and a model in that state cannot be used for inference.
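+
+For example, to delete the model registered in the earlier example (a minimal sketch reusing the `dlinear_example` name from above):
+
+```Shell
+IoTDB> drop model dlinear_example
+```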
+
+### Inference with Built-in Models
+
+The SQL syntax is as follows:
+
+
+```SQL
+call inference(<built_in_model_name>,sql[,<parameterName>=<parameterValue>])
+```
+
+Built-in model inference does not require a registration process. It can be invoked with the call keyword and the inference function; the corresponding parameters are described as follows:
+
+- **built_in_model_name**: built-in model name
+- **parameterName**: parameter name
+- **parameterValue**: parameter value
+
+#### Built-in Models and Parameter Descriptions
+
+The following machine learning models are currently built-in, please refer to the following links for detailed parameter descriptions.
+
+| Model | built_in_model_name | Task type | Parameter description |
+| -------------------- | --------------------- | -------- | ------------------------------------------------------------ |
+| Arima | _Arima | Forecast | [Arima Parameter description](https://www.sktime.net/en/latest/api_reference/auto_generated/sktime.forecasting.arima.ARIMA.html?highlight=Arima) |
+| STLForecaster | _STLForecaster | Forecast | [STLForecaster Parameter description](https://www.sktime.net/en/latest/api_reference/auto_generated/sktime.forecasting.trend.STLForecaster.html#sktime.forecasting.trend.STLForecaster) |
+| NaiveForecaster | _NaiveForecaster | Forecast | [NaiveForecaster Parameter description](https://www.sktime.net/en/latest/api_reference/auto_generated/sktime.forecasting.naive.NaiveForecaster.html#naiveforecaster) |
+| ExponentialSmoothing | _ExponentialSmoothing | Forecast | [ExponentialSmoothing Parameter description](https://www.sktime.net/en/latest/api_reference/auto_generated/sktime.forecasting.exp_smoothing.ExponentialSmoothing.html) |
+| GaussianHMM | _GaussianHMM | Annotation | [GaussianHMM Parameter description](https://www.sktime.net/en/latest/api_reference/auto_generated/sktime.annotation.hmm_learn.gaussian.GaussianHMM.html) |
+| GMMHMM | _GMMHMM | Annotation | [GMMHMM Parameter description](https://www.sktime.net/en/latest/api_reference/auto_generated/sktime.annotation.hmm_learn.gmm.GMMHMM.html) |
+| Stray | _Stray | Anomaly detection | [Stray Parameter description](https://www.sktime.net/en/latest/api_reference/auto_generated/sktime.annotation.stray.STRAY.html) |
+
+
+#### Example
+
+The following is an example of using built-in model inference. The built-in Stray model, an anomaly detection algorithm, is used here; its input is `[144,1]` and its output is `[144,1]`. We invoke it for inference through SQL.
+
+```SQL
+IoTDB> select * from root.eg.airline
++-----------------------------+------------------+
+| Time|root.eg.airline.s0|
++-----------------------------+------------------+
+|1949-01-31T00:00:00.000+08:00| 224.0|
+|1949-02-28T00:00:00.000+08:00| 118.0|
+|1949-03-31T00:00:00.000+08:00| 132.0|
+|1949-04-30T00:00:00.000+08:00| 129.0|
+......
+|1960-09-30T00:00:00.000+08:00| 508.0|
+|1960-10-31T00:00:00.000+08:00| 461.0|
+|1960-11-30T00:00:00.000+08:00| 390.0|
+|1960-12-31T00:00:00.000+08:00| 432.0|
++-----------------------------+------------------+
+Total line number = 144
+
+IoTDB> call inference(_Stray, "select s0 from root.eg.airline", k=2)
++-------+
+|output0|
++-------+
+| 0|
+| 0|
+| 0|
+| 0|
+......
+| 1|
+| 1|
+| 0|
+| 0|
+| 0|
+| 0|
++-------+
+Total line number = 144
+```
+
+### Inference with Deep Learning Models
+
+The SQL syntax is as follows:
+
+```SQL
+call inference(<model_name>,sql[,window=<window_function>])
+
+
+window_function:
+ head(window_size)
+ tail(window_size)
+ count(window_size,sliding_step)
+```
+
+After model registration is complete, inference can be invoked with the call keyword and the inference function; the corresponding parameters are described as follows:
+
+- **model_name**: corresponds to a registered model
+- **sql**: sql query statement, the result of the query is used as input to the model for model inference. The dimensions of the rows and columns in the result of the query need to match the size specified in the specific model config. (It is not recommended to use the `SELECT *` clause for the sql here because in IoTDB, `*` does not sort the columns, so the order of the columns is undefined; you can use `SELECT s0,s1` to ensure that the column order matches the expectations of the model input)
+- **window_function**: Window functions that can be used in the inference process, there are currently three types of window functions provided to assist in model inference:
+ - **head(window_size)**: Get the top window_size points in the data for model inference, this window can be used for data cropping.
+ 
+
+ - **tail(window_size)**: get the last window_size point in the data for model inference, this window can be used for data cropping.
+ 
+
+ - **count(window_size, sliding_step)**: a sliding window based on the number of points; the data in each window is run through the model separately. As shown in the example below, a window function with window_size 2 splits the input dataset into three windows, and each window performs inference and produces its own result. This window can be used for continuous inference.
+ 
+
+**Explanation 1**: window can be used to solve the problem of cropping rows when the results of the sql query and the input row requirements of the model do not match. Note that when the number of columns does not match or the number of rows is directly less than the model requirement, the inference cannot proceed and an error message will be returned.
+
+**Explanation 2**: In deep learning applications, timestamp-derived features (time columns in the data) are often used as covariates in generative tasks, and are input into the model together to enhance the model, but the time columns are generally not included in the model's output. In order to ensure the generality of the implementation, the model inference results only correspond to the real output of the model, if the model does not output the time column, it will not be included in the results.
+
+
+#### Example
+
+The following is an example of inference with a deep learning model, using the `dlinear` prediction model with input `[96,2]` and output `[48,2]` mentioned above, invoked via SQL.
+
+```Shell
+IoTDB> select s0,s1 from root.**
++-----------------------------+-------------------+-------------------+
+| Time| root.eg.etth.s0| root.eg.etth.s1|
++-----------------------------+-------------------+-------------------+
+|1990-01-01T00:00:00.000+08:00| 0.7855| 1.611|
+|1990-01-02T00:00:00.000+08:00| 0.7818| 1.61|
+|1990-01-03T00:00:00.000+08:00| 0.7867| 1.6293|
+|1990-01-04T00:00:00.000+08:00| 0.786| 1.637|
+|1990-01-05T00:00:00.000+08:00| 0.7849| 1.653|
+|1990-01-06T00:00:00.000+08:00| 0.7866| 1.6537|
+|1990-01-07T00:00:00.000+08:00| 0.7886| 1.662|
+......
+|1990-03-31T00:00:00.000+08:00| 0.7585| 1.678|
+|1990-04-01T00:00:00.000+08:00| 0.7587| 1.6763|
+|1990-04-02T00:00:00.000+08:00| 0.76| 1.6813|
+|1990-04-03T00:00:00.000+08:00| 0.7669| 1.684|
+|1990-04-04T00:00:00.000+08:00| 0.7645| 1.677|
+|1990-04-05T00:00:00.000+08:00| 0.7625| 1.68|
+|1990-04-06T00:00:00.000+08:00| 0.7617| 1.6917|
++-----------------------------+-------------------+-------------------+
+Total line number = 96
+
+IoTDB> call inference(dlinear_example,"select s0,s1 from root.**")
++--------------------------------------------+-----------------------------+
+| _result_0| _result_1|
++--------------------------------------------+-----------------------------+
+| 0.726302981376648| 1.6549958229064941|
+| 0.7354921698570251| 1.6482787370681763|
+| 0.7238251566886902| 1.6278168201446533|
+......
+| 0.7692174911499023| 1.654654049873352|
+| 0.7685555815696716| 1.6625318765640259|
+| 0.7856493592262268| 1.6508299350738525|
++--------------------------------------------+-----------------------------+
+Total line number = 48
+```
+
+#### Example of using the tail/head window function
+
+When the amount of data is variable and you want to take the latest 96 rows of data for inference, you can use the corresponding window function tail. The head function is used in a similar way, except that it takes the earliest 96 points.
+
+```Shell
+IoTDB> select s0,s1 from root.**
++-----------------------------+-------------------+-------------------+
+| Time| root.eg.etth.s0| root.eg.etth.s1|
++-----------------------------+-------------------+-------------------+
+|1988-01-01T00:00:00.000+08:00| 0.7355| 1.211|
+......
+|1990-01-01T00:00:00.000+08:00| 0.7855| 1.611|
+|1990-01-02T00:00:00.000+08:00| 0.7818| 1.61|
+|1990-01-03T00:00:00.000+08:00| 0.7867| 1.6293|
+|1990-01-04T00:00:00.000+08:00| 0.786| 1.637|
+|1990-01-05T00:00:00.000+08:00| 0.7849| 1.653|
+|1990-01-06T00:00:00.000+08:00| 0.7866| 1.6537|
+|1990-01-07T00:00:00.000+08:00| 0.7886| 1.662|
+......
+|1990-03-31T00:00:00.000+08:00| 0.7585| 1.678|
+|1990-04-01T00:00:00.000+08:00| 0.7587| 1.6763|
+|1990-04-02T00:00:00.000+08:00| 0.76| 1.6813|
+|1990-04-03T00:00:00.000+08:00| 0.7669| 1.684|
+|1990-04-04T00:00:00.000+08:00| 0.7645| 1.677|
+|1990-04-05T00:00:00.000+08:00| 0.7625| 1.68|
+|1990-04-06T00:00:00.000+08:00| 0.7617| 1.6917|
++-----------------------------+-------------------+-------------------+
+Total line number = 996
+
+IoTDB> call inference(dlinear_example,"select s0,s1 from root.**",window=tail(96))
++--------------------------------------------+-----------------------------+
+| _result_0| _result_1|
++--------------------------------------------+-----------------------------+
+| 0.726302981376648| 1.6549958229064941|
+| 0.7354921698570251| 1.6482787370681763|
+| 0.7238251566886902| 1.6278168201446533|
+......
+| 0.7692174911499023| 1.654654049873352|
+| 0.7685555815696716| 1.6625318765640259|
+| 0.7856493592262268| 1.6508299350738525|
++--------------------------------------------+-----------------------------+
+Total line number = 48
+```
+
+#### Example of using the count window function
+
+This window is mainly used for computational tasks. When the task's corresponding model can only handle a fixed number of rows of data at a time, but the final desired outcome is multiple sets of prediction results, this window function can be used to perform continuous inference using a sliding window of points. Suppose we now have an anomaly detection model `anomaly_example(input: [24,2], output[1,1])`, which generates a 0/1 label for every 24 rows of data. An example of its use is as follows:
+
+```Shell
+IoTDB> select s0,s1 from root.**
++-----------------------------+-------------------+-------------------+
+| Time| root.eg.etth.s0| root.eg.etth.s1|
++-----------------------------+-------------------+-------------------+
+|1990-01-01T00:00:00.000+08:00| 0.7855| 1.611|
+|1990-01-02T00:00:00.000+08:00| 0.7818| 1.61|
+|1990-01-03T00:00:00.000+08:00| 0.7867| 1.6293|
+|1990-01-04T00:00:00.000+08:00| 0.786| 1.637|
+|1990-01-05T00:00:00.000+08:00| 0.7849| 1.653|
+|1990-01-06T00:00:00.000+08:00| 0.7866| 1.6537|
+|1990-01-07T00:00:00.000+08:00| 0.7886| 1.662|
+......
+|1990-03-31T00:00:00.000+08:00| 0.7585| 1.678|
+|1990-04-01T00:00:00.000+08:00| 0.7587| 1.6763|
+|1990-04-02T00:00:00.000+08:00| 0.76| 1.6813|
+|1990-04-03T00:00:00.000+08:00| 0.7669| 1.684|
+|1990-04-04T00:00:00.000+08:00| 0.7645| 1.677|
+|1990-04-05T00:00:00.000+08:00| 0.7625| 1.68|
+|1990-04-06T00:00:00.000+08:00| 0.7617| 1.6917|
++-----------------------------+-------------------+-------------------+
+Total line number = 96
+
+IoTDB> call inference(anomaly_example,"select s0,s1 from root.**",window=count(24,24))
++-------------------------+
+| _result_0|
++-------------------------+
+| 0|
+| 1|
+| 1|
+| 0|
++-------------------------+
+Total line number = 4
+```
+
+In the result set, each row's label corresponds to the output of the anomaly detection model after inputting each group of 24 rows of data.
+
+## Privilege Management
+
+When using AINode-related functions, IoTDB's own authentication can be used for permission management: users can use the model management functions only if they have the USE_MODEL privilege, and when using the inference function they must also have permission to access the source series referenced by the SQL that feeds the model.
+
+| Privilege Name | Privilege Scope | Administrator User (default ROOT) | Normal User | Path Related |
+| --------- | --------------------------------- | ---------------------- | -------- | -------- |
+| USE_MODEL | create model / show models / drop model | √ | √ | x |
+| READ_DATA | call inference | √ | √ | √ |
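+
+As a rough sketch of how these privileges might be granted to a non-root user (this assumes IoTDB's standard `GRANT ... ON ... TO USER ...` syntax applies to these privileges and that a user named `analyst` already exists; consult the Security Management documentation for the exact form):
+
+```Shell
+# Assumed syntax; "analyst" is a hypothetical user, adjust names and paths to your deployment
+IoTDB> GRANT USE_MODEL ON root.** TO USER analyst
+IoTDB> GRANT READ_DATA ON root.eg.** TO USER analyst
+```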
+
+## Practical Examples
+
+### Power Load Prediction
+
+In some industrial scenarios, there is a need to predict power loads, which can be used to optimise power supply, conserve energy and resources, support planning and expansion, and enhance power system reliability.
+
+The data for the test set of ETTh1 that we use is [ETTh1](https://alioss.timecho.com/docs/img/ETTh1.csv).
+
+
+It contains power data collected at 1-hour intervals; each record consists of six load values and the oil temperature: High UseFul Load, High UseLess Load, Middle UseFul Load, Middle UseLess Load, Low UseFul Load, Low UseLess Load, and Oil Temperature.
+
+On this dataset, the model inference function of IoTDB-ML can predict the oil temperature in the future period of time through the relationship between the past values of high, middle and low use loads and the corresponding time stamp oil temperature, which empowers the automatic regulation and monitoring of grid transformers.
+
+#### Step 1: Data Import
+
+Users can import the ETT dataset into IoTDB using `import-csv.sh` in the tools folder
+
+```Bash
+bash ./import-csv.sh -h 127.0.0.1 -p 6667 -u root -pw root -f ../../ETTh1.csv
+```
+
+#### Step 2: Model Import
+
+We can enter the following SQL in the IoTDB CLI to pull a trained model from HuggingFace and register it for subsequent inference.
+
+```SQL
+create model dlinear using uri 'https://huggingface.co/hvlgo/dlinear/tree/main'
+```
+
+This model is trained with DLinear, a relatively lightweight deep model that captures trends within a sequence and relationships between variables while keeping inference fast, which makes it better suited to fast real-time prediction than heavier deep models.
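+
+Since registration runs asynchronously, you can optionally run `show models` first and wait until the newly created model (named `dlinear` in the statement above) reaches the ACTIVE state before running inference:
+
+```Shell
+IoTDB> show models
+```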
+
+#### Step 3: Model inference
+
+```Shell
+IoTDB> select s0,s1,s2,s3,s4,s5,s6 from root.eg.etth LIMIT 96
++-----------------------------+---------------+---------------+---------------+---------------+---------------+---------------+---------------+
+| Time|root.eg.etth.s0|root.eg.etth.s1|root.eg.etth.s2|root.eg.etth.s3|root.eg.etth.s4|root.eg.etth.s5|root.eg.etth.s6|
++-----------------------------+---------------+---------------+---------------+---------------+---------------+---------------+---------------+
+|2017-10-20T00:00:00.000+08:00| 10.449| 3.885| 8.706| 2.025| 2.041| 0.944| 8.864|
+|2017-10-20T01:00:00.000+08:00| 11.119| 3.952| 8.813| 2.31| 2.071| 1.005| 8.442|
+|2017-10-20T02:00:00.000+08:00| 9.511| 2.88| 7.533| 1.564| 1.949| 0.883| 8.16|
+|2017-10-20T03:00:00.000+08:00| 9.645| 2.21| 7.249| 1.066| 1.828| 0.914| 7.949|
+......
+|2017-10-23T20:00:00.000+08:00| 8.105| 0.938| 4.371| -0.569| 3.533| 1.279| 9.708|
+|2017-10-23T21:00:00.000+08:00| 7.167| 1.206| 4.087| -0.462| 3.107| 1.432| 8.723|
+|2017-10-23T22:00:00.000+08:00| 7.1| 1.34| 4.015| -0.32| 2.772| 1.31| 8.864|
+|2017-10-23T23:00:00.000+08:00| 9.176| 2.746| 7.107| 1.635| 2.65| 1.097| 9.004|
++-----------------------------+---------------+---------------+---------------+---------------+---------------+---------------+---------------+
+Total line number = 96
+
+IoTDB> call inference(dlinear_example, "select s0,s1,s2,s3,s4,s5,s6 from root.eg.etth", window=head(96))
++-----------+----------+----------+------------+---------+----------+----------+
+| output0| output1| output2| output3| output4| output5| output6|
++-----------+----------+----------+------------+---------+----------+----------+
+| 10.319546| 3.1450553| 7.877341| 1.5723765|2.7303758| 1.1362307| 8.867775|
+| 10.443649| 3.3286757| 7.8593454| 1.7675098| 2.560634| 1.1177158| 8.920919|
+| 10.883752| 3.2341104| 8.47036| 1.6116762|2.4874182| 1.1760603| 8.798939|
+......
+| 8.0115595| 1.2995274| 6.9900327|-0.098746896| 3.04923| 1.176214| 9.548782|
+| 8.612427| 2.5036244| 5.6790237| 0.66474205|2.8870275| 1.2051733| 9.330128|
+| 10.096699| 3.399722| 6.9909| 1.7478468|2.7642853| 1.1119363| 9.541455|
++-----------+----------+----------+------------+---------+----------+----------+
+Total line number = 48
+```
+
+Comparing the predicted oil temperature with the actual values gives the following image.
+
+The data before 10/24 00:00 is the historical input to the model; the blue line after 10/24 00:00 is the oil temperature forecast produced by the model, and the red line is the actual oil temperature from the dataset (shown for comparison).
+
+
+
+As can be seen, we used the relationship between the six load series and the corresponding oil temperature over the past 96 hours (4 days), together with the inter-series relationships learned during training, to model how the oil temperature may change over the next 48 hours (2 days). After visualisation, the predicted curve stays highly consistent with the actual result in terms of trend.
+
+### Power Prediction
+
+Power monitoring of current, voltage and power data is required in substations for detecting potential grid problems, identifying faults in the power system, effectively managing grid loads and analysing power system performance and trends.
+
+We built a dataset from real current, voltage and power data collected in a substation. The dataset contains measurements such as A-phase, B-phase and C-phase voltage, sampled every 5-6 s over a span of nearly four months.
+
+The test set data content is [data](https://alioss.timecho.com/docs/img/data.csv).
+
+On this dataset, the model inference function of IoTDB-ML can predict the C-phase voltage in the future period through the previous values and corresponding timestamps of A-phase voltage, B-phase voltage and C-phase voltage, empowering the monitoring management of the substation.
+
+#### Step 1: Data Import
+
+Users can import the dataset using `import-csv.sh` in the tools folder.
+
+```Bash
+bash ./import-csv.sh -h 127.0.0.1 -p 6667 -u root -pw root -f ... /... /data.csv
+```
+
+#### Step 2: Model Import
+
+We can select built-in models or registered models in IoTDB CLI for subsequent inference.
+
+We use the built-in model STLForecaster for prediction. STLForecaster is a time series forecasting method based on the STL implementation in the statsmodels library.
+
+#### Step 3: Model Inference
+
+```Shell
+IoTDB> select * from root.eg.voltage limit 96
++-----------------------------+------------------+------------------+------------------+
+| Time|root.eg.voltage.s0|root.eg.voltage.s1|root.eg.voltage.s2|
++-----------------------------+------------------+------------------+------------------+
+|2023-02-14T20:38:32.000+08:00| 2038.0| 2028.0| 2041.0|
+|2023-02-14T20:38:38.000+08:00| 2014.0| 2005.0| 2018.0|
+|2023-02-14T20:38:44.000+08:00| 2014.0| 2005.0| 2018.0|
+......
+|2023-02-14T20:47:52.000+08:00| 2024.0| 2016.0| 2027.0|
+|2023-02-14T20:47:57.000+08:00| 2024.0| 2016.0| 2027.0|
+|2023-02-14T20:48:03.000+08:00| 2024.0| 2016.0| 2027.0|
++-----------------------------+------------------+------------------+------------------+
+Total line number = 96
+
+IoTDB> call inference(_STLForecaster, "select s0,s1,s2 from root.eg.voltage", window=head(96),predict_length=48)
++---------+---------+---------+
+| output0| output1| output2|
++---------+---------+---------+
+|2026.3601|2018.2953|2029.4257|
+|2019.1538|2011.4361|2022.0888|
+|2025.5074|2017.4522|2028.5199|
+......
+|2022.2336|2015.0290|2025.1023|
+|2015.7241|2008.8975|2018.5085|
+|2022.0777|2014.9136|2024.9396|
+|2015.5682|2008.7821|2018.3458|
++---------+---------+---------+
+Total line number = 48
+```
+
+Comparing the predicted results of the C-phase voltage with the real results, we can get the following image.
+
+The data before 02/14 20:48 represents the past data input to the model, the blue line after 02/14 20:48 is the predicted result of phase C voltage given by the model, while the red line is the actual phase C voltage data from the dataset (used for comparison).
+
+
+
+It can be seen that we used the voltage data from the past 10 minutes and, based on the previously learned inter-sequence relationships, modeled the possible changes in the phase C voltage data for the next 5 minutes. The visualized forecast curve shows a certain degree of synchronicity with the actual results in terms of trend.
+
+### Anomaly Detection
+
+In the civil aviation and transport industry, there is a need to detect anomalies in the number of passengers on a flight. The detection results can be used to guide adjustments to flight scheduling and make operations more efficient.
+
+Airline Passengers is a time series dataset that records the number of international air passengers between 1949 and 1960, sampled at one-month intervals. The dataset contains a single time series and is available here: [airline](https://alioss.timecho.com/docs/img/airline.csv).
+
+On this dataset, the model inference function of IoTDB-ML can empower the transport industry by capturing the changing patterns of the series in order to detect anomalous time points.
+
+#### Step 1: Data Import
+
+Users can import the dataset using `import-csv.sh` in the tools folder.
+
+```Bash
+bash ./import-csv.sh -h 127.0.0.1 -p 6667 -u root -pw root -f ... /... /airline.csv
+```
+
+#### Step 2: Model Inference
+
+IoTDB has some built-in machine learning algorithms that can be used directly. A sample run using one of the anomaly detection algorithms is shown below:
+
+```Shell
+IoTDB> select * from root.eg.airline
++-----------------------------+------------------+
+| Time|root.eg.airline.s0|
++-----------------------------+------------------+
+|1949-01-31T00:00:00.000+08:00| 224.0|
+|1949-02-28T00:00:00.000+08:00| 118.0|
+|1949-03-31T00:00:00.000+08:00| 132.0|
+|1949-04-30T00:00:00.000+08:00| 129.0|
+......
+|1960-09-30T00:00:00.000+08:00| 508.0|
+|1960-10-31T00:00:00.000+08:00| 461.0|
+|1960-11-30T00:00:00.000+08:00| 390.0|
+|1960-12-31T00:00:00.000+08:00| 432.0|
++-----------------------------+------------------+
+Total line number = 144
+
+IoTDB> call inference(_Stray, "select s0 from root.eg.airline", k=2)
++-------+
+|output0|
++-------+
+| 0|
+| 0|
+| 0|
+| 0|
+......
+| 1|
+| 1|
+| 0|
+| 0|
+| 0|
+| 0|
++-------+
+Total line number = 144
+```
+
+Plotting the detection results gives the following image, where the blue curve is the original time series and the red dots mark the time points that the algorithm detects as anomalies.
+
+
+
+It can be seen that the Stray model has modelled the input sequence changes and successfully detected the time points where anomalies occur.
\ No newline at end of file
diff --git a/src/UserGuide/latest/Deployment-and-Maintenance/AINode_Deployment_apache.md b/src/UserGuide/latest/Deployment-and-Maintenance/AINode_Deployment_apache.md
new file mode 100644
index 000000000..610ec0b72
--- /dev/null
+++ b/src/UserGuide/latest/Deployment-and-Maintenance/AINode_Deployment_apache.md
@@ -0,0 +1,522 @@
+
+# AINode Deployment
+
+## AINode Introduction
+
+### Capability Introduction
+
+ AINode is the third type of endogenous node provided by IoTDB after the Configurable Node and DataNode. This node extends its ability to perform machine learning analysis on time series by interacting with the DataNode and Configurable Node of the IoTDB cluster. It supports the introduction of existing machine learning models from external sources for registration and the use of registered models to complete time series analysis tasks on specified time series data through simple SQL statements. The creation, management, and inference of models are integrated into the database engine. Currently, machine learning algorithms or self-developed models are available for common time series analysis scenarios, such as prediction and anomaly detection.
+
+### Delivery Method
+ It is an additional package outside the IoTDB cluster, with independent installation and activation (if you need to try or use it, please contact Timecho Technology Business or Technical Support).
+
+### Deployment mode
+
+

+

+
+
+## Installation preparation
+
+### Get installation package
+
+ Users can download the AINode software installation package and unzip it to complete the installation of AINode.
+
+ After unzipping the installation package (`iotdb-enterprise-ainode-.zip`), the directory structure is as follows:
+| **Directory** | **Type** | **Description** |
+| ------------ | -------- | ------------------------------------------------ |
+| lib | folder | Compiled AINode binaries and related code dependencies |
+| sbin | folder | Scripts for starting, removing, and stopping AINode |
+| conf | folder | Configuration files for AINode, including the configuration items described below |
+| LICENSE | file | License file |
+| NOTICE | file | Notice file |
+| README_ZH.md | file | Chinese README in Markdown format |
+| `README.md` | file | English README |
+
+### Environment preparation
+- Suggested operating environments: Ubuntu, CentOS, MacOS
+
+- Runtime Environment
+ - In a networked environment, any Python >= 3.8 and <= 3.14 with the pip and venv tools is sufficient. In a non-networked environment, Python 3.8 is required: download the zip package for the corresponding operating system from [here](https://cloud.tsinghua.edu.cn/d/4c1342f6c272439aa96c/?p=%2Flibs&mode=list) (note that you need to select the zip file in the libs folder, as shown in the following figure), copy all files in the folder to the `lib` folder under the `iotdb-enterprise-ainode-` folder, and then follow the steps below to start AINode.
+
+
+
+ - A Python interpreter that can be invoked directly with the `python` command must be available on the PATH.
+ - It is recommended to create a venv virtual environment in the `iotdb-enterprise-ainode-` folder. To create a virtual environment with Python 3.8.0, the command is as follows:
+ ```shell
+ # Use Python 3.8.0 to create a virtual environment in a folder named `venv`
+ ../Python-3.8.0/python -m venv venv
+ ```
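+
+ A quick way to confirm the new environment before pointing AINode at it (a minimal sketch; the reported version depends on the interpreter you used) is:
+ ```shell
+ # Activate the virtual environment created above and check its interpreter version
+ source venv/bin/activate
+ python --version   # should report the Python version the venv was created with
+ deactivate         # leave the virtual environment when finished
+ ```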
+
+## Installation steps
+
+### Install AINode
+
+
+ 1. Check the kernel architecture of Linux
+```shell
+ uname -m
+ ```
+
+ 2. Import Python environment [Download](https://repo.anaconda.com/miniconda/)
+
+ It is recommended to download the py311 version of the installer and copy it into the dedicated iotdb folder under the user's home directory.
+
+ 3. Switch to the iotdb dedicated folder to install the Python environment
+
+ Taking Miniconda3-py311_24.5.0-0-Linux-x86_64 as an example:
+
+```shell
+ bash ./Miniconda3-py311_24.5.0-0-Linux-x86_64.sh
+ ```
+> Type "Enter", "Long press space", "Enter", "Yes", "Yes" according to the prompt
+> Close the current SSH window and reconnect
+
+ 4. Create a dedicated environment
+
+```shell
+ conda create -n ainode_py python=3.11.9
+ ```
+
+ Type 'y' according to the prompt
+
+ 5. Activate dedicated environment
+
+```shell
+ conda activate ainode_py
+ ```
+
+ 6. Verify Python version
+
+```shell
+ python --version
+ ```
+ 7. Download and import AINode to a dedicated folder, switch to the dedicated folder and extract the installation package
+
+```shell
+ unzip iotdb-enterprise-ainode-1.3.3.2.zip
+ ```
+
+ 8. Configuration item modification
+
+```shell
+ vi iotdb-enterprise-ainode-1.3.3.2/conf/iotdb-ainode.properties
+ ```
+ Configuration item modification: see the [detailed information](#configuration-item-modification) below.
+
+> ain_seed_config_node=iotdb-1:10710 (Cluster communication node IP: communication node port)
+> ain_inference_rpc_address=iotdb-3 (IP address of the server running AINode)
+
+ 9. Replace the pip package source
+
+```shell
+ pip config set global.index-url https://mirrors.aliyun.com/pypi/simple/
+ ```
+
+ 10. Start the AINode node
+
+```shell
+ nohup bash iotdb-enterprise-ainode-1.3.3.2/sbin/start-ainode.sh > myout.file 2>&1 &
+ ```
+> To return to the system's default environment, run `conda deactivate`
+
+### Configuration item modification
+
+AINode supports modifying some necessary parameters. You can find the following parameters in the `conf/iotdb-ainode.properties` file and make persistent modifications to them:
+
+| **Name** | **Description** | **Type** | **Default value** | **When the modification takes effect** |
+| :----------------------------- | ------------------------------------------------------------ | ------- | ------------------ | ---------------------------- |
+| cluster_name | Identifier of the cluster that AINode joins | string | defaultCluster | Can only be modified before the first service startup |
+| ain_seed_config_node | ConfigNode address that AINode registers with at startup | String | 127.0.0.1:10710 | Can only be modified before the first service startup |
+| ain_inference_rpc_address | Address on which AINode provides services and internal communication | String | 127.0.0.1 | Can only be modified before the first service startup |
+| ain_inference_rpc_port | Port on which AINode provides services and communication | String | 10810 | Can only be modified before the first service startup |
+| ain_system_dir | AINode metadata storage path; the base directory of a relative path depends on the operating system, so an absolute path is recommended | String | data/AINode/system | Can only be modified before the first service startup |
+| ain_models_dir | Path where AINode stores model files; the base directory of a relative path depends on the operating system, so an absolute path is recommended | String | data/AINode/models | Can only be modified before the first service startup |
+| ain_logs_dir | Path where AINode stores logs; the base directory of a relative path depends on the operating system, so an absolute path is recommended | String | logs/AINode | Effective after restart |
+| ain_thrift_compression_enabled | Whether AINode enables Thrift compression: 0 - disabled, 1 - enabled | Boolean | 0 | Effective after restart |
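+
+For reference, a minimal sketch of `conf/iotdb-ainode.properties` using the parameters above might look like the following; the values shown are the defaults from the table and are purely illustrative:
+
+```shell
+# Cluster to join and the seed ConfigNode to register with (illustrative values)
+cluster_name=defaultCluster
+ain_seed_config_node=127.0.0.1:10710
+# Address and port on which this AINode provides services
+ain_inference_rpc_address=127.0.0.1
+ain_inference_rpc_port=10810
+```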
+
+### Start AINode
+
+ After the Seed ConfigNode has been deployed, model registration and inference can be enabled by adding AINode nodes. After specifying the IoTDB cluster information in the configuration file, execute the corresponding command to start AINode and join it to the IoTDB cluster.
+
+#### Networked environment startup
+
+##### Start command
+
+```shell
+ # Start command
+ # Linux and MacOS systems
+ bash sbin/start-ainode.sh
+
+ # Windows systems
+ sbin\start-ainode.bat
+
+ # Backend startup command (recommended for long-term running)
+ # Linux and MacOS systems
+ nohup bash sbin/start-ainode.sh > myout.file 2>&1 &
+
+ # Windows systems
+ nohup bash sbin\start-ainode.bat > myout.file 2>&1 &
+ ```
+
+#### Detailed Syntax
+
+```shell
+ # Start command
+ # Linux and MacOS systems
+ bash sbin/start-ainode.sh -i <ain_interpreter_dir> -r -n
+
+ # Windows systems
+ sbin\start-ainode.bat -i <ain_interpreter_dir> -r -n
+ ```
+
+##### Parameter introduction:
+
+| **Name** | **Flag** | **Description** | **Required** | **Type** | **Default value** | **Input method** |
+| ------------------- | ---- | ------------------------------------------------------------ | -------- | ------ | ---------------- | ---------------------- |
+| ain_interpreter_dir | -i | Interpreter path of the virtual environment in which AINode is installed; an absolute path is required | No | String | Read from environment variables by default | Passed at call time, or persisted |
+| ain_force_reinstall | -r | Whether the script checks the AINode version when checking the installation status; if enabled, it forces installation of the whl package under lib when the version does not match | No | Bool | false | Passed at call time |
+| ain_no_dependencies | -n | Whether to skip installing dependencies when installing AINode; if enabled, only the AINode main program is installed without its dependencies | No | Bool | false | Passed at call time |
+
+ If you don't want to specify the corresponding parameters every time you start, you can also persistently modify the parameters in the `ainode-env.sh` and `ainode-env.bat` scripts in the `conf` folder (currently only the `ain_interpreter_dir` parameter supports persistent modification).
+
+ `ainode-env.sh` :
+ ```shell
+ # The default venv environment is used if ain_interpreter_dir is not set. Please use an absolute path without quotation marks
+ # ain_interpreter_dir=
+ ```
+ `ainode-env.bat` :
+```shell
+ @REM The default venv environment is used if ain_interpreter_dir is not set. Please use an absolute path without quotation marks
+ @REM set ain_interpreter_dir=
+ ```
+ After writing the parameter value, uncomment the corresponding line and save it to take effect on the next script execution.
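+
+ For instance, a hypothetical persisted entry in `conf/ainode-env.sh` could look like the following (the interpreter path is an illustrative example, not a required value):
+ ```shell
+ # Absolute path of the Python interpreter that AINode should use (example value)
+ ain_interpreter_dir=/opt/iotdb/venv/bin/python
+ ```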
+
+
+#### Example
+
+##### Directly start:
+
+```shell
+ # Start command
+ # Linux and MacOS systems
+ bash sbin/start-ainode.sh
+ # Windows systems
+ sbin\start-ainode.bat
+
+
+ # Backend startup command (recommended for long-term running)
+ # Linux and MacOS systems
+ nohup bash sbin/start-ainode.sh > myout.file 2>&1 &
+ # Windows systems
+ nohup bash sbin\start-ainode.bat > myout.file 2>&1 &
+ ```
+
+##### Update Start:
+If the version of AINode has been updated (such as updating the `lib` folder), this command can be used. Firstly, it is necessary to ensure that AINode has stopped running, and then restart it using the `-r` parameter, which will reinstall AINode based on the files under `lib`.
+
+
+```shell
+ # Update startup command
+ # Linux and MacOS systems
+ bash sbin/start-ainode.sh -r
+ # Windows systems
+ sbin\start-ainode.bat -r
+
+
+ # Backend startup command (recommended for long-term running)
+ # Linux and MacOS systems
+ nohup bash sbin/start-ainode.sh -r > myout.file 2>&1 &
+ # Windows systems
+ nohup bash sbin\start-ainode.bat -r > myout.file 2>&1 &
+ ```
+#### Non-networked environment startup
+
+##### Start command
+
+```shell
+ # Start command
+ # Linux and MacOS systems
+ bash sbin/start-ainode.sh
+
+ # Windows systems
+ sbin\start-ainode.bat
+
+ # Backend startup command (recommended for long-term running)
+ # Linux and MacOS systems
+ nohup bash sbin/start-ainode.sh > myout.file 2>&1 &
+
+ # Windows systems
+ nohup bash sbin\start-ainode.bat > myout.file 2>&1 &
+ ```
+
+#### Detailed Syntax
+
+```shell
+ # Start command
+ # Linux and MacOS systems
+ bash sbin/start-ainode.sh -i <ain_interpreter_dir> -r -n
+
+ # Windows systems
+ sbin\start-ainode.bat -i <ain_interpreter_dir> -r -n
+ ```
+
+##### Parameter introduction:
+
+| **Name** | **Flag** | **Description** | **Required** | **Type** | **Default value** | **Input method** |
+| ------------------- | ---- | ------------------------------------------------------------ | -------- | ------ | ---------------- | ---------------------- |
+| ain_interpreter_dir | -i | Interpreter path of the virtual environment in which AINode is installed; an absolute path is required | No | String | Read from environment variables by default | Passed at call time, or persisted |
+| ain_force_reinstall | -r | Whether the script checks the AINode version when checking the installation status; if enabled, it forces installation of the whl package under lib when the version does not match | No | Bool | false | Passed at call time |
+
+> Attention: If installation fails in a non-networked environment, first check whether the installation package matching the platform was selected, and then confirm that the Python version is 3.8 (because of the constraints of the downloaded offline packages, Python 3.7, 3.9, and other versions are not supported)
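+
+As a quick pre-check before an offline start, you can confirm the platform architecture and the interpreter version; the interpreter path below is illustrative and should point at your own Python 3.8 installation:
+
+```shell
+# Confirm the CPU architecture matches the offline packages you downloaded
+uname -m
+# Confirm the interpreter that AINode will use is Python 3.8.x
+/usr/local/python3/bin/python3 --version
+```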
+
+#### Example
+
+##### Directly start:
+
+```shell
+ # Start command
+ # Linux and MacOS systems
+ bash sbin/start-ainode.sh
+ # Windows systems
+ sbin\start-ainode.bat
+
+ # Backend startup command (recommended for long-term running)
+ # Linux and MacOS systems
+ nohup bash sbin/start-ainode.sh > myout.file 2>&1 &
+ # Windows systems
+ nohup bash sbin\start-ainode.bat > myout.file 2>&1 &
+ ```
+
+### Detecting the status of AINode nodes
+
+During the startup process of AINode, the new AINode will be automatically added to the IoTDB cluster. After starting AINode, you can enter SQL in the command line to query. If you see an AINode node in the cluster and its running status is Running (as shown below), it indicates successful joining.
+
+
+```shell
+IoTDB> show cluster
++------+----------+-------+---------------+------------+-------+-----------+
+|NodeID| NodeType| Status|InternalAddress|InternalPort|Version| BuildInfo|
++------+----------+-------+---------------+------------+-------+-----------+
+| 0|ConfigNode|Running| 127.0.0.1| 10710|UNKNOWN|190e303-dev|
+| 1| DataNode|Running| 127.0.0.1| 10730|UNKNOWN|190e303-dev|
+| 2| AINode|Running| 127.0.0.1| 10810|UNKNOWN|190e303-dev|
++------+----------+-------+---------------+------------+-------+-----------+
+```
+
+### Stop AINode
+
+If you need to stop a running AINode node, execute the corresponding shutdown script.
+
+#### Stop command
+
+```shell
+ # Linux / MacOS
+ bash sbin/stop-ainode.sh
+
+ #Windows
+ sbin\stop-ainode.bat
+ ```
+
+
+#### Detailed Syntax
+
+```shell
+ # Linux / MacOS
+ bash sbin/stop-ainode.sh -t
+
+ #Windows
+ sbin\stop-ainode.bat -t
+ ```
+
+##### Parameter introduction:
+
+| **Name** | **Flag** | **Description** | **Required** | **Type** | **Default value** | **Input method** |
+| ----------------- | ---- | ------------------------------------------------------------ | -------- | ------ | ------ | ---------- |
+| ain_remove_target | -t | When stopping AINode, the Node ID, address and port of the target AINode can be specified in the format `<AINode-id>/<ip>:<rpc-port>` | No | String | None | Passed at call time |
+
+#### Example
+
+```shell
+ # Linux / MacOS
+ bash sbin/stop-ainode.sh
+
+ # Windows
+ sbin\stop-ainode.bat
+ ```
+After stopping AINode, the AINode node is still visible in the cluster with status UNKNOWN (as shown below), and AINode functionality cannot be used in this state.
+
+ ```shell
+IoTDB> show cluster
++------+----------+-------+---------------+------------+-------+-----------+
+|NodeID| NodeType| Status|InternalAddress|InternalPort|Version| BuildInfo|
++------+----------+-------+---------------+------------+-------+-----------+
+| 0|ConfigNode|Running| 127.0.0.1| 10710|UNKNOWN|190e303-dev|
+| 1| DataNode|Running| 127.0.0.1| 10730|UNKNOWN|190e303-dev|
+| 2| AINode|UNKNOWN| 127.0.0.1| 10790|UNKNOWN|190e303-dev|
++------+----------+-------+---------------+------------+-------+-----------+
+```
+If you need to restart the node, you need to execute the startup script again.
+
+### Remove AINode
+
+When it is necessary to remove an AINode node from the cluster, a removal script can be executed. The difference between removing and stopping scripts is that stopping retains the AINode node in the cluster but stops the AINode service, while removing removes the AINode node from the cluster.
+
+#### Remove command
+
+
+```shell
+ # Linux / MacOS
+ bash sbin/remove-ainode.sh
+
+ # Windows
+ sbin\remove-ainode.bat
+ ```
+
+#### Detailed Syntax
+
+```shell
+ # Linux / MacOS
+ bash sbin/remove-ainode.sh -i <ain_interpreter_dir> -t <AINode-id>/<ip>:<rpc-port> -r -n
+
+ # Windows
+ sbin\remove-ainode.bat -i <ain_interpreter_dir> -t <AINode-id>/<ip>:<rpc-port> -r -n
+ ```
+
+##### Parameter introduction:
+
+| **Name** | **Flag** | **Description** | **Required** | **Type** | **Default value** | **Input method** |
+| ------------------- | ---- | ------------------------------------------------------------ | -------- | ------ | ---------------- | --------------------- |
+| ain_interpreter_dir | -i | Interpreter path of the virtual environment in which AINode is installed; an absolute path is required | No | String | Read from environment variables by default | Passed at call time, or persisted |
+| ain_remove_target | -t | When removing AINode, the Node ID, address and port of the target AINode can be specified in the format `<AINode-id>/<ip>:<rpc-port>` | No | String | None | Passed at call time |
+| ain_force_reinstall | -r | Whether the script checks the AINode version when checking the installation status; if enabled, it forces installation of the whl package under lib when the version does not match | No | Bool | false | Passed at call time |
+| ain_no_dependencies | -n | Whether to skip installing dependencies when installing AINode; if enabled, only the AINode main program is installed without its dependencies | No | Bool | false | Passed at call time |
+
+ If you don't want to specify the corresponding parameters every time you start, you can also persistently modify the parameters in the `ainode-env.sh` and `ainode-env.bat` scripts in the `conf` folder (currently only the `ain_interpreter_dir` parameter supports persistent modification).
+
+ `ainode-env.sh` :
+ ```shell
+ # The default venv environment is used if ain_interpreter_dir is not set. Please use an absolute path without quotation marks
+ # ain_interpreter_dir=
+ ```
+ `ainode-env.bat` :
+```shell
+ @REM The default venv environment is used if ain_interpreter_dir is not set. Please use an absolute path without quotation marks
+ @REM set ain_interpreter_dir=
+ ```
+ After writing the parameter value, uncomment the corresponding line and save it to take effect on the next script execution.
+
+#### Example
+
+##### Directly remove:
+
+ ```shell
+ # Linux / MacOS
+ bash sbin/remove-ainode.sh
+
+ # Windows
+ sbin\remove-ainode.bat
+ ```
+ After removing the node, relevant information about the node cannot be queried.
+
+ ```shell
+IoTDB> show cluster
++------+----------+-------+---------------+------------+-------+-----------+
+|NodeID| NodeType| Status|InternalAddress|InternalPort|Version| BuildInfo|
++------+----------+-------+---------------+------------+-------+-----------+
+| 0|ConfigNode|Running| 127.0.0.1| 10710|UNKNOWN|190e303-dev|
+| 1| DataNode|Running| 127.0.0.1| 10730|UNKNOWN|190e303-dev|
++------+----------+-------+---------------+------------+-------+-----------+
+```
+##### Specify removal:
+
+If files under the data folder are lost, AINode may not be able to remove itself locally, and the user needs to specify the Node ID, address and port of the target node to remove it. In this case, the parameters can be passed as follows.
+
+ ```shell
+ # Linux / MacOS
+ bash sbin/remove-ainode.sh -t <AINode-id>/<ip>:<rpc-port>
+
+ # Windows
+ sbin\remove-ainode.bat -t <AINode-id>/<ip>:<rpc-port>
+ ```
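+
+ For example, taking the AINode shown in the earlier `show cluster` output (NodeID 2, address 127.0.0.1, port 10810), the call might look like the following; treat the exact values as illustrative for your own cluster:
+
+ ```shell
+ # Remove the AINode identified as NodeID 2 at 127.0.0.1:10810 (values are examples)
+ bash sbin/remove-ainode.sh -t 2/127.0.0.1:10810
+ ```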
+
+## Common problems
+
+### An error occurs when starting AINode stating that the venv module cannot be found
+
+ When AINode is started in the default way, a Python virtual environment is created in the installation package directory and dependencies are installed into it, so the venv module is required. Generally, Python 3.8 and later ship with venv, but some systems with a built-in Python environment may not meet this requirement. There are two solutions when this error occurs (choose one):
+
+ Solution 1: install the venv module locally. Taking Ubuntu as an example, you can run the following command to install Python's venv module, or install a Python version that ships with venv from the Python official website.
+
+ ```shell
+apt-get install python3.8-venv
+```
+Then create a Python 3.8.0 virtual environment under the AINode path:
+
+ ```shell
+../Python-3.8.0/python -m venv venv   # "venv" is the name of the virtual environment folder
+```
+ Solution 2: when running the startup script, use `-i` to specify an existing Python interpreter path as the runtime environment for AINode, which removes the need to create a new virtual environment.
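+
+ A minimal sketch of this second approach (the interpreter path below is purely illustrative) is:
+
+ ```shell
+ # Point AINode at an existing interpreter instead of creating a new venv; adjust the path to your environment
+ bash sbin/start-ainode.sh -i /usr/local/python3/bin/python3
+ ```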
+
+### The SSL module in Python is not properly installed and configured to handle HTTPS resources
+
+If you see `WARNING: pip is configured with locations that require TLS/SSL, however the ssl module in Python is not available.`, you can install OpenSSL and then rebuild Python to solve this problem.
+> Currently Python versions 3.6 to 3.9 are compatible with OpenSSL 1.0.2, 1.1.0, and 1.1.1.
+
+ Python requires OpenSSL to be installed on the system; the specific installation method can be found in this [link](https://stackoverflow.com/questions/56552390/how-to-fix-ssl-module-in-python-is-not-available-in-centos).
+
+ ```shell
+sudo apt-get install build-essential libssl-dev zlib1g-dev libbz2-dev libreadline-dev libsqlite3-dev wget curl llvm libncurses5-dev libncursesw5-dev xz-utils tk-dev libffi-dev liblzma-dev uuid-dev lzma-dev liblzma-dev
+sudo -E ./configure --with-ssl
+make
+sudo make install
+```
+
+### Pip or setuptools version is too low
+
+ A compilation issue similar to "error: Microsoft Visual C++ 14.0 or greater is required..." appears on Windows.
+
+The corresponding error occurs during installation and compilation, and is usually caused by an outdated C++ build toolchain or an outdated pip/setuptools version. You can upgrade them with:
+
+ ```shell
+./python -m pip install --upgrade pip
+./python -m pip install --upgrade setuptools
+```
+
+
+ ### Install and compile Python
+
+ Use the following instructions to download the installation package from the official website and extract it:
+ ```shell
+wget https://www.python.org/ftp/python/3.8.0/Python-3.8.0.tar.xz
+tar Jxf Python-3.8.0.tar.xz
+```
+ Compile and install the corresponding Python package:
+ ```shell
+cd Python-3.8.0
+./configure --prefix=/usr/local/python3
+make
+sudo make install
+python3 --version
+```
\ No newline at end of file
diff --git a/src/UserGuide/latest/User-Manual/AINode_apache.md b/src/UserGuide/latest/User-Manual/AINode_apache.md
new file mode 100644
index 000000000..f0c6647ec
--- /dev/null
+++ b/src/UserGuide/latest/User-Manual/AINode_apache.md
@@ -0,0 +1,654 @@
+
+
+# AI Capability (AINode)
+
+AINode is the third type of internal node in Apache IoTDB after the ConfigNode and DataNode. It extends IoTDB with machine learning analysis of time series by interacting with the DataNodes and ConfigNodes of the IoTDB cluster. It supports registering existing machine learning models brought in from outside and using the registered models to perform time series analysis tasks on specified time series data through simple SQL statements, integrating model creation, management and inference into the database engine. At present, machine learning algorithms and self-developed models are provided for common time series analysis scenarios (e.g. prediction and anomaly detection).
+
+The system architecture is shown below:
+::: center
+
+:::
+The responsibilities of the three nodes are as follows:
+
+- **ConfigNode**: responsible for storing and managing the meta-information of the model; responsible for distributed node management.
+- **DataNode**: responsible for receiving and parsing SQL requests from users; responsible for storing time-series data; responsible for preprocessing computation of data.
+- **AINode**: responsible for importing model files, creating models, and performing model inference.
+
+## Advantageous features
+
+Compared with building a machine learning service alone, it has the following advantages:
+
+- **Simple and easy to use**: no Python or Java programming is required; the complete process of machine learning model management and inference can be completed with SQL statements. Creating a model is done with the `CREATE MODEL` statement, and running inference with a model is done with the `CALL INFERENCE(...)` statement, which makes usage simpler and more convenient.
+
+- **Avoid Data Migration**: With IoTDB native machine learning, data stored in IoTDB can be directly applied to the inference of machine learning models without having to move the data to a separate machine learning service platform, which accelerates data processing, improves security, and reduces costs.
+
+
+
+- **Built-in Advanced Algorithms**: supports industry-leading machine learning analytics algorithms covering typical timing analysis tasks, empowering the timing database with native data analysis capabilities. Such as:
+ - **Time Series Forecasting**: learns patterns of change from past time series; thus outputs the most likely prediction of future series based on observations at a given past time.
+ - **Anomaly Detection for Time Series**: detects and identifies outliers in a given time series data, helping to discover anomalous behaviour in the time series.
+ - **Time Series Annotation**: adds additional information or markers, such as event occurrences, outliers, or trend changes, to each data point or specific time period to better understand and analyse the data.
+
+
+
+## Basic Concepts
+
+- **Model**: a machine learning model that takes time-series data as input and outputs the results or decisions of an analysis task. Model is the basic management unit of AINode, which supports adding (registration), deleting, checking, and using (inference) of models.
+- **Create**: Load externally designed or trained model files or algorithms into AINode for unified management and use by IoTDB.
+- **Inference**: The process of using the created model to complete the timing analysis task applicable to the model on the specified timing data.
+- **Built-in capabilities**: AINode comes with machine learning algorithms or home-grown models for common timing analysis scenarios (e.g., prediction and anomaly detection).
+
+::: center
+
+:::
+
+## Installation and Deployment
+
+The deployment of AINode can be found in the document [Deployment Guidelines](../Deployment-and-Maintenance/AINode_Deployment_apache.md#ainode-deployment) .
+
+
+## Usage Guidelines
+
+AINode provides a model creation and deletion workflow for deep learning models on time series data. Built-in models do not need to be created or deleted and can be used directly; the built-in model instances created for an inference call are destroyed automatically once the inference completes.
+
+### Registering Models
+
+A trained deep learning model can be registered by specifying the vector dimensions of the model's inputs and outputs, which can be used for model inference.
+
+Models that meet the following criteria can be registered in AINode:
+1. AINode supports models trained with PyTorch 2.1.0 and 2.2.0; avoid using features introduced after version 2.2.0.
+2. AINode supports models stored using PyTorch JIT, and the model file needs to include the parameters and structure of the model.
+3. The input sequence of the model can contain one or more columns, and if there are multiple columns, they need to correspond to the model capability and model configuration file.
+4. The input and output dimensions of the model must be clearly defined in the `config.yaml` configuration file. When using the model, it is necessary to strictly follow the input-output dimensions defined in the `config.yaml` configuration file. If the number of input and output columns does not match the configuration file, it will result in errors.
+
+The following is the SQL syntax definition for model registration.
+
+```SQL
+create model <model_name> using uri <uri>
+```
+
+The specific meanings of the parameters in the SQL are as follows:
+
+- model_name: a globally unique identifier for the model, which cannot be repeated. The model name has the following constraints:
+
+ - Identifiers [ 0-9 a-z A-Z _ ] (letters, numbers, underscores) are allowed.
+ - Length is limited to 2-64 characters
+ - Case sensitive
+
+- uri: resource path to the model registration file, which should contain the **model weights model.pt file and the model's metadata description file config.yaml**.
+
+ - Model weight file: the weight file obtained after the training of the deep learning model is completed, currently supporting pytorch training of the .pt file
+
+ - yaml metadata description file: parameters related to the model structure that need to be provided when the model is registered, which must contain the input and output dimensions of the model for model inference:
+
+ - | **Parameter name** | **Parameter description** | **Example** |
+ | ------------ | ---------------------------- | -------- |
+ | input_shape | Rows and columns of model inputs for model inference | [96,2] |
+ | output_shape | rows and columns of model outputs, for model inference | [48,2] |
+
+ - In addition to model inference, the data types of model input and output can be specified:
+
+ - | **Parameter name** | **Parameter description** | **Example** |
+ | ----------- | ------------------ | --------------------- |
+ | input_type | model input data type | ['float32','float32'] |
+ | output_type | data type of the model output | ['float32','float32'] |
+
+ - In addition to this, additional notes can be specified for display during model management
+
+ - | **Parameter name** | **Parameter description** | **Examples** |
+ | ---------- | ---------------------------------------------- | ------------------------------------------- |
+ | attributes | optional, user-defined model notes for model display | 'model_type': 'dlinear','kernel_size': '25' |
+
+
+In addition to registration of local model files, registration can also be done by specifying remote resource paths via URIs, using open source model repositories (e.g. HuggingFace).
+
+#### Example
+
+The example folder used here contains the model.pt and config.yaml files, where model.pt is obtained from training, and the content of config.yaml is as follows:
+
+```YAML
+configs:
+ # Required options
+ input_shape: [96, 2] # The model receives data of 96 rows x 2 columns.
+ output_shape: [48, 2] # The model outputs data of 48 rows x 2 columns.
+
+ # Optional. Defaults to float32 for all columns; the number of entries must match the number of columns in the shape.
+ input_type: ["int64", "int64"] # Input data types; must match the number of columns.
+ output_type: ["text", "int64"] # Output data types; must match the number of columns.
+
+attributes: # Optional user-defined notes for the input.
+ 'model_type': 'dlinear'
+ 'kernel_size': '25'
+```
+
+Specify this folder as the load path to register the model.
+
+```SQL
+IoTDB> create model dlinear_example using uri "file://. /example"
+```
+
+Alternatively, you can download the corresponding model file from huggingFace and register it.
+
+```SQL
+IoTDB> create model dlinear_example using uri "https://huggingface.com/IoTDBML/dlinear/"
+```
+
+After the SQL is executed, the registration process is carried out asynchronously. You can check the registration status of the model with the model listing (see the Viewing Models section); the time needed for a successful registration is mainly affected by the size of the model file.
+
+Once the model registration is complete, you can call specific functions and perform model inference by using normal queries.
+
+### Viewing Models
+
+Successfully registered models can be queried for model-specific information through the show models command. The SQL definition is as follows:
+
+```SQL
+show models
+
+show models <model_name>
+```
+
+In addition to displaying information about all models directly, you can specify a model name to view the information of a specific model. The result of the model display contains the following information:
+
+| **ModelId** | **State** | **Configs** | **Attributes** |
+| ------------ | ------------------------------------- | ---------------------------------------------- | -------------- |
+| Unique model identifier | Model registration status (LOADING, ACTIVE, DROPPING, UNAVAILABLE) | inputShape, outputShape, inputTypes, outputTypes | Model notes |
+
+State is used to show the current status of model registration, which can be one of the following:
+
+- **LOADING**: The corresponding model meta information has been added to the ConfigNode, and the model file is being transferred to the AINode node.
+- **ACTIVE**: The model has been set up and is in an available state.
+- **DROPPING**: Model deletion is in progress; model-related information is being deleted from the ConfigNode and AINode.
+- **UNAVAILABLE**: Model creation failed; the failed model can be deleted by its model_name using drop model.
+
+#### Example
+
+```SQL
+IoTDB> show models
+
+
++---------------------+--------------------------+-----------+----------------------------+-----------------------+
+| ModelId| ModelType| State| Configs| Notes|
++---------------------+--------------------------+-----------+----------------------------+-----------------------+
+| dlinear_example| USER_DEFINED| ACTIVE| inputShape:[96,2]| |
+| | | | outputShape:[48,2]| |
+| | | | inputDataType:[float,float]| |
+| | | |outputDataType:[float,float]| |
+| _STLForecaster| BUILT_IN_FORECAST| ACTIVE| |Built-in model in IoTDB|
+| _NaiveForecaster| BUILT_IN_FORECAST| ACTIVE| |Built-in model in IoTDB|
+| _ARIMA| BUILT_IN_FORECAST| ACTIVE| |Built-in model in IoTDB|
+|_ExponentialSmoothing| BUILT_IN_FORECAST| ACTIVE| |Built-in model in IoTDB|
+| _GaussianHMM|BUILT_IN_ANOMALY_DETECTION| ACTIVE| |Built-in model in IoTDB|
+| _GMMHMM|BUILT_IN_ANOMALY_DETECTION| ACTIVE| |Built-in model in IoTDB|
+| _Stray|BUILT_IN_ANOMALY_DETECTION| ACTIVE| |Built-in model in IoTDB|
++---------------------+--------------------------+-----------+----------------------------+-----------------------+
+```
+
+We registered the corresponding model earlier; its status can be checked as shown above, and ACTIVE indicates that the model was registered successfully and can be used for inference.
+
+### Delete Model
+
+For a successfully registered model, the user can delete it via SQL. In addition to deleting the meta information on the configNode, this operation also deletes all the related model files under the AINode. The SQL is as follows:
+
+```SQL
+drop model <model_name>
+```
+
+You need to specify the model_name of a successfully registered model to delete it. Since model deletion involves data on multiple nodes, the operation does not complete immediately; the model state becomes DROPPING during this period, and a model in this state cannot be used for inference.
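+
+For instance, dropping the model registered in the earlier example (assuming it is no longer needed) would look like this:
+
+```SQL
+drop model dlinear_example
+```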
+
+### Inference with Built-in Models
+
+The SQL syntax is as follows:
+
+
+```SQL
+call inference(<built_in_model_name>,sql[,<parameterName>=<parameterValue>])
+```
+
+Built-in model inference does not require a registration process. Inference is performed by invoking the `inference` function with the `call` keyword; the corresponding parameters are described as follows:
+
+- **built_in_model_name**: built-in model name
+- **parameterName**: parameter name
+- **parameterValue**: parameter value
+
+#### Built-in Models and Parameter Descriptions
+
+The following machine learning models are currently built-in, please refer to the following links for detailed parameter descriptions.
+
+| Model | built_in_model_name | Task type | Parameter description |
+| -------------------- | --------------------- | -------- | ------------------------------------------------------------ |
+| Arima | _Arima | Forecast | [Arima Parameter description](https://www.sktime.net/en/latest/api_reference/auto_generated/sktime.forecasting.arima.ARIMA.html?highlight=Arima) |
+| STLForecaster | _STLForecaster | Forecast | [STLForecaster Parameter description](https://www.sktime.net/en/latest/api_reference/auto_generated/sktime.forecasting.trend.STLForecaster.html#sktime.forecasting.trend.STLForecaster) |
+| NaiveForecaster | _NaiveForecaster | Forecast | [NaiveForecaster Parameter description](https://www.sktime.net/en/latest/api_reference/auto_generated/sktime.forecasting.naive.NaiveForecaster.html#naiveforecaster) |
+| ExponentialSmoothing | _ExponentialSmoothing | Forecast | [ExponentialSmoothing Parameter description](https://www.sktime.net/en/latest/api_reference/auto_generated/sktime.forecasting.exp_smoothing.ExponentialSmoothing.html) |
+| GaussianHMM | _GaussianHMM | Annotation | [GaussianHMM Parameter description](https://www.sktime.net/en/latest/api_reference/auto_generated/sktime.annotation.hmm_learn.gaussian.GaussianHMM.html) |
+| GMMHMM | _GMMHMM | Annotation | [GMMHMM Parameter description](https://www.sktime.net/en/latest/api_reference/auto_generated/sktime.annotation.hmm_learn.gmm.GMMHMM.html) |
+| Stray | _Stray | Anomaly detection | [Stray Parameter description](https://www.sktime.net/en/latest/api_reference/auto_generated/sktime.annotation.stray.STRAY.html) |
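+
+As a sketch of how model parameters are passed, a built-in forecaster could be invoked on the `root.eg.airline` series used in the example below, with the `predict_length` parameter borrowed from the STLForecaster usage in the practical examples; the exact values are illustrative:
+
+```SQL
+call inference(_STLForecaster, "select s0 from root.eg.airline", predict_length=48)
+```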
+
+
+#### Example
+
+The following is an example using built-in model inference. The built-in Stray model is used as the anomaly detection algorithm; its input is `[144,1]` and its output is `[144,1]`. We invoke it for inference through SQL.
+
+```SQL
+IoTDB> select * from root.eg.airline
++-----------------------------+------------------+
+| Time|root.eg.airline.s0|
++-----------------------------+------------------+
+|1949-01-31T00:00:00.000+08:00| 224.0|
+|1949-02-28T00:00:00.000+08:00| 118.0|
+|1949-03-31T00:00:00.000+08:00| 132.0|
+|1949-04-30T00:00:00.000+08:00| 129.0|
+......
+|1960-09-30T00:00:00.000+08:00| 508.0|
+|1960-10-31T00:00:00.000+08:00| 461.0|
+|1960-11-30T00:00:00.000+08:00| 390.0|
+|1960-12-31T00:00:00.000+08:00| 432.0|
++-----------------------------+------------------+
+Total line number = 144
+
+IoTDB> call inference(_Stray, "select s0 from root.eg.airline", k=2)
++-------+
+|output0|
++-------+
+| 0|
+| 0|
+| 0|
+| 0|
+......
+| 1|
+| 1|
+| 0|
+| 0|
+| 0|
+| 0|
++-------+
+Total line number = 144
+```
+
+### Inference with Deep Learning Models
+
+The SQL syntax is as follows:
+
+```SQL
+call inference(<model_name>,sql[,window=<window_function>])
+
+
+window_function:
+ head(window_size)
+ tail(window_size)
+ count(window_size,sliding_step)
+```
+
+After completing the registration of the model, the inference function can be used by calling the inference function through the call keyword, and its corresponding parameters are described as follows:
+
+- **model_name**: corresponds to a registered model
+- **sql**: sql query statement, the result of the query is used as input to the model for model inference. The dimensions of the rows and columns in the result of the query need to match the size specified in the specific model config. (It is not recommended to use the `SELECT *` clause for the sql here because in IoTDB, `*` does not sort the columns, so the order of the columns is undefined, you can use `SELECT s0,s1` to ensure that the columns order matches the expectations of the model input)
+- **window_function**: Window functions that can be used in the inference process, there are currently three types of window functions provided to assist in model inference:
+ - **head(window_size)**: Get the top window_size points in the data for model inference, this window can be used for data cropping.
+ 
+
+ - **tail(window_size)**: get the last window_size point in the data for model inference, this window can be used for data cropping.
+ 
+
+ - **count(window_size, sliding_step)**: a sliding window based on the number of points; the data in each window is passed through the model separately. As illustrated below, a window function with window_size 2 splits the input dataset into three windows, and each window produces its own inference result. This window can be used for continuous inference.
+ 
+
+**Explanation 1**: window can be used to solve the problem of cropping rows when the results of the sql query and the input row requirements of the model do not match. Note that when the number of columns does not match or the number of rows is directly less than the model requirement, the inference cannot proceed and an error message will be returned.
+
+**Explanation 2**: In deep learning applications, timestamp-derived features (the time column in the data) are often used as covariates in generative tasks and are fed into the model to improve it, but the time column is generally not part of the model's output. To keep the implementation generic, the inference results correspond only to the real output of the model; if the model does not output a time column, it is not included in the results.
+
+
+#### Example
+
+The following is an example of inference in action using a deep learning model, for the `dlinear` prediction model with input `[96,2]` and output `[48,2]` mentioned above, which we use via SQL.
+
+```Shell
+IoTDB> select s0,s1 from root.**
++-----------------------------+-------------------+-------------------+
+| Time| root.eg.etth.s0| root.eg.etth.s1|
++-----------------------------+-------------------+-------------------+
+|1990-01-01T00:00:00.000+08:00| 0.7855| 1.611|
+|1990-01-02T00:00:00.000+08:00| 0.7818| 1.61|
+|1990-01-03T00:00:00.000+08:00| 0.7867| 1.6293|
+|1990-01-04T00:00:00.000+08:00| 0.786| 1.637|
+|1990-01-05T00:00:00.000+08:00| 0.7849| 1.653|
+|1990-01-06T00:00:00.000+08:00| 0.7866| 1.6537|
+|1990-01-07T00:00:00.000+08:00| 0.7886| 1.662|
+......
+|1990-03-31T00:00:00.000+08:00| 0.7585| 1.678|
+|1990-04-01T00:00:00.000+08:00| 0.7587| 1.6763|
+|1990-04-02T00:00:00.000+08:00| 0.76| 1.6813|
+|1990-04-03T00:00:00.000+08:00| 0.7669| 1.684|
+|1990-04-04T00:00:00.000+08:00| 0.7645| 1.677|
+|1990-04-05T00:00:00.000+08:00| 0.7625| 1.68|
+|1990-04-06T00:00:00.000+08:00| 0.7617| 1.6917|
++-----------------------------+-------------------+-------------------+
+Total line number = 96
+
+IoTDB> call inference(dlinear_example,"select s0,s1 from root.**")
++--------------------------------------------+-----------------------------+
+| _result_0| _result_1|
++--------------------------------------------+-----------------------------+
+| 0.726302981376648| 1.6549958229064941|
+| 0.7354921698570251| 1.6482787370681763|
+| 0.7238251566886902| 1.6278168201446533|
+......
+| 0.7692174911499023| 1.654654049873352|
+| 0.7685555815696716| 1.6625318765640259|
+| 0.7856493592262268| 1.6508299350738525|
++--------------------------------------------+-----------------------------+
+Total line number = 48
+```
+
+#### Example of using the tail/head window function
+
+When the amount of data is variable and you want to take the latest 96 rows of data for inference, you can use the corresponding window function tail. The head function is used in a similar way, except that it takes the earliest 96 points.
+
+```Shell
+IoTDB> select s0,s1 from root.**
++-----------------------------+-------------------+-------------------+
+| Time| root.eg.etth.s0| root.eg.etth.s1|
++-----------------------------+-------------------+-------------------+
+|1988-01-01T00:00:00.000+08:00| 0.7355| 1.211|
+......
+|1990-01-01T00:00:00.000+08:00| 0.7855| 1.611|
+|1990-01-02T00:00:00.000+08:00| 0.7818| 1.61|
+|1990-01-03T00:00:00.000+08:00| 0.7867| 1.6293|
+|1990-01-04T00:00:00.000+08:00| 0.786| 1.637|
+|1990-01-05T00:00:00.000+08:00| 0.7849| 1.653|
+|1990-01-06T00:00:00.000+08:00| 0.7866| 1.6537|
+|1990-01-07T00:00:00.000+08:00| 0.7886| 1.662|
+......
+|1990-03-31T00:00:00.000+08:00| 0.7585| 1.678|
+|1990-04-01T00:00:00.000+08:00| 0.7587| 1.6763|
+|1990-04-02T00:00:00.000+08:00| 0.76| 1.6813|
+|1990-04-03T00:00:00.000+08:00| 0.7669| 1.684|
+|1990-04-04T00:00:00.000+08:00| 0.7645| 1.677|
+|1990-04-05T00:00:00.000+08:00| 0.7625| 1.68|
+|1990-04-06T00:00:00.000+08:00| 0.7617| 1.6917|
++-----------------------------+-------------------+-------------------+
+Total line number = 996
+
+IoTDB> call inference(dlinear_example,"select s0,s1 from root.**",window=tail(96))
++--------------------------------------------+-----------------------------+
+| _result_0| _result_1|
++--------------------------------------------+-----------------------------+
+| 0.726302981376648| 1.6549958229064941|
+| 0.7354921698570251| 1.6482787370681763|
+| 0.7238251566886902| 1.6278168201446533|
+......
+| 0.7692174911499023| 1.654654049873352|
+| 0.7685555815696716| 1.6625318765640259|
+| 0.7856493592262268| 1.6508299350738525|
++--------------------------------------------+-----------------------------+
+Total line number = 48
+```
+
+#### Example of using the count window function
+
+This window is mainly used for computational tasks. When the task's corresponding model can only handle a fixed number of rows of data at a time, but the final desired outcome is multiple sets of prediction results, this window function can be used to perform continuous inference using a sliding window of points. Suppose we now have an anomaly detection model `anomaly_example(input: [24,2], output[1,1])`, which generates a 0/1 label for every 24 rows of data. An example of its use is as follows:
+
+```Shell
+IoTDB> select s0,s1 from root.**
++-----------------------------+-------------------+-------------------+
+| Time| root.eg.etth.s0| root.eg.etth.s1|
++-----------------------------+-------------------+-------------------+
+|1990-01-01T00:00:00.000+08:00| 0.7855| 1.611|
+|1990-01-02T00:00:00.000+08:00| 0.7818| 1.61|
+|1990-01-03T00:00:00.000+08:00| 0.7867| 1.6293|
+|1990-01-04T00:00:00.000+08:00| 0.786| 1.637|
+|1990-01-05T00:00:00.000+08:00| 0.7849| 1.653|
+|1990-01-06T00:00:00.000+08:00| 0.7866| 1.6537|
+|1990-01-07T00:00:00.000+08:00| 0.7886| 1.662|
+......
+|1990-03-31T00:00:00.000+08:00| 0.7585| 1.678|
+|1990-04-01T00:00:00.000+08:00| 0.7587| 1.6763|
+|1990-04-02T00:00:00.000+08:00| 0.76| 1.6813|
+|1990-04-03T00:00:00.000+08:00| 0.7669| 1.684|
+|1990-04-04T00:00:00.000+08:00| 0.7645| 1.677|
+|1990-04-05T00:00:00.000+08:00| 0.7625| 1.68|
+|1990-04-06T00:00:00.000+08:00| 0.7617| 1.6917|
++-----------------------------+-------------------+-------------------+
+Total line number = 96
+
+IoTDB> call inference(anomaly_example,"select s0,s1 from root.**",window=count(24,24))
++-------------------------+
+| _result_0|
++-------------------------+
+| 0|
+| 1|
+| 1|
+| 0|
++-------------------------+
+Total line number = 4
+```
+
+In the result set, each row's label corresponds to the output of the anomaly detection model after inputting each group of 24 rows of data.
+
+## Privilege Management
+
+AINode-related functions use IoTDB's own authentication for permission management: users can use the model management functions only if they have the USE_MODEL privilege. When using the inference function, the user also needs read permission on the source series referenced by the SQL passed to the model.
+
+| Privilege Name | Privilege Scope | Administrator User (default ROOT) | Normal User | Path Related |
+| --------- | --------------------------------- | ---------------------- | -------- | -------- |
+| USE_MODEL | create model/show models/drop model | √ | √ | x |
+| READ_DATA | call inference | √ | √ | √ |
+
+## Practical Examples
+
+### Power Load Prediction
+
+In some industrial scenarios, there is a need to predict power loads, which can be used to optimise power supply, conserve energy and resources, support planning and expansion, and enhance power system reliability.
+
+The data for the test set of ETTh1 that we use is [ETTh1](https://alioss.timecho.com/docs/img/ETTh1.csv).
+
+
+It contains power data collected at 1-hour intervals; each record consists of six load values and the oil temperature: High UseFul Load, High UseLess Load, Middle UseFul Load, Middle UseLess Load, Low UseFul Load, Low UseLess Load, and Oil Temperature.
+
+On this dataset, the model inference function of IoTDB-ML can predict the oil temperature in the future period of time through the relationship between the past values of high, middle and low use loads and the corresponding time stamp oil temperature, which empowers the automatic regulation and monitoring of grid transformers.
+
+#### Step 1: Data Import
+
+Users can import the ETT dataset into IoTDB using `import-csv.sh` in the tools folder.
+
+```Bash
+bash ./import-csv.sh -h 127.0.0.1 -p 6667 -u root -pw root -f ... /... /ETTh1.csv
+```
+
+#### Step 2: Model Import
+
+We can enter the following SQL in the IoTDB CLI to pull a trained model from HuggingFace and register it for subsequent inference.
+
+```SQL
+create model dlinear_example using uri 'https://huggingface.co/hvlgo/dlinear/tree/main'
+```
+
+This model is based on DLinear, a lightweight deep model that captures trends within a sequence and relationships between variables while keeping inference fast, making it better suited to fast real-time prediction than heavier deep models.
+
+#### Step 3: Model inference
+
+```Shell
+IoTDB> select s0,s1,s2,s3,s4,s5,s6 from root.eg.etth LIMIT 96
++-----------------------------+---------------+---------------+---------------+---------------+---------------+---------------+---------------+
+| Time|root.eg.etth.s0|root.eg.etth.s1|root.eg.etth.s2|root.eg.etth.s3|root.eg.etth.s4|root.eg.etth.s5|root.eg.etth.s6|
++-----------------------------+---------------+---------------+---------------+---------------+---------------+---------------+---------------+
+|2017-10-20T00:00:00.000+08:00| 10.449| 3.885| 8.706| 2.025| 2.041| 0.944| 8.864|
+|2017-10-20T01:00:00.000+08:00| 11.119| 3.952| 8.813| 2.31| 2.071| 1.005| 8.442|
+|2017-10-20T02:00:00.000+08:00| 9.511| 2.88| 7.533| 1.564| 1.949| 0.883| 8.16|
+|2017-10-20T03:00:00.000+08:00| 9.645| 2.21| 7.249| 1.066| 1.828| 0.914| 7.949|
+......
+|2017-10-23T20:00:00.000+08:00| 8.105| 0.938| 4.371| -0.569| 3.533| 1.279| 9.708|
+|2017-10-23T21:00:00.000+08:00| 7.167| 1.206| 4.087| -0.462| 3.107| 1.432| 8.723|
+|2017-10-23T22:00:00.000+08:00| 7.1| 1.34| 4.015| -0.32| 2.772| 1.31| 8.864|
+|2017-10-23T23:00:00.000+08:00| 9.176| 2.746| 7.107| 1.635| 2.65| 1.097| 9.004|
++-----------------------------+---------------+---------------+---------------+---------------+---------------+---------------+---------------+
+Total line number = 96
+
+IoTDB> call inference(dlinear_example, "select s0,s1,s2,s3,s4,s5,s6 from root.eg.etth", window=head(96))
++-----------+----------+----------+------------+---------+----------+----------+
+| output0| output1| output2| output3| output4| output5| output6|
++-----------+----------+----------+------------+---------+----------+----------+
+| 10.319546| 3.1450553| 7.877341| 1.5723765|2.7303758| 1.1362307| 8.867775|
+| 10.443649| 3.3286757| 7.8593454| 1.7675098| 2.560634| 1.1177158| 8.920919|
+| 10.883752| 3.2341104| 8.47036| 1.6116762|2.4874182| 1.1760603| 8.798939|
+......
+| 8.0115595| 1.2995274| 6.9900327|-0.098746896| 3.04923| 1.176214| 9.548782|
+| 8.612427| 2.5036244| 5.6790237| 0.66474205|2.8870275| 1.2051733| 9.330128|
+| 10.096699| 3.399722| 6.9909| 1.7478468|2.7642853| 1.1119363| 9.541455|
++-----------+----------+----------+------------+---------+----------+----------+
+Total line number = 48
+```
+
+Comparing the predicted oil temperature with the actual values, we obtain the following image.
+
+The data before 10/24 00:00 represents the past data input to the model, the blue line after 10/24 00:00 is the oil temperature forecast result given by the model, and the red line is the actual oil temperature data from the dataset (used for comparison).
+
+
+
+Using the six load series and the corresponding oil temperatures from the past 96 hours (4 days), together with the inter-series relationships learned previously, the model projects how the oil temperature may change over the next 48 hours (2 days). After visualisation, the predicted curve stays highly consistent with the actual results in terms of trend.
+
+### Power Prediction
+
+Power monitoring of current, voltage and power data is required in substations for detecting potential grid problems, identifying faults in the power system, effectively managing grid loads and analysing power system performance and trends.
+
+We have used the current, voltage and power data from a substation to form a real-world dataset. It consists of data such as A-phase voltage, B-phase voltage, and C-phase voltage collected every 5 to 6 seconds over a time span of nearly four months.
+
+The test set data content is [data](https://alioss.timecho.com/docs/img/data.csv).
+
+On this dataset, the model inference function of IoTDB-ML can predict the C-phase voltage over a future period from the previous values and corresponding timestamps of the A-phase, B-phase and C-phase voltages, empowering the monitoring and management of the substation.
+
+#### Step 1: Data Import
+
+Users can import the dataset using `import-csv.sh` in the tools folder:
+
+```Bash
+bash ./import-csv.sh -h 127.0.0.1 -p 6667 -u root -pw root -f ../../data.csv
+```
+
+#### Step 2: Model Import
+
+We can select built-in models or registered models in IoTDB CLI for subsequent inference.
+
+We use the built-in model STLForecaster for prediction. STLForecaster is a time series forecasting method based on the STL implementation in the statsmodels library.
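+
+Built-in models require no registration. If you want to confirm that `_STLForecaster` is available before running inference, you can list the models first (an optional, illustrative check):
+
+```SQL
+show models
+```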
+
+#### Step 3: Model Inference
+
+```Shell
+IoTDB> select * from root.eg.voltage limit 96
++-----------------------------+------------------+------------------+------------------+
+| Time|root.eg.voltage.s0|root.eg.voltage.s1|root.eg.voltage.s2|
++-----------------------------+------------------+------------------+------------------+
+|2023-02-14T20:38:32.000+08:00| 2038.0| 2028.0| 2041.0|
+|2023-02-14T20:38:38.000+08:00| 2014.0| 2005.0| 2018.0|
+|2023-02-14T20:38:44.000+08:00| 2014.0| 2005.0| 2018.0|
+......
+|2023-02-14T20:47:52.000+08:00| 2024.0| 2016.0| 2027.0|
+|2023-02-14T20:47:57.000+08:00| 2024.0| 2016.0| 2027.0|
+|2023-02-14T20:48:03.000+08:00| 2024.0| 2016.0| 2027.0|
++-----------------------------+------------------+------------------+------------------+
+Total line number = 96
+
+IoTDB> call inference(_STLForecaster, "select s0,s1,s2 from root.eg.voltage", window=head(96),predict_length=48)
++---------+---------+---------+
+| output0| output1| output2|
++---------+---------+---------+
+|2026.3601|2018.2953|2029.4257|
+|2019.1538|2011.4361|2022.0888|
+|2025.5074|2017.4522|2028.5199|
+......
+
+|2022.2336|2015.0290|2025.1023|
+|2015.7241|2008.8975|2018.5085|
+|2022.0777|2014.9136|2024.9396|
+|2015.5682|2008.7821|2018.3458|
++---------+---------+---------+
+Total line number = 48
+```
+
+Comparing the predicted results of the C-phase voltage with the real results, we can get the following image.
+
+The data before 02/14 20:48 represents the past data input to the model, the blue line after 02/14 20:48 is the predicted result of phase C voltage given by the model, while the red line is the actual phase C voltage data from the dataset (used for comparison).
+
+
+
+It can be seen that we used the voltage data from the past 10 minutes and, based on the previously learned inter-sequence relationships, modeled the possible changes in the phase C voltage data for the next 5 minutes. The visualized forecast curve shows a certain degree of synchronicity with the actual results in terms of trend.
+
+### Anomaly Detection
+
+In the civil aviation and transport industry, there exists a need for anomaly detection of the number of passengers travelling on an aircraft. The results of anomaly detection can be used to guide the adjustment of flight scheduling to make the organisation more efficient.
+
+Airline Passengers is a time-series dataset that records the number of international air passengers between 1949 and 1960, sampled at one-month intervals. The dataset contains a single time series and is available at [airline](https://alioss.timecho.com/docs/img/airline.csv).
+
+On this dataset, the model inference function of IoTDB-ML can capture the changing patterns of the sequence and detect anomalous time points, empowering the transport industry.
+
+#### Step 1: Data Import
+
+Users can import the dataset using `import-csv.sh` in the tools folder:
+
+```Bash
+bash ./import-csv.sh -h 127.0.0.1 -p 6667 -u root -pw root -f ../../airline.csv
+```
+
+#### Step 2: Model Inference
+
+IoTDB has some built-in machine learning algorithms that can be used directly. A sample run of one of the anomaly detection algorithms is shown below:
+
+```Shell
+IoTDB> select * from root.eg.airline
++-----------------------------+------------------+
+| Time|root.eg.airline.s0|
++-----------------------------+------------------+
+|1949-01-31T00:00:00.000+08:00| 224.0|
+|1949-02-28T00:00:00.000+08:00| 118.0|
+|1949-03-31T00:00:00.000+08:00| 132.0|
+|1949-04-30T00:00:00.000+08:00| 129.0|
+......
+|1960-09-30T00:00:00.000+08:00| 508.0|
+|1960-10-31T00:00:00.000+08:00| 461.0|
+|1960-11-30T00:00:00.000+08:00| 390.0|
+|1960-12-31T00:00:00.000+08:00| 432.0|
++-----------------------------+------------------+
+Total line number = 144
+
+IoTDB> call inference(_Stray, "select s0 from root.eg.airline", k=2)
++-------+
+|output0|
++-------+
+| 0|
+| 0|
+| 0|
+| 0|
+......
+| 1|
+| 1|
+| 0|
+| 0|
+| 0|
+| 0|
++-------+
+Total line number = 144
+```
+
+We plot the detection results to get the following image. The blue curve is the original time series, and the time points marked with red dots are those the algorithm detects as anomalies.
+
+
+
+It can be seen that the Stray model has modelled the input sequence changes and successfully detected the time points where anomalies occur.
\ No newline at end of file
diff --git a/src/zh/UserGuide/Master/Tree/Deployment-and-Maintenance/AINode_Deployment_apache.md b/src/zh/UserGuide/Master/Tree/Deployment-and-Maintenance/AINode_Deployment_apache.md
new file mode 100644
index 000000000..4c4710469
--- /dev/null
+++ b/src/zh/UserGuide/Master/Tree/Deployment-and-Maintenance/AINode_Deployment_apache.md
@@ -0,0 +1,512 @@
+
+# AINode 部署
+
+## AINode介绍
+
+### 能力介绍
+
+AINode 是 IoTDB 在 ConfigNode、DataNode 后提供的第三种内生节点,该节点通过与 IoTDB 集群的 DataNode、ConfigNode 的交互,扩展了对时间序列进行机器学习分析的能力,支持从外部引入已有机器学习模型进行注册,并使用注册的模型在指定时序数据上通过简单 SQL 语句完成时序分析任务的过程,将模型的创建、管理及推理融合在数据库引擎中。目前已提供常见时序分析场景(例如预测与异常检测)的机器学习算法或自研模型。
+
+### 交付方式
+ 是 IoTDB 集群外的额外套件,独立安装包,独立激活(如需试用或使用,请联系天谋科技商务或技术支持)。
+
+### 部署模式
+
+

+

+
+
+## 安装准备
+
+### 安装包获取
+
+ 用户可以下载AINode的软件安装包,下载并解压后即完成AINode的安装。
+
+ 安装包(`iotdb-enterprise-ainode-<版本号>.zip`)解压后的目录结构如下:
+| **目录** | **类型** | **说明** |
+| ------------ | -------- | ------------------------------------------------ |
+| lib | 文件夹 | AINode编译后的二进制可执行文件以及相关的代码依赖 |
+| sbin | 文件夹 | AINode的运行脚本,可以启动,移除和停止AINode |
+| conf | 文件夹 | AINode 的配置文件目录,包含 AINode 的各项配置项 |
+| LICENSE | 文件 | 证书 |
+| NOTICE | 文件 | 提示 |
+| README_ZH.md | 文件 | markdown格式的中文版说明 |
+| `README.md` | 文件 | 使用说明 |
+
+### 环境准备
+- 建议操作环境: Ubuntu, CentOS, MacOS
+
+- 运行环境
+ - 联网环境下 Python >= 3.8即可,且带有 pip 和 venv 工具;非联网环境下需要使用 Python 3.8版本,并从 [此处](https://cloud.tsinghua.edu.cn/d/4c1342f6c272439aa96c/?p=%2Flibs&mode=list) 下载对应操作系统的zip压缩包(注意下载依赖需选择libs文件夹中的zip压缩包,如下图),并将文件夹下的所有文件拷贝到 `iotdb-enterprise-ainode-` 文件夹中 `lib` 文件夹下,并按下文步骤启动AINode。
+
+
+
+ - 环境变量中需存在 Python 解释器且可以通过 `python` 指令直接调用
+ - 建议在 `iotdb-enterprise-ainode-` 文件夹下,新建 Python 解释器 venv 虚拟环境。如安装 3.8.0 版本虚拟环境,语句如下:
+
+ ```shell
+ # 使用 Python 3.8.0 的 venv 模块创建虚拟环境,文件夹名为 venv
+ ../Python-3.8.0/python -m venv venv
+ ```
+## 安装部署及使用
+
+### 安装 AINode
+
+1. 检查Linux的内核架构
+```shell
+ uname -m
+ ```
+
+2. 导入Python环境[下载](https://repo.anaconda.com/miniconda/)
+
+推荐下载py311版本应用,导入至用户根目录下 iotdb专用文件夹 中
+
+3. 切换至iotdb专用文件夹安装Python环境
+
+以 Miniconda3-py311_24.5.0-0-Linux-x86_64 为例:
+
+```shell
+ bash ./Miniconda3-py311_24.5.0-0-Linux-x86_64.sh
+ ```
+> 根据提示键入“回车”、“长按空格”、“回车”、“yes”、“yes”
+> 关闭当前SSH窗口重新连接
+
+ 4. 创建专用环境
+
+```shell
+ conda create -n ainode_py python=3.11.9
+ ```
+
+ 根据提示键入“y”
+
+ 5. 激活专用环境
+
+```shell
+ conda activate ainode_py
+ ```
+
+ 6. 验证Python版本
+
+```shell
+ python --version
+ ```
+ 7. 下载导入AINode到专用文件夹,切换到专用文件夹并解压安装包
+
+```shell
+ unzip iotdb-enterprise-ainode-1.3.3.2.zip
+ ```
+
+ 8. 配置项修改
+
+```shell
+ vi iotdb-enterprise-ainode-1.3.3.2/conf/iotdb-ainode.properties
+ ```
+ 配置项修改:[详细信息](#配置项修改)
+> ain_seed_config_node=iotdb-1:10710(集群通讯节点IP:通讯节点端口)
+> ain_inference_rpc_address=iotdb-3(运行AINode的服务器IP)
+
+ 9. 更换Python源
+
+```shell
+ pip config set global.index-url https://mirrors.aliyun.com/pypi/simple/
+ ```
+
+ 10. 启动AINode节点
+
+```shell
+ nohup bash iotdb-enterprise-ainode-1.3.3.2/sbin/start-ainode.sh > myout.file 2>& 1 &
+ ```
+> 回到系统默认环境:conda deactivate
+
+### 配置项修改
+AINode 支持修改一些必要的参数。可以在 `conf/iotdb-ainode.properties` 文件中找到下列参数并进行持久化的修改:
+
+| **名称** | **描述** | **类型** | **默认值** | **改后生效方式** |
+| :----------------------------- | ------------------------------------------------------------ | ------- | ------------------ | ---------------------------- |
+| cluster_name | AINode 要加入集群的标识 | string | defaultCluster | 仅允许在第一次启动服务前修改 |
+| ain_seed_config_node | AINode 启动时注册的 ConfigNode 地址 | String | 127.0.0.1:10710 | 仅允许在第一次启动服务前修改 |
+| ain_inference_rpc_address | AINode 提供服务与通信的地址 ,内部服务通讯接口 | String | 127.0.0.1 | 仅允许在第一次启动服务前修改 |
+| ain_inference_rpc_port | AINode 提供服务与通信的端口 | String | 10810 | 仅允许在第一次启动服务前修改 |
+| ain_system_dir | AINode 元数据存储路径,相对路径的起始目录与操作系统相关,建议使用绝对路径 | String | data/AINode/system | 仅允许在第一次启动服务前修改 |
+| ain_models_dir | AINode 存储模型文件的路径,相对路径的起始目录与操作系统相关,建议使用绝对路径 | String | data/AINode/models | 仅允许在第一次启动服务前修改 |
+| ain_logs_dir | AINode 存储日志的路径,相对路径的起始目录与操作系统相关,建议使用绝对路径 | String | logs/AINode | 重启后生效 |
+| ain_thrift_compression_enabled | AINode 是否启用 thrift 的压缩机制,0-不启动、1-启动 | Boolean | 0 | 重启后生效 |
+### 启动 AINode
+
+ 在完成 Seed-ConfigNode 的部署后,可以通过添加 AINode 节点来支持模型的注册和推理功能。在配置项中指定 IoTDB 集群的信息后,可以执行相应的指令来启动 AINode,加入 IoTDB 集群。
+
+#### 联网环境启动
+
+##### 启动命令
+
+```shell
+ # 启动命令
+ # Linux 和 MacOS 系统
+ bash sbin/start-ainode.sh
+
+ # Windows 系统
+ sbin\start-ainode.bat
+
+ # 后台启动命令(长期运行推荐)
+ # Linux 和 MacOS 系统
+ nohup bash sbin/start-ainode.sh > myout.file 2>& 1 &
+
+ # Windows 系统
+ nohup bash sbin\start-ainode.bat > myout.file 2>& 1 &
+ ```
+
+##### 详细语法
+
+```shell
+ # 启动命令
+ # Linux 和 MacOS 系统
+ bash sbin/start-ainode.sh -i -r -n
+
+ # Windows 系统
+ sbin\start-ainode.bat -i -r -n
+ ```
+
+##### 参数介绍:
+
+| **名称** | **标签** | **描述** | **是否必填** | **类型** | **默认值** | **输入方式** |
+| ------------------- | ---- | ------------------------------------------------------------ | -------- | ------ | ---------------- | ---------------------- |
+| ain_interpreter_dir | -i | AINode 所安装在的虚拟环境的解释器路径,需要使用绝对路径 | 否 | String | 默认读取环境变量 | 调用时输入或持久化修改 |
+| ain_force_reinstall | -r | 该脚本在检查 AINode 安装情况的时候是否检查版本,如果检查则在版本不对的情况下会强制安装 lib 里的 whl 安装包 | 否 | Bool | false | 调用时输入 |
+| ain_no_dependencies | -n | 指定在安装 AINode 的时候是否安装依赖,如果指定则仅安装 AINode 主程序而不安装依赖。 | 否 | Bool | false | 调用时输入 |
+
+ 如不想每次启动时指定对应参数,也可以在 `conf` 文件夹下的`ainode-env.sh` 和 `ainode-env.bat` 脚本中持久化修改参数(目前支持持久化修改 ain_interpreter_dir 参数)。
+
+ `ainode-env.sh` :
+ ```shell
+ # The default venv environment is used if ain_interpreter_dir is not set. Please use absolute path without quotation mark
+ # ain_interpreter_dir=
+ ```
+ `ainode-env.bat` :
+```shell
+ @REM The default venv environment is used if ain_interpreter_dir is not set. Please use absolute path without quotation mark
+ @REM set ain_interpreter_dir=
+ ```
+ 写入参数值后,解除对应行的注释并保存,即可在下一次执行脚本时生效。
+
+#### 示例
+
+##### 直接启动:
+
+```shell
+ # 启动命令
+ # Linux 和 MacOS 系统
+ bash sbin/start-ainode.sh
+ # Windows 系统
+ sbin\start-ainode.bat
+
+
+ # 后台启动命令(长期运行推荐)
+ # Linux 和 MacOS 系统
+ nohup bash sbin/start-ainode.sh > myout.file 2>& 1 &
+ # Windows 系统
+ nohup bash sbin\start-ainode.bat > myout.file 2>& 1 &
+ ```
+
+##### 更新启动:
+如果 AINode 的版本进行了更新(如更新了 `lib` 文件夹),可使用此命令。首先要保证 AINode 已经停止运行,然后通过 `-r` 参数重启,该参数会根据 `lib` 下的文件重新安装 AINode。
+
+```shell
+ # 更新启动命令
+ # Linux 和 MacOS 系统
+ bash sbin/start-ainode.sh -r
+ # Windows 系统
+ sbin\start-ainode.bat -r
+
+
+ # 后台更新启动命令(长期运行推荐)
+ # Linux 和 MacOS 系统
+ nohup bash sbin/start-ainode.sh -r > myout.file 2>& 1 &
+ # Windows 系统
+ nohup bash sbin\start-ainode.bat -r > myout.file 2>& 1 &
+ ```
+#### 非联网环境启动
+
+##### 启动命令
+
+```shell
+ # 启动命令
+ # Linux 和 MacOS 系统
+ bash sbin/start-ainode.sh
+
+ # Windows 系统
+ sbin\start-ainode.bat
+
+ # 后台启动命令(长期运行推荐)
+ # Linux 和 MacOS 系统
+ nohup bash sbin/start-ainode.sh > myout.file 2>& 1 &
+
+ # Windows 系统
+ nohup bash sbin\start-ainode.bat > myout.file 2>& 1 &
+ ```
+
+##### 详细语法
+
+```shell
+ # 启动命令
+ # Linux 和 MacOS 系统
+ bash sbin/start-ainode.sh -i -r -n
+
+ # Windows 系统
+ sbin\start-ainode.bat -i -r -n
+ ```
+
+##### 参数介绍:
+
+| **名称** | **标签** | **描述** | **是否必填** | **类型** | **默认值** | **输入方式** |
+| ------------------- | ---- | ------------------------------------------------------------ | -------- | ------ | ---------------- | ---------------------- |
+| ain_interpreter_dir | -i | AINode 所安装在的虚拟环境的解释器路径,需要使用绝对路径 | 否 | String | 默认读取环境变量 | 调用时输入或持久化修改 |
+| ain_force_reinstall | -r | 该脚本在检查 AINode 安装情况的时候是否检查版本,如果检查则在版本不对的情况下会强制安装 lib 里的 whl 安装包 | 否 | Bool | false | 调用时输入 |
+
+> 注意:非联网环境下安装失败时,首先检查是否选择了平台对应的安装包,其次确认python版本为3.8(由于下载的安装包限制了python版本,3.7、3.9等其他都不行)
+
+#### 示例
+
+##### 直接启动:
+
+```shell
+ # 启动命令
+ # Linux 和 MacOS 系统
+ bash sbin/start-ainode.sh
+ # Windows 系统
+ sbin\start-ainode.bat
+
+
+ # 后台启动命令(长期运行推荐)
+ # Linux 和 MacOS 系统
+ nohup bash sbin/start-ainode.sh > myout.file 2>& 1 &
+ # Windows 系统
+ nohup bash sbin\start-ainode.bat > myout.file 2>& 1 &
+ ```
+
+### 检测 AINode 节点状态
+
+AINode 启动过程中会自动将新的 AINode 加入 IoTDB 集群。启动 AINode 后,可以在命令行中输入 SQL 查询集群状态,若能在集群中看到 AINode 节点且其运行状态为 Running(如下所示),则表示加入成功。
+
+```shell
+IoTDB> show cluster
++------+----------+-------+---------------+------------+-------+-----------+
+|NodeID| NodeType| Status|InternalAddress|InternalPort|Version| BuildInfo|
++------+----------+-------+---------------+------------+-------+-----------+
+| 0|ConfigNode|Running| 127.0.0.1| 10710|UNKNOWN|190e303-dev|
+| 1| DataNode|Running| 127.0.0.1| 10730|UNKNOWN|190e303-dev|
+| 2| AINode|Running| 127.0.0.1| 10810|UNKNOWN|190e303-dev|
++------+----------+-------+---------------+------------+-------+-----------+
+```
+
+### 停止 AINode
+
+如果需要停止正在运行的 AINode 节点,则执行相应的关闭脚本。
+
+#### 停止命令
+
+```shell
+ # Linux / MacOS
+ bash sbin/stop-ainode.sh
+
+ #Windows
+ sbin\stop-ainode.bat
+ ```
+
+##### 详细语法
+
+```shell
+ # Linux / MacOS
+ bash sbin/stop-ainode.sh -t
+
+ #Windows
+ sbin\stop-ainode.bat -t
+ ```
+
+##### 参数介绍:
+
+ | **名称** | **标签** | **描述** | **是否必填** | **类型** | **默认值** | **输入方式** |
+| ----------------- | ---- | ------------------------------------------------------------ | -------- | ------ | ------ | ---------- |
+| ain_remove_target | -t | AINode 关闭时可以指定待移除的目标 AINode 的 Node ID、地址和端口号,格式为`<NodeID>/<IP>:<Port>` | 否 | String | 无 | 调用时输入 |
+
+#### 示例
+```shell
+ # Linux / MacOS
+ bash sbin/stop-ainode.sh
+
+ # Windows
+ sbin\stop-ainode.bat
+ ```
+停止 AINode 后,还可以在集群中看到 AINode 节点,其运行状态为 UNKNOWN(如下展示),此时无法使用 AINode 功能。
+
+ ```shell
+IoTDB> show cluster
++------+----------+-------+---------------+------------+-------+-----------+
+|NodeID| NodeType| Status|InternalAddress|InternalPort|Version| BuildInfo|
++------+----------+-------+---------------+------------+-------+-----------+
+| 0|ConfigNode|Running| 127.0.0.1| 10710|UNKNOWN|190e303-dev|
+| 1| DataNode|Running| 127.0.0.1| 10730|UNKNOWN|190e303-dev|
+| 2| AINode|UNKNOWN| 127.0.0.1| 10790|UNKNOWN|190e303-dev|
++------+----------+-------+---------------+------------+-------+-----------+
+```
+如果需要重新启动该节点,需重新执行启动脚本。
+
+### 移除 AINode
+
+当需要把一个 AINode 节点移出集群时,可以执行移除脚本。移除和停止脚本的差别是:停止是在集群中保留 AINode 节点但停止 AINode 服务,移除则是把 AINode 节点从集群中移除出去。
+
+
+ #### 移除命令
+
+```shell
+ # Linux / MacOS
+ bash sbin/remove-ainode.sh
+
+ # Windows
+ sbin\remove-ainode.bat
+ ```
+
+##### 详细语法
+
+```shell
+ # Linux / MacOS
+ bash sbin/remove-ainode.sh -i -t -r -n
+
+ # Windows
+ sbin\remove-ainode.bat -i -t -r -n
+ ```
+
+##### 参数介绍:
+
+ | **名称** | **标签** | **描述** | **是否必填** | **类型** | **默认值** | **输入方式** |
+| ------------------- | ---- | ------------------------------------------------------------ | -------- | ------ | ---------------- | --------------------- |
+| ain_interpreter_dir | -i | AINode 所安装在的虚拟环境的解释器路径,需要使用绝对路径 | 否 | String | 默认读取环境变量 | 调用时输入+持久化修改 |
+| ain_remove_target | -t | AINode 关闭时可以指定待移除的目标 AINode 的 Node ID、地址和端口号,格式为`<NodeID>/<IP>:<Port>` | 否 | String | 无 | 调用时输入 |
+| ain_force_reinstall | -r | 该脚本在检查 AINode 安装情况的时候是否检查版本,如果检查则在版本不对的情况下会强制安装 lib 里的 whl 安装包 | 否 | Bool | false | 调用时输入 |
+| ain_no_dependencies | -n | 指定在安装 AINode 的时候是否安装依赖,如果指定则仅安装 AINode 主程序而不安装依赖。 | 否 | Bool | false | 调用时输入 |
+
+ 如不想每次启动时指定对应参数,也可以在 `conf` 文件夹下的`ainode-env.sh` 和 `ainode-env.bat` 脚本中持久化修改参数(目前支持持久化修改 ain_interpreter_dir 参数)。
+
+ `ainode-env.sh` :
+ ```shell
+ # The default venv environment is used if ain_interpreter_dir is not set. Please use absolute path without quotation mark
+ # ain_interpreter_dir=
+ ```
+ `ainode-env.bat` :
+```shell
+ @REM The default venv environment is used if ain_interpreter_dir is not set. Please use absolute path without quotation mark
+ @REM set ain_interpreter_dir=
+ ```
+ 写入参数值后,解除对应行的注释并保存,即可在下一次执行脚本时生效。
+
+#### 示例
+
+##### 直接移除:
+
+ ```shell
+ # Linux / MacOS
+ bash sbin/remove-ainode.sh
+
+ # Windows
+ sbin\remove-ainode.bat
+ ```
+ 移除节点后,将无法查询到节点的相关信息。
+
+ ```shell
+IoTDB> show cluster
++------+----------+-------+---------------+------------+-------+-----------+
+|NodeID| NodeType| Status|InternalAddress|InternalPort|Version| BuildInfo|
++------+----------+-------+---------------+------------+-------+-----------+
+| 0|ConfigNode|Running| 127.0.0.1| 10710|UNKNOWN|190e303-dev|
+| 1| DataNode|Running| 127.0.0.1| 10730|UNKNOWN|190e303-dev|
++------+----------+-------+---------------+------------+-------+-----------+
+```
+##### 指定移除:
+
+如果用户丢失了 data 文件夹下的文件,可能 AINode 本地无法主动移除自己,需要用户指定节点号、地址和端口号进行移除,此时我们支持用户按照以下方法输入参数进行删除。
+
+ ```shell
+ # Linux / MacOS
+ bash sbin/remove-ainode.sh -t <NodeID>/<IP>:<Port>
+
+ # Windows
+ sbin\remove-ainode.bat -t <NodeID>/<IP>:<Port>
+ ```
+
+## 常见问题
+
+### 启动AINode时出现找不到venv模块的报错
+
+ 当使用默认方式启动 AINode 时,会在安装包目录下创建一个 python 虚拟环境并安装依赖,因此要求安装 venv 模块。通常来说 python3.8 及以上的版本会自带 venv,但对于一些系统自带的 python 环境可能并不满足这一要求。出现该报错时有两种解决方案(二选一):
+
+ 在本地安装 venv 模块,以 ubuntu 为例,可以通过运行以下命令来安装 python 自带的 venv 模块。或者从 python 官网安装一个自带 venv 的 python 版本。
+
+ ```shell
+apt-get install python3.8-venv
+```
+ 然后在 AINode 路径下,使用 3.8.0 版本的 Python 创建名为 venv 的虚拟环境:
+
+ ```shell
+# 在 AINode 路径下创建名为 venv 的虚拟环境
+../Python-3.8.0/python -m venv venv
+```
+ 或者,在运行启动脚本时通过 `-i` 指定已有的 python 解释器路径作为 AINode 的运行环境,这样就不再需要创建一个新的虚拟环境。
+
+ ### python中的SSL模块没有被正确安装和配置,无法处理HTTPS资源
+报错信息:`WARNING: pip is configured with locations that require TLS/SSL, however the ssl module in Python is not available.`
+可以在安装 OpenSSL 后重新构建 Python 来解决这个问题。
+> Currently Python versions 3.6 to 3.9 are compatible with OpenSSL 1.0.2, 1.1.0, and 1.1.1.
+
+ Python 要求我们的系统上安装有 OpenSSL,具体安装方法可见[链接](https://stackoverflow.com/questions/56552390/how-to-fix-ssl-module-in-python-is-not-available-in-centos)
+
+ ```shell
+sudo apt-get install build-essential libssl-dev zlib1g-dev libbz2-dev libreadline-dev libsqlite3-dev wget curl llvm libncurses5-dev libncursesw5-dev xz-utils tk-dev libffi-dev liblzma-dev uuid-dev lzma-dev liblzma-dev
+sudo -E ./configure --with-ssl
+make
+sudo make install
+```
+
+ ### pip版本较低
+
+ windows下出现类似“error:Microsoft Visual C++ 14.0 or greater is required...”的编译问题
+
+ 出现对应的报错,通常是 C++ 版本或 pip、setuptools 版本不足所致,可以使用以下命令升级 pip 和 setuptools:
+
+ ```shell
+./python -m pip install --upgrade pip
+./python -m pip install --upgrade setuptools
+```
+
+
+ ### 安装编译python
+
+ 使用以下指令从官网下载安装包并解压:
+ ```shell
+wget https://www.python.org/ftp/python/3.8.0/Python-3.8.0.tar.xz
+tar Jxf Python-3.8.0.tar.xz
+```
+ 编译安装对应的 python 包:
+ ```shell
+cd Python-3.8.0
+./configure --prefix=/usr/local/python3
+make
+sudo make install
+python3 --version
+```
\ No newline at end of file
diff --git a/src/zh/UserGuide/Master/Tree/User-Manual/AINode_apache.md b/src/zh/UserGuide/Master/Tree/User-Manual/AINode_apache.md
new file mode 100644
index 000000000..2da3c824d
--- /dev/null
+++ b/src/zh/UserGuide/Master/Tree/User-Manual/AINode_apache.md
@@ -0,0 +1,650 @@
+
+
+# AI能力(AINode)
+
+AINode 是 IoTDB 在ConfigNode、DataNode后提供的第三种内生节点,该节点通过与 IoTDB 集群的 DataNode、ConfigNode 的交互,扩展了对时间序列进行机器学习分析的能力,支持从外部引入已有机器学习模型进行注册,并使用注册的模型在指定时序数据上通过简单 SQL 语句完成时序分析任务的过程,将模型的创建、管理及推理融合在数据库引擎中。目前已提供常见时序分析场景(例如预测与异常检测)的机器学习算法或自研模型。
+
+系统架构如下图所示:
+::: center
+
+:::
+三种节点的职责如下:
+
+- **ConfigNode**:负责保存和管理模型的元信息;负责分布式节点管理。
+- **DataNode**:负责接收并解析用户的 SQL请求;负责存储时间序列数据;负责数据的预处理计算。
+- **AINode**:负责模型文件的导入创建以及模型推理。
+
+## 优势特点
+
+与单独构建机器学习服务相比,具有以下优势:
+
+- **简单易用**:无需使用 Python 或 Java 编程,使用 SQL 语句即可完成机器学习模型管理与推理的完整流程。如创建模型可使用CREATE MODEL语句、使用模型进行推理可使用CALL INFERENCE(...)语句等,使用更加简单便捷。
+
+- **避免数据迁移**:使用 IoTDB 原生机器学习可以将存储在 IoTDB 中的数据直接应用于机器学习模型的推理,无需将数据移动到单独的机器学习服务平台,从而加速数据处理、提高安全性并降低成本。
+
+
+
+- **内置先进算法**:支持业内领先机器学习分析算法,覆盖典型时序分析任务,为时序数据库赋能原生数据分析能力。如:
+ - **时间序列预测(Time Series Forecasting)**:从过去时间序列中学习变化模式;从而根据给定过去时间的观测值,输出未来序列最可能的预测。
+ - **时序异常检测(Anomaly Detection for Time Series)**:在给定的时间序列数据中检测和识别异常值,帮助发现时间序列中的异常行为。
+ - **时间序列标注(Time Series Annotation)**:为每个数据点或特定时间段添加额外的信息或标记,例如事件发生、异常点、趋势变化等,以便更好地理解和分析数据。
+
+
+## 基本概念
+
+- **模型(Model)**:机器学习模型,以时序数据作为输入,输出分析任务的结果或决策。模型是AINode 的基本管理单元,支持模型的增(注册)、删、查、用(推理)。
+- **创建(Create)**: 将外部设计或训练好的模型文件或算法加载到 AINode 中,由 IoTDB 统一管理与使用。
+- **推理(Inference)**:使用创建的模型在指定时序数据上完成该模型适用的时序分析任务的过程。
+- **内置能力(Built-in)**:AINode 自带常见时序分析场景(例如预测与异常检测)的机器学习算法或自研模型。
+
+::: center
+
+:::
+
+## 安装部署
+
+AINode 的部署可参考文档 [部署指导](../Deployment-and-Maintenance/AINode_Deployment_apache.md#ainode-部署) 章节。
+
+## 使用指导
+
+AINode 对时序数据相关的深度学习模型提供了模型创建及删除的流程,内置模型无需创建及删除,可直接使用,并且在完成推理后创建的内置模型实例将自动销毁。
+
+### 注册模型
+
+通过指定模型输入输出的向量维度,可以注册训练好的深度学习模型,从而用于模型推理。
+
+符合以下内容的模型可以注册到AINode中:
+ 1. AINode 支持使用 PyTorch 2.1.0、2.2.0 版本训练的模型,需避免使用 2.2.0 以上版本的特性。
+ 2. AINode支持使用PyTorch JIT存储的模型,模型文件需要包含模型的参数和结构。
+ 3. 模型输入序列可以包含一列或多列,若有多列,需要和模型能力、模型配置文件对应。
+ 4. 模型的输入输出维度必须在`config.yaml`配置文件中明确定义。使用模型时,必须严格按照`config.yaml`配置文件中定义的输入输出维度。如果输入输出列数不匹配配置文件,将会导致错误。
+
+下方为模型注册的SQL语法定义。
+
+```SQL
+create model <model_name> using uri <uri>
+```
+
+SQL中参数的具体含义如下:
+
+- model_name:模型的全局唯一标识,不可重复。模型名称具备以下约束:
+
+ - 允许出现标识符 [ 0-9 a-z A-Z _ ] (字母,数字,下划线)
+ - 长度限制为2-64字符
+ - 大小写敏感
+
+- uri:模型注册文件的资源路径,路径下应包含**模型权重model.pt文件和模型的元数据描述文件config.yaml**
+
+ - 模型权重文件:深度学习模型训练完成后得到的权重文件,目前支持pytorch训练得到的.pt文件
+
+ - yaml元数据描述文件:模型注册时需要提供的与模型结构有关的参数,其中必须包含模型的输入输出维度用于模型推理:
+
+ - | **参数名** | **参数描述** | **示例** |
+ | ------------ | ---------------------------- | -------- |
+ | input_shape | 模型输入的行列,用于模型推理 | [96,2] |
+ | output_shape | 模型输出的行列,用于模型推理 | [48,2] |
+
+ - 除了模型推理外,还可以指定模型输入输出的数据类型:
+
+ - | **参数名** | **参数描述** | **示例** |
+ | ----------- | ------------------ | --------------------- |
+ | input_type | 模型输入的数据类型 | ['float32','float32'] |
+ | output_type | 模型输出的数据类型 | ['float32','float32'] |
+
+ - 除此之外,可以额外指定备注信息用于在模型管理时进行展示
+
+ - | **参数名** | **参数描述** | **示例** |
+ | ---------- | ---------------------------------------------- | ------------------------------------------- |
+ | attributes | 可选,用户自行设定的模型备注信息,用于模型展示 | 'model_type': 'dlinear','kernel_size': '25' |
+
+
+除了本地模型文件的注册,还可以通过URI来指定远程资源路径来进行注册,使用开源的模型仓库(例如HuggingFace)。
+
+#### 示例
+
+在当前的example文件夹下,包含model.pt和config.yaml文件,model.pt为训练得到,config.yaml的内容如下:
+
+```YAML
+configs:
+ # 必选项
+ input_shape: [96, 2] # 表示模型接收的数据为96行x2列
+ output_shape: [48, 2] # 表示模型输出的数据为48行x2列
+
+ # 可选项 默认为全部float32,列数为shape对应的列数
+ input_type: ["int64","int64"] #输入对应的数据类型,需要与输入列数匹配
+ output_type: ["text","int64"] #输出对应的数据类型,需要与输出列数匹配
+
+attributes: # 可选项 为用户自定义的备注信息
+ 'model_type': 'dlinear'
+ 'kernel_size': '25'
+```
+
+指定该文件夹作为加载路径就可以注册该模型
+
+```SQL
+IoTDB> create model dlinear_example using uri "file://./example"
+```
+
+也可以从huggingFace上下载对应的模型文件进行注册
+
+```SQL
+IoTDB> create model dlinear_example using uri "https://huggingface.com/IoTDBML/dlinear/"
+```
+
+SQL执行后会异步进行注册的流程,可以通过模型展示查看模型的注册状态(见模型展示章节),注册成功的耗时主要受到模型文件大小的影响。
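+
+例如,可以通过如下语句单独查看上述模型的注册状态(示意,模型名以实际注册为准):
+
+```SQL
+show models dlinear_example
+```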
+
+模型注册完成后,就可以通过使用正常查询的方式调用具体函数,进行模型推理。
+
+### 查看模型
+
+注册成功的模型可以通过show models指令查询模型的具体信息。其SQL定义如下:
+
+```SQL
+show models
+
+show models <model_id>
+```
+
+除了直接展示所有模型的信息外,可以指定model id来查看某一具体模型的信息。模型展示的结果中包含如下信息:
+
+| **ModelId** | **State** | **Configs** | **Attributes** |
+| ------------ | ------------------------------------- | ---------------------------------------------- | -------------- |
+| 模型唯一标识 | 模型注册状态(LOADING,ACTIVE,DROPPING) | inputShape, outputShape, inputTypes, outputTypes | 模型备注信息 |
+
+其中,State 用于展示当前模型的注册状态,包含以下几种:
+
+- **LOADING**:已经在 ConfigNode 中添加对应的模型元信息,正将模型文件传输到 AINode 节点上
+- **ACTIVE**:模型已经设置完成,处于可用状态
+- **DROPPING**:模型删除中,正在从 ConfigNode 以及 AINode 处删除模型相关信息
+- **UNAVAILABLE**:模型创建失败,可以通过 drop model 删除创建失败的 model_name
+
+#### 示例
+
+```SQL
+IoTDB> show models
+
+
++---------------------+--------------------------+-----------+----------------------------+-----------------------+
+| ModelId| ModelType| State| Configs| Notes|
++---------------------+--------------------------+-----------+----------------------------+-----------------------+
+| dlinear_example| USER_DEFINED| ACTIVE| inputShape:[96,2]| |
+| | | | outputShape:[48,2]| |
+| | | | inputDataType:[float,float]| |
+| | | |outputDataType:[float,float]| |
+| _STLForecaster| BUILT_IN_FORECAST| ACTIVE| |Built-in model in IoTDB|
+| _NaiveForecaster| BUILT_IN_FORECAST| ACTIVE| |Built-in model in IoTDB|
+| _ARIMA| BUILT_IN_FORECAST| ACTIVE| |Built-in model in IoTDB|
+|_ExponentialSmoothing| BUILT_IN_FORECAST| ACTIVE| |Built-in model in IoTDB|
+| _GaussianHMM|BUILT_IN_ANOMALY_DETECTION| ACTIVE| |Built-in model in IoTDB|
+| _GMMHMM|BUILT_IN_ANOMALY_DETECTION| ACTIVE| |Built-in model in IoTDB|
+| _Stray|BUILT_IN_ANOMALY_DETECTION| ACTIVE| |Built-in model in IoTDB|
++---------------------+--------------------------+-----------+----------------------------+-----------------------+
+```
+
+我们前面已经注册了对应的模型,可以通过上述指令查看模型状态,ACTIVE 表明模型注册成功,可用于推理。
+
+### 删除模型
+
+对于注册成功的模型,用户可以通过SQL进行删除。该操作除了删除configNode上的元信息外,还会删除所有AINode下的相关模型文件。其SQL如下:
+
+```SQL
+drop model <model_name>
+```
+
+需要指定已经成功注册的模型model_name来删除对应的模型。由于模型删除涉及多个节点上的数据删除,操作不会立即完成,此时模型的状态为DROPPING,该状态的模型不能用于模型推理。
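+
+例如,删除前文注册的模型(示意,假设模型 `dlinear_example` 已注册成功):
+
+```SQL
+drop model dlinear_example
+```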
+
+### 使用内置模型推理
+
+SQL语法如下:
+
+
+```SQL
+call inference(<built_in_model_name>, sql[, <parameterName>=<parameterValue>])
+```
+
+内置模型推理无需注册流程,通过call关键字,调用inference函数就可以使用模型的推理功能,其对应的参数介绍如下:
+
+- **built_in_model_name:** 内置模型名称
+- **parameterName**:参数名
+- **parameterValue**:参数值
+
+#### 内置模型及参数说明
+
+目前已内置如下机器学习模型,具体参数说明请参考以下链接。
+
+| 模型 | built_in_model_name | 任务类型 | 参数说明 |
+| -------------------- | --------------------- | -------- | ------------------------------------------------------------ |
+| Arima | _Arima | 预测 | [Arima参数说明](https://www.sktime.net/en/latest/api_reference/auto_generated/sktime.forecasting.arima.ARIMA.html?highlight=Arima) |
+| STLForecaster | _STLForecaster | 预测 | [STLForecaster参数说明](https://www.sktime.net/en/latest/api_reference/auto_generated/sktime.forecasting.trend.STLForecaster.html#sktime.forecasting.trend.STLForecaster) |
+| NaiveForecaster | _NaiveForecaster | 预测 | [NaiveForecaster参数说明](https://www.sktime.net/en/latest/api_reference/auto_generated/sktime.forecasting.naive.NaiveForecaster.html#naiveforecaster) |
+| ExponentialSmoothing | _ExponentialSmoothing | 预测 | [ExponentialSmoothing参数说明](https://www.sktime.net/en/latest/api_reference/auto_generated/sktime.forecasting.exp_smoothing.ExponentialSmoothing.html) |
+| GaussianHMM | _GaussianHMM | 标注 | [GaussianHMM参数说明](https://www.sktime.net/en/latest/api_reference/auto_generated/sktime.annotation.hmm_learn.gaussian.GaussianHMM.html) |
+| GMMHMM | _GMMHMM | 标注 | [GMMHMM参数说明](https://www.sktime.net/en/latest/api_reference/auto_generated/sktime.annotation.hmm_learn.gmm.GMMHMM.html) |
+| Stray | _Stray | 异常检测 | [Stray参数说明](https://www.sktime.net/en/latest/api_reference/auto_generated/sktime.annotation.stray.STRAY.html) |
+
+#### 示例
+
+下面是使用内置模型推理的一个操作示例,使用内置的Stray模型进行异常检测算法,输入为`[144,1]`,输出为`[144,1]`,我们通过SQL使用其进行推理。
+
+```SQL
+IoTDB> select * from root.eg.airline
++-----------------------------+------------------+
+| Time|root.eg.airline.s0|
++-----------------------------+------------------+
+|1949-01-31T00:00:00.000+08:00| 224.0|
+|1949-02-28T00:00:00.000+08:00| 118.0|
+|1949-03-31T00:00:00.000+08:00| 132.0|
+|1949-04-30T00:00:00.000+08:00| 129.0|
+......
+|1960-09-30T00:00:00.000+08:00| 508.0|
+|1960-10-31T00:00:00.000+08:00| 461.0|
+|1960-11-30T00:00:00.000+08:00| 390.0|
+|1960-12-31T00:00:00.000+08:00| 432.0|
++-----------------------------+------------------+
+Total line number = 144
+
+IoTDB> call inference(_Stray, "select s0 from root.eg.airline", k=2)
++-------+
+|output0|
++-------+
+| 0|
+| 0|
+| 0|
+| 0|
+......
+| 1|
+| 1|
+| 0|
+| 0|
+| 0|
+| 0|
++-------+
+Total line number = 144
+```
+
+### 使用深度学习模型推理
+
+SQL语法如下:
+
+```SQL
+call inference(<model_name>, sql[, window=<window_function>])
+
+
+window_function:
+ head(window_size)
+ tail(window_size)
+ count(window_size,sliding_step)
+```
+
+在完成模型的注册后,通过call关键字,调用inference函数就可以使用模型的推理功能,其对应的参数介绍如下:
+
+- **model_name**: 对应一个已经注册的模型
+- **sql**:sql查询语句,查询的结果作为模型的输入进行模型推理。查询的结果中行列的维度需要与具体模型config中指定的大小相匹配。(这里的sql不建议使用`SELECT *`子句,因为在IoTDB中,`*`并不会对列进行排序,因此列的顺序是未定义的,可以使用`SELECT s0,s1`的方式确保列的顺序符合模型输入的预期)
+- **window_function**: 推理过程中可以使用的窗口函数,目前提供三种类型的窗口函数用于辅助模型推理:
+ - **head(window_size)**: 获取数据中最前的window_size个点用于模型推理,该窗口可用于数据裁剪
+ 
+
+ - **tail(window_size)**:获取数据中最后的window_size个点用于模型推理,该窗口可用于数据裁剪
+ 
+
+ - **count(window_size, sliding_step)**:基于点数的滑动窗口,每个窗口的数据会分别通过模型进行推理,如下图示例所示,window_size为2的窗口函数将输入数据集分为三个窗口,每个窗口分别进行推理运算生成结果。该窗口可用于连续推理
+ 
+
+**说明1: window可以用来解决sql查询结果和模型的输入行数要求不一致时的问题,对行进行裁剪。需要注意的是,当列数不匹配或是行数直接少于模型需求时,推理无法进行,会返回错误信息。**
+
+**说明2: 在深度学习应用中,经常将时间戳衍生特征(数据中的时间列)作为生成式任务的协变量,一同输入到模型中以提升模型的效果,但是在模型的输出结果中一般不包含时间列。为了保证实现的通用性,模型推理结果只对应模型的真实输出,如果模型不输出时间列,则结果中不会包含。**
+
+
+#### 示例
+
+下面是使用深度学习模型推理的一个操作示例,针对上面提到的输入为`[96,2]`,输出为`[48,2]`的`dlinear`预测模型,我们通过SQL使用其进行推理。
+
+```Shell
+IoTDB> select s1,s2 from root.**
++-----------------------------+-------------------+-------------------+
+| Time| root.eg.etth.s0| root.eg.etth.s1|
++-----------------------------+-------------------+-------------------+
+|1990-01-01T00:00:00.000+08:00| 0.7855| 1.611|
+|1990-01-02T00:00:00.000+08:00| 0.7818| 1.61|
+|1990-01-03T00:00:00.000+08:00| 0.7867| 1.6293|
+|1990-01-04T00:00:00.000+08:00| 0.786| 1.637|
+|1990-01-05T00:00:00.000+08:00| 0.7849| 1.653|
+|1990-01-06T00:00:00.000+08:00| 0.7866| 1.6537|
+|1990-01-07T00:00:00.000+08:00| 0.7886| 1.662|
+......
+|1990-03-31T00:00:00.000+08:00| 0.7585| 1.678|
+|1990-04-01T00:00:00.000+08:00| 0.7587| 1.6763|
+|1990-04-02T00:00:00.000+08:00| 0.76| 1.6813|
+|1990-04-03T00:00:00.000+08:00| 0.7669| 1.684|
+|1990-04-04T00:00:00.000+08:00| 0.7645| 1.677|
+|1990-04-05T00:00:00.000+08:00| 0.7625| 1.68|
+|1990-04-06T00:00:00.000+08:00| 0.7617| 1.6917|
++-----------------------------+-------------------+-------------------+
+Total line number = 96
+
+IoTDB> call inference(dlinear_example,"select s0,s1 from root.**")
++--------------------------------------------+-----------------------------+
+| _result_0| _result_1|
++--------------------------------------------+-----------------------------+
+| 0.726302981376648| 1.6549958229064941|
+| 0.7354921698570251| 1.6482787370681763|
+| 0.7238251566886902| 1.6278168201446533|
+......
+| 0.7692174911499023| 1.654654049873352|
+| 0.7685555815696716| 1.6625318765640259|
+| 0.7856493592262268| 1.6508299350738525|
++--------------------------------------------+-----------------------------+
+Total line number = 48
+```
+
+#### 使用tail/head窗口函数的示例
+
+当数据量不定且想要取96行最新数据用于推理时,可以使用对应的窗口函数tail。head函数的用法与其类似,不同点在于其取的是最早的96个点。
+
+```Shell
+IoTDB> select s1,s2 from root.**
++-----------------------------+-------------------+-------------------+
+| Time| root.eg.etth.s0| root.eg.etth.s1|
++-----------------------------+-------------------+-------------------+
+|1988-01-01T00:00:00.000+08:00| 0.7355| 1.211|
+......
+|1990-01-01T00:00:00.000+08:00| 0.7855| 1.611|
+|1990-01-02T00:00:00.000+08:00| 0.7818| 1.61|
+|1990-01-03T00:00:00.000+08:00| 0.7867| 1.6293|
+|1990-01-04T00:00:00.000+08:00| 0.786| 1.637|
+|1990-01-05T00:00:00.000+08:00| 0.7849| 1.653|
+|1990-01-06T00:00:00.000+08:00| 0.7866| 1.6537|
+|1990-01-07T00:00:00.000+08:00| 0.7886| 1.662|
+......
+|1990-03-31T00:00:00.000+08:00| 0.7585| 1.678|
+|1990-04-01T00:00:00.000+08:00| 0.7587| 1.6763|
+|1990-04-02T00:00:00.000+08:00| 0.76| 1.6813|
+|1990-04-03T00:00:00.000+08:00| 0.7669| 1.684|
+|1990-04-04T00:00:00.000+08:00| 0.7645| 1.677|
+|1990-04-05T00:00:00.000+08:00| 0.7625| 1.68|
+|1990-04-06T00:00:00.000+08:00| 0.7617| 1.6917|
++-----------------------------+-------------------+-------------------+
+Total line number = 996
+
+IoTDB> call inference(dlinear_example,"select s0,s1 from root.**",window=tail(96))
++--------------------------------------------+-----------------------------+
+| _result_0| _result_1|
++--------------------------------------------+-----------------------------+
+| 0.726302981376648| 1.6549958229064941|
+| 0.7354921698570251| 1.6482787370681763|
+| 0.7238251566886902| 1.6278168201446533|
+......
+| 0.7692174911499023| 1.654654049873352|
+| 0.7685555815696716| 1.6625318765640259|
+| 0.7856493592262268| 1.6508299350738525|
++--------------------------------------------+-----------------------------+
+Total line number = 48
+```
+
+#### 使用count窗口函数的示例
+
+该窗口主要用于计算式任务,当任务对应的模型一次只能处理固定行数的数据,而最终想要的却是多组预测结果时,使用该窗口函数可以基于点数滑动窗口进行连续推理。假设我们现在有一个异常检测模型 anomaly_example(input: [24,2], output: [1,1]),对每 24 行数据会生成一个 0/1 的标签,其使用示例如下:
+
+```Shell
+IoTDB> select s1,s2 from root.**
++-----------------------------+-------------------+-------------------+
+| Time| root.eg.etth.s0| root.eg.etth.s1|
++-----------------------------+-------------------+-------------------+
+|1990-01-01T00:00:00.000+08:00| 0.7855| 1.611|
+|1990-01-02T00:00:00.000+08:00| 0.7818| 1.61|
+|1990-01-03T00:00:00.000+08:00| 0.7867| 1.6293|
+|1990-01-04T00:00:00.000+08:00| 0.786| 1.637|
+|1990-01-05T00:00:00.000+08:00| 0.7849| 1.653|
+|1990-01-06T00:00:00.000+08:00| 0.7866| 1.6537|
+|1990-01-07T00:00:00.000+08:00| 0.7886| 1.662|
+......
+|1990-03-31T00:00:00.000+08:00| 0.7585| 1.678|
+|1990-04-01T00:00:00.000+08:00| 0.7587| 1.6763|
+|1990-04-02T00:00:00.000+08:00| 0.76| 1.6813|
+|1990-04-03T00:00:00.000+08:00| 0.7669| 1.684|
+|1990-04-04T00:00:00.000+08:00| 0.7645| 1.677|
+|1990-04-05T00:00:00.000+08:00| 0.7625| 1.68|
+|1990-04-06T00:00:00.000+08:00| 0.7617| 1.6917|
++-----------------------------+-------------------+-------------------+
+Total line number = 96
+
+IoTDB> call inference(anomaly_example,"select s0,s1 from root.**",window=count(24,24))
++-------------------------+
+| _result_0|
++-------------------------+
+| 0|
+| 1|
+| 1|
+| 0|
++-------------------------+
+Total line number = 4
+```
+
+其中结果集中每行的标签对应每24行数据为一组,输入该异常检测模型后的输出。
+
+## 权限管理
+
+使用 AINode 相关功能时,可以利用 IoTDB 本身的鉴权体系进行权限管理:用户只有在具备 USE_MODEL 权限时,才可以使用模型管理相关功能;使用推理功能时,用户还需要具备访问推理 SQL 所涉及源序列的权限。
+
+| 权限名称 | 权限范围 | 管理员用户(默认ROOT) | 普通用户 | 路径相关 |
+| --------- | --------------------------------- | ---------------------- | -------- | -------- |
+| USE_MODEL | create model / show models / drop model | √ | √ | x |
+| READ_DATA | call inference | √ | √ | √ |
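+
+例如,管理员可以在普通用户使用模型管理或推理功能之前,为其授予上述权限。以下语句仅为示意:`user1` 为假设的用户名,GRANT 的具体语法可能随 IoTDB 版本略有差异。
+
+```SQL
+-- 授予 user1 模型管理权限(create model / show models / drop model)
+GRANT USE_MODEL ON root.** TO USER user1;
+-- 授予 user1 读取推理 SQL 所涉及源序列的权限
+GRANT READ_DATA ON root.eg.** TO USER user1;
+```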
+
+## 实际案例
+
+### 电力负载预测
+
+在部分工业场景下,会存在预测电力负载的需求,预测结果可用于优化电力供应、节约能源和资源、支持规划和扩展以及增强电力系统的可靠性。
+
+我们所使用的 ETTh1 的测试集的数据为[ETTh1](https://alioss.timecho.com/docs/img/ETTh1.csv)。
+
+
+包含间隔1h采集一次的电力数据,每条数据由负载和油温构成,分别为:High UseFul Load, High UseLess Load, Middle UseFul Load, Middle UseLess Load, Low UseFul Load, Low UseLess Load, Oil Temperature。
+
+在该数据集上,IoTDB-ML的模型推理功能可以通过以往高中低三种负载的数值和对应时间戳油温的关系,预测未来一段时间内的油温,赋能电网变压器的自动调控和监视。
+
+#### 步骤一:数据导入
+
+用户可以使用tools文件夹中的`import-csv.sh` 向 IoTDB 中导入 ETT 数据集
+
+```Bash
+bash ./import-csv.sh -h 127.0.0.1 -p 6667 -u root -pw root -f ../../ETTh1.csv
+```
+
+#### 步骤二:模型导入
+
+我们可以在iotdb-cli 中输入以下SQL从 huggingface 上拉取一个已经训练好的模型进行注册,用于后续的推理。
+
+```SQL
+create model dlinear using uri 'https://huggingface.co/hvlgo/dlinear/tree/main'
+```
+
+该模型基于较为轻量化的深度模型DLinear训练而得,能够以相对快的推理速度尽可能多地捕捉到序列内部的变化趋势和变量间的数据变化关系,相较于其他更深的模型更适用于快速实时预测。
+
+#### 步骤三:模型推理
+
+```Shell
+IoTDB> select s0,s1,s2,s3,s4,s5,s6 from root.eg.etth LIMIT 96
++-----------------------------+---------------+---------------+---------------+---------------+---------------+---------------+---------------+
+| Time|root.eg.etth.s0|root.eg.etth.s1|root.eg.etth.s2|root.eg.etth.s3|root.eg.etth.s4|root.eg.etth.s5|root.eg.etth.s6|
++-----------------------------+---------------+---------------+---------------+---------------+---------------+---------------+---------------+
+|2017-10-20T00:00:00.000+08:00| 10.449| 3.885| 8.706| 2.025| 2.041| 0.944| 8.864|
+|2017-10-20T01:00:00.000+08:00| 11.119| 3.952| 8.813| 2.31| 2.071| 1.005| 8.442|
+|2017-10-20T02:00:00.000+08:00| 9.511| 2.88| 7.533| 1.564| 1.949| 0.883| 8.16|
+|2017-10-20T03:00:00.000+08:00| 9.645| 2.21| 7.249| 1.066| 1.828| 0.914| 7.949|
+......
+|2017-10-23T20:00:00.000+08:00| 8.105| 0.938| 4.371| -0.569| 3.533| 1.279| 9.708|
+|2017-10-23T21:00:00.000+08:00| 7.167| 1.206| 4.087| -0.462| 3.107| 1.432| 8.723|
+|2017-10-23T22:00:00.000+08:00| 7.1| 1.34| 4.015| -0.32| 2.772| 1.31| 8.864|
+|2017-10-23T23:00:00.000+08:00| 9.176| 2.746| 7.107| 1.635| 2.65| 1.097| 9.004|
++-----------------------------+---------------+---------------+---------------+---------------+---------------+---------------+---------------+
+Total line number = 96
+
+IoTDB> call inference(dlinear_example, "select s0,s1,s2,s3,s4,s5,s6 from root.eg.etth", window=head(96))
++-----------+----------+----------+------------+---------+----------+----------+
+| output0| output1| output2| output3| output4| output5| output6|
++-----------+----------+----------+------------+---------+----------+----------+
+| 10.319546| 3.1450553| 7.877341| 1.5723765|2.7303758| 1.1362307| 8.867775|
+| 10.443649| 3.3286757| 7.8593454| 1.7675098| 2.560634| 1.1177158| 8.920919|
+| 10.883752| 3.2341104| 8.47036| 1.6116762|2.4874182| 1.1760603| 8.798939|
+......
+| 8.0115595| 1.2995274| 6.9900327|-0.098746896| 3.04923| 1.176214| 9.548782|
+| 8.612427| 2.5036244| 5.6790237| 0.66474205|2.8870275| 1.2051733| 9.330128|
+| 10.096699| 3.399722| 6.9909| 1.7478468|2.7642853| 1.1119363| 9.541455|
++-----------+----------+----------+------------+---------+----------+----------+
+Total line number = 48
+```
+
+我们将对油温的预测的结果和真实结果进行对比,可以得到以下的图像。
+
+图中10/24 00:00之前的数据为输入模型的过去数据,10/24 00:00后的蓝色线条为模型给出的油温预测结果,而红色为数据集中实际的油温数据(用于进行对比)。
+
+
+
+可以看到,我们使用了过去96个小时(4天)的六个负载信息和对应时间油温的关系,基于之前学习到的序列间相互关系对未来48个小时(2天)的油温这一数据的可能变化进行了建模,可以看到可视化后预测曲线与实际结果在趋势上保持了较高程度的一致性。
+
+### 功率预测
+
+变电站需要对电流、电压、功率等数据进行电力监控,用于检测潜在的电网问题、识别电力系统中的故障、有效管理电网负载以及分析电力系统的性能和趋势等。
+
+我们利用某变电站中的电流、电压和功率等数据构成了真实场景下的数据集。该数据集包括变电站近四个月时间跨度,每5 - 6s 采集一次的 A相电压、B相电压、C相电压等数据。
+
+测试集数据内容为[data](https://alioss.timecho.com/docs/img/data.csv)。
+
+在该数据集上,IoTDB-ML的模型推理功能可以通过以往A相电压,B相电压和C相电压的数值和对应时间戳,预测未来一段时间内的C相电压,赋能变电站的监视管理。
+
+#### 步骤一:数据导入
+
+用户可以使用tools文件夹中的`import-csv.sh` 导入数据集
+
+```Bash
+bash ./import-csv.sh -h 127.0.0.1 -p 6667 -u root -pw root -f ../../data.csv
+```
+
+#### 步骤二:模型导入
+
+我们可以在iotdb-cli 中选择内置模型或已经注册好的模型用于后续的推理。
+
+我们采用内置模型STLForecaster进行预测,STLForecaster 是一个基于 statsmodels 库中 STL 实现的时间序列预测方法。
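+
+内置模型无需注册即可使用。如需在推理前确认 `_STLForecaster` 处于可用状态,可以先查看模型列表(可选的示意操作):
+
+```SQL
+show models
+```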
+
+#### 步骤三:模型推理
+
+```Shell
+IoTDB> select * from root.eg.voltage limit 96
++-----------------------------+------------------+------------------+------------------+
+| Time|root.eg.voltage.s0|root.eg.voltage.s1|root.eg.voltage.s2|
++-----------------------------+------------------+------------------+------------------+
+|2023-02-14T20:38:32.000+08:00| 2038.0| 2028.0| 2041.0|
+|2023-02-14T20:38:38.000+08:00| 2014.0| 2005.0| 2018.0|
+|2023-02-14T20:38:44.000+08:00| 2014.0| 2005.0| 2018.0|
+......
+|2023-02-14T20:47:52.000+08:00| 2024.0| 2016.0| 2027.0|
+|2023-02-14T20:47:57.000+08:00| 2024.0| 2016.0| 2027.0|
+|2023-02-14T20:48:03.000+08:00| 2024.0| 2016.0| 2027.0|
++-----------------------------+------------------+------------------+------------------+
+Total line number = 96
+
+IoTDB> call inference(_STLForecaster, "select s0,s1,s2 from root.eg.voltage", window=head(96),predict_length=48)
++---------+---------+---------+
+| output0| output1| output2|
++---------+---------+---------+
+|2026.3601|2018.2953|2029.4257|
+|2019.1538|2011.4361|2022.0888|
+|2025.5074|2017.4522|2028.5199|
+......
+
+|2022.2336|2015.0290|2025.1023|
+|2015.7241|2008.8975|2018.5085|
+|2022.0777|2014.9136|2024.9396|
+|2015.5682|2008.7821|2018.3458|
++---------+---------+---------+
+Total line number = 48
+```
+我们将对C相电压的预测的结果和真实结果进行对比,可以得到以下的图像。
+
+图中 02/14 20:48 之前的数据为输入模型的过去数据, 02/14 20:48 后的蓝色线条为模型给出的C相电压预测结果,而红色为数据集中实际的C相电压数据(用于进行对比)。
+
+
+
+可以看到,我们使用了过去10分钟的电压的数据,基于之前学习到的序列间相互关系对未来5分钟的C相电压这一数据的可能变化进行了建模,可以看到可视化后预测曲线与实际结果在趋势上保持了一定的同步性。
+
+### 异常检测
+
+在民航交通运输业,存在着对乘机旅客数量进行异常检测的需求。异常检测的结果可用于指导调整航班的调度,以使得企业获得更大效益。
+
+Airline Passengers 是一个时间序列数据集,记录了 1949 年至 1960 年期间的国际航空乘客数量,间隔一个月采样一次。该数据集共含一条时间序列,数据集为 [airline](https://alioss.timecho.com/docs/img/airline.csv)。
+在该数据集上,IoTDB-ML的模型推理功能可以通过捕捉序列的变化规律以对序列时间点进行异常检测,赋能交通运输业。
+
+#### 步骤一:数据导入
+
+用户可以使用tools文件夹中的`import-csv.sh` 导入数据集
+
+```Bash
+bash ./import-csv.sh -h 127.0.0.1 -p 6667 -u root -pw root -f ../../airline.csv
+```
+
+#### 步骤二:模型推理
+
+IoTDB内置有部分可以直接使用的机器学习算法,使用其中的异常检测算法进行预测的样例如下:
+
+```Shell
+IoTDB> select * from root.eg.airline
++-----------------------------+------------------+
+| Time|root.eg.airline.s0|
++-----------------------------+------------------+
+|1949-01-31T00:00:00.000+08:00| 224.0|
+|1949-02-28T00:00:00.000+08:00| 118.0|
+|1949-03-31T00:00:00.000+08:00| 132.0|
+|1949-04-30T00:00:00.000+08:00| 129.0|
+......
+|1960-09-30T00:00:00.000+08:00| 508.0|
+|1960-10-31T00:00:00.000+08:00| 461.0|
+|1960-11-30T00:00:00.000+08:00| 390.0|
+|1960-12-31T00:00:00.000+08:00| 432.0|
++-----------------------------+------------------+
+Total line number = 144
+
+IoTDB> call inference(_Stray, "select s0 from root.eg.airline", k=2)
++-------+
+|output0|
++-------+
+| 0|
+| 0|
+| 0|
+| 0|
+......
+| 1|
+| 1|
+| 0|
+| 0|
+| 0|
+| 0|
++-------+
+Total line number = 144
+```
+
+我们将检测为异常的结果进行绘制,可以得到以下图像。其中蓝色曲线为原时间序列,用红色点特殊标注的时间点为算法检测为异常的时间点。
+
+
+
+可以看到,Stray模型对输入序列变化进行了建模,成功检测出出现异常的时间点。
\ No newline at end of file
diff --git a/src/zh/UserGuide/V1.3.3/Deployment-and-Maintenance/AINode_Deployment_apache.md b/src/zh/UserGuide/V1.3.3/Deployment-and-Maintenance/AINode_Deployment_apache.md
new file mode 100644
index 000000000..4c4710469
--- /dev/null
+++ b/src/zh/UserGuide/V1.3.3/Deployment-and-Maintenance/AINode_Deployment_apache.md
@@ -0,0 +1,512 @@
+
+# AINode 部署
+
+## AINode介绍
+
+### 能力介绍
+
+AINode 是 IoTDB 在 ConfigNode、DataNode 后提供的第三种内生节点,该节点通过与 IoTDB 集群的 DataNode、ConfigNode 的交互,扩展了对时间序列进行机器学习分析的能力,支持从外部引入已有机器学习模型进行注册,并使用注册的模型在指定时序数据上通过简单 SQL 语句完成时序分析任务的过程,将模型的创建、管理及推理融合在数据库引擎中。目前已提供常见时序分析场景(例如预测与异常检测)的机器学习算法或自研模型。
+
+### 交付方式
+ 是 IoTDB 集群外的额外套件,独立安装包,独立激活(如需试用或使用,请联系天谋科技商务或技术支持)。
+
+### 部署模式
+
+

+

+
+
+## 安装准备
+
+### 安装包获取
+
+ 用户可以下载AINode的软件安装包,下载并解压后即完成AINode的安装。
+
+ 安装包(`iotdb-enterprise-ainode-<版本号>.zip`)解压后的目录结构如下:
+| **目录** | **类型** | **说明** |
+| ------------ | -------- | ------------------------------------------------ |
+| lib | 文件夹 | AINode编译后的二进制可执行文件以及相关的代码依赖 |
+| sbin | 文件夹 | AINode的运行脚本,可以启动,移除和停止AINode |
+| conf | 文件夹 | AINode 的配置文件目录,包含 AINode 的各项配置项 |
+| LICENSE | 文件 | 证书 |
+| NOTICE | 文件 | 提示 |
+| README_ZH.md | 文件 | markdown格式的中文版说明 |
+| `README.md` | 文件 | 使用说明 |
+
+### 环境准备
+- 建议操作环境: Ubuntu, CentOS, MacOS
+
+- 运行环境
+ - 联网环境下 Python >= 3.8即可,且带有 pip 和 venv 工具;非联网环境下需要使用 Python 3.8版本,并从 [此处](https://cloud.tsinghua.edu.cn/d/4c1342f6c272439aa96c/?p=%2Flibs&mode=list) 下载对应操作系统的zip压缩包(注意下载依赖需选择libs文件夹中的zip压缩包,如下图),并将文件夹下的所有文件拷贝到 `iotdb-enterprise-ainode-` 文件夹中 `lib` 文件夹下,并按下文步骤启动AINode。
+
+
+
+ - 环境变量中需存在 Python 解释器且可以通过 `python` 指令直接调用
+ - 建议在 `iotdb-enterprise-ainode-` 文件夹下,新建 Python 解释器 venv 虚拟环境。如安装 3.8.0 版本虚拟环境,语句如下:
+
+ ```shell
+ # 使用 Python 3.8.0 的 venv 模块创建虚拟环境,文件夹名为 venv
+ ../Python-3.8.0/python -m venv venv
+ ```
+## 安装部署及使用
+
+### 安装 AINode
+
+1. 检查Linux的内核架构
+```shell
+ uname -m
+ ```
+
+2. 导入Python环境[下载](https://repo.anaconda.com/miniconda/)
+
+推荐下载py311版本应用,导入至用户根目录下 iotdb专用文件夹 中
+
+3. 切换至iotdb专用文件夹安装Python环境
+
+以 Miniconda3-py311_24.5.0-0-Linux-x86_64 为例:
+
+```shell
+ bash ./Miniconda3-py311_24.5.0-0-Linux-x86_64.sh
+ ```
+> 根据提示键入“回车”、“长按空格”、“回车”、“yes”、“yes”
+> 关闭当前SSH窗口重新连接
+
+ 4. 创建专用环境
+
+```shell
+ conda create -n ainode_py python=3.11.9
+ ```
+
+ 根据提示键入“y”
+
+ 5. 激活专用环境
+
+```shell
+ conda activate ainode_py
+ ```
+
+ 6. 验证Python版本
+
+```shell
+ python --version
+ ```
+ 7. 下载导入AINode到专用文件夹,切换到专用文件夹并解压安装包
+
+```shell
+ unzip iotdb-enterprise-ainode-1.3.3.2.zip
+ ```
+
+ 8. 配置项修改
+
+```shell
+ vi iotdb-enterprise-ainode-1.3.3.2/conf/iotdb-ainode.properties
+ ```
+ 配置项修改:[详细信息](#配置项修改)
+> ain_seed_config_node=iotdb-1:10710(集群通讯节点IP:通讯节点端口)
+> ain_inference_rpc_address=iotdb-3(运行AINode的服务器IP)
+
+ 9. 更换Python源
+
+```shell
+ pip config set global.index-url https://mirrors.aliyun.com/pypi/simple/
+ ```
+
+ 10. 启动AINode节点
+
+```shell
+ nohup bash iotdb-enterprise-ainode-1.3.3.2/sbin/start-ainode.sh > myout.file 2>& 1 &
+ ```
+> 回到系统默认环境:conda deactivate
+
+### 配置项修改
+AINode 支持修改一些必要的参数。可以在 `conf/iotdb-ainode.properties` 文件中找到下列参数并进行持久化的修改:
+
+| **名称** | **描述** | **类型** | **默认值** | **改后生效方式** |
+| :----------------------------- | ------------------------------------------------------------ | ------- | ------------------ | ---------------------------- |
+| cluster_name | AINode 要加入集群的标识 | string | defaultCluster | 仅允许在第一次启动服务前修改 |
+| ain_seed_config_node | AINode 启动时注册的 ConfigNode 地址 | String | 127.0.0.1:10710 | 仅允许在第一次启动服务前修改 |
+| ain_inference_rpc_address | AINode 提供服务与通信的地址 ,内部服务通讯接口 | String | 127.0.0.1 | 仅允许在第一次启动服务前修改 |
+| ain_inference_rpc_port | AINode 提供服务与通信的端口 | String | 10810 | 仅允许在第一次启动服务前修改 |
+| ain_system_dir | AINode 元数据存储路径,相对路径的起始目录与操作系统相关,建议使用绝对路径 | String | data/AINode/system | 仅允许在第一次启动服务前修改 |
+| ain_models_dir | AINode 存储模型文件的路径,相对路径的起始目录与操作系统相关,建议使用绝对路径 | String | data/AINode/models | 仅允许在第一次启动服务前修改 |
+| ain_logs_dir | AINode 存储日志的路径,相对路径的起始目录与操作系统相关,建议使用绝对路径 | String | logs/AINode | 重启后生效 |
+| ain_thrift_compression_enabled | AINode 是否启用 thrift 的压缩机制,0-不启动、1-启动 | Boolean | 0 | 重启后生效 |
+### 启动 AINode
+
+ 在完成 Seed-ConfigNode 的部署后,可以通过添加 AINode 节点来支持模型的注册和推理功能。在配置项中指定 IoTDB 集群的信息后,可以执行相应的指令来启动 AINode,加入 IoTDB 集群。
+
+#### 联网环境启动
+
+##### 启动命令
+
+```shell
+ # 启动命令
+ # Linux 和 MacOS 系统
+ bash sbin/start-ainode.sh
+
+ # Windows 系统
+ sbin\start-ainode.bat
+
+ # 后台启动命令(长期运行推荐)
+ # Linux 和 MacOS 系统
+ nohup bash sbin/start-ainode.sh > myout.file 2>& 1 &
+
+ # Windows 系统
+ nohup bash sbin\start-ainode.bat > myout.file 2>& 1 &
+ ```
+
+##### 详细语法
+
+```shell
+ # 启动命令
+ # Linux 和 MacOS 系统
+ bash sbin/start-ainode.sh -i -r -n
+
+ # Windows 系统
+ sbin\start-ainode.bat -i -r -n
+ ```
+
+##### 参数介绍:
+
+| **名称** | **标签** | **描述** | **是否必填** | **类型** | **默认值** | **输入方式** |
+| ------------------- | ---- | ------------------------------------------------------------ | -------- | ------ | ---------------- | ---------------------- |
+| ain_interpreter_dir | -i | AINode 所安装在的虚拟环境的解释器路径,需要使用绝对路径 | 否 | String | 默认读取环境变量 | 调用时输入或持久化修改 |
+| ain_force_reinstall | -r | 该脚本在检查 AINode 安装情况的时候是否检查版本,如果检查则在版本不对的情况下会强制安装 lib 里的 whl 安装包 | 否 | Bool | false | 调用时输入 |
+| ain_no_dependencies | -n | 指定在安装 AINode 的时候是否安装依赖,如果指定则仅安装 AINode 主程序而不安装依赖。 | 否 | Bool | false | 调用时输入 |
+
+ 如不想每次启动时指定对应参数,也可以在 `conf` 文件夹下的`ainode-env.sh` 和 `ainode-env.bat` 脚本中持久化修改参数(目前支持持久化修改 ain_interpreter_dir 参数)。
+
+ `ainode-env.sh` :
+ ```shell
+ # The default venv environment is used if ain_interpreter_dir is not set. Please use absolute path without quotation mark
+ # ain_interpreter_dir=
+ ```
+ `ainode-env.bat` :
+```shell
+ @REM The default venv environment is used if ain_interpreter_dir is not set. Please use absolute path without quotation mark
+ @REM set ain_interpreter_dir=
+ ```
+ 写入参数值后,解除对应行的注释并保存,即可在下一次执行脚本时生效。
+
+#### 示例
+
+##### 直接启动:
+
+```shell
+ # 启动命令
+ # Linux 和 MacOS 系统
+ bash sbin/start-ainode.sh
+ # Windows 系统
+ sbin\start-ainode.bat
+
+
+ # 后台启动命令(长期运行推荐)
+ # Linux 和 MacOS 系统
+ nohup bash sbin/start-ainode.sh > myout.file 2>& 1 &
+ # Windows 系统
+ nohup bash sbin\start-ainode.bat > myout.file 2>& 1 &
+ ```
+
+##### 更新启动:
+如果 AINode 的版本进行了更新(如更新了 `lib` 文件夹),可使用此命令。首先要保证 AINode 已经停止运行,然后通过 `-r` 参数重启,该参数会根据 `lib` 下的文件重新安装 AINode。
+
+```shell
+ # 更新启动命令
+ # Linux 和 MacOS 系统
+ bash sbin/start-ainode.sh -r
+ # Windows 系统
+ sbin\start-ainode.bat -r
+
+
+ # 后台更新启动命令(长期运行推荐)
+ # Linux 和 MacOS 系统
+ nohup bash sbin/start-ainode.sh -r > myout.file 2>& 1 &
+ # Windows 系统
+ nohup bash sbin\start-ainode.bat -r > myout.file 2>& 1 &
+ ```
+#### 非联网环境启动
+
+##### 启动命令
+
+```shell
+ # 启动命令
+ # Linux 和 MacOS 系统
+ bash sbin/start-ainode.sh
+
+ # Windows 系统
+ sbin\start-ainode.bat
+
+ # 后台启动命令(长期运行推荐)
+ # Linux 和 MacOS 系统
+ nohup bash sbin/start-ainode.sh > myout.file 2>& 1 &
+
+ # Windows 系统
+ nohup bash sbin\start-ainode.bat > myout.file 2>& 1 &
+ ```
+
+##### 详细语法
+
+```shell
+ # 启动命令
+ # Linux 和 MacOS 系统
+ bash sbin/start-ainode.sh -i -r -n
+
+ # Windows 系统
+ sbin\start-ainode.bat -i -r -n
+ ```
+
+##### 参数介绍:
+
+| **名称** | **标签** | **描述** | **是否必填** | **类型** | **默认值** | **输入方式** |
+| ------------------- | ---- | ------------------------------------------------------------ | -------- | ------ | ---------------- | ---------------------- |
+| ain_interpreter_dir | -i | AINode 所安装在的虚拟环境的解释器路径,需要使用绝对路径 | 否 | String | 默认读取环境变量 | 调用时输入或持久化修改 |
+| ain_force_reinstall | -r | 该脚本在检查 AINode 安装情况的时候是否检查版本,如果检查则在版本不对的情况下会强制安装 lib 里的 whl 安装包 | 否 | Bool | false | 调用时输入 |
+
+> 注意:非联网环境下安装失败时,首先检查是否选择了平台对应的安装包,其次确认python版本为3.8(由于下载的安装包限制了python版本,3.7、3.9等其他都不行)
+
+#### 示例
+
+##### 直接启动:
+
+```shell
+ # 启动命令
+ # Linux 和 MacOS 系统
+ bash sbin/start-ainode.sh
+ # Windows 系统
+ sbin\start-ainode.bat
+
+
+ # 后台启动命令(长期运行推荐)
+ # Linux 和 MacOS 系统
+ nohup bash sbin/start-ainode.sh > myout.file 2>& 1 &
+ # Windows 系统
+ nohup bash sbin\start-ainode.bat > myout.file 2>& 1 &
+ ```
+
+### 检测 AINode 节点状态
+
+AINode 启动过程中会自动将新的 AINode 加入 IoTDB 集群。启动 AINode 后,可以在命令行中输入 SQL 查询集群状态,若能在集群中看到 AINode 节点且其运行状态为 Running(如下所示),则表示加入成功。
+
+```shell
+IoTDB> show cluster
++------+----------+-------+---------------+------------+-------+-----------+
+|NodeID| NodeType| Status|InternalAddress|InternalPort|Version| BuildInfo|
++------+----------+-------+---------------+------------+-------+-----------+
+| 0|ConfigNode|Running| 127.0.0.1| 10710|UNKNOWN|190e303-dev|
+| 1| DataNode|Running| 127.0.0.1| 10730|UNKNOWN|190e303-dev|
+| 2| AINode|Running| 127.0.0.1| 10810|UNKNOWN|190e303-dev|
++------+----------+-------+---------------+------------+-------+-----------+
+```
+
+### 停止 AINode
+
+如果需要停止正在运行的 AINode 节点,则执行相应的关闭脚本。
+
+#### 停止命令
+
+```shell
+ # Linux / MacOS
+ bash sbin/stop-ainode.sh
+
+ #Windows
+ sbin\stop-ainode.bat
+ ```
+
+##### 详细语法
+
+```shell
+ # Linux / MacOS
+ bash sbin/stop-ainode.sh -t
+
+ #Windows
+ sbin\stop-ainode.bat -t
+ ```
+
+##### 参数介绍:
+
+ | **名称** | **标签** | **描述** | **是否必填** | **类型** | **默认值** | **输入方式** |
+| ----------------- | ---- | ------------------------------------------------------------ | -------- | ------ | ------ | ---------- |
+| ain_remove_target | -t | AINode 关闭时可以指定待移除的目标 AINode 的 Node ID、地址和端口号,格式为`<NodeID>/<IP>:<Port>` | 否 | String | 无 | 调用时输入 |
+
+#### 示例
+```shell
+ # Linux / MacOS
+ bash sbin/stop-ainode.sh
+
+ # Windows
+ sbin\stop-ainode.bat
+ ```
+停止 AINode 后,还可以在集群中看到 AINode 节点,其运行状态为 UNKNOWN(如下展示),此时无法使用 AINode 功能。
+
+ ```shell
+IoTDB> show cluster
++------+----------+-------+---------------+------------+-------+-----------+
+|NodeID| NodeType| Status|InternalAddress|InternalPort|Version| BuildInfo|
++------+----------+-------+---------------+------------+-------+-----------+
+| 0|ConfigNode|Running| 127.0.0.1| 10710|UNKNOWN|190e303-dev|
+| 1| DataNode|Running| 127.0.0.1| 10730|UNKNOWN|190e303-dev|
+| 2| AINode|UNKNOWN| 127.0.0.1| 10790|UNKNOWN|190e303-dev|
++------+----------+-------+---------------+------------+-------+-----------+
+```
+如果需要重新启动该节点,需重新执行启动脚本。
+
+### 移除 AINode
+
+当需要把一个 AINode 节点移出集群时,可以执行移除脚本。移除和停止脚本的差别是:停止是在集群中保留 AINode 节点但停止 AINode 服务,移除则是把 AINode 节点从集群中移除出去。
+
+
+ #### 移除命令
+
+```shell
+ # Linux / MacOS
+ bash sbin/remove-ainode.sh
+
+ # Windows
+ sbin\remove-ainode.bat
+ ```
+
+##### 详细语法
+
+```shell
+ # Linux / MacOS
+ bash sbin/remove-ainode.sh -i -t -r -n
+
+ # Windows
+ sbin\remove-ainode.bat -i -t -r -n
+ ```
+
+##### 参数介绍:
+
+ | **名称** | **标签** | **描述** | **是否必填** | **类型** | **默认值** | **输入方式** |
+| ------------------- | ---- | ------------------------------------------------------------ | -------- | ------ | ---------------- | --------------------- |
+| ain_interpreter_dir | -i | AINode 所安装在的虚拟环境的解释器路径,需要使用绝对路径 | 否 | String | 默认读取环境变量 | 调用时输入+持久化修改 |
+| ain_remove_target | -t | AINode 关闭时可以指定待移除的目标 AINode 的 Node ID、地址和端口号,格式为`<NodeID>/<IP>:<Port>` | 否 | String | 无 | 调用时输入 |
+| ain_force_reinstall | -r | 该脚本在检查 AINode 安装情况的时候是否检查版本,如果检查则在版本不对的情况下会强制安装 lib 里的 whl 安装包 | 否 | Bool | false | 调用时输入 |
+| ain_no_dependencies | -n | 指定在安装 AINode 的时候是否安装依赖,如果指定则仅安装 AINode 主程序而不安装依赖。 | 否 | Bool | false | 调用时输入 |
+
+ 如不想每次启动时指定对应参数,也可以在 `conf` 文件夹下的`ainode-env.sh` 和 `ainode-env.bat` 脚本中持久化修改参数(目前支持持久化修改 ain_interpreter_dir 参数)。
+
+ `ainode-env.sh` :
+ ```shell
+ # The default venv environment is used if ain_interpreter_dir is not set. Please use absolute path without quotation mark
+ # ain_interpreter_dir=
+ ```
+ `ainode-env.bat` :
+```shell
+ @REM The default venv environment is used if ain_interpreter_dir is not set. Please use absolute path without quotation mark
+ @REM set ain_interpreter_dir=
+ ```
+ 写入参数值后,解除对应行的注释并保存,即可在下一次执行脚本时生效。
+
+#### 示例
+
+##### 直接移除:
+
+ ```shell
+ # Linux / MacOS
+ bash sbin/remove-ainode.sh
+
+ # Windows
+ sbin\remove-ainode.bat
+ ```
+ 移除节点后,将无法查询到节点的相关信息。
+
+ ```shell
+IoTDB> show cluster
++------+----------+-------+---------------+------------+-------+-----------+
+|NodeID| NodeType| Status|InternalAddress|InternalPort|Version| BuildInfo|
++------+----------+-------+---------------+------------+-------+-----------+
+| 0|ConfigNode|Running| 127.0.0.1| 10710|UNKNOWN|190e303-dev|
+| 1| DataNode|Running| 127.0.0.1| 10730|UNKNOWN|190e303-dev|
++------+----------+-------+---------------+------------+-------+-----------+
+```
+##### 指定移除:
+
+如果用户丢失了 data 文件夹下的文件,可能 AINode 本地无法主动移除自己,需要用户指定节点号、地址和端口号进行移除,此时我们支持用户按照以下方法输入参数进行删除。
+
+ ```shell
+ # Linux / MacOS
+ bash sbin/remove-ainode.sh -t <NodeID>/<IP>:<Port>
+
+ # Windows
+ sbin\remove-ainode.bat -t <NodeID>/<IP>:<Port>
+ ```
+
+## 常见问题
+
+### 启动AINode时出现找不到venv模块的报错
+
+ 当使用默认方式启动 AINode 时,会在安装包目录下创建一个 python 虚拟环境并安装依赖,因此要求安装 venv 模块。通常来说 python3.8 及以上的版本会自带 venv,但对于一些系统自带的 python 环境可能并不满足这一要求。出现该报错时有两种解决方案(二选一):
+
+ 在本地安装 venv 模块,以 ubuntu 为例,可以通过运行以下命令来安装 python 自带的 venv 模块。或者从 python 官网安装一个自带 venv 的 python 版本。
+
+ ```shell
+apt-get install python3.8-venv
+```
+ 然后在 AINode 路径下,使用 3.8.0 版本的 Python 创建名为 venv 的虚拟环境:
+
+ ```shell
+# 在 AINode 路径下创建名为 venv 的虚拟环境
+../Python-3.8.0/python -m venv venv
+```
+ 或者,在运行启动脚本时通过 `-i` 指定已有的 python 解释器路径作为 AINode 的运行环境,这样就不再需要创建一个新的虚拟环境。
+
+ ### python中的SSL模块没有被正确安装和配置,无法处理HTTPS资源
+报错信息:`WARNING: pip is configured with locations that require TLS/SSL, however the ssl module in Python is not available.`
+可以在安装 OpenSSL 后重新构建 Python 来解决这个问题。
+> Currently Python versions 3.6 to 3.9 are compatible with OpenSSL 1.0.2, 1.1.0, and 1.1.1.
+
+ Python 要求我们的系统上安装有 OpenSSL,具体安装方法可见[链接](https://stackoverflow.com/questions/56552390/how-to-fix-ssl-module-in-python-is-not-available-in-centos)
+
+ ```shell
+sudo apt-get install build-essential libssl-dev zlib1g-dev libbz2-dev libreadline-dev libsqlite3-dev wget curl llvm libncurses5-dev libncursesw5-dev xz-utils tk-dev libffi-dev liblzma-dev uuid-dev lzma-dev liblzma-dev
+sudo -E ./configure --with-ssl
+make
+sudo make install
+```
+
+### pip 或 setuptools 版本较低
+
+windows 下出现类似 “error: Microsoft Visual C++ 14.0 or greater is required...” 的编译问题。
+
+出现对应的报错,通常是 C++ 构建工具或 setuptools 版本不足,可以在对应的 python 环境中执行以下命令升级 pip 与 setuptools 后重试:
+
+```shell
+./python -m pip install --upgrade pip
+./python -m pip install --upgrade setuptools
+```
+
+
+ ### 安装编译python
+
+ 使用以下指令从官网下载安装包并解压:
+ ```shell
+wget https://www.python.org/ftp/python/3.8.0/Python-3.8.0.tar.xz
+tar Jxf Python-3.8.0.tar.xz
+```
+ 编译安装对应的 python 包:
+ ```shell
+cd Python-3.8.0
+./configure --prefix=/usr/local/python3
+make
+sudo make install
+python3 --version
+```
\ No newline at end of file
diff --git a/src/zh/UserGuide/V1.3.3/User-Manual/AINode_apache.md b/src/zh/UserGuide/V1.3.3/User-Manual/AINode_apache.md
new file mode 100644
index 000000000..2da3c824d
--- /dev/null
+++ b/src/zh/UserGuide/V1.3.3/User-Manual/AINode_apache.md
@@ -0,0 +1,650 @@
+
+
+# AI能力(AINode)
+
+AINode 是 IoTDB 在ConfigNode、DataNode后提供的第三种内生节点,该节点通过与 IoTDB 集群的 DataNode、ConfigNode 的交互,扩展了对时间序列进行机器学习分析的能力,支持从外部引入已有机器学习模型进行注册,并使用注册的模型在指定时序数据上通过简单 SQL 语句完成时序分析任务的过程,将模型的创建、管理及推理融合在数据库引擎中。目前已提供常见时序分析场景(例如预测与异常检测)的机器学习算法或自研模型。
+
+系统架构如下图所示:
+::: center
+
+:::
+三种节点的职责如下:
+
+- **ConfigNode**:负责保存和管理模型的元信息;负责分布式节点管理。
+- **DataNode**:负责接收并解析用户的 SQL请求;负责存储时间序列数据;负责数据的预处理计算。
+- **AINode**:负责模型文件的导入创建以及模型推理。
+
+## 优势特点
+
+与单独构建机器学习服务相比,具有以下优势:
+
+- **简单易用**:无需使用 Python 或 Java 编程,使用 SQL 语句即可完成机器学习模型管理与推理的完整流程。如创建模型可使用CREATE MODEL语句、使用模型进行推理可使用CALL INFERENCE(...)语句等,使用更加简单便捷。
+
+- **避免数据迁移**:使用 IoTDB 原生机器学习可以将存储在 IoTDB 中的数据直接应用于机器学习模型的推理,无需将数据移动到单独的机器学习服务平台,从而加速数据处理、提高安全性并降低成本。
+
+
+
+- **内置先进算法**:支持业内领先机器学习分析算法,覆盖典型时序分析任务,为时序数据库赋能原生数据分析能力。如:
+ - **时间序列预测(Time Series Forecasting)**:从过去时间序列中学习变化模式;从而根据给定过去时间的观测值,输出未来序列最可能的预测。
+ - **时序异常检测(Anomaly Detection for Time Series)**:在给定的时间序列数据中检测和识别异常值,帮助发现时间序列中的异常行为。
+ - **时间序列标注(Time Series Annotation)**:为每个数据点或特定时间段添加额外的信息或标记,例如事件发生、异常点、趋势变化等,以便更好地理解和分析数据。
+
+
+## 基本概念
+
+- **模型(Model)**:机器学习模型,以时序数据作为输入,输出分析任务的结果或决策。模型是AINode 的基本管理单元,支持模型的增(注册)、删、查、用(推理)。
+- **创建(Create)**: 将外部设计或训练好的模型文件或算法加载到 AINode 中,由 IoTDB 统一管理与使用。
+- **推理(Inference)**:使用创建的模型在指定时序数据上完成该模型适用的时序分析任务的过程。
+- **内置能力(Built-in)**:AINode 自带常见时序分析场景(例如预测与异常检测)的机器学习算法或自研模型。
+
+::: center
+
+:::
+
+## 安装部署
+
+AINode 的部署可参考文档 [部署指导](../Deployment-and-Maintenance/AINode_Deployment_apache.md#ainode-部署) 章节。
+
+## 使用指导
+
+AINode 对时序数据相关的深度学习模型提供了模型创建及删除的流程,内置模型无需创建及删除,可直接使用,并且在完成推理后创建的内置模型实例将自动销毁。
+
+### 注册模型
+
+通过指定模型输入输出的向量维度,可以注册训练好的深度学习模型,从而用于模型推理。
+
+符合以下内容的模型可以注册到AINode中:
+ 1. AINode 支持 PyTorch 2.1.0、2.2.0 版本训练的模型,需避免使用 2.2.0 以上版本的特性。
+ 2. AINode支持使用PyTorch JIT存储的模型,模型文件需要包含模型的参数和结构。
+ 3. 模型输入序列可以包含一列或多列,若有多列,需要和模型能力、模型配置文件对应。
+ 4. 模型的输入输出维度必须在`config.yaml`配置文件中明确定义。使用模型时,必须严格按照`config.yaml`配置文件中定义的输入输出维度。如果输入输出列数不匹配配置文件,将会导致错误。
+
+下方为模型注册的SQL语法定义。
+
+```SQL
+create model <model_name> using uri <uri>
+```
+
+SQL中参数的具体含义如下:
+
+- model_name:模型的全局唯一标识,不可重复。模型名称具备以下约束:
+
+ - 允许出现标识符 [ 0-9 a-z A-Z _ ] (字母,数字,下划线)
+ - 长度限制为2-64字符
+ - 大小写敏感
+
+- uri:模型注册文件的资源路径,路径下应包含**模型权重model.pt文件和模型的元数据描述文件config.yaml**
+
+ - 模型权重文件:深度学习模型训练完成后得到的权重文件,目前支持pytorch训练得到的.pt文件
+
+ - yaml元数据描述文件:模型注册时需要提供的与模型结构有关的参数,其中必须包含模型的输入输出维度用于模型推理:
+
+ - | **参数名** | **参数描述** | **示例** |
+ | ------------ | ---------------------------- | -------- |
+ | input_shape | 模型输入的行列,用于模型推理 | [96,2] |
+ | output_shape | 模型输出的行列,用于模型推理 | [48,2] |
+
+ - 除了模型推理外,还可以指定模型输入输出的数据类型:
+
+ - | **参数名** | **参数描述** | **示例** |
+ | ----------- | ------------------ | --------------------- |
+ | input_type | 模型输入的数据类型 | ['float32','float32'] |
+ | output_type | 模型输出的数据类型 | ['float32','float32'] |
+
+ - 除此之外,可以额外指定备注信息用于在模型管理时进行展示
+
+ - | **参数名** | **参数描述** | **示例** |
+ | ---------- | ---------------------------------------------- | ------------------------------------------- |
+ | attributes | 可选,用户自行设定的模型备注信息,用于模型展示 | 'model_type': 'dlinear','kernel_size': '25' |
+
+
+除了本地模型文件的注册,还可以通过URI来指定远程资源路径来进行注册,使用开源的模型仓库(例如HuggingFace)。
+
+#### 示例
+
+在当前的example文件夹下,包含model.pt和config.yaml文件,model.pt为训练得到,config.yaml的内容如下:
+
+```YAML
+configs:
+ # 必选项
+ input_shape: [96, 2] # 表示模型接收的数据为96行x2列
+ output_shape: [48, 2] # 表示模型输出的数据为48行x2列
+
+ # 可选项 默认为全部float32,列数为shape对应的列数
+ input_type: ["int64","int64"] #输入对应的数据类型,需要与输入列数匹配
+ output_type: ["text","int64"] #输出对应的数据类型,需要与输出列数匹配
+
+attributes: # 可选项 为用户自定义的备注信息
+ 'model_type': 'dlinear'
+ 'kernel_size': '25'
+```
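+
+注册前可以先确认目录下同时存在权重文件与配置文件(以下输出仅为示意):
+
+```shell
+$ ls ./example
+config.yaml  model.pt
+```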
+
+指定该文件夹作为加载路径就可以注册该模型
+
+```SQL
+IoTDB> create model dlinear_example using uri "file://./example"
+```
+
+也可以从huggingFace上下载对应的模型文件进行注册
+
+```SQL
+IoTDB> create model dlinear_example using uri "https://huggingface.com/IoTDBML/dlinear/"
+```
+
+SQL执行后会异步进行注册的流程,可以通过模型展示查看模型的注册状态(见模型展示章节),注册成功的耗时主要受到模型文件大小的影响。
+
+模型注册完成后,就可以通过使用正常查询的方式调用具体函数,进行模型推理。
+
+### 查看模型
+
+注册成功的模型可以通过show models指令查询模型的具体信息。其SQL定义如下:
+
+```SQL
+show models
+
+show models <model_id>
+```
+
+除了直接展示所有模型的信息外,可以指定model id来查看某一具体模型的信息。模型展示的结果中包含如下信息:
+
+| **ModelId** | **State** | **Configs** | **Attributes** |
+| ------------ | ------------------------------------- | ---------------------------------------------- | -------------- |
+| 模型唯一标识 | 模型注册状态(LOADING,ACTIVE,DROPPING,UNAVAILABLE) | InputShape, OutputShape, InputTypes, OutputTypes | 模型备注信息 |
+
+其中,State 用于展示当前模型注册的状态,包含以下四种状态:
+
+- **LOADING**:已经在configNode中添加对应的模型元信息,正将模型文件传输到AINode节点上
+- **ACTIVE:** 模型已经设置完成,模型处于可用状态
+- **DROPPING**:模型删除中,正在从configNode以及AINode处删除模型相关信息
+- **UNAVAILABLE**: 模型创建失败,可以通过drop model删除创建失败的model_name。
+
+#### 示例
+
+```SQL
+IoTDB> show models
+
+
++---------------------+--------------------------+-----------+----------------------------+-----------------------+
+| ModelId| ModelType| State| Configs| Notes|
++---------------------+--------------------------+-----------+----------------------------+-----------------------+
+| dlinear_example| USER_DEFINED| ACTIVE| inputShape:[96,2]| |
+| | | | outputShape:[48,2]| |
+| | | | inputDataType:[float,float]| |
+| | | |outputDataType:[float,float]| |
+| _STLForecaster| BUILT_IN_FORECAST| ACTIVE| |Built-in model in IoTDB|
+| _NaiveForecaster| BUILT_IN_FORECAST| ACTIVE| |Built-in model in IoTDB|
+| _ARIMA| BUILT_IN_FORECAST| ACTIVE| |Built-in model in IoTDB|
+|_ExponentialSmoothing| BUILT_IN_FORECAST| ACTIVE| |Built-in model in IoTDB|
+| _GaussianHMM|BUILT_IN_ANOMALY_DETECTION| ACTIVE| |Built-in model in IoTDB|
+| _GMMHMM|BUILT_IN_ANOMALY_DETECTION| ACTIVE| |Built-in model in IoTDB|
+| _Stray|BUILT_IN_ANOMALY_DETECTION| ACTIVE| |Built-in model in IoTDB|
++---------------------+--------------------------+-----------+----------------------------+-----------------------+
+```
+
+我们前面已经注册了对应的模型,可以通过上述指令查看模型状态,ACTIVE 表明模型注册成功,可用于推理。
+
+### 删除模型
+
+对于注册成功的模型,用户可以通过SQL进行删除。该操作除了删除configNode上的元信息外,还会删除所有AINode下的相关模型文件。其SQL如下:
+
+```SQL
+drop model <model_name>
+```
+
+需要指定已经成功注册的模型model_name来删除对应的模型。由于模型删除涉及多个节点上的数据删除,操作不会立即完成,此时模型的状态为DROPPING,该状态的模型不能用于模型推理。
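+
+以删除前文注册的 dlinear_example 为例,删除操作的示意如下(返回信息以实际版本为准):
+
+```Shell
+IoTDB> drop model dlinear_example
+Msg: The statement is executed successfully.
+```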
+
+### 使用内置模型推理
+
+SQL语法如下:
+
+
+```SQL
+call inference(<built_in_model_name>, sql[, <parameterName>=<parameterValue>])
+```
+
+内置模型推理无需注册流程,通过call关键字,调用inference函数就可以使用模型的推理功能,其对应的参数介绍如下:
+
+- **built_in_model_name:** 内置模型名称
+- **parameterName**:参数名
+- **parameterValue**:参数值
+
+#### 内置模型及参数说明
+
+目前已内置如下机器学习模型,具体参数说明请参考以下链接。
+
+| 模型 | built_in_model_name | 任务类型 | 参数说明 |
+| -------------------- | --------------------- | -------- | ------------------------------------------------------------ |
+| Arima | _Arima | 预测 | [Arima参数说明](https://www.sktime.net/en/latest/api_reference/auto_generated/sktime.forecasting.arima.ARIMA.html?highlight=Arima) |
+| STLForecaster | _STLForecaster | 预测 | [STLForecaster参数说明](https://www.sktime.net/en/latest/api_reference/auto_generated/sktime.forecasting.trend.STLForecaster.html#sktime.forecasting.trend.STLForecaster) |
+| NaiveForecaster | _NaiveForecaster | 预测 | [NaiveForecaster参数说明](https://www.sktime.net/en/latest/api_reference/auto_generated/sktime.forecasting.naive.NaiveForecaster.html#naiveforecaster) |
+| ExponentialSmoothing | _ExponentialSmoothing | 预测 | [ExponentialSmoothing参数说明](https://www.sktime.net/en/latest/api_reference/auto_generated/sktime.forecasting.exp_smoothing.ExponentialSmoothing.html) |
+| GaussianHMM | _GaussianHMM | 标注 | [GaussianHMM参数说明](https://www.sktime.net/en/latest/api_reference/auto_generated/sktime.annotation.hmm_learn.gaussian.GaussianHMM.html) |
+| GMMHMM | _GMMHMM | 标注 | [GMMHMM参数说明](https://www.sktime.net/en/latest/api_reference/auto_generated/sktime.annotation.hmm_learn.gmm.GMMHMM.html) |
+| Stray | _Stray | 异常检测 | [Stray参数说明](https://www.sktime.net/en/latest/api_reference/auto_generated/sktime.annotation.stray.STRAY.html) |
+
+#### 示例
+
+下面是使用内置模型推理的一个操作示例,使用内置的Stray模型进行异常检测算法,输入为`[144,1]`,输出为`[144,1]`,我们通过SQL使用其进行推理。
+
+```SQL
+IoTDB> select * from root.eg.airline
++-----------------------------+------------------+
+| Time|root.eg.airline.s0|
++-----------------------------+------------------+
+|1949-01-31T00:00:00.000+08:00| 224.0|
+|1949-02-28T00:00:00.000+08:00| 118.0|
+|1949-03-31T00:00:00.000+08:00| 132.0|
+|1949-04-30T00:00:00.000+08:00| 129.0|
+......
+|1960-09-30T00:00:00.000+08:00| 508.0|
+|1960-10-31T00:00:00.000+08:00| 461.0|
+|1960-11-30T00:00:00.000+08:00| 390.0|
+|1960-12-31T00:00:00.000+08:00| 432.0|
++-----------------------------+------------------+
+Total line number = 144
+
+IoTDB> call inference(_Stray, "select s0 from root.eg.airline", k=2)
++-------+
+|output0|
++-------+
+| 0|
+| 0|
+| 0|
+| 0|
+......
+| 1|
+| 1|
+| 0|
+| 0|
+| 0|
+| 0|
++-------+
+Total line number = 144
+```
+
+### 使用深度学习模型推理
+
+SQL语法如下:
+
+```SQL
+call inference(<model_name>, sql[, window=<window_function>])
+
+
+window_function:
+ head(window_size)
+ tail(window_size)
+ count(window_size,sliding_step)
+```
+
+在完成模型的注册后,通过call关键字,调用inference函数就可以使用模型的推理功能,其对应的参数介绍如下:
+
+- **model_name**: 对应一个已经注册的模型
+- **sql**:sql查询语句,查询的结果作为模型的输入进行模型推理。查询的结果中行列的维度需要与具体模型config中指定的大小相匹配。(这里的sql不建议使用`SELECT *`子句,因为在IoTDB中,`*`并不会对列进行排序,因此列的顺序是未定义的,可以使用`SELECT s0,s1`的方式确保列的顺序符合模型输入的预期)
+- **window_function**: 推理过程中可以使用的窗口函数,目前提供三种类型的窗口函数用于辅助模型推理:
+ - **head(window_size)**: 获取数据中最前的window_size个点用于模型推理,该窗口可用于数据裁剪
+ 
+
+ - **tail(window_size)**:获取数据中最后的window_size个点用于模型推理,该窗口可用于数据裁剪
+ 
+
+ - **count(window_size, sliding_step)**:基于点数的滑动窗口,每个窗口的数据会分别通过模型进行推理,如下图示例所示,window_size为2的窗口函数将输入数据集分为三个窗口,每个窗口分别进行推理运算生成结果。该窗口可用于连续推理
+ 
+
+**说明1: window可以用来解决sql查询结果和模型的输入行数要求不一致时的问题,对行进行裁剪。需要注意的是,当列数不匹配或是行数直接少于模型需求时,推理无法进行,会返回错误信息。**
+
+**说明2: 在深度学习应用中,经常将时间戳衍生特征(数据中的时间列)作为生成式任务的协变量,一同输入到模型中以提升模型的效果,但是在模型的输出结果中一般不包含时间列。为了保证实现的通用性,模型推理结果只对应模型的真实输出,如果模型不输出时间列,则结果中不会包含。**
+
+
+#### 示例
+
+下面是使用深度学习模型推理的一个操作示例,针对上面提到的输入为`[96,2]`,输出为`[48,2]`的`dlinear`预测模型,我们通过SQL使用其进行推理。
+
+```Shell
+IoTDB> select s1,s2 from root.**
++-----------------------------+-------------------+-------------------+
+| Time| root.eg.etth.s0| root.eg.etth.s1|
++-----------------------------+-------------------+-------------------+
+|1990-01-01T00:00:00.000+08:00| 0.7855| 1.611|
+|1990-01-02T00:00:00.000+08:00| 0.7818| 1.61|
+|1990-01-03T00:00:00.000+08:00| 0.7867| 1.6293|
+|1990-01-04T00:00:00.000+08:00| 0.786| 1.637|
+|1990-01-05T00:00:00.000+08:00| 0.7849| 1.653|
+|1990-01-06T00:00:00.000+08:00| 0.7866| 1.6537|
+|1990-01-07T00:00:00.000+08:00| 0.7886| 1.662|
+......
+|1990-03-31T00:00:00.000+08:00| 0.7585| 1.678|
+|1990-04-01T00:00:00.000+08:00| 0.7587| 1.6763|
+|1990-04-02T00:00:00.000+08:00| 0.76| 1.6813|
+|1990-04-03T00:00:00.000+08:00| 0.7669| 1.684|
+|1990-04-04T00:00:00.000+08:00| 0.7645| 1.677|
+|1990-04-05T00:00:00.000+08:00| 0.7625| 1.68|
+|1990-04-06T00:00:00.000+08:00| 0.7617| 1.6917|
++-----------------------------+-------------------+-------------------+
+Total line number = 96
+
+IoTDB> call inference(dlinear_example,"select s0,s1 from root.**")
++--------------------------------------------+-----------------------------+
+| _result_0| _result_1|
++--------------------------------------------+-----------------------------+
+| 0.726302981376648| 1.6549958229064941|
+| 0.7354921698570251| 1.6482787370681763|
+| 0.7238251566886902| 1.6278168201446533|
+......
+| 0.7692174911499023| 1.654654049873352|
+| 0.7685555815696716| 1.6625318765640259|
+| 0.7856493592262268| 1.6508299350738525|
++--------------------------------------------+-----------------------------+
+Total line number = 48
+```
+
+#### 使用tail/head窗口函数的示例
+
+当数据量不定且想要取96行最新数据用于推理时,可以使用对应的窗口函数tail。head函数的用法与其类似,不同点在于其取的是最早的96个点。
+
+```Shell
+IoTDB> select s1,s2 from root.**
++-----------------------------+-------------------+-------------------+
+| Time| root.eg.etth.s0| root.eg.etth.s1|
++-----------------------------+-------------------+-------------------+
+|1988-01-01T00:00:00.000+08:00| 0.7355| 1.211|
+......
+|1990-01-01T00:00:00.000+08:00| 0.7855| 1.611|
+|1990-01-02T00:00:00.000+08:00| 0.7818| 1.61|
+|1990-01-03T00:00:00.000+08:00| 0.7867| 1.6293|
+|1990-01-04T00:00:00.000+08:00| 0.786| 1.637|
+|1990-01-05T00:00:00.000+08:00| 0.7849| 1.653|
+|1990-01-06T00:00:00.000+08:00| 0.7866| 1.6537|
+|1990-01-07T00:00:00.000+08:00| 0.7886| 1.662|
+......
+|1990-03-31T00:00:00.000+08:00| 0.7585| 1.678|
+|1990-04-01T00:00:00.000+08:00| 0.7587| 1.6763|
+|1990-04-02T00:00:00.000+08:00| 0.76| 1.6813|
+|1990-04-03T00:00:00.000+08:00| 0.7669| 1.684|
+|1990-04-04T00:00:00.000+08:00| 0.7645| 1.677|
+|1990-04-05T00:00:00.000+08:00| 0.7625| 1.68|
+|1990-04-06T00:00:00.000+08:00| 0.7617| 1.6917|
++-----------------------------+-------------------+-------------------+
+Total line number = 996
+
+IoTDB> call inference(dlinear_example,"select s0,s1 from root.**",window=tail(96))
++--------------------------------------------+-----------------------------+
+| _result_0| _result_1|
++--------------------------------------------+-----------------------------+
+| 0.726302981376648| 1.6549958229064941|
+| 0.7354921698570251| 1.6482787370681763|
+| 0.7238251566886902| 1.6278168201446533|
+......
+| 0.7692174911499023| 1.654654049873352|
+| 0.7685555815696716| 1.6625318765640259|
+| 0.7856493592262268| 1.6508299350738525|
++--------------------------------------------+-----------------------------+
+Total line number = 48
+```
+
+#### 使用count窗口函数的示例
+
+该窗口主要用于计算式任务,当任务对应的模型一次只能处理固定行数的数据,而最终想要的却是多组预测结果时,使用该窗口函数可以基于点数滑动窗口进行连续推理。假设我们现在有一个异常检测模型anomaly_example(input: [24,2], output: [1,1]),对每24行数据会生成一个0/1的标签,其使用示例如下:
+
+```Shell
+IoTDB> select s1,s2 from root.**
++-----------------------------+-------------------+-------------------+
+| Time| root.eg.etth.s0| root.eg.etth.s1|
++-----------------------------+-------------------+-------------------+
+|1990-01-01T00:00:00.000+08:00| 0.7855| 1.611|
+|1990-01-02T00:00:00.000+08:00| 0.7818| 1.61|
+|1990-01-03T00:00:00.000+08:00| 0.7867| 1.6293|
+|1990-01-04T00:00:00.000+08:00| 0.786| 1.637|
+|1990-01-05T00:00:00.000+08:00| 0.7849| 1.653|
+|1990-01-06T00:00:00.000+08:00| 0.7866| 1.6537|
+|1990-01-07T00:00:00.000+08:00| 0.7886| 1.662|
+......
+|1990-03-31T00:00:00.000+08:00| 0.7585| 1.678|
+|1990-04-01T00:00:00.000+08:00| 0.7587| 1.6763|
+|1990-04-02T00:00:00.000+08:00| 0.76| 1.6813|
+|1990-04-03T00:00:00.000+08:00| 0.7669| 1.684|
+|1990-04-04T00:00:00.000+08:00| 0.7645| 1.677|
+|1990-04-05T00:00:00.000+08:00| 0.7625| 1.68|
+|1990-04-06T00:00:00.000+08:00| 0.7617| 1.6917|
++-----------------------------+-------------------+-------------------+
+Total line number = 96
+
+IoTDB> call inference(anomaly_example,"select s0,s1 from root.**",window=count(24,24))
++-------------------------+
+| _result_0|
++-------------------------+
+| 0|
+| 1|
+| 1|
+| 0|
++-------------------------+
+Total line number = 4
+```
+
+其中结果集中每行的标签对应每24行数据为一组,输入该异常检测模型后的输出。
+
+## 权限管理
+
+使用 AINode 相关功能时,可以使用 IoTDB 本身的鉴权机制进行权限管理:用户只有在具备 USE_MODEL 权限时,才可以使用模型管理的相关功能;使用推理功能时,用户需要有访问输入模型的 SQL 对应的源序列的权限。
+
+| 权限名称 | 权限范围 | 管理员用户(默认ROOT) | 普通用户 | 路径相关 |
+| --------- | --------------------------------- | ---------------------- | -------- | -------- |
+| USE_MODEL | create model / show models / drop model | √ | √ | x |
+| READ_DATA | call inference | √ | √ | √ |
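+
+下面是一个授权的示意,其中用户名、密码与路径均为假设值,具体的权限管理语法请以所使用 IoTDB 版本的权限管理文档为准:
+
+```Shell
+IoTDB> create user user01 'passwd123456'
+IoTDB> grant USE_MODEL on root.** to user user01
+IoTDB> grant READ_DATA on root.eg.** to user user01
+```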
+
+## 实际案例
+
+### 电力负载预测
+
+在部分工业场景下,会存在预测电力负载的需求,预测结果可用于优化电力供应、节约能源和资源、支持规划和扩展以及增强电力系统的可靠性。
+
+我们所使用的 ETTh1 的测试集的数据为[ETTh1](https://alioss.timecho.com/docs/img/ETTh1.csv)。
+
+
+包含间隔1h采集一次的电力数据,每条数据由负载和油温构成,分别为:High UseFul Load, High UseLess Load, Middle UseLess Load, Low UseFul Load, Low UseLess Load, Oil Temperature。
+
+在该数据集上,IoTDB-ML的模型推理功能可以通过以往高中低三种负载的数值和对应时间戳油温的关系,预测未来一段时间内的油温,赋能电网变压器的自动调控和监视。
+
+#### 步骤一:数据导入
+
+用户可以使用tools文件夹中的`import-csv.sh` 向 IoTDB 中导入 ETT 数据集
+
+```Bash
+bash ./import-csv.sh -h 127.0.0.1 -p 6667 -u root -pw root -f ../../ETTh1.csv
+```
+
+#### 步骤二:模型导入
+
+我们可以在iotdb-cli 中输入以下SQL从 huggingface 上拉取一个已经训练好的模型进行注册,用于后续的推理。
+
+```SQL
+create model dlinear using uri 'https://huggingface.co/hvlgo/dlinear/tree/main'
+```
+
+该模型基于较为轻量化的深度模型DLinear训练而得,能够以相对快的推理速度尽可能多地捕捉到序列内部的变化趋势和变量间的数据变化关系,相较于其他更深的模型更适用于快速实时预测。
+
+#### 步骤三:模型推理
+
+```Shell
+IoTDB> select s0,s1,s2,s3,s4,s5,s6 from root.eg.etth LIMIT 96
++-----------------------------+---------------+---------------+---------------+---------------+---------------+---------------+---------------+
+| Time|root.eg.etth.s0|root.eg.etth.s1|root.eg.etth.s2|root.eg.etth.s3|root.eg.etth.s4|root.eg.etth.s5|root.eg.etth.s6|
++-----------------------------+---------------+---------------+---------------+---------------+---------------+---------------+---------------+
+|2017-10-20T00:00:00.000+08:00| 10.449| 3.885| 8.706| 2.025| 2.041| 0.944| 8.864|
+|2017-10-20T01:00:00.000+08:00| 11.119| 3.952| 8.813| 2.31| 2.071| 1.005| 8.442|
+|2017-10-20T02:00:00.000+08:00| 9.511| 2.88| 7.533| 1.564| 1.949| 0.883| 8.16|
+|2017-10-20T03:00:00.000+08:00| 9.645| 2.21| 7.249| 1.066| 1.828| 0.914| 7.949|
+......
+|2017-10-23T20:00:00.000+08:00| 8.105| 0.938| 4.371| -0.569| 3.533| 1.279| 9.708|
+|2017-10-23T21:00:00.000+08:00| 7.167| 1.206| 4.087| -0.462| 3.107| 1.432| 8.723|
+|2017-10-23T22:00:00.000+08:00| 7.1| 1.34| 4.015| -0.32| 2.772| 1.31| 8.864|
+|2017-10-23T23:00:00.000+08:00| 9.176| 2.746| 7.107| 1.635| 2.65| 1.097| 9.004|
++-----------------------------+---------------+---------------+---------------+---------------+---------------+---------------+---------------+
+Total line number = 96
+
+IoTDB> call inference(dlinear, "select s0,s1,s2,s3,s4,s5,s6 from root.eg.etth", window=head(96))
++-----------+----------+----------+------------+---------+----------+----------+
+| output0| output1| output2| output3| output4| output5| output6|
++-----------+----------+----------+------------+---------+----------+----------+
+| 10.319546| 3.1450553| 7.877341| 1.5723765|2.7303758| 1.1362307| 8.867775|
+| 10.443649| 3.3286757| 7.8593454| 1.7675098| 2.560634| 1.1177158| 8.920919|
+| 10.883752| 3.2341104| 8.47036| 1.6116762|2.4874182| 1.1760603| 8.798939|
+......
+| 8.0115595| 1.2995274| 6.9900327|-0.098746896| 3.04923| 1.176214| 9.548782|
+| 8.612427| 2.5036244| 5.6790237| 0.66474205|2.8870275| 1.2051733| 9.330128|
+| 10.096699| 3.399722| 6.9909| 1.7478468|2.7642853| 1.1119363| 9.541455|
++-----------+----------+----------+------------+---------+----------+----------+
+Total line number = 48
+```
+
+我们将对油温的预测的结果和真实结果进行对比,可以得到以下的图像。
+
+图中10/24 00:00之前的数据为输入模型的过去数据,10/24 00:00后的蓝色线条为模型给出的油温预测结果,而红色为数据集中实际的油温数据(用于进行对比)。
+
+
+
+可以看到,我们使用了过去96个小时(4天)的六个负载信息和对应时间油温的关系,基于之前学习到的序列间相互关系对未来48个小时(2天)的油温这一数据的可能变化进行了建模,可以看到可视化后预测曲线与实际结果在趋势上保持了较高程度的一致性。
+
+### 功率预测
+
+变电站需要对电流、电压、功率等数据进行电力监控,用于检测潜在的电网问题、识别电力系统中的故障、有效管理电网负载以及分析电力系统的性能和趋势等。
+
+我们利用某变电站中的电流、电压和功率等数据构成了真实场景下的数据集。该数据集包括变电站近四个月时间跨度,每5 - 6s 采集一次的 A相电压、B相电压、C相电压等数据。
+
+测试集数据内容为[data](https://alioss.timecho.com/docs/img/data.csv)。
+
+在该数据集上,IoTDB-ML的模型推理功能可以通过以往A相电压,B相电压和C相电压的数值和对应时间戳,预测未来一段时间内的C相电压,赋能变电站的监视管理。
+
+#### 步骤一:数据导入
+
+用户可以使用tools文件夹中的`import-csv.sh` 导入数据集
+
+```Bash
+bash ./import-csv.sh -h 127.0.0.1 -p 6667 -u root -pw root -f ../../data.csv
+```
+
+#### 步骤二:模型导入
+
+我们可以在iotdb-cli 中选择内置模型或已经注册好的模型用于后续的推理。
+
+我们采用内置模型STLForecaster进行预测,STLForecaster 是一个基于 statsmodels 库中 STL 实现的时间序列预测方法。
+
+#### 步骤三:模型推理
+
+```Shell
+IoTDB> select * from root.eg.voltage limit 96
++-----------------------------+------------------+------------------+------------------+
+| Time|root.eg.voltage.s0|root.eg.voltage.s1|root.eg.voltage.s2|
++-----------------------------+------------------+------------------+------------------+
+|2023-02-14T20:38:32.000+08:00| 2038.0| 2028.0| 2041.0|
+|2023-02-14T20:38:38.000+08:00| 2014.0| 2005.0| 2018.0|
+|2023-02-14T20:38:44.000+08:00| 2014.0| 2005.0| 2018.0|
+......
+|2023-02-14T20:47:52.000+08:00| 2024.0| 2016.0| 2027.0|
+|2023-02-14T20:47:57.000+08:00| 2024.0| 2016.0| 2027.0|
+|2023-02-14T20:48:03.000+08:00| 2024.0| 2016.0| 2027.0|
++-----------------------------+------------------+------------------+------------------+
+Total line number = 96
+
+IoTDB> call inference(_STLForecaster, "select s0,s1,s2 from root.eg.voltage", window=head(96),predict_length=48)
++---------+---------+---------+
+| output0| output1| output2|
++---------+---------+---------+
+|2026.3601|2018.2953|2029.4257|
+|2019.1538|2011.4361|2022.0888|
+|2025.5074|2017.4522|2028.5199|
+......
+
+|2022.2336|2015.0290|2025.1023|
+|2015.7241|2008.8975|2018.5085|
+|2022.0777|2014.9136|2024.9396|
+|2015.5682|2008.7821|2018.3458|
++---------+---------+---------+
+Total line number = 48
+```
+我们将对C相电压的预测的结果和真实结果进行对比,可以得到以下的图像。
+
+图中 02/14 20:48 之前的数据为输入模型的过去数据, 02/14 20:48 后的蓝色线条为模型给出的C相电压预测结果,而红色为数据集中实际的C相电压数据(用于进行对比)。
+
+
+
+可以看到,我们使用了过去10分钟的电压的数据,基于之前学习到的序列间相互关系对未来5分钟的C相电压这一数据的可能变化进行了建模,可以看到可视化后预测曲线与实际结果在趋势上保持了一定的同步性。
+
+### 异常检测
+
+在民航交通运输业,存在着对乘机旅客数量进行异常检测的需求。异常检测的结果可用于指导调整航班的调度,以使得企业获得更大效益。
+
+Airline Passengers 是一个时间序列数据集,该数据集记录了1949年至1960年期间国际航空乘客数量,间隔一个月进行一次采样。该数据集共含一条时间序列。数据集为[airline](https://alioss.timecho.com/docs/img/airline.csv)。
+在该数据集上,IoTDB-ML的模型推理功能可以通过捕捉序列的变化规律以对序列时间点进行异常检测,赋能交通运输业。
+
+#### 步骤一:数据导入
+
+用户可以使用tools文件夹中的`import-csv.sh` 导入数据集
+
+```Bash
+bash ./import-csv.sh -h 127.0.0.1 -p 6667 -u root -pw root -f ../../airline.csv
+```
+
+#### 步骤二:模型推理
+
+IoTDB内置有部分可以直接使用的机器学习算法,使用其中的异常检测算法进行预测的样例如下:
+
+```Shell
+IoTDB> select * from root.eg.airline
++-----------------------------+------------------+
+| Time|root.eg.airline.s0|
++-----------------------------+------------------+
+|1949-01-31T00:00:00.000+08:00| 224.0|
+|1949-02-28T00:00:00.000+08:00| 118.0|
+|1949-03-31T00:00:00.000+08:00| 132.0|
+|1949-04-30T00:00:00.000+08:00| 129.0|
+......
+|1960-09-30T00:00:00.000+08:00| 508.0|
+|1960-10-31T00:00:00.000+08:00| 461.0|
+|1960-11-30T00:00:00.000+08:00| 390.0|
+|1960-12-31T00:00:00.000+08:00| 432.0|
++-----------------------------+------------------+
+Total line number = 144
+
+IoTDB> call inference(_Stray, "select s0 from root.eg.airline", k=2)
++-------+
+|output0|
++-------+
+| 0|
+| 0|
+| 0|
+| 0|
+......
+| 1|
+| 1|
+| 0|
+| 0|
+| 0|
+| 0|
++-------+
+Total line number = 144
+```
+
+我们将检测为异常的结果进行绘制,可以得到以下图像。其中蓝色曲线为原时间序列,用红色点特殊标注的时间点为算法检测为异常的时间点。
+
+
+
+可以看到,Stray模型对输入序列变化进行了建模,成功检测出出现异常的时间点。
\ No newline at end of file
diff --git a/src/zh/UserGuide/latest/Deployment-and-Maintenance/AINode_Deployment_apache.md b/src/zh/UserGuide/latest/Deployment-and-Maintenance/AINode_Deployment_apache.md
new file mode 100644
index 000000000..4c4710469
--- /dev/null
+++ b/src/zh/UserGuide/latest/Deployment-and-Maintenance/AINode_Deployment_apache.md
@@ -0,0 +1,512 @@
+
+# AINode 部署
+
+## AINode介绍
+
+### 能力介绍
+
+AINode 是 IoTDB 在 ConfigNode、DataNode 后提供的第三种内生节点,该节点通过与 IoTDB 集群的 DataNode、ConfigNode 的交互,扩展了对时间序列进行机器学习分析的能力,支持从外部引入已有机器学习模型进行注册,并使用注册的模型在指定时序数据上通过简单 SQL 语句完成时序分析任务的过程,将模型的创建、管理及推理融合在数据库引擎中。目前已提供常见时序分析场景(例如预测与异常检测)的机器学习算法或自研模型。
+
+### 交付方式
+ 是 IoTDB 集群外的额外套件,独立安装包,独立激活(如需试用或使用,请联系天谋科技商务或技术支持)。
+
+### 部署模式
+
+

+

+
+
+## 安装准备
+
+### 安装包获取
+
+ 用户可以下载AINode的软件安装包,下载并解压后即完成AINode的安装。
+
+ 安装包(`iotdb-enterprise-ainode-.zip`)解压后的目录结构如下:
+| **目录** | **类型** | **说明** |
+| ------------ | -------- | ------------------------------------------------ |
+| lib | 文件夹 | AINode编译后的二进制可执行文件以及相关的代码依赖 |
+| sbin | 文件夹 | AINode的运行脚本,可以启动,移除和停止AINode |
+| conf | 文件夹 | 包含AINode的配置项,具体包含以下配置项 |
+| LICENSE | 文件 | 证书 |
+| NOTICE | 文件 | 提示 |
+| README_ZH.md | 文件 | markdown格式的中文版说明 |
+| `README.md` | 文件 | 使用说明 |
+
+### 环境准备
+- 建议操作环境: Ubuntu, CentOS, MacOS
+
+- 运行环境
+ - 联网环境下 Python >= 3.8即可,且带有 pip 和 venv 工具;非联网环境下需要使用 Python 3.8版本,并从 [此处](https://cloud.tsinghua.edu.cn/d/4c1342f6c272439aa96c/?p=%2Flibs&mode=list) 下载对应操作系统的zip压缩包(注意下载依赖需选择libs文件夹中的zip压缩包,如下图),并将文件夹下的所有文件拷贝到 `iotdb-enterprise-ainode-` 文件夹中 `lib` 文件夹下,并按下文步骤启动AINode。
+
+
+
+ - 环境变量中需存在 Python 解释器且可以通过 `python` 指令直接调用
+ - 建议在 `iotdb-enterprise-ainode-` 文件夹下,新建 Python 解释器 venv 虚拟环境。如安装 3.8.0 版本虚拟环境,语句如下:
+
+ ```shell
+ # 使用 3.8.0 版本的 Python 创建 venv 虚拟环境,文件夹名为 venv
+ ../Python-3.8.0/python -m venv venv
+ ```
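+
+ 创建完成后,可以先验证虚拟环境是否可用(以下命令假设虚拟环境文件夹名为 venv):
+
+ ```shell
+ # 验证虚拟环境中的 Python 与 pip 是否可用(示意)
+ ./venv/bin/python --version
+ ./venv/bin/python -m pip --version
+ ```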
+## 安装部署及使用
+
+### 安装 AINode
+
+1. 检查Linux的内核架构
+```shell
+ uname -m
+ ```
+
+2. 导入 Python 环境(可从[此处](https://repo.anaconda.com/miniconda/)下载 Miniconda 安装包)
+
+推荐下载 py311 版本的安装包,并将其导入至用户根目录下的 iotdb 专用文件夹中
+
+3. 切换至iotdb专用文件夹安装Python环境
+
+以 Miniconda3-py311_24.5.0-0-Linux-x86_64 为例:
+
+```shell
+ bash ./Miniconda3-py311_24.5.0-0-Linux-x86_64.sh
+ ```
+> 根据提示键入“回车”、“长按空格”、“回车”、“yes”、“yes”
+> 关闭当前SSH窗口重新连接
+
+ 4. 创建专用环境
+
+```shell
+ conda create -n ainode_py python=3.11.9
+ ```
+
+ 根据提示键入“y”
+
+ 5. 激活专用环境
+
+```shell
+ conda activate ainode_py
+ ```
+
+ 6. 验证Python版本
+
+```shell
+ python --version
+ ```
+ 7. 下载导入AINode到专用文件夹,切换到专用文件夹并解压安装包
+
+```shell
+ unzip iotdb-enterprise-ainode-1.3.3.2.zip
+ ```
+
+ 8. 配置项修改
+
+```shell
+ vi iotdb-enterprise-ainode-1.3.3.2/conf/iotdb-ainode.properties
+ ```
+ 配置项修改:[详细信息](#配置项修改)
+> ain_seed_config_node=iotdb-1:10710(集群通讯节点IP:通讯节点端口)
+> ain_inference_rpc_address=iotdb-3(运行AINode的服务器IP)
+
+ 9. 更换Python源
+
+```shell
+ pip config set global.index-url https://mirrors.aliyun.com/pypi/simple/
+ ```
+
+ 10. 启动AINode节点
+
+```shell
+ nohup bash iotdb-enterprise-ainode-1.3.3.2/sbin/start-ainode.sh > myout.file 2>& 1 &
+ ```
+> 回到系统默认环境:conda deactivate
+
+### 配置项修改
+AINode 支持修改一些必要的参数。可以在 `conf/iotdb-ainode.properties` 文件中找到下列参数并进行持久化的修改:
+
+| **名称** | **描述** | **类型** | **默认值** | **改后生效方式** |
+| :----------------------------- | ------------------------------------------------------------ | ------- | ------------------ | ---------------------------- |
+| cluster_name | AINode 要加入集群的标识 | string | defaultCluster | 仅允许在第一次启动服务前修改 |
+| ain_seed_config_node | AINode 启动时注册的 ConfigNode 地址 | String | 127.0.0.1:10710 | 仅允许在第一次启动服务前修改 |
+| ain_inference_rpc_address | AINode 提供服务与通信的地址 ,内部服务通讯接口 | String | 127.0.0.1 | 仅允许在第一次启动服务前修改 |
+| ain_inference_rpc_port | AINode 提供服务与通信的端口 | String | 10810 | 仅允许在第一次启动服务前修改 |
+| ain_system_dir | AINode 元数据存储路径,相对路径的起始目录与操作系统相关,建议使用绝对路径 | String | data/AINode/system | 仅允许在第一次启动服务前修改 |
+| ain_models_dir | AINode 存储模型文件的路径,相对路径的起始目录与操作系统相关,建议使用绝对路径 | String | data/AINode/models | 仅允许在第一次启动服务前修改 |
+| ain_logs_dir | AINode 存储日志的路径,相对路径的起始目录与操作系统相关,建议使用绝对路径 | String | logs/AINode | 重启后生效 |
+| ain_thrift_compression_enabled | AINode 是否启用 thrift 的压缩机制,0-不启动、1-启动 | Boolean | 0 | 重启后生效 |
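+
+下面是一份 `conf/iotdb-ainode.properties` 的配置示意,其中 IP 与端口均为示例取值,请按实际集群环境填写:
+
+```shell
+# conf/iotdb-ainode.properties(节选,取值仅为示意)
+cluster_name=defaultCluster
+ain_seed_config_node=127.0.0.1:10710
+ain_inference_rpc_address=127.0.0.1
+ain_inference_rpc_port=10810
+```
+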
+### 启动 AINode
+
+ 在完成 Seed-ConfigNode 的部署后,可以通过添加 AINode 节点来支持模型的注册和推理功能。在配置项中指定 IoTDB 集群的信息后,可以执行相应的指令来启动 AINode,加入 IoTDB 集群。
+
+#### 联网环境启动
+
+##### 启动命令
+
+```shell
+ # 启动命令
+ # Linux 和 MacOS 系统
+ bash sbin/start-ainode.sh
+
+ # Windows 系统
+ sbin\start-ainode.bat
+
+ # 后台启动命令(长期运行推荐)
+ # Linux 和 MacOS 系统
+ nohup bash sbin/start-ainode.sh > myout.file 2>& 1 &
+
+ # Windows 系统
+ nohup bash sbin\start-ainode.bat > myout.file 2>& 1 &
+ ```
+
+##### 详细语法
+
+```shell
+ # 启动命令
+ # Linux 和 MacOS 系统
+ bash sbin/start-ainode.sh -i -r -n
+
+ # Windows 系统
+ sbin\start-ainode.bat -i -r -n
+ ```
+
+##### 参数介绍:
+
+| **名称** | **标签** | **描述** | **是否必填** | **类型** | **默认值** | **输入方式** |
+| ------------------- | ---- | ------------------------------------------------------------ | -------- | ------ | ---------------- | ---------------------- |
+| ain_interpreter_dir | -i | AINode 所安装在的虚拟环境的解释器路径,需要使用绝对路径 | 否 | String | 默认读取环境变量 | 调用时输入或持久化修改 |
+| ain_force_reinstall | -r | 该脚本在检查 AINode 安装情况的时候是否检查版本,如果检查则在版本不对的情况下会强制安装 lib 里的 whl 安装包 | 否 | Bool | false | 调用时输入 |
+| ain_no_dependencies | -n | 指定在安装 AINode 的时候是否安装依赖,如果指定则仅安装 AINode 主程序而不安装依赖。 | 否 | Bool | false | 调用时输入 |
+
+ 如不想每次启动时指定对应参数,也可以在 `conf` 文件夹下的`ainode-env.sh` 和 `ainode-env.bat` 脚本中持久化修改参数(目前支持持久化修改 ain_interpreter_dir 参数)。
+
+ `ainode-env.sh` :
+ ```shell
+ # The defaulte venv environment is used if ain_interpreter_dir is not set. Please use absolute path without quotation mark
+ # ain_interpreter_dir=
+ ```
+ `ainode-env.bat` :
+```shell
+ @REM The defaulte venv environment is used if ain_interpreter_dir is not set. Please use absolute path without quotation mark
+ @REM set ain_interpreter_dir=
+ ```
+ 在写入参数值的后解除对应行的注释并保存即可在下一次执行脚本时生效。
+
+#### 示例
+
+##### 直接启动:
+
+```shell
+ # 启动命令
+ # Linux 和 MacOS 系统
+ bash sbin/start-ainode.sh
+ # Windows 系统
+ sbin\start-ainode.bat
+
+
+ # 后台启动命令(长期运行推荐)
+ # Linux 和 MacOS 系统
+ nohup bash sbin/start-ainode.sh > myout.file 2>& 1 &
+ # Windows 系统
+ nohup bash sbin\start-ainode.bat > myout.file 2>& 1 &
+ ```
+
+##### 更新启动:
+如果 AINode 的版本进行了更新(如更新了 `lib` 文件夹),可使用此命令。首先要保证 AINode 已经停止运行,然后通过 `-r` 参数重启,该参数会根据 `lib` 下的文件重新安装 AINode。
+
+```shell
+ # 更新启动命令
+ # Linux 和 MacOS 系统
+ bash sbin/start-ainode.sh -r
+ # Windows 系统
+ sbin\start-ainode.bat -r
+
+
+ # 后台更新启动命令(长期运行推荐)
+ # Linux 和 MacOS 系统
+ nohup bash sbin/start-ainode.sh -r > myout.file 2>& 1 &
+ # Windows 系统
+ nohup bash sbin\start-ainode.bat -r > myout.file 2>& 1 &
+ ```
+#### 非联网环境启动
+
+##### 启动命令
+
+```shell
+ # 启动命令
+ # Linux 和 MacOS 系统
+ bash sbin/start-ainode.sh
+
+ # Windows 系统
+ sbin\start-ainode.bat
+
+ # 后台启动命令(长期运行推荐)
+ # Linux 和 MacOS 系统
+ nohup bash sbin/start-ainode.sh > myout.file 2>& 1 &
+
+ # Windows 系统
+ nohup bash sbin\start-ainode.bat > myout.file 2>& 1 &
+ ```
+
+##### 详细语法
+
+```shell
+ # 启动命令
+ # Linux 和 MacOS 系统
+ bash sbin/start-ainode.sh -i -r -n
+
+ # Windows 系统
+ sbin\start-ainode.bat -i -r -n
+ ```
+
+##### 参数介绍:
+
+| **名称** | **标签** | **描述** | **是否必填** | **类型** | **默认值** | **输入方式** |
+| ------------------- | ---- | ------------------------------------------------------------ | -------- | ------ | ---------------- | ---------------------- |
+| ain_interpreter_dir | -i | AINode 所安装在的虚拟环境的解释器路径,需要使用绝对路径 | 否 | String | 默认读取环境变量 | 调用时输入或持久化修改 |
+| ain_force_reinstall | -r | 该脚本在检查 AINode 安装情况的时候是否检查版本,如果检查则在版本不对的情况下会强制安装 lib 里的 whl 安装包 | 否 | Bool | false | 调用时输入 |
+
+> 注意:非联网环境下安装失败时,首先检查是否选择了平台对应的安装包,其次确认python版本为3.8(由于下载的安装包限制了python版本,3.7、3.9等其他都不行)
+
+#### 示例
+
+##### 直接启动:
+
+```shell
+ # 启动命令
+ # Linux 和 MacOS 系统
+ bash sbin/start-ainode.sh
+ # Windows 系统
+ sbin\start-ainode.bat
+
+
+ # 后台启动命令(长期运行推荐)
+ # Linux 和 MacOS 系统
+ nohup bash sbin/start-ainode.sh > myout.file 2>& 1 &
+ # Windows 系统
+ nohup bash sbin\start-ainode.bat > myout.file 2>& 1 &
+ ```
+
+### 检测 AINode 节点状态
+
+AINode 启动过程中会自动将新的 AINode 加入 IoTDB 集群。启动 AINode 后,可以在命令行中输入以下 SQL 查询集群状态,若能在集群中看到 AINode 节点,且其运行状态为 Running(如下展示),则表示加入成功。
+
+```shell
+IoTDB> show cluster
++------+----------+-------+---------------+------------+-------+-----------+
+|NodeID| NodeType| Status|InternalAddress|InternalPort|Version| BuildInfo|
++------+----------+-------+---------------+------------+-------+-----------+
+| 0|ConfigNode|Running| 127.0.0.1| 10710|UNKNOWN|190e303-dev|
+| 1| DataNode|Running| 127.0.0.1| 10730|UNKNOWN|190e303-dev|
+| 2| AINode|Running| 127.0.0.1| 10810|UNKNOWN|190e303-dev|
++------+----------+-------+---------------+------------+-------+-----------+
+```
+
+### 停止 AINode
+
+如果需要停止正在运行的 AINode 节点,则执行相应的关闭脚本。
+
+#### 停止命令
+
+```shell
+ # Linux / MacOS
+ bash sbin/stop-ainode.sh
+
+ #Windows
+ sbin\stop-ainode.bat
+ ```
+
+##### 详细语法
+
+```shell
+ # Linux / MacOS
+ bash sbin/stop-ainode.sh -t <ain_remove_target>
+
+ #Windows
+ sbin\stop-ainode.bat -t <ain_remove_target>
+ ```
+
+##### 参数介绍:
+
+ | **名称** | **标签** | **描述** | **是否必填** | **类型** | **默认值** | **输入方式** |
+| ----------------- | ---- | ------------------------------------------------------------ | -------- | ------ | ------ | ---------- |
+| ain_remove_target | -t | AINode 关闭时可以指定待移除的目标 AINode 的 Node ID、地址和端口号,格式为`<NodeID>/<ip>:<port>` | 否 | String | 无 | 调用时输入 |
+
+#### 示例
+```shell
+ # Linux / MacOS
+ bash sbin/stop-ainode.sh
+
+ # Windows
+ sbin\stop-ainode.bat
+ ```
+停止 AINode 后,还可以在集群中看到 AINode 节点,其运行状态为 UNKNOWN(如下展示),此时无法使用 AINode 功能。
+
+ ```shell
+IoTDB> show cluster
++------+----------+-------+---------------+------------+-------+-----------+
+|NodeID| NodeType| Status|InternalAddress|InternalPort|Version| BuildInfo|
++------+----------+-------+---------------+------------+-------+-----------+
+| 0|ConfigNode|Running| 127.0.0.1| 10710|UNKNOWN|190e303-dev|
+| 1| DataNode|Running| 127.0.0.1| 10730|UNKNOWN|190e303-dev|
+| 2| AINode|UNKNOWN| 127.0.0.1| 10790|UNKNOWN|190e303-dev|
++------+----------+-------+---------------+------------+-------+-----------+
+```
+如果需要重新启动该节点,需重新执行启动脚本。
+
+### 移除 AINode
+
+当需要把一个 AINode 节点移出集群时,可以执行移除脚本。移除和停止脚本的差别是:停止是在集群中保留 AINode 节点但停止 AINode 服务,移除则是把 AINode 节点从集群中移除出去。
+
+
+ #### 移除命令
+
+```shell
+ # Linux / MacOS
+ bash sbin/remove-ainode.sh
+
+ # Windows
+ sbin\remove-ainode.bat
+ ```
+
+##### 详细语法
+
+```shell
+ # Linux / MacOS
+ bash sbin/remove-ainode.sh -i <ain_interpreter_dir> -t <ain_remove_target> -r -n
+
+ # Windows
+ sbin\remove-ainode.bat -i <ain_interpreter_dir> -t <ain_remove_target> -r -n
+ ```
+
+##### 参数介绍:
+
+ | **名称** | **标签** | **描述** | **是否必填** | **类型** | **默认值** | **输入方式** |
+| ------------------- | ---- | ------------------------------------------------------------ | -------- | ------ | ---------------- | --------------------- |
+| ain_interpreter_dir | -i | AINode 所安装在的虚拟环境的解释器路径,需要使用绝对路径 | 否 | String | 默认读取环境变量 | 调用时输入+持久化修改 |
+| ain_remove_target | -t | AINode 关闭时可以指定待移除的目标 AINode 的 Node ID、地址和端口号,格式为`<NodeID>/<ip>:<port>` | 否 | String | 无 | 调用时输入 |
+| ain_force_reinstall | -r | 该脚本在检查 AINode 安装情况的时候是否检查版本,如果检查则在版本不对的情况下会强制安装 lib 里的 whl 安装包 | 否 | Bool | false | 调用时输入 |
+| ain_no_dependencies | -n | 指定在安装 AINode 的时候是否安装依赖,如果指定则仅安装 AINode 主程序而不安装依赖。 | 否 | Bool | false | 调用时输入 |
+
+ 如不想每次启动时指定对应参数,也可以在 `conf` 文件夹下的`ainode-env.sh` 和 `ainode-env.bat` 脚本中持久化修改参数(目前支持持久化修改 ain_interpreter_dir 参数)。
+
+ `ainode-env.sh` :
+ ```shell
+ # The default venv environment is used if ain_interpreter_dir is not set. Please use absolute path without quotation mark
+ # ain_interpreter_dir=
+ ```
+ `ainode-env.bat` :
+```shell
+ @REM The default venv environment is used if ain_interpreter_dir is not set. Please use absolute path without quotation mark
+ @REM set ain_interpreter_dir=
+ ```
+ 写入参数值后,解除对应行的注释并保存,即可在下一次执行脚本时生效。
+
+#### 示例
+
+##### 直接移除:
+
+ ```shell
+ # Linux / MacOS
+ bash sbin/remove-ainode.sh
+
+ # Windows
+ sbin\remove-ainode.bat
+ ```
+ 移除节点后,将无法查询到节点的相关信息。
+
+ ```shell
+IoTDB> show cluster
++------+----------+-------+---------------+------------+-------+-----------+
+|NodeID| NodeType| Status|InternalAddress|InternalPort|Version| BuildInfo|
++------+----------+-------+---------------+------------+-------+-----------+
+| 0|ConfigNode|Running| 127.0.0.1| 10710|UNKNOWN|190e303-dev|
+| 1| DataNode|Running| 127.0.0.1| 10730|UNKNOWN|190e303-dev|
++------+----------+-------+---------------+------------+-------+-----------+
+```
+##### 指定移除:
+
+如果用户丢失了 data 文件夹下的文件,AINode 可能无法在本地主动移除自己,需要用户指定节点号、地址和端口号进行移除,此时支持用户按照以下方法输入参数进行删除。
+
+ ```shell
+ # Linux / MacOS
+ bash sbin/remove-ainode.sh -t <NodeID>/<ip>:<port>
+
+ # Windows
+ sbin\remove-ainode.bat -t <NodeID>/<ip>:<port>
+ ```
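+
+例如,若 `show cluster` 中待移除 AINode 的节点号为 2、地址为 127.0.0.1、端口为 10810(以下取值仅为示意,请以实际集群的输出为准),可按上述格式执行:
+
+ ```shell
+ # 按"节点号/地址:端口"指定待移除的 AINode(取值仅为示意)
+ bash sbin/remove-ainode.sh -t 2/127.0.0.1:10810
+ ```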
+
+## 常见问题
+
+### 启动AINode时出现找不到venv模块的报错
+
+ 当使用默认方式启动 AINode 时,会在安装包目录下创建一个 python 虚拟环境并安装依赖,因此要求安装 venv 模块。通常来说 python3.8 及以上的版本会自带 venv,但一些系统自带的 python 环境可能并不满足这一要求。出现该报错时有两种解决方案(二选一):
+
+ 方案一:在本地安装 venv 模块。以 ubuntu 为例,可以通过运行以下命令安装 python 自带的 venv 模块,或者从 python 官网安装一个自带 venv 的 python 版本。
+
+```shell
+apt-get install python3.8-venv
+```
+ 随后在 AINode 路径下使用安装好的 Python(以 3.8.0 为例)创建 venv 虚拟环境:
+
+```shell
+# 最后一个参数 venv 为虚拟环境文件夹名
+../Python-3.8.0/python -m venv venv
+```
+ 方案二:在运行启动脚本时通过 `-i` 指定已有的 python 解释器路径作为 AINode 的运行环境,这样就不再需要创建一个新的虚拟环境。
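+
+ 例如,假设已有虚拟环境的解释器位于 `/data/ainode/venv/bin/python`(该路径仅为示意),可以这样启动:
+
+ ```shell
+ # 使用 -i 指定已有的 python 解释器启动 AINode(路径仅为示意)
+ bash sbin/start-ainode.sh -i /data/ainode/venv/bin/python
+ ```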
+
+### python 中的 SSL 模块没有被正确安装和配置,无法处理 HTTPS 资源
+
+报错信息类似:`WARNING: pip is configured with locations that require TLS/SSL, however the ssl module in Python is not available.`
+
+可以在安装 OpenSSL 后,再重新构建 python 来解决这个问题。
+
+> Currently Python versions 3.6 to 3.9 are compatible with OpenSSL 1.0.2, 1.1.0, and 1.1.1.
+
+Python 要求系统上安装有 OpenSSL,具体安装方法可见[链接](https://stackoverflow.com/questions/56552390/how-to-fix-ssl-module-in-python-is-not-available-in-centos)
+
+ ```shell
+sudo apt-get install build-essential libssl-dev zlib1g-dev libbz2-dev libreadline-dev libsqlite3-dev wget curl llvm libncurses5-dev libncursesw5-dev xz-utils tk-dev libffi-dev liblzma-dev uuid-dev lzma-dev liblzma-dev
+sudo -E ./configure --with-ssl
+make
+sudo make install
+```
+
+### pip 或 setuptools 版本较低
+
+windows 下出现类似 “error: Microsoft Visual C++ 14.0 or greater is required...” 的编译问题。
+
+出现对应的报错,通常是 C++ 构建工具或 setuptools 版本不足,可以在对应的 python 环境中执行以下命令升级 pip 与 setuptools 后重试:
+
+```shell
+./python -m pip install --upgrade pip
+./python -m pip install --upgrade setuptools
+```
+
+
+ ### 安装编译python
+
+ 使用以下指令从官网下载安装包并解压:
+ ```shell
+wget https://www.python.org/ftp/python/3.8.0/Python-3.8.0.tar.xz
+tar Jxf Python-3.8.0.tar.xz
+```
+ 编译安装对应的 python 包:
+ ```shell
+cd Python-3.8.0
+./configure --prefix=/usr/local/python3
+make
+sudo make install
+python3 --version
+```
\ No newline at end of file
diff --git a/src/zh/UserGuide/latest/User-Manual/AINode_apache.md b/src/zh/UserGuide/latest/User-Manual/AINode_apache.md
new file mode 100644
index 000000000..2da3c824d
--- /dev/null
+++ b/src/zh/UserGuide/latest/User-Manual/AINode_apache.md
@@ -0,0 +1,650 @@
+
+
+# AI能力(AINode)
+
+AINode 是 IoTDB 在ConfigNode、DataNode后提供的第三种内生节点,该节点通过与 IoTDB 集群的 DataNode、ConfigNode 的交互,扩展了对时间序列进行机器学习分析的能力,支持从外部引入已有机器学习模型进行注册,并使用注册的模型在指定时序数据上通过简单 SQL 语句完成时序分析任务的过程,将模型的创建、管理及推理融合在数据库引擎中。目前已提供常见时序分析场景(例如预测与异常检测)的机器学习算法或自研模型。
+
+系统架构如下图所示:
+::: center
+
+:::
+三种节点的职责如下:
+
+- **ConfigNode**:负责保存和管理模型的元信息;负责分布式节点管理。
+- **DataNode**:负责接收并解析用户的 SQL请求;负责存储时间序列数据;负责数据的预处理计算。
+- **AINode**:负责模型文件的导入创建以及模型推理。
+
+## 优势特点
+
+与单独构建机器学习服务相比,具有以下优势:
+
+- **简单易用**:无需使用 Python 或 Java 编程,使用 SQL 语句即可完成机器学习模型管理与推理的完整流程。如创建模型可使用CREATE MODEL语句、使用模型进行推理可使用CALL INFERENCE(...)语句等,使用更加简单便捷。
+
+- **避免数据迁移**:使用 IoTDB 原生机器学习可以将存储在 IoTDB 中的数据直接应用于机器学习模型的推理,无需将数据移动到单独的机器学习服务平台,从而加速数据处理、提高安全性并降低成本。
+
+
+
+- **内置先进算法**:支持业内领先机器学习分析算法,覆盖典型时序分析任务,为时序数据库赋能原生数据分析能力。如:
+ - **时间序列预测(Time Series Forecasting)**:从过去时间序列中学习变化模式;从而根据给定过去时间的观测值,输出未来序列最可能的预测。
+ - **时序异常检测(Anomaly Detection for Time Series)**:在给定的时间序列数据中检测和识别异常值,帮助发现时间序列中的异常行为。
+ - **时间序列标注(Time Series Annotation)**:为每个数据点或特定时间段添加额外的信息或标记,例如事件发生、异常点、趋势变化等,以便更好地理解和分析数据。
+
+
+## 基本概念
+
+- **模型(Model)**:机器学习模型,以时序数据作为输入,输出分析任务的结果或决策。模型是AINode 的基本管理单元,支持模型的增(注册)、删、查、用(推理)。
+- **创建(Create)**: 将外部设计或训练好的模型文件或算法加载到 AINode 中,由 IoTDB 统一管理与使用。
+- **推理(Inference)**:使用创建的模型在指定时序数据上完成该模型适用的时序分析任务的过程。
+- **内置能力(Built-in)**:AINode 自带常见时序分析场景(例如预测与异常检测)的机器学习算法或自研模型。
+
+::: center
+
+:::
+
+## 安装部署
+
+AINode 的部署可参考文档 [部署指导](../Deployment-and-Maintenance/AINode_Deployment_apache.md#ainode-部署) 章节。
+
+## 使用指导
+
+AINode 对时序数据相关的深度学习模型提供了模型创建及删除的流程,内置模型无需创建及删除,可直接使用,并且在完成推理后创建的内置模型实例将自动销毁。
+
+### 注册模型
+
+通过指定模型输入输出的向量维度,可以注册训练好的深度学习模型,从而用于模型推理。
+
+符合以下内容的模型可以注册到AINode中:
+ 1. AINode 支持 PyTorch 2.1.0、2.2.0 版本训练的模型,需避免使用 2.2.0 以上版本的特性。
+ 2. AINode支持使用PyTorch JIT存储的模型,模型文件需要包含模型的参数和结构。
+ 3. 模型输入序列可以包含一列或多列,若有多列,需要和模型能力、模型配置文件对应。
+ 4. 模型的输入输出维度必须在`config.yaml`配置文件中明确定义。使用模型时,必须严格按照`config.yaml`配置文件中定义的输入输出维度。如果输入输出列数不匹配配置文件,将会导致错误。
+
+下方为模型注册的SQL语法定义。
+
+```SQL
+create model <model_name> using uri <uri>
+```
+
+SQL中参数的具体含义如下:
+
+- model_name:模型的全局唯一标识,不可重复。模型名称具备以下约束:
+
+ - 允许出现标识符 [ 0-9 a-z A-Z _ ] (字母,数字,下划线)
+ - 长度限制为2-64字符
+ - 大小写敏感
+
+- uri:模型注册文件的资源路径,路径下应包含**模型权重model.pt文件和模型的元数据描述文件config.yaml**
+
+ - 模型权重文件:深度学习模型训练完成后得到的权重文件,目前支持pytorch训练得到的.pt文件
+
+ - yaml元数据描述文件:模型注册时需要提供的与模型结构有关的参数,其中必须包含模型的输入输出维度用于模型推理:
+
+ - | **参数名** | **参数描述** | **示例** |
+ | ------------ | ---------------------------- | -------- |
+ | input_shape | 模型输入的行列,用于模型推理 | [96,2] |
+ | output_shape | 模型输出的行列,用于模型推理 | [48,2] |
+
+ - 除了模型推理外,还可以指定模型输入输出的数据类型:
+
+ - | **参数名** | **参数描述** | **示例** |
+ | ----------- | ------------------ | --------------------- |
+ | input_type | 模型输入的数据类型 | ['float32','float32'] |
+ | output_type | 模型输出的数据类型 | ['float32','float32'] |
+
+ - 除此之外,可以额外指定备注信息用于在模型管理时进行展示
+
+ - | **参数名** | **参数描述** | **示例** |
+ | ---------- | ---------------------------------------------- | ------------------------------------------- |
+ | attributes | 可选,用户自行设定的模型备注信息,用于模型展示 | 'model_type': 'dlinear','kernel_size': '25' |
+
+
+除了本地模型文件的注册,还可以通过URI来指定远程资源路径来进行注册,使用开源的模型仓库(例如HuggingFace)。
+
+#### 示例
+
+在当前的example文件夹下,包含model.pt和config.yaml文件,model.pt为训练得到,config.yaml的内容如下:
+
+```YAML
+configs:
+ # 必选项
+ input_shape: [96, 2] # 表示模型接收的数据为96行x2列
+ output_shape: [48, 2] # 表示模型输出的数据为48行x2列
+
+ # 可选项 默认为全部float32,列数为shape对应的列数
+ input_type: ["int64","int64"] #输入对应的数据类型,需要与输入列数匹配
+ output_type: ["text","int64"] #输出对应的数据类型,需要与输出列数匹配
+
+attributes: # 可选项 为用户自定义的备注信息
+ 'model_type': 'dlinear'
+ 'kernel_size': '25'
+```
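+
+注册前可以先确认目录下同时存在权重文件与配置文件(以下输出仅为示意):
+
+```shell
+$ ls ./example
+config.yaml  model.pt
+```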
+
+指定该文件夹作为加载路径就可以注册该模型
+
+```SQL
+IoTDB> create model dlinear_example using uri "file://./example"
+```
+
+也可以从huggingFace上下载对应的模型文件进行注册
+
+```SQL
+IoTDB> create model dlinear_example using uri "https://huggingface.com/IoTDBML/dlinear/"
+```
+
+SQL执行后会异步进行注册的流程,可以通过模型展示查看模型的注册状态(见模型展示章节),注册成功的耗时主要受到模型文件大小的影响。
+
+模型注册完成后,就可以通过使用正常查询的方式调用具体函数,进行模型推理。
+
+### 查看模型
+
+注册成功的模型可以通过show models指令查询模型的具体信息。其SQL定义如下:
+
+```SQL
+show models
+
+show models <model_id>
+```
+
+除了直接展示所有模型的信息外,可以指定model id来查看某一具体模型的信息。模型展示的结果中包含如下信息:
+
+| **ModelId** | **State** | **Configs** | **Attributes** |
+| ------------ | ------------------------------------- | ---------------------------------------------- | -------------- |
+| 模型唯一标识 | 模型注册状态(LOADING,ACTIVE,DROPPING,UNAVAILABLE) | InputShape, OutputShape, InputTypes, OutputTypes | 模型备注信息 |
+
+其中,State 用于展示当前模型注册的状态,包含以下四种状态:
+
+- **LOADING**:已经在configNode中添加对应的模型元信息,正将模型文件传输到AINode节点上
+- **ACTIVE:** 模型已经设置完成,模型处于可用状态
+- **DROPPING**:模型删除中,正在从configNode以及AINode处删除模型相关信息
+- **UNAVAILABLE**: 模型创建失败,可以通过drop model删除创建失败的model_name。
+
+#### 示例
+
+```SQL
+IoTDB> show models
+
+
++---------------------+--------------------------+-----------+----------------------------+-----------------------+
+| ModelId| ModelType| State| Configs| Notes|
++---------------------+--------------------------+-----------+----------------------------+-----------------------+
+| dlinear_example| USER_DEFINED| ACTIVE| inputShape:[96,2]| |
+| | | | outputShape:[48,2]| |
+| | | | inputDataType:[float,float]| |
+| | | |outputDataType:[float,float]| |
+| _STLForecaster| BUILT_IN_FORECAST| ACTIVE| |Built-in model in IoTDB|
+| _NaiveForecaster| BUILT_IN_FORECAST| ACTIVE| |Built-in model in IoTDB|
+| _ARIMA| BUILT_IN_FORECAST| ACTIVE| |Built-in model in IoTDB|
+|_ExponentialSmoothing| BUILT_IN_FORECAST| ACTIVE| |Built-in model in IoTDB|
+| _GaussianHMM|BUILT_IN_ANOMALY_DETECTION| ACTIVE| |Built-in model in IoTDB|
+| _GMMHMM|BUILT_IN_ANOMALY_DETECTION| ACTIVE| |Built-in model in IoTDB|
+| _Stray|BUILT_IN_ANOMALY_DETECTION| ACTIVE| |Built-in model in IoTDB|
++---------------------+--------------------------+-----------+----------------------------+-----------------------+
+```
+
+我们前面已经注册了对应的模型,可以通过上述指令查看模型状态,ACTIVE 表明模型注册成功,可用于推理。
+
+### 删除模型
+
+对于注册成功的模型,用户可以通过SQL进行删除。该操作除了删除configNode上的元信息外,还会删除所有AINode下的相关模型文件。其SQL如下:
+
+```SQL
+drop model <model_name>
+```
+
+需要指定已经成功注册的模型model_name来删除对应的模型。由于模型删除涉及多个节点上的数据删除,操作不会立即完成,此时模型的状态为DROPPING,该状态的模型不能用于模型推理。
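+
+以删除前文注册的 dlinear_example 为例,删除操作的示意如下(返回信息以实际版本为准):
+
+```Shell
+IoTDB> drop model dlinear_example
+Msg: The statement is executed successfully.
+```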
+
+### 使用内置模型推理
+
+SQL语法如下:
+
+
+```SQL
+call inference(<built_in_model_name>, sql[, <parameterName>=<parameterValue>])
+```
+
+内置模型推理无需注册流程,通过call关键字,调用inference函数就可以使用模型的推理功能,其对应的参数介绍如下:
+
+- **built_in_model_name:** 内置模型名称
+- **parameterName**:参数名
+- **parameterValue**:参数值
+
+#### 内置模型及参数说明
+
+目前已内置如下机器学习模型,具体参数说明请参考以下链接。
+
+| 模型 | built_in_model_name | 任务类型 | 参数说明 |
+| -------------------- | --------------------- | -------- | ------------------------------------------------------------ |
+| Arima | _Arima | 预测 | [Arima参数说明](https://www.sktime.net/en/latest/api_reference/auto_generated/sktime.forecasting.arima.ARIMA.html?highlight=Arima) |
+| STLForecaster | _STLForecaster | 预测 | [STLForecaster参数说明](https://www.sktime.net/en/latest/api_reference/auto_generated/sktime.forecasting.trend.STLForecaster.html#sktime.forecasting.trend.STLForecaster) |
+| NaiveForecaster | _NaiveForecaster | 预测 | [NaiveForecaster参数说明](https://www.sktime.net/en/latest/api_reference/auto_generated/sktime.forecasting.naive.NaiveForecaster.html#naiveforecaster) |
+| ExponentialSmoothing | _ExponentialSmoothing | 预测 | [ExponentialSmoothing参数说明](https://www.sktime.net/en/latest/api_reference/auto_generated/sktime.forecasting.exp_smoothing.ExponentialSmoothing.html) |
+| GaussianHMM | _GaussianHMM | 标注 | [GaussianHMM参数说明](https://www.sktime.net/en/latest/api_reference/auto_generated/sktime.annotation.hmm_learn.gaussian.GaussianHMM.html) |
+| GMMHMM | _GMMHMM | 标注 | [GMMHMM参数说明](https://www.sktime.net/en/latest/api_reference/auto_generated/sktime.annotation.hmm_learn.gmm.GMMHMM.html) |
+| Stray | _Stray | 异常检测 | [Stray参数说明](https://www.sktime.net/en/latest/api_reference/auto_generated/sktime.annotation.stray.STRAY.html) |
+
+#### 示例
+
+下面是使用内置模型推理的一个操作示例,使用内置的Stray模型进行异常检测算法,输入为`[144,1]`,输出为`[144,1]`,我们通过SQL使用其进行推理。
+
+```SQL
+IoTDB> select * from root.eg.airline
++-----------------------------+------------------+
+| Time|root.eg.airline.s0|
++-----------------------------+------------------+
+|1949-01-31T00:00:00.000+08:00| 224.0|
+|1949-02-28T00:00:00.000+08:00| 118.0|
+|1949-03-31T00:00:00.000+08:00| 132.0|
+|1949-04-30T00:00:00.000+08:00| 129.0|
+......
+|1960-09-30T00:00:00.000+08:00| 508.0|
+|1960-10-31T00:00:00.000+08:00| 461.0|
+|1960-11-30T00:00:00.000+08:00| 390.0|
+|1960-12-31T00:00:00.000+08:00| 432.0|
++-----------------------------+------------------+
+Total line number = 144
+
+IoTDB> call inference(_Stray, "select s0 from root.eg.airline", k=2)
++-------+
+|output0|
++-------+
+| 0|
+| 0|
+| 0|
+| 0|
+......
+| 1|
+| 1|
+| 0|
+| 0|
+| 0|
+| 0|
++-------+
+Total line number = 144
+```
+
+### 使用深度学习模型推理
+
+SQL语法如下:
+
+```SQL
+call inference(<model_name>, sql[, window=<window_function>])
+
+
+window_function:
+ head(window_size)
+ tail(window_size)
+ count(window_size,sliding_step)
+```
+
+在完成模型的注册后,通过call关键字,调用inference函数就可以使用模型的推理功能,其对应的参数介绍如下:
+
+- **model_name**: 对应一个已经注册的模型
+- **sql**:sql查询语句,查询的结果作为模型的输入进行模型推理。查询的结果中行列的维度需要与具体模型config中指定的大小相匹配。(这里的sql不建议使用`SELECT *`子句,因为在IoTDB中,`*`并不会对列进行排序,因此列的顺序是未定义的,可以使用`SELECT s0,s1`的方式确保列的顺序符合模型输入的预期)
+- **window_function**: 推理过程中可以使用的窗口函数,目前提供三种类型的窗口函数用于辅助模型推理:
+ - **head(window_size)**: 获取数据中最前的window_size个点用于模型推理,该窗口可用于数据裁剪
+ 
+
+ - **tail(window_size)**:获取数据中最后的window_size个点用于模型推理,该窗口可用于数据裁剪
+ 
+
+ - **count(window_size, sliding_step)**:基于点数的滑动窗口,每个窗口的数据会分别通过模型进行推理,如下图示例所示,window_size为2的窗口函数将输入数据集分为三个窗口,每个窗口分别进行推理运算生成结果。该窗口可用于连续推理
+ 
+
+**说明1: window可以用来解决sql查询结果和模型的输入行数要求不一致时的问题,对行进行裁剪。需要注意的是,当列数不匹配或是行数直接少于模型需求时,推理无法进行,会返回错误信息。**
+
+**说明2: 在深度学习应用中,经常将时间戳衍生特征(数据中的时间列)作为生成式任务的协变量,一同输入到模型中以提升模型的效果,但是在模型的输出结果中一般不包含时间列。为了保证实现的通用性,模型推理结果只对应模型的真实输出,如果模型不输出时间列,则结果中不会包含。**
+
+
+#### 示例
+
+下面是使用深度学习模型推理的一个操作示例,针对上面提到的输入为`[96,2]`,输出为`[48,2]`的`dlinear`预测模型,我们通过SQL使用其进行推理。
+
+```Shell
+IoTDB> select s1,s2 from root.**
++-----------------------------+-------------------+-------------------+
+| Time| root.eg.etth.s0| root.eg.etth.s1|
++-----------------------------+-------------------+-------------------+
+|1990-01-01T00:00:00.000+08:00| 0.7855| 1.611|
+|1990-01-02T00:00:00.000+08:00| 0.7818| 1.61|
+|1990-01-03T00:00:00.000+08:00| 0.7867| 1.6293|
+|1990-01-04T00:00:00.000+08:00| 0.786| 1.637|
+|1990-01-05T00:00:00.000+08:00| 0.7849| 1.653|
+|1990-01-06T00:00:00.000+08:00| 0.7866| 1.6537|
+|1990-01-07T00:00:00.000+08:00| 0.7886| 1.662|
+......
+|1990-03-31T00:00:00.000+08:00| 0.7585| 1.678|
+|1990-04-01T00:00:00.000+08:00| 0.7587| 1.6763|
+|1990-04-02T00:00:00.000+08:00| 0.76| 1.6813|
+|1990-04-03T00:00:00.000+08:00| 0.7669| 1.684|
+|1990-04-04T00:00:00.000+08:00| 0.7645| 1.677|
+|1990-04-05T00:00:00.000+08:00| 0.7625| 1.68|
+|1990-04-06T00:00:00.000+08:00| 0.7617| 1.6917|
++-----------------------------+-------------------+-------------------+
+Total line number = 96
+
+IoTDB> call inference(dlinear_example,"select s0,s1 from root.**")
++--------------------------------------------+-----------------------------+
+| _result_0| _result_1|
++--------------------------------------------+-----------------------------+
+| 0.726302981376648| 1.6549958229064941|
+| 0.7354921698570251| 1.6482787370681763|
+| 0.7238251566886902| 1.6278168201446533|
+......
+| 0.7692174911499023| 1.654654049873352|
+| 0.7685555815696716| 1.6625318765640259|
+| 0.7856493592262268| 1.6508299350738525|
++--------------------------------------------+-----------------------------+
+Total line number = 48
+```
+
+#### 使用tail/head窗口函数的示例
+
+当数据量不定且想要取96行最新数据用于推理时,可以使用对应的窗口函数tail。head函数的用法与其类似,不同点在于其取的是最早的96个点。
+
+```Shell
+IoTDB> select s1,s2 from root.**
++-----------------------------+-------------------+-------------------+
+| Time| root.eg.etth.s0| root.eg.etth.s1|
++-----------------------------+-------------------+-------------------+
+|1988-01-01T00:00:00.000+08:00| 0.7355| 1.211|
+......
+|1990-01-01T00:00:00.000+08:00| 0.7855| 1.611|
+|1990-01-02T00:00:00.000+08:00| 0.7818| 1.61|
+|1990-01-03T00:00:00.000+08:00| 0.7867| 1.6293|
+|1990-01-04T00:00:00.000+08:00| 0.786| 1.637|
+|1990-01-05T00:00:00.000+08:00| 0.7849| 1.653|
+|1990-01-06T00:00:00.000+08:00| 0.7866| 1.6537|
+|1990-01-07T00:00:00.000+08:00| 0.7886| 1.662|
+......
+|1990-03-31T00:00:00.000+08:00| 0.7585| 1.678|
+|1990-04-01T00:00:00.000+08:00| 0.7587| 1.6763|
+|1990-04-02T00:00:00.000+08:00| 0.76| 1.6813|
+|1990-04-03T00:00:00.000+08:00| 0.7669| 1.684|
+|1990-04-04T00:00:00.000+08:00| 0.7645| 1.677|
+|1990-04-05T00:00:00.000+08:00| 0.7625| 1.68|
+|1990-04-06T00:00:00.000+08:00| 0.7617| 1.6917|
++-----------------------------+-------------------+-------------------+
+Total line number = 996
+
+IoTDB> call inference(dlinear_example,"select s0,s1 from root.**",window=tail(96))
++--------------------------------------------+-----------------------------+
+| _result_0| _result_1|
++--------------------------------------------+-----------------------------+
+| 0.726302981376648| 1.6549958229064941|
+| 0.7354921698570251| 1.6482787370681763|
+| 0.7238251566886902| 1.6278168201446533|
+......
+| 0.7692174911499023| 1.654654049873352|
+| 0.7685555815696716| 1.6625318765640259|
+| 0.7856493592262268| 1.6508299350738525|
++--------------------------------------------+-----------------------------+
+Total line number = 48
+```
+
+#### 使用count窗口函数的示例
+
+该窗口主要用于计算式任务,当任务对应的模型一次只能处理固定行数的数据,而最终想要的却是多组预测结果时,使用该窗口函数可以基于点数滑动窗口进行连续推理。假设我们现在有一个异常检测模型anomaly_example(input: [24,2], output: [1,1]),对每24行数据会生成一个0/1的标签,其使用示例如下:
+
+```Shell
+IoTDB> select s1,s2 from root.**
++-----------------------------+-------------------+-------------------+
+| Time| root.eg.etth.s0| root.eg.etth.s1|
++-----------------------------+-------------------+-------------------+
+|1990-01-01T00:00:00.000+08:00| 0.7855| 1.611|
+|1990-01-02T00:00:00.000+08:00| 0.7818| 1.61|
+|1990-01-03T00:00:00.000+08:00| 0.7867| 1.6293|
+|1990-01-04T00:00:00.000+08:00| 0.786| 1.637|
+|1990-01-05T00:00:00.000+08:00| 0.7849| 1.653|
+|1990-01-06T00:00:00.000+08:00| 0.7866| 1.6537|
+|1990-01-07T00:00:00.000+08:00| 0.7886| 1.662|
+......
+|1990-03-31T00:00:00.000+08:00| 0.7585| 1.678|
+|1990-04-01T00:00:00.000+08:00| 0.7587| 1.6763|
+|1990-04-02T00:00:00.000+08:00| 0.76| 1.6813|
+|1990-04-03T00:00:00.000+08:00| 0.7669| 1.684|
+|1990-04-04T00:00:00.000+08:00| 0.7645| 1.677|
+|1990-04-05T00:00:00.000+08:00| 0.7625| 1.68|
+|1990-04-06T00:00:00.000+08:00| 0.7617| 1.6917|
++-----------------------------+-------------------+-------------------+
+Total line number = 96
+
+IoTDB> call inference(anomaly_example,"select s0,s1 from root.**",window=count(24,24))
++-------------------------+
+| _result_0|
++-------------------------+
+| 0|
+| 1|
+| 1|
+| 0|
++-------------------------+
+Total line number = 4
+```
+
+其中结果集中每行的标签对应每24行数据为一组,输入该异常检测模型后的输出。
+
+## 权限管理
+
+使用 AINode 相关功能时,可以使用 IoTDB 本身的鉴权机制进行权限管理:用户只有在具备 USE_MODEL 权限时,才可以使用模型管理的相关功能;使用推理功能时,用户需要有访问输入模型的 SQL 对应的源序列的权限。
+
+| 权限名称 | 权限范围 | 管理员用户(默认ROOT) | 普通用户 | 路径相关 |
+| --------- | --------------------------------- | ---------------------- | -------- | -------- |
+| USE_MODEL | create model / show models / drop model | √ | √ | x |
+| READ_DATA | call inference | √ | √ | √ |
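+
+下面是一个授权的示意,其中用户名、密码与路径均为假设值,具体的权限管理语法请以所使用 IoTDB 版本的权限管理文档为准:
+
+```Shell
+IoTDB> create user user01 'passwd123456'
+IoTDB> grant USE_MODEL on root.** to user user01
+IoTDB> grant READ_DATA on root.eg.** to user user01
+```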
+
+## 实际案例
+
+### 电力负载预测
+
+在部分工业场景下,会存在预测电力负载的需求,预测结果可用于优化电力供应、节约能源和资源、支持规划和扩展以及增强电力系统的可靠性。
+
+我们所使用的 ETTh1 的测试集的数据为[ETTh1](https://alioss.timecho.com/docs/img/ETTh1.csv)。
+
+
+包含间隔1h采集一次的电力数据,每条数据由负载和油温构成,分别为:High UseFul Load, High UseLess Load, Middle UseLess Load, Low UseFul Load, Low UseLess Load, Oil Temperature。
+
+在该数据集上,IoTDB-ML的模型推理功能可以通过以往高中低三种负载的数值和对应时间戳油温的关系,预测未来一段时间内的油温,赋能电网变压器的自动调控和监视。
+
+#### 步骤一:数据导入
+
+用户可以使用tools文件夹中的`import-csv.sh` 向 IoTDB 中导入 ETT 数据集
+
+```Bash
+bash ./import-csv.sh -h 127.0.0.1 -p 6667 -u root -pw root -f ../../ETTh1.csv
+```
+
+#### 步骤二:模型导入
+
+我们可以在iotdb-cli 中输入以下SQL从 huggingface 上拉取一个已经训练好的模型进行注册,用于后续的推理。
+
+```SQL
+create model dlinear using uri 'https://huggingface.co/hvlgo/dlinear/tree/main'
+```
+
+该模型基于较为轻量化的深度模型DLinear训练而得,能够以相对快的推理速度尽可能多地捕捉到序列内部的变化趋势和变量间的数据变化关系,相较于其他更深的模型更适用于快速实时预测。
+
+#### 步骤三:模型推理
+
+```Shell
+IoTDB> select s0,s1,s2,s3,s4,s5,s6 from root.eg.etth LIMIT 96
++-----------------------------+---------------+---------------+---------------+---------------+---------------+---------------+---------------+
+| Time|root.eg.etth.s0|root.eg.etth.s1|root.eg.etth.s2|root.eg.etth.s3|root.eg.etth.s4|root.eg.etth.s5|root.eg.etth.s6|
++-----------------------------+---------------+---------------+---------------+---------------+---------------+---------------+---------------+
+|2017-10-20T00:00:00.000+08:00| 10.449| 3.885| 8.706| 2.025| 2.041| 0.944| 8.864|
+|2017-10-20T01:00:00.000+08:00| 11.119| 3.952| 8.813| 2.31| 2.071| 1.005| 8.442|
+|2017-10-20T02:00:00.000+08:00| 9.511| 2.88| 7.533| 1.564| 1.949| 0.883| 8.16|
+|2017-10-20T03:00:00.000+08:00| 9.645| 2.21| 7.249| 1.066| 1.828| 0.914| 7.949|
+......
+|2017-10-23T20:00:00.000+08:00| 8.105| 0.938| 4.371| -0.569| 3.533| 1.279| 9.708|
+|2017-10-23T21:00:00.000+08:00| 7.167| 1.206| 4.087| -0.462| 3.107| 1.432| 8.723|
+|2017-10-23T22:00:00.000+08:00| 7.1| 1.34| 4.015| -0.32| 2.772| 1.31| 8.864|
+|2017-10-23T23:00:00.000+08:00| 9.176| 2.746| 7.107| 1.635| 2.65| 1.097| 9.004|
++-----------------------------+---------------+---------------+---------------+---------------+---------------+---------------+---------------+
+Total line number = 96
+
+IoTDB> call inference(dlinear, "select s0,s1,s2,s3,s4,s5,s6 from root.eg.etth", window=head(96))
++-----------+----------+----------+------------+---------+----------+----------+
+| output0| output1| output2| output3| output4| output5| output6|
++-----------+----------+----------+------------+---------+----------+----------+
+| 10.319546| 3.1450553| 7.877341| 1.5723765|2.7303758| 1.1362307| 8.867775|
+| 10.443649| 3.3286757| 7.8593454| 1.7675098| 2.560634| 1.1177158| 8.920919|
+| 10.883752| 3.2341104| 8.47036| 1.6116762|2.4874182| 1.1760603| 8.798939|
+......
+| 8.0115595| 1.2995274| 6.9900327|-0.098746896| 3.04923| 1.176214| 9.548782|
+| 8.612427| 2.5036244| 5.6790237| 0.66474205|2.8870275| 1.2051733| 9.330128|
+| 10.096699| 3.399722| 6.9909| 1.7478468|2.7642853| 1.1119363| 9.541455|
++-----------+----------+----------+------------+---------+----------+----------+
+Total line number = 48
+```
+
+我们将对油温的预测的结果和真实结果进行对比,可以得到以下的图像。
+
+图中10/24 00:00之前的数据为输入模型的过去数据,10/24 00:00后的蓝色线条为模型给出的油温预测结果,而红色为数据集中实际的油温数据(用于进行对比)。
+
+
+
+可以看到,我们使用了过去96个小时(4天)的六个负载信息和对应时间油温的关系,基于之前学习到的序列间相互关系对未来48个小时(2天)的油温这一数据的可能变化进行了建模,可以看到可视化后预测曲线与实际结果在趋势上保持了较高程度的一致性。
+
+### 功率预测
+
+变电站需要对电流、电压、功率等数据进行电力监控,用于检测潜在的电网问题、识别电力系统中的故障、有效管理电网负载以及分析电力系统的性能和趋势等。
+
+我们利用某变电站中的电流、电压和功率等数据构成了真实场景下的数据集。该数据集包括变电站近四个月时间跨度,每5 - 6s 采集一次的 A相电压、B相电压、C相电压等数据。
+
+测试集数据内容为[data](https://alioss.timecho.com/docs/img/data.csv)。
+
+在该数据集上,IoTDB-ML的模型推理功能可以通过以往A相电压,B相电压和C相电压的数值和对应时间戳,预测未来一段时间内的C相电压,赋能变电站的监视管理。
+
+#### 步骤一:数据导入
+
+用户可以使用tools文件夹中的`import-csv.sh` 导入数据集
+
+```Bash
+bash ./import-csv.sh -h 127.0.0.1 -p 6667 -u root -pw root -f ../../data.csv
+```
+
+#### 步骤二:模型导入
+
+我们可以在iotdb-cli 中选择内置模型或已经注册好的模型用于后续的推理。
+
+我们采用内置模型STLForecaster进行预测,STLForecaster 是一个基于 statsmodels 库中 STL 实现的时间序列预测方法。
+
+#### 步骤三:模型推理
+
+```Shell
+IoTDB> select * from root.eg.voltage limit 96
++-----------------------------+------------------+------------------+------------------+
+| Time|root.eg.voltage.s0|root.eg.voltage.s1|root.eg.voltage.s2|
++-----------------------------+------------------+------------------+------------------+
+|2023-02-14T20:38:32.000+08:00| 2038.0| 2028.0| 2041.0|
+|2023-02-14T20:38:38.000+08:00| 2014.0| 2005.0| 2018.0|
+|2023-02-14T20:38:44.000+08:00| 2014.0| 2005.0| 2018.0|
+......
+|2023-02-14T20:47:52.000+08:00| 2024.0| 2016.0| 2027.0|
+|2023-02-14T20:47:57.000+08:00| 2024.0| 2016.0| 2027.0|
+|2023-02-14T20:48:03.000+08:00| 2024.0| 2016.0| 2027.0|
++-----------------------------+------------------+------------------+------------------+
+Total line number = 96
+
+IoTDB> call inference(_STLForecaster, "select s0,s1,s2 from root.eg.voltage", window=head(96),predict_length=48)
++---------+---------+---------+
+| output0| output1| output2|
++---------+---------+---------+
+|2026.3601|2018.2953|2029.4257|
+|2019.1538|2011.4361|2022.0888|
+|2025.5074|2017.4522|2028.5199|
+......
+
+|2022.2336|2015.0290|2025.1023|
+|2015.7241|2008.8975|2018.5085|
+|2022.0777|2014.9136|2024.9396|
+|2015.5682|2008.7821|2018.3458|
++---------+---------+---------+
+Total line number = 48
+```
+我们将对C相电压的预测的结果和真实结果进行对比,可以得到以下的图像。
+
+图中 02/14 20:48 之前的数据为输入模型的过去数据, 02/14 20:48 后的蓝色线条为模型给出的C相电压预测结果,而红色为数据集中实际的C相电压数据(用于进行对比)。
+
+
+
+可以看到,我们使用了过去10分钟的电压的数据,基于之前学习到的序列间相互关系对未来5分钟的C相电压这一数据的可能变化进行了建模,可以看到可视化后预测曲线与实际结果在趋势上保持了一定的同步性。
+
+### 异常检测
+
+在民航交通运输业,存在着对乘机旅客数量进行异常检测的需求。异常检测的结果可用于指导调整航班的调度,以使得企业获得更大效益。
+
+Airline Passengers 是一个时间序列数据集,该数据集记录了1949年至1960年期间国际航空乘客数量,间隔一个月进行一次采样。该数据集共含一条时间序列。数据集为[airline](https://alioss.timecho.com/docs/img/airline.csv)。
+在该数据集上,IoTDB-ML的模型推理功能可以通过捕捉序列的变化规律以对序列时间点进行异常检测,赋能交通运输业。
+
+#### 步骤一:数据导入
+
+用户可以使用tools文件夹中的`import-csv.sh` 导入数据集
+
+```Bash
+bash ./import-csv.sh -h 127.0.0.1 -p 6667 -u root -pw root -f ../../airline.csv
+```
+
+#### 步骤二:模型推理
+
+IoTDB内置有部分可以直接使用的机器学习算法,使用其中的异常检测算法进行预测的样例如下:
+
+```Shell
+IoTDB> select * from root.eg.airline
++-----------------------------+------------------+
+| Time|root.eg.airline.s0|
++-----------------------------+------------------+
+|1949-01-31T00:00:00.000+08:00| 224.0|
+|1949-02-28T00:00:00.000+08:00| 118.0|
+|1949-03-31T00:00:00.000+08:00| 132.0|
+|1949-04-30T00:00:00.000+08:00| 129.0|
+......
+|1960-09-30T00:00:00.000+08:00| 508.0|
+|1960-10-31T00:00:00.000+08:00| 461.0|
+|1960-11-30T00:00:00.000+08:00| 390.0|
+|1960-12-31T00:00:00.000+08:00| 432.0|
++-----------------------------+------------------+
+Total line number = 144
+
+IoTDB> call inference(_Stray, "select s0 from root.eg.airline", k=2)
++-------+
+|output0|
++-------+
+| 0|
+| 0|
+| 0|
+| 0|
+......
+| 1|
+| 1|
+| 0|
+| 0|
+| 0|
+| 0|
++-------+
+Total line number = 144
+```
+
+我们将检测为异常的结果进行绘制,可以得到以下图像。其中蓝色曲线为原时间序列,用红色点特殊标注的时间点为算法检测为异常的时间点。
+
+
+
+可以看到,Stray模型对输入序列变化进行了建模,成功检测出出现异常的时间点。
\ No newline at end of file