소스 검색

初始化提交

oldwine 5 달 전
커밋
4d400bbb6c
11개의 변경된 파일 1299개의 추가작업 그리고 0개의 파일을 삭제
  1. 165 0
      .gitignore
  2. 64 0
      conf/BarLine1.csv
  3. 7 0
      main.py
  4. 55 0
      models/data_loader.py
  5. 217 0
      models/network.py
  6. 42 0
      test/main.py
  7. 58 0
      utils/logger.py
  8. 117 0
      utils/mqttdata.py
  9. 258 0
      utils/s7data.py
  10. 199 0
      utils/statepoint.py
  11. 117 0
      utils/tcp_data.py

+ 165 - 0
.gitignore

@@ -0,0 +1,165 @@
+# ---> Python
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# User
+localtest/
+
+# Distribution / packaging
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# PyInstaller
+#  Usually these files are written by a python script from a template
+#  before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+*.py,cover
+.hypothesis/
+.pytest_cache/
+cover/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+db.sqlite3-journal
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+.pybuilder/
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# IPython
+profile_default/
+ipython_config.py
+
+# pyenv
+#   For a library or package, you might want to ignore these files since the code is
+#   intended to run in multiple environments; otherwise, check them in:
+# .python-version
+
+# pipenv
+#   According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+#   However, in case of collaboration, if having platform-specific dependencies or dependencies
+#   having no cross-platform support, pipenv may install dependencies that don't work, or not
+#   install all needed dependencies.
+#Pipfile.lock
+
+# poetry
+#   Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
+#   This is especially recommended for binary packages to ensure reproducibility, and is more
+#   commonly ignored for libraries.
+#   https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
+#poetry.lock
+
+# pdm
+#   Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
+#pdm.lock
+#   pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
+#   in version control.
+#   https://pdm.fming.dev/#use-with-ide
+.pdm.toml
+
+# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
+__pypackages__/
+
+# Celery stuff
+celerybeat-schedule
+celerybeat.pid
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+.env
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+.dmypy.json
+dmypy.json
+
+# Pyre type checker
+.pyre/
+
+# pytype static type analyzer
+.pytype/
+
+# Cython debug symbols
+cython_debug/
+
+# PyCharm
+#  JetBrains specific template is maintained in a separate JetBrains.gitignore that can
+#  be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
+#  and can be added to the global gitignore or merged into this file.  For a more nuclear
+#  option (not recommended) you can uncomment the following to ignore the entire idea folder.
+#.idea/
+

+ 64 - 0
conf/BarLine1.csv

@@ -0,0 +1,64 @@
+name,type,db,start,offset,size,read_allow,write_allow,frequency,group
+였쳔뭍,bool,889,0,0,1,TRUE,FALSE,500,1
+18#唐멀斤뵀,bool,889,0,1,1,TRUE,FALSE,500,1
+짱뵀,dint,889,4,0,4,TRUE,FALSE,500,1
+땍넹,dint,889,8,0,4,TRUE,FALSE,500,1
+직뵀(칫솥),int,889,36,0,2,TRUE,FALSE,500,1
+棺멀낀똑(훑쇱),int,889,38,0,2,TRUE,FALSE,500,1
+羅齡방목,real,889,52,0,4,TRUE,FALSE,500,1
+棺멀낀똑(샴밟),real,889,56,0,4,TRUE,FALSE,500,1
+羅품侊똑,real,889,60,0,4,TRUE,FALSE,500,1
+1직錦攣令,real,889,64,0,4,TRUE,FALSE,500,1
+2직錦攣令,real,889,68,0,4,TRUE,FALSE,500,1
+3직錦攣令,real,889,72,0,4,TRUE,FALSE,500,1
+4직錦攣令,real,889,76,0,4,TRUE,FALSE,500,1
+5직錦攣令,real,889,80,0,4,TRUE,FALSE,500,1
+6직錦攣令,real,889,84,0,4,TRUE,FALSE,500,1
+7직錦攣令,real,889,88,0,4,TRUE,FALSE,500,1
+8직錦攣令,real,889,92,0,4,TRUE,FALSE,500,1
+1직조멀럽웩路좆,real,889,96,0,4,TRUE,FALSE,500,1
+2직조멀럽웩路좆,real,889,100,0,4,TRUE,FALSE,500,1
+3직조멀럽웩路좆,real,889,104,0,4,TRUE,FALSE,500,2
+4직조멀럽웩路좆,real,889,108,0,4,TRUE,FALSE,500,2
+5직조멀럽웩路좆,real,889,112,0,4,TRUE,FALSE,500,2
+6직조멀럽웩路좆,real,889,116,0,4,TRUE,FALSE,500,2
+7직조멀럽웩路좆,real,889,120,0,4,TRUE,FALSE,500,2
+8직조멀럽웩路좆,real,889,124,0,4,TRUE,FALSE,500,2
+쵠路,real,889,128,0,4,TRUE,FALSE,500,2
+棺멀�땍,real,889,132,0,4,TRUE,FALSE,500,2
+1#�땍醵똑,real,889,136,0,4,TRUE,FALSE,500,2
+2#�땍醵똑,real,889,140,0,4,TRUE,FALSE,500,2
+3#�땍醵똑,real,889,144,0,4,TRUE,FALSE,500,2
+4#�땍醵똑,real,889,148,0,4,TRUE,FALSE,500,2
+5#�땍醵똑,real,889,152,0,4,TRUE,FALSE,500,2
+6#�땍醵똑,real,889,156,0,4,TRUE,FALSE,500,2
+7#�땍醵똑,real,889,160,0,4,TRUE,FALSE,500,2
+8#�땍醵똑,real,889,164,0,4,TRUE,FALSE,500,2
+9#�땍醵똑,real,889,168,0,4,TRUE,FALSE,500,2
+10#�땍醵똑,real,889,172,0,4,TRUE,FALSE,500,2
+11#�땍醵똑,real,889,176,0,4,TRUE,FALSE,500,2
+12#�땍醵똑,real,889,180,0,4,TRUE,FALSE,500,3
+13#�땍醵똑,real,889,184,0,4,TRUE,FALSE,500,3
+14#�땍醵똑,real,889,188,0,4,TRUE,FALSE,500,3
+15#�땍醵똑,real,889,192,0,4,TRUE,FALSE,500,3
+16#�땍醵똑,real,889,196,0,4,TRUE,FALSE,500,3
+17#�땍醵똑,real,889,200,0,4,TRUE,FALSE,500,3
+18#�땍醵똑,real,889,204,0,4,TRUE,FALSE,500,3
+1#뱝쓺,real,889,208,0,4,TRUE,FALSE,500,3
+2#뱝쓺,real,889,212,0,4,TRUE,FALSE,500,3
+3#뱝쓺,real,889,216,0,4,TRUE,FALSE,500,3
+4#뱝쓺,real,889,220,0,4,TRUE,FALSE,500,3
+5#뱝쓺,real,889,224,0,4,TRUE,FALSE,500,3
+6#뱝쓺,real,889,228,0,4,TRUE,FALSE,500,3
+7#뱝쓺,real,889,232,0,4,TRUE,FALSE,500,3
+8#뱝쓺,real,889,236,0,4,TRUE,FALSE,500,3
+9#뱝쓺,real,889,240,0,4,TRUE,FALSE,500,3
+10#뱝쓺,real,889,244,0,4,TRUE,FALSE,500,3
+11#뱝쓺,real,889,248,0,4,TRUE,FALSE,500,3
+12#뱝쓺,real,889,252,0,4,TRUE,FALSE,500,3
+13#뱝쓺,real,889,256,0,4,TRUE,FALSE,500,4
+14#뱝쓺,real,889,260,0,4,TRUE,FALSE,500,4
+15#뱝쓺,real,889,264,0,4,TRUE,FALSE,500,4
+16#뱝쓺,real,889,268,0,4,TRUE,FALSE,500,4
+17#뱝쓺,real,889,272,0,4,TRUE,FALSE,500,4
+18#뱝쓺,real,889,276,0,4,TRUE,FALSE,500,4

+ 7 - 0
main.py

@@ -0,0 +1,7 @@
from utils.s7data import S7Client, S7data


def main():
    """Connect to the S7 PLC and start the group-based polling threads."""
    # PLC endpoint: IP 192.168.0.3, rack 0, slot 3.
    s7 = S7Client()
    s7.connect("192.168.0.3", 0, 3)

    # Fix: S7data.__init__ opens the path verbatim (open(csvfile)) and this
    # commit ships the config as conf/BarLine1.csv — the original extensionless
    # "conf/BarLine1" would raise FileNotFoundError.
    data = S7data("conf/BarLine1.csv")
    data.set_S7Client(s7)
    data.auto_update_group()


if __name__ == "__main__":
    main()

+ 55 - 0
models/data_loader.py

@@ -0,0 +1,55 @@
import sqlite3, datetime, csv

# Bounds of the tail-steel temperature/length reading; a value pinned at either
# bound appears to mark a clipped measurement (see the selection logic below).
temp_limit = (760, 870)

# One-off offline script: load the whole production table into memory.
conn = sqlite3.connect("steel_production_data3.db")
cursor = conn.cursor()

cursor.execute("select * from production_data")
data_list = cursor.fetchall()

conn.close()

solved_list = []
# NOTE(review): has_steel_sig is assigned once and never updated, so the
# `has_steel_sig != 0` test below is always true — confirm whether it was meant
# to track a "steel present" signal edge from data[3].
has_steel_sig = 1
counter = 1
last_length = data_list[0][9]

input_buffer = []
for data in data_list:
    if data[3] == 0 and has_steel_sig != 0:
        last_length = 0

        strand_no = int(data[6])
        # NOTE(review): `strand_no > 8 or strand_no != 2` collapses to
        # `strand_no != 2`, keeping only strand 2. If all strands 1..8 were
        # intended, this should probably read `strand_no > 8 or strand_no < 1`.
        if strand_no > 8 or strand_no != 2:
            continue

        # Column 1 holds a "%Y-%m-%d %H:%M:%S.%f" timestamp string.
        timestamp = datetime.datetime.strptime(data[1], "%Y-%m-%d %H:%M:%S.%f").timestamp()
        tmp = [counter, timestamp, data[5], data[19+strand_no-1], data[28], data[27], data[8], data[10]]
        counter += 1
        # 36 consecutive feature columns starting at index 29.
        tmp_2 = [data[29+i] for i in range(36)]

        input_buffer = tmp + tmp_2

    if data[9]*10 != last_length:
        last_length = data[9]*10
        if input_buffer:
            # Prefer the scaled length data[9]*10 when data[7] sits on a
            # clipped limit (and the scaled length lies beyond it) or when
            # the two agree within 10; otherwise take data[7].
            if (data[7] == temp_limit[0] and data[9]*10 < data[7]) or (data[7] == temp_limit[1] and data[9]*10 > data[7]):
                solved_list.append((input_buffer, data[9]*10))
            elif abs(data[9]*10-data[7]) < 10:
                solved_list.append((input_buffer, data[9]*10))
            else:
                solved_list.append((input_buffer, data[7]))
            input_buffer = []

# Append each sample's label, computed from the *next* sample's fields.
output_list = []
for i in range(len(solved_list)-1):
    row = solved_list[i][0][:]
    nxt = solved_list[i+1]  # renamed from `next` to avoid shadowing the builtin
    row.append((nxt[0][4]-nxt[1])*nxt[0][5]/10 + nxt[0][3])
    output_list.append(row)

# Write the assembled samples to CSV.
with open("output.csv", "w", newline="", encoding="utf-8") as file:
    writer = csv.writer(file)
    writer.writerows(output_list)

+ 217 - 0
models/network.py

@@ -0,0 +1,217 @@
+import numpy as np
+import pandas as pd
+import torch
+import torch.nn as nn
+from torch.utils.data import Dataset, DataLoader, TensorDataset
+from sklearn.preprocessing import StandardScaler
+from sklearn.model_selection import train_test_split
+import matplotlib.pyplot as plt
+
+
# 1. Load and pre-process the raw data
def load_data(file_path):
    """Read a headerless CSV and split it into (features, target).

    Column 0 is a running row index and is dropped; the last column is the
    regression target, returned as an (n, 1) column array.
    """
    frame = pd.read_csv(file_path, header=None)
    feature_matrix = frame.iloc[:, 1:-1].values
    target_column = frame.iloc[:, -1].values.reshape(-1, 1)
    return feature_matrix, target_column
+
+
# 2. Standardise features and target
def scale_data(features, target):
    """Fit one StandardScaler per stream and return the scaled data.

    Returns (scaled_features, scaled_target, feature_scaler, target_scaler);
    the fitted scalers are handed back so predictions can be inverse-transformed.
    """
    x_scaler, y_scaler = StandardScaler(), StandardScaler()
    x_scaled = x_scaler.fit_transform(features)
    y_scaled = y_scaler.fit_transform(target)
    return x_scaled, y_scaled, x_scaler, y_scaler
+
+
# 3. Build sliding-window sequences for the recurrent model
def create_sequences(features, target, seq_length):
    """Return (X, y) arrays of length len(features) - seq_length.

    X[i] is the window features[i : i + seq_length]; y[i] is the target
    aligned with the window's last time step, target[i + seq_length - 1].
    """
    n_windows = len(features) - seq_length
    windows = [features[i:i + seq_length] for i in range(n_windows)]
    labels = [target[i + seq_length - 1] for i in range(n_windows)]
    return np.array(windows), np.array(labels)
+
+
# 4. GRU regression model
class GRUModel(nn.Module):
    """Stacked GRU followed by a linear head.

    Consumes (batch, seq, input_size) tensors and predicts from the hidden
    state of the final time step only.
    """

    def __init__(self, input_size, hidden_size, num_layers, output_size):
        super().__init__()
        self.gru = nn.GRU(
            input_size=input_size,
            hidden_size=hidden_size,
            num_layers=num_layers,
            batch_first=True,
        )
        self.fc = nn.Linear(hidden_size, output_size)

    def forward(self, x):
        # GRU output has shape (batch, seq, hidden); keep the last step.
        sequence_out, _ = self.gru(x)
        last_step = sequence_out[:, -1, :]
        return self.fc(last_step)
+
+
# 5. Training loop
def train_model(model, train_loader, val_loader, criterion, optimizer, epochs):
    """Train for `epochs` epochs, validating after each one.

    Prints per-epoch progress and returns two lists of per-epoch average
    losses: (train_losses, val_losses).
    """
    train_history, val_history = [], []

    for epoch_idx in range(epochs):
        # --- optimisation pass over the training set ---
        model.train()
        running = 0.0
        for inputs, targets in train_loader:
            optimizer.zero_grad()
            loss = criterion(model(inputs), targets)
            loss.backward()
            optimizer.step()
            running += loss.item()
        epoch_train = running / len(train_loader)

        # --- evaluation pass (no gradients) over the validation set ---
        model.eval()
        running = 0.0
        with torch.no_grad():
            for inputs, targets in val_loader:
                running += criterion(model(inputs), targets).item()
        epoch_val = running / len(val_loader)

        train_history.append(epoch_train)
        val_history.append(epoch_val)

        print(f'Epoch [{epoch_idx + 1}/{epochs}], '
              f'Train Loss: {epoch_train:.6f}, '
              f'Val Loss: {epoch_val:.6f}')

    return train_history, val_history
+
+
# Main program: end-to-end pipeline — load, scale, window, split, train,
# evaluate, plot, and report metrics for the GRU regressor.
if __name__ == "__main__":
    # Hyper-parameters
    SEQ_LENGTH = 10      # sliding-window length fed to the GRU
    BATCH_SIZE = 16
    HIDDEN_SIZE = 64
    NUM_LAYERS = 2
    EPOCHS = 100
    LR = 0.001

    # 1. Load data (output.csv is produced by models/data_loader.py)
    features, target = load_data('output.csv')

    # 2. Standardise features and target; keep the scalers for inverse transform
    scaled_features, scaled_target, feature_scaler, target_scaler = scale_data(features, target)

    # 3. Build sliding-window sequences
    X, y = create_sequences(scaled_features, scaled_target, SEQ_LENGTH)

    # 4. Chronological 70/15/15 split (shuffle=False preserves time order,
    #    which matters for sequence data)
    X_train, X_temp, y_train, y_temp = train_test_split(
        X, y, test_size=0.3, shuffle=False
    )
    X_val, X_test, y_val, y_test = train_test_split(
        X_temp, y_temp, test_size=0.5, shuffle=False
    )

    # Convert to PyTorch tensors
    X_train = torch.tensor(X_train, dtype=torch.float32)
    y_train = torch.tensor(y_train, dtype=torch.float32)
    X_val = torch.tensor(X_val, dtype=torch.float32)
    y_val = torch.tensor(y_val, dtype=torch.float32)
    X_test = torch.tensor(X_test, dtype=torch.float32)
    y_test = torch.tensor(y_test, dtype=torch.float32)

    # Build data loaders (shuffle=False everywhere to keep temporal order)
    train_dataset = TensorDataset(X_train, y_train)
    val_dataset = TensorDataset(X_val, y_val)
    test_dataset = TensorDataset(X_test, y_test)

    train_loader = DataLoader(train_dataset, batch_size=BATCH_SIZE, shuffle=False)
    val_loader = DataLoader(val_dataset, batch_size=BATCH_SIZE, shuffle=False)
    test_loader = DataLoader(test_dataset, batch_size=BATCH_SIZE, shuffle=False)

    # 5. Initialise the model
    input_size = X_train.shape[2]  # number of features per time step
    output_size = 1
    model = GRUModel(input_size, HIDDEN_SIZE, NUM_LAYERS, output_size)

    # Loss function and optimiser
    criterion = nn.MSELoss()
    optimizer = torch.optim.Adam(model.parameters(), lr=LR)

    # 6. Train
    train_losses, val_losses = train_model(
        model, train_loader, val_loader, criterion, optimizer, EPOCHS
    )

    # 7. Evaluate on the held-out test set
    model.eval()
    test_loss = 0
    predictions = []
    actuals = []

    with torch.no_grad():
        for X_test_batch, y_test_batch in test_loader:
            outputs = model(X_test_batch)
            loss = criterion(outputs, y_test_batch)
            test_loss += loss.item()

            # Collect predictions for metrics/plots
            predictions.extend(outputs.numpy())
            actuals.extend(y_test_batch.numpy())

    avg_test_loss = test_loss / len(test_loader)
    print(f'Test Loss: {avg_test_loss:.6f}')

    # 8. Map results back to the original target scale
    predictions = np.array(predictions).reshape(-1, 1)
    actuals = np.array(actuals).reshape(-1, 1)

    pred_inverse = target_scaler.inverse_transform(predictions)
    actual_inverse = target_scaler.inverse_transform(actuals)

    # 9. Plot loss curves and actual-vs-predicted values
    plt.figure(figsize=(15, 6))

    plt.subplot(1, 2, 1)
    plt.plot(train_losses, label='Train Loss')
    plt.plot(val_losses, label='Validation Loss')
    plt.title('Training and Validation Loss')
    plt.xlabel('Epochs')
    plt.ylabel('MSE Loss')
    plt.legend()
    plt.grid(True)

    plt.subplot(1, 2, 2)
    plt.plot(actual_inverse, label='Actual Values', alpha=0.7)
    plt.plot(pred_inverse, label='Predicted Values', linestyle='--')
    plt.title('Actual vs Predicted Values')
    plt.xlabel('Time Steps')
    plt.ylabel('Weight Values')
    plt.legend()
    plt.grid(True)

    plt.tight_layout()
    plt.savefig('gru_results.png')

    # 10. Report regression metrics on the inverse-transformed values
    from sklearn.metrics import mean_absolute_error, mean_squared_error, r2_score

    mae = mean_absolute_error(actual_inverse, pred_inverse)
    rmse = np.sqrt(mean_squared_error(actual_inverse, pred_inverse))
    r2 = r2_score(actual_inverse, pred_inverse)

    print(f'Performance Metrics:')
    print(f'MAE: {mae:.4f}')
    print(f'RMSE: {rmse:.4f}')
    print(f'R²: {r2:.4f}')

+ 42 - 0
test/main.py

@@ -0,0 +1,42 @@
+import sqlite3
+import numpy as np
+
# Bounds for the thermal reading; a value pinned at either bound looks like a
# clipped measurement (same convention as models/data_loader.py) — in that
# case the laser value is used instead.
temp_limit = (760, 870)

conn = sqlite3.connect("steel_production_data1.db")
cursor = conn.cursor()

cursor.execute("select tail_steel_length_laser, tail_steel_length_thermal, fixed_length from production_data")
length_list = cursor.fetchall()

conn.close()

# Reference tail-steel length the deviation statistics are measured against.
standard = 832

solved_list = []
last_length = length_list[0][0]
for laser, thermal, fixed in length_list:
    # Drop consecutive duplicates (laser reading unchanged) and short pieces.
    if laser == last_length:
        continue
    last_length = laser
    if fixed < 10000:
        continue
    laser_mm = laser * 10
    clipped_low = thermal == temp_limit[0] and laser_mm < thermal
    clipped_high = thermal == temp_limit[1] and laser_mm > thermal
    # Trust the laser reading when the thermal one is clipped or when the two
    # agree within 10; otherwise trust the thermal reading.
    if clipped_low or clipped_high or abs(laser_mm - thermal) < 10:
        solved_list.append(laser_mm)
    else:
        solved_list.append(thermal)

diff = [value - standard for value in solved_list]
variance = np.mean([value ** 2 for value in diff])
sd = np.sqrt(variance)
print('统计结果:')
print('差值平均:', np.mean(diff))
print('最大差值:', max(abs(value) for value in diff))
print('最小差值:', min(abs(value) for value in diff))
print('\n标 准 差:', np.std(solved_list))
print('方    差:', np.var(solved_list))
print('标准差(基于设定尾钢长度):', sd)
print('方  差(基于设定尾钢长度):', variance)

+ 58 - 0
utils/logger.py

@@ -0,0 +1,58 @@
+import logging
+from logging.handlers import RotatingFileHandler
+
class Logger(logging.Logger):
    """Logger with one optional console handler and one optional file handler.

    Each ``*_on`` method is idempotent: calling it again while its handler is
    already installed is a no-op, so handlers are never duplicated. Fixes the
    non-idiomatic ``format == None`` comparisons (now ``is None``) and folds
    the triplicated formatter construction into one helper.
    """

    def __init__(self, name):
        super().__init__(name)

        # Accept everything at the logger level; individual handlers filter.
        self.setLevel(level=logging.DEBUG)
        self.format_default = '%(asctime)s - %(name)s - %(levelname)s - %(message)s'

        self.console = None   # StreamHandler once screen_on() has run
        self.handler = None   # file handler once file_on*() has run

    def _make_formatter(self, format):
        # Fall back to the default layout when no format string is supplied.
        # (Parameter keeps the original keyword name `format` for callers,
        # even though it shadows the builtin.)
        return logging.Formatter(self.format_default if format is None else format)

    def screen_on(self, level=logging.DEBUG, format=None):
        """Attach a console (stderr) handler; no-op if already attached."""
        if self.console:
            return None

        self.console = logging.StreamHandler()
        self.console.setLevel(level)
        self.console.setFormatter(self._make_formatter(format))
        self.addHandler(self.console)

    def file_on(self, path='log.txt', level=logging.DEBUG, format=None):
        """Attach a plain file handler; no-op if a file handler already exists."""
        if self.handler:
            return None

        self.handler = logging.FileHandler(path)
        self.handler.setLevel(level)
        self.handler.setFormatter(self._make_formatter(format))
        self.addHandler(self.handler)

    def file_on_with_rotation(self, path='log.txt', level=logging.DEBUG, format=None, maxBytes=8*1024*1024, backupCount=3):
        """Attach a size-rotating file handler.

        Shares the single file-handler slot with file_on(), so only one of
        the two can ever be active on a given Logger instance.
        """
        if self.handler:
            return None

        self.handler = RotatingFileHandler(
            path, 
            maxBytes=maxBytes,
            backupCount=backupCount
        )
        self.handler.setLevel(level)
        self.handler.setFormatter(self._make_formatter(format))
        self.addHandler(self.handler)

+ 117 - 0
utils/mqttdata.py

@@ -0,0 +1,117 @@
+import paho.mqtt.client as mqtt
+import json, warnings
+from utils.statepoint import *
+
class MqttClient(mqtt.Client):
    """Thin paho-mqtt client that applies username/password credentials.

    `version` selects the paho callback API generation (VERSION2 by default)
    and is passed positionally as the first mqtt.Client constructor argument.
    """
    def __init__(self, client_id, username=None, password=None, version=mqtt.CallbackAPIVersion.VERSION2):
        super().__init__(version, client_id)

        # Credentials apply only when BOTH parts are supplied; a username
        # without a password is silently ignored.
        if username and password:
            self.username_pw_set(username, password)
+
class Mqttdata:
    """Routes MES MQTT messages to Statepoint observers.

    Payloads arriving on the two subscribed topics are classified into the
    logical signals listed in `node_data` (cast start/stop and manual ladle
    change for casters 5 and 6), de-duplicated against the last payload seen,
    and fanned out to every Statepoint created via make_point().

    NOTE(review): `threading` is not imported in this module directly — it is
    picked up via `from utils.statepoint import *`; confirm statepoint keeps
    exposing it.
    """

    def __init__(self):
        self.logger = None  # optional logger installed via set_logger()

        self.cli = None     # fix: initialise; set via set_mqtt_client()
        self.thread = None  # background thread running loop_forever()
        # Last payload seen per logical signal, used for change detection.
        self.node_data = {'5#开浇信号': {}, '5#停浇信号': {}, '6#开浇信号': {}, '6#停浇信号': {}, '5#手动换炉': {}, '6#手动换炉': {}}
        # signal name -> list of Statepoints to notify on change
        self.target_from_name = {}

    def set_logger(self, logger):
        """Install an optional logger used alongside warnings.warn()."""
        self.logger = logger

    def set_mqtt_client(self, cli):
        """Attach a paho client and hook its callbacks.

        Subscriptions are repeated in on_connect so they survive reconnects.
        """
        self.cli = cli
        self.cli.on_connect = self.on_connect
        self.cli.on_message = self.on_message
        self.cli.subscribe('data/service/cast/info', qos=2)
        self.cli.subscribe('syn/pushbillethotsend/nexthosend', qos=2)

    def start_auto_update(self):
        """Run the MQTT network loop on a background thread.

        Idempotent: once the loop thread exists, further calls are no-ops.
        (The original called start() on the already-started thread, which
        raises RuntimeError on a second invocation.)
        """
        if self.thread is not None:
            return None
        self.thread = threading.Thread(target=self.cli.loop_forever)
        self.thread.start()

    def send(self, name):
        """Inject the signal's payload into every registered Statepoint.

        Each point is armed with the dict payload and automatically reset to
        False five seconds later by a one-shot timer.
        """
        if name in self.target_from_name:
            for i in self.target_from_name[name]:
                i.inject(self.node_data[name])
                timer = threading.Timer(5, lambda i=i: i.set_state(False))
                timer.start()

    def on_subscribe(self, client, userdata, mid, reason_code_list, properties):
        """paho v2 subscribe callback: report grant or rejection."""
        if reason_code_list[0].is_failure:
            # Fix: "you subscription" -> "your subscription" in the message.
            warnings.warn(f"Broker rejected your subscription: {reason_code_list[0]}")
            if self.logger:
                self.logger.error(f"Broker rejected your subscription: {reason_code_list[0]}")
        else:
            if self.logger:
                self.logger.info(f"Broker granted the following QoS: {reason_code_list[0].value}")

    def on_message(self, client, userdata, message):
        """Classify an incoming payload by topic/caster and forward changes."""
        topic = message.topic
        if topic == 'syn/pushbillethotsend/nexthosend':
            # Manual ladle-change notification.
            data = json.loads(message.payload.decode())
            if 'ccmNo' not in data:
                warnings.warn('[MES]MQTT报文格式错误')
                if self.logger:
                    self.logger.error('[MES]MQTT报文格式错误')
                return None
            if int(data['ccmNo']) == 6:
                if data != self.node_data['6#手动换炉']:
                    self.node_data['6#手动换炉'] = data
                    self.send('6#手动换炉')
            elif int(data['ccmNo']) == 5:
                if data != self.node_data['5#手动换炉']:
                    self.node_data['5#手动换炉'] = data
                    self.send('5#手动换炉')
            else:
                warnings.warn('[MES]MQTT收到未知铸机号')
                if self.logger:
                    self.logger.error('[MES]MQTT收到未知铸机号')
        elif topic == 'data/service/cast/info':
            # Cast info; castState selects the start vs stop signal.
            data = json.loads(message.payload.decode())
            if 'ccmNo' not in data or 'castState' not in data:
                warnings.warn('[MES]MQTT报文格式错误')
                if self.logger:
                    self.logger.error('[MES]MQTT报文格式错误')
                return None
            if int(data['ccmNo']) == 6:
                if data['castState'] and data != self.node_data['6#开浇信号']:
                    self.node_data['6#开浇信号'] = data
                    self.send('6#开浇信号')
                elif not data['castState'] and data != self.node_data['6#停浇信号']:
                    self.node_data['6#停浇信号'] = data
                    self.send('6#停浇信号')
            elif int(data['ccmNo']) == 5:
                if data['castState'] and data != self.node_data['5#开浇信号']:
                    self.node_data['5#开浇信号'] = data
                    self.send('5#开浇信号')
                elif not data['castState'] and data != self.node_data['5#停浇信号']:
                    self.node_data['5#停浇信号'] = data
                    self.send('5#停浇信号')
            else:
                warnings.warn('[MES]MQTT收到未知铸机号')
                if self.logger:
                    self.logger.error('[MES]MQTT收到未知铸机号')

    def on_connect(self, client, userdata, flags, reason_code, properties):
        """paho v2 connect callback: (re)subscribe on every successful connect."""
        if reason_code.is_failure:
            warnings.warn(f"Failed to connect: {reason_code}. loop_forever() will retry connection")
            if self.logger:
                self.logger.error(f"Failed to connect: {reason_code}. loop_forever() will retry connection")
        else:
            client.subscribe('data/service/cast/info', qos=2)
            client.subscribe('syn/pushbillethotsend/nexthosend', qos=2)
            if self.logger:
                self.logger.info("MQTT connection succeeded")

    def make_point(self, name):
        """Create and register a Statepoint for a configured signal name.

        Raises ValueError for names not present in node_data.
        """
        if name not in self.node_data:
            raise ValueError("创建了未配置的点")
        if name not in self.target_from_name:
            self.target_from_name[name] = []
        res = Statepoint()
        self.target_from_name[name].append(res)
        return res

+ 258 - 0
utils/s7data.py

@@ -0,0 +1,258 @@
+import snap7, csv, threading, warnings, time, ctypes
+from utils.statepoint import *
+
class TS7DataItem(ctypes.Structure):
    """ctypes mirror of snap7's TS7DataItem record, used with read_multi_vars.

    Field order and types must match the native snap7 struct exactly —
    do not reorder.
    """
    _fields_ = [
        ('Area', ctypes.c_int),      # memory area selector (DB in this project)
        ('WordLen', ctypes.c_int),   # transport size (Byte in this project)
        ('Result', ctypes.c_int),    # per-item error code filled in by snap7
        ('DBNumber', ctypes.c_int),  # DB number (meaningful when Area is DB)
        ('Start', ctypes.c_int),     # byte offset within the area
        ('Amount', ctypes.c_int),    # number of elements to read
        ('pdata', ctypes.c_void_p)   # pointer to the caller-owned buffer
    ]
+
class S7Client(snap7.client.Client):
    """snap7 client extended with a convenience multi-variable DB read."""

    def multi_db_read_py(self, db_number: list, start: list, size: list):
        """Read several DB byte ranges in a single read_multi_vars round trip.

        The three lists are parallel: item i reads `size[i]` bytes starting at
        byte `start[i]` of DB `db_number[i]`. Returns a list of bytearrays in
        the same order.

        Raises RuntimeError when the overall call reports failure
        (truthy result[0]). NOTE(review): per-item `Result` codes are not
        inspected — a partially failed read could still hand back buffers;
        confirm whether that matters for callers.
        """
        count = len(size)
        # One caller-owned buffer per item; snap7 writes into these via pdata.
        buffers = [ctypes.create_string_buffer(i) for i in size]
        params = []
        for i in range(count):
            params.append(TS7DataItem(snap7.type.Areas.DB, snap7.type.WordLen.Byte, 0, db_number[i], start[i], size[i], ctypes.cast(buffers[i], ctypes.c_void_p)))
        
        array_type = TS7DataItem * count
        param_array = array_type(*params)
        result = self.read_multi_vars(param_array)
        if result[0]:
            raise RuntimeError("多组读取失败")
        
        # Copy out of the ctypes buffers into independent bytearrays.
        res_rtn = [bytearray(i) for i in buffers]
        return res_rtn
+
class S7data:
    """Polls S7 PLC data blocks described by a CSV config and fans raw values
    out to Statepoint observers.

    Each CSV row defines one named node: type, DB number, byte start, bit
    offset, byte size, read/write permission flags, poll period (ms) and a
    group id used for batched multi-variable reads. All CSV values are kept
    as strings and converted at the point of use.
    """

    def __init__(self, csvfile):
        self.logger = None

        self.S7Client = None              # injected via set_S7Client()
        self.lock = threading.Lock()      # serialises access to the S7 client
        self.thread_run = False           # poll threads loop while this is True
        self.threads = []
        self.nodes = {}                   # name -> CSV row dict
        self.node_data = {}               # name -> latest raw bytes for the node
        self.groups = {}                  # group id -> list of node names
        self.target_from_name = {}        # name (or name+'*') -> Statepoints
        with open(csvfile) as f:
            for i in csv.DictReader(f):
                # Node names must be unique; they key every lookup below.
                if i['name'] in self.nodes:
                    raise Exception(f"S7配置文件节点名称重复:{i['name']}")
                else:
                    self.nodes[i['name']] = i
                    self.node_data[i['name']] = bytearray(int(i['size']))
                    if i['group'] not in self.groups:
                        self.groups[i['group']] = []
                    self.groups[i['group']].append(i['name'])

    def set_logger(self, logger):
        """Install an optional logger used alongside warnings.warn()."""
        self.logger = logger

    def set_S7Client(self, s7c: S7Client):
        """Inject the (already connected) S7 client used for all reads."""
        self.S7Client = s7c

    def get_S7Client(self):
        return self.S7Client
    
    def get_value(self, name):
        """Decode the latest raw bytes of `name` into a Python value.

        A trailing "[i]" (single digit, 0-7) selects bit i of the node's
        first byte. Otherwise decoding follows the node's configured type.
        Returns None (with a warning) for unsupported types.
        """
        # Bit access: only single-digit indices parse, by construction of the check.
        if len(name) > 3 and name[-3] == '[' and name[-1] == ']' and name[-2].isdigit() and 0 <= int(name[-2]) < 8:
            index = int(name[-2])
            name = name[:-3]
            data = (self.node_data[name][0] >> index) & 1
        elif self.nodes[name]['type'] == 'int':
            data = snap7.util.get_int(self.node_data[name], 0)
        elif self.nodes[name]['type'] == 'dint':
            data = snap7.util.get_dint(self.node_data[name], 0)
        elif self.nodes[name]['type'] == 'bool':
            data = snap7.util.get_bool(self.node_data[name], 0, int(self.nodes[name]['offset']))
        elif self.nodes[name]['type'] == 'boollist':
            # Expand the first byte into 8 individual bit values.
            data = [(self.node_data[name][0] >> i) & 1 for i in range(8)]
        elif self.nodes[name]['type'] == 'string':
            # S7 STRING: byte 1 is the current length, text starts at byte 2.
            # NOTE(review): int.from_bytes without a byteorder argument
            # requires Python >= 3.11 — confirm the deployment interpreter.
            data = self.node_data[name][2:2+int.from_bytes(self.node_data[name][1:2])].decode('gbk', errors='replace')
        elif self.nodes[name]['type'] == 'real':
            data = snap7.util.get_real(self.node_data[name], 0)
        elif self.nodes[name]['type'] == 'wstring':
            # S7 WSTRING: 4-byte header, then UTF-16-BE text.
            data = self.node_data[name][4:].decode(encoding='utf-16be', errors='replace')
        else:
            warnings.warn('暂不支持的类型:' + self.nodes[name]['type'])
            if self.logger:
                self.logger.error('暂不支持的类型:' + self.nodes[name]['type'])
            return None
        
        return data

    def send(self, name):
        """Decode `name`'s current bytes and push the value to its observers.

        NOTE(review): the decoding switch duplicates get_value() (minus the
        "[i]" bit form) — keep the two in sync when adding types.
        """
        if self.nodes[name]['type'] == 'int':
            data = snap7.util.get_int(self.node_data[name], 0)
        elif self.nodes[name]['type'] == 'dint':
            data = snap7.util.get_dint(self.node_data[name], 0)
        elif self.nodes[name]['type'] == 'bool':
            data = snap7.util.get_bool(self.node_data[name], 0, int(self.nodes[name]['offset']))
        elif self.nodes[name]['type'] == 'boollist':
            data = [(self.node_data[name][0] >> i) & 1 for i in range(8)]
        elif self.nodes[name]['type'] == 'real':
            data = snap7.util.get_real(self.node_data[name], 0)
        elif self.nodes[name]['type'] == 'string':
            data = self.node_data[name][2:2+int.from_bytes(self.node_data[name][1:2])].decode('gbk', errors='replace')
        elif self.nodes[name]['type'] == 'wstring':
            data = self.node_data[name][4:].decode(encoding='utf-16be', errors='replace')
        else:
            warnings.warn('暂不支持的类型:' + self.nodes[name]['type'])
            if self.logger:
                self.logger.error('暂不支持的类型:' + self.nodes[name]['type'])
            return None

        # Whole-value observers.
        if name in self.target_from_name:
            for i in self.target_from_name[name]:
                i.inject(data)
        # Per-bit observers registered under name+'*' (boollist only).
        if self.nodes[name]['type'] == 'boollist' and name + '*' in self.target_from_name:
            for i in range(8):
                for j in self.target_from_name[name+'*'][i]:
                    j.inject(data[i])

    def update(self, name):
        """Poll one node in a loop (thread target for start_auto_update).

        Reads the node every `frequency` ms and notifies observers on change.
        Any failure flips thread_run to False, which stops every poll thread.
        """
        nodeinfo = self.nodes[name]
        try:
            while True:
                if not self.thread_run:
                    return None
                self.lock.acquire()
                if not self.S7Client.get_connected():
                    warnings.warn('S7Client连接中断')
                    if self.logger:
                        self.logger.error('S7Client连接中断')
                    self.thread_run = False
                    self.lock.release()
                    return None
                tmp = self.S7Client.db_read(int(nodeinfo['db']), int(nodeinfo['start']), int(nodeinfo['size']))
                self.lock.release()
                # Change detection: only fan out when the raw bytes differ.
                if self.node_data[name] != tmp:
                    self.node_data[name] = tmp
                    self.send(name)
                time.sleep(float(nodeinfo['frequency']) / 1000)
        except RuntimeError as reason:
            # NOTE(review): this release assumes the exception came from
            # db_read (lock held); an exception from send()/sleep() would
            # release an unheld lock and itself raise.
            warnings.warn(reason)
            if self.logger:
                self.logger.error(reason)
            self.thread_run = False
            self.lock.release()

    def start_auto_update(self):
        """Start one poll thread per readable node (read_allow != FALSE).

        No-op when polling is already running or the client is absent or
        disconnected.
        """
        if self.thread_run:
            return None
        self.threads = []
        if self.S7Client == None:
            warnings.warn('未初始化S7Client')
            if self.logger:
                self.logger.error('未初始化S7Client')
            return None
        if not self.S7Client.get_connected():
            warnings.warn('S7Client未连接')
            if self.logger:
                self.logger.error('S7Client未连接')
            return None
        for key, value in self.nodes.items():
            if value['read_allow'].upper() != 'FALSE':
                self.threads.append(threading.Thread(target=self.update, args=(value['name'],)))
        self.thread_run = True
        for i in self.threads:
            i.start()

    def update_group(self, group_name):
        """Poll all nodes of one group with a single batched multi-read.

        Thread target for auto_update_group(); loops until thread_run goes
        False. A failed batch read stops ALL group threads (shared flag).
        """
        nodesname = self.groups[group_name]
        db_number = []
        start = []
        size = []

        # Pre-compute the parallel request lists once, outside the loop.
        for name in nodesname:
            nodeinfo = self.nodes[name]
            db_number.append(int(nodeinfo['db']))
            start.append(int(nodeinfo['start']))
            size.append(int(nodeinfo['size']))

        # NOTE(review): unlike update(), this loop has no sleep — confirm the
        # per-node 'frequency' column was meant to apply here too.
        while True:
            if not self.thread_run:
                return None
            
            tmp = False
            read_valid = True
            with self.lock:
                if not self.S7Client.get_connected():
                    warnings.warn('S7Client连接中断')
                    if self.logger:
                        self.logger.error('S7Client连接中断')
                    self.thread_run = False
                    return None

                try:
                    tmp = self.S7Client.multi_db_read_py(db_number, start, size)
                except RuntimeError as reason:
                    warnings.warn(reason)
                    read_valid = False
                    if self.logger:
                        self.logger.error(reason)
                    self.thread_run = False

            # Fan out only the nodes whose raw bytes actually changed.
            if read_valid and tmp:
                for i in range(len(tmp)):
                    if self.node_data[nodesname[i]] != tmp[i]:
                        self.node_data[nodesname[i]] = tmp[i]
                        self.send(nodesname[i])

    def auto_update_group(self):
        """Start one batched poll thread per configured group."""
        if self.thread_run:
            return None
        self.threads = []
        if self.S7Client == None:
            warnings.warn('未初始化S7Client')
            if self.logger:
                self.logger.error('未初始化S7Client')
            return None
        if not self.S7Client.get_connected():
            warnings.warn('S7Client未连接')
            if self.logger:
                self.logger.error('S7Client未连接')
            return None
        
        for group in self.groups.keys():
            self.threads.append(threading.Thread(target=self.update_group, args=(group,)))

        self.thread_run = True
        for i in self.threads:
            i.start()

    def end_auto_update(self):
        """Signal all poll threads to stop and wait for them to finish."""
        self.thread_run = False
        for i in self.threads:
            i.join()

    def make_point(self, name, point_type = Statepoint):
        """Create and register an observer point for node `name`.

        A trailing "[i]" (i in 0..7) registers for a single bit under the
        internal key name+'*'. Raises ValueError for unknown node names.
        The new (and, via send(), every existing) point of this node is
        immediately primed with the current value.
        """
        index = -1
        solvedname = name
        if len(name) > 3 and name[-3] == '[' and name[-1] == ']' and name[-2].isdigit() and 0 <= int(name[-2]) < 8:
            index = int(name[-2])
            name = name[:-3]
            solvedname = name + '*'
        if name not in self.nodes:
            raise ValueError("创建了未配置的点")

        if solvedname not in self.target_from_name:
            if index == -1:
                self.target_from_name[solvedname] = []
            else:
                # One observer bucket per bit position.
                self.target_from_name[solvedname] = [[],[],[],[],[],[],[],[]]
                
        res = point_type()
        if index == -1:
            self.target_from_name[solvedname].append(res)
        else:
            self.target_from_name[solvedname][index].append(res)
        self.send(name)
        return res

+ 199 - 0
utils/statepoint.py

@@ -0,0 +1,199 @@
+import threading, time
+
class Statepoint:
    """Event-driven state holder.

    Raw samples arrive through inject(); a converter maps the latest sample
    to a state value.  Rising edges (False -> True) fire do_excite and
    falling edges fire do_reset — but a falling edge is held back for
    keep_time milliseconds, so brief dips in the signal do not reset the
    point.  State recomputation runs on a short-lived worker thread.
    """

    def __init__(self, initvalue = False, initstate = False):
        self.data = initvalue
        self.state = initstate
        self.hmd = set()                    # blacklisted sample values, ignored by inject()
        self.lock = threading.Lock()        # serializes state recomputation
        self.permitted_update = True        # public update gate (see allow_update)
        self.__timer_gate = True            # internal gate driven by the keep-time timer
        self.converter = lambda data: bool(data)
        self.do_excite = lambda: None       # rising-edge callback
        self.do_reset = lambda: None        # falling-edge callback
        self.keep_time = 1000               # ms to hold True after a falling reading
        self.pre_reset = False              # a falling edge is awaiting confirmation

    def hmd_add(self, data):
        """Blacklist a sample value so inject() ignores it."""
        self.hmd.add(data)

    def inject(self, data):
        """Accept a new raw sample and, if updates are allowed, recompute state."""
        # Ignore no-op samples and blacklisted values.
        if data == self.data or (self.hmd and data in self.hmd):
            return None
        self.data = data
        if self.permitted_update and self.__timer_gate:
            self.__spawn_update()

    def excite(self):
        """Run the rising-edge callback."""
        self.do_excite()

    def reset(self):
        """Run the falling-edge callback."""
        self.do_reset()

    def __update_state(self):
        with self.lock:
            previous = self.state
            self.state = self.converter(self.data)
            if previous == False and self.state == True:
                # Rising edge: fire immediately and drop any pending reset.
                self.pre_reset = False
                self.excite()
            elif previous == True and self.state == False:
                if self.keep_time <= 0:
                    self.reset()
                elif self.pre_reset:
                    # Signal still low after the hold period: confirm the reset.
                    self.pre_reset = False
                    self.reset()
                else:
                    # First falling reading: keep reporting True, freeze
                    # updates, and re-check after keep_time has elapsed.
                    self.state = True
                    self.__timer_allow(False)
                    self.pre_reset = True
                    threading.Timer(self.keep_time / 1000, self.__timer_allow).start()
            else:
                # Level unchanged: cancel any pending reset.
                self.pre_reset = False

    def __spawn_update(self):
        # Recompute state off-thread so inject() never blocks the caller.
        threading.Thread(target=self.__update_state).start()

    def allow_update(self, enable: bool = True):
        """Open or close the public update gate; re-opening recomputes state."""
        self.permitted_update = enable
        if enable and self.__timer_gate:
            self.__spawn_update()

    def __timer_allow(self, enable: bool = True):
        self.__timer_gate = enable
        if enable:
            self.__spawn_update()

    def set_convertor(self, func = lambda data: bool(data)):
        """Install the data -> state converter (must be callable)."""
        if not callable(func):
            raise TypeError('The parameter func can only be a function')
        self.converter = func

    def set_excite_action(self, func = lambda: None):
        """Install the rising-edge callback (must be callable)."""
        if not callable(func):
            raise TypeError('The parameter func can only be a function')
        self.do_excite = func

    def set_reset_action(self, func = lambda: None):
        """Install the falling-edge callback (must be callable)."""
        if not callable(func):
            raise TypeError('The parameter func can only be a function')
        self.do_reset = func

    def set_keep_time(self, keeptime):
        """Set the falling-edge hold time in milliseconds."""
        self.keep_time = keeptime

    def set_state(self, state):
        """Force the state directly without firing any callbacks."""
        self.state = state
+
class Through_state_continues3(Statepoint):
    """Composite point that excites only after point1, point2 and point3
    excite in that order (a directed "pass-through" over three child points).

    Each child is gated: point2 may only update after point1 has excited,
    and point3 only after point2.  The composite injects True when point3
    excites and injects False again once the passage unwinds through the
    child reset callbacks below.
    """
    def __init__(self, p1, p2, p3):
        super().__init__()
        self.point1 = p1
        self.point2 = p2
        self.point3 = p3
        # Children start gated; allow_update(True) opens point1 only.
        self.point1.allow_update(False)
        self.point2.allow_update(False)
        self.point3.allow_update(False)
        # Forward chain: each excite unlocks the next point in the sequence.
        self.point1.set_excite_action(lambda: self.point2.allow_update())
        self.point2.set_excite_action(lambda: self.point3.allow_update())
        self.point3.set_excite_action(lambda: self.inject(True))
        
        # Backward chain: a reset re-gates the successor unless it is already
        # active, and clears the composite when no neighbour remains active.
        self.point1.set_reset_action(lambda: (None if self.point2.state else self.point2.allow_update(False),
                                              None if self.point2.state or self.point3.state else self.inject(False)))
        self.point2.set_reset_action(lambda: (None if self.point3.state else self.point3.allow_update(False),
                                              None if self.point1.state else self.inject(False)))
        self.point3.set_reset_action(lambda: None if self.point2.state else self.inject(False))

    def reset(self):
        # Re-gate the downstream points and clear point3 before running the
        # user reset action; if point1 is still active, restart the chain.
        self.point2.allow_update(False)
        self.point3.allow_update(False)
        self.point3.state = False
        super().reset()
        if self.point1.state:
            self.point1.excite()

    def allow_update(self, enable: bool = True):
        # Enabling only opens point1 (the excite chain unlocks the rest);
        # disabling force-gates and clears all children while composite
        # updates are temporarily suppressed.
        if enable:
            self.point1.allow_update(enable)
        else:
            self.permitted_update = False
            self.point1.allow_update(False)
            self.point2.allow_update(False)
            self.point3.allow_update(False)
            self.point1.state = False
            self.point2.state = False
            self.point3.state = False
            self.permitted_update = True
+
class Through_state_separation2(Statepoint):
    """Composite point that excites after point1 then point2 excite in order.

    point2 is gated until point1 excites; when point2 excites the composite
    injects True, and point2's reset injects False again.  Unlike the
    three-point variant, reset() re-arms point1 so the sequence can repeat.
    """
    def __init__(self, p1, p2):
        super().__init__()
        self.point1 = p1
        self.point2 = p2
        # Both children start gated; allow_update(True) opens point1 only.
        self.point1.allow_update(False)
        self.point2.allow_update(False)
        # Forward chain: point1 unlocks point2, point2 raises the composite.
        self.point1.set_excite_action(lambda: self.point2.allow_update())
        self.point2.set_excite_action(lambda: self.inject(True))
        
        # point1's reset re-gates point2 unless point2 is already active;
        # point2's reset clears the composite state.
        self.point1.set_reset_action(lambda: None if self.point2.state else self.point2.allow_update(False))
        self.point2.set_reset_action(lambda: self.inject(False))

    def reset(self):
        # Gate and clear both children, run the user reset action, then
        # re-open point1 so the next passage can be detected.
        self.point1.allow_update(False)
        self.point2.allow_update(False)
        self.point1.state = False
        self.point2.state = False
        super().reset()
        self.point1.allow_update()

    def allow_update(self, enable: bool = True):
        if enable:
            self.point1.allow_update()
        else:
            # Suppress composite updates while force-clearing the children.
            self.permitted_update = False
            self.point1.allow_update(False)
            self.point2.allow_update(False)
            self.point1.state = False
            self.point2.state = False
            self.permitted_update = True
+
class Integration_speed_mpmin(Statepoint):
    """Integrates a speed signal (metres per minute) into a running total.

    Each injected sample is combined with the previous one by the
    trapezoidal rule over wall-clock time; the /120 factor averages the
    two samples (/2) and converts m/min to m/s (/60).
    """
    def __init__(self, *args):
        super().__init__(*args)
        now = time.time()
        self.last_inject_time = now   # wall-clock time of the previous inject()
        self.last_data = 0            # previous raw speed sample
        self.last_data_time = now     # time the previous sample is attributed to

    def inject(self, data):
        now = time.time()
        # Attribute the new sample to the midpoint between the two injects.
        sample_time = self.last_inject_time + (now - self.last_inject_time) / 2
        # Trapezoidal increment: elapsed seconds * mean speed, m/min -> m/s.
        total = self.data + (sample_time - self.last_data_time) * (self.last_data + data) / 120

        self.last_inject_time = now
        self.last_data = data
        self.last_data_time = sample_time

        return super().inject(total)

+ 117 - 0
utils/tcp_data.py

@@ -0,0 +1,117 @@
+import socket
+from utils.statepoint import *
+
class Tcp_server(socket.socket):
    """TCP listening socket that feeds every received payload into points.

    Each connected client gets its own receive thread and its own list of
    points (of class point_t); incoming bytes are optionally decoded and
    injected into all of that client's points.
    """

    def __init__(self, ip, port, backlog = 5, cli_max = 5, encoding = 'utf-8', point_t = Statepoint):
        super().__init__()

        self.backlog = backlog      # listen() queue length
        self.cli_max = cli_max      # maximum simultaneous clients
        self.encoding = encoding    # falsy -> inject raw bytes instead of str
        self.point_t = point_t      # point class instantiated per client

        self.run_flag = False       # cleared to stop the accept/receive loops
        self.main_thread = None     # accept-loop thread

        self.convertor = lambda data: bool(data)
        self.excite_action = lambda: None

        self.clients = {}   # addr -> client socket
        self.datas = {}     # addr -> last received payload (bytes)
        self.points = {}    # addr -> list of points fed by that client
        self.threads = {}   # addr -> receive thread

        self.bind((ip, port))
        self.listen(backlog)

    def accept_action(self):
        """Accept clients until run_flag clears; spawn one receive thread each."""
        while self.run_flag:
            if len(self.clients) < self.cli_max:
                try:
                    cli, addr = self.accept()
                except OSError as e:
                    print('客户端等待连接服务被打断:', str(e))
                    self.run_flag = False
                    return None
                tmp_point = self.point_t()
                tmp_point.set_convertor(self.convertor)
                # Bind tmp_point via a default argument so each client's
                # excite action refers to its own point, not the last one.
                tmp_point.set_excite_action(lambda t=tmp_point: (t.set_state(False), self.excite_action()))
                self.clients[addr] = cli
                self.datas[addr] = b''
                self.points[addr] = [tmp_point]
                self.threads[addr] = threading.Thread(target=self.update_action, args=(addr,))
                self.threads[addr].start()
                print(f'{addr}客户端已连接')
            else:
                time.sleep(1)  # at capacity: poll until a slot frees up

    def update_action(self, addr):
        """Receive loop for one client; cleans up per-client state on exit."""
        cli = self.clients[addr]
        noerror = True
        while self.run_flag:
            try:
                tmp = cli.recv(1024)
            except Exception as e:
                noerror = False
                tmp = b''
                print(f'{addr}异常断连:{e}')
            if tmp:
                self.datas[addr] = tmp
                self.send(addr)
            else:
                break
        if noerror:
            print(f'{addr}正常断连')
        # Close the client socket so its file descriptor is not leaked.
        cli.close()
        self.clients.pop(addr, None)
        self.datas.pop(addr, None)
        self.points.pop(addr, None)
        self.threads.pop(addr, None)

    def send(self, addr):
        """Inject the latest payload from addr into all of its points."""
        if self.encoding:
            data = self.datas[addr].decode(encoding=self.encoding, errors='replace')
        else:
            data = self.datas[addr]

        for point in self.points[addr]:
            point.inject(data)

    def start(self):
        """Start the accept loop in a background thread (idempotent)."""
        if self.main_thread and self.main_thread.is_alive():
            return None

        self.getsockname()  # fails fast if the socket is no longer usable
        self.main_thread = threading.Thread(target=self.accept_action)
        self.run_flag = True
        self.main_thread.start()

    def stop(self):
        """Stop the accept loop, unblocking accept() via a dummy connection."""
        if self.main_thread is None:
            return None

        self.run_flag = False
        try:
            addr = self.getsockname()
            cli_tmp = socket.socket()
            cli_tmp.connect(addr)
            cli_tmp.close()
        finally:
            self.main_thread.join()
            self.main_thread = None

    def set_convertor(self, func):
        # Converter installed on every per-client point (set before start()).
        self.convertor = func

    def set_excite_action(self, func):
        # Callback fired whenever any client's point excites.
        self.excite_action = func

    def close(self):
        self.stop()
        return super().close()

    def send_to_all_clients(self, bytes_data: bytes):
        """Best-effort broadcast of raw bytes to every connected client."""
        # Snapshot the items: receive threads may pop entries concurrently.
        for addr, cli in list(self.clients.items()):
            try:
                cli.send(bytes_data)
            except Exception as e:
                print(f'{addr}发送数据时产生异常:{e}')