فهرست منبع

9.24提交稳定运行版本(非正式)

oldwine 2 ماه پیش
والد
کامیت
302bb57574
15 فایل‌های تغییر یافته به همراه 949 افزوده شده و 85 حذف شده
  1. 28 0
      conf/etst.sql
  2. 4 0
      conf/s7@192.168.0.3.csv
  3. 18 0
      conf/s7@192.168.1.215.csv
  4. 0 7
      main.py
  5. 104 0
      models/cip_data.py
  6. 85 0
      models/data_sender.py
  7. 75 0
      models/mysql_data.py
  8. 263 0
      models/steel_fit.py
  9. 30 0
      sql/event.sql
  10. 27 0
      sql/steel_cutting.sql
  11. 133 0
      sql/steel_making.sql
  12. 68 65
      sql/steel_rolling.sql
  13. 81 0
      steel_making_main.py
  14. 33 0
      steel_rolling_main.py
  15. 0 13
      steelmaking_main.py

+ 28 - 0
conf/etst.sql

@@ -0,0 +1,28 @@
+-- INSERT INTO data_points(ip_address, name, type, db_number, start_offset, bit_offset, size, read_allow, write_allow, frequency, group_id) VALUES
+-- ('192.168.1.215', '1流定尺反馈补偿量', 'int', 100, 28, 0, 2, TRUE, FALSE, 500, 1),
+-- ('192.168.1.215', '2流定尺反馈补偿量', 'int', 100, 30, 0, 2, TRUE, FALSE, 500, 1),
+-- ('192.168.1.215', '3流定尺反馈补偿量', 'int', 100, 32, 0, 2, TRUE, FALSE, 500, 1),
+-- ('192.168.1.215', '4流定尺反馈补偿量', 'int', 100, 34, 0, 2, TRUE, FALSE, 500, 1),
+-- ('192.168.1.215', '5流定尺反馈补偿量', 'int', 101, 28, 0, 2, TRUE, FALSE, 500, 1),
+-- ('192.168.1.215', '6流定尺反馈补偿量', 'int', 101, 30, 0, 2, TRUE, FALSE, 500, 1),
+-- ('192.168.1.215', '7流定尺反馈补偿量', 'int', 101, 32, 0, 2, TRUE, FALSE, 500, 1),
+-- ('192.168.1.215', '8流定尺反馈补偿量', 'int', 101, 34, 0, 2, TRUE, FALSE, 500, 1);
+
+-- INSERT INTO realtime_data(point_id, int_value, timestamp) VALUES
+-- SELECT id, 0.0, NOW(3) FROM data_points WHERE ip_address = '192.168.1.215';
+
+INSERT INTO data_points(ip_address, name, type, db_number, start_offset, bit_offset, size, read_allow, write_allow, frequency, group_id) VALUES
+('192.168.1.215', '炉号', 'dint', 420, 34, 0, 4, TRUE, FALSE, 500, 1);
+
+INSERT INTO realtime_data(point_id, int_value, timestamp)
+SELECT id, 0, NOW(3) FROM data_points WHERE name = '炉号';
+
+INSERT INTO data_points(ip_address, name, type, db_number, start_offset, bit_offset, size, read_allow, write_allow, frequency, group_id) VALUES
+('192.168.0.3', '触发信号', 'bool', 889, 0, 3, 1, TRUE, FALSE, 500, 4),
+('192.168.0.3', '取样标志', 'int', 889, 40, 0, 2, TRUE, FALSE, 500, 4);
+
+INSERT INTO realtime_data(point_id, int_value, timestamp)
+SELECT id, 0, NOW(3) FROM data_points WHERE name = '触发信号';
+
+INSERT INTO realtime_data(point_id, int_value, timestamp)
+SELECT id, 0, NOW(3) FROM data_points WHERE name = '取样标志';

+ 4 - 0
conf/s7@192.168.0.3.csv

@@ -62,3 +62,7 @@ name,type,db,start,offset,size,read_allow,write_allow,frequency,group
 16#辊径,real,889,268,0,4,TRUE,FALSE,500,4
 17#辊径,real,889,272,0,4,TRUE,FALSE,500,4
 18#辊径,real,889,276,0,4,TRUE,FALSE,500,4
+棒一变棒三定尺,bool,889,0,2,1,TRUE,FALSE,500,4
+轧钢启用倍尺,bool,889,0,3,1,TRUE,FALSE,500,4
+触发信号,bool,889,0,3,1,TRUE,FALSE,500,4
+取样标志,int,889,40,0,2,TRUE,FALSE,500,4

+ 18 - 0
conf/s7@192.168.1.215.csv

@@ -0,0 +1,18 @@
+name,type,db,start,offset,size,read_allow,write_allow,frequency,group
+4流定尺反馈补偿量,int,100,28,0,2,TRUE,FALSE,500,1
+3流定尺反馈补偿量,int,100,30,0,2,TRUE,FALSE,500,1
+2流定尺反馈补偿量,int,100,32,0,2,TRUE,FALSE,500,1
+1流定尺反馈补偿量,int,100,34,0,2,TRUE,FALSE,500,1
+5流定尺反馈补偿量,int,101,28,0,2,TRUE,FALSE,500,1
+6流定尺反馈补偿量,int,101,30,0,2,TRUE,FALSE,500,1
+7流定尺反馈补偿量,int,101,32,0,2,TRUE,FALSE,500,1
+8流定尺反馈补偿量,int,101,34,0,2,TRUE,FALSE,500,1
+炉号,dint,420,34,0,4,TRUE,FALSE,500,1
+L1切割信号,boollist,131,4,0,1,TRUE,FALSE,500,1
+L2切割信号,boollist,132,4,0,1,TRUE,FALSE,500,1
+L3切割信号,boollist,133,4,0,1,TRUE,FALSE,500,1
+L4切割信号,boollist,134,4,0,1,TRUE,FALSE,500,1
+L5切割信号,boollist,135,4,0,1,TRUE,FALSE,500,1
+L6切割信号,boollist,136,4,0,1,TRUE,FALSE,500,1
+L7切割信号,boollist,137,4,0,1,TRUE,FALSE,500,1
+L8切割信号,boollist,138,4,0,1,TRUE,FALSE,500,1

+ 0 - 7
main.py

@@ -1,7 +0,0 @@
-from utils.s7data import S7Client, S7data
-
-s7 = S7Client()
-s7.connect("192.168.0.3", 0, 3)
-data = S7data("conf/s7@192.168.0.3.csv")
-data.set_S7Client(s7)
-data.auto_update_group()

+ 104 - 0
models/cip_data.py

@@ -0,0 +1,104 @@
+from utils.statepoint import Statepoint
+from pylogix import PLC
+from pylogix.lgx_response import Response
+from threading import Thread
+import time
+
+class CIPData:
+    __sentinel = object()
+
+    def __init__(self, ip = ''):
+        self.tags2name = {
+            "GB_PEISHUI": [f"5#水流量-{i}流-{j}段" for i in range(1, 9) for j in range(1, 6)],
+            "GB_PEISHUI[58]": ['5#结晶器流量', '5#结晶器水温差', '5#二冷水总管压力', '5#结晶器进水温度', '5#结晶器水压', '5#二冷水总管温度']
+        }
+
+        self.name2value = {j: 0 for i in self.tags2name.values() for j in i}
+
+        self.name2point = {}
+
+        self.plc_ip = ip
+        self.thread_update = None
+        self.thread_run = False
+
+    def deliver_value(self, name, value):
+        if self.get_value(name) == value:
+            return None
+        
+        self.name2value[name] = value
+
+        if name in self.name2point:
+            for point in self.name2point[name]:
+                point.inject(value)
+
+    def process_response(self, response: Response, name: str | list[str]):
+        if response.Status == "Success":
+            if isinstance(name, list):
+                _ = [self.deliver_value(name[i], response.Value[i]) for i in range(len(name))]
+            else:
+                self.deliver_value(name, response.Value)
+        else:
+            print("Error:", response.Status)
+
+    def update_forever(self, sleep_second = 0.5):
+        ip = self.plc_ip
+        if ip == '':
+            raise ValueError("PLC IPAddress is not defined.")
+
+        retry_count = 3
+        with PLC(ip) as plc:
+            while retry_count > 0 and self.thread_run:
+                try:
+                    for tag, name in self.tags2name.items():
+                        if isinstance(name, list):
+                            ret = plc.Read(tag, len(name))
+                        else:
+                            ret = plc.Read(tag)
+                        self.process_response(ret, name)
+                    retry_count = 3
+                except:
+                    retry_count -= 1
+                finally:
+                    time.sleep(sleep_second)
+        self.thread_run = False
+        if retry_count <= 0:
+            print("An abnormal connection with the PLC occurred.")
+
+    def start_update(self):
+        if self.thread_update or self.thread_run:
+            raise ChildProcessError("This thread cannot be started now.")
+        
+        self.thread_update = Thread(target=self.update_forever)
+        self.thread_run = True
+        self.thread_update.start()
+
+    def stop_update(self):
+        self.thread_run = False
+        if self.thread_update == None:
+            return None
+        
+        self.thread_update.join()
+        self.thread_update = None
+
+    def restart_update(self):
+        self.stop_update()
+        self.start_update()
+
+    def get_value(self, name: str, default: any = __sentinel):
+        if name not in self.name2value:
+            if default is self.__sentinel:
+                raise NameError(f"Name {name} is not defined.")
+            else:
+                return default
+        
+        return self.name2value[name]
+
+    def make_point(self, name: str, point_t = Statepoint):
+        point = point_t(self.get_value(name))
+
+        if name not in self.name2point:
+            self.name2point[name] = set()
+
+        self.name2point[name].add(point)
+
+        return point

+ 85 - 0
models/data_sender.py

@@ -0,0 +1,85 @@
+from utils.s7data import S7data
+from utils.logger import Logger
+from dbutils.pooled_db import PooledDB
+from datetime import datetime
+import pymysql, threading, time
+
+class Sender:
+    def __init__(self, s7data: S7data, mysql_pool: PooledDB, logger: Logger, ipaddr):
+        self.s7data = s7data
+        self.mysql_pool = mysql_pool
+        self.logger = logger
+        self.point_info = self.get_init_node_info(ipaddr)
+
+        self.thread_run = True
+        self.thread = threading.Thread(target=self.update_all_forever)
+        self.thread.start()
+
+    def __del__(self):
+        if isinstance(self.thread, threading.Thread) and self.thread.is_alive():
+            self.thread_run = False
+            self.thread.join()
+
+    def update_all_forever(self, round_sleep=500):
+        while self.thread_run:
+            time.sleep(round_sleep/1000)
+            threads = []
+            for i in self.point_info.keys():
+                thread = threading.Thread(target=self.update_point, args=(i,))
+                thread.start()
+                threads.append(thread)
+            for i in threads:
+                i.join()
+            self.logger.debug("更新成功")
+
+    def update_point(self, name):
+        dataid = self.point_info[name][1]
+        datatype = self.point_info[name][2]
+        datatype = datatype if datatype != 'dint' else 'int'
+        timestamp = datetime.now()
+        datavalue = self.s7data.get_value(name)
+        sql = "UPDATE realtime_data SET {}_value = %s, timestamp = %s WHERE point_id = %s;".format(datatype)
+        sql2 = "INSERT INTO historical_data(point_id, {}_value, timestamp) VALUES(%s, %s, %s);".format(datatype)
+        sql3 = "INSERT INTO recent_data(point_id, {}_value, timestamp) VALUES(%s, %s, %s);".format(datatype)
+        with self.mysql_pool.connection() as conn:
+            try:
+                with conn.cursor() as cursor:
+                    cursor.execute(sql, (datavalue, timestamp, dataid))
+                    cursor.execute(sql2, (dataid, datavalue, timestamp))
+                    cursor.execute(sql3, (dataid, datavalue, timestamp))
+                    conn.commit()
+                    return True
+            except pymysql.Error as e:
+                self.logger.error(f"[SENDER]MYSQL:{e}")
+
+    def get_init_node_info(self, ipaddr):
+        sql = "SELECT name, id, type FROM data_points where ip_address = %s;"
+        res_dict = {}
+        with self.mysql_pool.connection() as conn:
+            try:
+                with conn.cursor() as cursor:
+                    cursor.execute(sql, (ipaddr,))
+                    res = cursor.fetchall()
+            except pymysql.Error as e:
+                self.logger.error(f"[SENDER]MYSQL:{e}")
+                raise ConnectionError("mysql connection error."+str(e))
+        for i in res:
+            res_dict[i[0]] = i
+        
+        return res_dict
+
+    def upload_billet(self, arg_dict: dict):
+        sql = "INSERT INTO steel_billet_monitoring(strand_no, cutting_time, entry_time, exit_time, \
+                water_temperature, water_pressure, water_volume, water_pressure_sd, steel_temperature, \
+                drawing_speed, water_temperature_difference)\
+                VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)"
+        with self.mysql_pool.connection() as conn:
+            try:
+                with conn.cursor() as cursor:
+                    cursor.execute(sql, (arg_dict["strand_no"], arg_dict["cutting_time"], arg_dict["entry_time"], arg_dict["exit_time"],
+                                         arg_dict.get("water_temperature"), arg_dict.get("water_pressure"), arg_dict.get("water_volume"), arg_dict.get("water_pressure_sd"),
+                                         arg_dict.get("steel_temperature"), arg_dict.get("drawing_speed"), arg_dict.get("water_temperature_difference")))
+                    conn.commit()
+                    return True
+            except pymysql.Error as e:
+                self.logger.error(f"[SENDER]MYSQL:{e}")

+ 75 - 0
models/mysql_data.py

@@ -0,0 +1,75 @@
+import time, threading, snap7.util
+from utils.s7data import S7Client
+from utils.logger import Logger
+from dbutils.pooled_db import PooledDB
+
+
+class MysqlData:
+    def __init__(self, mysql_pool: PooledDB, s7conn: S7Client, logger: Logger):
+        self.mysql_pool = mysql_pool
+        self.s7conn = s7conn
+        self.logger = logger
+        self.datas = {"is_use_model": False, "is_use_length": False, "棒一变棒三定尺": False}
+        self.model_datas = [0 for i in range(8)]
+
+        self.thread_flag = True
+        self.thread = threading.Thread(target=self.update_forever)
+        self.thread.start()
+        self.thread_write = threading.Thread(target=self.write_forever)
+        self.thread_write.start()
+    
+    def get_value(self, name, default=0):
+        if name == "棒一变棒三定尺":
+            sql = f"SELECT bool_value from industrial_data.realtime_data WHERE point_id = 64;"
+        else:
+            sql = f"SELECT {name} from length_config;"
+        try:
+            with self.mysql_pool.connection() as conn:
+                with conn.cursor() as cursor:
+                    cursor.execute(sql)
+                    result = cursor.fetchall()
+            if len(result) == 0:
+                raise ValueError("Read no data from mysql.")
+            return result[0][0]
+        except Exception as e:
+            self.logger.error(f"[mysql]:{e}")
+            return default
+        
+    def get_model_value(self, strand_no, default=0):
+        sql = f"SELECT model_compensation FROM prediction_full_data.prediction{strand_no}_full_data WHERE model_compensation IS NOT NULL ORDER BY prediction_timestamp DESC LIMIT 1;"
+        try:
+            with self.mysql_pool.connection() as conn:
+                with conn.cursor() as cursor:
+                    cursor.execute(sql)
+                    result = cursor.fetchall()
+            if len(result) == 0:
+                raise ValueError("Read no data from mysql.")
+            return result[0][0]
+        except Exception as e:
+            self.logger.error(f"[mysql]:{e}")
+            return default
+        
+    def update_forever(self, fru=500):
+        while self.thread_flag:
+            for i in self.datas.keys():
+                self.datas[i] = self.get_value(i)
+            for i in range(8):
+                self.model_datas[i] = self.get_model_value(i+1)
+            time.sleep(fru/1000)
+
+    def write_forever(self, fru=500):
+        while self.thread_flag:
+            data = bytearray(1)
+            snap7.util.set_bool(data, 0, 1, self.datas["is_use_model"])
+            snap7.util.set_bool(data, 0, 0, not self.datas["棒一变棒三定尺"])
+            snap7.util.set_bool(data, 0, 3, self.datas["棒一变棒三定尺"])
+            self.s7conn.db_write(420, 32, data)
+            self.s7conn.db_write(421, 32, data)
+
+            data = bytearray(32)
+            for i in range(8):
+                snap7.util.set_real(data, i*4, self.model_datas[i])
+            self.s7conn.db_write(420, 0, data)
+            self.s7conn.db_write(421, 0, data)
+
+            time.sleep(fru/1000)

+ 263 - 0
models/steel_fit.py

@@ -0,0 +1,263 @@
+from utils.statepoint import Statepoint
+from models.data_sender import Sender
+import numpy as np
+from scipy import interpolate, integrate
+from collections import deque
+from models.cip_data import CIPData
+from utils.s7data import S7data
+import datetime, logging, time, threading, queue
+
+def interp1d(x, y, logger: logging.Logger):
+    if x[-1] < datetime.datetime.now().timestamp() - 300:
+        logger.error("连续5分钟未采集到变化数值,表示为-1")
+        return lambda data: -1
+    if len(x) < 4:
+        logger.warning("采集到的数量低于4个,表示为平均值")
+        avg = np.mean(y)
+        return lambda data: avg
+    try:
+        return interpolate.interp1d(x, y, kind='cubic')
+    except Exception as e:
+        logger.error(f"三次样条插值失败:{e}")
+        return lambda data: -1
+
+class BufferPoint(Statepoint):
+    def __init__(self, initvalue = None, initstate = False, maxlen: int | None = 3000):
+        super().__init__(deque(maxlen = maxlen), initstate)
+
+    def inject(self, data):
+        self.data.append((data, datetime.datetime.now().timestamp()))
+
+    def get_buffer(self):
+        res = self.data.copy()
+        last = res[-1][0]
+        res.append((last, datetime.datetime.now().timestamp() + 0.001))
+        return res
+
+class billet_data_gatherer:
+    def __init__(self, dspeed_point: BufferPoint, cutting_sig_point: Statepoint, sizing_point: Statepoint, flow_rate_point_list: list[BufferPoint],
+                 logger: logging.Logger, strand_no: int, result_queue: queue.Queue):
+        self.dspeed_point = dspeed_point
+        self.cutting_sig_point = cutting_sig_point
+        self.sizing_point = sizing_point
+        self.flow_rate_point_list = flow_rate_point_list
+        self.logger = logger
+        self.strand_no = strand_no
+        self.result_queue = result_queue
+
+        self.MOLD_TO_CUTTER_DISTANCE = 28
+        self.CRITICAL_ZONE_LENGTH = 12
+
+        self.cutting_sig_point.set_excite_action(self.cutting_action)
+        
+    def cutting_action(self):
+        cutting_time = datetime.datetime.now().timestamp()
+        sizing = self.sizing_point.data / 1000
+
+        self.logger.debug(f"{self.strand_no}流开始切割")
+
+        time.sleep(30)
+        dspeed_buffer: deque = self.dspeed_point.get_buffer()
+        flow_rate_buffer_list: list[deque] = [self.flow_rate_point_list[i].get_buffer() for i in range(5)]
+
+        if len(dspeed_buffer) < 10:
+            self.logger.debug(f"{self.strand_no}流已统计数据量不足,无法计算")
+            return
+
+        data_tuple, time_tuple = zip(*dspeed_buffer)
+
+        x = np.array(time_tuple)
+        y = np.array(data_tuple) / 60
+        vt_func = interp1d(x, y, self.logger)
+        entry_time = self._binary_search_start(vt_func, cutting_time, sizing + self.MOLD_TO_CUTTER_DISTANCE)
+
+        if entry_time == None:
+            self.logger.debug(f"{self.strand_no}流已统计数据量不足,无法计算")
+            return
+        
+        exit_time = self._binary_search_end(vt_func, entry_time, sizing + self.CRITICAL_ZONE_LENGTH)
+        dspeed_avg = (sizing + self.CRITICAL_ZONE_LENGTH) / (exit_time - entry_time) * 60
+
+        self.create_data(cutting_time, entry_time, exit_time, self.flow_rate_total(flow_rate_buffer_list, entry_time, exit_time), dspeed_avg)
+        
+    def _binary_search_start(self, func, upper_limit, target):
+        left = func.x.min()
+        right = func.x.max()
+
+        if self._get_distance(func, left, upper_limit) < target:
+            return
+
+        while abs(right - left) > 0.01:
+            mid = (left + right) / 2
+            if self._get_distance(func, mid, upper_limit) >= target:
+                left = mid
+            else:
+                right = mid
+        
+        return (left + right) / 2
+    
+    def _binary_search_end(self, func, lower_limit, target):
+        left = func.x.min()
+        right = func.x.max()
+
+        while abs(right - left) > 0.01:
+            mid = (left + right) / 2
+            if self._get_distance(func, lower_limit, mid) >= target:
+                right = mid
+            else:
+                left = mid
+        
+        return (left + right) / 2
+
+    def _get_distance(self, vt_func, lower, upper):
+        return integrate.quad(vt_func, lower, upper)[0]
+    
+    def flow_rate_total(self, deque_list: list[deque], start_time, end_time):
+        res = []
+        for dequei in deque_list:
+            data_tuple, time_tuple = zip(*dequei)
+            x = np.array(time_tuple)
+            y = np.array(data_tuple) / 3600
+            vt_func = interp1d(x, y, self.logger)
+            total = integrate.quad(vt_func, start_time, end_time)[0]
+            res.append(total)
+
+        return sum(res)
+    
+    def create_data(self, cutting_time, entry_time, exit_time, water_total, dspeed_avg):
+        self.logger.debug(f"{self.strand_no}流钢坯计算结果:")
+        self.logger.debug(f'\t切割时间:{datetime.datetime.fromtimestamp(cutting_time).strftime("%Y-%m-%d %H:%M:%S")}')
+        self.logger.debug(f'\t进入时间:{datetime.datetime.fromtimestamp(entry_time).strftime("%Y-%m-%d %H:%M:%S")}')
+        self.logger.debug(f'\t离开时间:{datetime.datetime.fromtimestamp(exit_time).strftime("%Y-%m-%d %H:%M:%S")}')
+        self.logger.debug(f'\t水量总计:{water_total}')
+        self.logger.debug(f'\t平均拉速:{dspeed_avg}')
+        self.result_queue.put((self.strand_no, float(cutting_time), float(entry_time), float(exit_time), float(water_total), float(dspeed_avg)))
+
+class SteelFit:
+    def __init__(self, s7_data_20: S7data, s7_data_215: S7data, cip_data: CIPData, sender: Sender, logger: logging.Logger):
+        self.water_temperature_buffer: BufferPoint = cip_data.make_point("5#二冷水总管温度", BufferPoint)
+        self.water_pressure_buffer: BufferPoint = cip_data.make_point("5#二冷水总管压力", BufferPoint)
+        self.steel_temperature_buffer: BufferPoint = s7_data_20.make_point("中间包连续测温温度", BufferPoint)
+        self.water_temperature_difference_buffer: BufferPoint = cip_data.make_point("5#结晶器水温差", BufferPoint)
+
+        self.dspeed_buffer = [s7_data_20.make_point(f"{i}流结晶器拉速", BufferPoint) for i in range(1, 9)]
+        self.cutting_sig_point = [s7_data_215.make_point(f"L{i}切割信号[0]") for i in range(1, 9)]
+        self.sizing_point = [s7_data_20.make_point(f"{i}流定尺") for i in range(1, 9)]
+        self.flow_rate_point_list = [[cip_data.make_point(f"5#水流量-{i}流-{j}段", BufferPoint) for j in range(1, 6)] for i in range(1, 9)]
+
+        self.sender = sender
+        self.logger = logger
+        self.task_queue = queue.Queue()
+
+        self.billet_data_gatherer_list = [
+            billet_data_gatherer(
+                self.dspeed_buffer[i],
+                self.cutting_sig_point[i],
+                self.sizing_point[i],
+                self.flow_rate_point_list[i],
+                logger,
+                i + 1,
+                self.task_queue
+            )
+            for i in range(8)
+        ]
+        
+        self.thread_run = True
+        self.thread = threading.Thread(target=self.loop_process)
+        self.thread.start()
+
+    def loop_process(self):
+        while self.thread_run:
+            try:
+                task_tuple = self.task_queue.get(True, 1)
+                tmp_dict = {}
+                tmp_dict["strand_no"] = task_tuple[0]
+                tmp_dict["cutting_time"] = datetime.datetime.fromtimestamp(task_tuple[1])
+                tmp_dict["entry_time"] = datetime.datetime.fromtimestamp(task_tuple[2])
+                tmp_dict["exit_time"] = datetime.datetime.fromtimestamp(task_tuple[3])
+                
+                cal_res = self.cal_data(task_tuple[2], task_tuple[3])
+                tmp_dict["water_temperature"] = cal_res[0]
+                tmp_dict["water_pressure"] = cal_res[1]
+                tmp_dict["water_volume"] = task_tuple[4]
+                tmp_dict["water_pressure_sd"] = cal_res[2]
+                tmp_dict["steel_temperature"] = cal_res[3]
+                tmp_dict["drawing_speed"] = task_tuple[5]
+                tmp_dict["water_temperature_difference"] = cal_res[4]
+
+                self.sender.upload_billet(tmp_dict)
+            except queue.Empty:
+                pass
+            except Exception as e:
+                self.logger.error(f"铸机数据计算过程中出现意外:{e}")
+
+    def cal_data(self, entry_time, exit_time):
+        wt = self.interval_avg(self.water_temperature_buffer.get_buffer(), entry_time, exit_time)
+        wp = self.interval_avg(self.water_pressure_buffer.get_buffer(), entry_time, exit_time)
+        wps = self.interval_sd(self.water_pressure_buffer.get_buffer(), entry_time, exit_time)
+        st = self.interval_avg(self.steel_temperature_buffer.get_buffer(), entry_time, exit_time)
+        wtd = self.interval_avg(self.water_temperature_difference_buffer.get_buffer(), entry_time, exit_time)
+
+        return (float(wt), float(wp), float(wps), float(st), float(wtd))
+
+    def interval_avg(self, buffer, left, right):
+        data_tuple, time_tuple = zip(*buffer)
+        x = np.array(time_tuple)
+        y = np.array(data_tuple)
+        func = interp1d(x, y, self.logger)
+        inte = integrate.quad(func, left, right)[0]
+
+        return inte / (right - left)
+
+    def interval_sd(self, buffer, left, right):
+        data_tuple, time_tuple = zip(*buffer)
+        x = np.array(time_tuple)
+        y = np.array(data_tuple)
+        func = interp1d(x, y, self.logger)
+        inte = integrate.quad(func, left, right)[0]
+        avg = inte / (right - left)
+
+        func2 = lambda x: (func(x) - avg) ** 2
+        inte2 = integrate.quad(func2, left, right)[0]
+        avg2 = inte2 / (right - left)
+
+        return avg2 ** 0.5
+
+
+if __name__ == "__main__":
+    from utils.s7data import S7Client, S7data
+    from utils.logger import Logger
+
+    # 配置S7连接
+    s7_1 = S7Client()
+    s7_1.connect("172.16.1.20", 0, 0)
+    data_1 = S7data("conf/s7@172.16.1.20.csv")
+    data_1.set_S7Client(s7_1)
+    data_1.auto_update_group()
+    
+    s7_2 = S7Client()
+    s7_2.connect("172.16.1.21", 0, 0)
+    data_2 = S7data("conf/s7@172.16.1.21.csv")
+    data_2.set_S7Client(s7_2)
+    data_2.auto_update_group()
+
+    s7_3 = S7Client()
+    s7_3.connect("192.168.1.215", 0, 0)
+    data_3 = S7data("conf/s7@192.168.1.215.csv")
+    data_3.set_S7Client(s7_3)
+    data_3.auto_update_group()
+
+    # 配置CIP连接
+    cip_data = CIPData("192.168.3.100")
+    cip_data.start_update()
+    
+    # 配置日志模块
+    logger = Logger('test')
+    logger.screen_on()
+
+    class C:
+        def upload_billet(self, arg_dict: dict):
+            pass
+
+    # 钢坯拟合模块
+    steel_fit = SteelFit(data_1, data_3, cip_data, C(), logger)

+ 30 - 0
sql/event.sql

@@ -0,0 +1,30 @@
+USE industrial_data;
+
+-- 创建最近数据表
+CREATE TABLE recent_data (
+    id BIGINT AUTO_INCREMENT PRIMARY KEY,
+    point_id INT NOT NULL COMMENT '数据点ID',
+    bool_value BOOLEAN NULL COMMENT '布尔值',
+    int_value INT NULL COMMENT '整数值',
+    real_value FLOAT NULL COMMENT '实数值',
+    timestamp TIMESTAMP(3) NOT NULL COMMENT '数据时间戳(毫秒精度)',
+    create_time TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP(3) COMMENT '数据创建时间(毫秒精度)',
+    FOREIGN KEY (point_id) REFERENCES data_points(id),
+    INDEX (point_id, timestamp)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COMMENT='最近数据表';
+
+
+DELIMITER //
+
+CREATE EVENT purge_old_data
+ON SCHEDULE EVERY 1 DAY 
+STARTS TIMESTAMP(CURRENT_DATE, '01:00:00') + INTERVAL 1 DAY
+COMMENT '每天凌晨删除昨天之前的所有数据'
+DO
+BEGIN
+    -- 删除昨天之前的所有数据(保留昨天和今天的数据)
+    DELETE FROM recent_data 
+    WHERE timestamp < DATE_SUB(CURRENT_DATE, INTERVAL 1 DAY);
+END//
+
+DELIMITER ;

+ 27 - 0
sql/steel_cutting.sql

@@ -0,0 +1,27 @@
+CREATE TABLE steel_billet_monitoring (
+    billet_id BIGINT AUTO_INCREMENT PRIMARY KEY COMMENT '拟合钢坯编号',
+    strand_no TINYINT NOT NULL COMMENT '铸流编号',
+    
+    -- 时间参数
+    cutting_time DATETIME NOT NULL COMMENT '钢坯开始切割时间',
+    entry_time DATETIME NOT NULL COMMENT '钢坯进入结晶器前12m时间',
+    exit_time DATETIME NOT NULL COMMENT '钢坯离开结晶器前12m时间',
+    
+    -- 冷却水参数
+    water_temperature DECIMAL(5,2) COMMENT '二冷水平均水温',
+    water_pressure DECIMAL(6,2) COMMENT '二冷水平均水压',
+    water_volume DECIMAL(8,2) COMMENT '5段总水量',
+    water_pressure_sd DECIMAL(6,2) COMMENT '二冷水水压标准差',
+    
+    -- 钢温度
+    steel_temperature DECIMAL(7,2) COMMENT '平均钢温',
+
+    -- 结晶器
+    drawing_speed DECIMAL(7,2) COMMENT '平均拉速',
+    water_temperature_difference DECIMAL(7,2) COMMENT '结晶器平均水温差',
+    
+    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP COMMENT '记录创建时间',
+    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP COMMENT '记录更新时间',
+    
+    INDEX idx_cutting_time (cutting_time)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COMMENT='钢坯拟合数据表';

+ 133 - 0
sql/steel_making.sql

@@ -0,0 +1,133 @@
+-- 新建炼钢数据采集数据库
+CREATE DATABASE IF NOT EXISTS steelmaking_data DEFAULT CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci;
+USE steelmaking_data;
+
+-- 创建炼钢数据点表
+CREATE TABLE data_points (
+    id INT AUTO_INCREMENT PRIMARY KEY,
+    ip_address VARCHAR(20) NOT NULL COMMENT 'PLC网络地址',
+    name VARCHAR(100) NOT NULL COMMENT '数据点名称',
+    type VARCHAR(20) NOT NULL COMMENT '数据类型(bool/int/dint/real)',
+    db_number INT NOT NULL COMMENT 'DB块号',
+    start_offset INT NOT NULL COMMENT '起始偏移量',
+    bit_offset INT NOT NULL COMMENT '位偏移量(仅bool类型有意义)',
+    size INT NOT NULL COMMENT '数据大小(字节)',
+    read_allow BOOLEAN NOT NULL COMMENT '是否允许读取',
+    write_allow BOOLEAN NOT NULL COMMENT '是否允许写入',
+    frequency INT NOT NULL COMMENT '采集频率(ms)',
+    group_id INT NOT NULL COMMENT '组别',
+    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间',
+    UNIQUE KEY (ip_address, name)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COMMENT='炼钢数据点配置表';
+
+-- 创建炼钢实时数据表
+CREATE TABLE realtime_data (
+    id BIGINT AUTO_INCREMENT PRIMARY KEY,
+    point_id INT NOT NULL UNIQUE COMMENT '数据点ID',
+    bool_value BOOLEAN NULL COMMENT '布尔值',
+    int_value INT NULL COMMENT '整数值',
+    real_value FLOAT NULL COMMENT '实数值',
+    timestamp TIMESTAMP(3) NOT NULL COMMENT '数据时间戳(毫秒精度)',
+    update_time TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP(3) ON UPDATE CURRENT_TIMESTAMP(3) COMMENT '最后更新时间(毫秒精度)',
+    FOREIGN KEY (point_id) REFERENCES data_points(id)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COMMENT='炼钢实时数据表';
+
+-- 创建炼钢历史数据表
+CREATE TABLE historical_data (
+    id BIGINT AUTO_INCREMENT PRIMARY KEY,
+    point_id INT NOT NULL COMMENT '数据点ID',
+    bool_value BOOLEAN NULL COMMENT '布尔值',
+    int_value INT NULL COMMENT '整数值',
+    real_value FLOAT NULL COMMENT '实数值',
+    timestamp TIMESTAMP(3) NOT NULL COMMENT '数据时间戳(毫秒精度)',
+    create_time TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP(3) COMMENT '数据创建时间(毫秒精度)',
+    FOREIGN KEY (point_id) REFERENCES data_points(id),
+    INDEX (point_id, timestamp)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COMMENT='炼钢历史数据表';
+
+-- 插入炼钢PLC 172.16.1.20的数据点配置
+INSERT INTO data_points (ip_address, name, type, db_number, start_offset, bit_offset, size, read_allow, write_allow, frequency, group_id) VALUES
+('172.16.1.20', '中间包连续测温温度', 'dint', 7, 4, 0, 4, TRUE, FALSE, 500, 1),
+('172.16.1.20', '中间包手动测温', 'dint', 4, 30, 0, 4, TRUE, FALSE, 500, 1),
+('172.16.1.20', '1流结晶器拉速', 'real', 6, 36, 0, 4, TRUE, FALSE, 500, 1),
+('172.16.1.20', '1流结晶器通钢量', 'real', 15, 0, 0, 4, TRUE, FALSE, 500, 1),
+('172.16.1.20', '2流结晶器拉速', 'real', 6, 40, 0, 4, TRUE, FALSE, 500, 1),
+('172.16.1.20', '2流结晶器通钢量', 'real', 15, 4, 0, 4, TRUE, FALSE, 500, 1),
+('172.16.1.20', '3流结晶器拉速', 'real', 6, 44, 0, 4, TRUE, FALSE, 500, 1),
+('172.16.1.20', '3流结晶器通钢量', 'real', 15, 8, 0, 4, TRUE, FALSE, 500, 1),
+('172.16.1.20', '4流结晶器拉速', 'real', 6, 48, 0, 4, TRUE, FALSE, 500, 1),
+('172.16.1.20', '4流结晶器通钢量', 'real', 15, 12, 0, 4, TRUE, FALSE, 500, 1),
+('172.16.1.20', '5流结晶器拉速', 'real', 6, 52, 0, 4, TRUE, FALSE, 500, 1),
+('172.16.1.20', '5流结晶器通钢量', 'real', 15, 16, 0, 4, TRUE, FALSE, 500, 1),
+('172.16.1.20', '6流结晶器拉速', 'real', 6, 56, 0, 4, TRUE, FALSE, 500, 1),
+('172.16.1.20', '6流结晶器通钢量', 'real', 15, 20, 0, 4, TRUE, FALSE, 500, 1),
+('172.16.1.20', '7流结晶器拉速', 'real', 6, 60, 0, 4, TRUE, FALSE, 500, 1),
+('172.16.1.20', '7流结晶器通钢量', 'real', 15, 24, 0, 4, TRUE, FALSE, 500, 1),
+('172.16.1.20', '8流结晶器拉速', 'real', 6, 64, 0, 4, TRUE, FALSE, 500, 1),
+('172.16.1.20', '8流结晶器通钢量', 'real', 15, 28, 0, 4, TRUE, FALSE, 500, 1),
+('172.16.1.20', '1流定尺', 'real', 6, 72, 0, 4, TRUE, FALSE, 500, 1),
+('172.16.1.20', '2流定尺', 'real', 6, 76, 0, 4, TRUE, FALSE, 500, 2),
+('172.16.1.20', '3流定尺', 'real', 6, 80, 0, 4, TRUE, FALSE, 500, 2),
+('172.16.1.20', '4流定尺', 'real', 6, 84, 0, 4, TRUE, FALSE, 500, 2),
+('172.16.1.20', '5流定尺', 'real', 6, 88, 0, 4, TRUE, FALSE, 500, 2),
+('172.16.1.20', '6流定尺', 'real', 6, 92, 0, 4, TRUE, FALSE, 500, 2),
+('172.16.1.20', '7流定尺', 'real', 6, 96, 0, 4, TRUE, FALSE, 500, 2),
+('172.16.1.20', '8流定尺', 'real', 6, 100, 0, 4, TRUE, FALSE, 500, 2);
+
+-- 插入炼钢PLC 172.16.1.21的数据点配置
+INSERT INTO data_points (ip_address, name, type, db_number, start_offset, bit_offset, size, read_allow, write_allow, frequency, group_id) VALUES
+('172.16.1.21', '5#结晶器流量', 'real', 16, 232, 0, 4, TRUE, FALSE, 500, 1),
+('172.16.1.21', '5#结晶器水温差', 'real', 16, 236, 0, 4, TRUE, FALSE, 500, 1),
+('172.16.1.21', '5#二冷水总管压力', 'real', 16, 240, 0, 4, TRUE, FALSE, 500, 1),
+('172.16.1.21', '5#结晶器进水温度', 'real', 16, 244, 0, 4, TRUE, FALSE, 500, 1),
+('172.16.1.21', '5#结晶器水压', 'real', 16, 248, 0, 4, TRUE, FALSE, 500, 1),
+('172.16.1.21', '5#二冷水总管温度', 'real', 16, 252, 0, 4, TRUE, FALSE, 500, 1),
+('172.16.1.21', '5#水流量-1流-1段', 'real', 16, 0, 0, 4, TRUE, FALSE, 500, 1),
+('172.16.1.21', '5#水流量-1流-2段', 'real', 16, 4, 0, 4, TRUE, FALSE, 500, 1),
+('172.16.1.21', '5#水流量-1流-3段', 'real', 16, 8, 0, 4, TRUE, FALSE, 500, 1),
+('172.16.1.21', '5#水流量-1流-4段', 'real', 16, 12, 0, 4, TRUE, FALSE, 500, 1),
+('172.16.1.21', '5#水流量-1流-5段', 'real', 16, 16, 0, 4, TRUE, FALSE, 500, 1),
+('172.16.1.21', '5#水流量-2流-1段', 'real', 16, 20, 0, 4, TRUE, FALSE, 500, 1),
+('172.16.1.21', '5#水流量-2流-2段', 'real', 16, 24, 0, 4, TRUE, FALSE, 500, 1),
+('172.16.1.21', '5#水流量-2流-3段', 'real', 16, 28, 0, 4, TRUE, FALSE, 500, 1),
+('172.16.1.21', '5#水流量-2流-4段', 'real', 16, 32, 0, 4, TRUE, FALSE, 500, 1),
+('172.16.1.21', '5#水流量-2流-5段', 'real', 16, 36, 0, 4, TRUE, FALSE, 500, 1),
+('172.16.1.21', '5#水流量-3流-1段', 'real', 16, 40, 0, 4, TRUE, FALSE, 500, 1),
+('172.16.1.21', '5#水流量-3流-2段', 'real', 16, 44, 0, 4, TRUE, FALSE, 500, 1),
+('172.16.1.21', '5#水流量-3流-3段', 'real', 16, 48, 0, 4, TRUE, FALSE, 500, 1),
+('172.16.1.21', '5#水流量-3流-4段', 'real', 16, 52, 0, 4, TRUE, FALSE, 500, 2),
+('172.16.1.21', '5#水流量-3流-5段', 'real', 16, 56, 0, 4, TRUE, FALSE, 500, 2),
+('172.16.1.21', '5#水流量-4流-1段', 'real', 16, 60, 0, 4, TRUE, FALSE, 500, 2),
+('172.16.1.21', '5#水流量-4流-2段', 'real', 16, 64, 0, 4, TRUE, FALSE, 500, 2),
+('172.16.1.21', '5#水流量-4流-3段', 'real', 16, 68, 0, 4, TRUE, FALSE, 500, 2),
+('172.16.1.21', '5#水流量-4流-4段', 'real', 16, 72, 0, 4, TRUE, FALSE, 500, 2),
+('172.16.1.21', '5#水流量-4流-5段', 'real', 16, 76, 0, 4, TRUE, FALSE, 500, 2),
+('172.16.1.21', '5#水流量-5流-1段', 'real', 16, 80, 0, 4, TRUE, FALSE, 500, 2),
+('172.16.1.21', '5#水流量-5流-2段', 'real', 16, 84, 0, 4, TRUE, FALSE, 500, 2),
+('172.16.1.21', '5#水流量-5流-3段', 'real', 16, 88, 0, 4, TRUE, FALSE, 500, 2),
+('172.16.1.21', '5#水流量-5流-4段', 'real', 16, 92, 0, 4, TRUE, FALSE, 500, 2),
+('172.16.1.21', '5#水流量-5流-5段', 'real', 16, 96, 0, 4, TRUE, FALSE, 500, 2),
+('172.16.1.21', '5#水流量-6流-1段', 'real', 16, 100, 0, 4, TRUE, FALSE, 500, 2),
+('172.16.1.21', '5#水流量-6流-2段', 'real', 16, 104, 0, 4, TRUE, FALSE, 500, 2),
+('172.16.1.21', '5#水流量-6流-3段', 'real', 16, 108, 0, 4, TRUE, FALSE, 500, 2),
+('172.16.1.21', '5#水流量-6流-4段', 'real', 16, 112, 0, 4, TRUE, FALSE, 500, 2),
+('172.16.1.21', '5#水流量-6流-5段', 'real', 16, 116, 0, 4, TRUE, FALSE, 500, 2),
+('172.16.1.21', '5#水流量-7流-1段', 'real', 16, 120, 0, 4, TRUE, FALSE, 500, 2),
+('172.16.1.21', '5#水流量-7流-2段', 'real', 16, 124, 0, 4, TRUE, FALSE, 500, 2),
+('172.16.1.21', '5#水流量-7流-3段', 'real', 16, 128, 0, 4, TRUE, FALSE, 500, 3),
+('172.16.1.21', '5#水流量-7流-4段', 'real', 16, 132, 0, 4, TRUE, FALSE, 500, 3),
+('172.16.1.21', '5#水流量-7流-5段', 'real', 16, 136, 0, 4, TRUE, FALSE, 500, 3),
+('172.16.1.21', '5#水流量-8流-1段', 'real', 16, 140, 0, 4, TRUE, FALSE, 500, 3),
+('172.16.1.21', '5#水流量-8流-2段', 'real', 16, 144, 0, 4, TRUE, FALSE, 500, 3),
+('172.16.1.21', '5#水流量-8流-3段', 'real', 16, 148, 0, 4, TRUE, FALSE, 500, 3),
+('172.16.1.21', '5#水流量-8流-4段', 'real', 16, 152, 0, 4, TRUE, FALSE, 500, 3),
+('172.16.1.21', '5#水流量-8流-5段', 'real', 16, 156, 0, 4, TRUE, FALSE, 500, 3);
+
+-- 初始化炼钢实时数据表
+-- dint类型数据点
+INSERT INTO realtime_data (point_id, int_value, timestamp)
+SELECT id, 0, NOW(3) FROM data_points WHERE type = 'dint';
+
+-- real类型数据点
+INSERT INTO realtime_data (point_id, real_value, timestamp)
+SELECT id, 0.0, NOW(3) FROM data_points WHERE type = 'real';

+ 68 - 65
sql/steel_rolling.sql

@@ -4,6 +4,7 @@ USE industrial_data;
 -- 创建数据点表
 CREATE TABLE data_points (
     id INT AUTO_INCREMENT PRIMARY KEY,
+    ip_address VARCHAR(20) NOT NULL COMMENT 'PLC网络地址',
     name VARCHAR(100) NOT NULL COMMENT '数据点名称',
     type VARCHAR(20) NOT NULL COMMENT '数据类型(bool/int/dint/real)',
     db_number INT NOT NULL COMMENT 'DB块号',
@@ -26,11 +27,12 @@ CREATE TABLE realtime_data (
     int_value INT NULL COMMENT '整数值',
     real_value FLOAT NULL COMMENT '实数值',
     timestamp TIMESTAMP(3) NOT NULL COMMENT '数据时间戳(毫秒精度)',
+    update_time TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP(3) ON UPDATE CURRENT_TIMESTAMP(3) COMMENT '最后更新时间(毫秒精度)',
     FOREIGN KEY (point_id) REFERENCES data_points(id),
     INDEX (point_id, timestamp)
 ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COMMENT='实时数据表';
 
--- 创建历史数据表(按天分区)
+-- 创建历史数据表
 CREATE TABLE historical_data (
     id BIGINT AUTO_INCREMENT PRIMARY KEY,
     point_id INT NOT NULL COMMENT '数据点ID',
@@ -38,75 +40,76 @@ CREATE TABLE historical_data (
     int_value INT NULL COMMENT '整数值',
     real_value FLOAT NULL COMMENT '实数值',
     timestamp TIMESTAMP(3) NOT NULL COMMENT '数据时间戳(毫秒精度)',
+    create_time TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP(3) COMMENT '数据创建时间(毫秒精度)',
     FOREIGN KEY (point_id) REFERENCES data_points(id),
     INDEX (point_id, timestamp)
 ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COMMENT='历史数据表';
 
 -- 插入数据点配置
-INSERT INTO data_points (name, type, db_number, start_offset, bit_offset, size, read_allow, write_allow, frequency, group_id) VALUES
-('看门狗', 'bool', 889, 0, 0, 1, TRUE, FALSE, 500, 1),
-('18#有钢信号', 'bool', 889, 0, 1, 1, TRUE, FALSE, 500, 1),
-('炉号', 'dint', 889, 4, 0, 4, TRUE, FALSE, 500, 1),
-('定尺', 'dint', 889, 8, 0, 4, TRUE, FALSE, 500, 1),
-('流号(末架)', 'int', 889, 36, 0, 2, TRUE, FALSE, 500, 1),
-('尾钢长度(热检)', 'int', 889, 38, 0, 2, TRUE, FALSE, 500, 1),
-('轧制规格', 'real', 889, 52, 0, 4, TRUE, FALSE, 500, 1),
-('尾钢长度(激光)', 'real', 889, 56, 0, 4, TRUE, FALSE, 500, 1),
-('轧前温度', 'real', 889, 60, 0, 4, TRUE, FALSE, 500, 1),
-('1流修正值', 'real', 889, 64, 0, 4, TRUE, FALSE, 500, 1),
-('2流修正值', 'real', 889, 68, 0, 4, TRUE, FALSE, 500, 1),
-('3流修正值', 'real', 889, 72, 0, 4, TRUE, FALSE, 500, 1),
-('4流修正值', 'real', 889, 76, 0, 4, TRUE, FALSE, 500, 1),
-('5流修正值', 'real', 889, 80, 0, 4, TRUE, FALSE, 500, 1),
-('6流修正值', 'real', 889, 84, 0, 4, TRUE, FALSE, 500, 1),
-('7流修正值', 'real', 889, 88, 0, 4, TRUE, FALSE, 500, 1),
-('8流修正值', 'real', 889, 92, 0, 4, TRUE, FALSE, 500, 1),
-('1流炼钢反馈重量', 'real', 889, 96, 0, 4, TRUE, FALSE, 500, 1),
-('2流炼钢反馈重量', 'real', 889, 100, 0, 4, TRUE, FALSE, 500, 1),
-('3流炼钢反馈重量', 'real', 889, 104, 0, 4, TRUE, FALSE, 500, 2),
-('4流炼钢反馈重量', 'real', 889, 108, 0, 4, TRUE, FALSE, 500, 2),
-('5流炼钢反馈重量', 'real', 889, 112, 0, 4, TRUE, FALSE, 500, 2),
-('6流炼钢反馈重量', 'real', 889, 116, 0, 4, TRUE, FALSE, 500, 2),
-('7流炼钢反馈重量', 'real', 889, 120, 0, 4, TRUE, FALSE, 500, 2),
-('8流炼钢反馈重量', 'real', 889, 124, 0, 4, TRUE, FALSE, 500, 2),
-('米重', 'real', 889, 128, 0, 4, TRUE, FALSE, 500, 2),
-('尾钢设定', 'real', 889, 132, 0, 4, TRUE, FALSE, 500, 2),
-('1#设定速度', 'real', 889, 136, 0, 4, TRUE, FALSE, 500, 2),
-('2#设定速度', 'real', 889, 140, 0, 4, TRUE, FALSE, 500, 2),
-('3#设定速度', 'real', 889, 144, 0, 4, TRUE, FALSE, 500, 2),
-('4#设定速度', 'real', 889, 148, 0, 4, TRUE, FALSE, 500, 2),
-('5#设定速度', 'real', 889, 152, 0, 4, TRUE, FALSE, 500, 2),
-('6#设定速度', 'real', 889, 156, 0, 4, TRUE, FALSE, 500, 2),
-('7#设定速度', 'real', 889, 160, 0, 4, TRUE, FALSE, 500, 2),
-('8#设定速度', 'real', 889, 164, 0, 4, TRUE, FALSE, 500, 2),
-('9#设定速度', 'real', 889, 168, 0, 4, TRUE, FALSE, 500, 2),
-('10#设定速度', 'real', 889, 172, 0, 4, TRUE, FALSE, 500, 2),
-('11#设定速度', 'real', 889, 176, 0, 4, TRUE, FALSE, 500, 2),
-('12#设定速度', 'real', 889, 180, 0, 4, TRUE, FALSE, 500, 3),
-('13#设定速度', 'real', 889, 184, 0, 4, TRUE, FALSE, 500, 3),
-('14#设定速度', 'real', 889, 188, 0, 4, TRUE, FALSE, 500, 3),
-('15#设定速度', 'real', 889, 192, 0, 4, TRUE, FALSE, 500, 3),
-('16#设定速度', 'real', 889, 196, 0, 4, TRUE, FALSE, 500, 3),
-('17#设定速度', 'real', 889, 200, 0, 4, TRUE, FALSE, 500, 3),
-('18#设定速度', 'real', 889, 204, 0, 4, TRUE, FALSE, 500, 3),
-('1#辊径', 'real', 889, 208, 0, 4, TRUE, FALSE, 500, 3),
-('2#辊径', 'real', 889, 212, 0, 4, TRUE, FALSE, 500, 3),
-('3#辊径', 'real', 889, 216, 0, 4, TRUE, FALSE, 500, 3),
-('4#辊径', 'real', 889, 220, 0, 4, TRUE, FALSE, 500, 3),
-('5#辊径', 'real', 889, 224, 0, 4, TRUE, FALSE, 500, 3),
-('6#辊径', 'real', 889, 228, 0, 4, TRUE, FALSE, 500, 3),
-('7#辊径', 'real', 889, 232, 0, 4, TRUE, FALSE, 500, 3),
-('8#辊径', 'real', 889, 236, 0, 4, TRUE, FALSE, 500, 3),
-('9#辊径', 'real', 889, 240, 0, 4, TRUE, FALSE, 500, 3),
-('10#辊径', 'real', 889, 244, 0, 4, TRUE, FALSE, 500, 3),
-('11#辊径', 'real', 889, 248, 0, 4, TRUE, FALSE, 500, 3),
-('12#辊径', 'real', 889, 252, 0, 4, TRUE, FALSE, 500, 3),
-('13#辊径', 'real', 889, 256, 0, 4, TRUE, FALSE, 500, 4),
-('14#辊径', 'real', 889, 260, 0, 4, TRUE, FALSE, 500, 4),
-('15#辊径', 'real', 889, 264, 0, 4, TRUE, FALSE, 500, 4),
-('16#辊径', 'real', 889, 268, 0, 4, TRUE, FALSE, 500, 4),
-('17#辊径', 'real', 889, 272, 0, 4, TRUE, FALSE, 500, 4),
-('18#辊径', 'real', 889, 276, 0, 4, TRUE, FALSE, 500, 4);
+INSERT INTO data_points (ip_address, name, type, db_number, start_offset, bit_offset, size, read_allow, write_allow, frequency, group_id) VALUES
+('192.168.0.3', '看门狗', 'bool', 889, 0, 0, 1, TRUE, FALSE, 500, 1),
+('192.168.0.3', '18#有钢信号', 'bool', 889, 0, 1, 1, TRUE, FALSE, 500, 1),
+('192.168.0.3', '炉号', 'dint', 889, 4, 0, 4, TRUE, FALSE, 500, 1),
+('192.168.0.3', '定尺', 'dint', 889, 8, 0, 4, TRUE, FALSE, 500, 1),
+('192.168.0.3', '流号(末架)', 'int', 889, 36, 0, 2, TRUE, FALSE, 500, 1),
+('192.168.0.3', '尾钢长度(热检)', 'int', 889, 38, 0, 2, TRUE, FALSE, 500, 1),
+('192.168.0.3', '轧制规格', 'real', 889, 52, 0, 4, TRUE, FALSE, 500, 1),
+('192.168.0.3', '尾钢长度(激光)', 'real', 889, 56, 0, 4, TRUE, FALSE, 500, 1),
+('192.168.0.3', '轧前温度', 'real', 889, 60, 0, 4, TRUE, FALSE, 500, 1),
+('192.168.0.3', '1流修正值', 'real', 889, 64, 0, 4, TRUE, FALSE, 500, 1),
+('192.168.0.3', '2流修正值', 'real', 889, 68, 0, 4, TRUE, FALSE, 500, 1),
+('192.168.0.3', '3流修正值', 'real', 889, 72, 0, 4, TRUE, FALSE, 500, 1),
+('192.168.0.3', '4流修正值', 'real', 889, 76, 0, 4, TRUE, FALSE, 500, 1),
+('192.168.0.3', '5流修正值', 'real', 889, 80, 0, 4, TRUE, FALSE, 500, 1),
+('192.168.0.3', '6流修正值', 'real', 889, 84, 0, 4, TRUE, FALSE, 500, 1),
+('192.168.0.3', '7流修正值', 'real', 889, 88, 0, 4, TRUE, FALSE, 500, 1),
+('192.168.0.3', '8流修正值', 'real', 889, 92, 0, 4, TRUE, FALSE, 500, 1),
+('192.168.0.3', '1流炼钢反馈重量', 'real', 889, 96, 0, 4, TRUE, FALSE, 500, 1),
+('192.168.0.3', '2流炼钢反馈重量', 'real', 889, 100, 0, 4, TRUE, FALSE, 500, 1),
+('192.168.0.3', '3流炼钢反馈重量', 'real', 889, 104, 0, 4, TRUE, FALSE, 500, 2),
+('192.168.0.3', '4流炼钢反馈重量', 'real', 889, 108, 0, 4, TRUE, FALSE, 500, 2),
+('192.168.0.3', '5流炼钢反馈重量', 'real', 889, 112, 0, 4, TRUE, FALSE, 500, 2),
+('192.168.0.3', '6流炼钢反馈重量', 'real', 889, 116, 0, 4, TRUE, FALSE, 500, 2),
+('192.168.0.3', '7流炼钢反馈重量', 'real', 889, 120, 0, 4, TRUE, FALSE, 500, 2),
+('192.168.0.3', '8流炼钢反馈重量', 'real', 889, 124, 0, 4, TRUE, FALSE, 500, 2),
+('192.168.0.3', '米重', 'real', 889, 128, 0, 4, TRUE, FALSE, 500, 2),
+('192.168.0.3', '尾钢设定', 'real', 889, 132, 0, 4, TRUE, FALSE, 500, 2),
+('192.168.0.3', '1#设定速度', 'real', 889, 136, 0, 4, TRUE, FALSE, 500, 2),
+('192.168.0.3', '2#设定速度', 'real', 889, 140, 0, 4, TRUE, FALSE, 500, 2),
+('192.168.0.3', '3#设定速度', 'real', 889, 144, 0, 4, TRUE, FALSE, 500, 2),
+('192.168.0.3', '4#设定速度', 'real', 889, 148, 0, 4, TRUE, FALSE, 500, 2),
+('192.168.0.3', '5#设定速度', 'real', 889, 152, 0, 4, TRUE, FALSE, 500, 2),
+('192.168.0.3', '6#设定速度', 'real', 889, 156, 0, 4, TRUE, FALSE, 500, 2),
+('192.168.0.3', '7#设定速度', 'real', 889, 160, 0, 4, TRUE, FALSE, 500, 2),
+('192.168.0.3', '8#设定速度', 'real', 889, 164, 0, 4, TRUE, FALSE, 500, 2),
+('192.168.0.3', '9#设定速度', 'real', 889, 168, 0, 4, TRUE, FALSE, 500, 2),
+('192.168.0.3', '10#设定速度', 'real', 889, 172, 0, 4, TRUE, FALSE, 500, 2),
+('192.168.0.3', '11#设定速度', 'real', 889, 176, 0, 4, TRUE, FALSE, 500, 2),
+('192.168.0.3', '12#设定速度', 'real', 889, 180, 0, 4, TRUE, FALSE, 500, 3),
+('192.168.0.3', '13#设定速度', 'real', 889, 184, 0, 4, TRUE, FALSE, 500, 3),
+('192.168.0.3', '14#设定速度', 'real', 889, 188, 0, 4, TRUE, FALSE, 500, 3),
+('192.168.0.3', '15#设定速度', 'real', 889, 192, 0, 4, TRUE, FALSE, 500, 3),
+('192.168.0.3', '16#设定速度', 'real', 889, 196, 0, 4, TRUE, FALSE, 500, 3),
+('192.168.0.3', '17#设定速度', 'real', 889, 200, 0, 4, TRUE, FALSE, 500, 3),
+('192.168.0.3', '18#设定速度', 'real', 889, 204, 0, 4, TRUE, FALSE, 500, 3),
+('192.168.0.3', '1#辊径', 'real', 889, 208, 0, 4, TRUE, FALSE, 500, 3),
+('192.168.0.3', '2#辊径', 'real', 889, 212, 0, 4, TRUE, FALSE, 500, 3),
+('192.168.0.3', '3#辊径', 'real', 889, 216, 0, 4, TRUE, FALSE, 500, 3),
+('192.168.0.3', '4#辊径', 'real', 889, 220, 0, 4, TRUE, FALSE, 500, 3),
+('192.168.0.3', '5#辊径', 'real', 889, 224, 0, 4, TRUE, FALSE, 500, 3),
+('192.168.0.3', '6#辊径', 'real', 889, 228, 0, 4, TRUE, FALSE, 500, 3),
+('192.168.0.3', '7#辊径', 'real', 889, 232, 0, 4, TRUE, FALSE, 500, 3),
+('192.168.0.3', '8#辊径', 'real', 889, 236, 0, 4, TRUE, FALSE, 500, 3),
+('192.168.0.3', '9#辊径', 'real', 889, 240, 0, 4, TRUE, FALSE, 500, 3),
+('192.168.0.3', '10#辊径', 'real', 889, 244, 0, 4, TRUE, FALSE, 500, 3),
+('192.168.0.3', '11#辊径', 'real', 889, 248, 0, 4, TRUE, FALSE, 500, 3),
+('192.168.0.3', '12#辊径', 'real', 889, 252, 0, 4, TRUE, FALSE, 500, 3),
+('192.168.0.3', '13#辊径', 'real', 889, 256, 0, 4, TRUE, FALSE, 500, 4),
+('192.168.0.3', '14#辊径', 'real', 889, 260, 0, 4, TRUE, FALSE, 500, 4),
+('192.168.0.3', '15#辊径', 'real', 889, 264, 0, 4, TRUE, FALSE, 500, 4),
+('192.168.0.3', '16#辊径', 'real', 889, 268, 0, 4, TRUE, FALSE, 500, 4),
+('192.168.0.3', '17#辊径', 'real', 889, 272, 0, 4, TRUE, FALSE, 500, 4),
+('192.168.0.3', '18#辊径', 'real', 889, 276, 0, 4, TRUE, FALSE, 500, 4);
 
 -- 批量初始化bool类型数据点
 INSERT INTO realtime_data (point_id, bool_value, timestamp)

+ 81 - 0
steel_making_main.py

@@ -0,0 +1,81 @@
+from utils.s7data import S7Client, S7data
+from dbutils.pooled_db import PooledDB
+from utils.logger import Logger
+from models.data_sender import Sender
+from models.mysql_data import MysqlData
+from models.cip_data import CIPData
+from models.steel_fit import SteelFit
+import pymysql
+
+# 配置S7连接
+s7_1 = S7Client()
+s7_1.connect("172.16.1.20", 0, 0)
+data_1 = S7data("conf/s7@172.16.1.20.csv")
+data_1.set_S7Client(s7_1)
+data_1.auto_update_group()
+
+s7_2 = S7Client()
+s7_2.connect("172.16.1.21", 0, 0)
+data_2 = S7data("conf/s7@172.16.1.21.csv")
+data_2.set_S7Client(s7_2)
+data_2.auto_update_group()
+
+s7_3 = S7Client()
+s7_3.connect("192.168.1.215", 0, 0)
+data_3 = S7data("conf/s7@192.168.1.215.csv")
+data_3.set_S7Client(s7_3)
+data_3.auto_update_group()
+
+s7_4 = S7Client()
+s7_4.connect("192.168.1.215", 0, 0)
+
+
+# 配置CIP连接
+cip_data = CIPData("192.168.3.100")
+cip_data.start_update()
+
+
+# 配置MySQL连接池
+mysql_pool = PooledDB(
+    creator=pymysql,
+    maxconnections=60,
+    mincached=2,
+    blocking=True,
+    host='localhost',
+    user='root',
+    password='qwer1234',
+    database='steelmaking_data',
+    charset='utf8mb4'
+)
+
+mysql_pool_web = PooledDB(
+    creator=pymysql,
+    maxconnections=10,
+    mincached=1,
+    blocking=True,
+    host='192.168.3.165',
+    user='zgzt',
+    password='zgzt1234',
+    database='lg_length',
+    charset='utf8mb4'
+)
+
+# 配置日志模块
+logger = Logger('steel_making')
+logger.file_on_with_rotation('logs/steel_making.log')
+# logger.screen_on()
+
+logger2 = Logger('steel_fit')
+logger2.file_on_with_rotation('logs/steel_fit.log')
+# logger2.screen_on()
+
+# 配置主模块
+sender_1 = Sender(data_1, mysql_pool, logger, "172.16.1.20")
+sender_2 = Sender(data_2, mysql_pool, logger, "172.16.1.21")
+sender_3 = Sender(data_3, mysql_pool, logger, "192.168.1.215")
+
+# Mysql数据源
+data_mysql = MysqlData(mysql_pool_web, s7_4, logger)
+
+# 钢坯拟合模块
+steel_fit = SteelFit(data_1, data_3, cip_data, sender_1, logger2)

+ 33 - 0
steel_rolling_main.py

@@ -0,0 +1,33 @@
+from utils.s7data import S7Client, S7data
+from dbutils.pooled_db import PooledDB
+from utils.logger import Logger
+from models.data_sender import Sender
+import pymysql
+
+# 配置S7连接
+s7 = S7Client()
+s7.connect("192.168.0.3", 0, 3)
+data = S7data("conf/s7@192.168.0.3.csv")
+data.set_S7Client(s7)
+data.auto_update_group()
+
+# 配置MySQL连接池
+mysql_pool = PooledDB(
+    creator=pymysql,
+    maxconnections=40,
+    mincached=2,
+    blocking=True,
+    host='localhost',
+    user='root',
+    password='5538..',
+    database='industrial_data',
+    charset='utf8mb4'
+)
+
+# 配置日志模块
+logger = Logger('steel_rolling')
+logger.file_on_with_rotation('logs/steel_rolling.log')
+logger.screen_on()
+
+# 配置主模块
+sender = Sender(data, mysql_pool, logger, "192.168.0.3")

+ 0 - 13
steelmaking_main.py

@@ -1,13 +0,0 @@
-from utils.s7data import S7Client, S7data
-
-s7_1 = S7Client()
-s7_1.connect("172.16.1.20", 0, 0)
-data_1 = S7data("conf/s7@172.16.1.20.csv")
-data_1.set_S7Client(s7_1)
-data_1.auto_update_group()
-
-s7_2 = S7Client()
-s7_2.connect("172.16.1.21", 0, 0)
-data_2 = S7data("conf/s7@172.16.1.21.csv")
-data_2.set_S7Client(s7_2)
-data_2.auto_update_group()