Prechádzať zdrojové kódy

Merge branch 'master' of http://114.244.114.158:8802/siwei/sanya-dbms

zhongdawei 2 týždňov pred
rodič
commit
e9b02f5d4d
39 zmenil súbory, kde vykonal 1097 pridanie a 54 odobranie
  1. 0 34
      encrypt/data_decrypted.json
  2. 1 1
      encrypt/encry.py
  3. 4 2
      encrypt/siwei_config.json
  4. 1 1
      encrypt/siwei_config.json.enc
  5. 368 0
      processing/algs/gdal/DataStorageStatistics.py
  6. 3 0
      processing/algs/gdal/GdalAlgorithm.py
  7. 9 0
      processing/algs/gdal/GdalAlgorithmProvider.py
  8. 237 0
      processing/algs/gdal/ImportOSGBToPostGIS.py
  9. 249 0
      processing/algs/gdal/ImportSingleOSGBToPostGIS.py
  10. 205 0
      processing/algs/gdal/ImportTableToPostGIS.py
  11. 1 1
      processing/algs/gdal/rastertopostgislist.py
  12. 4 3
      processing/app.py
  13. BIN
      processing/images/dbms/CustomerMap.png
  14. BIN
      processing/images/dbms/export.png
  15. BIN
      processing/images/dbms/export2.png
  16. BIN
      processing/images/dbms/import.png
  17. BIN
      processing/images/dbms/importimage.png
  18. BIN
      processing/images/dbms/importvector.png
  19. BIN
      processing/images/dbms/logo.png
  20. BIN
      processing/images/dbms/map.png
  21. BIN
      processing/images/dbms/new.png
  22. BIN
      processing/images/dbms/publishserver.png
  23. BIN
      processing/images/dbms/refresh.png
  24. BIN
      processing/images/dbms/resource.png
  25. BIN
      processing/images/dbms/unselect.png
  26. BIN
      processing/images/dbms/view.png
  27. 3 1
      processing/tools/CustomWebView/Manager.py
  28. BIN
      processing/tools/FTP/icon/logo.png
  29. BIN
      processing/tools/FTP/icon/文件上传.png
  30. BIN
      processing/tools/FTP/icon/文件下载.png
  31. BIN
      processing/tools/FTP/icon/文件删除.png
  32. BIN
      processing/tools/FTP/icon/文件夹上传.png
  33. BIN
      processing/tools/FTP/icon/文件查看.png
  34. BIN
      processing/tools/FTP/icon/文件重命名.png
  35. BIN
      processing/tools/FTP/icon/新建文件夹.png
  36. BIN
      processing/tools/FTP/icon/模型查看.png
  37. 8 9
      processing/tools/GeoServer/Geoserver.py
  38. 1 0
      processing/tools/PostgreSQL/PgExxcute.py
  39. 3 2
      processing/tools/PostgreSQL/PostgreSQL.py

+ 0 - 34
encrypt/data_decrypted.json

@@ -1,34 +0,0 @@
-{
-    "db": {
-        "host": "192.168.60.52",
-        "port": "5432",
-        "schema": "vector",
-        "schema_base": "base",
-        "user": "postgres",
-        "password": "postgres",
-        "name": "real3d"
-    },
-    "redis": {
-        "host": "192.168.60.220",
-        "port": 6379,
-        "db": 0
-    },
-    "geoserver": {
-        "url": "http://192.168.60.2:28085/geoserver",
-        "username": "admin",
-        "password": "geoserver",
-        "default_workspace": "demo",
-        "cachestart": "0",
-        "cacheend": "15"
-    },
-    "ftp": {
-        "host": "192.168.60.52",
-        "port": 2021,
-        "username": "user1",
-        "password": "password"
-    },
-    "model": {
-        "viewer_uri": "http://192.168.60.52:8091/website/#model=",
-        "base_uri": "http://192.168.60.52:8090"
-    }
-}

+ 1 - 1
encrypt/encry.py

@@ -46,7 +46,7 @@ if __name__ == "__main__":
     # generate_key()
 
     # 加密
-    encrypt_json("siwei_config.json", "siwei_config.json.enc")
+    encrypt_json("siwei_config.json", r"C:\siwei_model\siwei_config.json.enc")
 
     # 解密
     # decrypt_json("siwei_config_encry.json", "data_decrypted.json")

+ 4 - 2
encrypt/siwei_config.json

@@ -1,4 +1,6 @@
 {
+  "host":"http://192.168.60.2",
+  "port": 8090,
   "db": {
     "host": "192.168.60.2",
     "port": "5432",
@@ -28,7 +30,7 @@
     "password": "password"
   },
   "model": {
-    "viewer_uri": "http://192.168.60.52:8091/website/#model=",
-    "base_uri": "http://192.168.60.52:8090"
+    "viewer_uri": "http://192.168.60.2:8091/website/#model=",
+    "base_uri": "http://192.168.60.2:8090"
   }
 }

+ 1 - 1
encrypt/siwei_config.json.enc

@@ -1 +1 @@
-gAAAAABoEHAmK-uD84fGoHCKItWwCkoCULrRXlMgVz7ZocPbS6bpn6Cd8j31MwXJ7-7_MKbX7qBDgsv2x51Zjgak_yW0O02D08jmEhHFIyCNiXk3oy3qCLI8LB1qRJc5vmZpVkI2Zz_uUBfnwgv8lQRtfvIbemDdxs4g9FIbxZ4EWo-dbWTkl5yXChWAtpVdVBzr2aCjEuhRWTBlffpIHD72-MZVwCQd5AcrWA9XHYz_bMS9G65h4J7m7j9eFxjaOUkhmYCh_k_muwHPwlJwZqCfoF78Aktta5t1hl2jTdQZ4IlisV-0l26EgKa0T33F473bY8NTaLYVDSLa9BZkHIM7ylzQx7Y4lSN2MluFpHYk9BGiTMmtMjb7h7ygCaUjgwcRMSLla3chW2XZ12bwzEO1ZyucgdUEutyHLVwY9dn5EfeMRGaKe83I7wubvu7lKUoqkI3rYI_IkY_j6iwR9y4jWI9oKuNmmV897aNmOj9kAH62c7xcHW7Ofk2BgNKw-al8sLZgldOpt5XO6wOPE5tWPTBXnYg_pGIZeCp2Klo2L4kxQNYymtA6Tzzun2F1crXz4EkUS8FzaGxzYsYWtLVksVks1ec889PBB1I2ORhOPArUgQfNvycRHuGLt6H8udFeVsB8yGxcPGeDr-qHLPCc4cxJH6U0XEVVYg3EqpC8OS9ZJ4zkKlAR5HM8mHUDhPMYTYnRAbLAeQ0HQYw_fC8kIWBY3mE0mLiFnILdIAo6xAPIgskXvWkc7zo2w3OiGfGSFuLcoSFd8gZpCk2F4PPhzxrCza7geUHaIfubNjuiZ3VxE_L2X1W4UhvmVPmkDUubFxxtllSrnOkCK1SJMbTnpMkfnqoL6Q==
+gAAAAABoKtXotLJoKVxJcjvxLJhaQq4TINJjgzYT7qpy8CJugBzsM40_vJYN33jTovvOUxsVSecXazq2FW5608vtpxGc7etbExbLu3IIB3lC77LjSbg-f8ZOseasRBQY2WCbk6Kz4rQd_UcTCrWzcXVszpGLHQwjiaufNEKMuXn20hMytwET_RWqc7J7eTGB3MOUGcRyoEJbyL61OXbrA30obbtq5wzmQzi_awrXtp2sgH4_pymaouzBXbKdXmBmO0N1yZXoXIJLPBWVAZyjaBE0k1Ok1N0HuJskzs-K4N5-yDvaTRmkMSDsEnVydgDHzjzthqaIE4oy5qzTJOFG0KOh7xzE79TxTkFJxCxzEXQrHQJzq_XRA5fk5o5MLeGFLFuhk6Afdzw8XCaJ1WO-jCtB3QYCPLaK0_96RNQ6WLr_LYeMwvd8N8bbvQyPoltK2-hH-FWnZjXetFQ1fsn6AmLGagqs9EZxvM60AT3UXfB8LSc7bzTknqYhvVrFnGhPzlXvuI-YDVc-z8Hrqvs7oaoT3VbaPx6tV6Zxi8RgwPZ74Oir1do9xdcmYuF378hiVvS_H7W6aPB-jeRCibTUpTjaSPT-yRn9tg8hRZUVKH6P9krCYTpZPKAhqi78alpLfXjgwRvhx-MkQ3Z4GyF09h08GV9VdpCnIM5Wylp5basXlTGlRfJiDYNfL5PygWYNRitnDRVhy9FPm6LkXN0OeSkpnyb4BsbyCHvXs9e-cWlNekGFGtJZwIO5dHwd0tZin44Ce3-4E-KoS1lxLdf_CV9B7MKqy9QF72k6oYs4hCEsRj9ih7UYWw7DldUC5eR3VWXhUO-N6u8RX5pWGGTIdQlRPGqRWsUV7f4vQ38YubA3xbvoGQJUsKWblfs4r-fyT49xNLxPer6W1VX1s9lyb8k15gfSDPHeNA==

+ 368 - 0
processing/algs/gdal/DataStorageStatistics.py

@@ -0,0 +1,368 @@
+__author__ = 'liying'
+__date__ = 'May 2025'
+__copyright__ = '(C) 2025, liying'
+
+import os
+import time
+import psycopg2
+import matplotlib.pyplot as plt
+from matplotlib import font_manager
+import numpy as np
+import siwei_config
+
+from PyQt5.QtWidgets import QDialog, QVBoxLayout
+from matplotlib.backends.backend_qt5agg import FigureCanvasQTAgg as FigureCanvas
+
+from qgis.core import (
+    QgsProcessingAlgorithm,
+    QgsProcessingParameterProviderConnection,
+    QgsProcessingParameterDatabaseSchema,
+    QgsProcessingParameterString,
+    QgsProcessingParameterFile,
+    QgsProcessingParameterBoolean
+)
+from qgis.utils import iface
+
+
+# 设置字体以支持中文
+def set_matplotlib_font():
+    try:
+        font_path = 'C:/Windows/Fonts/msyh.ttc'
+        prop = font_manager.FontProperties(fname=font_path)
+        plt.rcParams['font.family'] = prop.get_name()
+    except Exception as e:
+        print(f"字体设置失败:{e}")
+
+
+class DataStorageStatistics(QgsProcessingAlgorithm):
+    DATABASE = 'DATABASE'
+    SCHEMA = 'SCHEMA'
+    TABLE = 'TABLE'
+    EXPORT_DIR = 'EXPORT_DIR'
+    EXPORT_CHARTS = 'EXPORT_CHARTS'
+
+    def initAlgorithm(self, config=None):
+        self.addParameter(QgsProcessingParameterProviderConnection(
+            self.DATABASE, '数据库连接', 'postgres', defaultValue=siwei_config.CONFIG['db']['host']))
+
+        self.addParameter(QgsProcessingParameterDatabaseSchema(
+            self.SCHEMA, '模式', connectionParameterName=self.DATABASE, defaultValue='base'))
+
+        self.addParameter(QgsProcessingParameterString(
+            self.TABLE, '表名', defaultValue='t_vector_storage'))
+
+        self.addParameter(QgsProcessingParameterFile(
+            self.EXPORT_DIR, '图表导出目录', behavior=QgsProcessingParameterFile.Folder, optional=True))
+
+        self.addParameter(QgsProcessingParameterBoolean(
+            self.EXPORT_CHARTS, '是否导出图表', defaultValue=True))
+
+    def processAlgorithm(self, parameters, context, feedback):
+        connection_params = {
+            'host': siwei_config.CONFIG['db']['host'],
+            'port': siwei_config.CONFIG['db']['port'],
+            'dbname': siwei_config.CONFIG['db']['name'],
+            'user': siwei_config.CONFIG['db']['user'],
+            'password': siwei_config.CONFIG['db']['password'],
+            "connect_timeout": 10,
+        }
+
+        schema = self.parameterAsString(parameters, self.SCHEMA, context)
+        table = self.parameterAsString(parameters, self.TABLE, context)
+        full_table = f'"{schema}"."{table}"'
+        export_dir = self.parameterAsString(parameters, self.EXPORT_DIR, context) or os.path.expanduser(
+            '~/qgis_stat_exports')
+        os.makedirs(export_dir, exist_ok=True)
+        export_charts = self.parameterAsBoolean(parameters, self.EXPORT_CHARTS, context)
+
+        conn = psycopg2.connect(**connection_params)
+        conn.autocommit = True
+        cursor = conn.cursor()
+
+        fields = ['ywlx', 'sjlx', 'glbm']
+        stat_results = {}
+
+        for field in fields:
+            feedback.pushInfo(f"正在统计字段:{field}")
+            cursor.execute(f"""
+                SELECT {field}, COUNT(*) FROM {full_table}
+                WHERE {field} IS NOT NULL
+                GROUP BY {field}
+                ORDER BY COUNT(*) DESC
+            """)
+            data = cursor.fetchall()
+            stat_results[field] = data
+            for row in data:
+                feedback.pushInfo(f"{row[0]}: {row[1]}")
+
+        feedback.pushInfo("正在统计数据文件大小...")
+        size_map = {'vector': 0, 'raster': 0, 'table': 0, 'osgb': 0}
+        found_tables = {'vector': 0, 'raster': 0, 'table': 0, 'osgb': 0}
+        missing_tables = {'vector': 0, 'raster': 0, 'table': 0, 'osgb': 0}
+
+        # 特别处理 osgb 类型的数据
+        feedback.pushInfo("正在处理 osgb 类型数据...")
+        cursor.execute(f"""
+            SELECT id, sjywz
+            FROM {full_table}
+            WHERE sjlx = 'osgb' AND sjywz IS NOT NULL
+        """)
+        osgb_rows = cursor.fetchall()
+
+        feedback.pushInfo(f"找到 {len(osgb_rows)} 条 osgb 数据记录")
+
+        # 为 osgb 类型计算总大小
+        total_osgb_size = 0
+        osgb_files_count = 0
+        missing_osgb_files = 0
+        osgb_file_sizes = []  # 用于统计各个文件的大小
+
+        for osgb_id, sjywz in osgb_rows:
+            if not sjywz:
+                missing_osgb_files += 1
+                feedback.pushWarning(f"ID为 {osgb_id} 的 osgb 记录 sjywz 字段为空")
+                continue
+
+            try:
+                # 检查文件路径是否存在
+                if not os.path.exists(sjywz):
+                    missing_osgb_files += 1
+                    feedback.pushWarning(f"找不到 osgb 文件路径: {sjywz}")
+                    continue
+
+                # 如果 sjywz 是文件夹,则计算文件夹总大小
+                if os.path.isdir(sjywz):
+                    folder_size = 0
+                    for dirpath, dirnames, filenames in os.walk(sjywz):
+                        for filename in filenames:
+                            file_path = os.path.join(dirpath, filename)
+                            if os.path.exists(file_path):
+                                folder_size += os.path.getsize(file_path)
+
+                    file_size = folder_size
+                    feedback.pushInfo(f"OSGB文件夹 {sjywz} 总大小: {round(file_size / 1024 / 1024, 2)} MB")
+                else:
+                    # 如果 sjywz 是文件,则直接获取文件大小
+                    file_size = os.path.getsize(sjywz)
+                    feedback.pushInfo(f"OSGB文件 {sjywz} 大小: {round(file_size / 1024 / 1024, 2)} MB")
+
+                total_osgb_size += file_size
+                osgb_files_count += 1
+                osgb_file_sizes.append((sjywz, file_size))
+
+            except Exception as e:
+                missing_osgb_files += 1
+                feedback.pushWarning(f"读取 osgb 文件 {sjywz} 失败: {str(e)}")
+
+        # 更新 osgb 类型的统计信息
+        size_map['osgb'] = total_osgb_size
+        found_tables['osgb'] = osgb_files_count
+        missing_tables['osgb'] = missing_osgb_files
+
+        # 输出 osgb 统计结果
+        feedback.pushInfo(f"\nOSGB 文件统计结果:")
+        feedback.pushInfo(f"总文件数: {osgb_files_count}")
+        feedback.pushInfo(f"缺失文件数: {missing_osgb_files}")
+        feedback.pushInfo(f"总大小: {round(total_osgb_size / 1024 / 1024, 2)} MB")
+
+        # 输出最大的几个 osgb 文件
+        if osgb_file_sizes:
+            osgb_file_sizes.sort(key=lambda x: x[1], reverse=True)
+            feedback.pushInfo("\n最大的5个 OSGB 文件:")
+            for i, (file_path, size) in enumerate(osgb_file_sizes[:5], 1):
+                feedback.pushInfo(f"{i}. {file_path}: {round(size / 1024 / 1024, 2)} MB")
+
+        # 处理其他数据类型
+        cursor.execute(f"""
+            SELECT sjlx, name, sjywz
+            FROM {full_table}
+            WHERE sjlx IN ('vector', 'raster', 'table') AND sjlx != 'osgb'
+        """)
+        rows = cursor.fetchall()
+
+        for sjlx, name, sjywz in rows:
+            try:
+                if sjlx in ['vector', 'raster']:
+                    if not name:
+                        continue
+                    table_name_candidates = self.get_table_name_candidates(name)
+                else:  # sjlx == 'table'
+                    if not sjywz:
+                        continue
+                    table_name_candidates = self.get_table_name_candidates(sjywz)
+
+                size_found = False
+                for full_target in table_name_candidates:
+                    try:
+                        cursor.execute("SELECT pg_total_relation_size(%s);", (full_target,))
+                        size = cursor.fetchone()
+                        if size and size[0]:
+                            size_map[sjlx] += size[0]
+                            found_tables[sjlx] += 1
+                            size_found = True
+                            break
+                    except Exception:
+                        continue
+
+                if not size_found:
+                    missing_tables[sjlx] += 1
+                    feedback.pushWarning(f"无法找到表:{name if sjlx in ['vector', 'raster'] else sjywz}")
+
+            except Exception as e:
+                feedback.pushInfo(f"处理表时出错:{name if sjlx in ['vector', 'raster'] else sjywz},错误信息:{str(e)}")
+
+        feedback.pushInfo("\n表大小统计结果:")
+        data_type_names = {
+            'vector': '矢量数据',
+            'raster': '栅格数据',
+            'table': '附件资料',
+            'osgb': '三维数据'
+        }
+        for sjlx in size_map:
+            feedback.pushInfo(
+                f"{data_type_names[sjlx]}: 找到 {found_tables[sjlx]} 个表/文件,缺失 {missing_tables[sjlx]} 个,总大小 {round(size_map[sjlx] / 1024 / 1024, 2)} MB")
+
+        conn.close()
+
+        # 图表绘制
+        set_matplotlib_font()
+        timestamp = time.strftime("%Y%m%d_%H%M%S")
+        fig, axes = plt.subplots(2, 2, figsize=(14, 12))
+
+        field_names = {'ywlx': '业务类型', 'sjlx': '数据类型', 'glbm': '管理部门'}
+
+        for idx, field in enumerate(fields):
+            data = stat_results.get(field, [])
+            labels = [str(r[0]) for r in data]
+            sizes = [r[1] for r in data]
+
+            ax = axes[idx // 2][idx % 2]
+            if field == 'ywlx':
+                self.plot_donut(ax, labels, sizes, f"{field_names[field]} 分布")
+            elif field == 'glbm':
+                self.plot_line(ax, labels, sizes, f"{field_names[field]} 分布")
+            else:
+                self.plot_pie(ax, labels, sizes, f"{field_names[field]} 分布")
+
+        size_labels = ['矢量数据', '栅格数据', '附件资料', '三维数据']
+        original_keys = ['vector', 'raster', 'table', 'osgb']
+        size_values = [round(size_map[key] / 1024 / 1024, 2) for key in original_keys]
+        self.plot_bar(axes[1][1], size_labels, size_values, "数据类型大小 (MB)")
+
+        plt.suptitle("数据字段分布与存储大小统计", fontsize=16)
+        plt.tight_layout(rect=[0, 0.03, 1, 0.95])
+
+        if export_charts:
+            png_path = os.path.join(export_dir, f"data_storage_stats_{timestamp}.png")
+            plt.savefig(png_path, dpi=300)
+            feedback.pushInfo(f"图表已保存至:{png_path}")
+
+        self.show_matplotlib_dialog(fig)
+        return {'结果': '统计完成'}
+
+    def get_table_name_candidates(self, table_ref):
+        candidates = [table_ref]
+        if '.' in table_ref:
+            parts = table_ref.split('.')
+            if len(parts) == 2:
+                candidates.extend([
+                    f'"{parts[0]}"."{parts[1]}"',
+                    f'{parts[0]}."{parts[1]}"',
+                    f'"{parts[0]}".{parts[1]}'
+                ])
+        if '_' in table_ref:
+            candidates.append(table_ref.split('_', 1)[1])
+        if table_ref.startswith('hhht_'):
+            candidates.append(table_ref[5:])
+        return list(set(candidates))
+
+    def plot_donut(self, ax, labels, sizes, title):
+        if not sizes or sum(sizes) == 0:
+            ax.axis('off')
+            ax.set_title(f"{title}\n(无数据)")
+            return
+        if len(labels) > 6:
+            others_sum = sum(sizes[5:])
+            labels = labels[:5] + ["其他"]
+            sizes = sizes[:5] + [others_sum]
+        wedges, texts, autotexts = ax.pie(
+            sizes, labels=labels, autopct='%1.1f%%', startangle=90,
+            wedgeprops=dict(width=0.4), textprops={'fontsize': 9})
+        for i, autotext in enumerate(autotexts):
+            autotext.set_text(f'{sizes[i]}')
+        ax.set_title(title)
+        ax.axis('equal')
+        ax.legend(wedges, [f'{l}: {s}' for l, s in zip(labels, sizes)], loc="best", fontsize=8)
+
+    def plot_pie(self, ax, labels, sizes, title):
+        if not sizes or sum(sizes) == 0:
+            ax.axis('off')
+            ax.set_title(f"{title}\n(无数据)")
+            return
+        if len(labels) > 6:
+            others_sum = sum(sizes[5:])
+            labels = labels[:5] + ["其他"]
+            sizes = sizes[:5] + [others_sum]
+        wedges, texts, autotexts = ax.pie(
+            sizes, labels=labels, autopct='%1.1f%%', startangle=90,
+            textprops={'fontsize': 9})
+        for i, autotext in enumerate(autotexts):
+            autotext.set_text(f'{sizes[i]}')
+        ax.set_title(title)
+        ax.axis('equal')
+        ax.legend(wedges, [f'{l}: {s}' for l, s in zip(labels, sizes)], loc="best", fontsize=8)
+
+    def plot_line(self, ax, labels, sizes, title):
+        if not sizes:
+            ax.axis('off')
+            ax.set_title(f"{title}\n(无数据)")
+            return
+        ax.plot(labels, sizes, marker='o', linestyle='-', color='teal')
+        ax.set_title(title)
+        ax.set_xlabel("管理部门")
+        ax.set_ylabel("数量")
+        for i, (x, y) in enumerate(zip(labels, sizes)):
+            ax.annotate(f'{y}', (x, y), textcoords="offset points", xytext=(0, 5), ha='center', fontsize=9)
+        ax.tick_params(axis='x', rotation=30)
+
+    def plot_bar(self, ax, labels, sizes, title):
+        if not sizes or sum(sizes) == 0:
+            ax.axis('off')
+            ax.set_title(f"{title}\n(无数据)")
+            return
+        bars = ax.bar(labels, sizes, color='cornflowerblue')
+        ax.set_title(title)
+        ax.set_xlabel('数据类型')
+        ax.set_ylabel('大小 (MB)')
+        for bar in bars:
+            height = bar.get_height()
+            ax.annotate(f'{height:.2f} MB',
+                        xy=(bar.get_x() + bar.get_width() / 2, height),
+                        xytext=(0, 3),
+                        textcoords="offset points",
+                        ha='center', va='bottom', fontsize=9)
+
+    def show_matplotlib_dialog(self, fig):
+        dialog = QDialog(iface.mainWindow())
+        dialog.setWindowTitle("统计图表")
+        layout = QVBoxLayout()
+        canvas = FigureCanvas(fig)
+        layout.addWidget(canvas)
+        dialog.setLayout(layout)
+        dialog.resize(1200, 900)
+        dialog.exec_()
+
+    def name(self):
+        return 'DataStorageStatistics'
+
+    def displayName(self):
+        return '数据统计'
+
+    def group(self):
+        return '数据分析工具'
+
+    def groupId(self):
+        return 'data_storage_analysis'
+
+    def createInstance(self):
+        return DataStorageStatistics()

+ 3 - 0
processing/algs/gdal/GdalAlgorithm.py

@@ -175,6 +175,9 @@ class GdalAlgorithm(QgsProcessingAlgorithm):
 
     def processAlgorithm(self, parameters, context, feedback):
         print("############进入GdalAlgorithm组件的processAlgorithm################")
+        # if parameters.get("Metadata_storage") is not None and parameters.get("Metadata_storage") == True:
+        #
+        #     return;
         # TODO wanger GDB入库
         if parameters.get("INPUTGDB") is not None:
             res = self.gdbimport(parameters, context, feedback, executing=True)

+ 9 - 0
processing/algs/gdal/GdalAlgorithmProvider.py

@@ -128,6 +128,11 @@ from .rasterize import rasterize
 from .rasterize_over import rasterize_over
 from .rasterize_over_fixed_value import rasterize_over_fixed_value
 from .rastertopostgislist import Ogr2PostGisList
+from .ImportOSGBToPostGIS import ImportOSGBToPostGIS
+from .ImportTableToPostGIS import ImportTableToPostGIS
+from .ImportSingleOSGBToPostGIS import ImportSingleOSGBToPostGIS
+from .DataStorageStatistics import DataStorageStatistics
+
 from .rearrange_bands import rearrange_bands
 from .retile import retile
 from .rgb2pct import rgb2pct
@@ -217,6 +222,10 @@ class GdalAlgorithmProvider(QgsProcessingProvider):
             GridLinear(),
             GridNearestNeighbor(),
             hillshade(),
+            ImportOSGBToPostGIS(),
+            ImportTableToPostGIS(),
+            ImportSingleOSGBToPostGIS(),
+            DataStorageStatistics(),
             merge(),
             nearblack(),
             pct2rgb(),

+ 237 - 0
processing/algs/gdal/ImportOSGBToPostGIS.py

@@ -0,0 +1,237 @@
+__author__ = 'liying'
+__date__ = 'May 2025'
+__copyright__ = '(C) 2025, liying'
+
+import uuid
+import siwei_config
+from qgis.core import (
+    QgsProcessingAlgorithm,
+    QgsProcessingParameterFile,
+    QgsProcessingParameterProviderConnection,
+    QgsProcessingParameterDatabaseSchema,
+    QgsProcessingParameterString,
+    QgsProcessingParameterEnum,
+    QgsProcessingParameterDateTime,
+    QgsProcessingParameterBoolean,
+    QgsProcessingParameterCrs,
+)
+from qgis.PyQt.QtCore import QCoreApplication
+import os
+import psycopg2
+from psycopg2 import sql
+from datetime import datetime
+from processing.tools.PostgreSQL.PostgreSQL import PostgreSQL
+
+class ImportOSGBToPostGIS(QgsProcessingAlgorithm):
+    DATABASE = 'DATABASE'
+    SCHEMA = 'SCHEMA'
+    TABLE = 'TABLE'
+    INPUT_DIR = 'INPUT_DIR'
+    VECTOR_SJLY = 'VECTOR_SJLY'
+    VECTOR_YEAR = 'VECTOR_YEAR'
+    VECTOR_YWLX = 'VECTOR_YWLX'
+    VECTOR_GLBM = 'VECTOR_GLBM'
+    VECTOR_ZYML = 'VECTOR_ZYML'
+    T_SRS = 'T_SRS'
+    RASTER_T = 'RASTER_T'
+    SOURCE_TYPE = 'SOURCE_TYPE'
+    Metadata_storage = 'Metadata_storage'
+    INDEX = 'INDEX'
+
+    RASTER_T_LIST = ['不分块', '128', '256', '512']
+
+    @staticmethod
+    def tr(string):
+        return QCoreApplication.translate('ImportOSGBToPostGIS', string)
+
+    def initAlgorithm(self, config=None):
+        self.addParameter(QgsProcessingParameterProviderConnection(
+            self.DATABASE,
+            self.tr('数据库连接'),
+            'postgres',
+            defaultValue=siwei_config.CONFIG['db']['host']
+        ))
+
+        self.addParameter(QgsProcessingParameterDatabaseSchema(
+            self.SCHEMA,
+            self.tr('模式'),
+            connectionParameterName=self.DATABASE,
+            defaultValue='base'
+        ))
+
+        self.addParameter(QgsProcessingParameterString(
+            self.TABLE,
+            self.tr('表名'),
+            defaultValue='t_vector_storage'
+        ))
+
+        self.addParameter(QgsProcessingParameterFile(
+            self.INPUT_DIR,
+            self.tr('OSGB 数据目录'),
+            behavior=QgsProcessingParameterFile.Folder,
+            optional=False
+        ))
+
+        self.addParameter(QgsProcessingParameterString(
+            self.VECTOR_SJLY,
+            self.tr('数据来源'),
+            optional=False
+        ))
+
+        self.addParameter(QgsProcessingParameterDateTime(
+            self.VECTOR_YEAR,
+            self.tr('数据时效'),
+            type=QgsProcessingParameterDateTime.Type.Date,
+            optional=False
+        ))
+
+        pgconn = PostgreSQL(schema='base')
+        rows = pgconn.getVectorYwlx()
+        self.ywlxs = [row[0] for row in rows]
+        self.addParameter(QgsProcessingParameterEnum(self.VECTOR_YWLX,
+                                                     self.tr('业务类型'),
+                                                     options=self.ywlxs,
+                                                     optional=False))
+        rows = pgconn.getDeptList()
+        self.depts = [row[0] for row in rows]
+        self.addParameter(QgsProcessingParameterEnum(self.VECTOR_GLBM,
+                                                     self.tr('管理部门'),
+                                                     options=self.depts))
+        rows = pgconn.getVectorZyml()
+        self.zymls = [row[1] for row in rows]
+        self.addParameter(QgsProcessingParameterEnum(self.VECTOR_ZYML,
+                                                     self.tr('资源目录'),
+                                                     options=self.zymls))
+
+        self.addParameter(QgsProcessingParameterCrs(
+            self.T_SRS,
+            self.tr('指定入库坐标系'),
+            defaultValue='EPSG:4326',
+            optional=False
+        ))
+
+        self.addParameter(QgsProcessingParameterEnum(
+            self.RASTER_T,
+            self.tr('分块存储大小'),
+            options=self.RASTER_T_LIST,
+            defaultValue=0,
+            optional=False
+        ))
+
+        self.addParameter(QgsProcessingParameterString(
+            self.SOURCE_TYPE,
+            self.tr('数据源类型'),
+            defaultValue='osgb',
+            optional=False
+        ))
+
+        self.addParameter(QgsProcessingParameterBoolean(
+            self.Metadata_storage,
+            self.tr('元数据入库'),
+            defaultValue=True
+        ))
+
+        self.addParameter(QgsProcessingParameterBoolean(
+            self.INDEX,
+            self.tr('创建空间索引'),
+            defaultValue=True
+        ))
+
+    def processAlgorithm(self, parameters, context, feedback):
+        connection_name = self.parameterAsString(parameters, self.DATABASE, context)
+        schema = self.parameterAsString(parameters, self.SCHEMA, context)
+        table = self.parameterAsString(parameters, self.TABLE, context)
+        input_dir = self.parameterAsString(parameters, self.INPUT_DIR, context)
+        data_source = self.parameterAsString(parameters, self.VECTOR_SJLY, context)
+
+        # 转换 QDateTime 为 Python date
+        qdatetime = self.parameterAsDateTime(parameters, self.VECTOR_YEAR, context)
+        data_year = qdatetime.date().toPyDate().year  # 只保留年份(整数)
+
+        business_type_index = self.parameterAsInt(parameters, self.VECTOR_YWLX, context)
+        management_dept_index = self.parameterAsInt(parameters, self.VECTOR_GLBM, context)
+        resource_catalog_index = self.parameterAsInt(parameters, self.VECTOR_ZYML, context)
+        t_srs = self.parameterAsCrs(parameters, self.T_SRS, context)
+        raster_t_index = self.parameterAsInt(parameters, self.RASTER_T, context)
+        source_type = self.parameterAsString(parameters, self.SOURCE_TYPE, context)
+        metadata_storage = self.parameterAsBoolean(parameters, self.Metadata_storage, context)
+        create_index = self.parameterAsBoolean(parameters, self.INDEX, context)
+
+        pgconn = PostgreSQL(schema='base')
+        business_type = pgconn.getVectorYwlx()[business_type_index][0]
+        management_dept = pgconn.getDeptList()[management_dept_index][0]
+        resource_catalog_name = pgconn.getVectorZyml()[resource_catalog_index][1]
+
+        # 查资源目录 bsm
+        query = f"SELECT bsm FROM t_vector_zyml WHERE name = '{resource_catalog_name}'"
+        result = pgconn.execute(query)
+        if result and len(result) > 0:
+            resource_catalog = result[0][0]
+        else:
+            feedback.reportError(f"未找到名称为 {resource_catalog_name} 的资源目录 BSM 值。")
+            raise Exception(f"资源目录未找到:{resource_catalog_name}")
+
+        # 连接数据库
+        conn_params = {
+            'host': siwei_config.CONFIG['db']['host'],
+            'port': siwei_config.CONFIG['db']['port'],
+            'dbname': siwei_config.CONFIG['db']['name'],
+            'user': siwei_config.CONFIG['db']['user'],
+            'password': siwei_config.CONFIG['db']['password'],
+            "connect_timeout": 10,
+        }
+
+        feedback.pushInfo("连接数据库...")
+        conn = psycopg2.connect(**conn_params)
+        cursor = conn.cursor()
+
+        feedback.pushInfo("插入数据目录元数据...")
+        id = str(uuid.uuid4())
+        nm = os.path.basename(input_dir)
+
+        insert_sql = sql.SQL("""
+            INSERT INTO {schema}.{table}
+                (sjywz, rksj, year, ywlx, glbm, id, sjlx, xmlx, name, sjly)
+            VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
+        """).format(
+            schema=sql.Identifier(schema),
+            table=sql.Identifier(table)
+        )
+
+        cursor.execute(insert_sql, (
+            input_dir,
+            datetime.now().date(),
+            data_year,
+            business_type,
+            management_dept,
+            id,
+            source_type,
+            resource_catalog,
+            nm,
+            data_source
+        ))
+
+        conn.commit()
+        cursor.close()
+        conn.close()
+
+        feedback.pushInfo("数据目录元数据已成功插入。")
+        return {}
+
+    def name(self):
+        return "importosgbtopostgis"
+
+    def displayName(self):
+        return "三维数据入库"
+
+    def group(self):
+        return "三维数据工具"
+
+    def groupId(self):
+        return "osgbtools"
+
+    def shortHelpString(self):
+        return "扫描 OSGB 文件目录,将路径、包围盒等元数据信息写入 PostGIS 数据库。"
+
+    def createInstance(self):
+        return ImportOSGBToPostGIS()

+ 249 - 0
processing/algs/gdal/ImportSingleOSGBToPostGIS.py

@@ -0,0 +1,249 @@
+__author__ = 'liying'
+__date__ = 'May 2025'
+__copyright__ = '(C) 2025, liying'
+
+import os
+import uuid
+from datetime import datetime
+
+import psycopg2
+import siwei_config
+from processing.tools.PostgreSQL.PostgreSQL import PostgreSQL
+from psycopg2 import sql
+from qgis.PyQt.QtCore import QCoreApplication
+from qgis.core import (
+    QgsProcessingAlgorithm,
+    QgsProcessingParameterFile,
+    QgsProcessingParameterProviderConnection,
+    QgsProcessingParameterDatabaseSchema,
+    QgsProcessingParameterString,
+    QgsProcessingParameterEnum,
+    QgsProcessingParameterDateTime,
+    QgsProcessingParameterBoolean,
+    QgsProcessingParameterCrs
+)
+
+
class ImportSingleOSGBToPostGIS(QgsProcessingAlgorithm):
    """Register a directory of OSGB models in the PostGIS data catalog.

    Inserts one directory-level metadata row, then walks the directory and
    inserts one row per individual ``.osgb`` file, into ``<schema>.<table>``
    (default ``base.t_vector_storage``).
    """

    # Processing-framework parameter keys.
    DATABASE = 'DATABASE'
    SCHEMA = 'SCHEMA'
    TABLE = 'TABLE'
    INPUT_DIR = 'INPUT_DIR'
    VECTOR_SJLY = 'VECTOR_SJLY'    # data source (free text)
    VECTOR_YEAR = 'VECTOR_YEAR'    # data vintage (date; only the year is stored)
    VECTOR_YWLX = 'VECTOR_YWLX'    # business type (enum loaded from DB)
    VECTOR_GLBM = 'VECTOR_GLBM'    # managing department (enum loaded from DB)
    VECTOR_ZYML = 'VECTOR_ZYML'    # resource catalog (enum loaded from DB)
    T_SRS = 'T_SRS'                # NOTE(review): declared but never read in processAlgorithm — confirm intent
    RASTER_T = 'RASTER_T'          # NOTE(review): declared but never read in processAlgorithm — confirm intent
    SOURCE_TYPE = 'SOURCE_TYPE'
    Metadata_storage = 'Metadata_storage'  # NOTE(review): declared but never read — confirm intent
    INDEX = 'INDEX'                # NOTE(review): declared but never read — confirm intent

    RASTER_T_LIST = ['不分块', '128', '256', '512']

    @staticmethod
    def tr(string):
        """Route UI strings through Qt's translation machinery."""
        return QCoreApplication.translate('ImportSingleOSGBToPostGIS', string)

    def initAlgorithm(self, config=None):
        """Declare all user-facing parameters of the algorithm."""
        self.addParameter(QgsProcessingParameterProviderConnection(
            self.DATABASE,
            self.tr('数据库连接'),
            'postgres',
            defaultValue=siwei_config.CONFIG['db']['host']
        ))

        self.addParameter(QgsProcessingParameterDatabaseSchema(
            self.SCHEMA,
            self.tr('模式'),
            connectionParameterName=self.DATABASE,
            defaultValue='base'
        ))

        self.addParameter(QgsProcessingParameterString(
            self.TABLE,
            self.tr('表名'),
            defaultValue='t_vector_storage'
        ))

        self.addParameter(QgsProcessingParameterFile(
            self.INPUT_DIR,
            self.tr('OSGB 数据目录'),
            behavior=QgsProcessingParameterFile.Folder,
            optional=False
        ))

        self.addParameter(QgsProcessingParameterString(
            self.VECTOR_SJLY,
            self.tr('数据来源'),
            optional=False
        ))

        self.addParameter(QgsProcessingParameterDateTime(
            self.VECTOR_YEAR,
            self.tr('数据时效'),
            type=QgsProcessingParameterDateTime.Type.Date,
            optional=False
        ))

        # Enum options are loaded from the catalog database at dialog build
        # time; processAlgorithm resolves the selected index back to a value.
        pgconn = PostgreSQL(schema='base')
        rows = pgconn.getVectorYwlx()
        self.ywlxs = [row[0] for row in rows]
        self.addParameter(QgsProcessingParameterEnum(name=self.VECTOR_YWLX,
                                                     description=self.tr('业务类型'), options=self.ywlxs,
                                                     optional=False))
        rows = pgconn.getDeptList()
        self.depts = [row[0] for row in rows]
        self.addParameter(QgsProcessingParameterEnum(name=self.VECTOR_GLBM,
                                                     description=self.tr('管理部门'), options=self.depts))

        rows = pgconn.getVectorZyml()
        self.zymls = [row[1] for row in rows]
        self.addParameter(QgsProcessingParameterEnum(name=self.VECTOR_ZYML,
                                                     description=self.tr('资源目录'), options=self.zymls))

        self.addParameter(QgsProcessingParameterCrs(
            self.T_SRS,
            self.tr('指定入库坐标系'),
            defaultValue='EPSG:4326',
            optional=False
        ))

        self.addParameter(QgsProcessingParameterEnum(
            self.RASTER_T,
            self.tr('分块存储大小'),
            options=self.RASTER_T_LIST,
            defaultValue=0,
            optional=False
        ))

        self.addParameter(QgsProcessingParameterString(
            self.SOURCE_TYPE,
            self.tr('数据源类型'),
            defaultValue='osgb',
            optional=False
        ))

        self.addParameter(QgsProcessingParameterBoolean(
            self.Metadata_storage,
            self.tr('元数据入库'),
            defaultValue=True
        ))

        self.addParameter(QgsProcessingParameterBoolean(
            self.INDEX,
            self.tr('创建空间索引'),
            defaultValue=True
        ))

    def processAlgorithm(self, parameters, context, feedback):
        """Insert directory-level and per-file OSGB metadata rows.

        Returns an empty dict on success; database errors propagate to the
        processing framework after the connection has been closed.
        """
        schema = self.parameterAsString(parameters, self.SCHEMA, context)
        table = self.parameterAsString(parameters, self.TABLE, context)
        input_dir = self.parameterAsString(parameters, self.INPUT_DIR, context)
        data_source = self.parameterAsString(parameters, self.VECTOR_SJLY, context)
        source_type = self.parameterAsString(parameters, self.SOURCE_TYPE, context)

        # QDateTime -> plain Python int: only the vintage year is stored.
        qdatetime = self.parameterAsDateTime(parameters, self.VECTOR_YEAR, context)
        data_year = qdatetime.date().toPyDate().year

        # Enum parameters come back as indexes into the option lists that
        # were loaded from the database; resolve them to actual values.
        business_type_index = self.parameterAsInt(parameters, self.VECTOR_YWLX, context)
        management_dept_index = self.parameterAsInt(parameters, self.VECTOR_GLBM, context)
        resource_catalog_index = self.parameterAsInt(parameters, self.VECTOR_ZYML, context)

        pgconn = PostgreSQL(schema='base')
        business_type = pgconn.getVectorYwlx()[business_type_index][0]
        management_dept = pgconn.getDeptList()[management_dept_index][0]
        resource_catalog = pgconn.getVectorZyml()[resource_catalog_index][1]

        # Resolve the catalog display name to its BSM code. The name is
        # interpolated into SQL, so escape single quotes (was unescaped —
        # injection / broken statement risk); prefer a parameterized query
        # if PostgreSQL.execute() supports one.
        safe_name = resource_catalog.replace("'", "''")
        result = pgconn.execute(
            f"SELECT bsm FROM t_vector_zyml WHERE name = '{safe_name}'")
        if result:
            resource_catalog = result[0][0]
        else:
            feedback.pushWarning(f"未找到名称为 {resource_catalog} 的BSM值")

        db_config = siwei_config.CONFIG['db']
        conn_params = {
            'host': db_config['host'],
            'port': db_config['port'],
            'dbname': db_config['name'],
            'user': db_config['user'],
            'password': db_config['password'],
            "connect_timeout": 10,
        }

        # One statement serves both the directory row and the per-file rows.
        insert_sql = sql.SQL("""
            INSERT INTO {schema}.{table}
                (sjywz, rksj, year, ywlx, glbm, id, sjlx, xmlx, name, sjly)
            VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
        """).format(
            schema=sql.Identifier(schema),
            table=sql.Identifier(table)
        )

        today = datetime.now().date()
        conn = psycopg2.connect(**conn_params)
        try:
            cursor = conn.cursor()
            try:
                feedback.pushInfo("插入目录级元数据记录...")
                cursor.execute(insert_sql, (
                    input_dir,
                    today,
                    data_year,
                    business_type,
                    management_dept,
                    str(uuid.uuid4()),  # directory-level record id (was: shadowed builtin `id`)
                    source_type,
                    resource_catalog,
                    os.path.basename(input_dir),
                    data_source
                ))
                conn.commit()

                feedback.pushInfo("扫描目录并插入单体模型记录...")
                model_rows = []
                for root, _dirs, files in os.walk(input_dir):
                    for file_name in files:
                        if file_name.lower().endswith('.osgb'):
                            model_rows.append((
                                os.path.join(root, file_name),
                                today,
                                data_year,
                                business_type,
                                management_dept,
                                str(uuid.uuid4()),  # per-model record id
                                source_type,
                                resource_catalog,
                                file_name,
                                data_source
                            ))
                if model_rows:
                    # One batched round trip instead of one execute per file.
                    cursor.executemany(insert_sql, model_rows)
                conn.commit()
            finally:
                cursor.close()
        finally:
            # Always release the connection (it leaked on error before).
            conn.close()

        feedback.pushInfo("所有单体模型路径已成功插入数据库。")
        return {}

    def name(self):
        """Unique machine-readable identifier of this algorithm."""
        return "importsingleosgbtopostgis"

    def displayName(self):
        """Human-readable algorithm title shown in the toolbox."""
        return "单体三维模型入库"

    def group(self):
        """Display name of the toolbox group this algorithm belongs to."""
        return "三维数据工具"

    def groupId(self):
        """Machine-readable id of the toolbox group."""
        return "osgbtools"

    def shortHelpString(self):
        """One-line help text shown in the algorithm dialog."""
        return "扫描 OSGB 文件目录,将单体模型文件路径等元数据信息写入 PostGIS 数据库。"

    def createInstance(self):
        """Return a fresh instance for the processing framework to run."""
        return ImportSingleOSGBToPostGIS()

+ 205 - 0
processing/algs/gdal/ImportTableToPostGIS.py

@@ -0,0 +1,205 @@
+__author__ = 'liying'
+__date__ = 'May 2025'
+__copyright__ = '(C) 2025, liying'
+
+
+import uuid
+import siwei_config
+from qgis.core import (
+    QgsProcessingAlgorithm,
+    QgsProcessingParameterFile,
+    QgsProcessingParameterProviderConnection,
+    QgsProcessingParameterDatabaseSchema,
+    QgsProcessingParameterString,
+    QgsProcessingParameterEnum,
+    QgsProcessingParameterDateTime,
+    QgsProcessingParameterBoolean,
+)
+from PyQt5.QtCore import QCoreApplication
+import os
+import psycopg2
+from psycopg2 import sql
+from pandas import read_csv, read_excel
+from datetime import datetime
+from processing.tools.PostgreSQL.PostgreSQL import PostgreSQL
+
+
class ImportTableToPostGIS(QgsProcessingAlgorithm):
    """Import a CSV/Excel spreadsheet as a brand-new PostgreSQL table.

    Every column is created as TEXT and every cell value is stringified.
    A metadata row describing the new table can optionally be written to
    ``<schema>.t_vector_storage``.
    """

    # Processing-framework parameter keys.
    DATABASE = 'DATABASE'
    SCHEMA = 'SCHEMA'
    TABLE = 'TABLE'
    INPUT_FILE = 'INPUT_FILE'
    VECTOR_SJLY = 'VECTOR_SJLY'    # data source (free text)
    VECTOR_YEAR = 'VECTOR_YEAR'    # data vintage (date; only the year is stored)
    VECTOR_YWLX = 'VECTOR_YWLX'    # business type (enum loaded from DB)
    VECTOR_GLBM = 'VECTOR_GLBM'    # managing department (enum loaded from DB)
    VECTOR_ZYML = 'VECTOR_ZYML'    # resource catalog (enum loaded from DB)
    SOURCE_TYPE = 'SOURCE_TYPE'
    Metadata_storage = 'Metadata_storage'

    def initAlgorithm(self, config=None):
        """Declare all user-facing parameters of the algorithm."""
        self.addParameter(QgsProcessingParameterProviderConnection(
            self.DATABASE, '数据库连接', 'postgres', defaultValue=siwei_config.CONFIG['db']['host']))
        self.addParameter(QgsProcessingParameterDatabaseSchema(
            self.SCHEMA, '模式', connectionParameterName=self.DATABASE, defaultValue='base'))
        self.addParameter(QgsProcessingParameterString(
            self.TABLE, '导入目标表名(新建)', defaultValue='t_table'))
        self.addParameter(QgsProcessingParameterFile(
            self.INPUT_FILE, '表格文件(CSV或Excel)', optional=False))
        self.addParameter(QgsProcessingParameterString(
            self.VECTOR_SJLY, '数据来源', optional=False))
        self.addParameter(QgsProcessingParameterDateTime(
            self.VECTOR_YEAR, '数据时效', type=QgsProcessingParameterDateTime.Type.Date, optional=False))

        # Enum options are loaded from the catalog database at dialog build
        # time; processAlgorithm resolves the selected index back to a value.
        pgconn = PostgreSQL(schema='base')
        self.ywlxs = [row[0] for row in pgconn.getVectorYwlx()]
        self.addParameter(QgsProcessingParameterEnum(
            self.VECTOR_YWLX, '业务类型', options=self.ywlxs))
        self.depts = [row[0] for row in pgconn.getDeptList()]
        self.addParameter(QgsProcessingParameterEnum(
            self.VECTOR_GLBM, '管理部门', options=self.depts))
        self.zymls = [row[1] for row in pgconn.getVectorZyml()]
        self.addParameter(QgsProcessingParameterEnum(
            self.VECTOR_ZYML, '资源目录', options=self.zymls))

        self.addParameter(QgsProcessingParameterString(
            self.SOURCE_TYPE, '数据源类型', defaultValue='table', optional=False))
        self.addParameter(QgsProcessingParameterBoolean(
            self.Metadata_storage, '是否写入数据目录', defaultValue=True))

    @staticmethod
    def _unique_table_name(cursor, schema, target_table, feedback):
        """Return target_table, or target_table_<n> if a table by that name
        already exists in the schema (n increments until a free name is found)."""
        candidate = target_table
        suffix = 0
        check_sql = sql.SQL(
            "SELECT EXISTS (SELECT 1 FROM information_schema.tables WHERE table_schema = %s AND table_name = %s)")
        while True:
            cursor.execute(check_sql, (schema, candidate))
            if not cursor.fetchone()[0]:
                return candidate
            suffix += 1
            candidate = f"{target_table}_{suffix}"
            feedback.pushInfo(f"表 {schema}.{target_table} 已存在,尝试使用新名称: {candidate}")

    def processAlgorithm(self, parameters, context, feedback):
        """Create the target table, bulk-insert the spreadsheet rows and
        optionally record the new table in the data catalog."""
        input_file = self.parameterAsString(parameters, self.INPUT_FILE, context)
        ext = os.path.splitext(input_file)[-1].lower()
        # CSV is read directly; everything else goes to pandas' Excel reader
        # (which raises a clear error on unsupported formats).
        df = read_csv(input_file) if ext == '.csv' else read_excel(input_file)

        schema = self.parameterAsString(parameters, self.SCHEMA, context)
        target_table = self.parameterAsString(parameters, self.TABLE, context)
        data_source = self.parameterAsString(parameters, self.VECTOR_SJLY, context)
        source_type = self.parameterAsString(parameters, self.SOURCE_TYPE, context)

        # QDateTime -> plain Python int: only the vintage year is stored.
        qdatetime = self.parameterAsDateTime(parameters, self.VECTOR_YEAR, context)
        data_year = qdatetime.date().toPyDate().year

        pgconn = PostgreSQL(schema='base')
        business_type = pgconn.getVectorYwlx()[self.parameterAsInt(parameters, self.VECTOR_YWLX, context)][0]
        management_dept = pgconn.getDeptList()[self.parameterAsInt(parameters, self.VECTOR_GLBM, context)][0]
        resource_catalog_name = pgconn.getVectorZyml()[self.parameterAsInt(parameters, self.VECTOR_ZYML, context)][1]

        # Resolve catalog name -> BSM code. The name is interpolated into
        # SQL, so escape single quotes (was unescaped — injection risk);
        # prefer a parameterized query if PostgreSQL.execute() supports one.
        safe_name = resource_catalog_name.replace("'", "''")
        bsm_result = pgconn.execute(
            f"SELECT bsm FROM t_vector_zyml WHERE name = '{safe_name}'")
        resource_catalog = bsm_result[0][0] if bsm_result else resource_catalog_name

        conn_params = {
            'host': siwei_config.CONFIG['db']['host'],
            'port': siwei_config.CONFIG['db']['port'],
            'dbname': siwei_config.CONFIG['db']['name'],
            'user': siwei_config.CONFIG['db']['user'],
            'password': siwei_config.CONFIG['db']['password'],
            "connect_timeout": 10,
        }
        conn = psycopg2.connect(**conn_params)
        try:
            cursor = conn.cursor()
            try:
                target_table_new = self._unique_table_name(cursor, schema, target_table, feedback)
                feedback.pushInfo(f"将使用表名: {schema}.{target_table_new}")

                # Normalise column names ONCE and use the same names for both
                # CREATE TABLE and INSERT. (The original stripped them only
                # for CREATE, so any column with stray whitespace broke the
                # subsequent INSERT.)
                columns = [str(col).strip() for col in df.columns]
                df.columns = columns

                # sql.Identifier quotes/escapes each name safely — no manual
                # f-string quoting (which broke on embedded double quotes).
                create_sql = sql.SQL("CREATE TABLE {schema}.{table} ({fields})").format(
                    schema=sql.Identifier(schema),
                    table=sql.Identifier(target_table_new),
                    fields=sql.SQL(', ').join(
                        sql.SQL('{} TEXT').format(sql.Identifier(col)) for col in columns
                    )
                )
                cursor.execute(create_sql)

                insert_sql = sql.SQL("""
                             INSERT INTO {schema}.{table} ({fields})
                             VALUES ({placeholders})
                             """).format(
                    schema=sql.Identifier(schema),
                    table=sql.Identifier(target_table_new),
                    fields=sql.SQL(', ').join(map(sql.Identifier, columns)),
                    placeholders=sql.SQL(', ').join(sql.Placeholder() * len(columns))
                )

                # Stringify once up front (was per-row inside the loop), then
                # insert in a single batched call.
                text_df = df.fillna("").astype(str)
                cursor.executemany(
                    insert_sql,
                    [tuple(row) for row in text_df.itertuples(index=False)]
                )

                feedback.pushInfo(f"数据已成功导入表 {schema}.{target_table_new}")

                if self.parameterAsBool(parameters, self.Metadata_storage, context):
                    feedback.pushInfo("插入数据目录元数据...")
                    table_fullname = f"{schema}.{target_table_new}"

                    metadata_insert_sql = sql.SQL("""
                                          INSERT INTO {schema}.t_vector_storage
                                              (sjywz, rksj, year, ywlx, glbm, id, sjlx, xmlx, name, sjly)
                                          VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
                                          """).format(schema=sql.Identifier(schema))

                    cursor.execute(metadata_insert_sql, (
                        table_fullname,  # physical location: <schema>.<table>
                        datetime.now().date(),
                        data_year,
                        business_type,
                        management_dept,
                        str(uuid.uuid4()),  # metadata record id (was: shadowed builtin `id`)
                        source_type,
                        resource_catalog,
                        table_fullname,
                        data_source
                    ))
                    feedback.pushInfo("数据目录元数据已成功插入。")

                conn.commit()
            finally:
                cursor.close()
        finally:
            # Always release the connection (it leaked on error before).
            conn.close()

        return {}

    def name(self):
        """Unique machine-readable identifier of this algorithm."""
        return "importtabletopostgis"

    def displayName(self):
        """Human-readable algorithm title shown in the toolbox."""
        return "表格数据入库"

    def group(self):
        """Display name of the toolbox group this algorithm belongs to."""
        return "表格数据工具"

    def groupId(self):
        """Machine-readable id of the toolbox group."""
        return "tabletools"

    def shortHelpString(self):
        """One-line help text shown in the algorithm dialog."""
        return "将 CSV 或 Excel 表格导入为 PostgreSQL 中的新表,并写入数据目录元数据。"

    def createInstance(self):
        """Return a fresh instance for the processing framework to run."""
        return ImportTableToPostGIS()

+ 1 - 1
processing/algs/gdal/rastertopostgislist.py

@@ -136,7 +136,7 @@ class Ogr2PostGisList(GdalAlgorithm):
         self.addParameter(table_param)
         # 数据来源
         self.addParameter(QgsProcessingParameterString(self.VECTOR_SJLY,
-                                                       self.tr('数据来源'), "",
+                                                       self.tr('数据来源'), defaultValue=None,
                                                        optional=False))
 
         self.addParameter(

+ 4 - 3
processing/app.py

@@ -4,12 +4,13 @@ import uuid
 import requests
 from PIL import Image
 from io import BytesIO
+import siwei_config
 
 app = Flask(__name__)
 
 # 设置静态文件夹(如果您想指定路径)
 app.config['UPLOAD_FOLDER'] = 'predict/output'
-host = 'http://192.168.60.2'
+host = siwei_config.CONFIG['host']
 
 uploadPath = 'predict/upload/'
 outputPath = 'predict/output/'
@@ -71,7 +72,7 @@ def save_image():
     img1 = data['img1']
     img2 = data['img2']
 
-    response1 = requests.get('http://192.168.60.63' + img1)
+    response1 = requests.get(host + img1)
     if response1.status_code != 200:
         return jsonify({
             "message": "图片1不存在",
@@ -83,7 +84,7 @@ def save_image():
     img1Path = uploadPath + str(uuid.uuid4()) + '.png'
     img1.save(img1Path, format='PNG')
 
-    response2 = requests.get('http://192.168.60.63' + img2)
+    response2 = requests.get(host + img2)
     if response2.status_code != 200:
         return jsonify({
             "message": "图片2不存在",

BIN
processing/images/dbms/CustomerMap.png


BIN
processing/images/dbms/export.png


BIN
processing/images/dbms/export2.png


BIN
processing/images/dbms/import.png


BIN
processing/images/dbms/importimage.png


BIN
processing/images/dbms/importvector.png


BIN
processing/images/dbms/logo.png


BIN
processing/images/dbms/map.png


BIN
processing/images/dbms/new.png


BIN
processing/images/dbms/publishserver.png


BIN
processing/images/dbms/refresh.png


BIN
processing/images/dbms/resource.png


BIN
processing/images/dbms/unselect.png


BIN
processing/images/dbms/view.png


+ 3 - 1
processing/tools/CustomWebView/Manager.py

@@ -11,6 +11,7 @@ from PyQt5.QtWebChannel import QWebChannel
 import sys
 from requests.auth import HTTPBasicAuth
 from PyQt5.QtCore import QObject
+import siwei_config
 
 # 创建一个QWebEngineProfile实例
 # profile = QWebEngineProfile.defaultProfile()
@@ -18,7 +19,8 @@ from PyQt5.QtCore import QObject
 
 # 创建一个QWebEnginePage实例
 # page = QWebEnginePage(QWebEngineProfile())
-pageURI = 'http://192.168.60.2:8082/index'
+
+pageURI = f"{siwei_config.CONFIG['host']}:{siwei_config.CONFIG['port']}/index"
 
 
 def main():

BIN
processing/tools/FTP/icon/logo.png


BIN
processing/tools/FTP/icon/文件上传.png


BIN
processing/tools/FTP/icon/文件下载.png


BIN
processing/tools/FTP/icon/文件删除.png


BIN
processing/tools/FTP/icon/文件夹上传.png


BIN
processing/tools/FTP/icon/文件查看.png


BIN
processing/tools/FTP/icon/文件重命名.png


BIN
processing/tools/FTP/icon/新建文件夹.png


BIN
processing/tools/FTP/icon/模型查看.png


+ 8 - 9
processing/tools/GeoServer/Geoserver.py

@@ -7,17 +7,16 @@ __revision__ = '1.0'
 
 # inbuilt libraries
 import os
-from typing import List, Optional, Set, Union, Dict, Iterable, Any
 from pathlib import Path
-
+from typing import List, Optional, Set, Dict, Iterable, Any
+import siwei_config
 # third-party libraries
 import requests
-from xmltodict import parse, unparse
-
 # custom functions
 from processing.tools.GeoServer.Calculation_gdal import raster_value
 from processing.tools.GeoServer.Style import catagorize_xml, classified_xml, coverage_style_xml, outline_only_xml
 from processing.tools.GeoServer.supports import prepare_zip_file, is_valid_xml, is_surrounded_by_quotes
+from xmltodict import parse, unparse
 
 default_gridset_name = "WebMercatorQuadx2"  # 默认切片方案
 default_seed_type = "seed"  # 默认切片请求类型 Type can be seed (add tiles), reseed (replace tiles), or truncate (remove tiles)
@@ -65,9 +64,9 @@ class FileReader:
 class Geoserver:
     def __init__(
             self,
-            service_url: str = "http://192.168.60.2:28085/geoserver",  # default deployment url during installation
-            username: str = "admin",  # default username during geoserver installation
-            password: str = "geoserver",  # default password during geoserver installation
+            service_url: str = siwei_config.CONFIG['geoserver']['url'],  # default deployment url during installation
+            username: str = siwei_config.CONFIG['geoserver']['username'],  # default username during geoserver installation
+            password: str = siwei_config.CONFIG['geoserver']['password'],  # default password during geoserver installation
             request_options: Dict[str, Any] = None  # additional parameters to be sent with each request
     ):
         self.service_url = service_url
@@ -416,7 +415,7 @@ class Geoserver:
             return r.json()
         else:
             return None
-            #raise GeoserverException(r.status_code, r.content)
+            # raise GeoserverException(r.status_code, r.content)
 
     def create_layergroup(
             self,
@@ -681,7 +680,7 @@ class Geoserver:
             return "Layer group deleted successfully"
         else:
             return "Layer group deleted successfully"
-            #raise GeoserverException(r.status_code, r.content)
+            # raise GeoserverException(r.status_code, r.content)
 
     def add_layer_to_layergroup(
             self,

+ 1 - 0
processing/tools/PostgreSQL/PgExxcute.py

@@ -6,6 +6,7 @@ __copyright__ = '(C) 2024 by siwei'
 __revision__ = '1.0'
 
 import psycopg2
+import siwei_config
 
 # 配置数据库连接参数并指定schema
 connparams = {

+ 3 - 2
processing/tools/PostgreSQL/PostgreSQL.py

@@ -26,7 +26,7 @@ class PostgreSQL:
     # 附件表
     Vector_FJ = "t_vector_fj"
     # 字段表
-    Vector_Field = "t_vector_field"
+    Vector_Field = "t_vector_ywlx"
     # 平台资源目录表
     Portal_Zyml = "t_yzt_zyml"
     # 备份表后缀名
@@ -57,6 +57,7 @@ class PostgreSQL:
             "password": password,
             "host": host,
             "port": port,
+            "connect_timeout": 3,
             "options": "-c search_path=otherSchema," + schema if schema is not None else None
         }
         self.conn = psycopg2.connect(**self.connparams)
@@ -270,7 +271,7 @@ class PostgreSQL:
     # 获取矢量数据字典类型
     def getVectorYwlx(self):
         self.cur.execute(
-            "select distinct(t.ywlx) from t_vector_field t ")
+            "select distinct(t.ywlx) from t_vector_ywlx t ")
         rows = self.cur.fetchall()
         return rows