@@ -0,0 +1,295 @@
+package com.onemap.spotoverlap.utils;
+
+import com.google.gson.Gson;
+import com.google.gson.reflect.TypeToken;
+import com.onemap.common.core.utils.StringUtils;
+import org.gdal.ogr.*;
+import org.gdal.gdal.gdal;
+import org.gdal.ogr.Driver;
+import org.gdal.osr.SpatialReference;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.stereotype.Component;
+
+import java.io.File;
+import java.lang.reflect.Type;
+import java.sql.*;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Vector;
+
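+/**
+ * Imports feature layers from an ESRI File Geodatabase (.gdb) into PostGIS using the GDAL/OGR
+ * Java bindings: it creates one table per layer and batch-inserts the features over JDBC.
+ * {@link #importToPostgis(String, String, String)} additionally writes a Shapefile copy of the
+ * layer next to the source GDB. Registered as a Spring component so the {@code dbconnection}
+ * JSON property can be injected.
+ */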
+@Component
+public class GdbToPostgisImporter {
+    private static final Gson gson = new Gson();
+    private static final Type mapType =
+            new TypeToken<Map<String, Object>>() {
+            }.getType();
+    public static String dbconnection;
+
+    @Value("${dbconnection}")
+    public void setDbconnection(String dbconnection) {
+        GdbToPostgisImporter.dbconnection = dbconnection;
+    }
+
+    // Configuration parameters (the PG_* defaults are overwritten by initDbConfig())
+    private static String GDB_PATH = "E:\\projects\\甘肃一张图\\测试数据\\WP15012520240705\\WP15012520240705.gdb";
+    private static String GDB_LAYER = "T15012520240705tb";
+    private static String PG_HOST = "localhost";
+    private static String PG_PORT = "5432";
+    private static String PG_DB = "spot";
+    private static String PG_USER = "postgres";
+    private static String PG_PASSWORD = "postgres";
+    private static String PG_SCHEMA = "public";
+
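+    /**
+     * Standalone entry point for local testing: opens the GDB at {@code GDB_PATH}, then creates
+     * and fills one PostGIS table per layer using the hard-coded connection defaults above.
+     */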
+    public static void main(String[] args) {
+        // Initialize GDAL/OGR drivers
+        gdal.AllRegister();
+        // Open the GDB data source
+        DataSource gdbDataSource = ogr.Open(GDB_PATH);
+        if (gdbDataSource == null) {
+            System.err.println("Failed to open GDB file: " + GDB_PATH);
+            return;
+        }
+        // Obtain a PostgreSQL connection
+        try (Connection conn = getPostgresConnection()) {
+            // Iterate over all layers in the GDB
+            for (int i = 0; i < gdbDataSource.GetLayerCount(); i++) {
+                Layer layer = gdbDataSource.GetLayerByIndex(i);
+                String layerName = layer.GetName();
+                System.out.println("Processing layer: " + layerName);
+                // Create the table in PostGIS
+                String srid = createPostgisTable(conn, layer, layerName);
+                // Import the data
+                importDataToPostgis(conn, layer, layerName, srid);
+            }
+        } catch (Exception e) {
+            e.printStackTrace();
+        } finally {
+            // Release GDAL resources
+            gdbDataSource.delete();
+        }
+    }
+
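+    /**
+     * Copies a layer (schema, spatial reference and all features) into a new UTF-8 encoded
+     * ESRI Shapefile at {@code shpPath}.
+     */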
+    public static void exportLayerToShapefile(Layer srcLayer, String shpPath) {
+        // Get the Shapefile driver
+        Driver shpDriver = ogr.GetDriverByName("ESRI Shapefile");
+        if (shpDriver == null) {
+            throw new RuntimeException("ESRI Shapefile driver is not available");
+        }
+        // Create the output data source
+        DataSource shpDataSource = shpDriver.CreateDataSource(shpPath);
+        if (shpDataSource == null) {
+            throw new RuntimeException("Failed to create Shapefile: " + shpPath);
+        }
+        try {
+            Vector<String> options = new Vector<>();
+            options.add("ENCODING=UTF-8");
+            // Create the output layer (same spatial reference and geometry type)
+            Layer dstLayer = shpDataSource.CreateLayer(
+                    srcLayer.GetName(),
+                    srcLayer.GetSpatialRef(),
+                    srcLayer.GetGeomType(),
+                    options
+            );
+            // Copy the field definitions
+            FeatureDefn srcDefn = srcLayer.GetLayerDefn();
+            for (int i = 0; i < srcDefn.GetFieldCount(); i++) {
+                dstLayer.CreateField(srcDefn.GetFieldDefn(i));
+            }
+            // Copy all features
+            srcLayer.ResetReading();
+            Feature feature;
+            while ((feature = srcLayer.GetNextFeature()) != null) {
+                dstLayer.CreateFeature(feature);
+                feature.delete();
+            }
+        } finally {
+            // Ensure resources are released
+            shpDataSource.delete();
+        }
+    }
+
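+    /**
+     * Imports a single layer from a GDB into a PostGIS table and also exports it as a Shapefile
+     * next to the source file.
+     *
+     * @return the path of the exported Shapefile, or {@code null} if the GDB could not be opened
+     */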
+    public static String importToPostgis(String filepath, String layername, String tablename) {
+        initDbConfig();
+        File file = new File(filepath);
+        String shppath = "";
+        DataSource gdbDataSource = ogr.Open(filepath);
+        if (gdbDataSource == null) {
+            System.err.println("Failed to open GDB file: " + filepath);
+            return null;
+        }
+        try (Connection conn = getPostgresConnection()) {
+            Layer layer = gdbDataSource.GetLayer(layername);
+            System.out.println("Processing layer: " + layername);
+            // First export a Shapefile copy of the layer
+            shppath = file.getParent() + File.separator + tablename + ".shp";
+            exportLayerToShapefile(layer, shppath);
+            layer.ResetReading();
+            // Create the table in PostGIS
+            String srid = createPostgisTable(conn, layer, tablename);
+            // Import the data (the data source itself is released in the finally block)
+            importDataToPostgis(conn, layer, tablename, srid);
+        } catch (Exception e) {
+            e.printStackTrace();
+        } finally {
+            gdbDataSource.delete();
+        }
+        return shppath;
+    }
+
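+    /**
+     * Parses the JSON {@code dbconnection} property and overwrites the PG_* defaults. Based on the
+     * keys read below, the property is expected to look roughly like (illustrative values):
+     * <pre>
+     * {"host":"localhost","port":5432,"database":"spot","user":"postgres","passwd":"postgres","schema":"public"}
+     * </pre>
+     */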
+    public static void initDbConfig() {
+        Map<String, Object> dbconn = new HashMap<>();
+        try {
+            dbconn = gson.fromJson(dbconnection, mapType);
+            // Gson deserializes JSON numbers as doubles, so strip a trailing ".0" from the port
+            dbconn.put("port", Integer.valueOf(String.valueOf(dbconn.get("port")).replaceAll("\\.0$", "")));
+            PG_HOST = String.valueOf(dbconn.get("host"));
+            PG_PORT = String.valueOf(dbconn.get("port"));
+            PG_DB = String.valueOf(dbconn.get("database"));
+            PG_USER = String.valueOf(dbconn.get("user"));
+            PG_PASSWORD = String.valueOf(dbconn.get("passwd"));
+            PG_SCHEMA = String.valueOf(dbconn.get("schema"));
+        } catch (Exception e) {
+            e.printStackTrace();
+        }
+    }
+
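+    /** Opens a JDBC connection using the current PG_* settings. */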
+    public static Connection getPostgresConnection() throws SQLException {
+        String url = String.format("jdbc:postgresql://%s:%s/%s",
+                PG_HOST, PG_PORT, PG_DB);
+        return DriverManager.getConnection(url, PG_USER, PG_PASSWORD);
+    }
+
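+    /**
+     * Creates a table for the layer in PostGIS: one column per attribute field, a {@code gid}
+     * serial primary key and an untyped {@code geom} column, plus an SRID check constraint when
+     * the layer's spatial reference carries an authority code. For a hypothetical layer with
+     * fields NAME (string) and AREA (real) in EPSG:4490 the generated SQL would look like:
+     * <pre>
+     * CREATE TABLE IF NOT EXISTS public.mytable (gid SERIAL PRIMARY KEY, NAME text,
+     *     AREA double precision, geom geometry,
+     *     CONSTRAINT enforce_srid_geom CHECK (st_srid(geom) = 4490))
+     * </pre>
+     *
+     * @return the SRID authority code of the layer, or {@code null} if it has none
+     */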
+    public static String createPostgisTable(Connection conn, Layer layer, String layername) throws SQLException {
+        // Get the layer definition
+        FeatureDefn defn = layer.GetLayerDefn();
+        // Build the CREATE TABLE SQL
+        StringBuilder sql = new StringBuilder();
+        sql.append(String.format("CREATE TABLE IF NOT EXISTS %s.%s (", PG_SCHEMA, layername));
+        sql.append("gid SERIAL PRIMARY KEY, ");
+        // Add the attribute columns
+        for (int i = 0; i < defn.GetFieldCount(); i++) {
+            FieldDefn fieldDefn = defn.GetFieldDefn(i);
+            String fieldName = fieldDefn.GetName();
+            String fieldType = getPgFieldType(fieldDefn.GetFieldType());
+            sql.append(String.format("%s %s, ", fieldName, fieldType));
+        }
+        // Add the geometry column
+        sql.append("geom geometry");
+        // Add the SRID constraint (only read the authority code after the null check)
+        SpatialReference srs = layer.GetSpatialRef();
+        String srid = null;
+        if (srs != null) {
+            srid = srs.GetAuthorityCode(null);
+            if (StringUtils.isNotEmpty(srid)) {
+                sql.append(String.format(", CONSTRAINT enforce_srid_geom CHECK (st_srid(geom) = %d)", Integer.valueOf(srid)));
+            }
+        }
+        sql.append(")");
+        // Execute the CREATE TABLE statement
+        try (Statement stmt = conn.createStatement()) {
+            stmt.execute(sql.toString());
+            System.out.println("Table created: " + layername);
+        }
+        return srid;
+    }
+
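+    /**
+     * Maps an OGR field type to a PostgreSQL column type; anything not listed (e.g. OFTInteger64)
+     * falls back to {@code text}.
+     */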
+    public static String getPgFieldType(int ogrFieldType) {
+        switch (ogrFieldType) {
+            case ogr.OFTInteger:
+                return "integer";
+            case ogr.OFTReal:
+                return "double precision";
+            case ogr.OFTString:
+                return "text";
+            case ogr.OFTDate:
+                return "date";
+            case ogr.OFTDateTime:
+                return "timestamp";
+            default:
+                return "text";
+        }
+    }
+
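+    /**
+     * Streams all features of the layer into the target table with a batched prepared statement,
+     * committing every 1000 rows. Geometries are bound as WKT and converted in SQL; for a
+     * hypothetical layer with fields NAME and AREA the generated statement looks like:
+     * <pre>
+     * INSERT INTO public.mytable (NAME, AREA, geom) VALUES (?, ?, st_setsrid(?::geometry, 4490))
+     * </pre>
+     */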
+    public static void importDataToPostgis(Connection conn, Layer layer, String tableName, String srid) throws SQLException {
+        // Build the INSERT statement
+        FeatureDefn defn = layer.GetLayerDefn();
+        StringBuilder insertSql = new StringBuilder();
+        insertSql.append(String.format("INSERT INTO %s.%s (", PG_SCHEMA, tableName));
+        // Attribute column names
+        for (int i = 0; i < defn.GetFieldCount(); i++) {
+            insertSql.append(defn.GetFieldDefn(i).GetName()).append(", ");
+        }
+        // Geometry column
+        insertSql.append("geom) VALUES (");
+        // Parameter placeholders
+        for (int i = 0; i < defn.GetFieldCount(); i++) {
+            insertSql.append("?, ");
+        }
+        insertSql.setLength(insertSql.length() - 2); // drop the trailing comma and space
+        insertSql.append(", st_setsrid(?::geometry, " + srid + "))");
+        // Batch insert
+        conn.setAutoCommit(false);
+        try (PreparedStatement pstmt = conn.prepareStatement(insertSql.toString())) {
+            int batchCount = 0;
+            // Iterate over all features
+            Feature feature;
+            while ((feature = layer.GetNextFeature()) != null) {
+                // Bind the attribute values
+                for (int i = 0; i < defn.GetFieldCount(); i++) {
+                    setPreparedStatementValue(pstmt, i + 1, feature, i);
+                }
+                // Bind the geometry as WKT
+                Geometry geom = feature.GetGeometryRef();
+                if (geom != null) {
+                    pstmt.setString(defn.GetFieldCount() + 1, geom.ExportToWkt());
+                } else {
+                    pstmt.setNull(defn.GetFieldCount() + 1, Types.OTHER);
+                }
+                pstmt.addBatch();
+                batchCount++;
+                // Commit every 1000 records
+                if (batchCount % 1000 == 0) {
+                    pstmt.executeBatch();
+                    conn.commit();
+                    System.out.println("Imported " + batchCount + " records");
+                }
+                feature.delete();
+            }
+            // Commit the remaining records
+            pstmt.executeBatch();
+            conn.commit();
+            System.out.println("Import finished, " + batchCount + " records in total");
+        } finally {
+            conn.setAutoCommit(true);
+        }
+    }
+
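+    /**
+     * Binds one attribute value on the prepared statement according to its OGR field type
+     * (dates and timestamps are currently bound as strings); unset fields are bound as NULL.
+     */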
+    public static void setPreparedStatementValue(PreparedStatement pstmt, int index, Feature feature, int fieldIndex) throws SQLException {
+        if (feature.IsFieldSet(fieldIndex)) {
+            FieldDefn fieldDefn = feature.GetFieldDefnRef(fieldIndex);
+            switch (fieldDefn.GetFieldType()) {
+                case ogr.OFTInteger:
+                    pstmt.setInt(index, feature.GetFieldAsInteger(fieldIndex));
+                    break;
+                case ogr.OFTReal:
+                    pstmt.setDouble(index, feature.GetFieldAsDouble(fieldIndex));
+                    break;
+                case ogr.OFTString:
+                    pstmt.setString(index, feature.GetFieldAsString(fieldIndex));
+                    break;
+                case ogr.OFTDate:
+                    // Bound as a string for now; a java.sql.Date conversion could be used instead
+                    pstmt.setString(index, feature.GetFieldAsString(fieldIndex));
+                    break;
+                case ogr.OFTDateTime:
+                    // Bound as a string for now; a java.sql.Timestamp conversion could be used instead
+                    pstmt.setString(index, feature.GetFieldAsString(fieldIndex));
+                    break;
+                default:
+                    pstmt.setString(index, feature.GetFieldAsString(fieldIndex));
+            }
+        } else {
+            pstmt.setNull(index, Types.NULL);
+        }
+    }
+}