From 4dfc75b7061bee39968df9e94ee0f56bc003f5fd Mon Sep 17 00:00:00 2001 From: wuww <252740454@qq.com> Date: 星期五, 11 四月 2025 20:13:36 +0800 Subject: [PATCH] 代码初始化~ --- src/main/java/com/se/nsl/utils/ZipUtils.java | 171 src/main/java/com/se/nsl/config/WebConfig.java | 66 src/main/java/com/se/nsl/utils/TiffClipper.java | 42 src/main/resources/application-prod.yml | 149 src/test/java/com/se/nsl/AppTest.java | 28 libs/sem-1.1.1.jar | 0 src/main/java/com/se/nsl/utils/EntityLibraryUtils.java | 313 + src/main/java/com/se/nsl/constant/RedisCache.java | 243 src/main/java/com/se/nsl/utils/ShpToolUtils.java | 237 src/main/java/com/se/nsl/controller/SimuController.java | 1023 +++ src/main/resources/layerQueryParams.json | 1 src/main/java/com/se/nsl/service/Impl/SwwFilesDealServiceImpl.java | 134 src/main/java/com/se/nsl/service/SwwFilesDealService.java | 5 libs/jblosc-1.0.1.dev.jar | 0 src/main/java/com/se/nsl/domain/dto/WaterDto.java | 70 src/main/java/com/se/nsl/domain/vo/BuildingDepthVo.java | 54 src/main/resources/landuse.json | 1 src/main/resources/mybatis.xml | 37 src/main/java/com/se/nsl/helper/ShpHelper.java | 359 + src/main/java/com/se/nsl/service/SemFilesSimuService.java | 17 src/main/resources/mapper/SimuMapper.xml | 7 src/main/java/com/se/nsl/controller/FilesUploadController.java | 298 src/main/java/com/se/nsl/utils/TiffToRGBUtil.java | 109 src/main/java/com/se/nsl/domain/vo/StationRainVo.java | 65 src/main/java/com/se/nsl/helper/H5ReadHelper.java | 393 + src/main/java/com/se/nsl/service/GedbService.java | 515 + src/main/java/com/se/nsl/controller/SemFilesSimuController.java | 92 src/main/resources/layerQueryPointParams.json | 1 src/main/resources/terrainmodule.json | 1 src/main/java/com/se/nsl/utils/SemUtils.java | 20 src/main/java/com/se/nsl/domain/dto/GeLayer.java | 112 src/main/java/com/se/nsl/helper/FileHelper.java | 279 src/main/java/com/se/nsl/service/WaterService.java | 235 src/main/java/com/se/nsl/config/Knife4jConfig.java | 68 src/main/java/com/se/nsl/domain/vo/CreateFilesSimuVo.java | 109 src/main/java/com/se/nsl/utils/ShpReadUtils.java | 82 src/main/java/com/se/nsl/domain/dto/LayerDto.java | 67 src/main/java/com/se/nsl/domain/dto/XYDto.java | 32 src/main/resources/pointmodule.json | 1 pom.xml | 402 + src/main/java/com/se/nsl/controller/TestController.java | 161 src/main/java/com/se/nsl/enums/SemErrorEnums.java | 47 libs/simu.jar | 0 src/main/java/com/se/nsl/helper/GdalHelper.java | 231 src/main/java/com/se/nsl/helper/CaffeineHelper.java | 79 libs/cdm-core-5.4.1.jar | 0 libs/jarhdf5-3.3.2.jar | 0 src/main/java/com/se/nsl/service/SimuService.java | 236 src/main/resources/linemodule.json | 1 src/main/resources/win32-x86-64/blosc.dll | 0 src/main/java/com/se/nsl/helper/RsaHelper.java | 153 src/main/java/com/se/nsl/controller/BaseController.java | 43 src/main/java/com/se/nsl/helper/HDF5ReaderHelper.java | 10 src/main/java/com/se/nsl/config/CorsConfig.java | 51 src/main/java/com/se/nsl/domain/dto/GeFile.java | 34 src/main/java/com/se/nsl/domain/po/PondingPo.java | 79 src/main/java/com/se/nsl/domain/dto/DurationDto.java | 32 src/main/java/com/se/nsl/domain/dto/PointDto.java | 48 src/main/java/com/se/nsl/SimuApplication.java | 20 src/main/java/com/se/nsl/domain/vo/PondingVo.java | 36 src/main/java/com/se/nsl/domain/dto/TerrainDto.java | 73 src/main/java/com/se/nsl/service/Impl/ProjectRelatedServiceImpl.java | 406 + libs/init.sql | 24 src/main/resources/application.yml | 12 hs_err_pid20576.log | 395 + src/main/java/com/se/nsl/domain/dto/GeField.java | 84 
src/main/java/com/se/nsl/controller/DbController.java | 73 src/main/java/com/se/nsl/mapper/SimuMapper.java | 13 src/main/java/com/se/nsl/service/ProjectRelatedService.java | 35 src/main/java/com/se/nsl/helper/StringHelper.java | 166 src/main/java/com/se/nsl/domain/dto/ConfigDto.java | 459 + src/main/java/com/se/nsl/config/InitConfig.java | 184 src/main/java/com/se/nsl/service/Impl/BizH5FileParserServiceImpl.java | 101 libs/n5-zarr-1.3.5.jar | 0 src/main/java/com/se/nsl/domain/EntityDataBase.java | 38 src/main/java/com/se/nsl/domain/dto/GeDb.java | 62 src/main/java/com/se/nsl/domain/po/DataPo.java | 274 libs/jzarr-0.4.2.jar | 0 src/main/java/com/se/nsl/service/Hdf5Service.java | 81 src/main/java/com/se/nsl/config/PropertiesConfig.java | 332 + src/main/java/com/se/nsl/controller/SwwFilesDealController.java | 46 src/main/resources/logback-spring.xml | 106 src/main/java/com/se/nsl/utils/CsvToSQLiteUtils.java | 436 + src/main/java/com/se/nsl/domain/vo/SimuVo.java | 87 src/main/java/com/se/nsl/service/DbService.java | 153 src/main/java/com/se/nsl/controller/WaterController.java | 259 src/main/java/com/se/nsl/domain/vo/CreateSimuVo.java | 227 src/main/java/com/se/nsl/utils/FileUtil.java | 37 src/main/java/com/se/nsl/service/UwService.java | 207 src/main/java/com/se/nsl/helper/WebHelper.java | 316 + src/main/java/com/se/nsl/config/MybatisPlusConfig.java | 38 src/main/java/com/se/nsl/enums/RadioEnums.java | 33 src/main/java/com/se/nsl/service/Impl/SemFilesSimuServiceImpl.java | 338 + src/main/java/com/se/nsl/utils/H5.java | 2397 +++++++ src/main/resources/application-zyy.yml | 156 .gitignore | 42 src/main/java/com/se/nsl/domain/po/SimuPo.java | 179 src/main/java/com/se/nsl/service/IBizH5FileParserService.java | 46 src/main/java/com/se/nsl/domain/vo/R.java | 96 src/main/java/com/se/nsl/utils/ZarrUtils.java | 90 src/main/java/com/se/nsl/controller/BizH5FileParserController.java | 71 src/main/java/com/se/nsl/domain/EntityTypeInfo.java | 30 src/main/java/com/se/nsl/utils/ProjectionToGeographicUtil.java | 66 src/main/java/com/se/nsl/domain/vo/QueryVo.java | 107 src/main/java/com/se/nsl/helper/ComHelper.java | 250 src/main/resources/grid.json | 1 src/main/resources/river.json | 1 src/main/java/com/se/nsl/service/SimuFilesService.java | 229 src/main/java/com/se/nsl/domain/dto/ResultDto.java | 173 src/main/resources/layerQueryDetailParams.json | 1 src/main/java/com/se/nsl/domain/dto/GridDto.java | 34 src/main/java/com/se/nsl/constant/CacheConstants.java | 48 src/main/java/com/se/nsl/helper/HttpHelper.java | 269 src/main/java/com/se/nsl/service/ResultService.java | 615 ++ libs/javabuilder.jar | 0 src/main/java/com/se/nsl/utils/CustomWebClient.java | 500 + src/main/java/com/se/nsl/controller/ProjectRelatedController.java | 98 src/main/java/com/se/nsl/utils/TiffCoordinateExtractorUtil.java | 35 src/main/resources/config.json | 15 src/main/java/com/se/nsl/domain/LoginParams.java | 17 src/main/java/com/se/nsl/domain/dto/XYO.java | 43 src/main/resources/rainfallmodule.json | 1 src/main/java/com/se/nsl/domain/dto/ExtensionDto.java | 100 src/main/java/com/se/nsl/config/RestTemplateConfig.java | 71 src/main/java/com/se/nsl/domain/dto/BuildingDto.java | 34 libs/citygson-1.1.3.8.jar | 0 126 files changed, 18,000 insertions(+), 9 deletions(-) diff --git a/.gitignore b/.gitignore index 32858aa..9ecb131 100644 --- a/.gitignore +++ b/.gitignore @@ -1,12 +1,36 @@ -*.class +HELP.md +target/ +!.mvn/wrapper/maven-wrapper.jar +!**/src/main/**/target/ +!**/src/test/**/target/ -# Mobile Tools for Java (J2ME) -.mtj.tmp/ +### STS 
### +.apt_generated +.classpath +.factorypath +.project +.settings +.springBeans +.sts4-cache -# Package Files # -*.jar -*.war -*.ear +### IntelliJ IDEA ### +.idea +*.iws +*.iml +*.ipr -# virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml -hs_err_pid* +### NetBeans ### +/nbproject/private/ +/nbbuild/ +/dist/ +/nbdist/ +/.nb-gradle/ +build/ +!**/src/main/**/build/ +!**/src/test/**/build/ + +### VS Code ### +.vscode/ +/data +/logs +/Test.txt diff --git a/hs_err_pid20576.log b/hs_err_pid20576.log new file mode 100644 index 0000000..6d2c642 --- /dev/null +++ b/hs_err_pid20576.log @@ -0,0 +1,395 @@ +# +# A fatal error has been detected by the Java Runtime Environment: +# +# EXCEPTION_ACCESS_VIOLATION (0xc0000005) at pc=0x000000002d247438, pid=20576, tid=19880 +# +# JRE version: Java(TM) SE Runtime Environment (8.0-b132) (build 1.8.0-b132) +# Java VM: Java HotSpot(TM) 64-Bit Server VM (25.0-b70 mixed mode windows-amd64 compressed oops) +# Problematic frame: +# C 0x000000002d247438 +# +# Failed to write core dump. Minidumps are not enabled by default on client versions of Windows +# +# If you would like to submit a bug report, please visit: +# http://bugreport.sun.com/bugreport/crash.jsp +# + +--------------- T H R E A D --------------- + +Current thread (0x000000002f29d800): JavaThread "background-preinit" [_thread_in_Java, id=19880, stack(0x0000000030510000,0x0000000030610000)] + +siginfo: ExceptionCode=0xc0000005, ExceptionInformation=0x0000000000000008 0x000000002d247438 + +Registers: +RAX=0x0000000718310760, RBX=0x00000000272d0ae8, RCX=0x000000000458ede0, RDX=0x0000000718310760 +RSP=0x000000003060da60, RBP=0x00000007c0371d38, RSI=0x00000005c2153438, RDI=0x00000007c000b290 +R8 =0x00000005c2153458, R9 =0x00000005c211fe20, R10=0x00000005c2153438, R11=0x0000000004b2bd20 +R12=0x0000000000000000, R13=0x000000003060da40, R14=0x0000000718310b30, R15=0x000000002f29d800 +RIP=0x000000002d247438, EFLAGS=0x0000000000010246 + +Top of Stack: (sp=0x000000003060da60) +0x000000003060da60: 000000003060dbf8 000000002f29d800 +0x000000003060da70: 000000003060daa0 000000000499504c +0x000000003060da80: 000000000499504c 0000000718310760 +0x000000003060da90: 00000005c2153438 000000000499504c +0x000000003060daa0: 0000aacffb7d7e9f 000000003060dbf0 +0x000000003060dab0: 0000000000000000 0000000027072a88 +0x000000003060dac0: 00000005c2153438 0000000718310cb8 +0x000000003060dad0: 0000000000000001 0000000718077f58 +0x000000003060dae0: 000000003060db10 0000000004927a1c +0x000000003060daf0: 0000000004927a1c 0000000004927a1c +0x000000003060db00: 0000000718310cb8 00000005c2153438 +0x000000003060db10: 0000000000000000 0000000000000000 +0x000000003060db20: 0000000000000000 0000000000000000 +0x000000003060db30: 0000000000000000 00000005c2112e00 +0x000000003060db40: 0000000718310cb8 00000005c2153438 +0x000000003060db50: 00000005c2112e78 00000005c211fe20 + +Instructions: (pc=0x000000002d247438) +0x000000002d247418: 78 74 11 c2 05 00 00 00 d8 d8 30 18 07 00 00 00 +0x000000002d247428: f8 d8 30 18 07 00 00 00 28 2c 11 c2 05 00 00 00 +0x000000002d247438: 30 d9 30 18 07 00 00 00 58 7f 07 18 07 00 00 00 +0x000000002d247448: 78 74 11 c2 05 00 00 00 f8 d8 30 18 07 00 00 00 + + +Register to memory mapping: + +RAX=0x0000000718310760 is an oop +com.fasterxml.jackson.datatype.jsr310.deser.InstantDeserializer$$Lambda$540/1138312957 + - klass: 'com/fasterxml/jackson/datatype/jsr310/deser/InstantDeserializer$$Lambda$540' +RBX=0x00000000272d0ae8 is an unknown value +RCX=0x000000000458ede0 is at begin+31 in a stub 
+MethodHandle::interpreter_entry::_linkToStatic [0x000000000458edc1, 0x000000000458ee05[ (68 bytes) +RDX=0x0000000718310760 is an oop +com.fasterxml.jackson.datatype.jsr310.deser.InstantDeserializer$$Lambda$540/1138312957 + - klass: 'com/fasterxml/jackson/datatype/jsr310/deser/InstantDeserializer$$Lambda$540' +RSP=0x000000003060da60 is pointing into the stack for thread: 0x000000002f29d800 +RBP=0x00000007c0371d38 is an unknown value +RSI=0x00000005c2153438 is an oop +java.lang.invoke.DirectMethodHandle + - klass: 'java/lang/invoke/DirectMethodHandle' +RDI=0x00000007c000b290 is an unknown value +R8 =0x00000005c2153458 is an oop +java.lang.invoke.MemberName + - klass: 'java/lang/invoke/MemberName' +R9 =0x00000005c211fe20 is an oop +java.lang.invoke.MemberName + - klass: 'java/lang/invoke/MemberName' +R10=0x00000005c2153438 is an oop +java.lang.invoke.DirectMethodHandle + - klass: 'java/lang/invoke/DirectMethodHandle' +R11=0x0000000004b2bd20 is at entry_point+0 in (nmethod*)0x0000000004b2bbd0 +R12=0x0000000000000000 is an unknown value +R13=0x000000003060da40 is pointing into the stack for thread: 0x000000002f29d800 +R14=0x0000000718310b30 is an oop + +[error occurred during error reporting (printing register info), id 0xc0000005] + +Stack: [0x0000000030510000,0x0000000030610000], sp=0x000000003060da60, free space=1014k +Native frames: (J=compiled Java code, j=interpreted, Vv=VM code, C=native code) +C 0x000000002d247438 + + +--------------- P R O C E S S --------------- + +Java Threads: ( => current thread ) +=>0x000000002f29d800 JavaThread "background-preinit" [_thread_in_Java, id=19880, stack(0x0000000030510000,0x0000000030610000)] + 0x000000002e2cb800 JavaThread "logback-1" daemon [_thread_blocked, id=22036, stack(0x0000000030410000,0x0000000030510000)] + 0x000000002b850800 JavaThread "Service Thread" daemon [_thread_blocked, id=24416, stack(0x000000002c7a0000,0x000000002c8a0000)] + 0x000000002b847800 JavaThread "C1 CompilerThread11" daemon [_thread_blocked, id=24176, stack(0x000000002c6a0000,0x000000002c7a0000)] + 0x000000002b847000 JavaThread "C1 CompilerThread10" daemon [_thread_blocked, id=26028, stack(0x000000002c5a0000,0x000000002c6a0000)] + 0x000000002b846000 JavaThread "C1 CompilerThread9" daemon [_thread_blocked, id=4132, stack(0x000000002c4a0000,0x000000002c5a0000)] + 0x000000002b843800 JavaThread "C1 CompilerThread8" daemon [_thread_blocked, id=23096, stack(0x000000002c3a0000,0x000000002c4a0000)] + 0x000000002b842800 JavaThread "C2 CompilerThread7" daemon [_thread_blocked, id=19736, stack(0x000000002c2a0000,0x000000002c3a0000)] + 0x000000002b83c000 JavaThread "C2 CompilerThread6" daemon [_thread_blocked, id=26600, stack(0x000000002c1a0000,0x000000002c2a0000)] + 0x000000002b83b000 JavaThread "C2 CompilerThread5" daemon [_thread_blocked, id=16668, stack(0x000000002c0a0000,0x000000002c1a0000)] + 0x000000002b83a800 JavaThread "C2 CompilerThread4" daemon [_thread_blocked, id=25804, stack(0x000000002bfa0000,0x000000002c0a0000)] + 0x000000002b837800 JavaThread "C2 CompilerThread3" daemon [_thread_blocked, id=9548, stack(0x000000002bea0000,0x000000002bfa0000)] + 0x000000002b837000 JavaThread "C2 CompilerThread2" daemon [_thread_blocked, id=10292, stack(0x000000002bda0000,0x000000002bea0000)] + 0x000000002b832000 JavaThread "C2 CompilerThread1" daemon [_thread_blocked, id=20152, stack(0x000000002bca0000,0x000000002bda0000)] + 0x000000002b825000 JavaThread "C2 CompilerThread0" daemon [_thread_blocked, id=20288, stack(0x000000002bba0000,0x000000002bca0000)] + 0x0000000029515000 JavaThread 
"JDWP Command Reader" daemon [_thread_in_native, id=13584, stack(0x000000002b1f0000,0x000000002b2f0000)] + 0x0000000029514800 JavaThread "JDWP Event Helper Thread" daemon [_thread_blocked, id=12856, stack(0x000000002b0f0000,0x000000002b1f0000)] + 0x000000002950c000 JavaThread "JDWP Transport Listener: dt_socket" daemon [_thread_blocked, id=5484, stack(0x000000002aff0000,0x000000002b0f0000)] + 0x00000000294fa800 JavaThread "Attach Listener" daemon [_thread_blocked, id=11980, stack(0x000000002aef0000,0x000000002aff0000)] + 0x00000000294f8000 JavaThread "Signal Dispatcher" daemon [_thread_blocked, id=27328, stack(0x000000002adf0000,0x000000002aef0000)] + 0x0000000027a03800 JavaThread "Finalizer" daemon [_thread_blocked, id=23752, stack(0x000000002acf0000,0x000000002adf0000)] + 0x00000000279fe000 JavaThread "Reference Handler" daemon [_thread_blocked, id=16532, stack(0x000000002abf0000,0x000000002acf0000)] + 0x00000000037a9800 JavaThread "main" [_thread_in_Java, id=14344, stack(0x00000000036a0000,0x00000000037a0000)] + +Other Threads: + 0x0000000029455000 VMThread [stack: 0x000000002aaf0000,0x000000002abf0000] [id=27236] + 0x000000002b8c3000 WatcherThread [stack: 0x000000002c8a0000,0x000000002c9a0000] [id=24300] + +VM state:not at safepoint (normal execution) + +VM Mutex/Monitor currently owned by a thread: None + +Heap: + PSYoungGen total 58368K, used 39715K [0x0000000716000000, 0x000000071b300000, 0x00000007c0000000) + eden space 54784K, 65% used [0x0000000716000000,0x000000071834cae8,0x0000000719580000) + from space 3584K, 99% used [0x0000000719580000,0x00000007198fc270,0x0000000719900000) + to space 4608K, 0% used [0x000000071ae80000,0x000000071ae80000,0x000000071b300000) + ParOldGen total 10752K, used 8083K [0x00000005c2000000, 0x00000005c2a80000, 0x0000000716000000) + object space 10752K, 75% used [0x00000005c2000000,0x00000005c27e4dc0,0x00000005c2a80000) + Metaspace used 25067K, capacity 26808K, committed 27008K, reserved 1073152K + class space used 3295K, capacity 3586K, committed 3712K, reserved 1048576K + +Card table byte_map: [0x00000000138e0000,0x00000000148e0000] byte_map_base: 0x0000000010ad0000 + +Marking Bits: (ParMarkBitMap*) 0x0000000071fd13b0 + Begin Bits: [0x0000000016080000, 0x000000001e000000) + End Bits: [0x000000001e000000, 0x0000000025f80000) + +Polling page: 0x0000000003110000 + +CodeCache: size=245760Kb used=9777Kb max_used=9785Kb free=235982Kb + bounds [0x0000000004520000, 0x0000000004ec0000, 0x0000000013520000] + total_blobs=2617 nmethods=2238 adapters=298 + compilation: enabled + +Compilation events (10 events): +Event: 3.209 Thread 0x000000002b847800 nmethod 2233 0x0000000004ead8d0 code [0x0000000004eada80, 0x0000000004eade28] +Event: 3.209 Thread 0x000000002b847800 2234 ! 
3 com.sun.proxy.$Proxy11::annotationType (29 bytes) +Event: 3.210 Thread 0x000000002b847800 nmethod 2234 0x0000000004eae0d0 code [0x0000000004eae260, 0x0000000004eae6f8] +Event: 3.210 Thread 0x000000002b847800 2236 3 org.springframework.core.annotation.TypeMappedAnnotation::getType (8 bytes) +Event: 3.210 Thread 0x000000002b847800 nmethod 2236 0x0000000004eae990 code [0x0000000004eaeae0, 0x0000000004eaecd0] +Event: 3.210 Thread 0x000000002b847800 2237 1 org.springframework.core.type.classreading.SimpleAnnotationMetadata::getAnnotations (5 bytes) +Event: 3.210 Thread 0x000000002b847800 nmethod 2237 0x0000000004eaedd0 code [0x0000000004eaef20, 0x0000000004eaf030] +Event: 3.210 Thread 0x000000002b832000 nmethod 2235 0x0000000004eb1d90 code [0x0000000004eb1ee0, 0x0000000004eb1f88] +Event: 3.238 Thread 0x000000002b847800 2238 3 java.util.regex.Matcher::search (109 bytes) +Event: 3.238 Thread 0x000000002b847800 nmethod 2238 0x0000000004eb1490 code [0x0000000004eb1620, 0x0000000004eb1a28] + +GC Heap History (10 events): +Event: 2.447 GC heap before +{Heap before GC invocations=18 (full 0): + PSYoungGen total 29696K, used 29684K [0x0000000716000000, 0x0000000718b00000, 0x00000007c0000000) + eden space 27648K, 100% used [0x0000000716000000,0x0000000717b00000,0x0000000717b00000) + from space 2048K, 99% used [0x0000000717b00000,0x0000000717cfd3c8,0x0000000717d00000) + to space 2560K, 0% used [0x0000000718880000,0x0000000718880000,0x0000000718b00000) + ParOldGen total 520704K, used 4931K [0x00000005c2000000, 0x00000005e1c80000, 0x0000000716000000) + object space 520704K, 0% used [0x00000005c2000000,0x00000005c24d0cd0,0x00000005e1c80000) + Metaspace used 16025K, capacity 17418K, committed 17792K, reserved 1064960K + class space used 2080K, capacity 2311K, committed 2432K, reserved 1048576K +Event: 2.450 GC heap after +Heap after GC invocations=18 (full 0): + PSYoungGen total 41472K, used 2539K [0x0000000716000000, 0x0000000718b00000, 0x00000007c0000000) + eden space 38912K, 0% used [0x0000000716000000,0x0000000716000000,0x0000000718600000) + from space 2560K, 99% used [0x0000000718880000,0x0000000718afaec0,0x0000000718b00000) + to space 2560K, 0% used [0x0000000718600000,0x0000000718600000,0x0000000718880000) + ParOldGen total 520704K, used 5518K [0x00000005c2000000, 0x00000005e1c80000, 0x0000000716000000) + object space 520704K, 1% used [0x00000005c2000000,0x00000005c25638f0,0x00000005e1c80000) + Metaspace used 16025K, capacity 17418K, committed 17792K, reserved 1064960K + class space used 2080K, capacity 2311K, committed 2432K, reserved 1048576K +} +Event: 2.836 GC heap before +{Heap before GC invocations=19 (full 0): + PSYoungGen total 41472K, used 41451K [0x0000000716000000, 0x0000000718b00000, 0x00000007c0000000) + eden space 38912K, 100% used [0x0000000716000000,0x0000000718600000,0x0000000718600000) + from space 2560K, 99% used [0x0000000718880000,0x0000000718afaec0,0x0000000718b00000) + to space 2560K, 0% used [0x0000000718600000,0x0000000718600000,0x0000000718880000) + ParOldGen total 520704K, used 5518K [0x00000005c2000000, 0x00000005e1c80000, 0x0000000716000000) + object space 520704K, 1% used [0x00000005c2000000,0x00000005c25638f0,0x00000005e1c80000) + Metaspace used 19676K, capacity 21222K, committed 21296K, reserved 1067008K + class space used 2613K, capacity 2909K, committed 2944K, reserved 1048576K +Event: 2.839 GC heap after +Heap after GC invocations=19 (full 0): + PSYoungGen total 41472K, used 2544K [0x0000000716000000, 0x0000000719c80000, 0x00000007c0000000) + eden space 38912K, 0% 
used [0x0000000716000000,0x0000000716000000,0x0000000718600000) + from space 2560K, 99% used [0x0000000718600000,0x000000071887c2b0,0x0000000718880000) + to space 3584K, 0% used [0x0000000719900000,0x0000000719900000,0x0000000719c80000) + ParOldGen total 520704K, used 7208K [0x00000005c2000000, 0x00000005e1c80000, 0x0000000716000000) + object space 520704K, 1% used [0x00000005c2000000,0x00000005c270a250,0x00000005e1c80000) + Metaspace used 19676K, capacity 21222K, committed 21296K, reserved 1067008K + class space used 2613K, capacity 2909K, committed 2944K, reserved 1048576K +} +Event: 2.839 GC heap before +{Heap before GC invocations=20 (full 0): + PSYoungGen total 41472K, used 3381K [0x0000000716000000, 0x0000000719c80000, 0x00000007c0000000) + eden space 38912K, 2% used [0x0000000716000000,0x00000007160d12c0,0x0000000718600000) + from space 2560K, 99% used [0x0000000718600000,0x000000071887c2b0,0x0000000718880000) + to space 3584K, 0% used [0x0000000719900000,0x0000000719900000,0x0000000719c80000) + ParOldGen total 520704K, used 7208K [0x00000005c2000000, 0x00000005e1c80000, 0x0000000716000000) + object space 520704K, 1% used [0x00000005c2000000,0x00000005c270a250,0x00000005e1c80000) + Metaspace used 19679K, capacity 21222K, committed 21296K, reserved 1067008K + class space used 2613K, capacity 2909K, committed 2944K, reserved 1048576K +Event: 2.841 GC heap after +Heap after GC invocations=20 (full 0): + PSYoungGen total 58368K, used 96K [0x0000000716000000, 0x0000000719c80000, 0x00000007c0000000) + eden space 54784K, 0% used [0x0000000716000000,0x0000000716000000,0x0000000719580000) + from space 3584K, 2% used [0x0000000719900000,0x0000000719918000,0x0000000719c80000) + to space 3584K, 0% used [0x0000000719580000,0x0000000719580000,0x0000000719900000) + ParOldGen total 520704K, used 9517K [0x00000005c2000000, 0x00000005e1c80000, 0x0000000716000000) + object space 520704K, 1% used [0x00000005c2000000,0x00000005c294b6f8,0x00000005e1c80000) + Metaspace used 19679K, capacity 21222K, committed 21296K, reserved 1067008K + class space used 2613K, capacity 2909K, committed 2944K, reserved 1048576K +} +Event: 2.841 GC heap before +{Heap before GC invocations=21 (full 1): + PSYoungGen total 58368K, used 96K [0x0000000716000000, 0x0000000719c80000, 0x00000007c0000000) + eden space 54784K, 0% used [0x0000000716000000,0x0000000716000000,0x0000000719580000) + from space 3584K, 2% used [0x0000000719900000,0x0000000719918000,0x0000000719c80000) + to space 3584K, 0% used [0x0000000719580000,0x0000000719580000,0x0000000719900000) + ParOldGen total 520704K, used 9517K [0x00000005c2000000, 0x00000005e1c80000, 0x0000000716000000) + object space 520704K, 1% used [0x00000005c2000000,0x00000005c294b6f8,0x00000005e1c80000) + Metaspace used 19679K, capacity 21222K, committed 21296K, reserved 1067008K + class space used 2613K, capacity 2909K, committed 2944K, reserved 1048576K +Event: 2.878 GC heap after +Heap after GC invocations=21 (full 1): + PSYoungGen total 58368K, used 0K [0x0000000716000000, 0x0000000719c80000, 0x00000007c0000000) + eden space 54784K, 0% used [0x0000000716000000,0x0000000716000000,0x0000000719580000) + from space 3584K, 0% used [0x0000000719900000,0x0000000719900000,0x0000000719c80000) + to space 3584K, 0% used [0x0000000719580000,0x0000000719580000,0x0000000719900000) + ParOldGen total 10752K, used 7562K [0x00000005c2000000, 0x00000005c2a80000, 0x0000000716000000) + object space 10752K, 70% used [0x00000005c2000000,0x00000005c2762ba0,0x00000005c2a80000) + Metaspace used 19679K, capacity 
21222K, committed 21296K, reserved 1067008K + class space used 2613K, capacity 2909K, committed 2944K, reserved 1048576K +} +Event: 3.103 GC heap before +{Heap before GC invocations=22 (full 1): + PSYoungGen total 58368K, used 54784K [0x0000000716000000, 0x0000000719c80000, 0x00000007c0000000) + eden space 54784K, 100% used [0x0000000716000000,0x0000000719580000,0x0000000719580000) + from space 3584K, 0% used [0x0000000719900000,0x0000000719900000,0x0000000719c80000) + to space 3584K, 0% used [0x0000000719580000,0x0000000719580000,0x0000000719900000) + ParOldGen total 10752K, used 7562K [0x00000005c2000000, 0x00000005c2a80000, 0x0000000716000000) + object space 10752K, 70% used [0x00000005c2000000,0x00000005c2762ba0,0x00000005c2a80000) + Metaspace used 23458K, capacity 25124K, committed 25472K, reserved 1071104K + class space used 3053K, capacity 3346K, committed 3456K, reserved 1048576K +Event: 3.107 GC heap after +Heap after GC invocations=22 (full 1): + PSYoungGen total 58368K, used 3568K [0x0000000716000000, 0x000000071b300000, 0x00000007c0000000) + eden space 54784K, 0% used [0x0000000716000000,0x0000000716000000,0x0000000719580000) + from space 3584K, 99% used [0x0000000719580000,0x00000007198fc270,0x0000000719900000) + to space 4608K, 0% used [0x000000071ae80000,0x000000071ae80000,0x000000071b300000) + ParOldGen total 10752K, used 8083K [0x00000005c2000000, 0x00000005c2a80000, 0x0000000716000000) + object space 10752K, 75% used [0x00000005c2000000,0x00000005c27e4dc0,0x00000005c2a80000) + Metaspace used 23458K, capacity 25124K, committed 25472K, reserved 1071104K + class space used 3053K, capacity 3346K, committed 3456K, reserved 1048576K +} + +Deoptimization events (10 events): +Event: 1.809 Thread 0x00000000037a9800 Uncommon trap: reason=class_check action=maybe_recompile pc=0x00000000047f2628 method=java.lang.String.equals(Ljava/lang/Object;)Z @ 8 +Event: 1.809 Thread 0x00000000037a9800 Uncommon trap: reason=class_check action=maybe_recompile pc=0x00000000047f2628 method=java.lang.String.equals(Ljava/lang/Object;)Z @ 8 +Event: 1.813 Thread 0x00000000037a9800 Uncommon trap: reason=class_check action=maybe_recompile pc=0x00000000047f2628 method=java.lang.String.equals(Ljava/lang/Object;)Z @ 8 +Event: 1.905 Thread 0x00000000037a9800 Uncommon trap: reason=bimorphic action=maybe_recompile pc=0x0000000004afd6a8 method=java.util.HashMap.putVal(ILjava/lang/Object;Ljava/lang/Object;ZZ)Ljava/lang/Object; @ 203 +Event: 2.016 Thread 0x00000000037a9800 Uncommon trap: reason=unreached action=reinterpret pc=0x0000000004b054f0 method=java.util.HashMap.hash(Ljava/lang/Object;)I @ 1 +Event: 2.038 Thread 0x00000000037a9800 Uncommon trap: reason=bimorphic action=maybe_recompile pc=0x0000000004afd6a8 method=java.util.HashMap.putVal(ILjava/lang/Object;Ljava/lang/Object;ZZ)Ljava/lang/Object; @ 203 +Event: 2.078 Thread 0x00000000037a9800 Uncommon trap: reason=range_check action=make_not_entrant pc=0x0000000004a0d950 method=sun.reflect.generics.parser.SignatureParser.current()C @ 34 +Event: 2.475 Thread 0x00000000037a9800 Uncommon trap: reason=unreached action=reinterpret pc=0x0000000004914d28 method=java.util.HashMap.hash(Ljava/lang/Object;)I @ 1 +Event: 2.814 Thread 0x000000002f29d800 Uncommon trap: reason=unreached action=reinterpret pc=0x0000000004bb3280 method=java.lang.ThreadLocal.get()Ljava/lang/Object; @ 11 +Event: 2.818 Thread 0x00000000037a9800 Uncommon trap: reason=unreached action=reinterpret pc=0x0000000004bfc300 
method=java.net.URL.<init>(Ljava/net/URL;Ljava/lang/String;Ljava/net/URLStreamHandler;)V @ 347 + +Internal exceptions (10 events): +Event: 3.235 Thread 0x000000002f29d800 Exception <a 'java/security/PrivilegedActionException'> (0x0000000718267fe0) thrown at [D:\re\workspace\8-2-build-windows-amd64-cygwin\jdk8\2238\hotspot\src\share\vm\prims\jvm.cpp, line 1248] +Event: 3.235 Thread 0x000000002f29d800 Exception <a 'java/security/PrivilegedActionException'> (0x0000000718272438) thrown at [D:\re\workspace\8-2-build-windows-amd64-cygwin\jdk8\2238\hotspot\src\share\vm\prims\jvm.cpp, line 1248] +Event: 3.236 Thread 0x000000002f29d800 Exception <a 'java/security/PrivilegedActionException'> (0x0000000718281b50) thrown at [D:\re\workspace\8-2-build-windows-amd64-cygwin\jdk8\2238\hotspot\src\share\vm\prims\jvm.cpp, line 1248] +Event: 3.236 Thread 0x000000002f29d800 Exception <a 'java/security/PrivilegedActionException'> (0x000000071828cf88) thrown at [D:\re\workspace\8-2-build-windows-amd64-cygwin\jdk8\2238\hotspot\src\share\vm\prims\jvm.cpp, line 1248] +Event: 3.236 Thread 0x000000002f29d800 Exception <a 'java/security/PrivilegedActionException'> (0x0000000718297e78) thrown at [D:\re\workspace\8-2-build-windows-amd64-cygwin\jdk8\2238\hotspot\src\share\vm\prims\jvm.cpp, line 1248] +Event: 3.236 Thread 0x000000002f29d800 Exception <a 'java/security/PrivilegedActionException'> (0x00000007182a2028) thrown at [D:\re\workspace\8-2-build-windows-amd64-cygwin\jdk8\2238\hotspot\src\share\vm\prims\jvm.cpp, line 1248] +Event: 3.236 Thread 0x000000002f29d800 Exception <a 'java/security/PrivilegedActionException'> (0x00000007182ac990) thrown at [D:\re\workspace\8-2-build-windows-amd64-cygwin\jdk8\2238\hotspot\src\share\vm\prims\jvm.cpp, line 1248] +Event: 3.237 Thread 0x000000002f29d800 Exception <a 'java/security/PrivilegedActionException'> (0x00000007182b4d10) thrown at [D:\re\workspace\8-2-build-windows-amd64-cygwin\jdk8\2238\hotspot\src\share\vm\prims\jvm.cpp, line 1248] +Event: 3.238 Thread 0x000000002f29d800 Exception <a 'java/security/PrivilegedActionException'> (0x00000007182d4288) thrown at [D:\re\workspace\8-2-build-windows-amd64-cygwin\jdk8\2238\hotspot\src\share\vm\prims\jvm.cpp, line 1248] +Event: 3.239 Thread 0x000000002f29d800 Exception <a 'java/security/PrivilegedActionException'> (0x00000007182ed8e8) thrown at [D:\re\workspace\8-2-build-windows-amd64-cygwin\jdk8\2238\hotspot\src\share\vm\prims\jvm.cpp, line 1248] + +Events (10 events): +Event: 3.239 loading class com/fasterxml/jackson/core/exc/StreamReadException +Event: 3.239 loading class com/fasterxml/jackson/core/exc/StreamReadException done +Event: 3.239 Thread 0x000000002f29d800 DEOPT PACKING pc=0x00000000048a0e59 sp=0x000000003060b540 +Event: 3.239 Thread 0x000000002f29d800 DEOPT UNPACKING pc=0x0000000004567204 sp=0x000000003060b2b0 mode 1 +Event: 3.239 Thread 0x000000002f29d800 DEOPT PACKING pc=0x000000000489fbd4 sp=0x000000003060b5e0 +Event: 3.239 Thread 0x000000002f29d800 DEOPT UNPACKING pc=0x0000000004567204 sp=0x000000003060b310 mode 1 +Event: 3.239 Thread 0x000000002f29d800 DEOPT PACKING pc=0x0000000004e6315c sp=0x000000003060c1c0 +Event: 3.239 Thread 0x000000002f29d800 DEOPT UNPACKING pc=0x0000000004567204 sp=0x000000003060be08 mode 1 +Event: 3.242 loading class java/util/regex/Pattern$Pos +Event: 3.242 loading class java/util/regex/Pattern$Pos done + + +Dynamic libraries: +0x00007ff72cab0000 - 0x00007ff72cae4000 D:\soft\jdk1.8\bin\java.exe +0x00007ff806e90000 - 0x00007ff8070a7000 C:\WINDOWS\SYSTEM32\ntdll.dll +0x00007ff806020000 - 
0x00007ff8060e4000 C:\WINDOWS\System32\KERNEL32.DLL +0x00007ff8040f0000 - 0x00007ff8044c1000 C:\WINDOWS\System32\KERNELBASE.dll +0x00007ff806cb0000 - 0x00007ff806d61000 C:\WINDOWS\System32\ADVAPI32.dll +0x00007ff805bf0000 - 0x00007ff805c97000 C:\WINDOWS\System32\msvcrt.dll +0x00007ff804b60000 - 0x00007ff804c07000 C:\WINDOWS\System32\sechost.dll +0x00007ff8044d0000 - 0x00007ff8044f8000 C:\WINDOWS\System32\bcrypt.dll +0x00007ff805ad0000 - 0x00007ff805be4000 C:\WINDOWS\System32\RPCRT4.dll +0x00007ff805ca0000 - 0x00007ff805e51000 C:\WINDOWS\System32\USER32.dll +0x00007ff804740000 - 0x00007ff804766000 C:\WINDOWS\System32\win32u.dll +0x00007ff805930000 - 0x00007ff805959000 C:\WINDOWS\System32\GDI32.dll +0x00007ff804620000 - 0x00007ff80473b000 C:\WINDOWS\System32\gdi32full.dll +0x00007ff804050000 - 0x00007ff8040ea000 C:\WINDOWS\System32\msvcp_win.dll +0x00007ff804500000 - 0x00007ff804611000 C:\WINDOWS\System32\ucrtbase.dll +0x00007fffe9600000 - 0x00007fffe9892000 C:\WINDOWS\WinSxS\amd64_microsoft.windows.common-controls_6595b64144ccf1df_6.0.22621.4830_none_270fe7d773858e80\COMCTL32.dll +0x00007ff806b40000 - 0x00007ff806b71000 C:\WINDOWS\System32\IMM32.DLL +0x0000000073ac0000 - 0x0000000073acd000 C:\Program Files (x86)\360\360Safe\safemon\SafeWrapper.dll +0x00007ffff0570000 - 0x00007ffff0675000 C:\Program Files (x86)\360\360Safe\safemon\libzdtp64.dll +0x00007ff804c80000 - 0x00007ff805508000 C:\WINDOWS\System32\SHELL32.dll +0x00007ff8047f0000 - 0x00007ff80492f000 C:\WINDOWS\System32\wintypes.dll +0x00007ff8067b0000 - 0x00007ff806b40000 C:\WINDOWS\System32\combase.dll +0x00007ff804c20000 - 0x00007ff804c7e000 C:\WINDOWS\System32\SHLWAPI.dll +0x00007ff803bd0000 - 0x00007ff803bda000 C:\WINDOWS\SYSTEM32\VERSION.dll +0x0000000072050000 - 0x0000000072122000 D:\soft\jdk1.8\jre\bin\msvcr100.dll +0x0000000071820000 - 0x000000007204a000 D:\soft\jdk1.8\jre\bin\server\jvm.dll +0x00007ff805510000 - 0x00007ff805518000 C:\WINDOWS\System32\PSAPI.DLL +0x00007fffda0d0000 - 0x00007fffda0d9000 C:\WINDOWS\SYSTEM32\WSOCK32.dll +0x00007ffffdca0000 - 0x00007ffffdcd4000 C:\WINDOWS\SYSTEM32\WINMM.dll +0x00007ff8058b0000 - 0x00007ff805921000 C:\WINDOWS\System32\WS2_32.dll +0x0000000071810000 - 0x000000007181f000 D:\soft\jdk1.8\jre\bin\verify.dll +0x00000000717e0000 - 0x0000000071808000 D:\soft\jdk1.8\jre\bin\java.dll +0x0000000071710000 - 0x0000000071745000 D:\soft\jdk1.8\jre\bin\jdwp.dll +0x0000000071700000 - 0x0000000071708000 D:\soft\jdk1.8\jre\bin\npt.dll +0x00007fff99e60000 - 0x00007fff9a06e000 C:\Users\25274\AppData\Local\Temp\idea_libasyncProfiler_dll_temp_folder1\libasyncProfiler.dll +0x00000000716d0000 - 0x00000000716f3000 D:\soft\jdk1.8\jre\bin\instrument.dll +0x00000000717c0000 - 0x00000000717d6000 D:\soft\jdk1.8\jre\bin\zip.dll +0x00007ff801e60000 - 0x00007ff80276d000 C:\WINDOWS\SYSTEM32\windows.storage.dll +0x00007ff806110000 - 0x00007ff80621a000 C:\WINDOWS\System32\SHCORE.dll +0x00007ff803f00000 - 0x00007ff803f2b000 C:\WINDOWS\SYSTEM32\profapi.dll +0x00000000716c0000 - 0x00000000716c9000 D:\soft\jdk1.8\jre\bin\dt_socket.dll +0x00007ff803410000 - 0x00007ff80347a000 C:\WINDOWS\system32\mswsock.dll +0x00000000717a0000 - 0x00000000717ba000 D:\soft\jdk1.8\jre\bin\net.dll +0x0000000071750000 - 0x0000000071761000 D:\soft\jdk1.8\jre\bin\nio.dll +0x00000000716b0000 - 0x00000000716bd000 D:\soft\jdk1.8\jre\bin\management.dll +0x00007fffe7b40000 - 0x00007fffe7b57000 C:\WINDOWS\system32\napinsp.dll +0x00007fffe59d0000 - 0x00007fffe59eb000 C:\WINDOWS\system32\pnrpnsp.dll +0x00007ff802a10000 - 0x00007ff802b12000 
C:\WINDOWS\SYSTEM32\DNSAPI.dll +0x00007ff803be0000 - 0x00007ff803c0d000 C:\WINDOWS\SYSTEM32\IPHLPAPI.DLL +0x00007ff805520000 - 0x00007ff805529000 C:\WINDOWS\System32\NSI.dll +0x00007fffe59b0000 - 0x00007fffe59c1000 C:\WINDOWS\System32\winrnr.dll +0x00007fffe5990000 - 0x00007fffe59a5000 C:\WINDOWS\system32\wshbth.dll +0x00007fffe5960000 - 0x00007fffe5987000 C:\WINDOWS\system32\nlansp_c.dll +0x00007ffffb8f0000 - 0x00007ffffb8fa000 C:\Windows\System32\rasadhlp.dll +0x00007ffffc9d0000 - 0x00007ffffca53000 C:\WINDOWS\System32\fwpuclnt.dll +0x00007ff801a30000 - 0x00007ff801c62000 C:\WINDOWS\SYSTEM32\dbghelp.dll +0x00007ff8059f0000 - 0x00007ff805ac7000 C:\WINDOWS\System32\OLEAUT32.dll +0x00007ff803fd0000 - 0x00007ff80404b000 C:\WINDOWS\System32\bcryptPrimitives.dll + +VM Arguments: +jvm_args: -agentlib:jdwp=transport=dt_socket,address=127.0.0.1:56096,suspend=y,server=n -ea -agentpath:C:\Users\25274\AppData\Local\Temp\idea_libasyncProfiler_dll_temp_folder1\libasyncProfiler.dll=version,jfr,event=wall,interval=10ms,cstack=no,file=C:\Users\25274\IdeaSnapshots\AppTest_t1_2025_04_11_200333.jfr,dbghelppath=C:\Users\25274\AppData\Local\Temp\idea_dbghelp_dll_temp_folder1\dbghelp.dll,log=C:\Users\25274\AppData\Local\Temp\AppTest_t1_2025_04_11_200333.jfr.log.txt,logLevel=DEBUG -Didea.test.cyclic.buffer.size=1048576 -javaagent:C:\Users\25274\AppData\Local\JetBrains\IntelliJIdea2024.3\captureAgent\debugger-agent.jar -Dkotlinx.coroutines.debug.enable.creation.stack.trace=false -Ddebugger.agent.enable.coroutines=true -Dkotlinx.coroutines.debug.enable.flows.stack.trace=true -Dkotlinx.coroutines.debug.enable.mutable.state.flows.stack.trace=true -Dfile.encoding=UTF-8 +java_command: com.intellij.rt.junit.JUnitStarter -ideVersion5 -junit5 com.se.nsl.AppTest,t1 +java_class_path (initial): C:\Users\25274\.m2\repository\org\junit\platform\junit-platform-launcher\1.8.2\junit-platform-launcher-1.8.2.jar;C:\Program Files\JetBrains\IntelliJ IDEA 2024.3.4.1\lib\idea_rt.jar;C:\Program Files\JetBrains\IntelliJ IDEA 2024.3.4.1\plugins\junit\lib\junit5-rt.jar;C:\Program Files\JetBrains\IntelliJ IDEA 
2024.3.4.1\plugins\junit\lib\junit-rt.jar;D:\soft\openjdk-8u402\jre\lib\charsets.jar;D:\soft\openjdk-8u402\jre\lib\ext\access-bridge-64.jar;D:\soft\openjdk-8u402\jre\lib\ext\cldrdata.jar;D:\soft\openjdk-8u402\jre\lib\ext\dnsns.jar;D:\soft\openjdk-8u402\jre\lib\ext\jaccess.jar;D:\soft\openjdk-8u402\jre\lib\ext\jfxrt.jar;D:\soft\openjdk-8u402\jre\lib\ext\localedata.jar;D:\soft\openjdk-8u402\jre\lib\ext\nashorn.jar;D:\soft\openjdk-8u402\jre\lib\ext\sunec.jar;D:\soft\openjdk-8u402\jre\lib\ext\sunjce_provider.jar;D:\soft\openjdk-8u402\jre\lib\ext\sunmscapi.jar;D:\soft\openjdk-8u402\jre\lib\ext\sunpkcs11.jar;D:\soft\openjdk-8u402\jre\lib\ext\zipfs.jar;D:\soft\openjdk-8u402\jre\lib\jce.jar;D:\soft\openjdk-8u402\jre\lib\jfr.jar;D:\soft\openjdk-8u402\jre\lib\jfxswt.jar;D:\soft\openjdk-8u402\jre\lib\jsse.jar;D:\soft\openjdk-8u402\jre\lib\management-agent.jar;D:\soft\openjdk-8u402\jre\lib\resources.jar;D:\soft\openjdk-8u402\jre\lib\rt.jar;D:\terrait\NslServer\target\test-classes;D:\terrait\NslServer\target\classes;C:\Users\25274\.m2\repository\org\springframework\boot\spring-boot-starter-web\2.6.4\spring-boot-starter-web-2.6.4.jar;C:\Users\25274\.m2\repository\org\springframework\boot\spring-boot-starter\2.6.4\spring-boot-starter-2.6.4.jar;C:\Users\25274\.m2\repository\org\springframework\boot\spring-boot\2.6.4\spring-boot-2.6.4.jar;C:\Users\25274\.m2\repository\org\springframework\boot\spring-boot-starter-logging\2.6.4\spring-boot-starter-logging-2.6.4.jar;C:\Users\25274\.m2\repository\ch\qos\logback\logback-classic\1.2.10\logback-classic-1.2.10.jar;C:\Users\25274\.m2\repository\ch\qos\logback\logback-core\1.2.10\logback-core-1.2.10.jar;C:\Users\2 +Launcher Type: SUN_STANDARD + +Environment Variables: +JAVA_HOME=D:\soft\jdk1.8 +CLASSPATH=".;D:\soft\jdk1.8\lib\dt.jar;D:\soft\jdk1.8\lib\tools.jar;"; +PATH=C:\Program Files (x86)\VMware\VMware Workstation\bin\;C:\Windows\system32;C:\Windows;C:\Windows\System32\Wbem;C:\Windows\System32\WindowsPowerShell\v1.0\;C:\Windows\System32\OpenSSH\;C:\Program Files (x86)\NVIDIA Corporation\PhysX\Common;C:\Program Files\NVIDIA Corporation\NVIDIA NvDLISR;C:\WINDOWS\system32;C:\WINDOWS;C:\WINDOWS\System32\Wbem;C:\WINDOWS\System32\WindowsPowerShell\v1.0\;C:\WINDOWS\System32\OpenSSH\;D:\soft\maven\bin;C:\Program Files\Git\cmd;D:\soft\jdk1.8\bin;D:\soft\jdk1.8\jre\bin;C:\Program Files\TortoiseGit\bin;C:\Program Files\Docker\Docker\resources\bin;D:\soft\release-1928-x64-dev\release-1928-x64\bin;C:\Program Files\Microsoft SQL Server\150\Tools\Binn\;C:\Program Files\Microsoft SQL Server\Client SDK\ODBC\170\Tools\Binn\;C:\Program Files\dotnet\;VM_HOME;VM_SYMLINK;C:\Program Files\MySQL\MySQL Shell 8.0\bin\;C:\Users\25274\AppData\Local\Microsoft\WindowsApps;C:\Programs\Microsoft VS Code\bin;C:\Users\25274\.dotnet\tools;C:\Users\25274\AppData\Roaming\nvm;C:\Program Files\nodejs +USERNAME=25274 +OS=Windows_NT +PROCESSOR_IDENTIFIER=AMD64 Family 23 Model 96 Stepping 1, AuthenticAMD + + + +--------------- S Y S T E M --------------- + +OS: Windows 8.1 , 64 bit Build 9600 + +CPU:total 16 (16 cores per cpu, 1 threads per core) family 23 model 96 stepping 1, cmov, cx8, fxsr, mmx, sse, sse2, sse3, ssse3, sse4.1, sse4.2, popcnt, avx, avx2, aes, clmul, mmxext, 3dnowpref, lzcnt, sse4a, tsc, tscinvbit + +Memory: 4k page, physical 33420612k(16111880k free), swap 33453380k(14236236k free) + +vm_info: Java HotSpot(TM) 64-Bit Server VM (25.0-b70) for windows-amd64 JRE (1.8.0-b132), built on Mar 4 2014 03:46:18 by "java_re" with MS VC++ 10.0 (VS2010) + +time: Fri Apr 11 20:03:37 2025 +elapsed 
time: 3 seconds + diff --git a/libs/cdm-core-5.4.1.jar b/libs/cdm-core-5.4.1.jar new file mode 100644 index 0000000..c441974 --- /dev/null +++ b/libs/cdm-core-5.4.1.jar Binary files differ diff --git a/libs/citygson-1.1.3.8.jar b/libs/citygson-1.1.3.8.jar new file mode 100644 index 0000000..ac6a44b --- /dev/null +++ b/libs/citygson-1.1.3.8.jar Binary files differ diff --git a/libs/init.sql b/libs/init.sql new file mode 100644 index 0000000..92685e9 --- /dev/null +++ b/libs/init.sql @@ -0,0 +1,24 @@ +create extension if not exists postgis cascade; + +create extension if not exists "uuid-ossp"; + +create schema if not exists bs; + +create table bs.simu ( + id serial primary key, + pid integer, + num integer, + name varchar(200), + service_name varchar(200), + data varchar(8000), + status smallint default 0, + result varchar(2000), + create_time timestamp(6) without time zone default now(), + update_time timestamp(6) without time zone, + sem_url varchar(2000), + point_url varchar(2000), + link_url varchar(2000), + bak varchar(2000) +); + +select * from bs.simu; diff --git a/libs/jarhdf5-3.3.2.jar b/libs/jarhdf5-3.3.2.jar new file mode 100644 index 0000000..70f758b --- /dev/null +++ b/libs/jarhdf5-3.3.2.jar Binary files differ diff --git a/libs/javabuilder.jar b/libs/javabuilder.jar new file mode 100644 index 0000000..ca416aa --- /dev/null +++ b/libs/javabuilder.jar Binary files differ diff --git a/libs/jblosc-1.0.1.dev.jar b/libs/jblosc-1.0.1.dev.jar new file mode 100644 index 0000000..3f3b9fc --- /dev/null +++ b/libs/jblosc-1.0.1.dev.jar Binary files differ diff --git a/libs/jzarr-0.4.2.jar b/libs/jzarr-0.4.2.jar new file mode 100644 index 0000000..422de95 --- /dev/null +++ b/libs/jzarr-0.4.2.jar Binary files differ diff --git a/libs/n5-zarr-1.3.5.jar b/libs/n5-zarr-1.3.5.jar new file mode 100644 index 0000000..fc5945a --- /dev/null +++ b/libs/n5-zarr-1.3.5.jar Binary files differ diff --git a/libs/sem-1.1.1.jar b/libs/sem-1.1.1.jar new file mode 100644 index 0000000..45d18cd --- /dev/null +++ b/libs/sem-1.1.1.jar Binary files differ diff --git a/libs/simu.jar b/libs/simu.jar new file mode 100644 index 0000000..adc6616 --- /dev/null +++ b/libs/simu.jar Binary files differ diff --git a/pom.xml b/pom.xml new file mode 100644 index 0000000..5ca9530 --- /dev/null +++ b/pom.xml @@ -0,0 +1,402 @@ +<?xml version="1.0" encoding="UTF-8"?> +<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" + xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd"> + <modelVersion>4.0.0</modelVersion> + <parent> + <groupId>org.springframework.boot</groupId> + <artifactId>spring-boot-starter-parent</artifactId> + <!--<version>2.7.18</version>--> + <version>2.6.4</version> + <relativePath/> + </parent> + <packaging>jar</packaging> + + <groupId>com.se</groupId> + <artifactId>simu</artifactId> + <version>1.0.0</version> + <name>SimuServer</name> + <description>Waterlogging simulation service</description> + + <properties> + <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding> + <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding> + <java.version>1.8</java.version> + </properties> + + <dependencies> + <!--web--> + <dependency> + <groupId>org.springframework.boot</groupId> + <artifactId>spring-boot-starter-web</artifactId> + </dependency> + <!--lombok--> + <dependency> + <groupId>org.projectlombok</groupId> + <artifactId>lombok</artifactId> + <version>1.18.34</version> + <optional>true</optional> + </dependency> +
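+ <!-- Note: lombok is compile-time only; it is marked optional here and is also excluded from the repackaged boot jar in the spring-boot-maven-plugin configuration below. -->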
<!--aop--> + <dependency> + <groupId>org.springframework.boot</groupId> + <artifactId>spring-boot-starter-aop</artifactId> + </dependency> + <!--mybatis-plus--> + <dependency> + <groupId>com.baomidou</groupId> + <artifactId>mybatis-plus-boot-starter</artifactId> + <version>3.5.7</version> + </dependency> + <!--redis--> + <dependency> + <groupId>org.springframework.boot</groupId> + <artifactId>spring-boot-starter-data-redis</artifactId> + </dependency> + <!--postgresql--> + <dependency> + <groupId>org.postgresql</groupId> + <artifactId>postgresql</artifactId> + <scope>runtime</scope> + </dependency> + <dependency> + <groupId>org.apache.commons</groupId> + <artifactId>commons-pool2</artifactId> + </dependency> + <!--druid--> + <dependency> + <groupId>com.alibaba</groupId> + <artifactId>druid</artifactId> + <version>1.1.22</version> + </dependency> + <dependency> + <groupId>com.alibaba</groupId> + <artifactId>druid-spring-boot-starter</artifactId> + <version>1.1.22</version> + </dependency> + <!--text--> + <dependency> + <groupId>org.apache.commons</groupId> + <artifactId>commons-text</artifactId> + <version>1.12.0</version> + </dependency> + <!--hutool--> + <dependency> + <groupId>cn.hutool</groupId> + <artifactId>hutool-all</artifactId> + <version>5.8.29</version> + </dependency> + <!--httpclient--> + <dependency> + <groupId>org.apache.httpcomponents</groupId> + <artifactId>httpclient</artifactId> + </dependency> + <!--knife4j--> + <dependency> + <groupId>com.github.xiaoymin</groupId> + <artifactId>knife4j-spring-boot-starter</artifactId> + <version>3.0.3</version> + </dependency> + <!--fast-md5--> + <dependency> + <groupId>com.joyent.util</groupId> + <artifactId>fast-md5</artifactId> + <version>2.7.1</version> + </dependency> + <!--fastjson--> + <dependency> + <groupId>com.alibaba</groupId> + <artifactId>fastjson</artifactId> + <version>1.2.83</version> + </dependency> + <!--dependency> + <groupId>com.alibaba.fastjson2</groupId> + <artifactId>fastjson2</artifactId> + <version>2.0.52</version> + </dependency--> + <!--gdal--> + <dependency> + <groupId>org.gdal</groupId> + <artifactId>gdal</artifactId> + <!--<version>3.9.0</version>--> + <version>3.5.0</version> + <!--version>3.2.0</version--> + </dependency> + <!--rainfall--> + <dependency> + <groupId>com.mathworks.toolbox</groupId> + <artifactId>javabuilder</artifactId> + <version>1.0</version> + <scope>system</scope> + <systemPath>${project.basedir}/libs/javabuilder.jar</systemPath> + </dependency> + <dependency> + <groupId>com.se.simu</groupId> + <artifactId>Rainfall</artifactId> + <version>1.0</version> + <scope>system</scope> + <systemPath>${project.basedir}/libs/simu.jar</systemPath> + </dependency> + <!--Caffeine cache--> + <dependency> + <groupId>com.github.ben-manes.caffeine</groupId> + <artifactId>caffeine</artifactId> + <version>2.9.3</version> + </dependency> + <dependency> + <groupId>org.springframework.boot</groupId> + <artifactId>spring-boot-starter-test</artifactId> + <scope>test</scope> + <exclusions> + <exclusion> + <groupId>org.junit.vintage</groupId> + <artifactId>junit-vintage-engine</artifactId> + </exclusion> + </exclusions> + </dependency> + <dependency> + <groupId>hdf.hdf5lib</groupId> + <artifactId>h5</artifactId> + <version>3.3.2</version> + <scope>system</scope> + <systemPath>${project.basedir}/libs/jarhdf5-3.3.2.jar</systemPath> + </dependency> + <dependency> + <groupId>org.yaml</groupId> + <artifactId>snakeyaml</artifactId> + <version>1.33</version> + <scope>compile</scope> + </dependency> + +
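+ <!-- Note: the dependencies in this file that point at jars under libs/ use scope=system; they are bundled into the executable jar only because includeSystemScope is enabled in the spring-boot-maven-plugin configuration at the bottom of this file. -->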
<!--webclient requests--> + <!-- https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-starter-webflux --> + <dependency> + <groupId>org.springframework.boot</groupId> + <artifactId>spring-boot-starter-webflux</artifactId> + <version>3.3.1</version> + </dependency> + + <!--1-sqlite support--> + <!-- https://mvnrepository.com/artifact/org.xerial/sqlite-jdbc --> + <dependency> + <groupId>org.xerial</groupId> + <artifactId>sqlite-jdbc</artifactId> + <version>3.42.0.0</version> + </dependency> + <!--2-sqlite dialect--> + <dependency> + <groupId>com.zsoltfabok</groupId> + <artifactId>sqlite-dialect</artifactId> + <version>1.0</version> + </dependency> + + <!-- statistics/logging of database operations --> + <dependency> + <groupId>p6spy</groupId> + <artifactId>p6spy</artifactId> + <version>3.9.1</version> + </dependency> + + <!-- Apache Commons CSV (optional) --> + <dependency> + <groupId>org.apache.commons</groupId> + <artifactId>commons-csv</artifactId> + <version>1.8</version> <!-- adjust the version as needed --> + </dependency> + <!-- common IO utilities --> + <dependency> + <groupId>commons-io</groupId> + <artifactId>commons-io</artifactId> + <version>2.16.1</version> + </dependency> + <!-- file upload utilities --> + <dependency> + <groupId>commons-fileupload</groupId> + <artifactId>commons-fileupload</artifactId> + <version>1.5</version> + </dependency> + + <!-- https://mvnrepository.com/artifact/dev.zarr/jzarr --> + <dependency> + <groupId>dev.zarr</groupId> + <artifactId>jzarr</artifactId> + <version>0.4.2</version> + <scope>system</scope> + <systemPath>${project.basedir}/libs/jzarr-0.4.2.jar</systemPath> + </dependency> + <!-- https://mvnrepository.com/artifact/edu.ucar/cdm-core --> + <dependency> + <groupId>edu.ucar</groupId> + <artifactId>cdm-core</artifactId> + <version>5.4.1</version> + <scope>system</scope> + <systemPath>${project.basedir}/libs/cdm-core-5.4.1.jar</systemPath> + </dependency> + + <!-- https://mvnrepository.com/artifact/org.blosc/jblosc --> + <dependency> + <groupId>org.blosc</groupId> + <artifactId>jblosc</artifactId> + <version>1.0.1.dev</version> + <scope>system</scope> + <systemPath>${project.basedir}/libs/jblosc-1.0.1.dev.jar</systemPath> + </dependency> + <dependency> + <groupId>net.java.dev.jna</groupId> + <artifactId>jna</artifactId> + <version>4.2.2</version> + </dependency> + + + <!-- zarr --> + <!-- https://mvnrepository.com/artifact/org.janelia.saalfeldlab/n5-zarr --> + <dependency> + <groupId>org.janelia.saalfeldlab</groupId> + <artifactId>n5-zarr</artifactId> + <version>1.3.5</version> + <scope>system</scope> + <systemPath>${project.basedir}/libs/n5-zarr-1.3.5.jar</systemPath> + </dependency> + + <!--gavaghan geodesic distance--> + <dependency> + <groupId>org.gavaghan</groupId> + <artifactId>geodesy</artifactId> + <version>1.1.3</version> + </dependency> + <!--geotools distance measurement--> + <dependency> + <groupId>org.geotools</groupId> + <artifactId>gt-referencing</artifactId> + <version>22.0</version> + </dependency> + <!--read shp files--> + <dependency> + <groupId>org.geotools</groupId> + <artifactId>gt-shapefile</artifactId> + <version>22.0</version> + </dependency> + <dependency> + <groupId>com.fasterxml.jackson.core</groupId> + 
<artifactId>jackson-databind</artifactId> + <version>2.13.3</version> + </dependency> + <dependency> + <groupId>com.vividsolutions</groupId> + <artifactId>jts</artifactId> + <version>1.13</version> + </dependency> + <dependency> + <groupId>org.osgeo</groupId> + <artifactId>proj4j</artifactId> + <version>0.1.0</version> + </dependency> + <dependency> + <groupId>org.apache.commons</groupId> + <artifactId>commons-imaging</artifactId> + <version>1.0-alpha2</version> + </dependency> + <dependency> + <groupId>org.citygml4j</groupId> + <artifactId>citygson</artifactId> + <version>1.1.3.8</version> + <scope>system</scope> + <systemPath>${project.basedir}/libs/citygson-1.1.3.8.jar</systemPath> + </dependency> + <dependency> + <groupId>cn.smartearth</groupId> + <artifactId>sem</artifactId> + <version>1.1.1</version> + <scope>system</scope> + <systemPath>${project.basedir}/libs/sem-1.1.1.jar</systemPath> + </dependency> + <dependency> + <groupId>com.google.code.gson</groupId> + <artifactId>gson</artifactId> + <version>2.8.2</version> + </dependency> + <dependency> + <groupId>junit</groupId> + <artifactId>junit</artifactId> + <scope>test</scope> + </dependency> + </dependencies> + <repositories> + <repository> + <id>osgeo</id> + <name>OSGeo Release Repository</name> + <url>https://repo.osgeo.org/repository/release/</url> + <snapshots><enabled>false</enabled></snapshots> + <releases><enabled>true</enabled></releases> + </repository> + <repository> + <id>osgeo-snapshot</id> + <name>OSGeo Snapshot Repository</name> + <url>https://repo.osgeo.org/repository/snapshot/</url> + <snapshots><enabled>true</enabled></snapshots> + <releases><enabled>false</enabled></releases> + </repository> + <repository> + <id>geotools</id> + <name>geotools</name> + <url>http://maven.icm.edu.pl/artifactory/repo/</url> + <releases> + <enabled>true</enabled> + </releases> + </repository> + </repositories> + <build> + <resources> + <resource> + <directory>src/main/resources</directory> + <includes> + <include>win32-x86-64/blosc.dll</include> + <include>**/*.yml</include> + <include>**/*.xml</include> + <include>**/*.json</include> + </includes> + <filtering>false</filtering> + </resource> + </resources> + <finalName>SimuServer</finalName> + <plugins> + <plugin> + <groupId>org.springframework.boot</groupId> + <artifactId>spring-boot-maven-plugin</artifactId> + <version>2.4.1</version> + <configuration> + <mainClass>com.se.nsl.SimuApplication</mainClass> + <!-- https://blog.csdn.net/wangjunjun2008/article/details/136951249 --> + <includeSystemScope>true</includeSystemScope> + <excludes> + <exclude> + <groupId>org.projectlombok</groupId> + <artifactId>lombok</artifactId> + </exclude> + </excludes> + </configuration> + </plugin> + <plugin> + <groupId>org.springframework.boot</groupId> + <artifactId>spring-boot-maven-plugin</artifactId> + <configuration> + <jvmArguments> + -Xms2048m -Xmx4096m -XX:+PrintGCDetails + </jvmArguments> + </configuration> + </plugin> + </plugins> + </build> +</project> diff --git a/src/main/java/com/se/nsl/SimuApplication.java b/src/main/java/com/se/nsl/SimuApplication.java new file mode 100644 index 0000000..d535d9d --- /dev/null +++ b/src/main/java/com/se/nsl/SimuApplication.java @@ -0,0 +1,20 @@ +package com.se.nsl; + +import org.springframework.boot.SpringApplication; +import org.springframework.boot.autoconfigure.SpringBootApplication; +import org.springframework.boot.builder.SpringApplicationBuilder; +import org.springframework.boot.web.servlet.support.SpringBootServletInitializer; + 
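+// SimuApplication starts the service from main(); extending SpringBootServletInitializer and
+// overriding configure() below additionally allows the application to be deployed as a WAR
+// in an external servlet container.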
+@SuppressWarnings("ALL") +@SpringBootApplication(scanBasePackages = {"com.se.nsl"}) +//@SpringBootApplication(exclude = { DataSourceAutoConfiguration.class, DataSourceTransactionManagerAutoConfiguration.class, DruidDataSourceAutoConfigure.class, HibernateJpaAutoConfiguration.class}) +public class SimuApplication extends SpringBootServletInitializer { + public static void main(String[] args) { + SpringApplication.run(SimuApplication.class, args); + } + + @Override + protected SpringApplicationBuilder configure(SpringApplicationBuilder springApplicationBuilder) { + return springApplicationBuilder.sources(SimuApplication.class); + } +} diff --git a/src/main/java/com/se/nsl/config/CorsConfig.java b/src/main/java/com/se/nsl/config/CorsConfig.java new file mode 100644 index 0000000..57ab9c5 --- /dev/null +++ b/src/main/java/com/se/nsl/config/CorsConfig.java @@ -0,0 +1,51 @@ +package com.se.nsl.config; + +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.http.HttpHeaders; +import org.springframework.http.HttpMethod; +import org.springframework.http.HttpStatus; +import org.springframework.http.server.reactive.ServerHttpRequest; +import org.springframework.http.server.reactive.ServerHttpResponse; +import org.springframework.web.cors.reactive.CorsUtils; +import org.springframework.web.server.ServerWebExchange; +import org.springframework.web.server.WebFilter; +import org.springframework.web.server.WebFilterChain; +import reactor.core.publisher.Mono; + +//@Configuration +@SuppressWarnings("ALL") +public class CorsConfig +{ + // private static final String ALLOWED_HEADERS = "X-Requested-With, Content-Type, Authorization, credential, X-XSRF-TOKEN, token, Admin-Token, App-Token" + private static final String ALLOWED_HEADERS = "*"; + private static final String ALLOWED_METHODS = "GET,POST,PUT,DELETE,OPTIONS,HEAD"; + private static final String ALLOWED_ORIGIN = "*"; + private static final String ALLOWED_EXPOSE = "*"; + private static final String MAX_AGE = "18000L"; + + @Bean + public WebFilter corsFilter() + { + return (ServerWebExchange ctx, WebFilterChain chain) -> { + ServerHttpRequest request = ctx.getRequest(); + if (CorsUtils.isCorsRequest(request)) + { + ServerHttpResponse response = ctx.getResponse(); + HttpHeaders headers = response.getHeaders(); + headers.add("Access-Control-Allow-Headers", ALLOWED_HEADERS); + headers.add("Access-Control-Allow-Methods", ALLOWED_METHODS); + headers.add("Access-Control-Allow-Origin", ALLOWED_ORIGIN); + headers.add("Access-Control-Expose-Headers", ALLOWED_EXPOSE); + headers.add("Access-Control-Max-Age", MAX_AGE); + headers.add("Access-Control-Allow-Credentials", "true"); + if (request.getMethod() == HttpMethod.OPTIONS) + { + response.setStatusCode(HttpStatus.OK); + return Mono.empty(); + } + } + return chain.filter(ctx); + }; + } +} diff --git a/src/main/java/com/se/nsl/config/InitConfig.java b/src/main/java/com/se/nsl/config/InitConfig.java new file mode 100644 index 0000000..fb701ef --- /dev/null +++ b/src/main/java/com/se/nsl/config/InitConfig.java @@ -0,0 +1,184 @@ +package com.se.nsl.config; + +import com.se.nsl.helper.CaffeineHelper; +import com.se.nsl.helper.GdalHelper; +import com.se.nsl.helper.WebHelper; +import lombok.extern.slf4j.Slf4j; +import org.gdal.gdal.Band; +import org.gdal.gdal.Dataset; +import org.gdal.gdal.Driver; +import org.gdal.gdal.gdal; +import org.gdal.gdalconst.gdalconstConstants; +import org.gdal.ogr.*; +import org.gdal.osr.SpatialReference; 
+import org.springframework.beans.factory.annotation.Value; +import org.springframework.boot.ApplicationArguments; +import org.springframework.boot.ApplicationRunner; +import org.springframework.core.env.Environment; +import org.springframework.stereotype.Component; + +import javax.annotation.Resource; +import java.util.HashMap; +import java.util.Map; + +@Slf4j +@Component +@SuppressWarnings("ALL") +public class InitConfig implements ApplicationRunner { + @Resource + Environment env; + + @Value("${server.port}") + String serverPort; + + @Value("${config.cacheTime}") + Integer cacheTime; + + @Value("${server.servlet.context-path}") + String contextPath; + + @Override + public void run(ApplicationArguments args) { + // noinspection AlibabaRemoveCommentedCode + try { + log.info("***************** 鍒濆鍖� GDAL *****************" + "\n"); + GdalHelper.init(env.getProperty("config.gdalPath")); + CaffeineHelper.init(cacheTime); + + String path = null != contextPath && contextPath.length() > 1 ? contextPath : ""; + log.info("API鏂囨。:http://localhost:" + serverPort + path + "/doc.html"); + log.info("API鏂囨。:http://{}:{}{}/doc.html", WebHelper.getHostIp(), serverPort, path); + + log.info("***************** 绯荤粺鍚姩瀹屾瘯 *****************" + "\n"); + + //log.info("***************** 璇诲彇tif鏂囦欢 *****************" + "\n"); + // 璇诲彇tif鏂囦欢 + //readTif("D:\\soft\\env\\tif\\srtm_12_03.tif"); + //log.info("***************** 璇诲彇tif鏂囦欢瀹屾瘯 *****************" + "\n"); + + //log.info("***************** 璇诲彇shp鏂囦欢 *****************" + "\n"); + // 璇诲彇shp鏂囦欢 + //readShp("D:\\soft\\env\\闆ㄩ噺绔欑偣鏁版嵁\\闆ㄩ噺绔欑偣_84\\闆ㄩ噺绔欑偣_84.shp"); + //log.info("***************** 璇诲彇shp鏂囦欢瀹屾瘯 *****************" + "\n"); + + + } catch (Exception ex) { + log.error(ex.getMessage(), ex); + } + } + + + + /** + * 璇诲彇tif鏂囦欢 + * + * @param fileName + */ + public static void readTif(String fileName) { + // 璇诲彇褰卞儚鏁版嵁 + Dataset dataset = gdal.Open(fileName, gdalconstConstants.GA_ReadOnly); + if (dataset == null) { + System.out.println("read fail!"); + return; + } + + // providing various methods for a format specific driver. + Driver driver = dataset.GetDriver(); + System.out.println("driver name: " + driver.getLongName()); + + // 璇诲彇褰卞儚淇℃伅 + int xSize = dataset.getRasterXSize(); + int ySzie = dataset.getRasterYSize(); + int rasterCount = dataset.getRasterCount(); + System.out.println("dataset xSize: " + xSize + ", ySzie = " + ySzie + ", rasterCount = " + rasterCount); + + Band band = dataset.GetRasterBand(1); + // the data type of the band. + int type = band.GetRasterDataType(); + System.out.println("data type = " + type + ", " + (type == gdalconstConstants.GDT_Byte)); + + // Frees the native resource associated to a Dataset object and close the file. 
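readTif above stops at driver and band metadata. Below is a short sketch of reading actual pixel values from band 1, one scanline at a time so the whole raster never sits in memory; it assumes the same GDAL Java bindings initialised by GdalHelper.init and that a double[] overload of Band.ReadRaster is available for the band's data type. Real code would also honour Band.GetNoDataValue before folding values into the statistics.

    // Minimal pixel-reading sketch for an already opened Dataset (uses the org.gdal.gdal imports above).
    public static void printBandStats(Dataset dataset) {
        Band band = dataset.GetRasterBand(1);
        int width = dataset.getRasterXSize();
        int height = dataset.getRasterYSize();
        double min = Double.MAX_VALUE, max = -Double.MAX_VALUE;
        double[] row = new double[width];
        for (int y = 0; y < height; y++) {
            band.ReadRaster(0, y, width, 1, row);   // read one scanline, converted to float64
            for (double v : row) {
                if (v < min) min = v;
                if (v > max) max = v;
            }
        }
        System.out.println("band 1 min = " + min + ", max = " + max);
    }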
+ dataset.delete(); + + gdal.GDALDestroyDriverManager(); + } + + /** + * 璇诲彇 Shapefile 鏂囦欢骞舵墦鍗板睘鎬у拰鍑犱綍鏁版嵁 + * + * @param shapefilePath SHP 鏂囦欢璺緞 + */ + + public void readShp(String strVectorFile) { + + // 娉ㄥ唽鎵�鏈夌殑椹卞姩 + ogr.RegisterAll(); + // 涓轰簡鏀寔涓枃璺緞锛岃娣诲姞涓嬮潰杩欏彞浠g爜 + gdal.SetConfigOption("GDAL_FILENAME_IS_UTF8", "YES"); + // 涓轰簡浣垮睘鎬ц〃瀛楁鏀寔涓枃锛岃娣诲姞涓嬮潰杩欏彞 + gdal.SetConfigOption("SHAPE_ENCODING", "CP936"); + // 璇诲彇鏁版嵁锛岃繖閲屼互ESRI鐨剆hp鏂囦欢涓轰緥 + String strDriverName = "ESRI Shapefile"; + // 鍒涘缓涓�涓枃浠讹紝鏍规嵁strDriverName鎵╁睍鍚嶈嚜鍔ㄥ垽鏂┍鍔ㄧ被鍨� + + org.gdal.ogr.Driver oDriver = ogr.GetDriverByName(strDriverName); + + if (oDriver == null) { + System.out.println(strDriverName + " 椹卞姩涓嶅彲鐢紒\n"); + return; + } + DataSource dataSource = oDriver.Open(strVectorFile); + //Layer layer = dataSource.GetLayer("test"); + Layer layer = dataSource.GetLayer(0); + + for (int i = 0; i < dataSource.GetLayerCount(); i++) { + Layer layerIdx = dataSource.GetLayer(i); + System.out.println("鍥惧眰鍚嶇О锛�<==>" + layerIdx.GetName()); + } + + String layerName = layer.GetName(); + System.out.println("鍥惧眰鍚嶇О锛�" + layerName); + SpatialReference spatialReference = layer.GetSpatialRef(); + //System.out.println(spatialReference); + System.out.println("绌洪棿鍙傝�冨潗鏍囩郴锛�" + spatialReference.GetAttrValue("AUTHORITY", 0) + + spatialReference.GetAttrValue("AUTHORITY", 1)); + + double[] layerExtent = layer.GetExtent(); + + System.out.println("鍥惧眰鑼冨洿锛歮inx:" + layerExtent[0] + ",maxx:" + layerExtent[1] + ",miny:" + layerExtent[2] + ",maxy:" + layerExtent[3]); + + + FeatureDefn featureDefn = layer.GetLayerDefn(); + + int fieldCount = featureDefn.GetFieldCount(); + + Map<String, String> fieldMap = new HashMap<String, String>(); + for (int i = 0; i < fieldCount; i++) { + FieldDefn fieldDefn = featureDefn.GetFieldDefn(i); + // 寰楀埌灞炴�у瓧娈电被鍨� + int fieldType = fieldDefn.GetFieldType(); + String fieldTypeName = fieldDefn.GetFieldTypeName(fieldType); + // 寰楀埌灞炴�у瓧娈靛悕绉� + String fieldName = fieldDefn.GetName(); + fieldMap.put(fieldTypeName, fieldName); + } + System.out.println(); + System.out.println("fileMap:"); + System.out.println(fieldMap); + + System.out.println(layer.GetFeature(1).GetGeometryRef().ExportToJson()); + System.out.println(layer.GetFeature(2).GetGeometryRef().ExportToJson()); + System.out.println(layer.GetFeature(3).GetGeometryRef().ExportToJson()); + + for (int i = 0; i < 12; i++) { + Feature feature = layer.GetFeature(i); + Object[] arr = fieldMap.values().toArray(); + for (int k = 0; k < arr.length; k++) { + String fvalue = feature.GetFieldAsString(arr[k].toString()); + System.out.println(" 灞炴�у悕绉�:" + arr[k].toString() + ",灞炴�у��:" + fvalue); + } + } + } + + +} diff --git a/src/main/java/com/se/nsl/config/Knife4jConfig.java b/src/main/java/com/se/nsl/config/Knife4jConfig.java new file mode 100644 index 0000000..01b48a9 --- /dev/null +++ b/src/main/java/com/se/nsl/config/Knife4jConfig.java @@ -0,0 +1,68 @@ +package com.se.nsl.config; + +import com.github.xiaoymin.knife4j.spring.annotations.EnableKnife4j; +import io.swagger.annotations.ApiOperation; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.web.servlet.config.annotation.ResourceHandlerRegistry; +import org.springframework.web.servlet.config.annotation.WebMvcConfigurationSupport; +import springfox.documentation.builders.ApiInfoBuilder; +import springfox.documentation.builders.PathSelectors; +import 
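The readShp method above fetches features by fixed index (GetFeature(1) through GetFeature(3), then a loop up to 12), which fails as soon as a layer has fewer features or non-sequential FIDs. The usual OGR pattern iterates the layer cursor instead; a small sketch using the org.gdal.ogr classes already imported in InitConfig:

    // Iterate every feature of a layer without assuming FID values (sketch).
    public static void dumpLayer(Layer layer) {
        layer.ResetReading();                           // rewind the feature cursor
        Feature feature;
        while ((feature = layer.GetNextFeature()) != null) {
            Geometry geom = feature.GetGeometryRef();
            if (geom != null) {
                System.out.println(geom.ExportToJson());
            }
            for (int i = 0; i < feature.GetFieldCount(); i++) {
                System.out.println(feature.GetFieldDefnRef(i).GetName() + " = " + feature.GetFieldAsString(i));
            }
            feature.delete();                           // release the native feature object
        }
    }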
springfox.documentation.builders.RequestHandlerSelectors; +import springfox.documentation.service.ApiInfo; +import springfox.documentation.service.Contact; +import springfox.documentation.spi.DocumentationType; +import springfox.documentation.spring.web.plugins.Docket; + +@Configuration +@EnableKnife4j +@SuppressWarnings("ALL") +public class Knife4jConfig extends WebMvcConfigurationSupport { + @Value("${server.port}") + String serverPort; + + @Value("${knife4j.enabled}") + private boolean enabled; + + @Value("${knife4j.pathMapping}") + private String pathMapping; + + @Value("${server.servlet.context-path}") + String contextPath; + + @Override + public void addResourceHandlers(ResourceHandlerRegistry registry) { + registry.addResourceHandler("doc.html") + .addResourceLocations("classpath:/META-INF/resources/"); + + registry.addResourceHandler("swagger-ui.html") + .addResourceLocations("classpath:/META-INF/resources/"); + registry.addResourceHandler("/webjars/**") + .addResourceLocations("classpath:/META-INF/resources/webjars/"); + } + + @Bean + public Docket createRestApi() { + return new Docket(new DocumentationType("openApi", "3.0")) + .enable(enabled) + .apiInfo(apiInfo()) + .groupName("鏈嶅姟") + .select() + .apis(RequestHandlerSelectors.withMethodAnnotation(ApiOperation.class)) + // .apis(RequestHandlerSelectors.basePackage("com.cn.project.tool.swagger")) + .paths(PathSelectors.any()) + .build() + // .securitySchemes(securitySchemes()) + .pathMapping(pathMapping); + } + + private ApiInfo apiInfo() { + return new ApiInfoBuilder() + .description("鎺ュ彛鏂囨。") + .contact(new Contact("WuWeiwei", "http://127.0.0.1:" + serverPort + contextPath + "/doc.html", "252740454@qq.com")) + .version("0.2") + .title("鍐呮稘浠跨湡鏈嶅姟鏈嶅姟鎺ュ彛鏂囨。") + .build(); + } +} diff --git a/src/main/java/com/se/nsl/config/MybatisPlusConfig.java b/src/main/java/com/se/nsl/config/MybatisPlusConfig.java new file mode 100644 index 0000000..32f5c7f --- /dev/null +++ b/src/main/java/com/se/nsl/config/MybatisPlusConfig.java @@ -0,0 +1,38 @@ +package com.se.nsl.config; + +import com.baomidou.mybatisplus.annotation.DbType; +import com.baomidou.mybatisplus.autoconfigure.ConfigurationCustomizer; +import com.baomidou.mybatisplus.core.MybatisConfiguration; +import com.baomidou.mybatisplus.extension.plugins.MybatisPlusInterceptor; +import com.baomidou.mybatisplus.extension.plugins.inner.PaginationInnerInterceptor; +import org.apache.ibatis.type.JdbcType; +import org.mybatis.spring.annotation.MapperScan; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.transaction.annotation.EnableTransactionManagement; + +@EnableTransactionManagement +@Configuration +@MapperScan("com.se.nsl.mapper") +@SuppressWarnings("ALL") +public class MybatisPlusConfig { + @Bean + public MybatisPlusInterceptor mybatisPlusInterceptor() { + MybatisPlusInterceptor interceptor = new MybatisPlusInterceptor(); + interceptor.addInnerInterceptor(new PaginationInnerInterceptor(DbType.POSTGRE_SQL)); + return interceptor; + } + + @Bean + public ConfigurationCustomizer configurationCustomizer() { + return new ConfigurationCustomizer() { + @Override + public void customize(MybatisConfiguration configuration) { + configuration.setCacheEnabled(true); + configuration.setMapUnderscoreToCamelCase(true); + configuration.setCallSettersOnNulls(true); + configuration.setJdbcTypeForNull(JdbcType.NULL); + } + }; + } +} diff --git a/src/main/java/com/se/nsl/config/PropertiesConfig.java 
b/src/main/java/com/se/nsl/config/PropertiesConfig.java new file mode 100644 index 0000000..5aa4cce --- /dev/null +++ b/src/main/java/com/se/nsl/config/PropertiesConfig.java @@ -0,0 +1,332 @@ +package com.se.nsl.config; + +import org.springframework.boot.context.properties.ConfigurationProperties; +import org.springframework.context.annotation.Configuration; + +import java.util.List; + +@Configuration +@SuppressWarnings("ALL") +@ConfigurationProperties(prefix = "config") +public class PropertiesConfig { + private String ver; + + private String inPath; + + private String outPath; + + private String host; + + private String user; + + private String pwd; + + private String dbName; + + private Integer pageSize; + + private List<String> layerNames; + + private List<String> shpNames; + + private String junctionName; + + private String junctionFilter; + + private String demName; + + private String demType; + + private String demFile; + + private String zoneName; + + private String barrierName; + + //@Value("#{'${config.sysFields}'}") + private List<String> sysFields; + + private String raingage; + + private String rainStation; + + private Double rainPeriod; + + private String flowUnits; + + private String solverBat; + + private String sww2tifBat; + + private List<Integer> sizes; + + private String terrainFile; + + private String buildingFile; + + private String buildingKey; + + private String waterPath; + + private String flowPath; + + private boolean copyTif; + + private String tifPath; + + public String getVer() { + return ver; + } + + public void setVer(String ver) { + this.ver = ver; + } + + public String getInPath() { + return inPath; + } + + public void setInPath(String inPath) { + this.inPath = inPath; + } + + public String getOutPath() { + return outPath; + } + + public void setOutPath(String outPath) { + this.outPath = outPath; + } + + public String getHost() { + return host; + } + + public void setHost(String host) { + this.host = host; + } + + public String getUser() { + return user; + } + + public void setUser(String user) { + this.user = user; + } + + public String getPwd() { + return pwd; + } + + public void setPwd(String pwd) { + this.pwd = pwd; + } + + public String getDbName() { + return dbName; + } + + public void setDbName(String dbName) { + this.dbName = dbName; + } + + public Integer getPageSize() { + return pageSize; + } + + public void setPageSize(Integer pageSize) { + this.pageSize = pageSize; + } + + public List<String> getLayerNames() { + return layerNames; + } + + public void setLayerNames(List<String> layerNames) { + this.layerNames = layerNames; + } + + public List<String> getShpNames() { + return shpNames; + } + + public void setShpNames(List<String> shpNames) { + this.shpNames = shpNames; + } + + public String getDemName() { + return demName; + } + + public void setDemName(String demName) { + this.demName = demName; + } + + public String getDemType() { + return demType; + } + + public void setDemType(String demType) { + this.demType = demType; + } + + public String getDemFile() { + return demFile; + } + + public void setDemFile(String demFile) { + this.demFile = demFile; + } + + public List<String> getSysFields() { + return sysFields; + } + + public void setSysFields(List<String> sysFields) { + this.sysFields = sysFields; + } + + public String getJunctionName() { + return junctionName; + } + + public void setJunctionName(String junctionName) { + this.junctionName = junctionName; + } + + public String getJunctionFilter() { + return junctionFilter; + } + + public void 
setJunctionFilter(String junctionFilter) { + this.junctionFilter = junctionFilter; + } + + public String getZoneName() { + return zoneName; + } + + public void setZoneName(String zoneName) { + this.zoneName = zoneName; + } + + public String getRaingage() { + return raingage; + } + + public void setRaingage(String raingage) { + this.raingage = raingage; + } + + public String getFlowUnits() { + return flowUnits; + } + + public void setFlowUnits(String flowUnits) { + this.flowUnits = flowUnits; + } + + public String getBarrierName() { + return barrierName; + } + + public void setBarrierName(String barrierName) { + this.barrierName = barrierName; + } + + public String getSolverBat() { + return solverBat; + } + + public void setSolverBat(String solverBat) { + this.solverBat = solverBat; + } + + public String getRainStation() { + return rainStation; + } + + public void setRainStation(String rainStation) { + this.rainStation = rainStation; + } + + public Double getRainPeriod() { + return rainPeriod; + } + + public void setRainPeriod(Double rainPeriod) { + this.rainPeriod = rainPeriod; + } + + public List<Integer> getSizes() { + return sizes; + } + + public void setSizes(List<Integer> sizes) { + this.sizes = sizes; + } + + public String getTerrainFile() { + return terrainFile; + } + + public void setTerrainFile(String terrainFile) { + this.terrainFile = terrainFile; + } + + public String getBuildingFile() { + return buildingFile; + } + + public void setBuildingFile(String buildingFile) { + this.buildingFile = buildingFile; + } + + public String getWaterPath() { + return waterPath; + } + + public void setWaterPath(String waterPath) { + this.waterPath = waterPath; + } + + public String getFlowPath() { + return flowPath; + } + + public void setFlowPath(String flowPath) { + this.flowPath = flowPath; + } + + public String getSww2tifBat() { + return sww2tifBat; + } + + public void setSww2tifBat(String sww2tifBat) { + this.sww2tifBat = sww2tifBat; + } + + public String getBuildingKey() { + return buildingKey; + } + + public void setBuildingKey(String buildingKey) { + this.buildingKey = buildingKey; + } + + public boolean getCopyTif() { + return copyTif; + } + + public void setCopyTif(boolean copyTif) { + this.copyTif = copyTif; + } + + public String getTifPath() { + return tifPath; + } + + public void setTifPath(String tifPath) { + this.tifPath = tifPath; + } +} diff --git a/src/main/java/com/se/nsl/config/RestTemplateConfig.java b/src/main/java/com/se/nsl/config/RestTemplateConfig.java new file mode 100644 index 0000000..6d71ed2 --- /dev/null +++ b/src/main/java/com/se/nsl/config/RestTemplateConfig.java @@ -0,0 +1,71 @@ +package com.se.nsl.config; + +import org.apache.http.client.HttpClient; +import org.apache.http.impl.client.HttpClientBuilder; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.http.client.ClientHttpRequestFactory; +import org.springframework.http.client.HttpComponentsClientHttpRequestFactory; +import org.springframework.http.client.SimpleClientHttpRequestFactory; +import org.springframework.http.converter.HttpMessageConverter; +import org.springframework.http.converter.StringHttpMessageConverter; +import org.springframework.web.client.RestTemplate; + +import java.nio.charset.StandardCharsets; +import java.util.List; + +@Configuration 
+@SuppressWarnings("ALL") +public class RestTemplateConfig { + @Value("${remote.maxTotalConnect:0}") + private int maxTotalConnect; + + @Value("${remote.maxConnectPerRoute:1000}") + private int maxConnectPerRoute; + + @Value("${remote.connectTimeout:5000}") + private int connectTimeout; + + @Value("${remote.readTimeout:30000}") + private int readTimeout; + + private ClientHttpRequestFactory createFactory() { + if (this.maxTotalConnect <= 0) { + SimpleClientHttpRequestFactory factory = new SimpleClientHttpRequestFactory(); + factory.setConnectTimeout(this.connectTimeout); + factory.setReadTimeout(this.readTimeout); + return factory; + } + + HttpClient httpClient = HttpClientBuilder.create().setMaxConnTotal(this.maxTotalConnect).setMaxConnPerRoute(this.maxConnectPerRoute).build(); + + HttpComponentsClientHttpRequestFactory factory = new HttpComponentsClientHttpRequestFactory(httpClient); + factory.setConnectTimeout(this.connectTimeout); + factory.setReadTimeout(this.readTimeout); + + return factory; + } + + @Bean + @ConditionalOnMissingBean(RestTemplate.class) + public RestTemplate getRestTemplate() { + RestTemplate restTemplate = new RestTemplate(this.createFactory()); + List<HttpMessageConverter<?>> converterList = restTemplate.getMessageConverters(); + + HttpMessageConverter<?> converterTarget = null; + for (HttpMessageConverter<?> item : converterList) { + if (StringHttpMessageConverter.class == item.getClass()) { + converterTarget = item; + break; + } + } + if (null != converterTarget) { + converterList.remove(converterTarget); + } + converterList.add(1, new StringHttpMessageConverter(StandardCharsets.UTF_8)); + + return restTemplate; + } +} diff --git a/src/main/java/com/se/nsl/config/WebConfig.java b/src/main/java/com/se/nsl/config/WebConfig.java new file mode 100644 index 0000000..9e3fbbc --- /dev/null +++ b/src/main/java/com/se/nsl/config/WebConfig.java @@ -0,0 +1,66 @@ +package com.se.nsl.config; + +import com.alibaba.fastjson.serializer.SerializerFeature; +import com.alibaba.fastjson.support.config.FastJsonConfig; +import com.alibaba.fastjson.support.spring.FastJsonHttpMessageConverter; +import org.springframework.context.annotation.Configuration; +import org.springframework.http.MediaType; +import org.springframework.http.converter.HttpMessageConverter; +import org.springframework.web.servlet.config.annotation.CorsRegistry; +import org.springframework.web.servlet.config.annotation.WebMvcConfigurationSupport; + +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.List; + +@Configuration +@SuppressWarnings("ALL") +public class WebConfig extends WebMvcConfigurationSupport { + @Override + protected void addCorsMappings(CorsRegistry registry) { + registry.addMapping("/**") + //.allowCredentials(true) + .allowedHeaders("*") + .allowedOrigins("*") + .allowedMethods("GET", "POST", "PUT", "DELETE", "OPTIONS") + .allowCredentials(true) + .maxAge(3600); + } + + @Override + public void configureMessageConverters(List<HttpMessageConverter<?>> converters) { + FastJsonHttpMessageConverter converter = new FastJsonHttpMessageConverter(); + FastJsonConfig config = new FastJsonConfig(); + config.setSerializerFeatures( + SerializerFeature.WriteNullListAsEmpty, + SerializerFeature.WriteMapNullValue, + SerializerFeature.WriteNullStringAsEmpty, + SerializerFeature.WriteNullNumberAsZero, + SerializerFeature.WriteNullBooleanAsFalse); + //SerializerFeature.PrettyFormat); + + List<MediaType> supportedMediaTypes = new ArrayList<>(); + 
supportedMediaTypes.add(MediaType.APPLICATION_JSON); + // supportedMediaTypes.add(MediaType.APPLICATION_JSON_UTF8) + supportedMediaTypes.add(MediaType.APPLICATION_ATOM_XML); + supportedMediaTypes.add(MediaType.APPLICATION_FORM_URLENCODED); + supportedMediaTypes.add(MediaType.APPLICATION_OCTET_STREAM); + supportedMediaTypes.add(MediaType.APPLICATION_PDF); + supportedMediaTypes.add(MediaType.APPLICATION_RSS_XML); + supportedMediaTypes.add(MediaType.APPLICATION_XHTML_XML); + supportedMediaTypes.add(MediaType.APPLICATION_XML); + supportedMediaTypes.add(MediaType.IMAGE_GIF); + supportedMediaTypes.add(MediaType.IMAGE_JPEG); + supportedMediaTypes.add(MediaType.IMAGE_PNG); + supportedMediaTypes.add(MediaType.TEXT_EVENT_STREAM); + supportedMediaTypes.add(MediaType.TEXT_HTML); + supportedMediaTypes.add(MediaType.TEXT_MARKDOWN); + supportedMediaTypes.add(MediaType.TEXT_PLAIN); + supportedMediaTypes.add(MediaType.TEXT_XML); + converter.setSupportedMediaTypes(supportedMediaTypes); + + converter.setFastJsonConfig(config); + converter.setDefaultCharset(StandardCharsets.UTF_8); + converters.add(converter); + } +} diff --git a/src/main/java/com/se/nsl/constant/CacheConstants.java b/src/main/java/com/se/nsl/constant/CacheConstants.java new file mode 100644 index 0000000..79061ee --- /dev/null +++ b/src/main/java/com/se/nsl/constant/CacheConstants.java @@ -0,0 +1,48 @@ +package com.se.nsl.constant; + +/** + * 缂撳瓨鐨刱ey 甯搁噺 + * + * @author ruoyi + */ +public class CacheConstants { + /** + * 鐧诲綍鐢ㄦ埛 redis key + */ + public static final String LOGIN_TOKEN_KEY = "login_tokens:"; + + /** + * 楠岃瘉鐮� redis key + */ + public static final String CAPTCHA_CODE_KEY = "captcha_codes:"; + + /** + * 鍙傛暟绠$悊 cache key + */ + public static final String SYS_CONFIG_KEY = "sys_config:"; + + /** + * 瀛楀吀绠$悊 cache key + */ + public static final String SYS_DICT_KEY = "sys_dict:"; + + /** + * 闃查噸鎻愪氦 redis key + */ + public static final String REPEAT_SUBMIT_KEY = "repeat_submit:"; + + /** + * 闄愭祦 redis key + */ + public static final String RATE_LIMIT_KEY = "rate_limit:"; + + /** + * 鐧诲綍璐︽埛瀵嗙爜閿欒娆℃暟 redis key + */ + public static final String PWD_ERR_CNT_KEY = "pwd_err_cnt:"; + + /** + * 鐢ㄦ埛鑷畾涔夌紦瀛� redis key + */ + public static final String USER_CACHE_KEY = "user_cache_key:"; +} diff --git a/src/main/java/com/se/nsl/constant/RedisCache.java b/src/main/java/com/se/nsl/constant/RedisCache.java new file mode 100644 index 0000000..12ddb00 --- /dev/null +++ b/src/main/java/com/se/nsl/constant/RedisCache.java @@ -0,0 +1,243 @@ +package com.se.nsl.constant; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.data.redis.core.BoundSetOperations; +import org.springframework.data.redis.core.HashOperations; +import org.springframework.data.redis.core.RedisTemplate; +import org.springframework.data.redis.core.ValueOperations; +import org.springframework.stereotype.Component; + +import java.util.*; +import java.util.concurrent.TimeUnit; + +/** + * spring redis 宸ュ叿绫� + * + * @author ruoyi + **/ +@SuppressWarnings(value = {"unchecked", "rawtypes"}) +@Component +public class RedisCache { + @Autowired + public RedisTemplate redisTemplate; + + /** + * 缂撳瓨鍩烘湰鐨勫璞★紝Integer銆丼tring銆佸疄浣撶被绛� + * + * @param key 缂撳瓨鐨勯敭鍊� + * @param value 缂撳瓨鐨勫�� + */ + public <T> void setCacheObject(final String key, final T value) { + redisTemplate.opsForValue().set(key, value); + } + + /** + * 缂撳瓨鍩烘湰鐨勫璞★紝Integer銆丼tring銆佸疄浣撶被绛� + * + * @param key 缂撳瓨鐨勯敭鍊� + * @param value 缂撳瓨鐨勫�� + * @param timeout 鏃堕棿 + * @param timeUnit 鏃堕棿棰楃矑搴� + */ + 
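WebConfig above swaps in a FastJsonHttpMessageConverter whose SerializerFeatures rewrite null fields on the way out of every controller. A small sketch of the effect using fastjson's static JSON.toJSONString with the same feature set; the Sample class is made up for illustration, and the exact field order of the output can differ between fastjson versions.

    import com.alibaba.fastjson.JSON;
    import com.alibaba.fastjson.serializer.SerializerFeature;
    import java.util.List;

    public class NullHandlingSketch {
        public static class Sample {
            public String name;         // null -> ""
            public Integer count;       // null -> 0
            public Boolean ok;          // null -> false
            public List<String> items;  // null -> []
        }

        public static void main(String[] args) {
            String json = JSON.toJSONString(new Sample(),
                    SerializerFeature.WriteNullListAsEmpty,
                    SerializerFeature.WriteMapNullValue,
                    SerializerFeature.WriteNullStringAsEmpty,
                    SerializerFeature.WriteNullNumberAsZero,
                    SerializerFeature.WriteNullBooleanAsFalse);
            // prints something like {"count":0,"items":[],"name":"","ok":false}
            System.out.println(json);
        }
    }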
public <T> void setCacheObject(final String key, final T value, final Integer timeout, final TimeUnit timeUnit) { + redisTemplate.opsForValue().set(key, value, timeout, timeUnit); + } + + /** + * 璁剧疆鏈夋晥鏃堕棿 + * + * @param key Redis閿� + * @param timeout 瓒呮椂鏃堕棿 + * @return true=璁剧疆鎴愬姛锛沠alse=璁剧疆澶辫触 + */ + public boolean expire(final String key, final long timeout) { + return expire(key, timeout, TimeUnit.SECONDS); + } + + /** + * 璁剧疆鏈夋晥鏃堕棿 + * + * @param key Redis閿� + * @param timeout 瓒呮椂鏃堕棿 + * @param unit 鏃堕棿鍗曚綅 + * @return true=璁剧疆鎴愬姛锛沠alse=璁剧疆澶辫触 + */ + public boolean expire(final String key, final long timeout, final TimeUnit unit) { + return redisTemplate.expire(key, timeout, unit); + } + + /** + * 鑾峰彇鏈夋晥鏃堕棿 + * + * @param key Redis閿� + * @return 鏈夋晥鏃堕棿 + */ + public long getExpire(final String key) { + return redisTemplate.getExpire(key); + } + + /** + * 鍒ゆ柇 key鏄惁瀛樺湪 + * + * @param key 閿� + * @return true 瀛樺湪 false涓嶅瓨鍦� + */ + public Boolean hasKey(String key) { + return redisTemplate.hasKey(key); + } + + /** + * 鑾峰緱缂撳瓨鐨勫熀鏈璞°�� + * + * @param key 缂撳瓨閿�� + * @return 缂撳瓨閿�煎搴旂殑鏁版嵁 + */ + public <T> T getCacheObject(final String key) { + ValueOperations<String, T> operation = redisTemplate.opsForValue(); + return operation.get(key); + } + + /** + * 鍒犻櫎鍗曚釜瀵硅薄 + * + * @param key + */ + public boolean deleteObject(final String key) { + return redisTemplate.delete(key); + } + + /** + * 鍒犻櫎闆嗗悎瀵硅薄 + * + * @param collection 澶氫釜瀵硅薄 + * @return + */ + public boolean deleteObject(final Collection collection) { + return redisTemplate.delete(collection) > 0; + } + + /** + * 缂撳瓨List鏁版嵁 + * + * @param key 缂撳瓨鐨勯敭鍊� + * @param dataList 寰呯紦瀛樼殑List鏁版嵁 + * @return 缂撳瓨鐨勫璞� + */ + public <T> long setCacheList(final String key, final List<T> dataList) { + Long count = redisTemplate.opsForList().rightPushAll(key, dataList); + return count == null ? 
0 : count; + } + + /** + * 鑾峰緱缂撳瓨鐨刲ist瀵硅薄 + * + * @param key 缂撳瓨鐨勯敭鍊� + * @return 缂撳瓨閿�煎搴旂殑鏁版嵁 + */ + public <T> List<T> getCacheList(final String key) { + return redisTemplate.opsForList().range(key, 0, -1); + } + + /** + * 缂撳瓨Set + * + * @param key 缂撳瓨閿�� + * @param dataSet 缂撳瓨鐨勬暟鎹� + * @return 缂撳瓨鏁版嵁鐨勫璞� + */ + public <T> BoundSetOperations<String, T> setCacheSet(final String key, final Set<T> dataSet) { + BoundSetOperations<String, T> setOperation = redisTemplate.boundSetOps(key); + Iterator<T> it = dataSet.iterator(); + while (it.hasNext()) { + setOperation.add(it.next()); + } + return setOperation; + } + + /** + * 鑾峰緱缂撳瓨鐨剆et + * + * @param key + * @return + */ + public <T> Set<T> getCacheSet(final String key) { + return redisTemplate.opsForSet().members(key); + } + + /** + * 缂撳瓨Map + * + * @param key + * @param dataMap + */ + public <T> void setCacheMap(final String key, final Map<String, T> dataMap) { + if (dataMap != null) { + redisTemplate.opsForHash().putAll(key, dataMap); + } + } + + /** + * 鑾峰緱缂撳瓨鐨凪ap + * + * @param key + * @return + */ + public <T> Map<String, T> getCacheMap(final String key) { + return redisTemplate.opsForHash().entries(key); + } + + /** + * 寰�Hash涓瓨鍏ユ暟鎹� + * + * @param key Redis閿� + * @param hKey Hash閿� + * @param value 鍊� + */ + public <T> void setCacheMapValue(final String key, final String hKey, final T value) { + redisTemplate.opsForHash().put(key, hKey, value); + } + + /** + * 鑾峰彇Hash涓殑鏁版嵁 + * + * @param key Redis閿� + * @param hKey Hash閿� + * @return Hash涓殑瀵硅薄 + */ + public <T> T getCacheMapValue(final String key, final String hKey) { + HashOperations<String, String, T> opsForHash = redisTemplate.opsForHash(); + return opsForHash.get(key, hKey); + } + + /** + * 鑾峰彇澶氫釜Hash涓殑鏁版嵁 + * + * @param key Redis閿� + * @param hKeys Hash閿泦鍚� + * @return Hash瀵硅薄闆嗗悎 + */ + public <T> List<T> getMultiCacheMapValue(final String key, final Collection<Object> hKeys) { + return redisTemplate.opsForHash().multiGet(key, hKeys); + } + + /** + * 鍒犻櫎Hash涓殑鏌愭潯鏁版嵁 + * + * @param key Redis閿� + * @param hKey Hash閿� + * @return 鏄惁鎴愬姛 + */ + public boolean deleteCacheMapValue(final String key, final String hKey) { + return redisTemplate.opsForHash().delete(key, hKey) > 0; + } + + /** + * 鑾峰緱缂撳瓨鐨勫熀鏈璞″垪琛� + * + * @param pattern 瀛楃涓插墠缂� + * @return 瀵硅薄鍒楄〃 + */ + public Collection<String> keys(final String pattern) { + return redisTemplate.keys(pattern); + } +} diff --git a/src/main/java/com/se/nsl/controller/BaseController.java b/src/main/java/com/se/nsl/controller/BaseController.java new file mode 100644 index 0000000..0177121 --- /dev/null +++ b/src/main/java/com/se/nsl/controller/BaseController.java @@ -0,0 +1,43 @@ +package com.se.nsl.controller; + +import com.se.nsl.domain.vo.R; +import lombok.extern.slf4j.Slf4j; +import org.springframework.http.HttpStatus; + +@Slf4j +@SuppressWarnings("ALL") +public class BaseController { + public <T> R<T> success(T data) { + return new R<T>(HttpStatus.OK, data); + } + + public <T> R<T> success(T data, String msg) { + return new R<T>(HttpStatus.OK, data, msg); + } + + public <T> R<T> success(T data, long count) { + return new R<T>(HttpStatus.OK, data, count); + } + + public <T> R<T> success(T data, long count, String msg) { + return new R<T>(HttpStatus.OK, data, count, msg); + } + + public <T> R<T> fail(T data) { + return new R<T>(HttpStatus.INTERNAL_SERVER_ERROR, data); + } + + public <T> R<T> fail(String msg, T data) { + return new R<T>(HttpStatus.INTERNAL_SERVER_ERROR, data, msg); + } + + public <T> R<T> fail(Exception ex) { + 
log.error(ex.getMessage(), ex); + return new R<T>(HttpStatus.INTERNAL_SERVER_ERROR, null, ex.getMessage()); + } + + public <T> R<T> fail(Exception ex, T data) { + log.error(ex.getMessage(), ex); + return new R<T>(HttpStatus.INTERNAL_SERVER_ERROR, data, ex.getMessage()); + } +} diff --git a/src/main/java/com/se/nsl/controller/BizH5FileParserController.java b/src/main/java/com/se/nsl/controller/BizH5FileParserController.java new file mode 100644 index 0000000..a3d4fec --- /dev/null +++ b/src/main/java/com/se/nsl/controller/BizH5FileParserController.java @@ -0,0 +1,71 @@ +package com.se.nsl.controller; + +import com.se.nsl.domain.vo.R; +import com.se.nsl.service.IBizH5FileParserService; +import hdf.hdf5lib.exceptions.HDF5LibraryException; +import io.swagger.annotations.Api; +import io.swagger.annotations.ApiOperation; +import lombok.extern.slf4j.Slf4j; +import org.springframework.web.bind.annotation.*; + +import javax.annotation.Resource; + +/** + * Bize H5 鏂囦欢瑙f瀽鍣ㄦ帶鍒跺櫒 + * + * @author xingjinshuang@smartearth.cn + * @date 2024/12/06 + */ +@Api(tags = "H5 鏂囦欢瑙f瀽鍣ㄦ帶鍒跺櫒") +@Slf4j +@CrossOrigin(origins = "*", maxAge = 3600) +@RestController +@RequestMapping("/h5") +public class BizH5FileParserController extends BaseController { + + + @Resource + private IBizH5FileParserService bizH5FileParserService; + + /** + * HDF5鏂囦欢 璇存槑 + */ + @ApiOperation("HDF5鏂囦欢 璇存槑") + @GetMapping("/hdf5-illustrate") + public R<Object> hdf5FileIllustrate() throws HDF5LibraryException { + String content = "HDF5锛圚ierarchical Data Format version 5锛夋槸涓�绉嶇敤浜庡瓨鍌ㄥ拰缁勭粐澶ч噺鏁版嵁鐨勬枃浠舵牸寮忥紝瀹冩敮鎸侀珮鏁堝湴瀛樺偍鍜屽鐞嗗ぇ瑙勬ā绉戝鏁版嵁鐨勮兘鍔涖�侶DF5 鏄竴绉嶇伒娲荤殑鏁版嵁妯″瀷锛屽彲浠ュ瓨鍌ㄥ绉嶆暟鎹被鍨嬶紝鍖呮嫭鏁板�兼暟鎹�佸浘鍍忋�佽〃鏍肩瓑绛夛紝骞朵笖鍙互鎸夌収灞傜骇缁撴瀯缁勭粐鏁版嵁銆俓n" + + "HDF5 鏂囦欢锛堥�氬父浣跨敤 .h5 鎵╁睍鍚嶏級鏄竴绉嶄簩杩涘埗鏂囦欢鏍煎紡锛屽畠鍏锋湁浠ヤ笅鐗圭偣锛歕n" + + "1.鐏垫椿鎬э細 HDF5 鏂囦欢鏍煎紡闈炲父鐏垫椿锛屽彲浠ュ瓨鍌ㄥ悇绉嶄笉鍚岀被鍨嬪拰澶у皬鐨勬暟鎹紝浠庣畝鍗曠殑鏁板�兼暟缁勫埌澶嶆潅鐨勫缁存暟鎹泦鍚堛�俓n" + + "2.灞傜骇缁撴瀯锛� HDF5 鏂囦欢鏄互灞傜骇缁撴瀯缁勭粐鐨勶紝鍙互鍦ㄦ枃浠朵腑鍒涘缓澶氫釜鏁版嵁闆嗭紙Datasets锛夊拰缁勶紙Groups锛夛紝鍏佽鐢ㄦ埛鏍规嵁闇�姹傜粍缁囧拰绠$悊鏁版嵁銆俓n" + + "3.澶氱鏁版嵁绫诲瀷锛� HDF5 鏀寔澶氱鏁版嵁绫诲瀷锛屽寘鎷爣閲忋�佹暟缁勩�佸瓧绗︿覆銆佽〃鏍肩瓑绛夛紝骞朵笖鍙互浣跨敤涓嶅悓鐨勫帇缂╂妧鏈鏁版嵁杩涜鍘嬬缉銆俓n" + + "4.楂樻晥鎬ц兘锛� HDF5 鏂囦欢鏍煎紡琚璁$敤浜庨珮鏁堝瓨鍌ㄥ拰澶勭悊澶у瀷鏁版嵁闆嗭紝鍏锋湁楂樻晥鐨勬暟鎹闂拰璇诲啓鎬ц兘銆俓n" + + "5.璺ㄥ钩鍙板吋瀹规�э細 HDF5 鏂囦欢鍙互鍦ㄤ笉鍚岀殑鎿嶄綔绯荤粺涓婁娇鐢紝鍏锋湁鑹ソ鐨勮法骞冲彴鍏煎鎬с�俓n" + + "HDF5 鏂囦欢閫氬父鐢辩瀛﹁绠椼�佹暟鎹垎鏋愩�佹満鍣ㄥ涔犵瓑棰嗗煙鐨勫簲鐢ㄥ箍娉涗娇鐢紝鐢ㄤ簬瀛樺偍澶ц妯℃暟鎹泦锛屼緥濡傚浘鍍忔暟鎹�佷紶鎰熷櫒鏁版嵁銆佹ā鎷熸暟鎹瓑銆傞�氳繃浣跨敤鍚勭缂栫▼璇█鐨� HDF5 搴撳拰宸ュ叿锛屽彲浠ユ柟渚垮湴璇诲啓鍜屽鐞� HDF5 鏍煎紡鐨勬枃浠躲��"; + log.info("HDF5鏂囦欢 璇存槑:{}", content); + return success(content); + } + + /** + * 鎵撳紑HDF5鏂囦欢 + * + * @param filePath 鏂囦欢璺緞 + * @return 鏂囦欢ID + */ + @ApiOperation("鎵撳紑HDF5鏂囦欢") + @PostMapping("/open-hdf5-file") + public R<Object> openHdf5File(String filePath) { + return success(bizH5FileParserService.openH5File(filePath)); + } + + /** + * 瑙f瀽HD5鏂囦欢 + */ + @ApiOperation("瑙f瀽HD5鏂囦欢") + @PostMapping("/parse-hdf5-file") + public R<Object> parseHdf5File(String filePath) throws HDF5LibraryException { + filePath = "/Users/zhangshan/Desktop/hdf5/data.h5"; + log.info("瑙f瀽HD5鏂囦欢:{}", filePath); + return success(bizH5FileParserService.parseH5File(filePath)); + } + +} diff --git a/src/main/java/com/se/nsl/controller/DbController.java b/src/main/java/com/se/nsl/controller/DbController.java new file mode 100644 index 0000000..465f8d5 --- /dev/null +++ b/src/main/java/com/se/nsl/controller/DbController.java @@ -0,0 +1,73 @@ +package com.se.nsl.controller; + +import com.se.nsl.domain.vo.QueryVo; +import com.se.nsl.domain.vo.R; +import com.se.nsl.helper.CaffeineHelper; +import com.se.nsl.helper.StringHelper; +import com.se.nsl.helper.WebHelper; +import com.se.nsl.service.DbService; +import io.swagger.annotations.Api; 
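As written, parseHdf5File above overwrites its filePath parameter with a hardcoded /Users/zhangshan/... path, so the request value is never used; it reads like a leftover debug assignment. A sketch of the handler with that line dropped and a basic existence check added, assuming IBizH5FileParserService.parseH5File keeps the signature shown above:

    @ApiOperation("Parse HDF5 file")
    @PostMapping("/parse-hdf5-file")
    public R<Object> parseHdf5File(String filePath) throws HDF5LibraryException {
        if (filePath == null || filePath.isEmpty() || !new java.io.File(filePath).exists()) {
            return fail("filePath does not point to an existing file", null);
        }
        log.info("Parsing HDF5 file: {}", filePath);
        return success(bizH5FileParserService.parseH5File(filePath));
    }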
+import io.swagger.annotations.ApiOperation; +import lombok.extern.slf4j.Slf4j; +import org.springframework.http.HttpStatus; +import org.springframework.web.bind.annotation.*; + +import javax.annotation.Resource; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; + +@Api(tags = "鏁版嵁绠$悊") +@Slf4j +@RestController +@RequestMapping("/db") +@SuppressWarnings("ALL") +public class DbController extends BaseController { + @Resource + DbService dbService; + + @ApiOperation(value = "info") + @GetMapping(value = "/info") + public void info(HttpServletRequest req, HttpServletResponse res) { + try { + String rs = dbService.info(); + + WebHelper.writeStr2Page(res, HttpStatus.OK, rs); + } catch (Exception ex) { + WebHelper.writeJson2Page(res, HttpStatus.BAD_REQUEST, ex.getMessage()); + } + } + + @ApiOperation(value = "config") + @GetMapping(value = "/config") + public void config(HttpServletRequest req, HttpServletResponse res) { + try { + String rs = dbService.getConfig(); + + WebHelper.writeStr2Page(res, HttpStatus.OK, rs); + } catch (Exception ex) { + WebHelper.writeJson2Page(res, HttpStatus.BAD_REQUEST, ex.getMessage()); + } + } + + @ApiOperation(value = "query") + @PostMapping(value = "/query") + public void query(@RequestBody QueryVo vo, HttpServletRequest req, HttpServletResponse res) { + try { + if (null == vo || StringHelper.isEmpty(vo.getLayerid())) throw new Exception("layerid涓嶈兘涓虹┖"); + + vo.setDefault(); + + dbService.query(vo, req, res); + } catch (Exception ex) { + WebHelper.writeJson2Page(res, HttpStatus.BAD_REQUEST, ex.getMessage()); + } + } + + @ApiOperation(value = "clearCache") + @GetMapping(value = "/clearCache") + public R<Object> clearCache() { + CaffeineHelper.clear(); + + return success("ok"); + } +} diff --git a/src/main/java/com/se/nsl/controller/FilesUploadController.java b/src/main/java/com/se/nsl/controller/FilesUploadController.java new file mode 100644 index 0000000..66551cf --- /dev/null +++ b/src/main/java/com/se/nsl/controller/FilesUploadController.java @@ -0,0 +1,298 @@ +package com.se.nsl.controller; + +import com.se.nsl.domain.vo.R; +import com.se.nsl.utils.FileUtil; +import com.se.nsl.utils.ZipUtils; +import io.swagger.annotations.Api; +import io.swagger.annotations.ApiImplicitParam; +import io.swagger.annotations.ApiOperation; +import lombok.extern.slf4j.Slf4j; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.http.HttpStatus; +import org.springframework.http.ResponseEntity; +import org.springframework.web.bind.annotation.*; +import org.springframework.web.multipart.MultipartFile; + +import java.io.File; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.time.LocalDate; +import java.time.LocalDateTime; +import java.time.LocalTime; +import java.time.format.DateTimeFormatter; +import java.util.List; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.ExecutionException; +import java.util.zip.ZipEntry; + +@Api(tags = "0-鏂囦欢鐩稿叧鎺ュ彛") +@Slf4j +@CrossOrigin(origins = "*") +@RestController +@RequestMapping("/v1/files") +public class FilesUploadController extends BaseController { + + + @Value("${simu-app.filePath}") + // private final String uploadedFolder = "D:/home/scheme/upload"; + private String uploadedFolder; + + + @ApiOperation("0-鏂囦欢涓婁紶鍦板潃") + @GetMapping("/getUploadPath") + public ResponseEntity<String> getUploadPath() throws Exception { + return ResponseEntity.ok("鏂囦欢涓婁紶鍦板潃涓猴細" + 
uploadedFolder); + } + + + @ApiOperation("1-涓婁紶鍗曚釜鏂囦欢") + @PostMapping("/upload") + public R<Object> upload(@RequestParam("file") MultipartFile file) throws IOException { + if (file.isEmpty()) { + return success("鏂囦欢涓嶈兘涓虹┖"); + } + // 鑾峰彇褰撳墠骞存湀鏃� + String date = LocalDate.now().format(DateTimeFormatter.ofPattern("yyyyMMdd")); + String targetDir = Paths.get(uploadedFolder, date).toString(); + log.info("鐩爣鐩綍: {}", targetDir); + createDirectoriesIfNotExists(targetDir); + try { + // 鑾峰彇褰撳墠鏃跺垎绉� + String time = LocalTime.now().format(DateTimeFormatter.ofPattern("HHmmss")); + // 鏂囦欢鍦板潃鍏ㄧО + Path filePath = Paths.get(targetDir, time + "_" + file.getOriginalFilename()); + // 鏂囦欢鍚� + file.transferTo(filePath); + return success(targetDir + "\\" + time + "_" + file.getOriginalFilename(), "鏂囦欢涓婁紶鎴愬姛"); + } catch (IOException e) { + log.error("鏂囦欢涓婁紶澶辫触", e); + return fail("鏂囦欢涓婁紶澶辫触"); + } + } + + @ApiOperation("1-涓婁紶鍗曚釜shp鏂囦欢") + @PostMapping("/uploadShp") + public R<Object> uploadShp(@RequestParam("file") MultipartFile file) throws IOException { + if (file.isEmpty()) { + return success("鏂囦欢涓嶈兘涓虹┖"); + } + // 鑾峰彇褰撳墠骞存湀鏃� + String date = LocalDateTime.now().format(DateTimeFormatter.ofPattern("yyyyMMddHHmmss")); + String targetDir = Paths.get(uploadedFolder, date).toString(); + log.info("鐩爣鐩綍: {}", targetDir); + createDirectoriesIfNotExists(targetDir); + try { + // 鏂囦欢鍦板潃鍏ㄧО + Path filePath = Paths.get(targetDir, file.getOriginalFilename()); + // 鏂囦欢鍚� + file.transferTo(filePath); + ZipUtils.unzip(targetDir + "\\" + file.getOriginalFilename(),targetDir); + File zipfile=new File(targetDir + "\\" + file.getOriginalFilename()); + zipfile.delete(); + return success(FileUtil.getShpPath(targetDir), "鏂囦欢涓婁紶鎴愬姛"); + } catch (IOException e) { + log.error("鏂囦欢涓婁紶澶辫触", e); + return fail("鏂囦欢涓婁紶澶辫触"); + } + } + + @ApiOperation("1-涓婁紶鍗曚釜鏂囦欢") + @PostMapping("/uploads") + public ResponseEntity<String> uploads(@RequestParam("file") MultipartFile file) throws IOException { + if (file.isEmpty()) { + return ResponseEntity.status(HttpStatus.BAD_REQUEST).body("鏂囦欢涓嶈兘涓虹┖"); + } + // 鑾峰彇褰撳墠骞存湀鏃� + String date = LocalDate.now().format(DateTimeFormatter.ofPattern("yyyyMMdd")); + String targetDir = Paths.get(uploadedFolder, date).toString(); + log.info("鐩爣鐩綍: {}", targetDir); + createDirectoriesIfNotExists(targetDir); + try { + // 鑾峰彇褰撳墠鏃跺垎绉� + String time = LocalTime.now().format(DateTimeFormatter.ofPattern("HHmmss")); + // 鏂囦欢鍦板潃鍏ㄧО + Path filePath = Paths.get(targetDir, time + "_" + file.getOriginalFilename()); + // 鏂囦欢鍚� + file.transferTo(filePath); + return ResponseEntity.ok("鏂囦欢涓婁紶鎴愬姛"); + } catch (IOException e) { + log.error("鏂囦欢涓婁紶澶辫触", e); + return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).body("鏂囦欢涓婁紶澶辫触"); + } + } + + @ApiOperation("2-0涓婁紶澶氫釜鏂囦欢") + @PostMapping("/uploadFiles") + public ResponseEntity<String> uploadFiles(List<MultipartFile> files) throws IOException { + if (files.isEmpty()) { + return ResponseEntity.status(HttpStatus.BAD_REQUEST).body("鏂囦欢涓嶈兘涓虹┖"); + } + // 涓婁紶鏂囦欢璺緞 + String targetDir = Paths.get(uploadedFolder, "upload").toString(); + log.info("鐩爣鐩綍: {}", targetDir); + createDirectoriesIfNotExists(targetDir); + CompletableFuture<Void> allUploadTasks = CompletableFuture.allOf(files.stream() + .map(file -> CompletableFuture.runAsync(() -> { + try { + file.transferTo(Paths.get(targetDir, file.getOriginalFilename())); + } catch (IOException e) { + log.error("鏂囦欢涓婁紶澶辫触", e); + throw new RuntimeException("鏂囦欢涓婁紶澶辫触"); + } + })).toArray(CompletableFuture[]::new)); + try { + 
allUploadTasks.get(); + return ResponseEntity.ok("鎵�鏈夋枃浠朵笂浼犳垚鍔�!涓婁紶鐩綍涓猴細" + targetDir); + } catch (InterruptedException | ExecutionException e) { + log.error("鏂囦欢涓婁紶澶辫触", e); + return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).body("鏂囦欢涓婁紶澶辫触"); + } + } + + + @ApiOperation("1-1 -涓婁紶鍗曚釜鏂囦欢") + @ApiImplicitParam(name = "filePathName", value = "鏂囦欢澶瑰悕绉�", required = true, dataType = "String", paramType = "query", example = "upload", dataTypeClass = String.class) + @PostMapping("/uploadSingleFile") + public ResponseEntity<String> uploadSingleFile(@RequestParam("file") MultipartFile file, @RequestParam("filePathName") String filePathName) throws IOException { + if (file.isEmpty()) { + return ResponseEntity.status(HttpStatus.BAD_REQUEST).body("鏂囦欢涓嶈兘涓虹┖"); + } + // 鍒ゆ柇 filePathName 鏄惁涓虹┖ + if (filePathName == null || filePathName.isEmpty()) { + filePathName = "upload"; + //return ResponseEntity.status(HttpStatus.BAD_REQUEST).body("鏂囦欢澶瑰悕绉颁笉鑳戒负绌�"); + } + String targetDir = Paths.get(uploadedFolder, filePathName).toString(); + log.info("鐩爣鐩綍: {}", targetDir); + createDirectoriesIfNotExists(targetDir); + try { + file.transferTo(Paths.get(targetDir, file.getOriginalFilename())); + return ResponseEntity.ok("鏂囦欢涓婁紶鎴愬姛"); + } catch (IOException e) { + log.error("鏂囦欢涓婁紶澶辫触", e); + return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).body("鏂囦欢涓婁紶澶辫触"); + } + } + + @ApiOperation("2-涓婁紶澶氫釜鏂囦欢") + @ApiImplicitParam(name = "filePathName", value = "鏂囦欢澶瑰悕绉�", required = true, dataType = "String", paramType = "query", example = "upload", dataTypeClass = String.class) + @PostMapping("/uploadMultipleFiles") + public ResponseEntity<String> uploadMultipleFiles(List<MultipartFile> files, @RequestParam("filePathName") String filePathName) throws IOException { + if (files.isEmpty()) { + return ResponseEntity.status(HttpStatus.BAD_REQUEST).body("鏂囦欢涓嶈兘涓虹┖"); + } + // 鍒ゆ柇 filePathName 鏄惁涓虹┖ + if (filePathName == null || filePathName.isEmpty()) { + // return ResponseEntity.status(HttpStatus.BAD_REQUEST).body("鏂囦欢澶瑰悕绉颁笉鑳戒负绌�"); + filePathName = "upload"; + } + // 涓婁紶鏂囦欢璺緞 + String targetDir = Paths.get(uploadedFolder, filePathName).toString(); + log.info("鐩爣鐩綍: {}", targetDir); + createDirectoriesIfNotExists(targetDir); + CompletableFuture<Void> allUploadTasks = CompletableFuture.allOf(files.stream() + .map(file -> CompletableFuture.runAsync(() -> { + try { + file.transferTo(Paths.get(targetDir, file.getOriginalFilename())); + } catch (IOException e) { + log.error("鏂囦欢涓婁紶澶辫触", e); + throw new RuntimeException("鏂囦欢涓婁紶澶辫触"); + } + })).toArray(CompletableFuture[]::new)); + try { + allUploadTasks.get(); + return ResponseEntity.ok("鎵�鏈夋枃浠朵笂浼犳垚鍔�!涓婁紶鐩綍涓猴細" + targetDir); + } catch (InterruptedException | ExecutionException e) { + log.error("鏂囦欢涓婁紶澶辫触", e); + return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).body("鏂囦欢涓婁紶澶辫触"); + } + } + + + @ApiOperation("2-1涓婁紶澶氫釜鏂囦欢") + @ApiImplicitParam(name = "filePathName", value = "鏂囦欢澶瑰悕绉�", required = true, dataType = "String", paramType = "query", example = "upload", dataTypeClass = String.class) + @PostMapping("/uploadMultipleFile") + public ResponseEntity<String> uploadMultipleFile(List<MultipartFile> files, @RequestParam("filePathName") String filePathName) throws IOException { + // 涓婁紶鏂囦欢璺緞 + String targetDir = Paths.get(uploadedFolder, filePathName).toString(); + log.info("鐩爣鐩綍: {}", targetDir); + createDirectoriesIfNotExists(targetDir); + try { + for (MultipartFile file : files) { + String fileName = file.getOriginalFilename(); + File 
targetFile = new File(targetDir, fileName); + file.transferTo(targetFile); + } + return ResponseEntity.ok("鎵�鏈夋枃浠朵笂浼犳垚鍔�!涓婁紶鐩綍涓猴細" + targetDir); + } catch (IOException e) { + log.error("鏂囦欢涓婁紶澶辫触", e); + return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).body("鏂囦欢涓婁紶澶辫触"); + } + } + + + private File newFile(File destinationDir, ZipEntry zipEntry) throws Exception { + File destFile = new File(destinationDir, zipEntry.getName()); + + String destDirPath = destinationDir.getCanonicalPath(); + String destFilePath = destFile.getCanonicalPath(); + + if (!destFilePath.startsWith(destDirPath + File.separator)) { + throw new Exception("Entry is outside of the target dir: " + zipEntry.getName()); + } + + return destFile; + } + + + /** + * 鎸夋枃浠惰矾寰勫垹闄ゆ枃浠跺す + * 閫掑綊鍒犻櫎鏂囦欢澶瑰強鏂囦欢鐨勬柟娉� + * + * @param folder 鏂囦欢澶� + */ + private void deleteFolderByFilePath(File folder) { + if (folder.exists()) { + File[] files = folder.listFiles(); + if (files != null) { + for (File file : files) { + if (file.isDirectory()) { + // 閫掑綊鍒犻櫎瀛愭枃浠跺す + deleteFolderByFilePath(file); + } else { + // 鍒犻櫎鏂囦欢 + file.delete(); + } + } + } + // 鍒犻櫎鏂囦欢澶规湰韬� + folder.delete(); + } + } + + + /** + * @param targetDir 鐩爣鐩綍 + * @throws IOException + */ + private void createDirectoriesIfNotExists(String targetDir) throws IOException { + Path path = Paths.get(targetDir); + if (!Files.exists(path)) { + Files.createDirectories(path); + } + } + + private Boolean directoriesIfNotExists(String targetDir) throws IOException { + Path path = Paths.get(targetDir); + if (!Files.exists(path)) { + // Files.createDirectories(path); + return false; + } else { + return true; + } + } + + +} diff --git a/src/main/java/com/se/nsl/controller/ProjectRelatedController.java b/src/main/java/com/se/nsl/controller/ProjectRelatedController.java new file mode 100644 index 0000000..d026de5 --- /dev/null +++ b/src/main/java/com/se/nsl/controller/ProjectRelatedController.java @@ -0,0 +1,98 @@ +package com.se.nsl.controller; + + +import com.se.nsl.domain.EntityTypeInfo; +import com.se.nsl.domain.LoginParams; +import com.se.nsl.domain.vo.R; +import com.se.nsl.service.ProjectRelatedService; +import io.swagger.annotations.Api; +import io.swagger.annotations.ApiOperation; +import lombok.RequiredArgsConstructor; +import org.springframework.web.bind.annotation.*; + +import javax.annotation.Resource; + +@Api(tags = "瀹炰綋搴撶浉鍏虫帴鍙�") +@CrossOrigin(origins = "*") +@RequiredArgsConstructor +@RestController +@RequestMapping("/api/v1") +public class ProjectRelatedController extends BaseController { + + @Resource + private ProjectRelatedService projectRelatedService; + + + /** + * 鑾峰彇鍏挜 + * + * @Cacheable 娣诲姞缂撳瓨锛岀浉鍚屾潯浠剁殑鏌ヨ涓嶅啀鏌ヨ鏁版嵁搴擄紝鑰屾槸浠庣紦瀛樹腑鏌ヨ锛� + * @CachePut 姣忔閮戒細璁块棶鏁版嵁搴擄紝骞舵洿鏂扮紦瀛橈紱 + * @CacheEvict 娓呴櫎缂撳瓨 + * <p> + * 娉ㄨВ涓弬鏁拌鏄庯細 + * cacheNames/value锛氭寚瀹氱紦瀛樼粍浠剁殑鍚嶅瓧; + * key锛氱紦瀛樻暟鎹娇鐢ㄧ殑key;鍙互鐢ㄥ畠鏉ユ寚瀹氥�傞粯璁ゆ槸浣跨敤鏂规硶鍙傛暟鐨勫�� 1-鏂规硶鍏ㄥ弬 2-鏂规硶鍙傛暟鐨勬煇鍑犱釜 3-key(SpEL) + * keyGenerator锛歬ey鐨勭敓鎴愬櫒锛涘彲浠ヨ嚜宸辨寚瀹歬ey鐨勭敓鎴愬櫒鐨勭粍浠秈d + * cacheManager锛氭寚瀹氱紦瀛樼鐞嗗櫒锛涙垨鑰卌acheResolver鎸囧畾鑾峰彇瑙f瀽鍣� + * condition锛氭寚瀹氱鍚堟潯浠剁殑鎯呭喌涓嬫墠缂撳瓨锛� + * unless锛氬惁瀹氱紦瀛橈紱褰搖nless鎸囧畾鐨勬潯浠朵负true锛屾柟娉曠殑杩斿洖鍊煎氨涓嶄細琚紦瀛橈紱鍙互鑾峰彇鍒扮粨鏋滆繘琛屽垽鏂� + * sync锛氭槸鍚︿娇鐢ㄥ紓姝ユā寮� + */ + @ApiOperation("0-鑾峰彇鍏挜") +// @Cacheable(cacheNames="api",value="cachespace=30", key = "#root.methodName") + @GetMapping("/get-public-key") + public Object getPublicKey() { + return projectRelatedService.getPublicKey(); + } + + /** + * 鐧诲綍瀹炰綋搴� + */ + @ApiOperation("1-鐧诲綍瀹炰綋搴�") +// @Cacheable(cacheNames = "api", key = "#loginParams") + @PostMapping("/login-entity") + public R<Object> 
loginEntity(@RequestBody LoginParams loginParams) { + return success(projectRelatedService.loginEntity(loginParams)); + } + + + /** + * 鑾峰彇璁块棶瀹炰綋搴撶殑token + */ + @ApiOperation("1-鑾峰彇璁块棶瀹炰綋搴撶殑token") + @GetMapping("/entity-public-key") + public R<Object> getEntityPublicKey() { + return success(projectRelatedService.getEntityPublicKey()); + } + + + /** + * 鑾峰彇璁块棶瀹炰綋搴撴暟鎹簱鍒楄〃 + */ + @ApiOperation("2-鑾峰彇璁块棶瀹炰綋搴撴暟鎹簱鍒楄〃") + @GetMapping("/db-list") + public R<Object> getDbLits() { + return success(projectRelatedService.getDbLits()); + } + + /** + * 鏌ヨ瀹炰綋搴撲笉鍚岀被鍨嬬殑淇℃伅 + */ + @ApiOperation("3-鏌ヨ瀹炰綋搴撲笉鍚岀被鍨嬬殑淇℃伅") + @GetMapping("/entity-type-info") + public Object getEntityTypeInfo(EntityTypeInfo entityTypeInfo) { + return projectRelatedService.getEntityTypeInfo(entityTypeInfo); + } + + /** + * 鏌ヨ瀹炰綋搴撲笉鍚岀被鍨嬬殑淇℃伅 + */ + @ApiOperation("4-鏌ヨ瀹炰綋搴撲笉鍚岀被鍨嬬殑淇℃伅-绠$嚎") + @GetMapping("/entity-pipe-info") + public Object getEntityPipeInfo(EntityTypeInfo entityTypeInfo) { + return projectRelatedService.getEntityPipeInfo(entityTypeInfo); + } + + +} diff --git a/src/main/java/com/se/nsl/controller/SemFilesSimuController.java b/src/main/java/com/se/nsl/controller/SemFilesSimuController.java new file mode 100644 index 0000000..c67fbde --- /dev/null +++ b/src/main/java/com/se/nsl/controller/SemFilesSimuController.java @@ -0,0 +1,92 @@ +package com.se.nsl.controller; + +import com.se.nsl.service.SemFilesSimuService; +import io.swagger.annotations.Api; +import io.swagger.annotations.ApiImplicitParam; +import io.swagger.annotations.ApiOperation; +import lombok.RequiredArgsConstructor; +import org.springframework.http.ResponseEntity; +import org.springframework.web.bind.annotation.*; + +import javax.annotation.Resource; + +/** + * SEM 鏂囦欢 SIMU 鎺у埗鍣� + * + * @author xingjinshuang@smartearth.cn + * @date 2024/12/30 + */ +@Api(tags = "SEM鐩稿叧鎺ュ彛") +@CrossOrigin(origins = "*") +@RequiredArgsConstructor +@RestController +@RequestMapping("/api/v1/sem") +public class SemFilesSimuController { + + @Resource + private SemFilesSimuService semFilesSimuService; + + + /** + * 鑾峰彇 INTRODUCE + * 1銆佸姩鎬佹暟鎹瓨鍌ㄥ湪DYNZAMIZERS琛ㄤ腑锛屽叾涓細 + * url锛氭暟鎹畊rl + * data锛歾arr鏁版嵁锛屼娇鐢ㄧ殑鏄痾arr鐨勫帇缂╁瓨鍌ㄦ牸寮忋�傝瑙亃arr鐨剒ipstore銆� + * gmlId锛氫笌瀹炰綋瀵硅薄鐩稿叧鑱斿瓧娈碉紙浣跨敤ENTITY琛紙瀹炰綋琛級涓殑UUID鐩稿叧鑱旓級 + * <p> + * zarr鏁版嵁缁撴瀯绀轰緥锛� + * Grid鐩稿叧鐨剒arr锛� + * / + * |鈥斺�攄epth (n,height,width) + * |鈥斺�攖ime(n) + * <p> + * time瀛樺偍鏃堕棿搴忓垪 + * depth瀛樺偍姘存繁鐩稿叧淇℃伅锛屼笁缁存暟缁勶紝绗竴缁翠负鏃堕棿 涓巘ime鐩稿搴� + * 鏁扮粍闀垮害n浠h〃鏃堕棿鍒囩墖鐨勪釜鏁� + * height锛寃idth浠h〃鏍呮牸鐨勯暱鍜屽 + * <p> + * 闄嶉洦閲忕浉鍏硓arr锛� + * / + * |鈥斺�攔ainfall(n) + * |鈥斺�攖ime(n) + * <p> + * time瀛樺偍鏃堕棿搴忓垪 + * rainfall 瀛樺偍闄嶉洦閲忕浉鍏充俊鎭紝涓�缁存暟缁勶紝涓巘ime鐩稿搴� + * 鏁扮粍闀垮害n浠h〃鏃堕棿鍒囩墖鐨勪釜鏁� + * <p> + * <p> + * 2銆乼errain鐨勫瓨鍌ㄦ柟寮忥細 + * 绫诲瀷涓衡��+Terrain鈥� + * Entity涓嚑浣曞瓨鍌ㄥ湴褰㈢殑澶栧寘妗嗭紝浣跨敤绾圭悊璐村浘瀛樺偍鍦板舰tif杞嚭鐨刾ng鍥剧墖銆� + * + * @return {@link ResponseEntity}<{@link Object}> + * @throws Exception + */ + @ApiOperation("0-sem浠嬬粛") + @GetMapping("/introduce") + public ResponseEntity<Object> getIntroduce() throws Exception { + return ResponseEntity.ok(semFilesSimuService.getIntroduce()); + } + + + /** + * sem鏂囦欢鍒涘缓妯℃嫙 + */ + @ApiOperation("1-sem鏂囦欢鍒涘缓妯℃嫙") + @PostMapping("/create") + public ResponseEntity<Object> createSimuBySemFile() throws Exception { + return ResponseEntity.ok(semFilesSimuService.createSimuBySemFile()); + } + + /** + * sem鏂囦欢璇诲彇妯℃嫙 + */ + @ApiOperation("2-sem鏂囦欢璇诲彇妯℃嫙") + @ApiImplicitParam(name = "filePath", value = "鏂囦欢鍦板潃", required = true, dataType = "String", paramType = "query", example = "D:\\app\\simulation\\other\\1211SEM鏍蜂緥\\绠$偣.sem", dataTypeClass = String.class) + 
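The introduce() Javadoc above describes the zarr layout only in prose. The following is a plain-Java illustration of the shapes it names (ordinary arrays, no zarr library, made-up sizes); n is the number of time slices, height and width are the grid dimensions, and the compressed zipstore serialization itself is outside this sketch.

    public class ZarrLayoutSketch {
        public static void main(String[] args) {
            int n = 5, height = 100, width = 200;               // made-up sizes
            long[] time = new long[n];                          // time:     shape (n), one timestamp per slice
            double[] rainfall = new double[n];                  // rainfall: shape (n), paired with time
            double[][][] depth = new double[n][height][width];  // depth:    shape (n, height, width)

            // depth[t] is the full water-depth grid at the moment time[t];
            // rainfall[t] is the rainfall value for that same moment.
            double depthAtCell = depth[2][10][20];              // time slice 2, grid row 10, column 20
            System.out.println(depthAtCell);
        }
    }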
@PostMapping("/read") + public ResponseEntity<Object> readSemFile(@RequestParam("filePath") String filePath) throws Exception { + return ResponseEntity.ok(semFilesSimuService.readSemFile(filePath)); + } + + +} diff --git a/src/main/java/com/se/nsl/controller/SimuController.java b/src/main/java/com/se/nsl/controller/SimuController.java new file mode 100644 index 0000000..60f2eb0 --- /dev/null +++ b/src/main/java/com/se/nsl/controller/SimuController.java @@ -0,0 +1,1023 @@ +package com.se.nsl.controller; + +import cn.hutool.core.bean.BeanUtil; +import cn.hutool.json.JSONUtil; +import com.alibaba.fastjson.JSONArray; +import com.alibaba.fastjson.JSONObject; +import com.baomidou.mybatisplus.core.metadata.IPage; +import com.google.common.io.Resources; +import com.se.nsl.config.PropertiesConfig; +import com.se.nsl.domain.dto.GeDb; +import com.se.nsl.domain.dto.GeLayer; +import com.se.nsl.domain.dto.GridDto; +import com.se.nsl.domain.po.DataPo; +import com.se.nsl.domain.po.SimuPo; +import com.se.nsl.domain.vo.*; +import com.se.nsl.service.*; +import com.se.nsl.utils.*; +import com.se.nsl.enums.RadioEnums; +import com.se.nsl.enums.SemErrorEnums; +import com.se.nsl.helper.StringHelper; +import com.se.nsl.helper.WebHelper; +import com.se.nsl.mapper.SimuMapper; +import io.swagger.annotations.*; +import lombok.SneakyThrows; +import lombok.extern.slf4j.Slf4j; +import org.apache.commons.lang3.StringUtils; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.web.bind.annotation.*; + +import javax.annotation.Resource; +import java.io.*; +import java.math.BigDecimal; +import java.net.URL; +import java.nio.charset.StandardCharsets; +import java.sql.Timestamp; +import java.text.SimpleDateFormat; +import java.time.LocalDateTime; +import java.time.format.DateTimeFormatter; +import java.util.*; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; + +@Api(tags = "浠跨湡绠$悊") +@Slf4j +@RestController +@RequestMapping("/simu") +@SuppressWarnings("ALL") +public class SimuController extends BaseController { + @Resource + UwService uwService; + + @Resource + GedbService gedbService; + + @Resource + SimuService simuService; + + @Resource + SimuFilesService simuFilesService; + + @Resource + PropertiesConfig config; + + @Resource + SimuMapper simuMapper; + + @Value("${simu-app.filePath}") + private String uploadedFolder; + + @Value("${config.outPath}") + private String outPath; + + @Value("${config.inPath}") + private String inPath; + + + @Resource + ResultService resultService; + + private final List<String> FLOOD_TYPE = new ArrayList<>(Arrays.asList("娌欒", "闃叉按鏉�")); + + @ApiOperation(value = "鍒涘缓") + @PostMapping(value = "/create", produces = "application/json; charset=UTF-8") + public R<Object> create(@RequestBody @ApiParam("鍒涘缓浠跨湡瑙嗗浘绫�") CreateSimuVo vo) { + try { + if (null == vo.getTotal() || vo.getTotal() < 1 || vo.getTotal() > 1000) { + return fail("闄嶉洦閲忎笉鑳戒负绌猴紝涓斿彇鍊煎湪1~1000涔嬮棿"); + } + if (null == vo.getDuration() || vo.getDuration() < 1 || vo.getDuration() > 10080) { + return fail("浠跨湡鏃堕暱涓嶈兘涓虹┖锛屼笖鍙栧�煎湪1~10080涔嬮棿"); + } + if (null == vo.getMinx() || null == vo.getMiny() || null == vo.getMaxx() || null == vo.getMaxy()) { + return fail("閫夋嫨鑼冨洿涓嶈兘涓虹┖", false); + } + if (null == vo.getPid() || vo.getPid() < 0) { + vo.setPid(0); + } + if (null == vo.getNum() || vo.getNum() < 1) { + vo.setNum(simuService.getMaxId() + 1); + } + if (null == vo.getPid() || vo.getPid() < 0) { + vo.setPid(0); + } + if (null == vo.getStartTime()) { + vo.setStartTime(new 
Date()); + } + if (vo.getPid() > 0) { + SimuPo pp = simuService.getSimuByPid(vo.getPid()); + if (null == pp) { + return fail("pid涓嶅瓨鍦�"); + } + if (null == vo.getFloodStart() || vo.getFloodStart() < 1 || vo.getFloodStart() > vo.getDuration() * 60) { + return fail("闃叉睕寮�濮嬫椂闂翠笉鑳戒负绌猴紝涓斿彇鍊煎湪1~" + (vo.getDuration() * 60) + "涔嬮棿"); + } + if (null == vo.getFloodEnd() || vo.getFloodEnd() < vo.getFloodStart() || vo.getFloodEnd() > vo.getDuration() * 60) { + return fail("闃叉睕缁撴潫鏃堕棿涓嶈兘涓虹┖锛屼笖鍙栧�煎湪" + vo.getFloodStart() + "~" + (vo.getDuration() * 60) + "涔嬮棿"); + } + if (null == vo.getFloodHeight() || vo.getFloodHeight() < 1 || vo.getFloodHeight() > 2000) { + return fail("闃叉睕楂樺害涓嶈兘涓虹┖锛屼笖鍙栧�煎湪1~2000涔嬮棿"); + } + if (!FLOOD_TYPE.contains(vo.getFloodType())) { + return fail("闃叉睕绫诲瀷涓嶈兘涓虹┖锛屼笖鍙兘鏄矙琚嬪拰闃叉按鏉�"); + } + if (null == vo.getFloodMinx() || null == vo.getFloodMiny() || null == vo.getFloodMaxx() || null == vo.getFloodMaxy()) { + return fail("闃叉睕鑼冨洿涓嶈兘涓虹┖", false); + } + } + + boolean flag = simuService.create(vo); + + return success(flag, flag ? "鎴愬姛" : "澶辫触"); + } catch (Exception ex) { + return fail(ex, null); + } + } + + @ApiOperation(value = "鍒犻櫎") + @ApiImplicitParams({ + @ApiImplicitParam(name = "ids", value = "ID", dataType = "Integer", paramType = "query", allowMultiple = true, example = "0") + }) + @GetMapping(value = "/del") + public R<Object> del(@RequestParam List<Integer> ids) { + try { + if (ids == null || ids.isEmpty()) { + return fail("鏃ュ織ID涓嶈兘涓虹┖", null); + } + + int count = simuService.del(ids); + + return success(count); + } catch (Exception ex) { + return fail(ex, null); + } + } + + @ApiOperation(value = "鑾峰彇") + @GetMapping("/get") + public R<Object> get(@ApiParam("浠跨湡瑙嗗浘绫�") SimuVo vo) { + try { + if (null == vo) { + vo = new SimuVo(); + } + if (null == vo.getPageSize() || vo.getPageSize() < 1) { + vo.setPageSize(10); + } + if (null == vo.getPageIndex() || vo.getPageIndex() < 1) { + vo.setPageIndex(1); + } + + IPage<SimuPo> paged = simuService.get(vo); + if (null == paged) { + return success(null, 0); + } + + return success(paged.getRecords(), paged.getTotal()); + } catch (Exception ex) { + return fail(ex, null); + } + } + + @ApiOperation(value = "闄嶆按鑼冨洿鏍¢獙") + @GetMapping("/rangeVerify") + public R<Object> rangeVerify(@RequestParam @ApiParam("鏈�灏廥") double minx, + @RequestParam @ApiParam("鏈�灏弝") double miny, + @RequestParam @ApiParam("鏈�澶") double maxx, + @RequestParam @ApiParam("鏈�澶") double maxy) { + try { + DataPo data = new DataPo(); + data.setMinx(minx); + data.setMiny(miny); + data.setMaxx(maxx); + data.setMaxy(maxy); + data.setEpsg(4326); + + String token = gedbService.getToken(); + GeDb db = gedbService.connectGedb(token, data); + List<GeLayer> layers = gedbService.getLayers(token, db); + boolean result = gedbService.queryBboxCount(token, db, layers); + log.info("result = " + result); + return success(result); + + // return success(gedbService.queryBboxCount(token, db, layers)); + } catch (Exception ex) { + return fail(ex, false); + } + } + + /** + * 鏂囦欢鍒涘缓妯℃嫙 + * <p> + * "鐘舵�侊細 + * 0-鍒涘缓浠跨湡浠诲姟锛� + * 1-杩炴帴GEDB搴擄紝 + * 2-涓嬭浇绌洪棿鏁版嵁锛� + * 3-涓嬭浇楂樼▼鏁版嵁锛� + * 4-鐢熸垚闄嶉洦鏂囦欢锛� + * 5-鐢熸垚閰嶇疆鏂囦欢锛� + * 6-妯℃嫙鍐呮稘浠跨湡锛� + * 7-澶勭悊姘翠綅鏂囦欢锛� + * 8-澶勭悊鎺掓按鏂囦欢锛� + * 9-澶勭悊浠跨湡缁撴灉锛� + * 10-瀹屾垚锛�-10-鍑洪敊 + * + * @param vo VO + * @return {@link R}<{@link Object}> + */ + @ApiOperation(value = "鏂囦欢鍒涘缓") + @PostMapping(value = "/file_create", produces = "application/json; charset=UTF-8") + public R<Object> fileCreate(@RequestBody @ApiParam("鍒涘缓浠跨湡瑙嗗浘绫�") CreateFilesSimuVo vo) throws IOException { + // 
鑾峰彇涓婁紶鏂囦欢璺緞 + String targetDir = uploadedFolder; + log.info("涓婁紶鏂囦欢璺緞锛歿}", targetDir); + // 鑾峰彇鍚勪釜鏂囦欢鐨勫湴鍧� + String floodFile = vo.getFloodFile(); + log.info("鑼冨洿鏂囦欢鍦板潃锛歿}", floodFile); + try { + // 鍒ゆ柇鏄惁缁樺埗鍖哄煙 + if (!StringUtils.isEmpty(vo.getMaxx().toString()) && !Objects.isNull(vo.getMaxx())) { + rangeVerify(vo.getMinx(), vo.getMaxx(), vo.getMiny(), vo.getMaxy()); + log.info("缁樺埗鍖哄煙鑼冨洿楠岃瘉閫氳繃!"); + } else + // 鍒ゆ柇鍦板潃涓嶄负绌� + if (StringUtils.isEmpty(floodFile)) { + return fail("鑼冨洿鏂囦欢鍦板潃涓嶈兘涓虹┖", false); + } else { + // todo: 瑙f瀽鑼冨洿鏂囦欢 鏍规嵁涓嶅悓鐨勭殑鏍煎紡鏂囦欢杩涜涓嶅悓鐨勮В鏋愶紝鑾峰彇闇�瑕佽绠楃殑鑼冨洿鍊� + // 涓婁紶鏍煎紡锛�.shp/.tiff/.img/.geojson + // 閲嶆柊缁檝o璧嬪�� + // 鑾峰彇鏂囦欢鐨勫悗缂�鍚� + String fileName = floodFile.substring(floodFile.lastIndexOf(".")); + // 鍒ゆ柇鍚庣紑鍚嶆槸鍚︿负.shp + if (!fileName.equalsIgnoreCase(".shp")) { + return fail("鑼冨洿鏂囦欢鏍煎紡涓嶆纭�", false); + } else { + // 1 璇诲彇shp鏂囦欢锛岃幏鍙栬寖鍥村�� + JSONObject jsonObject = ShpToolUtils.readShp(floodFile); + // 2 鑾峰彇jsonObject涓殑鑼冨洿鍊� + Double minX = jsonObject.getDouble("minY"); + Double maxX = jsonObject.getDouble("maxY"); + Double minY = jsonObject.getDouble("minX"); + Double maxY = jsonObject.getDouble("maxX"); + // 3 鍒ゆ柇鑼冨洿鍊兼槸鍚︿负绌� + vo.setMinx(jsonObject.getDouble("minY")); + vo.setMaxx(jsonObject.getDouble("maxY")); + vo.setMiny(jsonObject.getDouble("minX")); + vo.setMaxy(jsonObject.getDouble("maxX")); + rangeVerify(minX, maxX, minY, maxY); + } + } + } catch (Exception e) { + log.error("瑙f瀽鑼冨洿鏂囦欢澶辫触"); + if (null == vo.getMinx() || null == vo.getMiny() || null == vo.getMaxx() || null == vo.getMaxy()) { + return fail("瑙f瀽鑼冨洿鏂囦欢澶辫触锛侀�夋嫨鑼冨洿涓嶈兘涓虹┖锛岃閲嶆柊閫夋嫨鏂囦欢锛�", false); + } + } + String stationFile = vo.getStationFile(); + log.info("绔欑偣鏂囦欢shp鍦板潃锛歿}", stationFile); + try { + if (RadioEnums.ACCESS.getCode().equals(vo.getRadio()) && StringUtils.isNotEmpty(stationFile)) { + // 1 璇诲彇shp鏂囦欢锛岃幏鍙栫珯鐐瑰潗鏍囧�� + ShpToolUtils.readShpGetLocal(stationFile); + } + } catch (Exception e) { + log.error("瑙f瀽绔欑偣鏂囦欢shp澶辫触"); + } + // 鍒涘缓琛ㄥ悕 鏃堕棿鎴� + String tableName = "station_rain_" + System.currentTimeMillis(); + try { + //鎺ュ叆澶勭悊 + if (RadioEnums.ACCESS.getCode().equals(vo.getRadio())) { + String stationRainFile = vo.getStationRainFile(); + log.info("绔欑偣闆ㄩ噺CSV鏂囦欢鍦板潃锛歿}", floodFile); + // 鍒ゆ柇鍦板潃涓嶄负绌� + if (StringUtils.isEmpty(stationRainFile)) { + return fail("绔欑偣闆ㄩ噺CSV鏂囦欢鍦板潃涓嶈兘涓虹┖", false); + } + // 1 璇诲彇CSV 鏂囦欢 + CsvToSQLiteUtils.readCsvSaveLocal(stationRainFile, tableName); + // 鑾峰彇浠跨湡鏃堕棿 duration + Integer duration = CsvToSQLiteUtils.getDuration(tableName); + log.info("浠跨湡鏃堕棿 duration = {}", duration); + vo.setDuration(duration); + // 鑾峰彇闄嶉洦鎬婚噺 total + Double total = CsvToSQLiteUtils.getTotal(tableName); + log.info("闄嶉洦鎬婚噺 total = {}", total); + vo.setTotal(total); + } else if (RadioEnums.SIMULATE.getCode().equals(vo.getRadio())) { + if (StringUtils.isEmpty(vo.getStation())) { + return fail("妯℃嫙璁$畻绔欑偣涓嶈兘涓虹┖锛�", false); + } + if (vo.getTotal() == null) { + return fail("妯℃嫙璁$畻闄嶉洦閲忎笉鑳戒负绌猴紒", false); + } + if (vo.getDuration() == null || vo.getDuration() <= 0) { + return fail("妯℃嫙璁$畻璁$畻鏃堕暱涓嶈兘涓虹┖锛�", false); + } + if (vo.getStationLatitude() == null) { + return fail("妯℃嫙璁$畻璁$畻鍧愭爣锛�", false); + } + if (vo.getStationLongitude() == null) { + return fail("妯℃嫙璁$畻璁$畻鍧愭爣锛�", false); + } + double aveTotal = BigDecimal.valueOf(vo.getTotal()).divide(BigDecimal.valueOf(vo.getDuration()), 2, BigDecimal.ROUND_HALF_UP).doubleValue(); + JSONArray jsonArray = new JSONArray(); + LocalDateTime now = LocalDateTime.now(); + DateTimeFormatter customFormatter = DateTimeFormatter.ofPattern("yyyy/MM/dd HH:mm"); + for (int i = 
0; i < vo.getDuration(); i++) { + JSONObject jsonObject = new JSONObject(); + jsonObject.put("stationName", vo.getStation()); + jsonObject.put("rainfall", aveTotal); + jsonObject.put("longitude", vo.getStationLongitude()); + jsonObject.put("latitude", vo.getStationLatitude()); + LocalDateTime plusMinutes = now.plusMinutes(i); + jsonObject.put("datetime", customFormatter.format(plusMinutes)); + jsonArray.add(jsonObject); + } + CsvToSQLiteUtils.readCsvSaveLocal(jsonArray, tableName); + } else { + return fail("鏃犳晥鐨勬ā鍨嬭绠楁柟寮�", false); + } + } catch (Exception e) { + return fail("闆ㄩ噺澶勭悊澶辫触锛�", false); + } + // TODO: 2024/12/24 鏍规嵁杩欎簺鏂囦欢鐨勫湴鍧�锛岃幏鍙栨枃浠跺唴瀹癸紝鍒涘缓浠跨湡瑙嗗浘 + try { + if (null == vo.getDuration() || vo.getDuration() < 1 || vo.getDuration() > 10080) { + return fail("浠跨湡鏃堕暱涓嶈兘涓虹┖锛屼笖鍙栧�煎湪1~10080涔嬮棿"); + } + if (null == vo.getPid() || vo.getPid() < 0) { + vo.setPid(0); + } + if (null == vo.getNum() || vo.getNum() < 1) { + vo.setNum(simuService.getMaxId() + 1); + } + if (null == vo.getStartTime()) { + vo.setStartTime(new Date()); + } + // 闃叉睕浣滀笟 + if (vo.getPid() > 0) { + SimuPo pp = simuService.getSimuByPid(vo.getPid()); + if (null == pp) { + return fail("pid涓嶅瓨鍦�"); + } + if (null == vo.getFloodStart() || vo.getFloodStart() < 1 || vo.getFloodStart() > vo.getDuration() * 60) { + return fail("闃叉睕寮�濮嬫椂闂翠笉鑳戒负绌猴紝涓斿彇鍊煎湪1~" + (vo.getDuration() * 60) + "涔嬮棿"); + } + if (null == vo.getFloodEnd() || vo.getFloodEnd() < vo.getFloodStart() || vo.getFloodEnd() > vo.getDuration() * 60) { + return fail("闃叉睕缁撴潫鏃堕棿涓嶈兘涓虹┖锛屼笖鍙栧�煎湪" + vo.getFloodStart() + "~" + (vo.getDuration() * 60) + "涔嬮棿"); + } + if (null == vo.getFloodHeight() || vo.getFloodHeight() < 1 || vo.getFloodHeight() > 2000) { + return fail("闃叉睕楂樺害涓嶈兘涓虹┖锛屼笖鍙栧�煎湪1~2000涔嬮棿"); + } + if (!FLOOD_TYPE.contains(vo.getFloodType())) { + return fail("闃叉睕绫诲瀷涓嶈兘涓虹┖锛屼笖鍙兘鏄矙琚嬪拰闃叉按鏉�"); + } + if (null == vo.getFloodMinx() || null == vo.getFloodMiny() || null == vo.getFloodMaxx() || null == vo.getFloodMaxy()) { + return fail("闃叉睕鑼冨洿涓嶈兘涓虹┖", false); + } + } + if (StringUtils.isEmpty(vo.getSemUrl())) { + return fail("sem鏂囦欢鐢熸垚璺緞涓嶈兘涓虹┖锛�", false); + } + List<GridDto> dtos = new ArrayList<>(); + dtos.add(new GridDto(vo.getMiny(), vo.getMaxx())); + dtos.add(new GridDto(vo.getMaxy(), vo.getMaxx())); + dtos.add(new GridDto(vo.getMaxy(), vo.getMinx())); + dtos.add(new GridDto(vo.getMiny(), vo.getMinx())); + String bbox = ""; + //鐢熸垚鐐瑰拰绾跨殑瀹炰綋搴撴ā鍨嬪苟淇濆瓨璺緞 + String name = vo.getSemUrl(); + String token = EntityLibraryUtils.login(); + vo.setPointName(name + "-point"); + vo.setLinkName(name + "-line"); + JSONArray min = ProjectionToGeographicUtil.get4548Point(dtos.get(3).getLon(), dtos.get(3).getLat()); + JSONArray max = ProjectionToGeographicUtil.get4548Point(dtos.get(1).getLon(), dtos.get(1).getLat()); + bbox = bbox + min.getDouble(0) + "," + min.getDouble(1) + "," + max.getDouble(0) + "," + max.getDouble(1); + JSONObject point = getModule("layerQueryPointParams.json"); + String pointUrl = assemble(vo.getPointName(), point.getString("layerid"), bbox, token); + vo.setPointName(pointUrl); + JSONObject link = getModule("layerQueryParams.json"); + String linkUrl = assemble(vo.getLinkName(), link.getString("layerid"), bbox, token); + vo.setLinkName(linkUrl); + SimuPo simu = saveSimu(vo); + //鐢熸垚sem鏂囦欢 + asyncCall(tableName, dtos, vo, token, simu); + + // 寮�濮嬫ā鎷熻绠� + //boolean flag = simuFilesService.createByfiles(vo, token); + return success(simu.getId() != null ? 
"鎴愬姛" : "澶辫触"); + } catch (Exception ex) { + return fail(ex, null); + } + } + + public SimuPo saveSimu(CreateFilesSimuVo vo) { + Date now = new Date(); + String date = StringHelper.YMDHMS2_FORMAT.format(now); + if (StringHelper.isEmpty(vo.getName())) { + vo.setName(date); + } + DataPo data = BeanUtil.copyProperties(vo, DataPo.class); + data.setPath(date, date); + SimuPo simu = new SimuPo(vo.getNum(), vo.getPid(), vo.getName(), JSONUtil.toJsonStr(data), 0, vo.getBak()); + simu.setServiceName(date); + simu.setCreateTime(new Timestamp(now.getTime())); + simu.setSemUrl(vo.getSemUrl()); + simu.setPointUrl(vo.getPointName()); + simu.setLinkUrl(vo.getLinkName()); + simu.setSemUrl(inPath + "\\" + vo.getSemUrl()); + int rows = simuMapper.insert(simu); + return simu; + } + + @ApiOperation(value = "鑾峰彇鐐硅矾寰�") + @GetMapping("/getPointUrl") + public R<String> getPointUrl(@RequestParam("id") Integer id) throws Exception { + SimuPo po = simuService.getSimuById(id); + String token = EntityLibraryUtils.login(); + if (po != null) { + return success(po.getPointUrl().replace("{token}", token)); + } + return success(null); + } + + @ApiOperation(value = "鑾峰彇绾胯矾寰�") + @GetMapping("/getLineUrl") + public R<String> getLineUrl(@RequestParam("id") Integer id) throws Exception { + SimuPo po = simuService.getSimuById(id); + String token = EntityLibraryUtils.login(); + if (po != null) { + return success(po.getLinkUrl().replace("{token}", token)); + } + return success(null); + } + + private void asyncCall(String tableName, List<GridDto> dtos, CreateFilesSimuVo vo, String token, SimuPo simu) { + ExecutorService executor = Executors.newSingleThreadExecutor(); + executor.execute(new Runnable() { + @Override + @SneakyThrows + public void run() { + createSem(tableName, dtos, vo, token, simu); + } + }); + executor.shutdown(); + } + + public void createSem(String tableName, List<GridDto> dtos, CreateFilesSimuVo vo, String token, SimuPo simu) throws Exception { + int code=1; + try { + vo.setSemUrl(inPath + "\\" + vo.getSemUrl()); + boolean ends = vo.getSemUrl().endsWith("\\"); + if (!ends) { + vo.setSemUrl(vo.getSemUrl() + "\\"); + } + File file = new File(vo.getSemUrl()); + if (!file.exists()) { + file.mkdirs(); + } + //鍒濆鍖� + update(simu, code, null); + code+=1; + //闄嶉洦鏂囦欢鐢熸垚 + saveZarr(tableName, vo.getSemUrl()); + update(simu, code, null); + code+=1; + //绠$偣鐢熸垚 + pointTosem(dtos, token, vo.getSemUrl()); + update(simu, code, null); + code+=1; + //绠$嚎鐢熸垚 + lineToSem(dtos, token, vo.getSemUrl()); + update(simu, code, null); + code+=1; + //鑼冨洿鐢熸垚 + gridToCityJson(dtos, vo.getSemUrl()); + update(simu, code, null); + code+=1; + //娌虫祦鐢熸垚 + riverToSem(vo.getSemUrl()); + update(simu, code, null); + code+=1; + //鍦熷湴鍒╃敤鐢熸垚 + landuseToSem(vo.getSemUrl()); + update(simu, code, null); + code+=1; + //鍦板舰鐢熸垚 + terrainToSem(dtos, vo.getSemUrl()); + update(simu, 10, null); + code+=1; + //9=璋冪敤姹傝В/10=姹傝В鍒嗘瀽 + } catch (Exception e) { + log.error(e.getMessage(), e); + update(simu, -simu.getStatus(), SemErrorEnums.of(code)); + } + + + } + + private void update(SimuPo simu, int status, String rs) { + simu.setStatus(status); + if (null != rs) simu.setResult(rs); + simu.setUpdateTime(WebHelper.getCurrentTimestamp()); + + simuMapper.updateById(simu); + } + + public String assemble(String name, String layerid, String bbox, String token) throws Exception { + JSONObject jsonObject = EntityLibraryUtils.createAssemble(name, token); + String packageid = jsonObject.getString("packageid"); + EntityLibraryUtils.addAssemble(layerid, token, packageid, bbox); + 
JSONObject result = EntityLibraryUtils.submitAssemble(token, packageid); + Random random = new Random(); + int randomNumber = random.nextInt(90000) + 10000; + String url = "http://106.120.22.26:8024/geo-service/package/3d/normal/85257774fdb64e5f99f6778696cad02a/" + packageid + "/" + layerid + "//l0/tileset.json?r=" + randomNumber + "&token={token}"; + return url; + } + + public R<String> saveZarr(String tableName, String semUrl) throws Exception { + List<String> list = CsvToSQLiteUtils.getNameList(tableName); + String path = outPath + "\\"; + String rainfall = "rainfall\\"; + String basePath = path + tableName + "\\" + rainfall; + JSONObject jsonObject = getModule("rainfallmodule.json"); + String startTime = null; + String endTime = null; + for (String src : list + ) { + File directories = new File(basePath); + if (!directories.exists()) { + directories.mkdirs(); + System.out.println("Directories created successfully."); + } else { + System.out.println("Directories already exist."); + } + List<StationRainVo> stationRainVos = CsvToSQLiteUtils.getList(tableName, src); + if (startTime == null) { + startTime = stationRainVos.get(0).getDatetime(); + } + if (endTime == null) { + endTime = stationRainVos.get(stationRainVos.size() - 1).getDatetime(); + } + ZarrUtils.saveZarrRainfall(basePath + src, stationRainVos); + ZarrUtils.saveZarrTime(basePath + src, stationRainVos); + ZipUtils.toZarr(basePath + src, basePath + src + ".zip"); + System.out.println(src + "鐨剒arr鏁版嵁鐢熸垚====================="); + //json鎷艰 + String uuid = "UUID_" + UUID.randomUUID().toString(); + //鎷艰zarr + JSONObject dynamizer = new JSONObject(); + dynamizer.put("url", rainfall.replace("\\", "/") + src + ".zarr"); + dynamizer.put("gmlId", uuid); + jsonObject.getJSONArray("Dynamizers").add(dynamizer); + //鎷艰鍧愭爣 + JSONArray vertice = new JSONArray(); + vertice.add(stationRainVos.get(0).getLongitude()); + vertice.add(stationRainVos.get(0).getLatitude()); + vertice.add(0.0); + jsonObject.getJSONArray("vertices").add(vertice); + //鎷艰鍩虹淇℃伅 + JSONObject cityObject = new JSONObject(); + cityObject.put("type", "+Rainfall"); + JSONObject attribute = new JSONObject(); + attribute.put("name", src); + cityObject.put("attributes", attribute); + JSONArray geometry = new JSONArray(); + JSONObject metry = new JSONObject(); + metry.put("type", "MultiPoint"); + metry.put("lod", 0); + JSONArray boundarie = new JSONArray(); + boundarie.add(jsonObject.getJSONArray("vertices").size() - 1); + metry.put("boundaries", boundarie); + geometry.add(metry); + cityObject.put("geometry", geometry); + jsonObject.getJSONObject("CityObjects").put(uuid, cityObject); + } + File jsonFile = new File(path + tableName + "\\闄嶉洦閲�.json"); + if (jsonFile.exists()) { + jsonFile.createNewFile(); + } + FileWriter fileWriter = new FileWriter(path + tableName + "\\闄嶉洦閲�.json"); + fileWriter.write(jsonObject.toJSONString()); + fileWriter.close(); + SemUtils.cityJsonToSem(path + tableName + "\\闄嶉洦閲�.json", semUrl + "raingage.sem"); + getConfigJson(startTime, endTime, semUrl); + return success(semUrl + "raingage.sem"); + } + + public void getConfigJson(String startTime, String endTime, String semUrl) throws Exception { + SimpleDateFormat sdf = new SimpleDateFormat("yyyy/MM/dd HH:mm"); + Date start = sdf.parse(startTime); + Date end = sdf.parse(endTime); + SimpleDateFormat simpleDateFormat = new SimpleDateFormat("MM/dd/yyyy HH:mm:ss"); + JSONObject jsonObject = getModule("config.json"); + jsonObject.put("start_datetime", simpleDateFormat.format(start)); + 
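        /*
         * Editor's note (illustrative sketch, not part of the original commit): getConfigJson()
         * only reformats the rainfall time window -- the CSV/zarr side carries "yyyy/MM/dd HH:mm"
         * strings, while config.json is written with "MM/dd/yyyy HH:mm:ss". The conversion is a
         * plain SimpleDateFormat round trip, e.g.:
         *
         *   java.text.SimpleDateFormat in  = new java.text.SimpleDateFormat("yyyy/MM/dd HH:mm");
         *   java.text.SimpleDateFormat out = new java.text.SimpleDateFormat("MM/dd/yyyy HH:mm:ss");
         *   String converted = out.format(in.parse("2024/07/01 08:30"));   // -> "07/01/2024 08:30:00"
         */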
jsonObject.put("end_datetime", simpleDateFormat.format(end)); + FileWriter fileWriter = new FileWriter(semUrl + "\\config.json"); + fileWriter.write(jsonObject.toJSONString()); + fileWriter.close(); + } + + public R<String> pointTosem(List<GridDto> dtos, String token, String semUrl) throws Exception { + String path = outPath + "\\point\\"; + String publicKey = EntityLibraryUtils.getPublicKey(); + JSONArray jsonArrayList = EntityLibraryUtils.getPointInfo(dtos, token); + JSONObject jsonObject = getModule("pointmodule.json"); + for (int s = 0; s < jsonArrayList.size(); s++) { + JSONObject pointObject = jsonArrayList.getJSONObject(s); + //鎷艰鍧愭爣 + JSONArray vertice = new JSONArray(); + vertice.add(Double.valueOf(pointObject.getString("lon"))); + vertice.add(Double.valueOf(pointObject.getString("lat"))); + vertice.add(0.0); + jsonObject.getJSONArray("vertices").add(vertice); + //鎷艰鍩虹淇℃伅 + JSONObject cityObject = new JSONObject(); + cityObject.put("type", "+PipePoint"); + JSONObject attribute = new JSONObject(); + attribute.put("缁存姢鏃堕棿", pointObject.get("operatetime")); + attribute.put("缁存姢浜�", pointObject.get("operator")); + attribute.put("鍞竴缂栫爜", pointObject.get("seid")); + attribute.put("ID", pointObject.get("id")); + attribute.put("enti_uuid", pointObject.get("enti_uuid")); + attribute.put("鏍囪瘑鐮�", pointObject.get("bsm")); + attribute.put("绠$嚎绉嶇被", pointObject.get("gxzl")); + attribute.put("鍦伴潰楂樼▼", pointObject.get("dmgc")); + attribute.put("鐗瑰緛", pointObject.get("tz")); + attribute.put("闄勫睘鐗�", pointObject.get("fsw")); + attribute.put("寤虹瓚鐗�", pointObject.get("jzw")); + attribute.put("浜曞簳楂樼▼", pointObject.get("jdgc")); + attribute.put("浜曞簳鍩嬫繁", pointObject.get("jdms")); + attribute.put("浜曡剸鍩嬫繁", pointObject.get("jbms")); + attribute.put("浜曠洊褰㈢姸", pointObject.get("jgxz")); + attribute.put("浜曠洊灏哄", pointObject.get("jgcc")); + attribute.put("浜曠洊鏉愯川", pointObject.get("jgcz")); + attribute.put("浜曠洊鐘舵��", pointObject.get("jgzt")); + attribute.put("浜曡剸鏉愯川", pointObject.get("jbcz")); + attribute.put("浜曡剸灏哄", pointObject.get("jbcc")); + attribute.put("浜曟潗璐�", pointObject.get("jcz")); + attribute.put("浜曞昂瀵�", pointObject.get("jcc")); + attribute.put("浣跨敤鐘跺喌", pointObject.get("syzk")); + attribute.put("lon", Double.valueOf(pointObject.getString("lon"))); + attribute.put("lat", Double.valueOf(pointObject.getString("lat"))); + attribute.put("atti", pointObject.get("atti")); + attribute.put("闆ㄦ按闆嗙粨鐐�", pointObject.get("ysjjd")); + cityObject.put("attributes", attribute); + JSONArray geometry = new JSONArray(); + JSONObject metry = new JSONObject(); + metry.put("type", "MultiPoint"); + metry.put("lod", 0); + JSONArray boundarie = new JSONArray(); + boundarie.add(jsonObject.getJSONArray("vertices").size() - 1); + metry.put("boundaries", boundarie); + geometry.add(metry); + cityObject.put("geometry", geometry); + jsonObject.getJSONObject("CityObjects").put("UUID_" + UUID.randomUUID().toString(), cityObject); + } + long times = System.currentTimeMillis(); + String pointPath = path + times + "\\绠$偣.json"; + File dirFile = new File(path + times); + if (!dirFile.exists()) { + dirFile.mkdirs(); + } + FileWriter fileWriter = new FileWriter(pointPath); + fileWriter.write(jsonObject.toJSONString()); + fileWriter.close(); + SemUtils.cityJsonToSem(pointPath, semUrl + "node.sem"); + return success(semUrl + "node.sem"); + } + + + public R<String> pointToCityJson(String shpPath) throws Exception { + String path = outPath + "\\point\\"; + List<Map<String, Object>> list = ShpReadUtils.readPointShp(shpPath); + JSONObject 
jsonObject = getModule("pointmodule.json"); + for (Map<String, Object> map : list + ) { + //鎷艰鍧愭爣 + JSONArray vertice = new JSONArray(); + vertice.add(map.get("lon")); + vertice.add(map.get("lat")); + vertice.add(0.0); + jsonObject.getJSONArray("vertices").add(vertice); + //鎷艰鍩虹淇℃伅 + JSONObject cityObject = new JSONObject(); + cityObject.put("type", "+PipePoint"); + JSONObject attribute = new JSONObject(); + attribute.put("name", map.get("fsw")); + cityObject.put("attributes", attribute); + JSONArray geometry = new JSONArray(); + JSONObject metry = new JSONObject(); + metry.put("type", "MultiPoint"); + metry.put("lod", 0); + JSONArray boundarie = new JSONArray(); + boundarie.add(jsonObject.getJSONArray("vertices").size() - 1); + metry.put("boundaries", boundarie); + geometry.add(metry); + cityObject.put("geometry", geometry); + jsonObject.getJSONObject("CityObjects").put("UUID_" + UUID.randomUUID().toString(), cityObject); + } + long times = System.currentTimeMillis(); + String pointPath = path + times + "\\绠$偣.json"; + File dirFile = new File(path + times); + if (!dirFile.exists()) { + dirFile.mkdirs(); + } + FileWriter fileWriter = new FileWriter(pointPath); + fileWriter.write(jsonObject.toJSONString()); + fileWriter.close(); + String filePatn = inPath + "\\tongzhou\\"; + File file = new File(filePatn); + if (!file.exists()) { + file.mkdirs(); + } + SemUtils.cityJsonToSem(pointPath, filePatn + "node.sem"); + return success(filePatn + "node.sem"); + } + + public R<String> lineToSem(List<GridDto> dtos, String token, String semUrl) throws Exception { + String path = outPath + "\\line\\"; + String publicKey = EntityLibraryUtils.getPublicKey(); + JSONArray jsonArrayList = EntityLibraryUtils.getLineInfo(dtos, token); + if (jsonArrayList.size() == 0) { + return fail("璇ュ尯鍩熶笉瀛樺湪绠$嚎锛�"); + } + StringBuffer stringBuffer = new StringBuffer("id in ("); + for (int i = 0; i < jsonArrayList.size(); i++) { + jsonArrayList.getJSONObject(i).getInteger("id"); + stringBuffer.append("'" + jsonArrayList.getJSONObject(i).getInteger("id") + "',"); + } + String sql = stringBuffer.toString(); + sql = sql.substring(0, sql.length() - 1) + ")"; + StringBuffer paramBuffer = new StringBuffer(""); + while (!"".equals(sql)) { + if (sql.length() > 110) { + String item = sql.substring(0, 110); + paramBuffer.append(EntityLibraryUtils.encrypt(item, publicKey) + ","); + sql = sql.replace(item, ""); + } else { + paramBuffer.append(EntityLibraryUtils.encrypt(sql, publicKey) + ","); + sql = ""; + } + } + String src = paramBuffer.toString().substring(0, paramBuffer.length() - 1); + JSONArray arrayDetail = EntityLibraryUtils.getLineDetail(src, token); + JSONObject jsonObject = getModule("linemodule.json"); + for (int i = 0; i < arrayDetail.size(); i++) { + JSONObject detailJSONObject = arrayDetail.getJSONObject(i); + //鎷艰鍩虹淇℃伅 + JSONObject cityObject = new JSONObject(); + cityObject.put("type", "+PipeLine"); + JSONObject attribute = new JSONObject(); + attribute.put("缁存姢鏃堕棿", detailJSONObject.getJSONObject("properties").getLong("operatetime")); + attribute.put("缁存姢浜�", detailJSONObject.getJSONObject("properties").getString("operator")); + attribute.put("鍞竴缂栫爜", detailJSONObject.getJSONObject("properties").get("seid")); + attribute.put("ENTI_UUID", detailJSONObject.getJSONObject("properties").get("enti_uuid")); + attribute.put("ID", detailJSONObject.getJSONObject("properties").get("id")); + attribute.put("lon", detailJSONObject.getJSONObject("properties").get("lon")); + attribute.put("lat", 
detailJSONObject.getJSONObject("properties").get("lat")); + attribute.put("atti", detailJSONObject.getJSONObject("properties").get("atti")); + attribute.put("绠$嚎绉嶇被", detailJSONObject.getJSONObject("properties").get("gxzl")); + attribute.put("璧风偣鏍囪瘑鐮�", detailJSONObject.getJSONObject("properties").get("qdbsm")); + attribute.put("姝㈢偣鏍囪瘑鐮�", detailJSONObject.getJSONObject("properties").get("zdbsm")); + attribute.put("璧风偣楂樼▼", detailJSONObject.getJSONObject("properties").get("qdgc")); + attribute.put("璧烽珮绋嬬被鍨�", detailJSONObject.getJSONObject("properties").get("qgclx")); + attribute.put("璧风偣鍩嬫繁", detailJSONObject.getJSONObject("properties").get("qdms")); + attribute.put("姝㈢偣楂樼▼", detailJSONObject.getJSONObject("properties").get("zdgc")); + attribute.put("姝㈤珮绋嬬被鍨�", detailJSONObject.getJSONObject("properties").get("zgclx")); + attribute.put("姝㈢偣鍩嬫繁", detailJSONObject.getJSONObject("properties").get("zdms")); + attribute.put("鏉愯川", detailJSONObject.getJSONObject("properties").get("cz")); + attribute.put("鍩嬭鏂瑰紡", detailJSONObject.getJSONObject("properties").get("msfs")); + attribute.put("绠″緞1", detailJSONObject.getJSONObject("properties").get("gja")); + attribute.put("绠″緞2", detailJSONObject.getJSONObject("properties").get("gjb")); + attribute.put("浣跨敤鐘跺喌", detailJSONObject.getJSONObject("properties").get("syzk")); + attribute.put("绠$嚎娈甸暱搴�", detailJSONObject.getJSONObject("properties").get("gxdcd")); + attribute.put("groupID", detailJSONObject.getJSONObject("properties").get("groupid")); + attribute.put("Shape_Length", detailJSONObject.getJSONObject("properties").get("shape_length")); + attribute.put("鏍囪瘑鐮�", detailJSONObject.getJSONObject("properties").get("bsm")); + JSONArray geometry = new JSONArray(); + JSONObject metry = new JSONObject(); + metry.put("type", "MultiLineString"); + metry.put("lod", 0); + JSONArray boundarie = new JSONArray(); + JSONArray array = detailJSONObject.getJSONObject("geometry").getJSONArray("coordinates").getJSONArray(0); + for (int m = 0; m < array.size(); m++) { + JSONArray object = array.getJSONArray(m); + jsonObject.getJSONArray("vertices").add(ProjectionToGeographicUtil.getPoint(object.getDouble(0), object.getDouble(1))); + boundarie.add(jsonObject.getJSONArray("vertices").size() - 1); + } + JSONArray jsonArray = new JSONArray(); + jsonArray.add(boundarie); + metry.put("boundaries", jsonArray); + JSONArray metryArray = new JSONArray(); + metryArray.add(metry); + cityObject.put("geometry", metryArray); + cityObject.put("attributes", attribute); + cityObject.put("attributes", attribute); + geometry.add(metry); + jsonObject.getJSONObject("CityObjects").put("UUID_" + UUID.randomUUID().toString(), cityObject); + } + long times = System.currentTimeMillis(); + String pointPath = path + times + "\\绠$嚎.json"; + File dirFile = new File(path + times); + if (!dirFile.exists()) { + dirFile.mkdirs(); + } + FileWriter fileWriter = new FileWriter(pointPath); + fileWriter.write(jsonObject.toJSONString()); + fileWriter.close(); + SemUtils.cityJsonToSem(pointPath, semUrl + "link.sem"); + return success(semUrl + "link.sem"); + } + + public R<String> lineToCityJson(String shpPath) throws Exception { + String path = outPath + "\\line\\"; + List<Map<String, Object>> list = ShpReadUtils.readPointShp(shpPath); + JSONObject jsonObject = getModule("linemodule.json"); + for (Map<String, Object> map : list + ) { + //鎷艰鍩虹淇℃伅 + JSONObject cityObject = new JSONObject(); + cityObject.put("type", "+PipeLine"); + JSONObject attribute = new JSONObject(); + attribute.put("name", map.get("msfs")); 
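                /*
                 * Editor's note (illustrative sketch, not part of the original commit): lineToSem()
                 * above and this method encode geometry the CityJSON way -- every coordinate is
                 * appended to the document-level "vertices" array, and a MultiLineString boundary
                 * stores only the indices of those vertices. A JDK-only sketch of the indexing
                 * scheme (the names below are hypothetical):
                 *
                 *   java.util.List<double[]> vertices = new java.util.ArrayList<>();  // shared vertex pool
                 *   java.util.List<Integer> boundary  = new java.util.ArrayList<>();  // one line string
                 *   for (double[] lonLatH : segmentCoordinates) {
                 *       vertices.add(lonLatH);                    // append the vertex ...
                 *       boundary.add(vertices.size() - 1);        // ... and reference it by index
                 *   }
                 */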
+ JSONArray geometry = new JSONArray(); + JSONObject metry = new JSONObject(); + metry.put("type", "MultiLineString"); + metry.put("lod", 0); + JSONArray boundarie = new JSONArray(); + JSONArray array = JSONObject.parseObject(map.get("the_geom").toString()).getJSONArray("coordinates"); + for (int i = 0; i < array.size(); i++) { + JSONObject object = JSONObject.parseObject(array.get(i).toString()); + jsonObject.getJSONArray("vertices").add(ProjectionToGeographicUtil.getPoint(Double.valueOf(object.get("x").toString()), Double.valueOf(object.get("y").toString()))); + boundarie.add(jsonObject.getJSONArray("vertices").size() - 1); + } + JSONArray jsonArray = new JSONArray(); + jsonArray.add(boundarie); + metry.put("boundaries", jsonArray); + JSONArray metryArray = new JSONArray(); + metryArray.add(metry); + cityObject.put("geometry", metryArray); + cityObject.put("attributes", attribute); + cityObject.put("attributes", attribute); + geometry.add(metry); + jsonObject.getJSONObject("CityObjects").put("UUID_" + UUID.randomUUID().toString(), cityObject); + } + long times = System.currentTimeMillis(); + String pointPath = path + times + "\\绠$嚎.json"; + File dirFile = new File(path + times); + if (!dirFile.exists()) { + dirFile.mkdirs(); + } + FileWriter fileWriter = new FileWriter(pointPath); + fileWriter.write(jsonObject.toJSONString()); + fileWriter.close(); + String filePatn = inPath + "\\tongzhou\\"; + File file = new File(filePatn); + if (!file.exists()) { + file.mkdirs(); + } + SemUtils.cityJsonToSem(pointPath, filePatn + "link.sem"); + return success(filePatn + "link.sem"); + } + + public R<String> terrainToSem(List<GridDto> dtos, String semUrl) throws Exception { + long times = System.currentTimeMillis(); + String path = outPath + "\\terrain\\" + times + "\\"; + File dirFile = new File(path + "appearance"); + if (!dirFile.exists()) { + dirFile.mkdirs(); + } + double minx = 180.0; + double maxx = 0.0; + double miny = 180.0; + double maxy = 0.0; + for (GridDto dto : dtos + ) { + if (dto.getLon() > maxx) { + maxx = dto.getLon(); + } + if (dto.getLon() < minx) { + minx = dto.getLon(); + } + if (dto.getLat() > maxy) { + maxy = dto.getLat(); + } + if (dto.getLat() < miny) { + miny = dto.getLat(); + } + } + JSONArray min = ProjectionToGeographicUtil.get4548Point(minx, miny); + JSONArray max = ProjectionToGeographicUtil.get4548Point(maxx, maxy); + String tifPath = path + "terrain.tif"; + TiffClipper.cropTiffByLatLon(config.getTifPath(), tifPath, min.getDouble(0), min.getDouble(1), max.getDouble(0), max.getDouble(1)); + String pngPath = path + "appearance\\terrain.png"; + TiffToRGBUtil.tifToPng(tifPath, pngPath); + JSONObject jsonObject = getModule("terrainmodule.json"); + JSONArray array=new JSONArray(); + array.add(ProjectionToGeographicUtil.getPointAndHight(dtos.get(0).getLon(),dtos.get(0).getLat())); + array.add(ProjectionToGeographicUtil.getPointAndHight(dtos.get(1).getLon(),dtos.get(1).getLat())); + array.add(ProjectionToGeographicUtil.getPointAndHight(dtos.get(2).getLon(),dtos.get(2).getLat())); + array.add(ProjectionToGeographicUtil.getPointAndHight(dtos.get(3).getLon(),dtos.get(3).getLat())); + //jsonObject.put("vertices", TiffCoordinateExtractorUtil.getCoordinate(tifPath)); + jsonObject.put("vertices", array); + FileWriter fileWriter = new FileWriter(path + "terrain.json"); + fileWriter.write(jsonObject.toJSONString()); + fileWriter.close(); + SemUtils.cityJsonToSem(path + "terrain.json", semUrl + "terrain.sem"); + return success(semUrl + "terrain.sem"); + } + + public R<String> 
terrainToCityJson(String tifPath) throws Exception { + long times = System.currentTimeMillis(); + String path = outPath + "\\terrain\\" + times + "\\"; + File dirFile = new File(path + "appearance"); + if (!dirFile.exists()) { + dirFile.mkdirs(); + } + String pngPath = path + "appearance\\terrain.png"; + TiffToRGBUtil.tifToPng(tifPath, pngPath); + JSONObject jsonObject = getModule("terrainmodule.json"); + jsonObject.put("vertices", TiffCoordinateExtractorUtil.getCoordinate(tifPath)); + FileWriter fileWriter = new FileWriter(path + "terrain.json"); + fileWriter.write(jsonObject.toJSONString()); + fileWriter.close(); + String filePatn = inPath + "\\tongzhou\\"; + File file = new File(filePatn); + if (!file.exists()) { + file.mkdirs(); + } + SemUtils.cityJsonToSem(path + "terrain.json", filePatn + "terrain.sem"); + return success(filePatn + "terrain.sem"); + } + + public R<String> gridToCityJson(List<GridDto> dtos, String semUrl) throws Exception { + long times = System.currentTimeMillis(); + String path = outPath + "\\grid\\" + times + "\\"; + File dirFile = new File(path); + if (!dirFile.exists()) { + dirFile.mkdirs(); + } + JSONObject jsonObject = getModule("grid.json"); + JSONArray array = jsonObject.getJSONArray("vertices"); + for (GridDto dto : dtos + ) { + JSONArray jsonArray = new JSONArray(); + jsonArray.add(dto.getLon()); + jsonArray.add(dto.getLat()); + jsonArray.add(0); + array.add(jsonArray); + } + jsonObject.put("vertices", array); + FileWriter fileWriter = new FileWriter(path + "grid.json"); + fileWriter.write(jsonObject.toJSONString()); + fileWriter.close(); + SemUtils.cityJsonToSem(path + "grid.json", semUrl + "grid.sem"); + return success(semUrl + "grid.sem"); + } + + public R<String> riverToSem(String semUrl) throws Exception { + long times = System.currentTimeMillis(); + String path = outPath + "\\river\\" + times + "\\"; + File dirFile = new File(path); + if (!dirFile.exists()) { + dirFile.mkdirs(); + } + JSONObject jsonObject = getModule("river.json"); + FileWriter fileWriter = new FileWriter(path + "river.json"); + fileWriter.write(jsonObject.toJSONString()); + fileWriter.close(); + SemUtils.cityJsonToSem(path + "river.json", semUrl + "river.sem"); + return success(semUrl + "river.sem"); + } + + public R<String> landuseToSem(String semUrl) throws Exception { + long times = System.currentTimeMillis(); + String path = outPath + "\\landuse\\" + times + "\\"; + File dirFile = new File(path); + if (!dirFile.exists()) { + dirFile.mkdirs(); + } + JSONObject jsonObject = getModule("landuse.json"); + FileWriter fileWriter = new FileWriter(path + "landuse.json"); + fileWriter.write(jsonObject.toJSONString()); + fileWriter.close(); + SemUtils.cityJsonToSem(path + "landuse.json", semUrl + "landuse.sem"); + return success(semUrl + "landuse.sem"); + } + + public JSONObject getModule(String moduleName) { + JSONObject jsonObject = new JSONObject(); + try { + URL resource = Resources.getResource(moduleName); + String fileContent = Resources.toString(resource, StandardCharsets.UTF_8); + jsonObject = JSONObject.parseObject(fileContent); + System.out.println(fileContent); + } catch (Exception e) { + e.printStackTrace(); + } + return jsonObject; + } + +} diff --git a/src/main/java/com/se/nsl/controller/SwwFilesDealController.java b/src/main/java/com/se/nsl/controller/SwwFilesDealController.java new file mode 100644 index 0000000..5e0d26b --- /dev/null +++ b/src/main/java/com/se/nsl/controller/SwwFilesDealController.java @@ -0,0 +1,46 @@ +package com.se.nsl.controller; + +import 
com.se.nsl.service.SwwFilesDealService; +import io.swagger.annotations.Api; +import io.swagger.annotations.ApiOperation; +import lombok.RequiredArgsConstructor; +import org.springframework.http.ResponseEntity; +import org.springframework.web.bind.annotation.CrossOrigin; +import org.springframework.web.bind.annotation.PostMapping; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RestController; + +import javax.annotation.Resource; + + +/** + * sww 鏂囦欢浜ゆ槗鎺у埗鍣� + * + * @author xingjinshuang@smartearth.cn + * @date 2025/02/20 + */ +@Api(tags = " Sww 鐩稿叧鎺ュ彛") +@CrossOrigin(origins = "*") +@RequiredArgsConstructor +@RestController +@RequestMapping("/api/v1/sww") +public class SwwFilesDealController { + // 澶勭悊 Sww 鏂囦欢鐨勭浉鍏抽�昏緫 + + @Resource + private SwwFilesDealService swwFilesDealService; + + /** + * Sww 鏂囦欢璇诲彇妯℃嫙 + */ + @ApiOperation("1 - Sww 鏂囦欢璇诲彇妯℃嫙") + @PostMapping("/read") + public ResponseEntity<Object> readSwwFile() throws Exception { + String filePath = "D:\\0a_project\\simulation\\other\\result_new.sem.db"; + return ResponseEntity.ok(swwFilesDealService.readSwwFile(filePath)); + } + + + + +} diff --git a/src/main/java/com/se/nsl/controller/TestController.java b/src/main/java/com/se/nsl/controller/TestController.java new file mode 100644 index 0000000..a5a6fc3 --- /dev/null +++ b/src/main/java/com/se/nsl/controller/TestController.java @@ -0,0 +1,161 @@ +package com.se.nsl.controller; + +import com.se.nsl.domain.po.DataPo; +import com.se.nsl.domain.vo.R; +import com.se.nsl.helper.ShpHelper; +import com.se.nsl.helper.StringHelper; +import com.se.nsl.service.*; +import io.swagger.annotations.Api; +import io.swagger.annotations.ApiImplicitParam; +import io.swagger.annotations.ApiImplicitParams; +import io.swagger.annotations.ApiOperation; +import lombok.extern.slf4j.Slf4j; +import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RestController; + +import javax.annotation.Resource; + +@Api(tags = "Test") +@Slf4j +@RestController +@RequestMapping("/test") +@SuppressWarnings("ALL") +public class TestController extends BaseController { + @Resource + UwService uwService; + + @Resource + GedbService gedbService; + + @Resource + SimuService simuService; + + @Resource + Hdf5Service hdf5Service; + + @Resource + ResultService resultService; + + @ApiOperation(value = "褰撳墠鏃堕棿 *") + @GetMapping("/getTime") + public Object getTime() { + return System.currentTimeMillis(); + } + + @ApiOperation(value = "testCallExe *") + @ApiImplicitParams({ + @ApiImplicitParam(name = "path", value = "璺緞", dataType = "String", paramType = "query", example = "20241010095328") + }) + @GetMapping("/testCallExe") + public R<Object> testCallExe(String path) { + try { + DataPo data = new DataPo(); + data.setInPath(path); + + String str = uwService.callExe(data); + + return success(str); + } catch (Exception ex) { + return fail(ex, null); + } + } + + @ApiOperation(value = "testResuslt *") + @ApiImplicitParams({ + @ApiImplicitParam(name = "path", value = "璺緞", dataType = "String", paramType = "query", example = "20241107092342"), + @ApiImplicitParam(name = "time", value = "鏃堕棿", dataType = "String", paramType = "query", example = "2024-11-07 09:23:42") + }) + @GetMapping("/testResuslt") + public R<Object> testResuslt(String path, String time) { + try { + DataPo data = new DataPo(); + data.setEpsg(4548); + data.setInPath(path); + 
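            /*
             * Editor's note (illustrative, not part of the original commit): judging from the
             * swagger example value "2024-11-07 09:23:42", StringHelper.YMDHMS_FORMAT is presumably
             * a "yyyy-MM-dd HH:mm:ss" SimpleDateFormat; the next line parses the request parameter
             * into the DataPo start time. Equivalent JDK-only parsing would be:
             *
             *   java.util.Date startTime =
             *       new java.text.SimpleDateFormat("yyyy-MM-dd HH:mm:ss").parse("2024-11-07 09:23:42");
             */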
data.setStartTime(StringHelper.YMDHMS_FORMAT.parse(time)); + + resultService.process(data); + + return success("ok"); + } catch (Exception ex) { + return fail(ex, null); + } + } + + @ApiOperation(value = "testSww2Tif *") + @GetMapping("/testSww2Tif") + public R<Object> testSww2Tif() { + try { + DataPo data = new DataPo(); + data.setEpsg(4548); + data.setInPath("20241010095328"); + data.setStartTime(StringHelper.YMDHMS_FORMAT.parse("2024-09-30 00:00:00")); + + Object rs = uwService.copeDrainFiles(data); + + return success(rs); + } catch (Exception ex) { + return fail(ex, null); + } + } + + @ApiOperation(value = "testRainfall *") + @GetMapping("/testRainfall") + public R<Object> testRainfall() { + try { + DataPo data = new DataPo(); + data.setEpsg(4548); + data.setTotal(60.0); + data.setDuration(60); + data.setInPath("20241010095328"); + data.setStartTime(StringHelper.YMDHMS_FORMAT.parse("2024-07-01 00:00:00")); + + uwService.createRainFile(data); + + return success("ok"); + } catch (Exception ex) { + return fail(ex, null); + } + } + + @ApiOperation(value = "testToken *") + @GetMapping("/testToken") + public R<Object> testToken() { + try { + String token = gedbService.getToken(); + + return success(token); + } catch (Exception ex) { + return fail(ex, null); + } + } + + @ApiOperation(value = "testPolygonize *") + @GetMapping("/testPolygonize") + public R<Object> testPolygonize() { + try { + ShpHelper.test(); + + return success("ok"); + } catch (Exception ex) { + return fail(ex, null); + } + } + + @ApiOperation(value = "testH5 *") + @GetMapping("/testH5") + public R<Object> testH5() { + try { + DataPo data = new DataPo(); + data.setEpsg(4548); + data.setInPath("20241010095328"); + data.setStartTime(StringHelper.YMDHMS_FORMAT.parse("2024-09-30 00:00:00")); + + hdf5Service.test(data); + + return success("ok"); + } catch (Exception ex) { + return fail(ex, null); + } + } +} diff --git a/src/main/java/com/se/nsl/controller/WaterController.java b/src/main/java/com/se/nsl/controller/WaterController.java new file mode 100644 index 0000000..53988de --- /dev/null +++ b/src/main/java/com/se/nsl/controller/WaterController.java @@ -0,0 +1,259 @@ +package com.se.nsl.controller; + +import com.se.nsl.domain.po.SimuPo; +import com.se.nsl.domain.vo.PondingVo; +import com.se.nsl.domain.vo.R; +import com.se.nsl.helper.StringHelper; +import com.se.nsl.helper.WebHelper; +import com.se.nsl.service.SimuService; +import com.se.nsl.service.WaterService; +import io.swagger.annotations.Api; +import io.swagger.annotations.ApiImplicitParam; +import io.swagger.annotations.ApiImplicitParams; +import io.swagger.annotations.ApiOperation; +import lombok.extern.slf4j.Slf4j; +import org.springframework.http.HttpStatus; +import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.PathVariable; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RestController; + +import javax.annotation.Resource; +import javax.servlet.http.HttpServletResponse; +import java.io.File; + +@Api(tags = "鍐呮稘绠$悊") +@Slf4j +@RestController +@SuppressWarnings("ALL") +@RequestMapping("/waterlogging") +public class WaterController extends BaseController { + @Resource + SimuService simuService; + + @Resource + WaterService waterService; + + private final static int MIN_SIZE = 10; + + private final static int MAX_SIZE = 80000; + + private final static long Y2000 = 949334400000L; + + @ApiOperation(value = "鑾峰彇鍏冩暟鎹甁SON") + 
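    /*
     * Editor's note (illustrative, not part of the original commit): every read endpoint in this
     * controller follows the same pattern -- validate the serviceName path variable, fetch the raw
     * bytes of a result file through waterService.getson(serviceName, fileName), and stream them
     * back with WebHelper.writeBytes(). A minimal sketch of such a byte-streaming helper
     * (hypothetical stand-in, not the actual WebHelper implementation elsewhere in this patch):
     *
     *   static void writeBytes(byte[] bytes, javax.servlet.http.HttpServletResponse res) throws java.io.IOException {
     *       res.setContentType("application/json;charset=UTF-8");   // layer.json / rainfall.json / water.json
     *       res.getOutputStream().write(bytes);
     *       res.getOutputStream().flush();
     *   }
     */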
@GetMapping("/{serviceName}/layer.json") + public void getLayerJson(@PathVariable String serviceName, HttpServletResponse res) { + try { + if (!validate(serviceName, res)) { + return; + } + + byte[] bytes = waterService.getson(serviceName, "layer.json"); + + WebHelper.writeBytes(bytes, res); + } catch (Exception ex) { + log.error(ex.getMessage(), ex); + WebHelper.writeStr2Page(res, HttpStatus.INTERNAL_SERVER_ERROR, ex.getMessage()); + } + } + + @ApiOperation(value = "鑾峰彇闄嶆按鏇茬嚎JSON") + @GetMapping("/{serviceName}/rainfall.json") + public void getRainfallJson(@PathVariable String serviceName, HttpServletResponse res) { + try { + if (!validate(serviceName, res)) { + return; + } + + byte[] bytes = waterService.getson(serviceName, "rainfall.json"); + + WebHelper.writeBytes(bytes, res); + } catch (Exception ex) { + log.error(ex.getMessage(), ex); + WebHelper.writeStr2Page(res, HttpStatus.INTERNAL_SERVER_ERROR, ex.getMessage()); + } + } + + @ApiOperation(value = "鑾峰彇寤虹瓚鐗╂秹姘碕SON") + @GetMapping("/{serviceName}/building.json") + public void getBuildingJson(@PathVariable String serviceName, HttpServletResponse res) { + try { + if (!validate(serviceName, res)) { + return; + } + + byte[] bytes = waterService.getson(serviceName, "building.json"); + + WebHelper.writeBytes(bytes, res); + } catch (Exception ex) { + log.error(ex.getMessage(), ex); + WebHelper.writeStr2Page(res, HttpStatus.INTERNAL_SERVER_ERROR, ex.getMessage()); + } + } + + @ApiOperation(value = "鑾峰彇绉按JSON") + @GetMapping("/{serviceName}/{timestamp}/water.json") + public void getWaterJson(@PathVariable String serviceName, @PathVariable String timestamp, HttpServletResponse res) { + try { + if (!validate(serviceName, res)) { + return; + } + + byte[] bytes = waterService.getson(serviceName, "waters" + File.separator + timestamp + File.separator + "water.json"); + + WebHelper.writeBytes(bytes, res); + } catch (Exception ex) { + log.error(ex.getMessage(), ex); + WebHelper.writeStr2Page(res, HttpStatus.INTERNAL_SERVER_ERROR, ex.getMessage()); + } + } + + @ApiOperation(value = "鑾峰彇鍦板舰楂樺害鍥�") + @GetMapping("/{serviceName}/terrain") + public void getTerraMap(@PathVariable String serviceName, Integer width, Integer height, HttpServletResponse res) { + try { + if (!validate(serviceName, width, height, res)) { + return; + } + + String file = waterService.getTerraMap(serviceName, width, height); + + WebHelper.writePng(file, res); + } catch (Exception ex) { + log.error(ex.getMessage(), ex); + WebHelper.writeStr2Page(res, HttpStatus.INTERNAL_SERVER_ERROR, ex.getMessage()); + } + } + + @ApiOperation(value = "鑾峰彇姘撮潰楂樺害鍥�") + @GetMapping("/{serviceName}/waterMap") + public void getWaterMap(@PathVariable String serviceName, Integer width, Integer height, Long timestamp, HttpServletResponse res) { + try { + if (!validate(serviceName, width, height, timestamp, res)) { + return; + } + + String file = waterService.getWaterMap(serviceName, width, height, timestamp); + + WebHelper.writePng(file, res); + } catch (Exception ex) { + log.error(ex.getMessage(), ex); + WebHelper.writeStr2Page(res, HttpStatus.INTERNAL_SERVER_ERROR, ex.getMessage()); + } + } + + @ApiOperation(value = "鑾峰彇姘存祦鍚戞祦閫熷浘") + @GetMapping("/{serviceName}/flowMap") + public void getFlowMap(@PathVariable String serviceName, Integer width, Integer height, Long timestamp, HttpServletResponse res) { + try { + if (!validate(serviceName, width, height, timestamp, res)) { + return; + } + + String file = waterService.getFlowMap(serviceName, width, height, timestamp); + + WebHelper.writePng(file, res); + } 
catch (Exception ex) { + log.error(ex.getMessage(), ex); + WebHelper.writeStr2Page(res, HttpStatus.INTERNAL_SERVER_ERROR, ex.getMessage()); + } + } + + @ApiOperation(value = "鏍规嵁鍧愭爣鏌ヨ绉按娣卞害") + @ApiImplicitParams({ + @ApiImplicitParam(name = "serviceName", value = "鏈嶅姟鍚�", dataType = "String", paramType = "path", example = "20241010095328"), + @ApiImplicitParam(name = "x", value = "X", dataType = "double", paramType = "query", example = "116.6447998"), + @ApiImplicitParam(name = "y", value = "Y", dataType = "double", paramType = "query", example = "39.8868915"), + @ApiImplicitParam(name = "timestamp", value = "鏃堕棿鎴�", dataType = "long", paramType = "query", example = "1730217660000") + }) + @GetMapping("/{serviceName}/getWaterHeight") + public R<Object> getWaterHeight(@PathVariable String serviceName, double x, double y, long timestamp, HttpServletResponse res) { + try { + SimuPo simu = simuService.getSimuByServiceName(serviceName); + if (null == simu) { + return null; + } + + Double depth = waterService.getWaterHeight(simu, x, y, timestamp); + Double area = waterService.getWaterArea(simu, x, y, timestamp); + + return success(new PondingVo(depth, area)); + } catch (Exception ex) { + log.error(ex.getMessage(), ex); + return fail(ex); + } + } + + @ApiOperation(value = "鏍规嵁seid鏌ヨ寤虹瓚鐗╂秹姘存繁搴�") + @ApiImplicitParams({ + @ApiImplicitParam(name = "serviceName", value = "鏈嶅姟鍚�", dataType = "String", paramType = "path", example = "20241010095328"), + @ApiImplicitParam(name = "seid", value = "X", dataType = "String", paramType = "query", example = "5_f128d8b1aba6455c88d2f42334ca62bb") + }) + @GetMapping("/{serviceName}/getBuildingDepthBySeid") + public R<Object> getBuildingDepthBySeid(@PathVariable String serviceName, String seid) { + try { + if (StringHelper.isEmpty(serviceName) || StringHelper.isEmpty(seid)) { + return null; + } + + // 鏍规嵁鏈嶅姟鍚�+鏃堕棿鎴�+鍧愭爣锛屾煡璇㈠搴旂殑绉按娣卞害 + return success(waterService.getBuildingDepthBySeid(serviceName, seid)); + } catch (Exception ex) { + log.error(ex.getMessage(), ex); + return fail(ex); + } + } + + + @ApiOperation(value = "鏍规嵁timestamp鏌ヨ寤虹瓚鐗╂秹姘存繁搴�") + @ApiImplicitParams({ + @ApiImplicitParam(name = "serviceName", value = "鏈嶅姟鍚�", dataType = "String", paramType = "path", example = "20241010095328"), + @ApiImplicitParam(name = "timestamp", value = "鏃堕棿鎴�", dataType = "Long", paramType = "query", example = "1730217660000") + }) + @GetMapping("/{serviceName}/getBuildingDepthByTime") + public R<Object> getBuildingDepthByTime(@PathVariable String serviceName, Long timestamp) { + try { + if (StringHelper.isEmpty(serviceName) || null == timestamp) { + return null; + } + + // 鏍规嵁鏈嶅姟鍚�+鏃堕棿鎴�+鍧愭爣锛屾煡璇㈠搴旂殑绉按娣卞害 + return success(waterService.getBuildingDepthByTime(serviceName, timestamp)); + } catch (Exception ex) { + log.error(ex.getMessage(), ex); + return fail(ex); + } + } + + private boolean validate(String serviceName, HttpServletResponse res) { + if (WebHelper.isEmpty(serviceName)) { + return WebHelper.writeJson2Page(res, HttpStatus.BAD_REQUEST, "鏈嶅姟鍚嶄笉鑳戒负绌�"); + } + + return true; + } + + private boolean validate(String serviceName, Integer width, Integer height, HttpServletResponse res) { + return validate(serviceName, width, height, Y2000, res); + } + + /** + * 楠岃瘉 + */ + private boolean validate(String serviceName, Integer width, Integer height, Long timestamp, HttpServletResponse res) { + if (WebHelper.isEmpty(serviceName)) { + return WebHelper.writeJson2Page(res, HttpStatus.BAD_REQUEST, "鏈嶅姟鍚嶄笉鑳戒负绌�"); + } + if (null == width || width < MIN_SIZE || width > MAX_SIZE) { 
+ return WebHelper.writeJson2Page(res, HttpStatus.BAD_REQUEST, "鍥惧儚瀹藉害涓嶈兘涓虹┖涓斿彇鍊艰寖鍥翠负" + MIN_SIZE + "~" + MAX_SIZE + "涔嬮棿"); + } + if (null == height || height < MIN_SIZE || height > MAX_SIZE) { + return WebHelper.writeJson2Page(res, HttpStatus.BAD_REQUEST, "鍥惧儚楂樺害涓嶈兘涓虹┖涓斿彇鍊艰寖鍥翠负" + MIN_SIZE + "~" + MAX_SIZE + "涔嬮棿"); + } + if (null == timestamp || timestamp < 0) { + return WebHelper.writeJson2Page(res, HttpStatus.BAD_REQUEST, "鏃堕棿涓嶈兘涓虹┖涓斿ぇ浜�0"); + } + + return true; + } +} diff --git a/src/main/java/com/se/nsl/domain/EntityDataBase.java b/src/main/java/com/se/nsl/domain/EntityDataBase.java new file mode 100644 index 0000000..a609869 --- /dev/null +++ b/src/main/java/com/se/nsl/domain/EntityDataBase.java @@ -0,0 +1,38 @@ +package com.se.nsl.domain; + +import io.swagger.annotations.ApiModel; +import io.swagger.annotations.ApiModelProperty; +import lombok.Data; + +import java.io.Serializable; + +/** + * 瀹炰綋鏁版嵁搴� + * + * @author xingjinshuang + * @date 2024/07/04 + */ +@Data +@ApiModel(value = "EntityDataBase", description = "瀹炰綋鏁版嵁搴撶浉鍏冲唴瀹�") +public class EntityDataBase implements Serializable { + private static final long serialVersionUID = 1L; + + /** + * 鐢ㄦ埛鍚� + */ + @ApiModelProperty("鐢ㄦ埛鍚�") + private String userName; + + /** + * 鐢ㄦ埛瀵嗙爜 + */ + @ApiModelProperty("鐢ㄦ埛瀵嗙爜") + private String userPassword; + + /** + * 鏁版嵁搴撳悕 + */ + @ApiModelProperty("鏁版嵁搴撳悕") + private String databaseName; + +} diff --git a/src/main/java/com/se/nsl/domain/EntityTypeInfo.java b/src/main/java/com/se/nsl/domain/EntityTypeInfo.java new file mode 100644 index 0000000..f0d62ab --- /dev/null +++ b/src/main/java/com/se/nsl/domain/EntityTypeInfo.java @@ -0,0 +1,30 @@ +package com.se.nsl.domain; + +import io.swagger.annotations.ApiModel; +import lombok.Data; + +import java.io.Serializable; + +@Data +@ApiModel(value = "EntityTypeInfo", description = "瀹炰綋搴撲笉鍚岀被鍨嬬殑淇℃伅") +public class EntityTypeInfo implements Serializable { + + private static final long serialVersionUID = 1L; + + private String token; + + private Integer start; + + private boolean containCount; + + private Integer count; + + private String dbid; + + private String layerid; + + private String like; + + private String querytype; + +} diff --git a/src/main/java/com/se/nsl/domain/LoginParams.java b/src/main/java/com/se/nsl/domain/LoginParams.java new file mode 100644 index 0000000..b8ae05c --- /dev/null +++ b/src/main/java/com/se/nsl/domain/LoginParams.java @@ -0,0 +1,17 @@ +package com.se.nsl.domain; + +import lombok.Data; + +import java.io.Serializable; + +@Data +public class LoginParams implements Serializable { + + private static final long serialVersionUID = 1L; + + // 鐢ㄦ埛鍚� + private String userid; + // 瀵嗙爜 + private String password; + +} diff --git a/src/main/java/com/se/nsl/domain/dto/BuildingDto.java b/src/main/java/com/se/nsl/domain/dto/BuildingDto.java new file mode 100644 index 0000000..427c2db --- /dev/null +++ b/src/main/java/com/se/nsl/domain/dto/BuildingDto.java @@ -0,0 +1,34 @@ +package com.se.nsl.domain.dto; + +import org.gdal.ogr.Geometry; + +@SuppressWarnings("ALL") +public class BuildingDto { + private String id; + + private Geometry geom; + + public BuildingDto() { + } + + public BuildingDto(String id, Geometry geom) { + this.id = id; + this.geom = geom; + } + + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public Geometry getGeom() { + return geom; + } + + public void setGeom(Geometry geom) { + this.geom = geom; + } +} diff --git a/src/main/java/com/se/nsl/domain/dto/ConfigDto.java 
b/src/main/java/com/se/nsl/domain/dto/ConfigDto.java new file mode 100644 index 0000000..9f58880 --- /dev/null +++ b/src/main/java/com/se/nsl/domain/dto/ConfigDto.java @@ -0,0 +1,459 @@ +package com.se.nsl.domain.dto; + +import com.se.nsl.config.PropertiesConfig; +import com.se.nsl.helper.WebHelper; + +import java.text.SimpleDateFormat; +import java.util.Calendar; +import java.util.Date; + +@SuppressWarnings("ALL") +public class ConfigDto { + public static final SimpleDateFormat DATE_FORMAT = new SimpleDateFormat("MM/dd/yyyy HH:mm:ss"); + + private String casedir = "case"; + private String terrain_file = "DEM4548.tif"; + private String mesh_file = "tongzhou-mini.msh"; + private String studyzone_file = "studyzone_difference.shp"; + private String refinezones_file = "refinezones.shp"; + private String buildings_file = "buildings_inside.shp"; + private String nodes_file = "pipeline-demo-point.shp"; + private String links_file = "pipeline-demo-conduit.shp"; + private String junctions_file = "pipeline-demo-point-junctions.shp"; + private String raingage_file = "RainGage.dat"; + private String barrier_file = "barrier.shp"; + private String drainage_outfile = "tongzhou-mini-drainage.h5"; + private String sww_outfile = "tongzhou-mini.sww"; + private String flow_units = "LPS"; + private String infiltration = "HORTON"; + private String link_offsets = "DEPTH"; + private Double min_slope = 0.01; + private String allow_ponding = "NO"; + private String skip_steady_state = "NO"; + private String flow_routing = "DYNWAVE"; + private String start_datetime = "06/12/2024 12:00:00"; + private String end_datetime = "06/12/2024 13:00:00"; + private String report_start_datetime = "06/12/2024 00:00:00"; + private String report_step = "00:01:00"; + private String routing_step = "00:00:10"; + private String inertial_damping = "PARTIAL"; + private String normal_flow_limited = "BOTH"; + private String force_main_equation = "H-W"; + private Integer max_trials = 20; + private Double minimum_step = 0.1; + private Double min_surfarea = 0.5; + private Integer threads = 8; + private Double Manning = 0.014; + private Integer areas_slope = 1; + private Integer areas_imperv = 80; + private Double areas_n_imperv = 0.015; + private Double areas_n_perv = 0.24; + private Double areas_storage_imperv = 0.06; + private Integer areas_storage_perv = 3; + private Integer areas_pct_zero = 25; + private Integer init_depth = 0; + private Double friction = 0.01; + private Double terrain_smoothing = 0.1; + private String flow_algorithm = "DE1"; + private Integer mesh_size = 5; + private Integer save_step = 1; + + public void setProperties(String dirName, Date startTime, Integer minutes, PropertiesConfig config) { + this.casedir = dirName; + this.terrain_file = config.getDemFile(); + this.studyzone_file = config.getZoneName(); + this.buildings_file = config.getShpNames().get(2); + this.nodes_file = config.getShpNames().get(0); + this.links_file = config.getShpNames().get(1); + this.junctions_file = config.getJunctionName(); + this.raingage_file = config.getRaingage(); + this.barrier_file = config.getBarrierName(); + this.drainage_outfile = dirName + ".h5"; + this.sww_outfile = dirName + ".sww"; + this.flow_units = config.getFlowUnits(); + this.threads = WebHelper.getCpuCores(); + + Calendar calendar = Calendar.getInstance(); + calendar.setTime(startTime); + //calendar.set(Calendar.HOUR_OF_DAY, 0); + //calendar.set(Calendar.MINUTE, 0); + //calendar.set(Calendar.SECOND, 0); + calendar.set(Calendar.MILLISECOND, 0); + + String start = 
DATE_FORMAT.format(calendar.getTime()); + calendar.add(Calendar.MINUTE, minutes); + String end = DATE_FORMAT.format(calendar.getTime()); + + this.start_datetime = start; + this.report_start_datetime = start; + this.end_datetime = end; + } + + public String getCasedir() { + return casedir; + } + + public void setCasedir(String casedir) { + this.casedir = casedir; + } + + public String getTerrain_file() { + return terrain_file; + } + + public void setTerrain_file(String terrain_file) { + this.terrain_file = terrain_file; + } + + public String getMesh_file() { + return mesh_file; + } + + public void setMesh_file(String mesh_file) { + this.mesh_file = mesh_file; + } + + public String getStudyzone_file() { + return studyzone_file; + } + + public void setStudyzone_file(String studyzone_file) { + this.studyzone_file = studyzone_file; + } + + public String getRefinezones_file() { + return refinezones_file; + } + + public void setRefinezones_file(String refinezones_file) { + this.refinezones_file = refinezones_file; + } + + public String getBuildings_file() { + return buildings_file; + } + + public void setBuildings_file(String buildings_file) { + this.buildings_file = buildings_file; + } + + public String getNodes_file() { + return nodes_file; + } + + public void setNodes_file(String nodes_file) { + this.nodes_file = nodes_file; + } + + public String getLinks_file() { + return links_file; + } + + public void setLinks_file(String links_file) { + this.links_file = links_file; + } + + public String getJunctions_file() { + return junctions_file; + } + + public void setJunctions_file(String junctions_file) { + this.junctions_file = junctions_file; + } + + public String getRaingage_file() { + return raingage_file; + } + + public void setRaingage_file(String raingage_file) { + this.raingage_file = raingage_file; + } + + public String getBarrier_file() { + return barrier_file; + } + + public void setBarrier_file(String barrier_file) { + this.barrier_file = barrier_file; + } + + public String getDrainage_outfile() { + return drainage_outfile; + } + + public void setDrainage_outfile(String drainage_outfile) { + this.drainage_outfile = drainage_outfile; + } + + public String getSww_outfile() { + return sww_outfile; + } + + public void setSww_outfile(String sww_outfile) { + this.sww_outfile = sww_outfile; + } + + public String getFlow_units() { + return flow_units; + } + + public void setFlow_units(String flow_units) { + this.flow_units = flow_units; + } + + public String getInfiltration() { + return infiltration; + } + + public void setInfiltration(String infiltration) { + this.infiltration = infiltration; + } + + public String getLink_offsets() { + return link_offsets; + } + + public void setLink_offsets(String link_offsets) { + this.link_offsets = link_offsets; + } + + public Double getMin_slope() { + return min_slope; + } + + public void setMin_slope(Double min_slope) { + this.min_slope = min_slope; + } + + public String getAllow_ponding() { + return allow_ponding; + } + + public void setAllow_ponding(String allow_ponding) { + this.allow_ponding = allow_ponding; + } + + public String getSkip_steady_state() { + return skip_steady_state; + } + + public void setSkip_steady_state(String skip_steady_state) { + this.skip_steady_state = skip_steady_state; + } + + public String getFlow_routing() { + return flow_routing; + } + + public void setFlow_routing(String flow_routing) { + this.flow_routing = flow_routing; + } + + public String getStart_datetime() { + return start_datetime; + } + + public void 
setStart_datetime(String start_datetime) { + this.start_datetime = start_datetime; + } + + public String getEnd_datetime() { + return end_datetime; + } + + public void setEnd_datetime(String end_datetime) { + this.end_datetime = end_datetime; + } + + public String getReport_start_datetime() { + return report_start_datetime; + } + + public void setReport_start_datetime(String report_start_datetime) { + this.report_start_datetime = report_start_datetime; + } + + public String getReport_step() { + return report_step; + } + + public void setReport_step(String report_step) { + this.report_step = report_step; + } + + public String getRouting_step() { + return routing_step; + } + + public void setRouting_step(String routing_step) { + this.routing_step = routing_step; + } + + public String getInertial_damping() { + return inertial_damping; + } + + public void setInertial_damping(String inertial_damping) { + this.inertial_damping = inertial_damping; + } + + public String getNormal_flow_limited() { + return normal_flow_limited; + } + + public void setNormal_flow_limited(String normal_flow_limited) { + this.normal_flow_limited = normal_flow_limited; + } + + public String getForce_main_equation() { + return force_main_equation; + } + + public void setForce_main_equation(String force_main_equation) { + this.force_main_equation = force_main_equation; + } + + public Integer getMax_trials() { + return max_trials; + } + + public void setMax_trials(Integer max_trials) { + this.max_trials = max_trials; + } + + public Double getMinimum_step() { + return minimum_step; + } + + public void setMinimum_step(Double minimum_step) { + this.minimum_step = minimum_step; + } + + public Double getMin_surfarea() { + return min_surfarea; + } + + public void setMin_surfarea(Double min_surfarea) { + this.min_surfarea = min_surfarea; + } + + public Integer getThreads() { + return threads; + } + + public void setThreads(Integer threads) { + this.threads = threads; + } + + public Double getManning() { + return Manning; + } + + public void setManning(Double manning) { + Manning = manning; + } + + public Integer getAreas_slope() { + return areas_slope; + } + + public void setAreas_slope(Integer areas_slope) { + this.areas_slope = areas_slope; + } + + public Integer getAreas_imperv() { + return areas_imperv; + } + + public void setAreas_imperv(Integer areas_imperv) { + this.areas_imperv = areas_imperv; + } + + public Double getAreas_n_imperv() { + return areas_n_imperv; + } + + public void setAreas_n_imperv(Double areas_n_imperv) { + this.areas_n_imperv = areas_n_imperv; + } + + public Double getAreas_n_perv() { + return areas_n_perv; + } + + public void setAreas_n_perv(Double areas_n_perv) { + this.areas_n_perv = areas_n_perv; + } + + public Double getAreas_storage_imperv() { + return areas_storage_imperv; + } + + public void setAreas_storage_imperv(Double areas_storage_imperv) { + this.areas_storage_imperv = areas_storage_imperv; + } + + public Integer getAreas_storage_perv() { + return areas_storage_perv; + } + + public void setAreas_storage_perv(Integer areas_storage_perv) { + this.areas_storage_perv = areas_storage_perv; + } + + public Integer getAreas_pct_zero() { + return areas_pct_zero; + } + + public void setAreas_pct_zero(Integer areas_pct_zero) { + this.areas_pct_zero = areas_pct_zero; + } + + public Integer getInit_depth() { + return init_depth; + } + + public void setInit_depth(Integer init_depth) { + this.init_depth = init_depth; + } + + public Double getFriction() { + return friction; + } + + public void 
setFriction(Double friction) { + this.friction = friction; + } + + public Double getTerrain_smoothing() { + return terrain_smoothing; + } + + public void setTerrain_smoothing(Double terrain_smoothing) { + this.terrain_smoothing = terrain_smoothing; + } + + public String getFlow_algorithm() { + return flow_algorithm; + } + + public void setFlow_algorithm(String flow_algorithm) { + this.flow_algorithm = flow_algorithm; + } + + public Integer getMesh_size() { + return mesh_size; + } + + public void setMesh_size(Integer mesh_size) { + this.mesh_size = mesh_size; + } + + public Integer getSave_step() { + return save_step; + } + + public void setSave_step(Integer save_step) { + this.save_step = save_step; + } +} diff --git a/src/main/java/com/se/nsl/domain/dto/DurationDto.java b/src/main/java/com/se/nsl/domain/dto/DurationDto.java new file mode 100644 index 0000000..f64f702 --- /dev/null +++ b/src/main/java/com/se/nsl/domain/dto/DurationDto.java @@ -0,0 +1,32 @@ +package com.se.nsl.domain.dto; + +@SuppressWarnings("ALL") +public class DurationDto { + private long start; + + private long end; + + public DurationDto() { + } + + public DurationDto(long start, long end) { + this.start = start; + this.end = end; + } + + public long getStart() { + return start; + } + + public void setStart(long start) { + this.start = start; + } + + public long getEnd() { + return end; + } + + public void setEnd(long end) { + this.end = end; + } +} diff --git a/src/main/java/com/se/nsl/domain/dto/ExtensionDto.java b/src/main/java/com/se/nsl/domain/dto/ExtensionDto.java new file mode 100644 index 0000000..f015004 --- /dev/null +++ b/src/main/java/com/se/nsl/domain/dto/ExtensionDto.java @@ -0,0 +1,100 @@ +package com.se.nsl.domain.dto; + +import com.alibaba.fastjson.annotation.JSONField; + +@SuppressWarnings("ALL") +public class ExtensionDto { + private double minx; + + private double miny; + + private double maxx; + + private double maxy; + + private double minHeight; + + private double maxHeight; + + @JSONField(serialize = false) + private Double differ; + + public ExtensionDto() { + } + + public ExtensionDto(double minx, double miny, double maxx, double maxy) { + this.minx = minx; + this.miny = miny; + this.maxx = maxx; + this.maxy = maxy; + } + + public ExtensionDto(double minx, double miny, double maxx, double maxy, double minHeight, double maxHeight) { + this.minx = minx; + this.miny = miny; + this.maxx = maxx; + this.maxy = maxy; + this.minHeight = minHeight; + this.maxHeight = maxHeight; + } + + public double getMinx() { + return minx; + } + + public void setMinx(double minx) { + this.minx = minx; + } + + public double getMiny() { + return miny; + } + + public void setMiny(double miny) { + this.miny = miny; + } + + public double getMaxx() { + return maxx; + } + + public void setMaxx(double maxx) { + this.maxx = maxx; + } + + public double getMaxy() { + return maxy; + } + + public void setMaxy(double maxy) { + this.maxy = maxy; + } + + public double getMinHeight() { + return minHeight; + } + + public void setMinHeight(double minHeight) { + this.minHeight = minHeight; + } + + public double getMaxHeight() { + return maxHeight; + } + + public void setMaxHeight(double maxHeight) { + this.maxHeight = maxHeight; + } + + public Double getDiffer() { + return differ; + } + + public void setDiffer(Double differ) { + this.differ = differ; + } + + public void setDiffer() { + this.differ = this.maxHeight - this.minHeight; + } +} diff --git a/src/main/java/com/se/nsl/domain/dto/GeDb.java 
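The parameterless setDiffer() overload above derives the height span (maxHeight - minHeight) that the water renderer later uses to normalize depths. A minimal usage sketch follows; it is not part of this patch, and the bounds and heights are illustrative values only:

    ExtensionDto ext = new ExtensionDto(0.0, 0.0, 500.0, 500.0, 32.5, 48.0);
    ext.setDiffer();                  // differ = maxHeight - minHeight = 15.5
    double span = ext.getDiffer();    // normalization range for depth-to-color mapping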
b/src/main/java/com/se/nsl/domain/dto/GeDb.java new file mode 100644 index 0000000..85a8cc4 --- /dev/null +++ b/src/main/java/com/se/nsl/domain/dto/GeDb.java @@ -0,0 +1,62 @@ +package com.se.nsl.domain.dto; + +import org.gdal.osr.SpatialReference; +import org.gdal.osr.osr; + +@SuppressWarnings("ALL") +public class GeDb { + private String dbid; + + private String name; + + private Integer epsg; + + private String bbox; + + private SpatialReference sr; + + public GeDb() { + } + + public SpatialReference getSpatialReference() { + if (null == sr) { + sr = new SpatialReference(); + sr.ImportFromEPSG(epsg); + sr.SetAxisMappingStrategy(osr.OAMS_TRADITIONAL_GIS_ORDER); + } + + return sr; + } + + public String getDbid() { + return dbid; + } + + public void setDbid(String dbid) { + this.dbid = dbid; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public Integer getEpsg() { + return epsg; + } + + public void setEpsg(Integer epsg) { + this.epsg = epsg; + } + + public String getBbox() { + return bbox; + } + + public void setBbox(String bbox) { + this.bbox = bbox; + } +} diff --git a/src/main/java/com/se/nsl/domain/dto/GeField.java b/src/main/java/com/se/nsl/domain/dto/GeField.java new file mode 100644 index 0000000..2e085bc --- /dev/null +++ b/src/main/java/com/se/nsl/domain/dto/GeField.java @@ -0,0 +1,84 @@ +package com.se.nsl.domain.dto; + +@SuppressWarnings("ALL") +public class GeField { + String name; + + String alias; + + String type; + + Boolean nullable; + + Integer precision; + + Integer length; + + Integer format; + + String tablename; + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public String getAlias() { + return alias; + } + + public void setAlias(String alias) { + this.alias = alias; + } + + public String getType() { + return type; + } + + public void setType(String type) { + this.type = type; + } + + public Boolean getNullable() { + return nullable; + } + + public void setNullable(Boolean nullable) { + this.nullable = nullable; + } + + public Integer getPrecision() { + return precision; + } + + public void setPrecision(Integer precision) { + this.precision = precision; + } + + public Integer getLength() { + return length; + } + + public void setLength(Integer length) { + this.length = length; + } + + public Integer getFormat() { + return format; + } + + public void setFormat(Integer format) { + this.format = format; + } + + public String getTablename() { + return tablename; + } + + public void setTablename(String tablename) { + this.tablename = tablename; + } +} diff --git a/src/main/java/com/se/nsl/domain/dto/GeFile.java b/src/main/java/com/se/nsl/domain/dto/GeFile.java new file mode 100644 index 0000000..f4f8170 --- /dev/null +++ b/src/main/java/com/se/nsl/domain/dto/GeFile.java @@ -0,0 +1,34 @@ +package com.se.nsl.domain.dto; + +@SuppressWarnings("ALL") +public class GeFile { + String name; + + Long size; + + String type; + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public Long getSize() { + return size; + } + + public void setSize(Long size) { + this.size = size; + } + + public String getType() { + return type; + } + + public void setType(String type) { + this.type = type; + } +} diff --git a/src/main/java/com/se/nsl/domain/dto/GeLayer.java b/src/main/java/com/se/nsl/domain/dto/GeLayer.java new file mode 100644 index 0000000..84d06ed --- /dev/null +++ 
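GeDb builds its GDAL SpatialReference lazily from the stored EPSG code and forces traditional (lon, lat) axis order. A short sketch of the intended use, not part of this patch; EPSG:4326 is only an example value, in practice it is filled from the database metadata, and GDAL's Java bindings are assumed to be loaded:

    GeDb db = new GeDb();
    db.setEpsg(4326);                                // example value
    SpatialReference sr = db.getSpatialReference();  // created once, then cached in the sr field
    // OAMS_TRADITIONAL_GIS_ORDER keeps coordinates as (lon, lat) regardless of the authority's axis order.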
b/src/main/java/com/se/nsl/domain/dto/GeLayer.java @@ -0,0 +1,112 @@ +package com.se.nsl.domain.dto; + +import cn.hutool.json.JSONArray; + +import java.util.List; + +@SuppressWarnings("ALL") +public class GeLayer { + private String id; + + private String name; + + private List<GeField> fields; + + private String queryType; + + private String shpName; + + private GeDb db; + + private JSONArray data; + + public GeLayer() { + this.data = new JSONArray(); + } + + public GeLayer(String id, String name) { + this(); + this.id = id; + this.name = name; + } + + public GeLayer(GeLayer layer,JSONArray data) { + this.id = layer.getId(); + this.name = layer.getName(); + this.queryType = layer.getQueryType(); + this.fields = layer.getFields(); + this.shpName = layer.getShpName(); + this.db = layer.getDb(); + this.data = data; + } + + public GeLayer(String id, String name, String queryType, List<GeField> fields, String shpName, GeDb db) { + this(); + this.id = id; + this.name = name; + this.queryType = queryType; + this.fields = fields; + this.shpName = shpName; + this.db = db; + } + + public void addData(JSONArray arr) { + this.data.addAll(arr); + } + + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public List<GeField> getFields() { + return fields; + } + + public void setFields(List<GeField> fields) { + this.fields = fields; + } + + public JSONArray getData() { + return data; + } + + public void setData(JSONArray data) { + this.data = data; + } + + public String getQueryType() { + return queryType; + } + + public void setQueryType(String queryType) { + this.queryType = queryType; + } + + public String getShpName() { + return shpName; + } + + public void setShpName(String shpName) { + this.shpName = shpName; + } + + public GeDb getDb() { + return db; + } + + public void setDb(GeDb db) { + this.db = db; + } +} diff --git a/src/main/java/com/se/nsl/domain/dto/GridDto.java b/src/main/java/com/se/nsl/domain/dto/GridDto.java new file mode 100644 index 0000000..314a078 --- /dev/null +++ b/src/main/java/com/se/nsl/domain/dto/GridDto.java @@ -0,0 +1,34 @@ +package com.se.nsl.domain.dto; + +import java.io.Serializable; + +public class GridDto implements Serializable { + + private Double lat; + + private Double lon; + public GridDto(){ + + } + + public GridDto(Double lat,Double lon){ + this.lat=lat; + this.lon=lon; + } + + public Double getLat() { + return lat; + } + + public void setLat(Double lat) { + this.lat = lat; + } + + public Double getLon() { + return lon; + } + + public void setLon(Double lon) { + this.lon = lon; + } +} diff --git a/src/main/java/com/se/nsl/domain/dto/LayerDto.java b/src/main/java/com/se/nsl/domain/dto/LayerDto.java new file mode 100644 index 0000000..95dc087 --- /dev/null +++ b/src/main/java/com/se/nsl/domain/dto/LayerDto.java @@ -0,0 +1,67 @@ +package com.se.nsl.domain.dto; + +import java.util.List; + +@SuppressWarnings("ALL") +public class LayerDto { + private String version; + + private DurationDto duration; + + private ExtensionDto extension; + + private TerrainDto terrain; + + private WaterDto waters; + + public LayerDto() { + } + + public LayerDto(String ver, int epsg, List<Integer> sizes) { + this.version = ver; + this.waters = new WaterDto(); + this.duration = new DurationDto(); + this.terrain = new TerrainDto(sizes); + this.terrain.setEpsg(epsg + ""); + } + + public String getVersion() { + return version; + } + + 
public void setVersion(String version) { + this.version = version; + } + + public DurationDto getDuration() { + return duration; + } + + public void setDuration(DurationDto duration) { + this.duration = duration; + } + + public ExtensionDto getExtension() { + return extension; + } + + public void setExtension(ExtensionDto extension) { + this.extension = extension; + } + + public TerrainDto getTerrain() { + return terrain; + } + + public void setTerrain(TerrainDto terrain) { + this.terrain = terrain; + } + + public WaterDto getWaters() { + return waters; + } + + public void setWaters(WaterDto waters) { + this.waters = waters; + } +} diff --git a/src/main/java/com/se/nsl/domain/dto/PointDto.java b/src/main/java/com/se/nsl/domain/dto/PointDto.java new file mode 100644 index 0000000..90e399d --- /dev/null +++ b/src/main/java/com/se/nsl/domain/dto/PointDto.java @@ -0,0 +1,48 @@ +package com.se.nsl.domain.dto; + +@SuppressWarnings("ALL") +public class PointDto implements Comparable<PointDto> { + private double x; + + private double y; + + private double val; + + PointDto() { + } + + public PointDto(double x, double y, double val) { + this.x = x; + this.y = y; + this.val = val; + } + + public double getX() { + return x; + } + + public void setX(double x) { + this.x = x; + } + + public double getY() { + return y; + } + + public void setY(double y) { + this.y = y; + } + + public double getVal() { + return val; + } + + public void setVal(double val) { + this.val = val; + } + + @Override + public int compareTo(PointDto other) { + return Double.compare(this.getVal(), other.getVal()); + } +} diff --git a/src/main/java/com/se/nsl/domain/dto/ResultDto.java b/src/main/java/com/se/nsl/domain/dto/ResultDto.java new file mode 100644 index 0000000..f604702 --- /dev/null +++ b/src/main/java/com/se/nsl/domain/dto/ResultDto.java @@ -0,0 +1,173 @@ +package com.se.nsl.domain.dto; + +import cn.hutool.core.io.FileUtil; +import com.se.nsl.helper.GdalHelper; +import org.gdal.osr.SpatialReference; + +import java.io.File; +import java.util.*; + +@SuppressWarnings("ALL") +public class ResultDto { + private String serviceName; + + private String terrainFile; + + private String buildingFile; + + private String waterPath; + + private String flowPath; + + private String h5Path; + + private String inPath; + + private String outPath; + + private String temp; + + private int epsg; + + private SpatialReference spatialReference; + + private Map<String, float[]> buildings; + + private List<BuildingDto> buildingList; + + public ResultDto() { + this.buildings = new HashMap<>(); + this.buildingList = new ArrayList<>(); + } + + public ResultDto(String serviceName, String terrainFile, String buildingFile, String waterPath, String flowPath, String inPath, String outPath, int epsg) { + this(); + this.serviceName = serviceName; + this.terrainFile = terrainFile; + this.buildingFile = buildingFile; + this.waterPath = waterPath; + this.flowPath = flowPath; + this.inPath = inPath + File.separator + serviceName; + this.h5Path = this.inPath + File.separator + ".save" + File.separator + serviceName + ".h5"; + this.outPath = outPath + File.separator + serviceName; + this.temp = outPath + File.separator + serviceName + File.separator + "temp"; + this.epsg = epsg; + this.spatialReference = GdalHelper.createSpatialReference(epsg); + + File dir = new File(this.outPath); + if (dir.exists() && dir.isDirectory()) { + FileUtil.del(dir); + } + dir.mkdirs(); + + dir = new File(this.temp); + if (dir.exists() && dir.isDirectory()) { + FileUtil.del(dir); + } 
+ dir.mkdirs(); + } + + public String getServiceName() { + return serviceName; + } + + public void setServiceName(String serviceName) { + this.serviceName = serviceName; + } + + public String getTerrainFile() { + return terrainFile; + } + + public void setTerrainFile(String terrainFile) { + this.terrainFile = terrainFile; + } + + public String getBuildingFile() { + return buildingFile; + } + + public void setBuildingFile(String buildingFile) { + this.buildingFile = buildingFile; + } + + public String getWaterPath() { + return waterPath; + } + + public void setWaterPath(String waterPath) { + this.waterPath = waterPath; + } + + public String getFlowPath() { + return flowPath; + } + + public void setFlowPath(String flowPath) { + this.flowPath = flowPath; + } + + public String getInPath() { + return inPath; + } + + public void setInPath(String inPath) { + this.inPath = inPath; + } + + public String getOutPath() { + return outPath; + } + + public void setOutPath(String outPath) { + this.outPath = outPath; + } + + public String getTemp() { + return temp; + } + + public void setTemp(String temp) { + this.temp = temp; + } + + public Map<String, float[]> getBuildings() { + return buildings; + } + + public void setBuildings(Map<String, float[]> buildings) { + this.buildings = buildings; + } + + public List<BuildingDto> getBuildingList() { + return buildingList; + } + + public void setBuildingList(List<BuildingDto> buildingList) { + this.buildingList = buildingList; + } + + public int getEpsg() { + return epsg; + } + + public void setEpsg(int epsg) { + this.epsg = epsg; + } + + public SpatialReference getSpatialReference() { + return spatialReference; + } + + public void setSpatialReference(SpatialReference spatialReference) { + this.spatialReference = spatialReference; + } + + public String getH5Path() { + return h5Path; + } + + public void setH5Path(String h5Path) { + this.h5Path = h5Path; + } +} diff --git a/src/main/java/com/se/nsl/domain/dto/TerrainDto.java b/src/main/java/com/se/nsl/domain/dto/TerrainDto.java new file mode 100644 index 0000000..6f991df --- /dev/null +++ b/src/main/java/com/se/nsl/domain/dto/TerrainDto.java @@ -0,0 +1,73 @@ +package com.se.nsl.domain.dto; + +import com.alibaba.fastjson.annotation.JSONField; + +import java.util.*; + +@SuppressWarnings("ALL") +public class TerrainDto { + private List<int[]> size; + + @JSONField(serialize = false) + private String epsg; + + @JSONField(serialize = false) + private Map<String, float[]> vals; + + @JSONField(serialize = false) + private Map<String, List<XYO>> xyo; + + public TerrainDto() { + vals = new HashMap<>(); + size = new ArrayList<>(); + xyo = new HashMap<>(); + } + + public TerrainDto(List<Integer> sizes) { + this(); + + for (Integer i : sizes) { + size.add(new int[]{i, i}); + + /*List<XYO> list = new ArrayList<>(); + for (int x = 0; x < i; x++) { + for (int y = 0; y < i; y++) { + list.add(new XYO(x, y, i)); + } + } + xyo.put(i + "_" + i, list);*/ + } + } + + public List<int[]> getSize() { + return size; + } + + public void setSize(List<int[]> size) { + this.size = size; + } + + public String getEpsg() { + return epsg; + } + + public void setEpsg(String epsg) { + this.epsg = epsg; + } + + public Map<String, float[]> getVals() { + return vals; + } + + public void setVals(Map<String, float[]> vals) { + this.vals = vals; + } + + public Map<String, List<XYO>> getXyo() { + return xyo; + } + + public void setXyo(Map<String, List<XYO>> xyo) { + this.xyo = xyo; + } +} diff --git a/src/main/java/com/se/nsl/domain/dto/WaterDto.java 
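TerrainDto keeps one entry per output resolution: size holds the {width, height} pairs passed in, while vals and xyo are keyed by the "width_height" string that the water-to-PNG rendering further below also uses. A small sketch of that keying, not part of this patch; the resolutions and the sample array are illustrative:

    TerrainDto terrain = new TerrainDto(Arrays.asList(64, 128));  // registers 64x64 and 128x128 grids
    float[] dem = new float[64 * 64];                             // filled from the resampled terrain elsewhere
    terrain.getVals().put("64_64", dem);
    float[] again = terrain.getVals().get(64 + "_" + 64);         // same "width_height" convention as the renderer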
b/src/main/java/com/se/nsl/domain/dto/WaterDto.java new file mode 100644 index 0000000..7e2aeac --- /dev/null +++ b/src/main/java/com/se/nsl/domain/dto/WaterDto.java @@ -0,0 +1,70 @@ +package com.se.nsl.domain.dto; + +import com.alibaba.fastjson.annotation.JSONField; + +import java.util.ArrayList; +import java.util.List; + +@SuppressWarnings("ALL") +public class WaterDto { + @JSONField(serialize = false) + private List<String> files; + + @JSONField(serialize = false) + private Double minHeight; + + @JSONField(serialize = false) + private Double maxHeight; + + private List<Long> data; + + public WaterDto() { + this.files = new ArrayList<>(); + this.data = new ArrayList<>(); + this.minHeight = Double.MAX_VALUE; + this.maxHeight = Double.MIN_VALUE; + } + + public void setHeight(double minHeight, double maxHeight) { + synchronized (this) { + if (this.minHeight > minHeight) { + this.minHeight = minHeight; + } + if (this.maxHeight < maxHeight) { + this.maxHeight = maxHeight; + } + } + } + + public List<String> getFiles() { + return files; + } + + public void setFiles(List<String> files) { + this.files = files; + } + + public List<Long> getData() { + return data; + } + + public void setData(List<Long> data) { + this.data = data; + } + + public Double getMinHeight() { + return minHeight; + } + + public void setMinHeight(Double minHeight) { + this.minHeight = minHeight; + } + + public Double getMaxHeight() { + return maxHeight; + } + + public void setMaxHeight(Double maxHeight) { + this.maxHeight = maxHeight; + } +} diff --git a/src/main/java/com/se/nsl/domain/dto/XYDto.java b/src/main/java/com/se/nsl/domain/dto/XYDto.java new file mode 100644 index 0000000..99fc326 --- /dev/null +++ b/src/main/java/com/se/nsl/domain/dto/XYDto.java @@ -0,0 +1,32 @@ +package com.se.nsl.domain.dto; + +@SuppressWarnings("ALL") +public class XYDto { + private int x; + + private int y; + + public XYDto() { + } + + public XYDto(int x, int y) { + this.x = x; + this.y = y; + } + + public int getX() { + return x; + } + + public void setX(int x) { + this.x = x; + } + + public int getY() { + return y; + } + + public void setY(int y) { + this.y = y; + } +} diff --git a/src/main/java/com/se/nsl/domain/dto/XYO.java b/src/main/java/com/se/nsl/domain/dto/XYO.java new file mode 100644 index 0000000..3ff9568 --- /dev/null +++ b/src/main/java/com/se/nsl/domain/dto/XYO.java @@ -0,0 +1,43 @@ +package com.se.nsl.domain.dto; + +@SuppressWarnings("ALL") +public class XYO { + private int x; + + private int y; + + private int offset; + + public XYO() { + } + + public XYO(int x, int y, int width) { + this.x = x; + this.y = y; + this.offset = x + y * width; + } + + public int getX() { + return x; + } + + public void setX(int x) { + this.x = x; + } + + public int getY() { + return y; + } + + public void setY(int y) { + this.y = y; + } + + public int getOffset() { + return offset; + } + + public void setOffset(int offset) { + this.offset = offset; + } +} diff --git a/src/main/java/com/se/nsl/domain/po/DataPo.java b/src/main/java/com/se/nsl/domain/po/DataPo.java new file mode 100644 index 0000000..e0abe81 --- /dev/null +++ b/src/main/java/com/se/nsl/domain/po/DataPo.java @@ -0,0 +1,274 @@ +package com.se.nsl.domain.po; + +import com.alibaba.fastjson.annotation.JSONField; +import com.fasterxml.jackson.annotation.JsonFormat; +import com.se.nsl.helper.GdalHelper; +import io.swagger.annotations.ApiModelProperty; +import org.gdal.osr.SpatialReference; + +import java.util.Date; + +@SuppressWarnings("ALL") +public class DataPo { + 
@ApiModelProperty("鐖禝D") + private Integer pid; + + @ApiModelProperty("鍚嶇О") + private String name; + + @ApiModelProperty("杈撳叆璺緞") + private String inPath; + + @ApiModelProperty("杈撳嚭璺緞") + private String outPath; + + @ApiModelProperty("寮�濮嬫椂闂�") + @JSONField(format = "yyyy-MM-dd HH:mm:ss") + @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8") + private Date startTime; + + @ApiModelProperty("鏈�灏廥") + private Double minx; + + @ApiModelProperty("鏈�灏廦") + private Double miny; + + @ApiModelProperty("鏈�澶") + private Double maxx; + + @ApiModelProperty("鏈�澶") + private Double maxy; + + @ApiModelProperty("闄嶉洦閲�(mm)") + private Double total; + + @ApiModelProperty("鏃堕暱(min)") + private Integer duration; + + @ApiModelProperty("鏄惁涓洪槻姹�(0-鍚︼紝1-鏄�)") + private Integer isFlood; + + @ApiModelProperty("闃叉睕寮�濮嬫椂闂�(绉�)") + private Integer floodStart; + + @ApiModelProperty("闃叉睕缁撴潫鏃堕棿(绉�)") + private Integer floodEnd; + + @ApiModelProperty("闃叉睕楂樺害(mm)") + private Double floodHeight; + + @ApiModelProperty("闃叉睕绫诲瀷(娌欒锛岄槻姘存澘)") + private String floodType; + + @ApiModelProperty("闃叉睕鏈�灏廥") + private Double floodMinx; + + @ApiModelProperty("闃叉睕鏈�灏廦") + private Double floodMiny; + + @ApiModelProperty("闃叉睕鏈�澶") + private Double floodMaxx; + + @ApiModelProperty("闃叉睕鏈�澶") + private Double floodMaxy; + + @ApiModelProperty("鍧愭爣绯籌D") + private Integer epsg; + + public DataPo() { + } + + public void setPath(String inPath, String outPath) { + this.inPath = inPath; + this.outPath = outPath; + } + + public String getBbox() { + // "116.64388473935195,39.884315914604464,116.64754729082588,39.887069143903496"; + return minx + "," + miny + "," + maxx + "," + maxy; + } + + public DataPo(String name, String inPath, String outPath, Double minx, Double miny, Double maxx, Double maxy, Double total, Integer duration) { + this.name = name; + this.inPath = inPath; + this.outPath = outPath; + this.minx = minx; + this.miny = miny; + this.maxx = maxx; + this.maxy = maxy; + this.total = total; + this.duration = duration; + } + + public Integer getPid() { + return pid; + } + + public void setPid(Integer pid) { + this.pid = pid; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public String getInPath() { + return inPath; + } + + public void setInPath(String inPath) { + this.inPath = inPath; + } + + public String getOutPath() { + return outPath; + } + + public void setOutPath(String outPath) { + this.outPath = outPath; + } + + public Date getStartTime() { + return startTime; + } + + public void setStartTime(Date startTime) { + this.startTime = startTime; + } + + public Double getMinx() { + return minx; + } + + public void setMinx(Double minx) { + this.minx = minx; + } + + public Double getMiny() { + return miny; + } + + public void setMiny(Double miny) { + this.miny = miny; + } + + public Double getMaxx() { + return maxx; + } + + public void setMaxx(Double maxx) { + this.maxx = maxx; + } + + public Double getMaxy() { + return maxy; + } + + public void setMaxy(Double maxy) { + this.maxy = maxy; + } + + public Double getTotal() { + return total; + } + + public void setTotal(Double total) { + this.total = total; + } + + public Integer getDuration() { + return duration; + } + + public void setDuration(Integer duration) { + this.duration = duration; + } + + public Integer getIsFlood() { + return isFlood; + } + + public void setIsFlood(Integer isFlood) { + this.isFlood = isFlood; + } + + public Integer getFloodStart() { + return floodStart; + } + + public void 
setFloodStart(Integer floodStart) { + this.floodStart = floodStart; + } + + public Integer getFloodEnd() { + return floodEnd; + } + + public void setFloodEnd(Integer floodEnd) { + this.floodEnd = floodEnd; + } + + public Double getFloodHeight() { + return floodHeight; + } + + public void setFloodHeight(Double floodHeight) { + this.floodHeight = floodHeight; + } + + public String getFloodType() { + return floodType; + } + + public void setFloodType(String floodType) { + this.floodType = floodType; + } + + public Double getFloodMinx() { + return floodMinx; + } + + public void setFloodMinx(Double floodMinx) { + this.floodMinx = floodMinx; + } + + public Double getFloodMiny() { + return floodMiny; + } + + public void setFloodMiny(Double floodMiny) { + this.floodMiny = floodMiny; + } + + public Double getFloodMaxx() { + return floodMaxx; + } + + public void setFloodMaxx(Double floodMaxx) { + this.floodMaxx = floodMaxx; + } + + public Double getFloodMaxy() { + return floodMaxy; + } + + public void setFloodMaxy(Double floodMaxy) { + this.floodMaxy = floodMaxy; + } + + public Integer getEpsg() { + return epsg; + } + + public void setEpsg(Integer epsg) { + this.epsg = epsg; + } + + public SpatialReference getSpatialReference() { + return GdalHelper.createSpatialReference(this.getEpsg()); + } +} diff --git a/src/main/java/com/se/nsl/domain/po/PondingPo.java b/src/main/java/com/se/nsl/domain/po/PondingPo.java new file mode 100644 index 0000000..2b6c351 --- /dev/null +++ b/src/main/java/com/se/nsl/domain/po/PondingPo.java @@ -0,0 +1,79 @@ +package com.se.nsl.domain.po; + +import com.se.nsl.domain.dto.PointDto; +import com.se.nsl.helper.GdalHelper; +import io.swagger.annotations.ApiModelProperty; +import org.gdal.ogr.Geometry; +import org.gdal.ogr.ogr; + +@SuppressWarnings("ALL") +public class PondingPo { + @ApiModelProperty("绉按鍖哄煙") + private String polygon; + + @ApiModelProperty("绉按鐐�") + private String point; + + @ApiModelProperty("绉按娣卞害") + private Double depth; + + @ApiModelProperty("闈㈢Н") + private Double area; + + public PondingPo() { + } + + public PondingPo(Geometry polygon, PointDto dto) { + Geometry point = new Geometry(ogr.wkbPoint); + point.AddPoint_2D(dto.getX(), dto.getY()); + point.AssignSpatialReference(polygon.GetSpatialReference()); + + //GdalHelper.toWgs84(polygon.GetSpatialReference(), polygon); + //GdalHelper.toWgs84(polygon.GetSpatialReference(), point); + polygon.TransformTo(GdalHelper.SR4326); + point.TransformTo(GdalHelper.SR4326); + + this.polygon = polygon.ExportToWkt(); + this.point = point.ExportToWkt(); + this.depth = dto.getVal(); + this.area = polygon.Area(); + } + + public PondingPo(String polygon, String point, Double depth) { + this.polygon = polygon; + this.point = point; + this.depth = depth; + } + + public String getPolygon() { + return polygon; + } + + public void setPolygon(String polygon) { + this.polygon = polygon; + } + + public String getPoint() { + return point; + } + + public void setPoint(String point) { + this.point = point; + } + + public Double getDepth() { + return depth; + } + + public void setDepth(Double depth) { + this.depth = depth; + } + + public Double getArea() { + return area; + } + + public void setArea(Double area) { + this.area = area; + } +} diff --git a/src/main/java/com/se/nsl/domain/po/SimuPo.java b/src/main/java/com/se/nsl/domain/po/SimuPo.java new file mode 100644 index 0000000..744ed13 --- /dev/null +++ b/src/main/java/com/se/nsl/domain/po/SimuPo.java @@ -0,0 +1,179 @@ +package com.se.nsl.domain.po; + +import 
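Because PointDto orders itself by val, the deepest cell of a ponding area can be taken with Collections.max and then paired with the area outline through PondingPo. A sketch using the WKT-based constructor, not part of this patch; the coordinates and depths are made up for illustration:

    List<PointDto> cells = Arrays.asList(
            new PointDto(116.644, 39.885, 0.12),
            new PointDto(116.645, 39.886, 0.34),
            new PointDto(116.646, 39.884, 0.21));
    PointDto deepest = Collections.max(cells);      // compareTo() compares by val
    PondingPo ponding = new PondingPo(
            "POLYGON ((116.643 39.884, 116.647 39.884, 116.647 39.887, 116.643 39.887, 116.643 39.884))",
            "POINT (" + deepest.getX() + " " + deepest.getY() + ")",
            deepest.getVal());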
com.baomidou.mybatisplus.annotation.IdType; +import com.baomidou.mybatisplus.annotation.TableId; +import com.baomidou.mybatisplus.annotation.TableName; +import io.swagger.annotations.ApiModelProperty; + +import java.sql.Timestamp; + +@TableName("bs.simu") +@SuppressWarnings("ALL") +public class SimuPo { + @TableId(type = IdType.AUTO) + @ApiModelProperty("涓婚敭ID") + private Long id; + + @ApiModelProperty("鐖禝D") + private Integer pid; + + @ApiModelProperty("搴忓彿") + private Integer num; + + @ApiModelProperty("鍚嶇О") + private String name; + + @ApiModelProperty("鏈嶅姟鍚�") + private String serviceName; + + @ApiModelProperty("鏁版嵁(JSON)") + private String data; + + @ApiModelProperty("鐘舵�侊細0-鍒涘缓浠跨湡浠诲姟锛�1-杩炴帴GEDB搴擄紝2-涓嬭浇绌洪棿鏁版嵁锛�3-涓嬭浇楂樼▼鏁版嵁锛�4-鐢熸垚闄嶉洦鏂囦欢锛�5-鐢熸垚閰嶇疆鏂囦欢锛�6-妯℃嫙鍐呮稘浠跨湡锛�7-澶勭悊姘翠綅鏂囦欢锛�8-澶勭悊鎺掓按鏂囦欢锛�9-澶勭悊浠跨湡缁撴灉锛�10-瀹屾垚锛�-10-鍑洪敊") + private Integer status; + + @ApiModelProperty("缁撴灉") + private String result; + + @ApiModelProperty("鍒涘缓鏃堕棿") + private Timestamp createTime; + + @ApiModelProperty("鏇存柊鏃堕棿") + private Timestamp updateTime; + + @ApiModelProperty("澶囨敞") + private String bak; + + @ApiModelProperty("sem璺緞") + private String semUrl; + + @ApiModelProperty("鐐规暟鎹矾寰�") + private String pointUrl; + + @ApiModelProperty("绾挎暟鎹矾寰�") + private String linkUrl; + + public SimuPo() { + } + + public SimuPo(Integer num, Integer pid, String name, String data, Integer status, String bak) { + this.num = num; + this.pid = pid; + this.name = name; + this.data = data; + this.status = status; + this.bak = bak; + } + + public Long getId() { + return id; + } + + public void setId(Long id) { + this.id = id; + } + + public Integer getPid() { + return pid; + } + + public void setPid(Integer pid) { + this.pid = pid; + } + + public Integer getNum() { + return num; + } + + public void setNum(Integer num) { + this.num = num; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public String getServiceName() { + return serviceName; + } + + public void setServiceName(String serviceName) { + this.serviceName = serviceName; + } + + public String getData() { + return data; + } + + public void setData(String data) { + this.data = data; + } + + public Integer getStatus() { + return status; + } + + public void setStatus(Integer status) { + this.status = status; + } + + public String getResult() { + return result; + } + + public void setResult(String result) { + this.result = result; + } + + public Timestamp getCreateTime() { + return createTime; + } + + public void setCreateTime(Timestamp createTime) { + this.createTime = createTime; + } + + public Timestamp getUpdateTime() { + return updateTime; + } + + public void setUpdateTime(Timestamp updateTime) { + this.updateTime = updateTime; + } + + public String getBak() { + return bak; + } + + public void setBak(String bak) { + this.bak = bak; + } + + public String getSemUrl() { + return semUrl; + } + + public void setSemUrl(String semUrl) { + this.semUrl = semUrl; + } + + public String getPointUrl() { + return pointUrl; + } + + public void setPointUrl(String pointUrl) { + this.pointUrl = pointUrl; + } + + public String getLinkUrl() { + return linkUrl; + } + + public void setLinkUrl(String linkUrl) { + this.linkUrl = linkUrl; + } +} diff --git a/src/main/java/com/se/nsl/domain/vo/BuildingDepthVo.java b/src/main/java/com/se/nsl/domain/vo/BuildingDepthVo.java new file mode 100644 index 0000000..e18edbb --- /dev/null +++ b/src/main/java/com/se/nsl/domain/vo/BuildingDepthVo.java @@ -0,0 +1,54 @@ +package 
com.se.nsl.domain.vo; + +import io.swagger.annotations.ApiModel; +import io.swagger.annotations.ApiModelProperty; +import lombok.EqualsAndHashCode; +import lombok.ToString; + +@ToString +@SuppressWarnings("ALL") +@ApiModel(value = "寤虹瓚鐗╂秹姘存繁搴﹁鍥剧被") +@EqualsAndHashCode(callSuper = false) +public class BuildingDepthVo { + public BuildingDepthVo() { + } + + public BuildingDepthVo(String id, Long timestamp, Double depth) { + this.id = id; + this.timestamp = timestamp; + this.depth = depth; + } + + @ApiModelProperty("涓婚敭ID") + private String id; + + @ApiModelProperty("鏃堕棿鎴�") + private Long timestamp; + + @ApiModelProperty("娑夋按灏濊瘯") + private Double depth; + + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public Long getTimestamp() { + return timestamp; + } + + public void setTimestamp(Long timestamp) { + this.timestamp = timestamp; + } + + public Double getDepth() { + return depth; + } + + public void setDepth(Double depth) { + this.depth = depth; + } +} diff --git a/src/main/java/com/se/nsl/domain/vo/CreateFilesSimuVo.java b/src/main/java/com/se/nsl/domain/vo/CreateFilesSimuVo.java new file mode 100644 index 0000000..8ee1b76 --- /dev/null +++ b/src/main/java/com/se/nsl/domain/vo/CreateFilesSimuVo.java @@ -0,0 +1,109 @@ +package com.se.nsl.domain.vo; + +import com.alibaba.fastjson.annotation.JSONField; +import com.fasterxml.jackson.annotation.JsonFormat; +import io.swagger.annotations.ApiModel; +import io.swagger.annotations.ApiModelProperty; +import lombok.Data; + +import java.io.Serializable; +import java.util.Date; + +@Data +@ApiModel(value = "CreateFilesSimuVo", description = "") +public class CreateFilesSimuVo implements Serializable { + + private static final long serialVersionUID = 1L; + @ApiModelProperty("鏈�灏廥") + private Double minx; + + @ApiModelProperty("鏈�灏廦") + private Double miny; + + @ApiModelProperty("鏈�澶") + private Double maxx; + + @ApiModelProperty("鏈�澶") + private Double maxy; + + @ApiModelProperty("搴忓彿") + private Integer num; + + @ApiModelProperty("闄嶉洦閲�(mm)") + private Double total; + + @ApiModelProperty("鏃堕暱(min)") + private Integer duration; + + @ApiModelProperty("浠跨湡鍚嶇О") + private String name; + + @ApiModelProperty("寮�濮嬫椂闂�") + @JSONField(format = "yyyy-MM-dd HH:mm:ss") + @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8") + private Date startTime; + + @ApiModelProperty("浠跨湡澶囨敞") + private String bak; + + @ApiModelProperty("鐖禝D(澶т簬0涓洪槻姹�)") + private Integer pid; + + @ApiModelProperty("闃叉睕寮�濮嬫椂闂�(绉�)") + private Integer floodStart; + + @ApiModelProperty("闃叉睕缁撴潫鏃堕棿(绉�)") + private Integer floodEnd; + + @ApiModelProperty("闃叉睕楂樺害(mm)") + private Double floodHeight; + + @ApiModelProperty("闃叉睕绫诲瀷(娌欒锛岄槻姘存澘)") + private String floodType; + + @ApiModelProperty("闃叉睕鏈�灏廥") + private Double floodMinx; + + @ApiModelProperty("闃叉睕鏈�灏廦") + private Double floodMiny; + + @ApiModelProperty("闃叉睕鏈�澶") + private Double floodMaxx; + + @ApiModelProperty("闃叉睕鏈�澶") + private Double floodMaxy; + + @ApiModelProperty("鑼冨洿鏂囦欢鍦板潃") + private String floodFile; + + @ApiModelProperty("绔欑偣鏂囦欢shp鍦板潃") + private String stationFile; + + @ApiModelProperty("绔欑偣闆ㄩ噺CSV鏂囦欢鍦板潃") + private String stationRainFile; + + @ApiModelProperty("璁$畻妯″瀷绫诲瀷") + private String modelType; + + @ApiModelProperty("sem璺緞") + private String semUrl; + + @ApiModelProperty("鐐圭敓鎴愯矾寰�") + private String pointName; + + @ApiModelProperty("绾跨敓鎴愯矾寰�") + private String linkName; + + @ApiModelProperty("妯″瀷璁$畻鏂瑰紡锛�1=妯℃嫙锛�2=鎺ュ叆") + private String radio; + + 
@ApiModelProperty("绔欑偣鍚嶇О") + private String station; + + @ApiModelProperty("绔欑偣缁忓害") + private Double stationLongitude; + + @ApiModelProperty("绔欑偣绾害") + private Double stationLatitude; + +} diff --git a/src/main/java/com/se/nsl/domain/vo/CreateSimuVo.java b/src/main/java/com/se/nsl/domain/vo/CreateSimuVo.java new file mode 100644 index 0000000..9b5534f --- /dev/null +++ b/src/main/java/com/se/nsl/domain/vo/CreateSimuVo.java @@ -0,0 +1,227 @@ +package com.se.nsl.domain.vo; + +import com.alibaba.fastjson.annotation.JSONField; +import com.fasterxml.jackson.annotation.JsonFormat; +import io.swagger.annotations.ApiModel; +import io.swagger.annotations.ApiModelProperty; +import lombok.EqualsAndHashCode; +import lombok.ToString; + +import java.util.Date; + +@ToString +@SuppressWarnings("ALL") +@ApiModel(value = "鍒涘缓浠跨湡瑙嗗浘绫�") +@EqualsAndHashCode(callSuper = false) +public class CreateSimuVo { + @ApiModelProperty("鏈�灏廥") + private Double minx; + + @ApiModelProperty("鏈�灏廦") + private Double miny; + + @ApiModelProperty("鏈�澶") + private Double maxx; + + @ApiModelProperty("鏈�澶") + private Double maxy; + + @ApiModelProperty("搴忓彿") + private Integer num; + + @ApiModelProperty("闄嶉洦閲�(mm)") + private Double total; + + @ApiModelProperty("鏃堕暱(min)") + private Integer duration; + + @ApiModelProperty("浠跨湡鍚嶇О") + private String name; + + @ApiModelProperty("寮�濮嬫椂闂�") + @JSONField(format = "yyyy-MM-dd HH:mm:ss") + @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8") + private Date startTime; + + @ApiModelProperty("浠跨湡澶囨敞") + private String bak; + + @ApiModelProperty("鐖禝D(澶т簬0涓洪槻姹�)") + private Integer pid; + + @ApiModelProperty("闃叉睕寮�濮嬫椂闂�(绉�)") + private Integer floodStart; + + @ApiModelProperty("闃叉睕缁撴潫鏃堕棿(绉�)") + private Integer floodEnd; + + @ApiModelProperty("闃叉睕楂樺害(mm)") + private Double floodHeight; + + @ApiModelProperty("闃叉睕绫诲瀷(娌欒锛岄槻姘存澘)") + private String floodType; + + @ApiModelProperty("闃叉睕鏈�灏廥") + private Double floodMinx; + + @ApiModelProperty("闃叉睕鏈�灏廦") + private Double floodMiny; + + @ApiModelProperty("闃叉睕鏈�澶") + private Double floodMaxx; + + @ApiModelProperty("闃叉睕鏈�澶") + private Double floodMaxy; + + public Double getMinx() { + return minx; + } + + public void setMinx(Double minx) { + this.minx = minx; + } + + public Double getMiny() { + return miny; + } + + public void setMiny(Double miny) { + this.miny = miny; + } + + public Double getMaxx() { + return maxx; + } + + public void setMaxx(Double maxx) { + this.maxx = maxx; + } + + public Double getMaxy() { + return maxy; + } + + public void setMaxy(Double maxy) { + this.maxy = maxy; + } + + public Integer getNum() { + return num; + } + + public void setNum(Integer num) { + this.num = num; + } + + public Double getTotal() { + return total; + } + + public void setTotal(Double total) { + this.total = total; + } + + public Integer getDuration() { + return duration; + } + + public void setDuration(Integer duration) { + this.duration = duration; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public String getBak() { + return bak; + } + + public void setBak(String bak) { + this.bak = bak; + } + + public Integer getPid() { + return pid; + } + + public void setPid(Integer pid) { + this.pid = pid; + } + + public Integer getFloodStart() { + return floodStart; + } + + public void setFloodStart(Integer floodStart) { + this.floodStart = floodStart; + } + + public Integer getFloodEnd() { + return floodEnd; + } + + public void setFloodEnd(Integer floodEnd) { + this.floodEnd = 
floodEnd; + } + + public Double getFloodHeight() { + return floodHeight; + } + + public void setFloodHeight(Double floodHeight) { + this.floodHeight = floodHeight; + } + + public String getFloodType() { + return floodType; + } + + public void setFloodType(String floodType) { + this.floodType = floodType; + } + + public Double getFloodMinx() { + return floodMinx; + } + + public void setFloodMinx(Double floodMinx) { + this.floodMinx = floodMinx; + } + + public Double getFloodMiny() { + return floodMiny; + } + + public void setFloodMiny(Double floodMiny) { + this.floodMiny = floodMiny; + } + + public Double getFloodMaxx() { + return floodMaxx; + } + + public void setFloodMaxx(Double floodMaxx) { + this.floodMaxx = floodMaxx; + } + + public Double getFloodMaxy() { + return floodMaxy; + } + + public void setFloodMaxy(Double floodMaxy) { + this.floodMaxy = floodMaxy; + } + + public Date getStartTime() { + return startTime; + } + + public void setStartTime(Date startTime) { + this.startTime = startTime; + } +} diff --git a/src/main/java/com/se/nsl/domain/vo/PondingVo.java b/src/main/java/com/se/nsl/domain/vo/PondingVo.java new file mode 100644 index 0000000..68eac35 --- /dev/null +++ b/src/main/java/com/se/nsl/domain/vo/PondingVo.java @@ -0,0 +1,36 @@ +package com.se.nsl.domain.vo; + +import io.swagger.annotations.ApiModelProperty; + +@SuppressWarnings("ALL") +public class PondingVo { + @ApiModelProperty("绉按娣卞害") + private Double depth; + + @ApiModelProperty("绉按闈㈢Н") + private Double area; + + public PondingVo() { + } + + public PondingVo(Double depth, Double area) { + this.depth = depth; + this.area = area; + } + + public Double getDepth() { + return depth; + } + + public void setDepth(Double depth) { + this.depth = depth; + } + + public Double getArea() { + return area; + } + + public void setArea(Double area) { + this.area = area; + } +} diff --git a/src/main/java/com/se/nsl/domain/vo/QueryVo.java b/src/main/java/com/se/nsl/domain/vo/QueryVo.java new file mode 100644 index 0000000..005149d --- /dev/null +++ b/src/main/java/com/se/nsl/domain/vo/QueryVo.java @@ -0,0 +1,107 @@ +package com.se.nsl.domain.vo; + +@SuppressWarnings("ALL") +public class QueryVo { + private Integer start; + + private Integer count; + + private String layerid; + + private Boolean containCount; + + private String where; + + private String geometry; + + private Double buffer; + + private Boolean returnGeom; + + public QueryVo() { + } + + public void setDefault() { + if (null == start || start < 1) start = 1; + if (null == count || count < 0) count = 10; + if (count > 10000) count = 10000; + if (null == containCount) containCount = false; + if (null == returnGeom) returnGeom = false; + } + + public int getCount() { + return count; + } + + public void setCount(int count) { + this.count = count; + } + + public String getLayerid() { + return layerid; + } + + public void setLayerid(String layerid) { + this.layerid = layerid; + } + + public boolean isContainCount() { + return containCount; + } + + public void setContainCount(boolean containCount) { + this.containCount = containCount; + } + + public String getWhere() { + return where; + } + + public void setWhere(String where) { + this.where = where; + } + + public Integer getStart() { + return start; + } + + public void setStart(Integer start) { + this.start = start; + } + + public void setCount(Integer count) { + this.count = count; + } + + public Boolean getContainCount() { + return containCount; + } + + public void setContainCount(Boolean containCount) { + this.containCount 
= containCount; + } + + public String getGeometry() { + return geometry; + } + + public void setGeometry(String geometry) { + this.geometry = geometry; + } + + public Double getBuffer() { + return buffer; + } + + public void setBuffer(Double buffer) { + this.buffer = buffer; + } + + public Boolean getReturnGeom() { + return returnGeom; + } + + public void setReturnGeom(Boolean returnGeom) { + this.returnGeom = returnGeom; + } +} diff --git a/src/main/java/com/se/nsl/domain/vo/R.java b/src/main/java/com/se/nsl/domain/vo/R.java new file mode 100644 index 0000000..67a24d9 --- /dev/null +++ b/src/main/java/com/se/nsl/domain/vo/R.java @@ -0,0 +1,96 @@ +package com.se.nsl.domain.vo; + +import io.swagger.annotations.ApiModelProperty; +import org.springframework.http.HttpStatus; + +@SuppressWarnings("ALL") +public class R<T> { + @ApiModelProperty("鐘舵�佺爜锛�200-姝e父锛�400-璇锋眰閿欒锛�500-鏈嶅姟鍣ㄩ敊璇�") + private int code; + + @ApiModelProperty("娑堟伅") + private String msg; + + @ApiModelProperty("琛屾暟") + private long count; + + @ApiModelProperty("鏁版嵁") + private T data; + + @ApiModelProperty("鏃堕棿") + private long time; + + public R() { + this.time = System.currentTimeMillis(); + } + + public R(HttpStatus code, T data) { + this.data = data; + this.code = code.value(); + this.time = System.currentTimeMillis(); + this.msg = this.code == 200 ? "鎴愬姛" : "澶辫触"; + } + + public R(HttpStatus code, T data, String msg) { + this.msg = msg; + this.data = data; + this.code = code.value(); + this.time = System.currentTimeMillis(); + } + + public R(HttpStatus code, T data, long count) { + this.count = count; + this.data = data; + this.code = code.value(); + this.time = System.currentTimeMillis(); + this.msg = this.code == 200 ? "鎴愬姛" : "澶辫触"; + } + + public R(HttpStatus code, T data, long count, String msg) { + this.msg = msg; + this.data = data; + this.count = count; + this.code = code.value(); + this.time = System.currentTimeMillis(); + } + + public int getCode() { + return code; + } + + public void setCode(int code) { + this.code = code; + } + + public String getMsg() { + return msg; + } + + public void setMsg(String msg) { + this.msg = msg; + } + + public long getCount() { + return count; + } + + public void setCount(long count) { + this.count = count; + } + + public T getData() { + return data; + } + + public void setData(T data) { + this.data = data; + } + + public long getTime() { + return time; + } + + public void setTime(long time) { + this.time = time; + } +} diff --git a/src/main/java/com/se/nsl/domain/vo/SimuVo.java b/src/main/java/com/se/nsl/domain/vo/SimuVo.java new file mode 100644 index 0000000..7285c6a --- /dev/null +++ b/src/main/java/com/se/nsl/domain/vo/SimuVo.java @@ -0,0 +1,87 @@ +package com.se.nsl.domain.vo; + +import com.baomidou.mybatisplus.annotation.TableName; +import io.swagger.annotations.ApiModelProperty; + +import java.util.List; + +@TableName("bs.simu") +@SuppressWarnings("ALL") +public class SimuVo { + @ApiModelProperty("ID") + private Long id; + + @ApiModelProperty("鐖禝D") + private Integer pid; + + @ApiModelProperty("搴忓彿") + private Integer num; + + @ApiModelProperty("鍚嶇О") + private String name; + + @ApiModelProperty("鐘舵�侊細0-鍒涘缓浠跨湡锛�1-杩炴帴GEDB搴擄紝2-涓嬭浇绌洪棿鏁版嵁锛�3-涓嬭浇楂樼▼鏁版嵁锛�4-妯℃嫙鍐呮稘浠跨湡锛�5-澶勭悊浠跨湡鏁版嵁锛�10-瀹屾垚锛�-10-鍑洪敊") + private List<Short> status; + + @ApiModelProperty("鍒嗛〉澶у皬") + private Integer pageSize; + + @ApiModelProperty("褰撳墠椤电爜") + private Integer pageIndex; + + public Long getId() { + return id; + } + + public void setId(Long id) { + this.id = id; + } + + public Integer getPid() { + return 
pid; + } + + public void setPid(Integer pid) { + this.pid = pid; + } + + public Integer getNum() { + return num; + } + + public void setNum(Integer num) { + this.num = num; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public List<Short> getStatus() { + return status; + } + + public void setStatus(List<Short> status) { + this.status = status; + } + + public Integer getPageSize() { + return pageSize; + } + + public void setPageSize(Integer pageSize) { + this.pageSize = pageSize; + } + + public Integer getPageIndex() { + return pageIndex; + } + + public void setPageIndex(Integer pageIndex) { + this.pageIndex = pageIndex; + } +} diff --git a/src/main/java/com/se/nsl/domain/vo/StationRainVo.java b/src/main/java/com/se/nsl/domain/vo/StationRainVo.java new file mode 100644 index 0000000..cc5cd45 --- /dev/null +++ b/src/main/java/com/se/nsl/domain/vo/StationRainVo.java @@ -0,0 +1,65 @@ +package com.se.nsl.domain.vo; + + +public class StationRainVo { + + private Long id; + + private String stationName; + + private Double rainfall; + + private Double longitude; + + private Double latitude; + + private String datetime; + + public Long getId() { + return id; + } + + public void setId(Long id) { + this.id = id; + } + + public String getStationName() { + return stationName; + } + + public void setStationName(String stationName) { + this.stationName = stationName; + } + + public Double getRainfall() { + return rainfall; + } + + public void setRainfall(Double rainfall) { + this.rainfall = rainfall; + } + + public Double getLongitude() { + return longitude; + } + + public void setLongitude(Double longitude) { + this.longitude = longitude; + } + + public Double getLatitude() { + return latitude; + } + + public void setLatitude(Double latitude) { + this.latitude = latitude; + } + + public String getDatetime() { + return datetime; + } + + public void setDatetime(String datetime) { + this.datetime = datetime; + } +} diff --git a/src/main/java/com/se/nsl/enums/RadioEnums.java b/src/main/java/com/se/nsl/enums/RadioEnums.java new file mode 100644 index 0000000..e187999 --- /dev/null +++ b/src/main/java/com/se/nsl/enums/RadioEnums.java @@ -0,0 +1,33 @@ +package com.se.nsl.enums; + + + +public enum RadioEnums { + SIMULATE("1", "妯℃嫙"), + ACCESS("2", "鎺ュ叆"); + + private String code; + private String name; + + public String getCode() { + return code; + } + + public void setCode(String code) { + this.code = code; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + RadioEnums(String code, String name) { + this.code = code; + this.name = name; + } + +} diff --git a/src/main/java/com/se/nsl/enums/SemErrorEnums.java b/src/main/java/com/se/nsl/enums/SemErrorEnums.java new file mode 100644 index 0000000..5918ab0 --- /dev/null +++ b/src/main/java/com/se/nsl/enums/SemErrorEnums.java @@ -0,0 +1,47 @@ +package com.se.nsl.enums; + + +public enum SemErrorEnums { + INIT(1, "鍒濆鍖栧け璐�"), + ZARR(2, "闄嶉洦鏂囦欢鐢熸垚澶辫触"), + POINT(3, "绠$偣鐢熸垚澶辫触"), + LINE(4, "绠$嚎鐢熸垚澶辫触"), + GRID(5, "鑼冨洿鐢熸垚澶辫触"), + RIVER(6, "娌虫祦鐢熸垚澶辫触"), + LAND(7, "鍦熷湴鍒╃敤鐢熸垚澶辫触"), + TERRAIN(8, "鍦板舰鐢熸垚澶辫触"); + + private Integer code; + private String name; + + public Integer getCode() { + return code; + } + + public void setCode(Integer code) { + this.code = code; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + SemErrorEnums(Integer code, String name) { + this.code = 
code; + this.name = name; + } + + public static String of(Integer code) { + SemErrorEnums[] values = SemErrorEnums.values(); + for (SemErrorEnums value : values) { + if (value.getCode().equals(code)) { + return value.getName(); + } + } + return null; + } +} diff --git a/src/main/java/com/se/nsl/helper/CaffeineHelper.java b/src/main/java/com/se/nsl/helper/CaffeineHelper.java new file mode 100644 index 0000000..29108d1 --- /dev/null +++ b/src/main/java/com/se/nsl/helper/CaffeineHelper.java @@ -0,0 +1,79 @@ +package com.se.nsl.helper; + +import com.github.benmanes.caffeine.cache.Cache; +import com.github.benmanes.caffeine.cache.Caffeine; +import lombok.extern.slf4j.Slf4j; + +import java.math.BigInteger; +import java.security.MessageDigest; +import java.util.List; +import java.util.concurrent.TimeUnit; + +@Slf4j +@SuppressWarnings("ALL") +public class CaffeineHelper { + private static Cache<String, Object> cache; + + public static void init(Integer cacheTime) { + cache = Caffeine.newBuilder() + .initialCapacity(16) + .maximumSize(4096) + .expireAfterWrite(cacheTime, TimeUnit.MINUTES) + .build(); + } + + public static Object get(String key) { + return cache.getIfPresent(key); + } + + public static void put(String key, Object obj) { + cache.put(key, obj); + } + + public static void remove(String key) { + cache.invalidate(key); + } + + public static void clear() { + cache.invalidateAll(); + } + + public static <T> List<T> getListByKey(String key) { + Object obj = get(key); + if (obj instanceof List<?>) { + return (List<T>) obj; + } + + return null; + } + + public static <T> void putListByKey(String key, List<T> list) { + if (null != list && list.size() > 0) { + put(key, list); + } + } + + public static String getMd5(String str) { + if (StringHelper.isEmpty(str)) { + return null; + } + + try { + MessageDigest md5 = MessageDigest.getInstance("MD5"); + md5.update(str.getBytes()); + byte[] byteArray = md5.digest(); + + BigInteger bigInt = new BigInteger(1, byteArray); + + String result = bigInt.toString(16); + + while (result.length() < 32) { + result = "0" + result; + } + + return result; + } catch (Exception e) { + return null; + } + } +} diff --git a/src/main/java/com/se/nsl/helper/ComHelper.java b/src/main/java/com/se/nsl/helper/ComHelper.java new file mode 100644 index 0000000..ee09a54 --- /dev/null +++ b/src/main/java/com/se/nsl/helper/ComHelper.java @@ -0,0 +1,250 @@ +package com.se.nsl.helper; + +import com.se.nsl.domain.dto.*; +import lombok.extern.slf4j.Slf4j; +import org.gdal.gdal.Band; +import org.gdal.gdal.Dataset; +import org.gdal.gdal.WarpOptions; +import org.gdal.gdal.gdal; +import org.gdal.gdalconst.gdalconst; +import org.gdal.ogr.Geometry; +import org.gdal.ogr.ogr; + +import javax.imageio.ImageIO; +import java.awt.*; +import java.awt.image.BufferedImage; +import java.io.BufferedWriter; +import java.io.File; +import java.io.FileWriter; +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.Vector; +import java.util.concurrent.CopyOnWriteArrayList; + +@Slf4j +@SuppressWarnings("ALL") +public class ComHelper { + public static BufferedImage createImage(int width, int height) { + BufferedImage image = new BufferedImage(width, height, BufferedImage.TYPE_INT_ARGB); + Graphics2D graphic = image.createGraphics(); + Color transparent = new Color(0, 0, 0, 0); + graphic.setColor(transparent); + graphic.clearRect(0, 0, width, height); + graphic.dispose(); + + return image; + } + + public static void savePng(BufferedImage image, String png) { + try { + 
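CaffeineHelper wraps a single Caffeine cache behind static methods, with getMd5 available for building compact keys. The usual check-then-fill pattern around an expensive query might look like the sketch below; it is not part of this patch, loadBuildings() is a hypothetical lookup, and the 30-minute TTL is only an example:

    CaffeineHelper.init(30);                          // call once at startup: entries expire 30 min after write
    String key = CaffeineHelper.getMd5("building:bbox=116.64,39.88,116.65,39.89");
    List<BuildingDto> buildings = CaffeineHelper.getListByKey(key);
    if (buildings == null) {                          // cache miss
        buildings = loadBuildings();                  // hypothetical expensive query
        CaffeineHelper.putListByKey(key, buildings);  // cached only when the list is non-empty
    }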
ImageIO.write(image, "png", new File(png)); + } catch (Exception ex) { + log.error(ex.getMessage(), ex); + } + } + + public static void Resample(Dataset ds, String dest, int width, int height, LayerDto layer) { + Vector<String> vector = new Vector<>(); + //vector.add("-s_srs"); + //vector.add("EPSG:" + 4548); + //vector.add("-t_srs"); + //vector.add("EPSG:" + 4326); + vector.add("-ts"); + vector.add("" + width); + vector.add("" + height); + //vector.add("-te"); + //vector.add("" + layer.getExtension().getMinx()); + //vector.add("" + layer.getExtension().getMiny()); + //vector.add("" + layer.getExtension().getMaxx()); + //vector.add("" + layer.getExtension().getMaxy()); + //vector.add("-te_srs"); + //vector.add("EPSG:" + 4326); + vector.add("-r"); + vector.add("bilinear"); + vector.add("-of"); + vector.add("GTiff"); + WarpOptions warpOptions = new WarpOptions(vector); + + Dataset destDs = gdal.Warp(dest, new Dataset[]{ds}, warpOptions); + destDs.delete(); + } + + public static BuildingDto intersects(ResultDto dto, double x, double y) { + Geometry p = new Geometry(ogr.wkbPoint); + p.AddPoint_2D(x, y); + p.AssignSpatialReference(dto.getBuildingList().get(0).getGeom().GetSpatialReference()); + + return dto.getBuildingList().parallelStream().filter(b -> b.getGeom().Intersects(p)).findFirst().orElse(null); + } + + public static boolean isContains(Geometry g, double x, double y) { + Geometry p = new Geometry(ogr.wkbPoint); + p.AddPoint_2D(x, y); + p.AssignSpatialReference(g.GetSpatialReference()); + + return g.Contains(p); + } + + public static double getMinVal(double val, double radix) { + return ((long) Math.floor(val * radix)) / radix; + } + + public static double getMaxVal(double val, double radix) { + return ((long) Math.ceil(val * radix)) / radix; + } + + public static float getFloatValue(float val) { + return (Float.isNaN(val) || val < -999) ? 
Float.NaN : val;
+    }
+
+    public static int getSafeValue(int val) {
+        if (val < 0) return 0;
+        if (val > 255) return 255;
+
+        return val;
+    }
+
+    public static boolean isValid(Double val) {
+        return !Double.isNaN(val) && val > Integer.MIN_VALUE;
+    }
+
+    public static void getFiles(List<String> files, File file, String suffix) {
+        if (!file.exists()) return;
+
+        if (file.isDirectory()) {
+            File[] fileList = file.listFiles();
+            for (File f : fileList) {
+                if (f.isDirectory()) {
+                    getFiles(files, f, suffix);
+                } else {
+                    if (f.getName().toLowerCase().endsWith(suffix)) {
+                        files.add(f.getPath());
+                    }
+                }
+            }
+        } else {
+            if (file.getName().toLowerCase().endsWith(suffix)) {
+                files.add(file.getPath());
+            }
+        }
+    }
+
+    public static String getNameWithExt(String file) {
+        return file.substring(file.lastIndexOf(File.separator) + 1, file.lastIndexOf("."));
+    }
+
+    public static void writeJson(String filePath, String json) throws IOException {
+        FileWriter fw = new FileWriter(filePath);
+        BufferedWriter bw = new BufferedWriter(fw);
+        bw.write(json);
+        bw.close();
+        fw.close();
+    }
+
+    // Collects the cells of band 1 that fall inside geometry g and hold a valid (non-nodata) value.
+    private List<PointDto> getValues(Dataset ds, Geometry g, double[] transform, int xSize, int ySize) {
+        double[] env = new double[4];
+        g.GetEnvelope(env);
+
+        int startX = (int) Math.floor((env[0] - transform[0]) / transform[1]);
+        int endX = (int) Math.floor((env[1] - transform[0]) / transform[1]);
+        int startY = (int) Math.floor((transform[3] - env[3]) / Math.abs(transform[5]));
+        int endY = (int) Math.floor((transform[3] - env[2]) / Math.abs(transform[5]));
+        if (startX < 0) startX = 0;
+        if (startY < 0) startY = 0;
+        if (endX > ds.getRasterXSize()) endX = ds.getRasterXSize();
+        if (endY > ds.getRasterYSize()) endY = ds.getRasterYSize();
+        if (endX - startX < 1 || endY - startY < 1) return null;
+
+        float[] values = new float[1];
+        List<PointDto> points = new ArrayList<>();
+        for (int x = startX; x <= endX; x++) {
+            for (int y = startY; y <= endY; y++) {
+                double X = transform[0] + x * transform[1] + y * transform[2];
+                double Y = transform[3] + x * transform[4] + y * transform[5];
+                ds.GetRasterBand(1).ReadRaster(x, y, 1, 1, values);
+
+                if (Float.isNaN(values[0]) || values[0] < -999 || !isContains(g, X, Y)) continue;
+                points.add(new PointDto(X, Y, values[0]));
+            }
+        }
+
+        return points;
+    }
+
+    // Renders a water-depth raster to an ARGB image: depth plus terrain gives the water-surface
+    // elevation, which is normalized by the extension's height range and packed into the G/B channels.
+    private void water2Png2(ResultDto dto, LayerDto layer, String tif, String png, int width, int height) {
+        Dataset ds = null;
+        try {
+            ds = gdal.Open(tif, gdalconst.GA_ReadOnly);
+            if (null == ds || 0 == ds.getRasterCount()) return;
+
+            Band band = ds.GetRasterBand(1);
+            float[] buffer = new float[width * height];
+            band.ReadRaster(0, 0, width, height, buffer);
+
+            BufferedImage image = new BufferedImage(width, height, BufferedImage.TYPE_INT_ARGB);
+            double differ = layer.getExtension().getDiffer(), maxHeight = layer.getExtension().getMaxHeight(), minHeight = layer.getExtension().getMinHeight();
+            float[] ts = layer.getTerrain().getVals().get(width + "_" + height);
+
+            /*layer.getTerrain().getXyo().get(width + "_" + height).stream().forEach(xyo -> {
+
+            });*/
+            for (XYO xyo : layer.getTerrain().getXyo().get(width + "_" + height)) {
+                float depth = buffer[xyo.getOffset()] + ts[xyo.getOffset()];
+                if (Float.isNaN(depth) || depth < minHeight) continue;
+
+                int r = 0, g, b;
+                if (depth > maxHeight) {
+                    g = b = 255;
+                } else {
+                    int val = (int) ((depth - minHeight) / differ * 65535);
+                    g = val / 256;
+                    b = val % 256;
+                }
+
+                Color color = new Color(r, g, b, 127);
+                image.setRGB(xyo.getX(), xyo.getY(), color.getRGB());
+            }
+
+            
//savePng(image, png); + } finally { + if (null != ds) ds.delete(); + } + } + + private List<PointDto> getValues2(Dataset ds, Geometry g, double[] transform, int xSize, int ySize) { + double[] env = new double[4]; + g.GetEnvelope(env); + + int startX = (int) Math.floor((env[0] - transform[0]) / transform[1]); + int endX = (int) Math.floor((env[1] - transform[0]) / transform[1]); + int startY = (int) Math.floor((transform[3] - env[3]) / Math.abs(transform[5])); + int endY = (int) Math.floor((transform[3] - env[2]) / Math.abs(transform[5])); + if (startX < 0) startX = 0; + if (startY < 0) startY = 0; + if (endX > ds.getRasterXSize()) endX = ds.getRasterXSize(); + if (endY > ds.getRasterYSize()) endY = ds.getRasterYSize(); + if (endX - startX < 1 || endY - startY < 1) return null; + + List<XYDto> xyList = new ArrayList<>(); + for (int x = startX; x <= endX; x++) { + for (int y = startY; y <= endY; y++) { + xyList.add(new XYDto(x, y)); + } + } + + List<PointDto> points = new CopyOnWriteArrayList<>(); + xyList.stream().forEach(xy -> { + double X = transform[0] + xy.getX() * transform[1] + xy.getY() * transform[2]; + double Y = transform[3] + xy.getX() * transform[4] + xy.getY() * transform[5]; + + float[] values = new float[1]; + ds.GetRasterBand(1).ReadRaster(xy.getX(), xy.getY(), 1, 1, values); + if (Float.isNaN(values[0]) || values[0] < -999 || !ComHelper.isContains(g, X, Y)) return; + + points.add(new PointDto(X, Y, values[0])); + }); + + return points; + } +} diff --git a/src/main/java/com/se/nsl/helper/FileHelper.java b/src/main/java/com/se/nsl/helper/FileHelper.java new file mode 100644 index 0000000..20b71d7 --- /dev/null +++ b/src/main/java/com/se/nsl/helper/FileHelper.java @@ -0,0 +1,279 @@ +package com.se.nsl.helper; + +import com.twmacinta.util.MD5; +import lombok.extern.slf4j.Slf4j; +import org.apache.commons.codec.digest.DigestUtils; + +import java.io.*; +import java.nio.ByteBuffer; +import java.nio.channels.FileChannel; +import java.nio.charset.StandardCharsets; +import java.security.MessageDigest; +import java.text.DecimalFormat; +import java.util.List; + +@Slf4j +@SuppressWarnings("ALL") +public class FileHelper { + public final static String POINT = "."; + + public final static int I16 = 16; + + public static final double D1024 = 1024.0; + + public static final double D1050 = 1050.0; + + public final static int I1000000 = 1000000; + + public static final char[] HEX_DIGITS = {'0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f'}; + + public static String getFileName(String file) { + int idx = file.lastIndexOf(File.separator); + if (idx > -1) { + return file.substring(idx + 1); + } + + return ""; + } + + public static String getName(String file) { + String fileName = getFileName(file); + int idx = fileName.lastIndexOf("."); + if (idx > -1) { + return fileName.substring(0, idx); + } + + return fileName; + } + + public static String getExtension(File file) { + if (file == null) { + return null; + } + + String fileName = file.getName().toLowerCase(); + + int idx = fileName.indexOf(POINT); + if (idx == -1) { + return ""; + } + + return fileName.substring(idx); + } + + public static String getExtension(String fileName) { + if (StringHelper.isEmpty(fileName)) { + return ""; + } + + int idx = fileName.lastIndexOf(POINT); + if (idx == -1) { + return ""; + } + + return fileName.substring(idx).toLowerCase(); + } + + public static String formatByte(long byteNumber) { + double kbNumber = byteNumber / D1024; + if (kbNumber < D1024) { + return new 
DecimalFormat("#.##KB").format(kbNumber); + } + double mbNumber = kbNumber / D1024; + if (mbNumber < D1024) { + return new DecimalFormat("#.##MB").format(mbNumber); + } + double gbNumber = mbNumber / D1024; + if (gbNumber < D1024) { + return new DecimalFormat("#.##GB").format(gbNumber); + } + double tbNumber = gbNumber / D1024; + + return new DecimalFormat("#.##TB").format(tbNumber); + } + + public static String getSquareMeter(double num) { + if (num < I1000000) { + return new DecimalFormat("#.##骞虫柟绫�").format(num); + } + + double knum = num / I1000000; + + return new DecimalFormat("#.##骞虫柟鍗冪背").format(knum); + } + + public static double sizeToMb(long size) { + if (size < D1050) { + return 0.001; + } + + String str = String.format("%.3f", size / D1024 / D1024); + + return Double.parseDouble(str); + } + + public static String getMd5ByJdk(String filePath) throws IOException { + FileInputStream fileStream = new FileInputStream(filePath); + String md5 = DigestUtils.md5Hex(fileStream); + fileStream.close(); + + return md5; + } + + public static String getFastMd5(String filePath) throws IOException { + String hash = MD5.asHex(MD5.getHash(new File(filePath))); + + MD5 md5 = new MD5(); + md5.Update(hash, null); + + return md5.asHex(); + } + + public static void deleteDir(String dir) { + File file = new File(dir); + + deleteFiles(file); + } + + public static void deleteFiles(File file) { + if (null == file || !file.exists()) { + return; + } + + if (file.isDirectory()) { + File[] files = file.listFiles(); + if (null != files && files.length > 0) { + for (File f : files) { + if (f.isDirectory()) { + deleteFiles(f); + } else { + f.delete(); + } + } + } + } + + file.delete(); + } + + public static String getRelativePath(String file) { + if (StringHelper.isEmpty(file)) { + return null; + } + + int idx = file.lastIndexOf(File.separator); + int start = file.lastIndexOf(File.separator, idx - 1); + + return file.substring(start + 1); + } + + public static String getPath(String file) { + if (StringHelper.isEmpty(file)) { + return null; + } + + int end = file.lastIndexOf(File.separator); + + return file.substring(0, end); + } + + @SuppressWarnings("unused") + public static String getFileMd5(String filePath) { + FileInputStream fis = null; + try { + MessageDigest md = MessageDigest.getInstance("MD5"); + + fis = new FileInputStream(new File(filePath)); + FileChannel fChannel = fis.getChannel(); + ByteBuffer buffer = ByteBuffer.allocateDirect(1024 * 1024); + + while (fChannel.read(buffer) != -1) { + buffer.flip(); + md.update(buffer); + buffer.compact(); + } + byte[] b = md.digest(); + + return byteToHexString(b); + } catch (Exception ex) { + ex.printStackTrace(); + return null; + } finally { + try { + if (null != fis) { + fis.close(); + } + } catch (IOException ex) { + ex.printStackTrace(); + } + } + } + + public static String byteToHexString(byte[] tmp) { + char[] str = new char[16 * 2]; + + int k = 0; + for (int i = 0; i < I16; i++) { + byte byte0 = tmp[i]; + str[k++] = HEX_DIGITS[byte0 >>> 4 & 0xf]; + str[k++] = HEX_DIGITS[byte0 & 0xf]; + } + + return new String(str); + } + + public static String getStringMd5(String text) { + StringBuilder builder = new StringBuilder(); + try { + MessageDigest md5 = MessageDigest.getInstance("MD5"); + + byte[] bytes = md5.digest(text.getBytes(StandardCharsets.UTF_8)); + for (byte aByte : bytes) { + builder.append(Integer.toHexString((0x000000FF & aByte) | 0xFFFFFF00).substring(6)); + } + } catch (Exception ex) { + log.error(ex.getMessage(), ex); + } + + return 
builder.toString(); + } + + public static void getFilesByPath(List<String> list, String path) { + File file = new File(path); + if (file.isDirectory()) { + File[] files = file.listFiles(); + if (null == files) { + return; + } + + for (File f : files) { + if (f.isDirectory()) { + getFilesByPath(list, f.getPath()); + } else { + list.add(f.getPath()); + } + } + } else { + list.add(file.getPath()); + } + } + + public static void copyFile(File src, File dest) throws IOException { + InputStream is = null; + OutputStream os = null; + try { + is = new FileInputStream(src); + os = new FileOutputStream(dest); + + byte[] buffer = new byte[1024]; + + int length; + while ((length = is.read(buffer)) > 0) { + os.write(buffer, 0, length); + } + } finally { + os.close(); + is.close(); + } + } +} diff --git a/src/main/java/com/se/nsl/helper/GdalHelper.java b/src/main/java/com/se/nsl/helper/GdalHelper.java new file mode 100644 index 0000000..84dbe91 --- /dev/null +++ b/src/main/java/com/se/nsl/helper/GdalHelper.java @@ -0,0 +1,231 @@ +package com.se.nsl.helper; + +import lombok.extern.slf4j.Slf4j; +import org.gdal.gdal.Band; +import org.gdal.gdal.Dataset; +import org.gdal.gdal.gdal; +import org.gdal.gdalconst.gdalconst; +import org.gdal.ogr.*; +import org.gdal.osr.CoordinateTransformation; +import org.gdal.osr.SpatialReference; +import org.gdal.osr.osr; + +import java.io.File; + +@Slf4j +@SuppressWarnings("ALL") +public class GdalHelper { + public final static int I4326 = 4326; + + public final static int I4490 = 4490; + + public static SpatialReference SR4326; + + public static SpatialReference SR4490; + + public final static String CGCS2000 = "CGCS2000"; + + /** + * 鍒濆鍖� + */ + public static void init(String gdalPath) { + // 閰嶇疆鐜鍙橀噺 + if (!StringHelper.isEmpty(gdalPath)) { + if (WebHelper.isWin()) { + gdal.SetConfigOption("GDAL_DATA", gdalPath + "/gdal-data"); + gdal.SetConfigOption("PROJ_LIB", gdalPath + "/proj7/share"); + //System.setProperty("PROJ_LIB", gdalPath + "/proj7/share") + gdal.SetConfigOption("GDAL_DRIVER_PATH", gdalPath + "/gdalplugins"); + + String path = System.getenv("PATH"); + if (!path.contains(gdalPath)) { + System.setProperty("PATH", path + ";" + gdalPath); + } + } else { + //System.setProperty("java.library.path", gdalPath); + } + } + + gdal.SetConfigOption("GDAL_FILENAME_IS_UTF8", "YES"); + gdal.SetConfigOption("SHAPE_ENCODING", ""); + gdal.SetConfigOption("PGEO_DRIVER_TEMPLATE", "DRIVER=Microsoft Access Driver (*.mdb, *.accdb);DBQ=%s"); + gdal.SetConfigOption("MDB_DRIVER_TEMPLATE", "DRIVER=Microsoft Access Driver (*.mdb, *.accdb);DBQ=%s"); + + gdal.AllRegister(); + ogr.RegisterAll(); + initSr(); + } + + public static void initSr() { + try { + SR4326 = new SpatialReference(); + SR4326.ImportFromEPSG(I4326); + SR4326.SetAxisMappingStrategy(osr.OAMS_TRADITIONAL_GIS_ORDER); + + SR4490 = new SpatialReference(); + SR4490.ImportFromEPSG(I4490); + SR4490.SetAxisMappingStrategy(osr.OAMS_TRADITIONAL_GIS_ORDER); + } catch (Exception ex) { + log.error(ex.getMessage(), ex); + } + } + + public static SpatialReference createSpatialReference(int epsg) { + SpatialReference sr = new SpatialReference(); + sr.ImportFromEPSG(epsg); + sr.SetAxisMappingStrategy(osr.OAMS_TRADITIONAL_GIS_ORDER); + + return sr; + } + + public static void createPyramid(String file) { + Dataset ds = null; + try { + File f = new File(file); + if (!f.exists() || f.isDirectory()) { + return; + } + + ds = gdal.Open(file, gdalconst.GA_ReadOnly); + if (null == ds || ds.getRasterCount() < 1 || null == ds.GetSpatialRef()) { + return; 
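+                // Nothing to build for rasters that cannot be opened, contain no bands, or carry no spatial reference.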
+ } + + Band band = ds.GetRasterBand(1); + if (0 == band.GetOverviewCount()) { + ds.BuildOverviews("nearest", new int[]{2, 4, 6, 8, 16}, null); + } + } catch (Exception ex) { + log.error(ex.getMessage(), ex); + } finally { + if (null != ds) { + ds.delete(); + } + } + } + + public static void delete(Layer layer, DataSource dataSource, Driver driver) { + try { + if (null != layer) { + layer.delete(); + } + } catch (Exception ex) { + log.error(ex.getMessage(), ex); + } + try { + if (null != dataSource) { + dataSource.delete(); + } + } catch (Exception ex) { + log.error(ex.getMessage(), ex); + } + try { + if (null != driver) { + driver.delete(); + } + } catch (Exception ex) { + log.error(ex.getMessage(), ex); + } + } + + public static Geometry getMinPoint(Dataset ds) { + double[] transform = new double[6]; + ds.GetGeoTransform(transform); + + double xMin = transform[0]; + double yMin = transform[3] - ds.getRasterYSize() * transform[1]; + + Geometry point = new Geometry(ogr.wkbPoint); + point.AddPoint(xMin, yMin, 0); + + return Transform(ds, point); + } + + public static Geometry getMaxPoint(Dataset ds) { + double[] transform = new double[6]; + ds.GetGeoTransform(transform); + + double xMax = transform[0] + (ds.getRasterYSize() * transform[1]); + double yMax = transform[3]; + + Geometry point = new Geometry(ogr.wkbPoint); + point.AddPoint(xMax, yMax, 0); + + return Transform(ds, point); + } + + public static Geometry Transform(Dataset ds, Geometry point) { + point.AssignSpatialReference(ds.GetSpatialRef()); + if (ds.GetSpatialRef().IsGeographic() > 0) { + return point; + } + + String srsName = ds.GetSpatialRef().GetName(); + //if (srsName.Contains(CGCS2000)) + //{ + // point.TransformTo(sr4490); + //} + //else + //{ + point.TransformTo(SR4326); + //} + //point.SwapXY(); + + return point; + } + + public static Geometry toWgs84(SpatialReference sr, double x, double y) { + Geometry point = new Geometry(ogr.wkbPoint); + point.AssignSpatialReference(sr); + point.AddPoint(x, y); + + point.TransformTo(GdalHelper.SR4326); + //point.SwapXY(); + + return point; + } + + public static int toWgs84(SpatialReference sr, Geometry g) { + CoordinateTransformation ct = new CoordinateTransformation(sr, GdalHelper.SR4326); + if (sr.IsProjected() != 1) { + sr.SetAxisMappingStrategy(osr.OAMS_TRADITIONAL_GIS_ORDER); + } + + return g.TransformTo(GdalHelper.SR4326); + } + + public static double[] fromWgs84(SpatialReference sr, double x, double y) { + CoordinateTransformation ct = new CoordinateTransformation(GdalHelper.SR4326, sr); + if (sr.IsProjected() != 1) { + sr.SetAxisMappingStrategy(osr.OAMS_TRADITIONAL_GIS_ORDER); + } + + return ct.TransformPoint(x, y); + } + + public static int fromWgs84(SpatialReference sr, Geometry g) { + CoordinateTransformation ct = new CoordinateTransformation(GdalHelper.SR4326, sr); + if (sr.IsProjected() != 1) { + sr.SetAxisMappingStrategy(osr.OAMS_TRADITIONAL_GIS_ORDER); + } + + return g.TransformTo(sr); + } + + public static Geometry createPolygon(SpatialReference sr, Double minx, Double miny, Double maxx, Double maxy) { + Geometry ring = new Geometry(ogr.wkbLinearRing); + ring.AddPoint_2D(minx, maxy); + ring.AddPoint_2D(maxx, maxy); + ring.AddPoint_2D(maxx, miny); + ring.AddPoint_2D(minx, miny); + ring.AddPoint_2D(minx, maxy); + + Geometry poly = new Geometry(ogr.wkbPolygon); + poly.AddGeometry(ring); + if (null != sr) { + poly.AssignSpatialReference(sr); + } + + return poly; + } +} diff --git a/src/main/java/com/se/nsl/helper/H5ReadHelper.java 
b/src/main/java/com/se/nsl/helper/H5ReadHelper.java new file mode 100644 index 0000000..5cd4925 --- /dev/null +++ b/src/main/java/com/se/nsl/helper/H5ReadHelper.java @@ -0,0 +1,393 @@ +package com.se.nsl.helper; + +import java.io.File; +import java.io.FileNotFoundException; +import java.io.IOException; +import java.io.PrintWriter; +import java.io.UnsupportedEncodingException; +import java.nio.ByteBuffer; +import java.nio.ByteOrder; +import java.nio.charset.Charset; +import java.text.DecimalFormat; +import java.util.EnumSet; +import java.util.HashMap; +import java.util.Map; + +import hdf.hdf5lib.H5; +import hdf.hdf5lib.HDF5Constants; + +public class H5ReadHelper { + //浼犲叆璺熺粍 + private static String PATH = "/"; + //鏁版嵁闆嗗悕绉� + private static String DATASETNAME; + //璁剧疆鏁版嵁闆嗚鏁� 涔嬪悗浼氬姩鎬佽幏寰楄鏁帮紝瀹炵幇瀵瑰簲缂╁锛堝缓璁垵濮嬪�艰缃ぇ浜涳級 + private static final long DIM0 = 100000; + //璁剧疆鏁村瀷闀垮害 + protected static final int INTEGERSIZE = 4; + //璁剧疆闀挎暣鍨嬮暱搴� + protected static final int LONGSIZE = 8; + //璁剧疆鍗曠簿搴﹂暱搴� + protected static final int FLOATSIZE = 4; + //璁剧疆鍙岀簿搴﹀瀷闀垮害 + protected static final int DOUBLESIZE = 8; + //璁剧疆瀛楃涓叉渶澶ч暱搴� + protected final static int MAXSTRINGSIZE = 80; + + static class Sensor_Datatype { + static int numberMembers = 5;//琛ㄧず鍒楅」鏁� + static int[] memberDims = {1, 1, 1, 1, 1}; + static String[] memberNames = { //瀹氫箟琛ㄤ腑鎵�鏈夊瓧娈靛悕绉� + "trading_day", "updatetime", "instrument_id", "gap_number", "reserve" + };//鏁版嵁鍏冪礌涓哄叿浣撶殑琛ㄥ瓧娈靛悕绉� + static long[] memberMemTypes = { //瀵瑰簲瀛楁绫诲瀷鍊� + HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5T_NATIVE_LONG, HDF5Constants.H5T_NATIVE_FLOAT, HDF5Constants.H5T_NATIVE_DOUBLE, HDF5Constants.H5T_C_S1 + };//鐢ㄤ簬瀹氫箟姣忎釜瀛楁鍏冪礌鐨勭被鍨� 鎸囧畾瀛楁鍏冪礌绫诲瀷鐨勬椂鍊欐敞鎰忕被鍨嬮暱搴︼紝濡傛灉灏忎簬瀛樺偍鏁伴暱搴︼紝鍒欎細鍙戠敓鏁版嵁婧㈠嚭 + static long[] memberFileTypes = { //瀵瑰簲瀛楁绫诲瀷澶у皬 + HDF5Constants.H5T_STD_I32BE, HDF5Constants.H5T_STD_I64BE, HDF5Constants.H5T_IEEE_F32BE, HDF5Constants.H5T_IEEE_F64BE, HDF5Constants.H5T_C_S1 + };//瀵瑰簲鐨勫瓧娈电被鍨嬬殑澶у皬 + static int[] memberStorage = { //瀵瑰簲瀛楁绫诲瀷鍊奸暱搴� + INTEGERSIZE, LONGSIZE, FLOATSIZE, DOUBLESIZE, MAXSTRINGSIZE + };//瀹氫箟瀵瑰簲绫诲瀷鐨勯暱搴﹀ぇ灏� + + // Data size is the storage size for the members. + static long getTotalDataSize() { + long data_size = 0; + for (int indx = 0; indx < numberMembers; indx++) { + data_size += memberStorage[indx] * memberDims[indx]; + } + return DIM0 * data_size; + } + + static long getDataSize() { + long data_size = 0; + for (int indx = 0; indx < numberMembers; indx++) { + data_size += memberStorage[indx] * memberDims[indx]; + } + return data_size; + } + + static int getOffset(int memberItem) { + int data_offset = 0; + for (int indx = 0; indx < memberItem; indx++) { + data_offset += memberStorage[indx]; + } + return data_offset; + } + } + + static class Sensor { + public Integer trading_day; + public Long updatetime; + public Float instrument_id; + public Double gap_number; + public String reserve; + + public Sensor(Integer trading_day, Long updatetime, Float instrument_id, Double gap_number, String reserve) { + super(); + this.trading_day = trading_day; + this.updatetime = updatetime; + this.instrument_id = instrument_id; + this.gap_number = gap_number; + this.reserve = reserve; + } + + Sensor(ByteBuffer databuf, int dbposition) { + readBuffer(databuf, dbposition); + } + + //閬嶅巻.h5涓嬭竟鐨勬墍鏈夋暟鎹泦锛屽苟浠ユ暟缁勭殑鏂瑰紡杩斿洖鏁版嵁闆嗗悕绉� + private static String[] do_iterate(String filePath) { + int file_id = -1; + + // Open a file using default properties. 
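+            // H5Fopen returns a handle >= 0 on success; the calls below are guarded on that handle, so a
+            // failed open falls through to the error return instead of cascading further exceptions.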
+ try { + file_id = H5.H5Fopen(filePath, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT); + } catch (Exception e) { + e.printStackTrace(); + } + + // Begin iteration. + System.out.println("Objects in root group:"); + try { + if (file_id >= 0) { + int count = (int) H5.H5Gn_members(file_id, PATH); + String[] oname = new String[count]; + int[] otype = new int[count]; + int[] ltype = new int[count]; + long[] orefs = new long[count]; + H5.H5Gget_obj_info_all(file_id, PATH, oname, otype, ltype, orefs, HDF5Constants.H5_INDEX_NAME); + // Get type of the object and display its name and type. + for (int indx = 0; indx < otype.length; indx++) { + switch (H5O_type.get(otype[indx])) { + case H5O_TYPE_GROUP: + System.out.print(" Group: " + oname[indx] + "," + oname.length + " "); + break; + case H5O_TYPE_DATASET: + System.out.print(" Dataset: " + oname[indx] + "," + oname.length + " "); + break; + case H5O_TYPE_NAMED_DATATYPE: + System.out.print(" Datatype: " + oname[indx] + "," + oname.length + " "); + break; + default: + System.out.print(" Unknown: " + oname[indx] + "," + oname.length + " "); + } + } + System.out.println(); + return oname; + } + } catch (Exception e) { + e.printStackTrace(); + } + + // Close the file. + try { + if (file_id >= 0) { + H5.H5Fclose(file_id); + } + } catch (Exception e) { + e.printStackTrace(); + } + return new String[]{"鏁版嵁闆嗛亶鍘嗗嚭閿�"}; + } + + //璇绘暟鎹� + void readBuffer(ByteBuffer databuf, int dbposition) { + //0 + this.trading_day = databuf.getInt(dbposition + Sensor_Datatype.getOffset(0)); + //1 + this.updatetime = databuf.getLong(dbposition + Sensor_Datatype.getOffset(1)); + //2 + this.instrument_id = databuf.getFloat(dbposition + Sensor_Datatype.getOffset(2)); + //3 + this.gap_number = databuf.getDouble(dbposition + Sensor_Datatype.getOffset(3)); + //4 + ByteBuffer stringbuf_reserve = databuf.duplicate(); + stringbuf_reserve.position(dbposition + Sensor_Datatype.getOffset(4)); + stringbuf_reserve.limit(dbposition + Sensor_Datatype.getOffset(4) + MAXSTRINGSIZE); + byte[] bytearr_reserve = new byte[stringbuf_reserve.remaining()]; + stringbuf_reserve.get(bytearr_reserve); + this.reserve = new String(bytearr_reserve, Charset.forName("UTF-8")).trim(); + } + + //閲嶅啓toString()鏂规硶 + @Override + public String toString() { + DecimalFormat df = new DecimalFormat("#0.0000");//娴偣鏁颁繚鐣�4浣嶅皬鏁帮紝涓嶈冻锛岀敤0琛ラ綈 + return trading_day + ", " + updatetime + ", " + + df.format(instrument_id) + ", " + df.format(gap_number) + ", " + reserve; + } + } + + //璇诲彇琛ㄧ粨鏋勬暟鎹泦 + private static void ReadDataset(String filePath) { + int file_id = -1; + int strtype_id = -1; + int memtype_id = -1; + int dataspace_id = -1; + int dataset_id = -1; + long[] dims = {DIM0}; + Sensor[] object_data2 = new Sensor[(int) dims[0]]; + ; + byte[] dset_data; + String[] dsetName = {}; + //閬嶅巻.h5鏂囦欢涓嬭竟鐨勬墍鏈夋暟鎹泦 + dsetName = Sensor.do_iterate(filePath); + //閬嶅巻寰楀埌鏂囦欢涓嬭竟鐨勫悇涓暟鎹泦 Open an existing dataset. + for (int i = 0; i < dsetName.length; i++) { + try { + file_id = H5.H5Fopen(filePath, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT); + } catch (Exception e) { + e.printStackTrace(); + } + DATASETNAME = dsetName[i]; + try { + if (file_id >= 0) { + dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT); + } + } catch (Exception e) { + e.printStackTrace(); + } + // Get dataspace and allocate memory for read buffer. 
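+            // H5Sget_simple_extent_dims below overwrites dims[0] with the dataset's actual row count; the raw
+            // read buffer is then sized as dims[0] * Sensor_Datatype.getDataSize() bytes, one packed compound
+            // record per row, before being decoded through the ByteBuffer wrapper.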
+ try { + if (dataset_id >= 0) { + dataspace_id = H5.H5Dget_space(dataset_id); + } + } catch (Exception e) { + e.printStackTrace(); + } + try { + if (dataspace_id >= 0) { + H5.H5Sget_simple_extent_dims(dataspace_id, dims, null); + } + } catch (Exception e) { + e.printStackTrace(); + } + // Create string datatype. + try { + strtype_id = H5.H5Tcopy(HDF5Constants.H5T_C_S1); + if (strtype_id >= 0) { + H5.H5Tset_size(strtype_id, MAXSTRINGSIZE); + } + } catch (Exception e) { + e.printStackTrace(); + } + // Create the compound datatype for memory. + try { + memtype_id = H5.H5Tcreate(HDF5Constants.H5T_COMPOUND, Sensor_Datatype.getDataSize()); + if (memtype_id >= 0) { + for (int indx = 0; indx < Sensor_Datatype.numberMembers; indx++) { + int type_id = (int) Sensor_Datatype.memberMemTypes[indx]; + if (type_id == HDF5Constants.H5T_C_S1) { + type_id = strtype_id; + } + H5.H5Tinsert(memtype_id, Sensor_Datatype.memberNames[indx], Sensor_Datatype.getOffset(indx), + type_id); + } + } + } catch (Exception e) { + e.printStackTrace(); + } + // allocate memory for read buffer. + dset_data = new byte[(int) dims[0] * (int) Sensor_Datatype.getDataSize()]; + // Read data. + try { + if ((dataset_id >= 0) && (memtype_id >= 0)) { + H5.H5Dread(dataset_id, memtype_id, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, + HDF5Constants.H5P_DEFAULT, dset_data); + } + ByteBuffer inBuf = ByteBuffer.wrap(dset_data); + inBuf.order(ByteOrder.nativeOrder()); + for (int indx = 0; indx < (int) dims[0]; indx++) { + object_data2[indx] = new Sensor(inBuf, indx * (int) Sensor_Datatype.getDataSize());//灏嗚鍙栧埌鐨勬暟鎹瓨鍏ヨ鏁扮粍 + } + } catch (Exception e) { + e.printStackTrace(); + } + // Output the data to the screen. + for (int indx = 0; indx < dims[0]; indx++) { + System.out.println(DATASETNAME + " [" + indx + "]:"); + System.out.println(object_data2[indx].toString()); + } + //鍐欏埌鏈湴鐨勬枃鏈腑 + H5ReadHelper.writeLocalExcel(object_data2, DATASETNAME, dims[0]); + try { + if (dataset_id >= 0) { + H5.H5Dclose(dataset_id); + } + } catch (Exception e) { + e.printStackTrace(); + } + // Terminate access to the data space. + try { + if (dataspace_id >= 0) { + H5.H5Sclose(dataspace_id); + } + } catch (Exception e) { + e.printStackTrace(); + } + // Terminate access to the mem type. + try { + if (memtype_id >= 0) { + H5.H5Tclose(memtype_id); + } + } catch (Exception e) { + e.printStackTrace(); + } + try { + if (strtype_id >= 0) { + H5.H5Tclose(strtype_id); + } + } catch (Exception e) { + e.printStackTrace(); + } + // Close the file. 
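+            // Handles are released in reverse order of acquisition: dataset, dataspace, compound memory type,
+            // string type, and finally the file itself.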
+ try { + if (file_id >= 0) { + H5.H5Fclose(file_id); + } + } catch (Exception e) { + e.printStackTrace(); + } + } + + } + + enum H5O_type { + H5O_TYPE_UNKNOWN(-1), // Unknown object type + H5O_TYPE_GROUP(0), // Object is a group + H5O_TYPE_DATASET(1), // Object is a dataset + H5O_TYPE_NAMED_DATATYPE(2), // Object is a named data type + H5O_TYPE_NTYPES(3); // Number of different object types + private static final Map<Integer, H5O_type> lookup = new HashMap<Integer, H5O_type>(); + + static { + for (H5O_type s : EnumSet.allOf(H5O_type.class)) { + lookup.put(s.getCode(), s); + } + } + + private int code; + + H5O_type(int layout_type) { + this.code = layout_type; + } + + public int getCode() { + return this.code; + } + + public static H5O_type get(int code) { + return lookup.get(code); + } + } + + //灏嗚鍑虹殑鏁版嵁鍐欏埌鏈湴 + public static void writeLocalExcel(Sensor[] dataSensors, String datasetName, long dims) { + //娴嬭瘯鏁版嵁鍐欏嚭 io + String url = "D:/hdf5_write_txt/"; + String dsetUrl = datasetName; + String path = url + dsetUrl + ".txt"; + File file = new File(path); + if (file.exists()) { + file.delete(); + } else { + file = new File(path); + try { + file.createNewFile(); + } catch (IOException e) { + e.printStackTrace(); + } + } + //鎵ц閬嶅巻鍐欏嚭 + try { + PrintWriter pw = new PrintWriter(file, "UTF-8"); + pw.append( + "trading_day, updatetime, instrument_id, gap_number, reserve " + ); + pw.print("\n"); + for (int i = 0; i < dims; i++) { + StringBuffer sb = new StringBuffer(""); + //褰撳墠鑾峰彇鐨勫瓧娈靛�� + Sensor data = dataSensors[i]; + if (i < dataSensors.length - 1) { + sb.append(data).append("," + "\t"); + } else { + sb.append(data).append(" "); + } + //姝ゆ椂涓嶅仛鍒ゆ柇浼氬彂鐢熺┖鎸囬拡寮傚父 瀹氫箟鐨刪5鏂囦欢琛屾暟鍥哄畾锛屽悇涓枃浠惰鏁颁笉鍚� + if (null != data) { + pw.print(data.toString() + "\n"); + } + } + pw.close(); + } catch (FileNotFoundException | UnsupportedEncodingException e) { + e.printStackTrace(); + } + } + + public static void main(String[] args) { + H5ReadHelper.ReadDataset("D:/tongzhou.h5"); + } +} + diff --git a/src/main/java/com/se/nsl/helper/HDF5ReaderHelper.java b/src/main/java/com/se/nsl/helper/HDF5ReaderHelper.java new file mode 100644 index 0000000..34d21f0 --- /dev/null +++ b/src/main/java/com/se/nsl/helper/HDF5ReaderHelper.java @@ -0,0 +1,10 @@ +package com.se.nsl.helper; + +public class HDF5ReaderHelper { + public static void main(String[] args) { + + + } + + +} diff --git a/src/main/java/com/se/nsl/helper/HttpHelper.java b/src/main/java/com/se/nsl/helper/HttpHelper.java new file mode 100644 index 0000000..497589c --- /dev/null +++ b/src/main/java/com/se/nsl/helper/HttpHelper.java @@ -0,0 +1,269 @@ +package com.se.nsl.helper; + +import org.apache.http.*; +import org.apache.http.client.config.CookieSpecs; +import org.apache.http.client.config.RequestConfig; +import org.apache.http.client.utils.URIUtils; +import org.apache.http.entity.InputStreamEntity; +import org.apache.http.impl.client.CloseableHttpClient; +import org.apache.http.impl.client.HttpClients; +import org.apache.http.message.BasicHeader; +import org.apache.http.message.BasicHttpEntityEnclosingRequest; +import org.apache.http.message.BasicHttpRequest; +import org.apache.http.message.HeaderGroup; +import org.apache.http.util.EntityUtils; + +import javax.servlet.ServletException; +import javax.servlet.http.Cookie; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; +import java.io.IOException; +import java.net.HttpCookie; +import java.net.URI; +import java.net.URISyntaxException; +import java.util.Enumeration; + 
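+// Reverse-proxy style helper: service(...) replays the incoming servlet request against the target url
+// with Apache HttpClient, then copies the upstream status, headers and body back onto the servlet
+// response, dropping the hop-by-hop headers collected in HOP_HEADERS.
+//
+// Minimal usage sketch (the calling controller and target url are illustrative, not part of this class):
+//   new HttpHelper().service(request, response, "http://127.0.0.1:8080/se/layers");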
+@SuppressWarnings("ALL") +public class HttpHelper { + private final static String HTTP_SLASH2 = "://"; + + private final static String HTTP_SLASH = "/"; + + private final static Integer THREE = 3; + + protected static final HeaderGroup HOP_HEADERS; + + static { + HOP_HEADERS = new HeaderGroup(); + + String[] headers = new String[]{ + "Connection", "Keep-Alive", "Proxy-Authenticate", "Proxy-Authorization", + "TE", "Trailers", "Transfer-Encoding", "Upgrade", + //"X-RateLimit-Burst-Capacity", "X-RateLimit-Remaining", "X-RateLimit-Replenish-Rate", + "Access-Control-Allow-Origin", "Access-Control-Allow-Credentials", "Access-Control-Allow-Headers"}; + + for (String header : headers) { + HOP_HEADERS.addHeader(new BasicHeader(header, null)); + } + } + + public void service(HttpServletRequest request, HttpServletResponse response, String url) throws ServletException, IOException { + HttpRequest proxyRequest; + if (request.getHeader(HttpHeaders.CONTENT_LENGTH) != null || request.getHeader(HttpHeaders.TRANSFER_ENCODING) != null) { + proxyRequest = newProxyRequestWithEntity(request, url); + } else { + proxyRequest = new BasicHttpRequest(request.getMethod(), url); + } + + HttpHost host = this.getTargetHost(url); + // copyRequestHeaders(request, proxyRequest, host); + //setXrForwardedForHeader(request, proxyRequest); + + // if (!StringHelper.isEmpty(cookie)) proxyRequest.addHeader("Cookie", cookie + "; ") + + CloseableHttpClient client = null; + HttpResponse proxyResponse = null; + try { + client = this.createHttpClient(); + proxyResponse = client.execute(host, proxyRequest); + + int statusCode = proxyResponse.getStatusLine().getStatusCode(); + // response.setStatus(statusCode, proxyResponse.getStatusLine().getReasonPhrase()) + response.setStatus(statusCode); + + copyResponseHeaders(proxyResponse, request, response, url); + + if (statusCode == HttpServletResponse.SC_NOT_MODIFIED) { + response.setIntHeader(HttpHeaders.CONTENT_LENGTH, 0); + } else { + copyResponseEntity(proxyResponse, request, response); + } + } catch (Exception ex) { + throw new ServletException(ex.getMessage()); + } finally { + if (proxyResponse != null) { + EntityUtils.consumeQuietly(proxyResponse.getEntity()); + } + if (client != null) { + client.close(); + } + } + } + + protected HttpRequest newProxyRequestWithEntity(HttpServletRequest request, String url) throws IOException { + String method = request.getMethod(); + HttpEntityEnclosingRequest proxyRequest = new BasicHttpEntityEnclosingRequest(method, url); + proxyRequest.setEntity(new InputStreamEntity(request.getInputStream(), getContentLength(request))); + //String str = EntityUtils.toString(proxyRequest.getEntity(), "UTF-8") + + return proxyRequest; + } + + private long getContentLength(HttpServletRequest request) { + String contentLengthHeader = request.getHeader("Content-Length"); + if (contentLengthHeader != null) { + return Long.parseLong(contentLengthHeader); + } + + return -1L; + } + + protected void copyRequestHeaders(HttpServletRequest request, HttpRequest proxyRequest, HttpHost host) { + @SuppressWarnings("unchecked") + Enumeration<String> enumerationOfHeaderNames = request.getHeaderNames(); + + while (enumerationOfHeaderNames.hasMoreElements()) { + String headerName = enumerationOfHeaderNames.nextElement(); + copyRequestHeader(request, proxyRequest, host, headerName); + } + } + + protected void copyRequestHeader(HttpServletRequest request, HttpRequest proxyRequest, HttpHost host, String headerName) { + if (headerName.equalsIgnoreCase(HttpHeaders.CONTENT_LENGTH) || 
HOP_HEADERS.containsHeader(headerName)) { + return; + } + + @SuppressWarnings("unchecked") + Enumeration<String> headers = request.getHeaders(headerName); + while (headers.hasMoreElements()) { + String headerValue = headers.nextElement(); + if (headerName.equalsIgnoreCase(HttpHeaders.HOST)) { + headerValue = host.getHostName(); + if (host.getPort() != -1) { + headerValue += ":" + host.getPort(); + } + } else if (headerName.equalsIgnoreCase(org.apache.http.cookie.SM.COOKIE)) { + headerValue = getRealCookie(headerValue); + } + + proxyRequest.addHeader(headerName, headerValue); + } + } + + protected HttpHost getTargetHost(String url) throws ServletException { + try { + URI uri = new URI(url); + + return URIUtils.extractHost(uri); + } catch (URISyntaxException ex) { + throw new ServletException(ex.getMessage()); + } + } + + protected String getRealCookie(String cookieValue) { + StringBuilder escapedCookie = new StringBuilder(); + String[] cookies = cookieValue.split("[;,]"); + for (String cookie : cookies) { + String[] cookieSplit = cookie.split("="); + if (cookieSplit.length == 2) { + String cookieName = cookieSplit[0].trim(); + if (cookieName.startsWith(cookieName)) { + cookieName = cookieName.substring(cookieName.length()); + if (escapedCookie.length() > 0) { + escapedCookie.append("; "); + } + escapedCookie.append(cookieName).append("=").append(cookieSplit[1].trim()); + } + } + } + + return escapedCookie.toString(); + } + + private void setXrForwardedForHeader(HttpServletRequest request, HttpRequest proxyRequest) { + String forHeaderName = "X-Forwarded-For"; + String forHeader = request.getRemoteAddr(); + String existingForHeader = request.getHeader(forHeaderName); + if (existingForHeader != null) { + forHeader = existingForHeader + ", " + forHeader; + } + proxyRequest.setHeader(forHeaderName, forHeader); + + String protoHeaderName = "X-Forwarded-Proto"; + String protoHeader = request.getScheme(); + proxyRequest.setHeader(protoHeaderName, protoHeader); + } + + protected CloseableHttpClient createHttpClient() { + RequestConfig requestConfig = RequestConfig.custom() + .setRedirectsEnabled(false) + .setCookieSpec(CookieSpecs.IGNORE_COOKIES) + .setConnectTimeout(-1) + .setSocketTimeout(-1) + .build(); + + // return HttpClientBuilder.create().setDefaultRequestConfig(requestConfig).build() + return HttpClients.custom() + .setDefaultRequestConfig(requestConfig) + .build(); + } + + protected void copyResponseHeaders(HttpResponse proxyResponse, HttpServletRequest request, HttpServletResponse response, String url) { + for (Header header : proxyResponse.getAllHeaders()) { + copyResponseHeader(request, response, header, url); + } + } + + protected void copyResponseHeader(HttpServletRequest request, HttpServletResponse response, Header header, String url) { + String headerName = header.getName(); + if (HOP_HEADERS.containsHeader(headerName)) { + return; + } + + String headerValue = header.getValue(); + if (headerName.equalsIgnoreCase(org.apache.http.cookie.SM.SET_COOKIE) || headerName.equalsIgnoreCase(org.apache.http.cookie.SM.SET_COOKIE2)) { + copyProxyCookie(request, response, headerValue); + } else if (headerName.equalsIgnoreCase(HttpHeaders.LOCATION)) { + response.addHeader(headerName, rewriteUrlFromResponse(request, url, headerValue)); + } else { + response.addHeader(headerName, headerValue); + } + } + + protected void copyProxyCookie(HttpServletRequest request, HttpServletResponse response, String headerValue) { + String path = request.getContextPath() + request.getServletPath(); + if 
(path.isEmpty()) { + path = "/"; + } + + for (HttpCookie cookie : HttpCookie.parse(headerValue)) { + Cookie servletCookie = new Cookie(cookie.getName(), cookie.getValue()); + servletCookie.setComment(cookie.getComment()); + servletCookie.setMaxAge((int) cookie.getMaxAge()); + servletCookie.setPath(path); + + servletCookie.setSecure(cookie.getSecure()); + servletCookie.setVersion(cookie.getVersion()); + response.addCookie(servletCookie); + } + } + + protected String rewriteUrlFromResponse(HttpServletRequest request, String targetUri, String theUrl) { + if (theUrl.startsWith(targetUri)) { + StringBuffer curUrl = request.getRequestURL(); + + int pos; + if ((pos = curUrl.indexOf(HTTP_SLASH2)) >= 0) { + if ((pos = curUrl.indexOf(HTTP_SLASH, pos + THREE)) >= 0) { + curUrl.setLength(pos); + } + } + + curUrl.append(request.getContextPath()); + curUrl.append(request.getServletPath()); + curUrl.append(theUrl, targetUri.length(), theUrl.length()); + + return curUrl.toString(); + } + + return theUrl; + } + + protected void copyResponseEntity(HttpResponse proxyResponse, HttpServletRequest request, HttpServletResponse response) throws IOException { + HttpEntity entity = proxyResponse.getEntity(); + if (null == entity) { + return; + } + entity.writeTo(response.getOutputStream()); + } +} diff --git a/src/main/java/com/se/nsl/helper/RsaHelper.java b/src/main/java/com/se/nsl/helper/RsaHelper.java new file mode 100644 index 0000000..ec5f560 --- /dev/null +++ b/src/main/java/com/se/nsl/helper/RsaHelper.java @@ -0,0 +1,153 @@ +package com.se.nsl.helper; + +import lombok.extern.slf4j.Slf4j; +import org.apache.commons.codec.binary.Base64; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.springframework.core.io.ClassPathResource; + +import javax.crypto.Cipher; +import java.io.*; +import java.nio.charset.StandardCharsets; +import java.security.KeyFactory; +import java.security.KeyPair; +import java.security.KeyPairGenerator; +import java.security.SecureRandom; +import java.security.interfaces.RSAPrivateKey; +import java.security.interfaces.RSAPublicKey; +import java.security.spec.PKCS8EncodedKeySpec; +import java.security.spec.X509EncodedKeySpec; +import java.util.HashMap; +import java.util.Map; + +@Slf4j +@SuppressWarnings("ALL") +public class RsaHelper { + private static String privateKey; + + private static String publicKey; + + private static final String KEY_ALGORITHM = "RSA"; + + private static final int DEFAULT_RSA_KEY_SIZE = 1024; + + public static void generate() { + Map<String, String> result = generateRsaKey(DEFAULT_RSA_KEY_SIZE); + System.out.println("鍏挜涓猴細" + result.get("publicKey")); + System.out.println("绉侀挜涓猴細" + result.get("privateKey")); + } + + public static String getPrivateKey() throws IOException { + if (privateKey == null) { + InputStream inPrivate = new ClassPathResource("config" + File.separator + "rsa_private_key.txt").getInputStream(); + privateKey = inputStream2String(inPrivate); + inPrivate.close(); + } + + return privateKey; + } + + public static void setPublicKey(String key) { + publicKey = key; + } + + public static String getPublicKey() throws IOException { + if (publicKey == null) { + InputStream inPrivate = new ClassPathResource("config" + File.separator + "rsa_public_key.txt").getInputStream(); + publicKey = inputStream2String(inPrivate); + inPrivate.close(); + } + + return publicKey; + } + + public static String readFile(String fileName) throws IOException { + File file = new File(fileName); + BufferedReader br = new 
BufferedReader(new FileReader(file)); + + StringBuilder result = new StringBuilder(); + + String line = null; + while ((line = br.readLine()) != null) { + result.append(System.lineSeparator() + line); + } + br.close(); + + return result.toString(); + } + + private static String inputStream2String(InputStream is) throws IOException { + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + + int i = -1; + while ((i = is.read()) != -1) { + baos.write(i); + } + + String str = baos.toString(); + baos.close(); + + return str; + } + + public static Map<String, String> generateRsaKey(int keySize) { + Map<String, String> result = new HashMap<>(2); + try { + KeyPairGenerator keyPairGen = KeyPairGenerator.getInstance(KEY_ALGORITHM); + + keyPairGen.initialize(keySize, new SecureRandom()); + + KeyPair keyPair = keyPairGen.generateKeyPair(); + + String pub = new String(Base64.encodeBase64(keyPair.getPublic().getEncoded())); + result.put("publicKey", pub); + + String pri = new String(Base64.encodeBase64(keyPair.getPrivate().getEncoded())); + result.put("privateKey", pri); + } catch (Exception ex) { + log.error(ex.getMessage(), ex); + } + + return result; + } + + public static String decrypt(String str) throws Exception { + byte[] inputByte = Base64.decodeBase64(str.getBytes(StandardCharsets.UTF_8)); + + byte[] decoded = Base64.decodeBase64(getPrivateKey()); + RSAPrivateKey priKey = (RSAPrivateKey) KeyFactory.getInstance("RSA").generatePrivate(new PKCS8EncodedKeySpec(decoded)); + + Cipher cipher = Cipher.getInstance("RSA"); + cipher.init(Cipher.DECRYPT_MODE, priKey); + + String outStr = new String(cipher.doFinal(inputByte)); + + return outStr; + } + + public static String encrypt(String str) throws Exception { + byte[] decoded = Base64.decodeBase64(getPublicKey()); + + RSAPublicKey pubKey = (RSAPublicKey) KeyFactory.getInstance("RSA").generatePublic(new X509EncodedKeySpec(decoded)); + + Cipher cipher = Cipher.getInstance("RSA"); + cipher.init(Cipher.ENCRYPT_MODE, pubKey); + + String outStr = Base64.encodeBase64String(cipher.doFinal(str.getBytes(StandardCharsets.UTF_8))); + + return outStr; + } + + public static String encrypt(String key, String str) throws Exception { + byte[] decoded = Base64.decodeBase64(key); + + RSAPublicKey pubKey = (RSAPublicKey) KeyFactory.getInstance("RSA").generatePublic(new X509EncodedKeySpec(decoded)); + + Cipher cipher = Cipher.getInstance("RSA"); + cipher.init(Cipher.ENCRYPT_MODE, pubKey); + + String outStr = Base64.encodeBase64String(cipher.doFinal(str.getBytes(StandardCharsets.UTF_8))); + + return outStr; + } +} diff --git a/src/main/java/com/se/nsl/helper/ShpHelper.java b/src/main/java/com/se/nsl/helper/ShpHelper.java new file mode 100644 index 0000000..1fc3215 --- /dev/null +++ b/src/main/java/com/se/nsl/helper/ShpHelper.java @@ -0,0 +1,359 @@ +package com.se.nsl.helper; + +import cn.hutool.core.io.FileUtil; +import cn.hutool.json.JSONArray; +import cn.hutool.json.JSONObject; +import com.se.nsl.domain.dto.GeField; +import com.se.nsl.domain.dto.GeLayer; +import lombok.extern.slf4j.Slf4j; +import org.gdal.gdal.Dataset; +import org.gdal.gdal.gdal; +import org.gdal.gdalconst.gdalconst; +import org.gdal.ogr.*; +import org.gdal.osr.SpatialReference; + +import java.sql.Timestamp; +import java.time.LocalDateTime; +import java.util.List; +import java.util.Map; +import java.util.Vector; + +@Slf4j +@SuppressWarnings("ALL") +public class ShpHelper { + private static Vector<String> options; + + public static Vector<String> getOptions() { + if (null == options) { + options = new 
Vector<>(); + options.add("ENCODING=UTF-8"); + } + + return options; + } + + public static boolean createShp(String filePath, Map<String, Object> map, SpatialReference sr, Double minx, Double miny, Double maxx, Double maxy) { + Driver driver = null; + DataSource dataSource = null; + Layer layer = null; + try { + driver = ogr.GetDriverByName("ESRI shapefile"); + if (null == driver) return false; + + dataSource = driver.CreateDataSource(filePath, null); + if (null == dataSource) return false; + + layer = dataSource.CreateLayer(FileUtil.getName(filePath), sr, ogr.wkbPolygon, getOptions()); + if (null == layer) return false; + + if (null != map) createFields(layer, map); + + Feature f = new Feature(layer.GetLayerDefn()); + Geometry g = createPolygon(sr, minx, miny, maxx, maxy); + f.SetGeometry(g); + if (null != map) setValues(f, map); + + layer.CreateFeature(f); + + return true; + } catch (Exception ex) { + log.error(ex.getMessage(), ex); + return false; + } finally { + GdalHelper.delete(layer, dataSource, driver); + } + } + + private static void createFields(Layer layer, Map<String, Object> map) { + for (String key : map.keySet()) { + Object val = map.get(key); + switch (val.getClass().getTypeName()) { + //case "java.math.BigDecimal": + case "java.lang.Double": + case "double": + layer.CreateField(new FieldDefn(key, ogr.OFTReal)); + break; + case "java.lang.Long": + case "long": + layer.CreateField(new FieldDefn(key, ogr.OFTInteger64)); + break; + case "java.lang.Integer": + case "int": + layer.CreateField(new FieldDefn(key, ogr.OFTInteger)); + break; + //case "java.sql.Timestamp": + //case "java.time.LocalDate": + // layer.CreateField(new FieldDefn(key, ogr.OFTDateTime)); + // break; + default: + layer.CreateField(new FieldDefn(key, ogr.OFTString)); + break; + } + } + } + + private static void setValues(Feature f, Map<String, Object> map) { + for (String key : map.keySet()) { + Object val = map.get(key); + switch (val.getClass().getTypeName()) { + case "java.lang.Double": + case "double": + f.SetField(key, Double.parseDouble(val.toString())); + break; + case "java.lang.Long": + case "long": + f.SetField(key, Long.parseLong(val.toString())); + break; + case "java.lang.Integer": + case "int": + f.SetField(key, Integer.parseInt(val.toString())); + break; + default: + f.SetField(key, val.toString()); + break; + } + } + } + + public static Geometry createPolygon(SpatialReference sr, Double minx, Double miny, Double maxx, Double maxy) { + String epsg = sr.GetAuthorityCode(null); + if (!("4326".equals(epsg) || "4490".equals(epsg))) { + double[] dmin = GdalHelper.fromWgs84(sr, minx, miny); + double[] dmax = GdalHelper.fromWgs84(sr, maxx, maxy); + minx = dmin[0]; + miny = dmin[1]; + maxx = dmax[0]; + maxy = dmax[1]; + } + + Geometry ring = new Geometry(ogr.wkbLinearRing); + ring.AddPoint_2D(minx, maxy); + ring.AddPoint_2D(maxx, maxy); + ring.AddPoint_2D(maxx, miny); + ring.AddPoint_2D(minx, miny); + ring.AddPoint_2D(minx, maxy); + + Geometry poly = new Geometry(ogr.wkbPolygon); + poly.AddGeometry(ring); + poly.AssignSpatialReference(sr); + + return poly; + } + + public static boolean createShp(String filePath, GeLayer geLayer) { + Driver driver = null; + DataSource dataSource = null; + Layer layer = null; + try { + driver = ogr.GetDriverByName("ESRI shapefile"); + if (null == driver) return false; + + dataSource = driver.CreateDataSource(filePath, null); + if (null == dataSource) return false; + + int geoType = getGeometryType(geLayer.getQueryType()); + layer = 
dataSource.CreateLayer(FileUtil.getName(filePath), geLayer.getDb().getSpatialReference(), geoType, getOptions()); + if (null == layer) return false; + + createLayerFields(layer, geLayer.getFields()); + + FeatureDefn featureDefn = layer.GetLayerDefn(); + for (int i = 0, c = geLayer.getData().size(); i < c; i++) { + Feature f = new Feature(featureDefn); + JSONObject data = geLayer.getData().getJSONObject(i).getJSONObject("properties"); + setFeatureData(f, geLayer.getFields(), data); + + JSONObject geom = geLayer.getData().getJSONObject(i).getJSONObject("geometry"); + Geometry g = createGeometry(geLayer, geom); + f.SetGeometry(g); + + layer.CreateFeature(f); + } + + return true; + } catch (Exception ex) { + log.error(ex.getMessage(), ex); + return false; + } finally { + GdalHelper.delete(layer, dataSource, driver); + } + } + + private static void setFeatureData(Feature f, List<GeField> fields, JSONObject data) { + for (int i = 0, c = fields.size(); i < c; i++) { + GeField geField = fields.get(i); + switch (geField.getType()) { + case "int": + f.SetField(i, data.getInt(geField.getName())); + break; + case "long": + f.SetField(i, data.getLong(geField.getName())); + break; + case "double": + f.SetField(i, data.getDouble(geField.getName())); + break; + case "datetime": + long date = data.getLong(geField.getName()); + Timestamp time = new Timestamp(date); + setTimestamp(f, i, time); + break; + default: + f.SetField(i, data.getStr(geField.getName())); + break; + } + } + } + + private static void setTimestamp(Feature f, int i, Timestamp time) { + if (null == time) return; + + LocalDateTime local = time.toLocalDateTime(); + f.SetField(i, local.getYear(), local.getMonthValue(), local.getDayOfMonth(), local.getHour(), local.getMinute(), local.getSecond(), 8); + } + + public static Geometry createGeometry(GeLayer geLayer, JSONObject geom) { + String type = geom.getStr("type"); + JSONArray cs = geom.getJSONArray("coordinates"); + + Geometry g = null; + switch (type) { + case "Point": + g = new Geometry(ogr.wkbPoint); + g.AddPoint_2D(cs.getDouble(0), cs.getDouble(1)); + break; + case "MultiLineString": + g = new Geometry(ogr.wkbMultiLineString); + for (int i = 0, c = cs.size(); i < c; i++) { + Geometry line = new Geometry(ogr.wkbLineString); + JSONArray lineArr = cs.getJSONArray(i); + for (int j = 0, d = lineArr.size(); j < d; j++) { + JSONArray arr = lineArr.getJSONArray(j); + line.AddPoint_2D(arr.getDouble(0), arr.getDouble(1)); + } + g.AddGeometry(line); + } + break; + case "MultiPolygon": + g = new Geometry(ogr.wkbMultiPolygon); + for (int i = 0, c = cs.size(); i < c; i++) { + Geometry poly = new Geometry(ogr.wkbPolygon); + JSONArray polyArr = cs.getJSONArray(i); + for (int j = 0, d = polyArr.size(); j < d; j++) { + Geometry ring = new Geometry(ogr.wkbLinearRing); + JSONArray ringArr = polyArr.getJSONArray(j); + for (int k = 0, e = ringArr.size(); k < e; k++) { + JSONArray arr = ringArr.getJSONArray(k); + ring.AddPoint_2D(arr.getDouble(0), arr.getDouble(1)); + } + poly.AddGeometry(ring); + } + g.AddGeometry(poly); + } + break; + } + return g; + } + + private static int getGeometryType(String type) { + switch (type) { + case "point": + return ogr.wkbPoint; + case "polyline": + return ogr.wkbMultiLineString; + case "polygon": + return ogr.wkbMultiPolygon; + default: + return ogr.wkbUnknown; + } + } + + private static void createLayerFields(Layer layer, List<GeField> fields) { + for (int i = 0, c = fields.size(); i < c; i++) { + GeField f = fields.get(i); + FieldDefn fd = new FieldDefn(f.getName(), 
getFieldType(f)); + + layer.CreateField(fd, i); + } + } + + private static Integer getFieldType(GeField f) { + switch (f.getType()) { + case "int": + return ogr.OFTInteger; + case "long": + return ogr.OFTInteger64; + case "double": + return ogr.OFTReal; + case "datetime": + return ogr.OFTDateTime; + default: + return ogr.OFTString; + } + } + + private String getEpsg(SpatialReference sr) { + return sr.GetAuthorityCode(null); + } + + public static boolean polygonize2Shp(Dataset ds, String filePath) { + Driver driver = null; + DataSource dataSource = null; + Layer layer = null; + try { + driver = ogr.GetDriverByName("ESRI shapefile"); + if (null == driver) return false; + + dataSource = driver.CreateDataSource(filePath, null); + if (null == dataSource) return false; + + layer = dataSource.CreateLayer(FileUtil.getName(filePath), ds.GetSpatialRef(), ogr.wkbPolygon, getOptions()); + if (null == layer) return false; + + layer.CreateField(new FieldDefn("val", ogr.OFTReal)); + + gdal.Polygonize(ds.GetRasterBand(1), ds.GetRasterBand(1).GetMaskBand(), layer, 0); + + return true; + } catch (Exception ex) { + log.error(ex.getMessage(), ex); + return false; + } finally { + GdalHelper.delete(layer, dataSource, driver); + } + } + + public static boolean polygonize2Geojson(Dataset ds, String filePath) { + Driver driver = null; + DataSource dataSource = null; + Layer layer = null; + try { + driver = ogr.GetDriverByName("GeoJSON"); + if (null == driver) return false; + + dataSource = driver.CreateDataSource(filePath, null); + if (null == dataSource) return false; + + layer = dataSource.CreateLayer(FileUtil.getName(filePath), ds.GetSpatialRef(), ogr.wkbPolygon); + if (null == layer) return false; + + layer.CreateField(new FieldDefn("val", ogr.OFTReal)); + + gdal.Polygonize(ds.GetRasterBand(1), ds.GetRasterBand(1).GetMaskBand(), layer, 0); + + return true; + } catch (Exception ex) { + log.error(ex.getMessage(), ex); + return false; + } finally { + GdalHelper.delete(layer, dataSource, driver); + } + } + + public static void test() { + String path = "D:\\simu\\out\\20241010095328\\waters\\1730217635000\\"; + + Dataset ds = gdal.Open(path + "water.tif", gdalconst.GA_ReadOnly); + ds.SetSpatialRef(GdalHelper.createSpatialReference(4548)); + polygonize2Geojson(ds, path + "water.geojson"); + } +} diff --git a/src/main/java/com/se/nsl/helper/StringHelper.java b/src/main/java/com/se/nsl/helper/StringHelper.java new file mode 100644 index 0000000..97ead29 --- /dev/null +++ b/src/main/java/com/se/nsl/helper/StringHelper.java @@ -0,0 +1,166 @@ +package com.se.nsl.helper; + +import java.math.BigDecimal; +import java.sql.Timestamp; +import java.text.SimpleDateFormat; +import java.util.ArrayList; +import java.util.Date; +import java.util.List; +import java.util.UUID; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +@SuppressWarnings("ALL") +public class StringHelper { + public final static String COMMA = ","; + + public final static String PWD_REG = "^(?![a-zA-Z]+$)(?![A-Z0-9]+$)(?![A-Z\\W!@#$%^&*`~()\\-_+=,.?;<>]+$)(?![a-z0-9]+$)(?![a-z\\W!@#$%^&*`~()\\-_+=,.?;<>]+$)(?![0-9\\W!@#$%^&*`~()\\-_+=,.?;<>]+$)[a-zA-Z0-9\\W!@#$%^&*`~()\\-_+=,.?;<>]{12,20}$"; + + public static final Pattern NUMBER_PATTERN = Pattern.compile("-?\\d+(\\.\\d+)?"); + + public static final SimpleDateFormat YMD_FORMAT = new SimpleDateFormat("yyyy-MM-dd"); + + public static final SimpleDateFormat YMDHMS_FORMAT = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); + + public static final SimpleDateFormat YMD2_FORMAT = new 
SimpleDateFormat("yyyyMMdd"); + + public static final SimpleDateFormat YMDHMS2_FORMAT = new SimpleDateFormat("yyyyMMddHHmmss"); + + public static boolean isInteger(String str) { + return str != null && str.matches("[0-9]+"); + } + + public static boolean isNumeric(String str) { + return str != null && str.matches("-?\\d+(\\.\\d+)?"); + } + + public static boolean isNumeric2(String str) { + return str != null && NUMBER_PATTERN.matcher(str).matches(); + } + + public static Pattern datePattern = Pattern.compile("^((\\d{2}(([02468][048])|([13579][26]))[\\-\\/]((((0?[13578])|(1[02]))[\\-\\/]((0?[1-9])|([1-2][0-9])|(3[01])))|(((0?[469])|(11))[\\-\\/]((0?[1-9])|([1-2][0-9])|(30)))|(0?2[\\-\\/]((0?[1-9])|([1-2][0-9])))))|(\\d{2}(([02468][1235679])|([13579][01345789]))[\\-\\/]((((0?[13578])|(1[02]))[\\-\\/]((0?[1-9])|([1-2][0-9])|(3[01])))|(((0?[469])|(11))[\\-\\/]((0?[1-9])|([1-2][0-9])|(30)))|(0?2[\\-\\/]((0?[1-9])|(1[0-9])|(2[0-8]))))))(\\s(((0?[0-9])|([1-2][0-3]))\\:([0-5]?[0-9])((\\s)|(\\:([0-5]?[0-9])))))?$"); + + public static Pattern sqlPattern = Pattern.compile("|and|exec|execute|insert|select|delete|update|count|drop|\\*|%|chr|mid|master|truncate|char|declare|sitename|net user|xp_cmdshell|;|or|-|\\+|,|like"); + + public static Date parseDate(String str) { + try { + return YMD_FORMAT.parse(str); + } catch (Exception ex) { + return null; + } + } + + public static Date parseTime(String str) { + try { + return YMDHMS_FORMAT.parse(str); + } catch (Exception e) { + return null; + } + } + + public static boolean isDate(String strDate) { + Matcher m = datePattern.matcher(strDate); + + return m.matches(); + } + + public static boolean isNull(String str) { + return null == str || str.length() == 0; + } + + public static boolean isEmpty(String str) { + // return null == str || "".equals(str) + return null == str || "".equals(str.trim()); + } + + public static String getLikeStr(String str) { + return isEmpty(str) ? null : "%" + str.trim() + "%"; + } + + public static String getLikeUpperStr(String str) { + return isEmpty(str) ? null : "%" + str.trim().toUpperCase() + "%"; + } + + public static String getRightLike(String str) { + return isEmpty(str) ? 
null : str.trim() + "%"; + } + + public static String getGeomWkt(String wkt) { + if (isEmpty(wkt)) { + return "null"; + } + + return String.format("ST_GeomFromText('%s')", wkt); + } + + public static String firstCharToUpperCase(String str) { + return str.substring(0, 1).toUpperCase() + str.substring(1); + } + + public static String firstCharToLowerCase(String str) { + return str.substring(0, 1).toLowerCase() + str.substring(1); + } + + public static boolean isSqlInjection(String str) { + if (null == str) { + return false; + } + + Matcher m = sqlPattern.matcher(str); + + return m.matches(); + } + + public static boolean isPwdInvalid(String pwd) { + return !Pattern.matches(PWD_REG, pwd); + } + + public static String getGuid() { + return UUID.randomUUID().toString(); + } + + public static long getMinuteDifference(Timestamp ts) { + return (ts.getTime() - System.currentTimeMillis()) / 1000 / 60; + } + + public static <T> String join(List<T> list, String join) { + if (null == list || list.isEmpty()) { + return ""; + } + + StringBuilder sb = new StringBuilder(); + for (T t : list) { + if (null != t) { + sb.append(t.toString()).append(join); + } + } + + if (sb.length() > 0 && sb.lastIndexOf(join) == sb.length() - join.length()) { + sb.delete(sb.length() - join.length(), sb.length()); + } + + return sb.toString(); + } + + public static List<Integer> strToIntegers(String str) { + if (isEmpty(str)) { + return null; + } + + List<Integer> list = new ArrayList<>(); + for (String s : str.split(COMMA)) { + list.add(Integer.parseInt(s)); + } + + return list; + } + + public static double setScale(double val, int scale) { + BigDecimal bd = new BigDecimal(Double.toString(val)); + BigDecimal roundedValue = bd.setScale(scale, BigDecimal.ROUND_HALF_UP); + + String str = roundedValue.toPlainString(); + + return Double.parseDouble(str); + } +} diff --git a/src/main/java/com/se/nsl/helper/WebHelper.java b/src/main/java/com/se/nsl/helper/WebHelper.java new file mode 100644 index 0000000..ffeeb4c --- /dev/null +++ b/src/main/java/com/se/nsl/helper/WebHelper.java @@ -0,0 +1,316 @@ +package com.se.nsl.helper; + +import com.alibaba.fastjson.JSON; +import lombok.extern.slf4j.Slf4j; +import org.springframework.http.HttpStatus; +import org.springframework.web.context.request.RequestContextHolder; +import org.springframework.web.context.request.ServletRequestAttributes; + +import javax.servlet.ServletOutputStream; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; +import java.io.*; +import java.net.InetAddress; +import java.net.URLEncoder; +import java.net.UnknownHostException; +import java.sql.Timestamp; +import java.text.SimpleDateFormat; +import java.util.*; + +@Slf4j +@SuppressWarnings("ALL") +public class WebHelper { + public final static String POINT = "."; + + private final static String COMMA = ","; + + private final static String UNKNOWN = "unknown"; + + public static boolean isWin() { + String osName = System.getProperty("os.name"); + + return osName.startsWith("Windows"); + } + + public static int getCpuCores() { + return Runtime.getRuntime().availableProcessors(); + } + + public final static SimpleDateFormat YMDHMS = new SimpleDateFormat("yyyyMMddHHmmss"); + + public static boolean isEmpty(String str) { + return null == str || "".equals(str.trim()); + } + + public static String getGuid() { + return UUID.randomUUID().toString(); + } + + public static String getHostIp() { + try { + return InetAddress.getLocalHost().getHostAddress(); + } catch (UnknownHostException e) { 
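+            // Lookup failed; fall back to the loopback address returned below.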
+ // + } + return "127.0.0.1"; + } + + public static String getIpAddress(HttpServletRequest request) { + String ip = request.getHeader("X-Forwarded-For"); + if (ip == null || ip.length() == 0 || UNKNOWN.equalsIgnoreCase(ip)) { + ip = request.getHeader("Proxy-Client-IP"); + } + if (ip == null || ip.length() == 0 || UNKNOWN.equalsIgnoreCase(ip)) { + ip = request.getHeader("WL-Proxy-Client-IP"); + } + if (ip == null || ip.length() == 0 || UNKNOWN.equalsIgnoreCase(ip)) { + ip = request.getHeader("HTTP_X_FORWARDED_FOR"); + } + if (ip == null || ip.length() == 0 || UNKNOWN.equalsIgnoreCase(ip)) { + ip = request.getHeader("HTTP_X_FORWARDED"); + } + if (ip == null || ip.length() == 0 || UNKNOWN.equalsIgnoreCase(ip)) { + ip = request.getHeader("HTTP_X_CLUSTER_CLIENT_IP"); + } + if (ip == null || ip.length() == 0 || UNKNOWN.equalsIgnoreCase(ip)) { + ip = request.getHeader("HTTP_CLIENT_IP"); + } + if (ip == null || ip.length() == 0 || UNKNOWN.equalsIgnoreCase(ip)) { + ip = request.getHeader("HTTP_FORWARDED_FOR"); + } + if (ip == null || ip.length() == 0 || UNKNOWN.equalsIgnoreCase(ip)) { + ip = request.getHeader("HTTP_FORWARDED"); + } + if (ip == null || ip.length() == 0 || UNKNOWN.equalsIgnoreCase(ip)) { + ip = request.getHeader("HTTP_VIA"); + } + if (ip == null || ip.length() == 0 || UNKNOWN.equalsIgnoreCase(ip)) { + ip = request.getHeader("REMOTE_ADDR"); + } + if (ip == null || ip.length() == 0 || UNKNOWN.equalsIgnoreCase(ip)) { + ip = request.getRemoteAddr(); + } + if (ip.contains(COMMA)) { + return ip.split(",")[0]; + } + + return ip; + } + + public static Timestamp getCurrentTimestamp() { + return new Timestamp(System.currentTimeMillis()); + } + + public static HttpServletRequest getRequest() { + ServletRequestAttributes servletRequestAttributes = (ServletRequestAttributes) RequestContextHolder.getRequestAttributes(); + + return servletRequestAttributes.getRequest(); + } + + public static HttpServletResponse getResponse() { + ServletRequestAttributes servletRequestAttributes = (ServletRequestAttributes) RequestContextHolder.getRequestAttributes(); + + return servletRequestAttributes.getResponse(); + } + + public static boolean writeJson2Page(HttpServletResponse res, HttpStatus status, Object obj) { + res.setStatus(status.value()); + + return writeStr2Page(res, JSON.toJSONString(obj)); + } + + public static boolean writeJson2Page(HttpServletResponse res, HttpStatus status, String str) { + res.setStatus(status.value()); + + Map<String, Object> map = new HashMap(2); + map.put("code", status.value() >= 400 ? 
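+        // Error statuses (>= 400) are reported as code -1; success statuses map to 0.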
-1 : 0); + map.put("msg", str); + + return writeStr2Page(res, JSON.toJSONString(map)); + } + + public static boolean writeStr2Page(HttpServletResponse res, HttpStatus status, String str) { + res.setStatus(status.value()); + + return writeStr2Page(res, str); + } + + public static boolean writeStr2Page(HttpServletResponse res, String str) { + try { + res.setContentType("application/json;charset=UTF-8"); + res.setHeader("Cache-Control", "no-cache"); + res.setHeader("Pragma", "No-cache"); + res.setDateHeader("Expires", 0); + + PrintWriter out = res.getWriter(); + out.print(str); + + out.flush(); + out.close(); + } catch (Exception ex) { + log.error(ex.getMessage(), ex); + } + + return false; + } + + public static void writeBytes(byte[] bytes, HttpServletResponse res) throws IOException { + res.setContentType("application/octet-stream"); + + if (null == bytes) { + res.setStatus(HttpStatus.NOT_FOUND.value()); + return; + } + + OutputStream os = res.getOutputStream(); + os.write(bytes, 0, bytes.length); + os.close(); + } + + public static void writePng(String filePath, HttpServletResponse res) throws IOException { + File file = new File(filePath); + if (!file.exists() || file.isDirectory()) { + res.setStatus(HttpStatus.NOT_FOUND.value()); + return; + } + + String fileName = URLEncoder.encode(filePath, "UTF-8").replace("+", "%20"); + res.setHeader("Content-Disposition", "attachment; filename*=UTF-8''" + fileName); + res.setCharacterEncoding("UTF-8"); + res.setContentType("image/png"); + + writeFile(filePath, res); + } + + public static int getRandomInt(int min, int max) { + return new Random().nextInt(max) % (max - min + 1) + min; + } + + public static void download(String file, String fileName, HttpServletResponse res) throws Exception { + download(file, fileName, false, res); + } + + public static void download(String file, String fileName, boolean inline, HttpServletResponse res) throws Exception { + if (isEmpty(fileName)) { + fileName = YMDHMS.format(new Date()); + } + fileName = URLEncoder.encode(fileName, "UTF-8").replace("+", "%20"); + String dispose = inline ? 
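+        // "inline" asks the browser to render the file in place; "attachment" forces a download dialog.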
"inline" : "attachment"; + + res.setHeader("Content-Disposition", dispose + "; filename*=UTF-8''" + fileName); + res.setCharacterEncoding("UTF-8"); + + String ext = getExtension(file); + String mime = getMime(ext); + res.setContentType(mime); + + writeFile(file, res); + } + + private static void writeFile(String file, HttpServletResponse res) throws IOException { + ServletOutputStream outputStream = res.getOutputStream(); + FileInputStream fileInputStream = new FileInputStream(file); + + int len = 0; + byte[] bytes = new byte[1024]; + while ((len = fileInputStream.read(bytes)) != -1) { + outputStream.write(bytes, 0, len); + outputStream.flush(); + } + + fileInputStream.close(); + outputStream.close(); + } + + public static String getExtension(String fileName) { + if (isEmpty(fileName)) { + return ""; + } + + int idx = fileName.lastIndexOf(POINT); + if (idx == -1) { + return ""; + } + + return fileName.substring(idx).toLowerCase(); + } + + public static String getMime(String ext) { + switch (ext) { + case ".tif": + case ".tiff": + return "image/tiff"; + case ".img": + return "application/x-img"; + case ".gif": + return "image/gif"; + case ".jpg": + case ".jpeg": + return "image/jpeg"; + case ".png": + return "image/png"; + case ".mp3": + return "audio/mp3"; + case ".mp4": + return "video/mpeg4"; + case ".avi": + return "video/avi"; + case ".mpg": + case ".mpeg": + return "video/mpg"; + case ".wav": + return "audio/wav"; + case ".wma": + return "audio/x-ms-wma"; + case ".swf": + return "application/x-shockwave-flash"; + case ".wmv": + return "video/x-ms-wmv"; + case ".rm": + return "application/vnd.rn-realmedia"; + case ".rmvb": + return "application/vnd.rn-realmedia-vbr"; + case ".js": + return "application/x-javascript"; + case ".css": + return "text/css"; + case ".asp": + return "text/asp"; + case ".mht": + return "message/rfc822"; + case ".jsp": + case ".htm": + case ".html": + case ".xhtml": + return "text/html"; + case ".xml": + case ".svg": + return "text/xml"; + case ".txt": + return "text/plain"; + case ".dbf": + return "application/x-dbf"; + case ".mdb": + return "application/msaccess"; + case ".pdf": + return "application/pdf"; + case ".ppt": + case ".pptx": + return "application/x-ppt"; + case ".doc": + case ".docx": + return "application/msword"; + case ".xls": + case ".xlsx": + return "application/vnd.ms-excel"; + case ".dgn": + return "application/x-dgn"; + case ".dwg": + return "application/x-dwg"; + case ".ext": + return "application/x-msdownload"; + default: + return "application/octet-stream"; + } + } +} diff --git a/src/main/java/com/se/nsl/mapper/SimuMapper.java b/src/main/java/com/se/nsl/mapper/SimuMapper.java new file mode 100644 index 0000000..5e037f3 --- /dev/null +++ b/src/main/java/com/se/nsl/mapper/SimuMapper.java @@ -0,0 +1,13 @@ +package com.se.nsl.mapper; + +import com.baomidou.mybatisplus.core.mapper.BaseMapper; +import com.se.nsl.domain.po.SimuPo; +import org.apache.ibatis.annotations.Mapper; +import org.springframework.stereotype.Repository; + +@Mapper +@Repository +@SuppressWarnings("ALL") +public interface SimuMapper extends BaseMapper<SimuPo> { + Integer selectMaxId(); +} diff --git a/src/main/java/com/se/nsl/service/DbService.java b/src/main/java/com/se/nsl/service/DbService.java new file mode 100644 index 0000000..6afe463 --- /dev/null +++ b/src/main/java/com/se/nsl/service/DbService.java @@ -0,0 +1,153 @@ +package com.se.nsl.service; + +import cn.hutool.json.JSONArray; +import cn.hutool.json.JSONObject; +import cn.hutool.json.JSONUtil; +import 
com.se.nsl.config.PropertiesConfig; +import com.se.nsl.domain.dto.GeDb; +import com.se.nsl.domain.vo.QueryVo; +import com.se.nsl.helper.CaffeineHelper; +import com.se.nsl.helper.RsaHelper; +import com.se.nsl.helper.StringHelper; +import com.se.nsl.helper.WebHelper; +import lombok.extern.slf4j.Slf4j; +import org.gdal.ogr.Geometry; +import org.springframework.http.HttpStatus; +import org.springframework.stereotype.Service; +import org.springframework.web.client.RestTemplate; + +import javax.annotation.Resource; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; +import java.util.Date; +import java.util.HashMap; +import java.util.Map; + +@Slf4j +@Service +@SuppressWarnings("ALL") +public class DbService { + @Resource + PropertiesConfig config; + + @Resource + RestTemplate restTemplate; + + @Resource + GedbService gedbService; + + private final static String INFO_KEY = "db_info_key"; + + private final static String CONFIG_KEY = "db_config_key"; + + private final static String PUBLIC_KEY = "db_public_key"; + + private final static long ONE_DAY = 24 * 60 * 60 * 1000; + + // helper.service(req, res, uri); + public String info() throws Exception { + Object obj = CaffeineHelper.get(INFO_KEY); + if (obj instanceof String) { + return (String) obj; + } + + String token = gedbService.getToken(); + GeDb db = gedbService.getGeDb(token); + String date = StringHelper.YMD_FORMAT.format(new Date(System.currentTimeMillis() - ONE_DAY)); + String url = String.format("%sgeo-service/statis/layer/data/info?dbid=%s&token=%s&caldate=%s", config.getHost(), db.getDbid(), token, date); + + String rs = restTemplate.getForObject(url, String.class); + if (!StringHelper.isEmpty(rs)) { + CaffeineHelper.put(INFO_KEY, rs); + } + + return rs; + } + + public String getConfig() throws Exception { + Object obj = CaffeineHelper.get(CONFIG_KEY); + if (obj instanceof String) { + return (String) obj; + } + + String token = gedbService.getToken(); + GeDb db = gedbService.getGeDb(token); + String url = String.format("%sgeo-service/entitydb/map/config?dbid=%s&token=%s", config.getHost(), db.getDbid(), token); + + String rs = restTemplate.getForObject(url, String.class); + if (!StringHelper.isEmpty(rs)) { + CaffeineHelper.put(CONFIG_KEY, rs); + } + + return rs; + } + + public void query(QueryVo vo, HttpServletRequest req, HttpServletResponse res) throws Exception { + String token = gedbService.getToken(); + GeDb db = gedbService.getGeDb(token); + String url = config.getHost() + "geo-service/entitydbdata/layer/query"; + + Map<String, Object> map = new HashMap<>(6); + map.put("token", token); + map.put("dbid", db.getDbid()); + map.put("layerid", vo.getLayerid()); + //map.put("returnCountOnly", true); + //map.put("inSR", 4326); + map.put("containCount", vo.getContainCount()); + map.put("count", vo.getCount()); + map.put("start", vo.getStart()); + map.put("querytype", vo.getReturnGeom() ? 
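+        // When geometry is requested, derive the query type (point/polyline/polygon) from the layer config; otherwise query plain entities.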
getQueryType(vo.getLayerid()) : "entity"); + if (!StringHelper.isEmpty(vo.getWhere())) { + String where = RsaHelper.encrypt(getPublicKey(), vo.getWhere()); + map.put("where", where); + } + if (!StringHelper.isEmpty(vo.getGeometry())) { + Geometry g = Geometry.CreateFromWkt(vo.getGeometry()); + if (null != vo.getBuffer()) { + g = g.Buffer(vo.getBuffer() * 0.00000899928); + } + + map.put("geometry", g.ExportToWkt()); + map.put("inSR", 4326); + map.put("outSR", 4326); + } + + String rs = restTemplate.postForObject(url, map, String.class); + WebHelper.writeStr2Page(res, HttpStatus.OK, rs); + } + + private String getPublicKey() { + Object obj = CaffeineHelper.get(PUBLIC_KEY); + if (obj instanceof String) { + return (String) obj; + } + + JSONObject jsonObject = restTemplate.getForObject(config.getHost() + "geo-service/setting/publickey", JSONObject.class); + + String key = jsonObject.getStr("data"); + if (!StringHelper.isEmpty(key)) { + CaffeineHelper.put(PUBLIC_KEY, key); + } + + return key; + } + + private String getQueryType(String layerid) { + try { + String json = getConfig(); + JSONObject obj = JSONUtil.parseObj(json); + + JSONArray layers = obj.getJSONObject("data").getJSONArray("layers"); + for (int i = 0, c = layers.size(); i < c; i++) { + JSONObject layer = layers.getJSONObject(i); + if (layerid.equals(layer.getStr("id"))) { + return GedbService.getQueryType(layer); + } + } + } catch (Exception ex) { + log.error(ex.getMessage(), ex); + } + + return "entity"; + } +} diff --git a/src/main/java/com/se/nsl/service/GedbService.java b/src/main/java/com/se/nsl/service/GedbService.java new file mode 100644 index 0000000..80dcbb7 --- /dev/null +++ b/src/main/java/com/se/nsl/service/GedbService.java @@ -0,0 +1,515 @@ +package com.se.nsl.service; + +import cn.hutool.core.io.FileUtil; +import cn.hutool.json.JSONArray; +import cn.hutool.json.JSONObject; +import cn.hutool.json.JSONUtil; +import com.se.nsl.config.PropertiesConfig; +import com.se.nsl.domain.dto.GeDb; +import com.se.nsl.domain.dto.GeField; +import com.se.nsl.domain.dto.GeFile; +import com.se.nsl.domain.dto.GeLayer; +import com.se.nsl.domain.po.DataPo; +import com.se.nsl.helper.CaffeineHelper; +import com.se.nsl.helper.RsaHelper; +import com.se.nsl.helper.ShpHelper; +import com.se.nsl.helper.StringHelper; +import lombok.extern.slf4j.Slf4j; +import org.gdal.gdal.Dataset; +import org.gdal.gdal.WarpOptions; +import org.gdal.gdal.gdal; +import org.gdal.gdalconst.gdalconst; +import org.gdal.ogr.Geometry; +import org.gdal.osr.SpatialReference; +import org.springframework.stereotype.Service; +import org.springframework.util.CollectionUtils; +import org.springframework.web.client.RestTemplate; + +import javax.annotation.Resource; +import java.io.File; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.net.URL; +import java.net.URLConnection; +import java.util.*; +import java.util.stream.Collectors; + +@Slf4j +@Service +@SuppressWarnings("ALL") +public class GedbService { + String password; + + @Resource + PropertiesConfig config; + + @Resource + RestTemplate restTemplate; + + private final static String DB_KEY = "gedb_db"; + + private final static String TOKEN_KEY = "gedb_token"; + + public boolean test(DataPo data) throws Exception { + createPath(config.getInPath() + File.separator + data.getInPath()); + + String token = getToken(); + GeDb db = connectGedb(token, data); + + copeVectors(token, data, db); + + copeDem(token, data); + + return true; + } + + private void createPath(String path) 
{ + File f = new File(path); + if (f.exists() && f.isDirectory()) { + FileUtil.del(f); + } + f.mkdirs(); + } + + public String getToken() throws Exception { + Object obj = CaffeineHelper.get(TOKEN_KEY); + if (obj instanceof String) { + return obj.toString(); + } + + String token = getTokenByServer(); + if (null == token) throw new Exception("鑾峰彇GEDB浠ょ墝澶辫触"); + + CaffeineHelper.put(TOKEN_KEY, token); + + return token; + } + + private String getTokenByServer() throws Exception { + Map<String, Object> map = new HashMap<>(2); + map.put("userid", config.getUser()); + map.put("password", getPassword()); + + JSONObject obj = restTemplate.postForObject(config.getHost() + "account-service/security/login", map, JSONObject.class); + log.info(obj.toString()); + + JSONObject data = obj.getJSONObject("data"); + + return data.getStr("token"); + } + + private String getPassword() throws Exception { + if (StringHelper.isEmpty(password)) { + String key = getPublicKey(); + RsaHelper.setPublicKey(key); + password = RsaHelper.encrypt(config.getPwd()); + } + + return password; + } + + public String getPublicKey() { + //{"datetime":"2024-09-12 17:24:38","code":200,"data":"MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQCtFwJCh2taVTEi05C8qT2oG7Y+rDmJhlO4zicpSeRtiro9LsytePeWI7BXM6sfDU0WeKun1izawcfgGkZgnoJuMBluAOKI1tL0uCrR+DreNLqMVtnXHwoWEIk/hGJedDWaf3q22aGDyEB5h9qCq0JklSShP1Ih4ppap4LmgxdTPQIDAQAB"} + JSONObject obj = restTemplate.getForObject(config.getHost() + "account-service/security/publickey", JSONObject.class); + + return obj.getStr("data"); + } + + public GeDb connectGedb(String token, DataPo data) { + GeDb db = getGeDb(token); + db.setBbox(data.getBbox()); + data.setEpsg(db.getEpsg()); + + return db; + } + + public GeDb getGeDb(String token) { + Object obj = CaffeineHelper.get(DB_KEY); + if (obj instanceof GeDb) { + return (GeDb) obj; + } + + Map<String, Object> map = new HashMap<>(1); + map.put("token", token); + + JSONObject jsonObject = restTemplate.postForObject(config.getHost() + "geo-service/entitydb/list/canview", map, JSONObject.class); + JSONArray data = jsonObject.getJSONArray("data"); + + List<GeDb> list = JSONUtil.toList(data, GeDb.class); + if (CollectionUtils.isEmpty(list)) return null; + + GeDb gedb = list.stream().filter(db -> null != db.getName() && db.getName().contains(config.getDbName())).findFirst().orElse(null); + if (null != gedb) CaffeineHelper.put(DB_KEY, gedb); + + return gedb; + } + + public List<GeLayer> getLayers(String token, GeDb db) { + String uri = String.format("%sgeo-service/entitydb/map/config?dbid=%s&token=%s", config.getHost(), db.getDbid(), token); + JSONObject obj = restTemplate.getForObject(uri, JSONObject.class); + + JSONObject data = obj.getJSONObject("data"); + JSONArray arr = data.getJSONArray("layers"); + if (null == arr || arr.size() == 0) return null; + + List<GeLayer> layers = new ArrayList<>(); + for (int i = 0, c = arr.size(); i < c; i++) { + JSONObject jb = arr.getJSONObject(i); + String name = jb.getStr("name"); + if (config.getLayerNames().contains(name)) { + String id = jb.getStr("id"); + String queryType = getQueryType(jb); + List<GeField> fields = JSONUtil.toList(jb.getJSONArray("fields"), GeField.class); + fields = fields.stream().filter(f -> !config.getSysFields().contains(f.getName())).collect(Collectors.toList()); + String shpName = config.getShpNames().get(config.getLayerNames().indexOf(name)); + + layers.add(new GeLayer(id, name, queryType, fields, shpName, db)); + } + } + + return layers; + } + + public static String getQueryType(JSONObject jb) { + 
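+        // A layer that defines "pointlod" is treated as a point layer, "polylinelod" as a polyline layer;
+        // everything else falls back to "polygon".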
String qt = jb.getStr("pointlod"); + if (!StringHelper.isEmpty(qt)) { + return "point"; + } + + qt = jb.getStr("polylinelod"); + if (!StringHelper.isEmpty(qt)) { + return "polyline"; + } + + return "polygon"; + } + + public void queryData(String token, GeDb db, List<GeLayer> layers) throws Exception { + for (GeLayer layer : layers) { + int count = getCount(token, db, layer); + if (0 == count) throw new Exception(layer.getName() + "锛屽浘灞傛暟鎹负绌�"); + + int pageCount = (count - 1) / config.getPageSize() + 1; + for (int i = 0; i < pageCount; i++) { + JSONArray data = query(token, db, layer, i + 1, config.getPageSize()); + if (null != data && data.size() > 0) { + layer.addData(data); + } + } + } + } + + public boolean queryBboxCount(String token, GeDb db, List<GeLayer> layers) { + for (GeLayer layer : layers) { + int count = getCount(token, db, layer); + if (0 == count) return false; + } + + return true; + } + + private int getCount(String token, GeDb db, GeLayer layer) { + Map<String, Object> map = new HashMap<>(6); + map.put("token", token); + map.put("dbid", db.getDbid()); + map.put("bbox", db.getBbox()); + map.put("layerid", layer.getId()); + map.put("returnCountOnly", true); + map.put("inSR", 4326); + + JSONObject obj = restTemplate.postForObject(config.getHost() + "geo-service/entitydbdata/layer/query", map, JSONObject.class); + if (null == obj || 200 != obj.getInt("code")) return 0; + + return obj.getInt("data"); + } + + private JSONArray query(String token, GeDb db, GeLayer layer, int start, int count) { + Map<String, Object> map = new HashMap<>(9); + map.put("token", token); + map.put("start", start); + map.put("count", count); + map.put("dbid", db.getDbid()); + map.put("bbox", db.getBbox()); + map.put("containCount", false); + map.put("layerid", layer.getId()); + map.put("querytype", layer.getQueryType()); + map.put("inSR", 4326); + + JSONObject obj = restTemplate.postForObject(config.getHost() + "geo-service/entitydbdata/layer/query", map, JSONObject.class); + if (null == obj || 200 != obj.getInt("code")) return null; + + JSONObject data = obj.getJSONObject("data"); + + return data.getJSONArray("features"); + } + + public void copeVectors(String token, DataPo data, GeDb db) throws Exception { + String basePath = config.getInPath() + File.separator + data.getInPath(); + + List<GeLayer> layers = getLayers(token, db); + queryData(token, db, layers); + checkData(data, db, layers); + createShps(basePath, layers); + createZoneShp(basePath, data, db.getSpatialReference()); + if (data.getPid() > 0) { + createFloodShp(basePath, data, db.getSpatialReference()); + } + } + + private void checkData(DataPo data, GeDb db, List<GeLayer> layers) { + GeLayer point = getLayerByName(layers, config.getLayerNames().get(0)); + GeLayer line = getLayerByName(layers, config.getLayerNames().get(1)); + GeLayer build = getLayerByName(layers, config.getLayerNames().get(2)); + + Geometry extent = ShpHelper.createPolygon(db.getSpatialReference(), data.getMinx(), data.getMiny(), data.getMaxx(), data.getMaxy()); + checkSpatialRange(extent, point); + checkSpatialRange(extent, build); + + List<String> bsm = getValues(point, "bsm"); + List<String> bsm2 = new ArrayList<>(bsm); + List<String> qdbsm = getValues(line, "qdbsm"); + List<String> qdbsm2 = new ArrayList<>(qdbsm); + List<String> zdbsm = getValues(line, "zdbsm"); + List<String> zdbsm2 = new ArrayList<>(zdbsm); + + qdbsm.removeAll(bsm2); + zdbsm.removeAll(bsm2); + removeValues(line, "qdbsm", qdbsm); + removeValues(line, "zdbsm", zdbsm); + + qdbsm = getValues(line, 
"qdbsm"); + zdbsm = getValues(line, "zdbsm"); + bsm.removeAll(qdbsm); + bsm.removeAll(zdbsm); + removeValues(point, "bsm", bsm); + + GeLayer juncLayer = new GeLayer(point, filterLayerData(point.getData())); + juncLayer.setName("闆嗘按鐐�"); + juncLayer.setShpName(config.getJunctionName()); + layers.add(juncLayer); + } + + private void checkSpatialRange(Geometry extent, GeLayer geLayer) { + int i = 0; + while (i < geLayer.getData().size()) { + JSONObject geom = geLayer.getData().getJSONObject(i).getJSONObject("geometry"); + Geometry g = ShpHelper.createGeometry(geLayer, geom); + g.AssignSpatialReference(extent.GetSpatialReference()); + + if (!extent.Intersects(g)) { + geLayer.getData().remove(i); + continue; + } + i++; + } + } + + private GeLayer getLayerByName(List<GeLayer> layers, String name) { + return layers.stream().filter(a -> a.getName().equals(name)).findFirst().orElse(null); + } + + private List<String> getValues(GeLayer layer, String field) { + JSONArray data = layer.getData(); + List<String> list = new ArrayList<>(); + + int i = 0; + while (i < data.size()) { + JSONObject obj = data.getJSONObject(i).getJSONObject("properties"); + if (StringHelper.isEmpty(obj.getStr(field))) { + data.remove(i); + continue; + } + list.add(obj.getStr(field)); + i++; + } + + return list; + } + + private void removeValues(GeLayer layer, String field, List<String> values) { + if (CollectionUtils.isEmpty(values)) return; + + int i = 0; + JSONArray data = layer.getData(); + while (i < data.size()) { + JSONObject obj = data.getJSONObject(i).getJSONObject("properties"); + if (values.contains(obj.getStr(field))) { + data.remove(i); + continue; + } + i++; + } + } + + private void createShps(String basePath, List<GeLayer> layers) throws Exception { + for (GeLayer layer : layers) { + String path = basePath + File.separator + layer.getShpName(); + if (layer.getData().isEmpty() || !ShpHelper.createShp(path, layer)) { + throw new Exception(layer.getName() + "锛屽垱寤篠hapeFile鏂囦欢澶辫触锛�"); + } + } + } + + private JSONArray filterLayerData(JSONArray data) { + JSONArray arr = new JSONArray(); + String[] strs = config.getJunctionFilter().split("="); + for (int i = 0, c = data.size(); i < c; i++) { + JSONObject obj = data.getJSONObject(i).getJSONObject("properties"); + if (strs[1].equals(obj.getStr(strs[0]))) { + arr.put(data.getJSONObject(i)); + } + } + + return arr; + } + + private void createZoneShp(String basePath, DataPo data, SpatialReference sr) { + String filePath = basePath + File.separator + config.getZoneName(); + ShpHelper.createShp(filePath, null, sr, data.getMinx(), data.getMiny(), data.getMaxx(), data.getMaxy()); + } + + private void createFloodShp(String basePath, DataPo data, SpatialReference sr) { + String filePath = basePath + File.separator + config.getBarrierName(); + Map<String, Object> map = new HashMap<>(); + map.put("height", data.getFloodHeight()); + map.put("type", data.getFloodType()); + + ShpHelper.createShp(filePath, map, sr, data.getFloodMinx(), data.getFloodMiny(), data.getFloodMaxx(), data.getFloodMaxy()); + } + + public void copeDem(String token, DataPo data) throws Exception { + GeDb fileDb = getFileDb(token); + String fileId = getFileId(token, fileDb.getDbid()); + List<GeFile> files = getFileNames(token, fileDb.getDbid(), fileId); + + String filePath = config.getInPath() + File.separator + fileDb.getDbid(); + downloadFiles(token, filePath, files, fileDb.getDbid(), fileId); + clipDemFile(filePath, files, data); + } + + private GeDb getFileDb(String token) { + String uri = 
String.format("%sfile-service/docdb/query/canview?token=%s", config.getHost(), token); + JSONObject obj = restTemplate.getForObject(uri, JSONObject.class); + JSONArray data = obj.getJSONArray("data"); + + List<GeDb> list = JSONUtil.toList(data, GeDb.class); + if (CollectionUtils.isEmpty(list)) return null; + + return list.stream().filter(db -> null != db.getName() && db.getName().contains(config.getDbName())).findFirst().orElse(null); + } + + private String getFileId(String token, String dbid) { + String uri = String.format("%sfile-service/doc/catagory/file/query?token=%s&dbid=%s&catagory=%s&count=%d&start=%d&like=", + config.getHost(), token, dbid, "image", 9999, 1); + + JSONObject obj = restTemplate.getForObject(uri, JSONObject.class); + JSONArray items = obj.getJSONObject("data").getJSONArray("items"); + + for (int i = 0, c = items.size(); i < c; i++) { + JSONObject jb = items.getJSONObject(i); + if (config.getDemName().equals(jb.getStr("filename"))) { + return jb.getStr("fileid"); + } + } + + return null; + } + + private List<GeFile> getFileNames(String token, String dbid, String fileId) { + String uri = String.format("%sfile-service/doc/cluster/struct/list?token=%s&dbid=%s&cluster_fileid=%s&onlychild=true&folder_stairs=", + config.getHost(), token, dbid, fileId); + + JSONObject obj = restTemplate.getForObject(uri, JSONObject.class); + JSONArray data = obj.getJSONArray("data"); + + return JSONUtil.toList(data, GeFile.class); + } + + private void downloadFiles(String token, String path, List<GeFile> files, String dbid, String fileId) throws IOException { + File f = new File(path); + if (!f.exists() || !f.isDirectory()) { + f.mkdirs(); + } + + for (GeFile geFile : files) { + String filePath = path + File.separator + geFile.getName(); + f = new File(filePath); + if (f.exists() && f.length() == geFile.getSize()) { + continue; + } + if (f.exists() && f.length() < geFile.getSize()) { + f.delete(); + } + + String uri = String.format("%sfile-service/fileparser/cluster/download/%s?token=%s&dbid=%s&cluster_fileid=%s", + config.getHost(), geFile.getName(), token, dbid, fileId); + downloadFile(uri, filePath); + } + } + + private void downloadFile(String uri, String filePath) throws IOException { + URL url = new URL(uri); + URLConnection conn = url.openConnection(); + InputStream is = conn.getInputStream(); + + byte[] buffer = new byte[1024]; + FileOutputStream fs = new FileOutputStream(filePath); + + int read = 0, sum = 0; + while ((read = is.read(buffer)) != -1) { + sum += read; + fs.write(buffer, 0, read); + } + + fs.flush(); + fs.close(); + is.close(); + } + + private void clipDemFile(String filePath, List<GeFile> files, DataPo data) throws Exception { + String target = config.getInPath() + File.separator + data.getInPath() + File.separator + config.getDemFile(); + for (GeFile file : files) { + if (file.getName().toLowerCase().endsWith(config.getDemType())) { + String source = filePath + File.separator + file.getName(); + clipDem(source, target, data); + break; + } + } + } + + private void clipDem(String source, String target, DataPo data) throws Exception { + Dataset ds = null; + try { + ds = gdal.Open(source, gdalconst.GA_ReadOnly); + if (null == ds || ds.getRasterCount() < 1 || null == ds.GetSpatialRef()) throw new Exception("DEM鏁版嵁鏃犳晥"); + + // String bbox = "116.64388473935195,39.884315914604464,116.64754729082588,39.887069143903496"; + Vector<String> vector = new Vector<>(); + //vector.add("-s_srs"); + //vector.add("EPSG:" + 4326); + vector.add("-t_srs"); + vector.add("EPSG:" + 
data.getEpsg()); + vector.add("-r"); + vector.add("bilinear"); + vector.add("-of"); + vector.add("GTiff"); + vector.add("-te"); + vector.add(data.getMinx().toString()); + vector.add(data.getMiny().toString()); + vector.add(data.getMaxx().toString()); + vector.add(data.getMaxy().toString()); + vector.add("-te_srs"); + vector.add("EPSG:" + 4326); + WarpOptions warpOptions = new WarpOptions(vector); + + // gdalwarp -ot UInt16 -s_srs EPSG:4326 -t_srs EPSG:2382 -r bilinear -of GTiff -te 116.526854182 40.0481829856 116.532848182 40.0541769856 + // -te_srs EPSG:4326 -co COMPRESS=DEFLATE -co PREDICTOR=1 -co ZLEVEL=6 -co TILED=YES -wo OPTIMIZE_SIZE=TRUE E:\GDALhomework\000002.tif E:/CSDN/warped1.tif + + Dataset destDs = gdal.Warp(target, new Dataset[]{ds}, warpOptions); + destDs.delete(); + } finally { + if (null != ds) ds.delete(); + } + } +} diff --git a/src/main/java/com/se/nsl/service/Hdf5Service.java b/src/main/java/com/se/nsl/service/Hdf5Service.java new file mode 100644 index 0000000..73c1bbb --- /dev/null +++ b/src/main/java/com/se/nsl/service/Hdf5Service.java @@ -0,0 +1,81 @@ +package com.se.nsl.service; + +import cn.hutool.core.io.FileUtil; +import com.se.nsl.config.PropertiesConfig; +import com.se.nsl.domain.dto.LayerDto; +import com.se.nsl.domain.dto.ResultDto; +import com.se.nsl.domain.po.DataPo; + +import hdf.hdf5lib.H5; +import hdf.hdf5lib.HDF5Constants; +import lombok.extern.slf4j.Slf4j; +import org.springframework.stereotype.Service; +import javax.annotation.Resource; +import java.io.File; + +@Slf4j +@Service +@SuppressWarnings("ALL") +public class Hdf5Service { + @Resource + PropertiesConfig config; + + public void test(DataPo data) throws Exception { + String basePath = config.getInPath() + File.separator + data.getInPath() + File.separator; + ResultDto dto = new ResultDto( + data.getInPath(), + basePath + config.getTerrainFile(), + basePath + config.getBuildingFile(), + basePath + config.getWaterPath(), + basePath + config.getFlowPath(), + config.getInPath(), + config.getOutPath(), + data.getEpsg()); + LayerDto layer = new LayerDto(config.getVer(), data.getEpsg(), config.getSizes()); + process(dto, layer); + } + + // https://blog.51cto.com/u_16213355/12235346 + private void process(ResultDto dto, LayerDto layer) throws Exception { + int fileId = 0; + int datasetId = 0; + try { + if (!FileUtil.exist(dto.getH5Path())) return; + + fileId = H5.H5Fopen(dto.getH5Path(), HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT); + if (fileId == 0) return; + + // H5F_OBJ_DATASET,H5E_DATASET,H5G_DATASET=1, + //int dsCount = H5.H5Fget_obj_count(fileId, HDF5Constants.H5G_DATASET); + //for (int i = 0; i < dsCount; i++) { + // System.out.println("Dataset: " + ""); + //} + + //datasetId = H5.H5Dopen(fileId, "", HDF5Constants.H5P_DEFAULT); + + int groupId = H5.H5Gopen(fileId, "data"); + if (groupId == 0) return; + + /*datasetId = H5.H5Dopen(fileId, "data", HDF5Constants.H5P_DEFAULT); + if (datasetId == 0) return; + + // 鑾峰彇鏁版嵁闆嗙殑缁村害 + //long[] dims = H5.H5Dget_dims(datasetId); + + // 鏍规嵁鏁版嵁闆嗙殑缁村害鍒涘缓涓�涓狫ava鏁扮粍鏉ュ瓨鍌ㄦ暟鎹� + int[] buffer = new int[1]; + + // 璇诲彇鏁版嵁鍒癑ava鏁扮粍 + int herr = H5.H5Dread(datasetId, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, buffer); + + // 杈撳嚭璇诲彇鍒扮殑鏁版嵁 + System.out.println(buffer[0]);*/ + + H5.H5Dclose(groupId); + } catch (Exception ex) { + ex.printStackTrace(); + } finally { + if (fileId > 0) H5.H5Dclose(fileId); + } + } +} diff --git a/src/main/java/com/se/nsl/service/IBizH5FileParserService.java 
b/src/main/java/com/se/nsl/service/IBizH5FileParserService.java new file mode 100644 index 0000000..8824fb9 --- /dev/null +++ b/src/main/java/com/se/nsl/service/IBizH5FileParserService.java @@ -0,0 +1,46 @@ +package com.se.nsl.service; + +import hdf.hdf5lib.exceptions.HDF5LibraryException; + +/** + * ibiz H5 鏂囦欢瑙f瀽鍣� + * + * @author xingjinshuang@smartearth.cn + * @date 2024/12/06 + */ +public interface IBizH5FileParserService { + + /** + * 鎵撳紑HDF5鏂囦欢 + * + * @param filePath 鏂囦欢璺緞 + * @return 鏂囦欢ID + */ + long openH5File(String filePath) ; + + + /** + * 璇诲彇鎸囧畾鏁版嵁闆嗙殑鏁版嵁 + * + * @param fileId 鏂囦欢ID + * @param datasetName 鏁版嵁闆嗗悕绉� + * @return 鏁版嵁闆嗙殑鍊� + */ + Object readDataset(long fileId, String datasetName); + + + /** + * 鍏抽棴HDF5鏂囦欢 + * + * @param fileId 鏂囦欢ID + */ + void closeH5File(long fileId) throws HDF5LibraryException; + + /** + * 瑙f瀽 H5 鏂囦欢 + * + * @param filePath + * @return {@link Object} + */ + Object parseH5File(String filePath); +} diff --git a/src/main/java/com/se/nsl/service/Impl/BizH5FileParserServiceImpl.java b/src/main/java/com/se/nsl/service/Impl/BizH5FileParserServiceImpl.java new file mode 100644 index 0000000..489f524 --- /dev/null +++ b/src/main/java/com/se/nsl/service/Impl/BizH5FileParserServiceImpl.java @@ -0,0 +1,101 @@ +package com.se.nsl.service.Impl; + +import com.se.nsl.service.IBizH5FileParserService; +import hdf.hdf5lib.H5; +import hdf.hdf5lib.HDF5Constants; +import hdf.hdf5lib.exceptions.HDF5Exception; +import hdf.hdf5lib.exceptions.HDF5LibraryException; +import org.springframework.stereotype.Service; + +@Service +public class BizH5FileParserServiceImpl implements IBizH5FileParserService { + + + @Override + public long openH5File(String filePath) { + int hdf5File = -1; + try { + hdf5File = H5.H5Fopen(filePath, HDF5Constants.H5F_ACC_RDWR, HDF5Constants.H5P_DEFAULT); + // 鍦ㄨ繖閲岃繘琛屾枃浠舵搷浣� + } catch (Exception e) { + e.printStackTrace(); + } finally { + if (hdf5File >= 0) { + try { + H5.H5Fclose(hdf5File); + } catch (Exception e) { + e.printStackTrace(); + } + } + } + return hdf5File; + } + + @Override + public Object readDataset(long fileId, String datasetName) { + // long datasetId = H5.H5Dopen((int) fileId, datasetName, HDF5Constants.H5P_DEFAULT); + // long datatypeId = H5.H5Dget_type((int) datasetId); + + int[] data = new int[1]; // 鍋囪鏁版嵁闆嗕负鏁村瀷鏁扮粍 + // H5.H5Dread((int) datasetId, datatypeId, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, data); +// + // H5.H5Dclose(datasetId); + // H5.H5Tclose(datatypeId); + + return data[0]; + } + + @Override + public void closeH5File(long fileId) throws HDF5LibraryException { + H5.H5Fclose((int) fileId); + } + + @Override + public Object parseH5File(String filePath) { + String fileName = "D:\\xingjs\\A_daily\\PycharmProjects\\pythonProject\\dev\\example.h5"; + String datasetName = "dataset2"; + + + try { + + + // 鎵撳紑.h5鏂囦欢 + int fileId = H5.H5Fopen(fileName, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT); + + // 鎵撳紑鏁版嵁闆� + int datasetId = H5.H5Dopen(fileId, datasetName, HDF5Constants.H5P_DEFAULT); + + // 鑾峰彇鏁版嵁闆嗙殑鏁版嵁绫诲瀷 + int datatypeId = H5.H5Dget_type(datasetId); + int datatypeSize = H5.H5Tget_size(datatypeId); + + // 鑾峰彇鏁版嵁闆嗙殑鏁版嵁绌洪棿 + int dataspaceId = H5.H5Dget_space(datasetId); + long[] dims = new long[HDF5Constants.H5S_MAX_RANK]; + H5.H5Sget_simple_extent_dims(dataspaceId, dims, null); + + // 璇诲彇鏁版嵁闆嗙殑鏁版嵁 + byte[] data = new byte[(int) (dims[0] * datatypeSize)]; + H5.H5Dread(datasetId, datatypeId, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, data); + + // 
鍏抽棴鏁版嵁闆� + H5.H5Dclose(datasetId); + + // 鍏抽棴鏂囦欢 + H5.H5Fclose(fileId); + + // 澶勭悊鏁版嵁 + // ... + + // 鎵撳嵃鏁版嵁绀轰緥 + for (int i = 0; i < data.length; i++) { + System.out.print(data[i] + " "); + } + } catch (HDF5Exception e) { + e.printStackTrace(); + } + return null; + } + + +} diff --git a/src/main/java/com/se/nsl/service/Impl/ProjectRelatedServiceImpl.java b/src/main/java/com/se/nsl/service/Impl/ProjectRelatedServiceImpl.java new file mode 100644 index 0000000..01b3f0d --- /dev/null +++ b/src/main/java/com/se/nsl/service/Impl/ProjectRelatedServiceImpl.java @@ -0,0 +1,406 @@ +package com.se.nsl.service.Impl; + +import cn.hutool.crypto.SecureUtil; +import cn.hutool.crypto.asymmetric.AsymmetricAlgorithm; +import cn.hutool.crypto.asymmetric.AsymmetricCrypto; +import cn.hutool.crypto.asymmetric.KeyType; +import cn.hutool.crypto.asymmetric.RSA; +import com.alibaba.fastjson.JSONObject; +import com.se.nsl.constant.CacheConstants; +import com.se.nsl.domain.EntityTypeInfo; +import com.se.nsl.domain.LoginParams; +import com.se.nsl.helper.CaffeineHelper; +import com.se.nsl.service.ProjectRelatedService; +import com.se.nsl.utils.CustomWebClient; +import com.se.nsl.utils.EntityLibraryUtils; +import lombok.extern.slf4j.Slf4j; +import org.apache.commons.lang3.StringUtils; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.stereotype.Service; +import reactor.core.publisher.Mono; + +import java.security.KeyPair; +import java.security.PrivateKey; +import java.security.PublicKey; +import java.util.HashMap; +import java.util.Objects; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.atomic.AtomicReference; + +@Slf4j +@Service +public class ProjectRelatedServiceImpl implements ProjectRelatedService { + + + // 鍏挜鍦板潃 + @Value(value = "${app-server.publicKeyUrl}") + private String publicKeyUrl; + // devops 鐧诲綍鍦板潃 + @Value(value = "${app-server.loginUrl}") + private String loginUrl; + @Value(value = "${app-server.getDbUrl}") + private String getDbUrl; + + // 鏌ヨ鍦板潃 + @Value(value = "${app-server.queryUrl}") + private String queryUrl; + + + /** + * 鑾峰彇鍏挜 + * + * @return {@link Object} + */ + @Override + public Object getPublicKey() { + HashMap<String, String> headers = new HashMap<>(); + headers.put("Content-Type", "application/json"); + CompletableFuture<String> postResponse = CustomWebClient.postAsFuture(publicKeyUrl, "", headers, String.class); + // 寮傛澶勭悊鍝嶅簲 + postResponse.thenAccept(response -> { + log.info("response: {}", response); + if (response.contains("code")) { + try { + JSONObject postResponseJson = JSONObject.parseObject(response); + int statusCode = postResponseJson.getIntValue("code"); + log.info("statusCode = " + statusCode); + String data = postResponseJson.getString("data"); + // 缂撳瓨data,骞惰缃�1灏忔椂鏈夋晥鏈� + CaffeineHelper.remove(CacheConstants.USER_CACHE_KEY + "rsa_data_set"); + CaffeineHelper.put(CacheConstants.USER_CACHE_KEY + "rsa_data_set", data); + log.info("publicKey = " + data); + } catch (Exception e) { + log.info("Failed to parse JSON: " + e.getMessage()); + } + } else { + log.info("No 'code' field in the response: " + response); + } + }); + return JSONObject.parseObject(postResponse.join()); + } + + /** + * 鐧诲綍瀹炰綋 + * + * @param loginParams 鐧诲綍鍙傛暟 + * @return {@link String} + */ + @Override + public Object loginEntity(LoginParams loginParams) { + // 鍒ゆ柇redis涓紦瀛樻槸鍚﹀瓨鍦紙杩囨湡锛� + boolean isExists = Objects.isNull(CaffeineHelper.get(CacheConstants.USER_CACHE_KEY + "entity_db_token")); + if 
(!isExists) { + return JSONObject.parseObject((String) CaffeineHelper.get(CacheConstants.USER_CACHE_KEY + "entity_db_response")); + } else { + // 娓呴櫎鎵�鏈夌紦瀛� + CaffeineHelper.remove(CacheConstants.USER_CACHE_KEY + "entity_db_response"); + CaffeineHelper.remove(CacheConstants.USER_CACHE_KEY + "EntityPublicKey"); + CaffeineHelper.remove(CacheConstants.USER_CACHE_KEY + "entity_db_token"); + CaffeineHelper.remove(CacheConstants.USER_CACHE_KEY + "rsa_data_set"); + } + // 鑾峰彇绉侀挜鍜屽叕閽ワ紝闀垮害蹇呴』鏄�16銆�24鎴�32 + String publicKey = (String) CaffeineHelper.get(CacheConstants.USER_CACHE_KEY + "rsa_data_set"); + // 鍋囪浠嶳edis涓幏鍙栧埌鐢ㄦ埛鍚�,鍒ゆ柇 + if (!StringUtils.isNotBlank(publicKey) && Objects.isNull(publicKey)) { + // 缂撳瓨涓病鏈夌敤鎴峰悕,鍒欒繘琛岀櫥褰� + getPublicKey(); + log.info("璋冪敤浜嗙櫥褰曡幏鍙栫敤鎴峰悕鏂规硶 $= "); + } + // 闃叉涓荤嚎绋嬫彁鍓嶇粨鏉� + try { + // 绛夊緟寮傛璇锋眰瀹屾垚 + Thread.sleep(500); + } catch (InterruptedException e) { + e.printStackTrace(); + } + publicKey = (String) CaffeineHelper.get(CacheConstants.USER_CACHE_KEY + "rsa_data_set"); + log.info("publicKey = " + publicKey); + // 鍏挜鍔犲瘑 + AsymmetricCrypto rsa = SecureUtil.rsa(null, publicKey); + String encrypt = rsa.encryptBase64(loginParams.getPassword(), KeyType.PublicKey); + // 璁剧疆鍔犲瘑鍚庣殑瀵嗙爜 + loginParams.setPassword(encrypt); + // 灏唋oginParams瀹炰綋杞垚JSON瀛楃涓� +// String json = JSON.toJSONString(loginParams); +// log.info("json = " + json); + // 鍙戦�佺櫥褰曡姹� + Mono<String> postResponse = CustomWebClient.postAsMono(loginUrl, loginParams, String.class); + // 鐢ㄤ簬淇濆瓨鍝嶅簲鏁版嵁 + AtomicReference<String> responseData = new AtomicReference<>(); + postResponse.subscribe(response -> { + // 灏嗗搷搴旀暟鎹繚瀛樺埌鍙橀噺涓� + responseData.set(response); + String code = JSONObject.parseObject(response).getString("code"); + // 濡傛灉code鏄�200鍒欑櫥褰曟垚鍔� + if (code.equals("200")) { + // 鐧诲綍鎴愬姛鍚�,鑾峰彇data + String data = JSONObject.parseObject(response).getString("data"); + String token = JSONObject.parseObject(data).getString("token"); + log.info("token = " + token); + // 璁剧疆data鍒癛edis涓� + CaffeineHelper.remove(CacheConstants.USER_CACHE_KEY + "entity_db_response"); + CaffeineHelper.put(CacheConstants.USER_CACHE_KEY + "entity_db_response", response); + // 璁剧疆data鍒癛edis涓� + CaffeineHelper.remove(CacheConstants.USER_CACHE_KEY + "entity_db_token"); + CaffeineHelper.put(CacheConstants.USER_CACHE_KEY + "entity_db_token", token); + } else { + // 鐧诲綍澶辫触 + log.error("鐧诲綍澶辫触 $= " + response); + } + }); + // 闃叉涓荤嚎绋嬫彁鍓嶇粨鏉� + try { + postResponse.toFuture().get(); // 闃诲绛夊緟璁㈤槄鎿嶄綔瀹屾垚 + // 绛夊緟寮傛璇锋眰瀹屾垚 + Thread.sleep(1000); + } catch (InterruptedException | ExecutionException e) { + e.printStackTrace(); + } + // 杩斿洖token + return JSONObject.parseObject(responseData.get()); + } + + /** + * 鑷畾涔夊瘑閽ョ敓鎴愬姞瀵嗘暟鎹� + * + * @param data 鏁版嵁 + * @param publicKey 鍏挜 + * @return {@link String} + */ + private static String customKeysGenerateEncryptedData(String data, String publicKey) { + AsymmetricCrypto rsa = SecureUtil.rsa(null, publicKey); + String encrypt = rsa.encryptBase64(data, KeyType.PublicKey); + System.out.println("encrypt = " + encrypt); + return encrypt; + } + + /** + * 鑷畾涔� RSAgenerate 鍔犲瘑鏁版嵁0 + */ + private static void customRSAGenerateEncryptedData0() { + RSA rsa = new RSA(); + // 鑾峰彇鍏挜鍜岀閽� + System.out.println(rsa.getPublicKey()); + System.out.println(rsa.getPrivateKeyBase64()); + System.out.println(rsa.getPrivateKey()); + System.out.println(rsa.getPrivateKeyBase64()); + // 绉侀挜鍔犲瘑,鍏挜瑙e瘑 + System.out.println(new String(rsa.encrypt("testaa", KeyType.PrivateKey))); + System.out.println(new String(rsa.decrypt(rsa.encrypt("testaa", 
KeyType.PrivateKey), KeyType.PublicKey))); + // 鍏挜鍔犲瘑,绉侀挜瑙e瘑 + System.out.println(new String(rsa.encrypt("testaa", KeyType.PublicKey))); + System.out.println(new String(rsa.decrypt(rsa.encrypt("testaa", KeyType.PublicKey), KeyType.PrivateKey))); + } + + /** + * 鑷畾涔� RSAgenerate 鍔犲瘑鏁版嵁1 + */ + private static void customRSAGenerateEncryptedData1() { + KeyPair keyPair = SecureUtil.generateKeyPair(AsymmetricAlgorithm.RSA.getValue()); + PrivateKey privateKey = keyPair.getPrivate(); + PublicKey publicKey = keyPair.getPublic(); + System.out.println(publicKey); + System.out.println(privateKey); + System.out.println("----------"); + + RSA rsa = new RSA(privateKey, publicKey); + // 绉侀挜鍔犲瘑,鍏挜瑙e瘑 + System.out.println(new String(rsa.encrypt("testaa", KeyType.PrivateKey))); + System.out.println(new String(rsa.decrypt(rsa.encrypt("testaa", KeyType.PrivateKey), KeyType.PublicKey))); + // 鍏挜鍔犲瘑,绉侀挜瑙e瘑 + System.out.println(new String(rsa.encrypt("testaa", KeyType.PublicKey))); + System.out.println(new String(rsa.decrypt(rsa.encrypt("testaa", KeyType.PublicKey), KeyType.PrivateKey))); + + } + + + /** + * 鑾峰彇璁块棶瀹炰綋搴撶殑token + * + * @return {@link Object} + */ + @Override + public Object getEntityPublicKey() { + // 鍒ゆ柇redis涓紦瀛樻槸鍚﹀瓨鍦紙杩囨湡锛� + boolean isExists = Objects.isNull(CaffeineHelper.get(CacheConstants.USER_CACHE_KEY + "EntityPublicKey")); + if (!isExists) { + return JSONObject.parseObject((String) CaffeineHelper.get(CacheConstants.USER_CACHE_KEY + "EntityPublicKey")); + } else { + // 娓呴櫎鎵�鏈夌紦瀛� + CaffeineHelper.remove(CacheConstants.USER_CACHE_KEY + "entity_db_response"); + CaffeineHelper.remove(CacheConstants.USER_CACHE_KEY + "EntityPublicKey"); + CaffeineHelper.remove(CacheConstants.USER_CACHE_KEY + "entity_db_token"); + CaffeineHelper.remove(CacheConstants.USER_CACHE_KEY + "rsa_data_set"); + } + HashMap<String, String> headers = new HashMap<>(); + headers.put("Content-Type", "application/json"); + CompletableFuture<String> postResponse = CustomWebClient.postAsFuture(queryUrl, "", headers, String.class); + // 寮傛澶勭悊鍝嶅簲 + postResponse.thenAccept(response -> { + log.info("response: {}", response); + if (response.contains("code")) { + try { + JSONObject postResponseJson = JSONObject.parseObject(response); + int statusCode = postResponseJson.getIntValue("code"); + log.info("statusCode = " + statusCode); + String data = postResponseJson.getString("data"); + // 缂撳瓨data,骞惰缃�1灏忔椂鏈夋晥鏈� + CaffeineHelper.remove(CacheConstants.USER_CACHE_KEY + "EntityPublicKey"); + CaffeineHelper.put(CacheConstants.USER_CACHE_KEY + "EntityPublicKey", response); + log.info("EntityPublicKey = " + data); + } catch (Exception e) { + log.info("Failed to parse JSON: " + e.getMessage()); + } + } else { + log.info("No 'code' field in the response: " + response); + } + }); + return JSONObject.parseObject(postResponse.join()); + } + + + /** + * 鑾峰彇璁块棶瀹炰綋搴撶殑token + * + * @return {@link Object} + */ + @Override + public Object getDbLits() { + HashMap<String, String> headers = new HashMap<>(); + headers.put("Content-Type", "application/json"); + String token = (String) CaffeineHelper.get(CacheConstants.USER_CACHE_KEY + "entity_db_token"); + log.info("token = " + token); + // 娣诲姞form鍙傛暟 + HashMap<String, Object> params = new HashMap<>(); + params.put("token", token); + + CompletableFuture<String> postResponse = CustomWebClient.postAsFuture(getDbUrl, params, headers, String.class); + // 寮傛澶勭悊鍝嶅簲 + postResponse.thenAccept(response -> { + log.info("response: {}", response); + if (response.contains("code")) { + try { + JSONObject postResponseJson = 
JSONObject.parseObject(response);
+                    int statusCode = postResponseJson.getIntValue("code");
+                    log.info("statusCode = " + statusCode);
+                    String data = postResponseJson.getString("data");
+                    // Cache the data and give it a 1-hour validity period
+                    CaffeineHelper.remove(CacheConstants.USER_CACHE_KEY + "EntityDbNameList");
+                    CaffeineHelper.put(CacheConstants.USER_CACHE_KEY + "EntityDbNameList", response);
+                    log.info("EntityDbNameList = " + data);
+                } catch (Exception e) {
+                    log.info("Failed to parse JSON: " + e.getMessage());
+                }
+            } else {
+                log.info("No 'code' field in the response: " + response);
+            }
+        });
+        return JSONObject.parseObject(postResponse.join());
+    }
+
+    @Override
+    public Object getEntityTypeInfo(EntityTypeInfo entityTypeInfo) {
+        HashMap<String, String> headers = new HashMap<>();
+        headers.put("Content-Type", "application/json");
+        String token;
+        // Check whether the cached token still exists (it may have expired)
+        boolean isExists = Objects.isNull(CaffeineHelper.get(CacheConstants.USER_CACHE_KEY + "entity_db_token"));
+        if (!isExists) {
+            token = (String) CaffeineHelper.get(CacheConstants.USER_CACHE_KEY + "entity_db_token");
+        } else {
+            try {
+                token = EntityLibraryUtils.login();
+                CaffeineHelper.remove(CacheConstants.USER_CACHE_KEY + "entity_db_token");
+                CaffeineHelper.put(CacheConstants.USER_CACHE_KEY + "entity_db_token", token);
+            } catch (Exception e) {
+                return "Login failed!";
+            }
+        }
+        log.info("token = " + token);
+        // Add the form parameters
+        HashMap<String, Object> params = new HashMap<>();
+        params.put("token", token);
+        params.put("start", Objects.nonNull(entityTypeInfo.getStart()) ? entityTypeInfo.getStart() : 1);
+        params.put("containCount", true);
+        params.put("count", Objects.nonNull(entityTypeInfo.getCount()) ? entityTypeInfo.getCount() : 20);
+        params.put("dbid", Objects.nonNull(entityTypeInfo.getDbid()) ? entityTypeInfo.getDbid() : "85257774fdb64e5f99f6778696cad02a");
+        params.put("layerid", Objects.nonNull(entityTypeInfo.getLayerid()) ?
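+                // Falls back to a hard-coded default layer id when the caller does not supply one.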
entityTypeInfo.getLayerid() : "8208c5be-adc1-4e7b-b952-37362e0bef32"); + params.put("like", ""); + params.put("querytype", "entity"); + + CompletableFuture<String> postResponse = CustomWebClient.postAsFuture(queryUrl, params, headers, String.class); + // 寮傛澶勭悊鍝嶅簲 + postResponse.thenAccept(response -> { + log.info("response: {}", response); + if (response.contains("code")) { + try { + JSONObject postResponseJson = JSONObject.parseObject(response); + int statusCode = postResponseJson.getIntValue("code"); + log.info("statusCode = " + statusCode); + String data = postResponseJson.getString("data"); + log.info("getEntityTypeInfo = " + data); + } catch (Exception e) { + log.info("Failed to parse JSON: " + e.getMessage()); + } + } else { + log.info("No 'code' field in the response: " + response); + } + }); + return JSONObject.parseObject(postResponse.join()); + } + + + @Override + public Object getEntityPipeInfo(EntityTypeInfo entityTypeInfo) { + HashMap<String, String> headers = new HashMap<>(); + headers.put("Content-Type", "application/json"); + String token; + // 鍒ゆ柇redis涓紦瀛樻槸鍚﹀瓨鍦紙杩囨湡锛� + boolean isExists = Objects.isNull(CaffeineHelper.get(CacheConstants.USER_CACHE_KEY + "entity_db_token")); + if (!isExists) { + token = (String) CaffeineHelper.get(CacheConstants.USER_CACHE_KEY + "entity_db_token"); + } else { + // 鑾峰彇token + LoginParams loginParams = new LoginParams(); + loginParams.setUserid("admin"); + loginParams.setPassword("admin"); + loginEntity(loginParams); + token = (String) CaffeineHelper.get(CacheConstants.USER_CACHE_KEY + "entity_db_token"); + } + log.info("token = " + token); + // 娣诲姞form鍙傛暟 + HashMap<String, Object> params = new HashMap<>(); + // e7e2af87096c45e0a14c0a4855cb0b90 + params.put("token", token); + params.put("start", Objects.nonNull(entityTypeInfo.getStart()) ? entityTypeInfo.getStart() : 1); + params.put("containCount", true); + params.put("count", Objects.nonNull(entityTypeInfo.getCount()) ? entityTypeInfo.getCount() : 20); + params.put("dbid", Objects.nonNull(entityTypeInfo.getDbid()) ? entityTypeInfo.getDbid() : "85257774fdb64e5f99f6778696cad02a"); + params.put("layerid", Objects.nonNull(entityTypeInfo.getLayerid()) ? 
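+                // Same fallback pattern: a hard-coded default layer id (presumably the pipe layer) is used when none is given.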
entityTypeInfo.getLayerid() : "1e677d48-8dff-4975-b9a0-c16500193629"); + params.put("like", ""); + params.put("querytype", "entity"); + + CompletableFuture<String> postResponse = CustomWebClient.postAsFuture(queryUrl, params, headers, String.class); + // 寮傛澶勭悊鍝嶅簲 + postResponse.thenAccept(response -> { + log.info("response: {}", response); + if (response.contains("code")) { + try { + JSONObject postResponseJson = JSONObject.parseObject(response); + int statusCode = postResponseJson.getIntValue("code"); + log.info("statusCode = " + statusCode); + String data = postResponseJson.getString("data"); + log.info("getEntityTypeInfo = " + data); + } catch (Exception e) { + log.info("Failed to parse JSON: " + e.getMessage()); + } + } else { + log.info("No 'code' field in the response: " + response); + } + }); + return JSONObject.parseObject(postResponse.join()); + } + + +} diff --git a/src/main/java/com/se/nsl/service/Impl/SemFilesSimuServiceImpl.java b/src/main/java/com/se/nsl/service/Impl/SemFilesSimuServiceImpl.java new file mode 100644 index 0000000..be2e1ad --- /dev/null +++ b/src/main/java/com/se/nsl/service/Impl/SemFilesSimuServiceImpl.java @@ -0,0 +1,338 @@ +package com.se.nsl.service.Impl; + +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONArray; +import com.alibaba.fastjson.JSONObject; +import com.se.nsl.service.SemFilesSimuService; +import com.se.nsl.utils.ZipUtils; +import lombok.extern.slf4j.Slf4j; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.stereotype.Service; + +import java.io.File; +import java.io.FileNotFoundException; +import java.io.FileOutputStream; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.StandardCopyOption; +import java.sql.*; +import java.time.LocalDateTime; +import java.time.format.DateTimeFormatter; +import java.util.HashMap; + +@Slf4j +@Service +public class SemFilesSimuServiceImpl implements SemFilesSimuService { + + @Value("${config.outPath}") + private String outPath; + + /** + * 鑾峰彇 INTRODUCE + * + * @return {@link Object} + */ + @Override + public Object getIntroduce() { + HashMap<String, Object> introduceMap = new HashMap<>(); + String introduce = "SEM瀹氫箟鍩庡競绌洪棿瀵硅薄锛堝寤虹瓚鐗┿�佹瀯绛戠墿绛夛級绫诲瀷鍜岀浉浜掑叧绯荤殑瀛樺偍缁撴瀯锛屾敮鎸佷负绌洪棿瀹炰綋瀵硅薄鎸傛帴闈炵粨鏋勫寲鐨勬暟鎹紝濡備汉宸ユā鍨嬨�佺偣浜戞ā鍨嬨�佽棰戞枃浠剁瓑銆係EM鍙瓨鍌ㄥ煄甯傜┖闂村璞$殑鍏冩暟鎹紙Metadata锛夈�佸疄浣撳璞★紙Entity锛夈�侀摼鎺ュ璞★紙LinkObject锛夈�佸睘鎬э紙Attribute锛夈�佹潗璐紙Material锛夈�佺汗鐞嗭紙Texture锛夈�佺汗鐞嗛《鐐癸紙TextureVertice锛夈�佸浘鐗囨暟鎹紙Image锛夈�佸嚑浣曟ā鏉匡紙Template锛変互鍙婃墿灞曞唴瀹癸紙ExtensionSchema锛夌瓑"; + introduceMap.put("introduce", introduce); + introduceMap.put("10寮犺〃", "SEM鍖呭惈鍏冩暟鎹〃銆佸疄浣撳璞¤〃銆侀摼鎺ュ璞¤〃銆佸睘鎬ц〃銆佹潗璐ㄨ〃銆佺汗鐞嗚〃銆佺汗鐞嗛《鐐硅〃銆佸浘鐗囨暟鎹〃銆佸嚑浣曟ā鏉胯〃鍜屾墿灞曡〃"); + introduceMap.put("Metadata锛堝厓鏁版嵁琛級", "鐢ㄤ簬瀛樺偍 SEM 鍩虹淇℃伅"); + introduceMap.put("Entity锛堝疄浣撳璞¤〃锛�", "鐢ㄤ簬瀛樺偍绌洪棿瀹炰綋瀵硅薄鐨勫嚑浣曟暟鎹�"); + introduceMap.put("LinkObject锛堥摼鎺ュ璞¤〃锛�", "鐢ㄤ簬瀛樺偍绌洪棿瀹炰綋瀵硅薄鐨勬寕鎺ュ璞′俊鎭�"); + introduceMap.put("Attribute锛堝睘鎬ц〃锛�", "鐢ㄤ簬瀛樺偍绌洪棿瀹炰綋瀵硅薄鐨勫睘鎬�"); + introduceMap.put("Material锛堟潗璐ㄨ〃锛�", "鐢ㄤ簬瀛樺偍绌洪棿瀹炰綋瀵硅薄鐨勬潗璐ㄤ俊鎭�"); + introduceMap.put("Texture锛堢汗鐞嗚〃锛�", "鐢ㄤ簬瀛樺偍绌洪棿瀹炰綋瀵硅薄鐨勭汗鐞嗕俊鎭�"); + introduceMap.put("TextureVertice锛堢汗鐞嗛《鐐硅〃锛�", "璁板綍绌洪棿瀹炰綋瀵硅薄鐨勭汗鐞嗛《鐐瑰潗鏍囧��"); + introduceMap.put("Image锛堝浘鐗囨暟鎹〃锛�", "瀛樺偍绌洪棿瀹炰綋瀵硅薄鐨勭汗鐞嗘垨鎸傛帴鐨勪汉宸ユā鍨嬫墍搴旂敤鐨勫浘鐗囨暟鎹�"); + introduceMap.put("Template锛堝嚑浣曟ā鏉胯〃锛�", "瀛樺偍绌洪棿瀹炰綋瀵硅薄鐨勫嚑浣曟ā鏉�"); + introduceMap.put("ExtensionSchema锛堟墿灞曡〃锛�", "鎻忚堪棰嗗煙鏈綋鐨勬墿灞曞睘鎬у拰鎵╁睍淇℃伅"); + // 鏂板琛� + String dynzamizers = "1銆佸姩鎬佹暟鎹瓨鍌ㄥ湪DYNZAMIZERS琛ㄤ腑锛屽叾涓細" + "url锛氭暟鎹畊rl" + "data锛歾arr鏁版嵁锛屼娇鐢ㄧ殑鏄痾arr鐨勫帇缂╁瓨鍌ㄦ牸寮忋�傝瑙亃arr鐨剒ipstore銆�" + 
"gmlId锛氫笌瀹炰綋瀵硅薄鐩稿叧鑱斿瓧娈碉紙浣跨敤ENTITY琛紙瀹炰綋琛級涓殑UUID鐩稿叧鑱旓級" + + "" + "zarr鏁版嵁缁撴瀯绀轰緥锛�" + "Grid鐩稿叧鐨剒arr锛�" + "/" + "|鈥斺�攄epth (n,height,width) " + "|鈥斺�攖ime(n)" + "" + "time瀛樺偍鏃堕棿搴忓垪" + "depth瀛樺偍姘存繁鐩稿叧淇℃伅锛屼笁缁存暟缁勶紝绗竴缁翠负鏃堕棿 涓巘ime鐩稿搴�" + + "鏁扮粍闀垮害n浠h〃鏃堕棿鍒囩墖鐨勪釜鏁�" + "height锛寃idth浠h〃鏍呮牸鐨勯暱鍜屽" + "闄嶉洦閲忕浉鍏硓arr锛�" + "/" + "|鈥斺�攔ainfall(n)" + "|鈥斺�攖ime(n)" + "" + "time瀛樺偍鏃堕棿搴忓垪" + "rainfall 瀛樺偍闄嶉洦閲忕浉鍏充俊鎭紝涓�缁存暟缁勶紝涓巘ime鐩稿搴�" + + "鏁扮粍闀垮害n浠h〃鏃堕棿鍒囩墖鐨勪釜鏁�" + "" + "" + "2銆乼errain鐨勫瓨鍌ㄦ柟寮忥細" + "绫诲瀷涓衡��+Terrain鈥�" + + "Entity涓嚑浣曞瓨鍌ㄥ湴褰㈢殑澶栧寘妗嗭紝浣跨敤绾圭悊璐村浘瀛樺偍鍦板舰tif杞嚭鐨刾ng鍥剧墖銆�"; + introduceMap.put("DYNZAMIZERS锛堟柊澧烇細鍔ㄦ�佹暟鎹瓨鍌級", dynzamizers); + //return dynzamizers.getBytes(StandardCharsets.UTF_8); + return introduceMap; + } + + @Override + public Object createSimuBySemFile() { + return null; + } + + @Override + public Object readSemFile(String filePath) { + // 鏂囦欢鍦板潃 + log.info("filePath:{}", filePath); + // 澶勭悊鏂囦欢 + JSONObject result = new JSONObject(); + try { + // 娴嬭瘯杩炴帴SQLite鏁版嵁搴� + Connection connection = connectToSQLiteWithCopy(filePath); + System.out.println("SQLite鏁版嵁搴撹繛鎺ユ垚鍔燂紒"); + Statement stmt = connection.createStatement(); + // 鏌ヨENTITY琛ㄧ殑鏁版嵁骞惰繑鍥濲SON鏍煎紡鐨勭粨鏋� +// result = queryEntityTable(stmt); + result = queryDynamizersTable(stmt); + // 鍏抽棴杩炴帴 + connection.close(); + return result; + } catch (SQLException | IOException e) { + System.err.println("鎿嶄綔澶辫触: " + e.getMessage()); + return e.getMessage(); + } + } + + + /** + * 鏍规嵁浼犲叆鐨勬枃浠惰矾寰勶紝娣诲姞鍚庣紑 `.db` 骞惰繛鎺� SQLite 鏁版嵁搴撱�� + * + * @param filePath 鏂囦欢璺緞 + * @return SQLite鏁版嵁搴撶殑杩炴帴 + * @throws SQLException 濡傛灉SQLite杩炴帴澶辫触 + */ + public static Connection connectToSQLite(String filePath) throws SQLException { + // 妫�鏌ユ枃浠惰矾寰勬槸鍚︿负绌� + if (filePath == null || filePath.trim().isEmpty()) { + throw new IllegalArgumentException("鏂囦欢璺緞涓嶈兘涓虹┖"); + } + + // 妫�鏌ユ枃浠舵槸鍚﹀凡缁忔湁.db鍚庣紑锛屾病鏈夊垯娣诲姞 + if (!filePath.endsWith(".db")) { + filePath = filePath + ".db"; + } + log.info("Connecting to SQLite database: " + filePath); + + // 鍒涘缓鏂囦欢瀵硅薄 + File dbFile = new File(filePath); + + // 濡傛灉鏂囦欢涓嶅瓨鍦紝鍒欏垱寤轰竴涓柊鏂囦欢 + if (!dbFile.exists()) { + log.info("鏂囦欢涓嶅瓨鍦紝璇锋鏌ユ枃浠朵綅缃槸鍚︽纭�..."); + return null; + // try { + // // 閫氳繃璋冪敤 createNewFile() 鏂规硶鍒涘缓鏂版枃浠� + // boolean created = dbFile.createNewFile(); + // if (!created) { + // throw new SQLException("鏃犳硶鍒涘缓鏁版嵁搴撴枃浠�"); + // } + // } catch (Exception e) { + // throw new SQLException("鍒涘缓鏁版嵁搴撴枃浠舵椂鍑洪敊: " + e.getMessage(), e); + // } + } + + // 浣跨敤 SQLite JDBC 杩炴帴瀛楃涓茶繛鎺ユ暟鎹簱 + String url = "jdbc:sqlite:" + dbFile.getAbsolutePath(); + log.info("杩炴帴鍒癝QLite鏁版嵁搴�: {}", url); + + // 鍒涘缓骞惰繑鍥炴暟鎹簱杩炴帴 + return DriverManager.getConnection(url); + } + + /** + * 鏍规嵁浼犲叆鐨勬枃浠惰矾寰勶紝澶嶅埗鏂囦欢骞剁粰澶嶅埗鐨勬枃浠舵坊鍔犲悗缂� `.db`锛岀劧鍚庤繛鎺� SQLite 鏁版嵁搴撱�� + * + * @param filePath 鍘熷鏂囦欢璺緞 + * @return SQLite鏁版嵁搴撶殑杩炴帴 + * @throws SQLException 濡傛灉SQLite杩炴帴澶辫触 + * @throws IOException 濡傛灉鏂囦欢澶嶅埗澶辫触 + */ + public static Connection connectToSQLiteWithCopy(String filePath) throws SQLException, IOException { + // 妫�鏌ユ枃浠惰矾寰勬槸鍚︿负绌� + if (filePath == null || filePath.trim().isEmpty()) { + throw new IllegalArgumentException("鏂囦欢璺緞涓嶈兘涓虹┖"); + } + // 鍒涘缓鍘熷鏂囦欢瀵硅薄 + File originalFile = new File(filePath); + // 妫�鏌ュ師鏂囦欢鏄惁瀛樺湪 + if (!originalFile.exists()) { + throw new FileNotFoundException("鍘熷鏂囦欢涓嶅瓨鍦細" + filePath); + } + // 鑾峰彇褰撳墠鏃堕棿鎴充綔涓烘枃浠跺悕鐨勪竴閮ㄥ垎 + String timestamp = String.valueOf(System.currentTimeMillis()); + // 鍒涘缓涓�涓柊鐨勬枃浠跺悕锛屾坊鍔犱竴涓殢鏈哄悗缂�浠ラ伩鍏嶆枃浠跺悕鍐茬獊 + String newFilePath = filePath + "." 
+ timestamp + ".db"; + // 澶嶅埗鏂囦欢鍒版柊鐨勮矾寰� + copyFile(originalFile, new File(newFilePath)); + // 浣跨敤 SQLite JDBC 杩炴帴瀛楃涓茶繛鎺ユ柊鐨勬暟鎹簱鏂囦欢 + String url = "jdbc:sqlite:" + newFilePath; + log.info("杩炴帴鍒癝QLite鏁版嵁搴�: {}", url); + // 杩斿洖SQLite鏁版嵁搴撹繛鎺� + return DriverManager.getConnection(url); + } + + + /** + * 澶嶅埗鏂囦欢鍒版柊鐨勪綅缃� + * + * @param sourceFile 鍘熷鏂囦欢 + * @param destFile 鐩爣鏂囦欢 + * @throws IOException 濡傛灉澶嶅埗杩囩▼涓彂鐢熼敊璇� + */ + private static void copyFile(File sourceFile, File destFile) throws IOException { + // 浣跨敤NIO鐨凢iles.copy鏂规硶杩涜楂樻晥鐨勬枃浠跺鍒� + Path sourcePath = sourceFile.toPath(); + Path destinationPath = destFile.toPath(); + + // 澶嶅埗鏂囦欢骞惰鐩栫洰鏍囨枃浠� + Files.copy(sourcePath, destinationPath, StandardCopyOption.REPLACE_EXISTING); + log.info("鏂囦欢宸插鍒跺埌锛�" + destinationPath.toString()); + } + + + /** + * 鏌ヨENTITY琛ㄧ殑鏁版嵁骞惰繑鍥濲SON鏍煎紡鐨勬暟鎹� + * + * @param stmt SQLite鏁版嵁搴撹繛鎺� + * @return JSON鏍煎紡鐨勬煡璇㈢粨鏋� + * @throws SQLException 濡傛灉鏌ヨ杩囩▼涓彂鐢熼敊璇� + */ + public static JSONObject queryEntityTable(Statement stmt) throws SQLException { + // 鏋勫缓SQL鏌ヨ璇彞 + String querySql = "SELECT ID,DATA,SCHEMA,UUID FROM ENTITY"; + // 鎵ц鏌ヨ骞惰繑鍥炵粨鏋� + try (ResultSet rs = stmt.executeQuery(querySql)) { + // 鍒涘缓涓�涓� JSON 鏁扮粍鐢ㄤ簬瀛樺偍澶氭潯璁板綍 + JSONArray jsonArray = new JSONArray(); + // 灏嗘煡璇㈢粨鏋滆浆鍖栦负JSON鏁扮粍 + JSONObject resJsonObject = new JSONObject(); + // 閬嶅巻鏌ヨ缁撴灉 + while (rs.next()) { + JSONObject jsonObject = new JSONObject(); + jsonObject.put("ID", rs.getInt("ID")); + String data = rs.getString("DATA"); + // 灏咲ATA瀛楁杩涜鍙嶅簭鍒楀寲 + jsonObject.put("DATA", JSON.parse(data)); + jsonObject.put("SCHEMA", rs.getInt("SCHEMA")); + jsonObject.put("UUID", rs.getString("UUID")); + // 灏嗘瘡涓�琛岀殑缁撴灉娣诲姞鍒癑SON鏁扮粍 + jsonArray.add(jsonObject); + } + resJsonObject.put("entity", jsonArray); + return resJsonObject; + } + } + + /** + * 鏌ヨDYNAMIZERS琛ㄧ殑鏁版嵁骞惰繑鍥濲SON鏍煎紡鐨勬暟鎹� + * 瀵逛簬BLOB瀛楁锛岃浆鎹负Base64瀛楃涓� + * + * @param stmt SQL璇彞鎵ц瀵硅薄 + * @return JSON鏍煎紡鐨勬煡璇㈢粨鏋� + * @throws SQLException 濡傛灉鏌ヨ杩囩▼涓彂鐢熼敊璇� + */ + public JSONObject queryDynamizersTable(Statement stmt) throws SQLException { + // 鏋勫缓SQL鏌ヨ璇彞 + String querySql = "SELECT URL, GMLID, DATA FROM DYNAMIZERS"; + // 鍒涘缓涓�涓� JSON 瀵硅薄鐢ㄤ簬瀛樺偍缁撴灉 + JSONObject resultJson = new JSONObject(); + // 鍒涘缓涓�涓� JSON 鏁扮粍鐢ㄤ簬瀛樺偍澶氭潯璁板綍 + JSONArray jsonArray = new JSONArray(); + // 鎵ц鏌ヨ骞惰繑鍥炵粨鏋� + try (ResultSet rs = stmt.executeQuery(querySql)) { + // 閬嶅巻鏌ヨ缁撴灉 + while (rs.next()) { + JSONObject jsonObject = new JSONObject(); + jsonObject.put("URL", rs.getString("URL")); + jsonObject.put("GMLID", rs.getString("GMLID")); + // 鑾峰彇 BLOB 鏁版嵁骞跺皢鍏惰浆鎹负 Base64 缂栫爜瀛楃涓� + byte[] blobData = rs.getBytes("DATA"); + DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyyMMddHHmmss"); + String dirpath = outPath + "\\" + formatter.format(LocalDateTime.now()); + String filepath = outPath + "\\" + formatter.format(LocalDateTime.now()) + "\\" + rs.getString("URL"); + try { + File file = new File(dirpath); + if (!file.exists()) { + file.mkdirs(); + } + File resultFile = new File(filepath); + resultFile.createNewFile(); + try (FileOutputStream fos = new FileOutputStream(filepath)) { + fos.write(blobData); + ZipUtils.unzip(filepath,dirpath); + System.out.println("Bytes written to file successfully."); + resultFile.delete(); + } catch (IOException e) { + e.printStackTrace(); + } + } catch (Exception e) { + e.printStackTrace(); + } + jsonObject.put("DATA", filepath); + jsonArray.add(jsonObject); + } + // 灏嗘煡璇㈢粨鏋滄斁鍏ユ渶缁堢殑 JSON 瀵硅薄涓� + resultJson.put("dynamizers", jsonArray); + } + return resultJson; + } + + /** + * 浣跨敤鍒嗛〉 Query Entity Table + * // 
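The (mojibake-damaged) introduce text above describes what queryDynamizersTable is unpacking: each DYNAMIZERS row keys a zarr ZipStore blob in its DATA column to an ENTITY row through GMLID, holding either depth[n][height][width] plus time[n] for grids or rainfall[n] plus time[n] for rain gauges; the method writes each blob to outPath\<timestamp>\<URL> and unzips it in place. A minimal caller sketch, assuming it lives inside SemFilesSimuServiceImpl so the non-static method and the @Slf4j log are reachable; the .sem path is a placeholder, not something the patch defines:

    // Sketch only: open a working copy of a .sem file and extract its dynamizer blobs.
    public JSONObject extractDynamizers(String semPath) {
        try (Connection conn = connectToSQLiteWithCopy(semPath);
             Statement stmt = conn.createStatement()) {
            return queryDynamizersTable(stmt); // writes and unzips each zarr ZipStore
        } catch (SQLException | IOException e) {
            log.error("Failed to extract dynamizers from {}", semPath, e);
            return new JSONObject();
        }
    }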
鍋囪姣忛〉鏌ヨ1000鏉℃暟鎹紝鏌ヨ绗�2椤电殑鏁版嵁 + * // JSONArray result = queryEntityTableWithPagination(stmt, 1000, 2); + * + * @param stmt STMT + * @param pageSize + * @param pageNumber 椤电爜 + * @return {@link JSONArray} + * @throws SQLException sql寮傚父 + */ + public static JSONArray queryEntityTableWithPagination(Statement stmt, int pageSize, int pageNumber) throws SQLException { + // 璁$畻鏌ヨ鐨勫亸绉婚噺 + int offset = (pageNumber - 1) * pageSize; + + // 鏋勫缓SQL鏌ヨ璇彞锛屼娇鐢↙IMIT鍜孫FFSET杩涜鍒嗛〉 + String querySql = "SELECT * FROM ENTITY LIMIT " + pageSize + " OFFSET " + offset; + + try (ResultSet rs = stmt.executeQuery(querySql)) { + JSONArray jsonArray = new JSONArray(); + + // 閬嶅巻鏌ヨ缁撴灉 + while (rs.next()) { + JSONObject jsonObject = new JSONObject(); + jsonObject.put("ID", rs.getInt("ID")); + jsonObject.put("DATA", rs.getString("DATA")); + jsonObject.put("SCHEMA", rs.getString("SCHEMA")); + jsonObject.put("UUID", rs.getString("UUID")); + + jsonArray.add(jsonObject); + } + + return jsonArray; + } + } + + +// public static void main(String[] args) { +// try { +// // 娴嬭瘯杩炴帴SQLite鏁版嵁搴� +// Connection connection = connectToSQLiteWithCopy("D:\\0a_project\\simulation\\other\\1211SEM鏍蜂緥\\绠$偣.sem"); +// System.out.println("SQLite鏁版嵁搴撹繛鎺ユ垚鍔燂紒"); +// // 鍏抽棴杩炴帴 +// connection.close(); +// } catch (SQLException | IOException e) { +// System.err.println("鎿嶄綔澶辫触: " + e.getMessage()); +// } +// } + + /** + * 鍒涘缓鏁版嵁搴撹繛鎺� + */ + private static Connection getConnection(String sqliteDbPath) throws SQLException { + // 鍒涘缓SQLite鏁版嵁搴撹繛鎺� + Connection conn = DriverManager.getConnection("jdbc:sqlite:" + sqliteDbPath); + Statement stmt = conn.createStatement(); + + // 鍏抽棴鏁版嵁搴撹繛鎺� + //conn.close(); + return conn; + } + + +} diff --git a/src/main/java/com/se/nsl/service/Impl/SwwFilesDealServiceImpl.java b/src/main/java/com/se/nsl/service/Impl/SwwFilesDealServiceImpl.java new file mode 100644 index 0000000..b236244 --- /dev/null +++ b/src/main/java/com/se/nsl/service/Impl/SwwFilesDealServiceImpl.java @@ -0,0 +1,134 @@ +package com.se.nsl.service.Impl; + +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONObject; +import com.se.nsl.config.PropertiesConfig; +import com.se.nsl.domain.po.DataPo; +import com.se.nsl.service.ResultService; +import com.se.nsl.service.SwwFilesDealService; +import com.se.nsl.service.UwService; +import lombok.SneakyThrows; +import lombok.extern.slf4j.Slf4j; +import org.springframework.stereotype.Service; + +import javax.annotation.Resource; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; + +@Slf4j +@Service +public class SwwFilesDealServiceImpl implements SwwFilesDealService { + + @Resource + private PropertiesConfig config; + + @Resource + private UwService uwService; + + @Resource + private ResultService resultService; + + @Override + public Object readSwwFile(String filePath) { + DataPo dataPo = new DataPo(); + dataPo.setPid(0); + dataPo.setName("澶勭悊sww鏂囦欢"); + dataPo.setInPath("H:\\simu\\semout\\testsem\\.out\\testsem.sww"); + dataPo.setOutPath("H:\\simu\\semout\\testsem\\.out"); + + String json_data = 
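queryEntityTableWithPagination above assembles its LIMIT/OFFSET clause by string concatenation. Since both values are ints computed by the caller that is functionally fine, but a parameterized sketch of the same query (an alternative, not the patch's code; it takes the Connection rather than a Statement) keeps the SQL constant:

    // Sketch: same ENTITY page query with bound LIMIT/OFFSET parameters.
    public static JSONArray queryEntityPage(Connection conn, int pageSize, int pageNumber) throws SQLException {
        String sql = "SELECT ID, DATA, SCHEMA, UUID FROM ENTITY LIMIT ? OFFSET ?";
        try (PreparedStatement ps = conn.prepareStatement(sql)) {
            ps.setInt(1, pageSize);
            ps.setInt(2, (pageNumber - 1) * pageSize);
            try (ResultSet rs = ps.executeQuery()) {
                JSONArray rows = new JSONArray();
                while (rs.next()) {
                    JSONObject row = new JSONObject();
                    row.put("ID", rs.getInt("ID"));
                    row.put("DATA", rs.getString("DATA"));
                    row.put("SCHEMA", rs.getString("SCHEMA"));
                    row.put("UUID", rs.getString("UUID"));
                    rows.add(row);
                }
                return rows;
            }
        }
    }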
"{\"pid\":0,\"name\":\"20241213135203\",\"inPath\":\"20241213135203\",\"outPath\":\"20241213135203\",\"startTime\":1727661600000,\"minx\":116.64388473935195,\"miny\":39.884315914604464,\"maxx\":116.64754729082588,\"maxy\":39.887069143903496,\"total\":50,\"duration\":60,\"floodStart\":60,\"floodEnd\":180,\"floodHeight\":1,\"floodType\":\"娌欒\",\"floodMinx\":116.64388473935195,\"floodMiny\":39.884315914604464,\"floodMaxx\":116.64754729082588,\"floodMaxy\":39.887069143903496,\"epsg\":4548}"; + JSONObject jsonObject = JSON.parseObject(json_data); + System.out.println("jsonObject = " + jsonObject); + + +// +// @ApiModelProperty("寮�濮嬫椂闂�") +// @JSONField(format = "yyyy-MM-dd HH:mm:ss") +// @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8") +// private Date startTime; +// +// @ApiModelProperty("鏈�灏廥") +// private Double minx; +// +// @ApiModelProperty("鏈�灏廦") +// private Double miny; +// +// @ApiModelProperty("鏈�澶") +// private Double maxx; +// +// @ApiModelProperty("鏈�澶") +// private Double maxy; +// +// @ApiModelProperty("闄嶉洦閲�(mm)") +// private Double total; +// +// @ApiModelProperty("鏃堕暱(min)") +// private Integer duration; +// +// @ApiModelProperty("鏄惁涓洪槻姹�(0-鍚︼紝1-鏄�)") +// private Integer isFlood; +// +// @ApiModelProperty("闃叉睕寮�濮嬫椂闂�(绉�)") +// private Integer floodStart; +// +// @ApiModelProperty("闃叉睕缁撴潫鏃堕棿(绉�)") +// private Integer floodEnd; +// +// @ApiModelProperty("闃叉睕楂樺害(mm)") +// private Double floodHeight; +// +// @ApiModelProperty("闃叉睕绫诲瀷(娌欒锛岄槻姘存澘)") +// private String floodType; +// +// @ApiModelProperty("闃叉睕鏈�灏廥") +// private Double floodMinx; +// +// @ApiModelProperty("闃叉睕鏈�灏廦") +// private Double floodMiny; +// +// @ApiModelProperty("闃叉睕鏈�澶") +// private Double floodMaxx; +// +// @ApiModelProperty("闃叉睕鏈�澶") +// private Double floodMaxy; +// +// @ApiModelProperty("鍧愭爣绯籌D") +// private Integer epsg; + + + + + + + // 璇诲彇sww鏂囦欢 + asyncCall(dataPo); + return null; + } + + private void asyncCall(DataPo dataPo) { + ExecutorService executor = Executors.newSingleThreadExecutor(); + executor.execute(new Runnable() { + @Override + @SneakyThrows + public void run() { + copeDeal(dataPo); + } + }); + executor.shutdown(); + } + + private void copeDeal(DataPo data) { + try { + uwService.callExe(data); +// uwService.copeWaterFiles(); +// uwService.copeDrainFiles(data); + uwService.copeSwwDrainFiles(data); +// resultService.process(data); + } catch (Exception ex) { + log.error(ex.getMessage(), ex); + } + } + + + +} diff --git a/src/main/java/com/se/nsl/service/ProjectRelatedService.java b/src/main/java/com/se/nsl/service/ProjectRelatedService.java new file mode 100644 index 0000000..1589b60 --- /dev/null +++ b/src/main/java/com/se/nsl/service/ProjectRelatedService.java @@ -0,0 +1,35 @@ +package com.se.nsl.service; + +import com.se.nsl.domain.EntityTypeInfo; +import com.se.nsl.domain.LoginParams; + +public interface ProjectRelatedService { + + /** + * 鑾峰彇鍏挜 + * + * @return {@link String} + */ + Object getPublicKey(); + + /** + * 鐧诲綍瀹炰綋 + * + * @param loginParams 鐧诲綍鍙傛暟 + * @return {@link String} + */ + Object loginEntity(LoginParams loginParams); + + /** + * 鑾峰彇璁块棶瀹炰綋搴撶殑token + * + * @return {@link Object} + */ + Object getEntityPublicKey(); + + Object getDbLits(); + + Object getEntityTypeInfo(EntityTypeInfo entityTypeInfo); + + Object getEntityPipeInfo(EntityTypeInfo entityTypeInfo); +} diff --git a/src/main/java/com/se/nsl/service/ResultService.java b/src/main/java/com/se/nsl/service/ResultService.java new file mode 100644 index 0000000..63b8cad --- /dev/null +++ 
b/src/main/java/com/se/nsl/service/ResultService.java @@ -0,0 +1,615 @@ +package com.se.nsl.service; + +import cn.hutool.core.io.FileUtil; +import com.alibaba.fastjson.JSON; +import com.se.nsl.config.PropertiesConfig; +import com.se.nsl.domain.dto.*; +import com.se.nsl.domain.po.DataPo; +import com.se.nsl.domain.po.PondingPo; +import com.se.nsl.domain.vo.BuildingDepthVo; +import com.se.nsl.helper.ComHelper; +import com.se.nsl.helper.GdalHelper; +import com.se.nsl.helper.ShpHelper; +import lombok.extern.slf4j.Slf4j; +import org.gdal.gdal.Band; +import org.gdal.gdal.Dataset; +import org.gdal.gdal.gdal; +import org.gdal.gdalconst.gdalconst; +import org.gdal.ogr.*; +import org.springframework.stereotype.Service; +import org.springframework.util.CollectionUtils; + +import javax.annotation.Resource; +import java.awt.*; +import java.awt.image.BufferedImage; +import java.io.*; +import java.math.BigDecimal; +import java.math.RoundingMode; +import java.text.ParseException; +import java.text.SimpleDateFormat; +import java.util.*; +import java.util.List; +import java.util.concurrent.CopyOnWriteArrayList; + +@Slf4j +@Service +@SuppressWarnings("ALL") +public class ResultService { + @Resource + PropertiesConfig config; + + public final static double MAX_X_OFFSET = 0; + + private final static SimpleDateFormat SDF = new SimpleDateFormat("yyyy-MM-dd HH:mm"); + + public void process(DataPo data) throws Exception { + String basePath = config.getInPath() + File.separator + data.getInPath() + File.separator; + ResultDto dto = new ResultDto( + data.getInPath(), + basePath + config.getTerrainFile(), + basePath + config.getBuildingFile(), + basePath + config.getWaterPath(), + basePath + config.getFlowPath(), + config.getInPath(), + config.getOutPath(), + data.getEpsg()); + LayerDto layer = new LayerDto(config.getVer(), data.getEpsg(), config.getSizes()); + process(dto, layer); + } + + private void process(ResultDto dto, LayerDto layer) throws Exception { + try { + copeTerrain(dto, layer); + copeBuilding(dto, layer); + List<BuildingDepthVo> buildings = copeWater(dto, layer); + copeFlow(dto, layer); + copeLayerJson(dto, layer); + copeRainFallJson(dto, layer); + copeBuildingDepthJson(dto, buildings); + } finally { + File dir = new File(dto.getTemp()); + if (dir.exists()) { + FileUtil.del(dir); + } + } + } + + private void copeTerrain(ResultDto dto, LayerDto layer) { + Dataset ds = null; + try { + ds = gdal.Open(dto.getTerrainFile(), gdalconst.GA_ReadOnly); + if (null == ds || 0 == ds.getRasterCount()) return; + + setTerrainInfo(ds, layer); + createTerrainPng(dto, ds, layer); + setWaterInfo(dto, layer); + } finally { + if (null != ds) ds.delete(); + } + } + + private void setTerrainInfo(Dataset ds, LayerDto layer) { + Geometry minPoint = GdalHelper.getMinPoint(ds); + Geometry maxPoint = GdalHelper.getMaxPoint(ds); + double minx = ComHelper.getMinVal(minPoint.GetX(0), 10000000); + double miny = ComHelper.getMinVal(minPoint.GetY(0), 10000000); + double maxx = ComHelper.getMaxVal(maxPoint.GetX(0) + MAX_X_OFFSET, 10000000); + double maxy = ComHelper.getMaxVal(maxPoint.GetY(0), 10000000); + //layer.setExtension(new ExtensionDto(minx, miny, maxx, maxy, Double.MAX_VALUE, Double.MIN_VALUE)); + + Band band = ds.GetRasterBand(1); + double[] mm = new double[2]; + band.ComputeRasterMinMax(mm, 0); + //layer.getTerrain().setHeight(getMinVal(mm[0], 1000), getMaxVal(mm[1], 1000)); + layer.setExtension(new ExtensionDto(minx, miny, maxx, maxy, mm[0], mm[1])); + } + + private void createTerrainPng(ResultDto dto, Dataset ds, LayerDto 
layer) { + String terrainPath = dto.getOutPath() + File.separator + "terrain"; + File f = new File(terrainPath); + if (!f.exists() || !f.isDirectory()) f.mkdirs(); + + for (int[] sizes : layer.getTerrain().getSize()) { + String tif = dto.getTemp() + File.separator + "terrain_" + sizes[0] + "_" + sizes[1] + ".tif"; + ComHelper.Resample(ds, tif, sizes[0], sizes[1], layer); + if (!new File(tif).exists()) continue; + + String png = terrainPath + File.separator + sizes[0] + "_" + sizes[1] + ".png"; + Terrain2Png(layer, tif, png, sizes[0], sizes[1]); + } + } + + private void Terrain2Png(LayerDto layer, String tif, String png, int width, int height) { + Dataset ds = null; + try { + ds = gdal.Open(tif, gdalconst.GA_ReadOnly); + if (null == ds || 0 == ds.getRasterCount()) return; + + Band band = ds.GetRasterBand(1); + float[] buffer = new float[width * height]; + //band.ReadRaster(0, 0, width, height, buffer, width, height, 0, 0); + band.ReadRaster(0, 0, width, height, buffer); + layer.getTerrain().getVals().put(width + "_" + height, buffer); + + BufferedImage image = new BufferedImage(width, height, BufferedImage.TYPE_INT_ARGB); + double differ = layer.getExtension().getMaxHeight() - layer.getExtension().getMinHeight(), minHeight = layer.getExtension().getMinHeight(); + for (int x = 0; x < width; x++) { + for (int y = 0; y < height; y++) { + int offset = x + y * width; + if (Float.isNaN(buffer[offset]) || buffer[offset] < -999 || buffer[offset] < minHeight) continue; + + int r = 0, g, b; + if (buffer[offset] - layer.getExtension().getMaxHeight() > 0) { + g = b = 255; + } else { + int val = (int) ((buffer[offset] - minHeight) / differ * 65535); + g = val / 256; + b = val % 256; + } + + Color color = new Color(r, g, b, 127); + //graphic.drawImage(image, x, y, 1, 1, color, null); + image.setRGB(x, y, color.getRGB()); + } + } + ComHelper.savePng(image, png); + } finally { + if (null != ds) ds.delete(); + } + } + + private void copeBuilding(ResultDto dto, LayerDto layer) { + Driver driver = null; + DataSource dataSource = null; + Layer shpLayer = null; + try { + driver = ogr.GetDriverByName("ESRI shapefile"); + if (null == driver) return; + + dataSource = driver.Open(dto.getBuildingFile()); + if (null == dataSource) return; + + shpLayer = dataSource.GetLayer(0); + if (shpLayer.GetFeatureCount() < 1) return; + + do { + Feature f = shpLayer.GetNextFeature(); + if (null == f) break; + + String id = f.GetFieldAsString(config.getBuildingKey()); + BuildingDto building = new BuildingDto(id, f.GetGeometryRef()); + dto.getBuildingList().add(building); + } while (true); + } finally { + if (null != shpLayer) shpLayer.delete(); + if (null != dataSource) dataSource.delete(); + if (null != driver) driver.delete(); + } + } + + private void setWaterInfo(ResultDto dto, LayerDto layer) { + List<String> files = getFiles(dto.getWaterPath(), ".tif"); + layer.getWaters().setFiles(files); + if (null == files || files.size() == 0) return; + + setWaterData(layer, files); + setWaterHeight(layer, files); + } + + private List<BuildingDepthVo> copeWater(ResultDto dto, LayerDto layer) { + List<String> files = layer.getWaters().getFiles(); + if (files.size() == 0 || files.size() != layer.getWaters().getData().size()) return null; + + processWaters(dto, files, layer); + + return processBuilding(dto, files, layer); + } + + private List<String> getFiles(String path, String suffix) { + List<String> files = new ArrayList<>(); + ComHelper.getFiles(files, new File(path), suffix); + files.sort((a, b) -> a.compareToIgnoreCase(b)); + + return 
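Terrain2Png above packs each elevation sample into 16 bits split across the green and blue channels (green holds the high byte), scaled between the extension's minHeight and maxHeight; fully saturated green/blue marks samples at or above maxHeight. A sketch of the inverse mapping a client could apply to a pixel read back from the PNG, assuming minHeight/maxHeight come from the extension block that ends up in layer.json:

    // Sketch: recover an elevation value from one ARGB pixel of a terrain PNG.
    static double decodeHeight(int argb, double minHeight, double maxHeight) {
        int g = (argb >> 8) & 0xFF; // high byte of the 16-bit height code
        int b = argb & 0xFF;        // low byte
        double ratio = (g * 256 + b) / 65535.0;
        return minHeight + ratio * (maxHeight - minHeight);
    }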
files; + } + + private void setWaterData(LayerDto layer, List<String> files) { + Calendar calendar = Calendar.getInstance(); + calendar.setTime(new Date()); + calendar.set(Calendar.MILLISECOND, 0); + + for (String file : files) { + String fileName = ComHelper.getNameWithExt(file); + int year = Integer.parseInt(fileName.substring(0, 4)); + int month = Integer.parseInt(fileName.substring(4, 6)); + int day = Integer.parseInt(fileName.substring(6, 8)); + int hour = Integer.parseInt(fileName.substring(8, 10)); + int minute = Integer.parseInt(fileName.substring(10, 12)); + int second = Integer.parseInt(fileName.substring(12, 14)); + + calendar.set(Calendar.YEAR, year); + calendar.set(Calendar.MONTH, month); + calendar.set(Calendar.DAY_OF_MONTH, day); + calendar.set(Calendar.HOUR_OF_DAY, hour); + calendar.set(Calendar.MINUTE, minute); + calendar.set(Calendar.SECOND, second); + + layer.getWaters().getData().add(calendar.getTime().getTime()); + } + layer.getDuration().setStart(layer.getWaters().getData().get(0)); + layer.getDuration().setEnd(layer.getWaters().getData().get(layer.getWaters().getData().size() - 1)); + } + + private void setWaterHeight(LayerDto layer, List<String> files) { + files.parallelStream().forEach(file -> { + Dataset ds = null; + try { + ds = gdal.Open(file, gdalconst.GA_ReadOnly); + if (null == ds || 0 == ds.getRasterCount()) return; + + double[] mm = new double[2]; + ds.GetRasterBand(1).ComputeRasterMinMax(mm, 0); + //layer.getExtension().setHeight(mm[0], mm[1]); + layer.getWaters().setHeight(mm[0], mm[1]); + } finally { + if (null != ds) ds.delete(); + } + }); + //layer.getExtension().setMinHeight(getMinVal(layer.getExtension().getMinHeight() - 1, 1000)); + //layer.getExtension().setMaxHeight(getMaxVal(layer.getExtension().getMaxHeight() + 1, 1000)); + //layer.getWaters().setMinHeight(getMinVal(layer.getWaters().getMinHeight() - 1, 1000)); + //layer.getWaters().setMaxHeight(getMaxVal(layer.getWaters().getMaxHeight() + 1, 1000)); + layer.getExtension().setMaxHeight(layer.getExtension().getMaxHeight() + layer.getWaters().getMaxHeight()); + layer.getExtension().setMaxHeight(ComHelper.getMaxVal(layer.getExtension().getMaxHeight(), 1000000)); + layer.getExtension().setMinHeight(ComHelper.getMaxVal(layer.getExtension().getMinHeight(), 1000000)); + layer.getExtension().setDiffer(); + } + + private void processWaters(ResultDto dto, List<String> files, LayerDto layer) { + for (int i = 0, c = files.size(); i < c; i++) { + Dataset ds = null; + try { + ds = gdal.Open(files.get(i), gdalconst.GA_ReadOnly); + if (null == ds || 0 == ds.getRasterCount()) return; + if (null == ds.GetSpatialRef()) ds.SetSpatialRef(dto.getSpatialReference()); + + createWaterPng(dto, ds, layer, layer.getWaters().getData().get(i)); + if (config.getCopyTif()) copyWaterTif(dto, ds, layer.getWaters().getData().get(i)); + createVectors(dto, ds, layer, layer.getWaters().getData().get(i)); + } finally { + if (null != ds) ds.delete(); + } + } + } + + private void createWaterPng(ResultDto dto, Dataset ds, LayerDto layer, long ticks) { + String waterPath = dto.getOutPath() + File.separator + "waters" + File.separator + ticks; + File dir = new File(waterPath); + if (!dir.exists() || !dir.isDirectory()) dir.mkdirs(); + + for (int[] sizes : layer.getTerrain().getSize()) { + String fileName = ComHelper.getNameWithExt(ds.GetDescription()) + "_" + sizes[0] + "_" + sizes[1]; + String tif = dto.getTemp() + File.separator + fileName + ".tif"; + ComHelper.Resample(ds, tif, sizes[0], sizes[1], layer); + if (!new File(tif).exists()) 
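setWaterData above rebuilds each water raster's timestamp from its yyyyMMddHHmmss file name with java.util.Calendar. Note that Calendar.MONTH is zero-based, so if the file names carry ordinary 01-12 months the resulting epoch values land one month late; a java.time sketch of the same conversion avoids that pitfall (this assumes the 14-digit prefix really is a calendar date, which the substring indices above imply):

    // Sketch: yyyyMMddHHmmss file-name prefix -> epoch milliseconds via java.time.
    static long fileNameToEpochMillis(String fileName) {
        java.time.format.DateTimeFormatter fmt =
                java.time.format.DateTimeFormatter.ofPattern("yyyyMMddHHmmss");
        java.time.LocalDateTime t = java.time.LocalDateTime.parse(fileName.substring(0, 14), fmt);
        return t.atZone(java.time.ZoneId.systemDefault()).toInstant().toEpochMilli();
    }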
continue; + + String png = waterPath + File.separator + sizes[0] + "_" + sizes[1] + ".png"; + water2Png(dto, layer, tif, png, sizes[0], sizes[1]); + } + } + + private void water2Png(ResultDto dto, LayerDto layer, String tif, String png, int width, int height) { + Dataset ds = null; + try { + ds = gdal.Open(tif, gdalconst.GA_ReadOnly); + if (null == ds || 0 == ds.getRasterCount()) return; + + Band band = ds.GetRasterBand(1); + float[] buffer = new float[width * height]; + band.ReadRaster(0, 0, width, height, buffer); + double[] transform = ds.GetGeoTransform(); + + BufferedImage image = new BufferedImage(width, height, BufferedImage.TYPE_INT_ARGB); + //double differ = layer.getWaters().getMaxHeight() - layer.getWaters().getMinHeight(), minHeight = layer.getWaters().getMinHeight(); + double differ = layer.getExtension().getDiffer(), maxHeight = layer.getExtension().getMaxHeight(), minHeight = layer.getExtension().getMinHeight(); + float[] ts = layer.getTerrain().getVals().get(width + "_" + height); + for (int x = 0; x < width; x++) { + for (int y = 0; y < height; y++) { + int offset = x + y * width; + //if (Float.isNaN(buffer[offset]) || buffer[offset] < -999 || buffer[offset] < minHeight) continue; + if (Float.isNaN(buffer[offset]) || buffer[offset] <= 0 || Float.isNaN(ts[offset])) continue; + + //double X = transform[0] + x * transform[1] + y * transform[2]; + //double Y = transform[3] + x * transform[4] + y * transform[5]; + //BuildingDto building = intersects(dto, X, Y); + //if (null != building) continue; + + int r = 0, g, b; + if (buffer[offset] + ts[offset] > maxHeight) { + g = b = 255; + } else { + int val = (int) ((buffer[offset] + ts[offset] - minHeight) / differ * 65535); + g = val / 256; + b = val % 256; + } + + Color color = new Color(r, g, b, 127); + image.setRGB(x, y, color.getRGB()); + } + } + ComHelper.savePng(image, png); + } finally { + if (null != ds) ds.delete(); + } + } + + private List<BuildingDepthVo> processBuilding(ResultDto dto, List<String> files, LayerDto layer) { + List<BuildingDepthVo> list = new CopyOnWriteArrayList<>(); + for (int i = 0, c = files.size(); i < c; i++) { + Dataset ds = null; + try { + ds = gdal.Open(files.get(i), gdalconst.GA_ReadOnly); + if (null == ds || 0 == ds.getRasterCount()) continue; + if (null == ds.GetSpatialRef()) ds.SetSpatialRef(dto.getSpatialReference()); + + copeBuildingDepth(dto, ds, layer.getWaters().getData().get(i), list); + } finally { + if (null != ds) ds.delete(); + } + } + + return list; + } + + private void copeBuildingDepth(ResultDto dto, Dataset ds, long ticks, List<BuildingDepthVo> list) { + double[] transform = ds.GetGeoTransform(); + int xSize = ds.getRasterXSize(), ySize = ds.getRasterYSize(); + double minX = transform[0], pixelWidth = transform[1], maxY = transform[3], pixelHeight = Math.abs(transform[5]); + + for (BuildingDto building : dto.getBuildingList()) { + Double val = getValue(ds, building, xSize, ySize, minX, maxY, pixelWidth, pixelHeight); + list.add(new BuildingDepthVo(building.getId(), ticks, val)); + } + } + + private Double getValue(Dataset ds, BuildingDto building, int xSize, int ySize, double minX, double maxY, double pixelWidth, double pixelHeight) { + double[] env = new double[4]; + building.getGeom().GetEnvelope(env); + + int startX = (int) Math.floor((env[0] - minX) / pixelWidth); + int endX = (int) Math.floor((env[1] - minX) / pixelWidth); + int startY = (int) Math.floor((maxY - env[3]) / Math.abs(pixelHeight)); + int endY = (int) Math.floor((maxY - env[2]) / Math.abs(pixelHeight)); + if 
(startX < 0) startX = 0; + if (startY < 0) startY = 0; + if (endX > ds.getRasterXSize()) endX = ds.getRasterXSize(); + if (endY > ds.getRasterYSize()) endY = ds.getRasterYSize(); + if (endX - startX < 1 || endY - startY < 1) return null; + + int width = endX - startX; + int height = endY - startY; + double[] pixelValues = new double[width * height]; + ds.GetRasterBand(1).ReadRaster(startX, startY, width, height, pixelValues); + + Double val = Arrays.stream(pixelValues).max().getAsDouble(); + + return ComHelper.isValid(val) ? val : null; + } + + private void copyWaterTif(ResultDto dto, Dataset ds, long ticks) { + String source = ds.GetDescription(); + String target = dto.getOutPath() + File.separator + "waters" + File.separator + ticks + File.separator + "water.tif"; + FileUtil.copyFile(source, target); + } + + private void createVectors(ResultDto dto, Dataset ds, LayerDto layer, Long ticks) { + String filePath = dto.getOutPath() + File.separator + "waters" + File.separator + ticks + File.separator + "polygonize.geojson"; + ShpHelper.polygonize2Geojson(ds, filePath); + + List<Geometry> geometries = getGeometries(filePath); + if (CollectionUtils.isEmpty(geometries)) return; + + List<PondingPo> list = copePonding(dto, ds, layer, geometries); + if (CollectionUtils.isEmpty(list)) return; + + try { + filePath = dto.getOutPath() + File.separator + "waters" + File.separator + ticks + File.separator + "water.json"; + ComHelper.writeJson(filePath, JSON.toJSONString(list)); + } catch (Exception ex) { + log.error(ex.getMessage(), ex); + } + } + + private List<Geometry> getGeometries(String filePath) { + if (!FileUtil.exist(filePath)) return null; + + Driver driver = null; + DataSource dataSource = null; + org.gdal.ogr.Layer layer = null; + try { + driver = ogr.GetDriverByName("GeoJSON"); + if (null == driver) return null; + + DataSource ds = driver.Open(filePath); + if (null == ds) return null; + + layer = ds.GetLayer(0); + List<Geometry> list = new ArrayList<>(); + for (long i = 0, d = layer.GetFeatureCount(); i < d; i++) { + Feature f = layer.GetFeature(i); + if (f.GetFieldAsDouble("val") < -999) continue; + + list.add(f.GetGeometryRef()); + } + + return list; + } catch (Exception ex) { + log.error(ex.getMessage(), ex); + return null; + } finally { + GdalHelper.delete(layer, dataSource, driver); + } + } + + private List<PondingPo> copePonding(ResultDto dto, Dataset ds, LayerDto layer, List<Geometry> geometries) { + double[] transform = ds.GetGeoTransform(); + int xSize = ds.getRasterXSize(), ySize = ds.getRasterYSize(); + + List<PondingPo> list = new ArrayList<>(); + for (Geometry geometry : geometries) { + //List<PointDto> points = getValues(ds, geometry, transform, xSize, ySize); + //if (CollectionUtils.isEmpty(points)) continue; + + //PointDto point = Collections.max(points); + PointDto point = getValues(ds, geometry, transform, xSize, ySize); + if (null == point) continue; + + list.add(new PondingPo(geometry, point)); + } + + return list; + } + + private PointDto getValues(Dataset ds, Geometry g, double[] transform, int xSize, int ySize) { + double[] env = new double[4]; + g.GetEnvelope(env); + + int startX = (int) Math.floor((env[0] - transform[0]) / transform[1]); + int endX = (int) Math.floor((env[1] - transform[0]) / transform[1]); + int startY = (int) Math.floor((transform[3] - env[3]) / Math.abs(transform[5])); + int endY = (int) Math.floor((transform[3] - env[2]) / Math.abs(transform[5])); + if (startX < 0) startX = 0; + if (startY < 0) startY = 0; + if (endX > ds.getRasterXSize()) endX = 
ds.getRasterXSize(); + if (endY > ds.getRasterYSize()) endY = ds.getRasterYSize(); + if (endX - startX < 1 || endY - startY < 1) return null; + + int width = endX - startX; + int height = endY - startY; + double[] pixelValues = new double[width * height]; + ds.GetRasterBand(1).ReadRaster(startX, startY, width, height, pixelValues); + + Double val = Arrays.stream(pixelValues).max().getAsDouble(); + + return new PointDto(g.Centroid().GetX(), g.Centroid().GetY(), ComHelper.isValid(val) ? val : 0.0); + } + + private void copeFlow(ResultDto dto, LayerDto layer) { + List<String> files = getFiles(dto.getFlowPath(), ".tif"); + if (null == files || files.size() != layer.getWaters().getData().size()) return; + + for (int i = 0, c = files.size(); i < c; i++) { + Dataset ds = null; + try { + ds = gdal.Open(files.get(i), gdalconst.GA_ReadOnly); + if (null == ds || 0 == ds.getRasterCount()) return; + if (null == ds.GetSpatialRef()) ds.SetSpatialRef(dto.getSpatialReference()); + + createFlowPng(dto, ds, layer, layer.getWaters().getData().get(i)); + } finally { + if (null != ds) ds.delete(); + } + } + } + + private void createFlowPng(ResultDto dto, Dataset ds, LayerDto layer, long ticks) { + String flowPath = dto.getOutPath() + File.separator + "flows" + File.separator + ticks; + File dir = new File(flowPath); + if (!dir.exists() || !dir.isDirectory()) dir.mkdirs(); + + for (int[] sizes : layer.getTerrain().getSize()) { + String name = ComHelper.getNameWithExt(ds.GetDescription()) + "_" + sizes[0] + "_" + sizes[1]; + String tif = dto.getTemp() + File.separator + name + ".tif"; + ComHelper.Resample(ds, tif, sizes[0], sizes[1], layer); + if (!new File(tif).exists()) continue; + + String png = flowPath + File.separator + sizes[0] + "_" + sizes[1] + ".png"; + vxyTif2Png(layer, tif, png, sizes[0], sizes[1]); + } + } + + private void vxyTif2Png(LayerDto layer, String tif, String png, int width, int height) { + Dataset ds = null; + try { + ds = gdal.Open(tif, gdalconst.GA_ReadOnly); + if (null == ds || 0 == ds.getRasterCount()) return; + + float[] vxBuffer = new float[width * height], vyBuffer = new float[width * height]; + ds.GetRasterBand(1).ReadRaster(0, 0, width, height, vxBuffer); + ds.GetRasterBand(2).ReadRaster(0, 0, width, height, vyBuffer); + + createFlowPng(vxBuffer, vyBuffer, png, width, height); + } finally { + if (null != ds) ds.delete(); + } + } + + private void createFlowPng(float[] vxBuffer, float[] vyBuffer, String png, int width, int height) { + BufferedImage image = new BufferedImage(width, height, BufferedImage.TYPE_INT_ARGB); + for (int x = 0; x < width; x++) { + for (int y = 0; y < height; y++) { + int offset = x + y * width; + float fx = ComHelper.getFloatValue(vxBuffer[offset]); + float fy = ComHelper.getFloatValue(vyBuffer[offset]); + if (Float.isNaN(fx) && Float.isNaN(fy) || (fx == 0 && fy == 0)) continue; + + fx = Float.isNaN(fx) ? 0 : fx; + fy = Float.isNaN(fy) ? 
0 : fy; + double dr = Math.sqrt(Math.pow(fx, 2) + Math.pow(fy, 2)); + + int r = (int) (dr / 4 * 255); + int g = (int) ((fx / dr * 0.5 + 0.5) * 255); + int b = (int) ((fy / dr * 0.5 + 0.5) * 255); + + Color color = new Color(ComHelper.getSafeValue(r), ComHelper.getSafeValue(g), ComHelper.getSafeValue(b), 127); + image.setRGB(x, y, color.getRGB()); + } + } + ComHelper.savePng(image, png); + } + + private void copeLayerJson(ResultDto dto, LayerDto layer) throws IOException { + layer.getWaters().setFiles(null); + layer.getTerrain().setEpsg(null); + layer.getExtension().setDiffer(null); + + String json = JSON.toJSONString(layer); + // String json = JSONUtil.toJsonPrettyStr(layer); + String filePath = dto.getOutPath() + File.separator + "layer.json"; + + ComHelper.writeJson(filePath, json); + } + + public void copeRainFallJson(ResultDto dto, LayerDto layer) throws IOException, ParseException { + String rainGageFilePath = config.getInPath() + File.separator + dto.getServiceName() + File.separator + "RainGage.dat"; + String filePath = dto.getOutPath() + File.separator + "rainfall.json"; + + Map<String, Double> map = new LinkedHashMap<>(); + FileReader fr = new FileReader(rainGageFilePath); + BufferedReader br = new BufferedReader(fr); + + String line = br.readLine(); + while ((line = br.readLine()) != null) { + String[] rainFall = line.split(" "); + if (rainFall.length < 7) continue; + + String sdt = rainFall[1] + "-" + rainFall[2] + "-" + rainFall[3] + " " + rainFall[4] + ":" + rainFall[5]; + BigDecimal num = new BigDecimal(rainFall[6]); + map.put("" + SDF.parse(sdt).getTime(), num.setScale(2, RoundingMode.HALF_UP).doubleValue()); + } + br.close(); + fr.close(); + + String json = JSON.toJSONString(map); + ComHelper.writeJson(filePath, json); + } + + private void copeBuildingDepthJson(ResultDto dto, List<BuildingDepthVo> list) throws IOException { + if (CollectionUtils.isEmpty(list)) return; + + String json = JSON.toJSONString(list); + String filePath = dto.getOutPath() + File.separator + "building.json"; + + ComHelper.writeJson(filePath, json); + } +} diff --git a/src/main/java/com/se/nsl/service/SemFilesSimuService.java b/src/main/java/com/se/nsl/service/SemFilesSimuService.java new file mode 100644 index 0000000..cab3007 --- /dev/null +++ b/src/main/java/com/se/nsl/service/SemFilesSimuService.java @@ -0,0 +1,17 @@ +package com.se.nsl.service; + +public interface SemFilesSimuService { + + + /** + * 鑾峰彇 INTRODUCE + * + * @return {@link Object} + */ + Object getIntroduce(); + + + Object createSimuBySemFile(); + + Object readSemFile(String filePath); +} diff --git a/src/main/java/com/se/nsl/service/SimuFilesService.java b/src/main/java/com/se/nsl/service/SimuFilesService.java new file mode 100644 index 0000000..7fe96fc --- /dev/null +++ b/src/main/java/com/se/nsl/service/SimuFilesService.java @@ -0,0 +1,229 @@ +package com.se.nsl.service; + +import cn.hutool.core.bean.BeanUtil; +import cn.hutool.core.collection.CollUtil; +import cn.hutool.core.io.FileUtil; +import cn.hutool.json.JSONUtil; +import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper; +import com.baomidou.mybatisplus.core.metadata.IPage; +import com.baomidou.mybatisplus.core.metadata.OrderItem; +import com.baomidou.mybatisplus.extension.plugins.pagination.Page; +import com.se.nsl.config.PropertiesConfig; +import com.se.nsl.domain.dto.GeDb; +import com.se.nsl.domain.po.DataPo; +import com.se.nsl.domain.po.SimuPo; +import com.se.nsl.domain.vo.CreateFilesSimuVo; +import com.se.nsl.domain.vo.SimuVo; +import 
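createFlowPng above encodes each flow sample's magnitude in the red channel (scaled by 1/4, so the channel saturates near a speed of 4 in the raster's units, judging from the constant) and the unit direction in green/blue remapped from [-1, 1] to [0, 255]. A decoding sketch for consumers of the flow PNGs, with the 4-unit cap carried over as an assumption:

    // Sketch: invert the flow-PNG encoding back to an approximate (vx, vy) pair.
    static double[] decodeFlow(int argb) {
        int r = (argb >> 16) & 0xFF, g = (argb >> 8) & 0xFF, b = argb & 0xFF;
        double speed = r / 255.0 * 4.0;        // red stores |v| / 4 * 255
        double dirX = (g / 255.0 - 0.5) * 2.0; // green stores vx / |v| remapped to 0..255
        double dirY = (b / 255.0 - 0.5) * 2.0; // blue stores vy / |v| remapped to 0..255
        return new double[]{dirX * speed, dirY * speed};
    }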
com.se.nsl.helper.StringHelper; +import com.se.nsl.helper.WebHelper; +import com.se.nsl.mapper.SimuMapper; +import lombok.SneakyThrows; +import lombok.extern.slf4j.Slf4j; +import org.springframework.stereotype.Service; + +import javax.annotation.Resource; +import java.io.File; +import java.sql.Timestamp; +import java.util.Date; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; + +/** + * SIMU 鏂囦欢鏈嶅姟 + * + * @author xingjinshuang@smartearth.cn + * @date 2024/12/24 + */ +@Slf4j +@Service + +public class SimuFilesService { + + @Resource + SimuMapper simuMapper; + + @Resource + PropertiesConfig config; + + @Resource + GedbService gedbService; + + @Resource + UwService uwService; + + @Resource + ResultService resultService; + + public IPage<SimuPo> get(SimuVo vo) { + QueryWrapper<SimuPo> wrapper = getPageWrapper(vo); + + Page<SimuPo> page = new Page<>(vo.getPageIndex(), vo.getPageSize()); + page.addOrder(OrderItem.desc("id")); + + IPage<SimuPo> paged = simuMapper.selectPage(page, wrapper); + + return paged; + } + + private QueryWrapper<SimuPo> getPageWrapper(SimuVo vo) { + QueryWrapper<SimuPo> wrapper = new QueryWrapper<>(); + if (null != vo.getId()) { + wrapper.eq("id", vo.getId()); + } + if (null != vo.getPid()) { + wrapper.eq("pid", vo.getPid()); + } + if (null != vo.getNum()) { + wrapper.eq("num", vo.getNum()); + } + if (!StringHelper.isEmpty(vo.getName())) { + wrapper.like("lower(name)", vo.getName().trim().toLowerCase()); + } + if (!CollUtil.isEmpty(vo.getStatus())) { + wrapper.in("status", vo.getStatus()); + } + + return wrapper; + } + + + private boolean delDir(String dir) { + File file = new File(dir); + if (!file.exists() || !file.isDirectory()) { + return false; + } + + return FileUtil.del(dir); + } + + + /** + * "鐘舵�侊細 + * 0-鍒涘缓浠跨湡浠诲姟锛� + * 1-杩炴帴GEDB搴擄紝 + * 2-涓嬭浇绌洪棿鏁版嵁锛� + * 3-涓嬭浇楂樼▼鏁版嵁锛� + * 4-鐢熸垚闄嶉洦鏂囦欢锛� + * 5-鐢熸垚閰嶇疆鏂囦欢锛� + * 6-妯℃嫙鍐呮稘浠跨湡锛� + * 7-澶勭悊姘翠綅鏂囦欢锛� + * 8-澶勭悊鎺掓按鏂囦欢锛� + * 9-澶勭悊浠跨湡缁撴灉锛� + * 10-瀹屾垚锛�-10-鍑洪敊 + * + * @param vo VO + * @return boolean + */ + public boolean createByfiles(CreateFilesSimuVo vo,String token) { + Date now = new Date(); + String date = StringHelper.YMDHMS2_FORMAT.format(now); + if (StringHelper.isEmpty(vo.getName())) { + vo.setName(date); + } + DataPo data = BeanUtil.copyProperties(vo, DataPo.class); + data.setPath(date, date); + initPath(data); + SimuPo simu = new SimuPo(vo.getNum(), vo.getPid(), vo.getName(), JSONUtil.toJsonStr(data), 0, vo.getBak()); + simu.setServiceName(date); + simu.setCreateTime(new Timestamp(now.getTime())); + simu.setSemUrl(vo.getSemUrl()); + simu.setPointUrl(vo.getPointName()); + simu.setLinkUrl(vo.getLinkName()); + int rows = simuMapper.insert(simu); + if (rows > 0) { + asyncCall(simu,token); + } + + return rows > 0; + } + + private void initPath(DataPo data) { + createDir(config.getInPath() + File.separator + data.getInPath()); + createDir(config.getOutPath() + File.separator + data.getOutPath()); + } + + private void createDir(String path) { + File f = new File(path); + if (f.exists() && f.isDirectory()) { + FileUtil.del(f); + } + f.mkdirs(); + } + + private void asyncCall(SimuPo simu,String token) { + ExecutorService executor = Executors.newSingleThreadExecutor(); + executor.execute(new Runnable() { + @Override + @SneakyThrows + public void run() { + cope(simu,token); + } + }); + executor.shutdown(); + } + + + /** + * "鐘舵�侊細 + * 0-鍒涘缓浠跨湡浠诲姟锛� + * 1-杩炴帴GEDB搴擄紝 + * 2-涓嬭浇绌洪棿鏁版嵁锛� + * 3-涓嬭浇楂樼▼鏁版嵁锛� + * 4-鐢熸垚闄嶉洦鏂囦欢锛� + * 5-鐢熸垚閰嶇疆鏂囦欢锛� + * 6-妯℃嫙鍐呮稘浠跨湡锛� + * 7-澶勭悊姘翠綅鏂囦欢锛� + * 8-澶勭悊鎺掓按鏂囦欢锛� + * 
9-澶勭悊浠跨湡缁撴灉锛� + * 10-瀹屾垚锛�-10-鍑洪敊 + * + * @param simu 妯℃嫙 + */ + + private void cope(SimuPo simu,String token) { + try { + DataPo data = JSONUtil.toBean(simu.getData(), DataPo.class); + + update(simu, 1, null); + GeDb db = gedbService.connectGedb(token, data); + simu.setData(JSONUtil.toJsonStr(data)); + + update(simu, 2, null); + gedbService.copeVectors(token, data, db); + + update(simu, 3, null); + gedbService.copeDem(token, data); + + update(simu, 4, null); + uwService.createRainFile(data); + + update(simu, 5, null); + uwService.createConfig(data); + + update(simu, 6, null); + uwService.callExe(data); + + update(simu, 7, null); + //uwService.copeWaterFiles(); + + update(simu, 8, null); + uwService.copeDrainFiles(data); + + update(simu, 9, null); + resultService.process(data); + + update(simu, 10, "瀹屾垚"); + } catch (Exception ex) { + log.error(ex.getMessage(), ex); + update(simu, -simu.getStatus(), ex.getMessage()); + } + } + + private void update(SimuPo simu, int status, String rs) { + simu.setStatus(status); + if (null != rs) simu.setResult(rs); + simu.setUpdateTime(WebHelper.getCurrentTimestamp()); + + simuMapper.updateById(simu); + } +} diff --git a/src/main/java/com/se/nsl/service/SimuService.java b/src/main/java/com/se/nsl/service/SimuService.java new file mode 100644 index 0000000..e924066 --- /dev/null +++ b/src/main/java/com/se/nsl/service/SimuService.java @@ -0,0 +1,236 @@ +package com.se.nsl.service; + +import cn.hutool.core.bean.BeanUtil; +import cn.hutool.core.collection.CollUtil; +import cn.hutool.core.io.FileUtil; +import cn.hutool.json.JSONUtil; +import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper; +import com.baomidou.mybatisplus.core.metadata.IPage; +import com.baomidou.mybatisplus.core.metadata.OrderItem; +import com.baomidou.mybatisplus.extension.plugins.pagination.Page; +import com.se.nsl.config.PropertiesConfig; +import com.se.nsl.domain.dto.GeDb; +import com.se.nsl.domain.po.DataPo; +import com.se.nsl.domain.po.SimuPo; +import com.se.nsl.domain.vo.CreateSimuVo; +import com.se.nsl.domain.vo.SimuVo; +import com.se.nsl.helper.StringHelper; +import com.se.nsl.helper.WebHelper; +import com.se.nsl.mapper.SimuMapper; +import lombok.SneakyThrows; +import lombok.extern.slf4j.Slf4j; +import org.springframework.stereotype.Service; + +import javax.annotation.Resource; +import java.io.File; +import java.sql.Timestamp; +import java.util.Date; +import java.util.List; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; + +@Slf4j +@Service +@SuppressWarnings("ALL") +public class SimuService { + @Resource + SimuMapper simuMapper; + + @Resource + PropertiesConfig config; + + @Resource + GedbService gedbService; + + @Resource + UwService uwService; + + @Resource + ResultService resultService; + + public IPage<SimuPo> get(SimuVo vo) { + QueryWrapper<SimuPo> wrapper = getPageWrapper(vo); + + Page<SimuPo> page = new Page<>(vo.getPageIndex(), vo.getPageSize()); + page.addOrder(OrderItem.desc("id")); + + IPage<SimuPo> paged = simuMapper.selectPage(page, wrapper); + + return paged; + } + + private QueryWrapper<SimuPo> getPageWrapper(SimuVo vo) { + QueryWrapper<SimuPo> wrapper = new QueryWrapper<>(); + if (null != vo.getId()) { + wrapper.eq("id", vo.getId()); + } + if (null != vo.getPid()) { + wrapper.eq("pid", vo.getPid()); + } + if (null != vo.getNum()) { + wrapper.eq("num", vo.getNum()); + } + if (!StringHelper.isEmpty(vo.getName())) { + wrapper.like("lower(name)", vo.getName().trim().toLowerCase()); + } + if 
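The cope pipeline above advances a numeric status through update(simu, n, null) at every step; the garbled javadoc lists the meanings as 0 create the simulation task, 1 connect to the GEDB database, 2 download spatial (vector) data, 3 download elevation data, 4 generate the rainfall file, 5 generate the config file, 6 run the flooding simulation, 7 process water-level files, 8 process drainage files, 9 process simulation results, 10 finished, with the catch block storing the negated current status on failure. A sketch that names those codes (an illustration only; the patch keeps them as bare ints):

    // Sketch: symbolic names for the simulation status codes used by cope()/update().
    enum SimuStatus {
        CREATED(0), CONNECT_GEDB(1), DOWNLOAD_VECTORS(2), DOWNLOAD_DEM(3),
        CREATE_RAIN_FILE(4), CREATE_CONFIG(5), RUN_SOLVER(6),
        COPE_WATER(7), COPE_DRAIN(8), COPE_RESULT(9), DONE(10);
        final int code;
        SimuStatus(int code) { this.code = code; }
    }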
(!CollUtil.isEmpty(vo.getStatus())) { + wrapper.in("status", vo.getStatus()); + } + + return wrapper; + } + + public int del(List<Integer> ids) { + List<SimuPo> list = simuMapper.selectBatchIds(ids); + if (null != list && list.size() > 0) { + for (SimuPo po : list) { + try { + if (StringHelper.isEmpty(po.getData())) continue; + + DataPo dp = JSONUtil.toBean(po.getData(), DataPo.class); + if (null == dp) continue; + + delDir(config.getInPath() + File.separator + dp.getInPath()); + delDir(config.getOutPath() + File.separator + dp.getOutPath()); + FileUtil.del(config.getInPath() + File.separator + dp.getInPath() + ".json"); + } catch (Exception ex) { + log.error(ex.getMessage(), ex); + } + } + } + + return simuMapper.deleteBatchIds(ids); + } + + private boolean delDir(String dir) { + File file = new File(dir); + if (!file.exists() || !file.isDirectory()) { + return false; + } + + return FileUtil.del(dir); + } + + public Integer getMaxId() { + return simuMapper.selectMaxId(); + } + + public SimuPo getSimuByPid(Integer pid) { + QueryWrapper<SimuPo> wrapper = new QueryWrapper<>(); + wrapper.eq("id", pid); + wrapper.last("limit 1"); + + return simuMapper.selectOne(wrapper); + } + public SimuPo getSimuById(Integer id) { + return simuMapper.selectById(id); + } + + public SimuPo getSimuByServiceName(String serviceName) { + if (StringHelper.isEmpty(serviceName)) { + return null; + } + + QueryWrapper<SimuPo> wrapper = new QueryWrapper<>(); + wrapper.eq("service_name", serviceName); + wrapper.last("limit 1"); + + return simuMapper.selectOne(wrapper); + } + + public boolean create(CreateSimuVo vo) { + Date now = new Date(); + String date = StringHelper.YMDHMS2_FORMAT.format(now); + if (StringHelper.isEmpty(vo.getName())) { + vo.setName(date); + } + + DataPo data = BeanUtil.copyProperties(vo, DataPo.class); + data.setPath(date, date); + initPath(data); + + SimuPo simu = new SimuPo(vo.getNum(), vo.getPid(), vo.getName(), JSONUtil.toJsonStr(data), 0, vo.getBak()); + simu.setServiceName(date); + simu.setCreateTime(new Timestamp(now.getTime())); + + int rows = simuMapper.insert(simu); + if (rows > 0) { + asyncCall(simu); + } + + return rows > 0; + } + + private void initPath(DataPo data) { + createDir(config.getInPath() + File.separator + data.getInPath()); + createDir(config.getOutPath() + File.separator + data.getOutPath()); + } + + private void createDir(String path) { + File f = new File(path); + if (f.exists() && f.isDirectory()) { + FileUtil.del(f); + } + f.mkdirs(); + } + + private void asyncCall(SimuPo simu) { + ExecutorService executor = Executors.newSingleThreadExecutor(); + executor.execute(new Runnable() { + @Override + @SneakyThrows + public void run() { + cope(simu); + } + }); + executor.shutdown(); + } + + private void cope(SimuPo simu) { + try { + DataPo data = JSONUtil.toBean(simu.getData(), DataPo.class); + + update(simu, 1, null); + String token = gedbService.getToken(); + GeDb db = gedbService.connectGedb(token, data); + simu.setData(JSONUtil.toJsonStr(data)); + + update(simu, 2, null); + gedbService.copeVectors(token, data, db); + + update(simu, 3, null); + gedbService.copeDem(token, data); + + update(simu, 4, null); + uwService.createRainFile(data); + + update(simu, 5, null); + uwService.createConfig(data); + + update(simu, 6, null); + uwService.callExe(data); + + update(simu, 7, null); + //uwService.copeWaterFiles(); + + update(simu, 8, null); + uwService.copeDrainFiles(data); + + update(simu, 9, null); + resultService.process(data); + + update(simu, 10, "瀹屾垚"); + } catch (Exception 
ex) { + log.error(ex.getMessage(), ex); + update(simu, -simu.getStatus(), ex.getMessage()); + } + } + + private void update(SimuPo simu, int status, String rs) { + simu.setStatus(status); + if (null != rs) simu.setResult(rs); + simu.setUpdateTime(WebHelper.getCurrentTimestamp()); + + simuMapper.updateById(simu); + } +} diff --git a/src/main/java/com/se/nsl/service/SwwFilesDealService.java b/src/main/java/com/se/nsl/service/SwwFilesDealService.java new file mode 100644 index 0000000..b0a9878 --- /dev/null +++ b/src/main/java/com/se/nsl/service/SwwFilesDealService.java @@ -0,0 +1,5 @@ +package com.se.nsl.service; + +public interface SwwFilesDealService { + Object readSwwFile(String filePath); +} diff --git a/src/main/java/com/se/nsl/service/UwService.java b/src/main/java/com/se/nsl/service/UwService.java new file mode 100644 index 0000000..6787a78 --- /dev/null +++ b/src/main/java/com/se/nsl/service/UwService.java @@ -0,0 +1,207 @@ +package com.se.nsl.service; + +import cn.hutool.json.JSONUtil; +import com.se.nsl.config.PropertiesConfig; +import com.se.nsl.domain.dto.ConfigDto; +import com.se.nsl.domain.po.DataPo; +import com.se.nsl.helper.StringHelper; +import com.se.nsl.helper.WebHelper; +import com.se.simu.Rainfall; +import lombok.extern.slf4j.Slf4j; +import org.springframework.stereotype.Service; + +import javax.annotation.Resource; +import java.io.*; + +@Slf4j +@Service +@SuppressWarnings("ALL") +public class UwService { + @Resource + PropertiesConfig config; + + static Rainfall _rainfall = null; + + public static Rainfall getTainfall() throws Exception { + if (null == _rainfall) { + _rainfall = new Rainfall(); + } + + return _rainfall; + } + + public void createRainFile(DataPo data) throws Exception { + String filePath = config.getInPath() + File.separator + data.getInPath() + File.separator + config.getRaingage(); + String startTime = StringHelper.YMDHMS_FORMAT.format(data.getStartTime()); + + //MWCharArray file = new MWCharArray(filePath); + //MWCharArray station = new MWCharArray(config.getRainStation()); + //MWCharArray time = new MWCharArray(startTime); + + Rainfall rainfall = getTainfall(); + //rainfall('D:\simu\in\RainGage.dat','Tongzhou','2024-09-29 00:00:00',60,0.5,10) + //Object[] rs = rainfall.rainfall(filePath, config.getRainStation(), startTime, Double.valueOf(data.getDuration()), 0.5, config.getRainPeriod()); + + //rainfall('D:\simu\in\RainGage.dat','Tongzhou','2024-07-01 00:00:00',60,60) + Object[] rs = rainfall.rainfall(filePath, config.getRainStation(), startTime, Double.valueOf(data.getDuration()), data.getTotal()); + + // file.dispose(); + //station.dispose(); + //time.dispose(); + } + + public void createConfig(DataPo data) throws IOException { + ConfigDto dto = new ConfigDto(); + dto.setProperties(data.getInPath(), data.getStartTime(), data.getDuration(), config); + + String json = JSONUtil.toJsonPrettyStr(dto); + String filePath = config.getInPath() + File.separator + data.getInPath() + ".json"; + + FileWriter fw = new FileWriter(filePath); + BufferedWriter bw = new BufferedWriter(fw); + bw.write(json); + bw.close(); + fw.close(); + } + + public String callExe(DataPo data) throws Exception { + String cmd = String.format("%s %d %s", config.getSolverBat(), WebHelper.getCpuCores(), data.getInPath() + ".json"); + + //return exec(cmd); + return execCmdLine(cmd); + } + + private String exec(String cmd) throws Exception { + Process process = null; + BufferedReader nr = null; + BufferedReader er = null; + try { + // new String[] { "/bin/sh", "-c", cmd } + process = 
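getTainfall() above lazily creates a single shared com.se.simu.Rainfall instance without any guard, so two simulations kicked off at the same moment could both construct it. If concurrent calls are possible (an assumption; the single-thread executors elsewhere in the patch suggest tasks can overlap), a synchronized variant of the same getter is enough:

    // Sketch: thread-safe lazy init for the shared Rainfall instance.
    public static synchronized Rainfall getRainfall() throws Exception {
        if (null == _rainfall) {
            _rainfall = new Rainfall();
        }
        return _rainfall;
    }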
Runtime.getRuntime().exec(cmd); + nr = new BufferedReader(new InputStreamReader(process.getInputStream(), "GBK")); + er = new BufferedReader(new InputStreamReader(process.getErrorStream(), "GBK")); + + String line; + StringBuilder sb = new StringBuilder(); + while ((line = nr.readLine()) != null) { + sb.append(line); + log.info(line); + } + + String errorLine; + while ((errorLine = er.readLine()) != null) { + log.error(errorLine); + } + + int exitCode = process.waitFor(); + + return sb.toString(); + } catch (Exception ex) { + throw ex; + } finally { + closeReader(er); + closeReader(nr); + closeProcess(process); + } + } + + private static void closeReader(Reader reader) { + if (null != reader) { + try { + reader.close(); + } catch (Exception ex) { + log.error(ex.getMessage(), ex); + } + } + } + + private static void closeProcess(Process process) { + if (null != process) { + try { + process.destroy(); + } catch (Exception ex) { + log.error(ex.getMessage(), ex); + } + } + } + + private String execCmdLine(String cmd) throws IOException, InterruptedException { + Process process = Runtime.getRuntime().exec(cmd); + + new Thread(() -> { + InputStreamReader ir = null; + BufferedReader br = null; + try { + ir = new InputStreamReader(process.getErrorStream(), "GBK"); + br = new BufferedReader(ir); + + String line; + while ((line = br.readLine()) != null) { + log.error(line); + } + } catch (Exception e) { + e.printStackTrace(); + } finally { + try { + if (null != br) br.close(); + if (null != ir) ir.close(); + } catch (IOException e) { + e.printStackTrace(); + } + } + }).start(); + + StringBuilder sb = new StringBuilder(); + //new Thread(() -> { + InputStreamReader ir = null; + BufferedReader br = null; + try { + ir = new InputStreamReader(process.getInputStream(), "GBK"); + br = new BufferedReader(ir); + + String line; + while ((line = br.readLine()) != null) { + log.info(line); + sb.append(line); + } + } catch (Exception e) { + e.printStackTrace(); + } finally { + try { + if (null != br) br.close(); + if (null != ir) ir.close(); + } catch (IOException e) { + e.printStackTrace(); + } + } + //}).start(); + + process.waitFor(); + process.destroy(); + + return sb.toString(); + } + + public void copeWaterFiles() { + // + } + + public String copeDrainFiles(DataPo data) throws Exception { + String time = StringHelper.YMDHMS_FORMAT.format(data.getStartTime()); + String inPath = config.getInPath() + File.separator + data.getInPath(); + String sww = inPath + File.separator + ".save" + File.separator + data.getInPath() + ".sww"; + + String cmd = config.getSww2tifBat() + " " + sww + " \"" + time + "\" " + data.getEpsg() + " " + inPath; + + return exec(cmd); + } + public String copeSwwDrainFiles(DataPo data) throws Exception { + String time = StringHelper.YMDHMS_FORMAT.format(data.getStartTime()); + String inPath = "H:\\simu\\semout"; + String sww = "H:\\simu\\semout\\testsem\\.out\\" + "testsem.sww"; + + String cmd = config.getSww2tifBat() + " " + sww + " \"" + time + "\" " + data.getEpsg() + " " + inPath; + + return exec(cmd); + } +} diff --git a/src/main/java/com/se/nsl/service/WaterService.java b/src/main/java/com/se/nsl/service/WaterService.java new file mode 100644 index 0000000..abb0780 --- /dev/null +++ b/src/main/java/com/se/nsl/service/WaterService.java @@ -0,0 +1,235 @@ +package com.se.nsl.service; + +import cn.hutool.core.io.FileUtil; +import cn.hutool.json.JSONUtil; +import com.se.nsl.config.PropertiesConfig; +import com.se.nsl.domain.po.DataPo; +import com.se.nsl.domain.po.PondingPo; +import 
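exec(...) above drains stdout to completion before touching stderr, so a solver that fills its stderr pipe can stall; execCmdLine(...) works around that with a separate reader thread. An alternative sketch (not the patch's code) that merges the two streams via ProcessBuilder and keeps the GBK charset the methods above assume for the solver's console output:

    // Sketch: run a command, capture merged stdout/stderr, fail on a non-zero exit code.
    private static String run(String... cmd) throws IOException, InterruptedException {
        Process p = new ProcessBuilder(cmd).redirectErrorStream(true).start();
        StringBuilder sb = new StringBuilder();
        try (BufferedReader br = new BufferedReader(
                new InputStreamReader(p.getInputStream(), "GBK"))) {
            String line;
            while ((line = br.readLine()) != null) {
                sb.append(line).append(System.lineSeparator());
            }
        }
        int exitCode = p.waitFor();
        if (exitCode != 0) {
            throw new IOException("command exited with code " + exitCode);
        }
        return sb.toString();
    }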
com.se.nsl.domain.po.SimuPo; +import com.se.nsl.domain.vo.BuildingDepthVo; +import com.se.nsl.helper.GdalHelper; +import com.se.nsl.helper.StringHelper; +import lombok.extern.slf4j.Slf4j; +import org.gdal.gdal.Dataset; +import org.gdal.gdal.gdal; +import org.gdal.gdalconst.gdalconst; +import org.gdal.ogr.*; +import org.gdal.osr.SpatialReference; +import org.springframework.stereotype.Service; +import org.springframework.util.CollectionUtils; + +import javax.annotation.Resource; +import java.io.File; +import java.io.FileInputStream; +import java.util.List; +import java.util.stream.Collectors; + +@Slf4j +@Service +@SuppressWarnings("ALL") +public class WaterService { + @Resource + PropertiesConfig config; + + public byte[] getson(String serviceName, String json) { + try { + String filePath = config.getOutPath() + File.separator + serviceName + File.separator + json; + + File dat = new File(filePath); + if (!dat.exists()) { + return null; + } + + byte[] bytes = new byte[(int) dat.length()]; + + FileInputStream fs = new FileInputStream(filePath); + fs.read(bytes); + fs.close(); + + return bytes; + } catch (Exception ex) { + log.error(ex.getMessage(), ex); + return null; + } + } + + public String getTerraMap(String serviceName, Integer width, Integer height) { + return config.getOutPath() + File.separator + serviceName + File.separator + "terrain" + File.separator + width + "_" + height + ".png"; + } + + public String getWaterMap(String serviceName, Integer width, Integer height, Long timestamp) { + return config.getOutPath() + File.separator + serviceName + File.separator + "waters" + File.separator + timestamp + File.separator + width + "_" + height + ".png"; + } + + public String getFlowMap(String serviceName, Integer width, Integer height, Long timestamp) { + return config.getOutPath() + File.separator + serviceName + File.separator + "flows" + File.separator + timestamp + File.separator + width + "_" + height + ".png"; + } + + public Double getWaterHeight(SimuPo simu, double x, double y, Long timestamp) { + String filePath = config.getOutPath() + File.separator + simu.getServiceName() + File.separator + "waters" + + File.separator + timestamp + File.separator + "water.tif"; + if (!FileUtil.exist(filePath)) return null; + + Dataset ds = null; + try { + ds = gdal.Open(filePath, gdalconst.GA_ReadOnly); + if (null == ds || ds.getRasterCount() < 1) { + return null; + } + if (null == ds.GetSpatialRef()) { + ds.SetSpatialRef(getSpatialRef(simu)); + } + + double[] gt = ds.GetGeoTransform(); + double[] xy = GdalHelper.fromWgs84(ds.GetSpatialRef(), x, y); + int[] XY = coordinates2ColRow(gt, xy[0], xy[1]); + + if (XY[0] < 0 || XY[1] < 0 || XY[0] > ds.getRasterXSize() || XY[1] > ds.getRasterYSize()) { + return null; + } + + double[] vals = new double[1]; + ds.GetRasterBand(1).ReadRaster(XY[0], XY[1], 1, 1, vals); + + return isValid(vals[0]) ? 
vals[0] : null; + } catch (Exception ex) { + log.error(ex.getMessage(), ex); + return null; + } finally { + if (null != ds) ds.delete(); + } + } + + private SpatialReference getSpatialRef(SimuPo simu) { + DataPo data = JSONUtil.toBean(simu.getData(), DataPo.class); + + return data.getSpatialReference(); + } + + public int[] coordinates2ColRow(double[] gt, double x, double y) { + //Double col = Math.floor(((y - gt[3]) * gt[1] - (x - gt[0]) * gt[4]) / (gt[5] * gt[1] - gt[2] * gt[4])); + Double col = Math.floor((y * gt[1] - x * gt[4] + gt[0] * gt[4] - gt[3] * gt[1]) / (gt[5] * gt[1] - gt[2] * gt[4])); + Double row = Math.floor((x - gt[0] - col * gt[2]) / gt[1]); + + return new int[]{row.intValue(), col.intValue()}; + } + + public static boolean isValid(double val) { + return !Double.isNaN(val) && val > Integer.MIN_VALUE; + } + + public Double getWaterArea(SimuPo simu, double x, double y, Long timestamp) { + List<PondingPo> list = readWaterJson(simu.getServiceName(), timestamp); + if (CollectionUtils.isEmpty(list)) return null; + + //DataPo data = JSONUtil.toBean(simu.getData(), DataPo.class); + //SpatialReference sr = GdalHelper.createSpatialReference(data.getEpsg()); + + Geometry p = new Geometry(ogr.wkbPoint); + p.AddPoint_2D(x, y); + p.AssignSpatialReference(GdalHelper.SR4326); + + for (PondingPo po : list) { + if (StringHelper.isEmpty(po.getPolygon())) continue; + + Geometry polygon = Geometry.CreateFromWkt(po.getPolygon()); + polygon.AssignSpatialReference(GdalHelper.SR4326); + if (polygon.Contains(p)) { + return po.getArea(); + } + } + + return null; + } + + private List<PondingPo> readWaterJson(String serviceName, Long timestamp) { + String filePath = config.getOutPath() + File.separator + serviceName + File.separator + "waters" + File.separator + timestamp + File.separator + "water.json"; + String json = getText(filePath); + if (StringHelper.isEmpty(json)) { + return null; + } + + return JSONUtil.toList(json, PondingPo.class); + } + + public Double getWaterArea2(SimuPo simu, double x, double y, Long timestamp) { + String filePath = config.getOutPath() + File.separator + simu.getServiceName() + File.separator + "waters" + + File.separator + timestamp + File.separator + "water.geojson"; + if (!FileUtil.exist(filePath)) return null; + + Driver driver = null; + DataSource dataSource = null; + org.gdal.ogr.Layer layer = null; + try { + driver = ogr.GetDriverByName("GeoJSON"); + if (null == driver) return null; + + DataSource ds = driver.Open(filePath); + if (null == ds) return null; + + layer = ds.GetLayer(0); + double[] xy = GdalHelper.fromWgs84(layer.GetSpatialRef(), x, y); + + Geometry p = new Geometry(ogr.wkbPoint); + p.AddPoint_2D(xy[0], xy[1]); + p.AssignSpatialReference(layer.GetSpatialRef()); + + for (long i = 0, d = layer.GetFeatureCount(); i < d; i++) { + Feature f = layer.GetFeature(i); + if (f.GetGeometryRef().Intersects(p)) { + /*f.GetFieldAsDouble("val"); + Geometry g = f.GetGeometryRef(); + GdalHelper.fromWgs84(layer.GetSpatialRef(), g); + Double area= g.GetArea();*/ + + return f.GetGeometryRef().Area(); + } + } + + return null; + } catch (Exception ex) { + log.error(ex.getMessage(), ex); + return null; + } finally { + GdalHelper.delete(layer, dataSource, driver); + } + } + + public List<BuildingDepthVo> getBuildingDepthBySeid(String serviceName, String seid) { + List<BuildingDepthVo> list = readBuildingJson(serviceName); + if (CollectionUtils.isEmpty(list)) return null; + + return list.parallelStream() + .filter(b -> seid.equals(b.getId())) + .sorted((a, b) -> 
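coordinates2ColRow above inverts the full affine geotransform (so it handles rotated rasters); note its return order is {pixel (x) index, line (y) index}, which matches the (xoff, yoff) arguments ReadRaster expects in getWaterHeight, even though the local variable names read the other way round. For the usual north-up raster, where gt[2] and gt[4] are zero, the inversion collapses to two divisions:

    // Sketch: simplified pixel/line lookup valid only when gt[2] == gt[4] == 0.
    static int[] northUpColRow(double[] gt, double x, double y) {
        int xoff = (int) Math.floor((x - gt[0]) / gt[1]); // gt[1] > 0: pixel width
        int yoff = (int) Math.floor((y - gt[3]) / gt[5]); // gt[5] < 0: negative pixel height
        return new int[]{xoff, yoff};
    }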
a.getTimestamp().compareTo(b.getTimestamp())) + .collect(Collectors.toList()); + } + + public List<BuildingDepthVo> getBuildingDepthByTime(String serviceName, Long timestamp) { + List<BuildingDepthVo> list = readBuildingJson(serviceName); + if (CollectionUtils.isEmpty(list)) { + return null; + } + + return list.parallelStream().filter(b -> timestamp.equals(b.getTimestamp())).collect(Collectors.toList()); + } + + private List<BuildingDepthVo> readBuildingJson(String serviceName) { + String filePath = config.getOutPath() + File.separator + serviceName + File.separator + "building.json"; + String json = getText(filePath); + if (StringHelper.isEmpty(json)) { + return null; + } + + return JSONUtil.toList(json, BuildingDepthVo.class); + } + + private String getText(String filePath) { + File file = new File(filePath); + if (!file.exists()) { + return null; + } + + return FileUtil.readUtf8String(file); + } +} diff --git a/src/main/java/com/se/nsl/utils/CsvToSQLiteUtils.java b/src/main/java/com/se/nsl/utils/CsvToSQLiteUtils.java new file mode 100644 index 0000000..6b4b61e --- /dev/null +++ b/src/main/java/com/se/nsl/utils/CsvToSQLiteUtils.java @@ -0,0 +1,436 @@ +package com.se.nsl.utils; + +import com.alibaba.fastjson.JSONArray; +import com.alibaba.fastjson.JSONObject; +import com.se.nsl.domain.vo.StationRainVo; +import org.apache.commons.csv.CSVFormat; +import org.apache.commons.csv.CSVRecord; + +import java.io.IOException; +import java.io.InputStreamReader; +import java.io.Reader; +import java.nio.file.Files; +import java.nio.file.Paths; +import java.sql.*; +import java.util.ArrayList; +import java.util.List; + +/** + * CSV 鍒� SQLite 瀹炵敤绋嬪簭 + * + * @author xingjinshuang@smartearth.cn + * @date 2024/12/26 + */ +public class CsvToSQLiteUtils { + + private static final String DATABASE_URL = "jdbc:sqlite:D:\\0a_project\\simulation\\simuserver\\rainfall.db"; // SQLite鏁版嵁搴撹矾寰� + private static final String CREATE_TABLE_SQL = + "CREATE TABLE IF NOT EXISTS rainfall (" + + " id INTEGER PRIMARY KEY AUTOINCREMENT," + + " station_name TEXT NOT NULL," + + " rainfall REAL NOT NULL," + + " longitude REAL NOT NULL," + + " latitude REAL NOT NULL," + + " datetime TEXT NOT NULL" + + ");"; + + private static final String INSERT_DATA_SQL = + "INSERT INTO rainfall (station_name, rainfall, longitude, latitude, datetime) " + + "VALUES (?, ?, ?, ?, ?);"; + + public static void main(String[] args) { + // CSV鏂囦欢璺緞 + String csvFilePath = "D:\\0a_project\\model\\shp\\闆ㄩ噺绔欑偣鏁版嵁\\闆ㄩ噺绔欏寘鍚潗鏍�.csv"; + + // 1. 鍒涘缓SQLite鏁版嵁搴撹繛鎺� + try (Connection conn = DriverManager.getConnection(DATABASE_URL)) { + if (conn != null) { + // 鑾峰彇褰撳墠鏃堕棿鎴� + Timestamp timestamp = new Timestamp(System.currentTimeMillis()); + String tableName = "rainfall_" + timestamp.getTime(); + System.out.println("tableName = " + tableName); + // 2. 鍒涘缓琛� + createTableIfNotExists(conn, tableName); + + // 3. 璁剧疆 SQLite 杩炴帴鐨勫瓧绗︾紪鐮佷负 UTF-8 + try (Statement stmt = conn.createStatement()) { + stmt.execute("PRAGMA encoding = 'UTF-8';"); // 璁剧疆SQLite缂栫爜涓篣TF-8 + } + + // 4. 寮�濮嬩簨鍔� + conn.setAutoCommit(false); + + // 5. 璇诲彇CSV鏂囦欢骞舵彃鍏ユ暟鎹� + readCsvAndInsertData(csvFilePath, conn, tableName); + + // 6. 
鎻愪氦浜嬪姟 + conn.commit(); + + System.out.println("鏁版嵁鎴愬姛鎻掑叆鍒癝QLite鏁版嵁搴擄紒"); + } + } catch (SQLException e) { + System.err.println("SQLite杩炴帴澶辫触: " + e.getMessage()); + } + } + + /** + * 鍒涘缓琛紝濡傛灉涓嶅瓨鍦ㄧ殑璇� + */ + private static void createTableIfNotExists(Connection conn) throws SQLException { + try (Statement stmt = conn.createStatement()) { + stmt.execute(CREATE_TABLE_SQL); + } + } + + private static void createTableIfNotExists(Connection conn, String tableName) throws SQLException { + + try (Statement stmt = conn.createStatement()) { + String CREATE_TABLE_SQL = + "CREATE TABLE IF NOT EXISTS " + tableName + " (" + + " id INTEGER PRIMARY KEY AUTOINCREMENT," + + " station_name TEXT NOT NULL," + + " rainfall REAL NOT NULL," + + " longitude REAL NOT NULL," + + " latitude REAL NOT NULL," + + " datetime TEXT NOT NULL" + + ");"; + stmt.execute(CREATE_TABLE_SQL); + } + } + + /** + * 璇诲彇CSV鏂囦欢骞跺皢鏁版嵁鎻掑叆鍒癝QLite鏁版嵁搴� + * + * @param csvFilePath CSV鏂囦欢璺緞 + * @param conn SQLite鏁版嵁搴撹繛鎺� + */ + private static void readCsvAndInsertData(String csvFilePath, Connection conn, String tableName) { + // 浣跨敤 Apache Commons CSV 璇诲彇CSV鏂囦欢 + try (Reader reader = new InputStreamReader(Files.newInputStream(Paths.get(csvFilePath)), "GBK")) { +// try (Reader reader = new InputStreamReader(Files.newInputStream(Paths.get(csvFilePath)), StandardCharsets.UTF_8)) { + Iterable<CSVRecord> records = CSVFormat.DEFAULT + .withHeader("闆ㄩ噺绔�", "闄嶉洦閲�", "缁忓害", "绾害", "datatime") + .withSkipHeaderRecord() // 璺宠繃琛ㄥご + .parse(reader); + + String INSERT_DATA_SQL = + "INSERT INTO " + tableName + " (station_name, rainfall, longitude, latitude, datetime) " + + "VALUES (?, ?, ?, ?, ?);"; + + // 鍑嗗SQL鎻掑叆璇彞 + try (PreparedStatement pstmt = conn.prepareStatement(INSERT_DATA_SQL)) { + int batchSize = 1000; // 姣忔壒鎻掑叆鐨勬暟閲� + int count = 0; + + for (CSVRecord record : records) { + // 鑾峰彇姣忎竴鍒楃殑鏁版嵁 + String stationName = record.get("闆ㄩ噺绔�"); + double rainfall = Double.parseDouble(record.get("闄嶉洦閲�")); + double longitude = Double.parseDouble(record.get("缁忓害")); + double latitude = Double.parseDouble(record.get("绾害")); + String datetime = record.get("datatime"); + + // 璁剧疆鎻掑叆鏁版嵁鐨勫弬鏁� + pstmt.setString(1, stationName); + pstmt.setDouble(2, rainfall); + pstmt.setDouble(3, longitude); + pstmt.setDouble(4, latitude); + pstmt.setString(5, datetime); + + // 鎵归噺娣诲姞鍒版壒澶勭悊涓� + pstmt.addBatch(); + count++; + + // 姣�1000鏉℃暟鎹墽琛屼竴娆℃壒澶勭悊 + if (count % batchSize == 0) { + pstmt.executeBatch(); // 鎵ц鎵归噺鎻掑叆 + } + } + // 鎵ц鍓╀綑鐨勬壒閲忔彃鍏� + pstmt.executeBatch(); + } + } catch (IOException | SQLException e) { + System.err.println("璇诲彇CSV鎴栨彃鍏ユ暟鎹椂鍑洪敊: " + e.getMessage()); + } + } + + /** + * 璇诲彇 CSV 淇濆瓨鏈湴 + * + * @param array 鏁版嵁 + * @param tableName 琛ㄥ悕绉� + */ + public static void readCsvSaveLocal(JSONArray array, String tableName) { + // 1. 鍒涘缓SQLite鏁版嵁搴撹繛鎺� + try (Connection conn = DriverManager.getConnection("jdbc:sqlite:rainfall.db")) { + if (conn != null) { + // 2. 鍒涘缓琛紙濡傛灉涓嶅瓨鍦級 + createTableIfNotExists(conn, tableName); + // 3. 璁剧疆SQLite杩炴帴鐨勫瓧绗︾紪鐮佷负UTF-8 + try (Statement stmt = conn.createStatement()) { + // 璁剧疆SQLite缂栫爜涓篣TF-8 + stmt.execute("PRAGMA encoding = 'UTF-8';"); + } + // 4. 寮�濮嬩簨鍔� + conn.setAutoCommit(false); + // 5. 璇诲彇CSV鏂囦欢骞舵彃鍏ユ暟鎹� + readCsvAndInsertDatas(array, conn, tableName); + // 6. 
鎻愪氦浜嬪姟 + conn.commit(); + System.out.println("鏁版嵁鎴愬姛鎻掑叆鍒癝QLite鏁版嵁搴擄紒"); + } + } catch (SQLException e) { + System.err.println("SQLite杩炴帴澶辫触: " + e.getMessage()); + } + } + + /** + * 璇诲彇 CSV 淇濆瓨鏈湴 + * + * @param stationRainFile Station Rain 鏂囦欢 + * @param tableName 琛ㄥ悕绉� + */ + public static JSONArray readCsvSaveLocal(String stationRainFile, String tableName) { + // 1. 鍒涘缓SQLite鏁版嵁搴撹繛鎺� + JSONArray array = null; + try (Connection conn = DriverManager.getConnection("jdbc:sqlite:rainfall.db")) { + if (conn != null) { + // 2. 鍒涘缓琛紙濡傛灉涓嶅瓨鍦級 + createTableIfNotExists(conn, tableName); + // 3. 璁剧疆SQLite杩炴帴鐨勫瓧绗︾紪鐮佷负UTF-8 + try (Statement stmt = conn.createStatement()) { + // 璁剧疆SQLite缂栫爜涓篣TF-8 + stmt.execute("PRAGMA encoding = 'UTF-8';"); + } + // 4. 寮�濮嬩簨鍔� + conn.setAutoCommit(false); + // 5. 璇诲彇CSV鏂囦欢骞舵彃鍏ユ暟鎹� + array = readCsvAndInsertDatas(stationRainFile, conn, tableName); + // 6. 鎻愪氦浜嬪姟 + conn.commit(); + System.out.println("鏁版嵁鎴愬姛鎻掑叆鍒癝QLite鏁版嵁搴擄紒"); + } + } catch (SQLException e) { + System.err.println("SQLite杩炴帴澶辫触: " + e.getMessage()); + } + return array; + } + + private static void readCsvAndInsertDatas(JSONArray array, Connection conn, String tableName) { + + String insertDataSql = "INSERT INTO " + tableName + " (station_name, rainfall, longitude, latitude, datetime) VALUES (?, ?, ?, ?, ?);"; + try (PreparedStatement pstmt = conn.prepareStatement(insertDataSql)) { + // 鎵归噺澶у皬 + int batchSize = 1000; + int count = 0; + for (int i = 0; i < array.size(); i++) { + JSONObject object = array.getJSONObject(i); + // 鑾峰彇姣忎竴鍒楃殑鏁版嵁 + String stationName = object.getString("stationName"); + double rainfall = object.getDouble("rainfall"); + double longitude = object.getDouble("longitude"); + double latitude = object.getDouble("latitude"); + String datetime = object.getString("datetime"); + // 璁剧疆鎻掑叆鏁版嵁鐨勫弬鏁� + pstmt.setString(1, stationName); + pstmt.setDouble(2, rainfall); + pstmt.setDouble(3, longitude); + pstmt.setDouble(4, latitude); + pstmt.setString(5, datetime); + // 娣诲姞鍒版壒澶勭悊 + pstmt.addBatch(); + count++; + // 姣廱atchSize鏉℃暟鎹墽琛屼竴娆℃壒閲忔彃鍏� + if (count % batchSize == 0) { + pstmt.executeBatch(); + } + } + // 鎵ц鍓╀綑鐨勬壒閲忔彃鍏� + pstmt.executeBatch(); + } catch (SQLException e) { + System.err.println("鎵归噺鎻掑叆鏁版嵁鏃跺嚭閿�: " + e.getMessage()); + } + } + + private static JSONArray readCsvAndInsertDatas(String csvFilePath, Connection conn, String tableName) { + // 浣跨敤 Apache Commons CSV 璇诲彇CSV鏂囦欢 + JSONArray array = new JSONArray(); + try (Reader reader = new InputStreamReader(Files.newInputStream(Paths.get(csvFilePath)), "GBK")) { + Iterable<CSVRecord> records = CSVFormat.DEFAULT + .withHeader("闆ㄩ噺绔�", "闄嶉洦閲�", "缁忓害", "绾害", "datatime") + .withSkipHeaderRecord() // 璺宠繃琛ㄥご + .parse(reader); + + String insertDataSql = "INSERT INTO " + tableName + " (station_name, rainfall, longitude, latitude, datetime) VALUES (?, ?, ?, ?, ?);"; + try (PreparedStatement pstmt = conn.prepareStatement(insertDataSql)) { + // 鎵归噺澶у皬 + int batchSize = 1000; + int count = 0; + + for (CSVRecord record : records) { + // 鑾峰彇姣忎竴鍒楃殑鏁版嵁 + String stationName = record.get("闆ㄩ噺绔�"); + double rainfall = Double.parseDouble(record.get("闄嶉洦閲�")); + double longitude = Double.parseDouble(record.get("缁忓害")); + double latitude = Double.parseDouble(record.get("绾害")); + String datetime = record.get("datatime"); + + // 璁剧疆鎻掑叆鏁版嵁鐨勫弬鏁� + pstmt.setString(1, stationName); + pstmt.setDouble(2, rainfall); + pstmt.setDouble(3, longitude); + pstmt.setDouble(4, latitude); + pstmt.setString(5, datetime); + JSONObject jsonObject = new JSONObject(); + 
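// Mirror the row into a JSONObject as well: the values below are both bound to the
// PreparedStatement batch (flushed every batchSize rows) and collected into the
// JSONArray that this method returns to the caller.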
jsonObject.put("stationName", stationName); + jsonObject.put("rainfall", rainfall); + jsonObject.put("longitude", longitude); + jsonObject.put("latitude", latitude); + jsonObject.put("datetime", datetime); + array.add(jsonObject); + // 娣诲姞鍒版壒澶勭悊 + pstmt.addBatch(); + count++; + + // 姣廱atchSize鏉℃暟鎹墽琛屼竴娆℃壒閲忔彃鍏� + if (count % batchSize == 0) { + pstmt.executeBatch(); + } + } + // 鎵ц鍓╀綑鐨勬壒閲忔彃鍏� + pstmt.executeBatch(); + } catch (SQLException e) { + System.err.println("鎵归噺鎻掑叆鏁版嵁鏃跺嚭閿�: " + e.getMessage()); + } + } catch (IOException e) { + System.err.println("璇诲彇CSV鎴栨彃鍏ユ暟鎹椂鍑洪敊: " + e.getMessage()); + } + return array; + } + + /** + * 鑾峰彇鎬昏 + * + * @param tableName 琛ㄥ悕绉� + * @return {@link Double} + */ + public static Double getTotal(String tableName) { + // 1. 鍒涘缓SQLite鏁版嵁搴撹繛鎺� + try (Connection conn = DriverManager.getConnection("jdbc:sqlite:rainfall.db")) { + if (conn != null) { + // 2. 鎵цSQL鏌ヨ + String queryDataSql = "select sum(rainfall_difference) as total from(SELECT ABS( (SELECT rainfall FROM " + tableName + " WHERE station_name = s.station_name ORDER BY datetime ASC LIMIT 1) - (SELECT rainfall FROM " + tableName + " WHERE station_name = s.station_name ORDER BY datetime DESC LIMIT 1)) AS rainfall_difference FROM " + tableName + " s GROUP BY station_name)"; + // 3. 澶勭悊鏌ヨ缁撴灉 + try (PreparedStatement pstmt = conn.prepareStatement(queryDataSql)) { + ResultSet rs = pstmt.executeQuery(); + while (rs.next()) { + // 鑾峰彇鎬诲拰 + return rs.getDouble("total"); + } + } catch (SQLException e) { + System.err.println("鏌ヨ鏁版嵁鏃跺嚭閿�: " + e.getMessage()); + } + } + } catch (SQLException e) { + System.err.println("SQLite杩炴帴澶辫触: " + e.getMessage()); + } + return null; + } + + /** + * @param tableName 琛ㄥ悕绉� + * @return {@link Integer} + */ + public static Integer getDuration(String tableName) throws SQLException { + // // 1. 杩炴帴鏁版嵁搴� + // try (Connection conn = DriverManager.getConnection("jdbc:sqlite:rainfall.db")) { + // // 2. 鎵ц鏌ヨ璇彞 + // String queryDataSql = "SELECT duration FROM " + tableName; + // try (PreparedStatement pstmt = conn.prepareStatement(queryDataSql)) { + // ResultSet rs = pstmt.executeQuery(); + // while (rs.next()) { + // // 鑾峰彇鎬诲拰 + // return rs.getInt("duration"); + // } + // } catch (SQLException e) { + // throw new RuntimeException(e); + // } + // } catch (SQLException e) { + // System.err.println("SQLite杩炴帴澶辫触: " + e.getMessage()); + // } + + // 鐩墠鍏堟牴鎹枃浠剁殑鍐呭锛屾墜鍔ㄧ粰鍊硷紝鍚庣画浣跨敤瑙f瀽鏂囦欢鍐呭涓殑鏃堕棿 + // TODO: 2024/12/27 鐩墠鍏堟牴鎹枃浠剁殑鍐呭锛屾墜鍔ㄧ粰鍊硷紝鎻愰珮澶勭悊閫熷害銆傚悗缁娇鐢ㄨВ鏋愭枃浠跺唴瀹逛腑鐨勬椂闂淬�� + return 1440; + } + + /** + * 鍒嗙粍鏌ヨ + * + * @param tableName 琛ㄥ悕绉� + * @return {@link Double} + */ + public static List<String> getNameList(String tableName) { + // 1. 鍒涘缓SQLite鏁版嵁搴撹繛鎺� + try (Connection conn = DriverManager.getConnection("jdbc:sqlite:rainfall.db")) { + if (conn != null) { + // 2. 鎵цSQL鏌ヨ + String queryDataSql = "SELECT station_name FROM " + tableName + " GROUP BY station_name"; + // 3. 澶勭悊鏌ヨ缁撴灉 + try (PreparedStatement pstmt = conn.prepareStatement(queryDataSql)) { + ResultSet rs = pstmt.executeQuery(); + List<String> list = new ArrayList<>(); + while (rs.next()) { + // 鑾峰彇鎬诲拰 + list.add(rs.getString("station_name")); + } + return list; + } catch (SQLException e) { + System.err.println("鏌ヨ鏁版嵁鏃跺嚭閿�: " + e.getMessage()); + } + } + } catch (SQLException e) { + System.err.println("SQLite杩炴帴澶辫触: " + e.getMessage()); + } + return null; + } + + /** + * 鍒嗙粍鏌ヨ + * + * @param tableName 琛ㄥ悕绉� + * @return {@link Double} + */ + public static List<StationRainVo> getList(String tableName, String name) { + // 1. 
鍒涘缓SQLite鏁版嵁搴撹繛鎺� + try (Connection conn = DriverManager.getConnection("jdbc:sqlite:rainfall.db")) { + if (conn != null) { + // 2. 鎵цSQL鏌ヨ + String queryDataSql = "SELECT * FROM " + tableName + " WHERE station_name='" + name + "' ORDER BY id asc"; + // 3. 澶勭悊鏌ヨ缁撴灉 + try (PreparedStatement pstmt = conn.prepareStatement(queryDataSql)) { + ResultSet rs = pstmt.executeQuery(); + List<StationRainVo> list = new ArrayList<>(); + StationRainVo vo; + while (rs.next()) { + vo = new StationRainVo(); + vo.setId(rs.getLong("id")); + vo.setStationName(rs.getString("station_name")); + vo.setRainfall(rs.getDouble("rainfall")); + vo.setLatitude(rs.getDouble("latitude")); + vo.setLongitude(rs.getDouble("longitude")); + vo.setDatetime(rs.getString("datetime")); + list.add(vo); + } + return list; + } catch (SQLException e) { + System.err.println("鏌ヨ鏁版嵁鏃跺嚭閿�: " + e.getMessage()); + } + } + } catch (SQLException e) { + System.err.println("SQLite杩炴帴澶辫触: " + e.getMessage()); + } + return null; + } + +} diff --git a/src/main/java/com/se/nsl/utils/CustomWebClient.java b/src/main/java/com/se/nsl/utils/CustomWebClient.java new file mode 100644 index 0000000..2114770 --- /dev/null +++ b/src/main/java/com/se/nsl/utils/CustomWebClient.java @@ -0,0 +1,500 @@ +package com.se.nsl.utils; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.http.HttpHeaders; +import org.springframework.http.HttpMethod; +import org.springframework.http.HttpStatus; +import org.springframework.http.MediaType; +import org.springframework.util.LinkedMultiValueMap; +import org.springframework.util.MultiValueMap; +import org.springframework.web.reactive.function.BodyInserters; +import org.springframework.web.reactive.function.client.WebClient; +import reactor.core.publisher.Mono; + +import java.time.Duration; +import java.util.HashMap; +import java.util.Map; +import java.util.concurrent.CompletableFuture; + +/** + * 鑷畾涔� Web 瀹㈡埛绔� + * + * @author xingjinshuang@smartearth.cn + * @date 2024/06/27 + */ +public class CustomWebClient { + + private static final Logger logger = LoggerFactory.getLogger(CustomWebClient.class); + + + private static final WebClient webClient; + + static { + // 鍦ㄩ潤鎬佷唬鐮佸潡涓疄渚嬪寲WebClient.Builder骞舵瀯寤篧ebClient瀵硅薄 + webClient = WebClient.builder().build(); + } + + //====鍩虹璇锋眰===================================================================================================================================== + public static Mono<String> get(String url) { + return webClient.get() + .uri(url) + .retrieve() + .bodyToMono(String.class); + } + + public static Mono<String> post(String url, Object requestBody) { + return webClient.post() + .uri(url) + .bodyValue(requestBody) + .retrieve() + .bodyToMono(String.class); + } + + public static Mono<String> postForm(String url, MultiValueMap<String, String> formData) { + return webClient.post() + .uri(url) + .contentType(MediaType.APPLICATION_FORM_URLENCODED) + .body(BodyInserters.fromFormData(formData)) + .retrieve() + .bodyToMono(String.class); + } + + public static Mono<String> put(String url, Object requestBody) { + return webClient.put() + .uri(url) + .bodyValue(requestBody) + .retrieve() + .bodyToMono(String.class); + } + + // 鍙戦�丳UT璇锋眰 + public static <T, R> Mono<T> put(String url, R requestBody, Class<T> responseType) { + return webClient.put() + .uri(url) + .contentType(MediaType.APPLICATION_JSON) + .body(BodyInserters.fromValue(requestBody)) + .retrieve() + .bodyToMono(responseType); + } + + public static Mono<String> delete(String url) { + return 
webClient.delete() + .uri(url) + .retrieve() + .onStatus(HttpStatus::is4xxClientError, + clientResponse -> Mono.error(new CustomWebClientException1("Client error: " + clientResponse.statusCode()))) + .onStatus(HttpStatus::is5xxServerError, + clientResponse -> Mono.error(new CustomWebClientException1("Server error: " + clientResponse.statusCode()))) + .bodyToMono(String.class); + } + + //=======鑷畾涔夎繑鍥炵被鍨嬬殑璇锋眰=================================================================================================================================== + public static <T> Mono<T> getAndParse(String url, Class<T> responseType) { + return webClient.get() + .uri(url) + .accept(MediaType.APPLICATION_JSON) + .retrieve() + .bodyToMono(responseType); + } + + public static <T, R> Mono<T> postAndParse(String url, R requestBody, Class<T> responseType) { + return webClient.post() + .uri(url) + .contentType(MediaType.APPLICATION_JSON) + .body(BodyInserters.fromValue(requestBody)) + .retrieve() + .bodyToMono(responseType); + } + + /** + * toBodilessEntity涓嬭浇鎴栬�呭垹闄ょ殑鏃跺�欎笉闇�瑕佸鍝嶅簲浣撳鐞嗙殑 + * toBodilessEntity() 鏄� Java Play Framework 涓� WebClient 绫荤殑涓�涓柟娉曪紝瀹冪敤浜庡皢鍝嶅簲杞崲涓烘病鏈変綋鐨勫搷搴斿疄浣撱�� + * 閫氬父锛屾垜浠湪澶勭悊涓嶉渶瑕佽鍙栧搷搴斾綋鐨勫ぇ鍨嬩笅杞芥椂锛屽彲浠ヤ娇鐢ㄨ繖涓柟娉曟潵閬垮厤涓嶅繀瑕佺殑鍐呭瓨鍗犵敤銆� + */ + public static Mono<String> postAndReceiveLocation(String url, Object requestBody) { + return webClient.post() + .uri(url) + .bodyValue(requestBody) + .retrieve() + .toBodilessEntity() + .flatMap(response -> { + if (response.getHeaders().getLocation() != null) { + return Mono.just(response.getHeaders().getLocation().toString()); + } else { + // 濡傛灉Location涓虹┖锛岃繑鍥瀝esponse + return Mono.just(response.getStatusCode().toString()); + } + }); + } + + public static Mono<String> postFormAndReceiveLocation(String url, MultiValueMap<String, String> formData) { + return webClient.post() + .uri(url) + .contentType(MediaType.APPLICATION_FORM_URLENCODED) + .body(BodyInserters.fromFormData(formData)) + .retrieve() + .toBodilessEntity() + .flatMap(response -> { + if (response.getHeaders().getLocation() != null) { + return Mono.just(response.getHeaders().getLocation().toString()); + } else { + // 濡傛灉Location涓虹┖锛岃繑鍥瀝esponse鐨勭姸鎬佺爜 + return Mono.just(response.getStatusCode().toString()); + } + }); + } + + + //========================================================================================================================================== + // 寮傛璇锋眰鐨凣ET銆丳OST鏂瑰紡 + public static <T> Mono<T> getAsMono(String path, Class<T> responseType) { + return webClient.get() + .uri(path) + .accept(MediaType.APPLICATION_JSON) + .retrieve() + .bodyToMono(responseType); + } + + public static <T> Mono<T> postAsMono(String path, Object requestBody, Class<T> responseType) { + return webClient.post() + .uri(path) + .contentType(MediaType.APPLICATION_JSON) + .body(Mono.just(requestBody), requestBody.getClass()) + .retrieve() + .bodyToMono(responseType); + } + + public static <T> CompletableFuture<T> getAsFuture(String path, Class<T> responseType) { + return webClient + .method(HttpMethod.GET) + .uri(path) + .accept(MediaType.APPLICATION_JSON) + .retrieve() + .bodyToMono(responseType) + .toFuture(); + } + + public static <T> CompletableFuture<T> postAsFuture(String path, Object requestBody, HashMap<String, String> headers, Class<T> responseType) { + return webClient + .method(HttpMethod.POST) + .uri(path) + .contentType(MediaType.APPLICATION_JSON) + .headers(h -> headers.forEach(h::add)) + .bodyValue(requestBody) + .retrieve() + .bodyToMono(responseType) + .toFuture(); + } + + + 
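    // Illustrative usage sketch: one way the reactive and future-based helpers above can be
    // combined when a caller needs a blocking result. The URLs and payload are placeholders,
    // not real endpoints from this project.
    private static String fetchAndEchoExample() {
        // Reactive style: block (with a timeout) on the Mono returned by getAndParse.
        String fetched = getAndParse("https://api.example.com/data", String.class)
                .block(Duration.ofSeconds(10));

        // Future style: postAsFuture exposes the same call as a CompletableFuture; join() waits for it.
        HashMap<String, String> headers = new HashMap<>();
        headers.put("Content-Type", "application/json");
        CompletableFuture<String> posted = postAsFuture("https://api.example.com/echo", fetched, headers, String.class);
        return posted.join();
    }
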
//========================================================================================================================================== + // 鍏朵粬鏂瑰紡涓�鍙戣捣璇锋眰 + private static final WebClient WEB_CLIENT = WebClient.create(); + + /** + * 鍙戣捣GET璇锋眰锛屾敮鎸丟et parameter + */ + public static CompletableFuture<String> getParam(String url, HttpHeaders headers, MultiValueMap<String, String> queryParams) { + return Mono.from(WEB_CLIENT.get() + .uri(uriBuilder -> uriBuilder + .path(url) + .queryParams(queryParams) + .build()) + .headers(httpHeaders -> httpHeaders.putAll(headers)) + //.headers(h -> headers.forEach(h::add)) + .retrieve() + .onStatus(HttpStatus::isError, clientResponse -> Mono.error(new RuntimeException("HTTP error status: " + clientResponse.statusCode()))) + .bodyToMono(String.class)) + .onErrorResume(error -> Mono.just("Error: " + error.getMessage())) // 濡傛灉鏈夐敊璇紝杩斿洖閿欒淇℃伅 + .toFuture(); + } + + /** + * 鍙戣捣GET璇锋眰锛屾敮鎸丟et parameter + * 鍙互鐢� + */ + public static CompletableFuture<String> getNoParam(String url, HttpHeaders headers) { + return Mono.from(WEB_CLIENT.get() + .uri(url) + .headers(httpHeaders -> httpHeaders.putAll(headers)) + //.headers(h -> headers.forEach(h::add)) + .retrieve() + .onStatus(HttpStatus::isError, clientResponse -> Mono.error(new RuntimeException("HTTP error status: " + clientResponse.statusCode()))) + .bodyToMono(String.class)) + .onErrorResume(error -> Mono.just("Error: " + error.getMessage())) // 濡傛灉鏈夐敊璇紝杩斿洖閿欒淇℃伅 + .toFuture(); + } + + /** + * 鍙戣捣POST璇锋眰锛屾敮鎸丣SON body + */ + public static CompletableFuture<String> postJson(String url, Object body, HashMap<String, String> headers) { + return Mono.from(WEB_CLIENT.post() + .uri(url) + .contentType(MediaType.APPLICATION_JSON) + .headers(h -> headers.forEach(h::add)) + .bodyValue(body) + .retrieve() + .onStatus(HttpStatus::isError, clientResponse -> Mono.error(new RuntimeException("HTTP error status: " + clientResponse.statusCode()))) + .bodyToMono(String.class)) + .onErrorResume(error -> Mono.just("Error: " + error.getMessage())) // 濡傛灉鏈夐敊璇紝杩斿洖閿欒淇℃伅 + .toFuture(); + } + + /** + * 鍙戣捣POST璇锋眰锛屾敮鎸佽〃鍗曟暟鎹� + */ + public static CompletableFuture<String> postForm(String url, MultiValueMap<String, String> formData, Map<String, String> headers) { + return Mono.from(WEB_CLIENT.post() + .uri(url) + .headers(h -> headers.forEach(h::add)) + .contentType(MediaType.APPLICATION_FORM_URLENCODED) + .body(BodyInserters.fromFormData(formData)) + .retrieve() + .bodyToMono(String.class)) + .toFuture(); + } + + //========================================================================================================================================= + // 鍏朵粬璇锋眰鏂瑰紡浜� + public Mono<String> getRequest(String url, long timeoutSeconds, int retryCount) { + return executeRequest(url, HttpMethod.GET, null, HttpHeaders.EMPTY, timeoutSeconds, retryCount); + } + + public Mono<String> postRequest(String url, Object requestBody, HttpHeaders headers, long timeoutSeconds, int retryCount) { + return executeRequest(url, HttpMethod.POST, requestBody, headers, timeoutSeconds, retryCount); + } + + /** + * 鎵ц璇锋眰 + * + * @param url 缃戝潃 + * @param method 鏂规硶 + * @param requestBody 璇锋眰姝f枃 + * @param headers 澶� + * @param timeoutSeconds 瓒呮椂绉掓暟 + * @param retryCount 閲嶈瘯璁℃暟 + * @return {@link Mono}<{@link String}> + */ + private Mono<String> executeRequest(String url, HttpMethod method, Object requestBody, HttpHeaders headers, long timeoutSeconds, int retryCount) { + return executeRequestInternal(url, method, requestBody, headers, timeoutSeconds, retryCount) + 
.onErrorResume(throwable -> { + logger.error("Error during request: {}", throwable.getMessage()); + return Mono.error(throwable); + }); + } + + /** + * 鍐呴儴鎵ц璇锋眰 + * + * @param url 缃戝潃 + * @param method 鏂规硶 + * @param requestBody 璇锋眰姝f枃 + * @param headers 澶� + * @param timeoutSeconds 瓒呮椂绉掓暟 + * @param retryCount 閲嶈瘯璁℃暟 + * @return {@link Mono}<{@link String}> + */ + private Mono<String> executeRequestInternal(String url, HttpMethod method, Object requestBody, HttpHeaders headers, long timeoutSeconds, int retryCount) { + return webClient.method(method) + .uri(url) + .headers(httpHeaders -> httpHeaders.addAll(headers)) + .bodyValue(requestBody) + .retrieve() + .bodyToMono(String.class) + .timeout(Duration.ofSeconds(timeoutSeconds)) + .doOnError(error -> logger.error("Error during request: {}", error)) + .retry(retryCount); + } + + //========================================================================================================================================= + // 鍏朵粬鍙傛暟 + + /** + * 浣跨敤瓒呮椂鑾峰彇 + * + * @param url 绔偣 + * @param timeoutSeconds 瓒呮椂绉掓暟 + * @return {@link Mono}<{@link String}> + */ + public Mono<String> getWithTimeout(String url, long timeoutSeconds) { + return webClient.get() + .uri(url) + .accept(MediaType.APPLICATION_JSON) + .retrieve() + .bodyToMono(String.class) + .timeout(Duration.ofSeconds(timeoutSeconds)) + .onErrorMap(error -> new CustomWebClientException("Request timeout", error)); + } + + /** + * 甯︽爣棰樺笘瀛� + * + * @param url 绔偣 + * @param requestBody 璇锋眰姝f枃 + * @param headers 澶� + * @return {@link Mono}<{@link String}> + */ + public Mono<String> postWithHeaders(String url, Object requestBody, HttpHeaders headers) { + return webClient.post() + .uri(url) + .bodyValue(requestBody) + .headers(httpHeaders -> httpHeaders.addAll(headers)) + .retrieve() + .bodyToMono(String.class); + } + + /** + * 鑾峰彇骞跺彂 + * + * @param endpoint1 绔偣 1 + * @param endpoint2 绔偣 2 + * @return {@link Mono}<{@link String}> + */ + public Mono<String> getConcurrently(String endpoint1, String endpoint2) { + Mono<String> result1 = webClient.get() + .uri(endpoint1) + .retrieve() + .bodyToMono(String.class); + Mono<String> result2 = webClient.get() + .uri(endpoint2) + .retrieve() + .bodyToMono(String.class); + return result1.zipWith(result2).map(tuple -> tuple.getT1() + tuple.getT2()); + } + //========================================================================================================================================== + + /** + * 鑷畾涔� Web 瀹㈡埛绔紓甯� + * Custom exception class for WebClient error handling + * + * @author xingjinshuang@smartearth.cn + * @date 2024/06/27 + */ + public static class CustomWebClientException1 extends RuntimeException { + public CustomWebClientException1(String message) { + super(message); + } + } + + /** + * 鑷畾涔� Web 瀹㈡埛绔紓甯� + * + * @author xingjinshuang + * @date 2024/06/27 + */ + public static class CustomWebClientException extends RuntimeException { + public CustomWebClientException(String message, Throwable cause) { + super(message, cause); + } + } + + + /** + * 涓昏 + * + * @param args 鍙傛暟 + */ + public static void main(String[] args) { + HashMap<String, String> headers = new HashMap<>(); + headers.put("Content-Type", "application/json"); + + Mono<String> res = getAndParse("https://api.example.com/data", String.class); + res.subscribe(dataResponse -> { + // 澶勭悊鏁版嵁鍝嶅簲 + System.out.println("Received data response: " + dataResponse); + }); + + Mono<String> res1 = get("https://api.example.com/textdata"); + res1.subscribe(textData -> { + // 澶勭悊鏂囨湰鏁版嵁鍝嶅簲 + 
System.out.println("Received text data: " + textData); + }); + + String requestBody00 = new String("test"); + Mono<String> res2 = postAndReceiveLocation("https://api.example.com/resource", requestBody00); + res2.subscribe(location -> { + // 澶勭悊杩斿洖鐨勮祫婧愪綅缃� + System.out.println("Resource location: " + location); + }); + + MultiValueMap<String, String> formData = new LinkedMultiValueMap<>(); + formData.add("key1", "value1"); + formData.add("key2", "value2"); + Mono<String> res3 = postFormAndReceiveLocation("https://api.example.com/formsubmit", formData); + res3.subscribe(location -> { + // 澶勭悊杩斿洖鐨勮〃鍗曟彁浜や綅缃� + System.out.println("Form submission location: " + location); + }); + + + // 寮傛GET璇锋眰,閫氳繃subscribe鏂规硶鏉ュ鐞嗗搷搴� + Mono<String> asyncResponse0 = getAsMono("/api/resource", String.class); + // asyncResponse0.subscribe(System.out::println); + asyncResponse0.flatMap(response -> { + System.out.println("GET璇锋眰缁撴灉锛�" + response); + return Mono.just(response); + }).subscribe(); + + // 寮傛POST璇锋眰,閫氳繃subscribe鏂规硶鏉ュ鐞嗗搷搴� + String requestBody0 = new String("data"); + Mono<String> asyncPostedResponse0 = postAsMono("/api/resource", requestBody0, String.class); + // asyncPostedResponse0.subscribe(System.out::println); + asyncPostedResponse0.flatMap(response -> { + System.out.println("POST璇锋眰缁撴灉锛�" + response); + return Mono.just(response); + }).subscribe(); + + // 寮傛GET璇锋眰锛屼笉浼氱洿鎺ヨ繑鍥炶繑鍥炰綋 + CompletableFuture<String> asyncResponse = getAsFuture("/api/resource", String.class); + asyncResponse.thenAccept(response -> { + System.out.println("GET璇锋眰缁撴灉锛�" + response); + }); + + // 寮傛POST璇锋眰锛屼笉浼氱洿鎺ヨ繑鍥炶繑鍥炰綋 + String requestBody = new String("data"); + CompletableFuture<String> asyncPostedResponse = postAsFuture("/api/resource", requestBody, headers, String.class); + asyncPostedResponse.thenAccept(response -> { + System.out.println("POST璇锋眰缁撴灉锛�" + response); + }); + + // henAccept鏂规硶鏄竴涓秷璐瑰瀷鐨勬柟娉曪紝瀹冧笉浼氳繑鍥炰换浣曞�笺�� + // 瑕佽幏鍙栧紓姝ヨ姹傜殑杩斿洖鍊硷紝鍙互浣跨敤thenApply鏂规硶锛岃繖涓柟娉曚細杩斿洖涓�涓柊鐨凜ompletableFuture瀵硅薄锛岄噷闈㈠寘鍚粡杩囧鐞嗗悗鐨勮繑鍥炲�� + // 寮傛GET璇锋眰锛岃繑鍥炶繑鍥炰綋 + CompletableFuture<String> asyncResponseRes = getAsFuture("/api/resource", String.class); + asyncResponse.thenApply(response -> { + System.out.println("GET璇锋眰缁撴灉锛�" + response); + return response; + }); + + // henAccept鏂规硶鏄竴涓秷璐瑰瀷鐨勬柟娉曪紝瀹冧笉浼氳繑鍥炰换浣曞�笺�� + // 瑕佽幏鍙栧紓姝ヨ姹傜殑杩斿洖鍊硷紝鍙互浣跨敤thenApply鏂规硶锛岃繖涓柟娉曚細杩斿洖涓�涓柊鐨凜ompletableFuture瀵硅薄锛岄噷闈㈠寘鍚粡杩囧鐞嗗悗鐨勮繑鍥炲�� + // 寮傛POST璇锋眰锛岃繑鍥炶繑鍥炰綋 + String requestBody1 = new String("data"); + CompletableFuture<String> asyncPostedResponseRes = postAsFuture("/api/resource", requestBody1, headers, String.class); + asyncPostedResponse.thenApply(response -> { + System.out.println("POST璇锋眰缁撴灉锛�" + response); + return response; + }); + + // 鍚屾鏂瑰紡涓嬭幏鍙栧搷搴斾綋锛屽彲浠ヤ娇鐢╦oin鏂规硶鏉ョ瓑寰呭紓姝ユ搷浣滅殑瀹屾垚骞惰幏鍙栨渶缁堢殑缁撴灉銆傝繖鏍峰彲浠ョ‘淇濆湪鑾峰彇缁撴灉涔嬪墠闃诲褰撳墠绾跨▼锛岀洿鍒板紓姝ユ搷浣滃畬鎴愩�� + // 浣跨敤join鏂规硶鏉ュ悓姝ヨ幏鍙栧搷搴斾綋锛� + String requestBody2 = new String("data"); + CompletableFuture<String> asyncPostedResponse2 = CustomWebClient.postAsFuture("/api/resource", requestBody2, headers, String.class); + asyncPostedResponse2.thenAccept(response -> { + System.out.println("POST璇锋眰缁撴灉锛�" + response); + }); + String syncResponse = asyncPostedResponse2.join(); + System.out.println("鍚屾鑾峰彇鐨勫搷搴斾綋锛�" + syncResponse); + + // 闃叉涓荤嚎绋嬫彁鍓嶇粨鏉� + try { + // 绛夊緟寮傛璇锋眰瀹屾垚 + Thread.sleep(5000); + } catch (InterruptedException e) { + e.printStackTrace(); + } + } + +} diff --git a/src/main/java/com/se/nsl/utils/EntityLibraryUtils.java b/src/main/java/com/se/nsl/utils/EntityLibraryUtils.java new file mode 100644 index 0000000..971b111 --- /dev/null +++ 
b/src/main/java/com/se/nsl/utils/EntityLibraryUtils.java @@ -0,0 +1,313 @@ +package com.se.nsl.utils; + +import com.alibaba.fastjson.JSONArray; +import com.alibaba.fastjson.JSONObject; +import com.google.common.io.Resources; +import com.se.nsl.domain.dto.GridDto; +import org.apache.commons.codec.binary.Base64; +import org.springframework.http.*; +import org.springframework.http.converter.StringHttpMessageConverter; +import org.springframework.web.client.RestTemplate; + +import javax.crypto.Cipher; +import java.net.URL; +import java.nio.charset.StandardCharsets; +import java.security.KeyFactory; +import java.security.interfaces.RSAPrivateKey; +import java.security.interfaces.RSAPublicKey; +import java.security.spec.PKCS8EncodedKeySpec; +import java.security.spec.X509EncodedKeySpec; +import java.util.List; + +/** + * 瀹炰綋搴撹姹� + */ +public class EntityLibraryUtils { + /** + * 鑾峰彇鍔犲瘑鍏挜 + * + * @return 鍏遍挜 + */ + public static String getPublicKey() { + JSONObject jsonObject = new JSONObject(); + String json = jsonObject.toJSONString(); + // 鍙戦�丣SON鏍煎紡鐨凱OST璇锋眰 + HttpHeaders headers = new HttpHeaders(); + headers.setContentType(MediaType.APPLICATION_JSON); + HttpEntity<String> request = new HttpEntity<>(json, headers); + RestTemplate restTemplate = new RestTemplate(); + ResponseEntity<String> responseEntity = restTemplate.postForEntity("http://106.120.22.26:8024/geo-service/setting/publickey", request, String.class); + if (responseEntity.getStatusCode().is2xxSuccessful()) { + String body = responseEntity.getBody(); + return JSONObject.parseObject(body).getString("data"); + } + return null; + } + + public static String getLoginPublicKey() { + JSONObject jsonObject = new JSONObject(); + String json = jsonObject.toJSONString(); + // 鍙戦�丣SON鏍煎紡鐨凱OST璇锋眰 + HttpHeaders headers = new HttpHeaders(); + headers.setContentType(MediaType.APPLICATION_JSON); + HttpEntity<String> request = new HttpEntity<>(json, headers); + RestTemplate restTemplate = new RestTemplate(); + ResponseEntity<String> responseEntity = restTemplate.postForEntity("http://106.120.22.26:8024/account-service/security/publickey", request, String.class); + if (responseEntity.getStatusCode().is2xxSuccessful()) { + String body = responseEntity.getBody(); + return JSONObject.parseObject(body).getString("data"); + } + return null; + } + + /** + * 鐧诲綍瀹炰綋搴� + * + * @return 鍙傛暟鍐呰鎯� + */ + public static String login() throws Exception { + String publicKey = getLoginPublicKey(); + JSONObject jsonObject = new JSONObject(); + jsonObject.put("userid", "admin"); + jsonObject.put("password", encrypt("admin", publicKey)); + RestTemplate restTemplate = new RestTemplate(); + // 鍙戦�丣SON鏍煎紡鐨凱OST璇锋眰 + StringHttpMessageConverter converter = new StringHttpMessageConverter(StandardCharsets.UTF_8); + converter.setSupportedMediaTypes(MediaType.parseMediaTypes("text/plain;charset=UTF-8")); + restTemplate.getMessageConverters().add(0, converter); + HttpHeaders headers = new HttpHeaders(); + headers.setContentType(MediaType.APPLICATION_JSON); + headers.add("Access-Control-Allow-Origin", "*"); + String json = jsonObject.toJSONString(); + HttpEntity<String> request = new HttpEntity<>(json, headers); + ResponseEntity<String> responseEntity = restTemplate.exchange("http://106.120.22.26:8024/account-service/security/login", HttpMethod.POST, request, String.class); + if (responseEntity.getStatusCode().is2xxSuccessful()) { + String body = responseEntity.getBody(); + return JSONObject.parseObject(body).getJSONObject("data").getString("token"); + } + return null; + } + + public 
static String encrypt(String str, String publicKey) throws Exception { + //Base64缂栫爜鐨勫叕閽� + byte[] decoded = Base64.decodeBase64(publicKey); + RSAPublicKey pubKey = (RSAPublicKey) KeyFactory.getInstance("RSA").generatePublic(new X509EncodedKeySpec(decoded)); + // RSA鍔犲瘑:RSA/ECB/NoPadding + Cipher cipher = Cipher.getInstance("RSA"); + cipher.init(Cipher.ENCRYPT_MODE, pubKey); + String outstr = Base64.encodeBase64String(cipher.doFinal(str.getBytes(StandardCharsets.UTF_8))); + return outstr; + } + + public static String decrypt(String str, String privateKey) throws Exception { + if (str == null || "".equals(str)) { + return str; + } + //64浣嶈В鐮佸姞瀵嗗悗鐨勫瓧绗︿覆 + byte[] inputByte = Base64.decodeBase64(str.getBytes(StandardCharsets.UTF_8)); + //Base64缂栫爜鐨勭閽� + byte[] decoded = Base64.decodeBase64(privateKey); + RSAPrivateKey priKey = (RSAPrivateKey) KeyFactory.getInstance("RSA").generatePrivate(new PKCS8EncodedKeySpec(decoded)); + //RSA瑙e瘑:RSA/ECB/NoPadding + Cipher cipher = Cipher.getInstance("RSA"); + cipher.init(Cipher.DECRYPT_MODE, priKey); + String outstn = new String(cipher.doFinal(inputByte), StandardCharsets.UTF_8); + return outstn; + } + + /** + * 鍒涘缓缁勮 + * + * @param name 缁勮鍚嶇О + * @return 鍒涘缓缁撴灉 + */ + public static JSONObject createAssemble(String name, String token) { + JSONObject jsonObject = new JSONObject(); + jsonObject.put("token", token); + jsonObject.put("description", null); + jsonObject.put("name", name); + jsonObject.put("dbid", "85257774fdb64e5f99f6778696cad02a"); + RestTemplate restTemplate = new RestTemplate(); + // 鍙戦�丣SON鏍煎紡鐨凱OST璇锋眰 + HttpHeaders headers = new HttpHeaders(); + headers.setContentType(MediaType.APPLICATION_JSON); + String json = jsonObject.toJSONString(); + HttpEntity<String> request = new HttpEntity<>(json, headers); + ResponseEntity<String> responseEntity = restTemplate.postForEntity("http://106.120.22.26:8024/geo-service/package/self/add", request, String.class); + if (responseEntity.getStatusCode().is2xxSuccessful()) { + String body = responseEntity.getBody(); + JSONObject result = JSONObject.parseObject(body); + if (result.getInteger("code") == 200) { + return result.getJSONObject("data"); + } + } + return null; + } + + /** + * 鎻愪氦缁勮 + * @param token token + * @param packageid 缁勮id + * @return 鎻愪氦缁撴灉 + */ + public static JSONObject submitAssemble(String token, String packageid) { + JSONObject jsonObject = new JSONObject(); + jsonObject.put("dbid", "85257774fdb64e5f99f6778696cad02a"); + jsonObject.put("packageid", packageid); + jsonObject.put("token", token); + RestTemplate restTemplate = new RestTemplate(); + // 鍙戦�丣SON鏍煎紡鐨凱OST璇锋眰 + HttpHeaders headers = new HttpHeaders(); + headers.setContentType(MediaType.APPLICATION_JSON); + String json = jsonObject.toJSONString(); + HttpEntity<String> request = new HttpEntity<>(json, headers); + ResponseEntity<String> responseEntity = restTemplate.postForEntity("http://106.120.22.26:8024/geo-service/package/status/complete", request, String.class); + if (responseEntity.getStatusCode().is2xxSuccessful()) { + String body = responseEntity.getBody(); + JSONObject result = JSONObject.parseObject(body); + if (result.getInteger("code") == 200) { + return result.getJSONObject("data"); + } + } + return null; + } + + /** + * 鍔犲叆缁勮 + * + * @param layerid 鍥惧眰id + * @param token token + * @param packageid 缁勮id + * @param bbox 鑼冨洿 + * @return 鏄惁鍔犲叆鎴愬姛 + */ + public static Boolean addAssemble(String layerid, String token, String packageid, String bbox) { + JSONObject jsonObject = new JSONObject(); + jsonObject.put("dbid", 
"85257774fdb64e5f99f6778696cad02a"); + jsonObject.put("layerid", layerid); + jsonObject.put("packageid", packageid); + jsonObject.put("token", token); + jsonObject.put("bbox", bbox); + RestTemplate restTemplate = new RestTemplate(); + // 鍙戦�丣SON鏍煎紡鐨凱OST璇锋眰 + HttpHeaders headers = new HttpHeaders(); + headers.setContentType(MediaType.APPLICATION_JSON); + String json = jsonObject.toJSONString(); + HttpEntity<String> request = new HttpEntity<>(json, headers); + ResponseEntity<String> responseEntity = restTemplate.postForEntity("http://106.120.22.26:8024/geo-service/package/data/add", request, String.class); + if (responseEntity.getStatusCode().is2xxSuccessful()) { + String body = responseEntity.getBody(); + JSONObject result = JSONObject.parseObject(body); + if (result.getInteger("code") == 200) { + return result.getBoolean("data"); + } + } + return null; + } + + /** + * 绠$嚎鍒楄〃 + * + * @param dtos 鍥涗釜鍧愭爣鐐� + * @return 妗嗛�夊唴閮ㄧ绾� + */ + public static JSONArray getPointInfo(List<GridDto> dtos, String token) { + JSONArray paramArray = new JSONArray(); + for (GridDto dto : dtos + ) { + paramArray.add(ProjectionToGeographicUtil.get4548Point(dto.getLon(), dto.getLat())); + } + paramArray.add(ProjectionToGeographicUtil.get4548Point(dtos.get(0).getLon(), dtos.get(0).getLat())); + JSONObject jsonObject = getModule("layerQueryPointParams.json"); + jsonObject.getJSONObject("geometry").getJSONArray("coordinates").add(paramArray); + jsonObject.put("token", token); + RestTemplate restTemplate = new RestTemplate(); + // 鍙戦�丣SON鏍煎紡鐨凱OST璇锋眰 + HttpHeaders headers = new HttpHeaders(); + headers.setContentType(MediaType.APPLICATION_JSON); + String json = jsonObject.toJSONString(); + HttpEntity<String> request = new HttpEntity<>(json, headers); + ResponseEntity<String> responseEntity = restTemplate.postForEntity("http://106.120.22.26:8024/geo-service/entitydbdata/layer/query", request, String.class); + if (responseEntity.getStatusCode().is2xxSuccessful()) { + String body = responseEntity.getBody(); + return JSONObject.parseObject(body).getJSONObject("data").getJSONArray("items"); + } + return null; + } + + /** + * 绠$嚎鍒楄〃 + * + * @param dtos 鍥涗釜鍧愭爣鐐� + * @return 妗嗛�夊唴閮ㄧ绾� + */ + public static JSONArray getLineInfo(List<GridDto> dtos, String token) { + JSONArray paramArray = new JSONArray(); + for (GridDto dto : dtos + ) { + paramArray.add(ProjectionToGeographicUtil.get4548Point(dto.getLon(), dto.getLat())); + } + paramArray.add(ProjectionToGeographicUtil.get4548Point(dtos.get(0).getLon(), dtos.get(0).getLat())); + JSONObject jsonObject = getModule("layerQueryParams.json"); + jsonObject.getJSONObject("geometry").getJSONArray("coordinates").add(paramArray); + jsonObject.put("token", token); + RestTemplate restTemplate = new RestTemplate(); + // 鍙戦�丣SON鏍煎紡鐨凱OST璇锋眰 + HttpHeaders headers = new HttpHeaders(); + headers.setContentType(MediaType.APPLICATION_JSON); + String json = jsonObject.toJSONString(); + HttpEntity<String> request = new HttpEntity<>(json, headers); + ResponseEntity<String> responseEntity = restTemplate.postForEntity("http://106.120.22.26:8024/geo-service/entitydbdata/layer/query", request, String.class); + if (responseEntity.getStatusCode().is2xxSuccessful()) { + String body = responseEntity.getBody(); + return JSONObject.parseObject(body).getJSONObject("data").getJSONArray("items"); + } + return null; + } + + /** + * 绠$嚎鍒楄〃璇︽儏 + * + * @param param 鏌ヨ鍙傛暟 + * @return 鍙傛暟鍐呰鎯� + */ + public static JSONArray getLineDetail(String param, String token) { + JSONObject jsonObject = 
getModule("layerQueryDetailParams.json"); + jsonObject.put("where", param); + jsonObject.put("token", token); + RestTemplate restTemplate = new RestTemplate(); + // 鍙戦�丣SON鏍煎紡鐨凱OST璇锋眰 + StringHttpMessageConverter converter = new StringHttpMessageConverter(StandardCharsets.UTF_8); + converter.setSupportedMediaTypes(MediaType.parseMediaTypes("text/plain;charset=UTF-8")); + restTemplate.getMessageConverters().add(0, converter); + HttpHeaders headers = new HttpHeaders(); + headers.setContentType(MediaType.APPLICATION_JSON); + String json = jsonObject.toJSONString(); + HttpEntity<String> request = new HttpEntity<>(json, headers); + ResponseEntity<String> responseEntity = restTemplate.exchange("http://106.120.22.26:8024/geo-service/entitydbdata/layer/query", HttpMethod.POST, request, String.class); + if (responseEntity.getStatusCode().is2xxSuccessful()) { + String body = responseEntity.getBody(); + return JSONObject.parseObject(body).getJSONArray("features"); + } + return null; + } + + /** + * 鑾峰彇璇锋眰json + * + * @param moduleName json鍚� + * @return json鍐呭 + */ + private static JSONObject getModule(String moduleName) { + JSONObject jsonObject = new JSONObject(); + try { + URL resource = Resources.getResource(moduleName); + String fileContent = Resources.toString(resource, StandardCharsets.UTF_8); + jsonObject = JSONObject.parseObject(fileContent); + System.out.println(fileContent); + } catch (Exception e) { + e.printStackTrace(); + } + return jsonObject; + } +} diff --git a/src/main/java/com/se/nsl/utils/FileUtil.java b/src/main/java/com/se/nsl/utils/FileUtil.java new file mode 100644 index 0000000..579e3a2 --- /dev/null +++ b/src/main/java/com/se/nsl/utils/FileUtil.java @@ -0,0 +1,37 @@ +package com.se.nsl.utils; + +import java.io.File; +import java.util.ArrayList; +import java.util.List; + +/** + * + */ +public class FileUtil { + public static String getShpPath(String dirPath) { + File root = new File(dirPath); + List<String> filePath=new ArrayList<>(); + traverse(root,filePath); + for (String path:filePath + ) { + if (path.endsWith(".shp")){ + return path; + } + } + return null; + } + + public static void traverse(File dir,List<String> filePath) { + File[] files = dir.listFiles(); + if (files != null) { + for (File file : files) { + if (file.isDirectory()) { + traverse(file, filePath); + } else { + filePath.add(file.getAbsolutePath()); + } + } + } + } + +} diff --git a/src/main/java/com/se/nsl/utils/H5.java b/src/main/java/com/se/nsl/utils/H5.java new file mode 100644 index 0000000..eeed80d --- /dev/null +++ b/src/main/java/com/se/nsl/utils/H5.java @@ -0,0 +1,2397 @@ +package com.se.nsl.utils; + +import hdf.hdf5lib.HDF5Constants; +import hdf.hdf5lib.HDF5GroupInfo; +import hdf.hdf5lib.HDFArray; +import hdf.hdf5lib.callbacks.H5A_iterate_cb; +import hdf.hdf5lib.callbacks.H5A_iterate_t; +import hdf.hdf5lib.callbacks.H5D_iterate_cb; +import hdf.hdf5lib.callbacks.H5D_iterate_t; +import hdf.hdf5lib.callbacks.H5E_walk_cb; +import hdf.hdf5lib.callbacks.H5E_walk_t; +import hdf.hdf5lib.callbacks.H5L_iterate_cb; +import hdf.hdf5lib.callbacks.H5L_iterate_t; +import hdf.hdf5lib.callbacks.H5O_iterate_cb; +import hdf.hdf5lib.callbacks.H5O_iterate_t; +import hdf.hdf5lib.callbacks.H5P_iterate_cb; +import hdf.hdf5lib.callbacks.H5P_iterate_t; +import hdf.hdf5lib.exceptions.HDF5Exception; +import hdf.hdf5lib.exceptions.HDF5JavaException; +import hdf.hdf5lib.exceptions.HDF5LibraryException; +import hdf.hdf5lib.structs.H5AC_cache_config_t; +import hdf.hdf5lib.structs.H5A_info_t; +import 
hdf.hdf5lib.structs.H5F_info_t; +import hdf.hdf5lib.structs.H5G_info_t; +import hdf.hdf5lib.structs.H5L_info_t; +import hdf.hdf5lib.structs.H5O_info_t; +import java.io.File; +import java.io.Serializable; +import java.nio.ByteBuffer; +import java.util.Collection; +import java.util.LinkedHashSet; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class H5 implements Serializable { + private static final long serialVersionUID = 6129888282117053288L; + private static final Logger log = LoggerFactory.getLogger(H5.class); + public static final int[] LIB_VERSION = new int[]{1, 8, 19}; + public static final String H5PATH_PROPERTY_KEY = "hdf.hdf5lib.H5.hdf5lib"; + public static final String H5_LIBRARY_NAME_PROPERTY_KEY = "hdf.hdf5lib.H5.loadLibraryName"; + private static String s_libraryName; + private static boolean isLibraryLoaded = false; + private static final boolean IS_CRITICAL_PINNING = true; + private static final LinkedHashSet<Integer> OPEN_IDS = new LinkedHashSet(); + + public H5() { + } + + public static void loadH5Lib() { + if (!isLibraryLoaded) { +// System.setProperty("hdf.hdf5lib.H5.loadLibraryName", "hdf5"); + s_libraryName = System.getProperty("hdf.hdf5lib.H5.loadLibraryName", (String)null); + String var0 = null; + if (s_libraryName != null && s_libraryName.length() > 0) { + try { + var0 = System.mapLibraryName(s_libraryName); + System.loadLibrary(s_libraryName); + isLibraryLoaded = true; + } catch (Throwable var30) { + var30.printStackTrace(); + isLibraryLoaded = false; + } finally { + log.info("HDF5 library: " + s_libraryName); + log.debug(" resolved to: " + var0 + "; "); + log.info((isLibraryLoaded ? "" : " NOT") + " successfully loaded from system property"); + } + } + + if (!isLibraryLoaded) { + String var1 = System.getProperty("hdf.hdf5lib.H5.hdf5lib", (String)null); + if (var1 != null && var1.length() > 0) { + File var2 = new File(var1); + if (!var2.exists() || !var2.canRead() || !var2.isFile()) { + isLibraryLoaded = false; + throw new UnsatisfiedLinkError("Invalid HDF5 library, " + var1); + } + + try { + System.load(var1); + isLibraryLoaded = true; + } catch (Throwable var29) { + var29.printStackTrace(); + isLibraryLoaded = false; + } finally { + log.info("HDF5 library: "); + log.debug(var1); + log.info((isLibraryLoaded ? "" : " NOT") + " successfully loaded."); + } + } + } + + if (!isLibraryLoaded) { + try { + s_libraryName = "jhdf5"; + var0 = System.mapLibraryName(s_libraryName); + System.loadLibrary("jhdf5"); + isLibraryLoaded = true; + } catch (Throwable var28) { + var28.printStackTrace(); + isLibraryLoaded = false; + } finally { + log.info("HDF5 library: " + s_libraryName); + log.debug(" resolved to: " + var0 + "; "); + log.info((isLibraryLoaded ? 
"" : " NOT") + " successfully loaded from java.library.path"); + } + } + + try { + H5dont_atexit(); + } catch (HDF5LibraryException var27) { + System.exit(1); + } + + if (!log.isDebugEnabled()) { + H5error_off(); + } + + Integer var34 = Integer.getInteger("hdf.hdf5lib.H5.hdf5maj", (Integer)null); + Integer var35 = Integer.getInteger("hdf.hdf5lib.H5.hdf5min", (Integer)null); + Integer var3 = Integer.getInteger("hdf.hdf5lib.H5.hdf5rel", (Integer)null); + if (var34 != null && var35 != null && var3 != null) { + H5check_version(var34, var35, var3); + } + + } + } + + public static final int getOpenIDCount() { + return OPEN_IDS.size(); + } + + /** @deprecated */ + @Deprecated + public static final int getOpenID(int var0) { + byte var1 = -1; + return var1; + } + + public static final Collection<Integer> getOpenIDs() { + return OPEN_IDS; + } + + public static synchronized native int H5check_version(int var0, int var1, int var2); + + public static synchronized native int H5close() throws HDF5LibraryException; + + public static synchronized native int H5open() throws HDF5LibraryException; + + private static synchronized native int H5dont_atexit() throws HDF5LibraryException; + + public static synchronized native int H5error_off(); + + public static synchronized native int H5garbage_collect() throws HDF5LibraryException; + + public static synchronized native int H5get_libversion(int[] var0) throws HDF5LibraryException; + + public static synchronized native int H5set_free_list_limits(int var0, int var1, int var2, int var3, int var4, int var5) throws HDF5LibraryException; + + public static synchronized native void H5export_dataset(String var0, String var1, String var2, int var3) throws HDF5LibraryException; + + private static synchronized native boolean H5is_library_threadsafe(); + + public static int H5Aclose(int var0) throws HDF5LibraryException { + if (var0 < 0) { + return 0; + } else { + log.trace("OPEN_IDS: H5Aclose remove {}", var0); + OPEN_IDS.remove(var0); + log.trace("OPEN_IDS: {}", OPEN_IDS.size()); + return _H5Aclose(var0); + } + } + + private static synchronized native int _H5Aclose(int var0) throws HDF5LibraryException; + + public static synchronized native int H5Acopy(int var0, int var1) throws HDF5LibraryException; + + /** @deprecated */ + @Deprecated + public static int H5Acreate(int var0, String var1, int var2, int var3, int var4) throws HDF5LibraryException, NullPointerException { + int var5 = _H5Acreate(var0, var1, var2, var3, var4); + if (var5 > 0) { + log.trace("OPEN_IDS: H5A add {}", var5); + OPEN_IDS.add(var5); + log.trace("OPEN_IDS: {}", OPEN_IDS.size()); + } + + return var5; + } + + private static synchronized native int _H5Acreate(int var0, String var1, int var2, int var3, int var4) throws HDF5LibraryException, NullPointerException; + + public static int H5Acreate(int var0, String var1, int var2, int var3, int var4, int var5) throws HDF5LibraryException, NullPointerException { + int var6 = _H5Acreate2(var0, var1, var2, var3, var4, var5); + if (var6 > 0) { + log.trace("OPEN_IDS: H5A create add {}", var6); + OPEN_IDS.add(var6); + log.trace("OPEN_IDS: {}", OPEN_IDS.size()); + } + + return var6; + } + + private static synchronized native int _H5Acreate2(int var0, String var1, int var2, int var3, int var4, int var5) throws HDF5LibraryException, NullPointerException; + + public static int H5Acreate_by_name(int var0, String var1, String var2, int var3, int var4, int var5, int var6, int var7) throws HDF5LibraryException, NullPointerException { + int var8 = _H5Acreate_by_name(var0, 
var1, var2, var3, var4, var5, var6, var7); + if (var8 > 0) { + log.trace("OPEN_IDS: H5Acreate_by_name add {}", var8); + OPEN_IDS.add(var8); + log.trace("OPEN_IDS: {}", OPEN_IDS.size()); + } + + return var8; + } + + private static synchronized native int _H5Acreate_by_name(int var0, String var1, String var2, int var3, int var4, int var5, int var6, int var7) throws HDF5LibraryException, NullPointerException; + + public static synchronized native int H5Adelete(int var0, String var1) throws HDF5LibraryException, NullPointerException; + + public static synchronized native void H5Adelete_by_idx(int var0, String var1, int var2, int var3, long var4, int var6) throws HDF5LibraryException, NullPointerException; + + public static synchronized native int H5Adelete_by_name(int var0, String var1, String var2, int var3) throws HDF5LibraryException, NullPointerException; + + public static synchronized native boolean H5Aexists(int var0, String var1) throws HDF5LibraryException, NullPointerException; + + public static synchronized native boolean H5Aexists_by_name(int var0, String var1, String var2, int var3) throws HDF5LibraryException, NullPointerException; + + public static synchronized native H5A_info_t H5Aget_info(int var0) throws HDF5LibraryException; + + public static synchronized native H5A_info_t H5Aget_info_by_idx(int var0, String var1, int var2, int var3, long var4, int var6) throws HDF5LibraryException, NullPointerException; + + public static synchronized native H5A_info_t H5Aget_info_by_name(int var0, String var1, String var2, int var3) throws HDF5LibraryException, NullPointerException; + + public static synchronized native String H5Aget_name(int var0) throws HDF5LibraryException; + + public static synchronized native String H5Aget_name_by_idx(int var0, String var1, int var2, int var3, long var4, int var6) throws HDF5LibraryException, NullPointerException; + + /** @deprecated */ + @Deprecated + public static synchronized native int H5Aget_num_attrs(int var0) throws HDF5LibraryException; + + public static int H5Aget_space(int var0) throws HDF5LibraryException { + int var1 = _H5Aget_space(var0); + if (var1 > 0) { + log.trace("OPEN_IDS: H5Aget_space add {}", var1); + OPEN_IDS.add(var1); + log.trace("OPEN_IDS: {}", OPEN_IDS.size()); + } + + return var1; + } + + private static synchronized native int _H5Aget_space(int var0) throws HDF5LibraryException; + + public static synchronized native long H5Aget_storage_size(int var0) throws HDF5LibraryException; + + public static int H5Aget_type(int var0) throws HDF5LibraryException { + int var1 = _H5Aget_type(var0); + if (var1 > 0) { + log.trace("OPEN_IDS: H5Aget_type add {}", var1); + OPEN_IDS.add(var1); + log.trace("OPEN_IDS: {}", OPEN_IDS.size()); + } + + return var1; + } + + private static synchronized native int _H5Aget_type(int var0) throws HDF5LibraryException; + + public static int H5Aopen(int var0, String var1, int var2) throws HDF5LibraryException, NullPointerException { + int var3 = _H5Aopen(var0, var1, var2); + if (var3 > 0) { + log.trace("OPEN_IDS: H5Aopen add {}", var3); + OPEN_IDS.add(var3); + log.trace("OPEN_IDS: {}", OPEN_IDS.size()); + } + + return var3; + } + + private static synchronized native int _H5Aopen(int var0, String var1, int var2) throws HDF5LibraryException, NullPointerException; + + public static int H5Aopen_by_idx(int var0, String var1, int var2, int var3, long var4, int var6, int var7) throws HDF5LibraryException, NullPointerException { + int var8 = _H5Aopen_by_idx(var0, var1, var2, var3, var4, var6, var7); + if (var8 > 0) { + 
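            // Bookkeeping shared by the open/create wrappers in this class: every handle that is
            // successfully opened is recorded in OPEN_IDS (and removed again by the corresponding
            // close wrapper), so getOpenIDCount()/getOpenIDs() can be used to spot leaked HDF5 ids.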
log.trace("OPEN_IDS: H5Aopen_by_idx add {}", var8); + OPEN_IDS.add(var8); + log.trace("OPEN_IDS: {}", OPEN_IDS.size()); + } + + return var8; + } + + private static synchronized native int _H5Aopen_by_idx(int var0, String var1, int var2, int var3, long var4, int var6, int var7) throws HDF5LibraryException, NullPointerException; + + public static int H5Aopen_by_name(int var0, String var1, String var2, int var3, int var4) throws HDF5LibraryException, NullPointerException { + int var5 = _H5Aopen_by_name(var0, var1, var2, var3, var4); + if (var5 > 0) { + log.trace("OPEN_IDS: H5Aopen_by_name add {}", var5); + OPEN_IDS.add(var5); + log.trace("OPEN_IDS: {}", OPEN_IDS.size()); + } + + return var5; + } + + private static synchronized native int _H5Aopen_by_name(int var0, String var1, String var2, int var3, int var4) throws HDF5LibraryException, NullPointerException; + + /** @deprecated */ + @Deprecated + public static int H5Aopen_idx(int var0, int var1) throws HDF5LibraryException { + int var2 = _H5Aopen_idx(var0, var1); + if (var2 > 0) { + log.trace("OPEN_IDS: H5A add {}", var2); + OPEN_IDS.add(var2); + log.trace("OPEN_IDS: {}", OPEN_IDS.size()); + } + + return var2; + } + + private static synchronized native int _H5Aopen_idx(int var0, int var1) throws HDF5LibraryException; + + /** @deprecated */ + @Deprecated + public static int H5Aopen_name(int var0, String var1) throws HDF5LibraryException, NullPointerException { + int var2 = _H5Aopen_name(var0, var1); + if (var2 > 0) { + log.trace("OPEN_IDS: H5A add {}", var2); + OPEN_IDS.add(var2); + log.trace("OPEN_IDS: {}", OPEN_IDS.size()); + } + + return var2; + } + + private static synchronized native int _H5Aopen_name(int var0, String var1) throws HDF5LibraryException, NullPointerException; + + public static synchronized native int H5Aread(int var0, int var1, byte[] var2) throws HDF5LibraryException, NullPointerException; + + public static synchronized int H5Aread(int var0, int var1, Object var2) throws HDF5Exception, NullPointerException { + HDFArray var3 = new HDFArray(var2); + byte[] var4 = var3.emptyBytes(); + int var5 = H5Aread(var0, var1, var4); + if (var5 >= 0) { + var3.arrayify(var4); + } + + return var5; + } + + public static synchronized native int H5AreadVL(int var0, int var1, String[] var2) throws HDF5LibraryException, NullPointerException; + + public static synchronized native int H5AreadComplex(int var0, int var1, String[] var2) throws HDF5LibraryException, NullPointerException; + + public static synchronized native int H5Arename(int var0, String var1, String var2) throws HDF5LibraryException, NullPointerException; + + public static synchronized native int H5Arename_by_name(int var0, String var1, String var2, String var3, int var4) throws HDF5LibraryException, NullPointerException; + + public static synchronized native int H5Awrite(int var0, int var1, byte[] var2) throws HDF5LibraryException, NullPointerException; + + public static synchronized int H5Awrite(int var0, int var1, Object var2) throws HDF5Exception, NullPointerException { + HDFArray var3 = new HDFArray(var2); + byte[] var4 = var3.byteify(); + int var5 = H5Awrite(var0, var1, var4); + Object var6 = null; + var3 = null; + return var5; + } + + public static synchronized native int H5AwriteVL(int var0, int var1, String[] var2) throws HDF5LibraryException, NullPointerException; + + public static int H5Aget_create_plist(int var0) throws HDF5LibraryException { + int var1 = _H5Aget_create_plist(var0); + if (var1 > 0) { + log.trace("OPEN_IDS: H5Aget_create_plist add {}", var1); + 
OPEN_IDS.add(var1); + log.trace("OPEN_IDS: {}", OPEN_IDS.size()); + } + + return var1; + } + + private static synchronized native int _H5Aget_create_plist(int var0) throws HDF5LibraryException; + + public static synchronized native int H5Aiterate(int var0, int var1, int var2, long var3, H5A_iterate_cb var5, H5A_iterate_t var6) throws HDF5LibraryException, NullPointerException; + + public static synchronized native int H5Aiterate_by_name(int var0, String var1, int var2, int var3, long var4, H5A_iterate_cb var6, H5A_iterate_t var7, int var8) throws HDF5LibraryException, NullPointerException; + + public static synchronized native int H5Dcopy(int var0, int var1) throws HDF5LibraryException; + + public static int H5Dclose(int var0) throws HDF5LibraryException { + if (var0 < 0) { + return 0; + } else { + log.trace("OPEN_IDS: H5Dclose remove {}", var0); + OPEN_IDS.remove(var0); + log.trace("OPEN_IDS: {}", OPEN_IDS.size()); + return _H5Dclose(var0); + } + } + + private static synchronized native int _H5Dclose(int var0) throws HDF5LibraryException; + + /** @deprecated */ + @Deprecated + public static int H5Dcreate(int var0, String var1, int var2, int var3, int var4) throws HDF5LibraryException, NullPointerException { + int var5 = _H5Dcreate(var0, var1, var2, var3, var4); + if (var5 > 0) { + log.trace("OPEN_IDS: H5D add {}", var5); + OPEN_IDS.add(var5); + log.trace("OPEN_IDS: {}", OPEN_IDS.size()); + } + + return var5; + } + + private static synchronized native int _H5Dcreate(int var0, String var1, int var2, int var3, int var4) throws HDF5LibraryException, NullPointerException; + + public static int H5Dcreate(int var0, String var1, int var2, int var3, int var4, int var5, int var6) throws HDF5LibraryException, NullPointerException { + int var7 = _H5Dcreate2(var0, var1, var2, var3, var4, var5, var6); + if (var7 > 0) { + log.trace("OPEN_IDS: H5Dcreate add {}", var7); + OPEN_IDS.add(var7); + log.trace("OPEN_IDS: {}", OPEN_IDS.size()); + } + + return var7; + } + + private static synchronized native int _H5Dcreate2(int var0, String var1, int var2, int var3, int var4, int var5, int var6) throws HDF5LibraryException, NullPointerException; + + public static int H5Dcreate_anon(int var0, int var1, int var2, int var3, int var4) throws HDF5LibraryException { + int var5 = _H5Dcreate_anon(var0, var1, var2, var3, var4); + if (var5 > 0) { + log.trace("OPEN_IDS: H5Dcreate_anon add {}", var5); + OPEN_IDS.add(var5); + log.trace("OPEN_IDS: {}", OPEN_IDS.size()); + } + + return var5; + } + + private static synchronized native int _H5Dcreate_anon(int var0, int var1, int var2, int var3, int var4) throws HDF5LibraryException; + + /** @deprecated */ + @Deprecated + public static synchronized native int H5Dextend(int var0, byte[] var1) throws HDF5LibraryException, NullPointerException; + + /** @deprecated */ + @Deprecated + public static synchronized int H5Dextend(int var0, long[] var1) throws HDF5Exception, NullPointerException { + boolean var2 = true; + HDFArray var3 = new HDFArray(var1); + byte[] var4 = var3.byteify(); + int var5 = H5Dextend(var0, var4); + Object var6 = null; + var3 = null; + return var5; + } + + public static synchronized native void H5Dfill(byte[] var0, int var1, byte[] var2, int var3, int var4) throws HDF5LibraryException, NullPointerException; + + public static synchronized native int H5Dget_access_plist(int var0) throws HDF5LibraryException; + + public static int H5Dget_create_plist(int var0) throws HDF5LibraryException { + int var1 = _H5Dget_create_plist(var0); + if (var1 > 0) { + 
log.trace("OPEN_IDS: H5Dget_create_plist add {}", var1); + OPEN_IDS.add(var1); + log.trace("OPEN_IDS: {}", OPEN_IDS.size()); + } + + return var1; + } + + private static synchronized native int _H5Dget_create_plist(int var0) throws HDF5LibraryException; + + public static synchronized native long H5Dget_offset(int var0) throws HDF5LibraryException; + + public static int H5Dget_space(int var0) throws HDF5LibraryException { + int var1 = _H5Dget_space(var0); + if (var1 > 0) { + log.trace("OPEN_IDS: H5Dget_space add {}", var1); + OPEN_IDS.add(var1); + log.trace("OPEN_IDS: {}", OPEN_IDS.size()); + } + + return var1; + } + + private static synchronized native int _H5Dget_space(int var0) throws HDF5LibraryException; + + public static synchronized native int H5Dget_space_status(int var0) throws HDF5LibraryException; + + public static int H5Dget_space_status(int var0, int[] var1) throws HDF5LibraryException, NullPointerException { + return _H5Dget_space_status(var0, var1); + } + + private static synchronized native int _H5Dget_space_status(int var0, int[] var1) throws HDF5LibraryException, NullPointerException; + + public static synchronized native long H5Dget_storage_size(int var0) throws HDF5LibraryException, IllegalArgumentException; + + public static int H5Dget_type(int var0) throws HDF5LibraryException { + int var1 = _H5Dget_type(var0); + if (var1 > 0) { + log.trace("OPEN_IDS: H5Dget_type add {}", var1); + OPEN_IDS.add(var1); + log.trace("OPEN_IDS: {}", OPEN_IDS.size()); + } + + return var1; + } + + private static synchronized native int _H5Dget_type(int var0) throws HDF5LibraryException; + + public static synchronized native int H5Diterate(byte[] var0, int var1, int var2, H5D_iterate_cb var3, H5D_iterate_t var4) throws HDF5LibraryException, NullPointerException; + + /** @deprecated */ + @Deprecated + public static int H5Dopen(int var0, String var1) throws HDF5LibraryException, NullPointerException { + int var2 = _H5Dopen(var0, var1); + if (var2 > 0) { + log.trace("OPEN_IDS: H5D add {}", var2); + OPEN_IDS.add(var2); + log.trace("OPEN_IDS: {}", OPEN_IDS.size()); + } + + return var2; + } + + private static synchronized native int _H5Dopen(int var0, String var1) throws HDF5LibraryException, NullPointerException; + + public static int H5Dopen(int var0, String var1, int var2) throws HDF5LibraryException, NullPointerException { + int var3 = _H5Dopen2(var0, var1, var2); + if (var3 > 0) { + log.trace("OPEN_IDS: H5Dopen add {}", var3); + OPEN_IDS.add(var3); + log.trace("OPEN_IDS: {}", OPEN_IDS.size()); + } + + return var3; + } + + private static synchronized native int _H5Dopen2(int var0, String var1, int var2) throws HDF5LibraryException, NullPointerException; + + public static synchronized native int H5Dread(int var0, int var1, int var2, int var3, int var4, byte[] var5, boolean var6) throws HDF5LibraryException, NullPointerException; + + public static synchronized int H5Dread(int var0, int var1, int var2, int var3, int var4, byte[] var5) throws HDF5LibraryException, NullPointerException { + return H5Dread(var0, var1, var2, var3, var4, var5, true); + } + + public static synchronized int H5Dread(int var0, int var1, int var2, int var3, int var4, Object var5) throws HDF5Exception, HDF5LibraryException, NullPointerException { + return H5Dread(var0, var1, var2, var3, var4, var5, true); + } + + public static synchronized int H5Dread(int var0, int var1, int var2, int var3, int var4, Object var5, boolean var6) throws HDF5Exception, HDF5LibraryException, NullPointerException { + boolean var7 = true; + boolean 
var8 = false; + Class var9 = var5.getClass(); + if (!var9.isArray()) { + throw new HDF5JavaException("H5Dread: data is not an array"); + } else { + String var10 = var9.getName(); + var8 = var10.lastIndexOf(91) == var10.indexOf(91); + char var11 = var10.charAt(var10.lastIndexOf("[") + 1); + log.trace("H5Dread: cname={} is1D={} dname={}", new Object[]{var10, var8, var11}); + int var14; + if (var8 && var11 == 'B') { + log.trace("H5Dread_dname_B"); + var14 = H5Dread(var0, var1, var2, var3, var4, (byte[])((byte[])var5), var6); + } else if (var8 && var11 == 'S') { + log.trace("H5Dread_dname_S"); + var14 = H5Dread_short(var0, var1, var2, var3, var4, (short[])((short[])var5), var6); + } else if (var8 && var11 == 'I') { + log.trace("H5Dread_dname_I"); + var14 = H5Dread_int(var0, var1, var2, var3, var4, (int[])((int[])var5), var6); + } else if (var8 && var11 == 'J') { + log.trace("H5Dread_dname_J"); + var14 = H5Dread_long(var0, var1, var2, var3, var4, (long[])((long[])var5)); + } else if (var8 && var11 == 'F') { + log.trace("H5Dread_dname_F"); + var14 = H5Dread_float(var0, var1, var2, var3, var4, (float[])((float[])var5), var6); + } else if (var8 && var11 == 'D') { + log.trace("H5Dread_dname_D"); + var14 = H5Dread_double(var0, var1, var2, var3, var4, (double[])((double[])var5), var6); + } else if (H5Tequal(var1, HDF5Constants.H5T_STD_REF_DSETREG)) { + log.trace("H5Dread_reg_ref"); + var14 = H5Dread_reg_ref(var0, var1, var2, var3, var4, (String[])((String[])var5)); + } else if (var8 && var9.getComponentType() == String.class) { + log.trace("H5Dread_string type"); + if (H5Tis_variable_str(var1)) { + var14 = H5DreadVL(var0, var1, var2, var3, var4, (Object[])((Object[])var5)); + } else { + var14 = H5Dread_string(var0, var1, var2, var3, var4, (String[])((String[])var5)); + } + } else { + HDFArray var12 = new HDFArray(var5); + byte[] var13 = var12.emptyBytes(); + log.trace("H5Dread_else"); + var14 = H5Dread(var0, var1, var2, var3, var4, var13, var6); + if (var14 >= 0) { + var12.arrayify(var13); + } + + Object var15 = null; + var12 = null; + } + + return var14; + } + } + + public static synchronized native int H5Dread_double(int var0, int var1, int var2, int var3, int var4, double[] var5, boolean var6) throws HDF5LibraryException, NullPointerException; + + public static synchronized int H5Dread_double(int var0, int var1, int var2, int var3, int var4, double[] var5) throws HDF5LibraryException, NullPointerException { + return H5Dread_double(var0, var1, var2, var3, var4, var5, true); + } + + public static synchronized native int H5Dread_float(int var0, int var1, int var2, int var3, int var4, float[] var5, boolean var6) throws HDF5LibraryException, NullPointerException; + + public static synchronized int H5Dread_float(int var0, int var1, int var2, int var3, int var4, float[] var5) throws HDF5LibraryException, NullPointerException { + return H5Dread_float(var0, var1, var2, var3, var4, var5, true); + } + + public static synchronized native int H5Dread_int(int var0, int var1, int var2, int var3, int var4, int[] var5, boolean var6) throws HDF5LibraryException, NullPointerException; + + public static synchronized int H5Dread_int(int var0, int var1, int var2, int var3, int var4, int[] var5) throws HDF5LibraryException, NullPointerException { + return H5Dread_int(var0, var1, var2, var3, var4, var5, true); + } + + public static synchronized native int H5Dread_long(int var0, int var1, int var2, int var3, int var4, long[] var5, boolean var6) throws HDF5LibraryException, NullPointerException; + + public static 
synchronized int H5Dread_long(int var0, int var1, int var2, int var3, int var4, long[] var5) throws HDF5LibraryException, NullPointerException { + return H5Dread_long(var0, var1, var2, var3, var4, var5, true); + } + + public static synchronized native int H5Dread_reg_ref(int var0, int var1, int var2, int var3, int var4, String[] var5) throws HDF5LibraryException, NullPointerException; + + public static synchronized native int H5Dread_reg_ref_data(int var0, int var1, int var2, int var3, int var4, String[] var5) throws HDF5LibraryException, NullPointerException; + + public static synchronized native int H5Dread_short(int var0, int var1, int var2, int var3, int var4, short[] var5, boolean var6) throws HDF5LibraryException, NullPointerException; + + public static synchronized int H5Dread_short(int var0, int var1, int var2, int var3, int var4, short[] var5) throws HDF5LibraryException, NullPointerException { + return H5Dread_short(var0, var1, var2, var3, var4, var5, true); + } + + public static synchronized native int H5Dread_string(int var0, int var1, int var2, int var3, int var4, String[] var5) throws HDF5LibraryException, NullPointerException; + + public static synchronized native int H5DreadVL(int var0, int var1, int var2, int var3, int var4, Object[] var5) throws HDF5LibraryException, NullPointerException; + + public static synchronized native void H5Dset_extent(int var0, long[] var1) throws HDF5LibraryException, NullPointerException; + + public static synchronized native int H5Dvlen_get_buf_size(int var0, int var1, int var2, int[] var3) throws HDF5LibraryException; + + public static synchronized native long H5Dvlen_get_buf_size_long(int var0, int var1, int var2) throws HDF5LibraryException; + + public static synchronized native int H5Dvlen_reclaim(int var0, int var1, int var2, byte[] var3) throws HDF5LibraryException, NullPointerException; + + public static synchronized native int H5Dwrite(int var0, int var1, int var2, int var3, int var4, byte[] var5, boolean var6) throws HDF5LibraryException, NullPointerException; + + public static synchronized int H5Dwrite(int var0, int var1, int var2, int var3, int var4, byte[] var5) throws HDF5LibraryException, NullPointerException { + return H5Dwrite(var0, var1, var2, var3, var4, var5, true); + } + + public static synchronized int H5Dwrite(int var0, int var1, int var2, int var3, int var4, Object var5) throws HDF5Exception, HDF5LibraryException, NullPointerException { + return H5Dwrite(var0, var1, var2, var3, var4, var5, true); + } + + public static synchronized int H5Dwrite(int var0, int var1, int var2, int var3, int var4, Object var5, boolean var6) throws HDF5Exception, HDF5LibraryException, NullPointerException { + boolean var7 = true; + boolean var8 = false; + Class var9 = var5.getClass(); + if (!var9.isArray()) { + throw new HDF5JavaException("H5Dread: data is not an array"); + } else { + String var10 = var9.getName(); + var8 = var10.lastIndexOf(91) == var10.indexOf(91); + char var11 = var10.charAt(var10.lastIndexOf("[") + 1); + int var14; + if (var8 && var11 == 'B') { + var14 = H5Dwrite(var0, var1, var2, var3, var4, (byte[])((byte[])var5), var6); + } else if (var8 && var11 == 'S') { + var14 = H5Dwrite_short(var0, var1, var2, var3, var4, (short[])((short[])var5), var6); + } else if (var8 && var11 == 'I') { + var14 = H5Dwrite_int(var0, var1, var2, var3, var4, (int[])((int[])var5), var6); + } else if (var8 && var11 == 'J') { + var14 = H5Dwrite_long(var0, var1, var2, var3, var4, (long[])((long[])var5), var6); + } else if (var8 && var11 == 'F') { + 
var14 = H5Dwrite_float(var0, var1, var2, var3, var4, (float[])((float[])var5), var6); + } else if (var8 && var11 == 'D') { + var14 = H5Dwrite_double(var0, var1, var2, var3, var4, (double[])((double[])var5), var6); + } else if (H5Tget_class(var1) == HDF5Constants.H5T_STRING && H5Tis_variable_str(var1) && var9.isArray() && var9.getComponentType() == String.class && var8) { + var14 = H5DwriteString(var0, var1, var2, var3, var4, (String[])((String[])var5)); + } else { + HDFArray var12 = new HDFArray(var5); + byte[] var13 = var12.byteify(); + var14 = H5Dwrite(var0, var1, var2, var3, var4, var13, var6); + Object var15 = null; + var12 = null; + } + + return var14; + } + } + + public static synchronized native int H5Dwrite_double(int var0, int var1, int var2, int var3, int var4, double[] var5, boolean var6) throws HDF5LibraryException, NullPointerException; + + public static synchronized int H5Dwrite_double(int var0, int var1, int var2, int var3, int var4, double[] var5) throws HDF5LibraryException, NullPointerException { + return H5Dwrite_double(var0, var1, var2, var3, var4, var5, true); + } + + public static synchronized native int H5Dwrite_float(int var0, int var1, int var2, int var3, int var4, float[] var5, boolean var6) throws HDF5LibraryException, NullPointerException; + + public static synchronized int H5Dwrite_float(int var0, int var1, int var2, int var3, int var4, float[] var5) throws HDF5LibraryException, NullPointerException { + return H5Dwrite_float(var0, var1, var2, var3, var4, var5, true); + } + + public static synchronized native int H5Dwrite_int(int var0, int var1, int var2, int var3, int var4, int[] var5, boolean var6) throws HDF5LibraryException, NullPointerException; + + public static synchronized int H5Dwrite_int(int var0, int var1, int var2, int var3, int var4, int[] var5) throws HDF5LibraryException, NullPointerException { + return H5Dwrite_int(var0, var1, var2, var3, var4, var5, true); + } + + public static synchronized native int H5Dwrite_long(int var0, int var1, int var2, int var3, int var4, long[] var5, boolean var6) throws HDF5LibraryException, NullPointerException; + + public static synchronized int H5Dwrite_long(int var0, int var1, int var2, int var3, int var4, long[] var5) throws HDF5LibraryException, NullPointerException { + return H5Dwrite_long(var0, var1, var2, var3, var4, var5, true); + } + + public static synchronized native int H5Dwrite_short(int var0, int var1, int var2, int var3, int var4, short[] var5, boolean var6) throws HDF5LibraryException, NullPointerException; + + public static synchronized int H5Dwrite_short(int var0, int var1, int var2, int var3, int var4, short[] var5) throws HDF5LibraryException, NullPointerException { + return H5Dwrite_short(var0, var1, var2, var3, var4, var5, true); + } + + public static synchronized native int H5DwriteString(int var0, int var1, int var2, int var3, int var4, String[] var5) throws HDF5LibraryException, NullPointerException; + + public static synchronized native boolean H5Eauto_is_v2(int var0) throws HDF5LibraryException; + + public static int H5Eclear() throws HDF5LibraryException { + H5Eclear2(HDF5Constants.H5E_DEFAULT); + return 0; + } + + public static void H5Eclear(int var0) throws HDF5LibraryException { + H5Eclear2(var0); + } + + public static synchronized native void H5Eclear2(int var0) throws HDF5LibraryException; + + public static synchronized native void H5Eclose_msg(int var0) throws HDF5LibraryException; + + public static synchronized native void H5Eclose_stack(int var0) throws HDF5LibraryException; + + 
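+     /*
+      * Minimal usage sketch (illustrative only, not part of the upstream wrapper):
+      * reads a 1-D double dataset through the H5Fopen / H5Dopen / H5Dread wrappers
+      * above, relying on the typed dispatch in H5Dread(Object) to route a double[]
+      * buffer to H5Dread_double. The method name, file path, dataset path and
+      * element count are assumed example values, not anything used by this project.
+      */
+     public static double[] exampleReadDoubleDataset(String filePath, String dsetPath, int count)
+             throws HDF5Exception, HDF5LibraryException {
+         int fid = H5Fopen(filePath, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+         int did = H5Dopen(fid, dsetPath, HDF5Constants.H5P_DEFAULT);
+         double[] buf = new double[count];                     // caller supplies the dataset extent
+         H5Dread(did, HDF5Constants.H5T_NATIVE_DOUBLE, HDF5Constants.H5S_ALL,
+                 HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, buf);
+         H5Dclose(did);                                        // also removes the id from OPEN_IDS
+         H5Fclose(fid);
+         return buf;
+     }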
public static synchronized native int H5Ecreate_msg(int var0, int var1, String var2) throws HDF5LibraryException, NullPointerException; + + public static synchronized native int H5Ecreate_stack() throws HDF5LibraryException; + + public static synchronized native String H5Eget_class_name(int var0) throws HDF5LibraryException, NullPointerException; + + public static synchronized native int H5Eget_current_stack() throws HDF5LibraryException; + + public static synchronized native void H5Eset_current_stack(int var0) throws HDF5LibraryException; + + public static synchronized native String H5Eget_msg(int var0, int[] var1) throws HDF5LibraryException; + + public static synchronized native long H5Eget_num(int var0) throws HDF5LibraryException, NullPointerException; + + /** @deprecated */ + @Deprecated + public static synchronized native void H5Eprint1(Object var0) throws HDF5LibraryException; + + public static synchronized native void H5Eprint2(int var0, Object var1) throws HDF5LibraryException; + + public static synchronized native void H5Epop(int var0, long var1) throws HDF5LibraryException; + + public static void H5Epush(int var0, String var1, String var2, int var3, int var4, int var5, int var6, String var7) throws HDF5LibraryException, NullPointerException { + H5Epush2(var0, var1, var2, var3, var4, var5, var6, var7); + } + + public static synchronized native void H5Epush2(int var0, String var1, String var2, int var3, int var4, int var5, int var6, String var7) throws HDF5LibraryException, NullPointerException; + + public static synchronized native int H5Eregister_class(String var0, String var1, String var2) throws HDF5LibraryException, NullPointerException; + + public static synchronized native void H5Eunregister_class(int var0) throws HDF5LibraryException; + + public static void H5Ewalk(int var0, int var1, H5E_walk_cb var2, H5E_walk_t var3) throws HDF5LibraryException, NullPointerException { + H5Ewalk2(var0, var1, var2, var3); + } + + public static synchronized native void H5Ewalk2(int var0, int var1, H5E_walk_cb var2, H5E_walk_t var3) throws HDF5LibraryException, NullPointerException; + + public static int H5Fclose(int var0) throws HDF5LibraryException { + if (var0 < 0) { + return 0; + } else { + log.trace("OPEN_IDS: H5Fclose remove {}", var0); + OPEN_IDS.remove(var0); + log.trace("OPEN_IDS: {}", OPEN_IDS.size()); + return _H5Fclose(var0); + } + } + + private static synchronized native int _H5Fclose(int var0) throws HDF5LibraryException; + + public static int H5Fopen(String var0, int var1, int var2) throws HDF5LibraryException, NullPointerException { + int var3 = _H5Fopen(var0, var1, var2); + if (var3 > 0) { + log.trace("OPEN_IDS: H5Fopen add {}", var3); + OPEN_IDS.add(var3); + log.trace("OPEN_IDS: {}", OPEN_IDS.size()); + } + + return var3; + } + + private static synchronized native int _H5Fopen(String var0, int var1, int var2) throws HDF5LibraryException, NullPointerException; + + public static int H5Freopen(int var0) throws HDF5LibraryException { + int var1 = _H5Freopen(var0); + if (var1 > 0) { + log.trace("OPEN_IDS: H5Freopen add {}", var1); + OPEN_IDS.add(var1); + log.trace("OPEN_IDS: {}", OPEN_IDS.size()); + } + + return var1; + } + + private static synchronized native int _H5Freopen(int var0) throws HDF5LibraryException; + + public static int H5Fcreate(String var0, int var1, int var2, int var3) throws HDF5LibraryException, NullPointerException { + int var4 = _H5Fcreate(var0, var1, var2, var3); + if (var4 > 0) { + log.trace("OPEN_IDS: H5Fcreate add {}", var4); + OPEN_IDS.add(var4); + 
log.trace("OPEN_IDS: {}", OPEN_IDS.size()); + } + + return var4; + } + + private static synchronized native int _H5Fcreate(String var0, int var1, int var2, int var3) throws HDF5LibraryException, NullPointerException; + + public static synchronized native int H5Fflush(int var0, int var1) throws HDF5LibraryException; + + public static int H5Fget_access_plist(int var0) throws HDF5LibraryException { + int var1 = _H5Fget_access_plist(var0); + if (var1 > 0) { + log.trace("OPEN_IDS: H5Fget_access_plist add {}", var1); + OPEN_IDS.add(var1); + log.trace("OPEN_IDS: {}", OPEN_IDS.size()); + } + + return var1; + } + + private static synchronized native int _H5Fget_access_plist(int var0) throws HDF5LibraryException; + + public static int H5Fget_create_plist(int var0) throws HDF5LibraryException { + int var1 = _H5Fget_create_plist(var0); + if (var1 > 0) { + log.trace("OPEN_IDS: H5Fget_create_plist add {}", var1); + OPEN_IDS.add(var1); + log.trace("OPEN_IDS: {}", OPEN_IDS.size()); + } + + return var1; + } + + private static synchronized native int _H5Fget_create_plist(int var0) throws HDF5LibraryException; + + public static synchronized native long H5Fget_filesize(int var0) throws HDF5LibraryException; + + public static synchronized native long H5Fget_freespace(int var0) throws HDF5LibraryException; + + public static synchronized native int H5Fget_intent(int var0) throws HDF5LibraryException; + + public static synchronized native double H5Fget_mdc_hit_rate(int var0) throws HDF5LibraryException; + + public static synchronized native int H5Fget_mdc_size(int var0, long[] var1) throws HDF5LibraryException, NullPointerException, IllegalArgumentException; + + public static synchronized native String H5Fget_name(int var0) throws HDF5LibraryException; + + public static synchronized native String H5Fget_name(int var0, int var1) throws HDF5LibraryException; + + public static synchronized native int H5Fget_obj_count(int var0, int var1) throws HDF5LibraryException; + + public static synchronized native long H5Fget_obj_count_long(int var0, int var1) throws HDF5LibraryException; + + public static synchronized native int H5Fget_obj_ids(int var0, int var1, int var2, int[] var3) throws HDF5LibraryException, NullPointerException; + + public static synchronized native long H5Fget_obj_ids_long(int var0, int var1, long var2, int[] var4) throws HDF5LibraryException, NullPointerException; + + public static synchronized native boolean H5Fis_hdf5(String var0) throws HDF5LibraryException, NullPointerException; + + public static synchronized native int H5Fmount(int var0, String var1, int var2, int var3) throws HDF5LibraryException, NullPointerException; + + public static synchronized native int H5Funmount(int var0, String var1) throws HDF5LibraryException, NullPointerException; + + public static synchronized native void H5Freset_mdc_hit_rate_stats(int var0) throws HDF5LibraryException; + + public static synchronized native H5F_info_t H5Fget_info(int var0) throws HDF5LibraryException; + + public static synchronized native void H5Fclear_elink_file_cache(int var0) throws HDF5LibraryException; + + public static int H5Gclose(int var0) throws HDF5LibraryException { + if (var0 < 0) { + return 0; + } else { + log.trace("OPEN_IDS: H5Gclose remove {}", var0); + OPEN_IDS.remove(var0); + log.trace("OPEN_IDS: {}", OPEN_IDS.size()); + return _H5Gclose(var0); + } + } + + private static synchronized native int _H5Gclose(int var0) throws HDF5LibraryException; + + /** @deprecated */ + @Deprecated + public static int H5Gcreate(int var0, String var1, 
long var2) throws HDF5LibraryException, NullPointerException { + int var4 = _H5Gcreate(var0, var1, var2); + if (var4 > 0) { + log.trace("OPEN_IDS: H5Gcreate add {}", var4); + OPEN_IDS.add(var4); + log.trace("OPEN_IDS: {}", OPEN_IDS.size()); + } + + return var4; + } + + private static synchronized native int _H5Gcreate(int var0, String var1, long var2) throws HDF5LibraryException, NullPointerException; + + public static int H5Gcreate(int var0, String var1, int var2, int var3, int var4) throws HDF5LibraryException, NullPointerException { + int var5 = _H5Gcreate2(var0, var1, var2, var3, var4); + if (var5 > 0) { + log.trace("OPEN_IDS: H5Gcreate add {}", var5); + OPEN_IDS.add(var5); + log.trace("OPEN_IDS: {}", OPEN_IDS.size()); + } + + return var5; + } + + private static synchronized native int _H5Gcreate2(int var0, String var1, int var2, int var3, int var4) throws HDF5LibraryException, NullPointerException; + + public static int H5Gcreate_anon(int var0, int var1, int var2) throws HDF5LibraryException { + int var3 = _H5Gcreate_anon(var0, var1, var2); + if (var3 > 0) { + log.trace("OPEN_IDS: H5Gcreate_anon add {}", var3); + OPEN_IDS.add(var3); + log.trace("OPEN_IDS: {}", OPEN_IDS.size()); + } + + return var3; + } + + private static synchronized native int _H5Gcreate_anon(int var0, int var1, int var2) throws HDF5LibraryException; + + /** @deprecated */ + @Deprecated + public static synchronized native int H5Gget_comment(int var0, String var1, int var2, String[] var3) throws ArrayIndexOutOfBoundsException, ArrayStoreException, HDF5LibraryException, NullPointerException, IllegalArgumentException; + + /** @deprecated */ + @Deprecated + public static synchronized native int H5Gset_comment(int var0, String var1, String var2) throws HDF5LibraryException, NullPointerException; + + public static synchronized native int H5Gget_create_plist(int var0) throws HDF5LibraryException; + + public static synchronized native H5G_info_t H5Gget_info(int var0) throws HDF5LibraryException; + + public static synchronized native H5G_info_t H5Gget_info_by_idx(int var0, String var1, int var2, int var3, long var4, int var6) throws HDF5LibraryException, NullPointerException; + + public static synchronized native H5G_info_t H5Gget_info_by_name(int var0, String var1, int var2) throws HDF5LibraryException, NullPointerException; + + /** @deprecated */ + @Deprecated + public static synchronized native int H5Gget_linkval(int var0, String var1, int var2, String[] var3) throws ArrayIndexOutOfBoundsException, ArrayStoreException, HDF5LibraryException, NullPointerException, IllegalArgumentException; + + /** @deprecated */ + @Deprecated + public static synchronized native int H5Gget_num_objs(int var0, long[] var1) throws HDF5LibraryException, NullPointerException; + + public static synchronized int H5Gget_obj_info_all(int var0, String var1, String[] var2, int[] var3, long[] var4) throws HDF5LibraryException, NullPointerException { + if (var2 == null) { + throw new NullPointerException("H5Gget_obj_info_all(): name array is null"); + } else { + return H5Gget_obj_info_all(var0, var1, var2, var3, (int[])null, (long[])null, var4, HDF5Constants.H5_INDEX_NAME); + } + } + + public static synchronized int H5Gget_obj_info_all(int var0, String var1, String[] var2, int[] var3, int[] var4, long[] var5, int var6) throws HDF5LibraryException, NullPointerException { + return H5Gget_obj_info_full(var0, var1, var2, var3, var4, (long[])null, var5, var6, -1); + } + + public static synchronized int H5Gget_obj_info_all(int var0, String var1, String[] var2, 
int[] var3, int[] var4, long[] var5, long[] var6, int var7) throws HDF5LibraryException, NullPointerException { + return H5Gget_obj_info_full(var0, var1, var2, var3, var4, var5, var6, var2.length, var7, -1); + } + + public static synchronized int H5Gget_obj_info_full(int var0, String var1, String[] var2, int[] var3, int[] var4, long[] var5, long[] var6, int var7, int var8) throws HDF5LibraryException, NullPointerException { + if (var2 == null) { + throw new NullPointerException("H5Gget_obj_info_full(): name array is null"); + } else if (var3 == null) { + throw new NullPointerException("H5Gget_obj_info_full(): object type array is null"); + } else if (var2.length == 0) { + throw new HDF5LibraryException("H5Gget_obj_info_full(): array size is zero"); + } else if (var2.length != var3.length) { + throw new HDF5LibraryException("H5Gget_obj_info_full(): name and type array sizes are different"); + } else { + if (var4 == null) { + var4 = new int[var3.length]; + } + + if (var5 == null) { + var5 = new long[var6.length]; + } + + if (var7 < 0) { + var7 = HDF5Constants.H5_INDEX_NAME; + } + + if (var8 < 0) { + var8 = HDF5Constants.H5_ITER_INC; + } + + log.trace("H5Gget_obj_info_full: oname_len={}", var2.length); + int var9 = H5Gget_obj_info_full(var0, var1, var2, var3, var4, var5, var6, var2.length, var7, var8); + + for(int var10 = 0; var10 < var2.length; ++var10) { + log.trace("H5Gget_obj_info_full: oname={}", var2[var10]); + } + + return var9; + } + } + + private static synchronized native int H5Gget_obj_info_full(int var0, String var1, String[] var2, int[] var3, int[] var4, long[] var5, long[] var6, int var7, int var8, int var9) throws HDF5LibraryException, NullPointerException; + + public static synchronized int H5Gget_obj_info_idx(int var0, String var1, int var2, String[] var3, int[] var4) throws HDF5LibraryException, NullPointerException { + long var5 = 4096L; + String[] var7 = new String[]{new String("")}; + int var8 = H5Gopen(var0, var1); + long var9 = H5Gget_objname_by_idx(var8, (long)var2, var7, var5); + int var11 = H5Gget_objtype_by_idx(var8, (long)var2); + var3[0] = new String(var7[0]); + var4[0] = var11; + int var12 = (new Long(var9)).intValue(); + return var12; + } + + public static synchronized int H5Gget_obj_info_max(int var0, String[] var1, int[] var2, int[] var3, long[] var4, int var5) throws HDF5LibraryException, NullPointerException { + if (var1 == null) { + throw new NullPointerException("H5Gget_obj_info_max(): name array is null"); + } else if (var2 == null) { + throw new NullPointerException("H5Gget_obj_info_max(): object type array is null"); + } else if (var3 == null) { + throw new NullPointerException("H5Gget_obj_info_max(): link type array is null"); + } else if (var1.length <= 0) { + throw new HDF5LibraryException("H5Gget_obj_info_max(): array size is zero"); + } else if (var5 <= 0) { + throw new HDF5LibraryException("H5Gget_obj_info_max(): maximum array size is zero"); + } else if (var1.length != var2.length) { + throw new HDF5LibraryException("H5Gget_obj_info_max(): name and type array sizes are different"); + } else { + return H5Gget_obj_info_max(var0, var1, var2, var3, var4, var5, var1.length); + } + } + + private static synchronized native int H5Gget_obj_info_max(int var0, String[] var1, int[] var2, int[] var3, long[] var4, int var5, int var6) throws HDF5LibraryException, NullPointerException; + + /** @deprecated */ + @Deprecated + public static synchronized native int H5Gget_objinfo(int var0, String var1, boolean var2, long[] var3, long[] var4, int[] var5, long[] var6) 
throws HDF5LibraryException, NullPointerException, IllegalArgumentException; + + /** @deprecated */ + @Deprecated + public static synchronized int H5Gget_objinfo(int var0, String var1, boolean var2, HDF5GroupInfo var3) throws HDF5LibraryException, NullPointerException { + boolean var4 = true; + long[] var5 = new long[2]; + long[] var6 = new long[2]; + int[] var7 = new int[3]; + long[] var8 = new long[1]; + int var9 = H5Gget_objinfo(var0, var1, var2, var5, var6, var7, var8); + if (var9 >= 0) { + var3.setGroupInfo(var5, var6, var7[0], var7[1], var8[0], var7[2]); + } + + return var9; + } + + /** @deprecated */ + @Deprecated + public static synchronized native long H5Gget_objname_by_idx(int var0, long var1, String[] var3, long var4) throws HDF5LibraryException, NullPointerException; + + /** @deprecated */ + @Deprecated + public static synchronized native int H5Gget_objtype_by_idx(int var0, long var1) throws HDF5LibraryException; + + /** @deprecated */ + @Deprecated + public static synchronized native int H5Glink(int var0, int var1, String var2, String var3) throws HDF5LibraryException, NullPointerException; + + /** @deprecated */ + @Deprecated + public static synchronized native int H5Glink2(int var0, String var1, int var2, int var3, String var4) throws HDF5LibraryException, NullPointerException; + + /** @deprecated */ + @Deprecated + public static synchronized native int H5Gunlink(int var0, String var1) throws HDF5LibraryException, NullPointerException; + + /** @deprecated */ + @Deprecated + public static synchronized native int H5Gmove(int var0, String var1, String var2) throws HDF5LibraryException, NullPointerException; + + public static synchronized long H5Gn_members_long(int var0, String var1) throws HDF5LibraryException, NullPointerException { + int var2 = H5Gopen(var0, var1); + long var3 = -1L; + + try { + H5G_info_t var5 = H5Gget_info(var2); + var3 = var5.nlinks; + } finally { + H5Gclose(var2); + } + + return var3; + } + + public static synchronized int H5Gn_members(int var0, String var1) throws HDF5LibraryException, NullPointerException { + return (int)H5Gn_members_long(var0, var1); + } + + /** @deprecated */ + @Deprecated + public static int H5Gopen(int var0, String var1) throws HDF5LibraryException, NullPointerException { + int var2 = _H5Gopen(var0, var1); + if (var2 > 0) { + log.trace("OPEN_IDS: H5G add {}", var2); + OPEN_IDS.add(var2); + log.trace("OPEN_IDS: {}", OPEN_IDS.size()); + } + + return var2; + } + + private static synchronized native int _H5Gopen(int var0, String var1) throws HDF5LibraryException, NullPointerException; + + public static int H5Gopen(int var0, String var1, int var2) throws HDF5LibraryException, NullPointerException { + int var3 = _H5Gopen2(var0, var1, var2); + if (var3 > 0) { + log.trace("OPEN_IDS: H5Gopen add {}", var3); + OPEN_IDS.add(var3); + log.trace("OPEN_IDS: {}", OPEN_IDS.size()); + } + + return var3; + } + + private static synchronized native int _H5Gopen2(int var0, String var1, int var2) throws HDF5LibraryException, NullPointerException; + + public static synchronized native int H5Iget_file_id(int var0) throws HDF5LibraryException; + + /** @deprecated */ + @Deprecated + public static synchronized native long H5Iget_name_long(int var0, String[] var1, long var2) throws HDF5LibraryException, NullPointerException; + + public static synchronized native String H5Iget_name(int var0) throws HDF5LibraryException; + + public static synchronized native int H5Iget_ref(int var0) throws HDF5LibraryException, NullPointerException; + + public static synchronized 
native int H5Idec_ref(int var0) throws HDF5LibraryException, NullPointerException; + + public static synchronized native int H5Iinc_ref(int var0) throws HDF5LibraryException, NullPointerException; + + public static synchronized native int H5Iget_type(int var0) throws HDF5LibraryException; + + public static synchronized native int H5Iget_type_ref(int var0) throws HDF5LibraryException; + + public static synchronized native int H5Idec_type_ref(int var0) throws HDF5LibraryException; + + public static synchronized native int H5Iinc_type_ref(int var0) throws HDF5LibraryException; + + public static synchronized native int H5Inmembers(int var0) throws HDF5LibraryException; + + public static synchronized native boolean H5Iis_valid(int var0) throws HDF5LibraryException; + + public static synchronized native boolean H5Itype_exists(int var0) throws HDF5LibraryException; + + public static synchronized native void H5Iclear_type(int var0, boolean var1) throws HDF5LibraryException; + + public static synchronized native void H5Idestroy_type(int var0) throws HDF5LibraryException; + + public static synchronized native void H5Lcopy(int var0, String var1, int var2, String var3, int var4, int var5) throws HDF5LibraryException, NullPointerException; + + public static synchronized native void H5Lcreate_external(String var0, String var1, int var2, String var3, int var4, int var5) throws HDF5LibraryException, NullPointerException; + + public static synchronized native void H5Lcreate_hard(int var0, String var1, int var2, String var3, int var4, int var5) throws HDF5LibraryException, NullPointerException; + + public static synchronized native void H5Lcreate_soft(String var0, int var1, String var2, int var3, int var4) throws HDF5LibraryException, NullPointerException; + + public static synchronized native void H5Ldelete(int var0, String var1, int var2) throws HDF5LibraryException, NullPointerException; + + public static synchronized native void H5Ldelete_by_idx(int var0, String var1, int var2, int var3, long var4, int var6) throws HDF5LibraryException, NullPointerException; + + public static synchronized native boolean H5Lexists(int var0, String var1, int var2) throws HDF5LibraryException, NullPointerException; + + public static synchronized native H5L_info_t H5Lget_info(int var0, String var1, int var2) throws HDF5LibraryException, NullPointerException; + + public static synchronized native H5L_info_t H5Lget_info_by_idx(int var0, String var1, int var2, int var3, long var4, int var6) throws HDF5LibraryException, NullPointerException; + + public static synchronized native String H5Lget_name_by_idx(int var0, String var1, int var2, int var3, long var4, int var6) throws HDF5LibraryException, NullPointerException; + + public static synchronized native int H5Lget_val(int var0, String var1, String[] var2, int var3) throws HDF5LibraryException, NullPointerException; + + public static synchronized native int H5Lget_val_by_idx(int var0, String var1, int var2, int var3, long var4, String[] var6, int var7) throws HDF5LibraryException, NullPointerException; + + public static synchronized native int H5Literate(int var0, int var1, int var2, long var3, H5L_iterate_cb var5, H5L_iterate_t var6) throws HDF5LibraryException; + + public static synchronized native int H5Literate_by_name(int var0, String var1, int var2, int var3, long var4, H5L_iterate_cb var6, H5L_iterate_t var7, int var8) throws HDF5LibraryException, NullPointerException; + + public static synchronized native void H5Lmove(int var0, String var1, int var2, String var3, int 
var4, int var5) throws HDF5LibraryException, NullPointerException; + + public static synchronized native int H5Lvisit(int var0, int var1, int var2, H5L_iterate_cb var3, H5L_iterate_t var4) throws HDF5LibraryException; + + public static synchronized native int H5Lvisit_by_name(int var0, String var1, int var2, int var3, H5L_iterate_cb var4, H5L_iterate_t var5, int var6) throws HDF5LibraryException, NullPointerException; + + public static synchronized native int H5Lis_registered(int var0) throws HDF5LibraryException; + + public static synchronized native void H5Lunregister(int var0) throws HDF5LibraryException; + + public static int H5Oclose(int var0) throws HDF5LibraryException { + if (var0 < 0) { + return 0; + } else { + log.trace("OPEN_IDS: H5Oclose remove {}", var0); + OPEN_IDS.remove(var0); + log.trace("OPEN_IDS: {}", OPEN_IDS.size()); + return _H5Oclose(var0); + } + } + + private static synchronized native int _H5Oclose(int var0) throws HDF5LibraryException; + + public static synchronized native void H5Ocopy(int var0, String var1, int var2, String var3, int var4, int var5) throws HDF5LibraryException, NullPointerException; + + public static synchronized native String H5Oget_comment(int var0) throws HDF5LibraryException; + + /** @deprecated */ + @Deprecated + public static synchronized native void H5Oset_comment(int var0, String var1) throws HDF5LibraryException; + + public static synchronized native String H5Oget_comment_by_name(int var0, String var1, int var2) throws HDF5LibraryException, NullPointerException; + + /** @deprecated */ + @Deprecated + public static synchronized native void H5Oset_comment_by_name(int var0, String var1, String var2, int var3) throws HDF5LibraryException, NullPointerException; + + public static synchronized native H5O_info_t H5Oget_info(int var0) throws HDF5LibraryException, NullPointerException; + + public static synchronized native H5O_info_t H5Oget_info_by_idx(int var0, String var1, int var2, int var3, long var4, int var6) throws HDF5LibraryException, NullPointerException; + + public static synchronized native H5O_info_t H5Oget_info_by_name(int var0, String var1, int var2) throws HDF5LibraryException, NullPointerException; + + public static synchronized native void H5Olink(int var0, int var1, String var2, int var3, int var4) throws HDF5LibraryException, NullPointerException; + + public static int H5Oopen(int var0, String var1, int var2) throws HDF5LibraryException, NullPointerException { + int var3 = _H5Oopen(var0, var1, var2); + if (var3 > 0) { + log.trace("OPEN_IDS: H5Oopen add {}", var3); + OPEN_IDS.add(var3); + log.trace("OPEN_IDS: {}", OPEN_IDS.size()); + } + + return var3; + } + + private static synchronized native int _H5Oopen(int var0, String var1, int var2) throws HDF5LibraryException, NullPointerException; + + public static synchronized native int H5Ovisit(int var0, int var1, int var2, H5O_iterate_cb var3, H5O_iterate_t var4) throws HDF5LibraryException, NullPointerException; + + public static synchronized native int H5Ovisit_by_name(int var0, String var1, int var2, int var3, H5O_iterate_cb var4, H5O_iterate_t var5, int var6) throws HDF5LibraryException, NullPointerException; + + public static synchronized native boolean H5Oexists_by_name(int var0, String var1, int var2) throws HDF5LibraryException, NullPointerException; + + public static synchronized native void H5Odecr_refcount(int var0) throws HDF5LibraryException; + + public static synchronized native void H5Oincr_refcount(int var0) throws HDF5LibraryException; + + public static int 
H5Oopen_by_addr(int var0, long var1) throws HDF5LibraryException { + int var3 = _H5Oopen_by_addr(var0, var1); + if (var3 > 0) { + log.trace("OPEN_IDS: H5Oopen_by_addr add {}", var3); + OPEN_IDS.add(var3); + log.trace("OPEN_IDS: {}", OPEN_IDS.size()); + } + + return var3; + } + + private static synchronized native int _H5Oopen_by_addr(int var0, long var1) throws HDF5LibraryException, NullPointerException; + + public static int H5Oopen_by_idx(int var0, String var1, int var2, int var3, long var4, int var6) throws HDF5LibraryException, NullPointerException { + int var7 = _H5Oopen_by_idx(var0, var1, var2, var3, var4, var6); + if (var7 > 0) { + log.trace("OPEN_IDS: H5Oopen_by_idx add {}", var7); + OPEN_IDS.add(var7); + log.trace("OPEN_IDS: {}", OPEN_IDS.size()); + } + + return var7; + } + + public static synchronized native int _H5Oopen_by_idx(int var0, String var1, int var2, int var3, long var4, int var6) throws HDF5LibraryException, NullPointerException; + + public static synchronized native String H5Pget_class_name(int var0) throws HDF5LibraryException; + + public static int H5Pcreate(int var0) throws HDF5LibraryException { + int var1 = _H5Pcreate(var0); + if (var1 > 0) { + log.trace("OPEN_IDS: H5Pcreate add {}", var1); + OPEN_IDS.add(var1); + log.trace("OPEN_IDS: {}", OPEN_IDS.size()); + } + + return var1; + } + + private static synchronized native int _H5Pcreate(int var0) throws HDF5LibraryException; + + public static synchronized native int H5Pget(int var0, String var1) throws HDF5LibraryException; + + public static synchronized native int H5Pset(int var0, String var1, int var2) throws HDF5LibraryException; + + public static synchronized native int H5Pexist(int var0, String var1) throws HDF5LibraryException; + + public static synchronized native long H5Pget_size(int var0, String var1) throws HDF5LibraryException; + + public static synchronized native long H5Pget_nprops(int var0) throws HDF5LibraryException; + + public static synchronized native int H5Pget_class(int var0) throws HDF5LibraryException; + + public static synchronized native int H5Pget_class_parent(int var0) throws HDF5LibraryException; + + public static synchronized native int H5Pequal(int var0, int var1) throws HDF5LibraryException; + + public static boolean H5P_equal(int var0, int var1) throws HDF5LibraryException { + return H5Pequal(var0, var1) == 1; + } + + public static synchronized native int H5Pisa_class(int var0, int var1) throws HDF5LibraryException; + + public static synchronized native int H5Pcopy_prop(int var0, int var1, String var2) throws HDF5LibraryException; + + public static synchronized native int H5Premove(int var0, String var1) throws HDF5LibraryException; + + public static synchronized native int H5Punregister(int var0, String var1) throws HDF5LibraryException; + + public static int H5Pclose_class(int var0) throws HDF5LibraryException { + if (var0 < 0) { + return 0; + } else { + log.trace("OPEN_IDS: H5Pclose_class remove {}", var0); + OPEN_IDS.remove(var0); + log.trace("OPEN_IDS: {}", OPEN_IDS.size()); + return _H5Pclose_class(var0); + } + } + + public static synchronized native int _H5Pclose_class(int var0) throws HDF5LibraryException; + + public static int H5Pclose(int var0) throws HDF5LibraryException { + if (var0 < 0) { + return 0; + } else { + log.trace("OPEN_IDS: H5Pclose remove {}", var0); + OPEN_IDS.remove(var0); + log.trace("OPEN_IDS: {}", OPEN_IDS.size()); + return _H5Pclose(var0); + } + } + + private static synchronized native int _H5Pclose(int var0) throws HDF5LibraryException; + + public 
static int H5Pcopy(int var0) throws HDF5LibraryException { + int var1 = _H5Pcopy(var0); + if (var1 > 0) { + log.trace("OPEN_IDS: H5Pcopy add {}", var1); + OPEN_IDS.add(var1); + log.trace("OPEN_IDS: {}", OPEN_IDS.size()); + } + + return var1; + } + + private static synchronized native int _H5Pcopy(int var0) throws HDF5LibraryException; + + public static int H5Pcreate_class_nocb(int var0, String var1) throws HDF5LibraryException { + int var2 = _H5Pcreate_class_nocb(var0, var1); + if (var2 > 0) { + log.trace("OPEN_IDS: H5Pcreate_class_nocb add {}", var2); + OPEN_IDS.add(var2); + log.trace("OPEN_IDS: {}", OPEN_IDS.size()); + } + + return var2; + } + + private static synchronized native int _H5Pcreate_class_nocb(int var0, String var1) throws HDF5LibraryException; + + public static synchronized native void H5Pregister2_nocb(int var0, String var1, long var2, byte[] var4) throws HDF5LibraryException; + + public static synchronized native void H5Pinsert2_nocb(int var0, String var1, long var2, byte[] var4) throws HDF5LibraryException; + + public static synchronized native int H5Piterate(int var0, int[] var1, H5P_iterate_cb var2, H5P_iterate_t var3) throws HDF5LibraryException; + + public static synchronized native int H5Pget_attr_phase_change(int var0, int[] var1) throws HDF5LibraryException, NullPointerException; + + public static synchronized native void H5Pset_attr_phase_change(int var0, int var1, int var2) throws HDF5LibraryException; + + public static synchronized native int H5Pget_attr_creation_order(int var0) throws HDF5LibraryException; + + public static synchronized native int H5Pset_attr_creation_order(int var0, int var1) throws HDF5LibraryException; + + public static synchronized native boolean H5Pget_obj_track_times(int var0) throws HDF5LibraryException; + + public static synchronized native void H5Pset_obj_track_times(int var0, boolean var1) throws HDF5LibraryException; + + public static synchronized native int H5Pmodify_filter(int var0, int var1, int var2, long var3, int[] var5) throws HDF5LibraryException, NullPointerException; + + public static synchronized native int H5Pset_filter(int var0, int var1, int var2, long var3, int[] var5) throws HDF5LibraryException; + + public static synchronized native int H5Pget_nfilters(int var0) throws HDF5LibraryException; + + /** @deprecated */ + @Deprecated + public static synchronized native int H5Pget_filter(int var0, int var1, int[] var2, int[] var3, int[] var4, int var5, String[] var6) throws ArrayIndexOutOfBoundsException, ArrayStoreException, HDF5LibraryException, NullPointerException; + + public static int H5Pget_filter(int var0, int var1, int[] var2, long[] var3, int[] var4, long var5, String[] var7, int[] var8) throws ArrayIndexOutOfBoundsException, ArrayStoreException, HDF5LibraryException, NullPointerException { + return H5Pget_filter2(var0, var1, var2, var3, var4, var5, var7, var8); + } + + private static synchronized native int H5Pget_filter2(int var0, int var1, int[] var2, long[] var3, int[] var4, long var5, String[] var7, int[] var8) throws ArrayIndexOutOfBoundsException, ArrayStoreException, HDF5LibraryException, NullPointerException; + + /** @deprecated */ + @Deprecated + public static synchronized native int H5Pget_filter_by_id(int var0, int var1, int[] var2, long[] var3, int[] var4, long var5, String[] var7) throws HDF5LibraryException, NullPointerException; + + public static int H5Pget_filter_by_id(int var0, int var1, int[] var2, long[] var3, int[] var4, long var5, String[] var7, int[] var8) throws 
ArrayIndexOutOfBoundsException, ArrayStoreException, HDF5LibraryException, NullPointerException { + return H5Pget_filter_by_id2(var0, var1, var2, var3, var4, var5, var7, var8); + } + + public static synchronized native int H5Pget_filter_by_id2(int var0, int var1, int[] var2, long[] var3, int[] var4, long var5, String[] var7, int[] var8) throws HDF5LibraryException, NullPointerException; + + public static synchronized native boolean H5Pall_filters_avail(int var0) throws HDF5LibraryException, NullPointerException; + + public static synchronized native int H5Premove_filter(int var0, int var1) throws HDF5LibraryException; + + public static synchronized native int H5Pset_deflate(int var0, int var1) throws HDF5LibraryException; + + public static synchronized native int H5Pset_fletcher32(int var0) throws HDF5LibraryException, NullPointerException; + + public static synchronized native int H5Pget_userblock(int var0, long[] var1) throws HDF5LibraryException, NullPointerException; + + public static synchronized native int H5Pset_userblock(int var0, long var1) throws HDF5LibraryException; + + public static synchronized native int H5Pget_sizes(int var0, long[] var1) throws HDF5LibraryException, NullPointerException, IllegalArgumentException; + + public static synchronized native int H5Pset_sizes(int var0, int var1, int var2) throws HDF5LibraryException; + + public static synchronized native int H5Pget_sym_k(int var0, int[] var1) throws HDF5LibraryException, NullPointerException, IllegalArgumentException; + + public static synchronized native int H5Pset_sym_k(int var0, int var1, int var2) throws HDF5LibraryException; + + public static synchronized native int H5Pget_istore_k(int var0, int[] var1) throws HDF5LibraryException, NullPointerException; + + public static synchronized native int H5Pset_istore_k(int var0, int var1) throws HDF5LibraryException; + + public static synchronized native int H5Pget_shared_mesg_nindexes(int var0) throws HDF5LibraryException; + + public static synchronized native int H5Pset_shared_mesg_nindexes(int var0, int var1) throws HDF5LibraryException, IllegalArgumentException; + + public static synchronized native int H5Pget_shared_mesg_index(int var0, int var1, int[] var2) throws HDF5LibraryException, NullPointerException, IllegalArgumentException; + + public static synchronized native int H5Pset_shared_mesg_index(int var0, int var1, int var2, int var3) throws HDF5LibraryException, IllegalArgumentException; + + public static synchronized native int H5Pget_shared_mesg_phase_change(int var0, int[] var1) throws HDF5LibraryException, NullPointerException; + + public static synchronized native int H5Pset_shared_mesg_phase_change(int var0, int var1, int var2) throws HDF5LibraryException, IllegalArgumentException; + + public static synchronized native int H5Pget_alignment(int var0, long[] var1) throws HDF5LibraryException, NullPointerException, IllegalArgumentException; + + public static synchronized native int H5Pset_alignment(int var0, long var1, long var3) throws HDF5LibraryException; + + public static synchronized native int H5Pget_driver(int var0) throws HDF5LibraryException; + + public static synchronized native long H5Pget_family_offset(int var0) throws HDF5LibraryException, NullPointerException; + + public static synchronized native int H5Pset_family_offset(int var0, long var1) throws HDF5LibraryException, NullPointerException; + + public static synchronized native int H5Pget_cache(int var0, int[] var1, long[] var2, long[] var3, double[] var4) throws HDF5LibraryException, 
NullPointerException; + + /** @deprecated */ + @Deprecated + public static int H5Pget_cache(int var0, int[] var1, int[] var2, int[] var3, double[] var4) throws HDF5LibraryException, NullPointerException { + long[] var5 = new long[]{(long)var2[0]}; + long[] var6 = new long[]{(long)var3[0]}; + int var7 = H5Pget_cache(var0, var1, var5, var6, var4); + var2[0] = (int)var5[0]; + var3[0] = (int)var6[0]; + return var7; + } + + public static synchronized native int H5Pset_cache(int var0, int var1, long var2, long var4, double var6) throws HDF5LibraryException; + + public static synchronized native H5AC_cache_config_t H5Pget_mdc_config(int var0) throws HDF5LibraryException; + + public static synchronized native void H5Pset_mdc_config(int var0, H5AC_cache_config_t var1) throws HDF5LibraryException; + + public static synchronized native int H5Pget_gc_references(int var0, boolean[] var1) throws HDF5LibraryException, NullPointerException; + + public static synchronized native boolean H5Pget_gcreferences(int var0) throws HDF5LibraryException; + + public static synchronized int H5Pget_gc_reference(int var0, boolean[] var1) throws HDF5LibraryException, NullPointerException { + return H5Pget_gc_references(var0, var1); + } + + public static synchronized native int H5Pset_gc_references(int var0, boolean var1) throws HDF5LibraryException; + + public static synchronized native int H5Pget_fclose_degree(int var0) throws HDF5LibraryException, NullPointerException; + + public static synchronized native int H5Pset_fclose_degree(int var0, int var1) throws HDF5LibraryException, NullPointerException; + + public static synchronized native long H5Pget_meta_block_size(int var0) throws HDF5LibraryException; + + public static synchronized native void H5Pset_meta_block_size(int var0, long var1) throws HDF5LibraryException; + + public static synchronized native long H5Pget_sieve_buf_size(int var0) throws HDF5LibraryException; + + public static synchronized native void H5Pset_sieve_buf_size(int var0, long var1) throws HDF5LibraryException; + + public static synchronized native int H5Pget_small_data_block_size(int var0, long[] var1) throws HDF5LibraryException, NullPointerException; + + public static synchronized native long H5Pget_small_data_block_size_long(int var0) throws HDF5LibraryException; + + public static synchronized native int H5Pset_small_data_block_size(int var0, long var1) throws HDF5LibraryException; + + public static synchronized native int H5Pget_libver_bounds(int var0, int[] var1) throws HDF5LibraryException, NullPointerException; + + public static synchronized native int H5Pset_libver_bounds(int var0, int var1, int var2) throws HDF5LibraryException, IllegalArgumentException; + + public static synchronized native int H5Pget_elink_file_cache_size(int var0) throws HDF5LibraryException; + + public static synchronized native void H5Pset_elink_file_cache_size(int var0, int var1) throws HDF5LibraryException; + + public static synchronized native int H5Pget_layout(int var0) throws HDF5LibraryException; + + public static synchronized native int H5Pset_layout(int var0, int var1) throws HDF5LibraryException; + + public static synchronized native int H5Pget_chunk(int var0, int var1, long[] var2) throws HDF5LibraryException, NullPointerException, IllegalArgumentException; + + public static synchronized native int H5Pset_chunk(int var0, int var1, byte[] var2) throws HDF5LibraryException, NullPointerException, IllegalArgumentException; + + public static synchronized int H5Pset_chunk(int var0, int var1, long[] var2) throws 
HDF5Exception, NullPointerException, IllegalArgumentException { + if (var2 == null) { + return -1; + } else { + HDFArray var3 = new HDFArray(var2); + byte[] var4 = var3.byteify(); + int var5 = H5Pset_chunk(var0, var1, var4); + Object var6 = null; + var3 = null; + return var5; + } + } + + public static synchronized native int H5Pget_external(int var0, int var1, long var2, String[] var4, long[] var5) throws ArrayIndexOutOfBoundsException, ArrayStoreException, HDF5LibraryException, NullPointerException, IllegalArgumentException; + + public static synchronized native int H5Pset_external(int var0, String var1, long var2, long var4) throws HDF5LibraryException, NullPointerException; + + public static synchronized native int H5Pget_external_count(int var0) throws HDF5LibraryException; + + public static synchronized native int H5Pset_szip(int var0, int var1, int var2) throws HDF5LibraryException, NullPointerException; + + public static synchronized native int H5Pset_shuffle(int var0) throws HDF5LibraryException, NullPointerException; + + public static synchronized native int H5Pset_nbit(int var0) throws HDF5LibraryException; + + public static synchronized native int H5Pset_scaleoffset(int var0, int var1, int var2) throws HDF5LibraryException, IllegalArgumentException; + + public static synchronized native int H5Pget_fill_value(int var0, int var1, byte[] var2) throws HDF5Exception; + + public static synchronized int H5Pget_fill_value(int var0, int var1, Object var2) throws HDF5Exception { + HDFArray var3 = new HDFArray(var2); + byte[] var4 = var3.emptyBytes(); + int var5 = H5Pget_fill_value(var0, var1, var4); + if (var5 >= 0) { + var3.arrayify(var4); + } + + return var5; + } + + public static synchronized native int H5Pset_fill_value(int var0, int var1, byte[] var2) throws HDF5Exception; + + public static synchronized int H5Pset_fill_value(int var0, int var1, Object var2) throws HDF5Exception { + HDFArray var3 = new HDFArray(var2); + byte[] var4 = var3.byteify(); + int var5 = H5Pset_fill_value(var0, var1, var4); + Object var6 = null; + var3 = null; + return var5; + } + + public static synchronized native int H5Pfill_value_defined(int var0, int[] var1) throws HDF5LibraryException, NullPointerException; + + public static synchronized native int H5Pget_alloc_time(int var0, int[] var1) throws HDF5LibraryException, NullPointerException; + + public static synchronized native int H5Pset_alloc_time(int var0, int var1) throws HDF5LibraryException, NullPointerException; + + public static synchronized native int H5Pget_fill_time(int var0, int[] var1) throws HDF5LibraryException, NullPointerException; + + public static synchronized native int H5Pset_fill_time(int var0, int var1) throws HDF5LibraryException, NullPointerException; + + public static synchronized native void H5Pget_chunk_cache(int var0, long[] var1, long[] var2, double[] var3) throws HDF5LibraryException, NullPointerException; + + public static synchronized native void H5Pset_chunk_cache(int var0, long var1, long var3, double var5) throws HDF5LibraryException; + + public static synchronized native long H5Pget_data_transform(int var0, String[] var1, long var2) throws HDF5LibraryException, IllegalArgumentException; + + public static synchronized native int H5Pset_data_transform(int var0, String var1) throws HDF5LibraryException, NullPointerException; + + public static synchronized native int H5Pget_buffer(int var0, byte[] var1, byte[] var2) throws HDF5LibraryException, IllegalArgumentException; + + public static synchronized native long 
H5Pget_buffer_size(int var0) throws HDF5LibraryException, IllegalArgumentException; + + public static synchronized native void H5Pset_buffer_size(int var0, long var1) throws HDF5LibraryException, IllegalArgumentException; + + public static synchronized native int H5Pget_edc_check(int var0) throws HDF5LibraryException, NullPointerException; + + public static synchronized native int H5Pset_edc_check(int var0, int var1) throws HDF5LibraryException, NullPointerException; + + public static synchronized native int H5Pget_btree_ratios(int var0, double[] var1, double[] var2, double[] var3) throws HDF5LibraryException, NullPointerException; + + public static synchronized native int H5Pset_btree_ratios(int var0, double var1, double var3, double var5) throws HDF5LibraryException; + + public static synchronized native int H5Pget_hyper_vector_size(int var0, long[] var1) throws HDF5LibraryException, NullPointerException; + + public static synchronized native int H5Pset_hyper_vector_size(int var0, long var1) throws HDF5LibraryException, NullPointerException; + + public static synchronized native boolean H5Pget_create_intermediate_group(int var0) throws HDF5LibraryException; + + public static synchronized native int H5Pset_create_intermediate_group(int var0, boolean var1) throws HDF5LibraryException; + + public static synchronized native long H5Pget_local_heap_size_hint(int var0) throws HDF5LibraryException; + + public static synchronized native int H5Pset_local_heap_size_hint(int var0, long var1) throws HDF5LibraryException; + + public static synchronized native int H5Pget_link_phase_change(int var0, int[] var1) throws HDF5LibraryException, NullPointerException; + + public static synchronized native int H5Pset_link_phase_change(int var0, int var1, int var2) throws HDF5LibraryException, IllegalArgumentException; + + public static synchronized native int H5Pget_est_link_info(int var0, int[] var1) throws HDF5LibraryException, NullPointerException; + + public static synchronized native int H5Pset_est_link_info(int var0, int var1, int var2) throws HDF5LibraryException, IllegalArgumentException; + + public static synchronized native int H5Pget_link_creation_order(int var0) throws HDF5LibraryException; + + public static synchronized native int H5Pset_link_creation_order(int var0, int var1) throws HDF5LibraryException; + + public static synchronized native int H5Pget_char_encoding(int var0) throws HDF5LibraryException; + + public static synchronized native void H5Pset_char_encoding(int var0, int var1) throws HDF5LibraryException; + + public static synchronized native long H5Pget_nlinks(int var0) throws HDF5LibraryException; + + public static synchronized native int H5Pset_nlinks(int var0, long var1) throws HDF5LibraryException, IllegalArgumentException; + + public static synchronized native long H5Pget_elink_prefix(int var0, String[] var1) throws HDF5LibraryException, NullPointerException; + + public static synchronized native int H5Pset_elink_prefix(int var0, String var1) throws HDF5LibraryException, NullPointerException; + + public static int H5Pget_elink_fapl(int var0) throws HDF5LibraryException { + int var1 = _H5Pget_elink_fapl(var0); + if (var1 > 0) { + log.trace("OPEN_IDS: H5Pget_elink_fapl add {}", var1); + OPEN_IDS.add(var1); + log.trace("OPEN_IDS: {}", OPEN_IDS.size()); + } + + return var1; + } + + private static synchronized native int _H5Pget_elink_fapl(int var0) throws HDF5LibraryException; + + public static synchronized native int H5Pset_elink_fapl(int var0, int var1) throws HDF5LibraryException; + 
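The chunking and filter setters declared above are the pieces typically combined on a dataset-creation property list. Below is a minimal sketch, not part of the patch, assuming it sits in the same package as this bundled H5 class, that an H5Pcreate wrapper and the standard HDF5Constants.H5P_DATASET_CREATE constant exist earlier in the binding (neither appears in this hunk), and that the native library can be loaded:

// Sketch only: build a chunked dataset-creation property list using the wrappers in this hunk.
// H5Pcreate and HDF5Constants.H5P_DATASET_CREATE are assumptions, not shown here.
public final class ChunkedPlistSketch {
    private ChunkedPlistSketch() {}

    public static int newChunkedPlist() throws Exception {
        int plist = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE); // assumed wrapper from earlier in the class
        long[] chunkDims = {256L, 256L};
        // The long[] overload of H5Pset_chunk above byteifies the dims through HDFArray
        // and delegates to the native byte[] variant.
        H5.H5Pset_chunk(plist, chunkDims.length, chunkDims);
        H5.H5Pset_shuffle(plist); // byte-shuffle filter, as declared in this hunk
        return plist;             // the caller is responsible for closing the id
    }
}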
+ public static synchronized native int H5Pget_elink_acc_flags(int var0) throws HDF5LibraryException; + + public static synchronized native int H5Pset_elink_acc_flags(int var0, int var1) throws HDF5LibraryException, IllegalArgumentException; + + public static synchronized native int H5Pget_copy_object(int var0) throws HDF5LibraryException; + + public static synchronized native void H5Pset_copy_object(int var0, int var1) throws HDF5LibraryException; + + public static synchronized native int H5Pget_version(int var0, int[] var1) throws HDF5LibraryException, NullPointerException, IllegalArgumentException; + + public static synchronized native void H5Pget_fapl_core(int var0, long[] var1, boolean[] var2) throws HDF5LibraryException, NullPointerException; + + public static synchronized native int H5Pset_fapl_core(int var0, long var1, boolean var3) throws HDF5LibraryException, NullPointerException; + + public static synchronized native int H5Pget_fapl_direct(int var0, long[] var1) throws HDF5LibraryException; + + public static synchronized native int H5Pset_fapl_direct(int var0, long var1, long var3, long var5) throws HDF5LibraryException; + + public static synchronized native int H5Pget_fapl_family(int var0, long[] var1, int[] var2) throws HDF5LibraryException, NullPointerException; + + public static synchronized native int H5Pset_fapl_family(int var0, long var1, int var3) throws HDF5LibraryException, NullPointerException; + + public static synchronized native boolean H5Pget_fapl_multi(int var0, int[] var1, int[] var2, String[] var3, long[] var4) throws HDF5LibraryException, NullPointerException; + + public static synchronized native void H5Pset_fapl_multi(int var0, int[] var1, int[] var2, String[] var3, long[] var4, boolean var5) throws HDF5LibraryException, NullPointerException; + + /** @deprecated */ + @Deprecated + public static synchronized native int H5Pget_preserve(int var0) throws HDF5LibraryException; + + /** @deprecated */ + @Deprecated + public static synchronized native int H5Pset_preserve(int var0, boolean var1) throws HDF5LibraryException, IllegalArgumentException; + + /** @deprecated */ + @Deprecated + public static int H5Pset_fapl_log(int var0, String var1, int var2, int var3) throws HDF5LibraryException, NullPointerException { + H5Pset_fapl_log(var0, var1, (long)var2, (long)var3); + return 1; + } + + public static synchronized native void H5Pset_fapl_log(int var0, String var1, long var2, long var4) throws HDF5LibraryException, NullPointerException; + + public static synchronized native int H5Pset_fapl_sec2(int var0) throws HDF5LibraryException, NullPointerException; + + public static synchronized native void H5Pset_fapl_split(int var0, String var1, int var2, String var3, int var4) throws HDF5LibraryException, NullPointerException; + + public static synchronized native int H5Pset_fapl_stdio(int var0) throws HDF5LibraryException, NullPointerException; + + public static synchronized native int H5Pset_fapl_windows(int var0) throws HDF5LibraryException, NullPointerException; + + public static synchronized native void H5PLset_loading_state(int var0) throws HDF5LibraryException; + + public static synchronized native int H5PLget_loading_state() throws HDF5LibraryException; + + private static synchronized native int H5Rcreate(byte[] var0, int var1, String var2, int var3, int var4) throws HDF5LibraryException, NullPointerException, IllegalArgumentException; + + public static synchronized byte[] H5Rcreate(int var0, String var1, int var2, int var3) throws HDF5LibraryException, 
NullPointerException, IllegalArgumentException { + byte var4 = 8; + if (var2 == HDF5Constants.H5R_DATASET_REGION) { + var4 = 12; + } + + byte[] var5 = new byte[var4]; + H5Rcreate(var5, var0, var1, var2, var3); + return var5; + } + + public static int H5Rdereference(int var0, int var1, byte[] var2) throws HDF5LibraryException, NullPointerException, IllegalArgumentException { + int var3 = _H5Rdereference(var0, var1, var2); + if (var3 > 0) { + log.trace("OPEN_IDS: H5Rdereference add {}", var3); + OPEN_IDS.add(var3); + log.trace("OPEN_IDS: {}", OPEN_IDS.size()); + } + + return var3; + } + + private static synchronized native int _H5Rdereference(int var0, int var1, byte[] var2) throws HDF5LibraryException, NullPointerException, IllegalArgumentException; + + public static synchronized native long H5Rget_name(int var0, int var1, byte[] var2, String[] var3, long var4) throws HDF5LibraryException, NullPointerException, IllegalArgumentException; + + /** @deprecated */ + @Deprecated + public static synchronized native int H5Rget_obj_type(int var0, int var1, byte[] var2) throws HDF5LibraryException, NullPointerException, IllegalArgumentException; + + public static int H5Rget_obj_type(int var0, int var1, byte[] var2, int[] var3) throws HDF5LibraryException, NullPointerException, IllegalArgumentException { + return H5Rget_obj_type2(var0, var1, var2, var3); + } + + private static synchronized native int H5Rget_obj_type2(int var0, int var1, byte[] var2, int[] var3) throws HDF5LibraryException, NullPointerException, IllegalArgumentException; + + public static int H5Rget_region(int var0, int var1, byte[] var2) throws HDF5LibraryException, NullPointerException, IllegalArgumentException { + int var3 = _H5Rget_region(var0, var1, var2); + if (var3 > 0) { + log.trace("OPEN_IDS: H5Rget_region add {}", var3); + OPEN_IDS.add(var3); + log.trace("OPEN_IDS: {}", OPEN_IDS.size()); + } + + return var3; + } + + private static synchronized native int _H5Rget_region(int var0, int var1, byte[] var2) throws HDF5LibraryException, NullPointerException, IllegalArgumentException; + + public static int H5Sclose(int var0) throws HDF5LibraryException { + if (var0 < 0) { + return 0; + } else { + log.trace("OPEN_IDS: H5Sclose remove {}", var0); + OPEN_IDS.remove(var0); + log.trace("OPEN_IDS: {}", OPEN_IDS.size()); + return _H5Sclose(var0); + } + } + + private static synchronized native int _H5Sclose(int var0) throws HDF5LibraryException; + + public static int H5Scopy(int var0) throws HDF5LibraryException { + int var1 = _H5Scopy(var0); + if (var1 > 0) { + log.trace("OPEN_IDS: H5Scopy add {}", var1); + OPEN_IDS.add(var1); + log.trace("OPEN_IDS: {}", OPEN_IDS.size()); + } + + return var1; + } + + private static synchronized native int _H5Scopy(int var0) throws HDF5LibraryException; + + public static int H5Screate(int var0) throws HDF5LibraryException { + int var1 = _H5Screate(var0); + if (var1 > 0) { + log.trace("OPEN_IDS: H5Screate add {}", var1); + OPEN_IDS.add(var1); + log.trace("OPEN_IDS: {}", OPEN_IDS.size()); + } + + return var1; + } + + private static synchronized native int _H5Screate(int var0) throws HDF5LibraryException; + + public static int H5Screate_simple(int var0, long[] var1, long[] var2) throws HDF5Exception, NullPointerException { + int var3 = _H5Screate_simple(var0, var1, var2); + if (var3 > 0) { + log.trace("OPEN_IDS: H5Screate_simple add {}", var3); + OPEN_IDS.add(var3); + log.trace("OPEN_IDS: {}", OPEN_IDS.size()); + } + + return var3; + } + + private static synchronized native int _H5Screate_simple(int var0, 
long[] var1, long[] var2) throws HDF5Exception, NullPointerException; + + /** @deprecated */ + @Deprecated + public static int H5Screate_simple(int var0, byte[] var1, byte[] var2) throws HDF5Exception, NullPointerException { + ByteBuffer var3 = ByteBuffer.wrap(var1); + long[] var4 = var3.asLongBuffer().array(); + ByteBuffer var5 = ByteBuffer.wrap(var2); + long[] var6 = var5.asLongBuffer().array(); + int var7 = _H5Screate_simple(var0, var4, var6); + if (var7 > 0) { + log.trace("OPEN_IDS: H5S add {}", var7); + OPEN_IDS.add(var7); + log.trace("OPEN_IDS: {}", OPEN_IDS.size()); + } + + return var7; + } + + public static synchronized native int H5Sdecode(byte[] var0) throws HDF5LibraryException, NullPointerException; + + public static synchronized native byte[] H5Sencode(int var0) throws HDF5LibraryException, NullPointerException; + + public static synchronized native int H5Sextent_copy(int var0, int var1) throws HDF5LibraryException; + + public static synchronized native boolean H5Sextent_equal(int var0, int var1) throws HDF5LibraryException; + + public static synchronized native int H5Sget_select_bounds(int var0, long[] var1, long[] var2) throws HDF5LibraryException, NullPointerException; + + public static synchronized native long H5Sget_select_elem_npoints(int var0) throws HDF5LibraryException; + + public static synchronized native int H5Sget_select_elem_pointlist(int var0, long var1, long var3, long[] var5) throws HDF5LibraryException, NullPointerException; + + public static synchronized native int H5Sget_select_hyper_blocklist(int var0, long var1, long var3, long[] var5) throws HDF5LibraryException, NullPointerException; + + public static synchronized native long H5Sget_select_hyper_nblocks(int var0) throws HDF5LibraryException; + + public static synchronized native long H5Sget_select_npoints(int var0) throws HDF5LibraryException; + + public static synchronized native int H5Sget_select_type(int var0) throws HDF5LibraryException; + + public static synchronized native int H5Sget_simple_extent_dims(int var0, long[] var1, long[] var2) throws HDF5LibraryException, NullPointerException; + + public static synchronized native int H5Sget_simple_extent_ndims(int var0) throws HDF5LibraryException; + + public static synchronized native long H5Sget_simple_extent_npoints(int var0) throws HDF5LibraryException; + + public static synchronized native int H5Sget_simple_extent_type(int var0) throws HDF5LibraryException; + + public static synchronized native boolean H5Sis_simple(int var0) throws HDF5LibraryException; + + public static synchronized native int H5Soffset_simple(int var0, byte[] var1) throws HDF5LibraryException, NullPointerException; + + public static synchronized int H5Soffset_simple(int var0, long[] var1) throws HDF5Exception, NullPointerException { + if (var1 == null) { + return -1; + } else { + HDFArray var2 = new HDFArray(var1); + byte[] var3 = var2.byteify(); + int var4 = H5Soffset_simple(var0, var3); + Object var5 = null; + var2 = null; + return var4; + } + } + + public static synchronized native int H5Sselect_all(int var0) throws HDF5LibraryException; + + private static synchronized native int H5Sselect_elements(int var0, int var1, int var2, byte[] var3) throws HDF5LibraryException, NullPointerException; + + public static synchronized int H5Sselect_elements(int var0, int var1, int var2, long[][] var3) throws HDF5Exception, HDF5LibraryException, NullPointerException { + if (var3 == null) { + return -1; + } else { + HDFArray var4 = new HDFArray(var3); + byte[] var5 = var4.byteify(); + int 
var6 = H5Sselect_elements(var0, var1, var2, var5); + Object var7 = null; + var4 = null; + return var6; + } + } + + public static synchronized int H5Sselect_hyperslab(int var0, int var1, byte[] var2, byte[] var3, byte[] var4, byte[] var5) throws HDF5LibraryException, NullPointerException, IllegalArgumentException { + ByteBuffer var6 = ByteBuffer.wrap(var2); + long[] var7 = var6.asLongBuffer().array(); + ByteBuffer var8 = ByteBuffer.wrap(var3); + long[] var9 = var8.asLongBuffer().array(); + ByteBuffer var10 = ByteBuffer.wrap(var4); + long[] var11 = var10.asLongBuffer().array(); + ByteBuffer var12 = ByteBuffer.wrap(var5); + long[] var13 = var12.asLongBuffer().array(); + return H5Sselect_hyperslab(var0, var1, var7, var9, var11, var13); + } + + public static synchronized native int H5Sselect_hyperslab(int var0, int var1, long[] var2, long[] var3, long[] var4, long[] var5) throws HDF5LibraryException, NullPointerException, IllegalArgumentException; + + public static synchronized native int H5Sselect_none(int var0) throws HDF5LibraryException; + + public static synchronized native boolean H5Sselect_valid(int var0) throws HDF5LibraryException; + + public static synchronized native int H5Sset_extent_none(int var0) throws HDF5LibraryException; + + public static synchronized native int H5Sset_extent_simple(int var0, int var1, long[] var2, long[] var3) throws HDF5LibraryException, NullPointerException; + + public static synchronized int H5Sset_extent_simple(int var0, int var1, byte[] var2, byte[] var3) throws HDF5LibraryException, NullPointerException { + ByteBuffer var4 = ByteBuffer.wrap(var2); + long[] var5 = var4.asLongBuffer().array(); + ByteBuffer var6 = ByteBuffer.wrap(var3); + long[] var7 = var6.asLongBuffer().array(); + return H5Sset_extent_simple(var0, var1, var5, var7); + } + + /** @deprecated */ + @Deprecated + public static int H5Tarray_create(int var0, int var1, int[] var2, int[] var3) throws HDF5LibraryException, NullPointerException { + int var4 = _H5Tarray_create(var0, var1, var2, var3); + if (var4 > 0) { + log.trace("OPEN_IDS: H5T add {}", var4); + OPEN_IDS.add(var4); + log.trace("OPEN_IDS: {}", OPEN_IDS.size()); + } + + return var4; + } + + private static synchronized native int _H5Tarray_create(int var0, int var1, int[] var2, int[] var3) throws HDF5LibraryException, NullPointerException; + + public static int H5Tarray_create(int var0, int var1, long[] var2) throws HDF5LibraryException, NullPointerException { + int var3 = _H5Tarray_create2(var0, var1, var2); + if (var3 > 0) { + log.trace("OPEN_IDS: H5Tarray_create add {}", var3); + OPEN_IDS.add(var3); + log.trace("OPEN_IDS: {}", OPEN_IDS.size()); + } + + return var3; + } + + private static synchronized native int _H5Tarray_create2(int var0, int var1, long[] var2) throws HDF5LibraryException, NullPointerException; + + public static int H5Tclose(int var0) throws HDF5LibraryException { + if (var0 < 0) { + return 0; + } else { + log.trace("OPEN_IDS: H5Tclose remove {}", var0); + OPEN_IDS.remove(var0); + log.trace("OPEN_IDS: {}", OPEN_IDS.size()); + return _H5Tclose(var0); + } + } + + private static synchronized native int _H5Tclose(int var0) throws HDF5LibraryException; + + /** @deprecated */ + @Deprecated + public static int H5Tcommit(int var0, String var1, int var2) throws HDF5LibraryException, NullPointerException { + return H5Tcommit1(var0, var1, var2); + } + + /** @deprecated */ + @Deprecated + public static synchronized native int H5Tcommit1(int var0, String var1, int var2) throws HDF5LibraryException, NullPointerException; + + 
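The dataspace wrappers above pair id creation with OPEN_IDS bookkeeping: H5Screate_simple records each new id and H5Sclose removes it again, which is what makes the trace logs usable for leak diagnosis. A minimal usage sketch under the same assumptions as before (same package as this H5 class, HDF5Constants available, native library loadable); the hyperslab arguments follow the long[] signature declared in this hunk:

// Sketch only: create a 2-D dataspace, select a 50x50 hyperslab, then close the id
// so the OPEN_IDS add/remove calls stay balanced.
public final class DataspaceSketch {
    public static void main(String[] args) throws Exception {
        long[] dims = {100L, 200L};
        int spaceId = H5.H5Screate_simple(2, dims, dims);   // id is added to OPEN_IDS
        try {
            long[] start  = {10L, 20L};
            long[] stride = {1L, 1L};
            long[] count  = {50L, 50L};
            long[] block  = {1L, 1L};
            H5.H5Sselect_hyperslab(spaceId, HDF5Constants.H5S_SELECT_SET,
                    start, stride, count, block);
            // 50 * 50 = 2500 elements should now be selected.
            System.out.println("selected points: " + H5.H5Sget_select_npoints(spaceId));
        } finally {
            H5.H5Sclose(spaceId);                            // id is removed from OPEN_IDS
        }
    }
}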
public static synchronized native void H5Tcommit(int var0, String var1, int var2, int var3, int var4, int var5) throws HDF5LibraryException, NullPointerException; + + public static synchronized native void H5Tcommit_anon(int var0, int var1, int var2, int var3) throws HDF5LibraryException; + + public static synchronized native boolean H5Tcommitted(int var0) throws HDF5LibraryException; + + public static synchronized native void H5Tcompiler_conv(int var0, int var1) throws HDF5LibraryException; + + public static synchronized native void H5Tconvert(int var0, int var1, long var2, byte[] var4, byte[] var5, int var6) throws HDF5LibraryException, NullPointerException; + + public static int H5Tcopy(int var0) throws HDF5LibraryException { + int var1 = _H5Tcopy(var0); + if (var1 > 0) { + log.trace("OPEN_IDS: H5Tcopy add {}", var1); + OPEN_IDS.add(var1); + log.trace("OPEN_IDS: {}", OPEN_IDS.size()); + } + + return var1; + } + + private static synchronized native int _H5Tcopy(int var0) throws HDF5LibraryException; + + public static int H5Tcreate(int var0, int var1) throws HDF5LibraryException { + return H5Tcreate(var0, (long)var1); + } + + public static int H5Tcreate(int var0, long var1) throws HDF5LibraryException { + int var3 = _H5Tcreate(var0, var1); + if (var3 > 0) { + log.trace("OPEN_IDS: H5Tcreate add {}", var3); + OPEN_IDS.add(var3); + log.trace("OPEN_IDS: {}", OPEN_IDS.size()); + } + + return var3; + } + + private static synchronized native int _H5Tcreate(int var0, long var1) throws HDF5LibraryException; + + public static int H5Tdecode(byte[] var0) throws HDF5LibraryException, NullPointerException { + int var1 = _H5Tdecode(var0); + if (var1 > 0) { + log.trace("OPEN_IDS: H5Tdecode add {}", var1); + OPEN_IDS.add(var1); + log.trace("OPEN_IDS: {}", OPEN_IDS.size()); + } + + return var1; + } + + private static synchronized native int _H5Tdecode(byte[] var0) throws HDF5LibraryException, NullPointerException; + + public static synchronized native boolean H5Tdetect_class(int var0, int var1) throws HDF5LibraryException; + + public static synchronized native int H5Tencode(int var0, byte[] var1, long var2) throws HDF5LibraryException, NullPointerException; + + public static int H5Tenum_create(int var0) throws HDF5LibraryException { + int var1 = _H5Tenum_create(var0); + if (var1 > 0) { + log.trace("OPEN_IDS: H5Tenum_create add {}", var1); + OPEN_IDS.add(var1); + log.trace("OPEN_IDS: {}", OPEN_IDS.size()); + } + + return var1; + } + + private static synchronized native int _H5Tenum_create(int var0) throws HDF5LibraryException; + + public static synchronized native void H5Tenum_insert(int var0, String var1, byte[] var2) throws HDF5LibraryException, NullPointerException; + + public static int H5Tenum_insert(int var0, String var1, int[] var2) throws HDF5LibraryException, NullPointerException { + return H5Tenum_insert_int(var0, var1, var2); + } + + public static int H5Tenum_insert(int var0, String var1, int var2) throws HDF5LibraryException, NullPointerException { + int[] var3 = new int[]{var2}; + return H5Tenum_insert_int(var0, var1, var3); + } + + private static synchronized native int H5Tenum_insert_int(int var0, String var1, int[] var2) throws HDF5LibraryException, NullPointerException; + + public static synchronized native String H5Tenum_nameof(int var0, byte[] var1, long var2) throws HDF5LibraryException, NullPointerException; + + public static int H5Tenum_nameof(int var0, int[] var1, String[] var2, int var3) throws HDF5LibraryException, NullPointerException { + return H5Tenum_nameof_int(var0, var1, var2, 
var3); + } + + private static synchronized native int H5Tenum_nameof_int(int var0, int[] var1, String[] var2, int var3) throws HDF5LibraryException, NullPointerException; + + public static synchronized native void H5Tenum_valueof(int var0, String var1, byte[] var2) throws HDF5LibraryException, NullPointerException; + + public static int H5Tenum_valueof(int var0, String var1, int[] var2) throws HDF5LibraryException, NullPointerException { + return H5Tenum_valueof_int(var0, var1, var2); + } + + private static synchronized native int H5Tenum_valueof_int(int var0, String var1, int[] var2) throws HDF5LibraryException, NullPointerException; + + public static synchronized native boolean H5Tequal(int var0, int var1) throws HDF5LibraryException; + + /** @deprecated */ + @Deprecated + public static synchronized native int H5Tget_array_dims(int var0, int[] var1, int[] var2) throws HDF5LibraryException, NullPointerException; + + /** @deprecated */ + @Deprecated + public static int H5Tget_array_dims(int var0, long[] var1, int[] var2) throws HDF5LibraryException, NullPointerException { + return H5Tget_array_dims1(var0, var1, var2); + } + + /** @deprecated */ + @Deprecated + private static synchronized native int H5Tget_array_dims1(int var0, long[] var1, int[] var2) throws HDF5LibraryException, NullPointerException; + + public static int H5Tget_array_dims(int var0, long[] var1) throws HDF5LibraryException, NullPointerException { + return H5Tget_array_dims2(var0, var1); + } + + public static synchronized native int H5Tget_array_dims2(int var0, long[] var1) throws HDF5LibraryException, NullPointerException; + + public static synchronized native int H5Tget_array_ndims(int var0) throws HDF5LibraryException; + + public static synchronized native int H5Tget_class(int var0) throws HDF5LibraryException; + + public static String H5Tget_class_name(int var0) { + String var1 = null; + if (HDF5Constants.H5T_INTEGER == var0) { + var1 = "H5T_INTEGER"; + } else if (HDF5Constants.H5T_FLOAT == var0) { + var1 = "H5T_FLOAT"; + } else if (HDF5Constants.H5T_TIME == var0) { + var1 = "H5T_TIME"; + } else if (HDF5Constants.H5T_STRING == var0) { + var1 = "H5T_STRING"; + } else if (HDF5Constants.H5T_BITFIELD == var0) { + var1 = "H5T_BITFIELD"; + } else if (HDF5Constants.H5T_OPAQUE == var0) { + var1 = "H5T_OPAQUE"; + } else if (HDF5Constants.H5T_COMPOUND == var0) { + var1 = "H5T_COMPOUND"; + } else if (HDF5Constants.H5T_REFERENCE == var0) { + var1 = "H5T_REFERENCE"; + } else if (HDF5Constants.H5T_ENUM == var0) { + var1 = "H5T_ENUM"; + } else if (HDF5Constants.H5T_VLEN == var0) { + var1 = "H5T_VLEN"; + } else if (HDF5Constants.H5T_ARRAY == var0) { + var1 = "H5T_ARRAY"; + } else { + var1 = "H5T_NO_CLASS"; + } + + return var1; + } + + public static int H5Tget_create_plist(int var0) throws HDF5LibraryException { + int var1 = _H5Tget_create_plist(var0); + if (var1 > 0) { + log.trace("OPEN_IDS: _H5Tget_create_plist add {}", var1); + OPEN_IDS.add(var1); + log.trace("OPEN_IDS: {}", OPEN_IDS.size()); + } + + return var1; + } + + private static synchronized native int _H5Tget_create_plist(int var0) throws HDF5LibraryException; + + public static synchronized native int H5Tget_cset(int var0) throws HDF5LibraryException; + + public static synchronized native int H5Tset_cset(int var0, int var1) throws HDF5LibraryException; + + public static synchronized native int H5Tget_ebias(int var0) throws HDF5LibraryException; + + public static int H5Tset_ebias(int var0, int var1) throws HDF5LibraryException { + H5Tset_ebias(var0, (long)var1); + return 0; + 
} + + public static synchronized native long H5Tget_ebias_long(int var0) throws HDF5LibraryException; + + public static synchronized native void H5Tset_ebias(int var0, long var1) throws HDF5LibraryException; + + public static synchronized native void H5Tget_fields(int var0, long[] var1) throws HDF5LibraryException, NullPointerException, IllegalArgumentException; + + public static int H5Tget_fields(int var0, int[] var1) throws HDF5LibraryException, NullPointerException, IllegalArgumentException { + return H5Tget_fields_int(var0, var1); + } + + private static synchronized native int H5Tget_fields_int(int var0, int[] var1) throws HDF5LibraryException, NullPointerException, IllegalArgumentException; + + public static synchronized native void H5Tset_fields(int var0, long var1, long var3, long var5, long var7, long var9) throws HDF5LibraryException; + + public static int H5Tset_fields(int var0, int var1, int var2, int var3, int var4, int var5) throws HDF5LibraryException { + H5Tset_fields(var0, (long)var1, (long)var2, (long)var3, (long)var4, (long)var5); + return 0; + } + + public static synchronized native int H5Tget_inpad(int var0) throws HDF5LibraryException; + + public static synchronized native int H5Tset_inpad(int var0, int var1) throws HDF5LibraryException; + + public static synchronized native int H5Tget_member_class(int var0, int var1) throws HDF5LibraryException; + + public static synchronized native int H5Tget_member_index(int var0, String var1); + + public static synchronized native String H5Tget_member_name(int var0, int var1); + + public static synchronized native long H5Tget_member_offset(int var0, int var1) throws HDF5LibraryException; + + public static int H5Tget_member_type(int var0, int var1) throws HDF5LibraryException { + int var2 = _H5Tget_member_type(var0, var1); + if (var2 > 0) { + log.trace("OPEN_IDS: H5Tget_member_type add {}", var2); + OPEN_IDS.add(var2); + log.trace("OPEN_IDS: {}", OPEN_IDS.size()); + } + + return var2; + } + + private static synchronized native int _H5Tget_member_type(int var0, int var1) throws HDF5LibraryException; + + public static synchronized native void H5Tget_member_value(int var0, int var1, byte[] var2) throws HDF5LibraryException, NullPointerException; + + public static int H5Tget_member_value(int var0, int var1, int[] var2) throws HDF5LibraryException, NullPointerException { + return H5Tget_member_value_int(var0, var1, var2); + } + + private static synchronized native int H5Tget_member_value_int(int var0, int var1, int[] var2) throws HDF5LibraryException, NullPointerException; + + public static synchronized int H5Tget_native_type(int var0) throws HDF5LibraryException { + return H5Tget_native_type(var0, HDF5Constants.H5T_DIR_ASCEND); + } + + public static int H5Tget_native_type(int var0, int var1) throws HDF5LibraryException { + int var2 = _H5Tget_native_type(var0, var1); + if (var2 > 0) { + log.trace("OPEN_IDS: H5Tget_native_type add {}", var2); + OPEN_IDS.add(var2); + log.trace("OPEN_IDS: {}", OPEN_IDS.size()); + } + + return var2; + } + + private static synchronized native int _H5Tget_native_type(int var0, int var1) throws HDF5LibraryException; + + public static synchronized native int H5Tget_nmembers(int var0) throws HDF5LibraryException; + + public static synchronized native int H5Tget_norm(int var0) throws HDF5LibraryException; + + public static synchronized native int H5Tset_norm(int var0, int var1) throws HDF5LibraryException; + + public static synchronized native int H5Tget_offset(int var0) throws HDF5LibraryException; + + public 
static int H5Tset_offset(int var0, int var1) throws HDF5LibraryException { + H5Tset_offset(var0, (long)var1); + return 0; + } + + public static synchronized native void H5Tset_offset(int var0, long var1) throws HDF5LibraryException; + + public static synchronized native int H5Tget_order(int var0) throws HDF5LibraryException; + + public static synchronized native int H5Tset_order(int var0, int var1) throws HDF5LibraryException; + + public static synchronized native int H5Tget_pad(int var0, int[] var1) throws HDF5LibraryException, NullPointerException; + + public static synchronized native int H5Tset_pad(int var0, int var1, int var2) throws HDF5LibraryException; + + public static synchronized native int H5Tget_precision(int var0) throws HDF5LibraryException; + + public static int H5Tset_precision(int var0, int var1) throws HDF5LibraryException { + H5Tset_precision(var0, (long)var1); + return 0; + } + + public static synchronized native long H5Tget_precision_long(int var0) throws HDF5LibraryException; + + public static synchronized native void H5Tset_precision(int var0, long var1) throws HDF5LibraryException; + + public static synchronized native int H5Tget_sign(int var0) throws HDF5LibraryException; + + public static synchronized native int H5Tset_sign(int var0, int var1) throws HDF5LibraryException; + + public static synchronized native int H5Tget_size(int var0) throws HDF5LibraryException; + + public static int H5Tset_size(int var0, int var1) throws HDF5LibraryException { + H5Tset_size(var0, (long)var1); + return 0; + } + + public static synchronized native long H5Tget_size_long(int var0) throws HDF5LibraryException; + + public static synchronized native void H5Tset_size(int var0, long var1) throws HDF5LibraryException; + + public static synchronized native int H5Tget_strpad(int var0) throws HDF5LibraryException; + + public static synchronized native int H5Tset_strpad(int var0, int var1) throws HDF5LibraryException; + + public static int H5Tget_super(int var0) throws HDF5LibraryException { + int var1 = _H5Tget_super(var0); + if (var1 > 0) { + log.trace("OPEN_IDS: H5Tget_super add {}", var1); + OPEN_IDS.add(var1); + log.trace("OPEN_IDS: {}", OPEN_IDS.size()); + } + + return var1; + } + + private static synchronized native int _H5Tget_super(int var0) throws HDF5LibraryException; + + public static synchronized native String H5Tget_tag(int var0) throws HDF5LibraryException; + + public static synchronized native int H5Tset_tag(int var0, String var1) throws HDF5LibraryException; + + public static synchronized native int H5Tinsert(int var0, String var1, long var2, int var4) throws HDF5LibraryException, NullPointerException; + + public static synchronized native boolean H5Tis_variable_str(int var0) throws HDF5LibraryException; + + public static synchronized native int H5Tlock(int var0) throws HDF5LibraryException; + + /** @deprecated */ + @Deprecated + public static int H5Topen(int var0, String var1) throws HDF5LibraryException, NullPointerException { + int var2 = _H5Topen(var0, var1); + if (var2 > 0) { + log.trace("OPEN_IDS: H5Topen add {}", var2); + OPEN_IDS.add(var2); + log.trace("OPEN_IDS: {}", OPEN_IDS.size()); + } + + return var2; + } + + private static synchronized native int _H5Topen(int var0, String var1) throws HDF5LibraryException, NullPointerException; + + public static int H5Topen(int var0, String var1, int var2) throws HDF5LibraryException, NullPointerException { + int var3 = _H5Topen2(var0, var1, var2); + if (var3 > 0) { + log.trace("OPEN_IDS: H5Topen add {}", var3); + 
OPEN_IDS.add(var3); + log.trace("OPEN_IDS: {}", OPEN_IDS.size()); + } + + return var3; + } + + private static synchronized native int _H5Topen2(int var0, String var1, int var2) throws HDF5LibraryException, NullPointerException; + + public static synchronized native int H5Tpack(int var0) throws HDF5LibraryException; + + public static int H5Tvlen_create(int var0) throws HDF5LibraryException { + int var1 = _H5Tvlen_create(var0); + if (var1 > 0) { + log.trace("OPEN_IDS: H5Tvlen_create add {}", var1); + OPEN_IDS.add(var1); + log.trace("OPEN_IDS: {}", OPEN_IDS.size()); + } + + return var1; + } + + private static synchronized native int _H5Tvlen_create(int var0) throws HDF5LibraryException; + + public static synchronized native int H5Zfilter_avail(int var0) throws HDF5LibraryException, NullPointerException; + + public static synchronized native int H5Zget_filter_info(int var0) throws HDF5LibraryException; + + public static synchronized native int H5Zunregister(int var0) throws HDF5LibraryException, NullPointerException; + + static { + loadH5Lib(); + } +} diff --git a/src/main/java/com/se/nsl/utils/ProjectionToGeographicUtil.java b/src/main/java/com/se/nsl/utils/ProjectionToGeographicUtil.java new file mode 100644 index 0000000..8c3094c --- /dev/null +++ b/src/main/java/com/se/nsl/utils/ProjectionToGeographicUtil.java @@ -0,0 +1,66 @@ +package com.se.nsl.utils; + +import com.alibaba.fastjson.JSONArray; +import org.osgeo.proj4j.BasicCoordinateTransform; +import org.osgeo.proj4j.CRSFactory; +import org.osgeo.proj4j.CoordinateReferenceSystem; +import org.osgeo.proj4j.ProjCoordinate; + +public class ProjectionToGeographicUtil { + + public static JSONArray getPointAndHight(Double x,Double y) { + // 鍒涘缓 CRSFactory 瀵硅薄 + JSONArray vertice = new JSONArray(); + vertice.add(x); + vertice.add(y); + vertice.add(0.0); + return vertice; + } + public static JSONArray getPoint(Double x,Double y) { + // 鍒涘缓 CRSFactory 瀵硅薄 + CRSFactory crsFactory = new CRSFactory(); + // 瀹氫箟鎶曞奖鍧愭爣绯荤粺锛岃繖閲屼互 UTM 鎶曞奖涓轰緥锛寊one 33N + CoordinateReferenceSystem sourceCRS = crsFactory.createFromName("EPSG:4548"); + // 瀹氫箟鍦扮悊鍧愭爣绯荤粺锛岃繖閲屼娇鐢� WGS84 + CoordinateReferenceSystem targetCRS = crsFactory.createFromName("EPSG:4326"); + // 鍒涘缓鍧愭爣杞崲瀵硅薄 + BasicCoordinateTransform transform = new BasicCoordinateTransform(sourceCRS, targetCRS); + //lon=116.683795 lat=39.90849042 + // 杈撳叆鎶曞奖鍧愭爣锛屼互 UTM 鎶曞奖鍧愭爣涓轰緥锛屽崟浣嶆槸绫� + ProjCoordinate sourceCoord = new ProjCoordinate(x, y); + ProjCoordinate targetCoord = new ProjCoordinate(); + + // 鎵ц鍧愭爣杞崲 + transform.transform(sourceCoord, targetCoord); + JSONArray vertice = new JSONArray(); + vertice.add(targetCoord.x); + vertice.add(targetCoord.y); + vertice.add(0.0); + return vertice; + } + + public static JSONArray get4548Point(Double x,Double y) { + // 鍒涘缓 CRSFactory 瀵硅薄 + CRSFactory crsFactory = new CRSFactory(); + // 瀹氫箟鎶曞奖鍧愭爣绯荤粺锛岃繖閲屼互 UTM 鎶曞奖涓轰緥锛寊one 33N + CoordinateReferenceSystem sourceCRS = crsFactory.createFromName("EPSG:4326"); + // 瀹氫箟鍦扮悊鍧愭爣绯荤粺锛岃繖閲屼娇鐢� WGS84 + CoordinateReferenceSystem targetCRS = crsFactory.createFromName("EPSG:4548"); + // 鍒涘缓鍧愭爣杞崲瀵硅薄 + BasicCoordinateTransform transform = new BasicCoordinateTransform(sourceCRS, targetCRS); + // 杈撳叆鎶曞奖鍧愭爣锛屼互 UTM 鎶曞奖鍧愭爣涓轰緥锛屽崟浣嶆槸绫� + ProjCoordinate sourceCoord = new ProjCoordinate(x, y); + ProjCoordinate targetCoord = new ProjCoordinate(); + // 鎵ц鍧愭爣杞崲 + transform.transform(sourceCoord, targetCoord); + JSONArray vertice = new JSONArray(); + vertice.add(targetCoord.x); + vertice.add(targetCoord.y); + return vertice; + } + + public static void main(String[] 
args) { + System.out.println(getPoint(469257.26224087493,4416938.9521611305)); + System.out.println(getPoint(116.64058,39.88605)); + } +} diff --git a/src/main/java/com/se/nsl/utils/SemUtils.java b/src/main/java/com/se/nsl/utils/SemUtils.java new file mode 100644 index 0000000..4b8da02 --- /dev/null +++ b/src/main/java/com/se/nsl/utils/SemUtils.java @@ -0,0 +1,20 @@ +package com.se.nsl.utils; + + +import cn.smartearth.sem.SEM; + +public class SemUtils { + public static void main(String[] args) throws Exception { + semToCityJson(); + } + public static void cityJsonToSem(String jsonUrl,String semUrl) throws Exception { + //cityjson杞瑂em + SEM sem = SEM.fromCityJSON(jsonUrl); + sem.writeToFile(semUrl); + } + public static void semToCityJson() throws Exception { + //sem杞琧ityjson + SEM sem2 = new SEM("D:\\uwsolver\\tongzhou\\link.sem"); + sem2.writeToCityJSON("D:\\uwsolver\\tongzhou\\link\\link.json"); + } +} diff --git a/src/main/java/com/se/nsl/utils/ShpReadUtils.java b/src/main/java/com/se/nsl/utils/ShpReadUtils.java new file mode 100644 index 0000000..db30bb3 --- /dev/null +++ b/src/main/java/com/se/nsl/utils/ShpReadUtils.java @@ -0,0 +1,82 @@ +package com.se.nsl.utils; + +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.module.SimpleModule; +import org.geotools.data.DataStore; +import org.geotools.data.DataStoreFinder; +import org.geotools.data.FeatureSource; +import org.geotools.data.shapefile.ShapefileDataStore; +import org.geotools.feature.FeatureCollection; +import org.geotools.feature.FeatureIterator; +import org.locationtech.jts.geom.Geometry; +import org.locationtech.jts.io.WKTReader; +import org.opengis.feature.Property; +import org.opengis.feature.simple.SimpleFeature; +import org.opengis.feature.simple.SimpleFeatureType; + +import java.io.File; +import java.io.IOException; +import java.nio.charset.Charset; +import java.util.*; + +public class ShpReadUtils { + public static void main(String[] args) throws Exception { + readPointShp("D:\\鍩庡競鍐呮稘\\sem\\绠$偣\\pipeline-point.shp"); + //readPointShp("D:\\鍩庡競鍐呮稘\\sem\\绠$嚎\\pipeline-conduit.shp"); + } + static class GeometrySerializer extends JsonSerializer<Geometry> { + @Override + public void serialize(Geometry value, JsonGenerator gen, SerializerProvider serializers) throws IOException { + gen.writeStartObject(); + gen.writeStringField("type", value.getGeometryType()); + gen.writeObjectField("coordinates", value.getCoordinates()); + gen.writeEndObject(); + } + } + /** + * @param url shp鏂囦欢璺緞 + * @return shp瑙f瀽鍚庣殑鍐呭 + * @throws Exception 寮傚父 + */ + public static List<Map<String, Object>> readPointShp(String url) throws Exception { + Map<String, Object> map = new HashMap<String, Object>(); + File file = new File(url); + map.put("url", file.toURI().toURL()); + // 蹇呴』鏄疷RL绫诲瀷 + DataStore dataStore = DataStoreFinder.getDataStore(map); + //瀛楃杞爜锛岄槻姝腑鏂囦贡鐮� + ((ShapefileDataStore) dataStore).setCharset(Charset.forName("utf8")); + String typeName = dataStore.getTypeNames()[0]; + FeatureSource<SimpleFeatureType, SimpleFeature> source = dataStore.getFeatureSource(typeName); + FeatureCollection<SimpleFeatureType, SimpleFeature> collection = source.getFeatures(); + FeatureIterator<SimpleFeature> features = collection.features(); + List<Map<String, Object>> list = new ArrayList<>(); + WKTReader reader = new WKTReader(); + while (features.hasNext()) { 
+ // 杩唬鎻愬彇灞炴�� + SimpleFeature feature = features.next(); + Iterator<? extends Property> iterator = feature.getValue().iterator(); + Map<String, Object> objectMap = new HashMap<>(); + while (iterator.hasNext()) { + Property property = iterator.next(); + if ("the_geom".equals(property.getName().toString())) { + Geometry geometry = reader.read(property.getValue().toString()); + SimpleModule module = new SimpleModule(); + module.addSerializer(Geometry.class, new GeometrySerializer()); + ObjectMapper mapper = new ObjectMapper(); + mapper.registerModule(module); + String jsonString = mapper.writeValueAsString(geometry); + objectMap.put(property.getName().toString(), jsonString); + } else { + objectMap.put(property.getName().toString(), property.getValue()); + } + } + list.add(objectMap); + } + //璇诲彇property涓殑鍏冪礌骞惰繑鍥� + return list; + } +} diff --git a/src/main/java/com/se/nsl/utils/ShpToolUtils.java b/src/main/java/com/se/nsl/utils/ShpToolUtils.java new file mode 100644 index 0000000..7966606 --- /dev/null +++ b/src/main/java/com/se/nsl/utils/ShpToolUtils.java @@ -0,0 +1,237 @@ +package com.se.nsl.utils; + +import com.alibaba.fastjson.JSONArray; +import com.alibaba.fastjson.JSONObject; +import org.gdal.gdal.gdal; +import org.gdal.ogr.*; +import org.gdal.osr.CoordinateTransformation; +import org.gdal.osr.SpatialReference; + +import java.util.Arrays; + +/** + * shp 宸ュ叿瀹炵敤绋嬪簭 + * + * @author xingjinshuang@smartearth.cn + * @date 2024/12/26 + */ +public class ShpToolUtils { + + // 澹版槑涓虹被鎴愬憳鍙橀噺 + private static double minX = Double.MAX_VALUE; + private static double maxX = Double.MIN_VALUE; + private static double minY = Double.MAX_VALUE; + private static double maxY = Double.MIN_VALUE; + + // 鐢ㄤ簬瀛樺偍鎵�鏈夎浆鎹㈠悗鐨勭粡绾害 + private static JSONArray coordinatesArray = new JSONArray(); + + /** + * 闃呰 SHP + * + * @param strVectorFile str 鍚戦噺鏂囦欢 + * @return {@link JSONObject} + */ + public static JSONObject readShp(String strVectorFile) { + // 娉ㄥ唽鎵�鏈夌殑椹卞姩 + ogr.RegisterAll(); + // 涓轰簡鏀寔涓枃璺緞锛岃娣诲姞涓嬮潰杩欏彞浠g爜 + gdal.SetConfigOption("GDAL_FILENAME_IS_UTF8", "YES"); + + // 璇诲彇鏁版嵁锛岃繖閲屼互ESRI鐨剆hp鏂囦欢涓轰緥 + String strDriverName = "ESRI Shapefile"; + org.gdal.ogr.Driver oDriver = ogr.GetDriverByName(strDriverName); + + if (oDriver == null) { + System.out.println(strDriverName + " 椹卞姩涓嶅彲鐢紒\n"); + return null; + } + + // 鎵撳紑鏁版嵁婧� + DataSource dataSource = oDriver.Open(strVectorFile); + if (dataSource == null) { + System.out.println("鏃犳硶鎵撳紑 Shapefile 鏂囦欢锛�"); + return null; + } + + Layer layer = dataSource.GetLayer(0); + + // 鑾峰彇鍥惧眰鐨勮寖鍥达紙鍒濇鐨勭煩褰㈣寖鍥达級 + double[] layerExtent = layer.GetExtent(); + System.out.println("鍒濆鍥惧眰鑼冨洿锛歮inx:" + layerExtent[0] + ", maxx:" + layerExtent[1] + ", miny:" + layerExtent[2] + ", maxy:" + layerExtent[3]); + + // 鑾峰彇鍥惧眰鐨勭┖闂村弬鑰� + SpatialReference layerSpatialRef = layer.GetSpatialRef(); + + // 鍒涘缓鐩爣绌洪棿鍙傝�� (EPSG:4326) + SpatialReference targetSpatialRef = new SpatialReference(); + targetSpatialRef.ImportFromEPSG(4326); // EPSG:4326 鏄� WGS84 + + // 鍒涘缓鍧愭爣杞崲瀵硅薄 + CoordinateTransformation coordTransform = CoordinateTransformation.CreateCoordinateTransformation(layerSpatialRef, targetSpatialRef); + + // 閬嶅巻姣忎釜 feature 鑾峰彇鍧愭爣鐐� + for (int i = 0; i < layer.GetFeatureCount(); i++) { + Feature feature = layer.GetFeature(i); + Geometry geometry = feature.GetGeometryRef(); + // 鍒ゆ柇鍑犱綍绫诲瀷骞跺鐞� + if (geometry != null) { + if (geometry.GetGeometryType() == ogr.wkbPoint) { + // 鍗曚釜鐐圭殑澶勭悊 + processPointGeometry(geometry, coordTransform); + } else if (geometry.GetGeometryType() == ogr.wkbMultiPoint) { + // 澶氫釜鐐圭殑澶勭悊 + 
System.out.println("geometry = " + geometry); + //processMultiPointGeometry(geometry, coordTransform); + } + } + } + // 鎵撳嵃杞崲鍚庣殑鐭╁舰鑼冨洿锛堢粡绾害锛� + System.out.println("鎵�鏈夌偣鐨勭粡绾害鐭╁舰鑼冨洿锛歮inX = " + minX + ", maxX = " + maxX + ", minY = " + minY + ", maxY = " + maxY); + JSONObject json = new JSONObject(); + json.put("minX", minX); + json.put("maxX", maxX); + json.put("minY", minY); + json.put("maxY", maxY); + return json; + } + + + /** + * 璇诲彇 shp get local + * + * @param strVectorFile str 鍚戦噺鏂囦欢 + * @return {@link JSONObject} + */ + public static JSONArray readShpGetLocal(String strVectorFile) { + // 娉ㄥ唽鎵�鏈夌殑椹卞姩 + ogr.RegisterAll(); + // 涓轰簡鏀寔涓枃璺緞锛岃娣诲姞涓嬮潰杩欏彞浠g爜 + gdal.SetConfigOption("GDAL_FILENAME_IS_UTF8", "YES"); + + // 璇诲彇鏁版嵁锛岃繖閲屼互ESRI鐨剆hp鏂囦欢涓轰緥 + String strDriverName = "ESRI Shapefile"; + org.gdal.ogr.Driver oDriver = ogr.GetDriverByName(strDriverName); + + if (oDriver == null) { + System.out.println(strDriverName + " 椹卞姩涓嶅彲鐢紒\n"); + return null; + } + + // 鎵撳紑鏁版嵁婧� + DataSource dataSource = oDriver.Open(strVectorFile); + if (dataSource == null) { + System.out.println("鏃犳硶鎵撳紑 Shapefile 鏂囦欢锛�"); + return null; + } + + Layer layer = dataSource.GetLayer(0); + + // 鑾峰彇鍥惧眰鐨勮寖鍥达紙鍒濇鐨勭煩褰㈣寖鍥达級 + double[] layerExtent = layer.GetExtent(); + System.out.println("鍒濆鍥惧眰鑼冨洿锛歮inx:" + layerExtent[0] + ", maxx:" + layerExtent[1] + ", miny:" + layerExtent[2] + ", maxy:" + layerExtent[3]); + + // 鑾峰彇鍥惧眰鐨勭┖闂村弬鑰� + SpatialReference layerSpatialRef = layer.GetSpatialRef(); + + // 鍒涘缓鐩爣绌洪棿鍙傝�� (EPSG:4326) + SpatialReference targetSpatialRef = new SpatialReference(); + targetSpatialRef.ImportFromEPSG(4326); // EPSG:4326 鏄� WGS84 + + // 鍒涘缓鍧愭爣杞崲瀵硅薄 + CoordinateTransformation coordTransform = CoordinateTransformation.CreateCoordinateTransformation(layerSpatialRef, targetSpatialRef); + + // 閬嶅巻姣忎釜 feature 鑾峰彇鍧愭爣鐐� + for (int i = 0; i < layer.GetFeatureCount(); i++) { + Feature feature = layer.GetFeature(i); + Geometry geometry = feature.GetGeometryRef(); + // 鍒ゆ柇鍑犱綍绫诲瀷骞跺鐞� + if (geometry != null) { + if (geometry.GetGeometryType() == ogr.wkbPoint) { + // 鍗曚釜鐐圭殑澶勭悊 + processPointGeometry(geometry, coordTransform); + } else if (geometry.GetGeometryType() == ogr.wkbMultiPoint) { + // 澶氫釜鐐圭殑澶勭悊 + System.out.println("geometry = " + geometry); + //processMultiPointGeometry(geometry, coordTransform); + } + } + } + // 鎵撳嵃杞崲鍚庣殑鐭╁舰鑼冨洿锛堢粡绾害锛� + System.out.println("鎵�鏈夌偣鐨勭粡绾害鐭╁舰鑼冨洿锛歮inX = " + minX + ", maxX = " + maxX + ", minY = " + minY + ", maxY = " + maxY); + return coordinatesArray; + } + + // 澶勭悊鍗曚釜鐐圭殑鍑犱綍浣� + private static void processPointGeometry(Geometry geometry, CoordinateTransformation coordTransform) { + double x = geometry.GetX(); + double y = geometry.GetY(); + + // 鍒涘缓鍖呭惈 3 涓厓绱犵殑鏁扮粍锛孼 鍧愭爣鍙互璁句负 0 + double[] coords = new double[]{x, y, 0}; // Z 鍧愭爣榛樿鍊间负 0 + //System.out.println("鍘熷鍧愭爣1锛歝oords = " + Arrays.toString(coords)); + + // 杞崲鍧愭爣 + coordTransform.TransformPoint(coords); + //System.out.println("杞崲鍚庣殑鍧愭爣锛歝oords = " + Arrays.toString(coords)); + + // 鍧愭爣椤哄簭鏄� [latitude, longitude]锛屼氦鎹㈤『搴忎负 [longitude, latitude] + double longitude = coords[0]; + double latitude = coords[1]; + //System.out.println("杞崲鍚庣殑缁忕含搴︼細longitude = " + longitude + ", latitude = " + latitude); + // 鍒涘缓涓�涓� JSON 瀵硅薄淇濆瓨缁忕含搴︿俊鎭� + JSONObject coordObj = new JSONObject(); + coordObj.put("lat", longitude); + coordObj.put("lon", latitude); + // 灏嗘 JSON 瀵硅薄娣诲姞鍒� JSONArray 涓� + coordinatesArray.add(coordObj); + + // 鏇存柊鐭╁舰杈圭晫锛堟澶勪娇鐢ㄥ叏灞�鍙橀噺鎴栬繑鍥炲�艰繘琛岃绠楋級 + updateRectangleBounds(coords); + } + + // 澶勭悊澶氱偣鐨勫嚑浣曚綋 + private static void 
processMultiPointGeometry(Geometry geometry, CoordinateTransformation coordTransform) { + int numPoints = geometry.GetGeometryCount(); + for (int j = 0; j < numPoints; j++) { + Geometry pointGeometry = geometry.GetGeometryRef(j); + double x = pointGeometry.GetX(); + double y = pointGeometry.GetY(); + + // 鍒涘缓鍖呭惈 3 涓厓绱犵殑鏁扮粍锛孼 鍧愭爣鍙互璁句负 0 + double[] coords = new double[]{x, y, 0}; // Z 鍧愭爣榛樿鍊间负 0 + System.out.println("鍘熷鍧愭爣2锛歝oords = " + Arrays.toString(coords)); + + // 杞崲鍧愭爣 + coordTransform.TransformPoint(coords); + System.out.println("杞崲鍚庣殑鍧愭爣锛歝oords = " + Arrays.toString(coords)); + + // 鍧愭爣椤哄簭鏄� [latitude, longitude]锛屼氦鎹㈤『搴忎负 [longitude, latitude] + double longitude = coords[0]; + double latitude = coords[1]; + System.out.println("杞崲鍚庣殑缁忕含搴︼細longitude = " + longitude + ", latitude = " + latitude); + + // 鏇存柊鐭╁舰杈圭晫 + updateRectangleBounds(coords); + } + } + + // 鏇存柊鐭╁舰鐨勮竟鐣屽�� + private static void updateRectangleBounds(double[] coords) { + double x = coords[0]; + double y = coords[1]; + + // 鏇存柊鏈�灏忔渶澶у�� + minX = Math.min(minX, x); + maxX = Math.max(maxX, x); + minY = Math.min(minY, y); + maxY = Math.max(maxY, y); + } + + public static void main(String[] args) { + // 璇诲彇shp鏂囦欢 + readShp("D:\\0a_project\\model\\shp\\闆ㄩ噺绔欑偣鏁版嵁\\闆ㄩ噺绔欑偣_4548\\闆ㄩ噺绔欑偣_4548.shp"); + System.out.println(coordinatesArray.toString()); // Pretty print with indent + + } +} diff --git a/src/main/java/com/se/nsl/utils/TiffClipper.java b/src/main/java/com/se/nsl/utils/TiffClipper.java new file mode 100644 index 0000000..eed8fbf --- /dev/null +++ b/src/main/java/com/se/nsl/utils/TiffClipper.java @@ -0,0 +1,42 @@ +package com.se.nsl.utils; + +import org.gdal.gdal.Dataset; +import org.gdal.gdal.TranslateOptions; +import org.gdal.gdal.gdal; +import org.gdal.gdalconst.gdalconstConstants; + +import java.util.Vector; + +public class TiffClipper { + public static void cropTiffByLatLon(String inputFilePath, String outputFilePath, + double minX, double minY, double maxX, double maxY) throws Exception{ + // 娉ㄥ唽鎵�鏈� GDAL 椹卞姩 + gdal.AllRegister(); + Vector vector=new Vector(); + vector.add("-projwin"); + vector.add(String.valueOf(minX)); + vector.add(String.valueOf(maxY)); + vector.add(String.valueOf(maxX)); + vector.add(String.valueOf(minY)); + TranslateOptions options = new TranslateOptions(vector); + // 鎵撳紑杈撳叆鏁版嵁闆� + Dataset ds = gdal.Open(inputFilePath, gdalconstConstants.GA_ReadOnly); + if (ds == null) { + throw new Exception("鏃犳硶鎵撳紑杈撳叆鏂囦欢: " + inputFilePath); + } + // 鎵ц瑁佸壀鎿嶄綔 + Dataset outputDataset = gdal.Translate(outputFilePath, ds, options); + if (outputDataset != null) { + System.out.println("瑁佸壀鎴愬姛锛岃緭鍑烘枃浠�: " + outputFilePath); + // 閲婃斁鏁版嵁闆嗚祫婧� + outputDataset.delete(); + } else { + System.err.println("瑁佸壀澶辫触"); + } + // 閿�姣� GDAL 椹卞姩绠$悊鍣� + gdal.GDALDestroyDriverManager(); + } + public static void main(String[] args) throws Exception{ + cropTiffByLatLon("D:\\鍩庡競鍐呮稘\\sem\\DEM.tif","D:\\鍩庡競鍐呮稘\\sem\\DEM100.tif",470066.5191659781,4417962.449727667,470922.1436516798,4418059.07638588); + } +} diff --git a/src/main/java/com/se/nsl/utils/TiffCoordinateExtractorUtil.java b/src/main/java/com/se/nsl/utils/TiffCoordinateExtractorUtil.java new file mode 100644 index 0000000..c23b176 --- /dev/null +++ b/src/main/java/com/se/nsl/utils/TiffCoordinateExtractorUtil.java @@ -0,0 +1,35 @@ +package com.se.nsl.utils; + +import com.alibaba.fastjson.JSONArray; +import org.gdal.gdal.Dataset; +import org.gdal.gdal.gdal; + +public class TiffCoordinateExtractorUtil { + public static void main(String[] args) { + 
getCoordinate("D:\\鍩庡競鍐呮稘\\sem\\DEM.tif"); + } + public static JSONArray getCoordinate(String tifPath){ + gdal.AllRegister(); + JSONArray array=new JSONArray(); + Dataset dataset = gdal.Open(tifPath); + if (dataset!= null) { + double[] geotransform = dataset.GetGeoTransform(); + if (geotransform!= null) { + double xmin = geotransform[0]; + double ymax = geotransform[3]; + double xmax = geotransform[0] + geotransform[1] * dataset.getRasterXSize(); + double ymin = geotransform[3] + geotransform[5] * dataset.getRasterYSize(); + System.out.println("宸︿笂瑙掔粡绾害: (" + xmin + ", " + ymax + ")"); + array.add(ProjectionToGeographicUtil.getPoint(xmin,ymax)); + System.out.println("鍙充笂瑙掔粡绾害: (" + xmax + ", " + ymax + ")"); + array.add(ProjectionToGeographicUtil.getPoint(xmax,ymax)); + System.out.println("宸︿笅瑙掔粡绾害: (" + xmin + ", " + ymin + ")"); + array.add(ProjectionToGeographicUtil.getPoint(xmin,ymin)); + System.out.println("鍙充笅瑙掔粡绾害: (" + xmax + ", " + ymin + ")"); + array.add(ProjectionToGeographicUtil.getPoint(xmax,ymin)); + } + } + System.out.println(array.toJSONString()); + return array; + } +} diff --git a/src/main/java/com/se/nsl/utils/TiffToRGBUtil.java b/src/main/java/com/se/nsl/utils/TiffToRGBUtil.java new file mode 100644 index 0000000..c81148f --- /dev/null +++ b/src/main/java/com/se/nsl/utils/TiffToRGBUtil.java @@ -0,0 +1,109 @@ +package com.se.nsl.utils; + +import org.apache.commons.imaging.*; +import org.gdal.gdal.Dataset; +import org.gdal.gdal.Band; +import org.gdal.gdal.gdal; +import org.gdal.gdalconst.gdalconstConstants; + +import javax.imageio.ImageIO; +import java.awt.*; +import java.awt.image.BufferedImage; +import java.io.File; +import java.io.IOException; + +public class TiffToRGBUtil { + public static void main(String[] args) throws Exception{ + tifToPng("D:\\鍩庡競鍐呮稘\\sem\\tongzhou_1m_tif\\tongzhou_raster_4548_1m_clip_river_fill.tif","D:\\鍩庡競鍐呮稘\\sem\\tongzhou_1m_tif\\tongzhou_raster_4548_1m_clip_river_fill.png"); + } + public static void tifToPng(String tifPath, String pngPath) throws Exception { + // 娉ㄥ唽鎵�鏈夌殑 GDAL 椹卞姩 + gdal.AllRegister(); + // 杈撳叆鐨� TIFF 鏂囦欢璺緞 + // 鎵撳紑杈撳叆鐨� TIFF 鏁版嵁闆� + Dataset dataset = gdal.Open(tifPath, gdalconstConstants.GA_ReadOnly); + if (dataset == null) { + System.err.println("鏃犳硶鎵撳紑杈撳叆鐨� TIFF 鏂囦欢"); + return; + } + // 鑾峰彇鍦扮悊鍙樻崲淇℃伅 + double[] geoTransform = dataset.GetGeoTransform(); + // 鑾峰彇绗竴涓尝娈� + Band band = dataset.GetRasterBand(1); + // 鑾峰彇鍥惧儚瀹藉害鍜岄珮搴� + int width = dataset.getRasterXSize(); + int height = dataset.getRasterYSize(); + // 璇诲彇鍥惧儚鏁版嵁鍒板瓧鑺傛暟缁� + byte[] imageData = new byte[width * height]; + band.ReadRaster(0, 0, width, height, imageData); + BufferedImage pngImage = new BufferedImage(width, height, BufferedImage.TYPE_INT_ARGB); + // 鍋囪楂樺害淇℃伅瀛樺偍鍦ㄧ伆搴﹀�间腑锛屾牴鎹湴鐞嗗彉鎹俊鎭拰鍍忕礌鍊艰绠楀疄闄呴珮搴� + for (int y = 0; y < height; y++) { + for (int x = 0; x < width; x++) { + int pixelValue = imageData[y * width + x] & 0xFF; + // 鍋囪楂樺害淇℃伅瀛樺偍鍦ㄥ儚绱犲�间腑 + double pixelHeight = pixelValue * geoTransform[5]; + //height = -10000 + ((R * 256 * 256 + G * 256 + B) * 0.1) + int value = (int) ((pixelHeight + 10000) * 10); + //value=(R * 256 * 256 + G * 256 + B); + int r = value / (256 * 256); + value = value % (256 * 256); + int g = value / 256; + int b = value % 256; + Color color = new Color(r, g, b); + int newRgb = color.getRGB(); + // 灏嗗鐞嗗悗鐨勫儚绱犻鑹插�艰缃埌鏂扮殑 PNG 鍥惧儚涓� + pngImage.setRGB(x, y, newRgb); + } + } + // 淇濆瓨涓� PNG 鍥惧儚 + ImageIO.write(pngImage, "png", new File(pngPath)); + // 閲婃斁璧勬簮 + dataset.delete(); + } + + public static void tiffToPng(String tifPath, String 
pngPath) throws Exception { + // 杈撳嚭鐨� PNG 鏂囦欢璺緞 + String outputPngFilePath = pngPath; + try { + // 璇诲彇 TIF 鍥惧儚 + File tiffFile = new File(tifPath); + BufferedImage tifImage = Imaging.getBufferedImage(tiffFile); + int width = tifImage.getWidth(); + int height = tifImage.getHeight(); + // 鍒涘缓涓�涓柊鐨� BufferedImage 瀵硅薄锛岀被鍨嬩负 TYPE_INT_ARGB + BufferedImage pngImage = new BufferedImage(width, height, BufferedImage.TYPE_INT_ARGB); + // 閬嶅巻 TIF 鍥惧儚鐨勬瘡涓儚绱� + for (int y = 0; y < height; y++) { + for (int x = 0; x < width; x++) { + // 鑾峰彇 TIF 鍥惧儚涓綋鍓嶅儚绱犵殑棰滆壊鍊� + int pixel = tifImage.getRGB(x, y); + // 鍦ㄨ繖閲屽彲浠ュ鍍忕礌鐨勯鑹插�艰繘琛屼竴浜涘鐞嗭紝渚嬪鏍规嵁楂樺害淇℃伅璋冩暣棰滆壊 + // 鍋囪鎴戜滑绠�鍗曞湴灏嗗儚绱犵殑绾㈣壊鍒嗛噺鏍规嵁楂樺害杩涜璋冩暣 + int red = (pixel >> 16) & 0xff; + int green = (pixel >> 8) & 0xff; + int blue = (pixel) & 0xff; + // 鍋囪楂樺害淇℃伅瀛樺偍鍦ㄧ豢鑹插垎閲忎腑锛屼綘鍙互鏍规嵁瀹為檯鎯呭喌璋冩暣 + //height = -10000 + ((R * 256 * 256 + G * 256 + B) * 0.1) + int heightValue = (int) (-10000 + ((red * 256 * 256 + green * 256 + blue) * 0.1)); + System.out.println(heightValue); + // 绠�鍗曞湴灏嗙孩鑹插垎閲忔牴鎹珮搴﹀�艰繘琛岃皟鏁达紝渚嬪锛岃秺楂樿秺绾� + // 纭繚绾㈣壊鍒嗛噺涓嶈秴杩� 255 + red = Math.min(red, 255); + green = Math.min(green, 255); + blue = Math.min(blue, 255); + // 閲嶆柊缁勫悎 ARGB 鍊� + Color color = new Color(red, green, blue); + System.out.printf("Pixel (%d, %d): R=%d, G=%d, B=%d%n", x, y, red, green, blue); + int newRgb = color.getRGB(); + // 灏嗗鐞嗗悗鐨勫儚绱犻鑹插�艰缃埌鏂扮殑 PNG 鍥惧儚涓� + pngImage.setRGB(x, y, newRgb); + } + } + // 淇濆瓨涓� PNG 鍥惧儚 + ImageIO.write(pngImage, "png", new File(outputPngFilePath)); + } catch (IOException e) { + e.printStackTrace(); + } + } +} diff --git a/src/main/java/com/se/nsl/utils/ZarrUtils.java b/src/main/java/com/se/nsl/utils/ZarrUtils.java new file mode 100644 index 0000000..ddba8d6 --- /dev/null +++ b/src/main/java/com/se/nsl/utils/ZarrUtils.java @@ -0,0 +1,90 @@ +package com.se.nsl.utils; + +import com.bc.zarr.ArrayParams; +import com.bc.zarr.DataType; +import com.bc.zarr.ZarrArray; +import com.bc.zarr.ZarrGroup; +import com.se.nsl.domain.vo.StationRainVo; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.text.SimpleDateFormat; +import java.util.List; + +public class ZarrUtils { + + public static void saveZarrRainfall(String path, List<StationRainVo> stationRainVos){ + try { + // 鑾峰彇 CSV 鍒楁暟 + int numColumns = 5; + // 璁$畻 CSV 琛屾暟 + int numRows = stationRainVos.size(); + // 鍒涘缓 Zarr 缁� + Path zarrPath = Paths.get(path); + ZarrGroup zarrGroup = ZarrGroup.create(zarrPath); + // 瀹氫箟 Zarr 鏁扮粍鐨勭淮搴� + int[] shape = {numRows, numColumns}; + // 瀹氫箟鍒嗗潡澶у皬 + int check=stationRainVos.size(); + int[] chunks = {check}; + int[] offset = {0}; + // 鍒涘缓 Zarr 鏁扮粍 + ArrayParams params = new ArrayParams(); + params.shape(chunks); + params.chunks(chunks); + params.dataType(DataType.f4); + ZarrArray zarrArray = zarrGroup.createArray("rainfall", params); + int rowIndex = 0; + double[] values=new double[stationRainVos.size()]; + for (StationRainVo record : stationRainVos) { +// double value = record.getRainfall(); +// zarrArray.write(value); + values[rowIndex]=record.getRainfall(); + rowIndex++; + } + zarrArray.write(values,chunks,offset); + // 鍏抽棴 Zarr 缁� + //zarrGroup.close(); + } catch (Exception e) { + e.printStackTrace(); + } + } + + public static void saveZarrTime(String path, List<StationRainVo> stationRainVos){ + try { + // 鑾峰彇 CSV 鍒楁暟 + int numColumns = 5; + // 璁$畻 CSV 琛屾暟 + int numRows = stationRainVos.size(); + // 鍒涘缓 Zarr 缁� + Path zarrPath = Paths.get(path); + ZarrGroup zarrGroup = ZarrGroup.create(zarrPath); + // 瀹氫箟 Zarr 鏁扮粍鐨勭淮搴� + int[] shape = {numRows, numColumns}; + // 瀹氫箟鍒嗗潡澶у皬 
+ int check=stationRainVos.size(); + int[] chunks = {check}; + int[] offset = {0}; + // 鍒涘缓 Zarr 鏁扮粍 + ArrayParams params = new ArrayParams(); + params.shape(chunks); + params.chunks(chunks); + params.dataType(DataType.i4); + ZarrArray zarrArray = zarrGroup.createArray("time", params); + int rowIndex = 0; + SimpleDateFormat sdf = new SimpleDateFormat("yyyy/MM/dd HH:mm"); + long[] values=new long[stationRainVos.size()]; + for (StationRainVo record : stationRainVos) { + values[rowIndex]=sdf.parse(record.getDatetime()).getTime()/1000; +// long value = sdf.parse(record.getDatetime()).getTime()/1000; +// zarrArray.write(value); + rowIndex++; + } + zarrArray.write(values,chunks,offset); + // 鍏抽棴 Zarr 缁� + //zarrGroup.close(); + } catch (Exception e) { + e.printStackTrace(); + } + } + +} diff --git a/src/main/java/com/se/nsl/utils/ZipUtils.java b/src/main/java/com/se/nsl/utils/ZipUtils.java new file mode 100644 index 0000000..f05e207 --- /dev/null +++ b/src/main/java/com/se/nsl/utils/ZipUtils.java @@ -0,0 +1,171 @@ +package com.se.nsl.utils; + +import cn.hutool.core.util.CharsetUtil; +import cn.hutool.core.util.ZipUtil; + +import java.io.*; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.ArrayList; +import java.util.Enumeration; +import java.util.List; +import java.util.zip.ZipEntry; +import java.util.zip.ZipFile; +import java.util.zip.ZipOutputStream; + +/** + * zip + */ +public class ZipUtils { + // 瑙e帇zip鏍煎紡 + public static void unzip(String zipFilePath, String destDir) { + //鎵�鏈夌殑鏂囦欢璺緞 + List<String> filePaths = new ArrayList<>(); + + File destDirectory = new File(destDir); + if (!destDirectory.exists()) { + if (!destDirectory.mkdirs()) { + } + } + try (ZipFile zipFile = new ZipFile(zipFilePath)) { + Enumeration<? 
+
+            while (entries.hasMoreElements()) {
+                ZipEntry entry = entries.nextElement();
+                String filePath = destDir + File.separator + entry.getName();
+                // Make sure the parent directory of the target path exists
+                File entryDestination = new File(filePath);
+                File parentDirectory = entryDestination.getParentFile();
+                if (parentDirectory != null && !parentDirectory.exists()) {
+                    parentDirectory.mkdirs();
+                }
+                if (!entry.isDirectory()) {
+                    // Extract the file
+                    try (InputStream is = zipFile.getInputStream(entry);
+                         BufferedOutputStream bos = new BufferedOutputStream(new FileOutputStream(filePath))) {
+                        byte[] bytesIn = new byte[4096];
+                        int read;
+                        while ((read = is.read(bytesIn)) != -1) {
+                            bos.write(bytesIn, 0, read);
+                        }
+                    }
+                    // Record the extracted file path
+                    filePaths.add(filePath);
+                } else {
+                    // Directory entries were already created by the mkdirs call above
+                }
+            }
+        } catch (Exception e) {
+            e.printStackTrace();
+            System.out.println("Retrying with GBK entry-name encoding");
+            try {
+                File file = ZipUtil.unzip(zipFilePath, destDir, CharsetUtil.CHARSET_GBK);
+                File[] file1 = file.listFiles();
+                if (file1 != null) {
+                    for (int i = 0; i < file1.length; i++) {
+                        filePaths.add(file1[i].getAbsolutePath());
+                    }
+                }
+            } catch (Exception ex) {
+                ex.printStackTrace();
+            }
+        }
+    }
+
+    public static void zip(String sourceDirPath, String zipFilePath) throws IOException {
+        Path sourceDir = Paths.get(sourceDirPath);
+        try (ZipOutputStream zipOutputStream = new ZipOutputStream(new FileOutputStream(zipFilePath));
+             Stream<Path> paths = Files.walk(sourceDir)) {
+            paths.forEach(path -> {
+                try {
+                    String zipEntryName = sourceDir.relativize(path).toString();
+                    if (Files.isDirectory(path)) {
+                        // For directories, create an empty ZIP directory entry (skip the root itself)
+                        if (!zipEntryName.isEmpty()) {
+                            zipOutputStream.putNextEntry(new ZipEntry(zipEntryName + "/"));
+                        }
+                    } else {
+                        // For regular files, copy the contents into the ZIP
+                        zipOutputStream.putNextEntry(new ZipEntry(zipEntryName));
+                        Files.copy(path, zipOutputStream);
+                    }
+                    zipOutputStream.closeEntry();
+                } catch (IOException e) {
+                    e.printStackTrace();
+                }
+            });
+        }
+    }
+
+    public static void toZarr(String sourceDir, String zipFile) {
+        try {
+            zip(sourceDir, zipFile);
+            System.out.println("Folder compressed to: " + zipFile);
+            updateFileSuffix(zipFile, "zarr");
+            deleteFile(sourceDir);
+        } catch (IOException e) {
+            e.printStackTrace();
+        }
+    }
+
+    public static void updateFileSuffix(String filePath, String newSuffix) {
+        // Target file
+        File file = new File(filePath);
+
+        // Check whether the file exists
+        if (file.exists()) {
+            // Split the file name into base name and suffix
+            String fileName = file.getName();
+            int dotIndex = fileName.lastIndexOf('.');
+            if (dotIndex > 0) {
+                String name = fileName.substring(0, dotIndex);
+                // Build the new file name
+                String newFileName = name + '.' + newSuffix;
+
+                // Rename the file
+                File newFile = new File(file.getParent(), newFileName);
+                if (file.renameTo(newFile)) {
+                    System.out.println("File renamed to: " + newFile.getName());
+                } else {
+                    System.out.println("Failed to rename file: " + filePath);
+                }
+            } else {
+                System.out.println("File has no suffix: " + filePath);
+            }
+        } else {
+            System.out.println("File does not exist: " + filePath);
+        }
+    }
+
+    public static void deleteFile(String path) {
+        // Path of the target folder
+        File directory = new File(path);
+        // Check whether the folder exists
+        if (!directory.exists()) {
+            System.out.println("Folder does not exist: " + path);
+            return;
+        }
+        // Delete the folder and all of its contents
+        if (deleteDirectory(directory)) {
+            System.out.println("Folder deleted: " + path);
+        } else {
+            System.out.println("Failed to delete folder: " + path);
+        }
+    }
+
+    // Recursively delete a folder and its contents
+    public static boolean deleteDirectory(File directory) {
+        if (directory.isDirectory()) {
+            File[] files = directory.listFiles();
+            if (files != null) {
+                for (File file : files) {
+                    // Recurse into children first
+                    deleteDirectory(file);
+                }
+            }
+        }
+        // Delete the current file or (now empty) folder
+        return directory.delete();
+    }
+
+}
diff --git a/src/main/resources/application-prod.yml b/src/main/resources/application-prod.yml
new file mode 100644
index 0000000..a21b34c
--- /dev/null
+++ b/src/main/resources/application-prod.yml
@@ -0,0 +1,149 @@
+server:
+  port: 8079
+  servlet:
+    context-path: /api
+
+spring:
+  mvc:
+    pathmatch:
+      matching-strategy: ant_path_matcher
+  application:
+    name: SimuServer
+  datasource:
+    name: prod
+    # Basic JDBC settings (&currentSchema=public)
+    url: jdbc:postgresql://127.0.0.1:5432/ai?useAffectedRows=true
+    username: postgres
+    password: postgres
+    driver-class-name: org.postgresql.Driver
+    platform: POSTGRESQL
+    type: com.alibaba.druid.pool.DruidDataSource
+    # Monitoring filters: stat = statistics, log4j = logging, wall = SQL injection defense
+    filters: stat,wall,log4j
+    # Enable mergeSql via connectProperties; record SQL slower than 5000 ms
+    connectionProperties: druid.stat.mergeSql=true;druid.stat.slowSqlMillis=5000
+    # Whether to reclaim connections held longer than the time limit
+    removeAbandoned: true
+    # Timeout in seconds; 180 s = 3 minutes
+    removeAbandonedTimeout: 180
+    # Log an error when an abandoned connection is closed
+    logAbandoned: true
+    # Connection pool settings
+    druid:
+      ## Initial, minimum and maximum pool size
+      initial-size: 5
+      min-idle: 5
+      max-active: 100
+      ## Maximum wait time when acquiring a connection, in ms
+      max-wait: 60000
+      # Interval between checks for idle connections that should be closed, in ms
+      time-between-eviction-runs-millis: 60000
+      # Minimum time a connection stays in the pool before it may be evicted, in ms
+      min-evictable-idle-time-millis: 300000
+      max-pool-prepared-statement-per-connection-size: 50
+      pool-prepared-statements: true
+      # SQL statement used to validate connections
+      validation-query: SELECT 1
+      # With testOnBorrow false, idle connections are validated before being handed out
+      test-while-idle: true
+      # If true (default false), validate a connection when it is borrowed from the pool
+      test-on-borrow: false
+      # If true (default false), validate a connection when it is returned to the pool
+      test-on-return: false
+      # Servlet configuration
+      stat-view-servlet:
+        # SQL monitoring switch
+        enabled: true
+        # Path of the built-in monitoring pages; the start page is /druid/index.html
+        url-pattern: /druid/*
+        # Whether the statistics may be cleared and recomputed (true: allowed, false: not allowed)
+        reset-enable: false
+        # Credentials for the monitoring pages
+        login-username: admin
+        login-password: ad_!Druid!_min
+        # Allowed addresses; if allow is not set or empty, all access is allowed
+        allow:
+        # Denied addresses; deny takes precedence over allow
+        deny:
+
+mybatis-plus:
+  config-location: classpath:mybatis.xml
+  mapper-locations: classpath:mapper/**/*.xml
+
+knife4j:
+  # Whether knife4j is enabled
+  enabled: true
+  # Request path prefix
+  pathMapping:
+  # Whether the enhanced mode is enabled
+  enable: true
+
+pagehelper:
+  autoDialect: true
+  autoRuntimeDialect: true
+  reasonable: true
+  supportMethodsArguments: true
+  params: count=countSql
+
+remote: + maxTotalConnect: 0 + maxConnectPerRoute: 1000 + connectTimeout: -1 + readTimeout: -1 + +# 椤圭洰瀹炰綋搴撴湇鍔′腑閰嶇疆 +app-server: + # 鍏挜鍦板潃 + publicKeyUrl: http://106.120.22.26:8024/account-service/security/publickey + # 鐧诲綍鍦板潃 + loginUrl: http://106.120.22.26:8024/account-service/security/login + # 鏌ヨ鏁版嵁搴撳湴鍧� + getDbUrl: http://106.120.22.26:8024/geo-service/entitydb/list/canview + # 鏌ヨ涓嶅悓鏁版嵁搴撶被鍨嬩笅鐨勬暟鎹湴鍧� + queryUrl: http://106.120.22.26:8024/geo-service/entitydbdata/layer/query + + + + +config: + ver: 0.2 + cacheTime: 60 + # Gdal椹卞姩鐩綍 + gdalPath: H:/simu/release-1928-x64-dev/release-1928-x64/bin + #inPath: D:\simu\in + inPath: H:\simu\uwsolver + outPath: H:\simu\out + host: http://106.120.22.26:8024/ + user: admin + pwd: admin + dbName: 閫氬窞姘村埄 + pageSize: 2000 + layerNames: 绠$偣,绠$嚎,寤虹瓚鐗� + shpNames: + - pipeline-point.shp + - pipeline-conduit.shp + - buildings_inside.shp + junctionName: pipeline-point-junctions.shp + junctionFilter: ysjjd=1 + demName: DEM + demType: .tif + demFile: DEM.tif + zoneName: studyzone.shp + barrierName: barrier.shp + sysFields: _ext_attr,_meta_id,_attach_files,_x,_y,_z,_is_latest,_data_type,operatetime,operator,shape_length,shape_area + raingage: RainGage.dat + rainStation: Tongzhou + rainPeriod: 10 + # 娴侀噺鍗曚綅锛孡PS锛堝崌/绉掞級銆丆MS锛堢珛鏂圭背/绉�)銆丆FS(绔嬫柟鑻卞昂/绉�) + flowUnits: CMS + solverBat: H:\simu\uwsolver\run_solver.bat + sww2tifBat: H:\simu\uwsolver\sww2tif.bat + sizes: 64,128,256,512,1024,2048 + # 杈撳嚭鏂囦欢 + terrainFile: DEM.tif + buildingFile: buildings_inside.shp + # buildingKey: seid + buildingKey: KJSFBM + waterPath: depth + flowPath: velocity + copyTif: false diff --git a/src/main/resources/application-zyy.yml b/src/main/resources/application-zyy.yml new file mode 100644 index 0000000..0c609a4 --- /dev/null +++ b/src/main/resources/application-zyy.yml @@ -0,0 +1,156 @@ +server: + port: 8079 + servlet: + context-path: / + +spring: + mvc: + pathmatch: + matching-strategy: ant_path_matcher + application: + name: SimuServer + datasource: + name: prod + # JDBC 鍩烘湰閰嶇疆 ¤tSchema=public + url: jdbc:postgresql://127.0.0.1:5432/ai?useAffectedRows=true + username: postgres + password: postgres + driver-class-name: org.postgresql.Driver + platform: POSTGRESQL + type: com.alibaba.druid.pool.DruidDataSource + # 閰嶇疆鐩戞帶缁熻鎷︽埅鐨刦ilters锛宻tat:鐩戞帶缁熻銆乴og4j锛氭棩蹇楄褰曘�亀all锛氶槻寰ql娉ㄥ叆 + filters: stat,wall,log4j + # 閫氳繃connectProperties灞炴�ф潵鎵撳紑mergeSql鍔熻兘锛涙參SQL璁板綍 + connectionProperties: druid.stat.mergeSql=true;druid.stat.slowSqlMillis=5000 + # 瓒呰繃鏃堕棿闄愬埗鏄惁鍥炴敹 + removeAbandoned: true + # 瓒呮椂鏃堕棿锛涘崟浣嶄负绉掋��180绉�=3鍒嗛挓 + removeAbandonedTimeout: 180 + # 鍏抽棴 abanded 杩炴帴鏃惰緭鍑洪敊璇棩蹇� + logAbandoned: true + # 閰嶇疆杩炴帴姹犱俊鎭� + druid: + ## 鍒濆鍖栧ぇ灏忥紝鏈�灏忥紝鏈�澶� + initial-size: 5 + min-idle: 5 + max-active: 100 + ## 閰嶇疆鑾峰彇杩炴帴绛夊緟瓒呮椂鐨勬椂闂达紝ms + max-wait: 60000 + # 閰嶇疆闂撮殧澶氫箙鎵嶈繘琛屼竴娆℃娴嬶紝妫�娴嬮渶瑕佸叧闂殑绌洪棽杩炴帴锛屽崟浣嶆槸姣 + time-between-eviction-runs-millis: 60000 + # 閰嶇疆涓�涓繛鎺ュ湪姹犱腑鏈�灏忕敓瀛樼殑鏃堕棿锛屽崟浣嶆槸姣 + min-evictable-idle-time-millis: 300000 + max-pool-prepared-statement-per-connection-size: 50 + pool-prepared-statements: true + # 鐢ㄦ潵娴嬭瘯杩炴帴鏄惁鍙敤鐨凷QL璇彞 + validation-query: SELECT 1 + # 搴旂敤鍚戣繛鎺ユ睜鐢宠杩炴帴锛屽苟涓攖estOnBorrow涓篺alse鏃讹紝杩炴帴姹犲皢浼氬垽鏂繛鎺ユ槸鍚﹀浜庣┖闂茬姸鎬侊紝濡傛灉鏄紝鍒欓獙璇佽繖鏉¤繛鎺ユ槸鍚﹀彲鐢� + test-while-idle: true + # 濡傛灉涓簍rue锛岄粯璁ゆ槸false锛屽簲鐢ㄥ悜杩炴帴姹犵敵璇疯繛鎺ユ椂锛岃繛鎺ユ睜浼氬垽鏂繖鏉¤繛鎺ユ槸鍚︽槸鍙敤鐨� + test-on-borrow: false + # 濡傛灉涓簍rue锛堥粯璁alse锛夛紝褰撳簲鐢ㄤ娇鐢ㄥ畬杩炴帴锛岃繛鎺ユ睜鍥炴敹杩炴帴鐨勬椂鍊欎細鍒ゆ柇璇ヨ繛鎺ユ槸鍚﹁繕鍙敤 + test-on-return: false + # servlet閰嶇疆 + stat-view-servlet: + # sql鐩戞帶寮�鍏� + enabled: true + # 璁块棶鍐呯疆鐩戞帶椤甸潰鐨勮矾寰勶紝鍐呯疆鐩戞帶椤甸潰鐨勯椤垫槸/druid/index.html + url-pattern: /druid/* + # 鏄惁鍏佽娓呯┖缁熻鏁版嵁锛岄噸鏂拌绠� 
true:鍏佽 false:涓嶅厑璁� + reset-enable: false + # 閰嶇疆鐩戞帶椤甸潰璁块棶璐﹀彿瀵嗙爜 + login-username: admin + login-password: ad_!Druid!_min + # 鍏佽璁块棶鐨勫湴鍧�锛屽鏋渁llow娌℃湁閰嶇疆鎴栬�呬负绌猴紝鍒欏厑璁告墍鏈夎闂� + allow: + # 鎷掔粷璁块棶鐨勫湴鍧�锛宒eny浼樺厛浜巃llow + deny: + +mybatis-plus: + config-location: classpath:mybatis.xml + mapper-locations: classpath:mapper/**/*.xml + +knife4j: + # 鏄惁寮�鍚� + enabled: true + # 璇锋眰鍓嶇紑 + pathMapping: + # 鏄惁寮�鍚寮烘ā寮� + enable: true + +pagehelper: + autoDialect: true + autoRuntimeDialect: true + reasonable: true + supportMethodsArguments: true + params: count=countSql + +remote: + maxTotalConnect: 0 + maxConnectPerRoute: 1000 + connectTimeout: -1 + readTimeout: -1 + +# 椤圭洰瀹炰綋搴撴湇鍔′腑閰嶇疆 +app-server: + # 鍏挜鍦板潃 + publicKeyUrl: http://106.120.22.26:8024/account-service/security/publickey + # 鐧诲綍鍦板潃 + loginUrl: http://106.120.22.26:8024/account-service/security/login + # 鏌ヨ鏁版嵁搴撳湴鍧� + getDbUrl: http://106.120.22.26:8024/geo-service/entitydb/list/canview + # 鏌ヨ涓嶅悓鏁版嵁搴撶被鍨嬩笅鐨勬暟鎹湴鍧� + queryUrl: http://106.120.22.26:8024/geo-service/entitydbdata/layer/query + +# 妯℃嫙椤圭洰 +simu-app: + # 椤圭洰妯℃嫙鏂囦欢涓婁紶瀛樻斁鐨勪綅缃� + filePath: D:\simu\files + +config: + ver: 0.2 + cacheTime: 60 + # Gdal椹卞姩鐩綍 + gdalPath: D:\gdal\release-1928-x64-dev\release-1928-x64\bin + #inPath: D:\simu\in + inPath: D:\uwsolver + outPath: D:\out + # + host: http://106.120.22.26:8024/ + user: admin + pwd: admin + # host: http://106.120.22.26:8013/ + # user: WUWEIWEI + # pwd: WUWEIWEI + dbName: 閫氬窞姘村埄 + pageSize: 2000 + layerNames: 绠$偣,绠$嚎,寤虹瓚鐗� + shpNames: + - pipeline-point.shp + - pipeline-conduit.shp + - buildings_inside.shp + junctionName: pipeline-point-junctions.shp + junctionFilter: ysjjd=1 + demName: DEM + demType: .tif + demFile: DEM.tif + zoneName: studyzone.shp + barrierName: barrier.shp + sysFields: _ext_attr,_meta_id,_attach_files,_x,_y,_z,_is_latest,_data_type,operatetime,operator,shape_length,shape_area + raingage: RainGage.dat + rainStation: Tongzhou + rainPeriod: 10 + # 娴侀噺鍗曚綅锛孡PS锛堝崌/绉掞級銆丆MS锛堢珛鏂圭背/绉�)銆丆FS(绔嬫柟鑻卞昂/绉�) + flowUnits: CMS + solverBat: D:\uwsolver\run_solver.bat + sww2tifBat: D:\uwsolver\sww2tif.bat + sizes: 64,128,256,512,1024,2048 + # 杈撳嚭鏂囦欢 + terrainFile: DEM.tif + buildingFile: buildings_inside.shp + # buildingKey: seid + buildingKey: KJSFBM + waterPath: depth + flowPath: velocity + copyTif: false + tifPath: D:\鍩庡競鍐呮稘\sem\tongzhou_1m_tif\tongzhou_raster_4548_1m_clip_river_fill.tif diff --git a/src/main/resources/application.yml b/src/main/resources/application.yml new file mode 100644 index 0000000..85767f4 --- /dev/null +++ b/src/main/resources/application.yml @@ -0,0 +1,12 @@ +spring: + profiles: + active: prod + servlet: + multipart: + # 璁剧疆鍗曚釜鏂囦欢鐨勬渶澶т笂浼犲ぇ灏� + max-file-size: 50MB + # 璁剧疆璇锋眰鐨勬渶澶т笂浼犲ぇ灏忥紙濡傛灉鍖呭惈澶氫釜鏂囦欢锛� + max-request-size: 100MB +# autoconfigure: +# exclude: +# - org.springframework.boot.autoconfigure.jdbc.DataSourceAutoConfiguration \ No newline at end of file diff --git a/src/main/resources/config.json b/src/main/resources/config.json new file mode 100644 index 0000000..a0bfba1 --- /dev/null +++ b/src/main/resources/config.json @@ -0,0 +1,15 @@ +{ + "source_crs": "EPSG:4326", + "semfile_node": "node.sem", + "semfile_link": "link.sem", + "semfile_grid": "grid.sem", + "semfile_river": "river.sem", + "semfile_terrain": "terrain.sem", + "semfile_raingage": "raingage.sem", + "semfile_landuse": "landuse.sem", + "start_datetime": "07/31/2023 00:00:00", + "end_datetime": "07/31/2023 08:00:00", + "report_step": "00:01:00", + "mesh_size": 10.0, + "pixel_size":3.0 +} \ No newline at end of file diff --git 
a/src/main/resources/grid.json b/src/main/resources/grid.json new file mode 100644 index 0000000..f3ddb4c --- /dev/null +++ b/src/main/resources/grid.json @@ -0,0 +1 @@ +{"type":"CityJSON","version":"1.0","metadata":{"datasetPointOfContact":{}},"Dynamizers":[{"gmlId":"UUID_f5a04e3c-ba56-44d0-922d-de80f208267a"}],"CityObjects":{"UUID_f5a04e3c-ba56-44d0-922d-de80f208267a":{"type":"+Grid","geometry":[{"type":"MultiSurface","boundaries":[[[0,1,2,3]]],"lod":0}]}},"vertices":[],"appearance":{"materials":[],"textures":[],"vertices-texture":[]},"geometry-templates":{"templates":[],"vertices-templates":[]}} \ No newline at end of file diff --git a/src/main/resources/landuse.json b/src/main/resources/landuse.json new file mode 100644 index 0000000..02ffbef --- /dev/null +++ b/src/main/resources/landuse.json @@ -0,0 +1 @@ +{"type":"CityJSON","metadata":{"referenceSystem":"urn:ogc:def:crs:EPSG::4326","datasetPointOfContact":{}},"version":"1.0","CityObjects":{},"vertices":[]} \ No newline at end of file diff --git a/src/main/resources/layerQueryDetailParams.json b/src/main/resources/layerQueryDetailParams.json new file mode 100644 index 0000000..0c76911 --- /dev/null +++ b/src/main/resources/layerQueryDetailParams.json @@ -0,0 +1 @@ +{"dbid":"85257774fdb64e5f99f6778696cad02a","querytype":"polyline","count":100000,"layerid":"1e677d48-8dff-4975-b9a0-c16500193629","where":"","containCount":false,"token":"54bc48d424ee49c6b07c1c6f91a583ea"} \ No newline at end of file diff --git a/src/main/resources/layerQueryParams.json b/src/main/resources/layerQueryParams.json new file mode 100644 index 0000000..295c637 --- /dev/null +++ b/src/main/resources/layerQueryParams.json @@ -0,0 +1 @@ +{"dbid":"85257774fdb64e5f99f6778696cad02a","layerid":"1e677d48-8dff-4975-b9a0-c16500193629","token":"54bc48d424ee49c6b07c1c6f91a583ea","start":1,"count":100000,"like":null,"outfields":"seid,bsm","containCount":true,"geometry":{"type":"Polygon","coordinates":[]}} \ No newline at end of file diff --git a/src/main/resources/layerQueryPointParams.json b/src/main/resources/layerQueryPointParams.json new file mode 100644 index 0000000..b621423 --- /dev/null +++ b/src/main/resources/layerQueryPointParams.json @@ -0,0 +1 @@ +{"dbid":"85257774fdb64e5f99f6778696cad02a","layerid":"ede07887-7b58-47b4-ae96-b8a6a30e9c11","token":"54bc48d424ee49c6b07c1c6f91a583ea","start":1,"count":100000,"like":null,"outfields":"seid,enti_uuid","containCount":true,"geometry":{"type":"Polygon","coordinates":[]}} \ No newline at end of file diff --git a/src/main/resources/linemodule.json b/src/main/resources/linemodule.json new file mode 100644 index 0000000..9780e6e --- /dev/null +++ b/src/main/resources/linemodule.json @@ -0,0 +1 @@ +{"type":"CityJSON","version":"1.0","CityObjects":{},"vertices":[]} \ No newline at end of file diff --git a/src/main/resources/logback-spring.xml b/src/main/resources/logback-spring.xml new file mode 100644 index 0000000..b2f26c9 --- /dev/null +++ b/src/main/resources/logback-spring.xml @@ -0,0 +1,106 @@ +<?xml version="1.0" encoding="UTF-8"?> +<!-- scan 閰嶇疆鏂囦欢濡傛灉鍙戠敓鏀瑰彉锛屽皢浼氳閲嶆柊鍔犺浇 scanPeriod 妫�娴嬮棿闅旀椂闂�--> +<configuration scan="true" scanPeriod="60 seconds" debug="false"> + <contextName>SimuServer</contextName> + + <!-- 鏂囦欢鍚嶇О --> + <property name="log.name.info" value="info" /> + <property name="log.name.error" value="error" /> + + <!-- info 鍦板潃 --> + <property name="log.path.info" value="logs/" /> + <property name="log.file.info" value="logs/info.log" /> + + <!-- error,閿欒璺緞 --> + <property name="log.path.error" value="logs/" /> + 
<property name="log.file.error" value="logs/error.log" /> + <include resource="org/springframework/boot/logging/logback/base.xml"/> + + <!-- 鏅�氭棩蹇� --> + <appender name="INFO_FILE" class="ch.qos.logback.core.rolling.RollingFileAppender"> + <file>${log.file.info}</file> + <!-- 寰幆鏀跨瓥锛氬熀浜庢椂闂村垱寤烘棩蹇楁枃浠� --> + <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy"> + <!-- 鏃ュ織鍛藉悕:鍗曚釜鏂囦欢澶т簬256MB 鎸夌収鏃堕棿+鑷i 鐢熸垚log鏂囦欢 --> + <fileNamePattern>${log.path.info}${log.name.info}-%d{yyyy-MM-dd}.%i.log</fileNamePattern> + <timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP"> + <maxFileSize>256MB</maxFileSize> + </timeBasedFileNamingAndTriggeringPolicy> + <!-- 鏈�澶т繚瀛樻椂闂达細30澶�--> + <maxHistory>30</maxHistory> + </rollingPolicy> + <append>true</append> + <encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder"> + <charset>utf-8</charset> + <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} %-5level %logger Line:%-3L - %msg%n</pattern> + </encoder> + <!-- 鏃ュ織绾у埆杩囨护鍣� --> + <filter class="ch.qos.logback.classic.filter.LevelFilter"> + <!-- 杩囨护鐨勭骇鍒� --> + <level>INFO</level> + <!-- 鍖归厤鏃剁殑鎿嶄綔锛氭帴鏀讹紙璁板綍锛� --> + <onMatch>ACCEPT</onMatch> + <!-- 涓嶅尮閰嶆椂鐨勬搷浣滐細鎷掔粷锛堜笉璁板綍锛� --> + <onMismatch>DENY</onMismatch> + </filter> + </appender> + + <!-- 閿欒鏃ュ織 --> + <appender name="ERROR_FILE" class="ch.qos.logback.core.rolling.RollingFileAppender"> + <file>${log.file.error}</file> + <!-- 寰幆鏀跨瓥锛氬熀浜庢椂闂村垱寤烘棩蹇楁枃浠� --> + <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy"> + <!-- 鏃ュ織鍛藉悕:鍗曚釜鏂囦欢澶т簬128MB 鎸夌収鏃堕棿+鑷i 鐢熸垚log鏂囦欢 --> + <fileNamePattern>${log.path.error}${log.name.error}-%d{yyyy-MM-dd}.%i.log</fileNamePattern> + <timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP"> + <maxFileSize>128MB</maxFileSize> + </timeBasedFileNamingAndTriggeringPolicy> + <!-- 鏈�澶т繚瀛樻椂闂达細180澶�--> + <maxHistory>180</maxHistory> + </rollingPolicy> + <append>true</append> + <!-- 鏃ュ織鏍煎紡 --> + <encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder"> + <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} %-5level %logger Line:%-3L - %msg%n</pattern> + <charset>utf-8</charset> + </encoder> + <!-- 鏃ュ織绾у埆杩囨护鍣� --> + <filter class="ch.qos.logback.classic.filter.LevelFilter"> + <!-- 杩囨护鐨勭骇鍒� --> + <level>ERROR</level> + <!-- 鍖归厤鏃剁殑鎿嶄綔锛氭帴鏀讹紙璁板綍锛� --> + <onMatch>ACCEPT</onMatch> + <!-- 涓嶅尮閰嶆椂鐨勬搷浣滐細鎷掔粷锛堜笉璁板綍锛� --> + <onMismatch>DENY</onMismatch> + </filter> + </appender> + + <!-- 鎺у埗鍙� --> + <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender"> + <!-- 鏃ュ織鏍煎紡 --> + <encoder> + <charset>utf-8</charset> + <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} %-5level %logger Line:%-3L - %msg%n</pattern> + </encoder> + <!--姝ゆ棩蹇梐ppender鏄负寮�鍙戜娇鐢紝鍙厤缃渶搴曠骇鍒紝鎺у埗鍙拌緭鍑虹殑鏃ュ織绾у埆鏄ぇ浜庢垨绛変簬姝ょ骇鍒殑鏃ュ織淇℃伅--> + <filter class="ch.qos.logback.classic.filter.ThresholdFilter"> + <!-- 鍙湁杩欎釜鏃ュ織鏉冮檺鎵嶈兘鐪嬶紝sql璇彞 --> + <level>DEBUG</level> + </filter> + </appender> + + <!-- 杈撳嚭sql鏃ュ織:ERROR,INFO,DEBUG --> + <logger name="com.apache.ibatis" level="INFO"/> + + <!-- additivity:鏄惁鍦ㄧ埗(杩欓噷涓簉oot鑺傜偣)杈撳嚭, 榛樿 true; --> + <logger name="com.se.nsl" level="INFO" additivity="true"> + <appender-ref ref="INFO_FILE"/> + <appender-ref ref="ERROR_FILE"/> + </logger> + + <!--20241209 瑙e喅鏃ュ織閲嶅鎵撳嵃闂 --> + <!--<root level="INFO"> + <appender-ref ref="STDOUT" /> + </root>--> + +</configuration> diff --git a/src/main/resources/mapper/SimuMapper.xml b/src/main/resources/mapper/SimuMapper.xml new file mode 100644 index 0000000..a4029ad --- /dev/null +++ b/src/main/resources/mapper/SimuMapper.xml @@ -0,0 +1,7 @@ 
+<?xml version="1.0" encoding="UTF-8" ?> +<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd"> +<mapper namespace="com.se.nsl.mapper.SimuMapper"> + <select id="selectMaxId" resultType="java.lang.Integer"> + select coalesce(max(id), 0) from bs.simu; + </select> +</mapper> \ No newline at end of file diff --git a/src/main/resources/mybatis.xml b/src/main/resources/mybatis.xml new file mode 100644 index 0000000..7efed65 --- /dev/null +++ b/src/main/resources/mybatis.xml @@ -0,0 +1,37 @@ +<?xml version="1.0" encoding="UTF-8" ?> +<!DOCTYPE configuration + PUBLIC "-//mybatis.org//DTD Config 3.0//EN" + "http://mybatis.org/dtd/mybatis-3-config.dtd"> +<configuration> + <settings> + <!--閰嶇疆褰卞搷鎵�鏈夋槧灏勫櫒涓厤缃紦瀛�--> + <setting name="cacheEnabled" value="true"/> + <!--寤惰繜鍔犺浇--> + <setting name="lazyLoadingEnabled" value="true"/> + <!--鏄惁鍏佽鍗曚竴璇彞杩斿洖澶氱粨鏋滈泦--> + <setting name="multipleResultSetsEnabled" value="true"/> + <!--浣跨敤鍒楁爣绛句唬鏇垮垪鍚�--> + <setting name="useColumnLabel" value="true"/> + <!--鍏佽JDBC 鏀寔鑷姩鐢熸垚涓婚敭--> + <setting name="useGeneratedKeys" value="false"/> + <!--璁剧疆SQL瓒呮椂鏃堕棿锛岀--> + <setting name="defaultStatementTimeout" value="360"/> + <!--榛樿杩斿洖鐨勬潯鏁伴檺鍒�--> + <setting name="defaultFetchSize" value="1000"/> + <!--鏄惁寮�鍚嚜鍔ㄩ┘宄板懡鍚嶈鍒欐槧灏勶紝鍗充粠缁忓吀鏁版嵁搴撳垪鍚� A_COLUMN 鍒扮粡鍏� Java 灞炴�у悕 aColumn 鐨勭被浼兼槧灏�--> + <setting name="mapUnderscoreToCamelCase" value="true"/> + <!--MyBatis 鍒╃敤鏈湴缂撳瓨鏈哄埗锛圠ocal Cache锛夐槻姝㈠惊鐜紩鐢紙circular references锛夊拰鍔犻�熻仈澶嶅祵濂楁熁璇€��--> + <setting name="localCacheScope" value="SESSION"/> + <!--褰撴病鏈変负鍙傛暟鎻愪緵鐗瑰畾鐨� JDBC 绫诲瀷鏃讹紝涓虹┖鍊兼寚瀹� JDBC 绫诲瀷--> + <setting name="jdbcTypeForNull" value="NULL"/> + <!--鎸囧畾鍝釜瀵硅薄鐨勬柟娉曡Е鍙戜竴娆″欢杩熷姞杞�--> + <setting name="lazyLoadTriggerMethods" value="equals,clone,hashCode,toString"/> + <!--鎸囧畾褰撶粨鏋滈泦涓�间负 null 鏃讹紝鏄惁璋冪敤鏄犲皠瀵硅薄鐨� setter锛坢ap 瀵硅薄鏃朵负 put锛夋柟娉�--> + <setting name="callSettersOnNulls" value="true"/> + <!--鎸囧畾 MyBatis 鎵�鐢ㄦ棩蹇楃殑鍏蜂綋瀹炵幇锛歀OG4J/LOG4J2/SLF4J/STDOUT_LOGGING/NO_LOGGING--> + <setting name="logImpl" value="NO_LOGGING" /> + </settings> + <typeAliases> + <package name="com.se.nsl.domain"/> + </typeAliases> +</configuration> \ No newline at end of file diff --git a/src/main/resources/pointmodule.json b/src/main/resources/pointmodule.json new file mode 100644 index 0000000..9780e6e --- /dev/null +++ b/src/main/resources/pointmodule.json @@ -0,0 +1 @@ +{"type":"CityJSON","version":"1.0","CityObjects":{},"vertices":[]} \ No newline at end of file diff --git a/src/main/resources/rainfallmodule.json b/src/main/resources/rainfallmodule.json new file mode 100644 index 0000000..f3b7363 --- /dev/null +++ b/src/main/resources/rainfallmodule.json @@ -0,0 +1 @@ +{"type":"CityJSON","version":"1.0","Dynamizers":[],"CityObjects":{},"vertices":[]} \ No newline at end of file diff --git a/src/main/resources/river.json b/src/main/resources/river.json new file mode 100644 index 0000000..2eb29f2 --- /dev/null +++ b/src/main/resources/river.json @@ -0,0 +1 @@ +{"type":"CityJSON","version":"1.0","metadata":{"referenceSystem":"urn:ogc:def:crs:EPSG::4326","datasetPointOfContact":{}},"CityObjects":{},"vertices":[],"appearance":{"materials":[],"textures":[],"vertices-texture":[]},"geometry-templates":{"templates":[],"vertices-templates":[]}} \ No newline at end of file diff --git a/src/main/resources/terrainmodule.json b/src/main/resources/terrainmodule.json new file mode 100644 index 0000000..fb5a9ea --- /dev/null +++ b/src/main/resources/terrainmodule.json @@ -0,0 +1 @@ 
+{"type":"CityJSON","version":"1.0","CityObjects":{"UUID_63b11762-6a6a-4d91-b497-6314fee1ce0f":{"type":"+Terrain","geometry":[{"type":"MultiSurface","boundaries":[[[0,1,2,3]]],"texture":{"visual":{"values":[[[0,0,1,2,3]]]}},"lod":0}]}},"appearance":{"textures":[{"type":"PNG","image":"appearance/terrain.png"}],"vertices-texture":[[0.0,0.0],[0.0,1.0],[1.0,1.0],[1.0,0.0]]}} \ No newline at end of file diff --git a/src/main/resources/win32-x86-64/blosc.dll b/src/main/resources/win32-x86-64/blosc.dll new file mode 100644 index 0000000..2093a88 --- /dev/null +++ b/src/main/resources/win32-x86-64/blosc.dll Binary files differ diff --git a/src/test/java/com/se/nsl/AppTest.java b/src/test/java/com/se/nsl/AppTest.java new file mode 100644 index 0000000..b3a8521 --- /dev/null +++ b/src/test/java/com/se/nsl/AppTest.java @@ -0,0 +1,28 @@ +package com.se.nsl; + +import org.junit.Test; +import org.springframework.boot.test.context.SpringBootTest; + +import com.alibaba.fastjson.JSONObject; +import java.io.File; +import java.io.IOException; +import java.net.*; +import java.nio.ByteBuffer; +import java.nio.channels.DatagramChannel; +import java.nio.charset.Charset; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.nio.file.StandardCopyOption; +import java.util.ArrayList; +import java.util.Calendar; +import java.util.List; + +@SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.DEFINED_PORT) +public class AppTest { + @Test + public void t1() { + System.out.println("t1..."); + } +} -- Gitblit v1.9.3