
Merge branch '20200908' of C:\Users\86159\git_repos\water-iot with conflicts.

pengdi@zoniot.com 4 years ago
parent
commit
5383c2c971
100 changed files with 3656 additions and 7 deletions
  1. 226 0
      data-processor/README.md
  2. BIN
      data-processor/img/UML.png
  3. BIN
      data-processor/img/flow-chart.png
  4. 62 0
      data-processor/pom.xml
  5. 42 0
      meter-reading-batch/pom.xml
  6. 18 0
      meter-reading-batch/src/main/java/com/huaxu/zoniot/MeterReadingBatchApplication.java
  7. 64 0
      meter-reading-batch/src/main/java/com/huaxu/zoniot/consumer/JobTaskConsumer.java
  8. 20 0
      meter-reading-batch/src/main/resources/application-batch-dev.properties
  9. 1 0
      meter-reading-batch/src/main/resources/application.properties
  10. 23 0
      meter-reading-batch/src/main/resources/logback-spring.xml
  11. 37 0
      meter-reading-common/target/classes/application-common-dev.properties
  12. BIN
      meter-reading-common/target/classes/com/huaxu/zoniot/common/ApplicationException.class
  13. BIN
      meter-reading-common/target/classes/com/huaxu/zoniot/common/Constants.class
  14. BIN
      meter-reading-common/target/classes/com/huaxu/zoniot/common/ResultStatus.class
  15. BIN
      meter-reading-common/target/classes/com/huaxu/zoniot/common/ServiceException.class
  16. BIN
      meter-reading-common/target/classes/com/huaxu/zoniot/common/TaskConstants.class
  17. BIN
      meter-reading-common/target/classes/com/huaxu/zoniot/common/TaskState.class
  18. BIN
      meter-reading-common/target/classes/com/huaxu/zoniot/config/SnowflakeIdWorkerConfig.class
  19. BIN
      meter-reading-common/target/classes/com/huaxu/zoniot/dao/JobTaskMapper.class
  20. BIN
      meter-reading-common/target/classes/com/huaxu/zoniot/dao/MeterReadRateMapper.class
  21. BIN
      meter-reading-common/target/classes/com/huaxu/zoniot/dao/MeterReadRecordMapper.class
  22. BIN
      meter-reading-common/target/classes/com/huaxu/zoniot/dao/WaterMeterMapper.class
  23. BIN
      meter-reading-common/target/classes/com/huaxu/zoniot/entity/JobResult.class
  24. BIN
      meter-reading-common/target/classes/com/huaxu/zoniot/entity/JobTask.class
  25. BIN
      meter-reading-common/target/classes/com/huaxu/zoniot/entity/MeasuringData.class
  26. BIN
      meter-reading-common/target/classes/com/huaxu/zoniot/entity/MeterReadRecord.class
  27. BIN
      meter-reading-common/target/classes/com/huaxu/zoniot/entity/WaterMeter.class
  28. BIN
      meter-reading-common/target/classes/com/huaxu/zoniot/service/JobTaskService.class
  29. BIN
      meter-reading-common/target/classes/com/huaxu/zoniot/service/MeterReadRateService.class
  30. BIN
      meter-reading-common/target/classes/com/huaxu/zoniot/service/MeterReadRecordService.class
  31. BIN
      meter-reading-common/target/classes/com/huaxu/zoniot/service/impl/JobTaskServiceImpl.class
  32. BIN
      meter-reading-common/target/classes/com/huaxu/zoniot/service/impl/MeterReadRateServiceImpl.class
  33. BIN
      meter-reading-common/target/classes/com/huaxu/zoniot/service/impl/MeterReadRecordServiceImpl$1.class
  34. BIN
      meter-reading-common/target/classes/com/huaxu/zoniot/service/impl/MeterReadRecordServiceImpl.class
  35. BIN
      meter-reading-common/target/classes/com/huaxu/zoniot/utils/RedisUtil.class
  36. BIN
      meter-reading-common/target/classes/com/huaxu/zoniot/utils/SnowflakeIdWorker.class
  37. 149 0
      meter-reading-common/target/classes/mapper/JobTaskMapper.xml
  38. 7 0
      meter-reading-common/target/classes/mapper/MeterReadRateMapper.xml
  39. 410 0
      meter-reading-common/target/classes/mapper/MeterReadRecordMapper.xml
  40. 120 0
      meter-reading-common/target/classes/mapper/WaterMeterMapper.xml
  41. 20 0
      meter-reading-common/target/classes/script/sql/stat_meter_read_rate_by_building.sql
  42. 42 0
      meter-reading-common/target/classes/script/sql/stat_meter_read_rate_by_building_15day.sql
  43. 41 0
      meter-reading-common/target/classes/script/sql/stat_meter_read_rate_by_building_7day.sql
  44. 41 0
      meter-reading-common/target/classes/script/sql/stat_meter_read_rate_by_building_month.sql
  45. 0 0
      meter-reading-common/target/classes/script/sql/stat_meter_read_rate_by_collector_15day.sql
  46. 0 0
      meter-reading-common/target/classes/script/sql/stat_meter_read_rate_by_collector_7day.sql
  47. 25 0
      meter-reading-common/target/classes/script/sql/stat_meter_read_rate_by_collector_day.sql
  48. 60 0
      meter-reading-common/target/classes/script/sql/stat_meter_read_rate_by_community.sql
  49. 45 0
      meter-reading-common/target/classes/script/sql/stat_meter_read_rate_by_community_15day.sql
  50. 44 0
      meter-reading-common/target/classes/script/sql/stat_meter_read_rate_by_community_7day.sql
  51. 24 0
      meter-reading-common/target/classes/script/sql/stat_meter_read_rate_by_community_day.sql
  52. 44 0
      meter-reading-common/target/classes/script/sql/stat_meter_read_rate_by_concentrator_15day.sql
  53. 44 0
      meter-reading-common/target/classes/script/sql/stat_meter_read_rate_by_concentrator_7day.sql
  54. 25 0
      meter-reading-common/target/classes/script/sql/stat_meter_read_rate_by_concentrator_day.sql
  55. 45 0
      meter-reading-common/target/classes/script/sql/stat_meter_read_rate_by_concentrator_month.sql
  56. 21 0
      meter-reading-common/target/classes/script/sql/stat_meter_unread_device_by_building.sql
  57. 21 0
      meter-reading-common/target/classes/script/sql/stat_meter_unread_device_by_building_15day.sql
  58. 21 0
      meter-reading-common/target/classes/script/sql/stat_meter_unread_device_by_building_7day.sql
  59. 20 0
      meter-reading-common/target/classes/script/sql/stat_meter_unread_device_by_building_month.sql
  60. 26 0
      meter-reading-common/target/classes/script/sql/stat_meter_unread_device_by_building_month_v2.sql
  61. 2 0
      meter-reading-common/target/classes/script/sql/water_meter_error_day.sql
  62. 60 0
      meter-reading-job/pom.xml
  63. 19 0
      meter-reading-job/src/main/java/com/huaxu/zoniot/MeterReadingJobApplication.java
  64. 76 0
      meter-reading-job/src/main/java/com/huaxu/zoniot/config/XxlJobConfig.java
  65. 93 0
      meter-reading-job/src/main/java/com/huaxu/zoniot/job/MeterReadJob.java
  66. 258 0
      meter-reading-job/src/main/java/com/huaxu/zoniot/job/MeterReadRateJob.java
  67. 86 0
      meter-reading-job/src/main/java/com/huaxu/zoniot/producer/JobTaskProducer.java
  68. 29 0
      meter-reading-job/src/main/resources/application-job-dev.properties
  69. 29 0
      meter-reading-job/src/main/resources/application-job-sit.properties
  70. 1 0
      meter-reading-job/src/main/resources/application.properties
  71. 27 0
      meter-reading-job/src/main/resources/logback.xml
  72. 80 0
      meter-reading-service/pom.xml
  73. 18 0
      meter-reading-service/src/main/java/com/huaxu/zoniot/MeterReadingServiceApplication.java
  74. 31 0
      meter-reading-service/src/main/java/com/huaxu/zoniot/config/RabbitConfig.java
  75. 124 0
      meter-reading-service/src/main/java/com/huaxu/zoniot/service/WaterMeterDataProvider.java
  76. 182 0
      meter-reading-service/src/main/java/com/huaxu/zoniot/utils/SnowflakeIdWorker.java
  77. 36 0
      meter-reading-service/src/main/java/com/huaxu/zoniot/utils/SpringContextUtil.java
  78. 17 0
      meter-reading-service/src/main/java/com/huaxu/zoniot/web/MeterReadingController.java
  79. 21 0
      meter-reading-service/src/main/resources/application-service-dev.properties
  80. 1 0
      meter-reading-service/src/main/resources/application.properties
  81. 23 0
      meter-reading-service/src/main/resources/logback-spring.xml
  82. 31 0
      meter-reading-service/src/test/java/com/huaxu/zoniot/MeterReadRateServiceTests.java
  83. 135 0
      meter-reading-service/src/test/java/com/huaxu/zoniot/MeterReadingServiceTests.java
  84. 23 0
      meter-reading-service/src/test/java/com/huaxu/zoniot/ShardTest.java
  85. 83 0
      meter-reading-tianjin/pom.xml
  86. 20 0
      meter-reading-tianjin/src/main/java/com/huaxu/zoniot/MeterReadingApplication.java
  87. 19 0
      meter-reading-tianjin/src/main/java/com/huaxu/zoniot/common/ErrorConstants.java
  88. 17 0
      meter-reading-tianjin/src/main/java/com/huaxu/zoniot/entity/Device.java
  89. 73 0
      meter-reading-tianjin/src/main/java/com/huaxu/zoniot/entity/RegistData.java
  90. 19 0
      meter-reading-tianjin/src/main/java/com/huaxu/zoniot/entity/RespData.java
  91. 58 0
      meter-reading-tianjin/src/main/java/com/huaxu/zoniot/entity/RtnData.java
  92. 134 0
      meter-reading-tianjin/src/main/java/com/huaxu/zoniot/web/IntegrationController.java
  93. 35 0
      meter-reading-tianjin/src/main/resources/application-dev.properties
  94. 1 0
      meter-reading-tianjin/src/main/resources/application.properties
  95. 1 1
      smart-city-bat/src/main/java/com/zcxk/smartcity/bat/job/SQLAnalysisJob.java
  96. 2 2
      smart-city-bat/src/main/resources/application-test.properties
  97. 1 1
      smart-city-bat/src/main/resources/application.properties
  98. 1 1
      smart-city-bat/src/main/resources/sql/stat_meter_read_rate_by_concentrator_15day.sql
  99. 1 1
      smart-city-bat/src/main/resources/sql/stat_meter_read_rate_by_concentrator_7day.sql
  100. 1 1
      smart-city-bat/src/main/resources/sql/stat_meter_read_rate_by_concentrator_day.sql

+ 226 - 0
data-processor/README.md

@@ -0,0 +1,226 @@
+# General-Purpose Data Processing Framework
+
+A general-purpose framework for processing large volumes of data by slicing, paging, and batching.
+
+# Background
+
+As the **system migration effort** progresses, data migration needs keep growing: from SqlServer to MySQL, or from SqlServer/MySQL to ES.
+
+These migrations share a common trait: the volume is large and the jobs run for a long time. They are IO-bound tasks, so multithreading can improve throughput.
+
+This framework grew out of an ES import job that had to read roughly 20 million opportunity records from the database, call other interfaces to fetch each opportunity's additional fields, and then import everything into ES.
+
+The hard parts: the data volume is large and the job is long-running; opportunities have no auto-increment primary key, so key-based paging is impossible; and because the job runs so long, it must support resuming from a checkpoint if the program dies halfway through (re-importing only the failed data).
+
+# Solution: a slice/page/batch processing framework
+
+## User-oriented design
+
+Users only need to care about **where the data comes from and what to do with it**; the framework takes care of everything else. Sensible defaults mean the processing details usually need no attention, and if you have stricter requirements you can tune individual parameters.
+
+## How it works
+
+The framework abstracts the slice/page/batch processing flow and uses the template method pattern to factor out the business-agnostic boilerplate, so users only implement their own business logic and never touch technical details such as thread-pool creation, slice handling, batching, or error recording.
+
+![Slice/page/batch processing flow chart](img/flow-chart.png)
+
+Each slice is submitted to a thread pool as a task. Each slice thread creates its own thread pool, uses the slice as the paging boundary to fetch source data page by page, splits the paged data into batches, and submits a task per batch to that inner pool. Once all batches have been submitted, the slice thread waits for them to finish and then records the result of the current slice.
+
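+As a rough illustration of this flow only (a standalone sketch with made-up helper names such as fetchPage and handleBatch, not the framework's actual code):
+
+```java
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.TimeUnit;
+
+public class SlicePageBatchSketch {
+    public static void main(String[] args) throws InterruptedException {
+        // outer pool: one task per slice
+        ExecutorService slicePool = Executors.newFixedThreadPool(2);
+        for (int[] slice : Arrays.asList(new int[]{0, 40}, new int[]{40, 80})) {
+            slicePool.submit(() -> processSlice(slice[0], slice[1]));
+        }
+        slicePool.shutdown();
+        slicePool.awaitTermination(1, TimeUnit.MINUTES);
+    }
+
+    static void processSlice(int from, int to) {
+        // each slice thread creates its own inner pool for its batches
+        ExecutorService batchPool = Executors.newFixedThreadPool(2);
+        for (int offset = from; offset < to; offset += 20) {      // page through the slice
+            List<Integer> page = fetchPage(offset, 20);
+            for (int i = 0; i < page.size(); i += 10) {           // split the page into batches
+                List<Integer> batch = new ArrayList<>(page.subList(i, Math.min(i + 10, page.size())));
+                batchPool.submit(() -> handleBatch(batch));       // one task per batch
+            }
+        }
+        batchPool.shutdown();
+        try {
+            batchPool.awaitTermination(1, TimeUnit.MINUTES);      // wait, then record the slice result
+        } catch (InterruptedException e) {
+            Thread.currentThread().interrupt();
+        }
+        System.out.println("slice [" + from + "," + to + ") done");
+    }
+
+    static List<Integer> fetchPage(int offset, int size) {
+        List<Integer> page = new ArrayList<>();
+        for (int i = offset; i < offset + size; i++) {
+            page.add(i);
+        }
+        return page;
+    }
+
+    static void handleBatch(List<Integer> batch) {
+        System.out.println(Thread.currentThread().getName() + " processed " + batch);
+    }
+}
+```
+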
+## Core interface: DataProcessor
+
+The framework's core interface, DataProcessor, exposes all of the capabilities processing needs:
+
+* process: run the processing logic
+* processErrorSlices: reprocess the slices that failed
+* resumeProgress: resume the last unfinished run (checkpoint resume)
+
+```java
+public interface DataProcessor {
+    /**
+     * Process the data
+     */
+    boolean process();
+    /**
+     * Reprocess the slices that failed
+     */
+    boolean processErrorSlices();
+    /**
+     * Resume the last unfinished run (checkpoint resume)
+     */
+    boolean resumeProgress();
+}
+```
+
+# Usage
+
+## Adding the dependency
+
+```xml
+<dependency>
+   <groupId>com.github.dadiyang</groupId>
+   <artifactId>dataprocessor</artifactId>
+   <version>1.0.0</version>
+</dependency>
+```
+
+The only thing users need to care about is their own business logic (**where the data comes from and what to do with it**), which boils down to three methods:
+
+ * **generateSlices** generates the slices that batching is based on (**implementations of the slicing rules for Long and Date types are already provided**)
+
+ * **getResources**   fetches the data to be processed from the source, given the slice and paging conditions (*where the data comes from*)
+
+ * **createTask**     creates the task that actually processes a given batch of data (*where the data goes*)
+
+Depending on how you supply these three methods, there are two ways to use the framework: implement the `DataProvider` interface, or extend the `DataProcessorTemplate` abstract class.
+
+## Option 1: implement the DataProvider interface and pass it to DefaultDataProcessor (recommended)
+
+The DataProvider interface:
+
+```java
+public interface DataProvider<T, S> {
+    /**
+     * Generate all slices
+     * @return the slices
+     */
+    Set<Slice<S>> generateSlices();
+    /**
+     * Fetch the resources to be processed from the data source
+     * @param slice    the slice
+     * @param lastPage the previous page, i.e. the page that has just been processed; null on the first call
+     * @return the resources to be processed; if hasNext() returns false, this batch is considered finished
+     */
+    Page<T> getResources(Slice<S> slice, Page<T> lastPage) throws Exception;
+    /**
+     * Create the task that performs the actual processing
+     * @return the task carrying the actual processing logic; note: if the Callable throws when invoked, the batch is considered failed
+     */
+    Callable<?> createTask(List<T> resources);
+}
+```
+
+Implement the DataProvider interface, create an instance of your implementation, and use it to construct the DataProcessor:
+
+```java
+// create the DataProvider
+DataProvider<Opportunity, Date> dataProvider = new ...;
+DataProcessor<Opportunity, Date> processor = new DefaultDataProcessor<>(dataProvider);
+// start processing
+processor.process();
+```
+
+**Note:** the framework provides two abstract classes implementing the DataProvider interface, `DateSliceDataProvider` and `LongSliceDataProvider`. If you slice by Date or Long, you can extend one of them and skip implementing the slicing rules yourself.
+
+## Option 2: extend the `DataProcessorTemplate` abstract class
+
+Extend the `DataProcessorTemplate` abstract class and implement the three methods `generateSlices`, `getResources`, and `createTask`.
+
+Usage is otherwise similar to option 1.
+
+## Configuration parameters
+
+**DefaultDataProcessor** exposes the following parameters, which can be adjusted via the corresponding setter methods (see the sketch after this note):
+
+* slicesThreadNum: number of slice threads, i.e. how many slices are processed concurrently; default 8
+* numPerBatch:     number of records processed per batch; default 1000
+* launchInterval:  interval in milliseconds between slice start-ups when several slices launch together. Some queries are slow, and launching too many slices at once can overload the database and cause timeouts, so increase this value when your queries put pressure on the database; default 3000
+* retryTime: number of retries on failure; default 3
+* retryNullable: whether the retried method may return null; if not, a null return value is treated as a failure and retried (only applies to slice task processing); default true
+
+**Note**: the `setter` methods cannot be called while a run is in progress. If you have called `process()` and processing has not finished, modifying any of these properties throws `IllegalStateException("当前有任务正在执行")` ("a task is currently running").
+
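+A minimal configuration sketch continuing the example above, assuming the setters follow the usual JavaBean naming of the properties listed here (e.g. setSlicesThreadNum); the actual method names may differ:
+
+```java
+DefaultDataProcessor<Opportunity, Date> processor = new DefaultDataProcessor<>(dataProvider);
+// tune the pools and batching before starting the run; calling a setter mid-run throws IllegalStateException
+processor.setSlicesThreadNum(4);    // process 4 slices concurrently
+processor.setNumPerBatch(500);      // 500 records per batch
+processor.setLaunchInterval(5000);  // 5 s between slice launches to ease database load
+processor.setRetryTime(3);          // retry a failed batch up to 3 times
+processor.setRetryNullable(false);  // treat a null return value as a failure and retry
+processor.process();
+```
+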
+# Checkpoint resume
+
+If the program dies mid-run, the checkpoint-resume feature can recover the previous run. Its granularity is the slice: resuming means slices that already finished are not reprocessed, while slices that were only half processed are processed again.
+
+The principle: read all slices produced by the previous run and the slices already completed, take the set difference, and continue processing the remainder.
+
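+The rule can be pictured with plain java.util sets (an illustration only, not the framework's API):
+
+```java
+Set<String> allSlices      = new HashSet<>(Arrays.asList("s1", "s2", "s3", "s4"));
+Set<String> finishedSlices = new HashSet<>(Arrays.asList("s1", "s3"));
+Set<String> remaining      = new HashSet<>(allSlices);
+remaining.removeAll(finishedSlices);   // only s2 and s4 are processed again
+```
+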
+## Usage
+
+```java
+processor.resumeProgress()
+```
+
+## Caveats
+
+* Checkpoint resume relies on the SliceRecorder to record all slices and the completed slices of the previous run; on resume it reads both and takes the difference, so you must use the same SliceRecorder. The default SliceRecorder implementation is file-based, so the files it uses must be in the expected location when `resumeProgress()` is called.
+* Half-processed slices are processed again, which means your processing logic must itself cope with the same record being processed more than once.
+
+# Extension points
+
+## SliceParser (slice parser)
+
+Serializes and deserializes slices.
+
+The default implementation is based on FastJson.
+
+If needed, you can implement the interface yourself and pass it to the DataProcessor.
+
+## SliceRecorder (slice recorder)
+
+Records and reads slices: all slices obtained at start-up, the slices that finished processing, and the slices that failed.
+
+By default it records to local files, written to the data folder under the launch directory.
+
+You can implement your own recorder; just implement the interface and pass it to the DataProcessor.
+
+## ThreadPoolFactory (thread pool factory)
+
+Creates the thread pools. The framework supplies a pool size it computes to be appropriate, and you can provide your own implementation to suit your needs.
+
+By default it creates a fixed-size pool of the suggested size with a blocking rejection policy (submitting a task while the queue is full blocks the caller) and names every thread, as sketched below.
+
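+A sketch of such a pool built on plain java.util.concurrent (the framework's actual factory may differ):
+
+```java
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.concurrent.ThreadPoolExecutor;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicInteger;
+
+public class BlockingNamedPool {
+    public static ThreadPoolExecutor create(int size) {
+        AtomicInteger seq = new AtomicInteger();
+        return new ThreadPoolExecutor(
+                size, size, 0L, TimeUnit.MILLISECONDS,
+                new LinkedBlockingQueue<>(1024),
+                r -> new Thread(r, "data-processor-" + seq.incrementAndGet()),   // name every thread
+                (r, executor) -> {
+                    try {
+                        executor.getQueue().put(r);   // block the submitter instead of rejecting the task
+                    } catch (InterruptedException e) {
+                        Thread.currentThread().interrupt();
+                    }
+                });
+    }
+}
+```
+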
+# Dependencies
+
+* slf4j-api (logging)
+* FastJson (used by DefaultSliceParser to serialize and deserialize slices)
+
+# Design principles
+
+Finally, a look at how the five basic principles of software design apply in this framework; read on if you are interested, or skip this section.
+
+* Dependency inversion principle
+
+> Depend on abstract interfaces, not on concrete implementations.
+
+![UML](img/UML.png)
+
+The framework consists mainly of five interfaces, `DataProcessor`, `DataProvider`, `SliceParser`, `SliceRecorder`, and `ThreadPoolFactory`, which cooperate to provide all of the functionality.
+
+The `DataProcessorTemplate` abstract class implements the generic parts of `DataProcessor` by composing the `SliceParser`, `SliceRecorder`, and `ThreadPoolFactory` interfaces.
+
+`DefaultDataProcessor` extends `DataProcessorTemplate` and delegates the abstract methods that must be implemented to the `DataProvider` interface.
+
+So the interactions between classes depend on interfaces rather than concrete implementations.
+
+* Single responsibility principle
+
+> A class should have only one reason to change.
+
+Each interface and its implementation classes have a single responsibility:
+
+1. `DataProcessor`: the core interface, responsible for the functionality the framework provides as a whole
+2. `DataProvider`: the interface users must implement, telling the framework where the data comes from, where it goes, and how it is sliced
+3. `SliceParser`: the slice parser, responsible for serializing and deserializing slices
+4. `SliceRecorder`: the slice recorder, responsible for recording all slices, completed slices, and failed slices
+5. `ThreadPoolFactory`: the thread pool factory, responsible for creating thread pools
+
+* Interface segregation principle
+
+> Clients should not be forced to depend on interfaces they do not use;
+> 
+> dependencies between classes should be built on the smallest possible interfaces.
+
+Each interface in the framework has a narrow responsibility; users only deal with the functionality they need, and the corresponding interface requires no methods unrelated to that functionality.
+
+* Liskov substitution principle
+
+> Inheritance must ensure that properties that hold for the supertype still hold for its subtypes.
+
+Every implementation class the framework provides can be substituted wherever its parent class (interface) is expected.
+
+* Open/closed principle
+
+> Software entities should be open for extension and closed for modification.
+
+The framework already provides default implementations of each interface and sensible defaults for every property. If users have more specific needs, they do not modify the framework's code; they implement the corresponding interface and pass it to the main class to change the behavior.

BIN
data-processor/img/UML.png


BIN
data-processor/img/flow-chart.png


+ 62 - 0
data-processor/pom.xml

@@ -0,0 +1,62 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+
+    <groupId>com.zcxk.zoniot</groupId>
+    <artifactId>data-processor</artifactId>
+    <version>1.0</version>
+
+    <properties>
+        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+        <slf4j.version>1.7.25</slf4j.version>
+        <fastjson.version>1.2.58</fastjson.version>
+    </properties>
+
+    <dependencies>
+        <dependency>
+            <groupId>org.slf4j</groupId>
+            <artifactId>slf4j-api</artifactId>
+            <version>${slf4j.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>com.alibaba</groupId>
+            <artifactId>fastjson</artifactId>
+            <version>${fastjson.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.hamcrest</groupId>
+            <artifactId>hamcrest</artifactId>
+            <version>2.1</version>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>junit</groupId>
+            <artifactId>junit</artifactId>
+            <version>4.12</version>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.slf4j</groupId>
+            <artifactId>slf4j-log4j12</artifactId>
+            <version>1.7.21</version>
+            <scope>test</scope>
+        </dependency>
+    </dependencies>
+    <build>
+        <plugins>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-compiler-plugin</artifactId>
+                <version>3.1</version>
+                <configuration>
+                    <source>1.8</source>
+                    <target>1.8</target>
+                </configuration>
+            </plugin>
+        </plugins>
+    </build>
+
+
+</project>

+ 42 - 0
meter-reading-batch/pom.xml

@@ -0,0 +1,42 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+    <parent>
+        <groupId>org.springframework.boot</groupId>
+        <artifactId>spring-boot-starter-parent</artifactId>
+        <version>2.3.1.RELEASE</version>
+        <relativePath/> <!-- lookup parent from repository -->
+    </parent>
+    <groupId>com.huaxu.zoniot</groupId>
+    <artifactId>meter-reading-batch</artifactId>
+    <version>1.0</version>
+    <description>Meter reading batch processing module</description>
+    <dependencies>
+        <dependency>
+            <groupId>com.huaxu.zoniot</groupId>
+            <artifactId>meter-reading-common</artifactId>
+            <version>1.0</version>
+        </dependency>
+        <dependency>
+            <groupId>org.springframework.boot</groupId>
+            <artifactId>spring-boot-starter-web</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.springframework.boot</groupId>
+            <artifactId>spring-boot-starter-test</artifactId>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.projectlombok</groupId>
+            <artifactId>lombok</artifactId>
+            <optional>true</optional>
+        </dependency>
+        <!-- AMQP-->
+        <dependency>
+            <groupId>org.springframework.boot</groupId>
+            <artifactId>spring-boot-starter-amqp</artifactId>
+        </dependency>
+    </dependencies>
+</project>

+ 18 - 0
meter-reading-batch/src/main/java/com/huaxu/zoniot/MeterReadingBatchApplication.java

@@ -0,0 +1,18 @@
+package com.huaxu.zoniot;
+
+import org.springframework.boot.SpringApplication;
+import org.springframework.boot.autoconfigure.SpringBootApplication;
+
+/**
+ * <p></p>
+ *
+ * @Author wilian.peng
+ * @Date 2021/1/3 15:25
+ * @Version 1.0
+ */
+@SpringBootApplication
+public class MeterReadingBatchApplication {
+    public static void main(String[] args) {
+        SpringApplication.run(MeterReadingBatchApplication.class,args);
+    }
+}

+ 64 - 0
meter-reading-batch/src/main/java/com/huaxu/zoniot/consumer/JobTaskConsumer.java

@@ -0,0 +1,64 @@
+package com.huaxu.zoniot.consumer;
+
+import cn.hutool.core.thread.ThreadUtil;
+import com.huaxu.zoniot.common.TaskState;
+import com.huaxu.zoniot.entity.JobTask;
+import com.huaxu.zoniot.service.JobTaskService;
+import com.huaxu.zoniot.service.MeterReadRecordService;
+import com.rabbitmq.client.Channel;
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.amqp.core.ExchangeTypes;
+import org.springframework.amqp.rabbit.annotation.Exchange;
+import org.springframework.amqp.rabbit.annotation.Queue;
+import org.springframework.amqp.rabbit.annotation.QueueBinding;
+import org.springframework.amqp.rabbit.annotation.RabbitListener;
+import org.springframework.amqp.support.AmqpHeaders;
+import org.springframework.messaging.handler.annotation.Headers;
+import org.springframework.messaging.handler.annotation.Payload;
+import org.springframework.stereotype.Component;
+
+import javax.annotation.Resource;
+import java.io.IOException;
+import java.util.Map;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * <p>Job task consumer</p>
+ *
+ * @Author wilian.peng
+ * @Date 2021/1/3 15:39
+ * @Version 1.0
+ */
+@Slf4j
+@Component
+public class JobTaskConsumer {
+    @Resource
+    JobTaskService  jobTaskService ;
+    @Resource
+    MeterReadRecordService meterReadRecordService;
+
+
+    @RabbitListener(bindings = @QueueBinding(value = @Queue(value = "${job.task.rabbit.queue}", durable = "true", autoDelete = "false"),
+            exchange = @Exchange(value = "${job.task.rabbit.exchange}", type = ExchangeTypes.FANOUT)), ackMode = "MANUAL")
+    public void handleMessage(@Payload JobTask jobTask, Channel channel,
+                              @Headers Map<String, Object> headers) throws IOException{
+        long tag = (long) headers.get(AmqpHeaders.DELIVERY_TAG);
+        Integer taskId = jobTask.getTaskId();
+        try {
+            JobTask task = jobTaskService.findTask(taskId);
+            // skip tasks that are already being handled
+            if(task.getTaskState() != TaskState.HANDLING.getCode()){
+                // mark the task state as handling
+                jobTaskService.updateTaskState(taskId,TaskState.HANDLING,"");
+                int resultSize = meterReadRecordService.batchCreateMeterUnReadRecord(task);
+                jobTaskService.updateTaskState(taskId,TaskState.COMPLETED,""+resultSize);
+            }
+
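+            // manual ack mode: confirm only after the task has been handled; on failure the message is nack'ed and requeued below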
+            channel.basicAck(tag, false);
+        } catch (Exception e) {
+            log.error("消费数据失败",e);
+            channel.basicNack(tag, false, true);
+            jobTaskService.updateTaskState(taskId,TaskState.FAILED,e.getMessage());
+        }
+    }
+}

+ 20 - 0
meter-reading-batch/src/main/resources/application-batch-dev.properties

@@ -0,0 +1,20 @@
+server.port=8092
+server.servlet.context-path=/meter/reading/batch
+# Log configuration
+logging.level.root=info
+logging.file.path=./logs
+######################################### RabbitMQ configuration #############################################
+spring.rabbitmq.host=114.135.61.188
+spring.rabbitmq.port=55672
+spring.rabbitmq.username=zoniot
+spring.rabbitmq.password=zcxk100
+spring.rabbitmq.virtual-host=/
+spring.rabbitmq.connection-timeout=1000ms
+spring.rabbitmq.listener.direct.acknowledge-mode=manual
+spring.rabbitmq.listener.simple.acknowledge-mode=manual
+spring.rabbitmq.listener.simple.concurrency=2
+spring.rabbitmq.listener.simple.max-concurrency=4
+spring.rabbitmq.listener.simple.prefetch=4
+# Task queue
+job.task.rabbit.exchange=job-task-exchange
+job.task.rabbit.queue=job-task-queue

+ 1 - 0
meter-reading-batch/src/main/resources/application.properties

@@ -0,0 +1 @@
+spring.profiles.active=common-dev,batch-dev

+ 23 - 0
meter-reading-batch/src/main/resources/logback-spring.xml

@@ -0,0 +1,23 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<configuration>
+    <springProperty scope="context" name="LOG_PATH" source="logging.file.path" defaultValue="/tmp" />
+    <include resource="org/springframework/boot/logging/logback/defaults.xml" />
+    <include resource="org/springframework/boot/logging/logback/console-appender.xml" />
+    <appender name="TIME_FILE"
+              class="ch.qos.logback.core.rolling.RollingFileAppender">
+        <encoder>
+            <pattern>${FILE_LOG_PATTERN}</pattern>
+        </encoder>
+        <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+            <fileNamePattern>${LOG_PATH}/meter-reading-batch.%d{yyyy-MM-dd}.%i.log</fileNamePattern>
+            <maxHistory>365</maxHistory>
+            <timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
+                <maxFileSize>100MB</maxFileSize>
+            </timeBasedFileNamingAndTriggeringPolicy>
+        </rollingPolicy>
+    </appender>
+    <root level="INFO">
+        <appender-ref ref="CONSOLE" />
+        <appender-ref ref="TIME_FILE" />
+    </root>
+</configuration>

+ 37 - 0
meter-reading-common/target/classes/application-common-dev.properties

@@ -0,0 +1,37 @@
+######################################### Snowflake ID worker configuration #################################################
+server.workId=0
+server.dataCenterId=1
+######################################### Data source configuration #################################################
+spring.datasource.driver-class-name=com.mysql.jdbc.Driver
+spring.datasource.type=com.alibaba.druid.pool.DruidDataSource
+spring.datasource.url=jdbc:mysql://114.135.61.188:33306/smart_city_sit_6_10?characterEncoding=utf8&useSSL=false&serverTimezone=Asia/Shanghai&zeroDateTimeBehavior=convertToNull
+spring.datasource.username=root
+spring.datasource.password=100Zone@123
+spring.datasource.druid.initial-size=5
+spring.datasource.druid.min-idle=5
+spring.datasource.druid.max-active=20
+spring.datasource.druid.max-wait=60000
+spring.datasource.druid.time-between-eviction-runs-millis=60000
+spring.datasource.druid.min-evictable-idle-time-millis=300000
+spring.datasource.druid.validation-query=SELECT 1
+spring.datasource.druid.test-while-idle=true
+spring.datasource.druid.test-on-borrow=true
+spring.datasource.druid.test-on-return=false
+spring.datasource.druid.pool-prepared-statements=true
+spring.datasource.druid.max-pool-prepared-statement-per-connection-size=20
+spring.datasource.druid.filters=stat,wall
+spring.datasource.druid.connection-properties=druid.stat.mergeSql=true;druid.stat.slowSqlMillis=5000
+######################################### MyBatis configuration #################################################
+mybatis.mapper-locations=classpath*:mapper/*.xml
+mybatis.type-aliases-package=com.huaxu.zoniot.entity
+mybatis.configuration.map-underscore-to-camel-case=true
+mybatis.configuration.log-impl=org.apache.ibatis.logging.stdout.StdOutImpl
+mybatis.configuration.use-column-label=true
+######################################### Redis configuration #################################################
+spring.redis.host=114.135.61.188
+spring.redis.port=26379
+spring.redis.password=zoniot
+spring.redis.database=1
+######################################### Business configuration ############################################
+# Chunk size of the data set handled by each task
+task.chunk.size=2000

BIN
meter-reading-common/target/classes/com/huaxu/zoniot/common/ApplicationException.class


BIN
meter-reading-common/target/classes/com/huaxu/zoniot/common/Constants.class


BIN
meter-reading-common/target/classes/com/huaxu/zoniot/common/ResultStatus.class


BIN
meter-reading-common/target/classes/com/huaxu/zoniot/common/ServiceException.class


BIN
meter-reading-common/target/classes/com/huaxu/zoniot/common/TaskConstants.class


BIN
meter-reading-common/target/classes/com/huaxu/zoniot/common/TaskState.class


BIN
meter-reading-common/target/classes/com/huaxu/zoniot/config/SnowflakeIdWorkerConfig.class


BIN
meter-reading-common/target/classes/com/huaxu/zoniot/dao/JobTaskMapper.class


BIN
meter-reading-common/target/classes/com/huaxu/zoniot/dao/MeterReadRateMapper.class


BIN
meter-reading-common/target/classes/com/huaxu/zoniot/dao/MeterReadRecordMapper.class


BIN
meter-reading-common/target/classes/com/huaxu/zoniot/dao/WaterMeterMapper.class


BIN
meter-reading-common/target/classes/com/huaxu/zoniot/entity/JobResult.class


BIN
meter-reading-common/target/classes/com/huaxu/zoniot/entity/JobTask.class


BIN
meter-reading-common/target/classes/com/huaxu/zoniot/entity/MeasuringData.class


BIN
meter-reading-common/target/classes/com/huaxu/zoniot/entity/MeterReadRecord.class


BIN
meter-reading-common/target/classes/com/huaxu/zoniot/entity/WaterMeter.class


BIN
meter-reading-common/target/classes/com/huaxu/zoniot/service/JobTaskService.class


BIN
meter-reading-common/target/classes/com/huaxu/zoniot/service/MeterReadRateService.class


BIN
meter-reading-common/target/classes/com/huaxu/zoniot/service/MeterReadRecordService.class


BIN
meter-reading-common/target/classes/com/huaxu/zoniot/service/impl/JobTaskServiceImpl.class


BIN
meter-reading-common/target/classes/com/huaxu/zoniot/service/impl/MeterReadRateServiceImpl.class


BIN
meter-reading-common/target/classes/com/huaxu/zoniot/service/impl/MeterReadRecordServiceImpl$1.class


BIN
meter-reading-common/target/classes/com/huaxu/zoniot/service/impl/MeterReadRecordServiceImpl.class


BIN
meter-reading-common/target/classes/com/huaxu/zoniot/utils/RedisUtil.class


BIN
meter-reading-common/target/classes/com/huaxu/zoniot/utils/SnowflakeIdWorker.class


+ 149 - 0
meter-reading-common/target/classes/mapper/JobTaskMapper.xml

@@ -0,0 +1,149 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
+<mapper namespace="com.huaxu.zoniot.dao.JobTaskMapper">
+  <resultMap id="BaseResultMap" type="com.huaxu.zoniot.entity.JobTask">
+    <!--@mbg.generated-->
+    <!--@Table sc_job_task-->
+    <id column="task_id" jdbcType="INTEGER" property="taskId" />
+    <result column="job_name" jdbcType="VARCHAR" property="jobName" />
+    <result column="context" jdbcType="VARCHAR" property="context" />
+    <result column="task_state" jdbcType="INTEGER" property="taskState" />
+    <result column="remark" jdbcType="VARCHAR" property="remark" />
+    <result column="create_time" jdbcType="TIMESTAMP" property="createTime" />
+    <result column="update_time" jdbcType="TIMESTAMP" property="updateTime" />
+    <result column="complete_time" jdbcType="TIMESTAMP" property="completeTime" />
+  </resultMap>
+  <sql id="Base_Column_List">
+    <!--@mbg.generated-->
+    task_id, job_name, context, task_state,remark, create_time, update_time, complete_time
+  </sql>
+  <select id="selectByPrimaryKey" parameterType="java.lang.Integer" resultMap="BaseResultMap">
+    <!--@mbg.generated-->
+    select 
+    <include refid="Base_Column_List" />
+    from sc_job_task
+    where task_id = #{taskId,jdbcType=INTEGER}
+  </select>
+  <delete id="deleteByPrimaryKey" parameterType="java.lang.Integer">
+    <!--@mbg.generated-->
+    delete from sc_job_task
+    where task_id = #{taskId,jdbcType=INTEGER}
+  </delete>
+  <insert id="insert" parameterType="com.huaxu.zoniot.entity.JobTask" useGeneratedKeys="true" keyProperty="taskId" keyColumn="task_id">
+    <!--@mbg.generated-->
+    insert into sc_job_task (task_id, job_name, context, 
+      task_state, remark,create_time, update_time,
+      complete_time)
+    values (#{taskId,jdbcType=INTEGER}, #{jobName,jdbcType=VARCHAR}, #{context,jdbcType=VARCHAR},
+      #{taskState,jdbcType=INTEGER},#{remark,jdbcType=VARCHAR}, #{createTime,jdbcType=TIMESTAMP}, #{updateTime,jdbcType=TIMESTAMP},
+      #{completeTime,jdbcType=TIMESTAMP})
+  </insert>
+  <insert id="insertSelective" parameterType="com.huaxu.zoniot.entity.JobTask">
+    <!--@mbg.generated-->
+    insert into sc_job_task
+    <trim prefix="(" suffix=")" suffixOverrides=",">
+      <if test="taskId != null">
+        task_id,
+      </if>
+      <if test="jobName != null">
+        job_name,
+      </if>
+      <if test="context != null">
+        context,
+      </if>
+      <if test="taskState != null">
+        task_state,
+      </if>
+      <if test="remark != null">
+        remark,
+      </if>
+      <if test="createTime != null">
+        create_time,
+      </if>
+      <if test="updateTime != null">
+        update_time,
+      </if>
+      <if test="completeTime != null">
+        complete_time,
+      </if>
+    </trim>
+    <trim prefix="values (" suffix=")" suffixOverrides=",">
+      <if test="taskId != null">
+        #{taskId,jdbcType=INTEGER},
+      </if>
+      <if test="jobName != null">
+        #{jobName,jdbcType=VARCHAR},
+      </if>
+      <if test="context != null">
+        #{context,jdbcType=VARCHAR},
+      </if>
+      <if test="taskState != null">
+        #{taskState,jdbcType=INTEGER},
+      </if>
+      <if test="remark != null">
+        #{remark,jdbcType=VARCHAR},
+      </if>
+      <if test="createTime != null">
+        #{createTime,jdbcType=TIMESTAMP},
+      </if>
+      <if test="updateTime != null">
+        #{updateTime,jdbcType=TIMESTAMP},
+      </if>
+      <if test="completeTime != null">
+        #{completeTime,jdbcType=TIMESTAMP},
+      </if>
+    </trim>
+  </insert>
+  <update id="updateByPrimaryKeySelective" parameterType="com.huaxu.zoniot.entity.JobTask">
+    <!--@mbg.generated-->
+    update sc_job_task
+    <set>
+      <if test="jobName != null">
+        job_name = #{jobName,jdbcType=VARCHAR},
+      </if>
+      <if test="context != null">
+        context = #{context,jdbcType=VARCHAR},
+      </if>
+      <if test="taskState != null">
+        task_state = #{taskState,jdbcType=INTEGER},
+      </if>
+      <if test="remark != null">
+        remark = #{remark,jdbcType=VARCHAR},
+      </if>
+      <if test="createTime != null">
+        create_time = #{createTime,jdbcType=TIMESTAMP},
+      </if>
+      <if test="updateTime != null">
+        update_time = #{updateTime,jdbcType=TIMESTAMP},
+      </if>
+      <if test="completeTime != null">
+        complete_time = #{completeTime,jdbcType=TIMESTAMP},
+      </if>
+    </set>
+    where task_id = #{taskId,jdbcType=INTEGER}
+  </update>
+  <update id="updateByPrimaryKey" parameterType="com.huaxu.zoniot.entity.JobTask">
+    <!--@mbg.generated-->
+    update sc_job_task
+    set job_name = #{jobName,jdbcType=VARCHAR},
+      context = #{context,jdbcType=VARCHAR},
+      task_state = #{taskState,jdbcType=INTEGER},
+      remark = #{remark,jdbcType=VARCHAR},
+      create_time = #{createTime,jdbcType=TIMESTAMP},
+      update_time = #{updateTime,jdbcType=TIMESTAMP},
+      complete_time = #{completeTime,jdbcType=TIMESTAMP}
+    where task_id = #{taskId,jdbcType=INTEGER}
+  </update>
+  <insert id="batchInsert" parameterType="map" useGeneratedKeys="true" keyProperty="taskId" keyColumn="task_id">
+    <!--@mbg.generated-->
+    insert into sc_job_task
+    (task_id, job_name, context, task_state,remark, create_time, update_time, complete_time)
+    values
+    <foreach collection="list" item="item" separator=",">
+      (#{item.taskId,jdbcType=INTEGER}, #{item.jobName,jdbcType=VARCHAR}, #{item.context,jdbcType=VARCHAR},
+        #{item.taskState,jdbcType=INTEGER},#{item.remark,jdbcType=VARCHAR},
+      #{item.createTime,jdbcType=TIMESTAMP}, #{item.updateTime,jdbcType=TIMESTAMP},
+        #{item.completeTime,jdbcType=TIMESTAMP})
+    </foreach>
+  </insert>
+</mapper>

+ 7 - 0
meter-reading-common/target/classes/mapper/MeterReadRateMapper.xml

@@ -0,0 +1,7 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
+<mapper namespace="com.huaxu.zoniot.dao.MeterReadRateMapper">
+    <insert id="executeSql">
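+        <!-- ${} substitutes the statement text verbatim (no prepared-statement parameter binding), so only trusted, internally generated SQL should be passed in -->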
+        ${sql}
+    </insert>
+</mapper>

+ 410 - 0
meter-reading-common/target/classes/mapper/MeterReadRecordMapper.xml

@@ -0,0 +1,410 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
+<mapper namespace="com.huaxu.zoniot.dao.MeterReadRecordMapper">
+  <resultMap id="BaseResultMap" type="com.huaxu.zoniot.entity.MeterReadRecord">
+    <!--@mbg.generated-->
+    <!--@Table sc_meter_read_record-->
+    <id column="id" jdbcType="BIGINT" property="id" />
+    <result column="read_date" jdbcType="INTEGER" property="readDate" />
+    <result column="site_id" jdbcType="INTEGER" property="siteId" />
+    <result column="sys_id" jdbcType="INTEGER" property="sysId" />
+    <result column="province" jdbcType="INTEGER" property="province" />
+    <result column="city" jdbcType="INTEGER" property="city" />
+    <result column="region" jdbcType="INTEGER" property="region" />
+    <result column="community" jdbcType="INTEGER" property="community" />
+    <result column="customer_id" jdbcType="INTEGER" property="customerId" />
+    <result column="concentrator_id" jdbcType="INTEGER" property="concentratorId" />
+    <result column="collector_id" jdbcType="INTEGER" property="collectorId" />
+    <result column="building_id" jdbcType="INTEGER" property="buildingId" />
+    <result column="location" jdbcType="VARCHAR" property="location" />
+    <result column="device_type_id" jdbcType="INTEGER" property="deviceTypeId" />
+    <result column="device_id" jdbcType="BIGINT" property="deviceId" />
+    <result column="device_no" jdbcType="VARCHAR" property="deviceNo" />
+    <result column="meter_no" jdbcType="VARCHAR" property="meterNo" />
+    <result column="meter_file_no" jdbcType="VARCHAR" property="meterFileNo" />
+    <result column="read_time" jdbcType="TIMESTAMP" property="readTime" />
+    <result column="read_status" jdbcType="VARCHAR" property="readStatus" />
+    <result column="read_data" jdbcType="VARCHAR" property="readData" />
+    <result column="last_valid" jdbcType="VARCHAR" property="lastValid" />
+    <result column="last_cost" jdbcType="DECIMAL" property="lastCost" />
+    <result column="status" jdbcType="INTEGER" property="status" />
+    <result column="date_create" jdbcType="TIMESTAMP" property="dateCreate" />
+    <result column="date_update" jdbcType="TIMESTAMP" property="dateUpdate" />
+    <result column="create_by" jdbcType="VARCHAR" property="createBy" />
+    <result column="update_by" jdbcType="VARCHAR" property="updateBy" />
+  </resultMap>
+  <sql id="Base_Column_List">
+    <!--@mbg.generated-->
+    id, read_date, site_id, sys_id, province, city, region, community, customer_id, concentrator_id, 
+    collector_id, building_id, `location`, device_type_id, device_id, device_no, meter_no, 
+    meter_file_no, read_time, read_status, read_data, last_valid, last_cost, `status`, 
+    date_create, date_update, create_by, update_by
+  </sql>
+  <select id="selectByPrimaryKey" parameterType="java.lang.Long" resultMap="BaseResultMap">
+    <!--@mbg.generated-->
+    select 
+    <include refid="Base_Column_List" />
+    from sc_meter_read_record
+    where id = #{id,jdbcType=BIGINT}
+  </select>
+  <delete id="deleteByPrimaryKey" parameterType="java.lang.Long">
+    <!--@mbg.generated-->
+    delete from sc_meter_read_record
+    where id = #{id,jdbcType=BIGINT}
+  </delete>
+  <insert id="insert" parameterType="com.huaxu.zoniot.entity.MeterReadRecord">
+    <!--@mbg.generated-->
+    insert into sc_meter_read_record (id, read_date, site_id, 
+      sys_id, province, city, 
+      region, community, customer_id, 
+      concentrator_id, collector_id, building_id, 
+      `location`, device_type_id, device_id, 
+      device_no, meter_no, meter_file_no, 
+      read_time, read_status, read_data, 
+      last_valid, last_cost, `status`, 
+      date_create, date_update, create_by, 
+      update_by)
+    values (#{id,jdbcType=BIGINT}, #{readDate,jdbcType=INTEGER}, #{siteId,jdbcType=INTEGER}, 
+      #{sysId,jdbcType=INTEGER}, #{province,jdbcType=INTEGER}, #{city,jdbcType=INTEGER}, 
+      #{region,jdbcType=INTEGER}, #{community,jdbcType=INTEGER}, #{customerId,jdbcType=INTEGER}, 
+      #{concentratorId,jdbcType=INTEGER}, #{collectorId,jdbcType=INTEGER}, #{buildingId,jdbcType=INTEGER}, 
+      #{location,jdbcType=VARCHAR}, #{deviceTypeId,jdbcType=INTEGER}, #{deviceId,jdbcType=BIGINT}, 
+      #{deviceNo,jdbcType=VARCHAR}, #{meterNo,jdbcType=VARCHAR}, #{meterFileNo,jdbcType=VARCHAR}, 
+      #{readTime,jdbcType=TIMESTAMP}, #{readStatus,jdbcType=VARCHAR}, #{readData,jdbcType=VARCHAR}, 
+      #{lastValid,jdbcType=VARCHAR}, #{lastCost,jdbcType=DECIMAL}, #{status,jdbcType=INTEGER}, 
+      #{dateCreate,jdbcType=TIMESTAMP}, #{dateUpdate,jdbcType=TIMESTAMP}, #{createBy,jdbcType=VARCHAR}, 
+      #{updateBy,jdbcType=VARCHAR})
+  </insert>
+  <insert id="insertSelective" parameterType="com.huaxu.zoniot.entity.MeterReadRecord">
+    <!--@mbg.generated-->
+    insert into sc_meter_read_record
+    <trim prefix="(" suffix=")" suffixOverrides=",">
+      <if test="id != null">
+        id,
+      </if>
+      <if test="readDate != null">
+        read_date,
+      </if>
+      <if test="siteId != null">
+        site_id,
+      </if>
+      <if test="sysId != null">
+        sys_id,
+      </if>
+      <if test="province != null">
+        province,
+      </if>
+      <if test="city != null">
+        city,
+      </if>
+      <if test="region != null">
+        region,
+      </if>
+      <if test="community != null">
+        community,
+      </if>
+      <if test="customerId != null">
+        customer_id,
+      </if>
+      <if test="concentratorId != null">
+        concentrator_id,
+      </if>
+      <if test="collectorId != null">
+        collector_id,
+      </if>
+      <if test="buildingId != null">
+        building_id,
+      </if>
+      <if test="location != null">
+        `location`,
+      </if>
+      <if test="deviceTypeId != null">
+        device_type_id,
+      </if>
+      <if test="deviceId != null">
+        device_id,
+      </if>
+      <if test="deviceNo != null">
+        device_no,
+      </if>
+      <if test="meterNo != null">
+        meter_no,
+      </if>
+      <if test="meterFileNo != null">
+        meter_file_no,
+      </if>
+      <if test="readTime != null">
+        read_time,
+      </if>
+      <if test="readStatus != null">
+        read_status,
+      </if>
+      <if test="readData != null">
+        read_data,
+      </if>
+      <if test="lastValid != null">
+        last_valid,
+      </if>
+      <if test="lastCost != null">
+        last_cost,
+      </if>
+      <if test="status != null">
+        `status`,
+      </if>
+      <if test="dateCreate != null">
+        date_create,
+      </if>
+      <if test="dateUpdate != null">
+        date_update,
+      </if>
+      <if test="createBy != null">
+        create_by,
+      </if>
+      <if test="updateBy != null">
+        update_by,
+      </if>
+    </trim>
+    <trim prefix="values (" suffix=")" suffixOverrides=",">
+      <if test="id != null">
+        #{id,jdbcType=BIGINT},
+      </if>
+      <if test="readDate != null">
+        #{readDate,jdbcType=INTEGER},
+      </if>
+      <if test="siteId != null">
+        #{siteId,jdbcType=INTEGER},
+      </if>
+      <if test="sysId != null">
+        #{sysId,jdbcType=INTEGER},
+      </if>
+      <if test="province != null">
+        #{province,jdbcType=INTEGER},
+      </if>
+      <if test="city != null">
+        #{city,jdbcType=INTEGER},
+      </if>
+      <if test="region != null">
+        #{region,jdbcType=INTEGER},
+      </if>
+      <if test="community != null">
+        #{community,jdbcType=INTEGER},
+      </if>
+      <if test="customerId != null">
+        #{customerId,jdbcType=INTEGER},
+      </if>
+      <if test="concentratorId != null">
+        #{concentratorId,jdbcType=INTEGER},
+      </if>
+      <if test="collectorId != null">
+        #{collectorId,jdbcType=INTEGER},
+      </if>
+      <if test="buildingId != null">
+        #{buildingId,jdbcType=INTEGER},
+      </if>
+      <if test="location != null">
+        #{location,jdbcType=VARCHAR},
+      </if>
+      <if test="deviceTypeId != null">
+        #{deviceTypeId,jdbcType=INTEGER},
+      </if>
+      <if test="deviceId != null">
+        #{deviceId,jdbcType=BIGINT},
+      </if>
+      <if test="deviceNo != null">
+        #{deviceNo,jdbcType=VARCHAR},
+      </if>
+      <if test="meterNo != null">
+        #{meterNo,jdbcType=VARCHAR},
+      </if>
+      <if test="meterFileNo != null">
+        #{meterFileNo,jdbcType=VARCHAR},
+      </if>
+      <if test="readTime != null">
+        #{readTime,jdbcType=TIMESTAMP},
+      </if>
+      <if test="readStatus != null">
+        #{readStatus,jdbcType=VARCHAR},
+      </if>
+      <if test="readData != null">
+        #{readData,jdbcType=VARCHAR},
+      </if>
+      <if test="lastValid != null">
+        #{lastValid,jdbcType=VARCHAR},
+      </if>
+      <if test="lastCost != null">
+        #{lastCost,jdbcType=DECIMAL},
+      </if>
+      <if test="status != null">
+        #{status,jdbcType=INTEGER},
+      </if>
+      <if test="dateCreate != null">
+        #{dateCreate,jdbcType=TIMESTAMP},
+      </if>
+      <if test="dateUpdate != null">
+        #{dateUpdate,jdbcType=TIMESTAMP},
+      </if>
+      <if test="createBy != null">
+        #{createBy,jdbcType=VARCHAR},
+      </if>
+      <if test="updateBy != null">
+        #{updateBy,jdbcType=VARCHAR},
+      </if>
+    </trim>
+  </insert>
+  <update id="updateByPrimaryKeySelective" parameterType="com.huaxu.zoniot.entity.MeterReadRecord">
+    <!--@mbg.generated-->
+    update sc_meter_read_record
+    <set>
+      <if test="readDate != null">
+        read_date = #{readDate,jdbcType=INTEGER},
+      </if>
+      <if test="siteId != null">
+        site_id = #{siteId,jdbcType=INTEGER},
+      </if>
+      <if test="sysId != null">
+        sys_id = #{sysId,jdbcType=INTEGER},
+      </if>
+      <if test="province != null">
+        province = #{province,jdbcType=INTEGER},
+      </if>
+      <if test="city != null">
+        city = #{city,jdbcType=INTEGER},
+      </if>
+      <if test="region != null">
+        region = #{region,jdbcType=INTEGER},
+      </if>
+      <if test="community != null">
+        community = #{community,jdbcType=INTEGER},
+      </if>
+      <if test="customerId != null">
+        customer_id = #{customerId,jdbcType=INTEGER},
+      </if>
+      <if test="concentratorId != null">
+        concentrator_id = #{concentratorId,jdbcType=INTEGER},
+      </if>
+      <if test="collectorId != null">
+        collector_id = #{collectorId,jdbcType=INTEGER},
+      </if>
+      <if test="buildingId != null">
+        building_id = #{buildingId,jdbcType=INTEGER},
+      </if>
+      <if test="location != null">
+        `location` = #{location,jdbcType=VARCHAR},
+      </if>
+      <if test="deviceTypeId != null">
+        device_type_id = #{deviceTypeId,jdbcType=INTEGER},
+      </if>
+      <if test="deviceId != null">
+        device_id = #{deviceId,jdbcType=BIGINT},
+      </if>
+      <if test="deviceNo != null">
+        device_no = #{deviceNo,jdbcType=VARCHAR},
+      </if>
+      <if test="meterNo != null">
+        meter_no = #{meterNo,jdbcType=VARCHAR},
+      </if>
+      <if test="meterFileNo != null">
+        meter_file_no = #{meterFileNo,jdbcType=VARCHAR},
+      </if>
+      <if test="readTime != null">
+        read_time = #{readTime,jdbcType=TIMESTAMP},
+      </if>
+      <if test="readStatus != null">
+        read_status = #{readStatus,jdbcType=VARCHAR},
+      </if>
+      <if test="readData != null">
+        read_data = #{readData,jdbcType=VARCHAR},
+      </if>
+      <if test="lastValid != null">
+        last_valid = #{lastValid,jdbcType=VARCHAR},
+      </if>
+      <if test="lastCost != null">
+        last_cost = #{lastCost,jdbcType=DECIMAL},
+      </if>
+      <if test="status != null">
+        `status` = #{status,jdbcType=INTEGER},
+      </if>
+      <if test="dateCreate != null">
+        date_create = #{dateCreate,jdbcType=TIMESTAMP},
+      </if>
+      <if test="dateUpdate != null">
+        date_update = #{dateUpdate,jdbcType=TIMESTAMP},
+      </if>
+      <if test="createBy != null">
+        create_by = #{createBy,jdbcType=VARCHAR},
+      </if>
+      <if test="updateBy != null">
+        update_by = #{updateBy,jdbcType=VARCHAR},
+      </if>
+    </set>
+    where id = #{id,jdbcType=BIGINT}
+  </update>
+  <update id="updateByPrimaryKey" parameterType="com.huaxu.zoniot.entity.MeterReadRecord">
+    <!--@mbg.generated-->
+    update sc_meter_read_record
+    set read_date = #{readDate,jdbcType=INTEGER},
+      site_id = #{siteId,jdbcType=INTEGER},
+      sys_id = #{sysId,jdbcType=INTEGER},
+      province = #{province,jdbcType=INTEGER},
+      city = #{city,jdbcType=INTEGER},
+      region = #{region,jdbcType=INTEGER},
+      community = #{community,jdbcType=INTEGER},
+      customer_id = #{customerId,jdbcType=INTEGER},
+      concentrator_id = #{concentratorId,jdbcType=INTEGER},
+      collector_id = #{collectorId,jdbcType=INTEGER},
+      building_id = #{buildingId,jdbcType=INTEGER},
+      `location` = #{location,jdbcType=VARCHAR},
+      device_type_id = #{deviceTypeId,jdbcType=INTEGER},
+      device_id = #{deviceId,jdbcType=BIGINT},
+      device_no = #{deviceNo,jdbcType=VARCHAR},
+      meter_no = #{meterNo,jdbcType=VARCHAR},
+      meter_file_no = #{meterFileNo,jdbcType=VARCHAR},
+      read_time = #{readTime,jdbcType=TIMESTAMP},
+      read_status = #{readStatus,jdbcType=VARCHAR},
+      read_data = #{readData,jdbcType=VARCHAR},
+      last_valid = #{lastValid,jdbcType=VARCHAR},
+      last_cost = #{lastCost,jdbcType=DECIMAL},
+      `status` = #{status,jdbcType=INTEGER},
+      date_create = #{dateCreate,jdbcType=TIMESTAMP},
+      date_update = #{dateUpdate,jdbcType=TIMESTAMP},
+      create_by = #{createBy,jdbcType=VARCHAR},
+      update_by = #{updateBy,jdbcType=VARCHAR}
+    where id = #{id,jdbcType=BIGINT}
+  </update>
+  <insert id="batchInsert" parameterType="map">
+    <!--@mbg.generated-->
+    replace into sc_meter_read_record
+    (id, read_date, site_id, sys_id, province, city, region, community, customer_id, 
+      concentrator_id, collector_id, building_id, `location`, device_type_id, device_id, 
+      device_no, meter_no, meter_file_no, read_time, read_status, read_data, last_valid, 
+      last_cost, `status`, date_create, date_update, create_by, update_by)
+    values
+    <foreach collection="list" item="item" separator=",">
+      (#{item.id,jdbcType=BIGINT}, #{item.readDate,jdbcType=INTEGER}, #{item.siteId,jdbcType=INTEGER}, 
+        #{item.sysId,jdbcType=INTEGER}, #{item.province,jdbcType=INTEGER}, #{item.city,jdbcType=INTEGER}, 
+        #{item.region,jdbcType=INTEGER}, #{item.community,jdbcType=INTEGER}, #{item.customerId,jdbcType=INTEGER}, 
+        #{item.concentratorId,jdbcType=INTEGER}, #{item.collectorId,jdbcType=INTEGER}, 
+        #{item.buildingId,jdbcType=INTEGER}, #{item.location,jdbcType=VARCHAR}, #{item.deviceTypeId,jdbcType=INTEGER}, 
+        #{item.deviceId,jdbcType=BIGINT}, #{item.deviceNo,jdbcType=VARCHAR}, #{item.meterNo,jdbcType=VARCHAR}, 
+        #{item.meterFileNo,jdbcType=VARCHAR}, #{item.readTime,jdbcType=TIMESTAMP}, #{item.readStatus,jdbcType=VARCHAR}, 
+        #{item.readData,jdbcType=VARCHAR}, #{item.lastValid,jdbcType=VARCHAR}, #{item.lastCost,jdbcType=DECIMAL}, 
+        #{item.status,jdbcType=INTEGER}, #{item.dateCreate,jdbcType=TIMESTAMP}, #{item.dateUpdate,jdbcType=TIMESTAMP}, 
+        #{item.createBy,jdbcType=VARCHAR}, #{item.updateBy,jdbcType=VARCHAR})
+    </foreach>
+  </insert>
+  <select id="findRecordByReadDayAndDeviceId" resultMap="BaseResultMap">
+    select
+        <include refid="Base_Column_List" />
+    from
+      sc_meter_read_record
+    where
+        status = 1
+    and device_id = #{deviceId,jdbcType=BIGINT}
+    and read_date = #{readDate,jdbcType=INTEGER}
+  </select>
+  <update id="deleteRecordByReadDayAndDeviceId">
+    update sc_meter_read_record set status = 0
+    where device_id = #{deviceId,jdbcType=BIGINT}
+      and read_date = #{readDate,jdbcType=INTEGER}
+  </update>
+</mapper>

+ 120 - 0
meter-reading-common/target/classes/mapper/WaterMeterMapper.xml

@@ -0,0 +1,120 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
+<mapper namespace="com.huaxu.zoniot.dao.WaterMeterMapper">
+  <resultMap id="BaseResultMap" type="com.huaxu.zoniot.entity.WaterMeter">
+    <id column="device_id" jdbcType="BIGINT" property="deviceId" />
+    <result column="water_meter_type" jdbcType="INTEGER" property="waterMeterType" />
+    <result column="site_id" jdbcType="INTEGER" property="siteId" />
+    <result column="sys_id" jdbcType="INTEGER" property="sysId" />
+    <result column="province" jdbcType="INTEGER" property="province" />
+    <result column="city" jdbcType="INTEGER" property="city" />
+    <result column="region" jdbcType="INTEGER" property="region" />
+    <result column="community" jdbcType="INTEGER" property="community" />
+    <result column="customer_id" jdbcType="INTEGER" property="customerId" />
+    <result column="concentrator_id" jdbcType="INTEGER" property="concentratorId" />
+    <result column="collector_id" jdbcType="INTEGER" property="collectorId" />
+    <result column="building_id" jdbcType="INTEGER" property="buildingId" />
+    <result column="location" jdbcType="VARCHAR" property="location" />
+    <result column="device_type_id" jdbcType="INTEGER" property="deviceTypeId" />
+    <result column="device_id" jdbcType="BIGINT" property="deviceId" />
+    <result column="device_no" jdbcType="VARCHAR" property="deviceNo" />
+    <result column="meter_no" jdbcType="VARCHAR" property="meterNo" />
+    <result column="meter_file_no" jdbcType="VARCHAR" property="meterFileNo" />
+    <result column="reading_measuring_code" jdbcType="VARCHAR" property="readingMeasuringCode" />
+    <result column="valve_measuring_code" jdbcType="VARCHAR" property="valveMeasuringCode" />
+    <result column="status" jdbcType="INTEGER" property="status" />
+    <result column="date_create" jdbcType="TIMESTAMP" property="dateCreate" />
+    <result column="date_update" jdbcType="TIMESTAMP" property="dateUpdate" />
+    <result column="create_by" jdbcType="VARCHAR" property="createBy" />
+    <result column="update_by" jdbcType="VARCHAR" property="updateBy" />
+  </resultMap>
+  <sql id="Base_Column_List">
+    <!--@mbg.generated-->
+    device_id ,water_meter_type,device_no, site_id, sys_id,  building_id,
+    province, city, region, community, customer_id, meter_no, meter_file_no,
+    device_type_id,  concentrator_id,collector_id, `location`,valve_measuring_code,
+    reading_measuring_code,date_create
+  </sql>
+  <sql id="Base_Meter_Query_Column">
+      d.id as device_id ,
+      wmt.parent_id as water_meter_type ,
+      d.device_no as 	device_no ,
+      d.site_id as  site_id ,
+      d.sys_id as sys_id ,
+      d.building_id as building_id ,
+      b.province as province ,
+      b.city as city ,
+      b.region as region,
+      b.community as community ,
+      d.customer_id as customer_id ,
+      d.water_meter_no as meter_no ,
+      d.water_meter_file_no as meter_file_no,
+      d.device_type as device_type_id ,
+      wrd.concentrator_id as concentrator_id ,
+      wrd.collector_id as collector_id ,
+      d.loc_desc as location,
+      wmt.valve_measuring_code as valve_measuring_code ,
+	  wmt.reading_measuring_code as reading_measuring_code,
+	  d.date_create as date_create
+  </sql>
+  <sql id="Base_Meter_Query_Where">
+      d.`status` = 1
+      and wmt.`status` = 1
+      and wmt.valve_measuring_code is not null
+      and wmt.reading_measuring_code is not null
+      and wmt.parent_id in (1,2)
+  </sql>
+  <sql id = "Base_Meter_Query">
+    SELECT
+      <include refid="Base_Meter_Query_Column" />
+    FROM
+      <include refid="Base_Meter_Query_Table"></include>
+    WHERE
+     <include refid="Base_Meter_Query_Where" />
+  </sql>
+  <sql id="Base_Meter_Query_Table">
+      sc_w_meter_type wmt
+      left join sc_device d on (d.`status` = 1 and wmt.device_type_id = d.device_type)
+      left join sc_water_related_device  wrd on (wrd.device_id =d.id)
+      left join sc_building b on (d.building_id = b.id)
+  </sql>
+  <select id="findAllWaterMeterList" resultMap="BaseResultMap">
+    <include refid="Base_Meter_Query" />
+  </select>
+  <select id="findWaterMeterListWithPage" resultMap="BaseResultMap">
+    <include refid="Base_Meter_Query" />
+	  order by d.id asc
+	  limit #{start,jdbcType=INTEGER},#{offset,jdbcType=INTEGER}
+  </select>
+  <select id="findWaterMeterById" resultMap="BaseResultMap">
+    <include refid="Base_Meter_Query" />
+	and d.id =  #{deviceId,jdbcType=BIGINT}
+  </select>
+
+  <select id="countWaterMeter" resultType="java.lang.Long">
+    select
+        count(1)
+    from
+       <include refid="Base_Meter_Query_Table"></include>
+    WHERE
+       <include refid="Base_Meter_Query_Where" />
+  </select>
+  <select id="countWaterMeterByShard" resultType="java.lang.Long">
+      select
+        count(1)
+      from (
+      <include refid="Base_Meter_Query" />
+      limit #{shardBegin,jdbcType=INTEGER},#{shardSize,jdbcType=INTEGER}
+      )  tmp
+  </select>
+  <select id="findShardWaterMeterListWithPage" resultMap="BaseResultMap">
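+      <!-- the inner LIMIT (shardBegin, shardSize) carves out this shard's row window; the outer LIMIT (start, offset) pages within that shard -->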
+      select
+      <include refid="Base_Column_List" />
+      from (
+        <include refid="Base_Meter_Query" />
+        order by d.id asc
+        limit #{shardBegin,jdbcType=INTEGER},#{shardSize,jdbcType=INTEGER}
+      )  tmp
+      limit #{start,jdbcType=INTEGER},#{offset,jdbcType=INTEGER}
+  </select>
+</mapper>
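
Note: the mapper interface behind these statements is not shown in this diff; below is a minimal sketch implied by the select ids and #{...} placeholders above (the @Param names are assumptions inferred from the placeholders and from the call in WaterMeterDataProvider further below).

    import java.util.List;
    import org.apache.ibatis.annotations.Param;
    import com.huaxu.zoniot.entity.WaterMeter;

    // Sketch of the interface implied by the statements above; @Param names are inferred, not confirmed.
    public interface WaterMeterMapper {
        List<WaterMeter> findAllWaterMeterList();
        List<WaterMeter> findWaterMeterListWithPage(@Param("start") int start, @Param("offset") int offset);
        WaterMeter findWaterMeterById(@Param("deviceId") Long deviceId);
        Long countWaterMeter();
        Long countWaterMeterByShard(@Param("shardBegin") int shardBegin, @Param("shardSize") int shardSize);
        List<WaterMeter> findShardWaterMeterListWithPage(@Param("shardBegin") int shardBegin, @Param("shardSize") int shardSize,
                                                         @Param("start") int start, @Param("offset") int offset);
    }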

+ 20 - 0
meter-reading-common/target/classes/script/sql/stat_meter_read_rate_by_building.sql

@@ -0,0 +1,20 @@
+replace into sc_stat_meter_read_rate_by_building (stat_day,site_id,channel_id,customer_id,building_id,device_count,read_times,real_read_times,un_read_times,read_rate,date_create,date_update)
+select
+	DATE_FORMAT( date_add(#{date}, interval -1 day), '%Y%m%d' ),
+	site_id,  -- site
+	sys_id,  -- channel
+	customer_id,  -- customer
+	building_id,  -- building
+	count(distinct device_id) as device_count, -- total devices
+	count(1) as read_times,   -- expected readings
+	SUM(IF(read_status = 2, 1, 0)) as real_read_times, -- actual readings
+	SUM(IF(read_status = 1, 1, 0)) as un_read_times,   -- missed readings
+	FORMAT(SUM(IF(read_status = 2, 1, 0))/ COUNT(1)*100, 2) as read_rate , -- read rate (%)
+	now(),
+	now()
+from
+	sc_meter_read_record 
+where 
+	read_date = DATE_FORMAT( date_add(#{date}, interval -1 day), '%Y%m%d' )
+	group by site_id,sys_id,customer_id,building_id
+
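
Note: the statistics scripts in this directory use MyBatis #{date} placeholders; the following is a hedged sketch of how a mapper method might bind that parameter (the interface and method names here are illustrative, not taken from this change).

    import java.util.Date;
    import org.apache.ibatis.annotations.Param;

    // Illustrative only: shows how the #{date} placeholder in the script above could be bound.
    public interface ReadRateStatMapper {                     // hypothetical interface name
        int statLastDayReadRateByBuilding(@Param("date") Date date);
    }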

+ 42 - 0
meter-reading-common/target/classes/script/sql/stat_meter_read_rate_by_building_15day.sql

@@ -0,0 +1,42 @@
+replace into sc_stat_meter_read_rate_by_building_15day (stat_day,site_id,channel_id,customer_id,building_id,device_count,read_times,real_read_times,un_read_times,read_rate,date_create,date_update)
+SELECT
+	DATE_FORMAT( date_add(#{date}, interval -1 day), '%Y%m%d' ),
+	tmp.site_id,
+	tmp.sys_id,
+	tmp.customer_id,
+	tmp.building_id,
+	count(tmp.device_id) as device_count,
+	count(1) as read_times,  
+	SUM(IF(tmp.real_read_times != 0 ,1,0)) as real_read_times,
+	SUM(IF(tmp.un_read_times = 15 ,1,0)) as un_read_times,
+	FORMAT(SUM(IF(tmp.real_read_times != 0 ,1,0))/ COUNT(1)*100, 2) as read_rate , -- read rate (%)
+	now(),
+	now()
+FROM
+	(
+		SELECT
+			site_id,
+			sys_id,
+			customer_id,
+			building_id,
+			device_id,
+			SUM( IF ( read_status = 2, 1, 0 ) ) AS real_read_times,
+			SUM( IF ( read_status = 1, 1, 0 ) ) AS un_read_times 
+		FROM
+			sc_meter_read_record 
+		WHERE
+			read_date < DATE_FORMAT( #{date}, '%Y%m%d' )
+			AND read_date >= DATE_FORMAT( date_add(#{date}, interval -15 day), '%Y%m%d' )
+		GROUP BY
+			site_id,
+			sys_id,
+			customer_id,
+			building_id,
+			device_id 
+	) tmp
+	GROUP BY
+			site_id,
+			sys_id,
+			customer_id,
+			building_id
+	

+ 41 - 0
meter-reading-common/target/classes/script/sql/stat_meter_read_rate_by_building_7day.sql

@@ -0,0 +1,41 @@
+replace into sc_stat_meter_read_rate_by_building_7day (stat_day,site_id,channel_id,customer_id,building_id,device_count,read_times,real_read_times,un_read_times,read_rate,date_create,date_update)
+SELECT
+	DATE_FORMAT( date_add(#{date}, interval -1 day), '%Y%m%d' ),
+	tmp.site_id,
+	tmp.sys_id,
+	tmp.customer_id,
+	tmp.building_id,
+	count(tmp.device_id) as device_count,
+	count(1) as read_times,  
+	SUM(IF(tmp.real_read_times != 0 ,1,0)) as real_read_times,
+	SUM(IF(tmp.un_read_times = 7 ,1,0)) as un_read_times,
+	FORMAT(SUM(IF(tmp.real_read_times != 0 ,1,0))/ COUNT(1)*100, 2) as read_rate , -- read rate (%)
+	now(),
+	now()
+FROM
+	(
+		SELECT
+			site_id,
+			sys_id,
+			customer_id,
+			building_id,
+			device_id,
+			SUM( IF ( read_status = 2, 1, 0 ) ) AS real_read_times,
+			SUM( IF ( read_status = 1, 1, 0 ) ) AS un_read_times 
+		FROM
+			sc_meter_read_record 
+		WHERE
+			read_date < DATE_FORMAT( #{date}, '%Y%m%d' )
+			AND read_date >= DATE_FORMAT( date_add(#{date}, interval -7 day), '%Y%m%d' )
+		GROUP BY
+			site_id,
+			sys_id,
+			customer_id,
+			building_id,
+			device_id 
+	) tmp
+	GROUP BY
+			site_id,
+			sys_id,
+			customer_id,
+			building_id

+ 41 - 0
meter-reading-common/target/classes/script/sql/stat_meter_read_rate_by_building_month.sql

@@ -0,0 +1,41 @@
+replace into sc_stat_meter_read_rate_by_building_month (stat_day,site_id,channel_id,customer_id,building_id,device_count,read_times,real_read_times,un_read_times,read_rate,date_create,date_update)
+SELECT
+	DATE_FORMAT( date_add(#{date}, interval -1 MONTH), '%Y%m' ),
+	tmp.site_id,
+	tmp.sys_id,
+	tmp.customer_id,
+	tmp.building_id,
+	count(tmp.device_id) as device_count,
+	count(1) as read_times,  
+	SUM(IF(tmp.real_read_times != 0 ,1,0)) as real_read_times,
+	SUM(IF(tmp.un_read_times = day(LAST_DAY(date_add(#{date}, INTERVAL - 1 MONTH ))) ,1,0)) as un_read_times,
+	FORMAT(SUM(IF(tmp.real_read_times != 0 ,1,0))/ COUNT(1)*100, 2) as read_rate , -- read rate (%)
+	now(),
+	now()
+FROM
+	(
+		SELECT
+			site_id,
+			sys_id,
+			customer_id,
+			building_id,
+			device_id,
+			SUM( IF ( read_status = 2, 1, 0 ) ) AS real_read_times,
+			SUM( IF ( read_status = 1, 1, 0 ) ) AS un_read_times 
+		FROM
+			sc_meter_read_record 
+		WHERE
+		    period_diff(date_format(#{date} , '%Y%m') , date_format(`read_date`, '%Y%m')) =1
+		GROUP BY
+			site_id,
+			sys_id,
+			customer_id,
+			building_id,
+			device_id 
+	) tmp
+	GROUP BY
+			site_id,
+			sys_id,
+			customer_id,
+			building_id
+	

+ 0 - 0
meter-reading-common/target/classes/script/sql/stat_meter_read_rate_by_collector_15day.sql


+ 0 - 0
meter-reading-common/target/classes/script/sql/stat_meter_read_rate_by_collector_7day.sql


+ 25 - 0
meter-reading-common/target/classes/script/sql/stat_meter_read_rate_by_collector_day.sql

@@ -0,0 +1,25 @@
+-- Meter read rate statistics per customer collector
+INSERT INTO sc_stat_meter_read_rate_by_collector (stat_day,site_id,channel_id,meter_type_id,customer_id,concentrator_id,collector_id,device_count,read_times,real_read_times,un_read_times,read_rate,date_create,date_update)
+-- grouped by site, meter type, customer, concentrator and collector
+SELECT
+	DATE_FORMAT( date_add(:date, interval -1 day), '%Y%m%d' ),
+	site_id, -- site
+	0, -- channel
+	device_type_id, -- meter type
+	customer_id, -- customer
+	concentrator_id, -- concentrator
+	collector_id , -- collector
+	count(DISTINCT device_id) as device_count, -- total devices
+	count(1) as read_times,   -- expected readings
+	SUM(IF(read_status = 2, 1, 0)) as real_read_times, -- actual readings
+	SUM(IF(read_status = 1, 1, 0)) as un_read_times,   -- missed readings
+	FORMAT(SUM(IF(read_status = 2, 1, 0))/ COUNT(1)*100, 2) as read_rate , -- read rate (%)
+	now(),
+	now()
+FROM
+	sc_meter_read_record
+where read_date < DATE_FORMAT( :date, '%Y%m%d' ) 
+AND read_date >= DATE_FORMAT( date_add(:date, interval - 1 day), '%Y%m%d' ) 
+and device_type_id in (17,18,19)
+group by
+	site_id,device_type_id,customer_id,concentrator_id,collector_id 
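
Note: unlike the neighbouring scripts, this one uses `:date` named parameters instead of #{date}, which suggests it is executed through a named-parameter JDBC API rather than MyBatis. Below is a minimal sketch under that assumption; the helper class and the way the script text is loaded are hypothetical.

    import java.util.Collections;
    import java.util.Date;
    import javax.sql.DataSource;
    import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate;

    // Sketch: executing a ":date"-parameterised script through Spring's NamedParameterJdbcTemplate.
    public class CollectorDayStatRunner {                     // hypothetical helper, not in this commit
        private final NamedParameterJdbcTemplate jdbc;

        public CollectorDayStatRunner(DataSource dataSource) {
            this.jdbc = new NamedParameterJdbcTemplate(dataSource);
        }

        public int run(String sql, Date date) {
            // ":date" in the script is bound from the map key "date"
            return jdbc.update(sql, Collections.singletonMap("date", date));
        }
    }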

+ 60 - 0
meter-reading-common/target/classes/script/sql/stat_meter_read_rate_by_community.sql

@@ -0,0 +1,60 @@
+replace into sc_stat_meter_read_rate_by_community (stat_day,site_id,channel_id,customer_id,community_id,device_count,read_times,real_read_times,un_read_times,read_rate,date_create,date_update)
+-- overall statistics across all sites
+select
+	DATE_FORMAT( date_add(#{date}, interval -1 day), '%Y%m%d' ),
+	0,  -- all sites
+	0,  -- all channels
+	0,  -- all customers
+	0,  -- all communities
+	count(1) as device_count, -- total devices
+	count(1) as read_times,   -- expected readings
+	SUM(IF(read_status = 2, 1, 0)) as real_read_times, -- actual readings
+	SUM(IF(read_status = 1, 1, 0)) as un_read_times,   -- missed readings
+	FORMAT(SUM(IF(read_status = 2, 1, 0))/ COUNT(1)*100, 2) as read_rate , -- read rate (%)
+	now(),
+	now()
+from
+	sc_meter_read_record 
+where 
+	read_date = DATE_FORMAT( date_add(#{date}, interval -1 day), '%Y%m%d' )
+-- all sites, grouped by community
+union all
+select
+	DATE_FORMAT( date_add(#{date}, interval -1 day), '%Y%m%d' ),
+	0,  -- all sites
+	0,  -- all channels
+	0,  -- all customers
+	community,  -- community
+	count(1) as device_count, -- total devices
+	count(1) as read_times,   -- expected readings
+	SUM(IF(read_status = 2, 1, 0)) as real_read_times, -- actual readings
+	SUM(IF(read_status = 1, 1, 0)) as un_read_times,   -- missed readings
+	FORMAT(SUM(IF(read_status = 2, 1, 0))/ COUNT(1)*100, 2) as read_rate , -- read rate (%)
+	now(),
+	now()
+from
+	sc_meter_read_record 
+where 
+	read_date = DATE_FORMAT( date_add(#{date}, interval -1 day), '%Y%m%d' )
+group by community
+-- grouped by customer and community
+union all 
+select
+	DATE_FORMAT( date_add(#{date}, interval -1 day), '%Y%m%d' ),
+	0,  -- all sites
+	0,  -- all channels
+	customer_id,  -- customer
+	community,  -- community
+	count(1) as device_count, -- total devices
+	count(1) as read_times,   -- expected readings
+	SUM(IF(read_status = 2, 1, 0)) as real_read_times, -- actual readings
+	SUM(IF(read_status = 1, 1, 0)) as un_read_times,   -- missed readings
+	FORMAT(SUM(IF(read_status = 2, 1, 0))/ COUNT(1)*100, 2) as read_rate , -- read rate (%)
+	now(),
+	now()
+from
+	sc_meter_read_record 
+where 
+	read_date = DATE_FORMAT( date_add(#{date}, interval -1 day), '%Y%m%d' )
+	group by customer_id,community
+-- per-customer community statistics

+ 45 - 0
meter-reading-common/target/classes/script/sql/stat_meter_read_rate_by_community_15day.sql

@@ -0,0 +1,45 @@
+-- 15-day meter read rate statistics per customer community
+replace into sc_stat_meter_read_rate_by_community_15day(stat_day,site_id,channel_id,meter_type_id,customer_id,community_id,device_count,read_times,real_read_times,un_read_times,read_rate,date_create,date_update)
+-- grouped by site, meter type, customer and community
+SELECT
+	DATE_FORMAT( date_add(#{date}, interval -1 day), '%Y%m%d' ),
+	tmp.site_id,
+	0,
+	tmp.device_type_id,
+	tmp.customer_id,
+	tmp.community,
+	count(tmp.device_id) as device_count,
+	count(1) as read_times,  
+	SUM(IF(tmp.real_read_times != 0 ,1,0)) as real_read_times,
+	SUM(IF(tmp.un_read_times = 15 ,1,0)) as un_read_times,
+	FORMAT(SUM(IF(tmp.real_read_times != 0 ,1,0))/ COUNT(1)*100, 2) as read_rate , -- read rate (%)
+	now(),
+	now()
+FROM
+	(
+		SELECT
+			site_id,
+			device_type_id,
+			customer_id,
+			community,
+			device_id,
+			SUM( IF ( read_status = 2, 1, 0 ) ) AS real_read_times,
+			SUM( IF ( read_status = 1, 1, 0 ) ) AS un_read_times 
+		FROM
+			sc_meter_read_record 
+		WHERE
+			read_date < DATE_FORMAT( #{date}, '%Y%m%d' )
+		AND read_date >= DATE_FORMAT( date_add(#{date}, interval -15 day), '%Y%m%d' )
+		and device_type_id in (15,20)
+		GROUP BY
+			site_id,
+			device_type_id,
+			customer_id,
+			community,
+			device_id
+	) tmp
+	GROUP BY
+			site_id,
+			device_type_id,
+			customer_id,
+			community

+ 44 - 0
meter-reading-common/target/classes/script/sql/stat_meter_read_rate_by_community_7day.sql

@@ -0,0 +1,44 @@
+-- 7-day meter read rate statistics per customer community
+replace into sc_stat_meter_read_rate_by_community_7day(stat_day,site_id,channel_id,meter_type_id,customer_id,community_id,device_count,read_times,real_read_times,un_read_times,read_rate,date_create,date_update)
+SELECT
+	DATE_FORMAT( date_add(#{date}, interval -1 day), '%Y%m%d' ),
+	tmp.site_id,
+	0,
+	tmp.device_type_id,
+	tmp.customer_id,
+	tmp.community,
+	count(tmp.device_id) as device_count,
+	count(1) as read_times,  
+	SUM(IF(tmp.real_read_times != 0 ,1,0)) as real_read_times,
+	SUM(IF(tmp.un_read_times = 7 ,1,0)) as un_read_times,
+	FORMAT(SUM(IF(tmp.real_read_times != 0 ,1,0))/ COUNT(1)*100, 2) as read_rate , -- read rate (%)
+	now(),
+	now()
+FROM
+	(
+		SELECT
+			site_id,
+			device_type_id,
+			customer_id,
+			community,
+			device_id,
+			SUM( IF ( read_status = 2, 1, 0 ) ) AS real_read_times,
+			SUM( IF ( read_status = 1, 1, 0 ) ) AS un_read_times 
+		FROM
+			sc_meter_read_record 
+		WHERE
+			read_date < DATE_FORMAT( #{date}, '%Y%m%d' )
+		AND read_date >= DATE_FORMAT( date_add(#{date}, interval -7 day), '%Y%m%d' )
+		and device_type_id in (15,20)
+		GROUP BY
+			site_id,
+			device_type_id,
+			customer_id,
+			community,
+			device_id
+	) tmp
+	GROUP BY
+			site_id,
+			device_type_id,
+			customer_id,
+			community

+ 24 - 0
meter-reading-common/target/classes/script/sql/stat_meter_read_rate_by_community_day.sql

@@ -0,0 +1,24 @@
+-- Daily meter read rate statistics per customer community
+replace into sc_stat_meter_read_rate_by_community(stat_day,site_id,channel_id,meter_type_id,customer_id,community_id,device_count,read_times,real_read_times,un_read_times,read_rate,date_create,date_update)
+-- grouped by site, meter type, customer and community
+SELECT
+	DATE_FORMAT( date_add(#{date}, interval -1 day), '%Y%m%d' ),
+	site_id, -- site
+	0, -- channel
+	device_type_id, -- meter type
+	customer_id, -- customer
+	community, -- community
+	count(distinct device_id) as device_count, -- total devices
+	count(1) as read_times,   -- expected readings
+	SUM(IF(read_status = 2, 1, 0)) as real_read_times, -- actual readings
+	SUM(IF(read_status = 1, 1, 0)) as un_read_times,   -- missed readings
+	FORMAT(SUM(IF(read_status = 2, 1, 0))/ COUNT(1)*100, 2) as read_rate , -- read rate (%)
+	now(),
+	now()
+FROM
+	sc_meter_read_record
+where read_date < DATE_FORMAT( #{date}, '%Y%m%d' )
+AND read_date >= DATE_FORMAT( date_add(#{date}, interval -1 day), '%Y%m%d' )
+and device_type_id in (15,20)
+group by
+	site_id,device_type_id,customer_id,community

+ 44 - 0
meter-reading-common/target/classes/script/sql/stat_meter_read_rate_by_concentrator_15day.sql

@@ -0,0 +1,44 @@
+-- 15-day meter read rate statistics per customer concentrator
+replace INTO sc_stat_meter_read_rate_by_concentrator_15day( stat_day, site_id, channel_id,meter_type_id, customer_id, concentrator_id, device_count, read_times, real_read_times, un_read_times, read_rate, date_create, date_update )
+SELECT
+	DATE_FORMAT( date_add(#{date}, interval -1 day), '%Y%m%d' ),
+	tmp.site_id,
+	0,
+	tmp.device_type_id,
+	tmp.customer_id,
+	tmp.concentrator_id,
+	count(tmp.device_id) as device_count,
+	count(1) as read_times,  
+	SUM(IF(tmp.real_read_times != 0 ,1,0)) as real_read_times,
+	SUM(IF(tmp.un_read_times = 15 ,1,0)) as un_read_times,
+	FORMAT(SUM(IF(tmp.real_read_times != 0 ,1,0))/ COUNT(1)*100, 2) as read_rate , -- read rate (%)
+	now(),
+	now()
+FROM
+	(
+		SELECT
+			site_id,
+			device_type_id,
+			customer_id,
+			concentrator_id,
+			device_id,
+			SUM( IF ( read_status = 2, 1, 0 ) ) AS real_read_times,
+			SUM( IF ( read_status = 1, 1, 0 ) ) AS un_read_times 
+		FROM
+			sc_meter_read_record 
+		WHERE
+			device_type_id  in (select device_type_id from sc_w_meter_type where parent_id = 1 and status = 1)
+		and	read_date < DATE_FORMAT( #{date}, '%Y%m%d' )
+		AND read_date >= DATE_FORMAT( date_add(#{date}, interval -15 day), '%Y%m%d' ) 
+		GROUP BY
+			site_id,
+			device_type_id,
+			customer_id,
+			concentrator_id,
+			device_id 
+	) tmp
+	GROUP BY
+			site_id,
+			device_type_id,
+			customer_id,
+			concentrator_id

+ 44 - 0
meter-reading-common/target/classes/script/sql/stat_meter_read_rate_by_concentrator_7day.sql

@@ -0,0 +1,44 @@
+-- 7-day meter read rate statistics per customer concentrator
+replace INTO sc_stat_meter_read_rate_by_concentrator_7day( stat_day, site_id, channel_id,meter_type_id, customer_id, concentrator_id, device_count, read_times, real_read_times, un_read_times, read_rate, date_create, date_update )
+SELECT
+	DATE_FORMAT( date_add(#{date}, interval -1 day), '%Y%m%d' ),
+	tmp.site_id,
+	0,
+	tmp.device_type_id,
+	tmp.customer_id,
+	tmp.concentrator_id,
+	count(tmp.device_id) as device_count,
+	count(1) as read_times,  
+	SUM(IF(tmp.real_read_times != 0 ,1,0)) as real_read_times,
+	SUM(IF(tmp.un_read_times = 7 ,1,0)) as un_read_times,
+	FORMAT(SUM(IF(tmp.real_read_times != 0 ,1,0))/ COUNT(1)*100, 2) as read_rate , -- read rate (%)
+	now(),
+	now()
+FROM
+	(
+		SELECT
+			site_id,
+			device_type_id,
+			customer_id,
+			concentrator_id,
+			device_id,
+			SUM( IF ( read_status = 2, 1, 0 ) ) AS real_read_times,
+			SUM( IF ( read_status = 1, 1, 0 ) ) AS un_read_times 
+		FROM
+			sc_meter_read_record 
+		WHERE
+			device_type_id  in (select device_type_id from sc_w_meter_type where parent_id = 1 and status = 1)
+		AND	read_date < DATE_FORMAT( #{date}, '%Y%m%d' )
+		AND read_date >= DATE_FORMAT( date_add(#{date}, interval -7 day), '%Y%m%d' ) 
+		GROUP BY
+			site_id,
+			device_type_id,
+			customer_id,
+			concentrator_id,
+			device_id 
+	) tmp
+	GROUP BY
+			site_id,
+			device_type_id,
+			customer_id,
+			concentrator_id

+ 25 - 0
meter-reading-common/target/classes/script/sql/stat_meter_read_rate_by_concentrator_day.sql

@@ -0,0 +1,25 @@
+-- Daily meter read rate statistics per customer concentrator
+replace INTO sc_stat_meter_read_rate_by_concentrator ( stat_day, site_id, channel_id,meter_type_id, customer_id, concentrator_id, device_count, read_times, real_read_times, un_read_times, read_rate, date_create, date_update )
+-- grouped by site, meter type, customer and concentrator
+SELECT
+	DATE_FORMAT( date_add(#{date}, interval -1 day), '%Y%m%d' ),
+	site_id, -- site
+	0, -- channel
+	device_type_id, -- meter type
+	customer_id, -- customer
+	concentrator_id, -- concentrator
+	count(distinct device_id) as device_count, -- total devices
+	count(1) as read_times,   -- expected readings
+	SUM(IF(read_status = 2, 1, 0)) as real_read_times, -- actual readings
+	SUM(IF(read_status = 1, 1, 0)) as un_read_times,   -- missed readings
+	FORMAT(SUM(IF(read_status = 2, 1, 0))/ COUNT(1)*100, 2) as read_rate , -- read rate (%)
+	now(),
+	now()
+FROM
+	sc_meter_read_record
+where
+    device_type_id  in (select device_type_id from sc_w_meter_type where parent_id = 1 and status = 1)
+and read_date < DATE_FORMAT( #{date}, '%Y%m%d' )
+AND read_date >= DATE_FORMAT( date_add(#{date}, interval -1 day), '%Y%m%d' )
+group by
+	site_id,device_type_id,customer_id,concentrator_id

+ 45 - 0
meter-reading-common/target/classes/script/sql/stat_meter_read_rate_by_concentrator_month.sql

@@ -0,0 +1,45 @@
+-- Monthly meter read rate statistics per customer concentrator
+replace INTO sc_stat_meter_read_rate_by_concentrator_month ( stat_month, site_id, channel_id,meter_type_id, customer_id, concentrator_id, device_count, read_times, real_read_times, un_read_times, read_rate, date_create, date_update )
+-- grouped by site, meter type, customer and concentrator
+SELECT
+	DATE_FORMAT( date_add( #{date}, INTERVAL - 1 MONTH ), '%Y%m' ),
+	tmp.site_id,
+	0,
+	tmp.device_type_id,
+	tmp.customer_id,
+	tmp.concentrator_id,
+	count(tmp.device_id) as device_count,
+	count(1) as read_times,
+	SUM(IF(tmp.real_read_times != 0 ,1,0)) as real_read_times,
+	SUM(IF(tmp.un_read_times = day(LAST_DAY(date_add(#{date}, INTERVAL - 1 MONTH ))) ,1,0)) as un_read_times,
+	FORMAT(SUM(IF(tmp.real_read_times != 0 ,1,0))/ COUNT(1)*100, 2) as read_rate , -- read rate (%)
+	now(),
+	now()
+FROM
+	(
+		SELECT
+			site_id,
+			device_type_id,
+			customer_id,
+			concentrator_id,
+			device_id,
+			SUM( IF ( read_status = 2, 1, 0 ) ) AS real_read_times,
+			SUM( IF ( read_status = 1, 1, 0 ) ) AS un_read_times
+		FROM
+			sc_meter_read_record
+		WHERE
+			device_type_id  in (select device_type_id from sc_w_meter_type where parent_id = 1 and status = 1)
+		AND	read_date < date_format(date_add(#{date},interval - day(#{date})+1 day) ,'%Y%m%d')
+		AND read_date >= date_format(date_sub(date_sub( date_format( #{date}, '%Y-%m-%d' ), INTERVAL extract( DAY FROM #{date} ) - 1 DAY ),INTERVAL 1 MONTH ),'%Y%m%d')
+		GROUP BY
+			site_id,
+			device_type_id,
+			customer_id,
+			concentrator_id,
+			device_id
+	) tmp
+	GROUP BY
+			site_id,
+			device_type_id,
+			customer_id,
+			concentrator_id
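
Note: the read_date window above boils down to [first day of the previous month, first day of the current month). Below is a small sketch of the same boundaries computed with hutool's DateUtil, which this change already uses elsewhere; the example date and the yyyyMMdd formatting are illustrative.

    import java.util.Date;
    import cn.hutool.core.date.DateUtil;

    // Sketch: the same month window computed in Java with hutool (example date is illustrative).
    public class MonthWindowExample {
        public static void main(String[] args) {
            Date date = DateUtil.parse("2021-01-03");                                  // stands in for #{date}
            Date windowStart = DateUtil.beginOfMonth(DateUtil.offsetMonth(date, -1));  // first day of previous month
            Date windowEnd = DateUtil.beginOfMonth(date);                              // first day of current month
            System.out.println(DateUtil.format(windowStart, "yyyyMMdd"));              // 20201201
            System.out.println(DateUtil.format(windowEnd, "yyyyMMdd"));                // 20210101
        }
    }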

+ 21 - 0
meter-reading-common/target/classes/script/sql/stat_meter_unread_device_by_building.sql

@@ -0,0 +1,21 @@
+insert into sc_stat_meter_unread_device_by_building (stat_day,site_id,channel_id,building_id,device_id,date_create)
+SELECT
+	DATE_FORMAT( date_add( #{date}, INTERVAL - 1 DAY ), '%Y%m%d' ),
+	site_id,
+	sys_id,
+	building_id,
+	device_id,
+	now()
+FROM
+	sc_meter_read_record 
+WHERE
+	read_date < DATE_FORMAT( #{date}, '%Y%m%d' )
+	AND read_date >= DATE_FORMAT( date_add(#{date}, interval -1 day), '%Y%m%d' )
+	AND read_status = 1
+GROUP BY
+	site_id,
+	sys_id,
+	building_id,
+	device_id 
+HAVING
+	count(1) = 1

+ 21 - 0
meter-reading-common/target/classes/script/sql/stat_meter_unread_device_by_building_15day.sql

@@ -0,0 +1,21 @@
+insert into sc_stat_meter_unread_device_by_building_15day (stat_day,site_id,channel_id,building_id,device_id,date_create)
+SELECT
+	DATE_FORMAT( date_add( #{date}, INTERVAL - 1 DAY ), '%Y%m%d' ),
+	site_id,
+	sys_id,
+	building_id,
+	device_id,
+	now()
+FROM
+	sc_meter_read_record 
+WHERE
+	read_date < DATE_FORMAT( #{date}, '%Y%m%d' )
+	AND read_date >= DATE_FORMAT( date_add(#{date}, interval -15 day), '%Y%m%d' )
+	AND read_status = 1
+GROUP BY
+	site_id,
+	sys_id,
+	building_id,
+	device_id 
+HAVING
+	count(1) = 15

+ 21 - 0
meter-reading-common/target/classes/script/sql/stat_meter_unread_device_by_building_7day.sql

@@ -0,0 +1,21 @@
+insert into sc_stat_meter_unread_device_by_building_7day (stat_day,site_id,channel_id,building_id,device_id,date_create)
+SELECT
+	DATE_FORMAT( date_add( #{date}, INTERVAL - 1 DAY ), '%Y%m%d' ),
+	site_id,
+	sys_id,
+	building_id,
+	device_id,
+	now()
+FROM
+	sc_meter_read_record 
+WHERE
+	read_date < DATE_FORMAT( #{date}, '%Y%m%d' )
+	AND read_date >= DATE_FORMAT( date_add(#{date}, interval -7 day), '%Y%m%d' )
+	AND read_status = 1
+GROUP BY
+	site_id,
+	sys_id,
+	building_id,
+	device_id 
+HAVING
+	count(1) = 7

+ 20 - 0
meter-reading-common/target/classes/script/sql/stat_meter_unread_device_by_building_month.sql

@@ -0,0 +1,20 @@
+insert into sc_stat_meter_unread_device_by_building_month (stat_day,site_id,channel_id,building_id,device_id,date_create)
+SELECT
+	DATE_FORMAT( date_add( #{date}, INTERVAL - 1 MONTH ), '%Y%m' ),
+	site_id,
+	sys_id,
+	building_id,
+	device_id,
+	now()
+FROM
+	sc_meter_read_record 
+WHERE
+	period_diff(date_format(#{date} , '%Y%m') , date_format(`read_date`, '%Y%m')) =1
+	AND read_status = 1
+GROUP BY
+	site_id,
+	sys_id,
+	building_id,
+	device_id 
+HAVING
+	count(1) = day(LAST_DAY(date_add(#{date}, INTERVAL - 1 MONTH )))

+ 26 - 0
meter-reading-common/target/classes/script/sql/stat_meter_unread_device_by_building_month_v2.sql

@@ -0,0 +1,26 @@
+insert into sc_stat_meter_unread_device_by_building_month (stat_day,site_id,channel_id,building_id,device_id,date_create)
+SELECT
+	DATE_FORMAT( date_add( :date, INTERVAL - 1 MONTH ), '%Y%m' ),
+	t1.site_id,
+	t1.sys_id,
+	t1.building_id,
+	t1.device_id,
+	now()
+FROM(
+        SELECT
+            site_id,
+            sys_id,
+            building_id,
+            device_id,
+            SUM(IF(read_status = 1 ,1,0)) as unread_times,
+            SUM(IF(read_status = 2 ,1,0)) as read_times
+        FROM
+            sc_meter_read_record
+        WHERE
+            period_diff(date_format(:date , '%Y%m') , date_format(`read_date`, '%Y%m')) =1
+        GROUP BY
+            site_id,
+            sys_id,
+            building_id,
+            device_id
+) t1 WHERE t1.read_times = 0

+ 2 - 0
meter-reading-common/target/classes/script/sql/water_meter_error_day.sql

@@ -0,0 +1,2 @@
+-- Reset the consecutive-abnormal-day counters every day
+UPDATE sc_water_meter_error_days set days = 0,start_date = null,end_date = null WHERE end_date < DATE_FORMAT(date_add(:date, interval -1 day), '%Y%m%d' ) and status = 1

+ 60 - 0
meter-reading-job/pom.xml

@@ -0,0 +1,60 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+    <parent>
+        <groupId>org.springframework.boot</groupId>
+        <artifactId>spring-boot-starter-parent</artifactId>
+        <version>2.3.1.RELEASE</version>
+        <relativePath/> <!-- lookup parent from repository -->
+    </parent>
+    <groupId>com.huaxu.zoniot</groupId>
+    <artifactId>meter-reading-job</artifactId>
+    <version>1.0</version>
+    <description>Meter reading job sub-module</description>
+    <dependencies>
+        <dependency>
+            <groupId>com.huaxu.zoniot</groupId>
+            <artifactId>meter-reading-common</artifactId>
+            <version>1.0</version>
+        </dependency>
+        <dependency>
+            <groupId>org.springframework.boot</groupId>
+            <artifactId>spring-boot-starter-web</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.springframework.boot</groupId>
+            <artifactId>spring-boot-starter-test</artifactId>
+            <scope>test</scope>
+        </dependency>
+        <!-- xxl-job -->
+        <dependency>
+            <groupId>com.xuxueli</groupId>
+            <artifactId>xxl-job-core</artifactId>
+            <version>2.2.0</version>
+        </dependency>
+        <dependency>
+            <groupId>org.projectlombok</groupId>
+            <artifactId>lombok</artifactId>
+            <optional>true</optional>
+        </dependency>
+        <!-- AMQP-->
+        <dependency>
+            <groupId>org.springframework.boot</groupId>
+            <artifactId>spring-boot-starter-amqp</artifactId>
+        </dependency>
+    </dependencies>
+    <build>
+        <plugins>
+            <plugin>
+                <groupId>org.springframework.boot</groupId>
+                <artifactId>spring-boot-maven-plugin</artifactId>
+                <configuration>
+                    <fork>true</fork>
+                    <addResources>true</addResources>
+                </configuration>
+            </plugin>
+        </plugins>
+    </build>
+</project>

+ 19 - 0
meter-reading-job/src/main/java/com/huaxu/zoniot/MeterReadingJobApplication.java

@@ -0,0 +1,19 @@
+package com.huaxu.zoniot;
+
+import org.springframework.boot.SpringApplication;
+import org.springframework.boot.autoconfigure.SpringBootApplication;
+
+/**
+ * <p></p>
+ *
+ * @Author wilian.peng
+ * @Date 2021/1/3 11:04
+ * @Version 1.0
+ */
+@SpringBootApplication
+public class MeterReadingJobApplication {
+    public static void main(String[] args) {
+        SpringApplication.run(MeterReadingJobApplication.class,args);
+    }
+
+}

+ 76 - 0
meter-reading-job/src/main/java/com/huaxu/zoniot/config/XxlJobConfig.java

@@ -0,0 +1,76 @@
+package com.huaxu.zoniot.config;
+
+import com.xxl.job.core.executor.impl.XxlJobSpringExecutor;
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+
+/**
+ * xxl-job config
+ *
+ * @author pengdi
+ */
+@Slf4j
+@Configuration
+public class XxlJobConfig {
+    @Value("${xxl.job.admin.addresses}")
+    private String adminAddresses;
+
+    @Value("${xxl.job.accessToken}")
+    private String accessToken;
+
+    @Value("${xxl.job.executor.appname}")
+    private String appname;
+
+    @Value("${xxl.job.executor.address}")
+    private String address;
+
+    @Value("${xxl.job.executor.ip}")
+    private String ip;
+
+    @Value("${xxl.job.executor.port}")
+    private int port;
+
+    @Value("${xxl.job.executor.logpath}")
+    private String logPath;
+
+    @Value("${xxl.job.executor.logretentiondays}")
+    private int logRetentionDays;
+
+
+    @Bean
+    public XxlJobSpringExecutor xxlJobExecutor() {
+        log.info(">>>>>>>>>>> xxl-job config init.");
+        XxlJobSpringExecutor xxlJobSpringExecutor = new XxlJobSpringExecutor();
+        xxlJobSpringExecutor.setAdminAddresses(adminAddresses);
+        xxlJobSpringExecutor.setAppname(appname);
+        xxlJobSpringExecutor.setAddress(address);
+        xxlJobSpringExecutor.setIp(ip);
+        xxlJobSpringExecutor.setPort(port);
+        xxlJobSpringExecutor.setAccessToken(accessToken);
+        xxlJobSpringExecutor.setLogPath(logPath);
+        xxlJobSpringExecutor.setLogRetentionDays(logRetentionDays);
+
+        return xxlJobSpringExecutor;
+    }
+
+    /**
+     * For hosts with multiple NICs or deployments inside containers, the "InetUtils" component
+     * provided by "spring-cloud-commons" can be used to flexibly determine the IP to register:
+     *
+     *      1. Add the dependency:
+     *          <dependency>
+     *             <groupId>org.springframework.cloud</groupId>
+     *             <artifactId>spring-cloud-commons</artifactId>
+     *             <version>${version}</version>
+     *         </dependency>
+     *
+     *      2. In the configuration file, or as a container startup variable:
+     *          spring.cloud.inetutils.preferred-networks: 'xxx.xxx.xxx.'
+     *
+     *      3. Obtain the IP:
+     *          String ip_ = inetUtils.findFirstNonLoopbackHostInfo().getIpAddress();
+     */
+
+
+}
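
Note: below is a sketch of the InetUtils approach described in the comment above, shown as an alternative form of the executor bean. It assumes spring-cloud-commons is on the classpath and is not part of this commit.

    // Alternative sketch: resolve the register IP via spring-cloud-commons InetUtils.
    // import org.springframework.cloud.commons.util.InetUtils;
    @Bean
    public XxlJobSpringExecutor xxlJobExecutor(InetUtils inetUtils) {
        XxlJobSpringExecutor executor = new XxlJobSpringExecutor();
        executor.setAdminAddresses(adminAddresses);
        executor.setAppname(appname);
        executor.setIp(inetUtils.findFirstNonLoopbackHostInfo().getIpAddress()); // IP registered with the admin
        executor.setPort(port);
        executor.setAccessToken(accessToken);
        executor.setLogPath(logPath);
        executor.setLogRetentionDays(logRetentionDays);
        return executor;
    }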

+ 93 - 0
meter-reading-job/src/main/java/com/huaxu/zoniot/job/MeterReadJob.java

@@ -0,0 +1,93 @@
+package com.huaxu.zoniot.job;
+
+import cn.hutool.core.date.DateField;
+import cn.hutool.core.date.DateUtil;
+import cn.hutool.core.thread.ThreadUtil;
+import com.alibaba.fastjson.JSON;
+import com.huaxu.zoniot.common.Constants;
+import com.huaxu.zoniot.entity.JobResult;
+import com.huaxu.zoniot.entity.JobTask;
+import com.huaxu.zoniot.producer.JobTaskProducer;
+import com.huaxu.zoniot.service.JobTaskService;
+import com.xxl.job.core.biz.model.ReturnT;
+import com.xxl.job.core.handler.annotation.XxlJob;
+import com.xxl.job.core.log.XxlJobLogger;
+import com.xxl.job.core.util.ShardingUtil;
+import lombok.extern.slf4j.Slf4j;
+import org.apache.commons.lang3.StringUtils;
+import org.springframework.stereotype.Component;
+
+import javax.annotation.Resource;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * <p></p>
+ *
+ * @Author wilian.peng
+ * @Date 2020/12/23 21:19
+ * @Version 1.0
+ */
+@Slf4j
+@Component
+public class MeterReadJob {
+    @Resource
+    JobTaskService  jobTaskService;
+    @Resource
+    JobTaskProducer  jobTaskProducer ;
+
+    @XxlJob("meterReadJobHandler")
+    public ReturnT<String> meterReadJobHandler(String param) throws Exception {
+        XxlJobLogger.log("XXL-JOB, Meter Read Job.Param = {}",param);
+//        ShardingUtil.ShardingVO shardingVO = ShardingUtil.getShardingVo();
+//        int shardIndex = shardingVO.getIndex();
+//        int shardTotal = shardingVO.getTotal();
+
+        Map<String,Object> taskParam = null ;
+        if(StringUtils.isNotBlank(param)){
+            taskParam = JSON.parseObject(param, HashMap.class);
+        }
+        else{
+            taskParam = new HashMap<>(10);
+            // By default, generate unread-meter records for the next day
+            taskParam.put(Constants.READ_DAY_PARAM_FLAG,getTomorrowDay());
+        }
+        // Split the job into tasks
+        List<JobTask> taskList = jobTaskService.splitJob("meterReadJobHandler", taskParam);
+        XxlJobLogger.log("XXL-JOB, Meter Read Job. Task Size = {}", taskList.size());
+        // Dispatch the tasks to the queue
+        for(JobTask task : taskList){
+            jobTaskProducer.sendTask(task);
+        }
+        // Wait for all tasks to complete
+        JobResult jobResult ;
+        while(true){
+            jobResult = jobTaskService.getJobResult(taskList);
+            if((jobResult.getFailTaskCount()+jobResult.getSuccessTaskCount())==jobResult.getTotalTaskCount()){
+                break ;
+            }
+            // Poll the latest task state every 5 minutes
+            ThreadUtil.sleep(5, TimeUnit.MINUTES);
+        }
+        XxlJobLogger.log("XXL-JOB, Meter Read Job. Job Result = {}", JSON.toJSONString(jobResult));
+        return ReturnT.SUCCESS;
+    }
+
+    protected  int getTomorrowDay(){
+        Date tomorrow =DateUtil.offset(DateUtil.date(), DateField.DAY_OF_MONTH,  1);
+        int tomorrowDay = Integer.parseInt(DateUtil.format(tomorrow, Constants.DEFAULT_METER_READ_DATE_FORMAT));
+        return tomorrowDay;
+    }
+    @XxlJob("testJobHandler")
+    public ReturnT<String> testJobHandler(String param) throws Exception {
+        XxlJobLogger.log("XXL-JOB, Test Job.Param = {}",param);
+        ShardingUtil.ShardingVO shardingVO = ShardingUtil.getShardingVo();
+        int shardIndex = shardingVO.getIndex();
+        int shardTotal = shardingVO.getTotal();
+        XxlJobLogger.log("XXL-JOB, Test Job.Sharding Param: index = {}, total = {}", shardIndex, shardTotal);
+        return ReturnT.SUCCESS;
+    }
+}
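
Note: the handler expects the XXL-Job param to be a JSON object keyed by Constants.READ_DAY_PARAM_FLAG, whose literal value lives in the common module and is not shown here. Assuming that flag resolves to a key such as "readDay", the admin-side param would look like what this sketch prints.

    import java.util.HashMap;
    import java.util.Map;
    import com.alibaba.fastjson.JSON;

    // Illustration only: the real key is the value of Constants.READ_DAY_PARAM_FLAG (assumed here to be "readDay").
    public class MeterReadJobParamExample {
        public static void main(String[] args) {
            Map<String, Object> param = new HashMap<>();
            param.put("readDay", 20210104);                  // read day in yyyyMMdd form
            System.out.println(JSON.toJSONString(param));    // prints {"readDay":20210104}
        }
    }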

+ 258 - 0
meter-reading-job/src/main/java/com/huaxu/zoniot/job/MeterReadRateJob.java

@@ -0,0 +1,258 @@
+package com.huaxu.zoniot.job;
+
+import cn.hutool.core.date.DateUtil;
+import cn.hutool.core.map.MapUtil;
+import com.alibaba.fastjson.JSON;
+import com.huaxu.zoniot.common.Constants;
+import com.huaxu.zoniot.service.MeterReadRateService;
+import com.xxl.job.core.biz.model.ReturnT;
+import com.xxl.job.core.handler.annotation.XxlJob;
+import com.xxl.job.core.log.XxlJobLogger;
+import lombok.extern.slf4j.Slf4j;
+import org.apache.commons.lang3.StringUtils;
+import org.springframework.stereotype.Component;
+
+import javax.annotation.Resource;
+import java.util.Date;
+import java.util.HashMap;
+
+/**
+ * <p>Meter read rate statistics jobs</p>
+ *
+ * @Author wilian.peng
+ * @Date 2020/12/27 20:30
+ * @Version 1.0
+ */
+@Slf4j
+@Component
+public class MeterReadRateJob {
+    @Resource
+    MeterReadRateService meterReadRateService ;
+    @XxlJob("statLastDayReadRateByBuildingJobHandler")
+    public ReturnT<String> statLastDayReadRateByBuildingJobHandler(String param) throws Exception {
+        XxlJobLogger.log("XXL-JOB, Meter Read Rate Job.Param = {}",param);
+        Date runDate = new Date();
+        if(StringUtils.isNotBlank(param)) {
+            try{
+                HashMap paramMap = JSON.parseObject(param, HashMap.class);
+                Integer readDay = MapUtil.getInt(paramMap, Constants.READ_DAY_PARAM_FLAG);
+                runDate = DateUtil.parse(String.valueOf(readDay), Constants.DEFAULT_METER_READ_DATE_FORMAT).toJdkDate();
+            }catch (Exception e){
+                e.printStackTrace();
+                log.error("Not legal task param ,Use Default Param",e);
+            }
+        }
+        int count = meterReadRateService.statLastDayReadRateByBuilding(runDate);
+        XxlJobLogger.log("XXL-JOB, Meter Read Rate Job.Param = {},Data count={}", param, count);
+        return ReturnT.SUCCESS;
+    }
+
+    @XxlJob("statLast7DayReadRateByBuildingJobHandler")
+    public ReturnT<String> statLast7DayReadRateByBuildingJobHandler(String param) throws Exception {
+        XxlJobLogger.log("XXL-JOB, Meter Read Rate Job.Param = {}",param);
+        Date runDate = new Date();
+        if(StringUtils.isNotBlank(param)) {
+            try{
+                HashMap paramMap = JSON.parseObject(param, HashMap.class);
+                Integer readDay = MapUtil.getInt(paramMap, Constants.READ_DAY_PARAM_FLAG);
+                runDate = DateUtil.parse(String.valueOf(readDay), Constants.DEFAULT_METER_READ_DATE_FORMAT).toJdkDate();
+            }catch (Exception e){
+                e.printStackTrace();
+                log.error("Not legal task param ,Use Default Param",e);
+            }
+        }
+        int count = meterReadRateService.statLast7DayReadRateByBuilding(runDate);
+        XxlJobLogger.log("XXL-JOB, Meter Read Rate Job.Param = {},Data count={}", param, count);
+        return ReturnT.SUCCESS;
+    }
+    @XxlJob("statLast15DayReadRateByBuildingJobHandler")
+    public ReturnT<String> statLast15DayReadRateByBuildingJobHandler(String param) throws Exception {
+        XxlJobLogger.log("XXL-JOB, Meter Read Rate Job.Param = {}",param);
+        Date runDate = new Date();
+        if(StringUtils.isNotBlank(param)) {
+            try{
+                HashMap paramMap = JSON.parseObject(param, HashMap.class);
+                Integer readDay = MapUtil.getInt(paramMap, Constants.READ_DAY_PARAM_FLAG);
+                runDate = DateUtil.parse(String.valueOf(readDay), Constants.DEFAULT_METER_READ_DATE_FORMAT).toJdkDate();
+            }catch (Exception e){
+                e.printStackTrace();
+                log.error("Not legal task param ,Use Default Param",e);
+            }
+        }
+        int count = meterReadRateService.statLast15DayReadRateByBuilding(runDate);
+        XxlJobLogger.log("XXL-JOB, Meter Read Rate Job.Param = {},Data count={}", param, count);
+        return ReturnT.SUCCESS;
+    }
+
+    @XxlJob("statLastMonthReadRateByBuildingJobHandler")
+    public ReturnT<String> statLastMonthReadRateByBuildingJobHandler(String param) throws Exception {
+        XxlJobLogger.log("XXL-JOB, Meter Read Rate Job.Param = {}",param);
+        Date runDate = new Date();
+        if(StringUtils.isNotBlank(param)) {
+            try{
+                HashMap paramMap = JSON.parseObject(param, HashMap.class);
+                Integer readDay = MapUtil.getInt(paramMap, Constants.READ_DAY_PARAM_FLAG);
+                runDate = DateUtil.parse(String.valueOf(readDay), Constants.DEFAULT_METER_READ_DATE_FORMAT).toJdkDate();
+            }catch (Exception e){
+                e.printStackTrace();
+                log.error("Not legal task param ,Use Default Param",e);
+            }
+        }
+        int count = meterReadRateService.statLastMonthReadRateByBuilding(runDate);
+        XxlJobLogger.log("XXL-JOB, Meter Read Rate Job.Param = {},Data count={}", param, count);
+        return ReturnT.SUCCESS;
+    }
+
+    @XxlJob("statLastDayUnReadMeterByBuildingJobHandler")
+    public ReturnT<String> statLastDayUnReadMeterByBuildingJobHandler(String param) throws Exception {
+        XxlJobLogger.log("XXL-JOB, Meter Read Rate Job.Param = {}",param);
+        Date runDate = new Date();
+        if(StringUtils.isNotBlank(param)) {
+            try{
+                HashMap paramMap = JSON.parseObject(param, HashMap.class);
+                Integer readDay = MapUtil.getInt(paramMap, Constants.READ_DAY_PARAM_FLAG);
+                runDate = DateUtil.parse(String.valueOf(readDay), Constants.DEFAULT_METER_READ_DATE_FORMAT).toJdkDate();
+            }catch (Exception e){
+                e.printStackTrace();
+                log.error("Not legal task param ,Use Default Param",e);
+            }
+        }
+        int count = meterReadRateService.statLastDayUnReadMeterByBuilding(runDate);
+        XxlJobLogger.log("XXL-JOB, Meter Read Rate Job.Param = {},Data count={}", param, count);
+        return ReturnT.SUCCESS;
+    }
+
+    @XxlJob("statLast7DayUnReadMeterByBuildingJobHandler")
+    public ReturnT<String> statLast7DayUnReadMeterByBuildingJobHandler(String param) throws Exception {
+        XxlJobLogger.log("XXL-JOB, Meter Read Rate Job.Param = {}",param);
+        Date runDate = new Date();
+        if(StringUtils.isNotBlank(param)) {
+            try{
+                HashMap paramMap = JSON.parseObject(param, HashMap.class);
+                Integer readDay = MapUtil.getInt(paramMap, Constants.READ_DAY_PARAM_FLAG);
+                runDate = DateUtil.parse(String.valueOf(readDay), Constants.DEFAULT_METER_READ_DATE_FORMAT).toJdkDate();
+            }catch (Exception e){
+                e.printStackTrace();
+                log.error("Not legal task param ,Use Default Param",e);
+            }
+        }
+        int count = meterReadRateService.statLast7DayUnReadMeterByBuilding(runDate);
+        XxlJobLogger.log("XXL-JOB, Meter Read Rate Job.Param = {},Data count={}", param, count);
+        return ReturnT.SUCCESS;
+    }
+
+    @XxlJob("statLast15DayUnReadMeterByBuildingJobHandler")
+    public ReturnT<String> statLast15DayUnReadMeterByBuildingJobHandler(String param) throws Exception {
+        XxlJobLogger.log("XXL-JOB, Meter Read Rate Job.Param = {}",param);
+        Date runDate = new Date();
+        if(StringUtils.isNotBlank(param)) {
+            try{
+                HashMap paramMap = JSON.parseObject(param, HashMap.class);
+                Integer readDay = MapUtil.getInt(paramMap, Constants.READ_DAY_PARAM_FLAG);
+                runDate = DateUtil.parse(String.valueOf(readDay), Constants.DEFAULT_METER_READ_DATE_FORMAT).toJdkDate();
+            }catch (Exception e){
+                e.printStackTrace();
+                log.error("Not legal task param ,Use Default Param",e);
+            }
+        }
+        int count = meterReadRateService.statLast15DayUnReadMeterByBuilding(runDate);
+        XxlJobLogger.log("XXL-JOB, Meter Read Rate Job.Param = {},Data count={}", param, count);
+        return ReturnT.SUCCESS;
+    }
+
+    @XxlJob("statLastMonthUnReadMeterByBuildingJobHandler")
+    public ReturnT<String> statLastMonthUnReadMeterByBuildingJobHandler(String param) throws Exception {
+        XxlJobLogger.log("XXL-JOB, Meter Read Rate Job.Param = {}",param);
+        Date runDate = new Date();
+        if(StringUtils.isNotBlank(param)) {
+            try{
+                HashMap paramMap = JSON.parseObject(param, HashMap.class);
+                Integer readDay = MapUtil.getInt(paramMap, Constants.READ_DAY_PARAM_FLAG);
+                runDate = DateUtil.parse(String.valueOf(readDay), Constants.DEFAULT_METER_READ_DATE_FORMAT).toJdkDate();
+            }catch (Exception e){
+                e.printStackTrace();
+                log.error("Not legal task param ,Use Default Param",e);
+            }
+        }
+        int count = meterReadRateService.statLastMonthUnReadMeterByBuilding(runDate);
+        XxlJobLogger.log("XXL-JOB, Meter Read Rate Job.Param = {},Data count={}", param, count);
+        return ReturnT.SUCCESS;
+    }
+
+    @XxlJob("statLastDayReadRateByConcentratorJobHandler")
+    public ReturnT<String> statLastDayReadRateByConcentratorJobHandler(String param) throws Exception {
+        XxlJobLogger.log("XXL-JOB, Meter Read Rate Job.Param = {}",param);
+        Date runDate = new Date();
+        if(StringUtils.isNotBlank(param)) {
+            try{
+                HashMap paramMap = JSON.parseObject(param, HashMap.class);
+                Integer readDay = MapUtil.getInt(paramMap, Constants.READ_DAY_PARAM_FLAG);
+                runDate = DateUtil.parse(String.valueOf(readDay), Constants.DEFAULT_METER_READ_DATE_FORMAT).toJdkDate();
+            }catch (Exception e){
+                e.printStackTrace();
+                log.error("Not legal task param ,Use Default Param",e);
+            }
+        }
+        int count = meterReadRateService.statLastDayReadRateByConcentrator(runDate);
+        XxlJobLogger.log("XXL-JOB, Meter Read Rate Job.Param = {},Data count={}", param, count);
+        return ReturnT.SUCCESS;
+    }
+
+    @XxlJob("statLast7DayReadRateByConcentratorJobHandler")
+    public ReturnT<String> statLast7DayReadRateByConcentratorJobHandler(String param) throws Exception {
+        XxlJobLogger.log("XXL-JOB, Meter Read Rate Job.Param = {}",param);
+        Date runDate = new Date();
+        if(StringUtils.isNotBlank(param)) {
+            try{
+                HashMap paramMap = JSON.parseObject(param, HashMap.class);
+                Integer readDay = MapUtil.getInt(paramMap, Constants.READ_DAY_PARAM_FLAG);
+                runDate = DateUtil.parse(String.valueOf(readDay), Constants.DEFAULT_METER_READ_DATE_FORMAT).toJdkDate();
+            }catch (Exception e){
+                e.printStackTrace();
+                log.error("Not legal task param ,Use Default Param",e);
+            }
+        }
+        int count = meterReadRateService.statLast7DayReadRateByConcentrator(runDate);
+        XxlJobLogger.log("XXL-JOB, Meter Read Rate Job.Param = {},Data count={}", param, count);
+        return ReturnT.SUCCESS;
+    }
+
+    @XxlJob("statLast15DayReadRateByConcentratorJobHandler")
+    public ReturnT<String> statLast15DayReadRateByConcentratorJobHandler(String param) throws Exception {
+        XxlJobLogger.log("XXL-JOB, Meter Read Rate Job.Param = {}",param);
+        Date runDate = new Date();
+        if(StringUtils.isNotBlank(param)) {
+            try{
+                HashMap paramMap = JSON.parseObject(param, HashMap.class);
+                Integer readDay = MapUtil.getInt(paramMap, Constants.READ_DAY_PARAM_FLAG);
+                runDate = DateUtil.parse(String.valueOf(readDay), Constants.DEFAULT_METER_READ_DATE_FORMAT).toJdkDate();
+            }catch (Exception e){
+                e.printStackTrace();
+                log.error("Not legal task param ,Use Default Param",e);
+            }
+        }
+        int count = meterReadRateService.statLast15DayReadRateByConcentrator(runDate);
+        XxlJobLogger.log("XXL-JOB, Meter Read Rate Job.Param = {},Data count={}", param, count);
+        return ReturnT.SUCCESS;
+    }
+
+    @XxlJob("statLastMonthReadRateByConcentratorJobHandler")
+    public ReturnT<String> statLastMonthReadRateByConcentratorJobHandler(String param) throws Exception {
+        XxlJobLogger.log("XXL-JOB, Meter Read Rate Job.Param = {}",param);
+        Date runDate = new Date();
+        if(StringUtils.isNotBlank(param)) {
+            try{
+                HashMap paramMap = JSON.parseObject(param, HashMap.class);
+                Integer readDay = MapUtil.getInt(paramMap, Constants.READ_DAY_PARAM_FLAG);
+                runDate = DateUtil.parse(String.valueOf(readDay), Constants.DEFAULT_METER_READ_DATE_FORMAT).toJdkDate();
+            }catch (Exception e){
+                e.printStackTrace();
+                log.error("Not legal task param ,Use Default Param",e);
+            }
+        }
+        int count = meterReadRateService.statLastMonthReadRateByConcentrator(runDate);
+        XxlJobLogger.log("XXL-JOB, Meter Read Rate Job.Param = {},Data count={}", param, count);
+        return ReturnT.SUCCESS;
+    }
+
+}
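
Note: every handler in this class repeats the same date-parsing block; a small private helper along these lines (a sketch meant to live inside MeterReadRateJob, not part of this commit) would remove the duplication without changing behaviour.

    // Sketch: shared param parsing for the handlers above (uses the same imports as the class).
    private Date resolveRunDate(String param) {
        Date runDate = new Date();
        if (StringUtils.isNotBlank(param)) {
            try {
                HashMap paramMap = JSON.parseObject(param, HashMap.class);
                Integer readDay = MapUtil.getInt(paramMap, Constants.READ_DAY_PARAM_FLAG);
                runDate = DateUtil.parse(String.valueOf(readDay), Constants.DEFAULT_METER_READ_DATE_FORMAT).toJdkDate();
            } catch (Exception e) {
                log.error("Not legal task param, using current date instead", e);
            }
        }
        return runDate;
    }
    // Each handler body then reduces to: int count = meterReadRateService.statXxx(resolveRunDate(param));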

+ 86 - 0
meter-reading-job/src/main/java/com/huaxu/zoniot/producer/JobTaskProducer.java

@@ -0,0 +1,86 @@
+package com.huaxu.zoniot.producer;
+
+import com.alibaba.fastjson.JSON;
+import com.huaxu.zoniot.common.TaskState;
+import com.huaxu.zoniot.entity.JobTask;
+import com.huaxu.zoniot.service.JobTaskService;
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.amqp.core.Message;
+import org.springframework.amqp.rabbit.connection.CorrelationData;
+import org.springframework.amqp.rabbit.core.RabbitTemplate;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.stereotype.Component;
+
+import javax.annotation.Resource;
+
+/**
+ * <p></p>
+ *
+ * @Author wilian.peng
+ * @Date 2021/1/3 14:01
+ * @Version 1.0
+ */
+@Slf4j
+@Component
+public class JobTaskProducer {
+    @Autowired
+    private RabbitTemplate rabbitTemplate;
+
+    @Value("${job.task.rabbit.exchange}")
+    String exchange ;
+
+    @Resource
+    JobTaskService   jobTaskService ;
+
+    RabbitTemplate.ConfirmCallback confirmCallback = new RabbitTemplate.ConfirmCallback() {
+        /**
+         *
+         * @param correlationData extra message metadata (custom id)
+         * @param isAck whether the broker accepted the message (true = accepted, false = rejected)
+         * @param s the rejection reason when the message was rejected
+         */
+        @Override
+        public void confirm(CorrelationData correlationData, boolean isAck, String s) {
+            log.info("Publisher confirm callback, CorrelationData = {}, isAck = {}",correlationData.getId(),isAck);
+            Integer taskId = Integer.parseInt(correlationData.getId().split("-")[0]);
+            TaskState taskState = null ;
+            String remark = "";
+            if(!isAck){
+                log.info("Message {} was rejected, reason: {}",correlationData.getId(),s);
+                taskState = TaskState.FAILED;
+                remark = s;
+                jobTaskService.updateTaskState(taskId,taskState,remark);
+            }
+        }
+    };
+
+    RabbitTemplate.ReturnCallback returnCallback = new RabbitTemplate.ReturnCallback() {
+
+        /**
+         *
+         * @param message the returned message
+         * @param replyCode error code
+         * @param replyText error description
+         * @param exchangeName name of the exchange
+         * @param routingKey routing key
+         */
+        @Override
+        public void returnedMessage(Message message, int replyCode, String replyText, String exchangeName, String routingKey) {
+            log.info("Message returned, message={}, ReplyCode={}, ReplyText={}, Exchange={}, RoutingKey={}",
+                    JSON.toJSONString(message),replyCode,replyText,exchangeName,routingKey);
+            Integer taskId = Integer.parseInt(message.getMessageProperties().getCorrelationId().split("-")[0]);
+            TaskState taskState = TaskState.FAILED;
+            jobTaskService.updateTaskState(taskId,taskState,replyText);
+        }
+    };
+
+
+    public void sendTask(JobTask jobTask){
+        CorrelationData correlationData = new CorrelationData(jobTask.getTaskId()+"-"+System.currentTimeMillis());
+        // exchange name, routing key, message payload, custom correlation id
+        rabbitTemplate.setConfirmCallback(confirmCallback);
+        rabbitTemplate.setReturnCallback(returnCallback);
+        rabbitTemplate.convertAndSend(exchange, "", jobTask, correlationData);
+    }
+}
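
Note: sendTask re-installs the confirm and return callbacks on the shared RabbitTemplate on every call. Registering them once, for example in a @PostConstruct method, is equivalent and avoids mutating shared template state per send; below is a sketch using the same fields as above, not part of this commit.

    // Sketch: register the callbacks once instead of on every sendTask call.
    @javax.annotation.PostConstruct
    public void initCallbacks() {
        rabbitTemplate.setConfirmCallback(confirmCallback);
        rabbitTemplate.setReturnCallback(returnCallback);
    }

    public void sendTask(JobTask jobTask) {
        CorrelationData correlationData = new CorrelationData(jobTask.getTaskId() + "-" + System.currentTimeMillis());
        rabbitTemplate.convertAndSend(exchange, "", jobTask, correlationData);
    }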

+ 29 - 0
meter-reading-job/src/main/resources/application-job-dev.properties

@@ -0,0 +1,29 @@
+server.port=8081
+server.servlet.context-path=/meter/reading/job
+logging.config=classpath:logback.xml
+#########################################XXL-Job configuration############################################
+xxl.job.admin.addresses=http://127.0.0.1:8080/xxl-job-admin
+xxl.job.accessToken=
+xxl.job.executor.appname=meter-reading-executor
+xxl.job.executor.address=
+xxl.job.executor.ip=
+xxl.job.executor.port=9995
+xxl.job.executor.logpath=C:/tmp/xxl-job/jobhandler
+xxl.job.executor.logretentiondays=30
+#########################################RabbitMQ configuration###########################################
+spring.rabbitmq.host=114.135.61.188
+spring.rabbitmq.port=55672
+spring.rabbitmq.username=zoniot
+spring.rabbitmq.password=zcxk100
+spring.rabbitmq.virtual-host=/
+spring.rabbitmq.connection-timeout=1000ms
+# Enable publisher confirms
+spring.rabbitmq.publisher-confirm-type=correlated
+# Enable returns for unroutable messages
+spring.rabbitmq.publisher-returns=true
+spring.rabbitmq.template.mandatory=true
+# Manual message acknowledgement
+spring.rabbitmq.listener.direct.acknowledge-mode=manual
+spring.rabbitmq.listener.simple.acknowledge-mode=manual
+# Task exchange
+job.task.rabbit.exchange=job-task-exchange

+ 29 - 0
meter-reading-job/src/main/resources/application-job-sit.properties

@@ -0,0 +1,29 @@
+server.port=8081
+server.servlet.context-path=/meter/reading/job
+logging.config=classpath:logback.xml
+#########################################XXL-Job configuration############################################
+xxl.job.admin.addresses=http://10.0.0.156:8081/xxl-job-admin
+xxl.job.accessToken=
+xxl.job.executor.appname=meter-reading-executor
+xxl.job.executor.address=
+xxl.job.executor.ip=
+xxl.job.executor.port=9995
+xxl.job.executor.logpath=/opt/sit/meter-reading-job/xxl-job/jobhandler
+xxl.job.executor.logretentiondays=30
+#########################################RabbitMQ configuration###########################################
+spring.rabbitmq.host=114.135.61.188
+spring.rabbitmq.port=55672
+spring.rabbitmq.username=zoniot
+spring.rabbitmq.password=zcxk100
+spring.rabbitmq.virtual-host=/
+spring.rabbitmq.connection-timeout=1000ms
+# Enable publisher confirms
+spring.rabbitmq.publisher-confirm-type=correlated
+# Enable returns for unroutable messages
+spring.rabbitmq.publisher-returns=true
+spring.rabbitmq.template.mandatory=true
+# Manual message acknowledgement
+spring.rabbitmq.listener.direct.acknowledge-mode=manual
+spring.rabbitmq.listener.simple.acknowledge-mode=manual
+# Task exchange
+job.task.rabbit.exchange=job-task-exchange

+ 1 - 0
meter-reading-job/src/main/resources/application.properties

@@ -0,0 +1 @@
+spring.profiles.active=common-dev,job-dev

+ 27 - 0
meter-reading-job/src/main/resources/logback.xml

@@ -0,0 +1,27 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<configuration debug="false" scan="true" scanPeriod="1 seconds">
+    <contextName>logback</contextName>
+    <property name="log.path" value="./logs/meter-reading-job.log"/>
+    <appender name="console" class="ch.qos.logback.core.ConsoleAppender">
+        <encoder>
+            <pattern>%d{HH:mm:ss.SSS} %contextName [%thread] %-5level %logger{36} - %msg%n</pattern>
+        </encoder>
+    </appender>
+
+    <appender name="file" class="ch.qos.logback.core.rolling.RollingFileAppender">
+        <file>${log.path}</file>
+        <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+            <fileNamePattern>${log.path}.%d{yyyy-MM-dd}.zip</fileNamePattern>
+        </rollingPolicy>
+        <encoder>
+            <pattern>%date %level [%thread] %logger{36} [%file : %line] %msg%n
+            </pattern>
+        </encoder>
+    </appender>
+
+    <root level="info">
+        <appender-ref ref="console"/>
+        <appender-ref ref="file"/>
+    </root>
+
+</configuration>

+ 80 - 0
meter-reading-service/pom.xml

@@ -0,0 +1,80 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+    <parent>
+        <groupId>org.springframework.boot</groupId>
+        <artifactId>spring-boot-starter-parent</artifactId>
+        <version>2.3.1.RELEASE</version>
+        <relativePath/> <!-- lookup parent from repository -->
+    </parent>
+    <groupId>com.huaxu.zoniot</groupId>
+    <artifactId>meter-reading-service</artifactId>
+    <version>1.0</version>
+    <description>Meter reading service</description>
+
+    <properties>
+        <java.version>1.8</java.version>
+        <netty-all.version>4.1.54.Final</netty-all.version>
+        <gson.version>2.8.6</gson.version>
+        <skipTests>true</skipTests>
+    </properties>
+    <dependencies>
+        <dependency>
+            <groupId>com.huaxu.zoniot</groupId>
+            <artifactId>meter-reading-common</artifactId>
+            <version>1.0</version>
+        </dependency>
+        <!-- Web support -->
+        <dependency>
+            <groupId>org.springframework.boot</groupId>
+            <artifactId>spring-boot-starter-web</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>org.springframework.boot</groupId>
+            <artifactId>spring-boot-starter-test</artifactId>
+            <scope>test</scope>
+        </dependency>
+        <!-- RabbitMQ integration -->
+        <dependency>
+            <groupId>org.springframework.boot</groupId>
+            <artifactId>spring-boot-starter-amqp</artifactId>
+        </dependency>
+
+
+        <dependency>
+            <groupId>org.projectlombok</groupId>
+            <artifactId>lombok</artifactId>
+            <optional>true</optional>
+        </dependency>
+        <!--
+        <dependency>
+            <groupId>org.springframework.boot</groupId>
+            <artifactId>spring-boot-devtools</artifactId>
+            <scope>runtime</scope>
+            <optional>true</optional>
+        </dependency>
+        -->
+
+        <!-- data processor -->
+        <dependency>
+            <groupId>com.zcxk.zoniot</groupId>
+            <artifactId>data-processor</artifactId>
+            <version>1.0</version>
+        </dependency>
+    </dependencies>
+    <build>
+        <plugins>
+            <plugin>
+                <groupId>org.springframework.boot</groupId>
+                <artifactId>spring-boot-maven-plugin</artifactId>
+                <configuration>
+                    <fork>true</fork>
+                    <addResources>true</addResources>
+                </configuration>
+            </plugin>
+        </plugins>
+    </build>
+</project>

+ 18 - 0
meter-reading-service/src/main/java/com/huaxu/zoniot/MeterReadingServiceApplication.java

@@ -0,0 +1,18 @@
+package com.huaxu.zoniot;
+
+import org.springframework.boot.SpringApplication;
+import org.springframework.boot.autoconfigure.SpringBootApplication;
+
+/**
+ * <p></p>
+ *
+ * @Author wilian.peng
+ * @Date 2020/12/22 15:01
+ * @Version 1.0
+ */
+@SpringBootApplication
+public class MeterReadingServiceApplication {
+    public static void main(String[] args) {
+        SpringApplication.run(MeterReadingServiceApplication.class,args);
+    }
+}

+ 31 - 0
meter-reading-service/src/main/java/com/huaxu/zoniot/config/RabbitConfig.java

@@ -0,0 +1,31 @@
+package com.huaxu.zoniot.config;
+
+import org.springframework.amqp.rabbit.config.SimpleRabbitListenerContainerFactory;
+import org.springframework.amqp.rabbit.connection.ConnectionFactory;
+import org.springframework.boot.autoconfigure.amqp.SimpleRabbitListenerContainerFactoryConfigurer;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+
+/**
+ * <p></p>
+ *
+ * @Author wilian.peng
+ * @Date 2020/12/22 10:25
+ * @Version 1.0
+ */
+@Configuration
+public class RabbitConfig {
+
+    @Bean("myRabbitContainerFactory")
+    public SimpleRabbitListenerContainerFactory containerFactory(SimpleRabbitListenerContainerFactoryConfigurer configurer,
+                                                                 ConnectionFactory connectionFactory) {
+        SimpleRabbitListenerContainerFactory factory = new SimpleRabbitListenerContainerFactory();
+        // initial number of concurrent consumers
+        factory.setConcurrentConsumers(2);
+        // maximum number of concurrent consumers
+        factory.setMaxConcurrentConsumers(8);
+        configurer.configure(factory, connectionFactory);
+        return factory;
+    }
+
+}
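
Note: for illustration, here is a listener wired to this container factory and to the manual acknowledge mode enabled in the job properties; the queue name and the handler body are assumptions, not taken from this change.

    import com.rabbitmq.client.Channel;
    import org.springframework.amqp.core.Message;
    import org.springframework.amqp.rabbit.annotation.RabbitListener;
    import org.springframework.stereotype.Component;

    // Illustrative consumer; the queue name "job-task-queue" is an assumption.
    @Component
    public class ExampleTaskListener {

        @RabbitListener(queues = "job-task-queue", containerFactory = "myRabbitContainerFactory")
        public void onMessage(Message message, Channel channel) throws Exception {
            long tag = message.getMessageProperties().getDeliveryTag();
            try {
                // ... handle the task payload ...
                channel.basicAck(tag, false);        // manual ack, matches acknowledge-mode=manual
            } catch (Exception e) {
                channel.basicNack(tag, false, true); // requeue on failure
            }
        }
    }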

+ 124 - 0
meter-reading-service/src/main/java/com/huaxu/zoniot/service/WaterMeterDataProvider.java

@@ -0,0 +1,124 @@
+package com.huaxu.zoniot.service;
+
+import cn.hutool.core.date.DateField;
+import cn.hutool.core.date.DateUtil;
+import cn.hutool.core.map.MapUtil;
+import com.alibaba.fastjson.JSON;
+import com.huaxu.zoniot.common.Constants;
+import com.huaxu.zoniot.dao.WaterMeterMapper;
+import com.huaxu.zoniot.entity.WaterMeter;
+import com.huaxu.zoniot.utils.SpringContextUtil;
+import com.zcxk.zoniot.processor.provider.LongSliceDataProvider;
+import com.zcxk.zoniot.processor.provider.Page;
+import com.zcxk.zoniot.processor.slice.Slice;
+import lombok.Data;
+import lombok.extern.slf4j.Slf4j;
+import org.apache.commons.lang3.StringUtils;
+
+import java.util.Date;
+import java.util.HashMap;
+import java.util.List;
+import java.util.concurrent.Callable;
+
+/**
+ * <p>Water meter data source: the data set is first split into distributed shards, then each shard is sliced and paged across multiple threads.</p>
+ *
+ * @Author wilian.peng
+ * @Date 2020/12/24 14:20
+ * @Version 1.0
+ */
+@Data
+@Slf4j
+public class WaterMeterDataProvider extends LongSliceDataProvider<WaterMeter> {
+
+    /**
+     * 分页大小
+     */
+    private int pageSize = 1000;
+    /**
+     * 分片索引
+     */
+    private int shardIndex = 0 ;
+    /**
+     * Shard start offset
+     */
+    private int shardBegin = 0 ;
+    /**
+     * Shard data size
+     */
+    private int shardDataSize = 0;
+
+    /**
+     * Task execution parameters
+     */
+    private String taskParam ;
+    /**
+     *
+     * @param totalSize total number of records
+     * @param shardBegin start offset of this shard
+     * @param shardDataSize number of records in this shard
+     * @param pageSize page size
+     */
+    public WaterMeterDataProvider(long totalSize,int shardBegin ,int shardDataSize,int pageSize) {
+        super(0, totalSize, 10000, true);
+        this.shardBegin = shardBegin ;
+        this.shardDataSize = shardDataSize ;
+        this.pageSize = pageSize;
+    }
+
+    /**
+     *
+     * @param totalSize total number of records
+     * @param shardBegin start offset of this shard
+     * @param shardDataSize number of records in this shard
+     */
+    public WaterMeterDataProvider(long totalSize,  int shardBegin ,int shardDataSize) {
+        super(0, totalSize, 10000, true);
+        this.shardBegin = shardBegin ;
+        this.shardDataSize = shardDataSize ;
+    }
+
+    @Override
+    public Page<WaterMeter> getResources(Slice<Long> slice, Page<WaterMeter> lastPage) throws Exception {
+        int start = slice.getBegin().intValue();
+        int currentPage = 0;
+        if (lastPage != null) {
+            currentPage = lastPage.getCurrentPage() + 1;
+        }
+        start += currentPage * pageSize;
+        int end = start + pageSize;
+        // Query this shard's data page by page
+        WaterMeterMapper waterMeterMapper = SpringContextUtil.getBean(WaterMeterMapper.class);
+        List<WaterMeter> dataList = waterMeterMapper.findShardWaterMeterListWithPage(shardBegin, shardDataSize, start, pageSize);
+        return new Page<>(end < slice.getEnd(), dataList, pageSize, currentPage);
+    }
+
+    @Override
+    public Callable<?> createTask(List<WaterMeter> resources) {
+        return (Callable<Object>) () -> {
+            log.info("Shard Index = {},Data Size = {}",shardIndex,resources.size());
+            MeterReadRecordService meterReadRecordService = SpringContextUtil.getBean(MeterReadRecordService.class);
+            int readDay = 0 ;
+            if(StringUtils.isNotBlank(taskParam)){
+                try {
+                    HashMap<String,Object> taskParamMap = JSON.parseObject(taskParam, HashMap.class);
+                    readDay = MapUtil.getInt(taskParamMap, Constants.READ_DAY_PARAM_FLAG);
+                }catch (Exception e){
+                    log.error("Task param is not in a valid format, taskParam = {}", taskParam);
+                    readDay = getTomorrowDay();
+                }
+            }
+            else{
+                // Generate un-read records for the following day
+                readDay = getTomorrowDay();
+            }
+            meterReadRecordService.batchCreateMeterUnReadRecord(resources,readDay);
+            return true;
+        };
+    }
+    protected int getTomorrowDay(){
+        Date tomorrow = DateUtil.offset(DateUtil.date(), DateField.DAY_OF_MONTH, 1);
+        return Integer.parseInt(DateUtil.format(tomorrow, Constants.DEFAULT_METER_READ_DATE_FORMAT));
+    }
+}
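
For context, a rough sketch of how a shard worker could drive the provider above (mirroring the testMeterReadJob test further down in this diff); ShardedMeterReadJobSketch is a hypothetical class, and shardIndex/shardTotal are assumed to be supplied by the scheduling framework.

package com.huaxu.zoniot.service;

import com.huaxu.zoniot.dao.WaterMeterMapper;
import com.huaxu.zoniot.utils.SpringContextUtil;
import com.zcxk.zoniot.processor.DefaultDataProcessor;

// Illustrative sketch only, not part of this commit
public class ShardedMeterReadJobSketch {

    public void execute(int shardIndex, int shardTotal) {
        WaterMeterMapper waterMeterMapper = SpringContextUtil.getBean(WaterMeterMapper.class);
        // total number of water meters
        int dataCount = waterMeterMapper.countWaterMeter().intValue();
        // average shard size, rounded up to a multiple of shardTotal
        int shardDataSize = (dataCount + (shardTotal - dataCount % shardTotal)) / shardTotal;
        // start offset of this node's shard
        int shardBegin = shardIndex * shardDataSize;

        WaterMeterDataProvider provider = new WaterMeterDataProvider(shardDataSize, shardBegin, shardDataSize);
        provider.setShardIndex(shardIndex);
        // optional: provider.setTaskParam("{\"readDay\":\"20210101\"}"); defaults to tomorrow when absent
        new DefaultDataProcessor<>(provider).process();
    }
}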

+ 182 - 0
meter-reading-service/src/main/java/com/huaxu/zoniot/utils/SnowflakeIdWorker.java

@@ -0,0 +1,182 @@
+package com.huaxu.zoniot.utils;
+
+/**
+ * Twitter Snowflake<br>
+ * Layout of a SnowFlake ID (parts separated by "-"):<br>
+ * 0 - 0000000000 0000000000 0000000000 0000000000 0 - 00000 - 00000 - 000000000000 <br>
+ * 1 sign bit: Java's long is signed and its highest bit is the sign bit, so IDs are kept positive and this bit is always 0.<br>
+ * 41 timestamp bits (milliseconds): these do not store the current timestamp itself but the difference between the
+ * current timestamp and a fixed start epoch (chosen when the generator is first put into service; see the twepoch field below). 41 bits cover roughly 69 years: (1L << 41) / (1000L * 60 * 60 * 24 * 365) = 69.<br>
+ * 10 machine bits: 5 datacenterId bits plus 5 workerId bits, allowing deployment on up to 1024 nodes.<br>
+ * 12 sequence bits: a counter within the same millisecond, allowing each node to generate 4096 IDs per millisecond.<br>
+ * Altogether exactly 64 bits, i.e. one long value.<br>
+ * SnowFlake IDs are roughly ordered by time, collision-free across the whole cluster (datacenter ID and worker ID keep nodes apart), and fast to generate (around 260,000 IDs per second in tests).
+ */
+public class SnowflakeIdWorker {
+
+    // ==============================Fields===========================================
+    /**
+     * Start epoch (2015-01-01)
+     */
+    private final long twepoch = 1420041600000L;
+
+    /**
+     * Number of bits allocated to the worker id
+     */
+    private final long workerIdBits = 5L;
+
+    /**
+     * Number of bits allocated to the datacenter id
+     */
+    private final long datacenterIdBits = 5L;
+
+    /**
+     * Maximum supported worker id, 31 here (this shift trick quickly computes the largest decimal value representable with a given number of bits)
+     */
+    private final long maxWorkerId = -1L ^ (-1L << workerIdBits);
+
+    /**
+     * Maximum supported datacenter id, 31 here
+     */
+    private final long maxDatacenterId = -1L ^ (-1L << datacenterIdBits);
+
+    /**
+     * Number of bits allocated to the sequence
+     */
+    private final long sequenceBits = 12L;
+
+    /**
+     * The worker id is shifted left by 12 bits
+     */
+    private final long workerIdShift = sequenceBits;
+
+    /**
+     * The datacenter id is shifted left by 17 bits (12 + 5)
+     */
+    private final long datacenterIdShift = sequenceBits + workerIdBits;
+
+    /**
+     * The timestamp is shifted left by 22 bits (5 + 5 + 12)
+     */
+    private final long timestampLeftShift = sequenceBits + workerIdBits + datacenterIdBits;
+
+    /**
+     * Mask for the sequence, 4095 here (0b111111111111 = 0xfff = 4095)
+     */
+    private final long sequenceMask = -1L ^ (-1L << sequenceBits);
+
+    /**
+     * Worker id (0~31)
+     */
+    private long workerId;
+
+    /**
+     * Datacenter id (0~31)
+     */
+    private long datacenterId;
+
+    /**
+     * Sequence within the current millisecond (0~4095)
+     */
+    private long sequence = 0L;
+
+    /**
+     * Timestamp of the last generated id
+     */
+    private long lastTimestamp = -1L;
+
+    //==============================Constructors=====================================
+
+    /**
+     * Constructor.
+     *
+     * @param workerId     worker id (0~31)
+     * @param datacenterId datacenter id (0~31)
+     */
+    public SnowflakeIdWorker(long workerId, long datacenterId) {
+        if (workerId > maxWorkerId || workerId < 0) {
+            throw new IllegalArgumentException(String.format("worker Id can't be greater than %d or less than 0", maxWorkerId));
+        }
+        if (datacenterId > maxDatacenterId || datacenterId < 0) {
+            throw new IllegalArgumentException(String.format("datacenter Id can't be greater than %d or less than 0", maxDatacenterId));
+        }
+        this.workerId = workerId;
+        this.datacenterId = datacenterId;
+    }
+
+    // ==============================Methods==========================================
+
+    /**
+     * Returns the next ID (this method is thread-safe).
+     *
+     * @return SnowflakeId
+     */
+    public synchronized long nextId() {
+        long timestamp = timeGen();
+
+        // If the current time is before the last ID's timestamp, the system clock has moved backwards and an exception must be thrown
+        if (timestamp < lastTimestamp) {
+            throw new RuntimeException(
+                    String.format("Clock moved backwards.  Refusing to generate id for %d milliseconds", lastTimestamp - timestamp));
+        }
+
+        // Same millisecond as the last ID: advance the intra-millisecond sequence
+        if (lastTimestamp == timestamp) {
+            sequence = (sequence + 1) & sequenceMask;
+            // Sequence overflow within this millisecond
+            if (sequence == 0) {
+                // Block until the next millisecond to obtain a new timestamp
+                timestamp = tilNextMillis(lastTimestamp);
+            }
+        }
+        // Timestamp changed: reset the intra-millisecond sequence
+        else {
+            sequence = 0L;
+        }
+
+        // Remember the timestamp of this ID
+        lastTimestamp = timestamp;
+
+        // Shift the parts into place and OR them together into one 64-bit ID
+        return ((timestamp - twepoch) << timestampLeftShift) //
+                | (datacenterId << datacenterIdShift) //
+                | (workerId << workerIdShift) //
+                | sequence;
+    }
+
+    /**
+     * Blocks until the next millisecond, i.e. until a new timestamp is obtained.
+     *
+     * @param lastTimestamp timestamp of the last generated ID
+     * @return the current timestamp
+     */
+    protected long tilNextMillis(long lastTimestamp) {
+        long timestamp = timeGen();
+        while (timestamp <= lastTimestamp) {
+            timestamp = timeGen();
+        }
+        return timestamp;
+    }
+
+    /**
+     * Returns the current time in milliseconds.
+     *
+     * @return current time (milliseconds)
+     */
+    protected long timeGen() {
+        return System.currentTimeMillis();
+    }
+
+    //==============================Test=============================================
+
+    /**
+     * Simple smoke test.
+     */
+    public static void main(String[] args) {
+        SnowflakeIdWorker idWorker = new SnowflakeIdWorker(0, 0);
+        for (int i = 0; i < 5; i++) {
+            long id = idWorker.nextId();
+            System.out.println(id);
+        }
+    }
+}
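
As a sanity check on the bit layout described in the Javadoc above, the hypothetical decoder below (not part of this commit) splits a generated ID back into its timestamp, datacenter, worker and sequence parts using the same 41/5/5/12-bit widths.

package com.huaxu.zoniot.utils;

// Illustrative decoder only; TWEPOCH must match the twepoch field of SnowflakeIdWorker
public class SnowflakeIdDecodeSketch {

    private static final long TWEPOCH = 1420041600000L;

    public static void main(String[] args) {
        long id = new SnowflakeIdWorker(3, 1).nextId();
        long sequence     = id & 0xFFFL;            // lowest 12 bits
        long workerId     = (id >> 12) & 0x1FL;     // next 5 bits
        long datacenterId = (id >> 17) & 0x1FL;     // next 5 bits
        long timestamp    = (id >> 22) + TWEPOCH;   // remaining 41 bits plus the epoch
        System.out.printf("timestamp=%d, datacenterId=%d, workerId=%d, sequence=%d%n",
                timestamp, datacenterId, workerId, sequence);
    }
}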

+ 36 - 0
meter-reading-service/src/main/java/com/huaxu/zoniot/utils/SpringContextUtil.java

@@ -0,0 +1,36 @@
+package com.huaxu.zoniot.utils;
+
+import org.apache.commons.lang3.ArrayUtils;
+import org.springframework.beans.BeansException;
+import org.springframework.context.ApplicationContext;
+import org.springframework.context.ApplicationContextAware;
+import org.springframework.stereotype.Component;
+
+/**
+ * @author pengdi
+ */
+@Component
+public class SpringContextUtil implements ApplicationContextAware {
+
+    private static ApplicationContext context;
+
+    @Override
+    public void setApplicationContext(ApplicationContext applicationcontext) throws BeansException {
+        SpringContextUtil.context = applicationcontext;
+    }
+
+    public static Object getBean(String beanName) {
+        return context.getBean(beanName);
+    }
+
+    public static <T> T getBean(Class<T> clazz) {
+        String[] beanNames = context.getBeanNamesForType(clazz);
+        if (ArrayUtils.isEmpty(beanNames)) {
+            throw new IllegalArgumentException("There are no bean of type " + clazz.getName());
+        } else if (beanNames.length > 1) {
+            throw new IllegalArgumentException("There are more than one bean of type " + clazz.getName());
+        }
+        return (T) getBean(beanNames[0]);
+    }
+
+}

+ 17 - 0
meter-reading-service/src/main/java/com/huaxu/zoniot/web/MeterReadingController.java

@@ -0,0 +1,17 @@
+package com.huaxu.zoniot.web;
+
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.web.bind.annotation.RestController;
+
+/**
+ * <p></p>
+ *
+ * @Author wilian.peng
+ * @Date 2020/12/23 14:40
+ * @Version 1.0
+ */
+@Slf4j
+@RestController
+public class MeterReadingController {
+
+}

+ 21 - 0
meter-reading-service/src/main/resources/application-service-dev.properties

@@ -0,0 +1,21 @@
+server.port=8095
+server.servlet.context-path=/meter/reading/service
+logging.level.root=info
+logging.file.path=./logs
+#########################################Rabbit MQ configuration#############################################
+spring.rabbitmq.host=114.135.61.188
+spring.rabbitmq.port=55672
+spring.rabbitmq.username=zoniot
+spring.rabbitmq.password=zcxk100
+spring.rabbitmq.virtual-host=/
+spring.rabbitmq.connection-timeout=1000ms
+# Enable publisher confirms
+spring.rabbitmq.publisher-confirm-type=correlated
+# Enable returns for messages that cannot be routed
+spring.rabbitmq.publisher-returns=true
+spring.rabbitmq.template.mandatory=true
+# Enable manual acknowledgement
+spring.rabbitmq.listener.direct.acknowledge-mode=manual
+spring.rabbitmq.listener.simple.acknowledge-mode=manual
+# Water meter data queue
+com.huaxu.zoniot.meter.data.queue=hr.employee
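
Because publisher confirms and returns are enabled above (publisher-confirm-type=correlated, publisher-returns=true, template.mandatory=true), a producer would normally register the corresponding callbacks on the RabbitTemplate. The sketch below is an assumption-based illustration against the Spring AMQP 2.2.x API pulled in by Spring Boot 2.3.1, not code from this commit.

package com.huaxu.zoniot.config;

import javax.annotation.PostConstruct;

import lombok.extern.slf4j.Slf4j;
import org.springframework.amqp.rabbit.core.RabbitTemplate;
import org.springframework.stereotype.Component;

// Hypothetical component, for illustration only
@Slf4j
@Component
public class RabbitCallbackSketch {

    private final RabbitTemplate rabbitTemplate;

    public RabbitCallbackSketch(RabbitTemplate rabbitTemplate) {
        this.rabbitTemplate = rabbitTemplate;
    }

    @PostConstruct
    public void registerCallbacks() {
        // called when the broker confirms (or refuses) a published message
        rabbitTemplate.setConfirmCallback((correlationData, ack, cause) ->
                log.info("Publish confirm: ack={}, cause={}, correlation={}", ack, cause, correlationData));
        // called when a mandatory message cannot be routed to any queue
        rabbitTemplate.setReturnCallback((message, replyCode, replyText, exchange, routingKey) ->
                log.warn("Returned message: code={}, reason={}, exchange={}, routingKey={}",
                        replyCode, replyText, exchange, routingKey));
    }
}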

+ 1 - 0
meter-reading-service/src/main/resources/application.properties

@@ -0,0 +1 @@
+spring.profiles.active=common-dev,service-dev

+ 23 - 0
meter-reading-service/src/main/resources/logback-spring.xml

@@ -0,0 +1,23 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<configuration>
+    <springProperty scope="context" name="LOG_PATH" source="logging.file.path" defaultValue="/tmp" />
+    <include resource="org/springframework/boot/logging/logback/defaults.xml" />
+    <include resource="org/springframework/boot/logging/logback/console-appender.xml" />
+    <appender name="TIME_FILE"
+              class="ch.qos.logback.core.rolling.RollingFileAppender">
+        <encoder>
+            <pattern>${FILE_LOG_PATTERN}</pattern>
+        </encoder>
+        <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+            <fileNamePattern>${LOG_PATH}/meter-reading-service.%d{yyyy-MM-dd}.%i.log</fileNamePattern>
+            <maxHistory>365</maxHistory>
+            <timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
+                <maxFileSize>100MB</maxFileSize>
+            </timeBasedFileNamingAndTriggeringPolicy>
+        </rollingPolicy>
+    </appender>
+    <root level="INFO">
+        <appender-ref ref="CONSOLE" />
+        <appender-ref ref="TIME_FILE" />
+    </root>
+</configuration>

+ 31 - 0
meter-reading-service/src/test/java/com/huaxu/zoniot/MeterReadRateServiceTests.java

@@ -0,0 +1,31 @@
+package com.huaxu.zoniot;
+
+import com.huaxu.zoniot.service.MeterReadRateService;
+import org.junit.jupiter.api.Test;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.test.context.SpringBootTest;
+
+import java.util.Date;
+
+/**
+ * <p></p>
+ *
+ * @Author wilian.peng
+ * @Date 2020/12/27 19:28
+ * @Version 1.0
+ */
+@SpringBootTest
+public class MeterReadRateServiceTests {
+
+    @Autowired
+    MeterReadRateService meterReadRateService;
+    @Test
+    void contextLoads() {
+    }
+
+    @Test
+    void statLastDayReadRateByBuildingTest(){
+        int i = meterReadRateService.statLastDayReadRateByBuilding(new Date());
+        System.out.println(i);
+    }
+}

+ 135 - 0
meter-reading-service/src/test/java/com/huaxu/zoniot/MeterReadingServiceTests.java

@@ -0,0 +1,135 @@
+package com.huaxu.zoniot;
+
+import com.alibaba.fastjson.JSON;
+import com.huaxu.zoniot.dao.WaterMeterMapper;
+import com.huaxu.zoniot.entity.MeasuringData;
+import com.huaxu.zoniot.entity.WaterMeter;
+import com.huaxu.zoniot.service.MeterReadRecordService;
+import com.huaxu.zoniot.service.WaterMeterDataProvider;
+import com.zcxk.zoniot.processor.DefaultDataProcessor;
+import org.junit.jupiter.api.Test;
+import org.springframework.boot.test.context.SpringBootTest;
+
+import javax.annotation.Resource;
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * <p></p>
+ *
+ * @Author wilian.peng
+ * @Date 2020/12/23 14:45
+ * @Version 1.0
+ */
+@SpringBootTest
+public class MeterReadingServiceTests {
+    @Resource
+    WaterMeterMapper waterMeterMapper ;
+
+    @Resource
+    MeterReadRecordService meterReadRecordService;
+    @Test
+    void contextLoads() {
+    }
+
+    @Test
+    void  batchCreateMeterUnReadRecordTest(){
+        // List<WaterMeter> list = waterMeterMapper.findWaterMeterListWithPage(0, 100);
+        WaterMeter meter = waterMeterMapper.findWaterMeterById(580141137533075456L);
+        List<WaterMeter> list = new ArrayList<>();
+        list.add(meter);
+        meterReadRecordService.batchCreateMeterUnReadRecord(list,20201225);
+    }
+
+    @Test
+    void queryWaterMeterTest(){
+        List<WaterMeter> list = waterMeterMapper.findWaterMeterListWithPage(0, 100);
+        System.out.println(list.size());
+    }
+
+    @Test
+    void getMeterTotalSizeTest(){
+        Long total = waterMeterMapper.countWaterMeter();
+        System.out.println(total);
+    }
+    @Test
+    void getShardInfoTest(){
+        int shardCount = 5 ;
+        int dataCount = waterMeterMapper.countWaterMeter().intValue() ;
+        int shardDataSize = (dataCount + (shardCount-dataCount % shardCount)) / shardCount ;
+        for(int i = 0 ; i < shardCount ; i++ ){
+            int shardBegin = i * shardDataSize ;
+            System.out.println("分片索引="+i+",分片数据开始位置"+shardBegin+",分片数据量="+shardDataSize);
+        }
+    }
+    @Test
+    void getShardCountTest(){
+//        int shardIndex = 0 ;
+        int shardCount = 5 ;
+        int dataCount = waterMeterMapper.countWaterMeter().intValue() ;
+        int shardDataSize = (dataCount + (shardCount-dataCount % shardCount)) / shardCount ;
+        for(int i = 0 ; i < shardCount ; i++ ){
+            int shardBegin = i * shardDataSize ;
+            Long shardDataCount = waterMeterMapper.countWaterMeterByShard(shardBegin, shardDataSize);
+            System.out.println("分片索引="+i+"分片数据集合大小="+shardDataCount);
+        }
+    }
+    @Test
+    void testMeterReadJob(){
+        int shardIndex = 4;
+        int shardTotal = 5;
+        // Count all water meters
+        int dataCount = waterMeterMapper.countWaterMeter().intValue() ;
+        // Average number of records per shard
+        int shardDataSize = (dataCount + (shardTotal-dataCount % shardTotal)) / shardTotal ;
+        // Start offset of this shard
+        int shardBegin = shardIndex * shardDataSize ;
+        // Actual number of records in this shard
+        //Long shardDataCount = waterMeterMapper.countWaterMeterByShard(shardBegin, shardDataSize);
+        // Run this shard's task with multiple threads
+        WaterMeterDataProvider provider = new WaterMeterDataProvider(shardDataSize,shardBegin,shardDataSize);
+        provider.setShardIndex(shardIndex);
+        //provider.setTaskParam("{\"readDay\":\"20201227\"}");
+        DefaultDataProcessor processor = new DefaultDataProcessor<>(provider);
+        processor.process();
+    }
+
+    @Test
+    void queryShardWaterMeterListWithPageTest()
+    {
+        int shardIndex = 0 ;
+        int shardCount = 5 ;
+        int dataCount = waterMeterMapper.countWaterMeter().intValue() ;
+        int shardDataSize = (dataCount + (shardCount-dataCount % shardCount)) / shardCount ;
+        int shardBegin = shardIndex * shardDataSize ;
+        int startIndex = 1000 ;
+        int pageSize = 1000 ;
+        List<WaterMeter> result = waterMeterMapper.findShardWaterMeterListWithPage(shardBegin, shardDataSize, startIndex, pageSize);
+        System.out.println(result.size());
+    }
+    @Test
+    void meterReadingTest(){
+        //WaterMeter meter = waterMeterMapper.findWaterMeterById(580141137533075456L);
+        // Mock data
+        MeasuringData md1 = new MeasuringData();
+        md1.setMeasuringCode("TIME");
+        md1.setMeasuringName("最后上报时间");
+        md1.setMeasuringVaule("20201225102120");
+
+        MeasuringData md2 = new MeasuringData();
+        md2.setMeasuringCode("currentQuantity");
+        md2.setMeasuringName("当前止度");
+        md2.setMeasuringVaule("72.5");
+        md2.setMeasuringUnit("m³");
+
+        List<MeasuringData> list = new ArrayList<>();
+        list.add(md1);
+        list.add(md2);
+        System.out.println(JSON.toJSONString(list));
+//        Map<String ,MeasuringData > mdm = new HashMap<>();
+//        mdm.put("TIME",md1);
+//        mdm.put("currentQuantity",md2);
+//        meterReadRecordService.meterReading(meter,mdm,20201225);
+    }
+
+}

+ 23 - 0
meter-reading-service/src/test/java/com/huaxu/zoniot/ShardTest.java

@@ -0,0 +1,23 @@
+package com.huaxu.zoniot;
+
+/**
+ * <p></p>
+ *
+ * @Author wilian.peng
+ * @Date 2020/12/25 9:36
+ * @Version 1.0
+ */
+public class ShardTest {
+    public static void main(String[] args) {
+        int shardIndex = 1 ;
+        int shardCount = 5 ;
+        int dataCount = 512 ;
+
+        int shardDataSize = (dataCount + (shardCount-dataCount % shardCount)) / shardCount ;
+
+        for(int i = 0 ; i < shardCount ; i++ ){
+            int shardBegin = i * shardDataSize ;
+            System.out.println("分片索引="+i+",分片数据开始位置"+shardBegin+",分片数据量="+shardDataSize);
+        }
+    }
+}
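
Worked example of the formula above, for dataCount = 512 and shardCount = 5: 512 % 5 = 2, so shardDataSize = (512 + (5 - 2)) / 5 = 515 / 5 = 103, and the shards start at offsets 0, 103, 206, 309 and 412, with the last shard actually holding only the remaining 100 rows. When dataCount is already an exact multiple of shardCount the formula adds a full extra shardCount before dividing, so every shard is sized one row larger than strictly needed; that appears harmless here because the paged queries simply return fewer rows for the trailing shard.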

+ 83 - 0
meter-reading-tianjin/pom.xml

@@ -0,0 +1,83 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+    <parent>
+        <groupId>org.springframework.boot</groupId>
+        <artifactId>spring-boot-starter-parent</artifactId>
+        <version>2.3.1.RELEASE</version>
+        <relativePath/> <!-- lookup parent from repository -->
+    </parent>
+    <groupId>com.huaxu.zoniot</groupId>
+    <artifactId>meter-reading-tianjin</artifactId>
+    <version>1.0-SNAPSHOT</version>
+    <description>Jinnan (Tianjin) meter reading integration and migration project</description>
+    <properties>
+        <java.version>1.8</java.version>
+    </properties>
+
+    <dependencies>
+        <!-- Web support -->
+        <dependency>
+            <groupId>org.springframework.boot</groupId>
+            <artifactId>spring-boot-starter-web</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.springframework.boot</groupId>
+            <artifactId>spring-boot-starter-test</artifactId>
+            <scope>test</scope>
+        </dependency>
+        <!-- Database connection pool -->
+        <dependency>
+            <groupId>com.alibaba</groupId>
+            <artifactId>druid-spring-boot-starter</artifactId>
+            <version>1.1.23</version>
+        </dependency>
+        <dependency>
+            <groupId>mysql</groupId>
+            <artifactId>mysql-connector-java</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.mybatis.spring.boot</groupId>
+            <artifactId>mybatis-spring-boot-starter</artifactId>
+            <version>2.1.3</version>
+        </dependency>
+        <!-- FastJSON -->
+        <dependency>
+            <groupId>com.alibaba</groupId>
+            <artifactId>fastjson</artifactId>
+            <version>1.2.41</version>
+        </dependency>
+        <dependency>
+            <groupId>org.projectlombok</groupId>
+            <artifactId>lombok</artifactId>
+            <optional>true</optional>
+        </dependency>
+        <dependency>
+            <groupId>org.springframework.boot</groupId>
+            <artifactId>spring-boot-devtools</artifactId>
+            <scope>runtime</scope>
+            <optional>true</optional>
+        </dependency>
+        <!-- LANG3-->
+        <dependency>
+            <groupId>org.apache.commons</groupId>
+            <artifactId>commons-lang3</artifactId>
+        </dependency>
+    </dependencies>
+
+    <build>
+        <plugins>
+            <plugin>
+                <groupId>org.springframework.boot</groupId>
+                <artifactId>spring-boot-maven-plugin</artifactId>
+                <configuration>
+                    <fork>true</fork>
+                    <addResources>true</addResources>
+                </configuration>
+            </plugin>
+        </plugins>
+    </build>
+
+</project>

+ 20 - 0
meter-reading-tianjin/src/main/java/com/huaxu/zoniot/MeterReadingApplication.java

@@ -0,0 +1,20 @@
+package com.huaxu.zoniot;
+
+import org.springframework.boot.SpringApplication;
+import org.springframework.boot.autoconfigure.SpringBootApplication;
+
+/**
+ * <p>Spring Boot entry point for the meter-reading-tianjin module.</p>
+ *
+ * @Author wilian.peng
+ * @Date 2020/12/16 21:09
+ * @Version 1.0
+ */
+@SpringBootApplication
+public class MeterReadingApplication {
+
+    public static void main(String[] args) {
+        System.out.println("----------Start--------");
+        SpringApplication.run(MeterReadingApplication.class, args);
+    }
+}

+ 19 - 0
meter-reading-tianjin/src/main/java/com/huaxu/zoniot/common/ErrorConstants.java

@@ -0,0 +1,19 @@
+package com.huaxu.zoniot.common;
+
+/**
+ * <p>错误描述常量</p>
+ *
+ * @Author wilian.peng
+ * @Date 2020/12/31 15:28
+ * @Version 1.0
+ */
+public class ErrorConstants {
+    public final static String CUSTOMER_NO_EMPTY = "用户编码为空";
+    public final static String REGIST_NO_EMPTY = "水表注册号为空";
+    public final static String SMALL_ZONE_EMPTY = "小区名称为空";
+    public final static String ADDRESS_EMPTY = "详细地址为空";
+    public final static String LOCATION_EMPTY = "安装位置为空";
+    public final static String IMEI_EMPTY = "IMEI号为空";
+    public final static String API_KEY_ERROR = "apiKey信息错误";
+    public final static String NO_DEVICE_ERROR = "未查询到设备信息";
+}

+ 17 - 0
meter-reading-tianjin/src/main/java/com/huaxu/zoniot/entity/Device.java

@@ -0,0 +1,17 @@
+package com.huaxu.zoniot.entity;
+
+import lombok.Data;
+
+import java.io.Serializable;
+
+/**
+ * <p>Device entity (placeholder).</p>
+ *
+ * @Author wilian.peng
+ * @Date 2020/12/31 17:01
+ * @Version 1.0
+ */
+@Data
+public class Device implements Serializable {
+
+}

+ 73 - 0
meter-reading-tianjin/src/main/java/com/huaxu/zoniot/entity/RegistData.java

@@ -0,0 +1,73 @@
+package com.huaxu.zoniot.entity;
+
+import com.huaxu.zoniot.common.ErrorConstants;
+import lombok.Data;
+import org.apache.commons.lang3.StringUtils;
+
+/**
+ * <p>Registration data.</p>
+ *
+ * @Author wilian.peng
+ * @Date 2020/12/31 15:19
+ * @Version 1.0
+ */
+@Data
+public class RegistData {
+    /**
+     * Customer number
+     */
+    private String customerNo;
+    /**
+     * Meter registration number
+     */
+    private String registNo;
+    /**
+     * Residential community name
+     */
+    private String smallZone;
+    /**
+     * Detailed address
+     */
+    private String addr;
+    /**
+     * Installation location
+     */
+    private String location;
+    /**
+     * IMEI number
+     */
+    private String imei;
+    /**
+     * Assigned verification value (API key)
+     */
+    private String apiKey;
+
+    public String check(){
+        StringBuffer result=new StringBuffer();
+        if(StringUtils.isBlank(customerNo)){
+            result.append(ErrorConstants.CUSTOMER_NO_EMPTY);
+        }
+        if(StringUtils.isBlank(registNo)){
+            result.append(ErrorConstants.REGIST_NO_EMPTY);
+        }
+        if(StringUtils.isBlank(smallZone)){
+            result.append(ErrorConstants.SMALL_ZONE_EMPTY);
+        }
+        if(StringUtils.isBlank(addr)){
+            result.append(ErrorConstants.ADDRESS_EMPTY);
+        }
+        if(StringUtils.isBlank(imei)){
+            result.append(ErrorConstants.IMEI_EMPTY);
+        }
+        if(result.length()>0){
+            return result.toString();
+        }
+        return null;
+    }
+    @Override
+    public String toString() {
+        return "RegistData [customerNo=" + customerNo + ", registNo=" + registNo + ", smallZone=" + smallZone
+                + ", addr=" + addr + ", location=" + location + ", imei=" + imei + ", apiKey=" + apiKey + "]";
+    }
+
+}

+ 19 - 0
meter-reading-tianjin/src/main/java/com/huaxu/zoniot/entity/RespData.java

@@ -0,0 +1,19 @@
+package com.huaxu.zoniot.entity;
+
+import lombok.Data;
+
+import java.util.List;
+
+/**
+ * <p>Water meter integration response data.</p>
+ *
+ * @Author wilian.peng
+ * @Date 2020/12/31 15:19
+ * @Version 1.0
+ */
+@Data
+public class RespData {
+    private String RtnId;
+    private String Msg;
+    private List<RtnData> RtnData;
+}

+ 58 - 0
meter-reading-tianjin/src/main/java/com/huaxu/zoniot/entity/RtnData.java

@@ -0,0 +1,58 @@
+package com.huaxu.zoniot.entity;
+
+import lombok.Data;
+
+/**
+ * <p>返回数据</p>
+ *
+ * @Author wilian.peng
+ * @Date 2020/12/31 15:22
+ * @Version 1.0
+ */
+@Data
+public class RtnData {
+    /**
+     * IMEI
+     */
+    private String ECUID;
+    /**
+     * Registration time
+     */
+    private String ECURegDate;
+    /**
+     * Base station ID (corresponds to cellID)
+     */
+    private String BaseStationID;
+    /**
+     * Meter status
+     */
+    private String ECUState;
+    /**
+     * Meter reading time
+     */
+    private String RXDDate;
+    /**
+     * Dial reading
+     */
+    private String RXDReading;
+    /**
+     * Meter reading index (rounded)
+     */
+    private String Reading;
+    /**
+     * Battery level
+     */
+    private String BatteryVotage;
+    /**
+     * Signal strength
+     */
+    private String SignalIntensity;
+    /**
+     * Valve status
+     */
+    private String Valve;
+    /**
+     * Status message (alarm reason)
+     */
+    private String Message;
+}

+ 134 - 0
meter-reading-tianjin/src/main/java/com/huaxu/zoniot/web/IntegrationController.java

@@ -0,0 +1,134 @@
+package com.huaxu.zoniot.web;
+
+import com.alibaba.fastjson.JSON;
+import com.huaxu.zoniot.common.ErrorConstants;
+import com.huaxu.zoniot.entity.RegistData;
+import com.huaxu.zoniot.entity.RespData;
+import com.huaxu.zoniot.entity.RtnData;
+import lombok.extern.slf4j.Slf4j;
+import org.apache.commons.lang3.StringUtils;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.web.bind.annotation.PostMapping;
+import org.springframework.web.bind.annotation.RequestBody;
+import org.springframework.web.bind.annotation.RestController;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * <p>External integration endpoints.</p>
+ *
+ * @Author wilian.peng
+ * @Date 2020/12/31 16:34
+ * @Version 1.0
+ */
+@Slf4j
+@RestController
+public class IntegrationController {
+    public  static final String IMEI_SEPARATOR = ",";
+
+    @Value("${api.key}")
+    String apiKey ;
+    /**
+     * Tianjin data query endpoint: returns the current day's meter readings for the given IMEIs.
+     * @param imei
+     * @param apiKey
+     * @return
+     */
+    @PostMapping("/queryReadrecordData")
+    public RespData queryReadRecordData(String imei , String apiKey) {
+        RespData respData = new RespData();
+        if(StringUtils.isNotBlank(apiKey) &&apiKey.equals(this.apiKey)){
+            if(StringUtils.isNotBlank(imei.trim())){
+                //List<RtnData> list= remoteReadrecordService.getResponseDataForTJ(Arrays.asList(imei.split(",")));
+                // Placeholder until the remote read-record service above is wired in
+                List<RtnData> list = new ArrayList<>();
+                for(RtnData rtnData:list){
+                    // Blank out each IMEI that was found
+                    imei=imei.replace(rtnData.getECUID(), "");
+                }
+                for(String s:imei.split(IMEI_SEPARATOR)){
+                    if(StringUtils.isNotBlank(s)){
+                        RtnData rtnData2=new RtnData();
+                        rtnData2.setECUID(s);
+                        rtnData2.setECUState("-2");
+                        rtnData2.setMessage(ErrorConstants.NO_DEVICE_ERROR);
+                        list.add(rtnData2);
+                    }
+                }
+                respData.setRtnId("0");
+                respData.setRtnData(list);
+                return respData;
+            }else{
+                respData.setRtnId("-1");
+                respData.setRtnData(new ArrayList<>());
+                respData.setMsg(ErrorConstants.IMEI_EMPTY);
+                return respData;
+            }
+        }
+        respData.setRtnId("-1");
+        respData.setMsg(ErrorConstants.API_KEY_ERROR);
+        return respData;
+    }
+
+    @PostMapping("/registIMEI")
+    public RespData registIMEI( @RequestBody RegistData registData) {
+        log.info("天津注册接口,data = {}", JSON.toJSONString(registData.toString()));
+        RespData respData = new RespData();
+        if(registData != null && StringUtils.isNotBlank(registData.getApiKey())&& registData.getApiKey().equals(this.apiKey)){
+
+            String checkInfo = registData.check();
+            if(checkInfo == null){
+                // String result= devicePrepaymeterService.regist(registData);
+                String result= "";
+                if(result.equals("OK")){
+                    respData.setRtnId("0");
+                    respData.setMsg("注册成功");
+                }else{
+                    respData.setRtnId("-1");
+                    respData.setMsg("注册失败:"+result);
+                }
+
+            }else{ // Some required field is blank: return the validation message directly
+                respData.setRtnId("-1");
+                respData.setMsg(checkInfo);
+            }
+            return respData;
+        }
+        respData.setRtnId("-1");
+        respData.setMsg("apiKey信息错误");
+        return respData;
+
+    }
+
+
+    @PostMapping("/queryIMEI")
+    public RespData queryIMEI(  String imei , String apiKey) {
+        RespData respData = new RespData();
+        if(StringUtils.isNotBlank(apiKey)&& apiKey.equals(this.apiKey)){
+
+            if(StringUtils.isNotBlank(imei)){
+                // List<DevicePrepaymeter> list=	devicePrepaymeterService.getPrepaymeterByDeviceCode(imei);
+                // Placeholder until the device query service above is wired in
+                List<?> list = new ArrayList<>();
+                if(list.size()==0){
+                    respData.setRtnId("-1");
+                    respData.setMsg("未注册");
+                }else if(list.size()==1){
+                    respData.setRtnId("0");
+                    respData.setMsg("已注册");
+                }else if(list.size()>1){
+                    respData.setRtnId("-1");
+                    respData.setMsg("系统数据异常,存在重复注册数据");
+                }
+            }else{ // IMEI is blank: return immediately
+                respData.setRtnId("-1");
+                respData.setMsg("IMEI为空");
+            }
+            return respData;
+        }
+        respData.setRtnId("-1");
+        respData.setMsg("apiKey信息错误");
+        return respData;
+    }
+
+
+}
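
A rough client-side sketch of calling the query endpoint (not part of this commit): the port 8001, context path /readmeter and api.key value come from the application-dev.properties file below, while the IMEI values are made up for illustration; the controller binds imei and apiKey from plain request parameters, so the call is sent as a form post.

package com.huaxu.zoniot.web;

import org.springframework.http.ResponseEntity;
import org.springframework.util.LinkedMultiValueMap;
import org.springframework.util.MultiValueMap;
import org.springframework.web.client.RestTemplate;

// Hypothetical client, for illustration only
public class IntegrationClientSketch {

    public static void main(String[] args) {
        String url = "http://localhost:8001/readmeter/queryReadrecordData";
        MultiValueMap<String, String> form = new LinkedMultiValueMap<>();
        form.add("imei", "868333030000001,868333030000002");   // made-up IMEIs
        form.add("apiKey", "hauxureadmeter");                  // api.key from application-dev.properties
        ResponseEntity<String> response = new RestTemplate().postForEntity(url, form, String.class);
        System.out.println(response.getBody());
    }
}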

+ 35 - 0
meter-reading-tianjin/src/main/resources/application-dev.properties

@@ -0,0 +1,35 @@
+server.port=8001
+logging.level.root=info
+logging.file.path=./logs
+server.servlet.context-path=/readmeter
+##############################################Database configuration##########################################
+#spring datasource with druid
+spring.datasource.driver-class-name=com.mysql.jdbc.Driver
+spring.datasource.type=com.alibaba.druid.pool.DruidDataSource
+spring.datasource.url=jdbc:mysql://114.135.61.188:33306/smart_city_sit_6_10?characterEncoding=utf8&useSSL=false&serverTimezone=Asia/Shanghai&zeroDateTimeBehavior=convertToNull
+spring.datasource.username=root
+spring.datasource.password=100Zone@123
+spring.datasource.druid.initial-size=5
+spring.datasource.druid.min-idle=5
+spring.datasource.druid.max-active=20
+spring.datasource.druid.max-wait=60000
+spring.datasource.druid.time-between-eviction-runs-millis=60000
+spring.datasource.druid.min-evictable-idle-time-millis=300000
+spring.datasource.druid.validation-query=SELECT 1
+spring.datasource.druid.test-while-idle=true
+spring.datasource.druid.test-on-borrow=true
+spring.datasource.druid.test-on-return=false
+spring.datasource.druid.pool-prepared-statements=true
+spring.datasource.druid.max-pool-prepared-statement-per-connection-size=20
+spring.datasource.druid.filters=stat,wall
+spring.datasource.druid.connection-properties=druid.stat.mergeSql=true;druid.stat.slowSqlMillis=5000
+# mybatis_config
+mybatis.mapper-locations=classpath*:mapper/*.xml
+mybatis.type-aliases-package=com.huaxu.zoniot.entity
+mybatis.configuration.map-underscore-to-camel-case=true
+mybatis.configuration.log-impl=org.apache.ibatis.logging.stdout.StdOutImpl
+mybatis.configuration.use-column-label=true
+##############################################Business configuration##########################################
+api.key=hauxureadmeter
+netcode=20191118
+meterBore=NB_IOT_20mm

+ 1 - 0
meter-reading-tianjin/src/main/resources/application.properties

@@ -0,0 +1 @@
+spring.profiles.active=dev

+ 1 - 1
smart-city-bat/src/main/java/com/zcxk/smartcity/bat/job/SQLAnalysisJob.java

@@ -58,7 +58,7 @@ public class SQLAnalysisJob implements  Job,Serializable {
         logger.info("SQL Script Content: "+content);
         // 3,动态执行SQL
         if(content != null && !content.equals("")) {
-        	Map<String,Object> params = new HashMap<String,Object>();
+        	Map<String,Object> params = new HashMap<String,Object>(1);
         	params.put("date", new Date());
         	dynamicQuery.nativeInsert(content, params);
         }

+ 2 - 2
smart-city-bat/src/main/resources/application-test.properties

@@ -30,7 +30,7 @@ spring.thymeleaf.template.cache=false
 
 
 # Note: beware of garbled Chinese characters (encoding issues) in this file
-spring.datasource.url=jdbc:mysql://129.204.175.72:3306/smart_city_sit_6_10?characterEncoding=utf-8&useSSL=false
+spring.datasource.url=jdbc:mysql://10.0.0.161:3306/smart_city_sit_6_10?characterEncoding=utf-8&useSSL=false
 spring.datasource.username=root
 spring.datasource.password=100Zone@123
 spring.datasource.driver-class-name=com.mysql.jdbc.Driver
@@ -62,7 +62,7 @@ spring.quartz.properties.org.quartz.threadPool.threadPriority=5
 spring.quartz.properties.org.quartz.threadPool.threadsInheritContextClassLoaderOfInitializingThread=true
 
 # Enable Quartz cluster mode
-spring.quartz.properties.org.quartz.jobStore.isClustered:true
+spring.quartz.properties.org.quartz.jobStore.isClustered=true
 # Cluster check-in interval, in milliseconds (2000 = 2s)
 spring.quartz.properties.org.quartz.jobStore.clusterCheckinInterval = 2000
 

+ 1 - 1
smart-city-bat/src/main/resources/application.properties

@@ -1,2 +1,2 @@
 # Environments - development: dev, test: test, production: prd
-spring.profiles.active=uat
+spring.profiles.active=prd

+ 1 - 1
smart-city-bat/src/main/resources/sql/stat_meter_read_rate_by_concentrator_15day.sql

@@ -29,7 +29,7 @@ FROM
 		WHERE
 			read_date < DATE_FORMAT( :date, '%Y%m%d' ) 
 		AND read_date >= DATE_FORMAT( date_add(:date, interval -15 day), '%Y%m%d' ) 
-		AND device_type_id = 16
+		AND device_type_id  in (select device_type_id from sc_w_meter_type where parent_id = 1 and status = 1)
 		GROUP BY
 			site_id,
 			device_type_id,

+ 1 - 1
smart-city-bat/src/main/resources/sql/stat_meter_read_rate_by_concentrator_7day.sql

@@ -29,7 +29,7 @@ FROM
 		WHERE
 			read_date < DATE_FORMAT( :date, '%Y%m%d' ) 
 		AND read_date >= DATE_FORMAT( date_add(:date, interval -7 day), '%Y%m%d' ) 
-		AND device_type_id = 16
+		AND device_type_id  in (select device_type_id from sc_w_meter_type where parent_id = 1 and status = 1)
 		GROUP BY
 			site_id,
 			device_type_id,

+ 1 - 1
smart-city-bat/src/main/resources/sql/stat_meter_read_rate_by_concentrator_day.sql

@@ -19,6 +19,6 @@ FROM
 	sc_meter_read_record
 where read_date < DATE_FORMAT( :date, '%Y%m%d' ) 
 AND read_date > DATE_FORMAT( date_add(:date, interval -1 day), '%Y%m%d' ) 
-and device_type_id = 16
+and device_type_id  in (select device_type_id from sc_w_meter_type where parent_id = 1 and status = 1)
 group by
 	site_id,device_type_id,customer_id,concentrator_id

Some files were not shown because the diff is too large