Browse files

Flink optimization: DWD data is no longer written directly to MaxCompute, but to OSS instead

root 3 years ago
parent
commit
4dead24d72
35 changed files with 510 additions and 150 deletions
  1. flink-ad-monitoring/dependency-reduced-pom.xml (+0, -4)
  2. flink-ad-monitoring/pom.xml (+4, -4)
  3. flink-ad-monitoring/src/main/java/flink/zanxiangnet/ad/monitoring/AdDayODSStreamJob.java (+83, -0)
  4. flink-ad-monitoring/src/main/java/flink/zanxiangnet/ad/monitoring/AdDayStreamJob.java (+14, -16)
  5. flink-ad-monitoring/src/main/java/flink/zanxiangnet/ad/monitoring/AdHourODSStreamJob.java (+106, -0)
  6. flink-ad-monitoring/src/main/java/flink/zanxiangnet/ad/monitoring/AdHourStreamJob.java (+19, -26)
  7. flink-ad-monitoring/src/main/java/flink/zanxiangnet/ad/monitoring/PlanDayStreamJob.java (+3, -3)
  8. flink-ad-monitoring/src/main/java/flink/zanxiangnet/ad/monitoring/PlanHourStreamJob.java (+3, -3)
  9. flink-ad-monitoring/src/main/java/flink/zanxiangnet/ad/monitoring/Test.java (+78, -44)
  10. flink-ad-monitoring/src/main/java/flink/zanxiangnet/ad/monitoring/config/ClickhouseDataSourceFactory.java (+4, -3)
  11. flink-ad-monitoring/src/main/java/flink/zanxiangnet/ad/monitoring/kafka/KafkaComponent.java (+3, -0)
  12. flink-ad-monitoring/src/main/java/flink/zanxiangnet/ad/monitoring/maxcompute/bean/BeanUtil.java (+35, -21)
  13. flink-ad-monitoring/src/main/java/flink/zanxiangnet/ad/monitoring/maxcompute/sink/TunnelBatchSinkBuffer.java (+1, -1)
  14. flink-ad-monitoring/src/main/java/flink/zanxiangnet/ad/monitoring/maxcompute/sink/TunnelBatchWriter.java (+1, -1)
  15. flink-ad-monitoring/src/main/java/flink/zanxiangnet/ad/monitoring/pojo/dto/AdStatOfDayODSDTO.java (+1, -1)
  16. flink-ad-monitoring/src/main/java/flink/zanxiangnet/ad/monitoring/pojo/entity/AdStatOfDayDWD.java (+1, -1)
  17. flink-ad-monitoring/src/main/java/flink/zanxiangnet/ad/monitoring/pojo/entity/AdStatOfHourDWD.java (+1, -1)
  18. flink-ad-monitoring/src/main/java/flink/zanxiangnet/ad/monitoring/process/AdDayDWDRollMonthProcess.java (+1, -3)
  19. flink-ad-monitoring/src/main/java/flink/zanxiangnet/ad/monitoring/process/AdDayOnTimeStreamCompletionProcess.java (+1, -1)
  20. flink-ad-monitoring/src/main/java/flink/zanxiangnet/ad/monitoring/process/AdHourDTOStreamProcess.java (+2, -2)
  21. flink-ad-monitoring/src/main/java/flink/zanxiangnet/ad/monitoring/process/AdHourDWDProcess.java (+1, -1)
  22. flink-ad-monitoring/src/main/java/flink/zanxiangnet/ad/monitoring/process/AdHourOnTimeStreamCompletionProcess.java (+1, -1)
  23. flink-ad-monitoring/src/main/java/flink/zanxiangnet/ad/monitoring/process/AdMinuteDWDProcess.java (+1, -1)
  24. flink-ad-monitoring/src/main/java/flink/zanxiangnet/ad/monitoring/process/CostHourProcess.java (+1, -1)
  25. flink-ad-monitoring/src/main/java/flink/zanxiangnet/ad/monitoring/process/CostMinuteProcess.java (+1, -1)
  26. flink-ad-monitoring/src/main/java/flink/zanxiangnet/ad/monitoring/process/PlanDayDWDRollMonthProcess.java (+1, -1)
  27. flink-ad-monitoring/src/main/java/flink/zanxiangnet/ad/monitoring/process/PlanHourDTOStreamProcess.java (+2, -2)
  28. flink-ad-monitoring/src/main/java/flink/zanxiangnet/ad/monitoring/process/PlanHourDWDProcess.java (+1, -1)
  29. flink-ad-monitoring/src/main/java/flink/zanxiangnet/ad/monitoring/process/PlanHourStreamCompletionProcess.java (+1, -1)
  30. flink-ad-monitoring/src/main/java/flink/zanxiangnet/ad/monitoring/process/PlanMinuteDWDProcess.java (+1, -1)
  31. flink-ad-monitoring/src/main/java/flink/zanxiangnet/ad/monitoring/sink/OssBatchStreamSink.java (+134, -0)
  32. flink-ad-monitoring/src/main/java/flink/zanxiangnet/ad/monitoring/trigger/AdMinuteODSStreamTrigger.java (+1, -1)
  33. flink-ad-monitoring/src/main/java/flink/zanxiangnet/ad/monitoring/trigger/CostHourDMStreamTrigger.java (+1, -1)
  34. flink-ad-monitoring/src/main/java/flink/zanxiangnet/ad/monitoring/trigger/CostMinuteDMStreamTrigger.java (+1, -1)
  35. flink-ad-monitoring/src/main/java/flink/zanxiangnet/ad/monitoring/trigger/PlanMinuteODSStreamTrigger.java (+1, -1)

+ 0 - 4
flink-ad-monitoring/dependency-reduced-pom.xml

@@ -148,10 +148,6 @@
           <artifactId>flink-shaded-guava</artifactId>
           <groupId>org.apache.flink</groupId>
         </exclusion>
-        <exclusion>
-          <artifactId>commons-math3</artifactId>
-          <groupId>org.apache.commons</groupId>
-        </exclusion>
       </exclusions>
     </dependency>
     <dependency>

+ 4 - 4
flink-ad-monitoring/pom.xml

@@ -171,9 +171,9 @@ under the License.
 
         <!-- CSV file handling in Java -->
         <dependency>
-            <groupId>org.apache.commons</groupId>
-            <artifactId>commons-csv</artifactId>
-            <version>1.9.0</version>
+            <groupId>com.zanxiangnet.module</groupId>
+            <artifactId>zx-util</artifactId>
+            <version>1.0.0-SNAPSHOT</version>
         </dependency>
 
         <!-- mysql -->
@@ -244,7 +244,7 @@ under the License.
                             <transformers>
                                 <transformer
                                         implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
-                                    <mainClass>flink.zanxiangnet.ad.monitoring.AdDayStreamJob</mainClass>
+                                    <mainClass>flink.zanxiangnet.ad.monitoring.AdHourStreamJob</mainClass>
                                 </transformer>
                             </transformers>
                         </configuration>

+ 83 - 0
flink-ad-monitoring/src/main/java/flink/zanxiangnet/ad/monitoring/AdDayODSStreamJob.java

@@ -0,0 +1,83 @@
+package flink.zanxiangnet.ad.monitoring;
+
+import flink.zanxiangnet.ad.monitoring.kafka.KafkaComponent;
+import flink.zanxiangnet.ad.monitoring.pojo.dto.AdStatOfDayODSDTO;
+import flink.zanxiangnet.ad.monitoring.pojo.entity.AdDataOfDayODS;
+import flink.zanxiangnet.ad.monitoring.pojo.properties.ApplicationProperties;
+import flink.zanxiangnet.ad.monitoring.sink.TunnelBatchStreamSink;
+import flink.zanxiangnet.ad.monitoring.stream.KeyedBatchStream;
+import lombok.extern.slf4j.Slf4j;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.flink.api.common.eventtime.WatermarkStrategy;
+import org.apache.flink.configuration.Configuration;
+import org.apache.flink.connector.kafka.source.KafkaSource;
+import org.apache.flink.contrib.streaming.state.EmbeddedRocksDBStateBackend;
+import org.apache.flink.streaming.api.CheckpointingMode;
+import org.apache.flink.streaming.api.datastream.DataStreamSource;
+import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
+import org.apache.flink.streaming.api.environment.CheckpointConfig;
+import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
+import org.apache.flink.streaming.api.windowing.time.Time;
+
+import java.util.Properties;
+
+/**
+ * Writes the raw (ODS) data stream directly to storage
+ */
+@Slf4j
+public class AdDayODSStreamJob {
+
+    public static void main(String[] args) throws Exception {
+        boolean isTest = false;
+        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
+
+        // Load the config file into Flink's global configuration
+        Properties props = new Properties();
+        props.load(AdHourStreamJob.class.getResourceAsStream(isTest ? "/application.test.properties" : "/application.properties"));
+        Configuration configuration = new Configuration();
+        props.stringPropertyNames().forEach(key -> {
+            String value = props.getProperty(key);
+            configuration.setString(key.trim(), StringUtils.isBlank(value) ? "" : value.trim());
+        });
+        env.getConfig().setGlobalJobParameters(configuration);
+
+        // Checkpoint configuration (every 5 minutes)
+        env.enableCheckpointing(5 * 60 * 1000L, CheckpointingMode.EXACTLY_ONCE);
+        // Checkpoint timeout; the checkpoint fails if it runs longer than this
+        env.getCheckpointConfig().setCheckpointTimeout(5 * 60 * 1000L);
+        // Minimum pause between two checkpoints
+        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(10 * 1000L);
+        // Number of checkpoints allowed to run concurrently; ignored (effectively 1) when minPauseBetweenCheckpoints is set
+        env.getCheckpointConfig().setMaxConcurrentCheckpoints(1);
+        // Enable externalized checkpoints; they are not cleaned up automatically when the job fails, so the state must be removed manually.
+        // ExternalizedCheckpointCleanup controls how externalized checkpoints are cleaned up when the job is canceled:
+        // DELETE_ON_CANCELLATION: external state is deleted automatically when the job is canceled, but retained if the job FAILED
+        // RETAIN_ON_CANCELLATION: external state is retained when the job is canceled
+        env.getCheckpointConfig().enableExternalizedCheckpoints(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
+        // Number of checkpoint failures to tolerate, default 0
+        env.getCheckpointConfig().setTolerableCheckpointFailureNumber(0);
+        env.setStateBackend(new EmbeddedRocksDBStateBackend(true));
+        if (StringUtils.isNotBlank(props.getProperty(ApplicationProperties.FLINK_CHECKPOINT_SAVEPOINT))) {
+            env.getCheckpointConfig().setCheckpointStorage(props.getProperty(ApplicationProperties.FLINK_CHECKPOINT_SAVEPOINT));
+        }
+
+        // --------------------------- daily data ---------------------------
+        KafkaSource<String> adStreamOfDaySource = KafkaComponent.buildKafkaSource(isTest, props, KafkaComponent.KafkaTopic.adDayTopic,
+                KafkaComponent.KafkaTopic.KafkaGroupId.adDayODSConsumer);
+        DataStreamSource<String> adStreamOfDayIn = env.fromSource(adStreamOfDaySource, WatermarkStrategy.noWatermarks(), "adDaySource_kafka").setParallelism(3);
+
+        SingleOutputStreamOperator<AdStatOfDayODSDTO> adDayODSStream = adStreamOfDayIn
+                .filter(StringUtils::isNotBlank).setParallelism(3)
+                .map(AdStatOfDayODSDTO::byJson).setParallelism(3);
+
+        // Write to the raw (ODS) table
+        new KeyedBatchStream<>(adDayODSStream.map(AdStatOfDayODSDTO::getAdDataOfDayODS).setParallelism(3), AdDataOfDayODS::getStatDay, 3000L, Time.minutes(3L))
+                .toBatch()
+                .setParallelism(3)
+                .addSink(new TunnelBatchStreamSink<>(AdDataOfDayODS.class))
+                .setParallelism(3)
+                .name("sink_ad_day_ods");
+
+        env.execute(isTest ? "ad_day_ods_stream_job_test" : "ad_day_ods_stream_job");
+    }
+}

+ 14 - 16
flink-ad-monitoring/src/main/java/flink/zanxiangnet/ad/monitoring/AdDayStreamJob.java

@@ -1,5 +1,6 @@
 package flink.zanxiangnet.ad.monitoring;
 
+import com.zanxiangnet.module.util.DateUtil;
 import flink.zanxiangnet.ad.monitoring.kafka.KafkaComponent;
 import flink.zanxiangnet.ad.monitoring.pojo.dto.AdStatOfDayODSDTO;
 import flink.zanxiangnet.ad.monitoring.pojo.entity.AdDataOfDayODS;
@@ -8,13 +9,13 @@ import flink.zanxiangnet.ad.monitoring.pojo.properties.ApplicationProperties;
 import flink.zanxiangnet.ad.monitoring.process.AdDayDWDRollMonthProcess;
 import flink.zanxiangnet.ad.monitoring.process.AdDayDWDRollYearProcess;
 import flink.zanxiangnet.ad.monitoring.sink.AdDayDWDToDBBatchSink;
-import flink.zanxiangnet.ad.monitoring.sink.TunnelBatchStreamSink;
+import flink.zanxiangnet.ad.monitoring.sink.OssBatchStreamSink;
 import flink.zanxiangnet.ad.monitoring.stream.BatchStream;
 import flink.zanxiangnet.ad.monitoring.stream.KeyedBatchStream;
-import flink.zanxiangnet.ad.monitoring.util.DateUtil;
 import lombok.extern.slf4j.Slf4j;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.flink.api.common.eventtime.WatermarkStrategy;
+import org.apache.flink.api.common.restartstrategy.RestartStrategies;
 import org.apache.flink.configuration.Configuration;
 import org.apache.flink.connector.kafka.source.KafkaSource;
 import org.apache.flink.contrib.streaming.state.EmbeddedRocksDBStateBackend;
@@ -38,6 +39,9 @@ public class AdDayStreamJob {
     public static void main(String[] args) throws Exception {
         boolean isTest = false;
         StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
+        // Restart strategy after a job failure
+        // env.setRestartStrategy(RestartStrategies.fixedDelayRestart(0, 0));
+        env.setRestartStrategy(RestartStrategies.noRestart());
 
         // Load the config file into Flink's global configuration
         Properties props = new Properties();
@@ -69,8 +73,10 @@ public class AdDayStreamJob {
             env.getCheckpointConfig().setCheckpointStorage(props.getProperty(ApplicationProperties.FLINK_CHECKPOINT_SAVEPOINT));
         }
 
-        KafkaSource<String> adStreamOfDaySource = KafkaComponent.buildKafkaSource(isTest, props, KafkaComponent.KafkaTopic.adDayTopic, KafkaComponent.KafkaTopic.KafkaGroupId.adDayConsumerGroup);
-        DataStreamSource<String> adStreamOfDayIn = env.fromSource(adStreamOfDaySource, WatermarkStrategy.noWatermarks(), "adDaySource_kafka");
+        KafkaSource<String> adStreamOfDaySource = KafkaComponent.buildKafkaSource(isTest, props, KafkaComponent.KafkaTopic.adDayTopic,
+                KafkaComponent.KafkaTopic.KafkaGroupId.adDayConsumerGroup);
+        DataStreamSource<String> adStreamOfDayIn = env.fromSource(adStreamOfDaySource, WatermarkStrategy.noWatermarks(),
+                "adDaySource_kafka");
 
         // Daily ad data; rolls back 10 days
         final OutputTag<AdDataOfDayODS> adDayStreamRollDayTag = new OutputTag<AdDataOfDayODS>("adDayStreamRollDayTag") {
@@ -81,14 +87,6 @@ public class AdDayStreamJob {
         SingleOutputStreamOperator<AdStatOfDayODSDTO> adDayODSStream = adStreamOfDayIn.filter(StringUtils::isNotBlank)
                 .map(AdStatOfDayODSDTO::byJson);
 
-        // Write to the raw (ODS) table
-        new KeyedBatchStream<>(adDayODSStream.map(AdStatOfDayODSDTO::getAdDataOfDayODS), AdDataOfDayODS::getStatDay, 3000L, Time.minutes(3L))
-                .toBatch()
-                .setParallelism(12)
-                .addSink(new TunnelBatchStreamSink<>(AdDataOfDayODS.class))
-                .setParallelism(12)
-                .name("sink_ad_day_ods");
-
         // Split the stream
         SingleOutputStreamOperator<AdStatOfDayODSDTO> adDayODSStreamSplit = adDayODSStream.process(new ProcessFunction<AdStatOfDayODSDTO, AdStatOfDayODSDTO>() {
             @Override
@@ -110,7 +108,7 @@ public class AdDayStreamJob {
         SingleOutputStreamOperator<AdStatOfDayDWD> adDayDWDMonthStream = adDayODSStreamSplit.getSideOutput(adDayStreamRollDayTag)
                 .keyBy(AdDataOfDayODS::getAdId)
                 .process(new AdDayDWDRollMonthProcess())
-                .setParallelism(4);
+                .setParallelism(3);
 
         // Roll a single account back one year
         SingleOutputStreamOperator<AdStatOfDayDWD> adDayDWDYearStream = adDayODSStreamSplit.getSideOutput(adDayStreamRollYearTag)
@@ -121,9 +119,9 @@ public class AdDayStreamJob {
         // Write to MaxCompute
         new KeyedBatchStream<>(adDayStream, AdStatOfDayDWD::getStatDay, 3000L, Time.minutes(3L))
                 .toBatch()
-                .setParallelism(8)
-                .addSink(new TunnelBatchStreamSink<>(AdStatOfDayDWD.class))
-                .setParallelism(8)
+                .setParallelism(6)
+                .addSink(new OssBatchStreamSink<>(AdStatOfDayDWD.class, new OssBatchStreamSink.MonitoringGenerateOssObjectName("ad_stat_of_day_dwd")))
+                .setParallelism(6)
                 .name("sink_ad_year_dwd");
         // Write to MySQL
         new BatchStream<>(adDayStream, 2000L, Time.minutes(1L))

+ 106 - 0
flink-ad-monitoring/src/main/java/flink/zanxiangnet/ad/monitoring/AdHourODSStreamJob.java

@@ -0,0 +1,106 @@
+package flink.zanxiangnet.ad.monitoring;
+
+import flink.zanxiangnet.ad.monitoring.kafka.KafkaComponent;
+import flink.zanxiangnet.ad.monitoring.pojo.dto.AdStatOfDayODSDTO;
+import flink.zanxiangnet.ad.monitoring.pojo.entity.AdDataOfDayODS;
+import flink.zanxiangnet.ad.monitoring.pojo.entity.AdDataOfHourODS;
+import flink.zanxiangnet.ad.monitoring.pojo.entity.AdDataOfMinuteODS;
+import flink.zanxiangnet.ad.monitoring.pojo.properties.ApplicationProperties;
+import flink.zanxiangnet.ad.monitoring.process.AdHourDTOStreamProcess;
+import flink.zanxiangnet.ad.monitoring.sink.TunnelBatchStreamSink;
+import flink.zanxiangnet.ad.monitoring.stream.KeyedBatchStream;
+import lombok.extern.slf4j.Slf4j;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.flink.api.common.eventtime.WatermarkStrategy;
+import org.apache.flink.configuration.Configuration;
+import org.apache.flink.connector.kafka.source.KafkaSource;
+import org.apache.flink.contrib.streaming.state.EmbeddedRocksDBStateBackend;
+import org.apache.flink.streaming.api.CheckpointingMode;
+import org.apache.flink.streaming.api.datastream.DataStream;
+import org.apache.flink.streaming.api.datastream.DataStreamSource;
+import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
+import org.apache.flink.streaming.api.environment.CheckpointConfig;
+import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
+import org.apache.flink.streaming.api.windowing.time.Time;
+import org.apache.flink.util.OutputTag;
+
+import java.util.Properties;
+
+/**
+ * Writes the raw (ODS) data stream directly to storage
+ */
+@Slf4j
+public class AdHourODSStreamJob {
+
+    public static void main(String[] args) throws Exception {
+        boolean isTest = false;
+        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
+
+        // Load the config file into Flink's global configuration
+        Properties props = new Properties();
+        props.load(AdHourStreamJob.class.getResourceAsStream(isTest ? "/application.test.properties" : "/application.properties"));
+        Configuration configuration = new Configuration();
+        props.stringPropertyNames().forEach(key -> {
+            String value = props.getProperty(key);
+            configuration.setString(key.trim(), StringUtils.isBlank(value) ? "" : value.trim());
+        });
+        env.getConfig().setGlobalJobParameters(configuration);
+
+        // Checkpoint configuration (every 5 minutes)
+        env.enableCheckpointing(5 * 60 * 1000L, CheckpointingMode.EXACTLY_ONCE);
+        // Checkpoint timeout; the checkpoint fails if it runs longer than this
+        env.getCheckpointConfig().setCheckpointTimeout(5 * 60 * 1000L);
+        // Minimum pause between two checkpoints
+        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(10 * 1000L);
+        // Number of checkpoints allowed to run concurrently; ignored (effectively 1) when minPauseBetweenCheckpoints is set
+        env.getCheckpointConfig().setMaxConcurrentCheckpoints(1);
+        // Enable externalized checkpoints; they are not cleaned up automatically when the job fails, so the state must be removed manually.
+        // ExternalizedCheckpointCleanup controls how externalized checkpoints are cleaned up when the job is canceled:
+        // DELETE_ON_CANCELLATION: external state is deleted automatically when the job is canceled, but retained if the job FAILED
+        // RETAIN_ON_CANCELLATION: external state is retained when the job is canceled
+        env.getCheckpointConfig().enableExternalizedCheckpoints(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
+        // Number of checkpoint failures to tolerate, default 0
+        env.getCheckpointConfig().setTolerableCheckpointFailureNumber(0);
+        env.setStateBackend(new EmbeddedRocksDBStateBackend(true));
+        if (StringUtils.isNotBlank(props.getProperty(ApplicationProperties.FLINK_CHECKPOINT_SAVEPOINT))) {
+            env.getCheckpointConfig().setCheckpointStorage(props.getProperty(ApplicationProperties.FLINK_CHECKPOINT_SAVEPOINT));
+        }
+
+        // --------------------------- hourly data ---------------------------
+        KafkaSource<String> adStreamOfMinuteSource = KafkaComponent.buildKafkaSource(isTest, props, KafkaComponent.KafkaTopic.adHourTopic,
+                KafkaComponent.KafkaTopic.KafkaGroupId.adHourODSConsumer);
+        DataStreamSource<String> adStreamOfMinuteIn = env.fromSource(adStreamOfMinuteSource, WatermarkStrategy.noWatermarks(), "adHourSource_kafka").setParallelism(6);
+
+        // Per-minute ad data (ad spend of the previous 5 minutes)
+        final OutputTag<AdDataOfMinuteODS> adMinuteStreamTag = new OutputTag<AdDataOfMinuteODS>("adMinuteStream") {
+        };
+        // Hourly ad data (rolls back 10 days)
+        final OutputTag<AdDataOfHourODS> adHourStreamTag = new OutputTag<AdDataOfHourODS>("adHourStream") {
+        };
+        // Map and split the stream (real-time minute stream and rollback hour stream)
+        SingleOutputStreamOperator<AdDataOfMinuteODS> adODSStream = adStreamOfMinuteIn
+                .filter(StringUtils::isNotBlank).setParallelism(6)
+                .process(new AdHourDTOStreamProcess(adMinuteStreamTag, adHourStreamTag)).setParallelism(6);
+
+        // Minute stream
+        DataStream<AdDataOfMinuteODS> adMinuteODSStream = adODSStream.getSideOutput(adMinuteStreamTag);
+        // Minute stream: write to the raw (ODS) table
+        new KeyedBatchStream<>(adMinuteODSStream, AdDataOfMinuteODS::getStatDay, 6000L, Time.minutes(3L))
+                .toBatch()
+                .setParallelism(6)
+                .addSink(new TunnelBatchStreamSink<>(AdDataOfMinuteODS.class))
+                .setParallelism(6)
+                .name("sink_ad_minute_ods");
+        // Hour stream
+        DataStream<AdDataOfHourODS> adHourODSStream = adODSStream.getSideOutput(adHourStreamTag);
+        // Hour stream: write to the raw (ODS) table
+        new KeyedBatchStream<>(adHourODSStream, AdDataOfHourODS::getStatDay, 3000L, Time.minutes(3L))
+                .toBatch()
+                .setParallelism(3)
+                .addSink(new TunnelBatchStreamSink<>(AdDataOfHourODS.class))
+                .setParallelism(3)
+                .name("sink_ad_hour_ods");
+
+        env.execute(isTest ? "ad_hour_ods_stream_job_test" : "ad_hour_ods_stream_job");
+    }
+}

+ 19 - 26
flink-ad-monitoring/src/main/java/flink/zanxiangnet/ad/monitoring/AdHourStreamJob.java

@@ -1,12 +1,13 @@
 package flink.zanxiangnet.ad.monitoring;
 
+import com.zanxiangnet.module.util.DateUtil;
 import flink.zanxiangnet.ad.monitoring.pojo.entity.*;
 import flink.zanxiangnet.ad.monitoring.pojo.properties.ApplicationProperties;
 import flink.zanxiangnet.ad.monitoring.process.*;
 import flink.zanxiangnet.ad.monitoring.sink.AdDayDWDToDBBatchSink;
 import flink.zanxiangnet.ad.monitoring.sink.AdHourDMToCkBatchSink;
 import flink.zanxiangnet.ad.monitoring.sink.AdMinuteDMToCkBatchSink;
-import flink.zanxiangnet.ad.monitoring.sink.TunnelBatchStreamSink;
+import flink.zanxiangnet.ad.monitoring.sink.OssBatchStreamSink;
 import flink.zanxiangnet.ad.monitoring.stream.BatchStream;
 import flink.zanxiangnet.ad.monitoring.stream.KeyedBatchStream;
 import flink.zanxiangnet.ad.monitoring.trigger.AdMinuteODSStreamTrigger;
@@ -14,6 +15,7 @@ import flink.zanxiangnet.ad.monitoring.kafka.KafkaComponent;
 import lombok.extern.slf4j.Slf4j;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.flink.api.common.eventtime.*;
+import org.apache.flink.api.common.restartstrategy.RestartStrategies;
 import org.apache.flink.configuration.Configuration;
 import org.apache.flink.connector.kafka.source.KafkaSource;
 import org.apache.flink.contrib.streaming.state.ConfigurableRocksDBOptionsFactory;
@@ -40,6 +42,10 @@ public class AdHourStreamJob {
     public static void main(String[] args) throws Exception {
         boolean isTest = false;
         StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
+        // Restart strategy after a job failure
+        // env.setRestartStrategy(RestartStrategies.fixedDelayRestart(0, 0));
+        env.setRestartStrategy(RestartStrategies.noRestart());
+        // Set the default parallelism
         env.setParallelism(6);
 
         // Load the config file into Flink's global configuration
@@ -75,8 +81,9 @@ public class AdHourStreamJob {
             env.getCheckpointConfig().setCheckpointStorage(props.getProperty(ApplicationProperties.FLINK_CHECKPOINT_SAVEPOINT));
         }
 
-        KafkaSource<String> adStreamOfMinuteSource = KafkaComponent.buildKafkaSource(isTest, props, KafkaComponent.KafkaTopic.adHourTopic, KafkaComponent.KafkaTopic.KafkaGroupId.adHourConsumerGroup);
-        DataStreamSource<String> adStreamOfMinuteIn = env.fromSource(adStreamOfMinuteSource, WatermarkStrategy.noWatermarks(), "adHourSource_kafka").setParallelism(12);
+        KafkaSource<String> adStreamOfMinuteSource = KafkaComponent.buildKafkaSource(isTest, props, KafkaComponent.KafkaTopic.adHourTopic,
+                KafkaComponent.KafkaTopic.KafkaGroupId.adHourConsumerGroup);
+        DataStreamSource<String> adStreamOfMinuteIn = env.fromSource(adStreamOfMinuteSource, WatermarkStrategy.noWatermarks(), "adHourSource_kafka").setParallelism(8);
 
         // Per-minute ad data (ad spend of the previous 5 minutes)
         final OutputTag<AdDataOfMinuteODS> adMinuteStreamTag = new OutputTag<AdDataOfMinuteODS>("adMinuteStream") {
@@ -87,18 +94,11 @@ public class AdHourStreamJob {
 
         // Map and split the stream (real-time minute stream and rollback hour stream)
         SingleOutputStreamOperator<AdDataOfMinuteODS> adODSStream = adStreamOfMinuteIn
-                .filter(StringUtils::isNotBlank).setParallelism(12)
-                .process(new AdHourDTOStreamProcess(adMinuteStreamTag, adHourStreamTag)).setParallelism(12);
+                .filter(StringUtils::isNotBlank).setParallelism(8)
+                .process(new AdHourDTOStreamProcess(adMinuteStreamTag, adHourStreamTag)).setParallelism(8);
 
         // Minute stream
         DataStream<AdDataOfMinuteODS> adMinuteODSStream = adODSStream.getSideOutput(adMinuteStreamTag);
-        // Minute stream: write to the raw (ODS) table
-        new KeyedBatchStream<>(adMinuteODSStream, AdDataOfMinuteODS::getStatDay, 6000L, Time.minutes(3L))
-                .toBatch()
-                .setParallelism(12)
-                .addSink(new TunnelBatchStreamSink<>(AdDataOfMinuteODS.class))
-                .setParallelism(12)
-                .name("sink_ad_minute_ods");
 
         // Minute stream: computation
         SingleOutputStreamOperator<AdStatOfMinuteDWD> adMinuteDWDStream = adMinuteODSStream
@@ -110,12 +110,12 @@ public class AdHourStreamJob {
                 .window(TumblingEventTimeWindows.of(Time.minutes(5L)))
                 .trigger(new AdMinuteODSStreamTrigger())
                 .process(new AdMinuteDWDProcess())
-                .setParallelism(12);
+                .setParallelism(8);
         new KeyedBatchStream<>(adMinuteDWDStream, AdStatOfMinuteDWD::getStatDay, 5000L, Time.minutes(3L))
                 .toBatch()
-                .setParallelism(12)
-                .addSink(new TunnelBatchStreamSink<>(AdStatOfMinuteDWD.class))
-                .setParallelism(12)
+                .setParallelism(8)
+                .addSink(new OssBatchStreamSink<>(AdStatOfMinuteDWD.class, new OssBatchStreamSink.MonitoringGenerateOssObjectName("ad_stat_of_minute_dwd")))
+                .setParallelism(8)
                 .name("sink_ad_minute_dwd");
 
         // Minute stream: write to CK (ClickHouse)
@@ -132,13 +132,6 @@ public class AdHourStreamJob {
 
         // Hour stream
         DataStream<AdDataOfHourODS> adHourODSStream = adODSStream.getSideOutput(adHourStreamTag);
-        // Hour stream: write to the raw (ODS) table
-        new KeyedBatchStream<>(adHourODSStream, AdDataOfHourODS::getStatDay, 3000L, Time.minutes(3L))
-                .toBatch()
-                .setParallelism(12)
-                .addSink(new TunnelBatchStreamSink<>(AdDataOfHourODS.class))
-                .setParallelism(12)
-                .name("sink_ad_hour_ods");
 
         // Hour stream: computation
         SingleOutputStreamOperator<AdStatOfHourDWD> adHourDWDStream =
@@ -148,9 +141,9 @@ public class AdHourStreamJob {
         // Hour stream: write to MaxCompute
         new KeyedBatchStream<>(adHourDWDStream, AdStatOfHourDWD::getStatDay, 3000L, Time.minutes(3L))
                 .toBatch()
-                .setParallelism(12)
-                .addSink(new TunnelBatchStreamSink<>(AdStatOfHourDWD.class))
-                .setParallelism(12)
+                .setParallelism(8)
+                .addSink(new OssBatchStreamSink<>(AdStatOfHourDWD.class, new OssBatchStreamSink.MonitoringGenerateOssObjectName("ad_stat_of_hour_dwd")))
+                .setParallelism(8)
                 .name("sink_ad_hour_dwd");
 
         // Convert the minute stream into an hour stream, filling in the missing hours

+ 3 - 3
flink-ad-monitoring/src/main/java/flink/zanxiangnet/ad/monitoring/PlanDayStreamJob.java

@@ -1,5 +1,6 @@
 package flink.zanxiangnet.ad.monitoring;
 
+import com.zanxiangnet.module.util.DateUtil;
 import flink.zanxiangnet.ad.monitoring.kafka.KafkaComponent;
 import flink.zanxiangnet.ad.monitoring.pojo.dto.AdStatOfDayODSDTO;
 import flink.zanxiangnet.ad.monitoring.pojo.entity.AdDataOfDayODS;
@@ -7,11 +8,10 @@ import flink.zanxiangnet.ad.monitoring.pojo.entity.PlanStatOfDayDWD;
 import flink.zanxiangnet.ad.monitoring.pojo.properties.ApplicationProperties;
 import flink.zanxiangnet.ad.monitoring.process.PlanDayDWDRollMonthProcess;
 import flink.zanxiangnet.ad.monitoring.process.PlanDayDWDRollYearProcess;
+import flink.zanxiangnet.ad.monitoring.sink.OssBatchStreamSink;
 import flink.zanxiangnet.ad.monitoring.sink.PlanDayDWDToCkBatchSink;
-import flink.zanxiangnet.ad.monitoring.sink.TunnelBatchStreamSink;
 import flink.zanxiangnet.ad.monitoring.stream.BatchStream;
 import flink.zanxiangnet.ad.monitoring.stream.KeyedBatchStream;
-import flink.zanxiangnet.ad.monitoring.util.DateUtil;
 import lombok.extern.slf4j.Slf4j;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.flink.api.common.eventtime.WatermarkStrategy;
@@ -113,7 +113,7 @@ public class PlanDayStreamJob {
         // Write to MaxCompute
         new KeyedBatchStream<>(planDayDWDStream, PlanStatOfDayDWD::getStatDay, 4000L, Time.minutes(1L))
                 .toBatch()
-                .addSink(new TunnelBatchStreamSink<>(PlanStatOfDayDWD.class))
+                .addSink(new OssBatchStreamSink<>(PlanStatOfDayDWD.class, new OssBatchStreamSink.MonitoringGenerateOssObjectName("plan_stat_of_day_dwd")))
                 .name("sink_plan_year_dwd");
         // Write to CK
         new BatchStream<>(planDayDWDStream, 1000L, Time.minutes(1L)).toBatch().addSink(new PlanDayDWDToCkBatchSink());

+ 3 - 3
flink-ad-monitoring/src/main/java/flink/zanxiangnet/ad/monitoring/PlanHourStreamJob.java

@@ -6,7 +6,7 @@ import flink.zanxiangnet.ad.monitoring.process.PlanHourDTOStreamProcess;
 import flink.zanxiangnet.ad.monitoring.process.PlanHourDWDProcess;
 import flink.zanxiangnet.ad.monitoring.process.PlanHourStreamCompletionProcess;
 import flink.zanxiangnet.ad.monitoring.process.PlanMinuteDWDProcess;
-import flink.zanxiangnet.ad.monitoring.sink.TunnelBatchStreamSink;
+import flink.zanxiangnet.ad.monitoring.sink.OssBatchStreamSink;
 import flink.zanxiangnet.ad.monitoring.stream.KeyedBatchStream;
 import flink.zanxiangnet.ad.monitoring.trigger.PlanMinuteODSStreamTrigger;
 import flink.zanxiangnet.ad.monitoring.kafka.KafkaComponent;
@@ -95,7 +95,7 @@ public class PlanHourStreamJob {
                 .process(new PlanMinuteDWDProcess(planHourFromMinuteStreamTag));
         new KeyedBatchStream<>(planMinuteDWDStream, PlanStatOfMinuteDWD::getStatDay, 4000L, Time.minutes(1L))
                 .toBatch()
-                .addSink(new TunnelBatchStreamSink<>(PlanStatOfMinuteDWD.class))
+                .addSink(new OssBatchStreamSink<>(PlanStatOfMinuteDWD.class, new OssBatchStreamSink.MonitoringGenerateOssObjectName("plan_stat_of_minute_dwd")))
                 .name("sink_plan_minute_dwd");
 
         // Hour stream
@@ -107,7 +107,7 @@ public class PlanHourStreamJob {
                 .union(planHourDWDStream);
         new KeyedBatchStream<>(planHourDWDAllStream, PlanStatOfHourDWD::getStatDay, 4000L, Time.minutes(1L))
                 .toBatch()
-                .addSink(new TunnelBatchStreamSink<>(PlanStatOfHourDWD.class))
+                .addSink(new OssBatchStreamSink<>(PlanStatOfHourDWD.class, new OssBatchStreamSink.MonitoringGenerateOssObjectName("plan_stat_of_hour_dwd")))
                 .name("sink_plan_hour_dwd");
 
         env.execute(isTest ? "plan_hour_stream_job_test" : "plan_hour_stream_job");

+ 78 - 44
flink-ad-monitoring/src/main/java/flink/zanxiangnet/ad/monitoring/Test.java

@@ -1,8 +1,9 @@
 package flink.zanxiangnet.ad.monitoring;
 
 import com.google.common.util.concurrent.ThreadFactoryBuilder;
-import flink.zanxiangnet.ad.monitoring.util.DateUtil;
-import flink.zanxiangnet.ad.monitoring.util.JsonUtil;
+import com.zanxiangnet.module.util.DateUtil;
+import flink.zanxiangnet.ad.monitoring.pojo.entity.AdStatOfMinuteDWD;
+import flink.zanxiangnet.ad.monitoring.stream.KeyedBatchStream;
 import lombok.AllArgsConstructor;
 import lombok.Builder;
 import lombok.Data;
@@ -11,13 +12,13 @@ import lombok.extern.slf4j.Slf4j;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
 import org.apache.flink.api.common.eventtime.WatermarkStrategy;
-import org.apache.flink.api.common.state.ListState;
-import org.apache.flink.api.common.state.ListStateDescriptor;
+import org.apache.flink.api.common.state.*;
 import org.apache.flink.api.common.typeinfo.Types;
 import org.apache.flink.configuration.Configuration;
 import org.apache.flink.streaming.api.datastream.DataStreamSource;
 import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
 import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
+import org.apache.flink.streaming.api.functions.ProcessFunction;
 import org.apache.flink.streaming.api.functions.source.RichSourceFunction;
 import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
 import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
@@ -30,6 +31,7 @@ import org.apache.flink.util.Collector;
 import java.time.Duration;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Map;
 import java.util.Properties;
 import java.util.concurrent.LinkedBlockingQueue;
 import java.util.concurrent.ThreadPoolExecutor;
@@ -41,14 +43,14 @@ public class Test {
     public static void main(String[] args) throws Exception {
         StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
         // Load the config file into Flink's global configuration
-        Properties props = new Properties();
+        /*Properties props = new Properties();
         props.load(Test.class.getResourceAsStream("/application.properties"));
         Configuration configuration = new Configuration();
         props.stringPropertyNames().forEach(key -> {
             String value = props.getProperty(key);
             configuration.setString(key.trim(), StringUtils.isBlank(value) ? "" : value.trim());
         });
-        env.getConfig().setGlobalJobParameters(configuration);
+        env.getConfig().setGlobalJobParameters(configuration);*/
 
         env.setParallelism(1);
 
@@ -57,76 +59,108 @@ public class Test {
         SingleOutputStreamOperator<Pojo> pojoStream = source.assignTimestampsAndWatermarks(WatermarkStrategy.<Pojo>forBoundedOutOfOrderness(Duration.ofHours(3))
                 .withTimestampAssigner((SerializableTimestampAssigner<Pojo>) (pojo, l) -> pojo.getCreateTime())
         );
-        pojoStream.keyBy(Pojo::getUserId)
+        SingleOutputStreamOperator<Pojo> temp = pojoStream.keyBy(Pojo::getUserId).window(TumblingEventTimeWindows.of(Time.hours(24))).process(new ProcessWindowFunction<Pojo, Pojo, Integer, TimeWindow>() {
+            @Override
+            public void process(Integer integer, ProcessWindowFunction<Pojo, Pojo, Integer, TimeWindow>.Context context, Iterable<Pojo> elements, Collector<Pojo> out) throws Exception {
+                System.out.println("2222begin: " + DateUtil.formatLocalDateTime(DateUtil.milliToLocalDateTime(context.window().getStart())) + " | end: " + DateUtil.formatLocalDateTime(DateUtil.milliToLocalDateTime(context.window().getEnd())));
+                for(Pojo pojo : elements) {
+                    out.collect(pojo);
+                }
+            }
+        });
+        new KeyedBatchStream<>(pojoStream, Pojo::getUserId, 10L, Time.seconds(10))
+                .toBatch()
+                .process(new ProcessFunction<List<Pojo>, String>() {
+                    @Override
+                    public void processElement(List<Pojo> value, ProcessFunction<List<Pojo>, String>.Context ctx, Collector<String> out) throws Exception {
+                        out.collect("收到 " + value.size() + "个元素!!!");
+                    }
+                }).print();
+        /*pojoStream.keyBy(Pojo::getUserId)
                 .window(TumblingEventTimeWindows.of(Time.days(1L), Time.hours(-8)))
                 .trigger(new Trigger<Pojo, TimeWindow>() {
-                    /**
-                     *
-                     * @param pojo
-                     * @param time the time that triggered the window, e.g. eventTime in this example
-                     * @param timeWindow
-                     * @param triggerContext
-                     * @return
-                     * @throws Exception
-                     */
+                    *//**
+         *
+         * @param pojo
+         * @param time the time that triggered the window, e.g. eventTime in this example
+         * @param timeWindow
+         * @param triggerContext
+         * @return
+         * @throws Exception
+         *//*
                     @Override
                     public TriggerResult onElement(Pojo pojo, long time, TimeWindow timeWindow, TriggerContext triggerContext) throws Exception {
-                        /*log.error("trigger->onElement: {}, {}, {}, {}, [{} - {}]", JsonUtil.toString(pojo),
+                        *//*log.error("trigger->onElement: {}, {}, {}, {}, [{} - {}]", JsonUtil.toString(pojo),
                                 DateUtil.formatLocalDateTime(DateUtil.milliToLocalDateTime(timeWindow.getStart())),
                                 DateUtil.formatLocalDateTime(DateUtil.milliToLocalDateTime(timeWindow.getEnd())),
                                 DateUtil.formatLocalDateTime(DateUtil.milliToLocalDateTime(triggerContext.getCurrentWatermark())),
                                 DateUtil.formatLocalDateTime(DateUtil.milliToLocalDateTime(timeWindow.maxTimestamp())),
                                 DateUtil.formatLocalDateTime(DateUtil.milliToLocalDateTime(triggerContext.getCurrentProcessingTime()))
-                        );*/
+                        );*//*
                         log.error("收到数据:{},eventTime:{}", DateUtil.milliToLocalDateTime(pojo.getCreateTime()), DateUtil.milliToLocalDateTime(time));
                         return TriggerResult.FIRE;
                     }
 
-                    /**
-                     * Called when the ProcessingTime timer fires
-                     *
-                     * @param time the time this method is called
-                     * @param timeWindow
-                     * @param triggerContext
-                     * @return
-                     * @throws Exception
-                     */
+                    *//**
+         * Called when the ProcessingTime timer fires
+         *
+         * @param time the time this method is called
+         * @param timeWindow
+         * @param triggerContext
+         * @return
+         * @throws Exception
+         *//*
                     @Override
                     public TriggerResult onProcessingTime(long time, TimeWindow timeWindow, TriggerContext triggerContext) throws Exception {
                         log.error("trigger->onProcessingTime: {}", DateUtil.formatLocalDateTime(DateUtil.milliToLocalDateTime(time)));
                         return TriggerResult.CONTINUE;
                     }
 
-                    /**
-                     * Called when the EventTime timer fires
-                     *
-                     * @param time eventTime
-                     * @param timeWindow
-                     * @param triggerContext
-                     * @return
-                     * @throws Exception
-                     */
+                    *//**
+         * Called when the EventTime timer fires
+         *
+         * @param time eventTime
+         * @param timeWindow
+         * @param triggerContext
+         * @return
+         * @throws Exception
+         *//*
                     @Override
                     public TriggerResult onEventTime(long time, TimeWindow timeWindow, TriggerContext triggerContext) throws Exception {
                         log.error("trigger->onEventTime: {}", DateUtil.formatLocalDateTime(DateUtil.milliToLocalDateTime(time)));
                         return TriggerResult.PURGE;
                     }
 
-                    /**
-                     * Called when the window is cleared after window processing finishes
-                     *
-                     * @param timeWindow
-                     * @param triggerContext
-                     * @throws Exception
-                     */
+                    *//**
+         * Called when the window is cleared after window processing finishes
+         *
+         * @param timeWindow
+         * @param triggerContext
+         * @throws Exception
+         *//*
                     @Override
                     public void clear(TimeWindow timeWindow, TriggerContext triggerContext) throws Exception {
                         log.error("trigger->clear");
                     }
                 }).process(new ProcessWindowFunction<Pojo, Pojo, Integer, TimeWindow>() {
 
+                    private ValueState<String> valueState;
+                    private ListState<String> listState;
+                    private MapState<String, String> mapState;
+
                     @Override
                     public void open(Configuration conf) {
+                        StateTtlConfig ttlConfig =
+                                StateTtlConfig.newBuilder(org.apache.flink.api.common.time.Time.days(1)) // the time-to-live value
+                                        .setUpdateType(StateTtlConfig.UpdateType.OnCreateAndWrite)
+                                        .setStateVisibility(StateTtlConfig.StateVisibility.NeverReturnExpired)
+                                        .cleanupFullSnapshot() // clean up when a full snapshot is taken
+                                        .build();
+                        ValueStateDescriptor<String> valueStateDescriptor = new ValueStateDescriptor<>("valueState", Types.STRING);
+                        valueStateDescriptor.enableTimeToLive(ttlConfig);
+                        valueState = getRuntimeContext().getState(new ValueStateDescriptor<>("valueState", Types.STRING));
+                        listState = getRuntimeContext().getListState(new ListStateDescriptor<>("listState", Types.STRING));
+                        mapState = getRuntimeContext().getMapState(new MapStateDescriptor<>("mapState", Types.STRING, Types.STRING));
                     }
 
                     @Override
@@ -137,7 +171,7 @@ public class Test {
                             collector.collect(pojo);
                         }
                     }
-                });//.print();
+                });*/
                 /*.aggregate(new AggregateFunction<Pojo, Tuple5<Integer, Long, Long, Integer, List<Long>>, String>() {
 
                     @Override
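Note on the state-TTL snippet added above: the committed code builds a TTL-enabled valueStateDescriptor but then calls getState() with a fresh descriptor of the same name, so the TTL config is never applied. A minimal sketch of the descriptor actually wired up (illustrative only, not the committed code):

    import org.apache.flink.api.common.functions.RichMapFunction;
    import org.apache.flink.api.common.state.*;
    import org.apache.flink.api.common.time.Time;
    import org.apache.flink.api.common.typeinfo.Types;
    import org.apache.flink.configuration.Configuration;

    public class TtlStateExample extends RichMapFunction<String, String> {
        private transient ValueState<String> valueState;

        @Override
        public void open(Configuration conf) {
            StateTtlConfig ttlConfig = StateTtlConfig.newBuilder(Time.days(1)) // time-to-live value
                    .setUpdateType(StateTtlConfig.UpdateType.OnCreateAndWrite)
                    .setStateVisibility(StateTtlConfig.StateVisibility.NeverReturnExpired)
                    .cleanupFullSnapshot() // clean up when a full snapshot is taken
                    .build();
            ValueStateDescriptor<String> desc = new ValueStateDescriptor<>("valueState", Types.STRING);
            desc.enableTimeToLive(ttlConfig); // attach TTL to the descriptor that is actually registered
            valueState = getRuntimeContext().getState(desc);
        }

        @Override
        public String map(String value) throws Exception {
            valueState.update(value); // this state now expires one day after create/write
            return value;
        }
    }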

+ 4 - 3
flink-ad-monitoring/src/main/java/flink/zanxiangnet/ad/monitoring/config/ClickhouseDataSourceFactory.java

@@ -26,15 +26,16 @@ public class ClickhouseDataSourceFactory implements DataSourceFactory {
 
     @Override
     public DataSource getDataSource() {
+        System.out.println("========== Creating CK connection (once)");
         ClickHouseProperties ckProps = new ClickHouseProperties();
         ckProps.setUser(props.getProperty(ApplicationProperties.CK_USERNAME));
         ckProps.setPassword(props.getProperty(ApplicationProperties.CK_PASSWORD));
         ckProps.setSocketTimeout(60 * 1000);
-        DataSource dataSource = new BalancedClickhouseDataSource(props.getProperty(ApplicationProperties.CK_URL), ckProps);
 
-        HikariConfig config = new HikariConfig();
+        /*HikariConfig config = new HikariConfig();
         // Pool size, connection timeout, and the like could also be configured here
         config.setDataSource(dataSource);
-        return new HikariDataSource(config);
+        return new HikariDataSource(config);*/
+        return new BalancedClickhouseDataSource(props.getProperty(ApplicationProperties.CK_URL), ckProps);
     }
 }

+ 3 - 0
flink-ad-monitoring/src/main/java/flink/zanxiangnet/ad/monitoring/kafka/KafkaComponent.java

@@ -17,6 +17,9 @@ public class KafkaComponent {
         public static final String adDayTopic = "ad_day_cost_topic";
 
         public static class KafkaGroupId {
+            public static final String adHourODSConsumer = "ad_hour_ods_consumer";
+            public static final String adDayODSConsumer = "ad_day_ods_consumer";
+
             public static final String adHourConsumerGroup = "ad_hour_consumer";
             public static final String adDayConsumerGroup = "ad_day_consumer";
 

+ 35 - 21
flink-ad-monitoring/src/main/java/flink/zanxiangnet/ad/monitoring/maxcompute/bean/BeanUtil.java

@@ -10,13 +10,10 @@ import org.apache.commons.lang3.StringUtils;
 import java.lang.reflect.Field;
 import java.lang.reflect.Method;
 import java.lang.reflect.Modifier;
+import java.nio.charset.StandardCharsets;
 import java.util.*;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
 
 public class BeanUtil {
-    private static final Pattern humpPattern = Pattern.compile("[A-Z]");
-    private static final Pattern linePattern = Pattern.compile("_(\\w)");
 
     /**
      * Parses the fields of a JavaBean
@@ -73,36 +70,53 @@ public class BeanUtil {
     }
 
     /**
-     * camelCase to snake_case
+     * snake_case to camelCase
      *
      * @param str
      * @return
      */
-    public static String humpToLine(String str) {
-        Matcher matcher = humpPattern.matcher(str);
-        StringBuffer sb = new StringBuffer();
-        while (matcher.find()) {
-            matcher.appendReplacement(sb, "_" + matcher.group(0).toLowerCase());
+    public static String lineToHump(String str) {
+        byte[] bytes = str.getBytes(StandardCharsets.UTF_8);
+        byte[] result = new byte[str.length()];
+        int index = 0;
+        for (int i = 0; i < bytes.length; i++) {
+            if (bytes[i] != 95) {
+                result[index++] = bytes[i];
+                continue;
+            }
+            if (i == bytes.length - 1) {
+                result[index++] = bytes[i];
+                continue;
+            }
+            byte nextChar = bytes[++i];
+            if (97 <= nextChar && nextChar <= 122) {
+                result[index++] = (byte) (nextChar - 32);
+            } else {
+                result[index++] = nextChar;
+            }
         }
-        matcher.appendTail(sb);
-        return sb.toString();
+        return new String(result, 0, index);
     }
 
     /**
-     * snake_case to camelCase
+     * camelCase to snake_case
      *
      * @param str
      * @return
      */
-    public static String lineToHump(String str) {
-        str = str.toLowerCase();
-        Matcher matcher = linePattern.matcher(str);
-        StringBuffer sb = new StringBuffer();
-        while (matcher.find()) {
-            matcher.appendReplacement(sb, matcher.group(1).toUpperCase());
+    public static String humpToLine(String str) {
+        byte[] bytes = str.getBytes(StandardCharsets.UTF_8);
+        byte[] result = new byte[str.length() * 2];
+        int index = 0;
+        for (byte c : bytes) {
+            if (65 <= c && c <= 90) {
+                result[index++] = 95;
+                result[index++] = (byte) (c + 32);
+            } else {
+                result[index++] = c;
+            }
         }
-        matcher.appendTail(sb);
-        return sb.toString();
+        return new String(result, 0, index);
     }
 
     public static <T> Method methodOfGet(Class<T> clazz, Field field) throws NoSuchMethodException {
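A quick sanity check of the rewritten byte-level converters (hypothetical driver class, not part of the commit; it assumes ASCII identifiers, which is all the logic above handles):

    public class BeanUtilExample {
        public static void main(String[] args) {
            System.out.println(BeanUtil.lineToHump("stat_day")); // statDay
            System.out.println(BeanUtil.humpToLine("statDay"));  // stat_day
            System.out.println(BeanUtil.lineToHump("ad_id_"));   // adId_ (a trailing underscore is kept)
        }
    }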

+ 1 - 1
flink-ad-monitoring/src/main/java/flink/zanxiangnet/ad/monitoring/maxcompute/sink/TunnelBatchSinkBuffer.java

@@ -5,7 +5,7 @@ import com.aliyun.odps.Table;
 import com.aliyun.odps.tunnel.TableTunnel;
 import com.google.common.util.concurrent.ThreadFactoryBuilder;
 import flink.zanxiangnet.ad.monitoring.maxcompute.bean.BeanUtil;
-import flink.zanxiangnet.ad.monitoring.util.JsonUtil;
+import com.zanxiangnet.module.util.JsonUtil;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.util.CollectionUtils;

+ 1 - 1
flink-ad-monitoring/src/main/java/flink/zanxiangnet/ad/monitoring/maxcompute/sink/TunnelBatchWriter.java

@@ -5,7 +5,7 @@ import com.aliyun.odps.data.Record;
 import com.aliyun.odps.data.RecordWriter;
 import com.aliyun.odps.tunnel.TableTunnel;
 import com.aliyun.odps.tunnel.TunnelException;
-import flink.zanxiangnet.ad.monitoring.util.JsonUtil;
+import com.zanxiangnet.module.util.JsonUtil;
 import flink.zanxiangnet.ad.monitoring.maxcompute.bean.BeanUtil;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

+ 1 - 1
flink-ad-monitoring/src/main/java/flink/zanxiangnet/ad/monitoring/pojo/dto/AdStatOfDayODSDTO.java

@@ -3,7 +3,7 @@ package flink.zanxiangnet.ad.monitoring.pojo.dto;
 import com.tencent.ads.model.DailyReportsGetListStruct;
 import flink.zanxiangnet.ad.monitoring.pojo.entity.AdDataOfDayODS;
 import flink.zanxiangnet.ad.monitoring.pojo.entity.AdStatOfDayDWD;
-import flink.zanxiangnet.ad.monitoring.util.JsonUtil;
+import com.zanxiangnet.module.util.JsonUtil;
 import lombok.AllArgsConstructor;
 import lombok.Builder;
 import lombok.Data;

+ 1 - 1
flink-ad-monitoring/src/main/java/flink/zanxiangnet/ad/monitoring/pojo/entity/AdStatOfDayDWD.java

@@ -2,9 +2,9 @@ package flink.zanxiangnet.ad.monitoring.pojo.entity;
 
 import com.aliyun.odps.data.Record;
 import com.google.gson.annotations.SerializedName;
+import com.zanxiangnet.module.util.DateUtil;
 import flink.zanxiangnet.ad.monitoring.maxcompute.bean.annotation.MaxComputeColumn;
 import flink.zanxiangnet.ad.monitoring.maxcompute.bean.annotation.MaxComputeTable;
-import flink.zanxiangnet.ad.monitoring.util.DateUtil;
 import flink.zanxiangnet.ad.monitoring.util.NumberUtil;
 import flink.zanxiangnet.ad.monitoring.util.ObjectUtil;
 import lombok.Data;

+ 1 - 1
flink-ad-monitoring/src/main/java/flink/zanxiangnet/ad/monitoring/pojo/entity/AdStatOfHourDWD.java

@@ -2,10 +2,10 @@ package flink.zanxiangnet.ad.monitoring.pojo.entity;
 
 import com.aliyun.odps.data.Record;
 import com.google.gson.annotations.SerializedName;
+import com.zanxiangnet.module.util.DateUtil;
 import flink.zanxiangnet.ad.monitoring.maxcompute.bean.BeanUtil;
 import flink.zanxiangnet.ad.monitoring.maxcompute.bean.annotation.MaxComputeColumn;
 import flink.zanxiangnet.ad.monitoring.maxcompute.bean.annotation.MaxComputeTable;
-import flink.zanxiangnet.ad.monitoring.util.DateUtil;
 import flink.zanxiangnet.ad.monitoring.util.NumberUtil;
 import flink.zanxiangnet.ad.monitoring.util.ObjectUtil;
 import lombok.Data;

+ 1 - 3
flink-ad-monitoring/src/main/java/flink/zanxiangnet/ad/monitoring/process/AdDayDWDRollMonthProcess.java

@@ -5,7 +5,7 @@ import flink.zanxiangnet.ad.monitoring.dao.mapper.AdStatOfDayDWDMapper;
 import flink.zanxiangnet.ad.monitoring.pojo.entity.AdDataOfDayODS;
 import flink.zanxiangnet.ad.monitoring.pojo.entity.AdStatOfDayDWD;
 import flink.zanxiangnet.ad.monitoring.pojo.properties.ApplicationProperties;
-import flink.zanxiangnet.ad.monitoring.util.DateUtil;
+import com.zanxiangnet.module.util.DateUtil;
 import lombok.extern.slf4j.Slf4j;
 import org.apache.flink.api.common.state.MapState;
 import org.apache.flink.api.common.state.MapStateDescriptor;
@@ -13,8 +13,6 @@ import org.apache.flink.api.common.state.ValueState;
 import org.apache.flink.api.common.state.ValueStateDescriptor;
 import org.apache.flink.configuration.Configuration;
 import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
-import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
-import org.apache.flink.streaming.api.windowing.windows.GlobalWindow;
 import org.apache.flink.util.Collector;
 import org.apache.ibatis.datasource.DataSourceFactory;
 import org.apache.ibatis.mapping.Environment;

+ 1 - 1
flink-ad-monitoring/src/main/java/flink/zanxiangnet/ad/monitoring/process/AdDayOnTimeStreamCompletionProcess.java

@@ -4,7 +4,7 @@ import flink.zanxiangnet.ad.monitoring.config.MysqlDataSourceFactory;
 import flink.zanxiangnet.ad.monitoring.dao.mapper.AdStatOfDayDWDMapper;
 import flink.zanxiangnet.ad.monitoring.pojo.entity.AdStatOfDayDWD;
 import flink.zanxiangnet.ad.monitoring.pojo.properties.ApplicationProperties;
-import flink.zanxiangnet.ad.monitoring.util.DateUtil;
+import com.zanxiangnet.module.util.DateUtil;
 import lombok.extern.slf4j.Slf4j;
 import org.apache.flink.api.common.state.ValueState;
 import org.apache.flink.api.common.state.ValueStateDescriptor;

+ 2 - 2
flink-ad-monitoring/src/main/java/flink/zanxiangnet/ad/monitoring/process/AdHourDTOStreamProcess.java

@@ -4,8 +4,8 @@ import com.tencent.ads.model.HourlyReportsGetListStruct;
 import flink.zanxiangnet.ad.monitoring.pojo.dto.AdDataOfMinuteDTO;
 import flink.zanxiangnet.ad.monitoring.pojo.entity.AdDataOfHourODS;
 import flink.zanxiangnet.ad.monitoring.pojo.entity.AdDataOfMinuteODS;
-import flink.zanxiangnet.ad.monitoring.util.DateUtil;
-import flink.zanxiangnet.ad.monitoring.util.JsonUtil;
+import com.zanxiangnet.module.util.DateUtil;
+import com.zanxiangnet.module.util.JsonUtil;
 import org.apache.flink.streaming.api.functions.ProcessFunction;
 import org.apache.flink.util.Collector;
 import org.apache.flink.util.OutputTag;

+ 1 - 1
flink-ad-monitoring/src/main/java/flink/zanxiangnet/ad/monitoring/process/AdHourDWDProcess.java

@@ -6,7 +6,7 @@ import flink.zanxiangnet.ad.monitoring.pojo.entity.AdDataOfHourODS;
 import flink.zanxiangnet.ad.monitoring.pojo.entity.AdStatOfDayDWD;
 import flink.zanxiangnet.ad.monitoring.pojo.entity.AdStatOfHourDWD;
 import flink.zanxiangnet.ad.monitoring.pojo.properties.ApplicationProperties;
-import flink.zanxiangnet.ad.monitoring.util.DateUtil;
+import com.zanxiangnet.module.util.DateUtil;
 import lombok.extern.slf4j.Slf4j;
 import org.apache.flink.api.common.state.*;
 import org.apache.flink.api.common.typeinfo.Types;

+ 1 - 1
flink-ad-monitoring/src/main/java/flink/zanxiangnet/ad/monitoring/process/AdHourOnTimeStreamCompletionProcess.java

@@ -1,7 +1,7 @@
 package flink.zanxiangnet.ad.monitoring.process;
 
 import flink.zanxiangnet.ad.monitoring.pojo.entity.AdStatOfHourDWD;
-import flink.zanxiangnet.ad.monitoring.util.DateUtil;
+import com.zanxiangnet.module.util.DateUtil;
 import lombok.extern.slf4j.Slf4j;
 import org.apache.flink.api.common.state.ValueState;
 import org.apache.flink.api.common.state.ValueStateDescriptor;

+ 1 - 1
flink-ad-monitoring/src/main/java/flink/zanxiangnet/ad/monitoring/process/AdMinuteDWDProcess.java

@@ -4,7 +4,7 @@ import flink.zanxiangnet.ad.monitoring.config.MysqlDataSourceFactory;
 import flink.zanxiangnet.ad.monitoring.dao.mapper.AdStatOfDayDWDMapper;
 import flink.zanxiangnet.ad.monitoring.pojo.entity.*;
 import flink.zanxiangnet.ad.monitoring.pojo.properties.ApplicationProperties;
-import flink.zanxiangnet.ad.monitoring.util.DateUtil;
+import com.zanxiangnet.module.util.DateUtil;
 import lombok.extern.slf4j.Slf4j;
 import org.apache.flink.api.common.state.*;
 import org.apache.flink.api.common.typeinfo.Types;

+ 1 - 1
flink-ad-monitoring/src/main/java/flink/zanxiangnet/ad/monitoring/process/CostHourProcess.java

@@ -2,7 +2,7 @@ package flink.zanxiangnet.ad.monitoring.process;
 
 import flink.zanxiangnet.ad.monitoring.pojo.entity.AdStatOfHourDWD;
 import flink.zanxiangnet.ad.monitoring.pojo.entity.CostHourDM;
-import flink.zanxiangnet.ad.monitoring.util.DateUtil;
+import com.zanxiangnet.module.util.DateUtil;
 import flink.zanxiangnet.ad.monitoring.util.NumberUtil;
 import org.apache.flink.api.common.state.MapState;
 import org.apache.flink.api.common.state.MapStateDescriptor;

+ 1 - 1
flink-ad-monitoring/src/main/java/flink/zanxiangnet/ad/monitoring/process/CostMinuteProcess.java

@@ -1,7 +1,7 @@
 package flink.zanxiangnet.ad.monitoring.process;
 
 import flink.zanxiangnet.ad.monitoring.pojo.entity.*;
-import flink.zanxiangnet.ad.monitoring.util.DateUtil;
+import com.zanxiangnet.module.util.DateUtil;
 import flink.zanxiangnet.ad.monitoring.util.NumberUtil;
 import org.apache.flink.api.common.state.MapState;
 import org.apache.flink.api.common.state.MapStateDescriptor;

+ 1 - 1
flink-ad-monitoring/src/main/java/flink/zanxiangnet/ad/monitoring/process/PlanDayDWDRollMonthProcess.java

@@ -5,7 +5,7 @@ import flink.zanxiangnet.ad.monitoring.dao.mapper.PlanStatOfDayDWDMapper;
 import flink.zanxiangnet.ad.monitoring.pojo.entity.AdDataOfDayODS;
 import flink.zanxiangnet.ad.monitoring.pojo.entity.PlanStatOfDayDWD;
 import flink.zanxiangnet.ad.monitoring.pojo.properties.ApplicationProperties;
-import flink.zanxiangnet.ad.monitoring.util.DateUtil;
+import com.zanxiangnet.module.util.DateUtil;
 import flink.zanxiangnet.ad.monitoring.util.PlanUtil;
 import lombok.extern.slf4j.Slf4j;
 import org.apache.flink.api.common.state.MapState;

+ 2 - 2
flink-ad-monitoring/src/main/java/flink/zanxiangnet/ad/monitoring/process/PlanHourDTOStreamProcess.java

@@ -4,8 +4,8 @@ import com.tencent.ads.model.HourlyReportsGetListStruct;
 import flink.zanxiangnet.ad.monitoring.pojo.dto.AdDataOfMinuteDTO;
 import flink.zanxiangnet.ad.monitoring.pojo.entity.AdDataOfHourODS;
 import flink.zanxiangnet.ad.monitoring.pojo.entity.AdDataOfMinuteODS;
-import flink.zanxiangnet.ad.monitoring.util.DateUtil;
-import flink.zanxiangnet.ad.monitoring.util.JsonUtil;
+import com.zanxiangnet.module.util.DateUtil;
+import com.zanxiangnet.module.util.JsonUtil;
 import lombok.extern.slf4j.Slf4j;
 import org.apache.flink.streaming.api.functions.ProcessFunction;
 import org.apache.flink.util.Collector;

+ 1 - 1
flink-ad-monitoring/src/main/java/flink/zanxiangnet/ad/monitoring/process/PlanHourDWDProcess.java

@@ -6,7 +6,7 @@ import flink.zanxiangnet.ad.monitoring.pojo.entity.AdDataOfHourODS;
 import flink.zanxiangnet.ad.monitoring.pojo.entity.PlanStatOfDayDWD;
 import flink.zanxiangnet.ad.monitoring.pojo.entity.PlanStatOfHourDWD;
 import flink.zanxiangnet.ad.monitoring.pojo.properties.ApplicationProperties;
-import flink.zanxiangnet.ad.monitoring.util.DateUtil;
+import com.zanxiangnet.module.util.DateUtil;
 import flink.zanxiangnet.ad.monitoring.util.PlanUtil;
 import lombok.extern.slf4j.Slf4j;
 import org.apache.flink.api.common.state.MapState;

+ 1 - 1
flink-ad-monitoring/src/main/java/flink/zanxiangnet/ad/monitoring/process/PlanHourStreamCompletionProcess.java

@@ -2,7 +2,7 @@ package flink.zanxiangnet.ad.monitoring.process;
 
 import flink.zanxiangnet.ad.monitoring.pojo.entity.AdStatOfHourDWD;
 import flink.zanxiangnet.ad.monitoring.pojo.entity.PlanStatOfHourDWD;
-import flink.zanxiangnet.ad.monitoring.util.DateUtil;
+import com.zanxiangnet.module.util.DateUtil;
 import org.apache.flink.api.common.state.ValueState;
 import org.apache.flink.api.common.state.ValueStateDescriptor;
 import org.apache.flink.api.common.typeinfo.Types;

+ 1 - 1
flink-ad-monitoring/src/main/java/flink/zanxiangnet/ad/monitoring/process/PlanMinuteDWDProcess.java

@@ -4,7 +4,7 @@ import flink.zanxiangnet.ad.monitoring.config.ClickhouseDataSourceFactory;
 import flink.zanxiangnet.ad.monitoring.dao.mapper.PlanStatOfDayDWDMapper;
 import flink.zanxiangnet.ad.monitoring.pojo.entity.*;
 import flink.zanxiangnet.ad.monitoring.pojo.properties.ApplicationProperties;
-import flink.zanxiangnet.ad.monitoring.util.DateUtil;
+import com.zanxiangnet.module.util.DateUtil;
 import flink.zanxiangnet.ad.monitoring.util.PlanUtil;
 import lombok.extern.slf4j.Slf4j;
 import org.apache.flink.api.common.state.MapState;

+ 134 - 0
flink-ad-monitoring/src/main/java/flink/zanxiangnet/ad/monitoring/sink/OssBatchStreamSink.java

@@ -0,0 +1,134 @@
+package flink.zanxiangnet.ad.monitoring.sink;
+
+import com.aliyun.odps.Odps;
+import com.aliyun.odps.PartitionSpec;
+import com.aliyun.odps.account.Account;
+import com.aliyun.odps.account.AliyunAccount;
+import com.aliyun.odps.data.Record;
+import com.aliyun.odps.tunnel.TableTunnel;
+import com.aliyun.odps.tunnel.TunnelException;
+import com.aliyun.oss.OSS;
+import com.aliyun.oss.OSSClientBuilder;
+import com.aliyun.oss.model.PutObjectResult;
+import com.zanxiangnet.module.util.DateUtil;
+import com.zanxiangnet.module.util.JsonUtil;
+import com.zanxiangnet.module.util.csv.CsvUtil;
+import flink.zanxiangnet.ad.monitoring.maxcompute.MaxComputeLog;
+import flink.zanxiangnet.ad.monitoring.maxcompute.bean.BeanUtil;
+import flink.zanxiangnet.ad.monitoring.maxcompute.bean.annotation.MaxComputeTable;
+import flink.zanxiangnet.ad.monitoring.pojo.properties.ApplicationProperties;
+import lombok.extern.slf4j.Slf4j;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.flink.configuration.Configuration;
+import org.apache.flink.streaming.api.functions.sink.RichSinkFunction;
+import org.springframework.util.CollectionUtils;
+
+import java.io.*;
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.Method;
+import java.math.BigDecimal;
+import java.time.LocalDate;
+import java.time.LocalDateTime;
+import java.time.format.DateTimeFormatter;
+import java.util.List;
+import java.util.Map;
+import java.util.UUID;
+import java.util.stream.Collectors;
+
+/**
+ * 批量数据写出到 OSS的 CSV文件
+ *
+ * @param <IN>
+ */
+@Slf4j
+public class OssBatchStreamSink<T, IN extends List<T>> extends RichSinkFunction<IN> {
+    private static final int RETRY_COUNT = 3;
+    private static final DateTimeFormatter YYYY_MM_DD = DateTimeFormatter.ofPattern("yyyy_MM_dd");
+
+    private final Class<T> clazz;
+
+    private volatile transient OSS ossClient;
+    private String endpoint;
+    // 暂存的 oss地址
+    private String ossBucket;
+
+    private GenerateOssObjectName generateOssObjectName;
+
+    public OssBatchStreamSink(Class<T> clazz, GenerateOssObjectName generateOssObjectName) {
+        this.clazz = clazz;
+        this.generateOssObjectName = generateOssObjectName;
+    }
+
+    @Override
+    public void open(Configuration config) {
+        Map<String, String> params = getRuntimeContext()
+                .getExecutionConfig()
+                .getGlobalJobParameters()
+                .toMap();
+        endpoint = params.get(ApplicationProperties.OSS_ENDPOINT);
+        ossClient = new OSSClientBuilder().build(params.get(ApplicationProperties.OSS_ENDPOINT),
+                params.get(ApplicationProperties.OSS_ASSESS_KEY_ID),
+                params.get(ApplicationProperties.OSS_ASSESS_KEY_SECRET));
+        ossBucket = params.get(ApplicationProperties.OSS_BUCKET);
+    }
+
+    /**
+     * 将值写入到 Sink。每个值都会调用此函数
+     *
+     * @param value
+     * @param context
+     */
+    @Override
+    public void invoke(IN value, Context context) {
+        LocalDateTime now = LocalDateTime.now();
+        long start = System.currentTimeMillis();
+        int index = 0;
+        do {
+            String savePath = System.getProperty("java.io.tmpdir") + "/flink/oss/" + now.toLocalDate().format(YYYY_MM_DD) + "/";
+            File path = new File(savePath);
+            if (!path.exists()) {
+                path.mkdirs();
+            }
+            String fileName = UUID.randomUUID() + ".csv";
+            File file = new File(savePath + fileName);
+            try {
+                CsvUtil.print(clazz, value, new FileWriter(file));
+                String objectName = generateOssObjectName.objectName(now, savePath, fileName);
+                PutObjectResult result = ossClient.putObject(ossBucket, objectName, new FileInputStream(file));
+                System.out.println("OSS文件上传成功:" + "https://" + ossBucket +"." + endpoint + "/" + objectName);
+                break;
+            } catch (InstantiationException | IllegalAccessException | IOException e) {
+                log.error("第 " + index + "次上传数据到 oss失败!" + e.getMessage(), e);
+            } finally {
+                file.delete();
+            }
+        } while (index++ < RETRY_COUNT);
+        long end = System.currentTimeMillis();
+        System.out.println("写入 " + value.size() + "条数据用时:" + (end - start));
+    }
+
+    @Override
+    public void close() throws Exception {
+        super.close();
+    }
+
+    public interface GenerateOssObjectName {
+
+        String objectName(LocalDateTime executeTime, String savePath, String fileName);
+    }
+
+    public static class MonitoringGenerateOssObjectName implements GenerateOssObjectName, Serializable {
+        private static final long serialVersionUID = 1L;
+
+        private final String key;
+
+        public MonitoringGenerateOssObjectName(String key) {
+            this.key = key;
+        }
+
+        @Override
+        public String objectName(LocalDateTime executeTime, String savePath, String fileName) {
+            return "flink/oss/" + key + "/" + DateUtil.formatLocalDate(executeTime.toLocalDate()) + "/" + fileName;
+        }
+    }
+}

+ 1 - 1
flink-ad-monitoring/src/main/java/flink/zanxiangnet/ad/monitoring/trigger/AdMinuteODSStreamTrigger.java

@@ -1,7 +1,7 @@
 package flink.zanxiangnet.ad.monitoring.trigger;
 
 import flink.zanxiangnet.ad.monitoring.pojo.entity.AdDataOfMinuteODS;
-import flink.zanxiangnet.ad.monitoring.util.DateUtil;
+import com.zanxiangnet.module.util.DateUtil;
 import org.apache.flink.streaming.api.windowing.triggers.Trigger;
 import org.apache.flink.streaming.api.windowing.triggers.TriggerResult;
 import org.apache.flink.streaming.api.windowing.windows.TimeWindow;

+ 1 - 1
flink-ad-monitoring/src/main/java/flink/zanxiangnet/ad/monitoring/trigger/CostHourDMStreamTrigger.java

@@ -1,7 +1,7 @@
 package flink.zanxiangnet.ad.monitoring.trigger;
 
 import flink.zanxiangnet.ad.monitoring.pojo.entity.AdStatOfHourDWD;
-import flink.zanxiangnet.ad.monitoring.util.DateUtil;
+import com.zanxiangnet.module.util.DateUtil;
 import org.apache.flink.streaming.api.windowing.triggers.Trigger;
 import org.apache.flink.streaming.api.windowing.triggers.TriggerResult;
 import org.apache.flink.streaming.api.windowing.windows.TimeWindow;

+ 1 - 1
flink-ad-monitoring/src/main/java/flink/zanxiangnet/ad/monitoring/trigger/CostMinuteDMStreamTrigger.java

@@ -2,7 +2,7 @@ package flink.zanxiangnet.ad.monitoring.trigger;
 
 import flink.zanxiangnet.ad.monitoring.pojo.entity.AdDataOfMinuteODS;
 import flink.zanxiangnet.ad.monitoring.pojo.entity.AdStatOfMinuteDWD;
-import flink.zanxiangnet.ad.monitoring.util.DateUtil;
+import com.zanxiangnet.module.util.DateUtil;
 import org.apache.flink.streaming.api.windowing.triggers.Trigger;
 import org.apache.flink.streaming.api.windowing.triggers.TriggerResult;
 import org.apache.flink.streaming.api.windowing.windows.TimeWindow;

+ 1 - 1
flink-ad-monitoring/src/main/java/flink/zanxiangnet/ad/monitoring/trigger/PlanMinuteODSStreamTrigger.java

@@ -1,7 +1,7 @@
 package flink.zanxiangnet.ad.monitoring.trigger;
 
 import flink.zanxiangnet.ad.monitoring.pojo.entity.AdDataOfMinuteODS;
-import flink.zanxiangnet.ad.monitoring.util.DateUtil;
+import com.zanxiangnet.module.util.DateUtil;
 import org.apache.flink.streaming.api.windowing.triggers.Trigger;
 import org.apache.flink.streaming.api.windowing.triggers.TriggerResult;
 import org.apache.flink.streaming.api.windowing.windows.TimeWindow;