wcc 3 years ago
parent
commit
97d65dbc1e

+ 6 - 2
flink-ad-monitoring/src/main/java/flink/zanxiangnet/ad/monitoring/AdStatJob.java

@@ -16,6 +16,8 @@ import flink.zanxiangnet.ad.monitoring.pojo.entity.*;
 import flink.zanxiangnet.ad.monitoring.pojo.properties.ApplicationProperties;
 import flink.zanxiangnet.ad.monitoring.pojo.properties.KafkaProperties;
 import flink.zanxiangnet.ad.monitoring.pojo.dto.AdDataOfMinuteDTO;
+import flink.zanxiangnet.ad.monitoring.sink.TunnelBatchStreamSink;
+import flink.zanxiangnet.ad.monitoring.stream.KeyedBatchStream;
 import flink.zanxiangnet.ad.monitoring.util.DateUtil;
 import flink.zanxiangnet.ad.monitoring.util.JsonUtil;
 import org.apache.commons.lang3.StringUtils;
@@ -127,7 +129,10 @@ public class AdStatJob {
         // Minute-level stream
         DataStream<AdDataOfMinuteODS> adMinuteODSStream = adODSStream.getSideOutput(adMinuteStreamTag);
         // Write the raw records to the ODS table
-        adMinuteODSStream.addSink(new TunnelBatchSink<>(AdDataOfMinuteODS.class, 36000L, 64000L, 3));
+        // adMinuteODSStream.addSink(new TunnelBatchSink<>(AdDataOfMinuteODS.class, 36000L, 64000L, 3));
+        new KeyedBatchStream<>("adMinuteODSStream", adMinuteODSStream.keyBy(AdDataOfMinuteODS::getStatDay), 4000L, 60 * 1000L)
+                .toBatch().addSink(new TunnelBatchStreamSink<>(AdDataOfMinuteODS.class));
+
         adMinuteODSStream
                 // Assign watermarks: allow data to arrive up to 6 minutes late, and declare the event-time field
                 .assignTimestampsAndWatermarks(WatermarkStrategy.<AdDataOfMinuteODS>forBoundedOutOfOrderness(Duration.ofMinutes(6L))
@@ -242,7 +247,6 @@ public class AdStatJob {
                         historyMinuteMapping.put(statDay, nowAdStat);
                     }
                 }).addSink(new TunnelBatchSink<>(AdStatOfMinuteDWD.class, 30000L, 365L, 6));
-        ;
 
         // Hour-level stream (written directly to the hourly report ODS)
         DataStream<AdDataOfHourODS> adHourODSStream = adODSStream.getSideOutput(adHourStreamTag);
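
For readers of this diff: the change above swaps the per-record `TunnelBatchSink` for an explicit batching stage. A minimal sketch of the resulting types, assuming `getStatDay` returns a `String` key (the key type is not shown in this commit):

```java
// keyBy(statDay) -> buffer up to 4000 records or 60 s per key -> sink whole lists.
SingleOutputStreamOperator<List<AdDataOfMinuteODS>> batches =
        new KeyedBatchStream<>("adMinuteODSStream",
                adMinuteODSStream.keyBy(AdDataOfMinuteODS::getStatDay),
                4000L,       // maxBufferCount
                60 * 1000L)  // bufferRefreshTime (ms)
                .toBatch();
batches.addSink(new TunnelBatchStreamSink<>(AdDataOfMinuteODS.class));
```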

+ 10 - 12
flink-ad-monitoring/src/main/java/flink/zanxiangnet/ad/monitoring/Test.java

@@ -1,6 +1,8 @@
 package flink.zanxiangnet.ad.monitoring;
 
 import com.google.common.util.concurrent.ThreadFactoryBuilder;
+import flink.zanxiangnet.ad.monitoring.stream.BatchStream;
+import flink.zanxiangnet.ad.monitoring.stream.KeyedBatchStream;
 import flink.zanxiangnet.ad.monitoring.util.DateUtil;
 import flink.zanxiangnet.ad.monitoring.util.JsonUtil;
 import lombok.AllArgsConstructor;
@@ -9,28 +11,21 @@ import lombok.Data;
 import lombok.NoArgsConstructor;
 import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
 import org.apache.flink.api.common.eventtime.WatermarkStrategy;
-import org.apache.flink.api.common.functions.AggregateFunction;
-import org.apache.flink.api.common.functions.ReduceFunction;
-import org.apache.flink.api.common.functions.RichAggregateFunction;
-import org.apache.flink.api.common.functions.RichReduceFunction;
+import org.apache.flink.api.java.functions.KeySelector;
+import org.apache.flink.api.java.tuple.Tuple2;
 import org.apache.flink.api.java.tuple.Tuple3;
-import org.apache.flink.api.java.tuple.Tuple5;
 import org.apache.flink.configuration.Configuration;
 import org.apache.flink.streaming.api.datastream.DataStreamSource;
 import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
 import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
 import org.apache.flink.streaming.api.functions.source.RichSourceFunction;
 import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
-import org.apache.flink.streaming.api.functions.windowing.WindowFunction;
-import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
-import org.apache.flink.streaming.api.windowing.time.Time;
 import org.apache.flink.streaming.api.windowing.windows.GlobalWindow;
 import org.apache.flink.util.Collector;
 
 import java.time.Duration;
 import java.time.LocalDateTime;
 import java.util.ArrayList;
-import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.concurrent.ConcurrentHashMap;
@@ -78,12 +73,13 @@ public class Test {
                         return new Tuple5<>(tuple5.f0, Math.min(acc.f1, tuple5.f1), Math.max(acc.f2, tuple5.f2), tuple5.f3 + acc.f3, temp);
                     }
                 }).print();*/
-        pojoStream.keyBy(Pojo::getUserId).countWindow(1)
+        SingleOutputStreamOperator<String> ff = pojoStream.keyBy(Pojo::getUserId).countWindow(1)
                 .process(new ProcessWindowFunction<Pojo, String, Integer, GlobalWindow>() {
                     private final Map<Integer, Tuple3<LocalDateTime, LocalDateTime, List<Pojo>>> oldData = new ConcurrentHashMap<>();
 
                     @Override
-                    public void process(Integer integer, ProcessWindowFunction<Pojo, String, Integer, GlobalWindow>.Context context, Iterable<Pojo> elements, Collector<String> out) throws Exception {
+                    public void process(Integer integer, ProcessWindowFunction<Pojo, String, Integer, GlobalWindow>.Context context,
+                                        Iterable<Pojo> elements, Collector<String> out) throws Exception {
                         Pojo pojo = elements.iterator().next();
                         LocalDateTime createTime = DateUtil.milliToLocalDateTime(pojo.getCreateTime());
                         Tuple3<LocalDateTime, LocalDateTime, List<Pojo>> temp = oldData.get(pojo.getUserId());
@@ -95,7 +91,9 @@ public class Test {
                         oldData.put(pojo.getUserId(), temp);
                         out.collect(JsonUtil.toString(temp.f2.stream().map(Pojo::getIndex).collect(Collectors.toList())));
                     }
-                }).print();
+                });
+        new BatchStream<>("ff", ff, 10L, 60 * 1000L).toBatch().print();
+        // new BatchSink<>(ff, 60 * 1000L, 10L).toBatch().print();
 /*        .apply(new WindowFunction<Pojo, String, Integer, GlobalWindow>() {
             private Tuple3<LocalDateTime, LocalDateTime, List<Pojo>> oldData = null;
             @Override

+ 142 - 0
flink-ad-monitoring/src/main/java/flink/zanxiangnet/ad/monitoring/sink/TunnelBatchStreamSink.java

@@ -0,0 +1,142 @@
+package flink.zanxiangnet.ad.monitoring.sink;
+
+import com.aliyun.odps.Odps;
+import com.aliyun.odps.account.Account;
+import com.aliyun.odps.account.AliyunAccount;
+import com.aliyun.odps.data.Record;
+import com.aliyun.odps.tunnel.TableTunnel;
+import com.aliyun.odps.tunnel.TunnelException;
+import flink.zanxiangnet.ad.monitoring.maxcompute.MaxComputeLog;
+import flink.zanxiangnet.ad.monitoring.maxcompute.bean.BeanUtil;
+import flink.zanxiangnet.ad.monitoring.maxcompute.bean.annotation.MaxComputeTable;
+import flink.zanxiangnet.ad.monitoring.pojo.properties.ApplicationProperties;
+import org.apache.flink.configuration.Configuration;
+import org.apache.flink.streaming.api.functions.sink.RichSinkFunction;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.util.CollectionUtils;
+
+import java.io.IOException;
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.Method;
+import java.util.List;
+import java.util.Map;
+import java.util.stream.Collectors;
+
+/**
+ * WARNING: this class has a serious bug; do not use it.
+ *
+ * @param <T>  the element type
+ * @param <IN> the batch type (a list of T)
+ */
+public class TunnelBatchStreamSink<T, IN extends List<T>> extends RichSinkFunction<IN> {
+    private static final Logger log = LoggerFactory.getLogger(TunnelBatchStreamSink.class);
+
+    // Object lock guarding against the MaxCompute Tunnel being initialized more than once
+    private static final Object DUMMY_LOCK = new Object();
+
+    private final Class<T> clazz;
+    private String projectName;
+    private String tableName;
+
+    private volatile transient TableTunnel tunnel;
+    private volatile transient List<BeanUtil.FieldInfo> fieldInfoList;
+    private volatile transient Map<String, Method> partitionFieldMethods;
+
+    public TunnelBatchStreamSink(Class<T> clazz) {
+        this.clazz = clazz;
+    }
+
+    @Override
+    public void open(Configuration config) {
+        if (tunnel == null) {
+            synchronized (DUMMY_LOCK) {
+                if (tunnel == null) {
+                    Map<String, String> params = getRuntimeContext()
+                            .getExecutionConfig()
+                            .getGlobalJobParameters()
+                            .toMap();
+                    MaxComputeTable tableAnnotation = clazz.getAnnotation(MaxComputeTable.class);
+
+                    Account account = new AliyunAccount(params.get(ApplicationProperties.MAX_COMPUTE_ACCOUNT_ID),
+                            params.get(ApplicationProperties.MAX_COMPUTE_ACCOUNT_KEY));
+                    Odps odps = new Odps(account);
+                    odps.getRestClient().setRetryLogger(new MaxComputeLog());
+                    odps.setEndpoint(params.get(ApplicationProperties.MAX_COMPUTE_ACCOUNT_ENDPOINT));
+                    odps.setDefaultProject(params.get(ApplicationProperties.MAX_COMPUTE_ACCOUNT_PROJECT_NAME));
+                    tunnel = new TableTunnel(odps);
+                    tunnel.setEndpoint(params.get(ApplicationProperties.MAX_COMPUTE_ACCOUNT_TUNNEL_ENDPOINT));
+                    projectName = params.get(ApplicationProperties.MAX_COMPUTE_ACCOUNT_PROJECT_NAME);
+                    tableName = tableAnnotation.value();
+                    fieldInfoList = BeanUtil.parseBeanField(clazz);
+                    partitionFieldMethods = fieldInfoList.stream().filter(BeanUtil.FieldInfo::isUsePartitioned).collect(Collectors.toMap(BeanUtil.FieldInfo::getColumnName, BeanUtil.FieldInfo::getGetMethod));
+                }
+            }
+        }
+    }
+
+    /**
+     * Writes one batch to the sink. Called once per incoming value.
+     *
+     * @param value   the batch of records to write
+     * @param context sink context
+     */
+    @Override
+    public void invoke(IN value, Context context) throws TunnelException, IOException, InvocationTargetException, IllegalAccessException {
+        T element = value.get(0);
+        String partitionStr = generatePartitionStr(element);
+        TableTunnel.StreamUploadSession uploadSession = tunnel.createStreamUploadSession(projectName, tableName, partitionStr);
+        TableTunnel.StreamRecordPack pack = uploadSession.newRecordPack();
+        for (T t : value) {
+            Record record = uploadSession.newRecord();
+            for (BeanUtil.FieldInfo fieldInfo : fieldInfoList) {
+                if (fieldInfo.isUsePartitioned()) {
+                    // Partition fields are not set here; they are encoded in the partition spec
+                    continue;
+                }
+                Object obj = fieldInfo.getGetMethod().invoke(t);
+                record.set(fieldInfo.getColumnName(), obj);
+            }
+            // append() only buffers the record in memory; nothing is sent until flush()
+            pack.append(record);
+        }
+        log.info("Writing batch of {} records", value.size());
+        int retry = 0;
+        do {
+            try {
+                // flush() typically takes about 100 ms to 3 s
+                pack.flush();
+                break;
+            } catch (IOException e) {
+                if (retry == 3) {
+                    log.error("Flush data error!msg: " + e.getMessage());
+                    throw e;
+                }
+            }
+        } while (retry++ < 3);
+    }
+
+    @Override
+    public void close() throws Exception {
+        super.close();
+    }
+
+    private String generatePartitionStr(T t) {
+        if (CollectionUtils.isEmpty(partitionFieldMethods)) {
+            return null;
+        }
+        StringBuilder partition = new StringBuilder();
+        for (Map.Entry<String, Method> entry : partitionFieldMethods.entrySet()) {
+            partition.append(entry.getKey()).append("=");
+            try {
+                partition.append(entry.getValue().invoke(t));
+            } catch (InvocationTargetException | IllegalAccessException e) {
+                // Reading the partition field value failed
+                log.error(e.getMessage(), e);
+                throw new RuntimeException("Failed to get partition field value!", e);
+            }
+            partition.append(",");
+        }
+        // Drop the trailing comma
+        return partition.substring(0, partition.length() - 1);
+    }
+}
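
The class-level warning above does not say what the bug is. One visible hazard, offered here as an assumption rather than the author's diagnosis: `invoke()` derives the partition spec from `value.get(0)` and writes the entire batch to that partition, which is only safe when every element of the batch shares the same partition values (the keyed-by-statDay usage in AdStatJob may satisfy this, but it is not guaranteed in general). A defensive sketch that would group a mixed batch first (hypothetical helper; would also need `java.util.HashMap` and `java.util.ArrayList` imports):

```java
// Group the batch by partition spec, then open one StreamUploadSession per group
// instead of writing everything to the first element's partition.
private Map<String, List<T>> groupByPartition(IN value) {
    Map<String, List<T>> byPartition = new HashMap<>();
    for (T t : value) {
        byPartition.computeIfAbsent(generatePartitionStr(t), k -> new ArrayList<>()).add(t);
    }
    return byPartition;
}
```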

+ 40 - 0
flink-ad-monitoring/src/main/java/flink/zanxiangnet/ad/monitoring/stream/BatchStream.java

@@ -0,0 +1,40 @@
+package flink.zanxiangnet.ad.monitoring.stream;
+
+import com.google.common.collect.Lists;
+import flink.zanxiangnet.ad.monitoring.trigger.TimerCountTrigger;
+import org.apache.flink.streaming.api.datastream.DataStream;
+import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
+import org.apache.flink.streaming.api.functions.windowing.AllWindowFunction;
+import org.apache.flink.streaming.api.windowing.assigners.GlobalWindows;
+import org.apache.flink.streaming.api.windowing.windows.GlobalWindow;
+import org.apache.flink.util.Collector;
+
+import java.util.List;
+
+public class BatchStream<T> {
+
+    private final DataStream<T> stream;
+    private final String streamName;
+    // Interval (ms) at which the buffer is flushed
+    private final Long bufferRefreshTime;
+    // Maximum number of elements to buffer
+    private final Long maxBufferCount;
+
+    public BatchStream(String streamName, DataStream<T> stream, Long maxBufferCount, Long bufferRefreshTime) {
+        this.streamName = streamName;
+        this.stream = stream;
+        this.bufferRefreshTime = bufferRefreshTime;
+        this.maxBufferCount = maxBufferCount;
+    }
+
+    public SingleOutputStreamOperator<List<T>> toBatch() {
+        return stream.windowAll(GlobalWindows.create())
+                .trigger(new TimerCountTrigger<>(streamName + "_trigger", maxBufferCount, bufferRefreshTime))
+                .apply(new AllWindowFunction<T, List<T>, GlobalWindow>() {
+                    @Override
+                    public void apply(GlobalWindow globalWindow, Iterable<T> iterable, Collector<List<T>> collector) throws Exception {
+                        collector.collect(Lists.newArrayList(iterable));
+                    }
+                });
+    }
+}
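
One design note: `DataStream.windowAll()` is a non-parallel operation, so this non-keyed variant funnels the whole stream through a single batching subtask; the `KeyedBatchStream` below batches per key in parallel. A minimal usage sketch, mirroring the call added in Test.java (`someStringStream` is a placeholder `DataStream<String>`; name and limits are arbitrary):

```java
// Emit a List<String> whenever 100 elements are buffered or 30 s elapse.
SingleOutputStreamOperator<List<String>> batches =
        new BatchStream<>("demo", someStringStream, 100L, 30_000L).toBatch();
batches.print();
```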

+ 42 - 0
flink-ad-monitoring/src/main/java/flink/zanxiangnet/ad/monitoring/stream/KeyedBatchStream.java

@@ -0,0 +1,42 @@
+package flink.zanxiangnet.ad.monitoring.stream;
+
+import com.google.common.collect.Lists;
+import flink.zanxiangnet.ad.monitoring.trigger.TimerCountTrigger;
+import org.apache.flink.api.java.functions.KeySelector;
+import org.apache.flink.streaming.api.datastream.DataStream;
+import org.apache.flink.streaming.api.datastream.KeyedStream;
+import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
+import org.apache.flink.streaming.api.functions.windowing.WindowFunction;
+import org.apache.flink.streaming.api.windowing.assigners.GlobalWindows;
+import org.apache.flink.streaming.api.windowing.windows.GlobalWindow;
+import org.apache.flink.util.Collector;
+
+import java.util.List;
+
+public class KeyedBatchStream<T, KEY> {
+
+    private final KeyedStream<T, KEY> stream;
+    private final String streamName;
+    // Interval (ms) at which the buffer is flushed
+    private final Long bufferRefreshTime;
+    // Maximum number of elements to buffer
+    private final Long maxBufferCount;
+
+    public KeyedBatchStream(String streamName, KeyedStream<T, KEY> stream, Long maxBufferCount, Long bufferRefreshTime) {
+        this.streamName = streamName;
+        this.stream = stream;
+        this.bufferRefreshTime = bufferRefreshTime;
+        this.maxBufferCount = maxBufferCount;
+    }
+
+    public SingleOutputStreamOperator<List<T>> toBatch() {
+        return stream.window(GlobalWindows.create())
+                .trigger(new TimerCountTrigger<>(streamName + "_trigger", maxBufferCount, bufferRefreshTime))
+                .apply(new WindowFunction<T, List<T>, KEY, GlobalWindow>() {
+                    @Override
+                    public void apply(KEY key, GlobalWindow globalWindow, Iterable<T> iterable, Collector<List<T>> collector) throws Exception {
+                        collector.collect(Lists.newArrayList(iterable));
+                    }
+                });
+    }
+}
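
Usage mirrors the AdStatJob change at the top of this commit. Each batching stage is given its own name, which the trigger uses to label its `ValueState` descriptors (`<name>_trigger_windowTime` / `_windowCount`). A hedged sketch with a hypothetical `keyedHourStream` of type `KeyedStream<AdDataOfHourODS, String>`:

```java
// A second batching stage with its own name, flushing per key at
// 4000 records or 60 s, whichever comes first.
new KeyedBatchStream<>("adHourODSStream", keyedHourStream, 4000L, 60 * 1000L)
        .toBatch()
        .addSink(new TunnelBatchStreamSink<>(AdDataOfHourODS.class));
```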

+ 116 - 0
flink-ad-monitoring/src/main/java/flink/zanxiangnet/ad/monitoring/trigger/TimerCountTrigger.java

@@ -0,0 +1,116 @@
+package flink.zanxiangnet.ad.monitoring.trigger;
+
+import org.apache.flink.api.common.state.ValueState;
+import org.apache.flink.api.common.state.ValueStateDescriptor;
+import org.apache.flink.streaming.api.windowing.triggers.Trigger;
+import org.apache.flink.streaming.api.windowing.triggers.TriggerResult;
+import org.apache.flink.streaming.api.windowing.windows.Window;
+
+/**
+ * Fires on a timer or once an element-count threshold is reached,
+ * whichever happens first.
+ *
+ * @param <T>
+ * @param <W>
+ */
+public class TimerCountTrigger<T, W extends Window> extends Trigger<T, W> {
+    private static final String STATE_NAME_WINDOW_TIME = "_windowTime";
+    private static final String STATE_NAME_WINDOW_COUNT = "_windowCount";
+
+    // Interval (ms) at which the buffer is flushed
+    private final Long bufferRefreshTime;
+    // Maximum number of elements to buffer
+    private final Long maxBufferCount;
+    // Trigger name; used to scope this trigger's state descriptors
+    private final String triggerName;
+
+    public TimerCountTrigger(String triggerName, Long maxBufferCount, Long bufferRefreshTime) {
+        this.triggerName = triggerName;
+        this.maxBufferCount = maxBufferCount;
+        this.bufferRefreshTime = bufferRefreshTime;
+    }
+
+    /**
+     * Called for every element that enters the window.
+     *
+     * @param element        the element
+     * @param time           the element's arrival timestamp (unreliable here)
+     * @param window         the window the element was added to
+     * @param triggerContext trigger context
+     * @return
+     * @throws Exception
+     */
+    @Override
+    public TriggerResult onElement(T element, long time, W window, TriggerContext triggerContext) throws Exception {
+        ValueState<Long> windowTimeState = triggerContext.getPartitionedState(new ValueStateDescriptor<>(triggerName + STATE_NAME_WINDOW_TIME, Long.class));
+        ValueState<Long> windowCountState = triggerContext.getPartitionedState(new ValueStateDescriptor<>(triggerName + STATE_NAME_WINDOW_COUNT, Long.class));
+        long now = System.currentTimeMillis();
+        Long windowCount = windowCountState.value() == null ? 1L : windowCountState.value() + 1;
+        Long windowTime = windowTimeState.value() == null ? now : windowTimeState.value();
+
+        if (windowCount >= maxBufferCount || (now - windowTime) >= bufferRefreshTime) {
+            windowCountState.update(0L);
+            windowTimeState.update(now);
+            return TriggerResult.FIRE_AND_PURGE;
+        }
+
+        windowCountState.update(windowCount);
+        if (windowTimeState.value() == null) {
+            windowTimeState.update(now);
+        }
+        // Register a processing-time timer so the window also fires once the refresh interval elapses
+        triggerContext.registerProcessingTimeTimer(triggerContext.getCurrentProcessingTime() + bufferRefreshTime);
+        return TriggerResult.CONTINUE;
+    }
+
+    /**
+     * Called when a processing-time timer registered via the trigger context fires.
+     *
+     * @param time           the timestamp at which the timer fired
+     * @param window
+     * @param triggerContext
+     * @return
+     * @throws Exception
+     */
+    @Override
+    public TriggerResult onProcessingTime(long time, W window, TriggerContext triggerContext) throws Exception {
+        ValueState<Long> windowTimeState = triggerContext.getPartitionedState(new ValueStateDescriptor<>(triggerName + STATE_NAME_WINDOW_TIME, Long.class));
+        long now = time;
+        Long windowTime = windowTimeState.value();
+        // Fire once the refresh interval has elapsed since the first buffered element,
+        // and reset the element count so the next batch starts empty.
+        if (windowTime != null && now - windowTime >= bufferRefreshTime) {
+            triggerContext.getPartitionedState(new ValueStateDescriptor<>(triggerName + STATE_NAME_WINDOW_COUNT, Long.class)).update(0L);
+            windowTimeState.update(now);
+            return TriggerResult.FIRE_AND_PURGE;
+        }
+        return TriggerResult.CONTINUE;
+    }
+
+    /**
+     * Called when an event-time timer registered via the trigger context fires.
+     *
+     * @param time
+     * @param window
+     * @param triggerContext
+     * @return
+     * @throws Exception
+     */
+    @Override
+    public TriggerResult onEventTime(long time, W window, TriggerContext triggerContext) throws Exception {
+        return TriggerResult.CONTINUE;
+    }
+
+    /**
+     * Called when the window is cleared.
+     *
+     * @param w
+     * @param triggerContext
+     * @throws Exception
+     */
+    @Override
+    public void clear(W w, TriggerContext triggerContext) throws Exception {
+        ValueState<Long> windowTime = triggerContext.getPartitionedState(new ValueStateDescriptor<>(triggerName + STATE_NAME_WINDOW_TIME, Long.class));
+        ValueState<Long> windowCount = triggerContext.getPartitionedState(new ValueStateDescriptor<>(triggerName + STATE_NAME_WINDOW_COUNT, Long.class));
+        windowTime.clear();
+        windowCount.clear();
+    }
+}
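
Concretely, with the values used in AdStatJob (maxBufferCount = 4000, bufferRefreshTime = 60 000 ms), the window fires and purges as soon as 4000 elements have been buffered or 60 s have passed since the first element of the current batch. The decision in onElement() reduces to this pure function:

```java
// Restatement of the firing rule, free of Flink types (names are illustrative).
static boolean shouldFire(long bufferedCount, long firstBufferedAtMillis, long nowMillis,
                          long maxBufferCount, long bufferRefreshTimeMillis) {
    return bufferedCount >= maxBufferCount
            || nowMillis - firstBufferedAtMillis >= bufferRefreshTimeMillis;
}
```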

+ 4 - 0
flink-ad-monitoring/src/main/java/flink/zanxiangnet/ad/monitoring/util/DateUtil.java

@@ -33,6 +33,10 @@ public class DateUtil {
         return LocalDate.parse(dateStr, FORMAT_DATE);
     }
 
+    public static LocalDateTime parseLocalDateTime(String dateStr) {
+        return LocalDateTime.parse(dateStr, FORMAT_DATETIME);
+    }
+
     public static long localDateToSecond(LocalDate localDate) {
         return localDate.atStartOfDay(ZoneOffset.ofHours(8)).toEpochSecond();
     }
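
A quick usage sketch for the new parser; this assumes `FORMAT_DATETIME` (defined elsewhere in DateUtil, not shown in this diff) is the usual `yyyy-MM-dd HH:mm:ss` pattern:

```java
LocalDateTime t = DateUtil.parseLocalDateTime("2021-12-01 10:30:00");
```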

+ 9 - 6
flink-ad-monitoring/src/main/java/flink/zanxiangnet/ad/monitoring/util/ObjectUtil.java

@@ -37,7 +37,7 @@ public class ObjectUtil {
         if (obj instanceof BigInteger) {
             return ((BigInteger) obj).longValue();
         }
-        throw new RuntimeException("unknow data type! Class: " + obj.getClass());
+        throw new RuntimeException("unknown data type! Class: " + obj.getClass() + ", value: " + obj);
     }
 
     public static Integer toInt(Object obj) {
@@ -59,7 +59,7 @@ public class ObjectUtil {
         if (obj instanceof BigInteger) {
             return ((BigInteger) obj).intValue();
         }
-        throw new RuntimeException("unknow data type! Class: " + obj.getClass());
+        throw new RuntimeException("unknown data type! Class: " + obj.getClass() + ", value: " + obj);
     }
 
     public static Double toDouble(Object obj) {
@@ -87,7 +87,7 @@ public class ObjectUtil {
         if (obj instanceof BigInteger) {
             return (double) ((BigInteger) obj).longValue();
         }
-        throw new RuntimeException("unknow data type! Class: " + obj.getClass());
+        throw new RuntimeException("unknown data type! Class: " + obj.getClass() + ", value: " + obj);
     }
 
     public static BigDecimal toDecimal(Object obj) {
@@ -112,11 +112,11 @@ public class ObjectUtil {
         if (obj instanceof String) {
             return new BigDecimal((String) obj);
         }
-        throw new RuntimeException("unknow data type! Class: " + obj.getClass());
+        throw new RuntimeException("unknown data type! Class: " + obj.getClass() + ", value: " + obj);
     }
 
     public static Date toDate(Object obj) {
-        if(obj == null) {
+        if (obj == null) {
             return null;
         }
         if (obj instanceof Date) {
@@ -134,7 +134,10 @@ public class ObjectUtil {
         if (obj instanceof Integer) {
             return new Date(((Integer) obj).longValue() * 1000);
         }
-        throw new RuntimeException("unknow data type! Class: " + obj.getClass());
+        if (obj instanceof String) {
+            return DateUtil.localDateTimeToDate(DateUtil.parseLocalDateTime((String) obj));
+        }
+        throw new RuntimeException("unknown data type! Class: " + obj.getClass() + ", value: " + obj);
     }
 
     public static Boolean toBoolean(Object obj) {
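
With the new `String` branch, `toDate` now accepts datetime strings as well as epoch values; a short sketch (again assuming the `yyyy-MM-dd HH:mm:ss` pattern):

```java
Date fromString = ObjectUtil.toDate("2021-12-01 10:30:00"); // parsed via DateUtil
Date fromSeconds = ObjectUtil.toDate(1638325800);           // Integer branch: seconds since epoch
```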