ck 4 years ago
commit
8767366b2c

+ 4 - 0
.gitignore

@@ -0,0 +1,4 @@
+.idea/
+target/
+*.log
+*.pyc

+ 15 - 0
Pipfile

@@ -0,0 +1,15 @@
+[[source]]
+url = "https://mirrors.aliyun.com/pypi/simple/"
+verify_ssl = true
+name = "pypi"
+
+[packages]
+pymysql = "*"
+pandas = "*"
+pyyaml = "*"
+numpy = "==1.19.3"
+
+[dev-packages]
+
+[requires]
+python_version = "3.6"

+ 138 - 0
Pipfile.lock

@@ -0,0 +1,138 @@
+{
+    "_meta": {
+        "hash": {
+            "sha256": "76ae395d864408a98e27b072151556d92731f084fdc9cabea3274702e0d238e2"
+        },
+        "pipfile-spec": 6,
+        "requires": {
+            "python_version": "3.6"
+        },
+        "sources": [
+            {
+                "name": "pypi",
+                "url": "https://mirrors.aliyun.com/pypi/simple/",
+                "verify_ssl": true
+            }
+        ]
+    },
+    "default": {
+        "numpy": {
+            "hashes": [
+                "sha256:0ee77786eebbfa37f2141fd106b549d37c89207a0d01d8852fde1c82e9bfc0e7",
+                "sha256:199bebc296bd8a5fc31c16f256ac873dd4d5b4928dfd50e6c4995570fc71a8f3",
+                "sha256:1a307bdd3dd444b1d0daa356b5f4c7de2e24d63bdc33ea13ff718b8ec4c6a268",
+                "sha256:1ea7e859f16e72ab81ef20aae69216cfea870676347510da9244805ff9670170",
+                "sha256:271139653e8b7a046d11a78c0d33bafbddd5c443a5b9119618d0652a4eb3a09f",
+                "sha256:35bf5316af8dc7c7db1ad45bec603e5fb28671beb98ebd1d65e8059efcfd3b72",
+                "sha256:463792a249a81b9eb2b63676347f996d3f0082c2666fd0604f4180d2e5445996",
+                "sha256:50d3513469acf5b2c0406e822d3f314d7ac5788c2b438c24e5dd54d5a81ef522",
+                "sha256:50f68ebc439821b826823a8da6caa79cd080dee2a6d5ab9f1163465a060495ed",
+                "sha256:51e8d2ae7c7e985c7bebf218e56f72fa93c900ad0c8a7d9fbbbf362f45710f69",
+                "sha256:522053b731e11329dd52d258ddf7de5288cae7418b55e4b7d32f0b7e31787e9d",
+                "sha256:5ea4401ada0d3988c263df85feb33818dc995abc85b8125f6ccb762009e7bc68",
+                "sha256:604d2e5a31482a3ad2c88206efd43d6fcf666ada1f3188fd779b4917e49b7a98",
+                "sha256:6ff88bcf1872b79002569c63fe26cd2cda614e573c553c4d5b814fb5eb3d2822",
+                "sha256:7197ee0a25629ed782c7bd01871ee40702ffeef35bc48004bc2fdcc71e29ba9d",
+                "sha256:741d95eb2b505bb7a99fbf4be05fa69f466e240c2b4f2d3ddead4f1b5f82a5a5",
+                "sha256:83af653bb92d1e248ccf5fdb05ccc934c14b936bcfe9b917dc180d3f00250ac6",
+                "sha256:8802d23e4895e0c65e418abe67cdf518aa5cbb976d97f42fd591f921d6dffad0",
+                "sha256:8edc4d687a74d0a5f8b9b26532e860f4f85f56c400b3a98899fc44acb5e27add",
+                "sha256:942d2cdcb362739908c26ce8dd88db6e139d3fa829dd7452dd9ff02cba6b58b2",
+                "sha256:9a0669787ba8c9d3bb5de5d9429208882fb47764aa79123af25c5edc4f5966b9",
+                "sha256:9d08d84bb4128abb9fbd9f073e5c69f70e5dab991a9c42e5b4081ea5b01b5db0",
+                "sha256:9f7f56b5e85b08774939622b7d45a5d00ff511466522c44fc0756ac7692c00f2",
+                "sha256:a2daea1cba83210c620e359de2861316f49cc7aea8e9a6979d6cb2ddab6dda8c",
+                "sha256:b9074d062d30c2779d8af587924f178a539edde5285d961d2dfbecbac9c4c931",
+                "sha256:c4aa79993f5d856765819a3651117520e41ac3f89c3fc1cb6dee11aa562df6da",
+                "sha256:d78294f1c20f366cde8a75167f822538a7252b6e8b9d6dbfb3bdab34e7c1929e",
+                "sha256:dfdc8b53aa9838b9d44ed785431ca47aa3efaa51d0d5dd9c412ab5247151a7c4",
+                "sha256:dffed17848e8b968d8d3692604e61881aa6ef1f8074c99e81647ac84f6038535",
+                "sha256:e080087148fd70469aade2abfeadee194357defd759f9b59b349c6192aba994c",
+                "sha256:e983cbabe10a8989333684c98fdc5dd2f28b236216981e0c26ed359aaa676772",
+                "sha256:ea6171d2d8d648dee717457d0f75db49ad8c2f13100680e284d7becf3dc311a6",
+                "sha256:eefc13863bf01583a85e8c1121a901cc7cb8f059b960c4eba30901e2e6aba95f",
+                "sha256:efd656893171bbf1331beca4ec9f2e74358fc732a2084f664fd149cc4b3441d2"
+            ],
+            "index": "pypi",
+            "version": "==1.19.3"
+        },
+        "pandas": {
+            "hashes": [
+                "sha256:09e0503758ad61afe81c9069505f8cb8c1e36ea8cc1e6826a95823ef5b327daf",
+                "sha256:0a11a6290ef3667575cbd4785a1b62d658c25a2fd70a5adedba32e156a8f1773",
+                "sha256:0d9a38a59242a2f6298fff45d09768b78b6eb0c52af5919ea9e45965d7ba56d9",
+                "sha256:112c5ba0f9ea0f60b2cc38c25f87ca1d5ca10f71efbee8e0f1bee9cf584ed5d5",
+                "sha256:185cf8c8f38b169dbf7001e1a88c511f653fbb9dfa3e048f5e19c38049e991dc",
+                "sha256:3aa8e10768c730cc1b610aca688f588831fa70b65a26cb549fbb9f35049a05e0",
+                "sha256:41746d520f2b50409dffdba29a15c42caa7babae15616bcf80800d8cfcae3d3e",
+                "sha256:43cea38cbcadb900829858884f49745eb1f42f92609d368cabcc674b03e90efc",
+                "sha256:5378f58172bd63d8c16dd5d008d7dcdd55bf803fcdbe7da2dcb65dbbf322f05b",
+                "sha256:54404abb1cd3f89d01f1fb5350607815326790efb4789be60508f458cdd5ccbf",
+                "sha256:5dac3aeaac5feb1016e94bde851eb2012d1733a222b8afa788202b836c97dad5",
+                "sha256:5fdb2a61e477ce58d3f1fdf2470ee142d9f0dde4969032edaf0b8f1a9dafeaa2",
+                "sha256:6613c7815ee0b20222178ad32ec144061cb07e6a746970c9160af1ebe3ad43b4",
+                "sha256:6d2b5b58e7df46b2c010ec78d7fb9ab20abf1d306d0614d3432e7478993fbdb0",
+                "sha256:8a5d7e57b9df2c0a9a202840b2881bb1f7a648eba12dd2d919ac07a33a36a97f",
+                "sha256:8b4c2055ebd6e497e5ecc06efa5b8aa76f59d15233356eb10dad22a03b757805",
+                "sha256:a15653480e5b92ee376f8458197a58cca89a6e95d12cccb4c2d933df5cecc63f",
+                "sha256:a7d2547b601ecc9a53fd41561de49a43d2231728ad65c7713d6b616cd02ddbed",
+                "sha256:a979d0404b135c63954dea79e6246c45dd45371a88631cdbb4877d844e6de3b6",
+                "sha256:b1f8111635700de7ac350b639e7e452b06fc541a328cf6193cf8fc638804bab8",
+                "sha256:c5a3597880a7a29a31ebd39b73b2c824316ae63a05c3c8a5ce2aea3fc68afe35",
+                "sha256:c681e8fcc47a767bf868341d8f0d76923733cbdcabd6ec3a3560695c69f14a1e",
+                "sha256:cf135a08f306ebbcfea6da8bf775217613917be23e5074c69215b91e180caab4",
+                "sha256:e2b8557fe6d0a18db4d61c028c6af61bfed44ef90e419ed6fadbdc079eba141e"
+            ],
+            "index": "pypi",
+            "version": "==1.1.4"
+        },
+        "pymysql": {
+            "hashes": [
+                "sha256:263040d2779a3b84930f7ac9da5132be0fefcd6f453a885756656103f8ee1fdd",
+                "sha256:44f47128dda8676e021c8d2dbb49a82be9e4ab158b9f03e897152a3a287c69ea"
+            ],
+            "index": "pypi",
+            "version": "==0.10.1"
+        },
+        "python-dateutil": {
+            "hashes": [
+                "sha256:73ebfe9dbf22e832286dafa60473e4cd239f8592f699aa5adaf10050e6e1823c",
+                "sha256:75bb3f31ea686f1197762692a9ee6a7550b59fc6ca3a1f4b5d7e32fb98e2da2a"
+            ],
+            "version": "==2.8.1"
+        },
+        "pytz": {
+            "hashes": [
+                "sha256:3e6b7dd2d1e0a59084bcee14a17af60c5c562cdc16d828e8eba2e683d3a7e268",
+                "sha256:5c55e189b682d420be27c6995ba6edce0c0a77dd67bfbe2ae6607134d5851ffd"
+            ],
+            "version": "==2020.4"
+        },
+        "pyyaml": {
+            "hashes": [
+                "sha256:06a0d7ba600ce0b2d2fe2e78453a470b5a6e000a985dd4a4e54e436cc36b0e97",
+                "sha256:240097ff019d7c70a4922b6869d8a86407758333f02203e0fc6ff79c5dcede76",
+                "sha256:4f4b913ca1a7319b33cfb1369e91e50354d6f07a135f3b901aca02aa95940bd2",
+                "sha256:69f00dca373f240f842b2931fb2c7e14ddbacd1397d57157a9b005a6a9942648",
+                "sha256:73f099454b799e05e5ab51423c7bcf361c58d3206fa7b0d555426b1f4d9a3eaf",
+                "sha256:74809a57b329d6cc0fdccee6318f44b9b8649961fa73144a98735b0aaf029f1f",
+                "sha256:7739fc0fa8205b3ee8808aea45e968bc90082c10aef6ea95e855e10abf4a37b2",
+                "sha256:95f71d2af0ff4227885f7a6605c37fd53d3a106fcab511b8860ecca9fcf400ee",
+                "sha256:b8eac752c5e14d3eca0e6dd9199cd627518cb5ec06add0de9d32baeee6fe645d",
+                "sha256:cc8955cfbfc7a115fa81d85284ee61147059a753344bc51098f3ccd69b0d7e0c",
+                "sha256:d13155f591e6fcc1ec3b30685d50bf0711574e2c0dfffd7644babf8b5102ca1a"
+            ],
+            "index": "pypi",
+            "version": "==5.3.1"
+        },
+        "six": {
+            "hashes": [
+                "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259",
+                "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced"
+            ],
+            "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
+            "version": "==1.15.0"
+        }
+    },
+    "develop": {}
+}

+ 26 - 0
README.md

@@ -0,0 +1,26 @@
+# Quick start
+Quchen data processing center: offline task processing and data analysis
+
+## Environment and dependencies
+- Download python=3.7
+### Install pipenv
+
+`pip install pipenv`
+
+### Install dependencies from the Pipfile
+`cd DataCenter`
+
+`pipenv install`
+
+
+## Project structure
+- config.yaml under monitor-center/config holds the database and other configuration
+- monitor-center/model contains the packaged utility modules, which can be called directly
+- script holds the runnable script files
+
+
+## Basic usage
+### Database operations
+See `example/select_example.py` and `example/insert_example.py`; a minimal sketch follows below.
+
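A minimal usage sketch to fill in the empty section above, based on the example scripts and `model/util` code added in this commit. It assumes only the `quchen_text` connection from `config/db_config.yaml`; the table and column names are illustrative, not part of the repo:

```python
# Minimal usage sketch; table/column names are illustrative.
import pandas as pd
from model.util.DataBaseUtils import MysqlUtils

db = MysqlUtils()

# query: results come back as a pandas DataFrame
df = db.quchen_text.getData_pd("select * from novel_daily_order_amount limit 10")
print(df)

# upsert: rows with an existing key are updated, new keys are inserted
data = pd.DataFrame([["2020-11-19", "wx", 10.5]], columns=["ymd", "platform", "amount"])
db.quchen_text.insertorupdatemany_v2(
    table="novel_daily_order_amount",
    keys=["ymd", "platform"],          # key column names here are assumptions for the sketch
    tags=["amount"],
    key_values=data[["ymd", "platform"]],
    tag_values=data[["amount"]],
    flag=False)

db.close()
```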

+ 0 - 0
app/__init__.py


+ 38 - 0
app/novel_order_daily_amount.py

@@ -0,0 +1,38 @@
+""""
+@desc 订单数据日统计
+@auth ck
+@date 20201119
+"""
+
+from model.util.DateUtils import DateUtils
+from model.util.DataBaseUtils import MysqlUtils
+db = MysqlUtils()
+dt = DateUtils()
+
+
+def run(ymd):
+
+    sql = """replace into novel_daily_order_amount
+             select "{0}" as ymd,platform,channel_id,channel,stage,
+                    ROUND(SUM(amount),2) as amount,
+                    count(1) as order_num,
+                    count(DISTINCT user_id) as order_user
+             from `order` 
+             where `date` = UNIX_TIMESTAMP("{0}") GROUP BY platform,stage,channel 
+             """.format(ymd)
+    db.quchen_text.execute(sql)
+
+
+if __name__ == '__main__':
+    ymd = dt.get_n_days(-1)
+    run(ymd)
+
+    # for i in dt.getDateLists("2020-10-01", "2020-11-01"):
+    #     run(i)
+
+
+
+
+
+
+
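The commented-out loop above hints at backfilling. A minimal sketch of re-running the aggregation over a historical range (dates are illustrative); since run() issues REPLACE INTO, re-running a day overwrites rather than duplicates, provided the table's primary key covers the grouping columns:

```python
# Backfill sketch: re-run the daily aggregation for each day in a date range.
from model.util.DateUtils import DateUtils
from app.novel_order_daily_amount import run

dt = DateUtils()
for day in dt.getDateLists("2020-10-01", "2020-11-01"):
    run(day)
```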

+ 13 - 0
config/__init__.py

@@ -0,0 +1,13 @@
+"""
+@desc use [test] for the local environment
+      and [product] for production
+@auth chenkai
+@date 2019/10/8
+"""
+# import socket
+# from . import product, test
+# ip = socket.gethostbyname(socket.gethostname())
+# if ip.startswith("192"):
+#     product = test
+# else:
+#     product = product

+ 5 - 0
config/db_config.yaml

@@ -0,0 +1,5 @@
+quchen_text:
+  host: rm-bp1c9cj79872tx3aaro.mysql.rds.aliyuncs.com
+  user: superc
+  passwd: Cc719199895
+  db: quchen_text
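This YAML is consumed by MysqlUtils in model/util/DataBaseUtils.py (added below). A minimal sketch of how one entry becomes a connection, mirroring that code (the relative path assumes the process runs from the repo root); adding another database would mean adding a sibling YAML block plus a matching property on MysqlUtils:

```python
# How an entry in config/db_config.yaml maps to a MysqlOperation connection.
import yaml
from model.util.DataBaseOperation import MysqlOperation

with open("config/db_config.yaml", encoding="utf-8") as f:   # path relative to the repo root
    config = yaml.load(f.read(), Loader=yaml.FullLoader)

conf = config["quchen_text"]
conn = MysqlOperation(host=conf["host"], user=conf["user"],
                      passwd=conf["passwd"], db=conf["db"])
```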

+ 0 - 0
config/global_config.yaml


+ 0 - 0
example/__init__.py


+ 2 - 0
example/ding_test.py

@@ -0,0 +1,2 @@
+from model.util.DingTalkUtils import DingTalkUtils
+DingTalkUtils.send("这是测试文本", "18860455786")

+ 31 - 0
example/insert_example.py

@@ -0,0 +1,31 @@
+"""
+Example
+pandas操作MySQL
+以pandas.dataframe 插入mysql
+"""
+from model.util.DataBaseUtils import MysqlUtils
+import pandas as pd
+db = MysqlUtils()
+
+
+li = [["1", "小米", "122", "南京"],
+      ["3", "小红", "11", "杭州"],
+      ["2", "小绿", "10", "上海"]]
+df = pd.DataFrame(li, columns=["id", "name", "age", "city"])
+
+
+key = ["id"]                                # 抽象主键,不管表里这个是不是主键,都会根据该值来决定是更新还是插入,但是若无该索引,会导致插入速度很慢
+tag = ["name", "age", "city"]               # 其他值
+
+db.quchen_text.insertorupdatemany_v2(  # quchen_text is the only connection defined in config/db_config.yaml; the table name is illustrative
+    table='test',
+    keys=key,
+    tags=tag,
+    key_values=df[key],
+    tag_values=df[tag],
+    flag=False)
+
+# close the connection
+db.close()
+
+

+ 18 - 0
example/select_example.py

@@ -0,0 +1,18 @@
+"""
+Example
+pandas操作MySQL
+
+ 查数据,结果用dataframe显示
+"""
+from model.util.DataBaseUtils import MysqlUtils
+db = MysqlUtils()
+
+
+
+sql = "select * from notebook limit 10"
+df1 = db.quchen_text.getData_pd(sql)  # quchen_text is the only connection defined in config/db_config.yaml; the table name is illustrative
+print(df1)
+
+
+# close the connection
+db.close()

+ 0 - 0
model/__init__.py


+ 2 - 0
model/common/__init__.py

@@ -0,0 +1,2 @@
+
+

+ 70 - 0
model/common/log.py

@@ -0,0 +1,70 @@
+
+import logging
+import os
+import time
+
+
+class logger(object):
+    """
+    Print logs in different colors in the terminal. If PyCharm forces its own log colors
+    this has no effect, but in a plain terminal the logs are printed in different colors.
+    """
+
+    # defining the StreamHandler here as a class attribute makes it a singleton: all logger() instances share it
+    ch = logging.StreamHandler()
+
+    def __init__(self):
+        self.logger = logging.getLogger()
+        if not self.logger.handlers:
+            # only add handlers if self.logger has none yet
+            self.logger.setLevel(logging.DEBUG)
+            rootpath = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
+            self.log_path = rootpath + '/logs'
+            if not os.path.exists(self.log_path):
+                os.makedirs(self.log_path)
+
+            # create a handler that writes to a log file
+            fh = logging.FileHandler(self.log_path + '/runlog' + time.strftime("%Y%m%d", time.localtime()) + '.log',
+                                     encoding='utf-8')
+            fh.setLevel(logging.INFO)
+
+            # define the handler's output format
+            formatter = logging.Formatter('[%(asctime)s] - [%(levelname)s] - %(message)s')
+            fh.setFormatter(formatter)
+
+            # attach the handler to the logger
+            self.logger.addHandler(fh)
+
+    def debug(self, message):
+        self.fontColor('\033[0;32m%s\033[0m')
+        self.logger.debug(message)
+
+    def info(self, message):
+        self.fontColor('\033[0;34m%s\033[0m')
+        self.logger.info(message)
+
+    def warning(self, message):
+        self.fontColor('\033[0;37m%s\033[0m')
+        self.logger.warning(message)
+
+    def error(self, message):
+        self.fontColor('\033[0;31m%s\033[0m')
+        self.logger.error(message)
+
+    def critical(self, message):
+        self.fontColor('\033[0;35m%s\033[0m')
+        self.logger.critical(message)
+
+    def fontColor(self, color):
+        # use a different color for each log level
+        formatter = logging.Formatter(color % '[%(asctime)s] - [%(levelname)s] - %(message)s')
+        self.ch.setFormatter(formatter)
+        self.logger.addHandler(self.ch)
+
+
+if __name__ == "__main__":
+    logger = logger()
+    logger.info("12345")
+    logger.debug("12345")
+    logger.warning("12345")
+    logger.error("12345")

+ 1 - 0
model/util/ClickHouseUtils.py

@@ -0,0 +1 @@
+

+ 613 - 0
model/util/DataBaseOperation.py

@@ -0,0 +1,613 @@
+"""
+@desc Database operation wrappers
+@auth  chenkai
+@date 2020/11/19
+@py_version py3.7
+"""
+import pymysql
+# from clickhouse_sqlalchemy import make_session
+# from sqlalchemy import create_engine
+import logging as log
+import pandas as pd
+import time
+from model.common.log import logger
+log = logger()
+pd.set_option('display.max_columns', None)
+pd.set_option('display.width', 1000)
+MYSQL_DEBUG = 1
+
+
+class MysqlOperation:
+
+    def __init__(self, host, user, passwd, db, port=3306):
+        try:
+            self.conn = pymysql.connect(host=host,
+                                        user=user,
+                                        passwd=passwd,
+                                        db=db,
+                                        charset='utf8mb4',
+                                        port=port)
+            self.cursor = self.conn.cursor()
+        except Exception as e:
+            log.info(e)
+
+
+    def set_dict_cursor(self):
+        """
+        Fetch query results as dictionaries instead of tuples
+        """
+        self.cursor = self.conn.cursor(pymysql.cursors.DictCursor)
+    
+    def getData(self, sql, args=None):
+        start = time.time()
+        # if args:
+        #     log.debug(sql % tuple(args))
+        # else:
+        #     log.debug(sql)
+        self.cursor.execute(sql, args=args)
+        result = self.cursor.fetchall()
+        if MYSQL_DEBUG:
+            sql_str = sql % tuple(args) if args else sql
+            log.info('sql: \n' + sql_str)
+            log.info('sql cost: %s' % (time.time() - start))
+        return result
+
+    def execute(self, sql):
+        start = time.time()
+        self.cursor.execute(sql)
+        self.conn.commit()
+        if MYSQL_DEBUG:
+
+            log.info('sql: \n' + sql)
+            log.info('sql cost: %s' % (time.time() - start))
+
+
+    def getOne(self,sql, args=None):
+        result = self.getData(sql, args)
+
+        return result[0][0]
+
+    def getData_pd(self, sql, args=None):
+        start = time.time()
+        # if args:
+        #     log.debug(sql % tuple(args))
+        # else:
+        #     log.debug(sql)
+        self.cursor.execute(sql, args=args)
+        num_fields = len(self.cursor.description)
+        field_names = [i[0] for i in self.cursor.description]
+        df = self.cursor.fetchall()
+
+        df = pd.DataFrame(data=list(df), columns=field_names)
+
+        if MYSQL_DEBUG:
+            sql_str = sql % tuple(args) if args else sql
+            log.info('sql: \n' + sql_str)
+            log.info('sql cost: %s' % (time.time() - start))
+        return df
+
+    def insertData(self, sql, args=None):
+        # if args:
+        #     log.debug(sql % tuple(args))
+        # else:
+        #     log.debug(sql)
+        start = time.time()
+        self.cursor.execute(sql, args=args)
+
+        if MYSQL_DEBUG:
+            sql_str = sql % tuple(args) if args else sql
+            log.info('sql: \n' + sql_str)
+            log.info('sql cost: %s' % (time.time() - start))
+        self.conn.commit()
+
+    def executeWithoutCommit(self, sql, args=None):
+        return self.cursor.execute(sql, args=args)
+
+    def commit(self):
+        self.conn.commit()
+
+    def insertorupdate(self, table, keys, tags, tagvalue, flag, *args):
+        """
+        :param table: table name
+        :param keys: tuple of composite-key column names
+        :param tags: tuple of value column names
+        :param tagvalue: values for the value columns
+        :param args: values for the key columns
+        :param flag: whether to log the generated SQL
+        :return:
+
+        """
+
+        # log.info(tags)
+        sql = "INSERT INTO " + table + " ("
+        sql += ",".join(keys) + ","
+        sql += ",".join(tags)
+        sql += ") SELECT "
+        sql += "%s," * len(keys)
+        sql += ("%s," * len(tags))[:-1]
+        sql += " FROM DUAL WHERE NOT EXISTS (SELECT id FROM " + table
+        sql += " WHERE "
+        for _ in keys:
+            sql += _ + "=%s AND "
+
+        sql = sql[:-4]
+        sql += "LIMIT 1)"
+        arg = list(args)
+        arg.extend(tagvalue)
+        arg.extend(list(args))
+
+        rows = self.cursor.execute(sql, args=arg)
+        if rows == 0:
+            sql = "UPDATE " + table + " SET "
+            for _ in tags:
+                sql += _ + "=%s,"
+            sql = sql[:-1]
+            sql += " WHERE "
+            for _ in keys:
+                sql += _ + "=%s AND "
+            sql = sql[:-4]
+            arg = []
+            arg.extend(tagvalue)
+            arg.extend(list(args))
+            self.cursor.execute(sql, args=arg)
+        if flag:
+            log.info(sql % tuple(arg))
+        self.conn.commit()
+
+    def _insertorupdate(self, table, keys, tags, tag_value, flag, key_value, update=False):
+
+        if not update:
+            sql = "INSERT INTO " + table + " ("
+            sql += ",".join(keys) + ","
+            sql += ",".join(tags)
+            sql += ") SELECT "
+            sql += "%s," * len(keys)
+            sql += ("%s," * len(tags))[:-1]
+            sql += " FROM DUAL WHERE NOT EXISTS (SELECT id FROM " + table
+            sql += " WHERE "
+            for _ in keys:
+                sql += _ + "=%s AND "
+
+            sql = sql[:-4]
+            sql += "LIMIT 1)"
+            arg = list(key_value)
+            arg.extend(tag_value)
+            arg.extend(list(key_value))
+
+            rows = self.cursor.execute(sql, args=arg)
+            if rows == 0:
+                sql = "UPDATE " + table + " SET "
+                for _ in tags:
+                    sql += _ + "=%s,"
+                sql = sql[:-1]
+                sql += " WHERE "
+                for _ in keys:
+                    sql += _ + "=%s AND "
+                sql = sql[:-4]
+                arg = []
+                arg.extend(tag_value)
+                arg.extend(list(key_value))
+                self.cursor.execute(sql, args=arg)
+            if flag:
+                log.info(sql % tuple(arg))
+        else:
+
+            sql = "UPDATE " + table + " SET "
+            for _ in tags:
+                sql += _ + "=%s,"
+            sql = sql[:-1]
+            sql += " WHERE "
+            for _ in keys:
+                sql += _ + "=%s AND "
+            sql = sql[:-4]
+            arg = []
+            arg.extend(tag_value)
+            arg.extend(list(key_value))
+            self.cursor.execute(sql, args=arg)
+
+            if flag:
+                log.info(sql % tuple(arg))
+
+    def _insert_on_duplicate(self, table, keys, tags, tag_value, flag, key_value):
+        name_all = list(keys)
+        name_all.extend(tags)
+        arg = list(key_value)
+        arg.extend(tag_value)
+        arg.extend(tag_value)
+        sql_name = '(' + ','.join(name_all) + ')'
+        sql_value = '(' + ','.join(['%s'] * len(name_all)) + ')'
+        sql_update = ','.join([_ + '=%s' for _ in tags])
+        sql = """
+            insert into %s
+            %s
+            VALUES %s
+            ON duplicate key UPDATE %s
+        """ % (table, sql_name, sql_value, sql_update)
+        self.cursor.execute(sql, args=arg)
+        if flag:
+            log.debug(sql % tuple(arg))
+
+    def insertorupdatemany(self, table, keys, tags, tag_values, key_values, flag=False, unique_key=False, update=False):
+        """
+        :param table: table name
+        :param keys: tuple of composite-key column names
+        :param tags: tuple of value column names
+        :param tag_values: value rows (list or pd.DataFrame)
+        :param key_values: key rows (list or pd.DataFrame)
+        :param flag: whether to log the generated SQL
+        :param unique_key: whether `keys` is a unique key of the table
+        :return:
+        NB: throughput (over the public network) is roughly rows / 50; prefer this for updates of 1000+ rows
+        """
+        if isinstance(tag_values, pd.DataFrame):
+            list_tag_value = [list(tag_values.iloc[_, :]) for _ in range(len(tag_values))]
+        else:
+            list_tag_value = list(tag_values)
+        if isinstance(key_values, pd.DataFrame):
+            list_key_value = [list(key_values.iloc[_, :]) for _ in range(len(key_values))]
+        else:
+            list_key_value = list(key_values)
+        for _ in range(len(list_tag_value)):
+            tag_value = list_tag_value[_]
+            key_value = list_key_value[_]
+            if unique_key:
+                self._insert_on_duplicate(table, keys, tags, tag_value, flag, key_value)
+            else:
+                self._insertorupdate(table, keys, tags, tag_value, flag, key_value, update)
+        self.conn.commit()
+
+    def _check_repeat_key(self, key_list):
+        tmp = list(map(lambda x: tuple(x), key_list))
+        if len(tmp) == len(set(tmp)):
+            return False
+        else:
+            last_data = -1
+            repeat_key = set()
+            for i in sorted(tmp):
+                if last_data == i:
+                    repeat_key.add(i)
+                if len(repeat_key) >= 10:
+                    break
+                last_data = i
+            log.error('Reject repeated keys')
+            log.error('repeat_key: %s' % repeat_key)
+            return True
+
+    def _convert_to_list(self, data):
+        if isinstance(data, pd.DataFrame):
+            # np.nan != np.nan, so this comparison detects NaN values and replaces them with None
+            list_data = [map(lambda x: None if x != x else x, list(data.iloc[_, :])) for _ in range(len(data))]
+            li = []
+            for i in list_data:
+                li.append(list(i))
+            list_data = li
+
+        else:
+            list_data = list(data)
+
+        return list_data
+
+    def _get_exist_keys_index(self, table, keys, key_values, flag=False):
+        list_sql_when = []
+        list_tmp = []
+
+        for i in range(len(key_values)):
+            sql_when = """when (%s)=(%s) then %s""" % (','.join(keys), ','.join(['%s'] * len(key_values[i])), i)
+            list_sql_when.append(sql_when)
+            list_tmp.extend(key_values[i])
+        list_sql_condition = []
+        for i in range(len(key_values)):
+            # sql_condition_old = """(%s)=(%s)""" % (','.join(keys), ','.join(['%s'] * len(key_values[i])))
+            row_condition_list = map(lambda x: '%s = %%s' % x, keys)
+            sql_condition = """(%s)""" % ' and '.join(row_condition_list)
+            # print sql_condition_old, sql_condition
+            list_sql_condition.append(sql_condition)
+            list_tmp.extend(key_values[i])
+        sql_where = ' or '.join(list_sql_condition)
+
+        sql_case = '\n'.join(list_sql_when)
+        sql = """
+            select
+            case
+                %s
+            end
+            from %s
+            where %s
+        """ % (sql_case, table, sql_where)
+        if flag:
+            log.info(sql % tuple(list_tmp))
+
+        self.cursor.execute(sql, tuple(list_tmp))
+        result = self.cursor.fetchall()
+        return map(lambda x: x[0], result)
+
+    def insertorupdatemany_v2(self, table, keys, tags, tag_values, key_values, flag=False, split=80):
+        """
+        更新插入多条数据(无key时自动插入, 有keys时更新)
+        :param table: 表名
+        :param keys: 联合主键名元组
+        :param tags: 字段名元组
+        :param tag_values: 字段值组(list or pd.DataFrame)
+        :param key_values: 主键值组(list or pd.DataFrame)
+        :param flag: 控制是否打印日志
+        :param split: 切割阈值
+        :return:
+
+        ps: 效率(外网): rows^2 / 50000;  rows以split为单位分批更新
+        """
+        if not isinstance(tag_values, (tuple, list, pd.DataFrame)):
+            log.error('Type Error')
+            exit(-1)
+            return
+        if len(tag_values) > split:
+            length = len(tag_values)
+            for i in range(0, length, split):
+                start, finish = i, i + split
+                self.insertorupdatemany_v2(table, keys, tags, tag_values[start:finish], key_values[start:finish], flag, split=split)
+            return
+        if len(key_values) == 0 or len(tag_values) == 0:
+            log.debug('insert or update 0 rows')
+            return
+        tag_values = self._convert_to_list(tag_values)
+        key_values = self._convert_to_list(key_values)
+        assert self._check_repeat_key(key_values) == False
+
+        exist_key_index = list(self._get_exist_keys_index(table, keys, key_values, flag))
+
+        new_key_index = list(set(range(len(key_values))) - set(exist_key_index))
+        update_keys = list(map(lambda x: key_values[x], exist_key_index))
+        update_tags = list(map(lambda x: tag_values[x], exist_key_index))
+        insert_keys = list(map(lambda x: key_values[x], new_key_index))
+        insert_tags = list(map(lambda x: tag_values[x], new_key_index))
+
+        self.insert_many(table=table,
+                         keys=keys,
+                         tags=tags,
+                         tag_values=insert_tags,
+                         key_values=insert_keys,
+                         flag=flag)
+
+        self.update_many(table=table,
+                         keys=keys,
+                         tags=tags,
+                         tag_values=update_tags,
+                         key_values=update_keys,
+                         flag=flag,
+                         split=split)
+
+    def insertorupdatemany_v3(self, df, table, keys, tags, flag=False, split=80):
+        self.insertorupdatemany_v2(
+            table=table,
+            keys=keys,
+            tags=tags,
+            tag_values=df[tags],
+            key_values=df[keys],
+            flag=flag,
+            split=split
+        )
+
+    def _get_s_format(self, data):
+        """
+        Args:
+            data: [[featureA1, featureB1, ...], [featureA2, featureB2, ...], ...]
+
+        Returns:
+            format of %s and real value
+
+        Example:
+            [['2017-07-01', 78], ['2017-07-01', 1]] ->
+                     ('((%s, %s), (%s, %s))', ['2017-07-01', 78, '2017-07-01', 1])
+        """
+        list_tmp_s = []
+        values = []
+        for _ in data:
+            tmp_s = ','.join(len(_) * ['%s'])
+            values.extend(_)
+            if len(_) > 1:
+                tmp_s = '(' + tmp_s + ')'
+            list_tmp_s.append(tmp_s)
+        format_s = '(' + ','.join(list_tmp_s) + ')'
+        return format_s, values
+
+    def delete_by_key(self, table, keys, key_values, flag=False):
+        """
+
+        Args:
+            table: table name
+            keys: tuple of composite-key column names
+            key_values: key rows (list or pd.DataFrame)
+            flag: whether to log the generated SQL
+
+        Examples:
+            delete_by_key('table_test', keys=['date'], key_values=[['2017-07-01'], ['2017-07-02']], flag=False)
+            delete_by_key('table_test', keys=['date'], key_values=['2017-07-01'], flag=False)
+        """
+        if len(key_values) == 0:
+            return
+        if not (isinstance(key_values[0], (list, tuple)) or isinstance(key_values, pd.DataFrame)):
+            key_values_list = [key_values]
+        else:
+            key_values_list = self._convert_to_list(key_values)
+        sql_keys = '(' + ','.join(keys) + ')'
+
+        contact_s, values_s = self._get_s_format(key_values_list)
+        sql_del = """
+            delete from %s
+            where %s in %s
+        """ % (table, sql_keys, contact_s)
+        if flag:
+            log.debug(sql_del % tuple(values_s))
+        self.cursor.execute(sql_del, tuple(values_s))
+        self.conn.commit()
+
+    def insert_many(self, table, keys, tags, tag_values, key_values, flag=False, split=80):
+        """
+        Insert multiple rows directly
+        :param table: table name
+        :param keys: tuple of composite-key column names
+        :param tags: tuple of value column names
+        :param tag_values: value rows (list or pd.DataFrame)
+        :param key_values: key rows (list or pd.DataFrame)
+        :param flag: whether to log the generated SQL
+        :return:
+
+        Examples: see insertorupdatemany_v2
+        insert into table
+        (count_date, cid, tag1, tag2)
+        values ('2017-01-01', 10, 1, 'a'), ('2017-01-02', 20, 2, 'b'), ...
+        """
+        if len(key_values) == 0 or len(tag_values) == 0:
+            log.debug('insert 0 rows')
+            return
+        if len(tag_values) > split:
+            length = len(tag_values)
+            for i in range(0, length, split):
+                start, finish = i, i + split
+                self.insert_many(table, keys, tags, tag_values[start:finish], key_values[start:finish], flag, split=split)
+            return
+        tag_values = self._convert_to_list(tag_values)
+        key_values = self._convert_to_list(key_values)
+
+        feature_total = "(" + ",".join(keys + tags) + ")"
+        tmp_s = "(" + ",".join(["%s"] * len(keys + tags)) + ")"
+        tmp_s_concat = ",\n".join([tmp_s] * len(key_values))
+        sql_insert = """
+                Insert into %s
+                %s
+                values %s""" % (table, feature_total, tmp_s_concat)
+        value_insert = []
+        for _ in zip(key_values, tag_values):
+            value_insert.extend(_[0] + _[1])
+        if flag:
+            log.debug(sql_insert % tuple(value_insert))
+        t0 = time.time()
+
+        self.cursor.execute(sql_insert,tuple(value_insert))
+        log.debug('insert %s rows, cost: %s' % (len(key_values), time.time() - t0))
+        self.conn.commit()
+
+    def update_many(self, table, keys, tags, tag_values, key_values, flag=False, split=80):
+        """
+        Update multiple rows (rows whose keys do not exist are NOT inserted automatically)
+        :param table: table name
+        :param keys: tuple of composite-key column names
+        :param tags: tuple of value column names
+        :param tag_values: value rows (list or pd.DataFrame)
+        :param key_values: key rows (list or pd.DataFrame)
+        :param flag: whether to log the generated SQL
+        :param split: batch size per update
+        :return:
+
+        Examples: see insertorupdatemany_v2
+        # single-row update sql: write tag1=1, tag2='a' to the row where (count_date, cid) = ('2017-01-01', 10)
+        update table
+        set tag1=1, tag2='a'
+        where (count_date, cid) =('2017-01-01', 10)
+
+        # combined multi-row update sql
+        # tag1=1, tag2='a' written to (count_date, cid) = ('2017-01-01', 10);
+        # tag1=2, tag2='b' written to (count_date, cid) = ('2017-01-02', 20);
+        update table
+        set tag1 = case 
+            when (count_date, cid)=('2017-01-01', 10) then 1
+            when (count_date, cid)=('2017-01-02', 20) then 2
+            ...
+            ,
+            tag_2 = case
+            when (count_date, cid)=('2017-01-01', 10) then 'a'
+            when (count_date, cid)=('2017-01-02', 20) then 'b'
+            ...
+        where (count_date, cid)=('2017-01-01', 10) or (count_date, cid)=('2017-01-02', 20) or ...
+
+        """
+        if len(tag_values) > split:
+            length = len(tag_values)
+            for i in range(0, length, split):
+                start, finish = i, i + split
+                self.update_many(table, keys, tags, tag_values[start:finish], key_values[start:finish], flag, split=split)
+            return
+        if len(key_values) == 0 or len(tag_values) == 0:
+            log.debug('update 0 rows')
+            return
+
+        tag_values = self._convert_to_list(tag_values)
+        key_values = self._convert_to_list(key_values)
+
+        if self._check_repeat_key(key_values):
+            return
+
+        update_value = []
+        sql_keys = ','.join(keys)
+        if len(keys) > 1:
+            sql_keys = '(' + sql_keys + ')'
+
+        sql_key_values = ','.join(['%s'] * len(keys))
+        if len(keys) > 1:
+            sql_key_values = '(' + sql_key_values + ')'
+
+        sql_set_list = []
+        for i in range(len(tags)):
+            sql_when_list = []
+            for j in range(len(tag_values)):
+                sql_when = """when %s=%s then %s """ % (sql_keys, sql_key_values, '%s')
+                update_value.extend(key_values[j])
+                update_value.append(tag_values[j][i])
+                sql_when_list.append(sql_when)
+            sql_when_concat = '\n\t'.join(sql_when_list)
+            sql_set = """%s = case \n\t %s\n end""" % (tags[i], sql_when_concat)
+            sql_set_list.append(sql_set)
+        for _ in key_values:
+            update_value.extend(_)
+        sql_set_concat = ',\n'.join(sql_set_list)
+
+        list_sql_condition = []
+        for i in range(len(key_values)):
+            row_condition_list = map(lambda x: '%s = %%s' % x, keys)
+            sql_condition = """(%s)""" % ' and '.join(row_condition_list)
+            list_sql_condition.append(sql_condition)
+        sql_where = ' or '.join(list_sql_condition)
+
+        # condition = ' or\n\t'.join([sql_keys + '=' + sql_key_values] * len(tag_values))
+        # print condition
+        sql = """update %s\n set %s\n where %s""" % (table, sql_set_concat, sql_where)
+        if flag:
+            log.info(sql % tuple(update_value))
+        t0 = time.time()
+        self.cursor.execute(sql, tuple(update_value))
+        self.conn.commit()
+        log.debug('update %s rows, cost: %s' % (len(key_values), time.time() - t0))
+
+
+# class CkOperation:
+#     cursor = None
+#     session = None
+#
+#     def __init__(self, conf):
+#         try:
+#             connection = 'clickhouse://{user}:{passwd}@{host}:{port}/{db}'.format(**conf)
+#             engine = create_engine(connection, pool_size=100, pool_recycle=3600, pool_timeout=20)
+#             self.session = make_session(engine)
+#
+#         except Exception as e:
+#             log.info(e)
+#
+#     def execute(self, sql):
+#         self.cursor = self.session.execute(sql)
+#         try:
+#             fields = self.cursor._metadata.keys
+#             return [dict(zip(fields, item)) for item in self.cursor.fetchall()]
+#         except Exception as e:
+#             log.info(e)
+#
+#     def getData_pd(self, sql):
+#         li = self.execute(sql)
+#         return pd.DataFrame(li)
+#
+#     def getOne(self, sql):
+#         li = self.execute(sql)
+#         return [i for i in li[0].values()][0]
+
+
+
+
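A brief sketch of how the lower-level MysqlOperation methods above fit together. The `demo_table` name and its columns are made up for illustration, and a connection is normally obtained through MysqlUtils (next file) rather than constructed directly:

```python
# Illustrative use of MysqlOperation; demo_table and its columns are hypothetical.
from model.util.DataBaseUtils import MysqlUtils

db = MysqlUtils()
op = db.quchen_text                                  # a MysqlOperation instance

total = op.getOne("select count(1) from `order`")    # fetch a single scalar
op.set_dict_cursor()                                 # later fetches return dicts

# batch insert: keys/tags are column names, values are row-aligned lists
op.insert_many(table="demo_table",
               keys=["ymd", "platform"],
               tags=["amount"],
               key_values=[["2020-11-19", "wx"], ["2020-11-19", "qq"]],
               tag_values=[[10.5], [3.0]],
               flag=True)

# delete the same rows again by key
op.delete_by_key("demo_table",
                 keys=["ymd", "platform"],
                 key_values=[["2020-11-19", "wx"], ["2020-11-19", "qq"]],
                 flag=True)

db.close()
```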

+ 48 - 0
model/util/DataBaseUtils.py

@@ -0,0 +1,48 @@
+"""
+@desc Database connection helpers
+@auth chenkai
+@date 2020/11/19
+"""
+from .DataBaseOperation import *
+from model.common.log import logger
+import yaml
+import os
+
+log = logger()
+
+
+class MysqlUtils:
+    _quchen_text = None
+
+    def __init__(self):
+        p_path = os.path.dirname(os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir)))
+        path = os.path.join(p_path, "config", "db_config.yaml")
+        with open(path, encoding="utf-8") as f:
+            self.config = yaml.load(f.read(), Loader=yaml.FullLoader)
+
+    @property
+    def quchen_text(self):
+        # create the connection lazily and reuse it, so close() closes the live connection
+        if self._quchen_text is None:
+            conf = self.config['quchen_text']
+            self._quchen_text = MysqlOperation(host=conf['host'],
+                                               user=conf['user'],
+                                               passwd=conf['passwd'],
+                                               db=conf['db'])
+        return self._quchen_text
+
+
+
+    def find_db(self, db):
+        if db == "quchen_text":
+            return self.quchen_text
+        else:
+            log.debug("unknown database name: %s" % db)
+
+    def close(self):
+        if self._quchen_text:
+            self._quchen_text.cursor.close()
+            self._quchen_text.conn.close()
+
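MysqlUtils currently exposes only quchen_text; supporting a second database would mean adding a sibling block to config/db_config.yaml and a matching property here. A hedged sketch with a hypothetical entry name:

```python
# Hypothetical extension: expose a second connection the same way quchen_text is exposed.
# Assumes config/db_config.yaml gains a "report_db" block with host/user/passwd/db keys.
from model.util.DataBaseOperation import MysqlOperation
from model.util.DataBaseUtils import MysqlUtils


class MysqlUtilsExt(MysqlUtils):
    _report_db = None

    @property
    def report_db(self):
        if self._report_db is None:
            conf = self.config['report_db']
            self._report_db = MysqlOperation(host=conf['host'],
                                             user=conf['user'],
                                             passwd=conf['passwd'],
                                             db=conf['db'])
        return self._report_db
```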

+ 273 - 0
model/util/DateUtils.py

@@ -0,0 +1,273 @@
+import time
+from datetime import date, datetime, timedelta
+import calendar
+from dateutil.relativedelta import relativedelta
+
+
+class DateUtils:
+    """
+    Date and time helper functions
+    """
+
+    def __init__(self):
+        self.today = datetime.strptime(datetime.today().strftime("%Y-%m-%d"), "%Y-%m-%d")
+        self.daydelta = timedelta(days=1)
+        self.now = datetime.now()
+
+    def getDateLists(self, begin, end):
+        """
+        Return a list of dates from begin to end (inclusive)
+        """
+        interval = self.getInterval(begin, end)
+        return [self.getLastDays(begin, -x) for x in range(interval + 1)]
+
+    def getMonthLists(self, begin, end):
+        begin_date = datetime.strptime(begin, "%Y-%m").date()
+        end_date = datetime.strptime(end, '%Y-%m').date()
+        temp = end_date
+        month_list = []
+        while temp >= begin_date:
+            month_list.append(temp.strftime('%Y-%m'))
+            temp = self.get_before_month(temp.year, temp.month, 1, 1)
+        month_list.reverse()
+        return month_list
+
+    def getLastDays(self, begin, interval):
+        """
+        :param begin:
+        :param interval: positive = that many days earlier, negative = that many days later
+        :return:
+        """
+        start = datetime(int(begin[0:4]), int(begin[5:7]), int(begin[8:10]))
+        delta = timedelta(days=1)
+        if interval < 0:
+            for _ in range(0, -interval):
+                start = start + delta
+        else:
+            for _ in range(0, interval):
+                start = start - delta
+        return start.strftime("%Y-%m-%d")
+
+    def get_n_month_ago_begin(self, begin, n):
+        year = int(begin[:4])
+        month = int(begin[5:7])
+        if n > 0:
+            for i in range(0, n):
+                month -= 1
+                if month == 0:
+                    month = 12
+                    year -= 1
+        else:
+            for i in range(0, -n):
+                if month == 12:
+                    month = 0
+                    year += 1
+                month += 1
+        return date(year, month, 1).strftime('%Y-%m')
+
+
+
+    def get_n_days(self, interval=0, flag=0):
+        """
+        negative: that many days in the past
+        positive: that many days in the future
+        :param interval:
+        :param flag: 1 to return a datetime instead of a string
+        :return:
+        """
+        start = self.today
+        if interval < 0:
+            for _ in range(0, -interval):
+                start = start - self.daydelta
+        else:
+            for _ in range(0, interval):
+                start = start + self.daydelta
+        if flag == 1:
+            return start
+        else:
+            return start.strftime("%Y-%m-%d")
+
+    def getNow(self):
+        """
+        Get today's date
+        :return: today's date as a string
+        """
+        now = datetime.now()
+        return now.strftime("%Y-%m-%d")
+
+    def getWeek(self, begin):
+        return datetime(int(begin[0:4]), int(begin[5:7]), int(begin[8:10])).strftime("%w")
+
+    def get_today_before_month(self, n):
+        """
+        Get the (year, month) n months ago
+        :param n:
+        :return:
+        """
+        year = time.localtime()[0]
+        month = time.localtime()[1]
+        for i in range(0, n):
+            month -= 1
+            if month == 0:
+                month = 12
+                year -= 1
+        return year, month
+
+    def get_one_month_ago(self, flag=0):
+        """
+        Return the date one month ago; by default as a formatted date string
+        flag 1 returns a datetime
+        :param flag:
+        :return:
+        """
+        x = self.today-relativedelta(months=1)
+        if flag == 1:
+            return x
+        else:
+            return x.strftime("%Y-%m-%d")
+
+    def get_today_before_month_list(self, n=0):
+        year = time.localtime()[0]
+        month = time.localtime()[1]
+        ret = []
+        for i in range(0, n):
+            month -= 1
+            if month == 0:
+                month = 12
+                year -= 1
+            if month >= 10:
+                ret.append(str(year) + "-" + str(month) + "-" + "01")
+            else:
+                ret.append(str(year) + "-0" + str(month) + "-" + "01")
+        return ret
+
+    def get_before_month(self, year, month, day, n):
+        for i in range(0, n):
+            month -= 1
+            if month == 0:
+                month = 12
+                year -= 1
+        day = min(day, calendar.monthrange(year, month)[1])
+        return date(year, month, day)
+
+    def getInterval(self, begin, end):
+        t1 = datetime(int(begin[0:4]), int(begin[5:7]), int(begin[8:10]))
+        t2 = datetime(int(end[0:4]), int(end[5:7]), int(end[8:10]))
+        return (t2 - t1).days
+
+    def month_first_day(self, flag=False):
+        """
+        Return the first day of the current month
+        If today is the 1st, return the 1st of the previous month instead
+        :param flag:
+        :return:
+        """
+        if self.today.day == 1:
+            # if today is the 1st, return the 1st of the previous month
+            if self.today.month == 1:
+                tmp = date(self.today.year - 1, 12, 1)
+            else:
+                tmp = date(self.today.year, self.today.month - 1, 1)
+        else:
+            tmp = date(self.today.year, self.today.month, 1)
+        return tmp if flag else tmp.strftime("%Y-%m-%d")
+
+    def get_today(self, flag=False):
+        return self.today if flag else self.today.strftime("%Y-%m-%d")
+
+    def get_n_pre_month_first_day(self, n, flag=False):
+        """
+        Get the first day of the month n months ago
+        """
+        r = self.get_before_month(self.today.year, self.today.month, 1, n)
+        return r if flag else r.strftime("%Y-%m-%d")
+
+    def get_n_month_ago(self, n, flag=0):
+        d = self.get_before_month(self.today.year, self.today.month, self.today.day, n)
+        return d.strftime("%Y-%m-%d") if flag == 0 else d
+
+    def get_week_ago(self, flag=False):
+        """
+        :param: None
+        :return: 7 days ago
+        """
+        tmp = (self.today - timedelta(days=7))
+        return tmp if flag else tmp.strftime("%Y-%m-%d")
+
+    def get_week_first_day(self, flag=False):
+
+        """
+        Return the start (Monday) of the current week
+        :param flag:
+        :return:
+        """
+        # print self.today.day
+        if self.today.weekday() == 0:
+            # if today is Monday, return last Monday instead
+            tmp = self.today - timedelta(days=7)
+        else:
+            tmp = self.today - timedelta(days=self.today.weekday())
+
+        return tmp if flag else tmp.strftime("%Y-%m-%d")
+
+    def get_one_day_ago(self, flag=False):
+        """
+        :param: None
+        :return: 1 day ago
+        """
+        tmp = (self.today - timedelta(days=1))
+        return tmp if flag else tmp.strftime("%Y-%m-%d")
+
+    def get_start(self, s_start=date.today().strftime("%Y-%m-%d")):
+        return datetime(int(s_start[0:4]), int(s_start[5:7]), int(s_start[8:10]))
+
+    def get_n_hours_ago(self, n=1, flag = 0):
+        """
+        get n hours ago
+        if flag is truthy, return a datetime
+        default: 1 hour ago
+        if n > 0:
+            a time in the past
+        else:
+            a time in the future
+        :param n:
+        :return:
+        """
+        r = self.now - timedelta(hours=n)
+        return r if flag else r.strftime("%Y-%m-%d %H:00:00")
+
+    def get_n_minutes_ago(self, n=1, string=True):
+        """
+        get n minutes ago
+        default: 1 minute ago
+        if n > 0:
+            a time in the past
+        else:
+            a time in the future
+        :param n:
+        :return:
+        """
+        if string:
+            return (self.now - timedelta(minutes=n)).strftime("%Y-%m-%d %H:%M")
+        else:
+            return self.now - timedelta(minutes=n)
+
+    def get_n_pre_month_last_day(self, n=0, flag=False):
+        """
+        Get the last day of the month n months ago
+        :param n:
+        :param flag:
+        :return:
+        """
+        r = self.get_before_month(self.today.year, self.today.month, 1, n)
+        num = calendar.monthrange(r.year, r.month)[1]
+        x = r.replace(day=num)
+        return x if flag else x.strftime("%Y-%m-%d")
+
+if __name__ == "__main__":
+    ut = DateUtils()
+    end = ut.now.strftime('%Y-%m') + '-01 00:00:00'
+    # begin = ut.get_n_month_ago_begin(ut.now.strftime('%Y-%m'), 1) + '-01 00:00:00'
+    # print(ut.get_n_pre_month_first_day(0))
+    ut.today = date(2018, 1, 1)
+    print(ut.month_first_day())
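A few usage examples of the helpers above, as they tend to be combined in the scripts in this commit; the values in the comments assume the script runs on 2020-11-20:

```python
from model.util.DateUtils import DateUtils

dt = DateUtils()
print(dt.get_n_days(-1))                            # yesterday: "2020-11-19"
print(dt.getDateLists("2020-11-01", "2020-11-03"))  # ['2020-11-01', '2020-11-02', '2020-11-03']
print(dt.month_first_day())                         # "2020-11-01" (previous month's 1st if today is the 1st)
print(dt.get_n_pre_month_first_day(1))              # first day of last month: "2020-10-01"
print(dt.get_n_hours_ago(3))                        # three hours ago, e.g. "2020-11-20 09:00:00"
```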

+ 44 - 0
model/util/DingTalkUtils.py

@@ -0,0 +1,44 @@
+"""
+@desc DingTalk alerting
+@auth ck
+Phone numbers are a str separated by ","; if omitted, @ everyone by default
+"""
+import requests
+import random
+
+url = "https://oapi.dingtalk.com/robot/send?access_token=ca2fe03f4c4932f4017c2a7724a6beee94072313cd3325667576856f79156382"
+headers = {'Content-Type': 'application/json;charset=utf-8'}
+members = ["17752557125", "15902760898", "17757147568", "18860455786"]
+
+class DingTalkUtils:
+    """
+    @phone Str
+            multiple numbers may be passed, separated by commas
+            pass "random" to notify one random member
+    """
+    def send(msg, phone=""):
+        if phone == "all":
+            isAtall = True
+            atMobiles = []
+
+        elif phone == "random":
+            isAtall = False
+            atMobiles = []
+            atMobiles.append(random.choice(members))
+
+        else:
+            isAtall = False
+            atMobiles = phone.split(",")
+
+        data = {'msgtype': 'text',
+                    "at": {"isAtAll": isAtall,
+                           "atMobiles": atMobiles},
+                    "text": {"content": msg}}
+
+        requests.post(url=url, headers=headers, json=data)
+
+
+if __name__ == '__main__':
+    DingTalkUtils.send("xxx","188604557876,11232")
+
+

+ 0 - 0
model/util/__init__.py