ck 3 年之前
父節點
當前提交
d1ea0f45cd

+ 27 - 10
app/api_data/tx_ad_cost/TxCostUtils.py

@@ -4,13 +4,30 @@ from model.DateUtils import DateUtils
 from model.DataBaseUtils import MysqlUtils
 du = DateUtils()
 
-def get_accounts(filter=None):
-    db = MysqlUtils()
-    if filter:
-		if filter=='MP':
-			return db.quchen_text.getData("select account_id,access_token,name channel from advertiser_vx where (name !='' or name is not null)")
-		else:
-			return db.quchen_text.getData("select account_id,access_token,name channel from advertiser_qq where (name !='' or name is not null)")
-
-    return db.quchen_text.getData("select account_id,access_token,name channel,'MP' flag from advertiser_vx where (name !='' or name is not null) union "
-								   "select account_id,access_token,name channel,'GDT' flag from advertiser_qq where (name !='' or name is not null)")
+# def get_accounts(filter=None):
+#     db = MysqlUtils()
+#     if filter:
+#         if filter=='MP':
+# 			return db.quchen_text.getData("select account_id,access_token,name channel from advertiser_vx where (name !='' or name is not null)")
+#
+# 		else:
+# 			return db.quchen_text.getData("select account_id,access_token,name channel"
+# 										  " from advertiser_qq where (name !='' or name is not null)")
+#
+#     return db.quchen_text.getData("select account_id,access_token,name channel,'MP' flag "
+# 								  "from advertiser_vx where (name !='' or name is not null) union "
+# 								   "select account_id,access_token,name channel,'GDT' flag"
+# 								  " from advertiser_qq where (name !='' or name is not null)")
+
+
+
+
+
if __name__ == '__main__':
    # Smoke test. NOTE(review): mp_campaign_daily_report is defined in
    # campaign_cost_daily.py, not in this module — the original raised
    # NameError here. Import it explicitly before calling.
    from app.api_data.tx_ad_cost.campaign_cost_daily import mp_campaign_daily_report

    account_id = "19521440"
    token = "5ea8737494daecf9e954df56b250826e"
    st = "2021-05-01"
    et = '2021-05-20'
    # len(x) is the idiomatic spelling of x.__len__()
    print(len(mp_campaign_daily_report(account_id, token, st, et)))
+
+

+ 103 - 3
app/api_data/tx_ad_cost/campaign_cost_daily.py

@@ -1,9 +1,12 @@
 """
 计划维度日报"""
-
+import time
 from model.DataBaseUtils import MysqlUtils
 from concurrent.futures import ThreadPoolExecutor
 from .TxCostUtils import get_accounts
+import random
+import json
+import requests
 
 
 def run(st, et):
@@ -13,10 +16,107 @@ def run(st, et):
     executor.shutdown()
 
 
-
-def get_campaign_cost_daily(account_id, token, st, et):
def get_campaign_cost_daily(account_id, token, flag, st, et):
    """Fetch the campaign-level daily cost report for one ad account.

    1. Pull the campaign daily report rows that have spend.
    2. (Planned) pull the base info of those campaigns and persist both.

    :param account_id: Tencent ads account id
    :param token: access token for the account
    :param flag: 'MP' for WeChat-MP accounts, anything else is treated as GDT (QQ)
    :param st: start date 'YYYY-MM-DD'
    :param et: end date 'YYYY-MM-DD'
    :return: list of report-row dicts from the API
    """
    # The original branched to get_mp_data()/get_qq_data() with no arguments;
    # get_mp_data was never defined (NameError) and get_qq_data is an empty
    # stub. Route to the actual report pullers defined below instead.
    if flag == 'MP':
        return mp_campaign_daily_report(account_id, token, st, et)
    return gdt_campaign_daily_report(account_id, token, st, et)
+
+
def mp_campaign_daily_report(account_id, access_token, start_date, end_date):
    """Pull the WeChat-MP campaign-level daily report, following all pages.

    Calls the Tencent ads `daily_reports/get` endpoint with
    level=REPORT_LEVEL_CAMPAIGN_WECHAT for [start_date, end_date].

    :param account_id: ad account id
    :param access_token: API access token for the account
    :param start_date: 'YYYY-MM-DD'
    :param end_date: 'YYYY-MM-DD'
    :return: list of report-row dicts (the concatenated `data.list` pages)
    :raises KeyError: if the API responds with an error payload that lacks `data`
    """
    url = 'https://api.e.qq.com/v1.1/daily_reports/get'
    fields = ('account_id', 'date', 'campaign_id', 'view_count', 'cost', 'ctr',
              'cpc', 'order_roi', 'thousand_display_price', 'valid_click_count',
              'official_account_follow_count', 'conversions_count',
              'official_account_follow_rate', 'conversions_rate', 'order_count',
              'order_rate', 'order_unit_price', 'first_day_order_amount')
    page = 1
    rows = []
    while True:
        parameters = {
            'access_token': access_token,
            'timestamp': int(time.time()),
            'nonce': str(time.time()) + str(random.randint(0, 999999)),
            'fields': fields,
            "account_id": account_id,
            "level": "REPORT_LEVEL_CAMPAIGN_WECHAT",
            "date_range": {"start_date": start_date, "end_date": end_date},
            "page": page,
            "page_size": 1000,
            # "filtering": [],
            # "group_by": ["campaign_id",'date']
        }
        # The API expects non-string values JSON-encoded in the query string.
        for k in parameters:
            if not isinstance(parameters[k], str):
                parameters[k] = json.dumps(parameters[k])

        r = requests.get(url, params=parameters).json()
        rows.extend(r['data']['list'])
        # `>=` (not `==`): an empty result set reports total_page == 0, and the
        # original `page == total_page` test then never fired → infinite loop.
        if page >= r['data']['page_info']['total_page']:
            break
        page += 1
    return rows
+
+
+
+
+
+
def gdt_campaign_daily_report(account_id, access_token, start_date, end_date):
    """Pull the GDT (QQ) campaign-level daily report, following all pages.

    Calls the Tencent ads `daily_reports/get` endpoint for
    [start_date, end_date].

    :param account_id: ad account id
    :param access_token: API access token for the account
    :param start_date: 'YYYY-MM-DD'
    :param end_date: 'YYYY-MM-DD'
    :return: list of report-row dicts (the concatenated `data.list` pages)
    :raises KeyError: if the API responds with an error payload that lacks `data`
    """
    # Copy-paste fix: the original used REPORT_LEVEL_CAMPAIGN_WECHAT (the
    # WeChat-MP level) here; GDT accounts use REPORT_LEVEL_CAMPAIGN.
    # TODO(review): confirm against the Marketing API level enum.
    level = "REPORT_LEVEL_CAMPAIGN"
    url = 'https://api.e.qq.com/v1.1/daily_reports/get'
    fields = ('account_id', 'date', 'campaign_id', 'view_count', 'cost', 'ctr',
              'cpc', 'order_roi', 'thousand_display_price', 'valid_click_count',
              'official_account_follow_count', 'conversions_count',
              'official_account_follow_rate', 'conversions_rate', 'order_count',
              'order_rate', 'order_unit_price', 'first_day_order_amount')
    page = 1
    rows = []
    while True:
        parameters = {
            'access_token': access_token,
            'timestamp': int(time.time()),
            'nonce': str(time.time()) + str(random.randint(0, 999999)),
            'fields': fields,
            "account_id": account_id,
            "level": level,
            "date_range": {"start_date": start_date, "end_date": end_date},
            "page": page,
            "page_size": 1000,
            # "filtering": [],
            # "group_by": ["campaign_id",'date']
        }
        # The API expects non-string values JSON-encoded in the query string.
        for k in parameters:
            if not isinstance(parameters[k], str):
                parameters[k] = json.dumps(parameters[k])

        r = requests.get(url, params=parameters).json()
        rows.extend(r['data']['list'])
        # `>=` (not `==`): an empty result set reports total_page == 0, and the
        # original `page == total_page` test then never fired → infinite loop.
        if page >= r['data']['page_info']['total_page']:
            break
        page += 1
    return rows
+
+
+
def get_qq_data():
    """Persist GDT (QQ) campaign cost data.

    TODO: not implemented — gdt_campaign_daily_report above already fetches
    the report rows; this stub should write them to the database.
    """
    pass
+
+
+
 
 

+ 4 - 0
app/dailyRun.py

@@ -4,6 +4,7 @@ from model.DateUtils import DateUtils
 from app.api_data import get_cost
 from app.etl.dw import dw_image_cost_day
 from app import task
+from app.etl.MaterialLibrary import MaterialDataClean
 du = DateUtils()
 
 
@@ -21,6 +22,9 @@ if __name__ == '__main__':
     get_cost.day()
     dw_image_cost_day.day()
 
+    # Material library (素材库). NOTE(review): MaterialDataClean is an imported
+    # module, not a callable — the call below raises TypeError; invoke its
+    # functions (title/description/image/adcreative) instead.
+    MaterialDataClean()
+
 
 
 

+ 9 - 9
app/etl/MaterialLibrary/MaterialDataClean.py

@@ -13,7 +13,7 @@ def title():
         sum(view_count) view_times,
         group_concat(distinct book) novels,
         max(dt) end_date,min(dt) start_date 
-        from dw_image_cost_day where title!='' and title is not null  GROUP BY REPLACE(REPLACE(title, CHAR(10), ''), CHAR(13), '')  limit 1000
+        from dw_image_cost_day where title!='' and title is not null  GROUP BY REPLACE(REPLACE(title, CHAR(10), ''), CHAR(13), '')
 				"""
 
     df =  db.dm.getData_pd(sql)
@@ -26,7 +26,7 @@ def title():
     tag = ["view_times", "click_times", "novels", "start_date", "end_date", "create_by",'data_type', 'consume_amount']
     table = "t_ads_content"
 
-    db.zx_test.dfsave2mysql(df, table, key, tag)
+    db.zx_ads.dfsave2mysql(df, table, key, tag)
 
 def description():
     sql = """select REPLACE(REPLACE(description, CHAR(10), ''), CHAR(13), '') content,
@@ -35,7 +35,7 @@ def description():
             sum(view_count) view_times,
             group_concat(distinct book) novels,
             max(dt) end_date,min(dt) start_date
-            from dw_image_cost_day where description!='' and description is not null  GROUP BY REPLACE(REPLACE(description, CHAR(10), ''), CHAR(13), '')  limit 1000
+            from dw_image_cost_day where description!='' and description is not null  GROUP BY REPLACE(REPLACE(description, CHAR(10), ''), CHAR(13), '')
     				"""
 
     df = db.dm.getData_pd(sql)
@@ -48,7 +48,7 @@ def description():
     tag = ["view_times", "click_times", "novels", "start_date", "end_date", "create_by",'data_type', 'consume_amount']
     table = "t_ads_content"
 
-    db.zx_test.dfsave2mysql(df, table, key, tag)
+    db.zx_ads.dfsave2mysql(df, table, key, tag)
 
 
 def image():
@@ -62,7 +62,7 @@ def image():
             min(preview_url) content,
             if(is_video=1,2,1) type,  
             if(locate(',',signature)>0,0,1) single_img
-            from dw_image_cost_day  where signature is not null and signature !=''  GROUP BY  signature,is_video limit 1000"""
+            from dw_image_cost_day  where signature is not null and signature !=''  GROUP BY  signature,is_video"""
 
     df = db.dm.getData_pd(sql)
     # print(df)
@@ -74,7 +74,7 @@ def image():
     tag = ["view_times", "click_times", "novels", "start_date", "end_date", "create_by", "single_img", "content",'consume_amount','type']
     table = "t_ads_media"
 
-    db.zx_test.dfsave2mysql(df, table, key, tag)
+    db.zx_ads.dfsave2mysql(df, table, key, tag)
 
 
 
@@ -101,11 +101,11 @@ from dw_image_cost_day where signature is not null and signature!=''   GROUP BY
     tag = ["view_times", "click_times", "novels", "start_date", "end_date","type","channel",'consume_amount','single_img','media']
     table = "t_ads_idea"
 
-    db.zx_test.dfsave2mysql(df, table, key, tag)
+    db.zx_ads.dfsave2mysql(df, table, key, tag)
 
 
 if __name__ == '__main__':
-    # title()
-    # description()
+    title()
+    description()
     image()
     adcreative()

+ 9 - 2
config/db_config.yaml

@@ -6,16 +6,23 @@ quchen_text:
 
 zx:
   host: rm-bp145mi6r24ik50z5xo.mysql.rds.aliyuncs.com
-  user: readonly
-  passwd: toDataOfReadOnly@2021
+  user: zx_manager
+  passwd: ZX@mysql&@$
   db: zx_prod
 
+# NOTE(review): plaintext production DB credentials are committed in this file —
+# move them to environment variables or a secret store and rotate the password.
+zx_ads:
+  host: rm-bp145mi6r24ik50z5xo.mysql.rds.aliyuncs.com
+  user: zx_manager
+  passwd: ZX@mysql&@$
+  db: zx_ads
+
 dm:
   host: rm-bp10mvfktc9o41ir91o.mysql.rds.aliyuncs.com
   user: qucheng
   passwd: Qc_123456
   db: dm
 
+
 zx_test:
   host: 118.178.187.109
   user: root

+ 14 - 0
model/DataBaseUtils.py

@@ -15,6 +15,7 @@ log = logger()
 class MysqlUtils:
     _quchen_text = None
     _zx=None
+    _zx_ads=None
     _dm=None
     _zx_test =None
 
@@ -47,6 +48,15 @@ class MysqlUtils:
                                   db=conf['db'])
         return self._zx
 
+    @property
+    def zx_ads(self):
+        conf = self.config['zx_ads']
+        self._zx_ads = MysqlOperation(host=conf['host'],
+                                  user=conf['user'],
+                                  passwd=conf['passwd'],
+                                  db=conf['db'])
+        return self._zx_ads
+
     @property
     def zx_test(self):
         conf = self.config['zx_test']
@@ -93,6 +103,10 @@ class MysqlUtils:
             self._zx_test.cursor.close()
             self._zx_test.conn.close()
 
+        if self._zx_ads:
+            self._zx_ads.cursor.close()
+            self._zx_ads.conn.close()
+
 class CkUtils:
 
     def __init__(self):