@@ -0,0 +1,193 @@
+import threading
+import pandas
+from io import BytesIO
+from model.DateUtils import DateUtils
+import logging
+import logging.handlers
+import json
+import random
+import requests
+import time
+
+du = DateUtils()
+
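+
+# Workflow overview: async_tasks_add creates an asynchronous WeChat adgroup
+# daily-report task, async_tasks_get polls for the id of the generated result
+# file, async_task_files_get downloads that file, and get_data chains the three
+# steps together and aggregates the downloaded CSV with pandas into one summary
+# row per day.
+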
+def async_tasks_add(ymd, account_id, access_token):
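+    """Create an asynchronous WeChat adgroup daily-report task for the given
+    date (ymd) and return its task_id; retries up to 5 times and returns None
+    if every attempt fails."""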
+    interface = 'async_tasks/add'
+    url = 'https://api.e.qq.com/v1.3/' + interface
+
+    for i in range(5):
+        try:
+            common_parameters = {
+                'access_token': access_token,
+                'timestamp': int(time.time()),
+                'nonce': str(time.time()) + str(random.randint(0, 999999)),
+            }
+
+            parameters = {
+                "account_id": account_id,
+                "task_name": str(time.time()) + str(random.random()),
+                "task_type": "TASK_TYPE_WECHAT_ADGROUP_DAILY_REPORT",
+                "task_spec": {
+                    "task_type_wechat_adgroup_daily_report_spec": {
+                        "date": ymd
+                    }
+                }
+            }
+
+            # Non-string values must be JSON-encoded before being sent as form fields.
+            for k in parameters:
+                if not isinstance(parameters[k], str):
+                    parameters[k] = json.dumps(parameters[k])
+
+            r = requests.post(url, params=common_parameters, data=parameters)
+            logging.info(r.text)
+            task_id = r.json()['data']['task_id']
+            return task_id
+        except Exception:
+            logging.exception('creating the async task failed, sleeping 70 seconds before retrying')
+            time.sleep(70)
+
+    return None
+
+
+# Get the result file id of an async task
+def async_tasks_get(account_id, access_token, task_id):
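+    """Poll async_tasks/get for the given task_id and return the file_id of the
+    generated report file; retries up to 5 times with an increasing back-off and
+    returns None if the file never becomes available."""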
+    interface = 'async_tasks/get'
+    url = 'https://api.e.qq.com/v1.3/' + interface
+
+    for i in range(5):
+        try:
+            common_parameters = {
+                'access_token': access_token,
+                'timestamp': int(time.time()),
+                'nonce': str(time.time()) + str(random.randint(0, 999999)),
+            }
+            fields = ("task_id", "result")
+            parameters = {
+                "account_id": account_id,
+                'fields': fields,
+                "filtering": [
+                    {
+                        "field": "task_id",
+                        "operator": "EQUALS",
+                        "values": [
+                            task_id
+                        ]
+                    }
+                ],
+                "page": 1,
+                "page_size": 10
+            }
+
+            parameters.update(common_parameters)
+            for k in parameters:
+                if not isinstance(parameters[k], str):
+                    parameters[k] = json.dumps(parameters[k])
+
+            r = requests.get(url, params=parameters)
+            logging.info(r.text)
+            file_id = r.json()['data']['list'][0]['result']['data']['file_info_list'][0]['file_id']
+            return file_id
+        except Exception:
+            logging.exception('result file not ready yet, sleeping {} seconds'.format(60 * i))
+            time.sleep(60 * i)
+    return None
+
+
+# Download the result file of an async task
+def async_task_files_get(account_id, access_token, task_id, file_id):
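+    """Download the report file identified by (task_id, file_id) and return the
+    raw requests.Response; retries up to 5 times and returns None if no attempt
+    comes back with HTTP 200."""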
+    interface = 'async_task_files/get'
+    url = 'https://dl.e.qq.com/v1.1/' + interface
+    for i in range(5):
+        common_parameters = {
+            'access_token': access_token,
+            'timestamp': int(time.time()),
+            'nonce': str(time.time()) + str(random.randint(0, 999999)),
+        }
+
+        parameters = {
+            "account_id": account_id,
+            "task_id": task_id,
+            "file_id": file_id
+        }
+
+        parameters.update(common_parameters)
+        for k in parameters:
+            if not isinstance(parameters[k], str):
+                parameters[k] = json.dumps(parameters[k])
+
+        r = requests.get(url, params=parameters)
+        if r.status_code == 200:
+            return r
+        logging.info('download returned status %s, retrying', r.status_code)
+        time.sleep(10)
+    return None
+
+
+def get_data(y, li, st, et):
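+    """For each date between st and et, run the add/poll/download cycle for the
+    account described by y (y[0] = account_id, y[2] = access_token), aggregate
+    the downloaded CSV, and append one summary tuple per day to li."""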
+    account_id = y[0]
+    access_token = y[2]
+    for i in du.getDateLists(st, et):
+        task_id = async_tasks_add(i, account_id, access_token)
+        time.sleep(10)
+        file_id = async_tasks_get(account_id, access_token, task_id)
+        if file_id:
+            rsp = async_task_files_get(account_id, access_token, task_id, file_id)
+            if rsp is None:
+                logging.info('no report file downloaded for {}'.format(i))
+                continue
+
+            file_z = BytesIO(rsp.content)
+            file_z.seek(0)
+            source_data = pandas.read_csv(file_z)
+            new_data = source_data[['view_pv', 'canvas_view_pv', 'cost', 'click_pv', 'follow_pv']]
+            res = new_data.apply(lambda x: x.sum())
+            # Output tuple layout:
+            # date, cost, view_count, valid_click_count, ctr, official_account_follow_rate,
+            # order_amount, order_roi, order_count, order_rate, order_unit_price,
+            # web_order_cost, first_day_order_amount, first_day_order_count, account_id
+            date = i
+            cost = res['cost'].item() * 100
+            view_count = res['view_pv'].item()
+            valid_click_count = res['click_pv'].item()
+            ctr = valid_click_count / view_count if view_count else 0
+            official_account_follow_rate = res['follow_pv'].item() / view_count if view_count else 0
+            order_amount = 0
+            order_roi = 0
+            order_count = 0
+            order_rate = 0
+            order_unit_price = 0
+            web_order_cost = 0
+            first_day_order_amount = 0
+            first_day_order_count = 0
+            li.append((date, cost, view_count, valid_click_count, ctr, official_account_follow_rate, order_amount,
+                       order_roi, order_count, order_rate, order_unit_price, web_order_cost,
+                       first_day_order_amount, first_day_order_count, account_id))
+            print(li)
+
+
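+# A minimal sketch (not wired into the script) of how the rows collected in
+# `li` could be persisted; the column order follows the tuple layout documented
+# in get_data, and the output path is just an example.
+def save_rows_to_csv(rows, path='./wechat_adgroup_daily_report.csv'):
+    columns = ['date', 'cost', 'view_count', 'valid_click_count', 'ctr',
+               'official_account_follow_rate', 'order_amount', 'order_roi',
+               'order_count', 'order_rate', 'order_unit_price', 'web_order_cost',
+               'first_day_order_amount', 'first_day_order_count', 'account_id']
+    pandas.DataFrame(rows, columns=columns).to_csv(path, index=False)
+
+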
+if __name__ == '__main__':
+    logging.basicConfig(
+        handlers=[
+            logging.handlers.RotatingFileHandler('./test_get_media.log',
+                                                 maxBytes=10 * 1024 * 1024,
+                                                 backupCount=5,
+                                                 encoding='utf-8'),
+            logging.StreamHandler()  # also echo log records to the console
+        ],
+        level=logging.INFO,
+        format="%(asctime)s - %(levelname)s %(filename)s %(funcName)s %(lineno)s - %(message)s"
+    )
+    # async_tasks_add(21768795,'80ddc731c9108817b560273422c8e187')
+    # async_tasks_get(21768795, '80ddc731c9108817b560273422c8e187', 4183028546)
+    # async_task_files_get(21768795,'80ddc731c9108817b560273422c8e187',4183028546, 510401785)
+    # get_data()
+
+    thread_list = []
+    results = []
+    # get_data expects (account_id, _, access_token) as its first argument; the
+    # second slot of the tuple is unused here.
+    get_data((21768795, None, '80ddc731c9108817b560273422c8e187'), results, '2021-10-07', '2021-10-07')
+    # for i in range(1):
+    #     pass
+    #     one_thread = threading.Thread(target=get_data)
+    #     one_thread.start()
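+    # A possible threaded variant of the call above (sketch only, not enabled);
+    # `accounts` is a hypothetical list of (account_id, _, access_token) tuples.
+    # for acct in accounts:
+    #     one_thread = threading.Thread(target=get_data,
+    #                                   args=(acct, results, '2021-10-07', '2021-10-07'))
+    #     thread_list.append(one_thread)
+    #     one_thread.start()
+    # for one_thread in thread_list:
+    #     one_thread.join()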