# get_cost_older.py — legacy daily spend (消耗) collection script for QQ / WeChat / Toutiao ad accounts.
import hashlib
import json
import logging
import random
import time
from concurrent.futures import ThreadPoolExecutor

import requests
from six import string_types
from six.moves.urllib.parse import urlencode, urlunparse

from model.DataBaseUtils import MysqlUtils
from model.DateUtils import DateUtils
from model.DingTalkUtils import DingTalkUtils
  13. db = MysqlUtils()
  14. du = DateUtils()
  15. def md5value(s):
  16. md5 = hashlib.md5()
  17. md5.update(s.encode("utf-8"))
  18. return md5.hexdigest()
  19. def daily_reports_get(access_token, account_id, st, et, level, fields, err_num=0):
  20. logging.info(f'开始获取消耗数据,token:{access_token}, id:{account_id}, st:{str(st)}, et:{str(et)}')
  21. interface = 'daily_reports/get'
  22. url = 'https://api.e.qq.com/v1.1/' + interface
  23. common_parameters = {
  24. 'access_token': access_token,
  25. 'timestamp': int(time.time()),
  26. 'nonce': str(time.time()) + str(random.randint(0, 999999)),
  27. }
  28. parameters = {
  29. "account_id": account_id,
  30. "level": level,
  31. "date_range":
  32. {
  33. "start_date": st,
  34. "end_date": et
  35. },
  36. "page": 1,
  37. "page_size": 1000,
  38. "fields": fields
  39. }
  40. parameters.update(common_parameters)
  41. for k in parameters:
  42. if type(parameters[k]) is not str:
  43. parameters[k] = json.dumps(parameters[k])
  44. r = requests.get(url, params=parameters, timeout=5).json()
  45. logging.info('account_id: {} 开始获取消耗数据'.format(account_id))
  46. if r['code'] != 0:
  47. logging.warning(
  48. 'access_token:{} code:{} message:{}'.format(str(access_token), str(r['code']), str(r['message'])))
  49. if err_num < 5:
  50. time.sleep(0.1)
  51. return daily_reports_get(access_token, account_id, st, et, level, fields, err_num=err_num + 1)
  52. DingTalkUtils().send(
  53. '消耗日报请求出现问题\naccess_token:{} code:{} message:{}'.format(str(access_token), str(r['code']),
  54. str(r['message'])))
  55. return r
  56. def get_q_data(y, li, st, et):
  57. try:
  58. c = daily_reports_get(y[2], y[0], st, et, "REPORT_LEVEL_ADVERTISER", (
  59. 'date', 'view_count', 'valid_click_count', 'ctr', 'cpc', 'cost', 'web_order_count', 'web_order_rate',
  60. 'web_order_cost', 'follow_count', 'order_amount', 'order_roi', 'platform_page_view_count',
  61. 'web_commodity_page_view_count', 'from_follow_uv'))
  62. if 'data' in c.keys() and len(c["data"]["list"]) > 0:
  63. for d in c['data']['list']:
  64. d['account_id'] = y[0]
  65. logging.info('qq: ' + str(d['account_id']) + str(d["cost"]))
  66. x = d
  67. res_data=[x['date'],x['view_count'],x['valid_click_count'],x['ctr'],x['cpc'],x['cost'],
  68. x['web_order_count'],x['web_order_rate'],
  69. x['web_order_cost'],x['follow_count'],x['order_amount'],x['order_roi'],
  70. x['platform_page_view_count'],x['web_commodity_page_view_count'],
  71. x['from_follow_uv'],x['account_id']]
  72. li.append(tuple(res_data))
  73. except Exception as e:
  74. logging.error('qq account:{} error :{}'.format(str(y),str(e)))
  75. def get_v_data(y, li, st, et):
  76. try:
  77. c = daily_reports_get(y[2], y[0], st, et, "REPORT_LEVEL_ADVERTISER_WECHAT", (
  78. 'date', 'cost', 'view_count', 'valid_click_count', 'ctr', 'official_account_follow_rate', 'order_amount',
  79. 'order_roi', 'order_count', 'order_rate', 'order_unit_price', 'web_order_cost', 'first_day_order_amount',
  80. 'first_day_order_count'))
  81. if 'data' in c.keys() and len(c["data"]["list"]) > 0:
  82. for d in c['data']['list']:
  83. d['account_id'] = y[0]
  84. logging.info('vx:' + str(d['account_id'])+ ' ' + str(d["cost"]))
  85. x = d
  86. res_data=[x['date'],x['cost'],x['view_count'],x['valid_click_count'],x['ctr'],
  87. x['official_account_follow_rate'],
  88. x['order_amount'],x['order_roi'],x['order_count'],x['order_rate'],
  89. x['order_unit_price'],x['web_order_cost'],x['first_day_order_amount'],
  90. x['first_day_order_count'],x['account_id']]
  91. li.append(tuple(res_data))
  92. except Exception as e:
  93. logging.error('vx account:{} error :{}'.format(str(y),str(e)))
  94. def get_tt_data(account_info, li, st, et):
  95. def build_url_ad(path, query=""):
  96. # type: (str, str) -> str
  97. """
  98. Build request URL
  99. :param path: Request path
  100. :param query: Querystring
  101. :return: Request URL
  102. """
  103. scheme, netloc = "https", "ad.oceanengine.com"
  104. return urlunparse((scheme, netloc, path, "", query, ""))
  105. page_num = 1
  106. advertiser_ids = account_info[1]
  107. for advertiser_id in advertiser_ids:
  108. while True:
  109. # account_info
  110. my_args = {
  111. "start_date": st,
  112. "end_date": et,
  113. "page_size": 100,
  114. "page": page_num,
  115. # "agent_id" : "1708974248093789",
  116. 'advertiser_id': advertiser_id,
  117. # "start_date": "%s"
  118. }
  119. PATH = "/open_api/2/report/advertiser/get/"
  120. args = json.loads(json.dumps(my_args))
  121. query_string = urlencode({k: v if isinstance(v, string_types) else json.dumps(v) for k, v in args.items()})
  122. url = build_url_ad(PATH, query_string)
  123. headers = {
  124. "Access-Token": account_info[0],
  125. }
  126. rsp = requests.get(url, headers=headers)
  127. '''
  128. date,cost,view_count,valid_click_count,
  129. ctr,official_account_follow_rate,order_amount,
  130. order_roi,order_count,order_rate,order_unit_price,
  131. web_order_cost,first_day_order_amount,first_day_order_count,account_id
  132. '''
  133. # print(account_info)
  134. # print(rsp.text)
  135. result = rsp.json()
  136. for _ in result['data']['list']:
  137. campaign_info = (_['stat_datetime'][:10], _['cost'] * 100, _['show'], _['click'],
  138. _['ctr'], None, None,
  139. None, None, None, None,
  140. None, None, None, advertiser_id)
  141. li.append(campaign_info)
  142. total_page = result['data']['page_info']['total_page']
  143. if page_num > total_page or page_num == total_page:
  144. break
  145. else:
  146. page_num = page_num + 1
  147. def get_vx_list():
  148. sql = "select account_id,wechat_account_id,access_token,refresh_token,name," \
  149. "ifnull(stage,''),ifnull(pitcher,''),ifnull(platform,''),ifnull(book,'') from advertiser_vx"
  150. a = db.quchen_text.getData(sql)
  151. return a
  152. def get_qq_list():
  153. sql = "select account_id,'',access_token,refresh_token,name," \
  154. "ifnull(stage,''),ifnull(pitcher,''),ifnull(platform,''),ifnull(book,'') from advertiser_qq"
  155. a = db.quchen_text.getData(sql)
  156. return a
  157. def mysql_insert_daily_vx(data):
  158. logging.info('start save daily_vx info')
  159. b = """replace into daily_vx (date,cost,view_count,valid_click_count,ctr,official_account_follow_rate,order_amount,
  160. order_roi,order_count,order_rate,order_unit_price,web_order_cost,first_day_order_amount,first_day_order_count,account_id)
  161. values (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"""
  162. db.quchen_text.executeMany(b, data)
  163. logging.info('start save daily_vx info')
  164. def mysql_insert_daily_qq(data):
  165. a = """replace into daily_qq (date,view_count,valid_click_count,ctr,cpc,cost,web_order_count,web_order_rate,
  166. web_order_cost,follow_count,order_amount,order_roi,platform_page_view_count,web_commodity_page_view_count,
  167. from_follow_uv,account_id) values (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"""
  168. db.quchen_text.executeMany(a, data)
  169. def mysql_insert_daily_tt(data):
  170. b = """replace into daily_tt (date,cost,view_count,valid_click_count,ctr,
  171. official_account_follow_rate,order_amount,
  172. order_roi,order_count,order_rate,order_unit_price,
  173. web_order_cost,first_day_order_amount,
  174. first_day_order_count,account_id)
  175. values (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"""
  176. db.quchen_text.executeMany(b, data)
  177. def get_daily_vx(st, et):
  178. token_list_v = get_vx_list()
  179. logging.info("获取vx账号:" + str(token_list_v.__len__()))
  180. time1 = time.time()
  181. executor = ThreadPoolExecutor(max_workers=10)
  182. li = []
  183. for y in token_list_v:
  184. executor.submit(get_v_data, y, li, st, et)
  185. executor.shutdown()
  186. logging.info('get_daily_vx:' + str(len(li)) + 'cost:' + str(int(time.time() - time1)))
  187. mysql_insert_daily_vx(li)
  188. def get_daily_qq(st, et):
  189. token_list_q = get_qq_list()
  190. logging.info("获取qq账号:" + str(token_list_q.__len__()))
  191. time1 = time.time()
  192. li = []
  193. executor = ThreadPoolExecutor(max_workers=10)
  194. for x in token_list_q:
  195. executor.submit(get_q_data, x, li, st, et)
  196. executor.shutdown()
  197. logging.info('get_qq_order:' + str(len(li)) + 'cost:' + str(int(time.time() - time1)))
  198. mysql_insert_daily_qq(li)
  199. def get_daily_tt(st, et):
  200. def refresh_access_token(appid, account_id, secret, refresh_token):
  201. open_api_url_prefix = "https://ad.oceanengine.com/open_api/"
  202. uri = "oauth2/refresh_token/"
  203. refresh_token_url = open_api_url_prefix + uri
  204. data = {
  205. "appid": appid,
  206. "secret": secret,
  207. "grant_type": "refresh_token",
  208. "refresh_token": refresh_token,
  209. }
  210. rsp = requests.post(refresh_token_url, json=data)
  211. rsp_data = rsp.json()
  212. new_refresh_token = rsp_data['data']['refresh_token']
  213. new_access_token = rsp_data['data']['access_token']
  214. sql = f'''
  215. update bytedance_login_info
  216. set refresh_token='{new_refresh_token}' ,access_token='{new_access_token}'
  217. where appid='{appid}' and account_id='{account_id}'
  218. '''
  219. db.quchen_text.execute(sql)
  220. return rsp_data['data']['access_token']
  221. # 1.获取refresh_token
  222. sql = '''
  223. select appid,account_id,secret,refresh_token from bytedance_login_info
  224. '''
  225. accounts_info = db.quchen_text.getData(sql)
  226. # 2.刷新refresh_token,并获取最新的access_token
  227. for account_info in accounts_info:
  228. appid, account_id, secret, refresh_token = account_info
  229. access_token = refresh_access_token(appid, account_id, secret, refresh_token)
  230. # 3.获取agent_id
  231. sql = f'''
  232. select distinct(advertiser_id) from advertiser_bytedance
  233. where appid='{appid}' and account_id='{account_id}'
  234. '''
  235. advertiser_ids = db.quchen_text.getData(sql)
  236. logging.info("获取头条账号:" + str(advertiser_ids.__len__()))
  237. advertiser_ids = [_[0] for _ in advertiser_ids]
  238. # token,adv_ids
  239. account_info = (access_token, advertiser_ids)
  240. time1 = time.time()
  241. li = []
  242. get_tt_data(account_info, li, st, et)
  243. logging.info('get_tt_order:' + str(len(li)) + 'cost:' + str(int(time.time() - time1)))
  244. mysql_insert_daily_tt(li)
  245. def get_token_bytedance():
  246. #添加bytedance账号,需要添加一下access_token
  247. open_api_url_prefix = "https://ad.oceanengine.com/open_api/"
  248. uri = "oauth2/access_token/"
  249. url = open_api_url_prefix + uri
  250. data = {
  251. "app_id": 1709866698360883,
  252. "secret": "****",
  253. "grant_type": "auth_code",
  254. "auth_code": "********"
  255. }
  256. rsp = requests.post(url, json=data)
  257. rsp_data = rsp.json()
  258. return rsp_data['data']['access_token']
  259. def run(st, et):
  260. logging.info('微信消耗数据拉取,开始')
  261. get_daily_vx(st, et)
  262. logging.info('微信消耗数据拉取,结束')
  263. logging.info('qq消耗数据拉取,开始')
  264. get_daily_qq(st, et)
  265. logging.info('qq消耗数据拉取,结束')
  266. logging.info('头条消耗数据拉取,开始')
  267. get_daily_tt(st, et)
  268. logging.info('头条消耗数据拉取,结束')
  269. def old_cost_hourly():
  270. st = et = du.getNow()
  271. logging.info('消耗数据拉取,开始')
  272. run(st, et)
  273. logging.info('消耗数据拉取,结束')
  274. def old_cost_daily():
  275. st = du.get_n_days(-10)
  276. et = du.get_n_days(-1)
  277. run(st, et)
  278. if __name__ == '__main__':
  279. # run()
  280. # old_cost_daily()
  281. st = du.get_n_days(-30)
  282. et = du.get_n_days(0)
  283. print(st, et)
  284. get_daily_tt(st, et)