cost_util.py

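"""Pull campaign, ad, creative, image/video and daily cost data for GDT and
WeChat MP advertiser accounts from the Tencent Marketing API (api.e.qq.com)
and store the results in MySQL via MysqlUtils."""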
import json
import logging
import random
import time

import pandas
import pymysql
import requests
from concurrent.futures import ThreadPoolExecutor

from model.DataBaseUtils import MysqlUtils
from model.ComUtils import *
from model.DateUtils import DateUtils

du = DateUtils()
db = MysqlUtils()
max_workers = 10
count = []
t = du.get_n_days(-10)
def get_campaign(account_id, access_token, flag, campaign_ids, dt):
    """Fetch basic campaign info and upsert it into campaign_info."""
    path = 'campaigns/get'
    fields = ('campaign_id', 'campaign_name', 'configured_status', 'campaign_type', 'promoted_object_type',
              'daily_budget', 'budget_reach_date', 'created_time', 'last_modified_time', 'speed_mode', 'is_deleted')
    url = 'https://api.e.qq.com/v1.3/' + path
    li = []
    page = 1
    while True:
        parameters = {
            'access_token': access_token,
            'timestamp': int(time.time()),
            'nonce': str(time.time()) + str(random.randint(0, 999999)),
            'fields': fields,
            "filtering": [{
                "field": "campaign_id",
                "operator": "IN",
                "values": campaign_ids.split(',')
            }],
            "account_id": account_id,
            "page": page,
            "page_size": 100,
            "is_deleted": False
        }
        for k in parameters:
            if type(parameters[k]) is not str:
                parameters[k] = json.dumps(parameters[k])
        while True:
            r = requests.get(url, params=parameters).json()
            code = r['code']
            if code == 11017:  # rate limit hit, wait out the window and retry
                time.sleep(61)
            else:
                break
        # logging.info(r)
        total_page = r['data']['page_info']['total_page']
        if page > total_page:
            break
        else:
            page += 1
        if r.get("data"):
            for i in r['data']['list']:
                li.append((str(i['campaign_id']), i['campaign_name'], i['configured_status'], i['campaign_type'],
                           i['promoted_object_type'], i['daily_budget'], i.get('budget_reach_date'),
                           DateUtils.stamp_to_str(i['created_time']),
                           DateUtils.stamp_to_str(i['last_modified_time']), i.get('speed_mode'), i.get('is_deleted'),
                           account_id, flag, dt))
    # logging.info(li)
    # MP accounts do not return speed_mode, is_deleted or budget_reach_date
    if len(li) > 0:
        logging.info(f"{account_id} campaigns fetched: " + str(len(li)))
        sql = "replace into campaign_info values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
        db.quchen_text.executeMany(sql, li)
    db.close()
def get_adcreatives(account_id, access_token, flag, adc_ids, dt):  # fetch ad creatives
    # API: https://developers.e.qq.com/docs/api/adsmanagement/adcreatives/adcreatives_get?version=1.3
    url = 'https://api.e.qq.com/v1.1/adcreatives/get'
    li = []
    page = 1
    logging.info(f"{account_id} start fetching creatives")
    while True:
        parameters = {
            'access_token': access_token,
            'timestamp': int(time.time()),
            'nonce': str(time.time()) + str(random.randint(0, 999999)),
            'fields': ('campaign_id', 'adcreative_id', 'adcreative_name', 'adcreative_elements', 'promoted_object_type',
                       'page_type',
                       'page_spec', 'link_page_spec', 'universal_link_url', 'promoted_object_id', 'site_set'),
            "filtering": [{
                "field": "adcreative_id",
                "operator": "IN",
                "values": adc_ids.split(',')
            }],
            "account_id": account_id,
            "page": page,
            "page_size": 100,
            "is_deleted": False
        }
        for k in parameters:
            if type(parameters[k]) is not str:
                parameters[k] = json.dumps(parameters[k])
        while True:
            h = requests.get(url, params=parameters, timeout=1)
            # logging.info(h.json())
            if h.status_code == 200:
                r = h.json()
                # logging.info(r)
                break
            else:
                time.sleep(1)
                logging.info("fetch failed, waiting 1s")
        logging.info(f"{account_id} creatives collected")
        if 'data' in r.keys():
            is_video = 0
            for i in r['data']['list']:
                # logging.info(i)
                if flag == 'MP':
                    if len(i['adcreative_elements']) > 0:
                        d = i['adcreative_elements']
                        title = d.get('title', '')
                        description = d.get('description', '')
                        if 'image' in d.keys():
                            image = d.get('image', '')
                        elif 'image_list' in d.keys():
                            image = ','.join(d.get('image_list'))
                        elif 'video' in d.keys():
                            image = d['video']
                            is_video = 1
                        else:
                            image = ''
                    else:
                        title = image = description = ''
                    li.append((
                        i['adcreative_id'], i['adcreative_name'], i['campaign_id'], image, title,
                        i.get('promoted_object_type', ''), i.get('page_type', ''),
                        i['page_spec'].get('page_id', ''), i.get('promoted_object_id', ''),
                        '', description, 'MP', account_id, dt, is_video
                    ))
                else:
                    if len(i['adcreative_elements']) > 0:
                        d = i['adcreative_elements']
                        if 'image' in d.keys():
                            image = d['image']
                        elif 'element_story' in d.keys():
                            image = ','.join([x['image'] for x in d['element_story']])
                        else:
                            image = ''
                        title = d.get('title', '')
                        description = d.get('description', '')
                    else:
                        image = title = description = ''
                    li.append(
                        (
                            i['adcreative_id'], i['adcreative_name'], i['campaign_id'], image, title,
                            i.get('promoted_object_type', ''), i.get('page_type', ''),
                            i['page_spec'].get('page_id', ''), i.get('promoted_object_id', ''),
                            ','.join(i['site_set']), description, 'GDT', account_id, dt, is_video
                        )
                    )
            total_page = r['data']['page_info']['total_page']
            if total_page > page:
                page += 1
            else:
                break
        else:
            break
    logging.info(f"{account_id} creative parsing finished")
    logging.info(f"{account_id} creative fetch finished")
    if len(li) > 0:
        logging.info(f"{account_id} creatives fetched: " + str(len(li)))
        sql = 'replace into adcreative_info values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s) '
        db.quchen_text.executeMany(sql, li)
def images_info_get(account_id, access_token, image_ids):  # fetch image info
    # API: https://developers.e.qq.com/docs/api/business_assets/image/images_get?version=1.3
    # 1. check the database and drop ids we already have
    id_content = ','.join([''' '{}' '''.format(i) for i in image_ids.split(',')])
    id_content = id_content[:-1]
    sql = ''' select image_id from image_info vi
              where image_id in ({});'''.format(id_content)
    rs = db.quchen_text.getData(sql)
    id_all_set = set([i for i in image_ids.split(',') if len(i) > 0])
    id_have = set([i[0] for i in rs])
    image_ids = id_all_set - id_have
    fields = ('image_id', 'width', 'height', 'file_size', 'signature', 'preview_url')
    interface = 'images/get'
    url = 'https://api.e.qq.com/v1.3/' + interface
    page = 1
    li = []
    for image_id in image_ids:
        if len(image_id) < 1:
            continue
        while True:
            common_parameters = {
                'access_token': access_token,
                'timestamp': int(time.time()),
                'nonce': str(time.time()) + str(random.randint(0, 999999)),
                'fields': fields
            }
            parameters = {
                "account_id": account_id,
                "filtering": [{
                    "field": "image_id",
                    "operator": "IN",
                    "values": [image_id]
                }],
                "page": page,
                "page_size": 100
            }
            parameters.update(common_parameters)
            for k in parameters:
                if type(parameters[k]) is not str:
                    parameters[k] = json.dumps(parameters[k])
            while True:
                h = requests.get(url, params=parameters)
                # logging.info(h.text)
                if h.status_code == 200:
                    r = h.json()
                    break
                else:
                    time.sleep(1)
                    logging.info("request error, waiting 1s..")
            if 'data' in r.keys():
                li.extend(r['data']['list'])
            total_page = r['data']['page_info']['total_page']
            if total_page > page:
                page += 1
            else:
                break
    data = []
    for i in li:
        data.append(
            (i['image_id'], i['width'], i['height'], i['signature'], i['preview_url'], i['file_size']))
    logging.info(f"{account_id} new images: " + str(len(li)))
    if len(li) > 0:
        sql = "insert IGNORE into image_info (image_id,width,height,signature,preview_url,size) value (%s,%s,%s,%s,%s,%s)"
        db.quchen_text.executeMany(sql, data)
    db.close()
def video_info_get(account_id, access_token, image_ids):  # fetch video info
    # API: https://developers.e.qq.com/docs/api/business_assets/video/videos_get?version=1.3
    # 1. check the database to see which ids still need fetching
    id_content = ','.join([''' '{}' '''.format(i) for i in image_ids.split(',')])
    id_content = id_content[:-1]
    sql = ''' select video_id from video_info vi
              where video_id in ({});'''.format(id_content)
    rs = db.quchen_text.getData(sql)
    id_all_set = set([i for i in image_ids.split(',') if len(i) > 0])
    id_have = set([i[0] for i in rs])
    image_ids = id_all_set - id_have
    # 2. fetch the missing data from the API
    fields = ('video_id', 'width', 'height', 'file_size', 'signature', 'preview_url')
    interface = 'videos/get'
    url = 'https://api.e.qq.com/v1.3/' + interface
    page = 1
    li = []
    for image_id in image_ids:
        if len(image_id) < 1:
            continue
        while True:
            common_parameters = {
                'access_token': access_token,
                'timestamp': int(time.time()),
                'nonce': str(time.time()) + str(random.randint(0, 999999)),
                'fields': fields
            }
            parameters = {
                "account_id": account_id,
                "filtering": [{
                    "field": "media_id",
                    "operator": "IN",
                    "values": [image_id]
                }],
                "page": page,
                "page_size": 100
            }
            parameters.update(common_parameters)
            for k in parameters:
                if type(parameters[k]) is not str:
                    parameters[k] = json.dumps(parameters[k])
            while True:
                h = requests.get(url, params=parameters)
                # logging.info(h.text)
                if h.status_code == 200:
                    r = h.json()
                    break
                else:
                    time.sleep(1)
                    logging.info("request error, waiting 1s..")
            if 'data' in r.keys():
                li.extend(r['data']['list'])
            total_page = r['data']['page_info']['total_page']
            if total_page > page:
                page += 1
            else:
                break
    data = []
    for i in li:
        data.append((i['video_id'], i['width'], i['height'], i['signature'],
                     i['preview_url'], i['file_size']))
    logging.info(f"{account_id} new videos: " + str(len(li)))
    if len(li) > 0:
        sql = "insert IGNORE into video_info (video_id,width,height,signature,preview_url,size) value (%s,%s,%s,%s,%s,%s)"
        db.quchen_text.executeMany(sql, data)
    db.close()
def ad_info():
    accounts = db.quchen_text.getData("""
        select account_id,access_token,name channel,'GDT' type from advertiser_qq where name !='' or name is not null
        union
        select account_id,access_token,name channel,'MP' type from advertiser_vx where name !='' or name is not null
    """)
    total_data = []
    executor = ThreadPoolExecutor(max_workers=max_workers)
    for i in accounts:
        # logging.info(i)
        account_id = i[0]
        access_token = i[1]
        flag = i[3]
        # NOTE: get_ad_info expects (account_id, access_token, flag, ad_ids, dt);
        # this call site passes only four arguments and looks stale.
        executor.submit(get_ad_info, account_id, access_token, flag, total_data)
    executor.shutdown()
    logging.info(len(total_data))
    if len(total_data) > 0:
        sql = "replace into ad_info values(%s,%s,%s,%s,%s,%s,%s) "
        db.quchen_text.executeMany(sql, total_data)
def get_ad_info(account_id, access_token, flag, ad_ids, dt):
    """Fetch basic ad info.

    API: https://developers.e.qq.com/docs/apilist/ads/ad?version=1.3#a3
    """
    path = 'ads/get'
    fields = ('ad_id', 'ad_name', 'adcreative_id', 'adgroup_id', 'campaign_id')
    url = 'https://api.e.qq.com/v1.3/' + path
    li = []
    page = 1
    while True:
        parameters = {
            'access_token': access_token,
            'timestamp': int(time.time()),
            'nonce': str(time.time()) + str(random.randint(0, 999999)),
            'fields': fields,
            "filtering": [{
                "field": "ad_id",
                "operator": "IN",
                "values": ad_ids.split(',')
            }],
            "account_id": account_id,
            "page": page,
            "page_size": 100,
            "is_deleted": False
        }
        for k in parameters:
            if type(parameters[k]) is not str:
                parameters[k] = json.dumps(parameters[k])
        while True:
            r = requests.get(url, params=parameters).json()
            code = r['code']
            if code == 11017:  # rate limit hit, wait out the window and retry
                time.sleep(61)
            else:
                break
        # logging.info(r)
        total_page = r['data']['page_info']['total_page']
        if page > total_page:
            break
        else:
            page += 1
        if r.get("data"):
            for i in r['data']['list']:
                li.append((str(i['ad_id']), i['ad_name'], i['adcreative_id'], i['campaign_id'], i['adgroup_id'],
                           account_id, flag, dt))
    if len(li) > 0:
        logging.info(f"{account_id} ads fetched: " + str(len(li)))
        sql = "replace into ad_info values(%s,%s,%s,%s,%s,%s,%s,%s) "
        db.quchen_text.executeMany(sql, li)
    db.close()
def get_ad_cost_day(account_id, access_token, flag, st, et):
    if flag == 'MP':
        ad_cost_day_mp(account_id, access_token, st, et)
    else:
        ad_cost_day_gdt(account_id, access_token, st, et)
def ad_cost_day_gdt(account_id, access_token, st, et):
    # API doc: https://developers.e.qq.com/docs/api/insights/ad_insights/daily_reports_get?version=1.3
    url = 'https://api.e.qq.com/v1.3/daily_reports/get'
    fields = (
        'date', 'ad_id', 'adgroup_id', 'cost', 'view_count', 'ctr', 'follow_count', 'web_order_count', 'order_amount')
    li = []
    page = 1
    while True:
        parameters = {
            'access_token': access_token,
            'timestamp': int(time.time()),
            'nonce': str(time.time()) + str(random.randint(0, 999999)),
            'fields': fields,
            "account_id": account_id,
            "group_by": ['ad_id', 'date'],
            "level": 'REPORT_LEVEL_AD',
            "page": page,
            "page_size": 1000,
            "date_range": {
                "start_date": st,
                "end_date": et
            }
        }
        for k in parameters:
            if type(parameters[k]) is not str:
                parameters[k] = json.dumps(parameters[k])
        while True:
            r = requests.get(url, params=parameters)
            r = r.json()
            # logging.info(r)
            code = r['code']
            if code == 11017:  # rate limit hit, wait out the window and retry
                time.sleep(61)
            else:
                break
        if r.get("data"):
            for i in r['data']['list']:
                if i['cost'] > 0:
                    li.append(
                        (
                            i['date'], i['ad_id'], i['adgroup_id'], i['cost'] / 100, i['view_count'],
                            i['ctr'] * i['view_count'],  # click count derived from ctr * impressions
                            i['follow_count'], i['web_order_count'], i['order_amount'] / 100, account_id, 'GDT'
                        )
                    )
        total_page = r['data']['page_info']['total_page']
        if page >= total_page:
            break
        else:
            page += 1
    # logging.info(li)
    if len(li) > 0:
        logging.info(f"{account_id} have ad cost :{len(li)} ")
        db.quchen_text.executeMany('replace into ad_cost_day values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)', li)
    db.close()
def ad_cost_day_mp(account_id, access_token, st, et):
    # API doc: https://developers.e.qq.com/docs/api/insights/ad_insights/daily_reports_get?version=1.3
    url = 'https://api.e.qq.com/v1.3/daily_reports/get'
    fields = ('date', 'ad_id', 'adgroup_id', 'cost', 'view_count', 'valid_click_count', 'official_account_follow_count',
              'order_count', 'order_amount')
    li = []
    page = 1
    while True:
        parameters = {
            'access_token': access_token,
            'timestamp': int(time.time()),
            'nonce': str(time.time()) + str(random.randint(0, 999999)),
            'fields': fields,
            "account_id": account_id,
            "level": 'REPORT_LEVEL_AD_WECHAT',
            "page": page,
            "page_size": 1000,
            "date_range": {
                "start_date": st,
                "end_date": et
            }
        }
        for k in parameters:
            if type(parameters[k]) is not str:
                parameters[k] = json.dumps(parameters[k])
        while True:
            r = requests.get(url, params=parameters)
            r = r.json()
            # logging.info(r['data']['list'])
            # import pandas as pd
            # logging.info(pd.DataFrame(r['data']['list']))
            code = r['code']
            if code == 11017:  # rate limit hit, wait out the window and retry
                time.sleep(61)
            else:
                break
        if r.get("data"):
            for i in r['data']['list']:
                if i['cost'] > 0:
                    li.append(
                        (
                            i['date'], i['ad_id'], i['adgroup_id'], i['cost'] / 100, i['view_count'],
                            i['valid_click_count'],
                            i['official_account_follow_count'], i['order_count'], i['order_amount'] / 100, account_id,
                            'MP'
                        )
                    )
        total_page = r['data']['page_info']['total_page']
        if page >= total_page:
            break
        else:
            page += 1
    # logging.info(li)
    # exit()
    if len(li) > 0:
        # TODO: confirm what adgroup_id / campaign_id are used for here
        # The WeChat report can return several rows per (date, ad_id); group them
        # and sum the metric columns before writing to ad_cost_day.
        li_df = pandas.DataFrame(li)
        li_df_g = li_df.groupby([0, 1, 9, 10])  # group by date, ad_id, account_id, flag
        li_new = []
        for index, row in li_df_g.agg('sum').iterrows():
            new_row = row.tolist()
            new_row = list(index[0:2]) + new_row[1:] + list(index[2:])
            li_new.append(tuple(new_row))
        logging.info(f"{account_id} have ad cost :{len(li_new)} ")
        db.quchen_text.executeMany('replace into ad_cost_day values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)', li_new)
    db.close()
def daily_reports_get(access_token, account_id, level, start_date, end_date, fields):
    """Fetch WeChat (MP) campaign daily report data."""
    interface = 'daily_reports/get'
    url = 'https://api.e.qq.com/v1.3/' + interface
    common_parameters = {
        'access_token': access_token,
        'timestamp': int(time.time()),
        'nonce': str(time.time()) + str(random.randint(0, 999999)),
        'fields': fields
    }
    parameters = {
        "account_id": account_id,
        "level": level,
        "date_range":
            {
                "start_date": start_date,
                "end_date": end_date
            },
        "page": 1,
        "page_size": 1000,
        "fields":
            [
            ]
    }
    parameters.update(common_parameters)
    for k in parameters:
        if type(parameters[k]) is not str:
            parameters[k] = json.dumps(parameters[k])
    while True:
        r = requests.get(url, params=parameters)
        if r.status_code == 200:
            break
        else:
            time.sleep(1)
            logging.info("request error, waiting 1s..")
    return r.json()
def daily_qq_reports_get(access_token, account_id, campaign_id, level, start_date, end_date, fields):
    """Fetch GDT campaign daily report data."""
    interface = 'daily_reports/get'
    url = 'https://api.e.qq.com/v1.1/' + interface
    common_parameters = {
        'access_token': access_token,
        'timestamp': int(time.time()),
        'nonce': str(time.time()) + str(random.randint(0, 999999)),
        'fields': fields
    }
    parameters = {
        "account_id": account_id,
        "filtering":
            [
                {
                    "field": "campaign_id",
                    "operator": "EQUALS",
                    "values":
                        [
                            campaign_id
                        ]
                }
            ],
        "level": level,
        "date_range":
            {
                "start_date": start_date,
                "end_date": end_date
            },
        "page": 1,
        "page_size": 1000,
        "fields":
            [
            ]
    }
    parameters.update(common_parameters)
    for k in parameters:
        if type(parameters[k]) is not str:
            parameters[k] = json.dumps(parameters[k])
    r = requests.get(url, params=parameters)
    return r.json()
def mysql_insert_adcreative(data):
    db = pymysql.connect(host='rm-bp1c9cj79872tx3aaro.mysql.rds.aliyuncs.com', user='superc',
                         password='Cc719199895', database='quchen_text')
    cursor = db.cursor()
    sql = 'replace into adcreative (campaign_id,adcreative_id,adcreative_name,image_id,title,promoted_object_type,page_type,page_id,link_page_id,promoted_object_id) values (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)'
    try:
        cursor.executemany(sql, data)
        db.commit()
        logging.info('insert [adcreative] ' + str(len(data)))
    except Exception:
        db.rollback()
        logging.info('insert [adcreative] failed')
if __name__ == '__main__':
    account_id = 19206910
    access_token = '89079ccc8db047b078a0108e36a7e276'
    #
    account_id2 = 14709511
    access_token2 = 'e87f7b6f860eaeef086ddcc9c3614678'

    get_ad_cost_day(account_id, access_token, 'MP', '2021-04-09', '2021-04-09')
    # get_adcreatives(account_id,access_token,'MP','3187867673','2021-04-09')