#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Fri Jun 5 17:00:45 2020
@author: chencong
"""
import json
import random
import requests
import time
import pandas as pd
import pymysql
from apscheduler.schedulers.blocking import BlockingScheduler
import datetime
import token_list as tl
import importlib

importlib.reload(tl)
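
# Assumed layout of token_list (inferred from the x[0]/x[2] indexing used below, not from the
# module itself): tl.token_list_vx and tl.token_list_qq are lists of per-account tuples shaped
# roughly like (account_id, <unused>, access_token) for WeChat (vx) and GDT/QQ accounts.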


def adcreatives_get(access_token, account_id, fields):  # fetch ad creatives
    interface = 'adcreatives/get'
    url = 'https://api.e.qq.com/v1.1/' + interface
    page = 1
    list1 = []
    common_parameters = {
        'access_token': access_token,
        'timestamp': int(time.time()),
        'nonce': str(time.time()) + str(random.randint(0, 999999)),
        'fields': fields
    }
    parameters = {
        "account_id": account_id,
        "page": page,
        "page_size": 100,
        "is_deleted": False
    }
    parameters.update(common_parameters)
    # non-string values must be JSON-encoded before being sent as query parameters
    for k in parameters:
        if type(parameters[k]) is not str:
            parameters[k] = json.dumps(parameters[k])
    r = requests.get(url, params=parameters).json()
    if 'data' in r.keys():
        list1 = list1 + r['data']['list']
        total_page = r['data']['page_info']['total_page']
        if total_page > 1:
            # fetch the remaining pages
            for page in range(2, total_page + 1):
                common_parameters = {
                    'access_token': access_token,
                    'timestamp': int(time.time()),
                    'nonce': str(time.time()) + str(random.randint(0, 999999)),
                    'fields': fields
                }
                parameters = {
                    "account_id": account_id,
                    "page": page,
                    "page_size": 100,
                    "is_deleted": False
                }
                parameters.update(common_parameters)
                for k in parameters:
                    if type(parameters[k]) is not str:
                        parameters[k] = json.dumps(parameters[k])
                r = requests.get(url, params=parameters).json()
                if 'data' in r.keys():
                    list1 = list1 + r['data']['list']
    return list1
#print(adcreatives_get('3bbbae77bed9fcde94cc0f1742a18c6e',11436446,('campaign_id','adcreative_id','adcreative_name','adcreative_elements','promoted_object_type','page_type','page_spec','link_page_spec','universal_link_url','promoted_object_id')))
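

# The paginated fetchers in this file (adcreatives_get above plus images_get and campaigns_get
# further down) repeat the same request/pagination loop. The helper below is a minimal sketch of
# that shared pattern; it is not called anywhere in this script and assumes the same response
# contract the existing code relies on: r['data']['list'] holds the rows and
# r['data']['page_info']['total_page'] holds the page count.
def paged_get(access_token, account_id, interface, fields, extra=None):
    """Sketch: fetch every page of `interface` and return the concatenated 'list' rows."""
    url = 'https://api.e.qq.com/v1.1/' + interface
    rows, page, total_page = [], 1, 1
    while page <= total_page:
        parameters = {
            'access_token': access_token,
            'timestamp': int(time.time()),
            'nonce': str(time.time()) + str(random.randint(0, 999999)),
            'fields': fields,
            'account_id': account_id,
            'page': page,
            'page_size': 100,
        }
        parameters.update(extra or {})
        # non-string values are JSON-encoded, exactly as in the existing fetchers
        for k in parameters:
            if not isinstance(parameters[k], str):
                parameters[k] = json.dumps(parameters[k])
        r = requests.get(url, params=parameters).json()
        if 'data' not in r:
            break
        rows += r['data']['list']
        total_page = r['data']['page_info']['total_page']
        page += 1
    return rows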


def ads_get(access_token, account_id, fields):  # fetch ads
    interface = 'ads/get'
    url = 'https://api.e.qq.com/v1.1/' + interface
    common_parameters = {
        'access_token': access_token,
        'timestamp': int(time.time()),
        'nonce': str(time.time()) + str(random.randint(0, 999999)),
        'fields': fields
    }
    parameters = {
        "account_id": account_id,
        "page": 1,
        "page_size": 10,
        "is_deleted": False
    }
    parameters.update(common_parameters)
    for k in parameters:
        if type(parameters[k]) is not str:
            parameters[k] = json.dumps(parameters[k])
    r = requests.get(url, params=parameters)
    return r.json()
#print(ads_get('2a674bef201314d338be30420369671f',14985162,('ad_id','ad_name','adcreative_id','adcreative')))


def wechat_pages_get(access_token, account_id, page_id, fields):  # fetch WeChat native pages
    interface = 'wechat_pages/get'
    url = 'https://api.e.qq.com/v1.1/' + interface
    common_parameters = {
        'access_token': access_token,
        'timestamp': int(time.time()),
        'nonce': str(time.time()) + str(random.randint(0, 999999)),
        'fields': fields
    }
    parameters = {
        "account_id": account_id,
        "filtering": [
            {
                "field": "page_id",
                "operator": "EQUALS",
                "values": [page_id]
            }
        ],
        "page": 1,
        "page_size": 10
    }
    parameters.update(common_parameters)
    for k in parameters:
        if type(parameters[k]) is not str:
            parameters[k] = json.dumps(parameters[k])
    r = requests.get(url, params=parameters)
    return r.json()
#print(wechat_pages_get('2a674bef201314d338be30420369671f',14985162,1900495593,('page_id','page_name','created_time','last_modified_time','page_template_id','preview_url','page_type','source_type')))


def adgroups_get(access_token, account_id, fields):  # fetch ad groups
    interface = 'adgroups/get'
    url = 'https://api.e.qq.com/v1.1/' + interface
    common_parameters = {
        'access_token': access_token,
        'timestamp': int(time.time()),
        'nonce': str(time.time()) + str(random.randint(0, 999999)),
        'fields': fields
    }
    parameters = {
        "account_id": account_id,
        "page": 4,
        "page_size": 100,
        "is_deleted": False
    }
    parameters.update(common_parameters)
    for k in parameters:
        if type(parameters[k]) is not str:
            parameters[k] = json.dumps(parameters[k])
    r = requests.get(url, params=parameters)
    return r.json()
#print(adgroups_get('2a674bef201314d338be30420369671f',14985162,('campaign_id','adgroup_id','adgroup_name','optimization_goal','billing_event','bid_amount','daily_budget','targeting','begin_date','end_date','time_series','bid_strategy','cold_start_audience','auto_audience','expand_enabled','expand_targeting','deep_conversion_spec','deep_optimization_action_type','conversion_id','deep_conversion_behavior_bid','deep_conversion_worth_rate','system_status')))


def images_get(access_token, account_id, fields):  # fetch image info
    interface = 'images/get'
    url = 'https://api.e.qq.com/v1.1/' + interface
    page = 1
    list1 = []
    common_parameters = {
        'access_token': access_token,
        'timestamp': int(time.time()),
        'nonce': str(time.time()) + str(random.randint(0, 999999)),
        'fields': fields
    }
    parameters = {
        "account_id": account_id,
        "page": page,
        "page_size": 100
    }
    parameters.update(common_parameters)
    for k in parameters:
        if type(parameters[k]) is not str:
            parameters[k] = json.dumps(parameters[k])
    r = requests.get(url, params=parameters).json()
    if 'data' in r.keys():
        list1 = list1 + r['data']['list']
        total_page = r['data']['page_info']['total_page']
        if total_page > 1:
            # fetch the remaining pages
            for page in range(2, total_page + 1):
                common_parameters = {
                    'access_token': access_token,
                    'timestamp': int(time.time()),
                    'nonce': str(time.time()) + str(random.randint(0, 999999)),
                    'fields': fields
                }
                parameters = {
                    "account_id": account_id,
                    "page": page,
                    "page_size": 100
                }
                parameters.update(common_parameters)
                for k in parameters:
                    if type(parameters[k]) is not str:
                        parameters[k] = json.dumps(parameters[k])
                r = requests.get(url, params=parameters).json()
                if 'data' in r.keys():
                    list1 = list1 + r['data']['list']
    return list1
#print(images_get('2a674bef201314d338be30420369671f',14985162,('image_id','preview_url')))


def campaigns_get(access_token, account_id, fields):  # fetch campaigns
    interface = 'campaigns/get'
    url = 'https://api.e.qq.com/v1.1/' + interface
    page = 1
    list1 = []
    common_parameters = {
        'access_token': access_token,
        'timestamp': int(time.time()),
        'nonce': str(time.time()) + str(random.randint(0, 999999)),
        'fields': fields
    }
    parameters = {
        "account_id": account_id,
        "page": page,
        "page_size": 100,
        "is_deleted": False
    }
    parameters.update(common_parameters)
    for k in parameters:
        if type(parameters[k]) is not str:
            parameters[k] = json.dumps(parameters[k])
    r = requests.get(url, params=parameters).json()
    if 'data' in r.keys():
        list1 = list1 + r['data']['list']
        total_page = r['data']['page_info']['total_page']
        if total_page > 1:
            # fetch the remaining pages
            for page in range(2, total_page + 1):
                common_parameters = {
                    'access_token': access_token,
                    'timestamp': int(time.time()),
                    'nonce': str(time.time()) + str(random.randint(0, 999999)),
                    'fields': fields
                }
                parameters = {
                    "account_id": account_id,
                    "page": page,
                    "page_size": 100,
                    "is_deleted": False
                }
                parameters.update(common_parameters)
                for k in parameters:
                    if type(parameters[k]) is not str:
                        parameters[k] = json.dumps(parameters[k])
                r = requests.get(url, params=parameters).json()
                if 'data' in r.keys():
                    list1 = list1 + r['data']['list']
    return list1
#aa=tl.token_list_vx[-2]
#print(campaigns_get(aa[2],aa[0],('campaign_id','campaign_name','configured_status','campaign_type','promoted_object_type','daily_budget','budget_reach_date','created_time','last_modified_time','speed_mode','is_deleted')))


def daily_reports_get(access_token, account_id, level, start_date, end_date, fields):  # fetch WeChat campaign daily report data
    interface = 'daily_reports/get'
    url = 'https://api.e.qq.com/v1.1/' + interface
    common_parameters = {
        'access_token': access_token,
        'timestamp': int(time.time()),
        'nonce': str(time.time()) + str(random.randint(0, 999999)),
        'fields': fields
    }
    parameters = {
        "account_id": account_id,
        "level": level,
        "date_range": {
            "start_date": start_date,
            "end_date": end_date
        },
        "page": 1,
        "page_size": 1000,
        "fields": []  # overwritten by common_parameters['fields'] in the update below
    }
    parameters.update(common_parameters)
    for k in parameters:
        if type(parameters[k]) is not str:
            parameters[k] = json.dumps(parameters[k])
    r = requests.get(url, params=parameters)
    return r.json()


def daily_qq_reports_get(access_token, account_id, campaign_id, level, start_date, end_date, fields):  # fetch GDT (QQ) campaign daily report data
    interface = 'daily_reports/get'
    url = 'https://api.e.qq.com/v1.1/' + interface
    common_parameters = {
        'access_token': access_token,
        'timestamp': int(time.time()),
        'nonce': str(time.time()) + str(random.randint(0, 999999)),
        'fields': fields
    }
    parameters = {
        "account_id": account_id,
        "filtering": [
            {
                "field": "campaign_id",
                "operator": "EQUALS",
                "values": [campaign_id]
            }
        ],
        "level": level,
        "date_range": {
            "start_date": start_date,
            "end_date": end_date
        },
        "page": 1,
        "page_size": 1000,
        "fields": []  # overwritten by common_parameters['fields'] in the update below
    }
    parameters.update(common_parameters)
    for k in parameters:
        if type(parameters[k]) is not str:
            parameters[k] = json.dumps(parameters[k])
    r = requests.get(url, params=parameters)
    return r.json()
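# Note: both report helpers above return the raw parsed JSON rather than a flat list; the callers
# below (get_daily_vx_campaign / get_daily_qq_campaign) check for a 'data' key and read
# data['list'] themselves, and no retry or error-payload handling is attempted here.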


def mysql_insert_daily_vx_campaign(data):
    db = pymysql.connect(host='rm-bp1c9cj79872tx3aaro.mysql.rds.aliyuncs.com',
                         user='superc', password='Cc719199895', database='quchen_text')
    cursor = db.cursor()
    time1 = time.time()
    sql = 'insert ignore into daily_vx_campaign (account_id,date,campaign_id,view_count,cost,ctr,cpc,order_roi,thousand_display_price,valid_click_count,official_account_follow_count,conversions_count,official_account_follow_rate,conversions_rate,order_count,order_rate,order_unit_price,first_day_order_amount) values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s);'
    try:
        cursor.executemany(sql, data)
        db.commit()
        cost_time = round((time.time() - time1) / 60, 1)
        print('insert_daily_vx_campaign succeeded', len(data), 'cost_minutes:', cost_time)
    except Exception as e:
        db.rollback()
        print('insert_daily_vx_campaign failed:', e)
    finally:
        db.close()


def mysql_insert_daily_qq_campaign(data):
    db = pymysql.connect(host='rm-bp1c9cj79872tx3aaro.mysql.rds.aliyuncs.com',
                         user='superc', password='Cc719199895', database='quchen_text')
    cursor = db.cursor()
    time1 = time.time()
    sql = 'insert ignore into daily_qq_campaign (account_id,date,campaign_id,view_count,thousand_display_price,valid_click_count,ctr,cpc,cost,order_roi) values (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)'
    try:
        cursor.executemany(sql, data)
        db.commit()
        cost_time = round((time.time() - time1) / 60, 1)
        print('insert_daily_qq_campaign succeeded', len(data), 'cost_minutes:', cost_time)
    except Exception as e:
        db.rollback()
        print('insert_daily_qq_campaign failed:', e)
    finally:
        db.close()
#print(daily_reports_get('2a674bef201314d338be30420369671f',14985162,'REPORT_LEVEL_CAMPAIGN_WECHAT','2020-07-20','2020-07-20',('account_id','date','campaign_id','view_count','cost','ctr','cpc','order_roi','thousand_display_price','valid_click_count','official_account_follow_count','conversions_count','official_account_follow_rate','conversions_rate','order_count','order_rate','order_unit_price','first_day_order_amount')))
#print(daily_reports_get('27b2f2768640555133162b5982872b83',15223385,'REPORT_LEVEL_CAMPAIGN','2020-07-10','2020-07-19',('account_id','date','campaign_id','view_count','thousand_display_price','valid_click_count','ctr','cpc','cost','order_roi')))


def mysql_insert_campaign_vx(data, data2):
    db = pymysql.connect(host='rm-bp1c9cj79872tx3aaro.mysql.rds.aliyuncs.com',
                         user='superc', password='Cc719199895', database='quchen_text')
    cursor = db.cursor()
    time1 = time.time()
    sql = 'insert ignore into campaign_vx (campaign_id,campaign_name,configured_status,campaign_type,promoted_object_type,daily_budget,created_time,last_modified_time,account_id) values (%s,%s,%s,%s,%s,%s,%s,%s,%s)'
    sql2 = 'delete from campaign_vx where campaign_id =%s '
    # delete stale rows first, then insert the refreshed campaigns
    try:
        cursor.executemany(sql2, data2)
        db.commit()
        print('delete campaign_vx succeeded', len(data2))
    except Exception as e:
        db.rollback()
        print('delete campaign_vx failed:', e)
    try:
        cursor.executemany(sql, data)
        db.commit()
        cost_time = round((time.time() - time1) / 60, 1)
        print('insert_campaign_vx succeeded', len(data), 'cost_minutes:', cost_time)
    except Exception as e:
        db.rollback()
        print('insert_campaign_vx failed:', e)
    finally:
        db.close()


def mysql_insert_adcreative(data):
    db = pymysql.connect(host='rm-bp1c9cj79872tx3aaro.mysql.rds.aliyuncs.com',
                         user='superc', password='Cc719199895', database='quchen_text')
    cursor = db.cursor()
    time1 = time.time()
    sql = 'insert ignore into adcreative (campaign_id,adcreative_id,adcreative_name,image_id,title,promoted_object_type,page_type,page_id,link_page_id,promoted_object_id) values (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)'
    try:
        cursor.executemany(sql, data)
        db.commit()
        cost_time = round((time.time() - time1) / 60, 1)
        print('insert_adcreative succeeded', len(data), 'cost_minutes:', cost_time)
    except Exception as e:
        db.rollback()
        print('insert_adcreative failed:', e)
    finally:
        db.close()


def mysql_insert_image(data):
    db = pymysql.connect(host='rm-bp1c9cj79872tx3aaro.mysql.rds.aliyuncs.com',
                         user='superc', password='Cc719199895', database='quchen_text')
    cursor = db.cursor()
    time1 = time.time()
    sql = 'insert ignore into image (image_id,preview_url,account_id) values (%s,%s,%s)'
    try:
        cursor.executemany(sql, data)
        db.commit()
        cost_time = round((time.time() - time1) / 60, 1)
        print('insert image succeeded', len(data), 'cost_minutes:', cost_time)
    except Exception as e:
        db.rollback()
        print('insert image failed:', e)
    finally:
        db.close()
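

# The mysql_insert_* functions above all share the same connect/executemany/commit/rollback
# boilerplate. The helper below is a sketch of how that could be factored out; it is not wired
# into the existing functions and simply reuses the connection settings they hard-code.
def mysql_executemany(sql, rows, label):
    """Sketch: run `sql` for every row in `rows`, committing on success and rolling back on error."""
    db = pymysql.connect(host='rm-bp1c9cj79872tx3aaro.mysql.rds.aliyuncs.com',
                         user='superc', password='Cc719199895', database='quchen_text')
    cursor = db.cursor()
    t0 = time.time()
    try:
        cursor.executemany(sql, rows)
        db.commit()
        print(label, 'succeeded', len(rows), 'rows, cost_minutes:', round((time.time() - t0) / 60, 1))
    except Exception as e:
        db.rollback()
        print(label, 'failed:', e)
    finally:
        db.close()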


def get_daily_vx_campaign(st, et):  # fetch campaigns and daily report data
    token_list_v = tl.token_list_vx
    r = ()
    p = ()
    q = []
    for x in token_list_v:
        account_id = x[0]
        access_token = x[2]
        start_date = time.strftime("%Y-%m-%d", time.localtime(st))
        end_date = time.strftime("%Y-%m-%d", time.localtime(et))
        l = campaigns_get(access_token, account_id, ('campaign_id','campaign_name','configured_status','campaign_type','promoted_object_type','daily_budget','budget_reach_date','created_time','last_modified_time','speed_mode','is_deleted'))
        if len(l) > 0:
            for ll in l:
                ll['account_id'] = account_id
                # only campaigns created or modified since st are refreshed
                if ll['created_time'] > st or ll['last_modified_time'] > st:
                    q.append(ll['campaign_id'])
                    lt = tuple(ll.values())
                    p = p + (lt,)
        data_list = daily_reports_get(access_token, account_id, 'REPORT_LEVEL_CAMPAIGN_WECHAT', start_date, end_date, ('account_id','date','campaign_id','view_count','cost','ctr','cpc','order_roi','thousand_display_price','valid_click_count','official_account_follow_count','conversions_count','official_account_follow_rate','conversions_rate','order_count','order_rate','order_unit_price','first_day_order_amount'))
        if 'data' in data_list.keys():
            for y in data_list['data']['list']:
                y['account_id'] = account_id
                y = tuple(y.values())
                r = r + (y,)
    mysql_insert_daily_vx_campaign(r)
    mysql_insert_campaign_vx(p, q)
#get_daily_vx_campaign(1597766400,1597852800)


def get_daily_qq_campaign(st, et):
    token_list_q = tl.token_list_qq
    r = ()
    for x in token_list_q:
        account_id = x[0]
        access_token = x[2]
        start_date = st
        end_date = et
        l = campaigns_get(access_token, account_id, ('campaign_id','campaign_name','configured_status','campaign_type','promoted_object_type','daily_budget','budget_reach_date','created_time','last_modified_time','speed_mode','is_deleted'))
        for ll in l:
            campaign_id = ll['campaign_id']
            data_list = daily_qq_reports_get(access_token, account_id, campaign_id, 'REPORT_LEVEL_CAMPAIGN', start_date, end_date, ('account_id','date','campaign_id','view_count','thousand_display_price','valid_click_count','ctr','cpc','cost','order_roi'))
            if 'data' in data_list.keys():  # guard before indexing into 'data'
                if len(data_list['data']['list']) > 0:
                    print(data_list)
                    print(l)
                for y in data_list['data']['list']:
                    y = tuple(y.values())
                    r = r + (y,)
    #mysql_insert_daily_qq_campaign(r)
#get_daily_vx_campaign('2020-01-01','2020-07-27')
#get_daily_qq_campaign('2020-07-10','2020-07-24')


def get_campaign_update_list():
    db = pymysql.connect(host='rm-bp1c9cj79872tx3aaro.mysql.rds.aliyuncs.com',
                         user='superc', password='Cc719199895', database='quchen_text')
    #db = pymysql.connect('localhost','root','chencong1996','quchen_text')
    cursor = db.cursor()
    sql = 'select distinct advertiser_vx.account_id,access_token from campaign_vx left join advertiser_vx on advertiser_vx.account_id = campaign_vx.account_id where created_time>=%s or last_modified_time>=%s'
    # yesterday 00:00 (UTC+8) as a Unix timestamp, used for both placeholders
    data = (int((time.time() + 8 * 3600) // 86400 * 86400 - 8 * 3600 - 86400),
            int((time.time() + 8 * 3600) // 86400 * 86400 - 8 * 3600 - 86400))
    x = ()
    try:
        cursor.execute(sql, data)
        db.commit()
        x = cursor.fetchall()
        print('get campaign update list succeeded', x)
    except Exception as e:
        db.rollback()
        print('get campaign update list failed:', e)
    a = []
    if len(x) > 0:
        for t in x:
            a.append(t[0])
        sql2 = 'delete from adcreative where campaign_id=%s'
        try:
            cursor.executemany(sql2, a)
            db.commit()
            y = cursor.fetchall()
            print('delete adcreative succeeded', y)
        except Exception as e:
            db.rollback()
            print('delete adcreative failed:', e)
    return x


def get_adcreative_vx():
    token_list_vx = get_campaign_update_list()
    r = ()
    for x in token_list_vx:
        account_id = x[0]
        access_token = x[1]
        l = adcreatives_get(access_token, account_id, ('campaign_id','adcreative_id','adcreative_name','adcreative_elements','promoted_object_type','page_type','page_spec','link_page_spec','universal_link_url','promoted_object_id'))
        if len(l) > 0:
            for ll in l:
                if 'image_list' in ll['adcreative_elements'].keys():
                    # multi-image creative: one row per image_id
                    for image_id in ll['adcreative_elements']['image_list']:
                        a = {}
                        a['campaign_id'] = ll['campaign_id']
                        a['adcreative_id'] = ll['adcreative_id']
                        a['adcreative_name'] = ll['adcreative_name']
                        a['image_id'] = image_id
                        a['title'] = ll['adcreative_elements']['title']
                        a['promoted_object_type'] = ll['promoted_object_type']
                        a['page_type'] = ll['page_type']
                        if 'page_spec' in ll.keys():
                            if 'page_id' in ll['page_spec'].keys():
                                a['page_id'] = ll['page_spec']['page_id']
                            else:
                                a['page_id'] = None
                        else:
                            a['page_id'] = None
                        if 'link_page_spec' in ll.keys():
                            if 'page_id' in ll['link_page_spec'].keys():
                                a['link_page_id'] = ll['link_page_spec']['page_id']
                            else:
                                a['link_page_id'] = None
                        else:
                            a['link_page_id'] = None
                        a['promoted_object_id'] = ll['promoted_object_id']
                        y = tuple(a.values())
                        r = r + (y,)
                elif 'image' in ll['adcreative_elements'].keys():
                    # single-image creative
                    a = {}
                    a['campaign_id'] = ll['campaign_id']
                    a['adcreative_id'] = ll['adcreative_id']
                    a['adcreative_name'] = ll['adcreative_name']
                    a['image_id'] = ll['adcreative_elements']['image']
                    if 'title' in ll['adcreative_elements']:
                        a['title'] = ll['adcreative_elements']['title']
                    else:
                        a['title'] = ''
                    a['promoted_object_type'] = ll['promoted_object_type']
                    a['page_type'] = ll['page_type']
                    if 'page_spec' in ll.keys():
                        if 'page_id' in ll['page_spec'].keys():
                            a['page_id'] = ll['page_spec']['page_id']
                        else:
                            a['page_id'] = None
                    else:
                        a['page_id'] = None
                    if 'link_page_spec' in ll.keys():
                        if 'page_id' in ll['link_page_spec'].keys():
                            a['link_page_id'] = ll['link_page_spec']['page_id']
                        else:
                            a['link_page_id'] = None
                    else:
                        a['link_page_id'] = None
                    a['promoted_object_id'] = ll['promoted_object_id']
                    y = tuple(a.values())
                    r = r + (y,)
    mysql_insert_adcreative(r)
#get_adcreative_vx()
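

# The two branches of get_adcreative_vx duplicate the page_spec / link_page_spec lookups.
# The helper below sketches how that lookup could be shared; it is illustrative only and is
# not called by get_adcreative_vx as written.
def _page_ids_from(ll):
    """Sketch: return (page_id, link_page_id) from a creative dict, or None where absent."""
    page_id = ll.get('page_spec', {}).get('page_id')
    link_page_id = ll.get('link_page_spec', {}).get('page_id')
    return page_id, link_page_id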


def get_image_information():
    token_list_vx = tl.token_list_vx
    r = ()
    for x in token_list_vx:
        account_id = x[0]
        access_token = x[2]
        l = images_get(access_token, account_id, ('image_id','preview_url'))
        if len(l) > 0:
            for ll in l:
                ll['account_id'] = account_id
                y = tuple(ll.values())
                r = r + (y,)
    mysql_insert_image(r)
#get_image_information()


def start_all_job():
    # midnight (UTC+8) of the previous calendar day, as a Unix timestamp
    start_time = int((time.time() + 8 * 3600) // 86400 * 86400 - 8 * 3600 - 86400)
    end_time = int((time.time() + 8 * 3600) // 86400 * 86400 - 8 * 3600 - 86400)
    get_daily_vx_campaign(start_time, end_time)
    get_adcreative_vx()
    get_image_information()
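

# The start_time/end_time expression above resolves to 00:00 (UTC+8) of the previous calendar
# day as a Unix timestamp. A sketch of an equivalent, more explicit computation using datetime:
def yesterday_midnight_cst():
    """Sketch: Unix timestamp of yesterday 00:00 in UTC+8, matching the arithmetic above."""
    tz = datetime.timezone(datetime.timedelta(hours=8))
    today_midnight = datetime.datetime.now(tz).replace(hour=0, minute=0, second=0, microsecond=0)
    return int((today_midnight - datetime.timedelta(days=1)).timestamp())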


start_job_time = '2020-11-25 03:10:10'

if __name__ == '__main__':
    scheduler = BlockingScheduler()
    scheduler.add_job(start_all_job, 'interval', days=1, start_date=start_job_time)
    scheduler.start()
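
# Scheduling note (assumption, not verified against a specific APScheduler version): with an
# interval trigger of days=1 anchored at start_job_time, the scheduler is expected to fire
# start_all_job once per day at 03:10:10 after that start date has passed; catch-up behaviour
# for missed runs depends on the job's misfire settings.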