#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Fri Jun 5 17:00:45 2020
@author: chencong
"""
import json
import random
import requests
import time
import pandas as pd
import pymysql
from apscheduler.schedulers.blocking import BlockingScheduler
import datetime
import token_list as tl
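# Local dependency (assumption): token_list is expected to expose token_list_vx and token_list_qq,
# iterables of per-account tuples in which index 0 is the account_id and index 2 is the
# access_token, matching the tuple indexing used throughout this script.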
def adcreatives_get(access_token, account_id, fields):  # fetch ad creatives
    interface = 'adcreatives/get'
    url = 'https://api.e.qq.com/v1.1/' + interface
    page = 1
    list1 = []
    common_parameters = {
        'access_token': access_token,
        'timestamp': int(time.time()),
        'nonce': str(time.time()) + str(random.randint(0, 999999)),
        'fields': fields
    }
    parameters = {
        "account_id": account_id,
        "page": page,
        "page_size": 100,
        "is_deleted": False
    }
    parameters.update(common_parameters)
    for k in parameters:
        if type(parameters[k]) is not str:
            parameters[k] = json.dumps(parameters[k])
    r = requests.get(url, params=parameters).json()
    if 'data' in r.keys():
        list1 = list1 + r['data']['list']
        total_page = r['data']['page_info']['total_page']
        if total_page > 1:
            for page in range(2, total_page + 1):
                common_parameters = {
                    'access_token': access_token,
                    'timestamp': int(time.time()),
                    'nonce': str(time.time()) + str(random.randint(0, 999999)),
                    'fields': fields
                }
                parameters = {
                    "account_id": account_id,
                    "page": page,
                    "page_size": 100,
                    "is_deleted": False
                }
                parameters.update(common_parameters)
                for k in parameters:
                    if type(parameters[k]) is not str:
                        parameters[k] = json.dumps(parameters[k])
                r = requests.get(url, params=parameters).json()
                if 'data' in r.keys():
                    list1 = list1 + r['data']['list']
    return list1
#print(adcreatives_get('3bbbae77bed9fcde94cc0f1742a18c6e',11436446,('campaign_id','adcreative_id','adcreative_name','adcreative_elements','promoted_object_type','page_type','page_spec','link_page_spec','universal_link_url','promoted_object_id')))
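# adcreatives_get above and images_get / campaigns_get below all repeat the same pagination dance:
# request page 1, read page_info.total_page, then fetch pages 2..total_page. A minimal sketch of
# that shared logic as one helper (not wired into the rest of this script; endpoint-specific keys
# such as "is_deleted" would go into extra_params):
def paged_get(access_token, account_id, interface, fields, extra_params=None):
    url = 'https://api.e.qq.com/v1.1/' + interface
    results = []
    page = 1
    total_page = 1
    while page <= total_page:
        parameters = {
            'access_token': access_token,
            'timestamp': int(time.time()),
            'nonce': str(time.time()) + str(random.randint(0, 999999)),
            'fields': fields,
            'account_id': account_id,
            'page': page,
            'page_size': 100,
        }
        if extra_params:
            parameters.update(extra_params)
        # non-string values are JSON-encoded before being sent as query parameters,
        # mirroring the convention used by the hand-written functions in this file
        for k in parameters:
            if type(parameters[k]) is not str:
                parameters[k] = json.dumps(parameters[k])
        r = requests.get(url, params=parameters).json()
        if 'data' not in r:
            break
        results += r['data']['list']
        total_page = r['data']['page_info']['total_page']
        page += 1
    return results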
def ads_get(access_token, account_id, fields):  # fetch ads
    interface = 'ads/get'
    url = 'https://api.e.qq.com/v1.1/' + interface
    common_parameters = {
        'access_token': access_token,
        'timestamp': int(time.time()),
        'nonce': str(time.time()) + str(random.randint(0, 999999)),
        'fields': fields
    }
    parameters = {
        "account_id": account_id,
        "page": 1,
        "page_size": 10,
        "is_deleted": False
    }
    parameters.update(common_parameters)
    for k in parameters:
        if type(parameters[k]) is not str:
            parameters[k] = json.dumps(parameters[k])
    r = requests.get(url, params=parameters)
    return r.json()
#print(ads_get('2a674bef201314d338be30420369671f',14985162,('ad_id','ad_name','adcreative_id','adcreative')))
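# Note: ads_get above and wechat_pages_get / adgroups_get below each request a single fixed page
# and return the raw JSON response rather than looping over total_page like the list helpers above.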
def wechat_pages_get(access_token, account_id, page_id, fields):  # fetch WeChat native pages
    interface = 'wechat_pages/get'
    url = 'https://api.e.qq.com/v1.1/' + interface
    common_parameters = {
        'access_token': access_token,
        'timestamp': int(time.time()),
        'nonce': str(time.time()) + str(random.randint(0, 999999)),
        'fields': fields
    }
    parameters = {
        "account_id": account_id,
        "filtering":
        [
            {
                "field": "page_id",
                "operator": "EQUALS",
                "values":
                [
                    page_id
                ]
            }
        ],
        "page": 1,
        "page_size": 10
    }
    parameters.update(common_parameters)
    for k in parameters:
        if type(parameters[k]) is not str:
            parameters[k] = json.dumps(parameters[k])
    r = requests.get(url, params=parameters)
    return r.json()
#print(wechat_pages_get('2a674bef201314d338be30420369671f',14985162,1900495593,('page_id','page_name','created_time','last_modified_time','page_template_id','preview_url','page_type','source_type')))
def adgroups_get(access_token, account_id, fields):  # fetch ad groups
    interface = 'adgroups/get'
    url = 'https://api.e.qq.com/v1.1/' + interface
    common_parameters = {
        'access_token': access_token,
        'timestamp': int(time.time()),
        'nonce': str(time.time()) + str(random.randint(0, 999999)),
        'fields': fields
    }
    parameters = {
        "account_id": account_id,
        "page": 4,
        "page_size": 100,
        "is_deleted": False
    }
    parameters.update(common_parameters)
    for k in parameters:
        if type(parameters[k]) is not str:
            parameters[k] = json.dumps(parameters[k])
    r = requests.get(url, params=parameters)
    return r.json()
#print(adgroups_get('2a674bef201314d338be30420369671f',14985162,('campaign_id','adgroup_id','adgroup_name','optimization_goal','billing_event','bid_amount','daily_budget','targeting','begin_date','end_date','time_series','bid_strategy','cold_start_audience','auto_audience','expand_enabled','expand_targeting','deep_conversion_spec','deep_optimization_action_type','conversion_id','deep_conversion_behavior_bid','deep_conversion_worth_rate','system_status')))
def images_get(access_token, account_id, fields):  # fetch image metadata
    interface = 'images/get'
    url = 'https://api.e.qq.com/v1.1/' + interface
    page = 1
    list1 = []
    common_parameters = {
        'access_token': access_token,
        'timestamp': int(time.time()),
        'nonce': str(time.time()) + str(random.randint(0, 999999)),
        'fields': fields
    }
    parameters = {
        "account_id": account_id,
        "page": page,
        "page_size": 100
    }
    parameters.update(common_parameters)
    for k in parameters:
        if type(parameters[k]) is not str:
            parameters[k] = json.dumps(parameters[k])
    r = requests.get(url, params=parameters).json()
    if 'data' in r.keys():
        list1 = list1 + r['data']['list']
        total_page = r['data']['page_info']['total_page']
        if total_page > 1:
            for page in range(2, total_page + 1):
                common_parameters = {
                    'access_token': access_token,
                    'timestamp': int(time.time()),
                    'nonce': str(time.time()) + str(random.randint(0, 999999)),
                    'fields': fields
                }
                parameters = {
                    "account_id": account_id,
                    "page": page,
                    "page_size": 100
                }
                parameters.update(common_parameters)
                for k in parameters:
                    if type(parameters[k]) is not str:
                        parameters[k] = json.dumps(parameters[k])
                r = requests.get(url, params=parameters).json()
                if 'data' in r.keys():
                    list1 = list1 + r['data']['list']
    return list1
#print(images_get('2a674bef201314d338be30420369671f',14985162,('image_id','preview_url')))
def campaigns_get(access_token, account_id, fields):  # fetch campaigns
    interface = 'campaigns/get'
    url = 'https://api.e.qq.com/v1.1/' + interface
    page = 1
    list1 = []
    common_parameters = {
        'access_token': access_token,
        'timestamp': int(time.time()),
        'nonce': str(time.time()) + str(random.randint(0, 999999)),
        'fields': fields
    }
    parameters = {
        "account_id": account_id,
        "page": page,
        "page_size": 100,
        "is_deleted": False
    }
    parameters.update(common_parameters)
    for k in parameters:
        if type(parameters[k]) is not str:
            parameters[k] = json.dumps(parameters[k])
    r = requests.get(url, params=parameters).json()
    if 'data' in r.keys():
        list1 = list1 + r['data']['list']
        total_page = r['data']['page_info']['total_page']
        if total_page > 1:
            for page in range(2, total_page + 1):
                common_parameters = {
                    'access_token': access_token,
                    'timestamp': int(time.time()),
                    'nonce': str(time.time()) + str(random.randint(0, 999999)),
                    'fields': fields
                }
                parameters = {
                    "account_id": account_id,
                    "page": page,
                    "page_size": 100,
                    "is_deleted": False
                }
                parameters.update(common_parameters)
                for k in parameters:
                    if type(parameters[k]) is not str:
                        parameters[k] = json.dumps(parameters[k])
                r = requests.get(url, params=parameters).json()
                if 'data' in r.keys():
                    list1 = list1 + r['data']['list']
    return list1
#aa=tl.token_list_vx[-2]
#print(campaigns_get(aa[2],aa[0],('campaign_id','campaign_name','configured_status','campaign_type','promoted_object_type','daily_budget','budget_reach_date','created_time','last_modified_time','speed_mode','is_deleted')))
def daily_reports_get(access_token, account_id, level, start_date, end_date, fields):  # fetch WeChat campaign daily report data
    interface = 'daily_reports/get'
    url = 'https://api.e.qq.com/v1.1/' + interface
    common_parameters = {
        'access_token': access_token,
        'timestamp': int(time.time()),
        'nonce': str(time.time()) + str(random.randint(0, 999999)),
        'fields': fields
    }
    parameters = {
        "account_id": account_id,
        "level": level,
        "date_range":
        {
            "start_date": start_date,
            "end_date": end_date
        },
        "page": 1,
        "page_size": 1000,
        "fields":
        [
        ]
    }
    parameters.update(common_parameters)
    for k in parameters:
        if type(parameters[k]) is not str:
            parameters[k] = json.dumps(parameters[k])
    r = requests.get(url, params=parameters)
    return r.json()
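# In daily_reports_get above and daily_qq_reports_get below, the empty "fields" list in the request
# body is only a placeholder: parameters.update(common_parameters) replaces it with the caller's
# fields tuple before the request is sent.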
def daily_qq_reports_get(access_token, account_id, campaign_id, level, start_date, end_date, fields):  # fetch GDT campaign daily report data
    interface = 'daily_reports/get'
    url = 'https://api.e.qq.com/v1.1/' + interface
    common_parameters = {
        'access_token': access_token,
        'timestamp': int(time.time()),
        'nonce': str(time.time()) + str(random.randint(0, 999999)),
        'fields': fields
    }
    parameters = {
        "account_id": account_id,
        "filtering":
        [
            {
                "field": "campaign_id",
                "operator": "EQUALS",
                "values":
                [
                    campaign_id
                ]
            }
        ],
        "level": level,
        "date_range":
        {
            "start_date": start_date,
            "end_date": end_date
        },
        "page": 1,
        "page_size": 1000,
        "fields":
        [
        ]
    }
    parameters.update(common_parameters)
    for k in parameters:
        if type(parameters[k]) is not str:
            parameters[k] = json.dumps(parameters[k])
    r = requests.get(url, params=parameters)
    return r.json()
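# Example call (hypothetical token and campaign_id, following the pattern of the examples above):
# print(daily_qq_reports_get('<access_token>', 15223385, 1234567890, 'REPORT_LEVEL_CAMPAIGN',
#                            '2020-07-10', '2020-07-19',
#                            ('account_id','date','campaign_id','view_count','thousand_display_price','valid_click_count','ctr','cpc','cost','order_roi')))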
def mysql_insert_daily_vx_campaign(data):
    db = pymysql.connect(host='rm-bp1c9cj79872tx3aaro.mysql.rds.aliyuncs.com', user='superc',
                         password='Cc719199895', database='quchen_text')
    cursor = db.cursor()
    time1 = time.time()
    sql = 'insert ignore into daily_vx_campaign (account_id,date,campaign_id,view_count,cost,ctr,cpc,order_roi,thousand_display_price,valid_click_count,official_account_follow_count,conversions_count,official_account_follow_rate,conversions_rate,order_count,order_rate,order_unit_price,first_day_order_amount) values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s);'
    try:
        cursor.executemany(sql, data)
        db.commit()
        cost_time = round((time.time() - time1) / 60, 1)
        print('insert_daily_vx_campaign success', len(data), 'cost_minutes:', cost_time)
    except Exception:
        db.rollback()
        print('insert_daily_vx_campaign failed')
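# Assumption about the schema: the "insert ignore" statements in these helpers rely on the target
# tables having suitable primary/unique keys (e.g. account_id + date + campaign_id for the daily
# tables), so re-running a day skips rows that already exist instead of duplicating them.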
def mysql_insert_daily_qq_campaign(data):
    db = pymysql.connect(host='rm-bp1c9cj79872tx3aaro.mysql.rds.aliyuncs.com', user='superc',
                         password='Cc719199895', database='quchen_text')
    cursor = db.cursor()
    time1 = time.time()
    sql = 'insert ignore into daily_qq_campaign (account_id,date,campaign_id,view_count,thousand_display_price,valid_click_count,ctr,cpc,cost,order_roi) values (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)'
    try:
        cursor.executemany(sql, data)
        db.commit()
        cost_time = round((time.time() - time1) / 60, 1)
        print('insert_daily_qq_campaign success', len(data), 'cost_minutes:', cost_time)
    except Exception:
        db.rollback()
        print('insert_daily_qq_campaign failed')
#print(daily_reports_get('2a674bef201314d338be30420369671f',14985162,'REPORT_LEVEL_CAMPAIGN_WECHAT','2020-07-20','2020-07-20',('account_id','date','campaign_id','view_count','cost','ctr','cpc','order_roi','thousand_display_price','valid_click_count','official_account_follow_count','conversions_count','official_account_follow_rate','conversions_rate','order_count','order_rate','order_unit_price','first_day_order_amount')))
#print(daily_reports_get('27b2f2768640555133162b5982872b83',15223385,'REPORT_LEVEL_CAMPAIGN','2020-07-10','2020-07-19',('account_id','date','campaign_id','view_count','thousand_display_price','valid_click_count','ctr','cpc','cost','order_roi')))
def mysql_insert_campaign_vx(data, data2):
    db = pymysql.connect(host='rm-bp1c9cj79872tx3aaro.mysql.rds.aliyuncs.com', user='superc',
                         password='Cc719199895', database='quchen_text')
    cursor = db.cursor()
    time1 = time.time()
    sql = 'insert ignore into campaign_vx (campaign_id,campaign_name,configured_status,campaign_type,promoted_object_type,daily_budget,created_time,last_modified_time,account_id) values (%s,%s,%s,%s,%s,%s,%s,%s,%s)'
    sql2 = 'delete from campaign_vx where campaign_id =%s '
    try:
        cursor.executemany(sql2, data2)
        db.commit()
        print('delete campaign_vx success', len(data2))
    except Exception:
        db.rollback()
        print('delete campaign_vx failed')
    try:
        cursor.executemany(sql, data)
        db.commit()
        cost_time = round((time.time() - time1) / 60, 1)
        print('insert_campaign_vx success', len(data), 'cost_minutes:', cost_time)
    except Exception:
        db.rollback()
        print('insert_campaign_vx failed')
def mysql_insert_adcreative(data):
    db = pymysql.connect(host='rm-bp1c9cj79872tx3aaro.mysql.rds.aliyuncs.com', user='superc',
                         password='Cc719199895', database='quchen_text')
    cursor = db.cursor()
    time1 = time.time()
    sql = 'insert ignore into adcreative (campaign_id,adcreative_id,adcreative_name,image_id,title,promoted_object_type,page_type,page_id,link_page_id,promoted_object_id) values (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)'
    try:
        cursor.executemany(sql, data)
        db.commit()
        cost_time = round((time.time() - time1) / 60, 1)
        print('insert_adcreative success', len(data), 'cost_minutes:', cost_time)
    except Exception:
        db.rollback()
        print('insert_adcreative failed')
def mysql_insert_image(data):
    db = pymysql.connect(host='rm-bp1c9cj79872tx3aaro.mysql.rds.aliyuncs.com', user='superc',
                         password='Cc719199895', database='quchen_text')
    cursor = db.cursor()
    time1 = time.time()
    sql = 'insert ignore into image (image_id,preview_url,account_id) values (%s,%s,%s)'
    try:
        cursor.executemany(sql, data)
        db.commit()
        cost_time = round((time.time() - time1) / 60, 1)
        print('insert image success', len(data), 'cost_minutes:', cost_time)
    except Exception:
        db.rollback()
        print('insert image failed')
def get_daily_vx_campaign(st, et):  # fetch campaigns and their daily report data
    token_list_v = tl.token_list_vx
    r = ()
    p = ()
    q = []
    for x in token_list_v:
        account_id = x[0]
        access_token = x[2]
        start_date = time.strftime("%Y-%m-%d", time.localtime(st))
        end_date = time.strftime("%Y-%m-%d", time.localtime(et))
        l = campaigns_get(access_token, account_id, ('campaign_id','campaign_name','configured_status','campaign_type','promoted_object_type','daily_budget','budget_reach_date','created_time','last_modified_time','speed_mode','is_deleted'))
        if len(l) > 0:
            for ll in l:
                ll['account_id'] = account_id
                # campaigns created or modified after the window start are queued for refresh
                if ll['created_time'] > st or ll['last_modified_time'] > st:
                    q.append(ll['campaign_id'])
                    lt = tuple(ll.values())
                    p = p + ((lt),)
        data_list = daily_reports_get(access_token, account_id, 'REPORT_LEVEL_CAMPAIGN_WECHAT', start_date, end_date, ('account_id','date','campaign_id','view_count','cost','ctr','cpc','order_roi','thousand_display_price','valid_click_count','official_account_follow_count','conversions_count','official_account_follow_rate','conversions_rate','order_count','order_rate','order_unit_price','first_day_order_amount'))
        if 'data' in data_list.keys():
            for y in data_list['data']['list']:
                y['account_id'] = account_id
                y = tuple(y.values())
                r = r + ((y),)
    mysql_insert_daily_vx_campaign(r)
    mysql_insert_campaign_vx(p, q)
#get_daily_vx_campaign(1597766400,1597852800)
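# (1597766400 and 1597852800 above are 2020-08-19 00:00 and 2020-08-20 00:00 UTC+8: the vx flow
# takes Unix timestamps for st/et, while the qq flow below takes 'YYYY-MM-DD' date strings.)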
def get_daily_qq_campaign(st, et):
    token_list_q = tl.token_list_qq
    r = ()
    for x in token_list_q:
        account_id = x[0]
        access_token = x[2]
        start_date = st
        end_date = et
        l = campaigns_get(access_token, account_id, ('campaign_id','campaign_name','configured_status','campaign_type','promoted_object_type','daily_budget','budget_reach_date','created_time','last_modified_time','speed_mode','is_deleted'))
        for ll in l:
            campaign_id = ll['campaign_id']
            data_list = daily_qq_reports_get(access_token, account_id, campaign_id, 'REPORT_LEVEL_CAMPAIGN', start_date, end_date, ('account_id','date','campaign_id','view_count','thousand_display_price','valid_click_count','ctr','cpc','cost','order_roi'))
            if 'data' in data_list.keys():
                if len(data_list['data']['list']) > 0:
                    print(data_list)
                    print(l)
                for y in data_list['data']['list']:
                    y = tuple(y.values())
                    r = r + ((y),)
    #mysql_insert_daily_qq_campaign(r)
#get_daily_vx_campaign('2020-01-01','2020-07-27')
#get_daily_qq_campaign('2020-07-10','2020-07-24')
def get_campaign_update_list():
    db = pymysql.connect(host='rm-bp1c9cj79872tx3aaro.mysql.rds.aliyuncs.com', user='superc',
                         password='Cc719199895', database='quchen_text')
    #db = pymysql.connect(host='localhost', user='root', password='chencong1996', database='quchen_text')
    cursor = db.cursor()
    sql = 'select distinct advertiser_vx.account_id,access_token from campaign_vx left join advertiser_vx on advertiser_vx.account_id = campaign_vx.account_id where created_time>=%s or last_modified_time>=%s'
    # 00:00 of the previous day in UTC+8, expressed as a Unix timestamp
    data = (int((time.time() + 8 * 3600) // 86400 * 86400 - 8 * 3600 - 86400), int((time.time() + 8 * 3600) // 86400 * 86400 - 8 * 3600 - 86400))
    x = ()
    try:
        cursor.execute(sql, data)
        db.commit()
        x = cursor.fetchall()
        print('get campaign update list success', x)
    except Exception:
        db.rollback()
        print('get campaign update list failed')
    a = []
    if len(x) > 0:
        for t in x:
            a.append(t[0])
        sql2 = 'delete from adcreative where campaign_id=%s'
        try:
            cursor.executemany(sql2, a)
            db.commit()
            y = cursor.fetchall()
            print('delete adcreative success', y)
        except Exception:
            db.rollback()
            print('delete adcreative failed')
    return x
def get_adcreative_vx():
    token_list_vx = get_campaign_update_list()
    r = ()
    for x in token_list_vx:
        account_id = x[0]
        access_token = x[1]
        l = adcreatives_get(access_token, account_id, ('campaign_id','adcreative_id','adcreative_name','adcreative_elements','promoted_object_type','page_type','page_spec','link_page_spec','universal_link_url','promoted_object_id'))
        if len(l) > 0:
            for ll in l:
                if 'image_list' in ll['adcreative_elements'].keys():
                    for image_id in ll['adcreative_elements']['image_list']:
                        a = {}
                        a['campaign_id'] = ll['campaign_id']
                        a['adcreative_id'] = ll['adcreative_id']
                        a['adcreative_name'] = ll['adcreative_name']
                        a['image_id'] = image_id
                        a['title'] = ll['adcreative_elements']['title']
                        a['promoted_object_type'] = ll['promoted_object_type']
                        a['page_type'] = ll['page_type']
                        if 'page_spec' in ll.keys():
                            if 'page_id' in ll['page_spec'].keys():
                                a['page_id'] = ll['page_spec']['page_id']
                            else:
                                a['page_id'] = None
                        else:
                            a['page_id'] = None
                        if 'link_page_spec' in ll.keys():
                            if 'page_id' in ll['link_page_spec'].keys():
                                a['link_page_id'] = ll['link_page_spec']['page_id']
                            else:
                                a['link_page_id'] = None
                        else:
                            a['link_page_id'] = None
                        a['promoted_object_id'] = ll['promoted_object_id']
                        y = tuple(a.values())
                        r = r + ((y),)
                elif 'image' in ll['adcreative_elements'].keys():
                    a = {}
                    a['campaign_id'] = ll['campaign_id']
                    a['adcreative_id'] = ll['adcreative_id']
                    a['adcreative_name'] = ll['adcreative_name']
                    a['image_id'] = ll['adcreative_elements']['image']
                    if 'title' in ll['adcreative_elements']:
                        a['title'] = ll['adcreative_elements']['title']
                    else:
                        a['title'] = ''
                    a['promoted_object_type'] = ll['promoted_object_type']
                    a['page_type'] = ll['page_type']
                    if 'page_spec' in ll.keys():
                        if 'page_id' in ll['page_spec'].keys():
                            a['page_id'] = ll['page_spec']['page_id']
                        else:
                            a['page_id'] = None
                    else:
                        a['page_id'] = None
                    if 'link_page_spec' in ll.keys():
                        if 'page_id' in ll['link_page_spec'].keys():
                            a['link_page_id'] = ll['link_page_spec']['page_id']
                        else:
                            a['link_page_id'] = None
                    else:
                        a['link_page_id'] = None
                    a['promoted_object_id'] = ll['promoted_object_id']
                    y = tuple(a.values())
                    r = r + ((y),)
    mysql_insert_adcreative(r)
#get_adcreative_vx()
def get_image_information():
    token_list_vx = tl.token_list_vx
    r = ()
    for x in token_list_vx:
        account_id = x[0]
        access_token = x[2]
        l = images_get(access_token, account_id, ('image_id', 'preview_url'))
        if len(l) > 0:
            for ll in l:
                ll['account_id'] = account_id
                y = tuple(ll.values())
                r = r + ((y),)
    mysql_insert_image(r)
#get_image_information()
#get_adcreative_vx()
#get_image_information()
def start_all_job():
    # both timestamps resolve to 00:00 of the previous day in UTC+8
    start_time = int((time.time() + 8 * 3600) // 86400 * 86400 - 8 * 3600 - 86400)
    end_time = int((time.time() + 8 * 3600) // 86400 * 86400 - 8 * 3600 - 86400)
    get_daily_vx_campaign(start_time, end_time)
    get_adcreative_vx()
    get_image_information()

start_job_time = '2020-09-25 06:10:10'
if __name__ == '__main__':
    scheduler = BlockingScheduler()
    scheduler.add_job(start_all_job, 'interval', days=1, start_date=start_job_time)
    scheduler.start()
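# With the 'interval' trigger and days=1, BlockingScheduler is expected to run start_all_job once a
# day at the wall-clock time taken from start_date (06:10:10, scheduler-local time); a start_date in
# the past only anchors the schedule, so yesterday's WeChat campaign data is pulled each morning.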