#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
get_cost_history.py
Created on Thu Apr 30 11:18:31 2020
@author: chencong

Pulls daily cost reports from the Tencent Marketing API (api.e.qq.com) for a fixed
list of QQ and WeChat advertiser accounts and bulk-inserts them into the MySQL
tables daily_qq and daily_vx.
"""
import requests
import urllib.parse   # not used in this script
import time
import json
import random
import datetime       # not used in this script
import csv            # only needed by the commented-out CSV helpers below
import pymysql
import token_list as ts   # not referenced in this script
from apscheduler.schedulers.blocking import BlockingScheduler  # used by the disabled scheduler block at the bottom
  17. """
  18. header1 = ['account_id','date','view_count','valid_click_count','ctr','cpc','cost','web_order_count','web_order_rate','web_order_cost','follow_count','order_amount','cheout_fd','order_roi','cheout_fd_reward']
  19. header2 = ['account_id','date','cost','view_count','valid_click_count','ctr','official_account_follow_rate','order_amount','order_roi','order_count','order_rate','order_unit_price','web_order_cost','first_day_order_amount','first_day_order_count']
  20. def append_csv(path,data,header):
  21. header = header
  22. with open (path,'a+',newline='',encoding='utf-8') as f :
  23. dictWriter = csv.DictWriter(f,fieldnames=header)
  24. dictWriter.writerow(data)
  25. def write_header(path,header):
  26. header = header
  27. with open (path,'w',newline='',encoding='utf-8') as f:
  28. dictWriter = csv.DictWriter(f, fieldnames=header)
  29. dictWriter.writeheader()
  30. ad_qq = 'ad_qq_'+str(time.strftime("%Y-%m-%d", time.localtime()))+'.csv'
  31. ad_vx = 'ad_vx_'+str(time.strftime("%Y-%m-%d", time.localtime()))+'.csv'
  32. write_header(ad_qq,header1)
  33. write_header(ad_vx,header2)
  34. """
def daily_reports_get(access_token, account_id, level, fields):
    """Call daily_reports/get and return the parsed JSON response.

    Note: the reporting date range is currently hard-coded to 2020-09-01 .. 2020-09-03.
    """
    interface = 'daily_reports/get'
    url = 'https://api.e.qq.com/v1.1/' + interface
    common_parameters = {
        'access_token': access_token,
        'timestamp': int(time.time()),
        'nonce': str(time.time()) + str(random.randint(0, 999999)),
    }
    parameters = {
        "account_id": account_id,
        "level": level,
        "date_range": {
            "start_date": '2020-09-01',
            "end_date": '2020-09-03'
        },
        "page": 1,
        "page_size": 1000,
        "fields": fields
    }
    parameters.update(common_parameters)
    # Non-string values (dicts, tuples, ints) must be JSON-encoded for the query string.
    for k in parameters:
        if not isinstance(parameters[k], str):
            parameters[k] = json.dumps(parameters[k])
    r = requests.get(url, params=parameters)
    return r.json()
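# Example (sketch, not called here): a single report pull plus a basic error check.
# The token/account values are placeholders, and the top-level 'code'/'message' fields
# are assumed to follow the usual api.e.qq.com response envelope; the rest of this
# script only relies on resp['data']['list'].
#
#   resp = daily_reports_get('<access_token>', '<account_id>',
#                            'REPORT_LEVEL_ADVERTISER', ('date', 'view_count', 'cost'))
#   if resp.get('code', 0) != 0:
#       print('daily_reports/get error:', resp.get('code'), resp.get('message'))
#   else:
#       for row in resp['data']['list']:
#           print(row)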
def mysql_insert_daily_vx(data):
    """Bulk-insert WeChat (vx) daily rows; duplicate keys are skipped via INSERT IGNORE."""
    db = pymysql.connect(host='rm-bp1c9cj79872tx3aaro.mysql.rds.aliyuncs.com',
                         user='superc', password='Cc719199895', database='quchen_text')
    cursor = db.cursor()
    time1 = time.time()
    sql2 = 'insert ignore into daily_vx (date,cost,view_count,valid_click_count,ctr,official_account_follow_rate,order_amount,order_roi,order_count,order_rate,order_unit_price,web_order_cost,first_day_order_amount,first_day_order_count,account_id) values (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s);'
    try:
        cursor.executemany(sql2, data)
        db.commit()
        cost_time = round((time.time() - time1) / 60, 1)
        print('insert_daily_vx ok', len(data), 'cost_minutes:', cost_time)
    except Exception as e:
        db.rollback()
        print('insert_daily_vx failed:', e)
    finally:
        cursor.close()
        db.close()
def mysql_insert_daily_qq(data):
    """Bulk-insert QQ daily rows; duplicate keys are skipped via INSERT IGNORE."""
    db = pymysql.connect(host='rm-bp1c9cj79872tx3aaro.mysql.rds.aliyuncs.com',
                         user='superc', password='Cc719199895', database='quchen_text')
    cursor = db.cursor()
    time1 = time.time()
    sql2 = 'insert ignore into daily_qq (date,view_count,valid_click_count,ctr,cpc,cost,web_order_count,web_order_rate,web_order_cost,follow_count,order_amount,order_roi,platform_page_view_count,web_commodity_page_view_count,from_follow_uv,account_id) values (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s);'
    try:
        cursor.executemany(sql2, data)
        db.commit()
        cost_time = round((time.time() - time1) / 60, 1)
        print('insert_daily_qq ok', len(data), 'cost_minutes:', cost_time)
    except Exception as e:
        db.rollback()
        print('insert_daily_qq failed:', e)
    finally:
        cursor.close()
        db.close()
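# Optional sketch (not wired in): build each row explicitly from the requested field
# names instead of relying on the dict insertion order of record.values(). This assumes
# the API echoes every requested field back under the same name; missing fields become None.
def row_from_record(record, fields, account_id):
    """Return a tuple ordered as (field_1, ..., field_n, account_id)."""
    return tuple(record.get(f) for f in fields) + (account_id,)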
def get_cost():
    # Each entry: [account_id, wechat_appid, access_token, (likely refresh_token),
    # then descriptive labels (account name / batch / operator / platform / book title)
    # that are not used by the code -- only x[0] and x[2] are read below.
    token_list_q = [
        ['17654125','','0de3e48bfcf08048d499320afb34b129','a57bff7432889e78b5ac8c80c3a06f5d','流云书楼','趣程15期','唐怡婷','文鼎','丁二狗的天梯人生'],
        ['17654126','','94aa877d937fe4a334db98fad6cdd39d','d66044d43cabe9aa555b1384b297ce8c','流云书楼','趣程15期','唐怡婷','文鼎','丁二狗的天梯人生'],
        ['17654127','','5ddf8193d5bc9bb3c9e877c85318cc31','2c55d4636ecf9b2ebb81a7432ffc6581','流云书楼','趣程15期','唐怡婷','文鼎','丁二狗的天梯人生'],
        ['17654128','','f28fbdf11e03023bf9543b4f3c12144c','5a34f508fc1842e6b32c2e726fdc515f','流云书楼','趣程15期','唐怡婷','文鼎','丁二狗的天梯人生'],
        ['17654129','','6be64129e531941596f45ea8eb5191a3','3dee118bf703bda68583a818ba300f60','流云书楼','趣程15期','唐怡婷','文鼎','丁二狗的天梯人生'],
        ['17654130','','677e17643d3a25fee022dced4b1f6531','7930c900a8d10eef8eeafe8661a23567','流云书楼','趣程15期','唐怡婷','文鼎','丁二狗的天梯人生'],
        ['17654131','','6b8e4e67a4f1719349de8c7660af3ac2','a949d39b59dc2900269c7127d092648f','流云书楼','趣程15期','唐怡婷','文鼎','丁二狗的天梯人生'],
        ['17654133','','60cce74d20a94e7885a0338288b0e752','613447e6af3cd6feaea6de7a1ba73dcb','流云书楼','趣程15期','唐怡婷','文鼎','丁二狗的天梯人生'],
        ['17654135','','002a427ab5ce1be10b21e4addee51557','59989d70361ff97c1232c0b1c6e2d975','流云书楼','趣程15期','唐怡婷','文鼎','丁二狗的天梯人生']
    ]
    token_list_v = [
        ['17590000','wx1ca3d7b3e6a550d3','f3c280dd415dfa3bd913ee32be2bc234','95ef1c34e17d6aa480fd4dffffde9ada','晨潇文轩','刘勇9月','蒋瑜','掌中云','战龙无双'],
        ['17589896','wx26fd7f6e4010af01','14ebe41c578f3be8c9989210fe1aa7ac','4eb61b2f4edc024d29af18caa3a8d564','惜寒文轩','清勇','','',''],
        ['17589964','wx30b90b1671010fb4','4567a63fee668da58d07e5a90aa05c64','068695a073ddf763cabaeaabb4dade48','秋珊文典','刘勇9月','黄海军','文鼎','丁二狗的天梯人生'],
        ['17590030','wx99d65ca787bc6e80','a10f6a9407fef98e856b7a7f281febc9','cc6cd0790c78129705fe59d7c4bf52c1','凝梦文海','刘勇9月','杜广林','文鼎','丁二狗的天梯人生'],
        ['17590045','wxb0b557705f6f4c95','d9aa49ef61b572c669c67ac4a7aacd6e','867fe22448311c166693b6274e18dc06','墨年书楼','刘勇9月','杜广林','掌中云','战龙无双']
    ]
    # QQ advertiser accounts: collect one row tuple per (account, date), then bulk-insert.
    rows = []
    for x in token_list_q:
        a = daily_reports_get(x[2], x[0], "REPORT_LEVEL_ADVERTISER",
                              ('date', 'view_count', 'valid_click_count', 'ctr', 'cpc', 'cost',
                               'web_order_count', 'web_order_rate', 'web_order_cost', 'follow_count',
                               'order_amount', 'order_roi', 'platform_page_view_count',
                               'web_commodity_page_view_count', 'from_follow_uv'))
        for b in a['data']['list']:
            b['account_id'] = x[0]
            rows.append(tuple(b.values()))
    mysql_insert_daily_qq(rows)

    # WeChat advertiser accounts: same pattern against the WECHAT report level.
    rows = []
    for y in token_list_v:
        c = daily_reports_get(y[2], y[0], "REPORT_LEVEL_ADVERTISER_WECHAT",
                              ('date', 'cost', 'view_count', 'valid_click_count', 'ctr',
                               'official_account_follow_rate', 'order_amount', 'order_roi',
                               'order_count', 'order_rate', 'order_unit_price', 'web_order_cost',
                               'first_day_order_amount', 'first_day_order_count'))
        # print(y[0], c['data']['list'])
        for d in c['data']['list']:
            d['account_id'] = y[0]
            rows.append(tuple(d.values()))
    mysql_insert_daily_vx(rows)
    print(len(rows), y[0], rows)  # debug: row count, last account polled, full payload


get_cost()
  125. """
  126. start_job_time = '2020-07-23 17:12:00'
  127. if __name__ == '__main__':
  128. scheduler = BlockingScheduler()
  129. scheduler.add_job(get_cost, 'interval',minutes=1,start_date=start_job_time)
  130. scheduler.start()
  131. """