Browse Source

上传文件至 'dgp'

cary cách đây 4 năm
mục cha
commit
a95b1f1d0e
2 tập tin đã thay đổi với 159 bổ sung1 xóa
  1. 158 0
      dgp/get_cost_history_bk20201117.py
  2. 1 1
      dgp/get_data_hourly.py

+ 158 - 0
dgp/get_cost_history_bk20201117.py

@@ -0,0 +1,158 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+"""
+Created on Thu Apr 30 11:18:31 2020
+
+@author: chencong
+"""
+
+import requests
+import urllib.parse 
+import time
+import json 
+import random
+import datetime
+import csv
+import pymysql
+import token_list as ts
+from apscheduler.schedulers.blocking import BlockingScheduler
+
+"""
+header1 = ['account_id','date','view_count','valid_click_count','ctr','cpc','cost','web_order_count','web_order_rate','web_order_cost','follow_count','order_amount','cheout_fd','order_roi','cheout_fd_reward']
+header2 = ['account_id','date','cost','view_count','valid_click_count','ctr','official_account_follow_rate','order_amount','order_roi','order_count','order_rate','order_unit_price','web_order_cost','first_day_order_amount','first_day_order_count']
+
+def append_csv(path,data,header):
+    header = header
+    with open (path,'a+',newline='',encoding='utf-8') as f :
+        dictWriter = csv.DictWriter(f,fieldnames=header)
+        dictWriter.writerow(data)
+def write_header(path,header):
+    header = header
+    with open (path,'w',newline='',encoding='utf-8') as f:
+        dictWriter = csv.DictWriter(f, fieldnames=header)
+        dictWriter.writeheader()
+ad_qq = 'ad_qq_'+str(time.strftime("%Y-%m-%d", time.localtime()))+'.csv'
+ad_vx = 'ad_vx_'+str(time.strftime("%Y-%m-%d", time.localtime()))+'.csv'
+write_header(ad_qq,header1)
+write_header(ad_vx,header2)
+"""
def daily_reports_get(access_token, account_id, level, fields,
                      start_date='2020-09-01', end_date='2020-09-03'):
    """Fetch daily report rows from the Tencent Ads API (api.e.qq.com).

    Parameters:
        access_token: OAuth access token for the advertiser account.
        account_id:   Advertiser account id (string).
        level:        Report level, e.g. "REPORT_LEVEL_ADVERTISER" or
                      "REPORT_LEVEL_ADVERTISER_WECHAT".
        fields:       Tuple of field names to request.
        start_date:   Inclusive "YYYY-MM-DD" range start. Defaults keep the
                      previously hard-coded window for backward compatibility.
        end_date:     Inclusive "YYYY-MM-DD" range end.

    Returns:
        The decoded JSON response body as a dict.
    """
    interface = 'daily_reports/get'
    url = 'https://api.e.qq.com/v1.1/' + interface

    common_parameters = {
        'access_token': access_token,
        'timestamp': int(time.time()),
        # nonce must be unique per request: wall clock plus a random salt
        'nonce': str(time.time()) + str(random.randint(0, 999999)),
    }

    parameters = {
        "account_id": account_id,
        "level": level,
        "date_range": {
            "start_date": start_date,
            "end_date": end_date,
        },
        "page": 1,
        "page_size": 1000,
        "fields": fields,
    }
    parameters.update(common_parameters)

    # The API expects non-string values (dicts, tuples, ints) to be sent as
    # JSON-encoded query-string parameters.
    for k in parameters:
        if not isinstance(parameters[k], str):
            parameters[k] = json.dumps(parameters[k])

    r = requests.get(url, params=parameters)

    return r.json()
+
+
def mysql_insert_daily_vx(data):
    """Bulk-insert WeChat ("vx") daily report rows into table ``daily_vx``.

    Parameters:
        data: sequence of 15-tuples matching the column order in the SQL
              below (date, cost, ..., first_day_order_count, account_id).

    Duplicate rows are silently skipped via ``INSERT IGNORE``.
    """
    # SECURITY NOTE(review): credentials are hard-coded; move host/user/
    # password to environment variables or a config file.
    db = pymysql.connect('rm-bp1c9cj79872tx3aaro.mysql.rds.aliyuncs.com','superc','Cc719199895','quchen_text')
    cursor = db.cursor()
    time1 = time.time()

    sql2 = 'insert ignore into daily_vx (date,cost,view_count,valid_click_count,ctr,official_account_follow_rate,order_amount,order_roi,order_count,order_rate,order_unit_price,web_order_cost,first_day_order_amount,first_day_order_count,account_id) values (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s);'

    try:
        cursor.executemany(sql2, data)
        db.commit()
        cost_time = round((time.time() - time1) / 60, 1)
        print('insert_daily_vx access', len(data), 'cost_minutes:', cost_time)
    except Exception as e:
        # Roll back and surface the actual error instead of swallowing it
        # with a bare except (the original hid every failure cause).
        db.rollback()
        print('insert_daily_vx defeat', e)
    finally:
        # Always release the cursor and connection (previously leaked).
        cursor.close()
        db.close()
+
+
def mysql_insert_daily_qq(data):
    """Bulk-insert QQ daily report rows into table ``daily_qq``.

    Parameters:
        data: sequence of 16-tuples matching the column order in the SQL
              below (date, view_count, ..., from_follow_uv, account_id).

    Duplicate rows are silently skipped via ``INSERT IGNORE``.
    """
    # SECURITY NOTE(review): credentials are hard-coded; move host/user/
    # password to environment variables or a config file.
    db = pymysql.connect('rm-bp1c9cj79872tx3aaro.mysql.rds.aliyuncs.com','superc','Cc719199895','quchen_text')
    cursor = db.cursor()
    time1 = time.time()
    sql2 = 'insert ignore into daily_qq (date,view_count,valid_click_count,ctr,cpc,cost,web_order_count,web_order_rate,web_order_cost,follow_count,order_amount,order_roi,platform_page_view_count,web_commodity_page_view_count,from_follow_uv,account_id) values (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s);'

    try:
        cursor.executemany(sql2, data)
        db.commit()
        cost_time = round((time.time() - time1) / 60, 1)
        print('insert_daily_qq access', len(data), 'cost_minutes:', cost_time)
    except Exception as e:
        # Roll back and surface the actual error instead of swallowing it
        # with a bare except (the original hid every failure cause).
        db.rollback()
        print('insert_daily_qq defeat', e)
    finally:
        # Always release the cursor and connection (previously leaked).
        cursor.close()
        db.close()
+
+
+
def get_cost():
    """Pull daily advertiser reports for every configured QQ and WeChat
    account and insert the rows into MySQL (daily_qq / daily_vx).

    Each token row is:
        [account_id, wechat_appid, access_token, refresh_token,
         account_name, batch, owner, platform, book_title]
    only index 0 (account_id) and index 2 (access_token) are used here.

    SECURITY NOTE(review): live tokens are hard-coded in source; move them
    to secure storage (they also appear in the imported token_list module).
    """
    token_list_q = [
['17654125','','0de3e48bfcf08048d499320afb34b129','a57bff7432889e78b5ac8c80c3a06f5d','流云书楼','趣程15期','唐怡婷','文鼎','丁二狗的天梯人生'], 
['17654126','','94aa877d937fe4a334db98fad6cdd39d','d66044d43cabe9aa555b1384b297ce8c','流云书楼','趣程15期','唐怡婷','文鼎','丁二狗的天梯人生'], 
['17654127','','5ddf8193d5bc9bb3c9e877c85318cc31','2c55d4636ecf9b2ebb81a7432ffc6581','流云书楼','趣程15期','唐怡婷','文鼎','丁二狗的天梯人生'], 
['17654128','','f28fbdf11e03023bf9543b4f3c12144c','5a34f508fc1842e6b32c2e726fdc515f','流云书楼','趣程15期','唐怡婷','文鼎','丁二狗的天梯人生'], 
['17654129','','6be64129e531941596f45ea8eb5191a3','3dee118bf703bda68583a818ba300f60','流云书楼','趣程15期','唐怡婷','文鼎','丁二狗的天梯人生'], 
['17654130','','677e17643d3a25fee022dced4b1f6531','7930c900a8d10eef8eeafe8661a23567','流云书楼','趣程15期','唐怡婷','文鼎','丁二狗的天梯人生'], 
['17654131','','6b8e4e67a4f1719349de8c7660af3ac2','a949d39b59dc2900269c7127d092648f','流云书楼','趣程15期','唐怡婷','文鼎','丁二狗的天梯人生'], 
['17654133','','60cce74d20a94e7885a0338288b0e752','613447e6af3cd6feaea6de7a1ba73dcb','流云书楼','趣程15期','唐怡婷','文鼎','丁二狗的天梯人生'], 
['17654135','','002a427ab5ce1be10b21e4addee51557','59989d70361ff97c1232c0b1c6e2d975','流云书楼','趣程15期','唐怡婷','文鼎','丁二狗的天梯人生']
]
    token_list_v = [
['17590000','wx1ca3d7b3e6a550d3','f3c280dd415dfa3bd913ee32be2bc234','95ef1c34e17d6aa480fd4dffffde9ada','晨潇文轩','刘勇9月','蒋瑜','掌中云','战龙无双'], 
['17589896','wx26fd7f6e4010af01','14ebe41c578f3be8c9989210fe1aa7ac','4eb61b2f4edc024d29af18caa3a8d564','惜寒文轩','清勇','','',''], 
['17589964','wx30b90b1671010fb4','4567a63fee668da58d07e5a90aa05c64','068695a073ddf763cabaeaabb4dade48','秋珊文典','刘勇9月','黄海军','文鼎','丁二狗的天梯人生'], 
['17590030','wx99d65ca787bc6e80','a10f6a9407fef98e856b7a7f281febc9','cc6cd0790c78129705fe59d7c4bf52c1','凝梦文海','刘勇9月','杜广林','文鼎','丁二狗的天梯人生'], 
['17590045','wxb0b557705f6f4c95','d9aa49ef61b572c669c67ac4a7aacd6e','867fe22448311c166693b6274e18dc06','墨年书楼','刘勇9月','杜广林','掌中云','战龙无双']
]

    # Accumulate rows in a list (the original built a tuple with repeated
    # concatenation, which is O(n^2)).  Row order relies on dicts preserving
    # insertion order and the API returning keys in the requested field
    # order — NOTE(review): confirm against the API response format.
    qq_rows = []
    for acct in token_list_q:
        resp = daily_reports_get(acct[2], acct[0], "REPORT_LEVEL_ADVERTISER", ('date','view_count','valid_click_count','ctr','cpc','cost','web_order_count','web_order_rate','web_order_cost','follow_count','order_amount','order_roi','platform_page_view_count','web_commodity_page_view_count','from_follow_uv'))

        for row in resp['data']['list']:
            row['account_id'] = acct[0]
            qq_rows.append(tuple(row.values()))
    mysql_insert_daily_qq(qq_rows)

    vx_rows = []
    for acct in token_list_v:
        resp = daily_reports_get(acct[2], acct[0], "REPORT_LEVEL_ADVERTISER_WECHAT", ('date','cost','view_count','valid_click_count','ctr','official_account_follow_rate','order_amount','order_roi','order_count','order_rate','order_unit_price','web_order_cost','first_day_order_amount','first_day_order_count'))
        for row in resp['data']['list']:
            row['account_id'] = acct[0]
            vx_rows.append(tuple(row.values()))

    mysql_insert_daily_vx(vx_rows)
    # Debug trace: row count, last WeChat account id processed, and the rows.
    print(len(vx_rows), acct[0], vx_rows)
get_cost()
+"""
+start_job_time = '2020-07-23 17:12:00'
+
+if __name__ == '__main__':
+    scheduler = BlockingScheduler()
+    scheduler.add_job(get_cost, 'interval',minutes=1,start_date=start_job_time)
+    scheduler.start()
+
+"""   
+    

+ 1 - 1
dgp/get_data_hourly.py

@@ -779,7 +779,7 @@ def start_cost_job():
 
 
 #start_order_time = '2020-10-13 17:10:00'
-start_cost_time = '2020-11-17 18:15:00'
+start_cost_time = '2020-11-24 18:15:00'
 
 if __name__ == '__main__':
 	scheduler = BlockingScheduler()