""" 计划维度日报""" import time from model.DataBaseUtils import MysqlUtils from concurrent.futures import ThreadPoolExecutor from .TxCostUtils import get_accounts import random import json import requests def run(st, et): executor = ThreadPoolExecutor(max_workers=10) for account in get_accounts(): executor.submit(get_campaign_cost_daily, account[0], account[1], account[3], st, et) executor.shutdown() def get_campaign_cost_daily(account_id, token, flag,st, et): """1,获取计划维度日报,把有消耗的计划入库 2,根据有消耗的计划,拉取计划的基础信息入库 """ if flag == 'MP': get_mp_data() else: get_qq_data() def mp_campaign_daily_report(account_id,access_token, start_date, end_date): level = "REPORT_LEVEL_CAMPAIGN_WECHAT" interface = 'daily_reports/get' url = 'https://api.e.qq.com/v1.1/' + interface fields = ('account_id', 'date', 'campaign_id', 'view_count', 'cost', 'ctr', 'cpc', 'order_roi', 'thousand_display_price', 'valid_click_count', 'official_account_follow_count', 'conversions_count', 'official_account_follow_rate', 'conversions_rate', 'order_count', 'order_rate', 'order_unit_price', 'first_day_order_amount') page = 1 li = [] while True: parameters = { 'access_token': access_token, 'timestamp': int(time.time()), 'nonce': str(time.time()) + str(random.randint(0, 999999)), 'fields': fields, "account_id": account_id, "level": level, "date_range": {"start_date": start_date, "end_date": end_date}, "page": page, "page_size": 1000, # "filtering": [], # "group_by": ["campaign_id",'date'] } for k in parameters: if type(parameters[k]) is not str: parameters[k] = json.dumps(parameters[k]) r = requests.get(url, params=parameters).json() data = r['data']['list'] li.extend(data) total_page = r['data']['page_info']['total_page'] if page == total_page: break else: page +=1 return li def gdt_campaign_daily_report(account_id,access_token, start_date, end_date): level = "REPORT_LEVEL_CAMPAIGN_WECHAT" interface = 'daily_reports/get' url = 'https://api.e.qq.com/v1.1/' + interface fields = ('account_id', 'date', 'campaign_id', 'view_count', 'cost', 'ctr', 'cpc', 'order_roi', 'thousand_display_price', 'valid_click_count', 'official_account_follow_count', 'conversions_count', 'official_account_follow_rate', 'conversions_rate', 'order_count', 'order_rate', 'order_unit_price', 'first_day_order_amount') page = 1 li = [] while True: parameters = { 'access_token': access_token, 'timestamp': int(time.time()), 'nonce': str(time.time()) + str(random.randint(0, 999999)), 'fields': fields, "account_id": account_id, "level": level, "date_range": {"start_date": start_date, "end_date": end_date}, "page": page, "page_size": 1000, # "filtering": [], # "group_by": ["campaign_id",'date'] } for k in parameters: if type(parameters[k]) is not str: parameters[k] = json.dumps(parameters[k]) r = requests.get(url, params=parameters).json() data = r['data']['list'] li.extend(data) total_page = r['data']['page_info']['total_page'] if page == total_page: break else: page +=1 return li def get_qq_data(): pass