- #!/usr/bin/env python3
- # -*- coding: utf-8 -*-
- """
- Created on Fri Jun 5 17:00:45 2020
- @author: chencong
- """
- import json
- import random
- import requests
- import time
- import pandas as pd
- import pymysql
- from apscheduler.schedulers.blocking import BlockingScheduler
- import datetime
- import token_list as tl
- import importlib
- importlib.reload(tl)
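- # This script pulls campaign, ad-creative, image and daily-report data from the Tencent
- # Marketing API (api.e.qq.com, v1.1) for every account/token pair defined in token_list,
- # writes the results into MySQL, and repeats the whole job once a day via the
- # BlockingScheduler set up at the bottom of the file.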
- def adcreatives_get(access_token, account_id, fields):  # fetch ad creatives (paginated)
- interface = 'adcreatives/get'
- url = 'https://api.e.qq.com/v1.1/' + interface
- page =1
- list1 = []
- common_parameters = {
- 'access_token': access_token,
- 'timestamp': int(time.time()),
- 'nonce': str(time.time()) + str(random.randint(0, 999999)),
- 'fields':fields
- }
- parameters = {
- "account_id": account_id,
- "page": page,
- "page_size": 100,
- "is_deleted": False
- }
- parameters.update(common_parameters)
- for k in parameters:
- if type(parameters[k]) is not str:
- parameters[k] = json.dumps(parameters[k])
- r = requests.get(url, params = parameters).json()
- if 'data' in r.keys():
- list1 = list1+r['data']['list']
- total_page=r['data']['page_info']['total_page']
- if total_page>1:
- for page in range(2,total_page+1):
- common_parameters = {
- 'access_token': access_token,
- 'timestamp': int(time.time()),
- 'nonce': str(time.time()) + str(random.randint(0, 999999)),
- 'fields':fields
- }
- parameters = {
- "account_id": account_id,
- "page": page,
- "page_size": 100,
- "is_deleted": False
- }
- parameters.update(common_parameters)
- for k in parameters:
- if type(parameters[k]) is not str:
- parameters[k] = json.dumps(parameters[k])
- r = requests.get(url, params = parameters).json()
- if 'data' in r.keys():
- list1 = list1+r['data']['list']
- return list1
- #print(adcreatives_get('3bbbae77bed9fcde94cc0f1742a18c6e',11436446,('campaign_id','adcreative_id','adcreative_name','adcreative_elements','promoted_object_type','page_type','page_spec','link_page_spec','universal_link_url','promoted_object_id')))
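- # The "fetch page 1, then loop pages 2..total_page" pattern above is repeated almost
- # verbatim in images_get and campaigns_get below. A minimal sketch of a shared helper
- # (illustrative only -- not used by this script; the name _paged_get and its parameters
- # are hypothetical):
- def _paged_get(url, access_token, fields, extra_params):
-     """Fetch every page of a list endpoint and return the combined 'list' entries."""
-     results, page, total_page = [], 1, 1
-     while page <= total_page:
-         params = {
-             'access_token': access_token,
-             'timestamp': int(time.time()),
-             'nonce': str(time.time()) + str(random.randint(0, 999999)),
-             'fields': fields,
-             'page': page,
-             'page_size': 100,
-         }
-         params.update(extra_params)
-         # non-string values are JSON-encoded, exactly as the functions in this script do
-         params = {k: v if isinstance(v, str) else json.dumps(v) for k, v in params.items()}
-         r = requests.get(url, params=params).json()
-         if 'data' not in r:
-             break
-         results += r['data']['list']
-         total_page = r['data']['page_info']['total_page']
-         page += 1
-     return results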
- def ads_get(access_token, account_id, fields):  # fetch ads
- interface = 'ads/get'
- url = 'https://api.e.qq.com/v1.1/' + interface
- common_parameters = {
- 'access_token': access_token,
- 'timestamp': int(time.time()),
- 'nonce': str(time.time()) + str(random.randint(0, 999999)),
- 'fields':fields
- }
- parameters = {
- "account_id": account_id,
- "page": 1,
- "page_size": 10,
- "is_deleted": False
- }
- parameters.update(common_parameters)
- for k in parameters:
- if type(parameters[k]) is not str:
- parameters[k] = json.dumps(parameters[k])
- r = requests.get(url, params = parameters)
- return r.json()
- #print(ads_get('2a674bef201314d338be30420369671f',14985162,('ad_id','ad_name','adcreative_id','adcreative')))
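- # Note: every request in this script sends access_token / timestamp / nonce as the API's
- # required common parameters, and json.dumps() is applied to each non-string value so that
- # tuples, dicts and booleans survive as query-string parameters.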
-
- def wechat_pages_get(access_token, account_id, page_id, fields):  # fetch WeChat native pages
- interface = 'wechat_pages/get'
- url = 'https://api.e.qq.com/v1.1/' + interface
- common_parameters = {
- 'access_token': access_token,
- 'timestamp': int(time.time()),
- 'nonce': str(time.time()) + str(random.randint(0, 999999)),
- 'fields':fields
- }
- parameters = {
- "account_id": account_id,
- "filtering":
- [
-
- {
- "field": "page_id",
- "operator": "EQUALS",
- "values":
- [
- page_id
- ]
- }
- ],
- "page": 1,
- "page_size": 10
- }
- parameters.update(common_parameters)
- for k in parameters:
- if type(parameters[k]) is not str:
- parameters[k] = json.dumps(parameters[k])
- r = requests.get(url, params = parameters)
- return r.json()
- #print(wechat_pages_get('2a674bef201314d338be30420369671f',14985162,1900495593,('page_id','page_name','created_time','last_modified_time','page_template_id','preview_url','page_type','source_type')))
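- # wechat_pages_get selects a single page via the API's generic "filtering" format:
- # a list of {field, operator, values} objects, here EQUALS on one page_id.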
-
- def adgroups_get(access_token, account_id, fields):  # fetch ad groups
- interface = 'adgroups/get'
- url = 'https://api.e.qq.com/v1.1/' + interface
- common_parameters = {
- 'access_token': access_token,
- 'timestamp': int(time.time()),
- 'nonce': str(time.time()) + str(random.randint(0, 999999)),
- 'fields':fields
- }
- parameters = {
- "account_id": account_id,
- "page": 4,
- "page_size": 100,
- "is_deleted": False
- }
- parameters.update(common_parameters)
- for k in parameters:
- if type(parameters[k]) is not str:
- parameters[k] = json.dumps(parameters[k])
- r = requests.get(url, params = parameters)
- return r.json()
- #print(adgroups_get('2a674bef201314d338be30420369671f',14985162,('campaign_id','adgroup_id','adgroup_name','optimization_goal','billing_event','bid_amount','daily_budget','targeting','begin_date','end_date','time_series','bid_strategy','cold_start_audience','auto_audience','expand_enabled','expand_targeting','deep_conversion_spec','deep_optimization_action_type','conversion_id','deep_conversion_behavior_bid','deep_conversion_worth_rate','system_status')))
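- # Unlike the paginated getters, adgroups_get requests a single hard-coded page ("page": 4),
- # so it returns at most 100 ad groups from that one page.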
- def images_get(access_token, account_id, fields):  # fetch image info (paginated)
- interface = 'images/get'
- url = 'https://api.e.qq.com/v1.1/' + interface
- page = 1
- list1 = []
- common_parameters = {
- 'access_token': access_token,
- 'timestamp': int(time.time()),
- 'nonce': str(time.time()) + str(random.randint(0, 999999)),
- 'fields':fields
- }
- parameters = {
- "account_id": account_id,
- "page": page,
- "page_size": 100
- }
- parameters.update(common_parameters)
- for k in parameters:
- if type(parameters[k]) is not str:
- parameters[k] = json.dumps(parameters[k])
- r = requests.get(url, params = parameters).json()
- if 'data' in r.keys():
- list1 = list1+r['data']['list']
- total_page=r['data']['page_info']['total_page']
- if total_page>1:
- for page in range(2,total_page+1):
- common_parameters = {
- 'access_token': access_token,
- 'timestamp': int(time.time()),
- 'nonce': str(time.time()) + str(random.randint(0, 999999)),
- 'fields':fields
- }
- parameters = {
- "account_id": account_id,
- "page": page,
- "page_size": 100
- }
- parameters.update(common_parameters)
- for k in parameters:
- if type(parameters[k]) is not str:
- parameters[k] = json.dumps(parameters[k])
- r = requests.get(url, params = parameters).json()
- if 'data' in r.keys():
- list1 = list1+r['data']['list']
- return list1
- #print(images_get('2a674bef201314d338be30420369671f',14985162,('image_id','preview_url')))
- def campaigns_get(access_token, account_id, fields):  # fetch campaigns (paginated)
- interface = 'campaigns/get'
- url = 'https://api.e.qq.com/v1.1/' + interface
- page = 1
- list1 = []
- common_parameters = {
- 'access_token': access_token,
- 'timestamp': int(time.time()),
- 'nonce': str(time.time()) + str(random.randint(0, 999999)),
- 'fields':fields
- }
- parameters = {
- "account_id": account_id,
- "page": page,
- "page_size": 100,
- "is_deleted": False
- }
- parameters.update(common_parameters)
- for k in parameters:
- if type(parameters[k]) is not str:
- parameters[k] = json.dumps(parameters[k])
- r = requests.get(url, params = parameters).json()
- if 'data' in r.keys():
- list1 = list1+r['data']['list']
- total_page=r['data']['page_info']['total_page']
- if total_page>1:
- for page in range(2,total_page+1):
- common_parameters = {
- 'access_token': access_token,
- 'timestamp': int(time.time()),
- 'nonce': str(time.time()) + str(random.randint(0, 999999)),
- 'fields':fields
- }
- parameters = {
- "account_id": account_id,
- "page": page,
- "page_size": 100,
- "is_deleted": False
- }
- parameters.update(common_parameters)
- for k in parameters:
- if type(parameters[k]) is not str:
- parameters[k] = json.dumps(parameters[k])
- r = requests.get(url, params = parameters).json()
- if 'data' in r.keys():
- list1 = list1+r['data']['list']
- return list1
-
- #aa=tl.token_list_vx[-2]
- #print(campaigns_get(aa[2],aa[0],('campaign_id','campaign_name','configured_status','campaign_type','promoted_object_type','daily_budget','budget_reach_date','created_time','last_modified_time','speed_mode','is_deleted')))
- def daily_reports_get(access_token, account_id, level, start_date, end_date, fields):  # fetch WeChat (wx) campaign daily report data
- interface = 'daily_reports/get'
- url = 'https://api.e.qq.com/v1.1/' + interface
- common_parameters = {
- 'access_token': access_token,
- 'timestamp': int(time.time()),
- 'nonce': str(time.time()) + str(random.randint(0, 999999)),
- 'fields':fields
- }
- parameters = {
- "account_id": account_id,
- "level": level,
- "date_range":
- {
- "start_date": start_date,
- "end_date": end_date
- },
-
- "page": 1,
- "page_size": 1000,
- "fields":
- [
-
- ]
- }
- parameters.update(common_parameters)
- for k in parameters:
- if type(parameters[k]) is not str:
- parameters[k] = json.dumps(parameters[k])
- r = requests.get(url, params = parameters)
- return r.json()
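- # The empty "fields": [] entry above is overwritten by parameters.update(common_parameters),
- # so the fields tuple supplied by the caller is what actually gets sent; the same applies to
- # daily_qq_reports_get below.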
- def daily_qq_reports_get(access_token, account_id, campaign_id, level, start_date, end_date, fields):  # fetch GDT (qq) campaign daily report data
- interface = 'daily_reports/get'
- url = 'https://api.e.qq.com/v1.1/' + interface
- common_parameters = {
- 'access_token': access_token,
- 'timestamp': int(time.time()),
- 'nonce': str(time.time()) + str(random.randint(0, 999999)),
- 'fields':fields
- }
- parameters = {
- "account_id": account_id,
- "filtering":
- [
-
- {
- "field": "campaign_id",
- "operator": "EQUALS",
- "values":
- [
- campaign_id
- ]
- }
- ],
- "level": level,
- "date_range":
- {
- "start_date": start_date,
- "end_date": end_date
- },
-
- "page": 1,
- "page_size": 1000,
- "fields":
- [
-
- ]
- }
- parameters.update(common_parameters)
- for k in parameters:
- if type(parameters[k]) is not str:
- parameters[k] = json.dumps(parameters[k])
- r = requests.get(url, params = parameters)
- return r.json()
- def mysql_insert_daily_vx_campaign(data):
-     db = pymysql.connect(host='rm-bp1c9cj79872tx3aaro.mysql.rds.aliyuncs.com', user='superc', password='Cc719199895', database='quchen_text')
-     cursor = db.cursor()
-     time1 = time.time()
-     sql = 'insert ignore into daily_vx_campaign (account_id,date,campaign_id,view_count,cost,ctr,cpc,order_roi,thousand_display_price,valid_click_count,official_account_follow_count,conversions_count,official_account_follow_rate,conversions_rate,order_count,order_rate,order_unit_price,first_day_order_amount) values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s);'
-     try:
-         cursor.executemany(sql, data)
-         db.commit()
-         cost_time = round((time.time() - time1) / 60, 1)
-         print('insert_daily_vx_campaign succeeded', len(data), 'cost_minutes:', cost_time)
-     except Exception as e:
-         db.rollback()
-         print('insert_daily_vx_campaign failed:', e)
-     finally:
-         db.close()
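- # data must be an iterable of 18-tuples ordered exactly like the column list in the INSERT
- # above; the other insert helpers below follow the same convention with their own column counts.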
-
- def mysql_insert_daily_qq_campaign(data):
-     db = pymysql.connect(host='rm-bp1c9cj79872tx3aaro.mysql.rds.aliyuncs.com', user='superc', password='Cc719199895', database='quchen_text')
-     cursor = db.cursor()
-     time1 = time.time()
-     sql = 'insert ignore into daily_qq_campaign (account_id,date,campaign_id,view_count,thousand_display_price,valid_click_count,ctr,cpc,cost,order_roi) values (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)'
-     try:
-         cursor.executemany(sql, data)
-         db.commit()
-         cost_time = round((time.time() - time1) / 60, 1)
-         print('insert_daily_qq_campaign succeeded', len(data), 'cost_minutes:', cost_time)
-     except Exception as e:
-         db.rollback()
-         print('insert_daily_qq_campaign failed:', e)
-     finally:
-         db.close()
- #print(daily_reports_get('2a674bef201314d338be30420369671f',14985162,'REPORT_LEVEL_CAMPAIGN_WECHAT','2020-07-20','2020-07-20',('account_id','date','campaign_id','view_count','cost','ctr','cpc','order_roi','thousand_display_price','valid_click_count','official_account_follow_count','conversions_count','official_account_follow_rate','conversions_rate','order_count','order_rate','order_unit_price','first_day_order_amount')))
- #print(daily_reports_get('27b2f2768640555133162b5982872b83',15223385,'REPORT_LEVEL_CAMPAIGN','2020-07-10','2020-07-19',('account_id','date','campaign_id','view_count','thousand_display_price','valid_click_count','ctr','cpc','cost','order_roi')))
- def mysql_insert_campaign_vx(data, data2):
-     db = pymysql.connect(host='rm-bp1c9cj79872tx3aaro.mysql.rds.aliyuncs.com', user='superc', password='Cc719199895', database='quchen_text')
-     cursor = db.cursor()
-     time1 = time.time()
-     sql = 'insert ignore into campaign_vx (campaign_id,campaign_name,configured_status,campaign_type,promoted_object_type,daily_budget,created_time,last_modified_time,account_id) values (%s,%s,%s,%s,%s,%s,%s,%s,%s)'
-     sql2 = 'delete from campaign_vx where campaign_id = %s'
-     try:
-         cursor.executemany(sql2, data2)
-         db.commit()
-         print('delete campaign_vx succeeded', len(data2))
-     except Exception as e:
-         db.rollback()
-         print('delete campaign_vx failed:', e)
-     try:
-         cursor.executemany(sql, data)
-         db.commit()
-         cost_time = round((time.time() - time1) / 60, 1)
-         print('insert_campaign_vx succeeded', len(data), 'cost_minutes:', cost_time)
-     except Exception as e:
-         db.rollback()
-         print('insert_campaign_vx failed:', e)
-     finally:
-         db.close()
- def mysql_insert_adcreative(data):
-     db = pymysql.connect(host='rm-bp1c9cj79872tx3aaro.mysql.rds.aliyuncs.com', user='superc', password='Cc719199895', database='quchen_text')
-     cursor = db.cursor()
-     time1 = time.time()
-     sql = 'insert ignore into adcreative (campaign_id,adcreative_id,adcreative_name,image_id,title,promoted_object_type,page_type,page_id,link_page_id,promoted_object_id) values (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)'
-     try:
-         cursor.executemany(sql, data)
-         db.commit()
-         cost_time = round((time.time() - time1) / 60, 1)
-         print('insert_adcreative succeeded', len(data), 'cost_minutes:', cost_time)
-     except Exception as e:
-         db.rollback()
-         print('insert_adcreative failed:', e)
-     finally:
-         db.close()
- def mysql_insert_image(data):
-     db = pymysql.connect(host='rm-bp1c9cj79872tx3aaro.mysql.rds.aliyuncs.com', user='superc', password='Cc719199895', database='quchen_text')
-     cursor = db.cursor()
-     time1 = time.time()
-     sql = 'insert ignore into image (image_id,preview_url,account_id) values (%s,%s,%s)'
-     try:
-         cursor.executemany(sql, data)
-         db.commit()
-         cost_time = round((time.time() - time1) / 60, 1)
-         print('insert image succeeded', len(data), 'cost_minutes:', cost_time)
-     except Exception as e:
-         db.rollback()
-         print('insert image failed:', e)
-     finally:
-         db.close()
- def get_daily_vx_campaign(st, et):  # fetch WeChat campaigns and their daily report data
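- # For each (account_id, ..., access_token) row in token_list_vx: pull the campaign list,
- # remember campaigns created or modified after st (unix timestamp) for refresh, pull the
- # WeChat campaign daily report for [st, et], then bulk-insert both result sets below.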
- token_list_v = tl.token_list_vx
- r = ()
- p = ()
- q=[]
- for x in token_list_v:
- account_id = x[0]
- access_token = x[2]
- start_date = time.strftime("%Y-%m-%d",time.localtime(st))
- end_date = time.strftime("%Y-%m-%d",time.localtime(et))
-
- l = campaigns_get(access_token,account_id,('campaign_id','campaign_name','configured_status','campaign_type','promoted_object_type','daily_budget','budget_reach_date','created_time','last_modified_time','speed_mode','is_deleted'))
-
- if len(l)>0:
- for ll in l:
- ll['account_id']=account_id
- if ll['created_time']>st or ll['last_modified_time']>st:
- q.append(ll['campaign_id'])
- lt = tuple(ll.values())
- p = p+((lt),)
-
-
- data_list = daily_reports_get(access_token,account_id,'REPORT_LEVEL_CAMPAIGN_WECHAT',start_date,end_date,('account_id','date','campaign_id','view_count','cost','ctr','cpc','order_roi','thousand_display_price','valid_click_count','official_account_follow_count','conversions_count','official_account_follow_rate','conversions_rate','order_count','order_rate','order_unit_price','first_day_order_amount'))
- if 'data' in data_list.keys():
-
- for y in data_list['data']['list']:
- y['account_id'] = account_id
- y = tuple(y.values())
- r=r+((y),)
-
- mysql_insert_daily_vx_campaign(r)
- mysql_insert_campaign_vx(p,q)
-
- #get_daily_vx_campaign(1597766400,1597852800)
- def get_daily_qq_campaign(st,et):
- token_list_q = tl.token_list_qq
- r=()
- for x in token_list_q:
- account_id = x[0]
- access_token = x[2]
- start_date = st
- end_date = et
-
- l = campaigns_get(access_token,account_id,('campaign_id','campaign_name','configured_status','campaign_type','promoted_object_type','daily_budget','budget_reach_date','created_time','last_modified_time','speed_mode','is_deleted'))
- for ll in l:
- campaign_id =ll['campaign_id']
-
- data_list = daily_qq_reports_get(access_token,account_id,campaign_id,'REPORT_LEVEL_CAMPAIGN',start_date,end_date,('account_id','date','campaign_id','view_count','thousand_display_price','valid_click_count','ctr','cpc','cost','order_roi'))
- if 'data' in data_list.keys() and len(data_list['data']['list'])>0:
- print(data_list)
- print(l)
-
- if 'data' in data_list.keys():
-
- for y in data_list['data']['list']:
-
- y = tuple(y.values())
- r=r+((y),)
-
- #mysql_insert_daily_qq_campaign(r)
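- # Note: the insert call above is commented out, so get_daily_qq_campaign currently only
- # collects (and prints) report rows without writing them to MySQL.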
- #get_daily_vx_campaign('2020-01-01','2020-07-27')
- #get_daily_qq_campaign('2020-07-10','2020-07-24')
- def get_campaign_update_list():
-     db = pymysql.connect(host='rm-bp1c9cj79872tx3aaro.mysql.rds.aliyuncs.com', user='superc', password='Cc719199895', database='quchen_text')
-     #db = pymysql.connect(host='localhost', user='root', password='chencong1996', database='quchen_text')
-     cursor = db.cursor()
-     sql = 'select distinct advertiser_vx.account_id,access_token from campaign_vx left join advertiser_vx on advertiser_vx.account_id = campaign_vx.account_id where created_time>=%s or last_modified_time>=%s'
-     data = (int((time.time()+8*3600)//86400*86400-8*3600-86400), int((time.time()+8*3600)//86400*86400-8*3600-86400))
-     x = ()
-     try:
-         cursor.execute(sql, data)
-         db.commit()
-         x = cursor.fetchall()
-         print('get campaign update list succeeded', x)
-     except Exception as e:
-         db.rollback()
-         print('get campaign update list failed:', e)
-     a = []
-     if len(x) > 0:
-         for t in x:
-             a.append(t[0])
-         sql2 = 'delete from adcreative where campaign_id=%s'
-         try:
-             cursor.executemany(sql2, a)
-             db.commit()
-             print('delete adcreative succeeded', cursor.rowcount)
-         except Exception as e:
-             db.rollback()
-             print('delete adcreative failed:', e)
-     db.close()
-     return x
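- # Returns (account_id, access_token) rows for accounts whose campaigns were created or
- # modified since yesterday 00:00 Beijing time, and tries to clear matching adcreative rows
- # first (note the delete filters on campaign_id but is fed account_id values).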
- def get_adcreative_vx():
- token_list_vx=get_campaign_update_list()
- r = ()
- for x in token_list_vx:
- account_id = x[0]
- access_token = x[1]
- l = adcreatives_get(access_token,account_id,('campaign_id','adcreative_id','adcreative_name','adcreative_elements','promoted_object_type','page_type','page_spec','link_page_spec','universal_link_url','promoted_object_id'))
-
- if len(l)>0:
- for ll in l:
-
-
- if 'image_list' in ll['adcreative_elements'].keys():
- for image_id in ll['adcreative_elements']['image_list']:
- a={}
- a['campaign_id']=ll['campaign_id']
- a['adcreative_id'] = ll['adcreative_id']
- a['adcreative_name'] = ll['adcreative_name']
- a['image_id'] = image_id
- a['title'] = ll['adcreative_elements'].get('title', '')
- a['promoted_object_type'] = ll['promoted_object_type']
- a['page_type'] = ll['page_type']
- if 'page_spec' in ll.keys():
- if 'page_id' in ll['page_spec'].keys():
- a['page_id'] = ll['page_spec']['page_id']
- else :
- a['page_id'] = None
- else:
- a['page_id'] = None
-
- if 'link_page_spec' in ll.keys():
- if 'page_id' in ll['link_page_spec'].keys():
- a['link_page_id']= ll['link_page_spec']['page_id']
- else:
- a['link_page_id']=None
- else:
- a['link_page_id'] = None
-
- a['promoted_object_id'] = ll['promoted_object_id']
- y = tuple(a.values())
- r = r+((y),)
- elif 'image' in ll['adcreative_elements'].keys() :
- a={}
- a['campaign_id']=ll['campaign_id']
- a['adcreative_id'] = ll['adcreative_id']
- a['adcreative_name'] = ll['adcreative_name']
- a['image_id'] = ll['adcreative_elements']['image']
- if 'title' in ll['adcreative_elements']:
- a['title'] = ll['adcreative_elements']['title']
- else:
- a['title']=''
- a['promoted_object_type'] = ll['promoted_object_type']
- a['page_type'] = ll['page_type']
- if 'page_spec' in ll.keys():
- if 'page_id' in ll['page_spec'].keys():
- a['page_id'] = ll['page_spec']['page_id']
- else :
- a['page_id'] = None
- else:
- a['page_id'] = None
-
- if 'link_page_spec' in ll.keys():
- if 'page_id' in ll['link_page_spec'].keys():
- a['link_page_id']= ll['link_page_spec']['page_id']
- else:
- a['link_page_id']=None
- else:
- a['link_page_id'] = None
-
- a['promoted_object_id'] = ll['promoted_object_id']
- y = tuple(a.values())
- r = r+((y),)
-
- mysql_insert_adcreative(r)
- #get_adcreative_vx()
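- # get_adcreative_vx flattens each creative into one row per image: creatives with an
- # 'image_list' element yield one row per image_id, single-'image' creatives yield one row;
- # page_id / link_page_id fall back to None when the corresponding spec is missing.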
- def get_image_information():  # fetch image preview URLs for every account
- token_list_vx = tl.token_list_vx
- r = ()
- for x in token_list_vx:
- account_id = x[0]
- access_token = x[2]
-
- l = images_get(access_token,account_id,('image_id','preview_url'))
- if len(l)>0:
- for ll in l:
- ll['account_id']=account_id
- y=tuple(ll.values())
- r=r+((y),)
- mysql_insert_image(r)
- #get_image_information()
- #get_adcreative_vx()
- #get_image_information()
- def start_all_job():
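- # start_time and end_time both evaluate to yesterday 00:00 Beijing time (UTC+8):
- # (now + 8h) is floored to a whole day, shifted back to UTC, then minus one day.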
- start_time = int((time.time()+8*3600)//86400*86400-8*3600-86400)
- end_time = int((time.time()+8*3600)//86400*86400-8*3600-86400)
- get_daily_vx_campaign(start_time,end_time)
- get_adcreative_vx()
- get_image_information()
- start_job_time = '2020-11-25 03:10:10'
- if __name__ == '__main__':
- scheduler = BlockingScheduler()
- scheduler.add_job(start_all_job, 'interval',days=1,start_date=start_job_time)
- scheduler.start()
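- # The scheduler blocks here and fires start_all_job once a day, aligned to start_job_time
- # (with an interval trigger, occurrences before "now" are skipped and the next daily slot runs).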
-