|
@@ -0,0 +1,625 @@
|
|
|
+#!/usr/bin/env python
|
|
|
+# -*- coding: utf-8 -*-
|
|
|
+
|
|
|
+"""
|
|
|
+__title__ = '每日凌晨空闲时检查本地数据库中的订单数据是否和平台昨天总订单一致'
|
|
|
+
|
|
|
+@Time : 2020/9/26 19:44
|
|
|
+@Author : Kenny-PC
|
|
|
+@Software: PyCharm
|
|
|
+
|
|
|
+# code is far away from bugs with the god animal protecting
|
|
|
+ I love animals. They taste delicious.
|
|
|
+ ┏┓ ┏┓
|
|
|
+ ┏┛┻━━━┛┻┓
|
|
|
+ ┃ ☃ ┃
|
|
|
+ ┃ ┳┛ ┗┳ ┃
|
|
|
+ ┃ ┻ ┃
|
|
|
+ ┗━┓ ┏━┛
|
|
|
+ ┃ ┗━━━┓
|
|
|
+ ┃ 神兽保佑 ┣┓
|
|
|
+ ┃ 永无BUG! ┏┛
|
|
|
+ ┗┓┓┏━┳┓┏┛
|
|
|
+ ┃┫┫ ┃┫┫
|
|
|
+ ┗┻┛ ┗┻┛
|
|
|
+"""
|
|
|
+
|
|
|
+import requests
|
|
|
+import hashlib
|
|
|
+
|
|
|
+import time
|
|
|
+import datetime
|
|
|
+from urllib import parse
|
|
|
+
|
|
|
+import math
|
|
|
+import pymysql
|
|
|
+from apscheduler.schedulers.blocking import BlockingScheduler
|
|
|
+import account_list as al
|
|
|
+from util import date_util, platform_util
|
|
|
+
|
|
|
+
|
|
|
def md5value(s):
    """Return the lowercase hex MD5 digest of *s*, encoded as UTF-8.

    Used throughout this module to build the request signatures the
    various platform APIs expect.
    """
    return hashlib.md5(s.encode("utf-8")).hexdigest()
|
|
|
+
|
|
|
+
|
|
|
## (1) Yuewen (阅文)
def get_yuewen_order(st, et):
    """Fetch paid orders from the Yuewen open API for [st, et) and store them.

    st / et are unix timestamps (seconds). Each entry of
    al.yuewen_account_list is an (email, appsecert) pair. The API is queried
    one day (86400 s) at a time; a page holds up to 100 orders, so pages 2..N
    are fetched using the pagination cursors (last_min_id / last_max_id)
    returned by the previous response. Every order is normalized to a fixed
    key set, key-sorted, converted to a tuple of values, and the whole day's
    tuples are handed to mysql_insert_order().
    """
    url = 'https://open.yuewen.com/cpapi/wxRecharge/querychargelog'
    version = 1
    secert_list = al.yuewen_account_list

    for secert in secert_list:
        start_time = st
        email = secert[0]
        appsecert = secert[1]

        # One API window per day. NOTE(review): when et - st < 86400 this
        # range is empty and the account is skipped entirely — confirm callers
        # always pass whole-day bounds.
        for i in range(int((et - st) / 86400)):
            # time.sleep(61)
            t = ()  # normalized order tuples accumulated for this day
            end_time = min(start_time + 86400, et)
            timestamp = int(time.time())
            s = ''
            page = 1
            order_status = 2  # presumably "paid" — confirm against the API docs
            data = {
                'email': email,
                'version': version,
                'timestamp': timestamp,
                'start_time': start_time,
                'end_time': end_time,
                'page': page,
                'order_status': order_status
                # 'last_min_id':last_min_id,
                # 'last_max_id':last_max_id,
                # 'total_count':total_count,
                # 'last_page':last_page
            }
            # Signature: MD5(appsecert + "k1v1k2v2..." with keys sorted), uppercased.
            sorted_data = sorted(data.items())
            for k, v in sorted_data:
                s = s + str(k) + str(v)

            sign = md5value(appsecert + s).upper()

            data1 = {
                'email': email,
                'version': version,
                'timestamp': timestamp,
                'start_time': start_time,
                'end_time': end_time,
                'page': page,
                'order_status': order_status,
                'sign': sign
            }
            list1 = requests.get(url=url, params=data1)

            # Pagination cursors for pages > 1 come from the first response.
            total_count = list1.json()['data']['total_count']
            last_min_id = list1.json()['data']['min_id']
            last_max_id = list1.json()['data']['max_id']
            last_page = list1.json()['data']['page']

            if total_count > 0:
                for x in list1.json()['data']['list']:
                    y = {}
                    dtime = datetime.datetime.strptime(x['order_time'], "%Y-%m-%d %H:%M:%S")
                    # Midnight (UTC+8) of the order day, as unix seconds.
                    y['date'] = ((int(time.mktime(dtime.timetuple())) + 8 * 3600) // 86400) * 86400 - 8 * 3600
                    y['platform'] = '阅文'
                    y['channel'] = x['app_name']
                    y['from_novel'] = x['book_name']
                    y['user_id'] = x['openid']
                    y['stage'] = ''
                    y['channel_id'] = 0
                    y['order_time'] = x['order_time']
                    y['amount'] = x['amount']
                    y['reg_time'] = x['reg_time']
                    y['order_id'] = x['order_id']
                    """
                    del x['app_name']
                    del x['order_status']
                    del x['order_type']
                    del x['openid']
                    del x['user_name']
                    del x['sex']
                    del x['channel_name']
                    del x['book_id']
                    del x['book_name']
                    del x['report_status']
                    """
                    # Key-sort so the tuple's value order matches the column
                    # order expected by mysql_insert_order's REPLACE statement.
                    y = sorted(y.items(), key=lambda item: item[0])
                    y = dict(y)
                    y = tuple(y.values())
                    t = t + ((y),)

                if total_count > 100:
                    # Follow pagination: 100 orders per page.
                    for page in range(2, math.ceil(total_count / 100) + 1):
                        data = {
                            'email': email,
                            'version': version,
                            'timestamp': timestamp,
                            'start_time': start_time,
                            'end_time': end_time,
                            'page': page,
                            'last_min_id': last_min_id,
                            'last_max_id': last_max_id,
                            'total_count': total_count,
                            'last_page': last_page,
                            'order_status': order_status
                        }
                        # Same signature scheme as page 1, now including cursors.
                        sorted_data = sorted(data.items())
                        s1 = ''
                        for k, v in sorted_data:
                            s1 = s1 + str(k) + str(v)
                        sign = md5value(appsecert + s1).upper()
                        data2 = {
                            'email': email,
                            'version': version,
                            'timestamp': timestamp,
                            'start_time': start_time,
                            'end_time': end_time,
                            'page': page,
                            'last_min_id': last_min_id,
                            'last_max_id': last_max_id,
                            'total_count': total_count,
                            'last_page': last_page,
                            'order_status': order_status,
                            'sign': sign
                        }
                        list2 = requests.get(url=url, params=data2)
                        for x in list2.json()['data']['list']:
                            y = {}
                            dtime = datetime.datetime.strptime(x['order_time'], "%Y-%m-%d %H:%M:%S")
                            # Midnight (UTC+8) of the order day, as unix seconds.
                            y['date'] = ((int(time.mktime(dtime.timetuple())) + 8 * 3600) // 86400) * 86400 - 8 * 3600
                            y['platform'] = '阅文'
                            y['channel'] = x['app_name']
                            y['from_novel'] = x['book_name']
                            y['user_id'] = x['openid']
                            y['stage'] = ''
                            y['channel_id'] = 0
                            y['order_time'] = x['order_time']
                            y['amount'] = x['amount']
                            y['reg_time'] = x['reg_time']
                            y['order_id'] = x['order_id']
                            """
                            del x['app_name']
                            del x['order_status']
                            del x['order_type']
                            del x['openid']
                            del x['user_name']
                            del x['sex']
                            del x['channel_name']
                            del x['book_id']
                            del x['book_name']
                            del x['report_status']
                            """
                            y = sorted(y.items(), key=lambda item: item[0])
                            y = dict(y)
                            y = tuple(y.values())
                            t = t + ((y),)

                        # Refresh cursors from this page for the next request.
                        total_count = list2.json()['data']['total_count']
                        last_min_id = list2.json()['data']['min_id']
                        last_max_id = list2.json()['data']['max_id']
                        last_page = list2.json()['data']['page']

            print(email, start_time, len(t))
            start_time = start_time + 86400
            if len(t) > 0:
                mysql_insert_order(t)
|
|
|
+
|
|
|
+
|
|
|
## (2) Zhangdu (掌读)
def get_zhangdu_order(st, et):
    """Fetch orders from the Zhangdu channel API for [st, et) and store them.

    st / et are unix timestamps (seconds). Each entry of
    al.zhangdu_account_list is (uid, appsecert, channel_name). The window is
    split into 90-day segments, each segment is paged, and every order is
    normalized into a key-sorted value tuple for mysql_insert_order().
    """
    secert_list = al.zhangdu_account_list
    url = 'https://api.zhangdu520.com/channel/getorder'

    for item in secert_list:  # per channel account
        t = ()  # normalized order tuples accumulated for this channel

        uid = item[0]
        appsecert = item[1]
        channel = item[2]
        timestamp = int(time.time())
        # Signature: MD5("uid&appsecert&timestamp"); computed once per channel
        # and reused for every request below.
        sign = md5value(str(uid) + '&' + appsecert + '&' + str(timestamp))
        page = 1
        starttime = st
        timespace = 90 * 3600 * 24  # 90-day query window per request
        endtime = min(et, st + timespace)

        for x in range((et - st) // timespace + 1):  # per time segment
            Params = {
                'uid': uid,
                'timestamp': timestamp,
                'sign': sign,
                'starttime': starttime,
                'endtime': endtime
            }
            # First call only to learn the page count for this segment.
            list1 = requests.get(url=url, params=Params)
            pageCount = list1.json()['data']['pageCount']
            if pageCount > 0:
                for a in range(1, pageCount + 1):  # per page
                    page = a
                    Params = {
                        'uid': uid,
                        'timestamp': timestamp,
                        'sign': sign,
                        'starttime': starttime,
                        'endtime': endtime,
                        'page': page
                    }
                    list2 = requests.get(url=url, params=Params).json()
                    if 'data' in list2.keys():
                        for b in list2['data']['list']:
                            c = {}
                            c['amount'] = b['amount']
                            c['channel_id'] = uid
                            c['order_id'] = str(b['orderno'])
                            c['order_time'] = b['ctime']
                            c['user_id'] = b['openid']
                            c['platform'] = '掌读'
                            c['channel'] = channel
                            c['reg_time'] = b['regtime']
                            c['from_novel'] = ''
                            c['stage'] = ''
                            # Midnight (UTC+8) of the order day, unix seconds.
                            c['date'] = ((int(b['ctime']) + 8 * 3600) // 86400) * 86400 - 8 * 3600
                            """
                            del b['openid']
                            del b['regtime']
                            del b['ip']
                            del b['ua']
                            del b['id']
                            del b['ctime']
                            del b['userid']
                            del b['orderno']
                            del b['source']
                            del b['sourceid']
                            """
                            # NOTE(review): `del b` only unbinds the loop name;
                            # the order already copied into c is still appended
                            # below. If status == '1' was meant to be filtered
                            # out, this does not do it — confirm intent.
                            if b['status'] == '1':
                                # del b['status']
                                del b
                            x = sorted(c.items(), key=lambda item: item[0])
                            x = dict(x)
                            x = tuple(x.values())
                            t = t + ((x),)
                    else:
                        print(list2)
            starttime = starttime + timespace
            endtime = min(et, starttime + timespace)
        if len(t) > 0:
            mysql_insert_order(t)
        print('掌读', channel, len(t))
|
|
|
+
|
|
|
+
|
|
|
## (3) Huasheng (花生)
def get_huasheng_order(st, et):
    """Fetch paid orders from the Huasheng VIP API for [st, et) and store them.

    st / et are unix timestamps (seconds). Each entry of
    al.huasheng_account_list is (apiKey, apiSecurity, stage). For every
    account the merchant (sub-channel) list is fetched first, then orders are
    pulled per merchant, per day, 500 per page. Only orders with
    order_status == 1 are kept; each is normalized, key-sorted and turned
    into a value tuple for mysql_insert_order().
    """
    apikey_list = al.huasheng_account_list
    url = 'https://vip.rlcps.cn/api/getMerchants'

    for key in apikey_list:  # fetch the merchant ids under each vip account
        apiKEY = key[0]
        apiSecurity = key[1]
        stage = key[2]
        timestamp = str(int(time.time()))
        # Merchant-list signature: MD5(apiKey + timestamp + apiSecurity), uppercased.
        sign = md5value(apiKEY + timestamp + apiSecurity).upper()
        data = {
            'apiKey': apiKEY,
            'apiSecurity': apiSecurity,
            'timestamp': timestamp,
            'sign': sign
        }
        list0 = requests.post(url, data).json()
        t = ()  # normalized tuples accumulated across this account's merchants

        for merchant in list0['data']:
            merchant_id = merchant['merchant_id']
            merchant_name = merchant['merchant_name']
            url1 = 'https://vip.rlcps.cn/api/orderList'
            start_time = st

            # One query per day in [st, et).
            for i in range((et - st) // 86400):
                page = 1
                date = time.strftime("%Y-%m-%d", time.localtime(start_time))
                # Order-list signature covers the queried date and merchant.
                sign = md5value(apiKEY + date + str(merchant_id) + timestamp + apiSecurity).upper()
                data1 = {
                    'apiKey': apiKEY,
                    'apiSecurity': apiSecurity,
                    'timestamp': timestamp,
                    'date': date,
                    'merchant_id': merchant_id,
                    'sign': sign,
                    'page': page
                }
                list1 = requests.post(url1, data1).json()

                if 'data' in list1.keys() and len(list1['data']) > 0:

                    # 500 orders per page. NOTE(review): this loop variable
                    # shadows the outer day-loop `i`; harmless because `for`
                    # rebinds it each day, but confusing.
                    for i in range(int(math.ceil(list1['count'] / 500))):
                        data2 = {
                            'apiKey': apiKEY,
                            'apiSecurity': apiSecurity,
                            'timestamp': timestamp,
                            'date': date,
                            'merchant_id': merchant_id,
                            'sign': sign,
                            'page': page
                        }
                        list2 = requests.post(url1, data2).json()

                        for x in list2['data']:
                            if x['order_status'] == 1:  # keep paid orders only
                                y = {}
                                ##dtime = datetime.datetime.strptime(x['pay_at'],"%Y-%m-%d")
                                ##y['date']= ((int(time.mktime(dtime.timetuple()))+8*3600)//86400)*86400-8*3600
                                y['user_id'] = x['openid']
                                y['order_id'] = x['trans_id']
                                y['order_time'] = x['pay_at']
                                y['reg_time'] = x['join_at']
                                # Midnight (UTC+8) of the queried day, unix seconds.
                                y['date'] = (start_time + 8 * 3600) // 86400 * 86400 - 8 * 3600
                                y['channel'] = merchant_name
                                y['channel_id'] = merchant_id
                                y['platform'] = '花生'
                                y['stage'] = stage
                                y['from_novel'] = x['book_name']
                                y['amount'] = x['amount']
                                """
                                del x['order_num']
                                del x['book_name']
                                del x['trans_id']
                                del x['pay_at']
                                del x['join_at']
                                del x['subscribe_at']
                                del x['openid']
                                del x['charge_count']
                                del x['book_id']
                                del x['order_status']
                                del x['user_name']
                                del x['spread_name']
                                del x['request_at']
                                """
                                # Key-sort so the tuple's value order matches the
                                # column order used by mysql_insert_order.
                                y = sorted(y.items(), key=lambda item: item[0])
                                y = dict(y)
                                y = tuple(y.values())
                                t = t + ((y),)
                        page = page + 1
                else:
                    print(list1)
                start_time = start_time + 86400

            if len(t) > 0:
                mysql_insert_order(t)
                print(stage, merchant_name, len(t))
|
|
|
+
|
|
|
+
|
|
|
## (4) Zhangzhongyun (掌中云)
def get_zzy_order(st, et):
    """Fetch paid orders from the Zhangzhongyun (818tu) open API and store them.

    st / et arrive as unix timestamps and are first converted to the
    platform's own query-time format. Each entry of al.zzy_account_list is
    (key, secret, stage[, fallback_key, fallback_secret]); the sub-channel
    list is fetched per account, then orders are pulled per sub-channel with
    status=1, 1000 per page, normalized and handed to mysql_insert_order().
    """
    # 掌中云's time format is special — convert both bounds first.
    st = platform_util.getZzyQueryTime(st)
    et = platform_util.getZzyQueryTime(et)

    API_list = al.zzy_account_list
    url = 'https://openapi.818tu.com/partners/channel/channels/list?'

    for x in API_list:
        my_key = x[0]
        secert = x[1]
        stage = x[2]
        # Signature: MD5(secret + alphabetically ordered query string).
        my_sign = md5value(secert + 'key=' + my_key)
        parameter = 'key=' + my_key + '&sign=' + my_sign
        # NOTE(review): logs the signed URL (includes sign) — consider removing.
        print(url + parameter)
        channel_list = requests.get(url + parameter)  # fetch sub-channel list

        if 'data' in channel_list.json().keys():
            items = channel_list.json()['data']['items']
        elif len(x) > 3:
            # Primary credentials failed; retry with the fallback pair.
            my_key = x[3]
            secert = x[4]
            my_sign = md5value(secert + 'key=' + my_key)
            parameter = 'key=' + my_key + '&sign=' + my_sign
            channel_list = requests.get(url + parameter)
            items = channel_list.json()['data']['items']
        else:
            print(channel_list.json())
            items = []

        for item in items:  # pull historical orders per channel_id
            r = ()  # normalized order tuples for this sub-channel
            channel_id = item['id']
            channel = item['nickname']
            status = str(1)  # only status=1 orders are requested
            per_page = str(1000)
            limit_time = et
            get_time = st
            lt = parse.urlencode({'created_at[lt]': limit_time})
            gt = parse.urlencode({'created_at[gt]': get_time})
            url_1 = 'https://openapi.818tu.com/partners/channel/orders/list?'
            # Sign is computed over the raw (unencoded) parameters in
            # alphabetical order; the request itself uses the urlencoded form.
            my_sign_1 = md5value(secert + 'channel_id=' + str(
                channel_id) + '&created_at[gt]=' + get_time + '&created_at[lt]=' + limit_time + '&key=' + my_key + '&per_page=' + per_page + '&status=' + status)
            parameter_1 = 'channel_id=' + str(
                channel_id) + '&' + gt + '&' + lt + '&per_page=' + per_page + '&status=' + status + '&key=' + my_key + '&sign=' + my_sign_1
            orders = requests.get(url_1 + parameter_1)
            # Page count at per_page rows each.
            t = orders.json()['data']['count'] // int(per_page) + 1
            for page in range(1, t + 1):
                my_sign_2 = md5value(secert + 'channel_id=' + str(
                    channel_id) + '&created_at[gt]=' + get_time + '&created_at[lt]=' + limit_time + '&key=' + my_key + '&page=' + str(
                    page) + '&per_page=' + per_page + '&status=' + status)
                parameter_2 = 'channel_id=' + str(channel_id) + '&' + gt + '&' + lt + '&page=' + str(
                    page) + '&per_page=' + per_page + '&status=' + status + '&key=' + my_key + '&sign=' + my_sign_2
                orders_1 = requests.get(url_1 + parameter_2)
                b = orders_1.json()['data']['items']

                for a in b:
                    c = {}
                    c['user_id'] = str(a['member']['openid'])
                    c['channel'] = channel
                    c['reg_time'] = a['member']['created_at']
                    c['channel_id'] = channel_id
                    c['amount'] = round(a['price'] / 100, 2)  # price is in cents
                    c['order_id'] = str(a['id'])
                    c['order_time'] = a['created_at']
                    c['platform'] = '掌中云'
                    c['stage'] = stage
                    # c['amount']=a['amount']
                    dtime = datetime.datetime.strptime(a['created_at'][0:10], "%Y-%m-%d")
                    # Midnight (UTC+8) of the order day, unix seconds.
                    c['date'] = ((int(time.mktime(dtime.timetuple())) + 8 * 3600) // 86400) * 86400 - 8 * 3600

                    if str(a['from_novel_id']) != 'None':
                        c['from_novel'] = a['from_novel']['title']
                    else:
                        c['from_novel'] = 'None'
                    """
                    del a['member']
                    del a['referral_link_id']
                    del a['id']
                    del a['created_at']
                    del a['paid_at']
                    del a['border_id']
                    del a['from_novel_id']
                    del a['status']
                    del a['price']
                    del a['agent_uid']
                    """
                    # NOTE(review): rebinding `x` clobbers the outer account
                    # loop variable; harmless today (x is not read again this
                    # iteration) but fragile.
                    x = sorted(c.items(), key=lambda item: item[0])
                    x = dict(x)
                    x = tuple(x.values())
                    r = r + ((x),)

            if len(r) > 0:
                mysql_insert_order(r)

            print('zzy', channel, len(r))
|
|
|
+
|
|
|
## (5) Youshuge (悠书阁)
def get_ysg_order(st, et):
    """Fetch paid orders from the Youshuge open API for [st, et) and store them.

    st / et are unix timestamps (seconds), converted to YYYY-MM-DD bounds for
    the API. Each entry of al.ysg_account_list is
    (host_name, channel_id, secret_key, channel_name, stage). Results are
    paged at 100 rows per page; every order is normalized, key-sorted and
    turned into a value tuple, then the whole batch is inserted once at the
    end via mysql_insert_order().
    """
    key_list = al.ysg_account_list
    url = 'https://novel.youshuge.com/v2/open/orders'
    o = ()  # normalized order tuples accumulated across ALL accounts

    for key in key_list:
        host_name = key[0]
        channel_id = key[1]
        secert_key = key[2]
        channel = key[3]
        stage = key[4]
        timestamp = int(time.time())
        start_date = time.strftime("%Y-%m-%d", time.localtime(st))
        end_date = time.strftime("%Y-%m-%d", time.localtime(et))
        page = 1
        # Signature source: query params in fixed alphabetical order followed
        # by the secret key, MD5'd and uppercased.
        str1 = 'channel_id=' + str(channel_id) + '&end_date=' + end_date + '&host_name=' + host_name + '&page=' + str(
            page) + '&pay_status=1' + '&start_date=' + start_date + '&time=' + str(timestamp) + '&key=' + secert_key
        sign = md5value(str1).upper()
        data = {
            'sign': sign,
            'host_name': host_name,
            'time': timestamp,
            'channel_id': channel_id,
            'page': page,
            'pay_status': 1,
            'start_date': start_date,
            'end_date': end_date
        }
        r = requests.post(url, data).json()

        if 'data' in r.keys():
            if len(r['data']) > 0:
                # r['data'][0]['count'] is the total row count; 100 rows/page.
                for i in range((r['data'][0]['count'] - 1) // 100 + 1):
                    timestamp = int(time.time())
                    str1 = 'channel_id=' + str(
                        channel_id) + '&end_date=' + end_date + '&host_name=' + host_name + '&page=' + str(
                        page) + '&pay_status=1' + '&start_date=' + start_date + '&time=' + str(
                        timestamp) + '&key=' + secert_key
                    sign = md5value(str1).upper()
                    data2 = {
                        'sign': sign,
                        'host_name': host_name,
                        'time': timestamp,
                        'channel_id': channel_id,
                        'page': page,
                        'pay_status': 1,
                        'start_date': start_date,
                        'end_date': end_date
                    }
                    r2 = requests.post(url, data2).json()

                    if 'data' in r2.keys():
                        if len(r2['data']) > 0:
                            for x in r2['data']:
                                y = {}
                                dtime = datetime.datetime.strptime(x['create_time'][0:10], "%Y-%m-%d")
                                # Midnight (UTC+8) of the order day, unix seconds.
                                y['date'] = ((int(
                                    time.mktime(dtime.timetuple())) + 8 * 3600) // 86400) * 86400 - 8 * 3600
                                y['order_id'] = x['order_num']
                                y['amount'] = round(int(x['price']) / 100, 2)  # price is in cents
                                y['order_time'] = x['create_time']
                                y['channel'] = channel
                                y['from_novel'] = x['book_name']
                                y['stage'] = stage
                                y['user_id'] = x['openid']
                                y['channel_id'] = channel_id
                                y['platform'] = '悠书阁'
                                y['reg_time'] = x['reg_time']

                                # Key-sort so the value order matches the DB
                                # column order used by mysql_insert_order.
                                y = sorted(y.items(), key=lambda item: item[0])
                                y = dict(y)
                                y = tuple(y.values())
                                o = o + ((y),)
                    page = page + 1
    # Single insert after all accounts; `o` spans every account above.
    if len(o) > 0:
        mysql_insert_order(o)
|
|
|
+
|
|
|
+
|
|
|
## Import uses REPLACE keyed on the order_id primary key so re-runs are idempotent.
def mysql_insert_order(data):
    """Persist a batch of normalized order tuples. Currently a stub (no-op).

    The real implementation below is commented out. NOTE(review): its guard
    is inverted — `if len(data) != 0` prints the "data empty" message and
    skips the insert for NON-empty batches; it should be `if len(data) == 0`.
    Fix before re-enabling.
    """
    pass
    # if len(data) != 0:
    #     print('数据为空,不执行数据库操作!')
    #     pass
    # else:
    #     db = pymysql.connect('localhost', 'root', 'root', 'quchen_text')
    #     cursor = db.cursor()
    #     # sql = 'insert ignore into quchen_text.order_daily (amount,channel,channel_id,date,from_novel,order_id,order_time,platform,reg_time,stage,user_id) values (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s);'
    #     # sql = "update quchen_text.order set amount =%s where platform='掌中云' and order_id =%s"
    #     sql = 'replace into quchen_text.order (amount,channel,channel_id,date,from_novel,order_id,order_time,platform,reg_time,stage,user_id) values (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s);'
    #
    #     try:
    #         cursor.executemany(sql, data)
    #         db.commit()
    #         print('access insert order', len(data))
    #     except Exception as e:
    #         db.rollback()
    #         print('订单数据入库失败:', e)
|
|
|
+
|
|
|
+
|
|
|
def start_all_job():
    """Run the daily order reconciliation for yesterday's window.

    Computes yesterday's [start, end) unix-second range, logs both bounds,
    pulls orders from the enabled platforms, and reports the elapsed wall
    time in seconds.
    """
    window_start = date_util.getYesterdayStartTime()
    window_end = date_util.getTodayStartTime()

    for boundary in (window_start, window_end):
        print(date_util.getSecondsToDatetime(boundary))

    began_at = date_util.getCurrentSecondTime()

    # Only Zhangzhongyun is currently enabled; the other platform pulls are
    # deliberately switched off:
    # get_yuewen_order(window_start, window_end)
    # get_ysg_order(window_start, window_end)
    # get_zhangdu_order(window_start, window_end)
    # get_huasheng_order(window_start, window_end)
    get_zzy_order(window_start, window_end)

    print('执行时间(秒)为:', date_util.getCurrentSecondTime() - began_at)
|
|
|
+
|
|
|
if __name__ == '__main__':
    start_all_job()

    # Scheduler wiring, currently disabled — the script runs once and exits.
    #
    # scheduler = BlockingScheduler()
    #
    # # Run once a day, starting in the small hours:
    # #start_job_time = '2020-09-26 03:05:00'
    # # scheduler.add_job(start_all_job, 'interval', days=1, start_date=start_job_time)
    #
    # # Run at minute 30 of every hour between 03:00 and 04:59:
    # # scheduler.add_job(start_all_job, 'cron', hour='3-4', minute='30')
    #
    # # Run every 2 seconds (debug interval; the original comment said 10 s):
    # scheduler.add_job(start_all_job, 'interval', seconds=2)
    # scheduler.start()
|
|
|
+
|
|
|
+
|