|
@@ -610,5 +610,103 @@ def advertisement_rank(user_id, start, end, type, page, page_size, order, order_
|
|
|
return getLimitSumData(db, sql, sum_sql, page, page_size)
|
|
|
|
|
|
|
|
|
"""Creative display (创意展示)"""
|
|
|
+
|
|
|
+
|
|
|
def idea_rank(user_id, start, end, page, page_size, order, order_by, book, channel, is_singleimg,
              is_video, labels, collect, data_type):
    """Creative ("创意") display ranking, read from ``dw_image_cost_day``.

    Builds a filtered, ordered report of creatives (images/videos) with their
    cost/view/click metrics and returns one page of results.

    Args:
        user_id: requesting user; members of ``super_auth()`` see all pitchers,
            everyone else is restricted to their own (group) pitcher names.
        start, end: inclusive ``dt`` bounds; each is applied only when truthy.
        page, page_size: pagination parameters passed to ``getLimitData``.
        order, order_by: sort direction and frontend sort key; the key is
            mapped to the matching SQL column/expression below.
        book, channel: optional equality filters.
        is_singleimg: when falsy, restrict to multi-image creatives
            (``image_id`` containing a comma).
        is_video: when truthy, restrict to video creatives.
        labels, collect: accepted but not applied yet (see TODOs).
        data_type: ``'all'`` additionally includes public data
            (rows older than 5 days or with cost > 5000).

    Returns:
        dict with keys ``records``, ``total``, ``size``, ``current``, ``pages``.

    NOTE(review): filter values are interpolated into the SQL with f-strings.
    If any of them can carry untrusted input this is SQL-injectable — switch
    to parameterized queries once the DB helper supports them.
    """
    # TODO: move data access to ClickHouse.
    # TODO: time attribution — fetch every record in the window and aggregate
    #       (aggregation deferred; for now just pull the rows out).
    # TODO: frontend — add an "owner" display, send user_id with the request,
    #       drop upload time / launch start time.

    if user_id in super_auth():
        op1 = ''  # super users: no pitcher restriction
    else:
        # Restrict to the user's own pitcher nicknames plus their user name.
        user = tuple([i['nick_name'] for i in get_pitcher({'user_id': user_id})] + [get_user_name_by_id(user_id)])
        if len(user) == 1:
            op1 = f" and pitcher ='{user[0]}'"
        else:
            op1 = f" and pitcher in {str(user)}"

    op4 = f" and channel='{channel}'" if channel else ''
    op10 = f" and book='{book}'" if book else ''
    # TODO: label handling — map label ids onto their corresponding plans.

    op11 = f" and image_id like '%,%' " if not is_singleimg else ''
    op12 = f" and is_video" if is_video else ''  # video-only filter

    # Public data behaves like the material library: the personal view only
    # shows personal (group) data.
    # TODO: later restrict op_or1 by owner — classify owner when
    #       dw_image_cost_day is generated, based on dt/cost.
    op_or1 = f' or (dt<date_add(now(),interval -5 day) or cost>5000) ' if data_type == 'all' else ''

    # Map frontend sort keys onto SQL columns/expressions.  (Equivalent to the
    # original if-chain: the mapped values never collide with the keys.)
    order_by = {
        'clicktimes': 'click_count',
        'view_times': 'view_count',
        'consume_amount': 'cost',
        'click_rate': 'if(click_count=0 or view_count =0 or view_count is null'
                      ' or click_count is null,0,click_count / view_count) ',
    }.get(order_by, order_by)
    op_order = f" order by {order_by} {order}" if order_by and order else ''

    # The time window is the base restriction and must always apply.
    op_time_bigger = f" and dt>='{start}' " if start else ''
    op_time_small = f" and dt<='{end}' " if end else ''

    db = MysqlUtils().dm

    # TODO: label_id needs a matching rework.
    # FIX: the aspect-ratio select item previously read
    #   "min(width)/if(min(height),min(height),1) aspect_ratio"
    # with no trailing comma (SQL syntax error), and the bare min() aggregates
    # (no GROUP BY) would have collapsed the report to a single row.  It is now
    # computed per row, with if(height,height,1) still guarding division by 0.
    # NOTE(review): the 'upateTime' alias looks like a typo for 'updateTime',
    # but the frontend may depend on it — confirm before renaming.
    sql = f"""
    select
    row_number () over() as id,
    book as novels,
    dt as createTime,
    `type` as channel,
    'all' as dataType,
    owner as creator,
    0 as delFlag,
    False as isCollected,
    '' as labels,
    download_path as downloadPath,
    now() as startDate,
    now() as endDate,
    height,
    width,
    preview_url as media,
    format as mediaFormat,
    size as mediaSize,
    if(click_count=0 or view_count =0 or view_count is null or click_count is null,0,click_count / view_count) as clickRate,
    width/if(height,height,1) aspect_ratio,
    cost as consumeAmount,
    view_count as viewTimes,
    click_count as clickTimes,
    round((cost/view_count)*1000,2) cpm,
    round(click_count/view_count,4) ctr,
    round(cost/click_count,2) cpc,
    title,
    description as article,
    now() as upateTime,
    null as updateBy,
    if(is_video,2,1) as type,
    videoBitRate,
    videoFirstPage,
    videoLength,
    use_times as userTimes
    from dw_image_cost_day
    where replace (preview_url,' ,','') !=''
    and (1=1 {op1} {op4} {op10} {op11} {op12} {op_or1})
    {op_time_bigger} {op_time_small}
    {op_order}
    """
    print(sql)
    data, total = getLimitData(db, sql, page, page_size)
    # Ceiling division: the old ``int(total / page_size) + 1`` reported one
    # page too many whenever total was an exact multiple of page_size, and
    # one page for an empty result set.
    pages = -(-total // page_size) if page_size else 0
    data = {'records': data, 'total': total, 'size': page_size, 'current': page, 'pages': pages}
    return data
|
|
|
+
|
|
|
+
|
|
|
if __name__ == '__main__':
    # Ad-hoc smoke test: print the pitcher records for user 78.
    # NOTE(review): get_pitcher is defined elsewhere in the project; its
    # return shape is not visible here (used above as dicts with 'nick_name').
    print(get_pitcher({"user_id": 78}))
|