- """
- 创建虚拟站内 UID
- https://w42nne6hzg.feishu.cn/docx/PhbhdXScYo9CxpxTT3gcOle4nIs
- """
- import os
- import sys
- import uuid
- import requests
- sys.path.append(os.getcwd())
- from common.common import Common
- from common.db import MysqlHelper


class getUser:
    @classmethod
    def get_default_user(cls):
        """Fetch a default on-platform user profile from the user-center service."""
        url = "https://api-internal.piaoquantv.com/user-center/info/getDefaultUserInfo"
        # A random mid is generated for each request
        payload = {"params": {"mid": str(uuid.uuid1())}}
        headers = {'Content-Type': 'application/json'}
        response = requests.request("POST", url, headers=headers, json=payload).json()
        return response['data']

    @classmethod
    def create_uid(cls, log_type, crawler, user_dict, env):
        """
        Create a virtual on-platform UID.
        :param log_type: log type
        :param crawler: which crawler
        :param user_dict: dict {'nickName': user name, 'avatarUrl': avatar, 'tagName': on-platform user tag}
        :param env: environment
        :return: on-platform UID
        """
        try:
            if env == 'dev':
                url = 'https://videotest.yishihui.com/longvideoapi/user/virtual/crawler/registerVirtualUser'
            elif env == 'prod':
                url = 'https://longvideoapi.piaoquantv.com/longvideoapi/user/virtual/crawler/registerVirtualUser'
            else:
                # Any other value falls back to the production endpoint
                url = 'https://longvideoapi.piaoquantv.com/longvideoapi/user/virtual/crawler/registerVirtualUser'
            params = {
                'recommendStatus': user_dict.get('recommendStatus', -6),
                'appRecommendStatus': user_dict.get('appRecommendStatus', -6),
                'pwd': '',
                'nickName': user_dict['nickName'],
                'avatarUrl': user_dict['avatarUrl'],
                'tagName': user_dict['tagName'],
            }
            response = requests.post(url=url, params=params)
            user_id = response.json()['data']
            return user_id
        except Exception as e:
            Common.logger(log_type, crawler).error(f"create_uid exception: {e}\n")

    @classmethod
    def create_user(cls, log_type, crawler, out_user_dict, env, machine):
        """
        Complete the Feishu user-sheet info and return the user mapping.
        :param log_type: log type
        :param crawler: which crawler
        :param out_user_dict: off-platform user info dict
        :param env: production: prod, test: dev
        :param machine: deployment machine; for Alibaba Cloud use aliyun or aliyun_hk, offline use macpro, macair or local
        :return: user_dict
        """
        try:
            # Unpack the off-platform (source-site) user info
            out_uid = out_user_dict['out_uid']
            user_name = out_user_dict['user_name']
            out_avatar_url = out_user_dict['out_avatar_url']
            out_create_time = out_user_dict['out_create_time']
            out_tag = out_user_dict['out_tag']
            out_play_cnt = out_user_dict['out_play_cnt']
            out_fans = out_user_dict['out_fans']
            out_follow = out_user_dict['out_follow']
            out_friend = out_user_dict['out_friend']
            out_like = out_user_dict['out_like']
            platform = out_user_dict['platform']
            tag = out_user_dict['tag']
            # Check whether this off-platform user is already mapped to an on-platform UID
            sql = f""" select * from crawler_user where platform="{platform}" and out_user_id="{out_uid}" """
            our_user_info = MysqlHelper.get_values(log_type, crawler, sql, env, machine)
            if our_user_info is None or len(our_user_info) == 0:
                # No mapping yet: register a new virtual on-platform user
                create_user_dict = {
                    'nickName': user_name,
                    'avatarUrl': out_avatar_url,
                    'tagName': tag,
                }
                our_uid = cls.create_uid(log_type, crawler, create_user_dict, env)
                Common.logger(log_type, crawler).info(f'Newly created on-platform UID: {our_uid}')
                if env == 'prod':
                    our_user_link = f'https://admin.piaoquantv.com/ums/user/{our_uid}/post'
                else:
                    our_user_link = f'https://testadmin.piaoquantv.com/ums/user/{our_uid}/post'
                Common.logger(log_type, crawler).info(f'On-platform user homepage: {our_user_link}')
                # Persist the off-platform -> on-platform mapping
                sql = f""" insert into crawler_user(user_id,
                                out_user_id,
                                out_user_name,
                                out_avatar_url,
                                out_create_time,
                                out_tag,
                                out_play_cnt,
                                out_fans,
                                out_follow,
                                out_friend,
                                out_like,
                                platform,
                                tag)
                                values({our_uid},
                                "{out_uid}",
                                "{user_name}",
                                "{out_avatar_url}",
                                "{out_create_time}",
                                "{out_tag}",
                                {out_play_cnt},
                                {out_fans},
                                {out_follow},
                                {out_friend},
                                {out_like},
                                "{platform}",
                                "{tag}") """
                Common.logger(log_type, crawler).info(f'sql: {sql}')
                MysqlHelper.update_values(log_type, crawler, sql, env, machine)
                Common.logger(log_type, crawler).info('User info inserted into the database.')
            else:
                # Mapping already exists: reuse the stored on-platform UID
                our_uid = our_user_info[0][1]
                if env == 'prod':
                    our_user_link = f'https://admin.piaoquantv.com/ums/user/{our_uid}/post'
                else:
                    our_user_link = f'https://testadmin.piaoquantv.com/ums/user/{our_uid}/post'
                Common.logger(log_type, crawler).info(f'On-platform user homepage: {our_user_link}')
            user_dict = {
                'out_uid': out_uid,
                'user_name': user_name,
                'our_uid': our_uid,
                'our_user_link': our_user_link,
            }
            return user_dict
        except Exception as e:
            Common.logger(log_type, crawler).error(f"create_user exception: {e}\n")


if __name__ == "__main__":
    pass
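
    # --- Usage sketch (illustrative only, not part of the original module) ---
    # The field values below are hypothetical placeholders; a real crawler passes
    # data scraped from the source platform together with its own
    # log_type / crawler / env / machine arguments.
    sample_out_user_dict = {
        'out_uid': 'demo_out_uid',
        'user_name': 'demo_user',
        'out_avatar_url': 'https://example.com/avatar.png',
        'out_create_time': '2023-01-01 00:00:00',
        'out_tag': 'demo_tag',
        'out_play_cnt': 0,
        'out_fans': 0,
        'out_follow': 0,
        'out_friend': 0,
        'out_like': 0,
        'platform': 'demo_platform',
        'tag': 'demo_tag',
    }
    # Uncomment to run against the dev environment; this performs real HTTP and
    # MySQL calls through Common / MysqlHelper.
    # print(getUser.create_user('follow', 'demo_crawler', sample_out_user_dict, 'dev', 'local'))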