@@ -57,6 +57,35 @@ def get_data_from_odps(date, project, table, connect_timeout=3000, read_timeout=
     return records
 
 
+def check_table_partition_exists(date, project, table, connect_timeout=3000, read_timeout=500000,
+                                 pool_maxsize=1000, pool_connections=1000):
+    """
+    Check whether the given partition exists in the table.
+    :param date: date, type-string, '%Y%m%d' format
+    :param project: ODPS project name, type-string
+    :param table: table name, type-string
+    :param connect_timeout: connection timeout setting
+    :param read_timeout: read timeout setting
+    :param pool_maxsize: max number of connections kept in each connection pool
+    :param pool_connections: number of connection pools to cache
+    :return: True if the partition exists, False otherwise
+    """
+    odps = ODPS(
+        access_id=config_.ODPS_CONFIG['ACCESSID'],
+        secret_access_key=config_.ODPS_CONFIG['ACCESSKEY'],
+        project=project,
+        endpoint=config_.ODPS_CONFIG['ENDPOINT'],
+        connect_timeout=connect_timeout,
+        read_timeout=read_timeout,
+        pool_maxsize=pool_maxsize,
+        pool_connections=pool_connections
+    )
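+    # pyodps Table.exist_partition returns a bool for the given 'dt=<date>' partition spec.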
+    t = odps.get_table(name=table)
+    return t.exist_partition(partition_spec=f'dt={date}')
+
+
 def write_to_pickle(data, filename, filepath=config_.DATA_DIR_PATH):
     """
     Write the data into a pickle file.
@@ -351,6 +380,18 @@ if __name__ == '__main__':
     # data_test = [9.20273281e+03, 7.00795065e+03, 5.54813112e+03, 9.97402494e-01, 9.96402495e-01, 9.96402494e-01]
     # data_normalization(data_test)
     # request_post(request_url=config_.NOTIFY_BACKEND_UPDATE_ROV_SCORE_URL, request_data={'videos': []})
-    video_ids = [110, 112, 113, 115, 116, 117, 8289883]
-    update_video_w_h_rate(video_ids=video_ids, key_name='')
-
+    # video_ids = [110, 112, 113, 115, 116, 117, 8289883]
+    # update_video_w_h_rate(video_ids=video_ids, key_name='')
+    project = config_.PROJECT_24H_APP_TYPE
+    table = config_.TABLE_24H_APP_TYPE
+    dt = '2022080115'
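+    # Note: this dt value includes an hour ('2022080115', i.e. '%Y%m%d%H');
+    # the 24h table is presumably partitioned by hour rather than by day.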
+    check_res = check_table_partition_exists(date=dt, project=project, table=table)
+    print(check_res)
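+    # A minimal polling sketch (illustrative only, left commented out): block until
+    # the dt partition lands, then read it with get_data_from_odps from this module.
+    # import time
+    # while not check_table_partition_exists(date=dt, project=project, table=table):
+    #     time.sleep(60)
+    # records = get_data_from_odps(date=dt, project=project, table=table)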