# checkHiveDataUtil.py
  1. # -*- coding: utf-8 -*-
  2. from odps import ODPS
  3. import argparse
  4. ODPS_CONFIG = {
  5. 'ENDPOINT': 'http://service.cn.maxcompute.aliyun.com/api',
  6. 'ACCESSID': 'LTAIWYUujJAm7CbH',
  7. 'ACCESSKEY': 'RfSjdiWwED1sGFlsjXv0DlfTnZTG1P',
  8. }
  9. def check_origin_hive(args):
  10. project = "loghubods"
  11. # table = "alg_recsys_view_sample_v2"
  12. table = args.table
  13. partition = args.partition
  14. count = check_data(project, table, partition)
  15. if count == 0:
  16. print("1")
  17. exit(1)
  18. else:
  19. print('存在 数据 size:', count)
  20. print("0")
  21. def check_data(project, table, partition) -> int:
  22. """检查数据是否准备好,输出数据条数"""
  23. odps = ODPS(
  24. access_id=ODPS_CONFIG['ACCESSID'],
  25. secret_access_key=ODPS_CONFIG['ACCESSKEY'],
  26. project=project,
  27. endpoint=ODPS_CONFIG['ENDPOINT'],
  28. connect_timeout=3000,
  29. read_timeout=500000,
  30. pool_maxsize=1000,
  31. pool_connections=1000
  32. )
  33. try:
  34. t = odps.get_table(name=table)
  35. check_res = t.exist_partition(partition_spec=f'dt={partition}')
  36. if check_res:
  37. sql = f'select * from {project}.{table} where dt = {partition}'
  38. with odps.execute_sql(sql=sql).open_reader() as reader:
  39. data_count = reader.count
  40. else:
  41. data_count = 0
  42. except Exception as e:
  43. print("error:" + str(e))
  44. data_count = 0
  45. return data_count
  46. if __name__ == '__main__':
  47. parser = argparse.ArgumentParser(description='脚本utils')
  48. # parser.add_argument('--excute_program', type=str, help='执行程序')
  49. parser.add_argument('--partition', type=str, help='表分区')
  50. # parser.add_argument('--project', type=str, help='表空间')
  51. parser.add_argument('--table', type=str, help='表名')
  52. argv = parser.parse_args()
  53. # args = parser.parse_args()
  54. # table = argv[1]
  55. # partition = argv[2]
  56. # table = 'alg_recsys_sample_all'
  57. # partition = '20240703'
  58. print(check_origin_hive(argv))