checkHiveDataUtil.py 2.1 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667
  1. # -*- coding: utf-8 -*-
import argparse
import sys

from odps import ODPS
  4. ODPS_CONFIG = {
  5. 'ENDPOINT': 'http://service.cn.maxcompute.aliyun.com/api',
  6. 'ACCESSID': 'LTAIWYUujJAm7CbH',
  7. 'ACCESSKEY': 'RfSjdiWwED1sGFlsjXv0DlfTnZTG1P',
  8. }
  9. def check_origin_hive(args):
  10. project = "loghubods"
  11. # table = "alg_recsys_view_sample_v2"
  12. table = args.table
  13. partition = args.partition
  14. count = check_data(project, table, partition)
  15. if count == 0:
  16. print("1")
  17. exit(1)
  18. else:
  19. print('存在 数据 size:', count)
  20. print("0")
  21. def check_data(project, table, partition) -> int:
  22. """检查数据是否准备好,输出数据条数"""
  23. odps = ODPS(
  24. access_id=ODPS_CONFIG['ACCESSID'],
  25. secret_access_key=ODPS_CONFIG['ACCESSKEY'],
  26. project=project,
  27. endpoint=ODPS_CONFIG['ENDPOINT'],
  28. # connect_timeout=300000,
  29. # read_timeout=500000,
  30. # pool_maxsize=1000,
  31. # pool_connections=1000
  32. )
  33. try:
  34. t = odps.get_table(name=table)
  35. check_res = t.exist_partition(partition_spec=f'dt={partition}')
  36. if check_res:
  37. # sql = f'select * from {project}.{table} where dt = {partition} limit 0,100'
  38. sql = f'select * from loghubods.alg_recsys_sample_all where dt = 20240703 limit 0,100;'
  39. with odps.execute_sql(sql=sql).open_reader() as reader:
  40. data_count = reader.count
  41. else:
  42. data_count = 0
  43. except Exception as e:
  44. print("error:" + str(e))
  45. data_count = 0
  46. return data_count
  47. if __name__ == '__main__':
  48. parser = argparse.ArgumentParser(description='脚本utils')
  49. # parser.add_argument('--excute_program', type=str, help='执行程序')
  50. parser.add_argument('--partition', type=str, help='表分区')
  51. # parser.add_argument('--project', type=str, help='表空间')
  52. parser.add_argument('--table', type=str, help='表名')
  53. argv = parser.parse_args()
  54. # args = parser.parse_args()
  55. # table = argv[1]
  56. # partition = argv[2]
  57. # table = 'alg_recsys_sample_all'
  58. # partition = '20240703'
  59. print(check_origin_hive(argv))