@@ -7,11 +7,12 @@ yesterday="$(date -d '1 days ago' +%Y%m%d)"
HADOOP="/opt/apps/HADOOP-COMMON/hadoop-common-current/bin/hadoop"
-source /root/anaconda3/bin/activate py37
+
max_hour=11
max_minute=00
# 0 Check whether the upstream table has finished producing; wait until 12:00 at the latest
+source /root/anaconda3/bin/activate py37
while true; do
python_return_code=$(python utils.py --excute_program check_item_hive --partition ${today_early_1})
if [ $python_return_code -eq 0 ]; then
@@ -27,6 +28,7 @@ while true; do
exit 1
fi
done
+conda deactivate
# 1 Produce the item data
/opt/apps/SPARK2/spark-2.4.8-hadoop3.2-1.0.8/bin/spark-class2 org.apache.spark.deploy.SparkSubmit \
--class com.aliyun.odps.spark.examples.makedata.makedata_08_item2redis \
@@ -44,6 +46,7 @@ else
fi
# 2 Check the user upstream table
+source /root/anaconda3/bin/activate py37
while true; do
python_return_code=$(python utils.py --excute_program check_user_hive --partition ${today_early_1})
if [ $python_return_code -eq 0 ]; then
@@ -59,6 +62,7 @@ while true; do
exit 1
fi
done
+conda deactivate
# 3 Produce the user data
/opt/apps/SPARK2/spark-2.4.8-hadoop3.2-1.0.8/bin/spark-class2 org.apache.spark.deploy.SparkSubmit \
--class com.aliyun.odps.spark.examples.makedata.makedata_09_user2redis \
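
Taken together, the hunks converge on one pattern: activate the py37 env only around the Python availability check, poll until the upstream Hive partition lands, then deactivate before handing off to Spark. A minimal sketch of that loop, with the elided body reconstructed under two assumptions: utils.py prints 0 once the partition exists, and the 5-minute sleep interval is illustrative:

source /root/anaconda3/bin/activate py37
while true; do
    # assumption: utils.py prints 0 once the partition exists, non-zero otherwise
    python_return_code=$(python utils.py --excute_program check_item_hive --partition ${today_early_1})
    if [ "$python_return_code" -eq 0 ]; then
        break                                # upstream table is ready
    fi
    if [ "$(date +%H)" -gt "$max_hour" ]; then
        echo "upstream table still missing past the deadline, aborting"
        exit 1                               # give up once the clock passes 12:00
    fi
    sleep 300                                # illustrative 5-minute poll interval
done
conda deactivate                             # release the env before spark-submit runs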