1 Submitting your own packaged streaming job
./bin/flink run -c com.tclking.ai.PVUVDayMain -m yarn-cluster examples/batch/WordCount.jar
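Here -c picks the main class inside the jar and -m yarn-cluster sends the job to YARN rather than a standalone cluster. A couple of commonly used extras, sketched below; double-check them against ./bin/flink run --help for your version:
# Same submission, detached (-d) and with an explicit parallelism (-p);
# both are standard "flink run" flags, but verify with ./bin/flink run --help
# on your own installation.
./bin/flink run -d -p 4 -m yarn-cluster \
    -c com.tclking.ai.PVUVDayMain examples/batch/WordCount.jar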
2 Contents of the flink script:
target="$0"
iteration=0
# Resolve symlinks so $target points at the real flink script
# (a standalone sketch of this loop follows the script listing)
while [ -L "$target" ]; do
    if [ "$iteration" -gt 100 ]; then
        echo "Cannot resolve path: You have a cyclic symlink in $target."
        break
    fi
    ls=`ls -ld -- "$target"`
    target=`expr "$ls" : '.* -> \(.*\)$'`
    iteration=$((iteration + 1))
done
# Get the directory that contains this script
bin=`dirname "$target"`
# get flink config
. "$bin"/config.sh
if [ "$FLINK_IDENT_STRING" = "" ]; then
FLINK_IDENT_STRING="$USER"
fi
CC_CLASSPATH=`constructFlinkClassPath`
log=$FLINK_LOG_DIR/flink-$FLINK_IDENT_STRING-client-$HOSTNAME.log
log_setting=(-Dlog.file="$log" -Dlog4j.configuration=file:"$FLINK_CONF_DIR"/log4j-cli.properties -Dlogback.configurationFile=file:"$FLINK_CONF_DIR"/logback.xml)
# Add HADOOP_CLASSPATH to allow the usage of Hadoop file systems
# exec replaces this shell process with a JVM running org.apache.flink.client.cli.CliFrontend; that class's main method is the entry point for job submission
exec $JAVA_RUN $JVM_ARGS "${log_setting[@]}" -classpath "`manglePathList "$CC_CLASSPATH:$INTERNAL_HADOOP_CLASSPATHS"`" org.apache.flink.client.cli.CliFrontend "$@"
($@ expands to all of the arguments passed to the flink script, i.e. everything after ./bin/flink in the command from step 1.)
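The symlink-resolution loop at the top of the script can be exercised on its own. A minimal standalone sketch; the /tmp file names are invented for the demo:
# Build a small chain of symlinks, then resolve it the same way the flink
# script does: follow "ls -ld" output until the target is no longer a link.
touch /tmp/flink_demo_real.sh
ln -sf /tmp/flink_demo_real.sh /tmp/flink_demo_link1
ln -sf /tmp/flink_demo_link1 /tmp/flink_demo_link2

target=/tmp/flink_demo_link2
iteration=0
while [ -L "$target" ]; do
    if [ "$iteration" -gt 100 ]; then
        echo "Cannot resolve path: You have a cyclic symlink in $target."
        break
    fi
    ls=`ls -ld -- "$target"`
    target=`expr "$ls" : '.* -> \(.*\)$'`
    iteration=$((iteration + 1))
done
echo "$target"   # prints /tmp/flink_demo_real.sh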
After variable substitution, the exec line above expands to the following:
exec /opt/jdk1.8.0_211/bin/java
-Dlog.file=/opt/flink-1.9.0/log/flink-root-client-louisvv.log
-Dlog4j.configuration=file:/opt/flink-1.9.0/conf/log4j-cli.properties
-Dlogback.configurationFile=file:/opt/flink-1.9.0/conf/logback.xml
-classpath /opt/flink-1.9.0/lib/log4j-1.2.17.jar
:/opt/flink-1.9.0/lib/slf4j-log4j12-1.7.15.jar
:/opt/flink-1.9.0/lib/flink-dist_2.11-1.9.0.jar
org.apache.flink.client.cli.CliFrontend run -c com.tclking.ai.PVUVDayMain -m yarn-cluster examples/batch/WordCount.jar
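To confirm what the exec line looks like on your own installation, trace the script with bash -x, which prints each command with its variables substituted just before it runs (note this still submits the job):
# Trace the flink script and filter for lines mentioning CliFrontend;
# the "+ exec .../java ... CliFrontend run ..." trace line matches the
# expansion shown above.
bash -x ./bin/flink run -c com.tclking.ai.PVUVDayMain -m yarn-cluster examples/batch/WordCount.jar 2>&1 \
    | grep CliFrontend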
Source: CSDN
Author: peidezhi
Link: https://blog.csdn.net/peidezhi/article/details/104058758