Upgrade to Flink 1.11 in EMR 5.31 Command line interface

Vijayendra Yadav
Hi Team,

I have upgraded to Flink 1.11 (EMR 5.31) from Flink 1.10 (EMR 5.30.1).

I am facing the following error while running a Flink streaming job from the command line.
The job is submitted with /usr/lib/flink/bin/flink run (a rough sketch of the full command is below).
Which dependency might I be missing, or which ones could be conflicting?
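
For reference, the submission looks roughly like this; the jar path, parallelism, and program arguments are placeholders, only the main class matches the job in the stack trace below:

    /usr/lib/flink/bin/flink run \
        -m yarn-cluster \
        -p 4 \
        -c com.att.vdcs.StreamingJobKafkaFlink \
        /home/hadoop/streaming-job-assembly.jar \
        --config /home/hadoop/job.properties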


04:46:51.669 [main] ERROR org.apache.flink.client.cli.CliFrontend - Fatal error while running command line interface.
java.lang.NoSuchMethodError: org.apache.hadoop.ipc.RPC.getProtocolProxy(Ljava/lang/Class;JLjava/net/InetSocketAddress;Lorg/apache/hadoop/security/UserGroupInformation;Lorg/apache/hadoop/conf/Configuration;Ljavax/net/SocketFactory;ILorg/apache/hadoop/io/retry/RetryPolicy;Ljava/util/concurrent/atomic/AtomicBoolean;Lorg/apache/hadoop/ipc/AlignmentContext;)Lorg/apache/hadoop/ipc/ProtocolProxy;
        at org.apache.hadoop.hdfs.NameNodeProxiesClient.createProxyWithAlignmentContext(NameNodeProxiesClient.java:400) ~[hadoop-hdfs-client-2.10.0-amzn-0.jar:?]
        at org.apache.hadoop.hdfs.NameNodeProxiesClient.createNonHAProxyWithClientProtocol(NameNodeProxiesClient.java:351) ~[hadoop-hdfs-client-2.10.0-amzn-0.jar:?]
        at org.apache.hadoop.hdfs.NameNodeProxiesClient.createProxyWithClientProtocol(NameNodeProxiesClient.java:143) ~[hadoop-hdfs-client-2.10.0-amzn-0.jar:?]
        at org.apache.hadoop.hdfs.DFSClient.<init>(DFSClient.java:353) ~[hadoop-hdfs-client-2.10.0-amzn-0.jar:?]
        at org.apache.hadoop.hdfs.DFSClient.<init>(DFSClient.java:287) ~[hadoop-hdfs-client-2.10.0-amzn-0.jar:?]
        at org.apache.hadoop.hdfs.DistributedFileSystem.initialize(DistributedFileSystem.java:159) ~[hadoop-hdfs-client-2.10.0-amzn-0.jar:?]
        at org.apache.hadoop.fs.FileSystem.createFileSystem(FileSystem.java:3354) ~[flink-s3-fs-hadoop-1.11.0.jar:1.11.0]
        at org.apache.hadoop.fs.FileSystem.access$200(FileSystem.java:124) ~[flink-s3-fs-hadoop-1.11.0.jar:1.11.0]
        at org.apache.hadoop.fs.FileSystem$Cache.getInternal(FileSystem.java:3403) ~[flink-s3-fs-hadoop-1.11.0.jar:1.11.0]
        at org.apache.hadoop.fs.FileSystem$Cache.get(FileSystem.java:3371) ~[flink-s3-fs-hadoop-1.11.0.jar:1.11.0]
        at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:477) ~[flink-s3-fs-hadoop-1.11.0.jar:1.11.0]
        at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:226) ~[flink-s3-fs-hadoop-1.11.0.jar:1.11.0]
        at org.apache.flink.yarn.YarnClusterDescriptor.startAppMaster(YarnClusterDescriptor.java:661) ~[flink-dist_2.11-1.11.0.jar:1.11.0]
        at org.apache.flink.yarn.YarnClusterDescriptor.deployInternal(YarnClusterDescriptor.java:524) ~[flink-dist_2.11-1.11.0.jar:1.11.0]
        at org.apache.flink.yarn.YarnClusterDescriptor.deployJobCluster(YarnClusterDescriptor.java:424) ~[flink-dist_2.11-1.11.0.jar:1.11.0]
        at org.apache.flink.client.deployment.executors.AbstractJobClusterExecutor.execute(AbstractJobClusterExecutor.java:70) ~[flink-dist_2.11-1.11.0.jar:1.11.0]
        at org.apache.flink.streaming.api.environment.StreamExecutionEnvironment.executeAsync(StreamExecutionEnvironment.java:1812) ~[flink-dist_2.11-1.11.0.jar:1.11.0]
        at org.apache.flink.client.program.StreamContextEnvironment.executeAsync(StreamContextEnvironment.java:128) ~[flink-dist_2.11-1.11.0.jar:1.11.0]
        at org.apache.flink.streaming.api.environment.StreamExecutionEnvironment.executeAsync(StreamExecutionEnvironment.java:1785) ~[flink-dist_2.11-1.11.0.jar:1.11.0]
        at org.apache.flink.streaming.api.scala.StreamExecutionEnvironment.executeAsync(StreamExecutionEnvironment.scala:752) ~[flink-dist_2.11-1.11.0.jar:1.11.0]
        at com.att.vdcs.StreamingJobKafkaFlink$.main(StreamingJobKafkaFlink.scala:196) ~[?:?]
        at com.att.vdcs.StreamingJobKafkaFlink.main(StreamingJobKafkaFlink.scala) ~[?:?]
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_265]
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_265]
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_265]
        at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_265]
        at org.apache.flink.client.program.PackagedProgram.callMainMethod(PackagedProgram.java:288) ~[flink-dist_2.11-1.11.0.jar:1.11.0]
        at org.apache.flink.client.program.PackagedProgram.invokeInteractiveModeForExecution(PackagedProgram.java:198) ~[flink-dist_2.11-1.11.0.jar:1.11.0]
        at org.apache.flink.client.ClientUtils.executeProgram(ClientUtils.java:149) ~[flink-dist_2.11-1.11.0.jar:1.11.0]
        at org.apache.flink.client.cli.CliFrontend.executeProgram(CliFrontend.java:699) ~[flink-dist_2.11-1.11.0.jar:1.11.0]
        at org.apache.flink.client.cli.CliFrontend.run(CliFrontend.java:232) ~[flink-dist_2.11-1.11.0.jar:1.11.0]
        at org.apache.flink.client.cli.CliFrontend.parseParameters(CliFrontend.java:916) ~[flink-dist_2.11-1.11.0.jar:1.11.0]
        at org.apache.flink.client.cli.CliFrontend.lambda$main$10(CliFrontend.java:992) ~[flink-dist_2.11-1.11.0.jar:1.11.0]
        at java.security.AccessController.doPrivileged(Native Method) ~[?:1.8.0_265]
        at javax.security.auth.Subject.doAs(Subject.java:422) ~[?:1.8.0_265]
        at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1682) ~[flink-s3-fs-hadoop-1.11.0.jar:1.11.0]
        at org.apache.flink.runtime.security.contexts.HadoopSecurityContext.runSecured(HadoopSecurityContext.java:41) ~[flink-dist_2.11-1.11.0.jar:1.11.0]
        at org.apache.flink.client.cli.CliFrontend.main(CliFrontend.java:992) [flink-dist_2.11-1.11.0.jar:1.11.0]
java.lang.NoSuchMethodError: org.apache.hadoop.ipc.RPC.getProtocolProxy(Ljava/lang/Class;JLjava/net/InetSocketAddress;Lorg/apache/hadoop/security/UserGroupInformation;Lorg/apache/hadoop/conf/Configuration;Ljavax/net/SocketFactory;ILorg/apache/hadoop/io/retry/RetryPolicy;Ljava/util/concurrent/atomic/AtomicBoolean;Lorg/apache/hadoop/ipc/AlignmentContext;)Lorg/apache/hadoop/ipc/ProtocolProxy;
        at org.apache.hadoop.hdfs.NameNodeProxiesClient.createProxyWithAlignmentContext(NameNodeProxiesClient.java:400)
        at org.apache.hadoop.hdfs.NameNodeProxiesClient.createNonHAProxyWithClientProtocol(NameNodeProxiesClient.java:351)
        at org.apache.hadoop.hdfs.NameNodeProxiesClient.createProxyWithClientProtocol(NameNodeProxiesClient.java:143)
        at org.apache.hadoop.hdfs.DFSClient.<init>(DFSClient.java:353)
        at org.apache.hadoop.hdfs.DFSClient.<init>(DFSClient.java:287)
        at org.apache.hadoop.hdfs.DistributedFileSystem.initialize(DistributedFileSystem.java:159)
        at org.apache.hadoop.fs.FileSystem.createFileSystem(FileSystem.java:3354)
        at org.apache.hadoop.fs.FileSystem.access$200(FileSystem.java:124)
        at org.apache.hadoop.fs.FileSystem$Cache.getInternal(FileSystem.java:3403)
        at org.apache.hadoop.fs.FileSystem$Cache.get(FileSystem.java:3371)
        at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:477)
        at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:226)
        at org.apache.flink.yarn.YarnClusterDescriptor.startAppMaster(YarnClusterDescriptor.java:661)
        at org.apache.flink.yarn.YarnClusterDescriptor.deployInternal(YarnClusterDescriptor.java:524)
        at org.apache.flink.yarn.YarnClusterDescriptor.deployJobCluster(YarnClusterDescriptor.java:424)
        at org.apache.flink.client.deployment.executors.AbstractJobClusterExecutor.execute(AbstractJobClusterExecutor.java:70)
        at org.apache.flink.streaming.api.environment.StreamExecutionEnvironment.executeAsync(StreamExecutionEnvironment.java:1812)
        at org.apache.flink.client.program.StreamContextEnvironment.executeAsync(StreamContextEnvironment.java:128)
        at org.apache.flink.streaming.api.environment.StreamExecutionEnvironment.executeAsync(StreamExecutionEnvironment.java:1785)
        at org.apache.flink.streaming.api.scala.StreamExecutionEnvironment.executeAsync(StreamExecutionEnvironment.scala:752)
        at com.att.vdcs.StreamingJobKafkaFlink$.main(StreamingJobKafkaFlink.scala:196)
        at com.att.vdcs.StreamingJobKafkaFlink.main(StreamingJobKafkaFlink.scala)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:498)
        at org.apache.flink.client.program.PackagedProgram.callMainMethod(PackagedProgram.java:288)
        at org.apache.flink.client.program.PackagedProgram.invokeInteractiveModeForExecution(PackagedProgram.java:198)
        at org.apache.flink.client.ClientUtils.executeProgram(ClientUtils.java:149)
        at org.apache.flink.client.cli.CliFrontend.executeProgram(CliFrontend.java:699)
        at org.apache.flink.client.cli.CliFrontend.run(CliFrontend.java:232)
        at org.apache.flink.client.cli.CliFrontend.parseParameters(CliFrontend.java:916)
        at org.apache.flink.client.cli.CliFrontend.lambda$main$10(CliFrontend.java:992)
        at java.security.AccessController.doPrivileged(Native Method)
        at javax.security.auth.Subject.doAs(Subject.java:422)
        at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1682)
        at org.apache.flink.runtime.security.contexts.HadoopSecurityContext.runSecured(HadoopSecurityContext.java:41)
        at org.apache.flink.client.cli.CliFrontend.main(CliFrontend.java:992)


Regards,
Vijay

Re: Upgrade to Flink 1.11 in EMR 5.31 Command line interface

Piotr Nowojski
Hi,

Are you sure you are loading the filesystems correctly, i.e. through the plugin mechanism? [1] Since Flink 1.10, filesystems can only be loaded that way [2], and Flink 1.11 tightened this further by restricting the classpath fallback to specific filesystems [3].
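
For example, the S3 filesystem jar should sit in its own folder under plugins/, not in lib/. A minimal sketch, assuming the EMR default Flink home of /usr/lib/flink and the 1.11.0 jar (adjust paths and version to your installation):

    # register flink-s3-fs-hadoop through the plugin mechanism instead of the classpath
    cd /usr/lib/flink
    mkdir -p plugins/s3-fs-hadoop
    cp opt/flink-s3-fs-hadoop-1.11.0.jar plugins/s3-fs-hadoop/   # or mv it out of lib/ if it was put there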

Best,
Piotrek

[2] FileSystems should be loaded via Plugin Architecture (https://ci.apache.org/projects/flink/flink-docs-release-1.10/release-notes/flink-1.10.html)
[3] Refined fallback filesystems to only handle specific filesystems (https://ci.apache.org/projects/flink/flink-docs-stable/release-notes/flink-1.11.html)

Re: Upgrade to Flink 1.11 in EMR 5.31 Command line interface

Vijayendra Yadav
Thank you, Piotr. I moved the flink-s3-fs-hadoop library into the plugins directory, and now it works.
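
For anyone hitting the same error, this is roughly what I ran on the EMR master node (exact paths and the version suffix may differ on your cluster):

    # the S3 filesystem jar must be loaded as a plugin, not from lib/ on the classpath
    sudo mkdir -p /usr/lib/flink/plugins/s3-fs-hadoop
    sudo mv /usr/lib/flink/lib/flink-s3-fs-hadoop-1.11.0.jar /usr/lib/flink/plugins/s3-fs-hadoop/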


Re: Upgrade to Flink 1.11 in EMR 5.31 Command line interface

Piotr Nowojski
I'm glad to hear that :)

Best regards,
Piotrek
