2020-01-02T10:35:27.5955956Z 2020-01-02 16:20:27 INFO: FAILURE / io.prestosql.tests.hive.TestHiveStorageFormats.testInsertIntoPartitionedTable (Groups: storage_formats) took 3.3 seconds
2020-01-02T10:35:27.6022682Z 2020-01-02 16:20:27 SEVERE: Failure cause:
2020-01-02T10:35:27.6023272Z io.prestosql.tempto.query.QueryExecutionException: java.sql.SQLException: Query failed (#20200102_103524_00824_vgxtn): File /tmp/presto-hdfs/692cfcd5-61aa-4322-ba89-04d5381b4ba8/returnflag=R/20200102_103524_00824_vgxtn_9b7b632b-9b5e-4c0c-aa08-ea9c1705efa3 could only be written to 0 of the 1 minReplication nodes. There are 1 datanode(s) running and no node(s) are excluded in this operation.
2020-01-02T10:35:27.6023580Z at org.apache.hadoop.hdfs.server.blockmanagement.BlockManager.chooseTarget4NewBlock(BlockManager.java:2121)
2020-01-02T10:35:27.6023696Z at org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.chooseTargetForNewBlock(FSDirWriteFileOp.java:286)
2020-01-02T10:35:27.6023810Z at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalBlock(FSNamesystem.java:2706)
2020-01-02T10:35:27.6023918Z at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.addBlock(NameNodeRpcServer.java:875)
2020-01-02T10:35:27.6024038Z at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.addBlock(ClientNamenodeProtocolServerSideTranslatorPB.java:561)
2020-01-02T10:35:27.6024167Z at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
2020-01-02T10:35:27.6024266Z at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:524)
2020-01-02T10:35:27.6024360Z at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1025)
2020-01-02T10:35:27.6024448Z at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:876)
2020-01-02T10:35:27.6024535Z at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:822)
2020-01-02T10:35:27.6024622Z at java.security.AccessController.doPrivileged(Native Method)
2020-01-02T10:35:27.6024693Z at javax.security.auth.Subject.doAs(Subject.java:422)
2020-01-02T10:35:27.6024788Z at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1730)
2020-01-02T10:35:27.6024883Z at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2682)
2020-01-02T10:35:27.6024927Z
2020-01-02T10:35:27.6025015Z at io.prestosql.tempto.query.JdbcQueryExecutor.execute(JdbcQueryExecutor.java:115)
2020-01-02T10:35:27.6025111Z at io.prestosql.tempto.query.JdbcQueryExecutor.executeQuery(JdbcQueryExecutor.java:83)
2020-01-02T10:35:27.6025208Z at io.prestosql.tempto.query.QueryExecutor.query(QueryExecutor.java:57)
2020-01-02T10:35:27.6025297Z at io.prestosql.tests.hive.TestHiveStorageFormats.testInsertIntoPartitionedTable(TestHiveStorageFormats.java:171)
2020-01-02T10:35:27.6025396Z at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
2020-01-02T10:35:27.6025488Z at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
2020-01-02T10:35:27.6025587Z at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
2020-01-02T10:35:27.6025662Z at java.lang.reflect.Method.invoke(Method.java:498)
2020-01-02T10:35:27.6025760Z at org.testng.internal.MethodInvocationHelper.invokeMethod(MethodInvocationHelper.java:104)
2020-01-02T10:35:27.6025940Z at org.testng.internal.Invoker.invokeMethod(Invoker.java:645)
2020-01-02T10:35:27.6026041Z at org.testng.internal.Invoker.invokeTestMethod(Invoker.java:851)
2020-01-02T10:35:27.6026115Z at org.testng.internal.Invoker.invokeTestMethods(Invoker.java:1177)
2020-01-02T10:35:27.6026214Z at org.testng.internal.TestMethodWorker.invokeTestMethods(TestMethodWorker.java:129)
2020-01-02T10:35:27.6026307Z at org.testng.internal.TestMethodWorker.run(TestMethodWorker.java:112)
2020-01-02T10:35:27.6026400Z at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
2020-01-02T10:35:27.6026496Z at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
2020-01-02T10:35:27.6026571Z at java.lang.Thread.run(Thread.java:748)
2020-01-02T10:35:27.6027135Z Caused by: java.sql.SQLException: Query failed (#20200102_103524_00824_vgxtn): File /tmp/presto-hdfs/692cfcd5-61aa-4322-ba89-04d5381b4ba8/returnflag=R/20200102_103524_00824_vgxtn_9b7b632b-9b5e-4c0c-aa08-ea9c1705efa3 could only be written to 0 of the 1 minReplication nodes. There are 1 datanode(s) running and no node(s) are excluded in this operation.
2020-01-02T10:35:27.6027271Z at org.apache.hadoop.hdfs.server.blockmanagement.BlockManager.chooseTarget4NewBlock(BlockManager.java:2121)
2020-01-02T10:35:27.6027460Z at org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.chooseTargetForNewBlock(FSDirWriteFileOp.java:286)
2020-01-02T10:35:27.6027569Z at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalBlock(FSNamesystem.java:2706)
2020-01-02T10:35:27.6027673Z at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.addBlock(NameNodeRpcServer.java:875)
2020-01-02T10:35:27.6027788Z at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.addBlock(ClientNamenodeProtocolServerSideTranslatorPB.java:561)
2020-01-02T10:35:27.6027892Z at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
2020-01-02T10:35:27.6028006Z at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:524)
2020-01-02T10:35:27.6028102Z at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1025)
2020-01-02T10:35:27.6028192Z at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:876)
2020-01-02T10:35:27.6028280Z at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:822)
2020-01-02T10:35:27.6028350Z at java.security.AccessController.doPrivileged(Native Method)
2020-01-02T10:35:27.6028439Z at javax.security.auth.Subject.doAs(Subject.java:422)
2020-01-02T10:35:27.6028530Z at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1730)
2020-01-02T10:35:27.6028622Z at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2682)
2020-01-02T10:35:27.6028665Z
2020-01-02T10:35:27.6028751Z at io.prestosql.jdbc.PrestoResultSet.resultsException(PrestoResultSet.java:1894)
2020-01-02T10:35:27.6028852Z at io.prestosql.jdbc.PrestoResultSet$ResultsPageIterator.computeNext(PrestoResultSet.java:1883)
2020-01-02T10:35:27.6028937Z at io.prestosql.jdbc.PrestoResultSet$ResultsPageIterator.computeNext(PrestoResultSet.java:1843)
2020-01-02T10:35:27.6029041Z at io.prestosql.jdbc.$internal.guava.collect.AbstractIterator.tryToComputeNext(AbstractIterator.java:141)
2020-01-02T10:35:27.6029143Z at io.prestosql.jdbc.$internal.guava.collect.AbstractIterator.hasNext(AbstractIterator.java:136)
2020-01-02T10:35:27.6029244Z at java.util.Spliterators$IteratorSpliterator.tryAdvance(Spliterators.java:1811)
2020-01-02T10:35:27.6029348Z at java.util.stream.StreamSpliterators$WrappingSpliterator.lambda$initPartialTraversalState$0(StreamSpliterators.java:295)
2020-01-02T10:35:27.6029440Z at java.util.stream.StreamSpliterators$AbstractWrappingSpliterator.fillBuffer(StreamSpliterators.java:207)
2020-01-02T10:35:27.6029542Z at java.util.stream.StreamSpliterators$AbstractWrappingSpliterator.doAdvance(StreamSpliterators.java:162)
2020-01-02T10:35:27.6029700Z at java.util.stream.StreamSpliterators$WrappingSpliterator.tryAdvance(StreamSpliterators.java:301)
2020-01-02T10:35:27.6029806Z at java.util.Spliterators$1Adapter.hasNext(Spliterators.java:681)
2020-01-02T10:35:27.6029885Z at io.prestosql.jdbc.PrestoResultSet$AsyncIterator.lambda$new$0(PrestoResultSet.java:1785)
2020-01-02T10:35:27.6029988Z at java.util.concurrent.CompletableFuture$AsyncSupply.run(CompletableFuture.java:1604)
2020-01-02T10:35:27.6030079Z ... 3 more
2020-01-02T10:35:27.6030584Z Caused by: io.prestosql.spi.PrestoException: File /tmp/presto-hdfs/692cfcd5-61aa-4322-ba89-04d5381b4ba8/returnflag=R/20200102_103524_00824_vgxtn_9b7b632b-9b5e-4c0c-aa08-ea9c1705efa3 could only be written to 0 of the 1 minReplication nodes. There are 1 datanode(s) running and no node(s) are excluded in this operation.
2020-01-02T10:35:27.6030714Z at org.apache.hadoop.hdfs.server.blockmanagement.BlockManager.chooseTarget4NewBlock(BlockManager.java:2121)
2020-01-02T10:35:27.6030824Z at org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.chooseTargetForNewBlock(FSDirWriteFileOp.java:286)
2020-01-02T10:35:27.6030920Z at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalBlock(FSNamesystem.java:2706)
2020-01-02T10:35:27.6031026Z at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.addBlock(NameNodeRpcServer.java:875)
2020-01-02T10:35:27.6031210Z at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.addBlock(ClientNamenodeProtocolServerSideTranslatorPB.java:561)
2020-01-02T10:35:27.6031331Z at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
2020-01-02T10:35:27.6031442Z at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:524)
2020-01-02T10:35:27.6031537Z at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1025)
2020-01-02T10:35:27.6031609Z at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:876)
2020-01-02T10:35:27.6031698Z at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:822)
2020-01-02T10:35:27.6031788Z at java.security.AccessController.doPrivileged(Native Method)
2020-01-02T10:35:27.6031874Z at javax.security.auth.Subject.doAs(Subject.java:422)
2020-01-02T10:35:27.6031952Z at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1730)
2020-01-02T10:35:27.6032045Z at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2682)
2020-01-02T10:35:27.6032105Z
2020-01-02T10:35:27.6032172Z at io.prestosql.plugin.hive.RecordFileWriter.appendRow(RecordFileWriter.java:166)
2020-01-02T10:35:27.6032268Z at io.prestosql.plugin.hive.RecordFileWriter.appendRows(RecordFileWriter.java:146)
2020-01-02T10:35:27.6032362Z at io.prestosql.plugin.hive.HiveWriter.append(HiveWriter.java:79)
2020-01-02T10:35:27.6032452Z at io.prestosql.plugin.hive.HivePageSink.writePage(HivePageSink.java:318)
2020-01-02T10:35:27.6032530Z at io.prestosql.plugin.hive.HivePageSink.doAppend(HivePageSink.java:270)
2020-01-02T10:35:27.6032629Z at io.prestosql.plugin.hive.HivePageSink.lambda$appendPage$2(HivePageSink.java:256)
2020-01-02T10:35:27.6032733Z at io.prestosql.plugin.hive.authentication.HdfsAuthentication.lambda$doAs$0(HdfsAuthentication.java:24)
2020-01-02T10:35:27.6032845Z at io.prestosql.plugin.hive.authentication.UserGroupInformationUtils.lambda$executeActionInDoAs$0(UserGroupInformationUtils.java:29)
2020-01-02T10:35:27.6032945Z at java.security.AccessController.doPrivileged(Native Method)
2020-01-02T10:35:27.6033016Z at javax.security.auth.Subject.doAs(Subject.java:360)
2020-01-02T10:35:27.6033107Z at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1710)
2020-01-02T10:35:27.6033216Z at io.prestosql.plugin.hive.authentication.UserGroupInformationUtils.executeActionInDoAs(UserGroupInformationUtils.java:27)
2020-01-02T10:35:27.6033324Z at io.prestosql.plugin.hive.authentication.ImpersonatingHdfsAuthentication.doAs(ImpersonatingHdfsAuthentication.java:39)
2020-01-02T10:35:27.6033487Z at io.prestosql.plugin.hive.authentication.HdfsAuthentication.doAs(HdfsAuthentication.java:23)
2020-01-02T10:35:27.6033577Z at io.prestosql.plugin.hive.HdfsEnvironment.doAs(HdfsEnvironment.java:85)
2020-01-02T10:35:27.6033668Z at io.prestosql.plugin.hive.HivePageSink.appendPage(HivePageSink.java:256)
2020-01-02T10:35:27.6033777Z at io.prestosql.spi.connector.classloader.ClassLoaderSafeConnectorPageSink.appendPage(ClassLoaderSafeConnectorPageSink.java:66)
2020-01-02T10:35:27.6033882Z at io.prestosql.operator.TableWriterOperator.addInput(TableWriterOperator.java:238)
2020-01-02T10:35:27.6033958Z at io.prestosql.operator.Driver.processInternal(Driver.java:384)
2020-01-02T10:35:27.6034048Z at io.prestosql.operator.Driver.lambda$processFor$8(Driver.java:283)
2020-01-02T10:35:27.6034137Z at io.prestosql.operator.Driver.tryWithLock(Driver.java:675)
2020-01-02T10:35:27.6034224Z at io.prestosql.operator.Driver.processFor(Driver.java:276)
2020-01-02T10:35:27.6034322Z at io.prestosql.execution.SqlTaskExecution$DriverSplitRunner.processFor(SqlTaskExecution.java:1075)
2020-01-02T10:35:27.6034411Z at io.prestosql.execution.executor.PrioritizedSplitRunner.process(PrioritizedSplitRunner.java:163)
2020-01-02T10:35:27.6034510Z at io.prestosql.execution.executor.TaskExecutor$TaskRunner.run(TaskExecutor.java:484)
2020-01-02T10:35:27.6034662Z at io.prestosql.$gen.Presto_327_57_g005745d____20200102_102649_2.run(Unknown Source)
2020-01-02T10:35:27.6034752Z ... 3 more
2020-01-02T10:35:27.6035249Z Caused by: org.apache.hadoop.ipc.RemoteException: File /tmp/presto-hdfs/692cfcd5-61aa-4322-ba89-04d5381b4ba8/returnflag=R/20200102_103524_00824_vgxtn_9b7b632b-9b5e-4c0c-aa08-ea9c1705efa3 could only be written to 0 of the 1 minReplication nodes. There are 1 datanode(s) running and no node(s) are excluded in this operation.
2020-01-02T10:35:27.6035381Z at org.apache.hadoop.hdfs.server.blockmanagement.BlockManager.chooseTarget4NewBlock(BlockManager.java:2121)
2020-01-02T10:35:27.6035494Z at org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.chooseTargetForNewBlock(FSDirWriteFileOp.java:286)
2020-01-02T10:35:27.6035604Z at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalBlock(FSNamesystem.java:2706)
2020-01-02T10:35:27.6035709Z at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.addBlock(NameNodeRpcServer.java:875)
2020-01-02T10:35:27.6035824Z at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.addBlock(ClientNamenodeProtocolServerSideTranslatorPB.java:561)
2020-01-02T10:35:27.6035944Z at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
2020-01-02T10:35:27.6036054Z at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:524)
2020-01-02T10:35:27.6036133Z at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1025)
2020-01-02T10:35:27.6036223Z at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:876)
2020-01-02T10:35:27.6036309Z at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:822)
2020-01-02T10:35:27.6036402Z at java.security.AccessController.doPrivileged(Native Method)
2020-01-02T10:35:27.6036473Z at javax.security.auth.Subject.doAs(Subject.java:422)
2020-01-02T10:35:27.6036565Z at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1730)
2020-01-02T10:35:27.6036660Z at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2682)
2020-01-02T10:35:27.6036703Z
2020-01-02T10:35:27.6036785Z at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1511)
2020-01-02T10:35:27.6036871Z at org.apache.hadoop.ipc.Client.call(Client.java:1457)
2020-01-02T10:35:27.6036957Z at org.apache.hadoop.ipc.Client.call(Client.java:1367)
2020-01-02T10:35:27.6037030Z at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:228)
2020-01-02T10:35:27.6037126Z at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:116)
2020-01-02T10:35:27.6037215Z at com.sun.proxy.$Proxy305.addBlock(Unknown Source)
2020-01-02T10:35:27.6037405Z at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.addBlock(ClientNamenodeProtocolTranslatorPB.java:513)
2020-01-02T10:35:27.6037500Z at sun.reflect.GeneratedMethodAccessor873.invoke(Unknown Source)
2020-01-02T10:35:27.6037597Z at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
2020-01-02T10:35:27.6037689Z at java.lang.reflect.Method.invoke(Method.java:498)
2020-01-02T10:35:27.6037784Z at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:422)
2020-01-02T10:35:27.6037887Z at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:165)
2020-01-02T10:35:27.6037978Z at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:157)
2020-01-02T10:35:27.6038083Z at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:95)
2020-01-02T10:35:27.6038190Z at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:359)
2020-01-02T10:35:27.6038278Z at com.sun.proxy.$Proxy306.addBlock(Unknown Source)
2020-01-02T10:35:27.6038351Z at org.apache.hadoop.hdfs.DFSOutputStream.addBlock(DFSOutputStream.java:1081)
2020-01-02T10:35:27.6038506Z at org.apache.hadoop.hdfs.DataStreamer.locateFollowingBlock(DataStreamer.java:1865)
2020-01-02T10:35:27.6038605Z at org.apache.hadoop.hdfs.DataStreamer.nextBlockOutputStream(DataStreamer.java:1668)
2020-01-02T10:35:27.6038698Z at org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:716)
2020-01-02T10:35:27.6038743Z
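For reference, the root cause in the trace above is the HDFS `RemoteException` ("could only be written to 0 of the 1 minReplication nodes ... 1 datanode(s) running"), which in a single-datanode setup typically indicates the lone datanode is out of disk space, dead, or otherwise refusing block writes, rather than a problem in the Presto writer itself. One way to rule that out is to ask the namenode for datanode stats. The sketch below is only an illustration, not part of the test suite, and the namenode URI is an assumed placeholder for whatever address the failing environment uses:

```java
// Minimal diagnostic sketch (assumption: hadoop-client on the classpath and a
// reachable namenode). It prints per-datanode remaining capacity so you can see
// whether the single datanode is full or missing entirely.
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;

public class HdfsCapacityCheck
{
    public static void main(String[] args)
            throws Exception
    {
        // Placeholder namenode address; substitute the one used by the failing environment.
        URI namenode = URI.create("hdfs://hadoop-master:9000");
        try (FileSystem fs = FileSystem.get(namenode, new Configuration())) {
            DistributedFileSystem dfs = (DistributedFileSystem) fs;
            for (DatanodeInfo node : dfs.getDataNodeStats()) {
                // No datanodes reported, or remaining space near 0, matches the
                // "could only be written to 0 of the 1 minReplication nodes" failure mode.
                System.out.printf("%s: remaining %d of %d bytes%n",
                        node.getHostName(),
                        node.getRemaining(),
                        node.getCapacity());
            }
        }
    }
}
```

If shell access to the HDFS container is easier, `hdfs dfsadmin -report` surfaces the same datanode capacity information.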
https://pipelines.actions.githubusercontent.com/sJrKZtZsjYLeyqPixYxcTJp8fX9YNlwyJbmEGSC0oymXVDSpbK/_apis/pipelines/1/runs/1439/signedlogcontent/28?urlExpires=2020-01-02T12%3A10%3A22.7301514Z&urlSigningMethod=HMACV1&urlSignature=zyXbw6mYS9JI%2BAN5aJvI6q%2FHpbeGG3uFp0lgtdmKQ84%3D