2024-11-17 21:14:45,027 main DEBUG Apache Log4j Core 2.17.2 initializing configuration org.apache.logging.log4j.core.config.properties.PropertiesConfiguration@3932c79a 2024-11-17 21:14:45,039 main DEBUG Took 0.010139 seconds to load 1 plugins from package org.apache.hadoop.hbase.logging 2024-11-17 21:14:45,040 main DEBUG PluginManager 'Core' found 129 plugins 2024-11-17 21:14:45,040 main DEBUG PluginManager 'Level' found 0 plugins 2024-11-17 21:14:45,041 main DEBUG PluginManager 'Lookup' found 16 plugins 2024-11-17 21:14:45,042 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig]. 2024-11-17 21:14:45,049 main DEBUG PluginManager 'TypeConverter' found 26 plugins 2024-11-17 21:14:45,061 main DEBUG LoggerConfig$Builder(additivity="null", level="ERROR", levelAndRefs="null", name="org.apache.hadoop.metrics2.util.MBeans", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null) 2024-11-17 21:14:45,063 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig]. 2024-11-17 21:14:45,064 main DEBUG LoggerConfig$Builder(additivity="null", level="DEBUG", levelAndRefs="null", name="org.apache.hadoop.hbase.logging.TestJul2Slf4j", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null) 2024-11-17 21:14:45,064 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig]. 2024-11-17 21:14:45,065 main DEBUG LoggerConfig$Builder(additivity="null", level="ERROR", levelAndRefs="null", name="org.apache.zookeeper", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null) 2024-11-17 21:14:45,065 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig]. 2024-11-17 21:14:45,067 main DEBUG LoggerConfig$Builder(additivity="null", level="WARN", levelAndRefs="null", name="org.apache.hadoop.metrics2.impl.MetricsSinkAdapter", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null) 2024-11-17 21:14:45,067 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig]. 2024-11-17 21:14:45,068 main DEBUG LoggerConfig$Builder(additivity="null", level="ERROR", levelAndRefs="null", name="org.apache.hadoop.metrics2.impl.MetricsSystemImpl", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null) 2024-11-17 21:14:45,068 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig]. 2024-11-17 21:14:45,069 main DEBUG LoggerConfig$Builder(additivity="false", level="WARN", levelAndRefs="null", name="org.apache.directory", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null) 2024-11-17 21:14:45,070 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig]. 2024-11-17 21:14:45,070 main DEBUG LoggerConfig$Builder(additivity="null", level="DEBUG", levelAndRefs="null", name="org.apache.hadoop.hbase.ipc.FailedServers", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null) 2024-11-17 21:14:45,071 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig]. 
2024-11-17 21:14:45,072 main DEBUG LoggerConfig$Builder(additivity="null", level="WARN", levelAndRefs="null", name="org.apache.hadoop.metrics2.impl.MetricsConfig", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null) 2024-11-17 21:14:45,072 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig]. 2024-11-17 21:14:45,072 main DEBUG LoggerConfig$Builder(additivity="null", level="INFO", levelAndRefs="null", name="org.apache.hadoop.hbase.ScheduledChore", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null) 2024-11-17 21:14:45,073 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig]. 2024-11-17 21:14:45,073 main DEBUG LoggerConfig$Builder(additivity="null", level="DEBUG", levelAndRefs="null", name="org.apache.hadoop.hbase.regionserver.RSRpcServices", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null) 2024-11-17 21:14:45,073 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig]. 2024-11-17 21:14:45,074 main DEBUG LoggerConfig$Builder(additivity="null", level="WARN", levelAndRefs="null", name="org.apache.hadoop", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null) 2024-11-17 21:14:45,074 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig]. 2024-11-17 21:14:45,074 main DEBUG LoggerConfig$Builder(additivity="null", level="DEBUG", levelAndRefs="null", name="org.apache.hadoop.hbase", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null) 2024-11-17 21:14:45,075 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig]. 2024-11-17 21:14:45,075 main DEBUG LoggerConfig$Builder(additivity="null", level="DEBUG", levelAndRefs="null", name="org.apache.hbase.thirdparty.io.netty.channel", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null) 2024-11-17 21:14:45,075 main DEBUG Building Plugin[name=root, class=org.apache.logging.log4j.core.config.LoggerConfig$RootLogger]. 2024-11-17 21:14:45,077 main DEBUG LoggerConfig$RootLogger$Builder(additivity="null", level="null", levelAndRefs="INFO,Console", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null) 2024-11-17 21:14:45,078 main DEBUG Building Plugin[name=loggers, class=org.apache.logging.log4j.core.config.LoggersPlugin]. 2024-11-17 21:14:45,080 main DEBUG createLoggers(={org.apache.hadoop.metrics2.util.MBeans, org.apache.hadoop.hbase.logging.TestJul2Slf4j, org.apache.zookeeper, org.apache.hadoop.metrics2.impl.MetricsSinkAdapter, org.apache.hadoop.metrics2.impl.MetricsSystemImpl, org.apache.directory, org.apache.hadoop.hbase.ipc.FailedServers, org.apache.hadoop.metrics2.impl.MetricsConfig, org.apache.hadoop.hbase.ScheduledChore, org.apache.hadoop.hbase.regionserver.RSRpcServices, org.apache.hadoop, org.apache.hadoop.hbase, org.apache.hbase.thirdparty.io.netty.channel, root}) 2024-11-17 21:14:45,080 main DEBUG Building Plugin[name=layout, class=org.apache.logging.log4j.core.layout.PatternLayout]. 
2024-11-17 21:14:45,082 main DEBUG PatternLayout$Builder(pattern="%d{ISO8601} %-5p [%t%notEmpty{ %X}] %C{2}(%L): %m%n", PatternSelector=null, Configuration(PropertiesConfig), Replace=null, charset="null", alwaysWriteExceptions="null", disableAnsi="null", noConsoleNoAnsi="null", header="null", footer="null") 2024-11-17 21:14:45,082 main DEBUG PluginManager 'Converter' found 47 plugins 2024-11-17 21:14:45,091 main DEBUG Building Plugin[name=appender, class=org.apache.hadoop.hbase.logging.HBaseTestAppender]. 2024-11-17 21:14:45,094 main DEBUG HBaseTestAppender$Builder(target="SYSTEM_ERR", maxSize="1G", bufferedIo="null", bufferSize="null", immediateFlush="null", ignoreExceptions="null", PatternLayout(%d{ISO8601} %-5p [%t%notEmpty{ %X}] %C{2}(%L): %m%n), name="Console", Configuration(PropertiesConfig), Filter=null, ={}) 2024-11-17 21:14:45,095 main DEBUG Starting HBaseTestOutputStreamManager SYSTEM_ERR 2024-11-17 21:14:45,096 main DEBUG Building Plugin[name=appenders, class=org.apache.logging.log4j.core.config.AppendersPlugin]. 2024-11-17 21:14:45,096 main DEBUG createAppenders(={Console}) 2024-11-17 21:14:45,097 main DEBUG Configuration org.apache.logging.log4j.core.config.properties.PropertiesConfiguration@3932c79a initialized 2024-11-17 21:14:45,097 main DEBUG Starting configuration org.apache.logging.log4j.core.config.properties.PropertiesConfiguration@3932c79a 2024-11-17 21:14:45,097 main DEBUG Started configuration org.apache.logging.log4j.core.config.properties.PropertiesConfiguration@3932c79a OK. 2024-11-17 21:14:45,098 main DEBUG Shutting down OutputStreamManager SYSTEM_OUT.false.false-1 2024-11-17 21:14:45,098 main DEBUG OutputStream closed 2024-11-17 21:14:45,098 main DEBUG Shut down OutputStreamManager SYSTEM_OUT.false.false-1, all resources released: true 2024-11-17 21:14:45,099 main DEBUG Appender DefaultConsole-1 stopped with status true 2024-11-17 21:14:45,099 main DEBUG Stopped org.apache.logging.log4j.core.config.DefaultConfiguration@49049a04 OK 2024-11-17 21:14:45,179 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6 2024-11-17 21:14:45,182 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=StatusLogger 2024-11-17 21:14:45,183 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=ContextSelector 2024-11-17 21:14:45,184 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name= 2024-11-17 21:14:45,185 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.directory 2024-11-17 21:14:45,186 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hadoop.metrics2.impl.MetricsSinkAdapter 2024-11-17 21:14:45,186 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.zookeeper 2024-11-17 21:14:45,187 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hadoop.hbase.logging.TestJul2Slf4j 2024-11-17 21:14:45,187 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hadoop.metrics2.impl.MetricsSystemImpl 2024-11-17 21:14:45,188 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hadoop.metrics2.util.MBeans 2024-11-17 21:14:45,188 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hadoop.hbase 2024-11-17 21:14:45,188 main DEBUG Registering 
MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hadoop 2024-11-17 21:14:45,189 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hadoop.hbase.ipc.FailedServers 2024-11-17 21:14:45,189 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hadoop.hbase.regionserver.RSRpcServices 2024-11-17 21:14:45,190 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hadoop.metrics2.impl.MetricsConfig 2024-11-17 21:14:45,190 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hbase.thirdparty.io.netty.channel 2024-11-17 21:14:45,190 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hadoop.hbase.ScheduledChore 2024-11-17 21:14:45,191 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Appenders,name=Console 2024-11-17 21:14:45,193 main DEBUG org.apache.logging.log4j.core.util.SystemClock supports precise timestamps. 2024-11-17 21:14:45,194 main DEBUG Reconfiguration complete for context[name=1dbd16a6] at URI jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-logging/target/hbase-logging-2.7.0-SNAPSHOT-tests.jar!/log4j2.properties (org.apache.logging.log4j.core.LoggerContext@31dadd46) with optional ClassLoader: null 2024-11-17 21:14:45,194 main DEBUG Shutdown hook enabled. Registering a new one. 2024-11-17 21:14:45,194 main DEBUG LoggerContext[name=1dbd16a6, org.apache.logging.log4j.core.LoggerContext@31dadd46] started OK. 2024-11-17T21:14:45,481 DEBUG [main {}] hbase.HBaseTestingUtility(348): Setting hbase.rootdir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/09645b7e-9829-f1fc-19c7-d9675fc19400 2024-11-17 21:14:45,484 main DEBUG AsyncLogger.ThreadNameStrategy=UNCACHED (user specified null, default is UNCACHED) 2024-11-17 21:14:45,485 main DEBUG org.apache.logging.log4j.core.util.SystemClock supports precise timestamps. 
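For reference, the Log4j2 DEBUG lines above describe the configuration assembled from the log4j2.properties inside hbase-logging-2.7.0-SNAPSHOT-tests.jar. The sketch below reconstructs a minimal properties file that would yield roughly this configuration (an HBaseTestAppender named Console writing to SYSTEM_ERR with the ISO8601 pattern layout, root logger at INFO, and a few of the per-package levels seen above). It is an approximation inferred from the log output, not the actual bundled file, which defines more loggers.

```properties
# Reconstructed sketch of the test logging config; values taken from the
# LoggerConfig/HBaseTestAppender builder output above, details may differ.
status = debug

appender.console.type = HBaseTestAppender
appender.console.name = Console
appender.console.target = SYSTEM_ERR
appender.console.maxSize = 1G
appender.console.layout.type = PatternLayout
appender.console.layout.pattern = %d{ISO8601} %-5p [%t%notEmpty{ %X}] %C{2}(%L): %m%n

rootLogger = INFO,Console

logger.zookeeper.name = org.apache.zookeeper
logger.zookeeper.level = ERROR
logger.hadoop.name = org.apache.hadoop
logger.hadoop.level = WARN
logger.hbase.name = org.apache.hadoop.hbase
logger.hbase.level = DEBUG
```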
2024-11-17T21:14:45,497 INFO [main {}] hbase.HBaseClassTestRule(94): Test class org.apache.hadoop.hbase.regionserver.wal.TestFSHLog timeout: 13 mins 2024-11-17T21:14:45,528 INFO [Time-limited test {}] hbase.HBaseZKTestingUtility(82): Created new mini-cluster data directory: /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/09645b7e-9829-f1fc-19c7-d9675fc19400/cluster_2a0274ea-93ca-a98a-2c9d-271c63f62376, deleteOnExit=true 2024-11-17T21:14:45,529 INFO [Time-limited test {}] hbase.HBaseTestingUtility(824): Setting test.cache.data to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/09645b7e-9829-f1fc-19c7-d9675fc19400/test.cache.data in system properties and HBase conf 2024-11-17T21:14:45,530 INFO [Time-limited test {}] hbase.HBaseTestingUtility(824): Setting hadoop.tmp.dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/09645b7e-9829-f1fc-19c7-d9675fc19400/hadoop.tmp.dir in system properties and HBase conf 2024-11-17T21:14:45,531 INFO [Time-limited test {}] hbase.HBaseTestingUtility(824): Setting hadoop.log.dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/09645b7e-9829-f1fc-19c7-d9675fc19400/hadoop.log.dir in system properties and HBase conf 2024-11-17T21:14:45,532 INFO [Time-limited test {}] hbase.HBaseTestingUtility(824): Setting mapreduce.cluster.local.dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/09645b7e-9829-f1fc-19c7-d9675fc19400/mapreduce.cluster.local.dir in system properties and HBase conf 2024-11-17T21:14:45,532 INFO [Time-limited test {}] hbase.HBaseTestingUtility(824): Setting mapreduce.cluster.temp.dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/09645b7e-9829-f1fc-19c7-d9675fc19400/mapreduce.cluster.temp.dir in system properties and HBase conf 2024-11-17T21:14:45,533 INFO [Time-limited test {}] hbase.HBaseTestingUtility(811): read short circuit is OFF 2024-11-17T21:14:45,650 WARN [Time-limited test {}] util.NativeCodeLoader(60): Unable to load native-hadoop library for your platform... using builtin-java classes where applicable 2024-11-17T21:14:45,748 DEBUG [Time-limited test {}] fs.HFileSystem(310): The file system is not a DistributedFileSystem. 
Skipping on block location reordering 2024-11-17T21:14:45,751 INFO [Time-limited test {}] hbase.HBaseTestingUtility(824): Setting yarn.node-labels.fs-store.root-dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/09645b7e-9829-f1fc-19c7-d9675fc19400/yarn.node-labels.fs-store.root-dir in system properties and HBase conf 2024-11-17T21:14:45,752 INFO [Time-limited test {}] hbase.HBaseTestingUtility(824): Setting yarn.node-attribute.fs-store.root-dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/09645b7e-9829-f1fc-19c7-d9675fc19400/yarn.node-attribute.fs-store.root-dir in system properties and HBase conf 2024-11-17T21:14:45,752 INFO [Time-limited test {}] hbase.HBaseTestingUtility(824): Setting yarn.nodemanager.log-dirs to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/09645b7e-9829-f1fc-19c7-d9675fc19400/yarn.nodemanager.log-dirs in system properties and HBase conf 2024-11-17T21:14:45,753 INFO [Time-limited test {}] hbase.HBaseTestingUtility(824): Setting yarn.nodemanager.remote-app-log-dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/09645b7e-9829-f1fc-19c7-d9675fc19400/yarn.nodemanager.remote-app-log-dir in system properties and HBase conf 2024-11-17T21:14:45,753 INFO [Time-limited test {}] hbase.HBaseTestingUtility(824): Setting yarn.timeline-service.entity-group-fs-store.active-dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/09645b7e-9829-f1fc-19c7-d9675fc19400/yarn.timeline-service.entity-group-fs-store.active-dir in system properties and HBase conf 2024-11-17T21:14:45,754 INFO [Time-limited test {}] hbase.HBaseTestingUtility(824): Setting yarn.timeline-service.entity-group-fs-store.done-dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/09645b7e-9829-f1fc-19c7-d9675fc19400/yarn.timeline-service.entity-group-fs-store.done-dir in system properties and HBase conf 2024-11-17T21:14:45,754 INFO [Time-limited test {}] hbase.HBaseTestingUtility(824): Setting yarn.nodemanager.remote-app-log-dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/09645b7e-9829-f1fc-19c7-d9675fc19400/yarn.nodemanager.remote-app-log-dir in system properties and HBase conf 2024-11-17T21:14:45,754 INFO [Time-limited test {}] hbase.HBaseTestingUtility(824): Setting dfs.journalnode.edits.dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/09645b7e-9829-f1fc-19c7-d9675fc19400/dfs.journalnode.edits.dir in system properties and HBase conf 2024-11-17T21:14:45,755 INFO [Time-limited test {}] hbase.HBaseTestingUtility(824): Setting dfs.datanode.shared.file.descriptor.paths to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/09645b7e-9829-f1fc-19c7-d9675fc19400/dfs.datanode.shared.file.descriptor.paths in system properties and HBase conf 2024-11-17T21:14:45,755 INFO [Time-limited test {}] hbase.HBaseTestingUtility(824): Setting nfs.dump.dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/09645b7e-9829-f1fc-19c7-d9675fc19400/nfs.dump.dir in system properties and HBase conf 2024-11-17T21:14:45,755 INFO [Time-limited test {}] hbase.HBaseTestingUtility(824): Setting java.io.tmpdir to 
/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/09645b7e-9829-f1fc-19c7-d9675fc19400/java.io.tmpdir in system properties and HBase conf 2024-11-17T21:14:45,756 INFO [Time-limited test {}] hbase.HBaseTestingUtility(824): Setting dfs.journalnode.edits.dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/09645b7e-9829-f1fc-19c7-d9675fc19400/dfs.journalnode.edits.dir in system properties and HBase conf 2024-11-17T21:14:45,756 INFO [Time-limited test {}] hbase.HBaseTestingUtility(824): Setting dfs.provided.aliasmap.inmemory.leveldb.dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/09645b7e-9829-f1fc-19c7-d9675fc19400/dfs.provided.aliasmap.inmemory.leveldb.dir in system properties and HBase conf 2024-11-17T21:14:45,757 INFO [Time-limited test {}] hbase.HBaseTestingUtility(824): Setting fs.s3a.committer.staging.tmp.path to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/09645b7e-9829-f1fc-19c7-d9675fc19400/fs.s3a.committer.staging.tmp.path in system properties and HBase conf 2024-11-17T21:14:46,218 WARN [Time-limited test {}] blockmanagement.DatanodeManager(468): The given interval for marking stale datanode = 30000, which is larger than heartbeat expire interval 20000. 2024-11-17T21:14:46,542 WARN [Time-limited test {}] impl.MetricsConfig(138): Cannot locate configuration: tried hadoop-metrics2-namenode.properties,hadoop-metrics2.properties 2024-11-17T21:14:46,612 INFO [Time-limited test {}] log.Log(170): Logging initialized @2301ms to org.eclipse.jetty.util.log.Slf4jLog 2024-11-17T21:14:46,683 WARN [Time-limited test {}] server.AuthenticationFilter(240): Unable to initialize FileSignerSecretProvider, falling back to use random secrets. Reason: Could not read signature secret file: /home/jenkins/hadoop-http-auth-signature-secret 2024-11-17T21:14:46,743 INFO [Time-limited test {}] server.Server(375): jetty-9.4.53.v20231009; built: 2023-10-09T12:29:09.265Z; git: 27bde00a0b95a1d5bbee0eae7984f891d2d0f8c9; jvm 17.0.11+9 2024-11-17T21:14:46,763 INFO [Time-limited test {}] session.DefaultSessionIdManager(334): DefaultSessionIdManager workerName=node0 2024-11-17T21:14:46,763 INFO [Time-limited test {}] session.DefaultSessionIdManager(339): No SessionScavenger set, using defaults 2024-11-17T21:14:46,764 INFO [Time-limited test {}] session.HouseKeeper(132): node0 Scavenging every 660000ms 2024-11-17T21:14:46,776 WARN [Time-limited test {}] server.AuthenticationFilter(240): Unable to initialize FileSignerSecretProvider, falling back to use random secrets. 
Reason: Could not read signature secret file: /home/jenkins/hadoop-http-auth-signature-secret 2024-11-17T21:14:46,779 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.s.ServletContextHandler@3837011d{logs,/logs,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/09645b7e-9829-f1fc-19c7-d9675fc19400/hadoop.log.dir/,AVAILABLE} 2024-11-17T21:14:46,780 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.s.ServletContextHandler@6b6353bb{static,/static,jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/static,AVAILABLE} 2024-11-17T21:14:46,973 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.w.WebAppContext@76669e88{hdfs,/,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/09645b7e-9829-f1fc-19c7-d9675fc19400/java.io.tmpdir/jetty-localhost-36543-hadoop-hdfs-3_4_1-tests_jar-_-any-11474296372031686664/webapp/,AVAILABLE}{jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/hdfs} 2024-11-17T21:14:46,981 INFO [Time-limited test {}] server.AbstractConnector(333): Started ServerConnector@75c47eed{HTTP/1.1, (http/1.1)}{localhost:36543} 2024-11-17T21:14:46,981 INFO [Time-limited test {}] server.Server(415): Started @2671ms 2024-11-17T21:14:47,006 WARN [Time-limited test {}] blockmanagement.DatanodeManager(468): The given interval for marking stale datanode = 30000, which is larger than heartbeat expire interval 20000. 2024-11-17T21:14:47,361 WARN [Time-limited test {}] server.AuthenticationFilter(240): Unable to initialize FileSignerSecretProvider, falling back to use random secrets. 
Reason: Could not read signature secret file: /home/jenkins/hadoop-http-auth-signature-secret 2024-11-17T21:14:47,369 INFO [Time-limited test {}] server.Server(375): jetty-9.4.53.v20231009; built: 2023-10-09T12:29:09.265Z; git: 27bde00a0b95a1d5bbee0eae7984f891d2d0f8c9; jvm 17.0.11+9 2024-11-17T21:14:47,370 INFO [Time-limited test {}] session.DefaultSessionIdManager(334): DefaultSessionIdManager workerName=node0 2024-11-17T21:14:47,371 INFO [Time-limited test {}] session.DefaultSessionIdManager(339): No SessionScavenger set, using defaults 2024-11-17T21:14:47,371 INFO [Time-limited test {}] session.HouseKeeper(132): node0 Scavenging every 600000ms 2024-11-17T21:14:47,372 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.s.ServletContextHandler@7db971bf{logs,/logs,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/09645b7e-9829-f1fc-19c7-d9675fc19400/hadoop.log.dir/,AVAILABLE} 2024-11-17T21:14:47,372 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.s.ServletContextHandler@7ac6a421{static,/static,jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/static,AVAILABLE} 2024-11-17T21:14:47,491 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.w.WebAppContext@18ba4f79{datanode,/,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/09645b7e-9829-f1fc-19c7-d9675fc19400/java.io.tmpdir/jetty-localhost-39095-hadoop-hdfs-3_4_1-tests_jar-_-any-18033711411755917688/webapp/,AVAILABLE}{jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/datanode} 2024-11-17T21:14:47,492 INFO [Time-limited test {}] server.AbstractConnector(333): Started ServerConnector@56ade895{HTTP/1.1, (http/1.1)}{localhost:39095} 2024-11-17T21:14:47,492 INFO [Time-limited test {}] server.Server(415): Started @3182ms 2024-11-17T21:14:47,547 WARN [Time-limited test {}] web.RestCsrfPreventionFilterHandler(75): Got null for restCsrfPreventionFilter - will not do any filtering. 2024-11-17T21:14:47,664 WARN [Time-limited test {}] server.AuthenticationFilter(240): Unable to initialize FileSignerSecretProvider, falling back to use random secrets. 
Reason: Could not read signature secret file: /home/jenkins/hadoop-http-auth-signature-secret 2024-11-17T21:14:47,673 INFO [Time-limited test {}] server.Server(375): jetty-9.4.53.v20231009; built: 2023-10-09T12:29:09.265Z; git: 27bde00a0b95a1d5bbee0eae7984f891d2d0f8c9; jvm 17.0.11+9 2024-11-17T21:14:47,675 INFO [Time-limited test {}] session.DefaultSessionIdManager(334): DefaultSessionIdManager workerName=node0 2024-11-17T21:14:47,676 INFO [Time-limited test {}] session.DefaultSessionIdManager(339): No SessionScavenger set, using defaults 2024-11-17T21:14:47,676 INFO [Time-limited test {}] session.HouseKeeper(132): node0 Scavenging every 660000ms 2024-11-17T21:14:47,678 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.s.ServletContextHandler@170edd35{logs,/logs,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/09645b7e-9829-f1fc-19c7-d9675fc19400/hadoop.log.dir/,AVAILABLE} 2024-11-17T21:14:47,679 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.s.ServletContextHandler@19945434{static,/static,jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/static,AVAILABLE} 2024-11-17T21:14:47,835 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.w.WebAppContext@3c806758{datanode,/,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/09645b7e-9829-f1fc-19c7-d9675fc19400/java.io.tmpdir/jetty-localhost-37063-hadoop-hdfs-3_4_1-tests_jar-_-any-10308680010298665372/webapp/,AVAILABLE}{jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/datanode} 2024-11-17T21:14:47,836 INFO [Time-limited test {}] server.AbstractConnector(333): Started ServerConnector@23cb9268{HTTP/1.1, (http/1.1)}{localhost:37063} 2024-11-17T21:14:47,836 INFO [Time-limited test {}] server.Server(415): Started @3526ms 2024-11-17T21:14:47,839 WARN [Time-limited test {}] web.RestCsrfPreventionFilterHandler(75): Got null for restCsrfPreventionFilter - will not do any filtering. 2024-11-17T21:14:47,879 WARN [Time-limited test {}] server.AuthenticationFilter(240): Unable to initialize FileSignerSecretProvider, falling back to use random secrets. 
Reason: Could not read signature secret file: /home/jenkins/hadoop-http-auth-signature-secret 2024-11-17T21:14:47,884 INFO [Time-limited test {}] server.Server(375): jetty-9.4.53.v20231009; built: 2023-10-09T12:29:09.265Z; git: 27bde00a0b95a1d5bbee0eae7984f891d2d0f8c9; jvm 17.0.11+9 2024-11-17T21:14:47,888 INFO [Time-limited test {}] session.DefaultSessionIdManager(334): DefaultSessionIdManager workerName=node0 2024-11-17T21:14:47,889 INFO [Time-limited test {}] session.DefaultSessionIdManager(339): No SessionScavenger set, using defaults 2024-11-17T21:14:47,889 INFO [Time-limited test {}] session.HouseKeeper(132): node0 Scavenging every 600000ms 2024-11-17T21:14:47,892 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.s.ServletContextHandler@7a3b1c28{logs,/logs,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/09645b7e-9829-f1fc-19c7-d9675fc19400/hadoop.log.dir/,AVAILABLE} 2024-11-17T21:14:47,893 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.s.ServletContextHandler@4eb66c55{static,/static,jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/static,AVAILABLE} 2024-11-17T21:14:47,974 WARN [Thread-107 {}] impl.BlockPoolSlice(347): dfsUsed file missing in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/09645b7e-9829-f1fc-19c7-d9675fc19400/cluster_2a0274ea-93ca-a98a-2c9d-271c63f62376/dfs/data/data1/current/BP-754513072-172.17.0.2-1731878086310/current, will proceed with Du for space computation calculation, 2024-11-17T21:14:47,974 WARN [Thread-105 {}] impl.BlockPoolSlice(347): dfsUsed file missing in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/09645b7e-9829-f1fc-19c7-d9675fc19400/cluster_2a0274ea-93ca-a98a-2c9d-271c63f62376/dfs/data/data3/current/BP-754513072-172.17.0.2-1731878086310/current, will proceed with Du for space computation calculation, 2024-11-17T21:14:47,974 WARN [Thread-108 {}] impl.BlockPoolSlice(347): dfsUsed file missing in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/09645b7e-9829-f1fc-19c7-d9675fc19400/cluster_2a0274ea-93ca-a98a-2c9d-271c63f62376/dfs/data/data2/current/BP-754513072-172.17.0.2-1731878086310/current, will proceed with Du for space computation calculation, 2024-11-17T21:14:47,974 WARN [Thread-106 {}] impl.BlockPoolSlice(347): dfsUsed file missing in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/09645b7e-9829-f1fc-19c7-d9675fc19400/cluster_2a0274ea-93ca-a98a-2c9d-271c63f62376/dfs/data/data4/current/BP-754513072-172.17.0.2-1731878086310/current, will proceed with Du for space computation calculation, 2024-11-17T21:14:48,013 WARN [Thread-82 {}] datanode.DirectoryScanner(302): dfs.datanode.directoryscan.throttle.limit.ms.per.sec set to value above 1000 ms/sec. Assuming default value of -1 2024-11-17T21:14:48,013 WARN [Thread-58 {}] datanode.DirectoryScanner(302): dfs.datanode.directoryscan.throttle.limit.ms.per.sec set to value above 1000 ms/sec. 
Assuming default value of -1 2024-11-17T21:14:48,051 INFO [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.w.WebAppContext@180bb084{datanode,/,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/09645b7e-9829-f1fc-19c7-d9675fc19400/java.io.tmpdir/jetty-localhost-33579-hadoop-hdfs-3_4_1-tests_jar-_-any-10355568820931770396/webapp/,AVAILABLE}{jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/datanode} 2024-11-17T21:14:48,052 INFO [Time-limited test {}] server.AbstractConnector(333): Started ServerConnector@4a99ee51{HTTP/1.1, (http/1.1)}{localhost:33579} 2024-11-17T21:14:48,052 INFO [Time-limited test {}] server.Server(415): Started @3743ms 2024-11-17T21:14:48,055 WARN [Time-limited test {}] web.RestCsrfPreventionFilterHandler(75): Got null for restCsrfPreventionFilter - will not do any filtering. 2024-11-17T21:14:48,092 INFO [Block report processor {}] blockmanagement.BlockManager(2940): BLOCK* processReport 0xf964dd68316e6537 with lease ID 0xb2adfd7fd5a190d5: Processing first storage report for DS-dd8c75c6-292f-4f1f-bb94-742d1fc7d2f4 from datanode DatanodeRegistration(127.0.0.1:41963, datanodeUuid=1c3645fd-da23-44e6-a683-d522c8dc4494, infoPort=45905, infoSecurePort=0, ipcPort=40145, storageInfo=lv=-57;cid=testClusterID;nsid=686505069;c=1731878086310) 2024-11-17T21:14:48,093 INFO [Block report processor {}] blockmanagement.BlockManager(2972): BLOCK* processReport 0xf964dd68316e6537 with lease ID 0xb2adfd7fd5a190d5: from storage DS-dd8c75c6-292f-4f1f-bb94-742d1fc7d2f4 node DatanodeRegistration(127.0.0.1:41963, datanodeUuid=1c3645fd-da23-44e6-a683-d522c8dc4494, infoPort=45905, infoSecurePort=0, ipcPort=40145, storageInfo=lv=-57;cid=testClusterID;nsid=686505069;c=1731878086310), blocks: 0, hasStaleStorage: true, processing time: 1 msecs, invalidatedBlocks: 0 2024-11-17T21:14:48,094 INFO [Block report processor {}] blockmanagement.BlockManager(2940): BLOCK* processReport 0x779ae7139fe2d1a with lease ID 0xb2adfd7fd5a190d6: Processing first storage report for DS-0dc17499-73e1-4891-a8aa-a698546e6351 from datanode DatanodeRegistration(127.0.0.1:43941, datanodeUuid=c97e213b-0dda-4f70-8927-a88ec6af4e08, infoPort=38217, infoSecurePort=0, ipcPort=37733, storageInfo=lv=-57;cid=testClusterID;nsid=686505069;c=1731878086310) 2024-11-17T21:14:48,094 INFO [Block report processor {}] blockmanagement.BlockManager(2972): BLOCK* processReport 0x779ae7139fe2d1a with lease ID 0xb2adfd7fd5a190d6: from storage DS-0dc17499-73e1-4891-a8aa-a698546e6351 node DatanodeRegistration(127.0.0.1:43941, datanodeUuid=c97e213b-0dda-4f70-8927-a88ec6af4e08, infoPort=38217, infoSecurePort=0, ipcPort=37733, storageInfo=lv=-57;cid=testClusterID;nsid=686505069;c=1731878086310), blocks: 0, hasStaleStorage: true, processing time: 1 msecs, invalidatedBlocks: 0 2024-11-17T21:14:48,094 INFO [Block report processor {}] blockmanagement.BlockManager(2940): BLOCK* processReport 0xf964dd68316e6537 with lease ID 0xb2adfd7fd5a190d5: Processing first storage report for DS-fd18be2b-9d86-4163-a74e-4fa11ebf796e from datanode DatanodeRegistration(127.0.0.1:41963, datanodeUuid=1c3645fd-da23-44e6-a683-d522c8dc4494, infoPort=45905, infoSecurePort=0, ipcPort=40145, storageInfo=lv=-57;cid=testClusterID;nsid=686505069;c=1731878086310) 2024-11-17T21:14:48,094 INFO [Block report processor {}] blockmanagement.BlockManager(2972): BLOCK* processReport 0xf964dd68316e6537 with lease ID 
0xb2adfd7fd5a190d5: from storage DS-fd18be2b-9d86-4163-a74e-4fa11ebf796e node DatanodeRegistration(127.0.0.1:41963, datanodeUuid=1c3645fd-da23-44e6-a683-d522c8dc4494, infoPort=45905, infoSecurePort=0, ipcPort=40145, storageInfo=lv=-57;cid=testClusterID;nsid=686505069;c=1731878086310), blocks: 0, hasStaleStorage: false, processing time: 0 msecs, invalidatedBlocks: 0 2024-11-17T21:14:48,094 INFO [Block report processor {}] blockmanagement.BlockManager(2940): BLOCK* processReport 0x779ae7139fe2d1a with lease ID 0xb2adfd7fd5a190d6: Processing first storage report for DS-6b27cd77-2561-4435-9fd6-5996636555e0 from datanode DatanodeRegistration(127.0.0.1:43941, datanodeUuid=c97e213b-0dda-4f70-8927-a88ec6af4e08, infoPort=38217, infoSecurePort=0, ipcPort=37733, storageInfo=lv=-57;cid=testClusterID;nsid=686505069;c=1731878086310) 2024-11-17T21:14:48,095 INFO [Block report processor {}] blockmanagement.BlockManager(2972): BLOCK* processReport 0x779ae7139fe2d1a with lease ID 0xb2adfd7fd5a190d6: from storage DS-6b27cd77-2561-4435-9fd6-5996636555e0 node DatanodeRegistration(127.0.0.1:43941, datanodeUuid=c97e213b-0dda-4f70-8927-a88ec6af4e08, infoPort=38217, infoSecurePort=0, ipcPort=37733, storageInfo=lv=-57;cid=testClusterID;nsid=686505069;c=1731878086310), blocks: 0, hasStaleStorage: false, processing time: 0 msecs, invalidatedBlocks: 0 2024-11-17T21:14:48,159 WARN [Thread-139 {}] impl.BlockPoolSlice(347): dfsUsed file missing in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/09645b7e-9829-f1fc-19c7-d9675fc19400/cluster_2a0274ea-93ca-a98a-2c9d-271c63f62376/dfs/data/data5/current/BP-754513072-172.17.0.2-1731878086310/current, will proceed with Du for space computation calculation, 2024-11-17T21:14:48,159 WARN [Thread-140 {}] impl.BlockPoolSlice(347): dfsUsed file missing in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/09645b7e-9829-f1fc-19c7-d9675fc19400/cluster_2a0274ea-93ca-a98a-2c9d-271c63f62376/dfs/data/data6/current/BP-754513072-172.17.0.2-1731878086310/current, will proceed with Du for space computation calculation, 2024-11-17T21:14:48,187 WARN [Thread-129 {}] datanode.DirectoryScanner(302): dfs.datanode.directoryscan.throttle.limit.ms.per.sec set to value above 1000 ms/sec. 
Assuming default value of -1 2024-11-17T21:14:48,193 INFO [Block report processor {}] blockmanagement.BlockManager(2940): BLOCK* processReport 0xac8deb3e11e39fe6 with lease ID 0xb2adfd7fd5a190d7: Processing first storage report for DS-2aedbc45-fd4e-4d62-83f9-420e54976b0e from datanode DatanodeRegistration(127.0.0.1:40579, datanodeUuid=a945aceb-762b-4dfe-b44c-f120ab5e97a9, infoPort=33721, infoSecurePort=0, ipcPort=43557, storageInfo=lv=-57;cid=testClusterID;nsid=686505069;c=1731878086310) 2024-11-17T21:14:48,193 INFO [Block report processor {}] blockmanagement.BlockManager(2972): BLOCK* processReport 0xac8deb3e11e39fe6 with lease ID 0xb2adfd7fd5a190d7: from storage DS-2aedbc45-fd4e-4d62-83f9-420e54976b0e node DatanodeRegistration(127.0.0.1:40579, datanodeUuid=a945aceb-762b-4dfe-b44c-f120ab5e97a9, infoPort=33721, infoSecurePort=0, ipcPort=43557, storageInfo=lv=-57;cid=testClusterID;nsid=686505069;c=1731878086310), blocks: 0, hasStaleStorage: true, processing time: 1 msecs, invalidatedBlocks: 0 2024-11-17T21:14:48,193 INFO [Block report processor {}] blockmanagement.BlockManager(2940): BLOCK* processReport 0xac8deb3e11e39fe6 with lease ID 0xb2adfd7fd5a190d7: Processing first storage report for DS-c2691695-3670-49ec-b0da-60e0db625fce from datanode DatanodeRegistration(127.0.0.1:40579, datanodeUuid=a945aceb-762b-4dfe-b44c-f120ab5e97a9, infoPort=33721, infoSecurePort=0, ipcPort=43557, storageInfo=lv=-57;cid=testClusterID;nsid=686505069;c=1731878086310) 2024-11-17T21:14:48,193 INFO [Block report processor {}] blockmanagement.BlockManager(2972): BLOCK* processReport 0xac8deb3e11e39fe6 with lease ID 0xb2adfd7fd5a190d7: from storage DS-c2691695-3670-49ec-b0da-60e0db625fce node DatanodeRegistration(127.0.0.1:40579, datanodeUuid=a945aceb-762b-4dfe-b44c-f120ab5e97a9, infoPort=33721, infoSecurePort=0, ipcPort=43557, storageInfo=lv=-57;cid=testClusterID;nsid=686505069;c=1731878086310), blocks: 0, hasStaleStorage: false, processing time: 0 msecs, invalidatedBlocks: 0 2024-11-17T21:14:48,393 DEBUG [Time-limited test {}] hbase.HBaseTestingUtility(703): Setting hbase.rootdir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/09645b7e-9829-f1fc-19c7-d9675fc19400 2024-11-17T21:14:48,404 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testWALClosureFailureAndCleanup Thread=156, OpenFileDescriptor=390, MaxFileDescriptor=1048576, SystemLoadAverage=129, ProcessCount=11, AvailableMemoryMB=11859 2024-11-17T21:14:48,422 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-11-17T21:14:48,425 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-11-17T21:14:48,666 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:41963 is added to blk_1073741825_1001 (size=7) 2024-11-17T21:14:48,668 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40579 is added to blk_1073741825_1001 (size=7) 2024-11-17T21:14:48,668 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43941 is added to blk_1073741825_1001 (size=7) 2024-11-17T21:14:49,073 INFO [Time-limited test {}] util.FSUtils(490): Created version 
file at hdfs://localhost:37527/user/jenkins/test-data/a45ee5a3-5b69-e870-43d7-9500a26edffb with version=8 2024-11-17T21:14:49,074 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-11-17T21:14:49,078 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-11-17T21:14:49,089 DEBUG [Time-limited test {}] util.ClassSize(228): Using Unsafe to estimate memory layout 2024-11-17T21:14:49,113 INFO [Time-limited test {}] metrics.MetricRegistriesLoader(60): Loaded MetricRegistries class org.apache.hadoop.hbase.metrics.impl.MetricRegistriesImpl 2024-11-17T21:14:49,115 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911. 2024-11-17T21:14:49,118 INFO [Time-limited test {}] wal.AbstractFSWAL(500): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:37527/user/jenkins/test-data/a45ee5a3-5b69-e870-43d7-9500a26edffb/testWALClosureFailureAndCleanup, archiveDir=hdfs://localhost:37527/user/jenkins/test-data/a45ee5a3-5b69-e870-43d7-9500a26edffb/oldWALs, maxLogs=1760 2024-11-17T21:14:49,162 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1731878089157 2024-11-17T21:14:49,163 DEBUG [Time-limited test {}] util.CommonFSUtils$DfsBuilderUtility(752): Using builder API via reflection for DFS file creation replicate flag. 2024-11-17T21:14:49,164 DEBUG [Time-limited test {}] util.CommonFSUtils$DfsBuilderUtility(762): Using builder API via reflection for DFS file creation noLocalWrite flag. 2024-11-17T21:14:49,218 INFO [Time-limited test {}] wal.AbstractFSWAL(841): New WAL /user/jenkins/test-data/a45ee5a3-5b69-e870-43d7-9500a26edffb/testWALClosureFailureAndCleanup/wal.1731878089157 2024-11-17T21:14:49,264 DEBUG [Time-limited test {}] wal.AbstractFSWAL(925): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:38217:38217),(127.0.0.1/127.0.0.1:45905:45905),(127.0.0.1/127.0.0.1:33721:33721)] 2024-11-17T21:14:49,278 INFO [Time-limited test {}] regionserver.ChunkCreator(472): data poolSizePercentage is less than 0. So not using pool 2024-11-17T21:14:49,278 INFO [Time-limited test {}] regionserver.ChunkCreator(472): index poolSizePercentage is less than 0. 
So not using pool 2024-11-17T21:14:49,297 INFO [Time-limited test {}] regionserver.HRegion(7106): creating {ENCODED => 90156c8d93a287cb83143aa6c7abd1fc, NAME => 'testWALClosureFailureAndCleanup,,1731878089277.90156c8d93a287cb83143aa6c7abd1fc.', STARTKEY => '', ENDKEY => ''}, tableDescriptor='testWALClosureFailureAndCleanup', {NAME => 'b', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'}, regionDir=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/09645b7e-9829-f1fc-19c7-d9675fc19400 2024-11-17T21:14:49,312 WARN [IPC Server handler 0 on default port 37527 {}] blockmanagement.BlockPlacementPolicyDefault(501): Failed to place enough replicas, still in need of 1 to reach 3 (unavailableStorages=[], storagePolicy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]}, newBlock=true) For more information, please enable DEBUG log level on org.apache.hadoop.hdfs.server.blockmanagement.BlockPlacementPolicy and org.apache.hadoop.net.NetworkTopology 2024-11-17T21:14:49,312 WARN [IPC Server handler 0 on default port 37527 {}] protocol.BlockStoragePolicy(161): Failed to place enough replicas: expected size is 1 but only 0 storage types can be selected (replication=3, selected=[], unavailable=[DISK], removed=[DISK], policy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]}) 2024-11-17T21:14:49,312 WARN [IPC Server handler 0 on default port 37527 {}] blockmanagement.BlockPlacementPolicyDefault(501): Failed to place enough replicas, still in need of 1 to reach 3 (unavailableStorages=[DISK], storagePolicy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]}, newBlock=true) All required storage types are unavailable: unavailableStorages=[DISK], storagePolicy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]} 2024-11-17T21:14:49,320 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:41963 is added to blk_1073741827_1003 (size=66) 2024-11-17T21:14:49,321 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43941 is added to blk_1073741827_1003 (size=66) 2024-11-17T21:14:49,325 INFO [Time-limited test {}] throttle.StoreHotnessProtector(112): StoreHotnessProtector is disabled. Set hbase.region.store.parallel.put.limit > 0 to enable, which may help mitigate load under heavy write pressure. 
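The HRegion(7106) entry above prints the table descriptor used by the test region. As a rough illustration only (assuming the HBase 2.x client API on the classpath, and not taken from TestFSHLog itself), a descriptor of the same shape, table testWALClosureFailureAndCleanup with a single family 'b' keeping one version and defaults elsewhere, could be built like this:

```java
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.util.Bytes;

public class TableDescriptorSketch {
  public static void main(String[] args) {
    // Mirrors the descriptor printed by HRegion(7106) above: family 'b',
    // VERSIONS => '1', everything else left at HBase defaults.
    TableDescriptor td = TableDescriptorBuilder
        .newBuilder(TableName.valueOf("testWALClosureFailureAndCleanup"))
        .setColumnFamily(ColumnFamilyDescriptorBuilder
            .newBuilder(Bytes.toBytes("b"))
            .setMaxVersions(1)
            .build())
        .build();
    System.out.println(td);
  }
}
```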
2024-11-17T21:14:49,326 DEBUG [Time-limited test {}] regionserver.HRegion(894): Instantiated testWALClosureFailureAndCleanup,,1731878089277.90156c8d93a287cb83143aa6c7abd1fc.; StoreHotnessProtector, parallelPutToStoreThreadLimit=0 ; minColumnNum=100 ; preparePutThreadLimit=0 ; hotProtect now disable 2024-11-17T21:14:49,377 INFO [StoreOpener-90156c8d93a287cb83143aa6c7abd1fc-1 {}] regionserver.HStore(400): Created cacheConfig: cacheDataOnRead=true, cacheDataOnWrite=false, cacheIndexesOnWrite=false, cacheBloomsOnWrite=false, cacheEvictOnClose=false, cacheDataCompressed=false, prefetchOnOpen=false, for column family b of region 90156c8d93a287cb83143aa6c7abd1fc 2024-11-17T21:14:49,404 INFO [StoreOpener-90156c8d93a287cb83143aa6c7abd1fc-1 {}] compactions.CompactionConfiguration(181): size [minCompactSize:128 MB, maxCompactSize:8.00 EB, offPeakMaxCompactSize:8.00 EB); files [minFilesToCompact:3, maxFilesToCompact:10); ratio 1.200000; off-peak ratio 5.000000; throttle point 2684354560; major period 604800000, major jitter 0.500000, min locality to compact 0.000000; tiered compaction: max_age 9223372036854775807, incoming window min 6, compaction policy for tiered window org.apache.hadoop.hbase.regionserver.compactions.ExploringCompactionPolicy, single output for minor true, compaction window factory org.apache.hadoop.hbase.regionserver.compactions.ExponentialCompactionWindowFactory, region 90156c8d93a287cb83143aa6c7abd1fc columnFamilyName b 2024-11-17T21:14:49,410 DEBUG [StoreOpener-90156c8d93a287cb83143aa6c7abd1fc-1 {}] storefiletracker.StoreFileTrackerFactory(122): instantiating StoreFileTracker impl org.apache.hadoop.hbase.regionserver.storefiletracker.DefaultStoreFileTracker 2024-11-17T21:14:49,417 INFO [StoreOpener-90156c8d93a287cb83143aa6c7abd1fc-1 {}] regionserver.HStore(327): Store=90156c8d93a287cb83143aa6c7abd1fc/b, memstore type=DefaultMemStore, storagePolicy=NONE, verifyBulkLoads=false, parallelPutCountPrintThreshold=50, encoding=NONE, compression=NONE 2024-11-17T21:14:49,424 DEBUG [Time-limited test {}] regionserver.HRegion(5301): Found 0 recovered edits file(s) under hdfs://localhost:37527/user/jenkins/test-data/a45ee5a3-5b69-e870-43d7-9500a26edffb/data/default/testWALClosureFailureAndCleanup/90156c8d93a287cb83143aa6c7abd1fc 2024-11-17T21:14:49,426 DEBUG [Time-limited test {}] regionserver.HRegion(5301): Found 0 recovered edits file(s) under hdfs://localhost:37527/user/jenkins/test-data/a45ee5a3-5b69-e870-43d7-9500a26edffb/data/default/testWALClosureFailureAndCleanup/90156c8d93a287cb83143aa6c7abd1fc 2024-11-17T21:14:49,427 DEBUG [Time-limited test {}] regionserver.HRegion(5301): Found 0 recovered edits file(s) under hdfs://localhost:37527/user/jenkins/test-data/acfd8122-d9ae-7f15-ac5f-0e0d10b0e1e3/data/default/testWALClosureFailureAndCleanup/90156c8d93a287cb83143aa6c7abd1fc 2024-11-17T21:14:49,446 DEBUG [Time-limited test {}] regionserver.HRegion(1085): writing seq id for 90156c8d93a287cb83143aa6c7abd1fc 2024-11-17T21:14:49,452 DEBUG [Time-limited test {}] wal.WALSplitUtil(409): Wrote file=hdfs://localhost:37527/user/jenkins/test-data/acfd8122-d9ae-7f15-ac5f-0e0d10b0e1e3/data/default/testWALClosureFailureAndCleanup/90156c8d93a287cb83143aa6c7abd1fc/recovered.edits/1.seqid, newMaxSeqId=1, maxSeqId=-1 2024-11-17T21:14:49,453 INFO [Time-limited test {}] regionserver.HRegion(1102): Opened 90156c8d93a287cb83143aa6c7abd1fc; next sequenceid=2; SteppingSplitPolicysuper{IncreasingToUpperBoundRegionSplitPolicy{initialSize=268435456, ConstantSizeRegionSplitPolicy{desiredMaxFileSize=59190647, 
jitterRate=-0.11799062788486481}}}, FlushLargeStoresPolicy{flushSizeLowerBound=-1}
2024-11-17T21:14:49,463 DEBUG [Time-limited test {}] regionserver.HRegion(1001): Region open journal for 90156c8d93a287cb83143aa6c7abd1fc:
2024-11-17T21:14:49,494 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1731878089494
2024-11-17T21:14:49,506 INFO [Time-limited test {}] wal.AbstractFSWAL(837): Rolled WAL /user/jenkins/test-data/a45ee5a3-5b69-e870-43d7-9500a26edffb/testWALClosureFailureAndCleanup/wal.1731878089157 with entries=2, filesize=0 B; new WAL /user/jenkins/test-data/a45ee5a3-5b69-e870-43d7-9500a26edffb/testWALClosureFailureAndCleanup/wal.1731878089494
2024-11-17T21:14:49,507 DEBUG [Time-limited test {}] wal.AbstractFSWAL(925): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:33721:33721),(127.0.0.1/127.0.0.1:38217:38217),(127.0.0.1/127.0.0.1:45905:45905)]
2024-11-17T21:14:49,508 DEBUG [Time-limited test {}] wal.AbstractFSWAL(751): hdfs://localhost:37527/user/jenkins/test-data/a45ee5a3-5b69-e870-43d7-9500a26edffb/testWALClosureFailureAndCleanup/wal.1731878089157 is not closed yet, will try archiving it next time
2024-11-17T21:14:49,510 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1731878089509
2024-11-17T21:14:49,507 WARN [Close-WAL-Writer-0 {}] wal.FSHLog(462): close old writer failed.
java.io.IOException: WAL close injected failure..
	at org.apache.hadoop.hbase.regionserver.wal.TestFSHLog$2FailingWriter.close(TestFSHLog.java:351) ~[test-classes/:?]
	at org.apache.hadoop.hbase.regionserver.wal.FSHLog.closeWriter(FSHLog.java:459) ~[classes/:?]
	at org.apache.hadoop.hbase.regionserver.wal.FSHLog.lambda$doReplaceWriter$0(FSHLog.java:402) ~[classes/:?]
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1136) ~[?:?]
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635) ~[?:?]
	at java.lang.Thread.run(Thread.java:840) ~[?:?]
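The Close-WAL-Writer-0 stack trace above comes from the test's own failure injection: TestFSHLog wraps the WAL writer so that close() throws. The snippet below is a minimal, self-contained sketch of that pattern using a plain Closeable; the real FailingWriter inside TestFSHLog wraps HBase's internal writer interface and differs in detail.

```java
import java.io.Closeable;
import java.io.IOException;

// Sketch of the failure-injection idea behind TestFSHLog$2FailingWriter:
// a wrapper whose close() always throws, forcing the WAL's "close old
// writer" path down its error-handling branch.
final class FailingCloseWrapper implements Closeable {
  private final Closeable delegate;

  FailingCloseWrapper(Closeable delegate) {
    this.delegate = delegate;
  }

  @Override
  public void close() throws IOException {
    // Close the underlying resource first so nothing leaks, then simulate
    // the failure the test wants to observe during WAL rolling.
    delegate.close();
    throw new IOException("WAL close injected failure.");
  }
}
```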
2024-11-17T21:14:49,513 WARN [IPC Server handler 3 on default port 37527 {}] blockmanagement.BlockPlacementPolicyDefault(501): Failed to place enough replicas, still in need of 1 to reach 3 (unavailableStorages=[], storagePolicy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]}, newBlock=true) For more information, please enable DEBUG log level on org.apache.hadoop.hdfs.server.blockmanagement.BlockPlacementPolicy and org.apache.hadoop.net.NetworkTopology 2024-11-17T21:14:49,513 DEBUG [Close-WAL-Writer-0 {}] util.RecoverLeaseFSUtils(47): Initialize RecoverLeaseFSUtils 2024-11-17T21:14:49,513 DEBUG [Close-WAL-Writer-0 {}] util.RecoverLeaseFSUtils(59): set recoverLeaseMethod to org.apache.hadoop.fs.LeaseRecoverable.recoverLease() 2024-11-17T21:14:49,513 WARN [IPC Server handler 3 on default port 37527 {}] protocol.BlockStoragePolicy(161): Failed to place enough replicas: expected size is 1 but only 0 storage types can be selected (replication=3, selected=[], unavailable=[DISK], removed=[DISK], policy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]}) 2024-11-17T21:14:49,513 INFO [Close-WAL-Writer-0 {}] util.RecoverLeaseFSUtils(134): Recover lease on dfs file hdfs://localhost:37527/user/jenkins/test-data/a45ee5a3-5b69-e870-43d7-9500a26edffb/testWALClosureFailureAndCleanup/wal.1731878089157 2024-11-17T21:14:49,513 WARN [IPC Server handler 3 on default port 37527 {}] blockmanagement.BlockPlacementPolicyDefault(501): Failed to place enough replicas, still in need of 1 to reach 3 (unavailableStorages=[DISK], storagePolicy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]}, newBlock=true) All required storage types are unavailable: unavailableStorages=[DISK], storagePolicy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]} 2024-11-17T21:14:49,517 WARN [IPC Server handler 4 on default port 37527 {}] namenode.FSNamesystem(3854): DIR* NameSystem.internalReleaseLease: File /user/jenkins/test-data/a45ee5a3-5b69-e870-43d7-9500a26edffb/testWALClosureFailureAndCleanup/wal.1731878089157 has not been closed. Lease recovery is in progress. RecoveryId = 1006 for block blk_1073741826_1002 2024-11-17T21:14:49,519 INFO [Close-WAL-Writer-0 {}] util.RecoverLeaseFSUtils(223): Failed to recover lease, attempt=0 on file=hdfs://localhost:37527/user/jenkins/test-data/a45ee5a3-5b69-e870-43d7-9500a26edffb/testWALClosureFailureAndCleanup/wal.1731878089157 after 6ms 2024-11-17T21:14:49,521 INFO [Time-limited test {}] wal.AbstractFSWAL(837): Rolled WAL /user/jenkins/test-data/a45ee5a3-5b69-e870-43d7-9500a26edffb/testWALClosureFailureAndCleanup/wal.1731878089494 with entries=2, filesize=0 B; new WAL /user/jenkins/test-data/a45ee5a3-5b69-e870-43d7-9500a26edffb/testWALClosureFailureAndCleanup/wal.1731878089509 2024-11-17T21:14:49,521 DEBUG [Time-limited test {}] wal.AbstractFSWAL(925): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:45905:45905),(127.0.0.1/127.0.0.1:38217:38217)] 2024-11-17T21:14:49,521 DEBUG [Time-limited test {}] wal.AbstractFSWAL(751): hdfs://localhost:37527/user/jenkins/test-data/a45ee5a3-5b69-e870-43d7-9500a26edffb/testWALClosureFailureAndCleanup/wal.1731878089157 is not closed yet, will try archiving it next time 2024-11-17T21:14:49,521 WARN [Close-WAL-Writer-1 {}] wal.FSHLog(462): close old writer failed. java.io.IOException: WAL close injected failure.. 
at org.apache.hadoop.hbase.regionserver.wal.TestFSHLog$2FailingWriter.close(TestFSHLog.java:351) ~[test-classes/:?] at org.apache.hadoop.hbase.regionserver.wal.FSHLog.closeWriter(FSHLog.java:459) ~[classes/:?] at org.apache.hadoop.hbase.regionserver.wal.FSHLog.lambda$doReplaceWriter$0(FSHLog.java:402) ~[classes/:?] at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1136) ~[?:?] at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635) ~[?:?] at java.lang.Thread.run(Thread.java:840) ~[?:?] 2024-11-17T21:14:49,521 DEBUG [Time-limited test {}] wal.AbstractFSWAL(751): hdfs://localhost:37527/user/jenkins/test-data/a45ee5a3-5b69-e870-43d7-9500a26edffb/testWALClosureFailureAndCleanup/wal.1731878089494 is not closed yet, will try archiving it next time 2024-11-17T21:14:49,522 INFO [Close-WAL-Writer-1 {}] util.RecoverLeaseFSUtils(134): Recover lease on dfs file hdfs://localhost:37527/user/jenkins/test-data/a45ee5a3-5b69-e870-43d7-9500a26edffb/testWALClosureFailureAndCleanup/wal.1731878089494 2024-11-17T21:14:49,522 WARN [IPC Server handler 2 on default port 37527 {}] namenode.FSNamesystem(3854): DIR* NameSystem.internalReleaseLease: File /user/jenkins/test-data/a45ee5a3-5b69-e870-43d7-9500a26edffb/testWALClosureFailureAndCleanup/wal.1731878089494 has not been closed. Lease recovery is in progress. RecoveryId = 1007 for block blk_1073741828_1004 2024-11-17T21:14:49,523 INFO [Close-WAL-Writer-1 {}] util.RecoverLeaseFSUtils(223): Failed to recover lease, attempt=0 on file=hdfs://localhost:37527/user/jenkins/test-data/a45ee5a3-5b69-e870-43d7-9500a26edffb/testWALClosureFailureAndCleanup/wal.1731878089494 after 1ms 2024-11-17T21:14:49,524 DEBUG [WAL-Shutdown-0 {}] wal.FSHLog(499): Closing WAL writer in /user/jenkins/test-data/a45ee5a3-5b69-e870-43d7-9500a26edffb/testWALClosureFailureAndCleanup 2024-11-17T21:14:49,527 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43941 is added to blk_1073741829_1005 (size=93) 2024-11-17T21:14:49,528 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:41963 is added to blk_1073741829_1005 (size=93) 2024-11-17T21:14:50,210 WARN [ResponseProcessor for block BP-754513072-172.17.0.2-1731878086310:blk_1073741826_1002 {}] hdfs.DataStreamer$ResponseProcessor(1303): Exception for BP-754513072-172.17.0.2-1731878086310:blk_1073741826_1002 java.io.EOFException: Unexpected EOF while trying to read response from server at org.apache.hadoop.hdfs.protocolPB.PBHelperClient.vintPrefixed(PBHelperClient.java:529) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck.readFields(PipelineAck.java:244) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.DataStreamer$ResponseProcessor.run(DataStreamer.java:1180) ~[hadoop-hdfs-client-3.4.1.jar:?] 2024-11-17T21:14:50,209 ERROR [DataXceiver for client DFSClient_NONMAPREDUCE_900039209_22 at /127.0.0.1:53526 [Receiving block BP-754513072-172.17.0.2-1731878086310:blk_1073741826_1002] {}] datanode.DataXceiver(331): 127.0.0.1:43941:DataXceiver error processing WRITE_BLOCK operation src: /127.0.0.1:53526 dst: /127.0.0.1:43941 java.io.InterruptedIOException: Interrupted while waiting for IO on channel java.nio.channels.SocketChannel[connected local=localhost/127.0.0.1:43941 remote=/127.0.0.1:53526]. Total timeout mills is 5000, 3979 millis timeout left. 
at org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:350) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) ~[hadoop-common-3.4.1.jar:?] at java.io.BufferedInputStream.fill(BufferedInputStream.java:244) ~[?:?] at java.io.BufferedInputStream.read1(BufferedInputStream.java:284) ~[?:?] at java.io.BufferedInputStream.read(BufferedInputStream.java:343) ~[?:?] at java.io.DataInputStream.read(DataInputStream.java:151) ~[?:?] at org.apache.hadoop.io.IOUtils.readFully(IOUtils.java:214) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doReadFully(PacketReceiver.java:221) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doRead(PacketReceiver.java:144) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.receiveNextPacket(PacketReceiver.java:119) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receivePacket(BlockReceiver.java:553) ~[hadoop-hdfs-3.4.1.jar:?] at org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receiveBlock(BlockReceiver.java:1011) ~[hadoop-hdfs-3.4.1.jar:?] at org.apache.hadoop.hdfs.server.datanode.DataXceiver.writeBlock(DataXceiver.java:920) ~[hadoop-hdfs-3.4.1.jar:?] at org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.opWriteBlock(Receiver.java:176) ~[hadoop-hdfs-3.4.1.jar:?] at org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.processOp(Receiver.java:110) ~[hadoop-hdfs-3.4.1.jar:?] at org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:299) ~[hadoop-hdfs-3.4.1.jar:?] at java.lang.Thread.run(Thread.java:840) ~[?:?] 2024-11-17T21:14:50,210 ERROR [DataXceiver for client DFSClient_NONMAPREDUCE_900039209_22 at /127.0.0.1:37696 [Receiving block BP-754513072-172.17.0.2-1731878086310:blk_1073741826_1002] {}] datanode.DataXceiver(331): 127.0.0.1:41963:DataXceiver error processing WRITE_BLOCK operation src: /127.0.0.1:37696 dst: /127.0.0.1:41963 java.io.IOException: Premature EOF from inputStream at org.apache.hadoop.io.IOUtils.readFully(IOUtils.java:216) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doReadFully(PacketReceiver.java:221) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doRead(PacketReceiver.java:144) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.receiveNextPacket(PacketReceiver.java:119) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receivePacket(BlockReceiver.java:553) ~[hadoop-hdfs-3.4.1.jar:?] at org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receiveBlock(BlockReceiver.java:1011) ~[hadoop-hdfs-3.4.1.jar:?] at org.apache.hadoop.hdfs.server.datanode.DataXceiver.writeBlock(DataXceiver.java:920) ~[hadoop-hdfs-3.4.1.jar:?] at org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.opWriteBlock(Receiver.java:176) ~[hadoop-hdfs-3.4.1.jar:?] at org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.processOp(Receiver.java:110) ~[hadoop-hdfs-3.4.1.jar:?] at org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:299) ~[hadoop-hdfs-3.4.1.jar:?] 
at java.lang.Thread.run(Thread.java:840) ~[?:?] 2024-11-17T21:14:50,210 WARN [DataStreamer for file /user/jenkins/test-data/a45ee5a3-5b69-e870-43d7-9500a26edffb/testWALClosureFailureAndCleanup/wal.1731878089157 block BP-754513072-172.17.0.2-1731878086310:blk_1073741826_1002 {}] hdfs.DataStreamer(1731): Error Recovery for BP-754513072-172.17.0.2-1731878086310:blk_1073741826_1002 in pipeline [DatanodeInfoWithStorage[127.0.0.1:43941,DS-0dc17499-73e1-4891-a8aa-a698546e6351,DISK], DatanodeInfoWithStorage[127.0.0.1:41963,DS-dd8c75c6-292f-4f1f-bb94-742d1fc7d2f4,DISK], DatanodeInfoWithStorage[127.0.0.1:40579,DS-2aedbc45-fd4e-4d62-83f9-420e54976b0e,DISK]]: datanode 0(DatanodeInfoWithStorage[127.0.0.1:43941,DS-0dc17499-73e1-4891-a8aa-a698546e6351,DISK]) is bad. 2024-11-17T21:14:50,210 ERROR [DataXceiver for client DFSClient_NONMAPREDUCE_900039209_22 at /127.0.0.1:56400 [Receiving block BP-754513072-172.17.0.2-1731878086310:blk_1073741826_1002] {}] datanode.DataXceiver(331): 127.0.0.1:40579:DataXceiver error processing WRITE_BLOCK operation src: /127.0.0.1:56400 dst: /127.0.0.1:40579 java.io.IOException: Premature EOF from inputStream at org.apache.hadoop.io.IOUtils.readFully(IOUtils.java:216) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doReadFully(PacketReceiver.java:221) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doRead(PacketReceiver.java:144) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.receiveNextPacket(PacketReceiver.java:119) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receivePacket(BlockReceiver.java:553) ~[hadoop-hdfs-3.4.1.jar:?] at org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receiveBlock(BlockReceiver.java:1011) ~[hadoop-hdfs-3.4.1.jar:?] at org.apache.hadoop.hdfs.server.datanode.DataXceiver.writeBlock(DataXceiver.java:920) ~[hadoop-hdfs-3.4.1.jar:?] at org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.opWriteBlock(Receiver.java:176) ~[hadoop-hdfs-3.4.1.jar:?] at org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.processOp(Receiver.java:110) ~[hadoop-hdfs-3.4.1.jar:?] at org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:299) ~[hadoop-hdfs-3.4.1.jar:?] at java.lang.Thread.run(Thread.java:840) ~[?:?] 
2024-11-17T21:14:50,214 WARN [IPC Server handler 4 on default port 37527 {}] blockmanagement.BlockPlacementPolicyDefault(501): Failed to place enough replicas, still in need of 1 to reach 3 (unavailableStorages=[], storagePolicy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]}, newBlock=false) For more information, please enable DEBUG log level on org.apache.hadoop.hdfs.server.blockmanagement.BlockPlacementPolicy and org.apache.hadoop.net.NetworkTopology 2024-11-17T21:14:50,214 WARN [IPC Server handler 4 on default port 37527 {}] blockmanagement.BlockPlacementPolicyDefault(501): Failed to place enough replicas, still in need of 1 to reach 3 (unavailableStorages=[DISK], storagePolicy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]}, newBlock=false) For more information, please enable DEBUG log level on org.apache.hadoop.hdfs.server.blockmanagement.BlockPlacementPolicy and org.apache.hadoop.net.NetworkTopology 2024-11-17T21:14:50,214 WARN [IPC Server handler 4 on default port 37527 {}] protocol.BlockStoragePolicy(161): Failed to place enough replicas: expected size is 1 but only 0 storage types can be selected (replication=3, selected=[], unavailable=[DISK, ARCHIVE], removed=[DISK], policy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]}) 2024-11-17T21:14:50,215 WARN [IPC Server handler 4 on default port 37527 {}] blockmanagement.BlockPlacementPolicyDefault(501): Failed to place enough replicas, still in need of 1 to reach 3 (unavailableStorages=[DISK, ARCHIVE], storagePolicy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]}, newBlock=false) All required storage types are unavailable: unavailableStorages=[DISK, ARCHIVE], storagePolicy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]} 2024-11-17T21:14:50,217 WARN [DataStreamer for file /user/jenkins/test-data/a45ee5a3-5b69-e870-43d7-9500a26edffb/testWALClosureFailureAndCleanup/wal.1731878089157 block BP-754513072-172.17.0.2-1731878086310:blk_1073741826_1002 {}] hdfs.DataStreamer(859): DataStreamer Exception java.io.IOException: Failed to replace a bad datanode on the existing pipeline due to no more good datanodes being available to try. (Nodes: current=[DatanodeInfoWithStorage[127.0.0.1:41963,DS-dd8c75c6-292f-4f1f-bb94-742d1fc7d2f4,DISK], DatanodeInfoWithStorage[127.0.0.1:40579,DS-2aedbc45-fd4e-4d62-83f9-420e54976b0e,DISK]], original=[DatanodeInfoWithStorage[127.0.0.1:41963,DS-dd8c75c6-292f-4f1f-bb94-742d1fc7d2f4,DISK], DatanodeInfoWithStorage[127.0.0.1:40579,DS-2aedbc45-fd4e-4d62-83f9-420e54976b0e,DISK]]). The current failed datanode replacement policy is DEFAULT, and a client may configure this via 'dfs.client.block.write.replace-datanode-on-failure.policy' in its configuration. at org.apache.hadoop.hdfs.DataStreamer.findNewDatanode(DataStreamer.java:1455) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.DataStreamer.addDatanode2ExistingPipeline(DataStreamer.java:1515) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.DataStreamer.handleDatanodeReplacement(DataStreamer.java:1758) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.DataStreamer.setupPipelineInternal(DataStreamer.java:1648) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.DataStreamer.setupPipelineForAppendOrRecovery(DataStreamer.java:1627) ~[hadoop-hdfs-client-3.4.1.jar:?] 
at org.apache.hadoop.hdfs.DataStreamer.processDatanodeOrExternalError(DataStreamer.java:1408) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:707) ~[hadoop-hdfs-client-3.4.1.jar:?] 2024-11-17T21:14:50,221 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43941 is added to blk_1073741826_1006 (size=85) 2024-11-17T21:14:50,223 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:41963 is added to blk_1073741826_1006 (size=85) 2024-11-17T21:14:50,224 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40579 is added to blk_1073741826_1006 (size=85) 2024-11-17T21:14:50,228 WARN [ResponseProcessor for block BP-754513072-172.17.0.2-1731878086310:blk_1073741828_1004 {}] hdfs.DataStreamer$ResponseProcessor(1303): Exception for BP-754513072-172.17.0.2-1731878086310:blk_1073741828_1004 java.io.EOFException: Unexpected EOF while trying to read response from server at org.apache.hadoop.hdfs.protocolPB.PBHelperClient.vintPrefixed(PBHelperClient.java:529) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck.readFields(PipelineAck.java:244) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.DataStreamer$ResponseProcessor.run(DataStreamer.java:1180) ~[hadoop-hdfs-client-3.4.1.jar:?] 2024-11-17T21:14:50,228 ERROR [DataXceiver for client DFSClient_NONMAPREDUCE_900039209_22 at /127.0.0.1:53544 [Receiving block BP-754513072-172.17.0.2-1731878086310:blk_1073741828_1004] {}] datanode.DataXceiver(331): 127.0.0.1:43941:DataXceiver error processing WRITE_BLOCK operation src: /127.0.0.1:53544 dst: /127.0.0.1:43941 java.io.IOException: Premature EOF from inputStream at org.apache.hadoop.io.IOUtils.readFully(IOUtils.java:216) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doReadFully(PacketReceiver.java:221) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doRead(PacketReceiver.java:144) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.receiveNextPacket(PacketReceiver.java:119) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receivePacket(BlockReceiver.java:553) ~[hadoop-hdfs-3.4.1.jar:?] at org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receiveBlock(BlockReceiver.java:1011) ~[hadoop-hdfs-3.4.1.jar:?] at org.apache.hadoop.hdfs.server.datanode.DataXceiver.writeBlock(DataXceiver.java:920) ~[hadoop-hdfs-3.4.1.jar:?] at org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.opWriteBlock(Receiver.java:176) ~[hadoop-hdfs-3.4.1.jar:?] at org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.processOp(Receiver.java:110) ~[hadoop-hdfs-3.4.1.jar:?] at org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:299) ~[hadoop-hdfs-3.4.1.jar:?] at java.lang.Thread.run(Thread.java:840) ~[?:?] 
2024-11-17T21:14:50,228 WARN [DataStreamer for file /user/jenkins/test-data/a45ee5a3-5b69-e870-43d7-9500a26edffb/testWALClosureFailureAndCleanup/wal.1731878089494 block BP-754513072-172.17.0.2-1731878086310:blk_1073741828_1004 {}] hdfs.DataStreamer(1731): Error Recovery for BP-754513072-172.17.0.2-1731878086310:blk_1073741828_1004 in pipeline [DatanodeInfoWithStorage[127.0.0.1:40579,DS-2aedbc45-fd4e-4d62-83f9-420e54976b0e,DISK], DatanodeInfoWithStorage[127.0.0.1:43941,DS-0dc17499-73e1-4891-a8aa-a698546e6351,DISK], DatanodeInfoWithStorage[127.0.0.1:41963,DS-dd8c75c6-292f-4f1f-bb94-742d1fc7d2f4,DISK]]: datanode 0(DatanodeInfoWithStorage[127.0.0.1:40579,DS-2aedbc45-fd4e-4d62-83f9-420e54976b0e,DISK]) is bad. 2024-11-17T21:14:50,228 ERROR [DataXceiver for client DFSClient_NONMAPREDUCE_900039209_22 at /127.0.0.1:56410 [Receiving block BP-754513072-172.17.0.2-1731878086310:blk_1073741828_1004] {}] datanode.DataXceiver(331): 127.0.0.1:40579:DataXceiver error processing WRITE_BLOCK operation src: /127.0.0.1:56410 dst: /127.0.0.1:40579 java.io.InterruptedIOException: Interrupted while waiting for IO on channel java.nio.channels.SocketChannel[connected local=localhost/127.0.0.1:40579 remote=/127.0.0.1:56410]. Total timeout mills is 5000, 4277 millis timeout left. at org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:350) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) ~[hadoop-common-3.4.1.jar:?] at java.io.BufferedInputStream.fill(BufferedInputStream.java:244) ~[?:?] at java.io.BufferedInputStream.read1(BufferedInputStream.java:284) ~[?:?] at java.io.BufferedInputStream.read(BufferedInputStream.java:343) ~[?:?] at java.io.DataInputStream.read(DataInputStream.java:151) ~[?:?] at org.apache.hadoop.io.IOUtils.readFully(IOUtils.java:214) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doReadFully(PacketReceiver.java:221) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doRead(PacketReceiver.java:144) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.receiveNextPacket(PacketReceiver.java:119) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receivePacket(BlockReceiver.java:553) ~[hadoop-hdfs-3.4.1.jar:?] at org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receiveBlock(BlockReceiver.java:1011) ~[hadoop-hdfs-3.4.1.jar:?] at org.apache.hadoop.hdfs.server.datanode.DataXceiver.writeBlock(DataXceiver.java:920) ~[hadoop-hdfs-3.4.1.jar:?] at org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.opWriteBlock(Receiver.java:176) ~[hadoop-hdfs-3.4.1.jar:?] at org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.processOp(Receiver.java:110) ~[hadoop-hdfs-3.4.1.jar:?] at org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:299) ~[hadoop-hdfs-3.4.1.jar:?] at java.lang.Thread.run(Thread.java:840) ~[?:?] 
2024-11-17T21:14:50,229 ERROR [DataXceiver for client DFSClient_NONMAPREDUCE_900039209_22 at /127.0.0.1:37726 [Receiving block BP-754513072-172.17.0.2-1731878086310:blk_1073741828_1004] {}] datanode.DataXceiver(331): 127.0.0.1:41963:DataXceiver error processing WRITE_BLOCK operation src: /127.0.0.1:37726 dst: /127.0.0.1:41963 java.io.IOException: Premature EOF from inputStream at org.apache.hadoop.io.IOUtils.readFully(IOUtils.java:216) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doReadFully(PacketReceiver.java:221) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doRead(PacketReceiver.java:144) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.receiveNextPacket(PacketReceiver.java:119) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receivePacket(BlockReceiver.java:553) ~[hadoop-hdfs-3.4.1.jar:?] at org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receiveBlock(BlockReceiver.java:1011) ~[hadoop-hdfs-3.4.1.jar:?] at org.apache.hadoop.hdfs.server.datanode.DataXceiver.writeBlock(DataXceiver.java:920) ~[hadoop-hdfs-3.4.1.jar:?] at org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.opWriteBlock(Receiver.java:176) ~[hadoop-hdfs-3.4.1.jar:?] at org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.processOp(Receiver.java:110) ~[hadoop-hdfs-3.4.1.jar:?] at org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:299) ~[hadoop-hdfs-3.4.1.jar:?] at java.lang.Thread.run(Thread.java:840) ~[?:?] 2024-11-17T21:14:50,230 WARN [IPC Server handler 4 on default port 37527 {}] blockmanagement.BlockPlacementPolicyDefault(501): Failed to place enough replicas, still in need of 1 to reach 3 (unavailableStorages=[], storagePolicy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]}, newBlock=false) For more information, please enable DEBUG log level on org.apache.hadoop.hdfs.server.blockmanagement.BlockPlacementPolicy and org.apache.hadoop.net.NetworkTopology 2024-11-17T21:14:50,230 WARN [IPC Server handler 4 on default port 37527 {}] blockmanagement.BlockPlacementPolicyDefault(501): Failed to place enough replicas, still in need of 1 to reach 3 (unavailableStorages=[DISK], storagePolicy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]}, newBlock=false) For more information, please enable DEBUG log level on org.apache.hadoop.hdfs.server.blockmanagement.BlockPlacementPolicy and org.apache.hadoop.net.NetworkTopology 2024-11-17T21:14:50,230 WARN [IPC Server handler 4 on default port 37527 {}] protocol.BlockStoragePolicy(161): Failed to place enough replicas: expected size is 1 but only 0 storage types can be selected (replication=3, selected=[], unavailable=[DISK, ARCHIVE], removed=[DISK], policy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]}) 2024-11-17T21:14:50,230 WARN [IPC Server handler 4 on default port 37527 {}] blockmanagement.BlockPlacementPolicyDefault(501): Failed to place enough replicas, still in need of 1 to reach 3 (unavailableStorages=[DISK, ARCHIVE], storagePolicy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], replicationFallbacks=[ARCHIVE]}, newBlock=false) All required storage types are unavailable: unavailableStorages=[DISK, ARCHIVE], storagePolicy=BlockStoragePolicy{HOT:7, storageTypes=[DISK], creationFallbacks=[], 
replicationFallbacks=[ARCHIVE]} 2024-11-17T21:14:50,231 WARN [DataStreamer for file /user/jenkins/test-data/a45ee5a3-5b69-e870-43d7-9500a26edffb/testWALClosureFailureAndCleanup/wal.1731878089494 block BP-754513072-172.17.0.2-1731878086310:blk_1073741828_1004 {}] hdfs.DataStreamer(859): DataStreamer Exception java.io.IOException: Failed to replace a bad datanode on the existing pipeline due to no more good datanodes being available to try. (Nodes: current=[DatanodeInfoWithStorage[127.0.0.1:41963,DS-dd8c75c6-292f-4f1f-bb94-742d1fc7d2f4,DISK], DatanodeInfoWithStorage[127.0.0.1:43941,DS-0dc17499-73e1-4891-a8aa-a698546e6351,DISK]], original=[DatanodeInfoWithStorage[127.0.0.1:43941,DS-0dc17499-73e1-4891-a8aa-a698546e6351,DISK], DatanodeInfoWithStorage[127.0.0.1:41963,DS-dd8c75c6-292f-4f1f-bb94-742d1fc7d2f4,DISK]]). The current failed datanode replacement policy is DEFAULT, and a client may configure this via 'dfs.client.block.write.replace-datanode-on-failure.policy' in its configuration. at org.apache.hadoop.hdfs.DataStreamer.findNewDatanode(DataStreamer.java:1455) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.DataStreamer.addDatanode2ExistingPipeline(DataStreamer.java:1515) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.DataStreamer.handleDatanodeReplacement(DataStreamer.java:1758) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.DataStreamer.setupPipelineInternal(DataStreamer.java:1648) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.DataStreamer.setupPipelineForAppendOrRecovery(DataStreamer.java:1627) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.DataStreamer.processDatanodeOrExternalError(DataStreamer.java:1408) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:707) ~[hadoop-hdfs-client-3.4.1.jar:?] 
2024-11-17T21:14:50,233 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40579 is added to blk_1073741828_1007 (size=85) 2024-11-17T21:14:50,234 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43941 is added to blk_1073741828_1007 (size=85) 2024-11-17T21:14:51,075 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40579 is added to blk_1073741829_1005 (size=93) 2024-11-17T21:14:51,075 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40579 is added to blk_1073741827_1003 (size=66) 2024-11-17T21:14:53,521 INFO [Close-WAL-Writer-0 {}] util.RecoverLeaseFSUtils(223): Recovered lease, attempt=1 on file=hdfs://localhost:37527/user/jenkins/test-data/a45ee5a3-5b69-e870-43d7-9500a26edffb/testWALClosureFailureAndCleanup/wal.1731878089157 after 4007ms 2024-11-17T21:14:53,521 DEBUG [Close-WAL-Writer-0 {}] wal.AbstractFSWAL(751): hdfs://localhost:37527/user/jenkins/test-data/a45ee5a3-5b69-e870-43d7-9500a26edffb/testWALClosureFailureAndCleanup/wal.1731878089494 is not closed yet, will try archiving it next time 2024-11-17T21:14:53,524 INFO [Close-WAL-Writer-1 {}] util.RecoverLeaseFSUtils(223): Recovered lease, attempt=1 on file=hdfs://localhost:37527/user/jenkins/test-data/a45ee5a3-5b69-e870-43d7-9500a26edffb/testWALClosureFailureAndCleanup/wal.1731878089494 after 4002ms 2024-11-17T21:14:53,535 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1071): Moved 3 WAL file(s) to /user/jenkins/test-data/a45ee5a3-5b69-e870-43d7-9500a26edffb/oldWALs 2024-11-17T21:14:53,535 INFO [Time-limited test {}] wal.AbstractFSWAL(1074): Closed WAL: FSHLog wal:(num 1731878089509) 2024-11-17T21:14:53,547 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testWALClosureFailureAndCleanup Thread=164 (was 156) Potentially hanging thread: java.util.concurrent.ThreadPoolExecutor$Worker@4ed4f2f5[State = -1, empty queue] java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method) java.base@17.0.11/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:252) java.base@17.0.11/java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:401) java.base@17.0.11/java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:903) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1061) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1122) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: IPC Parameter Sending Thread for localhost/127.0.0.1:40145 java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method) java.base@17.0.11/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:252) java.base@17.0.11/java.util.concurrent.SynchronousQueue$TransferQueue.transfer(SynchronousQueue.java:704) java.base@17.0.11/java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:903) app//org.apache.hadoop.ipc.Client$Connection$RpcRequestSender.run(Client.java:1121) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: IPC Parameter Sending Thread for localhost/127.0.0.1:37733 java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method) 
java.base@17.0.11/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:252) java.base@17.0.11/java.util.concurrent.SynchronousQueue$TransferQueue.transfer(SynchronousQueue.java:704) java.base@17.0.11/java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:903) app//org.apache.hadoop.ipc.Client$Connection$RpcRequestSender.run(Client.java:1121) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: java.util.concurrent.ThreadPoolExecutor$Worker@48266cbd[State = -1, empty queue] java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method) java.base@17.0.11/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:252) java.base@17.0.11/java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:401) java.base@17.0.11/java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:903) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1061) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1122) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: Monitor thread for TaskMonitor java.base@17.0.11/java.lang.Thread.sleep(Native Method) app//org.apache.hadoop.hbase.monitoring.TaskMonitor$MonitorRunnable.run(TaskMonitor.java:325) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: LeaseRenewer:jenkins@localhost:37527 java.base@17.0.11/java.lang.Thread.sleep(Native Method) app//org.apache.hadoop.hdfs.client.impl.LeaseRenewer.run(LeaseRenewer.java:441) app//org.apache.hadoop.hdfs.client.impl.LeaseRenewer.access$800(LeaseRenewer.java:77) app//org.apache.hadoop.hdfs.client.impl.LeaseRenewer$1.run(LeaseRenewer.java:336) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: IPC Client (1055316056) connection to localhost/127.0.0.1:40145 from jenkins java.base@17.0.11/java.lang.Object.wait(Native Method) app//org.apache.hadoop.ipc.Client$Connection.waitForWork(Client.java:1042) app//org.apache.hadoop.ipc.Client$Connection.run(Client.java:1093) Potentially hanging thread: IPC Client (1055316056) connection to localhost/127.0.0.1:37733 from jenkins java.base@17.0.11/java.lang.Object.wait(Native Method) app//org.apache.hadoop.ipc.Client$Connection.waitForWork(Client.java:1042) app//org.apache.hadoop.ipc.Client$Connection.run(Client.java:1093) - Thread LEAK? -, OpenFileDescriptor=426 (was 390) - OpenFileDescriptor LEAK? 
-, MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=127 (was 129), ProcessCount=11 (was 11), AvailableMemoryMB=11799 (was 11859) 2024-11-17T21:14:53,554 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testSyncRunnerIndexOverflow Thread=164, OpenFileDescriptor=426, MaxFileDescriptor=1048576, SystemLoadAverage=127, ProcessCount=11, AvailableMemoryMB=11799 2024-11-17T21:14:53,574 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40579 is added to blk_1073741830_1008 (size=7) 2024-11-17T21:14:53,574 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:41963 is added to blk_1073741830_1008 (size=7) 2024-11-17T21:14:53,575 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43941 is added to blk_1073741830_1008 (size=7) 2024-11-17T21:14:53,577 INFO [Time-limited test {}] util.FSUtils(490): Created version file at hdfs://localhost:37527/user/jenkins/test-data/a45ee5a3-5b69-e870-43d7-9500a26edffb with version=8 2024-11-17T21:14:53,577 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-11-17T21:14:53,580 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-11-17T21:14:53,588 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911. 2024-11-17T21:14:53,588 INFO [Time-limited test {}] wal.AbstractFSWAL(500): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:37527/user/jenkins/test-data/a45ee5a3-5b69-e870-43d7-9500a26edffb/testSyncRunnerIndexOverflow, archiveDir=hdfs://localhost:37527/user/jenkins/test-data/a45ee5a3-5b69-e870-43d7-9500a26edffb/oldWALs, maxLogs=1760 2024-11-17T21:14:53,590 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1731878093590 2024-11-17T21:14:53,600 INFO [Time-limited test {}] wal.AbstractFSWAL(841): New WAL /user/jenkins/test-data/a45ee5a3-5b69-e870-43d7-9500a26edffb/testSyncRunnerIndexOverflow/wal.1731878093590 2024-11-17T21:14:53,600 DEBUG [Time-limited test {}] wal.AbstractFSWAL(925): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:33721:33721),(127.0.0.1/127.0.0.1:38217:38217),(127.0.0.1/127.0.0.1:45905:45905)] 2024-11-17T21:14:53,624 DEBUG [WAL-Shutdown-0 {}] wal.FSHLog(499): Closing WAL writer in /user/jenkins/test-data/a45ee5a3-5b69-e870-43d7-9500a26edffb/testSyncRunnerIndexOverflow 2024-11-17T21:14:53,627 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:41963 is added to blk_1073741831_1009 (size=1293) 2024-11-17T21:14:53,628 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43941 is added to blk_1073741831_1009 (size=1293) 2024-11-17T21:14:53,628 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40579 is added to blk_1073741831_1009 (size=1293) 2024-11-17T21:14:53,632 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1071): Moved 1 WAL file(s) to 
/user/jenkins/test-data/a45ee5a3-5b69-e870-43d7-9500a26edffb/oldWALs 2024-11-17T21:14:53,632 INFO [Time-limited test {}] wal.AbstractFSWAL(1074): Closed WAL: FSHLog wal:(num 1731878093590) 2024-11-17T21:14:53,640 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testSyncRunnerIndexOverflow Thread=164 (was 164), OpenFileDescriptor=426 (was 426), MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=127 (was 127), ProcessCount=11 (was 11), AvailableMemoryMB=11796 (was 11799) 2024-11-17T21:14:53,647 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testDeadlockWithSyncOverwrites Thread=164, OpenFileDescriptor=426, MaxFileDescriptor=1048576, SystemLoadAverage=127, ProcessCount=11, AvailableMemoryMB=11796 2024-11-17T21:14:53,661 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:41963 is added to blk_1073741832_1010 (size=7) 2024-11-17T21:14:53,661 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43941 is added to blk_1073741832_1010 (size=7) 2024-11-17T21:14:53,662 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40579 is added to blk_1073741832_1010 (size=7) 2024-11-17T21:14:53,664 INFO [Time-limited test {}] util.FSUtils(490): Created version file at hdfs://localhost:37527/user/jenkins/test-data/a45ee5a3-5b69-e870-43d7-9500a26edffb with version=8 2024-11-17T21:14:53,664 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-11-17T21:14:53,667 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-11-17T21:14:53,676 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911. 2024-11-17T21:14:53,676 INFO [Time-limited test {}] wal.AbstractFSWAL(500): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:37527/user/jenkins/test-data/a45ee5a3-5b69-e870-43d7-9500a26edffb/testDeadlockWithSyncOverwrites, archiveDir=hdfs://localhost:37527/user/jenkins/test-data/a45ee5a3-5b69-e870-43d7-9500a26edffb/oldWALs, maxLogs=1760 2024-11-17T21:14:53,679 WARN [sync.1 {}] wal.FSHLog$SyncRunner(683): UNEXPECTED java.lang.NullPointerException: Cannot invoke "org.apache.hadoop.hbase.wal.WALProvider$Writer.sync(boolean)" because "this.this$0.writer" is null at org.apache.hadoop.hbase.regionserver.wal.FSHLog$SyncRunner.run(FSHLog.java:674) ~[classes/:?] 2024-11-17T21:14:53,680 INFO [Time-limited test {}] hbase.Waiter(181): Waiting up to [10,000] milli-secs(wait.for.ratio=[1]) 2024-11-17T21:14:53,680 INFO [Time-limited test {}] hbase.Waiter(181): Waiting up to [10,000] milli-secs(wait.for.ratio=[1]) 2024-11-17T21:14:53,680 WARN [sync.2 {}] wal.FSHLog$SyncRunner(683): UNEXPECTED java.lang.NullPointerException: Cannot invoke "org.apache.hadoop.hbase.wal.WALProvider$Writer.sync(boolean)" because "this.this$0.writer" is null at org.apache.hadoop.hbase.regionserver.wal.FSHLog$SyncRunner.run(FSHLog.java:674) ~[classes/:?] 
2024-11-17T21:14:53,681 DEBUG [WAL-Shutdown-0 {}] wal.FSHLog(499): Closing WAL writer in /user/jenkins/test-data/a45ee5a3-5b69-e870-43d7-9500a26edffb/testDeadlockWithSyncOverwrites 2024-11-17T21:14:53,682 INFO [Time-limited test {}] wal.AbstractFSWAL(1074): Closed WAL: CustomFSHLog wal:(num -1) 2024-11-17T21:14:53,689 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testDeadlockWithSyncOverwrites Thread=164 (was 164), OpenFileDescriptor=426 (was 426), MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=127 (was 127), ProcessCount=11 (was 11), AvailableMemoryMB=11794 (was 11796) 2024-11-17T21:14:53,696 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testUnflushedSeqIdTracking Thread=164, OpenFileDescriptor=426, MaxFileDescriptor=1048576, SystemLoadAverage=127, ProcessCount=11, AvailableMemoryMB=11793 2024-11-17T21:14:53,710 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:41963 is added to blk_1073741833_1011 (size=7) 2024-11-17T21:14:53,710 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40579 is added to blk_1073741833_1011 (size=7) 2024-11-17T21:14:53,711 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43941 is added to blk_1073741833_1011 (size=7) 2024-11-17T21:14:53,712 INFO [Time-limited test {}] util.FSUtils(490): Created version file at hdfs://localhost:37527/user/jenkins/test-data/a45ee5a3-5b69-e870-43d7-9500a26edffb with version=8 2024-11-17T21:14:53,713 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-11-17T21:14:53,715 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-11-17T21:14:53,721 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911. 
2024-11-17T21:14:53,722 INFO [Time-limited test {}] wal.AbstractFSWAL(500): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:37527/user/jenkins/test-data/a45ee5a3-5b69-e870-43d7-9500a26edffb/testUnflushedSeqIdTracking, archiveDir=hdfs://localhost:37527/user/jenkins/test-data/a45ee5a3-5b69-e870-43d7-9500a26edffb/oldWALs, maxLogs=1760 2024-11-17T21:14:53,724 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1731878093723 2024-11-17T21:14:53,732 INFO [Time-limited test {}] wal.AbstractFSWAL(841): New WAL /user/jenkins/test-data/a45ee5a3-5b69-e870-43d7-9500a26edffb/testUnflushedSeqIdTracking/wal.1731878093723 2024-11-17T21:14:53,732 DEBUG [Time-limited test {}] wal.AbstractFSWAL(925): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:45905:45905),(127.0.0.1/127.0.0.1:38217:38217),(127.0.0.1/127.0.0.1:33721:33721)] 2024-11-17T21:14:53,733 INFO [Time-limited test {}] regionserver.HRegion(7106): creating {ENCODED => aa9ce9b7340e2349061a390ffa7df1c1, NAME => 'testUnflushedSeqIdTracking,,1731878093733.aa9ce9b7340e2349061a390ffa7df1c1.', STARTKEY => '', ENDKEY => ''}, tableDescriptor='testUnflushedSeqIdTracking', {NAME => 'b', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'}, regionDir=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/09645b7e-9829-f1fc-19c7-d9675fc19400 2024-11-17T21:14:53,745 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43941 is added to blk_1073741835_1013 (size=61) 2024-11-17T21:14:53,745 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40579 is added to blk_1073741835_1013 (size=61) 2024-11-17T21:14:53,746 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:41963 is added to blk_1073741835_1013 (size=61) 2024-11-17T21:14:53,747 DEBUG [Time-limited test {}] regionserver.HRegion(894): Instantiated testUnflushedSeqIdTracking,,1731878093733.aa9ce9b7340e2349061a390ffa7df1c1.; StoreHotnessProtector, parallelPutToStoreThreadLimit=0 ; minColumnNum=100 ; preparePutThreadLimit=0 ; hotProtect now disable 2024-11-17T21:14:53,749 INFO [StoreOpener-aa9ce9b7340e2349061a390ffa7df1c1-1 {}] regionserver.HStore(400): Created cacheConfig: cacheDataOnRead=true, cacheDataOnWrite=false, cacheIndexesOnWrite=false, cacheBloomsOnWrite=false, cacheEvictOnClose=false, cacheDataCompressed=false, prefetchOnOpen=false, for column family b of region aa9ce9b7340e2349061a390ffa7df1c1 2024-11-17T21:14:53,751 INFO [StoreOpener-aa9ce9b7340e2349061a390ffa7df1c1-1 {}] compactions.CompactionConfiguration(181): size [minCompactSize:128 MB, maxCompactSize:8.00 EB, offPeakMaxCompactSize:8.00 EB); files [minFilesToCompact:3, maxFilesToCompact:10); ratio 1.200000; off-peak ratio 5.000000; throttle point 2684354560; major period 604800000, major jitter 0.500000, min locality to compact 0.000000; tiered compaction: max_age 9223372036854775807, incoming window min 6, compaction policy for tiered window org.apache.hadoop.hbase.regionserver.compactions.ExploringCompactionPolicy, single output for minor true, compaction window factory 
org.apache.hadoop.hbase.regionserver.compactions.ExponentialCompactionWindowFactory, region aa9ce9b7340e2349061a390ffa7df1c1 columnFamilyName b 2024-11-17T21:14:53,751 DEBUG [StoreOpener-aa9ce9b7340e2349061a390ffa7df1c1-1 {}] storefiletracker.StoreFileTrackerFactory(122): instantiating StoreFileTracker impl org.apache.hadoop.hbase.regionserver.storefiletracker.DefaultStoreFileTracker 2024-11-17T21:14:53,752 INFO [StoreOpener-aa9ce9b7340e2349061a390ffa7df1c1-1 {}] regionserver.HStore(327): Store=aa9ce9b7340e2349061a390ffa7df1c1/b, memstore type=DefaultMemStore, storagePolicy=NONE, verifyBulkLoads=false, parallelPutCountPrintThreshold=50, encoding=NONE, compression=NONE 2024-11-17T21:14:53,755 DEBUG [Time-limited test {}] regionserver.HRegion(5301): Found 0 recovered edits file(s) under hdfs://localhost:37527/user/jenkins/test-data/a45ee5a3-5b69-e870-43d7-9500a26edffb/data/default/testUnflushedSeqIdTracking/aa9ce9b7340e2349061a390ffa7df1c1 2024-11-17T21:14:53,755 DEBUG [Time-limited test {}] regionserver.HRegion(5301): Found 0 recovered edits file(s) under hdfs://localhost:37527/user/jenkins/test-data/a45ee5a3-5b69-e870-43d7-9500a26edffb/data/default/testUnflushedSeqIdTracking/aa9ce9b7340e2349061a390ffa7df1c1 2024-11-17T21:14:53,756 DEBUG [Time-limited test {}] regionserver.HRegion(5301): Found 0 recovered edits file(s) under hdfs://localhost:37527/user/jenkins/test-data/58121cba-5ecb-bb2e-3071-d40ec820b0ee/data/default/testUnflushedSeqIdTracking/aa9ce9b7340e2349061a390ffa7df1c1 2024-11-17T21:14:53,760 DEBUG [Time-limited test {}] regionserver.HRegion(1085): writing seq id for aa9ce9b7340e2349061a390ffa7df1c1 2024-11-17T21:14:53,764 DEBUG [Time-limited test {}] wal.WALSplitUtil(409): Wrote file=hdfs://localhost:37527/user/jenkins/test-data/58121cba-5ecb-bb2e-3071-d40ec820b0ee/data/default/testUnflushedSeqIdTracking/aa9ce9b7340e2349061a390ffa7df1c1/recovered.edits/1.seqid, newMaxSeqId=1, maxSeqId=-1 2024-11-17T21:14:53,765 INFO [Time-limited test {}] regionserver.HRegion(1102): Opened aa9ce9b7340e2349061a390ffa7df1c1; next sequenceid=2; SteppingSplitPolicysuper{IncreasingToUpperBoundRegionSplitPolicy{initialSize=268435456, ConstantSizeRegionSplitPolicy{desiredMaxFileSize=68377500, jitterRate=0.018904149532318115}}}, FlushLargeStoresPolicy{flushSizeLowerBound=-1} 2024-11-17T21:14:53,767 DEBUG [Time-limited test {}] regionserver.HRegion(1001): Region open journal for aa9ce9b7340e2349061a390ffa7df1c1: 2024-11-17T21:14:56,773 INFO [pool-70-thread-2 {}] regionserver.HRegion(2837): Flushing aa9ce9b7340e2349061a390ffa7df1c1 1/1 column families, dataSize=24 B heapSize=352 B 2024-11-17T21:14:59,871 DEBUG [pool-70-thread-2 {}] hfile.HFileWriterImpl(814): Len of the biggest cell in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/09645b7e-9829-f1fc-19c7-d9675fc19400/data/default/testUnflushedSeqIdTracking/aa9ce9b7340e2349061a390ffa7df1c1/.tmp/b/157a836b70114059909fc0cea0bc6aaf is 28, key is b/b:b/1731878093771/Put/seqid=0 2024-11-17T21:14:59,887 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:41963 is added to blk_1073741836_1014 (size=4945) 2024-11-17T21:14:59,888 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43941 is added to blk_1073741836_1014 (size=4945) 2024-11-17T21:14:59,888 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40579 is added to blk_1073741836_1014 (size=4945) 
2024-11-17T21:14:59,889 INFO [pool-70-thread-2 {}] regionserver.DefaultStoreFlusher(81): Flushed memstore data size=48 B at sequenceid=4 (bloomFilter=true), to=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/09645b7e-9829-f1fc-19c7-d9675fc19400/data/default/testUnflushedSeqIdTracking/aa9ce9b7340e2349061a390ffa7df1c1/.tmp/b/157a836b70114059909fc0cea0bc6aaf 2024-11-17T21:14:59,959 DEBUG [pool-70-thread-2 {}] regionserver.HRegionFileSystem(442): Committing /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/09645b7e-9829-f1fc-19c7-d9675fc19400/data/default/testUnflushedSeqIdTracking/aa9ce9b7340e2349061a390ffa7df1c1/.tmp/b/157a836b70114059909fc0cea0bc6aaf as /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/09645b7e-9829-f1fc-19c7-d9675fc19400/data/default/testUnflushedSeqIdTracking/aa9ce9b7340e2349061a390ffa7df1c1/b/157a836b70114059909fc0cea0bc6aaf 2024-11-17T21:14:59,970 INFO [pool-70-thread-2 {}] regionserver.HStore$StoreFlusherImpl(1989): Added /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/09645b7e-9829-f1fc-19c7-d9675fc19400/data/default/testUnflushedSeqIdTracking/aa9ce9b7340e2349061a390ffa7df1c1/b/157a836b70114059909fc0cea0bc6aaf, entries=1, sequenceid=4, filesize=4.8 K 2024-11-17T21:14:59,973 INFO [pool-70-thread-2 {}] regionserver.HRegion(3040): Finished flush of dataSize ~48 B/48, heapSize ~432 B/432, currentSize=0 B/0 for aa9ce9b7340e2349061a390ffa7df1c1 in 3201ms, sequenceid=4, compaction requested=false 2024-11-17T21:14:59,974 DEBUG [pool-70-thread-2 {}] regionserver.HRegion(2538): Flush status journal for aa9ce9b7340e2349061a390ffa7df1c1: 2024-11-17T21:14:59,974 INFO [pool-70-thread-2 {}] wal.TestFSHLog$4(293): Flush result:FLUSHED_NO_COMPACTION_NEEDED 2024-11-17T21:14:59,974 INFO [pool-70-thread-2 {}] wal.TestFSHLog$4(294): Flush succeeded:true 2024-11-17T21:14:59,975 DEBUG [Time-limited test {}] regionserver.HRegion(1681): Closing aa9ce9b7340e2349061a390ffa7df1c1, disabling compactions & flushes 2024-11-17T21:14:59,975 INFO [Time-limited test {}] regionserver.HRegion(1703): Closing region testUnflushedSeqIdTracking,,1731878093733.aa9ce9b7340e2349061a390ffa7df1c1. 2024-11-17T21:14:59,975 DEBUG [Time-limited test {}] regionserver.HRegion(1724): Waiting without time limit for close lock on testUnflushedSeqIdTracking,,1731878093733.aa9ce9b7340e2349061a390ffa7df1c1. 2024-11-17T21:14:59,975 DEBUG [Time-limited test {}] regionserver.HRegion(1791): Acquired close lock on testUnflushedSeqIdTracking,,1731878093733.aa9ce9b7340e2349061a390ffa7df1c1. after waiting 0 ms 2024-11-17T21:14:59,975 DEBUG [Time-limited test {}] regionserver.HRegion(1801): Updates disabled for region testUnflushedSeqIdTracking,,1731878093733.aa9ce9b7340e2349061a390ffa7df1c1. 2024-11-17T21:14:59,977 INFO [Time-limited test {}] regionserver.HRegion(1922): Closed testUnflushedSeqIdTracking,,1731878093733.aa9ce9b7340e2349061a390ffa7df1c1. 
2024-11-17T21:14:59,977 DEBUG [Time-limited test {}] regionserver.HRegion(1635): Region close journal for aa9ce9b7340e2349061a390ffa7df1c1: 2024-11-17T21:14:59,978 DEBUG [WAL-Shutdown-0 {}] wal.FSHLog(499): Closing WAL writer in /user/jenkins/test-data/a45ee5a3-5b69-e870-43d7-9500a26edffb/testUnflushedSeqIdTracking 2024-11-17T21:14:59,980 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40579 is added to blk_1073741834_1012 (size=875) 2024-11-17T21:14:59,981 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43941 is added to blk_1073741834_1012 (size=875) 2024-11-17T21:14:59,981 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:41963 is added to blk_1073741834_1012 (size=875) 2024-11-17T21:14:59,985 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1071): Moved 1 WAL file(s) to /user/jenkins/test-data/a45ee5a3-5b69-e870-43d7-9500a26edffb/oldWALs 2024-11-17T21:14:59,986 INFO [Time-limited test {}] wal.AbstractFSWAL(1074): Closed WAL: FSHLog wal:(num 1731878093723) 2024-11-17T21:14:59,996 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testUnflushedSeqIdTracking Thread=172 (was 164) Potentially hanging thread: pool-70-thread-1 java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method) java.base@17.0.11/java.util.concurrent.locks.LockSupport.park(LockSupport.java:341) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionNode.block(AbstractQueuedSynchronizer.java:506) java.base@17.0.11/java.util.concurrent.ForkJoinPool.unmanagedBlock(ForkJoinPool.java:3465) java.base@17.0.11/java.util.concurrent.ForkJoinPool.managedBlock(ForkJoinPool.java:3436) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:1625) java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.take(LinkedBlockingQueue.java:435) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1062) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1122) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: HBase-Metrics2-1 java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method) java.base@17.0.11/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:252) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:1674) java.base@17.0.11/java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:1182) java.base@17.0.11/java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:899) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1062) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1122) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: Async disk worker #0 for volume 
/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/09645b7e-9829-f1fc-19c7-d9675fc19400/cluster_2a0274ea-93ca-a98a-2c9d-271c63f62376/dfs/data/data6 java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method) java.base@17.0.11/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:252) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:1674) java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:460) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1061) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1122) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_900039209_22 at /127.0.0.1:42512 [Waiting for operation #3] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244) java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:263) java.base@17.0.11/java.io.DataInputStream.readUnsignedShort(DataInputStream.java:334) java.base@17.0.11/java.io.DataInputStream.readShort(DataInputStream.java:312) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.readOp(Receiver.java:72) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:273) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_900039209_22 at /127.0.0.1:53408 [Waiting for operation #3] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244) java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:263) java.base@17.0.11/java.io.DataInputStream.readUnsignedShort(DataInputStream.java:334) java.base@17.0.11/java.io.DataInputStream.readShort(DataInputStream.java:312) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.readOp(Receiver.java:72) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:273) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially 
hanging thread: pool-70-thread-2 java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method) java.base@17.0.11/java.util.concurrent.locks.LockSupport.park(LockSupport.java:341) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionNode.block(AbstractQueuedSynchronizer.java:506) java.base@17.0.11/java.util.concurrent.ForkJoinPool.unmanagedBlock(ForkJoinPool.java:3465) java.base@17.0.11/java.util.concurrent.ForkJoinPool.managedBlock(ForkJoinPool.java:3436) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:1625) java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.take(LinkedBlockingQueue.java:435) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1062) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1122) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: org.apache.hadoop.hdfs.PeerCache@6fc3e7b8 java.base@17.0.11/java.lang.Thread.sleep(Native Method) app//org.apache.hadoop.hdfs.PeerCache.run(PeerCache.java:253) app//org.apache.hadoop.hdfs.PeerCache.access$000(PeerCache.java:46) app//org.apache.hadoop.hdfs.PeerCache$1.run(PeerCache.java:124) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: Async disk worker #0 for volume /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/09645b7e-9829-f1fc-19c7-d9675fc19400/cluster_2a0274ea-93ca-a98a-2c9d-271c63f62376/dfs/data/data5 java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method) java.base@17.0.11/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:252) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:1674) java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:460) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1061) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1122) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) - Thread LEAK? 
-, OpenFileDescriptor=422 (was 426), MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=116 (was 127), ProcessCount=11 (was 11), AvailableMemoryMB=11759 (was 11793) 2024-11-17T21:15:00,003 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testWALComparator Thread=172, OpenFileDescriptor=422, MaxFileDescriptor=1048576, SystemLoadAverage=116, ProcessCount=11, AvailableMemoryMB=11758 2024-11-17T21:15:00,017 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:41963 is added to blk_1073741837_1015 (size=7) 2024-11-17T21:15:00,018 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43941 is added to blk_1073741837_1015 (size=7) 2024-11-17T21:15:00,019 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40579 is added to blk_1073741837_1015 (size=7) 2024-11-17T21:15:00,020 INFO [Time-limited test {}] util.FSUtils(490): Created version file at hdfs://localhost:37527/user/jenkins/test-data/a45ee5a3-5b69-e870-43d7-9500a26edffb with version=8 2024-11-17T21:15:00,020 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-11-17T21:15:00,022 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-11-17T21:15:00,028 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911. 2024-11-17T21:15:00,028 INFO [Time-limited test {}] wal.AbstractFSWAL(500): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:37527/user/jenkins/test-data/a171855e-d8a8-a35f-cc30-e56a19eb48b5/testWALComparator, archiveDir=hdfs://localhost:37527/user/jenkins/test-data/a171855e-d8a8-a35f-cc30-e56a19eb48b5/oldWALs, maxLogs=1760 2024-11-17T21:15:00,029 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1731878100029 2024-11-17T21:15:00,041 INFO [Time-limited test {}] wal.AbstractFSWAL(841): New WAL /user/jenkins/test-data/a171855e-d8a8-a35f-cc30-e56a19eb48b5/testWALComparator/wal.1731878100029 2024-11-17T21:15:00,041 DEBUG [Time-limited test {}] wal.AbstractFSWAL(925): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:38217:38217),(127.0.0.1/127.0.0.1:45905:45905),(127.0.0.1/127.0.0.1:33721:33721)] 2024-11-17T21:15:00,041 DEBUG [Time-limited test {}] wal.AbstractTestFSWAL(214): Log obtained is: FSHLog wal:(num 1731878100029) 2024-11-17T21:15:00,045 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911. 
2024-11-17T21:15:00,045 INFO [Time-limited test {}] wal.AbstractFSWAL(500): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=.meta, logDir=hdfs://localhost:37527/user/jenkins/test-data/a171855e-d8a8-a35f-cc30-e56a19eb48b5/testWALComparator, archiveDir=hdfs://localhost:37527/user/jenkins/test-data/a171855e-d8a8-a35f-cc30-e56a19eb48b5/oldWALs, maxLogs=1760 2024-11-17T21:15:00,047 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1731878100047.meta 2024-11-17T21:15:00,056 INFO [Time-limited test {}] wal.AbstractFSWAL(841): New WAL /user/jenkins/test-data/a171855e-d8a8-a35f-cc30-e56a19eb48b5/testWALComparator/wal.1731878100047.meta 2024-11-17T21:15:00,056 DEBUG [Time-limited test {}] wal.AbstractFSWAL(925): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:33721:33721),(127.0.0.1/127.0.0.1:38217:38217),(127.0.0.1/127.0.0.1:45905:45905)] 2024-11-17T21:15:00,057 DEBUG [WAL-Shutdown-0 {}] wal.FSHLog(499): Closing WAL writer in /user/jenkins/test-data/a171855e-d8a8-a35f-cc30-e56a19eb48b5/testWALComparator 2024-11-17T21:15:00,060 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40579 is added to blk_1073741838_1016 (size=93) 2024-11-17T21:15:00,060 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:41963 is added to blk_1073741838_1016 (size=93) 2024-11-17T21:15:00,060 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43941 is added to blk_1073741838_1016 (size=93) 2024-11-17T21:15:00,064 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1071): Moved 1 WAL file(s) to /user/jenkins/test-data/a171855e-d8a8-a35f-cc30-e56a19eb48b5/oldWALs 2024-11-17T21:15:00,064 INFO [Time-limited test {}] wal.AbstractFSWAL(1074): Closed WAL: FSHLog wal:(num 1731878100029) 2024-11-17T21:15:00,064 DEBUG [WAL-Shutdown-0 {}] wal.FSHLog(499): Closing WAL writer in /user/jenkins/test-data/a171855e-d8a8-a35f-cc30-e56a19eb48b5/testWALComparator 2024-11-17T21:15:00,067 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:41963 is added to blk_1073741839_1017 (size=93) 2024-11-17T21:15:00,068 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43941 is added to blk_1073741839_1017 (size=93) 2024-11-17T21:15:00,068 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40579 is added to blk_1073741839_1017 (size=93) 2024-11-17T21:15:00,071 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1071): Moved 1 WAL file(s) to /user/jenkins/test-data/a171855e-d8a8-a35f-cc30-e56a19eb48b5/oldWALs 2024-11-17T21:15:00,071 INFO [Time-limited test {}] wal.AbstractFSWAL(1074): Closed WAL: FSHLog wal:.meta(num 1731878100047) 2024-11-17T21:15:00,078 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testWALComparator Thread=174 (was 172) Potentially hanging thread: Async disk worker #0 for volume /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/09645b7e-9829-f1fc-19c7-d9675fc19400/cluster_2a0274ea-93ca-a98a-2c9d-271c63f62376/dfs/data/data4 java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method) java.base@17.0.11/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:252) 
java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:1674) java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:460) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1061) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1122) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: Async disk worker #0 for volume /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/09645b7e-9829-f1fc-19c7-d9675fc19400/cluster_2a0274ea-93ca-a98a-2c9d-271c63f62376/dfs/data/data3 java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method) java.base@17.0.11/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:252) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:1674) java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:460) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1061) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1122) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) - Thread LEAK? -, OpenFileDescriptor=426 (was 422) - OpenFileDescriptor LEAK? -, MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=116 (was 116), ProcessCount=11 (was 11), AvailableMemoryMB=11754 (was 11758) 2024-11-17T21:15:00,085 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testFindMemStoresEligibleForFlush Thread=174, OpenFileDescriptor=426, MaxFileDescriptor=1048576, SystemLoadAverage=116, ProcessCount=11, AvailableMemoryMB=11754 2024-11-17T21:15:00,097 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:41963 is added to blk_1073741840_1018 (size=7) 2024-11-17T21:15:00,097 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40579 is added to blk_1073741840_1018 (size=7) 2024-11-17T21:15:00,098 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43941 is added to blk_1073741840_1018 (size=7) 2024-11-17T21:15:00,099 INFO [Time-limited test {}] util.FSUtils(490): Created version file at hdfs://localhost:37527/user/jenkins/test-data/a45ee5a3-5b69-e870-43d7-9500a26edffb with version=8 2024-11-17T21:15:00,099 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-11-17T21:15:00,101 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-11-17T21:15:00,105 DEBUG [Time-limited test {}] wal.AbstractTestFSWAL(382): testFindMemStoresEligibleForFlush 2024-11-17T21:15:00,130 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911. 
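The ResourceChecker(147)/(175) "before:"/"after:" records above snapshot thread and file-descriptor counts around each test and flag growth with "Thread LEAK?" / "OpenFileDescriptor LEAK?". Below is a small, hedged Java sketch of that before/after pattern; it is not HBase's ResourceChecker, and the thread-count proxy used here is an assumption made only for the example.

// Illustrative sketch of a before/after resource check, in the spirit of the
// "Thread LEAK?" / "OpenFileDescriptor LEAK?" messages above. Not HBase code.
public class ResourceCheckSketch {

    private int threadsBefore;

    // Snapshot live JVM thread count before the test body runs.
    void before(String testName) {
        threadsBefore = Thread.activeCount();
        System.out.printf("before: %s Thread=%d%n", testName, threadsBefore);
    }

    // Compare against the snapshot afterwards and flag possible leaks.
    void after(String testName) {
        int threadsAfter = Thread.activeCount();
        System.out.printf("after: %s Thread=%d (was %d)%s%n",
            testName, threadsAfter, threadsBefore,
            threadsAfter > threadsBefore ? " - Thread LEAK?" : "");
    }

    public static void main(String[] args) throws InterruptedException {
        ResourceCheckSketch checker = new ResourceCheckSketch();
        checker.before("exampleTest");
        // Test body: start a worker thread and (deliberately) do not join it,
        // so the after() check reports a possible leak.
        Thread worker = new Thread(() -> {
            try { Thread.sleep(5_000); } catch (InterruptedException ignored) { }
        });
        worker.setDaemon(true);
        worker.start();
        Thread.sleep(100);
        checker.after("exampleTest");
    }
}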
2024-11-17T21:15:00,130 INFO [Time-limited test {}] wal.AbstractFSWAL(500): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:37527/user/jenkins/test-data/8565697d-f1c2-c8cf-52c9-101643776d34/testFindMemStoresEligibleForFlush, archiveDir=hdfs://localhost:37527/user/jenkins/test-data/8565697d-f1c2-c8cf-52c9-101643776d34/oldWALs, maxLogs=1 2024-11-17T21:15:00,132 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1731878100132 2024-11-17T21:15:00,140 INFO [Time-limited test {}] wal.AbstractFSWAL(841): New WAL /user/jenkins/test-data/8565697d-f1c2-c8cf-52c9-101643776d34/testFindMemStoresEligibleForFlush/wal.1731878100132 2024-11-17T21:15:00,140 DEBUG [Time-limited test {}] wal.AbstractFSWAL(925): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:38217:38217),(127.0.0.1/127.0.0.1:45905:45905),(127.0.0.1/127.0.0.1:33721:33721)] 2024-11-17T21:15:00,143 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1731878100143 2024-11-17T21:15:00,152 INFO [Time-limited test {}] wal.AbstractFSWAL(837): Rolled WAL /user/jenkins/test-data/8565697d-f1c2-c8cf-52c9-101643776d34/testFindMemStoresEligibleForFlush/wal.1731878100132 with entries=2, filesize=275 B; new WAL /user/jenkins/test-data/8565697d-f1c2-c8cf-52c9-101643776d34/testFindMemStoresEligibleForFlush/wal.1731878100143 2024-11-17T21:15:00,153 DEBUG [Time-limited test {}] wal.AbstractFSWAL(925): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:45905:45905),(127.0.0.1/127.0.0.1:33721:33721),(127.0.0.1/127.0.0.1:38217:38217)] 2024-11-17T21:15:00,153 DEBUG [Time-limited test {}] wal.AbstractFSWAL(751): hdfs://localhost:37527/user/jenkins/test-data/8565697d-f1c2-c8cf-52c9-101643776d34/testFindMemStoresEligibleForFlush/wal.1731878100132 is not closed yet, will try archiving it next time 2024-11-17T21:15:00,154 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1731878100154 2024-11-17T21:15:00,155 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40579 is added to blk_1073741841_1019 (size=283) 2024-11-17T21:15:00,156 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:41963 is added to blk_1073741841_1019 (size=283) 2024-11-17T21:15:00,156 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43941 is added to blk_1073741841_1019 (size=283) 2024-11-17T21:15:00,164 INFO [Time-limited test {}] wal.AbstractFSWAL(837): Rolled WAL /user/jenkins/test-data/8565697d-f1c2-c8cf-52c9-101643776d34/testFindMemStoresEligibleForFlush/wal.1731878100143 with entries=2, filesize=275 B; new WAL /user/jenkins/test-data/8565697d-f1c2-c8cf-52c9-101643776d34/testFindMemStoresEligibleForFlush/wal.1731878100154 2024-11-17T21:15:00,165 DEBUG [Time-limited test {}] wal.AbstractFSWAL(925): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:33721:33721),(127.0.0.1/127.0.0.1:38217:38217),(127.0.0.1/127.0.0.1:45905:45905)] 2024-11-17T21:15:00,165 DEBUG [Time-limited test {}] wal.AbstractFSWAL(751): hdfs://localhost:37527/user/jenkins/test-data/8565697d-f1c2-c8cf-52c9-101643776d34/testFindMemStoresEligibleForFlush/wal.1731878100143 is not closed yet, will try archiving it next time 2024-11-17T21:15:00,165 INFO [Time-limited test {}] wal.AbstractFSWAL(718): Too many WALs; count=2, max=1; forcing (partial) flush of 1 region(s): 
f08cdc9fa884064b39ea6326cb69be48[cf1] 2024-11-17T21:15:00,166 INFO [Time-limited test {}] hbase.Waiter(181): Waiting up to [5,000] milli-secs(wait.for.ratio=[1]) 2024-11-17T21:15:00,167 INFO [Time-limited test {}] wal.AbstractFSWAL(718): Too many WALs; count=2, max=1; forcing (partial) flush of 1 region(s): f08cdc9fa884064b39ea6326cb69be48[cf1] 2024-11-17T21:15:00,167 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43941 is added to blk_1073741842_1020 (size=283) 2024-11-17T21:15:00,168 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40579 is added to blk_1073741842_1020 (size=283) 2024-11-17T21:15:00,168 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:41963 is added to blk_1073741842_1020 (size=283) 2024-11-17T21:15:00,169 INFO [Time-limited test {}] wal.AbstractFSWAL(718): Too many WALs; count=2, max=1; forcing (partial) flush of 1 region(s): f08cdc9fa884064b39ea6326cb69be48[cf1] 2024-11-17T21:15:00,169 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1731878100169 2024-11-17T21:15:00,170 INFO [WAL-Archive-0 {}] wal.AbstractFSWAL(818): Archiving hdfs://localhost:37527/user/jenkins/test-data/8565697d-f1c2-c8cf-52c9-101643776d34/testFindMemStoresEligibleForFlush/wal.1731878100132 to hdfs://localhost:37527/user/jenkins/test-data/8565697d-f1c2-c8cf-52c9-101643776d34/oldWALs/wal.1731878100132 2024-11-17T21:15:00,172 INFO [WAL-Archive-0 {}] wal.AbstractFSWAL(818): Archiving hdfs://localhost:37527/user/jenkins/test-data/8565697d-f1c2-c8cf-52c9-101643776d34/testFindMemStoresEligibleForFlush/wal.1731878100143 to hdfs://localhost:37527/user/jenkins/test-data/8565697d-f1c2-c8cf-52c9-101643776d34/oldWALs/wal.1731878100143 2024-11-17T21:15:00,178 INFO [Time-limited test {}] wal.AbstractFSWAL(837): Rolled WAL /user/jenkins/test-data/8565697d-f1c2-c8cf-52c9-101643776d34/testFindMemStoresEligibleForFlush/wal.1731878100154 with entries=2, filesize=275 B; new WAL /user/jenkins/test-data/8565697d-f1c2-c8cf-52c9-101643776d34/testFindMemStoresEligibleForFlush/wal.1731878100169 2024-11-17T21:15:00,178 DEBUG [Time-limited test {}] wal.AbstractFSWAL(925): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:33721:33721),(127.0.0.1/127.0.0.1:38217:38217),(127.0.0.1/127.0.0.1:45905:45905)] 2024-11-17T21:15:00,179 DEBUG [Time-limited test {}] wal.AbstractFSWAL(751): hdfs://localhost:37527/user/jenkins/test-data/8565697d-f1c2-c8cf-52c9-101643776d34/testFindMemStoresEligibleForFlush/wal.1731878100154 is not closed yet, will try archiving it next time 2024-11-17T21:15:00,179 INFO [Time-limited test {}] hbase.Waiter(181): Waiting up to [5,000] milli-secs(wait.for.ratio=[1]) 2024-11-17T21:15:00,179 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1731878100179 2024-11-17T21:15:00,181 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:41963 is added to blk_1073741843_1021 (size=283) 2024-11-17T21:15:00,182 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43941 is added to blk_1073741843_1021 (size=283) 2024-11-17T21:15:00,183 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40579 is added to blk_1073741843_1021 (size=283) 2024-11-17T21:15:00,184 INFO [WAL-Archive-0 {}] wal.AbstractFSWAL(818): Archiving 
hdfs://localhost:37527/user/jenkins/test-data/8565697d-f1c2-c8cf-52c9-101643776d34/testFindMemStoresEligibleForFlush/wal.1731878100154 to hdfs://localhost:37527/user/jenkins/test-data/8565697d-f1c2-c8cf-52c9-101643776d34/oldWALs/wal.1731878100154 2024-11-17T21:15:00,189 INFO [Time-limited test {}] wal.AbstractFSWAL(837): Rolled WAL /user/jenkins/test-data/8565697d-f1c2-c8cf-52c9-101643776d34/testFindMemStoresEligibleForFlush/wal.1731878100169 with entries=0, filesize=85 B; new WAL /user/jenkins/test-data/8565697d-f1c2-c8cf-52c9-101643776d34/testFindMemStoresEligibleForFlush/wal.1731878100179 2024-11-17T21:15:00,190 DEBUG [Time-limited test {}] wal.AbstractFSWAL(925): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:38217:38217),(127.0.0.1/127.0.0.1:33721:33721),(127.0.0.1/127.0.0.1:45905:45905)] 2024-11-17T21:15:00,190 DEBUG [Time-limited test {}] wal.AbstractFSWAL(751): hdfs://localhost:37527/user/jenkins/test-data/8565697d-f1c2-c8cf-52c9-101643776d34/testFindMemStoresEligibleForFlush/wal.1731878100169 is not closed yet, will try archiving it next time 2024-11-17T21:15:00,190 INFO [Time-limited test {}] hbase.Waiter(181): Waiting up to [5,000] milli-secs(wait.for.ratio=[1]) 2024-11-17T21:15:00,192 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:41963 is added to blk_1073741844_1022 (size=93) 2024-11-17T21:15:00,193 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43941 is added to blk_1073741844_1022 (size=93) 2024-11-17T21:15:00,193 INFO [WAL-Archive-0 {}] wal.AbstractFSWAL(818): Archiving hdfs://localhost:37527/user/jenkins/test-data/8565697d-f1c2-c8cf-52c9-101643776d34/testFindMemStoresEligibleForFlush/wal.1731878100169 to hdfs://localhost:37527/user/jenkins/test-data/8565697d-f1c2-c8cf-52c9-101643776d34/oldWALs/wal.1731878100169 2024-11-17T21:15:00,196 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40579 is added to blk_1073741844_1022 (size=93) 2024-11-17T21:15:00,294 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1731878100293 2024-11-17T21:15:00,304 INFO [Time-limited test {}] wal.AbstractFSWAL(837): Rolled WAL /user/jenkins/test-data/8565697d-f1c2-c8cf-52c9-101643776d34/testFindMemStoresEligibleForFlush/wal.1731878100179 with entries=4, filesize=465 B; new WAL /user/jenkins/test-data/8565697d-f1c2-c8cf-52c9-101643776d34/testFindMemStoresEligibleForFlush/wal.1731878100293 2024-11-17T21:15:00,304 DEBUG [Time-limited test {}] wal.AbstractFSWAL(925): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:45905:45905),(127.0.0.1/127.0.0.1:38217:38217),(127.0.0.1/127.0.0.1:33721:33721)] 2024-11-17T21:15:00,304 DEBUG [Time-limited test {}] wal.AbstractFSWAL(751): hdfs://localhost:37527/user/jenkins/test-data/8565697d-f1c2-c8cf-52c9-101643776d34/testFindMemStoresEligibleForFlush/wal.1731878100179 is not closed yet, will try archiving it next time 2024-11-17T21:15:00,305 INFO [Time-limited test {}] hbase.Waiter(181): Waiting up to [5,000] milli-secs(wait.for.ratio=[1]) 2024-11-17T21:15:00,307 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1731878100306 2024-11-17T21:15:00,307 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40579 is added to blk_1073741845_1023 (size=473) 2024-11-17T21:15:00,308 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* 
addStoredBlock: 127.0.0.1:41963 is added to blk_1073741845_1023 (size=473) 2024-11-17T21:15:00,308 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43941 is added to blk_1073741845_1023 (size=473) 2024-11-17T21:15:00,322 INFO [Time-limited test {}] wal.AbstractFSWAL(837): Rolled WAL /user/jenkins/test-data/8565697d-f1c2-c8cf-52c9-101643776d34/testFindMemStoresEligibleForFlush/wal.1731878100293 with entries=2, filesize=275 B; new WAL /user/jenkins/test-data/8565697d-f1c2-c8cf-52c9-101643776d34/testFindMemStoresEligibleForFlush/wal.1731878100306 2024-11-17T21:15:00,322 DEBUG [Time-limited test {}] wal.AbstractFSWAL(925): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:45905:45905),(127.0.0.1/127.0.0.1:38217:38217),(127.0.0.1/127.0.0.1:33721:33721)] 2024-11-17T21:15:00,322 DEBUG [Time-limited test {}] wal.AbstractFSWAL(751): hdfs://localhost:37527/user/jenkins/test-data/8565697d-f1c2-c8cf-52c9-101643776d34/testFindMemStoresEligibleForFlush/wal.1731878100293 is not closed yet, will try archiving it next time 2024-11-17T21:15:00,322 INFO [Time-limited test {}] wal.AbstractFSWAL(718): Too many WALs; count=2, max=1; forcing (partial) flush of 2 region(s): 182ed4a515b528729bd85f7194fe0ad8[cf1],f08cdc9fa884064b39ea6326cb69be48[cf1] 2024-11-17T21:15:00,322 INFO [Time-limited test {}] wal.AbstractFSWAL(718): Too many WALs; count=2, max=1; forcing (partial) flush of 2 region(s): 182ed4a515b528729bd85f7194fe0ad8[cf1],f08cdc9fa884064b39ea6326cb69be48[cf1] 2024-11-17T21:15:00,323 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1731878100322 2024-11-17T21:15:00,324 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40579 is added to blk_1073741846_1024 (size=283) 2024-11-17T21:15:00,325 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43941 is added to blk_1073741846_1024 (size=283) 2024-11-17T21:15:00,326 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:41963 is added to blk_1073741846_1024 (size=283) 2024-11-17T21:15:00,326 INFO [WAL-Archive-0 {}] wal.AbstractFSWAL(818): Archiving hdfs://localhost:37527/user/jenkins/test-data/8565697d-f1c2-c8cf-52c9-101643776d34/testFindMemStoresEligibleForFlush/wal.1731878100179 to hdfs://localhost:37527/user/jenkins/test-data/8565697d-f1c2-c8cf-52c9-101643776d34/oldWALs/wal.1731878100179 2024-11-17T21:15:00,328 INFO [WAL-Archive-0 {}] wal.AbstractFSWAL(818): Archiving hdfs://localhost:37527/user/jenkins/test-data/8565697d-f1c2-c8cf-52c9-101643776d34/testFindMemStoresEligibleForFlush/wal.1731878100293 to hdfs://localhost:37527/user/jenkins/test-data/8565697d-f1c2-c8cf-52c9-101643776d34/oldWALs/wal.1731878100293 2024-11-17T21:15:00,338 INFO [Time-limited test {}] wal.AbstractFSWAL(837): Rolled WAL /user/jenkins/test-data/8565697d-f1c2-c8cf-52c9-101643776d34/testFindMemStoresEligibleForFlush/wal.1731878100306 with entries=0, filesize=85 B; new WAL /user/jenkins/test-data/8565697d-f1c2-c8cf-52c9-101643776d34/testFindMemStoresEligibleForFlush/wal.1731878100322 2024-11-17T21:15:00,339 DEBUG [Time-limited test {}] wal.AbstractFSWAL(925): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:38217:38217),(127.0.0.1/127.0.0.1:33721:33721),(127.0.0.1/127.0.0.1:45905:45905)] 2024-11-17T21:15:00,339 DEBUG [Time-limited test {}] wal.AbstractFSWAL(751): 
hdfs://localhost:37527/user/jenkins/test-data/8565697d-f1c2-c8cf-52c9-101643776d34/testFindMemStoresEligibleForFlush/wal.1731878100306 is not closed yet, will try archiving it next time 2024-11-17T21:15:00,339 INFO [Time-limited test {}] hbase.Waiter(181): Waiting up to [5,000] milli-secs(wait.for.ratio=[1]) 2024-11-17T21:15:00,343 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40579 is added to blk_1073741847_1025 (size=93) 2024-11-17T21:15:00,344 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43941 is added to blk_1073741847_1025 (size=93) 2024-11-17T21:15:00,344 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:41963 is added to blk_1073741847_1025 (size=93) 2024-11-17T21:15:00,744 INFO [WAL-Archive-0 {}] wal.AbstractFSWAL(818): Archiving hdfs://localhost:37527/user/jenkins/test-data/8565697d-f1c2-c8cf-52c9-101643776d34/testFindMemStoresEligibleForFlush/wal.1731878100306 to hdfs://localhost:37527/user/jenkins/test-data/8565697d-f1c2-c8cf-52c9-101643776d34/oldWALs/wal.1731878100306 2024-11-17T21:15:00,842 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1731878100841 2024-11-17T21:15:00,855 INFO [Time-limited test {}] wal.AbstractFSWAL(837): Rolled WAL /user/jenkins/test-data/8565697d-f1c2-c8cf-52c9-101643776d34/testFindMemStoresEligibleForFlush/wal.1731878100322 with entries=2, filesize=275 B; new WAL /user/jenkins/test-data/8565697d-f1c2-c8cf-52c9-101643776d34/testFindMemStoresEligibleForFlush/wal.1731878100841 2024-11-17T21:15:00,855 DEBUG [Time-limited test {}] wal.AbstractFSWAL(925): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:33721:33721),(127.0.0.1/127.0.0.1:45905:45905),(127.0.0.1/127.0.0.1:38217:38217)] 2024-11-17T21:15:00,855 DEBUG [Time-limited test {}] wal.AbstractFSWAL(751): hdfs://localhost:37527/user/jenkins/test-data/8565697d-f1c2-c8cf-52c9-101643776d34/testFindMemStoresEligibleForFlush/wal.1731878100322 is not closed yet, will try archiving it next time 2024-11-17T21:15:00,856 INFO [Time-limited test {}] hbase.Waiter(181): Waiting up to [5,000] milli-secs(wait.for.ratio=[1]) 2024-11-17T21:15:00,856 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1731878100856 2024-11-17T21:15:00,858 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:41963 is added to blk_1073741848_1026 (size=283) 2024-11-17T21:15:00,858 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40579 is added to blk_1073741848_1026 (size=283) 2024-11-17T21:15:00,859 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43941 is added to blk_1073741848_1026 (size=283) 2024-11-17T21:15:00,860 INFO [WAL-Archive-0 {}] wal.AbstractFSWAL(818): Archiving hdfs://localhost:37527/user/jenkins/test-data/8565697d-f1c2-c8cf-52c9-101643776d34/testFindMemStoresEligibleForFlush/wal.1731878100322 to hdfs://localhost:37527/user/jenkins/test-data/8565697d-f1c2-c8cf-52c9-101643776d34/oldWALs/wal.1731878100322 2024-11-17T21:15:00,869 INFO [Time-limited test {}] wal.AbstractFSWAL(837): Rolled WAL /user/jenkins/test-data/8565697d-f1c2-c8cf-52c9-101643776d34/testFindMemStoresEligibleForFlush/wal.1731878100841 with entries=0, filesize=85 B; new WAL 
/user/jenkins/test-data/8565697d-f1c2-c8cf-52c9-101643776d34/testFindMemStoresEligibleForFlush/wal.1731878100856 2024-11-17T21:15:00,870 DEBUG [Time-limited test {}] wal.AbstractFSWAL(925): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:33721:33721),(127.0.0.1/127.0.0.1:38217:38217),(127.0.0.1/127.0.0.1:45905:45905)] 2024-11-17T21:15:00,870 DEBUG [Time-limited test {}] wal.AbstractFSWAL(751): hdfs://localhost:37527/user/jenkins/test-data/8565697d-f1c2-c8cf-52c9-101643776d34/testFindMemStoresEligibleForFlush/wal.1731878100841 is not closed yet, will try archiving it next time 2024-11-17T21:15:00,872 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43941 is added to blk_1073741849_1027 (size=93) 2024-11-17T21:15:00,873 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:41963 is added to blk_1073741849_1027 (size=93) 2024-11-17T21:15:00,873 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40579 is added to blk_1073741849_1027 (size=93) 2024-11-17T21:15:00,874 INFO [WAL-Archive-0 {}] wal.AbstractFSWAL(818): Archiving hdfs://localhost:37527/user/jenkins/test-data/8565697d-f1c2-c8cf-52c9-101643776d34/testFindMemStoresEligibleForFlush/wal.1731878100841 to hdfs://localhost:37527/user/jenkins/test-data/8565697d-f1c2-c8cf-52c9-101643776d34/oldWALs/wal.1731878100841 2024-11-17T21:15:00,874 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1731878100874 2024-11-17T21:15:00,886 INFO [Time-limited test {}] wal.AbstractFSWAL(837): Rolled WAL /user/jenkins/test-data/8565697d-f1c2-c8cf-52c9-101643776d34/testFindMemStoresEligibleForFlush/wal.1731878100856 with entries=6, filesize=709 B; new WAL /user/jenkins/test-data/8565697d-f1c2-c8cf-52c9-101643776d34/testFindMemStoresEligibleForFlush/wal.1731878100874 2024-11-17T21:15:00,887 DEBUG [Time-limited test {}] wal.AbstractFSWAL(925): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:45905:45905),(127.0.0.1/127.0.0.1:33721:33721),(127.0.0.1/127.0.0.1:38217:38217)] 2024-11-17T21:15:00,887 DEBUG [Time-limited test {}] wal.AbstractFSWAL(751): hdfs://localhost:37527/user/jenkins/test-data/8565697d-f1c2-c8cf-52c9-101643776d34/testFindMemStoresEligibleForFlush/wal.1731878100856 is not closed yet, will try archiving it next time 2024-11-17T21:15:00,888 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1731878100888 2024-11-17T21:15:00,889 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:41963 is added to blk_1073741850_1028 (size=717) 2024-11-17T21:15:00,890 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43941 is added to blk_1073741850_1028 (size=717) 2024-11-17T21:15:00,890 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40579 is added to blk_1073741850_1028 (size=717) 2024-11-17T21:15:00,902 INFO [Time-limited test {}] wal.AbstractFSWAL(837): Rolled WAL /user/jenkins/test-data/8565697d-f1c2-c8cf-52c9-101643776d34/testFindMemStoresEligibleForFlush/wal.1731878100874 with entries=2, filesize=293 B; new WAL /user/jenkins/test-data/8565697d-f1c2-c8cf-52c9-101643776d34/testFindMemStoresEligibleForFlush/wal.1731878100888 2024-11-17T21:15:00,902 DEBUG [Time-limited test {}] wal.AbstractFSWAL(925): Create new FSHLog writer with pipeline: 
[(127.0.0.1/127.0.0.1:33721:33721),(127.0.0.1/127.0.0.1:38217:38217),(127.0.0.1/127.0.0.1:45905:45905)] 2024-11-17T21:15:00,902 DEBUG [Time-limited test {}] wal.AbstractFSWAL(751): hdfs://localhost:37527/user/jenkins/test-data/8565697d-f1c2-c8cf-52c9-101643776d34/testFindMemStoresEligibleForFlush/wal.1731878100874 is not closed yet, will try archiving it next time 2024-11-17T21:15:00,902 INFO [Time-limited test {}] wal.AbstractFSWAL(718): Too many WALs; count=2, max=1; forcing (partial) flush of 1 region(s): de3673b51a0f8286a5fb77891bf6bd23[cf1,cf3,cf2] 2024-11-17T21:15:00,902 INFO [Time-limited test {}] hbase.Waiter(181): Waiting up to [5,000] milli-secs(wait.for.ratio=[1]) 2024-11-17T21:15:00,903 INFO [Time-limited test {}] wal.AbstractFSWAL(718): Too many WALs; count=2, max=1; forcing (partial) flush of 1 region(s): de3673b51a0f8286a5fb77891bf6bd23[cf3,cf2] 2024-11-17T21:15:00,903 DEBUG [WAL-Shutdown-0 {}] wal.FSHLog(499): Closing WAL writer in /user/jenkins/test-data/8565697d-f1c2-c8cf-52c9-101643776d34/testFindMemStoresEligibleForFlush 2024-11-17T21:15:00,904 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43941 is added to blk_1073741851_1029 (size=301) 2024-11-17T21:15:00,905 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40579 is added to blk_1073741851_1029 (size=301) 2024-11-17T21:15:00,905 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:41963 is added to blk_1073741851_1029 (size=301) 2024-11-17T21:15:00,906 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:41963 is added to blk_1073741852_1030 (size=93) 2024-11-17T21:15:00,906 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43941 is added to blk_1073741852_1030 (size=93) 2024-11-17T21:15:00,907 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40579 is added to blk_1073741852_1030 (size=93) 2024-11-17T21:15:00,913 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1071): Moved 3 WAL file(s) to /user/jenkins/test-data/8565697d-f1c2-c8cf-52c9-101643776d34/oldWALs 2024-11-17T21:15:00,913 INFO [Time-limited test {}] wal.AbstractFSWAL(1074): Closed WAL: FSHLog wal:(num 1731878100888) 2024-11-17T21:15:00,921 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testFindMemStoresEligibleForFlush Thread=170 (was 174), OpenFileDescriptor=422 (was 426), MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=115 (was 116), ProcessCount=11 (was 11), AvailableMemoryMB=11743 (was 11754) 2024-11-17T21:15:00,928 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testMaxFlushedSequenceIdGoBackwards Thread=170, OpenFileDescriptor=422, MaxFileDescriptor=1048576, SystemLoadAverage=115, ProcessCount=11, AvailableMemoryMB=11741 2024-11-17T21:15:00,940 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:41963 is added to blk_1073741853_1031 (size=7) 2024-11-17T21:15:00,940 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40579 is added to blk_1073741853_1031 (size=7) 2024-11-17T21:15:00,941 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43941 is added to blk_1073741853_1031 (size=7) 
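The repeated "Too many WALs; count=2, max=1; forcing (partial) flush of N region(s): <region>[cf...]" records above, followed by WAL-Archive moves into oldWALs, show the behaviour testFindMemStoresEligibleForFlush exercises: once the number of live WAL files exceeds maxLogs, the regions and column families whose unflushed edits still pin the oldest WAL must be flushed so that WAL can be archived. The sketch below models that bookkeeping with plain collections; it is a deliberately simplified illustration, not the AbstractFSWAL implementation, and every name in it is invented.

import java.util.ArrayDeque;
import java.util.Deque;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.Map;
import java.util.Set;

// Simplified model of "find memstores eligible for flush": each live WAL file
// remembers which region/column-family pairs still have unflushed edits in it.
// When the WAL count exceeds maxLogs, the entries pinning the oldest WAL are
// the ones that must be flushed before that WAL can be archived.
public class FlushEligibilitySketch {

    private final int maxLogs;
    // Oldest WAL first; each WAL maps region -> families with unflushed edits.
    private final Deque<Map<String, Set<String>>> liveWals = new ArrayDeque<>();

    FlushEligibilitySketch(int maxLogs) {
        this.maxLogs = maxLogs;
    }

    // A roll pushes a fresh, empty WAL onto the tail.
    void rollWal() {
        liveWals.addLast(new LinkedHashMap<>());
    }

    // Record an unflushed edit for region/family in the current (newest) WAL.
    void append(String region, String family) {
        liveWals.getLast()
            .computeIfAbsent(region, r -> new LinkedHashSet<>())
            .add(family);
    }

    // Regions whose edits pin the oldest WAL; flushing them lets it be archived.
    Map<String, Set<String>> findMemStoresEligibleForFlush() {
        if (liveWals.size() <= maxLogs) {
            return Map.of();
        }
        return liveWals.getFirst();
    }

    public static void main(String[] args) {
        FlushEligibilitySketch wal = new FlushEligibilitySketch(1);
        wal.rollWal();
        wal.append("f08cdc9fa884064b39ea6326cb69be48", "cf1");
        wal.rollWal();                       // now count=2 > max=1
        System.out.println("forcing (partial) flush of: "
            + wal.findMemStoresEligibleForFlush());
    }
}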
2024-11-17T21:15:00,942 INFO [Time-limited test {}] util.FSUtils(490): Created version file at hdfs://localhost:37527/user/jenkins/test-data/a45ee5a3-5b69-e870-43d7-9500a26edffb with version=8 2024-11-17T21:15:00,943 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-11-17T21:15:00,945 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-11-17T21:15:00,953 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911. 2024-11-17T21:15:00,953 INFO [Time-limited test {}] wal.AbstractFSWAL(500): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:37527/user/jenkins/test-data/a45ee5a3-5b69-e870-43d7-9500a26edffb/testMaxFlushedSequenceIdGoBackwards, archiveDir=hdfs://localhost:37527/user/jenkins/test-data/a45ee5a3-5b69-e870-43d7-9500a26edffb/oldWALs, maxLogs=1760 2024-11-17T21:15:00,955 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1731878100954 2024-11-17T21:15:00,963 INFO [Time-limited test {}] wal.AbstractFSWAL(841): New WAL /user/jenkins/test-data/a45ee5a3-5b69-e870-43d7-9500a26edffb/testMaxFlushedSequenceIdGoBackwards/wal.1731878100954 2024-11-17T21:15:00,963 DEBUG [Time-limited test {}] wal.AbstractFSWAL(925): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:33721:33721),(127.0.0.1/127.0.0.1:45905:45905),(127.0.0.1/127.0.0.1:38217:38217)] 2024-11-17T21:15:00,964 INFO [Time-limited test {}] regionserver.HRegion(7106): creating {ENCODED => c5d30742c7d0854bb9b28e39d32159aa, NAME => 'table,,1731878100964.c5d30742c7d0854bb9b28e39d32159aa.', STARTKEY => '', ENDKEY => ''}, tableDescriptor='table', {NAME => 'a', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'}, {NAME => 'b', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'}, regionDir=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/09645b7e-9829-f1fc-19c7-d9675fc19400 2024-11-17T21:15:00,978 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:41963 is added to blk_1073741855_1033 (size=40) 2024-11-17T21:15:00,979 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40579 is added to blk_1073741855_1033 (size=40) 2024-11-17T21:15:00,979 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43941 is added to blk_1073741855_1033 (size=40) 2024-11-17T21:15:00,980 DEBUG [Time-limited test {}] regionserver.HRegion(894): Instantiated table,,1731878100964.c5d30742c7d0854bb9b28e39d32159aa.; StoreHotnessProtector, parallelPutToStoreThreadLimit=0 ; minColumnNum=100 ; 
preparePutThreadLimit=0 ; hotProtect now disable 2024-11-17T21:15:00,983 INFO [StoreOpener-c5d30742c7d0854bb9b28e39d32159aa-1 {}] regionserver.HStore(400): Created cacheConfig: cacheDataOnRead=true, cacheDataOnWrite=false, cacheIndexesOnWrite=false, cacheBloomsOnWrite=false, cacheEvictOnClose=false, cacheDataCompressed=false, prefetchOnOpen=false, for column family a of region c5d30742c7d0854bb9b28e39d32159aa 2024-11-17T21:15:00,985 INFO [StoreOpener-c5d30742c7d0854bb9b28e39d32159aa-1 {}] compactions.CompactionConfiguration(181): size [minCompactSize:128 MB, maxCompactSize:8.00 EB, offPeakMaxCompactSize:8.00 EB); files [minFilesToCompact:3, maxFilesToCompact:10); ratio 1.200000; off-peak ratio 5.000000; throttle point 2684354560; major period 604800000, major jitter 0.500000, min locality to compact 0.000000; tiered compaction: max_age 9223372036854775807, incoming window min 6, compaction policy for tiered window org.apache.hadoop.hbase.regionserver.compactions.ExploringCompactionPolicy, single output for minor true, compaction window factory org.apache.hadoop.hbase.regionserver.compactions.ExponentialCompactionWindowFactory, region c5d30742c7d0854bb9b28e39d32159aa columnFamilyName a 2024-11-17T21:15:00,986 DEBUG [StoreOpener-c5d30742c7d0854bb9b28e39d32159aa-1 {}] storefiletracker.StoreFileTrackerFactory(122): instantiating StoreFileTracker impl org.apache.hadoop.hbase.regionserver.storefiletracker.DefaultStoreFileTracker 2024-11-17T21:15:00,987 INFO [StoreOpener-c5d30742c7d0854bb9b28e39d32159aa-1 {}] regionserver.HStore(327): Store=c5d30742c7d0854bb9b28e39d32159aa/a, memstore type=DefaultMemStore, storagePolicy=NONE, verifyBulkLoads=false, parallelPutCountPrintThreshold=50, encoding=NONE, compression=NONE 2024-11-17T21:15:00,987 INFO [StoreOpener-c5d30742c7d0854bb9b28e39d32159aa-1 {}] regionserver.HStore(400): Created cacheConfig: cacheDataOnRead=true, cacheDataOnWrite=false, cacheIndexesOnWrite=false, cacheBloomsOnWrite=false, cacheEvictOnClose=false, cacheDataCompressed=false, prefetchOnOpen=false, for column family b of region c5d30742c7d0854bb9b28e39d32159aa 2024-11-17T21:15:00,989 INFO [StoreOpener-c5d30742c7d0854bb9b28e39d32159aa-1 {}] compactions.CompactionConfiguration(181): size [minCompactSize:128 MB, maxCompactSize:8.00 EB, offPeakMaxCompactSize:8.00 EB); files [minFilesToCompact:3, maxFilesToCompact:10); ratio 1.200000; off-peak ratio 5.000000; throttle point 2684354560; major period 604800000, major jitter 0.500000, min locality to compact 0.000000; tiered compaction: max_age 9223372036854775807, incoming window min 6, compaction policy for tiered window org.apache.hadoop.hbase.regionserver.compactions.ExploringCompactionPolicy, single output for minor true, compaction window factory org.apache.hadoop.hbase.regionserver.compactions.ExponentialCompactionWindowFactory, region c5d30742c7d0854bb9b28e39d32159aa columnFamilyName b 2024-11-17T21:15:00,989 DEBUG [StoreOpener-c5d30742c7d0854bb9b28e39d32159aa-1 {}] storefiletracker.StoreFileTrackerFactory(122): instantiating StoreFileTracker impl org.apache.hadoop.hbase.regionserver.storefiletracker.DefaultStoreFileTracker 2024-11-17T21:15:00,990 INFO [StoreOpener-c5d30742c7d0854bb9b28e39d32159aa-1 {}] regionserver.HStore(327): Store=c5d30742c7d0854bb9b28e39d32159aa/b, memstore type=DefaultMemStore, storagePolicy=NONE, verifyBulkLoads=false, parallelPutCountPrintThreshold=50, encoding=NONE, compression=NONE 2024-11-17T21:15:00,991 DEBUG [Time-limited test {}] regionserver.HRegion(5301): Found 0 recovered edits file(s) under 
hdfs://localhost:37527/user/jenkins/test-data/a45ee5a3-5b69-e870-43d7-9500a26edffb/data/default/table/c5d30742c7d0854bb9b28e39d32159aa 2024-11-17T21:15:00,992 DEBUG [Time-limited test {}] regionserver.HRegion(5301): Found 0 recovered edits file(s) under hdfs://localhost:37527/user/jenkins/test-data/a45ee5a3-5b69-e870-43d7-9500a26edffb/data/default/table/c5d30742c7d0854bb9b28e39d32159aa 2024-11-17T21:15:00,992 DEBUG [Time-limited test {}] regionserver.HRegion(5301): Found 0 recovered edits file(s) under hdfs://localhost:37527/user/jenkins/test-data/f42f8b1b-1b37-c14e-e298-9aafd748e226/data/default/table/c5d30742c7d0854bb9b28e39d32159aa 2024-11-17T21:15:00,995 DEBUG [Time-limited test {}] regionserver.FlushLargeStoresPolicy(65): No hbase.hregion.percolumnfamilyflush.size.lower.bound set in table table descriptor;using region.getMemStoreFlushHeapSize/# of families (64.0 M)) instead. 2024-11-17T21:15:00,997 DEBUG [Time-limited test {}] regionserver.HRegion(1085): writing seq id for c5d30742c7d0854bb9b28e39d32159aa 2024-11-17T21:15:01,000 DEBUG [Time-limited test {}] wal.WALSplitUtil(409): Wrote file=hdfs://localhost:37527/user/jenkins/test-data/f42f8b1b-1b37-c14e-e298-9aafd748e226/data/default/table/c5d30742c7d0854bb9b28e39d32159aa/recovered.edits/1.seqid, newMaxSeqId=1, maxSeqId=-1 2024-11-17T21:15:01,001 INFO [Time-limited test {}] regionserver.HRegion(1102): Opened c5d30742c7d0854bb9b28e39d32159aa; next sequenceid=2; SteppingSplitPolicysuper{IncreasingToUpperBoundRegionSplitPolicy{initialSize=268435456, ConstantSizeRegionSplitPolicy{desiredMaxFileSize=71154155, jitterRate=0.06027953326702118}}}, FlushLargeStoresPolicy{flushSizeLowerBound=67108864} 2024-11-17T21:15:01,004 DEBUG [Time-limited test {}] regionserver.HRegion(1001): Region open journal for c5d30742c7d0854bb9b28e39d32159aa: 2024-11-17T21:15:01,004 DEBUG [Time-limited test {}] regionserver.HRegion(1681): Closing c5d30742c7d0854bb9b28e39d32159aa, disabling compactions & flushes 2024-11-17T21:15:01,004 INFO [Time-limited test {}] regionserver.HRegion(1703): Closing region table,,1731878100964.c5d30742c7d0854bb9b28e39d32159aa. 2024-11-17T21:15:01,004 DEBUG [Time-limited test {}] regionserver.HRegion(1724): Waiting without time limit for close lock on table,,1731878100964.c5d30742c7d0854bb9b28e39d32159aa. 2024-11-17T21:15:01,004 DEBUG [Time-limited test {}] regionserver.HRegion(1791): Acquired close lock on table,,1731878100964.c5d30742c7d0854bb9b28e39d32159aa. after waiting 0 ms 2024-11-17T21:15:01,004 DEBUG [Time-limited test {}] regionserver.HRegion(1801): Updates disabled for region table,,1731878100964.c5d30742c7d0854bb9b28e39d32159aa. 2024-11-17T21:15:01,005 INFO [Time-limited test {}] regionserver.HRegion(1922): Closed table,,1731878100964.c5d30742c7d0854bb9b28e39d32159aa. 
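The open/close records above pair "Wrote file=.../recovered.edits/1.seqid, newMaxSeqId=1, maxSeqId=-1" with the subsequent "Opened ...; next sequenceid=2", which reads as the next sequence id being one past the largest seqid marker kept under recovered.edits. The sketch below only illustrates that arithmetic over plain file names under that assumption; it is not WALSplitUtil or HRegion code, and the helper names are invented.

import java.util.List;

// Illustrative arithmetic only: derive the next sequence id from "<n>.seqid"
// marker file names, as suggested by "recovered.edits/1.seqid, newMaxSeqId=1"
// followed by "next sequenceid=2" in the log above. Not HBase's WALSplitUtil.
public class NextSequenceIdSketch {

    // Largest sequence id among "<n>.seqid" markers, or -1 if there are none.
    static long maxSeqId(List<String> recoveredEditsFiles) {
        long max = -1L;
        for (String name : recoveredEditsFiles) {
            if (name.endsWith(".seqid")) {
                try {
                    max = Math.max(max, Long.parseLong(
                        name.substring(0, name.length() - ".seqid".length())));
                } catch (NumberFormatException ignored) {
                    // not a seqid marker; skip
                }
            }
        }
        return max;
    }

    public static void main(String[] args) {
        // First close in the log wrote a "1.seqid" marker, so the reopen starts at 2.
        System.out.println("next sequenceid=" + (maxSeqId(List.of("1.seqid")) + 1));
        // The final close below writes "16.seqid"; a later open would then start at 17.
        System.out.println("next sequenceid=" + (maxSeqId(List.of("16.seqid")) + 1));
    }
}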
2024-11-17T21:15:01,005 DEBUG [Time-limited test {}] regionserver.HRegion(1635): Region close journal for c5d30742c7d0854bb9b28e39d32159aa: 2024-11-17T21:15:01,416 DEBUG [Time-limited test {}] regionserver.HRegion(7285): Opening region: {ENCODED => c5d30742c7d0854bb9b28e39d32159aa, NAME => 'table,,1731878100964.c5d30742c7d0854bb9b28e39d32159aa.', STARTKEY => '', ENDKEY => ''} 2024-11-17T21:15:01,434 DEBUG [Time-limited test {}] regionserver.MetricsRegionSourceImpl(79): Creating new MetricsRegionSourceImpl for table table c5d30742c7d0854bb9b28e39d32159aa 2024-11-17T21:15:01,434 DEBUG [Time-limited test {}] regionserver.HRegion(894): Instantiated table,,1731878100964.c5d30742c7d0854bb9b28e39d32159aa.; StoreHotnessProtector, parallelPutToStoreThreadLimit=0 ; minColumnNum=100 ; preparePutThreadLimit=0 ; hotProtect now disable 2024-11-17T21:15:01,436 DEBUG [Time-limited test {}] regionserver.HRegion(7327): checking encryption for c5d30742c7d0854bb9b28e39d32159aa 2024-11-17T21:15:01,437 DEBUG [Time-limited test {}] regionserver.HRegion(7330): checking classloading for c5d30742c7d0854bb9b28e39d32159aa 2024-11-17T21:15:01,440 INFO [StoreOpener-c5d30742c7d0854bb9b28e39d32159aa-1 {}] regionserver.HStore(400): Created cacheConfig: cacheDataOnRead=true, cacheDataOnWrite=false, cacheIndexesOnWrite=false, cacheBloomsOnWrite=false, cacheEvictOnClose=false, cacheDataCompressed=false, prefetchOnOpen=false, for column family a of region c5d30742c7d0854bb9b28e39d32159aa 2024-11-17T21:15:01,442 INFO [StoreOpener-c5d30742c7d0854bb9b28e39d32159aa-1 {}] compactions.CompactionConfiguration(181): size [minCompactSize:128 MB, maxCompactSize:8.00 EB, offPeakMaxCompactSize:8.00 EB); files [minFilesToCompact:3, maxFilesToCompact:10); ratio 1.200000; off-peak ratio 5.000000; throttle point 2684354560; major period 604800000, major jitter 0.500000, min locality to compact 0.000000; tiered compaction: max_age 9223372036854775807, incoming window min 6, compaction policy for tiered window org.apache.hadoop.hbase.regionserver.compactions.ExploringCompactionPolicy, single output for minor true, compaction window factory org.apache.hadoop.hbase.regionserver.compactions.ExponentialCompactionWindowFactory, region c5d30742c7d0854bb9b28e39d32159aa columnFamilyName a 2024-11-17T21:15:01,442 DEBUG [StoreOpener-c5d30742c7d0854bb9b28e39d32159aa-1 {}] storefiletracker.StoreFileTrackerFactory(122): instantiating StoreFileTracker impl org.apache.hadoop.hbase.regionserver.storefiletracker.DefaultStoreFileTracker 2024-11-17T21:15:01,443 INFO [StoreOpener-c5d30742c7d0854bb9b28e39d32159aa-1 {}] regionserver.HStore(327): Store=c5d30742c7d0854bb9b28e39d32159aa/a, memstore type=DefaultMemStore, storagePolicy=NONE, verifyBulkLoads=false, parallelPutCountPrintThreshold=50, encoding=NONE, compression=NONE 2024-11-17T21:15:01,443 INFO [StoreOpener-c5d30742c7d0854bb9b28e39d32159aa-1 {}] regionserver.HStore(400): Created cacheConfig: cacheDataOnRead=true, cacheDataOnWrite=false, cacheIndexesOnWrite=false, cacheBloomsOnWrite=false, cacheEvictOnClose=false, cacheDataCompressed=false, prefetchOnOpen=false, for column family b of region c5d30742c7d0854bb9b28e39d32159aa 2024-11-17T21:15:01,444 INFO [StoreOpener-c5d30742c7d0854bb9b28e39d32159aa-1 {}] compactions.CompactionConfiguration(181): size [minCompactSize:128 MB, maxCompactSize:8.00 EB, offPeakMaxCompactSize:8.00 EB); files [minFilesToCompact:3, maxFilesToCompact:10); ratio 1.200000; off-peak ratio 5.000000; throttle point 2684354560; major period 604800000, major jitter 0.500000, min locality to 
compact 0.000000; tiered compaction: max_age 9223372036854775807, incoming window min 6, compaction policy for tiered window org.apache.hadoop.hbase.regionserver.compactions.ExploringCompactionPolicy, single output for minor true, compaction window factory org.apache.hadoop.hbase.regionserver.compactions.ExponentialCompactionWindowFactory, region c5d30742c7d0854bb9b28e39d32159aa columnFamilyName b 2024-11-17T21:15:01,445 DEBUG [StoreOpener-c5d30742c7d0854bb9b28e39d32159aa-1 {}] storefiletracker.StoreFileTrackerFactory(122): instantiating StoreFileTracker impl org.apache.hadoop.hbase.regionserver.storefiletracker.DefaultStoreFileTracker 2024-11-17T21:15:01,445 INFO [StoreOpener-c5d30742c7d0854bb9b28e39d32159aa-1 {}] regionserver.HStore(327): Store=c5d30742c7d0854bb9b28e39d32159aa/b, memstore type=DefaultMemStore, storagePolicy=NONE, verifyBulkLoads=false, parallelPutCountPrintThreshold=50, encoding=NONE, compression=NONE 2024-11-17T21:15:01,447 DEBUG [Time-limited test {}] regionserver.HRegion(5301): Found 0 recovered edits file(s) under hdfs://localhost:37527/user/jenkins/test-data/a45ee5a3-5b69-e870-43d7-9500a26edffb/data/default/table/c5d30742c7d0854bb9b28e39d32159aa 2024-11-17T21:15:01,447 DEBUG [Time-limited test {}] regionserver.HRegion(5301): Found 0 recovered edits file(s) under hdfs://localhost:37527/user/jenkins/test-data/a45ee5a3-5b69-e870-43d7-9500a26edffb/data/default/table/c5d30742c7d0854bb9b28e39d32159aa 2024-11-17T21:15:01,449 DEBUG [Time-limited test {}] regionserver.HRegion(5301): Found 0 recovered edits file(s) under hdfs://localhost:37527/user/jenkins/test-data/f42f8b1b-1b37-c14e-e298-9aafd748e226/data/default/table/c5d30742c7d0854bb9b28e39d32159aa 2024-11-17T21:15:01,453 DEBUG [Time-limited test {}] regionserver.HRegion(1085): writing seq id for c5d30742c7d0854bb9b28e39d32159aa 2024-11-17T21:15:01,455 INFO [Time-limited test {}] regionserver.HRegion(1102): Opened c5d30742c7d0854bb9b28e39d32159aa; next sequenceid=2; SteppingSplitPolicysuper{IncreasingToUpperBoundRegionSplitPolicy{initialSize=268435456, ConstantSizeRegionSplitPolicy{desiredMaxFileSize=72217763, jitterRate=0.0761285275220871}}}, org.apache.hadoop.hbase.regionserver.wal.AbstractTestFSWAL$FlushSpecificStoresPolicy@30b11eca 2024-11-17T21:15:01,457 DEBUG [Time-limited test {}] regionserver.HRegion(1001): Region open journal for c5d30742c7d0854bb9b28e39d32159aa: 2024-11-17T21:15:04,476 INFO [pool-84-thread-1 {}] regionserver.HRegion(2837): Flushing c5d30742c7d0854bb9b28e39d32159aa 2/2 column families, dataSize=96 B heapSize=896 B 2024-11-17T21:15:06,944 WARN [HBase-Metrics2-1 {}] impl.MetricsConfig(138): Cannot locate configuration: tried hadoop-metrics2-hbase.properties,hadoop-metrics2.properties 2024-11-17T21:15:07,496 DEBUG [pool-84-thread-1 {}] hfile.HFileWriterImpl(814): Len of the biggest cell in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/09645b7e-9829-f1fc-19c7-d9675fc19400/data/default/table/c5d30742c7d0854bb9b28e39d32159aa/.tmp/a/12dcb9f059cc4fcf8f9f16f369f7c184 is 28, key is a/a:a/1731878101465/Put/seqid=0 2024-11-17T21:15:07,507 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:41963 is added to blk_1073741856_1034 (size=4945) 2024-11-17T21:15:07,507 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40579 is added to blk_1073741856_1034 (size=4945) 2024-11-17T21:15:07,507 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* 
addStoredBlock: 127.0.0.1:43941 is added to blk_1073741856_1034 (size=4945) 2024-11-17T21:15:07,508 INFO [pool-84-thread-1 {}] regionserver.DefaultStoreFlusher(81): Flushed memstore data size=48 B at sequenceid=6 (bloomFilter=true), to=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/09645b7e-9829-f1fc-19c7-d9675fc19400/data/default/table/c5d30742c7d0854bb9b28e39d32159aa/.tmp/a/12dcb9f059cc4fcf8f9f16f369f7c184 2024-11-17T21:15:07,536 DEBUG [pool-84-thread-1 {}] hfile.HFileWriterImpl(814): Len of the biggest cell in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/09645b7e-9829-f1fc-19c7-d9675fc19400/data/default/table/c5d30742c7d0854bb9b28e39d32159aa/.tmp/b/ac148d6a976140dfa68909ed1ab10a15 is 28, key is a/b:b/1731878101465/Put/seqid=0 2024-11-17T21:15:07,544 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43941 is added to blk_1073741857_1035 (size=4945) 2024-11-17T21:15:07,545 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:41963 is added to blk_1073741857_1035 (size=4945) 2024-11-17T21:15:07,545 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40579 is added to blk_1073741857_1035 (size=4945) 2024-11-17T21:15:07,546 INFO [pool-84-thread-1 {}] regionserver.DefaultStoreFlusher(81): Flushed memstore data size=48 B at sequenceid=6 (bloomFilter=true), to=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/09645b7e-9829-f1fc-19c7-d9675fc19400/data/default/table/c5d30742c7d0854bb9b28e39d32159aa/.tmp/b/ac148d6a976140dfa68909ed1ab10a15 2024-11-17T21:15:07,558 DEBUG [pool-84-thread-1 {}] regionserver.HRegionFileSystem(442): Committing /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/09645b7e-9829-f1fc-19c7-d9675fc19400/data/default/table/c5d30742c7d0854bb9b28e39d32159aa/.tmp/a/12dcb9f059cc4fcf8f9f16f369f7c184 as /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/09645b7e-9829-f1fc-19c7-d9675fc19400/data/default/table/c5d30742c7d0854bb9b28e39d32159aa/a/12dcb9f059cc4fcf8f9f16f369f7c184 2024-11-17T21:15:07,569 INFO [pool-84-thread-1 {}] regionserver.HStore$StoreFlusherImpl(1989): Added /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/09645b7e-9829-f1fc-19c7-d9675fc19400/data/default/table/c5d30742c7d0854bb9b28e39d32159aa/a/12dcb9f059cc4fcf8f9f16f369f7c184, entries=1, sequenceid=6, filesize=4.8 K 2024-11-17T21:15:07,571 DEBUG [pool-84-thread-1 {}] regionserver.HRegionFileSystem(442): Committing /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/09645b7e-9829-f1fc-19c7-d9675fc19400/data/default/table/c5d30742c7d0854bb9b28e39d32159aa/.tmp/b/ac148d6a976140dfa68909ed1ab10a15 as /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/09645b7e-9829-f1fc-19c7-d9675fc19400/data/default/table/c5d30742c7d0854bb9b28e39d32159aa/b/ac148d6a976140dfa68909ed1ab10a15 2024-11-17T21:15:07,581 INFO [pool-84-thread-1 {}] regionserver.HStore$StoreFlusherImpl(1989): Added /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/09645b7e-9829-f1fc-19c7-d9675fc19400/data/default/table/c5d30742c7d0854bb9b28e39d32159aa/b/ac148d6a976140dfa68909ed1ab10a15, entries=1, sequenceid=6, filesize=4.8 K 
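The flush above first writes each store file under .tmp/ ("Flushed memstore data size=48 B ... to=.../.tmp/a/12dcb9f...") and only then commits it into the column-family directory ("Committing .../.tmp/a/12dcb9f... as .../a/12dcb9f..."). That write-to-temp-then-rename pattern is sketched below with java.nio; the paths and sizes are made up, and this is a generic illustration rather than HRegionFileSystem code.

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;

// Generic write-to-.tmp-then-commit sketch, mirroring the flush/commit pair in
// the log above: the store file is fully written under .tmp/ first, then moved
// into the column-family directory so readers never see a partial file.
public class TmpThenCommitSketch {

    // Write the payload under regionDir/.tmp/family/fileName, then move it to
    // regionDir/family/fileName. ATOMIC_MOVE keeps the commit all-or-nothing
    // on file systems that support it.
    static Path flushAndCommit(Path regionDir, String family, String fileName,
                               byte[] payload) throws IOException {
        Path tmpFile = regionDir.resolve(".tmp").resolve(family).resolve(fileName);
        Path finalFile = regionDir.resolve(family).resolve(fileName);
        Files.createDirectories(tmpFile.getParent());
        Files.createDirectories(finalFile.getParent());
        Files.write(tmpFile, payload);                       // "flush" step
        return Files.move(tmpFile, finalFile,                // "commit" step
            StandardCopyOption.ATOMIC_MOVE);
    }

    public static void main(String[] args) throws IOException {
        Path regionDir = Files.createTempDirectory("region-c5d30742-sketch");
        Path committed = flushAndCommit(regionDir, "a",
            "12dcb9f059cc4fcf8f9f16f369f7c184", new byte[48]);
        System.out.println("Added " + committed + ", filesize="
            + Files.size(committed) + " B");
    }
}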
2024-11-17T21:15:07,583 INFO [pool-84-thread-1 {}] regionserver.HRegion(3040): Finished flush of dataSize ~96 B/96, heapSize ~864 B/864, currentSize=0 B/0 for c5d30742c7d0854bb9b28e39d32159aa in 3108ms, sequenceid=6, compaction requested=false 2024-11-17T21:15:07,583 DEBUG [pool-84-thread-1 {}] regionserver.HRegion(2538): Flush status journal for c5d30742c7d0854bb9b28e39d32159aa: 2024-11-17T21:15:07,584 INFO [pool-84-thread-1 {}] wal.AbstractTestFSWAL(657): Flush result:FLUSHED_NO_COMPACTION_NEEDED 2024-11-17T21:15:07,584 INFO [pool-84-thread-1 {}] wal.AbstractTestFSWAL(658): Flush succeeded:true 2024-11-17T21:15:07,589 INFO [Time-limited test {}] regionserver.HRegion(2837): Flushing c5d30742c7d0854bb9b28e39d32159aa 1/2 column families, dataSize=48 B heapSize=704 B; a={dataSize=24 B, heapSize=352 B, offHeapSize=0 B} 2024-11-17T21:15:07,595 DEBUG [Time-limited test {}] hfile.HFileWriterImpl(814): Len of the biggest cell in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/09645b7e-9829-f1fc-19c7-d9675fc19400/data/default/table/c5d30742c7d0854bb9b28e39d32159aa/.tmp/a/a1dc8a9fa34d4a82b5f02d247cffee71 is 28, key is a/a:a/1731878101465/Put/seqid=0 2024-11-17T21:15:07,603 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43941 is added to blk_1073741858_1036 (size=4945) 2024-11-17T21:15:07,604 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40579 is added to blk_1073741858_1036 (size=4945) 2024-11-17T21:15:07,604 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:41963 is added to blk_1073741858_1036 (size=4945) 2024-11-17T21:15:07,605 INFO [Time-limited test {}] regionserver.DefaultStoreFlusher(81): Flushed memstore data size=24 B at sequenceid=10 (bloomFilter=true), to=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/09645b7e-9829-f1fc-19c7-d9675fc19400/data/default/table/c5d30742c7d0854bb9b28e39d32159aa/.tmp/a/a1dc8a9fa34d4a82b5f02d247cffee71 2024-11-17T21:15:07,614 DEBUG [Time-limited test {}] regionserver.HRegionFileSystem(442): Committing /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/09645b7e-9829-f1fc-19c7-d9675fc19400/data/default/table/c5d30742c7d0854bb9b28e39d32159aa/.tmp/a/a1dc8a9fa34d4a82b5f02d247cffee71 as /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/09645b7e-9829-f1fc-19c7-d9675fc19400/data/default/table/c5d30742c7d0854bb9b28e39d32159aa/a/a1dc8a9fa34d4a82b5f02d247cffee71 2024-11-17T21:15:07,622 INFO [Time-limited test {}] regionserver.HStore$StoreFlusherImpl(1989): Added /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/09645b7e-9829-f1fc-19c7-d9675fc19400/data/default/table/c5d30742c7d0854bb9b28e39d32159aa/a/a1dc8a9fa34d4a82b5f02d247cffee71, entries=1, sequenceid=10, filesize=4.8 K 2024-11-17T21:15:07,624 INFO [Time-limited test {}] regionserver.HRegion(3040): Finished flush of dataSize ~24 B/24, heapSize ~336 B/336, currentSize=24 B/24 for c5d30742c7d0854bb9b28e39d32159aa in 35ms, sequenceid=10, compaction requested=false 2024-11-17T21:15:07,625 DEBUG [Time-limited test {}] regionserver.HRegion(2538): Flush status journal for c5d30742c7d0854bb9b28e39d32159aa: 2024-11-17T21:15:07,625 DEBUG [Time-limited test {}] regionserver.HRegion(1681): Closing c5d30742c7d0854bb9b28e39d32159aa, disabling 
compactions & flushes 2024-11-17T21:15:07,625 INFO [Time-limited test {}] regionserver.HRegion(1703): Closing region table,,1731878100964.c5d30742c7d0854bb9b28e39d32159aa. 2024-11-17T21:15:07,625 DEBUG [Time-limited test {}] regionserver.HRegion(1724): Waiting without time limit for close lock on table,,1731878100964.c5d30742c7d0854bb9b28e39d32159aa. 2024-11-17T21:15:07,625 DEBUG [Time-limited test {}] regionserver.HRegion(1791): Acquired close lock on table,,1731878100964.c5d30742c7d0854bb9b28e39d32159aa. after waiting 0 ms 2024-11-17T21:15:07,625 DEBUG [Time-limited test {}] regionserver.HRegion(1801): Updates disabled for region table,,1731878100964.c5d30742c7d0854bb9b28e39d32159aa. 2024-11-17T21:15:07,625 INFO [Time-limited test {}] regionserver.HRegion(2837): Flushing c5d30742c7d0854bb9b28e39d32159aa 2/2 column families, dataSize=24 B heapSize=608 B 2024-11-17T21:15:07,631 DEBUG [Time-limited test {}] hfile.HFileWriterImpl(814): Len of the biggest cell in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/09645b7e-9829-f1fc-19c7-d9675fc19400/data/default/table/c5d30742c7d0854bb9b28e39d32159aa/.tmp/b/40082aaca20c4519a577794b40de5360 is 28, key is a/b:b/1731878101465/Put/seqid=0 2024-11-17T21:15:07,639 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40579 is added to blk_1073741859_1037 (size=4945) 2024-11-17T21:15:07,639 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:41963 is added to blk_1073741859_1037 (size=4945) 2024-11-17T21:15:07,640 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43941 is added to blk_1073741859_1037 (size=4945) 2024-11-17T21:15:07,640 INFO [Time-limited test {}] regionserver.DefaultStoreFlusher(81): Flushed memstore data size=24 B at sequenceid=13 (bloomFilter=true), to=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/09645b7e-9829-f1fc-19c7-d9675fc19400/data/default/table/c5d30742c7d0854bb9b28e39d32159aa/.tmp/b/40082aaca20c4519a577794b40de5360 2024-11-17T21:15:07,649 DEBUG [Time-limited test {}] regionserver.HRegionFileSystem(442): Committing /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/09645b7e-9829-f1fc-19c7-d9675fc19400/data/default/table/c5d30742c7d0854bb9b28e39d32159aa/.tmp/b/40082aaca20c4519a577794b40de5360 as /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/09645b7e-9829-f1fc-19c7-d9675fc19400/data/default/table/c5d30742c7d0854bb9b28e39d32159aa/b/40082aaca20c4519a577794b40de5360 2024-11-17T21:15:07,656 INFO [Time-limited test {}] regionserver.HStore$StoreFlusherImpl(1989): Added /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/09645b7e-9829-f1fc-19c7-d9675fc19400/data/default/table/c5d30742c7d0854bb9b28e39d32159aa/b/40082aaca20c4519a577794b40de5360, entries=1, sequenceid=13, filesize=4.8 K 2024-11-17T21:15:07,658 INFO [Time-limited test {}] regionserver.HRegion(3040): Finished flush of dataSize ~24 B/24, heapSize ~336 B/336, currentSize=0 B/0 for c5d30742c7d0854bb9b28e39d32159aa in 33ms, sequenceid=13, compaction requested=false 2024-11-17T21:15:07,663 DEBUG [Time-limited test {}] wal.WALSplitUtil(409): Wrote 
file=hdfs://localhost:37527/user/jenkins/test-data/f42f8b1b-1b37-c14e-e298-9aafd748e226/data/default/table/c5d30742c7d0854bb9b28e39d32159aa/recovered.edits/16.seqid, newMaxSeqId=16, maxSeqId=1 2024-11-17T21:15:07,664 INFO [Time-limited test {}] regionserver.HRegion(1922): Closed table,,1731878100964.c5d30742c7d0854bb9b28e39d32159aa. 2024-11-17T21:15:07,665 DEBUG [Time-limited test {}] regionserver.HRegion(1635): Region close journal for c5d30742c7d0854bb9b28e39d32159aa: 2024-11-17T21:15:07,665 DEBUG [WAL-Shutdown-0 {}] wal.FSHLog(499): Closing WAL writer in /user/jenkins/test-data/a45ee5a3-5b69-e870-43d7-9500a26edffb/testMaxFlushedSequenceIdGoBackwards 2024-11-17T21:15:07,667 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43941 is added to blk_1073741854_1032 (size=2357) 2024-11-17T21:15:07,667 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:41963 is added to blk_1073741854_1032 (size=2357) 2024-11-17T21:15:07,668 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40579 is added to blk_1073741854_1032 (size=2357) 2024-11-17T21:15:07,670 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1071): Moved 1 WAL file(s) to /user/jenkins/test-data/a45ee5a3-5b69-e870-43d7-9500a26edffb/oldWALs 2024-11-17T21:15:07,670 INFO [Time-limited test {}] wal.AbstractFSWAL(1074): Closed WAL: FSHLog wal:(num 1731878100954) 2024-11-17T21:15:07,677 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testMaxFlushedSequenceIdGoBackwards Thread=173 (was 170) Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_900039209_22 at /127.0.0.1:57252 [Waiting for operation #9] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244) java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:263) java.base@17.0.11/java.io.DataInputStream.readUnsignedShort(DataInputStream.java:334) java.base@17.0.11/java.io.DataInputStream.readShort(DataInputStream.java:312) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.readOp(Receiver.java:72) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:273) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: Async disk worker #0 for volume /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/09645b7e-9829-f1fc-19c7-d9675fc19400/cluster_2a0274ea-93ca-a98a-2c9d-271c63f62376/dfs/data/data1 java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method) java.base@17.0.11/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:252) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:1674) 
java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:460) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1061) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1122) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_900039209_22 at /127.0.0.1:57592 [Waiting for operation #5] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244) java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:263) java.base@17.0.11/java.io.DataInputStream.readUnsignedShort(DataInputStream.java:334) java.base@17.0.11/java.io.DataInputStream.readShort(DataInputStream.java:312) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.readOp(Receiver.java:72) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:273) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_900039209_22 at /127.0.0.1:44236 [Waiting for operation #5] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244) java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:263) java.base@17.0.11/java.io.DataInputStream.readUnsignedShort(DataInputStream.java:334) java.base@17.0.11/java.io.DataInputStream.readShort(DataInputStream.java:312) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.readOp(Receiver.java:72) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:273) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: Timer for 'HBase' metrics system java.base@17.0.11/java.lang.Object.wait(Native Method) java.base@17.0.11/java.util.TimerThread.mainLoop(Timer.java:563) java.base@17.0.11/java.util.TimerThread.run(Timer.java:516) Potentially hanging thread: Async disk worker #0 for volume /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/09645b7e-9829-f1fc-19c7-d9675fc19400/cluster_2a0274ea-93ca-a98a-2c9d-271c63f62376/dfs/data/data2 
java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method) java.base@17.0.11/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:252) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:1674) java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:460) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1061) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1122) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) - Thread LEAK? -, OpenFileDescriptor=424 (was 422) - OpenFileDescriptor LEAK? -, MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=114 (was 115), ProcessCount=11 (was 11), AvailableMemoryMB=11677 (was 11741) 2024-11-17T21:15:07,684 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testFlushSequenceIdIsGreaterThanAllEditsInHFile Thread=173, OpenFileDescriptor=424, MaxFileDescriptor=1048576, SystemLoadAverage=114, ProcessCount=11, AvailableMemoryMB=11676 2024-11-17T21:15:07,695 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40579 is added to blk_1073741860_1038 (size=7) 2024-11-17T21:15:07,695 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:41963 is added to blk_1073741860_1038 (size=7) 2024-11-17T21:15:07,696 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43941 is added to blk_1073741860_1038 (size=7) 2024-11-17T21:15:07,698 INFO [Time-limited test {}] util.FSUtils(490): Created version file at hdfs://localhost:37527/user/jenkins/test-data/a45ee5a3-5b69-e870-43d7-9500a26edffb with version=8 2024-11-17T21:15:07,698 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-11-17T21:15:07,700 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-11-17T21:15:07,709 DEBUG [Time-limited test {}] asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper(244): No decryptEncryptedDataEncryptionKey method in DFSClient, should be hadoop version with HDFS-12396 java.lang.NoSuchMethodException: org.apache.hadoop.hdfs.DFSClient.decryptEncryptedDataEncryptionKey(org.apache.hadoop.fs.FileEncryptionInfo) at java.lang.Class.getDeclaredMethod(Class.java:2675) ~[?:?] at org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper.createTransparentCryptoHelperWithoutHDFS12396(FanOutOneBlockAsyncDFSOutputSaslHelper.java:183) ~[hbase-asyncfs-2.7.0-SNAPSHOT.jar:2.7.0-SNAPSHOT] at org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper.createTransparentCryptoHelper(FanOutOneBlockAsyncDFSOutputSaslHelper.java:242) ~[hbase-asyncfs-2.7.0-SNAPSHOT.jar:2.7.0-SNAPSHOT] at org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper.(FanOutOneBlockAsyncDFSOutputSaslHelper.java:253) ~[hbase-asyncfs-2.7.0-SNAPSHOT.jar:2.7.0-SNAPSHOT] at java.lang.Class.forName0(Native Method) ~[?:?] at java.lang.Class.forName(Class.java:375) ~[?:?] 
at org.apache.hadoop.hbase.wal.AsyncFSWALProvider.load(AsyncFSWALProvider.java:147) ~[classes/:?] at org.apache.hadoop.hbase.wal.WALFactory.getProviderClass(WALFactory.java:160) ~[classes/:?] at org.apache.hadoop.hbase.wal.WALFactory.getProvider(WALFactory.java:200) ~[classes/:?] at org.apache.hadoop.hbase.wal.WALFactory.(WALFactory.java:232) ~[classes/:?] at org.apache.hadoop.hbase.wal.WALFactory.(WALFactory.java:207) ~[classes/:?] at org.apache.hadoop.hbase.HBaseTestingUtility.createWal(HBaseTestingUtility.java:2587) ~[test-classes/:2.7.0-SNAPSHOT] at org.apache.hadoop.hbase.HBaseTestingUtility.createRegionAndWAL(HBaseTestingUtility.java:2632) ~[test-classes/:2.7.0-SNAPSHOT] at org.apache.hadoop.hbase.HBaseTestingUtility.createRegionAndWAL(HBaseTestingUtility.java:2596) ~[test-classes/:2.7.0-SNAPSHOT] at org.apache.hadoop.hbase.regionserver.wal.AbstractTestFSWAL.testFlushSequenceIdIsGreaterThanAllEditsInHFile(AbstractTestFSWAL.java:424) ~[test-classes/:?] at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?] at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:59) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:56) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:26) ~[junit-4.13.2.jar:4.13.2] at org.junit.rules.TestWatcher$1.evaluate(TestWatcher.java:61) ~[junit-4.13.2.jar:4.13.2] at org.junit.rules.TestWatcher$1.evaluate(TestWatcher.java:61) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner$3.evaluate(ParentRunner.java:306) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.BlockJUnit4ClassRunner$1.evaluate(BlockJUnit4ClassRunner.java:100) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:366) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:103) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:63) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner$4.run(ParentRunner.java:331) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:79) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:329) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner.access$100(ParentRunner.java:66) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:293) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:26) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:27) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:299) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:293) ~[junit-4.13.2.jar:4.13.2] at 
java.util.concurrent.FutureTask.run(FutureTask.java:264) ~[?:?] at java.lang.Thread.run(Thread.java:840) ~[?:?] 2024-11-17T21:15:07,710 INFO [Time-limited test {}] wal.WALFactory(183): Instantiating WALProvider of type class org.apache.hadoop.hbase.wal.AsyncFSWALProvider 2024-11-17T21:15:07,713 DEBUG [Time-limited test {}] channel.MultithreadEventLoopGroup(44): -Dio.netty.eventLoopThreads: 16 2024-11-17T21:15:07,724 DEBUG [Time-limited test {}] nio.NioEventLoop(110): -Dio.netty.noKeySetOptimization: false 2024-11-17T21:15:07,724 DEBUG [Time-limited test {}] nio.NioEventLoop(111): -Dio.netty.selectorAutoRebuildThreshold: 512 2024-11-17T21:15:07,737 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor defaultMonitorName 2024-11-17T21:15:07,741 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911. 2024-11-17T21:15:07,741 INFO [Time-limited test {}] wal.AbstractFSWAL(500): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=hregion-70906617, suffix=, logDir=hdfs://localhost:37527/user/jenkins/test-data/3199d189-146f-05cf-9a4b-e38ac6408a90/WALs/hregion-70906617, archiveDir=hdfs://localhost:37527/user/jenkins/test-data/3199d189-146f-05cf-9a4b-e38ac6408a90/oldWALs, maxLogs=1760 2024-11-17T21:15:07,759 DEBUG [Time-limited test {}] asyncfs.FanOutOneBlockAsyncDFSOutputHelper(617): When create output stream for /user/jenkins/test-data/3199d189-146f-05cf-9a4b-e38ac6408a90/WALs/hregion-70906617/hregion-70906617.1731878107744, exclude list is [], retry=0 2024-11-17T21:15:07,771 DEBUG [Time-limited test {}] channel.DefaultChannelId(84): -Dio.netty.processId: 582 (auto-detected) 2024-11-17T21:15:07,774 DEBUG [Time-limited test {}] channel.DefaultChannelId(106): -Dio.netty.machineId: 02:42:ac:ff:fe:11:00:02 (auto-detected) 2024-11-17T21:15:07,794 DEBUG [AsyncFSWAL-1-1 {}] asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper(816): SASL client skipping handshake in unsecured configuration for addr = /127.0.0.1, datanodeId = DatanodeInfoWithStorage[127.0.0.1:40579,DS-2aedbc45-fd4e-4d62-83f9-420e54976b0e,DISK] 2024-11-17T21:15:07,797 DEBUG [AsyncFSWAL-1-1 {}] asyncfs.ProtobufDecoder(117): Hadoop 3.3 and above shades protobuf. 
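The DEBUG entry from FanOutOneBlockAsyncDFSOutputSaslHelper above shows HBase probing DFSClient by reflection for decryptEncryptedDataEncryptionKey (the method added by HDFS-12396) and falling back when it is absent; the NoSuchMethodException is expected and only logged at DEBUG. A minimal, self-contained sketch of that probe-and-fallback pattern, using nothing but java.lang.reflect, is below; apart from the method name quoted from the log, the class and method names are illustrative and are not the actual HBase helper.

import java.lang.reflect.Method;

public class ReflectiveCapabilityProbe {

    /** Return the requested method if the dependency provides it, or null so callers can fall back. */
    private static Method findOptionalMethod(Class<?> target, String name, Class<?>... params) {
        try {
            // Same idea as the helper in the log: ask the class for the method directly.
            return target.getDeclaredMethod(name, params);
        } catch (NoSuchMethodException e) {
            // Expected on older versions of the dependency; not an error, just "use the other code path".
            return null;
        }
    }

    public static void main(String[] args) {
        // Hypothetical probes against String, purely for illustration: one method exists, one does not.
        Method present = findOptionalMethod(String.class, "isBlank");
        Method absent  = findOptionalMethod(String.class, "decryptEncryptedDataEncryptionKey");

        System.out.println("isBlank available: " + (present != null));
        System.out.println("fallback needed:   " + (absent == null));
    }
}

The stack trace above reaches the helper through Class.forName, which suggests the probe runs once when the class is first loaded, so the reflective lookup cost is paid a single time per JVM.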
2024-11-17T21:15:07,813 DEBUG [AsyncFSWAL-1-1 {}] asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper(816): SASL client skipping handshake in unsecured configuration for addr = /127.0.0.1, datanodeId = DatanodeInfoWithStorage[127.0.0.1:43941,DS-0dc17499-73e1-4891-a8aa-a698546e6351,DISK] 2024-11-17T21:15:07,813 DEBUG [AsyncFSWAL-1-1 {}] asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper(816): SASL client skipping handshake in unsecured configuration for addr = /127.0.0.1, datanodeId = DatanodeInfoWithStorage[127.0.0.1:41963,DS-dd8c75c6-292f-4f1f-bb94-742d1fc7d2f4,DISK] 2024-11-17T21:15:07,826 INFO [Time-limited test {}] wal.AbstractFSWAL(841): New WAL /user/jenkins/test-data/3199d189-146f-05cf-9a4b-e38ac6408a90/WALs/hregion-70906617/hregion-70906617.1731878107744 2024-11-17T21:15:07,827 DEBUG [Time-limited test {}] wal.AbstractFSWAL(925): Create new AsyncFSWAL writer with pipeline: [(127.0.0.1/127.0.0.1:33721:33721),(127.0.0.1/127.0.0.1:38217:38217),(127.0.0.1/127.0.0.1:45905:45905)] 2024-11-17T21:15:07,827 INFO [Time-limited test {}] regionserver.HRegion(7106): creating {ENCODED => e95f5f3e5067be23eeab087b13db3d0f, NAME => 'testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1731878107702.e95f5f3e5067be23eeab087b13db3d0f.', STARTKEY => '', ENDKEY => ''}, tableDescriptor='testFlushSequenceIdIsGreaterThanAllEditsInHFile', {NAME => 'f', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'}, regionDir=hdfs://localhost:37527/user/jenkins/test-data/a45ee5a3-5b69-e870-43d7-9500a26edffb 2024-11-17T21:15:07,837 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:41963 is added to blk_1073741862_1040 (size=82) 2024-11-17T21:15:07,837 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43941 is added to blk_1073741862_1040 (size=82) 2024-11-17T21:15:07,838 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40579 is added to blk_1073741862_1040 (size=82) 2024-11-17T21:15:07,838 DEBUG [Time-limited test {}] regionserver.HRegion(894): Instantiated testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1731878107702.e95f5f3e5067be23eeab087b13db3d0f.; StoreHotnessProtector, parallelPutToStoreThreadLimit=0 ; minColumnNum=100 ; preparePutThreadLimit=0 ; hotProtect now disable 2024-11-17T21:15:07,840 INFO [StoreOpener-e95f5f3e5067be23eeab087b13db3d0f-1 {}] regionserver.HStore(400): Created cacheConfig: cacheDataOnRead=true, cacheDataOnWrite=false, cacheIndexesOnWrite=false, cacheBloomsOnWrite=false, cacheEvictOnClose=false, cacheDataCompressed=false, prefetchOnOpen=false, for column family f of region e95f5f3e5067be23eeab087b13db3d0f 2024-11-17T21:15:07,842 INFO [StoreOpener-e95f5f3e5067be23eeab087b13db3d0f-1 {}] compactions.CompactionConfiguration(181): size [minCompactSize:128 MB, maxCompactSize:8.00 EB, offPeakMaxCompactSize:8.00 EB); files [minFilesToCompact:3, maxFilesToCompact:10); ratio 1.200000; off-peak ratio 5.000000; throttle point 2684354560; major period 604800000, major jitter 0.500000, min locality to compact 0.000000; tiered compaction: max_age 9223372036854775807, incoming window min 6, compaction policy for tiered window org.apache.hadoop.hbase.regionserver.compactions.ExploringCompactionPolicy, single output for minor 
true, compaction window factory org.apache.hadoop.hbase.regionserver.compactions.ExponentialCompactionWindowFactory, region e95f5f3e5067be23eeab087b13db3d0f columnFamilyName f 2024-11-17T21:15:07,842 DEBUG [StoreOpener-e95f5f3e5067be23eeab087b13db3d0f-1 {}] storefiletracker.StoreFileTrackerFactory(122): instantiating StoreFileTracker impl org.apache.hadoop.hbase.regionserver.storefiletracker.DefaultStoreFileTracker 2024-11-17T21:15:07,843 INFO [StoreOpener-e95f5f3e5067be23eeab087b13db3d0f-1 {}] regionserver.HStore(327): Store=e95f5f3e5067be23eeab087b13db3d0f/f, memstore type=DefaultMemStore, storagePolicy=NONE, verifyBulkLoads=false, parallelPutCountPrintThreshold=50, encoding=NONE, compression=NONE 2024-11-17T21:15:07,844 DEBUG [Time-limited test {}] regionserver.HRegion(5301): Found 0 recovered edits file(s) under hdfs://localhost:37527/user/jenkins/test-data/a45ee5a3-5b69-e870-43d7-9500a26edffb/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/e95f5f3e5067be23eeab087b13db3d0f 2024-11-17T21:15:07,844 DEBUG [Time-limited test {}] regionserver.HRegion(5301): Found 0 recovered edits file(s) under hdfs://localhost:37527/user/jenkins/test-data/a45ee5a3-5b69-e870-43d7-9500a26edffb/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/e95f5f3e5067be23eeab087b13db3d0f 2024-11-17T21:15:07,845 DEBUG [Time-limited test {}] regionserver.HRegion(5301): Found 0 recovered edits file(s) under hdfs://localhost:37527/user/jenkins/test-data/3199d189-146f-05cf-9a4b-e38ac6408a90/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/e95f5f3e5067be23eeab087b13db3d0f 2024-11-17T21:15:07,848 DEBUG [Time-limited test {}] regionserver.HRegion(1085): writing seq id for e95f5f3e5067be23eeab087b13db3d0f 2024-11-17T21:15:07,851 DEBUG [Time-limited test {}] wal.WALSplitUtil(409): Wrote file=hdfs://localhost:37527/user/jenkins/test-data/3199d189-146f-05cf-9a4b-e38ac6408a90/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/e95f5f3e5067be23eeab087b13db3d0f/recovered.edits/1.seqid, newMaxSeqId=1, maxSeqId=-1 2024-11-17T21:15:07,851 INFO [Time-limited test {}] regionserver.HRegion(1102): Opened e95f5f3e5067be23eeab087b13db3d0f; next sequenceid=2; SteppingSplitPolicysuper{IncreasingToUpperBoundRegionSplitPolicy{initialSize=268435456, ConstantSizeRegionSplitPolicy{desiredMaxFileSize=65019375, jitterRate=-0.031135812401771545}}}, FlushLargeStoresPolicy{flushSizeLowerBound=-1} 2024-11-17T21:15:07,855 DEBUG [Time-limited test {}] regionserver.HRegion(1001): Region open journal for e95f5f3e5067be23eeab087b13db3d0f: 2024-11-17T21:15:07,855 DEBUG [Time-limited test {}] regionserver.HRegion(1681): Closing e95f5f3e5067be23eeab087b13db3d0f, disabling compactions & flushes 2024-11-17T21:15:07,855 INFO [Time-limited test {}] regionserver.HRegion(1703): Closing region testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1731878107702.e95f5f3e5067be23eeab087b13db3d0f. 2024-11-17T21:15:07,855 DEBUG [Time-limited test {}] regionserver.HRegion(1724): Waiting without time limit for close lock on testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1731878107702.e95f5f3e5067be23eeab087b13db3d0f. 2024-11-17T21:15:07,855 DEBUG [Time-limited test {}] regionserver.HRegion(1791): Acquired close lock on testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1731878107702.e95f5f3e5067be23eeab087b13db3d0f. after waiting 0 ms 2024-11-17T21:15:07,855 DEBUG [Time-limited test {}] regionserver.HRegion(1801): Updates disabled for region testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1731878107702.e95f5f3e5067be23eeab087b13db3d0f. 
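The "creating {ENCODED => e95f5f3e5067be23eeab087b13db3d0f, ...}" entry above prints the table descriptor used by testFlushSequenceIdIsGreaterThanAllEditsInHFile: a single column family 'f' with VERSIONS => '1', BLOOMFILTER => 'ROW', BLOCKSIZE => '65536', and no compression or data block encoding. A descriptor with those attributes would normally be assembled with the HBase 2.x builder API roughly as sketched below; this is a hedged illustration, not the test's actual code.

import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.regionserver.BloomType;
import org.apache.hadoop.hbase.util.Bytes;

public class DescriptorSketch {
    // Illustrative sketch of a descriptor matching the attributes printed in the log line above.
    public static TableDescriptor build() {
        ColumnFamilyDescriptor f = ColumnFamilyDescriptorBuilder
            .newBuilder(Bytes.toBytes("f"))
            .setMaxVersions(1)                              // VERSIONS => '1'
            .setBloomFilterType(BloomType.ROW)              // BLOOMFILTER => 'ROW'
            .setBlocksize(64 * 1024)                        // BLOCKSIZE => '65536'
            .setCompressionType(Compression.Algorithm.NONE) // COMPRESSION => 'NONE'
            .setDataBlockEncoding(DataBlockEncoding.NONE)   // DATA_BLOCK_ENCODING => 'NONE'
            .build();

        return TableDescriptorBuilder
            .newBuilder(TableName.valueOf("testFlushSequenceIdIsGreaterThanAllEditsInHFile"))
            .setColumnFamily(f)
            .build();
    }
}

The compaction and store settings printed alongside it (ExploringCompactionPolicy, DefaultMemStore, DefaultStoreFileTracker) appear to be the defaults that fall out of such a descriptor plus the test configuration rather than anything the descriptor sets explicitly.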
2024-11-17T21:15:07,856 INFO [Time-limited test {}] regionserver.HRegion(1922): Closed testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1731878107702.e95f5f3e5067be23eeab087b13db3d0f. 2024-11-17T21:15:07,856 DEBUG [Time-limited test {}] regionserver.HRegion(1635): Region close journal for e95f5f3e5067be23eeab087b13db3d0f: 2024-11-17T21:15:07,861 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40579 is added to blk_1073741861_1039 (size=93) 2024-11-17T21:15:07,861 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:41963 is added to blk_1073741861_1039 (size=93) 2024-11-17T21:15:07,862 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43941 is added to blk_1073741861_1039 (size=93) 2024-11-17T21:15:07,866 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1071): Moved 1 WAL file(s) to /user/jenkins/test-data/3199d189-146f-05cf-9a4b-e38ac6408a90/oldWALs 2024-11-17T21:15:07,866 INFO [Time-limited test {}] wal.AbstractFSWAL(1074): Closed WAL: AsyncFSWAL hregion-70906617:(num 1731878107744) 2024-11-17T21:15:07,869 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911. 2024-11-17T21:15:07,869 INFO [Time-limited test {}] wal.AbstractFSWAL(500): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:37527/user/jenkins/test-data/3199d189-146f-05cf-9a4b-e38ac6408a90/testFlushSequenceIdIsGreaterThanAllEditsInHFile, archiveDir=hdfs://localhost:37527/user/jenkins/test-data/3199d189-146f-05cf-9a4b-e38ac6408a90/testFlushSequenceIdIsGreaterThanAllEditsInHFile, maxLogs=1760 2024-11-17T21:15:07,870 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1731878107870 2024-11-17T21:15:07,877 INFO [Time-limited test {}] wal.AbstractFSWAL(841): New WAL /user/jenkins/test-data/3199d189-146f-05cf-9a4b-e38ac6408a90/testFlushSequenceIdIsGreaterThanAllEditsInHFile/wal.1731878107870 2024-11-17T21:15:07,877 DEBUG [Time-limited test {}] wal.AbstractFSWAL(925): Create new writer with pipeline: [(127.0.0.1/127.0.0.1:45905:45905),(127.0.0.1/127.0.0.1:38217:38217),(127.0.0.1/127.0.0.1:33721:33721)] 2024-11-17T21:15:07,878 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-11-17T21:15:07,879 DEBUG [Time-limited test {}] regionserver.HRegion(7285): Opening region: {ENCODED => e95f5f3e5067be23eeab087b13db3d0f, NAME => 'testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1731878107702.e95f5f3e5067be23eeab087b13db3d0f.', STARTKEY => '', ENDKEY => ''} 2024-11-17T21:15:07,879 DEBUG [Time-limited test {}] regionserver.HRegion(894): Instantiated testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1731878107702.e95f5f3e5067be23eeab087b13db3d0f.; StoreHotnessProtector, parallelPutToStoreThreadLimit=0 ; minColumnNum=100 ; preparePutThreadLimit=0 ; hotProtect now disable 2024-11-17T21:15:07,879 DEBUG [Time-limited test {}] regionserver.HRegion(7327): checking encryption for e95f5f3e5067be23eeab087b13db3d0f 2024-11-17T21:15:07,879 DEBUG [Time-limited test {}] regionserver.HRegion(7330): checking classloading for e95f5f3e5067be23eeab087b13db3d0f 2024-11-17T21:15:07,881 INFO [StoreOpener-e95f5f3e5067be23eeab087b13db3d0f-1 {}] regionserver.HStore(400): Created 
cacheConfig: cacheDataOnRead=true, cacheDataOnWrite=false, cacheIndexesOnWrite=false, cacheBloomsOnWrite=false, cacheEvictOnClose=false, cacheDataCompressed=false, prefetchOnOpen=false, for column family f of region e95f5f3e5067be23eeab087b13db3d0f 2024-11-17T21:15:07,883 INFO [StoreOpener-e95f5f3e5067be23eeab087b13db3d0f-1 {}] compactions.CompactionConfiguration(181): size [minCompactSize:128 MB, maxCompactSize:8.00 EB, offPeakMaxCompactSize:8.00 EB); files [minFilesToCompact:3, maxFilesToCompact:10); ratio 1.200000; off-peak ratio 5.000000; throttle point 2684354560; major period 604800000, major jitter 0.500000, min locality to compact 0.000000; tiered compaction: max_age 9223372036854775807, incoming window min 6, compaction policy for tiered window org.apache.hadoop.hbase.regionserver.compactions.ExploringCompactionPolicy, single output for minor true, compaction window factory org.apache.hadoop.hbase.regionserver.compactions.ExponentialCompactionWindowFactory, region e95f5f3e5067be23eeab087b13db3d0f columnFamilyName f 2024-11-17T21:15:07,883 DEBUG [StoreOpener-e95f5f3e5067be23eeab087b13db3d0f-1 {}] storefiletracker.StoreFileTrackerFactory(122): instantiating StoreFileTracker impl org.apache.hadoop.hbase.regionserver.storefiletracker.DefaultStoreFileTracker 2024-11-17T21:15:07,883 INFO [StoreOpener-e95f5f3e5067be23eeab087b13db3d0f-1 {}] regionserver.HStore(327): Store=e95f5f3e5067be23eeab087b13db3d0f/f, memstore type=DefaultMemStore, storagePolicy=NONE, verifyBulkLoads=false, parallelPutCountPrintThreshold=50, encoding=NONE, compression=NONE 2024-11-17T21:15:07,884 DEBUG [Time-limited test {}] regionserver.HRegion(5301): Found 0 recovered edits file(s) under hdfs://localhost:37527/user/jenkins/test-data/a45ee5a3-5b69-e870-43d7-9500a26edffb/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/e95f5f3e5067be23eeab087b13db3d0f 2024-11-17T21:15:07,885 DEBUG [Time-limited test {}] regionserver.HRegion(5301): Found 0 recovered edits file(s) under hdfs://localhost:37527/user/jenkins/test-data/a45ee5a3-5b69-e870-43d7-9500a26edffb/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/e95f5f3e5067be23eeab087b13db3d0f 2024-11-17T21:15:07,886 DEBUG [Time-limited test {}] regionserver.HRegion(5301): Found 0 recovered edits file(s) under hdfs://localhost:37527/user/jenkins/test-data/3199d189-146f-05cf-9a4b-e38ac6408a90/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/e95f5f3e5067be23eeab087b13db3d0f 2024-11-17T21:15:07,889 DEBUG [Time-limited test {}] regionserver.HRegion(1085): writing seq id for e95f5f3e5067be23eeab087b13db3d0f 2024-11-17T21:15:07,890 INFO [Time-limited test {}] regionserver.HRegion(1102): Opened e95f5f3e5067be23eeab087b13db3d0f; next sequenceid=2; SteppingSplitPolicysuper{IncreasingToUpperBoundRegionSplitPolicy{initialSize=268435456, ConstantSizeRegionSplitPolicy{desiredMaxFileSize=64016664, jitterRate=-0.04607737064361572}}}, FlushLargeStoresPolicy{flushSizeLowerBound=-1} 2024-11-17T21:15:07,892 DEBUG [Time-limited test {}] regionserver.HRegion(1001): Region open journal for e95f5f3e5067be23eeab087b13db3d0f: 2024-11-17T21:15:07,908 INFO [Time-limited test {}] hbase.HBaseTestingUtility(451): System.getProperty("hadoop.log.dir") already set to: /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/09645b7e-9829-f1fc-19c7-d9675fc19400/hadoop.log.dir so I do NOT create it in target/test-data/835dac62-1739-375c-75c3-9f712dba0425 2024-11-17T21:15:07,908 WARN [Time-limited test {}] hbase.HBaseTestingUtility(455): 
hadoop.log.dir property value differs in configuration and system: Configuration=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/../logs while System=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/09645b7e-9829-f1fc-19c7-d9675fc19400/hadoop.log.dir Erasing configuration value by system value. 2024-11-17T21:15:07,908 INFO [Time-limited test {}] hbase.HBaseTestingUtility(451): System.getProperty("hadoop.tmp.dir") already set to: /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/09645b7e-9829-f1fc-19c7-d9675fc19400/hadoop.tmp.dir so I do NOT create it in target/test-data/835dac62-1739-375c-75c3-9f712dba0425 2024-11-17T21:15:07,908 WARN [Time-limited test {}] hbase.HBaseTestingUtility(455): hadoop.tmp.dir property value differs in configuration and system: Configuration=/tmp/hadoop-jenkins while System=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/09645b7e-9829-f1fc-19c7-d9675fc19400/hadoop.tmp.dir Erasing configuration value by system value. 2024-11-17T21:15:07,908 DEBUG [Time-limited test {}] hbase.HBaseTestingUtility(348): Setting hbase.rootdir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/835dac62-1739-375c-75c3-9f712dba0425 2024-11-17T21:15:07,931 INFO [Time-limited test {}] regionserver.HRegion(2837): Flushing e95f5f3e5067be23eeab087b13db3d0f 1/1 column families, dataSize=1.14 KB heapSize=2.13 KB 2024-11-17T21:15:08,030 DEBUG [Time-limited test.append-pool-0 {}] wal.AbstractTestFSWAL$1(441): Sleeping before appending 100ms 2024-11-17T21:15:08,131 DEBUG [Time-limited test.append-pool-0 {}] wal.AbstractTestFSWAL$1(441): Sleeping before appending 100ms 2024-11-17T21:15:08,231 DEBUG [Time-limited test.append-pool-0 {}] wal.AbstractTestFSWAL$1(441): Sleeping before appending 100ms 2024-11-17T21:15:08,332 DEBUG [Time-limited test.append-pool-0 {}] wal.AbstractTestFSWAL$1(441): Sleeping before appending 100ms 2024-11-17T21:15:08,432 DEBUG [Time-limited test.append-pool-0 {}] wal.AbstractTestFSWAL$1(441): Sleeping before appending 100ms 2024-11-17T21:15:08,533 DEBUG [Time-limited test.append-pool-0 {}] wal.AbstractTestFSWAL$1(441): Sleeping before appending 100ms 2024-11-17T21:15:08,633 DEBUG [Time-limited test.append-pool-0 {}] wal.AbstractTestFSWAL$1(441): Sleeping before appending 100ms 2024-11-17T21:15:08,734 DEBUG [Time-limited test.append-pool-0 {}] wal.AbstractTestFSWAL$1(441): Sleeping before appending 100ms 2024-11-17T21:15:08,834 DEBUG [Time-limited test.append-pool-0 {}] wal.AbstractTestFSWAL$1(441): Sleeping before appending 100ms 2024-11-17T21:15:08,934 DEBUG [Time-limited test.append-pool-0 {}] wal.AbstractTestFSWAL$1(441): Sleeping before appending 100ms 2024-11-17T21:15:09,035 DEBUG [Time-limited test.append-pool-0 {}] wal.AbstractTestFSWAL$1(441): Sleeping before appending 100ms 2024-11-17T21:15:09,054 DEBUG [Time-limited test {}] hfile.HFileWriterImpl(814): Len of the biggest cell in hdfs://localhost:37527/user/jenkins/test-data/a45ee5a3-5b69-e870-43d7-9500a26edffb/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/e95f5f3e5067be23eeab087b13db3d0f/.tmp/f/b25631cbf174408cb04e32657818904f is 121, key is testFlushSequenceIdIsGreaterThanAllEditsInHFile/f:x0/1731878107908/Put/seqid=0 2024-11-17T21:15:09,062 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40579 is added to blk_1073741864_1042 (size=6333) 
2024-11-17T21:15:09,062 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43941 is added to blk_1073741864_1042 (size=6333) 2024-11-17T21:15:09,062 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:41963 is added to blk_1073741864_1042 (size=6333) 2024-11-17T21:15:09,063 INFO [Time-limited test {}] regionserver.DefaultStoreFlusher(81): Flushed memstore data size=1.14 KB at sequenceid=23 (bloomFilter=true), to=hdfs://localhost:37527/user/jenkins/test-data/a45ee5a3-5b69-e870-43d7-9500a26edffb/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/e95f5f3e5067be23eeab087b13db3d0f/.tmp/f/b25631cbf174408cb04e32657818904f 2024-11-17T21:15:09,072 DEBUG [Time-limited test {}] regionserver.HRegionFileSystem(442): Committing hdfs://localhost:37527/user/jenkins/test-data/a45ee5a3-5b69-e870-43d7-9500a26edffb/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/e95f5f3e5067be23eeab087b13db3d0f/.tmp/f/b25631cbf174408cb04e32657818904f as hdfs://localhost:37527/user/jenkins/test-data/a45ee5a3-5b69-e870-43d7-9500a26edffb/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/e95f5f3e5067be23eeab087b13db3d0f/f/b25631cbf174408cb04e32657818904f 2024-11-17T21:15:09,082 INFO [Time-limited test {}] regionserver.HStore$StoreFlusherImpl(1989): Added hdfs://localhost:37527/user/jenkins/test-data/a45ee5a3-5b69-e870-43d7-9500a26edffb/data/default/testFlushSequenceIdIsGreaterThanAllEditsInHFile/e95f5f3e5067be23eeab087b13db3d0f/f/b25631cbf174408cb04e32657818904f, entries=10, sequenceid=23, filesize=6.2 K 2024-11-17T21:15:09,182 DEBUG [Time-limited test.append-pool-0 {}] wal.AbstractTestFSWAL$1(441): Sleeping before appending 100ms 2024-11-17T21:15:09,184 INFO [Time-limited test {}] regionserver.HRegion(3040): Finished flush of dataSize ~1.14 KB/1170, heapSize ~2.11 KB/2160, currentSize=0 B/0 for e95f5f3e5067be23eeab087b13db3d0f in 1254ms, sequenceid=23, compaction requested=false 2024-11-17T21:15:09,184 DEBUG [Time-limited test {}] regionserver.HRegion(2538): Flush status journal for e95f5f3e5067be23eeab087b13db3d0f: 2024-11-17T21:15:09,184 DEBUG [Time-limited test {}] regionserver.HRegion(1681): Closing e95f5f3e5067be23eeab087b13db3d0f, disabling compactions & flushes 2024-11-17T21:15:09,184 INFO [Time-limited test {}] regionserver.HRegion(1703): Closing region testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1731878107702.e95f5f3e5067be23eeab087b13db3d0f. 2024-11-17T21:15:09,184 DEBUG [Time-limited test {}] regionserver.HRegion(1724): Waiting without time limit for close lock on testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1731878107702.e95f5f3e5067be23eeab087b13db3d0f. 2024-11-17T21:15:09,184 DEBUG [Time-limited test {}] regionserver.HRegion(1791): Acquired close lock on testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1731878107702.e95f5f3e5067be23eeab087b13db3d0f. after waiting 0 ms 2024-11-17T21:15:09,184 DEBUG [Time-limited test {}] regionserver.HRegion(1801): Updates disabled for region testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1731878107702.e95f5f3e5067be23eeab087b13db3d0f. 2024-11-17T21:15:09,185 INFO [Time-limited test {}] regionserver.HRegion(1922): Closed testFlushSequenceIdIsGreaterThanAllEditsInHFile,,1731878107702.e95f5f3e5067be23eeab087b13db3d0f. 
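The lines above trace one complete memstore flush for region e95f5f3e5067be23eeab087b13db3d0f: HFileWriterImpl reports the largest cell, DefaultStoreFlusher writes the temporary HFile under .tmp/f, HRegionFileSystem commits it into the f family directory, HStore registers it (entries=10, sequenceid=23, filesize=6.2 K), and HRegion logs the finished flush. Driving that path from a test that opens a region directly, as this one does through HBaseTestingUtility.createRegionAndWAL (visible in the earlier stack trace), usually looks roughly like the sketch below; it is an assumption about the shape of the test, not a quotation of it.

import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.util.Bytes;

public class FlushSketch {
    /** Write a few cells to family 'f' and force a memstore flush, mirroring the flush traced above. */
    static void writeAndFlush(HRegion region) throws java.io.IOException {
        byte[] family = Bytes.toBytes("f");
        for (int i = 0; i < 10; i++) {
            // The log shows keys like testFlushSequenceIdIsGreaterThanAllEditsInHFile/f:x0/.../Put.
            Put put = new Put(Bytes.toBytes("testFlushSequenceIdIsGreaterThanAllEditsInHFile"));
            put.addColumn(family, Bytes.toBytes("x" + i), Bytes.toBytes(i));
            region.put(put);
        }
        // Flush all stores; the resulting HFile carries the flush sequence id (23 in the log above).
        region.flush(true);
    }
}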
2024-11-17T21:15:09,185 DEBUG [Time-limited test {}] regionserver.HRegion(1635): Region close journal for e95f5f3e5067be23eeab087b13db3d0f: 2024-11-17T21:15:09,186 DEBUG [WAL-Shutdown-0 {}] wal.FSHLog(499): Closing WAL writer in /user/jenkins/test-data/3199d189-146f-05cf-9a4b-e38ac6408a90/testFlushSequenceIdIsGreaterThanAllEditsInHFile 2024-11-17T21:15:09,188 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40579 is added to blk_1073741863_1041 (size=16537) 2024-11-17T21:15:09,188 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43941 is added to blk_1073741863_1041 (size=16537) 2024-11-17T21:15:09,188 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:41963 is added to blk_1073741863_1041 (size=16537) 2024-11-17T21:15:09,191 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1071): Moved 1 WAL file(s) to /user/jenkins/test-data/3199d189-146f-05cf-9a4b-e38ac6408a90/testFlushSequenceIdIsGreaterThanAllEditsInHFile 2024-11-17T21:15:09,191 INFO [Time-limited test {}] wal.AbstractFSWAL(1074): Closed WAL: wal:(num 1731878107870) 2024-11-17T21:15:09,199 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testFlushSequenceIdIsGreaterThanAllEditsInHFile Thread=174 (was 173) Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_900039209_22 at /127.0.0.1:57252 [Waiting for operation #12] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244) java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:263) java.base@17.0.11/java.io.DataInputStream.readUnsignedShort(DataInputStream.java:334) java.base@17.0.11/java.io.DataInputStream.readShort(DataInputStream.java:312) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.readOp(Receiver.java:72) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:273) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: AsyncFSWAL-1-1 java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:146) app//org.apache.hbase.thirdparty.io.netty.channel.nio.SelectedSelectionKeySetSelector.select(SelectedSelectionKeySetSelector.java:68) app//org.apache.hbase.thirdparty.io.netty.channel.nio.NioEventLoop.select(NioEventLoop.java:879) app//org.apache.hbase.thirdparty.io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:526) app//org.apache.hbase.thirdparty.io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:997) 
app//org.apache.hbase.thirdparty.io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74) app//org.apache.hbase.thirdparty.io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_-1998723312_22 at /127.0.0.1:44236 [Waiting for operation #6] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244) java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:263) java.base@17.0.11/java.io.DataInputStream.readUnsignedShort(DataInputStream.java:334) java.base@17.0.11/java.io.DataInputStream.readShort(DataInputStream.java:312) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.readOp(Receiver.java:72) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:273) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) - Thread LEAK? -, OpenFileDescriptor=426 (was 424) - OpenFileDescriptor LEAK? -, MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=114 (was 114), ProcessCount=11 (was 11), AvailableMemoryMB=11663 (was 11676) 2024-11-17T21:15:09,207 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testFailedToCreateWALIfParentRenamed Thread=174, OpenFileDescriptor=426, MaxFileDescriptor=1048576, SystemLoadAverage=114, ProcessCount=11, AvailableMemoryMB=11662 2024-11-17T21:15:09,217 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40579 is added to blk_1073741865_1043 (size=7) 2024-11-17T21:15:09,217 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:41963 is added to blk_1073741865_1043 (size=7) 2024-11-17T21:15:09,218 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43941 is added to blk_1073741865_1043 (size=7) 2024-11-17T21:15:09,218 INFO [Time-limited test {}] util.FSUtils(490): Created version file at hdfs://localhost:37527/user/jenkins/test-data/a45ee5a3-5b69-e870-43d7-9500a26edffb with version=8 2024-11-17T21:15:09,219 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-11-17T21:15:09,220 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-11-17T21:15:09,226 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911. 
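The ResourceChecker lines above snapshot thread count, open file descriptors, system load, process count, and available memory before and after each test method (for example Thread=174 (was 173), OpenFileDescriptor=426 (was 424)) so that leaked resources show up as a diff. The sketch below shows one way to take the same two core measurements from a stock JDK; it is not HBase's ResourceChecker, and the cast to UnixOperatingSystemMXBean assumes a Unix-like JVM such as the Linux worker in this run.

import java.lang.management.ManagementFactory;
import com.sun.management.UnixOperatingSystemMXBean;

public class ResourceSnapshot {
    public static void main(String[] args) {
        // Live JVM thread count, comparable to the "Thread=..." figure in the log.
        int threads = ManagementFactory.getThreadMXBean().getThreadCount();

        // Open file descriptors, comparable to "OpenFileDescriptor=..."; only available on Unix-like JVMs.
        long openFds = -1;
        Object os = ManagementFactory.getOperatingSystemMXBean();
        if (os instanceof UnixOperatingSystemMXBean) {
            openFds = ((UnixOperatingSystemMXBean) os).getOpenFileDescriptorCount();
        }

        System.out.println("threads=" + threads + ", openFileDescriptors=" + openFds);
    }
}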
2024-11-17T21:15:09,226 INFO [Time-limited test {}] wal.AbstractFSWAL(500): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:37527/user/jenkins/test-data/7e4dd506-1207-ecf0-3f67-43b490dc501f/testFailedToCreateWALIfParentRenamed, archiveDir=hdfs://localhost:37527/user/jenkins/test-data/7e4dd506-1207-ecf0-3f67-43b490dc501f/oldWALs, maxLogs=1760 2024-11-17T21:15:09,227 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1731878109227 2024-11-17T21:15:09,234 INFO [Time-limited test {}] wal.AbstractFSWAL(841): New WAL /user/jenkins/test-data/7e4dd506-1207-ecf0-3f67-43b490dc501f/testFailedToCreateWALIfParentRenamed/wal.1731878109227 2024-11-17T21:15:09,234 DEBUG [Time-limited test {}] wal.AbstractFSWAL(925): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:45905:45905),(127.0.0.1/127.0.0.1:38217:38217),(127.0.0.1/127.0.0.1:33721:33721)] 2024-11-17T21:15:09,234 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1731878109234 2024-11-17T21:15:09,241 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1731878109235 2024-11-17T21:15:09,246 WARN [Time-limited test {}] wal.AbstractProtobufLogWriter(199): Init output failed, path=hdfs://localhost:37527/user/jenkins/test-data/7e4dd506-1207-ecf0-3f67-43b490dc501f/testFailedToCreateWALIfParentRenamed/wal.1731878109235 java.io.FileNotFoundException: Parent directory doesn't exist: /user/jenkins/test-data/7e4dd506-1207-ecf0-3f67-43b490dc501f/testFailedToCreateWALIfParentRenamed at org.apache.hadoop.hdfs.server.namenode.FSDirectory.verifyParentDir(FSDirectory.java:2037) at org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.resolvePathForStartFile(FSDirWriteFileOp.java:338) at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFileInt(FSNamesystem.java:2773) at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFile(FSNamesystem.java:2713) at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.create(NameNodeRpcServer.java:830) at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.create(ClientNamenodeProtocolServerSideTranslatorPB.java:504) at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java) at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:621) at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:589) at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:573) at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1227) at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1246) at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1169) at java.base/java.security.AccessController.doPrivileged(AccessController.java:712) at java.base/javax.security.auth.Subject.doAs(Subject.java:439) at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1953) at org.apache.hadoop.ipc.Server$Handler.run(Server.java:3198) at jdk.internal.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) ~[?:?] at jdk.internal.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:77) ~[?:?] at jdk.internal.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45) ~[?:?] 
at java.lang.reflect.Constructor.newInstanceWithCaller(Constructor.java:499) ~[?:?] at java.lang.reflect.Constructor.newInstance(Constructor.java:480) ~[?:?] at org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:121) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:88) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.hdfs.DFSOutputStream.newStreamForCreate(DFSOutputStream.java:300) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.DFSClient.create(DFSClient.java:1300) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.DistributedFileSystem$10.doCall(DistributedFileSystem.java:674) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.DistributedFileSystem$10.doCall(DistributedFileSystem.java:671) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.hdfs.DistributedFileSystem.createNonRecursive(DistributedFileSystem.java:692) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.DistributedFileSystem.access$500(DistributedFileSystem.java:148) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.DistributedFileSystem$HdfsDataOutputStreamBuilder.build(DistributedFileSystem.java:3873) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hbase.util.CommonFSUtils.createForWal(CommonFSUtils.java:832) ~[hbase-common-2.7.0-SNAPSHOT.jar:2.7.0-SNAPSHOT] at org.apache.hadoop.hbase.regionserver.wal.ProtobufLogWriter.initOutput(ProtobufLogWriter.java:103) ~[classes/:?] at org.apache.hadoop.hbase.regionserver.wal.AbstractProtobufLogWriter.init(AbstractProtobufLogWriter.java:171) ~[classes/:?] at org.apache.hadoop.hbase.wal.FSHLogProvider.createWriter(FSHLogProvider.java:81) ~[classes/:?] at org.apache.hadoop.hbase.regionserver.wal.FSHLog.createWriterInstance(FSHLog.java:307) ~[classes/:?] at org.apache.hadoop.hbase.regionserver.wal.FSHLog.createWriterInstance(FSHLog.java:70) ~[classes/:?] at org.apache.hadoop.hbase.regionserver.wal.AbstractTestFSWAL.testFailedToCreateWALIfParentRenamed(AbstractTestFSWAL.java:405) ~[test-classes/:?] at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?] at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] 
at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:59) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:56) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.ExpectException.evaluate(ExpectException.java:19) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:26) ~[junit-4.13.2.jar:4.13.2] at org.junit.rules.TestWatcher$1.evaluate(TestWatcher.java:61) ~[junit-4.13.2.jar:4.13.2] at org.junit.rules.TestWatcher$1.evaluate(TestWatcher.java:61) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner$3.evaluate(ParentRunner.java:306) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.BlockJUnit4ClassRunner$1.evaluate(BlockJUnit4ClassRunner.java:100) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:366) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:103) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:63) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner$4.run(ParentRunner.java:331) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:79) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:329) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner.access$100(ParentRunner.java:66) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:293) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:26) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:27) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:299) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:293) ~[junit-4.13.2.jar:4.13.2] at java.util.concurrent.FutureTask.run(FutureTask.java:264) ~[?:?] at java.lang.Thread.run(Thread.java:840) ~[?:?] 
Caused by: org.apache.hadoop.ipc.RemoteException: Parent directory doesn't exist: /user/jenkins/test-data/7e4dd506-1207-ecf0-3f67-43b490dc501f/testFailedToCreateWALIfParentRenamed at org.apache.hadoop.hdfs.server.namenode.FSDirectory.verifyParentDir(FSDirectory.java:2037) at org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.resolvePathForStartFile(FSDirWriteFileOp.java:338) at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFileInt(FSNamesystem.java:2773) at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFile(FSNamesystem.java:2713) at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.create(NameNodeRpcServer.java:830) at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.create(ClientNamenodeProtocolServerSideTranslatorPB.java:504) at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java) at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:621) at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:589) at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:573) at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1227) at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1246) at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1169) at java.base/java.security.AccessController.doPrivileged(AccessController.java:712) at java.base/javax.security.auth.Subject.doAs(Subject.java:439) at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1953) at org.apache.hadoop.ipc.Server$Handler.run(Server.java:3198) at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1584) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.ipc.Client.call(Client.java:1529) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.ipc.Client.call(Client.java:1426) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:258) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:139) ~[hadoop-common-3.4.1.jar:?] at jdk.proxy2.$Proxy44.create(Unknown Source) ~[?:?] at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.lambda$create$2(ClientNamenodeProtocolTranslatorPB.java:381) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc(ShadedProtobufHelper.java:160) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.create(ClientNamenodeProtocolTranslatorPB.java:381) ~[hadoop-hdfs-client-3.4.1.jar:?] at jdk.internal.reflect.GeneratedMethodAccessor4.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:437) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:170) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:162) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:100) ~[hadoop-common-3.4.1.jar:?] 
at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:366) ~[hadoop-common-3.4.1.jar:?] at jdk.proxy2.$Proxy45.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor4.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor4.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor4.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor4.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor4.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor4.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor4.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor4.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor4.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] 
at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor4.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor4.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor4.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor4.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor4.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor4.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor4.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor4.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor4.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] 
at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor4.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor4.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor4.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at org.apache.hadoop.hdfs.DFSOutputStream.newStreamForCreate(DFSOutputStream.java:294) ~[hadoop-hdfs-client-3.4.1.jar:?] ... 42 more 2024-11-17T21:15:09,247 DEBUG [Time-limited test {}] wal.FSHLogProvider(92): Error instantiating log writer. java.io.FileNotFoundException: Parent directory doesn't exist: /user/jenkins/test-data/7e4dd506-1207-ecf0-3f67-43b490dc501f/testFailedToCreateWALIfParentRenamed at org.apache.hadoop.hdfs.server.namenode.FSDirectory.verifyParentDir(FSDirectory.java:2037) at org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.resolvePathForStartFile(FSDirWriteFileOp.java:338) at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFileInt(FSNamesystem.java:2773) at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFile(FSNamesystem.java:2713) at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.create(NameNodeRpcServer.java:830) at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.create(ClientNamenodeProtocolServerSideTranslatorPB.java:504) at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java) at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:621) at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:589) at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:573) at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1227) at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1246) at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1169) at java.base/java.security.AccessController.doPrivileged(AccessController.java:712) at java.base/javax.security.auth.Subject.doAs(Subject.java:439) at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1953) at org.apache.hadoop.ipc.Server$Handler.run(Server.java:3198) at jdk.internal.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) ~[?:?] at jdk.internal.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:77) ~[?:?] at jdk.internal.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45) ~[?:?] 
at java.lang.reflect.Constructor.newInstanceWithCaller(Constructor.java:499) ~[?:?] at java.lang.reflect.Constructor.newInstance(Constructor.java:480) ~[?:?] at org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:121) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:88) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.hdfs.DFSOutputStream.newStreamForCreate(DFSOutputStream.java:300) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.DFSClient.create(DFSClient.java:1300) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.DistributedFileSystem$10.doCall(DistributedFileSystem.java:674) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.DistributedFileSystem$10.doCall(DistributedFileSystem.java:671) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.hdfs.DistributedFileSystem.createNonRecursive(DistributedFileSystem.java:692) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.DistributedFileSystem.access$500(DistributedFileSystem.java:148) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.DistributedFileSystem$HdfsDataOutputStreamBuilder.build(DistributedFileSystem.java:3873) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hbase.util.CommonFSUtils.createForWal(CommonFSUtils.java:832) ~[hbase-common-2.7.0-SNAPSHOT.jar:2.7.0-SNAPSHOT] at org.apache.hadoop.hbase.regionserver.wal.ProtobufLogWriter.initOutput(ProtobufLogWriter.java:103) ~[classes/:?] at org.apache.hadoop.hbase.regionserver.wal.AbstractProtobufLogWriter.init(AbstractProtobufLogWriter.java:171) ~[classes/:?] at org.apache.hadoop.hbase.wal.FSHLogProvider.createWriter(FSHLogProvider.java:81) ~[classes/:?] at org.apache.hadoop.hbase.regionserver.wal.FSHLog.createWriterInstance(FSHLog.java:307) ~[classes/:?] at org.apache.hadoop.hbase.regionserver.wal.FSHLog.createWriterInstance(FSHLog.java:70) ~[classes/:?] at org.apache.hadoop.hbase.regionserver.wal.AbstractTestFSWAL.testFailedToCreateWALIfParentRenamed(AbstractTestFSWAL.java:405) ~[test-classes/:?] at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?] at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] 
at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:59) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:56) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.ExpectException.evaluate(ExpectException.java:19) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:26) ~[junit-4.13.2.jar:4.13.2] at org.junit.rules.TestWatcher$1.evaluate(TestWatcher.java:61) ~[junit-4.13.2.jar:4.13.2] at org.junit.rules.TestWatcher$1.evaluate(TestWatcher.java:61) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner$3.evaluate(ParentRunner.java:306) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.BlockJUnit4ClassRunner$1.evaluate(BlockJUnit4ClassRunner.java:100) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:366) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:103) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:63) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner$4.run(ParentRunner.java:331) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:79) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:329) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner.access$100(ParentRunner.java:66) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:293) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:26) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:27) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:299) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:293) ~[junit-4.13.2.jar:4.13.2] at java.util.concurrent.FutureTask.run(FutureTask.java:264) ~[?:?] at java.lang.Thread.run(Thread.java:840) ~[?:?] 
Caused by: org.apache.hadoop.ipc.RemoteException: Parent directory doesn't exist: /user/jenkins/test-data/7e4dd506-1207-ecf0-3f67-43b490dc501f/testFailedToCreateWALIfParentRenamed at org.apache.hadoop.hdfs.server.namenode.FSDirectory.verifyParentDir(FSDirectory.java:2037) at org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.resolvePathForStartFile(FSDirWriteFileOp.java:338) at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFileInt(FSNamesystem.java:2773) at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFile(FSNamesystem.java:2713) at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.create(NameNodeRpcServer.java:830) at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.create(ClientNamenodeProtocolServerSideTranslatorPB.java:504) at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java) at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:621) at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:589) at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:573) at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1227) at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1246) at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1169) at java.base/java.security.AccessController.doPrivileged(AccessController.java:712) at java.base/javax.security.auth.Subject.doAs(Subject.java:439) at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1953) at org.apache.hadoop.ipc.Server$Handler.run(Server.java:3198) at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1584) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.ipc.Client.call(Client.java:1529) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.ipc.Client.call(Client.java:1426) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:258) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:139) ~[hadoop-common-3.4.1.jar:?] at jdk.proxy2.$Proxy44.create(Unknown Source) ~[?:?] at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.lambda$create$2(ClientNamenodeProtocolTranslatorPB.java:381) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc(ShadedProtobufHelper.java:160) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.create(ClientNamenodeProtocolTranslatorPB.java:381) ~[hadoop-hdfs-client-3.4.1.jar:?] at jdk.internal.reflect.GeneratedMethodAccessor4.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:437) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:170) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:162) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:100) ~[hadoop-common-3.4.1.jar:?] 
at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:366) ~[hadoop-common-3.4.1.jar:?] at jdk.proxy2.$Proxy45.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor4.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor4.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor4.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor4.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor4.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor4.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor4.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor4.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor4.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] 
at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor4.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor4.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor4.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor4.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor4.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor4.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor4.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor4.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor4.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] 
at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor4.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor4.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor4.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.create(Unknown Source) ~[?:?] at org.apache.hadoop.hdfs.DFSOutputStream.newStreamForCreate(DFSOutputStream.java:294) ~[hadoop-hdfs-client-3.4.1.jar:?] ... 42 more 2024-11-17T21:15:09,257 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testFailedToCreateWALIfParentRenamed Thread=196 (was 174) Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_900039209_22 at /127.0.0.1:44310 [Receiving block BP-754513072-172.17.0.2-1731878086310:blk_1073741867_1045] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244) java.base@17.0.11/java.io.BufferedInputStream.read1(BufferedInputStream.java:284) java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:343) java.base@17.0.11/java.io.DataInputStream.read(DataInputStream.java:151) app//org.apache.hadoop.io.IOUtils.readFully(IOUtils.java:214) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doReadFully(PacketReceiver.java:221) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doRead(PacketReceiver.java:144) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.receiveNextPacket(PacketReceiver.java:119) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receivePacket(BlockReceiver.java:553) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receiveBlock(BlockReceiver.java:1011) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.writeBlock(DataXceiver.java:920) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.opWriteBlock(Receiver.java:176) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.processOp(Receiver.java:110) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:299) 
java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: ResponseProcessor for block BP-754513072-172.17.0.2-1731878086310:blk_1073741866_1044 java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:118) java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82) java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82) app//org.apache.hadoop.hdfs.protocolPB.PBHelperClient.vintPrefixed(PBHelperClient.java:527) app//org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck.readFields(PipelineAck.java:244) app//org.apache.hadoop.hdfs.DataStreamer$ResponseProcessor.run(DataStreamer.java:1180) Potentially hanging thread: sync.2 java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method) java.base@17.0.11/java.util.concurrent.locks.LockSupport.park(LockSupport.java:341) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionNode.block(AbstractQueuedSynchronizer.java:506) java.base@17.0.11/java.util.concurrent.ForkJoinPool.unmanagedBlock(ForkJoinPool.java:3465) java.base@17.0.11/java.util.concurrent.ForkJoinPool.managedBlock(ForkJoinPool.java:3436) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:1625) java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.take(LinkedBlockingQueue.java:435) app//org.apache.hadoop.hbase.regionserver.wal.FSHLog$SyncRunner.run(FSHLog.java:650) Potentially hanging thread: DataStreamer for file /user/jenkins/test-data/7e4dd506-1207-ecf0-3f67-43b490dc501f/testFailedToCreateWALIfParentRenamed/wal.1731878109234 block BP-754513072-172.17.0.2-1731878086310:blk_1073741867_1045 java.base@17.0.11/java.lang.Object.wait(Native Method) app//org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:717) Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_900039209_22 at /127.0.0.1:57672 [Receiving block BP-754513072-172.17.0.2-1731878086310:blk_1073741866_1044] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244) java.base@17.0.11/java.io.BufferedInputStream.read1(BufferedInputStream.java:284) java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:343) 
java.base@17.0.11/java.io.DataInputStream.read(DataInputStream.java:151) app//org.apache.hadoop.io.IOUtils.readFully(IOUtils.java:214) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doReadFully(PacketReceiver.java:221) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doRead(PacketReceiver.java:144) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.receiveNextPacket(PacketReceiver.java:119) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receivePacket(BlockReceiver.java:553) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receiveBlock(BlockReceiver.java:1011) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.writeBlock(DataXceiver.java:920) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.opWriteBlock(Receiver.java:176) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.processOp(Receiver.java:110) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:299) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: PacketResponder: BP-754513072-172.17.0.2-1731878086310:blk_1073741867_1045, type=LAST_IN_PIPELINE java.base@17.0.11/java.lang.Object.wait(Native Method) java.base@17.0.11/java.lang.Object.wait(Object.java:338) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.waitForAckHead(BlockReceiver.java:1367) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.run(BlockReceiver.java:1439) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: PacketResponder: BP-754513072-172.17.0.2-1731878086310:blk_1073741866_1044, type=HAS_DOWNSTREAM_IN_PIPELINE, downstreams=1:[127.0.0.1:40579] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:118) java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82) java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82) app//org.apache.hadoop.hdfs.protocolPB.PBHelperClient.vintPrefixed(PBHelperClient.java:527) app//org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck.readFields(PipelineAck.java:244) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.run(BlockReceiver.java:1420) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: PacketResponder: BP-754513072-172.17.0.2-1731878086310:blk_1073741867_1045, type=HAS_DOWNSTREAM_IN_PIPELINE, downstreams=1:[127.0.0.1:43941] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) 
app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:118) java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82) java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82) app//org.apache.hadoop.hdfs.protocolPB.PBHelperClient.vintPrefixed(PBHelperClient.java:527) app//org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck.readFields(PipelineAck.java:244) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.run(BlockReceiver.java:1420) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: sync.0 java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method) java.base@17.0.11/java.util.concurrent.locks.LockSupport.park(LockSupport.java:341) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionNode.block(AbstractQueuedSynchronizer.java:506) java.base@17.0.11/java.util.concurrent.ForkJoinPool.unmanagedBlock(ForkJoinPool.java:3465) java.base@17.0.11/java.util.concurrent.ForkJoinPool.managedBlock(ForkJoinPool.java:3436) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:1625) java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.take(LinkedBlockingQueue.java:435) app//org.apache.hadoop.hbase.regionserver.wal.FSHLog$SyncRunner.run(FSHLog.java:650) Potentially hanging thread: Time-limited test.append-pool-0 java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method) java.base@17.0.11/java.util.concurrent.locks.LockSupport.park(LockSupport.java:341) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionNode.block(AbstractQueuedSynchronizer.java:506) java.base@17.0.11/java.util.concurrent.ForkJoinPool.unmanagedBlock(ForkJoinPool.java:3465) java.base@17.0.11/java.util.concurrent.ForkJoinPool.managedBlock(ForkJoinPool.java:3436) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:1625) app//com.lmax.disruptor.BlockingWaitStrategy.waitFor(BlockingWaitStrategy.java:47) app//com.lmax.disruptor.ProcessingSequenceBarrier.waitFor(ProcessingSequenceBarrier.java:56) app//com.lmax.disruptor.BatchEventProcessor.processEvents(BatchEventProcessor.java:159) app//com.lmax.disruptor.BatchEventProcessor.run(BatchEventProcessor.java:125) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: sync.4 java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method) java.base@17.0.11/java.util.concurrent.locks.LockSupport.park(LockSupport.java:341) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionNode.block(AbstractQueuedSynchronizer.java:506) java.base@17.0.11/java.util.concurrent.ForkJoinPool.unmanagedBlock(ForkJoinPool.java:3465) java.base@17.0.11/java.util.concurrent.ForkJoinPool.managedBlock(ForkJoinPool.java:3436) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:1625) java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.take(LinkedBlockingQueue.java:435) app//org.apache.hadoop.hbase.regionserver.wal.FSHLog$SyncRunner.run(FSHLog.java:650) Potentially hanging thread: sync.3 java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native 
Method) java.base@17.0.11/java.util.concurrent.locks.LockSupport.park(LockSupport.java:341) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionNode.block(AbstractQueuedSynchronizer.java:506) java.base@17.0.11/java.util.concurrent.ForkJoinPool.unmanagedBlock(ForkJoinPool.java:3465) java.base@17.0.11/java.util.concurrent.ForkJoinPool.managedBlock(ForkJoinPool.java:3436) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:1625) java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.take(LinkedBlockingQueue.java:435) app//org.apache.hadoop.hbase.regionserver.wal.FSHLog$SyncRunner.run(FSHLog.java:650) Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_900039209_22 at /127.0.0.1:44304 [Receiving block BP-754513072-172.17.0.2-1731878086310:blk_1073741866_1044] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244) java.base@17.0.11/java.io.BufferedInputStream.read1(BufferedInputStream.java:284) java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:343) java.base@17.0.11/java.io.DataInputStream.read(DataInputStream.java:151) app//org.apache.hadoop.io.IOUtils.readFully(IOUtils.java:214) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doReadFully(PacketReceiver.java:221) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doRead(PacketReceiver.java:144) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.receiveNextPacket(PacketReceiver.java:119) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receivePacket(BlockReceiver.java:553) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receiveBlock(BlockReceiver.java:1011) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.writeBlock(DataXceiver.java:920) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.opWriteBlock(Receiver.java:176) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.processOp(Receiver.java:110) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:299) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: PacketResponder: BP-754513072-172.17.0.2-1731878086310:blk_1073741866_1044, type=LAST_IN_PIPELINE java.base@17.0.11/java.lang.Object.wait(Native Method) java.base@17.0.11/java.lang.Object.wait(Object.java:338) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.waitForAckHead(BlockReceiver.java:1367) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.run(BlockReceiver.java:1439) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: PacketResponder: BP-754513072-172.17.0.2-1731878086310:blk_1073741867_1045, type=HAS_DOWNSTREAM_IN_PIPELINE, downstreams=2:[127.0.0.1:40579, 127.0.0.1:43941] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native 
Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:118) java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82) java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82) app//org.apache.hadoop.hdfs.protocolPB.PBHelperClient.vintPrefixed(PBHelperClient.java:527) app//org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck.readFields(PipelineAck.java:244) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.run(BlockReceiver.java:1420) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_900039209_22 at /127.0.0.1:57340 [Receiving block BP-754513072-172.17.0.2-1731878086310:blk_1073741867_1045] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244) java.base@17.0.11/java.io.BufferedInputStream.read1(BufferedInputStream.java:284) java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:343) java.base@17.0.11/java.io.DataInputStream.read(DataInputStream.java:151) app//org.apache.hadoop.io.IOUtils.readFully(IOUtils.java:214) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doReadFully(PacketReceiver.java:221) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doRead(PacketReceiver.java:144) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.receiveNextPacket(PacketReceiver.java:119) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receivePacket(BlockReceiver.java:553) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receiveBlock(BlockReceiver.java:1011) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.writeBlock(DataXceiver.java:920) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.opWriteBlock(Receiver.java:176) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.processOp(Receiver.java:110) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:299) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: PacketResponder: BP-754513072-172.17.0.2-1731878086310:blk_1073741866_1044, type=HAS_DOWNSTREAM_IN_PIPELINE, downstreams=2:[127.0.0.1:43941, 127.0.0.1:40579] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) 
java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:118) java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82) java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82) app//org.apache.hadoop.hdfs.protocolPB.PBHelperClient.vintPrefixed(PBHelperClient.java:527) app//org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck.readFields(PipelineAck.java:244) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.run(BlockReceiver.java:1420) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: ResponseProcessor for block BP-754513072-172.17.0.2-1731878086310:blk_1073741867_1045 java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:118) java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82) java.base@17.0.11/java.io.FilterInputStream.read(FilterInputStream.java:82) app//org.apache.hadoop.hdfs.protocolPB.PBHelperClient.vintPrefixed(PBHelperClient.java:527) app//org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck.readFields(PipelineAck.java:244) app//org.apache.hadoop.hdfs.DataStreamer$ResponseProcessor.run(DataStreamer.java:1180) Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_900039209_22 at /127.0.0.1:57338 [Receiving block BP-754513072-172.17.0.2-1731878086310:blk_1073741866_1044] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244) java.base@17.0.11/java.io.BufferedInputStream.read1(BufferedInputStream.java:284) java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:343) java.base@17.0.11/java.io.DataInputStream.read(DataInputStream.java:151) 
app//org.apache.hadoop.io.IOUtils.readFully(IOUtils.java:214) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doReadFully(PacketReceiver.java:221) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doRead(PacketReceiver.java:144) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.receiveNextPacket(PacketReceiver.java:119) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receivePacket(BlockReceiver.java:553) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receiveBlock(BlockReceiver.java:1011) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.writeBlock(DataXceiver.java:920) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.opWriteBlock(Receiver.java:176) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.processOp(Receiver.java:110) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:299) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: DataXceiver for client DFSClient_NONMAPREDUCE_900039209_22 at /127.0.0.1:57680 [Receiving block BP-754513072-172.17.0.2-1731878086310:blk_1073741867_1045] java.base@17.0.11/sun.nio.ch.EPoll.wait(Native Method) java.base@17.0.11/sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:118) java.base@17.0.11/sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:129) java.base@17.0.11/sun.nio.ch.SelectorImpl.select(SelectorImpl.java:141) app//org.apache.hadoop.net.SocketIOWithTimeout$SelectorPool.select(SocketIOWithTimeout.java:335) app//org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:156) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:161) app//org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:131) java.base@17.0.11/java.io.BufferedInputStream.fill(BufferedInputStream.java:244) java.base@17.0.11/java.io.BufferedInputStream.read1(BufferedInputStream.java:284) java.base@17.0.11/java.io.BufferedInputStream.read(BufferedInputStream.java:343) java.base@17.0.11/java.io.DataInputStream.read(DataInputStream.java:151) app//org.apache.hadoop.io.IOUtils.readFully(IOUtils.java:214) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doReadFully(PacketReceiver.java:221) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.doRead(PacketReceiver.java:144) app//org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver.receiveNextPacket(PacketReceiver.java:119) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receivePacket(BlockReceiver.java:553) app//org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receiveBlock(BlockReceiver.java:1011) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.writeBlock(DataXceiver.java:920) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.opWriteBlock(Receiver.java:176) app//org.apache.hadoop.hdfs.protocol.datatransfer.Receiver.processOp(Receiver.java:110) app//org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:299) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: DataStreamer for file /user/jenkins/test-data/7e4dd506-1207-ecf0-3f67-43b490dc501f/testFailedToCreateWALIfParentRenamed/wal.1731878109227 block BP-754513072-172.17.0.2-1731878086310:blk_1073741866_1044 java.base@17.0.11/java.lang.Object.wait(Native Method) app//org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:717) Potentially hanging thread: sync.1 java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method) 
java.base@17.0.11/java.util.concurrent.locks.LockSupport.park(LockSupport.java:341) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionNode.block(AbstractQueuedSynchronizer.java:506) java.base@17.0.11/java.util.concurrent.ForkJoinPool.unmanagedBlock(ForkJoinPool.java:3465) java.base@17.0.11/java.util.concurrent.ForkJoinPool.managedBlock(ForkJoinPool.java:3436) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:1625) java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.take(LinkedBlockingQueue.java:435) app//org.apache.hadoop.hbase.regionserver.wal.FSHLog$SyncRunner.run(FSHLog.java:650) - Thread LEAK? -, OpenFileDescriptor=452 (was 426) - OpenFileDescriptor LEAK? -, MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=114 (was 114), ProcessCount=11 (was 11), AvailableMemoryMB=11659 (was 11662) 2024-11-17T21:15:09,265 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testWALCoprocessorLoaded Thread=196, OpenFileDescriptor=452, MaxFileDescriptor=1048576, SystemLoadAverage=114, ProcessCount=11, AvailableMemoryMB=11658 2024-11-17T21:15:09,275 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40579 is added to blk_1073741868_1046 (size=7) 2024-11-17T21:15:09,275 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43941 is added to blk_1073741868_1046 (size=7) 2024-11-17T21:15:09,276 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:41963 is added to blk_1073741868_1046 (size=7) 2024-11-17T21:15:09,277 INFO [Time-limited test {}] util.FSUtils(490): Created version file at hdfs://localhost:37527/user/jenkins/test-data/a45ee5a3-5b69-e870-43d7-9500a26edffb with version=8 2024-11-17T21:15:09,277 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-11-17T21:15:09,279 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks 2024-11-17T21:15:09,284 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911. 
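[editor's note] The stack traces and thread dump above belong to TestFSHLog#testFailedToCreateWALIfParentRenamed: the WAL's parent directory under test-data is renamed away before the log writer is instantiated, so the NameNode rejects the non-recursive create with "Parent directory doesn't exist". Below is a minimal, hypothetical sketch of that failure mode against a plain Hadoop FileSystem; the class name, paths, and block-size/replication arguments are illustrative assumptions, it is not the actual AbstractTestFSWAL code, and it assumes an HDFS-backed FileSystem, since not every FileSystem implementation supports createNonRecursive.

// Illustrative sketch only: reproduce a non-recursive create under a parent
// directory that has just been renamed away. Paths and numeric arguments are
// assumptions for illustration, not values from the test run above.
import java.io.FileNotFoundException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class ParentRenamedCreateExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(conf); // expected to be DistributedFileSystem on a cluster

    Path parent = new Path("/tmp/wal-parent");          // stands in for the test-data WAL dir
    Path renamed = new Path("/tmp/wal-parent-renamed");
    fs.mkdirs(parent);

    // Rename the parent out from under the writer, as the test does.
    fs.rename(parent, renamed);

    Path wal = new Path(parent, "wal.1");
    try (FSDataOutputStream out =
        // Legacy createNonRecursive API, used here for clarity; the builder path in
        // the log above ends up in the same call on DistributedFileSystem.
        fs.createNonRecursive(wal, false, 4096, (short) 3, 64L * 1024 * 1024, null)) {
      System.out.println("unexpected: create succeeded for " + wal);
    } catch (FileNotFoundException e) {
      // HDFS refuses a non-recursive create when the parent path no longer exists,
      // which is the "Parent directory doesn't exist" failure recorded above.
      System.out.println("expected failure: " + e.getMessage());
    }
  }
}

On HDFS the server-side RemoteException from FSDirectory.verifyParentDir is unwrapped on the client into the FileNotFoundException shown in the log.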
2024-11-17T21:15:09,285 INFO [Time-limited test {}] wal.AbstractFSWAL(500): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:37527/user/jenkins/test-data/93b11197-76f7-731f-2e81-69fc7f670c2c/testWALCoprocessorLoaded, archiveDir=hdfs://localhost:37527/user/jenkins/test-data/93b11197-76f7-731f-2e81-69fc7f670c2c/oldWALs, maxLogs=1760 2024-11-17T21:15:09,286 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1731878109285 2024-11-17T21:15:09,295 INFO [Time-limited test {}] wal.AbstractFSWAL(841): New WAL /user/jenkins/test-data/93b11197-76f7-731f-2e81-69fc7f670c2c/testWALCoprocessorLoaded/wal.1731878109285 2024-11-17T21:15:09,295 DEBUG [Time-limited test {}] wal.AbstractFSWAL(925): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:33721:33721),(127.0.0.1/127.0.0.1:45905:45905),(127.0.0.1/127.0.0.1:38217:38217)] 2024-11-17T21:15:09,295 DEBUG [WAL-Shutdown-0 {}] wal.FSHLog(499): Closing WAL writer in /user/jenkins/test-data/93b11197-76f7-731f-2e81-69fc7f670c2c/testWALCoprocessorLoaded 2024-11-17T21:15:09,297 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43941 is added to blk_1073741869_1047 (size=93) 2024-11-17T21:15:09,298 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:41963 is added to blk_1073741869_1047 (size=93) 2024-11-17T21:15:09,298 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40579 is added to blk_1073741869_1047 (size=93) 2024-11-17T21:15:09,300 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1071): Moved 1 WAL file(s) to /user/jenkins/test-data/93b11197-76f7-731f-2e81-69fc7f670c2c/oldWALs 2024-11-17T21:15:09,300 INFO [Time-limited test {}] wal.AbstractFSWAL(1074): Closed WAL: FSHLog wal:(num 1731878109285) 2024-11-17T21:15:09,308 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testWALCoprocessorLoaded Thread=196 (was 196), OpenFileDescriptor=464 (was 452) - OpenFileDescriptor LEAK? 
-, MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=114 (was 114), ProcessCount=11 (was 11), AvailableMemoryMB=11655 (was 11658)
2024-11-17T21:15:09,316 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testSyncNoAppend Thread=196, OpenFileDescriptor=464, MaxFileDescriptor=1048576, SystemLoadAverage=114, ProcessCount=11, AvailableMemoryMB=11655
2024-11-17T21:15:09,326 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:41963 is added to blk_1073741870_1048 (size=7)
2024-11-17T21:15:09,327 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40579 is added to blk_1073741870_1048 (size=7)
2024-11-17T21:15:09,328 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43941 is added to blk_1073741870_1048 (size=7)
2024-11-17T21:15:09,329 INFO [Time-limited test {}] util.FSUtils(490): Created version file at hdfs://localhost:37527/user/jenkins/test-data/a45ee5a3-5b69-e870-43d7-9500a26edffb with version=8
2024-11-17T21:15:09,329 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-11-17T21:15:09,330 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-11-17T21:15:09,335 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911.
2024-11-17T21:15:09,335 INFO [Time-limited test {}] wal.AbstractFSWAL(500): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:37527/user/jenkins/test-data/da865c6a-9b62-15c3-d464-19c814e4c1bc/testSyncNoAppend, archiveDir=hdfs://localhost:37527/user/jenkins/test-data/da865c6a-9b62-15c3-d464-19c814e4c1bc/testSyncNoAppend, maxLogs=1760
2024-11-17T21:15:09,336 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1731878109336
2024-11-17T21:15:09,344 INFO [Time-limited test {}] wal.AbstractFSWAL(841): New WAL /user/jenkins/test-data/da865c6a-9b62-15c3-d464-19c814e4c1bc/testSyncNoAppend/wal.1731878109336
2024-11-17T21:15:09,344 DEBUG [Time-limited test {}] wal.AbstractFSWAL(925): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:38217:38217),(127.0.0.1/127.0.0.1:33721:33721),(127.0.0.1/127.0.0.1:45905:45905)]
2024-11-17T21:15:09,345 DEBUG [WAL-Shutdown-0 {}] wal.FSHLog(499): Closing WAL writer in /user/jenkins/test-data/da865c6a-9b62-15c3-d464-19c814e4c1bc/testSyncNoAppend
2024-11-17T21:15:09,347 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:41963 is added to blk_1073741871_1049 (size=93)
2024-11-17T21:15:09,347 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40579 is added to blk_1073741871_1049 (size=93)
2024-11-17T21:15:09,347 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43941 is added to blk_1073741871_1049 (size=93)
2024-11-17T21:15:09,349 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1071): Moved 1 WAL file(s) to /user/jenkins/test-data/da865c6a-9b62-15c3-d464-19c814e4c1bc/testSyncNoAppend
2024-11-17T21:15:09,349 INFO [Time-limited test {}] wal.AbstractFSWAL(1074): Closed WAL: FSHLog wal:(num 1731878109336)
2024-11-17T21:15:09,357 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testSyncNoAppend Thread=196 (was 196), OpenFileDescriptor=464 (was 464), MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=114 (was 114), ProcessCount=11 (was 11), AvailableMemoryMB=11651 (was 11655)
2024-11-17T21:15:09,365 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testWriteEntryCanBeNull Thread=196, OpenFileDescriptor=464, MaxFileDescriptor=1048576, SystemLoadAverage=114, ProcessCount=11, AvailableMemoryMB=11650
2024-11-17T21:15:09,377 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:41963 is added to blk_1073741872_1050 (size=7)
2024-11-17T21:15:09,377 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43941 is added to blk_1073741872_1050 (size=7)
2024-11-17T21:15:09,377 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40579 is added to blk_1073741872_1050 (size=7)
2024-11-17T21:15:09,378 INFO [Time-limited test {}] util.FSUtils(490): Created version file at hdfs://localhost:37527/user/jenkins/test-data/a45ee5a3-5b69-e870-43d7-9500a26edffb with version=8
2024-11-17T21:15:09,379 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-11-17T21:15:09,381 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-11-17T21:15:09,385 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911.
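The "WAL configuration: blocksize=2 MB, rollsize=1 MB, ..., maxLogs=1760" records above come from AbstractFSWAL reading its settings out of the test Configuration. Purely as an illustrative sketch (the exact keys and values this test class sets are an assumption, not something shown in the log), values like these are normally supplied through the standard HBase configuration properties:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;

public class WalConfigSketch {
  // Sketch of a Configuration that would produce the WAL settings logged above.
  public static Configuration walTestConf() {
    Configuration conf = HBaseConfiguration.create();
    conf.setLong("hbase.regionserver.hlog.blocksize", 2L * 1024 * 1024); // 2 MB WAL block size
    conf.setFloat("hbase.regionserver.logroll.multiplier", 0.5f);        // roll at 0.5 * blocksize = 1 MB
    conf.setInt("hbase.regionserver.maxlogs", 1760);                     // matches maxLogs=1760 in the log
    return conf;
  }
}

With a 2 MB block size and a 0.5 roll multiplier, the WAL rolls at roughly 1 MB, which is consistent with the rollsize=1 MB reported above.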
2024-11-17T21:15:09,385 INFO [Time-limited test {}] wal.AbstractFSWAL(500): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:37527/user/jenkins/test-data/f53c1746-0b17-51ae-11cb-ca347e5a3e9c/testWriteEntryCanBeNull, archiveDir=hdfs://localhost:37527/user/jenkins/test-data/f53c1746-0b17-51ae-11cb-ca347e5a3e9c/testWriteEntryCanBeNull, maxLogs=1760
2024-11-17T21:15:09,386 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1731878109386
2024-11-17T21:15:09,394 INFO [Time-limited test {}] wal.AbstractFSWAL(841): New WAL /user/jenkins/test-data/f53c1746-0b17-51ae-11cb-ca347e5a3e9c/testWriteEntryCanBeNull/wal.1731878109386
2024-11-17T21:15:09,394 DEBUG [Time-limited test {}] wal.AbstractFSWAL(925): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:45905:45905),(127.0.0.1/127.0.0.1:33721:33721),(127.0.0.1/127.0.0.1:38217:38217)]
2024-11-17T21:15:09,395 DEBUG [WAL-Shutdown-0 {}] wal.FSHLog(499): Closing WAL writer in /user/jenkins/test-data/f53c1746-0b17-51ae-11cb-ca347e5a3e9c/testWriteEntryCanBeNull
2024-11-17T21:15:09,397 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43941 is added to blk_1073741873_1051 (size=93)
2024-11-17T21:15:09,397 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:41963 is added to blk_1073741873_1051 (size=93)
2024-11-17T21:15:09,397 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40579 is added to blk_1073741873_1051 (size=93)
2024-11-17T21:15:09,400 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1071): Moved 1 WAL file(s) to /user/jenkins/test-data/f53c1746-0b17-51ae-11cb-ca347e5a3e9c/testWriteEntryCanBeNull
2024-11-17T21:15:09,400 INFO [Time-limited test {}] wal.AbstractFSWAL(1074): Closed WAL: FSHLog wal:(num 1731878109386)
2024-11-17T21:15:09,414 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testWriteEntryCanBeNull Thread=196 (was 196), OpenFileDescriptor=464 (was 464), MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=114 (was 114), ProcessCount=11 (was 11), AvailableMemoryMB=11640 (was 11650)
2024-11-17T21:15:09,422 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: regionserver.wal.TestFSHLog#testUnflushedSeqIdTrackingWithAsyncWal Thread=196, OpenFileDescriptor=464, MaxFileDescriptor=1048576, SystemLoadAverage=114, ProcessCount=11, AvailableMemoryMB=11639
2024-11-17T21:15:09,433 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:41963 is added to blk_1073741874_1052 (size=7)
2024-11-17T21:15:09,433 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40579 is added to blk_1073741874_1052 (size=7)
2024-11-17T21:15:09,434 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43941 is added to blk_1073741874_1052 (size=7)
2024-11-17T21:15:09,435 INFO [Time-limited test {}] util.FSUtils(490): Created version file at hdfs://localhost:37527/user/jenkins/test-data/a45ee5a3-5b69-e870-43d7-9500a26edffb with version=8
2024-11-17T21:15:09,435 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-11-17T21:15:09,437 INFO [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-11-17T21:15:09,443 INFO [Time-limited test {}] coprocessor.CoprocessorHost(174): System coprocessor org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor loaded, priority=536870911.
2024-11-17T21:15:09,443 INFO [Time-limited test {}] wal.AbstractFSWAL(500): WAL configuration: blocksize=2 MB, rollsize=1 MB, prefix=wal, suffix=, logDir=hdfs://localhost:37527/user/jenkins/test-data/a45ee5a3-5b69-e870-43d7-9500a26edffb/testUnflushedSeqIdTrackingWithAsyncWal, archiveDir=hdfs://localhost:37527/user/jenkins/test-data/a45ee5a3-5b69-e870-43d7-9500a26edffb/oldWALs, maxLogs=1760
2024-11-17T21:15:09,444 INFO [Time-limited test {}] monitor.StreamSlowMonitor(122): New stream slow monitor wal.1731878109444
2024-11-17T21:15:09,452 INFO [Time-limited test {}] wal.AbstractFSWAL(841): New WAL /user/jenkins/test-data/a45ee5a3-5b69-e870-43d7-9500a26edffb/testUnflushedSeqIdTrackingWithAsyncWal/wal.1731878109444
2024-11-17T21:15:09,452 DEBUG [Time-limited test {}] wal.AbstractFSWAL(925): Create new FSHLog writer with pipeline: [(127.0.0.1/127.0.0.1:38217:38217),(127.0.0.1/127.0.0.1:33721:33721),(127.0.0.1/127.0.0.1:45905:45905)]
2024-11-17T21:15:09,452 INFO [Time-limited test {}] regionserver.HRegion(7106): creating {ENCODED => 61c6310d548698303fe043e0057e2578, NAME => 'table,,1731878109452.61c6310d548698303fe043e0057e2578.', STARTKEY => '', ENDKEY => ''}, tableDescriptor='table', {NAME => 'b', INDEX_BLOCK_ENCODING => 'NONE', VERSIONS => '1', KEEP_DELETED_CELLS => 'FALSE', DATA_BLOCK_ENCODING => 'NONE', TTL => 'FOREVER', MIN_VERSIONS => '0', REPLICATION_SCOPE => '0', BLOOMFILTER => 'ROW', IN_MEMORY => 'false', COMPRESSION => 'NONE', BLOCKCACHE => 'true', BLOCKSIZE => '65536 B (64KB)'}, regionDir=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/09645b7e-9829-f1fc-19c7-d9675fc19400
2024-11-17T21:15:09,465 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40579 is added to blk_1073741876_1054 (size=40)
2024-11-17T21:15:09,466 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:41963 is added to blk_1073741876_1054 (size=40)
2024-11-17T21:15:09,466 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43941 is added to blk_1073741876_1054 (size=40)
2024-11-17T21:15:09,467 DEBUG [Time-limited test {}] regionserver.HRegion(894): Instantiated table,,1731878109452.61c6310d548698303fe043e0057e2578.; StoreHotnessProtector, parallelPutToStoreThreadLimit=0 ; minColumnNum=100 ; preparePutThreadLimit=0 ; hotProtect now disable
2024-11-17T21:15:09,469 INFO [StoreOpener-61c6310d548698303fe043e0057e2578-1 {}] regionserver.HStore(400): Created cacheConfig: cacheDataOnRead=true, cacheDataOnWrite=false, cacheIndexesOnWrite=false, cacheBloomsOnWrite=false, cacheEvictOnClose=false, cacheDataCompressed=false, prefetchOnOpen=false, for column family b of region 61c6310d548698303fe043e0057e2578
2024-11-17T21:15:09,470 INFO [StoreOpener-61c6310d548698303fe043e0057e2578-1 {}] compactions.CompactionConfiguration(181): size [minCompactSize:128 MB, maxCompactSize:8.00 EB, offPeakMaxCompactSize:8.00 EB); files [minFilesToCompact:3, maxFilesToCompact:10); ratio 1.200000; off-peak ratio 5.000000; throttle point 2684354560; major period 604800000, major jitter 0.500000, min locality to compact 0.000000; tiered compaction: max_age 9223372036854775807, incoming window min 6, compaction policy for tiered window org.apache.hadoop.hbase.regionserver.compactions.ExploringCompactionPolicy, single output for minor true, compaction window factory org.apache.hadoop.hbase.regionserver.compactions.ExponentialCompactionWindowFactory, region 61c6310d548698303fe043e0057e2578 columnFamilyName b
2024-11-17T21:15:09,470 DEBUG [StoreOpener-61c6310d548698303fe043e0057e2578-1 {}] storefiletracker.StoreFileTrackerFactory(122): instantiating StoreFileTracker impl org.apache.hadoop.hbase.regionserver.storefiletracker.DefaultStoreFileTracker
2024-11-17T21:15:09,471 INFO [StoreOpener-61c6310d548698303fe043e0057e2578-1 {}] regionserver.HStore(327): Store=61c6310d548698303fe043e0057e2578/b, memstore type=DefaultMemStore, storagePolicy=NONE, verifyBulkLoads=false, parallelPutCountPrintThreshold=50, encoding=NONE, compression=NONE
2024-11-17T21:15:09,472 DEBUG [Time-limited test {}] regionserver.HRegion(5301): Found 0 recovered edits file(s) under hdfs://localhost:37527/user/jenkins/test-data/a45ee5a3-5b69-e870-43d7-9500a26edffb/data/default/table/61c6310d548698303fe043e0057e2578
2024-11-17T21:15:09,472 DEBUG [Time-limited test {}] regionserver.HRegion(5301): Found 0 recovered edits file(s) under hdfs://localhost:37527/user/jenkins/test-data/a45ee5a3-5b69-e870-43d7-9500a26edffb/data/default/table/61c6310d548698303fe043e0057e2578
2024-11-17T21:15:09,472 DEBUG [Time-limited test {}] regionserver.HRegion(5301): Found 0 recovered edits file(s) under hdfs://localhost:37527/user/jenkins/test-data/c4ac62cb-c556-9c27-521b-75f1c8613465/data/default/table/61c6310d548698303fe043e0057e2578
2024-11-17T21:15:09,475 DEBUG [Time-limited test {}] regionserver.HRegion(1085): writing seq id for 61c6310d548698303fe043e0057e2578
2024-11-17T21:15:09,477 DEBUG [Time-limited test {}] wal.WALSplitUtil(409): Wrote file=hdfs://localhost:37527/user/jenkins/test-data/c4ac62cb-c556-9c27-521b-75f1c8613465/data/default/table/61c6310d548698303fe043e0057e2578/recovered.edits/1.seqid, newMaxSeqId=1, maxSeqId=-1
2024-11-17T21:15:09,478 INFO [Time-limited test {}] regionserver.HRegion(1102): Opened 61c6310d548698303fe043e0057e2578; next sequenceid=2; SteppingSplitPolicysuper{IncreasingToUpperBoundRegionSplitPolicy{initialSize=268435456, ConstantSizeRegionSplitPolicy{desiredMaxFileSize=67008422, jitterRate=-0.0014967024326324463}}}, FlushLargeStoresPolicy{flushSizeLowerBound=-1}
2024-11-17T21:15:09,480 DEBUG [Time-limited test {}] regionserver.HRegion(1001): Region open journal for 61c6310d548698303fe043e0057e2578:
2024-11-17T21:15:09,481 DEBUG [Time-limited test {}] regionserver.HRegion(1681): Closing 61c6310d548698303fe043e0057e2578, disabling compactions & flushes
2024-11-17T21:15:09,481 INFO [Time-limited test {}] regionserver.HRegion(1703): Closing region table,,1731878109452.61c6310d548698303fe043e0057e2578.
2024-11-17T21:15:09,481 DEBUG [Time-limited test {}] regionserver.HRegion(1724): Waiting without time limit for close lock on table,,1731878109452.61c6310d548698303fe043e0057e2578.
2024-11-17T21:15:09,481 DEBUG [Time-limited test {}] regionserver.HRegion(1791): Acquired close lock on table,,1731878109452.61c6310d548698303fe043e0057e2578. after waiting 0 ms
2024-11-17T21:15:09,481 DEBUG [Time-limited test {}] regionserver.HRegion(1801): Updates disabled for region table,,1731878109452.61c6310d548698303fe043e0057e2578.
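The HRegion(7106) record above prints the table descriptor the test region is created with: table 'table' with a single column family 'b', VERSIONS => '1', and otherwise default settings (64 KB block size, no compression, ROW bloom filter). A minimal sketch of an equivalent descriptor built with the HBase 2.x client builder API; this is the public API, not code quoted from the test itself:

import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.util.Bytes;

public class TableDescriptorSketch {
  // Rough equivalent of the descriptor logged above: table 'table', one family 'b',
  // max versions 1, everything else left at its default.
  public static TableDescriptor tableWithFamilyB() {
    return TableDescriptorBuilder.newBuilder(TableName.valueOf("table"))
        .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("b"))
            .setMaxVersions(1)
            .build())
        .build();
  }
}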
2024-11-17T21:15:09,481 INFO [Time-limited test {}] regionserver.HRegion(1922): Closed table,,1731878109452.61c6310d548698303fe043e0057e2578.
2024-11-17T21:15:09,481 DEBUG [Time-limited test {}] regionserver.HRegion(1635): Region close journal for 61c6310d548698303fe043e0057e2578:
2024-11-17T21:15:09,482 DEBUG [Time-limited test {}] regionserver.HRegion(7285): Opening region: {ENCODED => 61c6310d548698303fe043e0057e2578, NAME => 'table,,1731878109452.61c6310d548698303fe043e0057e2578.', STARTKEY => '', ENDKEY => ''}
2024-11-17T21:15:09,483 DEBUG [Time-limited test {}] regionserver.MetricsRegionSourceImpl(79): Creating new MetricsRegionSourceImpl for table table 61c6310d548698303fe043e0057e2578
2024-11-17T21:15:09,483 DEBUG [Time-limited test {}] regionserver.HRegion(894): Instantiated table,,1731878109452.61c6310d548698303fe043e0057e2578.; StoreHotnessProtector, parallelPutToStoreThreadLimit=0 ; minColumnNum=100 ; preparePutThreadLimit=0 ; hotProtect now disable
2024-11-17T21:15:09,483 DEBUG [Time-limited test {}] regionserver.HRegion(7327): checking encryption for 61c6310d548698303fe043e0057e2578
2024-11-17T21:15:09,483 DEBUG [Time-limited test {}] regionserver.HRegion(7330): checking classloading for 61c6310d548698303fe043e0057e2578
2024-11-17T21:15:09,485 INFO [StoreOpener-61c6310d548698303fe043e0057e2578-1 {}] regionserver.HStore(400): Created cacheConfig: cacheDataOnRead=true, cacheDataOnWrite=false, cacheIndexesOnWrite=false, cacheBloomsOnWrite=false, cacheEvictOnClose=false, cacheDataCompressed=false, prefetchOnOpen=false, for column family b of region 61c6310d548698303fe043e0057e2578
2024-11-17T21:15:09,486 INFO [StoreOpener-61c6310d548698303fe043e0057e2578-1 {}] compactions.CompactionConfiguration(181): size [minCompactSize:128 MB, maxCompactSize:8.00 EB, offPeakMaxCompactSize:8.00 EB); files [minFilesToCompact:3, maxFilesToCompact:10); ratio 1.200000; off-peak ratio 5.000000; throttle point 2684354560; major period 604800000, major jitter 0.500000, min locality to compact 0.000000; tiered compaction: max_age 9223372036854775807, incoming window min 6, compaction policy for tiered window org.apache.hadoop.hbase.regionserver.compactions.ExploringCompactionPolicy, single output for minor true, compaction window factory org.apache.hadoop.hbase.regionserver.compactions.ExponentialCompactionWindowFactory, region 61c6310d548698303fe043e0057e2578 columnFamilyName b
2024-11-17T21:15:09,486 DEBUG [StoreOpener-61c6310d548698303fe043e0057e2578-1 {}] storefiletracker.StoreFileTrackerFactory(122): instantiating StoreFileTracker impl org.apache.hadoop.hbase.regionserver.storefiletracker.DefaultStoreFileTracker
2024-11-17T21:15:09,487 INFO [StoreOpener-61c6310d548698303fe043e0057e2578-1 {}] regionserver.HStore(327): Store=61c6310d548698303fe043e0057e2578/b, memstore type=DefaultMemStore, storagePolicy=NONE, verifyBulkLoads=false, parallelPutCountPrintThreshold=50, encoding=NONE, compression=NONE
2024-11-17T21:15:09,488 DEBUG [Time-limited test {}] regionserver.HRegion(5301): Found 0 recovered edits file(s) under hdfs://localhost:37527/user/jenkins/test-data/a45ee5a3-5b69-e870-43d7-9500a26edffb/data/default/table/61c6310d548698303fe043e0057e2578
2024-11-17T21:15:09,488 DEBUG [Time-limited test {}] regionserver.HRegion(5301): Found 0 recovered edits file(s) under hdfs://localhost:37527/user/jenkins/test-data/a45ee5a3-5b69-e870-43d7-9500a26edffb/data/default/table/61c6310d548698303fe043e0057e2578
2024-11-17T21:15:09,489 DEBUG [Time-limited test {}] regionserver.HRegion(5301): Found 0 recovered edits file(s) under hdfs://localhost:37527/user/jenkins/test-data/c4ac62cb-c556-9c27-521b-75f1c8613465/data/default/table/61c6310d548698303fe043e0057e2578
2024-11-17T21:15:09,491 DEBUG [Time-limited test {}] regionserver.HRegion(1085): writing seq id for 61c6310d548698303fe043e0057e2578
2024-11-17T21:15:09,493 INFO [Time-limited test {}] regionserver.HRegion(1102): Opened 61c6310d548698303fe043e0057e2578; next sequenceid=2; SteppingSplitPolicysuper{IncreasingToUpperBoundRegionSplitPolicy{initialSize=268435456, ConstantSizeRegionSplitPolicy{desiredMaxFileSize=75166007, jitterRate=0.12006078660488129}}}, FlushLargeStoresPolicy{flushSizeLowerBound=-1}
2024-11-17T21:15:09,495 DEBUG [Time-limited test {}] regionserver.HRegion(1001): Region open journal for 61c6310d548698303fe043e0057e2578:
2024-11-17T21:15:09,867 DEBUG [HBase-Metrics2-1 {}] impl.GlobalMetricRegistriesAdapter(131): Registering adapter for the MetricRegistry: RegionServer,sub=Coprocessor.WAL.CP_org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor
2024-11-17T21:15:09,867 INFO [HBase-Metrics2-1 {}] impl.GlobalMetricRegistriesAdapter(135): Registering RegionServer,sub=Coprocessor.WAL.CP_org.apache.hadoop.hbase.coprocessor.SampleRegionWALCoprocessor Metrics about HBase WALObservers
2024-11-17T21:15:09,868 DEBUG [HBase-Metrics2-1 {}] impl.GlobalMetricRegistriesAdapter(131): Registering adapter for the MetricRegistry: RegionServer,sub=TableRequests_Namespace_default_table_testWALClosureFailureAndCleanup
2024-11-17T21:15:09,868 INFO [HBase-Metrics2-1 {}] impl.GlobalMetricRegistriesAdapter(135): Registering RegionServer,sub=TableRequests_Namespace_default_table_testWALClosureFailureAndCleanup Metrics about Tables on a single HBase RegionServer
2024-11-17T21:15:09,873 DEBUG [HBase-Metrics2-1 {}] impl.GlobalMetricRegistriesAdapter(131): Registering adapter for the MetricRegistry: RegionServer,sub=TableRequests_Namespace_default_table_table
2024-11-17T21:15:09,873 INFO [HBase-Metrics2-1 {}] impl.GlobalMetricRegistriesAdapter(135): Registering RegionServer,sub=TableRequests_Namespace_default_table_table Metrics about Tables on a single HBase RegionServer
2024-11-17T21:15:12,501 DEBUG [pool-116-thread-1 {}] regionserver.HRegion(1681): Closing 61c6310d548698303fe043e0057e2578, disabling compactions & flushes
2024-11-17T21:15:12,501 INFO [pool-116-thread-1 {}] regionserver.HRegion(1703): Closing region table,,1731878109452.61c6310d548698303fe043e0057e2578.
2024-11-17T21:15:12,501 DEBUG [pool-116-thread-1 {}] regionserver.HRegion(1724): Waiting without time limit for close lock on table,,1731878109452.61c6310d548698303fe043e0057e2578.
2024-11-17T21:15:12,501 DEBUG [pool-116-thread-1 {}] regionserver.HRegion(1791): Acquired close lock on table,,1731878109452.61c6310d548698303fe043e0057e2578. after waiting 0 ms
2024-11-17T21:15:12,501 DEBUG [pool-116-thread-1 {}] regionserver.HRegion(1801): Updates disabled for region table,,1731878109452.61c6310d548698303fe043e0057e2578.
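The flush recorded just below persists exactly one cell whose key is logged as b/b:b/1731878109497/Put/seqid=0, i.e. row 'b', family 'b', qualifier 'b', written at 21:15:09,497 against the region opened above. The actual test code is not visible in this log; as a hedged sketch, such a cell could be written against the HRegion handle along these lines (the method and variable names are illustrative):

import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.util.Bytes;

public class PutSketch {
  // 'region' stands for the HRegion opened above; the name is illustrative only.
  static void writeSingleCell(HRegion region) throws java.io.IOException {
    byte[] b = Bytes.toBytes("b");
    Put put = new Put(b);       // row 'b'
    put.addColumn(b, b, b);     // family 'b', qualifier 'b', value 'b' -> the 48 B flushed below
    region.put(put);            // stays in the memstore until the flush logged below
  }
}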
2024-11-17T21:15:12,501 INFO [pool-116-thread-1 {}] regionserver.HRegion(2837): Flushing 61c6310d548698303fe043e0057e2578 1/1 column families, dataSize=48 B heapSize=448 B
2024-11-17T21:15:13,167 WARN [HBase-Metrics2-1 {}] impl.MetricsConfig(138): Cannot locate configuration: tried hadoop-metrics2-hbase.properties,hadoop-metrics2.properties
2024-11-17T21:15:15,520 DEBUG [pool-116-thread-1 {}] hfile.HFileWriterImpl(814): Len of the biggest cell in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/09645b7e-9829-f1fc-19c7-d9675fc19400/data/default/table/61c6310d548698303fe043e0057e2578/.tmp/b/4d14e1ab8cb24d19acda1e96077a8d92 is 28, key is b/b:b/1731878109497/Put/seqid=0
2024-11-17T21:15:15,528 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:41963 is added to blk_1073741877_1055 (size=4945)
2024-11-17T21:15:15,529 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43941 is added to blk_1073741877_1055 (size=4945)
2024-11-17T21:15:15,529 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40579 is added to blk_1073741877_1055 (size=4945)
2024-11-17T21:15:15,530 INFO [pool-116-thread-1 {}] regionserver.DefaultStoreFlusher(81): Flushed memstore data size=48 B at sequenceid=6 (bloomFilter=true), to=/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/09645b7e-9829-f1fc-19c7-d9675fc19400/data/default/table/61c6310d548698303fe043e0057e2578/.tmp/b/4d14e1ab8cb24d19acda1e96077a8d92
2024-11-17T21:15:15,539 DEBUG [pool-116-thread-1 {}] regionserver.HRegionFileSystem(442): Committing /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/09645b7e-9829-f1fc-19c7-d9675fc19400/data/default/table/61c6310d548698303fe043e0057e2578/.tmp/b/4d14e1ab8cb24d19acda1e96077a8d92 as /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/09645b7e-9829-f1fc-19c7-d9675fc19400/data/default/table/61c6310d548698303fe043e0057e2578/b/4d14e1ab8cb24d19acda1e96077a8d92
2024-11-17T21:15:15,547 INFO [pool-116-thread-1 {}] regionserver.HStore$StoreFlusherImpl(1989): Added /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/09645b7e-9829-f1fc-19c7-d9675fc19400/data/default/table/61c6310d548698303fe043e0057e2578/b/4d14e1ab8cb24d19acda1e96077a8d92, entries=1, sequenceid=6, filesize=4.8 K
2024-11-17T21:15:15,549 INFO [pool-116-thread-1 {}] regionserver.HRegion(3040): Finished flush of dataSize ~48 B/48, heapSize ~432 B/432, currentSize=0 B/0 for 61c6310d548698303fe043e0057e2578 in 3047ms, sequenceid=6, compaction requested=false
2024-11-17T21:15:15,553 DEBUG [pool-116-thread-1 {}] wal.WALSplitUtil(409): Wrote file=hdfs://localhost:37527/user/jenkins/test-data/c4ac62cb-c556-9c27-521b-75f1c8613465/data/default/table/61c6310d548698303fe043e0057e2578/recovered.edits/9.seqid, newMaxSeqId=9, maxSeqId=1
2024-11-17T21:15:15,554 INFO [pool-116-thread-1 {}] regionserver.HRegion(1922): Closed table,,1731878109452.61c6310d548698303fe043e0057e2578.
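The flush above is driven by the region close running on pool-116-thread-1: closing a region forces whatever is still in its memstore out to a store file before the close completes, which is what produces the DefaultStoreFlusher/HStore lines and, just below, the "Close result:{...}" map of flushed files. A minimal sketch of that interaction, assuming an HRegion handle like the one opened earlier (the variable name is illustrative, and the return type is left to var because only the printed result is visible in this log):

import org.apache.hadoop.hbase.regionserver.HRegion;

public class CloseFlushSketch {
  // Closing the region flushes the remaining memstore content and returns a map of
  // column family -> store files produced by that final flush.
  static void closeAndFlush(HRegion region) throws java.io.IOException {
    var closeResult = region.close();              // triggers the flush recorded above
    System.out.println("Close result: " + closeResult);
  }
}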
2024-11-17T21:15:15,554 DEBUG [pool-116-thread-1 {}] regionserver.HRegion(1635): Region close journal for 61c6310d548698303fe043e0057e2578:
2024-11-17T21:15:15,554 INFO [pool-116-thread-1 {}] wal.AbstractTestFSWAL(611): Close result:{[B@4143aa3=[/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/09645b7e-9829-f1fc-19c7-d9675fc19400/data/default/table/61c6310d548698303fe043e0057e2578/b/4d14e1ab8cb24d19acda1e96077a8d92]}
2024-11-17T21:15:15,554 WARN [Time-limited test {}] regionserver.HRegion(1666): Region table,,1731878109452.61c6310d548698303fe043e0057e2578. already closed
2024-11-17T21:15:15,555 DEBUG [Time-limited test {}] regionserver.HRegion(1635): Region close journal for 61c6310d548698303fe043e0057e2578:
2024-11-17T21:15:15,555 DEBUG [WAL-Shutdown-0 {}] wal.FSHLog(499): Closing WAL writer in /user/jenkins/test-data/a45ee5a3-5b69-e870-43d7-9500a26edffb/testUnflushedSeqIdTrackingWithAsyncWal
2024-11-17T21:15:15,557 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:41963 is added to blk_1073741875_1053 (size=1206)
2024-11-17T21:15:15,557 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:40579 is added to blk_1073741875_1053 (size=1206)
2024-11-17T21:15:15,558 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:43941 is added to blk_1073741875_1053 (size=1206)
2024-11-17T21:15:15,560 DEBUG [Time-limited test {}] wal.AbstractFSWAL(1071): Moved 1 WAL file(s) to /user/jenkins/test-data/a45ee5a3-5b69-e870-43d7-9500a26edffb/oldWALs
2024-11-17T21:15:15,560 INFO [Time-limited test {}] wal.AbstractFSWAL(1074): Closed WAL: FSHLog wal:(num 1731878109444)
2024-11-17T21:15:15,570 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: regionserver.wal.TestFSHLog#testUnflushedSeqIdTrackingWithAsyncWal Thread=195 (was 196), OpenFileDescriptor=472 (was 464) - OpenFileDescriptor LEAK? -, MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=149 (was 114) - SystemLoadAverage LEAK? -, ProcessCount=11 (was 11), AvailableMemoryMB=11623 (was 11639)
2024-11-17T21:15:15,570 INFO [Time-limited test {}] hbase.HBaseTestingUtility(1340): Shutting down minicluster
2024-11-17T21:15:15,570 ERROR [Time-limited test {}] hdfs.DFSClient(665): Failed to close file: /user/jenkins/test-data/a45ee5a3-5b69-e870-43d7-9500a26edffb/testWALClosureFailureAndCleanup/wal.1731878089494 with renewLeaseKey: DEFAULT_16417 java.io.IOException: Failed to replace a bad datanode on the existing pipeline due to no more good datanodes being available to try. (Nodes: current=[DatanodeInfoWithStorage[127.0.0.1:41963,DS-dd8c75c6-292f-4f1f-bb94-742d1fc7d2f4,DISK], DatanodeInfoWithStorage[127.0.0.1:43941,DS-0dc17499-73e1-4891-a8aa-a698546e6351,DISK]], original=[DatanodeInfoWithStorage[127.0.0.1:43941,DS-0dc17499-73e1-4891-a8aa-a698546e6351,DISK], DatanodeInfoWithStorage[127.0.0.1:41963,DS-dd8c75c6-292f-4f1f-bb94-742d1fc7d2f4,DISK]]). The current failed datanode replacement policy is DEFAULT, and a client may configure this via 'dfs.client.block.write.replace-datanode-on-failure.policy' in its configuration.
    at org.apache.hadoop.hdfs.DataStreamer.findNewDatanode(DataStreamer.java:1455) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DataStreamer.addDatanode2ExistingPipeline(DataStreamer.java:1515) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DataStreamer.handleDatanodeReplacement(DataStreamer.java:1758) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DataStreamer.setupPipelineInternal(DataStreamer.java:1648) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DataStreamer.setupPipelineForAppendOrRecovery(DataStreamer.java:1627) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DataStreamer.processDatanodeOrExternalError(DataStreamer.java:1408) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:707) ~[hadoop-hdfs-client-3.4.1.jar:?]
2024-11-17T21:15:15,574 ERROR [Time-limited test {}] hdfs.DFSClient(665): Failed to close file: /user/jenkins/test-data/7e4dd506-1207-ecf0-3f67-43b490dc501f/testFailedToCreateWALIfParentRenamed/wal.1731878109227 with renewLeaseKey: DEFAULT_16581 org.apache.hadoop.ipc.RemoteException: File does not exist: /user/jenkins/test-data/7e4dd506-1207-ecf0-3f67-43b490dc501f/testFailedToCreateWALIfParentRenamed/wal.1731878109227 (inode 16581) Holder DFSClient_NONMAPREDUCE_900039209_22 does not have any open files.
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkLease(FSNamesystem.java:3188)
    at org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.completeFileInternal(FSDirWriteFileOp.java:703)
    at org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.completeFile(FSDirWriteFileOp.java:689)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.completeFile(FSNamesystem.java:3232)
    at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.complete(NameNodeRpcServer.java:983)
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.complete(ClientNamenodeProtocolServerSideTranslatorPB.java:649)
    at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:621)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:589)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:573)
    at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1227)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1246)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1169)
    at java.base/java.security.AccessController.doPrivileged(AccessController.java:712)
    at java.base/javax.security.auth.Subject.doAs(Subject.java:439)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1953)
    at org.apache.hadoop.ipc.Server$Handler.run(Server.java:3198)
    at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1584) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.Client.call(Client.java:1529) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.Client.call(Client.java:1426) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:258) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:139) ~[hadoop-common-3.4.1.jar:?]
    at jdk.proxy2.$Proxy44.complete(Unknown Source) ~[?:?]
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.lambda$complete$13(ClientNamenodeProtocolTranslatorPB.java:532) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc(ShadedProtobufHelper.java:160) ~[hadoop-common-3.4.1.jar:?]
at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.complete(ClientNamenodeProtocolTranslatorPB.java:532) ~[hadoop-hdfs-client-3.4.1.jar:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:437) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:170) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:162) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:100) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:366) ~[hadoop-common-3.4.1.jar:?] at jdk.proxy2.$Proxy45.complete(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.complete(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.complete(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.complete(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.complete(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.complete(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.complete(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] 
at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.complete(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.complete(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.complete(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.complete(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.complete(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.complete(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.complete(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.complete(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.complete(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] 
at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.complete(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.complete(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.complete(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.complete(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.complete(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.complete(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.complete(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.complete(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.complete(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] 
at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.complete(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.complete(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.complete(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.complete(Unknown Source) ~[?:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?] at jdk.proxy2.$Proxy46.complete(Unknown Source) ~[?:?] at org.apache.hadoop.hdfs.DFSOutputStream.completeFile(DFSOutputStream.java:992) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.DFSOutputStream.completeFile(DFSOutputStream.java:949) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.DFSOutputStream.closeImpl(DFSOutputStream.java:913) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.DFSOutputStream.close(DFSOutputStream.java:861) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.DFSClient.closeAllFilesBeingWritten(DFSClient.java:662) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.DFSClient.closeOutputStreams(DFSClient.java:699) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.DistributedFileSystem.close(DistributedFileSystem.java:1528) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.MiniDFSCluster.shutdown(MiniDFSCluster.java:2194) ~[hadoop-hdfs-3.4.1-tests.jar:?] at org.apache.hadoop.hdfs.MiniDFSCluster.shutdown(MiniDFSCluster.java:2174) ~[hadoop-hdfs-3.4.1-tests.jar:?] at org.apache.hadoop.hdfs.MiniDFSCluster.shutdown(MiniDFSCluster.java:2167) ~[hadoop-hdfs-3.4.1-tests.jar:?] at org.apache.hadoop.hbase.HBaseTestingUtility.shutdownMiniDFSCluster(HBaseTestingUtility.java:834) ~[test-classes/:2.7.0-SNAPSHOT] at org.apache.hadoop.hbase.HBaseTestingUtility.shutdownMiniCluster(HBaseTestingUtility.java:1342) ~[test-classes/:2.7.0-SNAPSHOT] at org.apache.hadoop.hbase.regionserver.wal.AbstractTestFSWAL.tearDownAfterClass(AbstractTestFSWAL.java:148) ~[test-classes/:?] at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?] at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?] 
at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:59) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12) ~[junit-4.13.2.jar:4.13.2] at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:56) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.RunAfters.invokeMethod(RunAfters.java:46) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:33) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:299) ~[junit-4.13.2.jar:4.13.2] at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:293) ~[junit-4.13.2.jar:4.13.2] at java.util.concurrent.FutureTask.run(FutureTask.java:264) ~[?:?] at java.lang.Thread.run(Thread.java:840) ~[?:?] 2024-11-17T21:15:15,575 ERROR [Time-limited test {}] hdfs.DFSClient(665): Failed to close file: /user/jenkins/test-data/a45ee5a3-5b69-e870-43d7-9500a26edffb/testWALClosureFailureAndCleanup/wal.1731878089157 with renewLeaseKey: DEFAULT_16395 java.io.IOException: Failed to replace a bad datanode on the existing pipeline due to no more good datanodes being available to try. (Nodes: current=[DatanodeInfoWithStorage[127.0.0.1:41963,DS-dd8c75c6-292f-4f1f-bb94-742d1fc7d2f4,DISK], DatanodeInfoWithStorage[127.0.0.1:40579,DS-2aedbc45-fd4e-4d62-83f9-420e54976b0e,DISK]], original=[DatanodeInfoWithStorage[127.0.0.1:41963,DS-dd8c75c6-292f-4f1f-bb94-742d1fc7d2f4,DISK], DatanodeInfoWithStorage[127.0.0.1:40579,DS-2aedbc45-fd4e-4d62-83f9-420e54976b0e,DISK]]). The current failed datanode replacement policy is DEFAULT, and a client may configure this via 'dfs.client.block.write.replace-datanode-on-failure.policy' in its configuration. at org.apache.hadoop.hdfs.DataStreamer.findNewDatanode(DataStreamer.java:1455) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.DataStreamer.addDatanode2ExistingPipeline(DataStreamer.java:1515) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.DataStreamer.handleDatanodeReplacement(DataStreamer.java:1758) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.DataStreamer.setupPipelineInternal(DataStreamer.java:1648) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.DataStreamer.setupPipelineForAppendOrRecovery(DataStreamer.java:1627) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.DataStreamer.processDatanodeOrExternalError(DataStreamer.java:1408) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:707) ~[hadoop-hdfs-client-3.4.1.jar:?] 2024-11-17T21:15:15,579 ERROR [Time-limited test {}] hdfs.DFSClient(665): Failed to close file: /user/jenkins/test-data/7e4dd506-1207-ecf0-3f67-43b490dc501f/testFailedToCreateWALIfParentRenamed/wal.1731878109234 with renewLeaseKey: DEFAULT_16582 org.apache.hadoop.ipc.RemoteException: File does not exist: /user/jenkins/test-data/7e4dd506-1207-ecf0-3f67-43b490dc501f/testFailedToCreateWALIfParentRenamed/wal.1731878109234 (inode 16582) Holder DFSClient_NONMAPREDUCE_900039209_22 does not have any open files. 
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkLease(FSNamesystem.java:3188) at org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.completeFileInternal(FSDirWriteFileOp.java:703) at org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.completeFile(FSDirWriteFileOp.java:689) at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.completeFile(FSNamesystem.java:3232) at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.complete(NameNodeRpcServer.java:983) at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.complete(ClientNamenodeProtocolServerSideTranslatorPB.java:649) at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java) at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:621) at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:589) at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:573) at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1227) at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1246) at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1169) at java.base/java.security.AccessController.doPrivileged(AccessController.java:712) at java.base/javax.security.auth.Subject.doAs(Subject.java:439) at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1953) at org.apache.hadoop.ipc.Server$Handler.run(Server.java:3198) at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1584) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.ipc.Client.call(Client.java:1529) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.ipc.Client.call(Client.java:1426) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:258) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:139) ~[hadoop-common-3.4.1.jar:?] at jdk.proxy2.$Proxy44.complete(Unknown Source) ~[?:?] at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.lambda$complete$13(ClientNamenodeProtocolTranslatorPB.java:532) ~[hadoop-hdfs-client-3.4.1.jar:?] at org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc(ShadedProtobufHelper.java:160) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.complete(ClientNamenodeProtocolTranslatorPB.java:532) ~[hadoop-hdfs-client-3.4.1.jar:?] at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?] at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?] at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?] at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:437) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:170) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:162) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:100) ~[hadoop-common-3.4.1.jar:?] at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:366) ~[hadoop-common-3.4.1.jar:?] at jdk.proxy2.$Proxy45.complete(Unknown Source) ~[?:?] 
at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?]
at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
at jdk.proxy2.$Proxy46.complete(Unknown Source) ~[?:?]
... (the preceding five reflective proxy frames repeat and are elided) ...
at org.apache.hadoop.hdfs.DFSOutputStream.completeFile(DFSOutputStream.java:992) ~[hadoop-hdfs-client-3.4.1.jar:?]
at org.apache.hadoop.hdfs.DFSOutputStream.completeFile(DFSOutputStream.java:949) ~[hadoop-hdfs-client-3.4.1.jar:?]
at org.apache.hadoop.hdfs.DFSOutputStream.closeImpl(DFSOutputStream.java:913) ~[hadoop-hdfs-client-3.4.1.jar:?]
at org.apache.hadoop.hdfs.DFSOutputStream.close(DFSOutputStream.java:861) ~[hadoop-hdfs-client-3.4.1.jar:?]
at org.apache.hadoop.hdfs.DFSClient.closeAllFilesBeingWritten(DFSClient.java:662) ~[hadoop-hdfs-client-3.4.1.jar:?]
at org.apache.hadoop.hdfs.DFSClient.closeOutputStreams(DFSClient.java:699) ~[hadoop-hdfs-client-3.4.1.jar:?]
at org.apache.hadoop.hdfs.DistributedFileSystem.close(DistributedFileSystem.java:1528) ~[hadoop-hdfs-client-3.4.1.jar:?]
at org.apache.hadoop.hdfs.MiniDFSCluster.shutdown(MiniDFSCluster.java:2194) ~[hadoop-hdfs-3.4.1-tests.jar:?]
at org.apache.hadoop.hdfs.MiniDFSCluster.shutdown(MiniDFSCluster.java:2174) ~[hadoop-hdfs-3.4.1-tests.jar:?]
at org.apache.hadoop.hdfs.MiniDFSCluster.shutdown(MiniDFSCluster.java:2167) ~[hadoop-hdfs-3.4.1-tests.jar:?]
at org.apache.hadoop.hbase.HBaseTestingUtility.shutdownMiniDFSCluster(HBaseTestingUtility.java:834) ~[test-classes/:2.7.0-SNAPSHOT]
at org.apache.hadoop.hbase.HBaseTestingUtility.shutdownMiniCluster(HBaseTestingUtility.java:1342) ~[test-classes/:2.7.0-SNAPSHOT]
at org.apache.hadoop.hbase.regionserver.wal.AbstractTestFSWAL.tearDownAfterClass(AbstractTestFSWAL.java:148) ~[test-classes/:?]
at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?]
at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?]
at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:59) ~[junit-4.13.2.jar:4.13.2]
at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12) ~[junit-4.13.2.jar:4.13.2]
at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:56) ~[junit-4.13.2.jar:4.13.2]
at org.junit.internal.runners.statements.RunAfters.invokeMethod(RunAfters.java:46) ~[junit-4.13.2.jar:4.13.2]
at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:33) ~[junit-4.13.2.jar:4.13.2]
at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:299) ~[junit-4.13.2.jar:4.13.2]
at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:293) ~[junit-4.13.2.jar:4.13.2]
at java.util.concurrent.FutureTask.run(FutureTask.java:264) ~[?:?]
at java.lang.Thread.run(Thread.java:840) ~[?:?]
2024-11-17T21:15:15,584 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.w.WebAppContext@180bb084{datanode,/,null,STOPPED}{jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/datanode}
2024-11-17T21:15:15,587 INFO [Time-limited test {}] server.AbstractConnector(383): Stopped ServerConnector@4a99ee51{HTTP/1.1, (http/1.1)}{localhost:0}
2024-11-17T21:15:15,587 INFO [Time-limited test {}] session.HouseKeeper(149): node0 Stopped scavenging
2024-11-17T21:15:15,587 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.s.ServletContextHandler@4eb66c55{static,/static,jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/static,STOPPED}
2024-11-17T21:15:15,587 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.s.ServletContextHandler@7a3b1c28{logs,/logs,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/09645b7e-9829-f1fc-19c7-d9675fc19400/hadoop.log.dir/,STOPPED}
2024-11-17T21:15:15,590 WARN [BP-754513072-172.17.0.2-1731878086310 heartbeating to localhost/127.0.0.1:37527 {}] datanode.IncrementalBlockReportManager(160): IncrementalBlockReportManager interrupted
2024-11-17T21:15:15,590 ERROR [Command processor {}] datanode.BPServiceActor$CommandProcessingThread(1416): Command processor encountered interrupt and exit.
2024-11-17T21:15:15,590 WARN [BP-754513072-172.17.0.2-1731878086310 heartbeating to localhost/127.0.0.1:37527 {}] datanode.BPServiceActor(925): Ending block pool service for: Block pool BP-754513072-172.17.0.2-1731878086310 (Datanode Uuid a945aceb-762b-4dfe-b44c-f120ab5e97a9) service to localhost/127.0.0.1:37527
2024-11-17T21:15:15,590 WARN [Command processor {}] datanode.BPServiceActor$CommandProcessingThread(1400): Ending command processor service for: Thread[Command processor,5,FailOnTimeoutGroup]
2024-11-17T21:15:15,591 WARN [refreshUsed-/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/09645b7e-9829-f1fc-19c7-d9675fc19400/cluster_2a0274ea-93ca-a98a-2c9d-271c63f62376/dfs/data/data5/current/BP-754513072-172.17.0.2-1731878086310 {}] fs.CachingGetSpaceUsed$RefreshThread(231): Thread Interrupted waiting to refresh disk information: sleep interrupted
2024-11-17T21:15:15,591 WARN [refreshUsed-/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/09645b7e-9829-f1fc-19c7-d9675fc19400/cluster_2a0274ea-93ca-a98a-2c9d-271c63f62376/dfs/data/data6/current/BP-754513072-172.17.0.2-1731878086310 {}] fs.CachingGetSpaceUsed$RefreshThread(231): Thread Interrupted waiting to refresh disk information: sleep interrupted
2024-11-17T21:15:15,592 WARN [Time-limited test {}] datanode.DataSetLockManager(261): not open lock leak check func
2024-11-17T21:15:15,595 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.w.WebAppContext@3c806758{datanode,/,null,STOPPED}{jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/datanode}
2024-11-17T21:15:15,595 INFO [Time-limited test {}] server.AbstractConnector(383): Stopped ServerConnector@23cb9268{HTTP/1.1, (http/1.1)}{localhost:0}
2024-11-17T21:15:15,595 INFO [Time-limited test {}] session.HouseKeeper(149): node0 Stopped scavenging
2024-11-17T21:15:15,595 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.s.ServletContextHandler@19945434{static,/static,jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/static,STOPPED}
2024-11-17T21:15:15,596 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.s.ServletContextHandler@170edd35{logs,/logs,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/09645b7e-9829-f1fc-19c7-d9675fc19400/hadoop.log.dir/,STOPPED}
2024-11-17T21:15:15,597 WARN [BP-754513072-172.17.0.2-1731878086310 heartbeating to localhost/127.0.0.1:37527 {}] datanode.IncrementalBlockReportManager(160): IncrementalBlockReportManager interrupted
2024-11-17T21:15:15,597 ERROR [Command processor {}] datanode.BPServiceActor$CommandProcessingThread(1416): Command processor encountered interrupt and exit.
2024-11-17T21:15:15,597 WARN [Command processor {}] datanode.BPServiceActor$CommandProcessingThread(1400): Ending command processor service for: Thread[Command processor,5,FailOnTimeoutGroup]
2024-11-17T21:15:15,597 WARN [BP-754513072-172.17.0.2-1731878086310 heartbeating to localhost/127.0.0.1:37527 {}] datanode.BPServiceActor(925): Ending block pool service for: Block pool BP-754513072-172.17.0.2-1731878086310 (Datanode Uuid 1c3645fd-da23-44e6-a683-d522c8dc4494) service to localhost/127.0.0.1:37527
2024-11-17T21:15:15,598 WARN [refreshUsed-/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/09645b7e-9829-f1fc-19c7-d9675fc19400/cluster_2a0274ea-93ca-a98a-2c9d-271c63f62376/dfs/data/data3/current/BP-754513072-172.17.0.2-1731878086310 {}] fs.CachingGetSpaceUsed$RefreshThread(231): Thread Interrupted waiting to refresh disk information: sleep interrupted
2024-11-17T21:15:15,598 WARN [refreshUsed-/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/09645b7e-9829-f1fc-19c7-d9675fc19400/cluster_2a0274ea-93ca-a98a-2c9d-271c63f62376/dfs/data/data4/current/BP-754513072-172.17.0.2-1731878086310 {}] fs.CachingGetSpaceUsed$RefreshThread(231): Thread Interrupted waiting to refresh disk information: sleep interrupted
2024-11-17T21:15:15,598 WARN [Time-limited test {}] datanode.DataSetLockManager(261): not open lock leak check func
2024-11-17T21:15:15,603 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.w.WebAppContext@18ba4f79{datanode,/,null,STOPPED}{jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/datanode}
2024-11-17T21:15:15,603 INFO [Time-limited test {}] server.AbstractConnector(383): Stopped ServerConnector@56ade895{HTTP/1.1, (http/1.1)}{localhost:0}
2024-11-17T21:15:15,603 INFO [Time-limited test {}] session.HouseKeeper(149): node0 Stopped scavenging
2024-11-17T21:15:15,603 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.s.ServletContextHandler@7ac6a421{static,/static,jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/static,STOPPED}
2024-11-17T21:15:15,604 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.s.ServletContextHandler@7db971bf{logs,/logs,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/09645b7e-9829-f1fc-19c7-d9675fc19400/hadoop.log.dir/,STOPPED}
2024-11-17T21:15:15,605 ERROR [Command processor {}] datanode.BPServiceActor$CommandProcessingThread(1416): Command processor encountered interrupt and exit.
2024-11-17T21:15:15,605 WARN [BP-754513072-172.17.0.2-1731878086310 heartbeating to localhost/127.0.0.1:37527 {}] datanode.IncrementalBlockReportManager(160): IncrementalBlockReportManager interrupted
2024-11-17T21:15:15,605 WARN [Command processor {}] datanode.BPServiceActor$CommandProcessingThread(1400): Ending command processor service for: Thread[Command processor,5,FailOnTimeoutGroup]
2024-11-17T21:15:15,605 WARN [BP-754513072-172.17.0.2-1731878086310 heartbeating to localhost/127.0.0.1:37527 {}] datanode.BPServiceActor(925): Ending block pool service for: Block pool BP-754513072-172.17.0.2-1731878086310 (Datanode Uuid c97e213b-0dda-4f70-8927-a88ec6af4e08) service to localhost/127.0.0.1:37527
2024-11-17T21:15:15,606 WARN [refreshUsed-/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/09645b7e-9829-f1fc-19c7-d9675fc19400/cluster_2a0274ea-93ca-a98a-2c9d-271c63f62376/dfs/data/data1/current/BP-754513072-172.17.0.2-1731878086310 {}] fs.CachingGetSpaceUsed$RefreshThread(231): Thread Interrupted waiting to refresh disk information: sleep interrupted
2024-11-17T21:15:15,606 WARN [refreshUsed-/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/09645b7e-9829-f1fc-19c7-d9675fc19400/cluster_2a0274ea-93ca-a98a-2c9d-271c63f62376/dfs/data/data2/current/BP-754513072-172.17.0.2-1731878086310 {}] fs.CachingGetSpaceUsed$RefreshThread(231): Thread Interrupted waiting to refresh disk information: sleep interrupted
2024-11-17T21:15:15,606 WARN [Time-limited test {}] datanode.DataSetLockManager(261): not open lock leak check func
2024-11-17T21:15:15,613 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.w.WebAppContext@76669e88{hdfs,/,null,STOPPED}{jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/hdfs}
2024-11-17T21:15:15,614 INFO [Time-limited test {}] server.AbstractConnector(383): Stopped ServerConnector@75c47eed{HTTP/1.1, (http/1.1)}{localhost:0}
2024-11-17T21:15:15,614 INFO [Time-limited test {}] session.HouseKeeper(149): node0 Stopped scavenging
2024-11-17T21:15:15,614 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.s.ServletContextHandler@6b6353bb{static,/static,jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/static,STOPPED}
2024-11-17T21:15:15,614 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.s.ServletContextHandler@3837011d{logs,/logs,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/09645b7e-9829-f1fc-19c7-d9675fc19400/hadoop.log.dir/,STOPPED}
2024-11-17T21:15:15,653 INFO [Time-limited test {}] hbase.HBaseTestingUtility(1347): Minicluster is down