2024-11-14 17:05:45,740 main DEBUG Apache Log4j Core 2.17.2 initializing configuration org.apache.logging.log4j.core.config.properties.PropertiesConfiguration@2dfaea86
2024-11-14 17:05:45,757 main DEBUG Took 0.015769 seconds to load 1 plugins from package org.apache.hadoop.hbase.logging
2024-11-14 17:05:45,757 main DEBUG PluginManager 'Core' found 129 plugins
2024-11-14 17:05:45,758 main DEBUG PluginManager 'Level' found 0 plugins
2024-11-14 17:05:45,758 main DEBUG PluginManager 'Lookup' found 16 plugins
2024-11-14 17:05:45,759 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig].
2024-11-14 17:05:45,767 main DEBUG PluginManager 'TypeConverter' found 26 plugins
2024-11-14 17:05:45,783 main DEBUG LoggerConfig$Builder(additivity="null", level="ERROR", levelAndRefs="null", name="org.apache.hadoop.metrics2.util.MBeans", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-11-14 17:05:45,785 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig].
2024-11-14 17:05:45,786 main DEBUG LoggerConfig$Builder(additivity="null", level="DEBUG", levelAndRefs="null", name="org.apache.hadoop.hbase.logging.TestJul2Slf4j", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-11-14 17:05:45,787 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig].
2024-11-14 17:05:45,787 main DEBUG LoggerConfig$Builder(additivity="null", level="ERROR", levelAndRefs="null", name="org.apache.zookeeper", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-11-14 17:05:45,788 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig].
2024-11-14 17:05:45,789 main DEBUG LoggerConfig$Builder(additivity="null", level="WARN", levelAndRefs="null", name="org.apache.hadoop.metrics2.impl.MetricsSinkAdapter", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-11-14 17:05:45,789 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig].
2024-11-14 17:05:45,790 main DEBUG LoggerConfig$Builder(additivity="null", level="ERROR", levelAndRefs="null", name="org.apache.hadoop.metrics2.impl.MetricsSystemImpl", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-11-14 17:05:45,790 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig].
2024-11-14 17:05:45,791 main DEBUG LoggerConfig$Builder(additivity="false", level="WARN", levelAndRefs="null", name="org.apache.directory", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-11-14 17:05:45,792 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig].
2024-11-14 17:05:45,793 main DEBUG LoggerConfig$Builder(additivity="null", level="DEBUG", levelAndRefs="null", name="org.apache.hadoop.hbase.ipc.FailedServers", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-11-14 17:05:45,793 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig].
2024-11-14 17:05:45,794 main DEBUG LoggerConfig$Builder(additivity="null", level="WARN", levelAndRefs="null", name="org.apache.hadoop.metrics2.impl.MetricsConfig", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-11-14 17:05:45,794 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig].
2024-11-14 17:05:45,795 main DEBUG LoggerConfig$Builder(additivity="null", level="INFO", levelAndRefs="null", name="org.apache.hadoop.hbase.ScheduledChore", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-11-14 17:05:45,795 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig].
2024-11-14 17:05:45,796 main DEBUG LoggerConfig$Builder(additivity="null", level="DEBUG", levelAndRefs="null", name="org.apache.hadoop.hbase.regionserver.RSRpcServices", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-11-14 17:05:45,796 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig].
2024-11-14 17:05:45,797 main DEBUG LoggerConfig$Builder(additivity="null", level="WARN", levelAndRefs="null", name="org.apache.hadoop", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-11-14 17:05:45,797 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig].
2024-11-14 17:05:45,798 main DEBUG LoggerConfig$Builder(additivity="null", level="DEBUG", levelAndRefs="null", name="org.apache.hadoop.hbase", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-11-14 17:05:45,798 main DEBUG Building Plugin[name=logger, class=org.apache.logging.log4j.core.config.LoggerConfig].
2024-11-14 17:05:45,799 main DEBUG LoggerConfig$Builder(additivity="null", level="DEBUG", levelAndRefs="null", name="org.apache.hbase.thirdparty.io.netty.channel", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-11-14 17:05:45,799 main DEBUG Building Plugin[name=root, class=org.apache.logging.log4j.core.config.LoggerConfig$RootLogger].
2024-11-14 17:05:45,801 main DEBUG LoggerConfig$RootLogger$Builder(additivity="null", level="null", levelAndRefs="INFO,Console", includeLocation="null", ={}, ={}, Configuration(PropertiesConfig), Filter=null)
2024-11-14 17:05:45,803 main DEBUG Building Plugin[name=loggers, class=org.apache.logging.log4j.core.config.LoggersPlugin].
2024-11-14 17:05:45,805 main DEBUG createLoggers(={org.apache.hadoop.metrics2.util.MBeans, org.apache.hadoop.hbase.logging.TestJul2Slf4j, org.apache.zookeeper, org.apache.hadoop.metrics2.impl.MetricsSinkAdapter, org.apache.hadoop.metrics2.impl.MetricsSystemImpl, org.apache.directory, org.apache.hadoop.hbase.ipc.FailedServers, org.apache.hadoop.metrics2.impl.MetricsConfig, org.apache.hadoop.hbase.ScheduledChore, org.apache.hadoop.hbase.regionserver.RSRpcServices, org.apache.hadoop, org.apache.hadoop.hbase, org.apache.hbase.thirdparty.io.netty.channel, root})
2024-11-14 17:05:45,806 main DEBUG Building Plugin[name=layout, class=org.apache.logging.log4j.core.layout.PatternLayout].
2024-11-14 17:05:45,807 main DEBUG PatternLayout$Builder(pattern="%d{ISO8601} %-5p [%t%notEmpty{ %X}] %C{2}(%L): %m%n", PatternSelector=null, Configuration(PropertiesConfig), Replace=null, charset="null", alwaysWriteExceptions="null", disableAnsi="null", noConsoleNoAnsi="null", header="null", footer="null")
2024-11-14 17:05:45,808 main DEBUG PluginManager 'Converter' found 47 plugins
2024-11-14 17:05:45,820 main DEBUG Building Plugin[name=appender, class=org.apache.hadoop.hbase.logging.HBaseTestAppender].
2024-11-14 17:05:45,823 main DEBUG HBaseTestAppender$Builder(target="SYSTEM_ERR", maxSize="1G", bufferedIo="null", bufferSize="null", immediateFlush="null", ignoreExceptions="null", PatternLayout(%d{ISO8601} %-5p [%t%notEmpty{ %X}] %C{2}(%L): %m%n), name="Console", Configuration(PropertiesConfig), Filter=null, ={})
2024-11-14 17:05:45,825 main DEBUG Starting HBaseTestOutputStreamManager SYSTEM_ERR
2024-11-14 17:05:45,825 main DEBUG Building Plugin[name=appenders, class=org.apache.logging.log4j.core.config.AppendersPlugin].
2024-11-14 17:05:45,826 main DEBUG createAppenders(={Console})
2024-11-14 17:05:45,827 main DEBUG Configuration org.apache.logging.log4j.core.config.properties.PropertiesConfiguration@2dfaea86 initialized
2024-11-14 17:05:45,827 main DEBUG Starting configuration org.apache.logging.log4j.core.config.properties.PropertiesConfiguration@2dfaea86
2024-11-14 17:05:45,827 main DEBUG Started configuration org.apache.logging.log4j.core.config.properties.PropertiesConfiguration@2dfaea86 OK.
2024-11-14 17:05:45,828 main DEBUG Shutting down OutputStreamManager SYSTEM_OUT.false.false-1
2024-11-14 17:05:45,829 main DEBUG OutputStream closed
2024-11-14 17:05:45,829 main DEBUG Shut down OutputStreamManager SYSTEM_OUT.false.false-1, all resources released: true
2024-11-14 17:05:45,829 main DEBUG Appender DefaultConsole-1 stopped with status true
2024-11-14 17:05:45,830 main DEBUG Stopped org.apache.logging.log4j.core.config.DefaultConfiguration@3ee37e5a OK
2024-11-14 17:05:45,914 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6
2024-11-14 17:05:45,917 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=StatusLogger
2024-11-14 17:05:45,918 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=ContextSelector
2024-11-14 17:05:45,918 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=
2024-11-14 17:05:45,919 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.directory
2024-11-14 17:05:45,919 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hadoop.metrics2.impl.MetricsSinkAdapter
2024-11-14 17:05:45,920 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.zookeeper
2024-11-14 17:05:45,920 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hadoop.hbase.logging.TestJul2Slf4j
2024-11-14 17:05:45,920 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hadoop.metrics2.impl.MetricsSystemImpl
2024-11-14 17:05:45,921 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hadoop.metrics2.util.MBeans
2024-11-14 17:05:45,921 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hadoop.hbase
2024-11-14 17:05:45,921 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hadoop
2024-11-14 17:05:45,922 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hadoop.hbase.ipc.FailedServers
2024-11-14 17:05:45,922 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hadoop.hbase.regionserver.RSRpcServices
2024-11-14 17:05:45,922 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hadoop.metrics2.impl.MetricsConfig
2024-11-14 17:05:45,923 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hbase.thirdparty.io.netty.channel
2024-11-14 17:05:45,923 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Loggers,name=org.apache.hadoop.hbase.ScheduledChore
2024-11-14 17:05:45,924 main DEBUG Registering MBean org.apache.logging.log4j2:type=1dbd16a6,component=Appenders,name=Console
2024-11-14 17:05:45,927 main DEBUG org.apache.logging.log4j.core.util.SystemClock supports precise timestamps.
2024-11-14 17:05:45,927 main DEBUG Reconfiguration complete for context[name=1dbd16a6] at URI jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-logging/target/hbase-logging-2.7.0-SNAPSHOT-tests.jar!/log4j2.properties (org.apache.logging.log4j.core.LoggerContext@1a20270e) with optional ClassLoader: null
2024-11-14 17:05:45,928 main DEBUG Shutdown hook enabled. Registering a new one.
2024-11-14 17:05:45,928 main DEBUG LoggerContext[name=1dbd16a6, org.apache.logging.log4j.core.LoggerContext@1a20270e] started OK.
2024-11-14T17:05:45,945 INFO  [main {}] hbase.HBaseClassTestRule(94): Test class org.apache.hadoop.hbase.master.cleaner.TestHFileCleaner timeout: 13 mins
2024-11-14 17:05:45,949 main DEBUG AsyncLogger.ThreadNameStrategy=UNCACHED (user specified null, default is UNCACHED)
2024-11-14 17:05:45,950 main DEBUG org.apache.logging.log4j.core.util.SystemClock supports precise timestamps.
2024-11-14T17:05:46,260 DEBUG [main {}] hbase.HBaseTestingUtility(348): Setting hbase.rootdir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/19748226-9e32-0ba5-9617-9a904d4d6417
2024-11-14T17:05:46,275 INFO  [Time-limited test {}] hbase.HBaseZKTestingUtility(82): Created new mini-cluster data directory: /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/19748226-9e32-0ba5-9617-9a904d4d6417/cluster_cd416bbb-ba83-f2ab-800c-fa257bf5af70, deleteOnExit=true
2024-11-14T17:05:46,276 INFO  [Time-limited test {}] hbase.HBaseTestingUtility(824): Setting test.cache.data to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/19748226-9e32-0ba5-9617-9a904d4d6417/test.cache.data in system properties and HBase conf
2024-11-14T17:05:46,277 INFO  [Time-limited test {}] hbase.HBaseTestingUtility(824): Setting hadoop.tmp.dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/19748226-9e32-0ba5-9617-9a904d4d6417/hadoop.tmp.dir in system properties and HBase conf
2024-11-14T17:05:46,278 INFO  [Time-limited test {}] hbase.HBaseTestingUtility(824): Setting hadoop.log.dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/19748226-9e32-0ba5-9617-9a904d4d6417/hadoop.log.dir in system properties and HBase conf
2024-11-14T17:05:46,279 INFO  [Time-limited test {}] hbase.HBaseTestingUtility(824): Setting mapreduce.cluster.local.dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/19748226-9e32-0ba5-9617-9a904d4d6417/mapreduce.cluster.local.dir in system properties and HBase conf
2024-11-14T17:05:46,279 INFO  [Time-limited test {}] hbase.HBaseTestingUtility(824): Setting mapreduce.cluster.temp.dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/19748226-9e32-0ba5-9617-9a904d4d6417/mapreduce.cluster.temp.dir in system properties and HBase conf
2024-11-14T17:05:46,280 INFO  [Time-limited test {}] hbase.HBaseTestingUtility(811): read short circuit is OFF
2024-11-14T17:05:46,397 WARN  [Time-limited test {}] util.NativeCodeLoader(60): Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
2024-11-14T17:05:46,508 DEBUG [Time-limited test {}] fs.HFileSystem(310): The file system is not a DistributedFileSystem. Skipping on block location reordering
2024-11-14T17:05:46,513 INFO  [Time-limited test {}] hbase.HBaseTestingUtility(824): Setting yarn.node-labels.fs-store.root-dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/19748226-9e32-0ba5-9617-9a904d4d6417/yarn.node-labels.fs-store.root-dir in system properties and HBase conf
2024-11-14T17:05:46,514 INFO  [Time-limited test {}] hbase.HBaseTestingUtility(824): Setting yarn.node-attribute.fs-store.root-dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/19748226-9e32-0ba5-9617-9a904d4d6417/yarn.node-attribute.fs-store.root-dir in system properties and HBase conf
2024-11-14T17:05:46,514 INFO  [Time-limited test {}] hbase.HBaseTestingUtility(824): Setting yarn.nodemanager.log-dirs to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/19748226-9e32-0ba5-9617-9a904d4d6417/yarn.nodemanager.log-dirs in system properties and HBase conf
2024-11-14T17:05:46,514 INFO  [Time-limited test {}] hbase.HBaseTestingUtility(824): Setting yarn.nodemanager.remote-app-log-dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/19748226-9e32-0ba5-9617-9a904d4d6417/yarn.nodemanager.remote-app-log-dir in system properties and HBase conf
2024-11-14T17:05:46,515 INFO  [Time-limited test {}] hbase.HBaseTestingUtility(824): Setting yarn.timeline-service.entity-group-fs-store.active-dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/19748226-9e32-0ba5-9617-9a904d4d6417/yarn.timeline-service.entity-group-fs-store.active-dir in system properties and HBase conf
2024-11-14T17:05:46,516 INFO  [Time-limited test {}] hbase.HBaseTestingUtility(824): Setting yarn.timeline-service.entity-group-fs-store.done-dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/19748226-9e32-0ba5-9617-9a904d4d6417/yarn.timeline-service.entity-group-fs-store.done-dir in system properties and HBase conf
2024-11-14T17:05:46,516 INFO  [Time-limited test {}] hbase.HBaseTestingUtility(824): Setting yarn.nodemanager.remote-app-log-dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/19748226-9e32-0ba5-9617-9a904d4d6417/yarn.nodemanager.remote-app-log-dir in system properties and HBase conf
2024-11-14T17:05:46,516 INFO  [Time-limited test {}] hbase.HBaseTestingUtility(824): Setting dfs.journalnode.edits.dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/19748226-9e32-0ba5-9617-9a904d4d6417/dfs.journalnode.edits.dir in system properties and HBase conf
2024-11-14T17:05:46,517 INFO  [Time-limited test {}] hbase.HBaseTestingUtility(824): Setting dfs.datanode.shared.file.descriptor.paths to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/19748226-9e32-0ba5-9617-9a904d4d6417/dfs.datanode.shared.file.descriptor.paths in system properties and HBase conf
2024-11-14T17:05:46,517 INFO  [Time-limited test {}] hbase.HBaseTestingUtility(824): Setting nfs.dump.dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/19748226-9e32-0ba5-9617-9a904d4d6417/nfs.dump.dir in system properties and HBase conf
2024-11-14T17:05:46,518 INFO  [Time-limited test {}] hbase.HBaseTestingUtility(824): Setting java.io.tmpdir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/19748226-9e32-0ba5-9617-9a904d4d6417/java.io.tmpdir in system properties and HBase conf
2024-11-14T17:05:46,518 INFO  [Time-limited test {}] hbase.HBaseTestingUtility(824): Setting dfs.journalnode.edits.dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/19748226-9e32-0ba5-9617-9a904d4d6417/dfs.journalnode.edits.dir in system properties and HBase conf
2024-11-14T17:05:46,519 INFO  [Time-limited test {}] hbase.HBaseTestingUtility(824): Setting dfs.provided.aliasmap.inmemory.leveldb.dir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/19748226-9e32-0ba5-9617-9a904d4d6417/dfs.provided.aliasmap.inmemory.leveldb.dir in system properties and HBase conf
2024-11-14T17:05:46,519 INFO  [Time-limited test {}] hbase.HBaseTestingUtility(824): Setting fs.s3a.committer.staging.tmp.path to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/19748226-9e32-0ba5-9617-9a904d4d6417/fs.s3a.committer.staging.tmp.path in system properties and HBase conf
2024-11-14T17:05:47,430 WARN  [Time-limited test {}] impl.MetricsConfig(138): Cannot locate configuration: tried hadoop-metrics2-namenode.properties,hadoop-metrics2.properties
2024-11-14T17:05:47,517 INFO  [Time-limited test {}] log.Log(170): Logging initialized @2501ms to org.eclipse.jetty.util.log.Slf4jLog
2024-11-14T17:05:47,613 WARN  [Time-limited test {}] server.AuthenticationFilter(240): Unable to initialize FileSignerSecretProvider, falling back to use random secrets. Reason: Could not read signature secret file: /home/jenkins/hadoop-http-auth-signature-secret
2024-11-14T17:05:47,702 INFO  [Time-limited test {}] server.Server(375): jetty-9.4.53.v20231009; built: 2023-10-09T12:29:09.265Z; git: 27bde00a0b95a1d5bbee0eae7984f891d2d0f8c9; jvm 17.0.11+9
2024-11-14T17:05:47,728 INFO  [Time-limited test {}] session.DefaultSessionIdManager(334): DefaultSessionIdManager workerName=node0
2024-11-14T17:05:47,728 INFO  [Time-limited test {}] session.DefaultSessionIdManager(339): No SessionScavenger set, using defaults
2024-11-14T17:05:47,730 INFO  [Time-limited test {}] session.HouseKeeper(132): node0 Scavenging every 600000ms
2024-11-14T17:05:47,743 WARN  [Time-limited test {}] server.AuthenticationFilter(240): Unable to initialize FileSignerSecretProvider, falling back to use random secrets. Reason: Could not read signature secret file: /home/jenkins/hadoop-http-auth-signature-secret
2024-11-14T17:05:47,746 INFO  [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.s.ServletContextHandler@3402cd06{logs,/logs,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/19748226-9e32-0ba5-9617-9a904d4d6417/hadoop.log.dir/,AVAILABLE}
2024-11-14T17:05:47,747 INFO  [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.s.ServletContextHandler@190f9941{static,/static,jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/static,AVAILABLE}
2024-11-14T17:05:47,979 INFO  [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.w.WebAppContext@6f207b16{hdfs,/,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/19748226-9e32-0ba5-9617-9a904d4d6417/java.io.tmpdir/jetty-localhost-44867-hadoop-hdfs-3_4_1-tests_jar-_-any-16822926823048120863/webapp/,AVAILABLE}{jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/hdfs}
2024-11-14T17:05:47,987 INFO  [Time-limited test {}] server.AbstractConnector(333): Started ServerConnector@8075d58{HTTP/1.1, (http/1.1)}{localhost:44867}
2024-11-14T17:05:47,987 INFO  [Time-limited test {}] server.Server(415): Started @2972ms
2024-11-14T17:05:48,442 WARN  [Time-limited test {}] server.AuthenticationFilter(240): Unable to initialize FileSignerSecretProvider, falling back to use random secrets. Reason: Could not read signature secret file: /home/jenkins/hadoop-http-auth-signature-secret
2024-11-14T17:05:48,457 INFO  [Time-limited test {}] server.Server(375): jetty-9.4.53.v20231009; built: 2023-10-09T12:29:09.265Z; git: 27bde00a0b95a1d5bbee0eae7984f891d2d0f8c9; jvm 17.0.11+9
2024-11-14T17:05:48,460 INFO  [Time-limited test {}] session.DefaultSessionIdManager(334): DefaultSessionIdManager workerName=node0
2024-11-14T17:05:48,460 INFO  [Time-limited test {}] session.DefaultSessionIdManager(339): No SessionScavenger set, using defaults
2024-11-14T17:05:48,461 INFO  [Time-limited test {}] session.HouseKeeper(132): node0 Scavenging every 660000ms
2024-11-14T17:05:48,462 INFO  [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.s.ServletContextHandler@6d607707{logs,/logs,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/19748226-9e32-0ba5-9617-9a904d4d6417/hadoop.log.dir/,AVAILABLE}
2024-11-14T17:05:48,462 INFO  [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.s.ServletContextHandler@ade538{static,/static,jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/static,AVAILABLE}
2024-11-14T17:05:48,628 INFO  [Time-limited test {}] handler.ContextHandler(921): Started o.e.j.w.WebAppContext@530b4d2e{datanode,/,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/19748226-9e32-0ba5-9617-9a904d4d6417/java.io.tmpdir/jetty-localhost-32873-hadoop-hdfs-3_4_1-tests_jar-_-any-5476567866144665394/webapp/,AVAILABLE}{jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/datanode}
2024-11-14T17:05:48,629 INFO  [Time-limited test {}] server.AbstractConnector(333): Started ServerConnector@76ee0868{HTTP/1.1, (http/1.1)}{localhost:32873}
2024-11-14T17:05:48,629 INFO  [Time-limited test {}] server.Server(415): Started @3614ms
2024-11-14T17:05:48,713 WARN  [Time-limited test {}] web.RestCsrfPreventionFilterHandler(75): Got null for restCsrfPreventionFilter - will not do any filtering.
2024-11-14T17:05:49,362 WARN  [Thread-72 {}] impl.BlockPoolSlice(347): dfsUsed file missing in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/19748226-9e32-0ba5-9617-9a904d4d6417/cluster_cd416bbb-ba83-f2ab-800c-fa257bf5af70/dfs/data/data1/current/BP-1078482885-172.17.0.2-1731603947145/current, will proceed with Du for space computation calculation,
2024-11-14T17:05:49,362 WARN  [Thread-73 {}] impl.BlockPoolSlice(347): dfsUsed file missing in /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/19748226-9e32-0ba5-9617-9a904d4d6417/cluster_cd416bbb-ba83-f2ab-800c-fa257bf5af70/dfs/data/data2/current/BP-1078482885-172.17.0.2-1731603947145/current, will proceed with Du for space computation calculation,
2024-11-14T17:05:49,430 WARN  [Thread-58 {}] datanode.DirectoryScanner(302): dfs.datanode.directoryscan.throttle.limit.ms.per.sec set to value above 1000 ms/sec. Assuming default value of -1
2024-11-14T17:05:49,490 INFO  [Block report processor {}] blockmanagement.BlockManager(2940): BLOCK* processReport 0x60e916a51dcd6037 with lease ID 0x27dea3b6c70204bd: Processing first storage report for DS-764caf3d-2399-43fc-b2f3-aaed152333f6 from datanode DatanodeRegistration(127.0.0.1:34607, datanodeUuid=a6f1db02-3997-49d4-a79c-c89e571eb9db, infoPort=38091, infoSecurePort=0, ipcPort=34887, storageInfo=lv=-57;cid=testClusterID;nsid=477023090;c=1731603947145)
2024-11-14T17:05:49,491 INFO  [Block report processor {}] blockmanagement.BlockManager(2972): BLOCK* processReport 0x60e916a51dcd6037 with lease ID 0x27dea3b6c70204bd: from storage DS-764caf3d-2399-43fc-b2f3-aaed152333f6 node DatanodeRegistration(127.0.0.1:34607, datanodeUuid=a6f1db02-3997-49d4-a79c-c89e571eb9db, infoPort=38091, infoSecurePort=0, ipcPort=34887, storageInfo=lv=-57;cid=testClusterID;nsid=477023090;c=1731603947145), blocks: 0, hasStaleStorage: true, processing time: 1 msecs, invalidatedBlocks: 0
2024-11-14T17:05:49,492 INFO  [Block report processor {}] blockmanagement.BlockManager(2940): BLOCK* processReport 0x60e916a51dcd6037 with lease ID 0x27dea3b6c70204bd: Processing first storage report for DS-d2fd4ec1-f4f1-408e-be39-8eb70c21f631 from datanode DatanodeRegistration(127.0.0.1:34607, datanodeUuid=a6f1db02-3997-49d4-a79c-c89e571eb9db, infoPort=38091, infoSecurePort=0, ipcPort=34887, storageInfo=lv=-57;cid=testClusterID;nsid=477023090;c=1731603947145)
2024-11-14T17:05:49,492 INFO  [Block report processor {}] blockmanagement.BlockManager(2972): BLOCK* processReport 0x60e916a51dcd6037 with lease ID 0x27dea3b6c70204bd: from storage DS-d2fd4ec1-f4f1-408e-be39-8eb70c21f631 node DatanodeRegistration(127.0.0.1:34607, datanodeUuid=a6f1db02-3997-49d4-a79c-c89e571eb9db, infoPort=38091, infoSecurePort=0, ipcPort=34887, storageInfo=lv=-57;cid=testClusterID;nsid=477023090;c=1731603947145), blocks: 0, hasStaleStorage: false, processing time: 1 msecs, invalidatedBlocks: 0
2024-11-14T17:05:49,545 DEBUG [Time-limited test {}] hbase.HBaseTestingUtility(703): Setting hbase.rootdir to /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/19748226-9e32-0ba5-9617-9a904d4d6417
2024-11-14T17:05:49,551 INFO  [Time-limited test {}] cleaner.DirScanPool(74): hfile_cleaner Cleaner pool size is 2
2024-11-14T17:05:49,562 INFO  [Time-limited test {}] hbase.ResourceChecker(147): before: master.cleaner.TestHFileCleaner#testTTLCleaner Thread=91, OpenFileDescriptor=338, MaxFileDescriptor=1048576, SystemLoadAverage=249, ProcessCount=11, AvailableMemoryMB=9364
2024-11-14T17:05:49,575 INFO  [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-11-14T17:05:49,579 INFO  [Time-limited test {}] fs.HFileSystem(339): Added intercepting call to namenode#getBlockLocations so can do block reordering using class org.apache.hadoop.hbase.fs.HFileSystem$ReorderWALBlocks
2024-11-14T17:05:49,691 INFO  [Time-limited test {}] hbase.ResourceChecker(175): after: master.cleaner.TestHFileCleaner#testTTLCleaner Thread=92 (was 91)
Potentially hanging thread: LeaseRenewer:jenkins@localhost:35523
    java.base@17.0.11/java.lang.Thread.sleep(Native Method)
    app//org.apache.hadoop.hdfs.client.impl.LeaseRenewer.run(LeaseRenewer.java:441)
    app//org.apache.hadoop.hdfs.client.impl.LeaseRenewer.access$800(LeaseRenewer.java:77)
    app//org.apache.hadoop.hdfs.client.impl.LeaseRenewer$1.run(LeaseRenewer.java:336)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
 - Thread LEAK? -, OpenFileDescriptor=338 (was 338), MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=249 (was 249), ProcessCount=11 (was 11), AvailableMemoryMB=9374 (was 9364) - AvailableMemoryMB LEAK? -
2024-11-14T17:05:49,700 INFO  [Time-limited test {}] hbase.ResourceChecker(147): before: master.cleaner.TestHFileCleaner#testThreadCleanup Thread=92, OpenFileDescriptor=338, MaxFileDescriptor=1048576, SystemLoadAverage=249, ProcessCount=11, AvailableMemoryMB=9370
2024-11-14T17:05:49,704 DEBUG [Time-limited test {}] cleaner.HFileCleaner(260): Starting for large file=Thread[Time-limited test-HFileCleaner.large.0-1731603949703,5,FailOnTimeoutGroup]
2024-11-14T17:05:49,705 DEBUG [Time-limited test {}] cleaner.HFileCleaner(275): Starting for small files=Thread[Time-limited test-HFileCleaner.small.0-1731603949704,5,FailOnTimeoutGroup]
2024-11-14T17:05:49,730 WARN  [hfile_cleaner-dir-scan-pool-0 {}] cleaner.CleanerChore(361): Found a wrongly formatted file: hdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/dfd-dfd - will delete it.
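For context on the testTTLCleaner run above: a TTL-based cleaner delegate keeps an archived HFile until its age exceeds a configured time-to-live, and only then reports it as deletable. A minimal sketch of that age check, in plain Java; the class and method names below are illustrative assumptions, not the actual HBase TimeToLiveHFileCleaner API.

// Illustrative only: mirrors the TTL decision exercised by testTTLCleaner.
public final class TtlCheckSketch {
    // Returns true when the file's age exceeds the configured TTL and it may be deleted.
    static boolean isExpired(long fileModificationTimeMs, long ttlMs, long nowMs) {
        long ageMs = nowMs - fileModificationTimeMs;
        return ageMs > ttlMs;
    }

    public static void main(String[] args) {
        long now = System.currentTimeMillis();
        long oneHourAgo = now - 3_600_000L;
        System.out.println(isExpired(oneHourAgo, 60_000L, now));     // true: older than a 60s TTL
        System.out.println(isExpired(oneHourAgo, 86_400_000L, now)); // false: still inside a 24h TTL
    }
}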
2024-11-14T17:05:49,748 DEBUG [Time-limited test {}] cleaner.HFileCleaner(335): Stopping file delete threads
2024-11-14T17:05:49,748 DEBUG [Time-limited test-HFileCleaner.small.0-1731603949704 {}] cleaner.HFileCleaner(306): Exit Thread[Time-limited test-HFileCleaner.small.0-1731603949704,5,FailOnTimeoutGroup]
2024-11-14T17:05:49,748 DEBUG [Time-limited test-HFileCleaner.large.0-1731603949703 {}] cleaner.HFileCleaner(306): Exit Thread[Time-limited test-HFileCleaner.large.0-1731603949703,5,FailOnTimeoutGroup]
2024-11-14T17:05:49,856 INFO  [Time-limited test {}] hbase.ResourceChecker(175): after: master.cleaner.TestHFileCleaner#testThreadCleanup Thread=93 (was 92)
Potentially hanging thread: hfile_cleaner-dir-scan-pool-0
    java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method)
    java.base@17.0.11/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:252)
    java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:1674)
    java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:460)
    java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1061)
    java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1122)
    java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
 - Thread LEAK? -, OpenFileDescriptor=338 (was 338), MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=249 (was 249), ProcessCount=11 (was 11), AvailableMemoryMB=9367 (was 9370)
2024-11-14T17:05:49,862 INFO  [Time-limited test {}] hbase.ResourceChecker(147): before: master.cleaner.TestHFileCleaner#testOnConfigurationChange Thread=93, OpenFileDescriptor=338, MaxFileDescriptor=1048576, SystemLoadAverage=249, ProcessCount=11, AvailableMemoryMB=9366
2024-11-14T17:05:49,863 DEBUG [Time-limited test {}] cleaner.HFileCleaner(260): Starting for large file=Thread[Time-limited test-HFileCleaner.large.0-1731603949863,5,FailOnTimeoutGroup]
2024-11-14T17:05:49,864 DEBUG [Time-limited test {}] cleaner.HFileCleaner(275): Starting for small files=Thread[Time-limited test-HFileCleaner.small.0-1731603949864,5,FailOnTimeoutGroup]
2024-11-14T17:05:50,019 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34607 is added to blk_1073741825_1001 (size=1048576)
2024-11-14T17:05:50,436 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34607 is added to blk_1073741826_1002 (size=1048576)
2024-11-14T17:05:50,851 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34607 is added to blk_1073741827_1003 (size=1048576)
2024-11-14T17:05:51,266 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34607 is added to blk_1073741828_1004 (size=1048576)
2024-11-14T17:05:51,684 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34607 is added to blk_1073741829_1005 (size=1048576)
2024-11-14T17:05:52,103 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34607 is added to blk_1073741830_1006 (size=1024)
2024-11-14T17:05:52,520 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34607 is added to blk_1073741831_1007 (size=1024)
2024-11-14T17:05:52,527 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34607 is added to blk_1073741832_1008 (size=1024)
2024-11-14T17:05:52,937 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34607 is added to blk_1073741833_1009 (size=1024)
2024-11-14T17:05:52,947 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34607 is added to blk_1073741834_1010 (size=1024)
2024-11-14T17:05:53,356 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34607 is added to blk_1073741835_1011 (size=1024)
2024-11-14T17:05:53,765 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34607 is added to blk_1073741836_1012 (size=1024)
2024-11-14T17:05:54,174 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34607 is added to blk_1073741837_1013 (size=1024)
2024-11-14T17:05:54,583 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34607 is added to blk_1073741838_1014 (size=1024)
2024-11-14T17:05:54,992 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34607 is added to blk_1073741839_1015 (size=1024)
2024-11-14T17:05:55,402 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34607 is added to blk_1073741840_1016 (size=1024)
2024-11-14T17:05:55,810 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34607 is added to blk_1073741841_1017 (size=1024)
2024-11-14T17:05:56,222 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34607 is added to blk_1073741842_1018 (size=1024)
2024-11-14T17:05:56,631 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34607 is added to blk_1073741843_1019 (size=1024)
2024-11-14T17:05:57,043 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34607 is added to blk_1073741844_1020 (size=1024)
2024-11-14T17:05:57,051 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34607 is added to blk_1073741845_1021 (size=1024)
2024-11-14T17:05:57,061 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34607 is added to blk_1073741846_1022 (size=1024)
2024-11-14T17:05:57,471 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34607 is added to blk_1073741847_1023 (size=1024)
2024-11-14T17:05:57,880 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34607 is added to blk_1073741848_1024 (size=1024)
2024-11-14T17:05:57,892 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34607 is added to blk_1073741849_1025 (size=1024)
2024-11-14T17:05:58,299 WARN  [hfile_cleaner-dir-scan-pool-1 {}] cleaner.CleanerChore(361): Found a wrongly formatted file: hdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/large-file-1 - will delete it.
2024-11-14T17:05:58,299 WARN  [hfile_cleaner-dir-scan-pool-1 {}] cleaner.CleanerChore(361): Found a wrongly formatted file: hdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/large-file-2 - will delete it.
2024-11-14T17:05:58,299 WARN  [hfile_cleaner-dir-scan-pool-1 {}] cleaner.CleanerChore(361): Found a wrongly formatted file: hdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/large-file-3 - will delete it.
2024-11-14T17:05:58,300 WARN  [hfile_cleaner-dir-scan-pool-1 {}] cleaner.CleanerChore(361): Found a wrongly formatted file: hdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/large-file-4 - will delete it.
2024-11-14T17:05:58,300 WARN  [hfile_cleaner-dir-scan-pool-1 {}] cleaner.CleanerChore(361): Found a wrongly formatted file: hdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/large-file-5 - will delete it.
2024-11-14T17:05:58,300 WARN  [hfile_cleaner-dir-scan-pool-1 {}] cleaner.CleanerChore(361): Found a wrongly formatted file: hdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/small-file-1 - will delete it.
2024-11-14T17:05:58,300 WARN  [hfile_cleaner-dir-scan-pool-1 {}] cleaner.CleanerChore(361): Found a wrongly formatted file: hdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/small-file-10 - will delete it.
2024-11-14T17:05:58,300 WARN  [hfile_cleaner-dir-scan-pool-1 {}] cleaner.CleanerChore(361): Found a wrongly formatted file: hdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/small-file-11 - will delete it.
2024-11-14T17:05:58,301 WARN  [hfile_cleaner-dir-scan-pool-1 {}] cleaner.CleanerChore(361): Found a wrongly formatted file: hdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/small-file-12 - will delete it.
2024-11-14T17:05:58,301 WARN  [hfile_cleaner-dir-scan-pool-1 {}] cleaner.CleanerChore(361): Found a wrongly formatted file: hdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/small-file-13 - will delete it.
2024-11-14T17:05:58,301 WARN  [hfile_cleaner-dir-scan-pool-1 {}] cleaner.CleanerChore(361): Found a wrongly formatted file: hdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/small-file-14 - will delete it.
2024-11-14T17:05:58,301 WARN  [hfile_cleaner-dir-scan-pool-1 {}] cleaner.CleanerChore(361): Found a wrongly formatted file: hdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/small-file-15 - will delete it.
2024-11-14T17:05:58,301 WARN  [hfile_cleaner-dir-scan-pool-1 {}] cleaner.CleanerChore(361): Found a wrongly formatted file: hdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/small-file-16 - will delete it.
2024-11-14T17:05:58,302 WARN  [hfile_cleaner-dir-scan-pool-1 {}] cleaner.CleanerChore(361): Found a wrongly formatted file: hdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/small-file-17 - will delete it.
2024-11-14T17:05:58,302 WARN  [hfile_cleaner-dir-scan-pool-1 {}] cleaner.CleanerChore(361): Found a wrongly formatted file: hdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/small-file-18 - will delete it.
2024-11-14T17:05:58,302 WARN  [hfile_cleaner-dir-scan-pool-1 {}] cleaner.CleanerChore(361): Found a wrongly formatted file: hdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/small-file-19 - will delete it.
2024-11-14T17:05:58,302 WARN  [hfile_cleaner-dir-scan-pool-1 {}] cleaner.CleanerChore(361): Found a wrongly formatted file: hdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/small-file-2 - will delete it.
2024-11-14T17:05:58,302 WARN  [hfile_cleaner-dir-scan-pool-1 {}] cleaner.CleanerChore(361): Found a wrongly formatted file: hdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/small-file-20 - will delete it.
2024-11-14T17:05:58,302 WARN  [hfile_cleaner-dir-scan-pool-1 {}] cleaner.CleanerChore(361): Found a wrongly formatted file: hdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/small-file-3 - will delete it.
2024-11-14T17:05:58,303 WARN  [hfile_cleaner-dir-scan-pool-1 {}] cleaner.CleanerChore(361): Found a wrongly formatted file: hdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/small-file-4 - will delete it.
2024-11-14T17:05:58,303 WARN  [hfile_cleaner-dir-scan-pool-1 {}] cleaner.CleanerChore(361): Found a wrongly formatted file: hdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/small-file-5 - will delete it.
2024-11-14T17:05:58,303 WARN  [hfile_cleaner-dir-scan-pool-1 {}] cleaner.CleanerChore(361): Found a wrongly formatted file: hdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/small-file-6 - will delete it.
2024-11-14T17:05:58,303 WARN  [hfile_cleaner-dir-scan-pool-1 {}] cleaner.CleanerChore(361): Found a wrongly formatted file: hdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/small-file-7 - will delete it.
2024-11-14T17:05:58,303 WARN  [hfile_cleaner-dir-scan-pool-1 {}] cleaner.CleanerChore(361): Found a wrongly formatted file: hdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/small-file-8 - will delete it.
2024-11-14T17:05:58,304 WARN  [hfile_cleaner-dir-scan-pool-1 {}] cleaner.CleanerChore(361): Found a wrongly formatted file: hdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/small-file-9 - will delete it.
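The large-file/small-file naming in these entries reflects the size-based split that testOnConfigurationChange exercises: files above a size threshold (the "throttle point" shown changing from 524288 to 1024 in the entries that follow) go to the large-file delete queue, everything else to the small-file queue. A rough sketch of that dispatch in plain Java; the class and field names are assumptions for illustration, not the actual HFileCleaner internals.

import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;

// Illustrative sketch of size-based dispatch between a large-file and a small-file delete queue.
final class SizeBasedDispatchSketch {
    private final BlockingQueue<Long> largeQueue = new LinkedBlockingQueue<>();
    private final BlockingQueue<Long> smallQueue = new LinkedBlockingQueue<>();
    private volatile long throttlePoint;

    SizeBasedDispatchSketch(long throttlePoint) {
        this.throttlePoint = throttlePoint;
    }

    // Route a file to the queue that matches its size.
    void submit(long fileSizeBytes) {
        if (fileSizeBytes > throttlePoint) {
            largeQueue.add(fileSizeBytes);   // e.g. the 1048576-byte "large-file-*" blocks above
        } else {
            smallQueue.add(fileSizeBytes);   // e.g. the 1024-byte "small-file-*" blocks above
        }
    }

    // What a configuration change does in this sketch: 524288 -> 1024 in the log below.
    void updateThrottlePoint(long newThrottlePoint) {
        this.throttlePoint = newThrottlePoint;
    }
}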
2024-11-14T17:05:58,307 DEBUG [Time-limited test {}] cleaner.TestHFileCleaner(455): File deleted from large queue: 1; from small queue: 1
2024-11-14T17:05:58,307 DEBUG [Time-limited test {}] cleaner.HFileCleaner(466): Updating throttle point, from 524288 to 1024
2024-11-14T17:05:58,307 DEBUG [Time-limited test {}] cleaner.HFileCleaner(473): Updating largeQueueInitSize, from 512 to 1024
2024-11-14T17:05:58,307 DEBUG [Time-limited test {}] cleaner.HFileCleaner(481): Updating smallQueueInitSize, from 512 to 1024
2024-11-14T17:05:58,308 DEBUG [Time-limited test {}] cleaner.HFileCleaner(489): Updating largeFileDeleteThreadNumber, from 1 to 2
2024-11-14T17:05:58,308 DEBUG [Time-limited test {}] cleaner.HFileCleaner(497): Updating smallFileDeleteThreadNumber, from 1 to 4
2024-11-14T17:05:58,308 DEBUG [Time-limited test {}] cleaner.HFileCleaner(335): Stopping file delete threads
2024-11-14T17:05:58,308 DEBUG [Time-limited test {}] cleaner.HFileCleaner(260): Starting for large file=Thread[Time-limited test-HFileCleaner.large.0-1731603958308,5,FailOnTimeoutGroup]
2024-11-14T17:05:58,312 DEBUG [Time-limited test {}] cleaner.HFileCleaner(260): Starting for large file=Thread[Time-limited test-HFileCleaner.large.1-1731603958309,5,FailOnTimeoutGroup]
2024-11-14T17:05:58,313 DEBUG [Time-limited test {}] cleaner.HFileCleaner(275): Starting for small files=Thread[Time-limited test-HFileCleaner.small.0-1731603958312,5,FailOnTimeoutGroup]
2024-11-14T17:05:58,314 DEBUG [Time-limited test {}] cleaner.HFileCleaner(275): Starting for small files=Thread[Time-limited test-HFileCleaner.small.1-1731603958313,5,FailOnTimeoutGroup]
2024-11-14T17:05:58,308 WARN  [Time-limited test-HFileCleaner.small.0-1731603949864 {}] cleaner.HFileCleaner(296): Failed to delete hdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/small-file-7
java.io.InterruptedIOException: Call interrupted
    at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1577) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.Client.call(Client.java:1529) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.Client.call(Client.java:1426) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:258) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:139) ~[hadoop-common-3.4.1.jar:?]
    at jdk.proxy2.$Proxy40.delete(Unknown Source) ~[?:?]
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.lambda$delete$19(ClientNamenodeProtocolTranslatorPB.java:596) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc(ShadedProtobufHelper.java:160) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.delete(ClientNamenodeProtocolTranslatorPB.java:596) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:437) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:170) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:162) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:100) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:366) ~[hadoop-common-3.4.1.jar:?]
    at jdk.proxy2.$Proxy41.delete(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy44.delete(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy44.delete(Unknown Source) ~[?:?]
    at org.apache.hadoop.hdfs.DFSClient.delete(DFSClient.java:1692) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DistributedFileSystem$19.doCall(DistributedFileSystem.java:1004) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DistributedFileSystem$19.doCall(DistributedFileSystem.java:1001) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DistributedFileSystem.delete(DistributedFileSystem.java:1011) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hbase.master.cleaner.HFileCleaner.consumerLoop(HFileCleaner.java:294) ~[classes/:?]
    at org.apache.hadoop.hbase.master.cleaner.HFileCleaner$2.run(HFileCleaner.java:269) ~[classes/:?]
2024-11-14T17:05:58,308 WARN  [Time-limited test-HFileCleaner.large.0-1731603949863 {}] cleaner.HFileCleaner(296): Failed to delete hdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/large-file-4
java.io.InterruptedIOException: Call interrupted
    at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1577) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.Client.call(Client.java:1529) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.Client.call(Client.java:1426) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:258) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:139) ~[hadoop-common-3.4.1.jar:?]
    at jdk.proxy2.$Proxy40.delete(Unknown Source) ~[?:?]
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.lambda$delete$19(ClientNamenodeProtocolTranslatorPB.java:596) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc(ShadedProtobufHelper.java:160) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.delete(ClientNamenodeProtocolTranslatorPB.java:596) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:437) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:170) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:162) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:100) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:366) ~[hadoop-common-3.4.1.jar:?]
    at jdk.proxy2.$Proxy41.delete(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy44.delete(Unknown Source) ~[?:?]
    at jdk.internal.reflect.GeneratedMethodAccessor5.invoke(Unknown Source) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:363) ~[classes/:?]
    at jdk.proxy2.$Proxy44.delete(Unknown Source) ~[?:?]
    at org.apache.hadoop.hdfs.DFSClient.delete(DFSClient.java:1692) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DistributedFileSystem$19.doCall(DistributedFileSystem.java:1004) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DistributedFileSystem$19.doCall(DistributedFileSystem.java:1001) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81) ~[hadoop-common-3.4.1.jar:?]
    at org.apache.hadoop.hdfs.DistributedFileSystem.delete(DistributedFileSystem.java:1011) ~[hadoop-hdfs-client-3.4.1.jar:?]
    at org.apache.hadoop.hbase.master.cleaner.HFileCleaner.consumerLoop(HFileCleaner.java:294) ~[classes/:?]
    at org.apache.hadoop.hbase.master.cleaner.HFileCleaner$1.run(HFileCleaner.java:254) ~[classes/:?]
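The two warnings above follow the usual pattern when a worker blocked in an HDFS RPC is interrupted mid-call (here, the cleaner replaces its delete threads after the configuration change): the client surfaces the interrupt as java.io.InterruptedIOException and the cleaner logs the delete as failed. A minimal, self-contained sketch of that interaction; it stands in for the real DistributedFileSystem.delete call and is not the HFileCleaner code itself.

import java.io.InterruptedIOException;

// Illustrative: a worker blocked in an interruptible call is interrupted by another
// thread and reports the failure, mirroring the "Failed to delete" warnings above.
public final class InterruptDuringDeleteSketch {
    public static void main(String[] args) throws Exception {
        Thread worker = new Thread(() -> {
            try {
                blockingDelete();                 // stands in for a blocking HDFS delete RPC
                System.out.println("deleted");
            } catch (InterruptedIOException e) {
                System.out.println("Failed to delete: " + e.getMessage());
            }
        }, "cleaner-worker");
        worker.start();
        Thread.sleep(100);
        worker.interrupt();                       // what stopping and restarting the delete threads does
        worker.join();
    }

    private static void blockingDelete() throws InterruptedIOException {
        try {
            Thread.sleep(10_000);                 // pretend an RPC is in flight
        } catch (InterruptedException e) {
            InterruptedIOException iioe = new InterruptedIOException("Call interrupted");
            iioe.initCause(e);
            throw iioe;
        }
    }
}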
2024-11-14T17:05:58,315 DEBUG [Time-limited test-HFileCleaner.small.0-1731603949864 {}] cleaner.HFileCleaner(306): Exit Thread[Time-limited test-HFileCleaner.small.0-1731603949864,5,FailOnTimeoutGroup]
2024-11-14T17:05:58,316 DEBUG [Time-limited test-HFileCleaner.large.0-1731603949863 {}] cleaner.HFileCleaner(306): Exit Thread[Time-limited test-HFileCleaner.large.0-1731603949863,5,FailOnTimeoutGroup]
2024-11-14T17:05:58,318 DEBUG [Time-limited test {}] cleaner.HFileCleaner(275): Starting for small files=Thread[Time-limited test-HFileCleaner.small.2-1731603958314,5,FailOnTimeoutGroup]
2024-11-14T17:05:58,320 DEBUG [Time-limited test {}] cleaner.HFileCleaner(275): Starting for small files=Thread[Time-limited test-HFileCleaner.small.3-1731603958318,5,FailOnTimeoutGroup]
2024-11-14T17:05:58,320 DEBUG [Time-limited test {}] cleaner.HFileCleaner(437): Update configuration triggered but nothing changed for this cleaner
2024-11-14T17:05:58,332 DEBUG [Time-limited test {}] cleaner.TestHFileCleaner(475): File deleted from large queue: 21; from small queue: 2
2024-11-14T17:05:58,341 INFO  [Time-limited test {}] hbase.ResourceChecker(175): after: master.cleaner.TestHFileCleaner#testOnConfigurationChange Thread=100 (was 93)
Potentially hanging thread: Time-limited test-HFileCleaner.large.1-1731603958309
    java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method)
    java.base@17.0.11/java.util.concurrent.locks.LockSupport.park(LockSupport.java:341)
    java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionNode.block(AbstractQueuedSynchronizer.java:506)
    java.base@17.0.11/java.util.concurrent.ForkJoinPool.unmanagedBlock(ForkJoinPool.java:3465)
    java.base@17.0.11/java.util.concurrent.ForkJoinPool.managedBlock(ForkJoinPool.java:3436)
    java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:1625)
    app//org.apache.hadoop.hbase.util.StealJobQueue.take(StealJobQueue.java:101)
    app//org.apache.hadoop.hbase.master.cleaner.HFileCleaner.consumerLoop(HFileCleaner.java:285)
    app//org.apache.hadoop.hbase.master.cleaner.HFileCleaner$1.run(HFileCleaner.java:254)
Potentially hanging thread: Time-limited test-HFileCleaner.small.0-1731603958312
    java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method)
    java.base@17.0.11/java.util.concurrent.locks.LockSupport.park(LockSupport.java:341)
    java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionNode.block(AbstractQueuedSynchronizer.java:506)
    java.base@17.0.11/java.util.concurrent.ForkJoinPool.unmanagedBlock(ForkJoinPool.java:3465)
    java.base@17.0.11/java.util.concurrent.ForkJoinPool.managedBlock(ForkJoinPool.java:3436)
    java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:1625)
    java.base@17.0.11/java.util.concurrent.PriorityBlockingQueue.take(PriorityBlockingQueue.java:535)
    app//org.apache.hadoop.hbase.master.cleaner.HFileCleaner.consumerLoop(HFileCleaner.java:285)
    app//org.apache.hadoop.hbase.master.cleaner.HFileCleaner$2.run(HFileCleaner.java:269)
Potentially hanging thread: hfile_cleaner-dir-scan-pool-1
    java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method)
    java.base@17.0.11/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:252)
    java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:1674)
    java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:460)
    java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1061)
    java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1122)
    java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635)
    java.base@17.0.11/java.lang.Thread.run(Thread.java:840)
Potentially hanging thread: Time-limited test-HFileCleaner.small.1-1731603958313
    java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method)
    java.base@17.0.11/java.util.concurrent.locks.LockSupport.park(LockSupport.java:341)
    java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionNode.block(AbstractQueuedSynchronizer.java:506)
    java.base@17.0.11/java.util.concurrent.ForkJoinPool.unmanagedBlock(ForkJoinPool.java:3465)
    java.base@17.0.11/java.util.concurrent.ForkJoinPool.managedBlock(ForkJoinPool.java:3436)
    java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:1625)
    java.base@17.0.11/java.util.concurrent.PriorityBlockingQueue.take(PriorityBlockingQueue.java:535)
    app//org.apache.hadoop.hbase.master.cleaner.HFileCleaner.consumerLoop(HFileCleaner.java:285)
    app//org.apache.hadoop.hbase.master.cleaner.HFileCleaner$2.run(HFileCleaner.java:269)
Potentially hanging thread: Time-limited test-HFileCleaner.small.2-1731603958314
    java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method)
    java.base@17.0.11/java.util.concurrent.locks.LockSupport.park(LockSupport.java:341)
    java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionNode.block(AbstractQueuedSynchronizer.java:506)
    java.base@17.0.11/java.util.concurrent.ForkJoinPool.unmanagedBlock(ForkJoinPool.java:3465)
    java.base@17.0.11/java.util.concurrent.ForkJoinPool.managedBlock(ForkJoinPool.java:3436)
    java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:1625)
    java.base@17.0.11/java.util.concurrent.PriorityBlockingQueue.take(PriorityBlockingQueue.java:535)
    app//org.apache.hadoop.hbase.master.cleaner.HFileCleaner.consumerLoop(HFileCleaner.java:285)
    app//org.apache.hadoop.hbase.master.cleaner.HFileCleaner$2.run(HFileCleaner.java:269)
Potentially hanging thread: Time-limited test-HFileCleaner.small.3-1731603958318
    java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method)
    java.base@17.0.11/java.util.concurrent.locks.LockSupport.park(LockSupport.java:341)
    java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionNode.block(AbstractQueuedSynchronizer.java:506)
    java.base@17.0.11/java.util.concurrent.ForkJoinPool.unmanagedBlock(ForkJoinPool.java:3465)
    java.base@17.0.11/java.util.concurrent.ForkJoinPool.managedBlock(ForkJoinPool.java:3436)
    java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:1625)
    java.base@17.0.11/java.util.concurrent.PriorityBlockingQueue.take(PriorityBlockingQueue.java:535)
    app//org.apache.hadoop.hbase.master.cleaner.HFileCleaner.consumerLoop(HFileCleaner.java:285)
    app//org.apache.hadoop.hbase.master.cleaner.HFileCleaner$2.run(HFileCleaner.java:269)
Potentially hanging thread: Time-limited test-HFileCleaner.large.0-1731603958308
    java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method)
    java.base@17.0.11/java.util.concurrent.locks.LockSupport.park(LockSupport.java:341)
    java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionNode.block(AbstractQueuedSynchronizer.java:506)
    java.base@17.0.11/java.util.concurrent.ForkJoinPool.unmanagedBlock(ForkJoinPool.java:3465)
    java.base@17.0.11/java.util.concurrent.ForkJoinPool.managedBlock(ForkJoinPool.java:3436)
    java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:1625)
    app//org.apache.hadoop.hbase.util.StealJobQueue.take(StealJobQueue.java:101)
    app//org.apache.hadoop.hbase.master.cleaner.HFileCleaner.consumerLoop(HFileCleaner.java:285)
    app//org.apache.hadoop.hbase.master.cleaner.HFileCleaner$1.run(HFileCleaner.java:254)
 - Thread LEAK? -, OpenFileDescriptor=340 (was 338) - OpenFileDescriptor LEAK? -, MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=264 (was 249) - SystemLoadAverage LEAK? -, ProcessCount=11 (was 11), AvailableMemoryMB=9921 (was 9366) - AvailableMemoryMB LEAK? -
2024-11-14T17:05:58,352 INFO  [Time-limited test {}] hbase.ResourceChecker(147): before: master.cleaner.TestHFileCleaner#testRemovesEmptyDirectories Thread=100, OpenFileDescriptor=340, MaxFileDescriptor=1048576, SystemLoadAverage=264, ProcessCount=11, AvailableMemoryMB=9920
2024-11-14T17:05:58,353 DEBUG [Time-limited test {}] cleaner.HFileCleaner(260): Starting for large file=Thread[Time-limited test-HFileCleaner.large.0-1731603958352,5,FailOnTimeoutGroup]
2024-11-14T17:05:58,353 DEBUG [Time-limited test {}] cleaner.HFileCleaner(275): Starting for small files=Thread[Time-limited test-HFileCleaner.small.0-1731603958353,5,FailOnTimeoutGroup]
2024-11-14T17:05:58,377 INFO  [Time-limited test {}] hbase.ResourceChecker(175): after: master.cleaner.TestHFileCleaner#testRemovesEmptyDirectories Thread=102 (was 100)
Potentially hanging thread: Time-limited test-HFileCleaner.small.0-1731603958353
    java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method)
    java.base@17.0.11/java.util.concurrent.locks.LockSupport.park(LockSupport.java:341)
    java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionNode.block(AbstractQueuedSynchronizer.java:506)
    java.base@17.0.11/java.util.concurrent.ForkJoinPool.unmanagedBlock(ForkJoinPool.java:3465)
    java.base@17.0.11/java.util.concurrent.ForkJoinPool.managedBlock(ForkJoinPool.java:3436)
    java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:1625)
    java.base@17.0.11/java.util.concurrent.PriorityBlockingQueue.take(PriorityBlockingQueue.java:535)
    app//org.apache.hadoop.hbase.master.cleaner.HFileCleaner.consumerLoop(HFileCleaner.java:285)
    app//org.apache.hadoop.hbase.master.cleaner.HFileCleaner$2.run(HFileCleaner.java:269)
Potentially hanging thread: Time-limited test-HFileCleaner.large.0-1731603958352
    java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method)
    java.base@17.0.11/java.util.concurrent.locks.LockSupport.park(LockSupport.java:341)
    java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionNode.block(AbstractQueuedSynchronizer.java:506)
    java.base@17.0.11/java.util.concurrent.ForkJoinPool.unmanagedBlock(ForkJoinPool.java:3465)
    java.base@17.0.11/java.util.concurrent.ForkJoinPool.managedBlock(ForkJoinPool.java:3436)
    java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:1625)
    app//org.apache.hadoop.hbase.util.StealJobQueue.take(StealJobQueue.java:101)
    app//org.apache.hadoop.hbase.master.cleaner.HFileCleaner.consumerLoop(HFileCleaner.java:285)
    app//org.apache.hadoop.hbase.master.cleaner.HFileCleaner$1.run(HFileCleaner.java:254)
 - Thread LEAK? -, OpenFileDescriptor=340 (was 340), MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=264 (was 264), ProcessCount=11 (was 11), AvailableMemoryMB=9919 (was 9920)
2024-11-14T17:05:58,383 INFO  [Time-limited test {}] hbase.ResourceChecker(147): before: master.cleaner.TestHFileCleaner#testManualMobCleanerStopsMobRemoval Thread=102, OpenFileDescriptor=340, MaxFileDescriptor=1048576, SystemLoadAverage=264, ProcessCount=11, AvailableMemoryMB=9919
2024-11-14T17:05:58,411 DEBUG [Time-limited test {}] util.ClassSize(228): Using Unsafe to estimate memory layout
2024-11-14T17:05:58,428 DEBUG [Time-limited test {}] mob.ManualMobMaintHFileCleaner(77): Had to calculate name of mob region for table testManualMobCleanerStopsMobRemoval and it is 69da8cfa017589d7cb7fb43a6699b397
2024-11-14T17:05:58,428 DEBUG [Time-limited test {}] mob.ManualMobMaintHFileCleaner(83): Keeping file 'hdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/data/default/testManualMobCleanerStopsMobRemoval/69da8cfa017589d7cb7fb43a6699b397/family/someHFileThatWouldBeAUUID' because it is from mob dir
2024-11-14T17:05:58,438 INFO  [Time-limited test {}] hbase.ResourceChecker(175): after: master.cleaner.TestHFileCleaner#testManualMobCleanerStopsMobRemoval Thread=102 (was 102), OpenFileDescriptor=340 (was 340), MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=264 (was 264), ProcessCount=11 (was 11), AvailableMemoryMB=9905 (was 9919)
2024-11-14T17:05:58,446 INFO  [Time-limited test {}] hbase.ResourceChecker(147): before: master.cleaner.TestHFileCleaner#testLargeSmallIsolation Thread=102, OpenFileDescriptor=340, MaxFileDescriptor=1048576, SystemLoadAverage=264, ProcessCount=11, AvailableMemoryMB=9903
2024-11-14T17:05:58,447 DEBUG [Time-limited test {}] cleaner.HFileCleaner(260): Starting for large file=Thread[Time-limited test-HFileCleaner.large.0-1731603958446,5,FailOnTimeoutGroup]
2024-11-14T17:05:58,447 DEBUG [Time-limited test {}] cleaner.HFileCleaner(275): Starting for small files=Thread[Time-limited test-HFileCleaner.small.0-1731603958447,5,FailOnTimeoutGroup]
2024-11-14T17:05:58,491 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34607 is added to blk_1073741850_1026 (size=1048576)
2024-11-14T17:05:58,904 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34607 is added to blk_1073741851_1027 (size=1048576)
2024-11-14T17:05:59,321 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34607 is added to blk_1073741852_1028 (size=1048576)
2024-11-14T17:05:59,733 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34607 is added to blk_1073741853_1029 (size=1048576)
2024-11-14T17:06:00,145 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34607 is added to blk_1073741854_1030 (size=1048576)
2024-11-14T17:06:00,553 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34607 is added to blk_1073741855_1031 (size=1024)
2024-11-14T17:06:00,962 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34607 is added to blk_1073741856_1032 (size=1024)
2024-11-14T17:06:01,370 INFO  [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34607 is added to blk_1073741857_1033 (size=1024)
2024-11-14T17:06:01,778
INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34607 is added to blk_1073741858_1034 (size=1024) 2024-11-14T17:06:02,187 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34607 is added to blk_1073741859_1035 (size=1024) 2024-11-14T17:06:02,599 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34607 is added to blk_1073741860_1036 (size=1024) 2024-11-14T17:06:03,006 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34607 is added to blk_1073741861_1037 (size=1024) 2024-11-14T17:06:03,414 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34607 is added to blk_1073741862_1038 (size=1024) 2024-11-14T17:06:03,822 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34607 is added to blk_1073741863_1039 (size=1024) 2024-11-14T17:06:04,230 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34607 is added to blk_1073741864_1040 (size=1024) 2024-11-14T17:06:04,637 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34607 is added to blk_1073741865_1041 (size=1024) 2024-11-14T17:06:05,045 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34607 is added to blk_1073741866_1042 (size=1024) 2024-11-14T17:06:05,453 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34607 is added to blk_1073741867_1043 (size=1024) 2024-11-14T17:06:05,860 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34607 is added to blk_1073741868_1044 (size=1024) 2024-11-14T17:06:06,268 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34607 is added to blk_1073741869_1045 (size=1024) 2024-11-14T17:06:06,676 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34607 is added to blk_1073741870_1046 (size=1024) 2024-11-14T17:06:07,084 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34607 is added to blk_1073741871_1047 (size=1024) 2024-11-14T17:06:07,492 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34607 is added to blk_1073741872_1048 (size=1024) 2024-11-14T17:06:07,498 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34607 is added to blk_1073741873_1049 (size=1024) 2024-11-14T17:06:07,504 INFO [Block report processor {}] blockmanagement.BlockManager(3777): BLOCK* addStoredBlock: 127.0.0.1:34607 is added to blk_1073741874_1050 (size=1024) 2024-11-14T17:06:07,908 WARN [hfile_cleaner-dir-scan-pool-0 {}] cleaner.CleanerChore(361): Found a wrongly formatted file: hdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/large-file-1 - will delete it. 2024-11-14T17:06:07,908 WARN [hfile_cleaner-dir-scan-pool-0 {}] cleaner.CleanerChore(361): Found a wrongly formatted file: hdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/large-file-2 - will delete it. 
2024-11-14T17:06:07,908 WARN [hfile_cleaner-dir-scan-pool-0 {}] cleaner.CleanerChore(361): Found a wrongly formatted file: hdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/large-file-3 - will delete it. 2024-11-14T17:06:07,908 WARN [hfile_cleaner-dir-scan-pool-0 {}] cleaner.CleanerChore(361): Found a wrongly formatted file: hdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/large-file-4 - will delete it. 2024-11-14T17:06:07,909 WARN [hfile_cleaner-dir-scan-pool-0 {}] cleaner.CleanerChore(361): Found a wrongly formatted file: hdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/large-file-5 - will delete it. 2024-11-14T17:06:07,909 WARN [hfile_cleaner-dir-scan-pool-0 {}] cleaner.CleanerChore(361): Found a wrongly formatted file: hdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/small-file-1 - will delete it. 2024-11-14T17:06:07,909 WARN [hfile_cleaner-dir-scan-pool-0 {}] cleaner.CleanerChore(361): Found a wrongly formatted file: hdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/small-file-10 - will delete it. 2024-11-14T17:06:07,909 WARN [hfile_cleaner-dir-scan-pool-0 {}] cleaner.CleanerChore(361): Found a wrongly formatted file: hdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/small-file-11 - will delete it. 2024-11-14T17:06:07,909 WARN [hfile_cleaner-dir-scan-pool-0 {}] cleaner.CleanerChore(361): Found a wrongly formatted file: hdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/small-file-12 - will delete it. 2024-11-14T17:06:07,909 WARN [hfile_cleaner-dir-scan-pool-0 {}] cleaner.CleanerChore(361): Found a wrongly formatted file: hdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/small-file-13 - will delete it. 2024-11-14T17:06:07,910 WARN [hfile_cleaner-dir-scan-pool-0 {}] cleaner.CleanerChore(361): Found a wrongly formatted file: hdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/small-file-14 - will delete it. 2024-11-14T17:06:07,910 WARN [hfile_cleaner-dir-scan-pool-0 {}] cleaner.CleanerChore(361): Found a wrongly formatted file: hdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/small-file-15 - will delete it. 2024-11-14T17:06:07,910 WARN [hfile_cleaner-dir-scan-pool-0 {}] cleaner.CleanerChore(361): Found a wrongly formatted file: hdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/small-file-16 - will delete it. 2024-11-14T17:06:07,910 WARN [hfile_cleaner-dir-scan-pool-0 {}] cleaner.CleanerChore(361): Found a wrongly formatted file: hdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/small-file-17 - will delete it. 2024-11-14T17:06:07,910 WARN [hfile_cleaner-dir-scan-pool-0 {}] cleaner.CleanerChore(361): Found a wrongly formatted file: hdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/small-file-18 - will delete it. 2024-11-14T17:06:07,911 WARN [hfile_cleaner-dir-scan-pool-0 {}] cleaner.CleanerChore(361): Found a wrongly formatted file: hdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/small-file-19 - will delete it. 
2024-11-14T17:06:07,911 WARN [hfile_cleaner-dir-scan-pool-0 {}] cleaner.CleanerChore(361): Found a wrongly formatted file: hdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/small-file-2 - will delete it. 2024-11-14T17:06:07,911 WARN [hfile_cleaner-dir-scan-pool-0 {}] cleaner.CleanerChore(361): Found a wrongly formatted file: hdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/small-file-20 - will delete it. 2024-11-14T17:06:07,911 WARN [hfile_cleaner-dir-scan-pool-0 {}] cleaner.CleanerChore(361): Found a wrongly formatted file: hdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/small-file-3 - will delete it. 2024-11-14T17:06:07,911 WARN [hfile_cleaner-dir-scan-pool-0 {}] cleaner.CleanerChore(361): Found a wrongly formatted file: hdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/small-file-4 - will delete it. 2024-11-14T17:06:07,911 WARN [hfile_cleaner-dir-scan-pool-0 {}] cleaner.CleanerChore(361): Found a wrongly formatted file: hdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/small-file-5 - will delete it. 2024-11-14T17:06:07,912 WARN [hfile_cleaner-dir-scan-pool-0 {}] cleaner.CleanerChore(361): Found a wrongly formatted file: hdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/small-file-6 - will delete it. 2024-11-14T17:06:07,912 WARN [hfile_cleaner-dir-scan-pool-0 {}] cleaner.CleanerChore(361): Found a wrongly formatted file: hdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/small-file-7 - will delete it. 2024-11-14T17:06:07,912 WARN [hfile_cleaner-dir-scan-pool-0 {}] cleaner.CleanerChore(361): Found a wrongly formatted file: hdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/small-file-8 - will delete it. 2024-11-14T17:06:07,912 WARN [hfile_cleaner-dir-scan-pool-0 {}] cleaner.CleanerChore(361): Found a wrongly formatted file: hdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/small-file-9 - will delete it. 
2024-11-14T17:06:07,930 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: master.cleaner.TestHFileCleaner#testLargeSmallIsolation Thread=106 (was 102) Potentially hanging thread: Async disk worker #0 for volume /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/19748226-9e32-0ba5-9617-9a904d4d6417/cluster_cd416bbb-ba83-f2ab-800c-fa257bf5af70/dfs/data/data1 java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method) java.base@17.0.11/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:252) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:1674) java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:460) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1061) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1122) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: Time-limited test-HFileCleaner.large.0-1731603958446 java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method) java.base@17.0.11/java.util.concurrent.locks.LockSupport.park(LockSupport.java:341) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionNode.block(AbstractQueuedSynchronizer.java:506) java.base@17.0.11/java.util.concurrent.ForkJoinPool.unmanagedBlock(ForkJoinPool.java:3465) java.base@17.0.11/java.util.concurrent.ForkJoinPool.managedBlock(ForkJoinPool.java:3436) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:1625) app//org.apache.hadoop.hbase.util.StealJobQueue.take(StealJobQueue.java:101) app//org.apache.hadoop.hbase.master.cleaner.HFileCleaner.consumerLoop(HFileCleaner.java:285) app//org.apache.hadoop.hbase.master.cleaner.HFileCleaner$1.run(HFileCleaner.java:254) Potentially hanging thread: Async disk worker #0 for volume /home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/19748226-9e32-0ba5-9617-9a904d4d6417/cluster_cd416bbb-ba83-f2ab-800c-fa257bf5af70/dfs/data/data2 java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method) java.base@17.0.11/java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:252) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:1674) java.base@17.0.11/java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:460) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1061) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1122) java.base@17.0.11/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635) java.base@17.0.11/java.lang.Thread.run(Thread.java:840) Potentially hanging thread: Time-limited test-HFileCleaner.small.0-1731603958447 java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method) java.base@17.0.11/java.util.concurrent.locks.LockSupport.park(LockSupport.java:341) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionNode.block(AbstractQueuedSynchronizer.java:506) java.base@17.0.11/java.util.concurrent.ForkJoinPool.unmanagedBlock(ForkJoinPool.java:3465) 
java.base@17.0.11/java.util.concurrent.ForkJoinPool.managedBlock(ForkJoinPool.java:3436) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:1625) java.base@17.0.11/java.util.concurrent.PriorityBlockingQueue.take(PriorityBlockingQueue.java:535) app//org.apache.hadoop.hbase.master.cleaner.HFileCleaner.consumerLoop(HFileCleaner.java:285) app//org.apache.hadoop.hbase.master.cleaner.HFileCleaner$2.run(HFileCleaner.java:269) - Thread LEAK? -, OpenFileDescriptor=340 (was 340), MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=253 (was 264), ProcessCount=11 (was 11), AvailableMemoryMB=9500 (was 9903) 2024-11-14T17:06:07,937 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: master.cleaner.TestHFileCleaner#testHFileCleaning Thread=106, OpenFileDescriptor=340, MaxFileDescriptor=1048576, SystemLoadAverage=253, ProcessCount=11, AvailableMemoryMB=9499 2024-11-14T17:06:07,938 INFO [Time-limited test {}] cleaner.CleanerChore(192): Initialize cleaner=org.apache.hadoop.hbase.master.cleaner.TimeToLiveHFileCleaner 2024-11-14T17:06:07,938 INFO [Time-limited test {}] cleaner.CleanerChore(192): Initialize cleaner=org.apache.hadoop.hbase.mob.ManualMobMaintHFileCleaner 2024-11-14T17:06:07,938 DEBUG [Time-limited test {}] cleaner.HFileCleaner(260): Starting for large file=Thread[Time-limited test-HFileCleaner.large.0-1731603967938,5,FailOnTimeoutGroup] 2024-11-14T17:06:07,938 DEBUG [Time-limited test {}] cleaner.HFileCleaner(275): Starting for small files=Thread[Time-limited test-HFileCleaner.small.0-1731603967938,5,FailOnTimeoutGroup] 2024-11-14T17:06:07,943 DEBUG [Time-limited test {}] cleaner.TestHFileCleaner(180): Now is: 1731603967939 2024-11-14T17:06:07,947 DEBUG [Time-limited test {}] cleaner.TestHFileCleaner(188): Creating Filehdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/namespace/table/region/someHFileThatWouldBeAUUID.1731603967940, mtime:1731603965938, atime:1731603967944 2024-11-14T17:06:07,950 DEBUG [Time-limited test {}] cleaner.TestHFileCleaner(188): Creating Filehdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/namespace/table/region/someHFileThatWouldBeAUUID.1731603967941, mtime:1731603965938, atime:1731603967948 2024-11-14T17:06:07,954 DEBUG [Time-limited test {}] cleaner.TestHFileCleaner(188): Creating Filehdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/namespace/table/region/someHFileThatWouldBeAUUID.1731603967942, mtime:1731603965938, atime:1731603967952 2024-11-14T17:06:07,958 DEBUG [Time-limited test {}] cleaner.TestHFileCleaner(188): Creating Filehdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/namespace/table/region/someHFileThatWouldBeAUUID.1731603967943, mtime:1731603965938, atime:1731603967955 2024-11-14T17:06:07,962 DEBUG [Time-limited test {}] cleaner.TestHFileCleaner(188): Creating Filehdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/namespace/table/region/someHFileThatWouldBeAUUID.1731603967944, mtime:1731603965938, atime:1731603967959 2024-11-14T17:06:07,966 DEBUG [Time-limited test {}] cleaner.TestHFileCleaner(188): Creating Filehdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/namespace/table/region/someHFileThatWouldBeAUUID.1731603967945, mtime:1731603965938, atime:1731603967964 2024-11-14T17:06:07,970 DEBUG [Time-limited test {}] 
cleaner.TestHFileCleaner(188): Creating Filehdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/namespace/table/region/someHFileThatWouldBeAUUID.1731603967946, mtime:1731603965938, atime:1731603967968 2024-11-14T17:06:07,974 DEBUG [Time-limited test {}] cleaner.TestHFileCleaner(188): Creating Filehdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/namespace/table/region/someHFileThatWouldBeAUUID.1731603967947, mtime:1731603965938, atime:1731603967971 2024-11-14T17:06:07,977 DEBUG [Time-limited test {}] cleaner.TestHFileCleaner(188): Creating Filehdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/namespace/table/region/someHFileThatWouldBeAUUID.1731603967948, mtime:1731603965938, atime:1731603967975 2024-11-14T17:06:07,981 DEBUG [Time-limited test {}] cleaner.TestHFileCleaner(188): Creating Filehdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/namespace/table/region/someHFileThatWouldBeAUUID.1731603967949, mtime:1731603965938, atime:1731603967978 2024-11-14T17:06:07,984 DEBUG [Time-limited test {}] cleaner.TestHFileCleaner(188): Creating Filehdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/namespace/table/region/someHFileThatWouldBeAUUID.1731603967950, mtime:1731603965938, atime:1731603967982 2024-11-14T17:06:07,987 DEBUG [Time-limited test {}] cleaner.TestHFileCleaner(188): Creating Filehdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/namespace/table/region/someHFileThatWouldBeAUUID.1731603967951, mtime:1731603965938, atime:1731603967985 2024-11-14T17:06:07,990 DEBUG [Time-limited test {}] cleaner.TestHFileCleaner(188): Creating Filehdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/namespace/table/region/someHFileThatWouldBeAUUID.1731603967952, mtime:1731603965938, atime:1731603967988 2024-11-14T17:06:07,994 DEBUG [Time-limited test {}] cleaner.TestHFileCleaner(188): Creating Filehdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/namespace/table/region/someHFileThatWouldBeAUUID.1731603967953, mtime:1731603965938, atime:1731603967991 2024-11-14T17:06:07,998 DEBUG [Time-limited test {}] cleaner.TestHFileCleaner(188): Creating Filehdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/namespace/table/region/someHFileThatWouldBeAUUID.1731603967954, mtime:1731603965938, atime:1731603967995 2024-11-14T17:06:08,001 DEBUG [Time-limited test {}] cleaner.TestHFileCleaner(188): Creating Filehdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/namespace/table/region/someHFileThatWouldBeAUUID.1731603967955, mtime:1731603965938, atime:1731603967999 2024-11-14T17:06:08,005 DEBUG [Time-limited test {}] cleaner.TestHFileCleaner(188): Creating Filehdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/namespace/table/region/someHFileThatWouldBeAUUID.1731603967956, mtime:1731603965938, atime:1731603968002 2024-11-14T17:06:08,008 DEBUG [Time-limited test {}] cleaner.TestHFileCleaner(188): Creating Filehdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/namespace/table/region/someHFileThatWouldBeAUUID.1731603967957, mtime:1731603965938, atime:1731603968006 2024-11-14T17:06:08,011 DEBUG [Time-limited test {}] cleaner.TestHFileCleaner(188): 
Creating Filehdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/namespace/table/region/someHFileThatWouldBeAUUID.1731603967958, mtime:1731603965938, atime:1731603968009 2024-11-14T17:06:08,014 DEBUG [Time-limited test {}] cleaner.TestHFileCleaner(188): Creating Filehdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/namespace/table/region/someHFileThatWouldBeAUUID.1731603967959, mtime:1731603965938, atime:1731603968012 2024-11-14T17:06:08,018 DEBUG [Time-limited test {}] cleaner.TestHFileCleaner(188): Creating Filehdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/namespace/table/region/someHFileThatWouldBeAUUID.1731603967960, mtime:1731603965938, atime:1731603968015 2024-11-14T17:06:08,021 DEBUG [Time-limited test {}] cleaner.TestHFileCleaner(188): Creating Filehdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/namespace/table/region/someHFileThatWouldBeAUUID.1731603967961, mtime:1731603965938, atime:1731603968019 2024-11-14T17:06:08,025 DEBUG [Time-limited test {}] cleaner.TestHFileCleaner(188): Creating Filehdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/namespace/table/region/someHFileThatWouldBeAUUID.1731603967962, mtime:1731603965938, atime:1731603968022 2024-11-14T17:06:08,029 DEBUG [Time-limited test {}] cleaner.TestHFileCleaner(188): Creating Filehdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/namespace/table/region/someHFileThatWouldBeAUUID.1731603967963, mtime:1731603965938, atime:1731603968027 2024-11-14T17:06:08,032 DEBUG [Time-limited test {}] cleaner.TestHFileCleaner(188): Creating Filehdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/namespace/table/region/someHFileThatWouldBeAUUID.1731603967964, mtime:1731603965938, atime:1731603968030 2024-11-14T17:06:08,035 DEBUG [Time-limited test {}] cleaner.TestHFileCleaner(188): Creating Filehdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/namespace/table/region/someHFileThatWouldBeAUUID.1731603967965, mtime:1731603965938, atime:1731603968033 2024-11-14T17:06:08,039 DEBUG [Time-limited test {}] cleaner.TestHFileCleaner(188): Creating Filehdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/namespace/table/region/someHFileThatWouldBeAUUID.1731603967966, mtime:1731603965938, atime:1731603968036 2024-11-14T17:06:08,042 DEBUG [Time-limited test {}] cleaner.TestHFileCleaner(188): Creating Filehdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/namespace/table/region/someHFileThatWouldBeAUUID.1731603967967, mtime:1731603965938, atime:1731603968040 2024-11-14T17:06:08,046 DEBUG [Time-limited test {}] cleaner.TestHFileCleaner(188): Creating Filehdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/namespace/table/region/someHFileThatWouldBeAUUID.1731603967968, mtime:1731603965938, atime:1731603968043 2024-11-14T17:06:08,050 DEBUG [Time-limited test {}] cleaner.TestHFileCleaner(188): Creating Filehdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/namespace/table/region/someHFileThatWouldBeAUUID.1731603967969, mtime:1731603965938, atime:1731603968047 2024-11-14T17:06:08,053 DEBUG [Time-limited test {}] cleaner.TestHFileCleaner(188): Creating 
Filehdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/namespace/table/region/someHFileThatWouldBeAUUID.1731603967970, mtime:1731603965938, atime:1731603968051 2024-11-14T17:06:08,058 DEBUG [Time-limited test {}] cleaner.TestHFileCleaner(197): Creating Filehdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/namespace/table/region/someHFileThatWouldBeAUUID.00000000000, mtime:1731603966939, atime:1731603968055 2024-11-14T17:06:08,061 DEBUG [Time-limited test {}] cleaner.TestHFileCleaner(199): hdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/namespace/table/region/dfd-dfd 2024-11-14T17:06:08,061 DEBUG [Time-limited test {}] cleaner.TestHFileCleaner(199): hdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/namespace/table/region/someHFileThatWouldBeAUUID.00000000000 2024-11-14T17:06:08,061 DEBUG [Time-limited test {}] cleaner.TestHFileCleaner(199): hdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/namespace/table/region/someHFileThatWouldBeAUUID.1731603967940 2024-11-14T17:06:08,061 DEBUG [Time-limited test {}] cleaner.TestHFileCleaner(199): hdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/namespace/table/region/someHFileThatWouldBeAUUID.1731603967941 2024-11-14T17:06:08,061 DEBUG [Time-limited test {}] cleaner.TestHFileCleaner(199): hdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/namespace/table/region/someHFileThatWouldBeAUUID.1731603967942 2024-11-14T17:06:08,062 DEBUG [Time-limited test {}] cleaner.TestHFileCleaner(199): hdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/namespace/table/region/someHFileThatWouldBeAUUID.1731603967943 2024-11-14T17:06:08,062 DEBUG [Time-limited test {}] cleaner.TestHFileCleaner(199): hdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/namespace/table/region/someHFileThatWouldBeAUUID.1731603967944 2024-11-14T17:06:08,062 DEBUG [Time-limited test {}] cleaner.TestHFileCleaner(199): hdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/namespace/table/region/someHFileThatWouldBeAUUID.1731603967945 2024-11-14T17:06:08,062 DEBUG [Time-limited test {}] cleaner.TestHFileCleaner(199): hdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/namespace/table/region/someHFileThatWouldBeAUUID.1731603967946 2024-11-14T17:06:08,062 DEBUG [Time-limited test {}] cleaner.TestHFileCleaner(199): hdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/namespace/table/region/someHFileThatWouldBeAUUID.1731603967947 2024-11-14T17:06:08,062 DEBUG [Time-limited test {}] cleaner.TestHFileCleaner(199): hdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/namespace/table/region/someHFileThatWouldBeAUUID.1731603967948 2024-11-14T17:06:08,062 DEBUG [Time-limited test {}] cleaner.TestHFileCleaner(199): hdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/namespace/table/region/someHFileThatWouldBeAUUID.1731603967949 2024-11-14T17:06:08,062 DEBUG [Time-limited test {}] cleaner.TestHFileCleaner(199): hdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/namespace/table/region/someHFileThatWouldBeAUUID.1731603967950 
2024-11-14T17:06:08,063 DEBUG [Time-limited test {}] cleaner.TestHFileCleaner(199): hdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/namespace/table/region/someHFileThatWouldBeAUUID.1731603967951 2024-11-14T17:06:08,063 DEBUG [Time-limited test {}] cleaner.TestHFileCleaner(199): hdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/namespace/table/region/someHFileThatWouldBeAUUID.1731603967952 2024-11-14T17:06:08,063 DEBUG [Time-limited test {}] cleaner.TestHFileCleaner(199): hdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/namespace/table/region/someHFileThatWouldBeAUUID.1731603967953 2024-11-14T17:06:08,063 DEBUG [Time-limited test {}] cleaner.TestHFileCleaner(199): hdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/namespace/table/region/someHFileThatWouldBeAUUID.1731603967954 2024-11-14T17:06:08,063 DEBUG [Time-limited test {}] cleaner.TestHFileCleaner(199): hdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/namespace/table/region/someHFileThatWouldBeAUUID.1731603967955 2024-11-14T17:06:08,063 DEBUG [Time-limited test {}] cleaner.TestHFileCleaner(199): hdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/namespace/table/region/someHFileThatWouldBeAUUID.1731603967956 2024-11-14T17:06:08,063 DEBUG [Time-limited test {}] cleaner.TestHFileCleaner(199): hdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/namespace/table/region/someHFileThatWouldBeAUUID.1731603967957 2024-11-14T17:06:08,063 DEBUG [Time-limited test {}] cleaner.TestHFileCleaner(199): hdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/namespace/table/region/someHFileThatWouldBeAUUID.1731603967958 2024-11-14T17:06:08,064 DEBUG [Time-limited test {}] cleaner.TestHFileCleaner(199): hdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/namespace/table/region/someHFileThatWouldBeAUUID.1731603967959 2024-11-14T17:06:08,064 DEBUG [Time-limited test {}] cleaner.TestHFileCleaner(199): hdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/namespace/table/region/someHFileThatWouldBeAUUID.1731603967960 2024-11-14T17:06:08,064 DEBUG [Time-limited test {}] cleaner.TestHFileCleaner(199): hdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/namespace/table/region/someHFileThatWouldBeAUUID.1731603967961 2024-11-14T17:06:08,064 DEBUG [Time-limited test {}] cleaner.TestHFileCleaner(199): hdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/namespace/table/region/someHFileThatWouldBeAUUID.1731603967962 2024-11-14T17:06:08,064 DEBUG [Time-limited test {}] cleaner.TestHFileCleaner(199): hdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/namespace/table/region/someHFileThatWouldBeAUUID.1731603967963 2024-11-14T17:06:08,064 DEBUG [Time-limited test {}] cleaner.TestHFileCleaner(199): hdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/namespace/table/region/someHFileThatWouldBeAUUID.1731603967964 2024-11-14T17:06:08,064 DEBUG [Time-limited test {}] cleaner.TestHFileCleaner(199): 
hdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/namespace/table/region/someHFileThatWouldBeAUUID.1731603967965 2024-11-14T17:06:08,064 DEBUG [Time-limited test {}] cleaner.TestHFileCleaner(199): hdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/namespace/table/region/someHFileThatWouldBeAUUID.1731603967966 2024-11-14T17:06:08,065 DEBUG [Time-limited test {}] cleaner.TestHFileCleaner(199): hdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/namespace/table/region/someHFileThatWouldBeAUUID.1731603967967 2024-11-14T17:06:08,065 DEBUG [Time-limited test {}] cleaner.TestHFileCleaner(199): hdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/namespace/table/region/someHFileThatWouldBeAUUID.1731603967968 2024-11-14T17:06:08,065 DEBUG [Time-limited test {}] cleaner.TestHFileCleaner(199): hdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/namespace/table/region/someHFileThatWouldBeAUUID.1731603967969 2024-11-14T17:06:08,065 DEBUG [Time-limited test {}] cleaner.TestHFileCleaner(199): hdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/namespace/table/region/someHFileThatWouldBeAUUID.1731603967970 2024-11-14T17:06:08,070 WARN [hfile_cleaner-dir-scan-pool-1 {}] cleaner.CleanerChore(361): Found a wrongly formatted file: hdfs://localhost:35523/user/jenkins/test-data/2cf6bde2-b761-0878-55f3-9d28ba06f789/archive/namespace/table/region/dfd-dfd - will delete it. 2024-11-14T17:06:08,074 DEBUG [hfile_cleaner-dir-scan-pool-1 {}] mob.ManualMobMaintHFileCleaner(77): Had to calculate name of mob region for table archive:namespace and it is 79aaa9e152c2bc55b7a42a270d414302 2024-11-14T17:06:08,086 DEBUG [Time-limited test {}] cleaner.TestHFileCleaner(220): Kept hfiles: someHFileThatWouldBeAUUID.00000000000 2024-11-14T17:06:08,095 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: master.cleaner.TestHFileCleaner#testHFileCleaning Thread=108 (was 106) Potentially hanging thread: Time-limited test-HFileCleaner.large.0-1731603967938 java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method) java.base@17.0.11/java.util.concurrent.locks.LockSupport.park(LockSupport.java:341) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionNode.block(AbstractQueuedSynchronizer.java:506) java.base@17.0.11/java.util.concurrent.ForkJoinPool.unmanagedBlock(ForkJoinPool.java:3465) java.base@17.0.11/java.util.concurrent.ForkJoinPool.managedBlock(ForkJoinPool.java:3436) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:1625) app//org.apache.hadoop.hbase.util.StealJobQueue.take(StealJobQueue.java:101) app//org.apache.hadoop.hbase.master.cleaner.HFileCleaner.consumerLoop(HFileCleaner.java:285) app//org.apache.hadoop.hbase.master.cleaner.HFileCleaner$1.run(HFileCleaner.java:254) Potentially hanging thread: Time-limited test-HFileCleaner.small.0-1731603967938 java.base@17.0.11/jdk.internal.misc.Unsafe.park(Native Method) java.base@17.0.11/java.util.concurrent.locks.LockSupport.park(LockSupport.java:341) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionNode.block(AbstractQueuedSynchronizer.java:506) java.base@17.0.11/java.util.concurrent.ForkJoinPool.unmanagedBlock(ForkJoinPool.java:3465) 
java.base@17.0.11/java.util.concurrent.ForkJoinPool.managedBlock(ForkJoinPool.java:3436) java.base@17.0.11/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:1625) java.base@17.0.11/java.util.concurrent.PriorityBlockingQueue.take(PriorityBlockingQueue.java:535) app//org.apache.hadoop.hbase.master.cleaner.HFileCleaner.consumerLoop(HFileCleaner.java:285) app//org.apache.hadoop.hbase.master.cleaner.HFileCleaner$2.run(HFileCleaner.java:269) - Thread LEAK? -, OpenFileDescriptor=340 (was 340), MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=253 (was 253), ProcessCount=11 (was 11), AvailableMemoryMB=9495 (was 9499) 2024-11-14T17:06:08,102 INFO [Time-limited test {}] hbase.ResourceChecker(147): before: master.cleaner.TestHFileCleaner#testManualMobCleanerLetsNonMobGo Thread=108, OpenFileDescriptor=340, MaxFileDescriptor=1048576, SystemLoadAverage=253, ProcessCount=11, AvailableMemoryMB=9495 2024-11-14T17:06:08,109 DEBUG [Time-limited test {}] mob.ManualMobMaintHFileCleaner(77): Had to calculate name of mob region for table testManualMobCleanerLetsNonMobGo and it is 9d82fa85c2f878c96865c3ee4d6f9151 2024-11-14T17:06:08,117 INFO [Time-limited test {}] hbase.ResourceChecker(175): after: master.cleaner.TestHFileCleaner#testManualMobCleanerLetsNonMobGo Thread=108 (was 108), OpenFileDescriptor=340 (was 340), MaxFileDescriptor=1048576 (was 1048576), SystemLoadAverage=253 (was 253), ProcessCount=11 (was 11), AvailableMemoryMB=9494 (was 9495) 2024-11-14T17:06:08,121 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.w.WebAppContext@530b4d2e{datanode,/,null,STOPPED}{jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/datanode} 2024-11-14T17:06:08,124 INFO [Time-limited test {}] server.AbstractConnector(383): Stopped ServerConnector@76ee0868{HTTP/1.1, (http/1.1)}{localhost:0} 2024-11-14T17:06:08,124 INFO [Time-limited test {}] session.HouseKeeper(149): node0 Stopped scavenging 2024-11-14T17:06:08,124 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.s.ServletContextHandler@ade538{static,/static,jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/static,STOPPED} 2024-11-14T17:06:08,125 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.s.ServletContextHandler@6d607707{logs,/logs,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/19748226-9e32-0ba5-9617-9a904d4d6417/hadoop.log.dir/,STOPPED} 2024-11-14T17:06:08,129 WARN [BP-1078482885-172.17.0.2-1731603947145 heartbeating to localhost/127.0.0.1:35523 {}] datanode.IncrementalBlockReportManager(160): IncrementalBlockReportManager interrupted 2024-11-14T17:06:08,129 ERROR [Command processor {}] datanode.BPServiceActor$CommandProcessingThread(1416): Command processor encountered interrupt and exit. 
2024-11-14T17:06:08,129 WARN [BP-1078482885-172.17.0.2-1731603947145 heartbeating to localhost/127.0.0.1:35523 {}] datanode.BPServiceActor(925): Ending block pool service for: Block pool BP-1078482885-172.17.0.2-1731603947145 (Datanode Uuid a6f1db02-3997-49d4-a79c-c89e571eb9db) service to localhost/127.0.0.1:35523 2024-11-14T17:06:08,129 WARN [Command processor {}] datanode.BPServiceActor$CommandProcessingThread(1400): Ending command processor service for: Thread[Command processor,5,FailOnTimeoutGroup] 2024-11-14T17:06:08,130 WARN [refreshUsed-/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/19748226-9e32-0ba5-9617-9a904d4d6417/cluster_cd416bbb-ba83-f2ab-800c-fa257bf5af70/dfs/data/data1/current/BP-1078482885-172.17.0.2-1731603947145 {}] fs.CachingGetSpaceUsed$RefreshThread(231): Thread Interrupted waiting to refresh disk information: sleep interrupted 2024-11-14T17:06:08,131 WARN [refreshUsed-/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/19748226-9e32-0ba5-9617-9a904d4d6417/cluster_cd416bbb-ba83-f2ab-800c-fa257bf5af70/dfs/data/data2/current/BP-1078482885-172.17.0.2-1731603947145 {}] fs.CachingGetSpaceUsed$RefreshThread(231): Thread Interrupted waiting to refresh disk information: sleep interrupted 2024-11-14T17:06:08,132 WARN [Time-limited test {}] datanode.DataSetLockManager(261): not open lock leak check func 2024-11-14T17:06:08,144 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.w.WebAppContext@6f207b16{hdfs,/,null,STOPPED}{jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/hdfs} 2024-11-14T17:06:08,145 INFO [Time-limited test {}] server.AbstractConnector(383): Stopped ServerConnector@8075d58{HTTP/1.1, (http/1.1)}{localhost:0} 2024-11-14T17:06:08,145 INFO [Time-limited test {}] session.HouseKeeper(149): node0 Stopped scavenging 2024-11-14T17:06:08,145 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.s.ServletContextHandler@190f9941{static,/static,jar:file:/home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/local-repository/org/apache/hadoop/hadoop-hdfs/3.4.1/hadoop-hdfs-3.4.1-tests.jar!/webapps/static,STOPPED} 2024-11-14T17:06:08,145 INFO [Time-limited test {}] handler.ContextHandler(1159): Stopped o.e.j.s.ServletContextHandler@3402cd06{logs,/logs,file:///home/jenkins/jenkins-home/workspace/HBase-Flaky-Tests_branch-2/hbase-server/target/test-data/19748226-9e32-0ba5-9617-9a904d4d6417/hadoop.log.dir/,STOPPED} 2024-11-14T17:06:08,148 WARN [Time-limited test {}] impl.FsDatasetImpl(779): Exception thrown while metric collection. Exception : Cannot invoke "java.util.Map.values()" because "this.executors" is null