Tests are automatically skipped if Docker is not available or container fails to start.
*/
public class HiveTestBase extends PlanTestBase {
- public static final HiveTestFixture HIVE_TEST_FIXTURE;
+ private static final Logger logger = LoggerFactory.getLogger(HiveTestBase.class);
+
+ // Lazy initialization - container is started only when needed
+ private static HiveTestFixture hiveTestFixture;
+ private static HiveContainer hiveContainer;
+ private static String initializationError;
+ private static boolean initialized = false;
+ private static BaseDirTestWatcher generalDirWatcher;
+
+ // Public fields retained for backward compatibility with existing callers
+ public static HiveTestFixture HIVE_TEST_FIXTURE;
+ public static HiveContainer HIVE_CONTAINER;
+
+ /**
+ * Initializes the Hive Docker container and test fixture.
+ * This is called lazily to avoid blocking class loading if Docker is unavailable.
+ */
+ private static synchronized void initializeHiveInfrastructure() {
+ if (initialized) {
+ return;
+ }
+ initialized = true;
- static {
- if (HiveTestUtilities.supportedJavaVersion()) {
- // generate hive data common for all test classes using own dirWatcher
- BaseDirTestWatcher generalDirWatcher = new BaseDirTestWatcher() {
- {
+ generalDirWatcher = new BaseDirTestWatcher() {
+ {
/*
- Below protected method invoked to create directory DirWatcher.dir with path like:
- ./target/org.apache.drill.exec.hive.HiveTestBase123e4567-e89b-12d3-a456-556642440000.
- Then subdirectory with name 'root' will be used to hold metastore_db and other data shared between
- all derivatives of the class. Note that UUID suffix is necessary to avoid conflicts between forked JVMs.
- */
- starting(Description.createSuiteDescription(HiveTestBase.class.getName().concat(UUID.randomUUID().toString())));
- }
- };
+ * Below protected method invoked to create directory DirWatcher.dir with path like:
+ * ./target/org.apache.drill.exec.hive.HiveTestBase123e4567-e89b-12d3-a456-556642440000.
+ * Then subdirectory with name 'root' will be used to hold test data shared between
+ * all derivatives of the class. Note that UUID suffix is necessary to avoid conflicts between forked JVMs.
+ */
+ starting(Description.createSuiteDescription(HiveTestBase.class.getName().concat(UUID.randomUUID().toString())));
+ }
+ };
+
+ try {
+ // Check if Docker is available first
+ if (!HiveContainer.isDockerAvailable()) {
+ initializationError = "Docker is not available. Hive tests will be skipped.";
+ logger.warn(initializationError);
+ return;
+ }
+
+ // Warn about ARM64 performance
+ if (HiveContainer.isArm64()) {
+ System.out.println("WARNING: Running on ARM64 architecture.");
+ System.out.println("Hive Docker tests use x86 emulation and may take 15-30 minutes to start.");
+ System.out.println("Consider skipping these tests with: mvn test -Dhive.test.excludedGroups=org.apache.drill.categories.HiveStorageTest");
+ }
+
+ // Get shared Docker container instance (starts on first access)
+ logger.info("Getting shared Hive Docker container for tests");
+ hiveContainer = HiveContainer.getInstance();
+ HIVE_CONTAINER = hiveContainer;
+ logger.info("Hive container ready");
+
+ System.out.println("Configuring Hive storage plugin for Drill...");
+ long setupStart = System.currentTimeMillis();
+
File baseDir = generalDirWatcher.getRootDir();
- HIVE_TEST_FIXTURE = HiveTestFixture.builder(baseDir).build();
- HiveTestDataGenerator dataGenerator = new HiveTestDataGenerator(generalDirWatcher, baseDir,
- HIVE_TEST_FIXTURE.getWarehouseDir());
- HIVE_TEST_FIXTURE.getDriverManager().runWithinSession(dataGenerator::generateData);
-
- // set hook for clearing watcher's dir on JVM shutdown
- Runtime.getRuntime().addShutdownHook(new Thread(() -> FileUtils.deleteQuietly(generalDirWatcher.getDir())));
- } else {
- HIVE_TEST_FIXTURE = null;
+ hiveTestFixture = HiveTestFixture.builderForDocker(baseDir, hiveContainer).build();
+ HIVE_TEST_FIXTURE = hiveTestFixture;
+
+ // Note: Test data generation for Docker-based Hive will be done via JDBC in individual tests
+ // or test setup methods as needed, since we can't use embedded Hive Driver
+
+ long setupSeconds = (System.currentTimeMillis() - setupStart) / 1000;
+ System.out.println("Hive storage plugin configured in " + setupSeconds + " seconds");
+ System.out.println("Hive test infrastructure ready!");
+
+ // set hook for clearing resources on JVM shutdown
+ Runtime.getRuntime().addShutdownHook(new Thread(() -> {
+ FileUtils.deleteQuietly(generalDirWatcher.getDir());
+ // Note: Container is shared singleton, will be cleaned up by Testcontainers
+ }));
+ } catch (Exception e) {
+ initializationError = "Failed to initialize Hive container: " + e.getMessage();
+ logger.error(initializationError, e);
+ // Don't throw - let tests be skipped gracefully
}
}
@BeforeClass
public static void setUp() {
- HiveTestUtilities.assumeJavaVersion();
- HIVE_TEST_FIXTURE.getPluginManager().addHivePluginTo(bits);
+ // Initialize lazily
+ initializeHiveInfrastructure();
+
+ // Skip tests if initialization failed
+ Assume.assumeTrue("Hive infrastructure not available: " + initializationError,
+ initializationError == null && hiveTestFixture != null);
+
+ hiveTestFixture.getPluginManager().addHivePluginTo(bits);
}
@AfterClass
public static void tearDown() {
- if (HIVE_TEST_FIXTURE != null) {
- HIVE_TEST_FIXTURE.getPluginManager().removeHivePluginFrom(bits);
+ if (hiveTestFixture != null) {
+ hiveTestFixture.getPluginManager().removeHivePluginFrom(bits);
}
}
}
diff --git a/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/hive/HiveTestFixture.java b/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/hive/HiveTestFixture.java
index 0bf5d42390d..8e6461c8b81 100644
--- a/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/hive/HiveTestFixture.java
+++ b/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/hive/HiveTestFixture.java
@@ -114,6 +114,33 @@ public static Builder builder(File baseDir) {
return new Builder(requireNonNull(baseDir, "Parameter 'baseDir' can't be null!"));
}
+ /**
+ * Creates a builder configured for Docker-based Hive testing.
+ *
+ * @param baseDir Base directory for test files
+ * @param hiveContainer Hive container instance
+ * @return Builder configured for Docker
+ */
+ public static Builder builderForDocker(File baseDir, HiveContainer hiveContainer) {
+ requireNonNull(baseDir, "Parameter 'baseDir' can't be null!");
+ requireNonNull(hiveContainer, "Parameter 'hiveContainer' can't be null!");
+
+ Builder builder = new Builder(baseDir);
+ String metastoreUri = hiveContainer.getMetastoreUri();
+ String warehouseDir = "/opt/hive/data/warehouse"; // Container's warehouse directory
+
+ // Configure for Docker-based metastore
+ builder.pluginOption(ConfVars.METASTOREURIS, metastoreUri);
+ builder.pluginOption(ConfVars.METASTOREWAREHOUSE, warehouseDir);
+
+ // Configure driver for Docker-based HiveServer2
+ // Driver uses the containerized metastore via Thrift
+ builder.driverOption(ConfVars.METASTOREURIS, metastoreUri);
+ builder.driverOption(ConfVars.METASTOREWAREHOUSE, warehouseDir);
+
+ return builder;
+ }
+
public HivePluginManager getPluginManager() {
return pluginManager;
}
diff --git a/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/hive/HiveTestSuite.java b/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/hive/HiveTestSuite.java
new file mode 100644
index 00000000000..85c036182cc
--- /dev/null
+++ b/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/hive/HiveTestSuite.java
@@ -0,0 +1,177 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.hive;
+
+import org.apache.drill.categories.HiveStorageTest;
+import org.apache.drill.categories.SlowTest;
+import org.apache.drill.test.BaseTest;
+import org.apache.drill.test.BaseDirTestWatcher;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.ClassRule;
+import org.junit.experimental.categories.Category;
+import org.junit.runner.RunWith;
+import org.junit.runners.Suite;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.File;
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.concurrent.atomic.AtomicInteger;
+
+/**
+ * Test suite for Hive storage plugin tests using a Docker container.
+ * This suite manages the lifecycle of a Hive container and provides
+ * connection details to test classes.
+ */
+@RunWith(Suite.class)
+@Suite.SuiteClasses({
+ // Test classes will be added here
+})
+@Category({SlowTest.class, HiveStorageTest.class})
+public class HiveTestSuite extends BaseTest {
+
+ private static final Logger logger = LoggerFactory.getLogger(HiveTestSuite.class);
+
+ @ClassRule
+ public static final BaseDirTestWatcher dirTestWatcher = new BaseDirTestWatcher();
+
+ private static HiveContainer hiveContainer;
+ private static String metastoreUri;
+ private static String jdbcUrl;
+ private static final AtomicInteger initCount = new AtomicInteger(0);
+
+ /**
+ * Gets the metastore URI for connecting to Hive metastore.
+ *
+ * @return Metastore URI
+ */
+ public static String getMetastoreUri() {
+ return metastoreUri;
+ }
+
+ /**
+ * Gets the JDBC URL for connecting to HiveServer2.
+ *
+ * @return JDBC URL
+ */
+ public static String getJdbcUrl() {
+ return jdbcUrl;
+ }
+
+ /**
+ * Gets the Hive container instance.
+ *
+ * @return HiveContainer instance
+ */
+ public static HiveContainer getHiveContainer() {
+ return hiveContainer;
+ }
+
+ /**
+ * Gets the base directory for test data.
+ *
+ * @return Base directory
+ */
+ public static File getBaseDir() {
+ return dirTestWatcher.getRootDir();
+ }
+
+ @BeforeClass
+ public static void initHive() throws Exception {
+ synchronized (HiveTestSuite.class) {
+ if (initCount.get() == 0) {
+ logger.info("Getting shared Hive container for tests");
+
+ // Get shared Hive container instance
+ hiveContainer = HiveContainer.getInstance();
+
+ metastoreUri = hiveContainer.getMetastoreUri();
+ jdbcUrl = hiveContainer.getJdbcUrl();
+
+ logger.info("Hive container started successfully");
+ logger.info("Metastore URI: {}", metastoreUri);
+ logger.info("JDBC URL: {}", jdbcUrl);
+
+ // Generate test data
+ generateTestData();
+ }
+ initCount.incrementAndGet();
+ }
+ }
+
+ /**
+ * Generates test data in the Hive instance.
+ */
+ private static void generateTestData() {
+ logger.info("Generating test data in Hive");
+ try (Connection connection = getConnection();
+ Statement statement = connection.createStatement()) {
+
+ // Create a simple test table to verify connectivity
+ statement.execute("CREATE DATABASE IF NOT EXISTS default");
+ statement.execute("USE default");
+
+ logger.info("Test data generation completed");
+ } catch (Exception e) {
+ logger.error("Failed to generate test data", e);
+ throw new RuntimeException("Failed to generate test data", e);
+ }
+ }
+
+ /**
+ * Gets a JDBC connection to HiveServer2.
+ *
+ * @return JDBC Connection
+ * @throws SQLException if connection fails
+ */
+ public static Connection getConnection() throws SQLException {
+ try {
+ Class.forName("org.apache.hive.jdbc.HiveDriver");
+ } catch (ClassNotFoundException e) {
+ throw new SQLException("Hive JDBC driver not found", e);
+ }
+ return DriverManager.getConnection(jdbcUrl);
+ }
+
+ /**
+ * Executes a Hive query using JDBC.
+ *
+ * @param query SQL query to execute
+ * @throws SQLException if query execution fails
+ */
+ public static void executeQuery(String query) throws SQLException {
+ try (Connection connection = getConnection();
+ Statement statement = connection.createStatement()) {
+ statement.execute(query);
+ }
+ }
+
+ @AfterClass
+ public static void tearDownHive() {
+ synchronized (HiveTestSuite.class) {
+ if (initCount.decrementAndGet() == 0) {
+ // Container is shared singleton, will be cleaned up by Testcontainers at JVM shutdown
+ logger.info("Test suite finished, container will be reused for other tests");
+ }
+ }
+ }
+}
diff --git a/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/hive/HiveTestUtilities.java b/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/hive/HiveTestUtilities.java
index 2da8acbd4b4..62c70a34c19 100644
--- a/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/hive/HiveTestUtilities.java
+++ b/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/hive/HiveTestUtilities.java
@@ -31,12 +31,9 @@
import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
import org.apache.hadoop.util.ComparableVersion;
import org.apache.hive.common.util.HiveVersionInfo;
-import org.junit.AssumptionViolatedException;
import static org.hamcrest.CoreMatchers.containsString;
-import static org.hamcrest.CoreMatchers.startsWith;
import static org.hamcrest.MatcherAssert.assertThat;
-import static org.junit.Assume.assumeThat;
public class HiveTestUtilities {
@@ -124,16 +121,6 @@ public static void assertNativeScanUsed(QueryBuilder queryBuilder, String table)
assertThat(plan, containsString("HiveDrillNativeParquetScan"));
}
- /**
- * Current Hive version doesn't support JDK 9+.
- * Checks if current version is supported by Hive.
- *
- * @return {@code true} if current version is supported by Hive, {@code false} otherwise
- */
- public static boolean supportedJavaVersion() {
- return System.getProperty("java.version").startsWith("1.8");
- }
-
/**
* Checks whether current version is not less than hive 3.0
*/
@@ -141,14 +128,4 @@ public static boolean isHive3() {
return new ComparableVersion(HiveVersionInfo.getVersion())
.compareTo(new ComparableVersion("3.0")) >= 0;
}
-
- /**
- * Checks if current version is supported by Hive.
- *
- * @throws AssumptionViolatedException if current version is not supported by Hive,
- * so unit tests may be skipped.
- */
- public static void assumeJavaVersion() throws AssumptionViolatedException {
- assumeThat("Skipping tests since Hive supports only JDK 8.", System.getProperty("java.version"), startsWith("1.8"));
- }
}
diff --git a/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/hive/complex_types/TestHiveArrays.java b/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/hive/complex_types/TestHiveArrays.java
index a95a0cf6aa1..2e0116aab00 100644
--- a/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/hive/complex_types/TestHiveArrays.java
+++ b/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/hive/complex_types/TestHiveArrays.java
@@ -19,7 +19,9 @@
import java.math.BigDecimal;
import java.nio.charset.StandardCharsets;
-import java.nio.file.Paths;
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.Statement;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
@@ -29,15 +31,10 @@
import org.apache.drill.categories.HiveStorageTest;
import org.apache.drill.categories.SlowTest;
import org.apache.drill.exec.ExecConstants;
-import org.apache.drill.exec.hive.HiveClusterTest;
-import org.apache.drill.exec.hive.HiveTestFixture;
-import org.apache.drill.exec.hive.HiveTestUtilities;
+import org.apache.drill.exec.hive.HiveTestBase;
import org.apache.drill.exec.util.StoragePluginTestUtils;
import org.apache.drill.exec.util.Text;
-import org.apache.drill.test.ClusterFixture;
import org.apache.drill.test.TestBuilder;
-import org.apache.hadoop.hive.ql.Driver;
-import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
@@ -46,164 +43,130 @@
import static java.util.Collections.emptyList;
import static org.apache.drill.exec.expr.fn.impl.DateUtility.parseBest;
import static org.apache.drill.exec.expr.fn.impl.DateUtility.parseLocalDate;
-import static org.apache.drill.exec.hive.HiveTestUtilities.assertNativeScanUsed;
import static org.apache.drill.test.TestBuilder.listOf;
import static org.apache.drill.test.TestBuilder.mapOfObject;
@Category({SlowTest.class, HiveStorageTest.class})
-public class TestHiveArrays extends HiveClusterTest {
-
- private static HiveTestFixture hiveTestFixture;
+public class TestHiveArrays extends HiveTestBase {
private static final String[] TYPES = {"int", "string", "varchar(5)", "char(2)", "tinyint",
"smallint", "decimal(9,3)", "boolean", "bigint", "float", "double", "date", "timestamp"};
@BeforeClass
- public static void setUp() throws Exception {
- startCluster(ClusterFixture.builder(dirTestWatcher)
- .sessionOption(ExecConstants.HIVE_OPTIMIZE_PARQUET_SCAN_WITH_NATIVE_READER, true));
- hiveTestFixture = HiveTestFixture.builder(dirTestWatcher).build();
- hiveTestFixture.getDriverManager().runWithinSession(TestHiveArrays::generateData);
- hiveTestFixture.getPluginManager().addHivePluginTo(cluster.drillbit());
- }
-
- @AfterClass
- public static void tearDown() {
- if (hiveTestFixture != null) {
- hiveTestFixture.getPluginManager().removeHivePluginFrom(cluster.drillbit());
+ public static void generateTestData() throws Exception {
+ String jdbcUrl = String.format("jdbc:hive2://%s:%d/default",
+ HIVE_CONTAINER.getHost(),
+ HIVE_CONTAINER.getMappedPort(10000));
+
+ try (Connection conn = DriverManager.getConnection(jdbcUrl, "", "");
+ Statement stmt = conn.createStatement()) {
+
+ // Create and populate tables for each type
+ for (String type : TYPES) {
+ String tableName = getTableNameFromType(type);
+ String hiveType = type.toUpperCase();
+
+ // Create table
+ String ddl = String.format(
+ "CREATE TABLE IF NOT EXISTS %s(rid INT, arr_n_0 ARRAY<%s>, arr_n_1 ARRAY