icebergConfig = config.toIcebergConfig();
+ Catalog backendCatalog =
+ org.apache.iceberg.CatalogUtil.buildIcebergCatalog("backend", icebergConfig, null);
+
+ // Start ICE REST catalog server
+ server = createServer("localhost", 8080, backendCatalog, config, icebergConfig);
+ server.start();
+
+ // Wait for server to be ready
+ while (!server.isStarted()) {
+ Thread.sleep(100);
+ }
+
+ // Server is ready for CLI commands
+ }
+
+ @AfterClass
+ public void tearDown() {
+
+ // Stop the REST catalog server
+ if (server != null) {
+ try {
+ server.stop();
+ } catch (Exception e) {
+ logger.error("Error stopping server: {}", e.getMessage(), e);
+ }
+ }
+
+ // Stop minio container
+ if (minio != null && minio.isRunning()) {
+ minio.stop();
+ }
+ }
+
+ /** Helper method to create a temporary CLI config file */
+ protected File createTempCliConfig() throws Exception {
+ File tempConfigFile = File.createTempFile("ice-rest-cli-", ".yaml");
+ tempConfigFile.deleteOnExit();
+
+ String configContent = "uri: http://localhost:8080\n";
+ Files.write(tempConfigFile.toPath(), configContent.getBytes());
+
+ return tempConfigFile;
+ }
+
+ /** Get the MinIO endpoint URL */
+ protected String getMinioEndpoint() {
+ return "http://" + minio.getHost() + ":" + minio.getMappedPort(9000);
+ }
+
+ /** Get the REST catalog URI */
+ protected String getCatalogUri() {
+ return "http://localhost:8080";
+ }
+}
diff --git a/ice-rest-catalog/src/test/java/com/altinity/ice/rest/catalog/ScenarioBasedIT.java b/ice-rest-catalog/src/test/java/com/altinity/ice/rest/catalog/ScenarioBasedIT.java
new file mode 100644
index 0000000..b435134
--- /dev/null
+++ b/ice-rest-catalog/src/test/java/com/altinity/ice/rest/catalog/ScenarioBasedIT.java
@@ -0,0 +1,164 @@
+/*
+ * Copyright (c) 2025 Altinity Inc and/or its affiliates. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ */
+package com.altinity.ice.rest.catalog;
+
+import java.io.File;
+import java.net.URISyntaxException;
+import java.net.URL;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+/**
+ * Scenario-based integration tests for ICE REST Catalog.
+ *
+ * This test class automatically discovers and executes all test scenarios from the
+ * test/resources/scenarios directory. Each scenario is run as a separate test case.
+ */
+public class ScenarioBasedIT extends RESTCatalogTestBase {
+
+ /**
+ * Data provider that discovers all test scenarios.
+ *
+ * @return Array of scenario names to be used as test parameters
+ * @throws Exception If there's an error discovering scenarios
+ */
+ @DataProvider(name = "scenarios")
+ public Object[][] scenarioProvider() throws Exception {
+ Path scenariosDir = getScenariosDirectory();
+ ScenarioTestRunner runner = createScenarioRunner();
+
+ List<String> scenarios = runner.discoverScenarios();
+
+ if (scenarios.isEmpty()) {
+ logger.warn("No test scenarios found in: {}", scenariosDir);
+ return new Object[0][0];
+ }
+
+ logger.info("Discovered {} test scenario(s): {}", scenarios.size(), scenarios);
+
+ // Convert to Object[][] for TestNG data provider
+ Object[][] data = new Object[scenarios.size()][1];
+ for (int i = 0; i < scenarios.size(); i++) {
+ data[i][0] = scenarios.get(i);
+ }
+ return data;
+ }
+
+ /**
+ * Parameterized test that executes a single scenario.
+ *
+ * @param scenarioName Name of the scenario to execute
+ * @throws Exception If the scenario execution fails
+ */
+ @Test(dataProvider = "scenarios")
+ public void testScenario(String scenarioName) throws Exception {
+ logger.info("====== Starting scenario test: {} ======", scenarioName);
+
+ ScenarioTestRunner runner = createScenarioRunner();
+ ScenarioTestRunner.ScenarioResult result = runner.executeScenario(scenarioName);
+
+ // Log results
+ if (result.runScriptResult() != null) {
+ logger.info("Run script exit code: {}", result.runScriptResult().exitCode());
+ }
+
+ if (result.verifyScriptResult() != null) {
+ logger.info("Verify script exit code: {}", result.verifyScriptResult().exitCode());
+ }
+
+ // Assert success
+ if (!result.isSuccess()) {
+ StringBuilder errorMessage = new StringBuilder();
+ errorMessage.append("Scenario '").append(scenarioName).append("' failed:\n");
+
+ if (result.runScriptResult() != null && result.runScriptResult().exitCode() != 0) {
+ errorMessage.append("\nRun script failed with exit code: ");
+ errorMessage.append(result.runScriptResult().exitCode());
+ errorMessage.append("\nStdout:\n");
+ errorMessage.append(result.runScriptResult().stdout());
+ errorMessage.append("\nStderr:\n");
+ errorMessage.append(result.runScriptResult().stderr());
+ }
+
+ if (result.verifyScriptResult() != null && result.verifyScriptResult().exitCode() != 0) {
+ errorMessage.append("\nVerify script failed with exit code: ");
+ errorMessage.append(result.verifyScriptResult().exitCode());
+ errorMessage.append("\nStdout:\n");
+ errorMessage.append(result.verifyScriptResult().stdout());
+ errorMessage.append("\nStderr:\n");
+ errorMessage.append(result.verifyScriptResult().stderr());
+ }
+
+ throw new AssertionError(errorMessage.toString());
+ }
+
+ logger.info("====== Scenario test passed: {} ======", scenarioName);
+ }
+
+ /**
+ * Create a ScenarioTestRunner with the appropriate template variables.
+ *
+ * @return Configured ScenarioTestRunner
+ * @throws Exception If there's an error creating the runner
+ */
+ private ScenarioTestRunner createScenarioRunner() throws Exception {
+ Path scenariosDir = getScenariosDirectory();
+
+ // Create CLI config file
+ File cliConfig = createTempCliConfig();
+
+ // Build template variables
+ Map<String, String> templateVars = new HashMap<>();
+ templateVars.put("CLI_CONFIG", cliConfig.getAbsolutePath());
+ templateVars.put("MINIO_ENDPOINT", getMinioEndpoint());
+ templateVars.put("CATALOG_URI", getCatalogUri());
+
+ // Try to find ice-jar in the build
+ String projectRoot = Paths.get("").toAbsolutePath().getParent().toString();
+ String iceJar = projectRoot + "/ice/target/ice-jar";
+ File iceJarFile = new File(iceJar);
+
+ if (iceJarFile.exists() && iceJarFile.canExecute()) {
+ // Use pre-built ice-jar if available
+ templateVars.put("ICE_CLI", iceJar);
+ logger.info("Using ice-jar from: {}", iceJar);
+ } else {
+ // Fall back to using local-ice wrapper script
+ String localIce = projectRoot + "/.bin/local-ice";
+ templateVars.put("ICE_CLI", localIce);
+ logger.info("Using local-ice script from: {}", localIce);
+ }
+
+ return new ScenarioTestRunner(scenariosDir, templateVars);
+ }
+
+ /**
+ * Get the path to the scenarios directory.
+ *
+ * @return Path to scenarios directory
+ * @throws URISyntaxException If the resource URL cannot be converted to a path
+ */
+ private Path getScenariosDirectory() throws URISyntaxException {
+ // Get the scenarios directory from test resources
+ URL scenariosUrl = getClass().getClassLoader().getResource("scenarios");
+
+ if (scenariosUrl == null) {
+ // If not found in resources, try relative to project
+ return Paths.get("src/test/resources/scenarios");
+ }
+
+ return Paths.get(scenariosUrl.toURI());
+ }
+}
diff --git a/ice-rest-catalog/src/test/java/com/altinity/ice/rest/catalog/ScenarioConfig.java b/ice-rest-catalog/src/test/java/com/altinity/ice/rest/catalog/ScenarioConfig.java
new file mode 100644
index 0000000..17d5b16
--- /dev/null
+++ b/ice-rest-catalog/src/test/java/com/altinity/ice/rest/catalog/ScenarioConfig.java
@@ -0,0 +1,37 @@
+/*
+ * Copyright (c) 2025 Altinity Inc and/or its affiliates. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ */
+package com.altinity.ice.rest.catalog;
+
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Configuration class representing a test scenario loaded from scenario.yaml.
+ *
+ * Instances are deserialized from scenario.yaml by Jackson's YAML-capable ObjectMapper.
+ */
+public record ScenarioConfig(
+ String name,
+ String description,
+ CatalogConfig catalogConfig,
+ Map<String, String> env,
+ CloudResources cloudResources,
+ List<Phase> phases) {
+
+ public record CatalogConfig(String warehouse, String name, String uri) {}
+
+ public record CloudResources(S3Resources s3, SqsResources sqs) {}
+
+ public record S3Resources(List<String> buckets) {}
+
+ public record SqsResources(List<String> queues) {}
+
+ public record Phase(String name, String description) {}
+}
diff --git a/ice-rest-catalog/src/test/java/com/altinity/ice/rest/catalog/ScenarioTestRunner.java b/ice-rest-catalog/src/test/java/com/altinity/ice/rest/catalog/ScenarioTestRunner.java
new file mode 100644
index 0000000..83a6b19
--- /dev/null
+++ b/ice-rest-catalog/src/test/java/com/altinity/ice/rest/catalog/ScenarioTestRunner.java
@@ -0,0 +1,276 @@
+/*
+ * Copyright (c) 2025 Altinity Inc and/or its affiliates. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ */
+package com.altinity.ice.rest.catalog;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.dataformat.yaml.YAMLFactory;
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Test runner for scenario-based integration tests.
+ *
+ * This class discovers scenario directories, loads their configuration, processes script
+ * templates, and executes them in a controlled test environment.
+ */
+public class ScenarioTestRunner {
+
+ private static final Logger logger = LoggerFactory.getLogger(ScenarioTestRunner.class);
+ private static final ObjectMapper yamlMapper = new ObjectMapper(new YAMLFactory());
+
+ private final Path scenariosDir;
+ private final Map<String, String> globalTemplateVars;
+
+ /**
+ * Create a new scenario test runner.
+ *
+ * @param scenariosDir Path to the scenarios directory
+ * @param globalTemplateVars Global template variables available to all scenarios
+ */
+ public ScenarioTestRunner(Path scenariosDir, Map<String, String> globalTemplateVars) {
+ this.scenariosDir = scenariosDir;
+ this.globalTemplateVars = new HashMap<>(globalTemplateVars);
+ }
+
+ /**
+ * Discover all scenario directories.
+ *
+ * @return List of scenario names (directory names)
+ * @throws IOException If there's an error reading the scenarios directory
+ */
+ public List<String> discoverScenarios() throws IOException {
+ if (!Files.exists(scenariosDir) || !Files.isDirectory(scenariosDir)) {
+ logger.warn("Scenarios directory does not exist: {}", scenariosDir);
+ return new ArrayList<>();
+ }
+
+ try (Stream<Path> stream = Files.list(scenariosDir)) {
+ return stream
+ .filter(Files::isDirectory)
+ .map(path -> path.getFileName().toString())
+ .filter(name -> !name.startsWith(".")) // Ignore hidden directories
+ .sorted()
+ .collect(Collectors.toList());
+ }
+ }
+
+ /**
+ * Load a scenario configuration from its directory.
+ *
+ * @param scenarioName Name of the scenario (directory name)
+ * @return ScenarioConfig object
+ * @throws IOException If there's an error reading the scenario configuration
+ */
+ public ScenarioConfig loadScenarioConfig(String scenarioName) throws IOException {
+ Path scenarioDir = scenariosDir.resolve(scenarioName);
+ Path configPath = scenarioDir.resolve("scenario.yaml");
+
+ if (!Files.exists(configPath)) {
+ throw new IOException("scenario.yaml not found in " + scenarioDir);
+ }
+
+ return yamlMapper.readValue(configPath.toFile(), ScenarioConfig.class);
+ }
+
+ /**
+ * Execute a scenario's scripts.
+ *
+ * @param scenarioName Name of the scenario to execute
+ * @return ScenarioResult containing execution results
+ * @throws Exception If there's an error executing the scenario
+ */
+ public ScenarioResult executeScenario(String scenarioName) throws Exception {
+ logger.info("Executing scenario: {}", scenarioName);
+
+ Path scenarioDir = scenariosDir.resolve(scenarioName);
+ ScenarioConfig config = loadScenarioConfig(scenarioName);
+
+ // Build template variables map
+ Map<String, String> templateVars = new HashMap<>(globalTemplateVars);
+ templateVars.put("SCENARIO_DIR", scenarioDir.toAbsolutePath().toString());
+
+ // Add environment variables from scenario config
+ if (config.env() != null) {
+ templateVars.putAll(config.env());
+ }
+
+ ScriptExecutionResult runScriptResult = null;
+ ScriptExecutionResult verifyScriptResult = null;
+
+ // Execute run.sh.tmpl
+ Path runScriptTemplate = scenarioDir.resolve("run.sh.tmpl");
+ if (Files.exists(runScriptTemplate)) {
+ logger.info("Executing run script for scenario: {}", scenarioName);
+ runScriptResult = executeScript(runScriptTemplate, templateVars);
+
+ if (runScriptResult.exitCode() != 0) {
+ logger.error("Run script failed for scenario: {}", scenarioName);
+ logger.error("Exit code: {}", runScriptResult.exitCode());
+ logger.error("stdout:\n{}", runScriptResult.stdout());
+ logger.error("stderr:\n{}", runScriptResult.stderr());
+ return new ScenarioResult(scenarioName, runScriptResult, verifyScriptResult);
+ }
+ } else {
+ logger.warn("No run.sh.tmpl found for scenario: {}", scenarioName);
+ }
+
+ // Execute verify.sh.tmpl (optional)
+ Path verifyScriptTemplate = scenarioDir.resolve("verify.sh.tmpl");
+ if (Files.exists(verifyScriptTemplate)) {
+ logger.info("Executing verify script for scenario: {}", scenarioName);
+ verifyScriptResult = executeScript(verifyScriptTemplate, templateVars);
+
+ if (verifyScriptResult.exitCode() != 0) {
+ logger.error("Verify script failed for scenario: {}", scenarioName);
+ logger.error("Exit code: {}", verifyScriptResult.exitCode());
+ logger.error("stdout:\n{}", verifyScriptResult.stdout());
+ logger.error("stderr:\n{}", verifyScriptResult.stderr());
+ }
+ }
+
+ return new ScenarioResult(scenarioName, runScriptResult, verifyScriptResult);
+ }
+
+ /**
+ * Execute a script template with the given template variables.
+ *
+ * @param scriptTemplatePath Path to the script template
+ * @param templateVars Map of template variables to substitute
+ * @return ScriptExecutionResult containing execution results
+ * @throws IOException If there's an error reading or executing the script
+ */
+ private ScriptExecutionResult executeScript(
+ Path scriptTemplatePath, Map<String, String> templateVars) throws IOException {
+ // Read the script template
+ String scriptContent = Files.readString(scriptTemplatePath);
+
+ // Process template variables
+ String processedScript = processTemplate(scriptContent, templateVars);
+
+ // Create a temporary executable script file
+ Path tempScript = Files.createTempFile("scenario-script-", ".sh");
+ try {
+ Files.writeString(tempScript, processedScript);
+ if (!tempScript.toFile().setExecutable(true)) {
+ logger.warn("Could not set script as executable: {}", tempScript);
+ }
+
+ // Execute the script
+ ProcessBuilder processBuilder = new ProcessBuilder("/bin/bash", tempScript.toString());
+
+ // Set environment variables from template vars
+ Map<String, String> env = processBuilder.environment();
+ env.putAll(templateVars);
+
+ Process process = processBuilder.start();
+
+ // Capture output
+ StringBuilder stdout = new StringBuilder();
+ StringBuilder stderr = new StringBuilder();
+
+ Thread stdoutReader =
+ new Thread(
+ () -> {
+ try (BufferedReader reader =
+ new BufferedReader(new InputStreamReader(process.getInputStream()))) {
+ String line;
+ while ((line = reader.readLine()) != null) {
+ stdout.append(line).append("\n");
+ logger.info("[script] {}", line);
+ }
+ } catch (IOException e) {
+ logger.error("Error reading stdout", e);
+ }
+ });
+
+ Thread stderrReader =
+ new Thread(
+ () -> {
+ try (BufferedReader reader =
+ new BufferedReader(new InputStreamReader(process.getErrorStream()))) {
+ String line;
+ while ((line = reader.readLine()) != null) {
+ stderr.append(line).append("\n");
+ logger.warn("[script] {}", line);
+ }
+ } catch (IOException e) {
+ logger.error("Error reading stderr", e);
+ }
+ });
+
+ stdoutReader.start();
+ stderrReader.start();
+
+ // Wait for the process to complete
+ int exitCode = process.waitFor();
+
+ // Wait for output readers to finish
+ stdoutReader.join();
+ stderrReader.join();
+
+ return new ScriptExecutionResult(exitCode, stdout.toString(), stderr.toString());
+ } catch (InterruptedException e) {
+ Thread.currentThread().interrupt();
+ throw new IOException("Script execution interrupted", e);
+ } finally {
+ // Clean up temporary script file
+ try {
+ Files.deleteIfExists(tempScript);
+ } catch (IOException e) {
+ logger.warn("Failed to delete temporary script file: {}", tempScript, e);
+ }
+ }
+ }
+
+ /**
+ * Process template variables in a script.
+ *
+ * Replaces {{VARIABLE_NAME}} with the corresponding value from templateVars.
+ *
+ * @param template Template string
+ * @param templateVars Map of variable names to values
+ * @return Processed string with variables substituted
+ */
+ private String processTemplate(String template, Map<String, String> templateVars) {
+ String result = template;
+ for (Map.Entry<String, String> entry : templateVars.entrySet()) {
+ String placeholder = "{{" + entry.getKey() + "}}";
+ result = result.replace(placeholder, entry.getValue());
+ }
+ return result;
+ }
+
+ /** Result of executing a scenario. */
+ public record ScenarioResult(
+ String scenarioName,
+ ScriptExecutionResult runScriptResult,
+ ScriptExecutionResult verifyScriptResult) {
+
+ public boolean isSuccess() {
+ boolean runSuccess = runScriptResult == null || runScriptResult.exitCode() == 0;
+ boolean verifySuccess = verifyScriptResult == null || verifyScriptResult.exitCode() == 0;
+ return runSuccess && verifySuccess;
+ }
+ }
+
+ /** Result of executing a single script. */
+ public record ScriptExecutionResult(int exitCode, String stdout, String stderr) {}
+}
diff --git a/ice-rest-catalog/src/test/resources/scenarios/README.md b/ice-rest-catalog/src/test/resources/scenarios/README.md
new file mode 100644
index 0000000..0b0ba23
--- /dev/null
+++ b/ice-rest-catalog/src/test/resources/scenarios/README.md
@@ -0,0 +1,128 @@
+# ICE REST Catalog Test Scenarios
+
+This directory contains scenario-based integration tests for the ICE REST Catalog. Each scenario is self-contained with its own configuration, input data, and execution scripts.
+
+## Directory Structure
+
+```
+scenarios/
+ <scenario-name>/
+ scenario.yaml # Scenario configuration and metadata
+ run.sh.tmpl # Main test execution script (templated)
+ verify.sh.tmpl # Optional verification script (templated)
+ input.parquet # Input data files (0 or more)
+ input2.parquet
+ ...
+```
+
+## Scenario Configuration (scenario.yaml)
+
+Each scenario must have a `scenario.yaml` file that defines:
+
+```yaml
+name: "Human-readable test name"
+description: "Description of what this scenario tests"
+
+# Optional: Override catalog configuration
+catalogConfig:
+ warehouse: "s3://test-bucket/warehouse"
+
+# Environment variables available to scripts
+env:
+ NAMESPACE_NAME: "test_ns"
+ TABLE_NAME: "test_ns.table1"
+ INPUT_FILE: "input.parquet"
+
+# Optional: Cloud resources needed (for future provisioning)
+cloudResources:
+ s3:
+ buckets:
+ - "test-bucket"
+ sqs:
+ queues:
+ - "test-queue"
+
+# Optional: Test execution phases
+phases:
+ - name: "setup"
+ description: "Initialize resources"
+ - name: "run"
+ description: "Execute main test logic"
+ - name: "verify"
+ description: "Verify results"
+ - name: "cleanup"
+ description: "Clean up resources"
+```
+
+## Script Templates
+
+### run.sh.tmpl
+
+The main test execution script. Template variables are replaced at runtime:
+
+- `{{CLI_CONFIG}}` - Path to temporary ICE CLI config file
+- `{{MINIO_ENDPOINT}}` - MinIO endpoint URL
+- `{{CATALOG_URI}}` - REST catalog URI (e.g., http://localhost:8080)
+- `{{SCENARIO_DIR}}` - Absolute path to the scenario directory
+- All environment variables from `scenario.yaml` env section
+
+Example:
+```bash
+#!/bin/bash
+set -e
+
+# Environment variables from scenario.yaml are available
+echo "Testing namespace: ${NAMESPACE_NAME}"
+
+# Use template variables
+ice --config {{CLI_CONFIG}} create-namespace ${NAMESPACE_NAME}
+
+# Reference input files relative to scenario directory
+INPUT_PATH="{{SCENARIO_DIR}}/${INPUT_FILE}"
+ice --config {{CLI_CONFIG}} insert --create-table ${TABLE_NAME} ${INPUT_PATH}
+```
+
+### verify.sh.tmpl (Optional)
+
+Additional verification logic. Same template variables are available.
+
+Example:
+```bash
+#!/bin/bash
+set -e
+
+# Verify expected output files exist
+if [ ! -f /tmp/test_output.txt ]; then
+ echo "Expected output not found"
+ exit 1
+fi
+
+echo "Verification passed"
+exit 0
+```
+
+## Adding New Scenarios
+
+1. Create a new directory under `scenarios/`
+2. Add `scenario.yaml` with configuration
+3. Add `run.sh.tmpl` with test logic
+4. (Optional) Add `verify.sh.tmpl` for additional verification
+5. Add any input data files (`.parquet`, etc.)
+6. The test framework will automatically discover and run the new scenario
+
+## Running Scenarios
+
+Scenarios are discovered and executed automatically by the `ScenarioBasedIT` test class:
+
+```bash
+mvn test -Dtest=ScenarioBasedIT
+```
+
+To run a specific scenario:
+
+```bash
+mvn test -Dtest=ScenarioBasedIT#testScenario[basic-operations]
+```
+
+
+
diff --git a/ice-rest-catalog/src/test/resources/scenarios/basic-operations/run.sh.tmpl b/ice-rest-catalog/src/test/resources/scenarios/basic-operations/run.sh.tmpl
new file mode 100644
index 0000000..e02cb91
--- /dev/null
+++ b/ice-rest-catalog/src/test/resources/scenarios/basic-operations/run.sh.tmpl
@@ -0,0 +1,130 @@
+#!/bin/bash
+set -e
+
+# Template variables will be replaced at runtime:
+# {{ICE_CLI}} - path to ice CLI executable
+# {{CLI_CONFIG}} - path to temporary ICE CLI config file
+# {{MINIO_ENDPOINT}} - minio endpoint URL
+# {{CATALOG_URI}} - REST catalog URI
+# Environment variables from scenario.yaml are also available
+
+echo "Running basic operations test..."
+
+SCENARIO_DIR="{{SCENARIO_DIR}}"
+INPUT_PATH="${SCENARIO_DIR}/${INPUT_FILE}"
+# Use parquet from insert-scan scenario if not present here
+if [ ! -f "${INPUT_PATH}" ]; then
+ INPUT_PATH="${SCENARIO_DIR}/../insert-scan/${INPUT_FILE}"
+fi
+if [ ! -f "${INPUT_PATH}" ]; then
+ echo "No input parquet found (set INPUT_FILE or add input.parquet to scenario)"
+ exit 1
+fi
+
+# Create namespace via CLI
+{{ICE_CLI}} --config {{CLI_CONFIG}} create-namespace ${NAMESPACE_NAME}
+echo "OK Created namespace: ${NAMESPACE_NAME}"
+
+# List namespaces (ice describe) - no output when no tables yet
+{{ICE_CLI}} --config {{CLI_CONFIG}} describe
+echo "OK Listed namespaces"
+
+# Insert from file (like README: ice insert flowers.iris -p file://...)
+{{ICE_CLI}} --config {{CLI_CONFIG}} insert ${TABLE_IRIS} -p "file://${INPUT_PATH}"
+echo "Inserted from file into ${TABLE_IRIS}"
+# Scan and verify data was inserted
+{{ICE_CLI}} --config {{CLI_CONFIG}} scan ${TABLE_IRIS} --limit 5 > /tmp/basic_ops_scan_iris.txt
+if ! grep -q "sepal.length" /tmp/basic_ops_scan_iris.txt; then
+ echo "FAIL: scan output missing expected column 'sepal.length'"
+ cat /tmp/basic_ops_scan_iris.txt
+ exit 1
+fi
+if ! grep -q "variety" /tmp/basic_ops_scan_iris.txt; then
+ echo "FAIL: scan output missing expected column 'variety'"
+ cat /tmp/basic_ops_scan_iris.txt
+ exit 1
+fi
+echo "OK Scan verified ${TABLE_IRIS}"
+
+# Describe catalog (output includes table ids like namespace.table) and verify namespace/table appears
+{{ICE_CLI}} --config {{CLI_CONFIG}} describe > /tmp/basic_ops_describe.txt
+if ! grep -q "${NAMESPACE_NAME}" /tmp/basic_ops_describe.txt; then
+ echo "FAIL: describe output missing namespace ${NAMESPACE_NAME}"
+ cat /tmp/basic_ops_describe.txt
+ exit 1
+fi
+echo "OK Described catalog (namespace and table listed)"
+
+# Insert with partition (like README: --partition='[{"column":"...","transform":"day"}]')
+# Using a column that exists in iris parquet
+{{ICE_CLI}} --config {{CLI_CONFIG}} insert ${TABLE_PARTITIONED} -p "file://${INPUT_PATH}" \
+ --partition='[{"column":"sepal.length","transform":"identity"}]'
+echo "OK Inserted with partition into ${TABLE_PARTITIONED}"
+# Scan and verify data was inserted
+{{ICE_CLI}} --config {{CLI_CONFIG}} scan ${TABLE_PARTITIONED} --limit 5 > /tmp/basic_ops_scan_partitioned.txt
+if ! grep -q "sepal.length" /tmp/basic_ops_scan_partitioned.txt; then
+ echo "FAIL: scan output missing expected column 'sepal.length'"
+ cat /tmp/basic_ops_scan_partitioned.txt
+ exit 1
+fi
+if ! grep -q "variety" /tmp/basic_ops_scan_partitioned.txt; then
+ echo "FAIL: scan output missing expected column 'variety'"
+ cat /tmp/basic_ops_scan_partitioned.txt
+ exit 1
+fi
+echo "OK Scan verified ${TABLE_PARTITIONED}"
+
+# Insert with sort (like README: --sort='[{"column":"..."}]')
+{{ICE_CLI}} --config {{CLI_CONFIG}} insert ${TABLE_SORTED} -p "file://${INPUT_PATH}" \
+ --sort='[{"column":"sepal.length"}]'
+echo "OK Inserted with sort into ${TABLE_SORTED}"
+# Scan and verify data was inserted
+{{ICE_CLI}} --config {{CLI_CONFIG}} scan ${TABLE_SORTED} --limit 5 > /tmp/basic_ops_scan_sorted.txt
+if ! grep -q "sepal.length" /tmp/basic_ops_scan_sorted.txt; then
+ echo "FAIL: scan output missing expected column 'sepal.length'"
+ cat /tmp/basic_ops_scan_sorted.txt
+ exit 1
+fi
+if ! grep -q "variety" /tmp/basic_ops_scan_sorted.txt; then
+ echo "FAIL: scan output missing expected column 'variety'"
+ cat /tmp/basic_ops_scan_sorted.txt
+ exit 1
+fi
+echo "OK Scan verified ${TABLE_SORTED}"
+
+# Inspect catalog (ice describe)
+{{ICE_CLI}} --config {{CLI_CONFIG}} describe
+echo "OK Described catalog"
+
+# Optional: --no-copy insert (like README: create-table + upload + insert --no-copy)
+# Upload to MinIO via AWS CLI if available so insert --no-copy can reference s3://
+# This section is best-effort: if it fails we skip rather than failing the whole test.
+if command -v aws &>/dev/null && [ -n "{{MINIO_ENDPOINT}}" ]; then
+ S3_PATH="s3://${S3_BUCKET}/${NAMESPACE_NAME}/iris_no_copy/$(basename ${INPUT_PATH})"
+ export AWS_ACCESS_KEY_ID=minioadmin
+ export AWS_SECRET_ACCESS_KEY=minioadmin
+ if aws s3 cp --endpoint-url "{{MINIO_ENDPOINT}}" "${INPUT_PATH}" "${S3_PATH}" 2>/dev/null; then
+ if {{ICE_CLI}} --config {{CLI_CONFIG}} create-table ${TABLE_NO_COPY} --schema-from-parquet="file://${INPUT_PATH}" 2>/dev/null && \
+ {{ICE_CLI}} --config {{CLI_CONFIG}} insert ${TABLE_NO_COPY} --no-copy "${S3_PATH}" 2>/dev/null; then
+ echo "OK Insert with --no-copy into ${TABLE_NO_COPY}"
+ {{ICE_CLI}} --config {{CLI_CONFIG}} delete-table ${TABLE_NO_COPY}
+ else
+ echo "SKIP Skipped --no-copy test (ice CLI could not access S3 path)"
+ # Clean up table if it was created
+ {{ICE_CLI}} --config {{CLI_CONFIG}} delete-table ${TABLE_NO_COPY} 2>/dev/null || true
+ fi
+ fi
+fi
+
+# Cleanup tables then namespace
+{{ICE_CLI}} --config {{CLI_CONFIG}} delete-table ${TABLE_IRIS}
+{{ICE_CLI}} --config {{CLI_CONFIG}} delete-table ${TABLE_PARTITIONED}
+{{ICE_CLI}} --config {{CLI_CONFIG}} delete-table ${TABLE_SORTED}
+echo "OK Deleted tables"
+
+# Delete the namespace via CLI
+{{ICE_CLI}} --config {{CLI_CONFIG}} delete-namespace ${NAMESPACE_NAME}
+echo "OK Deleted namespace: ${NAMESPACE_NAME}"
+
+echo "Basic operations test completed successfully"
+
diff --git a/ice-rest-catalog/src/test/resources/scenarios/basic-operations/scenario.yaml b/ice-rest-catalog/src/test/resources/scenarios/basic-operations/scenario.yaml
new file mode 100644
index 0000000..a37e5ab
--- /dev/null
+++ b/ice-rest-catalog/src/test/resources/scenarios/basic-operations/scenario.yaml
@@ -0,0 +1,38 @@
+name: "Basic Catalog Operations"
+description: "Tests fundamental catalog operations like namespace and table management"
+
+# Configuration for ICE REST catalog server (optional overrides from defaults)
+catalogConfig:
+ warehouse: "s3://test-bucket/warehouse"
+
+# Environment variables that will be available to scripts
+env:
+ NAMESPACE_NAME: "test_ns"
+ INPUT_FILE: "input.parquet"
+ TABLE_IRIS: "test_ns.iris"
+ TABLE_PARTITIONED: "test_ns.taxis_p_by_day"
+ TABLE_SORTED: "test_ns.taxis_s_by_day"
+ TABLE_NO_COPY: "test_ns.iris_no_copy"
+ S3_BUCKET: "test-bucket"
+ # NOTE: the optional --no-copy section in run.sh.tmpl requires the AWS CLI to be
+ # installed (used to upload the input parquet to MinIO); it is skipped otherwise.
+
+# Cloud resources needed for this scenario (future use for provisioning)
+cloudResources:
+ s3:
+ buckets:
+ - "test-bucket"
+
+# Test phases
+phases:
+ - name: "setup"
+ description: "Create namespace"
+ - name: "verify"
+ description: "Verify namespace exists and can be deleted"
+ - name: "cleanup"
+ description: "Delete namespace"
+
+
+
+
+
diff --git a/ice-rest-catalog/src/test/resources/scenarios/basic-operations/verify.sh.tmpl b/ice-rest-catalog/src/test/resources/scenarios/basic-operations/verify.sh.tmpl
new file mode 100644
index 0000000..f99539f
--- /dev/null
+++ b/ice-rest-catalog/src/test/resources/scenarios/basic-operations/verify.sh.tmpl
@@ -0,0 +1,39 @@
+#!/bin/bash
+set -e
+
+# Verification script - checks that the test completed successfully
+# Exit code 0 = success, non-zero = failure
+# Expects run.sh to have written: basic_ops_describe.txt, basic_ops_scan_iris.txt,
+# basic_ops_scan_partitioned.txt, basic_ops_scan_sorted.txt under /tmp
+
+echo "Verifying basic operations test..."
+
+for f in /tmp/basic_ops_describe.txt /tmp/basic_ops_scan_iris.txt /tmp/basic_ops_scan_partitioned.txt /tmp/basic_ops_scan_sorted.txt; do
+ if [ ! -f "$f" ]; then
+ echo "FAIL Output file not found: $f"
+ exit 1
+ fi
+ if [ ! -s "$f" ]; then
+ echo "FAIL Output file is empty: $f"
+ exit 1
+ fi
+done
+
+# Verify scan outputs contain expected columns (scan outputs use dots: sepal.length)
+for f in /tmp/basic_ops_scan_iris.txt /tmp/basic_ops_scan_partitioned.txt /tmp/basic_ops_scan_sorted.txt; do
+ if ! grep -q "sepal.length" "$f"; then
+ echo "FAIL $f does not contain expected column 'sepal.length'"
+ exit 1
+ fi
+ if ! grep -q "variety" "$f"; then
+ echo "FAIL $f does not contain expected column 'variety'"
+ exit 1
+ fi
+done
+
+# Cleanup temp files
+rm -f /tmp/basic_ops_describe.txt /tmp/basic_ops_scan_iris.txt /tmp/basic_ops_scan_partitioned.txt /tmp/basic_ops_scan_sorted.txt
+
+echo "OK Verification passed"
+exit 0
+
diff --git a/ice-rest-catalog/src/test/resources/scenarios/insert-partitioned/input.parquet b/ice-rest-catalog/src/test/resources/scenarios/insert-partitioned/input.parquet
new file mode 100644
index 0000000..028c64c
Binary files /dev/null and b/ice-rest-catalog/src/test/resources/scenarios/insert-partitioned/input.parquet differ
diff --git a/ice-rest-catalog/src/test/resources/scenarios/insert-partitioned/run.sh.tmpl b/ice-rest-catalog/src/test/resources/scenarios/insert-partitioned/run.sh.tmpl
new file mode 100644
index 0000000..c6bdf2a
--- /dev/null
+++ b/ice-rest-catalog/src/test/resources/scenarios/insert-partitioned/run.sh.tmpl
@@ -0,0 +1,29 @@
+#!/bin/bash
+set -e
+
+echo "Running insert with partitioning test..."
+
+# Create namespace
+{{ICE_CLI}} --config {{CLI_CONFIG}} create-namespace ${NAMESPACE_NAME}
+echo "OK Created namespace: ${NAMESPACE_NAME}"
+
+# Get the full path to the input file
+SCENARIO_DIR="{{SCENARIO_DIR}}"
+INPUT_PATH="${SCENARIO_DIR}/${INPUT_FILE}"
+
+# Create table with partitioning and insert data
+{{ICE_CLI}} --config {{CLI_CONFIG}} insert --create-table ${TABLE_NAME} ${INPUT_PATH} --partition="${PARTITION_SPEC}"
+echo "OK Inserted data with partitioning into table ${TABLE_NAME}"
+
+# Describe the table to verify partitioning (if describe command exists)
+# {{ICE_CLI}} --config {{CLI_CONFIG}} describe-table ${TABLE_NAME}
+
+# Cleanup
+{{ICE_CLI}} --config {{CLI_CONFIG}} delete-table ${TABLE_NAME}
+echo "OK Deleted table: ${TABLE_NAME}"
+
+{{ICE_CLI}} --config {{CLI_CONFIG}} delete-namespace ${NAMESPACE_NAME}
+echo "OK Deleted namespace: ${NAMESPACE_NAME}"
+
+echo "Insert with partitioning test completed successfully"
+
diff --git a/ice-rest-catalog/src/test/resources/scenarios/insert-partitioned/scenario.yaml b/ice-rest-catalog/src/test/resources/scenarios/insert-partitioned/scenario.yaml
new file mode 100644
index 0000000..43c2157
--- /dev/null
+++ b/ice-rest-catalog/src/test/resources/scenarios/insert-partitioned/scenario.yaml
@@ -0,0 +1,31 @@
+name: "Insert with Partitioning"
+description: "Tests inserting data with partitioning specifications"
+
+catalogConfig:
+ warehouse: "s3://test-bucket/warehouse"
+
+env:
+ NAMESPACE_NAME: "test_insert_partitioned"
+ TABLE_NAME: "test_insert_partitioned.iris_partitioned"
+ INPUT_FILE: "input.parquet"
+ PARTITION_SPEC: '[{"column":"variety","transform":"identity"}]'
+
+cloudResources:
+ s3:
+ buckets:
+ - "test-bucket"
+
+phases:
+ - name: "setup"
+ description: "Create namespace"
+ - name: "run"
+ description: "Create partitioned table and insert data"
+ - name: "verify"
+ description: "Verify table was created with correct partitioning"
+ - name: "cleanup"
+ description: "Delete table and namespace"
+
+
+
+
+
diff --git a/ice-rest-catalog/src/test/resources/scenarios/insert-partitioned/verify.sh.tmpl b/ice-rest-catalog/src/test/resources/scenarios/insert-partitioned/verify.sh.tmpl
new file mode 100644
index 0000000..2bdcf76
--- /dev/null
+++ b/ice-rest-catalog/src/test/resources/scenarios/insert-partitioned/verify.sh.tmpl
@@ -0,0 +1,15 @@
+#!/bin/bash
+set -e
+
+echo "Verifying insert with partitioning test..."
+
+# For now, the run.sh itself verifies by successfully completing
+# Future: could add table metadata checks here
+
+echo "OK Verification passed"
+exit 0
+
+
+
+
+
diff --git a/ice-rest-catalog/src/test/resources/scenarios/insert-scan/input.parquet b/ice-rest-catalog/src/test/resources/scenarios/insert-scan/input.parquet
new file mode 100644
index 0000000..028c64c
Binary files /dev/null and b/ice-rest-catalog/src/test/resources/scenarios/insert-scan/input.parquet differ
diff --git a/ice-rest-catalog/src/test/resources/scenarios/insert-scan/run.sh.tmpl b/ice-rest-catalog/src/test/resources/scenarios/insert-scan/run.sh.tmpl
new file mode 100644
index 0000000..81a250a
--- /dev/null
+++ b/ice-rest-catalog/src/test/resources/scenarios/insert-scan/run.sh.tmpl
@@ -0,0 +1,45 @@
+#!/bin/bash
+# Inserts a parquet file into a new table, scans it back, and checks expected columns.
+set -euo pipefail
+
+echo "Running insert and scan test..."
+
+# Create namespace
+{{ICE_CLI}} --config {{CLI_CONFIG}} create-namespace "${NAMESPACE_NAME}"
+echo "OK Created namespace: ${NAMESPACE_NAME}"
+
+# Resolve the input file relative to the scenario directory
+SCENARIO_DIR="{{SCENARIO_DIR}}"
+INPUT_PATH="${SCENARIO_DIR}/${INPUT_FILE}"
+
+# Create table and insert data (quote args to survive spaces/globs)
+{{ICE_CLI}} --config {{CLI_CONFIG}} insert --create-table "${TABLE_NAME}" "${INPUT_PATH}"
+echo "OK Inserted data from ${INPUT_FILE} into table ${TABLE_NAME}"
+
+# Scan the table; verify.sh.tmpl reads this exact path, keep them in sync
+{{ICE_CLI}} --config {{CLI_CONFIG}} scan "${TABLE_NAME}" > /tmp/scan_output.txt
+echo "OK Scanned table ${TABLE_NAME}"
+
+# Check that scan output contains expected data
+if ! grep -q "sepal" /tmp/scan_output.txt; then
+  echo "FAIL Scan output does not contain expected column 'sepal'"
+  cat /tmp/scan_output.txt
+  exit 1
+fi
+
+if ! grep -q "variety" /tmp/scan_output.txt; then
+  echo "FAIL Scan output does not contain expected column 'variety'"
+  cat /tmp/scan_output.txt
+  exit 1
+fi
+
+echo "OK Scan output contains expected columns"
+
+# Cleanup
+{{ICE_CLI}} --config {{CLI_CONFIG}} delete-table "${TABLE_NAME}"
+echo "OK Deleted table: ${TABLE_NAME}"
+
+{{ICE_CLI}} --config {{CLI_CONFIG}} delete-namespace "${NAMESPACE_NAME}"
+echo "OK Deleted namespace: ${NAMESPACE_NAME}"
+
+echo "Insert and scan test completed successfully"
diff --git a/ice-rest-catalog/src/test/resources/scenarios/insert-scan/scenario.yaml b/ice-rest-catalog/src/test/resources/scenarios/insert-scan/scenario.yaml
new file mode 100644
index 0000000..d060db8
--- /dev/null
+++ b/ice-rest-catalog/src/test/resources/scenarios/insert-scan/scenario.yaml
@@ -0,0 +1,28 @@
+name: "Insert and Scan Operations"
+description: "Tests inserting data from parquet files and scanning tables"
+
+catalogConfig:
+ warehouse: "s3://test-bucket/warehouse"
+
+env:
+ NAMESPACE_NAME: "test_scan"
+  TABLE_NAME: "test_scan.iris"
+ INPUT_FILE: "input.parquet"
+
+cloudResources:
+ s3:
+ buckets:
+ - "test-bucket"
+
+phases:
+ - name: "setup"
+ description: "Create namespace and table with data"
+ - name: "verify"
+ description: "Scan table and verify data"
+ - name: "cleanup"
+ description: "Delete table and namespace"
+
+
+
+
+
diff --git a/ice-rest-catalog/src/test/resources/scenarios/insert-scan/verify.sh.tmpl b/ice-rest-catalog/src/test/resources/scenarios/insert-scan/verify.sh.tmpl
new file mode 100644
index 0000000..e73e722
--- /dev/null
+++ b/ice-rest-catalog/src/test/resources/scenarios/insert-scan/verify.sh.tmpl
@@ -0,0 +1,23 @@
+#!/bin/bash
+set -e
+
+echo "Verifying insert and scan test..."
+
+# Check that scan output file was created and contains data
+if [ ! -f /tmp/scan_output.txt ]; then
+  echo "FAIL Scan output file not found"
+  exit 1
+fi
+
+if [ ! -s /tmp/scan_output.txt ]; then
+  echo "FAIL Scan output file is empty"
+  exit 1
+fi
+
+echo "OK Scan output file exists and contains data"
+
+# Cleanup temp file so a later run cannot pass on stale output
+rm -f /tmp/scan_output.txt
+
+echo "OK Verification passed"
+exit 0
diff --git a/ice/src/test/resources/ice-rest-catalog.yaml b/ice/src/test/resources/ice-rest-catalog.yaml
new file mode 100644
index 0000000..6c27d06
--- /dev/null
+++ b/ice/src/test/resources/ice-rest-catalog.yaml
@@ -0,0 +1,12 @@
+uri: jdbc:sqlite:file:data/ice-rest-catalog/db.sqlite?journal_mode=WAL&synchronous=OFF&journal_size_limit=500
+
+
+s3:
+ endpoint: http://minio:9000
+ pathStyleAccess: true
+ accessKeyID: minioadmin
+ secretAccessKey: minioadmin
+ region: minio
+
+bearerTokens:
+ - value: foo