diff --git a/src/optimize_main.py b/src/optimize_main.py
index cda49811560df50f38fcac7db8fe562585544112..2b3a1dba3e578966722e5361a16270046bd6932c 100644
--- a/src/optimize_main.py
+++ b/src/optimize_main.py
@@ -14,11 +14,9 @@ import numpy as np
 import matplotlib.pyplot as plt
 from pymoo.core.problem import Problem
 from pymoo.optimize import minimize
-from scipy.stats import ttest_ind
 from optimization_libraries import initialize_algorithm
 from parallel_computing import execute_parallel_tasks, cleanup_temp_dirs
 from config import PARAMETERS, OBJECTIVE_NAMES, MAXIMIZE, PARAM_BOUND_VALUES, PARAM_TYPES, PRECISION, PLOT_CONFIG, OPTIMIZATION_CONFIG, N_JOBS
-from adaptive_instance_selection import initial_sampling, evaluate_samples, advanced_clustering_samples, adaptive_select_informative_instances, iterative_refinement, generate_new_samples
 
 class OptimizationProblem(Problem):
     def __init__(self):
@@ -39,33 +37,33 @@ class OptimizationProblem(Problem):
         for i, param_type in enumerate(self.param_types):
             if param_type == "int":
                 X[:, i] = np.round(X[:, i]).astype(int)
+            else:
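+                # Continuous parameters are kept as floats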
+                X[:, i] = X[:, i].astype(float)
         param_values_list = [dict(zip(self.param_names, x)) for x in X]
-        results = execute_parallel_tasks(param_values_list, OPTIMIZATION_CONFIG["USE_ADAPTIVE_INSTANCE_SELECTION"], self.maximize_indices)
+        results = execute_parallel_tasks(param_values_list)
 
-        # Debugging output before any processing
         print(f"Initial results: {results}")
         print(f"Number of parameter sets evaluated: {len(param_values_list)}")
         print(f"Expected shape of results: ({len(param_values_list)}, {len(self.objective_names)})")
 
-        # Handle cases where not all results are returned
         if len(results) != len(param_values_list):
             missing_count = len(param_values_list) - len(results)
             results.extend([[np.nan] * len(self.objective_names)] * missing_count)
 
-        # Apply negation to objectives that need to be maximized
         for i in range(len(results)):
             for idx in self.maximize_indices:
                 results[i] = list(results[i])
                 results[i][idx] = -results[i][idx]
-        
-        # Debugging output after processing
+
         print(f"Processed results: {results}")
 
-        # Ensure results are a 2D array of shape (len(X), len(self.objective_names))
         results_array = np.array(results)
         print(f"Shape of results array: {results_array.shape}")
 
-        out["F"] = results_array.reshape(len(X), len(self.objective_names))  # Ensure results are a 2D array
+        out["F"] = results_array.reshape(len(X), len(self.objective_names))
+        out["X"] = X
 
 def create_results_folder():
     results_folder = 'results'
@@ -73,146 +71,80 @@ def create_results_folder():
         os.makedirs(results_folder)
     return results_folder
 
-def run_optimization(use_adaptive_instance_selection):
-
-    # Ensure the results folder exists
+def run_optimization():
     results_folder = create_results_folder()
-
-    # Set the adaptive instance selection flag
-    OPTIMIZATION_CONFIG['USE_ADAPTIVE_INSTANCE_SELECTION'] = use_adaptive_instance_selection
-    adaptive_frequency = OPTIMIZATION_CONFIG['ADAPTIVE_INSTANCE_SELECTION_FREQUENCY']
-
-    # Initialize the population size
     pop_size = OPTIMIZATION_CONFIG['POP_SIZE']
-
-    # Initialize the optimization algorithm
-    algorithm = initialize_algorithm(
-            OPTIMIZATION_CONFIG['ALGORITHM_NAME'],
-            pop_size
-        )
-
-    # Define the optimization problem
+    algorithm = initialize_algorithm(OPTIMIZATION_CONFIG['ALGORITHM_NAME'], pop_size)
     problem = OptimizationProblem()
-
     start_time = time.time()
-    res = None  # Initialize res to handle early termination case
+    res = None
+
     try:
-        # Run the optimization
         for gen in range(OPTIMIZATION_CONFIG['N_GEN']):
-            if pop_size <= OPTIMIZATION_CONFIG['MIN_POP_SIZE']:
-                print("Stopping optimization as population size has reached the minimum threshold.")
-                break
-
             res = minimize(problem, algorithm, ("n_gen", 1), verbose=True)
 
-            if use_adaptive_instance_selection and gen > 0 and (gen + 1) % adaptive_frequency == 0:
-                current_samples = res.pop.get("X")
-                current_results = res.pop.get("F")
-
-                print(f"Generation {gen + 1}: Applying adaptive instance selection")
-                print(f"Current samples: {current_samples.shape}")
-                print(f"Current results: {current_results.shape}")
-
-                try:
-                    # Apply adaptive instance selection
-                    adaptive_samples = adaptive_select_informative_instances(
-                        current_samples, current_results, 
-                        initial_threshold=0.05, adapt_rate=0.01, 
-                        desired_samples=pop_size // 2
-                    )
-
-                    # Select half from algorithm population and half from adaptive instance selection
-                    num_algorithm_samples = pop_size // 2
-                    algorithm_samples_indices = np.random.choice(len(current_samples), num_algorithm_samples, replace=False)
-                    algorithm_samples = current_samples[algorithm_samples_indices]
-
-                    combined_samples = np.vstack((algorithm_samples, adaptive_samples))
-
-                    # Evaluate all combined samples
-                    out = {"F": np.zeros((len(combined_samples), len(problem.objective_names)))}  # Initialize output
-                    problem._evaluate(combined_samples, out=out)  # Evaluate combined samples
-
-                    res.pop.set("X", combined_samples[:pop_size])  # Set only the first pop_size samples
-                    res.pop.set("F", np.array(out["F"])[:pop_size])  # Set only the first pop_size results
-
-                except RuntimeError as e:
-                    print(f"Adaptive instance selection failed: {e}")
-                    # If adaptive instance selection fails, fall back to using the current population
-                    res.pop.set("X", current_samples)
-                    res.pop.set("F", current_results)
-
-                # Reduce population size dynamically
-                pop_size = max(OPTIMIZATION_CONFIG['MIN_POP_SIZE'], int(pop_size * 0.9))  # Reduce by 10% each iteration, minimum threshold
-                algorithm = initialize_algorithm(
-                    OPTIMIZATION_CONFIG['ALGORITHM_NAME'],
-                    pop_size
-                )
-    
     finally:
-        # Cleanup temporary directories
         cleanup_temp_dirs()
     end_time = time.time()
-
     elapsed_time = end_time - start_time
-    print(f"Time with{'out' if not use_adaptive_instance_selection else ''} adaptive instance selection: {elapsed_time:.2f} seconds")
-    
+    print(f"Elapsed time: {elapsed_time:.2f} seconds")
+
     if res is not None:
         print_and_plot_results(res, problem)
 
-        # Save results to a file
-        # Negate back the maximized objectives before saving
         results_to_save = res.F.copy()
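+        # Copy the decision variables of the final solutions so they can be saved with the objectives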
+        parameters_to_save = res.X.copy()
         for i in range(len(results_to_save)):
             for idx in problem.maximize_indices:
                 results_to_save[i][idx] = -results_to_save[i][idx]
 
         results_data = {
             "results": results_to_save.tolist(),
+            "parameters": parameters_to_save.tolist(),
             "elapsed_time": elapsed_time,
-            "use_adaptive_instance_selection": use_adaptive_instance_selection
         }
-        filename = os.path.join(results_folder, f'optimization_results_{"with" if use_adaptive_instance_selection else "without"}_adaptive.json')
+    filename = os.path.join(results_folder, 'optimization_results.json')
         with open(filename, 'w') as f:
             json.dump(results_data, f)
-        print(f"Results have been stored in: {filename}")
+        print(f"Results stored in: {filename}")
 
     return res.F if res is not None else None, elapsed_time
 
 def print_and_plot_results(res, problem):
     print("Optimization Results:")
     for i, result in enumerate(res.F):
-        # Negate back the maximized objectives for display
         result = list(result)
         for idx in problem.maximize_indices:
             result[idx] = -result[idx]
         result = tuple(result)
         
         print(f"Solution {i}: ", end="")
-        for name, value in zip(OBJECTIVES, result):
+        for name, value in zip(OBJECTIVE_NAMES, result):
             print(f"{name.capitalize()} = {value:.{PRECISION}f}", end=", ")
         print()
 
-    try: 
-        for idx in problem.maximize_indices:
-            res.F[:, idx] = -res.F[:, idx]
-    
-        plt.figure(figsize=(8, 6))
-        plt.scatter(res.F[:, 0], res.F[:, 1])
-        plt.xlabel(PLOT_CONFIG["PLOT_X"], fontsize=14)
-        plt.ylabel(PLOT_CONFIG["PLOT_Y"], fontsize=14)
-        plt.title(PLOT_CONFIG["PLOT_TITLE"], fontsize=16)
-        plt.grid(True)
-        plt.tight_layout()
-        plt.show()
-    except Exception as e:
-        print(f"Error during plotting: {e}")
+    if PLOT_CONFIG.get("ENABLE_PLOT", True):
+        try: 
+            for idx in problem.maximize_indices:
+                res.F[:, idx] = -res.F[:, idx]
+        
+            plt.figure(figsize=(8, 6))
+            plt.scatter(res.F[:, 0], res.F[:, 1])
+            plt.xlabel(PLOT_CONFIG["PLOT_X"], fontsize=14)
+            plt.ylabel(PLOT_CONFIG["PLOT_Y"], fontsize=14)
+            plt.title(PLOT_CONFIG["PLOT_TITLE"], fontsize=16)
+            plt.grid(True)
+            plt.tight_layout()
+            plt.show()
+        except Exception as e:
+            print(f"Error during plotting: {e}")
 
 def main():
-    use_adaptive_instance_selection = OPTIMIZATION_CONFIG["USE_ADAPTIVE_INSTANCE_SELECTION"]
-
-    print(f"Running optimization with{'out' if not use_adaptive_instance_selection else ''} adaptive instance selection...")
-    results, elapsed_time = run_optimization(use_adaptive_instance_selection=use_adaptive_instance_selection)
-    print(f"Time with{'out' if not use_adaptive_instance_selection else ''} adaptive instance selection: {elapsed_time:.2f} seconds")
+    print(f"Running optimization...")
+    results, elapsed_time = run_optimization()
+    print(f"Time: {elapsed_time:.2f} seconds")
 
 if __name__ == "__main__":
     main()
\ No newline at end of file