# Point-cloud distribution similarity check: scores a VTP surface mesh
# against a saved training-set histogram via cosine similarity.
# -------------------- Imports --------------------
# Grouped stdlib / third-party per PEP 8; `plt` is currently unused here but
# kept in case other parts of the pipeline expect it at module level.
import os

import matplotlib.pyplot as plt
import numpy as np
import pyvista as pv
from scipy.ndimage import gaussian_filter
from scipy.spatial.distance import cosine

# -------------------- PyVista Config --------------------
# Render off-screen so the script works on headless (no-display) nodes.
pv.OFF_SCREEN = True
pv.set_plot_theme("document")

# -------------------- Paths --------------------
dataset = "cadillac_const_vel_30"
dataname = "cadillac_3"
train_folder = f'/raid/ansysai/pkakka/6-Transformers/comparePhysicsLM/Data/{dataset}/'
# Metrics are written two levels above the data folder, mirrored by dataset name.
train_save_dir = os.path.join(train_folder, f"../../metrics/{dataset}/")
os.makedirs(train_save_dir, exist_ok=True)
# -------------------- Utility Functions --------------------
def get_points(file_path, max_points=5000):
    """Extract and subsample a point cloud from a VTP file.

    Args:
        file_path: Path to the .vtp mesh file readable by PyVista.
        max_points: Maximum number of points returned; larger clouds are
            randomly subsampled without replacement.

    Returns:
        Array of mesh vertex coordinates with at most ``max_points`` rows.

    Raises:
        ValueError: If PyVista fails to read the file (original error chained).
    """
    # Keep the try body minimal: only the read can legitimately fail here.
    try:
        mesh = pv.read(file_path)
    except Exception as e:
        raise ValueError(f"Error reading {file_path}: {e}") from e
    points = mesh.points
    if len(points) > max_points:
        # Uniform random subsample keeps the spatial distribution unbiased.
        indices = np.random.choice(len(points), max_points, replace=False)
        points = points[indices]
    return points
# -------------------- Single VTP Score Function --------------------
def compute_single_vtp_score(vtp_file_path, train_dist_file='train_dist.npz', smooth_sigma=1):
    """
    Compute a cosine-similarity score for a single VTP file against the
    saved training point-cloud distribution.

    Args:
        vtp_file_path: Path to the VTP file to analyze.
        train_dist_file: Filename of the saved training distribution (.npz
            with keys 'hist', 'edges0', 'edges1', 'edges2').
        smooth_sigma: Gaussian smoothing parameter applied to the test histogram.

    Returns:
        float: Cosine similarity (1 - cosine distance); higher means the test
        point cloud is closer to the training distribution.

    Raises:
        ValueError: If the training-distribution file or the VTP file is missing.
    """
    # Load the saved training distribution (histogram + shared bin edges).
    train_dist_path = os.path.join(train_folder + "1_VTK_surface", train_dist_file)
    if not os.path.exists(train_dist_path):
        raise ValueError(f"Training distribution file not found: {train_dist_path}")
    data = np.load(train_dist_path)
    train_hist = data['hist']
    bin_edges = [data['edges0'], data['edges1'], data['edges2']]

    if not os.path.exists(vtp_file_path):
        raise ValueError(f"VTP file not found: {vtp_file_path}")

    # Histogram the test points on the SAME bins as training so the two
    # distributions are directly comparable.
    test_points = get_points(vtp_file_path)
    test_hist, _ = np.histogramdd(test_points, bins=bin_edges, density=True)
    test_hist = gaussian_filter(test_hist, sigma=smooth_sigma)

    # Flatten and normalize; guard the zero-mass case (all points outside the
    # training bins) which would otherwise produce NaNs via 0/0.
    test_hist = test_hist.flatten()
    total = test_hist.sum()
    if total > 0:
        test_hist /= total

    # Small epsilon avoids an all-zero vector (cosine of a zero vector is
    # undefined). ravel() makes train_hist 1-D as scipy's cosine requires;
    # it is a no-op if the saved hist was already flattened.
    epsilon = 1e-12
    train_hist_safe = train_hist.ravel() + epsilon
    test_hist_safe = test_hist + epsilon

    # Cosine similarity = 1 - cosine distance.
    cosine_distance = cosine(train_hist_safe, test_hist_safe)
    cosine_similarity = 1 - cosine_distance
    print(f"Cosine Score for {os.path.basename(vtp_file_path)}: {cosine_similarity:.6f}")
    return cosine_similarity
# -------------------- Main --------------------
if __name__ == "__main__":
    try:
        # Score the configured dataset's surface mesh against the training distribution.
        target_vtp = train_folder + f"1_VTK_surface/{dataname}/{dataname}.vtp"
        compute_single_vtp_score(vtp_file_path=target_vtp)
        print("Cosine similarity analysis completed successfully!")
    except Exception as exc:
        # Top-level boundary: report and dump the full traceback for debugging.
        print(f"Error during analysis: {exc}")
        import traceback
        traceback.print_exc()