import base64
import io
import os
from typing import Any, Dict

import cv2
import matplotlib.pyplot as plt
import numpy as np
from PIL import Image


class ImageAnalysisTool:
    """Standalone image analysis tool for SEM images."""

    def __init__(self):
        self.name = "SEM Image Analysis Tool"
        self.description = "Analyzes SEM images to extract microstructural information about soil-cemented materials"

    def _run(self, image_path: str) -> Dict[str, Any]:
        """Analyze an SEM image and extract relevant features."""
        try:
            # Load and process image
            image = cv2.imread(image_path, cv2.IMREAD_GRAYSCALE)
            if image is None:
                return {"error": f"Could not load image from {image_path}"}

            # Basic image properties
            height, width = image.shape
            mean_intensity = np.mean(image)
            std_intensity = np.std(image)

            # Convert to PIL for additional analysis
            pil_image = Image.fromarray(image)

            # Encode image for vision model
            image_base64 = self._encode_image_to_base64(pil_image)

            # Basic texture analysis
            texture_features = self._analyze_texture(image)

            # Porosity estimation (simple threshold-based)
            porosity_info = self._estimate_porosity(image)

            # Particle analysis
            particle_info = self._analyze_particles(image)

            analysis_results = {
                "image_properties": {
                    "width": int(width),
                    "height": int(height),
                    "mean_intensity": float(mean_intensity),
                    "std_intensity": float(std_intensity)
                },
                "texture_features": texture_features,
                "porosity_analysis": porosity_info,
                "particle_analysis": particle_info,
                "image_base64": image_base64,
                "image_path": image_path
            }

            return analysis_results

        except Exception as e:
            return {"error": f"Error analyzing image: {str(e)}"}

    def _encode_image_to_base64(self, image: Image.Image) -> str:
        """Convert PIL image to base64 string."""
        buffered = io.BytesIO()
        image.save(buffered, format="PNG")
        img_str = base64.b64encode(buffered.getvalue()).decode()
        return img_str

    def _analyze_texture(self, image: np.ndarray) -> Dict[str, float]:
        """Analyze texture properties of the image."""
        # Calculate local variance via box filtering (texture measure)
        kernel = np.ones((9, 9), np.float32) / 81
        mean_filtered = cv2.filter2D(image.astype(np.float32), -1, kernel)
        sqr_diff = (image.astype(np.float32) - mean_filtered) ** 2
        texture_map = cv2.filter2D(sqr_diff, -1, kernel)

        return {
            "texture_variance": float(np.mean(texture_map)),
            # Spread of the local-variance map (higher values = less uniform texture)
            "texture_uniformity": float(np.std(texture_map)),
            "contrast": float(np.max(image) - np.min(image))
        }

    def _estimate_porosity(self, image: np.ndarray) -> Dict[str, Any]:
        """Estimate porosity using threshold-based segmentation."""
        # Use Otsu's thresholding for automatic threshold selection
        _, binary = cv2.threshold(image, 0, 255, cv2.THRESH_BINARY + cv2.THRESH_OTSU)

        # Calculate porosity (assuming dark regions are pores)
        total_pixels = image.shape[0] * image.shape[1]
        pore_pixels = np.sum(binary == 0)
        porosity_percentage = (pore_pixels / total_pixels) * 100

        # Analyze pore size distribution (invert so pores become foreground for contour detection)
        contours, _ = cv2.findContours(255 - binary, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
        pore_areas = [cv2.contourArea(cnt) for cnt in contours if cv2.contourArea(cnt) > 10]

        return {
            "estimated_porosity_percent": float(porosity_percentage),
            "number_of_pores": len(pore_areas),
            "average_pore_area": float(np.mean(pore_areas)) if pore_areas else 0,
            "max_pore_area": float(np.max(pore_areas)) if pore_areas else 0,
            "min_pore_area": float(np.min(pore_areas)) if pore_areas else 0
        }

    def _analyze_particles(self, image: np.ndarray) -> Dict[str, Any]:
        """Analyze particle characteristics."""
        # Edge detection for particle boundaries
        edges = cv2.Canny(image, 50, 150)

        # Find contours (potential particles)
        contours, _ = cv2.findContours(edges, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)

        # Filter contours by area to remove noise
        min_area = 50  # minimum particle area in pixels
        particles = [cnt for cnt in contours if cv2.contourArea(cnt) > min_area]

        if not particles:
            return {"number_of_particles": 0}

        # Calculate particle properties
        areas = [cv2.contourArea(cnt) for cnt in particles]
        perimeters = [cv2.arcLength(cnt, True) for cnt in particles]

        # Equivalent diameter: diameter of a circle with the same area
        equivalent_diameters = [2 * np.sqrt(area / np.pi) for area in areas]

        # Circularity (roundness measure): 4*pi*A / P^2, equal to 1 for a perfect circle
        circularities = []
        for area, perimeter in zip(areas, perimeters):
            if perimeter > 0:
                circularities.append(4 * np.pi * area / (perimeter ** 2))

        return {
            "number_of_particles": len(particles),
            "average_particle_area": float(np.mean(areas)),
            "particle_area_std": float(np.std(areas)),
            "average_equivalent_diameter": float(np.mean(equivalent_diameters)),
            "diameter_range": {
                "min": float(np.min(equivalent_diameters)),
                "max": float(np.max(equivalent_diameters))
            },
            "average_circularity": float(np.mean(circularities)) if circularities else 0,
            "circularity_std": float(np.std(circularities)) if circularities else 0
        }
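

# Minimal usage sketch (not part of the original tool): instantiates
# ImageAnalysisTool, runs the full analysis, and prints a short summary of the
# texture, porosity, and particle results. The path "sample_sem_image.png" is a
# placeholder assumption and must point to a real SEM micrograph on disk.
if __name__ == "__main__":
    tool = ImageAnalysisTool()
    results = tool._run("sample_sem_image.png")  # placeholder path (assumption)

    if "error" in results:
        print(results["error"])
    else:
        props = results["image_properties"]
        print(f"Image size: {props['width']} x {props['height']} px")
        print(f"Texture variance: {results['texture_features']['texture_variance']:.2f}")
        print(f"Estimated porosity: {results['porosity_analysis']['estimated_porosity_percent']:.2f}%")
        print(f"Detected particles: {results['particle_analysis']['number_of_particles']}")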