"""Evolution data collector for tracking neural network changes."""
from collections import Counter
from typing import Any, Dict, List

import numpy as np

from .base_collector import BaseCollector
from world.objects import DefaultCell
class EvolutionCollector(BaseCollector):
    """Collects evolution and neural network data.

    Each ``collect()`` call snapshots the per-cell energy / age / generation
    distributions and aggregates neural-network architecture frequencies into
    a JSON-serializable dict.
    """

    def __init__(self, collection_interval: int = 1000):
        """
        Args:
            collection_interval: Number of ticks between collections
                (forwarded to BaseCollector).
        """
        super().__init__(collection_interval)

    @staticmethod
    def _summary(values: List, with_median: bool = True) -> Dict[str, Any]:
        """Return summary statistics for a non-empty sequence of numbers.

        numpy scalar results (np.float64) are cast to plain ``float`` so the
        returned dict survives ``json.dumps`` without a custom encoder —
        the original code stored raw numpy scalars, which are not JSON
        serializable.

        Args:
            values: Non-empty sequence of numeric values.
            with_median: Include the median (omitted for the complexity
                distribution, matching the original output shape).
        """
        stats: Dict[str, Any] = {
            'mean': float(np.mean(values)),
            'std': float(np.std(values)),
            'min': min(values),
            'max': max(values),
        }
        if with_median:
            stats['median'] = float(np.median(values))
        return stats

    def collect(self, simulation_core) -> Dict[str, Any]:
        """Collect evolution data from simulation core.

        Args:
            simulation_core: Object exposing ``get_world_state()``,
                ``get_entity_states()`` and ``timing.last_tick_time``.

        Returns:
            Dict with population statistics, network-architecture statistics,
            raw architecture counts, and collection metadata.
        """
        world_state = simulation_core.get_world_state()

        cells_data: List[Dict[str, Any]] = []
        network_architectures: Counter = Counter()  # architecture key -> cell count
        complexities: List[int] = []                # layer count per cell
        layer_size_counts: Counter = Counter()      # layer size -> occurrences

        for entity_data in simulation_core.get_entity_states():
            if entity_data['type'] != 'cell':
                continue
            cells_data.append(entity_data)

            nn_data = entity_data['neural_network']
            # str() key: lists are unhashable and not valid JSON object keys.
            network_architectures[str(nn_data['layer_sizes'])] += 1
            # Layer count serves as a cheap proxy for network complexity.
            complexities.append(len(nn_data['layer_sizes']))
            layer_size_counts.update(nn_data['layer_sizes'])

        # Population-level statistics; empty dict when there are no cells.
        evolution_stats: Dict[str, Any] = {}
        if cells_data:
            evolution_stats = {
                'cell_count': len(cells_data),
                'energy_distribution': self._summary(
                    [c['energy'] for c in cells_data]),
                'age_distribution': self._summary(
                    [c['age'] for c in cells_data]),
                'generation_distribution': self._summary(
                    [c['generation'] for c in cells_data]),
            }

        # Architecture statistics; empty dict when no cells were seen.
        # (Non-empty `complexities` implies `network_architectures` is
        # non-empty too, so no separate None-guard is needed — the original
        # `if ... else None` was unreachable.)
        network_stats: Dict[str, Any] = {}
        if complexities:
            network_stats = {
                'architecture_diversity': len(network_architectures),
                'most_common_architecture': max(
                    network_architectures.items(), key=lambda kv: kv[1]),
                'complexity_distribution': self._summary(
                    complexities, with_median=False),
                'layer_size_diversity': len(layer_size_counts),
                'most_common_layer_sizes': sorted(
                    layer_size_counts.items(),
                    key=lambda kv: kv[1], reverse=True)[:5],
            }

        return {
            'timestamp': simulation_core.timing.last_tick_time,
            'tick_count': world_state['tick_count'],
            'evolution_statistics': evolution_stats,
            'network_statistics': network_stats,
            'network_architectures': dict(network_architectures),
            'collection_type': 'evolution',
        }