diff --git a/ui/hud.py b/ui/hud.py
index 4877092..a0b31f7 100644
--- a/ui/hud.py
+++ b/ui/hud.py
@@ -152,15 +152,9 @@ class HUD:
     LAYER_VERTICAL_MARGIN = 30  # Top and bottom margin within visualization for neurons
 
     # Connection appearance constants
-    WEIGHT_NORMALIZATION_DIVISOR = 2  # Divisor for normalizing weights to [-1, 1] range
     MAX_CONNECTION_THICKNESS = 4  # Maximum thickness for connection lines
     MIN_CONNECTION_THICKNESS = 1  # Minimum thickness for connection lines
 
-    # Connection colors (RGB values)
-    CONNECTION_BASE_INTENSITY = 128  # Base color intensity for connections
-    CONNECTION_POSITIVE_GREEN = 128  # Green component for positive weights
-    CONNECTION_NEGATIVE_RED = 128  # Red component for negative weights
-
     # Neuron activation colors
     NEURON_BASE_INTENSITY = 100  # Base color intensity for neurons
     NEURON_ACTIVATION_INTENSITY = 155  # Additional intensity based on activation
@@ -359,12 +353,14 @@ class HUD:
             screen.blit(activation_text, text_rect)
 
         # Draw layer labels
-        layer_labels = ["Input", "Hidden", "Output"]
-        for layer_idx in range(len(network.layers)):
-            if layer_idx >= len(layer_labels):
-                label = f"Layer {layer_idx}"
+        num_layers = len(network.layers)
+        for layer_idx in range(num_layers):
+            if layer_idx == 0:
+                label = "Input"
+            elif layer_idx == num_layers - 1:
+                label = "Output"
             else:
-                label = layer_labels[layer_idx] if layer_idx < len(layer_labels) else f"Hidden {layer_idx - 1}"
+                label = f"Hidden {layer_idx}" if num_layers > 3 else "Hidden"
 
             # Find average x position for this layer
             x_positions = [pos[0] for (l_idx, n_idx), pos in neuron_positions.items() if l_idx == layer_idx]
diff --git a/world/base/brain.py b/world/base/brain.py
index ad4ab23..fadf7fb 100644
--- a/world/base/brain.py
+++ b/world/base/brain.py
@@ -129,7 +129,8 @@ class FlexibleNeuralNetwork:
             (mutated._add_connection, 1.5),    # Moderate - grow connectivity
             (mutated._remove_connection, 0.8), # Less common - reduce connectivity
             (mutated._add_neuron, 0.3),        # Rare - structural growth
-            (mutated._remove_neuron, 0.1)      # Very rare - structural reduction
+            (mutated._remove_neuron, 0.1),     # Very rare - structural reduction
+            (mutated._add_layer, 0.05),        # New: create a new layer (very rare)
         ]
 
         # Apply weighted random mutations
@@ -339,6 +340,44 @@ class FlexibleNeuralNetwork:
                 adjusted_connections.append((src_layer, src_neuron, weight))
             neuron['connections'] = adjusted_connections
 
+    def _add_layer(self):
+        """Add a new hidden layer at a random position with at least one neuron."""
+        if len(self.layers) < 2:
+            return  # Need at least input and output layers
+
+        # Choose a position between input and output layers
+        insert_idx = random.randint(1, len(self.layers) - 1)
+        # Create a new hidden neuron
+        new_neuron = {
+            'type': 'hidden',
+            'id': f'hidden_{random.randint(1000, 9999)}',
+            'bias': random.uniform(-1, 1),
+            'connections': []
+        }
+        # Connect to neurons in the previous layer; guarantee at least one
+        # incoming edge so the new neuron is never left dangling
+        prev_size = len(self.layers[insert_idx - 1])
+        for prev_idx in range(prev_size):
+            if random.random() < 0.5:
+                new_neuron['connections'].append((insert_idx - 1, prev_idx, random.uniform(-2, 2)))
+        if not new_neuron['connections'] and prev_size:
+            new_neuron['connections'].append(
+                (insert_idx - 1, random.randrange(prev_size), random.uniform(-2, 2)))
+        # Insert the new layer
+        self.layers.insert(insert_idx, [new_neuron])
+        # Re-index existing connections: connections store absolute layer
+        # indices, and every source layer at or after the insertion point
+        # has shifted up by one position
+        for layer in self.layers[insert_idx + 1:]:
+            for neuron in layer:
+                neuron['connections'] = [
+                    (src + 1 if src >= insert_idx else src, n_idx, weight)
+                    for (src, n_idx, weight) in neuron.get('connections', [])
+                ]
+        # Connect some neurons in the (now shifted) next layer to the new
+        # neuron; insert_idx + 1 always exists after the insert above
+        for neuron in self.layers[insert_idx + 1]:
+            if 'connections' in neuron and random.random() < 0.5:
+                neuron['connections'].append((insert_idx, 0, random.uniform(-2, 2)))
+
     def _ensure_network_connectivity(self):
         """Ensure the network maintains basic connectivity from inputs to outputs."""
         # Check if output neurons have any connections
diff --git a/world/objects.py b/world/objects.py
index 4b93cd2..c6bd3d6 100644
--- a/world/objects.py
+++ b/world/objects.py
@@ -274,7 +274,7 @@ class DefaultCell(BaseEntity):
         :return: Self.
         """
        self.tick_count += 1
-        if self.tick_count % 100 == 0:
+        if self.tick_count % 10 == 0:
             self.behavioral_model = self.behavioral_model.mutate(1)
 
         if interactable is None: