Compare commits

..

No commits in common. "8a53d4ac3799ff6f46153b3c4fdd43ea658c1360" and "91fef1557292bd66de1819312955df05a4b57131" have entirely different histories.

3 changed files with 15 additions and 35 deletions

View File

@@ -152,9 +152,15 @@ class HUD:
LAYER_VERTICAL_MARGIN = 30 # Top and bottom margin within visualization for neurons
# Connection appearance constants
WEIGHT_NORMALIZATION_DIVISOR = 2 # Divisor for normalizing weights to [-1, 1] range
MAX_CONNECTION_THICKNESS = 4 # Maximum thickness for connection lines
MIN_CONNECTION_THICKNESS = 1 # Minimum thickness for connection lines
# Connection colors (RGB values)
CONNECTION_BASE_INTENSITY = 128 # Base color intensity for connections
CONNECTION_POSITIVE_GREEN = 128 # Green component for positive weights
CONNECTION_NEGATIVE_RED = 128 # Red component for negative weights
# Neuron activation colors
NEURON_BASE_INTENSITY = 100 # Base color intensity for neurons
NEURON_ACTIVATION_INTENSITY = 155 # Additional intensity based on activation
@@ -353,14 +359,12 @@ class HUD:
screen.blit(activation_text, text_rect)
# Draw layer labels
num_layers = len(network.layers)
for layer_idx in range(num_layers):
if layer_idx == 0:
label = "Input"
elif layer_idx == num_layers - 1:
label = "Output"
layer_labels = ["Input", "Hidden", "Output"]
for layer_idx in range(len(network.layers)):
if layer_idx >= len(layer_labels):
label = f"Layer {layer_idx}"
else:
label = f"Hidden {layer_idx}" if num_layers > 3 else "Hidden"
label = layer_labels[layer_idx] if layer_idx < len(layer_labels) else f"Hidden {layer_idx - 1}"
# Find average x position for this layer
x_positions = [pos[0] for (l_idx, n_idx), pos in neuron_positions.items() if l_idx == layer_idx]

View File

@@ -129,8 +129,7 @@ class FlexibleNeuralNetwork:
(mutated._add_connection, 1.5), # Moderate - grow connectivity
(mutated._remove_connection, 0.8), # Less common - reduce connectivity
(mutated._add_neuron, 0.3), # Rare - structural growth
(mutated._remove_neuron, 0.1), # Very rare - structural reduction
(mutated._add_layer, 0.05), # New: create a new layer (very rare)
(mutated._remove_neuron, 0.1) # Very rare - structural reduction
]
# Apply weighted random mutations
@@ -340,32 +339,6 @@ class FlexibleNeuralNetwork:
adjusted_connections.append((src_layer, src_neuron, weight))
neuron['connections'] = adjusted_connections
def _add_layer(self):
"""Add a new hidden layer at a random position with at least one neuron."""
if len(self.layers) < 2:
return # Need at least input and output layers
# Choose a position between input and output layers
insert_idx = random.randint(1, len(self.layers) - 1)
# Create a new hidden neuron
new_neuron = {
'type': 'hidden',
'id': f'hidden_{random.randint(1000, 9999)}',
'bias': random.uniform(-1, 1),
'connections': []
}
# Connect to all neurons in the previous layer
for prev_idx in range(len(self.layers[insert_idx - 1])):
if random.random() < 0.5:
new_neuron['connections'].append((insert_idx - 1, prev_idx, random.uniform(-2, 2)))
# Insert the new layer
self.layers.insert(insert_idx, [new_neuron])
# Connect neurons in the next layer to the new neuron
if insert_idx + 1 < len(self.layers):
for neuron in self.layers[insert_idx + 1]:
if 'connections' in neuron and random.random() < 0.5:
neuron['connections'].append((insert_idx, 0, random.uniform(-2, 2)))
def _ensure_network_connectivity(self):
"""Ensure the network maintains basic connectivity from inputs to outputs."""
# Check if output neurons have any connections

View File

@@ -273,6 +273,9 @@ class DefaultCell(BaseEntity):
:param interactable: List of nearby entities (unused).
:return: Self.
"""
self.tick_count += 1
if self.tick_count % 100 == 0:
self.behavioral_model = self.behavioral_model.mutate(1)
if interactable is None:
interactable = []