truncate genome placeholder fix
nlorant-s committed Jul 18, 2024
1 parent 3d73e5f commit 12bbb0e
Showing 3 changed files with 194 additions and 102 deletions.
33 changes: 26 additions & 7 deletions neuropush/neural_network.py
@@ -8,19 +8,33 @@ def __init__(self, layer_sizes, flattened_weights):
        self.weights = []
        self.biases = []

+        # print(f"Initializing network with layer sizes: {layer_sizes}")
+        # print(f"Total flattened weights: {len(flattened_weights)}")
+
        index = 0
        for i in range(1, self.num_layers):
            weight_matrix_size = layer_sizes[i] * layer_sizes[i-1]
            bias_vector_size = layer_sizes[i]

+            # print(f"Layer {i}: Weight matrix size: {weight_matrix_size}, Bias vector size: {bias_vector_size}")
+
+            # if index + weight_matrix_size > len(flattened_weights):
+            #     raise ValueError(f"Not enough weights for layer {i}. Need {weight_matrix_size}, but only {len(flattened_weights) - index} left.")
+
            weight_matrix = np.array(flattened_weights[index:index+weight_matrix_size]).reshape(layer_sizes[i], layer_sizes[i-1])
            index += weight_matrix_size

+            # if index + bias_vector_size > len(flattened_weights):
+            #     raise ValueError(f"Not enough weights for bias in layer {i}. Need {bias_vector_size}, but only {len(flattened_weights) - index} left.")
+
            bias_vector = np.array(flattened_weights[index:index+bias_vector_size]).reshape(layer_sizes[i], 1)
            index += bias_vector_size

            self.weights.append(weight_matrix)
            self.biases.append(bias_vector)

+        # if index < len(flattened_weights):
+        #     print(f"Warning: {len(flattened_weights) - index} unused weights")
+
    def sigmoid(self, x):
        return 1 / (1 + np.exp(-x))
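The loop above consumes layer_sizes[i] * layer_sizes[i-1] weight values plus layer_sizes[i] bias values per layer, so the flattened genome must supply their sum in total. A minimal sketch of that count; the helper name genome_length is hypothetical, not part of this commit:

# Hypothetical helper: how many flattened values __init__ consumes.
def genome_length(layer_sizes):
    # Layer i needs an (n_i x n_{i-1}) weight matrix and an n_i bias vector.
    return sum(layer_sizes[i] * layer_sizes[i - 1] + layer_sizes[i]
               for i in range(1, len(layer_sizes)))

# For a [4, 8, 1] topology: 4*8 + 8 + 8*1 + 1 = 49 values.
assert genome_length([4, 8, 1]) == 49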
@@ -39,17 +53,22 @@ def predict(self, X):
            np.ndarray: Output of the network with shape (n_samples, n_outputs).
        """
        a = X
+
        ''' No last layer sigmoid
        for weight, bias in zip(self.weights, self.biases):
            a = self.relu(np.dot(a, weight.T) + bias.T)
        '''
-        for i, (weight, bias) in enumerate(zip(self.weights, self.biases)):
-            z = np.dot(a, weight.T) + bias.T
-            if i == len(self.weights) - 1:  # Last layer
-                a = self.sigmoid(z)
-            else:
-                a = self.relu(z)
-        return a
+        try:
+            for i, (weight, bias) in enumerate(zip(self.weights, self.biases)):
+                z = np.dot(a, weight.T) + bias.T
+                if i == len(self.weights) - 1:  # Last layer
+                    a = self.sigmoid(z)
+                else:
+                    a = self.relu(z)
+            return a
+        except Exception as e:
+            print("predict() error:", e)
+            return None

def visualize_network(network, display='hide'):
    if display == 'show':
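Taken together, a minimal usage sketch for this file; the [4, 8, 1] topology and random genome are made-up example values, and the import path is assumed from the file name above:

import numpy as np
from neuropush.neural_network import NeuralNetwork  # path assumed from this diff

layer_sizes = [4, 8, 1]  # assumed example topology
n_params = sum(layer_sizes[i] * layer_sizes[i - 1] + layer_sizes[i]
               for i in range(1, len(layer_sizes)))
flattened = np.random.uniform(-1, 1, n_params).tolist()

net = NeuralNetwork(layer_sizes, flattened)
X = np.random.rand(5, layer_sizes[0])  # 5 samples, 4 features each
out = net.predict(X)  # (5, 1) sigmoid outputs, or None if predict() caught an error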
69 changes: 62 additions & 7 deletions neuropush/neuromutations.py
@@ -1,7 +1,16 @@
import random
-from pyshgp.gp.variation import LiteralMutation, AdditionMutation, DeletionMutation, VariationOperator
+from pyshgp.gp.variation import LiteralMutation, VariationOperator
from pyshgp.push.types import PushFloat, PushInt
from pyshgp.push.atoms import Literal
+from pyshgp.push.types import PushType
+from pyshgp.gp.genome import Genome, GeneSpawner
+from pyshgp.tap import tap
+from pyshgp.utils import DiscreteProbDistrib, instantiate_using
+
+from abc import ABC, abstractmethod
+from typing import Sequence, Union
+import math
+from numpy.random import random, choice

class NullMutation(LiteralMutation):
    def __init__(self):
@@ -11,10 +20,56 @@ def _mutate_literal(self, literal: Literal) -> Literal:
        new_value = literal.value
        return Literal(value=new_value, push_type=PushFloat)

-class SlightMutation(LiteralMutation):
-    def __init__(self):
-        super().__init__(PushInt)
-
-    def _mutate_literal(self, literal: Literal) -> Literal:
-        new_value = 5
-        return Literal(value=new_value, push_type=PushInt)
+class IntMutation(VariationOperator):
+    """Mutates the value of one PushInt literal in the genome.
+
+    Parameters
+    ----------
+    rate : float
+        The probability of applying the mutation to a given PushInt literal.
+        Default is 0.1.
+
+    Attributes
+    ----------
+    rate : float
+        The probability of applying the mutation to a given PushInt literal.
+    num_parents : int
+        Number of parent Genomes the operator needs to produce a child
+        Individual.
+    """
+
+    def __init__(self, rate: float = 0.1):
+        super().__init__(1)
+        self.rate = rate
+
+    @tap
+    def produce(self, parents: Sequence[Genome], spawner: GeneSpawner) -> Genome:
+        """Produce a child Genome by mutating one PushInt literal.
+
+        Parameters
+        ----------
+        parents : Sequence[Genome]
+            A list containing a single parent Genome.
+        spawner : GeneSpawner
+            A GeneSpawner that can be used to produce new genes (aka Atoms).
+
+        Returns
+        -------
+        Genome
+            A new Genome with potentially one mutated PushInt literal.
+        """
+        super().produce(parents, spawner)
+        self.checknum_parents(parents)
+        new_genome = Genome()
+        mutated = False
+
+        for atom in parents[0]:
+            if isinstance(atom, Literal) and atom.push_type == PushType.INT and random() < self.rate and not mutated:
+                new_value = spawner.random_int()
+                new_atom = Literal(new_value, PushType.INT)
+                new_genome = new_genome.append(new_atom)
+                mutated = True
+            else:
+                new_genome = new_genome.append(atom)
+
+        return new_genome
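As its docstring describes, produce walks the parent genome and replaces at most one PushInt literal, drawing the new value from spawner.random_int(). A minimal driving sketch, mirroring the Genome()/append pattern produce itself uses; the GeneSpawner configuration is omitted and `spawner` is assumed to be set up elsewhere:

from pyshgp.gp.genome import Genome
from pyshgp.push.atoms import Literal
from pyshgp.push.types import PushInt
from neuropush.neuromutations import IntMutation  # path assumed from this diff

# Build a toy parent genome of three integer literals.
parent = Genome()
for v in (3, 7, 11):
    parent = parent.append(Literal(value=v, push_type=PushInt))

op = IntMutation(rate=0.5)             # each PushInt literal has a 50% chance; at most one mutates
child = op.produce([parent], spawner)  # `spawner`: a configured GeneSpawner, assumed available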
