#!/usr/bin/python
#
# Hopfield neural network associative memory
# --------------------------------------------
# Copyright (C) 2007 Robert Nowotniak
# Copyright (C) 2007 Michal Wysokinski
#
#

from random import random, choice
from math import sqrt
import sys
import getopt

# Number of bipolar network inputs/outputs:
# 8 characters per pattern x 5 bits per character.
NUMBER_OF_NEURONS = 40

#
# Network training set (8-character URL fragments)
#
TRAIN_SET = (
    'robertn.',
    'du.ics.p',
    'zarin.pl',
    'arch?q=p',
    'ig?hl=pl',
    '?q=pytho',
    'mail.org',
)


################################################################################
#  Coding functions                                                            #
#  ----------------                                                           #
#  8-characters length strings of legal characters to                          #
#  40-elements bipolar binary list and vice versa                              #
################################################################################

# NOTE: exception classes now inherit from Exception (required to be
# raisable in Python 3; the original old-style classes were Python 2 only).
class CharacterNotSupported(Exception):
    """Raised when a character outside the supported alphabet is coded."""
    pass

class BadStringLength(Exception):
    """Raised when a string to code is not exactly 8 characters long."""
    pass

class BadCodeLength(Exception):
    """Raised when a code to decode is not exactly 5 elements long."""
    pass


# Codes 0-25 are 'a'-'z'; the six extra URL characters take codes 26-31,
# so the full 5-bit space 0..31 is used.
_SPECIAL_CODES = {'_': 26, '/': 27, '.': 28, '?': 29, '=': 30, '&': 31}
_SPECIAL_CHARS = {v: k for k, v in _SPECIAL_CODES.items()}


def codeChar(char):
    """Map one supported character to a 5-element bipolar (+1/-1) tuple.

    Supported alphabet: 'a'-'z' and '_', '/', '.', '?', '=', '&'.

    Raises:
        CharacterNotSupported: for any other character.
    """
    number = ord(char)
    if ord('a') <= number <= ord('z'):
        number -= ord('a')
    elif char in _SPECIAL_CODES:
        number = _SPECIAL_CODES[char]
    else:
        raise CharacterNotSupported(char)
    # Most significant bit first; set bit -> +1, clear bit -> -1.
    return tuple(1 if number & bit else -1 for bit in (16, 8, 4, 2, 1))


def codeString(string):
    """Code an 8-character string into a 40-element bipolar list.

    Raises:
        BadStringLength: if the string is not exactly 8 characters.
    """
    if len(string) != 8:
        raise BadStringLength(len(string))
    coded = []
    for char in string:
        coded.extend(codeChar(char))
    return coded


def decodeChar(coded):
    """Decode a 5-element bipolar sequence back into one character.

    Raises:
        BadCodeLength: if the sequence is not exactly 5 elements.
    """
    if len(coded) != 5:
        raise BadCodeLength(len(coded))
    number = 0
    factor = 16
    for i in coded:
        if i == 1:
            number += factor
        factor //= 2  # integer halving (Python 2 '/' was integer division)
    if number <= ord('z') - ord('a'):
        return chr(number + ord('a'))
    return _SPECIAL_CHARS[number]  # codes 26..31


def decodeString(code):
    """Decode a 40-element bipolar list back into an 8-character string."""
    return "".join(decodeChar(code[i:i + 5])
                   for i in range(0, NUMBER_OF_NEURONS, 5))


################################################################################
#  Neuron class                                                                #
################################################################################

class Neuron:
    """Single bipolar neuron with a hysteresis (sign) activation.

    When the weighted input sum is exactly zero the neuron keeps its
    previous output instead of emitting zero.
    """

    def __init__(self, number_of_inputs):
        self.inputs = [0.0] * number_of_inputs
        self.weights = [0.0] * number_of_inputs
        # Required for the hysteresis property of the neuron.
        self.lastvalue = 1.0

    def __getSum(self):
        """Return the weighted sum of the current inputs."""
        return sum(w * x for w, x in zip(self.weights, self.inputs))

    def activation(self):
        """Return sign of the weighted sum; keep last output when it is 0."""
        s = self.__getSum()
        result = (s > 0) - (s < 0)  # sign(); replaces Python 2 cmp(s, 0)
        # Hysteresis property
        if result == 0:
            return self.lastvalue
        self.lastvalue = result
        return result

    def __call__(self):
        return self.activation()


################################################################################
#  Main program                                                                #
################################################################################

def usage():
    """Print command-line help."""
    print("""
%s [-h|--help] [-v|--verbose] [-t <n>]

    -h, --help    -- show this help
    -v, --verbose -- verbose output (weight matrix etc.)
    -t <n>        -- limit number of training set elements
""" % sys.argv[0])


def main():
    print('Hopfield neural network associative memory')

    #
    # Parse command line arguments
    #
    try:
        opts, args = getopt.getopt(sys.argv[1:], "vt:h", ['verbose', 'help'])
    except getopt.GetoptError:
        print('Wrong arguments')
        usage()
        sys.exit(2)

    verbose = False
    train_limit = None
    for o, a in opts:
        if o in ('-v', '--verbose'):
            verbose = True
        if o in ('-h', '--help'):
            usage()
            sys.exit(0)
        # BUGFIX: the original tested `o in ('-t')`, where ('-t') is a plain
        # string, not a one-element tuple; it only worked by substring accident.
        if o in ('-t',):
            train_limit = int(a)

    train_set = TRAIN_SET[:train_limit] if train_limit else TRAIN_SET

    # Prefix of URL string used for network testing
    if len(args) > 0:
        prefix = args[0][:8]
    else:
        prefix = choice(train_set)[:4]

    # Code training set
    coded_train_set = [codeString(s) for s in train_set]

    # Create a layer of zero'ed neurons
    net = [Neuron(NUMBER_OF_NEURONS) for _ in range(NUMBER_OF_NEURONS)]

    # Train - using generalized Hebb rule (mean train set element)
    for i, neuron in enumerate(net):            # neurons loop
        for j in range(NUMBER_OF_NEURONS):      # weights loop
            # on the main diagonal we keep weights of bias inputs
            # instead of neuron self feedback
            w = sum(p[i] * p[j] for p in coded_train_set)
            neuron.weights[j] = w / NUMBER_OF_NEURONS

    if verbose:
        # Show neuron weight matrix
        print('Neuron weight matrix:')
        for neuron in net:
            print(' '.join('%+.3f' % w for w in neuron.weights))
        print()

    print('---')
    print('training set:')
    for t in coded_train_set:
        if verbose:
            print(t, end=' ')
        print(' ' + decodeString(t))
    print('---')

    # Alphabet used to randomly pad the prefix up to 8 characters.
    # BUGFIX: the original range(ord('a'), ord('z')) excluded 'z'.
    chars = ([chr(c) for c in range(ord('a'), ord('z') + 1)]
             + ['_', '/', '.', '?', '=', '&'])

    keep_trying = True
    while keep_trying:
        # Set inputs: the fixed prefix plus random legal padding.
        padding = ''.join(choice(chars) for _ in range(8 - len(prefix)))
        pattern = codeString(prefix + padding)
        print('network input:', end=' ')
        if verbose:
            print(pattern, end=' ')
        print(decodeString(pattern))

        for i, neuron in enumerate(net):
            neuron.inputs = pattern[:]
            # bias input instead of self feedback
            neuron.inputs[i] = 1

        # Compute the network output asynchronously until it is stable.
        # BUGFIX: the original inner list comprehension reused the loop
        # variable `n`; under Python 2 scoping the comprehension clobbered
        # it, so only the LAST neuron's inputs were ever updated.
        while True:
            for neuron in net:
                outputs = [m() for m in net]
                neuron.inputs = outputs
            outputs = [m() for m in net]
            if verbose:
                print(outputs, end=' ')
                print(decodeString(outputs))
            if outputs == pattern:
                # network is in stable state
                break
            pattern = outputs

        print(' final result:', end=' ')
        final = [m() for m in net]
        if verbose:
            print(final, end=' ')
        result = decodeString(final)
        print(result, end=' ')
        if result not in train_set:
            print(' *** WRONG ***')
        else:
            print()
            keep_trying = False
        if len(prefix) == 8:
            # prevent endless loop
            keep_trying = False


if __name__ == '__main__':
    main()