Shop OBEX P1 Docs P2 Docs Learn Events
Neural Net with "Classes" in FLEXC solve the XOR challenge — Parallax Forums

Neural Net with "Classes" in FLEXC solve the XOR challenge

Here is an example of a neural network (NN) that solves the xor problem.
There are 4 phases:
init: the NN is initialized
Training: the NN is trained.
test: the NN is applied.
print: the accuracy over many tests

The training phase takes a relatively long time compared to running on a PC.
But it is correct.
An accuracy of 1.0 means that the NN calculates 100% correctly.
Maybe we can accelerate the training phase if we take advantage of the parallelism of the P2?

how to compile:
fastspin -2b xor_p2.cpp
Propeller Spin/PASM Compiler 'FastSpin' (c) 2011-2020 Total Spectrum Software Inc.
Version 4.2.6 Compiled on: Jul 30 2020
xor_p2.cpp
time.c
rand.c
sleep.c
random.c
fmt.c
e_expf.c
posixio.c
bufio.c
errno.c
xor_p2.p2asm
Done.
Program size is 21496 bytes



possible output:
loadp2   -p /dev/ttyUSB0 -t -b 230400 xor_p2.binary
( Entering terminal mode.  Press Ctrl-] to exit. )
init 
training 
test 
Accurancy: 1.000000
init 
training 
test 
Accurancy: 1.000000
init 
training 
test 
Accurancy: 0.755600
init 
training 
test 
Accurancy: 1.000000
init 
training 
test 
Accurancy: 1.000000
init 
training 
test 
Accurancy: 0.745700


#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <time.h>
#include <unistd.h>
#include <math.h>

#include <propeller2.h>
#define P2_TARGET_MHZ 160
#include "sys/p2es_clock.h"

#define BAUD 230400

extern float _randfloat();

float sigmoid(float z){
	float value = 1 + exp(-z);
	return 1/value;
}

/* Draw a small random weight in roughly [-0.1, 0.1].
 * A Gaussian-shaped curve (sigma = 0.1) of the uniform sample from
 * _randfloat() is scaled into the initialization range.
 * NOTE(review): _randfloat is an external helper; assumed to return a
 * float sample — confirm its range against its implementation. */
float gauss ()
{
  float x = _randfloat();
  double t = x / 0.1;           /* keep the double promotion of the original */
  float r = exp(-0.5 * t * t);  /* bell curve value in (0, 1] */
  return -0.1 + r * 0.2;        /* map into the [-0.1, 0.1] band */
}

/* A single neuron with two inputs, one output, and its training state.
 * FlexC allows member functions on structs, giving this a "class" feel.
 * Field order matters for memory layout; do not reorder. */
typedef struct _Neuron 
{
	float in1;      /* first input value */
	float in2;      /* second input value */
	float out;      /* activation output, sigmoid(y) */
	float weight1;  /* weight applied to in1 */
	float weight2;  /* weight applied to in2 */
	float bias;     /* additive bias term */
	int layer;      /* 0 = hidden layer, 1 = output layer (set by caller) */
	float y;        /* weighted sum before activation */
	float teta;     /* error delta from backpropagation */
	/* Randomize weights/bias and reset training state.
	 * The three gauss() calls consume the RNG in a fixed order. */
	void init(void)
	{
		weight1 = gauss();
		weight2 = gauss();
		bias = gauss();
		layer = 0;
		out = 0.0;
		teta = 0.0;	
	}
	/* Compute the pre-activation weighted sum y = w1*in1 + w2*in2 + b. */
	void computeY(void)
	{
		y = weight1 * in1 + weight2 * in2 + bias;
	}

	/* Forward pass: weighted sum followed by sigmoid activation into out. */
	void computeF(void)
	{
		computeY();
		out = sigmoid(y);
	}	

	
} Neuron;

/* The whole network: neuron[0] and neuron[1] form the hidden layer,
 * neuron[2] is the single output neuron. */
Neuron neuron[3];

float label;       /* expected XOR result for the current training sample */
float alpha = 0.1; /* learning rate for gradient descent */

/* Compute and store the backpropagation error delta ("teta") for one neuron.
 * Output neuron (layer == 1): delta = -(label - f) * f * (1 - f).
 * Hidden neuron: delta = f * (1 - f) * delta_out * w, where w is the
 * output neuron's weight on this hidden neuron's activation.
 * Returns the delta and caches it in neuron[numberNeuron].teta. */
float computeTeta(int numberNeuron)
{
	float f = neuron[numberNeuron].out;
	float teta;
	/* Initialized defensively: the original switch had no default, so an
	 * unexpected index would have read an uninitialized h (UB). */
	float h = 0.0f;
	
	if(neuron[numberNeuron].layer == 1)
	{ 										// Output layer
		teta = -(label-f)*f*(1-f);
	} 
	else 
	{ 									   // Hidden layer
		switch (numberNeuron)
		{
			case 0: h = neuron[2].weight1; break;
			case 1: h = neuron[2].weight2; break;
			case 2: h = neuron[2].bias; break; /* unreachable: neuron[2] has layer==1 */
			default: h = 0.0f; break;
		}
		teta = f *(1-f) * neuron[2].teta * h;
	}	
	
	neuron[numberNeuron].teta = teta;
	return teta;
}

void gradientStep(int numberNeuron)
{
	float teta = computeTeta(numberNeuron);
	float weight1 = neuron[numberNeuron].weight1;
	float weight2 = neuron[numberNeuron].weight2;
	float bias    = neuron[numberNeuron].bias;

	weight1 = weight1 - alpha * teta * neuron[numberNeuron].in1;
	weight2 = weight2 - alpha * teta * neuron[numberNeuron].in2;
	bias    = bias    - alpha * teta;
	
	neuron[numberNeuron].weight1 = weight1;
	neuron[numberNeuron].weight2 = weight2;
	neuron[numberNeuron].bias = bias;	
}

/* Backpropagation sweep: the output neuron (index 2) must be updated
 * first so its delta is available when the hidden neurons compute theirs. */
void backpropagation()
{
	for (int i = 2; i >= 0; i--)
	{
		gradientStep(i);
	}
}

/* Train the network on random XOR samples: forward pass through the
 * hidden pair and the output neuron, then a backpropagation step. */
void trainNeurons()
{
	const int cycles = 100000;
	for(int i = 0; i<cycles; i++)
	{
		int a = rand() % 2;
		int b = rand() % 2;

		label = (float)(a^b);

		/* feed both hidden neurons and run their forward pass */
		for (int k = 0; k < 2; k++)
		{
			neuron[k].in1 = a;
			neuron[k].in2 = b;
			neuron[k].computeF();
		}

		/* output neuron consumes the hidden activations */
		neuron[2].in1 = neuron[0].out;
		neuron[2].in2 = neuron[1].out;
		neuron[2].computeF();

		backpropagation();
	}
	
}

/* Evaluate the trained network on random XOR samples and print the
 * fraction of correct predictions (1.0 means 100% correct).
 * A prediction counts as 1 when the output activation >= 0.5. */
void testNeurons()
{
	int count = 0;
	int cycles = 10000;
	for(int i = 0; i<cycles; i++)
	{
		int in1 = rand() % 2;
		int in2 = rand() % 2;
		neuron[0].in1 = in1;
		neuron[0].in2 = in2;
		neuron[1].in1 = in1;
		neuron[1].in2 = in2;
		neuron[0].computeF();
		neuron[1].computeF();
		neuron[2].in1 = neuron[0].out;
		neuron[2].in2 = neuron[1].out;
		neuron[2].computeF();
		
		/* Equivalent to the original's four-case conditional:
		 * count a hit when thresholded output matches in1 XOR in2. */
		int expected  = in1 ^ in2;
		int predicted = (neuron[2].out >= 0.5) ? 1 : 0;
		if (expected == predicted)
		{
			count++;
		}
	}
	/* fixed typo: original printed "Accurancy" */
	printf("Accuracy: %f\n",(float)count/(float)cycles);
}


/* Entry point: configure the P2 clock and serial port, then repeatedly
 * re-initialize, train, and test the network forever.
 * void main is the FlexC/embedded convention here — there is no host OS
 * to receive a return code. */
void main ()
{
    _clkset(_SETFREQ, _CLOCKFREQ);
    _setbaud(BAUD);	
    sleep(1);

    /* replaced the original goto-based loop with an idiomatic for(;;) */
    for (;;)
    {
        printf("init \n");
        for (int i = 0; i < 3; i++)
        {
            neuron[i].init();
        }
        neuron[2].layer = 1; // mark output layer

        /* reseed per run so each training round sees different samples */
        srand(time(NULL));
        printf("training \n");	
        trainNeurons();
        printf("test \n");
        testNeurons(); 
    }
}



/*
+--------------------------------------------------------------------
| TERMS OF USE: MIT License
+--------------------------------------------------------------------
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files
(the "Software"), to deal in the Software without restriction,
including without limitation the rights to use, copy, modify, merge,
publish, distribute, sublicense, and/or sell copies of the Software,
and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:

The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+--------------------------------------------------------------------
*/


Comments

  • Very interesting! I was also able to run a NN on the first P1 I bought; if I remember correctly it was a sample related to sun spots. It took forever compared to a PC, but that was expected.
    Now if I only had a P2...

  • Uskon, että P2-levyt ovat saatavana pian EU:ssa.
Sign In or Register to comment.