Example 3: Sequential Learning
Example 3 presents a skeleton for sequential learning. Sequential learning, or adaptation, is an update process applied to the conditional probability tables. After a network has been built, sequential learning can be applied during operation in order to maintain the correspondence between the model (conditional probability tables) and the real-world domain.
After the network is loaded in HUGIN, the learning parameters are specified. A case is then entered into the network, the conditional probability tables are updated by adaptation, and finally the node marginals are printed.
#if X64
using size_t = System.UInt64;
using h_index_t = System.Int64;
#else
using size_t = System.UInt32;
using h_index_t = System.Int32;
#endif
#if H_DOUBLE
using h_number_t = System.Double;
#else
using h_number_t = System.Single;
#endif
using System;
using HAPI;
namespace Example
{
/// <summary>
/// Demonstrates sequential learning (adaptation) with the HUGIN API:
/// loads a network, sets up experience and fading tables, enters a case,
/// propagates, adapts the conditional probability tables, and prints the
/// resulting node marginals.
/// </summary>
public class Adapt
{
    /// <summary>
    /// Runs the full adaptation demo for the network stored in
    /// <paramref name="fileName"/>.net. A log file fileName.log is written,
    /// and the adapted network is saved as "q.net".
    /// </summary>
    /// <param name="fileName">Base name of the .net file (no extension).</param>
    public Adapt(String fileName)
    {
        String netFileName = fileName + ".net";
        Domain d = new Domain(netFileName, new DefaultClassParseListener());
        String logFileName = fileName + ".log";
        d.OpenLogFile(logFileName);
        try
        {
            d.Compile();
            SpecifyLearningParameters(d);
            PrintLearningParameters(d);
            EnterCase(d);
            PrintCase(d);
            // Sum-propagate the evidence before adapting; Adapt() updates the
            // experience counts and CPTs from the propagated case.
            d.Propagate(Domain.Equilibrium.H_EQUILIBRIUM_SUM, Domain.EvidenceMode.H_EVIDENCE_MODE_NORMAL);
            d.Adapt();
            // Retract the case evidence so the printed marginals reflect the
            // adapted (prior) distributions.
            d.Initialize();
            PrintNodeMarginals(d);
            d.SaveAsNet("q.net");
        }
        finally
        {
            // Fix: the log file was opened but never closed (resource leak).
            d.CloseLogFile();
        }
    }

    /// <summary>
    /// Fills <paramref name="table"/> with all-one entries and installs the data.
    /// For an experience table this means an initial sample count of 1 per
    /// parent configuration; for a fading table a factor of 1 means no fading.
    /// </summary>
    private static void FillTableWithOnes(Table table)
    {
        size_t tblSize = table.GetSize();
        h_number_t[] data = new h_number_t[tblSize];
        for (size_t i = 0; i < tblSize; i++)
            data[i] = 1;
        table.SetData(data);
    }

    /// <summary>
    /// Creates experience and fading tables for every (discrete chance) node
    /// and initializes all entries to 1, enabling adaptation.
    /// </summary>
    private void SpecifyLearningParameters(Domain d)
    {
        NodeList nl = d.GetNodes();
        // GetExperienceTable()/GetFadingTable() create the tables on demand.
        foreach (DiscreteChanceNode node in nl)
            FillTableWithOnes(node.GetExperienceTable());
        foreach (DiscreteChanceNode node in nl)
            FillTableWithOnes(node.GetFadingTable());
    }

    /// <summary>Prints all entries of a learning-parameter table on one line.</summary>
    private static void PrintParameterTable(Table table)
    {
        h_number_t[] data = table.GetData();
        size_t tblSize = table.GetSize();
        for (size_t i = 0; i < tblSize; i++)
            Console.Write(data[i] + " ");
        Console.WriteLine();
    }

    /// <summary>
    /// Prints the experience and fading tables (if present) of every node.
    /// </summary>
    private void PrintLearningParameters(Domain d)
    {
        NodeList nl = d.GetNodes();
        foreach (DiscreteChanceNode dcNode in nl)
        {
            Console.WriteLine(dcNode.GetLabel() + " (" + dcNode.GetName() + "): ");
            Console.Write(" ");
            if (dcNode.HasExperienceTable())
                PrintParameterTable(dcNode.GetExperienceTable());
            else
                Console.WriteLine("No experience table");
            Console.Write(" ");
            if (dcNode.HasFadingTable())
                PrintParameterTable(dcNode.GetFadingTable());
            else
                Console.WriteLine("No fading table");
        }
    }

    /// <summary>
    /// Enters a case: selects state 0 for every node, then retracts the
    /// finding of the second node so the case contains one unobserved node.
    /// </summary>
    private void EnterCase(Domain d)
    {
        NodeList nl = d.GetNodes();
        foreach (DiscreteChanceNode dcNode in nl)
            dcNode.SelectState(0);
        // Fix: the original indexed nl[1] unconditionally, which throws on
        // networks with fewer than two nodes.
        if (nl.Count > 1)
        {
            DiscreteChanceNode node = (DiscreteChanceNode)nl[1];
            node.RetractFindings();
        }
    }

    /// <summary>
    /// Prints, for each node, whether evidence is currently entered.
    /// </summary>
    private void PrintCase(Domain d)
    {
        NodeList nl = d.GetNodes();
        foreach (DiscreteChanceNode dcNode in nl)
        {
            Console.Write(" (" + dcNode.GetName() + ",");
            if (dcNode.EvidenceIsEntered())
                Console.Write(" evidence is entered) ");
            else
                Console.Write(" evidence is not entered) ");
        }
        Console.WriteLine();
    }

    /// <summary>
    /// Prints the marginal belief of every state of every node.
    /// </summary>
    private void PrintNodeMarginals(Domain d)
    {
        NodeList nl = d.GetNodes();
        foreach (DiscreteChanceNode dcNode in nl)
        {
            size_t nStates = dcNode.GetNumberOfStates();
            Console.WriteLine(dcNode.GetLabel() + " (" + dcNode.GetName() + ")");
            for (size_t i = 0; i < nStates; i++)
                Console.WriteLine(" - " + dcNode.GetStateLabel(i) + ": " + dcNode.GetBelief(i));
        }
    }

    /// <summary>
    /// Entry point. Expects one argument: the base name of the .net file.
    /// </summary>
    public static void Main(String[] args)
    {
        // Fix: the original indexed args[0] without checking, so a missing
        // argument crashed with an uncaught IndexOutOfRangeException.
        if (args.Length != 1)
        {
            Console.WriteLine("Usage: Adapt <net file name (no extension)>");
            return;
        }
        try
        {
            new Adapt(args[0]);
        }
        catch (ExceptionHugin eh)
        {
            Console.WriteLine(eh);
        }
    }
}
}