Hyperassociative Map
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Random;
import java.util.concurrent.Callable;
// Imports for Graph, GraphDrawer, Vector, TraversableCloud, Weighted,
// Topography and Logger are omitted; those types come from the dANN library
// (and its logging dependency), whose package names vary by version.

/**
 * A Hyperassociative Map is an algorithm that organizes an arbitrary graph of
 * interconnected nodes according to their associations with other nodes.
 * Once a Hyperassociative Map has been associated and aligned, nodes that
 * are most closely associated will be closest to each other.
 * For more information, see the
 * <a href="http://wiki.syncleus.com/index.php/dANN:Hyperassociative_Map">
 * Hyperassociative-Map dANN Wiki page</a>.
 *
 * @author Jeffrey Phillips Freeman
 * @param <G> The graph type
 * @param <N> The node type
 */
public class HyperassociativeMap<G extends Graph<N, ?>, N> implements GraphDrawer<G, N>
{
    private static final double REPULSIVE_WEAKNESS = 2.0;
    private static final double DEFAULT_LEARNING_RATE = 0.5;
    private static final double DEFAULT_MAX_MOVEMENT = 0.0;
    private static final double DEFAULT_TOTAL_MOVEMENT = 0.0;
    private static final double DEFAULT_ACCEPTABLE_DISTANCE_FACTOR = 0.75;
    private static final double EQUILIBRIUM_DISTANCE = 1.0;
    private static final double EQUILIBRIUM_ALIGNMENT_FACTOR = 0.005;
    private static final double LEARNING_RATE_PROCESSING_ADJUSTMENT = 1.01;

    private final G graph;
    private final int dimensions;
    private static final Logger LOGGER = Logger.getLogger(HyperassociativeMap.class);
    private Map<N, Vector> coordinates = Collections.synchronizedMap(new HashMap<N, Vector>());
    private static final Random RANDOM = new Random();
    private final boolean useWeights;
    private double equilibriumDistance;
    private double learningRate = DEFAULT_LEARNING_RATE;
    private double maxMovement = DEFAULT_MAX_MOVEMENT;
    private double totalMovement = DEFAULT_TOTAL_MOVEMENT;
    private double acceptableDistanceFactor = DEFAULT_ACCEPTABLE_DISTANCE_FACTOR;

    /**
     * Callable wrapper that computes the alignment of a single node, so that
     * per-node alignment can be run as an independent task.
     */
    private class Align implements Callable<Vector>
    {
        private final N node;

        public Align(final N node)
        {
            this.node = node;
        }

        @Override
        public Vector call()
        {
            return align(node);
        }
    }

    public HyperassociativeMap(final G graph, final int dimensions, final double equilibriumDistance,
                               final boolean useWeights)
    {
        if (graph == null)
            throw new IllegalArgumentException("Graph cannot be null");
        if (dimensions <= 0)
            throw new IllegalArgumentException("dimensions must be 1 or more");

        this.graph = graph;
        this.dimensions = dimensions;
        this.equilibriumDistance = equilibriumDistance;
        this.useWeights = useWeights;

        // give every node an initial random position
        for (final N node : this.graph.getNodes())
        {
            this.coordinates.put(node, randomCoordinates(this.dimensions));
        }
    }

    public HyperassociativeMap(final G graph, final int dimensions)
    {
        this(graph, dimensions, EQUILIBRIUM_DISTANCE, true);
    }

    public void resetLearning()
    {
        learningRate = DEFAULT_LEARNING_RATE;
        maxMovement = DEFAULT_MAX_MOVEMENT;
        totalMovement = DEFAULT_TOTAL_MOVEMENT;
        acceptableDistanceFactor = DEFAULT_ACCEPTABLE_DISTANCE_FACTOR;
    }

    @Override
    public void reset()
    {
        resetLearning();
        // randomize all nodes
        for (final N node : coordinates.keySet())
        {
            coordinates.put(node, randomCoordinates(dimensions));
        }
    }

    @Override
    public boolean isAligned()
    {
        return (maxMovement < (EQUILIBRIUM_ALIGNMENT_FACTOR * equilibriumDistance))
                && (maxMovement > DEFAULT_MAX_MOVEMENT);
    }

    private double getAverageMovement()
    {
        return totalMovement / Topography.getOrder((Graph<N, ?>) graph);
    }

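    /**
     * Runs one full alignment iteration: ensures every node in the graph has a
     * coordinate, performs a single pass of per-node alignment via
     * processLocally(), and finally re-centers all nodes around the average of
     * their positions.
     */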
    @Override
    public void align()
    {
        // refresh all nodes
        if (!coordinates.keySet().equals(graph.getNodes()))
        {
            final Map<N, Vector> newCoordinates = new HashMap<N, Vector>();
            for (final N node : graph.getNodes())
            {
                if (coordinates.containsKey(node))
                {
                    newCoordinates.put(node, coordinates.get(node));
                }
                else
                {
                    newCoordinates.put(node, randomCoordinates(dimensions));
                }
            }
            coordinates = Collections.synchronizedMap(newCoordinates);
        }

        totalMovement = DEFAULT_TOTAL_MOVEMENT;
        maxMovement = DEFAULT_MAX_MOVEMENT;
        Vector center = processLocally();
        LOGGER.debug("maxMove: " + maxMovement + ", Average Move: " + getAverageMovement());

        // divide each coordinate of the sum of all the points by the number of
        // nodes in order to calculate the average point, or center of all the
        // points
        for (int dimensionIndex = 1; dimensionIndex <= dimensions; dimensionIndex++)
        {
            center = center.setCoordinate(center.getCoordinate(dimensionIndex) /
                    graph.getNodes().size(), dimensionIndex);
        }
        recenterNodes(center);
    }

    @Override
    public int getDimensions()
    {
        return dimensions;
    }

    @Override
    public Map<N, Vector> getCoordinates()
    {
        return Collections.unmodifiableMap(coordinates);
    }

    private void recenterNodes(final Vector center)
    {
        for (final N node : graph.getNodes())
        {
            coordinates.put(node, coordinates.get(node).calculateRelativeTo(center));
        }
    }

    public boolean isUsingWeights()
    {
        return useWeights;
    }

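    /**
     * Maps each node adjacent to nodeToQuery to the distance at which the pair
     * is in equilibrium: the edge's weight when weights are in use and the edge
     * is Weighted, otherwise this map's equilibrium distance.
     */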
    Map<N, Double> getNeighbors(final N nodeToQuery)
    {
        final Map<N, Double> neighbors = new HashMap<N, Double>();
        for (final TraversableCloud<N> neighborEdge : graph.getAdjacentEdges(nodeToQuery))
        {
            final Double currentWeight = ((neighborEdge instanceof Weighted) && useWeights)
                    ? ((Weighted) neighborEdge).getWeight()
                    : equilibriumDistance;
            for (final N neighbor : neighborEdge.getNodes())
            {
                if (!neighbor.equals(nodeToQuery))
                {
                    neighbors.put(neighbor, currentWeight);
                }
            }
        }
        return neighbors;
    }

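    /**
     * Computes a proposed new position for a single node. Neighbors pull the
     * node toward their association's equilibrium distance, while unrelated
     * nodes push it away with a force that falls off with the distance raised
     * to REPULSIVE_WEAKNESS; both contributions are scaled by the learning rate
     * and summed into a composite displacement added to the current location.
     */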
    private Vector align(final N nodeToAlign)
    {
        // calculate equilibrium with neighbors
        final Vector location = coordinates.get(nodeToAlign);
        final Map<N, Double> neighbors = getNeighbors(nodeToAlign);
        Vector compositeVector = new Vector(location.getDimensions());

        // align with neighbours
        for (final Entry<N, Double> neighborEntry : neighbors.entrySet())
        {
            final N neighbor = neighborEntry.getKey();
            final double associationEquilibriumDistance = neighborEntry.getValue();
            Vector neighborVector = coordinates.get(neighbor).calculateRelativeTo(location);
            double newDistance = Math.abs(neighborVector.getDistance()) -
                    associationEquilibriumDistance;
            newDistance *= learningRate;
            neighborVector = neighborVector.setDistance(newDistance);
            compositeVector = compositeVector.add(neighborVector);
        }

        // calculate repulsion with all non-neighbors
        for (final N node : graph.getNodes())
        {
            if ((!neighbors.containsKey(node)) && (!node.equals(nodeToAlign))
                    && (!graph.getAdjacentNodes(node).contains(nodeToAlign)))
            {
                Vector nodeVector = coordinates.get(node).calculateRelativeTo(location);
                double newDistance = -EQUILIBRIUM_DISTANCE /
                        Math.pow(nodeVector.getDistance(), REPULSIVE_WEAKNESS);
                if (Math.abs(newDistance) > Math.abs(equilibriumDistance))
                {
                    newDistance = Math.copySign(equilibriumDistance, newDistance);
                }
                newDistance *= learningRate;
                nodeVector = nodeVector.setDistance(newDistance);
                compositeVector = compositeVector.add(nodeVector);
            }
        }
        return location.add(compositeVector);
    }

    /**
     * Obtains a Vector with random coordinates for the specified number of
     * dimensions. Each coordinate will be in the range [-1.0, 1.0].
     *
     * @param dimensions number of dimensions for the random Vector
     * @return a new random Vector
     * @since 1.0
     */
    public static Vector randomCoordinates(final int dimensions)
    {
        final double[] randomCoordinates = new double[dimensions];
        for (int randomCoordinatesIndex = 0; randomCoordinatesIndex < dimensions;
                randomCoordinatesIndex++)
        {
            randomCoordinates[randomCoordinatesIndex] = (RANDOM.nextDouble() * 2.0) - 1.0;
        }
        return new Vector(randomCoordinates);
    }

    /**
     * Returns the inverse hyperbolic tangent of a value,
     * atanh(x) = ln((1 + x) / (1 - x)) / 2. See the
     * <a href="http://www.mathworks.com/help/techdoc/ref/atanh.html">
     * MathWorks atanh page</a> for more information.
     *
     * @param value the input
     * @return the inverse hyperbolic tangent of value
     */
    private static double atanh(final double value)
    {
        return Math.log(Math.abs((value + 1.0) / (1.0 - value))) / 2.0;
    }

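    /**
     * Performs one sequential alignment pass over every node. Moves larger than
     * equilibriumDistance * acceptableDistanceFactor are rejected and instead
     * shrink the learning rate slightly; accepted moves update the node's
     * coordinates and the maxMovement/totalMovement statistics. Returns the sum
     * of all node positions so that align() can derive the center point.
     */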
    private Vector processLocally()
    {
        Vector pointSum = new Vector(dimensions);
        for (final N node : graph.getNodes())
        {
            final Vector newPoint = align(node);
            final Vector oldLocation = coordinates.get(node);
            final double moveDistance = Math.abs(newPoint.calculateRelativeTo(oldLocation)
                    .getDistance());
            if (moveDistance > equilibriumDistance * acceptableDistanceFactor)
            {
                // the proposed move is too large; shrink the learning rate
                // slightly instead of applying it
                final double newLearningRate = ((equilibriumDistance *
                        acceptableDistanceFactor) /
                        moveDistance);
                learningRate -= Math.abs(newLearningRate - learningRate) * 0.01;
            }
            else
            {
                if (moveDistance > maxMovement)
                {
                    maxMovement = moveDistance;
                }
                totalMovement += moveDistance;
                coordinates.put(node, newPoint);
            }
            // accumulate the node's (possibly updated) position so align() can
            // compute the center of all points from the returned sum
            pointSum = pointSum.add(coordinates.get(node));
            LOGGER.debug("learning rate: " + learningRate);
        }

        if ((learningRate * LEARNING_RATE_PROCESSING_ADJUSTMENT) < 1.0)
        {
            learningRate *= LEARNING_RATE_PROCESSING_ADJUSTMENT;
            LOGGER.debug("learning rate: " + learningRate + ", acceptableDistanceFactor: " +
                    acceptableDistanceFactor);
        }
        return pointSum;
    }
}
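
For context, a minimal usage sketch follows (not part of the original gist). It assumes the caller has already built a dANN Graph<String, ?>; the example class name and the 1000-pass cap are illustrative only, imports are omitted (java.util.Map from the JDK; Graph, Vector and HyperassociativeMap from dANN), and only methods defined in HyperassociativeMap above are called.

// Hypothetical example class; "graph" is an already-constructed dANN graph.
public final class HyperassociativeMapExample
{
    public static Map<String, Vector> layout(final Graph<String, ?> graph)
    {
        final HyperassociativeMap<Graph<String, ?>, String> drawer =
                new HyperassociativeMap<Graph<String, ?>, String>(graph, 2);
        // Keep running alignment passes until the largest per-node move drops
        // below the alignment threshold, with a hard cap so the loop terminates.
        for (int pass = 0; (pass < 1000) && !drawer.isAligned(); pass++)
        {
            drawer.align();
        }
        return drawer.getCoordinates();
    }
}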