spiking surrogates implemented

David Shorten 2021-08-11 09:20:35 +10:00
parent dc85001ff7
commit dd215fdfbb
5 changed files with 354 additions and 1359 deletions

View File

@@ -282,68 +282,12 @@ public interface TransferEntropyCalculatorSpiking {
* will vary depending on the underlying implementation
*/
public SpikingLocalInformationValues computeLocalOfPreviousObservations() throws Exception;
/**
* Generate a bootstrapped distribution of what the TE would look like,
* under a null hypothesis that the source values of our
* samples had no temporal relation to the destination value.
*
* <p>See Section II.E "Statistical significance testing" of
* the JIDT paper below for a description of how this is done for MI and TE in general.
* </p>
*
* <p>Note that if several disjoint time-series have been added
* as observations using {@link #addObservations(double[])} etc.,
* then these separate "trials" will be mixed up in the generation
* of surrogates here.</p>
*
* <p>This method (in contrast to {@link #computeSignificance(int[][])})
* creates <i>random</i> shufflings of the source embedding vectors for the surrogate
* calculations.</p>
*
* @param numPermutationsToCheck number of surrogate samples to bootstrap
* to generate the distribution.
* @return the distribution of TE scores under this null hypothesis.
* @see "J.T. Lizier, 'JIDT: An information-theoretic
* toolkit for studying the dynamics of complex systems', 2014."
*/
public EmpiricalMeasurementDistribution computeSignificance(int numPermutationsToCheck) throws Exception;
/**
* Generate a bootstrapped distribution of what the TE would look like,
* under a null hypothesis that the source values of our
* samples had no relation to the destination value.
*
* <p>See Section II.E "Statistical significance testing" of
* the JIDT paper below for a description of how this is done for MI and TE.
* </p>
*
* <p>Note that if several disjoint time-series have been added
* as observations using {@link #addObservations(double[])} etc.,
* then these separate "trials" will be mixed up in the generation
* of surrogates here.</p>
*
* <p>This method (in contrast to {@link #computeSignificance(int)})
* allows the user to specify how to construct the surrogates,
* such that repeatable results may be obtained.</p>
*
* @param newOrderings a specification of how to shuffle the source embedding vectors
* between all of our samples to create the surrogates to generate the distribution with. The first
* index is the permutation number (i.e. newOrderings.length is the number
* of surrogate samples we use to bootstrap to generate the distribution here.)
* Each array newOrderings[i] should be an array of length N (where
N would be the value returned by {@link #getNumObservations()}),
* containing a permutation of the values in 0..(N-1).
* TODO Need to think this through a little more before implementing.
* @return the distribution of channel measure scores under this null hypothesis.
* @see "J.T. Lizier, 'JIDT: An information-theoretic
* toolkit for studying the dynamics of complex systems', 2014."
* @throws Exception where the length of each permutation in newOrderings
* is not equal to the number N samples that were previously supplied.
*/
public EmpiricalMeasurementDistribution computeSignificance(
int[][] newOrderings) throws Exception;
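/**
 * Compute a distribution of surrogate TE values under the null hypothesis that the source
 * spike train provides no information about the target beyond that already contained in the
 * target (and any conditional) histories. The previously estimated TE value is recorded in
 * the returned distribution; the overload taking a random seed allows repeatable surrogates.
 */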
public EmpiricalMeasurementDistribution computeSignificance(int numPermutationsToCheck, double estimatedValue) throws Exception;
public EmpiricalMeasurementDistribution computeSignificance(int numPermutationsToCheck,
double estimatedValue, long randomSeed) throws Exception;
/**
* Set or clear debug mode for extra debug printing to stdout
*

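A minimal end-to-end sketch of the new significance-testing API in Java follows; the spike trains, property values and import paths are illustrative assumptions, and the call sequence simply mirrors the JPype calls in the Python demo at the end of this diff (any further initialisation the calculator may require is elided).

import java.util.Arrays;
import java.util.Random;
import infodynamics.measures.spiking.integration.TransferEntropyCalculatorSpikingIntegration;
import infodynamics.utils.EmpiricalMeasurementDistribution;

public class SpikingTeSurrogateSketch {
    public static void main(String[] args) throws Exception {
        // Toy spike trains mirroring the "noisy copy" demo below: one conditioning process,
        // with source and target being jittered, delayed copies of it (illustrative only).
        int numSpikes = 2000;
        Random rng = new Random(42);
        double[][] condSpikes = new double[1][numSpikes];
        double[] sourceSpikes = new double[numSpikes];
        double[] destSpikes = new double[numSpikes];
        double t = 0.0;
        for (int i = 0; i < numSpikes; i++) {
            t += 1.0 + 0.05 * rng.nextDouble();
            condSpikes[0][i] = t;
            sourceSpikes[i] = t + 0.25 + 0.05 * rng.nextGaussian();
            destSpikes[i] = t + 0.5 + 0.05 * rng.nextGaussian();
        }
        Arrays.sort(sourceSpikes);
        Arrays.sort(destSpikes);

        TransferEntropyCalculatorSpikingIntegration teCalc = new TransferEntropyCalculatorSpikingIntegration();
        teCalc.setProperty("knns", "4");
        teCalc.setProperty("k_HISTORY", "2");
        teCalc.setProperty("l_HISTORY", "1");
        teCalc.setProperty("COND_EMBED_LENGTHS", "1");
        teCalc.setProperty("K_PERM", "10");                            // neighbours used when permuting source histories
        teCalc.setProperty("SURROGATE_NUM_SAMPLES_MULTIPLIER", "1.0"); // sample density for the surrogate embeddings
        teCalc.startAddObservations();
        teCalc.addObservations(sourceSpikes, destSpikes, condSpikes);
        teCalc.finaliseAddObservations();

        double te = teCalc.computeAverageLocalOfObservations();
        // 100 surrogates with a fixed seed, so the surrogate distribution is repeatable
        EmpiricalMeasurementDistribution dist = teCalc.computeSignificance(100, te, 0L);
        System.out.println("TE = " + te + " nats, p-value = " + dist.pValue);
    }
}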
View File

@@ -6,6 +6,7 @@ import java.util.Iterator;
import java.util.PriorityQueue;
import java.util.Random;
import java.util.Vector;
import java.util.ArrayList;
//import infodynamics.measures.continuous.kraskov.EuclideanUtils;
import infodynamics.measures.spiking.TransferEntropyCalculatorSpiking;
@@ -32,6 +33,14 @@ import infodynamics.utils.ParsedProperties;
* @author Joseph Lizier (<a href="joseph.lizier at gmail.com">email</a>,
* <a href="http://lizier.me/joseph/">www</a>)
*/
/*
* TODO
* This implementation of the estimator does not implement dynamic exclusion windows. Such windows make sure
* that history embeddings that overlap are not considered in nearest-neighbour searches (as this breaks the
independence assumption). Getting dynamic exclusion windows working will probably require modifications to the
* KdTree class.
*/
public class TransferEntropyCalculatorSpikingIntegration implements TransferEntropyCalculatorSpiking {
/**
@@ -76,9 +85,9 @@ public class TransferEntropyCalculatorSpikingIntegration implements TransferEntr
*/
protected Vector<double[][]> vectorOfConditionalSpikeTimes = null;
Vector<double[]> ConditioningEmbeddingsFromSpikes = null;
Vector<double[]> conditioningEmbeddingsFromSpikes = null;
Vector<double[]> jointEmbeddingsFromSpikes = null;
Vector<double[]> ConditioningEmbeddingsFromSamples = null;
Vector<double[]> conditioningEmbeddingsFromSamples = null;
Vector<double[]> jointEmbeddingsFromSamples = null;
Vector<Double> processTimeLengths = null;
@@ -114,7 +123,21 @@ public class TransferEntropyCalculatorSpikingIntegration implements TransferEntr
* of the number of target spikes.
*/
public static final String PROP_SAMPLE_MULTIPLIER = "NUM_SAMPLES_MULTIPLIER";
protected double num_samples_multiplier = 1.0;
protected double numSamplesMultiplier = 1.0;
/**
* Property name for the number of random sample points to use in the construction of the surrogates as a multiple
* of the number of target spikes.
*/
public static final String PROP_SURROGATE_SAMPLE_MULTIPLIER = "SURROGATE_NUM_SAMPLES_MULTIPLIER";
protected double surrogateNumSamplesMultiplier = 1.0;
/**
* Property for the number of nearest neighbours to use in the construction of the surrogates
*/
public static final String PROP_K_PERM = "K_PERM";
protected int kPerm = 10;
/**
* Property name for what type of norm to use between data points
* for each marginal variable -- Options are defined by
@@ -182,29 +205,31 @@ public class TransferEntropyCalculatorSpikingIntegration implements TransferEntr
public void setProperty(String propertyName, String propertyValue) throws Exception {
boolean propertySet = true;
if (propertyName.equalsIgnoreCase(K_PROP_NAME)) {
int k_temp = Integer.parseInt(propertyValue);
if (k_temp < 1) {
int kTemp = Integer.parseInt(propertyValue);
if (kTemp < 1) {
throw new Exception ("Invalid k value less than 1.");
} else {
k = k_temp;
k = kTemp;
}
} else if (propertyName.equalsIgnoreCase(L_PROP_NAME)) {
int l_temp = Integer.parseInt(propertyValue);
if (l_temp < 1) {
int lTemp = Integer.parseInt(propertyValue);
if (lTemp < 1) {
throw new Exception ("Invalid l value less than 1.");
} else {
l = l_temp;
l = lTemp;
}
} else if (propertyName.equalsIgnoreCase(COND_EMBED_LENGTHS_PROP_NAME)) {
int[] condEmbedDims_temp = ParsedProperties.parseStringArrayOfInts(propertyValue);
for (int dim : condEmbedDims_temp) {
int[] condEmbedDimsTemp = ParsedProperties.parseStringArrayOfInts(propertyValue);
for (int dim : condEmbedDimsTemp) {
if (dim < 1) {
throw new Exception ("Invalid conditional embedding value less than 1.");
}
}
condEmbedDims = condEmbedDims_temp;
condEmbedDims = condEmbedDimsTemp;
} else if (propertyName.equalsIgnoreCase(KNNS_PROP_NAME)) {
Knns = Integer.parseInt(propertyValue);
} else if (propertyName.equalsIgnoreCase(PROP_K_PERM)) {
kPerm = Integer.parseInt(propertyValue);
} else if (propertyName.equalsIgnoreCase(PROP_ADD_NOISE)) {
if (propertyValue.equals("0") || propertyValue.equalsIgnoreCase("false")) {
addNoise = false;
@@ -215,14 +240,21 @@ public class TransferEntropyCalculatorSpikingIntegration implements TransferEntr
}
} else if (propertyName.equalsIgnoreCase(PROP_SAMPLE_MULTIPLIER)) {
double temp_num_samples_multiplier = Double.parseDouble(propertyValue);
if (temp_num_samples_multiplier <= 0) {
double tempNumSamplesMultiplier = Double.parseDouble(propertyValue);
if (tempNumSamplesMultiplier <= 0) {
throw new Exception ("Num samples multiplier must be greater than 0.");
} else {
num_samples_multiplier = temp_num_samples_multiplier;
numSamplesMultiplier = tempNumSamplesMultiplier;
}
} else if (propertyName.equalsIgnoreCase(PROP_NORM_TYPE)) {
normType = KdTree.validateNormType(propertyValue);
} else if (propertyName.equalsIgnoreCase(PROP_SURROGATE_SAMPLE_MULTIPLIER)) {
double tempSurrogateNumSamplesMultiplier = Double.parseDouble(propertyValue);
if (tempSurrogateNumSamplesMultiplier <= 0) {
throw new Exception ("Surrogate num samples multiplier must be greater than 0.");
} else {
surrogateNumSamplesMultiplier = tempSurrogateNumSamplesMultiplier;
}
} else {
// No property was set on this class
propertySet = false;
@@ -251,7 +283,7 @@ public class TransferEntropyCalculatorSpikingIntegration implements TransferEntr
} else if (propertyName.equalsIgnoreCase(PROP_ADD_NOISE)) {
return Double.toString(noiseLevel);
} else if (propertyName.equalsIgnoreCase(PROP_SAMPLE_MULTIPLIER)) {
return Double.toString(num_samples_multiplier);
return Double.toString(numSamplesMultiplier);
} else {
// No property matches for this class
return null;
@@ -319,45 +351,43 @@ public class TransferEntropyCalculatorSpikingIntegration implements TransferEntr
@Override
public void finaliseAddObservations() throws Exception {
ConditioningEmbeddingsFromSpikes = new Vector<double[]>();
conditioningEmbeddingsFromSpikes = new Vector<double[]>();
jointEmbeddingsFromSpikes = new Vector<double[]>();
ConditioningEmbeddingsFromSamples = new Vector<double[]>();
conditioningEmbeddingsFromSamples = new Vector<double[]>();
jointEmbeddingsFromSamples = new Vector<double[]>();
processTimeLengths = new Vector<Double>();
// Send all of the observations through:
Iterator<double[]> sourceIterator = vectorOfSourceSpikeTimes.iterator();
int timeSeriesIndex = 0;
Iterator<double[][]> conditionalIterator = null;
if (vectorOfConditionalSpikeTimes.size() > 0) {
Iterator<double[][]> conditionalIterator = vectorOfConditionalSpikeTimes.iterator();
for (double[] destSpikeTimes : vectorOfDestinationSpikeTimes) {
double[] sourceSpikeTimes = sourceIterator.next();
double[][] conditionalSpikeTimes = conditionalIterator.next();
processEventsFromSpikingTimeSeries(sourceSpikeTimes, destSpikeTimes, conditionalSpikeTimes, ConditioningEmbeddingsFromSpikes,
jointEmbeddingsFromSpikes, ConditioningEmbeddingsFromSamples, jointEmbeddingsFromSamples,
processTimeLengths);
}
} else {
for (double[] destSpikeTimes : vectorOfDestinationSpikeTimes) {
double[] sourceSpikeTimes = sourceIterator.next();
double[][] conditionalSpikeTimes = new double[][] {};
processEventsFromSpikingTimeSeries(sourceSpikeTimes, destSpikeTimes, conditionalSpikeTimes, ConditioningEmbeddingsFromSpikes,
jointEmbeddingsFromSpikes, ConditioningEmbeddingsFromSamples, jointEmbeddingsFromSamples,
processTimeLengths);
conditionalIterator = vectorOfConditionalSpikeTimes.iterator();
}
int timeSeriesIndex = 0;
for (double[] destSpikeTimes : vectorOfDestinationSpikeTimes) {
double[] sourceSpikeTimes = sourceIterator.next();
double[][] conditionalSpikeTimes = null;
if (vectorOfConditionalSpikeTimes.size() > 0) {
conditionalSpikeTimes = conditionalIterator.next();
} else {
conditionalSpikeTimes = new double[][] {};
}
processEventsFromSpikingTimeSeries(sourceSpikeTimes, destSpikeTimes, conditionalSpikeTimes, conditioningEmbeddingsFromSpikes,
jointEmbeddingsFromSpikes, conditioningEmbeddingsFromSamples, jointEmbeddingsFromSamples,
numSamplesMultiplier);
}
// Convert the vectors to arrays so that they can be put in the trees
double[][] arrayedTargetEmbeddingsFromSpikes = new double[ConditioningEmbeddingsFromSpikes.size()][k];
double[][] arrayedJointEmbeddingsFromSpikes = new double[ConditioningEmbeddingsFromSpikes.size()][k + l];
for (int i = 0; i < ConditioningEmbeddingsFromSpikes.size(); i++) {
arrayedTargetEmbeddingsFromSpikes[i] = ConditioningEmbeddingsFromSpikes.elementAt(i);
double[][] arrayedTargetEmbeddingsFromSpikes = new double[conditioningEmbeddingsFromSpikes.size()][k];
double[][] arrayedJointEmbeddingsFromSpikes = new double[conditioningEmbeddingsFromSpikes.size()][k + l];
for (int i = 0; i < conditioningEmbeddingsFromSpikes.size(); i++) {
arrayedTargetEmbeddingsFromSpikes[i] = conditioningEmbeddingsFromSpikes.elementAt(i);
arrayedJointEmbeddingsFromSpikes[i] = jointEmbeddingsFromSpikes.elementAt(i);
}
double[][] arrayedTargetEmbeddingsFromSamples = new double[ConditioningEmbeddingsFromSamples.size()][k];
double[][] arrayedJointEmbeddingsFromSamples = new double[ConditioningEmbeddingsFromSamples.size()][k + l];
for (int i = 0; i < ConditioningEmbeddingsFromSamples.size(); i++) {
arrayedTargetEmbeddingsFromSamples[i] = ConditioningEmbeddingsFromSamples.elementAt(i);
double[][] arrayedTargetEmbeddingsFromSamples = new double[conditioningEmbeddingsFromSamples.size()][k];
double[][] arrayedJointEmbeddingsFromSamples = new double[conditioningEmbeddingsFromSamples.size()][k + l];
for (int i = 0; i < conditioningEmbeddingsFromSamples.size(); i++) {
arrayedTargetEmbeddingsFromSamples[i] = conditioningEmbeddingsFromSamples.elementAt(i);
arrayedJointEmbeddingsFromSamples[i] = jointEmbeddingsFromSamples.elementAt(i);
}
@@ -372,48 +402,48 @@ public class TransferEntropyCalculatorSpikingIntegration implements TransferEntr
kdTreeConditioningAtSamples.setNormType(normType);
}
protected void makeEmbeddingsAtPoints(double[] pointsAtWhichToMakeEmbeddings, int index_of_first_point_to_use,
protected void makeEmbeddingsAtPoints(double[] pointsAtWhichToMakeEmbeddings, int indexOfFirstPointToUse,
double[] sourceSpikeTimes, double[] destSpikeTimes,
double[][] conditionalSpikeTimes,
Vector<double[]> ConditioningEmbeddings,
Vector<double[]> conditioningEmbeddings,
Vector<double[]> jointEmbeddings) {
Random random = new Random();
int embedding_point_index = index_of_first_point_to_use;
int most_recent_dest_index = k;
int most_recent_source_index = l;
int[] most_recent_conditioning_indices = Arrays.copyOf(condEmbedDims, condEmbedDims.length);
int total_length_of_conditioning_embeddings = 0;
int embeddingPointIndex = indexOfFirstPointToUse;
int mostRecentDestIndex = k;
int mostRecentSourceIndex = l;
int[] mostRecentConditioningIndices = Arrays.copyOf(condEmbedDims, condEmbedDims.length);
int totalLengthOfConditioningEmbeddings = 0;
for (int i = 0; i < condEmbedDims.length; i++) {
total_length_of_conditioning_embeddings += condEmbedDims[i];
totalLengthOfConditioningEmbeddings += condEmbedDims[i];
}
// Loop through the points at which embeddings need to be made
for (; embedding_point_index < pointsAtWhichToMakeEmbeddings.length; embedding_point_index++) {
for (; embeddingPointIndex < pointsAtWhichToMakeEmbeddings.length; embeddingPointIndex++) {
// Advance the tracker of the most recent dest index
while (most_recent_dest_index < (destSpikeTimes.length - 1)) {
if (destSpikeTimes[most_recent_dest_index + 1] < pointsAtWhichToMakeEmbeddings[embedding_point_index]) {
most_recent_dest_index++;
while (mostRecentDestIndex < (destSpikeTimes.length - 1)) {
if (destSpikeTimes[mostRecentDestIndex + 1] < pointsAtWhichToMakeEmbeddings[embeddingPointIndex]) {
mostRecentDestIndex++;
} else {
break;
}
}
// Do the same for the most recent source index
while (most_recent_source_index < (sourceSpikeTimes.length - 1)) {
if (sourceSpikeTimes[most_recent_source_index
+ 1] < pointsAtWhichToMakeEmbeddings[embedding_point_index]) {
most_recent_source_index++;
while (mostRecentSourceIndex < (sourceSpikeTimes.length - 1)) {
if (sourceSpikeTimes[mostRecentSourceIndex
+ 1] < pointsAtWhichToMakeEmbeddings[embeddingPointIndex]) {
mostRecentSourceIndex++;
} else {
break;
}
}
// Now advance the trackers for the most recent conditioning indices
for (int j = 0; j < most_recent_conditioning_indices.length; j++) {
while (most_recent_conditioning_indices[j] < (conditionalSpikeTimes[j].length - 1)) {
if (conditionalSpikeTimes[j][most_recent_conditioning_indices[j] + 1] < pointsAtWhichToMakeEmbeddings[embedding_point_index]) {
most_recent_conditioning_indices[j]++;
for (int j = 0; j < mostRecentConditioningIndices.length; j++) {
while (mostRecentConditioningIndices[j] < (conditionalSpikeTimes[j].length - 1)) {
if (conditionalSpikeTimes[j][mostRecentConditioningIndices[j] + 1] < pointsAtWhichToMakeEmbeddings[embeddingPointIndex]) {
mostRecentConditioningIndices[j]++;
} else {
break;
}
@@ -421,45 +451,45 @@ public class TransferEntropyCalculatorSpikingIntegration implements TransferEntr
}
double[] conditioningPast = new double[k + total_length_of_conditioning_embeddings];
double[] jointPast = new double[k + total_length_of_conditioning_embeddings + l];
double[] conditioningPast = new double[k + totalLengthOfConditioningEmbeddings];
double[] jointPast = new double[k + totalLengthOfConditioningEmbeddings + l];
// Add the embedding intervals from the target process
conditioningPast[0] = pointsAtWhichToMakeEmbeddings[embedding_point_index] - destSpikeTimes[most_recent_dest_index];
jointPast[0] = pointsAtWhichToMakeEmbeddings[embedding_point_index]
- destSpikeTimes[most_recent_dest_index];
conditioningPast[0] = pointsAtWhichToMakeEmbeddings[embeddingPointIndex] - destSpikeTimes[mostRecentDestIndex];
jointPast[0] = pointsAtWhichToMakeEmbeddings[embeddingPointIndex]
- destSpikeTimes[mostRecentDestIndex];
for (int i = 1; i < k; i++) {
conditioningPast[i] = destSpikeTimes[most_recent_dest_index - i + 1]
- destSpikeTimes[most_recent_dest_index - i];
jointPast[i] = destSpikeTimes[most_recent_dest_index - i + 1]
- destSpikeTimes[most_recent_dest_index - i];
conditioningPast[i] = destSpikeTimes[mostRecentDestIndex - i + 1]
- destSpikeTimes[mostRecentDestIndex - i];
jointPast[i] = destSpikeTimes[mostRecentDestIndex - i + 1]
- destSpikeTimes[mostRecentDestIndex - i];
}
// Add the embedding intervals from the conditional processes
int index_of_next_embedding_interval = k;
int indexOfNextEmbeddingInterval = k;
for (int i = 0; i < condEmbedDims.length; i++) {
conditioningPast[index_of_next_embedding_interval] =
pointsAtWhichToMakeEmbeddings[embedding_point_index] - conditionalSpikeTimes[i][most_recent_conditioning_indices[i]];
jointPast[index_of_next_embedding_interval] =
pointsAtWhichToMakeEmbeddings[embedding_point_index] - conditionalSpikeTimes[i][most_recent_conditioning_indices[i]];
index_of_next_embedding_interval += 1;
conditioningPast[indexOfNextEmbeddingInterval] =
pointsAtWhichToMakeEmbeddings[embeddingPointIndex] - conditionalSpikeTimes[i][mostRecentConditioningIndices[i]];
jointPast[indexOfNextEmbeddingInterval] =
pointsAtWhichToMakeEmbeddings[embeddingPointIndex] - conditionalSpikeTimes[i][mostRecentConditioningIndices[i]];
indexOfNextEmbeddingInterval += 1;
for (int j = 1; j < condEmbedDims[i]; j++) {
conditioningPast[index_of_next_embedding_interval] =
conditionalSpikeTimes[i][most_recent_conditioning_indices[i] - j + 1] -
conditionalSpikeTimes[i][most_recent_conditioning_indices[i] - j];
jointPast[index_of_next_embedding_interval] =
conditionalSpikeTimes[i][most_recent_conditioning_indices[i] - j + 1] -
conditionalSpikeTimes[i][most_recent_conditioning_indices[i] - j];
index_of_next_embedding_interval += 1;
conditioningPast[indexOfNextEmbeddingInterval] =
conditionalSpikeTimes[i][mostRecentConditioningIndices[i] - j + 1] -
conditionalSpikeTimes[i][mostRecentConditioningIndices[i] - j];
jointPast[indexOfNextEmbeddingInterval] =
conditionalSpikeTimes[i][mostRecentConditioningIndices[i] - j + 1] -
conditionalSpikeTimes[i][mostRecentConditioningIndices[i] - j];
indexOfNextEmbeddingInterval += 1;
}
}
// Add the embedding intervals from the source process (this only gets added to the joint embeddings)
jointPast[k + total_length_of_conditioning_embeddings] = pointsAtWhichToMakeEmbeddings[embedding_point_index]
- sourceSpikeTimes[most_recent_source_index];
jointPast[k + totalLengthOfConditioningEmbeddings] = pointsAtWhichToMakeEmbeddings[embeddingPointIndex]
- sourceSpikeTimes[mostRecentSourceIndex];
for (int i = 1; i < l; i++) {
jointPast[k + total_length_of_conditioning_embeddings + i] = sourceSpikeTimes[most_recent_source_index - i + 1]
- sourceSpikeTimes[most_recent_source_index - i];
jointPast[k + totalLengthOfConditioningEmbeddings + i] = sourceSpikeTimes[mostRecentSourceIndex - i + 1]
- sourceSpikeTimes[mostRecentSourceIndex - i];
}
// Add Gaussian noise, if necessary
@@ -472,51 +502,85 @@ public class TransferEntropyCalculatorSpikingIntegration implements TransferEntr
}
}
ConditioningEmbeddings.add(conditioningPast);
conditioningEmbeddings.add(conditioningPast);
jointEmbeddings.add(jointPast);
}
}
protected void processEventsFromSpikingTimeSeries(double[] sourceSpikeTimes, double[] destSpikeTimes, double[][] conditionalSpikeTimes,
Vector<double[]> ConditioningEmbeddingsFromSpikes, Vector<double[]> jointEmbeddingsFromSpikes,
Vector<double[]> ConditioningEmbeddingsFromSamples, Vector<double[]> jointEmbeddingsFromSamples,
Vector<Double> processTimeLengths)
throws Exception {
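/**
 * Find the index of the first target spike at which full history embeddings can be constructed
 * for the target, source and conditional processes. Optionally records the usable recording
 * length in processTimeLengths (skipped when resampling for surrogates).
 */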
protected int getFirstDestIndex(double[] sourceSpikeTimes, double[] destSpikeTimes, double[][] conditionalSpikeTimes, boolean setProcessTimeLengths)
throws Exception {
// First sort the spike times in case they were not properly in ascending order:
Arrays.sort(sourceSpikeTimes);
Arrays.sort(destSpikeTimes);
int first_target_index_of_embedding = k;
while (destSpikeTimes[first_target_index_of_embedding] <= sourceSpikeTimes[l - 1]) {
first_target_index_of_embedding++;
int firstTargetIndexOfEmbedding = k;
while (destSpikeTimes[firstTargetIndexOfEmbedding] <= sourceSpikeTimes[l - 1]) {
firstTargetIndexOfEmbedding++;
}
if (conditionalSpikeTimes.length != condEmbedDims.length) {
throw new Exception("Number of conditional embedding lengths does not match the number of conditional processes");
}
for (int i = 0; i < conditionalSpikeTimes.length; i++) {
while (destSpikeTimes[first_target_index_of_embedding] <= conditionalSpikeTimes[i][condEmbedDims[i]]) {
first_target_index_of_embedding++;
while (destSpikeTimes[firstTargetIndexOfEmbedding] <= conditionalSpikeTimes[i][condEmbedDims[i]]) {
firstTargetIndexOfEmbedding++;
}
}
//processTimeLengths.add(destSpikeTimes[sourceSpikeTimes.length - 1] - destSpikeTimes[first_target_index_of_embedding]);
processTimeLengths.add(destSpikeTimes[destSpikeTimes.length - 1] - destSpikeTimes[first_target_index_of_embedding]);
// We don't want to reset these lengths when resampling for surrogates
if (setProcessTimeLengths) {
processTimeLengths.add(destSpikeTimes[destSpikeTimes.length - 1] - destSpikeTimes[firstTargetIndexOfEmbedding]);
}
double sample_lower_bound = destSpikeTimes[first_target_index_of_embedding];
double sample_upper_bound = destSpikeTimes[destSpikeTimes.length - 1];
int num_samples = (int) Math.round(num_samples_multiplier * (destSpikeTimes.length - first_target_index_of_embedding + 1));
return firstTargetIndexOfEmbedding;
}
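/**
 * Draw uniformly distributed sample times between the first usable target spike and the last
 * target spike, at actualNumSamplesMultiplier times the number of usable target spikes,
 * returned in ascending order.
 */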
protected double[] generateRandomSampleTimes(double[] sourceSpikeTimes, double[] destSpikeTimes, double[][] conditionalSpikeTimes,
double actualNumSamplesMultiplier, int firstTargetIndexOfEmbedding) {
double sampleLowerBound = destSpikeTimes[firstTargetIndexOfEmbedding];
double sampleUpperBound = destSpikeTimes[destSpikeTimes.length - 1];
int num_samples = (int) Math.round(actualNumSamplesMultiplier * (destSpikeTimes.length - firstTargetIndexOfEmbedding + 1));
double[] randomSampleTimes = new double[num_samples];
Random rand = new Random();
for (int i = 0; i < randomSampleTimes.length; i++) {
randomSampleTimes[i] = sample_lower_bound + rand.nextDouble() * (sample_upper_bound - sample_lower_bound);
randomSampleTimes[i] = sampleLowerBound + rand.nextDouble() * (sampleUpperBound - sampleLowerBound);
}
Arrays.sort(randomSampleTimes);
makeEmbeddingsAtPoints(destSpikeTimes, first_target_index_of_embedding, sourceSpikeTimes, destSpikeTimes, conditionalSpikeTimes,
ConditioningEmbeddingsFromSpikes, jointEmbeddingsFromSpikes);
return randomSampleTimes;
}
protected void processEventsFromSpikingTimeSeries(double[] sourceSpikeTimes, double[] destSpikeTimes, double[][] conditionalSpikeTimes,
Vector<double[]> conditioningEmbeddingsFromSpikes, Vector<double[]> jointEmbeddingsFromSpikes,
Vector<double[]> conditioningEmbeddingsFromSamples, Vector<double[]> jointEmbeddingsFromSamples,
double actualNumSamplesMultiplier)
throws Exception {
int firstTargetIndexOfEmbedding = getFirstDestIndex(sourceSpikeTimes, destSpikeTimes, conditionalSpikeTimes, true);
double[] randomSampleTimes = generateRandomSampleTimes(sourceSpikeTimes, destSpikeTimes, conditionalSpikeTimes, actualNumSamplesMultiplier, firstTargetIndexOfEmbedding);
makeEmbeddingsAtPoints(destSpikeTimes, firstTargetIndexOfEmbedding, sourceSpikeTimes, destSpikeTimes, conditionalSpikeTimes,
conditioningEmbeddingsFromSpikes, jointEmbeddingsFromSpikes);
makeEmbeddingsAtPoints(randomSampleTimes, 0, sourceSpikeTimes, destSpikeTimes, conditionalSpikeTimes,
ConditioningEmbeddingsFromSamples, jointEmbeddingsFromSamples);
conditioningEmbeddingsFromSamples, jointEmbeddingsFromSamples);
}
/*
 * As above, but only constructs embeddings at the random sample points (used for
 * generating the resampled embeddings needed by the surrogate calculations).
 */
protected void processEventsFromSpikingTimeSeries(double[] sourceSpikeTimes, double[] destSpikeTimes, double[][] conditionalSpikeTimes,
Vector<double[]> conditioningEmbeddingsFromSamples, Vector<double[]> jointEmbeddingsFromSamples,
double actualNumSamplesMultiplier)
throws Exception {
int firstTargetIndexOfEmbedding = getFirstDestIndex(sourceSpikeTimes, destSpikeTimes, conditionalSpikeTimes, false);
double[] randomSampleTimes = generateRandomSampleTimes(sourceSpikeTimes, destSpikeTimes, conditionalSpikeTimes, actualNumSamplesMultiplier, firstTargetIndexOfEmbedding);
makeEmbeddingsAtPoints(randomSampleTimes, 0, sourceSpikeTimes, destSpikeTimes, conditionalSpikeTimes,
conditioningEmbeddingsFromSamples, jointEmbeddingsFromSamples);
}
/*
@@ -553,28 +617,31 @@ public class TransferEntropyCalculatorSpikingIntegration implements TransferEntr
return new distanceAndNumPoints(maxDistance, i);
}
/*
* (non-Javadoc)
*
* @see infodynamics.measures.spiking.TransferEntropyCalculatorSpiking#
* computeAverageLocalOfObservations()
*/
@Override
public double computeAverageLocalOfObservations() throws Exception {
return computeAverageLocalOfObservations(kdTreeJointAtSpikes, jointEmbeddingsFromSpikes);
}
/*
* We take the actual joint tree at spikes (along with the associated embeddings) as an argument, as we will need to swap these out when
* computing surrogates.
*/
public double computeAverageLocalOfObservations(KdTree actualKdTreeJointAtSpikes, Vector<double[]> actualJointEmbeddingsFromSpikes) throws Exception {
double currentSum = 0;
for (int i = 0; i < ConditioningEmbeddingsFromSpikes.size(); i++) {
for (int i = 0; i < conditioningEmbeddingsFromSpikes.size(); i++) {
double radiusJointSpikes = kdTreeJointAtSpikes.findKNearestNeighbours(Knns, i).poll().norms[0];
double radiusJointSpikes = actualKdTreeJointAtSpikes.findKNearestNeighbours(Knns, i).poll().norms[0];
double radiusJointSamples = kdTreeJointAtSamples.findKNearestNeighbours(Knns,
new double[][] { jointEmbeddingsFromSpikes.elementAt(i) }).poll().norms[0];
new double[][] { actualJointEmbeddingsFromSpikes.elementAt(i) }).poll().norms[0];
/*
The algorithm specified in box 1 of doi.org/10.1371/journal.pcbi.1008054 specifies finding the maximum of the two radii
just calculated and then redoing the searches in both sets at this radius. In this implementation, however, we make use
of the fact that one radius is equal to the maximum, and so only one search needs to be redone.
*/
double eps = 0.01;
double eps = 0.0;
// Need variables for the number of neighbours as this is now variable within the maximum radius
int kJointSpikes = 0;
int kJointSamples = 0;
@@ -587,11 +654,11 @@ public class TransferEntropyCalculatorSpikingIntegration implements TransferEntr
int[] indicesWithinR = new int[jointEmbeddingsFromSamples.size()];
boolean[] isWithinR = new boolean[jointEmbeddingsFromSamples.size()];
kdTreeJointAtSamples.findPointsWithinR(radiusJointSpikes + eps,
new double[][] { jointEmbeddingsFromSpikes.elementAt(i) },
new double[][] { actualJointEmbeddingsFromSpikes.elementAt(i) },
true,
isWithinR,
indicesWithinR);
distanceAndNumPoints temp = findMaxDistanceAndNumPointsFromIndices(jointEmbeddingsFromSpikes.elementAt(i), indicesWithinR,
distanceAndNumPoints temp = findMaxDistanceAndNumPointsFromIndices(actualJointEmbeddingsFromSpikes.elementAt(i), indicesWithinR,
jointEmbeddingsFromSamples);
kJointSamples = temp.numPoints;
radiusJointSamples = temp.distance;
@@ -603,13 +670,13 @@ public class TransferEntropyCalculatorSpikingIntegration implements TransferEntr
kJointSamples = Knns;
int[] indicesWithinR = new int[jointEmbeddingsFromSamples.size()];
boolean[] isWithinR = new boolean[jointEmbeddingsFromSamples.size()];
kdTreeJointAtSpikes.findPointsWithinR(radiusJointSamples + eps,
new double[][] { jointEmbeddingsFromSpikes.elementAt(i) },
actualKdTreeJointAtSpikes.findPointsWithinR(radiusJointSamples + eps,
new double[][] { actualJointEmbeddingsFromSpikes.elementAt(i) },
true,
isWithinR,
indicesWithinR);
distanceAndNumPoints temp = findMaxDistanceAndNumPointsFromIndices(jointEmbeddingsFromSpikes.elementAt(i), indicesWithinR,
jointEmbeddingsFromSpikes);
distanceAndNumPoints temp = findMaxDistanceAndNumPointsFromIndices(actualJointEmbeddingsFromSpikes.elementAt(i), indicesWithinR,
actualJointEmbeddingsFromSpikes);
// -1 due to the point itself being in the set
kJointSpikes = temp.numPoints - 1;
radiusJointSpikes = temp.distance;
@@ -618,58 +685,160 @@ public class TransferEntropyCalculatorSpikingIntegration implements TransferEntr
// Repeat the above steps, but in the conditioning (rather than joint) space.
double radiusConditioningSpikes = kdTreeConditioningAtSpikes.findKNearestNeighbours(Knns, i).poll().norms[0];
double radiusConditioningSamples = kdTreeConditioningAtSamples.findKNearestNeighbours(Knns,
new double[][] { ConditioningEmbeddingsFromSpikes.elementAt(i) }).poll().norms[0];
new double[][] { conditioningEmbeddingsFromSpikes.elementAt(i) }).poll().norms[0];
int kConditioningSpikes = 0;
int kConditioningSamples = 0;
if (radiusConditioningSpikes >= radiusConditioningSamples) {
kConditioningSpikes = Knns;
int[] indicesWithinR = new int[ConditioningEmbeddingsFromSamples.size()];
boolean[] isWithinR = new boolean[ConditioningEmbeddingsFromSamples.size()];
int[] indicesWithinR = new int[conditioningEmbeddingsFromSamples.size()];
boolean[] isWithinR = new boolean[conditioningEmbeddingsFromSamples.size()];
kdTreeConditioningAtSamples.findPointsWithinR(radiusConditioningSpikes + eps,
new double[][] { ConditioningEmbeddingsFromSpikes.elementAt(i) },
new double[][] { conditioningEmbeddingsFromSpikes.elementAt(i) },
true,
isWithinR,
indicesWithinR);
distanceAndNumPoints temp = findMaxDistanceAndNumPointsFromIndices(ConditioningEmbeddingsFromSpikes.elementAt(i), indicesWithinR,
ConditioningEmbeddingsFromSamples);
distanceAndNumPoints temp = findMaxDistanceAndNumPointsFromIndices(conditioningEmbeddingsFromSpikes.elementAt(i), indicesWithinR,
conditioningEmbeddingsFromSamples);
kConditioningSamples = temp.numPoints;
radiusConditioningSamples = temp.distance;
} else {
kConditioningSamples = Knns;
int[] indicesWithinR = new int[ConditioningEmbeddingsFromSamples.size()];
boolean[] isWithinR = new boolean[ConditioningEmbeddingsFromSamples.size()];
int[] indicesWithinR = new int[conditioningEmbeddingsFromSamples.size()];
boolean[] isWithinR = new boolean[conditioningEmbeddingsFromSamples.size()];
kdTreeConditioningAtSpikes.findPointsWithinR(radiusConditioningSamples + eps,
new double[][] { ConditioningEmbeddingsFromSpikes.elementAt(i) },
new double[][] { conditioningEmbeddingsFromSpikes.elementAt(i) },
true,
isWithinR,
indicesWithinR);
distanceAndNumPoints temp = findMaxDistanceAndNumPointsFromIndices(ConditioningEmbeddingsFromSpikes.elementAt(i), indicesWithinR,
ConditioningEmbeddingsFromSpikes);
distanceAndNumPoints temp = findMaxDistanceAndNumPointsFromIndices(conditioningEmbeddingsFromSpikes.elementAt(i), indicesWithinR,
conditioningEmbeddingsFromSpikes);
// -1 due to the point itself being in the set
kConditioningSpikes = temp.numPoints - 1;
radiusConditioningSpikes = temp.distance;
}
/*
* TODO
* The KdTree class defaults to the squared euclidean distance when the euclidean norm is specified. This is fine for Kraskov estimators
* (as the radii are never used, just the numbers of points within radii). It causes problems here though, as we do use the radii and the
* squared euclidean distance is not a distance metric. We get around this by just taking the square root here, but it might be better to
* fix this in the KdTree class.
*/
if (normType == EuclideanUtils.NORM_EUCLIDEAN) {
radiusJointSpikes = Math.sqrt(radiusJointSpikes);
radiusJointSamples = Math.sqrt(radiusJointSamples);
radiusConditioningSpikes = Math.sqrt(radiusConditioningSpikes);
radiusConditioningSamples = Math.sqrt(radiusConditioningSamples);
}
currentSum += (MathsUtils.digamma(kJointSpikes) - MathsUtils.digamma(kJointSamples) +
((k + l) * (-Math.log(radiusJointSpikes) + Math.log(radiusJointSamples))) -
MathsUtils.digamma(kConditioningSpikes) + MathsUtils.digamma(kConditioningSamples) +
+ (k * (Math.log(radiusConditioningSpikes) - Math.log(radiusConditioningSamples))));
if (Double.isNaN(currentSum)) {
throw new Exception(kJointSpikes + " " + kJointSamples + " " + kConditioningSpikes + " " + kConditioningSamples + "\n" +
radiusJointSpikes + " " + radiusJointSamples + " " + radiusConditioningSpikes + " " + radiusConditioningSamples);
throw new Exception("NaN encountered in TE calculation");
}
}
// Normalise by time
double time_sum = 0;
double timeSum = 0;
for (Double time : processTimeLengths) {
time_sum += time;
timeSum += time;
}
currentSum /= time_sum;
currentSum /= timeSum;
return currentSum;
}
@Override
public EmpiricalMeasurementDistribution computeSignificance(int numPermutationsToCheck, double estimatedValue) throws Exception {
return computeSignificance(numPermutationsToCheck,
estimatedValue,
System.currentTimeMillis());
}
@Override
public EmpiricalMeasurementDistribution computeSignificance(int numPermutationsToCheck,
double estimatedValue, long randomSeed) throws Exception{
Random random = new Random(randomSeed);
double[] surrogateTEValues = new double[numPermutationsToCheck];
for (int permutationNumber = 0; permutationNumber < numPermutationsToCheck; permutationNumber++) {
Vector<double[]> resampledConditioningEmbeddingsFromSamples = new Vector<double[]>();
Vector<double[]> resampledJointEmbeddingsFromSamples = new Vector<double[]>();
// Send all of the observations through:
Iterator<double[]> sourceIterator = vectorOfSourceSpikeTimes.iterator();
Iterator<double[][]> conditionalIterator = null;
if (vectorOfConditionalSpikeTimes.size() > 0) {
conditionalIterator = vectorOfConditionalSpikeTimes.iterator();
}
int timeSeriesIndex = 0;
for (double[] destSpikeTimes : vectorOfDestinationSpikeTimes) {
double[] sourceSpikeTimes = sourceIterator.next();
double[][] conditionalSpikeTimes = null;
if (vectorOfConditionalSpikeTimes.size() > 0) {
conditionalSpikeTimes = conditionalIterator.next();
} else {
conditionalSpikeTimes = new double[][] {};
}
processEventsFromSpikingTimeSeries(sourceSpikeTimes, destSpikeTimes, conditionalSpikeTimes,
resampledConditioningEmbeddingsFromSamples, resampledJointEmbeddingsFromSamples,
surrogateNumSamplesMultiplier);
}
// Convert the vectors to arrays so that they can be put in the trees
double[][] arrayedResampledConditioningEmbeddingsFromSamples = new double[resampledConditioningEmbeddingsFromSamples.size()][k];
for (int i = 0; i < resampledConditioningEmbeddingsFromSamples.size(); i++) {
arrayedResampledConditioningEmbeddingsFromSamples[i] = resampledConditioningEmbeddingsFromSamples.elementAt(i);
}
KdTree resampledKdTreeConditioningAtSamples = new KdTree(arrayedResampledConditioningEmbeddingsFromSamples);
resampledKdTreeConditioningAtSamples.setNormType(normType);
Vector<double[]> conditionallyPermutedJointEmbeddingsFromSpikes = new Vector<double[]>(jointEmbeddingsFromSpikes);
Vector<Integer> usedIndices = new Vector<Integer>();
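// Build conditionally-permuted joint embeddings: for each embedding observed at a target spike,
// find its kPerm nearest neighbours (in the conditioning space) among the resampled sample points,
// prefer one not already used, and swap that sample's source-history intervals into the joint
// embedding. The target and conditional components are left unchanged.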
for (int i = 0; i < conditionallyPermutedJointEmbeddingsFromSpikes.size(); i++) {
PriorityQueue<NeighbourNodeData> neighbours =
resampledKdTreeConditioningAtSamples.findKNearestNeighbours(kPerm,
new double[][] {conditioningEmbeddingsFromSpikes.elementAt(i)});
ArrayList<Integer> foundIndices = new ArrayList<Integer>();
for (int j = 0; j < kPerm; j++) {
foundIndices.add(neighbours.poll().sampleIndex);
}
ArrayList<Integer> prunedIndices = new ArrayList<Integer>(foundIndices);
prunedIndices.removeAll(usedIndices);
int chosenIndex = 0;
if (prunedIndices.size() > 0) {
chosenIndex = prunedIndices.get(random.nextInt(prunedIndices.size()));
} else {
chosenIndex = foundIndices.get(random.nextInt(foundIndices.size()));
}
usedIndices.add(chosenIndex);
int embeddingLength = conditionallyPermutedJointEmbeddingsFromSpikes.elementAt(i).length;
for(int j = 0; j < l; j++) {
conditionallyPermutedJointEmbeddingsFromSpikes.elementAt(i)[embeddingLength - l + j] =
resampledJointEmbeddingsFromSamples.elementAt(chosenIndex)[embeddingLength - l + j];
}
}
double[][] arrayedConditionallyPermutedJointEmbeddingsFromSpikes = new double[conditionallyPermutedJointEmbeddingsFromSpikes.size()][];
for (int i = 0; i < conditionallyPermutedJointEmbeddingsFromSpikes.size(); i++) {
arrayedConditionallyPermutedJointEmbeddingsFromSpikes[i] = conditionallyPermutedJointEmbeddingsFromSpikes.elementAt(i);
}
KdTree conditionallyPermutedKdTreeJointFromSpikes = new KdTree(arrayedConditionallyPermutedJointEmbeddingsFromSpikes);
conditionallyPermutedKdTreeJointFromSpikes.setNormType(normType);
surrogateTEValues[permutationNumber] = computeAverageLocalOfObservations(conditionallyPermutedKdTreeJointFromSpikes,
conditionallyPermutedJointEmbeddingsFromSpikes);
}
return new EmpiricalMeasurementDistribution(surrogateTEValues, estimatedValue);
}
/*
* (non-Javadoc)
*
@@ -682,29 +851,6 @@ public class TransferEntropyCalculatorSpikingIntegration implements TransferEntr
return null;
}
/*
* (non-Javadoc)
*
* @see infodynamics.measures.spiking.TransferEntropyCalculatorSpiking#
* computeSignificance(int)
*/
@Override
public EmpiricalMeasurementDistribution computeSignificance(int numPermutationsToCheck) throws Exception {
// TODO Auto-generated method stub
return null;
}
/*
* (non-Javadoc)
*
* @see infodynamics.measures.spiking.TransferEntropyCalculatorSpiking#
* computeSignificance(int[][])
*/
@Override
public EmpiricalMeasurementDistribution computeSignificance(int[][] newOrderings) throws Exception {
// TODO Auto-generated method stub
return null;
}
/*
* (non-Javadoc)

View File

@@ -275,10 +275,10 @@ public class KdTree extends NearestNeighbourSearcher {
// Could remove these since the code is now functional,
// but may be better to leave them in just in case the code breaks:
if (leftIndex > leftStart + leftNumPoints) {
throw new RuntimeException("Exceeded expected number of points on left");
throw new RuntimeException("Exceeded expected number of points on left - likely had a NaN in the data");
}
if (rightIndex > rightStart + rightNumPoints) {
throw new RuntimeException("Exceeded expected number of points on right");
throw new RuntimeException("Exceeded expected number of points on right - likely had a NaN in the data");
}
// Update the pointer for the sorted indices for this dimension,
// and keep the new temporary array
@@ -337,7 +337,7 @@ public class KdTree extends NearestNeighbourSearcher {
double difference = x1[d] - x2[d];
distance += difference * difference;
}
return Math.sqrt(distance);
return distance;
}
}
@@ -359,7 +359,6 @@ public class KdTree extends NearestNeighbourSearcher {
*/
public final static double normWithAbort(double[] x1, double[] x2,
double limit, int normToUse) {
double distance = 0.0;
switch (normToUse) {
case EuclideanUtils.NORM_MAX_NORM:
@@ -380,17 +379,15 @@ public class KdTree extends NearestNeighbourSearcher {
return distance;
// case EuclideanUtils.NORM_EUCLIDEAN_SQUARED:
default:
// The limit is typically the radius r, so it must be squared for comparison with the squared distance
limit = limit * limit;
// Inlined from {@link EuclideanUtils}:
for (int d = 0; d < x1.length; d++) {
double difference = x1[d] - x2[d];
distance += difference * difference;
if (distance > limit) {
return Double.POSITIVE_INFINITY;
return Double.POSITIVE_INFINITY;
}
}
return Math.sqrt(distance);
return distance;
}
}
@@ -1308,10 +1305,9 @@ public class KdTree extends NearestNeighbourSearcher {
if (normTypeToUse == EuclideanUtils.NORM_MAX_NORM) {
absDistOnThisDim = (distOnThisDim > 0) ? distOnThisDim : - distOnThisDim;
} else {
absDistOnThisDim = (distOnThisDim > 0) ? distOnThisDim : - distOnThisDim;
// norm type is EuclideanUtils#NORM_EUCLIDEAN_SQUARED
// Track the square distance
//absDistOnThisDim = distOnThisDim;
absDistOnThisDim = distOnThisDim * distOnThisDim;
}
if ((node.indexOfThisPoint != sampleIndex) &&
@@ -2057,7 +2053,7 @@ public class KdTree extends NearestNeighbourSearcher {
KdTreeNode node, int level, double r, boolean allowEqualToR,
boolean[] isWithinR, int[] indicesWithinR,
int nextIndexInIndicesWithinR) {
//System.out.println("foo");
// Point to the correct array for the data at this level
int currentDim = level % totalDimensions;
double[][] data = dimensionToArray[currentDim];
@@ -2072,9 +2068,9 @@ public class KdTree extends NearestNeighbourSearcher {
if (normTypeToUse == EuclideanUtils.NORM_MAX_NORM) {
absDistOnThisDim = (distOnThisDim > 0) ? distOnThisDim : - distOnThisDim;
} else {
absDistOnThisDim = (distOnThisDim > 0) ? distOnThisDim : - distOnThisDim;
// norm type is EuclideanUtils#NORM_EUCLIDEAN_SQUARED
// Track the square distance
absDistOnThisDim = distOnThisDim * distOnThisDim;
}
if ((absDistOnThisDim < r) ||

View File

@@ -27,8 +27,8 @@ import os
import numpy as np
NUM_REPS = 5
NUM_SPIKES = int(5e3)
NUM_REPS = 2
NUM_SPIKES = int(2e3)
NUM_OBSERVATIONS = 2
# Params for canonical example generation
@@ -99,6 +99,8 @@ for i in range(NUM_REPS):
teCalc.finaliseAddObservations();
result = teCalc.computeAverageLocalOfObservations()
print("TE result %.4f nats" % (result,))
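# Significance test: compare the estimated TE against 10 surrogate TE values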
sig = teCalc.computeSignificance(10, result)
print(sig.pValue)
results_poisson[i] = result
print("Summary: mean ", np.mean(results_poisson), " std dev ", np.std(results_poisson))
@@ -111,59 +113,67 @@ print("Summary: mean ", np.mean(results_poisson), " std dev ", np.std(results_poisson))
teCalc = teCalcClass()
teCalc.setProperty("knns", "4")
print("Noisy copy zero TE")
teCalc.setProperty("COND_EMBED_LENGTHS", "2")
teCalc.setProperty("COND_EMBED_LENGTHS", "1")
teCalc.setProperty("k_HISTORY", "2")
teCalc.setProperty("l_HISTORY", "2")
teCalc.setProperty("l_HISTORY", "1")
#teCalc.setProperty("NORM_TYPE", "MAX_NORM")
results_noisy_zero = np.zeros(NUM_REPS)
for i in range(NUM_REPS):
teCalc.startAddObservations()
for j in range(NUM_OBSERVATIONS):
condArray = NUM_SPIKES*np.random.random((1, NUM_SPIKES))
condArray = np.ones((1, NUM_SPIKES)) + 0.05 * np.random.random((1, NUM_SPIKES))
condArray = np.cumsum(condArray, axis = 1)
condArray.sort(axis = 1)
sourceArray = condArray[0, :] + 0.25 + 0.1 * np.random.normal(size = condArray.shape[1])
sourceArray = condArray[0, :] + 0.25 + 0.05 * np.random.normal(size = condArray.shape[1])
sourceArray.sort()
destArray = condArray[0, :] + 0.5 + 0.1 * np.random.normal(size = condArray.shape[1])
destArray = condArray[0, :] + 0.5 + 0.05 * np.random.normal(size = condArray.shape[1])
destArray.sort()
teCalc.addObservations(JArray(JDouble, 1)(sourceArray), JArray(JDouble, 1)(destArray), JArray(JDouble, 2)(condArray))
teCalc.finaliseAddObservations();
result = teCalc.computeAverageLocalOfObservations()
print("TE result %.4f nats" % (result,))
results_poisson[i] = result
print("Summary: mean ", np.mean(results_poisson), " std dev ", np.std(results_poisson))
sig = teCalc.computeSignificance(10, result)
print(sig.pValue)
results_noisy_zero[i] = result
print("Summary: mean ", np.mean(results_noisy_zero), " std dev ", np.std(results_noisy_zero))
teCalc = teCalcClass()
teCalc.setProperty("knns", "4")
print("Noisy copy non-zero TE")
teCalc.setProperty("COND_EMBED_LENGTHS", "2")
teCalc.setProperty("COND_EMBED_LENGTHS", "1")
teCalc.setProperty("k_HISTORY", "2")
teCalc.setProperty("l_HISTORY", "2")
teCalc.setProperty("l_HISTORY", "1")
#teCalc.setProperty("NORM_TYPE", "MAX_NORM")
results_noisy_zero = np.zeros(NUM_REPS)
results_noisy_non_zero = np.zeros(NUM_REPS)
for i in range(NUM_REPS):
teCalc.startAddObservations()
for j in range(NUM_OBSERVATIONS):
sourceArray = NUM_SPIKES*np.random.random(NUM_SPIKES)
sourceArray = np.ones((1, NUM_SPIKES)) + 0.05 * np.random.random((1, NUM_SPIKES))
sourceArray = np.cumsum(sourceArray)
sourceArray.sort()
condArray = sourceArray + 0.25 + 0.1 * np.random.normal(size = sourceArray.shape)
condArray = sourceArray + 0.25 + 0.05 * np.random.normal(size = sourceArray.shape)
condArray.sort()
condArray = np.expand_dims(condArray, 0)
destArray = sourceArray + 0.5 + 0.1 * np.random.normal(size = sourceArray.shape)
destArray = sourceArray + 0.5 + 0.05 * np.random.normal(size = sourceArray.shape)
destArray.sort()
teCalc.addObservations(JArray(JDouble, 1)(sourceArray), JArray(JDouble, 1)(destArray), JArray(JDouble, 2)(condArray))
teCalc.finaliseAddObservations();
result = teCalc.computeAverageLocalOfObservations()
print("TE result %.4f nats" % (result,))
results_poisson[i] = result
print("Summary: mean ", np.mean(results_poisson), " std dev ", np.std(results_poisson))
sig = teCalc.computeSignificance(10, result)
print(sig.pValue)
results_noisy_non_zero[i] = result
print("Summary: mean ", np.mean(results_noisy_non_zero), " std dev ", np.std(results_noisy_non_zero))
print("Canonical example")
teCalc = teCalcClass()
teCalc.setProperty("knns", "4")
teCalc.setProperty("k_HISTORY", "2")
teCalc.setProperty("l_HISTORY", "1")
teCalc.setProperty("k_HISTORY", "1")
teCalc.setProperty("l_HISTORY", "2")
#teCalc.setProperty("NUM_SAMPLES_MULTIPLIER", "1")
#teCalc.setProperty("NORM_TYPE", "MAX_NORM")
@@ -174,4 +184,6 @@ for i in range(NUM_REPS):
result = teCalc.computeAverageLocalOfObservations()
results_canonical[i] = result
print("TE result %.4f nats" % (result,))
sig = teCalc.computeSignificance(10, result)
print(sig.pValue)
print("Summary: mean ", np.mean(results_canonical), " std dev ", np.std(results_canonical))