Line | Hits | Source |
---|---|---|
1 | | /* |
2 | | * Copyright (c) 2003, the JUNG Project and the Regents of the University |
3 | | * of California |
4 | | * All rights reserved. |
5 | | * |
6 | | * This software is open-source under the BSD license; see either |
7 | | * "license.txt" or |
8 | | * http://jung.sourceforge.net/license.txt for a description. |
9 | | * |
10 | | * Created on Feb 18, 2004 |
11 | | */ |
12 | | package edu.uci.ics.jung.statistics; |
13 | | |
14 | | import java.util.Collection; |
15 | | import java.util.Iterator; |
16 | | |
17 | | /** |
18 | | * A utility class for calculating properties of discrete distributions. |
19 | | * Generally, these distributions are represented as arrays of |
20 | | * <code>double</code> values, which are assumed to be normalized |
21 | | * such that the entries in a single array sum to 1. |
22 | | * |
23 | | * @author Joshua O'Madadhain |
24 | | */ |
25 | 0 | public class DiscreteDistribution |
26 | | { |
27 | | |
28 | | /** |
29 | | * Returns the Kullback-Leibler divergence between the |
30 | | * two specified distributions, which must have the same |
31 | | * number of elements. This is defined as |
32 | | * the sum over all <code>i</code> of |
33 | | * <code>dist[i] * Math.log(dist[i] / reference[i])</code>. |
34 | | * Note that this value is not symmetric; see |
35 | | * <code>symmetricKL</code> for a symmetric variant. |
36 | | * @see #symmetricKL(double[], double[]) |
37 | | */ |
38 | | public static double KullbackLeibler(double[] dist, double[] reference) |
39 | | { |
40 | 0 | double distance = 0; |
41 | | |
42 | 0 | checkLengths(dist, reference); |
43 | | |
44 | 0 | for (int i = 0; i < dist.length; i++) |
45 | | { |
46 | 0 | if (dist[i] > 0 && reference[i] > 0) |
47 | 0 | distance += dist[i] * Math.log(dist[i] / reference[i]); |
48 | | } |
49 | 0 | return distance; |
50 | | } |
51 | | |
52 | | /** |
53 | | * Returns <code>KullbackLeibler(dist, reference) + KullbackLeibler(reference, dist)</code>. |
54 | | * @see #KullbackLeibler(double[], double[]) |
55 | | */ |
56 | | public static double symmetricKL(double[] dist, double[] reference) |
57 | | { |
58 | 0 | return KullbackLeibler(dist, reference) |
59 | | + KullbackLeibler(reference, dist); |
60 | | } |
61 | | |
62 | | /** |
63 | | * Returns the squared error between the |
64 | | * two specified distributions, which must have the same |
65 | | * number of elements. This is defined as |
66 | | * the sum over all <code>i</code> of the square of |
67 | | * <code>(dist[i] - reference[i])</code>. |
68 | | */ |
69 | | public static double squaredError(double[] dist, double[] reference) |
70 | | { |
71 | 34 | double error = 0; |
72 | | |
73 | 34 | checkLengths(dist, reference); |
74 | | |
75 | 68 | for (int i = 0; i < dist.length; i++) |
76 | | { |
77 | 34 | double difference = dist[i] - reference[i]; |
78 | 34 | error += difference * difference; |
79 | | } |
80 | 34 | return error; |
81 | | } |
82 | | |
83 | | /** |
84 | | * Returns the cosine similarity between the two |
85 | | * specified distributions, which must have the same number |
86 | | * of elements. The distributions are treated as vectors |
87 | | * in <code>dist.length</code>-dimensional space. |
88 | | * Given the following definitions |
89 | | * <ul> |
90 | | * <li><code>v</code> = the sum over all <code>i</code> of <code>dist[i] * dist[i]</code></li> |
91 | | * <li><code>w</code> = the sum over all <code>i</code> of <code>reference[i] * reference[i]</code></li> |
92 | | * <li><code>vw</code> = the sum over all <code>i</code> of <code>dist[i] * reference[i]</code></li> |
93 | | * </ul> |
94 | | * the value returned is defined as <code>vw / (Math.sqrt(v) * Math.sqrt(w))</code>. |
95 | | */ |
96 | | public static double cosine(double[] dist, double[] reference) |
97 | | { |
98 | 0 | double v_prod = 0; // dot product x*x |
99 | 0 | double w_prod = 0; // dot product y*y |
100 | 0 | double vw_prod = 0; // dot product x*y |
101 | | |
102 | 0 | checkLengths(dist, reference); |
103 | | |
104 | 0 | for (int i = 0; i < dist.length; i++) |
105 | | { |
106 | 0 | vw_prod += dist[i] * reference[i]; |
107 | 0 | v_prod += dist[i] * dist[i]; |
108 | 0 | w_prod += reference[i] * reference[i]; |
109 | | } |
110 | | // cosine similarity of v and w |
111 | 0 | return vw_prod / (Math.sqrt(v_prod) * Math.sqrt(w_prod)); |
112 | | } |
113 | | |
114 | | /** |
115 | | * Returns the entropy of the specified distribution. |
116 | | * High entropy indicates that the distribution is |
117 | | * close to uniform; low entropy indicates that the |
118 | | * distribution is close to a Dirac delta (i.e., if |
119 | | * the probability mass is concentrated at a single |
120 | | * point, this method returns 0). Entropy is defined as |
121 | | * the sum over all <code>i</code> of |
122 | | * <code>-(dist[i] * Math.log(dist[i]))</code>. |
123 | | */ |
124 | | public static double entropy(double[] dist) |
125 | | { |
126 | 0 | double total = 0; |
127 | | |
128 | 0 | for (int i = 0; i < dist.length; i++) |
129 | | { |
130 | 0 | if (dist[i] > 0) |
131 | 0 | total += dist[i] * Math.log(dist[i]); |
132 | | } |
133 | 0 | return -total; |
134 | | } |
135 | | |
136 | | /** |
137 | | * Throws an <code>IllegalArgumentException</code> if the two arrays are not of the same length. |
138 | | */ |
139 | | protected static void checkLengths(double[] dist, double[] reference) |
140 | | { |
141 | 34 | if (dist.length != reference.length) |
142 | 0 | throw new IllegalArgumentException("Arrays must be of the same length"); |
143 | 34 | } |
144 | | |
145 | | /** |
146 | | * Normalizes, with Laplace (additive) smoothing, the specified <code>double</code> |
147 | | * array, so that the values sum to 1 (i.e., can be treated as probabilities). |
148 | | * The effect of the smoothing is to ensure that all entries |
149 | | * are nonzero; effectively, a value of <code>alpha</code> is added to each |
150 | | * entry in the original array prior to normalization. |
151 | | * @param counts the array of values to be smoothed and normalized in place |
152 | | * @param alpha the smoothing constant added to each entry prior to normalization |
153 | | */ |
154 | | public static void normalize(double[] counts, double alpha) |
155 | | { |
156 | 0 | double total_count = 0; |
157 | | |
158 | 0 | for (int i = 0; i < counts.length; i++) |
159 | 0 | total_count += counts[i]; |
160 | | |
161 | 0 | for (int i = 0; i < counts.length; i++) |
162 | 0 | counts[i] = (counts[i] + alpha) |
163 | | / (total_count + counts.length * alpha); |
164 | 0 | } |
165 | | |
166 | | /** |
167 | | * Returns the mean of the specified <code>Collection</code> of |
168 | | * distributions, which are assumed to be normalized arrays of |
169 | | * <code>double</code> values. |
170 | | * @see #mean(double[][]) |
171 | | */ |
172 | | public static double[] mean(Collection distributions) |
173 | | { |
174 | 2 | if (distributions.isEmpty()) |
175 | 0 | throw new IllegalArgumentException("Distribution collection must be non-empty"); |
176 | 2 | Iterator iter = distributions.iterator(); |
177 | 2 | double[] first = (double[])iter.next(); |
178 | 2 | double[][] d_array = new double[distributions.size()][first.length]; |
179 | 2 | d_array[0] = first; |
180 | 5 | for (int i = 1; i < d_array.length; i++) |
181 | 3 | d_array[i] = (double[])iter.next(); |
182 | | |
183 | 2 | return mean(d_array); |
184 | | } |
185 | | |
186 | | /** |
187 | | * Returns the mean of the specified array of distributions, |
188 | | * represented as normalized arrays of <code>double</code> values. |
189 | | * Throws an <code>ArrayIndexOutOfBoundsException</code> if any of the |
190 | | * distribution arrays is shorter than the first. |
191 | | */ |
192 | | public static double[] mean(double[][] distributions) |
193 | | { |
194 | 6 | double[] d_mean = new double[distributions[0].length]; |
195 | 12 | for (int j = 0; j < d_mean.length; j++) |
196 | 6 | d_mean[j] = 0; |
197 | | |
198 | 21 | for (int i = 0; i < distributions.length; i++) |
199 | 30 | for (int j = 0; j < d_mean.length; j++) |
200 | 15 | d_mean[j] += distributions[i][j] / distributions.length; |
201 | | |
202 | 6 | return d_mean; |
203 | | } |
204 | | |
205 | | } |
this report was generated by version 1.0.5 of jcoverage.
copyright © 2003, jcoverage ltd. all rights reserved.
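
For orientation, here is a minimal sketch of how the pairwise measures covered above might be exercised. It calls only the static methods shown in the source listing; the `DistributionDemo` class, the raw counts, and the smoothing constant are hypothetical values chosen for illustration.

```java
import edu.uci.ics.jung.statistics.DiscreteDistribution;

public class DistributionDemo
{
    public static void main(String[] args)
    {
        // Hypothetical raw counts over four categories.
        double[] p = { 5, 3, 1, 1 };
        double[] q = { 2, 2, 3, 3 };

        // Smooth and normalize in place so each array sums to 1.
        DiscreteDistribution.normalize(p, 0.5);
        DiscreteDistribution.normalize(q, 0.5);

        // Pairwise comparisons; both arrays must have the same length.
        System.out.println("entropy(p)    = " + DiscreteDistribution.entropy(p));
        System.out.println("KL(p || q)    = " + DiscreteDistribution.KullbackLeibler(p, q));
        System.out.println("symmetric KL  = " + DiscreteDistribution.symmetricKL(p, q));
        System.out.println("squared error = " + DiscreteDistribution.squaredError(p, q));
        System.out.println("cosine        = " + DiscreteDistribution.cosine(p, q));
    }
}
```

Because `normalize` guarantees nonzero entries, every term of the Kullback-Leibler sum is well defined in this sketch.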
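
A similar sketch for the `mean` overloads: the API predates generics, so the `Collection` is raw and an explicit cast is needed when reading elements back out. The `MeanDemo` class and the particular distributions are illustrative, not part of the library.

```java
import java.util.ArrayList;
import java.util.List;

import edu.uci.ics.jung.statistics.DiscreteDistribution;

public class MeanDemo
{
    public static void main(String[] args)
    {
        // Three already-normalized distributions of equal length (illustrative values).
        List dists = new ArrayList();   // raw type, matching the pre-generics signature
        dists.add(new double[] { 0.5, 0.3, 0.2 });
        dists.add(new double[] { 0.2, 0.6, 0.2 });
        dists.add(new double[] { 0.4, 0.4, 0.2 });

        // Component-wise average; also callable directly as mean(double[][]).
        double[] avg = DiscreteDistribution.mean(dists);
        // avg is approximately { 0.3667, 0.4333, 0.2 }

        // How far the first distribution sits from the average.
        double err = DiscreteDistribution.squaredError((double[]) dists.get(0), avg);
        System.out.println("squared error from mean = " + err);
    }
}
```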