001/** 002 * Copyright (c) 2011, The University of Southampton and the individual contributors. 003 * All rights reserved. 004 * 005 * Redistribution and use in source and binary forms, with or without modification, 006 * are permitted provided that the following conditions are met: 007 * 008 * * Redistributions of source code must retain the above copyright notice, 009 * this list of conditions and the following disclaimer. 010 * 011 * * Redistributions in binary form must reproduce the above copyright notice, 012 * this list of conditions and the following disclaimer in the documentation 013 * and/or other materials provided with the distribution. 014 * 015 * * Neither the name of the University of Southampton nor the names of its 016 * contributors may be used to endorse or promote products derived from this 017 * software without specific prior written permission. 018 * 019 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND 020 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED 021 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 022 * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR 023 * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES 024 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; 025 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON 026 * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 027 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS 028 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
package org.openimaj.image.objectdetection.haar.training;

import java.util.List;

import org.openimaj.image.analysis.algorithm.SummedSqTiltAreaTable;
import org.openimaj.image.objectdetection.haar.HaarFeature;
import org.openimaj.util.array.ArrayUtils;
import org.openimaj.util.function.Operation;
import org.openimaj.util.parallel.Parallel;

/**
 * Training data for haar-cascade learning in which the variance-normalised
 * response of every {@link HaarFeature} on every training example is
 * pre-computed and cached, together with the ascending index-sort of each
 * feature's response vector.
 * <p>
 * Caching trades memory ({@code nFeatures * nExamples} floats plus the same
 * number of ints) for not having to recompute responses during boosting.
 */
public class CachedTrainingData implements HaarTrainingData {
	/** Cached normalised responses, indexed [feature][example]. */
	float[][] responses;
	/** Class label per example: {@code true} = positive, {@code false} = negative. */
	boolean[] classes;
	/** Per-feature ascending index-sort of {@link #responses}. */
	int[][] sortedIndices;
	/** The features whose responses are cached. */
	List<HaarFeature> features;
	/** Detection-window dimensions in pixels (derived from the SAT size). */
	int width, height;

	/**
	 * Compute the variance normalisation factor for the detection window of
	 * the given summed-area table: the standard deviation of the pixels in
	 * the window inset by one pixel on every side, or 1 if the window has
	 * zero (or numerically negative) variance.
	 *
	 * @param sat
	 *            the summed area table of the training example
	 * @return the variance-normalisation divisor (always &gt; 0)
	 */
	float computeWindowVarianceNorm(SummedSqTiltAreaTable sat) {
		final int w = width - 2;
		final int h = height - 2;

		final int x = 1; // shift by 1 scaled px to centre box
		final int y = 1;

		final float sum = sat.sum.pixels[y + h][x + w] + sat.sum.pixels[y][x] -
				sat.sum.pixels[y + h][x] - sat.sum.pixels[y][x + w];
		// FIX: the row offset must be the window height (h), not the width (w).
		// The original indexed sqSum with y + w, which is only correct for
		// square windows and reads out of bounds (or the wrong box) otherwise.
		final float sqSum = sat.sqSum.pixels[y + h][x + w] + sat.sqSum.pixels[y][x] -
				sat.sqSum.pixels[y + h][x] - sat.sqSum.pixels[y][x + w];

		final float cachedInvArea = 1.0f / (w * h);
		final float mean = sum * cachedInvArea;
		float wvNorm = sqSum * cachedInvArea - mean * mean;
		// FIX: require strictly-positive variance. The original tested >= 0,
		// so a perfectly flat window yielded sqrt(0) == 0, which is later used
		// as a divisor and would produce Infinity/NaN responses.
		wvNorm = (float) ((wvNorm > 0) ? Math.sqrt(wvNorm) : 1);

		return wvNorm;
	}

	/**
	 * Construct the cache by evaluating every feature against every positive
	 * and negative example (positives first), normalising each response by the
	 * example's window variance, and index-sorting each feature's responses.
	 * Feature evaluation is parallelised across features.
	 *
	 * @param positive
	 *            summed area tables of the positive training examples
	 * @param negative
	 *            summed area tables of the negative training examples
	 * @param features
	 *            the haar features to evaluate
	 */
	public CachedTrainingData(final List<SummedSqTiltAreaTable> positive, final List<SummedSqTiltAreaTable> negative,
			final List<HaarFeature> features)
	{
		// SAT dimensions are one larger than the image/window they summarise
		this.width = positive.get(0).sum.width - 1;
		this.height = positive.get(0).sum.height - 1;

		this.features = features;
		final int nfeatures = features.size();

		final int npositive = positive.size();
		classes = new boolean[npositive + negative.size()];
		responses = new float[nfeatures][classes.length];
		sortedIndices = new int[nfeatures][];

		// FIX: fill the class labels once, sequentially. The original wrote
		// classes[count] from every parallel task — redundant work and a
		// (benign) data race. Positives occupy the first npositive slots;
		// the remainder default to false (negative).
		for (int i = 0; i < npositive; i++)
			classes[i] = true;

		Parallel.forIndex(0, nfeatures, 1, new Operation<Integer>() {
			@Override
			public void perform(Integer f) {
				final HaarFeature feature = features.get(f);
				int count = 0;

				for (final SummedSqTiltAreaTable t : positive) {
					final float wvNorm = computeWindowVarianceNorm(t);
					responses[f][count] = feature.computeResponse(t, 0, 0) / wvNorm;
					++count;
				}

				for (final SummedSqTiltAreaTable t : negative) {
					final float wvNorm = computeWindowVarianceNorm(t);
					responses[f][count] = feature.computeResponse(t, 0, 0) / wvNorm;
					++count;
				}

				sortedIndices[f] = ArrayUtils.indexSort(responses[f]);
			}
		});
	}

	@Override
	public float[] getResponses(int dimension) {
		return responses[dimension];
	}

	@Override
	public boolean[] getClasses() {
		return classes;
	}

	@Override
	public int numInstances() {
		return classes.length;
	}

	@Override
	public int numFeatures() {
		return responses.length;
	}

	@Override
	public float[] getInstanceFeature(int idx) {
		// Gather the idx-th column across all per-feature response rows
		final float[] feature = new float[responses.length];

		for (int i = 0; i < feature.length; i++) {
			feature[i] = responses[i][idx];
		}

		return feature;
	}

	@Override
	public int[] getSortedIndices(int d) {
		return sortedIndices[d];
	}

	@Override
	public HaarFeature getFeature(int dimension) {
		return features.get(dimension);
	}
}