/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.commons.math4.neuralnet;

import java.util.function.DoubleUnaryOperator;

import org.apache.commons.rng.UniformRandomProvider;
import org.apache.commons.rng.sampling.distribution.ContinuousUniformSampler;

/**
 * Creates functions that will select the initial values of a neuron's
 * features.
 *
 * @since 3.3
 */
public final class FeatureInitializerFactory {
    /** Class contains only static methods. */
    private FeatureInitializerFactory() {}

    /**
     * Uniform sampling of the given range.
     *
     * @param rng Random number generator used to draw samples from a
     * uniform distribution.
     * @param min Lower bound of the range.
     * @param max Upper bound of the range.
     * @return an initializer such that the features will be initialized with
     * values within the given range.
     * @throws IllegalArgumentException if {@code min >= max}.
     */
    public static FeatureInitializer uniform(final UniformRandomProvider rng,
                                             final double min,
                                             final double max) {
        // Enforce the documented contract here: an inverted or empty range
        // must be rejected eagerly, at creation time, rather than producing
        // an initializer that samples from a degenerate interval.
        // NOTE(review): assumes ContinuousUniformSampler does not itself
        // validate its bounds — confirm against the Commons RNG version in use.
        if (min >= max) {
            throw new IllegalArgumentException(min + " >= " + max);
        }
        // A constant-zero base initializer plus a uniform random offset
        // yields values drawn uniformly from [min, max].
        return randomize(new ContinuousUniformSampler(rng, min, max),
                         function(x -> 0, 0, 0));
    }

    /**
     * Creates an initializer from a univariate function {@code f(x)}.
     * The argument {@code x} is set to {@code init} at the first call
     * and will be incremented at each call.
     *
     * @param f Function.
     * @param init Initial value.
     * @param inc Increment.
     * @return the initializer.
     */
    public static FeatureInitializer function(final DoubleUnaryOperator f,
                                              final double init,
                                              final double inc) {
        return new FeatureInitializer() {
            /** Argument, advanced by {@code inc} on every call. */
            private double arg = init;

            /** {@inheritDoc} */
            @Override
            public double value() {
                // Evaluate at the current argument, then advance for the
                // next call; the pre-increment result is returned.
                final double result = f.applyAsDouble(arg);
                arg += inc;
                return result;
            }
        };
    }

    /**
     * Adds some amount of random data to the given initializer.
     *
     * @param random Random variable distribution sampler.
     * @param orig Original initializer.
     * @return an initializer whose {@link FeatureInitializer#value() value}
     * method will return {@code orig.value() + random.sample()}.
     */
    public static FeatureInitializer randomize(final ContinuousUniformSampler random,
                                               final FeatureInitializer orig) {
        return new FeatureInitializer() {
            /** {@inheritDoc} */
            @Override
            public double value() {
                return orig.value() + random.sample();
            }
        };
    }
}