diff --git a/.github/workflows/personal-dictionary.txt b/.github/workflows/personal-dictionary.txt index 5b6de7a..863946f 100644 --- a/.github/workflows/personal-dictionary.txt +++ b/.github/workflows/personal-dictionary.txt @@ -1,6 +1,8 @@ personal_ws-1.1 es 0 utf-8 +ActivationFunctions Amat Approximators +Aristóteles Backpropagation Backpropagations Bernstein @@ -18,6 +20,7 @@ Factorizando Feedforward Fn ForwardPropagation +FromMatrixNN Funtores GN GPUs @@ -28,17 +31,19 @@ HUxx Halber HardTanh Hardtanh -Hardtanh Hornik +IndicatorFunction IntervaloCentral Isoperimetry Iésima JJ Julián Jupyter +KNN LReLU LaTeX Liang +LinRange Liskov Lusin López @@ -50,14 +55,21 @@ Merí Mesejo Multilayer NN +Nagaraj Nótese Ockham +OneLayerNeuralNetwork +OptimizedNeuralNetwork +OptimizedNeuronalNetwork Palett Perceptrón Pérez +RampFunction +RandomWeightsNN ReLU Readme Rosenblatt +STL Sebastien Sellke Sigmoid @@ -66,7 +78,9 @@ Sigmoidea Stinchcombe TFG TeoremaStoneWeiertrass +ThresholdFunction Tietze +UCI UMVUE Wilcoxon Wortman @@ -78,14 +92,20 @@ aproximadores autres auxiliarDiferenciaPorDerivada backpropagation +baselinestretch bgcolor ceil cienciadedatos codominio codominios contutor +covariate +covariates +csv cte darkRed +dat +derivativeRampFunction diferenciabilidad diferenciable diferenciables @@ -101,6 +121,7 @@ feedforward fg fj fjk +framesep gj gjk hiperplanos @@ -110,11 +131,15 @@ ij ik inasumible inecuación +initializer insesgado insesgados ipynb +isoperimetry jejejeje jk +jl +lRelu lcc linenos lineos @@ -125,6 +150,7 @@ modus muestral multicapa multicapas +nn nx nótese operandi @@ -134,6 +160,8 @@ pag parametrized paramétrico paramétricos +pdf +perceptron perceptrones perceptrón png @@ -141,7 +169,9 @@ posteriori precompilados preimágenes primeraCapa +println qB +redimensionando reenfocar reescalados referenciada @@ -156,15 +186,21 @@ rrnng separabilidad sigmoide sigmoidea +sobreajustado +sobreajuste +sobreentrenado sobreescribir solventable squasher +stl struct subespacio subespacios subrecubrimiento +subsección sumatoria sumatorias +sutilBackground sutilGreen tanh teo diff --git a/.gitignore b/.gitignore index 8dccbb6..803cd14 100644 --- a/.gitignore +++ b/.gitignore @@ -44,3 +44,4 @@ Notas/ Experimentos/comparativas-funciones-activacion/pruebas-linter.jl Experimentos/comparativas-funciones-activacion/boxplot.jl Experimentos/comparativas-funciones-activacion/img/boxplot-whiskers-activation-function.png +Memoria/capitulos/.ipynb_checkpoints/Ejemplo-uso-biblioteca-checkpoint.ipynb diff --git a/Biblioteca-Redes-Neuronales/src/one_layer_neuronal_network.jl b/Biblioteca-Redes-Neuronales/src/one_layer_neuronal_network.jl deleted file mode 100644 index f82b1f9..0000000 --- a/Biblioteca-Redes-Neuronales/src/one_layer_neuronal_network.jl +++ /dev/null @@ -1,59 +0,0 @@ -module ModuleOneLayerNeuralNetwork - - -export OneLayerNeuralNetworkRandomWeights -export ForwardPropagation - -""" - AbstractOneLayerNeuralNetwork -The basic elements that define a one layer neural network -""" -abstract type AbstractOneLayerNeuralNetwork end - -""" - OneLayerNeuralNetworkRandomWeights -# Arguments -- `activation_function` should be a Real to Real function -- `derivative_activation_function` should be a Real to Real function -""" -mutable struct OneLayerNeuralNetworkRandomWeights <: AbstractOneLayerNeuralNetwork - entry_dimesion :: Int - number_of_hide_units :: Int - output_dimension :: Int - activation_function - derivative_activation_function - W1 - W2 - - function OneLayerNeuralNetworkRandomWeights(entry_dimesion, - 
number_of_hide_units,
-                                                output_dimension,
-                                                activation_function,
-                                                derivative_activation_function)
-
-        W1 = rand(Float64, number_of_hide_units, entry_dimesion+1)
-        W2 = rand(Float64, output_dimension, number_of_hide_units)
-        return new(
-            entry_dimesion,
-            number_of_hide_units,
-            output_dimension,
-            activation_function,
-            derivative_activation_function,
-            W1,
-            W2
-        )
-
-    end
-end
-
-"""
-ForwardPropagation (h::AbstractOneLayerNeuralNetwork, x::Vector{Real})
-"""
-function ForwardPropagation(h::AbstractOneLayerNeuralNetwork, x)
-    s = h.W1 * push!(x,1)
-    ∑= map(h.activation_function,s)
-    x = h.W2 * ∑
-    return x
-end
-
-end # end OneLayerNeuralNetwork
\ No newline at end of file
diff --git a/Biblioteca-Redes-Neuronales/test/one_layer_neural_network.test.jl b/Biblioteca-Redes-Neuronales/test/one_layer_neural_network.test.jl
deleted file mode 100644
index a193da4..0000000
--- a/Biblioteca-Redes-Neuronales/test/one_layer_neural_network.test.jl
+++ /dev/null
@@ -1,29 +0,0 @@
-using Test
-
-include("./../src/activation_functions.jl")
-include("./../src/one_layer_neuronal_network.jl")
-using .ActivationFunctions
-using .ModuleOneLayerNeuralNetwork
-
-entry_dimesion = 2
-number_of_hide_units = 3
-output_dimension = 2
-OLNN = OneLayerNeuralNetworkRandomWeights(
-    entry_dimesion,
-    number_of_hide_units,
-    output_dimension,
-    ReLU,
-    ReLU
-    )
-
-@testset "Dimension of one layer networks" begin
-    # Weights have correct dimensions
-    @test size(OLNN.W1)==(number_of_hide_units, 1+entry_dimesion)
-    @test size(OLNN.W2)==(output_dimension, number_of_hide_units)
-end
-
-@testset "ForwardPropagation" begin
-    @test typeof(ForwardPropagation(OLNN,[1,2.0])) == Vector{Float64}
-end
-
-
diff --git a/Experimentos/.config.toml b/Experimentos/.config.toml
index 43cc504..50520c2 100644
--- a/Experimentos/.config.toml
+++ b/Experimentos/.config.toml
@@ -23,3 +23,30 @@
 FACTOR = +1000000 # Parámetro implicado con la cardinalidad del conjunt
 #DIRECTORIO_IMAGENES = "./Experimentos/comparativas-funciones-activacion/img/" #carpeta que contendrá las imágenes
 # Descomentar: Para mostrar en la carpeta de la memoria
 DIRECTORIO_IMAGENES = "./Memoria/img/funciones-activacion/" #carpeta que contendrá las imágenes
+
+# Configuration for the weight-initialization visualization (synthetic, homogeneous sample)
+[visualizacion-inicializacion-pesos-R]
+# Uncomment to write the images into the experiment folder
+DIRECTORIO_IMAGENES = "./Experimentos/inicializacion-pesos-red-neuronal/img/0_sintetico_homogeneo/" # folder that will hold the images
+# Uncomment to write the images into the report (Memoria) folder
+#DIRECTORIO_IMAGENES = "./Memoria/img/7-algoritmo-inicializar-pesos/"
+
+# Configuration for the weight-initialization visualization (synthetic, randomly shuffled sample)
+[visualizacion-inicializacion-pesos-R-aleatorio]
+# Uncomment to write the images into the experiment folder
+DIRECTORIO_IMAGENES = "./Experimentos/inicializacion-pesos-red-neuronal/img/0_1_aleatorio/" # folder that will hold the images
+# Uncomment to write the images into the report (Memoria) folder
+#DIRECTORIO_IMAGENES = "./Memoria/img/7-algoritmo-inicializar-pesos/"
+DIRECTORIO_RESULTADOS = "Experimentos/inicializacion-pesos-red-neuronal/resultados/1_sinteticos_heterogeneo/"
+NOMBRE_FICHERO_RESULTADOS = "resultados.csv"
+NUMERO_PARTICIONES = +15 # number of times measurements are taken
+LIMITE_INFERIOR = -10 # lower bound of the possible values
+LIMITE_SUPERIOR = +10 # upper bound of the possible values
+FACTOR = +4 # number of data points per neuron
+
+# Configuration for the airfoil self-noise experiment
+[air-self-noise]
+# Data set location and experiment output settings
+FICHERO_DATOS = "Experimentos/inicializacion-pesos-red-neuronal/data/airfoil_self_noise.csv"
+DIRECTORIO_RESULTADOS = "Experimentos/inicializacion-pesos-red-neuronal/resultados/2_air_self_noise/"
+NUMERO_EJECUCIONES = +15 # number of times measurements are taken
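The three tables added to Experimentos/.config.toml above are consumed by the new experiment scripts through Julia's standard TOML module. A minimal sketch of the lookup pattern, using the section and key names exactly as defined above (the variable names on the left are illustrative):

    using TOML

    config = TOML.parsefile("Experimentos/.config.toml")["air-self-noise"]
    data_file = config["FICHERO_DATOS"]       # path to the airfoil CSV
    n_runs    = config["NUMERO_EJECUCIONES"]  # TOML integer: +15 parses to 15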
"Experimentos/inicializacion-pesos-red-neuronal/data/airfoil_self_noise.csv" +DIRECTORIO_RESULTADOS = "Experimentos/inicializacion-pesos-red-neuronal/resultados/2_air_self_noise/" +NUMERO_EJECUCIONES = +15 # Veces que se tomarán medidas diff --git a/Experimentos/comparativas-funciones-activacion/velocidad_funciones_activacion.jl b/Experimentos/comparativas-funciones-activacion/velocidad_funciones_activacion.jl index d87a0ee..14e024a 100644 --- a/Experimentos/comparativas-funciones-activacion/velocidad_funciones_activacion.jl +++ b/Experimentos/comparativas-funciones-activacion/velocidad_funciones_activacion.jl @@ -10,7 +10,7 @@ # El directorio donde se guarda los ficheros es: DIRECTORIO_RESULTADOS ################################################################################### -include("../../Biblioteca-Redes-Neuronales/src/activation_functions.jl") +include("../../OptimizedNeuralNetwork.jl/src/activation_functions.jl") using .ActivationFunctions # Bibliotecas para tiempos y estadísticas using TimerOutputs diff --git a/Experimentos/comparativas-funciones-activacion/visualizacion-funciones-activacion.jl b/Experimentos/comparativas-funciones-activacion/visualizacion-funciones-activacion.jl index dbd315e..fe2ba14 100644 --- a/Experimentos/comparativas-funciones-activacion/visualizacion-funciones-activacion.jl +++ b/Experimentos/comparativas-funciones-activacion/visualizacion-funciones-activacion.jl @@ -4,7 +4,7 @@ # Paquetes using Plots using TOML -include("../../Biblioteca-Redes-Neuronales/src/activation_functions.jl") +include("../../OptimizedNeuralNetwork.jl/src/activation_functions.jl") using .ActivationFunctions FICHERO_CONFIGURACION = "Experimentos/.config.toml" diff --git a/Experimentos/inicializacion-pesos-red-neuronal/0_experimento_sintetico.jl b/Experimentos/inicializacion-pesos-red-neuronal/0_experimento_sintetico.jl new file mode 100644 index 0000000..748755c --- /dev/null +++ b/Experimentos/inicializacion-pesos-red-neuronal/0_experimento_sintetico.jl @@ -0,0 +1,43 @@ +######################################################## +# EXPERIMENTO SINTÉTICO DE NUESTRO algoritmo +# Visualiza para ciertos tamaños de muestra el error obtenido +######################################################## +using Random +using Plots +using TOML +FICHERO_CONFIGURACION = "Experimentos/.config.toml" +config = TOML.parsefile(FICHERO_CONFIGURACION)["visualizacion-inicializacion-pesos-R"] +img_path = config["DIRECTORIO_IMAGENES"] + +Random.seed!(1) +include("../../OptimizedNeuralNetwork.jl/src/OptimizedNeuralNetwork.jl") +using .OptimizedNeuralNetwork + +M = 1 +K_range = 3 +f_regression(x)=(x<1) ? 
diff --git a/Experimentos/inicializacion-pesos-red-neuronal/1_experimento_sintetico_heterogeneo.jl b/Experimentos/inicializacion-pesos-red-neuronal/1_experimento_sintetico_heterogeneo.jl
new file mode 100644
index 0000000..52cc547
--- /dev/null
+++ b/Experimentos/inicializacion-pesos-red-neuronal/1_experimento_sintetico_heterogeneo.jl
@@ -0,0 +1,57 @@
+########################################################
+# SYNTHETIC EXPERIMENT FOR OUR INITIALIZATION ALGORITHM
+# Same comparison over a wider range, with a shuffled training sample
+########################################################
+using Random
+using Plots
+using TOML
+FICHERO_CONFIGURACION = "Experimentos/.config.toml"
+config = TOML.parsefile(FICHERO_CONFIGURACION)["visualizacion-inicializacion-pesos-R-aleatorio"]
+img_path = config["DIRECTORIO_IMAGENES"]
+NOMBRE_FICHERO_RESULTADOS = config["NOMBRE_FICHERO_RESULTADOS"]
+# number of partitions
+numero_particiones = config["NUMERO_PARTICIONES"]
+include("../../OptimizedNeuralNetwork.jl/src/OptimizedNeuralNetwork.jl")
+using .OptimizedNeuralNetwork
+
+Random.seed!(1)
+
+
+M = 1
+# Range of values over which the networks are compared
+limite_inf = config["LIMITE_INFERIOR"]
+limite_sup = config["LIMITE_SUPERIOR"]
+factor = config["FACTOR"] # training samples per neuron
+
+f_regression(x) = (x < 1) ? exp(-x) - 4 : log(x)
+for n in [3,5,7,15,20,40,60]
+    data_set_size = factor*n
+
+    println("EXPERIMENTO SINTÉTICO")
+    println("n=$n y tamaño conjunto $data_set_size")
+    # Training sample over the interval
+    X_train = Vector(LinRange(limite_inf, limite_sup, data_set_size))
+    X_train = shuffle(X_train)
+    Y_train = map(f_regression, X_train)
+    # Neural network with the initialized weights
+    h = nn_from_data(X_train, Y_train, n, M)
+    # Evaluation function via forward propagation
+    evaluate(x) = forward_propagation(h,
+        RampFunction, x)
+    # Plotting
+
+    interval = [limite_inf, limite_sup]
+    file_name = "f_ideal_y_rn_con_$(n)_neuronas"
+    plot(x -> evaluate([x])[1],
+        limite_inf, limite_sup,
+        label="red neuronal n=$n"
+        )
+    plot!(f_regression,
+        label="f ideal",
+        title="Comparativa función ideal y red neuronal n=$n, rango aleatorio"
+        )
+    png(img_path*file_name)
+
+    media, mediana, desv, cor = regression(X_train, Y_train, x -> evaluate([x])[1])
+    println("media=$media mediana=$mediana desv=$desv cor=$cor")
+end
\ No newline at end of file
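1_experimento_sintetico_heterogeneo.jl reads NOMBRE_FICHERO_RESULTADOS, and its config section also defines DIRECTORIO_RESULTADOS, yet the metrics returned by regression are only printed. A sketch of persisting them with the same CSV/DataFrames machinery that 2_air_self_noise.jl uses below (the resultados frame and its column names are hypothetical):

    using CSV, DataFrames

    # Hypothetical results table; one row per network size n.
    resultados = DataFrame(n=Int[], media=Float64[], mediana=Float64[], desv=Float64[], cor=Float64[])
    # Inside the loop, after regression(...):
    #     push!(resultados, (n, media, mediana, desv, cor))
    CSV.write(config["DIRECTORIO_RESULTADOS"] * NOMBRE_FICHERO_RESULTADOS, resultados)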
diff --git a/Experimentos/inicializacion-pesos-red-neuronal/2_air_self_noise.jl b/Experimentos/inicializacion-pesos-red-neuronal/2_air_self_noise.jl
new file mode 100644
index 0000000..2fb21b6
--- /dev/null
+++ b/Experimentos/inicializacion-pesos-red-neuronal/2_air_self_noise.jl
@@ -0,0 +1,191 @@
+########################################################
+# The weight-initialization algorithm on the airfoil self-noise data set
+########################################################
+using Random
+using Plots
+using TOML
+using CSV
+using DataFrames
+using StatsBase
+using HypothesisTests
+using TimerOutputs
+
+FICHERO_CONFIGURACION = "Experimentos/.config.toml"
+config = TOML.parsefile(FICHERO_CONFIGURACION)["air-self-noise"]
+FILE = config["FICHERO_DATOS"]
+DIRECTORIO_RESULTADOS = config["DIRECTORIO_RESULTADOS"]
+NUMERO_EJECUCIONES = config["NUMERO_EJECUCIONES"]
+
+include("../../OptimizedNeuralNetwork.jl/src/OptimizedNeuralNetwork.jl")
+
+using .OptimizedNeuralNetwork
+
+#------------------------------------------------------
+# Data preprocessing
+#------------------------------------------------------
+input_dimension = 5
+output_dimension = 1
+attributes = 1:input_dimension
+label = 6
+df = DataFrame(CSV.File(FILE, header=false))
+# Check for missing values
+display(describe(df)) # there are none
+# Convert to matrices and vectors
+X = Matrix(df[:, attributes])
+Y = Vector(df[:,label])
+len = length(Y)
+index = Integer(2*len/3) # train/test split point
+α = 0.9 # orthogonality heuristic discussed in the report
+n = ceil(Integer, index*α) # number of hidden nodes
+valor_estancamiento = 5
+tol = 0.001
+η = 0.005 # heuristic value after several trials (0.1 was too large)
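+# Note on the sizes (assuming the full 1503-row CSV added below):
+# index = Integer(2*1503/3) = 1002 training rows, and the orthogonality
+# heuristic gives n = ceil(Integer, 0.9*1002) = 902 hidden nodes.
+# valor_estancamiento and tol drive the early stop further down: an iteration
+# that fails to improve the training error by at least tol advances a
+# stagnation counter, and 5 such iterations in a row abort backpropagation.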
+
+println("Se va a entrenar con $n neuronas")
+println("El tamaño de test es de $(len-index)")
+println("El conjunto de entrenamiento $(index)")
+
+### Storage for the results
+# array indices where each method's data are stored
+indice_algoritmo_inicializacion = 1
+indice_aleatorio_y_backpropagation = 2
+
+dfTiempo = [
+    Array{Float64}(undef, 2 )
+    for _ in 1:NUMERO_EJECUCIONES
+    ]
+dfNombre = ["Algoritmo inicialización", "Aleatorio y Backpropagation"]
+dfErrorEntrenamiento = [
+    Array{Float64}(undef, 2 )
+    for _ in 1:NUMERO_EJECUCIONES
+    ]
+dfErrorTest = [
+    Array{Float64}(undef, 2 )
+    for _ in 1:NUMERO_EJECUCIONES
+]
+for i in 1:NUMERO_EJECUCIONES
+    # shuffle
+    perm = randperm(len)
+    Xs = X[perm, :]
+    Ys = Y[perm]
+    # separate train from test
+    X_train = Xs[1:index,:]
+    Y_train = Ys[1:index]
+    X_test = Xs[index+1:len, :]
+    Y_test = Ys[index+1:len]
+
+    # Data normalization
+    #dt = fit(ZScoreTransform, X_train, dims=1)
+    dt = fit(UnitRangeTransform, X_train, dims=1)
+    X_test_normalized = StatsBase.transform(dt, X_test)
+    X_train_normalized = StatsBase.transform(dt, X_train)
+
+    dt_y = fit(UnitRangeTransform, Y_train, dims=1)
+    Y_test_normalized = StatsBase.transform(dt_y, Y_test)
+    Y_train_normalized = StatsBase.transform(dt_y, Y_train)
+
+    #------------------------------------------------------
+    # Get the neural networks
+    #------------------------------------------------------
+    println("\nEjecución $i")
+    # Run our initialization algorithm
+    M = 1
+    dfTiempo[i][indice_algoritmo_inicializacion] = @elapsed h_initialized = nn_from_data(X_train_normalized, Y_train_normalized, n, M)
+    # Evaluation function via forward propagation
+    evaluate_initialized(x) = forward_propagation(h_initialized,
+        RampFunction,x
+    )
+
+    println("Resultados con h ajustada")
+    error_in_train_initialize = error_in_data_set(X_train_normalized, Y_train_normalized, evaluate_initialized)
+    println("Ha tardado un tiempo de $(dfTiempo[i][indice_algoritmo_inicializacion])")
+    println("El error en el conjunto de entrenamiento es de $error_in_train_initialize")
+
+    # Store the data for the CSV output
+    dfErrorEntrenamiento[i][indice_algoritmo_inicializacion] = error_in_train_initialize
+    dfErrorTest[i][indice_algoritmo_inicializacion] = error_in_data_set(X_test_normalized, Y_test_normalized, evaluate_initialized)
+
+    ####### Baseline: random weights plus backpropagation
+    # Train until the error is equal to or below the initialized network's,
+    # starting from a random initial state
+    println("\n--- Resultados con h aleatoria ---")
+    time_backpropagation = @elapsed h_random = RandomWeightsNN(input_dimension, n, output_dimension)
+    evaluate_random(x) = forward_propagation(h_random,RampFunction,x)
+    error_in_train_backpropagation = error_in_data_set(X_train_normalized, Y_train_normalized, evaluate_random)
+    iterations = 0
+    last_error = error_in_train_backpropagation
+    stopped_iterations = 0
+    println("El error inicial en backpropagation es de $error_in_train_backpropagation")
+    while( error_in_train_initialize < error_in_train_backpropagation
+        &&
+        stopped_iterations < valor_estancamiento
+        )
+        println("El error en la iteración $iterations: $error_in_train_backpropagation")
+        time_backpropagation += @elapsed backpropagation!(h_random,
+            X_train_normalized, Y_train_normalized,
+            RampFunction, derivativeRampFunction,
+            n,
+            η)
+        iterations += 1
+        evaluate_random(x) = forward_propagation(h_random,
+            RampFunction,x
+        )
+        error_in_train_backpropagation = error_in_data_set(X_train_normalized, Y_train_normalized, evaluate_random)
+        if(abs(error_in_train_backpropagation - last_error) < tol || error_in_train_backpropagation > tol + last_error)
+            stopped_iterations += 1
+        else
+            stopped_iterations = 0
+        end
+        last_error = error_in_train_backpropagation
+    end
+    evaluate_random(x) = forward_propagation(h_random,RampFunction,x)
+    dfTiempo[i][indice_aleatorio_y_backpropagation] = time_backpropagation
+    dfErrorEntrenamiento[i][indice_aleatorio_y_backpropagation] = error_in_train_backpropagation
+    dfErrorTest[i][indice_aleatorio_y_backpropagation] = error_in_data_set(X_test_normalized, Y_test_normalized, evaluate_random)
+    println(regression(X_test_normalized, Y_test_normalized, evaluate_random))
+    println("Durante $time_backpropagation")
+end
+
+# Display the results and save them to the results files
+# Print to screen
+DF_NOMBRES = DataFrame(
+    Método = dfNombre,
+)
+# Append the times
+DF_TIEMPOS = hcat(
+    DF_NOMBRES,
+    DataFrame(dfTiempo, ["Tiempo $(i)" for i in 1:NUMERO_EJECUCIONES])
+)
+# Append the training error
+DF_ERROR_ENTRENAMIENTO = hcat(
+    DF_NOMBRES,
+    DataFrame(dfErrorEntrenamiento, ["Error entrenamiento $(i)" for i in 1:NUMERO_EJECUCIONES])
+)
+
+DF_ERROR_TEST = hcat(
+    DF_NOMBRES,
+    DataFrame(dfErrorTest, ["Error test $(i)" for i in 1:NUMERO_EJECUCIONES])
+)
+
+# Save the data to the corresponding results directory
+CSV.write(DIRECTORIO_RESULTADOS*"tiempos.csv", DF_TIEMPOS)
+CSV.write(DIRECTORIO_RESULTADOS*"error_entrenamiento.csv", DF_ERROR_ENTRENAMIENTO)
+CSV.write(DIRECTORIO_RESULTADOS*"error_test.csv", DF_ERROR_TEST)
+
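+# The paired Wilcoxon signed-rank test below compares, run by run, the time
+# taken by the initialization algorithm against random weights plus
+# backpropagation: map(x -> x[1] - x[2], dfTiempo) forms the vector of
+# per-run time differences, and SignedRankTest from HypothesisTests checks
+# whether their median is zero.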
+# Wilcoxon signed-rank test
+
+resultados = "\n
+    $(SignedRankTest(map(x-> x[1]-x[2],dfTiempo)))
+    "
+println(resultados)
+write(DIRECTORIO_RESULTADOS*"TEST_WILCOXON", resultados)
+
+
+
+
+
+
+
+
+
+
diff --git a/Experimentos/inicializacion-pesos-red-neuronal/data/airfoil_self_noise.csv b/Experimentos/inicializacion-pesos-red-neuronal/data/airfoil_self_noise.csv
new file mode 100644
index 0000000..d14d4b6
--- /dev/null
+++ b/Experimentos/inicializacion-pesos-red-neuronal/data/airfoil_self_noise.csv
@@ -0,0 +1,1503 @@
+800,0,0.3048,71.3,0.00266337,126.201
+1000,0,0.3048,71.3,0.00266337,125.201
+1250,0,0.3048,71.3,0.00266337,125.951
+1600,0,0.3048,71.3,0.00266337,127.591
+2000,0,0.3048,71.3,0.00266337,127.461
+2500,0,0.3048,71.3,0.00266337,125.571
+3150,0,0.3048,71.3,0.00266337,125.201
+4000,0,0.3048,71.3,0.00266337,123.061
+5000,0,0.3048,71.3,0.00266337,121.301
+6300,0,0.3048,71.3,0.00266337,119.541
+8000,0,0.3048,71.3,0.00266337,117.151
+10000,0,0.3048,71.3,0.00266337,115.391
+12500,0,0.3048,71.3,0.00266337,112.241
+16000,0,0.3048,71.3,0.00266337,108.721
+500,0,0.3048,55.5,0.00283081,126.416
+630,0,0.3048,55.5,0.00283081,127.696
+800,0,0.3048,55.5,0.00283081,128.086
+1000,0,0.3048,55.5,0.00283081,126.966
+1250,0,0.3048,55.5,0.00283081,126.086
+1600,0,0.3048,55.5,0.00283081,126.986
+2000,0,0.3048,55.5,0.00283081,126.616
+2500,0,0.3048,55.5,0.00283081,124.106
+3150,0,0.3048,55.5,0.00283081,123.236
+4000,0,0.3048,55.5,0.00283081,121.106
+5000,0,0.3048,55.5,0.00283081,119.606
+6300,0,0.3048,55.5,0.00283081,117.976
+8000,0,0.3048,55.5,0.00283081,116.476
+10000,0,0.3048,55.5,0.00283081,113.076
+12500,0,0.3048,55.5,0.00283081,111.076
+200,0,0.3048,39.6,0.00310138,118.129
+250,0,0.3048,39.6,0.00310138,119.319
+315,0,0.3048,39.6,0.00310138,122.779
+400,0,0.3048,39.6,0.00310138,124.809
+500,0,0.3048,39.6,0.00310138,126.959
+630,0,0.3048,39.6,0.00310138,128.629
+800,0,0.3048,39.6,0.00310138,129.099
+1000,0,0.3048,39.6,0.00310138,127.899
+1250,0,0.3048,39.6,0.00310138,125.499
+1600,0,0.3048,39.6,0.00310138,124.049
+2000,0,0.3048,39.6,0.00310138,123.689
+2500,0,0.3048,39.6,0.00310138,121.399
+3150,0,0.3048,39.6,0.00310138,120.319
+4000,0,0.3048,39.6,0.00310138,119.229
+5000,0,0.3048,39.6,0.00310138,117.789
+6300,0,0.3048,39.6,0.00310138,116.229
+8000,0,0.3048,39.6,0.00310138,114.779
+10000,0,0.3048,39.6,0.00310138,112.139
+12500,0,0.3048,39.6,0.00310138,109.619
+200,0,0.3048,31.7,0.00331266,117.195
+250,0,0.3048,31.7,0.00331266,118.595
+315,0,0.3048,31.7,0.00331266,122.765
+400,0,0.3048,31.7,0.00331266,125.045
+500,0,0.3048,31.7,0.00331266,127.315
+630,0,0.3048,31.7,0.00331266,129.095
+800,0,0.3048,31.7,0.00331266,129.235
+1000,0,0.3048,31.7,0.00331266,127.365
+1250,0,0.3048,31.7,0.00331266,124.355
+1600,0,0.3048,31.7,0.00331266,122.365
+2000,0,0.3048,31.7,0.00331266,122.375
+2500,0,0.3048,31.7,0.00331266,120.755 +3150,0,0.3048,31.7,0.00331266,119.135 +4000,0,0.3048,31.7,0.00331266,118.145 +5000,0,0.3048,31.7,0.00331266,115.645 +6300,0,0.3048,31.7,0.00331266,113.775 +8000,0,0.3048,31.7,0.00331266,110.515 +10000,0,0.3048,31.7,0.00331266,108.265 +800,1.5,0.3048,71.3,0.00336729,127.122 +1000,1.5,0.3048,71.3,0.00336729,125.992 +1250,1.5,0.3048,71.3,0.00336729,125.872 +1600,1.5,0.3048,71.3,0.00336729,126.632 +2000,1.5,0.3048,71.3,0.00336729,126.642 +2500,1.5,0.3048,71.3,0.00336729,124.512 +3150,1.5,0.3048,71.3,0.00336729,123.392 +4000,1.5,0.3048,71.3,0.00336729,121.762 +5000,1.5,0.3048,71.3,0.00336729,119.632 +6300,1.5,0.3048,71.3,0.00336729,118.122 +8000,1.5,0.3048,71.3,0.00336729,115.372 +10000,1.5,0.3048,71.3,0.00336729,113.492 +12500,1.5,0.3048,71.3,0.00336729,109.222 +16000,1.5,0.3048,71.3,0.00336729,106.582 +315,1.5,0.3048,39.6,0.00392107,121.851 +400,1.5,0.3048,39.6,0.00392107,124.001 +500,1.5,0.3048,39.6,0.00392107,126.661 +630,1.5,0.3048,39.6,0.00392107,128.311 +800,1.5,0.3048,39.6,0.00392107,128.831 +1000,1.5,0.3048,39.6,0.00392107,127.581 +1250,1.5,0.3048,39.6,0.00392107,125.211 +1600,1.5,0.3048,39.6,0.00392107,122.211 +2000,1.5,0.3048,39.6,0.00392107,122.101 +2500,1.5,0.3048,39.6,0.00392107,120.981 +3150,1.5,0.3048,39.6,0.00392107,119.111 +4000,1.5,0.3048,39.6,0.00392107,117.741 +5000,1.5,0.3048,39.6,0.00392107,116.241 +6300,1.5,0.3048,39.6,0.00392107,114.751 +8000,1.5,0.3048,39.6,0.00392107,112.251 +10000,1.5,0.3048,39.6,0.00392107,108.991 +12500,1.5,0.3048,39.6,0.00392107,106.111 +400,3,0.3048,71.3,0.00425727,127.564 +500,3,0.3048,71.3,0.00425727,128.454 +630,3,0.3048,71.3,0.00425727,129.354 +800,3,0.3048,71.3,0.00425727,129.494 +1000,3,0.3048,71.3,0.00425727,129.004 +1250,3,0.3048,71.3,0.00425727,127.634 +1600,3,0.3048,71.3,0.00425727,126.514 +2000,3,0.3048,71.3,0.00425727,125.524 +2500,3,0.3048,71.3,0.00425727,124.024 +3150,3,0.3048,71.3,0.00425727,121.514 +4000,3,0.3048,71.3,0.00425727,120.264 +5000,3,0.3048,71.3,0.00425727,118.134 +6300,3,0.3048,71.3,0.00425727,116.134 +8000,3,0.3048,71.3,0.00425727,114.634 +10000,3,0.3048,71.3,0.00425727,110.224 +400,3,0.3048,55.5,0.00452492,126.159 +500,3,0.3048,55.5,0.00452492,128.179 +630,3,0.3048,55.5,0.00452492,129.569 +800,3,0.3048,55.5,0.00452492,129.949 +1000,3,0.3048,55.5,0.00452492,129.329 +1250,3,0.3048,55.5,0.00452492,127.329 +1600,3,0.3048,55.5,0.00452492,124.439 +2000,3,0.3048,55.5,0.00452492,123.069 +2500,3,0.3048,55.5,0.00452492,122.439 +3150,3,0.3048,55.5,0.00452492,120.189 +4000,3,0.3048,55.5,0.00452492,118.689 +5000,3,0.3048,55.5,0.00452492,117.309 +6300,3,0.3048,55.5,0.00452492,115.679 +8000,3,0.3048,55.5,0.00452492,113.799 +10000,3,0.3048,55.5,0.00452492,112.169 +315,3,0.3048,39.6,0.00495741,123.312 +400,3,0.3048,39.6,0.00495741,125.472 +500,3,0.3048,39.6,0.00495741,127.632 +630,3,0.3048,39.6,0.00495741,129.292 +800,3,0.3048,39.6,0.00495741,129.552 +1000,3,0.3048,39.6,0.00495741,128.312 +1250,3,0.3048,39.6,0.00495741,125.802 +1600,3,0.3048,39.6,0.00495741,122.782 +2000,3,0.3048,39.6,0.00495741,120.532 +2500,3,0.3048,39.6,0.00495741,120.162 +3150,3,0.3048,39.6,0.00495741,118.922 +4000,3,0.3048,39.6,0.00495741,116.792 +5000,3,0.3048,39.6,0.00495741,115.792 +6300,3,0.3048,39.6,0.00495741,114.042 +8000,3,0.3048,39.6,0.00495741,110.652 +315,3,0.3048,31.7,0.00529514,123.118 +400,3,0.3048,31.7,0.00529514,125.398 +500,3,0.3048,31.7,0.00529514,127.548 +630,3,0.3048,31.7,0.00529514,128.698 +800,3,0.3048,31.7,0.00529514,128.708 +1000,3,0.3048,31.7,0.00529514,126.838 
+1250,3,0.3048,31.7,0.00529514,124.838 +1600,3,0.3048,31.7,0.00529514,122.088 +2000,3,0.3048,31.7,0.00529514,120.088 +2500,3,0.3048,31.7,0.00529514,119.598 +3150,3,0.3048,31.7,0.00529514,118.108 +4000,3,0.3048,31.7,0.00529514,115.608 +5000,3,0.3048,31.7,0.00529514,113.858 +6300,3,0.3048,31.7,0.00529514,109.718 +250,4,0.3048,71.3,0.00497773,126.395 +315,4,0.3048,71.3,0.00497773,128.175 +400,4,0.3048,71.3,0.00497773,129.575 +500,4,0.3048,71.3,0.00497773,130.715 +630,4,0.3048,71.3,0.00497773,131.615 +800,4,0.3048,71.3,0.00497773,131.755 +1000,4,0.3048,71.3,0.00497773,131.015 +1250,4,0.3048,71.3,0.00497773,129.395 +1600,4,0.3048,71.3,0.00497773,126.645 +2000,4,0.3048,71.3,0.00497773,124.395 +2500,4,0.3048,71.3,0.00497773,123.775 +3150,4,0.3048,71.3,0.00497773,121.775 +4000,4,0.3048,71.3,0.00497773,119.535 +5000,4,0.3048,71.3,0.00497773,117.785 +6300,4,0.3048,71.3,0.00497773,116.165 +8000,4,0.3048,71.3,0.00497773,113.665 +10000,4,0.3048,71.3,0.00497773,110.905 +12500,4,0.3048,71.3,0.00497773,107.405 +250,4,0.3048,39.6,0.00579636,123.543 +315,4,0.3048,39.6,0.00579636,126.843 +400,4,0.3048,39.6,0.00579636,128.633 +500,4,0.3048,39.6,0.00579636,130.173 +630,4,0.3048,39.6,0.00579636,131.073 +800,4,0.3048,39.6,0.00579636,130.723 +1000,4,0.3048,39.6,0.00579636,128.723 +1250,4,0.3048,39.6,0.00579636,126.343 +1600,4,0.3048,39.6,0.00579636,123.213 +2000,4,0.3048,39.6,0.00579636,120.963 +2500,4,0.3048,39.6,0.00579636,120.233 +3150,4,0.3048,39.6,0.00579636,118.743 +4000,4,0.3048,39.6,0.00579636,115.863 +5000,4,0.3048,39.6,0.00579636,113.733 +1250,0,0.2286,71.3,0.00214345,128.144 +1600,0,0.2286,71.3,0.00214345,129.134 +2000,0,0.2286,71.3,0.00214345,128.244 +2500,0,0.2286,71.3,0.00214345,128.354 +3150,0,0.2286,71.3,0.00214345,127.834 +4000,0,0.2286,71.3,0.00214345,125.824 +5000,0,0.2286,71.3,0.00214345,124.304 +6300,0,0.2286,71.3,0.00214345,122.044 +8000,0,0.2286,71.3,0.00214345,118.024 +10000,0,0.2286,71.3,0.00214345,118.134 +12500,0,0.2286,71.3,0.00214345,117.624 +16000,0,0.2286,71.3,0.00214345,114.984 +20000,0,0.2286,71.3,0.00214345,114.474 +315,0,0.2286,55.5,0.00229336,119.540 +400,0,0.2286,55.5,0.00229336,121.660 +500,0,0.2286,55.5,0.00229336,123.780 +630,0,0.2286,55.5,0.00229336,126.160 +800,0,0.2286,55.5,0.00229336,127.530 +1000,0,0.2286,55.5,0.00229336,128.290 +1250,0,0.2286,55.5,0.00229336,127.910 +1600,0,0.2286,55.5,0.00229336,126.790 +2000,0,0.2286,55.5,0.00229336,126.540 +2500,0,0.2286,55.5,0.00229336,126.540 +3150,0,0.2286,55.5,0.00229336,125.160 +4000,0,0.2286,55.5,0.00229336,123.410 +5000,0,0.2286,55.5,0.00229336,122.410 +6300,0,0.2286,55.5,0.00229336,118.410 +315,0,0.2286,39.6,0.00253511,121.055 +400,0,0.2286,39.6,0.00253511,123.565 +500,0,0.2286,39.6,0.00253511,126.195 +630,0,0.2286,39.6,0.00253511,128.705 +800,0,0.2286,39.6,0.00253511,130.205 +1000,0,0.2286,39.6,0.00253511,130.435 +1250,0,0.2286,39.6,0.00253511,129.395 +1600,0,0.2286,39.6,0.00253511,127.095 +2000,0,0.2286,39.6,0.00253511,125.305 +2500,0,0.2286,39.6,0.00253511,125.025 +3150,0,0.2286,39.6,0.00253511,124.625 +4000,0,0.2286,39.6,0.00253511,123.465 +5000,0,0.2286,39.6,0.00253511,122.175 +6300,0,0.2286,39.6,0.00253511,117.465 +315,0,0.2286,31.7,0.0027238,120.595 +400,0,0.2286,31.7,0.0027238,123.635 +500,0,0.2286,31.7,0.0027238,126.675 +630,0,0.2286,31.7,0.0027238,129.465 +800,0,0.2286,31.7,0.0027238,130.725 +1000,0,0.2286,31.7,0.0027238,130.595 +1250,0,0.2286,31.7,0.0027238,128.805 +1600,0,0.2286,31.7,0.0027238,125.625 +2000,0,0.2286,31.7,0.0027238,123.455 +2500,0,0.2286,31.7,0.0027238,123.445 
+3150,0,0.2286,31.7,0.0027238,123.445 +4000,0,0.2286,31.7,0.0027238,122.035 +5000,0,0.2286,31.7,0.0027238,120.505 +6300,0,0.2286,31.7,0.0027238,116.815 +400,2,0.2286,71.3,0.00293031,125.116 +500,2,0.2286,71.3,0.00293031,126.486 +630,2,0.2286,71.3,0.00293031,127.356 +800,2,0.2286,71.3,0.00293031,128.216 +1000,2,0.2286,71.3,0.00293031,128.956 +1250,2,0.2286,71.3,0.00293031,128.816 +1600,2,0.2286,71.3,0.00293031,127.796 +2000,2,0.2286,71.3,0.00293031,126.896 +2500,2,0.2286,71.3,0.00293031,127.006 +3150,2,0.2286,71.3,0.00293031,126.116 +4000,2,0.2286,71.3,0.00293031,124.086 +5000,2,0.2286,71.3,0.00293031,122.816 +6300,2,0.2286,71.3,0.00293031,120.786 +8000,2,0.2286,71.3,0.00293031,115.996 +10000,2,0.2286,71.3,0.00293031,113.086 +400,2,0.2286,55.5,0.00313525,122.292 +500,2,0.2286,55.5,0.00313525,124.692 +630,2,0.2286,55.5,0.00313525,126.842 +800,2,0.2286,55.5,0.00313525,128.492 +1000,2,0.2286,55.5,0.00313525,129.002 +1250,2,0.2286,55.5,0.00313525,128.762 +1600,2,0.2286,55.5,0.00313525,126.752 +2000,2,0.2286,55.5,0.00313525,124.612 +2500,2,0.2286,55.5,0.00313525,123.862 +3150,2,0.2286,55.5,0.00313525,123.742 +4000,2,0.2286,55.5,0.00313525,122.232 +5000,2,0.2286,55.5,0.00313525,120.472 +6300,2,0.2286,55.5,0.00313525,118.712 +315,2,0.2286,39.6,0.00346574,120.137 +400,2,0.2286,39.6,0.00346574,122.147 +500,2,0.2286,39.6,0.00346574,125.157 +630,2,0.2286,39.6,0.00346574,127.417 +800,2,0.2286,39.6,0.00346574,129.037 +1000,2,0.2286,39.6,0.00346574,129.147 +1250,2,0.2286,39.6,0.00346574,128.257 +1600,2,0.2286,39.6,0.00346574,125.837 +2000,2,0.2286,39.6,0.00346574,122.797 +2500,2,0.2286,39.6,0.00346574,121.397 +3150,2,0.2286,39.6,0.00346574,121.627 +4000,2,0.2286,39.6,0.00346574,120.227 +5000,2,0.2286,39.6,0.00346574,118.827 +6300,2,0.2286,39.6,0.00346574,116.417 +315,2,0.2286,31.7,0.00372371,120.147 +400,2,0.2286,31.7,0.00372371,123.417 +500,2,0.2286,31.7,0.00372371,126.677 +630,2,0.2286,31.7,0.00372371,129.057 +800,2,0.2286,31.7,0.00372371,130.307 +1000,2,0.2286,31.7,0.00372371,130.307 +1250,2,0.2286,31.7,0.00372371,128.677 +1600,2,0.2286,31.7,0.00372371,125.797 +2000,2,0.2286,31.7,0.00372371,123.037 +2500,2,0.2286,31.7,0.00372371,121.407 +3150,2,0.2286,31.7,0.00372371,121.527 +4000,2,0.2286,31.7,0.00372371,120.527 +5000,2,0.2286,31.7,0.00372371,118.267 +6300,2,0.2286,31.7,0.00372371,115.137 +500,4,0.2286,71.3,0.00400603,126.758 +630,4,0.2286,71.3,0.00400603,129.038 +800,4,0.2286,71.3,0.00400603,130.688 +1000,4,0.2286,71.3,0.00400603,131.708 +1250,4,0.2286,71.3,0.00400603,131.718 +1600,4,0.2286,71.3,0.00400603,129.468 +2000,4,0.2286,71.3,0.00400603,126.218 +2500,4,0.2286,71.3,0.00400603,124.338 +3150,4,0.2286,71.3,0.00400603,124.108 +4000,4,0.2286,71.3,0.00400603,121.728 +5000,4,0.2286,71.3,0.00400603,121.118 +6300,4,0.2286,71.3,0.00400603,118.618 +8000,4,0.2286,71.3,0.00400603,112.848 +10000,4,0.2286,71.3,0.00400603,113.108 +12500,4,0.2286,71.3,0.00400603,114.258 +16000,4,0.2286,71.3,0.00400603,112.768 +20000,4,0.2286,71.3,0.00400603,109.638 +400,4,0.2286,55.5,0.0042862,123.274 +500,4,0.2286,55.5,0.0042862,127.314 +630,4,0.2286,55.5,0.0042862,129.964 +800,4,0.2286,55.5,0.0042862,131.864 +1000,4,0.2286,55.5,0.0042862,132.134 +1250,4,0.2286,55.5,0.0042862,131.264 +1600,4,0.2286,55.5,0.0042862,128.264 +2000,4,0.2286,55.5,0.0042862,124.254 +2500,4,0.2286,55.5,0.0042862,122.384 +3150,4,0.2286,55.5,0.0042862,122.394 +4000,4,0.2286,55.5,0.0042862,120.654 +5000,4,0.2286,55.5,0.0042862,120.034 +6300,4,0.2286,55.5,0.0042862,117.154 +8000,4,0.2286,55.5,0.0042862,112.524 +315,4,0.2286,39.6,0.00473801,122.229 
+400,4,0.2286,39.6,0.00473801,123.879 +500,4,0.2286,39.6,0.00473801,127.039 +630,4,0.2286,39.6,0.00473801,129.579 +800,4,0.2286,39.6,0.00473801,130.469 +1000,4,0.2286,39.6,0.00473801,129.969 +1250,4,0.2286,39.6,0.00473801,128.339 +1600,4,0.2286,39.6,0.00473801,125.319 +2000,4,0.2286,39.6,0.00473801,121.659 +2500,4,0.2286,39.6,0.00473801,119.649 +3150,4,0.2286,39.6,0.00473801,120.419 +4000,4,0.2286,39.6,0.00473801,119.159 +5000,4,0.2286,39.6,0.00473801,117.649 +6300,4,0.2286,39.6,0.00473801,114.249 +8000,4,0.2286,39.6,0.00473801,113.129 +250,4,0.2286,31.7,0.00509068,120.189 +315,4,0.2286,31.7,0.00509068,123.609 +400,4,0.2286,31.7,0.00509068,126.149 +500,4,0.2286,31.7,0.00509068,128.939 +630,4,0.2286,31.7,0.00509068,130.349 +800,4,0.2286,31.7,0.00509068,130.869 +1000,4,0.2286,31.7,0.00509068,129.869 +1250,4,0.2286,31.7,0.00509068,128.119 +1600,4,0.2286,31.7,0.00509068,125.229 +2000,4,0.2286,31.7,0.00509068,122.089 +2500,4,0.2286,31.7,0.00509068,120.209 +3150,4,0.2286,31.7,0.00509068,120.229 +4000,4,0.2286,31.7,0.00509068,118.859 +5000,4,0.2286,31.7,0.00509068,115.969 +6300,4,0.2286,31.7,0.00509068,112.699 +400,5.3,0.2286,71.3,0.0051942,127.700 +500,5.3,0.2286,71.3,0.0051942,129.880 +630,5.3,0.2286,71.3,0.0051942,131.800 +800,5.3,0.2286,71.3,0.0051942,133.480 +1000,5.3,0.2286,71.3,0.0051942,134.000 +1250,5.3,0.2286,71.3,0.0051942,133.380 +1600,5.3,0.2286,71.3,0.0051942,130.460 +2000,5.3,0.2286,71.3,0.0051942,125.890 +2500,5.3,0.2286,71.3,0.0051942,123.740 +3150,5.3,0.2286,71.3,0.0051942,123.120 +4000,5.3,0.2286,71.3,0.0051942,120.330 +5000,5.3,0.2286,71.3,0.0051942,118.050 +6300,5.3,0.2286,71.3,0.0051942,116.920 +8000,5.3,0.2286,71.3,0.0051942,114.900 +10000,5.3,0.2286,71.3,0.0051942,111.350 +250,5.3,0.2286,39.6,0.00614329,127.011 +315,5.3,0.2286,39.6,0.00614329,129.691 +400,5.3,0.2286,39.6,0.00614329,131.221 +500,5.3,0.2286,39.6,0.00614329,132.251 +630,5.3,0.2286,39.6,0.00614329,132.011 +800,5.3,0.2286,39.6,0.00614329,129.491 +1000,5.3,0.2286,39.6,0.00614329,125.581 +1250,5.3,0.2286,39.6,0.00614329,125.721 +1600,5.3,0.2286,39.6,0.00614329,123.081 +2000,5.3,0.2286,39.6,0.00614329,117.911 +2500,5.3,0.2286,39.6,0.00614329,116.151 +3150,5.3,0.2286,39.6,0.00614329,118.441 +4000,5.3,0.2286,39.6,0.00614329,115.801 +5000,5.3,0.2286,39.6,0.00614329,115.311 +6300,5.3,0.2286,39.6,0.00614329,112.541 +200,7.3,0.2286,71.3,0.0104404,138.758 +250,7.3,0.2286,71.3,0.0104404,139.918 +315,7.3,0.2286,71.3,0.0104404,139.808 +400,7.3,0.2286,71.3,0.0104404,139.438 +500,7.3,0.2286,71.3,0.0104404,136.798 +630,7.3,0.2286,71.3,0.0104404,133.768 +800,7.3,0.2286,71.3,0.0104404,130.748 +1000,7.3,0.2286,71.3,0.0104404,126.838 +1250,7.3,0.2286,71.3,0.0104404,127.358 +1600,7.3,0.2286,71.3,0.0104404,125.728 +2000,7.3,0.2286,71.3,0.0104404,122.708 +2500,7.3,0.2286,71.3,0.0104404,122.088 +3150,7.3,0.2286,71.3,0.0104404,120.458 +4000,7.3,0.2286,71.3,0.0104404,119.208 +5000,7.3,0.2286,71.3,0.0104404,115.298 +6300,7.3,0.2286,71.3,0.0104404,115.818 +200,7.3,0.2286,55.5,0.0111706,135.234 +250,7.3,0.2286,55.5,0.0111706,136.384 +315,7.3,0.2286,55.5,0.0111706,136.284 +400,7.3,0.2286,55.5,0.0111706,135.924 +500,7.3,0.2286,55.5,0.0111706,133.174 +630,7.3,0.2286,55.5,0.0111706,130.934 +800,7.3,0.2286,55.5,0.0111706,128.444 +1000,7.3,0.2286,55.5,0.0111706,125.194 +1250,7.3,0.2286,55.5,0.0111706,125.724 +1600,7.3,0.2286,55.5,0.0111706,123.354 +2000,7.3,0.2286,55.5,0.0111706,120.354 +2500,7.3,0.2286,55.5,0.0111706,118.994 +3150,7.3,0.2286,55.5,0.0111706,117.134 +4000,7.3,0.2286,55.5,0.0111706,117.284 +5000,7.3,0.2286,55.5,0.0111706,113.144 
+6300,7.3,0.2286,55.5,0.0111706,111.534 +200,7.3,0.2286,39.6,0.0123481,130.989 +250,7.3,0.2286,39.6,0.0123481,131.889 +315,7.3,0.2286,39.6,0.0123481,132.149 +400,7.3,0.2286,39.6,0.0123481,132.039 +500,7.3,0.2286,39.6,0.0123481,130.299 +630,7.3,0.2286,39.6,0.0123481,128.929 +800,7.3,0.2286,39.6,0.0123481,126.299 +1000,7.3,0.2286,39.6,0.0123481,122.539 +1250,7.3,0.2286,39.6,0.0123481,123.189 +1600,7.3,0.2286,39.6,0.0123481,121.059 +2000,7.3,0.2286,39.6,0.0123481,117.809 +2500,7.3,0.2286,39.6,0.0123481,116.559 +3150,7.3,0.2286,39.6,0.0123481,114.309 +4000,7.3,0.2286,39.6,0.0123481,114.079 +5000,7.3,0.2286,39.6,0.0123481,111.959 +6300,7.3,0.2286,39.6,0.0123481,110.839 +200,7.3,0.2286,31.7,0.0132672,128.679 +250,7.3,0.2286,31.7,0.0132672,130.089 +315,7.3,0.2286,31.7,0.0132672,130.239 +400,7.3,0.2286,31.7,0.0132672,130.269 +500,7.3,0.2286,31.7,0.0132672,128.169 +630,7.3,0.2286,31.7,0.0132672,126.189 +800,7.3,0.2286,31.7,0.0132672,123.209 +1000,7.3,0.2286,31.7,0.0132672,119.099 +1250,7.3,0.2286,31.7,0.0132672,120.509 +1600,7.3,0.2286,31.7,0.0132672,119.039 +2000,7.3,0.2286,31.7,0.0132672,115.309 +2500,7.3,0.2286,31.7,0.0132672,114.709 +3150,7.3,0.2286,31.7,0.0132672,113.229 +4000,7.3,0.2286,31.7,0.0132672,112.639 +5000,7.3,0.2286,31.7,0.0132672,111.029 +6300,7.3,0.2286,31.7,0.0132672,110.689 +800,0,0.1524,71.3,0.0015988,125.817 +1000,0,0.1524,71.3,0.0015988,127.307 +1250,0,0.1524,71.3,0.0015988,128.927 +1600,0,0.1524,71.3,0.0015988,129.667 +2000,0,0.1524,71.3,0.0015988,128.647 +2500,0,0.1524,71.3,0.0015988,128.127 +3150,0,0.1524,71.3,0.0015988,129.377 +4000,0,0.1524,71.3,0.0015988,128.857 +5000,0,0.1524,71.3,0.0015988,126.457 +6300,0,0.1524,71.3,0.0015988,125.427 +8000,0,0.1524,71.3,0.0015988,122.527 +10000,0,0.1524,71.3,0.0015988,120.247 +12500,0,0.1524,71.3,0.0015988,117.087 +16000,0,0.1524,71.3,0.0015988,113.297 +500,0,0.1524,55.5,0.00172668,120.573 +630,0,0.1524,55.5,0.00172668,123.583 +800,0,0.1524,55.5,0.00172668,126.713 +1000,0,0.1524,55.5,0.00172668,128.583 +1250,0,0.1524,55.5,0.00172668,129.953 +1600,0,0.1524,55.5,0.00172668,130.183 +2000,0,0.1524,55.5,0.00172668,129.673 +2500,0,0.1524,55.5,0.00172668,127.763 +3150,0,0.1524,55.5,0.00172668,127.753 +4000,0,0.1524,55.5,0.00172668,127.233 +5000,0,0.1524,55.5,0.00172668,125.203 +6300,0,0.1524,55.5,0.00172668,123.303 +8000,0,0.1524,55.5,0.00172668,121.903 +10000,0,0.1524,55.5,0.00172668,119.253 +12500,0,0.1524,55.5,0.00172668,117.093 +16000,0,0.1524,55.5,0.00172668,112.803 +500,0,0.1524,39.6,0.00193287,119.513 +630,0,0.1524,39.6,0.00193287,124.403 +800,0,0.1524,39.6,0.00193287,127.903 +1000,0,0.1524,39.6,0.00193287,130.033 +1250,0,0.1524,39.6,0.00193287,131.023 +1600,0,0.1524,39.6,0.00193287,131.013 +2000,0,0.1524,39.6,0.00193287,129.633 +2500,0,0.1524,39.6,0.00193287,126.863 +3150,0,0.1524,39.6,0.00193287,125.603 +4000,0,0.1524,39.6,0.00193287,125.343 +5000,0,0.1524,39.6,0.00193287,123.453 +6300,0,0.1524,39.6,0.00193287,121.313 +8000,0,0.1524,39.6,0.00193287,120.553 +10000,0,0.1524,39.6,0.00193287,115.413 +500,0,0.1524,31.7,0.00209405,121.617 +630,0,0.1524,31.7,0.00209405,125.997 +800,0,0.1524,31.7,0.00209405,129.117 +1000,0,0.1524,31.7,0.00209405,130.987 +1250,0,0.1524,31.7,0.00209405,131.467 +1600,0,0.1524,31.7,0.00209405,130.817 +2000,0,0.1524,31.7,0.00209405,128.907 +2500,0,0.1524,31.7,0.00209405,125.867 +3150,0,0.1524,31.7,0.00209405,124.207 +4000,0,0.1524,31.7,0.00209405,123.807 +5000,0,0.1524,31.7,0.00209405,122.397 +6300,0,0.1524,31.7,0.00209405,119.737 +8000,0,0.1524,31.7,0.00209405,117.957 +630,2.7,0.1524,71.3,0.00243851,127.404 
+800,2.7,0.1524,71.3,0.00243851,127.394 +1000,2.7,0.1524,71.3,0.00243851,128.774 +1250,2.7,0.1524,71.3,0.00243851,130.144 +1600,2.7,0.1524,71.3,0.00243851,130.644 +2000,2.7,0.1524,71.3,0.00243851,130.114 +2500,2.7,0.1524,71.3,0.00243851,128.334 +3150,2.7,0.1524,71.3,0.00243851,127.054 +4000,2.7,0.1524,71.3,0.00243851,126.534 +5000,2.7,0.1524,71.3,0.00243851,124.364 +6300,2.7,0.1524,71.3,0.00243851,121.944 +8000,2.7,0.1524,71.3,0.00243851,120.534 +10000,2.7,0.1524,71.3,0.00243851,116.724 +12500,2.7,0.1524,71.3,0.00243851,113.034 +16000,2.7,0.1524,71.3,0.00243851,110.364 +500,2.7,0.1524,39.6,0.00294804,121.009 +630,2.7,0.1524,39.6,0.00294804,125.809 +800,2.7,0.1524,39.6,0.00294804,128.829 +1000,2.7,0.1524,39.6,0.00294804,130.589 +1250,2.7,0.1524,39.6,0.00294804,130.829 +1600,2.7,0.1524,39.6,0.00294804,130.049 +2000,2.7,0.1524,39.6,0.00294804,128.139 +2500,2.7,0.1524,39.6,0.00294804,125.589 +3150,2.7,0.1524,39.6,0.00294804,122.919 +4000,2.7,0.1524,39.6,0.00294804,121.889 +5000,2.7,0.1524,39.6,0.00294804,121.499 +6300,2.7,0.1524,39.6,0.00294804,119.209 +8000,2.7,0.1524,39.6,0.00294804,116.659 +10000,2.7,0.1524,39.6,0.00294804,112.589 +12500,2.7,0.1524,39.6,0.00294804,108.649 +400,5.4,0.1524,71.3,0.00401199,124.121 +500,5.4,0.1524,71.3,0.00401199,126.291 +630,5.4,0.1524,71.3,0.00401199,128.971 +800,5.4,0.1524,71.3,0.00401199,131.281 +1000,5.4,0.1524,71.3,0.00401199,133.201 +1250,5.4,0.1524,71.3,0.00401199,134.111 +1600,5.4,0.1524,71.3,0.00401199,133.241 +2000,5.4,0.1524,71.3,0.00401199,131.111 +2500,5.4,0.1524,71.3,0.00401199,127.591 +3150,5.4,0.1524,71.3,0.00401199,123.311 +4000,5.4,0.1524,71.3,0.00401199,121.431 +5000,5.4,0.1524,71.3,0.00401199,120.061 +6300,5.4,0.1524,71.3,0.00401199,116.411 +400,5.4,0.1524,55.5,0.00433288,126.807 +500,5.4,0.1524,55.5,0.00433288,129.367 +630,5.4,0.1524,55.5,0.00433288,131.807 +800,5.4,0.1524,55.5,0.00433288,133.097 +1000,5.4,0.1524,55.5,0.00433288,132.127 +1250,5.4,0.1524,55.5,0.00433288,130.777 +1600,5.4,0.1524,55.5,0.00433288,130.567 +2000,5.4,0.1524,55.5,0.00433288,128.707 +2500,5.4,0.1524,55.5,0.00433288,124.077 +3150,5.4,0.1524,55.5,0.00433288,121.587 +4000,5.4,0.1524,55.5,0.00433288,119.737 +5000,5.4,0.1524,55.5,0.00433288,118.757 +6300,5.4,0.1524,55.5,0.00433288,117.287 +8000,5.4,0.1524,55.5,0.00433288,114.927 +315,5.4,0.1524,39.6,0.00485029,125.347 +400,5.4,0.1524,39.6,0.00485029,127.637 +500,5.4,0.1524,39.6,0.00485029,129.937 +630,5.4,0.1524,39.6,0.00485029,132.357 +800,5.4,0.1524,39.6,0.00485029,132.757 +1000,5.4,0.1524,39.6,0.00485029,130.507 +1250,5.4,0.1524,39.6,0.00485029,127.117 +1600,5.4,0.1524,39.6,0.00485029,126.267 +2000,5.4,0.1524,39.6,0.00485029,124.647 +2500,5.4,0.1524,39.6,0.00485029,120.497 +3150,5.4,0.1524,39.6,0.00485029,119.137 +4000,5.4,0.1524,39.6,0.00485029,117.137 +5000,5.4,0.1524,39.6,0.00485029,117.037 +6300,5.4,0.1524,39.6,0.00485029,116.677 +315,5.4,0.1524,31.7,0.00525474,125.741 +400,5.4,0.1524,31.7,0.00525474,127.781 +500,5.4,0.1524,31.7,0.00525474,129.681 +630,5.4,0.1524,31.7,0.00525474,131.471 +800,5.4,0.1524,31.7,0.00525474,131.491 +1000,5.4,0.1524,31.7,0.00525474,128.241 +1250,5.4,0.1524,31.7,0.00525474,123.991 +1600,5.4,0.1524,31.7,0.00525474,123.761 +2000,5.4,0.1524,31.7,0.00525474,122.771 +2500,5.4,0.1524,31.7,0.00525474,119.151 +3150,5.4,0.1524,31.7,0.00525474,118.291 +4000,5.4,0.1524,31.7,0.00525474,116.181 +5000,5.4,0.1524,31.7,0.00525474,115.691 +6300,5.4,0.1524,31.7,0.00525474,115.591 +315,7.2,0.1524,71.3,0.00752039,128.713 +400,7.2,0.1524,71.3,0.00752039,130.123 +500,7.2,0.1524,71.3,0.00752039,132.043 
+630,7.2,0.1524,71.3,0.00752039,134.853 +800,7.2,0.1524,71.3,0.00752039,136.023 +1000,7.2,0.1524,71.3,0.00752039,134.273 +1250,7.2,0.1524,71.3,0.00752039,132.513 +1600,7.2,0.1524,71.3,0.00752039,130.893 +2000,7.2,0.1524,71.3,0.00752039,128.643 +2500,7.2,0.1524,71.3,0.00752039,124.353 +3150,7.2,0.1524,71.3,0.00752039,116.783 +4000,7.2,0.1524,71.3,0.00752039,119.343 +5000,7.2,0.1524,71.3,0.00752039,118.343 +6300,7.2,0.1524,71.3,0.00752039,116.603 +8000,7.2,0.1524,71.3,0.00752039,113.333 +10000,7.2,0.1524,71.3,0.00752039,110.313 +250,7.2,0.1524,39.6,0.00909175,127.488 +315,7.2,0.1524,39.6,0.00909175,130.558 +400,7.2,0.1524,39.6,0.00909175,132.118 +500,7.2,0.1524,39.6,0.00909175,132.658 +630,7.2,0.1524,39.6,0.00909175,133.198 +800,7.2,0.1524,39.6,0.00909175,132.358 +1000,7.2,0.1524,39.6,0.00909175,128.338 +1250,7.2,0.1524,39.6,0.00909175,122.428 +1600,7.2,0.1524,39.6,0.00909175,120.058 +2000,7.2,0.1524,39.6,0.00909175,120.228 +2500,7.2,0.1524,39.6,0.00909175,117.478 +3150,7.2,0.1524,39.6,0.00909175,111.818 +4000,7.2,0.1524,39.6,0.00909175,114.258 +5000,7.2,0.1524,39.6,0.00909175,113.288 +6300,7.2,0.1524,39.6,0.00909175,112.688 +8000,7.2,0.1524,39.6,0.00909175,111.588 +10000,7.2,0.1524,39.6,0.00909175,110.868 +200,9.9,0.1524,71.3,0.0193001,134.319 +250,9.9,0.1524,71.3,0.0193001,135.329 +315,9.9,0.1524,71.3,0.0193001,135.459 +400,9.9,0.1524,71.3,0.0193001,135.079 +500,9.9,0.1524,71.3,0.0193001,131.279 +630,9.9,0.1524,71.3,0.0193001,129.889 +800,9.9,0.1524,71.3,0.0193001,128.879 +1000,9.9,0.1524,71.3,0.0193001,126.349 +1250,9.9,0.1524,71.3,0.0193001,122.679 +1600,9.9,0.1524,71.3,0.0193001,121.789 +2000,9.9,0.1524,71.3,0.0193001,120.779 +2500,9.9,0.1524,71.3,0.0193001,119.639 +3150,9.9,0.1524,71.3,0.0193001,116.849 +4000,9.9,0.1524,71.3,0.0193001,115.079 +5000,9.9,0.1524,71.3,0.0193001,114.569 +6300,9.9,0.1524,71.3,0.0193001,112.039 +200,9.9,0.1524,55.5,0.0208438,131.955 +250,9.9,0.1524,55.5,0.0208438,133.235 +315,9.9,0.1524,55.5,0.0208438,132.355 +400,9.9,0.1524,55.5,0.0208438,131.605 +500,9.9,0.1524,55.5,0.0208438,127.815 +630,9.9,0.1524,55.5,0.0208438,127.315 +800,9.9,0.1524,55.5,0.0208438,126.565 +1000,9.9,0.1524,55.5,0.0208438,124.665 +1250,9.9,0.1524,55.5,0.0208438,121.635 +1600,9.9,0.1524,55.5,0.0208438,119.875 +2000,9.9,0.1524,55.5,0.0208438,119.505 +2500,9.9,0.1524,55.5,0.0208438,118.365 +3150,9.9,0.1524,55.5,0.0208438,115.085 +4000,9.9,0.1524,55.5,0.0208438,112.945 +5000,9.9,0.1524,55.5,0.0208438,112.065 +6300,9.9,0.1524,55.5,0.0208438,110.555 +200,9.9,0.1524,39.6,0.0233328,127.315 +250,9.9,0.1524,39.6,0.0233328,128.335 +315,9.9,0.1524,39.6,0.0233328,128.595 +400,9.9,0.1524,39.6,0.0233328,128.345 +500,9.9,0.1524,39.6,0.0233328,126.835 +630,9.9,0.1524,39.6,0.0233328,126.465 +800,9.9,0.1524,39.6,0.0233328,126.345 +1000,9.9,0.1524,39.6,0.0233328,123.835 +1250,9.9,0.1524,39.6,0.0233328,120.555 +1600,9.9,0.1524,39.6,0.0233328,118.545 +2000,9.9,0.1524,39.6,0.0233328,117.925 +2500,9.9,0.1524,39.6,0.0233328,116.295 +3150,9.9,0.1524,39.6,0.0233328,113.525 +4000,9.9,0.1524,39.6,0.0233328,112.265 +5000,9.9,0.1524,39.6,0.0233328,111.135 +6300,9.9,0.1524,39.6,0.0233328,109.885 +200,9.9,0.1524,31.7,0.0252785,127.299 +250,9.9,0.1524,31.7,0.0252785,128.559 +315,9.9,0.1524,31.7,0.0252785,128.809 +400,9.9,0.1524,31.7,0.0252785,128.939 +500,9.9,0.1524,31.7,0.0252785,127.179 +630,9.9,0.1524,31.7,0.0252785,126.049 +800,9.9,0.1524,31.7,0.0252785,125.539 +1000,9.9,0.1524,31.7,0.0252785,122.149 +1250,9.9,0.1524,31.7,0.0252785,118.619 +1600,9.9,0.1524,31.7,0.0252785,117.119 
+2000,9.9,0.1524,31.7,0.0252785,116.859 +2500,9.9,0.1524,31.7,0.0252785,114.729 +3150,9.9,0.1524,31.7,0.0252785,112.209 +4000,9.9,0.1524,31.7,0.0252785,111.459 +5000,9.9,0.1524,31.7,0.0252785,109.949 +6300,9.9,0.1524,31.7,0.0252785,108.689 +200,12.6,0.1524,71.3,0.0483159,128.354 +250,12.6,0.1524,71.3,0.0483159,129.744 +315,12.6,0.1524,71.3,0.0483159,128.484 +400,12.6,0.1524,71.3,0.0483159,127.094 +500,12.6,0.1524,71.3,0.0483159,121.664 +630,12.6,0.1524,71.3,0.0483159,123.304 +800,12.6,0.1524,71.3,0.0483159,123.054 +1000,12.6,0.1524,71.3,0.0483159,122.044 +1250,12.6,0.1524,71.3,0.0483159,120.154 +1600,12.6,0.1524,71.3,0.0483159,120.534 +2000,12.6,0.1524,71.3,0.0483159,117.504 +2500,12.6,0.1524,71.3,0.0483159,115.234 +3150,12.6,0.1524,71.3,0.0483159,113.334 +4000,12.6,0.1524,71.3,0.0483159,108.034 +5000,12.6,0.1524,71.3,0.0483159,108.034 +6300,12.6,0.1524,71.3,0.0483159,107.284 +200,12.6,0.1524,39.6,0.0584113,114.750 +250,12.6,0.1524,39.6,0.0584113,115.890 +315,12.6,0.1524,39.6,0.0584113,116.020 +400,12.6,0.1524,39.6,0.0584113,115.910 +500,12.6,0.1524,39.6,0.0584113,114.900 +630,12.6,0.1524,39.6,0.0584113,116.550 +800,12.6,0.1524,39.6,0.0584113,116.560 +1000,12.6,0.1524,39.6,0.0584113,114.670 +1250,12.6,0.1524,39.6,0.0584113,112.160 +1600,12.6,0.1524,39.6,0.0584113,110.780 +2000,12.6,0.1524,39.6,0.0584113,109.520 +2500,12.6,0.1524,39.6,0.0584113,106.880 +3150,12.6,0.1524,39.6,0.0584113,106.260 +4000,12.6,0.1524,39.6,0.0584113,104.500 +5000,12.6,0.1524,39.6,0.0584113,104.130 +6300,12.6,0.1524,39.6,0.0584113,103.380 +800,0,0.0508,71.3,0.000740478,130.960 +1000,0,0.0508,71.3,0.000740478,129.450 +1250,0,0.0508,71.3,0.000740478,128.560 +1600,0,0.0508,71.3,0.000740478,129.680 +2000,0,0.0508,71.3,0.000740478,131.060 +2500,0,0.0508,71.3,0.000740478,131.310 +3150,0,0.0508,71.3,0.000740478,135.070 +4000,0,0.0508,71.3,0.000740478,134.430 +5000,0,0.0508,71.3,0.000740478,134.430 +6300,0,0.0508,71.3,0.000740478,133.040 +8000,0,0.0508,71.3,0.000740478,130.890 +10000,0,0.0508,71.3,0.000740478,128.740 +12500,0,0.0508,71.3,0.000740478,125.220 +800,0,0.0508,55.5,0.00076193,124.336 +1000,0,0.0508,55.5,0.00076193,125.586 +1250,0,0.0508,55.5,0.00076193,127.076 +1600,0,0.0508,55.5,0.00076193,128.576 +2000,0,0.0508,55.5,0.00076193,131.456 +2500,0,0.0508,55.5,0.00076193,133.956 +3150,0,0.0508,55.5,0.00076193,134.826 +4000,0,0.0508,55.5,0.00076193,134.946 +5000,0,0.0508,55.5,0.00076193,134.556 +6300,0,0.0508,55.5,0.00076193,132.796 +8000,0,0.0508,55.5,0.00076193,130.156 +10000,0,0.0508,55.5,0.00076193,127.636 +12500,0,0.0508,55.5,0.00076193,125.376 +800,0,0.0508,39.6,0.000791822,126.508 +1000,0,0.0508,39.6,0.000791822,127.638 +1250,0,0.0508,39.6,0.000791822,129.148 +1600,0,0.0508,39.6,0.000791822,130.908 +2000,0,0.0508,39.6,0.000791822,132.918 +2500,0,0.0508,39.6,0.000791822,134.938 +3150,0,0.0508,39.6,0.000791822,135.938 +4000,0,0.0508,39.6,0.000791822,135.308 +5000,0,0.0508,39.6,0.000791822,134.308 +6300,0,0.0508,39.6,0.000791822,131.918 +8000,0,0.0508,39.6,0.000791822,128.518 +10000,0,0.0508,39.6,0.000791822,125.998 +12500,0,0.0508,39.6,0.000791822,123.988 +800,0,0.0508,31.7,0.000812164,122.790 +1000,0,0.0508,31.7,0.000812164,126.780 +1250,0,0.0508,31.7,0.000812164,129.270 +1600,0,0.0508,31.7,0.000812164,131.010 +2000,0,0.0508,31.7,0.000812164,133.010 +2500,0,0.0508,31.7,0.000812164,134.870 +3150,0,0.0508,31.7,0.000812164,135.490 +4000,0,0.0508,31.7,0.000812164,134.110 +5000,0,0.0508,31.7,0.000812164,133.230 +6300,0,0.0508,31.7,0.000812164,130.340 +8000,0,0.0508,31.7,0.000812164,126.590 
+10000,0,0.0508,31.7,0.000812164,122.450 +12500,0,0.0508,31.7,0.000812164,119.070 +1600,4.2,0.0508,71.3,0.00142788,124.318 +2000,4.2,0.0508,71.3,0.00142788,129.848 +2500,4.2,0.0508,71.3,0.00142788,131.978 +3150,4.2,0.0508,71.3,0.00142788,133.728 +4000,4.2,0.0508,71.3,0.00142788,133.598 +5000,4.2,0.0508,71.3,0.00142788,132.828 +6300,4.2,0.0508,71.3,0.00142788,129.308 +8000,4.2,0.0508,71.3,0.00142788,125.268 +10000,4.2,0.0508,71.3,0.00142788,121.238 +12500,4.2,0.0508,71.3,0.00142788,117.328 +1000,4.2,0.0508,39.6,0.00152689,125.647 +1250,4.2,0.0508,39.6,0.00152689,128.427 +1600,4.2,0.0508,39.6,0.00152689,130.197 +2000,4.2,0.0508,39.6,0.00152689,132.587 +2500,4.2,0.0508,39.6,0.00152689,133.847 +3150,4.2,0.0508,39.6,0.00152689,133.587 +4000,4.2,0.0508,39.6,0.00152689,131.807 +5000,4.2,0.0508,39.6,0.00152689,129.777 +6300,4.2,0.0508,39.6,0.00152689,125.717 +8000,4.2,0.0508,39.6,0.00152689,120.397 +10000,4.2,0.0508,39.6,0.00152689,116.967 +800,8.4,0.0508,71.3,0.00529514,127.556 +1000,8.4,0.0508,71.3,0.00529514,129.946 +1250,8.4,0.0508,71.3,0.00529514,132.086 +1600,8.4,0.0508,71.3,0.00529514,133.846 +2000,8.4,0.0508,71.3,0.00529514,134.476 +2500,8.4,0.0508,71.3,0.00529514,134.226 +3150,8.4,0.0508,71.3,0.00529514,131.966 +4000,8.4,0.0508,71.3,0.00529514,126.926 +5000,8.4,0.0508,71.3,0.00529514,121.146 +400,8.4,0.0508,55.5,0.00544854,121.582 +500,8.4,0.0508,55.5,0.00544854,123.742 +630,8.4,0.0508,55.5,0.00544854,126.152 +800,8.4,0.0508,55.5,0.00544854,128.562 +1000,8.4,0.0508,55.5,0.00544854,130.722 +1250,8.4,0.0508,55.5,0.00544854,132.252 +1600,8.4,0.0508,55.5,0.00544854,133.032 +2000,8.4,0.0508,55.5,0.00544854,133.042 +2500,8.4,0.0508,55.5,0.00544854,131.542 +3150,8.4,0.0508,55.5,0.00544854,128.402 +4000,8.4,0.0508,55.5,0.00544854,122.612 +5000,8.4,0.0508,55.5,0.00544854,115.812 +400,8.4,0.0508,39.6,0.00566229,120.015 +500,8.4,0.0508,39.6,0.00566229,122.905 +630,8.4,0.0508,39.6,0.00566229,126.045 +800,8.4,0.0508,39.6,0.00566229,128.435 +1000,8.4,0.0508,39.6,0.00566229,130.195 +1250,8.4,0.0508,39.6,0.00566229,131.205 +1600,8.4,0.0508,39.6,0.00566229,130.965 +2000,8.4,0.0508,39.6,0.00566229,129.965 +2500,8.4,0.0508,39.6,0.00566229,127.465 +3150,8.4,0.0508,39.6,0.00566229,123.965 +4000,8.4,0.0508,39.6,0.00566229,118.955 +400,8.4,0.0508,31.7,0.00580776,120.076 +500,8.4,0.0508,31.7,0.00580776,122.966 +630,8.4,0.0508,31.7,0.00580776,125.856 +800,8.4,0.0508,31.7,0.00580776,128.246 +1000,8.4,0.0508,31.7,0.00580776,129.516 +1250,8.4,0.0508,31.7,0.00580776,130.156 +1600,8.4,0.0508,31.7,0.00580776,129.296 +2000,8.4,0.0508,31.7,0.00580776,127.686 +2500,8.4,0.0508,31.7,0.00580776,125.576 +3150,8.4,0.0508,31.7,0.00580776,122.086 +4000,8.4,0.0508,31.7,0.00580776,118.106 +200,11.2,0.0508,71.3,0.014072,125.941 +250,11.2,0.0508,71.3,0.014072,127.101 +315,11.2,0.0508,71.3,0.014072,128.381 +400,11.2,0.0508,71.3,0.014072,129.281 +500,11.2,0.0508,71.3,0.014072,130.311 +630,11.2,0.0508,71.3,0.014072,133.611 +800,11.2,0.0508,71.3,0.014072,136.031 +1000,11.2,0.0508,71.3,0.014072,136.941 +1250,11.2,0.0508,71.3,0.014072,136.191 +1600,11.2,0.0508,71.3,0.014072,135.191 +2000,11.2,0.0508,71.3,0.014072,133.311 +2500,11.2,0.0508,71.3,0.014072,130.541 +3150,11.2,0.0508,71.3,0.014072,127.141 +4000,11.2,0.0508,71.3,0.014072,122.471 +200,11.2,0.0508,39.6,0.0150478,125.010 +250,11.2,0.0508,39.6,0.0150478,126.430 +315,11.2,0.0508,39.6,0.0150478,128.990 +400,11.2,0.0508,39.6,0.0150478,130.670 +500,11.2,0.0508,39.6,0.0150478,131.960 +630,11.2,0.0508,39.6,0.0150478,133.130 +800,11.2,0.0508,39.6,0.0150478,133.790 
+1000,11.2,0.0508,39.6,0.0150478,132.430 +1250,11.2,0.0508,39.6,0.0150478,130.050 +1600,11.2,0.0508,39.6,0.0150478,126.540 +2000,11.2,0.0508,39.6,0.0150478,124.420 +2500,11.2,0.0508,39.6,0.0150478,122.170 +3150,11.2,0.0508,39.6,0.0150478,119.670 +4000,11.2,0.0508,39.6,0.0150478,115.520 +200,15.4,0.0508,71.3,0.0264269,123.595 +250,15.4,0.0508,71.3,0.0264269,124.835 +315,15.4,0.0508,71.3,0.0264269,126.195 +400,15.4,0.0508,71.3,0.0264269,126.805 +500,15.4,0.0508,71.3,0.0264269,127.285 +630,15.4,0.0508,71.3,0.0264269,129.645 +800,15.4,0.0508,71.3,0.0264269,131.515 +1000,15.4,0.0508,71.3,0.0264269,131.865 +1250,15.4,0.0508,71.3,0.0264269,130.845 +1600,15.4,0.0508,71.3,0.0264269,130.065 +2000,15.4,0.0508,71.3,0.0264269,129.285 +2500,15.4,0.0508,71.3,0.0264269,127.625 +3150,15.4,0.0508,71.3,0.0264269,125.715 +4000,15.4,0.0508,71.3,0.0264269,122.675 +5000,15.4,0.0508,71.3,0.0264269,119.135 +6300,15.4,0.0508,71.3,0.0264269,115.215 +8000,15.4,0.0508,71.3,0.0264269,112.675 +200,15.4,0.0508,55.5,0.0271925,122.940 +250,15.4,0.0508,55.5,0.0271925,124.170 +315,15.4,0.0508,55.5,0.0271925,125.390 +400,15.4,0.0508,55.5,0.0271925,126.500 +500,15.4,0.0508,55.5,0.0271925,127.220 +630,15.4,0.0508,55.5,0.0271925,129.330 +800,15.4,0.0508,55.5,0.0271925,130.430 +1000,15.4,0.0508,55.5,0.0271925,130.400 +1250,15.4,0.0508,55.5,0.0271925,130.000 +1600,15.4,0.0508,55.5,0.0271925,128.200 +2000,15.4,0.0508,55.5,0.0271925,127.040 +2500,15.4,0.0508,55.5,0.0271925,125.630 +3150,15.4,0.0508,55.5,0.0271925,123.460 +4000,15.4,0.0508,55.5,0.0271925,120.920 +5000,15.4,0.0508,55.5,0.0271925,117.110 +6300,15.4,0.0508,55.5,0.0271925,112.930 +200,15.4,0.0508,39.6,0.0282593,121.783 +250,15.4,0.0508,39.6,0.0282593,122.893 +315,15.4,0.0508,39.6,0.0282593,124.493 +400,15.4,0.0508,39.6,0.0282593,125.353 +500,15.4,0.0508,39.6,0.0282593,125.963 +630,15.4,0.0508,39.6,0.0282593,127.443 +800,15.4,0.0508,39.6,0.0282593,128.423 +1000,15.4,0.0508,39.6,0.0282593,127.893 +1250,15.4,0.0508,39.6,0.0282593,126.743 +1600,15.4,0.0508,39.6,0.0282593,124.843 +2000,15.4,0.0508,39.6,0.0282593,123.443 +2500,15.4,0.0508,39.6,0.0282593,122.413 +3150,15.4,0.0508,39.6,0.0282593,120.513 +4000,15.4,0.0508,39.6,0.0282593,118.113 +5000,15.4,0.0508,39.6,0.0282593,114.453 +6300,15.4,0.0508,39.6,0.0282593,109.663 +200,15.4,0.0508,31.7,0.0289853,119.975 +250,15.4,0.0508,31.7,0.0289853,121.225 +315,15.4,0.0508,31.7,0.0289853,122.845 +400,15.4,0.0508,31.7,0.0289853,123.705 +500,15.4,0.0508,31.7,0.0289853,123.695 +630,15.4,0.0508,31.7,0.0289853,124.685 +800,15.4,0.0508,31.7,0.0289853,125.555 +1000,15.4,0.0508,31.7,0.0289853,124.525 +1250,15.4,0.0508,31.7,0.0289853,123.255 +1600,15.4,0.0508,31.7,0.0289853,121.485 +2000,15.4,0.0508,31.7,0.0289853,120.835 +2500,15.4,0.0508,31.7,0.0289853,119.945 +3150,15.4,0.0508,31.7,0.0289853,118.045 +4000,15.4,0.0508,31.7,0.0289853,115.635 +5000,15.4,0.0508,31.7,0.0289853,112.355 +6300,15.4,0.0508,31.7,0.0289853,108.185 +200,19.7,0.0508,71.3,0.0341183,118.005 +250,19.7,0.0508,71.3,0.0341183,119.115 +315,19.7,0.0508,71.3,0.0341183,121.235 +400,19.7,0.0508,71.3,0.0341183,123.865 +500,19.7,0.0508,71.3,0.0341183,126.995 +630,19.7,0.0508,71.3,0.0341183,128.365 +800,19.7,0.0508,71.3,0.0341183,124.555 +1000,19.7,0.0508,71.3,0.0341183,121.885 +1250,19.7,0.0508,71.3,0.0341183,121.485 +1600,19.7,0.0508,71.3,0.0341183,120.575 +2000,19.7,0.0508,71.3,0.0341183,120.055 +2500,19.7,0.0508,71.3,0.0341183,118.385 +3150,19.7,0.0508,71.3,0.0341183,116.225 +4000,19.7,0.0508,71.3,0.0341183,113.045 +200,19.7,0.0508,39.6,0.036484,125.974 
+250,19.7,0.0508,39.6,0.036484,127.224 +315,19.7,0.0508,39.6,0.036484,129.864 +400,19.7,0.0508,39.6,0.036484,130.614 +500,19.7,0.0508,39.6,0.036484,128.444 +630,19.7,0.0508,39.6,0.036484,120.324 +800,19.7,0.0508,39.6,0.036484,119.174 +1000,19.7,0.0508,39.6,0.036484,118.904 +1250,19.7,0.0508,39.6,0.036484,118.634 +1600,19.7,0.0508,39.6,0.036484,117.604 +2000,19.7,0.0508,39.6,0.036484,117.724 +2500,19.7,0.0508,39.6,0.036484,116.184 +3150,19.7,0.0508,39.6,0.036484,113.004 +4000,19.7,0.0508,39.6,0.036484,108.684 +2500,0,0.0254,71.3,0.000400682,133.707 +3150,0,0.0254,71.3,0.000400682,137.007 +4000,0,0.0254,71.3,0.000400682,138.557 +5000,0,0.0254,71.3,0.000400682,136.837 +6300,0,0.0254,71.3,0.000400682,134.987 +8000,0,0.0254,71.3,0.000400682,129.867 +10000,0,0.0254,71.3,0.000400682,130.787 +12500,0,0.0254,71.3,0.000400682,133.207 +16000,0,0.0254,71.3,0.000400682,130.477 +20000,0,0.0254,71.3,0.000400682,123.217 +2000,0,0.0254,55.5,0.00041229,127.623 +2500,0,0.0254,55.5,0.00041229,130.073 +3150,0,0.0254,55.5,0.00041229,130.503 +4000,0,0.0254,55.5,0.00041229,133.223 +5000,0,0.0254,55.5,0.00041229,135.803 +6300,0,0.0254,55.5,0.00041229,136.103 +8000,0,0.0254,55.5,0.00041229,136.163 +10000,0,0.0254,55.5,0.00041229,134.563 +12500,0,0.0254,55.5,0.00041229,131.453 +16000,0,0.0254,55.5,0.00041229,125.683 +20000,0,0.0254,55.5,0.00041229,121.933 +1600,0,0.0254,39.6,0.000428464,124.156 +2000,0,0.0254,39.6,0.000428464,130.026 +2500,0,0.0254,39.6,0.000428464,131.836 +3150,0,0.0254,39.6,0.000428464,133.276 +4000,0,0.0254,39.6,0.000428464,135.346 +5000,0,0.0254,39.6,0.000428464,136.536 +6300,0,0.0254,39.6,0.000428464,136.826 +8000,0,0.0254,39.6,0.000428464,135.866 +10000,0,0.0254,39.6,0.000428464,133.376 +12500,0,0.0254,39.6,0.000428464,129.116 +16000,0,0.0254,39.6,0.000428464,124.986 +1000,0,0.0254,31.7,0.000439472,125.127 +1250,0,0.0254,31.7,0.000439472,127.947 +1600,0,0.0254,31.7,0.000439472,129.267 +2000,0,0.0254,31.7,0.000439472,130.697 +2500,0,0.0254,31.7,0.000439472,132.897 +3150,0,0.0254,31.7,0.000439472,135.227 +4000,0,0.0254,31.7,0.000439472,137.047 +5000,0,0.0254,31.7,0.000439472,138.607 +6300,0,0.0254,31.7,0.000439472,138.537 +8000,0,0.0254,31.7,0.000439472,137.207 +10000,0,0.0254,31.7,0.000439472,134.227 +12500,0,0.0254,31.7,0.000439472,128.977 +16000,0,0.0254,31.7,0.000439472,125.627 +2000,4.8,0.0254,71.3,0.000848633,128.398 +2500,4.8,0.0254,71.3,0.000848633,130.828 +3150,4.8,0.0254,71.3,0.000848633,133.378 +4000,4.8,0.0254,71.3,0.000848633,134.928 +5000,4.8,0.0254,71.3,0.000848633,135.468 +6300,4.8,0.0254,71.3,0.000848633,134.498 +8000,4.8,0.0254,71.3,0.000848633,131.518 +10000,4.8,0.0254,71.3,0.000848633,127.398 +12500,4.8,0.0254,71.3,0.000848633,127.688 +16000,4.8,0.0254,71.3,0.000848633,124.208 +20000,4.8,0.0254,71.3,0.000848633,119.708 +1600,4.8,0.0254,55.5,0.000873218,121.474 +2000,4.8,0.0254,55.5,0.000873218,125.054 +2500,4.8,0.0254,55.5,0.000873218,129.144 +3150,4.8,0.0254,55.5,0.000873218,132.354 +4000,4.8,0.0254,55.5,0.000873218,133.924 +5000,4.8,0.0254,55.5,0.000873218,135.484 +6300,4.8,0.0254,55.5,0.000873218,135.164 +8000,4.8,0.0254,55.5,0.000873218,132.184 +10000,4.8,0.0254,55.5,0.000873218,126.944 +12500,4.8,0.0254,55.5,0.000873218,125.094 +16000,4.8,0.0254,55.5,0.000873218,124.394 +20000,4.8,0.0254,55.5,0.000873218,121.284 +500,4.8,0.0254,39.6,0.000907475,116.366 +630,4.8,0.0254,39.6,0.000907475,118.696 +800,4.8,0.0254,39.6,0.000907475,120.766 +1000,4.8,0.0254,39.6,0.000907475,122.956 +1250,4.8,0.0254,39.6,0.000907475,125.026 +1600,4.8,0.0254,39.6,0.000907475,125.966 
+2000,4.8,0.0254,39.6,0.000907475,128.916 +2500,4.8,0.0254,39.6,0.000907475,131.236 +3150,4.8,0.0254,39.6,0.000907475,133.436 +4000,4.8,0.0254,39.6,0.000907475,134.996 +5000,4.8,0.0254,39.6,0.000907475,135.426 +6300,4.8,0.0254,39.6,0.000907475,134.336 +8000,4.8,0.0254,39.6,0.000907475,131.346 +10000,4.8,0.0254,39.6,0.000907475,126.066 +500,4.8,0.0254,31.7,0.000930789,116.128 +630,4.8,0.0254,31.7,0.000930789,120.078 +800,4.8,0.0254,31.7,0.000930789,122.648 +1000,4.8,0.0254,31.7,0.000930789,125.348 +1250,4.8,0.0254,31.7,0.000930789,127.408 +1600,4.8,0.0254,31.7,0.000930789,128.718 +2000,4.8,0.0254,31.7,0.000930789,130.148 +2500,4.8,0.0254,31.7,0.000930789,132.588 +3150,4.8,0.0254,31.7,0.000930789,134.268 +4000,4.8,0.0254,31.7,0.000930789,135.328 +5000,4.8,0.0254,31.7,0.000930789,135.248 +6300,4.8,0.0254,31.7,0.000930789,132.898 +8000,4.8,0.0254,31.7,0.000930789,127.008 +630,9.5,0.0254,71.3,0.00420654,125.726 +800,9.5,0.0254,71.3,0.00420654,127.206 +1000,9.5,0.0254,71.3,0.00420654,129.556 +1250,9.5,0.0254,71.3,0.00420654,131.656 +1600,9.5,0.0254,71.3,0.00420654,133.756 +2000,9.5,0.0254,71.3,0.00420654,134.976 +2500,9.5,0.0254,71.3,0.00420654,135.956 +3150,9.5,0.0254,71.3,0.00420654,136.166 +4000,9.5,0.0254,71.3,0.00420654,134.236 +5000,9.5,0.0254,71.3,0.00420654,131.186 +6300,9.5,0.0254,71.3,0.00420654,127.246 +400,9.5,0.0254,55.5,0.0043284,120.952 +500,9.5,0.0254,55.5,0.0043284,123.082 +630,9.5,0.0254,55.5,0.0043284,125.452 +800,9.5,0.0254,55.5,0.0043284,128.082 +1000,9.5,0.0254,55.5,0.0043284,130.332 +1250,9.5,0.0254,55.5,0.0043284,132.202 +1600,9.5,0.0254,55.5,0.0043284,133.062 +2000,9.5,0.0254,55.5,0.0043284,134.052 +2500,9.5,0.0254,55.5,0.0043284,134.152 +3150,9.5,0.0254,55.5,0.0043284,133.252 +4000,9.5,0.0254,55.5,0.0043284,131.582 +5000,9.5,0.0254,55.5,0.0043284,128.412 +6300,9.5,0.0254,55.5,0.0043284,124.222 +200,9.5,0.0254,39.6,0.00449821,116.074 +250,9.5,0.0254,39.6,0.00449821,116.924 +315,9.5,0.0254,39.6,0.00449821,119.294 +400,9.5,0.0254,39.6,0.00449821,121.154 +500,9.5,0.0254,39.6,0.00449821,123.894 +630,9.5,0.0254,39.6,0.00449821,126.514 +800,9.5,0.0254,39.6,0.00449821,129.014 +1000,9.5,0.0254,39.6,0.00449821,130.374 +1250,9.5,0.0254,39.6,0.00449821,130.964 +1600,9.5,0.0254,39.6,0.00449821,131.184 +2000,9.5,0.0254,39.6,0.00449821,131.274 +2500,9.5,0.0254,39.6,0.00449821,131.234 +3150,9.5,0.0254,39.6,0.00449821,129.934 +4000,9.5,0.0254,39.6,0.00449821,127.864 +5000,9.5,0.0254,39.6,0.00449821,125.044 +6300,9.5,0.0254,39.6,0.00449821,120.324 +200,9.5,0.0254,31.7,0.00461377,119.146 +250,9.5,0.0254,31.7,0.00461377,120.136 +315,9.5,0.0254,31.7,0.00461377,122.766 +400,9.5,0.0254,31.7,0.00461377,124.756 +500,9.5,0.0254,31.7,0.00461377,126.886 +630,9.5,0.0254,31.7,0.00461377,129.006 +800,9.5,0.0254,31.7,0.00461377,130.746 +1000,9.5,0.0254,31.7,0.00461377,131.346 +1250,9.5,0.0254,31.7,0.00461377,131.446 +1600,9.5,0.0254,31.7,0.00461377,131.036 +2000,9.5,0.0254,31.7,0.00461377,130.496 +2500,9.5,0.0254,31.7,0.00461377,130.086 +3150,9.5,0.0254,31.7,0.00461377,128.536 +4000,9.5,0.0254,31.7,0.00461377,126.736 +5000,9.5,0.0254,31.7,0.00461377,124.426 +6300,9.5,0.0254,31.7,0.00461377,120.726 +250,12.7,0.0254,71.3,0.0121808,119.698 +315,12.7,0.0254,71.3,0.0121808,122.938 +400,12.7,0.0254,71.3,0.0121808,125.048 +500,12.7,0.0254,71.3,0.0121808,126.898 +630,12.7,0.0254,71.3,0.0121808,128.878 +800,12.7,0.0254,71.3,0.0121808,130.348 +1000,12.7,0.0254,71.3,0.0121808,131.698 +1250,12.7,0.0254,71.3,0.0121808,133.048 +1600,12.7,0.0254,71.3,0.0121808,134.528 +2000,12.7,0.0254,71.3,0.0121808,134.228 
+2500,12.7,0.0254,71.3,0.0121808,134.058 +3150,12.7,0.0254,71.3,0.0121808,133.758 +4000,12.7,0.0254,71.3,0.0121808,131.808 +5000,12.7,0.0254,71.3,0.0121808,128.978 +6300,12.7,0.0254,71.3,0.0121808,125.398 +8000,12.7,0.0254,71.3,0.0121808,120.538 +10000,12.7,0.0254,71.3,0.0121808,114.418 +250,12.7,0.0254,39.6,0.0130253,121.547 +315,12.7,0.0254,39.6,0.0130253,123.537 +400,12.7,0.0254,39.6,0.0130253,125.527 +500,12.7,0.0254,39.6,0.0130253,127.127 +630,12.7,0.0254,39.6,0.0130253,128.867 +800,12.7,0.0254,39.6,0.0130253,130.217 +1000,12.7,0.0254,39.6,0.0130253,130.947 +1250,12.7,0.0254,39.6,0.0130253,130.777 +1600,12.7,0.0254,39.6,0.0130253,129.977 +2000,12.7,0.0254,39.6,0.0130253,129.567 +2500,12.7,0.0254,39.6,0.0130253,129.027 +3150,12.7,0.0254,39.6,0.0130253,127.847 +4000,12.7,0.0254,39.6,0.0130253,126.537 +5000,12.7,0.0254,39.6,0.0130253,125.107 +6300,12.7,0.0254,39.6,0.0130253,123.177 +8000,12.7,0.0254,39.6,0.0130253,120.607 +10000,12.7,0.0254,39.6,0.0130253,116.017 +200,17.4,0.0254,71.3,0.016104,112.506 +250,17.4,0.0254,71.3,0.016104,113.796 +315,17.4,0.0254,71.3,0.016104,115.846 +400,17.4,0.0254,71.3,0.016104,117.396 +500,17.4,0.0254,71.3,0.016104,119.806 +630,17.4,0.0254,71.3,0.016104,122.606 +800,17.4,0.0254,71.3,0.016104,124.276 +1000,17.4,0.0254,71.3,0.016104,125.816 +1250,17.4,0.0254,71.3,0.016104,126.356 +1600,17.4,0.0254,71.3,0.016104,126.406 +2000,17.4,0.0254,71.3,0.016104,126.826 +2500,17.4,0.0254,71.3,0.016104,126.746 +3150,17.4,0.0254,71.3,0.016104,126.536 +4000,17.4,0.0254,71.3,0.016104,125.586 +5000,17.4,0.0254,71.3,0.016104,123.126 +6300,17.4,0.0254,71.3,0.016104,119.916 +8000,17.4,0.0254,71.3,0.016104,115.466 +200,17.4,0.0254,55.5,0.0165706,109.951 +250,17.4,0.0254,55.5,0.0165706,110.491 +315,17.4,0.0254,55.5,0.0165706,111.911 +400,17.4,0.0254,55.5,0.0165706,115.461 +500,17.4,0.0254,55.5,0.0165706,119.621 +630,17.4,0.0254,55.5,0.0165706,122.411 +800,17.4,0.0254,55.5,0.0165706,123.091 +1000,17.4,0.0254,55.5,0.0165706,126.001 +1250,17.4,0.0254,55.5,0.0165706,129.301 +1600,17.4,0.0254,55.5,0.0165706,126.471 +2000,17.4,0.0254,55.5,0.0165706,125.261 +2500,17.4,0.0254,55.5,0.0165706,124.931 +3150,17.4,0.0254,55.5,0.0165706,124.101 +4000,17.4,0.0254,55.5,0.0165706,121.771 +5000,17.4,0.0254,55.5,0.0165706,118.941 +6300,17.4,0.0254,55.5,0.0165706,114.861 +200,17.4,0.0254,39.6,0.0172206,114.044 +250,17.4,0.0254,39.6,0.0172206,114.714 +315,17.4,0.0254,39.6,0.0172206,115.144 +400,17.4,0.0254,39.6,0.0172206,115.444 +500,17.4,0.0254,39.6,0.0172206,117.514 +630,17.4,0.0254,39.6,0.0172206,124.514 +800,17.4,0.0254,39.6,0.0172206,135.324 +1000,17.4,0.0254,39.6,0.0172206,138.274 +1250,17.4,0.0254,39.6,0.0172206,131.364 +1600,17.4,0.0254,39.6,0.0172206,127.614 +2000,17.4,0.0254,39.6,0.0172206,126.644 +2500,17.4,0.0254,39.6,0.0172206,124.154 +3150,17.4,0.0254,39.6,0.0172206,123.564 +4000,17.4,0.0254,39.6,0.0172206,122.724 +5000,17.4,0.0254,39.6,0.0172206,119.854 +200,17.4,0.0254,31.7,0.0176631,116.146 +250,17.4,0.0254,31.7,0.0176631,116.956 +315,17.4,0.0254,31.7,0.0176631,118.416 +400,17.4,0.0254,31.7,0.0176631,120.766 +500,17.4,0.0254,31.7,0.0176631,127.676 +630,17.4,0.0254,31.7,0.0176631,136.886 +800,17.4,0.0254,31.7,0.0176631,139.226 +1000,17.4,0.0254,31.7,0.0176631,131.796 +1250,17.4,0.0254,31.7,0.0176631,128.306 +1600,17.4,0.0254,31.7,0.0176631,126.846 +2000,17.4,0.0254,31.7,0.0176631,124.356 +2500,17.4,0.0254,31.7,0.0176631,124.166 +3150,17.4,0.0254,31.7,0.0176631,123.466 +4000,17.4,0.0254,31.7,0.0176631,121.996 +5000,17.4,0.0254,31.7,0.0176631,117.996 
+315,22.2,0.0254,71.3,0.0214178,115.857 +400,22.2,0.0254,71.3,0.0214178,117.927 +500,22.2,0.0254,71.3,0.0214178,117.967 +630,22.2,0.0254,71.3,0.0214178,120.657 +800,22.2,0.0254,71.3,0.0214178,123.227 +1000,22.2,0.0254,71.3,0.0214178,134.247 +1250,22.2,0.0254,71.3,0.0214178,140.987 +1600,22.2,0.0254,71.3,0.0214178,131.817 +2000,22.2,0.0254,71.3,0.0214178,127.197 +2500,22.2,0.0254,71.3,0.0214178,126.097 +3150,22.2,0.0254,71.3,0.0214178,124.127 +4000,22.2,0.0254,71.3,0.0214178,123.917 +5000,22.2,0.0254,71.3,0.0214178,125.727 +6300,22.2,0.0254,71.3,0.0214178,123.127 +8000,22.2,0.0254,71.3,0.0214178,121.657 +200,22.2,0.0254,39.6,0.0229028,116.066 +250,22.2,0.0254,39.6,0.0229028,117.386 +315,22.2,0.0254,39.6,0.0229028,120.716 +400,22.2,0.0254,39.6,0.0229028,123.416 +500,22.2,0.0254,39.6,0.0229028,129.776 +630,22.2,0.0254,39.6,0.0229028,137.026 +800,22.2,0.0254,39.6,0.0229028,137.076 +1000,22.2,0.0254,39.6,0.0229028,128.416 +1250,22.2,0.0254,39.6,0.0229028,126.446 +1600,22.2,0.0254,39.6,0.0229028,122.216 +2000,22.2,0.0254,39.6,0.0229028,121.256 +2500,22.2,0.0254,39.6,0.0229028,121.306 +3150,22.2,0.0254,39.6,0.0229028,120.856 +4000,22.2,0.0254,39.6,0.0229028,119.646 +5000,22.2,0.0254,39.6,0.0229028,118.816 +630,0,0.1016,71.3,0.00121072,124.155 +800,0,0.1016,71.3,0.00121072,126.805 +1000,0,0.1016,71.3,0.00121072,128.825 +1250,0,0.1016,71.3,0.00121072,130.335 +1600,0,0.1016,71.3,0.00121072,131.725 +2000,0,0.1016,71.3,0.00121072,132.095 +2500,0,0.1016,71.3,0.00121072,132.595 +3150,0,0.1016,71.3,0.00121072,131.955 +4000,0,0.1016,71.3,0.00121072,130.935 +5000,0,0.1016,71.3,0.00121072,130.795 +6300,0,0.1016,71.3,0.00121072,129.395 +8000,0,0.1016,71.3,0.00121072,125.465 +10000,0,0.1016,71.3,0.00121072,123.305 +12500,0,0.1016,71.3,0.00121072,119.375 +630,0,0.1016,55.5,0.00131983,126.170 +800,0,0.1016,55.5,0.00131983,127.920 +1000,0,0.1016,55.5,0.00131983,129.800 +1250,0,0.1016,55.5,0.00131983,131.430 +1600,0,0.1016,55.5,0.00131983,132.050 +2000,0,0.1016,55.5,0.00131983,132.540 +2500,0,0.1016,55.5,0.00131983,133.040 +3150,0,0.1016,55.5,0.00131983,131.780 +4000,0,0.1016,55.5,0.00131983,129.500 +5000,0,0.1016,55.5,0.00131983,128.360 +6300,0,0.1016,55.5,0.00131983,127.730 +8000,0,0.1016,55.5,0.00131983,124.450 +10000,0,0.1016,55.5,0.00131983,121.930 +12500,0,0.1016,55.5,0.00131983,119.910 +630,0,0.1016,39.6,0.00146332,125.401 +800,0,0.1016,39.6,0.00146332,128.401 +1000,0,0.1016,39.6,0.00146332,130.781 +1250,0,0.1016,39.6,0.00146332,132.271 +1600,0,0.1016,39.6,0.00146332,133.261 +2000,0,0.1016,39.6,0.00146332,133.251 +2500,0,0.1016,39.6,0.00146332,132.611 +3150,0,0.1016,39.6,0.00146332,130.961 +4000,0,0.1016,39.6,0.00146332,127.801 +5000,0,0.1016,39.6,0.00146332,126.021 +6300,0,0.1016,39.6,0.00146332,125.631 +8000,0,0.1016,39.6,0.00146332,122.341 +10000,0,0.1016,39.6,0.00146332,119.561 +630,0,0.1016,31.7,0.00150092,126.413 +800,0,0.1016,31.7,0.00150092,129.053 +1000,0,0.1016,31.7,0.00150092,131.313 +1250,0,0.1016,31.7,0.00150092,133.063 +1600,0,0.1016,31.7,0.00150092,133.553 +2000,0,0.1016,31.7,0.00150092,133.153 +2500,0,0.1016,31.7,0.00150092,132.003 +3150,0,0.1016,31.7,0.00150092,129.973 +4000,0,0.1016,31.7,0.00150092,126.933 +5000,0,0.1016,31.7,0.00150092,124.393 +6300,0,0.1016,31.7,0.00150092,124.253 +8000,0,0.1016,31.7,0.00150092,120.193 +10000,0,0.1016,31.7,0.00150092,115.893 +800,3.3,0.1016,71.3,0.00202822,131.074 +1000,3.3,0.1016,71.3,0.00202822,131.434 +1250,3.3,0.1016,71.3,0.00202822,132.304 +1600,3.3,0.1016,71.3,0.00202822,133.664 +2000,3.3,0.1016,71.3,0.00202822,134.034 
+2500,3.3,0.1016,71.3,0.00202822,133.894 +3150,3.3,0.1016,71.3,0.00202822,132.114 +4000,3.3,0.1016,71.3,0.00202822,128.704 +5000,3.3,0.1016,71.3,0.00202822,127.054 +6300,3.3,0.1016,71.3,0.00202822,124.904 +8000,3.3,0.1016,71.3,0.00202822,121.234 +10000,3.3,0.1016,71.3,0.00202822,116.694 +630,3.3,0.1016,55.5,0.002211,126.599 +800,3.3,0.1016,55.5,0.002211,129.119 +1000,3.3,0.1016,55.5,0.002211,131.129 +1250,3.3,0.1016,55.5,0.002211,132.769 +1600,3.3,0.1016,55.5,0.002211,133.649 +2000,3.3,0.1016,55.5,0.002211,133.649 +2500,3.3,0.1016,55.5,0.002211,132.889 +3150,3.3,0.1016,55.5,0.002211,130.629 +4000,3.3,0.1016,55.5,0.002211,127.229 +5000,3.3,0.1016,55.5,0.002211,124.839 +6300,3.3,0.1016,55.5,0.002211,123.839 +8000,3.3,0.1016,55.5,0.002211,120.569 +10000,3.3,0.1016,55.5,0.002211,115.659 +630,3.3,0.1016,39.6,0.00245138,127.251 +800,3.3,0.1016,39.6,0.00245138,129.991 +1000,3.3,0.1016,39.6,0.00245138,131.971 +1250,3.3,0.1016,39.6,0.00245138,133.211 +1600,3.3,0.1016,39.6,0.00245138,133.071 +2000,3.3,0.1016,39.6,0.00245138,132.301 +2500,3.3,0.1016,39.6,0.00245138,130.791 +3150,3.3,0.1016,39.6,0.00245138,128.401 +4000,3.3,0.1016,39.6,0.00245138,124.881 +5000,3.3,0.1016,39.6,0.00245138,122.371 +6300,3.3,0.1016,39.6,0.00245138,120.851 +8000,3.3,0.1016,39.6,0.00245138,118.091 +10000,3.3,0.1016,39.6,0.00245138,115.321 +630,3.3,0.1016,31.7,0.00251435,128.952 +800,3.3,0.1016,31.7,0.00251435,131.362 +1000,3.3,0.1016,31.7,0.00251435,133.012 +1250,3.3,0.1016,31.7,0.00251435,134.022 +1600,3.3,0.1016,31.7,0.00251435,133.402 +2000,3.3,0.1016,31.7,0.00251435,131.642 +2500,3.3,0.1016,31.7,0.00251435,130.392 +3150,3.3,0.1016,31.7,0.00251435,128.252 +4000,3.3,0.1016,31.7,0.00251435,124.852 +5000,3.3,0.1016,31.7,0.00251435,122.082 +6300,3.3,0.1016,31.7,0.00251435,120.702 +8000,3.3,0.1016,31.7,0.00251435,117.432 +630,6.7,0.1016,71.3,0.00478288,131.448 +800,6.7,0.1016,71.3,0.00478288,134.478 +1000,6.7,0.1016,71.3,0.00478288,136.758 +1250,6.7,0.1016,71.3,0.00478288,137.658 +1600,6.7,0.1016,71.3,0.00478288,136.678 +2000,6.7,0.1016,71.3,0.00478288,134.568 +2500,6.7,0.1016,71.3,0.00478288,131.458 +3150,6.7,0.1016,71.3,0.00478288,124.458 +500,6.7,0.1016,55.5,0.0052139,129.343 +630,6.7,0.1016,55.5,0.0052139,133.023 +800,6.7,0.1016,55.5,0.0052139,135.953 +1000,6.7,0.1016,55.5,0.0052139,137.233 +1250,6.7,0.1016,55.5,0.0052139,136.883 +1600,6.7,0.1016,55.5,0.0052139,133.653 +2000,6.7,0.1016,55.5,0.0052139,129.653 +2500,6.7,0.1016,55.5,0.0052139,124.273 +400,6.7,0.1016,39.6,0.00578076,128.295 +500,6.7,0.1016,39.6,0.00578076,130.955 +630,6.7,0.1016,39.6,0.00578076,133.355 +800,6.7,0.1016,39.6,0.00578076,134.625 +1000,6.7,0.1016,39.6,0.00578076,134.515 +1250,6.7,0.1016,39.6,0.00578076,132.395 +1600,6.7,0.1016,39.6,0.00578076,127.375 +2000,6.7,0.1016,39.6,0.00578076,122.235 +315,6.7,0.1016,31.7,0.00592927,126.266 +400,6.7,0.1016,31.7,0.00592927,128.296 +500,6.7,0.1016,31.7,0.00592927,130.206 +630,6.7,0.1016,31.7,0.00592927,132.116 +800,6.7,0.1016,31.7,0.00592927,132.886 +1000,6.7,0.1016,31.7,0.00592927,131.636 +1250,6.7,0.1016,31.7,0.00592927,129.256 +1600,6.7,0.1016,31.7,0.00592927,124.346 +2000,6.7,0.1016,31.7,0.00592927,120.446 +200,8.9,0.1016,71.3,0.0103088,133.503 +250,8.9,0.1016,71.3,0.0103088,134.533 +315,8.9,0.1016,71.3,0.0103088,136.583 +400,8.9,0.1016,71.3,0.0103088,138.123 +500,8.9,0.1016,71.3,0.0103088,138.523 +630,8.9,0.1016,71.3,0.0103088,138.423 +800,8.9,0.1016,71.3,0.0103088,137.813 +1000,8.9,0.1016,71.3,0.0103088,135.433 +1250,8.9,0.1016,71.3,0.0103088,132.793 +1600,8.9,0.1016,71.3,0.0103088,128.763 
+2000,8.9,0.1016,71.3,0.0103088,124.233 +2500,8.9,0.1016,71.3,0.0103088,123.623 +3150,8.9,0.1016,71.3,0.0103088,123.263 +4000,8.9,0.1016,71.3,0.0103088,120.243 +5000,8.9,0.1016,71.3,0.0103088,116.723 +6300,8.9,0.1016,71.3,0.0103088,117.253 +200,8.9,0.1016,39.6,0.0124596,133.420 +250,8.9,0.1016,39.6,0.0124596,134.340 +315,8.9,0.1016,39.6,0.0124596,135.380 +400,8.9,0.1016,39.6,0.0124596,135.540 +500,8.9,0.1016,39.6,0.0124596,133.790 +630,8.9,0.1016,39.6,0.0124596,131.920 +800,8.9,0.1016,39.6,0.0124596,130.940 +1000,8.9,0.1016,39.6,0.0124596,129.580 +1250,8.9,0.1016,39.6,0.0124596,127.710 +1600,8.9,0.1016,39.6,0.0124596,123.820 +2000,8.9,0.1016,39.6,0.0124596,119.040 +2500,8.9,0.1016,39.6,0.0124596,119.190 +3150,8.9,0.1016,39.6,0.0124596,119.350 +4000,8.9,0.1016,39.6,0.0124596,116.220 +5000,8.9,0.1016,39.6,0.0124596,113.080 +6300,8.9,0.1016,39.6,0.0124596,113.110 +200,12.3,0.1016,71.3,0.0337792,130.588 +250,12.3,0.1016,71.3,0.0337792,131.568 +315,12.3,0.1016,71.3,0.0337792,137.068 +400,12.3,0.1016,71.3,0.0337792,139.428 +500,12.3,0.1016,71.3,0.0337792,140.158 +630,12.3,0.1016,71.3,0.0337792,135.368 +800,12.3,0.1016,71.3,0.0337792,127.318 +1000,12.3,0.1016,71.3,0.0337792,127.928 +1250,12.3,0.1016,71.3,0.0337792,126.648 +1600,12.3,0.1016,71.3,0.0337792,124.748 +2000,12.3,0.1016,71.3,0.0337792,122.218 +2500,12.3,0.1016,71.3,0.0337792,121.318 +3150,12.3,0.1016,71.3,0.0337792,120.798 +4000,12.3,0.1016,71.3,0.0337792,118.018 +5000,12.3,0.1016,71.3,0.0337792,116.108 +6300,12.3,0.1016,71.3,0.0337792,113.958 +200,12.3,0.1016,55.5,0.0368233,132.304 +250,12.3,0.1016,55.5,0.0368233,133.294 +315,12.3,0.1016,55.5,0.0368233,135.674 +400,12.3,0.1016,55.5,0.0368233,136.414 +500,12.3,0.1016,55.5,0.0368233,133.774 +630,12.3,0.1016,55.5,0.0368233,124.244 +800,12.3,0.1016,55.5,0.0368233,125.114 +1000,12.3,0.1016,55.5,0.0368233,125.484 +1250,12.3,0.1016,55.5,0.0368233,124.214 +1600,12.3,0.1016,55.5,0.0368233,121.824 +2000,12.3,0.1016,55.5,0.0368233,118.564 +2500,12.3,0.1016,55.5,0.0368233,117.054 +3150,12.3,0.1016,55.5,0.0368233,116.914 +4000,12.3,0.1016,55.5,0.0368233,114.404 +5000,12.3,0.1016,55.5,0.0368233,112.014 +6300,12.3,0.1016,55.5,0.0368233,110.124 +200,12.3,0.1016,39.6,0.0408268,128.545 +250,12.3,0.1016,39.6,0.0408268,129.675 +315,12.3,0.1016,39.6,0.0408268,129.415 +400,12.3,0.1016,39.6,0.0408268,128.265 +500,12.3,0.1016,39.6,0.0408268,122.205 +630,12.3,0.1016,39.6,0.0408268,121.315 +800,12.3,0.1016,39.6,0.0408268,122.315 +1000,12.3,0.1016,39.6,0.0408268,122.435 +1250,12.3,0.1016,39.6,0.0408268,121.165 +1600,12.3,0.1016,39.6,0.0408268,117.875 +2000,12.3,0.1016,39.6,0.0408268,114.085 +2500,12.3,0.1016,39.6,0.0408268,113.315 +3150,12.3,0.1016,39.6,0.0408268,113.055 +4000,12.3,0.1016,39.6,0.0408268,110.905 +5000,12.3,0.1016,39.6,0.0408268,108.625 +6300,12.3,0.1016,39.6,0.0408268,107.985 +200,12.3,0.1016,31.7,0.0418756,124.987 +250,12.3,0.1016,31.7,0.0418756,125.857 +315,12.3,0.1016,31.7,0.0418756,124.717 +400,12.3,0.1016,31.7,0.0418756,123.207 +500,12.3,0.1016,31.7,0.0418756,118.667 +630,12.3,0.1016,31.7,0.0418756,119.287 +800,12.3,0.1016,31.7,0.0418756,120.037 +1000,12.3,0.1016,31.7,0.0418756,119.777 +1250,12.3,0.1016,31.7,0.0418756,118.767 +1600,12.3,0.1016,31.7,0.0418756,114.477 +2000,12.3,0.1016,31.7,0.0418756,110.447 +2500,12.3,0.1016,31.7,0.0418756,110.317 +3150,12.3,0.1016,31.7,0.0418756,110.307 +4000,12.3,0.1016,31.7,0.0418756,108.407 +5000,12.3,0.1016,31.7,0.0418756,107.147 +6300,12.3,0.1016,31.7,0.0418756,107.267 +200,15.6,0.1016,71.3,0.0437259,130.898 +250,15.6,0.1016,71.3,0.0437259,132.158 
+315,15.6,0.1016,71.3,0.0437259,133.808 +400,15.6,0.1016,71.3,0.0437259,134.058 +500,15.6,0.1016,71.3,0.0437259,130.638 +630,15.6,0.1016,71.3,0.0437259,122.288 +800,15.6,0.1016,71.3,0.0437259,124.188 +1000,15.6,0.1016,71.3,0.0437259,124.438 +1250,15.6,0.1016,71.3,0.0437259,123.178 +1600,15.6,0.1016,71.3,0.0437259,121.528 +2000,15.6,0.1016,71.3,0.0437259,119.888 +2500,15.6,0.1016,71.3,0.0437259,118.998 +3150,15.6,0.1016,71.3,0.0437259,116.468 +4000,15.6,0.1016,71.3,0.0437259,113.298 +200,15.6,0.1016,39.6,0.0528487,123.514 +250,15.6,0.1016,39.6,0.0528487,124.644 +315,15.6,0.1016,39.6,0.0528487,122.754 +400,15.6,0.1016,39.6,0.0528487,120.484 +500,15.6,0.1016,39.6,0.0528487,115.304 +630,15.6,0.1016,39.6,0.0528487,118.084 +800,15.6,0.1016,39.6,0.0528487,118.964 +1000,15.6,0.1016,39.6,0.0528487,119.224 +1250,15.6,0.1016,39.6,0.0528487,118.214 +1600,15.6,0.1016,39.6,0.0528487,114.554 +2000,15.6,0.1016,39.6,0.0528487,110.894 +2500,15.6,0.1016,39.6,0.0528487,110.264 +3150,15.6,0.1016,39.6,0.0528487,109.254 +4000,15.6,0.1016,39.6,0.0528487,106.604 +5000,15.6,0.1016,39.6,0.0528487,106.224 +6300,15.6,0.1016,39.6,0.0528487,104.204 diff --git a/Experimentos/inicializacion-pesos-red-neuronal/data/airfoil_self_noise.dat b/Experimentos/inicializacion-pesos-red-neuronal/data/airfoil_self_noise.dat new file mode 100644 index 0000000..759745b --- /dev/null +++ b/Experimentos/inicializacion-pesos-red-neuronal/data/airfoil_self_noise.dat @@ -0,0 +1,1503 @@ +800 0 0.3048 71.3 0.00266337 126.201 +1000 0 0.3048 71.3 0.00266337 125.201 +1250 0 0.3048 71.3 0.00266337 125.951 +1600 0 0.3048 71.3 0.00266337 127.591 +2000 0 0.3048 71.3 0.00266337 127.461 +2500 0 0.3048 71.3 0.00266337 125.571 +3150 0 0.3048 71.3 0.00266337 125.201 +4000 0 0.3048 71.3 0.00266337 123.061 +5000 0 0.3048 71.3 0.00266337 121.301 +6300 0 0.3048 71.3 0.00266337 119.541 +8000 0 0.3048 71.3 0.00266337 117.151 +10000 0 0.3048 71.3 0.00266337 115.391 +12500 0 0.3048 71.3 0.00266337 112.241 +16000 0 0.3048 71.3 0.00266337 108.721 +500 0 0.3048 55.5 0.00283081 126.416 +630 0 0.3048 55.5 0.00283081 127.696 +800 0 0.3048 55.5 0.00283081 128.086 +1000 0 0.3048 55.5 0.00283081 126.966 +1250 0 0.3048 55.5 0.00283081 126.086 +1600 0 0.3048 55.5 0.00283081 126.986 +2000 0 0.3048 55.5 0.00283081 126.616 +2500 0 0.3048 55.5 0.00283081 124.106 +3150 0 0.3048 55.5 0.00283081 123.236 +4000 0 0.3048 55.5 0.00283081 121.106 +5000 0 0.3048 55.5 0.00283081 119.606 +6300 0 0.3048 55.5 0.00283081 117.976 +8000 0 0.3048 55.5 0.00283081 116.476 +10000 0 0.3048 55.5 0.00283081 113.076 +12500 0 0.3048 55.5 0.00283081 111.076 +200 0 0.3048 39.6 0.00310138 118.129 +250 0 0.3048 39.6 0.00310138 119.319 +315 0 0.3048 39.6 0.00310138 122.779 +400 0 0.3048 39.6 0.00310138 124.809 +500 0 0.3048 39.6 0.00310138 126.959 +630 0 0.3048 39.6 0.00310138 128.629 +800 0 0.3048 39.6 0.00310138 129.099 +1000 0 0.3048 39.6 0.00310138 127.899 +1250 0 0.3048 39.6 0.00310138 125.499 +1600 0 0.3048 39.6 0.00310138 124.049 +2000 0 0.3048 39.6 0.00310138 123.689 +2500 0 0.3048 39.6 0.00310138 121.399 +3150 0 0.3048 39.6 0.00310138 120.319 +4000 0 0.3048 39.6 0.00310138 119.229 +5000 0 0.3048 39.6 0.00310138 117.789 +6300 0 0.3048 39.6 0.00310138 116.229 +8000 0 0.3048 39.6 0.00310138 114.779 +10000 0 0.3048 39.6 0.00310138 112.139 +12500 0 0.3048 39.6 0.00310138 109.619 +200 0 0.3048 31.7 0.00331266 117.195 +250 0 0.3048 31.7 0.00331266 118.595 +315 0 0.3048 31.7 0.00331266 122.765 +400 0 0.3048 31.7 0.00331266 125.045 +500 0 0.3048 31.7 0.00331266 127.315 +630 0 0.3048 31.7 0.00331266 129.095 
+800 0 0.3048 31.7 0.00331266 129.235 +1000 0 0.3048 31.7 0.00331266 127.365 +1250 0 0.3048 31.7 0.00331266 124.355 +1600 0 0.3048 31.7 0.00331266 122.365 +2000 0 0.3048 31.7 0.00331266 122.375 +2500 0 0.3048 31.7 0.00331266 120.755 +3150 0 0.3048 31.7 0.00331266 119.135 +4000 0 0.3048 31.7 0.00331266 118.145 +5000 0 0.3048 31.7 0.00331266 115.645 +6300 0 0.3048 31.7 0.00331266 113.775 +8000 0 0.3048 31.7 0.00331266 110.515 +10000 0 0.3048 31.7 0.00331266 108.265 +800 1.5 0.3048 71.3 0.00336729 127.122 +1000 1.5 0.3048 71.3 0.00336729 125.992 +1250 1.5 0.3048 71.3 0.00336729 125.872 +1600 1.5 0.3048 71.3 0.00336729 126.632 +2000 1.5 0.3048 71.3 0.00336729 126.642 +2500 1.5 0.3048 71.3 0.00336729 124.512 +3150 1.5 0.3048 71.3 0.00336729 123.392 +4000 1.5 0.3048 71.3 0.00336729 121.762 +5000 1.5 0.3048 71.3 0.00336729 119.632 +6300 1.5 0.3048 71.3 0.00336729 118.122 +8000 1.5 0.3048 71.3 0.00336729 115.372 +10000 1.5 0.3048 71.3 0.00336729 113.492 +12500 1.5 0.3048 71.3 0.00336729 109.222 +16000 1.5 0.3048 71.3 0.00336729 106.582 +315 1.5 0.3048 39.6 0.00392107 121.851 +400 1.5 0.3048 39.6 0.00392107 124.001 +500 1.5 0.3048 39.6 0.00392107 126.661 +630 1.5 0.3048 39.6 0.00392107 128.311 +800 1.5 0.3048 39.6 0.00392107 128.831 +1000 1.5 0.3048 39.6 0.00392107 127.581 +1250 1.5 0.3048 39.6 0.00392107 125.211 +1600 1.5 0.3048 39.6 0.00392107 122.211 +2000 1.5 0.3048 39.6 0.00392107 122.101 +2500 1.5 0.3048 39.6 0.00392107 120.981 +3150 1.5 0.3048 39.6 0.00392107 119.111 +4000 1.5 0.3048 39.6 0.00392107 117.741 +5000 1.5 0.3048 39.6 0.00392107 116.241 +6300 1.5 0.3048 39.6 0.00392107 114.751 +8000 1.5 0.3048 39.6 0.00392107 112.251 +10000 1.5 0.3048 39.6 0.00392107 108.991 +12500 1.5 0.3048 39.6 0.00392107 106.111 +400 3 0.3048 71.3 0.00425727 127.564 +500 3 0.3048 71.3 0.00425727 128.454 +630 3 0.3048 71.3 0.00425727 129.354 +800 3 0.3048 71.3 0.00425727 129.494 +1000 3 0.3048 71.3 0.00425727 129.004 +1250 3 0.3048 71.3 0.00425727 127.634 +1600 3 0.3048 71.3 0.00425727 126.514 +2000 3 0.3048 71.3 0.00425727 125.524 +2500 3 0.3048 71.3 0.00425727 124.024 +3150 3 0.3048 71.3 0.00425727 121.514 +4000 3 0.3048 71.3 0.00425727 120.264 +5000 3 0.3048 71.3 0.00425727 118.134 +6300 3 0.3048 71.3 0.00425727 116.134 +8000 3 0.3048 71.3 0.00425727 114.634 +10000 3 0.3048 71.3 0.00425727 110.224 +400 3 0.3048 55.5 0.00452492 126.159 +500 3 0.3048 55.5 0.00452492 128.179 +630 3 0.3048 55.5 0.00452492 129.569 +800 3 0.3048 55.5 0.00452492 129.949 +1000 3 0.3048 55.5 0.00452492 129.329 +1250 3 0.3048 55.5 0.00452492 127.329 +1600 3 0.3048 55.5 0.00452492 124.439 +2000 3 0.3048 55.5 0.00452492 123.069 +2500 3 0.3048 55.5 0.00452492 122.439 +3150 3 0.3048 55.5 0.00452492 120.189 +4000 3 0.3048 55.5 0.00452492 118.689 +5000 3 0.3048 55.5 0.00452492 117.309 +6300 3 0.3048 55.5 0.00452492 115.679 +8000 3 0.3048 55.5 0.00452492 113.799 +10000 3 0.3048 55.5 0.00452492 112.169 +315 3 0.3048 39.6 0.00495741 123.312 +400 3 0.3048 39.6 0.00495741 125.472 +500 3 0.3048 39.6 0.00495741 127.632 +630 3 0.3048 39.6 0.00495741 129.292 +800 3 0.3048 39.6 0.00495741 129.552 +1000 3 0.3048 39.6 0.00495741 128.312 +1250 3 0.3048 39.6 0.00495741 125.802 +1600 3 0.3048 39.6 0.00495741 122.782 +2000 3 0.3048 39.6 0.00495741 120.532 +2500 3 0.3048 39.6 0.00495741 120.162 +3150 3 0.3048 39.6 0.00495741 118.922 +4000 3 0.3048 39.6 0.00495741 116.792 +5000 3 0.3048 39.6 0.00495741 115.792 +6300 3 0.3048 39.6 0.00495741 114.042 +8000 3 0.3048 39.6 0.00495741 110.652 +315 3 0.3048 31.7 0.00529514 123.118 +400 3 0.3048 31.7 0.00529514 
125.398 +500 3 0.3048 31.7 0.00529514 127.548 +630 3 0.3048 31.7 0.00529514 128.698 +800 3 0.3048 31.7 0.00529514 128.708 +1000 3 0.3048 31.7 0.00529514 126.838 +1250 3 0.3048 31.7 0.00529514 124.838 +1600 3 0.3048 31.7 0.00529514 122.088 +2000 3 0.3048 31.7 0.00529514 120.088 +2500 3 0.3048 31.7 0.00529514 119.598 +3150 3 0.3048 31.7 0.00529514 118.108 +4000 3 0.3048 31.7 0.00529514 115.608 +5000 3 0.3048 31.7 0.00529514 113.858 +6300 3 0.3048 31.7 0.00529514 109.718 +250 4 0.3048 71.3 0.00497773 126.395 +315 4 0.3048 71.3 0.00497773 128.175 +400 4 0.3048 71.3 0.00497773 129.575 +500 4 0.3048 71.3 0.00497773 130.715 +630 4 0.3048 71.3 0.00497773 131.615 +800 4 0.3048 71.3 0.00497773 131.755 +1000 4 0.3048 71.3 0.00497773 131.015 +1250 4 0.3048 71.3 0.00497773 129.395 +1600 4 0.3048 71.3 0.00497773 126.645 +2000 4 0.3048 71.3 0.00497773 124.395 +2500 4 0.3048 71.3 0.00497773 123.775 +3150 4 0.3048 71.3 0.00497773 121.775 +4000 4 0.3048 71.3 0.00497773 119.535 +5000 4 0.3048 71.3 0.00497773 117.785 +6300 4 0.3048 71.3 0.00497773 116.165 +8000 4 0.3048 71.3 0.00497773 113.665 +10000 4 0.3048 71.3 0.00497773 110.905 +12500 4 0.3048 71.3 0.00497773 107.405 +250 4 0.3048 39.6 0.00579636 123.543 +315 4 0.3048 39.6 0.00579636 126.843 +400 4 0.3048 39.6 0.00579636 128.633 +500 4 0.3048 39.6 0.00579636 130.173 +630 4 0.3048 39.6 0.00579636 131.073 +800 4 0.3048 39.6 0.00579636 130.723 +1000 4 0.3048 39.6 0.00579636 128.723 +1250 4 0.3048 39.6 0.00579636 126.343 +1600 4 0.3048 39.6 0.00579636 123.213 +2000 4 0.3048 39.6 0.00579636 120.963 +2500 4 0.3048 39.6 0.00579636 120.233 +3150 4 0.3048 39.6 0.00579636 118.743 +4000 4 0.3048 39.6 0.00579636 115.863 +5000 4 0.3048 39.6 0.00579636 113.733 +1250 0 0.2286 71.3 0.00214345 128.144 +1600 0 0.2286 71.3 0.00214345 129.134 +2000 0 0.2286 71.3 0.00214345 128.244 +2500 0 0.2286 71.3 0.00214345 128.354 +3150 0 0.2286 71.3 0.00214345 127.834 +4000 0 0.2286 71.3 0.00214345 125.824 +5000 0 0.2286 71.3 0.00214345 124.304 +6300 0 0.2286 71.3 0.00214345 122.044 +8000 0 0.2286 71.3 0.00214345 118.024 +10000 0 0.2286 71.3 0.00214345 118.134 +12500 0 0.2286 71.3 0.00214345 117.624 +16000 0 0.2286 71.3 0.00214345 114.984 +20000 0 0.2286 71.3 0.00214345 114.474 +315 0 0.2286 55.5 0.00229336 119.540 +400 0 0.2286 55.5 0.00229336 121.660 +500 0 0.2286 55.5 0.00229336 123.780 +630 0 0.2286 55.5 0.00229336 126.160 +800 0 0.2286 55.5 0.00229336 127.530 +1000 0 0.2286 55.5 0.00229336 128.290 +1250 0 0.2286 55.5 0.00229336 127.910 +1600 0 0.2286 55.5 0.00229336 126.790 +2000 0 0.2286 55.5 0.00229336 126.540 +2500 0 0.2286 55.5 0.00229336 126.540 +3150 0 0.2286 55.5 0.00229336 125.160 +4000 0 0.2286 55.5 0.00229336 123.410 +5000 0 0.2286 55.5 0.00229336 122.410 +6300 0 0.2286 55.5 0.00229336 118.410 +315 0 0.2286 39.6 0.00253511 121.055 +400 0 0.2286 39.6 0.00253511 123.565 +500 0 0.2286 39.6 0.00253511 126.195 +630 0 0.2286 39.6 0.00253511 128.705 +800 0 0.2286 39.6 0.00253511 130.205 +1000 0 0.2286 39.6 0.00253511 130.435 +1250 0 0.2286 39.6 0.00253511 129.395 +1600 0 0.2286 39.6 0.00253511 127.095 +2000 0 0.2286 39.6 0.00253511 125.305 +2500 0 0.2286 39.6 0.00253511 125.025 +3150 0 0.2286 39.6 0.00253511 124.625 +4000 0 0.2286 39.6 0.00253511 123.465 +5000 0 0.2286 39.6 0.00253511 122.175 +6300 0 0.2286 39.6 0.00253511 117.465 +315 0 0.2286 31.7 0.0027238 120.595 +400 0 0.2286 31.7 0.0027238 123.635 +500 0 0.2286 31.7 0.0027238 126.675 +630 0 0.2286 31.7 0.0027238 129.465 +800 0 0.2286 31.7 0.0027238 130.725 +1000 0 0.2286 31.7 0.0027238 130.595 +1250 0 0.2286 31.7 
0.0027238 128.805 +1600 0 0.2286 31.7 0.0027238 125.625 +2000 0 0.2286 31.7 0.0027238 123.455 +2500 0 0.2286 31.7 0.0027238 123.445 +3150 0 0.2286 31.7 0.0027238 123.445 +4000 0 0.2286 31.7 0.0027238 122.035 +5000 0 0.2286 31.7 0.0027238 120.505 +6300 0 0.2286 31.7 0.0027238 116.815 +400 2 0.2286 71.3 0.00293031 125.116 +500 2 0.2286 71.3 0.00293031 126.486 +630 2 0.2286 71.3 0.00293031 127.356 +800 2 0.2286 71.3 0.00293031 128.216 +1000 2 0.2286 71.3 0.00293031 128.956 +1250 2 0.2286 71.3 0.00293031 128.816 +1600 2 0.2286 71.3 0.00293031 127.796 +2000 2 0.2286 71.3 0.00293031 126.896 +2500 2 0.2286 71.3 0.00293031 127.006 +3150 2 0.2286 71.3 0.00293031 126.116 +4000 2 0.2286 71.3 0.00293031 124.086 +5000 2 0.2286 71.3 0.00293031 122.816 +6300 2 0.2286 71.3 0.00293031 120.786 +8000 2 0.2286 71.3 0.00293031 115.996 +10000 2 0.2286 71.3 0.00293031 113.086 +400 2 0.2286 55.5 0.00313525 122.292 +500 2 0.2286 55.5 0.00313525 124.692 +630 2 0.2286 55.5 0.00313525 126.842 +800 2 0.2286 55.5 0.00313525 128.492 +1000 2 0.2286 55.5 0.00313525 129.002 +1250 2 0.2286 55.5 0.00313525 128.762 +1600 2 0.2286 55.5 0.00313525 126.752 +2000 2 0.2286 55.5 0.00313525 124.612 +2500 2 0.2286 55.5 0.00313525 123.862 +3150 2 0.2286 55.5 0.00313525 123.742 +4000 2 0.2286 55.5 0.00313525 122.232 +5000 2 0.2286 55.5 0.00313525 120.472 +6300 2 0.2286 55.5 0.00313525 118.712 +315 2 0.2286 39.6 0.00346574 120.137 +400 2 0.2286 39.6 0.00346574 122.147 +500 2 0.2286 39.6 0.00346574 125.157 +630 2 0.2286 39.6 0.00346574 127.417 +800 2 0.2286 39.6 0.00346574 129.037 +1000 2 0.2286 39.6 0.00346574 129.147 +1250 2 0.2286 39.6 0.00346574 128.257 +1600 2 0.2286 39.6 0.00346574 125.837 +2000 2 0.2286 39.6 0.00346574 122.797 +2500 2 0.2286 39.6 0.00346574 121.397 +3150 2 0.2286 39.6 0.00346574 121.627 +4000 2 0.2286 39.6 0.00346574 120.227 +5000 2 0.2286 39.6 0.00346574 118.827 +6300 2 0.2286 39.6 0.00346574 116.417 +315 2 0.2286 31.7 0.00372371 120.147 +400 2 0.2286 31.7 0.00372371 123.417 +500 2 0.2286 31.7 0.00372371 126.677 +630 2 0.2286 31.7 0.00372371 129.057 +800 2 0.2286 31.7 0.00372371 130.307 +1000 2 0.2286 31.7 0.00372371 130.307 +1250 2 0.2286 31.7 0.00372371 128.677 +1600 2 0.2286 31.7 0.00372371 125.797 +2000 2 0.2286 31.7 0.00372371 123.037 +2500 2 0.2286 31.7 0.00372371 121.407 +3150 2 0.2286 31.7 0.00372371 121.527 +4000 2 0.2286 31.7 0.00372371 120.527 +5000 2 0.2286 31.7 0.00372371 118.267 +6300 2 0.2286 31.7 0.00372371 115.137 +500 4 0.2286 71.3 0.00400603 126.758 +630 4 0.2286 71.3 0.00400603 129.038 +800 4 0.2286 71.3 0.00400603 130.688 +1000 4 0.2286 71.3 0.00400603 131.708 +1250 4 0.2286 71.3 0.00400603 131.718 +1600 4 0.2286 71.3 0.00400603 129.468 +2000 4 0.2286 71.3 0.00400603 126.218 +2500 4 0.2286 71.3 0.00400603 124.338 +3150 4 0.2286 71.3 0.00400603 124.108 +4000 4 0.2286 71.3 0.00400603 121.728 +5000 4 0.2286 71.3 0.00400603 121.118 +6300 4 0.2286 71.3 0.00400603 118.618 +8000 4 0.2286 71.3 0.00400603 112.848 +10000 4 0.2286 71.3 0.00400603 113.108 +12500 4 0.2286 71.3 0.00400603 114.258 +16000 4 0.2286 71.3 0.00400603 112.768 +20000 4 0.2286 71.3 0.00400603 109.638 +400 4 0.2286 55.5 0.0042862 123.274 +500 4 0.2286 55.5 0.0042862 127.314 +630 4 0.2286 55.5 0.0042862 129.964 +800 4 0.2286 55.5 0.0042862 131.864 +1000 4 0.2286 55.5 0.0042862 132.134 +1250 4 0.2286 55.5 0.0042862 131.264 +1600 4 0.2286 55.5 0.0042862 128.264 +2000 4 0.2286 55.5 0.0042862 124.254 +2500 4 0.2286 55.5 0.0042862 122.384 +3150 4 0.2286 55.5 0.0042862 122.394 +4000 4 0.2286 55.5 0.0042862 120.654 +5000 4 0.2286 55.5 
0.0042862 120.034 +6300 4 0.2286 55.5 0.0042862 117.154 +8000 4 0.2286 55.5 0.0042862 112.524 +315 4 0.2286 39.6 0.00473801 122.229 +400 4 0.2286 39.6 0.00473801 123.879 +500 4 0.2286 39.6 0.00473801 127.039 +630 4 0.2286 39.6 0.00473801 129.579 +800 4 0.2286 39.6 0.00473801 130.469 +1000 4 0.2286 39.6 0.00473801 129.969 +1250 4 0.2286 39.6 0.00473801 128.339 +1600 4 0.2286 39.6 0.00473801 125.319 +2000 4 0.2286 39.6 0.00473801 121.659 +2500 4 0.2286 39.6 0.00473801 119.649 +3150 4 0.2286 39.6 0.00473801 120.419 +4000 4 0.2286 39.6 0.00473801 119.159 +5000 4 0.2286 39.6 0.00473801 117.649 +6300 4 0.2286 39.6 0.00473801 114.249 +8000 4 0.2286 39.6 0.00473801 113.129 +250 4 0.2286 31.7 0.00509068 120.189 +315 4 0.2286 31.7 0.00509068 123.609 +400 4 0.2286 31.7 0.00509068 126.149 +500 4 0.2286 31.7 0.00509068 128.939 +630 4 0.2286 31.7 0.00509068 130.349 +800 4 0.2286 31.7 0.00509068 130.869 +1000 4 0.2286 31.7 0.00509068 129.869 +1250 4 0.2286 31.7 0.00509068 128.119 +1600 4 0.2286 31.7 0.00509068 125.229 +2000 4 0.2286 31.7 0.00509068 122.089 +2500 4 0.2286 31.7 0.00509068 120.209 +3150 4 0.2286 31.7 0.00509068 120.229 +4000 4 0.2286 31.7 0.00509068 118.859 +5000 4 0.2286 31.7 0.00509068 115.969 +6300 4 0.2286 31.7 0.00509068 112.699 +400 5.3 0.2286 71.3 0.0051942 127.700 +500 5.3 0.2286 71.3 0.0051942 129.880 +630 5.3 0.2286 71.3 0.0051942 131.800 +800 5.3 0.2286 71.3 0.0051942 133.480 +1000 5.3 0.2286 71.3 0.0051942 134.000 +1250 5.3 0.2286 71.3 0.0051942 133.380 +1600 5.3 0.2286 71.3 0.0051942 130.460 +2000 5.3 0.2286 71.3 0.0051942 125.890 +2500 5.3 0.2286 71.3 0.0051942 123.740 +3150 5.3 0.2286 71.3 0.0051942 123.120 +4000 5.3 0.2286 71.3 0.0051942 120.330 +5000 5.3 0.2286 71.3 0.0051942 118.050 +6300 5.3 0.2286 71.3 0.0051942 116.920 +8000 5.3 0.2286 71.3 0.0051942 114.900 +10000 5.3 0.2286 71.3 0.0051942 111.350 +250 5.3 0.2286 39.6 0.00614329 127.011 +315 5.3 0.2286 39.6 0.00614329 129.691 +400 5.3 0.2286 39.6 0.00614329 131.221 +500 5.3 0.2286 39.6 0.00614329 132.251 +630 5.3 0.2286 39.6 0.00614329 132.011 +800 5.3 0.2286 39.6 0.00614329 129.491 +1000 5.3 0.2286 39.6 0.00614329 125.581 +1250 5.3 0.2286 39.6 0.00614329 125.721 +1600 5.3 0.2286 39.6 0.00614329 123.081 +2000 5.3 0.2286 39.6 0.00614329 117.911 +2500 5.3 0.2286 39.6 0.00614329 116.151 +3150 5.3 0.2286 39.6 0.00614329 118.441 +4000 5.3 0.2286 39.6 0.00614329 115.801 +5000 5.3 0.2286 39.6 0.00614329 115.311 +6300 5.3 0.2286 39.6 0.00614329 112.541 +200 7.3 0.2286 71.3 0.0104404 138.758 +250 7.3 0.2286 71.3 0.0104404 139.918 +315 7.3 0.2286 71.3 0.0104404 139.808 +400 7.3 0.2286 71.3 0.0104404 139.438 +500 7.3 0.2286 71.3 0.0104404 136.798 +630 7.3 0.2286 71.3 0.0104404 133.768 +800 7.3 0.2286 71.3 0.0104404 130.748 +1000 7.3 0.2286 71.3 0.0104404 126.838 +1250 7.3 0.2286 71.3 0.0104404 127.358 +1600 7.3 0.2286 71.3 0.0104404 125.728 +2000 7.3 0.2286 71.3 0.0104404 122.708 +2500 7.3 0.2286 71.3 0.0104404 122.088 +3150 7.3 0.2286 71.3 0.0104404 120.458 +4000 7.3 0.2286 71.3 0.0104404 119.208 +5000 7.3 0.2286 71.3 0.0104404 115.298 +6300 7.3 0.2286 71.3 0.0104404 115.818 +200 7.3 0.2286 55.5 0.0111706 135.234 +250 7.3 0.2286 55.5 0.0111706 136.384 +315 7.3 0.2286 55.5 0.0111706 136.284 +400 7.3 0.2286 55.5 0.0111706 135.924 +500 7.3 0.2286 55.5 0.0111706 133.174 +630 7.3 0.2286 55.5 0.0111706 130.934 +800 7.3 0.2286 55.5 0.0111706 128.444 +1000 7.3 0.2286 55.5 0.0111706 125.194 +1250 7.3 0.2286 55.5 0.0111706 125.724 +1600 7.3 0.2286 55.5 0.0111706 123.354 +2000 7.3 0.2286 55.5 0.0111706 120.354 +2500 7.3 0.2286 55.5 
0.0111706 118.994 +3150 7.3 0.2286 55.5 0.0111706 117.134 +4000 7.3 0.2286 55.5 0.0111706 117.284 +5000 7.3 0.2286 55.5 0.0111706 113.144 +6300 7.3 0.2286 55.5 0.0111706 111.534 +200 7.3 0.2286 39.6 0.0123481 130.989 +250 7.3 0.2286 39.6 0.0123481 131.889 +315 7.3 0.2286 39.6 0.0123481 132.149 +400 7.3 0.2286 39.6 0.0123481 132.039 +500 7.3 0.2286 39.6 0.0123481 130.299 +630 7.3 0.2286 39.6 0.0123481 128.929 +800 7.3 0.2286 39.6 0.0123481 126.299 +1000 7.3 0.2286 39.6 0.0123481 122.539 +1250 7.3 0.2286 39.6 0.0123481 123.189 +1600 7.3 0.2286 39.6 0.0123481 121.059 +2000 7.3 0.2286 39.6 0.0123481 117.809 +2500 7.3 0.2286 39.6 0.0123481 116.559 +3150 7.3 0.2286 39.6 0.0123481 114.309 +4000 7.3 0.2286 39.6 0.0123481 114.079 +5000 7.3 0.2286 39.6 0.0123481 111.959 +6300 7.3 0.2286 39.6 0.0123481 110.839 +200 7.3 0.2286 31.7 0.0132672 128.679 +250 7.3 0.2286 31.7 0.0132672 130.089 +315 7.3 0.2286 31.7 0.0132672 130.239 +400 7.3 0.2286 31.7 0.0132672 130.269 +500 7.3 0.2286 31.7 0.0132672 128.169 +630 7.3 0.2286 31.7 0.0132672 126.189 +800 7.3 0.2286 31.7 0.0132672 123.209 +1000 7.3 0.2286 31.7 0.0132672 119.099 +1250 7.3 0.2286 31.7 0.0132672 120.509 +1600 7.3 0.2286 31.7 0.0132672 119.039 +2000 7.3 0.2286 31.7 0.0132672 115.309 +2500 7.3 0.2286 31.7 0.0132672 114.709 +3150 7.3 0.2286 31.7 0.0132672 113.229 +4000 7.3 0.2286 31.7 0.0132672 112.639 +5000 7.3 0.2286 31.7 0.0132672 111.029 +6300 7.3 0.2286 31.7 0.0132672 110.689 +800 0 0.1524 71.3 0.0015988 125.817 +1000 0 0.1524 71.3 0.0015988 127.307 +1250 0 0.1524 71.3 0.0015988 128.927 +1600 0 0.1524 71.3 0.0015988 129.667 +2000 0 0.1524 71.3 0.0015988 128.647 +2500 0 0.1524 71.3 0.0015988 128.127 +3150 0 0.1524 71.3 0.0015988 129.377 +4000 0 0.1524 71.3 0.0015988 128.857 +5000 0 0.1524 71.3 0.0015988 126.457 +6300 0 0.1524 71.3 0.0015988 125.427 +8000 0 0.1524 71.3 0.0015988 122.527 +10000 0 0.1524 71.3 0.0015988 120.247 +12500 0 0.1524 71.3 0.0015988 117.087 +16000 0 0.1524 71.3 0.0015988 113.297 +500 0 0.1524 55.5 0.00172668 120.573 +630 0 0.1524 55.5 0.00172668 123.583 +800 0 0.1524 55.5 0.00172668 126.713 +1000 0 0.1524 55.5 0.00172668 128.583 +1250 0 0.1524 55.5 0.00172668 129.953 +1600 0 0.1524 55.5 0.00172668 130.183 +2000 0 0.1524 55.5 0.00172668 129.673 +2500 0 0.1524 55.5 0.00172668 127.763 +3150 0 0.1524 55.5 0.00172668 127.753 +4000 0 0.1524 55.5 0.00172668 127.233 +5000 0 0.1524 55.5 0.00172668 125.203 +6300 0 0.1524 55.5 0.00172668 123.303 +8000 0 0.1524 55.5 0.00172668 121.903 +10000 0 0.1524 55.5 0.00172668 119.253 +12500 0 0.1524 55.5 0.00172668 117.093 +16000 0 0.1524 55.5 0.00172668 112.803 +500 0 0.1524 39.6 0.00193287 119.513 +630 0 0.1524 39.6 0.00193287 124.403 +800 0 0.1524 39.6 0.00193287 127.903 +1000 0 0.1524 39.6 0.00193287 130.033 +1250 0 0.1524 39.6 0.00193287 131.023 +1600 0 0.1524 39.6 0.00193287 131.013 +2000 0 0.1524 39.6 0.00193287 129.633 +2500 0 0.1524 39.6 0.00193287 126.863 +3150 0 0.1524 39.6 0.00193287 125.603 +4000 0 0.1524 39.6 0.00193287 125.343 +5000 0 0.1524 39.6 0.00193287 123.453 +6300 0 0.1524 39.6 0.00193287 121.313 +8000 0 0.1524 39.6 0.00193287 120.553 +10000 0 0.1524 39.6 0.00193287 115.413 +500 0 0.1524 31.7 0.00209405 121.617 +630 0 0.1524 31.7 0.00209405 125.997 +800 0 0.1524 31.7 0.00209405 129.117 +1000 0 0.1524 31.7 0.00209405 130.987 +1250 0 0.1524 31.7 0.00209405 131.467 +1600 0 0.1524 31.7 0.00209405 130.817 +2000 0 0.1524 31.7 0.00209405 128.907 +2500 0 0.1524 31.7 0.00209405 125.867 +3150 0 0.1524 31.7 0.00209405 124.207 +4000 0 0.1524 31.7 0.00209405 123.807 +5000 0 0.1524 31.7 
0.00209405 122.397 +6300 0 0.1524 31.7 0.00209405 119.737 +8000 0 0.1524 31.7 0.00209405 117.957 +630 2.7 0.1524 71.3 0.00243851 127.404 +800 2.7 0.1524 71.3 0.00243851 127.394 +1000 2.7 0.1524 71.3 0.00243851 128.774 +1250 2.7 0.1524 71.3 0.00243851 130.144 +1600 2.7 0.1524 71.3 0.00243851 130.644 +2000 2.7 0.1524 71.3 0.00243851 130.114 +2500 2.7 0.1524 71.3 0.00243851 128.334 +3150 2.7 0.1524 71.3 0.00243851 127.054 +4000 2.7 0.1524 71.3 0.00243851 126.534 +5000 2.7 0.1524 71.3 0.00243851 124.364 +6300 2.7 0.1524 71.3 0.00243851 121.944 +8000 2.7 0.1524 71.3 0.00243851 120.534 +10000 2.7 0.1524 71.3 0.00243851 116.724 +12500 2.7 0.1524 71.3 0.00243851 113.034 +16000 2.7 0.1524 71.3 0.00243851 110.364 +500 2.7 0.1524 39.6 0.00294804 121.009 +630 2.7 0.1524 39.6 0.00294804 125.809 +800 2.7 0.1524 39.6 0.00294804 128.829 +1000 2.7 0.1524 39.6 0.00294804 130.589 +1250 2.7 0.1524 39.6 0.00294804 130.829 +1600 2.7 0.1524 39.6 0.00294804 130.049 +2000 2.7 0.1524 39.6 0.00294804 128.139 +2500 2.7 0.1524 39.6 0.00294804 125.589 +3150 2.7 0.1524 39.6 0.00294804 122.919 +4000 2.7 0.1524 39.6 0.00294804 121.889 +5000 2.7 0.1524 39.6 0.00294804 121.499 +6300 2.7 0.1524 39.6 0.00294804 119.209 +8000 2.7 0.1524 39.6 0.00294804 116.659 +10000 2.7 0.1524 39.6 0.00294804 112.589 +12500 2.7 0.1524 39.6 0.00294804 108.649 +400 5.4 0.1524 71.3 0.00401199 124.121 +500 5.4 0.1524 71.3 0.00401199 126.291 +630 5.4 0.1524 71.3 0.00401199 128.971 +800 5.4 0.1524 71.3 0.00401199 131.281 +1000 5.4 0.1524 71.3 0.00401199 133.201 +1250 5.4 0.1524 71.3 0.00401199 134.111 +1600 5.4 0.1524 71.3 0.00401199 133.241 +2000 5.4 0.1524 71.3 0.00401199 131.111 +2500 5.4 0.1524 71.3 0.00401199 127.591 +3150 5.4 0.1524 71.3 0.00401199 123.311 +4000 5.4 0.1524 71.3 0.00401199 121.431 +5000 5.4 0.1524 71.3 0.00401199 120.061 +6300 5.4 0.1524 71.3 0.00401199 116.411 +400 5.4 0.1524 55.5 0.00433288 126.807 +500 5.4 0.1524 55.5 0.00433288 129.367 +630 5.4 0.1524 55.5 0.00433288 131.807 +800 5.4 0.1524 55.5 0.00433288 133.097 +1000 5.4 0.1524 55.5 0.00433288 132.127 +1250 5.4 0.1524 55.5 0.00433288 130.777 +1600 5.4 0.1524 55.5 0.00433288 130.567 +2000 5.4 0.1524 55.5 0.00433288 128.707 +2500 5.4 0.1524 55.5 0.00433288 124.077 +3150 5.4 0.1524 55.5 0.00433288 121.587 +4000 5.4 0.1524 55.5 0.00433288 119.737 +5000 5.4 0.1524 55.5 0.00433288 118.757 +6300 5.4 0.1524 55.5 0.00433288 117.287 +8000 5.4 0.1524 55.5 0.00433288 114.927 +315 5.4 0.1524 39.6 0.00485029 125.347 +400 5.4 0.1524 39.6 0.00485029 127.637 +500 5.4 0.1524 39.6 0.00485029 129.937 +630 5.4 0.1524 39.6 0.00485029 132.357 +800 5.4 0.1524 39.6 0.00485029 132.757 +1000 5.4 0.1524 39.6 0.00485029 130.507 +1250 5.4 0.1524 39.6 0.00485029 127.117 +1600 5.4 0.1524 39.6 0.00485029 126.267 +2000 5.4 0.1524 39.6 0.00485029 124.647 +2500 5.4 0.1524 39.6 0.00485029 120.497 +3150 5.4 0.1524 39.6 0.00485029 119.137 +4000 5.4 0.1524 39.6 0.00485029 117.137 +5000 5.4 0.1524 39.6 0.00485029 117.037 +6300 5.4 0.1524 39.6 0.00485029 116.677 +315 5.4 0.1524 31.7 0.00525474 125.741 +400 5.4 0.1524 31.7 0.00525474 127.781 +500 5.4 0.1524 31.7 0.00525474 129.681 +630 5.4 0.1524 31.7 0.00525474 131.471 +800 5.4 0.1524 31.7 0.00525474 131.491 +1000 5.4 0.1524 31.7 0.00525474 128.241 +1250 5.4 0.1524 31.7 0.00525474 123.991 +1600 5.4 0.1524 31.7 0.00525474 123.761 +2000 5.4 0.1524 31.7 0.00525474 122.771 +2500 5.4 0.1524 31.7 0.00525474 119.151 +3150 5.4 0.1524 31.7 0.00525474 118.291 +4000 5.4 0.1524 31.7 0.00525474 116.181 +5000 5.4 0.1524 31.7 0.00525474 115.691 +6300 5.4 0.1524 31.7 
0.00525474 115.591 +315 7.2 0.1524 71.3 0.00752039 128.713 +400 7.2 0.1524 71.3 0.00752039 130.123 +500 7.2 0.1524 71.3 0.00752039 132.043 +630 7.2 0.1524 71.3 0.00752039 134.853 +800 7.2 0.1524 71.3 0.00752039 136.023 +1000 7.2 0.1524 71.3 0.00752039 134.273 +1250 7.2 0.1524 71.3 0.00752039 132.513 +1600 7.2 0.1524 71.3 0.00752039 130.893 +2000 7.2 0.1524 71.3 0.00752039 128.643 +2500 7.2 0.1524 71.3 0.00752039 124.353 +3150 7.2 0.1524 71.3 0.00752039 116.783 +4000 7.2 0.1524 71.3 0.00752039 119.343 +5000 7.2 0.1524 71.3 0.00752039 118.343 +6300 7.2 0.1524 71.3 0.00752039 116.603 +8000 7.2 0.1524 71.3 0.00752039 113.333 +10000 7.2 0.1524 71.3 0.00752039 110.313 +250 7.2 0.1524 39.6 0.00909175 127.488 +315 7.2 0.1524 39.6 0.00909175 130.558 +400 7.2 0.1524 39.6 0.00909175 132.118 +500 7.2 0.1524 39.6 0.00909175 132.658 +630 7.2 0.1524 39.6 0.00909175 133.198 +800 7.2 0.1524 39.6 0.00909175 132.358 +1000 7.2 0.1524 39.6 0.00909175 128.338 +1250 7.2 0.1524 39.6 0.00909175 122.428 +1600 7.2 0.1524 39.6 0.00909175 120.058 +2000 7.2 0.1524 39.6 0.00909175 120.228 +2500 7.2 0.1524 39.6 0.00909175 117.478 +3150 7.2 0.1524 39.6 0.00909175 111.818 +4000 7.2 0.1524 39.6 0.00909175 114.258 +5000 7.2 0.1524 39.6 0.00909175 113.288 +6300 7.2 0.1524 39.6 0.00909175 112.688 +8000 7.2 0.1524 39.6 0.00909175 111.588 +10000 7.2 0.1524 39.6 0.00909175 110.868 +200 9.9 0.1524 71.3 0.0193001 134.319 +250 9.9 0.1524 71.3 0.0193001 135.329 +315 9.9 0.1524 71.3 0.0193001 135.459 +400 9.9 0.1524 71.3 0.0193001 135.079 +500 9.9 0.1524 71.3 0.0193001 131.279 +630 9.9 0.1524 71.3 0.0193001 129.889 +800 9.9 0.1524 71.3 0.0193001 128.879 +1000 9.9 0.1524 71.3 0.0193001 126.349 +1250 9.9 0.1524 71.3 0.0193001 122.679 +1600 9.9 0.1524 71.3 0.0193001 121.789 +2000 9.9 0.1524 71.3 0.0193001 120.779 +2500 9.9 0.1524 71.3 0.0193001 119.639 +3150 9.9 0.1524 71.3 0.0193001 116.849 +4000 9.9 0.1524 71.3 0.0193001 115.079 +5000 9.9 0.1524 71.3 0.0193001 114.569 +6300 9.9 0.1524 71.3 0.0193001 112.039 +200 9.9 0.1524 55.5 0.0208438 131.955 +250 9.9 0.1524 55.5 0.0208438 133.235 +315 9.9 0.1524 55.5 0.0208438 132.355 +400 9.9 0.1524 55.5 0.0208438 131.605 +500 9.9 0.1524 55.5 0.0208438 127.815 +630 9.9 0.1524 55.5 0.0208438 127.315 +800 9.9 0.1524 55.5 0.0208438 126.565 +1000 9.9 0.1524 55.5 0.0208438 124.665 +1250 9.9 0.1524 55.5 0.0208438 121.635 +1600 9.9 0.1524 55.5 0.0208438 119.875 +2000 9.9 0.1524 55.5 0.0208438 119.505 +2500 9.9 0.1524 55.5 0.0208438 118.365 +3150 9.9 0.1524 55.5 0.0208438 115.085 +4000 9.9 0.1524 55.5 0.0208438 112.945 +5000 9.9 0.1524 55.5 0.0208438 112.065 +6300 9.9 0.1524 55.5 0.0208438 110.555 +200 9.9 0.1524 39.6 0.0233328 127.315 +250 9.9 0.1524 39.6 0.0233328 128.335 +315 9.9 0.1524 39.6 0.0233328 128.595 +400 9.9 0.1524 39.6 0.0233328 128.345 +500 9.9 0.1524 39.6 0.0233328 126.835 +630 9.9 0.1524 39.6 0.0233328 126.465 +800 9.9 0.1524 39.6 0.0233328 126.345 +1000 9.9 0.1524 39.6 0.0233328 123.835 +1250 9.9 0.1524 39.6 0.0233328 120.555 +1600 9.9 0.1524 39.6 0.0233328 118.545 +2000 9.9 0.1524 39.6 0.0233328 117.925 +2500 9.9 0.1524 39.6 0.0233328 116.295 +3150 9.9 0.1524 39.6 0.0233328 113.525 +4000 9.9 0.1524 39.6 0.0233328 112.265 +5000 9.9 0.1524 39.6 0.0233328 111.135 +6300 9.9 0.1524 39.6 0.0233328 109.885 +200 9.9 0.1524 31.7 0.0252785 127.299 +250 9.9 0.1524 31.7 0.0252785 128.559 +315 9.9 0.1524 31.7 0.0252785 128.809 +400 9.9 0.1524 31.7 0.0252785 128.939 +500 9.9 0.1524 31.7 0.0252785 127.179 +630 9.9 0.1524 31.7 0.0252785 126.049 +800 9.9 0.1524 31.7 0.0252785 125.539 +1000 9.9 0.1524 
31.7 0.0252785 122.149 +1250 9.9 0.1524 31.7 0.0252785 118.619 +1600 9.9 0.1524 31.7 0.0252785 117.119 +2000 9.9 0.1524 31.7 0.0252785 116.859 +2500 9.9 0.1524 31.7 0.0252785 114.729 +3150 9.9 0.1524 31.7 0.0252785 112.209 +4000 9.9 0.1524 31.7 0.0252785 111.459 +5000 9.9 0.1524 31.7 0.0252785 109.949 +6300 9.9 0.1524 31.7 0.0252785 108.689 +200 12.6 0.1524 71.3 0.0483159 128.354 +250 12.6 0.1524 71.3 0.0483159 129.744 +315 12.6 0.1524 71.3 0.0483159 128.484 +400 12.6 0.1524 71.3 0.0483159 127.094 +500 12.6 0.1524 71.3 0.0483159 121.664 +630 12.6 0.1524 71.3 0.0483159 123.304 +800 12.6 0.1524 71.3 0.0483159 123.054 +1000 12.6 0.1524 71.3 0.0483159 122.044 +1250 12.6 0.1524 71.3 0.0483159 120.154 +1600 12.6 0.1524 71.3 0.0483159 120.534 +2000 12.6 0.1524 71.3 0.0483159 117.504 +2500 12.6 0.1524 71.3 0.0483159 115.234 +3150 12.6 0.1524 71.3 0.0483159 113.334 +4000 12.6 0.1524 71.3 0.0483159 108.034 +5000 12.6 0.1524 71.3 0.0483159 108.034 +6300 12.6 0.1524 71.3 0.0483159 107.284 +200 12.6 0.1524 39.6 0.0584113 114.750 +250 12.6 0.1524 39.6 0.0584113 115.890 +315 12.6 0.1524 39.6 0.0584113 116.020 +400 12.6 0.1524 39.6 0.0584113 115.910 +500 12.6 0.1524 39.6 0.0584113 114.900 +630 12.6 0.1524 39.6 0.0584113 116.550 +800 12.6 0.1524 39.6 0.0584113 116.560 +1000 12.6 0.1524 39.6 0.0584113 114.670 +1250 12.6 0.1524 39.6 0.0584113 112.160 +1600 12.6 0.1524 39.6 0.0584113 110.780 +2000 12.6 0.1524 39.6 0.0584113 109.520 +2500 12.6 0.1524 39.6 0.0584113 106.880 +3150 12.6 0.1524 39.6 0.0584113 106.260 +4000 12.6 0.1524 39.6 0.0584113 104.500 +5000 12.6 0.1524 39.6 0.0584113 104.130 +6300 12.6 0.1524 39.6 0.0584113 103.380 +800 0 0.0508 71.3 0.000740478 130.960 +1000 0 0.0508 71.3 0.000740478 129.450 +1250 0 0.0508 71.3 0.000740478 128.560 +1600 0 0.0508 71.3 0.000740478 129.680 +2000 0 0.0508 71.3 0.000740478 131.060 +2500 0 0.0508 71.3 0.000740478 131.310 +3150 0 0.0508 71.3 0.000740478 135.070 +4000 0 0.0508 71.3 0.000740478 134.430 +5000 0 0.0508 71.3 0.000740478 134.430 +6300 0 0.0508 71.3 0.000740478 133.040 +8000 0 0.0508 71.3 0.000740478 130.890 +10000 0 0.0508 71.3 0.000740478 128.740 +12500 0 0.0508 71.3 0.000740478 125.220 +800 0 0.0508 55.5 0.00076193 124.336 +1000 0 0.0508 55.5 0.00076193 125.586 +1250 0 0.0508 55.5 0.00076193 127.076 +1600 0 0.0508 55.5 0.00076193 128.576 +2000 0 0.0508 55.5 0.00076193 131.456 +2500 0 0.0508 55.5 0.00076193 133.956 +3150 0 0.0508 55.5 0.00076193 134.826 +4000 0 0.0508 55.5 0.00076193 134.946 +5000 0 0.0508 55.5 0.00076193 134.556 +6300 0 0.0508 55.5 0.00076193 132.796 +8000 0 0.0508 55.5 0.00076193 130.156 +10000 0 0.0508 55.5 0.00076193 127.636 +12500 0 0.0508 55.5 0.00076193 125.376 +800 0 0.0508 39.6 0.000791822 126.508 +1000 0 0.0508 39.6 0.000791822 127.638 +1250 0 0.0508 39.6 0.000791822 129.148 +1600 0 0.0508 39.6 0.000791822 130.908 +2000 0 0.0508 39.6 0.000791822 132.918 +2500 0 0.0508 39.6 0.000791822 134.938 +3150 0 0.0508 39.6 0.000791822 135.938 +4000 0 0.0508 39.6 0.000791822 135.308 +5000 0 0.0508 39.6 0.000791822 134.308 +6300 0 0.0508 39.6 0.000791822 131.918 +8000 0 0.0508 39.6 0.000791822 128.518 +10000 0 0.0508 39.6 0.000791822 125.998 +12500 0 0.0508 39.6 0.000791822 123.988 +800 0 0.0508 31.7 0.000812164 122.790 +1000 0 0.0508 31.7 0.000812164 126.780 +1250 0 0.0508 31.7 0.000812164 129.270 +1600 0 0.0508 31.7 0.000812164 131.010 +2000 0 0.0508 31.7 0.000812164 133.010 +2500 0 0.0508 31.7 0.000812164 134.870 +3150 0 0.0508 31.7 0.000812164 135.490 +4000 0 0.0508 31.7 0.000812164 134.110 +5000 0 0.0508 31.7 0.000812164 133.230 
+6300 0 0.0508 31.7 0.000812164 130.340 +8000 0 0.0508 31.7 0.000812164 126.590 +10000 0 0.0508 31.7 0.000812164 122.450 +12500 0 0.0508 31.7 0.000812164 119.070 +1600 4.2 0.0508 71.3 0.00142788 124.318 +2000 4.2 0.0508 71.3 0.00142788 129.848 +2500 4.2 0.0508 71.3 0.00142788 131.978 +3150 4.2 0.0508 71.3 0.00142788 133.728 +4000 4.2 0.0508 71.3 0.00142788 133.598 +5000 4.2 0.0508 71.3 0.00142788 132.828 +6300 4.2 0.0508 71.3 0.00142788 129.308 +8000 4.2 0.0508 71.3 0.00142788 125.268 +10000 4.2 0.0508 71.3 0.00142788 121.238 +12500 4.2 0.0508 71.3 0.00142788 117.328 +1000 4.2 0.0508 39.6 0.00152689 125.647 +1250 4.2 0.0508 39.6 0.00152689 128.427 +1600 4.2 0.0508 39.6 0.00152689 130.197 +2000 4.2 0.0508 39.6 0.00152689 132.587 +2500 4.2 0.0508 39.6 0.00152689 133.847 +3150 4.2 0.0508 39.6 0.00152689 133.587 +4000 4.2 0.0508 39.6 0.00152689 131.807 +5000 4.2 0.0508 39.6 0.00152689 129.777 +6300 4.2 0.0508 39.6 0.00152689 125.717 +8000 4.2 0.0508 39.6 0.00152689 120.397 +10000 4.2 0.0508 39.6 0.00152689 116.967 +800 8.4 0.0508 71.3 0.00529514 127.556 +1000 8.4 0.0508 71.3 0.00529514 129.946 +1250 8.4 0.0508 71.3 0.00529514 132.086 +1600 8.4 0.0508 71.3 0.00529514 133.846 +2000 8.4 0.0508 71.3 0.00529514 134.476 +2500 8.4 0.0508 71.3 0.00529514 134.226 +3150 8.4 0.0508 71.3 0.00529514 131.966 +4000 8.4 0.0508 71.3 0.00529514 126.926 +5000 8.4 0.0508 71.3 0.00529514 121.146 +400 8.4 0.0508 55.5 0.00544854 121.582 +500 8.4 0.0508 55.5 0.00544854 123.742 +630 8.4 0.0508 55.5 0.00544854 126.152 +800 8.4 0.0508 55.5 0.00544854 128.562 +1000 8.4 0.0508 55.5 0.00544854 130.722 +1250 8.4 0.0508 55.5 0.00544854 132.252 +1600 8.4 0.0508 55.5 0.00544854 133.032 +2000 8.4 0.0508 55.5 0.00544854 133.042 +2500 8.4 0.0508 55.5 0.00544854 131.542 +3150 8.4 0.0508 55.5 0.00544854 128.402 +4000 8.4 0.0508 55.5 0.00544854 122.612 +5000 8.4 0.0508 55.5 0.00544854 115.812 +400 8.4 0.0508 39.6 0.00566229 120.015 +500 8.4 0.0508 39.6 0.00566229 122.905 +630 8.4 0.0508 39.6 0.00566229 126.045 +800 8.4 0.0508 39.6 0.00566229 128.435 +1000 8.4 0.0508 39.6 0.00566229 130.195 +1250 8.4 0.0508 39.6 0.00566229 131.205 +1600 8.4 0.0508 39.6 0.00566229 130.965 +2000 8.4 0.0508 39.6 0.00566229 129.965 +2500 8.4 0.0508 39.6 0.00566229 127.465 +3150 8.4 0.0508 39.6 0.00566229 123.965 +4000 8.4 0.0508 39.6 0.00566229 118.955 +400 8.4 0.0508 31.7 0.00580776 120.076 +500 8.4 0.0508 31.7 0.00580776 122.966 +630 8.4 0.0508 31.7 0.00580776 125.856 +800 8.4 0.0508 31.7 0.00580776 128.246 +1000 8.4 0.0508 31.7 0.00580776 129.516 +1250 8.4 0.0508 31.7 0.00580776 130.156 +1600 8.4 0.0508 31.7 0.00580776 129.296 +2000 8.4 0.0508 31.7 0.00580776 127.686 +2500 8.4 0.0508 31.7 0.00580776 125.576 +3150 8.4 0.0508 31.7 0.00580776 122.086 +4000 8.4 0.0508 31.7 0.00580776 118.106 +200 11.2 0.0508 71.3 0.014072 125.941 +250 11.2 0.0508 71.3 0.014072 127.101 +315 11.2 0.0508 71.3 0.014072 128.381 +400 11.2 0.0508 71.3 0.014072 129.281 +500 11.2 0.0508 71.3 0.014072 130.311 +630 11.2 0.0508 71.3 0.014072 133.611 +800 11.2 0.0508 71.3 0.014072 136.031 +1000 11.2 0.0508 71.3 0.014072 136.941 +1250 11.2 0.0508 71.3 0.014072 136.191 +1600 11.2 0.0508 71.3 0.014072 135.191 +2000 11.2 0.0508 71.3 0.014072 133.311 +2500 11.2 0.0508 71.3 0.014072 130.541 +3150 11.2 0.0508 71.3 0.014072 127.141 +4000 11.2 0.0508 71.3 0.014072 122.471 +200 11.2 0.0508 39.6 0.0150478 125.010 +250 11.2 0.0508 39.6 0.0150478 126.430 +315 11.2 0.0508 39.6 0.0150478 128.990 +400 11.2 0.0508 39.6 0.0150478 130.670 +500 11.2 0.0508 39.6 0.0150478 131.960 +630 11.2 0.0508 39.6 
0.0150478 133.130 +800 11.2 0.0508 39.6 0.0150478 133.790 +1000 11.2 0.0508 39.6 0.0150478 132.430 +1250 11.2 0.0508 39.6 0.0150478 130.050 +1600 11.2 0.0508 39.6 0.0150478 126.540 +2000 11.2 0.0508 39.6 0.0150478 124.420 +2500 11.2 0.0508 39.6 0.0150478 122.170 +3150 11.2 0.0508 39.6 0.0150478 119.670 +4000 11.2 0.0508 39.6 0.0150478 115.520 +200 15.4 0.0508 71.3 0.0264269 123.595 +250 15.4 0.0508 71.3 0.0264269 124.835 +315 15.4 0.0508 71.3 0.0264269 126.195 +400 15.4 0.0508 71.3 0.0264269 126.805 +500 15.4 0.0508 71.3 0.0264269 127.285 +630 15.4 0.0508 71.3 0.0264269 129.645 +800 15.4 0.0508 71.3 0.0264269 131.515 +1000 15.4 0.0508 71.3 0.0264269 131.865 +1250 15.4 0.0508 71.3 0.0264269 130.845 +1600 15.4 0.0508 71.3 0.0264269 130.065 +2000 15.4 0.0508 71.3 0.0264269 129.285 +2500 15.4 0.0508 71.3 0.0264269 127.625 +3150 15.4 0.0508 71.3 0.0264269 125.715 +4000 15.4 0.0508 71.3 0.0264269 122.675 +5000 15.4 0.0508 71.3 0.0264269 119.135 +6300 15.4 0.0508 71.3 0.0264269 115.215 +8000 15.4 0.0508 71.3 0.0264269 112.675 +200 15.4 0.0508 55.5 0.0271925 122.940 +250 15.4 0.0508 55.5 0.0271925 124.170 +315 15.4 0.0508 55.5 0.0271925 125.390 +400 15.4 0.0508 55.5 0.0271925 126.500 +500 15.4 0.0508 55.5 0.0271925 127.220 +630 15.4 0.0508 55.5 0.0271925 129.330 +800 15.4 0.0508 55.5 0.0271925 130.430 +1000 15.4 0.0508 55.5 0.0271925 130.400 +1250 15.4 0.0508 55.5 0.0271925 130.000 +1600 15.4 0.0508 55.5 0.0271925 128.200 +2000 15.4 0.0508 55.5 0.0271925 127.040 +2500 15.4 0.0508 55.5 0.0271925 125.630 +3150 15.4 0.0508 55.5 0.0271925 123.460 +4000 15.4 0.0508 55.5 0.0271925 120.920 +5000 15.4 0.0508 55.5 0.0271925 117.110 +6300 15.4 0.0508 55.5 0.0271925 112.930 +200 15.4 0.0508 39.6 0.0282593 121.783 +250 15.4 0.0508 39.6 0.0282593 122.893 +315 15.4 0.0508 39.6 0.0282593 124.493 +400 15.4 0.0508 39.6 0.0282593 125.353 +500 15.4 0.0508 39.6 0.0282593 125.963 +630 15.4 0.0508 39.6 0.0282593 127.443 +800 15.4 0.0508 39.6 0.0282593 128.423 +1000 15.4 0.0508 39.6 0.0282593 127.893 +1250 15.4 0.0508 39.6 0.0282593 126.743 +1600 15.4 0.0508 39.6 0.0282593 124.843 +2000 15.4 0.0508 39.6 0.0282593 123.443 +2500 15.4 0.0508 39.6 0.0282593 122.413 +3150 15.4 0.0508 39.6 0.0282593 120.513 +4000 15.4 0.0508 39.6 0.0282593 118.113 +5000 15.4 0.0508 39.6 0.0282593 114.453 +6300 15.4 0.0508 39.6 0.0282593 109.663 +200 15.4 0.0508 31.7 0.0289853 119.975 +250 15.4 0.0508 31.7 0.0289853 121.225 +315 15.4 0.0508 31.7 0.0289853 122.845 +400 15.4 0.0508 31.7 0.0289853 123.705 +500 15.4 0.0508 31.7 0.0289853 123.695 +630 15.4 0.0508 31.7 0.0289853 124.685 +800 15.4 0.0508 31.7 0.0289853 125.555 +1000 15.4 0.0508 31.7 0.0289853 124.525 +1250 15.4 0.0508 31.7 0.0289853 123.255 +1600 15.4 0.0508 31.7 0.0289853 121.485 +2000 15.4 0.0508 31.7 0.0289853 120.835 +2500 15.4 0.0508 31.7 0.0289853 119.945 +3150 15.4 0.0508 31.7 0.0289853 118.045 +4000 15.4 0.0508 31.7 0.0289853 115.635 +5000 15.4 0.0508 31.7 0.0289853 112.355 +6300 15.4 0.0508 31.7 0.0289853 108.185 +200 19.7 0.0508 71.3 0.0341183 118.005 +250 19.7 0.0508 71.3 0.0341183 119.115 +315 19.7 0.0508 71.3 0.0341183 121.235 +400 19.7 0.0508 71.3 0.0341183 123.865 +500 19.7 0.0508 71.3 0.0341183 126.995 +630 19.7 0.0508 71.3 0.0341183 128.365 +800 19.7 0.0508 71.3 0.0341183 124.555 +1000 19.7 0.0508 71.3 0.0341183 121.885 +1250 19.7 0.0508 71.3 0.0341183 121.485 +1600 19.7 0.0508 71.3 0.0341183 120.575 +2000 19.7 0.0508 71.3 0.0341183 120.055 +2500 19.7 0.0508 71.3 0.0341183 118.385 +3150 19.7 0.0508 71.3 0.0341183 116.225 +4000 19.7 0.0508 71.3 0.0341183 113.045 +200 
19.7 0.0508 39.6 0.036484 125.974 +250 19.7 0.0508 39.6 0.036484 127.224 +315 19.7 0.0508 39.6 0.036484 129.864 +400 19.7 0.0508 39.6 0.036484 130.614 +500 19.7 0.0508 39.6 0.036484 128.444 +630 19.7 0.0508 39.6 0.036484 120.324 +800 19.7 0.0508 39.6 0.036484 119.174 +1000 19.7 0.0508 39.6 0.036484 118.904 +1250 19.7 0.0508 39.6 0.036484 118.634 +1600 19.7 0.0508 39.6 0.036484 117.604 +2000 19.7 0.0508 39.6 0.036484 117.724 +2500 19.7 0.0508 39.6 0.036484 116.184 +3150 19.7 0.0508 39.6 0.036484 113.004 +4000 19.7 0.0508 39.6 0.036484 108.684 +2500 0 0.0254 71.3 0.000400682 133.707 +3150 0 0.0254 71.3 0.000400682 137.007 +4000 0 0.0254 71.3 0.000400682 138.557 +5000 0 0.0254 71.3 0.000400682 136.837 +6300 0 0.0254 71.3 0.000400682 134.987 +8000 0 0.0254 71.3 0.000400682 129.867 +10000 0 0.0254 71.3 0.000400682 130.787 +12500 0 0.0254 71.3 0.000400682 133.207 +16000 0 0.0254 71.3 0.000400682 130.477 +20000 0 0.0254 71.3 0.000400682 123.217 +2000 0 0.0254 55.5 0.00041229 127.623 +2500 0 0.0254 55.5 0.00041229 130.073 +3150 0 0.0254 55.5 0.00041229 130.503 +4000 0 0.0254 55.5 0.00041229 133.223 +5000 0 0.0254 55.5 0.00041229 135.803 +6300 0 0.0254 55.5 0.00041229 136.103 +8000 0 0.0254 55.5 0.00041229 136.163 +10000 0 0.0254 55.5 0.00041229 134.563 +12500 0 0.0254 55.5 0.00041229 131.453 +16000 0 0.0254 55.5 0.00041229 125.683 +20000 0 0.0254 55.5 0.00041229 121.933 +1600 0 0.0254 39.6 0.000428464 124.156 +2000 0 0.0254 39.6 0.000428464 130.026 +2500 0 0.0254 39.6 0.000428464 131.836 +3150 0 0.0254 39.6 0.000428464 133.276 +4000 0 0.0254 39.6 0.000428464 135.346 +5000 0 0.0254 39.6 0.000428464 136.536 +6300 0 0.0254 39.6 0.000428464 136.826 +8000 0 0.0254 39.6 0.000428464 135.866 +10000 0 0.0254 39.6 0.000428464 133.376 +12500 0 0.0254 39.6 0.000428464 129.116 +16000 0 0.0254 39.6 0.000428464 124.986 +1000 0 0.0254 31.7 0.000439472 125.127 +1250 0 0.0254 31.7 0.000439472 127.947 +1600 0 0.0254 31.7 0.000439472 129.267 +2000 0 0.0254 31.7 0.000439472 130.697 +2500 0 0.0254 31.7 0.000439472 132.897 +3150 0 0.0254 31.7 0.000439472 135.227 +4000 0 0.0254 31.7 0.000439472 137.047 +5000 0 0.0254 31.7 0.000439472 138.607 +6300 0 0.0254 31.7 0.000439472 138.537 +8000 0 0.0254 31.7 0.000439472 137.207 +10000 0 0.0254 31.7 0.000439472 134.227 +12500 0 0.0254 31.7 0.000439472 128.977 +16000 0 0.0254 31.7 0.000439472 125.627 +2000 4.8 0.0254 71.3 0.000848633 128.398 +2500 4.8 0.0254 71.3 0.000848633 130.828 +3150 4.8 0.0254 71.3 0.000848633 133.378 +4000 4.8 0.0254 71.3 0.000848633 134.928 +5000 4.8 0.0254 71.3 0.000848633 135.468 +6300 4.8 0.0254 71.3 0.000848633 134.498 +8000 4.8 0.0254 71.3 0.000848633 131.518 +10000 4.8 0.0254 71.3 0.000848633 127.398 +12500 4.8 0.0254 71.3 0.000848633 127.688 +16000 4.8 0.0254 71.3 0.000848633 124.208 +20000 4.8 0.0254 71.3 0.000848633 119.708 +1600 4.8 0.0254 55.5 0.000873218 121.474 +2000 4.8 0.0254 55.5 0.000873218 125.054 +2500 4.8 0.0254 55.5 0.000873218 129.144 +3150 4.8 0.0254 55.5 0.000873218 132.354 +4000 4.8 0.0254 55.5 0.000873218 133.924 +5000 4.8 0.0254 55.5 0.000873218 135.484 +6300 4.8 0.0254 55.5 0.000873218 135.164 +8000 4.8 0.0254 55.5 0.000873218 132.184 +10000 4.8 0.0254 55.5 0.000873218 126.944 +12500 4.8 0.0254 55.5 0.000873218 125.094 +16000 4.8 0.0254 55.5 0.000873218 124.394 +20000 4.8 0.0254 55.5 0.000873218 121.284 +500 4.8 0.0254 39.6 0.000907475 116.366 +630 4.8 0.0254 39.6 0.000907475 118.696 +800 4.8 0.0254 39.6 0.000907475 120.766 +1000 4.8 0.0254 39.6 0.000907475 122.956 +1250 4.8 0.0254 39.6 0.000907475 125.026 +1600 4.8 0.0254 39.6 
0.000907475 125.966 +2000 4.8 0.0254 39.6 0.000907475 128.916 +2500 4.8 0.0254 39.6 0.000907475 131.236 +3150 4.8 0.0254 39.6 0.000907475 133.436 +4000 4.8 0.0254 39.6 0.000907475 134.996 +5000 4.8 0.0254 39.6 0.000907475 135.426 +6300 4.8 0.0254 39.6 0.000907475 134.336 +8000 4.8 0.0254 39.6 0.000907475 131.346 +10000 4.8 0.0254 39.6 0.000907475 126.066 +500 4.8 0.0254 31.7 0.000930789 116.128 +630 4.8 0.0254 31.7 0.000930789 120.078 +800 4.8 0.0254 31.7 0.000930789 122.648 +1000 4.8 0.0254 31.7 0.000930789 125.348 +1250 4.8 0.0254 31.7 0.000930789 127.408 +1600 4.8 0.0254 31.7 0.000930789 128.718 +2000 4.8 0.0254 31.7 0.000930789 130.148 +2500 4.8 0.0254 31.7 0.000930789 132.588 +3150 4.8 0.0254 31.7 0.000930789 134.268 +4000 4.8 0.0254 31.7 0.000930789 135.328 +5000 4.8 0.0254 31.7 0.000930789 135.248 +6300 4.8 0.0254 31.7 0.000930789 132.898 +8000 4.8 0.0254 31.7 0.000930789 127.008 +630 9.5 0.0254 71.3 0.00420654 125.726 +800 9.5 0.0254 71.3 0.00420654 127.206 +1000 9.5 0.0254 71.3 0.00420654 129.556 +1250 9.5 0.0254 71.3 0.00420654 131.656 +1600 9.5 0.0254 71.3 0.00420654 133.756 +2000 9.5 0.0254 71.3 0.00420654 134.976 +2500 9.5 0.0254 71.3 0.00420654 135.956 +3150 9.5 0.0254 71.3 0.00420654 136.166 +4000 9.5 0.0254 71.3 0.00420654 134.236 +5000 9.5 0.0254 71.3 0.00420654 131.186 +6300 9.5 0.0254 71.3 0.00420654 127.246 +400 9.5 0.0254 55.5 0.0043284 120.952 +500 9.5 0.0254 55.5 0.0043284 123.082 +630 9.5 0.0254 55.5 0.0043284 125.452 +800 9.5 0.0254 55.5 0.0043284 128.082 +1000 9.5 0.0254 55.5 0.0043284 130.332 +1250 9.5 0.0254 55.5 0.0043284 132.202 +1600 9.5 0.0254 55.5 0.0043284 133.062 +2000 9.5 0.0254 55.5 0.0043284 134.052 +2500 9.5 0.0254 55.5 0.0043284 134.152 +3150 9.5 0.0254 55.5 0.0043284 133.252 +4000 9.5 0.0254 55.5 0.0043284 131.582 +5000 9.5 0.0254 55.5 0.0043284 128.412 +6300 9.5 0.0254 55.5 0.0043284 124.222 +200 9.5 0.0254 39.6 0.00449821 116.074 +250 9.5 0.0254 39.6 0.00449821 116.924 +315 9.5 0.0254 39.6 0.00449821 119.294 +400 9.5 0.0254 39.6 0.00449821 121.154 +500 9.5 0.0254 39.6 0.00449821 123.894 +630 9.5 0.0254 39.6 0.00449821 126.514 +800 9.5 0.0254 39.6 0.00449821 129.014 +1000 9.5 0.0254 39.6 0.00449821 130.374 +1250 9.5 0.0254 39.6 0.00449821 130.964 +1600 9.5 0.0254 39.6 0.00449821 131.184 +2000 9.5 0.0254 39.6 0.00449821 131.274 +2500 9.5 0.0254 39.6 0.00449821 131.234 +3150 9.5 0.0254 39.6 0.00449821 129.934 +4000 9.5 0.0254 39.6 0.00449821 127.864 +5000 9.5 0.0254 39.6 0.00449821 125.044 +6300 9.5 0.0254 39.6 0.00449821 120.324 +200 9.5 0.0254 31.7 0.00461377 119.146 +250 9.5 0.0254 31.7 0.00461377 120.136 +315 9.5 0.0254 31.7 0.00461377 122.766 +400 9.5 0.0254 31.7 0.00461377 124.756 +500 9.5 0.0254 31.7 0.00461377 126.886 +630 9.5 0.0254 31.7 0.00461377 129.006 +800 9.5 0.0254 31.7 0.00461377 130.746 +1000 9.5 0.0254 31.7 0.00461377 131.346 +1250 9.5 0.0254 31.7 0.00461377 131.446 +1600 9.5 0.0254 31.7 0.00461377 131.036 +2000 9.5 0.0254 31.7 0.00461377 130.496 +2500 9.5 0.0254 31.7 0.00461377 130.086 +3150 9.5 0.0254 31.7 0.00461377 128.536 +4000 9.5 0.0254 31.7 0.00461377 126.736 +5000 9.5 0.0254 31.7 0.00461377 124.426 +6300 9.5 0.0254 31.7 0.00461377 120.726 +250 12.7 0.0254 71.3 0.0121808 119.698 +315 12.7 0.0254 71.3 0.0121808 122.938 +400 12.7 0.0254 71.3 0.0121808 125.048 +500 12.7 0.0254 71.3 0.0121808 126.898 +630 12.7 0.0254 71.3 0.0121808 128.878 +800 12.7 0.0254 71.3 0.0121808 130.348 +1000 12.7 0.0254 71.3 0.0121808 131.698 +1250 12.7 0.0254 71.3 0.0121808 133.048 +1600 12.7 0.0254 71.3 0.0121808 134.528 +2000 12.7 0.0254 71.3 
0.0121808 134.228 +2500 12.7 0.0254 71.3 0.0121808 134.058 +3150 12.7 0.0254 71.3 0.0121808 133.758 +4000 12.7 0.0254 71.3 0.0121808 131.808 +5000 12.7 0.0254 71.3 0.0121808 128.978 +6300 12.7 0.0254 71.3 0.0121808 125.398 +8000 12.7 0.0254 71.3 0.0121808 120.538 +10000 12.7 0.0254 71.3 0.0121808 114.418 +250 12.7 0.0254 39.6 0.0130253 121.547 +315 12.7 0.0254 39.6 0.0130253 123.537 +400 12.7 0.0254 39.6 0.0130253 125.527 +500 12.7 0.0254 39.6 0.0130253 127.127 +630 12.7 0.0254 39.6 0.0130253 128.867 +800 12.7 0.0254 39.6 0.0130253 130.217 +1000 12.7 0.0254 39.6 0.0130253 130.947 +1250 12.7 0.0254 39.6 0.0130253 130.777 +1600 12.7 0.0254 39.6 0.0130253 129.977 +2000 12.7 0.0254 39.6 0.0130253 129.567 +2500 12.7 0.0254 39.6 0.0130253 129.027 +3150 12.7 0.0254 39.6 0.0130253 127.847 +4000 12.7 0.0254 39.6 0.0130253 126.537 +5000 12.7 0.0254 39.6 0.0130253 125.107 +6300 12.7 0.0254 39.6 0.0130253 123.177 +8000 12.7 0.0254 39.6 0.0130253 120.607 +10000 12.7 0.0254 39.6 0.0130253 116.017 +200 17.4 0.0254 71.3 0.016104 112.506 +250 17.4 0.0254 71.3 0.016104 113.796 +315 17.4 0.0254 71.3 0.016104 115.846 +400 17.4 0.0254 71.3 0.016104 117.396 +500 17.4 0.0254 71.3 0.016104 119.806 +630 17.4 0.0254 71.3 0.016104 122.606 +800 17.4 0.0254 71.3 0.016104 124.276 +1000 17.4 0.0254 71.3 0.016104 125.816 +1250 17.4 0.0254 71.3 0.016104 126.356 +1600 17.4 0.0254 71.3 0.016104 126.406 +2000 17.4 0.0254 71.3 0.016104 126.826 +2500 17.4 0.0254 71.3 0.016104 126.746 +3150 17.4 0.0254 71.3 0.016104 126.536 +4000 17.4 0.0254 71.3 0.016104 125.586 +5000 17.4 0.0254 71.3 0.016104 123.126 +6300 17.4 0.0254 71.3 0.016104 119.916 +8000 17.4 0.0254 71.3 0.016104 115.466 +200 17.4 0.0254 55.5 0.0165706 109.951 +250 17.4 0.0254 55.5 0.0165706 110.491 +315 17.4 0.0254 55.5 0.0165706 111.911 +400 17.4 0.0254 55.5 0.0165706 115.461 +500 17.4 0.0254 55.5 0.0165706 119.621 +630 17.4 0.0254 55.5 0.0165706 122.411 +800 17.4 0.0254 55.5 0.0165706 123.091 +1000 17.4 0.0254 55.5 0.0165706 126.001 +1250 17.4 0.0254 55.5 0.0165706 129.301 +1600 17.4 0.0254 55.5 0.0165706 126.471 +2000 17.4 0.0254 55.5 0.0165706 125.261 +2500 17.4 0.0254 55.5 0.0165706 124.931 +3150 17.4 0.0254 55.5 0.0165706 124.101 +4000 17.4 0.0254 55.5 0.0165706 121.771 +5000 17.4 0.0254 55.5 0.0165706 118.941 +6300 17.4 0.0254 55.5 0.0165706 114.861 +200 17.4 0.0254 39.6 0.0172206 114.044 +250 17.4 0.0254 39.6 0.0172206 114.714 +315 17.4 0.0254 39.6 0.0172206 115.144 +400 17.4 0.0254 39.6 0.0172206 115.444 +500 17.4 0.0254 39.6 0.0172206 117.514 +630 17.4 0.0254 39.6 0.0172206 124.514 +800 17.4 0.0254 39.6 0.0172206 135.324 +1000 17.4 0.0254 39.6 0.0172206 138.274 +1250 17.4 0.0254 39.6 0.0172206 131.364 +1600 17.4 0.0254 39.6 0.0172206 127.614 +2000 17.4 0.0254 39.6 0.0172206 126.644 +2500 17.4 0.0254 39.6 0.0172206 124.154 +3150 17.4 0.0254 39.6 0.0172206 123.564 +4000 17.4 0.0254 39.6 0.0172206 122.724 +5000 17.4 0.0254 39.6 0.0172206 119.854 +200 17.4 0.0254 31.7 0.0176631 116.146 +250 17.4 0.0254 31.7 0.0176631 116.956 +315 17.4 0.0254 31.7 0.0176631 118.416 +400 17.4 0.0254 31.7 0.0176631 120.766 +500 17.4 0.0254 31.7 0.0176631 127.676 +630 17.4 0.0254 31.7 0.0176631 136.886 +800 17.4 0.0254 31.7 0.0176631 139.226 +1000 17.4 0.0254 31.7 0.0176631 131.796 +1250 17.4 0.0254 31.7 0.0176631 128.306 +1600 17.4 0.0254 31.7 0.0176631 126.846 +2000 17.4 0.0254 31.7 0.0176631 124.356 +2500 17.4 0.0254 31.7 0.0176631 124.166 +3150 17.4 0.0254 31.7 0.0176631 123.466 +4000 17.4 0.0254 31.7 0.0176631 121.996 +5000 17.4 0.0254 31.7 0.0176631 117.996 +315 22.2 0.0254 
71.3 0.0214178 115.857 +400 22.2 0.0254 71.3 0.0214178 117.927 +500 22.2 0.0254 71.3 0.0214178 117.967 +630 22.2 0.0254 71.3 0.0214178 120.657 +800 22.2 0.0254 71.3 0.0214178 123.227 +1000 22.2 0.0254 71.3 0.0214178 134.247 +1250 22.2 0.0254 71.3 0.0214178 140.987 +1600 22.2 0.0254 71.3 0.0214178 131.817 +2000 22.2 0.0254 71.3 0.0214178 127.197 +2500 22.2 0.0254 71.3 0.0214178 126.097 +3150 22.2 0.0254 71.3 0.0214178 124.127 +4000 22.2 0.0254 71.3 0.0214178 123.917 +5000 22.2 0.0254 71.3 0.0214178 125.727 +6300 22.2 0.0254 71.3 0.0214178 123.127 +8000 22.2 0.0254 71.3 0.0214178 121.657 +200 22.2 0.0254 39.6 0.0229028 116.066 +250 22.2 0.0254 39.6 0.0229028 117.386 +315 22.2 0.0254 39.6 0.0229028 120.716 +400 22.2 0.0254 39.6 0.0229028 123.416 +500 22.2 0.0254 39.6 0.0229028 129.776 +630 22.2 0.0254 39.6 0.0229028 137.026 +800 22.2 0.0254 39.6 0.0229028 137.076 +1000 22.2 0.0254 39.6 0.0229028 128.416 +1250 22.2 0.0254 39.6 0.0229028 126.446 +1600 22.2 0.0254 39.6 0.0229028 122.216 +2000 22.2 0.0254 39.6 0.0229028 121.256 +2500 22.2 0.0254 39.6 0.0229028 121.306 +3150 22.2 0.0254 39.6 0.0229028 120.856 +4000 22.2 0.0254 39.6 0.0229028 119.646 +5000 22.2 0.0254 39.6 0.0229028 118.816 +630 0 0.1016 71.3 0.00121072 124.155 +800 0 0.1016 71.3 0.00121072 126.805 +1000 0 0.1016 71.3 0.00121072 128.825 +1250 0 0.1016 71.3 0.00121072 130.335 +1600 0 0.1016 71.3 0.00121072 131.725 +2000 0 0.1016 71.3 0.00121072 132.095 +2500 0 0.1016 71.3 0.00121072 132.595 +3150 0 0.1016 71.3 0.00121072 131.955 +4000 0 0.1016 71.3 0.00121072 130.935 +5000 0 0.1016 71.3 0.00121072 130.795 +6300 0 0.1016 71.3 0.00121072 129.395 +8000 0 0.1016 71.3 0.00121072 125.465 +10000 0 0.1016 71.3 0.00121072 123.305 +12500 0 0.1016 71.3 0.00121072 119.375 +630 0 0.1016 55.5 0.00131983 126.170 +800 0 0.1016 55.5 0.00131983 127.920 +1000 0 0.1016 55.5 0.00131983 129.800 +1250 0 0.1016 55.5 0.00131983 131.430 +1600 0 0.1016 55.5 0.00131983 132.050 +2000 0 0.1016 55.5 0.00131983 132.540 +2500 0 0.1016 55.5 0.00131983 133.040 +3150 0 0.1016 55.5 0.00131983 131.780 +4000 0 0.1016 55.5 0.00131983 129.500 +5000 0 0.1016 55.5 0.00131983 128.360 +6300 0 0.1016 55.5 0.00131983 127.730 +8000 0 0.1016 55.5 0.00131983 124.450 +10000 0 0.1016 55.5 0.00131983 121.930 +12500 0 0.1016 55.5 0.00131983 119.910 +630 0 0.1016 39.6 0.00146332 125.401 +800 0 0.1016 39.6 0.00146332 128.401 +1000 0 0.1016 39.6 0.00146332 130.781 +1250 0 0.1016 39.6 0.00146332 132.271 +1600 0 0.1016 39.6 0.00146332 133.261 +2000 0 0.1016 39.6 0.00146332 133.251 +2500 0 0.1016 39.6 0.00146332 132.611 +3150 0 0.1016 39.6 0.00146332 130.961 +4000 0 0.1016 39.6 0.00146332 127.801 +5000 0 0.1016 39.6 0.00146332 126.021 +6300 0 0.1016 39.6 0.00146332 125.631 +8000 0 0.1016 39.6 0.00146332 122.341 +10000 0 0.1016 39.6 0.00146332 119.561 +630 0 0.1016 31.7 0.00150092 126.413 +800 0 0.1016 31.7 0.00150092 129.053 +1000 0 0.1016 31.7 0.00150092 131.313 +1250 0 0.1016 31.7 0.00150092 133.063 +1600 0 0.1016 31.7 0.00150092 133.553 +2000 0 0.1016 31.7 0.00150092 133.153 +2500 0 0.1016 31.7 0.00150092 132.003 +3150 0 0.1016 31.7 0.00150092 129.973 +4000 0 0.1016 31.7 0.00150092 126.933 +5000 0 0.1016 31.7 0.00150092 124.393 +6300 0 0.1016 31.7 0.00150092 124.253 +8000 0 0.1016 31.7 0.00150092 120.193 +10000 0 0.1016 31.7 0.00150092 115.893 +800 3.3 0.1016 71.3 0.00202822 131.074 +1000 3.3 0.1016 71.3 0.00202822 131.434 +1250 3.3 0.1016 71.3 0.00202822 132.304 +1600 3.3 0.1016 71.3 0.00202822 133.664 +2000 3.3 0.1016 71.3 0.00202822 134.034 +2500 3.3 0.1016 71.3 0.00202822 133.894 
+3150 3.3 0.1016 71.3 0.00202822 132.114 +4000 3.3 0.1016 71.3 0.00202822 128.704 +5000 3.3 0.1016 71.3 0.00202822 127.054 +6300 3.3 0.1016 71.3 0.00202822 124.904 +8000 3.3 0.1016 71.3 0.00202822 121.234 +10000 3.3 0.1016 71.3 0.00202822 116.694 +630 3.3 0.1016 55.5 0.002211 126.599 +800 3.3 0.1016 55.5 0.002211 129.119 +1000 3.3 0.1016 55.5 0.002211 131.129 +1250 3.3 0.1016 55.5 0.002211 132.769 +1600 3.3 0.1016 55.5 0.002211 133.649 +2000 3.3 0.1016 55.5 0.002211 133.649 +2500 3.3 0.1016 55.5 0.002211 132.889 +3150 3.3 0.1016 55.5 0.002211 130.629 +4000 3.3 0.1016 55.5 0.002211 127.229 +5000 3.3 0.1016 55.5 0.002211 124.839 +6300 3.3 0.1016 55.5 0.002211 123.839 +8000 3.3 0.1016 55.5 0.002211 120.569 +10000 3.3 0.1016 55.5 0.002211 115.659 +630 3.3 0.1016 39.6 0.00245138 127.251 +800 3.3 0.1016 39.6 0.00245138 129.991 +1000 3.3 0.1016 39.6 0.00245138 131.971 +1250 3.3 0.1016 39.6 0.00245138 133.211 +1600 3.3 0.1016 39.6 0.00245138 133.071 +2000 3.3 0.1016 39.6 0.00245138 132.301 +2500 3.3 0.1016 39.6 0.00245138 130.791 +3150 3.3 0.1016 39.6 0.00245138 128.401 +4000 3.3 0.1016 39.6 0.00245138 124.881 +5000 3.3 0.1016 39.6 0.00245138 122.371 +6300 3.3 0.1016 39.6 0.00245138 120.851 +8000 3.3 0.1016 39.6 0.00245138 118.091 +10000 3.3 0.1016 39.6 0.00245138 115.321 +630 3.3 0.1016 31.7 0.00251435 128.952 +800 3.3 0.1016 31.7 0.00251435 131.362 +1000 3.3 0.1016 31.7 0.00251435 133.012 +1250 3.3 0.1016 31.7 0.00251435 134.022 +1600 3.3 0.1016 31.7 0.00251435 133.402 +2000 3.3 0.1016 31.7 0.00251435 131.642 +2500 3.3 0.1016 31.7 0.00251435 130.392 +3150 3.3 0.1016 31.7 0.00251435 128.252 +4000 3.3 0.1016 31.7 0.00251435 124.852 +5000 3.3 0.1016 31.7 0.00251435 122.082 +6300 3.3 0.1016 31.7 0.00251435 120.702 +8000 3.3 0.1016 31.7 0.00251435 117.432 +630 6.7 0.1016 71.3 0.00478288 131.448 +800 6.7 0.1016 71.3 0.00478288 134.478 +1000 6.7 0.1016 71.3 0.00478288 136.758 +1250 6.7 0.1016 71.3 0.00478288 137.658 +1600 6.7 0.1016 71.3 0.00478288 136.678 +2000 6.7 0.1016 71.3 0.00478288 134.568 +2500 6.7 0.1016 71.3 0.00478288 131.458 +3150 6.7 0.1016 71.3 0.00478288 124.458 +500 6.7 0.1016 55.5 0.0052139 129.343 +630 6.7 0.1016 55.5 0.0052139 133.023 +800 6.7 0.1016 55.5 0.0052139 135.953 +1000 6.7 0.1016 55.5 0.0052139 137.233 +1250 6.7 0.1016 55.5 0.0052139 136.883 +1600 6.7 0.1016 55.5 0.0052139 133.653 +2000 6.7 0.1016 55.5 0.0052139 129.653 +2500 6.7 0.1016 55.5 0.0052139 124.273 +400 6.7 0.1016 39.6 0.00578076 128.295 +500 6.7 0.1016 39.6 0.00578076 130.955 +630 6.7 0.1016 39.6 0.00578076 133.355 +800 6.7 0.1016 39.6 0.00578076 134.625 +1000 6.7 0.1016 39.6 0.00578076 134.515 +1250 6.7 0.1016 39.6 0.00578076 132.395 +1600 6.7 0.1016 39.6 0.00578076 127.375 +2000 6.7 0.1016 39.6 0.00578076 122.235 +315 6.7 0.1016 31.7 0.00592927 126.266 +400 6.7 0.1016 31.7 0.00592927 128.296 +500 6.7 0.1016 31.7 0.00592927 130.206 +630 6.7 0.1016 31.7 0.00592927 132.116 +800 6.7 0.1016 31.7 0.00592927 132.886 +1000 6.7 0.1016 31.7 0.00592927 131.636 +1250 6.7 0.1016 31.7 0.00592927 129.256 +1600 6.7 0.1016 31.7 0.00592927 124.346 +2000 6.7 0.1016 31.7 0.00592927 120.446 +200 8.9 0.1016 71.3 0.0103088 133.503 +250 8.9 0.1016 71.3 0.0103088 134.533 +315 8.9 0.1016 71.3 0.0103088 136.583 +400 8.9 0.1016 71.3 0.0103088 138.123 +500 8.9 0.1016 71.3 0.0103088 138.523 +630 8.9 0.1016 71.3 0.0103088 138.423 +800 8.9 0.1016 71.3 0.0103088 137.813 +1000 8.9 0.1016 71.3 0.0103088 135.433 +1250 8.9 0.1016 71.3 0.0103088 132.793 +1600 8.9 0.1016 71.3 0.0103088 128.763 +2000 8.9 0.1016 71.3 0.0103088 124.233 +2500 8.9 
0.1016 71.3 0.0103088 123.623 +3150 8.9 0.1016 71.3 0.0103088 123.263 +4000 8.9 0.1016 71.3 0.0103088 120.243 +5000 8.9 0.1016 71.3 0.0103088 116.723 +6300 8.9 0.1016 71.3 0.0103088 117.253 +200 8.9 0.1016 39.6 0.0124596 133.420 +250 8.9 0.1016 39.6 0.0124596 134.340 +315 8.9 0.1016 39.6 0.0124596 135.380 +400 8.9 0.1016 39.6 0.0124596 135.540 +500 8.9 0.1016 39.6 0.0124596 133.790 +630 8.9 0.1016 39.6 0.0124596 131.920 +800 8.9 0.1016 39.6 0.0124596 130.940 +1000 8.9 0.1016 39.6 0.0124596 129.580 +1250 8.9 0.1016 39.6 0.0124596 127.710 +1600 8.9 0.1016 39.6 0.0124596 123.820 +2000 8.9 0.1016 39.6 0.0124596 119.040 +2500 8.9 0.1016 39.6 0.0124596 119.190 +3150 8.9 0.1016 39.6 0.0124596 119.350 +4000 8.9 0.1016 39.6 0.0124596 116.220 +5000 8.9 0.1016 39.6 0.0124596 113.080 +6300 8.9 0.1016 39.6 0.0124596 113.110 +200 12.3 0.1016 71.3 0.0337792 130.588 +250 12.3 0.1016 71.3 0.0337792 131.568 +315 12.3 0.1016 71.3 0.0337792 137.068 +400 12.3 0.1016 71.3 0.0337792 139.428 +500 12.3 0.1016 71.3 0.0337792 140.158 +630 12.3 0.1016 71.3 0.0337792 135.368 +800 12.3 0.1016 71.3 0.0337792 127.318 +1000 12.3 0.1016 71.3 0.0337792 127.928 +1250 12.3 0.1016 71.3 0.0337792 126.648 +1600 12.3 0.1016 71.3 0.0337792 124.748 +2000 12.3 0.1016 71.3 0.0337792 122.218 +2500 12.3 0.1016 71.3 0.0337792 121.318 +3150 12.3 0.1016 71.3 0.0337792 120.798 +4000 12.3 0.1016 71.3 0.0337792 118.018 +5000 12.3 0.1016 71.3 0.0337792 116.108 +6300 12.3 0.1016 71.3 0.0337792 113.958 +200 12.3 0.1016 55.5 0.0368233 132.304 +250 12.3 0.1016 55.5 0.0368233 133.294 +315 12.3 0.1016 55.5 0.0368233 135.674 +400 12.3 0.1016 55.5 0.0368233 136.414 +500 12.3 0.1016 55.5 0.0368233 133.774 +630 12.3 0.1016 55.5 0.0368233 124.244 +800 12.3 0.1016 55.5 0.0368233 125.114 +1000 12.3 0.1016 55.5 0.0368233 125.484 +1250 12.3 0.1016 55.5 0.0368233 124.214 +1600 12.3 0.1016 55.5 0.0368233 121.824 +2000 12.3 0.1016 55.5 0.0368233 118.564 +2500 12.3 0.1016 55.5 0.0368233 117.054 +3150 12.3 0.1016 55.5 0.0368233 116.914 +4000 12.3 0.1016 55.5 0.0368233 114.404 +5000 12.3 0.1016 55.5 0.0368233 112.014 +6300 12.3 0.1016 55.5 0.0368233 110.124 +200 12.3 0.1016 39.6 0.0408268 128.545 +250 12.3 0.1016 39.6 0.0408268 129.675 +315 12.3 0.1016 39.6 0.0408268 129.415 +400 12.3 0.1016 39.6 0.0408268 128.265 +500 12.3 0.1016 39.6 0.0408268 122.205 +630 12.3 0.1016 39.6 0.0408268 121.315 +800 12.3 0.1016 39.6 0.0408268 122.315 +1000 12.3 0.1016 39.6 0.0408268 122.435 +1250 12.3 0.1016 39.6 0.0408268 121.165 +1600 12.3 0.1016 39.6 0.0408268 117.875 +2000 12.3 0.1016 39.6 0.0408268 114.085 +2500 12.3 0.1016 39.6 0.0408268 113.315 +3150 12.3 0.1016 39.6 0.0408268 113.055 +4000 12.3 0.1016 39.6 0.0408268 110.905 +5000 12.3 0.1016 39.6 0.0408268 108.625 +6300 12.3 0.1016 39.6 0.0408268 107.985 +200 12.3 0.1016 31.7 0.0418756 124.987 +250 12.3 0.1016 31.7 0.0418756 125.857 +315 12.3 0.1016 31.7 0.0418756 124.717 +400 12.3 0.1016 31.7 0.0418756 123.207 +500 12.3 0.1016 31.7 0.0418756 118.667 +630 12.3 0.1016 31.7 0.0418756 119.287 +800 12.3 0.1016 31.7 0.0418756 120.037 +1000 12.3 0.1016 31.7 0.0418756 119.777 +1250 12.3 0.1016 31.7 0.0418756 118.767 +1600 12.3 0.1016 31.7 0.0418756 114.477 +2000 12.3 0.1016 31.7 0.0418756 110.447 +2500 12.3 0.1016 31.7 0.0418756 110.317 +3150 12.3 0.1016 31.7 0.0418756 110.307 +4000 12.3 0.1016 31.7 0.0418756 108.407 +5000 12.3 0.1016 31.7 0.0418756 107.147 +6300 12.3 0.1016 31.7 0.0418756 107.267 +200 15.6 0.1016 71.3 0.0437259 130.898 +250 15.6 0.1016 71.3 0.0437259 132.158 +315 15.6 0.1016 71.3 0.0437259 133.808 +400 15.6 
0.1016 71.3 0.0437259 134.058 +500 15.6 0.1016 71.3 0.0437259 130.638 +630 15.6 0.1016 71.3 0.0437259 122.288 +800 15.6 0.1016 71.3 0.0437259 124.188 +1000 15.6 0.1016 71.3 0.0437259 124.438 +1250 15.6 0.1016 71.3 0.0437259 123.178 +1600 15.6 0.1016 71.3 0.0437259 121.528 +2000 15.6 0.1016 71.3 0.0437259 119.888 +2500 15.6 0.1016 71.3 0.0437259 118.998 +3150 15.6 0.1016 71.3 0.0437259 116.468 +4000 15.6 0.1016 71.3 0.0437259 113.298 +200 15.6 0.1016 39.6 0.0528487 123.514 +250 15.6 0.1016 39.6 0.0528487 124.644 +315 15.6 0.1016 39.6 0.0528487 122.754 +400 15.6 0.1016 39.6 0.0528487 120.484 +500 15.6 0.1016 39.6 0.0528487 115.304 +630 15.6 0.1016 39.6 0.0528487 118.084 +800 15.6 0.1016 39.6 0.0528487 118.964 +1000 15.6 0.1016 39.6 0.0528487 119.224 +1250 15.6 0.1016 39.6 0.0528487 118.214 +1600 15.6 0.1016 39.6 0.0528487 114.554 +2000 15.6 0.1016 39.6 0.0528487 110.894 +2500 15.6 0.1016 39.6 0.0528487 110.264 +3150 15.6 0.1016 39.6 0.0528487 109.254 +4000 15.6 0.1016 39.6 0.0528487 106.604 +5000 15.6 0.1016 39.6 0.0528487 106.224 +6300 15.6 0.1016 39.6 0.0528487 104.204 diff --git a/Experimentos/inicializacion-pesos-red-neuronal/img/0_1_aleatorio/f_ideal_y_rn_con_15_neuronas.png b/Experimentos/inicializacion-pesos-red-neuronal/img/0_1_aleatorio/f_ideal_y_rn_con_15_neuronas.png new file mode 100644 index 0000000..3f85e2a Binary files /dev/null and b/Experimentos/inicializacion-pesos-red-neuronal/img/0_1_aleatorio/f_ideal_y_rn_con_15_neuronas.png differ diff --git a/Experimentos/inicializacion-pesos-red-neuronal/img/0_1_aleatorio/f_ideal_y_rn_con_20_neuronas.png b/Experimentos/inicializacion-pesos-red-neuronal/img/0_1_aleatorio/f_ideal_y_rn_con_20_neuronas.png new file mode 100644 index 0000000..62fbd2f Binary files /dev/null and b/Experimentos/inicializacion-pesos-red-neuronal/img/0_1_aleatorio/f_ideal_y_rn_con_20_neuronas.png differ diff --git a/Experimentos/inicializacion-pesos-red-neuronal/img/0_1_aleatorio/f_ideal_y_rn_con_3_neuronas.png b/Experimentos/inicializacion-pesos-red-neuronal/img/0_1_aleatorio/f_ideal_y_rn_con_3_neuronas.png new file mode 100644 index 0000000..0f279bd Binary files /dev/null and b/Experimentos/inicializacion-pesos-red-neuronal/img/0_1_aleatorio/f_ideal_y_rn_con_3_neuronas.png differ diff --git a/Experimentos/inicializacion-pesos-red-neuronal/img/0_1_aleatorio/f_ideal_y_rn_con_40_neuronas.png b/Experimentos/inicializacion-pesos-red-neuronal/img/0_1_aleatorio/f_ideal_y_rn_con_40_neuronas.png new file mode 100644 index 0000000..bd72e9b Binary files /dev/null and b/Experimentos/inicializacion-pesos-red-neuronal/img/0_1_aleatorio/f_ideal_y_rn_con_40_neuronas.png differ diff --git a/Experimentos/inicializacion-pesos-red-neuronal/img/0_1_aleatorio/f_ideal_y_rn_con_5_neuronas.png b/Experimentos/inicializacion-pesos-red-neuronal/img/0_1_aleatorio/f_ideal_y_rn_con_5_neuronas.png new file mode 100644 index 0000000..244d84a Binary files /dev/null and b/Experimentos/inicializacion-pesos-red-neuronal/img/0_1_aleatorio/f_ideal_y_rn_con_5_neuronas.png differ diff --git a/Experimentos/inicializacion-pesos-red-neuronal/img/0_1_aleatorio/f_ideal_y_rn_con_60_neuronas.png b/Experimentos/inicializacion-pesos-red-neuronal/img/0_1_aleatorio/f_ideal_y_rn_con_60_neuronas.png new file mode 100644 index 0000000..a2d139c Binary files /dev/null and b/Experimentos/inicializacion-pesos-red-neuronal/img/0_1_aleatorio/f_ideal_y_rn_con_60_neuronas.png differ diff --git a/Experimentos/inicializacion-pesos-red-neuronal/img/0_1_aleatorio/f_ideal_y_rn_con_7_neuronas.png 
b/Experimentos/inicializacion-pesos-red-neuronal/img/0_1_aleatorio/f_ideal_y_rn_con_7_neuronas.png new file mode 100644 index 0000000..0612cbb Binary files /dev/null and b/Experimentos/inicializacion-pesos-red-neuronal/img/0_1_aleatorio/f_ideal_y_rn_con_7_neuronas.png differ diff --git a/Experimentos/inicializacion-pesos-red-neuronal/img/0_sintetico_homogeneo/f_ideal_y_rn_con_100_neuronas.png b/Experimentos/inicializacion-pesos-red-neuronal/img/0_sintetico_homogeneo/f_ideal_y_rn_con_100_neuronas.png new file mode 100644 index 0000000..1db4ddd Binary files /dev/null and b/Experimentos/inicializacion-pesos-red-neuronal/img/0_sintetico_homogeneo/f_ideal_y_rn_con_100_neuronas.png differ diff --git a/Experimentos/inicializacion-pesos-red-neuronal/img/0_sintetico_homogeneo/f_ideal_y_rn_con_10_neuronas.png b/Experimentos/inicializacion-pesos-red-neuronal/img/0_sintetico_homogeneo/f_ideal_y_rn_con_10_neuronas.png new file mode 100644 index 0000000..3178683 Binary files /dev/null and b/Experimentos/inicializacion-pesos-red-neuronal/img/0_sintetico_homogeneo/f_ideal_y_rn_con_10_neuronas.png differ diff --git a/Experimentos/inicializacion-pesos-red-neuronal/img/0_sintetico_homogeneo/f_ideal_y_rn_con_20_neuronas.png b/Experimentos/inicializacion-pesos-red-neuronal/img/0_sintetico_homogeneo/f_ideal_y_rn_con_20_neuronas.png new file mode 100644 index 0000000..a25eace Binary files /dev/null and b/Experimentos/inicializacion-pesos-red-neuronal/img/0_sintetico_homogeneo/f_ideal_y_rn_con_20_neuronas.png differ diff --git a/Experimentos/inicializacion-pesos-red-neuronal/img/0_sintetico_homogeneo/f_ideal_y_rn_con_2_neuronas.png b/Experimentos/inicializacion-pesos-red-neuronal/img/0_sintetico_homogeneo/f_ideal_y_rn_con_2_neuronas.png new file mode 100644 index 0000000..ec8fdae Binary files /dev/null and b/Experimentos/inicializacion-pesos-red-neuronal/img/0_sintetico_homogeneo/f_ideal_y_rn_con_2_neuronas.png differ diff --git a/Experimentos/inicializacion-pesos-red-neuronal/img/0_sintetico_homogeneo/f_ideal_y_rn_con_3_neuronas.png b/Experimentos/inicializacion-pesos-red-neuronal/img/0_sintetico_homogeneo/f_ideal_y_rn_con_3_neuronas.png new file mode 100644 index 0000000..e9adaab Binary files /dev/null and b/Experimentos/inicializacion-pesos-red-neuronal/img/0_sintetico_homogeneo/f_ideal_y_rn_con_3_neuronas.png differ diff --git a/Experimentos/inicializacion-pesos-red-neuronal/img/0_sintetico_homogeneo/f_ideal_y_rn_con_51_neuronas.png b/Experimentos/inicializacion-pesos-red-neuronal/img/0_sintetico_homogeneo/f_ideal_y_rn_con_51_neuronas.png new file mode 100644 index 0000000..617b0db Binary files /dev/null and b/Experimentos/inicializacion-pesos-red-neuronal/img/0_sintetico_homogeneo/f_ideal_y_rn_con_51_neuronas.png differ diff --git a/Experimentos/inicializacion-pesos-red-neuronal/img/0_sintetico_homogeneo/f_ideal_y_rn_con_5_neuronas.png b/Experimentos/inicializacion-pesos-red-neuronal/img/0_sintetico_homogeneo/f_ideal_y_rn_con_5_neuronas.png new file mode 100644 index 0000000..4a76c98 Binary files /dev/null and b/Experimentos/inicializacion-pesos-red-neuronal/img/0_sintetico_homogeneo/f_ideal_y_rn_con_5_neuronas.png differ diff --git a/Experimentos/inicializacion-pesos-red-neuronal/img/0_sintetico_homogeneo/f_ideal_y_rn_con_72_neuronas.png b/Experimentos/inicializacion-pesos-red-neuronal/img/0_sintetico_homogeneo/f_ideal_y_rn_con_72_neuronas.png new file mode 100644 index 0000000..e48aacd Binary files /dev/null and 
b/Experimentos/inicializacion-pesos-red-neuronal/img/0_sintetico_homogeneo/f_ideal_y_rn_con_72_neuronas.png differ diff --git a/Experimentos/inicializacion-pesos-red-neuronal/img/0_sintetico_homogeneo/f_ideal_y_rn_con_7_neuronas.png b/Experimentos/inicializacion-pesos-red-neuronal/img/0_sintetico_homogeneo/f_ideal_y_rn_con_7_neuronas.png new file mode 100644 index 0000000..0d8a738 Binary files /dev/null and b/Experimentos/inicializacion-pesos-red-neuronal/img/0_sintetico_homogeneo/f_ideal_y_rn_con_7_neuronas.png differ diff --git a/Experimentos/inicializacion-pesos-red-neuronal/img/0_sintetico_homogeneo/f_ideal_y_rn_con_90_neuronas.png b/Experimentos/inicializacion-pesos-red-neuronal/img/0_sintetico_homogeneo/f_ideal_y_rn_con_90_neuronas.png new file mode 100644 index 0000000..95f2af0 Binary files /dev/null and b/Experimentos/inicializacion-pesos-red-neuronal/img/0_sintetico_homogeneo/f_ideal_y_rn_con_90_neuronas.png differ diff --git a/Experimentos/inicializacion-pesos-red-neuronal/resultados/2_air_self_noise/TEST_WILCOXON b/Experimentos/inicializacion-pesos-red-neuronal/resultados/2_air_self_noise/TEST_WILCOXON new file mode 100644 index 0000000..134663a --- /dev/null +++ b/Experimentos/inicializacion-pesos-red-neuronal/resultados/2_air_self_noise/TEST_WILCOXON @@ -0,0 +1,21 @@ + + + Exact Wilcoxon signed rank test +------------------------------- +Population details: + parameter of interest: Location parameter (pseudomedian) + value under h_0: 0 + point estimate: -5.07785 + 95% confidence interval: (-5.504, -5.044) + +Test summary: + outcome with 95% confidence: reject h_0 + two-sided p-value: <1e-04 + +Details: + number of observations: 15 + Wilcoxon rank-sum statistic: 0.0 + rank sums: [0.0, 120.0] + adjustment for ties: 0.0 + + \ No newline at end of file diff --git a/Experimentos/inicializacion-pesos-red-neuronal/resultados/2_air_self_noise/boxplot.jl b/Experimentos/inicializacion-pesos-red-neuronal/resultados/2_air_self_noise/boxplot.jl new file mode 100644 index 0000000..b802694 --- /dev/null +++ b/Experimentos/inicializacion-pesos-red-neuronal/resultados/2_air_self_noise/boxplot.jl @@ -0,0 +1,54 @@ +################################################################## +# Generates and saves the box-and-whisker plots of the experiment results +################################################################## +using PlotlyJS +using CSV +using DataFrames +using Tables # brings Tables.matrix into scope +limit = 16 +path = "Memoria/img/7-algoritmo-inicializar-pesos/experimento/" + +# Box-and-whisker plot of the execution times +df = DataFrame(CSV.File("Experimentos/inicializacion-pesos-red-neuronal/resultados/2_air_self_noise/tiempos.csv")) +table_df = Tables.matrix(df) +traces = [ + box( + y = table_df[i, 2:limit], + name = table_df[i, 1] + ) + for i in 1:2 +] +ref = plot(traces, Layout(yaxis_title="Tiempos en segundos", boxmode="group")) +savefig(ref, path*"grafico-bigotes-tiempo.png") + + + +# Mean squared error on the training set +file_path = "Experimentos/inicializacion-pesos-red-neuronal/resultados/2_air_self_noise/error_entrenamiento.csv" +df = DataFrame(CSV.File(file_path)) +table_df = Tables.matrix(df) +traces = [ + box( + y = table_df[i, 2:limit], + name = table_df[i, 1] + ) + for i in 1:2 +] +ref = plot(traces, Layout(yaxis_title="Error cuadrático medio", boxmode="group")) +savefig(ref, path*"grafico-bigotes-error_entrenamiento.png") + + +# Mean squared error on the test set +file_path = "Experimentos/inicializacion-pesos-red-neuronal/resultados/2_air_self_noise/error_test.csv" +df = DataFrame(CSV.File(file_path)) +table_df = Tables.matrix(df) +traces = [ + box( + y = table_df[i, 2:limit],
+ name = table_df[i, 1] + ) + for i in 1:2 +] +ref = plot(traces, Layout(yaxis_title="Error cuadrático medio", boxmode="group")) +path = "Memoria/img/7-algoritmo-inicializar-pesos/experimento/" +savefig(ref, path*"grafico-bigotes-error_test.png") \ No newline at end of file diff --git a/Experimentos/inicializacion-pesos-red-neuronal/resultados/2_air_self_noise/error_entrenamiento.csv b/Experimentos/inicializacion-pesos-red-neuronal/resultados/2_air_self_noise/error_entrenamiento.csv new file mode 100644 index 0000000..1b65090 --- /dev/null +++ b/Experimentos/inicializacion-pesos-red-neuronal/resultados/2_air_self_noise/error_entrenamiento.csv @@ -0,0 +1,3 @@ +Método,Error entrenamiento 1,Error entrenamiento 2,Error entrenamiento 3,Error entrenamiento 4,Error entrenamiento 5,Error entrenamiento 6,Error entrenamiento 7,Error entrenamiento 8,Error entrenamiento 9,Error entrenamiento 10,Error entrenamiento 11,Error entrenamiento 12,Error entrenamiento 13,Error entrenamiento 14,Error entrenamiento 15 +Algoritmo inicialización,0.016107598186897273,0.01647800254822801,0.01812791557178738,0.017257623459379933,0.009463949251387709,0.01624105112345739,0.021113342327476713,0.014894196938574605,0.01821770963547351,0.020107777634127626,0.01578579107969757,0.015557253101713107,0.01646829428915805,0.019227417163779854,0.018424196762073988 +Aleatorio y Backpropagation,0.5816459121282251,0.570427576256533,0.5695056028289635,0.5749822184014982,0.5642217837837237,0.5665202421492537,0.5644598152096382,0.5671765730113418,0.5722351133614391,0.569825249652263,0.5608383911870397,0.5606055165897957,0.5744724288227865,0.5693031996474518,0.5633344375632706 diff --git a/Experimentos/inicializacion-pesos-red-neuronal/resultados/2_air_self_noise/error_test.csv b/Experimentos/inicializacion-pesos-red-neuronal/resultados/2_air_self_noise/error_test.csv new file mode 100644 index 0000000..668a710 --- /dev/null +++ b/Experimentos/inicializacion-pesos-red-neuronal/resultados/2_air_self_noise/error_test.csv @@ -0,0 +1,3 @@ +Método,Error test 1,Error test 2,Error test 3,Error test 4,Error test 5,Error test 6,Error test 7,Error test 8,Error test 9,Error test 10,Error test 11,Error test 12,Error test 13,Error test 14,Error test 15 +Algoritmo inicialización,0.17588948412370906,0.17875654823949128,0.18419165294584916,0.17503805769074474,0.09629487068340489,0.19059518504888087,0.19183625841162658,0.1722885027498988,0.1713092810998448,0.1827866390536342,0.16128499901538002,0.17872752239512982,0.16294055592022383,0.17542024363205286,0.1696798512522527 +Aleatorio y Backpropagation,0.5609455010263698,0.5707365283791447,0.5725804752342845,0.5616272440892142,0.583148113324764,0.5785511965937034,0.5565355709071861,0.5772385348695276,0.567121454169333,0.5719411815876849,0.5505614965699921,0.5642441681468707,0.5626468232466384,0.5729852815973077,0.5587863261999214 diff --git a/Experimentos/inicializacion-pesos-red-neuronal/resultados/2_air_self_noise/tiempos.csv b/Experimentos/inicializacion-pesos-red-neuronal/resultados/2_air_self_noise/tiempos.csv new file mode 100644 index 0000000..2b510ef --- /dev/null +++ b/Experimentos/inicializacion-pesos-red-neuronal/resultados/2_air_self_noise/tiempos.csv @@ -0,0 +1,3 @@ +Método,Tiempo 1,Tiempo 2,Tiempo 3,Tiempo 4,Tiempo 5,Tiempo 6,Tiempo 7,Tiempo 8,Tiempo 9,Tiempo 10,Tiempo 11,Tiempo 12,Tiempo 13,Tiempo 14,Tiempo 15 +Algoritmo 
inicialización,0.274179917,0.029524292,0.028009,0.028225333,0.025599417,0.027982792,0.027176875,0.027351292,0.02704025,0.028919541,0.027076209,0.028476375,0.026853334,0.027311125,0.028777917 +Aleatorio y Backpropagation,6.203886915,5.063537042,5.082766249,6.014708251,5.128079959000001,5.120265001,5.071514540999999,5.10520071,5.961476874000001,5.022621415,5.022854251,5.073831126000001,5.064733208000001,5.147634583,5.179978751 diff --git a/Experimentos/inicializacion-pesos-red-neuronal/utils/dat_to_csv.jl b/Experimentos/inicializacion-pesos-red-neuronal/utils/dat_to_csv.jl new file mode 100644 index 0000000..62f1db6 --- /dev/null +++ b/Experimentos/inicializacion-pesos-red-neuronal/utils/dat_to_csv.jl @@ -0,0 +1,23 @@ +################################################################# +# Utility to convert a .dat file into .csv format +################################################################# +# Source: https://stackoverflow.com/questions/61665998/reading-a-dat-file-in-julia-issues-with-variable-delimeter-spacing +function dat2csv(dat_path::AbstractString, csv_path::AbstractString) + open(csv_path, "w") do io + for line in eachline(dat_path) + join(io, split(line), ',') + println(io) + end + end + + return csv_path +end +function dat2csv(dat_path::AbstractString) + base, ext = splitext(dat_path) + ext == ".dat" || + throw(ArgumentError("file name doesn't end with `.dat`")) + return dat2csv(dat_path, "$base.csv") +end + +t = dat2csv("Experimentos/inicializacion-pesos-red-neuronal/data/airfoil_self_noise.dat") +println(t) \ No newline at end of file diff --git a/Makefile b/Makefile index 421df66..8fcad71 100644 --- a/Makefile +++ b/Makefile @@ -29,9 +29,7 @@ workflow-spell: install-spell spell ########## Test biblioteca redes neurales ########### test: - julia --project=. Biblioteca-Redes-Neuronales/test/activation_functions.test.jl - julia --project=. Biblioteca-Redes-Neuronales/test/one_layer_neural_network.test.jl - + julia --project=. 
OptimizedNeuralNetwork.jl/test/RUN_ALL_TEST.jl ############################### Generar experimentos ############ experimentos: diff --git a/Manifest.toml b/Manifest.toml index 1d3dd4d..6551c3b 100644 --- a/Manifest.toml +++ b/Manifest.toml @@ -3,20 +3,902 @@ julia_version = "1.7.1" manifest_format = "2.0" +[[deps.Adapt]] +deps = ["LinearAlgebra"] +git-tree-sha1 = "af92965fb30777147966f58acb05da51c5616b5f" +uuid = "79e6a3ab-5dfb-504d-930d-738a2a938a0e" +version = "3.3.3" + +[[deps.ArgTools]] +uuid = "0dad84c5-d112-42e6-8d28-ef12dabb789f" + +[[deps.Artifacts]] +uuid = "56f22d72-fd6d-98f1-02f0-08ddc0907c33" + +[[deps.Base64]] +uuid = "2a0f44e3-6c83-55bd-87e4-b1978d98bd5f" + +[[deps.Bzip2_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"] +git-tree-sha1 = "19a35467a82e236ff51bc17a3a44b69ef35185a2" +uuid = "6e34b625-4abd-537c-b88f-471c36dfa7a0" +version = "1.0.8+0" + +[[deps.Cairo_jll]] +deps = ["Artifacts", "Bzip2_jll", "Fontconfig_jll", "FreeType2_jll", "Glib_jll", "JLLWrappers", "LZO_jll", "Libdl", "Pixman_jll", "Pkg", "Xorg_libXext_jll", "Xorg_libXrender_jll", "Zlib_jll", "libpng_jll"] +git-tree-sha1 = "4b859a208b2397a7a623a03449e4636bdb17bcf2" +uuid = "83423d85-b0ee-5818-9007-b63ccbeb887a" +version = "1.16.1+1" + +[[deps.ChainRulesCore]] +deps = ["Compat", "LinearAlgebra", "SparseArrays"] +git-tree-sha1 = "9489214b993cd42d17f44c36e359bf6a7c919abf" +uuid = "d360d2e6-b24c-11e9-a2a3-2a2ae2dbcce4" +version = "1.15.0" + +[[deps.ChangesOfVariables]] +deps = ["ChainRulesCore", "LinearAlgebra", "Test"] +git-tree-sha1 = "1e315e3f4b0b7ce40feded39c73049692126cf53" +uuid = "9e997f8a-9a97-42d5-a9f1-ce6bfc15e2c0" +version = "0.1.3" + +[[deps.ColorSchemes]] +deps = ["ColorTypes", "ColorVectorSpace", "Colors", "FixedPointNumbers", "Random"] +git-tree-sha1 = "7297381ccb5df764549818d9a7d57e45f1057d30" +uuid = "35d6a980-a343-548e-a6ea-1d62b119f2f4" +version = "3.18.0" + +[[deps.ColorTypes]] +deps = ["FixedPointNumbers", "Random"] +git-tree-sha1 = "0f4e115f6f34bbe43c19751c90a38b2f380637b9" +uuid = "3da002f7-5984-5a60-b8a6-cbb66c0b333f" +version = "0.11.3" + +[[deps.ColorVectorSpace]] +deps = ["ColorTypes", "FixedPointNumbers", "LinearAlgebra", "SpecialFunctions", "Statistics", "TensorCore"] +git-tree-sha1 = "d08c20eef1f2cbc6e60fd3612ac4340b89fea322" +uuid = "c3611d14-8923-5661-9e6a-0046d554d3a4" +version = "0.9.9" + +[[deps.Colors]] +deps = ["ColorTypes", "FixedPointNumbers", "Reexport"] +git-tree-sha1 = "417b0ed7b8b838aa6ca0a87aadf1bb9eb111ce40" +uuid = "5ae59095-9a9b-59fe-a467-6f913c188581" +version = "0.12.8" + +[[deps.Compat]] +deps = ["Dates", "LinearAlgebra", "UUIDs"] +git-tree-sha1 = "924cdca592bc16f14d2f7006754a621735280b74" +uuid = "34da2185-b29b-5c13-b0c7-acf172513d20" +version = "4.1.0" + +[[deps.CompilerSupportLibraries_jll]] +deps = ["Artifacts", "Libdl"] +uuid = "e66e0078-7015-5450-92f7-15fbd957f2ae" + +[[deps.Contour]] +deps = ["StaticArrays"] +git-tree-sha1 = "9f02045d934dc030edad45944ea80dbd1f0ebea7" +uuid = "d38c429a-6771-53c6-b99e-75d170b6e991" +version = "0.5.7" + +[[deps.DataAPI]] +git-tree-sha1 = "fb5f5316dd3fd4c5e7c30a24d50643b73e37cd40" +uuid = "9a962f9c-6df0-11e9-0e5d-c546b8b5ee8a" +version = "1.10.0" + +[[deps.DataStructures]] +deps = ["Compat", "InteractiveUtils", "OrderedCollections"] +git-tree-sha1 = "d1fff3a548102f48987a52a2e0d114fa97d730f0" +uuid = "864edb3b-99cc-5e75-8d2d-829cb0a9cfe8" +version = "0.18.13" + +[[deps.DataValueInterfaces]] +git-tree-sha1 = "bfc1187b79289637fa0ef6d4436ebdfe6905cbd6" +uuid = "e2d170a0-9d28-54be-80f0-106bbe20a464" +version = "1.0.0" + 
+[[deps.Dates]] +deps = ["Printf"] +uuid = "ade2ca70-3891-5945-98fb-dc099432e06a" + +[[deps.DelimitedFiles]] +deps = ["Mmap"] +uuid = "8bb1440f-4735-579b-a4ab-409b98df4dab" + +[[deps.DocStringExtensions]] +deps = ["LibGit2"] +git-tree-sha1 = "b19534d1895d702889b219c382a6e18010797f0b" +uuid = "ffbed154-4ef7-542d-bbb7-c09d3a79fcae" +version = "0.8.6" + +[[deps.Downloads]] +deps = ["ArgTools", "LibCURL", "NetworkOptions"] +uuid = "f43a241f-c20a-4ad4-852c-f6b1247861c6" + +[[deps.EarCut_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"] +git-tree-sha1 = "3f3a2501fa7236e9b911e0f7a588c657e822bb6d" +uuid = "5ae413db-bbd1-5e63-b57d-d24a61df00f5" +version = "2.2.3+0" + +[[deps.Expat_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"] +git-tree-sha1 = "bad72f730e9e91c08d9427d5e8db95478a3c323d" +uuid = "2e619515-83b5-522b-bb60-26c02a35a201" +version = "2.4.8+0" + [[deps.ExprTools]] git-tree-sha1 = "56559bbef6ca5ea0c0818fa5c90320398a6fbf8d" uuid = "e2ba6199-217a-4e67-a87a-7c52f15ade04" version = "0.1.8" +[[deps.FFMPEG]] +deps = ["FFMPEG_jll"] +git-tree-sha1 = "b57e3acbe22f8484b4b5ff66a7499717fe1a9cc8" +uuid = "c87230d0-a227-11e9-1b43-d7ebe4e7570a" +version = "0.4.1" + +[[deps.FFMPEG_jll]] +deps = ["Artifacts", "Bzip2_jll", "FreeType2_jll", "FriBidi_jll", "JLLWrappers", "LAME_jll", "Libdl", "Ogg_jll", "OpenSSL_jll", "Opus_jll", "Pkg", "Zlib_jll", "libass_jll", "libfdk_aac_jll", "libvorbis_jll", "x264_jll", "x265_jll"] +git-tree-sha1 = "d8a578692e3077ac998b50c0217dfd67f21d1e5f" +uuid = "b22a6f82-2f65-5046-a5b2-351ab43fb4e5" +version = "4.4.0+0" + +[[deps.FixedPointNumbers]] +deps = ["Statistics"] +git-tree-sha1 = "335bfdceacc84c5cdf16aadc768aa5ddfc5383cc" +uuid = "53c48c17-4a7d-5ca2-90c5-79b7896eea93" +version = "0.8.4" + +[[deps.Fontconfig_jll]] +deps = ["Artifacts", "Bzip2_jll", "Expat_jll", "FreeType2_jll", "JLLWrappers", "Libdl", "Libuuid_jll", "Pkg", "Zlib_jll"] +git-tree-sha1 = "21efd19106a55620a188615da6d3d06cd7f6ee03" +uuid = "a3f928ae-7b40-5064-980b-68af3947d34b" +version = "2.13.93+0" + +[[deps.Formatting]] +deps = ["Printf"] +git-tree-sha1 = "8339d61043228fdd3eb658d86c926cb282ae72a8" +uuid = "59287772-0a20-5a39-b81b-1366585eb4c0" +version = "0.4.2" + +[[deps.FreeType2_jll]] +deps = ["Artifacts", "Bzip2_jll", "JLLWrappers", "Libdl", "Pkg", "Zlib_jll"] +git-tree-sha1 = "87eb71354d8ec1a96d4a7636bd57a7347dde3ef9" +uuid = "d7e528f0-a631-5988-bf34-fe36492bcfd7" +version = "2.10.4+0" + +[[deps.FriBidi_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"] +git-tree-sha1 = "aa31987c2ba8704e23c6c8ba8a4f769d5d7e4f91" +uuid = "559328eb-81f9-559d-9380-de523a88c83c" +version = "1.0.10+0" + +[[deps.GLFW_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Libglvnd_jll", "Pkg", "Xorg_libXcursor_jll", "Xorg_libXi_jll", "Xorg_libXinerama_jll", "Xorg_libXrandr_jll"] +git-tree-sha1 = "51d2dfe8e590fbd74e7a842cf6d13d8a2f45dc01" +uuid = "0656b61e-2033-5cc2-a64a-77c0f6c09b89" +version = "3.3.6+0" + +[[deps.GR]] +deps = ["Base64", "DelimitedFiles", "GR_jll", "HTTP", "JSON", "Libdl", "LinearAlgebra", "Pkg", "Printf", "Random", "RelocatableFolders", "Serialization", "Sockets", "Test", "UUIDs"] +git-tree-sha1 = "c98aea696662d09e215ef7cda5296024a9646c75" +uuid = "28b8d3ca-fb5f-59d9-8090-bfdbd6d07a71" +version = "0.64.4" + +[[deps.GR_jll]] +deps = ["Artifacts", "Bzip2_jll", "Cairo_jll", "FFMPEG_jll", "Fontconfig_jll", "GLFW_jll", "JLLWrappers", "JpegTurbo_jll", "Libdl", "Libtiff_jll", "Pixman_jll", "Pkg", "Qt5Base_jll", "Zlib_jll", "libpng_jll"] +git-tree-sha1 = 
"3a233eeeb2ca45842fe100e0413936834215abf5" +uuid = "d2c73de3-f751-5644-a686-071e5b155ba9" +version = "0.64.4+0" + +[[deps.GeometryBasics]] +deps = ["EarCut_jll", "IterTools", "LinearAlgebra", "StaticArrays", "StructArrays", "Tables"] +git-tree-sha1 = "83ea630384a13fc4f002b77690bc0afeb4255ac9" +uuid = "5c1252a2-5f33-56bf-86c9-59e7332b4326" +version = "0.4.2" + +[[deps.Gettext_jll]] +deps = ["Artifacts", "CompilerSupportLibraries_jll", "JLLWrappers", "Libdl", "Libiconv_jll", "Pkg", "XML2_jll"] +git-tree-sha1 = "9b02998aba7bf074d14de89f9d37ca24a1a0b046" +uuid = "78b55507-aeef-58d4-861c-77aaff3498b1" +version = "0.21.0+0" + +[[deps.Glib_jll]] +deps = ["Artifacts", "Gettext_jll", "JLLWrappers", "Libdl", "Libffi_jll", "Libiconv_jll", "Libmount_jll", "PCRE_jll", "Pkg", "Zlib_jll"] +git-tree-sha1 = "a32d672ac2c967f3deb8a81d828afc739c838a06" +uuid = "7746bdde-850d-59dc-9ae8-88ece973131d" +version = "2.68.3+2" + +[[deps.Graphite2_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"] +git-tree-sha1 = "344bf40dcab1073aca04aa0df4fb092f920e4011" +uuid = "3b182d85-2403-5c21-9c21-1e1f0cc25472" +version = "1.3.14+0" + +[[deps.Grisu]] +git-tree-sha1 = "53bb909d1151e57e2484c3d1b53e19552b887fb2" +uuid = "42e2da0e-8278-4e71-bc24-59509adca0fe" +version = "1.0.2" + +[[deps.HTTP]] +deps = ["Base64", "Dates", "IniFile", "Logging", "MbedTLS", "NetworkOptions", "Sockets", "URIs"] +git-tree-sha1 = "0fa77022fe4b511826b39c894c90daf5fce3334a" +uuid = "cd3eb016-35fb-5094-929b-558a96fad6f3" +version = "0.9.17" + +[[deps.HarfBuzz_jll]] +deps = ["Artifacts", "Cairo_jll", "Fontconfig_jll", "FreeType2_jll", "Glib_jll", "Graphite2_jll", "JLLWrappers", "Libdl", "Libffi_jll", "Pkg"] +git-tree-sha1 = "129acf094d168394e80ee1dc4bc06ec835e510a3" +uuid = "2e76f6c2-a576-52d4-95c1-20adfe4de566" +version = "2.8.1+1" + +[[deps.IniFile]] +git-tree-sha1 = "f550e6e32074c939295eb5ea6de31849ac2c9625" +uuid = "83e8ac13-25f8-5344-8a64-a9f2b223428f" +version = "0.5.1" + +[[deps.InteractiveUtils]] +deps = ["Markdown"] +uuid = "b77e0a4c-d291-57a0-90e8-8db25a27a240" + +[[deps.InverseFunctions]] +deps = ["Test"] +git-tree-sha1 = "b3364212fb5d870f724876ffcd34dd8ec6d98918" +uuid = "3587e190-3f89-42d0-90ee-14403ec27112" +version = "0.1.7" + +[[deps.IrrationalConstants]] +git-tree-sha1 = "7fd44fd4ff43fc60815f8e764c0f352b83c49151" +uuid = "92d709cd-6900-40b7-9082-c6be49f344b6" +version = "0.1.1" + +[[deps.IterTools]] +git-tree-sha1 = "fa6287a4469f5e048d763df38279ee729fbd44e5" +uuid = "c8e1da08-722c-5040-9ed9-7db0dc04731e" +version = "1.4.0" + +[[deps.IteratorInterfaceExtensions]] +git-tree-sha1 = "a3f24677c21f5bbe9d2a714f95dcd58337fb2856" +uuid = "82899510-4779-5014-852e-03e436cf321d" +version = "1.0.0" + +[[deps.JLLWrappers]] +deps = ["Preferences"] +git-tree-sha1 = "abc9885a7ca2052a736a600f7fa66209f96506e1" +uuid = "692b3bcd-3c85-4b1f-b108-f13ce0eb3210" +version = "1.4.1" + +[[deps.JSON]] +deps = ["Dates", "Mmap", "Parsers", "Unicode"] +git-tree-sha1 = "3c837543ddb02250ef42f4738347454f95079d4e" +uuid = "682c06a0-de6a-54ab-a142-c8b1cf79cde6" +version = "0.21.3" + +[[deps.JpegTurbo_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"] +git-tree-sha1 = "b53380851c6e6664204efb2e62cd24fa5c47e4ba" +uuid = "aacddb02-875f-59d6-b918-886e6ef4fbf8" +version = "2.1.2+0" + +[[deps.LAME_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"] +git-tree-sha1 = "f6250b16881adf048549549fba48b1161acdac8c" +uuid = "c1c5ebd0-6772-5130-a774-d5fcae4a789d" +version = "3.100.1+0" + +[[deps.LERC_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"] 
+git-tree-sha1 = "bf36f528eec6634efc60d7ec062008f171071434" +uuid = "88015f11-f218-50d7-93a8-a6af411a945d" +version = "3.0.0+1" + +[[deps.LZO_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"] +git-tree-sha1 = "e5b909bcf985c5e2605737d2ce278ed791b89be6" +uuid = "dd4b983a-f0e5-5f8d-a1b7-129d4a5fb1ac" +version = "2.10.1+0" + +[[deps.LaTeXStrings]] +git-tree-sha1 = "f2355693d6778a178ade15952b7ac47a4ff97996" +uuid = "b964fa9f-0449-5b57-a5c2-d3ea65f4040f" +version = "1.3.0" + +[[deps.Latexify]] +deps = ["Formatting", "InteractiveUtils", "LaTeXStrings", "MacroTools", "Markdown", "Printf", "Requires"] +git-tree-sha1 = "46a39b9c58749eefb5f2dc1178cb8fab5332b1ab" +uuid = "23fbe1c1-3f47-55db-b15f-69d7ec21a316" +version = "0.15.15" + +[[deps.LibCURL]] +deps = ["LibCURL_jll", "MozillaCACerts_jll"] +uuid = "b27032c2-a3e7-50c8-80cd-2d36dbcbfd21" + +[[deps.LibCURL_jll]] +deps = ["Artifacts", "LibSSH2_jll", "Libdl", "MbedTLS_jll", "Zlib_jll", "nghttp2_jll"] +uuid = "deac9b47-8bc7-5906-a0fe-35ac56dc84c0" + +[[deps.LibGit2]] +deps = ["Base64", "NetworkOptions", "Printf", "SHA"] +uuid = "76f85450-5226-5b5a-8eaa-529ad045b433" + +[[deps.LibSSH2_jll]] +deps = ["Artifacts", "Libdl", "MbedTLS_jll"] +uuid = "29816b5a-b9ab-546f-933c-edad1886dfa8" + +[[deps.Libdl]] +uuid = "8f399da3-3557-5675-b5ff-fb832c97cbdb" + +[[deps.Libffi_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"] +git-tree-sha1 = "0b4a5d71f3e5200a7dff793393e09dfc2d874290" +uuid = "e9f186c6-92d2-5b65-8a66-fee21dc1b490" +version = "3.2.2+1" + +[[deps.Libgcrypt_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Libgpg_error_jll", "Pkg"] +git-tree-sha1 = "64613c82a59c120435c067c2b809fc61cf5166ae" +uuid = "d4300ac3-e22c-5743-9152-c294e39db1e4" +version = "1.8.7+0" + +[[deps.Libglvnd_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg", "Xorg_libX11_jll", "Xorg_libXext_jll"] +git-tree-sha1 = "7739f837d6447403596a75d19ed01fd08d6f56bf" +uuid = "7e76a0d4-f3c7-5321-8279-8d96eeed0f29" +version = "1.3.0+3" + +[[deps.Libgpg_error_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"] +git-tree-sha1 = "c333716e46366857753e273ce6a69ee0945a6db9" +uuid = "7add5ba3-2f88-524e-9cd5-f83b8a55f7b8" +version = "1.42.0+0" + +[[deps.Libiconv_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"] +git-tree-sha1 = "42b62845d70a619f063a7da093d995ec8e15e778" +uuid = "94ce4f54-9a6c-5748-9c1c-f9c7231a4531" +version = "1.16.1+1" + +[[deps.Libmount_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"] +git-tree-sha1 = "9c30530bf0effd46e15e0fdcf2b8636e78cbbd73" +uuid = "4b2f31a3-9ecc-558c-b454-b3730dcb73e9" +version = "2.35.0+0" + +[[deps.Libtiff_jll]] +deps = ["Artifacts", "JLLWrappers", "JpegTurbo_jll", "LERC_jll", "Libdl", "Pkg", "Zlib_jll", "Zstd_jll"] +git-tree-sha1 = "3eb79b0ca5764d4799c06699573fd8f533259713" +uuid = "89763e89-9b03-5906-acba-b20f662cd828" +version = "4.4.0+0" + +[[deps.Libuuid_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"] +git-tree-sha1 = "7f3efec06033682db852f8b3bc3c1d2b0a0ab066" +uuid = "38a345b3-de98-5d2b-a5d3-14cd9215e700" +version = "2.36.0+0" + +[[deps.LinearAlgebra]] +deps = ["Libdl", "libblastrampoline_jll"] +uuid = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e" + +[[deps.LogExpFunctions]] +deps = ["ChainRulesCore", "ChangesOfVariables", "DocStringExtensions", "InverseFunctions", "IrrationalConstants", "LinearAlgebra"] +git-tree-sha1 = "09e4b894ce6a976c354a69041a04748180d43637" +uuid = "2ab3a3ac-af41-5b50-aa03-7779005ae688" +version = "0.3.15" + +[[deps.Logging]] +uuid = "56ddb016-857b-54e1-b83d-db4d58db5568" + 
+[[deps.MacroTools]] +deps = ["Markdown", "Random"] +git-tree-sha1 = "3d3e902b31198a27340d0bf00d6ac452866021cf" +uuid = "1914dd2f-81c6-5fcd-8719-6d5c9610ff09" +version = "0.5.9" + +[[deps.Markdown]] +deps = ["Base64"] +uuid = "d6f4376e-aef5-505a-96c1-9c027394607a" + +[[deps.MbedTLS]] +deps = ["Dates", "MbedTLS_jll", "Random", "Sockets"] +git-tree-sha1 = "1c38e51c3d08ef2278062ebceade0e46cefc96fe" +uuid = "739be429-bea8-5141-9913-cc70e7f3736d" +version = "1.0.3" + +[[deps.MbedTLS_jll]] +deps = ["Artifacts", "Libdl"] +uuid = "c8ffd9c3-330d-5841-b78e-0817d7145fa1" + +[[deps.Measures]] +git-tree-sha1 = "e498ddeee6f9fdb4551ce855a46f54dbd900245f" +uuid = "442fdcdd-2543-5da2-b0f3-8c86c306513e" +version = "0.3.1" + +[[deps.Missings]] +deps = ["DataAPI"] +git-tree-sha1 = "bf210ce90b6c9eed32d25dbcae1ebc565df2687f" +uuid = "e1d29d7a-bbdc-5cf2-9ac0-f12de2c33e28" +version = "1.0.2" + +[[deps.Mmap]] +uuid = "a63ad114-7e13-5084-954f-fe012c677804" + +[[deps.MozillaCACerts_jll]] +uuid = "14a3606d-f60d-562e-9121-12d972cd8159" + +[[deps.NaNMath]] +git-tree-sha1 = "737a5957f387b17e74d4ad2f440eb330b39a62c5" +uuid = "77ba4419-2d1f-58cd-9bb1-8ffee604a2e3" +version = "1.0.0" + +[[deps.NetworkOptions]] +uuid = "ca575930-c2e3-43a9-ace4-1e988b2c1908" + +[[deps.Ogg_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"] +git-tree-sha1 = "887579a3eb005446d514ab7aeac5d1d027658b8f" +uuid = "e7412a2a-1a6e-54c0-be00-318e2571c051" +version = "1.3.5+1" + +[[deps.OpenBLAS_jll]] +deps = ["Artifacts", "CompilerSupportLibraries_jll", "Libdl"] +uuid = "4536629a-c528-5b80-bd46-f80d51c5b363" + +[[deps.OpenLibm_jll]] +deps = ["Artifacts", "Libdl"] +uuid = "05823500-19ac-5b8b-9628-191a04bc5112" + +[[deps.OpenSSL_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"] +git-tree-sha1 = "ab05aa4cc89736e95915b01e7279e61b1bfe33b8" +uuid = "458c3c95-2e84-50aa-8efc-19380b2a3a95" +version = "1.1.14+0" + +[[deps.OpenSpecFun_jll]] +deps = ["Artifacts", "CompilerSupportLibraries_jll", "JLLWrappers", "Libdl", "Pkg"] +git-tree-sha1 = "13652491f6856acfd2db29360e1bbcd4565d04f1" +uuid = "efe28fd5-8261-553b-a9e1-b2916fc3738e" +version = "0.5.5+0" + +[[deps.Opus_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"] +git-tree-sha1 = "51a08fb14ec28da2ec7a927c4337e4332c2a4720" +uuid = "91d4177d-7536-5919-b921-800302f37372" +version = "1.3.2+0" + +[[deps.OrderedCollections]] +git-tree-sha1 = "85f8e6578bf1f9ee0d11e7bb1b1456435479d47c" +uuid = "bac558e1-5e72-5ebc-8fee-abe8a469f55d" +version = "1.4.1" + +[[deps.PCRE_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"] +git-tree-sha1 = "b2a7af664e098055a7529ad1a900ded962bca488" +uuid = "2f80f16e-611a-54ab-bc61-aa92de5b98fc" +version = "8.44.0+0" + +[[deps.Parsers]] +deps = ["Dates"] +git-tree-sha1 = "1285416549ccfcdf0c50d4997a94331e88d68413" +uuid = "69de0a69-1ddd-5017-9359-2bf0b02dc9f0" +version = "2.3.1" + +[[deps.Pixman_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"] +git-tree-sha1 = "b4f5d02549a10e20780a24fce72bea96b6329e29" +uuid = "30392449-352a-5448-841d-b1acce4e97dc" +version = "0.40.1+0" + +[[deps.Pkg]] +deps = ["Artifacts", "Dates", "Downloads", "LibGit2", "Libdl", "Logging", "Markdown", "Printf", "REPL", "Random", "SHA", "Serialization", "TOML", "Tar", "UUIDs", "p7zip_jll"] +uuid = "44cfe95a-1eb2-52ea-b672-e2afdf69b78f" + +[[deps.PlotThemes]] +deps = ["PlotUtils", "Statistics"] +git-tree-sha1 = "8162b2f8547bc23876edd0c5181b27702ae58dce" +uuid = "ccf2f8ad-2431-5c83-bf29-c5338b663b6a" +version = "3.0.0" + +[[deps.PlotUtils]] +deps = ["ColorSchemes", "Colors", "Dates", 
"Printf", "Random", "Reexport", "Statistics"] +git-tree-sha1 = "bb16469fd5224100e422f0b027d26c5a25de1200" +uuid = "995b91a9-d308-5afd-9ec6-746e21dbc043" +version = "1.2.0" + +[[deps.Plots]] +deps = ["Base64", "Contour", "Dates", "Downloads", "FFMPEG", "FixedPointNumbers", "GR", "GeometryBasics", "JSON", "Latexify", "LinearAlgebra", "Measures", "NaNMath", "Pkg", "PlotThemes", "PlotUtils", "Printf", "REPL", "Random", "RecipesBase", "RecipesPipeline", "Reexport", "Requires", "Scratch", "Showoff", "SparseArrays", "Statistics", "StatsBase", "UUIDs", "UnicodeFun", "Unzip"] +git-tree-sha1 = "9e42de869561d6bdf8602c57ec557d43538a92f0" +uuid = "91a5bcdd-55d7-5caf-9e0b-520d859cae80" +version = "1.29.1" + +[[deps.Preferences]] +deps = ["TOML"] +git-tree-sha1 = "47e5f437cc0e7ef2ce8406ce1e7e24d44915f88d" +uuid = "21216c6a-2e73-6563-6e65-726566657250" +version = "1.3.0" + [[deps.Printf]] deps = ["Unicode"] uuid = "de0858da-6303-5e67-8744-51eddeeeb8d7" +[[deps.Qt5Base_jll]] +deps = ["Artifacts", "CompilerSupportLibraries_jll", "Fontconfig_jll", "Glib_jll", "JLLWrappers", "Libdl", "Libglvnd_jll", "OpenSSL_jll", "Pkg", "Xorg_libXext_jll", "Xorg_libxcb_jll", "Xorg_xcb_util_image_jll", "Xorg_xcb_util_keysyms_jll", "Xorg_xcb_util_renderutil_jll", "Xorg_xcb_util_wm_jll", "Zlib_jll", "xkbcommon_jll"] +git-tree-sha1 = "c6c0f690d0cc7caddb74cef7aa847b824a16b256" +uuid = "ea2cea3b-5b76-57ae-a6ef-0a8af62496e1" +version = "5.15.3+1" + +[[deps.REPL]] +deps = ["InteractiveUtils", "Markdown", "Sockets", "Unicode"] +uuid = "3fa0cd96-eef1-5676-8a61-b3b8758bbffb" + +[[deps.Random]] +deps = ["SHA", "Serialization"] +uuid = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c" + +[[deps.RecipesBase]] +git-tree-sha1 = "6bf3f380ff52ce0832ddd3a2a7b9538ed1bcca7d" +uuid = "3cdcf5f2-1ef4-517c-9805-6587b60abb01" +version = "1.2.1" + +[[deps.RecipesPipeline]] +deps = ["Dates", "NaNMath", "PlotUtils", "RecipesBase"] +git-tree-sha1 = "dc1e451e15d90347a7decc4221842a022b011714" +uuid = "01d81517-befc-4cb6-b9ec-a95719d0359c" +version = "0.5.2" + +[[deps.Reexport]] +git-tree-sha1 = "45e428421666073eab6f2da5c9d310d99bb12f9b" +uuid = "189a3867-3050-52da-a836-e630ba90ab69" +version = "1.2.2" + +[[deps.RelocatableFolders]] +deps = ["SHA", "Scratch"] +git-tree-sha1 = "cdbd3b1338c72ce29d9584fdbe9e9b70eeb5adca" +uuid = "05181044-ff0b-4ac5-8273-598c1e38db00" +version = "0.1.3" + +[[deps.Requires]] +deps = ["UUIDs"] +git-tree-sha1 = "838a3a4188e2ded87a4f9f184b4b0d78a1e91cb7" +uuid = "ae029012-a4dd-5104-9daa-d747884805df" +version = "1.3.0" + +[[deps.SHA]] +uuid = "ea8e919c-243c-51af-8825-aaa63cd721ce" + +[[deps.Scratch]] +deps = ["Dates"] +git-tree-sha1 = "0b4b7f1393cff97c33891da2a0bf69c6ed241fda" +uuid = "6c6a2e73-6563-6170-7368-637461726353" +version = "1.1.0" + +[[deps.Serialization]] +uuid = "9e88b42a-f829-5b0c-bbe9-9e923198166b" + +[[deps.Showoff]] +deps = ["Dates", "Grisu"] +git-tree-sha1 = "91eddf657aca81df9ae6ceb20b959ae5653ad1de" +uuid = "992d4aef-0814-514b-bc4d-f2e9a6c4116f" +version = "1.0.3" + +[[deps.Sockets]] +uuid = "6462fe0b-24de-5631-8697-dd941f90decc" + +[[deps.SortingAlgorithms]] +deps = ["DataStructures"] +git-tree-sha1 = "b3363d7460f7d098ca0912c69b082f75625d7508" +uuid = "a2af1166-a08f-5f64-846c-94a0d3cef48c" +version = "1.0.1" + +[[deps.SparseArrays]] +deps = ["LinearAlgebra", "Random"] +uuid = "2f01184e-e22b-5df5-ae63-d93ebab69eaf" + +[[deps.SpecialFunctions]] +deps = ["ChainRulesCore", "IrrationalConstants", "LogExpFunctions", "OpenLibm_jll", "OpenSpecFun_jll"] +git-tree-sha1 = "a9e798cae4867e3a41cae2dd9eb60c047f1212db" +uuid = 
"276daf66-3868-5448-9aa4-cd146d93841b" +version = "2.1.6" + +[[deps.StaticArrays]] +deps = ["LinearAlgebra", "Random", "Statistics"] +git-tree-sha1 = "2bbd9f2e40afd197a1379aef05e0d85dba649951" +uuid = "90137ffa-7385-5640-81b9-e52037218182" +version = "1.4.7" + +[[deps.Statistics]] +deps = ["LinearAlgebra", "SparseArrays"] +uuid = "10745b16-79ce-11e8-11f9-7d13ad32a3b2" + +[[deps.StatsAPI]] +deps = ["LinearAlgebra"] +git-tree-sha1 = "2c11d7290036fe7aac9038ff312d3b3a2a5bf89e" +uuid = "82ae8749-77ed-4fe6-ae5f-f523153014b0" +version = "1.4.0" + +[[deps.StatsBase]] +deps = ["DataAPI", "DataStructures", "LinearAlgebra", "LogExpFunctions", "Missings", "Printf", "Random", "SortingAlgorithms", "SparseArrays", "Statistics", "StatsAPI"] +git-tree-sha1 = "8977b17906b0a1cc74ab2e3a05faa16cf08a8291" +uuid = "2913bbd2-ae8a-5f71-8c99-4fb6c76f3a91" +version = "0.33.16" + +[[deps.StructArrays]] +deps = ["Adapt", "DataAPI", "StaticArrays", "Tables"] +git-tree-sha1 = "9abba8f8fb8458e9adf07c8a2377a070674a24f1" +uuid = "09ab397b-f2b6-538f-b94a-2f83cf4a842a" +version = "0.6.8" + +[[deps.TOML]] +deps = ["Dates"] +uuid = "fa267f1f-6049-4f14-aa54-33bafae1ed76" + +[[deps.TableTraits]] +deps = ["IteratorInterfaceExtensions"] +git-tree-sha1 = "c06b2f539df1c6efa794486abfb6ed2022561a39" +uuid = "3783bdb8-4a98-5b6b-af9a-565f29a5fe9c" +version = "1.0.1" + +[[deps.Tables]] +deps = ["DataAPI", "DataValueInterfaces", "IteratorInterfaceExtensions", "LinearAlgebra", "OrderedCollections", "TableTraits", "Test"] +git-tree-sha1 = "5ce79ce186cc678bbb5c5681ca3379d1ddae11a1" +uuid = "bd369af6-aec1-5ad0-b16a-f7cc5008161c" +version = "1.7.0" + +[[deps.Tar]] +deps = ["ArgTools", "SHA"] +uuid = "a4e569a6-e804-4fa4-b0f3-eef7a1d5b13e" + +[[deps.TensorCore]] +deps = ["LinearAlgebra"] +git-tree-sha1 = "1feb45f88d133a655e001435632f019a9a1bcdb6" +uuid = "62fd8b95-f654-4bbd-a8a5-9c27f68ccd50" +version = "0.1.1" + +[[deps.Test]] +deps = ["InteractiveUtils", "Logging", "Random", "Serialization"] +uuid = "8dfed614-e22c-5e08-85e1-65c5234f0b40" + [[deps.TimerOutputs]] deps = ["ExprTools", "Printf"] git-tree-sha1 = "7638550aaea1c9a1e86817a231ef0faa9aca79bd" uuid = "a759f4b9-e2f1-59dc-863e-4aeb61b1ea8f" version = "0.5.19" +[[deps.URIs]] +git-tree-sha1 = "97bbe755a53fe859669cd907f2d96aee8d2c1355" +uuid = "5c2747f8-b7ea-4ff2-ba2e-563bfd36b1d4" +version = "1.3.0" + +[[deps.UUIDs]] +deps = ["Random", "SHA"] +uuid = "cf7118a7-6976-5b1a-9a39-7adc72f591a4" + [[deps.Unicode]] uuid = "4ec0a83e-493e-50e2-b9ac-8f72acf5a8f5" + +[[deps.UnicodeFun]] +deps = ["REPL"] +git-tree-sha1 = "53915e50200959667e78a92a418594b428dffddf" +uuid = "1cfade01-22cf-5700-b092-accc4b62d6e1" +version = "0.4.1" + +[[deps.Unzip]] +git-tree-sha1 = "34db80951901073501137bdbc3d5a8e7bbd06670" +uuid = "41fe7b60-77ed-43a1-b4f0-825fd5a5650d" +version = "0.1.2" + +[[deps.Wayland_jll]] +deps = ["Artifacts", "Expat_jll", "JLLWrappers", "Libdl", "Libffi_jll", "Pkg", "XML2_jll"] +git-tree-sha1 = "3e61f0b86f90dacb0bc0e73a0c5a83f6a8636e23" +uuid = "a2964d1f-97da-50d4-b82a-358c7fce9d89" +version = "1.19.0+0" + +[[deps.Wayland_protocols_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"] +git-tree-sha1 = "4528479aa01ee1b3b4cd0e6faef0e04cf16466da" +uuid = "2381bf8a-dfd0-557d-9999-79630e7b1b91" +version = "1.25.0+0" + +[[deps.XML2_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Libiconv_jll", "Pkg", "Zlib_jll"] +git-tree-sha1 = "58443b63fb7e465a8a7210828c91c08b92132dff" +uuid = "02c8fc9c-b97f-50b9-bbe4-9be30ff0a78a" +version = "2.9.14+0" + +[[deps.XSLT_jll]] +deps = ["Artifacts", "JLLWrappers", 
"Libdl", "Libgcrypt_jll", "Libgpg_error_jll", "Libiconv_jll", "Pkg", "XML2_jll", "Zlib_jll"] +git-tree-sha1 = "91844873c4085240b95e795f692c4cec4d805f8a" +uuid = "aed1982a-8fda-507f-9586-7b0439959a61" +version = "1.1.34+0" + +[[deps.Xorg_libX11_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg", "Xorg_libxcb_jll", "Xorg_xtrans_jll"] +git-tree-sha1 = "5be649d550f3f4b95308bf0183b82e2582876527" +uuid = "4f6342f7-b3d2-589e-9d20-edeb45f2b2bc" +version = "1.6.9+4" + +[[deps.Xorg_libXau_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"] +git-tree-sha1 = "4e490d5c960c314f33885790ed410ff3a94ce67e" +uuid = "0c0b7dd1-d40b-584c-a123-a41640f87eec" +version = "1.0.9+4" + +[[deps.Xorg_libXcursor_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg", "Xorg_libXfixes_jll", "Xorg_libXrender_jll"] +git-tree-sha1 = "12e0eb3bc634fa2080c1c37fccf56f7c22989afd" +uuid = "935fb764-8cf2-53bf-bb30-45bb1f8bf724" +version = "1.2.0+4" + +[[deps.Xorg_libXdmcp_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"] +git-tree-sha1 = "4fe47bd2247248125c428978740e18a681372dd4" +uuid = "a3789734-cfe1-5b06-b2d0-1dd0d9d62d05" +version = "1.1.3+4" + +[[deps.Xorg_libXext_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg", "Xorg_libX11_jll"] +git-tree-sha1 = "b7c0aa8c376b31e4852b360222848637f481f8c3" +uuid = "1082639a-0dae-5f34-9b06-72781eeb8cb3" +version = "1.3.4+4" + +[[deps.Xorg_libXfixes_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg", "Xorg_libX11_jll"] +git-tree-sha1 = "0e0dc7431e7a0587559f9294aeec269471c991a4" +uuid = "d091e8ba-531a-589c-9de9-94069b037ed8" +version = "5.0.3+4" + +[[deps.Xorg_libXi_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg", "Xorg_libXext_jll", "Xorg_libXfixes_jll"] +git-tree-sha1 = "89b52bc2160aadc84d707093930ef0bffa641246" +uuid = "a51aa0fd-4e3c-5386-b890-e753decda492" +version = "1.7.10+4" + +[[deps.Xorg_libXinerama_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg", "Xorg_libXext_jll"] +git-tree-sha1 = "26be8b1c342929259317d8b9f7b53bf2bb73b123" +uuid = "d1454406-59df-5ea1-beac-c340f2130bc3" +version = "1.1.4+4" + +[[deps.Xorg_libXrandr_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg", "Xorg_libXext_jll", "Xorg_libXrender_jll"] +git-tree-sha1 = "34cea83cb726fb58f325887bf0612c6b3fb17631" +uuid = "ec84b674-ba8e-5d96-8ba1-2a689ba10484" +version = "1.5.2+4" + +[[deps.Xorg_libXrender_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg", "Xorg_libX11_jll"] +git-tree-sha1 = "19560f30fd49f4d4efbe7002a1037f8c43d43b96" +uuid = "ea2f1a96-1ddc-540d-b46f-429655e07cfa" +version = "0.9.10+4" + +[[deps.Xorg_libpthread_stubs_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"] +git-tree-sha1 = "6783737e45d3c59a4a4c4091f5f88cdcf0908cbb" +uuid = "14d82f49-176c-5ed1-bb49-ad3f5cbd8c74" +version = "0.1.0+3" + +[[deps.Xorg_libxcb_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg", "XSLT_jll", "Xorg_libXau_jll", "Xorg_libXdmcp_jll", "Xorg_libpthread_stubs_jll"] +git-tree-sha1 = "daf17f441228e7a3833846cd048892861cff16d6" +uuid = "c7cfdc94-dc32-55de-ac96-5a1b8d977c5b" +version = "1.13.0+3" + +[[deps.Xorg_libxkbfile_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg", "Xorg_libX11_jll"] +git-tree-sha1 = "926af861744212db0eb001d9e40b5d16292080b2" +uuid = "cc61e674-0454-545c-8b26-ed2c68acab7a" +version = "1.1.0+4" + +[[deps.Xorg_xcb_util_image_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg", "Xorg_xcb_util_jll"] +git-tree-sha1 = "0fab0a40349ba1cba2c1da699243396ff8e94b97" +uuid = "12413925-8142-5f55-bb0e-6d7ca50bb09b" +version = 
"0.4.0+1" + +[[deps.Xorg_xcb_util_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg", "Xorg_libxcb_jll"] +git-tree-sha1 = "e7fd7b2881fa2eaa72717420894d3938177862d1" +uuid = "2def613f-5ad1-5310-b15b-b15d46f528f5" +version = "0.4.0+1" + +[[deps.Xorg_xcb_util_keysyms_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg", "Xorg_xcb_util_jll"] +git-tree-sha1 = "d1151e2c45a544f32441a567d1690e701ec89b00" +uuid = "975044d2-76e6-5fbe-bf08-97ce7c6574c7" +version = "0.4.0+1" + +[[deps.Xorg_xcb_util_renderutil_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg", "Xorg_xcb_util_jll"] +git-tree-sha1 = "dfd7a8f38d4613b6a575253b3174dd991ca6183e" +uuid = "0d47668e-0667-5a69-a72c-f761630bfb7e" +version = "0.3.9+1" + +[[deps.Xorg_xcb_util_wm_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg", "Xorg_xcb_util_jll"] +git-tree-sha1 = "e78d10aab01a4a154142c5006ed44fd9e8e31b67" +uuid = "c22f9ab0-d5fe-5066-847c-f4bb1cd4e361" +version = "0.4.1+1" + +[[deps.Xorg_xkbcomp_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg", "Xorg_libxkbfile_jll"] +git-tree-sha1 = "4bcbf660f6c2e714f87e960a171b119d06ee163b" +uuid = "35661453-b289-5fab-8a00-3d9160c6a3a4" +version = "1.4.2+4" + +[[deps.Xorg_xkeyboard_config_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg", "Xorg_xkbcomp_jll"] +git-tree-sha1 = "5c8424f8a67c3f2209646d4425f3d415fee5931d" +uuid = "33bec58e-1273-512f-9401-5d533626f822" +version = "2.27.0+4" + +[[deps.Xorg_xtrans_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"] +git-tree-sha1 = "79c31e7844f6ecf779705fbc12146eb190b7d845" +uuid = "c5fb5394-a638-5e4d-96e5-b29de1b5cf10" +version = "1.4.0+3" + +[[deps.Zlib_jll]] +deps = ["Libdl"] +uuid = "83775a58-1f1d-513f-b197-d71354ab007a" + +[[deps.Zstd_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"] +git-tree-sha1 = "e45044cd873ded54b6a5bac0eb5c971392cf1927" +uuid = "3161d3a3-bdf6-5164-811a-617609db77b4" +version = "1.5.2+0" + +[[deps.libass_jll]] +deps = ["Artifacts", "Bzip2_jll", "FreeType2_jll", "FriBidi_jll", "HarfBuzz_jll", "JLLWrappers", "Libdl", "Pkg", "Zlib_jll"] +git-tree-sha1 = "5982a94fcba20f02f42ace44b9894ee2b140fe47" +uuid = "0ac62f75-1d6f-5e53-bd7c-93b484bb37c0" +version = "0.15.1+0" + +[[deps.libblastrampoline_jll]] +deps = ["Artifacts", "Libdl", "OpenBLAS_jll"] +uuid = "8e850b90-86db-534c-a0d3-1478176c7d93" + +[[deps.libfdk_aac_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"] +git-tree-sha1 = "daacc84a041563f965be61859a36e17c4e4fcd55" +uuid = "f638f0a6-7fb0-5443-88ba-1cc74229b280" +version = "2.0.2+0" + +[[deps.libpng_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg", "Zlib_jll"] +git-tree-sha1 = "94d180a6d2b5e55e447e2d27a29ed04fe79eb30c" +uuid = "b53b4c65-9356-5827-b1ea-8c7a1a84506f" +version = "1.6.38+0" + +[[deps.libvorbis_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Ogg_jll", "Pkg"] +git-tree-sha1 = "b910cb81ef3fe6e78bf6acee440bda86fd6ae00c" +uuid = "f27f6e37-5d2b-51aa-960f-b287f2bc3b7a" +version = "1.3.7+1" + +[[deps.nghttp2_jll]] +deps = ["Artifacts", "Libdl"] +uuid = "8e850ede-7688-5339-a07c-302acd2aaf8d" + +[[deps.p7zip_jll]] +deps = ["Artifacts", "Libdl"] +uuid = "3f19e933-33d8-53b3-aaab-bd5110c3b7a0" + +[[deps.x264_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"] +git-tree-sha1 = "4fea590b89e6ec504593146bf8b988b2c00922b2" +uuid = "1270edf5-f2f9-52d2-97e9-ab00b5d0237a" +version = "2021.5.5+0" + +[[deps.x265_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"] +git-tree-sha1 = "ee567a171cce03570d77ad3a43e90218e38937a9" +uuid = 
"dfaa095f-4041-5dcd-9319-2fabd8486b76" +version = "3.5.0+0" + +[[deps.xkbcommon_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg", "Wayland_jll", "Wayland_protocols_jll", "Xorg_libxcb_jll", "Xorg_xkeyboard_config_jll"] +git-tree-sha1 = "ece2350174195bb31de1a63bea3a41ae1aa593b6" +uuid = "d8fb68d0-12a3-5cfd-a85a-d49703b185fd" +version = "0.9.1+5" diff --git a/Memoria/capitulos/0-Metodologia/Comentarios_previos.tex b/Memoria/capitulos/0-Metodologia/Comentarios_previos.tex index d81cea1..9a693b7 100644 --- a/Memoria/capitulos/0-Metodologia/Comentarios_previos.tex +++ b/Memoria/capitulos/0-Metodologia/Comentarios_previos.tex @@ -12,15 +12,16 @@ \section*{Comentario previo} Para ello se ha seleccionado una paleta de la web Palett.es, visitada por última vez el 13 de mayo del 2022 y con dirección \url{https://palett.es/6a94a8-013e3b-7eb645-31d331-26f27d}. - }: +y con la herramienta de contraste de la web \url{https://color.adobe.com/es/create/color-contrast-analyzer} +consultada por última vez el 16 de Junio de 2022}: \begin{itemize} - \item \iconoAclaraciones \textcolor{dark_green}{ Color 1}: Comentarios para + \item \iconoAclaraciones \textcolor{dark_green}{ \textbf{Color 1}}: Comentarios para aclarar conceptos matemáticos o informáticos y ofrecer la idea intuitiva que se esconde, donde no se presuponen conocimientos avanzados en la materia. - \item \iconoProfundizar \textcolor{blue}{ Color 2}: Comentarios para una reflexión más profunda o que indique nuevas áreas que explorar. - \item \iconoClave \textcolor{darkRed}{ Color 3}: Concepto clave y destacable que tendrá un papel fundamental a posteriori. + \item \iconoProfundizar \textcolor{blue}{\textbf{Color 2}}: Comentarios para una reflexión más profunda o que indique nuevas áreas que explorar. + \item \iconoClave \textcolor{darkRed}{ \textbf{Color 3}}: Concepto clave y destacable que tendrá un papel fundamental a posteriori. \end{itemize} Además a lo largo del trabajo se han realizado aportaciones propias y resultados novedosos, estos aparecerán destacados de la siguiente forma: diff --git a/Memoria/capitulos/0-Metodologia/asignaturas.tex b/Memoria/capitulos/0-Metodologia/asignaturas.tex new file mode 100644 index 0000000..521cf3e --- /dev/null +++ b/Memoria/capitulos/0-Metodologia/asignaturas.tex @@ -0,0 +1,35 @@ +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +% Asignaturas del grado relacionadas +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + +\section{Asignaturas de grado relacionadas con el trabajo } +\label{ch01:asignaturas} +Si bien, es casi imposible enumerar de manera +exhaustiva todas las asignaturas involucradas en este trabajo, +ya que todas han influido en menor o mayor medida en la comprensión +y formulación de ideas; las principales han +sido: +\begin{itemize} + \item \textbf{Análisis Matemático}: todas las asignaturas del departamento de análisis matemático + han tenido relevancia, ya sea en el modelado de espacios de funciones, + para probar que las redes neuronales son aproximadores universales + y para la elaboración de nuestros propios resultados. + \item El \textbf{Aprendizaje Automático} y \textbf{Visión por Computador} sientan las bases de lo que son problemas de aprendizaje, + tratamiento de los datos y evaluación del error, así como el uso práctico de las redes neuronales. + \item \textbf{Estructura de Datos}: diseño e implementación de la modelización de las redes neuronales y + sus algoritmos concernientes. 
+\end{itemize}
+
+En menor medida han tenido también relevancia:
+\begin{itemize}
+    \item \textbf{Inferencia Estadística}: está estrechamente ligada con
+    la ciencia de datos y también ha sido utilizada para los test de hipótesis.
+    \item Otras asignaturas que han intervenido son \textbf{Programación Orientada a Objetos} y \textbf{Diseño y Desarrollo de Sistemas Informáticos}.
+    \item Nociones de \textbf{Topología} se han requerido para probar ciertos resultados analíticos.
+    \item \textbf{Álgebra, Métodos Numéricos I, Modelos I, Geometría III y Metaheurística}: esta agrupación de asignaturas
+    ha ayudado a la comprensión y ha servido como germen de ideas y relaciones a lo largo de todo el desarrollo de la memoria,
+    por poner algunos ejemplos: la relación entre grafo y una matriz proviene de la asignatura de Modelos,
+    la existencia de funciones cuyo error tiende a infinito de Métodos Numéricos,
+    resultados constructivos que después han podido ser implementados (Álgebra y Métodos Numéricos), algunos resultados propios sobre las funciones de activación (Geometría),
+    conocimiento sobre algoritmos de optimización como los genéticos y \textit{KNN} (Metaheurística).
+\end{itemize}
\ No newline at end of file
diff --git a/Memoria/capitulos/0-Metodologia/herramientas.tex b/Memoria/capitulos/0-Metodologia/herramientas.tex
index ac0e323..c46dc1c 100644
--- a/Memoria/capitulos/0-Metodologia/herramientas.tex
+++ b/Memoria/capitulos/0-Metodologia/herramientas.tex
@@ -3,7 +3,7 @@
 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
 \section{Herramientas utilizadas}
-
+\label{ch01:Herramientas}
 \subsection{GitHub}
 Como servicio externo hemos usado \href{https://github.com}{GitHub},
 ya que permite implementar de manera eficaz todo el desarrollo ágil:
@@ -11,9 +11,9 @@ \subsection{GitHub}
 \subsection{Lenguaje de programación Julia}
 Hemos seleccionado como lenguaje de programación \href{https://julialang.org}{Julia}
-por los siguientes motivos:
+por los siguientes motivos (\cite{virtudes-de-julia}):
 \begin{itemize}
-    \item Ofrece \textit{benchmarks}
+    \item Ofrece resultados de \textit{benchmarks}
    muy competitivos\footnote{Véanse los resultados expuestos
    en \url{https://julialang.org/benchmarks/},
    web consultada por última vez el 22 de mayo de 2022.},
@@ -37,7 +37,7 @@ \subsection{\textit{Notebooks}}
    es interpretado (se va traduciendo y ejecutando línea a línea).
 }
-Todos los gráficas que se muestran en la memoria han sido creadas por nosotros.
+Todas las gráficas que se muestran en la memoria han sido creadas por nosotros.
 Las hemos generado con \textit{scripts} o la mayoría de las veces con
 \href{https://jupyter.org}{\textit{notebooks} de Jupyter};
 el motivo de esto ha sido el tener una interacción y visualización más cómoda
 y compacta de los resultados.
diff --git a/Memoria/capitulos/0-Metodologia/introduccion.tex b/Memoria/capitulos/0-Metodologia/introduccion.tex
index 5fa051c..68f0f10 100644
--- a/Memoria/capitulos/0-Metodologia/introduccion.tex
+++ b/Memoria/capitulos/0-Metodologia/introduccion.tex
@@ -6,7 +6,7 @@
 %*******************************************************
 \chapter{Metodología}
-
+\label{ch00:methodology}
 La planificación y organización es un componente vital
 a la hora del desarrollo de software y la ciencia de datos,
 por lo que no es de extrañar que sus beneficios sean extrapolables a otras áreas de la ciencia;
@@ -190,7 +190,7 @@ \subsection*{Hito 2: Evaluación experimental de las hipótesis de optimización
 Deberán validarse y cuantificarse experimentalmente
 las propuestas de optimización del hito anterior.
-Para ello se deberá de:
+Para ello se deberá:
 \begin{enumerate}
     \item \textbf{Formular los test correspondientes y adecuados} lo que conllevará:
     \begin{itemize}
@@ -203,7 +203,7 @@ \subsection*{Hito 2: Evaluación experimental de las hipótesis de optimización
 El criterio de aceptación de un producto mínimo viable consistirá en verificar que:
 \begin{itemize}
-    \item La implementación de los algoritmos debe de es coherente y proveniente del hito anterior y debe de estar referenciada.
+    \item La implementación de los algoritmos debe ser coherente, proveniente del hito anterior y debe estar referenciada.
     \item Toda implementación comprueba su correcto funcionamiento mediante tests.
     \item La redacción del análisis y conclusiones es aprobada por los tutores nuevamente.
 \end{itemize}
@@ -212,7 +212,7 @@ \subsection*{Hito 2: Evaluación experimental de las hipótesis de optimización
-\subsection*{Hito x: Entrega del proyecto}
+\subsection*{Hito 3: Entrega del proyecto}
 Su resultado será una memoria revisada y pulida
 que se entregará en Prado como trabajo fin de grado
 en el plazo de la convocatoria ordinaria.
@@ -227,12 +227,17 @@ \section{Registro de trabajo}
 las tareas dedicadas y los hitos relacionados en la siguiente
 \href{https://docs.google.com/spreadsheets/d/1TCcKQIKjKW9sMSU2f6obN9gHgv3c8UEdjmONkBlv42M/edit?usp=sharing}{hoja de cálculo}.
-En total el número de horas invertidas ha sido:
-\textcolor{red}{TODO Estos TODOS están relacionados con el issue 46}
-\textcolor{red}{TODO añadir registro}
+En total el número de horas invertidas ha sido
+de unas $450$.
+
 Estas se distribuyen de la siguiente manera entre los distintos hitos:
-\textcolor{red}{TODO añadir registro}
+\begin{figure}[H]
+    \centering
+    \includegraphics[width=\textwidth]{0-metodologia/chart.png}
+    \caption{Diagrama de sectores sobre la dedicación a cada hito}
+    \label{img:dedicar-hito}
+  \end{figure}
 \section{Resumen de la metodología}
diff --git a/Memoria/capitulos/1-Introduccion_redes_neuronales/Objetivos.tex b/Memoria/capitulos/1-Introduccion_redes_neuronales/Objetivos.tex
index dfe720d..818195a 100644
--- a/Memoria/capitulos/1-Introduccion_redes_neuronales/Objetivos.tex
+++ b/Memoria/capitulos/1-Introduccion_redes_neuronales/Objetivos.tex
@@ -23,7 +23,7 @@ \chapter{Introducción a las redes neuronales}
 \cite{a-universal-law-of-Robustness} \cite{CHAI2021100134}
 el sustento de esto no deja de ser experimental o basado en cotas
 de carácter \textit{en el peor de los casos y por el tamaño del espacio de búsqueda}.
Pero estos motivos no constituyen una demostración formal ni rigurosa de por qué decantarnos verdaderamente por
-ello y, es más otros artículos experimentales demuestran que aumentar el número de capas no mejora los resultado
+ello y, es más, otros artículos experimentales demuestran que aumentar el número de capas no mejora los resultados
 \cite{DBLP:conf/iwann/Linan-Villafranca21}.
 Así pues, sustentados con la demostración de convergencia universal \cite{HORNIK1989359}
diff --git a/Memoria/capitulos/1-Introduccion_redes_neuronales/aprendizaje_introduccion.tex b/Memoria/capitulos/1-Introduccion_redes_neuronales/aprendizaje_introduccion.tex
index 685d25c..e3d3cad 100644
--- a/Memoria/capitulos/1-Introduccion_redes_neuronales/aprendizaje_introduccion.tex
+++ b/Memoria/capitulos/1-Introduccion_redes_neuronales/aprendizaje_introduccion.tex
@@ -130,7 +130,7 @@ \subsubsection{Aprendizaje semi supervisado }
 como por ejemplo en traducción
 o detección de fraudes.
-Las redes neuronales son partícipes en los tres tipos de aprendizaje
+Las redes neuronales son partícipes en todos los tipos de aprendizaje
 recién mencionados \cite{8612259}, \cite{DBLP:journals/corr/BakerGNR16}, \cite{10.5555/2955491.2955578}.
 Sin embargo centraremos nuestro estudio en el caso de aprendizaje supervisado.
diff --git a/Memoria/capitulos/1-Introduccion_redes_neuronales/feedforward-network-una-capa.tex b/Memoria/capitulos/1-Introduccion_redes_neuronales/feedforward-network-una-capa.tex
index 75fc6df..aa80e5f 100644
--- a/Memoria/capitulos/1-Introduccion_redes_neuronales/feedforward-network-una-capa.tex
+++ b/Memoria/capitulos/1-Introduccion_redes_neuronales/feedforward-network-una-capa.tex
@@ -5,8 +5,40 @@
 %*******************************************************
 % Introducción al capítulo
-A lo largo de este capítulo daremos una modelización propia de red neuronal y la compararemos con otros modelos ofrecidos.
-Probaremos además que el modelo propuesto es un aproximador universal, es decir es capaz de aproximar cualquier función medible.
+ El objetivo principal de este capítulo es dar una modelización propia de red neuronal y demostrar que es un
+ aproximador universal de cualquier función medible.
+
+ El capítulo se organiza de la siguiente manera:
+
+ \begin{enumerate}
+  \item \textbf{Introducción de nuestro modelo} de red neuronal $\rrnnmc$ y comparación con los usuales en la sección \ref{sec:redes-neuronales-intro-una-capa}.
+  \item Demostración de que \textbf{las redes neuronales modelizadas son aproximadores universales}.
+  \begin{itemize}
+     \item Para ello serán necesarias una serie de \textbf{definiciones previas} que se encuentran en la sección \ref{ch:articulo:sec:defincionesPrimeras};
+     las más relevantes son la de función de activación y los espacios $\pmcg$.
+
+     \item El resultado de convergencia universal es producto de una sucesión de resultados que consiguen \textbf{aproximar unos espacios a partir de otros}; concretamente, las relaciones \textit{es denso en} y dónde se demuestran son:
+     \begin{align*}
+        \rrnn
+        \xRightarrow[]{\ref{teo:2_4_rrnn_densas_M}}
+        \rrnng
+        \xRightarrow[]{\ref{teorema:2_3_uniformemente_denso_compactos}}
+        \pmcg
+        \xRightarrow[]{\ref{teo:TeoremaConvergenciaRealEnCompactosDefinicionesEsenciales}}
+        \fC
+        \xRightarrow[]{\ref{teo:2_2_denso_función_continua}}
+        \fM.
+     \end{align*}
+     Además se verá
+     en la sección \ref{ch04:espacios-Lp} que $\rrnn$ es denso en $L_p(\R^d, \mu)$, lo que nos permitirá establecer propiedades y entender cómo funciona nuestro modelo
+     para problemas concretos de regresión y clasificación.
+
+     \item En la sección \ref{ch04:salida-varias-dimensiones} se demostrará la convergencia universal para el espacio
+     $\rrnnmc$.
+  \end{itemize}
+  \item Consideración, en la sección \ref{ch04:capacidad-calculo}, de si \textbf{en la práctica} se tiene la \textbf{capacidad computacional de actuar como aproximador universal}.
+
+ \end{enumerate}
 % Comienzo de la sección
@@ -24,7 +56,7 @@ \section{Definición de las redes neuronales \textit{Feedforward Networks}
 hemos considerado más conveniente; esta decisión es argumentada en el capítulo \ref{chapter:construir-redes-neuronales}.
 % Imagen grafo red neuronal una capa oculta muy simple y en blanco y negro
-\begin{figure}[h!]
+\begin{figure}[H]
     \centering
     \includegraphics[width=0.85\textwidth]{1-Introduccion_redes_neuronales/Red-Neuronal-una-capa-simple.png}
     \caption{\textit{Grafo} de una red neuronal de una capa oculta}
@@ -78,7 +110,7 @@ \section{Definición de las redes neuronales \textit{Feedforward Networks}
 \textbf{Interpretación fórmula}
 }
 {\maginLetterSize
-    Observemos que $n$ es el número de neuronas de la capa oculta. Es decir lo que en el grafo \ref{img:grafo-red-neuronal-una-capa-oculta} serían las neuronas de las capas ocultas se correspondería con términos $\gamma_{i}( A_{i}(x))$.
+    Observemos que $n$ es el número de neuronas de la capa oculta. Es decir, lo que en el grafo \ref{img:grafo-red-neuronal-una-capa-oculta} serían los nodos interiores se corresponde con los términos $\gamma_{i}( A_{i}(x))$.
 }
 }
 \normalmarginpar
@@ -142,7 +174,9 @@ \subsection*{Diferencia con otras definiciones} \label{subsection:diferencia-ot
 Las diferencias con nuestra definición son las siguientes:
 \begin{itemize}
-    \item \textbf{Desaparece la función de clasificación $\theta$}. El motivo es que es un artificio teóricamente innecesario de acorde al teorema de convergencia universal \ref{teo:MFNAUA}.
-    \item \textbf{Se elimina un parámetro} de la transformación afín de la última capa; puesto que no es necesario para la convergencia de nuevo por \ref{teo:MFNAUA} lo hemos eliminado.
-    \item Nuestras funciones de activación son funciones medibles en vez de diferenciables ya que a priori no existe ninguna hipótesis teórica que fuerce a tal restricción, como hemos visto en \ref{teo:MFNAUA}.
+    \item \textbf{Desaparece la función de clasificación $\theta$}.
+    \item \textbf{Se elimina un parámetro} por cada neurona.
+    \item No se le exige condición de diferenciabilidad, ya que a priori no existe ninguna hipótesis teórica que fuerce tal restricción, como hemos visto en \ref{teo:MFNAUA}.
 \end{itemize}
+Se justificarán y se matizarán tales decisiones en
+la sección \ref{ch05:justifica-modelo}.
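A modo ilustrativo de la fórmula anterior, el siguiente boceto mínimo en Julia evalúa una red neuronal de una capa oculta y salida unidimensional, $h(x) = \sum_{i=1}^{n} \beta_i \psi(A_i(x))$ con $A_i(x) = w_i \cdot x + b_i$. Es un esquema hipotético con fines didácticos: tanto la elección de ReLU como los nombres ψ, W, b y β son supuestos nuestros y no forman parte de la biblioteca desarrollada en el trabajo.

# Boceto hipotético: evalúa h(x) = ∑ᵢ βᵢ ψ(Aᵢ(x)) con Aᵢ(x) = wᵢ ⋅ x + bᵢ;
# cada componente del vector s se corresponde con una neurona de la capa oculta.
ψ(t) = max(zero(t), t)              # ReLU como ejemplo de función de activación

function h(x, W, b, β)
    s = W * x .+ b                  # transformaciones afines Aᵢ(x), una por neurona
    return sum(β .* ψ.(s))          # combinación lineal de las activaciones
end

# Ejemplo de uso: d = 2 entradas, n = 3 neuronas ocultas y pesos aleatorios
W, b, β = rand(3, 2), rand(3), rand(3)
println(h([1.0, 2.0], W, b, β))

Obsérvese que aumentar el número de neuronas equivale simplemente a añadir filas a W y entradas a b y β, sin cambiar la estructura del cálculo.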
diff --git a/Memoria/capitulos/2-Articulo_rrnn_aproximadores_universales/desgranando_el_articulo/articulo_1_primeras_definiciones.tex b/Memoria/capitulos/2-Articulo_rrnn_aproximadores_universales/desgranando_el_articulo/articulo_1_primeras_definiciones.tex
index 689ec4e..418f5cb 100644
--- a/Memoria/capitulos/2-Articulo_rrnn_aproximadores_universales/desgranando_el_articulo/articulo_1_primeras_definiciones.tex
+++ b/Memoria/capitulos/2-Articulo_rrnn_aproximadores_universales/desgranando_el_articulo/articulo_1_primeras_definiciones.tex
@@ -104,38 +104,26 @@ \section{Definiciones primeras}\label{ch:articulo:sec:defincionesPrimeras}
 se demuestra en \cite{DBLP:journals/corr/SonodaM15} y en \cite{non-polynomial-activation-functions},
 funciones de activación no polinómicas y no acotadas.
 \end{definicion}
-\subsection*{Las funciones de activación $\Gamma$ son la clave del aprendizaje}
-
-\label{ch03:funcionamiento-intuitivo-funcion-activacion}
-
-Las \textit{funciones de activación} serán definidas con precisión en la sección
-\ref{def:funcion_activacion_articulo}, pero para continuar con nuestro razonamiento
-pensemos en ellas como una función cualquiera que no sea un polinomio.
-
-Una vez liberados de tratar de buscar un polinomio que aproxime la función en todo
-el dominio, podemos pensar en aproximar la imagen de acorde a intervalos.
-
-% Issue #114 TODO : Añadir gráficos cuando esté implementada una red neuronal
-
-% Ejemplo de cómo se aproxima gracias a la forma de la función de activación
-\begin{figure}[H]
-    \includegraphics[width=\textwidth]{1-Introduccion_redes_neuronales/idea-como-aproxima-redes-neuronales.jpeg}
-    \caption{Cómo actúa en la aproximación una función de activación}
-    \label{img:idea-como-aproxima-redes-neuronales}
-  \end{figure}
-
-La idea intuitiva es que para una capa oculta con una neurona,
-lo que se hace es \textit{colocar} por escalado y simetrías la imagen de la función de activación.
-
-% Ejemplo trivial de como la forma de la función de activación influye en aproximar mejor
-\begin{figure}[h!]
-    \includegraphics[width=0.8\textwidth]{1-Introduccion_redes_neuronales/Idea-forma-función-Activación.jpg}
-    \caption{Cómo afecta la forma de la función de activación}
-    \label{img:como afecta la forma de la función de aproximación}
-\end{figure}
+% Nota sobre que las funciones de activación
+% son clave en el aprendizaje
+\setlength{\marginparwidth}{\smallMarginSize}
+\marginpar{\maginLetterSize
+  \iconoClave \textcolor{darkRed}{
+    \textbf{
+      Las funciones de activación $\Gamma$ son la clave del aprendizaje
+    }
+  }
+  \label{ch03:funcionamiento-intuitivo-funcion-activacion}
+La idea intuitiva es que lo que hace cada neurona
+es \textit{colocar}, por transformaciones afines, la imagen de la función de activación en el espacio con el fin
+de aproximar una región de la imagen de la función ideal.
+Por lo tanto, la forma que ésta tenga será determinante en el número de neuronas necesarias para la convergencia.
+}
+\setlength{\marginparwidth}{\bigMarginSize}
 % Fin del tratamiento de funciones de activación
+
 Para cualquier natural $d$ mayor que cero
 denotaremos por $\afines$ al conjunto de todas las \textbf{funciones afines} de $\R^d$ a $\R$.
Es decir, el conjunto de funciones de la forma $A(x) = w \cdot x + b$, donde
 $x$ y $w$ son vectores de $\R^d$, $b \in \R$ es un escalar
@@ -148,9 +136,9 @@ \subsection*{Las funciones de activación $\Gamma$ son la clave del aprendizaje}
 \iconoAclaraciones \textcolor{dark_green}{
   \textbf{Idea tras la definición de $\pmc$.}
 }
-Nótese que de acorde a nuestra definición \ref{definition:redes_neuronales_una_capa_oculta}
-lo que se ha definido es la clase de las redes
-neuronales de una capa oculta y salida de una dimensión.
+Nótese que, de acuerdo con la definición \ref{definition:redes_neuronales_una_capa_oculta},
+a lo que nos estamos refiriendo es a la clase de las redes
+neuronales de una capa oculta y \textbf{salida de una dimensión}.
 Donde cada sumando representa una neurona de la capa oculta.
 }
 %%% fin nota
@@ -170,8 +158,8 @@ \subsection*{Las funciones de activación $\Gamma$ son la clave del aprendizaje}
   \end{split}
  \end{equation}
- Conforme avancen los resultado teórico veremos que $\pmc$
- no depende de la función $G$ seleccionada, así pues tras enunciar tales resultados nos referiremos sin ambigüedad a tal conjunto como $\rrnn$.
+ Conforme avancen los resultados teóricos veremos que $\pmc$
+ no depende de la función $G$ seleccionada; así pues, tras enunciar tales resultados nos referiremos sin ambigüedad a tal conjunto como $\rrnn$.
 \end{definicion}
@@ -187,9 +175,7 @@ \subsection*{Las funciones de activación $\Gamma$ son la clave del aprendizaje}
  funciones de esta clase seremos capaces de aproximar cualquier función continua. De esta manera este conjunto actuará de nexo de unión
  entre las funciones continuas y las redes neuronales facilitando las demostraciones. De ahora en adelante nos
-  referiremos a este conjunto como al \textbf{de anillo de aproximación} (como curiosidad, el nombre proviene a que
-  tiene estructura de anillo y que se utilizará para
-  aproximar funciones continuas).
+  referiremos a este conjunto como el \textbf{anillo de aproximación}.
 }
 \begin{definicion} [Anillo de aproximación de redes neuronales]\label{def:articulo_abstracción_rrnn}
@@ -212,6 +198,7 @@ \subsection*{Las funciones de activación $\Gamma$ son la clave del aprendizaje}
 \reversemarginpar
 %%% Nota margen sobre función medible
+ \setlength{\marginparwidth}{\smallMarginSize}
 \marginpar{\maginLetterSize
 \iconoAclaraciones \textcolor{dark_green}{
   \textbf{
@@ -224,9 +211,11 @@ \subsection*{Las funciones de activación $\Gamma$ son la clave del aprendizaje}
  predecir la naturaleza de datos nuevos. Es por ello necesario suponer
  que estos datos están regidos por alguna regla, la cual
  puede ser todo lo extraña posible pero que toma valores
-  que pueden ser observables y cuantificables en la mayoría de los casos, estos comportamientos son formalizados
+  que pueden ser observables y cuantificables en la mayoría de los casos;
+  estos comportamientos son formalizados
  matemáticamente con \textbf{funciones medibles}.
 }
+\setlength{\marginparwidth}{\bigMarginSize}
 \normalmarginpar
 Introducimos a continuación la notación de los conjuntos de funciones que seremos capaces de aproximar.
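Para fijar ideas sobre las definiciones anteriores, mostramos un boceto hipotético en Julia de un elemento concreto de $\pmcg$, es decir, de una función de la forma $f(x) = \sum_{j} \beta_j \prod_{k} G(A_{jk}(x))$ con $A_{jk} \in \afines$; la elección $G = \tanh$ y los pesos concretos son supuestos nuestros a modo de ejemplo:

# Boceto hipotético de un elemento del anillo de aproximación ∑∏(G):
# f(x) = ∑ⱼ βⱼ ∏ₖ G(Aⱼₖ(x)), con funciones afines Aⱼₖ(x) = wⱼₖ ⋅ x + bⱼₖ.
G(t) = tanh(t)                      # cualquier función continua no constante sirve
A(w, b) = x -> sum(w .* x) + b      # construye una función afín de R² a R

# q = 2 sumandos: el primero con producto de dos factores, el segundo con uno solo
f(x) = 0.5 * G(A([1.0, -1.0], 0.0)(x)) * G(A([2.0, 0.5], 1.0)(x)) +
       1.5 * G(A([0.0, 1.0], -1.0)(x))

println(f([0.3, 0.7]))              # evaluación en un punto de R²

Nótese que si todos los productos constan de un único factor se recupera exactamente la clase $\pmc$ de redes neuronales de una capa oculta.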
@@ -260,8 +249,6 @@ \subsection{ Reflexión sobre el tipo de funciones que se pueden aproximar}
 \begin{definicion} [Subconjunto denso]
 % Nota margen de denso
-  \reversemarginpar
-  \setlength{\marginparwidth}{\smallMarginSize}
  \marginpar{\maginLetterSize
  \iconoAclaraciones \textcolor{dark_green}{
    \textbf{Idea intuitiva conjunto denso.}
@@ -270,7 +257,6 @@ \subsection{ Reflexión sobre el tipo de funciones que se pueden aproximar}
  se está diciendo que \textbf{los elementos de $S$ son capaces de aproximar cualquier elemento de $T$
  con la precisión que se desee}.
 }
-  \normalmarginpar
 Dado un subconjunto $S$ de un espacio métrico $(X, \rho)$,
 se dice que $S$ es denso por la distancia $\rho$ en el subconjunto
 $T$ si para todo $\varepsilon$ positivo y cualquier $t \in T$ existe un $s \in S$ tal
@@ -280,6 +266,7 @@ \subsection{ Reflexión sobre el tipo de funciones que se pueden aproximar}
 Un ejemplo habitual es en el espacio métrico $(\R, |\cdot|)$ con $|\cdot|$ el valor absoluto, el subconjunto $T = \R$ y
 $S$ los números irracionales, $S = \R \setminus \Q$.
+\newpage
 \begin{definicion}
   Un subconjunto $S$ de $\fC$
   se dice que es \textbf{uniformemente denso para compactos} en $\fC$
diff --git a/Memoria/capitulos/2-Articulo_rrnn_aproximadores_universales/desgranando_el_articulo/articulo_2_teorema_1_hasta_lema_2_2.tex b/Memoria/capitulos/2-Articulo_rrnn_aproximadores_universales/desgranando_el_articulo/articulo_2_teorema_1_hasta_lema_2_2.tex
index 83f2e02..22aebc3 100644
--- a/Memoria/capitulos/2-Articulo_rrnn_aproximadores_universales/desgranando_el_articulo/articulo_2_teorema_1_hasta_lema_2_2.tex
+++ b/Memoria/capitulos/2-Articulo_rrnn_aproximadores_universales/desgranando_el_articulo/articulo_2_teorema_1_hasta_lema_2_2.tex
@@ -99,7 +99,6 @@ \section{Primeros resultados}
 \end{enumerate}
 Veamos que $\pmcg$ separa puntos para cada compacto $K \subset \R^r$.
-
 Por ser $G$ no constante existirán $a,b \in \R$ distintos cumpliendo que
 $G(a) \neq G(b)$. Fijados $x,y \in K$ tomamos entonces cualquiera de las funciones afines
 que cumplen que $A(x) = a$ y $A(y)=b$ \footnote{Sabemos que al menos una habrá, ya que podemos plantear la función afín
@@ -116,7 +115,7 @@ \section{Primeros resultados}
 \subsection{Observaciones y reflexiones sobre el teorema de convergencia real en compactos}
-Con esto lo que acabamos de probar que una estructura más general de\textit{feedforward neural networks} con tan solo una capa oculta son capaces de aproximar cualquier
+Con esto lo que acabamos de probar es que una estructura más general de \textit{feedforward neural networks} con tan solo una capa oculta es capaz de aproximar cualquier
 función continua en un compacto.
 Cabe destacar que a la función $G$, que haría el papel de función de activación, solo
 se le ha pedido como hipótesis ser continua.
@@ -135,13 +134,15 @@ \subsection{Observaciones y reflexiones sobre el teorema de convergencia real en
 \marginpar{\maginLetterSize
 \iconoProfundizar \textcolor{blue}{\textbf{Nueva hipótesis de optimización}}
 El corolario \ref{cor:se-generaliza-G-a-una-familia} abre la puerta a preguntarse si la combinación de diferentes funciones de activación
-podría mejorar los resultados de alguna manera.
+podría mejorar los resultados de alguna manera. De hecho,
+trataremos sobre esto en el capítulo \ref{ch08:genetic-selection}.
}
 \normalmarginpar
 \begin{aportacionOriginal}
-\begin{corolario}[Pueden combinarse distintas funciones de activación en una misma red neuronal] \label{cor:se-generaliza-G-a-una-familia}
+\begin{corolario}[Pueden combinarse distintas funciones de activación en una misma red neuronal]
+  \label{cor:se-generaliza-G-a-una-familia}
   Una misma red neuronal puede estar constituida por
   una familia de funciones continuas no constantes $\Gamma$,
   bastará con generalizar $\pmcg$ a $\sum \prod ^d (\Gamma)$ donde
@@ -167,11 +168,10 @@ \subsection{Observaciones y reflexiones sobre el teorema de convergencia real en
 Notemos que este resultado no da pista alguna de
 las ventajas de una función frente a otra, ni cómo afecta a la
 \textit{velocidad de convergencia}.
-
 Es más, a priori se estaría aumentando el espacio de búsqueda,
 lo que significaría que \textit{dificultaría el encontrar la solución}, es decir un aumento en coste y aumento del error de aproximación.
-Sin embargo, como ya mostrábamos en
+Sin embargo, como ya indicábamos en
 \ref{ch03:funcionamiento-intuitivo-funcion-activacion}
 utilizar una función de activación frente a otra
 varía el número de neuronas necesarias para
@@ -315,48 +315,43 @@ \subsection{Observaciones y reflexiones sobre el teorema de convergencia real en
 % 2 -> 3
 Probaremos ahora que (2) $\Longrightarrow$ (3).
-
- Por (2) se tiene que sea cual sea el $\varepsilon$ cumpliendo que
+ Por (2) se tiene que sea cual sea $\varepsilon$ cumpliendo que
 $0 < \varepsilon \leq 2$ existirá un natural $n_0$ a partir del cual, cualquier
 otro natural $n$ satisface que
 \begin{equation}
-  \mu \{
+  \mu \left\{
    x : |f_n(x) - f(x)| >  \frac{\varepsilon}{2}
-  \}
+  \right\}
  <
-  \frac{\varepsilon}{2},
+  \frac{\varepsilon}{2}.
 \end{equation}
-
 Gracias a esta desigualdad, para cualquier $n > n_0$ podemos acotar la siguiente integral:
 \begin{equation}
  \int \min \{ |f_n(x) - f(x)|, 1\} d\mu(x)
  \leq
  \frac{\varepsilon}{2} (1-\frac{\varepsilon}{2}) + 1 \cdot \frac{\varepsilon}{2}
-  = \varepsilon - \frac{\varepsilon^2}{4} < \varepsilon.
+  = \varepsilon - \frac{\varepsilon^2}{4} < \varepsilon,
 \end{equation}
 probando con ello la implicación (2) $\Longrightarrow$ (3).
 % 3 -> 1
 Finalmente comprobaremos la implicación (3) $\Longrightarrow$ (1).
-
 Para cada $n\in \N$ llamamos $g_n = \min\{|f_n - f|, 1\}$.
 Por (3), dado $0 < \varepsilon < 1$, existe un $n_0 \in \N$ de modo que si $n \geq n_0$ se cumple que
 \begin{equation}\label{eq:definiciones_Básicas_Integral_GN_menor_Epsilon_Cuadrado}
-  \int g_n d\mu < \varepsilon^2
+  \int g_n d\mu < \varepsilon^2.
 \end{equation}
 Como $\varepsilon < 1$ tenemos que
 \begin{equation}
  \{ x; g_n(x) > \varepsilon \}
  =
-  \{ x; |f_n - f| > \varepsilon \}
+  \{ x; |f_n(x) - f(x)| > \varepsilon \},
 \end{equation}
-
 luego
-
 \begin{equation}
  \mu\{
    x; |f_n(x) - f(x)| > \varepsilon
  \}
  =
@@ -366,9 +361,8 @@ \subsection{Observaciones y reflexiones sobre el teorema de convergencia real en
  \int_{g_n(x) > \varepsilon} g_n d\mu
  < \varepsilon
  \quad
-  \forall n \geq n_0
+  \forall n \geq n_0,
 \end{equation}
-
 donde se ha usado la desigualdad de Chebyshev para
 $g_n$
 y la desigualdad (\refeq{eq:definiciones_Básicas_Integral_GN_menor_Epsilon_Cuadrado}).
@@ -410,12 +404,12 @@ \subsection{Observaciones y reflexiones sobre el teorema de convergencia real en
 \iconoAclaraciones \textcolor{dark_green}{
   \textbf{Idea intuitiva lema \ref{lema:A_1_C_es_denso_en_M}:}}
 }
 \marginpar{\maginLetterSize
-  Las funciones continuas pueden tomar formas muy variopintas, estando incluso no acotadas.
La función de Dirichlet definida en $D(x) = 1$ si $x$ es irracional y $D(x)=0$ si $x$ es racional, es medible pero no es continua ya que presenta infinitas discontinuidades.
+  Las funciones medibles pueden tomar formas muy variopintas, estando incluso no acotadas; un ejemplo de ello es la función de Dirichlet definida como $D(x) = 1$ si $x$ es irracional y $D(x)=0$ si $x$ es racional. Esta función es medible pero no es continua ya que presenta infinitas discontinuidades.
 }
 \marginpar{\maginLetterSize
   Sin embargo, las funciones continuas son
   más simples, fáciles de entender y manejar. Gracias al lema \ref{lema:A_1_C_es_denso_en_M}
-  acabamos de probar que \textbf{podemos aproximar en
+  se prueba el llamativo resultado de que \textbf{podemos aproximar en
  casi todos sus puntos cualquier función medible a partir de una continua.}
 }
 \marginpar{\maginLetterSize
@@ -430,7 +424,6 @@ \subsection{Observaciones y reflexiones sobre el teorema de convergencia real en
  =
  g_n.
 \end{equation}
-
 Por lo que
 \begin{equation} \label{eq:lema3_2_integral_en_compacto_K}
  \int_K g_n d\mu
@@ -439,7 +432,7 @@ \subsection{Observaciones y reflexiones sobre el teorema de convergencia real en
  \leq
  \frac{\varepsilon}{2} .
 \end{equation}
-
+ %
 Acotando el primer sumando por la medida del complemento de la región integrada
 y en virtud de
 (\refeq{eq:lema3_2_integral_en_compacto_K})
diff --git a/Memoria/capitulos/2-Articulo_rrnn_aproximadores_universales/desgranando_el_articulo/articulo_3_teorema_2_2.tex b/Memoria/capitulos/2-Articulo_rrnn_aproximadores_universales/desgranando_el_articulo/articulo_3_teorema_2_2.tex
index 35d5e1f..a2958af 100644
--- a/Memoria/capitulos/2-Articulo_rrnn_aproximadores_universales/desgranando_el_articulo/articulo_3_teorema_2_2.tex
+++ b/Memoria/capitulos/2-Articulo_rrnn_aproximadores_universales/desgranando_el_articulo/articulo_3_teorema_2_2.tex
@@ -13,7 +13,6 @@
 \end{teorema}
 % Nota idea intuitiva teorema 2.2
-  \setlength{\marginparwidth}{\smallMarginSize}
  \marginpar{\maginLetterSize\raggedright
  \iconoAclaraciones \textcolor{dark_green}{
    \textbf{
@@ -26,10 +25,10 @@
  Se prueba en el teorema \ref{teo:2_2_denso_función_continua} que con una \textbf{versión más general de una red neuronal}
  (perteneciente a $\pmcg$)
  \textbf{se es capaz de aproximar cualquier función medible}.
-  La idea de la demostración es sencilla, sabemos aproximar una función medible con una continua y a su vez una continua con una red neuronal generalizada, luego sabemos aproximar una función medible con una red neuronal generalizada.
+  La idea de la demostración es sencilla: sabemos aproximar una función medible con una continua y a su vez una continua con una función del \textit{anillo de aproximación de redes neuronales}, luego sabemos aproximar una función medible con una función del \textit{anillo de aproximación de redes neuronales}.
 }
-  \setlength{\marginparwidth}{\bigMarginSize}
 % Fin de la nota
+
 \begin{proof}
 Debemos probar que para cualquier función $f \in \fM$
 existe una sucesión de funciones $\{h_n\}_{n\in \N}$ contenida en $\pmcg$ y
@@ -78,7 +77,7 @@
 Procedamos a realizar la siguiente prueba constructiva.
 Tomamos fijo pero arbitrario un $\varepsilon > 0$, que sin pérdida de generalidad supondremos menor que uno.
- Para que $H_\varepsilon$ pertenezca a $\mathcal{H}_\psi{(\R, \R)}$ deberá de ser de la
+ Para que $H_\varepsilon$ pertenezca a $\mathcal{H}_\psi{(\R, \R)}$ deberá ser de la
 forma $\sum^{q-1}_{j=1} b_j \psi( A_j(\lambda))$; debemos encontrar, por ende, el número de sumatorias, $q-1$; esa misma cantidad de constantes reales $b_j$ y funciones afines $A_j$.
 Para ello tomamos como $q$ cualquier número natural que cumpla que
@@ -299,7 +298,7 @@
  <
  \varepsilon.
 \end{equation}
-
+ %
 Por el lema \ref{lema:a_2_paso_previo_denso}
 existe una función
 $H_{\delta}(\cdot) = \sum_{t=1}^T \beta_t \psi(A_t(\cdot))$ cumpliendo que
@@ -307,7 +306,6 @@
 \begin{equation}
  \sup_{\lambda \in \R} |F(\lambda) - H_{\delta}(\lambda) | < \delta.
 \end{equation}
-
 Usando \refeq{eq:teorema_2_3__1} para
 $a_k = F(A_k(x))$ y $b_k = H_\delta(A_k(x))$
 obtenemos
@@ -321,7 +319,7 @@
  <
  \varepsilon.
 \end{equation}
-
+ %
 Puesto que $H_\delta$ es de la forma $\sum_{t=1}^T \beta_t \psi(A_t(\cdot))$
 y porque $A_t(A_k(\cdot)) \in \afines$ se tiene por la desigualdad \refeq{eq:teorema2_3__3} que
 $\prod ^l_{k=1} H_\delta(A_k(\cdot)) \in \rrnng.$
@@ -545,10 +543,9 @@
 Definimos
 \begin{equation}
-  B = \max \{ |\beta_j| : j \in \{1, ..., q \}\}
+  B = \max \{ |\beta_j| : j \in \{1, ..., q \}\},
 \end{equation}
-
- En virtud del lema \ref{lema:A_3_función_activación_continua_con_arbitaria}
+ en virtud del lema \ref{lema:A_3_función_activación_continua_con_arbitaria}
 podemos encontrar
 $\cos_{M, \frac{\varepsilon}{q B}} \in \sum(\psi)$ cumpliendo que
 \begin{equation}
@@ -701,7 +698,7 @@
   Para cualquier función de activación $\psi$, $d \in \N$
   y medida de probabilidad $\mu$ en $(\R^d, B^d)$,
   se tiene que $\rrnn$ es uniformemente denso para compactos
-  en $\fC$ y denso en $\fM$ para a la distancia $\dist$.
+  en $\fC$ y denso en $\fM$ para la distancia $\dist$.
 \end{teorema}
 % Nota idea intuitiva lema de que C es denso en M
@@ -730,9 +727,9 @@
 \begin{proof}
 En el lema anterior acabamos de ver que $\rrnn$ es uniformemente denso en compactos en $\fC$
-y gracias al lema \ref{lema:2_2_convergencia_uniforme_en_compactos}
-(Si $\{f_n\}$ es una sucesión de funciones en $\fM$ que converge
-uniformemente en un compacto a $f$ entonces $\rho_{\mu}(f_n, f) \longrightarrow 0$.)
+y gracias al lema \ref{lema:2_2_convergencia_uniforme_en_compactos}, que recordemos afirma que
+si $\{f_n\}$ es una sucesión de funciones en $\fM$ que converge
+uniformemente en un compacto a $f$ entonces $\rho_{\mu}(f_n, f) \longrightarrow 0$;
 esto implica que $\rrnn$ sea $\dist$-denso en $\fC$.
diff --git a/Memoria/capitulos/2-Articulo_rrnn_aproximadores_universales/desgranando_el_articulo/articulo_4_colorario_2_1.tex b/Memoria/capitulos/2-Articulo_rrnn_aproximadores_universales/desgranando_el_articulo/articulo_4_colorario_2_1.tex
index 4e03d8b..6ddd75e 100644
--- a/Memoria/capitulos/2-Articulo_rrnn_aproximadores_universales/desgranando_el_articulo/articulo_4_colorario_2_1.tex
+++ b/Memoria/capitulos/2-Articulo_rrnn_aproximadores_universales/desgranando_el_articulo/articulo_4_colorario_2_1.tex
@@ -55,17 +55,26 @@
 % Nota idea intuitiva corolario 2.1
 \reversemarginpar
+ \setlength{\marginparwidth}{\smallMarginSize}
+
 \marginpar{\maginLetterSize
 \iconoAclaraciones \textcolor{dark_green}{
-    \textbf{Idea intuitiva corolario \ref{cor:2_1}}
+    \textbf{Idea intuitiva
+    corolario \ref{cor:2_1}}
    }
 }
 \marginpar{\maginLetterSize
 Este teorema corrige la carencia sobre la precisión del error que describíamos
-    en la idea intuitiva del teorema \ref{teo:2_4_rrnn_densas_M}.
Podemos encontrar una red neuronal que aproxime cualquier función medible que queramos en todos los puntos del espacio que queramos.
+    en la idea intuitiva del
+    teorema \ref{teo:2_4_rrnn_densas_M}.
+    Podemos encontrar una red neuronal que
+    aproxime cualquier función medible que
+    queramos en todos los puntos del espacio
+    que deseemos.
 }
+ \setlength{\marginparwidth}{\bigMarginSize}
 \normalmarginpar
-
+ % Fin de la nota intuitiva
 \begin{proof}
 Sea $\varepsilon > 0$ fijo pero arbitrario.
 Gracias al teorema de Lusin \ref{teo:Lusin}
diff --git a/Memoria/capitulos/2-Articulo_rrnn_aproximadores_universales/desgranando_el_articulo/articulo_5_colorarios_lp.tex b/Memoria/capitulos/2-Articulo_rrnn_aproximadores_universales/desgranando_el_articulo/articulo_5_colorarios_lp.tex
index 60e3ff8..d4415fe 100644
--- a/Memoria/capitulos/2-Articulo_rrnn_aproximadores_universales/desgranando_el_articulo/articulo_5_colorarios_lp.tex
+++ b/Memoria/capitulos/2-Articulo_rrnn_aproximadores_universales/desgranando_el_articulo/articulo_5_colorarios_lp.tex
@@ -5,7 +5,7 @@
 % Contenido del artículo 5: Colorarios LP
 %***************************************************************
 \section{Generalización a espacios $L_p$}
-
+\label{ch04:espacios-Lp}
 Hasta ahora habíamos considerado el espacio de funciones
 continuas $\fC$ como subespacio dentro
 del espacio de funciones medibles $\fM$.
@@ -33,9 +33,7 @@ \section{Generalización a espacios $L_p$}
  de funciones $f \in \fM$ tales que
  \begin{equation}
    \int |f(x)|^p d\mu < \infty.
-  \end{equation}
-
-
+  \end{equation}
  Se define la norma de $L_p$ como
  \begin{equation}
    \| f\|_p
@@ -165,7 +163,7 @@ \section{Generalización a espacios $L_p$}
 %Corolario 2.4
 \begin{corolario} \label{corolario:2_4_conjunto_finito}
  Sea $\mu$ una medida, que para
-  un conjunto finito de puntos $O$ cumple que $\mu(0)=1$,
+  un conjunto finito de puntos $O$ cumple que $\mu(O)=1$,
  entonces, para cualquier función medible
  $g \in \fM$ y sea cual sea $\varepsilon >0$
  existe $f \in \rrnn$ la cual cumple que
@@ -217,7 +215,6 @@ \section{Generalización a espacios $L_p$}
    |f(x) - g(x)| > \varepsilon
  \}
  = 0.$
-
 Por lo que acabamos de probar, como queríamos, que
 \begin{equation}
  \mu\{
@@ -256,15 +253,20 @@ \section{Generalización a espacios $L_p$}
 El resultado nos indica que podemos obtener una
 red neuronal $h$ que aproxime tal clasificador, pero
 \textbf{tal red neuronal no necesariamente tomará valores discretos}, es decir,
- pudiera darse el caso que
- $h( \{ x : g(x)=0 \}) \subset [-0.2,0.3]$ y que
- $h(\{ x : g(x)=1 \}) \subset [0.9,1.2]$,
- por lo que se pone de manifiesto en este resultado,
- que en caso de requerirse de una salida completamente
- discreta debería de componerse con otra función $\theta$
+ pudiera darse el caso en que las imágenes
+ queden contenidas en un rango, por ejemplo:
+ $$h( \{ x : g(x)=0 \}) \subset [-0.2,0.3]$$
+ y que
+ $$h(\{ x : g(x)=1 \}) \subset [0.9,1.2],$$
+ por lo que se pone de manifiesto en este
+ resultado,
+ que en caso de requerirse
+ una salida completamente
+ discreta debería de componerse
+ con otra función $\theta$
 tal que
- $\theta \circ h(\{ x : g(x)=0 \})=0$ y
- $\theta \circ h(\{ x : g(x)=1 \})=1$.
+ $$\theta \circ h(\{ x : g(x)=0 \})=0$$ y
+ $$\theta \circ h(\{ x : g(x)=1 \})=1.$$
} @@ -365,7 +367,7 @@ \section{Generalización a espacios $L_p$} Por tanto \begin{align} 0 &\leq \psi(-M) \leq \psi(M_1) = 0 \quad \text{ luego } \quad \psi(-M) = 0, \\ - 1 &\geq \psi(M) \geq \psi(M_2) = 1 \quad \text{ luego } \quad\psi(M) = 1 + 1 &\geq \psi(M) \geq \psi(M_2) = 1 \quad \text{ luego } \quad\psi(M) = 1. \end{align} Gracias a estas desigualdades es fácil ver que @@ -382,7 +384,7 @@ \section{Generalización a espacios $L_p$} \begin{equation} \psi(x)= \left\{ \begin{array}{lcc} 0 & si & x \leq 0 \\ - \frac{| x |}{1+ | x |}& si & 0< x + \frac{| x |}{1+ | x |}& si & 0< x. \end{array} \right. \end{equation} @@ -437,6 +439,7 @@ \section{Generalización a espacios $L_p$} \end{proof} % Nota intuitiva sobre la demostración del Teorema 2.5 +\setlength{\marginparwidth}{\smallMarginSize} \marginpar{\maginLetterSize\raggedright \iconoAclaraciones \textcolor{dark_green}{ \textbf{Idea de la demostración del teorema @@ -448,9 +451,13 @@ \section{Generalización a espacios $L_p$} Es interesante reparar en que la demostración se basa en añadir una neurona por cada punto que queramos que tome un valor concreto, esa neurona se activará (es decir, no será nula) cuando la entrada $x$ \textit{sea mayor} que el valor que la activa $x_i$ y vale la diferencia con el valor anterior $x_{i-1}$, es decir $g(x_{i}) - g(x_{i-1})$, como el nodo $x_{i-1}$ - también se activará por ser menor menor, el término $g(x_{i-1})$ se suma a la salida de la red y así como una serie telescópica al final solo resultará el valor $g(x_i)$. + también se activará por ser menor, el término $g(x_{i-1})$ se suma a la salida de la red y así como una serie telescópica al final solo resultará el valor $g(x_i)$. } } +\setlength{\marginparwidth}{\bigMarginSize} +% fin de la nota + + % Teorema 2.5 \begin{teorema}[Sobre el entrenamiento práctico de redes neuronales] \label{teorema:2_5_entrenamiento_redes_neuronales} @@ -472,10 +479,10 @@ \section{Generalización a espacios $L_p$} \textbf{Caso primero} -Suponemos que $\{x_1, \cdots, x_n\} \subset \R$ y tras renombrar +Suponemos que $\{x_1, \ldots, x_n\} \subset \R$ y tras renombrar podemos suponer que \begin{equation} - x_1 < x_2 < \ldots < x_n. + x_1 < x_2 < \cdots < x_n. \end{equation} Por alcanzar la función de activación $\psi$ el cero y el uno, @@ -484,8 +491,7 @@ \section{Generalización a espacios $L_p$} Definiremos de manera recursiva la red neuronal buscada $f_n$. % Nota nueva hipótesis de optimización del Teorema 2.5 -\setlength{\marginparwidth}{\smallMarginSize} -\reversemarginpar + \marginpar{\maginLetterSize\raggedright \iconoProfundizar \textcolor{blue}{ \textbf{Nueva hipótesis de optimización} @@ -502,7 +508,6 @@ \section{Generalización a espacios $L_p$} \setlength{\marginparwidth}{\bigMarginSize} } -\normalmarginpar \begin{itemize} \item Red neuronal $f_1$. @@ -606,7 +611,8 @@ \section{Generalización a espacios $L_p$} \end{equation} Podemos definir entonces $A \in \afines$ por $A_k(x)=B_k(p \cdot x).$ -Fijamos $\beta_k = g(x_k) - g(x_{k-1})$. +Fijamos también +$$\beta_k = g(x_k) - g(x_{k-1}).$$ La red neuronal $f_k$ se calcula como \begin{align} f_k(x) @@ -618,7 +624,7 @@ \section{Generalización a espacios $L_p$} & = \sum_{j=1}^k \beta_j \psi(A_j(x)) = - (g(x_k)-g(x_{k-1})) \psi(A_k(x)) + f_{k-1}(x) + (g(x_k)-g(x_{k-1})) \psi(A_k(x)) + f_{k-1}(x). 
\end{align}
 \end{itemize}
diff --git a/Memoria/capitulos/2-Articulo_rrnn_aproximadores_universales/desgranando_el_articulo/articulo_6_multi_output.tex b/Memoria/capitulos/2-Articulo_rrnn_aproximadores_universales/desgranando_el_articulo/articulo_6_multi_output.tex
index 18179fb..794321d 100644
--- a/Memoria/capitulos/2-Articulo_rrnn_aproximadores_universales/desgranando_el_articulo/articulo_6_multi_output.tex
+++ b/Memoria/capitulos/2-Articulo_rrnn_aproximadores_universales/desgranando_el_articulo/articulo_6_multi_output.tex
@@ -4,8 +4,8 @@
 %***************************************************************
 % Contenido del artículo 5: Generalización a multi-output
 %***************************************************************
-\section{Generalización para \textit{multi-output neuronal networks}}
-
+\section{Generalización para \textit{multi-output neural networks}}
+\label{ch04:salida-varias-dimensiones}
 En las secciones anteriores se han provisto resultados
 para redes neuronales de salida real. Vamos a generalizar los resultados
 vistos para ser capaces de aproximar funciones continuas o medibles
@@ -33,7 +33,7 @@ \section{Generalización para \textit{multi-output neural networks}}
 con $i \in \{1,\ldots, h\}$.
 \begin{definicion}[Abstracción de una red neuronal con una capa oculta y múltiple salida]
-  Para cualquier función Borel medible $G$, definida de $\R$ a $\R$ y cualquiera naturales positivo
+  Para cualquier función Borel medible $G$, definida de $\R$ a $\R$ y cualesquiera naturales positivos
  $d,s \in \N$ se define la clase de funciones $\rrnnmc$ como
  \begin{equation}
    \begin{split}
@@ -203,7 +203,7 @@ \section{Generalización para \textit{multi-output neural networks}}
  Considerando $f$ compuesta por $h_n + h_1$ sumandos, sus pesos son los siguientes:
  El peso $\tilde{w}$ de las funciones afines:
-  Para cuales quiera
+  Para cualesquiera
  $i \in \{0, 1, \ldots , d \}$ y $j \in \{1, \ldots , h_n, h_{n} + 1, \ldots, h_n + h_1\}$ determinaremos la siguiente casuística
  \begin{enumerate}
diff --git a/Memoria/capitulos/2-Articulo_rrnn_aproximadores_universales/diferencia_entre_los_reales_y_enteros.tex b/Memoria/capitulos/2-Articulo_rrnn_aproximadores_universales/diferencia_entre_los_reales_y_enteros.tex
index ba1a2a5..02c787c 100644
--- a/Memoria/capitulos/2-Articulo_rrnn_aproximadores_universales/diferencia_entre_los_reales_y_enteros.tex
+++ b/Memoria/capitulos/2-Articulo_rrnn_aproximadores_universales/diferencia_entre_los_reales_y_enteros.tex
@@ -5,6 +5,7 @@
 % y que refleja una posible fuente de mejora de las redes neuronales
 % ISSUE #88
 \section{Consideración sobre la capacidad de cálculo}
+\label{ch04:capacidad-calculo}
 Suele pasar peligrosamente desapercibido que el teorema
 \ref{corolario:2_6} recién probado
 asegura que se podrá encontrar una red neuronal en $\rrnnmc$
@@ -34,7 +35,7 @@ \section{Consideración sobre la capacidad de cálculo}
 % Teorema de que podemos tener redes neuronales con parámetros racionales que también converjan.
 \begin{aportacionOriginal}
-  \begin{teorema}
+  \begin{teorema}\label{teo:densidad-racional}
    El espacio $\mathcal{H}(\Q^d, \Q^s)$ es denso en el espacio $\rrnnmc$.
  \end{teorema}
  \begin{proof}
@@ -46,7 +47,7 @@ \section{Consideración sobre la capacidad de cálculo}
    La red neuronal $h^r$ está determinada por un conjunto finito de parámetros; supongamos que hay $q$ y que están determinados por un índice del conjunto $\Lambda$.
-    Sea $\alpha^0_i \in R$, el primer índice $h^r$ definimos la red neuronal $h^1$ con coeficientes $\alpha^1_i$ con $i\in \Lambda$
+    Sean $\alpha^0_i \in \R$, con $i \in \Lambda$, los parámetros de $h^r$; definimos la red neuronal $h^1$ con coeficientes $\alpha^1_i$ con $i\in \Lambda$
    de tal forma que los parámetros que la determinan vienen dados por
    \begin{equation*}
      \alpha^1_i = \alpha^0_i
@@ -93,6 +94,6 @@ \section{Consideración sobre la capacidad de cálculo}
 de las redes neuronales con entradas en los enteros.
 Todas estas pesquisas tienen su interés ya que por las arquitecturas
-actuales, los números flotantes (racionales con un límite de decimales) se calculan en las GPUs, mientras que los enteros en las CPUs, siendo más rápidas la segundas\footnote{En el blog del investigador Long Zhou, se comenta se da una visión favorable sobre las CPUs y cómo en la actualidad se comenten errores a la hora de comparar las GPUs y CPUs, dejo link a la publicación. Consultada por última vez el 23 de mayo
+actuales, los números flotantes (racionales con un límite de decimales y de parte entera) se calculan en las GPUs, mientras que los enteros en las CPUs, siendo más rápidas las segundas\footnote{En el blog del investigador Long Zhou se da una visión favorable sobre las CPUs y cómo en la actualidad se cometen errores a la hora de comparar las GPUs y CPUs; se deja un enlace a la publicación, consultada por última vez el 23 de mayo
 del 2022, URL: \url{https://long-zhou.github.io/2013/02/12/CPU-GPU-comparison.html}} \cite{CPU-vs-GPUS}.
diff --git a/Memoria/capitulos/2-Articulo_rrnn_aproximadores_universales/introduccion.tex b/Memoria/capitulos/2-Articulo_rrnn_aproximadores_universales/introduccion.tex
index fa0fc65..3208cd0 100644
--- a/Memoria/capitulos/2-Articulo_rrnn_aproximadores_universales/introduccion.tex
+++ b/Memoria/capitulos/2-Articulo_rrnn_aproximadores_universales/introduccion.tex
@@ -6,7 +6,7 @@
 %*******************************************************
 \section{Las redes neuronales son aproximadores universales}
-Tras las definición \ref{sec:redes-neuronales-intro-una-capa} de red neural expuesta,
+Tras la definición \ref{sec:redes-neuronales-intro-una-capa} de red neuronal expuesta,
 es pertinente la pregunta de si tal estructura será capaz de aproximar
 con éxito una función genérica desconocida.
@@ -35,26 +35,43 @@ \section{Las redes neuronales son aproximadores universales}
 \ref{teo:TeoremaConvergenciaRealEnCompactosDefinicionesEsenciales}
 e iremos refinando y generalizando los resultados
 hasta probar el resultado enunciado \ref{teo:MFNAUA} para una capa oculta.
+ % Nota margen de denso
+ \setlength{\marginparwidth}{\bigMarginSize}
+ \marginpar{\maginLetterSize
+ \iconoAclaraciones \textcolor{dark_green}{
+   \textbf{Idea intuitiva conjunto denso.}
+ }
+ Si $S$ es denso en $T$,
+ se está diciendo que \textbf{los elementos de $S$ son capaces de aproximar cualquier elemento de $T$
+ con la precisión que se desee}.
+ }
+
+
 El esquema general será:
- % Nota margen de denso
- \setlength{\marginparwidth}{\bigMarginSize}
- \marginpar{\maginLetterSize
- \iconoAclaraciones \textcolor{dark_green}{
-   \textbf{Idea intuitiva conjunto denso.}
- }
- Si $S$ es denso en $T$,
- se está está diciendo que \textbf{los elementos de $S$ son capaces de aproximar cualquier elemento de $T$
- con la precisión que se desee}.
- }
+\begin{align*}
+    \rrnn
+    \xRightarrow[]{\ref{teo:2_4_rrnn_densas_M}}
+    \rrnng
+    \xRightarrow[]{\ref{teorema:2_3_uniformemente_denso_compactos}}
+    \pmcg
+    \xRightarrow[]{\ref{teo:TeoremaConvergenciaRealEnCompactosDefinicionesEsenciales}}
+    \fC
+    \xRightarrow[]{\ref{teo:2_2_denso_función_continua}}
+    \fM.
+\end{align*}
+
 \begin{itemize}
-    \item Las redes neuronales que nosotros hemos modelizado son densas en un espacio más general que hemos denominado \textit{Anillo de aproximación de redes neuronales}.
-    \item El espacio \textit{Anillo de aproximación de redes neuronales} es denso en la funciones continuas.
-    \item Las funciones continuas son densas en los reales.
+    \item Las redes neuronales que nosotros hemos modelizado son densas en un espacio más general que hemos denominado \textit{Anillo de aproximación de redes neuronales}
+    generado a partir de una función de activación $\psi$.
+    \item Este a su vez es denso en el \textit{Anillo de aproximación de redes neuronales}
+    generado a partir de una función medible $G$.
+    \item El espacio \textit{Anillo de aproximación de redes neuronales} es denso en el de las funciones continuas.
+    \item Las funciones continuas son densas en el espacio de funciones medibles.
 \end{itemize}

-Si quisiéramos situar en este esquema a otras definiciones de redes neuronales las situaríamos entre nuestro modelo y el espacio \textit{Anillo de aproximación de redes neuronales}; en el capítulo \ref{chapter:construir-redes-neuronales} se probará la equivalencia de nuestro modelo con estas definiciones y sus beneficios.
+Si quisiéramos situar en este esquema otras definiciones de redes neuronales, las situaríamos entre nuestro modelo y el espacio \textit{Anillo de aproximación de redes neuronales}; en el capítulo \ref{chapter:construir-redes-neuronales} se probará tal resultado y se analizarán los beneficios de basarnos en un modelo más simple.
diff --git a/Memoria/capitulos/3-Teoria_aproximacion/0_objetivos.tex b/Memoria/capitulos/3-Teoria_aproximacion/0_objetivos.tex
index d1055e7..dec693d 100644
--- a/Memoria/capitulos/3-Teoria_aproximacion/0_objetivos.tex
+++ b/Memoria/capitulos/3-Teoria_aproximacion/0_objetivos.tex
@@ -1,18 +1,15 @@
 % !TeX root = ../../tfg.tex
 % !TeX encoding = utf8
 %%%%
-% OBJETIVOS SOBRE EL CAPÍTULO DE TEORÍA DE LA PROXIMACIÓN
+% OBJETIVOS SOBRE EL CAPÍTULO DE TEORÍA DE LA APROXIMACIÓN
 %%%%%%%%
 \chapter{Teoría de la aproximación}
-    \label{chapter:teoria-aproximar}
+    \label{ch03:teoria-aproximar}

 Puesto que queremos fundamentar desde el origen las redes neuronales,
-    supongamos que desconociéramos la existencia de éstas
-    y nos halláramos frente a un problema de aprendizaje \ref{sec:Aprendizaje};
-    el enfoque más natural consistiría en abordar el problema
-    haciendo uso de teoría de la aproximación.
-    ¿Somos capaces de aproximar una función a partir de sus puntos?
+vamos a tratar de abordar el problema de aprendizaje \ref{sec:Aprendizaje} desde resultados clásicos de teoría de la aproximación con el fin de analizar sus carencias y virtudes (ver conclusiones \ref{ch03:conclusiones-teoria-aproximacion}). Para ello nuestro objetivo en esta sección será desarrollar la teoría necesaria para demostrar y analizar el teorema de Stone-Weierstrass.
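+
+A modo de ilustración adelantamos un boceto orientativo en Julia (no forma
+parte de la biblioteca del trabajo; \texttt{f} es aquí una función continua
+de prueba cualquiera) del polinomio de Bernstein $N$-ésimo que se estudiará
+en este capítulo:
+
+\begin{minted}[frame=single, framesep=10pt, baselinestretch=1.2]{Julia}
+# Boceto: N-ésimo polinomio de Bernstein de f en [0,1],
+# B_N(f)(x) = sum_{k=0}^N f(k/N) binom(N,k) x^k (1-x)^(N-k)
+bernstein(f, N) = x -> sum(f(k / N) * binomial(N, k) * x^k * (1 - x)^(N - k)
+                           for k in 0:N)
+
+f(x) = abs(x - 0.5)   # función continua de prueba (supuesta)
+B = bernstein(f, 50)
+# Estimación del error uniforme sobre una malla fina
+maximum(abs(f(x) - B(x)) for x in 0:0.001:1)
+\end{minted}
+
+Al aumentar $N$ la estimación del error uniforme decrece, en consonancia con
+la convergencia uniforme que se probará en este capítulo.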
+ diff --git a/Memoria/capitulos/3-Teoria_aproximacion/1_polinomios_de_Bernstein.tex b/Memoria/capitulos/3-Teoria_aproximacion/1_polinomios_de_Bernstein.tex index ff4240c..f8883e8 100644 --- a/Memoria/capitulos/3-Teoria_aproximacion/1_polinomios_de_Bernstein.tex +++ b/Memoria/capitulos/3-Teoria_aproximacion/1_polinomios_de_Bernstein.tex @@ -191,7 +191,7 @@ \section{Polinomios de Bernstein}\label{ch:Bernstein} \begin{equation} x^2 = \sum_{k=2}^{n} \frac{k(k-1)}{n(n-1)} \binom{n}{k} x^{k} (1-x)^{n-k}. \end{equation} - Como con los términos $k=0$ y $k=1$ se anula, podemos añadir dichos índices sin modificar la suma + Como con los términos $k=0$ y $k=1$ se anulan, podemos añadir dichos índices sin modificar la suma \begin{equation} x^2 = \sum_{k=0}^{n} \frac{k(k-1)}{n(n-1)} \binom{n}{k} x^{k} (1-x)^{n-k}. \end{equation} @@ -361,9 +361,9 @@ \section{Polinomios de Bernstein}\label{ch:Bernstein} Además recordemos que se puede tomar un $n$ que satisfaga (\refeq{eq:cota-de-la-n}) y entonces se concluye que para los valores de $\mathcal{B}_{n x}$ se puede acotar la desigualdad por $\varepsilon$. - Por tanto, para un $n$ convenientemente seleccionado, se ha acotado la desigualdad para los indices $\mathcal{A}_{n x}$ y $\mathcal{B}_{n x}$, es decir todos los elementos de $\{0, \ldots, n\}$, por lo que concluimos que + Por tanto, para un $n$ convenientemente seleccionado, se ha acotado la desigualdad para los índices $\mathcal{A}_{n x}$ y $\mathcal{B}_{n x}$, es decir todos los elementos de $\{0, \ldots, n\}$, por lo que concluimos que \begin{equation*} |f(x) - B_n(x)| \leq 2 \varepsilon, \end{equation*} - independientemente del valor de $x$, por lo que se prueba que la secuencia de polinomios de Bernstein converge uniformemente a $f$ en $I$. + independientemente del valor de $x$, por lo que se prueba que la sucesión de polinomios de Bernstein converge uniformemente a $f$ en $I$. \end{proof} \ No newline at end of file diff --git a/Memoria/capitulos/3-Teoria_aproximacion/2_Weierstrass_approximation_theorem.tex b/Memoria/capitulos/3-Teoria_aproximacion/2_Weierstrass_approximation_theorem.tex index a6e3d8e..5addfef 100644 --- a/Memoria/capitulos/3-Teoria_aproximacion/2_Weierstrass_approximation_theorem.tex +++ b/Memoria/capitulos/3-Teoria_aproximacion/2_Weierstrass_approximation_theorem.tex @@ -4,18 +4,6 @@ %******************************************************* % Teorema de Aproximación Weierstrass %******************************************************* - -Realizando un repaso global habiendo acabado el teorema, - se pueden extraer que, junto a un ingenioso - manejo de operaciones y acotaciones; la clave del resultado reside en las consideraciones -en \refeq{consecuencia:M} y \refeq{consecuencia:delta} y estas a su vez en la -compacidad de $I$. - -Por su parte, la selección del dominio de $I = [0,1]$ viene determinada ya que - los nodos $\{ \frac{k}{N} \colon k\in \{0,..., N\}\}$ sobre los que se construye el \textit{N-ésimo polinomio de Bernstein} deben pertenecer a $I$. - -Sin embargo, tal dificultad es fácilmente salvable con un homeomorfismo. - % Nota margen sobre Idea intuitiva homeomorfismo \marginpar{\maginLetterSize \iconoAclaraciones \textcolor{dark_green}{ @@ -38,6 +26,19 @@ ya que el número de agujeros que tienen es distinto. 
}
+Realizando un repaso global habiendo acabado el teorema,
+    se puede extraer que, junto a un ingenioso
+    manejo de operaciones y acotaciones, la clave del resultado reside en las consideraciones
+en \refeq{consecuencia:M} y \refeq{consecuencia:delta} y estas a su vez en la
+compacidad de $I$.
+
+Por su parte, la selección del dominio de $I = [0,1]$ viene determinada ya que
+    los nodos $\{ \frac{k}{N} \colon k\in \{0,..., N\}\}$ sobre los que se construye el \textit{N-ésimo polinomio de Bernstein} deben pertenecer a $I$.
+
+Sin embargo, tal dificultad es fácilmente salvable con un homeomorfismo.
+
+
+
 Como resultado de relajar el dominio donde se define $f$,
 pidiéndole tan solo compacidad
 nace el siguiente corolario.
@@ -48,10 +49,9 @@
 \begin{proof}
    Si $f$ se encuentra definida en $[a,b]$ con $aevaluate([x])[1],
+        -K_range,K_range, 
+        label="red neuronal n=$n")
+    plot!(f_regression,
+        label="f ideal",
+        title="Comparativa función ideal y red neuronal n=$n")
+
+    \end{minted}
+\end{minipage}
+
+El resultado ha sido el siguiente:
+
+\begin{minipage}{\textwidth}%
+    \begin{minted}
+    [
+    frame=single,
+    framesep=10pt,
+    baselinestretch=1.2,
+    %bgcolor=sutilBackground,
+    %linenos
+    ]{Julia}
+    La red neuronal obtenida es :
+    La matrix de pesos de las neuronas, W1, es:
+    5×2 Matrix{Float64}:
+     0.0      1.0
+     1.33333  3.0
+     1.33333  1.0
+     1.33333 -1.0
+     1.33333 -3.0
+
+    La matrix de pesos de la salida, W2, es:
+    1×5 Matrix{Float64}:
+     16.0855  -15.6038  -3.48169  3.40547  0.693147
+    \end{minted}
+\end{minipage}
+
+Además se obtiene la imagen \ref{img:ch07-ejemplo-5-neuronas-incializacion-pesos}.
+
+\begin{figure}[H]
+    \centering
+    \includegraphics[width=.8\textwidth]{7-algoritmo-inicializar-pesos/f_ideal_y_rn_con_5_neuronas.png}
+    \caption{Ejemplo de ejecución del algoritmo de inicialización de pesos}
+    \label{img:ch07-ejemplo-5-neuronas-incializacion-pesos}
+  \end{figure}
+
+
diff --git a/Memoria/capitulos/5-Estudio_experimental/4_conclusion_intuitiva.tex b/Memoria/capitulos/5-Estudio_experimental/4_conclusion_intuitiva.tex
new file mode 100644
index 0000000..aefc92b
--- /dev/null
+++ b/Memoria/capitulos/5-Estudio_experimental/4_conclusion_intuitiva.tex
@@ -0,0 +1,62 @@
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+% CONCLUSIÓN INTUITIVA
+% para poder finiquitar la memoria a tiempo
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+\section{Utilidad del algoritmo de inicialización de pesos en problemas de teoría de la aproximación clásicos}
+
+El algoritmo que acabamos de implementar no solo
+tiene su utilidad en la inicialización de pesos
+de redes neuronales, sino que resuelve problemas
+de teoría de la aproximación clásicos.
+
+Para mostrar esto habrá que remontarse a los ejemplos
+del comienzo de este trabajo.
+En la sección \ref{ch03:conclusiones-teoria-aproximacion}
+se mostraba que había algunas funciones cuyo error
+de aproximación tendía a infinito.
+Gracias al teorema \ref{teo:MFNAUA} sabemos que
+las redes neuronales convergen llevando el error
+a cero.
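+
+Como comprobación orientativa puede estimarse cómo decrece el error uniforme
+al aumentar $n$. El siguiente boceto asume ya cargada la biblioteca
+\textit{OptimizedNeuralNetwork.jl} y las definiciones de
+\texttt{f\_regression}, \texttt{nn\_from\_data}, \texttt{forward\_propagation}
+y \texttt{RampFunction} del ejemplo anterior:
+
+\begin{minted}[frame=single, framesep=10pt, baselinestretch=1.2]{Julia}
+# Boceto: error uniforme aproximado de la red inicializada según n
+for n in (3, 7, 10, 51, 100)
+    X = Vector(LinRange(-3, 3, n))   # partición homogénea de [-3, 3]
+    h = nn_from_data(X, map(f_regression, X), n, 1)
+    err = maximum(abs(f_regression(x)
+                      - forward_propagation(h, RampFunction, [x])[1])
+                  for x in LinRange(-3, 3, 601))
+    println("n = $n, error uniforme estimado = $err")
+end
+\end{minted}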
+
+Véase cómo se aproxima ahora el ejemplo patológico
+que se mostraba en la figura \ref{fig:aproximacion-lagrage}:
+
+\begin{figure}[H]
+    \centering
+    \begin{subfigure}[b]{0.475\textwidth}
+        \centering
+        \includegraphics[width=\textwidth]{7-algoritmo-inicializar-pesos/f_ideal_y_rn_con_3_neuronas.png}
+        \caption[]%
+        {{\small Red neuronal inicializada a partir de 3 datos}}
+    \end{subfigure}
+    \hfill
+    \begin{subfigure}[b]{0.475\textwidth}
+        \centering
+        \includegraphics[width=\textwidth]{7-algoritmo-inicializar-pesos/f_ideal_y_rn_con_7_neuronas.png}
+        \caption[]%
+        {{\small Red neuronal inicializada a partir de 7 datos}}
+    \end{subfigure}
+    \vskip\baselineskip
+    \begin{subfigure}[b]{0.475\textwidth}
+        \centering
+        \includegraphics[width=\textwidth]{7-algoritmo-inicializar-pesos/f_ideal_y_rn_con_10_neuronas.png}
+        \caption[]%
+        {{\small Red neuronal inicializada a partir de 10 datos}}
+    \end{subfigure}
+    \hfill
+    \begin{subfigure}[b]{0.475\textwidth}
+        \centering
+        \includegraphics[width=\textwidth]{7-algoritmo-inicializar-pesos/f_ideal_y_rn_con_51_neuronas.png}
+        \caption[]%
+        {{\small Red neuronal inicializada a partir de 51 datos}}
+    \end{subfigure}
+    \caption{Ejemplo de aproximación de la función $f(x)$ con redes neuronales.}
+    \label{fig:aproximacion-red-neuronal}
+\end{figure}
+\begin{figure}[H]
+    \centering
+    \includegraphics[width=.5\textwidth]{7-algoritmo-inicializar-pesos/f_ideal_y_rn_con_100_neuronas.png}
+    \caption{Ejemplo de aproximación de la función $f(x)$ con red neuronal de 100 neuronas.}
+    \label{fig:aproximacion-red-neuronal-2}
+  \end{figure}
diff --git a/Memoria/capitulos/5-Estudio_experimental/5_estudio_experimental.tex b/Memoria/capitulos/5-Estudio_experimental/5_estudio_experimental.tex
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/Memoria/capitulos/5-Estudio_experimental/5_estudio_experimental.tex
@@ -0,0 +1 @@
+
diff --git a/Memoria/capitulos/5-Estudio_experimental/combinacion_funciones_activacion.tex b/Memoria/capitulos/5-Estudio_experimental/combinacion_funciones_activacion.tex
index 505c37e..fd4c5b1 100644
--- a/Memoria/capitulos/5-Estudio_experimental/combinacion_funciones_activacion.tex
+++ b/Memoria/capitulos/5-Estudio_experimental/combinacion_funciones_activacion.tex
@@ -3,19 +3,30 @@
 % de activación
 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
 \newpage
-\chapter{Selección genética de las funciones de activación }
+\chapter{Futuros trabajos: Selección genética de las funciones de activación }
 \label{ch08:genetic-selection}
-%TODO: issue 90
-A modo de borrador de lo que tendrá este capítulo:
-\begin{itemize}
-    \item Referencia a la idea intuitiva.
-    \item Comentario sobre complejidad.
-
-    Más funciones de activación -> aumento espacio de búsqueda.
-    Si se usan bien -> disminuye número de neuronas
-    -> se converge más rápido con métodos.
-    \item Cómo introducir los algoritmos genéticos para desarrollar esta idea.
-
-    El algoritmo genético selecciona qué funciones de activación se usan. Tras esto se entrena el algoritmo de de manera usual.
-\end{itemize}
\ No newline at end of file
+Como se indicó en \ref{ch03:funcionamiento-intuitivo-funcion-activacion},
+aunque la convergencia universal no
+dependa de las funciones de activación seleccionadas,
+fijado cierto número de neuronas, éstas sí que pueden
+determinar el error mínimo que podamos alcanzar.
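+
+Esto puede apreciarse de manera orientativa con la biblioteca del trabajo;
+el siguiente boceto reutiliza la red \texttt{h} y la función
+\texttt{f\_regression} del ejemplo del algoritmo de inicialización de pesos:
+con los mismos pesos y el mismo número de neuronas, el error alcanzado
+varía según la función de activación empleada.
+
+\begin{minted}[frame=single, framesep=10pt, baselinestretch=1.2]{Julia}
+# Boceto: mismo número de neuronas, distinta función de activación
+for σ in (RampFunction, ReLU, Sigmoid, HardTanh, CosineSquasher)
+    err = maximum(abs(f_regression(x)
+                      - forward_propagation(h, σ, [x])[1])
+                  for x in LinRange(-3, 3, 601))
+    println("$σ: error uniforme estimado = $err")
+end
+\end{minted}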
+
+Gracias al resultado \ref{cor:se-generaliza-G-a-una-familia} es posible combinar en una red neuronal distintas funciones de activación y que el teorema de convergencia universal \ref{teo:MFNAUA} se mantenga cierto; esto abre la puerta a explorar también durante el entrenamiento diferentes funciones de activación. De hecho ya existen artículos como \cite{FunctionOptimizationwithGeneticAlgorithms} y \cite{Genetic-deep-neural-networks} donde se desarrolla de manera experimental esta idea.
+
+El problema que se tiene es que al aumentar
+el número de funciones de activación candidatas, se está aumentando también el espacio de búsqueda; lo que significa que la complejidad del espacio aumenta y por ende el coste para encontrar una solución.
+
+Se ha intentado paliar la situación con algoritmos genéticos (véanse los artículos recién citados). Sin embargo, existen dos detalles clave y novedosos que podemos aportar: el primero es que \textbf{con nuestro teorema \ref{teo:eficacia-funciones-activation}} se ha obtenido un
+criterio de selección de las funciones de activación que
+tendrán el mismo potencial de aproximación y menor coste;
+esto \textbf{nos ahorraría tener que explorar combinaciones de
+funciones de activación que no vayan a aportar precisión y
+que además aumenten el coste.}
+
+
+El segundo reside en que los artículos que versan sobre el tema
+utilizan modelos de \textit{deep learning} sensibles a las posiciones de las funciones de activación. Es decir, que para $n$ neuronas y $t$ funciones de activación diferentes el tamaño del espacio de búsqueda es $t^n$. Sin embargo, una de las ventajas que presenta \textbf{nuestro modelo} es que \textbf{es invariante ante cambios de posición de las funciones de activación;} por lo que una vez fijado el número de cada tipo de funciones de activación da igual la neurona donde se posicionen (esto es fácil de comprobar observando el modelo \ref{chapter:construir-redes-neuronales} y por la propiedad conmutativa de la suma). Por ejemplo, para $n = 10$ neuronas y $t = 3$ funciones de activación, las $t^n = 59049$ asignaciones posicionales se reducen a $\binom{n+t-1}{t-1} = 66$ combinaciones.
+
+Es decir, \textbf{con nuestro modelo y resultados se estaría reduciendo el espacio de búsqueda y por tanto merecería la pena plantearse de nuevo este tipo de experimentos}.
+
diff --git a/Memoria/capitulos/9-Conclusiones.tex b/Memoria/capitulos/9-Conclusiones.tex
new file mode 100644
index 0000000..1e818bc
--- /dev/null
+++ b/Memoria/capitulos/9-Conclusiones.tex
@@ -0,0 +1,46 @@
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+% Conclusiones del trabajo
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+\chapter{Conclusiones}
+\label{ch09:conclusion}
+Era nuestro objetivo con este trabajo esclarecer
+el motivo y funcionamiento de las redes neuronales y
+a partir de ahí optimizar algún aspecto de ellas.
+
+El sustento teórico queda expuesto en los capítulos
+\ref{ch00:methodology}, \ref{chapter:Introduction-neuronal-networks},
+\ref{ch03:teoria-aproximar}, \ref{chapter4:redes-neuronales-aproximador-universal}
+y \ref{chapter:construir-redes-neuronales}.
+Hemos contribuido al estado del arte actual con los
+resultados:
+
+\begin{itemize}
+    \item La propuesta del uso de un modelo de red neuronal (definición \ref{img:grafo-red-neuronal-una-capa-oculta}).
+    \item La demostración teórica del uso de distintas funciones de activación en
+    el modelo seleccionado (corolario \ref{cor:se-generaliza-G-a-una-familia}).
+    \item La demostración de la densidad del espacio de las redes neuronales racionales en el espacio de las funciones medibles (teorema \ref{teo:densidad-racional}).
+    \item Resultados sobre la irrelevancia del sesgo en las redes neuronales (sección \ref{consideration-irrelevancia-sesgo}).
+    \item Una alternativa al uso de funciones de clasificación (sección \ref{ch05:dominio-discreto}).
+    \item Un criterio de selección de funciones de activación (capítulo \ref{funciones-activacion-democraticas-mas-demoscraticas}).
+    \item Resultados teóricos sobre la equivalencia de funciones de activación (teorema \ref{teo:equivalencia-grafos-activation-function} y
+    corolario \ref{corolario:afine-activation-function}).
+    \item Un algoritmo de inicialización de los pesos de una red neuronal que acelera los métodos de aprendizaje iterativos (capítulo \ref{section:inicializar_pesos}).
+    \item La biblioteca \textit{OptimizedNeuralNetwork.jl}, que aporta un modelo y métodos optimizados para el uso de redes neuronales.
+\end{itemize}
+
+y proponemos como posibles vías de investigación en proyectos futuros:
+
+\begin{itemize}
+    \item Una revisión de la selección genética de funciones de activación con nuestro modelo (capítulo \ref{ch08:genetic-selection}).
+    \item Una investigación sobre la repercusión en la convergencia de la delimitación de la precisión en los coeficientes de las redes neuronales (sección \ref{ch04:capacidad-calculo}).
+\end{itemize}
+
+Pero finalmente y sobre todo, me llevo la grata experiencia de
+todo el proceso que ha conllevado este Trabajo Fin de Grado;
+con las habilidades de gestión bibliográfica, comprensión y expresión rigurosa que ello conlleva;
+así como el método adquirido, la constancia y la paciencia;
+y por supuesto la satisfacción personal de haber sido capaz de acabar un proyecto
+de estas características.
+
+
diff --git a/Memoria/capitulos/Ejemplo-uso-biblioteca.ipynb b/Memoria/capitulos/Ejemplo-uso-biblioteca.ipynb
new file mode 100644
index 0000000..f5137fb
--- /dev/null
+++ b/Memoria/capitulos/Ejemplo-uso-biblioteca.ipynb
@@ -0,0 +1,629 @@
+{
+ "cells": [
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Ejemplo de uso de la biblioteca\n",
+    "Este notebook muestra ejemplos de ejecución de la biblioteca programada. \n",
+    "El contenido del mismo es: \n",
+    "- Cómo importar la biblioteca.\n",
+    "- Inicialización de red neuronal con dimensiones dadas y pesos aleatorios. \n",
+    "- Inicialización de red neuronal a partir de matrices.\n",
+    "- Ejemplo de evaluación con *Forward propagation*.\n",
+    "- Ejemplo de uso del algoritmo de inicialización de pesos.\n",
+    "- Ejemplo de llamadas a las funciones de activación.\n",
+    "- Ejemplo de aprendizaje de una red neuronal con el algoritmo de *Backpropagation*.\n",
+    "\n"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Cómo importar la biblioteca "
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 1,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Bibliotecas auxiliares que no tienen que ver con la nuestra\n",
+    "using Random \n",
+    "using Plots\n",
+    "\n",
+    "# Importamos nuestra biblioteca\n",
+    "include(\"../../OptimizedNeuralNetwork.jl/src/OptimizedNeuralNetwork.jl\")\n",
+    "using Main.OptimizedNeuralNetwork"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Inicialización de red neuronal con pesos aleatorios \n",
+    "\n",
+    "Creamos una red neuronal con pesos inicializados de manera aleatoria. 
" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "La matrix de pesos de las neuronas, W1, es:\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/plain": [ + "3×3 Matrix{Float64}:\n", + " 0.139102 0.0390801 0.854443\n", + " 0.418074 0.125086 0.679176\n", + " 0.39183 0.851716 0.599144" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/plain": [ + "\n", + "La matrix de pesos de la salida, W2, es:\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/plain": [ + "2×3 Matrix{Float64}:\n", + " 0.132426 0.39587 0.150934\n", + " 0.576401 0.456788 0.665346" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/plain": [] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "entry_dimesion = 2\n", + "number_of_hidden_units = 3\n", + "output_dimension = 2\n", + "\n", + "RandomWeightsNN(\n", + " entry_dimesion,\n", + " number_of_hidden_units,\n", + " output_dimension\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "La matriz $W1$ se corresponde a los pesos que se sitúan entre la capa de entrada y la capa oculta. \n", + "\n", + "La matriz $W2$ son los pesos entre la capa oculta y la salida. \n", + "\n", + "## Inicialización de una red neuronal partir de matrices \n", + "Como se comenta detalladamente en la memoria sección 5.2; $(A,S,B)$ son matrices que modelizan una red neuronal.\n", + "\n", + "- $A$ representa los coeficientes que se le aplican a los vectores de entrada. \n", + "- $S$ representa los sesgos que se suma a los respectivos parámetros de entrada. \n", + "- $B$ representan los coeficientes que se aplican a la capa oculta para la salida. 
" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "La matrix de pesos de las neuronas, W1, es:\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/plain": [ + "3×4 Matrix{Int64}:\n", + " 3 4 1 1\n", + " 4 6 3 2\n", + " 1 1 1 3" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/plain": [ + "\n", + "La matrix de pesos de la salida, W2, es:\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/plain": [ + "2×3 Matrix{Int64}:\n", + " 1 2 3\n", + " 3 2 3" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/plain": [] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "S = [1,2,3] # Sesgos que se añaden a los parámetros entrada\n", + "A = [3 4 1; 4 6 3; 1 1 1] # Coeficientes entrada\n", + "B = [1 2 3; 3 2 3] # Coeficientes de salida\n", + "h = FromMatrixNN(S, A, B)\n", + "display(h)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Ejemplo de evaluación con Forward propagation" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "2-element Vector{Int64}:\n", + " 86\n", + " 114" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "v = [1,2,2]\n", + "# Ejemplo de evaluación h(v) \n", + "# con función de activación ReLU y forward_propagation \n", + "forward_propagation(h, ReLU,v )" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Ejemplo de uso del algoritmo de inicialización de pesos \n", + "\n", + "Para ello se utilizará la función\n", + "`nn_from_data(X_train, Y_train, n, M)`" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "La red neuronal obtenida es :" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/plain": [ + "La matrix de pesos de las neuronas, W1, es:\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/plain": [ + "5×2 Matrix{Float64}:\n", + " 0.0 1.0\n", + " 1.33333 3.0\n", + " 1.33333 1.0\n", + " 1.33333 -1.0\n", + " 1.33333 -3.0" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/plain": [ + "\n", + "La matrix de pesos de la salida, W2, es:\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/plain": [ + "1×5 Matrix{Float64}:\n", + " 16.0855 -15.6038 -3.48169 3.40547 0.693147" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n" + ] + }, + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAlgAAAGQCAIAAAD9V4nPAAAABmJLR0QA/wD/AP+gvaeTAAAgAElEQVR4nOzdd1wT9/8H8M9dQkJCAoSdqAiCKEtUUFyMIM6WOttabdWqdbTfWrV2aK3fWrW/Vm21ttY6Wm2/jqpVa611iwhORBmCCIKI7BUgZJB1vz+uTdOwAgQu4/18+IccN95cknvlPp/73GEEQSAAAADAWuFUFwCAGUhOTv7iiy/i4+OpLgQAYHzWEoRqtTo3N/fGjRt3796trq6muhwTVVRUNH/+/O+++647NyqTyX7++edVq1YtWrTogw8+6M5Na3300Ufz589vqXWkoqJi8uTJ+/fvDwkJMdYWd+3aNX/+/KdPn7Y+25UrVxYtWnTlyhVjbVfXw4cP58+f//PPP3fFyrvIV199tWjRopqaGqoLMWkbN26cP39+Q0MD1YWYD8LSPX369I033nByctL9q0NCQr799luFQkF1ddR4/Pjxrl27bt++rTc9NTUVITRlypRuq0Qmk4WGhiKEaDQaj8cLCAjotk3r6tu3L0JIpVI1/ZVarR47dqyLi0tubq4Rt/jSSy8hhJKTk1ufbdu2bQihr7/+2oib1rp06RJC6I033uiKlXcRoVCIEHr69CnVhZi0oUOHIoQqKyupKuD1119vNm6+++47qkpqHb1LU5Zyly9fnjZtWl1dXa9evV588UUvLy+lUpmdnX327Nn//Oc/OTk5X3/9NdU1UuDOnTuLFi1atWoV+YHR4nA40dHRwcHB3VbJmTNnUlJSpk6deujQISaT2W3b1TNs2LAePXpgGNb0V//3f/+XmJh4+fJlX19fI24xMDAwOjra3t7eiOsEwKSEhobyeDzdKT179qSqmNZZchBmZ2dPnjy5oaFh1apVn3zyCYPB0P6qoaHh888/hwYWPT4+Pt3cDZaTk4MQmjx5MoUpiBBqqXlQo9EEBAQkJiaSp61GtHbt2rVr1xp3nQCYlM2bN5Nn8KbPkoNw+fLlDQ0N8+fP/+yzz/R+xeFwNmzYoNdZqFKprl27lpmZqdFofH19Y2JiWCyW7gyPHz+uq6sLCAhgMBhXrlzJyspycHCYOHGim5sbOUNOTk5CQoJEIgkPDx8+fLjusiUlJaWlpV5eXs7Ozrdu3bp79y5BEBEREQMHDmxaeVZWVkZGRklJCYPBGDBgwMiRI3H8X725BQUF1dXVfn5+HA7n5s2b9+7dU6vVS5cuJc9pSkpKkpOTCwsL1Wq1j49PTEyMnZ2d7l/x5MkThFBpaWlKSgo5kSxMJpNlZWXxeLw+ffoghHJzc+vr6/38/Lhcrl6F5K/69evH4XDIKfn5+ffv3y8qKsIwrH///tHR0brfPJoSiUT5+fkPHz4kCyYr6d27t4uLS05OjlgsDg4O1l2DUqlMT0/ncDj9+vUjp5SVlRUXF3t6erq6uj548CAxMVGhUAwcODAyMrLZc7u8vLykpKSKigo3N7d+/foNHTpUu1czMzPlcnnTtEtPT6+oqMjPz8/NzY2NjXVxcdH9bXsL0PXkyZOamhp/f382m607/fbt27dv36bT6cOHDx80aFBLi8vl8vj4eLK1NjAwMCoqysbGRm+e8vLy27dvFxYWqlQqLy+vmJiY9p6ASiSS7Oxs3X2u9ysul+vn59fssgqFIiMjg1y2pqbm3LlzJSUlkZGR2kaI6urqy5cvFxUVsVis8PDwwYMHN11JY2PjuXPnHj9+7OrqOnbsWA8PD0PKJj9r3t7eTk5Oqamp169fV6lUQ4YMGTFiRLPzP3z48MaNG9XV1QKBYPTo0Xw+X/e3HXs3Xr9+XSKRvP766+Qpkd6xZfTo0ba2trpbIY8tgYGBDAYjISEhPT2dwWBER0f7+/vrVatWq5OTk3Nzc8vKyng83rBhw4KCggzZLc3uJfKDn56enpiYqFKpQkNDR40a1YG1mT2q22a7Sn5+PoZhdDq9qKjIkPlTU1P1Pu09evS4ePGi7jzPP/88QujixYu6R0w7O7tz586p1ep33nlH9/C3cOFC3WX/+9//IoS+++675557Tncrs2fP1u2qLCkp8fb21nuNAgMDs7Ozddc2d+5chNDx48ejoqK0s5FdXDExMXpHYRcXl9OnT2uXnTBhQtO3wc8//0w06SP85JNPEEKfffaZ3r5qbGx0cXFhsVi1tbUEQSgUiqYfV09Pz5s3b7ayw3/99demZezatYsgiNGjRyOEnjx5ojt/cXExQigiIkI75f/+7/8QQjt27FiwYIHuSiZMmCCXy3WXra6unjp1qt5uCQoK0s7QtI+wqqpq7NixuvOz2ext27bprtbwAppq2kcolUonT56su54ZM2Zs2bIFNekjPHnypN7B2s/PLz09XXeeSZMm6X15cnR0PHLkiO48bfYRyuVyV1dXOzu7uro6vV998cUXCKGPP/64pWWfPXtGvl4HDx7Ufg9btWoVQRAajWb9+vV63zLHjRtXU1Oju4aMjAwfHx/tDEwmc+/evYb0EX700UcIof3798+cOVN3E9OmTVMqlbpzVlRU6H0emUzmxo0bdedp17vxm2++0d3ogwcPCIK4d+9e02PLpUuXdFc4ceJEhFB8fLxuWuM4rreHz58/r3e5A0IoLi6O/CRqGdJHSB6R9uzZM2fOHN21TZo0qfMXT5B9hFeuXOnkerqNxQbhjz/+iBAaOnSoITMXFxeTX/bffvvttLS0rKysdevW0el0JpN5//597WxkEPbu3VsoFP7xxx/Jyclr1qzBcdzNze3jjz92d3ffu3fv3bt3f/nlF/Kr6++//65dlnzbCQSCvn37/vnnn4WFhZcvXyavQly2bJl2ttzc3PDw8O+++y4xMfHx48fXrl2bN28eeaTTfXeSQejp6Tlw4MB9+/bduHHjf//7n1qtJghi+PDh69evv3TpUnZ2dnJy8vr169lsNovF0l7rce/evdWrVyOEZsyYcfFvJSUlRJMgLCgowHHcz89Po9Ho7q5jx44hhF599VXyR7lcHhgY+OWXX169ejUnJ+fmzZvvvvsunU53dXXVO7TpKi8vv3jx4osvvkgeT8kyyG8t7Tr0eHt79+rVa9++fSkpKcePHycPnbrhLZPJyLON6OjoM2fO5OXl3bp169tvvx03bpx2Hr0gVCqV5An9uHHjkpKScnNz9+3bR75Ddu/e3d4CmtU0CGfPno0QGjFiRFJSUmFh4YkTJ3r16iUQCNC/g/CPP/7AcdzJyWnbtm337t1LSUn5+OOP6XS6h4dHRUWFdraYmJi1a9deuHDh4cOHKSkpX3zxhb29vY2NTVpamnYeQy6WWblyJUJo586duhM1Go2fnx+O4wUFBS0tSAahh4cHi8V69913//zzz4SEhMTERIIgyLdf//79Dx8+nJWVlZiY+PLLLyOEhEKh9p1G9usjhJYuXZqVlZWXl7dx40Ymk+nu7m5gEHp7e/v4+Bw4cCAlJeXIkSOenp5kUGlnk0qlAwYMQAi99NJLly9fzs7OPnHiBHmCq3tNR7vejZ6enr179/7222+TkpKOHz9eXl
5eVFTk7OxM/iHkseWTTz6h0WhMJjM1NVW7OBmE3t7eI0eO/O2331JSUnbs2EG2xJA7jXTgwIEXXnjh0KFDd+7cyc7OPnPmTHR0NEJo1qxZuuUZHoTe3t5eXl4///xzSkrKsWPHvLy8EEJfffWVdja1Wn3eAHqZRwZhbGzs4MGDBwwY8Pzzz+/cuVMqlbZSD7UsNgjJD8OcOXMMmfnNN99ECM2bN0934qeffkoeCrVTyCAcMmSI7qlDXFwcQsjGxubhw4faiQcOHEAIzZ49WzuFfNvR6fTHjx9rJ5aXl9vZ2bV52krG3qlTp/SmuLu7N/2q3tTevXsRQu+99552yqFDh9DfX891Nb1qlPyY6Z3bac+MW9noxx9/jBD69ttvW6/tvffeQwgdPXpUd2K7Dj1cLre4uFg7MTk5GSE0ePBg7ZTNmzeTn0m9swFdekFI7p/AwEDdLx/kGAYXFxft59nAApqlF4QPHjxACLm5uYnFYu08KSkp5FmsNggVCkWvXr1sbGzIpnW9XfHRRx+1ssWjR48ihBYtWqSdYkgQPn78GMfxkJAQ3Ynkgs8991wrC5JB2PQ7waNHj3Ac9/T0FIlEutPJM7OzZ8+SP27cuBEh9Morr+jOQ36IDAxCZ2fnqqoq7UTy5YuOjtZOITexYMEC3WVLSkrs7e1dXV215/TtejcyGAzdDzhBEIsXL266lXXr1iGEJkyYoJ1CBmFoaKjuseXLL7/U+6LclFwuDwoKotFo5eXl2omGB6Gjo6PugomJieS3Me0UiUSCDGBvb6+7cjIIaTQan8/XNikHBAQUFha2UhKFLHYcYV1dHUKoaedWs06ePIkQ+vDDD3UnLl26lM1mX7p0SSwW605/5513aDSa9keycXLChAn9+/fXm0h2xemaPHmybmuPm5vba6+9plKpTp061Up5L7zwAkLozp07etOXLFliSK/PpEmTml3cEGSzyU8//aSdUlFRcf78+Z49e7beDd5SzUY3a9Ys8rSJFBYW5uTkpLvnyVT79NNP6XRDe8RPnDiBEFq5cqVux5tQKAwPD6+qqrp27Vq7CjDEb7/9hhBatGiRts8VITR48GC9nZyQkPDs2bOJEyfqdWcuWbIEw7CzZ8+2sonnn3+eRqO19xXx8fGJjY1NS0sjA560Z88ehNDChQvbXJzFYi1dulR3ysGDBzUazdtvv+3o6Kg7nfwy+ueff5I/kjvk3Xff1Z1n6dKlrXc865o3bx55KkaKiopiMBi6rwv5bXXNmjW6S/H5/ClTplRWVur+vYabPn267gcc/X1s0RsgSx5bLly4oHdsWb58ue6xhWycb/29xGQyx48fr1artf397TJnzhztJQ4IoVGjRrHZbN0tMhiM7QYgv25qxcXFJSYmNjY2lpSUSKXSK1euBAUFZWVlvfjii4RJ3svMYi+WIQ8oUqm0zTlrampKS0u5XC55WqDl4ODQt2/ftLS07OzsIUOGaKfrXR3g6uqKENJblpxYXl6ut62mI7LJi2WysrK0UzIyMjZt2pSSkvLs2TPdIbFVVVV6ywYGBjb9cyoqKjZt2nTp0qXi4mLdRZoubogXX3xx6dKlhw8f/uqrr8hOnQMHDiiVyrlz5+p+YgsKCj7//POkpKTi4uLa2tpObrRdml6s4e7u/vDhw8bGRiaTSRBERkYGhmHNXpTUEvLlaHqtSlhY2O3bt7OyssaNG2dgAe3aYtO3x6BBg3RH09+/fx8hVFlZqfelDSHEZDILCgq0P4pEos2bN587d664uLiiokI7vQN3k1iyZMmFCxf27NlDfgqqqqp+++23nj17kicxrevdu7fuhVraPyE5OVnvTyDfKto/ISsrC8dxvZE8Tk5Onp6ejx8/NqRsvdcFx3FXV9eysjLyR6lUmp2dzWQyd+7cqbcguf6CgoIOXDYSEBCg+2N1dXV5ebm9vb3e2BtHR0cfH5+MjIxHjx6FhYVpp+t1JZLtwNqaSWfOnNm5c+fDhw9LSkrkcrl2esc+a82+e3XfSHQ6/e23327vaqdMmaL9P41GEwqF8fHx/fr1u337dmJiYmRkZAdK7VIWG4TkgJX8/Pw25yTDRvdrkRb5RtT71qZ3mR/ZeKU3kbxOoel3HzIgdZHb1W4iMTFx7NixarVaKBTGxcWRHeN5eXl79uxRq9V6y+pdxIgQqqioGDJkSGFhYVhY2OzZs52cnOh0ulqt/uijj5oubgg7O7tp06bt37//9OnTZGseOdLg1Vdf1c6TnZ09YsSIurq6UaNGTZw4kcfj4TheVVW1ZcuWjm20XfT2PPr3zpdKpSqVytHRUe/SjNaRbwny1ddlyPsBtfzqt7nFlt4eWiKRCCGUmppKXm2ri8Viac+W6urqwsPDc3NzQ0JCZs6c6ezsTJ7arlmzRqVSGV4VKS4urnfv3ocPH96yZYu9vf2PP/7Y2Ng4f/58Q86wm75Fye9J586d0/0iReLxeOREjUYjkUh4PF7T8z83NzcDg7DZ10X7opB9CiqVavfu3U2X5fF4HXvr6v29rR9bMjIyWn8vNX0jff3118uWLbO3t584caKPjw/Z4nXlypULFy504JVtukX0771Ebj0vL6/N9dBotKZX+elycXEZPXr0sWPH7ty5A0HYfchvc3fu3Kmrq3NwcGhlTvLNpPutWYv8LmbEUc9Nt0KeNWo3sWbNGrlcfvr0abIfjnT48GGyMapN27dvLyws/OCDDz7//HPtxGfPnpG9Jh0zZ86c/fv3//TTTy+99FJqampaWtqoUaN0v7pu2LBBJBJ99913S5Ys0U5MSkoir3jsAPK7hUaj0Z1oYF+FHjabbWNjU1dXJ5VKm37mW0K+JcrLy/Wu1y8tLUVGfT/obbGlt4febMuWLSP7t1qya9eu3NzcJUuW6N4tr66urul5pCFoNNr8+fPXrl17+PDhhQsX/vDDDzQaraVbh+hpOoyE/BOOHDkyfvz4lpbCcdzOzq62trbpWXXTVpaOIcvgcDhVVVV6l9fq6cy70bjHFplMtmbNGkdHx3v37ummTlFR0YULFwxfT7vIZDK95q5m2dvbk71RrSC/R3bsg9zVLDYIQ0JCBg8efO/eve3bt5MXbjSlUqnodDqPx+vRo0dxcfGjR490j+8ikSg3N5dOp+t2/nUS2S6k6969ewgh7UigtLQ0e3t7vUu6DW/9T0tLQwjNmDGj6SZ0kecHBn6FjIqK8vHxuXDhQmlpKdlZqHe9tYEbNRw5NqC8vJwczkjSbT02HNkompycfO/ePcNbuoKCgjIzM+/evavXVkn2G3Vs2FbryFbu+/fvT58+XXe63ktPttbeuHGj9bUZ/RV544031q9fv3Pnzr59++bk5JDniB1b1aBBg/7888/r16+3EoQIoaCgoNu3b6empoaHh2snVlZWFhYWdmy7ejgcTt++fXNzczMzM1u/lVJn3o1OTk4CgaCkpCQnJ0e3EbKmpiYvL6+9x5a8vLyGhobx48frnXt15pVtk42NzVtvvdXmbIa0uJBvS9O8uYzFXiyDENq8eTOO4xs2bCAv99dz7Ngx7XkSefTRP
YtCCG3btk0mk40fP173+oVO+v3338l7qZBKS0sPHjxoY2NDXlqCEHJxcZFIJLodOSUlJbt27TJw/WTLjO7BQqVSrV+/Xm+2Hj16IIS01/W1DsOwV199VaVS/fTTT4cPH2axWHoHa7JBT3ejDQ0Nep3n7UJ+zrXXTSCE1Gr1pk2bOra21157DSH08ccfKxQKAxch/8CvvvpKd5GLFy/evXvX3d09IiKiY5W0YsqUKRiG7d69u76+XjsxOTk5ISFBd7aYmBhPT8+EhIRz5841XYm2R7np20Cj0ZBXKnaMh4fH5MmT09LSli9fjgy7TKYls2fPptFoO3fubHrDcY1Go+3Unzp1KkKIvGxSa/v27UqlssOb1kOe1Dbba6DbN9/Jd+O0adNQC8eWiRMn6nWgtk77QdM9PT137lybX4w6w8bG5lsD6H7e6+vrZTKZ3nqOHz+elJTEYDD0hueaCEsOwpiYmO3bt6vV6pdeemn8+PG7du06d+7c6dOnN2/ePHTo0Jdeekl7Lr969Wp3d/f9+/cvXrz49u3b9+/fX7Vq1YYNG1gslt47uJN69Ogxfvz4X3/9NTc39/Tp07GxsVKpdMWKFdrx0UKhUK1WT548+dKlSzk5Ob/88ktkZGSzfQzNIi8y/M9//nP06NGcnJyLFy+OHTuW7FjSFRAQwGazT548uWLFih07duzevTs3N7eV1c6ZMwfDsPXr15eXl0+dOlXvej9yo7NmzTpz5kxOTs7vv/8eGRlp+NV9Tb344os0Gm3z5s2fffbZ9evXjx49GhkZWVlZ2bG1LV68ePjw4VevXo2Kijpx4kRmZmZCQsLWrVtbuep16tSpkZGRWVlZ48ePv3jxYmZm5s6dO8mxbps2beqKu8H1799//vz5VVVVo0ePPn/+fE5OzoEDByZNmkSOftNiMBg//PADnU6fNGnSe++9d/bs2YyMjAsXLmzfvj08PHzDhg3kbOSftnLlygMHDjx69Cg+Pj4uLi4/P7/p3WcMR7Z7p6en9+rVq9l7MhjIz89v3bp11dXV4eHhW7ZsuXr1anp6+unTpz/55BMfHx/tlUFvvvmmt7f3sWPHFi5cmJKSkpGR8fHHH2/evFnvTgKdsXz58qFDh54+fToqKoocbnjr1q3Dhw/PmzdP9+Sv2Xej4VemfPTRR25ubvv27dMeWz788MONGzeyWCxyxIXh3N3d/f39s7Ky5s2bl5ycnJWVtW3btpdfflm3WlNw9+7dHj16LF68+Jtvvjl58uSuXbumT59OXi+6atUqcnioyaFo2Eb3uXLlit6tpRFCHA5nyZIlZWVl2tmysrL0Lizs06eP7lBW4u/xc+TdIrTIi0f0bgBBnkn07dtXO4UctfP999+TX3VJGIYtXrxYd+RQdXW13r3Z4uLiyMEVukORyHGE8fHxen+sWq3Wu8tJQEAAOUatf//+unMeP36cHDxLavbOMrrIAYWoueGDMplMt0cTITRy5MikpCT071GYzWp2HCFBEDt27NCN0mHDhpGNhE1Hbn3//fd6y5LNjDKZTDulrq5u1qxZel1B4eHh2hma3lmmtrZW98o3hJCDg4PuaPp2FdBU0wH1crlc704oc+fOJU+J9O4sc/XqVb2rExFCPXv2PHTokHaeZcuW6fbP+fr6pqens1gsgUCgnaddT5/QaDRkO966desMmV97Z5lmf7t79+6mX+9CQkJ0x5g/evRI989ksVgHDhww/M4yBw8e1Jveq1cvBoOhO6W+vn7evHl6V/0wmcxJkybpztaZdyNBEA8ePNBrY296bCEvwc3MzNSdSMat7l1B7t+/r9u0iOP46tWrySafH3/8UTub4eMI9+3bpzedHP7RyoJtun37dtNWNGdn561bt+rdmsN0YIRJjuowuqdPn969e7eyspLBYPTp02fIkCFNGyU0Gg35PUulUvn5+Y0YMULvG3RZWZlUKu3Zs6fe/bsrKip4PJ7ufdYJgnjy5ImNjY32688nn3yybt26ffv2zZ07NzU1lewsHDFiRNO7OBIEcefOnaysLBqNNnjw4KCgIJlMRg7w0F5VWFlZKRaL+Xx+s03z2dnZ9+/fl0qlffv2Je9TqleMlkwmI78NuLm5cTgchULx9OlTDofT9Ht3TU0Neb2fl5dXsxcXpKampqenk3epJu858OzZMxaL1fpXeHK17u7uTV+OZ8+eXb16VS6X9+/ff9SoURqN5unTp7a2ttpBe7W1tTU1NS4uLnpXHBQVFSkUCm9vb70rNYqKiq5fv15bW0s+70m3q+/p06cKhaLpRQG5ubm3bt2SSCQ9e/aMiorSG5ba3gJ0lZWVicViT09PvfPLjIyMO3fu0On0IUOGBAQE1NXVVVdXOzs7613wpdFo0tLSMjIypFKph4eHt7f3gAED9Db3+PHjlJQUsVjs4+MTERFBp9OfPHmC47i2e08mkxUVFTk4OBjS5EDeTebp06cFBQVk03rrVCpVYWGh7uulRy6X37p1Kz8/X6PR8Pn8wMBA3W9mJKVSmZCQkJeX5+joGBsb6+zsXFpaKpPJPD09W7lmlXxTkW9p3enk3XebXtxYUVFx48aNsrIyOzu7nj17hoaGNr2GpcPvRhJ5bMnMzFSr1YYfW9Rq9dOnT5lMpu4Ol8lk169ff/LkCYfDiYyM7NGjB7lpV1dX7fvz2bNncrncx8enleuARCKRSCTSXUq7rFKp7ORZpkqlSk1NzcvLE4lEDAbD19d36NCherdXNSnWEoSU0w1CqmsBoN1Onz79wgsvvPjii+QdagCwJBZ71SgAoPPkcnlJSUlZWdny5csxDNO7QwoAlgGCEADQovv372ufh7B69WqjP5cRAFMAQdhNYmNjWSwWHEeAeendu/fnn39uZ2c3ZMgQ3SF9AFgS6CMEAABg1Sx5HCEAAADQJghCAAAAVg2CEAAAgFWDIAQAAGDVIAgBAABYNQhCAAAAVq2bgrCkpEQulxs+v95jMK0T7AQEOwEhBDsBIQQ7ASEEOwEh1DU7oZuCcMaMGeRDTQ1kmk8x7mawExDsBIQQ7ASEEOwEhBDsBIRQ1+wEaBoFAABg1SAIAQAAWDUIQgAAAFYNghAAAIBVgyAEAABg1SAIAQAAWDUIQgAAAFYNghAAAIBVM8kn1GvUJw+enEubQWAY1aV0HJuOMqfRvbhm/CcAAIA1MMkgxGmxlfGN80bZ9OhDdSkdtyhJ/dtTYlkQBCEAXUUmk12/fp3qKrqPVCpls9lUV0ENDMMiIyNtbGy6YuUmGYQI2fgOkOfcN+sgHC3ADjwmlgVRXQcAluvPP/986623goODqS6kmxAEgZlzO1ln3L179/Tp06NGjeqKlZtoENL6DGi8f4UrnEZ1IR0XzccXJSnVBI1mpe9bALqcRqOJjIw8evQo1YWALhcVFdV19xw30Ytl6H0CFQUPCZWS6kI6zo2Fetph96oIqgsBAADQGhMNQszWju7hqXiSRXUhnRIjwOJLIQgBAMCkmWgQIoRs+w2WP7pHdRWdIhRgV0rg+WEAAGDSTDcImf0GNz66T3UVnRLNx2+UEwqIQgAA
MGEmHIRe/qqqEk1DHdWFdJwjA/k5YMmV0DoKAACmy3SDEOE0pm+wPDeV6jo6JUaAXSmBIAQAANNlwkGIELNfaGO2mXcT8nHoJgQAtCIlJeWPP/6guoq23bx589SpU0ZZVXFxcf7fiouLjbLOzjDpILTtP1iefZfqKjolwgO7W0XIVFTXAQAwVTdu3Dhy5AjVVbTtxo0bxgrCadOmjRw5csyYMWPGjFmyZIlR1tkZJh2EdBcBRmcoy55SXUjHcWxQMA+7WQGtowBYF7VaXVRUpFaryR+rqqqUSv2B0WVlZSpVG1+TS0tLmy5YXV3d2NhoYCV1dXXV1dXk/yUSSRzd0IwAACAASURBVH19fdN5KisrZTKZ7hSFQlFSUtJmea3X2Yoff/wxLy8vLy/v999/N3ypLtKOIFQqlfn5+XK5XDultrY2X4f2JTci2/6h8uwUo6+2O8XAIAoArMns2bPnzZsXHBwcHR2dm5ubkJDQr1+/0aNHe3l5ffbZZ+Q8WVlZfn5+sbGxAwcOTElp5hC3devWuLi46Ojo2NhYd3f3M2fOkNPv378/YMCA6OhoX1/fFStWEASBEHr++ecPHDhAzvDLL7+MHz8eIVRQUODg4LB06dLQ0NAPP/ywrq5u8uTJ/v7+AwYMGDt2bGVlJULo8uXLwcHBM2fOjI6O9vDw2LZtG7mSdevW+fn5xcXF8fn8L7/8spU/dvv27ZMmTZo4cSJZ5+nTpw3cS5WVlQ8ePNANFAoZdIs1giCGDh2anp6uVCoTExNHjhxJTv/22283b97s4uJC/njz5k03Nzfj1sfsHypJOs2Nnmrc1XYnoQD/b4rxvyIAAJp1o5wolnZfG8xoAe7E1J+YlJR048YNFxeXmpqamJiYkydPhoeH19XVhYeHR0ZGjho16o033li4cOHKlStramrCwsK0B1Vd165dS0lJ8fX1PXTo0OrVq5977jm5XD59+vRvvvlm4sSJMpksJibmxIkT06a1eCvK+vp6b2/vx48fI4RWrlypUqny8/NxHH/11Vc//PDDH374ASGUmZm5devW2NjYjIyMYcOGLV682NbWdt68eWvXrsUwrKKiYuDAgZMnT/bx8WlpK/Hx8Xfv3vXz8zty5MiqVavi4uIQQrNmzWp6P/Tx48d///33CCEMwz777DMajfbs2bMvvviC8tZRg4KQLDo0NNTPz0/vV6+++uqOHTu6oLC/2PYbVHNgE9Eow5isrttKlxrpjqXXEGIl4nbJbdMBAP9yo4K4042dEQOdCCem/g2FX3nlFfIM4erVqxwOp7CwsLCwECHk6+t79erVkJCQmzdvXrhwASHk5OQ0Y8aMZ8+eNV1zbGysr68vQig6OnrBggUIoZSUFLFYLJFIjh07hhDq06dPfHx8K0GI4/ibb75J/v/s2bNbtmyh0+kIoaVLl06ZMoWc7uPjExsbixAKDg5ms9lFRUW+vr48Hm/v3r0PHz6USCQ4jmdkZLQShDExMWQ0REdHz549m5z4zTffNG28ZbFY2mIcHR0RQteuXRs3blxkZGRgYGBL6+8Ght50e8yYMc1Ob2xsfPTokUAg4HK5xqvqHxjDluHp15iXYRswtCvW3w1saSjUBUsqIyb0gttvA9DlVgZTf+mDk5MT+Z+qqiq1Wq1t/AwKCgoJCamtrbWxsdE+UMnR0bHZINQeVBkMhkKhQAhVV1fjOK5dW69evUJCQvSWIhtLSRwOh8n863S1traWzB6EEI/Hq62t1duK7obGjh0bHBwcFxdna2t79+7dhoaGVv7YpnUihE6dOpWXl6c3Z2Bg4CuvvEL+yeSUyMjIkJCQW7dumUcQtuTUqVO3bt3Kz89/+eWXd+/e3dLDolQqVXJysrY52MfHx8vLq5XVajQa7Y3Gmf1CZVnJjP5hnSyVQkI+drlYPa5H+z6fujvBasFOQLATEEIt7ATdg75pCggIaGxs/OSTT2xtbbUT1Wq1jY3Nw4cPAwICEEJpaWk4btDBwd/fXyKRfPDBBzweT3c6j8erqqoi///w4cNml+3bt+/9+/eHDx+OELp3717T5j2tmpqaO3fuJCYm0mg0hUJRVlZmSG16WCwWh8PRm6i7E0iNjY1FRUWurq6GrFOjw/BKDNm3nQrCN998c/Xq1TiOl5aWRkdHf/PNNytWrGh2TplMduDAAe23gDlz5rTemyiTyWg0Gvl/witQdvALm/FzOlMqtYbz8A/u06XS9nUL6+4EqwU7AcFOQAi1sBMaGxtNPAtHjRoVFhY2derUxYsX4zh++/btsWPHRkRELF++/PXXX//vf/+bmZkZHx8/evRoQ9bWt2/fWbNmxcXFrVixwtbWlrxwJi4uLjY29tNPP+3du3dRUdGRI0e8vb2bLvvBBx+8/vrrdnZ2DAbj/fff37RpU0tbcXR0FAgEGzZsGDFixN69ew2/cFTXjBkzWvpVQUHBp59+GhERgeP4Tz/95OTkNG7cuDZXSBBEY2OjVCpt78eBzWa3mYWdCkLt6T+fz3/ttdcSExNbCkIul7tx48aIiAgD10wQxD/fJnwDpQRhK6+nuwg6Uy2Fotko75pSacPhNelUb8W/doK1gp2AYCcghFrYCba2tib4oNrnnnuuZ8+e2h9PnDjx008//fHHHyqVatCgQWQb4KefftqrV6/Dhw8PGDDg8OHDTc+6hgwZ0qNHD/L/LBbr/fffJ/+/c+fOY8eOXbx4USaTBQUFhYWFIYRmz56tVCpPnDgRFBT0v//978GDBwghBweHZcuWaVc4YcKEX3/99ciRIyqVat++fWS/YO/evefOnaud56233nJxccFx/OLFi9u3bz948ODcuXMnTJhA1jx8+PCmPYVhYWHu7u7k/21tbT/44IM294+zs7O/v/+1a9cQQpMmTVqwYIG2/bYVGIaRZ5ld8XHA2vV9ysXF5dSpU81e4DR//nwajbZ79+5mF4yMjGxXEIrFYt2Wa9HhrTY9fTgRLxheqqkZf0612B+f3LsdraN6O8E6wU5AsBMQQi3shGPHjh07dgwezGsNoqKi1q9fHxkZ2RUfB0PPCH/44YeqqiqZTHbw4MGkpKSFCxfyeLz//Oc/ISEhzs7OSUlJhw8fbnqxrLHY+odJ7lw06yAU8vH4EmJyb6rrAAAA8G+GnqDU1dWJRKK3337b3t5eJBKRfZXh4eG3bt06evQojUZLTU0dNGhQF1XJ7DdIkf+AUCq6aP3dAB7SCwAApsnQM8JmO/9ee+211157zaj1NA9nceh878b8B7b9BnfD5rrCYBesSEKUy5C7uY6HBAAYSqPRHDly5MGDBzExMboXwmzevHnGjBm9evXSnTk7Ozs+Pr69g8qvXLlSX18/efJk41Rs3agfcGMgW/+wxodmfANuGoYiPPCEUmu/CB4Aa7B3795t27YFBgby+Xzd6devX6+r03/Gal5envYGaYbTDskHnWdGQRhq7k+iEPKhdRQAq5CSkjJt2rSZM2eSIwW1jh8/HhQUpP2xsLBQLBbrLavRaAoLC/Vugd3Q0JCXl2cid+a0PGYThIyefTUSsbqmgupCOk4
ID+kFwAqsXbv2xIkT33zzTVhYWFZWlu6v/P39b9++jRB69uzZgAEDXnjhheHDh584cUI7w6lTp7y8vF555RVfX9/PP/+cnLhy5crQ0NDXX3+9d+/emzdv7s6/xUp09s4y3QfDmP1DZQ/vcEY+T3UpHTTACRM1EkUSoqedyY18AsBiKJ5kqeuqu21zTL+BOPtfV/N/+umnxcXF/v7+K1eubGmp9957b/To0Vu3blUoFNobWBYVFS1atIh8WkVdXd3gwYPHjBkTGhr6/vvvb9myBSFUWVkZHBz80ksv9e4NF6Abk/kEIUKsgKGS5EvmG4QYQlF8PL6UeM0XghCArtL4JEtR+KjbNmfTo49eEBri0qVL5IhyBoMxZ84c8kEQFy9e9PDwSE9PT09PRwh5e3snJiaGhobS6fQvv/wyPz9fpVLhOJ6VlQVBaFzmFIRM/zDRkW2EohFjtOcGLaZEyMfiS4jXfKmuAwDLxY2ZTnUJbSAIoqGhQTsq3N7envxPTU2NRqPJz88nfxwzZkxYWBhBEFFRURMmTJg+fbq9vf2dO3ckEgk1dVsucwpC3JZt06tvY+5928BhVNfSQTEC7It0uHAUAKuGYZifn9+9e/fIcRTap0kEBwdLJJLly5czGAztzCUlJXl5eeStQSUSSbPPqQCdZE5BiBCyDRgqy7xjvkHY3xFTEyhfTPThQusoANbrgw8+eOeddyQSSWVl5bFjx8jbdY4ZMyYgIGDSpEmvv/46QRA3btyYOXNmaGios7PzmjVrhgwZsmfPnpae8AM6w8yCkBUYXrnjA0QQyPTutGugaD4WX0L06Weu9QMA2jR9+nRnZ+em09977z1PT0+E0KxZs3g83tmzZwUCwcmTJ9PS0hBCGIadOnXql19+uXHjBkEQ5B266XR6fHz8rl27Ll269OGHHxYVFQUHByOEYmJiWn9MIDCcmQUh3a0nxrBVluTb9GjxcckmjhxNOL8f1XUAALrMhAkTmp3+xhtvaP8/ceLEiRMnkv8nsw0hhOP4zJkzZ86cqbuUr69v01ET5JMFgVGYzThCLduAobKsO1RX0XExAuxKiQaGEwIAgIkwvyBkBQ6VZ5pxEHpzMSYNy66FKAQAAJNgfkHI8AlWVTxTi0VUF9JxMXwsHm4xAwAApsH8ghCj0Zl9B5r1DbiF8EgmAAAwGeYXhMj8uwlHC/D4Eg30EwIAgCkwyyBkBQ5tzLlPqFVUF9JBfDZyscUyRJCEAABAPTMbPkHCOY50156KvAdMv4FU19JBMQLsSgkR4gSjCQHolPz8/N27d1NdBehypaWlXbdyswxChBAreLjswU3zDUKhAPtfLrE8qO05AQAtmThxYmpqqvYWZRZPqVRa7Z1lYmJi+vXrqvHX5hqEtkHDq3atcZyy2ExvMRMjwN9IVKo0NLpZNk4DYBLs7Ow2btxIdRXdRywWa2/VDYzIXA/DNh6eGN1GWZJPdSEd5MxEvTnYvWroJgQAAIqZaxAisnU04ybVVXRcDDywHgAATIAZB6Ft0HDZAzMOQiEfiy+BRzIBAADFzDgImd4BmroaVXUZ1YV0UBQfv1lBNKqprgMAAKybGQchwjDbgCHyzNtU19FBDgzU3xG7UwmtowAAQCVzDkKLaB2FbkIAAKCWmQdh/1BlYa5Gaq5PpxQK8PhS6CYEAAAqmXcQYjYMZt8B8ofJVBfSQZEeWEoVITXXW8UBAIAlMO8gRGbeOsqmo4HO2I1yaB0FAADKmH0QsgLDG7PvESol1YV0kJCPQesoAABQyOyDEOc42PTwbsy5T3UhHSQU4HC9DAAAUMjsgxAhxAoeKUtLorqKDhrhhmWKiHpzPaEFAACzZxFBGDJK9uAW0pjl0HQmDQ1xxRLL4KQQAACoYQlBSHN0oTvzGx9nUF1IBwn5ONxrDQAAqGIJQYjIk0KzbR2NEWDxpXBGCAAA1LCUIBwYIcu4jjRmeV411BXLqyeq5FTXAQAAVslCgpDu7IFzeY0FD6kupCPoOBrhjiWWmWWKAwCAubOQIEQIsQaY8bWjQj4OraMAAEAJCwrCkFGytEREmGWcCOEhvQAAQBHLCUIbj94Yk6V4lkt1IR0xyBkrlRJlMqrrAAAA62M5QYjI1tH061RX0RE0DEV44FdhEAUAAHQ7iwpCtjkPohDyYRAFAABQwKKC0KanL9JolMX5VBfSETCaEAAAKGFRQYgQYg2KlKZeo7qKjghywuoVxNMGyEIAAOhWlhaE7IGRsnsJ5njtKIZQFB9PgJNCAADoXpYWhDY9fRCdrih6THUhHREDgygAAKDbWVoQIvKSmfsJVFfREUI+BCEAAHQ3CwxC1qAoqXm2jvo5YARCefXmVzkAAJgvCwxCG74XbstSFD6iupCOgJNCAADoZhYYhAgh1sBI6X2zvHZUCIMoAACge1lmELIHR8tSr5lj6+hoAXa5RGN+dQMAgNmyzCCku/XEWRxFQRbVhbSbJwfj0LGHtRCFAADQTSwzCBFCrMFR5ts6Ct2EAADQbSw2CNkDI2Wpieb4zHohH4uHIAQAgO5isUFId+1Bc3BufJxOdSHtFiPAE0o10E8IAADdw2KDECHEChVKU65QXUW78dnIlYWl1UASAgBAd7DkIGQPjpZl3CCUCqoLabcYAbSOAgBAN7HkIKTZO9n09JVn3aG6kHYT8rH4UvPr3QQAAHNkyUGIEGKHCqUp8VRX0W5CAZ5YRighCgEAoOtZeBCyQiIac1I1UjHVhbSPMxN5cbA0kYW/OgAAYAos/FCL27KZfgNl6TeoLqTdYgRYQoWFvzoAAGAKLP9Qyw6LMc/WUexaOUZ1FQAAYPksPwhtA4Yqi/PUtVVUF9I+kR54cjXeqKa6DgAAsHSWH4QY3YYVPEJqbo/qdWCgfvbE7UoYRAEAAF3L8oMQIcQOM8trRyPdNVdK4MpRAADoWlYRhEzfEE1DnbK0gOpC2ifSjYBh9QAA0NWsIggRhrHDYqTJl6iuo31GuGruVRMSFdV1AACARbOOIETILnysNCXevB5GwaIRA52xG+VwUggAAF3IWoKQ7tqD5ugqf3SP6kLaJ4aPxUM3IQAAdCVrCUKEEHtorNm1jgoF+JVSOCMEAIAuZE1BOCha/vCuRtZAdSHtMNwNyxIR9Uqq6wAAAMtlRUGIszlMv0GytCSqC2kHJg0NdcWuwUkhAAB0GYOCUCwW//rrr6tWrVqyZInudLVavW3btokTJ86ZMycrK6trKjQmu6GjpXcuUl1F+wgFODySCQAAuo5BQZiZmfn999+XlJTs3btXd/qmTZt+/PHHFStW9OvXLzo6ur6+vmuKNBpb/yGqqhJVZTHVhbQDPKQXAAC6lEFBOGzYsEuXLq1cuVJ3olqt/vbbb7du3RobG7t69Wp/f/+DBw92TZHGg9NYg6Kld69QXUc7DHHB8sVElZzqOgAAwEJ1vI+wtLS0pKRk5MiR5I8jR45MSUkxUlVdyG7oGGnyJUSYzTkWHUcj3bFrZdA6CgAAXYLe4SXLy8tZLJatrS35o7Ozc3p6ekszi8Xi+fPn29nZkT8uXrx45s
yZray8oaHLru20d9Uw2aK0G3SfAV21CSPR7oQRzvTzBWiMszXeY6YL3wnmA3YCgp2AEIKdgBBq/05gs9k0Gq31eToehBwOR6FQaDQaHMcRQlKplMvltlLKW2+9NWjQIPLHHj16tDIzqc0ZOgwb+Zwi/Rp34MguWr8RkTthvBcx+6qay2VRXQ41uu6dYEZgJyDYCQgh2AkIoS7YCR0PQoFAgBAqLCz08vJCCD158qRXr14tzUyj0fr16xcaGtrhzRkRO0xYf2a/RlKP29lTXYtBBjljFXKiTIY8rDQKAQCgC3W8j5DL5U6YMGHPnj0IobKystOnT7/88svGK6wL4bZ2toFDzejBTDiGRrnjcK81AADoCgYFYVVVlZOT06hRo1QqlZOTU2BgIDl906ZNhw4dCgsLCwkJmTt3romc8BnCbth4yc2zVFfRDkIBFg/D6gEAoAsY1DTq7Oycl5en/ZHsFEQI+fv75+TkZGdnu7q6enh4dEmBXYPpE0yolIpnOYxeflTXYpAYAbY9E84IAQDA+AwKQgzDeDxes7+ysbEJDg42akndAsPswsdKbp03lyAM5GENSuJpA9Gbg1FdCwAAWBQruteoHvbQsbLURELRSHUhBsEQiubjcIsZAAAwOusNQpo9j+HlL0tLpLoQQ0E3IQAAdAXrDUJkbpfMxPDhpqMAAGB8Vh2ErMChqppyZWkB1YUYpK8DhmEotw6yEAAAjMmqgxDhNLth4yU3/qS6DkNF86F1FAAAjMy6gxAhu2HjpSlXiEYZ1YUYRAhBCAAAxmbtQUhzdGH6BEvvJ1BdiEFiBNiVEg0kIQAAGJG1ByFCyG7Ecw2Jp6muwiCeHIxrg2WJIAoBAMBoIAiRbf9QolGmKMyhuhCDCPnYFbh2FAAAjAeCECEMsxs2TnL9DNV1GARGEwIAgHFBECKEkN3wCbKM6xqpGTz0crQAv1qqUUMUAgCAkUAQIoQQbmdv2z9Mevcy1YW0zZ2FPFhYWjUkIQAAGAcE4V/sRjzXcP0MIswgYGIE2BVoHQUAACOBIPwL0zcYw1Bj3gOqC2mbkI/BQ3oBAMBYIAj/YTfy+Yak36muom1CAX69nFBCFAIAgDFAEP7DbuiYxtw0dW0l1YW0wYmJvLnY3SpoHQUAACOAIPwHxmSxw2Iakv6gupC2xQjgSRQAAGAcEIT/wol4QXLzrOk/rVfIx+NLoW0UAACMAILwX+guAoaXv/TeVaoLaUMUH7tdQcjVVNcBAADmD4JQHyfihYaEkyY+joJrgwJ42O0Kky4SAADMAgShPtt+gwmNpjEvg+pC2iDkY9A6CgAAnQdB2ASGcSJeaLh2iuo62iAU4HD3bQAA6DwIwmbYDR3TmJehqi6jupDWjHLH7lcTEhXVdQAAgJmDIGwGxmDaDYmV3DDp51Gw6WiQM3a9HE4KAQCgUyAIm2cXESe5dd7Ex1HECOBeawAA0FkQhM2jO/OZfYIkyRepLqQ1Qj50EwIAQGdBELaII5zWcOU40pjuKddwd+xhLVGroLoOAAAwZxCELWL2CcS5jrIHN6kupEUMHIW7YYllphvVAABg+iAIW8MVThNfOkJ1Fa0R8nG46SgAAHQGBGFrWANGaqQNiieZVBfSohgBFg8P6QUAgE6AIGwVhnGip4jjj1NdR4vCXLAnYqJKTnUdAABgtiAI22A3dGxjfpayvJDqQppHx9EodywB7rUGAAAdBUHYBozB5IycaMp3XBMKcGgdBQCADoMgbBsn4gXZvQS1WER1Ic0T8jEYTQgAAB0GQdg2nOPIGhQpuW6id1wb6IxVyokSKWQhAAB0BAShQbjRUyXXz5jmHddwDEV44FehdRQAADoEgtAgdLeejD5BkptnqS6keUI+BqMJAQCgYyAIDWU/9hVx/K+E2hSfexQjgG5CAADoIAhCQ9n06GMj8JYmX6K6kGYE8DCpiigQQxYCAEC7QRC2g/3YWeKLR5BGTXUh+jCEomEQBQAAdAgEYTswvPrTeC7S+9eoLqQZ0E0IAAAdA0HYPtwxr9RfOIwIk4scuOkoAAB0DARh+9j2G4yz7GQPblFdiD5fe4yGoZw6yEIAAGgfCMJ2445+sf78QRM8KYzmw0khAAC0GwRhu7GChiO1Sp5zn+pC9AkF0E0IAADtBkHYfhjGHTNDfP4g1XXoixFg8aUaSEIAAGgXCMKOYA+MVEvEjTmpVBfyL73sMHsbLFMEUQgAAO0AQdghOG4/bmbd2Z+prkMf3GIGAADaC4Kwg9iDogi5VP7oHtWF/AuMJgQAgPaCIOwoDOOOfaX+7P+oruNfYgR4QplGDVEIAAAGgyDsOPbASEIhl2enUF3IP9xYSMDGUqshCQEAwFAQhJ2AYfZjZ9af+cmkxhTCA+sBAKBdIAg7hRUyitCo5Nl3qS7kH0IBFl+qoboKAAAwGxCEnYNh9uNm1f/5s+mcFEbz8RvlhBKiEAAADANB2FmsASMRjSZLS6K6kL84MVEfLpZcaSrBDAAAJg6C0AgcJs6pO7PfdJ5TCE+iAAAAw0EQGgHTbxCN5yq5YyoPrxcK8PgSaBsFAACDQBAah8Pz8+rP/kwoFVQXghBCkR7Y7UpCpqK6DgAAMAcQhMbB8PRjePaTXP+D6kIQQohrg4J42C3oJgQAAANAEBqNQ9w88ZVjGrmU6kIQIrsJoXUUAAAMAEFoNHS3nsx+oQ1XT1BdCEIICfk4XC8DAACGgCA0JocJsxsSf1fX11BdCBrlgaVVEw1KqusAAACTB0FoTDQnN7vwsfXnDlBdCLKlocEu2PVyOCkEAIA2QBAaGXfMDHnGTWVpAdWFkK2j0E0IAABtgCA0MpzF4ca+XHf6R6oLgYf0AgCAQSAIjc9u1POqyuLGnPvUlhHuhj2qJWpNYmQjAACYLghC48NodIfn59ae2kPtnbgZOBrmhl2D1lEAAGgVBGGXYA0YhdkwpXevUFuGUACDKAAAoA0QhF0DwxwnL6w7s59QyCmsAh7SCwAAbYIg7CoML3+mT7D48lEKawhzxZ5JiAoZhSUAAICp61QQyuVykQ6NBrqj/sXhhfkNSX+oqsuoKoCGoZHuWEIZvC4AANCiTgXhli1b+Hy+z98qKiqMVZZloDk4c6Km1P3+A4U1CPl4PLSOAgBAyzrbNDp//vyav3l4eBilJkvCFU5TFuU2Pk6nqgB4SC8AALSus0GoUqmePn2qUMBoteZhNgyHF+bX/rqDqufXhzhjVXKiWAJZCAAAzetsEB4+fDgqKsrR0fHNN99UqVp8FKxKpbp79+6lvxUUFHRyu2aEFRKBcxwkN89RsnUMoSg+fhVOCgEAoAUY0YlB3+Xl5S4uLjQarbCwUCgUvvPOO0uXLm12zkGDBmEY5ujoSP44Z86cadOmtbLmhoYGDofT4cJMjbq8ULLvU+7SrRiba/hSxtoJu3NpaSJsx1CzfGK9hb0TOgZ2AoKdgBCCnYAQav9OYLPZON7GKR+9MwW5u7uT//H09Jw7d25CQkJLQcjlcjdu3BgREWHgmgmCsKjXmxNAhMWorhzhzVhm+ELG2gkTvIntj
9Qcjm3nV9X9LO2d0CGwExDsBIQQ7ASEUNfsBKONIywqKuLxeMZam+VxmPCaPPuuouBh92/a3xFTatATMbSOAgBAMzp1Rrhy5coBAwa4uromJSX9/PPP165dM1ZZlgdjshxeWCA68rX7ezsQTuvmrUfzsfhSwpuLdfN2AQDA9HXqjLB///7nz5/fsWOHWCxOTk4eMmSIscqySOzB0TRHl4akP7p/00IBBqMJAQCgWZ06I1ywYMGCBQuMVYo1cJy6pOLrFayQUTQH5+7crpCPrU1RI9TdZ6IAAGD64F6j3Yru2sNu+ITuv9eMjz3GpGGP6uCkEAAA9EEQdjf7Ma8oCrLk2SndvF14EgUAADQLgrC7YQwmb8ay2qPbicZufSoEdBMCAECzIAgpwOw7kNEnsP7cge7c6GgBdqVEo4EoBACAf4MgpIbjlMXSlHjFs5xu26KAjTnbYg9EkIQAAPAvEITUwO3sHeLmiw5vJdTdd+cz6CYEAICmIAgpwx4ymsZza7h6stu2KIRHMgEAQBMQhFRynPam+MoxVVVJ92xOyMevlWrUEIUAAKADgpBKdCd3+3GzRIe+Qp14Bojh3Fiopx12rwqSEAAA/gFBifczTAAAIABJREFUSDFOxAsIxxqu/dY9m4MH1gMAgB4IQqphGO+Vd+svHlFVFnfD1oQCLL5E0w0bAgAAcwFBSD26s4f9uJk1B7cgTZdHVDQfv15OKCAKAQDgbxCEJoEzKg5nssRd30DqyEB+DlhyJbSOAgDAXyAITQOGOb78jvjiL8qywq7eVIwARhMCAMA/IAhNBd3J3X7ibNGhL5FG3aUbEvJx6CYEAAAtCEITwhnxHM5xqD9/qEu3EuGBJVcRsu67oQ0AAJg0CEJTgmFOM1dIbp5tfJzRdRvh2KABTtjNCmgdBQAAhCAITQ3OcXR88W3R4a80cmnXbUXIx+JLoXUUAAAQgiA0Qazg4Uy/gXUnv++6TQgFOFwvAwAAJAhCU+Q4ZVHjkyxFxvUuWv9IdyyjhhAru2j1AABgTiAITRHGsHV69b3GP/erayu7Yv22NBTmgiWVwUkhAABAEJoqhmc/xojnqvdv7KIHFgoFOHQTAgAAgiA0ZcxRk3A7R/G5g12xcnhILwAAkCAITRiGOb2yXJJ8Uf7ontHXHe6G5dYRokajrxgAAMwMBKFJwzkOTq9+IDq4RV1fY9w12+BouDuWUAatowAAawdBaOqYvsF2wyeIDm4x+sN7hXw8HlpHAQBWD4LQDNiPm0Vo1PXnjdxZCA/pBQAABEFoHnDcec4qya3z8ofJRlzrYBesSEJUyIy4SgAAMD8QhOYB5zg6z/1IdOgrVXWZsdZJw1CEB34VBlEAAKwbBKHZYHj158a+XP3jekKpMNY6hXxoHQUAWDsIQnPCiZps49az1ni3IYWH9AIAAAShmeHNWKbIfyC58adR1hbshNUpiCIJZCEAwHpBEJoZjMlyfuPT+nMHjPLMQgyhSA8cWkcBANYMgtD80J09eLNW1vzvc6PcklsowGA0IQDAmkEQmiXbfoM5UVOq9n5CKDp7k7QYAXYZghAAYMUgCM0VN2a6jXtv0dGvO3nHmX4OmJpA+WLIQgCAlYIgNGO8GctUFUX1l37p5HrgSRQAAGsGQWjGMBuG84J1kptnpSnxnVkPdBMCAKwZBKF5o9nzXBaur/ttt6Igu8MrGS3ALpdoIAkBANYJgtDs2Xj05s1cUb1vvbqmomNr6M3B2HQsuxaiEABgjSAILYGt/xBuzPSqPWs1soaOrQG6CQEAVguC0EJwoqYw/QZW713XsTuRCuGRTAAAawVBaDkcJy+iO3tU7/8MadTtXXa0AL9aooF+QgCAFYIgtCAY5vjyO0ijrjn8VXsHF/LZyJWFpddAEgIArA4EoUXBaHTn19eoqsrqzuxv77LQTQgAsE4QhJYGYzBdFvxXlnFTfPVEuxYUCrB4eEgvAMD6QBBaINzO3vXN/5NcOyW5c9HwpYR8PKmMUEEUAgCsDAShZaI5OLss+az+zH75w2QDF3GxRb05WEoVtI4CAKwLBKHForv2cJ6/tubgFsWTTAMXgUEUAAArBEFoyRie/Zxmrazet0FZVmjI/EI+Fl8CbaMAAOsCQWjhbP2HOExaWLVzlbK87SyM4uM3K4jGdo9CBAAAMwZBaPnYoUKHyQurvlulLC1ofU4HBurviN2phNZRAIAVgSC0CuxBUY6TF1V9/1GbWSjkQzchAMC6QBBaC9agSEOyUCjAoZsQAGBVIAitCGtQpOOUxVU7V7eShZEe2N0qQqrqvqoAAIBaEITWhTUwwnHakqqdq5UlT5qdgU1HIU7YzQpoHQUAWAsIQqvDColwnLak8rtVime5zc4QI8CuQOsoAMBq0KkuAFCAFRKBEKre81+XRRtsevTR+61QgK9OhiEUAADKaKQNGplYIxVrpA2ETKKWiqtEDVW1DfX1EsLLf/SEWONuDoLQSrFCIhDCqnZ95LxgHcPTT/dXw92wTBEhViKuDVXVAQAsjUYuJXTiTSNr0EgbNFKxRirWyBoIaYNu+CFbjtKW00DjiGh2ZYhbrLZTMe3YHI6jg0tvJzej1wZBaL1YIaMwuk3V7rXOsz9g+g3STreloSGu2LUy4rleGIXlAQBMHUFopGKNpF4jqddI6zUSsVoqJqTNRx1mw8TZXJzNwVmcv/7D5uIsDo3nKmdwC1V2uUpOptwuVWp3s56DcCyYhwXysGAnLIiHjXfC7P/+Xi4Wi43+d0AQWjXbwHDneR/X7NvgOO1N1sAI7fQYAR5fonmuF43C2gAAVCGUir8CjAy5hjq1tJ6QiNXaKZJ6cgaczcXZXNyOi7PtcTt73M4eZ3Ho7p5/Zx4ZeByczUX4X8eTRjXKqiUeiIgHNcQDEfHgGappJAIcsQFOWKAAW8rDdjthHqxu/XshCK0ds0+gy5LPqnat0UjFdiMmkhOFfOw/N+B6GQAsENEoU4trNQ11GkmdWlKvaajTiEV//UdSr2moVTfUIbUKZ3NxO/t/JZyDM13gTSOn/PVbe4S10W6kJtDjeiLjKfFApHkgIjJqiEIJ0df+r7O9Rf2xICfMm9vWWroYBCFANgJv16VbqnauVjfU2Y99BSE01BXLFxPVjciZSXVxAACDEWqVRlyrFos0YpG6oU5TX/NP5olFZNQhGp1mZ49zebidPc5xoNnZ4xxHukdvGscet3PAOQ40Lg9jdvyMTNSIMkVEShWRVUtkiojUasKegQJ5WIAjFueJfRiCB/IwWxNrbIIgBAghRHfmuy79qmrXGnVtJW/6W3ScNsIdSyjVTPWCATYAmApCqVBVl/0VcmKRul6kkdSp62s04lp1Q61GLCIUjWSS4VwejeOA2zvRndzwXn1xO3sal4dz7HE7B8yGYcSSahpRRg2RKSIyRH+1drLoKIiHBTth4W7Ygn54AA+zM/mcMfkCQXeh2fPclm6p3v9Z1Z5PnOeujhEw40uIqV5UlwWANSEaZeraKnVDrbq2SiMWqeuq1WKRuq5aIxap62oIlUJq7/RXyHF5NHse3UXA
7BOEcx1pHEecy8PZnC4tT6JCWSIiQ0RkioiMGuKBiJCpUCAPI5Pv5T54sBNmjs1IEITgHxiT5fLGJ6Jfd1RsXzl6+rrXchyprggAS0MoFeraSnVdtVpUqa6vVtfXaOq1gVeFMJzm6ELjONIcXXAuj2bvZNPDh+bghHOdaA5OEhXB5XK7rVSlBuXUEWTHXqYIZYiIUinR3xEL4mFBPGxMMB7IQ73sLOHacghC8G84jffSUvHlox4/r+Dw15bLfN279/ItACwAoZCrRBWaump1bZWKDLzaKrWoQl1XTSjkNAcXmqMzzdGV5uBMd/LAvfxpDs40Lo/m4IwxbFtbbxeMHPinZoQKxMQDEfFA9FdrZ2494Wn31wCGV31RsBPuY4/RLCH49EEQgmZwR79Ec3Lfd3jN1uPviLyGUliJUmljY9Px29zgGJrcGx/X0xI/u4ByGrWqtkpdU66qKVfXlKtrq9R11eraSnVtFaFW0nhuNHtnmqML3dHVRuDNChhKc3ShObjgHAeq60YIIYUGVciI7Fr0QPRXD1+miHBiYoE8FMzDJvTC3huA+zua3FUtXQSCEDSPPSjKke2+4Of1BfRJ+QOnU1WGXK6xte14jMlUaNkt9TA3bPcomg1c9wM6RqNW11ZXlpaXFJeqasrp9eW29eXM+gqGpEbB5jXauzfauzXauys4fgp3l0aui8LORWXbXF+dAqFKhCo7PjBJJsNZrHYvLlGiEimqkBMVMlQiJSplqFxGiJXIlYX1tUdBPCzMFZvrhwfxMAdjXkZjTiAIQYv4/fqr399uv3fdYGUBb8Yy415sZiCxWM3ldirB3uiPz4xXTzyv+nU03Wo/58AgGo26tkpVU64Wlauqy/451auvUbIcMwg3CdddzPGoYweJesXUsN1Etq4a/N+HUA1CdQjVIYS65PktKhWdTm/3mh0YyNUWeXOwoa5IwMbdWMidZZaXtHQdCELQGpqDs+vSLaJftlZ+857z/LU0B2eqK2o3Nh0dj6Utv6UedVp1ZhzNkwPNpAARapW6plxVVaKqLFFVkf9K1TXlOMeR7uRGc/agO7kzfILYQ0bTnNwPVDqvSsF+iaHH8Sl+84jFUi4XEsz4OhWEBEHs3r37/Pnzbm5u7777bt++fY1VFjAdmA3D6dX3xZePVmx9x3nOKoZ3INUVtRsNQ9uH07Y+0Iw8rf59LG2QM2ShdVHXVqoqipWVRaoK8l+xuq6K5uhCdxHQXfg0FwHTbxDdRUB39tBr9tAQaPVd9YkCIuF5mp8DvG0sVqeC8Msvv/zxxx83bdp0586dyMjI3NxcDqdrR7EAamAYN/Zlmx4+1fs2cEe/zImaTHVBHbE8CPfmonFnVT9G0p/3hIOaZSJUSlVlsaqsUFnxTFVWqKwoUlUW47ZsultPultPumsPW7+BdLdeNCd3jNbG0U+uRvOuqZ82ENfj6K7/396dhkVx5WsAP1V1qhd6A5qGBtmFjoogKFEk4kYEF8BoQpwkTuJormY3yc06450kk8SZjEmMyXU0JiaPmnG8XuMk4r4nLjiKO5qooGzS7DQ0TQNdp+p+IHG8Pm5Ad5+G/v8+5JFO27wW3fVSVeecuu1YTtDbdb8ICSFLlixZuXJlRkZGVlbW3r17165dO3fuXCeGAx5FMTA58MUl9V+/115y3v83L/VkESZaHohgQ3yYabvI7xPZZwfB4JleTyKCUFXmqC4TzKWO6lKHuZRYarHeiIMi+MBQxeAUdWA/bAhlFT5dfWVzK5q6S4jWMHsmYy8ZOenNul+EVVVVFRUVaWm/3LIgLS3t2LFjUIR9G+cfaJj/kWXjsuqP5+tnL+CDwmkn6rLhBuZgNjd5B7nYJC1O4Vg4MuxVSEONw3zZUVnSUXnZUVlCGqqwPhgHR/DGCJ9h43hjJDaEXLvLQbedbZBydpEnYpm3hvbJWXPgRj0qQqVSqVT+clig1+vPnj17qydbrdb58+f7+v6yUskTTzzx4IMP3ubFbTYbQ3k5cvo8diPwk2dLJ/bVfPqqMvMx2dDxLv1ertgIBgbtGM88cgDn7hI+H+Hw/N/3Pfad4E7W1Qubyy8yMjlnjGCDIriYJOV9U1lDv2tnOAlCBCHUau/hN9ph5p46wn00TJgeLtpaehrbueCdgLq+EXx8fFj2Dqd/ul+EKpWqo6NDFMXO72G3229zgVCpVObm5sbHx3d+GRMTc/uriZIkweVGj94Io7MdpiENa/4iFZ/1mzHfdSscumgjqBHam4XmHCDZP3CbMjz9CpBHvxPcxVJ8OuRPa1m1a5f9W/Gz+NZxsnECTjN64lQbeCcg12yE7l8mCQkJkSSpoqKi88vS0tLQ0NBbPRljnJycfP+vIiMju/19gYfgjeGBL3/KB0dUf/hMe9EZ2nG6TM6hNWO5scHMfXlCUbNLZn0BZ5IkVuXCNVkEET13mHx2TszPwWlGbz/q8jbdL0KtVpuZmfn1118jhOrq6vLy8nJzc50XDPQCDIe1E2f6/+blhm/+atm4TCIC7URdwyD053u5V+LZtDwhvwa60LNJ0h3vAdttzQ6UvVMoapYO5eBIDbSg1+nRwLkPPvjgiy++SEtLGzJkSG5u7ogRI5wVC/QiclNi0CtLSUN17ScvC7VXacfpsrkD2NVj8QO7hHXF3V/7CvReV23S2M1CmJrJy8BannYaQEOP5hHGx8cXFRWdPn06KCgIznZ6M1at0z/5duux3TWfvKTNeLTXTTSc0I/ZPQln7SQ/N0lvD/X4wTPAeY7WStN2kRfi2NeHwHQa79XTJdYUCgUcCIJOPvfez4fFNqz+S/uVc34PPevqcQ3OFe/P5OdwWTvIVRtZdh+HYa/oUVxzXvTbK+LTh8iXaVxOBPy8vRr8+IEz8caIwJeWYH1w9QdPtx7bQztO14T4MD9m4cpWKWunYHXQTgNcbEmh+OIRcdtEDC0I4B0AnIzhZbrs2QFPL2z58fu6zxeQhhraibpAzaPvJ+AoDTMqT6iwwfCZvqlDRL/7kXx9UTycww0LgKExAIoQuAYfEhX40mLFgOTqj5+37lmPxF4zDgWzaNl9XG4Um7aZnLdAF3oG550abWhHmduEpg50OAeHqaAFAUJQhMCFWE495oHAFz9p+/l4zWevOKrLaAfqggVJ7F+Hs2M3C9sroAv7juJm6b48IcGf2ZDO+cA96MCvoAiBa+GAYMMzf1Hde3/tp6805X3Vi+Ya5kax303Ac34kqy/1msPZvktCqKdHb4erpbTNwgtx7JKRsMYs+H+gCIHrMYwqdXLQq0sd5is1Hz7Xfuk07UB3KzWI+SGLe++U+PYJAgeGvdq6YvGhPcI3Y/HTA2GnB24EZweAm3C+hoC577adO9K47hNsDPd98BnsH0Q71J3FaJnD2XjqLqHESr5I43jYi9IgSVK3F5uWEHrnBFlzSdozGQ/0hSNBcBPwsQZupYhLCfr9FwpTUs3H85u3fyM5OmgnurMABdo5CVs60KTtgqUX5AX/1k7Qb/eTnRVSfg60ILglKELgbgyH1WMeCHp1qVBvrnp/Tuux3bQT3ZkKo2/
v5wb5MaPyhNIWOEvaO9S1oQnbBIeI9kzGgb3vNtLAfaAIAR2cTu//2Kv+j/2nde+GuuV/cJhLaCe6A45Bn47kZpvYUXnkZD10oXt1ffpEYaN07/fC/f3YdeM5JVwCArcFRQhokscmBr26VDFoeN3f3mxc+xFp9PTZ9y/Hs5+OZDO3CZvLoAs9166rUvpW4d1h7B+TYHwouDMoQkAby6lHTzUu+AoHhVV/+Fzj+k/FFgvtTLczLZLdlIHnHhQ+/xmmVbhLV44Il54Xn/hB+Of9eGYM7N/AXYE3CvAIjFypSX/Y+PsvWaW66s9zm/K+ktrttEPdUkogcyALf3xWfOMYTKvwIERC8/PJ0vPiwWycGgSHguBuQRECD8KqtLrs2YEvLhbqzVULn7Tlb/PYCfj9tUx+Ds6vlnL3ELuHZvQuLQ40fTc5b5EO5+BouLku6AooQuBxsKGfftYf9HPesp860LJkvu3wVs+sQ3852jkJy1k0fqtQ20Y7Td8mSbc/8q5slcZsEQwKtDUT+8rcFAr0GVCEwEPJwk0BTy/0yZ1vP/evqvdmt/zwnQdOOpRz6JtxXGYoM3KTcLEJzpK6zu2WWDvdII3cRLLDmS9hxQPQLTCsGHg0Lszk+x/vOK5ebt71D+vu/1GPnaZOm8rI5LRz/RuD0NtDuWAfcewWYeP9OCUQTsq51cYS8amD5Is0bircVhB0F7x1QC/A94vWz/pDwFPvdZRdrHpvlnXvBk8bSjNvAPtlGp66S9hYAkNJXeAWo0YXnRFfzBe3T8TQgqAn4IgQ9Bp8v/763y1wmEusu/5h/tMTqpSJ6rQczjeAdq5fTA5jdk/CWTvJmQbp7aEc7Th9nCCi+UfIwSrpYDYXroajcNAjUISgl+GDI/0ff1NoqLYd2lK96BlZ5EDthEdkkQNo50IIoXh/Jj+Hm7KDVLaSv6VyGI5SXMPqQDP2CgihA9lYy9NOA3o/+KSCXgn7B+myZwf/cZXClFS/emHNR8+3HtuNRPqnJUN8mANZuMImZe0UrA7aafqK6+8+cdkqjfheiNUymzOgBYFzQBGCXoyRK9VjHghe8LVmfG7Lwc1VC59sObBJbLPRTaXm0aYJOFLNpOUJV20wlNSZ8muktDzy7CC4uS5wJjg1Cno/llMmjVYmje64ct7643fN21YrE0apUifLwk20EmEWLR/FLSkU0zaTLZkc3ACoxySEmPWXxefzyVej8ZQw2J7AmaAIQd8hixqkjxpErI2tR3c1rPozw/OqkZNVKZmMnM49eOYPZoN90PgtwtrxeFww7Lu7T5Kkdol585i4bwoeBL9VAGeDIgR9Dafx06Q/rBmf237pVMvhrc07/q5MTFOnTuFD+7s/zMPRrEHJPLJX+HAEB2tAd08bQc8eIK+JTH4O3FYQuAQUIeijGEZuSpKbkkhTve3IjrqV73BqX597032GjmXVOncGGRfM7J2Cp+wgJVa0IAm6sGtq7GjabsHEIzUvQQsCF4GPJejjOJ1em/lo8B9X6abNc1SVVr0/p/Zvb7Ye2y11tLstwyBfJj8Hf18q/u5H4qA/srXXuNQkjdosJAcwX6Sxt1liDYAegiIE3oFh5NFxfg+/EPynterUya2nDpjf+W3D3xe1XzyJ7rCes3MYlWh/Fq5vQ1N2CM0wreIu7Loqjdos/FcSDBAFLgenRoF3YXiZMjFNmZhGmupbC/ZaNi6XHO3KxNHKxFGyMNeOMlVh9M8J3Px8kpYnbM7kwlSwd7+lFT+Lbx0nG9JxmpFBqGs35gWgq6AIgZfidHpNeq4mPddxtbj11IGG1R8gInR2pCzM5KLdLseg/07llhSKIzeR7ydwwwJg534jCaF3TpC/F0n7s/A9Otg+wB2gCIG34/v11/Xrr5syy1FVaj91oHHtR2KbXZmQqkwcLY8a5IpGnD+YDVejSduFr0bjrHDY1/+bTUAz95G6dik/BwcoaKcBXgOKEIBf8MYIfmKEduJMh7nEfvqgZf0Ssc2uHJyiiBshj0lgeGfe73VaJGv0YabvEt4Zxs0dAJfqEUKoslXK3kmS9Mz6dHzjbQXh1ChwJShCAG7EB0fywZHaiTMd1WVthUesu9Y1rFooj01UxA1XDLyX0+md8l1GBjKHcvCUHeRco7Q4xdvHg5xpkHJ2klkmBm7cAdwPihCAW+KDwvmgcE36w2Krtf3iqbaLJ5u3rGJVGkVciiJuRM9PnEZrmEPZeNou4eG9ZM0YTumtH8ftFdITPwhLUrjf9L/VwfHt7lAPQA956ycPgK5gfTSd42jQQ8+2Xz7fdv5flvVLSEuz4p4kuSlJcU8S52vo3iv7y9HOSXj2jyR9q/B9BjZ434WxJYXiorPi5gx8rwGqDtABRQhAV7CcPCZeHhOvy3mSNDd2XC5su3iyeesqBsvk9yQpTElyUxLro+7SS8o59M047o2jJC1P2JLJ9dd6Sx8IInruMDlSI+Xn3GEyyfW3YQLA6aAIAegmTuv3y2GiJDmuXm67eNJ2ZHvDPz7mgyMVpkRZ9GB51KC7XO+bQeiD4ZxJJ96XJ2xIx6OMfX+nb3WgR/YKDhH9kIV1dxyHBGdGgStBEQLQYwzDh/bnQ/trxj8kOTo6Ss63F52x7l5XX17EB4XLo+NkMQnyqEGsSnv7l5lzDxvsw0zfLSwfxU2P7MtDSa9YpawdJL0fsziF46DhAG1QhAA4E8PL5LGJ8thEhJAkODrKLnYUn7Ud2tL4zSLOzyDvP1gWNUgWfg8OCLnpQJvJYcyOSThnJyltQS8N7ptdmF8jPbhb+EMi9+ygu/8HwiEhcCEoQgBchcG8PDpOHh2nQQiJYsfV4o7LhfbCI81bV4s2Kx8WKws3ycJNsjAT5x947W8l6ZlD2dyUHaTEKn3c5w6Y1hWL84+QVWPwxNCu/MNgHiFwJShCANyCZWVhsbKwWPWYaQghsc3mqCxxlF+yF+Y3bVoptlqxMUIeHSePHsSH3ROu9TuYjR/aIzy4m6wd10fm1UkIvXtS/PqiuGcyHuwHrQY8CBQhABSwClXnwWLnl6Sp3lFxqaO8qOXwto7STxiW5cNi/zc0dnlDdM53A5aNlmk0dPP2VDtBcw6QS01Sfg42wm0FgYeBIgSAPk6n53R6RVxK55dCvbmj7KKj/NKc2n/OKC02F/gWaGNL1JHlqvBSVUStIkjsVRfMqu2oxi49GMXun4K9dtEA4MngXQmAx8H6YKwPRkljEEIGSbIVXhhSWzG0rhTXbOfLypjWJhIQLhgiHIZwEhgh6COIb+AdX5MWBqFAJQpSMjcuH9olcI0QuBIUIQCejWEiIvtp4gdce0ASHEJdpaP8kqOqzHFmi1BVJtqasSEEG8N5YwRvDMfGCOxv7FvNAaNGgQtBEQLQyzCY540RvDHi2iOivUWoKnWYSx1VpW0XTgjmUsnRgQ0hXEAw1gdjQwjWh+CAYE6n71vtCIBzQBEC0OuxSrUsKk4WFXftEbG1Rag3k/oqod7cUXqh9fh+Ul9Fmus5rZ7TG3FAMNYbsT6Y0xt5Y4Rz7zDlEpIk0Y4A+jAoQgD6IN
ZHLfOJRWGx1z8odbQJdWahrrLzv20XTpA6M2lu4HwNOCAYBwTjgBAuIAT7GThdAKvW0QoPgJtBEQLgLRiZgg+J4kOirn9QIgKpr/q1ICvbLp0mllpiqZM62jhfA+cbwPkaOquR8zNwfoGcTs/6uH0yB1wiBK4ERQiAV2M4jANDcWDoDY9Ljg7SWEMsdcRSKzTWdlwtJuf+1fmIRBycXxD2DeB0es4/iPMN4LT+nFbPav04tS9inb8ynIQkBpoQuAwUIQDgJhhedtOCRAhJ7XahsZZYaklTHWms7Sj5iTQ3kqZ60doo2ppZlZbV+nFaPafx5bR6Vq1j1TpO68+qdZzGl1XpYMAO8DRQhACArmHkSt4YzhvDb/L/RJG0WMTmRtJcT6yNpKlBqDeLpT+R5gaxpYm0NImtVk6tY1U6Vu3LaXxZtY5VaVmVjlNpWY0vq9KyPhrWR3Pj+B2YRwhcCYoQAOA8LMtp/TmtP4/63/wJIiEtTWJLE7E2ii1NYkuTaGsSzCXtLRbR1iTarKKtWbQ1Iw7/UooqDavSMgwLFwmB60ARAgDciOV+bcqo2zxLareLNqvY2kxszWKrVWy1Sv2HuC0j8DZQhAAAj8PIlZxcyfkH8r8+IlmtNAOBPq1v3vkTAAAAuEtQhAAAALyaJxYhIaS8vJx2CspgIyCERFEsKyujnYIySZJKS0tpp6AMNkKnkpIS2hHoKy0tdfqKe55YhDU1Nenp6bRTUNbY2Dh69GjaKShraWkZOXIk7RSUtbW1DR8+nHYKygghSUlJtFPQN2TIEFh1ddiwYQ6Hw7mv6YlFCAAAALgNFCEAAACvBkUIAADAq7lpHmFCQsJjjz3G8/ydn4qQKIp2u71//1usTOEdRFEUBMFv2HnVAAAFgElEQVTLN4IkSQzDwEbged7LNwJCSKlUwkZQq9UxMTG0U1CmUCgGDBjA3PWSe5s3bx44cODtn8O459Kr3W43m81u+EYAAADANaGhoTLZHW497aYiBAAAADwTXCMEAADg1aAIAQAAeDUoQgAAAF4NihAAAIBX8+jbMJnN5u+++66oqMjHx2f8+PHjxo2jnYgCm822devWkydPYownTpyYmppKOxEFkiQVFxcfP368qalp5syZPj4+tBO5icPhWLly5cWLFxMSEh5//HGW9cbfXK9cuVJQUNDY2DhjxgydTkc7DgWiKObn5+/bt6+pqSkxMXHGjBkYe/Su2xUIIRs2bCgsLLTZbPHx8Y8++qhcLnfWi3v056qgoODMmTPh4eEY4xkzZnz22We0E1GwePHiFStWaDQajuOmTJmyYsUK2okoKC4uTklJWb58+bx585qbm2nHcZ+ZM2euXbs2NjZ26dKlL7zwAu04FNTU1AwdOrTzR19dXU07Dh2FhYWzZs2y2Wz9+vVbtGjR1KlTaSeiwG63r1+/XqVSRURErFixIiMjw4lTHnrN9Inly5evWrUqPz+fdhB3a2trUygUnX9esWLFsmXLTp48STeS+4miyLKsxWLx8/Mzm81Go5F2Ine4dOlSQkJCZWWln59feXm5yWQqLS0NDAykncutOhdVIIRgjC9cuGAymWgnoqCjowNj3Hk+oKqqKiQk5PLly5GRkbRzUVNfXx8QEFBWVhYWFuaUF/ToI8JrHA7H0aNH4+PjaQeh4FoLIoTa2trUajXFMLR45ynBAwcOJCcn+/n5IYTCwsKio6OPHDlCO5S73f0CIn2YTCa79hFob29HCHnnfuCagwcPGo1Gg8HgrBf09BPNxcXFGRkZVVVV8fHxu3fvph2HJrPZvHDhwi+//JJ2EOAmZrP5+o96YGBgZWUlxTyAOkmSnn/++Tlz5gQEBNDOQsfkyZMLCgoEQcjLy7v+IKGHKP+iffjwYXwzp0+f7nxCZGRkQUHBkSNH9Hr9U089RTeti9TU1Nx0I2zcuPHacywWS3Z29rx587KysihGdZ3y8vKbboQtW7bQjkYNxpgQcu1Lh8Nxx5WiQN/22muv1dbWLl68mHYQatatW3f8+PE33ngjNze3oaHBaa8r9RKnTp3ieZ4QQjsIBRaLJTk5+ZVXXqEdhLLGxkaEkNlsph3ETdasWTNs2LBrX0ZGRm7bto1iHooEQUAIXbhwgXYQml599dXk5GSLxUI7iEcwmUzffvuts17Noy+9tLa2XvtzQUFBaGioF14rstls2dnZqampixYtop0FuFVmZub58+eLiooQQgUFBRaLZfTo0bRDAToWLFiwc+fO7du3e+cEEoSQ3W6Xfh3aWVlZWVlZGR4e7qwX9+hrhPPmzSspKYmKiqqsrDx58uSaNWtoJ6Jg4cKFhw4dstlsycnJCCGNRrNv3z7aoShITU212+0IoczMTJ7njx071ueHURgMhtdff338+PETJkzYtm3bu+++6z0TKK83YcKEzpNg06dPVygU+/fv97ahIkePHn3//fdjYmIyMzM7H/n888+HDRtGN5Wbbd++/fXXXx86dCghZO/evbNnz+7cJTqFR0+faGtrO3bsWEVFhV6vHzFihHf+KlRRUXH99CmO4xITEynmoeXEiRPXv1e9Zy9w4sSJCxcuJCQkxMXF0c5Cx+nTpztPjXZKTEzkOI5iHvdraWm5cOHC9Y+YTCaNRkMrDy3nzp376aefMMbx8fHOvTmlRxchAAAA4Gped8kNAAAAuB4UIQAAAK8GRQgAAMCrQRECAADwalCEAAAAvBoUIQAAAK8GRQgAAMCrQRECAADwalCEAAAAvBoUIQAAAK8GRQgAAMCr/R9iywFEzWj0YAAAAABJRU5ErkJggg==", + "image/svg+xml": "\n\n\n \n \n \n\n\n\n \n \n \n\n\n\n \n \n \n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "text/html": [ + "\n", + "\n", + "\n", + " \n", + " \n", + " \n", + "\n", + "\n", + "\n", + " \n", + " \n", + " \n", + "\n", + "\n", + "\n", + " \n", + " \n", + " \n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "# Declaramos las variables que vamos a seguir\n", + "# Función ideal que queremos aproximar\n", + 
"f_regression(x)=(x<1) ? exp(-x)-4 : log(x)\n", + "data_set_size = 5 \n", + "n = data_set_size # Número de neuronas \n", + " # coincide con el tamaño del conjunto\n", + "#Partición homogénea del dominio [-3,3]\n", + "K_range = 3\n", + "X_train= Vector(LinRange(-K_range, K_range, n)) \n", + "Y_train = map(f_regression, X_train) # Imágenes de la partición\n", + "\n", + "M = 1\n", + "# USO DE LA FUNCIÓN DE INICIALIZACIÓN DE LOS PESOS\n", + "h = nn_from_data(X_train, Y_train, n, M)\n", + "\n", + "# Imprimimos la red neuronal \n", + "display(Text(\"La red neuronal obtenida es :\"))\n", + "println(h)\n", + "\n", + "# Vamos a ver cómo aproxima los resultados \n", + "# Función que dado un punto lo evalúa con forward_propagation\n", + "# y la función de activación Rampa\n", + "evaluate(x)=forward_propagation(h,\n", + " RampFunction,x)\n", + "\n", + "plot(x->evaluate([x])[1],\n", + " -K_range,K_range, \n", + " label=\"red neuronal n=$n\")\n", + "plot!(f_regression,\n", + " label=\"f ideal\",\n", + " title=\"Comparativa función ideal y red neuronal n=$n\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Funciones de activación \n", + "\n", + "### Funciones de activación no dependientes de parámetros\n" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "CosineSquasher(0.6963141756074113) = 0.8206971218447603\n", + "RampFunction(0.708936898471023) = 0.708936898471023\n", + "ReLU(0.279572614615742) = 0.279572614615742\n", + "Sigmoid(0.6463954880294331) = 0.6561977350585728\n", + "HardTanh(0.27377994734523636) = 0.27377994734523636\n" + ] + } + ], + "source": [ + "funciones_activacion = [\n", + " CosineSquasher,\n", + " RampFunction,\n", + " ReLU, \n", + " Sigmoid, \n", + " HardTanh\n", + "]\n", + "\n", + "for σ in funciones_activacion\n", + " x = rand()\n", + " println(\"$(σ)($x) = $(σ(x))\")\n", + "end" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Funciones de activación dependientes de parámetros\n", + "\n", + "Existen funciones de activación que depende de parámetros, podemos definirlas eficientemente a partir de macros: \n" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "f(4.027026570178562) = 4.027026570178562\n", + "f(4.380146279765949) = 4.380146279765949\n", + "f(-2.5822478995523013) = -0.025822478995523014\n" + ] + } + ], + "source": [ + "# Concretamos los parámetros de los que dependen\n", + "# de macros\n", + "umbral = @ThresholdFunction(x->x,0) \n", + "indicadora = @IndicatorFunction(0)\n", + "lRelu = @LReLU(0.01)\n", + "\n", + "# Evaluamos en puntos concretos\n", + "dependientes_parametro = [umbral, indicadora, lRelu]\n", + "for σ in dependientes_parametro\n", + " x = (rand()-0.5)*10\n", + " println(\"$(σ)($x) = $(σ(x))\")\n", + "end" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Backpropagation \n", + "Ejemplo de uso de Backpropagation " + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "En error en la iteración 0 es: 1.334625428920788\n", + "El error en la iteración 1 es: 0.14080024193105276\n", + "El error en la iteración 2 es: 0.12226659859194122\n", + "El error en la iteración 3 es: 0.12146473358191419\n" + ] + } + ], + "source": [ + "n = 3 # número de neuronas \n", + " 
+    "η = 0.005 # we want the error to decrease without overshooting, hence the \"small\" learning rate\n",
+    "tol = 0.5 # error tolerance we allow, since there may be cases where η is too large\n",
+    "data_set_size = n\n",
+    "cosin(x,y) = cos(x) + sin(y) # ideal function\n",
+    "h = RandomWeightsNN(2, n, 1) # input dimension 2, output dimension 1\n",
+    "X_train = (rand(Float64, (data_set_size, 2)))*3\n",
+    "Y_train = map(v->cosin(v...), eachrow(X_train))\n",
+    "error = error_in_data_set(\n",
+    "    X_train,\n",
+    "    Y_train,\n",
+    "    x->forward_propagation(h, RampFunction, x)\n",
+    ")\n",
+    "println(\"El error en la iteración 0 es: $error\")\n",
+    "for i in 1:n\n",
+    "    backpropagation!(h, X_train, Y_train, RampFunction, derivativeRampFunction, n)\n",
+    "\n",
+    "    error = error_in_data_set(\n",
+    "        X_train,\n",
+    "        Y_train,\n",
+    "        x->forward_propagation(h, RampFunction, x)\n",
+    "    )\n",
+    "    println(\"El error en la iteración $i es: $error\")\n",
+    "end"
+   ]
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "Julia 1.7.1",
+   "language": "julia",
+   "name": "julia-1.7"
+  },
+  "language_info": {
+   "file_extension": ".jl",
+   "mimetype": "application/julia",
+   "name": "julia",
+   "version": "1.7.1"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}
diff --git a/Memoria/capitulos/Introduccion.tex b/Memoria/capitulos/Introduccion.tex
index d472ea1..ce7dd32 100644
--- a/Memoria/capitulos/Introduccion.tex
+++ b/Memoria/capitulos/Introduccion.tex
@@ -3,26 +3,58 @@
 %%
 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
 
-\part{Teoría subyacente}
-
-No es usual en un manual que trate sobre redes neuronales encontrarse en su interior con un
-capítulo sobre teoría de la aproximación, pero tampoco es nuestra intención
-hacer de este documento una recopilación de todo lo usual, sino todo lo contrario.
-
-Existe en la actualidad un desequilibrio entre resultados empíricos y teóricos de redes neuronales llegando incluso a contradicción (como se comenta en la introducción del capítulo \ref{chapter:Introduction-neuronal-networks}), será por tanto
-nuestro primer objetivo conseguir una revisión y purga de cualquier artificio existente sobre redes neuronales carente de fundamento matemático.
-
-El fin de esto no es más que construir una teoría sólida que de cabida a optimizaciones de
-fundamento teórico. Para ello nuestro \textit{modus operandi} será el siguiente:
-Se describirá el conjunto y características de problemas que pretendemos abarcar en el capítulo \ref{chapter:Introduction-neuronal-networks}.
-Se comentará las limitaciones e inconvenientes que presenta un enfoque clásico
-basado en teoría de la aproximación en el capítulo \ref{chapter:teoria-aproximar}.
-A continuación en el capítulo \ref{chapter4:redes-neuronales-aproximador-universal},
-presentarán las redes neuronales como un modelo eficiente.
-
-Al final del mismo capítulo se introduce la definición que hemos determinado por conveniente de red neuronal y que es producto de los capítulos
-\ref{chapter:teoria-aproximar} y \ref{chapter4:redes-neuronales-aproximador-universal}.
-
-Tras todo el fundamento teórico en \ref{chapter:construir-redes-neuronales} se explicitará el diseño de la red neuronal modelizada así como los algoritmo de evaluación y aprendizaje.
-
-En los capítulos \ref{funciones-activacion-democraticas-mas-demoscraticas} y \ref{section:inicializar_pesos} se explican además otros resultados para optimizar el coste computacional.
\ No newline at end of file
+\chapter*{Introduction}
+
+\section*{Origin}
+Machine learning is nowadays a field in continuous expansion. From image
+processing to stock market prediction, neural networks are being adopted
+across every field of knowledge. \\
+
+
+Despite the pragmatism that prevails in today's neural networks, the concept
+of an artificial neuron was born in the first third of the twentieth century
+with the article \textit{A logical calculus of the ideas immanent in nervous activity} \cite{primer-articulo}, as an
+attempt to model human thought as a logical process. This first stage of
+research culminated in the invention of the perceptron in 1958 by Frank
+Rosenblatt with the article \textit{The perceptron: a probabilistic model for information storage and organization in the brain} \cite{rosenblatt1958perceptron}. It is worth noting that all of this research
+had a goal beyond solving complex problems with machines or computers:
+understanding the processes of human learning and cognition.\\
+
+These early models had their shortcomings and met with reservations from the
+scientific community, and when, in 1969, a proof was published that they were
+unable to solve simple logical problems (see \cite{minsky69perceptrons}), the
+field nearly died. It was not until 1986, with the discovery of a more complex
+model, that these limitations were overcome, giving rise to today's neural networks
+\cite{10.5555/104279.104293}.\\
+
+The 1986 model, however, used a method that its authors were unable to
+relate to the world of cognition, which marked the beginning of the
+separation of neural networks from the fields of psychology and neuroscience,
+and the start of the current problem-solving focus.
+These new discoveries, together with the improvement in the computational
+capabilities of computers and the formal proof of their effectiveness in 1989 \cite{HORNIK1989359}, triggered a surge of interest in
+neural networks that lasts to this day.\\
+
+
+\section*{Problem description, motivation and objectives}
+Although neural networks have long since left psychology behind, and despite
+their extensive use today, they remain an incipient area of mathematics whose
+great theorems are still to be discovered.
+Faced with such an imbalance between empirical and theoretical results, our
+first objective will be to carry out a review and purge of any existing
+practice around neural networks that lacks mathematical foundation,
+the aim being to build a solid theory that makes room for
+theoretically grounded optimizations.
+
+\section*{Techniques, mathematical areas and sources used}
+
+The main article around which the project is structured is
+\textit{Multilayer Feedforward Networks are Universal Approximators} \cite{HORNIK1989359},
+ written by Kurt Hornik, Maxwell Stinchcombe and Halbert White. As the
+ cornerstone of the approximation theory part we used the manual \textit{The Elements of Real Analysis} \cite{the-elements-of-real-analysis}; finally, the reference manuals followed on machine learning were
+ \textit{Learning From Data} \cite{MostafaLearningFromData} and \textit{Pattern Recognition and Machine Learning} \cite{BishopPaterRecognition}.
+
+ The techniques and mathematical areas employed are discussed in detail in Sections \ref{ch01:Herramientas} and \ref{ch01:asignaturas}.
+
diff --git a/Memoria/img/0-metodologia/chart.png b/Memoria/img/0-metodologia/chart.png
new file mode 100644
index 0000000..4d7e241
Binary files /dev/null and b/Memoria/img/0-metodologia/chart.png differ
diff --git a/Memoria/img/7-algoritmo-inicializar-pesos/experimento/grafico-bigotes-error_entrenamiento.png b/Memoria/img/7-algoritmo-inicializar-pesos/experimento/grafico-bigotes-error_entrenamiento.png
new file mode 100644
index 0000000..e7bc232
Binary files /dev/null and b/Memoria/img/7-algoritmo-inicializar-pesos/experimento/grafico-bigotes-error_entrenamiento.png differ
diff --git a/Memoria/img/7-algoritmo-inicializar-pesos/experimento/grafico-bigotes-error_test.png b/Memoria/img/7-algoritmo-inicializar-pesos/experimento/grafico-bigotes-error_test.png
new file mode 100644
index 0000000..1862f1a
Binary files /dev/null and b/Memoria/img/7-algoritmo-inicializar-pesos/experimento/grafico-bigotes-error_test.png differ
diff --git a/Memoria/img/7-algoritmo-inicializar-pesos/experimento/grafico-bigotes-tiempo.png b/Memoria/img/7-algoritmo-inicializar-pesos/experimento/grafico-bigotes-tiempo.png
new file mode 100644
index 0000000..18655f7
Binary files /dev/null and b/Memoria/img/7-algoritmo-inicializar-pesos/experimento/grafico-bigotes-tiempo.png differ
diff --git a/Memoria/img/7-algoritmo-inicializar-pesos/f_ideal_y_rn_con_100_neuronas.png b/Memoria/img/7-algoritmo-inicializar-pesos/f_ideal_y_rn_con_100_neuronas.png
new file mode 100644
index 0000000..1db4ddd
Binary files /dev/null and b/Memoria/img/7-algoritmo-inicializar-pesos/f_ideal_y_rn_con_100_neuronas.png differ
diff --git a/Memoria/img/7-algoritmo-inicializar-pesos/f_ideal_y_rn_con_10_neuronas.png b/Memoria/img/7-algoritmo-inicializar-pesos/f_ideal_y_rn_con_10_neuronas.png
new file mode 100644
index 0000000..3178683
Binary files /dev/null and b/Memoria/img/7-algoritmo-inicializar-pesos/f_ideal_y_rn_con_10_neuronas.png differ
diff --git a/Memoria/img/7-algoritmo-inicializar-pesos/f_ideal_y_rn_con_20_neuronas.png b/Memoria/img/7-algoritmo-inicializar-pesos/f_ideal_y_rn_con_20_neuronas.png
new file mode 100644
index 0000000..a25eace
Binary files /dev/null and b/Memoria/img/7-algoritmo-inicializar-pesos/f_ideal_y_rn_con_20_neuronas.png differ
diff --git a/Memoria/img/7-algoritmo-inicializar-pesos/f_ideal_y_rn_con_2_neuronas.png b/Memoria/img/7-algoritmo-inicializar-pesos/f_ideal_y_rn_con_2_neuronas.png
new file mode 100644
index 0000000..ec8fdae
Binary files /dev/null and b/Memoria/img/7-algoritmo-inicializar-pesos/f_ideal_y_rn_con_2_neuronas.png differ
diff --git a/Memoria/img/7-algoritmo-inicializar-pesos/f_ideal_y_rn_con_3_neuronas.png b/Memoria/img/7-algoritmo-inicializar-pesos/f_ideal_y_rn_con_3_neuronas.png
new file mode 100644
index 0000000..e9adaab
Binary files /dev/null and b/Memoria/img/7-algoritmo-inicializar-pesos/f_ideal_y_rn_con_3_neuronas.png differ
diff --git a/Memoria/img/7-algoritmo-inicializar-pesos/f_ideal_y_rn_con_51_neuronas.png b/Memoria/img/7-algoritmo-inicializar-pesos/f_ideal_y_rn_con_51_neuronas.png
new file mode 100644
index 0000000..617b0db
Binary files /dev/null and b/Memoria/img/7-algoritmo-inicializar-pesos/f_ideal_y_rn_con_51_neuronas.png differ
diff --git a/Memoria/img/7-algoritmo-inicializar-pesos/f_ideal_y_rn_con_5_neuronas.png b/Memoria/img/7-algoritmo-inicializar-pesos/f_ideal_y_rn_con_5_neuronas.png
new file mode 100644
index 0000000..4a76c98
Binary files /dev/null
and b/Memoria/img/7-algoritmo-inicializar-pesos/f_ideal_y_rn_con_5_neuronas.png differ
diff --git a/Memoria/img/7-algoritmo-inicializar-pesos/f_ideal_y_rn_con_72_neuronas.png b/Memoria/img/7-algoritmo-inicializar-pesos/f_ideal_y_rn_con_72_neuronas.png
new file mode 100644
index 0000000..e48aacd
Binary files /dev/null and b/Memoria/img/7-algoritmo-inicializar-pesos/f_ideal_y_rn_con_72_neuronas.png differ
diff --git a/Memoria/img/7-algoritmo-inicializar-pesos/f_ideal_y_rn_con_7_neuronas.png b/Memoria/img/7-algoritmo-inicializar-pesos/f_ideal_y_rn_con_7_neuronas.png
new file mode 100644
index 0000000..0d8a738
Binary files /dev/null and b/Memoria/img/7-algoritmo-inicializar-pesos/f_ideal_y_rn_con_7_neuronas.png differ
diff --git a/Memoria/img/7-algoritmo-inicializar-pesos/f_ideal_y_rn_con_90_neuronas.png b/Memoria/img/7-algoritmo-inicializar-pesos/f_ideal_y_rn_con_90_neuronas.png
new file mode 100644
index 0000000..95f2af0
Binary files /dev/null and b/Memoria/img/7-algoritmo-inicializar-pesos/f_ideal_y_rn_con_90_neuronas.png differ
diff --git a/Memoria/library.bib b/Memoria/library.bib
index 393e101..62948e9 100644
--- a/Memoria/library.bib
+++ b/Memoria/library.bib
@@ -1,3 +1,59 @@
+%----- History of neural networks
+
+@article{primer-articulo,
+  abstract = {Because of the ``all-or-none'' character of nervous activity, neural events and the relations among them can be treated by means of propositional logic. It is found that the behavior of every net can be described in these terms, with the addition of more complicated logical means for nets containing circles; and that for any logical expression satisfying certain conditions, one can find a net behaving in the fashion it describes. It is shown that many particular choices among possible neurophysiological assumptions are equivalent, in the sense that for every net behaving under one assumption, there exists another net which behaves under the other and gives the same results, although perhaps not in the same time. Various applications of the calculus are discussed.},
+  author = {McCulloch, Warren S.
and Pitts, Walter},
+  doi = {10.1007/BF02478259},
+  issn = {1522-9602},
+  journal = {The bulletin of mathematical biophysics},
+  number = {4},
+  pages = {115--133},
+  title = {A logical calculus of the ideas immanent in nervous activity},
+  url = {https://doi.org/10.1007/BF02478259},
+  volume = {5},
+  year = {1943}}
+% presentation of the perceptron
+@article{rosenblatt1958perceptron,
+  title={The perceptron: a probabilistic model for information storage and organization in the brain.},
+  author={Rosenblatt, Frank},
+  journal={Psychological review},
+  volume={65},
+  number={6},
+  pages={386},
+  year={1958},
+  publisher={American Psychological Association}
+}
+% book showing the shortcomings of the perceptron
+@book{minsky69perceptrons,
+  address = {Cambridge, MA, USA},
+  author = {Minsky, Marvin and Papert, Seymour},
+  publisher = {MIT Press},
+  title = {Perceptrons: An Introduction to Computational Geometry},
+  year = {1969}
+}
+@inbook{10.5555/104279.104293,
+  author = {Rumelhart, D. E. and Hinton, G. E. and Williams, R. J.},
+  title = {Learning Internal Representations by Error Propagation},
+  year = {1986},
+  isbn = {026268053X},
+  publisher = {MIT Press},
+  address = {Cambridge, MA, USA},
+  booktitle = {Parallel Distributed Processing: Explorations in the Microstructure of Cognition, Vol. 1: Foundations},
+  pages = {318--362},
+  numpages = {45}
+}
 %----- Teoría de la aproximación -----
 % Texto principal del que se ha sacado
 @book{the-elements-of-real-analysis,
@@ -6,7 +62,43 @@ @book{the-elements-of-real-analysis
   year={1947},
   publisher={John. Wiley \& Sons}
 }
-
+% --- sorting algorithms
+@article{Quicksort,
+  author = {Hoare, C. A. R.},
+  title = "{Quicksort}",
+  journal = {The Computer Journal},
+  volume = {5},
+  number = {1},
+  pages = {10--16},
+  year = {1962},
+  month = {01},
+  abstract = "{A description is given of a new method of sorting in the random-access store of a computer. The method compares very favourably with other known methods in speed, in economy of storage, and in ease of programming. Certain refinements of the method, which may be useful in the optimization of inner loops, are described in the second part of the paper.}",
+  issn = {0010-4620},
+  doi = {10.1093/comjnl/5.1.10},
+  url = {https://doi.org/10.1093/comjnl/5.1.10},
+  eprint = {https://academic.oup.com/comjnl/article-pdf/5/1/10/1111445/050010.pdf}
+}
+
+@book{merge-sort,
+  title = {The Art of Computer Programming, Vol. 3: Sorting and Searching},
+  note = {Section 5.2.4: Sorting by Merging},
+  author = {Knuth, Donald},
+  year = {1998},
+  publisher = {Addison-Wesley},
+  isbn = {0-201-89685-0},
+  pages = {158--168},
+  edition = {2nd}
+}
+% comparison of quicksort and merge sort
+@article{quicksort-vs-merge-sort,
+  author = {Ali, Irfan and Lashari, Haque Nawaz and Keerio, Imran and Maitlo, Abdullah and Chhajro, M.
and Malook, Muhammad},
+  year = {2018},
+  month = {01},
+  pages = {192--195},
+  title = {Performance Comparison between Merge and Quick Sort Algorithms in Data Structure},
+  volume = {9},
+  journal = {International Journal of Advanced Computer Science and Applications},
+  doi = {10.14569/IJACSA.2018.091127}
+}
 % ---------- Introducción a las redes neuronales -------
 % Importancia y estado del arte del aprendizaje automático en la actualidad
@@ -700,7 +792,23 @@ @article{Liskov-principle
   numpages = {31},
   keywords = {Larch, subtyping, formal specifications}
 }
-
+%%%% Julia
+@article{virtudes-de-julia,
+  author = {Jeff Bezanson and Stefan Karpinski and Viral B. Shah and Alan Edelman},
+  title = {Julia: {A} Fast Dynamic Language for Technical Computing},
+  journal = {CoRR},
+  volume = {abs/1209.5145},
+  year = {2012},
+  url = {http://arxiv.org/abs/1209.5145},
+  eprinttype = {arXiv},
+  eprint = {1209.5145}
+}
 %%%%%%%%%%%%%%%%%% metodología %%%%%%%%%%%%%%
 %% Descripción del desarrollo ágil en la ciencia
 @article{DBLP:journals/corr/abs-2104-12545,
diff --git a/Memoria/paquetes/comandos-entornos.tex b/Memoria/paquetes/comandos-entornos.tex
index b917d5e..a6fcadc 100644
--- a/Memoria/paquetes/comandos-entornos.tex
+++ b/Memoria/paquetes/comandos-entornos.tex
@@ -8,10 +8,30 @@
 \newcommand{\Q}{\mathbb{Q}} % Racionales
 \newcommand{\C}{\mathbb{C}} % Complejos
 
+% Other spaces
+\newcommand{\D}{\mathcal{D}} % The training data set
+
 %%%%%%%%% Mis comandos %%%%%%%%%
 % Para escribir código y pseudo código
 \usepackage{minted}
-
+\usemintedstyle{friendly}
+\definecolor{sutilGreen}{rgb}{0.850, 0.996, 0.807} % for the code background
+%\definecolor{sutilBackground}{rgb}{0.933, 0.905, 0.866}
+\definecolor{sutilBackground}{rgb}{0.99, 0.95, 0.9}
+
+\newminted{code}{
+    frame=single,
+    framesep=10pt,
+    baselinestretch=1.2,
+    bgcolor=sutilBackground,
+    %linenos
+}
+\newminted{example}{frame=single,
+    framesep=10pt,
+    baselinestretch=1.2,
+    %bgcolor=sutilBackground,
+    %linenos
+}
 \usepackage{algorithmic}
 % Para la definición de redes neuronales de una sola capa
 \newcommand{\Hu}{\mathcal{H}(X,Y)} % Espacio de las redes neuronales
@@ -85,10 +105,11 @@
 % Para las notas del margen
 %Nota los colores seleccionados han sido creados con una paleta inclusiva
 % https://palett.es/6a94a8-013e3b-7eb645-31d331-26f27d
-\definecolor{darkRed}{rgb}{0.2,1,0.7}%{ 0.149, 0.99, 0.49}%{1,0.1,0.1}
+\definecolor{darkRed}{HTML}{9E6D0B}%{rgb}{0.2,1,0.7}%{ 0.149, 0.99, 0.49}%{1,0.1,0.1}
+
 \definecolor{dark_green}{rgb}{0, 0.24, 0.23} %{0.2, 0.7, 0.2}
-\definecolor{sutilGreen}{rgb}{0.850, 0.996, 0.807} % para el fondo del código
-\definecolor{blue}{rgb}{0.61, 0.98, 0.759} % sobreeescribimos el azul
+\definecolor{blue}{HTML}{2700FD}
+%\definecolor{blue}{rgb}{0.61, 0.98, 0.759} % sobreeescribimos el azul
 \newcommand{\smallMarginSize}{1.8cm}
 \newcommand{\bigMarginSize}{3cm}
 \newcommand{\maginLetterSize}{\scriptsize}%{\footnotesize} %{\scriptsize}%
@@ -148,5 +169,6 @@
 \usepackage{csquotes}
 \let\oldenquote\enquote
 \renewcommand{\enquote}[1]{{\itshape\oldenquote{#1}}}
+\usepackage{epigraph} % for the epigraphs set on the right
 
diff --git a/Memoria/preliminares/agradecimientos.tex b/Memoria/preliminares/agradecimientos.tex
index e94a5b4..b6b2c02 100644
--- a/Memoria/preliminares/agradecimientos.tex
+++ b/Memoria/preliminares/agradecimientos.tex
@@ -7,5
+7,16 @@ \chapter*{Agradecimientos}
 
-Agradezco a
+It is not every day that one finishes writing her bachelor's thesis, and in this almost unrestrained euphoria it seems fitting to look back and thank everyone who has accompanied me along the way.
+
+I will begin with the people I love most in the world: thank you, Mum and Dad, for your infinite patience and your immeasurable love; without you this would not have been possible (in fact, nothing would be).
+Thanks also to my two supervisors, JJ and Javier, for all the attention and consideration they have devoted to me; you should know that you are two \textit{soletes} and that the affection you inspire in me is no small thing.
+
+Of course, thanks as well to everyone who has instilled in me the desire to learn and to go to class, be they the inspiring, approachable teachers or all the people who have ever made me smile.
+
+I also want to add a special mention for my \textit{favourite recalcitrant algebraists}, Daniel and Ricardo, for having put up with me for so many hours; and, as it could not be otherwise, to my faithful companion in adventures, the Sancho to my Quijote (or the Quijote to my Sancho, as the occasion demands), my arch-friend Jose: thank you for all the moments we have shared and for those still ahead of us.
+
+
 \endinput
diff --git a/Memoria/preliminares/declaracion-originalidad.tex b/Memoria/preliminares/declaracion-originalidad.tex
index 93f6cd9..041c0f3 100644
--- a/Memoria/preliminares/declaracion-originalidad.tex
+++ b/Memoria/preliminares/declaracion-originalidad.tex
@@ -11,7 +11,7 @@
 
 \textsc{Declaración de originalidad}\\\bigskip
 
-D. \miNombre \\\medskip
+Dña. \miNombre \\\medskip
 
 Declaro explícitamente que el trabajo presentado como Trabajo de Fin de Grado (TFG), correspondiente al curso académico \miCurso, es original, entendida esta, en el sentido de que no ha utilizado para la elaboración del trabajo fuentes sin citarlas debidamente. \medskip
diff --git a/Memoria/preliminares/pensamientos_iniciales.tex b/Memoria/preliminares/pensamientos_iniciales.tex
deleted file mode 100644
index 1e59399..0000000
--- a/Memoria/preliminares/pensamientos_iniciales.tex
+++ /dev/null
@@ -1,65 +0,0 @@
-% \manualmark
-% \markboth{\textsc{Introducción}}{\textsc{Introducción}}
-
-\chapter{Introducción}\label{ch:introduccion}
-
-Estimado lector, podría comenzar una amigable y espectacular introducción mostrando algunos de los incontables ejemplos
-de problemas para los que
-el aprendizaje automático o las redes neuronales aportan soluciones exitosas e incluso sorprendentes. (Esto se tratará en
-[insertar referencia, habrá que hacerlo para que no me digan nada)
-Pero en este trabajo me gustaría principalmente aportar una visión a más bajo nivel, partiendo de los pilares matemáticos
-que sostienen
-las redes neuroles y explicar qué son exactamente, por qué funcionan y cómo se pueden optimizar.
-
-\section{Filosofía}
-Las matemáticas a mi parecer se rigen sobre un equilibrismo férreo, entrañable y absorvente que son los axiomas;
-con solo aceptar la verosimilitud de un axioma una vez, uno ya puede dejarse llevar por los derroteros
-lógicos y confiar en que todo lo probado es completamente cierto (centro de dicho fonambulismo).
-
-Es por ello que me gustaría empezar con las siguiente pregunta que pulula en mi interior más crítico:
-
-¿Todo fenómeno observable guarda una ley que lo explique?
-
----
-Relacionar la realidad con lógica -> Gödel nos diría que no.
-¿Es la realidad lógica?
-[TODO buscar más información]
----
-Si
-Desconozco la respuesta, pero de tal manera imitando en cierta manera al razonamiento sobre la existencia
-de Dios de Pascal:
-Si la respuesta fuera negativa: ¿Debería ser esto un motivo para frenar nuestra busca?
-¿Se podría separar la realidad en explicable o no? ¿Cómo se podría crear un método?
-Si fuera positiva bastaría seguir cómo vamos.
-
-Mientras alguien encuentra respuestra, ya que la curiosidad humana es indómita
- entretengámonos pues pensando que sí y escarbemos en la inmensidad del conocimiento
-aún por descubrir.
-
-\section{Motivación del aprendizaje automático}
-
-
-Al igual que un niño pequeño es capaz de distinguir entre un árbol y su progenitor sin ser
-capaz de dar una descripción matemática de su deducción o una mera explicación.
-
-De entre todo el desconocimiento existente podría presentarse la siguiente situación:
-Qué ocurriría si no hubiéramos encontrado una manera analítica viable de resolver un problema,
-pero sin embargo
-contáramos con "los datos, ejemplos o muestras suficientes" como para dar una solución empírica.
-
-
-En resonancia con las ideas platónicas, los problemas de aprendizaje tratan de encontrar soluciones empíricas donde todavía
-no se conocen métodos analíticos.
-
-Estos no darán una explicación de porqué funcionan, pero como ya se ha comprobado [] pueden llegar a
-soluciones exitosas.
-
-Y si bien, así expuesto puede en un principio parecer de poco interés para el lector más matemático,
-la teoría que subyace bajo éste instrumento de la Ingeniería que se encuentra en plena esfervesciencia y creación
-es realmente bello y apasionanto, espero a lo largo de este libro poder transmitirlo.
-
-Pregunta para mí ¿podría esta estructura guardar alguna relación o expliación con el ser humano?
-
-\subsection{Componentes del aprendizaje}
-
-\endinput
\ No newline at end of file
diff --git a/Memoria/preliminares/resumen.tex b/Memoria/preliminares/resumen.tex
index 0a61acd..ffcbf48 100644
--- a/Memoria/preliminares/resumen.tex
+++ b/Memoria/preliminares/resumen.tex
@@ -11,26 +11,39 @@
 \chapter*{Resumen}\label{ch:resumen}
 %\addcontentsline{toc}{chapter}{Resumen}
 
-Objetivo inofrmática:
-Elegir un framework común para trabajar con redes neuronales así como una serie de problemas de complejidad media, tales como spambase. Establecer una línea base examinando los resultados obtenidos con la configuración base a la hora de entrenar este tipo de redes neuronales y el resultado obtenido. A partir de esa línea base, testear las diferentes restricciones, cambios en representación y suposiciones deducidos en la parte matemática para ver qué influencia tienen en la velocidad, en el resultado, o en ambos.
+There is at present an imbalance between the empirical and the theoretical
+results on neural networks, which even reach contradiction
+(as discussed in the introduction of Chapter
+\ref{chapter:Introduction-neuronal-networks}). Our first objective will
+therefore be to build a solid theory
+that makes room for
+theoretically grounded optimizations:
+a review and
+purge of any existing practice around
+neural networks that lacks mathematical foundation.
+
+As a result, we have proposed and implemented
+a new neural network model together with its
+learning and evaluation methods.
+In addition, we have given a criterion for selecting
+activation functions and a weight
+initialization algorithm that improves on the existing ones.
+All of these results have led to the creation of
+the \textit{OptimizedNeuralNetwork.jl} library,
+which contains the implementation of our optimized
+models and methods.
+
+
+The structure of the report is described chapter by chapter in the Summary.
 
-Objetivo matemáticas:
-El objetivo de esta parte es doble, en primer lugar, se propone analizar con detalle las demostraciones de algunos resultados de aproximación universal de redes neuronales para funciones continuas. En segundo lugar se propone realizar un estudio de la posible optimización de redes neuronales concretas en base a los resultados obtenidos empíricamente en la parte informática. Se tratará de modelizar matemáticamente dichos resultados y de obtener mejoras en la convergencia de las aproximaciones imponiendo, si es necesario, hipótesis más restrictivas en algunos de los elementos de las redes neuronales que se correspondan con su uso en la práctica.
-Libros:
-[1] Abu-Mostafa, Y.S. et al.: Learning From Data. AMLBook, 2012. [2] G. Cybenko, Approximations by superpositions of a sigmoidal function, Math. Contro Signal Systems 2 (1989), 303-314. [3] J. Conway, A Course in Functional Analysis, -2nd Edition, Springer-Verlag, 1990. [4] A. Géron, Hands-on machine learning with Scikit-Learn, Keras and TensorFlow: concepts, tools, and techniques to build intelligent systems (2nd ed.). O’Reilly, 2019. [5] K. Hornik, M Stinchcombe and H. White, Multilayer feedforward networks are universal approximators, Neural Networks 2 (1989), 359-366. [6] W. Rudin, Real and complex analysis. McGraw-Hill Book Co., New York-Toronto, Ont.-London 1966.
 \paragraph{PALABRAS CLAVE:}
 \begin{itemize*}[label=,itemsep=1em,itemjoin=\hspace{1em}]
 \item redes neuronales
- \item LSTM
- \item series temporales
- \item selección de modelos
- \item validación
- \item selección de hiperparámetros
- \item detección de anomalías
- \item detector
- \item perturbación
+ \item optimization
+ \item activation functions
+ \item weight initialization
+ \item machine learning library
 \end{itemize*}
 \endinput
diff --git a/Memoria/preliminares/summary.tex b/Memoria/preliminares/summary.tex
index e08fc47..907be4b 100644
--- a/Memoria/preliminares/summary.tex
+++ b/Memoria/preliminares/summary.tex
@@ -8,18 +8,88 @@
 \selectlanguage{english}
 \chapter*{Summary}\label{ch:summary}
 %\addcontentsline{toc}{chapter}{Summar}
-
+
+Experimental research on neural networks is nowadays well ahead of its
+theoretical results.
+We therefore aim to establish a solid mathematical theory with which to optimize current neural network models.
+
+
+As a result of our study, we have proposed a novel neural
+network model, and adapted and optimized
+evaluation and learning methods for it.
+Moreover, we have proved theorems establishing the
+equivalence of certain activation functions, and we propose a new
+algorithm to initialize the weights of a neural network. Thanks to the
+first result, we obtain a criterion for choosing the most
+suitable activation function so as to maintain accuracy while reducing computational cost.
+Thanks to the second one, we can accelerate
+the convergence of learning methods.
+
+In addition, the models, methods and algorithms have been
+implemented in Julia, resulting in the \textit{OptimizedNeuralNetwork.jl} library.
+
+All the theoretical development, designs, decisions and results are
+written up in this report, which has the following structure:
+\begin{itemize}
+    \item \textbf{Chapter \ref{ch00:methodology}: Description of the methodology followed.} We have organized our project according to an agile philosophy based on the personas methodology, user stories, milestones and tests. This method has guided and linked the mathematical and technical results and implementations, giving them coherence and validation mechanisms.
+
+    \item \textbf{Chapter \ref{chapter:Introduction-neuronal-networks}: Description of the learning problem.} We define the characteristics and types of machine learning problems. We will focus on supervised learning.
+
+    \item \textbf{Chapter \ref{ch03:teoria-aproximar}: Approximation theory.} In order to establish a solid theory, we start by trying to solve machine learning problems with traditional approximation methods. The main result we prove is the Stone--Weierstrass theorem. By the end of this chapter we will know the virtues and faults of the traditional methods and understand the need for new methods and structures such as neural networks.
+
+    \item \textbf{Chapter \ref{chapter4:redes-neuronales-aproximador-universal}: Neural networks are universal approximators.} In this chapter we introduce our neural
+    network model and compare it with the conventional ones. In order to show that it is well
+    defined, we prove the universal convergence of our model to any measurable
+    function. In addition, we give some results on how our model solves
+    classification and regression problems as its number of neurons grows. Finally, we
+    discuss whether all of these mathematical results can actually solve real-life problems; the
+    idea behind the debate is the computable representation of real numbers.
+
+    \item \textbf{Chapter \ref{chapter:construir-redes-neuronales}: The design and implementation of neural networks.} We describe in detail the design and
+    implementation of our neural network model. From this we obtain some
+    mathematical results about the bias and the classification function, which are useful to
+    compare our model with the conventional ones and to justify
+our choice. Moreover, we explain, justify and design learning and evaluation
+methods for our model. These methods are optimized versions of forward propagation and
+backpropagation.
+
+\item \textbf{Chapter \ref{funciones-activacion-democraticas-mas-demoscraticas}: Democratization of activation functions.}
+In this chapter we ask whether some activation functions are better than others. In this
+direction we prove two original results showing that there are families of
+activation functions which, under the same conditions, solve problems with the same
+accuracy. As a result, if we compare the computational cost of the members of those
+families and choose the fastest one, we obtain a method to optimize the evaluation and
+learning of neural networks without loss of accuracy. We use the Wilcoxon
+signed-rank test as a statistical hypothesis test so as to give a rigorous study of
+our criterion.
+
+\item \textbf{Chapter \ref{section:inicializar_pesos}: Weight initialization algorithm.}
+Since backpropagation and other iterative methods are sensitive to the initial
+weights of a neural network, we present an original method to initialize those weights
+from the training data.
This process not only produces a better starting point, it also
+has a lower computational cost than backpropagation. To test the potential of this
+method we use the Wilcoxon signed-rank test again; moreover, from the experiments'
+requirements we design and create our OptimizedNeuralNetwork.jl library. In this chapter we
+also explain every decision made during the design and implementation of the
+library so that it is as efficient as possible.
+
+\item \textbf{Chapter \ref{ch08:genetic-selection}: Use of genetic algorithms in the selection of activation functions.}
+In this chapter we outline future work. Given a fixed number of neurons, the
+selection of the activation function may be crucial to reduce the training and test
+error. However, adding more free parameters to the search space increases its complexity
+and, at the same time, the cost of finding a solution. Nevertheless, the result obtained in
+Chapter \ref{funciones-activacion-democraticas-mas-demoscraticas} and a property of our neural model reduce the complexity of that space.
+
+\item \textbf{Chapter \ref{ch09:conclusion}: Conclusions.}
+\end{itemize}
+
 \paragraph{KEYWORDS:}
 \begin{itemize*}[label=,itemsep=1em,itemjoin=\hspace{1em}]
 \item neural networks
- \item LSTM
- \item time series
- \item model selection
- \item validation
- \item hyper-parameters selection
- \item anomaly detection
- \item detector
- \item perturbation
+ \item optimization
+ \item activation functions
+ \item weight initialization
+ \item machine learning library
 \end{itemize*}
 
% Al finalizar el resumen en inglés, volvemos a seleccionar el idioma español para el documento
diff --git a/Memoria/preliminares/titulo.tex b/Memoria/preliminares/titulo.tex
index a933175..5c6429c 100644
--- a/Memoria/preliminares/titulo.tex
+++ b/Memoria/preliminares/titulo.tex
@@ -31,8 +31,9 @@
 
 \noindent\miNombre
 
 \textit{\miTitulo}.
 
-Trabajo de fin de Grado. Curso académico \miCurso.
-
+\noindent Trabajo de fin de Grado. Curso académico \miCurso.
+\\
+\\
 \begin{minipage}[t]{0.25\textwidth}
 \flushleft
 \textbf{Responsables de tutorización}
diff --git a/Memoria/tfg.tex b/Memoria/tfg.tex
index 288e872..47cef37 100644
--- a/Memoria/tfg.tex
+++ b/Memoria/tfg.tex
@@ -11,8 +11,8 @@
 
 % Autor de la memoria: Blanca Cano Camarero
 
-\documentclass{scrbook}
-
+%\documentclass{scrbook}
+\documentclass[twoside]{scrbook}
 \KOMAoptions{%
 fontsize=10pt, % Tamaño de fuente
 paper=a4, % Tamaño del papel
@@ -198,9 +198,10 @@
 \include{preliminares/portada}
 \include{preliminares/titulo}
-%\include{preliminares/declaracion-originalidad}
-%\include{preliminares/resumen}
-%\include{preliminares/summary}
+\include{preliminares/declaracion-originalidad}
+\include{preliminares/agradecimientos}
+\include{preliminares/resumen}
+\include{preliminares/summary}
 %\include{preliminares/dedicatoria} % Opcional
 \include{preliminares/tablacontenidos} % Opcional
@@ -218,18 +219,20 @@
 %\bigskip % Deja un espacio vertical en la parte superior
 }
-
-%Metodología
 \include{capitulos/0-Metodologia/Comentarios_previos}
+\include{capitulos/Introduccion}
+% Methodology
+
 \input{capitulos/0-Metodologia/introduccion}
 \input{capitulos/0-Metodologia/herramientas}
+\input{capitulos/0-Metodologia/asignaturas}
 % Filosofía a seguir
-\input{capitulos/Introduccion}
+%\input{capitulos/Introduccion}
 % Redes neuronales Definición de la clase de redes neuronales
 \input{capitulos/1-Introduccion_redes_neuronales/Objetivos}
-\include{capitulos/1-Introduccion_redes_neuronales/aprendizaje_introduccion}
+\input{capitulos/1-Introduccion_redes_neuronales/aprendizaje_introduccion}
 
 % Teoría de la aproximación
@@ -244,7 +247,7 @@ \chapter{Las redes neuronales son aproximadores universales} \label{chapter4:redes-neuronales-aproximador-universal}
 \input{capitulos/1-Introduccion_redes_neuronales/feedforward-network-una-capa}
 \input{capitulos/2-Articulo_rrnn_aproximadores_universales/introduccion}
-\include{capitulos/2-Articulo_rrnn_aproximadores_universales/desgranando_el_articulo/articulo_1_primeras_definiciones}
+\input{capitulos/2-Articulo_rrnn_aproximadores_universales/desgranando_el_articulo/articulo_1_primeras_definiciones}
 \input{capitulos/2-Articulo_rrnn_aproximadores_universales/desgranando_el_articulo/articulo_2_teorema_1_hasta_lema_2_2}
 \input{capitulos/2-Articulo_rrnn_aproximadores_universales/desgranando_el_articulo/articulo_3_teorema_2_2}
 \input{capitulos/2-Articulo_rrnn_aproximadores_universales/desgranando_el_articulo/articulo_4_colorario_2_1}
@@ -259,12 +262,20 @@ \chapter{Las redes neuronales son aproximadores universales}
 \input{capitulos/4-Actualizacion_redes_neuronales/aprendizaje}
 \input{capitulos/4-Actualizacion_redes_neuronales/otras-alternativas}
 
-% Mejoras propuestas
-\input{capitulos/5-Estudio_experimental/funciones_activacion}
-\input{capitulos/5-Estudio_experimental/inicializacion-pesos}
+%%%%%%%%%%%%%%%%%%%%%%%%%% Hypotheses
+%\part{Exploración de las hipótesis planteadas y estudio experimental de las mismas}
+% Study of the activation functions
+\include{capitulos/5-Estudio_experimental/1_funciones_activacion}
+% Study of the weight initialization algorithm
+\input{capitulos/5-Estudio_experimental/2_descripcion_inicializacion-pesos}
+\input{capitulos/5-Estudio_experimental/3_detalles_implementacion}
+\input{capitulos/5-Estudio_experimental/3_algoritmo-inicializacion-pesos}
+\input{capitulos/5-Estudio_experimental/4_conclusion_intuitiva}
+% Comment on the genetic algorithms
 \input{capitulos/5-Estudio_experimental/combinacion_funciones_activacion}
%\include{capitulos/N-Exploracion-hipotesis-planteadas/hipotesis}
+\include{capitulos/9-Conclusiones}
 
 % --------------------------------------------------------------------
 % APPENDIX: Opcional
@@ -309,5 +320,4 @@ \chapter{Las redes neuronales son aproximadores universales}
 \begin{footnotesize} % Normalmente el índice se imprime en un tamaño de letra más pequeño.
 \printindex
 \end{footnotesize}
-\include{preliminares/agradecimientos}
 \end{document}
diff --git a/OptimizedNeuralNetwork.jl/src/OptimizedNeuralNetwork.jl b/OptimizedNeuralNetwork.jl/src/OptimizedNeuralNetwork.jl
new file mode 100644
index 0000000..e625bd0
--- /dev/null
+++ b/OptimizedNeuralNetwork.jl/src/OptimizedNeuralNetwork.jl
@@ -0,0 +1,11 @@
+####################################################
+# Library OptimizedNeuralNetwork
+####################################################
+module OptimizedNeuralNetwork
+include("activation_functions.jl")
+include("one_layer_neuronal_network.jl")
+include("forward_propagation.jl")
+include("metric_estimation.jl")
+include("weight-initializer-algorithm/weight-initializer-algorithm.jl")
+include("backpropagation.jl")
+end
\ No newline at end of file
diff --git a/Biblioteca-Redes-Neuronales/src/activation_functions.jl b/OptimizedNeuralNetwork.jl/src/activation_functions.jl
similarity index 73%
rename from Biblioteca-Redes-Neuronales/src/activation_functions.jl
rename to OptimizedNeuralNetwork.jl/src/activation_functions.jl
index e19f254..1140e14 100644
--- a/Biblioteca-Redes-Neuronales/src/activation_functions.jl
+++ b/OptimizedNeuralNetwork.jl/src/activation_functions.jl
@@ -1,5 +1,6 @@
-module ActivationFunctions
-
+####################################################################################
+# Contains the activation functions and their derivatives
+####################################################################################
 export CosineSquasher
 export HardTanh
 export @IndicatorFunction
@@ -8,8 +9,9 @@ export RampFunction
 export ReLU
 export Sigmoid
 export @ThresholdFunction
-
-
+# derivatives
+export derivativeRampFunction
+export derivativeReLU
 """
     Threshold(polynomial, t)
 Return a Threshold Function defined by `polynomial`and `t`.
@@ -52,6 +54,13 @@ Return Ramp function a bounded ReLU function
 function RampFunction(x::Real)
     return min(1,max(0,x))
 end
+"""
+    derivativeRampFunction(x::Real)
+Return the derivative of the ramp function: 1 on the interval [0,1]
+(the value chosen at the two kinks) and 0 elsewhere.
+"""
+function derivativeRampFunction(x::Real)
+    return (0<=x<=1) ? 1 : 0
+end
 
 """
     ReLU(x::Real)
@@ -60,6 +69,13 @@ ReLU function
 function ReLU(x::Real)
     return max(0,x)
 end
+"""
+    derivativeReLU(x::Real)
+Return the derivative of the ReLU function, with the convention that the
+derivative at 0 is 1.
+"""
+function derivativeReLU(x::Real)
+    return (x<0) ? 0 : 1
+end
 
 """
     Sigmoid(x::Real)
@@ -90,5 +106,5 @@ Leaky ReLU
 macro LReLU(a::Real)
     return :( f(x::Real)=(x<0) ? $(esc(a))*x : x )
 end
-end #module end
+
 
diff --git a/OptimizedNeuralNetwork.jl/src/backpropagation.jl b/OptimizedNeuralNetwork.jl/src/backpropagation.jl
new file mode 100644
index 0000000..a35ee28
--- /dev/null
+++ b/OptimizedNeuralNetwork.jl/src/backpropagation.jl
@@ -0,0 +1,105 @@
+##########################################################
+# Implementation of backpropagation
+# Based on Algorithms 4 and 5, Section 5.3 of the report
+##########################################################
+export backpropagation!
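+
+# A minimal usage sketch, with values borrowed from the example notebook
+# (any data of matching dimensions works):
+#   h = RandomWeightsNN(2, 3, 1)
+#   X = rand(3, 2) * 3
+#   Y = map(v -> cos(v[1]) + sin(v[2]), eachrow(X))
+#   backpropagation!(h, X, Y, RampFunction, derivativeRampFunction, 3, 0.005)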
+
+using Random
+
+const VectorOrMatrix = Union{Matrix,Vector}
+
+function descent_gradient(
+    neural_network::AbstractOneLayerNeuralNetwork,
+    X_train :: VectorOrMatrix,
+    Y_train :: VectorOrMatrix,
+    activation_function,
+    derivative_activation_function,
+    batch_size :: Int = 32,
+    )
+    len = size(X_train, 1)
+    if len < batch_size
+        error("batch_size should be equal to or smaller than the size of the training dataset,
+            but $batch_size > $len")
+    end
+
+    # Partial derivatives to compute
+    n_1,d = size(neural_network.W1)
+    s,n = size(neural_network.W2)
+
+    partial_W1 = zeros(Float64, (n_1,d))
+    partial_W2 = zeros(Float64, (s,n))
+    derivative_B = zeros(Float64, (s,n))
+
+    index = first(randperm(len), batch_size)
+    # For each sample of the random batch
+    for train_index in index
+        # 1. Forward propagation, storing the relevant intermediate results
+        𝛅 = neural_network.W1 * push!(copy(X_train[train_index,:]),1) # pre-activations
+        sensibilities_img = map(activation_function, 𝛅)
+        derivative_sensibilities_img = map(derivative_activation_function, 𝛅)
+
+        forward_propagation_x = neural_network.W2 * sensibilities_img
+        # residual between the prediction and the target
+        residual = forward_propagation_x .- Y_train[train_index,:]
+
+        # 2. Partial derivative with respect to B (W_2):
+        #    residual_w * σ(𝛅_v)
+        for v in 1:n
+            for w in 1:s
+                partial_W2[w,v] = partial_W2[w,v] + residual[w]*sensibilities_img[v]
+            end
+        end
+
+        # 3. Partial derivative with respect to W_1
+        for i in 1:n
+            for k in 1:s
+                derivative_B[k,i] =
+                    neural_network.W2[k,i]*derivative_sensibilities_img[i]
+            end
+        end
+
+        for v in 1:n
+            for k in 1:s
+                # Chain rule: residual_k * W2[k,v] * σ'(𝛅_v)
+                difference_times_derivative = residual[k]*derivative_B[k,v]
+                # the last column of W1 holds the bias, whose input is the constant 1
+                partial_W1[v,d] += difference_times_derivative
+
+                for u in 1:(d-1)
+                    partial_W1[v,u] += difference_times_derivative * X_train[train_index, u]
+                end
+            end
+        end
+
+    end
+    return partial_W1, partial_W2
+end
+
+"""
+    function backpropagation!(
+        neural_network::AbstractOneLayerNeuralNetwork,
+        X_train :: Matrix,
+        Y_train :: Vector,
+        activation_function,
+        derivative_activation_function,
+        batch_size :: Int = 32,
+        η :: Float64 = 0.1
+    )
+
+Perform one gradient-descent step of backpropagation over a random batch,
+updating the weights of `neural_network` in place.
+"""
+function backpropagation!(
+    neural_network::AbstractOneLayerNeuralNetwork,
+    X_train :: Matrix,
+    Y_train :: Vector,
+    activation_function,
+    derivative_activation_function,
+    batch_size :: Int = 32,
+    η :: Float64 = 0.1
+)
+    ∂w1, ∂w2 = descent_gradient(neural_network,
+        X_train, Y_train,
+        activation_function, derivative_activation_function,
+        batch_size)
+    neural_network.W1 -= η*∂w1
+    neural_network.W2 -= η*∂w2
+    return neural_network
+end
\ No newline at end of file
diff --git a/OptimizedNeuralNetwork.jl/src/forward_propagation.jl b/OptimizedNeuralNetwork.jl/src/forward_propagation.jl
new file mode 100644
index 0000000..2f61983
--- /dev/null
+++ b/OptimizedNeuralNetwork.jl/src/forward_propagation.jl
@@ -0,0 +1,14 @@
+######################################################
+# FORWARD PROPAGATION ALGORITHM
+# Algorithm 3 described in the report, Chapter 5.
+######################################################
+export forward_propagation
+"""
+    forward_propagation(h::AbstractOneLayerNeuralNetwork, activation_function, x)
+Evaluate the network `h` at the input `x` using a single activation function:
+it computes W2 * σ.(W1 * [x; 1]), where the appended constant 1 feeds the
+bias column of W1.
+"""
+function forward_propagation(h::AbstractOneLayerNeuralNetwork, activation_function, x)
+    s = h.W1 * push!(copy(x),1)
+    ∑ = map(activation_function, s)
+    return h.W2 * ∑
+end
diff --git a/OptimizedNeuralNetwork.jl/src/metric_estimation.jl b/OptimizedNeuralNetwork.jl/src/metric_estimation.jl
new file mode 100644
index 0000000..21c1aba
--- /dev/null
+++ b/OptimizedNeuralNetwork.jl/src/metric_estimation.jl
@@ -0,0 +1,37 @@
+####################################################
+# Functions to collect metrics
+####################################################
+export regression
+export error_in_data_set
+
+using Statistics
+using LinearAlgebra
+"""
+    regression(X::Vector, Y, f)
+For the points (X,Y), return a tuple with
+1. The mean of the error between Y and f(X)
+2. The median of the error
+3. The standard deviation of the error
+4. The correlation coefficient
+"""
+function regression(X::Vector,Y,f)
+    f_x = map(f, X)
+    differences = map(norm, eachrow(Y .- f_x))
+    return mean(differences), median(differences), std(differences), cor(Y, f_x)
+end
+
+function regression(X::Matrix,Y,f)
+    f_x = map(x->f(x)[1], eachrow(X))
+    differences = map(norm, eachrow(Y .- f_x))
+    return mean(differences), median(differences), std(differences), cor(Y, f_x)
+end
+
+"""
+    error_in_data_set(x_set::Matrix, y_set, eval_neural_network)::Float64
+Return the mean error over the data set: the mean of the norms of the
+differences between the targets and the network's estimations.
+"""
+function error_in_data_set(x_set::Matrix,y_set, eval_neural_network)::Float64
+    estimations = map(x->eval_neural_network(x)[1], eachrow(x_set))
+    differences = map(norm, eachrow(y_set .- estimations))
+    return mean(differences)
+end
\ No newline at end of file
diff --git a/OptimizedNeuralNetwork.jl/src/one_layer_neuronal_network.jl b/OptimizedNeuralNetwork.jl/src/one_layer_neuronal_network.jl
new file mode 100644
index 0000000..e83df1b
--- /dev/null
+++ b/OptimizedNeuralNetwork.jl/src/one_layer_neuronal_network.jl
@@ -0,0 +1,97 @@
+########################################################
+# ONE LAYER NEURAL NETWORK TYPE
+# and evaluation with forward propagation
+########################################################
+# Constructors
+export RandomWeightsNN
+export FromMatrixNN
+# Type
+export AbstractOneLayerNeuralNetwork
+
+"""
+    AbstractOneLayerNeuralNetwork
+The basic elements that define a one-layer neural network.
+It must have two matrices:
+W1: Matrix n x (d+1)
+W2: Matrix s x n
+"""
+abstract type AbstractOneLayerNeuralNetwork end
+
+"""
+    RandomWeightsNN
+Return a randomly initialized neural network
+"""
+mutable struct RandomWeightsNN <: AbstractOneLayerNeuralNetwork
+    entry_dimesion :: Int
+    number_of_hide_units :: Int
+    output_dimension :: Int
+    W1 # input-to-hidden weights, hcat(A, S): the bias is the last column
+    W2 # hidden-to-output weights
+
+    function RandomWeightsNN(entry_dimesion,
+        number_of_hide_units,
+        output_dimension)
+
+        W1 = rand(Float64, number_of_hide_units, entry_dimesion+1)
+        W2 = rand(Float64, output_dimension, number_of_hide_units)
+        return new(
+            entry_dimesion,
+            number_of_hide_units,
+            output_dimension,
+            W1,
+            W2
+        )
+
+    end
+end
+
+"""
+    FromMatrixNN(S, A, B)
+Return a neural network initialized from the bias vector `S` and the
+matrices `A` (hidden weights) and `B` (output weights).
+"""
+mutable struct FromMatrixNN <: AbstractOneLayerNeuralNetwork
+    W1 :: Matrix # input-to-hidden weights
+    W2 :: Matrix # hidden-to-output weights
+    function FromMatrixNN(S,A,B)
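+        # Expected shapes, as enforced by the checks below: S is a length-n
+        # bias vector, A is the n x d matrix of hidden weights and B the
+        # s x n matrix of output weights; the constructor stores
+        # W1 = hcat(A, S) and W2 = B.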
+        # Check that the argument types are correct
+        if !( typeof(S) <: Vector && typeof(A) <: Matrix && typeof(B) <: Matrix )
+            throw(ArgumentError("The argument types are not correct.
+            They should satisfy:
+            typeof(S) <: Vector && typeof(A) <: Matrix && typeof(B) <: Matrix
+            but what was found is:
+            typeof(S) = $(typeof(S)), typeof(A) = $(typeof(A)), typeof(B) = $(typeof(B))
+            "))
+        end
+        # Check that the sizes are coherent
+        (n_a,d_a) = size(A)
+        l_s = length(S)
+        (s_b, n_b) = size(B)
+        ## Coherence between A and S
+        if n_a != l_s
+            throw(ArgumentError("The number of rows of A (which is $(n_a)) must equal
+            the length of S (which is $(l_s)).
+            The sizes found are:
+            size(S)=$(size(S)).
+            size(A)=$(size(A))
+            "))
+        end
+        # Coherence between A and B
+        if n_a != n_b
+            throw(ArgumentError("The number of rows of A (which is $(n_a)) is not coherent with the number of columns of B (which is $(n_b)).
+            They should be equal."))
+        end
+        return new(hcat(A,S), B)
+    end
+end
+
+"""
+    Base.show(io::IO, h::AbstractOneLayerNeuralNetwork)
+Pretty-print the weight matrices of the network to `io`.
+"""
+function Base.show(io::IO, h ::AbstractOneLayerNeuralNetwork)
+    println(io, "The weight matrix of the neurons, W1, is:")
+    show(io, MIME("text/plain"), h.W1)
+    println(io, "\nThe weight matrix of the output, W2, is:")
+    show(io, MIME("text/plain"), h.W2)
+end
+
diff --git a/OptimizedNeuralNetwork.jl/src/weight-initializer-algorithm/multiple-input-multiple-output.jl b/OptimizedNeuralNetwork.jl/src/weight-initializer-algorithm/multiple-input-multiple-output.jl
new file mode 100644
index 0000000..47e441e
--- /dev/null
+++ b/OptimizedNeuralNetwork.jl/src/weight-initializer-algorithm/multiple-input-multiple-output.jl
@@ -0,0 +1,75 @@
+#####################################################################
+# IMPLEMENTATION OF THE WEIGHT INITIALIZATION ALGORITHM
+# Based on Chapter 7, Algorithm 6
+# CASE: REAL INPUT of dimension d > 1, REAL OUTPUT of dimension s > 1
+#####################################################################
+export nn_from_data
+"""
+    nn_from_data(X_train, Y_train, n, M=10)
+    Return a neural network whose weights are already initialized
+    according to the training sets.
+    `n` is the number of neurons in the hidden layer.
+    The input and output sizes of the neural network are determined
+    by the training data themselves:
+    the input size corresponds to the number of attributes of `X_train` (its number of columns)
+    and the output size to the number of columns of `Y_train`.
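+
+    For illustration (hypothetical sizes): with a 100 x 4 `X_train` and a
+    100 x 2 `Y_train`, the resulting network has input dimension d = 4,
+    `n` hidden units and output dimension s = 2 (A is n x 4, S has
+    length n and B is 2 x n).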
+
+    M is a constant that depends on the activation function;
+    from what was seen in the theory, M=10 works for all the
+    activation functions considered there.
+"""
+function nn_from_data(X_train::Matrix,Y_train::Matrix, n::Int, M=10)::AbstractOneLayerNeuralNetwork
+    (_ , entry_dimension) = size(X_train)
+    (_ , output_dimension) = size(Y_train)
+    # initialize the random projection vector p
+    p = rand(Float32, entry_dimension)
+
+    index :: Int = 1
+    tam :: Int = 0
+    nodes = [Vector{Float64}(undef, entry_dimension) for _ in 1:n]
+    y_values = [Vector{Float64}(undef, output_dimension) for _ in 1:n]
+    my_keys = zeros(Float64, n)
+
+    # scan the training samples until n valid nodes have been selected
+    while tam < n && index <= size(X_train, 1)
+        new_point = X_train[index, :]
+        if notOrtonormal(nodes, p, new_point, tam)
+            tam += 1
+            projection = sum(p.*new_point)
+            my_keys[tam] = projection
+            nodes[tam] = new_point
+            y_values[tam] = Y_train[index,:]
+        end
+        index += 1
+    end
+    ordered_values_index = sortperm(my_keys)
+    # Matrices of the neural network
+    # A = n x d
+    # S = n x 1
+    # B = s x n
+    A = zeros(Float64, n, entry_dimension)
+    S = zeros(Float64, n)
+    B = zeros(Float64, output_dimension, n)
+
+    # Computation of the neuron values
+    key = ordered_values_index[1]
+    x_a = nodes[key]
+    y_a = y_values[key]
+    # initial values
+    S[1]=M
+    B[:, 1] = y_a
+
+    for index in 2:n
+        key = ordered_values_index[index]
+        x_s = nodes[key]
+        y_s = y_values[key]
+
+        coeff_aux = 2M / sum(p.* (x_s - x_a))
+        S[index] = M - coeff_aux*sum(p .* x_s)
+        A[index,:] = coeff_aux * p
+        B[:,index] = y_s - y_a
+
+        x_a = x_s
+        y_a = y_s
+    end
+    return FromMatrixNN(S,A,B)
+end
\ No newline at end of file
diff --git a/OptimizedNeuralNetwork.jl/src/weight-initializer-algorithm/multiple-input-single-ouput.jl b/OptimizedNeuralNetwork.jl/src/weight-initializer-algorithm/multiple-input-single-ouput.jl
new file mode 100644
index 0000000..35c8669
--- /dev/null
+++ b/OptimizedNeuralNetwork.jl/src/weight-initializer-algorithm/multiple-input-single-ouput.jl
@@ -0,0 +1,74 @@
+#####################################################################
+# IMPLEMENTATION OF THE WEIGHT INITIALIZATION ALGORITHM
+# Based on Chapter 7, Algorithm 6
+# CASE: REAL INPUT of dimension d > 1, REAL OUTPUT (one dimension)
+#####################################################################
+export nn_from_data
+"""
+    nn_from_data(X_train::Matrix, Y_train::Vector, n::Int, M=10)::AbstractOneLayerNeuralNetwork
+    Return a neural network whose weights are already initialized
+    according to the training sets.
+    `n` is the number of neurons in the hidden layer.
+    The input size of the neural network is determined by the training
+    data themselves: it corresponds to the number of attributes of
+    `X_train` (its number of columns); the output is one-dimensional,
+    so `Y_train` is a vector.
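+
+    For illustration (hypothetical sizes): with a 100 x 4 `X_train` and a
+    length-100 `Y_train`, the network has input dimension d = 4, `n` hidden
+    units and a single output.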
diff --git a/OptimizedNeuralNetwork.jl/src/weight-initializer-algorithm/multiple-input-single-ouput.jl b/OptimizedNeuralNetwork.jl/src/weight-initializer-algorithm/multiple-input-single-ouput.jl
new file mode 100644
index 0000000..35c8669
--- /dev/null
+++ b/OptimizedNeuralNetwork.jl/src/weight-initializer-algorithm/multiple-input-single-ouput.jl
@@ -0,0 +1,74 @@
+#####################################################################
+# IMPLEMENTATION OF THE WEIGHT INITIALIZATION ALGORITHM
+# Based on chapter 7, algorithm 6
+# CASE: REAL INPUT of dimension d > 1, REAL OUTPUT (one dimension)
+#####################################################################
+export nn_from_data
+"""
+    nn_from_data(X_train::Matrix, Y_train::Vector, n::Int, M=10)::AbstractOneLayerNeuralNetwork
+Returns a neural network whose weights are already initialized
+according to the training sets.
+`n` is the number of neurons in the hidden layer.
+The input size of the network is determined by the training data:
+it is the number of attributes of `X_train` (its number of columns);
+the output size is 1.
+
+`M` is a constant that depends on the activation function;
+by what was seen in the theory, M=10 works for all the activation functions considered.
+"""
+function nn_from_data(X_train::Matrix, Y_train::Vector{Float64}, n::Int, M=10)::AbstractOneLayerNeuralNetwork
+    (_, entry_dimension) = size(X_train)
+    output_dimension = 1
+    # initialize the random projection direction p
+    p = rand(Float32, entry_dimension)
+    index = 1
+    tam = 0
+
+    nodes = Array{Vector{Float64}}(undef, n)
+    y_values = Array{Float64}(undef, n) # scalars, because the output has dimension 1
+    my_keys = zeros(Float64, n)
+    # scan the whole training set until n admissible points have been selected
+    while tam < n && index <= size(X_train, 1)
+        new_point = X_train[index, :]
+        if notOrtonormal(nodes, p, new_point, tam)
+            tam += 1
+            ordered_vector = sum(p .* new_point)
+            my_keys[tam] = ordered_vector
+            nodes[tam] = new_point
+            y_values[tam] = Y_train[index]
+        end
+        index += 1
+    end
+    ordered_values_index = sortperm(my_keys)
+    # Matrices of the neural network
+    # A = n x d
+    # S = n x 1
+    # B = 1 x n
+    A = zeros(Float64, n, entry_dimension)
+    S = zeros(Float64, n)
+    B = zeros(Float64, output_dimension, n)
+
+    # Computation of the neuron values
+    key = ordered_values_index[1]
+    x_a = nodes[key]
+    y_a = y_values[key]
+
+    S[1] = M
+    B[1] = y_a
+
+    for index in 2:n
+        key = ordered_values_index[index]
+        x_s = nodes[key]
+        y_s = y_values[key]
+
+        coeff_aux = 2M / sum(p .* (x_s - x_a))
+        S[index] = M - coeff_aux * sum(p .* x_s)
+        A[index, :] = coeff_aux * p
+        B[index] = y_s - y_a
+
+        x_a = x_s
+        y_a = y_s
+    end
+    return FromMatrixNN(S, A, B)
+end
\ No newline at end of file
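A sketch of why this construction interpolates the selected points (reasoning added here for clarity; it is not part of the source):

```Julia
# With the ramp activation, neuron i >= 2 evaluates at a point x to
#   RampFunction(M + coeff_aux * (p·x - p·x_si)),
# which is 1 whenever p·x >= p·x_si and 0 whenever p·x <= p·x_s(i-1),
# because the pre-activation is then >= M or <= -M respectively.
# Evaluated at the k-th selected point (in the order given by p·x),
# the output therefore telescopes:
#   y_1 + (y_2 - y_1) + ... + (y_k - y_(k-1)) = y_k
```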
diff --git a/OptimizedNeuralNetwork.jl/src/weight-initializer-algorithm/single-input-single-output.jl b/OptimizedNeuralNetwork.jl/src/weight-initializer-algorithm/single-input-single-output.jl
new file mode 100644
index 0000000..3b9b91d
--- /dev/null
+++ b/OptimizedNeuralNetwork.jl/src/weight-initializer-algorithm/single-input-single-output.jl
@@ -0,0 +1,69 @@
+#####################################################################
+# IMPLEMENTATION OF THE WEIGHT INITIALIZATION ALGORITHM
+# Based on chapter 7, algorithm 6
+# CASE: REAL INPUT (one dimension), REAL OUTPUT (one dimension)
+#####################################################################
+export nn_from_data
+"""
+    nn_from_data(X_train::Vector, Y_train::Vector, n::Int, M=10)::AbstractOneLayerNeuralNetwork
+Returns a neural network with one-dimensional input and
+one-dimensional output whose weights are already initialized
+according to the training sets.
+`n` is the number of neurons in the hidden layer.
+
+`M` is a constant that depends on the activation function;
+by what was seen in the theory, M=10 works for all the activation functions considered.
+"""
+function nn_from_data(X_train::Vector, Y_train::Vector, n::Int, M=10)::AbstractOneLayerNeuralNetwork
+    entry_dimension = 1
+    output_dimension = 1
+
+    nodes = []
+    index = 1
+    tam = 0
+    y_values = zeros(n)
+
+    # scan the whole training set until n distinct points have been selected
+    while tam < n && index <= length(X_train)
+        if !(X_train[index] in nodes)
+            push!(nodes, X_train[index])
+            tam += 1
+            y_values[tam] = Y_train[index]
+        end
+        index += 1
+    end
+    ordered_index = sortperm(nodes)
+    # Matrices of the neural network
+    # A = n x 1
+    # S = n x 1
+    # B = 1 x n
+    A = zeros(Float64, n, entry_dimension)
+    S = zeros(Float64, n)
+    B = zeros(Float64, output_dimension, n)
+
+    # initial values
+    x_a = nodes[ordered_index[1]]
+    y_a = y_values[ordered_index[1]]
+    # Affine function constantly equal to Y_1
+    S[1] = M
+    B[1] = y_a
+
+    # Computation of the remaining neurons
+    for (index, key) in collect(Iterators.zip(2:n, ordered_index[2:n]))
+        x_s = nodes[key]
+        y_s = y_values[key]
+
+        A[index] = 2M / (x_s - x_a)
+        S[index] = M - x_s * A[index]
+        B[index] = y_s - y_a
+
+        x_a = x_s
+        y_a = y_s
+    end
+    return FromMatrixNN(S, A, B)
+end
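A quick sketch of the one-dimensional case (illustrative values, following the tests of this repository):

```Julia
f(x) = x^2
X_train = collect(LinRange(-1.0, 1.0, 5))   # 5 distinct one-dimensional inputs
Y_train = map(f, X_train)
h = nn_from_data(X_train, Y_train, 5, 1)    # n = 5 neurons, M = 1
# the network interpolates every training point (inputs are passed as vectors):
forward_propagation(h, RampFunction, [X_train[3]]) ≈ [Y_train[3]]
```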
diff --git a/OptimizedNeuralNetwork.jl/src/weight-initializer-algorithm/utils.jl b/OptimizedNeuralNetwork.jl/src/weight-initializer-algorithm/utils.jl
new file mode 100644
index 0000000..a3fb3b5
--- /dev/null
+++ b/OptimizedNeuralNetwork.jl/src/weight-initializer-algorithm/utils.jl
@@ -0,0 +1,18 @@
+#####################################################################
+# Auxiliary function to check perpendicularity
+#####################################################################
+"""
+    notOrtonormal(points::Vector{Vector{Float64}}, p::Vector, new_point::Vector, tam::Int)::Bool
+Auxiliary function for the weight initialization of neural networks
+whose input dimension is greater than one.
+Checks that none of the first `tam` vectors of `points` is orthogonal
+to the direction `p` relative to `new_point`,
+that is, that `p · (v - new_point) != 0` for every already selected `v`.
+"""
+function notOrtonormal(points::Vector{Vector{Float64}}, p::Vector, new_point::Vector, tam::Int)::Bool
+    for i in 1:tam
+        if sum(p .* (points[i] - new_point)) == 0
+            return false
+        end
+    end
+    return true
+end
\ No newline at end of file
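Two illustrative, hand-computed calls:

```Julia
p = [1.0, 0.0]
points = [[0.0, 1.0], [2.0, 3.0]]
notOrtonormal(points, p, [0.0, 5.0], 2)  # false: p·([0,1] - [0,5]) == 0
notOrtonormal(points, p, [1.0, 0.0], 2)  # true: both differences project to nonzero values
```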
diff --git a/OptimizedNeuralNetwork.jl/src/weight-initializer-algorithm/weight-initializer-algorithm.jl b/OptimizedNeuralNetwork.jl/src/weight-initializer-algorithm/weight-initializer-algorithm.jl
new file mode 100644
index 0000000..f97d2ea
--- /dev/null
+++ b/OptimizedNeuralNetwork.jl/src/weight-initializer-algorithm/weight-initializer-algorithm.jl
@@ -0,0 +1,14 @@
+#####################################################################
+# IMPLEMENTATION OF THE WEIGHT INITIALIZATION ALGORITHM
+# Based on chapter 7, algorithm 6
+#####################################################################
+
+# Network size and data set
+
+# Case h: R -> R
+include("single-input-single-output.jl")
+include("utils.jl")
+# Case h: R^d -> R
+include("multiple-input-single-ouput.jl")
+# Case h: R^d -> R^s with d, s > 1
+include("multiple-input-multiple-output.jl")
diff --git a/OptimizedNeuralNetwork.jl/test/RUN_ALL_TEST.jl b/OptimizedNeuralNetwork.jl/test/RUN_ALL_TEST.jl
new file mode 100644
index 0000000..2b2868b
--- /dev/null
+++ b/OptimizedNeuralNetwork.jl/test/RUN_ALL_TEST.jl
@@ -0,0 +1,34 @@
+####################################################
+# RUNS ALL THE TESTS
+# The tests are:
+# - Activation functions
+# - Neural network structure
+# - Forward propagation
+# - Our initialization algorithm
+####################################################
+include("../src/OptimizedNeuralNetwork.jl")
+using .OptimizedNeuralNetwork
+
+println("Testing activation functions...")
+t = @elapsed include("activation_functions.test.jl")
+println("done (took $t seconds).")
+
+println("Testing the neural network data type...")
+t = @elapsed include("one_layer_neural_network.test.jl")
+println("done (took $t seconds).")
+
+println("Testing forward_propagation...")
+t = @elapsed include("forward_propagation.test.jl")
+println("done (took $t seconds).")
+
+println("Testing our initialization algorithm...")
+t = @elapsed include("weight-inizializer-algorithm/main.test.jl")
+println("done (took $t seconds).")
+
+println("Testing metric estimation...")
+t = @elapsed include("metric_estimation.test.jl")
+println("done (took $t seconds).")
+
+println("Testing backpropagation...")
+t = @elapsed include("backpropagation.test.jl")
+println("done (took $t seconds).")
\ No newline at end of file
diff --git a/Biblioteca-Redes-Neuronales/test/activation_functions.test.jl b/OptimizedNeuralNetwork.jl/test/activation_functions.test.jl
similarity index 96%
rename from Biblioteca-Redes-Neuronales/test/activation_functions.test.jl
rename to OptimizedNeuralNetwork.jl/test/activation_functions.test.jl
index 86fa52c..5c93448 100644
--- a/Biblioteca-Redes-Neuronales/test/activation_functions.test.jl
+++ b/OptimizedNeuralNetwork.jl/test/activation_functions.test.jl
@@ -5,11 +5,9 @@
 ############################################################################
 using Test
 
-include("./../src/activation_functions.jl")
-using .ActivationFunctions
-
 ######################### TEST #########################
 @testset "Activation functions" begin
+  id(x) = x
   # Asymptotic properties of the classical functions
   @test @ThresholdFunction(id,0)(-1) ≈ -1
diff --git a/OptimizedNeuralNetwork.jl/test/backpropagation.test.jl b/OptimizedNeuralNetwork.jl/test/backpropagation.test.jl
new file mode 100644
index 0000000..fb229ee
--- /dev/null
+++ b/OptimizedNeuralNetwork.jl/test/backpropagation.test.jl
@@ -0,0 +1,38 @@
+###################################
+# Backpropagation test
+# The error should decrease as the
+# evaluations progress
+###################################
+using Test
+@testset "Backpropagation" begin
+    n = 3 # number of neurons
+    η = 0.005 # we want the error to decrease without overshooting, hence the "small" learning rate
+    tol = 0.5 # allowed error margin, since there may be cases where η is too large
+    data_set_size = n
+    cosin(x, y) = cos(x) + sin(y) # target function
+    h = RandomWeightsNN(2, n, 1) # input dimension 2, output dimension 1
+    X_train = (rand(Float64, (data_set_size, 2))) * 3
+    Y_train = map(v -> cosin(v...), eachrow(X_train))
+    disminuye_error = 0.0
+    error_0 = error_in_data_set(
+        X_train,
+        Y_train,
+        x -> forward_propagation(h, RampFunction, x)
+    )
+    for i in 1:n
+        backpropagation!(h, X_train, Y_train, RampFunction, derivativeRampFunction, n)
+
+        error_1 = error_in_data_set(
+            X_train,
+            Y_train,
+            x -> forward_propagation(h, RampFunction, x)
+        )
+        disminuye_error += (error_1 < tol + error_0) ? 1 : 0 # tolerance
+        error_0 = error_1
+    end
+    # Note that, even with the tolerance, the learning rate η introduces some
+    # probability of increasing the error; that is why we use this heuristic
+    @test disminuye_error >= ceil(0.9 * (n - 1)) # the error decreases in more than ninety percent of the cases
+end
\ No newline at end of file
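A minimal training-loop sketch built from the calls used in this test (the meaning of the last argument of `backpropagation!` is assumed to follow the test above, which passes the number of neurons):

```Julia
h = RandomWeightsNN(2, 3, 1)     # 2 inputs, 3 hidden units, 1 output
X = rand(Float64, 6, 2)
Y = map(v -> cos(v[1]) + sin(v[2]), eachrow(X))
for epoch in 1:100
    backpropagation!(h, X, Y, RampFunction, derivativeRampFunction, 3)
end
# final error on the training data:
error_in_data_set(X, Y, x -> forward_propagation(h, RampFunction, x))
```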
diff --git a/OptimizedNeuralNetwork.jl/test/forward_propagation.test.jl b/OptimizedNeuralNetwork.jl/test/forward_propagation.test.jl
new file mode 100644
index 0000000..ff87598
--- /dev/null
+++ b/OptimizedNeuralNetwork.jl/test/forward_propagation.test.jl
@@ -0,0 +1,107 @@
+################################################
+# FORWARD PROPAGATION TESTS
+################################################
+using Test
+
+@testset "forward_propagation correct types" begin
+    ## Type and dimension checks
+    entry_dimension = 2
+    number_of_hidden_units = 3
+    output_dimension = 2
+    OLNN = RandomWeightsNN(
+        entry_dimension,
+        number_of_hidden_units,
+        output_dimension
+    )
+    # The result must be a vector
+    @test typeof(forward_propagation(OLNN, ReLU, [1, 2.0])) == Vector{Float64}
+    # The evaluation must have the same dimension as the network output
+    @test length(forward_propagation(OLNN, ReLU, [0, 1.0])) == output_dimension
+end
+@testset "forward_propagation matrix order" begin
+    ## Correct evaluation checks
+    # This should be the identity neural network
+    S = [0, 0]
+    A = [1 0; 0 1]
+    B = [1 0; 0 1]
+    h = FromMatrixNN(S, A, B)
+
+    vectores = [
+        [1, 2], [0, 0], [-1, 4]
+    ]
+    for v in vectores
+        @test forward_propagation(h, x -> x, v) == v
+    end
+    # This should be the network that multiplies the first index by two and the rest by three
+    S = [0, 0]
+    A = [1 0; 0 1]
+    B = [2 0; 0 3]
+    h = FromMatrixNN(S, A, B)
+
+    vectores = [
+        [1, 2], [0, 0], [-1, 4]
+    ]
+    for v in vectores
+        @test forward_propagation(h, x -> x, v) == [2 * v[1], 3 * v[2]]
+    end
+    # Correct behaviour for translations
+    # This should be the network that adds the vector (1 2)
+    S = [1, 2]
+    A = [1 0; 0 1]
+    B = [1 0; 0 1]
+    h = FromMatrixNN(S, A, B)
+
+    vectores = [
+        [1, 2], [0, 0], [-1, 4]
+    ]
+    for v in vectores
+        @test forward_propagation(h, x -> x, v) == [v[1] + 1, v[2] + 2]
+    end
+    S = [2, 5]
+    A = [2 3; 7 8]
+    B = [4 7; 10 -9]
+    v = [2, -1]
+    # Compute by hand what the result should be
+    c = A * v
+    c = c + S
+    c = B * c
+
+    h = FromMatrixNN(S, A, B)
+    @test forward_propagation(h, x -> x, v) == c
+
+end
+@testset "forward_propagation activation function" begin
+    # Check that any activation function is accepted
+    S = [0, 0]
+    A = [1 0; 0 1]
+    B = [1 0; 0 1]
+    h = FromMatrixNN(S, A, B)
+
+    vectores = [
+        [1, 2], [0, -3]
+    ]
+    soluciones_reLU = [
+        [1, 2], [0, 0]
+    ]
+    for (v, test) in zip(vectores, soluciones_reLU)
+        @test forward_propagation(h, ReLU, v) == test
+    end
+    # Check that the activation function is applied componentwise (here x -> x^2)
+    S = [0, 0]
+    A = [1 0; 0 1]
+    B = [1 0; 0 1]
+    h = FromMatrixNN(S, A, B)
+
+    vectores = [
+        [-1, 2], [0, -3]
+    ]
+    soluciones = [
+        [1, 4], [0, 9]
+    ]
+    for (v, test) in zip(vectores, soluciones)
+        @test forward_propagation(h, x -> x^2, v) == test
+    end
+
+end
diff --git a/OptimizedNeuralNetwork.jl/test/metric_estimation.test.jl b/OptimizedNeuralNetwork.jl/test/metric_estimation.test.jl
new file mode 100644
index 0000000..c5c4054
--- /dev/null
+++ b/OptimizedNeuralNetwork.jl/test/metric_estimation.test.jl
@@ -0,0 +1,15 @@
+###################################################################
+# Metric estimation tests
+###################################################################
+using Test
+#include("../src/OptimizedNeuralNetwork.jl")
+include("../src/metric_estimation.jl")
+
+@testset "Regression metrics" begin
+    f(x) = x .* x
+    X = [1, -1, -2, 2]
+    Y = map(f, X)
+    # Check that for this particular case the returned metrics are correct:
+    # mean error, median error, mean deviation and correlation coefficient
+    @test regression(X, Y, f) == (0, 0, 0, 1)
+end
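For intuition, a sketch with a deliberately biased predictor (hypothetical; it assumes `regression` returns the four statistics named in the comment above):

```Julia
f(x) = x .* x
X = [1, -1, -2, 2]
Y = map(f, X)
# a constant offset should produce nonzero error statistics
# while keeping the correlation coefficient high:
regression(X, Y, x -> f(x) .+ 0.1)
```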
diff --git a/OptimizedNeuralNetwork.jl/test/one_layer_neural_network.test.jl b/OptimizedNeuralNetwork.jl/test/one_layer_neural_network.test.jl
new file mode 100644
index 0000000..eda7752
--- /dev/null
+++ b/OptimizedNeuralNetwork.jl/test/one_layer_neural_network.test.jl
@@ -0,0 +1,49 @@
+using Test
+
+entry_dimension = 2
+number_of_hidden_units = 3
+output_dimension = 2
+OLNN = RandomWeightsNN(
+    entry_dimension,
+    number_of_hidden_units,
+    output_dimension
+)
+
+@testset "Dimension of one layer networks random initialization" begin
+    # Weights have correct dimensions
+    # Note that OLNN has been created with the random initialization,
+    # so the only hypotheses it must satisfy are:
+    # 1. Initialization with the correct dimensions
+    @test size(OLNN.W1) == (number_of_hidden_units, 1 + entry_dimension)
+    @test size(OLNN.W2) == (output_dimension, number_of_hidden_units)
+    # 2. Because of the generated randomness, not all the entries should be equal
+    @test OLNN.W1[1, :] != OLNN.W1[2, :]
+    @test OLNN.W2[1, :] != OLNN.W2[2, :]
+end
+
+@testset "One layer created from matrix" begin
+    S = [1, 2] # vector
+    A = [3 4; 4 6] # matrix
+    B = reshape([1; 1], 1, 2) # 1 x 2 matrix
+    h = FromMatrixNN(S, A, B)
+    # Correct type check
+    @test typeof(h) <: AbstractOneLayerNeuralNetwork
+    # Correct size checks
+    ### For the matrix W_1
+    (n_rows1, n_columns1) = size(h.W1)
+    (r_a, c_a) = size(A)
+    @test n_rows1 == r_a
+    @test n_columns1 == c_a + 1
+    ### For the matrix W_2
+    (n_rows2, n_columns2) = size(h.W2)
+    (r_b, c_b) = size(B)
+    @test n_rows2 == r_b
+    @test n_columns2 == r_a
+    @test n_columns2 == c_b
+    println("Visual check:")
+    println(h)
+end
diff --git a/OptimizedNeuralNetwork.jl/test/weight-inizializer-algorithm/main.test.jl b/OptimizedNeuralNetwork.jl/test/weight-inizializer-algorithm/main.test.jl
new file mode 100644
index 0000000..3c02208
--- /dev/null
+++ b/OptimizedNeuralNetwork.jl/test/weight-inizializer-algorithm/main.test.jl
@@ -0,0 +1,10 @@
+###################################################
+# Weight initialization tests
+###################################################
+using Test
+using Random
+Random.seed!(2);
+
+include("single-input-single-output.test.jl")
+include("multiple-input-single-output.test.jl")
+include("multiple-input-multiple-output.test.jl")
\ No newline at end of file
diff --git a/OptimizedNeuralNetwork.jl/test/weight-inizializer-algorithm/multiple-input-multiple-output.test.jl b/OptimizedNeuralNetwork.jl/test/weight-inizializer-algorithm/multiple-input-multiple-output.test.jl
new file mode 100644
index 0000000..fc2ecdf
--- /dev/null
+++ b/OptimizedNeuralNetwork.jl/test/weight-inizializer-algorithm/multiple-input-multiple-output.test.jl
@@ -0,0 +1,31 @@
+@testset "Nodes initialization algorithm entry dimension 3 output dimension 2" begin
+    M = 1 # Constant for the ramp function
+    # Well-defined regression problem with output of dimension 2
+    f_regression(x, y, z) = [x * y - z, x]
+    data_set_size = 6
+    entry_dimension = 3
+    output_dimension = 2
+    # Number of neurons
+    n = data_set_size # must be greater than 1 so that no error occurs
+    X_train = rand(Float32, data_set_size, entry_dimension)
+    Y_train::Matrix = mapreduce(permutedims, vcat, map(x -> f_regression(x...), eachrow(X_train)))
+
+    h = nn_from_data(X_train, Y_train, n, M)
+
+    # Check that the sizes are the appropriate ones
+    @test size(h.W1) == (n, entry_dimension + 1)
+    @test size(h.W2) == (output_dimension, n)
+
+    # If the network has been built correctly,
+    # evaluating it on the data used to build it
+    # should return the respective value of Y_train
+    evaluar(x) = forward_propagation(h,
+        RampFunction, x)
+
+    for i in 1:n
+        @test evaluar(X_train[i, :]) ≈ Y_train[i, :]
+    end
+
+end
diff --git a/OptimizedNeuralNetwork.jl/test/weight-inizializer-algorithm/multiple-input-single-output.test.jl b/OptimizedNeuralNetwork.jl/test/weight-inizializer-algorithm/multiple-input-single-output.test.jl
new file mode 100644
index 0000000..2943a12
--- /dev/null
+++ b/OptimizedNeuralNetwork.jl/test/weight-inizializer-algorithm/multiple-input-single-output.test.jl
@@ -0,0 +1,36 @@
+@testset "Nodes initialization algorithm entry dimension >1 output dimension 1" begin
+    # Check that the selection hypotheses are correct
+    M = 1
+    @test RampFunction(M) == 1
+    @test RampFunction(-M) == 0
+
+    # Well-defined regression problem with output of dimension 1
+    f_regression(x, y, z) = x * y - z
+    data_set_size = 4
+    entry_dimension = 3
+    output_dimension = 1
+    # Number of neurons
+    n = data_set_size # must be greater than 1 so that no error occurs
+    X_train = rand(Float64, data_set_size, entry_dimension)
+    Y_train = map(x -> f_regression(x...), eachrow(X_train))
+
+    h = nn_from_data(X_train, Y_train, n, M)
+
+    # Check that the sizes are the appropriate ones
+    @test size(h.W1) == (n, entry_dimension + 1)
+    @test size(h.W2) == (output_dimension, n)
+
+    # If the network has been built correctly,
+    # evaluating it on the data used to build it
+    # should return the respective value of Y_train
+    evaluar(x) = forward_propagation(h,
+        RampFunction, x)
+
+    for (x, y) in zip(eachrow(X_train), Y_train)
+        @test evaluar(x) ≈ [y]
+    end
+end
diff --git a/OptimizedNeuralNetwork.jl/test/weight-inizializer-algorithm/single-input-single-output.test.jl b/OptimizedNeuralNetwork.jl/test/weight-inizializer-algorithm/single-input-single-output.test.jl
new file mode 100644
index 0000000..c1db9f7
--- /dev/null
+++ b/OptimizedNeuralNetwork.jl/test/weight-inizializer-algorithm/single-input-single-output.test.jl
@@ -0,0 +1,36 @@
+
+@testset "Nodes initialization algorithm entry dimension 1 output dimension 1" begin
+    # Check that the selection hypotheses are correct
+    M = 1
+    @test RampFunction(M) == 1
+    @test RampFunction(-M) == 0
+    # Well-defined regression problem with output of dimension 1
+    f_regression(x) = (x <= 1) ? exp(-x) : log(x)
+    data_set_size = 5
+    entry_dimension = 1
+    output_dimension = 1
+    # Number of neurons
+    n = data_set_size # must be greater than 1 so that no error occurs
+    X_train = map(
+        x -> (x - 0.5) * 10, # rescale to the interval [-5,5]
+        rand(Float64, data_set_size)
+    )
+
+    Y_train = map(f_regression, X_train)
+    h = nn_from_data(X_train, Y_train, n, M)
+
+    # Check that the sizes are the appropriate ones
+    @test size(h.W1) == (n, 2)
+    @test size(h.W2) == (1, n)
+
+    # If the network has been built correctly,
+    # evaluating it on the data used to build it
+    # should return the respective value of Y_train
+    evaluar(x) = forward_propagation(h,
+        RampFunction, x)
+
+    for (x, y) in zip(X_train, Y_train)
+        @test evaluar([x]) ≈ [y]
+    end
+
+end
diff --git a/Project.toml b/Project.toml
index e67402d..79b5f49 100644
--- a/Project.toml
+++ b/Project.toml
@@ -1,2 +1,10 @@
+name = "OptimizedNeuralNetwork.jl"
+uuid = "c0288f0f-8577-469d-b024-d58cda6ff0ea"
+authors = ["Blanca "]
+version = "0.1.0"
 [deps]
+LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
+Plots = "91a5bcdd-55d7-5caf-9e0b-520d859cae80"
+Statistics = "10745b16-79ce-11e8-11f9-7d13ad32a3b2"
+TOML = "fa267f1f-6049-4f14-aa54-33bafae1ed76"
 TimerOutputs = "a759f4b9-e2f1-59dc-863e-4aeb61b1ea8f"
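With this `Project.toml`, the environment can be reproduced with the standard package manager (a usage note, not part of the diff):

```Julia
using Pkg
Pkg.activate(".")    # from the repository root
Pkg.instantiate()    # resolves and installs the dependencies listed above
```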
diff --git a/Readme.md b/Readme.md
index 8068467..f489b8d 100644
--- a/Readme.md
+++ b/Readme.md
@@ -1,85 +1,134 @@
-# Trabajo fin de grado sobre optimización de redes neuronales.
+# Final degree project on neural network optimization
+
+Granada, first half of 2022
+Student: Blanca Cano Camarero
+Advisors:
-Granada primera mitad 2022.
-Alumna: Blanca Cano Camarero
-Tutores:
 - Juan Julián Merelo Guervós
 - Francisco Javier Merí de la Maza

-## Motivación: Democratización de la inteligencia artificial.
+## The OptimizedNeuralNetwork.jl library

-Partiendo de las premisas de que la ciencia que no es replicable dudosamente es ciencias [1] y
-cómo los avances tecnológicos se están construyendo actualmente fundamentalmente gracias al aumento de la potencia de cómputo [2][3].
+The *OptimizedNeuralNetwork.jl* library implements the neural network model described in the project report (a PDF version can be downloaded from the *releases* section).
+It is intended as an optimization of conventional neural networks,
+together with other algorithms whose goal is also to improve some aspect of them.

-Es necesaria una democratización de la situación, un acercamiento a los nuevos resultados y aplicaciones
-para organizaciones y usuarios con capacidades de cómputo más modestas.
-Se pretende por tanto, realizar un estudio de la posible optimización de redes neuronales a raíz de: (1) el análisis detallados de la construcción y resultados matemáticos de éstas (como puede ser el teorema de aproximación universal) (2) Un estudio empírico de la velocidad o precisión de los resultados.
+It contains the functions listed below, illustrated with some examples;
+a complete usage example can be found in [the following notebook](https://github.com/BlancaCC/TFG-Estudio-de-las-redes-neuronales/tree/main/Memoria/capitulos).

-Tanto (1) como (2) se desarrollarán ligados y se retroalimentarán entre ellos.
+### Importing the library and module

-De esta manera se tratará de buscar algoritmos de redes neuronales que no requieran de una potencia masiva.
+```Julia
+include("OptimizedNeuralNetwork.jl/src/OptimizedNeuralNetwork.jl")
+using Main.OptimizedNeuralNetwork
+```

-## Generación de la memoria
+### Creating neural networks

-**Puede descargar un PDF de la memoria en la sección de releases**.
-O descargar el repositorio y escribir `make`.
+#### Creating a neural network with random weights

+A neural network with random weights:

+```Julia
+entry_dimension = 2
+number_of_hidden_units = 3
+output_dimension = 2
+
+RandomWeightsNN(
+    entry_dimension,
+    number_of_hidden_units,
+    output_dimension
+)
+```

-## Objetivos
+#### Creating a neural network from matrices

-1. Posibilidad de limitar la precisión de los cálculos con los que se trabaja en redes neuronales.
-2. Posibilidad de hallar límites superiores al tamaño de las redes neuronales que se usan en machine learning, tanto en capas como en unidades para cada capa.
-3. Implementación "open source" de los límites hallados anteriormente usando un lenguaje de altas prestaciones, que permita trabajar en cualquier tipo de hardware.
+```Julia
+S = [1,2,3] # input biases
+A = [3 4 1; 4 6 3; 1 1 1] # input coefficients
+B = [1 2 3; 3 2 3] # output coefficients
+FromMatrixNN(S, A, B)
+```

-## Estructura
+Initialization from training data:
+`nn_from_data(X_train, Y_train, n, M)`,
+where $n$ is the number of neurons and $M$ is a constant that depends on the activation function
+(see the report). A sketch is shown below.
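A hypothetical sketch of this call (the sizes are made up; see the tests of the repository for complete examples):

```Julia
X_train = rand(100, 3)    # 100 samples with 3 attributes
Y_train = rand(100, 2)    # corresponding outputs of dimension 2
h = nn_from_data(X_train, Y_train, 10, 10)   # 10 hidden neurons, M = 10
```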
-### 1 Teoría de la aproximación.
+### Activation functions

-Donde destacan el teorema de aproximación de Weierstrass y Stone-Weierstrass.
+#### Evaluated at a point, not depending on any parameter

-Es de interés profundizar en este campo porque
+```Julia
+CosineSquasher(10)
+RampFunction(1)
+ReLU(-1)
+Sigmoid(9999999)
+HardTanh(9999999)
+```

-(i) Da explicación del uso base de estructuras "simples" como son los polinomios a la hora de la construcción otras más "complejas".
-(ii) Clarifica qué tipo de funciones se pueden aproximar con polinomios.

+#### Parameter-dependent activation functions

+Some activation functions depend on parameters; we can define them efficiently through macros:

-### 2 Construcción de las redes
-#### 2.1 Formulación del marco teórico
-Se conecta con teoría de aproximación y teorema de convergencia universal.
+```Julia
+# Fix the parameters they depend on
+# by means of macros
+umbral = @ThresholdFunction(x->x, 0)
+indicadora = @IndicatorFunction(0)
+lRelu = @LReLU(0.01)

-#### 2.2 Descripción
-(Teoría)
-- Construcción desde perceptrón.
-- Explicación de la actualización de los pesos.
+# Evaluate them at specific points
+umbral(-2.9)
+indicadora(3.9)
+lRelu(0.2)
+```

-(Práctica)
-- Implementación estricta de una red neuronal.
-- Análisis de los resultados en parámetros como (i) bondad ajuste (ii) eficiencia de cómputo.
+### *Forward propagation* algorithm

-### 3 Teorema de aproximación universal
+```Julia
+forward_propagation(h, RampFunction, x)
+```

-Con el fin de validar los resultado obtenidos se desarrollará el paper de Hornik, Stinchcombe y White *Multilayer Feedforward Networks are Universal approximators*.
+### *Backpropagation* algorithm

-### 4 Fase de experimentación-especulación-refinamiento de la red neuronal.
+```Julia
+backpropagation!(h, X_train, Y_train, RampFunction, derivativeRampFunction, n)
+```

+## Rules
+- Generate the report: `make`.
+- Run the implementation tests: `make test`.
+- Run the experiments: `make experimentos` (the experiments generate data whose location can be configured in `Experimentos/.config.toml`).

-Con el fin de cuantificar el trabajo llevaré un registro [aquí](https://docs.google.com/spreadsheets/d/1TCcKQIKjKW9sMSU2f6obN9gHgv3c8UEdjmONkBlv42M/edit?usp=sharing).
+## Motivation of the project: democratization of artificial intelligence

+We start from two premises: science that is not replicable is hardly science [1],
+and current technological advances are being built mainly on the growth of computing power [2][3].
+
+A democratization of this situation is therefore needed, bringing the new results and applications
+closer to organizations and users with more modest computing capabilities.
+We thus intend to study the possible optimization of neural networks based on: (1) a detailed analysis of their construction and mathematical results (such as the universal approximation theorem); (2) an empirical study of the speed and accuracy of the results.
+
+Both (1) and (2) will be developed together and will feed back into each other.
+
+In this way we will look for neural network algorithms that do not require massive computing power.

-Bibliografía:
-[1] Título: *Agile (data) science: a (draft) manifesto*. Autores: Juan Julián Merelo Guervós y Mario García Valdez.
-Última fecha consulta: 13-02-21. URL: https://arxiv.org/abs/2104.12545 . Abstract: Science has a data management as well as a project management problem. While industrial grade data science teams have embraced the *agile* mindset, and adopted or created all kind of tools to manage reproducible workflows, academia-based science is still (mostly) mired in a mindset that's focused on a single final product (a paper), without focusing on incremental improvement and, over all, reproducibility.
In this report we argue towards the adoption of the agile mindset and agile data science tools in academia, to make a more responsible, sustainable, and above all, reproducible science.

+Bibliography:
+[1] Title: *Agile (data) science: a (draft) manifesto*. Authors: Juan Julián Merelo Guervós and Mario García Valdez.
+Last accessed: 13-02-21. URL: <https://arxiv.org/abs/2104.12545>. Abstract: Science has a data management as well as a project management problem. While industrial grade data science teams have embraced the *agile* mindset, and adopted or created all kind of tools to manage reproducible workflows, academia-based science is still (mostly) mired in a mindset that's focused on a single final product (a paper), without focusing on incremental improvement and, over all, reproducibility. In this report we argue towards the adoption of the agile mindset and agile data science tools in academia, to make a more responsible, sustainable, and above all, reproducible science.

-[2] Título: *The bitter Lesson*. Autor: Rich Sutton. URL: http://www.incompleteideas.net/IncIdeas/BitterLesson.html
+[2] Title: *The Bitter Lesson*. Author: Rich Sutton. URL: <http://www.incompleteideas.net/IncIdeas/BitterLesson.html>.
+Last accessed: 13-02-21. Summary: it offers a critical view of the evolution of *machine learning* based on anthropocentric approaches,
-y alaba el uso de métodos de propósitos generales y cómo ello conlleva que al aumentar la potencia cómputo los resultados mejoren.
+and praises the use of general-purpose methods, noting how their results improve as computing power increases.

[3] Title: *A Universal Law of Robustness via Isoperimetry*. Authors: Sebastien Bubeck and Mark Sellke. Book: *Advances in Neural Information Processing Systems*. Editors: A. Beygelzimer, Y. Dauphin, P. Liang and J. Wortman Vaughan. Year: 2021.
-URL: https://openreview.net/forum?id=z71OSKqTFh7
-abstract: Classically, data interpolation with a parametrized model class is possible as long as the number of parameters is larger than the number of equations to be satisfied. A puzzling phenomenon in the current practice of deep learning is that models are trained with many more parameters than what this classical theory would suggest. We propose a theoretical explanation for this phenomenon.
+URL: <https://openreview.net/forum?id=z71OSKqTFh7>
+abstract: Classically, data interpolation with a parametrized model class is possible as long as the number of parameters is larger than the number of equations to be satisfied. A puzzling phenomenon in the current practice of deep learning is that models are trained with many more parameters than what this classical theory would suggest. We propose a theoretical explanation for this phenomenon.
+We prove that for a broad class of data distributions and model classes, overparametrization is *necessary* if one wants to interpolate the data *smoothly*. Namely we show that *smooth* interpolation requires $d$ times more parameters than mere interpolation, where $d$ is the ambient data dimension. We prove this universal law of robustness for any smoothly parametrized function class with polynomial size weights, and any covariate distribution verifying isoperimetry. In the case of two-layers neural networks and Gaussian covariates, this law was conjectured in prior work by Bubeck, Li and Nagaraj. We also give an interpretation of our result as an improved generalization bound for model classes consisting of smooth functions.