From afc05ec8d46aee68813aaa52837b9369f410cf2b Mon Sep 17 00:00:00 2001
From: sidneyriffic <31529542+sidneyriffic@users.noreply.github.com>
Date: Sun, 1 Jul 2018 07:58:02 -0700
Subject: [PATCH 1/3] Hackathon deadline submission
2-24 has the lowest loss of the last model; 4-32 is a much later-epoch version of the same model. "origtri" is the trident-shaped precursor to the last model. The other "addforward" folders are from previous versions of the current model.
---
config.py | 4 +-
crypto_predict - pre-recursion.ipynb | 2658 ++++++++++++++++++++++++++
crypto_predict.ipynb | 1813 ++++++++++++------
weights 1-48/.gitignore | 5 +
weights 2-24/.gitignore | 5 +
weights 3-31/.gitignore | 5 +
weights 4-32/.gitignore | 5 +
weights addforward/.gitignore | 5 +
weights addforward2/.gitignore | 5 +
weights origtri/.gitignore | 5 +
10 files changed, 3944 insertions(+), 566 deletions(-)
create mode 100644 crypto_predict - pre-recursion.ipynb
create mode 100644 weights 1-48/.gitignore
create mode 100644 weights 2-24/.gitignore
create mode 100644 weights 3-31/.gitignore
create mode 100644 weights 4-32/.gitignore
create mode 100644 weights addforward/.gitignore
create mode 100644 weights addforward2/.gitignore
create mode 100644 weights origtri/.gitignore
diff --git a/config.py b/config.py
index 33a767d..787169c 100644
--- a/config.py
+++ b/config.py
@@ -1,5 +1,5 @@
# Which columns to use as input for the neural network
-columns = ['Close','Volume','Low','High']
+columns = ['Close', 'Volume', 'High', 'Low']
# Which currency pair are we interested in predicting
pair = 'BTC_ETH' # or 'USDT_BTC'
@@ -7,7 +7,7 @@
CONFIG = {
'pair': pair,
'period': 300,
- 'input_size': 30,
+ 'input_size': 48,
'output_size': 12,
'lstm_hidden_size': 50,
'columns' : columns,
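Note on the derived CONFIG fields: the notebook output below shows a filename of 'BTC_ETH_lstm_i60_o12_Close_Volume', which suggests how config.py presumably assembles it from the settings in this hunk. A minimal sketch, assuming that convention (reconstructed from the output, not copied from config.py):

    # Hypothetical reconstruction of the filename convention seen in the notebook.
    pair = 'BTC_ETH'
    columns = ['Close', 'Volume', 'High', 'Low']
    input_size, output_size, name = 48, 12, 'lstm'
    # e.g. 'BTC_ETH_lstm_i48_o12_Close_Volume_High_Low'
    filename = '_'.join([pair, name, 'i%d' % input_size, 'o%d' % output_size] + columns)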
diff --git a/crypto_predict - pre-recursion.ipynb b/crypto_predict - pre-recursion.ipynb
new file mode 100644
index 0000000..fc54e9b
--- /dev/null
+++ b/crypto_predict - pre-recursion.ipynb
@@ -0,0 +1,2658 @@
+{
+ "cells": [
+ {
+ "cell_type": "code",
+ "execution_count": 1,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "{'pair': 'BTC_ETH',\n",
+ " 'period': 300,\n",
+ " 'input_size': 60,\n",
+ " 'output_size': 12,\n",
+ " 'lstm_hidden_size': 50,\n",
+ " 'columns': ['Close', 'Volume'],\n",
+ " 'csv_src_file': 'BTC_ETH',\n",
+ " 'name': 'lstm',\n",
+ " 'folder': {'data': 'data/', 'weights': 'weights/'},\n",
+ " 'filename': 'BTC_ETH_lstm_i60_o12_Close_Volume'}"
+ ]
+ },
+ "execution_count": 1,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "from config import CONFIG\n",
+ "from utils import series_to_supervised\n",
+ "\n",
+ "CONFIG"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 2,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/html": [
+ "
\n",
+ "\n",
+ "
\n",
+ " \n",
+ " \n",
+ " | \n",
+ " Close | \n",
+ " Timestamp | \n",
+ " High | \n",
+ " Low | \n",
+ " Open | \n",
+ " Volume | \n",
+ "
\n",
+ " \n",
+ " \n",
+ " \n",
+ " 269507 | \n",
+ " 0.082828 | \n",
+ " 1519862700 | \n",
+ " 0.082856 | \n",
+ " 0.082729 | \n",
+ " 0.082729 | \n",
+ " 4.151247 | \n",
+ "
\n",
+ " \n",
+ " 269508 | \n",
+ " 0.082609 | \n",
+ " 1519863000 | \n",
+ " 0.082828 | \n",
+ " 0.082606 | \n",
+ " 0.082828 | \n",
+ " 5.551513 | \n",
+ "
\n",
+ " \n",
+ " 269509 | \n",
+ " 0.082552 | \n",
+ " 1519863300 | \n",
+ " 0.082673 | \n",
+ " 0.082547 | \n",
+ " 0.082609 | \n",
+ " 2.327443 | \n",
+ "
\n",
+ " \n",
+ " 269510 | \n",
+ " 0.082460 | \n",
+ " 1519863600 | \n",
+ " 0.082625 | \n",
+ " 0.082419 | \n",
+ " 0.082552 | \n",
+ " 1.519736 | \n",
+ "
\n",
+ " \n",
+ " 269511 | \n",
+ " 0.082455 | \n",
+ " 1519863900 | \n",
+ " 0.082460 | \n",
+ " 0.082418 | \n",
+ " 0.082455 | \n",
+ " 0.552411 | \n",
+ "
\n",
+ " \n",
+ "
\n",
+ "
"
+ ],
+ "text/plain": [
+ " Close Timestamp High Low Open Volume\n",
+ "269507 0.082828 1519862700 0.082856 0.082729 0.082729 4.151247\n",
+ "269508 0.082609 1519863000 0.082828 0.082606 0.082828 5.551513\n",
+ "269509 0.082552 1519863300 0.082673 0.082547 0.082609 2.327443\n",
+ "269510 0.082460 1519863600 0.082625 0.082419 0.082552 1.519736\n",
+ "269511 0.082455 1519863900 0.082460 0.082418 0.082455 0.552411"
+ ]
+ },
+ "execution_count": 2,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "import numpy as np\n",
+ "import pandas as pd\n",
+ "from sklearn.preprocessing import MinMaxScaler\n",
+ "from pandas import read_csv\n",
+ "from pandas import DataFrame\n",
+ "from pandas import concat\n",
+ "from matplotlib import pyplot\n",
+ " \n",
+ "input_size = 100\n",
+ "#data file path\n",
+ "dfp = ''.join([CONFIG['folder']['data'], CONFIG['csv_src_file'], '.csv'])\n",
+ "\n",
+ "#Columns of price data to use\n",
+ "columns = CONFIG['columns']\n",
+ "# df = pd.read_csv(dfp).dropna().tail(1000000)\n",
+ "dataset = pd.read_csv(dfp)\n",
+ "\n",
+ "# to drop values before 2018 1514764800, March 2018 1519862400, July 2017 1498867200\n",
+ "dataset = dataset[dataset.Timestamp > 1519862400]\n",
+ "dataset.head()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 3,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "29582\n"
+ ]
+ }
+ ],
+ "source": [
+ "print(dataset.shape[0])"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 4,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "29582\n"
+ ]
+ }
+ ],
+ "source": [
+ "#filter out unwanted columns and convert to numpy.ndarray\n",
+ "#loc[rows, cols] to get, .values gets values from a dataframe as a numpy.ndarray\n",
+ "values = dataset.loc[:,columns].values\n",
+ "print(values.shape[0])"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 5,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "(60, 2, 12)"
+ ]
+ },
+ "execution_count": 5,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "# parameters to prepare the dataset for learning \n",
+ "n_lag = CONFIG['input_size']\n",
+ "n_out = CONFIG['output_size']\n",
+ "n_features = len(columns)\n",
+ "n_lag,n_features,n_out"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 6,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "29582\n"
+ ]
+ }
+ ],
+ "source": [
+ "# scale dataset\n",
+ "scaler = MinMaxScaler(feature_range=(0, 1))\n",
+ "scaled = scaler.fit_transform(values)\n",
+ "print(scaled.shape[0])"
+ ]
+ },
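The scaler is fit jointly on all configured columns, so any prediction made in scaled space on the Close channel has to be inverted through this same scaler. A minimal sketch of that inversion, assuming Close stays in column 0 as the config comments require; the unscale_close helper is hypothetical and not part of the notebook:

    import numpy as np

    # Hypothetical helper: invert MinMax scaling for Close only. The scaler was
    # fit on n_features columns, so pad predictions with zeros for the others.
    def unscale_close(scaler, close_scaled, n_features):
        padded = np.zeros((len(close_scaled), n_features))
        padded[:, 0] = close_scaled
        return scaler.inverse_transform(padded)[:, 0]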
+ {
+ "cell_type": "code",
+ "execution_count": 7,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/html": [
+ "\n",
+ "\n",
+ "
\n",
+ " \n",
+ " \n",
+ " | \n",
+ " var1(t-60) | \n",
+ " var2(t-60) | \n",
+ " var1(t-59) | \n",
+ " var2(t-59) | \n",
+ " var1(t-58) | \n",
+ " var2(t-58) | \n",
+ " var1(t-57) | \n",
+ " var2(t-57) | \n",
+ " var1(t-56) | \n",
+ " var2(t-56) | \n",
+ " ... | \n",
+ " var1(t+7) | \n",
+ " var2(t+7) | \n",
+ " var1(t+8) | \n",
+ " var2(t+8) | \n",
+ " var1(t+9) | \n",
+ " var2(t+9) | \n",
+ " var1(t+10) | \n",
+ " var2(t+10) | \n",
+ " var1(t+11) | \n",
+ " var2(t+11) | \n",
+ "
\n",
+ " \n",
+ " \n",
+ " \n",
+ " 60 | \n",
+ " 0.911499 | \n",
+ " 0.005383 | \n",
+ " 0.904896 | \n",
+ " 0.007199 | \n",
+ " 0.903185 | \n",
+ " 0.003018 | \n",
+ " 0.900404 | \n",
+ " 0.001971 | \n",
+ " 0.900251 | \n",
+ " 0.000716 | \n",
+ " ... | \n",
+ " 0.896031 | \n",
+ " 0.000725 | \n",
+ " 0.900122 | \n",
+ " 0.002147 | \n",
+ " 0.899499 | \n",
+ " 0.002193 | \n",
+ " 0.900826 | \n",
+ " 0.003644 | \n",
+ " 0.902605 | \n",
+ " 0.000667 | \n",
+ "
\n",
+ " \n",
+ " 61 | \n",
+ " 0.904896 | \n",
+ " 0.007199 | \n",
+ " 0.903185 | \n",
+ " 0.003018 | \n",
+ " 0.900404 | \n",
+ " 0.001971 | \n",
+ " 0.900251 | \n",
+ " 0.000716 | \n",
+ " 0.899950 | \n",
+ " 0.001567 | \n",
+ " ... | \n",
+ " 0.900122 | \n",
+ " 0.002147 | \n",
+ " 0.899499 | \n",
+ " 0.002193 | \n",
+ " 0.900826 | \n",
+ " 0.003644 | \n",
+ " 0.902605 | \n",
+ " 0.000667 | \n",
+ " 0.905607 | \n",
+ " 0.001138 | \n",
+ "
\n",
+ " \n",
+ " 62 | \n",
+ " 0.903185 | \n",
+ " 0.003018 | \n",
+ " 0.900404 | \n",
+ " 0.001971 | \n",
+ " 0.900251 | \n",
+ " 0.000716 | \n",
+ " 0.899950 | \n",
+ " 0.001567 | \n",
+ " 0.900536 | \n",
+ " 0.001585 | \n",
+ " ... | \n",
+ " 0.899499 | \n",
+ " 0.002193 | \n",
+ " 0.900826 | \n",
+ " 0.003644 | \n",
+ " 0.902605 | \n",
+ " 0.000667 | \n",
+ " 0.905607 | \n",
+ " 0.001138 | \n",
+ " 0.906037 | \n",
+ " 0.001566 | \n",
+ "
\n",
+ " \n",
+ " 63 | \n",
+ " 0.900404 | \n",
+ " 0.001971 | \n",
+ " 0.900251 | \n",
+ " 0.000716 | \n",
+ " 0.899950 | \n",
+ " 0.001567 | \n",
+ " 0.900536 | \n",
+ " 0.001585 | \n",
+ " 0.898062 | \n",
+ " 0.002247 | \n",
+ " ... | \n",
+ " 0.900826 | \n",
+ " 0.003644 | \n",
+ " 0.902605 | \n",
+ " 0.000667 | \n",
+ " 0.905607 | \n",
+ " 0.001138 | \n",
+ " 0.906037 | \n",
+ " 0.001566 | \n",
+ " 0.903877 | \n",
+ " 0.000476 | \n",
+ "
\n",
+ " \n",
+ " 64 | \n",
+ " 0.900251 | \n",
+ " 0.000716 | \n",
+ " 0.899950 | \n",
+ " 0.001567 | \n",
+ " 0.900536 | \n",
+ " 0.001585 | \n",
+ " 0.898062 | \n",
+ " 0.002247 | \n",
+ " 0.900530 | \n",
+ " 0.001073 | \n",
+ " ... | \n",
+ " 0.902605 | \n",
+ " 0.000667 | \n",
+ " 0.905607 | \n",
+ " 0.001138 | \n",
+ " 0.906037 | \n",
+ " 0.001566 | \n",
+ " 0.903877 | \n",
+ " 0.000476 | \n",
+ " 0.906416 | \n",
+ " 0.003237 | \n",
+ "
\n",
+ " \n",
+ " 65 | \n",
+ " 0.899950 | \n",
+ " 0.001567 | \n",
+ " 0.900536 | \n",
+ " 0.001585 | \n",
+ " 0.898062 | \n",
+ " 0.002247 | \n",
+ " 0.900530 | \n",
+ " 0.001073 | \n",
+ " 0.898052 | \n",
+ " 0.000990 | \n",
+ " ... | \n",
+ " 0.905607 | \n",
+ " 0.001138 | \n",
+ " 0.906037 | \n",
+ " 0.001566 | \n",
+ " 0.903877 | \n",
+ " 0.000476 | \n",
+ " 0.906416 | \n",
+ " 0.003237 | \n",
+ " 0.911383 | \n",
+ " 0.002917 | \n",
+ "
\n",
+ " \n",
+ " 66 | \n",
+ " 0.900536 | \n",
+ " 0.001585 | \n",
+ " 0.898062 | \n",
+ " 0.002247 | \n",
+ " 0.900530 | \n",
+ " 0.001073 | \n",
+ " 0.898052 | \n",
+ " 0.000990 | \n",
+ " 0.900246 | \n",
+ " 0.001661 | \n",
+ " ... | \n",
+ " 0.906037 | \n",
+ " 0.001566 | \n",
+ " 0.903877 | \n",
+ " 0.000476 | \n",
+ " 0.906416 | \n",
+ " 0.003237 | \n",
+ " 0.911383 | \n",
+ " 0.002917 | \n",
+ " 0.912647 | \n",
+ " 0.003534 | \n",
+ "
\n",
+ " \n",
+ " 67 | \n",
+ " 0.898062 | \n",
+ " 0.002247 | \n",
+ " 0.900530 | \n",
+ " 0.001073 | \n",
+ " 0.898052 | \n",
+ " 0.000990 | \n",
+ " 0.900246 | \n",
+ " 0.001661 | \n",
+ " 0.900536 | \n",
+ " 0.000313 | \n",
+ " ... | \n",
+ " 0.903877 | \n",
+ " 0.000476 | \n",
+ " 0.906416 | \n",
+ " 0.003237 | \n",
+ " 0.911383 | \n",
+ " 0.002917 | \n",
+ " 0.912647 | \n",
+ " 0.003534 | \n",
+ " 0.913068 | \n",
+ " 0.005535 | \n",
+ "
\n",
+ " \n",
+ " 68 | \n",
+ " 0.900530 | \n",
+ " 0.001073 | \n",
+ " 0.898052 | \n",
+ " 0.000990 | \n",
+ " 0.900246 | \n",
+ " 0.001661 | \n",
+ " 0.900536 | \n",
+ " 0.000313 | \n",
+ " 0.901199 | \n",
+ " 0.001925 | \n",
+ " ... | \n",
+ " 0.906416 | \n",
+ " 0.003237 | \n",
+ " 0.911383 | \n",
+ " 0.002917 | \n",
+ " 0.912647 | \n",
+ " 0.003534 | \n",
+ " 0.913068 | \n",
+ " 0.005535 | \n",
+ " 0.909149 | \n",
+ " 0.004790 | \n",
+ "
\n",
+ " \n",
+ " 69 | \n",
+ " 0.898052 | \n",
+ " 0.000990 | \n",
+ " 0.900246 | \n",
+ " 0.001661 | \n",
+ " 0.900536 | \n",
+ " 0.000313 | \n",
+ " 0.901199 | \n",
+ " 0.001925 | \n",
+ " 0.901833 | \n",
+ " 0.000689 | \n",
+ " ... | \n",
+ " 0.911383 | \n",
+ " 0.002917 | \n",
+ " 0.912647 | \n",
+ " 0.003534 | \n",
+ " 0.913068 | \n",
+ " 0.005535 | \n",
+ " 0.909149 | \n",
+ " 0.004790 | \n",
+ " 0.906146 | \n",
+ " 0.001807 | \n",
+ "
\n",
+ " \n",
+ "
\n",
+ "
10 rows × 144 columns
\n",
+ "
"
+ ],
+ "text/plain": [
+ " var1(t-60) var2(t-60) var1(t-59) var2(t-59) var1(t-58) var2(t-58) \\\n",
+ "60 0.911499 0.005383 0.904896 0.007199 0.903185 0.003018 \n",
+ "61 0.904896 0.007199 0.903185 0.003018 0.900404 0.001971 \n",
+ "62 0.903185 0.003018 0.900404 0.001971 0.900251 0.000716 \n",
+ "63 0.900404 0.001971 0.900251 0.000716 0.899950 0.001567 \n",
+ "64 0.900251 0.000716 0.899950 0.001567 0.900536 0.001585 \n",
+ "65 0.899950 0.001567 0.900536 0.001585 0.898062 0.002247 \n",
+ "66 0.900536 0.001585 0.898062 0.002247 0.900530 0.001073 \n",
+ "67 0.898062 0.002247 0.900530 0.001073 0.898052 0.000990 \n",
+ "68 0.900530 0.001073 0.898052 0.000990 0.900246 0.001661 \n",
+ "69 0.898052 0.000990 0.900246 0.001661 0.900536 0.000313 \n",
+ "\n",
+ " var1(t-57) var2(t-57) var1(t-56) var2(t-56) ... var1(t+7) \\\n",
+ "60 0.900404 0.001971 0.900251 0.000716 ... 0.896031 \n",
+ "61 0.900251 0.000716 0.899950 0.001567 ... 0.900122 \n",
+ "62 0.899950 0.001567 0.900536 0.001585 ... 0.899499 \n",
+ "63 0.900536 0.001585 0.898062 0.002247 ... 0.900826 \n",
+ "64 0.898062 0.002247 0.900530 0.001073 ... 0.902605 \n",
+ "65 0.900530 0.001073 0.898052 0.000990 ... 0.905607 \n",
+ "66 0.898052 0.000990 0.900246 0.001661 ... 0.906037 \n",
+ "67 0.900246 0.001661 0.900536 0.000313 ... 0.903877 \n",
+ "68 0.900536 0.000313 0.901199 0.001925 ... 0.906416 \n",
+ "69 0.901199 0.001925 0.901833 0.000689 ... 0.911383 \n",
+ "\n",
+ " var2(t+7) var1(t+8) var2(t+8) var1(t+9) var2(t+9) var1(t+10) \\\n",
+ "60 0.000725 0.900122 0.002147 0.899499 0.002193 0.900826 \n",
+ "61 0.002147 0.899499 0.002193 0.900826 0.003644 0.902605 \n",
+ "62 0.002193 0.900826 0.003644 0.902605 0.000667 0.905607 \n",
+ "63 0.003644 0.902605 0.000667 0.905607 0.001138 0.906037 \n",
+ "64 0.000667 0.905607 0.001138 0.906037 0.001566 0.903877 \n",
+ "65 0.001138 0.906037 0.001566 0.903877 0.000476 0.906416 \n",
+ "66 0.001566 0.903877 0.000476 0.906416 0.003237 0.911383 \n",
+ "67 0.000476 0.906416 0.003237 0.911383 0.002917 0.912647 \n",
+ "68 0.003237 0.911383 0.002917 0.912647 0.003534 0.913068 \n",
+ "69 0.002917 0.912647 0.003534 0.913068 0.005535 0.909149 \n",
+ "\n",
+ " var2(t+10) var1(t+11) var2(t+11) \n",
+ "60 0.003644 0.902605 0.000667 \n",
+ "61 0.000667 0.905607 0.001138 \n",
+ "62 0.001138 0.906037 0.001566 \n",
+ "63 0.001566 0.903877 0.000476 \n",
+ "64 0.000476 0.906416 0.003237 \n",
+ "65 0.003237 0.911383 0.002917 \n",
+ "66 0.002917 0.912647 0.003534 \n",
+ "67 0.003534 0.913068 0.005535 \n",
+ "68 0.005535 0.909149 0.004790 \n",
+ "69 0.004790 0.906146 0.001807 \n",
+ "\n",
+ "[10 rows x 144 columns]"
+ ]
+ },
+ "execution_count": 7,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "# frame as supervised learning\n",
+ "reframed = series_to_supervised(scaled, n_lag, n_out)\n",
+ "reframed.head(10)"
+ ]
+ },
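series_to_supervised is imported from the repo's utils module, which this patch does not touch. Judging by the var1(t-60) ... var1(t+11) column names and the 71 edge rows dropped (60 lags plus 11 leads), it is presumably the conventional shift-and-concat framing; a sketch under that assumption:

    from pandas import DataFrame, concat

    # Assumed shape of utils.series_to_supervised: n_in lag columns and n_out
    # forward columns per feature, with the NaN edge rows dropped.
    def series_to_supervised(data, n_in=1, n_out=1, dropnan=True):
        n_vars = data.shape[1]
        df = DataFrame(data)
        cols, names = [], []
        for i in range(n_in, 0, -1):           # input sequence: t-n_in ... t-1
            cols.append(df.shift(i))
            names += ['var%d(t-%d)' % (j + 1, i) for j in range(n_vars)]
        for i in range(n_out):                 # forecast sequence: t ... t+n_out-1
            cols.append(df.shift(-i))
            names += ['var%d(t%s)' % (j + 1, '' if i == 0 else '+%d' % i)
                      for j in range(n_vars)]
        agg = concat(cols, axis=1)
        agg.columns = names
        if dropnan:
            agg.dropna(inplace=True)           # 29582 - (60 + 11) = 29511 rows
        return agg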
+ {
+ "cell_type": "code",
+ "execution_count": 8,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "29511\n"
+ ]
+ }
+ ],
+ "source": [
+ "print(reframed.shape[0])"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 9,
+ "metadata": {
+ "scrolled": true
+ },
+ "outputs": [
+ {
+ "data": {
+ "text/html": [
+ "\n",
+ "\n",
+ "
\n",
+ " \n",
+ " \n",
+ " | \n",
+ " var1(t-60) | \n",
+ " var2(t-60) | \n",
+ " var1(t-59) | \n",
+ " var2(t-59) | \n",
+ " var1(t-58) | \n",
+ " var2(t-58) | \n",
+ " var1(t-57) | \n",
+ " var2(t-57) | \n",
+ " var1(t-56) | \n",
+ " var2(t-56) | \n",
+ " var1(t-55) | \n",
+ " var2(t-55) | \n",
+ " var1(t-54) | \n",
+ " var2(t-54) | \n",
+ " var1(t-53) | \n",
+ " var2(t-53) | \n",
+ " var1(t-52) | \n",
+ " var2(t-52) | \n",
+ " var1(t-51) | \n",
+ " var2(t-51) | \n",
+ " var1(t-50) | \n",
+ " var2(t-50) | \n",
+ " var1(t-49) | \n",
+ " var2(t-49) | \n",
+ " var1(t-48) | \n",
+ " var2(t-48) | \n",
+ " var1(t-47) | \n",
+ " var2(t-47) | \n",
+ " var1(t-46) | \n",
+ " var2(t-46) | \n",
+ " var1(t-45) | \n",
+ " var2(t-45) | \n",
+ " var1(t-44) | \n",
+ " var2(t-44) | \n",
+ " var1(t-43) | \n",
+ " var2(t-43) | \n",
+ " var1(t-42) | \n",
+ " var2(t-42) | \n",
+ " var1(t-41) | \n",
+ " var2(t-41) | \n",
+ " var1(t-40) | \n",
+ " var2(t-40) | \n",
+ " var1(t-39) | \n",
+ " var2(t-39) | \n",
+ " var1(t-38) | \n",
+ " var2(t-38) | \n",
+ " var1(t-37) | \n",
+ " var2(t-37) | \n",
+ " var1(t-36) | \n",
+ " var2(t-36) | \n",
+ " var1(t-35) | \n",
+ " var2(t-35) | \n",
+ " var1(t-34) | \n",
+ " var2(t-34) | \n",
+ " var1(t-33) | \n",
+ " var2(t-33) | \n",
+ " var1(t-32) | \n",
+ " var2(t-32) | \n",
+ " var1(t-31) | \n",
+ " var2(t-31) | \n",
+ " var1(t-30) | \n",
+ " var2(t-30) | \n",
+ " var1(t-29) | \n",
+ " var2(t-29) | \n",
+ " var1(t-28) | \n",
+ " var2(t-28) | \n",
+ " var1(t-27) | \n",
+ " var2(t-27) | \n",
+ " var1(t-26) | \n",
+ " var2(t-26) | \n",
+ " var1(t-25) | \n",
+ " var2(t-25) | \n",
+ " var1(t-24) | \n",
+ " var2(t-24) | \n",
+ " var1(t-23) | \n",
+ " var2(t-23) | \n",
+ " var1(t-22) | \n",
+ " var2(t-22) | \n",
+ " var1(t-21) | \n",
+ " var2(t-21) | \n",
+ " var1(t-20) | \n",
+ " var2(t-20) | \n",
+ " var1(t-19) | \n",
+ " var2(t-19) | \n",
+ " var1(t-18) | \n",
+ " var2(t-18) | \n",
+ " var1(t-17) | \n",
+ " var2(t-17) | \n",
+ " var1(t-16) | \n",
+ " var2(t-16) | \n",
+ " var1(t-15) | \n",
+ " var2(t-15) | \n",
+ " var1(t-14) | \n",
+ " var2(t-14) | \n",
+ " var1(t-13) | \n",
+ " var2(t-13) | \n",
+ " var1(t-12) | \n",
+ " var2(t-12) | \n",
+ " var1(t-11) | \n",
+ " var2(t-11) | \n",
+ " var1(t-10) | \n",
+ " var2(t-10) | \n",
+ " var1(t-9) | \n",
+ " var2(t-9) | \n",
+ " var1(t-8) | \n",
+ " var2(t-8) | \n",
+ " var1(t-7) | \n",
+ " var2(t-7) | \n",
+ " var1(t-6) | \n",
+ " var2(t-6) | \n",
+ " var1(t-5) | \n",
+ " var2(t-5) | \n",
+ " var1(t-4) | \n",
+ " var2(t-4) | \n",
+ " var1(t-3) | \n",
+ " var2(t-3) | \n",
+ " var1(t-2) | \n",
+ " var2(t-2) | \n",
+ " var1(t-1) | \n",
+ " var2(t-1) | \n",
+ " var1(t) | \n",
+ " var1(t+1) | \n",
+ " var1(t+2) | \n",
+ " var1(t+3) | \n",
+ " var1(t+4) | \n",
+ " var1(t+5) | \n",
+ " var1(t+6) | \n",
+ " var1(t+7) | \n",
+ " var1(t+8) | \n",
+ " var1(t+9) | \n",
+ " var1(t+10) | \n",
+ " var1(t+11) | \n",
+ "
\n",
+ " \n",
+ " \n",
+ " \n",
+ " 60 | \n",
+ " 0.911499 | \n",
+ " 0.005383 | \n",
+ " 0.904896 | \n",
+ " 0.007199 | \n",
+ " 0.903185 | \n",
+ " 0.003018 | \n",
+ " 0.900404 | \n",
+ " 0.001971 | \n",
+ " 0.900251 | \n",
+ " 0.000716 | \n",
+ " 0.899950 | \n",
+ " 0.001567 | \n",
+ " 0.900536 | \n",
+ " 0.001585 | \n",
+ " 0.898062 | \n",
+ " 0.002247 | \n",
+ " 0.900530 | \n",
+ " 0.001073 | \n",
+ " 0.898052 | \n",
+ " 0.000990 | \n",
+ " 0.900246 | \n",
+ " 0.001661 | \n",
+ " 0.900536 | \n",
+ " 0.000313 | \n",
+ " 0.901199 | \n",
+ " 0.001925 | \n",
+ " 0.901833 | \n",
+ " 0.000689 | \n",
+ " 0.904123 | \n",
+ " 0.004102 | \n",
+ " 0.904161 | \n",
+ " 0.001904 | \n",
+ " 0.898631 | \n",
+ " 0.002870 | \n",
+ " 0.898622 | \n",
+ " 0.001366 | \n",
+ " 0.900645 | \n",
+ " 0.001475 | \n",
+ " 0.902122 | \n",
+ " 0.003711 | \n",
+ " 0.901912 | \n",
+ " 0.000501 | \n",
+ " 0.898889 | \n",
+ " 0.001275 | \n",
+ " 0.897707 | \n",
+ " 0.006063 | \n",
+ " 0.896295 | \n",
+ " 0.005163 | \n",
+ " 0.901640 | \n",
+ " 0.005457 | \n",
+ " 0.894162 | \n",
+ " 0.001330 | \n",
+ " 0.901667 | \n",
+ " 0.004633 | \n",
+ " 0.902515 | \n",
+ " 0.000511 | \n",
+ " 0.902515 | \n",
+ " 0.005043 | \n",
+ " 0.903902 | \n",
+ " 0.001018 | \n",
+ " 0.911195 | \n",
+ " 0.004273 | \n",
+ " 0.909011 | \n",
+ " 0.000288 | \n",
+ " 0.908729 | \n",
+ " 0.005163 | \n",
+ " 0.903118 | \n",
+ " 0.000774 | \n",
+ " 0.904913 | \n",
+ " 0.001256 | \n",
+ " 0.909531 | \n",
+ " 0.000620 | \n",
+ " 0.907403 | \n",
+ " 0.001534 | \n",
+ " 0.907393 | \n",
+ " 0.001065 | \n",
+ " 0.907137 | \n",
+ " 0.001018 | \n",
+ " 0.903187 | \n",
+ " 0.000341 | \n",
+ " 0.903218 | \n",
+ " 0.000487 | \n",
+ " 0.903810 | \n",
+ " 0.001179 | \n",
+ " 0.909454 | \n",
+ " 0.001097 | \n",
+ " 0.907357 | \n",
+ " 0.000555 | \n",
+ " 0.911044 | \n",
+ " 0.007564 | \n",
+ " 0.910657 | \n",
+ " 0.003413 | \n",
+ " 0.907522 | \n",
+ " 0.008177 | \n",
+ " 0.900615 | \n",
+ " 0.003681 | \n",
+ " 0.906061 | \n",
+ " 0.001641 | \n",
+ " 0.899258 | \n",
+ " 0.016586 | \n",
+ " 0.903952 | \n",
+ " 0.012982 | \n",
+ " 0.899530 | \n",
+ " 0.006698 | \n",
+ " 0.899831 | \n",
+ " 0.000562 | \n",
+ " 0.907336 | \n",
+ " 0.004833 | \n",
+ " 0.907291 | \n",
+ " 0.001092 | \n",
+ " 0.901101 | \n",
+ " 0.000096 | \n",
+ " 0.902979 | \n",
+ " 0.000749 | \n",
+ " 0.898715 | \n",
+ " 0.003541 | \n",
+ " 0.899620 | \n",
+ " 0.003343 | \n",
+ " 0.902619 | \n",
+ " 0.000657 | \n",
+ " 0.898043 | \n",
+ " 0.899926 | \n",
+ " 0.904471 | \n",
+ " 0.901639 | \n",
+ " 0.904114 | \n",
+ " 0.894464 | \n",
+ " 0.894675 | \n",
+ " 0.896031 | \n",
+ " 0.900122 | \n",
+ " 0.899499 | \n",
+ " 0.900826 | \n",
+ " 0.902605 | \n",
+ "
\n",
+ " \n",
+ " 61 | \n",
+ " 0.904896 | \n",
+ " 0.007199 | \n",
+ " 0.903185 | \n",
+ " 0.003018 | \n",
+ " 0.900404 | \n",
+ " 0.001971 | \n",
+ " 0.900251 | \n",
+ " 0.000716 | \n",
+ " 0.899950 | \n",
+ " 0.001567 | \n",
+ " 0.900536 | \n",
+ " 0.001585 | \n",
+ " 0.898062 | \n",
+ " 0.002247 | \n",
+ " 0.900530 | \n",
+ " 0.001073 | \n",
+ " 0.898052 | \n",
+ " 0.000990 | \n",
+ " 0.900246 | \n",
+ " 0.001661 | \n",
+ " 0.900536 | \n",
+ " 0.000313 | \n",
+ " 0.901199 | \n",
+ " 0.001925 | \n",
+ " 0.901833 | \n",
+ " 0.000689 | \n",
+ " 0.904123 | \n",
+ " 0.004102 | \n",
+ " 0.904161 | \n",
+ " 0.001904 | \n",
+ " 0.898631 | \n",
+ " 0.002870 | \n",
+ " 0.898622 | \n",
+ " 0.001366 | \n",
+ " 0.900645 | \n",
+ " 0.001475 | \n",
+ " 0.902122 | \n",
+ " 0.003711 | \n",
+ " 0.901912 | \n",
+ " 0.000501 | \n",
+ " 0.898889 | \n",
+ " 0.001275 | \n",
+ " 0.897707 | \n",
+ " 0.006063 | \n",
+ " 0.896295 | \n",
+ " 0.005163 | \n",
+ " 0.901640 | \n",
+ " 0.005457 | \n",
+ " 0.894162 | \n",
+ " 0.001330 | \n",
+ " 0.901667 | \n",
+ " 0.004633 | \n",
+ " 0.902515 | \n",
+ " 0.000511 | \n",
+ " 0.902515 | \n",
+ " 0.005043 | \n",
+ " 0.903902 | \n",
+ " 0.001018 | \n",
+ " 0.911195 | \n",
+ " 0.004273 | \n",
+ " 0.909011 | \n",
+ " 0.000288 | \n",
+ " 0.908729 | \n",
+ " 0.005163 | \n",
+ " 0.903118 | \n",
+ " 0.000774 | \n",
+ " 0.904913 | \n",
+ " 0.001256 | \n",
+ " 0.909531 | \n",
+ " 0.000620 | \n",
+ " 0.907403 | \n",
+ " 0.001534 | \n",
+ " 0.907393 | \n",
+ " 0.001065 | \n",
+ " 0.907137 | \n",
+ " 0.001018 | \n",
+ " 0.903187 | \n",
+ " 0.000341 | \n",
+ " 0.903218 | \n",
+ " 0.000487 | \n",
+ " 0.903810 | \n",
+ " 0.001179 | \n",
+ " 0.909454 | \n",
+ " 0.001097 | \n",
+ " 0.907357 | \n",
+ " 0.000555 | \n",
+ " 0.911044 | \n",
+ " 0.007564 | \n",
+ " 0.910657 | \n",
+ " 0.003413 | \n",
+ " 0.907522 | \n",
+ " 0.008177 | \n",
+ " 0.900615 | \n",
+ " 0.003681 | \n",
+ " 0.906061 | \n",
+ " 0.001641 | \n",
+ " 0.899258 | \n",
+ " 0.016586 | \n",
+ " 0.903952 | \n",
+ " 0.012982 | \n",
+ " 0.899530 | \n",
+ " 0.006698 | \n",
+ " 0.899831 | \n",
+ " 0.000562 | \n",
+ " 0.907336 | \n",
+ " 0.004833 | \n",
+ " 0.907291 | \n",
+ " 0.001092 | \n",
+ " 0.901101 | \n",
+ " 0.000096 | \n",
+ " 0.902979 | \n",
+ " 0.000749 | \n",
+ " 0.898715 | \n",
+ " 0.003541 | \n",
+ " 0.899620 | \n",
+ " 0.003343 | \n",
+ " 0.902619 | \n",
+ " 0.000657 | \n",
+ " 0.898043 | \n",
+ " 0.001283 | \n",
+ " 0.899926 | \n",
+ " 0.904471 | \n",
+ " 0.901639 | \n",
+ " 0.904114 | \n",
+ " 0.894464 | \n",
+ " 0.894675 | \n",
+ " 0.896031 | \n",
+ " 0.900122 | \n",
+ " 0.899499 | \n",
+ " 0.900826 | \n",
+ " 0.902605 | \n",
+ " 0.905607 | \n",
+ "
\n",
+ " \n",
+ " 62 | \n",
+ " 0.903185 | \n",
+ " 0.003018 | \n",
+ " 0.900404 | \n",
+ " 0.001971 | \n",
+ " 0.900251 | \n",
+ " 0.000716 | \n",
+ " 0.899950 | \n",
+ " 0.001567 | \n",
+ " 0.900536 | \n",
+ " 0.001585 | \n",
+ " 0.898062 | \n",
+ " 0.002247 | \n",
+ " 0.900530 | \n",
+ " 0.001073 | \n",
+ " 0.898052 | \n",
+ " 0.000990 | \n",
+ " 0.900246 | \n",
+ " 0.001661 | \n",
+ " 0.900536 | \n",
+ " 0.000313 | \n",
+ " 0.901199 | \n",
+ " 0.001925 | \n",
+ " 0.901833 | \n",
+ " 0.000689 | \n",
+ " 0.904123 | \n",
+ " 0.004102 | \n",
+ " 0.904161 | \n",
+ " 0.001904 | \n",
+ " 0.898631 | \n",
+ " 0.002870 | \n",
+ " 0.898622 | \n",
+ " 0.001366 | \n",
+ " 0.900645 | \n",
+ " 0.001475 | \n",
+ " 0.902122 | \n",
+ " 0.003711 | \n",
+ " 0.901912 | \n",
+ " 0.000501 | \n",
+ " 0.898889 | \n",
+ " 0.001275 | \n",
+ " 0.897707 | \n",
+ " 0.006063 | \n",
+ " 0.896295 | \n",
+ " 0.005163 | \n",
+ " 0.901640 | \n",
+ " 0.005457 | \n",
+ " 0.894162 | \n",
+ " 0.001330 | \n",
+ " 0.901667 | \n",
+ " 0.004633 | \n",
+ " 0.902515 | \n",
+ " 0.000511 | \n",
+ " 0.902515 | \n",
+ " 0.005043 | \n",
+ " 0.903902 | \n",
+ " 0.001018 | \n",
+ " 0.911195 | \n",
+ " 0.004273 | \n",
+ " 0.909011 | \n",
+ " 0.000288 | \n",
+ " 0.908729 | \n",
+ " 0.005163 | \n",
+ " 0.903118 | \n",
+ " 0.000774 | \n",
+ " 0.904913 | \n",
+ " 0.001256 | \n",
+ " 0.909531 | \n",
+ " 0.000620 | \n",
+ " 0.907403 | \n",
+ " 0.001534 | \n",
+ " 0.907393 | \n",
+ " 0.001065 | \n",
+ " 0.907137 | \n",
+ " 0.001018 | \n",
+ " 0.903187 | \n",
+ " 0.000341 | \n",
+ " 0.903218 | \n",
+ " 0.000487 | \n",
+ " 0.903810 | \n",
+ " 0.001179 | \n",
+ " 0.909454 | \n",
+ " 0.001097 | \n",
+ " 0.907357 | \n",
+ " 0.000555 | \n",
+ " 0.911044 | \n",
+ " 0.007564 | \n",
+ " 0.910657 | \n",
+ " 0.003413 | \n",
+ " 0.907522 | \n",
+ " 0.008177 | \n",
+ " 0.900615 | \n",
+ " 0.003681 | \n",
+ " 0.906061 | \n",
+ " 0.001641 | \n",
+ " 0.899258 | \n",
+ " 0.016586 | \n",
+ " 0.903952 | \n",
+ " 0.012982 | \n",
+ " 0.899530 | \n",
+ " 0.006698 | \n",
+ " 0.899831 | \n",
+ " 0.000562 | \n",
+ " 0.907336 | \n",
+ " 0.004833 | \n",
+ " 0.907291 | \n",
+ " 0.001092 | \n",
+ " 0.901101 | \n",
+ " 0.000096 | \n",
+ " 0.902979 | \n",
+ " 0.000749 | \n",
+ " 0.898715 | \n",
+ " 0.003541 | \n",
+ " 0.899620 | \n",
+ " 0.003343 | \n",
+ " 0.902619 | \n",
+ " 0.000657 | \n",
+ " 0.898043 | \n",
+ " 0.001283 | \n",
+ " 0.899926 | \n",
+ " 0.001236 | \n",
+ " 0.904471 | \n",
+ " 0.901639 | \n",
+ " 0.904114 | \n",
+ " 0.894464 | \n",
+ " 0.894675 | \n",
+ " 0.896031 | \n",
+ " 0.900122 | \n",
+ " 0.899499 | \n",
+ " 0.900826 | \n",
+ " 0.902605 | \n",
+ " 0.905607 | \n",
+ " 0.906037 | \n",
+ "
\n",
+ " \n",
+ " 63 | \n",
+ " 0.900404 | \n",
+ " 0.001971 | \n",
+ " 0.900251 | \n",
+ " 0.000716 | \n",
+ " 0.899950 | \n",
+ " 0.001567 | \n",
+ " 0.900536 | \n",
+ " 0.001585 | \n",
+ " 0.898062 | \n",
+ " 0.002247 | \n",
+ " 0.900530 | \n",
+ " 0.001073 | \n",
+ " 0.898052 | \n",
+ " 0.000990 | \n",
+ " 0.900246 | \n",
+ " 0.001661 | \n",
+ " 0.900536 | \n",
+ " 0.000313 | \n",
+ " 0.901199 | \n",
+ " 0.001925 | \n",
+ " 0.901833 | \n",
+ " 0.000689 | \n",
+ " 0.904123 | \n",
+ " 0.004102 | \n",
+ " 0.904161 | \n",
+ " 0.001904 | \n",
+ " 0.898631 | \n",
+ " 0.002870 | \n",
+ " 0.898622 | \n",
+ " 0.001366 | \n",
+ " 0.900645 | \n",
+ " 0.001475 | \n",
+ " 0.902122 | \n",
+ " 0.003711 | \n",
+ " 0.901912 | \n",
+ " 0.000501 | \n",
+ " 0.898889 | \n",
+ " 0.001275 | \n",
+ " 0.897707 | \n",
+ " 0.006063 | \n",
+ " 0.896295 | \n",
+ " 0.005163 | \n",
+ " 0.901640 | \n",
+ " 0.005457 | \n",
+ " 0.894162 | \n",
+ " 0.001330 | \n",
+ " 0.901667 | \n",
+ " 0.004633 | \n",
+ " 0.902515 | \n",
+ " 0.000511 | \n",
+ " 0.902515 | \n",
+ " 0.005043 | \n",
+ " 0.903902 | \n",
+ " 0.001018 | \n",
+ " 0.911195 | \n",
+ " 0.004273 | \n",
+ " 0.909011 | \n",
+ " 0.000288 | \n",
+ " 0.908729 | \n",
+ " 0.005163 | \n",
+ " 0.903118 | \n",
+ " 0.000774 | \n",
+ " 0.904913 | \n",
+ " 0.001256 | \n",
+ " 0.909531 | \n",
+ " 0.000620 | \n",
+ " 0.907403 | \n",
+ " 0.001534 | \n",
+ " 0.907393 | \n",
+ " 0.001065 | \n",
+ " 0.907137 | \n",
+ " 0.001018 | \n",
+ " 0.903187 | \n",
+ " 0.000341 | \n",
+ " 0.903218 | \n",
+ " 0.000487 | \n",
+ " 0.903810 | \n",
+ " 0.001179 | \n",
+ " 0.909454 | \n",
+ " 0.001097 | \n",
+ " 0.907357 | \n",
+ " 0.000555 | \n",
+ " 0.911044 | \n",
+ " 0.007564 | \n",
+ " 0.910657 | \n",
+ " 0.003413 | \n",
+ " 0.907522 | \n",
+ " 0.008177 | \n",
+ " 0.900615 | \n",
+ " 0.003681 | \n",
+ " 0.906061 | \n",
+ " 0.001641 | \n",
+ " 0.899258 | \n",
+ " 0.016586 | \n",
+ " 0.903952 | \n",
+ " 0.012982 | \n",
+ " 0.899530 | \n",
+ " 0.006698 | \n",
+ " 0.899831 | \n",
+ " 0.000562 | \n",
+ " 0.907336 | \n",
+ " 0.004833 | \n",
+ " 0.907291 | \n",
+ " 0.001092 | \n",
+ " 0.901101 | \n",
+ " 0.000096 | \n",
+ " 0.902979 | \n",
+ " 0.000749 | \n",
+ " 0.898715 | \n",
+ " 0.003541 | \n",
+ " 0.899620 | \n",
+ " 0.003343 | \n",
+ " 0.902619 | \n",
+ " 0.000657 | \n",
+ " 0.898043 | \n",
+ " 0.001283 | \n",
+ " 0.899926 | \n",
+ " 0.001236 | \n",
+ " 0.904471 | \n",
+ " 0.001412 | \n",
+ " 0.901639 | \n",
+ " 0.904114 | \n",
+ " 0.894464 | \n",
+ " 0.894675 | \n",
+ " 0.896031 | \n",
+ " 0.900122 | \n",
+ " 0.899499 | \n",
+ " 0.900826 | \n",
+ " 0.902605 | \n",
+ " 0.905607 | \n",
+ " 0.906037 | \n",
+ " 0.903877 | \n",
+ "
\n",
+ " \n",
+ " 64 | \n",
+ " 0.900251 | \n",
+ " 0.000716 | \n",
+ " 0.899950 | \n",
+ " 0.001567 | \n",
+ " 0.900536 | \n",
+ " 0.001585 | \n",
+ " 0.898062 | \n",
+ " 0.002247 | \n",
+ " 0.900530 | \n",
+ " 0.001073 | \n",
+ " 0.898052 | \n",
+ " 0.000990 | \n",
+ " 0.900246 | \n",
+ " 0.001661 | \n",
+ " 0.900536 | \n",
+ " 0.000313 | \n",
+ " 0.901199 | \n",
+ " 0.001925 | \n",
+ " 0.901833 | \n",
+ " 0.000689 | \n",
+ " 0.904123 | \n",
+ " 0.004102 | \n",
+ " 0.904161 | \n",
+ " 0.001904 | \n",
+ " 0.898631 | \n",
+ " 0.002870 | \n",
+ " 0.898622 | \n",
+ " 0.001366 | \n",
+ " 0.900645 | \n",
+ " 0.001475 | \n",
+ " 0.902122 | \n",
+ " 0.003711 | \n",
+ " 0.901912 | \n",
+ " 0.000501 | \n",
+ " 0.898889 | \n",
+ " 0.001275 | \n",
+ " 0.897707 | \n",
+ " 0.006063 | \n",
+ " 0.896295 | \n",
+ " 0.005163 | \n",
+ " 0.901640 | \n",
+ " 0.005457 | \n",
+ " 0.894162 | \n",
+ " 0.001330 | \n",
+ " 0.901667 | \n",
+ " 0.004633 | \n",
+ " 0.902515 | \n",
+ " 0.000511 | \n",
+ " 0.902515 | \n",
+ " 0.005043 | \n",
+ " 0.903902 | \n",
+ " 0.001018 | \n",
+ " 0.911195 | \n",
+ " 0.004273 | \n",
+ " 0.909011 | \n",
+ " 0.000288 | \n",
+ " 0.908729 | \n",
+ " 0.005163 | \n",
+ " 0.903118 | \n",
+ " 0.000774 | \n",
+ " 0.904913 | \n",
+ " 0.001256 | \n",
+ " 0.909531 | \n",
+ " 0.000620 | \n",
+ " 0.907403 | \n",
+ " 0.001534 | \n",
+ " 0.907393 | \n",
+ " 0.001065 | \n",
+ " 0.907137 | \n",
+ " 0.001018 | \n",
+ " 0.903187 | \n",
+ " 0.000341 | \n",
+ " 0.903218 | \n",
+ " 0.000487 | \n",
+ " 0.903810 | \n",
+ " 0.001179 | \n",
+ " 0.909454 | \n",
+ " 0.001097 | \n",
+ " 0.907357 | \n",
+ " 0.000555 | \n",
+ " 0.911044 | \n",
+ " 0.007564 | \n",
+ " 0.910657 | \n",
+ " 0.003413 | \n",
+ " 0.907522 | \n",
+ " 0.008177 | \n",
+ " 0.900615 | \n",
+ " 0.003681 | \n",
+ " 0.906061 | \n",
+ " 0.001641 | \n",
+ " 0.899258 | \n",
+ " 0.016586 | \n",
+ " 0.903952 | \n",
+ " 0.012982 | \n",
+ " 0.899530 | \n",
+ " 0.006698 | \n",
+ " 0.899831 | \n",
+ " 0.000562 | \n",
+ " 0.907336 | \n",
+ " 0.004833 | \n",
+ " 0.907291 | \n",
+ " 0.001092 | \n",
+ " 0.901101 | \n",
+ " 0.000096 | \n",
+ " 0.902979 | \n",
+ " 0.000749 | \n",
+ " 0.898715 | \n",
+ " 0.003541 | \n",
+ " 0.899620 | \n",
+ " 0.003343 | \n",
+ " 0.902619 | \n",
+ " 0.000657 | \n",
+ " 0.898043 | \n",
+ " 0.001283 | \n",
+ " 0.899926 | \n",
+ " 0.001236 | \n",
+ " 0.904471 | \n",
+ " 0.001412 | \n",
+ " 0.901639 | \n",
+ " 0.000668 | \n",
+ " 0.904114 | \n",
+ " 0.894464 | \n",
+ " 0.894675 | \n",
+ " 0.896031 | \n",
+ " 0.900122 | \n",
+ " 0.899499 | \n",
+ " 0.900826 | \n",
+ " 0.902605 | \n",
+ " 0.905607 | \n",
+ " 0.906037 | \n",
+ " 0.903877 | \n",
+ " 0.906416 | \n",
+ "
\n",
+ " \n",
+ "
\n",
+ "
"
+ ],
+ "text/plain": [
+ " var1(t-60) var2(t-60) var1(t-59) var2(t-59) var1(t-58) var2(t-58) \\\n",
+ "60 0.911499 0.005383 0.904896 0.007199 0.903185 0.003018 \n",
+ "61 0.904896 0.007199 0.903185 0.003018 0.900404 0.001971 \n",
+ "62 0.903185 0.003018 0.900404 0.001971 0.900251 0.000716 \n",
+ "63 0.900404 0.001971 0.900251 0.000716 0.899950 0.001567 \n",
+ "64 0.900251 0.000716 0.899950 0.001567 0.900536 0.001585 \n",
+ "\n",
+ " var1(t-57) var2(t-57) var1(t-56) var2(t-56) var1(t-55) var2(t-55) \\\n",
+ "60 0.900404 0.001971 0.900251 0.000716 0.899950 0.001567 \n",
+ "61 0.900251 0.000716 0.899950 0.001567 0.900536 0.001585 \n",
+ "62 0.899950 0.001567 0.900536 0.001585 0.898062 0.002247 \n",
+ "63 0.900536 0.001585 0.898062 0.002247 0.900530 0.001073 \n",
+ "64 0.898062 0.002247 0.900530 0.001073 0.898052 0.000990 \n",
+ "\n",
+ " var1(t-54) var2(t-54) var1(t-53) var2(t-53) var1(t-52) var2(t-52) \\\n",
+ "60 0.900536 0.001585 0.898062 0.002247 0.900530 0.001073 \n",
+ "61 0.898062 0.002247 0.900530 0.001073 0.898052 0.000990 \n",
+ "62 0.900530 0.001073 0.898052 0.000990 0.900246 0.001661 \n",
+ "63 0.898052 0.000990 0.900246 0.001661 0.900536 0.000313 \n",
+ "64 0.900246 0.001661 0.900536 0.000313 0.901199 0.001925 \n",
+ "\n",
+ " var1(t-51) var2(t-51) var1(t-50) var2(t-50) var1(t-49) var2(t-49) \\\n",
+ "60 0.898052 0.000990 0.900246 0.001661 0.900536 0.000313 \n",
+ "61 0.900246 0.001661 0.900536 0.000313 0.901199 0.001925 \n",
+ "62 0.900536 0.000313 0.901199 0.001925 0.901833 0.000689 \n",
+ "63 0.901199 0.001925 0.901833 0.000689 0.904123 0.004102 \n",
+ "64 0.901833 0.000689 0.904123 0.004102 0.904161 0.001904 \n",
+ "\n",
+ " var1(t-48) var2(t-48) var1(t-47) var2(t-47) var1(t-46) var2(t-46) \\\n",
+ "60 0.901199 0.001925 0.901833 0.000689 0.904123 0.004102 \n",
+ "61 0.901833 0.000689 0.904123 0.004102 0.904161 0.001904 \n",
+ "62 0.904123 0.004102 0.904161 0.001904 0.898631 0.002870 \n",
+ "63 0.904161 0.001904 0.898631 0.002870 0.898622 0.001366 \n",
+ "64 0.898631 0.002870 0.898622 0.001366 0.900645 0.001475 \n",
+ "\n",
+ " var1(t-45) var2(t-45) var1(t-44) var2(t-44) var1(t-43) var2(t-43) \\\n",
+ "60 0.904161 0.001904 0.898631 0.002870 0.898622 0.001366 \n",
+ "61 0.898631 0.002870 0.898622 0.001366 0.900645 0.001475 \n",
+ "62 0.898622 0.001366 0.900645 0.001475 0.902122 0.003711 \n",
+ "63 0.900645 0.001475 0.902122 0.003711 0.901912 0.000501 \n",
+ "64 0.902122 0.003711 0.901912 0.000501 0.898889 0.001275 \n",
+ "\n",
+ " var1(t-42) var2(t-42) var1(t-41) var2(t-41) var1(t-40) var2(t-40) \\\n",
+ "60 0.900645 0.001475 0.902122 0.003711 0.901912 0.000501 \n",
+ "61 0.902122 0.003711 0.901912 0.000501 0.898889 0.001275 \n",
+ "62 0.901912 0.000501 0.898889 0.001275 0.897707 0.006063 \n",
+ "63 0.898889 0.001275 0.897707 0.006063 0.896295 0.005163 \n",
+ "64 0.897707 0.006063 0.896295 0.005163 0.901640 0.005457 \n",
+ "\n",
+ " var1(t-39) var2(t-39) var1(t-38) var2(t-38) var1(t-37) var2(t-37) \\\n",
+ "60 0.898889 0.001275 0.897707 0.006063 0.896295 0.005163 \n",
+ "61 0.897707 0.006063 0.896295 0.005163 0.901640 0.005457 \n",
+ "62 0.896295 0.005163 0.901640 0.005457 0.894162 0.001330 \n",
+ "63 0.901640 0.005457 0.894162 0.001330 0.901667 0.004633 \n",
+ "64 0.894162 0.001330 0.901667 0.004633 0.902515 0.000511 \n",
+ "\n",
+ " var1(t-36) var2(t-36) var1(t-35) var2(t-35) var1(t-34) var2(t-34) \\\n",
+ "60 0.901640 0.005457 0.894162 0.001330 0.901667 0.004633 \n",
+ "61 0.894162 0.001330 0.901667 0.004633 0.902515 0.000511 \n",
+ "62 0.901667 0.004633 0.902515 0.000511 0.902515 0.005043 \n",
+ "63 0.902515 0.000511 0.902515 0.005043 0.903902 0.001018 \n",
+ "64 0.902515 0.005043 0.903902 0.001018 0.911195 0.004273 \n",
+ "\n",
+ " var1(t-33) var2(t-33) var1(t-32) var2(t-32) var1(t-31) var2(t-31) \\\n",
+ "60 0.902515 0.000511 0.902515 0.005043 0.903902 0.001018 \n",
+ "61 0.902515 0.005043 0.903902 0.001018 0.911195 0.004273 \n",
+ "62 0.903902 0.001018 0.911195 0.004273 0.909011 0.000288 \n",
+ "63 0.911195 0.004273 0.909011 0.000288 0.908729 0.005163 \n",
+ "64 0.909011 0.000288 0.908729 0.005163 0.903118 0.000774 \n",
+ "\n",
+ " var1(t-30) var2(t-30) var1(t-29) var2(t-29) var1(t-28) var2(t-28) \\\n",
+ "60 0.911195 0.004273 0.909011 0.000288 0.908729 0.005163 \n",
+ "61 0.909011 0.000288 0.908729 0.005163 0.903118 0.000774 \n",
+ "62 0.908729 0.005163 0.903118 0.000774 0.904913 0.001256 \n",
+ "63 0.903118 0.000774 0.904913 0.001256 0.909531 0.000620 \n",
+ "64 0.904913 0.001256 0.909531 0.000620 0.907403 0.001534 \n",
+ "\n",
+ " var1(t-27) var2(t-27) var1(t-26) var2(t-26) var1(t-25) var2(t-25) \\\n",
+ "60 0.903118 0.000774 0.904913 0.001256 0.909531 0.000620 \n",
+ "61 0.904913 0.001256 0.909531 0.000620 0.907403 0.001534 \n",
+ "62 0.909531 0.000620 0.907403 0.001534 0.907393 0.001065 \n",
+ "63 0.907403 0.001534 0.907393 0.001065 0.907137 0.001018 \n",
+ "64 0.907393 0.001065 0.907137 0.001018 0.903187 0.000341 \n",
+ "\n",
+ " var1(t-24) var2(t-24) var1(t-23) var2(t-23) var1(t-22) var2(t-22) \\\n",
+ "60 0.907403 0.001534 0.907393 0.001065 0.907137 0.001018 \n",
+ "61 0.907393 0.001065 0.907137 0.001018 0.903187 0.000341 \n",
+ "62 0.907137 0.001018 0.903187 0.000341 0.903218 0.000487 \n",
+ "63 0.903187 0.000341 0.903218 0.000487 0.903810 0.001179 \n",
+ "64 0.903218 0.000487 0.903810 0.001179 0.909454 0.001097 \n",
+ "\n",
+ " var1(t-21) var2(t-21) var1(t-20) var2(t-20) var1(t-19) var2(t-19) \\\n",
+ "60 0.903187 0.000341 0.903218 0.000487 0.903810 0.001179 \n",
+ "61 0.903218 0.000487 0.903810 0.001179 0.909454 0.001097 \n",
+ "62 0.903810 0.001179 0.909454 0.001097 0.907357 0.000555 \n",
+ "63 0.909454 0.001097 0.907357 0.000555 0.911044 0.007564 \n",
+ "64 0.907357 0.000555 0.911044 0.007564 0.910657 0.003413 \n",
+ "\n",
+ " var1(t-18) var2(t-18) var1(t-17) var2(t-17) var1(t-16) var2(t-16) \\\n",
+ "60 0.909454 0.001097 0.907357 0.000555 0.911044 0.007564 \n",
+ "61 0.907357 0.000555 0.911044 0.007564 0.910657 0.003413 \n",
+ "62 0.911044 0.007564 0.910657 0.003413 0.907522 0.008177 \n",
+ "63 0.910657 0.003413 0.907522 0.008177 0.900615 0.003681 \n",
+ "64 0.907522 0.008177 0.900615 0.003681 0.906061 0.001641 \n",
+ "\n",
+ " var1(t-15) var2(t-15) var1(t-14) var2(t-14) var1(t-13) var2(t-13) \\\n",
+ "60 0.910657 0.003413 0.907522 0.008177 0.900615 0.003681 \n",
+ "61 0.907522 0.008177 0.900615 0.003681 0.906061 0.001641 \n",
+ "62 0.900615 0.003681 0.906061 0.001641 0.899258 0.016586 \n",
+ "63 0.906061 0.001641 0.899258 0.016586 0.903952 0.012982 \n",
+ "64 0.899258 0.016586 0.903952 0.012982 0.899530 0.006698 \n",
+ "\n",
+ " var1(t-12) var2(t-12) var1(t-11) var2(t-11) var1(t-10) var2(t-10) \\\n",
+ "60 0.906061 0.001641 0.899258 0.016586 0.903952 0.012982 \n",
+ "61 0.899258 0.016586 0.903952 0.012982 0.899530 0.006698 \n",
+ "62 0.903952 0.012982 0.899530 0.006698 0.899831 0.000562 \n",
+ "63 0.899530 0.006698 0.899831 0.000562 0.907336 0.004833 \n",
+ "64 0.899831 0.000562 0.907336 0.004833 0.907291 0.001092 \n",
+ "\n",
+ " var1(t-9) var2(t-9) var1(t-8) var2(t-8) var1(t-7) var2(t-7) \\\n",
+ "60 0.899530 0.006698 0.899831 0.000562 0.907336 0.004833 \n",
+ "61 0.899831 0.000562 0.907336 0.004833 0.907291 0.001092 \n",
+ "62 0.907336 0.004833 0.907291 0.001092 0.901101 0.000096 \n",
+ "63 0.907291 0.001092 0.901101 0.000096 0.902979 0.000749 \n",
+ "64 0.901101 0.000096 0.902979 0.000749 0.898715 0.003541 \n",
+ "\n",
+ " var1(t-6) var2(t-6) var1(t-5) var2(t-5) var1(t-4) var2(t-4) \\\n",
+ "60 0.907291 0.001092 0.901101 0.000096 0.902979 0.000749 \n",
+ "61 0.901101 0.000096 0.902979 0.000749 0.898715 0.003541 \n",
+ "62 0.902979 0.000749 0.898715 0.003541 0.899620 0.003343 \n",
+ "63 0.898715 0.003541 0.899620 0.003343 0.902619 0.000657 \n",
+ "64 0.899620 0.003343 0.902619 0.000657 0.898043 0.001283 \n",
+ "\n",
+ " var1(t-3) var2(t-3) var1(t-2) var2(t-2) var1(t-1) var2(t-1) \\\n",
+ "60 0.898715 0.003541 0.899620 0.003343 0.902619 0.000657 \n",
+ "61 0.899620 0.003343 0.902619 0.000657 0.898043 0.001283 \n",
+ "62 0.902619 0.000657 0.898043 0.001283 0.899926 0.001236 \n",
+ "63 0.898043 0.001283 0.899926 0.001236 0.904471 0.001412 \n",
+ "64 0.899926 0.001236 0.904471 0.001412 0.901639 0.000668 \n",
+ "\n",
+ " var1(t) var1(t+1) var1(t+2) var1(t+3) var1(t+4) var1(t+5) \\\n",
+ "60 0.898043 0.899926 0.904471 0.901639 0.904114 0.894464 \n",
+ "61 0.899926 0.904471 0.901639 0.904114 0.894464 0.894675 \n",
+ "62 0.904471 0.901639 0.904114 0.894464 0.894675 0.896031 \n",
+ "63 0.901639 0.904114 0.894464 0.894675 0.896031 0.900122 \n",
+ "64 0.904114 0.894464 0.894675 0.896031 0.900122 0.899499 \n",
+ "\n",
+ " var1(t+6) var1(t+7) var1(t+8) var1(t+9) var1(t+10) var1(t+11) \n",
+ "60 0.894675 0.896031 0.900122 0.899499 0.900826 0.902605 \n",
+ "61 0.896031 0.900122 0.899499 0.900826 0.902605 0.905607 \n",
+ "62 0.900122 0.899499 0.900826 0.902605 0.905607 0.906037 \n",
+ "63 0.899499 0.900826 0.902605 0.905607 0.906037 0.903877 \n",
+ "64 0.900826 0.902605 0.905607 0.906037 0.903877 0.906416 "
+ ]
+ },
+ "execution_count": 9,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "# drop columns we don't want to predict\n",
+ "# We're only concerned with the estimating the close value,\n",
+ "# Close should be first in the list of column in the config file\n",
+ "\n",
+ "cols_to_drop = []\n",
+ "\n",
+ "for i in range (n_out):\n",
+ " for j in range(1, n_features):\n",
+ " cols_to_drop.append(reframed.shape[1]-(i*n_features+j))\n",
+ "\n",
+ "reframed.drop(reframed.columns[cols_to_drop], axis=1, inplace=True)\n",
+ "\n",
+ "pd.set_option('display.max_columns', 500)\n",
+ "reframed.head()"
+ ]
+ },
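The drop loop walks backwards from the last column, removing the var2 (Volume) slot of every forecast step. Worked through with this notebook's numbers (144 columns, n_out = 12, n_features = 2), it drops positions 121, 123, ..., 143, leaving 132 columns: 120 lag columns plus the twelve var1(t) ... var1(t+11) targets. A sketch of that check:

    n_cols, n_out, n_features = 144, 12, 2
    cols_to_drop = [n_cols - (i * n_features + j)
                    for i in range(n_out) for j in range(1, n_features)]
    print(sorted(cols_to_drop))   # [121, 123, ..., 143] -> var2(t) ... var2(t+11)
    # 144 - 12 = 132 columns remain, matching reframed.head() above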
+ {
+ "cell_type": "code",
+ "execution_count": 10,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "29511\n"
+ ]
+ }
+ ],
+ "source": [
+ "#ndarray.shape holds array dimensions in tuple form\n",
+ "print(reframed.values.shape[0])"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 11,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "reframed_values = reframed.values\n",
+ "# split into train and test sets\n",
+ "training_size = int(0.8* reframed_values.shape[0])\n",
+ "train = reframed_values[:training_size, :]\n",
+ "test = reframed_values[training_size:, :]"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 12,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "(23608, 60, 2) (23608, 12, 1) (5903, 60, 2) (5903, 12, 1)\n"
+ ]
+ }
+ ],
+ "source": [
+ "# split into input and outputs\n",
+ "# n_obs is the length of input columns in our dataframe\n",
+ "n_obs = n_lag * n_features\n",
+ "\n",
+ "# We're only concerned with the estimating the close value,\n",
+ "# Close should be first in the list of column in the config file\n",
+ "\n",
+ "n_outputs = n_out * n_features\n",
+ "train_x, train_y = train[:, :n_obs], train[:, -n_out:]\n",
+ "test_x, test_y = test[:, :n_obs], test[:, -n_out:]\n",
+ "\n",
+ "# reshape input to be 3D [samples, timesteps, features]\n",
+ "train_x = train_x.reshape((train_x.shape[0], n_lag, n_features))\n",
+ "test_x = test_x.reshape((test_x.shape[0], n_lag, n_features))\n",
+ "\n",
+ "# reshape output to be 3D [samples, timesteps, features]\n",
+ "train_y = train_y.reshape(-1, n_out, 1)\n",
+ "test_y = test_y.reshape(-1, n_out, 1)\n",
+ "\n",
+ "print(train_x.shape, train_y.shape, test_x.shape, test_y.shape)"
+ ]
+ },
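The printed shapes follow directly from the earlier numbers: training_size = int(0.8 * 29511) = 23608 samples (5903 held out), n_obs = 60 * 2 = 120 input columns reshaped to (samples, 60, 2), and the last 12 columns reshaped to (samples, 12, 1). A quick arithmetic check (note n_outputs is computed but unused, since only the var1 targets survived the column drop):

    rows, n_lag, n_features, n_out = 29511, 60, 2, 12
    training_size = int(0.8 * rows)   # 23608; 29511 - 23608 = 5903 test samples
    n_obs = n_lag * n_features        # 120 input columns per sample
    print((training_size, n_lag, n_features), (training_size, n_out, 1))
    # (23608, 60, 2) (23608, 12, 1)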
+ {
+ "cell_type": "code",
+ "execution_count": 13,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "model_name=''.join([CONFIG['folder']['weights'], CONFIG['filename'], '_model', '.json'])\n",
+ "model_weights_name=''.join([CONFIG['folder']['weights'], CONFIG['filename'], '_model_weights', '.h5'])"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 14,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "epochs=25\n",
+ "batch_size=4096"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 110,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "__________________________________________________________________________________________________\n",
+ "Layer (type) Output Shape Param # Connected to \n",
+ "==================================================================================================\n",
+ "input_38 (InputLayer) (None, 60, 2) 0 \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1935 (Conv1D) (None, 59, 51) 255 input_38[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1936 (Conv1D) (None, 58, 51) 5253 conv1d_1935[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1937 (Conv1D) (None, 57, 50) 5150 conv1d_1936[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1938 (Conv1D) (None, 56, 49) 4949 conv1d_1937[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1939 (Conv1D) (None, 55, 48) 4752 conv1d_1938[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1940 (Conv1D) (None, 54, 47) 4559 conv1d_1939[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1941 (Conv1D) (None, 53, 46) 4370 conv1d_1940[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1942 (Conv1D) (None, 52, 45) 4185 conv1d_1941[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1943 (Conv1D) (None, 51, 44) 4004 conv1d_1942[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1944 (Conv1D) (None, 50, 43) 3827 conv1d_1943[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1945 (Conv1D) (None, 49, 42) 3654 conv1d_1944[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1946 (Conv1D) (None, 48, 41) 3485 conv1d_1945[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1947 (Conv1D) (None, 47, 40) 3320 conv1d_1946[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1948 (Conv1D) (None, 46, 39) 3159 conv1d_1947[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1949 (Conv1D) (None, 45, 38) 3002 conv1d_1948[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1950 (Conv1D) (None, 44, 37) 2849 conv1d_1949[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1951 (Conv1D) (None, 43, 36) 2700 conv1d_1950[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1952 (Conv1D) (None, 42, 35) 2555 conv1d_1951[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1953 (Conv1D) (None, 41, 34) 2414 conv1d_1952[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1954 (Conv1D) (None, 40, 33) 2277 conv1d_1953[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1955 (Conv1D) (None, 39, 32) 2144 conv1d_1954[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1956 (Conv1D) (None, 38, 31) 2015 conv1d_1955[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1957 (Conv1D) (None, 37, 30) 1890 conv1d_1956[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1958 (Conv1D) (None, 36, 29) 1769 conv1d_1957[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1959 (Conv1D) (None, 35, 28) 1652 conv1d_1958[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1960 (Conv1D) (None, 34, 27) 1539 conv1d_1959[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1961 (Conv1D) (None, 33, 26) 1430 conv1d_1960[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1962 (Conv1D) (None, 32, 25) 1325 conv1d_1961[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1963 (Conv1D) (None, 31, 24) 1224 conv1d_1962[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1964 (Conv1D) (None, 30, 23) 1127 conv1d_1963[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1965 (Conv1D) (None, 29, 22) 1034 conv1d_1964[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1966 (Conv1D) (None, 28, 21) 945 conv1d_1965[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1967 (Conv1D) (None, 27, 20) 860 conv1d_1966[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1968 (Conv1D) (None, 26, 19) 779 conv1d_1967[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1969 (Conv1D) (None, 25, 18) 702 conv1d_1968[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1970 (Conv1D) (None, 24, 17) 629 conv1d_1969[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1971 (Conv1D) (None, 23, 16) 560 conv1d_1970[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1972 (Conv1D) (None, 22, 15) 495 conv1d_1971[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1973 (Conv1D) (None, 21, 14) 434 conv1d_1972[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1974 (Conv1D) (None, 20, 13) 377 conv1d_1973[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1975 (Conv1D) (None, 19, 12) 324 conv1d_1974[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1976 (Conv1D) (None, 18, 11) 275 conv1d_1975[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1977 (Conv1D) (None, 17, 10) 230 conv1d_1976[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1978 (Conv1D) (None, 16, 9) 189 conv1d_1977[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1979 (Conv1D) (None, 15, 8) 152 conv1d_1978[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1983 (Conv1D) (None, 49, 50) 1250 input_38[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1980 (Conv1D) (None, 14, 7) 119 conv1d_1979[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "lstm_363 (LSTM) (None, 15) 1080 input_38[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "lstm_364 (LSTM) (None, 15) 1080 input_38[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "lstm_365 (LSTM) (None, 15) 1080 input_38[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "lstm_366 (LSTM) (None, 15) 1080 input_38[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "lstm_367 (LSTM) (None, 15) 1080 input_38[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "lstm_368 (LSTM) (None, 15) 1080 input_38[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "lstm_369 (LSTM) (None, 15) 1080 input_38[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "lstm_370 (LSTM) (None, 15) 1080 input_38[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "lstm_371 (LSTM) (None, 15) 1080 input_38[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "lstm_372 (LSTM) (None, 15) 1080 input_38[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "lstm_373 (LSTM) (None, 15) 1080 input_38[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "lstm_374 (LSTM) (None, 15) 1080 input_38[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "lstm_375 (LSTM) (None, 10) 2440 conv1d_1983[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "lstm_376 (LSTM) (None, 10) 2440 conv1d_1983[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "lstm_377 (LSTM) (None, 10) 2440 conv1d_1983[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "lstm_378 (LSTM) (None, 10) 2440 conv1d_1983[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "lstm_379 (LSTM) (None, 10) 2440 conv1d_1983[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "lstm_380 (LSTM) (None, 10) 2440 conv1d_1983[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "lstm_381 (LSTM) (None, 10) 2440 conv1d_1983[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "lstm_382 (LSTM) (None, 10) 2440 conv1d_1983[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "lstm_383 (LSTM) (None, 10) 2440 conv1d_1983[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "lstm_384 (LSTM) (None, 10) 2440 conv1d_1983[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "lstm_385 (LSTM) (None, 10) 2440 conv1d_1983[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "lstm_386 (LSTM) (None, 10) 2440 conv1d_1983[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1981 (Conv1D) (None, 13, 6) 90 conv1d_1980[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "concatenate_46 (Concatenate) (None, 180) 0 lstm_363[0][0] \n",
+ " lstm_364[0][0] \n",
+ " lstm_365[0][0] \n",
+ " lstm_366[0][0] \n",
+ " lstm_367[0][0] \n",
+ " lstm_368[0][0] \n",
+ " lstm_369[0][0] \n",
+ " lstm_370[0][0] \n",
+ " lstm_371[0][0] \n",
+ " lstm_372[0][0] \n",
+ " lstm_373[0][0] \n",
+ " lstm_374[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "concatenate_47 (Concatenate) (None, 120) 0 lstm_375[0][0] \n",
+ " lstm_376[0][0] \n",
+ " lstm_377[0][0] \n",
+ " lstm_378[0][0] \n",
+ " lstm_379[0][0] \n",
+ " lstm_380[0][0] \n",
+ " lstm_381[0][0] \n",
+ " lstm_382[0][0] \n",
+ " lstm_383[0][0] \n",
+ " lstm_384[0][0] \n",
+ " lstm_385[0][0] \n",
+ " lstm_386[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1982 (Conv1D) (None, 12, 5) 65 conv1d_1981[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "reshape_10 (Reshape) (None, 12, 15) 0 concatenate_46[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "reshape_11 (Reshape) (None, 12, 10) 0 concatenate_47[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "concatenate_48 (Concatenate) (None, 12, 30) 0 conv1d_1982[0][0] \n",
+ " reshape_10[0][0] \n",
+ " reshape_11[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "time_distributed_12 (TimeDistri (None, 12, 1) 31 concatenate_48[0][0] \n",
+ "==================================================================================================\n",
+ "Total params: 138,614\n",
+ "Trainable params: 138,614\n",
+ "Non-trainable params: 0\n",
+ "__________________________________________________________________________________________________\n"
+ ]
+ }
+ ],
+ "source": [
+ "from keras.models import Sequential\n",
+ "from keras.layers import Dense, concatenate\n",
+ "from keras.layers import Input\n",
+ "from keras.layers import LSTM, CuDNNLSTM, GRU,CuDNNGRU\n",
+ "from keras.layers import Conv1D, AveragePooling1D, MaxPooling1D\n",
+ "from keras.layers import Dropout, Flatten\n",
+ "from keras.layers import Activation, BatchNormalization\n",
+ "from keras.layers import TimeDistributed\n",
+ "from keras.layers import Bidirectional\n",
+ "from keras.layers import RepeatVector\n",
+ "from keras.callbacks import ModelCheckpoint\n",
+ "import keras\n",
+ "\n",
+ "units= CONFIG['lstm_hidden_size']\n",
+ "dropout = .1\n",
+ "\n",
+ "# design network\n",
+ "inputs = Input(shape=(60, 2))\n",
+ "convmodel = Conv1D(51, 2)(inputs)\n",
+ "for i in range(1, 48):\n",
+ " convmodel = Conv1D(52-i, 2)(convmodel)\n",
+ "\n",
+ "rnnlist = []\n",
+ "for i in range(12):\n",
+ " rnnmodel = LSTM(15)(inputs)\n",
+ " rnnlist.append(rnnmodel)\n",
+ "rnnmodel = concatenate(rnnlist)\n",
+ "rnnmodel = keras.layers.Reshape((12, 15))(rnnmodel)\n",
+ "\n",
+ "convrnnprep = Conv1D(50, 12)(inputs)\n",
+ "convrnnlist = []\n",
+ "for i in range(12):\n",
+ " convrnnmodel = LSTM(10)(convrnnprep)\n",
+ " convrnnlist.append(convrnnmodel)\n",
+ "convrnn = concatenate(convrnnlist)\n",
+ "convrnn = keras.layers.Reshape((12, 10))(convrnn)\n",
+ "\n",
+ "combined = concatenate([convmodel, rnnmodel, convrnn])\n",
+ "combined = TimeDistributed(Dense(1))(combined)\n",
+ "#model.add(LSTM(100, input_shape=(train_x.shape[1], train_x.shape[2]), activation=None))\n",
+ "#model.add(RepeatVector(n_out))\n",
+ "\n",
+ "#model.add(LSTM(60))\n",
+ "#model.add(Dropout(.1))\n",
+ "\n",
+ "\n",
+ "\n",
+ "# We're only concerned with the estimating the close value,\n",
+ "# otherwise use n_outputs instead of 1\n",
+ "# Dense(n_outputs, ...\n",
+ "#model.add(TimeDistributed(Dense(1)))\n",
+ "model = keras.Model(inputs=inputs, outputs=combined)\n",
+ "model.compile(loss='mse', optimizer='adam')\n",
+ "\n",
+ "# store model\n",
+ "# serialize model to JSON\n",
+ "model_json = model.to_json()\n",
+ "with open(model_name, \"w\") as json_file:\n",
+ " json_file.write(model_json)\n",
+ "\n",
+ "model.summary()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 111,
+ "metadata": {
+ "scrolled": true
+ },
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Train on 23608 samples, validate on 5903 samples\n",
+ "Epoch 1/25\n",
+ "23608/23608 [==============================] - 56s 2ms/step - loss: 0.2893 - val_loss: 0.2751\n",
+ "\n",
+ "Epoch 00001: val_loss improved from inf to 0.27510, saving model to weights/BTC_ETH_lstm_i60_o12_Close_Volume_model_weights.h5\n",
+ "Epoch 2/25\n",
+ "23608/23608 [==============================] - 20s 863us/step - loss: 0.0583 - val_loss: 0.0236\n",
+ "\n",
+ "Epoch 00002: val_loss improved from 0.27510 to 0.02358, saving model to weights/BTC_ETH_lstm_i60_o12_Close_Volume_model_weights.h5\n",
+ "Epoch 3/25\n",
+ "23608/23608 [==============================] - 21s 878us/step - loss: 0.0478 - val_loss: 0.0072\n",
+ "\n",
+ "Epoch 00003: val_loss improved from 0.02358 to 0.00718, saving model to weights/BTC_ETH_lstm_i60_o12_Close_Volume_model_weights.h5\n",
+ "Epoch 4/25\n",
+ "23608/23608 [==============================] - 21s 873us/step - loss: 0.0348 - val_loss: 0.0123\n",
+ "\n",
+ "Epoch 00004: val_loss did not improve from 0.00718\n",
+ "Epoch 5/25\n",
+ "23608/23608 [==============================] - 21s 877us/step - loss: 0.0063 - val_loss: 0.0036\n",
+ "\n",
+ "Epoch 00005: val_loss improved from 0.00718 to 0.00358, saving model to weights/BTC_ETH_lstm_i60_o12_Close_Volume_model_weights.h5\n",
+ "Epoch 6/25\n",
+ "23608/23608 [==============================] - 21s 883us/step - loss: 0.0036 - val_loss: 0.0078\n",
+ "\n",
+ "Epoch 00006: val_loss did not improve from 0.00358\n",
+ "Epoch 7/25\n",
+ "23608/23608 [==============================] - 21s 885us/step - loss: 0.0049 - val_loss: 0.0040\n",
+ "\n",
+ "Epoch 00007: val_loss did not improve from 0.00358\n",
+ "Epoch 8/25\n",
+ "23608/23608 [==============================] - 21s 888us/step - loss: 0.0085 - val_loss: 0.0055\n",
+ "\n",
+ "Epoch 00008: val_loss did not improve from 0.00358\n",
+ "Epoch 9/25\n",
+ "23608/23608 [==============================] - 21s 879us/step - loss: 0.0067 - val_loss: 9.5548e-04\n",
+ "\n",
+ "Epoch 00009: val_loss improved from 0.00358 to 0.00096, saving model to weights/BTC_ETH_lstm_i60_o12_Close_Volume_model_weights.h5\n",
+ "Epoch 10/25\n",
+ "23608/23608 [==============================] - 21s 880us/step - loss: 0.0082 - val_loss: 0.0132\n",
+ "\n",
+ "Epoch 00010: val_loss did not improve from 0.00096\n",
+ "Epoch 11/25\n",
+ "23608/23608 [==============================] - 20s 853us/step - loss: 0.0060 - val_loss: 0.0017\n",
+ "\n",
+ "Epoch 00011: val_loss did not improve from 0.00096\n",
+ "Epoch 12/25\n",
+ "23608/23608 [==============================] - 20s 853us/step - loss: 0.0045 - val_loss: 0.0015\n",
+ "\n",
+ "Epoch 00012: val_loss did not improve from 0.00096\n",
+ "Epoch 13/25\n",
+ "23608/23608 [==============================] - 20s 853us/step - loss: 0.0040 - val_loss: 3.3025e-04\n",
+ "\n",
+ "Epoch 00013: val_loss improved from 0.00096 to 0.00033, saving model to weights/BTC_ETH_lstm_i60_o12_Close_Volume_model_weights.h5\n",
+ "Epoch 14/25\n",
+ "23608/23608 [==============================] - 20s 866us/step - loss: 0.0013 - val_loss: 8.8446e-04\n",
+ "\n",
+ "Epoch 00014: val_loss did not improve from 0.00033\n",
+ "Epoch 15/25\n",
+ "23608/23608 [==============================] - 20s 850us/step - loss: 6.4106e-04 - val_loss: 7.3872e-04\n",
+ "\n",
+ "Epoch 00015: val_loss did not improve from 0.00033\n",
+ "Epoch 16/25\n",
+ "23608/23608 [==============================] - 20s 849us/step - loss: 6.9955e-04 - val_loss: 6.5034e-04\n",
+ "\n",
+ "Epoch 00016: val_loss did not improve from 0.00033\n",
+ "Epoch 17/25\n",
+ "23608/23608 [==============================] - 20s 851us/step - loss: 5.6615e-04 - val_loss: 2.7856e-04\n",
+ "\n",
+ "Epoch 00017: val_loss improved from 0.00033 to 0.00028, saving model to weights/BTC_ETH_lstm_i60_o12_Close_Volume_model_weights.h5\n",
+ "Epoch 18/25\n",
+ "23608/23608 [==============================] - 20s 856us/step - loss: 3.5684e-04 - val_loss: 2.4057e-04\n",
+ "\n",
+ "Epoch 00018: val_loss improved from 0.00028 to 0.00024, saving model to weights/BTC_ETH_lstm_i60_o12_Close_Volume_model_weights.h5\n",
+ "Epoch 19/25\n",
+ "23608/23608 [==============================] - 21s 869us/step - loss: 3.4699e-04 - val_loss: 3.5079e-04\n",
+ "\n",
+ "Epoch 00019: val_loss did not improve from 0.00024\n",
+ "Epoch 20/25\n",
+ "23608/23608 [==============================] - 20s 853us/step - loss: 3.4400e-04 - val_loss: 2.2245e-04\n",
+ "\n",
+ "Epoch 00020: val_loss improved from 0.00024 to 0.00022, saving model to weights/BTC_ETH_lstm_i60_o12_Close_Volume_model_weights.h5\n",
+ "Epoch 21/25\n",
+ "23608/23608 [==============================] - 20s 848us/step - loss: 3.0070e-04 - val_loss: 2.4665e-04\n",
+ "\n",
+ "Epoch 00021: val_loss did not improve from 0.00022\n",
+ "Epoch 22/25\n",
+ "23608/23608 [==============================] - 20s 862us/step - loss: 2.8406e-04 - val_loss: 2.9794e-04\n",
+ "\n",
+ "Epoch 00022: val_loss did not improve from 0.00022\n",
+ "Epoch 23/25\n",
+ "23608/23608 [==============================] - 20s 858us/step - loss: 2.9385e-04 - val_loss: 2.2953e-04\n",
+ "\n",
+ "Epoch 00023: val_loss did not improve from 0.00022\n",
+ "Epoch 24/25\n",
+ "23608/23608 [==============================] - 20s 860us/step - loss: 2.6628e-04 - val_loss: 2.3367e-04\n",
+ "\n",
+ "Epoch 00024: val_loss did not improve from 0.00022\n",
+ "Epoch 25/25\n",
+ "23608/23608 [==============================] - 20s 856us/step - loss: 2.5383e-04 - val_loss: 2.5620e-04\n",
+ "\n",
+ "Epoch 00025: val_loss did not improve from 0.00022\n"
+ ]
+ }
+ ],
+ "source": [
+ "# fit network\n",
+ "history = model.fit(train_x, train_y, epochs=epochs, batch_size=batch_size,\n",
+ " validation_data=(test_x, test_y), verbose=1, shuffle=False,\n",
+ " callbacks=[ModelCheckpoint(model_weights_name, monitor='val_loss', verbose=1,save_best_only='true',\n",
+ " save_weights_only=True)])"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 112,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "__________________________________________________________________________________________________\n",
+ "Layer (type) Output Shape Param # Connected to \n",
+ "==================================================================================================\n",
+ "input_38 (InputLayer) (None, 60, 2) 0 \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1935 (Conv1D) (None, 59, 51) 255 input_38[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1936 (Conv1D) (None, 58, 51) 5253 conv1d_1935[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1937 (Conv1D) (None, 57, 50) 5150 conv1d_1936[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1938 (Conv1D) (None, 56, 49) 4949 conv1d_1937[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1939 (Conv1D) (None, 55, 48) 4752 conv1d_1938[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1940 (Conv1D) (None, 54, 47) 4559 conv1d_1939[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1941 (Conv1D) (None, 53, 46) 4370 conv1d_1940[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1942 (Conv1D) (None, 52, 45) 4185 conv1d_1941[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1943 (Conv1D) (None, 51, 44) 4004 conv1d_1942[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1944 (Conv1D) (None, 50, 43) 3827 conv1d_1943[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1945 (Conv1D) (None, 49, 42) 3654 conv1d_1944[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1946 (Conv1D) (None, 48, 41) 3485 conv1d_1945[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1947 (Conv1D) (None, 47, 40) 3320 conv1d_1946[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1948 (Conv1D) (None, 46, 39) 3159 conv1d_1947[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1949 (Conv1D) (None, 45, 38) 3002 conv1d_1948[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1950 (Conv1D) (None, 44, 37) 2849 conv1d_1949[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1951 (Conv1D) (None, 43, 36) 2700 conv1d_1950[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1952 (Conv1D) (None, 42, 35) 2555 conv1d_1951[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1953 (Conv1D) (None, 41, 34) 2414 conv1d_1952[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1954 (Conv1D) (None, 40, 33) 2277 conv1d_1953[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1955 (Conv1D) (None, 39, 32) 2144 conv1d_1954[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1956 (Conv1D) (None, 38, 31) 2015 conv1d_1955[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1957 (Conv1D) (None, 37, 30) 1890 conv1d_1956[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1958 (Conv1D) (None, 36, 29) 1769 conv1d_1957[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1959 (Conv1D) (None, 35, 28) 1652 conv1d_1958[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1960 (Conv1D) (None, 34, 27) 1539 conv1d_1959[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1961 (Conv1D) (None, 33, 26) 1430 conv1d_1960[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1962 (Conv1D) (None, 32, 25) 1325 conv1d_1961[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1963 (Conv1D) (None, 31, 24) 1224 conv1d_1962[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1964 (Conv1D) (None, 30, 23) 1127 conv1d_1963[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1965 (Conv1D) (None, 29, 22) 1034 conv1d_1964[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1966 (Conv1D) (None, 28, 21) 945 conv1d_1965[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1967 (Conv1D) (None, 27, 20) 860 conv1d_1966[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1968 (Conv1D) (None, 26, 19) 779 conv1d_1967[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1969 (Conv1D) (None, 25, 18) 702 conv1d_1968[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1970 (Conv1D) (None, 24, 17) 629 conv1d_1969[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1971 (Conv1D) (None, 23, 16) 560 conv1d_1970[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1972 (Conv1D) (None, 22, 15) 495 conv1d_1971[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1973 (Conv1D) (None, 21, 14) 434 conv1d_1972[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1974 (Conv1D) (None, 20, 13) 377 conv1d_1973[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1975 (Conv1D) (None, 19, 12) 324 conv1d_1974[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1976 (Conv1D) (None, 18, 11) 275 conv1d_1975[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1977 (Conv1D) (None, 17, 10) 230 conv1d_1976[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1978 (Conv1D) (None, 16, 9) 189 conv1d_1977[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1979 (Conv1D) (None, 15, 8) 152 conv1d_1978[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1983 (Conv1D) (None, 49, 50) 1250 input_38[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1980 (Conv1D) (None, 14, 7) 119 conv1d_1979[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "lstm_363 (LSTM) (None, 15) 1080 input_38[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "lstm_364 (LSTM) (None, 15) 1080 input_38[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "lstm_365 (LSTM) (None, 15) 1080 input_38[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "lstm_366 (LSTM) (None, 15) 1080 input_38[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "lstm_367 (LSTM) (None, 15) 1080 input_38[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "lstm_368 (LSTM) (None, 15) 1080 input_38[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "lstm_369 (LSTM) (None, 15) 1080 input_38[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "lstm_370 (LSTM) (None, 15) 1080 input_38[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "lstm_371 (LSTM) (None, 15) 1080 input_38[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "lstm_372 (LSTM) (None, 15) 1080 input_38[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "lstm_373 (LSTM) (None, 15) 1080 input_38[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "lstm_374 (LSTM) (None, 15) 1080 input_38[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "lstm_375 (LSTM) (None, 10) 2440 conv1d_1983[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "lstm_376 (LSTM) (None, 10) 2440 conv1d_1983[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "lstm_377 (LSTM) (None, 10) 2440 conv1d_1983[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "lstm_378 (LSTM) (None, 10) 2440 conv1d_1983[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "lstm_379 (LSTM) (None, 10) 2440 conv1d_1983[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "lstm_380 (LSTM) (None, 10) 2440 conv1d_1983[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "lstm_381 (LSTM) (None, 10) 2440 conv1d_1983[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "lstm_382 (LSTM) (None, 10) 2440 conv1d_1983[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "lstm_383 (LSTM) (None, 10) 2440 conv1d_1983[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "lstm_384 (LSTM) (None, 10) 2440 conv1d_1983[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "lstm_385 (LSTM) (None, 10) 2440 conv1d_1983[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "lstm_386 (LSTM) (None, 10) 2440 conv1d_1983[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1981 (Conv1D) (None, 13, 6) 90 conv1d_1980[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "concatenate_46 (Concatenate) (None, 180) 0 lstm_363[0][0] \n",
+ " lstm_364[0][0] \n",
+ " lstm_365[0][0] \n",
+ " lstm_366[0][0] \n",
+ " lstm_367[0][0] \n",
+ " lstm_368[0][0] \n",
+ " lstm_369[0][0] \n",
+ " lstm_370[0][0] \n",
+ " lstm_371[0][0] \n",
+ " lstm_372[0][0] \n",
+ " lstm_373[0][0] \n",
+ " lstm_374[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "concatenate_47 (Concatenate) (None, 120) 0 lstm_375[0][0] \n",
+ " lstm_376[0][0] \n",
+ " lstm_377[0][0] \n",
+ " lstm_378[0][0] \n",
+ " lstm_379[0][0] \n",
+ " lstm_380[0][0] \n",
+ " lstm_381[0][0] \n",
+ " lstm_382[0][0] \n",
+ " lstm_383[0][0] \n",
+ " lstm_384[0][0] \n",
+ " lstm_385[0][0] \n",
+ " lstm_386[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "conv1d_1982 (Conv1D) (None, 12, 5) 65 conv1d_1981[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "reshape_10 (Reshape) (None, 12, 15) 0 concatenate_46[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "reshape_11 (Reshape) (None, 12, 10) 0 concatenate_47[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "concatenate_48 (Concatenate) (None, 12, 30) 0 conv1d_1982[0][0] \n",
+ " reshape_10[0][0] \n",
+ " reshape_11[0][0] \n",
+ "__________________________________________________________________________________________________\n",
+ "time_distributed_12 (TimeDistri (None, 12, 1) 31 concatenate_48[0][0] \n",
+ "==================================================================================================\n",
+ "Total params: 138,614\n",
+ "Trainable params: 138,614\n",
+ "Non-trainable params: 0\n",
+ "__________________________________________________________________________________________________\n"
+ ]
+ }
+ ],
+ "source": [
+ "model.summary()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 113,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "5903/5903 [==============================] - 123s 21ms/step\n"
+ ]
+ },
+ {
+ "data": {
+ "text/plain": [
+ "0.00022245472163341716"
+ ]
+ },
+ "execution_count": 113,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "# Load the best weights\n",
+ "model.load_weights(model_weights_name)\n",
+ "model.compile(loss='mse', optimizer='adam')\n",
+ "model.evaluate(test_x, test_y)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 114,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "from matplotlib import pyplot"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 115,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAA6sAAAGfCAYAAAC5lkQXAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMi4yLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvhp/UCwAAIABJREFUeJzs3Xl4nGd97//3rX1kaUbj3ZqxsbOQkNWJnQCFUiiBbBCWQAgQlpY20JZzen5ctA2nByj0nNN0p5QttARO4QCHkkMJh7AFCGELsRMCOBuJs45kx44taWRrl+7fH89IlhU7luSRZiS9X9el65nleUZfOU6ufHTfz/cbYoxIkiRJklRNaipdgCRJkiRJUxlWJUmSJElVx7AqSZIkSao6hlVJkiRJUtUxrEqSJEmSqo5hVZIkSZJUdQyrkiRJkqSqY1iVJEmSJFUdw6okSZIkqerUVbqAqVauXBk3btxY6TIkSZIkSXPgjjvueDLGuOpY500rrIYQLgL+CagF/jXGeO2U998B/BEwChwAro4x3lN67z3A20rv/ecY47ee7ntt3LiR7du3T6csSZIkSdICE0J4dDrnHXMbcAihFvgocDFwGvD6EMJpU077fIzxzBjjZuBvgH8oXXsacCVwOnAR8LHS50mSJEmSdFTTuWf1fODBGONDMcYh4IvAKyafEGMsTnq6DIilx68AvhhjHIwxPgw8WPo8SZIkSZKOajrbgHPA45OeF4BnTz0phPBHwLuABuC3J11725Rrc7OqVJIkSZK0ZEwnrIYjvBaf8kKMHwU+GkJ4A/DfgLdM99oQwtXA1QAbNmyYRkmSJEmStDANDw9TKBQYGBiodClzqqmpiXw+T319/ayun05YLQDrJz3PA51Pc/4XgY/P5NoY4yeBTwJs3br1KWFWkiRJkhaLQqFAa2srGzduJIQjre8tfDFG9u3bR6FQYNOmTbP6jOncs7oNODmEsCmE0EDSMOnGySeEEE6e9PRS4IHS4xuBK0MIjSGETcDJwO2zqlSSJEmSFoGBgQFWrFixaIMqQAiBFStWHNfq8TFXVmOMIyGEdwLfIhldc32M8e4QwgeB7THGG4F3hhAuAIaBLpItwJTO+xJwDzAC/FGMcXTW1UqSJEnSIrCYg+q44/0ZpzVnNcZ4E3DTlNfeN+nxHz/Ntf8D+B+zLVCSJEmStPRMZxuwJEmSJGmR6O7u5mMf+9iMr7vkkkvo7u6eg4qOzLAqSZIkSUvI0cLq6OjT37F500030dbWNldlPcW0tgFLkiRJkhaHa665hp07d7J582bq6+tpaWlh3bp13HXXXdxzzz288pWv5PHHH2dgYIA//uM/5uqrrwZg48aNbN++nQMHDnDxxRfz/Oc/n5/85Cfkcjm++tWvkkqlylqnYVWSJEmSKuQDX7ubezqLZf3M09rTvP/lpx/1/WuvvZYdO3Zw1113ccstt3DppZeyY8eOiREz119/PcuXL6e/v5/zzjuPyy+/nBUrVhz2GQ888ABf+MIX+Jd/+ReuuOIKbrjhBq666qqy/hyGVUmSJElaws4///zDZqF++MMf5itf+QoAjz/+OA888MBTwuqmTZvYvHkzAFu2bOGRRx4pe12GVUmSJEmqkKdbAZ0vy5Ytm3h8yy23cPPNN/PTn/6U5uZmXvjCFx5xVmpjY+PE49raWvr7+8telw2WZmh3zwA79x6odBmSJEmSNCutra309vYe8b2enh6y2SzNzc3cd9993HbbbfNc3SGurM7Qu750F/3Do3zlD59X6VIkSZIkacZWrFjB8573PM444wxSqRRr1qyZeO+iiy7iE5/4BGeddRannHIKz3nOcypWp2F1hvLZFLfcv7fSZUiSJEnSrH3+858/4uuNjY184xvfOOJ74/elrly5kh07dky8/u53v7vs9YHbgGcs19bMnt5BBkeefgaRJEmSJGn2DKszlMsms4M6u596k7EkSZIkqTwMqzOUL4XVjq7yd7uSJEmSJCUMqzOUayuF1e6+ClciSZIkSYuXYXWG1maaqAmurEqSJEnSXDKszlB9bQ3rMikKhlVJkiRJmjOG1VnItaUodBtWJUmSJC083d3dfOxjH5vVtR/60Ifo65ufWyINq7OQy6bcBixJkiRpQVooYbVuXr7LIpNrS7G7OMDI6Bh1teZ9SZIkSQvHNddcw86dO9m8eTMveclLWL16NV/60pcYHBzkVa96FR/4wAc4ePAgV1xxBYVCgdHRUd773vfyxBNP0NnZyYte9CJWrlzJ97///Tmt07A6C/lsitGxyO7iAPlsc6XLkSRJkrRQfeMa2P2r8n7m2jPh4muP+va1117Ljh07uOuuu/j2t7/Nl7/8ZW6//XZijFx22WXceuut7N27l/b2dr7+9a8D0NPTQyaT4R/+4R/4/ve/z8qVK8tb8xG4LDgLOWetSpIkSVoEvv3tb/Ptb3+bc845h3PPPZf77ruPBx54gDPPPJObb76ZP/uzP+OHP/whmUxm3mtzZXUWxmetFrr6eXaFa5EkSZK0gD3NCuh8iDHynve8h7e//e1Pee+OO+7gpptu4j3veQ8vfelLed/73jevtbmyOgvtpbDaYUdgSZIkSQtMa2srvb29AFx44YVcf/31HDhwAICOjg727NlDZ2cnzc3NXHXVVbz73e/mzjvvfMq1c82V1Vloqq9lVWuj24AlSZIkLTgrVqzgec97HmeccQYXX3wxb3jDG3juc58LQEtLC5/73Od48MEH+ZM/+RNqamqor6/n4x//OABXX301F198MevWrZvzBkshxjin32Cmtm7dGrdv317pMo5u304YOsgr/28vyxpr+d+/95xKVyRJkiRpAbn33nt51rOeVeky5sWRftYQwh0xxq3HutaV1Zn6+rtgsJd89u/Y0dFT6WokSZIkaVHyntWZSuehp4NcNkVn9wBjY9W1Mi1JkiRJi4FhdaYyOTjwBOvTdQyNjrH3wGClK5IkSZK0wFTb7Zhz4Xh/RsPqTKXbgcgJTUm3rIJNliRJkiTNQFNTE/v27VvUgTXGyL59+2hqapr1Z3jP6kyl8wDka/cDyfiaLc/IVrIiSZIkSQtIPp+nUCiwd+/eSpcyp5qamsjn87O+3rA6U5kcAKvHngRaKHT1VbYeSZIkSQtKfX09mzZtqnQZVc9twDOVTsJqU/9uss31zlqVJEmSpDlgWJ2ppjQ0pic6And0G1YlSZIkqdwMq7ORbodiB7m2lA2WJEmSJGkOGFZnI52DngL5bDMdXf2LuouXJEmSJFWCYXU2MjkodpJrS9E/PEpX33ClK5IkSZKkRcWwOhvpPBzcQz5dC2BHYEmSJEkqM8PqbKTbAdjY0A1gR2BJkiRJKjPD6myUZq22sx/AjsCSJEmSVGaG1dlI5wFYNvgELY11dgSWJEmSpDIzrM5GaRtwcHyNJEmSJM0Jw+psNLZAUwaKHeSzKbcBS5IkSVKZGVZnK52Hng5y2RQddgOWJEmSpLIyrM5WJgelbcDFgRGKA85alSRJkqR
yMazOVjpX2gbcDDi+RpIkSZLKybA6W+kc9O0j3xoAw6okSZIklZNhdbZKs1bX13YBUPC+VUmSJEkqG8PqbKWTsJodeYLGuho7AkuSJElSGRlWZyuTByAUdyUdgQ2rkiRJklQ2htXZal2XHIsFcm0pCt6zKkmSJEllY1idrYZmSC2HnqQjsA2WJEmSJKl8DKvHIzM+vibFvoND9A+NVroiSZIkSVoUphVWQwgXhRDuDyE8GEK45gjvvyuEcE8I4ZchhO+GEJ4x6b3REMJdpa8by1l8xaXzUOwk15YCoKPbjsCSJEmSVA7HDKshhFrgo8DFwGnA60MIp0057efA1hjjWcCXgb+Z9F5/jHFz6euyMtVdHTI56CmQzyZh1ftWJUmSJKk8prOyej7wYIzxoRjjEPBF4BWTT4gxfj/GOL6seBuQL2+ZVSrdDgPd5FvGAOwILEmSJEllMp2wmgMen/S8UHrtaN4GfGPS86YQwvYQwm0hhFfOosbqlU4y+aqxfdTVBFdWJUmSJKlM6qZxTjjCa/GIJ4ZwFbAV+K1JL2+IMXaGEE4AvhdC+FWMceeU664GrgbYsGHDtAqvCpkks9f2dtLelrIjsCRJkiSVyXRWVgvA+knP80Dn1JNCCBcAfw5cFmMcHH89xthZOj4E3AKcM/XaGOMnY4xbY4xbV61aNaMfoKLSpQXmYge5tpTbgCVJkiSpTKYTVrcBJ4cQNoUQGoArgcO6+oYQzgGuIwmqeya9ng0hNJYerwSeB9xTruIrLt2eHHs6yGVTFLrsBixJkiRJ5XDMbcAxxpEQwjuBbwG1wPUxxrtDCB8EtscYbwT+FmgB/j2EAPBYqfPvs4DrQghjJMH42hjj4gmrdY2wbBUUk47Ae3oHGRoZo6HO8bWSJEmSdDymc88qMcabgJumvPa+SY8vOMp1PwHOPJ4Cq146l8xabU8RI+zq6ecZK5ZVuipJkiRJWtBcAjxemfzENmBw1qokSZIklYNh9Xil26HYwfpsM4AdgSVJkiSpDAyrxyudg8Eia5uGqAlQsCOwJEmSJB03w+rxyuQBqD+wmzXpJjsCS5IkSVIZGFaP18Ss1UIya9VtwJIkSZJ03Ayrx2vSrNV8NkWH24AlSZIk6bgZVo9Xuh0IUEw6Au/uGWBkdKzSVUmSJEnSgmZYPV619dCyJhlf09bMyFjkid7BSlclSZIkSQuaYbUcMjkoJtuAwfE1kiRJknS8DKvlUJq1mhsPq912BJYkSZKk42FYLYd0PtkGnGkCoLDflVVJkiRJOh6G1XLI5GD4IE2jvaxsabQjsCRJkiQdJ8NqOUzMWu0k5/gaSZIkSTpuhtVyyOSTY08H+bYUBRssSZIkSdJxMayWQ7o9ORYL5Esrq2NjsbI1SZIkSdICZlgth5a1EGqSJkvZFEMjYzx50FmrkiRJkjRbhtVyqK2D1nXJPattyfgatwJLkiRJ0uwZVsslnSttA24GoMOwKkmSJEmzZlgtl3T7xDZgwI7AkiRJknQcDKvlkslDsYOWhloyqXoKXX2VrkiSJEmSFizDarmkczAyAP1dSUdgtwFLkiRJ0qwZVsslk0uOPQVybSm3AUuSJEnScTCslku6FFaLyX2rha5+YnTWqiRJkiTNhmG1XNKHVlbz2Wb6hkbp7huubE2SJEmStEAZVsulZTXU1B02a9WtwJIkSZI0O4bVcqmphdZ2KHaQL42vsSOwJEmSJM2OYbWcSrNWD4VVV1YlSZIkaTYMq+WUyUGxQCZVz7KGWrcBS5IkSdIsGVbLKZ2DYicBJjoCS5IkSZJmzrBaTpk8jA7BwSeTWauGVUmSJEmaFcNqOaXbk2MxGV/jNmBJkiRJmh3DajlNzFrtIJdN0dM/TO+As1YlSZIkaaYMq+WUySfHYoezViVJkiTpOBhWy6l5JdQ2HDZr1ftWJUmSJGnmDKvlVFMzMWs1l3VlVZIkSZJmy7BabukcFDtYuayRhroax9dIkiRJ0iwYVsstnYOeDmpqAnnH10iSJEnSrBhWyy2Tg95OGBsjl01RcBuwJEmSJM2YYbXc0jkYG4GDe8i1pejo6qt0RZIkSZK04BhWy23SrNV8NsWTB4YYGB6tbE2SJEmStMAYVsstUwqrxYIdgSVJkiRplgyr5ZbOJ8diJ7m2ZgA7AkuSJEnSDBlWy615OdQ1QU+B/PjKqmFVkiRJkmbEsFpuIUC6HYodrEk3UVcT6Oi2yZIkSZIkzYRhdS6UZq3W1gTWZprcBixJkiRJM2RYnQuZPBQ7AchnU24DliRJkqQZMqzOhXQOenfB2Ci5tma7AUuSJEnSDBlW50K6HeIo9O4ml02xuzjA0MhYpauSJEmSpAXDsDoXMuPjazrIZ1PECLt7BipbkyRJkiQtIIbVuZDOJcdiB/m2ZHxNwY7AkiRJkjRthtW5kCmF1Z4OcqVZq3YEliRJkqTpm1ZYDSFcFEK4P4TwYAjhmiO8/64Qwj0hhF+GEL4bQnjGpPfeEkJ4oPT1lnIWX7Wa2qC+GYodrMukCAE7AkuSJEnSDBwzrIYQaoGPAhcDpwGvDyGcNuW0nwNbY4xnAV8G/qZ07XLg/cCzgfOB94cQsuUrv0qFUJq1WqChroY1rU12BJYkSZKkGZjOyur5wIMxxodijEPAF4FXTD4hxvj9GOP4TZm3AaUOQ1wIfCfGuD/G2AV8B7ioPKVXuUwOih0A5LIpCl3esypJkiRJ0zWdsJoDHp/0vFB67WjeBnxjJteGEK4OIWwPIWzfu3fvNEpaANJ5KHYCkGtLubIqSZIkSTMwnbAajvBaPOKJIVwFbAX+dibXxhg/GWPcGmPcumrVqmmUtABkctC7G0aHyWdT7OoeYHTsiH9skiRJkqQpphNWC8D6Sc/zQOfUk0IIFwB/DlwWYxycybWLUrodiNC7i1w2xchY5Imis1YlSZIkaTqmE1a3ASeHEDaFEBqAK4EbJ58QQjgHuI4kqO6Z9Na3gJeGELKlxkovLb22+KVLt+32dJArzVp1K7AkSZIkTc8xw2qMcQR4J0nIvBf4Uozx7hDCB0MIl5VO+1ugBfj3EMJdIYQbS9fuB/6SJPBuAz5Yem3xG5+1Wuwgn20GHF8jSZIkSdNVN52TYow3ATdNee19kx5f8DTXXg9cP9sCF6z0obCaO8WVVUmSJEmaielsA9ZsNKWhoRV6Okg11LJiWYPjayRJkiRpmgyrc2nSrNV8NkXBbcCSJEmSNC2G1bmUPhRWc1lnrUqSJEnSdBlW51ImBz2lsNqWoqOrnxidtSpJkiRJx2JYnUvpHBzcAyOD5LPNDI6M8eSBoUpXJUmSJElVz7A6lyY6Anc6a1WSJEmSZsCwOpcyk8JqNgmrdgSWJEmSpGMzrM6ldD45FjsmwmqHHYElSZIk6ZgMq3Mp3Z4cewqkm+pJN9W5DViSJEmSpsGwOpcaW6ApM2l8TbOzViVJkiRpGgyrcy2dh2InAPlsym3AkiRJkjQNhtW5lslBTwEozVrtdtaqJEmSJB2LYXWupdsntgHnsykODI7Q0z9c4aIkSZIkqboZVudaOg99+2
C4n/zE+Bq3AkuSJEnS0zGszrXJs1bbmgHsCCxJkiRJx2BYnWvp8bB6aNaqK6uSJEmS9PQMq3NtPKz2dJBtridVX2tHYEmSJEk6BsPqXEu3J8digRBCMr6mu6+yNUmSJElSlTOszrWGZkgth56kI3Aum3IbsCRJkiQdg2F1PmRyUOwEDs1alSRJkiQdnWF1PqTzk2atNtPdN8yBwZEKFyVJkiRJ1cuwOh/S7dBTAJjoCGyTJUmSJEk6OsPqfMjkYKAbhg6SayuFVZssSZIkSdJRGVbnQzqfHIudrHdlVZIkSZKOybA6HzLjs1YLrGxppKG2hoJNliRJkiTpqAyr82Fi1moHNTWB9rYmx9dIkiRJ0tMwrM6H9PjK6qGOwG4DliRJkqSjM6zOh7pGWLZqYnyNs1YlSZIk6ekZVudLOncorGZT7O0dZGB4tMJFSZIkSVJ1MqzOl3Ru0jbgpCNwp6urkiRJknREhtX5kskdtg0YcCuwJEmSJB2FYXW+pHMwWISBIrnSyqodgSVJkiTpyAyr8yWTT47FTtamm6itCXYEliRJkqSjMKzOl4lZqwXqamtYm25yG7AkSZIkHYVhdb5MmbWay6YodPVVsCBJkiRJql6G1fmSbgcCFDuBpCOw24AlSZIk6cgMq/Olth5a1kCxAEC+LcXu4gDDo2MVLkySJEmSqo9hdT6l2w/bBjwWYXfPQIWLkiRJkqTqY1idT5NmreazzYDjayRJkiTpSAyr8ymdT1ZWYyTXlsxatSOwJEmSJD2VYXU+ZXIwfBAGeljX1gRgR2BJkiRJOgLD6nwaH19T7KCxrpbVrY12BJYkSZKkIzCszqcps1bz2ZTbgCVJkiTpCAyr8ykzvrKajK/JZZttsCRJkiRJR2BYnU8tayHUQLETgFxbil09/YyNxQoXJkmSJEnVxbA6n2rroHXdYduAh0cje3oHK1yYJEmSJFUXw+p8S7dP2gacjK+xI7AkSZIkHc6wOt/SuUMrq85alSRJkqQjMqzOt0w+uWc1xkkrq4ZVSZIkSZrMsDrf0jkY6Yf+Lpob6li+rMGwKkmSJElTGFbnW7o9OfaU7lttc9aqJEmSJE01rbAaQrgohHB/COHBEMI1R3j/BSGEO0MIIyGE10x5bzSEcFfp68ZyFb5gZfLJsXioI3CHDZYkSZIk6TB1xzohhFALfBR4CVAAtoUQbowx3jPptMeAtwLvPsJH9McYN5eh1sUhnUuOpbCaa0vx/fv3EGMkhFDBwiRJkiSpekxnZfV84MEY40MxxiHgi8ArJp8QY3wkxvhLYGwOalxcWlZDTd1ER+BcNsXA8Bj7Dg5VuDBJkiRJqh7TCas54PFJzwul16arKYSwPYRwWwjhlUc6IYRwdemc7Xv37p3BRy9ANbXQum7SNuBmADpssiRJkiRJE6YTVo+0NzXO4HtsiDFuBd4AfCiEcOJTPizGT8YYt8YYt65atWoGH71ATZq1mnPWqiRJkiQ9xXTCagFYP+l5Huic7jeIMXaWjg8BtwDnzKC+xSmTO3TP6sSsVZssSZIkSdK46YTVbcDJIYRNIYQG4EpgWl19QwjZEEJj6fFK4HnAPU9/1RKQzkGxE2Ikk6qntanObcCSJEmSNMkxw2qMcQR4J/At4F7gSzHGu0MIHwwhXAYQQjgvhFAAXgtcF0K4u3T5s4DtIYRfAN8Hrp3SRXhpSudgdBAOPgk4a1WSJEmSpjrm6BqAGONNwE1TXnvfpMfbSLYHT73uJ8CZx1nj4pMZH19TgJZV5LMpCq6sSpIkSdKE6WwDVrlNzFpNbv3NZ5vdBixJkiRJkxhWKyFTWoSe1BG4d3CEnv7hChYlSZIkSdXDsFoJzSuhtiHZBowdgSVJkiRpKsNqJdTUQOu6p85adSuwJEmSJAGG1crJ5CdmreZLK6t2BJYkSZKkhGG1UtK5ibC6fFkDTfU1dgSWJEmSpBLDaqVkclDcBWNjhBCSWauGVUmSJEkCDKuVk87B2DAc3AOUxte4DViSJEmSAMNq5YzPWh1vspRN2Q1YkiRJkkoMq5WSKYXV4qGOwF19w/QNjVSwKEmSJEmqDobVSknnk+PUjsDetypJkiRJhtWKaV4OdU3QUwAOhVU7AkuSJEmSYbVyQoB0+6RtwM0AFGyyJEmSJEmG1YpK56DYCcDq1kbqa4PbgCVJkiQJw2plZfIT3YBragLtbXYEliRJkiQwrFZWuh16d8HYKJB0BHbWqiRJkiQZVisrnYM4Cr27gaTJktuAJUmSJMmwWlmZ8fE1yX2rubZm9vQOMjgyWsGiJEmSJKnyDKuVlM4lx2IyviZXGl/T2T1QqYokSZIkqSoYVisp3Z4cS02WxmetuhVYkiRJ0lJnWK2kVBbqmyfNWi2F1W47AkuSJEla2gyrlRRCadZqElbXZpqoCVBwZVWSJEnSEmdYrbRMbmIbcH1tDesydgSWJEmSJMNqpaXzEyurkGwFLjhrVZIkSdISZ1ittHR7Mmd1dBhIOgK7sipJkiRpqTOsVlomB8QksJJ0BN5dHGBkdKyydUmSJElSBRlWKy2dT46TOgKPjkV2F521KkmSJGnpMqxWWiaXHHsKQLINGOwILEmSJGlpM6xWWro9OU6dtWpYlSRJkrSEGVYrrSkDDa0T42vax8OqHYElSZIkLWGG1WqQyU2srDbV17KqtZFCV1+Fi5IkSZKkyjGsVoN07imzVl1ZlSRJkrSUGVarQbp9YhswJONrvGdVkiRJ0lJmWK0GmTwc3AMjg0DSEbize4CxsVjhwiRJkiSpMgyr1SBdGl/TuwuAfFuKodEx9h4YrGBRkiRJklQ5htVqMDFrNdkKnM82A85alSRJkrR0GVarwfjK6vis1WwyvsaOwJIkSZKWKsNqNRgPqz0FIOkGDM5alSRJkrR0GVarQWMLNGWg2AnAssY6ss31dgSWJEmStGQZVqtFOn/4rNVsyntWJUmSJC1ZhtVqkW6f2AYMyVZgtwFLkiRJWqoMq9UikztsZTWfbaajq58YnbUqSZIkaekxrFaLdB769sHwAJCsrPYPj7L/4FCFC5MkSZKk+WdYrRaZI4+vcSuwJEmSpKXIsFotpsxazY+HVZssSZIkSVqCDKvVYmLWaimstjUDrqxKkiRJWpoMq9Ui3Z4cSyur6VQdLY11jq+RJEmStCQZVqtFQzOklk+E1RACeWetSpIkSVqiDKvVJJOb2AYMzlqVJEmStHQZVqtJ+vBZq7lsikJXXwULkiRJkqTKMKxWk3QOegoTT/PZFL0DIxQHhitYlCRJkiTNv2mF1RDCRSGE+0MID4YQrjnC+y8IIdwZQhgJIbxmyntvCSE8UPp6S7kKX5QyORjohqGDAOTGOwJ736okSZKkJeaYYTWEUAt8FLgYOA14fQjhtCmnPQa8Ffj8lGuXA+8Hng2cD7w/hJA9/rIXqXQ+ORY7gWQbMGCTJUmSJElLznRWVs8HHowxPhRjHAK+CLxi8gkxxkdijL8ExqZceyHwnRjj/hhjF/Ad4KIy1L04jY+vKW0FzrUlYbXD+1YlSZIkLTHTCas54PFJz
wul16ZjWteGEK4OIWwPIWzfu3fvND96EcqU/mhKTZZWtjTQWFdjR2BJkiRJS850wmo4wmtxmp8/rWtjjJ+MMW6NMW5dtWrVND96EUqPh9VkG3AIodQR2LAqSZIkaWmZTlgtAOsnPc8DndP8/OO5dumpa4Rlqw7rCOysVUmSJElL0XTC6jbg5BDCphBCA3AlcOM0P/9bwEtDCNlSY6WXll7T0aTbD5u1ms822w1YkiRJ0pJzzLAaYxwB3kkSMu8FvhRjvDuE8MEQwmUAIYTzQggF4LXAdSGEu0vX7gf+kiTwbgM+WHpNR5POQ8/ksJpi38Eh+oZGKliUJEmSJM2vuumcFGO8Cbhpymvvm/R4G8kW3yNdez1w/XHUuLRkcvDIjyaejncE7uzu56TVrZWqSpIkSZLm1XS2AWs+pXMw2AODvUCysgrOWpUkSZK0tBhWq814R+DSVuCcYVWSJEnSEmQHpPxaAAAgAElEQVRYrTYTs1aTjsCrW5uoqwl2BJYkSZK0pBhWq82UWau1NYH2tpQdgSVJkiQtKYbVatO6DgiHdQTOtaUodPVVriZJkiRJmmeG1WpT1wAtqye2AUNy36rbgCVJkiQtJYbVapTOPWXW6p7eQQZHRitYlCRJkiTNH8NqNcrkJu5ZhWQbcIywq3uggkVJkiRJ0vwxrFajdB6KHRAjcGh8jVuBJUmSJC0VhtVqlMnB0AEY6AFgfbYZwI7AkiRJkpYMw2o1Srcnx2Jy3+raTBM1AQqurEqSJElaIgyr1SidT46l+1bra2tYk25yfI0kSZKkJcOwWo0yueTYc2h8TT6bchuwJEmSpCXDsFqNWtZCqJnYBgxJR2AbLEmSJElaKgyr1ai2Lgmsk2at5rIpdvUMMDI6VsHCJEmSJGl+GFarVSYHxUPbgHNtzYyORZ7oHaxgUZIkSZI0Pwyr1Sqdm2iwBMk9q+D4GkmSJElLg2G1WmXyyTbgGIFkGzBgR2BJkiRJS4JhtVql22GkH/q7gKTBEriyKkmSJGlpMKxWq/Th42ua6mtZ2dJoR2BJkiRJS4JhtVpl8slx0n2ruWyKgiurkiRJkpYAw2q1Gl9ZndQROO+sVUmSJElLhGG1WrWshpq6w2at5rNJWB0bixUsTJIkSZLmnmG1WtXUQus6KB4Kq7lsiqGRMZ484KxVSZIkSYubYbWaTZm1Ot4RuOBWYEmSJEmLnGG1mmVyE92AAfLZZsDxNZIkSZIWP8NqNUu3JyurMblHNZctrawaViVJkiQtcobVapbOw+ggHHwSgJbGOjKpejq6+ypcmCRJkiTNLcNqNcuMj6+Z0hHYlVVJkiRJi5xhtZqlnxpWc20ptwFLkiRJWvQMq9Usk0+OPYePr+no7idGZ61KkiRJWrwMq9WseSXU1EPx8I7AfUOjdPcNV7AwSZIkSZpbhtVqVlNzqCNwycSsVbcCS5IkSVrEDKvVLpM/bBtwvjS+xo7AkiRJkhYzw2q1S+embAN2ZVWSJEnS4mdYrXbpdijugrExADKpepY11NLRbViVJEmStHgZVqtdJg9jw3BwDwAhBHJZx9dIkiRJWtwMq9XuCLNW89lmOgyrkiRJkhYxw2q1y5TC6uRZq20ptwFLkiRJWtQMq9XuCCuruWyKnv5hegectSpJkiRpcTKsVrvmFVDXBD2HOgKPz1p1dVWSJEnSYmVYrXYhlDoCd068NDFr1ftWJUmSJC1ShtWFIJ17yjZgcNaqJEmSpMXLsLoQpHOHNVhauayRhroavnX3bu7pLFawMEmSJEmaG4bVhSCTg95dMDYKQE1N4I3P3sC2R/ZzyYd/yMX/9EP+9YcPsbd3sMKFSpIkSVJ5GFYXgnQO4igceGLipfe//HRu/68X8MFXnE5DbeC/f/1envNX3+Vtn9nGTb/axeDIaAULliRJkqTjU1fpAjQNmXxy7OlImi2VZJc18ObnbuTNz93IA0/0csOdHXzl5wW+e98eMql6Xn72Ol6zZT1n5zOEECpUvCRJkiTNnGF1IRgPqMUCcN4RTzl5TSvXXHwqf3LhKfzowSe54Y4C/769wOdue4wTVy3j8i15XnVOjnWZ1PzVLUmSJEmzZFhdCNK55DipydLR1NYEfuuZq/itZ66iODDMTb/cxQ13Fvibb97P337rfp5/0kouPzfPhaevJdVQO8eFS5IkSdLsGFYXglQW6psPm7U6Hemmeq48fwNXnr+BR548yP+9s8ANd3bwX/7PXbQ01nHpmeu4fEue8zZm3SYsSZIkqapMK6yGEC4C/gmoBf41xnjtlPcbgX8DtgD7gNfFGB8JIWwE7gXuL516W4zxHeUpfQkJoTRrtTDrj9i4chnveukp/JcLnsnPHt7PDXcW+NovO/k/2x9nw/JmXn1ujsvPzbN+eXMZC5ckSZKk2TlmWA0h1AIfBV4CFIBtIYQbY4z3TDrtbUBXjPGkEMKVwF8Dryu9tzPGuLnMdS89mdy0tgEfS01N4LknruC5J67gg684nW/u2M0Ndxb4p+8+wIdufoDzNy3nNefmueSsdbQ0uvAuSZIkqTKmk0bOBx6MMT4EEEL4IvAKYHJYfQXwF6XHXwY+EtxXWl7pHOz8Xlk/srmhjlefm+fV5+bp6O7nK6Vtwn96wy95/413c9EZa7n83DzPPXEFtTX+45QkSZI0f6YTVnPA45OeF4BnH+2cGONICKEHWFF6b1MI4edAEfhvMcYfHl/JS1Q6B727YXQYauvL/vG5thTv/O2T+aMXncSdj3Un24R/0clXft7BukwTrzonx+Vb8py4qqXs31uSJEmSpppOWD3Sklqc5jm7gA0xxn0hhC3Af4QQTo8xFg+7OISrgasBNmzYMI2SlqBMDohJYG1bP2ffJoTAlmdk2fKMLO972Wl8554nuOHOAp/4wU4+dstONq9v4/IteS47q51Mc/lDsyRJkiTB9MJqAZicjvLA1La04+cUQgh1QAbYH2OMwCBAjPGOEMJO4JnA9skXxxg/CXwSYOvWrVODsADS+eRY7JjTsDpZU30tLz+7nZef3c6e4gD/cVcHN9zRwXv/Ywd/+bV7eMlpa7h8S47feuZqtwlLkiRJKqvphNVtwMkhhE1AB3Al8IYp59wIvAX4KfAa4HsxxhhCWEUSWkdDCCcAJwMPla36pSTdnhx7Zt8R+HisTjdx9QtO5Pd/8wTu7izy5TsK3PiLTr7+q11sWN7M77/gBF67JU9TvbNbJUmSJB2/Y4bV0j2o7wS+RTK65voY490hhA8C22OMNwKfAj4bQngQ2E8SaAFeAHwwhDACjALviDHun4sfZNHL5JLjDGetllsIgTNyGc7IZfivlzyLm+99gutufYj3/scOPvSdX/OW39jIm57zDLLLGipapyRJkqSFLSQ7davH1q1b4/bt24994lL0P/Nwzhvh4r+udCWHiTFy+8P7ue7Wh/jefXtI1dfyuvPW87bnb3JuqyRJkqTDhBDuiDFuPdZ5DtJcSDK5im0DfjohBJ59wgqefcIK7t/dy3W37uRztz3KZ297lJedtY6rX3ACp7dnKl2mJEmSpAWk
ptIFaAbS7UmDpSp2ytpW/uGKzdz6py/id35jIzff8wSXfvhHvOlTP+PHDz5Jta3kS5IkSapOhtWFJJ2DnuoOq+Pa21L8t5edxk+ueTF/cuEp3Lurlzf+68+47CM/5v/9spOR0bFKlyhJkiSpihlWF5JMHg7ugZGhSlcybZnmev7oRSfxoz97EX/16jM5MDjCOz//c37773/AZ3/6CP1Do5UuUZIkSVIVMqwuJOlSR+DeynYEno2m+lpef/4Gbn7Xb/GJq7awfFkD7/3q3Tzvr7/HP938AF0HF04AlyRJkjT3bLC0kEzMWu2A7MaKljJbtTWBi85Yy4Wnr2HbI11c94Od/OPNv+YTP9hpB2FJkiRJEwyrC0kmnxyrvMnSdIQQOH/Tcs7ftJz7d/fyyVsfmuggfOmZSQfhM3J2EJYkSZKWKrcBLyTj24AXQVid7JS1rfz9FWfzwz97Eb/7vI18994neNk/20FYkiRJWsoMqwtJYws0ZRZMR+CZWpdJ8eeXnsZP3vNi/vSiQx2EX/6RH/G1X9hBWJIkSVpKDKsLTTq36FZWp8qk6vnDFyYdhK999Zn0DY7yn77wc17097fwbz+1g7AkSZK0FBhWF5p0DnoKla5iXjTV13JlqYPwdW/awsqWRt731bv5jWu/y4du/jX77SAsSZIkLVo2WFpoMjno/Hmlq5hXNTWBC09fy0tPW8P2R7v4xC07+dDNDyQdhLeu5/d+8wQ7CEuSJEmLjGF1oUnnoe9JGB6A+qZKVzOvQgict3E55711Ob9+Iukg/PnbH+Oztz3Ky85q5wOXnU52WUOly5QkSZJUBm4DXmgyi7Mj8Ew9c00rf/fas7n1T1/E7/3mCXxzx26u+tTP6O5za7AkSZK0GBhWF5p0e3Jc4mF13LpMiv96ybP45Ju38MATBwyskiRJ0iJhWF1o0vnkWOysbB1V5oWnrOa6N23h17sP8KZP3U5P33ClS5IkSZJ0HAyrC834yuoS6Qg8Ey86dTWfeNO53L+7lzdd/zN6+g2skiRJ0kJlWF1oGpohtdxtwEfx26eu4eNXncu9u4q8+VMGVkmSJGmhMqwuROkc9BhWj+bFz1rDx9+4hXt2FXnz9bdTHDCwSpIkSQuNYXUhyuS8Z/UYLjhtDR974xbu6ezhzZ8ysEqSJEkLjWF1IUrnoOg9q8fyktPW8NE3nMuOjh7ecv3t9BpYJUmSpAXDsLoQZXLQ3wVDfZWupOq99PS1fOQN5/KrgoFVkiRJWkgMqwtROpccbbI0LRedsZaPvOEcflno4a2f3saBwZFKlyRJkiTpGAyrC9F4WHV8zbRddMY6/vn153DX49289frbDaySJElSlTOsLkSZ8ZVVmyzNxMVnJoH154938zufNrBKkiRJ1cywuhC1tidHtwHP2CVnruPDV57DnY9187uf3sZBA6skSZJUlQyrC1F9E2Q2wO3/And9AcbGKl3RgnLpWev40Os2c8djXfzOZ7bRN2RglSRJkqqNYXWhet1noW09/Mc74FMvgcL2Sle0oLz87Hb+8XWb2f7Ifn7n0wZWSZIkqdoYVheq9s3wtpvhlZ+AnsfhX18MX/kD6N1d6coWjMtKgXXbI/v53c9so39otNIlSZIkSSoxrC5kNTWw+fXwn+6A5/0X2PFl+Oct8KN/hJHBSle3ILxic45/fN1mbn/YwCpJkiRVE8PqYtDYCi/5APzhbbDpBXDzX8BHnw333QQxVrq6qveKzTn+/oqz+dnD+3jb/zKwSpIkSdXAsLqYrDgRXv8FuOr/Qm0DfPH18LlXw577Kl1Z1XvVOXn+7rVn89OH9vH7/7adgWEDqyRJklRJhtXF6KQXwx/8GC66Fgp3wMd/A75xDfR3Vbqyqvbqc/P83WvO5sc7nzSwSpIkSRVmWF2sauvhOX8A//lOOPfN8LNPJPezbr8exgxhR3P5ljx/+5qz+dGDBlZJkiSpkgyri92ylfDyD8Hbb4VVp8L/+//gut+CR35c6cqq1mu25Pnry8/iRw8+ydWfvcPAKkmSJFWAYXWpWHcWvPXr8JpPJ9uBP3MJ/PtbofuxSldWla7Yup6/fvVZ3PrrvbzdwCpJkiTNO8PqUhICnPFqeOc2eOF74P5vwkfOg+//FQz1Vbq6qnPFeev568vP5Ae/3ss7PncHgyMGVkmSJGm+GFaXooZmeOE1SWg95RL4wbVJaN1xg6NupnjdeRv4q1efyS337+UdnzWwSpIkSfPFsLqUta2H134a3noTNGfhy78Ln74Edv2i0pVVldefv4H/+aoz+f79e/mDz91pYNXCdGAP3HIt/K/L4GfXwUBPpSuSJEl6WiFW2Ura1q1b4/bt2ytdxtIzNgp3/ht87y+hbz9seQv89nuTBk3VaPAA7LkHntgBu3dATwE2PBtOfRmsfGay5bnM/vfPHuXPv7KDC561mo+9cQsNdf6uRwvArl/AbZ+AHV+G0SHIboKuh6G+Gc58DWx9G7RvrnSVkiRpCQkh3BFj3HrM8wyrOkx/F/zgb+D2T0L9smS78Pm/n4zCqYQYkyZQ46H0iV/BE3fD/oeB0t/dxjS0roMn70+eLz8RTr0kCa7586CmtmzlfPa2R3nvf+zggmet4WNvPNfAquo0OgL3fz0JqY/9JPl3+Zw3wvlvh5UnQefPYdun4FdfhpF+aD8XznsbnP7q5DYBSZKkOWRY1fHZez988xrY+T1YeQpc9Fdw0ovn9nsOHYQ9904KpjuSYDpYLJ0QYPkmWHMGrD0zOa45Hdo2JCupxU64/xtw39fh4VthbBiaV8IpFyXB9YQXQn3quMv87E8f4b1fvZuXnLaGj77BwKoq0t8Fd34Wbv8X6Hks+Xfj/LfDOVdBqu0I53fDL/9PElyfvB+aMrD5jbD1d2HlyfNfvyRJWhIMqzp+McKvvwnffE+ybfCZF8OF/wNWnHj8n9tTmBJKd8C+nUyslja0JEF0zRmw9gxYcyasfhY0tkzvewwU4cGbk+D6wHdgsCfZ9njib8Opl8LJF8KyFbP+Ef7tp4/wvq/ezYWnr+EjbziX+loDqypo76/hZ5+AX3wBhvtg42/Cs98Bp1w8vZ0FMcKjP05C671fS37Rs+kFSWg99WWV21khSZIWJcOqymdkEG77ONz6t8nj5/4hvOBPoLH12NcO9x9htXTH4c1dshtLq6TjwfQMaHsG1JQpAI4MJf8jft/X4f6boNgBoQY2PDcJrqdckqzYztBnfvwwf/G1e7jo9LX88xvOMbBWoRgjxYERamsCyxpqCXNwL3PFjI0lOx9+9vHkFzO1jXDma+E570h2HszWgT3w88/C9s8kq7Mta+DcN8OWt0ImX67qJUnSEmZYVfn17obvfhDu+t/J/8Be8Bdw1pVJqIwx2YY7HkbHg+m+ByGOJdfXL4M1px0eSlefBk3p+fsZYkwazowH1yd2JK+vPi0JradeCu3nTLtB06d//DAf+No9XHzGWj78+ioIrIMHYP9DyS8Jss9I/jktpoB2BL0Dwzy+v59CVx+Pd5WOpeeFrn4ODI4AUBMgnaon3VRPOlWXHCc
/TtWTbqoj01w/6fmh95urJewOHoBffIH4s+sI+x5gdNka9p/2JjpPupLu0MaBgREODo5woPR1cHCE3tLx4OAIgyNjpFP1LG9uINtcT1tzA8uXNdDWXE920uOW+kDY+d1ktfWBbyd/j06+MLm39cQXl++XSZIkackxrGruFO6Ab/wpdGyHdWcnDY6e2JHcLzeubcNTV0uzm6rvf3C7HoH7bkqC66M/gTgKre3J9slTL022U9Y1PO1HfOpHD/OX/+8eLjlzLf905TwE1pEh6H40+UXAxNfO5Ni76/Bz61LJyvXyTckxO37cmITZusa5rbUM+oZGKHT18/j+JHxOhNHu5NjTP3zY+csaalm/vJl8NkU+20yuLblPuTgwTLF/mOLACMX+YXr6h0uvjVAcGKZv6OlHEtXWBNJNdUcMvEnAPcJ7k56n6msZHo0TAfLAlEB5YGDK88HRw95r7uvgor4buXTkO7TSx11jJ3D9yMV8Y+zZDFN31Lob62pobapjWWMdyxrqaKiroTgwTNfBIbr7h486Wrm+NtBWCrQnNXTx8uFv8/wDN9E60kWxKccjG6+g65mvo2XFGrLNDWSbG8ik6qmpqYJAL0mSqpphVXNrbAx+9SX44T8k95GuOT25r3RtqelRU6bSFc5c33749beSLqoPfje5968xDSddULrP9SVH/bn+9YcP8d+/fi8veOYqnn/SCla3NrGqtZHVrY2sam0kk6qf2arc2FiyXXlqGN2/E7oeTUL1uNRyWHFS6evE5FjfnATxrkeS+43HHw/3TfomAdLthwLs8o2TwuwmaF4+L6uyA8OjdHT3HxZIH+/qo1B6vO/g0GHnN9XXkM8mYXT9+HH5oedtzTP8sy4ZHh2jtxRkJ4fYIwXbyaF3/PX+4acPuzUBxqb5n9tUfS3LGutobazl/Nr7ePXQ1zhv4KdE4O62F3HXuivpXrGZlqZ6WhqTINrSVEdLYy0tjfUsa6ydeP3pfnkyNhYpDgyz/+AQXX3DdPcNsf/gEN19w3T1DSVfBw897j3Yx/kDP+H1Nd/hOTX3Mhjr+ObY+Xxu5AK2xVMIIdCWSlZo25rrS6u0SeDNLmsohdpkNbetuZ7WUv0tjXXUGnIlSVoyDKvS8Rjuh4d+kATX+78BB/dCTT1sfP6h+1wzucMuuf5HD/N3377/iCt0DbU1rCoF14kQ29LA+sZ+1tPJ2uEC2f7HWHbgEWr2P5SE0pGBQx9Q33woiE7+Wn5CEiqnI8bkfsTJAXb/pCB7YPeUoltLAXbj4SuyyzdBZv20m+4MjYyxq6f/sK25j48f9/exp3fwKX9WuWxqYmV0ahhd2dJAgOTPZ7C39FVMjgPFp742dACWrYZVz4RVpyajjY6xWj4bQyNj9A6MB9unht4DAyM01dckwXL8q7TiOf48Wf2spW5sCHbckNyPuvtXkMrClt+B837vKX/v5luMyerwgcLd1P/8M7T9+svUDffS1XISd656FT9teQm7B+sngm533xD7+4YYGB572s9tbhgP6OPB+9Cf0aHX6g89n3Rea9Oh54115RtVJUmS5oZhVSqXsVEobE+C631fT1Y4AdZtTjqlnnpJcs9rCMQY6R0cYW/vIHuKg+w9MEjX/n2M7nuQuv0PsezAI7T1P8bq4QIbYieZcGilczjW8lhcTaGmnT0N6yk2P4P+9Cbi8hNJLc+xKj2+Wpsc00115b2HcqgPuh9lbN9DDD/5MGP7HoKuR6jpeZT63seoGT20wjkWajnQtJaexhz7GtrZW7eO3bVr6QhrKcQ1PDnaRP/QKHt7B9ldHJi0ohhprhnhpMwYJ7aOsal1lHzzKO2pYVY3DLGyfogW+qgZ6p0UOieH0ElhdGz4iD/GYWrqoWEZDHQfei3UJiF/1SnJ18rx48nJuZXU+wRs/xRsvz75BcmqZyUNk868onrnnw4dTIL1tk/BrruSe9PPfE1yb+u6sydOGxgepWvSym133/DE/bTJFuhhDgyO0Du+Hbp07J20PXp0GkvT9bVhUpCtPzz8HiHoNtXXUl8bqK2poa4mUFcbqK0J1NfWUFsTktdqaqirPfS4tjZQX5OcV1d76Lq6mhpqAtVxb7MkSVXMsCrNlb2/LgXXm6CwDYhJ9+JTXwb5LclYnn07D23dnbpimVkPK05kNHsiB1o2sq9xPbvq8jw2uoI9B0fZ0zuQhN3eQfaWvoZGn7oq1VhXc9hW48lbj///9u4uxo7zruP49z9zzu561946iZ3gJrFj0iQSoNLSKAEVtb1pCb1pI0HrqBdBQkoLrYA7Em6IkBAVAgRXoNBGShCkqtQC7gVKi0C8SE2bxLTktU5UuYmd4Je4ie3d9e45M38unmfmzMyes97tbnbG8e8jHc3MM8/MeXzOs+Pzn+dldkylLK1kLK5kLA0yFpaHYX0lY3GQsbQyZGF5tF7ui/mbjJzr+DEH7BT7k5PcaKfCup3igJ3kajtfy3/ednG6vw9PZ9iVLDGbLzKdLdAbXsDy4aU/43Q6zDY9vStMwDU9P9ouX/ONZZG3klaMyV1ZgDMvwZmjcPrF8BzhM0fDd1TtUr17/yh4LQPZW0PL5tvpxJHw6Jlnvw75EG69KwSpBz98eU2QdeJICLaf+RoMl+D620PQ+rN3b/oZx+7OxUHO+eUBC8sZFy4OOb88qI31HR/oDlalLQ/XbuXdrH4R8MbAtgxyEyv31QLgNGF+pjf6G56fZu/OYjnDtfPTzPTVYiwiIu8cClZFtsP5k3D0X0KL6w//A7LYpXV2z+pxpNe8J3Sh3eCPdnfn3NKQ0xculq21o+XF2vabi+NbG9PEmO2n7JhKmZ1KmZ3qMTu1ejukjdZr+fpxezru6/fYMZUy1UtCy+ebP6p0K47LbLA60GwGlLXgcz6Mgd6uiZ+GK2H25NMvVgLZo/DGS/Vu2Duvq7fC7r0tdCme2/uTB5PZEF78BjzxN/DqE6Hb9fs/A3fct/lnGbdt6cfw/a+EFuIzR2FmN7zvM+G5rXve03bpWBnm5SRXFwcZw9wZZs4wz8v1LHcGeU5WSc9yZ5A5WZ7Hpcf81ePieuOcWRbPV03PYr48583FAafPL3PmwvLYsc27pnvsLYPYGfbunK7frIr7rpqd0iRXIiLSeVsarJrZXcBfASnwJXf/YmP/NPAo8AHgDeDT7n4s7nsA+E0gA37H3R9f670UrMpla/lCGGu6+wDs2N1OEYYZZy6ssLQyDEFnP2V2OmUqTdQ1cSPyLATfp4+uDmRXKq3IM7tD0FqMhy1aYudvmDzz9eJZOPIIfPdLcO54GA9852dDMLedj3HaDu5w7L9Da+sL3witxgc/HFpbb/v4usc9X0my3Dm7sDK2h0XYHqWPGx/fS4w9O5uts9NhGEF1e5daa0VEpD1bFqyaWQocBT4KHAeeBO5x9+creX4beK+7f87MDgF3u/unzexngMeAO4B3A/8K3OruE6fNVLAqIp3lHh4PVASu1UB28Y1Rvv5cGANbDWRn98D3HwstjsOlELT94m/BLR+D5AoIGs6fhP95FJ5+BN56FZJeeFkSXl
hct/hqpier99f2jTumuRxzTNoPE5j1Z0Ovh6m5sKym9WfDmOGJaTvCY6K2+dFcC8X4+DGBbHX5xsLy2EcUzc/0Qivt3BQ/Nd/j2tnQa6LXS+mlKf00od9L6Te304R+akylCf1eUt8u00bbvTR0iVaLr4iIFLYyWP0l4EF3/5W4/QCAu/9JJc/jMc+3zawH/B+wF7i/mreab9L7KVgVkcvSwpk4FvYH9UD23IlRnt4MvPdTcOfnwiOerkR5Bi99C175NngeXjBad6+keyPdJ6TnjWN8HeeKy2wlzP49WBy9VhbXN4FXU29HPYAtgtupxnYZGFe2IbxnNozLQWiJzgaN7ZUx+9Y+xrMB+XCFbDjAs0G53/IhqQ9JWfuxS6u+QjdyDMdwwEnisppe3R/SKdKsul4E+MV6uBnhYc7vNZdebhdp1N67UOY1C9Wgdo7Vx5b/ThJyS8hJ4zIhI8VjWmYpTlIu3RIyeuSWlHlG672w31K8OIeluKXxuFEaMT2vnNfMRpN3mZEwWjcs3sOJS6xcT8o8xX2eMAkYMU/1nAZxO4F47Oi8hrnHT4r4iVJJi5+yVz/ZuM8b35qPy88a+0bngWLkRb0esWo9ntWS0dnjcdW65uUJrZLPGvUwnq84Ju6v3Xqx4rOwehr139hmYI2f3VZJsMZy9O+l/OypvXu1BhdlqGzjq44tz++MzTNx273+bx5zXho5ys+yPJNV0hv5rfr3V/17L46p5q+eZzwtivYAAAdRSURBVNynVp5yTav2++R9zayTzt38zseXrP5mNiZtbKGI3/3Y2G112rU3/zz7Dtw2vqAdsN5gdfKT5EeuB16tbB8H7pyUx92HZvYWcE1Mf6JxbLvPXRAReTvM7Qmvmz5YT794Lkzu9NYrcNOHYO6adsrXFUkKt90VXl2WDRpB7FIIYqtBbS1tCQYLY9IWwxjet07U0waLo0D9UpI+pFOQ9uJ6Py6r271Rem8Kkrky3dI+adInbeZrni9JQ2DpOXmWkxXLPCfPi2VGHsfm5nlGnuchf56T5x6XeZnulXSP+dwdzzNydzx33DPy3HGP+zzHih/GVvmJ646ZN37GFj+oizRv/DRu/JhvBEYWf7Wbx33xYHOnR46Rk/iAxDMScsyzsO45CXmZnpJhHsPZmFbd39vgDQERkc36zukH2Hfg/raLsWnrCVbH3RBY+1bKKM96jsXM7gPuA9i/f/86iiQicpmYmQ+zRN/wgbZLIhuRxiDu7RpHXLTqriyEwBXGB55JeunmgS1khMkpUkAjirdYnofZx/Nh6GHgWVjmMc2r63l9vVT5CVVrXVlPejVLjjvkhBsGedzpuZM7eJHujntchjsHlD/tKi2QtSWj7fJWwqS81dbLokV0jfOGvKMW2LIltugpUfxDq/tjz4pqK3DZU2Pcuaj0vCg/z2J/uJHi5OXnUGYrPxPwauKYv1/36i2UcKraeeK5mg1o3vgeR99D/bjqe9BoxfQxafWWzEq6UUn38S2i8ZbRqhbZauErrfLhbFZPc6fagjs6T72PRHN/syW5Gnqs/uzqCZM7ltoG8tbffbS+zmv2mO947LHN7/gS+Qo3v/syn6wxWk+wehy4sbJ9A/DahDzHYzfgdwFn13ks7v4Q8BCEbsDrLbyIiMhlySzMet2bBq5uuzSyHZIESDoxsVgR/m3vKGsRkY1bz3XqSeAWMztoZlPAIeBwI89h4N64/mvAv3m4rXQYOGRm02Z2ELgF+O7WFF1ERERERETeqS7ZshrHoH4BeJzQM+hhd3/OzP4IeMrdDwNfBv7OzF4mtKgeisc+Z2ZfBZ4HhsDn15oJWERERERERARY33NWt5NmAxYREREREXnnWu9swBquICIiIiIiIp2jYFVEREREREQ6R8GqiIiIiIiIdI6CVREREREREekcBasiIiIiIiLSOQpWRUREREREpHMUrIqIiIiIiEjnKFgVERERERGRzlGwKiIiIiIiIp2jYFVEREREREQ6R8GqiIiIiIiIdI6CVREREREREekcc/e2y1BjZqeBH7VdjkvYA5xpuxAia1Adla5THZWuUx2VrlMdla5bq44ecPe9lzpB54LVy4GZPeXut7ddDpFJVEel61RHpetUR6XrVEel67aijqobsIiIiIiIiHSOglURERERERHpHAWrP5mH2i6AyCWojkrXqY5K16mOStepjkrXbbqOasyqiIiIiIiIdI5aVkVERERERKRzFKxukJndZWY/MLOXzez+tssj0mRmx8zsGTP7npk91XZ5RMzsYTM7ZWbPVtKuNrNvmdlLcXlVm2WUK9uEOvqgmZ2I19LvmdnH2yyjXNnM7EYz+3cze8HMnjOz343pupZKJ6xRRzd1LVU34A0wsxQ4CnwUOA48Cdzj7s+3WjCRCjM7Btzu7nr2mnSCmX0IuAA86u4/F9P+FDjr7l+MN/6ucvffb7OccuWaUEcfBC64+5+1WTYRADPbB+xz9yNmtgt4Gvgk8BvoWiodsEYd/RSbuJaqZXVj7gBedvcfuvsK8BXgEy2XSUSk09z9P4GzjeRPAI/E9UcI/6GJtGJCHRXpDHd/3d2PxPXzwAvA9ehaKh2xRh3dFAWrG3M98Gpl+zhb8CWIbDEHvmlmT5vZfW0XRmSC69z9dQj/wQHXtlwekXG+YGb/G7sJq3uldIKZ3QS8H/gOupZKBzXqKGziWqpgdWNsTJr6UUvXfNDdfwH4VeDzsXubiIhszF8DNwPvA14H/rzd4oiAme0Evgb8nrufa7s8Ik1j6uimrqUKVjfmOHBjZfsG4LWWyiIylru/FpengH8kdF8X6ZqTcXxLMc7lVMvlEalx95Punrl7DvwtupZKy8ysTwgC/t7dvx6TdS2VzhhXRzd7LVWwujFPAreY2UEzmwIOAYdbLpNIyczm4qB2zGwO+Bjw7NpHibTiMHBvXL8X+OcWyyKyShEARHeja6m0yMwM+DLwgrv/RWWXrqXSCZPq6GavpZoNeIPidMt/CaTAw+7+xy0XSaRkZj9NaE0F6AH/oDoqbTOzx4CPAHuAk8AfAv8EfBXYD7wC/Lq7a4IbacWEOvoRQrc1B44Bny3GBopsNzP7ZeC/gGeAPCb/AWFMoK6l0ro16ug9bOJaqmBVREREREREOkfdgEVERERERKRzFKyKiIiIiIhI5yhYFRERERERkc5RsCoiIiIiIiKdo2BVREREREREOkfBqoiIiIiIiHSOglURERERERHpHAWrIiIiIiIi0jn/D2Ud0s9B0D9CAAAAAElFTkSuQmCC\n",
+ "text/plain": [
+ "