
Data Preparation for Timeseries Forecasting

This notebook describes how to prepare data for an RNN/LSTM for a timeseries prediction problem, especially for Keras/TensorFlow.

[1]:
import numpy as np
import itertools

np.set_printoptions(suppress=True) # to suppress scientific notation while printing arrays

Create Data

Create data which is supposed to represent a timeseries prediction problem. The data has 6 columns and 2000 rows. The first five columns are supposed to be input and the last column is supposed to be output.

[2]:
rows = 2000
cols = 6
data = np.arange(int(rows*cols)).reshape(-1,rows).transpose()
print(data[0:20])
print('\n {} \n'.format(data.shape))
print(data[-20:])
[[    0  2000  4000  6000  8000 10000]
 [    1  2001  4001  6001  8001 10001]
 [    2  2002  4002  6002  8002 10002]
 [    3  2003  4003  6003  8003 10003]
 [    4  2004  4004  6004  8004 10004]
 [    5  2005  4005  6005  8005 10005]
 [    6  2006  4006  6006  8006 10006]
 [    7  2007  4007  6007  8007 10007]
 [    8  2008  4008  6008  8008 10008]
 [    9  2009  4009  6009  8009 10009]
 [   10  2010  4010  6010  8010 10010]
 [   11  2011  4011  6011  8011 10011]
 [   12  2012  4012  6012  8012 10012]
 [   13  2013  4013  6013  8013 10013]
 [   14  2014  4014  6014  8014 10014]
 [   15  2015  4015  6015  8015 10015]
 [   16  2016  4016  6016  8016 10016]
 [   17  2017  4017  6017  8017 10017]
 [   18  2018  4018  6018  8018 10018]
 [   19  2019  4019  6019  8019 10019]]

 (2000, 6)

[[ 1980  3980  5980  7980  9980 11980]
 [ 1981  3981  5981  7981  9981 11981]
 [ 1982  3982  5982  7982  9982 11982]
 [ 1983  3983  5983  7983  9983 11983]
 [ 1984  3984  5984  7984  9984 11984]
 [ 1985  3985  5985  7985  9985 11985]
 [ 1986  3986  5986  7986  9986 11986]
 [ 1987  3987  5987  7987  9987 11987]
 [ 1988  3988  5988  7988  9988 11988]
 [ 1989  3989  5989  7989  9989 11989]
 [ 1990  3990  5990  7990  9990 11990]
 [ 1991  3991  5991  7991  9991 11991]
 [ 1992  3992  5992  7992  9992 11992]
 [ 1993  3993  5993  7993  9993 11993]
 [ 1994  3994  5994  7994  9994 11994]
 [ 1995  3995  5995  7995  9995 11995]
 [ 1996  3996  5996  7996  9996 11996]
 [ 1997  3997  5997  7997  9997 11997]
 [ 1998  3998  5998  7998  9998 11998]
 [ 1999  3999  5999  7999  9999 11999]]
[3]:
def first_nan_from_end(ar):
    """
    This function finds the index of the first nan in the group of nans which is present at the end of the input array `ar`.
    Some examples are below
    [np.nan, np.nan, 0, 2, 3, 0, 3, np.nan, np.nan, np.nan, np.nan] >> 7
    [np.nan, np.nan, 1, 2, 3, 0, np.nan, np.nan, np.nan] >> 6
    [0, 2, 3, 0, 3] >> 5
    [np.nan, np.nan, 0,2,3,0,3] >> 7
    """
    last_non_nan = 0

    for idx, val in enumerate(ar[::-1]):  # first find the first non-nan value starting from the end
        if not np.isnan(val):
            last_non_nan = idx
            break
    return ar.shape[0] - last_non_nan


def batch_generator(data, lookback, in_features, out_features, batch_size, step, min_ind, max_ind, future_y_val,
                   norm=None, trim_last_batch=True):
    """
    :param data: `ndarray`, input data.
    :param lookback: `int`, sequence length, number of values the LSTM will see at time `t` to make a prediction at `t+1`.
    :param in_features: `int`, number of columns in `data`, starting from 0, to be considered as input.
    :param out_features: `int`, number of columns in `data`, counted from the last column, to be considered as output/prediction.
    :param batch_size: `int`, number of samples in one batch.
    :param step: `int`, step size within an input window, i.e. the gap between consecutive rows of a window.
    :param min_ind: `int`, row of `data` at which sampling starts.
    :param max_ind: `int`, row of `data` at which sampling stops.
    :param future_y_val: `int`, which future value to predict (1 means the next value).
    :param norm: a dictionary which contains the scaler objects with which to normalize x and y data. We use separate
                 scalers for x and y data. Keys must be `x_scaler` and `y_scaler`.
    :param trim_last_batch: bool, if True, the last batch will be ignored if it contains fewer samples than `batch_size`.
    """

    # selecting the data of interest for x and y
    X = data[min_ind:max_ind, 0:in_features]
    Y = data[min_ind:max_ind, -out_features:].reshape(-1,out_features)

    # normalizing both x and y data
    if norm:
        x_scaler = norm['x_scaler']
        y_scaler = norm['y_scaler']
        X = x_scaler.fit_transform(X)
        Y = y_scaler.fit_transform(Y)

    # container for keeping x and y windows. A `window` is here defined as one complete set of data at one timestep.
    x_wins = np.full((X.shape[0], lookback, in_features), np.nan, dtype=np.float32)
    y_wins = np.full((Y.shape[0], out_features), np.nan)

    # creating windows from X data
    st = lookback*step - step                 # starting point of sampling from data may not start from 0
    for j in range(st, X.shape[0]-lookback):
        en = j - lookback*step
        indices = np.arange(j, en, -step)
        ind = np.flip(indices)
        x_wins[j,:,:] = X[ind,:]

    # creating windows from Y data
    for i in range(0, Y.shape[0]-lookback):
        y_wins[i,:] = Y[i+lookback,:]



    """removing trailing nans or nans which are present at the end part of array"""
    first_nan_at_end = first_nan_from_end(y_wins[:,0])  # first nan in last part of data, start skipping from here
    y_wins = y_wins[0:first_nan_at_end,:]
    x_wins = x_wins[0:first_nan_at_end,:]

    """removing nans from start"""
    y_val = st-lookback + future_y_val
    if st>0:                              # if some values from start were skipped, we need to remove nans from those places
        x_wins = x_wins[st:,:]
        y_wins = y_wins[y_val:,:]


    print("""shape of x data: {} \nshape of y data: {}""".format(x_wins.shape, y_wins.shape))


    print(""".\n{} values are skipped from start and {} values are skipped from end in output array"""
          .format(st, X.shape[0]-first_nan_at_end))

    pot_samples = x_wins.shape[0]  # potential samples

    print('\npotential samples are {}'.format(pot_samples))

    residue = pot_samples % batch_size
    print('\nresidue is {} '.format(residue))

    samples = pot_samples - residue
    print('\nActual samples are {}'.format(samples))

    interval = np.arange(0, samples + batch_size, batch_size)
    print('\nPotential intervals: {}'.format(interval ))

    if residue > 0:
        interval = np.append(interval, pot_samples)
    print('\nActual interval: {} '.format(interval))

    # The last batch may have fewer samples than the other batches. We can skip that incomplete batch.
    # Keeping it would only be possible if we saved our batches in a list, i.e. if 'x_batches' were a list.
    if trim_last_batch:   # TODO: this must be obligatory when saving batches in a numpy array
        no_of_batches = len(interval)-2
    else:
        no_of_batches = len(interval) - 1

    print('\nNumber of batches are {} '.format(no_of_batches))

    # container for batches
    x_batches = np.full((no_of_batches, batch_size, lookback, in_features), np.nan)
    y_batches = np.full((no_of_batches, batch_size, out_features), np.nan)


    for b in range(no_of_batches):
        st = interval[b]
        en = interval[b + 1]
        an_x_batch = x_wins[st:en, :, :]
        x_batches[b] = an_x_batch
       # y_batches[b] = y_wins[st:en]
        y_batches[b] = y_wins[st+1:en+1]


    print('\nshape of batches for:')
    print('x_data ', ' y_data')
    for i,j in zip(x_batches, y_batches):
        ishp, jshp = None, None
        if isinstance(i, np.ndarray):
            ishp = i.shape
        if isinstance(j, np.ndarray):
            jshp = j.shape
        print(ishp, jshp)

    return x_batches, y_batches




_lookback=7  # sequence length
input_features = 5  # number of columns in the dataset to be used as input
output_features = 1 # number of columns to be used as output
_batch_size = 16
input_stepsize = 2
st_ind = 0
end_ind = 600
t_plus_ith_val = 1 # which future value to predict; e.g. if the input is 11,12,13,14, the default value of 1 means we
                   # want to predict 15, while setting it to 3 means we want to predict 17.

train_x_batches, train_y_batches = batch_generator(data, _lookback, input_features, output_features, _batch_size,
                                    input_stepsize, st_ind, end_ind, t_plus_ith_val,
                                    trim_last_batch = True)

test_x_batches, test_y_batches = batch_generator(data, _lookback, input_features, output_features, _batch_size,
                                    input_stepsize,
                                    min_ind = 600,
                                    max_ind = 800,
                                    future_y_val = t_plus_ith_val,
                                    trim_last_batch = True)

shape of x data: (581, 7, 5)
shape of y data: (587, 1)
.
12 values are skipped from start and 7 values are skipped from end in output array

potential samples are 581

residue is 5

Actual samples are 576

Potential intervals: [  0  16  32  48  64  80  96 112 128 144 160 176 192 208 224 240 256 272
 288 304 320 336 352 368 384 400 416 432 448 464 480 496 512 528 544 560
 576]

Actual interval: [  0  16  32  48  64  80  96 112 128 144 160 176 192 208 224 240 256 272
 288 304 320 336 352 368 384 400 416 432 448 464 480 496 512 528 544 560
 576 581]

Number of batches are 36

shape of batches for:
x_data   y_data
(16, 7, 5) (16, 1)
(16, 7, 5) (16, 1)
(16, 7, 5) (16, 1)
(16, 7, 5) (16, 1)
(16, 7, 5) (16, 1)
(16, 7, 5) (16, 1)
(16, 7, 5) (16, 1)
(16, 7, 5) (16, 1)
(16, 7, 5) (16, 1)
(16, 7, 5) (16, 1)
(16, 7, 5) (16, 1)
(16, 7, 5) (16, 1)
(16, 7, 5) (16, 1)
(16, 7, 5) (16, 1)
(16, 7, 5) (16, 1)
(16, 7, 5) (16, 1)
(16, 7, 5) (16, 1)
(16, 7, 5) (16, 1)
(16, 7, 5) (16, 1)
(16, 7, 5) (16, 1)
(16, 7, 5) (16, 1)
(16, 7, 5) (16, 1)
(16, 7, 5) (16, 1)
(16, 7, 5) (16, 1)
(16, 7, 5) (16, 1)
(16, 7, 5) (16, 1)
(16, 7, 5) (16, 1)
(16, 7, 5) (16, 1)
(16, 7, 5) (16, 1)
(16, 7, 5) (16, 1)
(16, 7, 5) (16, 1)
(16, 7, 5) (16, 1)
(16, 7, 5) (16, 1)
(16, 7, 5) (16, 1)
(16, 7, 5) (16, 1)
(16, 7, 5) (16, 1)
shape of x data: (181, 7, 5)
shape of y data: (187, 1)
.
12 values are skipped from start and 7 values are skipped from end in output array

potential samples are 181

residue is 5

Actual samples are 176

Potential intervals: [  0  16  32  48  64  80  96 112 128 144 160 176]

Actual interval: [  0  16  32  48  64  80  96 112 128 144 160 176 181]

Number of batches are 11

shape of batches for:
x_data   y_data
(16, 7, 5) (16, 1)
(16, 7, 5) (16, 1)
(16, 7, 5) (16, 1)
(16, 7, 5) (16, 1)
(16, 7, 5) (16, 1)
(16, 7, 5) (16, 1)
(16, 7, 5) (16, 1)
(16, 7, 5) (16, 1)
(16, 7, 5) (16, 1)
(16, 7, 5) (16, 1)
(16, 7, 5) (16, 1)

first train batch

We can look at the first training batch, which consists of 16 windows where each window has shape [7, 5]. One training window is fed at one time step and each window has a corresponding prediction. Also look at where each window starts and ends and at the y value for each window. Values in the rows of a window are not continuous because of input_stepsize.
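To see where these gaps come from, the index arithmetic used inside batch_generator can be pulled out on its own. The following is only an illustrative sketch with the settings used above (lookback=7, step=2); it is not an additional cell from the original run.

lookback, step = 7, 2
j = lookback*step - step                                   # 12, first position at which a full window fits
indices = np.flip(np.arange(j, j - lookback*step, -step))
print(indices)                                             # [ 0  2  4  6  8 10 12] -> rows of the first window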

[4]:
for inp,out in zip(train_x_batches[0], train_y_batches[0]):
    print(inp,out, '\n')
[[   0. 2000. 4000. 6000. 8000.]
 [   2. 2002. 4002. 6002. 8002.]
 [   4. 2004. 4004. 6004. 8004.]
 [   6. 2006. 4006. 6006. 8006.]
 [   8. 2008. 4008. 6008. 8008.]
 [  10. 2010. 4010. 6010. 8010.]
 [  12. 2012. 4012. 6012. 8012.]] [10014.]

[[   1. 2001. 4001. 6001. 8001.]
 [   3. 2003. 4003. 6003. 8003.]
 [   5. 2005. 4005. 6005. 8005.]
 [   7. 2007. 4007. 6007. 8007.]
 [   9. 2009. 4009. 6009. 8009.]
 [  11. 2011. 4011. 6011. 8011.]
 [  13. 2013. 4013. 6013. 8013.]] [10015.]

[[   2. 2002. 4002. 6002. 8002.]
 [   4. 2004. 4004. 6004. 8004.]
 [   6. 2006. 4006. 6006. 8006.]
 [   8. 2008. 4008. 6008. 8008.]
 [  10. 2010. 4010. 6010. 8010.]
 [  12. 2012. 4012. 6012. 8012.]
 [  14. 2014. 4014. 6014. 8014.]] [10016.]

[[   3. 2003. 4003. 6003. 8003.]
 [   5. 2005. 4005. 6005. 8005.]
 [   7. 2007. 4007. 6007. 8007.]
 [   9. 2009. 4009. 6009. 8009.]
 [  11. 2011. 4011. 6011. 8011.]
 [  13. 2013. 4013. 6013. 8013.]
 [  15. 2015. 4015. 6015. 8015.]] [10017.]

[[   4. 2004. 4004. 6004. 8004.]
 [   6. 2006. 4006. 6006. 8006.]
 [   8. 2008. 4008. 6008. 8008.]
 [  10. 2010. 4010. 6010. 8010.]
 [  12. 2012. 4012. 6012. 8012.]
 [  14. 2014. 4014. 6014. 8014.]
 [  16. 2016. 4016. 6016. 8016.]] [10018.]

[[   5. 2005. 4005. 6005. 8005.]
 [   7. 2007. 4007. 6007. 8007.]
 [   9. 2009. 4009. 6009. 8009.]
 [  11. 2011. 4011. 6011. 8011.]
 [  13. 2013. 4013. 6013. 8013.]
 [  15. 2015. 4015. 6015. 8015.]
 [  17. 2017. 4017. 6017. 8017.]] [10019.]

[[   6. 2006. 4006. 6006. 8006.]
 [   8. 2008. 4008. 6008. 8008.]
 [  10. 2010. 4010. 6010. 8010.]
 [  12. 2012. 4012. 6012. 8012.]
 [  14. 2014. 4014. 6014. 8014.]
 [  16. 2016. 4016. 6016. 8016.]
 [  18. 2018. 4018. 6018. 8018.]] [10020.]

[[   7. 2007. 4007. 6007. 8007.]
 [   9. 2009. 4009. 6009. 8009.]
 [  11. 2011. 4011. 6011. 8011.]
 [  13. 2013. 4013. 6013. 8013.]
 [  15. 2015. 4015. 6015. 8015.]
 [  17. 2017. 4017. 6017. 8017.]
 [  19. 2019. 4019. 6019. 8019.]] [10021.]

[[   8. 2008. 4008. 6008. 8008.]
 [  10. 2010. 4010. 6010. 8010.]
 [  12. 2012. 4012. 6012. 8012.]
 [  14. 2014. 4014. 6014. 8014.]
 [  16. 2016. 4016. 6016. 8016.]
 [  18. 2018. 4018. 6018. 8018.]
 [  20. 2020. 4020. 6020. 8020.]] [10022.]

[[   9. 2009. 4009. 6009. 8009.]
 [  11. 2011. 4011. 6011. 8011.]
 [  13. 2013. 4013. 6013. 8013.]
 [  15. 2015. 4015. 6015. 8015.]
 [  17. 2017. 4017. 6017. 8017.]
 [  19. 2019. 4019. 6019. 8019.]
 [  21. 2021. 4021. 6021. 8021.]] [10023.]

[[  10. 2010. 4010. 6010. 8010.]
 [  12. 2012. 4012. 6012. 8012.]
 [  14. 2014. 4014. 6014. 8014.]
 [  16. 2016. 4016. 6016. 8016.]
 [  18. 2018. 4018. 6018. 8018.]
 [  20. 2020. 4020. 6020. 8020.]
 [  22. 2022. 4022. 6022. 8022.]] [10024.]

[[  11. 2011. 4011. 6011. 8011.]
 [  13. 2013. 4013. 6013. 8013.]
 [  15. 2015. 4015. 6015. 8015.]
 [  17. 2017. 4017. 6017. 8017.]
 [  19. 2019. 4019. 6019. 8019.]
 [  21. 2021. 4021. 6021. 8021.]
 [  23. 2023. 4023. 6023. 8023.]] [10025.]

[[  12. 2012. 4012. 6012. 8012.]
 [  14. 2014. 4014. 6014. 8014.]
 [  16. 2016. 4016. 6016. 8016.]
 [  18. 2018. 4018. 6018. 8018.]
 [  20. 2020. 4020. 6020. 8020.]
 [  22. 2022. 4022. 6022. 8022.]
 [  24. 2024. 4024. 6024. 8024.]] [10026.]

[[  13. 2013. 4013. 6013. 8013.]
 [  15. 2015. 4015. 6015. 8015.]
 [  17. 2017. 4017. 6017. 8017.]
 [  19. 2019. 4019. 6019. 8019.]
 [  21. 2021. 4021. 6021. 8021.]
 [  23. 2023. 4023. 6023. 8023.]
 [  25. 2025. 4025. 6025. 8025.]] [10027.]

[[  14. 2014. 4014. 6014. 8014.]
 [  16. 2016. 4016. 6016. 8016.]
 [  18. 2018. 4018. 6018. 8018.]
 [  20. 2020. 4020. 6020. 8020.]
 [  22. 2022. 4022. 6022. 8022.]
 [  24. 2024. 4024. 6024. 8024.]
 [  26. 2026. 4026. 6026. 8026.]] [10028.]

[[  15. 2015. 4015. 6015. 8015.]
 [  17. 2017. 4017. 6017. 8017.]
 [  19. 2019. 4019. 6019. 8019.]
 [  21. 2021. 4021. 6021. 8021.]
 [  23. 2023. 4023. 6023. 8023.]
 [  25. 2025. 4025. 6025. 8025.]
 [  27. 2027. 4027. 6027. 8027.]] [10029.]

We had to skip certain values from the start because of the lookback size.

second train batch

Windows in the second training batch start exactly where the windows in the first batch ended.
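A quick way to verify this (an illustrative check, not an original cell) is that batch b covers windows interval[b] to interval[b+1], so the second batch begins at window 16, whose first input row is data row 16:

print(train_x_batches[1][0, 0, :])   # [  16. 2016. 4016. 6016. 8016.] -> continues right after batch 0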

[5]:
for inp,out in zip(train_x_batches[1], train_y_batches[1]):
    print(inp,out, '\n')
[[  16. 2016. 4016. 6016. 8016.]
 [  18. 2018. 4018. 6018. 8018.]
 [  20. 2020. 4020. 6020. 8020.]
 [  22. 2022. 4022. 6022. 8022.]
 [  24. 2024. 4024. 6024. 8024.]
 [  26. 2026. 4026. 6026. 8026.]
 [  28. 2028. 4028. 6028. 8028.]] [10030.]

[[  17. 2017. 4017. 6017. 8017.]
 [  19. 2019. 4019. 6019. 8019.]
 [  21. 2021. 4021. 6021. 8021.]
 [  23. 2023. 4023. 6023. 8023.]
 [  25. 2025. 4025. 6025. 8025.]
 [  27. 2027. 4027. 6027. 8027.]
 [  29. 2029. 4029. 6029. 8029.]] [10031.]

[[  18. 2018. 4018. 6018. 8018.]
 [  20. 2020. 4020. 6020. 8020.]
 [  22. 2022. 4022. 6022. 8022.]
 [  24. 2024. 4024. 6024. 8024.]
 [  26. 2026. 4026. 6026. 8026.]
 [  28. 2028. 4028. 6028. 8028.]
 [  30. 2030. 4030. 6030. 8030.]] [10032.]

[[  19. 2019. 4019. 6019. 8019.]
 [  21. 2021. 4021. 6021. 8021.]
 [  23. 2023. 4023. 6023. 8023.]
 [  25. 2025. 4025. 6025. 8025.]
 [  27. 2027. 4027. 6027. 8027.]
 [  29. 2029. 4029. 6029. 8029.]
 [  31. 2031. 4031. 6031. 8031.]] [10033.]

[[  20. 2020. 4020. 6020. 8020.]
 [  22. 2022. 4022. 6022. 8022.]
 [  24. 2024. 4024. 6024. 8024.]
 [  26. 2026. 4026. 6026. 8026.]
 [  28. 2028. 4028. 6028. 8028.]
 [  30. 2030. 4030. 6030. 8030.]
 [  32. 2032. 4032. 6032. 8032.]] [10034.]

[[  21. 2021. 4021. 6021. 8021.]
 [  23. 2023. 4023. 6023. 8023.]
 [  25. 2025. 4025. 6025. 8025.]
 [  27. 2027. 4027. 6027. 8027.]
 [  29. 2029. 4029. 6029. 8029.]
 [  31. 2031. 4031. 6031. 8031.]
 [  33. 2033. 4033. 6033. 8033.]] [10035.]

[[  22. 2022. 4022. 6022. 8022.]
 [  24. 2024. 4024. 6024. 8024.]
 [  26. 2026. 4026. 6026. 8026.]
 [  28. 2028. 4028. 6028. 8028.]
 [  30. 2030. 4030. 6030. 8030.]
 [  32. 2032. 4032. 6032. 8032.]
 [  34. 2034. 4034. 6034. 8034.]] [10036.]

[[  23. 2023. 4023. 6023. 8023.]
 [  25. 2025. 4025. 6025. 8025.]
 [  27. 2027. 4027. 6027. 8027.]
 [  29. 2029. 4029. 6029. 8029.]
 [  31. 2031. 4031. 6031. 8031.]
 [  33. 2033. 4033. 6033. 8033.]
 [  35. 2035. 4035. 6035. 8035.]] [10037.]

[[  24. 2024. 4024. 6024. 8024.]
 [  26. 2026. 4026. 6026. 8026.]
 [  28. 2028. 4028. 6028. 8028.]
 [  30. 2030. 4030. 6030. 8030.]
 [  32. 2032. 4032. 6032. 8032.]
 [  34. 2034. 4034. 6034. 8034.]
 [  36. 2036. 4036. 6036. 8036.]] [10038.]

[[  25. 2025. 4025. 6025. 8025.]
 [  27. 2027. 4027. 6027. 8027.]
 [  29. 2029. 4029. 6029. 8029.]
 [  31. 2031. 4031. 6031. 8031.]
 [  33. 2033. 4033. 6033. 8033.]
 [  35. 2035. 4035. 6035. 8035.]
 [  37. 2037. 4037. 6037. 8037.]] [10039.]

[[  26. 2026. 4026. 6026. 8026.]
 [  28. 2028. 4028. 6028. 8028.]
 [  30. 2030. 4030. 6030. 8030.]
 [  32. 2032. 4032. 6032. 8032.]
 [  34. 2034. 4034. 6034. 8034.]
 [  36. 2036. 4036. 6036. 8036.]
 [  38. 2038. 4038. 6038. 8038.]] [10040.]

[[  27. 2027. 4027. 6027. 8027.]
 [  29. 2029. 4029. 6029. 8029.]
 [  31. 2031. 4031. 6031. 8031.]
 [  33. 2033. 4033. 6033. 8033.]
 [  35. 2035. 4035. 6035. 8035.]
 [  37. 2037. 4037. 6037. 8037.]
 [  39. 2039. 4039. 6039. 8039.]] [10041.]

[[  28. 2028. 4028. 6028. 8028.]
 [  30. 2030. 4030. 6030. 8030.]
 [  32. 2032. 4032. 6032. 8032.]
 [  34. 2034. 4034. 6034. 8034.]
 [  36. 2036. 4036. 6036. 8036.]
 [  38. 2038. 4038. 6038. 8038.]
 [  40. 2040. 4040. 6040. 8040.]] [10042.]

[[  29. 2029. 4029. 6029. 8029.]
 [  31. 2031. 4031. 6031. 8031.]
 [  33. 2033. 4033. 6033. 8033.]
 [  35. 2035. 4035. 6035. 8035.]
 [  37. 2037. 4037. 6037. 8037.]
 [  39. 2039. 4039. 6039. 8039.]
 [  41. 2041. 4041. 6041. 8041.]] [10043.]

[[  30. 2030. 4030. 6030. 8030.]
 [  32. 2032. 4032. 6032. 8032.]
 [  34. 2034. 4034. 6034. 8034.]
 [  36. 2036. 4036. 6036. 8036.]
 [  38. 2038. 4038. 6038. 8038.]
 [  40. 2040. 4040. 6040. 8040.]
 [  42. 2042. 4042. 6042. 8042.]] [10044.]

[[  31. 2031. 4031. 6031. 8031.]
 [  33. 2033. 4033. 6033. 8033.]
 [  35. 2035. 4035. 6035. 8035.]
 [  37. 2037. 4037. 6037. 8037.]
 [  39. 2039. 4039. 6039. 8039.]
 [  41. 2041. 4041. 6041. 8041.]
 [  43. 2043. 4043. 6043. 8043.]] [10045.]

last train batch

Some values from the end are also missing because we skipped the last batch, which did not have the shape [16, 7, 5].
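As a quick sanity check (an illustrative sketch, not a cell from the original run), the trimming arithmetic for the training split can be reproduced from the numbers printed earlier:

potential_samples = 581                              # windows left after skipping 12 values at the start and 7 at the end
residue = potential_samples % _batch_size            # 5 windows that do not fill a complete batch
full_batches = potential_samples // _batch_size      # 36 complete batches of 16 windows
print(residue, full_batches)                         # 5 36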

[6]:
for inp,out in zip(train_x_batches[-1], train_y_batches[-1]):
    print(inp,out, '\n')
[[ 560. 2560. 4560. 6560. 8560.]
 [ 562. 2562. 4562. 6562. 8562.]
 [ 564. 2564. 4564. 6564. 8564.]
 [ 566. 2566. 4566. 6566. 8566.]
 [ 568. 2568. 4568. 6568. 8568.]
 [ 570. 2570. 4570. 6570. 8570.]
 [ 572. 2572. 4572. 6572. 8572.]] [10574.]

[[ 561. 2561. 4561. 6561. 8561.]
 [ 563. 2563. 4563. 6563. 8563.]
 [ 565. 2565. 4565. 6565. 8565.]
 [ 567. 2567. 4567. 6567. 8567.]
 [ 569. 2569. 4569. 6569. 8569.]
 [ 571. 2571. 4571. 6571. 8571.]
 [ 573. 2573. 4573. 6573. 8573.]] [10575.]

[[ 562. 2562. 4562. 6562. 8562.]
 [ 564. 2564. 4564. 6564. 8564.]
 [ 566. 2566. 4566. 6566. 8566.]
 [ 568. 2568. 4568. 6568. 8568.]
 [ 570. 2570. 4570. 6570. 8570.]
 [ 572. 2572. 4572. 6572. 8572.]
 [ 574. 2574. 4574. 6574. 8574.]] [10576.]

[[ 563. 2563. 4563. 6563. 8563.]
 [ 565. 2565. 4565. 6565. 8565.]
 [ 567. 2567. 4567. 6567. 8567.]
 [ 569. 2569. 4569. 6569. 8569.]
 [ 571. 2571. 4571. 6571. 8571.]
 [ 573. 2573. 4573. 6573. 8573.]
 [ 575. 2575. 4575. 6575. 8575.]] [10577.]

[[ 564. 2564. 4564. 6564. 8564.]
 [ 566. 2566. 4566. 6566. 8566.]
 [ 568. 2568. 4568. 6568. 8568.]
 [ 570. 2570. 4570. 6570. 8570.]
 [ 572. 2572. 4572. 6572. 8572.]
 [ 574. 2574. 4574. 6574. 8574.]
 [ 576. 2576. 4576. 6576. 8576.]] [10578.]

[[ 565. 2565. 4565. 6565. 8565.]
 [ 567. 2567. 4567. 6567. 8567.]
 [ 569. 2569. 4569. 6569. 8569.]
 [ 571. 2571. 4571. 6571. 8571.]
 [ 573. 2573. 4573. 6573. 8573.]
 [ 575. 2575. 4575. 6575. 8575.]
 [ 577. 2577. 4577. 6577. 8577.]] [10579.]

[[ 566. 2566. 4566. 6566. 8566.]
 [ 568. 2568. 4568. 6568. 8568.]
 [ 570. 2570. 4570. 6570. 8570.]
 [ 572. 2572. 4572. 6572. 8572.]
 [ 574. 2574. 4574. 6574. 8574.]
 [ 576. 2576. 4576. 6576. 8576.]
 [ 578. 2578. 4578. 6578. 8578.]] [10580.]

[[ 567. 2567. 4567. 6567. 8567.]
 [ 569. 2569. 4569. 6569. 8569.]
 [ 571. 2571. 4571. 6571. 8571.]
 [ 573. 2573. 4573. 6573. 8573.]
 [ 575. 2575. 4575. 6575. 8575.]
 [ 577. 2577. 4577. 6577. 8577.]
 [ 579. 2579. 4579. 6579. 8579.]] [10581.]

[[ 568. 2568. 4568. 6568. 8568.]
 [ 570. 2570. 4570. 6570. 8570.]
 [ 572. 2572. 4572. 6572. 8572.]
 [ 574. 2574. 4574. 6574. 8574.]
 [ 576. 2576. 4576. 6576. 8576.]
 [ 578. 2578. 4578. 6578. 8578.]
 [ 580. 2580. 4580. 6580. 8580.]] [10582.]

[[ 569. 2569. 4569. 6569. 8569.]
 [ 571. 2571. 4571. 6571. 8571.]
 [ 573. 2573. 4573. 6573. 8573.]
 [ 575. 2575. 4575. 6575. 8575.]
 [ 577. 2577. 4577. 6577. 8577.]
 [ 579. 2579. 4579. 6579. 8579.]
 [ 581. 2581. 4581. 6581. 8581.]] [10583.]

[[ 570. 2570. 4570. 6570. 8570.]
 [ 572. 2572. 4572. 6572. 8572.]
 [ 574. 2574. 4574. 6574. 8574.]
 [ 576. 2576. 4576. 6576. 8576.]
 [ 578. 2578. 4578. 6578. 8578.]
 [ 580. 2580. 4580. 6580. 8580.]
 [ 582. 2582. 4582. 6582. 8582.]] [10584.]

[[ 571. 2571. 4571. 6571. 8571.]
 [ 573. 2573. 4573. 6573. 8573.]
 [ 575. 2575. 4575. 6575. 8575.]
 [ 577. 2577. 4577. 6577. 8577.]
 [ 579. 2579. 4579. 6579. 8579.]
 [ 581. 2581. 4581. 6581. 8581.]
 [ 583. 2583. 4583. 6583. 8583.]] [10585.]

[[ 572. 2572. 4572. 6572. 8572.]
 [ 574. 2574. 4574. 6574. 8574.]
 [ 576. 2576. 4576. 6576. 8576.]
 [ 578. 2578. 4578. 6578. 8578.]
 [ 580. 2580. 4580. 6580. 8580.]
 [ 582. 2582. 4582. 6582. 8582.]
 [ 584. 2584. 4584. 6584. 8584.]] [10586.]

[[ 573. 2573. 4573. 6573. 8573.]
 [ 575. 2575. 4575. 6575. 8575.]
 [ 577. 2577. 4577. 6577. 8577.]
 [ 579. 2579. 4579. 6579. 8579.]
 [ 581. 2581. 4581. 6581. 8581.]
 [ 583. 2583. 4583. 6583. 8583.]
 [ 585. 2585. 4585. 6585. 8585.]] [10587.]

[[ 574. 2574. 4574. 6574. 8574.]
 [ 576. 2576. 4576. 6576. 8576.]
 [ 578. 2578. 4578. 6578. 8578.]
 [ 580. 2580. 4580. 6580. 8580.]
 [ 582. 2582. 4582. 6582. 8582.]
 [ 584. 2584. 4584. 6584. 8584.]
 [ 586. 2586. 4586. 6586. 8586.]] [10588.]

[[ 575. 2575. 4575. 6575. 8575.]
 [ 577. 2577. 4577. 6577. 8577.]
 [ 579. 2579. 4579. 6579. 8579.]
 [ 581. 2581. 4581. 6581. 8581.]
 [ 583. 2583. 4583. 6583. 8583.]
 [ 585. 2585. 4585. 6585. 8585.]
 [ 587. 2587. 4587. 6587. 8587.]] [10589.]

first test batch

The start of the test data depends upon the values of min_ind and max_ind.
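As a small check (a sketch, not part of the original run), the first row of the first test window should simply be row min_ind of the original data:

print(data[600, 0:input_features])   # [ 600 2600 4600 6600 8600] -> matches the first row printed below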

[7]:
for inp,out in zip(test_x_batches[0], test_y_batches[0]):
    print(inp,out, '\n')
[[ 600. 2600. 4600. 6600. 8600.]
 [ 602. 2602. 4602. 6602. 8602.]
 [ 604. 2604. 4604. 6604. 8604.]
 [ 606. 2606. 4606. 6606. 8606.]
 [ 608. 2608. 4608. 6608. 8608.]
 [ 610. 2610. 4610. 6610. 8610.]
 [ 612. 2612. 4612. 6612. 8612.]] [10614.]

[[ 601. 2601. 4601. 6601. 8601.]
 [ 603. 2603. 4603. 6603. 8603.]
 [ 605. 2605. 4605. 6605. 8605.]
 [ 607. 2607. 4607. 6607. 8607.]
 [ 609. 2609. 4609. 6609. 8609.]
 [ 611. 2611. 4611. 6611. 8611.]
 [ 613. 2613. 4613. 6613. 8613.]] [10615.]

[[ 602. 2602. 4602. 6602. 8602.]
 [ 604. 2604. 4604. 6604. 8604.]
 [ 606. 2606. 4606. 6606. 8606.]
 [ 608. 2608. 4608. 6608. 8608.]
 [ 610. 2610. 4610. 6610. 8610.]
 [ 612. 2612. 4612. 6612. 8612.]
 [ 614. 2614. 4614. 6614. 8614.]] [10616.]

[[ 603. 2603. 4603. 6603. 8603.]
 [ 605. 2605. 4605. 6605. 8605.]
 [ 607. 2607. 4607. 6607. 8607.]
 [ 609. 2609. 4609. 6609. 8609.]
 [ 611. 2611. 4611. 6611. 8611.]
 [ 613. 2613. 4613. 6613. 8613.]
 [ 615. 2615. 4615. 6615. 8615.]] [10617.]

[[ 604. 2604. 4604. 6604. 8604.]
 [ 606. 2606. 4606. 6606. 8606.]
 [ 608. 2608. 4608. 6608. 8608.]
 [ 610. 2610. 4610. 6610. 8610.]
 [ 612. 2612. 4612. 6612. 8612.]
 [ 614. 2614. 4614. 6614. 8614.]
 [ 616. 2616. 4616. 6616. 8616.]] [10618.]

[[ 605. 2605. 4605. 6605. 8605.]
 [ 607. 2607. 4607. 6607. 8607.]
 [ 609. 2609. 4609. 6609. 8609.]
 [ 611. 2611. 4611. 6611. 8611.]
 [ 613. 2613. 4613. 6613. 8613.]
 [ 615. 2615. 4615. 6615. 8615.]
 [ 617. 2617. 4617. 6617. 8617.]] [10619.]

[[ 606. 2606. 4606. 6606. 8606.]
 [ 608. 2608. 4608. 6608. 8608.]
 [ 610. 2610. 4610. 6610. 8610.]
 [ 612. 2612. 4612. 6612. 8612.]
 [ 614. 2614. 4614. 6614. 8614.]
 [ 616. 2616. 4616. 6616. 8616.]
 [ 618. 2618. 4618. 6618. 8618.]] [10620.]

[[ 607. 2607. 4607. 6607. 8607.]
 [ 609. 2609. 4609. 6609. 8609.]
 [ 611. 2611. 4611. 6611. 8611.]
 [ 613. 2613. 4613. 6613. 8613.]
 [ 615. 2615. 4615. 6615. 8615.]
 [ 617. 2617. 4617. 6617. 8617.]
 [ 619. 2619. 4619. 6619. 8619.]] [10621.]

[[ 608. 2608. 4608. 6608. 8608.]
 [ 610. 2610. 4610. 6610. 8610.]
 [ 612. 2612. 4612. 6612. 8612.]
 [ 614. 2614. 4614. 6614. 8614.]
 [ 616. 2616. 4616. 6616. 8616.]
 [ 618. 2618. 4618. 6618. 8618.]
 [ 620. 2620. 4620. 6620. 8620.]] [10622.]

[[ 609. 2609. 4609. 6609. 8609.]
 [ 611. 2611. 4611. 6611. 8611.]
 [ 613. 2613. 4613. 6613. 8613.]
 [ 615. 2615. 4615. 6615. 8615.]
 [ 617. 2617. 4617. 6617. 8617.]
 [ 619. 2619. 4619. 6619. 8619.]
 [ 621. 2621. 4621. 6621. 8621.]] [10623.]

[[ 610. 2610. 4610. 6610. 8610.]
 [ 612. 2612. 4612. 6612. 8612.]
 [ 614. 2614. 4614. 6614. 8614.]
 [ 616. 2616. 4616. 6616. 8616.]
 [ 618. 2618. 4618. 6618. 8618.]
 [ 620. 2620. 4620. 6620. 8620.]
 [ 622. 2622. 4622. 6622. 8622.]] [10624.]

[[ 611. 2611. 4611. 6611. 8611.]
 [ 613. 2613. 4613. 6613. 8613.]
 [ 615. 2615. 4615. 6615. 8615.]
 [ 617. 2617. 4617. 6617. 8617.]
 [ 619. 2619. 4619. 6619. 8619.]
 [ 621. 2621. 4621. 6621. 8621.]
 [ 623. 2623. 4623. 6623. 8623.]] [10625.]

[[ 612. 2612. 4612. 6612. 8612.]
 [ 614. 2614. 4614. 6614. 8614.]
 [ 616. 2616. 4616. 6616. 8616.]
 [ 618. 2618. 4618. 6618. 8618.]
 [ 620. 2620. 4620. 6620. 8620.]
 [ 622. 2622. 4622. 6622. 8622.]
 [ 624. 2624. 4624. 6624. 8624.]] [10626.]

[[ 613. 2613. 4613. 6613. 8613.]
 [ 615. 2615. 4615. 6615. 8615.]
 [ 617. 2617. 4617. 6617. 8617.]
 [ 619. 2619. 4619. 6619. 8619.]
 [ 621. 2621. 4621. 6621. 8621.]
 [ 623. 2623. 4623. 6623. 8623.]
 [ 625. 2625. 4625. 6625. 8625.]] [10627.]

[[ 614. 2614. 4614. 6614. 8614.]
 [ 616. 2616. 4616. 6616. 8616.]
 [ 618. 2618. 4618. 6618. 8618.]
 [ 620. 2620. 4620. 6620. 8620.]
 [ 622. 2622. 4622. 6622. 8622.]
 [ 624. 2624. 4624. 6624. 8624.]
 [ 626. 2626. 4626. 6626. 8626.]] [10628.]

[[ 615. 2615. 4615. 6615. 8615.]
 [ 617. 2617. 4617. 6617. 8617.]
 [ 619. 2619. 4619. 6619. 8619.]
 [ 621. 2621. 4621. 6621. 8621.]
 [ 623. 2623. 4623. 6623. 8623.]
 [ 625. 2625. 4625. 6625. 8625.]
 [ 627. 2627. 4627. 6627. 8627.]] [10629.]

second test batch

[8]:
for inp,out in zip(test_x_batches[1], test_y_batches[1]):
    print(inp,out, '\n')
[[ 616. 2616. 4616. 6616. 8616.]
 [ 618. 2618. 4618. 6618. 8618.]
 [ 620. 2620. 4620. 6620. 8620.]
 [ 622. 2622. 4622. 6622. 8622.]
 [ 624. 2624. 4624. 6624. 8624.]
 [ 626. 2626. 4626. 6626. 8626.]
 [ 628. 2628. 4628. 6628. 8628.]] [10630.]

[[ 617. 2617. 4617. 6617. 8617.]
 [ 619. 2619. 4619. 6619. 8619.]
 [ 621. 2621. 4621. 6621. 8621.]
 [ 623. 2623. 4623. 6623. 8623.]
 [ 625. 2625. 4625. 6625. 8625.]
 [ 627. 2627. 4627. 6627. 8627.]
 [ 629. 2629. 4629. 6629. 8629.]] [10631.]

[[ 618. 2618. 4618. 6618. 8618.]
 [ 620. 2620. 4620. 6620. 8620.]
 [ 622. 2622. 4622. 6622. 8622.]
 [ 624. 2624. 4624. 6624. 8624.]
 [ 626. 2626. 4626. 6626. 8626.]
 [ 628. 2628. 4628. 6628. 8628.]
 [ 630. 2630. 4630. 6630. 8630.]] [10632.]

[[ 619. 2619. 4619. 6619. 8619.]
 [ 621. 2621. 4621. 6621. 8621.]
 [ 623. 2623. 4623. 6623. 8623.]
 [ 625. 2625. 4625. 6625. 8625.]
 [ 627. 2627. 4627. 6627. 8627.]
 [ 629. 2629. 4629. 6629. 8629.]
 [ 631. 2631. 4631. 6631. 8631.]] [10633.]

[[ 620. 2620. 4620. 6620. 8620.]
 [ 622. 2622. 4622. 6622. 8622.]
 [ 624. 2624. 4624. 6624. 8624.]
 [ 626. 2626. 4626. 6626. 8626.]
 [ 628. 2628. 4628. 6628. 8628.]
 [ 630. 2630. 4630. 6630. 8630.]
 [ 632. 2632. 4632. 6632. 8632.]] [10634.]

[[ 621. 2621. 4621. 6621. 8621.]
 [ 623. 2623. 4623. 6623. 8623.]
 [ 625. 2625. 4625. 6625. 8625.]
 [ 627. 2627. 4627. 6627. 8627.]
 [ 629. 2629. 4629. 6629. 8629.]
 [ 631. 2631. 4631. 6631. 8631.]
 [ 633. 2633. 4633. 6633. 8633.]] [10635.]

[[ 622. 2622. 4622. 6622. 8622.]
 [ 624. 2624. 4624. 6624. 8624.]
 [ 626. 2626. 4626. 6626. 8626.]
 [ 628. 2628. 4628. 6628. 8628.]
 [ 630. 2630. 4630. 6630. 8630.]
 [ 632. 2632. 4632. 6632. 8632.]
 [ 634. 2634. 4634. 6634. 8634.]] [10636.]

[[ 623. 2623. 4623. 6623. 8623.]
 [ 625. 2625. 4625. 6625. 8625.]
 [ 627. 2627. 4627. 6627. 8627.]
 [ 629. 2629. 4629. 6629. 8629.]
 [ 631. 2631. 4631. 6631. 8631.]
 [ 633. 2633. 4633. 6633. 8633.]
 [ 635. 2635. 4635. 6635. 8635.]] [10637.]

[[ 624. 2624. 4624. 6624. 8624.]
 [ 626. 2626. 4626. 6626. 8626.]
 [ 628. 2628. 4628. 6628. 8628.]
 [ 630. 2630. 4630. 6630. 8630.]
 [ 632. 2632. 4632. 6632. 8632.]
 [ 634. 2634. 4634. 6634. 8634.]
 [ 636. 2636. 4636. 6636. 8636.]] [10638.]

[[ 625. 2625. 4625. 6625. 8625.]
 [ 627. 2627. 4627. 6627. 8627.]
 [ 629. 2629. 4629. 6629. 8629.]
 [ 631. 2631. 4631. 6631. 8631.]
 [ 633. 2633. 4633. 6633. 8633.]
 [ 635. 2635. 4635. 6635. 8635.]
 [ 637. 2637. 4637. 6637. 8637.]] [10639.]

[[ 626. 2626. 4626. 6626. 8626.]
 [ 628. 2628. 4628. 6628. 8628.]
 [ 630. 2630. 4630. 6630. 8630.]
 [ 632. 2632. 4632. 6632. 8632.]
 [ 634. 2634. 4634. 6634. 8634.]
 [ 636. 2636. 4636. 6636. 8636.]
 [ 638. 2638. 4638. 6638. 8638.]] [10640.]

[[ 627. 2627. 4627. 6627. 8627.]
 [ 629. 2629. 4629. 6629. 8629.]
 [ 631. 2631. 4631. 6631. 8631.]
 [ 633. 2633. 4633. 6633. 8633.]
 [ 635. 2635. 4635. 6635. 8635.]
 [ 637. 2637. 4637. 6637. 8637.]
 [ 639. 2639. 4639. 6639. 8639.]] [10641.]

[[ 628. 2628. 4628. 6628. 8628.]
 [ 630. 2630. 4630. 6630. 8630.]
 [ 632. 2632. 4632. 6632. 8632.]
 [ 634. 2634. 4634. 6634. 8634.]
 [ 636. 2636. 4636. 6636. 8636.]
 [ 638. 2638. 4638. 6638. 8638.]
 [ 640. 2640. 4640. 6640. 8640.]] [10642.]

[[ 629. 2629. 4629. 6629. 8629.]
 [ 631. 2631. 4631. 6631. 8631.]
 [ 633. 2633. 4633. 6633. 8633.]
 [ 635. 2635. 4635. 6635. 8635.]
 [ 637. 2637. 4637. 6637. 8637.]
 [ 639. 2639. 4639. 6639. 8639.]
 [ 641. 2641. 4641. 6641. 8641.]] [10643.]

[[ 630. 2630. 4630. 6630. 8630.]
 [ 632. 2632. 4632. 6632. 8632.]
 [ 634. 2634. 4634. 6634. 8634.]
 [ 636. 2636. 4636. 6636. 8636.]
 [ 638. 2638. 4638. 6638. 8638.]
 [ 640. 2640. 4640. 6640. 8640.]
 [ 642. 2642. 4642. 6642. 8642.]] [10644.]

[[ 631. 2631. 4631. 6631. 8631.]
 [ 633. 2633. 4633. 6633. 8633.]
 [ 635. 2635. 4635. 6635. 8635.]
 [ 637. 2637. 4637. 6637. 8637.]
 [ 639. 2639. 4639. 6639. 8639.]
 [ 641. 2641. 4641. 6641. 8641.]
 [ 643. 2643. 4643. 6643. 8643.]] [10645.]

second last test batch

[9]:
for inp,out in zip(test_x_batches[-2], test_y_batches[-2]):
    print(inp,out, '\n')
[[ 744. 2744. 4744. 6744. 8744.]
 [ 746. 2746. 4746. 6746. 8746.]
 [ 748. 2748. 4748. 6748. 8748.]
 [ 750. 2750. 4750. 6750. 8750.]
 [ 752. 2752. 4752. 6752. 8752.]
 [ 754. 2754. 4754. 6754. 8754.]
 [ 756. 2756. 4756. 6756. 8756.]] [10758.]

[[ 745. 2745. 4745. 6745. 8745.]
 [ 747. 2747. 4747. 6747. 8747.]
 [ 749. 2749. 4749. 6749. 8749.]
 [ 751. 2751. 4751. 6751. 8751.]
 [ 753. 2753. 4753. 6753. 8753.]
 [ 755. 2755. 4755. 6755. 8755.]
 [ 757. 2757. 4757. 6757. 8757.]] [10759.]

[[ 746. 2746. 4746. 6746. 8746.]
 [ 748. 2748. 4748. 6748. 8748.]
 [ 750. 2750. 4750. 6750. 8750.]
 [ 752. 2752. 4752. 6752. 8752.]
 [ 754. 2754. 4754. 6754. 8754.]
 [ 756. 2756. 4756. 6756. 8756.]
 [ 758. 2758. 4758. 6758. 8758.]] [10760.]

[[ 747. 2747. 4747. 6747. 8747.]
 [ 749. 2749. 4749. 6749. 8749.]
 [ 751. 2751. 4751. 6751. 8751.]
 [ 753. 2753. 4753. 6753. 8753.]
 [ 755. 2755. 4755. 6755. 8755.]
 [ 757. 2757. 4757. 6757. 8757.]
 [ 759. 2759. 4759. 6759. 8759.]] [10761.]

[[ 748. 2748. 4748. 6748. 8748.]
 [ 750. 2750. 4750. 6750. 8750.]
 [ 752. 2752. 4752. 6752. 8752.]
 [ 754. 2754. 4754. 6754. 8754.]
 [ 756. 2756. 4756. 6756. 8756.]
 [ 758. 2758. 4758. 6758. 8758.]
 [ 760. 2760. 4760. 6760. 8760.]] [10762.]

[[ 749. 2749. 4749. 6749. 8749.]
 [ 751. 2751. 4751. 6751. 8751.]
 [ 753. 2753. 4753. 6753. 8753.]
 [ 755. 2755. 4755. 6755. 8755.]
 [ 757. 2757. 4757. 6757. 8757.]
 [ 759. 2759. 4759. 6759. 8759.]
 [ 761. 2761. 4761. 6761. 8761.]] [10763.]

[[ 750. 2750. 4750. 6750. 8750.]
 [ 752. 2752. 4752. 6752. 8752.]
 [ 754. 2754. 4754. 6754. 8754.]
 [ 756. 2756. 4756. 6756. 8756.]
 [ 758. 2758. 4758. 6758. 8758.]
 [ 760. 2760. 4760. 6760. 8760.]
 [ 762. 2762. 4762. 6762. 8762.]] [10764.]

[[ 751. 2751. 4751. 6751. 8751.]
 [ 753. 2753. 4753. 6753. 8753.]
 [ 755. 2755. 4755. 6755. 8755.]
 [ 757. 2757. 4757. 6757. 8757.]
 [ 759. 2759. 4759. 6759. 8759.]
 [ 761. 2761. 4761. 6761. 8761.]
 [ 763. 2763. 4763. 6763. 8763.]] [10765.]

[[ 752. 2752. 4752. 6752. 8752.]
 [ 754. 2754. 4754. 6754. 8754.]
 [ 756. 2756. 4756. 6756. 8756.]
 [ 758. 2758. 4758. 6758. 8758.]
 [ 760. 2760. 4760. 6760. 8760.]
 [ 762. 2762. 4762. 6762. 8762.]
 [ 764. 2764. 4764. 6764. 8764.]] [10766.]

[[ 753. 2753. 4753. 6753. 8753.]
 [ 755. 2755. 4755. 6755. 8755.]
 [ 757. 2757. 4757. 6757. 8757.]
 [ 759. 2759. 4759. 6759. 8759.]
 [ 761. 2761. 4761. 6761. 8761.]
 [ 763. 2763. 4763. 6763. 8763.]
 [ 765. 2765. 4765. 6765. 8765.]] [10767.]

[[ 754. 2754. 4754. 6754. 8754.]
 [ 756. 2756. 4756. 6756. 8756.]
 [ 758. 2758. 4758. 6758. 8758.]
 [ 760. 2760. 4760. 6760. 8760.]
 [ 762. 2762. 4762. 6762. 8762.]
 [ 764. 2764. 4764. 6764. 8764.]
 [ 766. 2766. 4766. 6766. 8766.]] [10768.]

[[ 755. 2755. 4755. 6755. 8755.]
 [ 757. 2757. 4757. 6757. 8757.]
 [ 759. 2759. 4759. 6759. 8759.]
 [ 761. 2761. 4761. 6761. 8761.]
 [ 763. 2763. 4763. 6763. 8763.]
 [ 765. 2765. 4765. 6765. 8765.]
 [ 767. 2767. 4767. 6767. 8767.]] [10769.]

[[ 756. 2756. 4756. 6756. 8756.]
 [ 758. 2758. 4758. 6758. 8758.]
 [ 760. 2760. 4760. 6760. 8760.]
 [ 762. 2762. 4762. 6762. 8762.]
 [ 764. 2764. 4764. 6764. 8764.]
 [ 766. 2766. 4766. 6766. 8766.]
 [ 768. 2768. 4768. 6768. 8768.]] [10770.]

[[ 757. 2757. 4757. 6757. 8757.]
 [ 759. 2759. 4759. 6759. 8759.]
 [ 761. 2761. 4761. 6761. 8761.]
 [ 763. 2763. 4763. 6763. 8763.]
 [ 765. 2765. 4765. 6765. 8765.]
 [ 767. 2767. 4767. 6767. 8767.]
 [ 769. 2769. 4769. 6769. 8769.]] [10771.]

[[ 758. 2758. 4758. 6758. 8758.]
 [ 760. 2760. 4760. 6760. 8760.]
 [ 762. 2762. 4762. 6762. 8762.]
 [ 764. 2764. 4764. 6764. 8764.]
 [ 766. 2766. 4766. 6766. 8766.]
 [ 768. 2768. 4768. 6768. 8768.]
 [ 770. 2770. 4770. 6770. 8770.]] [10772.]

[[ 759. 2759. 4759. 6759. 8759.]
 [ 761. 2761. 4761. 6761. 8761.]
 [ 763. 2763. 4763. 6763. 8763.]
 [ 765. 2765. 4765. 6765. 8765.]
 [ 767. 2767. 4767. 6767. 8767.]
 [ 769. 2769. 4769. 6769. 8769.]
 [ 771. 2771. 4771. 6771. 8771.]] [10773.]

last test batch

[10]:
for inp,out in zip(test_x_batches[-1], test_y_batches[-1]):
    print(inp,out, '\n')
[[ 760. 2760. 4760. 6760. 8760.]
 [ 762. 2762. 4762. 6762. 8762.]
 [ 764. 2764. 4764. 6764. 8764.]
 [ 766. 2766. 4766. 6766. 8766.]
 [ 768. 2768. 4768. 6768. 8768.]
 [ 770. 2770. 4770. 6770. 8770.]
 [ 772. 2772. 4772. 6772. 8772.]] [10774.]

[[ 761. 2761. 4761. 6761. 8761.]
 [ 763. 2763. 4763. 6763. 8763.]
 [ 765. 2765. 4765. 6765. 8765.]
 [ 767. 2767. 4767. 6767. 8767.]
 [ 769. 2769. 4769. 6769. 8769.]
 [ 771. 2771. 4771. 6771. 8771.]
 [ 773. 2773. 4773. 6773. 8773.]] [10775.]

[[ 762. 2762. 4762. 6762. 8762.]
 [ 764. 2764. 4764. 6764. 8764.]
 [ 766. 2766. 4766. 6766. 8766.]
 [ 768. 2768. 4768. 6768. 8768.]
 [ 770. 2770. 4770. 6770. 8770.]
 [ 772. 2772. 4772. 6772. 8772.]
 [ 774. 2774. 4774. 6774. 8774.]] [10776.]

[[ 763. 2763. 4763. 6763. 8763.]
 [ 765. 2765. 4765. 6765. 8765.]
 [ 767. 2767. 4767. 6767. 8767.]
 [ 769. 2769. 4769. 6769. 8769.]
 [ 771. 2771. 4771. 6771. 8771.]
 [ 773. 2773. 4773. 6773. 8773.]
 [ 775. 2775. 4775. 6775. 8775.]] [10777.]

[[ 764. 2764. 4764. 6764. 8764.]
 [ 766. 2766. 4766. 6766. 8766.]
 [ 768. 2768. 4768. 6768. 8768.]
 [ 770. 2770. 4770. 6770. 8770.]
 [ 772. 2772. 4772. 6772. 8772.]
 [ 774. 2774. 4774. 6774. 8774.]
 [ 776. 2776. 4776. 6776. 8776.]] [10778.]

[[ 765. 2765. 4765. 6765. 8765.]
 [ 767. 2767. 4767. 6767. 8767.]
 [ 769. 2769. 4769. 6769. 8769.]
 [ 771. 2771. 4771. 6771. 8771.]
 [ 773. 2773. 4773. 6773. 8773.]
 [ 775. 2775. 4775. 6775. 8775.]
 [ 777. 2777. 4777. 6777. 8777.]] [10779.]

[[ 766. 2766. 4766. 6766. 8766.]
 [ 768. 2768. 4768. 6768. 8768.]
 [ 770. 2770. 4770. 6770. 8770.]
 [ 772. 2772. 4772. 6772. 8772.]
 [ 774. 2774. 4774. 6774. 8774.]
 [ 776. 2776. 4776. 6776. 8776.]
 [ 778. 2778. 4778. 6778. 8778.]] [10780.]

[[ 767. 2767. 4767. 6767. 8767.]
 [ 769. 2769. 4769. 6769. 8769.]
 [ 771. 2771. 4771. 6771. 8771.]
 [ 773. 2773. 4773. 6773. 8773.]
 [ 775. 2775. 4775. 6775. 8775.]
 [ 777. 2777. 4777. 6777. 8777.]
 [ 779. 2779. 4779. 6779. 8779.]] [10781.]

[[ 768. 2768. 4768. 6768. 8768.]
 [ 770. 2770. 4770. 6770. 8770.]
 [ 772. 2772. 4772. 6772. 8772.]
 [ 774. 2774. 4774. 6774. 8774.]
 [ 776. 2776. 4776. 6776. 8776.]
 [ 778. 2778. 4778. 6778. 8778.]
 [ 780. 2780. 4780. 6780. 8780.]] [10782.]

[[ 769. 2769. 4769. 6769. 8769.]
 [ 771. 2771. 4771. 6771. 8771.]
 [ 773. 2773. 4773. 6773. 8773.]
 [ 775. 2775. 4775. 6775. 8775.]
 [ 777. 2777. 4777. 6777. 8777.]
 [ 779. 2779. 4779. 6779. 8779.]
 [ 781. 2781. 4781. 6781. 8781.]] [10783.]

[[ 770. 2770. 4770. 6770. 8770.]
 [ 772. 2772. 4772. 6772. 8772.]
 [ 774. 2774. 4774. 6774. 8774.]
 [ 776. 2776. 4776. 6776. 8776.]
 [ 778. 2778. 4778. 6778. 8778.]
 [ 780. 2780. 4780. 6780. 8780.]
 [ 782. 2782. 4782. 6782. 8782.]] [10784.]

[[ 771. 2771. 4771. 6771. 8771.]
 [ 773. 2773. 4773. 6773. 8773.]
 [ 775. 2775. 4775. 6775. 8775.]
 [ 777. 2777. 4777. 6777. 8777.]
 [ 779. 2779. 4779. 6779. 8779.]
 [ 781. 2781. 4781. 6781. 8781.]
 [ 783. 2783. 4783. 6783. 8783.]] [10785.]

[[ 772. 2772. 4772. 6772. 8772.]
 [ 774. 2774. 4774. 6774. 8774.]
 [ 776. 2776. 4776. 6776. 8776.]
 [ 778. 2778. 4778. 6778. 8778.]
 [ 780. 2780. 4780. 6780. 8780.]
 [ 782. 2782. 4782. 6782. 8782.]
 [ 784. 2784. 4784. 6784. 8784.]] [10786.]

[[ 773. 2773. 4773. 6773. 8773.]
 [ 775. 2775. 4775. 6775. 8775.]
 [ 777. 2777. 4777. 6777. 8777.]
 [ 779. 2779. 4779. 6779. 8779.]
 [ 781. 2781. 4781. 6781. 8781.]
 [ 783. 2783. 4783. 6783. 8783.]
 [ 785. 2785. 4785. 6785. 8785.]] [10787.]

[[ 774. 2774. 4774. 6774. 8774.]
 [ 776. 2776. 4776. 6776. 8776.]
 [ 778. 2778. 4778. 6778. 8778.]
 [ 780. 2780. 4780. 6780. 8780.]
 [ 782. 2782. 4782. 6782. 8782.]
 [ 784. 2784. 4784. 6784. 8784.]
 [ 786. 2786. 4786. 6786. 8786.]] [10788.]

[[ 775. 2775. 4775. 6775. 8775.]
 [ 777. 2777. 4777. 6777. 8777.]
 [ 779. 2779. 4779. 6779. 8779.]
 [ 781. 2781. 4781. 6781. 8781.]
 [ 783. 2783. 4783. 6783. 8783.]
 [ 785. 2785. 4785. 6785. 8785.]
 [ 787. 2787. 4787. 6787. 8787.]] [10789.]

Generator using yield

Instead of using a return statement, we can use a yield statement, which is more memory efficient because it does not return the whole array (i.e. train_x_batches) at once; rather, it gives one batch at a time.
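Below is a minimal, self-contained sketch (not part of the original notebook) of the difference: a return-style function materializes every batch up front, while a yield-style generator hands out one batch per next() call.

def all_batches(arr, batch_size):            # return-style: builds every batch at once
    return [arr[i:i + batch_size] for i in range(0, len(arr), batch_size)]

def batch_by_batch(arr, batch_size):         # yield-style: produces one batch per next() call
    for i in range(0, len(arr), batch_size):
        yield arr[i:i + batch_size]

gen = batch_by_batch(np.arange(12), 4)
print(next(gen))   # [0 1 2 3]
print(next(gen))   # [4 5 6 7]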

[11]:
from sklearn.preprocessing import MinMaxScaler

train_x_scaler = MinMaxScaler(feature_range=(0, 1))
train_y_scaler = MinMaxScaler(feature_range=(0, 1))
val_x_scaler = MinMaxScaler(feature_range=(0, 1))
val_y_scaler = MinMaxScaler(feature_range=(0, 1))
test_x_scaler = MinMaxScaler(feature_range=(0, 1))
test_y_scaler = MinMaxScaler(feature_range=(0, 1))

# Instead of a function we construct a class which is initialized with almost the same arguments as were used in the previous function.
class batch_generator(object):
    """
    :param data: `ndarray`, input data.
    :param batch_size: `int`, batch size to be used
    :param args: a dictionary containing values of parameters depending upon method used.
    :param method: str, default is 'many_to_one', if many_to_one, then following keys are expected in
                   dictionary args.
            :lookback: `int`, sequence length, number of values LSTM will see at time `t` to make prediction at `t+1`.
            :in_features: `int`, number of columns in `data` starting from 0 to be considered as input
            :out_features: `int`, number of columns in `data` started from last to be considred as output/prediction.
            :trim_last_batch: bool, if True, last batch will be ignored if that contains samples less than `batch_size`.
            :norm: a dictionary which contains scaler object with which to normalize x and y data. We use separate scalers for x
                         and y data. Keys must be `x_scaler` and `y_scaler`.
            :batch_size:
            :step: step size in input data
            :min_ind: starting point from `data`
            :max_ind: end point from `data`
            :future_y_val: number of values to predict

    :param verbose: `boolean`
    """

    def __init__(self, data, batch_size, args, method='many_to_one', verbose=True):

        self.data = data
        self.batch_size = batch_size
        self.args = args
        self.method=method
        self.verbose=verbose
        self.ignoriert_am_anfang = None   # number of values skipped at the start
        self.ignoriert_am_ende = None     # number of values skipped at the end
        self.no_of_batches = None


    def many_to_one(self):

        many_to_one_args = {'lookback': 'required',
                            'in_features': 'required',
                            'out_features': 'required',
                            'min_ind': 'required',
                            'max_ind': 'required',
                            'future_y_val': 'required',
                            'step': 1,
                            'norm': None,
                            'trim_last_batch':True}

        for k,v in many_to_one_args.items():
            if v=='required':
                if k not in self.args:
                    raise ValueError('for {} method, value of {} is required'.format(self.method, k))
                else:
                    many_to_one_args[k] = self.args[k]
            else:
                if k in self.args:
                    many_to_one_args[k] = self.args[k]

        lookback = many_to_one_args['lookback']
        in_features = many_to_one_args['in_features']
        out_features = many_to_one_args['out_features']
        min_ind = many_to_one_args['min_ind']
        max_ind = many_to_one_args['max_ind']
        future_y_val = many_to_one_args['future_y_val']
        step = many_to_one_args['step']
        norm = many_to_one_args['norm']
        trim_last_batch = many_to_one_args['trim_last_batch']

        # selecting the data of interest for x and y
        X = self.data[min_ind:max_ind, 0:in_features]
        Y = self.data[min_ind:max_ind, -out_features:].reshape(-1,out_features)

        if norm is not None:
            x_scaler = norm['x_scaler']
            y_scaler = norm['y_scaler']
            X = x_scaler.fit_transform(X)
            Y = y_scaler.fit_transform(Y)

        # container for keeping x and y windows. A `window` is here defined as one complete set of data at one timestep.
        x_wins = np.full((X.shape[0], lookback, in_features), np.nan, dtype=np.float32)
        y_wins = np.full((Y.shape[0], out_features), np.nan)

        # creating windows from X data
        st = lookback*step - step # starting point of sampling from data
        for j in range(st, X.shape[0]-lookback):
            en = j - lookback*step
            indices = np.arange(j, en, -step)
            ind = np.flip(indices)
            x_wins[j,:,:] = X[ind,:]

        # creating windows from Y data
        for i in range(0, Y.shape[0]-lookback):
            y_wins[i,:] = Y[i+lookback,:]



        """removing trailing nans"""
        first_nan_at_end = first_nan_from_end(y_wins[:,0])  # first nan in last part of data, start skipping from here
        y_wins = y_wins[0:first_nan_at_end,:]
        x_wins = x_wins[0:first_nan_at_end,:]

        """removing nans from start"""
        y_val = st-lookback + future_y_val
        if st>0:
            x_wins = x_wins[st:,:]
            y_wins = y_wins[y_val:,:]

        if self.verbose:
            print("""shape of x data: {} \nshape of y data: {}""".format(x_wins.shape, y_wins.shape))

            print(""".\n{} values are skipped from start and {} values are skipped from end in output array"""
              .format(st, X.shape[0]-first_nan_at_end))
        self.ignoriert_am_anfang = st
        self.ignoriert_am_ende = X.shape[0]-first_nan_at_end

        pot_samples = x_wins.shape[0]

        if self.verbose:
            print('\npotential samples are {}'.format(pot_samples))

        residue = pot_samples % self.batch_size
        if self.verbose:
            print('\nresidue is {} '.format(residue))
        self.residue = residue

        samples = pot_samples - residue
        if self.verbose:
            print('\nActual samples are {}'.format(samples))

        interval = np.arange(0, samples + self.batch_size, self.batch_size)
        if self.verbose:
            print('\nPotential intervals: {}'.format(interval ))

        interval = np.append(interval, pot_samples)
        if self.verbose:
            print('\nActual interval: {} '.format(interval))

        if trim_last_batch:
            no_of_batches = len(interval)-2
        else:
            no_of_batches = len(interval) - 1

        print('\nNumber of batches are {} '.format(no_of_batches))
        self.no_of_batches = no_of_batches

        # code for generator
        gen_i = 1
        while 1:

            for b in range(no_of_batches):
                st = interval[b]
                en = interval[b + 1]
                x_batch = x_wins[st:en, :, :]
                y_batch = y_wins[st:en]

                gen_i +=1

                yield x_batch, y_batch



_lookback=2  # sequence length
input_features = 5
output_features = 1
_batch_size = 16
input_stepsize = 2
st_ind = 0
end_ind = 600
t_plus_ith_val = 1 # which future value to predict; e.g. if the input is 11,12,13,14, the default value of 1 means we
                   # want to predict 15, while setting it to 3 means we want to predict 17.

train_args = {'lookback': _lookback,
            'in_features': input_features,
            'out_features': output_features,
            'min_ind': st_ind,
            'max_ind': end_ind,
            'future_y_val': t_plus_ith_val,
            'step': input_stepsize,
            'norm': {'x_scaler': train_x_scaler, 'y_scaler': train_y_scaler},
            'trim_last_batch':True}

train_generator = batch_generator(data, _batch_size, train_args)
train_gen = train_generator.many_to_one()

val_args = {'lookback': _lookback,
            'in_features': input_features,
            'out_features': output_features,
            'min_ind': 600,
            'max_ind': 800,
            'future_y_val': t_plus_ith_val,
            'step': input_stepsize,
            'norm': {'x_scaler': val_x_scaler, 'y_scaler': val_y_scaler},
            'trim_last_batch':True}

val_generator = batch_generator(data, _batch_size, val_args, verbose=False)
val_gen = val_generator.many_to_one()

test_args = {'lookback': _lookback,
            'in_features': input_features,
            'out_features': output_features,
            'min_ind': 800,
            'max_ind': 1000,
            'future_y_val': t_plus_ith_val,
            'step': input_stepsize,
            'norm': {'x_scaler': test_x_scaler, 'y_scaler': test_y_scaler},
            'trim_last_batch':True}

test_generator = batch_generator(data, _batch_size, test_args)
test_gen = test_generator.many_to_one()

Validation data generator

The values are normalized between 0 and 1. We can run the next cell multiple times and each time a different batch is printed. If we want to see the exact values, we can turn off normalization by setting the norm value to None in the cell above.

[12]:
x_batch, y_batch = next(val_gen)
for inp,out in zip(x_batch, y_batch):
    print(inp,out, '\n')

Number of batches are 12
[[0.         0.         0.         0.         0.        ]
 [0.01005025 0.01005025 0.01005025 0.01005025 0.01005025]] [0.01507538]

[[0.00502513 0.00502513 0.00502513 0.00502513 0.00502513]
 [0.01507538 0.01507538 0.01507538 0.01507538 0.01507538]] [0.0201005]

[[0.01005025 0.01005025 0.01005025 0.01005025 0.01005025]
 [0.0201005  0.0201005  0.0201005  0.0201005  0.0201005 ]] [0.02512563]

[[0.01507538 0.01507538 0.01507538 0.01507538 0.01507538]
 [0.02512563 0.02512563 0.02512563 0.02512563 0.02512563]] [0.03015075]

[[0.0201005  0.0201005  0.0201005  0.0201005  0.0201005 ]
 [0.03015075 0.03015075 0.03015075 0.03015075 0.03015075]] [0.03517588]

[[0.02512563 0.02512563 0.02512563 0.02512563 0.02512563]
 [0.03517588 0.03517588 0.03517588 0.03517588 0.03517588]] [0.04020101]

[[0.03015075 0.03015075 0.03015075 0.03015075 0.03015075]
 [0.040201   0.040201   0.040201   0.040201   0.040201  ]] [0.04522613]

[[0.03517588 0.03517588 0.03517588 0.03517588 0.03517588]
 [0.04522613 0.04522613 0.04522613 0.04522613 0.04522613]] [0.05025126]

[[0.040201   0.040201   0.040201   0.040201   0.040201  ]
 [0.05025126 0.05025126 0.05025126 0.05025126 0.05025126]] [0.05527638]

[[0.04522613 0.04522613 0.04522613 0.04522613 0.04522613]
 [0.05527638 0.05527638 0.05527638 0.05527638 0.05527638]] [0.06030151]

[[0.05025126 0.05025126 0.05025126 0.05025126 0.05025126]
 [0.06030151 0.06030151 0.06030151 0.06030151 0.06030151]] [0.06532663]

[[0.05527638 0.05527638 0.05527638 0.05527638 0.05527638]
 [0.06532663 0.06532663 0.06532663 0.06532663 0.06532663]] [0.07035176]

[[0.06030151 0.06030151 0.06030151 0.06030151 0.06030151]
 [0.07035176 0.07035176 0.07035176 0.07035176 0.07035176]] [0.07537688]

[[0.06532663 0.06532663 0.06532663 0.06532663 0.06532663]
 [0.07537688 0.07537688 0.07537688 0.07537688 0.07537688]] [0.08040201]

[[0.07035176 0.07035176 0.07035176 0.07035176 0.07035176]
 [0.08040201 0.08040201 0.08040201 0.08040201 0.08040201]] [0.08542714]

[[0.07537688 0.07537688 0.07537688 0.07537688 0.07537688]
 [0.08542714 0.08542714 0.08542714 0.08542714 0.08542714]] [0.09045226]

C:\Users\USER\Anaconda3\envs\tfgpu\lib\site-packages\sklearn\utils\validation.py:595: DataConversionWarning: Data with input dtype int32 was converted to float64 by MinMaxScaler.
  warnings.warn(msg, DataConversionWarning)
C:\Users\USER\Anaconda3\envs\tfgpu\lib\site-packages\sklearn\utils\validation.py:595: DataConversionWarning: Data with input dtype int32 was converted to float64 by MinMaxScaler.
  warnings.warn(msg, DataConversionWarning)
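Because the generator fits val_x_scaler and val_y_scaler when its first batch is produced, the original (unnormalized) values can also be recovered from a batch with the scalers' inverse_transform. The following is an illustrative sketch using the x_batch and y_batch from the cell above; it is not a cell from the original run.

print(val_y_scaler.inverse_transform(y_batch)[:3])                               # first three targets on the original scale
print(val_x_scaler.inverse_transform(x_batch.reshape(-1, input_features))[:2])   # first two input rows on the original scale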

Test data generator

[13]:
x_batch, y_batch = next(test_gen)
for inp,out in zip(x_batch, y_batch):
    print(inp,out, '\n')
shape of x data: (196, 2, 5)
shape of y data: (197, 1)
.
2 values are skipped from start and 2 values are skipped from end in output array

potential samples are 196

residue is 4

Actual samples are 192

Potential intervals: [  0  16  32  48  64  80  96 112 128 144 160 176 192]

Actual interval: [  0  16  32  48  64  80  96 112 128 144 160 176 192 196]

Number of batches are 12
[[0.         0.         0.         0.         0.        ]
 [0.01005025 0.01005025 0.01005025 0.01005025 0.01005025]] [0.01507538]

[[0.00502513 0.00502513 0.00502513 0.00502513 0.00502513]
 [0.01507538 0.01507538 0.01507538 0.01507538 0.01507538]] [0.0201005]

[[0.01005025 0.01005025 0.01005025 0.01005025 0.01005025]
 [0.0201005  0.0201005  0.0201005  0.0201005  0.0201005 ]] [0.02512563]

[[0.01507538 0.01507538 0.01507538 0.01507538 0.01507538]
 [0.02512563 0.02512563 0.02512563 0.02512563 0.02512563]] [0.03015075]

[[0.0201005  0.0201005  0.0201005  0.0201005  0.0201005 ]
 [0.03015075 0.03015075 0.03015075 0.03015075 0.03015075]] [0.03517588]

[[0.02512563 0.02512563 0.02512563 0.02512563 0.02512563]
 [0.03517588 0.03517588 0.03517588 0.03517588 0.03517588]] [0.04020101]

[[0.03015075 0.03015075 0.03015075 0.03015075 0.03015075]
 [0.040201   0.040201   0.040201   0.040201   0.040201  ]] [0.04522613]

[[0.03517588 0.03517588 0.03517588 0.03517588 0.03517588]
 [0.04522613 0.04522613 0.04522613 0.04522613 0.04522613]] [0.05025126]

[[0.040201   0.040201   0.040201   0.040201   0.040201  ]
 [0.05025126 0.05025126 0.05025126 0.05025126 0.05025126]] [0.05527638]

[[0.04522613 0.04522613 0.04522613 0.04522613 0.04522613]
 [0.05527638 0.05527638 0.05527638 0.05527638 0.05527638]] [0.06030151]

[[0.05025126 0.05025126 0.05025126 0.05025126 0.05025126]
 [0.06030151 0.06030151 0.06030151 0.06030151 0.06030151]] [0.06532663]

[[0.05527638 0.05527638 0.05527638 0.05527638 0.05527638]
 [0.06532663 0.06532663 0.06532663 0.06532663 0.06532663]] [0.07035176]

[[0.06030151 0.06030151 0.06030151 0.06030151 0.06030151]
 [0.07035176 0.07035176 0.07035176 0.07035176 0.07035176]] [0.07537688]

[[0.06532663 0.06532663 0.06532663 0.06532663 0.06532663]
 [0.07537688 0.07537688 0.07537688 0.07537688 0.07537688]] [0.08040201]

[[0.07035176 0.07035176 0.07035176 0.07035176 0.07035176]
 [0.08040201 0.08040201 0.08040201 0.08040201 0.08040201]] [0.08542714]

[[0.07537688 0.07537688 0.07537688 0.07537688 0.07537688]
 [0.08542714 0.08542714 0.08542714 0.08542714 0.08542714]] [0.09045226]

C:\Users\USER\Anaconda3\envs\tfgpu\lib\site-packages\sklearn\utils\validation.py:595: DataConversionWarning: Data with input dtype int32 was converted to float64 by MinMaxScaler.
  warnings.warn(msg, DataConversionWarning)
C:\Users\USER\Anaconda3\envs\tfgpu\lib\site-packages\sklearn\utils\validation.py:595: DataConversionWarning: Data with input dtype int32 was converted to float64 by MinMaxScaler.
  warnings.warn(msg, DataConversionWarning)
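Finally, although this notebook stops at data preparation, the generators can be fed to a Keras model. The sketch below is an assumption-laden illustration rather than part of the original run: the architecture and hyperparameters are arbitrary, and the call assumes a TF 2.x-style tf.keras where model.fit accepts Python generators (in the TF 1.x environment suggested by the warnings above, model.fit_generator would be the equivalent call).

import tensorflow as tf

# an arbitrary small model; the input shape must match (lookback, in_features) of the generator, here (2, 5)
model = tf.keras.Sequential([
    tf.keras.layers.LSTM(32, input_shape=(_lookback, input_features)),
    tf.keras.layers.Dense(output_features)
])
model.compile(optimizer='adam', loss='mse')

# `no_of_batches` is set inside `many_to_one` the first time the generator body runs,
# so pull one batch from the training generator before reading the attribute.
next(train_gen)

model.fit(train_gen,
          steps_per_epoch=train_generator.no_of_batches,
          validation_data=val_gen,
          validation_steps=val_generator.no_of_batches,
          epochs=2)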