Welcome


As a Data Scientist, I strive to extract and deliver meaningful, solution-focused, data-driven insights that drive business growth.

I utilize Statistical and Machine Learning models in both centralized and distributed (on-premises and Cloud) computing environments.


My current favourite tools:


##    Min. 1st Qu.  Median    Mean 3rd Qu.    Max. 
##    2.00   26.00   36.00   42.98   56.00  120.00
library(keras)
library(tensorflow)

Define the model:

model <- keras_model_sequential() 
model%>%
  layer_lstm(neurons, batch_input_shape = c(batch_size, X_shape2, X_shape3), stateful= TRUE)%>%
  layer_dense(units = 1)

Compile the model:

model %>% compile(
  loss = 'mean_squared_error',
  optimizer = optimizer_adam( lr= 0.0001 , decay = 1e-6 ),  #  optimizer_sgd(lr = 0.02),
  metrics = c('accuracy')
)

Model Summary:

summary(model)
## ___________________________________________________________________________
## Layer (type)                     Output Shape                  Param #     
## ===========================================================================
## lstm_6 (LSTM)                    (1, 4)                        96          
## ___________________________________________________________________________
## dense_6 (Dense)                  (1, 1)                        5           
## ===========================================================================
## Total params: 101
## Trainable params: 101
## Non-trainable params: 0
## ___________________________________________________________________________
import numpy as np
#import pandas as pd
#from sklearn.cross_validation import train_test_split, StratifiedKFold 
#from sklearn.ensemble  import GradientBoostingClassifier as GBC, RandomForestClassifier
#from sklearn.model_selection import GridSearchCV