Repository: MegaJoctan/MALE5 Branch: MQL5-ML Commit: d6541568aaba Files: 47 Total size: 473.0 KB
Directory structure:
gitextract_ly_vy9wi/
├── .github/
│   └── FUNDING.yml
├── .gitignore
├── Examples/
│   ├── Classifier Model Example.mq5
│   └── Regressor Model Example.mq5
├── LICENSE
├── MqPlotLib/
│   └── plots.mqh
├── Neural Networks/
│   ├── Pattern Nets.mqh
│   ├── README.md
│   ├── Regressor Nets.mqh
│   ├── initializers.mqh
│   ├── kohonen maps.mqh
│   └── optimizers.mqh
├── Numpy/
│   └── Numpy.mqh
├── Pandas/
│   ├── Incremental LE.mqh
│   └── pandas.mqh
├── README.md
├── Sklearn/
│   ├── Cluster/
│   │   ├── DBSCAN.mqh
│   │   ├── Hierachical Clustering.mqh
│   │   ├── KMeans.mqh
│   │   └── base.mqh
│   ├── Decomposition/
│   │   ├── LDA.mqh
│   │   ├── NMF.mqh
│   │   ├── PCA.mqh
│   │   ├── README.md
│   │   ├── TruncatedSVD.mqh
│   │   └── base.mqh
│   ├── Ensemble/
│   │   ├── AdaBoost.mqh
│   │   ├── README.md
│   │   └── Random Forest.mqh
│   ├── Linear Models/
│   │   ├── Linear Regression.mqh
│   │   ├── Logistic Regression.mqh
│   │   ├── README.md
│   │   └── Ridge.mqh
│   ├── Naive Bayes/
│   │   ├── Naive Bayes.mqh
│   │   ├── README.md
│   │   └── naive bayes visuals.py
│   ├── Neighbors/
│   │   └── KNN_nearest_neighbors.mqh
│   ├── Tree/
│   │   ├── README.md
│   │   └── tree.mqh
│   ├── metrics.mqh
│   └── preprocessing.mqh
├── Stats Models/
│   ├── ADF.mqh
│   ├── ARIMA.mqh
│   └── OLS.mqh
├── Tensors.mqh
├── Utils.mqh
└── requirements.txt

================================================
FILE CONTENTS
================================================

================================================
FILE: .github/FUNDING.yml
================================================
# These are supported funding model platforms

ko_fi: omegajoctan
# NOTE: `custom` must appear once, as a single array (GitHub allows up to 4 URLs);
# a duplicated `custom:` key is invalid YAML and the second entry would silently
# override the first, dropping the mql5.com seller link.
custom: ['https://www.mql5.com/en/users/omegajoctan/seller', 'https://www.buymeacoffee.com/omegajoctan']

================================================
FILE: .gitignore
================================================
*.ex5
*.psd
*.zip
*.rar
*.xlsx
Todo's.txt
logisticwiki.txt
/venv
/Neural Nets Pro
================================================ FILE: Examples/Classifier Model Example.mq5 ================================================ //+------------------------------------------------------------------+ //| Classifier Model Sample.mq5 | //| Copyright 2023, Omega Joctan | //| https://www.mql5.com/en/users/omegajoctan | //+------------------------------------------------------------------+ #property copyright "Copyright 2023, Omega Joctan" #property link "https://www.mql5.com/en/users/omegajoctan" #property version "1.00" #include #include #include //helper functions for for data manipulations #include //fo measuring the performance StandardizationScaler scaler; //standardization scaler from preprocessing.mqh CDecisionTreeClassifier *decision_tree; //a decision tree classifier model MqlRates rates[]; //+------------------------------------------------------------------+ //| Expert initialization function | //+------------------------------------------------------------------+ int OnInit() { //--- Model selection decision_tree = new CDecisionTreeClassifier(2, 5); //a decision tree classifier from DecisionTree class //--- vector open, high, low, close; int data_size = 1000; //--- Getting the open, high, low and close values for the past 1000 bars, starting from the recent closed bar of 1 open.CopyRates(Symbol(), PERIOD_D1, COPY_RATES_OPEN, 1, data_size); high.CopyRates(Symbol(), PERIOD_D1, COPY_RATES_HIGH, 1, data_size); low.CopyRates(Symbol(), PERIOD_D1, COPY_RATES_LOW, 1, data_size); close.CopyRates(Symbol(), PERIOD_D1, COPY_RATES_CLOSE, 1, data_size); matrix X(data_size, 3); //creating the x matrix //--- Assigning the open, high, and low price values to the x matrix X.Col(open, 0); X.Col(high, 1); X.Col(low, 2); //--- Since we are using the x variables to predict y, we choose the close price to be the target variable vector y(data_size); for (int i=0; iopen[i]) //a bullish candle appeared y[i] = 1; //buy signal else { y[i] = 0; //sell signal } } //--- We 
split the data into training and testing samples for training and evaluation matrix X_train, X_test; vector y_train, y_test; double train_size = 0.7; //70% of the data should be used for training the rest for testing int random_state = 42; //we put a random state to shuffle the data so that a machine learning model understands the patterns and not the order of the dataset, this makes the model durable MatrixExtend::TrainTestSplitMatrices(X, y, X_train, y_train, X_test, y_test, train_size, random_state); // we split the x and y data into training and testing samples //--- Normalizing the independent variables X_train = scaler.fit_transform(X_train); // we fit the scaler on the training data and transform the data alltogether X_test = scaler.transform(X_test); // we transform the new data this way //--- Training the model decision_tree.fit(X_train, y_train); //The training function //--- Measuring predictive accuracy vector train_predictions = decision_tree.predict_bin(X_train); Print("Training results classification report"); Metrics::classification_report(y_train, train_predictions); //--- Evaluating the model on out-of-sample predictions vector test_predictions = decision_tree.predict_bin(X_test); Print("Testing results classification report"); Metrics::classification_report(y_test, test_predictions); //--- ArraySetAsSeries(rates, true); return(INIT_SUCCEEDED); } //+------------------------------------------------------------------+ //| Expert deinitialization function | //+------------------------------------------------------------------+ void OnDeinit(const int reason) { //--- delete (decision_tree); //We have to delete the AI model object from the memory } //+------------------------------------------------------------------+ //| Expert tick function | //+------------------------------------------------------------------+ void OnTick() { //--- Making predictions live from the market CopyRates(Symbol(), PERIOD_D1, 1, 3, rates); //Get the very recent information 
from the market vector x = {rates[0].open, rates[0].high, rates[0].low}; //Assigning data from the recent candle in a similar way to the training data x = scaler.transform(x); int signal = (int)decision_tree.predict_bin(x); Comment("Signal = ",signal==1?"BUY":"SELL"); //Ternary operator for checking if the signal is either buy or sell } //+------------------------------------------------------------------+ ================================================ FILE: Examples/Regressor Model Example.mq5 ================================================ //+------------------------------------------------------------------+ //| Regressor Model sample.mq5 | //| Copyright 2023, Omega Joctan | //| https://www.mql5.com/en/users/omegajoctan | //+------------------------------------------------------------------+ #property copyright "Copyright 2023, Omega Joctan" #property link "https://www.mql5.com/en/users/omegajoctan" #property version "1.00" #include #include #include //helper functions for for data manipulations #include //fo measuring the performance StandardizationScaler scaler; //standardization scaler from preprocessing.mqh CDecisionTreeRegressor *decision_tree; //a decision tree classifier model MqlRates rates[]; //+------------------------------------------------------------------+ //| Expert initialization function | //+------------------------------------------------------------------+ int OnInit() { //--- Model selection decision_tree = new CDecisionTreeRegressor(2, 5); //a decision tree classifier from DecisionTree class vector open, high, low, close; int data_size = 1000; //bars //--- Getting the open, high, low and close values for the past 1000 bars, starting from the recent closed bar of 1 open.CopyRates(Symbol(), PERIOD_D1, COPY_RATES_OPEN, 1, data_size); high.CopyRates(Symbol(), PERIOD_D1, COPY_RATES_HIGH, 1, data_size); low.CopyRates(Symbol(), PERIOD_D1, COPY_RATES_LOW, 1, data_size); close.CopyRates(Symbol(), PERIOD_D1, COPY_RATES_CLOSE, 1, data_size); 
matrix X(data_size, 3); //creating the x matrix //--- Assigning the open, high, and low price values to the x matrix X.Col(open, 0); X.Col(high, 1); X.Col(low, 2); vector y = close; // The target variable is the close price, using open, high and low values were want to predict the next closing price //--- We split the data into training and testing samples for training and evaluation matrix X_train, X_test; vector y_train, y_test; double train_size = 0.7; //70% of the data to be used for training the rest 30% for testing int random_state = 42; //we put a random state to shuffle the data so that a machine learning model understands the patterns and not the order of the dataset, this makes the model durable MatrixExtend::TrainTestSplitMatrices(X, y, X_train, y_train, X_test, y_test, train_size, random_state); // we split the x and y data into training and testing samples //--- Normalizing the independent variables X_train = scaler.fit_transform(X_train); // we fit the scaler on the training data and transform the data alltogether X_test = scaler.transform(X_test); // we transform the new data this way //--- Training the model decision_tree.fit(X_train, y_train); //The training function //--- Measuring predictive accuracy vector train_predictions = decision_tree.predict(X_train); printf("Decision decision_tree training r2_score = %.3f ",Metrics::RegressionMetric(y_train, train_predictions, METRIC_R_SQUARED)); //--- Evaluating the model on out-of-sample predictions vector test_predictions = decision_tree.predict(X_test); printf("Decision decision_tree out-of-sample r2_score = %.3f ",Metrics::r_squared(y_test, test_predictions)); return(INIT_SUCCEEDED); } //+------------------------------------------------------------------+ //| Expert deinitialization function | //+------------------------------------------------------------------+ void OnDeinit(const int reason) { //--- delete (decision_tree); //We have to delete the AI model object from the memory } 
//+------------------------------------------------------------------+ //| Expert tick function | //+------------------------------------------------------------------+ void OnTick() { //--- Making predictions live from the market CopyRates(Symbol(), PERIOD_D1, 1, 3, rates); //Get the very recent information from the market vector x = {rates[0].open, rates[0].high, rates[0].low}; //Assigning data from the recent candle in a similar way to the training data x = scaler.transform(x); double predicted_close_price = decision_tree.predict(x); Comment("Next closing price predicted is = ",predicted_close_price); } //+------------------------------------------------------------------+ ================================================ FILE: LICENSE ================================================ MIT License Copyright (c) 2023 Omega Joctan Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
================================================ FILE: MqPlotLib/plots.mqh ================================================ //+------------------------------------------------------------------+ //| plots.mqh | //| Copyright 2022, Fxalgebra.com | //| https://www.mql5.com/en/users/omegajoctan | //+------------------------------------------------------------------+ #property copyright "Copyright 2022, Fxalgebra.com" #property link "https://www.mql5.com/en/users/omegajoctan" //+------------------------------------------------------------------+ //| defines | //+------------------------------------------------------------------+ #include #include class CPlots { protected: CGraphic *graph; long m_chart_id; int m_subwin; int m_x1, m_x2; int m_y1, m_y2; string m_font_family; bool m_chart_show; string m_plot_names[]; ENUM_CURVE_TYPE m_curve_type; bool GraphCreate(string plot_name); vector m_x, m_y; string x_label, y_label; public: CPlots(long chart_id=0, int sub_win=0 ,int x1=30, int y1=40, int x2=550, int y2=310, string font_family="Consolas", bool chart_show=true); ~CPlots(void); bool Plot(string plot_name, vector& x, vector& y, string x_axis_label, string y_axis_label, string label, ENUM_CURVE_TYPE curve_type=CURVE_POINTS_AND_LINES,color clr = clrDodgerBlue, bool points_fill = true); bool AddPlot(vector &v,string label="plt",color clr=clrOrange); }; //+------------------------------------------------------------------+ //| | //+------------------------------------------------------------------+ CPlots::CPlots(long chart_id=0, int sub_win=0 ,int x1=30, int y1=40, int x2=550, int y2=310, string font_family="Consolas", bool chart_show=true): m_chart_id(chart_id), m_subwin(sub_win), m_x1(x1), m_y1(y1), m_x2(x2), m_y2(y2), m_font_family(font_family), m_chart_show(chart_show) { graph = new CGraphic(); ChartRedraw(m_chart_id); } //+------------------------------------------------------------------+ //| | //+------------------------------------------------------------------+ 
CPlots::~CPlots(void) { for (int i=0; i #include #ifndef RANDOM_STATE #define RANDOM_STATE 42 #endif //+------------------------------------------------------------------+ //| | //+------------------------------------------------------------------+ enum activation { AF_HARD_SIGMOID_ = AF_HARD_SIGMOID, AF_SIGMOID_ = AF_SIGMOID, AF_SWISH_ = AF_SWISH, AF_SOFTSIGN_ = AF_SOFTSIGN, AF_TANH_ = AF_TANH }; //+------------------------------------------------------------------+ //| | //+------------------------------------------------------------------+ class CPatternNets { private: vector W_CONFIG; vector W; //Weights vector vector B; //Bias vector activation A_FX; protected: ulong inputs; ulong outputs; ulong rows; vector HL_CONFIG; bool SoftMaxLayer; vector classes; void SoftMaxLayerFX(matrix &mat); public: CPatternNets(matrix &xmatrix, vector &yvector,vector &HL_NODES, activation ActivationFx, bool SoftMaxLyr=false); ~CPatternNets(void); int PatternNetFF(vector &in_vector); vector PatternNetFF(matrix &xmatrix); }; //+------------------------------------------------------------------+ //| | //+------------------------------------------------------------------+ CPatternNets::CPatternNets(matrix &xmatrix, vector &yvector,vector &HL_NODES, activation ActivationFx, bool SoftMaxLyr=false) { A_FX = ActivationFx; inputs = xmatrix.Cols(); rows = xmatrix.Rows(); SoftMaxLayer = SoftMaxLyr; //--- Normalize data if (rows != yvector.Size()) { Print(__FUNCTION__," FATAL | Number of rows in the x matrix is not the same the y vector size "); return; } classes = MatrixExtend::Unique(yvector); outputs = classes.Size(); HL_CONFIG.Copy(HL_NODES); HL_CONFIG.Resize(HL_CONFIG.Size()+1); //Add the output layer HL_CONFIG[HL_CONFIG.Size()-1] = (int)outputs; //Append one node to the output layer //--- W_CONFIG.Resize(HL_CONFIG.Size()); B.Resize((ulong)HL_CONFIG.Sum()); //--- GENERATE WEIGHTS ulong layer_input = inputs; for (ulong i=0; i\n", "HIDDEN LAYERS + OUTPUT ",HL_CONFIG,"\n", "INPUTS 
",inputs," | OUTPUTS ",outputs," W CONFIG ",W_CONFIG,"\n", "activation ",EnumToString(A_FX)," SoftMaxLayer = ",bool(SoftMaxLayer) ); Print("WEIGHTS ",W,"\nBIAS ",B); #endif } //+------------------------------------------------------------------+ //| | //+------------------------------------------------------------------+ CPatternNets::~CPatternNets(void) { ZeroMemory(W); ZeroMemory(B); } //+------------------------------------------------------------------+ //| | //+------------------------------------------------------------------+ int CPatternNets::PatternNetFF(vector &in_vector) { matrix L_INPUT = {}, L_OUTPUT={}, L_WEIGHTS = {}; vector v_weights ={}; ulong w_start = 0; L_INPUT = MatrixExtend::VectorToMatrix(in_vector); vector L_BIAS_VECTOR = {}; matrix L_BIAS_MATRIX = {}; ulong b_start = 0; for (ulong i=0; i ",i); Print("L_WEIGHTS\n",L_WEIGHTS,"\nL_INPUT\n",L_INPUT,"\nL_BIAS\n",L_BIAS_MATRIX); #endif L_OUTPUT = L_WEIGHTS.MatMul(L_INPUT); L_OUTPUT = L_OUTPUT+L_BIAS_MATRIX; //Add bias //--- if (i==W_CONFIG.Size()-1) //Last layer { if (SoftMaxLayer) { Print("Before softmax\n",L_OUTPUT); SoftMaxLayerFX(L_OUTPUT); Print("After\n",L_OUTPUT); } else L_OUTPUT.Activation(L_OUTPUT, ENUM_ACTIVATION_FUNCTION(A_FX)); } else L_OUTPUT.Activation(L_OUTPUT, ENUM_ACTIVATION_FUNCTION(A_FX)); //--- L_INPUT.Copy(L_OUTPUT); //Assign outputs to the inputs w_start += (ulong)W_CONFIG[i]; //New weights copy b_start += (ulong)HL_CONFIG[i]; } #ifdef DEBUG_MODE Print("--> outputs\n ",L_OUTPUT); #endif vector v_out = MatrixExtend::MatrixToVector(L_OUTPUT); return((int)classes[v_out.ArgMax()]); } //+------------------------------------------------------------------+ //| | //+------------------------------------------------------------------+ void CPatternNets::SoftMaxLayerFX(matrix &mat) { vector ret = MatrixExtend::MatrixToVector(mat); ret.Activation(ret, AF_SOFTMAX); mat = MatrixExtend::VectorToMatrix(ret, mat.Cols()); } 
//+------------------------------------------------------------------+ //| | //+------------------------------------------------------------------+ vector CPatternNets::PatternNetFF(matrix &xmatrix) { vector v(xmatrix.Rows()); for (ulong i=0; i