<ruby id="bdb3f"></ruby>

    <p id="bdb3f"><cite id="bdb3f"></cite></p>

      <p id="bdb3f"><cite id="bdb3f"><th id="bdb3f"></th></cite></p><p id="bdb3f"></p>
        <p id="bdb3f"><cite id="bdb3f"></cite></p>

          <pre id="bdb3f"></pre>
          <pre id="bdb3f"><del id="bdb3f"><thead id="bdb3f"></thead></del></pre>

          <ruby id="bdb3f"><mark id="bdb3f"></mark></ruby><ruby id="bdb3f"></ruby>
          <pre id="bdb3f"><pre id="bdb3f"><mark id="bdb3f"></mark></pre></pre><output id="bdb3f"></output><p id="bdb3f"></p><p id="bdb3f"></p>

          <pre id="bdb3f"><del id="bdb3f"><progress id="bdb3f"></progress></del></pre>

                <ruby id="bdb3f"></ruby>

                企業級AI智能體構建引擎,智能編排和調試,一鍵部署,支持知識庫和私有化部署方案 廣告
                ``` #include <stdio.h> #include <stdlib.h> #include <math.h> #include<iostream> using namespace std; #define INPUT_NODES 4 //3 #define HIDDEN_NODES 5 //隱藏層 #define OUTPUT_NODES 3 //4 #define LEARNING_RATE 0.1 double inputLayer[INPUT_NODES]; double hiddenLayer[HIDDEN_NODES]; double outputLayer[OUTPUT_NODES]; double inputToHiddenWeights[INPUT_NODES][HIDDEN_NODES]; double hiddenToOutputWeights[HIDDEN_NODES][OUTPUT_NODES]; double hiddenBias[HIDDEN_NODES]; double outputBias[OUTPUT_NODES]; // Activation functions//激活函數 double sigmoid(float x) { return 1.0 / (1.0 + exp(-x)); } double sigmoid(double x) { return 1.0 / (1.0 + exp(-x)); } double dSigmoid(float x) { return x * (1.0 - x); } double dSigmoid(double x) { return x * (1.0 - x); } double dSigmoid_ld(double x) { return x * (1.0 - x); } void initializeWeights() { for (int i = 0; i < INPUT_NODES; i++) { for (int j = 0; j < HIDDEN_NODES; j++) { inputToHiddenWeights[i][j] = ((double)rand() / RAND_MAX) * 2 - 1; // between -1 and 1//RAND_MAX==32767 } } for (int i = 0; i < HIDDEN_NODES; i++) { for (int j = 0; j < OUTPUT_NODES; j++) { hiddenToOutputWeights[i][j] = ((double)rand() / RAND_MAX) * 2 - 1; // between -1 and 1 } } } void forwardPropagation() { // Input to hidden layer for (int j = 0; j < HIDDEN_NODES; j++) { hiddenLayer[j] = 0; for (int i = 0; i < INPUT_NODES; i++) { hiddenLayer[j] += inputLayer[i] * inputToHiddenWeights[i][j]; } hiddenLayer[j] += hiddenBias[j]; hiddenLayer[j] = sigmoid(hiddenLayer[j]); } // Hidden to output layer for (int k = 0; k < OUTPUT_NODES; k++) { outputLayer[k] = 0; for (int j = 0; j < HIDDEN_NODES; j++) { outputLayer[k] += hiddenLayer[j] * hiddenToOutputWeights[j][k]; } outputLayer[k] += outputBias[k]; outputLayer[k] = sigmoid(outputLayer[k]); } } void backwardPropagation(double target[OUTPUT_NODES]) { double outputErrors[OUTPUT_NODES]; double hiddenErrors[HIDDEN_NODES]; //計算損失Calculate output errors for (int k = 0; k < OUTPUT_NODES; k++) { outputErrors[k] = target[k] - 
outputLayer[k]; } //算隱藏層的損失Calculate hidden layer errors for (int j = 0; j < HIDDEN_NODES; j++) {//for220j hiddenErrors[j] = 0; for (int k = 0; k < OUTPUT_NODES; k++) {//for3300k hiddenErrors[j] += outputErrors[k] * hiddenToOutputWeights[j][k]; }//for3300k }//for220j //更新隱藏層的權重Update hidden to output weights for (int j = 0; j < HIDDEN_NODES; j++) {//for440j for (int k = 0; k < OUTPUT_NODES; k++) {//for5500k hiddenToOutputWeights[j][k] += LEARNING_RATE * outputErrors[k] * dSigmoid(outputLayer[k]) * hiddenLayer[j]; }//for5500k }//for440j //更新輸入層權重Update input to hidden weights for (int i = 0; i < INPUT_NODES; i++) {//for660i for (int j = 0; j < HIDDEN_NODES; j++) { inputToHiddenWeights[i][j] += LEARNING_RATE * hiddenErrors[j] * dSigmoid(hiddenLayer[j]) * inputLayer[i]; }//for7700j }//for660i }//void backwardPropagation(double target[OUTPUT_NODES]) int main() { cout << RAND_MAX << endl; initializeWeights(); // Example inputLayer[0] = 0.5; inputLayer[1] = 0.25; inputLayer[2] = -0.75; inputLayer[3] = 0; double target[OUTPUT_NODES] = { 0.1, 0.9, 0.2 }; //, 0.8}; //此為訓練數據! for (int epoch = 0; epoch < 10000; epoch++) { // Training for 10000 epochs forwardPropagation(); backwardPropagation(target); } //測試數據: inputLayer[0] = 0.55; inputLayer[1] = 0.24; inputLayer[2] = -0.9; inputLayer[3] = 0.021; forwardPropagation(); for (int k = 0; k < OUTPUT_NODES; k++) { printf("Output[%d]: %f\n", k, outputLayer[k]); } return 0; }//main ```
                  <ruby id="bdb3f"></ruby>

                  <p id="bdb3f"><cite id="bdb3f"></cite></p>

                    <p id="bdb3f"><cite id="bdb3f"><th id="bdb3f"></th></cite></p><p id="bdb3f"></p>
                      <p id="bdb3f"><cite id="bdb3f"></cite></p>

                        <pre id="bdb3f"></pre>
                        <pre id="bdb3f"><del id="bdb3f"><thead id="bdb3f"></thead></del></pre>

                        <ruby id="bdb3f"><mark id="bdb3f"></mark></ruby><ruby id="bdb3f"></ruby>
                        <pre id="bdb3f"><pre id="bdb3f"><mark id="bdb3f"></mark></pre></pre><output id="bdb3f"></output><p id="bdb3f"></p><p id="bdb3f"></p>

                        <pre id="bdb3f"><del id="bdb3f"><progress id="bdb3f"></progress></del></pre>

                              <ruby id="bdb3f"></ruby>

                              哎呀哎呀视频在线观看