邏輯迴歸演算法-CSDN
阿新 • • 發佈:2019-02-14
Logistic Regression演算法分析及Java程式碼
只是從以下幾個方面學習,現在還只是學習階段,轉載分析別人的文章
- Cost 函式原理,及似然函式相關推導
- Sigmoid 函式的原理(為什麼用Sigmoid來做這個函式)
- 最速下降法原理 (原理分析及其他演算法)
遍歷一個矩陣:
public static void matrixTraverse(Matrix matrix) { //遍歷這個矩陣; for (int i = 0; i < matrix.getRowDimension(); i++) { for (int j = 0; j < matrix.getColumnDimension(); j++) { System.out.print(matrix.get(i, j) + "\t"); } System.out.print("\n"); } }
Java讀取檔案中的資料:
public static Matrix getData(String pathname) { // String pathname="E:\\Study\\Python_R\\Python_Books\\Machine Learning in Action\\machinelearninginaction\\Ch05\\testSet.txt"; // String pathname2="E:\\Documents\\data.txt"; //把檔案讀進來 String line = " "; List<double[]> list = new ArrayList(); // Matrix matrix=new Matrix(); try { InputStream in = new FileInputStream(new File(pathname)); InputStreamReader inreader = new InputStreamReader(in); BufferedReader br = new BufferedReader(inreader); while ((line = br.readLine()) != null) { String[] tmp = line.split("\t"); // System.out.println(tmp[-1]); double[] value = new double[3]; value[0] = 1.0; value[1] = Double.parseDouble(tmp[0]); value[2] = Double.parseDouble(tmp[1]); list.add(value); //把資料放在list中儲存; } // System.out.println("list_size:" + list.size()); Iterator<double[]> it = list.iterator(); /* while (it.hasNext()) { double[] it_next=it.next(); for(double i:it_next){ System.out.print(i+"\t"); } System.out.print("\n"); }*/ //放在二維陣列中 double[][] data = new double[list.size()][3]; for (int i = 0; it.hasNext(); i++) { double[] tmp = it.next(); data[i] = tmp; } //遍歷這個二維陣列; /* for(int i=0;i<data.length;i++){ double[] arr=data[i]; for(int j=0;j<arr.length;j++){ System.out.print(data[i][j]+"\t"); } System.out.print("\n"); }*/ Matrix matrix = new Matrix(data); return matrix; } catch (FileNotFoundException e) { e.printStackTrace(); return null; } catch (IOException e) { e.printStackTrace(); return null; } }
讀取資料檔案中的Label:
public static Matrix getLabel(String pathname) { { // String pathname="E:\\Study\\Python_R\\Python_Books\\Machine Learning in Action\\machinelearninginaction\\Ch05\\testSet.txt"; // Matrix matrix=getData(pathname); // matrixTraverse(matrix); String line = " "; List<double[]> list = new ArrayList<double[]>(); try { FileInputStream fi = new FileInputStream(new File(pathname)); InputStreamReader in = new InputStreamReader(fi); BufferedReader br = new BufferedReader(in); // System.out.println(br.readLine().getClass()); //把資料放進list while ((line = br.readLine()) != null) {//這具br.readLine原來是這樣的節奏; String[] tmp = line.split("\t"); double[] value = new double[1]; value[0] = Double.parseDouble(tmp[2]); list.add(value); // line = br.readLine(); //相當於i++,多了;這個錯誤犯的; } // System.out.println("list_size:\t" + list.size()); //把資料放進double[][] Iterator<double[]> it = list.iterator(); double[][] label = new double[list.size()][1]; for (int i = 0; it.hasNext(); i++) { double[] tmp = it.next(); label[i] = tmp; } Matrix labelMatrix = new Matrix(label); return labelMatrix; } catch (IOException e) { e.printStackTrace(); return null; } } }
演算法主要部分,梯度下降法:
/**
 * Fits logistic-regression weights by batch gradient ascent on the
 * log-likelihood: w += alpha * X^T (y - sigmoid(X w)).
 *
 * @return the learned 3-by-1 weight Matrix, or {@code null} if the data file
 *         could not be read
 */
public static Matrix gradient() {
    String pathname = "E:\\Study\\Python_R\\Python_Books\\Machine Learning in Action\\machinelearninginaction\\Ch05\\testSet.txt";
    Matrix matrixLabel = getLabel(pathname);
    Matrix matrixData = getData(pathname);
    // getData/getLabel return null on I/O failure; without this guard the
    // loop below would throw a NullPointerException.
    if (matrixData == null || matrixLabel == null) {
        return null;
    }
    // Initial weights: all ones (3-by-1 column vector).
    double[][] weight = { { 1 }, { 1 }, { 1 } };
    Matrix weightMat = new Matrix(weight);
    double alpha = 0.001; // learning-rate / step size
    int maxCycles = 500;  // number of full-batch update steps
    // Fixed off-by-one: the original started at i = 1 and therefore performed
    // only 499 of the intended 500 iterations.
    for (int i = 0; i < maxCycles; i++) {
        Matrix h = sigmoid(matrixData.times(weightMat)); // predictions in (0, 1)
        Matrix e = matrixLabel.minus(h);                 // per-sample error y - h
        // Gradient-ascent step on the log-likelihood.
        weightMat = weightMat.plus(matrixData.transpose().times(e).times(alpha));
    }
    return weightMat;
}
部落格今天先寫到這裡~something wrong happened…