评价函数：这里仍然采用 RMSE（均方根误差）来评价预测效果。
// Evaluate.h -- RMSE evaluation of a predicted rating matrix.
#ifndef EVALUATE_H
#define EVALUATE_H

#include <cmath>
#include <vector>

using namespace std;

// Root-mean-square error between `predict` and `test`, computed only over
// cells where BOTH matrices hold a non-zero value (in this sparse encoding
// a zero entry means "no rating", so such cells are skipped).
// Returns 0.0 when the matrices share no rated cell (avoids 0/0 -> NaN).
// Parameters are taken by const reference: the original copied both whole
// matrices on every call, which is expensive for 943 x 1682 data.
double ComputeRMSE(const vector<vector<double>> &predict,
                   const vector<vector<double>> &test)
{
    int counter = 0;     // number of co-rated cells
    double sum = 0.0;    // accumulated squared error
    for (vector<vector<double>>::size_type i = 0; i < test.size(); ++i)
    {
        // Use row i's own length (the original used test[0].size(), which
        // breaks on ragged input; identical for rectangular matrices).
        for (vector<double>::size_type j = 0; j < test[i].size(); ++j)
        {
            if (predict[i][j] && test[i][j])
            {
                ++counter;
                const double diff = test[i][j] - predict[i][j];
                sum += diff * diff;   // much cheaper than pow(diff, 2)
            }
        }
    }
    if (counter == 0)
        return 0.0;       // no overlap: define the error as zero, not NaN
    return sqrt(sum / counter);
}

#endif // EVALUATE_H
最后是主程序
1#include"Evaluate.h" 2#include"ReadAndWriteData.h" 3 4#include<cmath> 5#include<algorithm> 6#include<vector> 7#include<iostream> 8 9usingnamespacestd; 10 11 12doubleInnerProduct(vector<double>A,vector<double>B)//计算两个向量的内积 13{ 14doubleres=0; 15for(vector<double>::size_typei=0;i<A.size();++i) 16{ 17res+=A[i]*B[i]; 18} 19returnres; 20} 21 22template<typenameT>//对矩阵(二维数组)进行转置操作 23vector<vector<T>>Transpose(vector<vector<T>>Matrix) 24{ 25unsignedrow=Matrix.size(); 26unsignedcol=Matrix[0].size(); 27vector<vector<T>>Trans(col,vector<T>(row,0)); 28for(unsignedi=0;i<col;++i) 29{ 30for(unsignedj=0;j<row;++j) 31{ 32Trans[i][j]=Matrix[j][i]; 33} 34} 35returnTrans; 36} 37 38vector<vector<double>>BiasedMF(vector<vector<double>>train,doublelr,doublepenalty, 39intmaxItr) 40{ 41unsignedrow=train.size(); 42unsignedcol=train[0].size(); 43//计算全局平均分 44doubleavg=0; 45intCounter=0; 46for(unsignedi=0;i<row;++i) 47{ 48for(unsignedj=0;j<col;++j) 49{ 50if(train[i][j]) 51{ 52avg+=train[i][j]; 53++Counter; 54} 55} 56} 57avg/=Counter; 58//初始化Items偏置 59vector<double>ItemsBias(col,0); 60vector<vector<double>>Transtrain=Transpose(train); 61for(unsignedi=0;i<col;++i) 62{ 63intCounter=0; 64doublesum=0; 65for(unsignedj=0;j<row;++j) 66{ 67if(Transtrain[i][j]) 68{ 69sum+=Transtrain[i][j]-avg; 70++Counter; 71} 72 73} 74ItemsBias[i]=sum/(25+Counter); 75} 76 77//初始化Users偏置 78vector<double>UsersBias(row,0); 79for(unsignedi=0;i<row;++i) 80{ 81intCounter=0; 82doublesum=0; 83for(unsignedj=0;j<col;++j) 84{ 85if(train[i][j]) 86{ 87sum+=train[i][j]-avg-ItemsBias[j]; 88++Counter; 89} 90} 91UsersBias[i]=sum/(10+Counter); 92} 93 94//初始化Users和Items对应的矩阵 95unsignedk=10; 96vector<vector<double>>predict(row,vector<double>(col,0)); 97vector<vector<double>>Users(row,vector<double>(k,0)); 98vector<vector<double>>Items(col,vector<double>(k,0)); 99 100 101//梯度下降迭代 102doublermse=100; 103intit=0; 104while(it<maxItr) 105{ 106for(unsignedi=0;i<row;++i) 107{ 108for(unsignedj=0;j<col;++j) 109{ 
110predict[i][j]=InnerProduct(Users[i],Items[j])+UsersBias[i] 111+ItemsBias[j]; 112} 113} 114doublenew_rmse=ComputeRMSE(predict,train); 115if(new_rmse<rmse) 116rmse=new_rmse; 117cout<<"第"<<it<<"次迭代:"<<endl; 118cout<<"rmseis:"<<rmse<<endl; 119for(unsignedi=0;i<row;++i) 120{ 121for(unsignedj=0;j<col;++j) 122{ 123if(train[i][j]) 124{ 125doubleerr=train[i][j]-predict[i][j]; 126//更新Useri和Itemj的因子向量 127for(unsignedt=0;t<k;++t) 128{ 129doubletmp=Users[i][t]; 130Users[i][t]+=lr*(err*Items[j][t]-penalty*Users[i][t]); 131Items[j][t]+=lr*(err*tmp-penalty*Items[j][t]); 132} 133//更新Useri和Itemj的偏差 134doubletmp=UsersBias[i]+ItemsBias[j]-avg; 135UsersBias[i]+=lr*(err-penalty*tmp); 136ItemsBias[j]+=lr*(err-penalty*tmp); 137} 138} 139} 140++it; 141} 142returnpredict; 143} 144 145intmain() 146{ 147 148stringFilePath1("E:\\Matlabcode\\recommendationsystem\\data\\movielens\\train.txt"); 149stringFilePath2("E:\\Matlabcode\\recommendationsystem\\data\\movielens\\test.txt"); 150 151introw=943; 152intcol=1682; 153vector<vector<double>>train=txtRead<double>(FilePath1,row,col); 154vector<vector<double>>predict=BiasedMF(train,0.001,0.003,100); 155txtWrite(predict,"predict.txt"); 156vector<vector<double>>test=txtRead<double>(FilePath2,462,1591); 157doublermse=ComputeRMSE(predict,test); 158cout<<"ProbeRMSEis"<<rmse<<endl; 159return0; 160} |
4.运行
下面是运行过程中的截图。可以看出，运行过程中 RMSE 逐渐减小，表示预测结果与真实的历史评分矩阵的差别在缩小。由于时间关系没有运行完；根据以前在 Matlab 上的运行结果，最终的 RMSE 应该可以达到 0.92 左右。当然这只是在训练集上的 RMSE，最终效果要看在测试集上的 RMSE，会比上一篇讲到的基于用户的协同过滤好一些。另外，用户和 Items 因子向量的初始化会对结果有一定影响：本文中全部初始化为 0，其实不太好，有兴趣的读者可以尝试用其他分布函数来初始化，但总体上不会有太大的影响。有什么问题可以联系我。