2

對於一項任務,我應該測試不同類型的推薦器,我必須首先實施。我一直在四處尋找一個好的圖書館來做這件事(我曾經想過Weka),並且偶然發現了Mahout。因此,我必須提出:a)我對Mahout完全陌生;b)我沒有強大的推薦系統和算法背景(否則我不會做這個課程......);c)對不起,但我遠沒有成爲世界上最好的開發者。==> 我會很感激,如果你可以使用外行術語(儘可能......):)

Mahout:基於項目的推薦器的調整後的餘弦相似度

我一直在關注一些教程(例如,this以及part2),並在基於物品和基於用戶的推薦人中獲得了一些初步結果。

但是,我對基於項目的預測並不滿意。到目前爲止,我只發現考慮到用戶的評級偏差的相似性功能。我想知道是否有像adjusted cosine similarity這樣的東西。任何提示?

+1

您可以嘗試擴展AbstractSimilarity類自己的工具 – eliasah 2015-04-02 17:57:10

+0

好的,謝謝我會仔細研究一下。我很驚訝我似乎無法找到任何有關(其他人問同樣的問題,準備使用的實現,...) – PLB 2015-04-02 19:20:29

+0

我發現這個類,現在已被棄用。你知道是否有任何方法可以找到我應該使用的東西嗎? (我使用mahout-0.9,我認爲這已被刪除!)http://archive.cloudera.com/cdh5/cdh/5/mahout-0.8-cdh5.0.0/mahout-core/org/apache/mahout/cf/taste/impl/recommender/BiasedItemBasedRecommender.html – PLB 2015-04-02 19:37:44

回答

0

下面是我創建的AdjustedCosineSimilarity的示例。您必須記住,由於sqrt計算,這將比PearsonCorrelationSimilarity慢,但會產生更好的結果。至少對於我的數據集來說結果要好得多。但是你應該做一個權衡,質量/性能,並且根據你的需求你應該使用你想要的實現。

/**
 * Item-item similarity using the <em>adjusted cosine</em> measure: each
 * co-rating is centered on the mean rating of the user who produced it
 * before the cosine between the two item rating vectors is computed.
 *
 * <p>Note: this is slower than {@code PearsonCorrelationSimilarity} because
 * every co-rating triggers a lookup of the rating user's mean preference,
 * but it typically yields better item-based results. Quality/performance is
 * a trade-off the caller should make deliberately.
 *
 * @author dmilchevski
 */
public class AdjustedCosineSimilarity extends AbstractSimilarity {

    /**
     * Creates a new unweighted {@link AdjustedCosineSimilarity}.
     *
     * @param dataModel source of the preference data
     * @throws TasteException on error accessing the data model
     */
    public AdjustedCosineSimilarity(DataModel dataModel)
      throws TasteException {
     this(dataModel, Weighting.UNWEIGHTED);
    }

    /**
     * Creates a new {@link AdjustedCosineSimilarity}.
     *
     * @param dataModel source of the preference data; must hold real
     *        preference values (not boolean "like" preferences)
     * @param weighting whether to weight the result by overlap size
     * @throws TasteException on error accessing the data model
     */
    public AdjustedCosineSimilarity(DataModel dataModel, Weighting weighting)
      throws TasteException {
     super(dataModel, weighting, true);
     Preconditions.checkArgument(dataModel.hasPreferenceValues(),
       "DataModel doesn't have preference values");
    }

    /**
     * Plain cosine over the accumulated sums. The sums are already centered
     * per rating user in {@link #itemSimilarity(long, long)}, so no further
     * centering is applied here.
     *
     * @return the adjusted cosine similarity, or {@link Double#NaN} when
     *         there is no overlap or one vector has zero magnitude
     */
    @Override
    double computeResult(int n, double sumXY, double sumX2, double sumY2, double sumXYdiff2) {
     if (n == 0) {
      return Double.NaN;
     }
     double denominator = Math.sqrt(sumX2) * Math.sqrt(sumY2);
     if (denominator == 0.0) {
      // Every co-rating equals the rating user's mean for one (or both)
      // items; similarity is undefined under this measure.
      return Double.NaN;
     }
     return sumXY / denominator;
    }

    /**
     * Computes the arithmetic mean of a user's preference values.
     *
     * @param prefs the user's preferences
     * @return the mean preference, or 0.0 for an empty array
     */
    private double averagePreference(PreferenceArray prefs) {
     int n = prefs.length();
     if (n == 0) {
      return 0.0;
     }
     double sum = 0.0;
     for (int i = 0; i < n; i++) {
      sum += prefs.getValue(i);
     }
     return sum / n;
    }

    /**
     * Computes the adjusted cosine similarity between two items by
     * sort-merge joining their user-id-sorted preference arrays and
     * centering each co-rating on the co-rating user's mean.
     *
     * @param itemID1 first item
     * @param itemID2 second item
     * @return similarity in [-1, 1], or {@link Double#NaN} when undefined
     * @throws TasteException on error accessing the data model
     */
    @Override
    public double itemSimilarity(long itemID1, long itemID2) throws TasteException {
     DataModel dataModel = getDataModel();
     PreferenceArray xPrefs = dataModel.getPreferencesForItem(itemID1);
     PreferenceArray yPrefs = dataModel.getPreferencesForItem(itemID2);
     int xLength = xPrefs.length();
     int yLength = yPrefs.length();

     if (xLength == 0 || yLength == 0) {
      return Double.NaN;
     }

     long xIndex = xPrefs.getUserID(0);
     long yIndex = yPrefs.getUserID(0);
     int xPrefIndex = 0;
     int yPrefIndex = 0;

     double sumX2 = 0.0;
     double sumY2 = 0.0;
     double sumXY = 0.0;
     double sumXYdiff2 = 0.0;
     int count = 0;

     // Sort-merge join over the two user-id-sorted preference arrays.
     // Preference inferrers and transforms don't apply here.
     while (true) {
      int compare = Long.compare(xIndex, yIndex);
      if (compare == 0) {
       // The same user (xIndex == yIndex) rated both items, so a single
       // mean lookup suffices to center both ratings.
       double x = xPrefs.getValue(xPrefIndex);
       double y = yPrefs.getValue(yPrefIndex);
       double mean = averagePreference(dataModel.getPreferencesFromUser(xIndex));

       sumXY += (x - mean) * (y - mean);
       sumX2 += (x - mean) * (x - mean);
       sumY2 += (y - mean) * (y - mean);
       double diff = x - y;
       sumXYdiff2 += diff * diff;
       count++;
      }
      if (compare <= 0) {
       if (++xPrefIndex == xLength) {
        break;
       }
       xIndex = xPrefs.getUserID(xPrefIndex);
      }
      if (compare >= 0) {
       if (++yPrefIndex == yLength) {
        break;
       }
       yIndex = yPrefs.getUserID(yPrefIndex);
      }
     }

     // The sums were centered per user while accumulating, so they are
     // passed to computeResult() as-is -- no additional centering step.
     double result = computeResult(count, sumXY, sumX2, sumY2, sumXYdiff2);

     if (!Double.isNaN(result)) {
      result = normalizeWeightResult(result, count,
        dataModel.getNumUsers());
     }
     return result;
    }

}
+0

非常感謝。我會盡量在接下來的日子裏檢查它! – PLB 2015-04-14 09:32:30