How to combine Scikit-learn's GroupKFold and StratifiedKFold

Problem description

I am working with an imbalanced dataset that contains multiple observations from the same users. I want to make sure that no user appears in both the training and test sets, while still preserving the original class distribution as closely as possible. I have been trying to combine Sklearn's GroupKFold and StratifiedKFold functions, but I haven't had any luck. Does anyone have ideas on how to combine these two?
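On their own, each splitter covers only half of the requirement: GroupKFold keeps every user in a single fold but ignores the class distribution, while StratifiedKFold preserves the class proportions but may put the same user's rows in both sets. A rough sketch (the toy arrays and fold count here are made up for illustration):

import numpy as np
from sklearn.model_selection import GroupKFold, StratifiedKFold

X = np.random.rand(100, 5)
y = np.random.randint(0, 2, size=100)        # class labels, imbalanced in practice
groups = np.random.randint(0, 20, size=100)  # user id per observation

# Keeps each user in one fold, but ignores the class distribution.
for train_idx, test_idx in GroupKFold(n_splits=5).split(X, y, groups):
    pass

# Preserves class proportions, but may split one user's rows across train and test.
for train_idx, test_idx in StratifiedKFold(n_splits=5).split(X, y):
    pass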

Solution

import collections
import random

import numpy as np


def stratified_group_k_fold(X, y, groups, k, seed=None):
    """Source: https://www.kaggle.com/jakubwasikowski/stratified-group-k-fold-cross-validation"""
    labels_num = np.max(y) + 1
    # Count the label occurrences within each group and over the whole dataset.
    y_counts_per_group = collections.defaultdict(lambda: np.zeros(labels_num))
    y_distr = collections.Counter()
    for label, g in zip(y, groups):
        y_counts_per_group[g][label] += 1
        y_distr[label] += 1

    # Running per-fold label counts and the set of groups assigned to each fold.
    y_counts_per_fold = collections.defaultdict(lambda: np.zeros(labels_num))
    groups_per_fold = collections.defaultdict(set)

    def eval_y_counts_per_fold(y_counts, fold):
        # Tentatively add this group's label counts to the fold and return how
        # uneven the label distribution across all folds would become.
        y_counts_per_fold[fold] += y_counts
        std_per_label = []
        for label in range(labels_num):
            label_std = np.std([y_counts_per_fold[i][label] / y_distr[label] for i in range(k)])
            std_per_label.append(label_std)
        y_counts_per_fold[fold] -= y_counts
        return np.mean(std_per_label)

    groups_and_y_counts = list(y_counts_per_group.items())
    random.Random(seed).shuffle(groups_and_y_counts)

    # Assign groups to folds greedily, starting with the groups whose label
    # counts are most skewed, always picking the fold that keeps the overall
    # label distribution most balanced.
    for g, y_counts in sorted(groups_and_y_counts, key=lambda x: -np.std(x[1])):
        best_fold = None
        min_eval = None
        for i in range(k):
            fold_eval = eval_y_counts_per_fold(y_counts, i)
            if min_eval is None or fold_eval < min_eval:
                min_eval = fold_eval
                best_fold = i
        y_counts_per_fold[best_fold] += y_counts
        groups_per_fold[best_fold].add(g)

    # Yield train/test indices; each fold's groups form the test set, so no
    # group ever appears in both sets.
    all_groups = set(groups)
    for i in range(k):
        train_groups = all_groups - groups_per_fold[i]
        test_groups = groups_per_fold[i]

        train_indices = [i for i, g in enumerate(groups) if g in train_groups]
        test_indices = [i for i, g in enumerate(groups) if g in test_groups]

        yield train_indices, test_indices
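A minimal usage sketch (the toy arrays and k=5 below are assumptions for illustration, not part of the original answer). The generator yields index lists the way an sklearn splitter's split method does, so each user lands in exactly one test fold while the class proportions stay close to the original:

X = np.random.rand(100, 5)
y = np.random.randint(0, 2, size=100)        # integer labels starting at 0
groups = np.random.randint(0, 20, size=100)  # user id per observation

for fold, (train_idx, test_idx) in enumerate(stratified_group_k_fold(X, y, groups, k=5, seed=42)):
    X_train, X_test = X[train_idx], X[test_idx]
    y_train, y_test = y[train_idx], y[test_idx]
    # No user appears in both the training and the test set of a fold.
    assert not set(groups[train_idx]) & set(groups[test_idx])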