Logistic regression and softmax regression

To read: https://www.cnblogs.com/Determined22/p/6362951.html

import numpy as np
from prml.linear.classifier import Classifier


class LogisticRegression(Classifier):
    """
    Logistic regression model

    y = sigmoid(X @ w)
    t ~ Bernoulli(t|y)
    """

    @staticmethod
    def _sigmoid(a):
        # numerically stable sigmoid: equivalent to 1 / (1 + exp(-a)), written via tanh to avoid overflow
        return np.tanh(a * 0.5) * 0.5 + 0.5

    def fit(self, X:np.ndarray, t:np.ndarray, max_iter:int=100):
        """
        maximum likelihood estimation of logistic regression model

        Parameters
        ----------
        X : (N, D) np.ndarray
            training data independent variable
        t : (N,) np.ndarray
            training data dependent variable
            binary 0 or 1
        max_iter : int, optional
            maximum number of parameter update iterations (the default is 100)
        """
        w = np.zeros(np.size(X, 1))
        for _ in range(max_iter):
            w_prev = np.copy(w)
            y = self._sigmoid(X @ w)
            # Newton-Raphson (IRLS) step: gradient and Hessian of the negative log-likelihood
            grad = X.T @ (y - t)
            hessian = (X.T * y * (1 - y)) @ X
            try:
                w -= np.linalg.solve(hessian, grad)
            except np.linalg.LinAlgError:
                break
            if np.allclose(w, w_prev):
                break
        self.w = w

    def proba(self, X:np.ndarray):
        """
        compute probability of input belonging class 1

        Parameters
        ----------
        X : (N, D) np.ndarray
            training data independent variable

        Returns
        -------
        (N,) np.ndarray
            probability of positive
        """
        return self._sigmoid(X @ self.w)

    def classify(self, X:np.ndarray, threshold:float=0.5):
        """
        classify input data

        Parameters
        ----------
        X : (N, D) np.ndarray
            independent variable to be classified
        threshold : float, optional
            threshold of binary classification (default is 0.5)

        Returns
        -------
        (N,) np.ndarray
            binary class for each input
        """
        return (self.proba(X) > threshold).astype(int)
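
A quick note on the fit method above: each iteration performs a Newton-Raphson (IRLS) step, solving H @ delta_w = grad with grad = X.T @ (y - t) and H = X.T @ diag(y * (1 - y)) @ X, which is exactly what the np.linalg.solve call computes. Below is a minimal usage sketch; the toy data and the bias column are made up for illustration, since the model does not add a bias term itself.

import numpy as np

# hypothetical toy data: two Gaussian blobs labelled 1 and 0
rng = np.random.RandomState(0)
X_train = np.vstack([rng.normal(2.0, size=(50, 2)), rng.normal(-2.0, size=(50, 2))])
X_train = np.hstack([np.ones((100, 1)), X_train])  # prepend a bias column by hand
t_train = np.concatenate([np.ones(50), np.zeros(50)])

model = LogisticRegression()
model.fit(X_train, t_train, max_iter=100)
print(model.proba(X_train[:3]))     # probability of class 1
print(model.classify(X_train[:3]))  # hard 0/1 predictions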

import numpy as np
from prml.linear.classifier import Classifier
from prml.preprocess.label_transformer import LabelTransformer


class SoftmaxRegression(Classifier):
    """
    Softmax regression model
    aka
    multinomial logistic regression,
    multiclass logistic regression,
    maximum entropy classifier.

    y = softmax(X @ W)
    t ~ Categorical(t|y)
    """

    @staticmethod
    def _softmax(a):
        a_max = np.max(a, axis=-1, keepdims=True)
        exp_a = np.exp(a - a_max)
        return exp_a / np.sum(exp_a, axis=-1, keepdims=True)

    def fit(self, X:np.ndarray, t:np.ndarray, max_iter:int=100, learning_rate:float=0.1):
        """
        maximum likelihood estimation of the parameter

        Parameters
        ----------
        X : (N, D) np.ndarray
            training independent variable
        t : (N,) or (N, K) np.ndarray
            training dependent variable
            in class index or one-of-k encoding
        max_iter : int, optional
            maximum number of iteration (the default is 100)
        learning_rate : float, optional
            learning rate of gradient descent (the default is 0.1)
        """
        if t.ndim == 1:
            t = LabelTransformer().encode(t)
        self.n_classes = np.size(t, 1)  # t is (N, K); K is the number of classes
        W = np.zeros((np.size(X, 1), self.n_classes))

        for _ in range(max_iter):
            W_prev = np.copy(W)
            # stochastic gradient descent: update W one training sample at a time
            for i in range(np.size(t, 0)):
                y = self._softmax(X[i] @ W)
                # per-sample gradient of the cross-entropy loss, shape (D, K)
                grad = np.outer(X[i], y - t[i])
                W -= learning_rate * grad
            # stop once a full pass over the data leaves W (almost) unchanged
            if np.allclose(W, W_prev):
                break
        self.W = W

    def proba(self, X:np.ndarray):
        """
        compute probability of input belonging each class

        Parameters
        ----------
        X : (N, D) np.ndarray
            independent variable

        Returns
        -------
        (N, K) np.ndarray
            probability of each class
        """
        return self._softmax(X @ self.W)

    def classify(self, X:np.ndarray):
        """
        classify input data

        Parameters
        ----------
        X : (N, D) np.ndarray
            independent variable to be classified

        Returns
        -------
        (N,) np.ndarray
            class index for each input
        """
        return np.argmax(self.proba(X), axis=-1)
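
A minimal usage sketch for the softmax model, again on made-up toy data (three classes given as integer labels; LabelTransformer one-hot encodes them inside fit, and a bias column is prepended by hand):

import numpy as np

# hypothetical toy data: three Gaussian blobs labelled 0, 1, 2
rng = np.random.RandomState(1)
X_train = np.vstack([rng.normal(3.0 * c, size=(30, 2)) for c in range(3)])
X_train = np.hstack([np.ones((90, 1)), X_train])  # prepend a bias column by hand
t_train = np.repeat(np.arange(3), 30)

model = SoftmaxRegression()
model.fit(X_train, t_train, max_iter=100, learning_rate=0.1)
print(model.proba(X_train[:3]).round(3))  # (N, K) class probabilities
print(model.classify(X_train[:3]))        # predicted class indices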

Reposted from blog.csdn.net/qq_38662930/article/details/103637878