You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

551 lines
20 KiB

4 years ago
  1. # Author: Mathieu Blondel <mathieu@mblondel.org>
  2. # Arnaud Joly <a.joly@ulg.ac.be>
  3. # Maheshakya Wijewardena <maheshakya.10@cse.mrt.ac.lk>
  4. # License: BSD 3 clause
  5. from __future__ import division
  6. import warnings
  7. import numpy as np
  8. import scipy.sparse as sp
  9. from .base import BaseEstimator, ClassifierMixin, RegressorMixin
  10. from .utils import check_random_state
  11. from .utils.validation import _num_samples
  12. from .utils.validation import check_array
  13. from .utils.validation import check_consistent_length
  14. from .utils.validation import check_is_fitted
  15. from .utils.random import random_choice_csc
  16. from .utils.stats import _weighted_percentile
  17. from .utils.multiclass import class_distribution
  18. class DummyClassifier(BaseEstimator, ClassifierMixin):
  19. """
  20. DummyClassifier is a classifier that makes predictions using simple rules.
  21. This classifier is useful as a simple baseline to compare with other
  22. (real) classifiers. Do not use it for real problems.
  23. Read more in the :ref:`User Guide <dummy_estimators>`.
  24. Parameters
  25. ----------
  26. strategy : str, default="stratified"
  27. Strategy to use to generate predictions.
  28. * "stratified": generates predictions by respecting the training
  29. set's class distribution.
  30. * "most_frequent": always predicts the most frequent label in the
  31. training set.
  32. * "prior": always predicts the class that maximizes the class prior
  33. (like "most_frequent") and ``predict_proba`` returns the class prior.
  34. * "uniform": generates predictions uniformly at random.
  35. * "constant": always predicts a constant label that is provided by
  36. the user. This is useful for metrics that evaluate a non-majority
  37. class
  38. .. versionadded:: 0.17
  39. Dummy Classifier now supports prior fitting strategy using
  40. parameter *prior*.
  41. random_state : int, RandomState instance or None, optional, default=None
  42. If int, random_state is the seed used by the random number generator;
  43. If RandomState instance, random_state is the random number generator;
  44. If None, the random number generator is the RandomState instance used
  45. by `np.random`.
  46. constant : int or str or array of shape = [n_outputs]
  47. The explicit constant as predicted by the "constant" strategy. This
  48. parameter is useful only for the "constant" strategy.
  49. Attributes
  50. ----------
  51. classes_ : array or list of array of shape = [n_classes]
  52. Class labels for each output.
  53. n_classes_ : array or list of array of shape = [n_classes]
  54. Number of label for each output.
  55. class_prior_ : array or list of array of shape = [n_classes]
  56. Probability of each class for each output.
  57. n_outputs_ : int,
  58. Number of outputs.
  59. outputs_2d_ : bool,
  60. True if the output at fit is 2d, else false.
  61. sparse_output_ : bool,
  62. True if the array returned from predict is to be in sparse CSC format.
  63. Is automatically set to True if the input y is passed in sparse format.
  64. """
  65. def __init__(self, strategy="stratified", random_state=None,
  66. constant=None):
  67. self.strategy = strategy
  68. self.random_state = random_state
  69. self.constant = constant
  70. def fit(self, X, y, sample_weight=None):
  71. """Fit the random classifier.
  72. Parameters
  73. ----------
  74. X : {array-like, object with finite length or shape}
  75. Training data, requires length = n_samples
  76. y : array-like, shape = [n_samples] or [n_samples, n_outputs]
  77. Target values.
  78. sample_weight : array-like of shape = [n_samples], optional
  79. Sample weights.
  80. Returns
  81. -------
  82. self : object
  83. """
  84. allowed_strategies = ("most_frequent", "stratified", "uniform",
  85. "constant", "prior")
  86. if self.strategy not in allowed_strategies:
  87. raise ValueError("Unknown strategy type: %s, expected one of %s."
  88. % (self.strategy, allowed_strategies))
  89. if self.strategy == "uniform" and sp.issparse(y):
  90. y = y.toarray()
  91. warnings.warn('A local copy of the target data has been converted '
  92. 'to a numpy array. Predicting on sparse target data '
  93. 'with the uniform strategy would not save memory '
  94. 'and would be slower.',
  95. UserWarning)
  96. self.sparse_output_ = sp.issparse(y)
  97. if not self.sparse_output_:
  98. y = np.atleast_1d(y)
  99. self.output_2d_ = y.ndim == 2
  100. if y.ndim == 1:
  101. y = np.reshape(y, (-1, 1))
  102. self.n_outputs_ = y.shape[1]
  103. check_consistent_length(X, y, sample_weight)
  104. if self.strategy == "constant":
  105. if self.constant is None:
  106. raise ValueError("Constant target value has to be specified "
  107. "when the constant strategy is used.")
  108. else:
  109. constant = np.reshape(np.atleast_1d(self.constant), (-1, 1))
  110. if constant.shape[0] != self.n_outputs_:
  111. raise ValueError("Constant target value should have "
  112. "shape (%d, 1)." % self.n_outputs_)
  113. (self.classes_,
  114. self.n_classes_,
  115. self.class_prior_) = class_distribution(y, sample_weight)
  116. if (self.strategy == "constant" and
  117. any(constant[k] not in self.classes_[k]
  118. for k in range(self.n_outputs_))):
  119. # Checking in case of constant strategy if the constant
  120. # provided by the user is in y.
  121. raise ValueError("The constant target value must be "
  122. "present in training data")
  123. if self.n_outputs_ == 1 and not self.output_2d_:
  124. self.n_classes_ = self.n_classes_[0]
  125. self.classes_ = self.classes_[0]
  126. self.class_prior_ = self.class_prior_[0]
  127. return self
  128. def predict(self, X):
  129. """Perform classification on test vectors X.
  130. Parameters
  131. ----------
  132. X : {array-like, object with finite length or shape}
  133. Training data, requires length = n_samples
  134. Returns
  135. -------
  136. y : array, shape = [n_samples] or [n_samples, n_outputs]
  137. Predicted target values for X.
  138. """
  139. check_is_fitted(self, 'classes_')
  140. # numpy random_state expects Python int and not long as size argument
  141. # under Windows
  142. n_samples = _num_samples(X)
  143. rs = check_random_state(self.random_state)
  144. n_classes_ = self.n_classes_
  145. classes_ = self.classes_
  146. class_prior_ = self.class_prior_
  147. constant = self.constant
  148. if self.n_outputs_ == 1:
  149. # Get same type even for self.n_outputs_ == 1
  150. n_classes_ = [n_classes_]
  151. classes_ = [classes_]
  152. class_prior_ = [class_prior_]
  153. constant = [constant]
  154. # Compute probability only once
  155. if self.strategy == "stratified":
  156. proba = self.predict_proba(X)
  157. if self.n_outputs_ == 1:
  158. proba = [proba]
  159. if self.sparse_output_:
  160. class_prob = None
  161. if self.strategy in ("most_frequent", "prior"):
  162. classes_ = [np.array([cp.argmax()]) for cp in class_prior_]
  163. elif self.strategy == "stratified":
  164. class_prob = class_prior_
  165. elif self.strategy == "uniform":
  166. raise ValueError("Sparse target prediction is not "
  167. "supported with the uniform strategy")
  168. elif self.strategy == "constant":
  169. classes_ = [np.array([c]) for c in constant]
  170. y = random_choice_csc(n_samples, classes_, class_prob,
  171. self.random_state)
  172. else:
  173. if self.strategy in ("most_frequent", "prior"):
  174. y = np.tile([classes_[k][class_prior_[k].argmax()] for
  175. k in range(self.n_outputs_)], [n_samples, 1])
  176. elif self.strategy == "stratified":
  177. y = np.vstack(classes_[k][proba[k].argmax(axis=1)] for
  178. k in range(self.n_outputs_)).T
  179. elif self.strategy == "uniform":
  180. ret = [classes_[k][rs.randint(n_classes_[k], size=n_samples)]
  181. for k in range(self.n_outputs_)]
  182. y = np.vstack(ret).T
  183. elif self.strategy == "constant":
  184. y = np.tile(self.constant, (n_samples, 1))
  185. if self.n_outputs_ == 1 and not self.output_2d_:
  186. y = np.ravel(y)
  187. return y
  188. def predict_proba(self, X):
  189. """
  190. Return probability estimates for the test vectors X.
  191. Parameters
  192. ----------
  193. X : {array-like, object with finite length or shape}
  194. Training data, requires length = n_samples
  195. Returns
  196. -------
  197. P : array-like or list of array-lke of shape = [n_samples, n_classes]
  198. Returns the probability of the sample for each class in
  199. the model, where classes are ordered arithmetically, for each
  200. output.
  201. """
  202. check_is_fitted(self, 'classes_')
  203. # numpy random_state expects Python int and not long as size argument
  204. # under Windows
  205. n_samples = _num_samples(X)
  206. rs = check_random_state(self.random_state)
  207. n_classes_ = self.n_classes_
  208. classes_ = self.classes_
  209. class_prior_ = self.class_prior_
  210. constant = self.constant
  211. if self.n_outputs_ == 1 and not self.output_2d_:
  212. # Get same type even for self.n_outputs_ == 1
  213. n_classes_ = [n_classes_]
  214. classes_ = [classes_]
  215. class_prior_ = [class_prior_]
  216. constant = [constant]
  217. P = []
  218. for k in range(self.n_outputs_):
  219. if self.strategy == "most_frequent":
  220. ind = class_prior_[k].argmax()
  221. out = np.zeros((n_samples, n_classes_[k]), dtype=np.float64)
  222. out[:, ind] = 1.0
  223. elif self.strategy == "prior":
  224. out = np.ones((n_samples, 1)) * class_prior_[k]
  225. elif self.strategy == "stratified":
  226. out = rs.multinomial(1, class_prior_[k], size=n_samples)
  227. elif self.strategy == "uniform":
  228. out = np.ones((n_samples, n_classes_[k]), dtype=np.float64)
  229. out /= n_classes_[k]
  230. elif self.strategy == "constant":
  231. ind = np.where(classes_[k] == constant[k])
  232. out = np.zeros((n_samples, n_classes_[k]), dtype=np.float64)
  233. out[:, ind] = 1.0
  234. P.append(out)
  235. if self.n_outputs_ == 1 and not self.output_2d_:
  236. P = P[0]
  237. return P
  238. def predict_log_proba(self, X):
  239. """
  240. Return log probability estimates for the test vectors X.
  241. Parameters
  242. ----------
  243. X : {array-like, object with finite length or shape}
  244. Training data, requires length = n_samples
  245. Returns
  246. -------
  247. P : array-like or list of array-like of shape = [n_samples, n_classes]
  248. Returns the log probability of the sample for each class in
  249. the model, where classes are ordered arithmetically for each
  250. output.
  251. """
  252. proba = self.predict_proba(X)
  253. if self.n_outputs_ == 1:
  254. return np.log(proba)
  255. else:
  256. return [np.log(p) for p in proba]
  257. def score(self, X, y, sample_weight=None):
  258. """Returns the mean accuracy on the given test data and labels.
  259. In multi-label classification, this is the subset accuracy
  260. which is a harsh metric since you require for each sample that
  261. each label set be correctly predicted.
  262. Parameters
  263. ----------
  264. X : {array-like, None}
  265. Test samples with shape = (n_samples, n_features) or
  266. None. Passing None as test samples gives the same result
  267. as passing real test samples, since DummyClassifier
  268. operates independently of the sampled observations.
  269. y : array-like, shape = (n_samples) or (n_samples, n_outputs)
  270. True labels for X.
  271. sample_weight : array-like, shape = [n_samples], optional
  272. Sample weights.
  273. Returns
  274. -------
  275. score : float
  276. Mean accuracy of self.predict(X) wrt. y.
  277. """
  278. if X is None:
  279. X = np.zeros(shape=(len(y), 1))
  280. return super(DummyClassifier, self).score(X, y, sample_weight)
  281. class DummyRegressor(BaseEstimator, RegressorMixin):
  282. """
  283. DummyRegressor is a regressor that makes predictions using
  284. simple rules.
  285. This regressor is useful as a simple baseline to compare with other
  286. (real) regressors. Do not use it for real problems.
  287. Read more in the :ref:`User Guide <dummy_estimators>`.
  288. Parameters
  289. ----------
  290. strategy : str
  291. Strategy to use to generate predictions.
  292. * "mean": always predicts the mean of the training set
  293. * "median": always predicts the median of the training set
  294. * "quantile": always predicts a specified quantile of the training set,
  295. provided with the quantile parameter.
  296. * "constant": always predicts a constant value that is provided by
  297. the user.
  298. constant : int or float or array of shape = [n_outputs]
  299. The explicit constant as predicted by the "constant" strategy. This
  300. parameter is useful only for the "constant" strategy.
  301. quantile : float in [0.0, 1.0]
  302. The quantile to predict using the "quantile" strategy. A quantile of
  303. 0.5 corresponds to the median, while 0.0 to the minimum and 1.0 to the
  304. maximum.
  305. Attributes
  306. ----------
  307. constant_ : float or array of shape [n_outputs]
  308. Mean or median or quantile of the training targets or constant value
  309. given by the user.
  310. n_outputs_ : int,
  311. Number of outputs.
  312. outputs_2d_ : bool,
  313. True if the output at fit is 2d, else false.
  314. """
  315. def __init__(self, strategy="mean", constant=None, quantile=None):
  316. self.strategy = strategy
  317. self.constant = constant
  318. self.quantile = quantile
  319. def fit(self, X, y, sample_weight=None):
  320. """Fit the random regressor.
  321. Parameters
  322. ----------
  323. X : {array-like, object with finite length or shape}
  324. Training data, requires length = n_samples
  325. y : array-like, shape = [n_samples] or [n_samples, n_outputs]
  326. Target values.
  327. sample_weight : array-like of shape = [n_samples], optional
  328. Sample weights.
  329. Returns
  330. -------
  331. self : object
  332. """
  333. allowed_strategies = ("mean", "median", "quantile", "constant")
  334. if self.strategy not in allowed_strategies:
  335. raise ValueError("Unknown strategy type: %s, expected one of %s."
  336. % (self.strategy, allowed_strategies))
  337. y = check_array(y, ensure_2d=False)
  338. if len(y) == 0:
  339. raise ValueError("y must not be empty.")
  340. self.output_2d_ = y.ndim == 2
  341. if y.ndim == 1:
  342. y = np.reshape(y, (-1, 1))
  343. self.n_outputs_ = y.shape[1]
  344. check_consistent_length(X, y, sample_weight)
  345. if self.strategy == "mean":
  346. self.constant_ = np.average(y, axis=0, weights=sample_weight)
  347. elif self.strategy == "median":
  348. if sample_weight is None:
  349. self.constant_ = np.median(y, axis=0)
  350. else:
  351. self.constant_ = [_weighted_percentile(y[:, k], sample_weight,
  352. percentile=50.)
  353. for k in range(self.n_outputs_)]
  354. elif self.strategy == "quantile":
  355. if self.quantile is None or not np.isscalar(self.quantile):
  356. raise ValueError("Quantile must be a scalar in the range "
  357. "[0.0, 1.0], but got %s." % self.quantile)
  358. percentile = self.quantile * 100.0
  359. if sample_weight is None:
  360. self.constant_ = np.percentile(y, axis=0, q=percentile)
  361. else:
  362. self.constant_ = [_weighted_percentile(y[:, k], sample_weight,
  363. percentile=percentile)
  364. for k in range(self.n_outputs_)]
  365. elif self.strategy == "constant":
  366. if self.constant is None:
  367. raise TypeError("Constant target value has to be specified "
  368. "when the constant strategy is used.")
  369. self.constant = check_array(self.constant,
  370. accept_sparse=['csr', 'csc', 'coo'],
  371. ensure_2d=False, ensure_min_samples=0)
  372. if self.output_2d_ and self.constant.shape[0] != y.shape[1]:
  373. raise ValueError(
  374. "Constant target value should have "
  375. "shape (%d, 1)." % y.shape[1])
  376. self.constant_ = self.constant
  377. self.constant_ = np.reshape(self.constant_, (1, -1))
  378. return self
  379. def predict(self, X, return_std=False):
  380. """
  381. Perform classification on test vectors X.
  382. Parameters
  383. ----------
  384. X : {array-like, object with finite length or shape}
  385. Training data, requires length = n_samples
  386. return_std : boolean, optional
  387. Whether to return the standard deviation of posterior prediction.
  388. All zeros in this case.
  389. Returns
  390. -------
  391. y : array, shape = [n_samples] or [n_samples, n_outputs]
  392. Predicted target values for X.
  393. y_std : array, shape = [n_samples] or [n_samples, n_outputs]
  394. Standard deviation of predictive distribution of query points.
  395. """
  396. check_is_fitted(self, "constant_")
  397. n_samples = _num_samples(X)
  398. y = np.full((n_samples, self.n_outputs_), self.constant_,
  399. dtype=np.array(self.constant_).dtype)
  400. y_std = np.zeros((n_samples, self.n_outputs_))
  401. if self.n_outputs_ == 1 and not self.output_2d_:
  402. y = np.ravel(y)
  403. y_std = np.ravel(y_std)
  404. return (y, y_std) if return_std else y
  405. def score(self, X, y, sample_weight=None):
  406. """Returns the coefficient of determination R^2 of the prediction.
  407. The coefficient R^2 is defined as (1 - u/v), where u is the residual
  408. sum of squares ((y_true - y_pred) ** 2).sum() and v is the total
  409. sum of squares ((y_true - y_true.mean()) ** 2).sum().
  410. The best possible score is 1.0 and it can be negative (because the
  411. model can be arbitrarily worse). A constant model that always
  412. predicts the expected value of y, disregarding the input features,
  413. would get a R^2 score of 0.0.
  414. Parameters
  415. ----------
  416. X : {array-like, None}
  417. Test samples with shape = (n_samples, n_features) or None.
  418. For some estimators this may be a
  419. precomputed kernel matrix instead, shape = (n_samples,
  420. n_samples_fitted], where n_samples_fitted is the number of
  421. samples used in the fitting for the estimator.
  422. Passing None as test samples gives the same result
  423. as passing real test samples, since DummyRegressor
  424. operates independently of the sampled observations.
  425. y : array-like, shape = (n_samples) or (n_samples, n_outputs)
  426. True values for X.
  427. sample_weight : array-like, shape = [n_samples], optional
  428. Sample weights.
  429. Returns
  430. -------
  431. score : float
  432. R^2 of self.predict(X) wrt. y.
  433. """
  434. if X is None:
  435. X = np.zeros(shape=(len(y), 1))
  436. return super(DummyRegressor, self).score(X, y, sample_weight)