
"""
Contingency table functions (:mod:`scipy.stats.contingency`)
============================================================

Functions for creating and analyzing contingency tables.

.. currentmodule:: scipy.stats.contingency

.. autosummary::
   :toctree: generated/

   chi2_contingency
   relative_risk
   odds_ratio
   crosstab
   association

   expected_freq
   margins

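A typical workflow is to tabulate raw observations with `crosstab` and then
test the resulting table with `chi2_contingency`.  A minimal sketch (the
lists ``x`` and ``y`` below are made-up labels, used only for illustration):

>>> from scipy.stats.contingency import crosstab, chi2_contingency
>>> x = ["A", "A", "A", "B", "B", "B"]
>>> y = ["yes", "no", "yes", "yes", "no", "no"]
>>> table = crosstab(x, y).count
>>> table
array([[1, 2],
       [2, 1]])
>>> chi2_contingency(table, correction=False).dof
1
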
"""

from functools import reduce
import math

import numpy as np

from ._stats_py import power_divergence, _untabulate
from ._relative_risk import relative_risk
from ._crosstab import crosstab
from ._odds_ratio import odds_ratio
from scipy._lib._bunch import _make_tuple_bunch
from scipy import stats

__all__ = ['margins', 'expected_freq', 'chi2_contingency', 'crosstab',
           'association', 'relative_risk', 'odds_ratio']


def margins(a):
    """Return a list of the marginal sums of the array `a`.

Parameters
----------
a : ndarray
    The array for which to compute the marginal sums.

Returns
-------
margsums : list of ndarrays
    A list of length `a.ndim`.  `margsums[k]` is the result
    of summing `a` over all axes except `k`; it has the same
    number of dimensions as `a`, but the length of each axis
    except axis `k` will be 1.

Examples
--------
>>> import numpy as np
>>> from scipy.stats.contingency import margins

>>> a = np.arange(12).reshape(2, 6)
>>> a
array([[ 0,  1,  2,  3,  4,  5],
       [ 6,  7,  8,  9, 10, 11]])
>>> m0, m1 = margins(a)
>>> m0
array([[15],
       [51]])
>>> m1
array([[ 6,  8, 10, 12, 14, 16]])

>>> b = np.arange(24).reshape(2,3,4)
>>> m0, m1, m2 = margins(b)
>>> m0
array([[[ 66]],
       [[210]]])
>>> m1
array([[[ 60],
        [ 92],
        [124]]])
>>> m2
array([[[60, 66, 72, 78]]])
)listrangendimnpapply_over_axessumappend)amargsumsrangedkjmargs         J/var/www/html/venv/lib/python3.13/site-packages/scipy/stats/contingency.pyr   r   '   sj    X H%- F!!"&&!-JA6a-JK  O .Ks   		A:A:c                     [         R                  " U [         R                  S9n [        U 5      nU R                  n[        [         R                  U5      U R                  5       US-
  -  -  nU$ )ac  
Compute the expected frequencies from a contingency table.

Given an n-dimensional contingency table of observed frequencies,
compute the expected frequencies for the table based on the marginal
sums under the assumption that the groups associated with each
dimension are independent.
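
For a two-dimensional table this is the familiar rule
``expected[i, j] = (row i total) * (column j total) / (grand total)``.  For
example, the ``(0, 0)`` entry of the table in the Examples section below is
``40 * 30 / 100 = 12``.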

Parameters
----------
observed : array_like
    The table of observed frequencies.  (While this function can handle
    a 1-D array, that case is trivial.  Generally `observed` is at
    least 2-D.)

Returns
-------
expected : ndarray of float64
    The expected frequencies, based on the marginal sums of the table.
    Same shape as `observed`.

Examples
--------
>>> import numpy as np
>>> from scipy.stats.contingency import expected_freq
>>> observed = np.array([[10, 10, 20],[20, 20, 20]])
>>> expected_freq(observed)
array([[ 12.,  12.,  16.],
       [ 18.,  18.,  24.]])

    """
    # Typically `observed` is an integer array. If `observed` has a large
    # number of dimensions or holds large values, some of the following
    # computations may overflow, so we first switch to floating point.
    observed = np.asarray(observed, dtype=np.float64)

    # Create a list of the marginal sums.
    margsums = margins(observed)

    # Create the array of expected frequencies.  The shapes of the
    # marginal sums returned by apply_over_axes() are exactly what is
    # needed for broadcasting in the following product.
    d = observed.ndim
    expected = reduce(np.multiply, margsums) / observed.sum()**(d - 1)
    return expected


Chi2ContingencyResult = _make_tuple_bunch(
    'Chi2ContingencyResult',
    ['statistic', 'pvalue', 'dof', 'expected_freq'], []
)


def chi2_contingency(observed, correction=True, lambda_=None, *, method=None):
    """Chi-square test of independence of variables in a contingency table.

This function computes the chi-square statistic and p-value for the
hypothesis test of independence of the observed frequencies in the
contingency table [1]_ `observed`.  The expected frequencies are computed
based on the marginal sums under the assumption of independence; see
`scipy.stats.contingency.expected_freq`.  The number of degrees of
freedom is (expressed using numpy functions and attributes)::

    dof = observed.size - sum(observed.shape) + observed.ndim - 1

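For instance, for a 2x3 table this evaluates to ``6 - (2 + 3) + 2 - 1 = 2``
degrees of freedom (compare ``res.dof`` in the Examples below).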

Parameters
----------
observed : array_like
    The contingency table. The table contains the observed frequencies
    (i.e. number of occurrences) in each category.  In the two-dimensional
    case, the table is often described as an "R x C table".
correction : bool, optional
    If True, *and* the degrees of freedom is 1, apply Yates' correction
    for continuity.  The effect of the correction is to adjust each
    observed value by 0.5 towards the corresponding expected value.
lambda_ : float or str, optional
    By default, the statistic computed in this test is Pearson's
    chi-squared statistic [2]_.  `lambda_` allows a statistic from the
    Cressie-Read power divergence family [3]_ to be used instead.  See
    `scipy.stats.power_divergence` for details.
method : ResamplingMethod, optional
    Defines the method used to compute the p-value. Compatible only with
    `correction=False`,  default `lambda_`, and two-way tables.
    If `method` is an instance of `PermutationMethod`/`MonteCarloMethod`,
    the p-value is computed using
    `scipy.stats.permutation_test`/`scipy.stats.monte_carlo_test` with the
    provided configuration options and other appropriate settings.
    Otherwise, the p-value is computed as documented in the notes.
    Note that if `method` is an instance of `MonteCarloMethod`, the ``rvs``
    attribute must be left unspecified; Monte Carlo samples are always drawn
    using the ``rvs`` method of `scipy.stats.random_table`.

    .. versionadded:: 1.15.0


Returns
-------
res : Chi2ContingencyResult
    An object containing attributes:

    statistic : float
        The test statistic.
    pvalue : float
        The p-value of the test.
    dof : int
        The degrees of freedom. NaN if `method` is not ``None``.
    expected_freq : ndarray, same shape as `observed`
        The expected frequencies, based on the marginal sums of the table.

See Also
--------
scipy.stats.contingency.expected_freq
scipy.stats.fisher_exact
scipy.stats.chisquare
scipy.stats.power_divergence
scipy.stats.barnard_exact
scipy.stats.boschloo_exact
:ref:`hypothesis_chi2_contingency` : Extended example

Notes
-----
An often quoted guideline for the validity of this calculation is that
the test should be used only if the observed and expected frequencies
in each cell are at least 5.
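Since the expected frequencies are returned as the ``expected_freq``
attribute of the result, this can be checked after calling the function,
e.g. by inspecting the minimum of that array.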

This is a test for the independence of different categories of a
population. The test is only meaningful when the dimension of
`observed` is two or more.  Applying the test to a one-dimensional
table will always result in `expected` equal to `observed` and a
chi-square statistic equal to 0.
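
For example::

    >>> from scipy.stats import chi2_contingency
    >>> res = chi2_contingency([10, 20, 30])  # a one-dimensional table
    >>> res.statistic, res.pvalue, res.dof
    (0.0, 1.0, 0)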

This function does not handle masked arrays, because the calculation
does not make sense with missing values.

Like `scipy.stats.chisquare`, this function computes a chi-square
statistic; the convenience this function provides is to figure out the
expected frequencies and degrees of freedom from the given contingency
table. If these were already known, and if the Yates' correction was not
required, one could use `scipy.stats.chisquare`.  That is, if one calls::

    res = chi2_contingency(obs, correction=False)

then the following is true::

    (res.statistic, res.pvalue) == stats.chisquare(obs.ravel(),
                                                   f_exp=res.expected_freq.ravel(),
                                                   ddof=obs.size - 1 - res.dof)

The `lambda_` argument was added in version 0.13.0 of scipy.

References
----------
.. [1] "Contingency table",
       https://en.wikipedia.org/wiki/Contingency_table
.. [2] "Pearson's chi-squared test",
       https://en.wikipedia.org/wiki/Pearson%27s_chi-squared_test
.. [3] Cressie, N. and Read, T. R. C., "Multinomial Goodness-of-Fit
       Tests", J. Royal Stat. Soc. Series B, Vol. 46, No. 3 (1984),
       pp. 440-464.

Examples
--------
A two-way example (2 x 3):

>>> import numpy as np
>>> from scipy.stats import chi2_contingency
>>> obs = np.array([[10, 10, 20], [20, 20, 20]])
>>> res = chi2_contingency(obs)
>>> res.statistic
2.7777777777777777
>>> res.pvalue
0.24935220877729619
>>> res.dof
2
>>> res.expected_freq
array([[ 12.,  12.,  16.],
       [ 18.,  18.,  24.]])

Perform the test using the log-likelihood ratio (i.e. the "G-test")
instead of Pearson's chi-squared statistic.

>>> res = chi2_contingency(obs, lambda_="log-likelihood")
>>> res.statistic
2.7688587616781319
>>> res.pvalue
0.25046668010954165

A four-way example (2 x 2 x 2 x 2):

>>> obs = np.array(
...     [[[[12, 17],
...        [11, 16]],
...       [[11, 12],
...        [15, 16]]],
...      [[[23, 15],
...        [30, 22]],
...       [[14, 17],
...        [15, 16]]]])
>>> res = chi2_contingency(obs)
>>> res.statistic
8.7584514426741897
>>> res.pvalue
0.64417725029295503

When the sum of the elements in a two-way table is small, the p-value
produced by the default asymptotic approximation may be inaccurate.
Consider passing a `PermutationMethod` or `MonteCarloMethod` as the
`method` parameter with `correction=False`.

>>> from scipy.stats import PermutationMethod
>>> obs = np.asarray([[12, 3],
...                   [17, 16]])
>>> res = chi2_contingency(obs, correction=False)
>>> ref = chi2_contingency(obs, correction=False, method=PermutationMethod())
>>> res.pvalue, ref.pvalue
(0.0614122539870913, 0.1074)  # may vary

For a more detailed example, see :ref:`hypothesis_chi2_contingency`.

    """
    observed = np.asarray(observed)
    if np.any(observed < 0):
        raise ValueError("All values in `observed` must be nonnegative.")
    if observed.size == 0:
        raise ValueError("No data; `observed` has size 0.")

    expected = expected_freq(observed)
    if np.any(expected == 0):
        # Include one of the positions where expected is zero in
        # the exception message.
        zeropos = list(zip(*np.nonzero(expected == 0)))[0]
        raise ValueError("The internally computed table of expected "
                         f"frequencies has a zero element at {zeropos}.")

    if method is not None:
        # Resampling-based p-value (PermutationMethod / MonteCarloMethod).
        return _chi2_resampling_method(observed, expected, correction,
                                       lambda_, method)

    # The degrees of freedom
    dof = expected.size - sum(expected.shape) + expected.ndim - 1

    if dof == 0:
        # Degenerate case; this occurs when `observed` is 1-D (or, more
        # generally, has only one nontrivial dimension).  `expected` then
        # equals `observed`, so chi2 is 0 and the p-value is 1.
        chi2 = 0.0
        p = 1.0
    else:
        if dof == 1 and correction:
            # Adjust `observed` according to Yates' correction for
            # continuity; the magnitude of the adjustment never exceeds
            # the difference between observed and expected.
            diff = expected - observed
            direction = np.sign(diff)
            magnitude = np.minimum(0.5, np.abs(diff))
            observed = observed + magnitude * direction

        chi2, p = power_divergence(observed, expected,
                                   ddof=observed.size - 1 - dof, axis=None,
                                   lambda_=lambda_)

    return Chi2ContingencyResult(chi2, p, dof, expected)


# The three private helpers below implement the `method` argument of
# `chi2_contingency`.  They are reconstructed from the garbled source;
# exact private names and minor details may differ from the released
# SciPy sources.

def _chi2_resampling_method(observed, expected, correction, lambda_, method):
    if observed.ndim != 2:
        message = "Use of `method` is only compatible with two-way tables."
        raise ValueError(message)
    if correction:
        message = (f"`correction={correction!r}` is not compatible with "
                   f"`method={method!r}`.")
        raise ValueError(message)
    if lambda_ is not None:
        message = (f"`lambda_={lambda_!r}` is not compatible with "
                   f"`method={method!r}`.")
        raise ValueError(message)

    if isinstance(method, stats.PermutationMethod):
        res = _chi2_permutation_method(observed, expected, method)
    elif isinstance(method, stats.MonteCarloMethod):
        res = _chi2_monte_carlo_method(observed, expected, method)
    else:
        message = (f"`method={method!r}` not recognized; if provided, "
                   "`method` must be an instance of `PermutationMethod` or "
                   "`MonteCarloMethod`.")
        raise ValueError(message)

    # Degrees of freedom are not meaningful for a resampled null
    # distribution, so NaN is reported.
    return Chi2ContingencyResult(res.statistic, res.pvalue, np.nan, expected)


def _chi2_permutation_method(observed, expected, method):
    # Permute the pairing of the two untabulated variables and re-tabulate.
    x, y = _untabulate(observed)

    def statistic(x):
        table = crosstab(x, y).count
        return np.sum((table - expected)**2 / expected)

    return stats.permutation_test((x,), statistic,
                                  permutation_type='pairings',
                                  alternative='greater', **method._asdict())


def _chi2_monte_carlo_method(observed, expected, method):
    # Draw random tables with the same margins as `observed` using
    # `scipy.stats.random_table`.
    kwargs = method._asdict()
    if kwargs.pop('rvs', None) is not None:
        message = ("If the `method` argument of `chi2_contingency` is an "
                   "instance of `MonteCarloMethod`, its `rvs` attribute must "
                   "be left unspecified. Use the `MonteCarloMethod` `rng` "
                   "argument to control the random state.")
        raise ValueError(message)
    rng = np.random.default_rng(kwargs.pop('rng', None))

    rowsums, colsums = margins(observed)
    X = stats.random_table(rowsums.ravel(), colsums.ravel(), seed=rng)

    def rvs(size):
        n_resamples = size[0]
        return X.rvs(n_resamples).reshape(size)

    expected = expected.ravel()

    def statistic(table, axis):
        return np.sum((table - expected)**2 / expected, axis=axis)

    return stats.monte_carlo_test(observed.ravel(), rvs, statistic,
                                  alternative='greater', **kwargs)


def association(observed, method="cramer", correction=False, lambda_=None):
    """Calculates degree of association between two nominal variables.

The function provides the option for computing one of three measures of
association between two nominal variables from the data given in a 2d
contingency table: Tschuprow's T, Pearson's Contingency Coefficient
and Cramer's V.

Parameters
----------
observed : array_like
    The array of observed values.
method : {"cramer", "tschuprow", "pearson"} (default = "cramer")
    The association test statistic.
correction : bool, optional
    Inherited from `scipy.stats.contingency.chi2_contingency()`
lambda_ : float or str, optional
    Inherited from `scipy.stats.contingency.chi2_contingency()`

Returns
-------
statistic : float
    Value of the test statistic

Notes
-----
Cramer's V, Tschuprow's T and Pearson's Contingency Coefficient all
measure the degree to which two nominal or ordinal variables are related,
or the level of their association. This differs from correlation, although
the two are often mistakenly treated as equivalent: correlation measures
in what way two variables are related, whereas association measures how
strongly they are related. Association therefore does not distinguish a
dependent from an independent variable and is closely tied to a test of
independence. A value of 1.0 indicates perfect association, and 0.0 means
the variables have no association.

Both the Cramer's V and Tschuprow's T are extensions of the phi
coefficient.  Moreover, due to the close relationship between the
Cramer's V and Tschuprow's T the returned values can often be similar
or even equivalent.  They are likely to diverge more as the array shape
diverges from a 2x2.
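
All three statistics are simple functions of ``phi2 = chi2 / n``, where
``chi2`` is the test statistic returned by `chi2_contingency` (computed with
the given `correction` and `lambda_`) and ``n`` is the total number of
observations.  In terms of the numbers of rows and columns, ``n_rows`` and
``n_cols``, they are computed as::

    cramer    = sqrt(phi2 / min(n_rows - 1, n_cols - 1))
    tschuprow = sqrt(phi2 / sqrt((n_rows - 1) * (n_cols - 1)))
    pearson   = sqrt(phi2 / (1 + phi2))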

References
----------
.. [1] "Tschuprow's T",
       https://en.wikipedia.org/wiki/Tschuprow's_T
.. [2] Tschuprow, A. A. (1939)
       Principles of the Mathematical Theory of Correlation;
       translated by M. Kantorowitsch. W. Hodge & Co.
.. [3] "Cramer's V", https://en.wikipedia.org/wiki/Cramer's_V
.. [4] "Nominal Association: Phi and Cramer's V",
       http://www.people.vcu.edu/~pdattalo/702SuppRead/MeasAssoc/NominalAssoc.html
.. [5] Gingrich, Paul, "Association Between Variables",
       http://uregina.ca/~gingrich/ch11a.pdf

Examples
--------
An example with a 4x2 contingency table:

>>> import numpy as np
>>> from scipy.stats.contingency import association
>>> obs4x2 = np.array([[100, 150], [203, 322], [420, 700], [320, 210]])

Pearson's contingency coefficient

>>> association(obs4x2, method="pearson")
0.18303298140595667

Cramer's V

>>> association(obs4x2, method="cramer")
0.18617813077483678

Tschuprow's T

>>> association(obs4x2, method="tschuprow")
0.14146478765062995

    """
    arr = np.asarray(observed)
    if not np.issubdtype(arr.dtype, np.integer):
        raise ValueError("`observed` must be an integer array.")

    if len(arr.shape) != 2:
        raise ValueError("method only accepts 2d arrays")

    chi2_stat = chi2_contingency(arr, correction=correction,
                                 lambda_=lambda_)

    phi2 = chi2_stat.statistic / arr.sum()
    n_rows, n_cols = arr.shape
    if method == "cramer":
        value = phi2 / min(n_cols - 1, n_rows - 1)
    elif method == "tschuprow":
        value = phi2 / math.sqrt((n_rows - 1) * (n_cols - 1))
    elif method == "pearson":
        value = phi2 / (1 + phi2)
    else:
        raise ValueError("Invalid argument value: 'method' argument must "
                         "be 'cramer', 'tschuprow', or 'pearson'")

    return math.sqrt(value)