- 前向き研究と後向き研究
- 式(28.4)の導出
- 母数の推定
- 標本オッズ比の対数の推定誤差の導出
- 適合度カイ二乗検定と尤度比検定
- 式(28.9)の導出
- 式(28.11)の式変形の導出
- フィッシャーの正確検定
- 式(28.14)の式変形(p.264の上式)の導出
- 2元分割表
- p.267の上式:対数尤度関数の導出
- 母数\(\theta_{ij}\)の最尤推定量の導出
- ラグランジュの未定乗数法による式の導出
- \(\lambda_1,\lambda_2,\hat{\theta}_{i\cdot},\hat{\theta}_{\cdot j},\hat{\theta}_{ij}\)の導出
- ポアソン分布に従う際にも最尤推定量が\(\hat{\theta}_{ij}=x_{i\cdot}x_{\cdot j}/x_{\cdot\cdot}^2\)で表せること
- 3元分割表
- 完全独立モデルのもとで\(\hat{\theta}_{ijk}=x_{i\cdot\cdot}x_{\cdot j\cdot}x_{\cdot\cdot k}/n^3\)になること
- 周辺独立モデルのもとで\(\hat{\theta}_{ijk}=x_{i\cdot\cdot}x_{\cdot jk}/n^2\)になること
- 条件付き独立モデルのもとで\(\hat{\theta}_{ijk}=x_{i\cdot k}x_{\cdot jk}/nx_{\cdot\cdot k}\)になること
統計学実践ワークブックの行間埋め 第28章
\(1-\theta_1, 1-\theta_2\)についても同様に求める。
\begin{eqnarray}
1-\theta_1
&=&
P(B_2|A_1) \\ \\
&=&
\frac{P(A_1|B_2)P(B_2)}{P(A_1)} \\ \\
&=&
\eta_2 \frac{P(B_2)}{P(A_1)} \\ \\
\\
1-\theta_2
&=&
P(B_2|A_2) \\ \\
&=&
\frac{P(A_2|B_2)P(B_2)}{P(A_2)} \\ \\
&=&
(1-\eta_2) \frac{P(B_2)}{P(A_2)} \\ \\
\\
\frac{\eta_1/(1-\eta_1)}{\eta_2/(1-\eta_2)}
&=&
\frac{(P(B_1|A_1)P(A_1)/(P(B_1)))/(P(B_1|A_2)P(A_2)/P(B_1))}{(P(B_2|A_1)P(A_1)/P(B_2))/(P(B_2|A_2)P(A_2)/(P(B_2)))} \\ \\
&=&
\frac{P(B_1|A_1)/P(B_1|A_2)}{P(B_2|A_1)/P(B_2|A_2)} &2式目&\\ \\
&=&
\frac{(P(A_1|B_1)P(B_1)/P(A_1))/(P(A_2|B_1)P(B_1)/P(A_2))}{(P(A_1|B_2)P(B_2)/P(A_1))/(P(A_2|B_2)P(B_2)/P(A_2))} \\ \\
&=&
\frac{P(A_1|B_1)/P(A_2|B_1)}{P(A_1|B_2)/P(A_2|B_2)} &3式目&\\ \\
&=&
\frac{\theta_1/(1-\theta_1)}{\theta_2/(1-\theta_2)} &4式目&\\ \\
&=&
\psi&5式目&
\end{eqnarray}
二項分布を考えると、尤度関数\(l(\hat{\theta})\)、対数尤度\(\log l(\hat{\theta})\)は
\begin{eqnarray}
l(\hat{\theta})
&=&
{}_{n_1}C_{x_1}\hat{\theta}^{x_1}(1-\hat{\theta})^{n_1-x_1}{}_{n_2}C_{x_2}\hat{\theta}^{x_2}(1-\hat{\theta})^{n_2-x_2} \\ \\
\log l(\hat{\theta})
&=&
\log {}_{n_1}C_{x_1}+ x_1\log \hat{\theta}+(n_1-x_1)\log (1-\hat{\theta})+\log {}_{n_2}C_{x_2}+ x_2\log \hat{\theta}+(n_2-x_2)\log (1-\hat{\theta})
\end{eqnarray}
となる。対数尤度関数の微分係数が0になるときの\(\hat{\theta}\)を求める。
\begin{eqnarray}
\frac{\partial}{\partial \hat{\theta} }\log l(\hat{\theta})
&=&
\frac{\partial}{\partial \hat{\theta} }\left( \log {}_{n_1}C_{x_1}+ x_1\log \hat{\theta}+(n_1-x_1)\log (1-\hat{\theta})+\log {}_{n_2}C_{x_2}+ x_2\log \hat{\theta}+(n_2-x_2)\log (1-\hat{\theta})\right) \\ \\
&=&
\frac{x_1}{\hat{\theta}}+\frac{-(n_1-x_1)}{1-\hat{\theta}}+ \frac{x_2}{\hat{\theta}}+\frac{-(n_2-x_2)}{1-\hat{\theta}} \\ \\
&=&
\frac{x_1+x_2}{\hat{\theta}}+\frac{x_1+x_2-(n_1+n_2)}{1-\hat{\theta}}
=0 \\ \\
&\Leftrightarrow&
(1-\hat{\theta})(x_1+x_2)+\hat{\theta}(x_1+x_2-(n_1+n_2))=0 \\ \\
&\Leftrightarrow&
x_1+x_2=\hat{\theta}((n_1+n_2)-(x_1+x_2))+\hat{\theta}(x_1+x_2) \\ \\
&\Leftrightarrow&
x_1+x_2=\hat{\theta}(n_1+n_2) \\ \\
&\Leftrightarrow&
\hat{\theta}=\frac{x_1+x_2}{n_1+n_2} \\ \\
\end{eqnarray}
\begin{eqnarray}
\chi^2
&=&
\displaystyle \sum_{i=1}^2\sum_{j=1}^2 \frac{(x_{ij}-m_{ij})^2}{m_{ij}} \\ \\
&=&
\displaystyle \sum_{i=1}^2 \frac{(x_{i1}-m_{i1})^2}{m_{i1}}+\frac{(x_{i2}-m_{i2})^2}{m_{i2}} \\ \\
&=&
\displaystyle \sum_{i=1}^2 \frac{(x_{i1}-(x_{i\cdot}x_{\cdot 1}/x_{\cdot\cdot}))^2}{x_{i\cdot}x_{\cdot 1}/x_{\cdot\cdot}}+\frac{(x_{i2}-x_{i\cdot}x_{\cdot 2}/x_{\cdot\cdot})^2}{x_{i\cdot}x_{\cdot 2}/x_{\cdot\cdot}} \\ \\
&=&
\displaystyle \sum_{i=1}^2 \frac{(x_{i1}x_{\cdot\cdot}-x_{i\cdot}x_{\cdot 1})^2}{x_{i\cdot}x_{\cdot 1}x_{\cdot\cdot}}+\frac{(x_{i2}x_{\cdot\cdot}-x_{i\cdot}x_{\cdot 2})^2}{x_{i\cdot}x_{\cdot 2}x_{\cdot\cdot}} \\ \\
&=&
\displaystyle \sum_{i=1}^2 \frac{x_{\cdot 2}(x_{i1}x_{\cdot\cdot}-x_{i\cdot}x_{\cdot 1})^2+x_{\cdot 1}(x_{i2}x_{\cdot\cdot}-x_{i\cdot}x_{\cdot 2})^2}{x_{i\cdot}x_{\cdot 1}x_{\cdot 2}x_{\cdot\cdot}}\\ \\
&=&
\displaystyle \sum_{i=1}^2 \frac{x_{\cdot\cdot}^2(x_{\cdot 2}x_{i1}^2+x_{\cdot 1}x_{i2}^2)-2x_{\cdot\cdot}(x_{\cdot 2}x_{i1}x_{i\cdot}x_{\cdot 1}+x_{\cdot 1}x_{i2}x_{i\cdot}x_{\cdot 2})+x_{\cdot 2}(x_{i\cdot}x_{\cdot 1})^2+x_{\cdot 1}(x_{i\cdot}x_{\cdot 2})^2}{x_{i\cdot}x_{\cdot 1}x_{\cdot 2}x_{\cdot\cdot}}\\ \\
&=&
\displaystyle \sum_{i=1}^2 \frac{x_{\cdot\cdot}^2(x_{\cdot 2}x_{i1}^2+x_{\cdot 1}x_{i2}^2)-2x_{\cdot\cdot}x_{\cdot 1}x_{\cdot 2}x_{i\cdot}(x_{i1}+x_{i2})+x_{\cdot 1}x_{\cdot 2}x_{i\cdot}^2(x_{\cdot 1}+x_{\cdot 2})}{x_{i\cdot}x_{\cdot 1}x_{\cdot 2}x_{\cdot\cdot}}\\ \\
&=&
\displaystyle \sum_{i=1}^2 \frac{x_{\cdot\cdot}^2(x_{\cdot 2}x_{i1}^2+x_{\cdot 1}x_{i2}^2)-2x_{\cdot\cdot}x_{\cdot 1}x_{\cdot 2}x_{i\cdot}(x_{i\cdot})+x_{\cdot 1}x_{\cdot 2}x_{i\cdot}^2(x_{\cdot\cdot})}{x_{i\cdot}x_{\cdot 1}x_{\cdot 2}x_{\cdot\cdot}}\\ \\
&=&
\displaystyle \sum_{i=1}^2 \frac{x_{\cdot\cdot}^2(x_{\cdot 2}x_{i1}^2+x_{\cdot 1}x_{i2}^2)-x_{\cdot\cdot}x_{\cdot 1}x_{\cdot 2}x_{i\cdot}^2}{x_{i\cdot}x_{\cdot 1}x_{\cdot 2}x_{\cdot\cdot}}\\ \\
&=&
\displaystyle \sum_{i=1}^2 \frac{x_{\cdot\cdot}(x_{\cdot 2}x_{i1}^2+x_{\cdot 1}x_{i2}^2)-x_{\cdot 1}x_{\cdot 2}x_{i\cdot}^2}{x_{i\cdot}x_{\cdot 1}x_{\cdot 2}}\\ \\
&=&
\frac{x_{2\cdot}x_{\cdot\cdot}(x_{\cdot 2}x_{11}^2+x_{\cdot 1}x_{12}^2)-x_{2\cdot}x_{\cdot 1}x_{\cdot 2}x_{1\cdot}^2}{x_{1\cdot}x_{2\cdot}x_{\cdot 1}x_{\cdot 2}}+\frac{x_{1\cdot}x_{\cdot\cdot}(x_{\cdot 2}x_{21}^2+x_{\cdot 1}x_{22}^2)-x_{1\cdot}x_{\cdot 1}x_{\cdot 2}x_{2\cdot}^2}{x_{1\cdot}x_{2\cdot}x_{\cdot 1}x_{\cdot 2}}\\ \\
&=&
\frac{x_{\cdot\cdot}(x_{2\cdot}x_{\cdot 2}x_{11}^2+x_{2\cdot}x_{\cdot 1}x_{12}^2+x_{1\cdot}x_{\cdot 2}x_{21}^2+x_{1\cdot}x_{\cdot 1}x_{22}^2)-x_{2\cdot}x_{\cdot 1}x_{\cdot 2}x_{1\cdot}^2-x_{1\cdot}x_{\cdot 1}x_{\cdot 2}x_{2\cdot}^2}{x_{1\cdot}x_{2\cdot}x_{\cdot 1}x_{\cdot 2}}\\ \\
&=&
\frac{x_{\cdot\cdot}(x_{2\cdot}x_{\cdot 2}x_{11}^2+x_{2\cdot}x_{\cdot 1}x_{12}^2+x_{1\cdot}x_{\cdot 2}x_{21}^2+x_{1\cdot}x_{\cdot 1}x_{22}^2)-x_{2\cdot}x_{\cdot 1}x_{\cdot 2}x_{1\cdot}(x_{1\cdot}+x_{2\cdot})}{x_{1\cdot}x_{2\cdot}x_{\cdot 1}x_{\cdot 2}}\\ \\
&=&
\frac{x_{\cdot\cdot}(x_{2\cdot}x_{\cdot 2}x_{11}^2+x_{2\cdot}x_{\cdot 1}x_{12}^2+x_{1\cdot}x_{\cdot 2}x_{21}^2+x_{1\cdot}x_{\cdot 1}x_{22}^2)-x_{2\cdot}x_{\cdot 1}x_{\cdot 2}x_{1\cdot}(x_{\cdot\cdot})}{x_{1\cdot}x_{2\cdot}x_{\cdot 1}x_{\cdot 2}}\\ \\
&=&
\frac{x_{\cdot\cdot}(x_{2\cdot}x_{\cdot 2}x_{11}^2+x_{2\cdot}x_{\cdot 1}x_{12}^2+x_{1\cdot}x_{\cdot 2}x_{21}^2+x_{1\cdot}x_{\cdot 1}x_{22}^2-x_{2\cdot}x_{\cdot 1}x_{\cdot 2}x_{1\cdot})}{x_{1\cdot}x_{2\cdot}x_{\cdot 1}x_{\cdot 2}}\\ \\
&=&
\frac{x_{\cdot\cdot}(x_{2\cdot}x_{\cdot 2}x_{11}^2+x_{2\cdot}(x_{11}+x_{21})x_{12}^2+(x_{11}+x_{12})x_{\cdot 2}x_{21}^2+(x_{11}+x_{12})(x_{11}+x_{21})x_{22}^2-x_{2\cdot}(x_{11}+x_{21})x_{\cdot 2}(x_{11}+x_{12}))}{x_{1\cdot}x_{2\cdot}x_{\cdot 1}x_{\cdot 2}}\\ \\
&=&
\frac{x_{\cdot\cdot}((x_{2\cdot}x_{\cdot 2}+x_{22}^2-x_{2\cdot}x_{\cdot 2})x_{11}^2+(x_{2\cdot}x_{12}^2+x_{\cdot 2}x_{21}^2+x_{22}^2(x_{12}+x_{21})-x_{2\cdot}x_{\cdot 2}(x_{12}+x_{21}))x_{11}+(x_{2\cdot}x_{21}x_{12}^2+x_{12}x_{\cdot 2}x_{21}^2+x_{12}x_{21}x_{22}^2-x_{2\cdot}x_{\cdot 2}x_{21}x_{12}))}{x_{1\cdot}x_{2\cdot}x_{\cdot 1}x_{\cdot 2}}\\ \\
&=&
\frac{x_{\cdot\cdot}(x_{22}^2x_{11}^2+(x_{2\cdot}x_{12}^2+x_{\cdot 2}x_{21}^2+(x_{22}^2-x_{2\cdot}x_{\cdot 2})(x_{12}+x_{21}))x_{11}+(x_{2\cdot}x_{21}x_{12}^2+x_{12}x_{\cdot 2}x_{21}^2+(x_{22}^2-x_{2\cdot}x_{\cdot 2})x_{21}x_{12}))}{x_{1\cdot}x_{2\cdot}x_{\cdot 1}x_{\cdot 2}}\\ \\
\end{eqnarray}
ここで、\begin{eqnarray}
x_{22}^2-x_{2\cdot}x_{\cdot 2}
&=&
x_{22}^2-(x_{21}+x_{22})(x_{12}+x_{22})\\ \\
&=&
-(x_{21}x_{22}+x_{12}x_{22}+x_{12}x_{21})\\ \\
&=&
-(x_{21}x_{22}+x_{12}x_{2\cdot})
\end{eqnarray}
より、
\begin{eqnarray}
\chi^2
&=&
\frac{x_{\cdot\cdot}(x_{22}^2x_{11}^2+(x_{2\cdot}x_{12}^2+x_{\cdot 2}x_{21}^2+(x_{22}^2-x_{2\cdot}x_{\cdot 2})(x_{12}+x_{21}))x_{11}+(x_{2\cdot}x_{21}x_{12}^2+x_{12}x_{\cdot 2}x_{21}^2+(x_{22}^2-x_{2\cdot}x_{\cdot 2})x_{21}x_{12}))}{x_{1\cdot}x_{2\cdot}x_{\cdot 1}x_{\cdot 2}}\\ \\
&=&
\frac{x_{\cdot\cdot}(x_{22}^2x_{11}^2+(x_{2\cdot}x_{12}^2+x_{\cdot 2}x_{21}^2+(-(x_{21}x_{22}+x_{12}x_{2\cdot}))(x_{12}+x_{21}))x_{11}+(x_{2\cdot}x_{21}x_{12}^2+x_{12}x_{\cdot 2}x_{21}^2+(-(x_{21}x_{22}+x_{12}x_{2\cdot}))x_{21}x_{12}))}{x_{1\cdot}x_{2\cdot}x_{\cdot 1}x_{\cdot 2}}\\ \\
&=&
\frac{x_{\cdot\cdot}(x_{22}^2x_{11}^2+(x_{2\cdot}x_{12}^2+x_{\cdot 2}x_{21}^2+(-(x_{21}x_{22}+x_{12}x_{2\cdot}))(x_{12}+x_{21}))x_{11}+(x_{2\cdot}x_{21}x_{12}^2+x_{12}x_{\cdot 2}x_{21}^2+(-(x_{21}x_{22}+x_{12}x_{2\cdot}))x_{21}x_{12}))}{x_{1\cdot}x_{2\cdot}x_{\cdot 1}x_{\cdot 2}}\\ \\
&=&
\frac{x_{\cdot\cdot}(x_{22}^2x_{11}^2+(x_{2\cdot}x_{12}^2+x_{\cdot 2}x_{21}^2+(-(x_{21}x_{22}+x_{12}x_{2\cdot}))(x_{12}+x_{21}))x_{11}+(x_{12}x_{\cdot 2}x_{21}^2-x_{21}x_{22}x_{21}x_{12}))}{x_{1\cdot}x_{2\cdot}x_{\cdot 1}x_{\cdot 2}}\\ \\
&=&
\frac{x_{\cdot\cdot}(x_{22}^2x_{11}^2+(x_{2\cdot}x_{12}^2+x_{\cdot 2}x_{21}^2+(-(x_{21}x_{22}+x_{12}x_{2\cdot}))(x_{12}+x_{21}))x_{11}+(x_{12}(x_{12}+x_{22})x_{21}^2-x_{21}^2x_{22}x_{12}))}{x_{1\cdot}x_{2\cdot}x_{\cdot 1}x_{\cdot 2}}\\ \\
&=&
\frac{x_{\cdot\cdot}(x_{22}^2x_{11}^2+(x_{2\cdot}x_{12}^2+x_{\cdot 2}x_{21}^2+(-(x_{21}x_{22}+x_{12}x_{2\cdot}))(x_{12}+x_{21}))x_{11}+x_{12}^2x_{21}^2)}{x_{1\cdot}x_{2\cdot}x_{\cdot 1}x_{\cdot 2}}\\ \\
&=&
\frac{x_{\cdot\cdot}(x_{22}^2x_{11}^2+(x_{2\cdot}x_{12}^2+x_{\cdot 2}x_{21}^2-x_{21}x_{22}x_{12}-x_{12}x_{2\cdot}x_{12}-x_{21}x_{22}x_{21}-x_{12}x_{2\cdot}x_{21})x_{11}+x_{12}^2x_{21}^2)}{x_{1\cdot}x_{2\cdot}x_{\cdot 1}x_{\cdot 2}}\\ \\
&=&
\frac{x_{\cdot\cdot}(x_{22}^2x_{11}^2+((x_{12}+x_{22})x_{21}^2-x_{21}x_{22}x_{12}-x_{21}^2x_{22}-x_{12}(x_{21}+x_{22})x_{21})x_{11}+x_{12}^2x_{21}^2)}{x_{1\cdot}x_{2\cdot}x_{\cdot 1}x_{\cdot 2}}\\ \\
&=&
\frac{x_{\cdot\cdot}(x_{22}^2x_{11}^2+(-2x_{21}x_{22}x_{12})x_{11}+x_{12}^2x_{21}^2)}{x_{1\cdot}x_{2\cdot}x_{\cdot 1}x_{\cdot 2}}\\ \\
&=&
\frac{x_{\cdot\cdot}(x_{22}x_{11}-x_{12}x_{21})^2}{x_{1\cdot}x_{2\cdot}x_{\cdot 1}x_{\cdot 2}}\\ \\
\end{eqnarray}
\begin{eqnarray}
P(X_{11}=x_{11})
&=&
\frac{ {}_{x_{\cdot 1} }C_{x_{11} }\times {}_{x_{\cdot 2} }C_{x_{1\cdot}-x_{11} } }{ {}_{x_{\cdot\cdot} }C_{x_{1\cdot} } } \\ \\
&=&
{}_{x_{\cdot 1} }C_{x_{11} }&\times &{}_{x_{\cdot 2} }C_{x_{1\cdot}-x_{11} }& \times &\frac{1}{ {}_{x_{\cdot\cdot} }C_{x_{1\cdot} } }& \\ \\
&=&
\frac{x_{\cdot 1}!}{x_{11}!(x_{\cdot 1}-x_{11})! }&\times &\frac{x_{\cdot 2}! }{(x_{1\cdot}-x_{11})!(x_{\cdot 2}-x_{1\cdot}+x_{11})!} &\times& \frac{x_{1\cdot}!(x_{\cdot\cdot}-x_{1\cdot})!}{x_{\cdot\cdot}!} \\ \\
&=&
\frac{x_{\cdot 1}!}{x_{11}!((x_{1 1}+x_{2 1})-x_{11})! }&\times &\frac{x_{\cdot 2}! }{((x_{11}+x_{12})-x_{11})!((x_{12}+x_{22})-(x_{11}+x_{12})+x_{11})!} &\times& \frac{x_{1\cdot}!((x_{1\cdot}+x_{2\cdot})-x_{1\cdot})!}{x_{\cdot\cdot}!} \\ \\
&=&
\frac{x_{\cdot 1}!}{x_{11}!x_{2 1}! }&\times &\frac{x_{\cdot 2}! }{x_{12}!x_{22}!} &\times& \frac{x_{1\cdot}!x_{2\cdot}!}{x_{\cdot\cdot}!} \\ \\
&=&
\frac{x_{1\cdot}!x_{2\cdot}!x_{\cdot 1}!x_{\cdot 2}!}{x_{\cdot\cdot}!}\frac{1}{x_{11}!x_{12}!x_{2 1}!x_{22}!} \\ \\
\end{eqnarray}
便宜上、\(l=\log L\)とする。尤度関数\(L\)を求めると
\begin{eqnarray}
L
&=&
\frac{n!}{\displaystyle \prod_{i=1}^I\prod_{j=1}^J x_{ij}!}\displaystyle\prod_{i=1}^I\prod_{j=1}^J\theta_{ij}^{x_{ij} } \\ \\
\Rightarrow
l&=&\log L \\ \\
&=&
\log \left(\frac{n!}{\displaystyle \prod_{i=1}^I\prod_{j=1}^J x_{ij}!}\displaystyle\prod_{i=1}^I\prod_{j=1}^J\theta_{ij}^{x_{ij} }\right) \\ \\
&=&
\log \left(\frac{n!}{\displaystyle \prod_{i=1}^I\prod_{j=1}^J x_{ij}!}\right)+\log \left(\displaystyle\prod_{i=1}^I\prod_{j=1}^J\theta_{ij}^{x_{ij} }\right) \\ \\
&=&
\log \left(\frac{n!}{\displaystyle \prod_{i=1}^I\prod_{j=1}^J x_{ij}!}\right)+\displaystyle\sum_{i=1}^I\sum_{j=1}^J\log \theta_{ij}^{x_{ij} } \\ \\
&=&
\log \left(\frac{n!}{\displaystyle \prod_{i=1}^I\prod_{j=1}^J x_{ij}!}\right)+\displaystyle\sum_{i=1}^I\sum_{j=1}^Jx_{ij}\log \theta_{ij} \\ \\
\end{eqnarray}
多項分布の最尤推定に該当するため、ラグランジュの未定乗数法を利用する。
こちらの解説などを参考
こちらの解説などを参考
独立性の仮説のため\(\theta_{ij}=\theta_{i\cdot}\theta_{\cdot j}\)であることを利用する。
\begin{eqnarray}
&&\left\{
\begin{array}{l}
l&=&\log \left(\frac{n!}{\displaystyle \prod_{i=1}^I\prod_{j=1}^J x_{ij}!}\right)+\displaystyle\sum_{i=1}^I\sum_{j=1}^Jx_{ij}\log \theta_{ij} \\
\displaystyle \sum_{i=1}^I \theta_{i\cdot}&=&1 \\
\displaystyle \sum_{j=1}^J \theta_{\cdot j}&=&1 \\
\end{array}
\right. \\ \\
&=&\left\{
\begin{array}{l}
l&=&const+\displaystyle\sum_{i=1}^I\sum_{j=1}^Jx_{ij}\log \theta_{i\cdot}\theta_{\cdot j} \\
\displaystyle \sum_{i=1}^I \theta_{i\cdot}-1&=&0 \\
\displaystyle \sum_{j=1}^J \theta_{\cdot j}-1&=&0 \\
\end{array}
\right. \\ \\
&=&\left\{
\begin{array}{l}
l&=&const+\displaystyle\sum_{i=1}^I\sum_{j=1}^J(x_{ij}\log \theta_{i\cdot}+x_{ij}\log\theta_{\cdot j}) \\
\displaystyle \sum_{i=1}^I \theta_{i\cdot}-1&=&0 \\
\displaystyle \sum_{j=1}^J \theta_{\cdot j}-1&=&0 \\
\end{array}
\right. \\ \\
&=&\left\{
\begin{array}{l}
l&=&const+\displaystyle\sum_{i=1}^I\sum_{j=1}^Jx_{ij}\log \theta_{i\cdot}+\sum_{i=1}^I\sum_{j=1}^Jx_{ij}\log\theta_{\cdot j} \\
\displaystyle \sum_{i=1}^I \theta_{i\cdot}-1&=&0 \\
\displaystyle \sum_{j=1}^J \theta_{\cdot j}-1&=&0 \\
\end{array}
\right. \\ \\
&=&\left\{
\begin{array}{l}
l&=&const+\displaystyle\sum_{i=1}^I\log \theta_{i\cdot}\sum_{j=1}^Jx_{ij}+\sum_{j=1}^J\log\theta_{\cdot j}\sum_{i=1}^Ix_{ij} \\
\displaystyle \sum_{i=1}^I \theta_{i\cdot}-1&=&0 \\
\displaystyle \sum_{j=1}^J \theta_{\cdot j}-1&=&0 \\
\end{array}
\right. \\ \\
&=&\left\{
\begin{array}{l}
l&=&const+\displaystyle\sum_{i=1}^I\log \theta_{i\cdot}x_{i\cdot}+\sum_{j=1}^J\log\theta_{\cdot j}x_{\cdot j} \\
\displaystyle \sum_{i=1}^I \theta_{i\cdot}-1&=&0 \\
\displaystyle \sum_{j=1}^J \theta_{\cdot j}-1&=&0 \\
\end{array}
\right. \\ \\
&=&\left\{
\begin{array}{l}
l&=&const+\displaystyle\sum_{i=1}^Ix_{i\cdot}\log \theta_{i\cdot}+\sum_{j=1}^Jx_{\cdot j}\log\theta_{\cdot j} \\
\displaystyle \sum_{i=1}^I \theta_{i\cdot}-1&=&0 \\
\displaystyle \sum_{j=1}^J \theta_{\cdot j}-1&=&0 \\
\end{array}
\right. \\ \\
&\Rightarrow&
L(\theta_{\cdot j},\theta_{i\cdot},\lambda_1,\lambda_2)=l-\lambda_1(\displaystyle \sum_{i=1}^I \theta_{i\cdot}-1)-\lambda_2(\displaystyle \sum_{j=1}^J \theta_{\cdot j}-1) \\ \\
&\Leftrightarrow&
L(\theta_{\cdot j},\theta_{i\cdot},\lambda_1,\lambda_2)=const+\displaystyle\sum_{i=1}^Ix_{i\cdot}\log \theta_{i\cdot}+\sum_{j=1}^Jx_{\cdot j}\log\theta_{\cdot j}-\lambda_1(\displaystyle \sum_{i=1}^I \theta_{i\cdot}-1)-\lambda_2(\displaystyle \sum_{j=1}^J \theta_{\cdot j}-1) \\ \\
&\Leftrightarrow&
L(\theta_{\cdot j},\theta_{i\cdot},\lambda_1,\lambda_2)=\displaystyle\sum_{i=1}^Ix_{i\cdot}\log \theta_{i\cdot}+\sum_{j=1}^Jx_{\cdot j}\log\theta_{\cdot j}-\lambda_1(\displaystyle \sum_{i=1}^I \theta_{i\cdot}-1)-\lambda_2(\displaystyle \sum_{j=1}^J \theta_{\cdot j}-1) \\ \\
\end{eqnarray}
最後の定数部分は、偏微分の際にゼロになるので無視した。
\begin{eqnarray}
&&\left\{
\begin{array}{l}
\frac{\partial}{\partial \theta_{\cdot j} }L(\theta_{\cdot j},\theta_{i\cdot},\lambda_1,\lambda_2)&=&0 \\
\frac{\partial}{\partial \theta_{i \cdot} }L(\theta_{\cdot j},\theta_{i\cdot},\lambda_1,\lambda_2)&=&0 \\
\frac{\partial}{\partial \lambda_1 }L(\theta_{\cdot j},\theta_{i\cdot},\lambda_1,\lambda_2)&=&0 \\
\frac{\partial}{\partial \lambda_2 }L(\theta_{\cdot j},\theta_{i\cdot},\lambda_1,\lambda_2)&=&0 \\
\end{array}
\right. \\ \\
&\Leftrightarrow&\left\{
\begin{array}{l}
\frac{x_{\cdot j}}{\theta_{\cdot j}}-\lambda_2&=&0 \\
\frac{x_{i \cdot}}{\theta_{i \cdot}}-\lambda_1&=&0 \\
\displaystyle \sum_{i=1}^I \theta_{i\cdot}-1&=&0 \\
\displaystyle \sum_{j=1}^J \theta_{\cdot j}-1&=&0 \\
\end{array}
\right. \\ \\
&\Leftrightarrow&\left\{
\begin{array}{l}
\theta_{\cdot j}&=&\frac{x_{\cdot j}}{\lambda_2} \\
\theta_{i \cdot}&=&\frac{x_{i \cdot}}{\lambda_1} \\
\displaystyle \sum_{i=1}^I \theta_{i\cdot}-1&=&0 \\
\displaystyle \sum_{j=1}^J \theta_{\cdot j}-1&=&0 \\
\end{array}
\right. \\ \\
&\Leftrightarrow&\left\{
\begin{array}{l}
\displaystyle \sum_{j=1}^J\theta_{\cdot j}&=&\displaystyle \sum_{j=1}^J\frac{x_{\cdot j}}{\lambda_2} \\
\displaystyle \sum_{i=1}^I\theta_{i \cdot}&=&\displaystyle \sum_{i=1}^I\frac{x_{i \cdot}}{\lambda_1} \\
\displaystyle \sum_{i=1}^I \theta_{i\cdot}&=&1 \\
\displaystyle \sum_{j=1}^J \theta_{\cdot j}&=&1 \\
\end{array}
\right. \\ \\
&\Leftrightarrow&\left\{
\begin{array}{l}
1&=&\frac{x_{\cdot \cdot}}{\lambda_2} \\
1&=&\frac{x_{\cdot \cdot}}{\lambda_1} \\
\displaystyle \sum_{i=1}^I \theta_{i\cdot}&=&1 \\
\displaystyle \sum_{j=1}^J \theta_{\cdot j}&=&1 \\
\end{array}
\right. \\ \\
&\Leftrightarrow&\left\{
\begin{array}{l}
\lambda_2&=&x_{\cdot \cdot} \\
\lambda_1&=&x_{\cdot \cdot} \\
\end{array}
\right. \\ \\
\end{eqnarray}
はじめの式にこれらの値を代入すると
\begin{eqnarray}
&&\left\{
\begin{array}{l}
\theta_{\cdot j}&=&\frac{x_{\cdot j}}{\lambda_2} \\
\theta_{i \cdot}&=&\frac{x_{i \cdot}}{\lambda_1} \\
\displaystyle \sum_{i=1}^I \theta_{i\cdot}-1&=&0 \\
\displaystyle \sum_{j=1}^J \theta_{\cdot j}-1&=&0 \\
\lambda_1&=&x_{\cdot\cdot} \\
\lambda_2&=&x_{\cdot\cdot} \\
\end{array}
\right. \\ \\
&&\left\{
\begin{array}{l}
\theta_{\cdot j}&=&\frac{x_{\cdot j}}{x_{\cdot\cdot}} \\
\theta_{i \cdot}&=&\frac{x_{i \cdot}}{x_{\cdot\cdot}} \\
\lambda_1&=&x_{\cdot\cdot} \\
\lambda_2&=&x_{\cdot\cdot} \\
\end{array}
\right. \\ \\
\end{eqnarray}
が得られる。仮定より、\(\theta_{ij}=\theta_{\cdot j}\theta_{i\cdot}\)なので、
\begin{eqnarray}
\theta_{ij}
&=&
\theta_{\cdot j}\theta_{i\cdot} \\ \\
&=&
\frac{x_{\cdot j}}{x_{\cdot\cdot}}\frac{x_{i\cdot}}{x_{\cdot\cdot}} \\ \\
&=&
\frac{x_{i\cdot}x_{\cdot j}}{x_{\cdot\cdot}^2}\\ \\
\end{eqnarray}
対数尤度関数の導出をした後、ラグランジュの未定乗数法による最尤推定量の導出を行う。
はじめに対数尤度関数\(l\)を求める。尤度関数\(e^l\)は、ポアソン分布を仮定しているため
\begin{eqnarray}
e^l
&=&
\displaystyle \prod_{i=1}^I\prod_{j=1}^J e^{-\theta_{ij}}\frac{\theta_{ij}^{x_{ij}}}{x_{ij}!}
\end{eqnarray}
となるから、
\begin{eqnarray}
l
&=&
\log \left(\displaystyle \prod_{i=1}^I\prod_{j=1}^J e^{-\theta_{ij}}\frac{\theta_{ij}^{x_{ij}}}{x_{ij}!}\right) \\ \\
&=&
\displaystyle \sum_{i=1}^I\sum_{j=1}^J\log \left( e^{-\theta_{ij}}\frac{\theta_{ij}^{x_{ij}}}{x_{ij}!}\right) \\ \\
&=&
\displaystyle \sum_{i=1}^I\sum_{j=1}^J\left( -\theta_{ij}+\log \theta_{ij}^{x_{ij} }-\log x_{ij}!\right) \\ \\
&=&
\displaystyle \sum_{i=1}^I\sum_{j=1}^J\left( -\theta_{ij}+x_{ij}\log \theta_{ij}-\log x_{ij}!\right) \\ \\
\end{eqnarray}
ここで、主効果モデルを仮定するため、\(\theta_{ij}=\theta_{i\cdot}\theta_{\cdot j}\)とすると
\begin{eqnarray}
l
&=&
\displaystyle \sum_{i=1}^I\sum_{j=1}^J\left( -\theta_{ij}+x_{ij}\log \theta_{ij}-\log x_{ij}!\right) \\ \\
&=&
\displaystyle \sum_{i=1}^I\sum_{j=1}^J\left( -\theta_{i\cdot}\theta_{\cdot j}+x_{ij}\log \theta_{i\cdot}\theta_{\cdot j}-\log x_{ij}!\right) \\ \\
&=&
\displaystyle \sum_{i=1}^I\sum_{j=1}^J\left( -\theta_{i\cdot}\theta_{\cdot j}+x_{ij}\log \theta_{i\cdot}+x_{ij}\log \theta_{\cdot j}-\log x_{ij}!\right) \\ \\
\end{eqnarray}
が得られる。ここで、
\begin{eqnarray}
\displaystyle \sum_{i=1}^I \theta_{i\cdot}&=&1 \\
\displaystyle \sum_{j=1}^J \theta_{\cdot j}&=&1 \\
\end{eqnarray}
の条件のもと、ラグランジュ関数\(L(\theta_{i\cdot},\theta_{\cdot j},\lambda_1,\lambda_2)\)を求める。
\begin{eqnarray}
&&\left\{
\begin{array}{l}
l&=&\displaystyle \sum_{i=1}^I\sum_{j=1}^J\left( -\theta_{i\cdot}\theta_{\cdot j}+x_{ij}\log \theta_{i\cdot}+x_{ij}\log \theta_{\cdot j}-\log x_{ij}!\right) \\
\displaystyle \sum_{i=1}^I \theta_{i\cdot}&=&1 \\
\displaystyle \sum_{j=1}^J \theta_{\cdot j}&=&1 \\
\end{array}
\right. \\ \\
&\Rightarrow&
L(\theta_{i\cdot},\theta_{\cdot j},\lambda_1,\lambda_2)=\sum_{i=1}^I\sum_{j=1}^J\left( -\theta_{i\cdot}\theta_{\cdot j}+x_{ij}\log \theta_{i\cdot}+x_{ij}\log \theta_{\cdot j}-\log x_{ij}!\right)-\lambda_1(\displaystyle \sum_{i=1}^I \theta_{i\cdot}-1)-\lambda_2(\displaystyle \sum_{j=1}^J \theta_{\cdot j}-1) \\ \\
&&=
-\sum_{i=1}^I\sum_{j=1}^J\theta_{i\cdot}\theta_{\cdot j}+\sum_{i=1}^Ix_{i\cdot}\log \theta_{i\cdot}+\sum_{j=1}^Jx_{\cdot j}\log \theta_{\cdot j}-\sum_{i=1}^I\sum_{j=1}^J\log x_{ij}!-\lambda_1(\displaystyle \sum_{i=1}^I \theta_{i\cdot}-1)-\lambda_2(\displaystyle \sum_{j=1}^J \theta_{\cdot j}-1) \\ \\
\end{eqnarray}
ラグランジュの未定乗数法によってこの解を求めると、
\begin{eqnarray}
&&\left\{
\begin{array}{l}
\frac{\partial}{\partial \theta_{i\cdot}}L(\theta_{i\cdot},\theta_{\cdot j},\lambda_1,\lambda_2)&=&0 \\
\frac{\partial}{\partial \theta_{\cdot j}}L(\theta_{i\cdot},\theta_{\cdot j},\lambda_1,\lambda_2)&=&0 \\
\frac{\partial}{\partial \lambda_1}L(\theta_{i\cdot},\theta_{\cdot j},\lambda_1,\lambda_2)&=&0 \\
\frac{\partial}{\partial \lambda_2}L(\theta_{i\cdot},\theta_{\cdot j},\lambda_1,\lambda_2)&=&0 \\
\end{array}
\right. \\ \\
&\Leftrightarrow&\left\{
\begin{array}{l}
-\displaystyle\sum_{j=1}^J\theta_{\cdot j}+\frac{x_{i\cdot}}{\theta_{i\cdot}}-\lambda_1&=&0 \\
-\displaystyle\sum_{i=1}^I\theta_{i \cdot}+\frac{x_{\cdot j}}{\theta_{\cdot j}}-\lambda_2&=&0 \\
\displaystyle\sum_{i=1}^I\theta_{i\cdot}&=&1\\
\displaystyle\sum_{j=1}^J\theta_{\cdot j}&=&1\\
\end{array}
\right. \\ \\
&\Leftrightarrow&\left\{
\begin{array}{l}
-1+\frac{x_{i\cdot}}{\theta_{i\cdot}}-\lambda_1&=&0 \\
-1+\frac{x_{\cdot j}}{\theta_{\cdot j}}-\lambda_2&=&0 \\
\displaystyle\sum_{i=1}^I\theta_{i\cdot}&=&1\\
\displaystyle\sum_{j=1}^J\theta_{\cdot j}&=&1\\
\end{array}
\right. \\ \\
&\Leftrightarrow&\left\{
\begin{array}{l}
\theta_{i\cdot}&=&\frac{x_{i\cdot}}{1+\lambda_1} \\
\theta_{\cdot j}&=&\frac{x_{\cdot j}}{1+\lambda_2} \\
\displaystyle\sum_{i=1}^I\theta_{i\cdot}&=&1\\
\displaystyle\sum_{j=1}^J\theta_{\cdot j}&=&1\\
\end{array}
\right. \\ \\
&\Leftrightarrow&\left\{
\begin{array}{l}
\displaystyle\sum_{i=1}^I\theta_{i\cdot}&=&\displaystyle\sum_{i=1}^I\frac{x_{i\cdot}}{1+\lambda_1} \\
\displaystyle\sum_{j=1}^J\theta_{\cdot j}&=&\displaystyle\sum_{j=1}^J\frac{x_{\cdot j}}{1+\lambda_2} \\
\displaystyle\sum_{i=1}^I\theta_{i\cdot}&=&1\\
\displaystyle\sum_{j=1}^J\theta_{\cdot j}&=&1\\
\end{array}
\right. \\ \\
&\Leftrightarrow&\left\{
\begin{array}{l}
1&=&\frac{x_{\cdot\cdot}}{1+\lambda_1} \\
1&=&\frac{x_{\cdot\cdot}}{1+\lambda_2} \\
\displaystyle\sum_{i=1}^I\theta_{i\cdot}&=&1\\
\displaystyle\sum_{j=1}^J\theta_{\cdot j}&=&1\\
\end{array}
\right. \\ \\
&\Leftrightarrow&\left\{
\begin{array}{l}
\lambda_1&=&x_{\cdot\cdot}-1\\
\lambda_2&=&x_{\cdot\cdot}-1\\
\displaystyle\sum_{i=1}^I\theta_{i\cdot}&=&1\\
\displaystyle\sum_{j=1}^J\theta_{\cdot j}&=&1\\
\end{array}
\right. \\ \\
&\Rightarrow&\left\{
\begin{array}{l}
\lambda_1&=&x_{\cdot\cdot}-1\\
\lambda_2&=&x_{\cdot\cdot}-1\\
\displaystyle\sum_{i=1}^I\theta_{i\cdot}&=&1\\
\displaystyle\sum_{j=1}^J\theta_{\cdot j}&=&1\\
-\displaystyle\sum_{j=1}^J\theta_{\cdot j}+\frac{x_{i\cdot}}{\theta_{i\cdot}}-\lambda_1&=&0 \\
-\displaystyle\sum_{i=1}^I\theta_{i \cdot}+\frac{x_{\cdot j}}{\theta_{\cdot j}}-\lambda_2&=&0 \\
\end{array}
\right. \\ \\
&\Rightarrow&\left\{
\begin{array}{l}
\lambda_1&=&x_{\cdot\cdot}-1\\
\lambda_2&=&x_{\cdot\cdot}-1\\
-1+\frac{x_{i\cdot}}{\theta_{i\cdot}}-\lambda_1&=&0 \\
-1+\frac{x_{\cdot j}}{\theta_{\cdot j}}-\lambda_2&=&0 \\
\end{array}
\right. \\ \\
&\Rightarrow&\left\{
\begin{array}{l}
\lambda_1&=&x_{\cdot\cdot}-1\\
\lambda_2&=&x_{\cdot\cdot}-1\\
-1+\frac{x_{i\cdot}}{\theta_{i\cdot}}-(x_{\cdot\cdot}-1)&=&0 \\
-1+\frac{x_{\cdot j}}{\theta_{\cdot j}}-(x_{\cdot\cdot}-1)&=&0 \\
\end{array}
\right. \\ \\
&\Rightarrow&\left\{
\begin{array}{l}
\theta_{i\cdot}&=&\frac{x_{i\cdot}}{x_{\cdot\cdot}} \\
\theta_{\cdot j}&=&\frac{x_{\cdot j}}{x_{\cdot\cdot}} \\
\end{array}
\right. \\ \\
\end{eqnarray}
ここで、\(\theta_{ij}=\theta_{i\cdot}\theta_{\cdot j}\)より、\(\theta_{ij}=x_{i\cdot}x_{\cdot j}/x_{\cdot\cdot}^2\)
まず、二元分割表に倣って対数尤度関数\(l=\log L\)を求める。
\begin{eqnarray}
L
&=&
\frac{n!}{\displaystyle \prod_{i=1}^I\prod_{j=1}^J\prod_{k=1}^K x_{ijk}!}\displaystyle\prod_{i=1}^I\prod_{j=1}^J\prod_{k=1}^K\theta_{ijk}^{x_{ijk} } \\ \\
\Rightarrow
l
&=&
\log L \\ \\
&=&
\log \left(\frac{n!}{\displaystyle \prod_{i=1}^I\prod_{j=1}^J\prod_{k=1}^K x_{ijk}!}\displaystyle\prod_{i=1}^I\prod_{j=1}^J\prod_{k=1}^K\theta_{ijk}^{x_{ijk} }\right) \\ \\
&=&
\log \left(\frac{n!}{\displaystyle \prod_{i=1}^I\prod_{j=1}^J\prod_{k=1}^K x_{ijk}!}\right)+\displaystyle\sum_{i=1}^I\sum_{j=1}^J\sum_{k=1}^Kx_{ijk}\log \theta_{ijk} \\ \\
\end{eqnarray}
完全独立モデルを仮定しているため、\(\theta_{ijk}=\theta_{i \cdot\cdot}\theta_{\cdot j\cdot}\theta_{\cdot\cdot k}\)となることを利用して、
\begin{eqnarray}
l
&=&
\log \left(\frac{n!}{\displaystyle \prod_{i=1}^I\prod_{j=1}^J\prod_{k=1}^K x_{ijk}!}\right)+\displaystyle\sum_{i=1}^I\sum_{j=1}^J\sum_{k=1}^Kx_{ijk}\log \theta_{ijk} \\ \\
&=&
\log \left(\frac{n!}{\displaystyle \prod_{i=1}^I\prod_{j=1}^J\prod_{k=1}^K x_{ijk}!}\right)+\displaystyle\sum_{i=1}^I\sum_{j=1}^J\sum_{k=1}^Kx_{ijk}\log \theta_{i \cdot\cdot}\theta_{\cdot j\cdot}\theta_{\cdot\cdot k} \\ \\
&=&
\log \left(\frac{n!}{\displaystyle \prod_{i=1}^I\prod_{j=1}^J\prod_{k=1}^K x_{ijk}!}\right)+\displaystyle\sum_{i=1}^I\sum_{j=1}^J\sum_{k=1}^Kx_{ijk}\log \theta_{i \cdot\cdot}+\displaystyle\sum_{i=1}^I\sum_{j=1}^J\sum_{k=1}^Kx_{ijk}\log \theta_{\cdot j\cdot}+\displaystyle\sum_{i=1}^I\sum_{j=1}^J\sum_{k=1}^Kx_{ijk}\log \theta_{\cdot\cdot k} \\ \\
&=&
\log \left(\frac{n!}{\displaystyle \prod_{i=1}^I\prod_{j=1}^J\prod_{k=1}^K x_{ijk}!}\right)+\displaystyle\sum_{i=1}^I\left(\sum_{j=1}^J\sum_{k=1}^Kx_{ijk}\log \theta_{i \cdot\cdot}\right)+\displaystyle\sum_{j=1}^J\left(\sum_{i=1}^I\sum_{k=1}^Kx_{ijk}\log \theta_{\cdot j\cdot}\right)+\displaystyle\sum_{k=1}^K\left(\sum_{i=1}^I\sum_{j=1}^Jx_{ijk}\log \theta_{\cdot\cdot k}\right) \\ \\
&=&
\log \left(\frac{n!}{\displaystyle \prod_{i=1}^I\prod_{j=1}^J\prod_{k=1}^K x_{ijk}!}\right)+\displaystyle\sum_{i=1}^Ix_{i\cdot\cdot}\log \theta_{i \cdot\cdot}+\displaystyle\sum_{j=1}^Jx_{\cdot j\cdot}\log \theta_{\cdot j\cdot}+\displaystyle\sum_{k=1}^Kx_{\cdot\cdot k}\log \theta_{\cdot\cdot k} \\ \\
\end{eqnarray}
ここで、ラグランジュの未定乗数法のためにラグランジュ関数を求める。\(\theta\)に関する条件は、
\begin{eqnarray}
\left\{
\begin{array}{l}
\displaystyle\sum_{i=1}^I\theta_{i\cdot\cdot}&=&1\\
\displaystyle\sum_{j=1}^J\theta_{\cdot j\cdot}&=&1\\
\displaystyle\sum_{k=1}^K\theta_{\cdot\cdot k}&=&1\\
\end{array}
\right. \\ \\
\end{eqnarray}
であるから、ラグランジュ関数\(L(\theta_{i\cdot\cdot},\theta_{\cdot j\cdot},\theta_{\cdot\cdot k},\lambda_1,\lambda_2,\lambda_3)\)は
\begin{eqnarray}
L(\theta_{i\cdot\cdot},\theta_{\cdot j\cdot},\theta_{\cdot\cdot k},\lambda_1,\lambda_2,\lambda_3)
&=&
l-\lambda_1(\displaystyle\sum_{i=1}^I\theta_{i\cdot\cdot}-1)-\lambda_2(\displaystyle\sum_{j=1}^J\theta_{\cdot j\cdot}-1)-\lambda_3(\displaystyle\sum_{k=1}^K\theta_{\cdot\cdot k}-1) \\ \\
&=&
\log \left(\frac{n!}{\displaystyle \prod_{i=1}^I\prod_{j=1}^J\prod_{k=1}^K x_{ijk}!}\right)+\displaystyle\sum_{i=1}^Ix_{i\cdot\cdot}\log \theta_{i \cdot\cdot}+\displaystyle\sum_{j=1}^Jx_{\cdot j\cdot}\log \theta_{\cdot j\cdot}+\displaystyle\sum_{k=1}^Kx_{\cdot\cdot k}\log \theta_{\cdot\cdot k}-\lambda_1(\displaystyle\sum_{i=1}^I\theta_{i\cdot\cdot}-1)-\lambda_2(\displaystyle\sum_{j=1}^J\theta_{\cdot j\cdot}-1)-\lambda_3(\displaystyle\sum_{k=1}^K\theta_{\cdot\cdot k}-1) \\ \\
\end{eqnarray}
となる。ラグランジュの未定乗数法を用いて\(\lambda_1,\lambda_2,\lambda_3\)を求める。
\begin{eqnarray}
&&\left\{
\begin{array}{l}
\frac{\partial}{\partial \theta_{ i\cdot\cdot}}L(\theta_{i\cdot\cdot},\theta_{\cdot j\cdot},\theta_{\cdot\cdot k},\lambda_1,\lambda_2,\lambda_3)=0\\
\frac{\partial}{\partial \theta_{\cdot j\cdot}}L(\theta_{i\cdot\cdot},\theta_{\cdot j\cdot},\theta_{\cdot\cdot k},\lambda_1,\lambda_2,\lambda_3)=0\\
\frac{\partial}{\partial \theta_{\cdot\cdot k}}L(\theta_{i\cdot\cdot},\theta_{\cdot j\cdot},\theta_{\cdot\cdot k},\lambda_1,\lambda_2,\lambda_3)=0\\
\frac{\partial}{\partial \lambda_1}L(\theta_{i\cdot\cdot},\theta_{\cdot j\cdot},\theta_{\cdot\cdot k},\lambda_1,\lambda_2,\lambda_3)=0\\
\frac{\partial}{\partial \lambda_2}L(\theta_{i\cdot\cdot},\theta_{\cdot j\cdot},\theta_{\cdot\cdot k},\lambda_1,\lambda_2,\lambda_3)=0\\
\frac{\partial}{\partial \lambda_3}L(\theta_{i\cdot\cdot},\theta_{\cdot j\cdot},\theta_{\cdot\cdot k},\lambda_1,\lambda_2,\lambda_3)=0\\
\end{array}
\right. \\ \\
&\Leftrightarrow&\left\{
\begin{array}{l}
\frac{x_{ i\cdot\cdot}}{\theta_{ i\cdot\cdot}}-\lambda_1=0\\
\frac{x_{\cdot j\cdot}}{\theta_{\cdot j\cdot}}-\lambda_2=0\\
\frac{x_{\cdot\cdot k}}{\theta_{\cdot\cdot k}}-\lambda_3=0\\
\displaystyle \sum_{i=1}^I\theta_{ i\cdot\cdot}-1=0 \\
\displaystyle \sum_{j=1}^J\theta_{\cdot j\cdot}-1=0 \\
\displaystyle \sum_{k=1}^K\theta_{\cdot\cdot k}-1=0 \\
\end{array}
\right. \\ \\
&\Leftrightarrow&\left\{
\begin{array}{l}
\theta_{ i\cdot\cdot}=\frac{x_{ i\cdot\cdot}}{\lambda_1}\\
\theta_{\cdot j\cdot}=\frac{x_{\cdot j\cdot}}{\lambda_2}\\
\theta_{\cdot\cdot k}=\frac{x_{\cdot\cdot k}}{\lambda_3}\\
\displaystyle \sum_{i=1}^I\theta_{ i\cdot\cdot}-1=0 \\
\displaystyle \sum_{j=1}^J\theta_{\cdot j\cdot}-1=0 \\
\displaystyle \sum_{k=1}^K\theta_{\cdot\cdot k}-1=0 \\
\end{array}
\right. \\ \\
&\Leftrightarrow&\left\{
\begin{array}{l}
\displaystyle\sum_{i=1}^I\theta_{ i\cdot\cdot}=\displaystyle\sum_{i=1}^I\frac{x_{ i\cdot\cdot}}{\lambda_1} \\
\displaystyle\sum_{j=1}^J\theta_{\cdot j\cdot}=\displaystyle\sum_{j=1}^J\frac{x_{\cdot j\cdot}}{\lambda_2} \\
\displaystyle\sum_{k=1}^K\theta_{\cdot\cdot k}=\displaystyle\sum_{k=1}^K\frac{x_{\cdot\cdot k}}{\lambda_3} \\
\displaystyle \sum_{i=1}^I\theta_{ i\cdot\cdot}-1=0 \\
\displaystyle \sum_{j=1}^J\theta_{\cdot j\cdot}-1=0 \\
\displaystyle \sum_{k=1}^K\theta_{\cdot\cdot k}-1=0 \\
\end{array}
\right. \\ \\
&\Leftrightarrow&\left\{
\begin{array}{l}
1=\displaystyle\sum_{i=1}^I\frac{x_{ i\cdot\cdot}}{\lambda_1}\\
1=\displaystyle\sum_{j=1}^J\frac{x_{\cdot j\cdot}}{\lambda_2}\\
1=\displaystyle\sum_{k=1}^K\frac{x_{\cdot\cdot k}}{\lambda_3}\\
\displaystyle \sum_{i=1}^I\theta_{ i\cdot\cdot}-1=0 \\
\displaystyle \sum_{j=1}^J\theta_{\cdot j\cdot}-1=0 \\
\displaystyle \sum_{k=1}^K\theta_{\cdot\cdot k}-1=0 \\
\end{array}
\right. \\ \\
&\Leftrightarrow&\left\{
\begin{array}{l}
1=\frac{x_{\cdot\cdot\cdot}}{\lambda_1}\\
1=\frac{x_{\cdot\cdot\cdot}}{\lambda_2}\\
1=\frac{x_{\cdot\cdot\cdot}}{\lambda_3}\\
\displaystyle \sum_{i=1}^I\theta_{ i\cdot\cdot}-1=0 \\
\displaystyle \sum_{j=1}^J\theta_{\cdot j\cdot}-1=0 \\
\displaystyle \sum_{k=1}^K\theta_{\cdot\cdot k}-1=0 \\
\end{array}
\right. \\ \\
&\Rightarrow&\left\{
\begin{array}{l}
\lambda_1=x_{\cdot\cdot\cdot} \\
\lambda_2=x_{\cdot\cdot\cdot} \\
\lambda_3=x_{\cdot\cdot\cdot} \\
\end{array}
\right. \\ \\
\end{eqnarray}
となる。
\(\lambda_1,\lambda_2,\lambda_3\)を利用して、\(\theta_{i\cdot\cdot},\theta_{\cdot j\cdot},\theta_{\cdot\cdot k}\)を求める。
\begin{eqnarray}
&&\left\{
\begin{array}{l}
\frac{x_{ i\cdot\cdot}}{\theta_{ i\cdot\cdot}}-\lambda_1=0\\
\frac{x_{\cdot j\cdot}}{\theta_{\cdot j\cdot}}-\lambda_2=0\\
\frac{x_{\cdot\cdot k}}{\theta_{\cdot\cdot k}}-\lambda_3=0\\
\end{array}
\right. \\ \\
&\Leftrightarrow&\left\{
\begin{array}{l}
\frac{x_{ i\cdot\cdot}}{\theta_{ i\cdot\cdot}}-x_{\cdot\cdot\cdot}=0\\
\frac{x_{\cdot j\cdot}}{\theta_{\cdot j\cdot}}-x_{\cdot\cdot\cdot}=0\\
\frac{x_{\cdot\cdot k}}{\theta_{\cdot\cdot k}}-x_{\cdot\cdot\cdot}=0\\
\end{array}
\right. \\ \\
&\Leftrightarrow&\left\{
\begin{array}{l}
\theta_{ i\cdot\cdot}=\frac{x_{ i\cdot\cdot}}{x_{\cdot\cdot\cdot}}\\
\theta_{\cdot j\cdot}=\frac{x_{\cdot j\cdot}}{x_{\cdot\cdot\cdot}}\\
\theta_{\cdot\cdot k}=\frac{x_{\cdot\cdot k}}{x_{\cdot\cdot\cdot}}\\
\end{array}
\right. \\ \\
\end{eqnarray}
ここで、\(\theta_{ijk}=\theta_{i\cdot\cdot}\theta_{\cdot j\cdot}\theta_{\cdot\cdot k}\)を仮定したので、
\begin{eqnarray}
\theta_{ijk}&=&\theta_{i\cdot\cdot}\theta_{\cdot j\cdot}\theta_{\cdot\cdot k} \\ \\
&=&
\frac{x_{ i\cdot\cdot}}{x_{\cdot\cdot\cdot}}\frac{x_{\cdot j\cdot}}{x_{\cdot\cdot\cdot}}\frac{x_{\cdot\cdot k}}{x_{\cdot\cdot\cdot}} \\ \\
&=&
\frac{x_{ i\cdot\cdot}x_{\cdot j\cdot}x_{\cdot\cdot k} }{x_{\cdot\cdot\cdot}^3} \\ \\
&=&
\frac{x_{ i\cdot\cdot}x_{\cdot j\cdot}x_{\cdot\cdot k} }{n^3} \\ \\
\end{eqnarray}
まず、二元分割表に倣って対数尤度関数\(l=\log L\)を求める。
\begin{eqnarray}
L
&=&
\frac{n!}{\displaystyle \prod_{i=1}^I\prod_{j=1}^J\prod_{k=1}^K x_{ijk}!}\displaystyle\prod_{i=1}^I\prod_{j=1}^J\prod_{k=1}^K\theta_{ijk}^{x_{ijk} } \\ \\
\Rightarrow
l
&=&
\log L \\ \\
&=&
\log \left(\frac{n!}{\displaystyle \prod_{i=1}^I\prod_{j=1}^J\prod_{k=1}^K x_{ijk}!}\displaystyle\prod_{i=1}^I\prod_{j=1}^J\prod_{k=1}^K\theta_{ijk}^{x_{ijk} }\right) \\ \\
&=&
\log \left(\frac{n!}{\displaystyle \prod_{i=1}^I\prod_{j=1}^J\prod_{k=1}^K x_{ijk}!}\right)+\displaystyle\sum_{i=1}^I\sum_{j=1}^J\sum_{k=1}^Kx_{ijk}\log \theta_{ijk} \\ \\
\end{eqnarray}
周辺独立モデルを仮定しているため、\(\theta_{ijk}=\theta_{i\cdot\cdot}\theta_{\cdot jk}\)とすると、
\begin{eqnarray}
l
&=&
\log \left(\frac{n!}{\displaystyle \prod_{i=1}^I\prod_{j=1}^J\prod_{k=1}^K x_{ijk}!}\right)+\displaystyle\sum_{i=1}^I\sum_{j=1}^J\sum_{k=1}^Kx_{ijk}\log \theta_{ijk} \\ \\
&=&
\log \left(\frac{n!}{\displaystyle \prod_{i=1}^I\prod_{j=1}^J\prod_{k=1}^K x_{ijk}!}\right)+\displaystyle\sum_{i=1}^I\sum_{j=1}^J\sum_{k=1}^Kx_{ijk}\log (\theta_{i\cdot\cdot}\theta_{\cdot jk}) \\ \\
&=&
\log \left(\frac{n!}{\displaystyle \prod_{i=1}^I\prod_{j=1}^J\prod_{k=1}^K x_{ijk}!}\right)+\displaystyle\sum_{i=1}^I\sum_{j=1}^J\sum_{k=1}^Kx_{ijk}(\log \theta_{i\cdot\cdot}+\log \theta_{\cdot jk}) \\ \\
&=&
\log \left(\frac{n!}{\displaystyle \prod_{i=1}^I\prod_{j=1}^J\prod_{k=1}^K x_{ijk}!}\right)+\displaystyle\sum_{i=1}^I\sum_{j=1}^J\sum_{k=1}^Kx_{ijk}\log \theta_{i\cdot\cdot}+\displaystyle\sum_{i=1}^I\sum_{j=1}^J\sum_{k=1}^Kx_{ijk}\log \theta_{\cdot jk} \\ \\
&=&
\log \left(\frac{n!}{\displaystyle \prod_{i=1}^I\prod_{j=1}^J\prod_{k=1}^K x_{ijk}!}\right)+\displaystyle\sum_{i=1}^I(\sum_{j=1}^J\sum_{k=1}^Kx_{ijk}\log \theta_{i\cdot\cdot})+\displaystyle\sum_{j=1}^J\sum_{k=1}^K(\sum_{i=1}^Ix_{ijk}\log \theta_{\cdot jk}) \\ \\
&=&
\log \left(\frac{n!}{\displaystyle \prod_{i=1}^I\prod_{j=1}^J\prod_{k=1}^K x_{ijk}!}\right)+\displaystyle\sum_{i=1}^Ix_{i\cdot\cdot}\log \theta_{i\cdot\cdot}+\displaystyle\sum_{j=1}^J\sum_{k=1}^Kx_{\cdot jk}\log \theta_{\cdot jk} \\ \\
\end{eqnarray}
ここで、ラグランジュの未定乗数法のためにラグランジュ関数を求める。\(\theta\)に関する条件は、
\begin{eqnarray}
\left\{
\begin{array}{l}
\displaystyle\sum_{i=1}^I\theta_{i\cdot\cdot}&=&1\\
\displaystyle\sum_{j=1}^J\displaystyle\sum_{k=1}^K\theta_{\cdot jk}&=&1\\
\end{array}
\right. \\ \\
\end{eqnarray}
であるから、ラグランジュ関数\(L(\theta_{i\cdot\cdot},\theta_{\cdot jk},\lambda_1,\lambda_2)\)は
\begin{eqnarray}
L(\theta_{i\cdot\cdot},\theta_{\cdot jk},\lambda_1,\lambda_2)
&=&
l-\lambda_1(\displaystyle\sum_{i=1}^I\theta_{i\cdot\cdot}-1)-\lambda_2(\displaystyle\sum_{j=1}^J\sum_{k=1}^K\theta_{\cdot jk}-1) \\ \\
&=&
\log \left(\frac{n!}{\displaystyle \prod_{i=1}^I\prod_{j=1}^J\prod_{k=1}^K x_{ijk}!}\right)+\displaystyle\sum_{i=1}^Ix_{i\cdot\cdot}\log \theta_{i\cdot\cdot}+\displaystyle\sum_{j=1}^J\sum_{k=1}^Kx_{\cdot jk}\log \theta_{\cdot jk}-\lambda_1(\displaystyle\sum_{i=1}^I\theta_{i\cdot\cdot}-1)-\lambda_2(\displaystyle\sum_{j=1}^J\sum_{k=1}^K\theta_{\cdot jk}-1) \\ \\
\end{eqnarray}
となる。ラグランジュの未定乗数法を用いて\(\lambda_1,\lambda_2\)を求める。
\begin{eqnarray}
&&\left\{
\begin{array}{l}
\frac{\partial}{\partial \theta_{ i\cdot\cdot} }L ( \theta_{i \cdot \cdot },\theta_{\cdot jk},\lambda_1,\lambda_2)=0\\
\frac{\partial}{\partial \theta_{\cdot jk} }L(\theta_{i\cdot\cdot},\theta_{\cdot jk},\lambda_1,\lambda_2)=0\\
\frac{\partial}{\partial \lambda_1 }L(\theta_{i\cdot\cdot},\theta_{\cdot jk},\lambda_1,\lambda_2)=0\\
\frac{\partial}{\partial \lambda_2 }L(\theta_{i\cdot\cdot},\theta_{\cdot jk},\lambda_1,\lambda_2)=0\\
\end{array}
\right. \\ \\
&\Leftrightarrow&\left\{
\begin{array}{l}
\frac{x_{ i\cdot\cdot} }{\theta_{i\cdot\cdot} }-\lambda_1=0\\
\frac{x_{\cdot jk} }{\theta_{\cdot jk}}-\lambda_2=0\\
\displaystyle \sum_{i=1}^I\theta_{ i\cdot\cdot}-1=0 \\
\displaystyle \sum_{j=1}^J\sum_{k=1}^K\theta_{\cdot jk}-1=0 \\
\end{array}
\right. \\ \\
&\Leftrightarrow&\left\{
\begin{array}{l}
\theta_{ i\cdot\cdot}=\frac{x_{ i\cdot\cdot}}{\lambda_1} \\
\theta_{\cdot jk}=\frac{x_{\cdot jk}}{\lambda_2} \\
\displaystyle \sum_{i=1}^I\theta_{ i\cdot\cdot}-1=0 \\
\displaystyle \sum_{j=1}^J\sum_{k=1}^K\theta_{\cdot jk}-1=0 \\
\end{array}
\right. \\ \\
&\Leftrightarrow&\left\{
\begin{array}{l}
\displaystyle\sum_{i=1}^I\theta_{ i\cdot\cdot}=\displaystyle\sum_{i=1}^I\frac{x_{ i\cdot\cdot}}{\lambda_1} \\
\displaystyle\sum_{j=1}^J\sum_{k=1}^K\theta_{\cdot jk}=\displaystyle\sum_{j=1}^J\sum_{k=1}^K\frac{x_{\cdot jk}}{\lambda_2} \\
\displaystyle \sum_{i=1}^I\theta_{ i\cdot\cdot}-1=0 \\
\displaystyle \sum_{j=1}^J\sum_{k=1}^K\theta_{\cdot jk}-1=0 \\
\end{array}
\right. \\ \\
&\Leftrightarrow&\left\{
\begin{array}{l}
1=\displaystyle\sum_{i=1}^I\frac{x_{ i\cdot\cdot}}{\lambda_1} \\
1=\displaystyle\sum_{j=1}^J\sum_{k=1}^K\frac{x_{\cdot jk}}{\lambda_2} \\
\displaystyle \sum_{i=1}^I\theta_{ i\cdot\cdot}-1=0 \\
\displaystyle \sum_{j=1}^J\sum_{k=1}^K\theta_{\cdot jk}-1=0 \\
\end{array}
\right. \\ \\
&\Leftrightarrow&\left\{
\begin{array}{l}
1=\frac{x_{\cdot\cdot\cdot}}{\lambda_1} \\
1=\frac{x_{\cdot\cdot\cdot}}{\lambda_2} \\
\displaystyle \sum_{i=1}^I\theta_{ i\cdot\cdot}-1=0 \\
\displaystyle \sum_{j=1}^J\sum_{k=1}^K\theta_{\cdot jk}-1=0 \\
\end{array}
\right. \\ \\
&\Rightarrow&\left\{
\begin{array}{l}
\lambda_1=x_{\cdot\cdot\cdot} \\
\lambda_2=x_{\cdot\cdot\cdot} \\
\end{array}
\right. \\ \\
\end{eqnarray}
となる。
\(\lambda_1,\lambda_2\)を利用して、\(\theta_{i\cdot\cdot},\theta_{\cdot jk}\)を求める。
\begin{eqnarray}
&&\left\{
\begin{array}{l}
\frac{x_{ i\cdot\cdot}}{\theta_{ i\cdot\cdot}}-\lambda_1=0\\
\frac{x_{\cdot jk}}{\theta_{\cdot jk}}-\lambda_2=0\\
\end{array}
\right. \\ \\
&\Leftrightarrow&\left\{
\begin{array}{l}
\frac{x_{ i\cdot\cdot}}{\theta_{ i\cdot\cdot}}-x_{\cdot\cdot\cdot}=0\\
\frac{x_{\cdot jk}}{\theta_{\cdot jk}}-x_{\cdot\cdot\cdot}=0\\
\end{array}
\right. \\ \\
&\Leftrightarrow&\left\{
\begin{array}{l}
\theta_{ i\cdot\cdot}=\frac{x_{ i\cdot\cdot}}{x_{\cdot\cdot\cdot}}\\
\theta_{\cdot jk}=\frac{x_{\cdot jk}}{x_{\cdot\cdot\cdot}}\\
\end{array}
\right. \\ \\
\end{eqnarray}
ここで、\(\theta_{ijk}=\theta_{i\cdot\cdot}\theta_{\cdot jk}\)を仮定したので、
\begin{eqnarray}
\theta_{ijk}&=&\theta_{i\cdot\cdot}\theta_{\cdot jk} \\ \\
&=&
\frac{x_{ i\cdot\cdot}}{x_{\cdot\cdot\cdot}}\frac{x_{\cdot jk}}{x_{\cdot\cdot\cdot}} \\ \\
&=&
\frac{x_{ i\cdot\cdot}x_{\cdot j k} }{x_{\cdot\cdot\cdot}^2} \\ \\
&=&
\frac{x_{ i\cdot\cdot}x_{\cdot jk} }{n^2} \\ \\
\end{eqnarray}
まず、二元分割表に倣って対数尤度関数\(l=\log L\)を求める。
\begin{eqnarray}
L
&=&
\frac{n!}{\displaystyle \prod_{i=1}^I\prod_{j=1}^J\prod_{k=1}^K x_{ijk}!}\displaystyle\prod_{i=1}^I\prod_{j=1}^J\prod_{k=1}^K\theta_{ijk}^{x_{ijk} } \\ \\
\Rightarrow
l
&=&
\log L \\ \\
&=&
\log \left(\frac{n!}{\displaystyle \prod_{i=1}^I\prod_{j=1}^J\prod_{k=1}^K x_{ijk}!}\displaystyle\prod_{i=1}^I\prod_{j=1}^J\prod_{k=1}^K\theta_{ijk}^{x_{ijk} }\right) \\ \\
&=&
\log \left(\frac{n!}{\displaystyle \prod_{i=1}^I\prod_{j=1}^J\prod_{k=1}^K x_{ijk}!}\right)+\displaystyle\sum_{i=1}^I\sum_{j=1}^J\sum_{k=1}^Kx_{ijk}\log \theta_{ijk} \\ \\
\end{eqnarray}
条件付き独立モデルを仮定しているため、\(\theta_{ijk}=\frac{\theta_{i\cdot k}\theta_{\cdot jk}}{\theta_{\cdot\cdot k}}\)とすると、
\begin{eqnarray}
l
&=&
\log \left(\frac{n!}{\displaystyle \prod_{i=1}^I\prod_{j=1}^J\prod_{k=1}^K x_{ijk}!}\right)+\displaystyle\sum_{i=1}^I\sum_{j=1}^J\sum_{k=1}^Kx_{ijk}\log \theta_{ijk} \\ \\
&=&
\log \left(\frac{n!}{\displaystyle \prod_{i=1}^I\prod_{j=1}^J\prod_{k=1}^K x_{ijk}!}\right)+\displaystyle\sum_{i=1}^I\sum_{j=1}^J\sum_{k=1}^Kx_{ijk}\log \frac{\theta_{i\cdot k}\theta_{\cdot jk}}{\theta_{\cdot\cdot k}} \\ \\
&=&
\log \left(\frac{n!}{\displaystyle \prod_{i=1}^I\prod_{j=1}^J\prod_{k=1}^K x_{ijk}!}\right)+\displaystyle\sum_{i=1}^I\sum_{j=1}^J\sum_{k=1}^Kx_{ijk}(\log \theta_{i\cdot k}+\log \theta_{\cdot jk}-\log \theta_{\cdot\cdot k}) \\ \\
&=&
\log \left(\frac{n!}{\displaystyle \prod_{i=1}^I\prod_{j=1}^J\prod_{k=1}^K x_{ijk}!}\right)+\displaystyle\sum_{i=1}^I\sum_{j=1}^J\sum_{k=1}^Kx_{ijk}\log \theta_{i\cdot k}+\displaystyle\sum_{i=1}^I\sum_{j=1}^J\sum_{k=1}^Kx_{ijk}\log \theta_{\cdot jk}-\displaystyle\sum_{i=1}^I\sum_{j=1}^J\sum_{k=1}^Kx_{ijk}\log \theta_{\cdot\cdot k} \\ \\
&=&
\log \left(\frac{n!}{\displaystyle \prod_{i=1}^I\prod_{j=1}^J\prod_{k=1}^K x_{ijk}!}\right)+\displaystyle\sum_{i=1}^I\sum_{k=1}^K(\sum_{j=1}^Jx_{ijk}\log \theta_{i\cdot k})+\displaystyle\sum_{j=1}^J\sum_{k=1}^K(\sum_{i=1}^Ix_{ijk}\log \theta_{\cdot jk})-\displaystyle\sum_{k=1}^K(\sum_{i=1}^I\sum_{j=1}^Jx_{ijk}\log \theta_{\cdot\cdot k}) \\ \\
&=&
\log \left(\frac{n!}{\displaystyle \prod_{i=1}^I\prod_{j=1}^J\prod_{k=1}^K x_{ijk}!}\right)+\displaystyle\sum_{i=1}^I\sum_{k=1}^Kx_{i\cdot k}\log \theta_{i\cdot k}+\displaystyle\sum_{j=1}^J\sum_{k=1}^Kx_{\cdot jk}\log \theta_{\cdot jk}-\displaystyle\sum_{k=1}^Kx_{\cdot\cdot k}\log \theta_{\cdot\cdot k} \\ \\
\end{eqnarray}
ここで、条件付き確率の総和について、p.8を参考にし、
\begin{eqnarray}
p_X(x)=\displaystyle \sum_{y}p(x,y)
\end{eqnarray}
より、
\begin{eqnarray}
\displaystyle\sum_yp_{Y|X}(y|x)
&=&
\displaystyle \sum_{y}\frac{p(x,y)}{p_X(x)} \\ \\
&=&
\displaystyle \frac{\sum_{y}p(x,y)}{p_X(x)} \\ \\
&=&
\displaystyle \frac{p_X(x)}{p_X(x)} \\ \\
&=&
1
\end{eqnarray}
であることを利用して、\(\theta\)に関する条件は次のように導く。
\begin{eqnarray} &&\left\{ \begin{array}{l} \displaystyle\sum_{i=1}^I\frac{\theta_{i\cdot k}}{\theta_{\cdot\cdot k}}&=&1\\ \displaystyle\sum_{j=1}^J\frac{\theta_{\cdot jk}}{\theta_{\cdot\cdot k}}&=&1\\ \displaystyle\sum_{k=1}^K\theta_{\cdot\cdot k}&=&1\\ \end{array} \right. \\ \\ &\Leftrightarrow&\left\{ \begin{array}{l} \displaystyle\sum_{i=1}^I\theta_{i\cdot k}&=&\theta_{\cdot\cdot k}\\ \displaystyle\sum_{j=1}^J\theta_{\cdot jk}&=&\theta_{\cdot\cdot k}\\ \displaystyle\sum_{k=1}^K\theta_{\cdot\cdot k}&=&1\\ \end{array} \right. \\ \\ &\Rightarrow&\left\{ \begin{array}{l} \displaystyle\sum_{i=1}^I\sum_{k=1}^K\theta_{i\cdot k}&=&\displaystyle\sum_{k=1}^K\theta_{\cdot\cdot k}\\ \displaystyle\sum_{j=1}^J\sum_{k=1}^K\theta_{\cdot jk}&=&\displaystyle\sum_{k=1}^K\theta_{\cdot\cdot k}\\ \displaystyle\sum_{k=1}^K\theta_{\cdot\cdot k}&=&1\\ \end{array} \right. \\ \\ &\Leftrightarrow&\left\{ \begin{array}{l} \displaystyle\sum_{i=1}^I\sum_{k=1}^K\theta_{i\cdot k}&=&1\\ \displaystyle\sum_{j=1}^J\sum_{k=1}^K\theta_{\cdot jk}&=&1\\ \displaystyle\sum_{k=1}^K\theta_{\cdot\cdot k}&=&1\\ \end{array} \right. 
\\ \\ \end{eqnarray} これを利用して、ラグランジュ関数\(L(\theta_{i\cdot k},\theta_{\cdot jk},\theta_{\cdot\cdot k},\lambda_1,\lambda_2,\lambda_3)\)を求める。 \begin{eqnarray} L(\theta_{i\cdot k},\theta_{\cdot jk},\theta_{\cdot\cdot k},\lambda_1,\lambda_2,\lambda_3) &=& l-\lambda_1(\displaystyle\sum_{i=1}^I\sum_{k=1}^K\theta_{i\cdot k}-1)-\lambda_2(\displaystyle\sum_{j=1}^J\sum_{k=1}^K\theta_{\cdot jk}-1)-\lambda_3(\displaystyle\sum_{k=1}^K\theta_{\cdot\cdot k}-1) \\ \\ &=& \log \left(\frac{n!}{\displaystyle \prod_{i=1}^I\prod_{j=1}^J\prod_{k=1}^K x_{ijk}!}\right)+\displaystyle\sum_{i=1}^I\sum_{k=1}^Kx_{i\cdot k}\log \theta_{i\cdot k}+\displaystyle\sum_{j=1}^J\sum_{k=1}^Kx_{\cdot jk}\log \theta_{\cdot jk}-\displaystyle\sum_{k=1}^Kx_{\cdot\cdot k}\log \theta_{\cdot\cdot k}-\lambda_1(\displaystyle\sum_{i=1}^I\sum_{k=1}^K\theta_{i\cdot k}-1)-\lambda_2(\displaystyle\sum_{j=1}^J\sum_{k=1}^K\theta_{\cdot jk}-1)-\lambda_3(\displaystyle\sum_{k=1}^K\theta_{\cdot\cdot k}-1) \\ \\ \end{eqnarray} となる。ラグランジュの未定乗数法を用いて\(\lambda_1,\lambda_2,\lambda_3\)を求める。 \begin{eqnarray} &&\left\{ \begin{array}{l} \frac{\partial}{\partial \theta_{ i\cdot k} }L(\theta_{i\cdot k},\theta_{\cdot jk},\theta_{\cdot\cdot k},\lambda_1,\lambda_2,\lambda_3)=0\\ \frac{\partial}{\partial \theta_{\cdot jk} }L(\theta_{i\cdot k},\theta_{\cdot jk},\theta_{\cdot\cdot k},\lambda_1,\lambda_2,\lambda_3)=0\\ \frac{\partial}{\partial \theta_{\cdot\cdot k} }L(\theta_{i\cdot k},\theta_{\cdot jk},\theta_{\cdot\cdot k},\lambda_1,\lambda_2,\lambda_3)=0\\ \frac{\partial}{\partial \lambda_1 }L(\theta_{i\cdot k},\theta_{\cdot jk},\theta_{\cdot\cdot k},\lambda_1,\lambda_2,\lambda_3)=0\\ \frac{\partial}{\partial \lambda_2 }L(\theta_{i\cdot k},\theta_{\cdot jk},\theta_{\cdot\cdot k},\lambda_1,\lambda_2,\lambda_3)=0\\ \frac{\partial}{\partial \lambda_3 }L(\theta_{i\cdot k},\theta_{\cdot jk},\theta_{\cdot\cdot k},\lambda_1,\lambda_2,\lambda_3)=0\\ \end{array} \right. 
\\ \\ &\Leftrightarrow&\left\{ \begin{array}{l} \frac{x_{ i\cdot k} }{\theta_{i\cdot k} }-\lambda_1=0\\ \frac{x_{\cdot jk} }{\theta_{\cdot jk}}-\lambda_2=0\\ -\frac{x_{\cdot\cdot k} }{\theta_{\cdot\cdot k}}-\lambda_3=0\\ \displaystyle \sum_{i=1}^I\sum_{k=1}^K\theta_{i\cdot k}-1=0 \\ \displaystyle \sum_{j=1}^J\sum_{k=1}^K\theta_{\cdot jk}-1=0 \\ \displaystyle \sum_{k=1}^K\theta_{\cdot\cdot k}-1=0 \\ \end{array} \right. \\ \\ &\Leftrightarrow&\left\{ \begin{array}{l} \theta_{i\cdot k}=\frac{x_{i\cdot k}}{\lambda_1} \\ \theta_{\cdot jk}=\frac{x_{\cdot jk}}{\lambda_2} \\ \theta_{\cdot\cdot k}=-\frac{x_{\cdot\cdot k}}{\lambda_3} \\ \displaystyle \sum_{i=1}^I\sum_{k=1}^K\theta_{i\cdot k}-1=0 \\ \displaystyle \sum_{j=1}^J\sum_{k=1}^K\theta_{\cdot jk}-1=0 \\ \displaystyle \sum_{k=1}^K\theta_{\cdot\cdot k}-1=0 \\ \end{array} \right. \\ \\ &\Leftrightarrow&\left\{ \begin{array}{l} \displaystyle\sum_{i=1}^I\sum_{k=1}^K\theta_{i\cdot k}=\displaystyle\sum_{i=1}^I\sum_{k=1}^K\frac{x_{i\cdot k}}{\lambda_1} \\ \displaystyle\sum_{j=1}^J\sum_{k=1}^K\theta_{\cdot jk}=\displaystyle\sum_{j=1}^J\sum_{k=1}^K\frac{x_{\cdot jk}}{\lambda_2} \\ \displaystyle\sum_{k=1}^K\theta_{\cdot\cdot k}=-\displaystyle\sum_{k=1}^K\frac{x_{\cdot\cdot k}}{\lambda_3} \\ \displaystyle \sum_{i=1}^I\sum_{k=1}^K\theta_{i\cdot k}-1=0 \\ \displaystyle \sum_{j=1}^J\sum_{k=1}^K\theta_{\cdot jk}-1=0 \\ \displaystyle \sum_{k=1}^K\theta_{\cdot\cdot k}-1=0 \\ \end{array} \right. \\ \\ &\Leftrightarrow&\left\{ \begin{array}{l} 1=\displaystyle\sum_{i=1}^I\sum_{k=1}^K\frac{x_{i\cdot k}}{\lambda_1} \\ 1=\displaystyle\sum_{j=1}^J\sum_{k=1}^K\frac{x_{\cdot jk}}{\lambda_2} \\ 1=-\displaystyle\sum_{k=1}^K\frac{x_{\cdot\cdot k}}{\lambda_3} \\ \displaystyle \sum_{i=1}^I\sum_{k=1}^K\theta_{i\cdot k}-1=0 \\ \displaystyle \sum_{j=1}^J\sum_{k=1}^K\theta_{\cdot jk}-1=0 \\ \displaystyle \sum_{k=1}^K\theta_{\cdot\cdot k}-1=0 \\ \end{array} \right. 
\\ \\ &\Leftrightarrow&\left\{ \begin{array}{l} 1=\frac{x_{\cdot\cdot\cdot}}{\lambda_1} \\ 1=\frac{x_{\cdot\cdot\cdot}}{\lambda_2} \\ 1=-\frac{x_{\cdot\cdot\cdot}}{\lambda_3} \\ \displaystyle \sum_{i=1}^I\sum_{k=1}^K\theta_{i\cdot k}-1=0 \\ \displaystyle \sum_{j=1}^J\sum_{k=1}^K\theta_{\cdot jk}-1=0 \\ \displaystyle \sum_{k=1}^K\theta_{\cdot\cdot k}-1=0 \\ \end{array} \right. \\ \\ &\Rightarrow&\left\{ \begin{array}{l} \lambda_1=x_{\cdot\cdot\cdot} \\ \lambda_2=x_{\cdot\cdot\cdot} \\ \lambda_3=-x_{\cdot\cdot\cdot} \\ \end{array} \right. \\ \\ \end{eqnarray} となる。 \(\lambda_1,\lambda_2,\lambda_3\)を利用して、\(\theta_{i\cdot k},\theta_{\cdot jk},\theta_{\cdot\cdot k}\)を求める。 \begin{eqnarray} &&\left\{ \begin{array}{l} \frac{x_{ i\cdot k}}{\theta_{ i\cdot k}}-\lambda_1=0\\ \frac{x_{\cdot jk}}{\theta_{\cdot jk}}-\lambda_2=0\\ -\frac{x_{\cdot\cdot k}}{\theta_{\cdot \cdot k}}-\lambda_3=0\\ \end{array} \right. \\ \\ &\Leftrightarrow&\left\{ \begin{array}{l} \frac{x_{ i\cdot k}}{\theta_{ i\cdot k}}-x_{\cdot\cdot\cdot}=0\\ \frac{x_{\cdot jk}}{\theta_{\cdot jk}}-x_{\cdot\cdot\cdot}=0\\ -\frac{x_{\cdot\cdot k}}{\theta_{\cdot \cdot k}}-(-x_{\cdot\cdot\cdot})=0\\ \end{array} \right. \\ \\ &\Leftrightarrow&\left\{ \begin{array}{l} \theta_{ i\cdot k}=\frac{x_{ i\cdot k}}{x_{\cdot\cdot\cdot}}\\ \theta_{\cdot jk}=\frac{x_{\cdot jk}}{x_{\cdot\cdot\cdot}}\\ \theta_{ \cdot\cdot k}=\frac{x_{ \cdot\cdot k}}{x_{\cdot\cdot\cdot}}\\ \end{array} \right. \\ \\ \end{eqnarray} ここで、\(\theta_{ijk}=\theta_{i\cdot k}\theta_{\cdot jk}/\theta_{\cdot\cdot k}\)を仮定したので、 \begin{eqnarray} \theta_{ijk}&=&\theta_{i\cdot k}\theta_{\cdot jk}/\theta_{\cdot\cdot k} \\ \\ &=& \frac{\frac{x_{ i\cdot k}}{x_{\cdot\cdot\cdot}}\frac{x_{\cdot jk}}{x_{\cdot\cdot\cdot}}}{\frac{x_{ \cdot\cdot k}}{x_{\cdot\cdot\cdot}}} \\ \\ &=& \frac{x_{ i\cdot k}x_{\cdot j k} }{x_{\cdot\cdot\cdot}x_{\cdot\cdot k}} \\ \\ &=& \frac{x_{ i\cdot k}x_{\cdot j k} }{nx_{\cdot\cdot k}} \\ \\ \end{eqnarray}
\begin{eqnarray} &&\left\{ \begin{array}{l} \displaystyle\sum_{i=1}^I\frac{\theta_{i\cdot k}}{\theta_{\cdot\cdot k}}&=&1\\ \displaystyle\sum_{j=1}^J\frac{\theta_{\cdot jk}}{\theta_{\cdot\cdot k}}&=&1\\ \displaystyle\sum_{k=1}^K\theta_{\cdot\cdot k}&=&1\\ \end{array} \right. \\ \\ &\Leftrightarrow&\left\{ \begin{array}{l} \displaystyle\sum_{i=1}^I\theta_{i\cdot k}&=&\theta_{\cdot\cdot k}\\ \displaystyle\sum_{j=1}^J\theta_{\cdot jk}&=&\theta_{\cdot\cdot k}\\ \displaystyle\sum_{k=1}^K\theta_{\cdot\cdot k}&=&1\\ \end{array} \right. \\ \\ &\Rightarrow&\left\{ \begin{array}{l} \displaystyle\sum_{i=1}^I\sum_{k=1}^K\theta_{i\cdot k}&=&\displaystyle\sum_{k=1}^K\theta_{\cdot\cdot k}\\ \displaystyle\sum_{j=1}^J\sum_{k=1}^K\theta_{\cdot jk}&=&\displaystyle\sum_{k=1}^K\theta_{\cdot\cdot k}\\ \displaystyle\sum_{k=1}^K\theta_{\cdot\cdot k}&=&1\\ \end{array} \right. \\ \\ &\Leftrightarrow&\left\{ \begin{array}{l} \displaystyle\sum_{i=1}^I\sum_{k=1}^K\theta_{i\cdot k}&=&1\\ \displaystyle\sum_{j=1}^J\sum_{k=1}^K\theta_{\cdot jk}&=&1\\ \displaystyle\sum_{k=1}^K\theta_{\cdot\cdot k}&=&1\\ \end{array} \right. 
\\ \\ \end{eqnarray} これを利用して、ラグランジュ関数\(L(\theta_{i\cdot k},\theta_{\cdot jk},\theta_{\cdot\cdot k},\lambda_1,\lambda_2,\lambda_3)\)を求める。 \begin{eqnarray} L(\theta_{i\cdot k},\theta_{\cdot jk},\theta_{\cdot\cdot k},\lambda_1,\lambda_2,\lambda_3) &=& l-\lambda_1(\displaystyle\sum_{i=1}^I\sum_{k=1}^K\theta_{i\cdot k}-1)-\lambda_2(\displaystyle\sum_{j=1}^J\sum_{k=1}^K\theta_{\cdot jk}-1)-\lambda_3(\displaystyle\sum_{k=1}^K\theta_{\cdot\cdot k}-1) \\ \\ &=& \log \left(\frac{n!}{\displaystyle \prod_{i=1}^I\prod_{j=1}^J\prod_{k=1}^K x_{ijk}!}\right)+\displaystyle\sum_{i=1}^I\sum_{k=1}^Kx_{i\cdot k}\log \theta_{i\cdot k}+\displaystyle\sum_{j=1}^J\sum_{k=1}^Kx_{\cdot jk}\log \theta_{\cdot jk}-\displaystyle\sum_{k=1}^Kx_{\cdot\cdot k}\log \theta_{\cdot\cdot k}-\lambda_1(\displaystyle\sum_{i=1}^I\sum_{k=1}^K\theta_{i\cdot k}-1)-\lambda_2(\displaystyle\sum_{j=1}^J\sum_{k=1}^K\theta_{\cdot jk}-1)-\lambda_3(\displaystyle\sum_{k=1}^K\theta_{\cdot\cdot k}-1) \\ \\ \end{eqnarray} となる。ラグランジュの未定乗数法を用いて\(\lambda_1,\lambda_2,\lambda_3\)を求める。 \begin{eqnarray} &&\left\{ \begin{array}{l} \frac{\partial}{\partial \theta_{ i\cdot k} }L(\theta_{i\cdot k},\theta_{\cdot jk},\theta_{\cdot\cdot k},\lambda_1,\lambda_2,\lambda_3)=0\\ \frac{\partial}{\partial \theta_{\cdot jk} }L(\theta_{i\cdot k},\theta_{\cdot jk},\theta_{\cdot\cdot k},\lambda_1,\lambda_2,\lambda_3)=0\\ \frac{\partial}{\partial \theta_{\cdot\cdot k} }L(\theta_{i\cdot k},\theta_{\cdot jk},\theta_{\cdot\cdot k},\lambda_1,\lambda_2,\lambda_3)=0\\ \frac{\partial}{\partial \lambda_1 }L(\theta_{i\cdot k},\theta_{\cdot jk},\theta_{\cdot\cdot k},\lambda_1,\lambda_2,\lambda_3)=0\\ \frac{\partial}{\partial \lambda_2 }L(\theta_{i\cdot k},\theta_{\cdot jk},\theta_{\cdot\cdot k},\lambda_1,\lambda_2,\lambda_3)=0\\ \frac{\partial}{\partial \lambda_3 }L(\theta_{i\cdot k},\theta_{\cdot jk},\theta_{\cdot\cdot k},\lambda_1,\lambda_2,\lambda_3)=0\\ \end{array} \right. 
\\ \\ &\Leftrightarrow&\left\{ \begin{array}{l} \frac{x_{ i\cdot k} }{\theta_{i\cdot k} }-\lambda_1=0\\ \frac{x_{\cdot jk} }{\theta_{\cdot jk}}-\lambda_2=0\\ -\frac{x_{\cdot\cdot k} }{\theta_{\cdot\cdot k}}-\lambda_3=0\\ \displaystyle \sum_{i=1}^I\sum_{k=1}^K\theta_{i\cdot k}-1=0 \\ \displaystyle \sum_{j=1}^J\sum_{k=1}^K\theta_{\cdot jk}-1=0 \\ \displaystyle \sum_{k=1}^K\theta_{\cdot\cdot k}-1=0 \\ \end{array} \right. \\ \\ &\Leftrightarrow&\left\{ \begin{array}{l} \theta_{i\cdot k}=\frac{x_{i\cdot k}}{\lambda_1} \\ \theta_{\cdot jk}=\frac{x_{\cdot jk}}{\lambda_2} \\ \theta_{\cdot\cdot k}=-\frac{x_{\cdot\cdot k}}{\lambda_3} \\ \displaystyle \sum_{i=1}^I\sum_{k=1}^K\theta_{i\cdot k}-1=0 \\ \displaystyle \sum_{j=1}^J\sum_{k=1}^K\theta_{\cdot jk}-1=0 \\ \displaystyle \sum_{k=1}^K\theta_{\cdot\cdot k}-1=0 \\ \end{array} \right. \\ \\ &\Leftrightarrow&\left\{ \begin{array}{l} \displaystyle\sum_{i=1}^I\sum_{k=1}^K\theta_{i\cdot k}=\displaystyle\sum_{i=1}^I\sum_{k=1}^K\frac{x_{i\cdot k}}{\lambda_1} \\ \displaystyle\sum_{j=1}^J\sum_{k=1}^K\theta_{\cdot jk}=\displaystyle\sum_{j=1}^J\sum_{k=1}^K\frac{x_{\cdot jk}}{\lambda_2} \\ \displaystyle\sum_{k=1}^K\theta_{\cdot\cdot k}=-\displaystyle\sum_{k=1}^K\frac{x_{\cdot\cdot k}}{\lambda_3} \\ \displaystyle \sum_{i=1}^I\sum_{k=1}^K\theta_{i\cdot k}-1=0 \\ \displaystyle \sum_{j=1}^J\sum_{k=1}^K\theta_{\cdot jk}-1=0 \\ \displaystyle \sum_{k=1}^K\theta_{\cdot\cdot k}-1=0 \\ \end{array} \right. \\ \\ &\Leftrightarrow&\left\{ \begin{array}{l} 1=\displaystyle\sum_{i=1}^I\sum_{k=1}^K\frac{x_{i\cdot k}}{\lambda_1} \\ 1=\displaystyle\sum_{j=1}^J\sum_{k=1}^K\frac{x_{\cdot jk}}{\lambda_2} \\ 1=-\displaystyle\sum_{k=1}^K\frac{x_{\cdot\cdot k}}{\lambda_3} \\ \displaystyle \sum_{i=1}^I\sum_{k=1}^K\theta_{i\cdot k}-1=0 \\ \displaystyle \sum_{j=1}^J\sum_{k=1}^K\theta_{\cdot jk}-1=0 \\ \displaystyle \sum_{k=1}^K\theta_{\cdot\cdot k}-1=0 \\ \end{array} \right. 
\\ \\ &\Leftrightarrow&\left\{ \begin{array}{l} 1=\frac{x_{\cdot\cdot\cdot}}{\lambda_1} \\ 1=\frac{x_{\cdot\cdot\cdot}}{\lambda_2} \\ 1=-\frac{x_{\cdot\cdot\cdot}}{\lambda_3} \\ \displaystyle \sum_{i=1}^I\sum_{k=1}^K\theta_{i\cdot k}-1=0 \\ \displaystyle \sum_{j=1}^J\sum_{k=1}^K\theta_{\cdot jk}-1=0 \\ \displaystyle \sum_{k=1}^K\theta_{\cdot\cdot k}-1=0 \\ \end{array} \right. \\ \\ &\Rightarrow&\left\{ \begin{array}{l} \lambda_1=x_{\cdot\cdot\cdot} \\ \lambda_2=x_{\cdot\cdot\cdot} \\ \lambda_3=-x_{\cdot\cdot\cdot} \\ \end{array} \right. \\ \\ \end{eqnarray} となる。 \(\lambda_1,\lambda_2,\lambda_3\)を利用して、\(\theta_{i\cdot k},\theta_{\cdot jk},\theta_{\cdot\cdot k}\)を求める。 \begin{eqnarray} &&\left\{ \begin{array}{l} \frac{x_{ i\cdot k}}{\theta_{ i\cdot k}}-\lambda_1=0\\ \frac{x_{\cdot jk}}{\theta_{\cdot jk}}-\lambda_2=0\\ -\frac{x_{\cdot\cdot k}}{\theta_{\cdot \cdot k}}-\lambda_3=0\\ \end{array} \right. \\ \\ &\Leftrightarrow&\left\{ \begin{array}{l} \frac{x_{ i\cdot k}}{\theta_{ i\cdot k}}-x_{\cdot\cdot\cdot}=0\\ \frac{x_{\cdot jk}}{\theta_{\cdot jk}}-x_{\cdot\cdot\cdot}=0\\ -\frac{x_{\cdot\cdot k}}{\theta_{\cdot \cdot k}}-(-x_{\cdot\cdot\cdot})=0\\ \end{array} \right. \\ \\ &\Leftrightarrow&\left\{ \begin{array}{l} \theta_{ i\cdot k}=\frac{x_{ i\cdot k}}{x_{\cdot\cdot\cdot}}\\ \theta_{\cdot jk}=\frac{x_{\cdot jk}}{x_{\cdot\cdot\cdot}}\\ \theta_{ \cdot\cdot k}=\frac{x_{ \cdot\cdot k}}{x_{\cdot\cdot\cdot}}\\ \end{array} \right. \\ \\ \end{eqnarray} ここで、\(\theta_{ijk}=\theta_{i\cdot k}\theta_{\cdot jk}/\theta_{\cdot\cdot k}\)を仮定したので、 \begin{eqnarray} \theta_{ijk}&=&\theta_{i\cdot k}\theta_{\cdot jk}/\theta_{\cdot\cdot k} \\ \\ &=& \frac{\frac{x_{ i\cdot k}}{x_{\cdot\cdot\cdot}}\frac{x_{\cdot jk}}{x_{\cdot\cdot\cdot}}}{\frac{x_{ \cdot\cdot k}}{x_{\cdot\cdot\cdot}}} \\ \\ &=& \frac{x_{ i\cdot k}x_{\cdot j k} }{x_{\cdot\cdot\cdot}x_{\cdot\cdot k}} \\ \\ &=& \frac{x_{ i\cdot k}x_{\cdot j k} }{nx_{\cdot\cdot k}} \\ \\ \end{eqnarray}