関連性データの解析法 多次元尺度とクラスター分析法
# Principal coordinate analysis (classical scaling) of prefectural statistics.
# x1-x4 are the observed numeric variables; y is carried along but unused here.
data <- data.frame(
  prefecture = c("Hokkaido", "Aomori", "Iwate", "Miyagi", "Akita", "Yamagata", "Fukushima", "Ibaragi", "Totigi", "Gumma", "Saitama", "Tiba", "Tokyo", "Kanagawa", "Niigata", "Toyama", "Ishikawa", "Fukui", "Yamanashi", "Nagano", "Gifu", "Shizuoka", "Aichi", "Mie", "Shiga", "Kyoto", "Oosaka", "Hyogo", "Nara", "Wakayama", "Tottori", "Shimane", "Okayama", "Hiroshima", "Yamaguchi", "Tokushima", "Kagawa", "Ehime", "Kouchi", "Hukuoka", "Saga", "Nagasaki", "Kumamoto", "Ooita", "Miyazaki", "Kagoshima"),
  x1 = c(6364, 7135, 7266, 7186, 6274, 6497, 6740, 6687, 6917, 6725, 7928, 7403, 13500, 8537, 7315, 6730, 8875, 7589, 7249, 6319, 7673, 7270, 8149, 6385, 7049, 10861, 11548, 9986, 6728, 7596, 7324, 6166, 5889, 8170, 5147, 6997, 6460, 6657, 7979, 7301, 6564, 8866, 6928, 6983, 6016, 7688),
  x2 = c(90547, 12634, 20101, 15486, 12471, 12446, 27026, 19486, 19919, 16611, 42768, 37630, 147481, 55648, 27569, 11709, 13660, 11161, 10092, 22957, 20135, 36377, 48255, 19562, 9889, 16925, 146758, 75350, 16405, 13007, 8005, 10334, 22146, 23029, 15488, 9905, 7407, 14795, 12387, 31172, 8151, 11117, 14634, 11504, 11415, 12546),
  x3 = c(19.7, 20, 16.3, 20.6, 18.1, 21.2, 14.2, 9.6, 28.8, 16.3, 15.5, 28.4, 49.1, 31.8, 14.6, 35.3, 30.8, 32.9, 24.2, 11.4, 18.3, 19.6, 21.4, 14.6, 21.4, 23.6, 40.9, 24.8, 10.7, 16.8, 26.6, 10.2, 11, 16.3, 23.3, 10.8, 21.9, 16.2, 11.8, 17.7, 24.7, 17.5, 18.5, 20.9, 25.2, 32.5),
  x4 = c(640.9, 128.2, 113.8, 171.7, 97.4, 134.4, 166.5, 225.1, 187.5, 212, 337.3, 319.4, 1584.3, 559.2, 234.3, 121.4, 120, 93.5, 93.2, 238.1, 255.1, 419.1, 825.1, 160.5, 91.8, 233.6, 807.5, 404.6, 85.8, 111, 44.1, 52.9, 156.6, 256.1, 126.8, 71.4, 81.5, 108.1, 72.7, 367, 70.6, 89.6, 136.6, 97.5, 96.3, 119.4),
  y = c(44523, 10395, 8669, 12808, 7640, 7094, 17556, 20320, 18715, 16755, 34601, 27811, 88406, 48030, 18213, 8839, 12086, 8871, 8961, 15280, 17837, 37421, 50998, 14809, 12610, 35478, 75497, 55464, 7916, 14259, 6244, 5400, 19015, 36771, 13827, 9372, 11088, 9960, 8516, 51175, 10384, 10652, 15987, 10216, 8460, 12932)
)

# Keep only the numeric variables x1..x4 as a matrix.
X <- as.matrix(data[, colnames(data) %in% c("x1", "x2", "x3", "x4")])
n <- nrow(X)

# Centering matrix H = I - (1/n) 11'; H %*% X subtracts each column mean.
H <- diag(n) - matrix(1 / n, nrow = n, ncol = n)
X_star <- H %*% X                 # column-centered data
P_star <- X_star %*% t(X_star)    # Gram (inner-product) matrix of the centered rows
eigen(P_star)                     # eigendecomposition (printed when run interactively)
# Torgerson's classical multidimensional scaling on a 9x9 dissimilarity matrix.
# The upper-triangular z-values are unfolded, symmetrized, and shifted by the
# additive constant 3.215; the diagonal of the dissimilarity matrix S is 0.
mat <- matrix(c(0, -2.37, -0.12, -0.62, 0.23, 1.56, 1.09, 2.02, 2.23, 0, 0, -1.01, -1.93, -0.9, 0.8, -0.47, 1.05, 0.78, 0, 0, 0, 0.7, -1.32, -0.67, 1.07, 0.7, 2.62, 0, 0, 0, 0, -0.78, 1.25, -1.75, 0.28, -0.72, 0, 0, 0, 0, 0, -1.02, -1.23, -1.65, 0.49, 0, 0, 0, 0, 0, 0, 0.57, -0.67, 1.88, 0, 0, 0, 0, 0, 0, 0, -1.18, -1.3, 0, 0, 0, 0, 0, 0, 0, 0, 0.42, 0, 0, 0, 0, 0, 0, 0, 0, 0), ncol = 9)
mat <- mat + t(mat) + 3.215
S <- mat
diag(S) <- 0
# Alternative 6x6 example data kept from the original source:
#S=matrix(c(0,2.3,3,2.65,2.6,3.1,2.7,0,2.8,2.9,2.4,3.5,3,2.8,0,2.25,3.05,3.4,2.65,2.9,2.25,0,3.2,3.25,2.6,2.4,3.05,3.2,0,3.3,3.1,3.5,3.4,3.25,3.3,0),ncol=6)
n <- nrow(S)

# Centering matrix H = I - (1/n) 11'.
H <- diag(n) - matrix(1 / n, nrow = n, ncol = n)
# Double-centered inner-product matrix B = -(1/2) H S^2 H (H is symmetric, so t(H) = H).
B <- -H %*% (S^2) %*% H / 2

decomp <- eigen(B)
eigen_value <- decomp$values
eigen_vectors <- decomp$vectors

# Coordinates: scale each eigenvector by sqrt(eigenvalue). Even with the
# additive constant, B can have (at least numerically) negative eigenvalues,
# and sqrt() of a negative value returns NaN with a warning — clamp at 0 so
# the corresponding dimensions collapse to zero instead of NaN.
X <- eigen_vectors
for (j in seq_len(ncol(X))) {
  X[, j] <- sqrt(max(eigen_value[j], 0)) * X[, j]
}

# Cumulative goodness-of-fit based on squared eigenvalues (Mardia's criterion).
phi <- cumsum(eigen_value^2) / sum(eigen_value^2)
plot(X[, 1:2])
# EQ method (p.92)
# 3.6.1 Analysis of color dissimilarity data
mat <- matrix(c(0, -2.37, -0.12, -0.62, 0.23, 1.56, 1.09, 2.02, 2.23, 0, 0, -1.01, -1.93, -0.9, 0.8, -0.47, 1.05, 0.78, 0, 0, 0, 0.7, -1.32, -0.67, 1.07, 0.7, 2.62, 0, 0, 0, 0, -0.78, 1.25, -1.75, 0.28, -0.72, 0, 0, 0, 0, 0, -1.02, -1.23, -1.65, 0.49, 0, 0, 0, 0, 0, 0, 0.57, -0.67, 1.88, 0, 0, 0, 0, 0, 0, 0, -1.18, -1.3, 0, 0, 0, 0, 0, 0, 0, 0, 0.42, 0, 0, 0, 0, 0, 0, 0, 0, 0), ncol = 9)
mat <- mat + t(mat)   # unfold the upper triangle into a symmetric matrix
eij <- -mat           # flip sign: larger value = more similar

# Symmetrize (eij is already symmetric here, so this doubles each entry)
hjk <- eij + t(eij)

# Adjust the diagonal so that every row and column of gjk sums to zero
gjk <- hjk
diag(gjk) <- diag(hjk) - rowSums(hjk)

decomp <- eigen(gjk)
lam <- decomp$values
# Rescale the eigenvalues onto the unit interval [0, 1]
prop <- (lam - min(lam)) / (max(lam) - min(lam))
vectors <- decomp$vectors
plot(-vectors[, 1], -vectors[, 2])
# p.75: scaling of binary (0/1) response data — 8 respondents x 6 items.
mat <- matrix(c(0, 1, 1, 0, 0, 1, 1, 0, 1, 0, 0, 1, 1, 0, 0, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 0, 1, 0, 1, 0, 1, 0, 0, 1, 0, 1, 1, 1, 0, 1, 1), ncol = 6)
m <- nrow(mat)

# Co-occurrence proportion matrix between items; the diagonal is fixed at 1.
Pa <- (t(mat) %*% mat) / m
diag(Pa) <- 1

decomp <- eigen(Pa)
eigen_vectors <- decomp$vectors
eigen_values <- decomp$values

# Scale each eigenvector by sqrt(eigenvalue). After the diagonal of Pa is
# overwritten, Pa is not guaranteed to be positive semidefinite, and sqrt()
# of a negative eigenvalue yields NaN with a warning — clamp at 0 instead.
x <- eigen_vectors
for (j in seq_along(eigen_values)) {
  x[, j] <- x[, j] * sqrt(max(eigen_values[j], 0))
}
plot(x[, 1], x[, 2])
Author And Source
この問題について(関連性データの解析法 多次元尺度とクラスター分析法)、我々はより多くの情報をここで見つけました: https://qiita.com/kozakai-ryouta/items/cadce65bc5ceab9953bf 。著者帰属: 元の著者の情報は元のURLに含まれています。著作権は原作者に属します。
Content is automatically searched and collected through network algorithms. If there is a violation, please contact us. We will make adjustments (correct the author information or delete the content) as soon as possible.