Computing information entropy in R
The function below computes Shannon entropy H(p) = -Σ p_i * log(p_i), skipping zero-probability entries; since R's log() defaults to the natural logarithm, the results are in nats.

test.entropy <- function(d) {
  print(d)                     # show the input distribution
  res <- 0
  for (i in 1:length(d)) {
    if (d[i] != 0) res <- res + d[i] * log(d[i])  # skip p = 0 so 0*log(0) is treated as 0
  }
  return(-res)
}

d1 = c(0.25, 0.25, 0.25, 0.25)
d2 = c(0.5, 0.5, 0, 0)
d3 = c(0.8, 0.2, 0, 0)
d4 = c(1, 0, 0, 0)

print(test.entropy(d=d1))
print(test.entropy(d=d2))
print(test.entropy(d=d3))
print(test.entropy(d=d4))
Result:
> print(test.entropy(d=d1))
[1] 0.25 0.25 0.25 0.25
[1] 1.386294
>
> print(test.entropy(d=d2))
[1] 0.5 0.5 0.0 0.0
[1] 0.6931472
>
> print(test.entropy(d=d3))
[1] 0.8 0.2 0.0 0.0
[1] 0.5004024
>
> print(test.entropy(d=d4))
[1] 1 0 0 0
[1] 0
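As a side note, the same values can be reproduced without an explicit loop. Here is a minimal vectorized sketch (entropy_nats is a made-up helper name, not part of the original code):

entropy_nats <- function(p) {
  p <- p[p != 0]           # drop zero probabilities so 0 * log(0) does not produce NaN
  -sum(p * log(p))         # natural log, so the result is in nats
}

entropy_nats(c(0.25, 0.25, 0.25, 0.25))  # 1.386294 (= log(4))
entropy_nats(c(0.8, 0.2, 0, 0))          # 0.5004024

Replacing log with log2 would give the entropy in bits instead (2 bits for the uniform distribution over four outcomes).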