1 2 3 4 5 6 |
# Build and train the model (layers omitted here — placeholder Sequential)
model = Sequential()  # add your own layers / training here
# Generate predictions for both splits
trainPredict = model.predict(trainX)
testPredict = model.predict(testX)
1 2 3 4 5 6 |
# Create and fit the model (other layers / training code go here)
model = Sequential()
# Run the fitted model on the train and test inputs
trainPredict = model.predict(trainX)
testPredict = model.predict(testX)
1 2 3 4 5 6 7 |
# Build and train the stacked-LSTM model
model = Sequential()
for layer in (
    LSTM(64, return_sequences=True, input_shape=(look_back, 1)),
    LSTM(32),
    Dense(1),
):
    model.add(layer)
model.compile(loss='mean_squared_error', optimizer='adam')
model.fit(trainX, trainY, epochs=10, batch_size=1, verbose=2)
それぞれのYには、Xの窓のlook_back個先にあたるデータの値を代入
1 2 3 4 5 |
# Build the supervised datasets: each X row is a window of `look_back`
# consecutive samples, each Y the value just after that window.
look_back = 10
trainX, trainY = create_dataset(train, look_back)
testX, testY = create_dataset(test, look_back)
ユーザー定義関数を作成して実行結果を受け取る
1 2 3 4 5 6 7 8 |
def create_dataset(dataset, look_back):
    """Slice a 2-D series into (X, Y) pairs for supervised learning.

    Each X row holds ``look_back`` consecutive values from column 0 of
    *dataset*; the matching Y entry is the value immediately after that
    window.  Returns two numpy arrays ``(X, Y)``.

    NOTE(review): the ``- 1`` below drops the last usable window; this
    mirrors the common Keras tutorial code and the downstream plotting
    offsets depend on it — confirm before changing.
    """
    windows = []
    targets = []
    last_start = len(dataset) - look_back - 1
    for start in range(last_start):
        windows.append(dataset[start:start + look_back, 0])
        targets.append(dataset[start + look_back, 0])
    return numpy.array(windows), numpy.array(targets)
実行前
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 |
[[ 0.02588999] [ 0.04530746] [ 0.09061491] [ 0.08090615] [ 0.05501619] [ 0.10032365] [ 0.14239484] [ 0.14239484] [ 0.10355988] [ 0.04854369] [ 0. ] [ 0.04530746] [ 0.03559873] [ 0.07119742] [ 0.11974111] [ 0.10032365] [ 0.06796119] [ 0.14563107] [ 0.21359226] [ 0.21359226] [ 0.17475727] [ 0.09385115] [ 0.03236246] [ 0.11650488] [ 0.13268611] [ 0.14886734] [ 0.23948219] [ 0.1909385 ] [ 0.22006473] [ 0.23948219] [ 0.30744341] [ 0.30744341] [ 0.25889972] [ 0.18770227] [ 0.13592234] [ 0.20064726] [ 0.2168285 ] [ 0.24595472] [ 0.28802589] [ 0.24919096] [ 0.25566342] [ 0.36893204] [ 0.40776703] [ 0.44660196] [ 0.33980587] [ 0.28155342] [ 0.22006473] [ 0.29126218] [ 0.29773465] [ 0.29773465] [ 0.42718449] [ 0.42394826] [ 0.40453079] [ 0.44983819] [ 0.51779938] [ 0.54368937] [ 0.43042073] [ 0.34627834] [ 0.24595472] [ 0.31391588] [ 0.32362464] [ 0.27184466] [ 0.42394826] [ 0.39805827] [ 0.42071202] [ 0.51779938] [ 0.64077675] [ 0.61165047] [ 0.50161815] [ 0.40453079] [ 0.32038835] [ 0.40453079] [ 0.44660196] [ 0.41747573] [ 0.52750814] [ 0.53398061] [ 0.5372169 ] [ 0.68284798] [ 0.84142399] [ 0.78640783] [ 0.67313921] [ 0.55016184] [ 0.43042073] [ 0.56310678] [ 0.5825243 ] [ 0.5598706 ] [ 0.68932045] [ 0.67637551] [ 0.69255674] [ 0.87378645] [ 1.00000012] [ 0.97411013] [ 0.81229782] [ 0.65372169] [ 0.54045308]] |
実行後
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 |
[[ 0.02588999 0.04530746 0.09061491 0.08090615 0.05501619 0.10032365 0.14239484 0.14239484 0.10355988 0.04854369] [ 0.04530746 0.09061491 0.08090615 0.05501619 0.10032365 0.14239484 0.14239484 0.10355988 0.04854369 0. ] [ 0.09061491 0.08090615 0.05501619 0.10032365 0.14239484 0.14239484 0.10355988 0.04854369 0. 0.04530746] [ 0.08090615 0.05501619 0.10032365 0.14239484 0.14239484 0.10355988 0.04854369 0. 0.04530746 0.03559873] [ 0.05501619 0.10032365 0.14239484 0.14239484 0.10355988 0.04854369 0. 0.04530746 0.03559873 0.07119742] [ 0.10032365 0.14239484 0.14239484 0.10355988 0.04854369 0. 0.04530746 0.03559873 0.07119742 0.11974111] [ 0.14239484 0.14239484 0.10355988 0.04854369 0. 0.04530746 0.03559873 0.07119742 0.11974111 0.10032365] [ 0.14239484 0.10355988 0.04854369 0. 0.04530746 0.03559873 0.07119742 0.11974111 0.10032365 0.06796119] [ 0.10355988 0.04854369 0. 0.04530746 0.03559873 0.07119742 0.11974111 0.10032365 0.06796119 0.14563107] [ 0.04854369 0. 0.04530746 0.03559873 0.07119742 0.11974111 0.10032365 0.06796119 0.14563107 0.21359226] [ 0. 
0.04530746 0.03559873 0.07119742 0.11974111 0.10032365 0.06796119 0.14563107 0.21359226 0.21359226] [ 0.04530746 0.03559873 0.07119742 0.11974111 0.10032365 0.06796119 0.14563107 0.21359226 0.21359226 0.17475727] [ 0.03559873 0.07119742 0.11974111 0.10032365 0.06796119 0.14563107 0.21359226 0.21359226 0.17475727 0.09385115] [ 0.07119742 0.11974111 0.10032365 0.06796119 0.14563107 0.21359226 0.21359226 0.17475727 0.09385115 0.03236246] [ 0.11974111 0.10032365 0.06796119 0.14563107 0.21359226 0.21359226 0.17475727 0.09385115 0.03236246 0.11650488] [ 0.10032365 0.06796119 0.14563107 0.21359226 0.21359226 0.17475727 0.09385115 0.03236246 0.11650488 0.13268611] [ 0.06796119 0.14563107 0.21359226 0.21359226 0.17475727 0.09385115 0.03236246 0.11650488 0.13268611 0.14886734] [ 0.14563107 0.21359226 0.21359226 0.17475727 0.09385115 0.03236246 0.11650488 0.13268611 0.14886734 0.23948219] [ 0.21359226 0.21359226 0.17475727 0.09385115 0.03236246 0.11650488 0.13268611 0.14886734 0.23948219 0.1909385 ] [ 0.21359226 0.17475727 0.09385115 0.03236246 0.11650488 0.13268611 0.14886734 0.23948219 0.1909385 0.22006473] [ 0.17475727 0.09385115 0.03236246 0.11650488 0.13268611 0.14886734 0.23948219 0.1909385 0.22006473 0.23948219] [ 0.09385115 0.03236246 0.11650488 0.13268611 0.14886734 0.23948219 0.1909385 0.22006473 0.23948219 0.30744341] [ 0.03236246 0.11650488 0.13268611 0.14886734 0.23948219 0.1909385 0.22006473 0.23948219 0.30744341 0.30744341] [ 0.11650488 0.13268611 0.14886734 0.23948219 0.1909385 0.22006473 0.23948219 0.30744341 0.30744341 0.25889972] [ 0.13268611 0.14886734 0.23948219 0.1909385 0.22006473 0.23948219 0.30744341 0.30744341 0.25889972 0.18770227] [ 0.14886734 0.23948219 0.1909385 0.22006473 0.23948219 0.30744341 0.30744341 0.25889972 0.18770227 0.13592234] [ 0.23948219 0.1909385 0.22006473 0.23948219 0.30744341 0.30744341 0.25889972 0.18770227 0.13592234 0.20064726] [ 0.1909385 0.22006473 0.23948219 0.30744341 0.30744341 0.25889972 0.18770227 0.13592234 0.20064726 
0.2168285 ] [ 0.22006473 0.23948219 0.30744341 0.30744341 0.25889972 0.18770227 0.13592234 0.20064726 0.2168285 0.24595472] [ 0.23948219 0.30744341 0.30744341 0.25889972 0.18770227 0.13592234 0.20064726 0.2168285 0.24595472 0.28802589] [ 0.30744341 0.30744341 0.25889972 0.18770227 0.13592234 0.20064726 0.2168285 0.24595472 0.28802589 0.24919096] [ 0.30744341 0.25889972 0.18770227 0.13592234 0.20064726 0.2168285 0.24595472 0.28802589 0.24919096 0.25566342] [ 0.25889972 0.18770227 0.13592234 0.20064726 0.2168285 0.24595472 0.28802589 0.24919096 0.25566342 0.36893204] [ 0.18770227 0.13592234 0.20064726 0.2168285 0.24595472 0.28802589 0.24919096 0.25566342 0.36893204 0.40776703] [ 0.13592234 0.20064726 0.2168285 0.24595472 0.28802589 0.24919096 0.25566342 0.36893204 0.40776703 0.44660196] [ 0.20064726 0.2168285 0.24595472 0.28802589 0.24919096 0.25566342 0.36893204 0.40776703 0.44660196 0.33980587] [ 0.2168285 0.24595472 0.28802589 0.24919096 0.25566342 0.36893204 0.40776703 0.44660196 0.33980587 0.28155342] [ 0.24595472 0.28802589 0.24919096 0.25566342 0.36893204 0.40776703 0.44660196 0.33980587 0.28155342 0.22006473] [ 0.28802589 0.24919096 0.25566342 0.36893204 0.40776703 0.44660196 0.33980587 0.28155342 0.22006473 0.29126218] [ 0.24919096 0.25566342 0.36893204 0.40776703 0.44660196 0.33980587 0.28155342 0.22006473 0.29126218 0.29773465] [ 0.25566342 0.36893204 0.40776703 0.44660196 0.33980587 0.28155342 0.22006473 0.29126218 0.29773465 0.29773465] [ 0.36893204 0.40776703 0.44660196 0.33980587 0.28155342 0.22006473 0.29126218 0.29773465 0.29773465 0.42718449] [ 0.40776703 0.44660196 0.33980587 0.28155342 0.22006473 0.29126218 0.29773465 0.29773465 0.42718449 0.42394826] [ 0.44660196 0.33980587 0.28155342 0.22006473 0.29126218 0.29773465 0.29773465 0.42718449 0.42394826 0.40453079] [ 0.33980587 0.28155342 0.22006473 0.29126218 0.29773465 0.29773465 0.42718449 0.42394826 0.40453079 0.44983819] [ 0.28155342 0.22006473 0.29126218 0.29773465 0.29773465 0.42718449 
0.42394826 0.40453079 0.44983819 0.51779938] [ 0.22006473 0.29126218 0.29773465 0.29773465 0.42718449 0.42394826 0.40453079 0.44983819 0.51779938 0.54368937] [ 0.29126218 0.29773465 0.29773465 0.42718449 0.42394826 0.40453079 0.44983819 0.51779938 0.54368937 0.43042073] [ 0.29773465 0.29773465 0.42718449 0.42394826 0.40453079 0.44983819 0.51779938 0.54368937 0.43042073 0.34627834] [ 0.29773465 0.42718449 0.42394826 0.40453079 0.44983819 0.51779938 0.54368937 0.43042073 0.34627834 0.24595472] [ 0.42718449 0.42394826 0.40453079 0.44983819 0.51779938 0.54368937 0.43042073 0.34627834 0.24595472 0.31391588] [ 0.42394826 0.40453079 0.44983819 0.51779938 0.54368937 0.43042073 0.34627834 0.24595472 0.31391588 0.32362464] [ 0.40453079 0.44983819 0.51779938 0.54368937 0.43042073 0.34627834 0.24595472 0.31391588 0.32362464 0.27184466] [ 0.44983819 0.51779938 0.54368937 0.43042073 0.34627834 0.24595472 0.31391588 0.32362464 0.27184466 0.42394826] [ 0.51779938 0.54368937 0.43042073 0.34627834 0.24595472 0.31391588 0.32362464 0.27184466 0.42394826 0.39805827] [ 0.54368937 0.43042073 0.34627834 0.24595472 0.31391588 0.32362464 0.27184466 0.42394826 0.39805827 0.42071202] [ 0.43042073 0.34627834 0.24595472 0.31391588 0.32362464 0.27184466 0.42394826 0.39805827 0.42071202 0.51779938] [ 0.34627834 0.24595472 0.31391588 0.32362464 0.27184466 0.42394826 0.39805827 0.42071202 0.51779938 0.64077675] [ 0.24595472 0.31391588 0.32362464 0.27184466 0.42394826 0.39805827 0.42071202 0.51779938 0.64077675 0.61165047] [ 0.31391588 0.32362464 0.27184466 0.42394826 0.39805827 0.42071202 0.51779938 0.64077675 0.61165047 0.50161815] [ 0.32362464 0.27184466 0.42394826 0.39805827 0.42071202 0.51779938 0.64077675 0.61165047 0.50161815 0.40453079] [ 0.27184466 0.42394826 0.39805827 0.42071202 0.51779938 0.64077675 0.61165047 0.50161815 0.40453079 0.32038835] [ 0.42394826 0.39805827 0.42071202 0.51779938 0.64077675 0.61165047 0.50161815 0.40453079 0.32038835 0.40453079] [ 0.39805827 0.42071202 
0.51779938 0.64077675 0.61165047 0.50161815 0.40453079 0.32038835 0.40453079 0.44660196] [ 0.42071202 0.51779938 0.64077675 0.61165047 0.50161815 0.40453079 0.32038835 0.40453079 0.44660196 0.41747573] [ 0.51779938 0.64077675 0.61165047 0.50161815 0.40453079 0.32038835 0.40453079 0.44660196 0.41747573 0.52750814] [ 0.64077675 0.61165047 0.50161815 0.40453079 0.32038835 0.40453079 0.44660196 0.41747573 0.52750814 0.53398061] [ 0.61165047 0.50161815 0.40453079 0.32038835 0.40453079 0.44660196 0.41747573 0.52750814 0.53398061 0.5372169 ] [ 0.50161815 0.40453079 0.32038835 0.40453079 0.44660196 0.41747573 0.52750814 0.53398061 0.5372169 0.68284798] [ 0.40453079 0.32038835 0.40453079 0.44660196 0.41747573 0.52750814 0.53398061 0.5372169 0.68284798 0.84142399] [ 0.32038835 0.40453079 0.44660196 0.41747573 0.52750814 0.53398061 0.5372169 0.68284798 0.84142399 0.78640783] [ 0.40453079 0.44660196 0.41747573 0.52750814 0.53398061 0.5372169 0.68284798 0.84142399 0.78640783 0.67313921] [ 0.44660196 0.41747573 0.52750814 0.53398061 0.5372169 0.68284798 0.84142399 0.78640783 0.67313921 0.55016184] [ 0.41747573 0.52750814 0.53398061 0.5372169 0.68284798 0.84142399 0.78640783 0.67313921 0.55016184 0.43042073] [ 0.52750814 0.53398061 0.5372169 0.68284798 0.84142399 0.78640783 0.67313921 0.55016184 0.43042073 0.56310678] [ 0.53398061 0.5372169 0.68284798 0.84142399 0.78640783 0.67313921 0.55016184 0.43042073 0.56310678 0.5825243 ] [ 0.5372169 0.68284798 0.84142399 0.78640783 0.67313921 0.55016184 0.43042073 0.56310678 0.5825243 0.5598706 ] [ 0.68284798 0.84142399 0.78640783 0.67313921 0.55016184 0.43042073 0.56310678 0.5825243 0.5598706 0.68932045] [ 0.84142399 0.78640783 0.67313921 0.55016184 0.43042073 0.56310678 0.5825243 0.5598706 0.68932045 0.67637551] [ 0.78640783 0.67313921 0.55016184 0.43042073 0.56310678 0.5825243 0.5598706 0.68932045 0.67637551 0.69255674] [ 0.67313921 0.55016184 0.43042073 0.56310678 0.5825243 0.5598706 0.68932045 0.67637551 0.69255674 0.87378645] [ 
0.55016184 0.43042073 0.56310678 0.5825243 0.5598706 0.68932045 0.67637551 0.69255674 0.87378645 1.00000012] [ 0.43042073 0.56310678 0.5825243 0.5598706 0.68932045 0.67637551 0.69255674 0.87378645 1.00000012 0.97411013] [ 0.56310678 0.5825243 0.5598706 0.68932045 0.67637551 0.69255674 0.87378645 1.00000012 0.97411013 0.81229782]] |
動くグラフ
I simulated and animated 500 instances of the Birthday Paradox. The result is almost identical to the analytical formula [OC] from dataisbeautiful
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 |
from matplotlib import pyplot as plt import numpy as np import time from IPython.display import display, clear_output %matplotlib inline fig = plt.figure(figsize=(16,10)) axe = fig.add_subplot(111) # 繰り返す回数を決める num = 5 for i in range(num): A = np.random.randn(1000) axe.hist(A,bins=50) axe.set_xlim([-4,4]) axe.set_ylim([0,70]) display(fig) clear_output(wait = True) # 最後の出力は消さないので、if文で最後だけ消さないようにする。 if i != num-1: axe.cla() |
from IPython.display import display, clear_output
display(fig) 画像を出力します。
clear_output(wait = True) 出力結果を消します。
axe.cla() axeに入れられたグラフデータを削除します。
因数分解
1 2 3 4 5 6 |
import sympy

# Factor a quadratic polynomial symbolically
x = sympy.Symbol('x')
quadratic = x**2 - 3*x + 2
eqn = quadratic
print(sympy.factor(eqn))  # -> (x - 2)*(x - 1)
素因数分解
1 2 3 |
import sympy

# Prime factorization: returns {prime: exponent}
sympy.factorint(1000)  # -> {2: 3, 5: 3}
1 2 3 4 5 6 7 8 9 10 11 |
# Align the predictions with the original timeline for plotting:
# fill with NaN so matplotlib leaves the uncovered regions blank.
train_offset = look_back
trainPredictPlot = numpy.full_like(dataset, numpy.nan)
trainPredictPlot[train_offset:train_offset + len(trainPredict), :] = trainPredict

test_offset = len(trainPredict) + (look_back * 2) + 1
testPredictPlot = numpy.full_like(dataset, numpy.nan)
testPredictPlot[test_offset:len(dataset) - 1, :] = testPredict

# Plot the held-out portion of the raw series against the test predictions
plt.plot(dataframe[round(len(dataset) * 0.67):])
plt.plot(testPredictPlot)
plt.show()