• Skip to main content
  • Skip to primary sidebar

学習記録

プログラミング

LSTM時系列解析 全体像

2017年12月31日 by 河副 太智 Leave a Comment

LSTM時系列解析

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
import numpy
import matplotlib.pyplot as plt
from pandas import read_csv
import math
from keras.models import Sequential
from keras.layers import Dense
from keras.layers import LSTM
from sklearn.preprocessing import MinMaxScaler
from sklearn.metrics import mean_squared_error
 
#以下にコードを書いてください
# Build supervised-learning samples from a univariate time series.
def create_dataset(dataset, look_back):
    """Slice *dataset* into (X, y) pairs for sequence prediction.

    Parameters
    ----------
    dataset : 2-D numpy array of shape (n, 1); column 0 holds the series.
    look_back : int, number of past steps used as input for each sample.

    Returns
    -------
    (dataX, dataY) : numpy arrays of shapes (m, look_back) and (m,),
        with m = len(dataset) - look_back - 1.
    """
    # NOTE: the scraped original had its indentation stripped (body at
    # column 0), which is invalid Python; structure restored here.
    dataX, dataY = [], []
    for i in range(len(dataset) - look_back - 1):
        # Window of look_back consecutive values ...
        a = dataset[i:(i + look_back), 0]
        dataX.append(a)
        # ... predicts the value immediately after the window.
        dataY.append(dataset[i + look_back, 0])
    return numpy.array(dataX), numpy.array(dataY)
# Fix the RNG seed so training runs are reproducible.
numpy.random.seed(7)
# Load column 1 of the Nikkei-225 CSV; skipfooter drops trailing non-data rows.
dataframe = read_csv('nikkei225.csv', usecols=[1], engine='python', skipfooter=3)
dataset = dataframe.values
dataset = dataset.astype('float32')
# Split chronologically into training (first 67%) and test data.
train_size = int(len(dataset) * 0.67)
test_size = len(dataset) - train_size
train, test = dataset[0:train_size,:], dataset[train_size:len(dataset),:]

# Scale values into [0, 1]; the scaler is fit on the training split only,
# so no test-set information leaks into the transform.
scaler = MinMaxScaler(feature_range=(0, 1))
scaler_train = scaler.fit(train)
train = scaler_train.transform(train)
test = scaler_train.transform(test)

# Build supervised samples: each X is look_back consecutive values,
# each y is the value immediately after that window.
look_back = 10
trainX, trainY = create_dataset(train, look_back)
testX, testY = create_dataset(test, look_back)
# Reshape to the (samples, timesteps, features) layout LSTM layers expect.
trainX = numpy.reshape(trainX, (trainX.shape[0], trainX.shape[1], 1))
testX = numpy.reshape(testX, (testX.shape[0], testX.shape[1], 1))
# Build and train a stacked LSTM: the first layer returns full sequences
# so the second LSTM can consume them; Dense(1) is the regression head.
model = Sequential()
model.add(LSTM(64, return_sequences=True,input_shape=(look_back, 1)))
model.add(LSTM(32))
model.add(Dense(1))
model.compile(loss='mean_squared_error', optimizer='adam')
model.fit(trainX, trainY, epochs=10, batch_size=1, verbose=2)

# Generate predictions for both splits.
trainPredict = model.predict(trainX)
testPredict = model.predict(testX)

# Undo the MinMax scaling so scores and plots are in original price units.
# NOTE(review): trainY/testY are wrapped in a list to give inverse_transform
# a 2-D input of shape (1, m) — relies on the scaler having one feature.
trainPredict = scaler_train.inverse_transform(trainPredict)
trainY = scaler_train.inverse_transform([trainY])
testPredict = scaler_train.inverse_transform(testPredict)
testY = scaler_train.inverse_transform([testY])

# Report root-mean-squared error for train and test.
trainScore = math.sqrt(mean_squared_error(trainY[0], trainPredict[:,0]))
print('Train Score: %.2f RMSE' % (trainScore))
testScore = math.sqrt(mean_squared_error(testY[0], testPredict[:,0]))
print('Test Score: %.2f RMSE' % (testScore))

# Shift predictions into their positions in the full series (NaN elsewhere)
# so they can be overlaid on the original data when plotted.
trainPredictPlot = numpy.empty_like(dataset)
trainPredictPlot[:, :] = numpy.nan
trainPredictPlot[look_back:len(trainPredict)+look_back, :] = trainPredict
testPredictPlot = numpy.empty_like(dataset)
testPredictPlot[:, :] = numpy.nan
testPredictPlot[len(trainPredict)+(look_back*2)+1:len(dataset)-1, :] = testPredict
# Plot the actual test-period values against the aligned test predictions.
plt.plot(dataframe[round(len(dataset)*0.67):])
plt.plot(testPredictPlot)
plt.show()

結果

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
Epoch 1/10
44s - loss: 0.0040
Epoch 2/10
44s - loss: 0.0013
Epoch 3/10
43s - loss: 0.0011
Epoch 4/10
44s - loss: 7.8079e-04
Epoch 5/10
44s - loss: 5.9064e-04
Epoch 6/10
44s - loss: 5.5586e-04
Epoch 7/10
43s - loss: 5.2437e-04
Epoch 8/10
43s - loss: 5.4960e-04
Epoch 9/10
43s - loss: 5.3203e-04
Epoch 10/10
44s - loss: 4.9286e-04
Train Score: 270.96 RMSE
Test Score: 144.13 RMSE

 

 

Filed Under: 作成実績, 教師有り, 機械学習

予想データ predict

2017年12月31日 by 河副 太智 Leave a Comment

 

1
2
3
4
5
6
# Build and train the model.
model = Sequential()  # or any other learning model goes here

# Generate predictions from the trained model.
# NOTE(review): fragment — assumes trainX/testX and a fitted model exist
# in the surrounding script.
trainPredict = model.predict(trainX)
testPredict = model.predict(testX)

 

Filed Under: 教師有り, 機械学習

LSTMモデルの作成と学習

2017年12月29日 by 河副 太智 Leave a Comment

 

1
2
3
4
5
6
7
# Build and train the stacked-LSTM model.
model = Sequential()
# First LSTM returns full sequences so the second LSTM can consume them.
model.add(LSTM(64, return_sequences=True,input_shape=(look_back, 1)))
model.add(LSTM(32))
model.add(Dense(1))  # single-value regression output
model.compile(loss='mean_squared_error', optimizer='adam')
model.fit(trainX, trainY, epochs=10, batch_size=1, verbose=2)

 

Filed Under: 教師有り, 機械学習

look_backで過去に遡る

2017年12月29日 by 河副 太智 Leave a Comment

それぞれのYに、対応するXウィンドウのlook_back個先（直後）の値を代入

1
2
3
4
5
# Build the windowed datasets.
look_back = 10  # number of past steps fed to the model for each prediction

trainX, trainY = create_dataset(train, look_back)
testX, testY = create_dataset(test, look_back)

ユーザー定義関数を作成して実行結果を受け取る

1
2
3
4
5
6
7
8
# Build supervised-learning samples from a univariate time series.
def create_dataset(dataset, look_back):
    """Slice *dataset* into (X, y) pairs for sequence prediction.

    Parameters
    ----------
    dataset : 2-D numpy array of shape (n, 1); column 0 holds the series.
    look_back : int, number of past steps used as input for each sample.

    Returns
    -------
    (dataX, dataY) : numpy arrays of shapes (m, look_back) and (m,),
        with m = len(dataset) - look_back - 1.
    """
    # NOTE: the scraped original had its indentation stripped (body at
    # column 0), which is invalid Python; structure restored here.
    dataX, dataY = [], []
    for i in range(len(dataset) - look_back - 1):
        # Window of look_back consecutive values ...
        a = dataset[i:(i + look_back), 0]
        dataX.append(a)
        # ... predicts the value immediately after the window.
        dataY.append(dataset[i + look_back, 0])
    return numpy.array(dataX), numpy.array(dataY)

 

実行前

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
[[ 0.02588999]
[ 0.04530746]
[ 0.09061491]
[ 0.08090615]
[ 0.05501619]
[ 0.10032365]
[ 0.14239484]
[ 0.14239484]
[ 0.10355988]
[ 0.04854369]
[ 0.        ]
[ 0.04530746]
[ 0.03559873]
[ 0.07119742]
[ 0.11974111]
[ 0.10032365]
[ 0.06796119]
[ 0.14563107]
[ 0.21359226]
[ 0.21359226]
[ 0.17475727]
[ 0.09385115]
[ 0.03236246]
[ 0.11650488]
[ 0.13268611]
[ 0.14886734]
[ 0.23948219]
[ 0.1909385 ]
[ 0.22006473]
[ 0.23948219]
[ 0.30744341]
[ 0.30744341]
[ 0.25889972]
[ 0.18770227]
[ 0.13592234]
[ 0.20064726]
[ 0.2168285 ]
[ 0.24595472]
[ 0.28802589]
[ 0.24919096]
[ 0.25566342]
[ 0.36893204]
[ 0.40776703]
[ 0.44660196]
[ 0.33980587]
[ 0.28155342]
[ 0.22006473]
[ 0.29126218]
[ 0.29773465]
[ 0.29773465]
[ 0.42718449]
[ 0.42394826]
[ 0.40453079]
[ 0.44983819]
[ 0.51779938]
[ 0.54368937]
[ 0.43042073]
[ 0.34627834]
[ 0.24595472]
[ 0.31391588]
[ 0.32362464]
[ 0.27184466]
[ 0.42394826]
[ 0.39805827]
[ 0.42071202]
[ 0.51779938]
[ 0.64077675]
[ 0.61165047]
[ 0.50161815]
[ 0.40453079]
[ 0.32038835]
[ 0.40453079]
[ 0.44660196]
[ 0.41747573]
[ 0.52750814]
[ 0.53398061]
[ 0.5372169 ]
[ 0.68284798]
[ 0.84142399]
[ 0.78640783]
[ 0.67313921]
[ 0.55016184]
[ 0.43042073]
[ 0.56310678]
[ 0.5825243 ]
[ 0.5598706 ]
[ 0.68932045]
[ 0.67637551]
[ 0.69255674]
[ 0.87378645]
[ 1.00000012]
[ 0.97411013]
[ 0.81229782]
[ 0.65372169]
[ 0.54045308]]

 

実行後

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
[[ 0.02588999  0.04530746  0.09061491  0.08090615  0.05501619  0.10032365
   0.14239484  0.14239484  0.10355988  0.04854369]
[ 0.04530746  0.09061491  0.08090615  0.05501619  0.10032365  0.14239484
   0.14239484  0.10355988  0.04854369  0.        ]
[ 0.09061491  0.08090615  0.05501619  0.10032365  0.14239484  0.14239484
   0.10355988  0.04854369  0.          0.04530746]
[ 0.08090615  0.05501619  0.10032365  0.14239484  0.14239484  0.10355988
   0.04854369  0.          0.04530746  0.03559873]
[ 0.05501619  0.10032365  0.14239484  0.14239484  0.10355988  0.04854369
   0.          0.04530746  0.03559873  0.07119742]
[ 0.10032365  0.14239484  0.14239484  0.10355988  0.04854369  0.
   0.04530746  0.03559873  0.07119742  0.11974111]
[ 0.14239484  0.14239484  0.10355988  0.04854369  0.          0.04530746
   0.03559873  0.07119742  0.11974111  0.10032365]
[ 0.14239484  0.10355988  0.04854369  0.          0.04530746  0.03559873
   0.07119742  0.11974111  0.10032365  0.06796119]
[ 0.10355988  0.04854369  0.          0.04530746  0.03559873  0.07119742
   0.11974111  0.10032365  0.06796119  0.14563107]
[ 0.04854369  0.          0.04530746  0.03559873  0.07119742  0.11974111
   0.10032365  0.06796119  0.14563107  0.21359226]
[ 0.          0.04530746  0.03559873  0.07119742  0.11974111  0.10032365
   0.06796119  0.14563107  0.21359226  0.21359226]
[ 0.04530746  0.03559873  0.07119742  0.11974111  0.10032365  0.06796119
   0.14563107  0.21359226  0.21359226  0.17475727]
[ 0.03559873  0.07119742  0.11974111  0.10032365  0.06796119  0.14563107
   0.21359226  0.21359226  0.17475727  0.09385115]
[ 0.07119742  0.11974111  0.10032365  0.06796119  0.14563107  0.21359226
   0.21359226  0.17475727  0.09385115  0.03236246]
[ 0.11974111  0.10032365  0.06796119  0.14563107  0.21359226  0.21359226
   0.17475727  0.09385115  0.03236246  0.11650488]
[ 0.10032365  0.06796119  0.14563107  0.21359226  0.21359226  0.17475727
   0.09385115  0.03236246  0.11650488  0.13268611]
[ 0.06796119  0.14563107  0.21359226  0.21359226  0.17475727  0.09385115
   0.03236246  0.11650488  0.13268611  0.14886734]
[ 0.14563107  0.21359226  0.21359226  0.17475727  0.09385115  0.03236246
   0.11650488  0.13268611  0.14886734  0.23948219]
[ 0.21359226  0.21359226  0.17475727  0.09385115  0.03236246  0.11650488
   0.13268611  0.14886734  0.23948219  0.1909385 ]
[ 0.21359226  0.17475727  0.09385115  0.03236246  0.11650488  0.13268611
   0.14886734  0.23948219  0.1909385   0.22006473]
[ 0.17475727  0.09385115  0.03236246  0.11650488  0.13268611  0.14886734
   0.23948219  0.1909385   0.22006473  0.23948219]
[ 0.09385115  0.03236246  0.11650488  0.13268611  0.14886734  0.23948219
   0.1909385   0.22006473  0.23948219  0.30744341]
[ 0.03236246  0.11650488  0.13268611  0.14886734  0.23948219  0.1909385
   0.22006473  0.23948219  0.30744341  0.30744341]
[ 0.11650488  0.13268611  0.14886734  0.23948219  0.1909385   0.22006473
   0.23948219  0.30744341  0.30744341  0.25889972]
[ 0.13268611  0.14886734  0.23948219  0.1909385   0.22006473  0.23948219
   0.30744341  0.30744341  0.25889972  0.18770227]
[ 0.14886734  0.23948219  0.1909385   0.22006473  0.23948219  0.30744341
   0.30744341  0.25889972  0.18770227  0.13592234]
[ 0.23948219  0.1909385   0.22006473  0.23948219  0.30744341  0.30744341
   0.25889972  0.18770227  0.13592234  0.20064726]
[ 0.1909385   0.22006473  0.23948219  0.30744341  0.30744341  0.25889972
   0.18770227  0.13592234  0.20064726  0.2168285 ]
[ 0.22006473  0.23948219  0.30744341  0.30744341  0.25889972  0.18770227
   0.13592234  0.20064726  0.2168285   0.24595472]
[ 0.23948219  0.30744341  0.30744341  0.25889972  0.18770227  0.13592234
   0.20064726  0.2168285   0.24595472  0.28802589]
[ 0.30744341  0.30744341  0.25889972  0.18770227  0.13592234  0.20064726
   0.2168285   0.24595472  0.28802589  0.24919096]
[ 0.30744341  0.25889972  0.18770227  0.13592234  0.20064726  0.2168285
   0.24595472  0.28802589  0.24919096  0.25566342]
[ 0.25889972  0.18770227  0.13592234  0.20064726  0.2168285   0.24595472
   0.28802589  0.24919096  0.25566342  0.36893204]
[ 0.18770227  0.13592234  0.20064726  0.2168285   0.24595472  0.28802589
   0.24919096  0.25566342  0.36893204  0.40776703]
[ 0.13592234  0.20064726  0.2168285   0.24595472  0.28802589  0.24919096
   0.25566342  0.36893204  0.40776703  0.44660196]
[ 0.20064726  0.2168285   0.24595472  0.28802589  0.24919096  0.25566342
   0.36893204  0.40776703  0.44660196  0.33980587]
[ 0.2168285   0.24595472  0.28802589  0.24919096  0.25566342  0.36893204
   0.40776703  0.44660196  0.33980587  0.28155342]
[ 0.24595472  0.28802589  0.24919096  0.25566342  0.36893204  0.40776703
   0.44660196  0.33980587  0.28155342  0.22006473]
[ 0.28802589  0.24919096  0.25566342  0.36893204  0.40776703  0.44660196
   0.33980587  0.28155342  0.22006473  0.29126218]
[ 0.24919096  0.25566342  0.36893204  0.40776703  0.44660196  0.33980587
   0.28155342  0.22006473  0.29126218  0.29773465]
[ 0.25566342  0.36893204  0.40776703  0.44660196  0.33980587  0.28155342
   0.22006473  0.29126218  0.29773465  0.29773465]
[ 0.36893204  0.40776703  0.44660196  0.33980587  0.28155342  0.22006473
   0.29126218  0.29773465  0.29773465  0.42718449]
[ 0.40776703  0.44660196  0.33980587  0.28155342  0.22006473  0.29126218
   0.29773465  0.29773465  0.42718449  0.42394826]
[ 0.44660196  0.33980587  0.28155342  0.22006473  0.29126218  0.29773465
   0.29773465  0.42718449  0.42394826  0.40453079]
[ 0.33980587  0.28155342  0.22006473  0.29126218  0.29773465  0.29773465
   0.42718449  0.42394826  0.40453079  0.44983819]
[ 0.28155342  0.22006473  0.29126218  0.29773465  0.29773465  0.42718449
   0.42394826  0.40453079  0.44983819  0.51779938]
[ 0.22006473  0.29126218  0.29773465  0.29773465  0.42718449  0.42394826
   0.40453079  0.44983819  0.51779938  0.54368937]
[ 0.29126218  0.29773465  0.29773465  0.42718449  0.42394826  0.40453079
   0.44983819  0.51779938  0.54368937  0.43042073]
[ 0.29773465  0.29773465  0.42718449  0.42394826  0.40453079  0.44983819
   0.51779938  0.54368937  0.43042073  0.34627834]
[ 0.29773465  0.42718449  0.42394826  0.40453079  0.44983819  0.51779938
   0.54368937  0.43042073  0.34627834  0.24595472]
[ 0.42718449  0.42394826  0.40453079  0.44983819  0.51779938  0.54368937
   0.43042073  0.34627834  0.24595472  0.31391588]
[ 0.42394826  0.40453079  0.44983819  0.51779938  0.54368937  0.43042073
   0.34627834  0.24595472  0.31391588  0.32362464]
[ 0.40453079  0.44983819  0.51779938  0.54368937  0.43042073  0.34627834
   0.24595472  0.31391588  0.32362464  0.27184466]
[ 0.44983819  0.51779938  0.54368937  0.43042073  0.34627834  0.24595472
   0.31391588  0.32362464  0.27184466  0.42394826]
[ 0.51779938  0.54368937  0.43042073  0.34627834  0.24595472  0.31391588
   0.32362464  0.27184466  0.42394826  0.39805827]
[ 0.54368937  0.43042073  0.34627834  0.24595472  0.31391588  0.32362464
   0.27184466  0.42394826  0.39805827  0.42071202]
[ 0.43042073  0.34627834  0.24595472  0.31391588  0.32362464  0.27184466
   0.42394826  0.39805827  0.42071202  0.51779938]
[ 0.34627834  0.24595472  0.31391588  0.32362464  0.27184466  0.42394826
   0.39805827  0.42071202  0.51779938  0.64077675]
[ 0.24595472  0.31391588  0.32362464  0.27184466  0.42394826  0.39805827
   0.42071202  0.51779938  0.64077675  0.61165047]
[ 0.31391588  0.32362464  0.27184466  0.42394826  0.39805827  0.42071202
   0.51779938  0.64077675  0.61165047  0.50161815]
[ 0.32362464  0.27184466  0.42394826  0.39805827  0.42071202  0.51779938
   0.64077675  0.61165047  0.50161815  0.40453079]
[ 0.27184466  0.42394826  0.39805827  0.42071202  0.51779938  0.64077675
   0.61165047  0.50161815  0.40453079  0.32038835]
[ 0.42394826  0.39805827  0.42071202  0.51779938  0.64077675  0.61165047
   0.50161815  0.40453079  0.32038835  0.40453079]
[ 0.39805827  0.42071202  0.51779938  0.64077675  0.61165047  0.50161815
   0.40453079  0.32038835  0.40453079  0.44660196]
[ 0.42071202  0.51779938  0.64077675  0.61165047  0.50161815  0.40453079
   0.32038835  0.40453079  0.44660196  0.41747573]
[ 0.51779938  0.64077675  0.61165047  0.50161815  0.40453079  0.32038835
   0.40453079  0.44660196  0.41747573  0.52750814]
[ 0.64077675  0.61165047  0.50161815  0.40453079  0.32038835  0.40453079
   0.44660196  0.41747573  0.52750814  0.53398061]
[ 0.61165047  0.50161815  0.40453079  0.32038835  0.40453079  0.44660196
   0.41747573  0.52750814  0.53398061  0.5372169 ]
[ 0.50161815  0.40453079  0.32038835  0.40453079  0.44660196  0.41747573
   0.52750814  0.53398061  0.5372169   0.68284798]
[ 0.40453079  0.32038835  0.40453079  0.44660196  0.41747573  0.52750814
   0.53398061  0.5372169   0.68284798  0.84142399]
[ 0.32038835  0.40453079  0.44660196  0.41747573  0.52750814  0.53398061
   0.5372169   0.68284798  0.84142399  0.78640783]
[ 0.40453079  0.44660196  0.41747573  0.52750814  0.53398061  0.5372169
   0.68284798  0.84142399  0.78640783  0.67313921]
[ 0.44660196  0.41747573  0.52750814  0.53398061  0.5372169   0.68284798
   0.84142399  0.78640783  0.67313921  0.55016184]
[ 0.41747573  0.52750814  0.53398061  0.5372169   0.68284798  0.84142399
   0.78640783  0.67313921  0.55016184  0.43042073]
[ 0.52750814  0.53398061  0.5372169   0.68284798  0.84142399  0.78640783
   0.67313921  0.55016184  0.43042073  0.56310678]
[ 0.53398061  0.5372169   0.68284798  0.84142399  0.78640783  0.67313921
   0.55016184  0.43042073  0.56310678  0.5825243 ]
[ 0.5372169   0.68284798  0.84142399  0.78640783  0.67313921  0.55016184
   0.43042073  0.56310678  0.5825243   0.5598706 ]
[ 0.68284798  0.84142399  0.78640783  0.67313921  0.55016184  0.43042073
   0.56310678  0.5825243   0.5598706   0.68932045]
[ 0.84142399  0.78640783  0.67313921  0.55016184  0.43042073  0.56310678
   0.5825243   0.5598706   0.68932045  0.67637551]
[ 0.78640783  0.67313921  0.55016184  0.43042073  0.56310678  0.5825243
   0.5598706   0.68932045  0.67637551  0.69255674]
[ 0.67313921  0.55016184  0.43042073  0.56310678  0.5825243   0.5598706
   0.68932045  0.67637551  0.69255674  0.87378645]
[ 0.55016184  0.43042073  0.56310678  0.5825243   0.5598706   0.68932045
   0.67637551  0.69255674  0.87378645  1.00000012]
[ 0.43042073  0.56310678  0.5825243   0.5598706   0.68932045  0.67637551
   0.69255674  0.87378645  1.00000012  0.97411013]
[ 0.56310678  0.5825243   0.5598706   0.68932045  0.67637551  0.69255674
   0.87378645  1.00000012  0.97411013  0.81229782]]

 

 

Filed Under: 教師有り, 機械学習

リアルタイム描写

2017年12月28日 by 河副 太智 Leave a Comment

動くグラフ

https://www.reddit.com/r/dataisbeautiful/comments/7l9ef7/i_simulated_and_animated_500_instances_of_the/

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
# Animated histogram inside a Jupyter notebook: redraw the figure in place
# by displaying it, then clearing the cell output before the next frame.
from matplotlib import pyplot as plt
import numpy as np
import time
from IPython.display import display, clear_output
%matplotlib inline

fig = plt.figure(figsize=(16,10))
axe = fig.add_subplot(111)

# Number of redraw iterations.
num = 5

for i in range(num):
    # Fresh sample of 1000 standard-normal values each frame.
    A = np.random.randn(1000)
    axe.hist(A,bins=50)
    # Fixed axis limits so frames don't jump around.
    axe.set_xlim([-4,4])
    axe.set_ylim([0,70])
    display(fig)
    clear_output(wait = True)
    # Keep the final frame on screen: clear the axes on every pass
    # except the last one.
    if i != num-1:
        axe.cla()

from IPython.display import display, clear_output

display(fig) 画像を出力します。
clear_output(wait = True) 出力結果を消します。
axe.cla() axeに入れられたグラフデータを削除します。

Filed Under: グラフ

予想結果の可視化

2017年12月28日 by 河副 太智 Leave a Comment

1
2
3
4
5
6
7
8
9
10
11
# Shift predictions into their positions in the full series (NaN elsewhere)
# so they overlay correctly on the original data.
trainPredictPlot = numpy.empty_like(dataset)
trainPredictPlot[:, :] = numpy.nan
# Train predictions start look_back steps in (the first window's target).
trainPredictPlot[look_back:len(trainPredict)+look_back, :] = trainPredict
testPredictPlot = numpy.empty_like(dataset)
testPredictPlot[:, :] = numpy.nan
# Test predictions start after the train region plus two window offsets.
testPredictPlot[len(trainPredict)+(look_back*2)+1:len(dataset)-1, :] = testPredict
# Plot the actual test-period values against the aligned test predictions.
plt.plot(dataframe[round(len(dataset)*0.67):])
plt.plot(testPredictPlot)
plt.show()

 

Filed Under: 教師有り

  • « Go to Previous Page
  • Page 1
  • Interim pages omitted …
  • Page 19
  • Page 20
  • Page 21
  • Page 22
  • Page 23
  • Interim pages omitted …
  • Page 55
  • Go to Next Page »

Primary Sidebar

カテゴリー

  • AWS
  • Bootstrap
  • Dash
  • Django
  • flask
  • GIT(sourcetree)
  • Plotly/Dash
  • VPS
  • その他tool
  • ブログ
  • プログラミング
    • Bokeh
    • css
    • HoloViews
    • Jupyter
    • Numpy
    • Pandas
    • PosgreSQL
    • Python 基本
    • python3
      • webアプリ
    • python3解説
    • scikit-learn
    • scipy
    • vps
    • Wordpress
    • グラフ
    • コマンド
    • スクレイピング
    • チートシート
    • データクレンジング
    • ブロックチェーン
    • 作成実績
    • 時系列分析
    • 機械学習
      • 分析手法
      • 教師有り
    • 異常値検知
    • 自然言語処理
  • 一太郎
  • 数学
    • sympy
      • 対数関数(log)
      • 累乗根(n乗根)
    • 暗号学

Copyright © 2025 · Genesis Sample on Genesis Framework · WordPress · Log in