Commit 2addf617 authored by KangMin An's avatar KangMin An
Browse files

Update: 데이터 예측 및 전달 과정 수정.

parent 9092566a
...@@ -135,8 +135,21 @@ export const getUserWeatherData = async (req, res) => { ...@@ -135,8 +135,21 @@ export const getUserWeatherData = async (req, res) => {
logging: false, logging: false,
}); });
const weather_out = result_weather.slice(-9); let temp_weather = result_weather.slice(-6);
const weather_predict = result_weather.slice(-3); let weather = [];
temp_weather.map((data) => {
weather.push({
loc_code: data["loc_code"],
collected_at: new Date(
new Date(data["collected_at"]).getTime() + 9 * 60 * 60 * 1000
),
temp: data["temp"],
humi: data["humi"],
press: data["press"],
wind_speed: data["wind_speed"],
});
});
const pyprocess = spawn("python", [ const pyprocess = spawn("python", [
envs.inner_dir.data_processing_prediction, envs.inner_dir.data_processing_prediction,
...@@ -144,28 +157,37 @@ export const getUserWeatherData = async (req, res) => { ...@@ -144,28 +157,37 @@ export const getUserWeatherData = async (req, res) => {
]); ]);
pyprocess.stdout.on("data", (data) => { pyprocess.stdout.on("data", (data) => {
let weather_predict = [];
const str_result = data.toString(); const str_result = data.toString();
console.log(data.toString()); // Buffer to String.
const temp_predict = str_result.split(" "); let temp_predict = str_result.trim();
temp_predict = temp_predict.replace("]]", "");
temp_predict = temp_predict.replace("[[", "");
temp_predict = temp_predict.split(" ");
res.json({ let date_10m = new Date(weather[weather.length - 1]["collected_at"]);
msg: resForm.msg.ok, date_10m.setMinutes(date_10m.getMinutes() + 10);
contents: { weather_in: weather_out, weather_predict: temp_predict },
let date_20m = new Date(weather[weather.length - 1]["collected_at"]);
date_20m.setMinutes(date_20m.getMinutes() + 20);
let date_30m = new Date(weather[weather.length - 1]["collected_at"]);
date_30m.setMinutes(date_30m.getMinutes() + 30);
let dates = [date_10m, date_20m, date_30m];
temp_predict.map((temp, index) => {
weather_predict.push({
collected_at: dates[index],
temp: Number(temp),
});
}); });
});
pyprocess.stderr.on("data", (error) => {
console.log("Error in the data predicting.");
console.log(error.toString());
res.json({ res.json({
msg: resForm.msg.err, msg: resForm.msg.ok,
contents: { contents: { weather_in: weather, weather_predict: weather_predict },
weather_in: weather_out,
weather_predict: weather_predict,
error: error.toString(),
},
}); });
return;
}); });
pyprocess.on("close", () => { pyprocess.on("close", () => {
......
...@@ -61,11 +61,18 @@ for user in users: ...@@ -61,11 +61,18 @@ for user in users:
params = {"mean": mean_df.to_json(), "std": std_df.to_json()} params = {"mean": mean_df.to_json(), "std": std_df.to_json()}
cursor.execute("INSERT INTO \"Data_Processings\" (host,collected_at,model_file_path,params) VALUES (%s,%s,%s,%s)", cursor.execute(
(host["email"], "SELECT * FROM \"Data_Processings\" WHERE host=%s", (host["email"],))
collected_at, user_checker = cursor.fetchall()
model_file_path,
Json(params),)) if len(user_checker) == 0:
cursor.execute("INSERT INTO \"Data_Processings\" (host,collected_at,model_file_path,params) VALUES (%s,%s,%s,%s)",
(host["email"], collected_at, model_file_path, Json(params),))
else:
cursor.execute("UPDATE \"Data_Processings\" SET collected_at=%s, params=%s WHERE host=%s", (
collected_at, Json(params), host["email"],
))
connection.commit() connection.commit()
......
...@@ -13,9 +13,10 @@ def modeling(standard_df, host): ...@@ -13,9 +13,10 @@ def modeling(standard_df, host):
def make_dataset(data, label, window_size=24): def make_dataset(data, label, window_size=24):
feature_list = [] feature_list = []
label_list = [] label_list = []
for i in range(len(data) - window_size): for i in range(len(data) - (window_size+3)):
feature_list.append(np.array(data.iloc[i:i+window_size])) feature_list.append(np.array(data.iloc[i:i+window_size]))
label_list.append(np.array(label.iloc[i + window_size])) label_list.append(
np.array(label.iloc[i + window_size:i + window_size + 3]))
return np.array(feature_list), np.array(label_list) return np.array(feature_list), np.array(label_list)
feature_cols = ['temp_out', 'humi_out', 'press', feature_cols = ['temp_out', 'humi_out', 'press',
...@@ -37,11 +38,11 @@ def modeling(standard_df, host): ...@@ -37,11 +38,11 @@ def modeling(standard_df, host):
tf.keras.layers.LSTM(16, tf.keras.layers.LSTM(16,
return_sequences=False, return_sequences=False,
input_shape=(6, 8)), input_shape=(6, 8)),
tf.keras.layers.Dense(1) tf.keras.layers.Dense(3)
]) ])
model.compile(loss='mse', optimizer='adam') model.compile(loss='mse', optimizer='adam')
# model.fit(train_feature, train_label, epochs=50, batch_size=1000) model.fit(train_feature, train_label, epochs=50, batch_size=10)
model.save(os.getcwd() + model.save(os.getcwd() +
'/src/data_processing/models/{0}/model.h5'.format(host)) '/src/data_processing/models/{0}/model.h5'.format(host))
...@@ -85,6 +85,10 @@ if __name__ == "__main__": ...@@ -85,6 +85,10 @@ if __name__ == "__main__":
for col in feature_cols: for col in feature_cols:
new_data[col] = (new_data[col] - mean[col]) / std[col] new_data[col] = (new_data[col] - mean[col]) / std[col]
df = pd.DataFrame(new_data, columns=feature_cols)
new_data = df.to_numpy()
new_data = new_data.reshape(1, 6, 8)
model_pro = tf.keras.models.load_model(os.getcwd() + model_file_path) model_pro = tf.keras.models.load_model(os.getcwd() + model_file_path)
prediction = model_pro.predict(new_data) prediction = model_pro.predict(new_data)
prediction = prediction * std['temp_out'] + mean['temp_out'] prediction = prediction * std['temp_out'] + mean['temp_out']
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment