Commit 2addf617 authored by KangMin An

Update: Revise the data prediction and delivery process.

parent 9092566a
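
For context, the heart of this commit is that the per-user LSTM now predicts the next three temperature steps instead of one (Dense(1) → Dense(3)), so the dataset builder must pair each input window with the following three labels. A minimal sketch of that windowing idea, assuming a 6-step window and a 3-step horizon; the names below are illustrative and not taken from the repository:

```python
import numpy as np
import pandas as pd

def make_multistep_dataset(data: pd.DataFrame, label: pd.Series,
                           window_size: int = 6, horizon: int = 3):
    """Pair each window of `window_size` rows with the next `horizon` labels."""
    features, labels = [], []
    for i in range(len(data) - (window_size + horizon) + 1):
        features.append(data.iloc[i:i + window_size].to_numpy())
        labels.append(label.iloc[i + window_size:i + window_size + horizon].to_numpy())
    return np.array(features), np.array(labels)
```

With window_size=6 and horizon=3 this yields features of shape (N, 6, n_features) and labels of shape (N, 3), which is what a Dense(3) output layer is trained against.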
@@ -135,8 +135,21 @@ export const getUserWeatherData = async (req, res) => {
logging: false,
});
const weather_out = result_weather.slice(-9);
const weather_predict = result_weather.slice(-3);
let temp_weather = result_weather.slice(-6);
let weather = [];
temp_weather.map((data) => {
weather.push({
loc_code: data["loc_code"],
collected_at: new Date(
new Date(data["collected_at"]).getTime() + 9 * 60 * 60 * 1000
),
temp: data["temp"],
humi: data["humi"],
press: data["press"],
wind_speed: data["wind_speed"],
});
});
const pyprocess = spawn("python", [
envs.inner_dir.data_processing_prediction,
@@ -144,28 +157,37 @@ export const getUserWeatherData = async (req, res) => {
]);
pyprocess.stdout.on("data", (data) => {
let weather_predict = [];
const str_result = data.toString();
console.log(data.toString()); // Buffer to String.
const temp_predict = str_result.split(" ");
let temp_predict = str_result.trim();
temp_predict = temp_predict.replace("]]", "");
temp_predict = temp_predict.replace("[[", "");
temp_predict = temp_predict.split(" ");
res.json({
msg: resForm.msg.ok,
contents: { weather_in: weather_out, weather_predict: temp_predict },
let date_10m = new Date(weather[weather.length - 1]["collected_at"]);
date_10m.setMinutes(date_10m.getMinutes() + 10);
let date_20m = new Date(weather[weather.length - 1]["collected_at"]);
date_20m.setMinutes(date_20m.getMinutes() + 20);
let date_30m = new Date(weather[weather.length - 1]["collected_at"]);
date_30m.setMinutes(date_30m.getMinutes() + 30);
let dates = [date_10m, date_20m, date_30m];
temp_predict.map((temp, index) => {
weather_predict.push({
collected_at: dates[index],
temp: Number(temp),
});
});
});
pyprocess.stderr.on("data", (error) => {
console.log("Error in the data predicting.");
console.log(error.toString());
res.json({
msg: resForm.msg.err,
contents: {
weather_in: weather_out,
weather_predict: weather_predict,
error: error.toString(),
},
msg: resForm.msg.ok,
contents: { weather_in: weather, weather_predict: weather_predict },
});
return;
});
pyprocess.on("close", () => {
......
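
The JavaScript handler above takes the prediction script's stdout (a string such as "[[20.1 20.4 20.7]]"), strips the brackets, and pairs the three temperatures with timestamps 10, 20 and 30 minutes after the last collected sample. A rough Python sketch of that same parse-and-pair logic, with illustrative names only (the actual server does this in JavaScript):

```python
from datetime import datetime, timedelta

def pair_predictions(stdout_text: str, last_collected_at: datetime):
    # "[[20.1 20.4 20.7]]" -> "20.1 20.4 20.7" -> [20.1, 20.4, 20.7]
    cleaned = stdout_text.strip().replace("[[", "").replace("]]", "")
    temps = [float(t) for t in cleaned.split()]
    # One entry per predicted step: +10, +20, +30 minutes after the last sample.
    return [
        {"collected_at": last_collected_at + timedelta(minutes=10 * (i + 1)),
         "temp": temp}
        for i, temp in enumerate(temps)
    ]
```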
@@ -61,11 +61,18 @@ for user in users:
params = {"mean": mean_df.to_json(), "std": std_df.to_json()}
cursor.execute("INSERT INTO \"Data_Processings\" (host,collected_at,model_file_path,params) VALUES (%s,%s,%s,%s)",
(host["email"],
collected_at,
model_file_path,
Json(params),))
cursor.execute(
"SELECT * FROM \"Data_Processings\" WHERE host=%s", (host["email"],))
user_checker = cursor.fetchall()
if len(user_checker) == 0:
cursor.execute("INSERT INTO \"Data_Processings\" (host,collected_at,model_file_path,params) VALUES (%s,%s,%s,%s)",
(host["email"], collected_at, model_file_path, Json(params),))
else:
cursor.execute("UPDATE \"Data_Processings\" SET collected_at=%s, params=%s WHERE host=%s", (
collected_at, Json(params), host["email"],
))
connection.commit()
......
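
The hunk above first SELECTs the user's row in "Data_Processings" and then either INSERTs or UPDATEs it. If the host column carries a unique constraint, the same upsert could be written as one statement with PostgreSQL's ON CONFLICT; a sketch reusing the script's cursor, connection, host, collected_at, model_file_path and params (an alternative, not what the commit does):

```python
from psycopg2.extras import Json

# Assumes "Data_Processings".host is UNIQUE; otherwise keep the
# SELECT-then-INSERT/UPDATE approach used in the commit.
cursor.execute(
    """
    INSERT INTO "Data_Processings" (host, collected_at, model_file_path, params)
    VALUES (%s, %s, %s, %s)
    ON CONFLICT (host) DO UPDATE
        SET collected_at = EXCLUDED.collected_at,
            params = EXCLUDED.params
    """,
    (host["email"], collected_at, model_file_path, Json(params)),
)
connection.commit()
```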
@@ -13,9 +13,10 @@ def modeling(standard_df, host):
def make_dataset(data, label, window_size=24):
feature_list = []
label_list = []
for i in range(len(data) - window_size):
for i in range(len(data) - (window_size+3)):
feature_list.append(np.array(data.iloc[i:i+window_size]))
label_list.append(np.array(label.iloc[i + window_size]))
label_list.append(
np.array(label.iloc[i + window_size:i + window_size + 3]))
return np.array(feature_list), np.array(label_list)
feature_cols = ['temp_out', 'humi_out', 'press',
@@ -37,11 +38,11 @@ def modeling(standard_df, host):
tf.keras.layers.LSTM(16,
return_sequences=False,
input_shape=(6, 8)),
tf.keras.layers.Dense(1)
tf.keras.layers.Dense(3)
])
model.compile(loss='mse', optimizer='adam')
# model.fit(train_feature, train_label, epochs=50, batch_size=1000)
model.fit(train_feature, train_label, epochs=50, batch_size=10)
model.save(os.getcwd() +
'/src/data_processing/models/{0}/model.h5'.format(host))
@@ -85,6 +85,10 @@ if __name__ == "__main__":
for col in feature_cols:
new_data[col] = (new_data[col] - mean[col]) / std[col]
df = pd.DataFrame(new_data, columns=feature_cols)
new_data = df.to_numpy()
new_data = new_data.reshape(1, 6, 8)
model_pro = tf.keras.models.load_model(os.getcwd() + model_file_path)
prediction = model_pro.predict(new_data)
prediction = prediction * std['temp_out'] + mean['temp_out']
......
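
The final hunk standardizes the latest observations with the per-user mean/std saved at training time, reshapes them into the (1, 6, 8) batch the LSTM expects, and then inverts the standardization on the predicted temperatures. A condensed sketch of those steps, assuming new_data holds exactly six rows of the eight feature columns; the function name is illustrative:

```python
import numpy as np
import pandas as pd
import tensorflow as tf

def predict_next_temps(new_data: pd.DataFrame, mean: pd.Series, std: pd.Series,
                       feature_cols: list, model_path: str) -> np.ndarray:
    # Standardize each feature with the statistics stored alongside the model.
    scaled = (new_data[feature_cols] - mean[feature_cols]) / std[feature_cols]
    # The model was built with input_shape=(6, 8): 6 time steps, 8 features.
    batch = scaled.to_numpy().reshape(1, 6, len(feature_cols))
    model = tf.keras.models.load_model(model_path)
    prediction = model.predict(batch)              # shape (1, 3)
    # Undo the standardization of the target column to recover real temperatures.
    return prediction * std['temp_out'] + mean['temp_out']
```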