Commit 9092566a authored by KangMin An

Update: Revise the data analysis model.

parent 41d5b0e1
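In outline, the commit stops storing the trained Keras model as a BLOB in the Data_Processings table: modeling() now takes the host's email and saves one model per user under src/data_processing/models/<email>/model.h5, the INSERT records only that relative path in the new model_file_path column, and the prediction script loads the model back from that path instead of writing and deleting a temporary model.h5. A minimal sketch of the revised save/record/load flow; the helper names (save_user_model, record_model_path, load_user_model) are illustrative, not part of this commit:

```python
import os
import tensorflow as tf
from psycopg2.extras import Json


def save_user_model(model, email):
    """Save the per-user model and return the relative path kept in the DB."""
    rel_path = "/src/data_processing/models/{0}/model.h5".format(email)
    model.save(os.getcwd() + rel_path)
    return rel_path


def record_model_path(cursor, email, collected_at, rel_path, mean_df, std_df):
    """Store the path (not the file contents) together with the scaling params."""
    params = {"mean": mean_df.to_json(), "std": std_df.to_json()}
    cursor.execute(
        'INSERT INTO "Data_Processings" (host,collected_at,model_file_path,params)'
        " VALUES (%s,%s,%s,%s)",
        (email, collected_at, rel_path, Json(params)),
    )


def load_user_model(rel_path):
    """Prediction side: read model_file_path for the user and load from disk."""
    return tf.keras.models.load_model(os.getcwd() + rel_path)
```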
@@ -13,31 +13,37 @@ function ChartTemp() {
useEffect(() => {
if (isLogined()) {
axios.get(routesClient.userWeather, { withCredentials: true })
axios
.get(routesClient.userWeather, { withCredentials: true })
.then((res) => {
console.log('temp', res.data.contents)
const userWeather = res.data.contents.weather_in
const userWeatherPredict = res.data.contents.weather_predict
const Array = []
const Array2 = []
console.log("temp", res.data.contents);
const userWeather = res.data.contents.weather_in;
const userWeatherPredict = res.data.contents.weather_predict;
const Array = [];
const Array2 = [];
for (let i = 3; i < 9; i++) {
Array.push(userWeather[i].temp)
Array2.push(userWeather[i].collected_at.split('T')[1].split('.')[0])
Array.push(userWeather[i].temp);
Array2.push(
userWeather[i].collected_at.split("T")[1].split(".")[0]
);
}
for (let j = 0; j < 3; j++) {
Array.push(userWeatherPredict[j].temp)
Array2.push(userWeatherPredict[j].collected_at.split('T')[1].split('.')[0])
Array.push(userWeatherPredict[j].temp);
Array2.push(
userWeatherPredict[j].collected_at.split("T")[1].split(".")[0]
);
}
setTemp(Array)
setNewLabel(Array2)
})
setTemp(Array);
setNewLabel(Array2);
});
} else {
axios.get(routesClient.outsideLoc + `3743011`).then((res) => {
const outWeather = res.data.contents.weather_out;
const Array = [];
const Array2 = [];
console.log(outWeather);
let i = outWeather.length - 9;
// let i = outWeather.length - 9;
let i = 0;
for (i; i < outWeather.length; i++) {
Array.push(outWeather[i].temp);
Array2.push(outWeather[i].collected_at.split("T")[1].split(".")[0]);
......
@@ -16,3 +16,4 @@ __pycache__
src/data_processing/temp.csv
src/data_processing/model.h5
src/data_processing/config.py
src/data_processing/models
This diff is collapsed.
@@ -20,7 +20,6 @@ dbconfig = {"host": DB["host"], "port": DB["port"], "user": DB["user"],
"password": DB["password"], "database": DB["database"]}
data_dir = os.getcwd() + "/src/data_processing/temp.csv"
model_dir = os.getcwd() + "/src/data_processing/model.h5"
def makeDateForm():
@@ -51,33 +50,28 @@ for user in users:
standard_df, mean_df, std_df = preprocess(cursor, host)
# Data analysis
modeling(standard_df)
modeling(standard_df, host["email"])
# Save the data analysis results
collected_at = makeDateForm()
model_file = open(model_dir, 'rb')
model_file_data = model_file.read()
model_file_path = "/src/data_processing/models/{0}/model.h5".format(
host["email"])
params = {"mean": mean_df.to_json(), "std": std_df.to_json()}
cursor.execute("INSERT INTO \"Data_Processings\" (host,collected_at,model_file,params) VALUES (%s,%s,%s,%s)",
cursor.execute("INSERT INTO \"Data_Processings\" (host,collected_at,model_file_path,params) VALUES (%s,%s,%s,%s)",
(host["email"],
collected_at,
model_file_data,
model_file_path,
Json(params),))
connection.commit()
model_file.close()
if os.path.isfile(data_dir):
os.remove(data_dir)
if os.path.isfile(model_dir):
os.remove(model_dir)
# Close the cursor and the connection
cursor.close()
connection.close()
@@ -3,7 +3,7 @@ import os
import tensorflow as tf
def modeling(standard_df):
def modeling(standard_df, host):
n = len(standard_df)
test_size = int(0.3 * n)
@@ -43,4 +43,5 @@ def modeling(standard_df):
model.compile(loss='mse', optimizer='adam')
# model.fit(train_feature, train_label, epochs=50, batch_size=1000)
model.save(os.getcwd() + '/src/data_processing/model.h5')
model.save(os.getcwd() +
'/src/data_processing/models/{0}/model.h5'.format(host))
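A side note on the save call above: saving in HDF5 format writes the file directly with h5py, which does not create a missing models/<email>/ directory, so that directory has to exist before modeling() calls model.save (it may of course be created elsewhere, in code this diff does not show). A hedged sketch of such a guard; ensure_model_dir is a hypothetical helper, not part of the commit:

```python
import os


def ensure_model_dir(host):
    """Create src/data_processing/models/<host>/ if needed and return the .h5 path."""
    model_path = os.getcwd() + "/src/data_processing/models/{0}/model.h5".format(host)
    os.makedirs(os.path.dirname(model_path), exist_ok=True)
    return model_path
```

modeling() could then end with model.save(ensure_model_dir(host)) instead of formatting the path inline.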
@@ -10,7 +10,6 @@ import tensorflow as tf
import numpy as np
if __name__ == "__main__":
dbconfig = {"host": DB["host"], "port": DB["port"], "user": DB["user"],
@@ -18,7 +17,6 @@ if __name__ == "__main__":
user_email = sys.argv[1]
data_dir = os.getcwd() + "/src/data_processing/temp.csv"
model_dir = os.getcwd() + "/src/data_processing/model.h5"
# DB Connect and Make Cursor
connection = psycopg2.connect(
@@ -27,13 +25,10 @@ if __name__ == "__main__":
# Get Model and Params, then Make File and Variable
cursor.execute(
"SELECT model_file, params FROM \"Data_Processings\" WHERE host=%s", (user_email,))
"SELECT model_file_path, params FROM \"Data_Processings\" WHERE host=%s", (user_email,))
model_params = cursor.fetchone()
blob_model = model_params[0]
model_file = open(model_dir, "wb")
model_file.write(blob_model)
model_file.close()
model_file_path = model_params[0]
params = model_params[1]
mean = json.loads(params["mean"])
@@ -90,16 +85,12 @@ if __name__ == "__main__":
for col in feature_cols:
new_data[col] = (new_data[col] - mean[col]) / std[col]
model_pro = tf.keras.models.load_model(model_dir)
model_pro = tf.keras.models.load_model(os.getcwd() + model_file_path)
prediction = model_pro.predict(new_data)
prediction = prediction * std['temp_out'] + mean['temp_out']
# Delete the files that were used
if os.path.isfile(data_dir):
os.remove(data_dir)
if os.path.isfile(model_dir):
os.remove(model_dir)
print(prediction)
/*
# EUE Server Database Schema
- Database: PostgreSQL
- When the server actually runs, Sequelize creates the tables from the models in the models directory, so this SQL file is kept for reference only.
1. LOC_DO
- Stores the names and codes of do (province) / special city / special self-governing city administrative divisions
- Referenced by LOC_SGG and LOC_EMD
2. LOC_SGG
- Stores the names and codes of si / gun / gu administrative divisions
- References LOC_DO
- Referenced by LOC_EMD
3. LOC_EMD
- Names and codes of eup / myeon / dong administrative divisions
- References LOC_DO and LOC_SGG
- Referenced by USERS and WEATHER_OUT
4. USERS
- User ID, password, and residential area code
- References LOC_EMD
5. WEATHER_IN
- User ID, collection date and time, temperature, humidity, and light level
- References USERS and LOC_EMD
6. WEATHER_OUT
- Area code, collection date and time, temperature, humidity, pressure, and wind speed
- References LOC_EMD
*/
CREATE TABLE DOE
(
CODE INT NOT NULL,
NAME_DO VARCHAR(20) NOT NULL,
PRIMARY KEY(CODE)
);
CREATE TABLE SGG
(
CODE INT NOT NULL,
CODE_DO INT NOT NULL,
NAME_SGG VARCHAR(20) NOT NULL,
PRIMARY KEY(CODE),
FOREIGN KEY(CODE_DO) REFERENCES DOE(CODE) ON UPDATE CASCADE ON DELETE RESTRICT
);
CREATE TABLE EMD
(
CODE INT NOT NULL,
CODE_DO INT NOT NULL,
CODE_SGG INT NOT NULL,
NAME_EMD VARCHAR(20) NOT NULL,
PRIMARY KEY(CODE),
FOREIGN KEY(CODE_DO) REFERENCES DOE(CODE) ON UPDATE CASCADE ON DELETE RESTRICT,
FOREIGN KEY(CODE_SGG) REFERENCES SGG(CODE) ON UPDATE CASCADE ON DELETE RESTRICT
);
CREATE TABLE USERS
(
EMAIL VARCHAR(320) UNIQUE NOT NULL,
PW VARCHAR(20) NOT NULL,
LOC_CODE INT NOT NULL,
PRIMARY KEY(EMAIL),
FOREIGN KEY(LOC_CODE) REFERENCES EMD(CODE) ON UPDATE CASCADE ON DELETE RESTRICT
);
CREATE TABLE WEATHER_IN
(
HOST VARCHAR(320) NOT NULL,
COLLECTED_AT TIMESTAMP NOT NULL,
TEMP FLOAT DEFAULT 0,
HUMI FLOAT DEFAULT 0,
LIGHTS FLOAT DEFAULT 0,
PRIMARY KEY(HOST,COLLECTED_AT),
FOREIGN KEY(HOST) REFERENCES USERS(EMAIL) ON UPDATE CASCADE ON DELETE RESTRICT
);
CREATE TABLE WEATHER_OUT
(
LOC_CODE INT NOT NULL,
COLLECTED_AT TIMESTAMP NOT NULL,
TEMP FLOAT DEFAULT 0,
HUMI FLOAT DEFAULT 0,
PRESS FLOAT DEFAULT 0,
WIND_SPEED FLOAT DEFAULT 0,
PRIMARY KEY(LOC_CODE, COLLECTED_AT),
FOREIGN KEY(LOC_CODE) REFERENCES EMD(CODE) ON UPDATE CASCADE ON DELETE RESTRICT
);
\ No newline at end of file
@@ -18,8 +18,8 @@ export class Data_Processing extends Model {
primaryKey: true,
defaultValue: Date.now(),
},
model_file: {
type: DataTypes.BLOB,
model_file_path: {
type: DataTypes.STRING,
},
params: {
type: DataTypes.JSON,
......