Commit aa44a862 authored by KangMin An's avatar KangMin An
Browse files

Create & Update: 파이썬 Config 작성. 온도 예측 과정 일부 작성.

parent 8e672e73
......@@ -11,3 +11,8 @@ config/config.js
# Python Cache
__pycache__
# Data Processing
src/data_processing/temp.csv
src/data_processing/model.h5
src/data_processing/config.py
......@@ -41,7 +41,7 @@ const DB_DATABASE = PRODUCTION
// ## OpenWeatherMap
const OPENWEATHERMAP_API_KEY = "YOUR_OpenWeatherMap_API_KEY";
// # Nodemailer.
// ## Nodemailer.
const NODEMAILER_SERVICE = "gmail";
const NODEMAILER_USER = "YOUR_MAIL_ADDRESS";
const NODEMAILER_GAMIL_CLIENT_ID = "YOUR_API_CLIENT_ID";
......@@ -51,6 +51,10 @@ const NODEMAILER_GMAIL_REFRESH_TOKEN = "YOUR_GMAIL_REFRESH_TOKEN";
const AUTH_MAIL_SECRETKEY = "YOUR_MAIL_SECRETKEY";
const AUTH_ACCESS_TOKEN_SECRETKEY = "YOUR_ACCESS_TOKEN_SECRETKEY";
// # Server Inner DIR
const DIR_DATA_PROCESSING_MAIN = "/src/data_processing/main.py";
const DIR_DATA_PROCESSING_PREDICTION = "/src/data_processing/predict.py";
const envs = {
production: PRODUCTION,
client: {
......@@ -86,6 +90,10 @@ const envs = {
mail: AUTH_MAIL_SECRETKEY,
access_token: AUTH_ACCESS_TOKEN_SECRETKEY,
},
inner_dir: {
data_processing_main: DIR_DATA_PROCESSING_MAIN,
data_processing_prediction: DIR_DATA_PROCESSING_PREDICTION,
},
};
export default envs;
......@@ -3,6 +3,7 @@ import envs from "../../config/config";
import fetch from "node-fetch";
import jwt from "jsonwebtoken";
import resForm from "../resForm";
import { spawn } from "child_process";
// 외부 수집기로 부터 들어온 정보 처리
export const handleOutData = async (locCode, date, lat, lng) => {
......@@ -98,7 +99,7 @@ export const getDataInput = (req, res) => {
};
// 사용자의 데이터 가져오기 및 예측 값 전송
export const getUserWeatherData = (req, res) => {
export const getUserWeatherData = async (req, res) => {
const {
cookies: { acs_token },
} = req;
......@@ -118,32 +119,64 @@ export const getUserWeatherData = (req, res) => {
// logging: false,
// });
const result_user = db.User.findAll({
const result_user = await db.User.findAll({
where: {
email: decoded.email,
},
logging: false,
});
user_info = result_user[0];
const user_info = result_user[0];
const result_weather = db.Weather_Out.findAll({
const result_weather = await db.Weather_Out.findAll({
where: {
loc_code: user_info.loc_code,
},
order: [["date", "ASC"]],
order: [["collected_at", "ASC"]],
logging: false,
});
const weather_out = result_weather.slice(-9, -3);
const weather_out = result_weather.slice(-9);
const weather_predict = result_weather.slice(-3);
const pyprocess = spawn("python", [
envs.inner_dir.data_processing_prediction,
user_info.email,
]);
pyprocess.stdout.on("data", (data) => {
const str_result = data.toString();
console.log(data.toString()); // Buffer to String.
const temp_predict = str_result.split(" ");
res.json({
msg: resForm.msg.ok,
contents: { weather_in: weather_out, weather_predict: weather_predict },
contents: { weather_in: weather_out, weather_predict: temp_predict },
});
});
pyprocess.stderr.on("data", (error) => {
console.log("Error in the data predicting.");
console.log(error.toString());
res.json({
msg: resForm.msg.err,
contents: {
weather_in: weather_out,
weather_predict: weather_predict,
error: error.toString(),
},
});
});
pyprocess.on("close", () => {
console.log("The data prediction is done.");
});
} catch (err) {
console.log(err);
res.json({ msg: resForm.msg.err, contents: { error: err } });
res.json({
msg: resForm.msg.err,
contents: { weather_in: [], error: err },
});
}
};
......
# Environment switch: True selects the production credential set,
# False selects the development credential set.
PRODUCTION = False


def _select(production_value, development_value):
    # Return the value matching the current environment flag.
    return production_value if PRODUCTION else development_value


# Database connection settings consumed by the data-processing scripts.
DB = {
    "host": _select("YOUR_PRODUCTION_DB_HOST", "YOUR_DEVELOPMENT_DB_HOST"),
    "port": _select("YOUR_PRODUCTION_DB_PORT", "YOUR_DEVELOPMENT_DB_PORT"),
    "user": _select("YOUR_PRODUCTION_DB_USER", "YOUR_DEVELOPMENT_DB_USER"),
    "password": _select("YOUR_PRODUCTION_DB_PASSWORD", "YOUR_DEVELOPMENT_DB_PASSWORD"),
    "database": _select("YOUR_PRODUCTION_DB", "YOUR_DEVELOPMENT_DB"),
}
......@@ -9,15 +9,15 @@ import datetime
import os
import psycopg2
from psycopg2.extras import Json
import sys
from config import DB
from preprocessing import preprocess
from model import modeling
# DB 환경 변수
dbconfig = {"host": sys.argv[1], "port": sys.argv[2], "user": sys.argv[3],
"password": sys.argv[4], "database": sys.argv[5]}
dbconfig = {"host": DB["host"], "port": DB["port"], "user": DB["user"],
"password": DB["password"], "database": DB["database"]}
data_dir = os.getcwd() + "/src/data_processing/temp.csv"
model_dir = os.getcwd() + "/src/data_processing/model.h5"
......
from config import DB
import datetime
from dateutil.relativedelta import relativedelta
import json
import os
import psycopg2
import sys
if __name__ == "__main__":
    # Temperature-prediction entry point, invoked by the Node.js server as:
    #     python predict.py <user_email>
    # Loads the user's trained model blob and recent outdoor weather rows from
    # PostgreSQL, materializes them as temporary files for the model code, and
    # prints the prediction result to stdout (the Node.js parent process reads
    # it from the spawned child's stdout stream).
    dbconfig = {"host": DB["host"], "port": DB["port"], "user": DB["user"],
                "password": DB["password"], "database": DB["database"]}

    user_email = sys.argv[1]

    # Temp files live under the server's working directory (the Node.js
    # process spawns this script from the server root).
    data_dir = os.getcwd() + "/src/data_processing/temp.csv"
    model_dir = os.getcwd() + "/src/data_processing/model.h5"

    # DB Connect and Make Cursor
    connection = psycopg2.connect(
        dbname=dbconfig["database"], user=dbconfig["user"], password=dbconfig["password"], host=dbconfig["host"], port=dbconfig["port"])
    cursor = connection.cursor()

    try:
        # Get Model and Params, then Make File and Variable
        cursor.execute(
            "SELECT model_file, params FROM \"Data_Processings\" WHERE host=%s", (user_email,))
        model_params = cursor.fetchone()

        blob_model = model_params[0]
        with open(model_dir, "wb") as model_file:
            model_file.write(blob_model)

        # Normalization parameters saved alongside the model.
        params = model_params[1]
        mean = json.loads(params["mean"])
        std = json.loads(params["std"])

        # Get User Info
        cursor.execute(
            "SELECT using_aircon, loc_code FROM \"Users\" WHERE email=%s", (user_email,))
        user_info = cursor.fetchone()

        # Get Weather Data (since yesterday) and Make File
        today = datetime.date.today()
        yesterday = today - relativedelta(days=1)
        f_yesterday = "{0}-{1}-{2}".format(yesterday.year,
                                           yesterday.month, yesterday.day)

        cursor.execute(
            "SELECT collected_at as \"date\", temp as temp_out, humi as humi_out, press, wind_speed "
            + "From \"Weather_Outs\" "
            + "WHERE loc_code = %s AND collected_at >= %s", (user_info[1], f_yesterday,)
        )
        results = cursor.fetchall()

        with open(data_dir, 'w') as data_file:
            # header
            data_file.write("date,temp_out,humi_out,press,wind_speed\n")
            # contents
            for result in results:
                data_file.write("{0},{1},{2},{3},{4}\n".format(
                    result[0], result[1], result[2], result[3], result[4]))

        # Placeholder until the actual model inference is wired in.
        prediction = "Result_of_Prediction_Process"
    finally:
        # Always release DB resources and delete the temp files, even when a
        # query fails (the original leaked the connection and, on error, the
        # files as well).
        cursor.close()
        connection.close()
        if os.path.isfile(data_dir):
            os.remove(data_dir)
        if os.path.isfile(model_dir):
            os.remove(model_dir)

    print(prediction)
......@@ -8,6 +8,7 @@
import pandas as pd
import datetime
from dateutil.relativedelta import relativedelta
import numpy as np
import os
......@@ -23,6 +24,8 @@ def preprocess(cursor, host):
CSV 파일 생성 후 pandas를 이용해 dataframe으로 만든 뒤, 정규화를 진행합니다.
"""
data_dir = os.getcwd() + "/src/data_processing/temp.csv"
# # 데이터 수집기 오류로 인해 보류
# cursor.execute(
# "SELECT t2.collected_at as \"date\", temp_out, humi_out, press, wind_speed, temp_in, humi_in, lights FROM"
......@@ -44,16 +47,21 @@ def preprocess(cursor, host):
# file.close()
today = datetime.date.today()
five_m_ago = today - relativedelta(months=5)
f_five_m_ago = "{0}-{1}-{2}".format(five_m_ago.year,
five_m_ago.month, five_m_ago.day)
# 사용자의 거주 지역의 실외 데이터 검색
cursor.execute(
"SELECT collected_at as \"date\", temp as temp_out, humi as humi_out, press, wind_speed "
+ "From \"Weather_Outs\" "
+ "WHERE loc_code = %s", (host["loc_code"],)
+ "WHERE loc_code = %s AND collected_at >= %s", (host["loc_code"], f_five_m_ago,)
)
results = cursor.fetchall()
file = open(os.getcwd() + "/src/data_processing/temp.csv", 'w')
file = open(data_dir, 'w')
# header
file.write("date,temp_out,humi_out,press,wind_speed\n")
......@@ -64,7 +72,7 @@ def preprocess(cursor, host):
file.close()
df = pd.read_csv(os.getcwd() + "/src/data_processing/temp.csv")
df = pd.read_csv(data_dir)
date_time = pd.to_datetime(df['date'], format='%Y-%m-%d %H:%M')
timestamp_s = date_time.map(datetime.datetime.timestamp)
......
import db from "./db/index";
import envs from "../config/config";
import schedule from "node-schedule";
import { spawn } from "child_process";
import { handleOutData } from "./controllers/dataController";
// Data Processing Python Codes Directory - server directory에서 실행
const DATA_PROCESSING_DIR = "./src/data_processing/main.py";
// 매일 자정에 실행할 스케줄의 규칙
const rule_dataProcessing = new schedule.RecurrenceRule();
rule_dataProcessing.hour = 0;
......@@ -23,14 +19,7 @@ const dataProcessingJob = schedule.scheduleJob(rule_dataProcessing, () => {
}.${today.getDate()} - Data Processing Start.`
);
const pyprocess = spawn("python", [
DATA_PROCESSING_DIR,
envs.db.host,
envs.db.port,
envs.db.user,
envs.db.password,
envs.db.database,
]);
const pyprocess = spawn("python", [envs.inner_dir.data_processing_main]);
pyprocess.stdout.on("data", (data) => {
console.log("Data processing is start.");
......@@ -43,7 +32,7 @@ const dataProcessingJob = schedule.scheduleJob(rule_dataProcessing, () => {
});
pyprocess.on("close", () => {
console.log("The data processing done.");
console.log("The data processing is done.");
});
});
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment