@@ -36,14 +36,14 @@ def Check_AlivedTimeStamp(RawData, ComparedData, idx_raw, idx_comp, unit):
     isAlived = False
     return isAlived
 
-def detect_unknown_duplicated_zero_data_for_faciilty(raw_Data, startday, lastday, Day_Period, OrgDataRes, isRecent):
+def detect_unknown_duplicated_zero_data_for_faciilty(raw_Data, startday, lastday, Day_Period, OrgDataRes):
     CumTime = datetime.datetime(int(startday.strftime('%Y')), int(startday.strftime('%m')), int(startday.strftime('%d')), 0, 0, 0)
     StandardTimeStamp_DayUnit = [CumTime]
     StandardTimeStamp_QuarterUnit = [CumTime]
     # Create intact time stamp
     for idx_day in range(Day_Period):
         StandardTimeStamp_DayUnit.append(startday + datetime.timedelta(days=idx_day))
-        if isRecent and idx_day == Day_Period-1:
+        if idx_day == Day_Period-1:
             tmp_len = now.hour*4 + int(now.minute/15)
             for idx_time in range(tmp_len):
                 CumTime += datetime.timedelta(minutes = 15)
@@ -65,10 +65,7 @@ def detect_unknown_duplicated_zero_data_for_faciilty(raw_Data, startday, lastday
                 break
 
     Data_len = len(Raw_Date)
-    if isRecent:
-        DataAct_len = (Day_Period-1)*OrgDataRes + now.hour*4 + int(now.minute/15)+1
-    else:
-        DataAct_len = Day_Period*OrgDataRes
+    DataAct_len = (Day_Period-1)*OrgDataRes + now.hour*4 + int(now.minute/15)+1
 
     ### Unknown/duplicated data counts
     DataCount=[]
@@ -77,7 +74,7 @@ def detect_unknown_duplicated_zero_data_for_faciilty(raw_Data, startday, lastday
         for j in range(Data_len-1):
             if StandardTimeStamp_DayUnit[i] == datetime.date(Raw_Date[j].year,Raw_Date[j].month,Raw_Date[j].day):
                 cnt_unk += 1
-        if isRecent and i==len(StandardTimeStamp_DayUnit)-1:
+        if i==len(StandardTimeStamp_DayUnit)-1:
             DataCount.append([StandardTimeStamp_DayUnit[i], now.hour*4 + int(now.minute/15) - cnt_unk])
         else:
             DataCount.append([StandardTimeStamp_DayUnit[i], OrgDataRes-cnt_unk])
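With `isRecent` removed in the hunks above, the function always treats the data window as ending at the current quarter-hour. A minimal sketch of that expected-sample arithmetic, using a hypothetical helper name and assuming the 96-samples-per-day resolution implied by `DataRes_96`:

```python
import datetime

def expected_sample_count(day_period, org_data_res=96, now=None):
    """Hypothetical helper mirroring the patched DataAct_len: Day_Period-1
    complete days, plus the quarter-hours elapsed so far today, plus one
    for the sample at the start of the current quarter-hour."""
    now = now or datetime.datetime.now()
    return (day_period - 1) * org_data_res + now.hour * 4 + now.minute // 15 + 1
```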
@@ -113,7 +110,7 @@ def detect_unknown_duplicated_zero_data_for_faciilty(raw_Data, startday, lastday
 
 ### Before 21:00 it is enough to fetch today's data (the forecast data is updated at 21:00).
 def detect_unknown_duplicated_zero_data_for_WeatherForecast3h(raw_Data, startday, lastday, Day_Period):
     now = datetime.datetime.now().now()
-    if now.hour > 21:
+    if now.hour >= 21:
         Day_Period += 1
         lastday += datetime.timedelta(days=1)
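The `>` to `>=` fix includes the extra forecast day from 21:00 sharp, when the 3-hour forecast is republished. A sketch of the window extension under that reading (the function name and signature are illustrative, not from the patch):

```python
import datetime

def forecast_window(startday, lastday, day_period, now=None):
    """Illustrative only: once the 21:00 forecast update is published
    (now.hour >= 21), extend the window by one day to cover tomorrow."""
    now = now or datetime.datetime.now()
    if now.hour >= 21:
        day_period += 1
        lastday = lastday + datetime.timedelta(days=1)
    return startday, lastday, day_period
```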
@@ -267,7 +264,6 @@ if __name__ == "__main__" :
 
     now=datetime.datetime.now().now()
     lastday = datetime.date(now.year, now.month, now.day)
-    isRecent = True
 
     startday = datetime.date(2020,4,9)
     if startday < datetime.date(2020,4,8):
@@ -627,34 +623,34 @@ if __name__ == "__main__" :
 
     ##############################################################################################
 
-    ChillerCalAmount_Date, ChillerCalAmount_w_nan, DataCountMat_ChillerCalAmount = detect_unknown_duplicated_zero_data_for_faciilty(rawChillerCalAmount, startday, lastday, DayPeriod, DataRes_96, isRecent)
+    ChillerCalAmount_Date, ChillerCalAmount_w_nan, DataCountMat_ChillerCalAmount = detect_unknown_duplicated_zero_data_for_faciilty(rawChillerCalAmount, startday, lastday, DayPeriod, DataRes_96)
 
-    BrineMixedTemperature_Date, BrineMixedTemperature_w_nan, DataCountMat_BrineMixedTemperature = detect_unknown_duplicated_zero_data_for_faciilty(rawBrineMixedTemperature, startday, lastday, DayPeriod, DataRes_96, isRecent)
-    BrineInletTemperature_Date, BrineInletTemperature_w_nan, DataCountMat_BrineInletTemperature = detect_unknown_duplicated_zero_data_for_faciilty(rawBrineInletTemperature, startday, lastday, DayPeriod, DataRes_96, isRecent)
-    BrineOutletTemperature_Date, BrineOutletTemperature_w_nan, DataCountMat_BrineOutletTemperature = detect_unknown_duplicated_zero_data_for_faciilty(rawBrineOutletTemperature, startday, lastday, DayPeriod, DataRes_96, isRecent)
+    BrineMixedTemperature_Date, BrineMixedTemperature_w_nan, DataCountMat_BrineMixedTemperature = detect_unknown_duplicated_zero_data_for_faciilty(rawBrineMixedTemperature, startday, lastday, DayPeriod, DataRes_96)
+    BrineInletTemperature_Date, BrineInletTemperature_w_nan, DataCountMat_BrineInletTemperature = detect_unknown_duplicated_zero_data_for_faciilty(rawBrineInletTemperature, startday, lastday, DayPeriod, DataRes_96)
+    BrineOutletTemperature_Date, BrineOutletTemperature_w_nan, DataCountMat_BrineOutletTemperature = detect_unknown_duplicated_zero_data_for_faciilty(rawBrineOutletTemperature, startday, lastday, DayPeriod, DataRes_96)
 
-    BrineFlowAmount_Date, BrineFlowAmount_w_nan, DataCountMat_BrineFlowAmount = detect_unknown_duplicated_zero_data_for_faciilty(rawBrineFlowAmount, startday, lastday, DayPeriod, DataRes_96, isRecent)
+    BrineFlowAmount_Date, BrineFlowAmount_w_nan, DataCountMat_BrineFlowAmount = detect_unknown_duplicated_zero_data_for_faciilty(rawBrineFlowAmount, startday, lastday, DayPeriod, DataRes_96)
 
-    ChStatusIcing_Date, ChStatusIcing_w_nan, DataCountMat_ChStatusIcing = detect_unknown_duplicated_zero_data_for_faciilty(rawChillerStatusIcing, startday, lastday, DayPeriod, DataRes_96, isRecent)
-    ChStatusDeicing_Date, ChStatusDeicing_w_nan, DataCountMat_ChStatusDeicing = detect_unknown_duplicated_zero_data_for_faciilty(rawChillerStatusDeicing, startday, lastday, DayPeriod, DataRes_96, isRecent)
-    ChStatusParallel_Date, ChStatusParallel_w_nan, DataCountMat_ChStatusParallel = detect_unknown_duplicated_zero_data_for_faciilty(rawChillerStatusParallel, startday, lastday, DayPeriod, DataRes_96, isRecent)
-    ChStatusRefOnly_Date, ChStatusRefOnly_w_nan, DataCountMat_ChStatusRefOnly = detect_unknown_duplicated_zero_data_for_faciilty(rawChillerStatusRefOnly, startday, lastday, DayPeriod, DataRes_96, isRecent)
+    ChStatusIcing_Date, ChStatusIcing_w_nan, DataCountMat_ChStatusIcing = detect_unknown_duplicated_zero_data_for_faciilty(rawChillerStatusIcing, startday, lastday, DayPeriod, DataRes_96)
+    ChStatusDeicing_Date, ChStatusDeicing_w_nan, DataCountMat_ChStatusDeicing = detect_unknown_duplicated_zero_data_for_faciilty(rawChillerStatusDeicing, startday, lastday, DayPeriod, DataRes_96)
+    ChStatusParallel_Date, ChStatusParallel_w_nan, DataCountMat_ChStatusParallel = detect_unknown_duplicated_zero_data_for_faciilty(rawChillerStatusParallel, startday, lastday, DayPeriod, DataRes_96)
+    ChStatusRefOnly_Date, ChStatusRefOnly_w_nan, DataCountMat_ChStatusRefOnly = detect_unknown_duplicated_zero_data_for_faciilty(rawChillerStatusRefOnly, startday, lastday, DayPeriod, DataRes_96)
 
-    RefPowerConsume1_Date, RefPowerConsume1_w_nan, DataCountMat_RefPowerConsume1 = detect_unknown_duplicated_zero_data_for_faciilty(rawRefPowerConsume1, startday, lastday, DayPeriod, DataRes_96, isRecent)
-    RefPowerConsume2_Date, RefPowerConsume2_w_nan, DataCountMat_RefPowerConsume2 = detect_unknown_duplicated_zero_data_for_faciilty(rawRefPowerConsume2, startday, lastday, DayPeriod, DataRes_96, isRecent)
+    RefPowerConsume1_Date, RefPowerConsume1_w_nan, DataCountMat_RefPowerConsume1 = detect_unknown_duplicated_zero_data_for_faciilty(rawRefPowerConsume1, startday, lastday, DayPeriod, DataRes_96)
+    RefPowerConsume2_Date, RefPowerConsume2_w_nan, DataCountMat_RefPowerConsume2 = detect_unknown_duplicated_zero_data_for_faciilty(rawRefPowerConsume2, startday, lastday, DayPeriod, DataRes_96)
 
-    RefStatus1_Date, RefStatus1_w_nan, DataCountMat_RefStatus1 = detect_unknown_duplicated_zero_data_for_faciilty(rawRefStatus1, startday, lastday, DayPeriod, DataRes_96, isRecent)
-    RefStatus2_Date, RefStatus2_w_nan, DataCountMat_RefStatus2 = detect_unknown_duplicated_zero_data_for_faciilty(rawRefStatus2, startday, lastday, DayPeriod, DataRes_96, isRecent)
+    RefStatus1_Date, RefStatus1_w_nan, DataCountMat_RefStatus1 = detect_unknown_duplicated_zero_data_for_faciilty(rawRefStatus1, startday, lastday, DayPeriod, DataRes_96)
+    RefStatus2_Date, RefStatus2_w_nan, DataCountMat_RefStatus2 = detect_unknown_duplicated_zero_data_for_faciilty(rawRefStatus2, startday, lastday, DayPeriod, DataRes_96)
 
     ##############################################################################################
     ## Refrigerator power data for 2019 and 2020 is missing, so 2018 data is used instead
     DayPeriod_2018 = (datetime.date(2018,12,31) - datetime.date(2018,1,1)).days + 1
 
-    RefPowerConsume1_2018_Date, RefPowerConsume1_2018_w_nan, DataCountMat_RefPowerConsume1_2018 = detect_unknown_duplicated_zero_data_for_faciilty(rawRefPowerConsume1_2018, datetime.date(2018,1,1), datetime.date(2018,12,31), DayPeriod_2018, DataRes_96, isRecent)
-    RefPowerConsume2_2018_Date, RefPowerConsume2_2018_w_nan, DataCountMat_RefPowerConsume2_2018 = detect_unknown_duplicated_zero_data_for_faciilty(rawRefPowerConsume2_2018, datetime.date(2018,1,1), datetime.date(2018,12,31), DayPeriod_2018, DataRes_96, isRecent)
+    RefPowerConsume1_2018_Date, RefPowerConsume1_2018_w_nan, DataCountMat_RefPowerConsume1_2018 = detect_unknown_duplicated_zero_data_for_faciilty(rawRefPowerConsume1_2018, datetime.date(2018,1,1), datetime.date(2018,12,31), DayPeriod_2018, DataRes_96)
+    RefPowerConsume2_2018_Date, RefPowerConsume2_2018_w_nan, DataCountMat_RefPowerConsume2_2018 = detect_unknown_duplicated_zero_data_for_faciilty(rawRefPowerConsume2_2018, datetime.date(2018,1,1), datetime.date(2018,12,31), DayPeriod_2018, DataRes_96)
 
-    RefStatus1_Date_2018, RefStatus1_2018_w_nan, DataCountMat_RefStatus1_2018 = detect_unknown_duplicated_zero_data_for_faciilty(rawRefStatus1_2018, datetime.date(2018,1,1), datetime.date(2018,12,31), DayPeriod_2018, DataRes_96, isRecent)
-    RefStatus2_2018_Date, RefStatus2_2018_w_nan, DataCountMat_RefStatus2_2018 = detect_unknown_duplicated_zero_data_for_faciilty(rawRefStatus2_2018, datetime.date(2018,1,1), datetime.date(2018,12,31), DayPeriod_2018, DataRes_96, isRecent)
+    RefStatus1_Date_2018, RefStatus1_2018_w_nan, DataCountMat_RefStatus1_2018 = detect_unknown_duplicated_zero_data_for_faciilty(rawRefStatus1_2018, datetime.date(2018,1,1), datetime.date(2018,12,31), DayPeriod_2018, DataRes_96)
+    RefStatus2_2018_Date, RefStatus2_2018_w_nan, DataCountMat_RefStatus2_2018 = detect_unknown_duplicated_zero_data_for_faciilty(rawRefStatus2_2018, datetime.date(2018,1,1), datetime.date(2018,12,31), DayPeriod_2018, DataRes_96)
 
     ################# Using the power Consumption of Refrigerator in 2018 instead of 2020 #################
     #### Calculate power consumption
@@ -879,8 +875,7 @@ if __name__ == "__main__" :
 
     ############################################################################################################
     #################### Prediction for the Degree of Daily Deicing ############################################
-    ## Computed and inserted into the DB every day between 21:00 and 21:15
-
+    ## Computed and inserted into the DB every day between 21:00 and 21:15
     if (now.hour == 21 and (now.minute > 0 or now.minute < 16)) or Init:
         print('************ (Start) The Degree of Daily Deicing is being predicted!! ************')
@@ -888,7 +883,7 @@ if __name__ == "__main__" :
         DailyDeicingAmount_kWh = []
         idx = 0
 
-        if isRecent and now.hour < 21: ## the expected next-day deicing amount is updated at 21:00
+        if now.hour < 21: ## the expected next-day deicing amount is updated at 21:00
             _DayPeriod = DayPeriod-1
         else:
             _DayPeriod = DayPeriod
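Taken together, the two hunks above gate the daily deicing prediction to the 21:00-21:15 slot and shorten the usable history by one day before that update. A condensed sketch of that gating; the `now.minute < 16` form follows the comment's intent, while the patch itself keeps the original `(now.minute > 0 or now.minute < 16)` condition, and the function name and parameters are hypothetical:

```python
import datetime

def deicing_schedule(day_period, init=False, now=None):
    """Sketch: run the daily prediction once per day in the 21:00-21:15 slot
    (or when init forces a run); before 21:00 the last, still-partial day is
    excluded from the usable history."""
    now = now or datetime.datetime.now()
    run_prediction = (now.hour == 21 and now.minute < 16) or init
    usable_days = day_period - 1 if now.hour < 21 else day_period
    return run_prediction, usable_days
```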
@@ -1347,15 +1342,15 @@ if __name__ == "__main__" :
     #### Load the user-defined data
     ### Keep checking
 
-    while True:
-        now_ = datetime.datetime.now().now()
-        ## sleep: run only at seconds 2, 6, 10, ... of each minute
-        if now_.second%4==2:
-            break
-        time.sleep(1)
+    #while True:
+    #    now_ = datetime.datetime.now().now()
+    #    ## sleep: run only at seconds 2, 6, 10, ... of each minute
+    #    if now_.second%4==2:
+    #        break
+    #    time.sleep(1)
 
-    #time.sleep(2)
-    #print('start time : ', now_)
+    time.sleep(1)
+
     # MSSQL Access
     conn = pymssql.connect(host = targetDBIP, user = targetDBUserID, password = targetDBUserPW, database = targetDBName, autocommit=True)
     # Create Cursor from Connection