Change file structure
This commit is contained in:
114
Library/Influxclient.py
Normal file
114
Library/Influxclient.py
Normal file
@@ -0,0 +1,114 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
Created on Mon Oct 17 13:45:38 2022
|
||||
|
||||
@author: Daniel Häfliger
|
||||
"""
|
||||
|
||||
import pandas as pd
|
||||
import math
|
||||
import requests, json
|
||||
from datetime import datetime, date, time
|
||||
|
||||
# Encode a date and time cyclically on the unit circle:
# [year_real, year_imag, day_real, day_imag]
def Convert_Date(atime):
    """Return [sin, cos] pairs for the day-of-year and time-of-day angles.

    The day of the year and the second of the day are each mapped onto a
    full 2*pi revolution so that e.g. 23:59 and 00:01 encode as neighbours.
    """
    year_angle = atime.timetuple().tm_yday / 365 * 2 * math.pi
    seconds_of_day = atime.hour * 3600 + atime.minute * 60 + atime.second
    day_angle = seconds_of_day / (24 * 3600) * 2 * math.pi
    return [math.sin(year_angle), math.cos(year_angle),
            math.sin(day_angle), math.cos(day_angle)]
|
||||
|
||||
|
||||
# Return a list of values of a specified measurement of an influxDB
def Get_Value(client, bucket, atime, measurement, datapoint):
    """Read all values of one field of one measurement from influxDB.

    Parameters
    ----------
    client : influxdb_client.InfluxDBClient (must expose ``org``)
    bucket : str, influx bucket name
    atime : str, Flux duration literal such as "10d" — the lookback window
    measurement : str, the _measurement to filter on
    datapoint : str, the _field to read

    Returns
    -------
    list of raw record values, in query order.
    """
    query_api = client.query_api()
    # Build the Flux source with explicit f-string pieces instead of the
    # original backslash line-continuations, which embedded the source file's
    # indentation into the query text.
    # NOTE(review): values are interpolated directly into the Flux source;
    # they are assumed to come from the trusted config.xml, not user input.
    query = (
        f'from(bucket:"{bucket}")'
        f' |> range(start: -{atime})'
        f' |> filter(fn:(r) => r._field == "{datapoint}" )'
        f' |> filter(fn:(r) => r._measurement == "{measurement}" )'
    )
    result = query_api.query(org=client.org, query=query)
    # Flatten the table/record structure returned by the influx client.
    return [record.get_value() for table in result for record in table.records]
|
||||
|
||||
|
||||
# Return the timestamps of a specified measurement of an influxDB,
# together with their cyclical encoding from Convert_Date()
def Get_Time(client, bucket, atime, measurement, datapoint):
    """Read all timestamps of one field of one measurement from influxDB.

    Parameters
    ----------
    client : influxdb_client.InfluxDBClient (must expose ``org``)
    bucket : str, influx bucket name
    atime : str, Flux duration literal such as "10d" — the lookback window
    measurement : str, the _measurement to filter on
    datapoint : str, the _field whose timestamps are read

    Returns
    -------
    [y_r, y_i, d_r, d_i, timestamps] — the first four lists are the
    per-timestamp components produced by Convert_Date, the fifth is the
    raw timestamp list.
    """
    query_api = client.query_api()
    # Same Flux construction as Get_Value (f-string pieces instead of the
    # original backslash continuations that leaked indentation into the query).
    query = (
        f'from(bucket:"{bucket}")'
        f' |> range(start: -{atime})'
        f' |> filter(fn:(r) => r._field == "{datapoint}" )'
        f' |> filter(fn:(r) => r._measurement == "{measurement}" )'
    )
    result = query_api.query(org=client.org, query=query)
    timestamps = [record.get_time() for table in result for record in table.records]

    # Transpose the per-timestamp [y_r, y_i, d_r, d_i] rows into four columns
    # (the original appended to four lists element by element).
    if timestamps:
        encoded = [Convert_Date(t) for t in timestamps]
        a, b, c, d = (list(col) for col in zip(*encoded))
    else:
        # No data in the window: keep the original [[], [], [], [], []] shape.
        a, b, c, d = [], [], [], []
    return [a, b, c, d, timestamps]
|
||||
|
||||
|
||||
|
||||
|
||||
# Need duration in hours, interval in minutes
# Returns array with complex time, timestamp and prediction
# Works only for max 3 days because the api returns values only until then
def GetPrediction(duration, intervall, datapoint):
    """Fetch a weatherapi.com forecast and sample it every `intervall` minutes.

    Reads the api key and place from config.xml, requests a 3-day hourly
    forecast, then walks forward from "now" in `intervall`-minute steps and
    picks the forecast value of the hour each step falls into.

    Parameters
    ----------
    duration : number of hours to predict ahead (max 72 — api limit)
    intervall : sampling interval in minutes
    datapoint : key of the hourly forecast value to extract, e.g. "temp_c"

    Returns
    -------
    [y_r, y_i, d_r, d_i, timestamps, values] — the first four lists are the
    cyclical time encoding from Convert_Date for each sampled timestamp.
    """
    xfile = pd.read_xml('config.xml')

    apikey = (xfile.loc[xfile['category'] == 'influx'].weatherapitoken[0])
    place = (xfile.loc[xfile['category'] == 'influx'].place[0])
    dats = requests.get("https://api.weatherapi.com/v1/forecast.json?key="+apikey+"&q="+place+"&days=3&aqi=yes&alerts=no")
    dats = json.loads(dats.content)

    a = []
    b = []
    c = []
    d = []
    e = []  # sampled timestamps
    f = []  # forecast values
    now = datetime.now()
    daycounter = 0  # index into dats["forecast"]["forecastday"]
    lasthour = now.hour
    for i in range(0, int(duration*60/intervall)):

        # Step the clock forward by one interval (via epoch seconds).
        now = datetime.timestamp(now) + (intervall*60)
        now = datetime.fromtimestamp(now)

        # BUGFIX: advance to the next forecast day *before* reading the
        # value. The original incremented daycounter after the lookup, so
        # the first sample past midnight used the previous day's hour 0.
        if lasthour > now.hour:
            daycounter += 1
        lasthour = now.hour

        x = Convert_Date(now)
        a.append(x[0])
        b.append(x[1])
        c.append(x[2])
        d.append(x[3])
        e.append(now)
        bc = dats["forecast"]["forecastday"][daycounter]["hour"][now.hour][datapoint]
        f.append(bc)

    return [a, b, c, d, e, f]
|
||||
77
Library/Influxdataframe.py
Normal file
77
Library/Influxdataframe.py
Normal file
@@ -0,0 +1,77 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
Created on Fri Dec 2 07:46:09 2022
|
||||
|
||||
@author: Daniel Häfliger
|
||||
"""
|
||||
import pandas as pd
|
||||
import Influxclient as I
|
||||
import influxdb_client
|
||||
|
||||
|
||||
# Returns values of an influxDB as they were specified in the config.xml
# as a Pandas Dataframe
def Get_Df():
    """Build a training DataFrame from the influxDB described in config.xml.

    Columns: y_r, y_i, d_r, d_i (cyclical time encoding of each sample's
    timestamp), one column per configured feature, and one column for the
    target variable.
    """
    xfile = pd.read_xml('config.xml')
    influx = xfile.loc[xfile['category'] == 'influx']

    client = influxdb_client.InfluxDBClient(
        url=influx.url[0],
        token=influx.influxtoken[0],
        org=influx.organisation[0]
    )

    bucket = influx.bucket[0]
    measurement = influx.measurement[0]
    valuessince = influx.time[0]

    # Features are stored as one space-separated string in the <f> element;
    # the target variable name lives in <p1>.
    feature = xfile.loc[xfile['category'] == 'feature'].f[1].split()
    pred = xfile.loc[xfile['category'] == 'pred'].p1[2]

    # Query the timestamps once and reuse all four encoded components —
    # the original issued four identical influx queries here.
    times = I.Get_Time(client, bucket, valuessince, measurement, feature[0])
    data = {
        "y_r": times[0],
        "y_i": times[1],
        "d_r": times[2],
        "d_i": times[3],
    }

    # One value column per feature, plus the target column.
    for feat in feature:
        data[feat] = I.Get_Value(client, bucket, valuessince, measurement, feat)
    data[pred] = I.Get_Value(client, bucket, valuessince, measurement, pred)

    return pd.DataFrame(data)
|
||||
|
||||
|
||||
|
||||
# Shifts the target variable and adds [shift_number] columns with the shifted
# values, then returns the new DataFrame
def ShiftTarget(df, shift_number, targetvariable):
    """Append lagged copies of `targetvariable` (lags 1..shift_number).

    The original target column is renamed to ``<targetvariable>0``; each
    appended column ``<targetvariable>k`` holds the target shifted down by
    k rows (leading rows become NaN). The input frame is not modified.
    """
    base = targetvariable + str(0)
    result = df.rename(columns={targetvariable: base})
    lagged = result[[base]]
    for lag in range(1, shift_number + 1):
        # Shift by one more row and relabel the column for this lag.
        lagged = lagged.shift(1).rename(
            columns={targetvariable + str(lag - 1): targetvariable + str(lag)})
        result = pd.concat([result, lagged], axis=1)
    return result
|
||||
30
Library/config.xml
Normal file
30
Library/config.xml
Normal file
@@ -0,0 +1,30 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<config>
|
||||
|
||||
<variable category="influx">
|
||||
<organisation>enter influx organisation name</organisation>
|
||||
<url>enter influx url</url>
|
||||
<bucket>enter influx bucket name</bucket>
|
||||
<measurement>enter influx measurement name</measurement>
|
||||
<influxtoken>enter a influx-read token for specific bucket</influxtoken>
|
||||
<weatherapitoken>enter a weatherapitoken for weatherapi.com </weatherapitoken>
|
||||
<place>enter place where weatherapi should predict</place>
|
||||
<interval>5M</interval>
|
||||
<time>10d</time>
|
||||
</variable>
|
||||
|
||||
<!-- Enter features here as an array, separated with spaces -->
|
||||
<!-- E.g. <f>feature1 feature2 feature3</f> -->
|
||||
<variable category="feature">
|
||||
<f>P_AT_5M_0 </f>
|
||||
</variable>
|
||||
|
||||
|
||||
<!-- Enter the target variable here as an array, separated with spaces -->
|
||||
<!-- Attention! Currently just one target variable is supported! -->
|
||||
<variable category="pred">
|
||||
<p1>W_AT_5M_0</p1>
|
||||
</variable>
|
||||
|
||||
|
||||
</config>
|
||||
100
Library/example.py
Normal file
100
Library/example.py
Normal file
@@ -0,0 +1,100 @@
|
||||
# -*- coding: utf-8 -*-
"""
Created on Fri Dec 9 08:51:35 2022

@author: Daniel Häfliger

This file shows an example of how to use Influxclient and Influxdataframe.
The example tries to fit a prediction of the temperature of a specific
measurement, using time and a given temperature forecast as features.

There is no data cleaning etc. in this example; it only shows how the
library works.

Be aware that you need to add an influx token, a weatherapi token and
available data to the config file. Otherwise this example — like the
library itself — will not work.
"""

# -*- coding: utf-8 -*-
"""
Created on Fri Dec 2 08:51:19 2022

@author: Daniel Häfliger
"""
import pandas as pd
import Influxdataframe as Idf
import Influxclient as I
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import MinMaxScaler
from sklearn.metrics import r2_score
from sklearn.neural_network import MLPRegressor


# Get a dataframe from the influxDB with the values specified in config.xml
df = Idf.Get_Df()


# Do a train/test split and scale the data (fit the scaler on train only)
train_df, test_df = train_test_split(df, test_size=0.3, random_state=42)

scaler = MinMaxScaler()

train_df = pd.DataFrame(scaler.fit_transform(
    train_df), columns=train_df.columns, index=train_df.index)
test_df = pd.DataFrame(scaler.transform(
    test_df), columns=test_df.columns, index=test_df.index)

# NOTE(review): features and target are taken from test_df here; train_df is
# never used below. Presumably train_df was intended — confirm.
X = test_df[["y_r", "y_i", "d_r", "d_i", "P_AT_5M_0"]]
y = test_df[["W_AT_5M_0"]]

X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.2, random_state=42)


# Train a small neural net and print its r2 score on the held-out part
dt = MLPRegressor(activation='relu')
dt.fit(X_train, y_train)
print(r2_score(y_test, dt.predict(X_test)))


# Get a df for the current prediction with values from the weather api:
# 48 hours ahead, sampled every 5 minutes, hourly field "temp_c"
pred = I.GetPrediction(48, 5, "temp_c")
data = {

    "y_r": pred[0],
    "y_i": pred[1],
    "d_r": pred[2],
    "d_i": pred[3],
    # pred[5] holds the api forecast values. It also fills the target column
    # so the fitted scaler sees the expected number of columns; the target
    # values themselves are replaced by the model prediction below.
    "P_AT_5M_0": pred[5],
    "W_AT_5M_0": pred[5],
}
preddf = pd.DataFrame((data))


# Scale the api data with the scaler fitted on the training data
preddf = pd.DataFrame(scaler.transform(
    preddf), columns=preddf.columns, index=preddf.index)

preddf = preddf[["y_r", "y_i", "d_r", "d_i", "P_AT_5M_0"]]


# Predict temperature for the values returned by the api
ATpred = dt.predict(preddf)
Prog24 = pd.DataFrame(ATpred)

# Re-attach the prediction as a sixth column so the scaler can invert it
df_all_cols = pd.concat([preddf, Prog24], axis=1)

Prog24 = scaler.inverse_transform(df_all_cols)

# Rewrite the data to get it into a df for plotting
data = {
    # Column 5 of the inverse-transformed array is the predicted target —
    # it sits after the five feature columns. NOTE(review): this positional
    # index must be kept in sync with the column layout above.
    "W_AT_5M_0": Prog24[:, 5].tolist(),
    # pred[4] are the sampled timestamps returned by GetPrediction
    "Timestamp": pred[4],

}

prognose = pd.DataFrame(data)
prognose.plot(x='Timestamp', y='W_AT_5M_0')
|
||||
|
||||
|
||||
108
Library/example_timeseries.py
Normal file
108
Library/example_timeseries.py
Normal file
@@ -0,0 +1,108 @@
|
||||
# -*- coding: utf-8 -*-
"""
Created on Fri Dec 9 09:31:31 2022

@author: Daniel Häfliger


This file shows an example of how to use Influxclient and Influxdataframe.
The example tries to fit a prediction of the temperature of a specific
measurement, using time and a given temperature forecast as features,
with time-series forecasting on top of them (lagged target columns).

There is no data cleaning etc. in this example; it only shows how the
library works.

Be aware that you need to add an influx token, a weatherapi token and
available data to the config file. Otherwise this example — like the
library itself — will not work.

"""
import pandas as pd
import Influxdataframe as Idf
import Influxclient as I
import seaborn as sns
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import MinMaxScaler
from sklearn.metrics import r2_score
from sklearn.neural_network import MLPRegressor


# Name of the column to forecast
target_variable = "W_AT_5M_0"
# Forecast sampling interval in minutes
interval = 60
valuesince = 10
prediction_hours = 24

# Number of lagged target columns ShiftTarget will append
shift_number=4

df = Idf.Get_Df()

# Keep every 12th row — presumably downsampling the 5-minute influx data to
# the hourly `interval`; TODO confirm the source resolution.
df = df[::12]

pred =I.GetPrediction(prediction_hours, interval, "temp_c")


# Future rows from the weather api: time encoding plus the forecast values.
# The target column is left absent (NaN after concat) — the loop below
# fills it in step by step.
data = {

    "y_r": pred[0],
    "y_i": pred[1],
    "d_r": pred[2],
    "d_i": pred[3],

    "P_AT_5M_0": pred[5],
}


preddf = pd.DataFrame(data)

# Append the future rows below the historical data
df = pd.concat([df, preddf], axis=0).reset_index(drop=True)


# Add shift_number lagged copies of the target (renames target to target+"0")
df = Idf.ShiftTarget(df, shift_number, target_variable)



# Rows with NaN are the future rows (unknown target) and the first lag rows
df_predict = df.dropna()

train_df, test_df = train_test_split(df_predict, test_size=0.3, random_state=42)

scaler = MinMaxScaler()

train_df = pd.DataFrame(scaler.fit_transform(
    train_df), columns=train_df.columns, index=train_df.index)
test_df = pd.DataFrame(scaler.transform(
    test_df), columns=test_df.columns, index=test_df.index)

y = train_df[[target_variable+str(0)]]

# pop() removes the target column from X in place
X = train_df
X.pop(target_variable+str(0))

X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.2, random_state=42)

dt = MLPRegressor(activation='relu')
dt.fit(X_train, y_train)

print(r2_score(y_test, dt.predict(X_test)))



# Iteratively forecast the future rows: predict one row, write the value
# back into df, and propagate it into the lag columns of the next row.
for i in range(int(prediction_hours*60/interval)):
    # Take one future row (as a 1-row frame) in the scaler's column order
    preddf = pd.DataFrame(df.iloc[(int(df_predict.shape[0]+shift_number)-1+i)]).transpose()
    preddf = pd.DataFrame(scaler.transform(
        preddf), columns=preddf.columns, index=preddf.index)
    preddf.pop(target_variable+str(0))
    at = dt.predict(preddf)
    # NOTE(review): re-adding the target puts it at the *end* of preddf,
    # while the scaler was fitted with it in its original position —
    # inverse_transform and the positional read newval.at[0, 5] only line
    # up because all lag columns share the target's min/max; confirm the
    # column layout before changing features.
    preddf[target_variable+str(0)]=at
    newval = pd.DataFrame(scaler.inverse_transform(preddf))
    df.at[(int(df_predict.shape[0]+shift_number)-1+i), target_variable+str(0)] = newval.at[0, 5]

    # Shift the freshly predicted value into the lag columns of the next row
    for j in range(shift_number):
        df.at[int(df_predict.shape[0]+shift_number)+i, target_variable+str(j+1)] = df.at[int(df_predict.shape[0]+shift_number)-1+i, target_variable+str(j)]




# NOTE(review): positional args to sns.lineplot are deprecated in newer
# seaborn releases — works with the version this was written against.
sns.lineplot(df.index, df[target_variable+str(0)])
|
||||
|
||||
35
Library/xmlconifg.py
Normal file
35
Library/xmlconifg.py
Normal file
@@ -0,0 +1,35 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
Created on Sun Nov 13 19:33:47 2022
|
||||
|
||||
@author: Daniel Häfliger
|
||||
"""
|
||||
|
||||
import pandas as pd
|
||||
|
||||
def GetXmlConfig(xmlfile):
    """Placeholder for reading config values from `xmlfile` — not implemented."""
    return

# Ad-hoc smoke test: print every configured value from config.xml.
df = pd.read_xml('config.xml')
print(df.loc[df['category']=='influx'].organisation[0])

print(df.loc[df['category']=='influx'].url[0])
print(df.loc[df['category']=='influx'].bucket[0])
print(df.loc[df['category']=='influx'].measurement[0])
# BUGFIX: config.xml declares <influxtoken>/<weatherapitoken> in lower case,
# so the capitalised attribute names used before raised AttributeError.
print(df.loc[df['category']=='influx'].influxtoken[0])
print(df.loc[df['category']=='influx'].weatherapitoken[0])
print(df.loc[df['category']=='influx'].place[0])
print(df.loc[df['category']=='influx'].interval[0])
print(df.loc[df['category']=='influx'].time[0])


# BUGFIX: feature/pred values live in the <f> and <p1> elements — there is
# no <name> column in config.xml — matching how Influxdataframe.Get_Df
# reads them.
a=df.loc[df['category']=='feature'].f
print(df.loc[df['category']=='feature'].f[1])
print(df.loc[df['category']=='pred'].p1[2])


print(df.loc[df['category']=='pred'].p1[2])
|
||||
|
||||
Reference in New Issue
Block a user