new n first
This commit is contained in:
1
.gitignore
vendored
Normal file
1
.gitignore
vendored
Normal file
@@ -0,0 +1 @@
|
||||
__pycache__
|
||||
146
dash-power.py
Normal file
146
dash-power.py
Normal file
@@ -0,0 +1,146 @@
|
||||
import dash
|
||||
from dash import dcc, html, Input, Output, State
|
||||
import plotly.express as px
|
||||
import sqlite3
|
||||
import pandas as pd
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
# Create a Dash app
|
||||
# The Dash application instance; served by `app.run` at the bottom of the file.
app = dash.Dash(__name__)
|
||||
|
||||
# Function to fetch data from the SQLite database
|
||||
def fetch_data(db_path='power_data.db'):
    """Load the building-total and per-room tables from the SQLite database.

    Parameters:
        db_path: path to the SQLite file (defaults to the database written
            by the companion polling script).

    Returns:
        Tuple of two DataFrames: (building_totals, room_breakdown).
    """
    conn = sqlite3.connect(db_path)
    try:
        building_totals = pd.read_sql_query('SELECT * FROM building_totals', conn)
        room_breakdown = pd.read_sql_query('SELECT * FROM room_breakdown', conn)
    finally:
        # Close even when a query raises — this is called on every dashboard
        # refresh, so a leaked handle per failure would accumulate quickly.
        conn.close()
    return building_totals, room_breakdown
|
||||
|
||||
# Function to calculate kWh usage
|
||||
def calculate_kwh(data, power_column):
    """Derive energy columns from a time series of power readings.

    Adds to a copy of *data* (sorted by timestamp):
      - 'time_diff': hours elapsed since the previous sample (0 for the first),
      - 'kwh': energy for that interval (power reading * hours),
      - 'cumulative_kwh': running total of 'kwh'.

    Parameters:
        data: DataFrame with a 'timestamp' column of ISO-8601 strings and
            the named power column.
        power_column: column holding the power readings.

    Returns:
        The augmented copy of the DataFrame.
    """
    data = data.copy()  # Create a copy of the DataFrame to avoid SettingWithCopyWarning
    data.loc[:, 'timestamp'] = pd.to_datetime(data['timestamp'], format='ISO8601')
    data = data.sort_values('timestamp')
    # Bug fix: diff() yields NaN for the first row, which previously made the
    # first 'kwh' and 'cumulative_kwh' values NaN; treat it as 0 elapsed hours.
    data.loc[:, 'time_diff'] = (
        data['timestamp'].diff().dt.total_seconds().fillna(0) / 3600  # Convert to hours
    )
    data.loc[:, 'kwh'] = data[power_column] * data['time_diff']
    data.loc[:, 'cumulative_kwh'] = data['kwh'].cumsum()
    return data
|
||||
|
||||
# Define the layout of the dashboard
|
||||
# Dashboard layout: a time-range dropdown feeding the building-totals graph,
# plus a room dropdown feeding the per-room graph.
_time_range_options = [
    {'label': label, 'value': hours}
    for label, hours in [
        ('Last 6 Hours', 6),
        ('Last 12 Hours', 12),
        ('Last 1 Day', 24),
        ('Last 2 Days', 48),
        ('Last 1 Week', 168),
        ('Last 1 Month', 720),
        ('Last 2 Months', 1440),
        ('Last 1 Year', 8760),
    ]
]

app.layout = html.Div([
    html.H1("Power and Current Data Dashboard"),
    dcc.Dropdown(
        id='time-range-selector',
        options=_time_range_options,
        value=6,
        clearable=False,
    ),
    dcc.Graph(id='building-totals-graph'),
    dcc.Dropdown(
        id='room-selector',
        # Populate the picker from the rooms present in the DB at startup.
        options=[{'label': room, 'value': room}
                 for room in fetch_data()[1]['room_number'].unique()],
        value=None,
        placeholder="Select a room",
    ),
    dcc.Graph(id='room-graph'),
])
|
||||
|
||||
# Define callbacks to update the graphs
|
||||
@app.callback(
    Output('building-totals-graph', 'figure'),
    Input('time-range-selector', 'value'),
    Input('building-totals-graph', 'relayoutData')
)
def update_building_totals_graph(time_range, relayoutData):
    """Redraw the building-totals graph for the selected time window.

    time_range is the window length in hours; relayoutData carries any
    manual zoom range the user applied on the graph itself.
    """
    totals, _ = fetch_data()
    totals = calculate_kwh(totals, 'total_power')

    # Restrict to the selected window ending now.
    now = datetime.now()
    window = totals[(totals['timestamp'] >= now - timedelta(hours=time_range))
                    & (totals['timestamp'] <= now)]

    # A manual zoom narrows the window further.
    if relayoutData and 'xaxis.range[0]' in relayoutData:
        lo = pd.to_datetime(relayoutData['xaxis.range[0]'])
        hi = pd.to_datetime(relayoutData['xaxis.range[1]'])
        window = window[(window['timestamp'] >= lo) & (window['timestamp'] <= hi)]

    newest = window.iloc[-1] if not window.empty else None

    fig = px.line(window, x='timestamp',
                  y=['total_current', 'total_power', 'cumulative_kwh'],
                  title='Building Totals',
                  labels={'value': 'Value', 'variable': 'Metric'})

    # Show the most recent reading in each trace's legend entry.
    if newest is not None:
        for column, legend in (
            ('total_current', f"Total Current: {newest['total_current']} A"),
            ('total_power', f"Total Power: {newest['total_power']} kW"),
            ('cumulative_kwh', f"Cumulative kWh: {round(newest['cumulative_kwh'], 3)} kWh"),
        ):
            fig.update_traces(name=legend, selector=dict(name=column))

    return fig
|
||||
|
||||
@app.callback(
    Output('room-graph', 'figure'),
    Input('time-range-selector', 'value'),
    Input('room-selector', 'value'),
    Input('room-graph', 'relayoutData')
)
def update_room_graph(time_range, selected_room, relayoutData):
    """Redraw the per-room graph for the chosen room and time window."""
    if not selected_room:
        # No room picked yet: show a placeholder figure.
        return px.line(title='Select a room to display its graph')

    _, breakdown = fetch_data()
    room_data = breakdown[breakdown['room_number'] == selected_room]
    room_data = calculate_kwh(room_data, 'power')

    # Restrict to the selected window ending now.
    now = datetime.now()
    window = room_data[(room_data['timestamp'] >= now - timedelta(hours=time_range))
                       & (room_data['timestamp'] <= now)]

    # A manual zoom narrows the window further.
    if relayoutData and 'xaxis.range[0]' in relayoutData:
        lo = pd.to_datetime(relayoutData['xaxis.range[0]'])
        hi = pd.to_datetime(relayoutData['xaxis.range[1]'])
        window = window[(window['timestamp'] >= lo) & (window['timestamp'] <= hi)]

    newest = window.iloc[-1] if not window.empty else None

    fig = px.line(window, x='timestamp', y=['current', 'power', 'cumulative_kwh'],
                  title=f'Room {selected_room}',
                  labels={'value': 'Value', 'variable': 'Metric'})

    # Show the most recent reading in each trace's legend entry.
    if newest is not None:
        for column, legend in (
            ('current', f"Current: {newest['current']} A"),
            ('power', f"Power: {newest['power']} kW"),
            ('cumulative_kwh', f"Cumulative kWh: {round(newest['cumulative_kwh'], 3)} kWh"),
        ):
            fig.update_traces(name=legend, selector=dict(name=column))

    return fig
|
||||
|
||||
# Run the app when executed as a script.
if __name__ == '__main__':
    # Bind to all interfaces so the dashboard is reachable from other hosts.
    app.run(host='0.0.0.0', port=8050, debug=True)
|
||||
194
get_all_by_room.py
Normal file
194
get_all_by_room.py
Normal file
@@ -0,0 +1,194 @@
|
||||
import requests
|
||||
from collections import defaultdict
|
||||
import argparse
|
||||
import sqlite3
|
||||
from datetime import datetime
|
||||
import time
|
||||
|
||||
# Configuration.
# NOTE(review): API_KEY and LIBRENMS_IP are literal placeholders — they must
# be replaced with a real LibreNMS token and host before this script can work.
API_KEY = '{api-key}'
LIBRENMS_IP = '{librenms_ip}'
# Every API request authenticates via this header.
HEADERS = {'X-Auth-Token': API_KEY}
|
||||
|
||||
def create_db_connection(db_file):
    """Open a SQLite connection to *db_file*.

    Returns the connection object, or None when sqlite3 raises — the error
    is printed rather than propagated, matching the script's best-effort style.
    """
    try:
        return sqlite3.connect(db_file)
    except sqlite3.Error as e:
        print(e)
        return None
|
||||
|
||||
def create_tables(conn):
    """Create the building_totals and room_breakdown tables if absent.

    Errors are printed, not raised, so a transient DB problem does not
    abort the caller.
    """
    ddl_statements = (
        '''
            CREATE TABLE IF NOT EXISTS building_totals (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                total_current REAL,
                total_power REAL,
                timestamp TEXT
            )
        ''',
        '''
            CREATE TABLE IF NOT EXISTS room_breakdown (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                room_number TEXT,
                current REAL,
                power REAL,
                timestamp TEXT
            )
        ''',
    )
    try:
        cursor = conn.cursor()
        for statement in ddl_statements:
            cursor.execute(statement)
        conn.commit()
    except sqlite3.Error as e:
        print(e)
|
||||
|
||||
def insert_building_total(conn, total_current, total_power):
    """Append one building-wide sample, rounded to 3 dp and stamped with now.

    sqlite3 errors are printed rather than raised.
    """
    row = (round(total_current, 3), round(total_power, 3), datetime.now().isoformat())
    try:
        conn.execute(
            'INSERT INTO building_totals (total_current, total_power, timestamp) VALUES (?, ?, ?)',
            row,
        )
        conn.commit()
    except sqlite3.Error as e:
        print(e)
|
||||
|
||||
def insert_room_breakdown(conn, room_number, current, power):
    """Append one per-room sample, rounded to 3 dp and stamped with now.

    sqlite3 errors are printed rather than raised.
    """
    row = (room_number, round(current, 3), round(power, 3), datetime.now().isoformat())
    try:
        conn.execute(
            'INSERT INTO room_breakdown (room_number, current, power, timestamp) VALUES (?, ?, ?, ?)',
            row,
        )
        conn.commit()
    except sqlite3.Error as e:
        print(e)
|
||||
|
||||
def get_device_ids(debug=False):
    """Return the device IDs of all 'power'-type devices known to LibreNMS.

    Raises Exception when the API call does not return HTTP 200.
    """
    response = requests.get(f'http://{LIBRENMS_IP}/api/v0/devices', headers=HEADERS, verify=False)
    if response.status_code != 200:
        raise Exception(f"Failed to fetch devices: {response.status_code}")
    devices = response.json().get('devices', [])
    if debug:
        print(f"Devices: {devices}")  # Debugging statement
    return [device['device_id'] for device in devices if device.get('type') == 'power']
|
||||
|
||||
def get_device_location(device_id, debug=False):
    """Return the room number parsed from a device's LibreNMS location.

    The room is the text after the last comma in the location string;
    locations without a comma are reported as 'Unknown'.

    Raises Exception when the API call does not return HTTP 200.
    """
    response = requests.get(f'http://{LIBRENMS_IP}/api/v0/devices/{device_id}', headers=HEADERS, verify=False)
    if response.status_code != 200:
        raise Exception(f"Failed to fetch location for device {device_id}: {response.status_code}")
    location = response.json().get('devices', [{}])[0].get('location', '')
    if debug:
        print(f"Location for device {device_id}: {location}")  # Debugging statement
    # Extract room number from location
    return location.split(',')[-1].strip() if ',' in location else 'Unknown'
|
||||
|
||||
def get_sensor_ids(device_id, sensor_group, debug=False):
    """Return the IDs of the sensors we aggregate for one device.

    sensor_group is 'device_current' or 'device_power'; sensors are matched
    by their description strings (presumably selecting whole-feed phase and
    total sensors for the PDU models in use — confirm against the hardware).
    Any other group yields an empty list.

    Raises Exception when the API call does not return HTTP 200.
    """
    response = requests.get(
        f'http://{LIBRENMS_IP}/api/v0/devices/{device_id}/health/{sensor_group}',
        headers=HEADERS, verify=False)
    if response.status_code != 200:
        raise Exception(f"Failed to fetch sensors for device {device_id}: {response.status_code}")
    graphs = response.json().get('graphs', [])
    if debug:
        print(f"Graphs for device {device_id}: {graphs}")  # Debugging statement

    # Descriptions identifying the sensors of interest for each group.
    wanted = {
        'device_current': ["Input Phase 1.1", "Input Phase 1.2", "Input Phase 1.3", "Phase 1"],
        'device_power': ["Active power #1", "Total power"],
    }.get(sensor_group, [])
    # Bug fix: the original left relevant_sensors undefined for any other
    # sensor_group, raising UnboundLocalError; now that case returns [].
    return [sensor['sensor_id'] for sensor in graphs if sensor['desc'] in wanted]
|
||||
|
||||
def get_sensor_value(device_id, sensor_id, sensor_group, debug=False):
    """Return the current reading of one sensor.

    Current readings whose description marks them as nLogic PDU phases
    ("Input Phase 1.x") are divided by 100 (per the original author's note
    about that hardware's scaling).

    Raises Exception when the API call does not return HTTP 200.
    """
    response = requests.get(
        f'http://{LIBRENMS_IP}/api/v0/devices/{device_id}/health/{sensor_group}/{sensor_id}',
        headers=HEADERS, verify=False)
    if response.status_code != 200:
        raise Exception(f"Failed to fetch sensor value for sensor {sensor_id}: {response.status_code}")
    sensor_data = response.json()
    if debug:
        print(f"Sensor data for device {device_id}, sensor {sensor_id}: {sensor_data}")  # Debugging statement
    graph = sensor_data['graphs'][0]
    value = graph.get('sensor_current', 0)
    # Divide by 100 if the sensor is from an nLogic PDU and is a current sensor
    if sensor_group == 'device_current' and graph.get('sensor_descr', '') in (
            "Input Phase 1.1", "Input Phase 1.2", "Input Phase 1.3"):
        value /= 100
    return value
|
||||
|
||||
def main(debug=False):
    """Poll LibreNMS once: aggregate current/power and persist a snapshot.

    Sums current and power across all power-type devices, both building-wide
    and per room, prints the results, and appends them to the SQLite
    database. All exceptions are printed (not raised) so the caller's
    polling loop keeps running.
    """
    conn = None
    try:
        device_ids = get_device_ids(debug)
        total_current = 0
        total_power_watts = 0
        room_current = defaultdict(float)
        room_power = defaultdict(float)

        # Create a SQLite database connection
        conn = create_db_connection('power_data.db')
        create_tables(conn)

        for device_id in device_ids:
            room_number = get_device_location(device_id, debug)

            # Fetch and sum current values
            for sensor_id in get_sensor_ids(device_id, 'device_current', debug):
                sensor_value = get_sensor_value(device_id, sensor_id, 'device_current', debug)
                total_current += sensor_value
                room_current[room_number] += sensor_value

            # Fetch and sum power values
            for sensor_id in get_sensor_ids(device_id, 'device_power', debug):
                sensor_value = get_sensor_value(device_id, sensor_id, 'device_power', debug)
                total_power_watts += sensor_value
                room_power[room_number] += sensor_value

        total_power_kw = total_power_watts / 1000  # Convert watts to kilowatts
        print(f"Total Current: {round(total_current, 3)} A")
        print(f"Total Power: {round(total_power_kw, 3)} kW")

        # Insert building total data into the database
        insert_building_total(conn, total_current, total_power_kw)

        print("\nBreakdown by Room:")
        for room, current in room_current.items():
            power_kw = room_power[room] / 1000  # Convert watts to kilowatts
            print(f"Room {room}: Current = {round(current, 3)} A, Power = {round(power_kw, 3)} kW")

            # Insert room breakdown data into the database
            insert_room_breakdown(conn, room, current, power_kw)
    except Exception as e:
        print(str(e))
    finally:
        # Bug fix: the original only closed the connection on the success
        # path, leaking it whenever an API call raised mid-run.
        if conn is not None:
            conn.close()
|
||||
|
||||
if __name__ == '__main__':
    arg_parser = argparse.ArgumentParser(
        description='Fetch and display power and current data from LibreNMS.')
    arg_parser.add_argument('--debug', action='store_true', help='Enable debug output')
    args = arg_parser.parse_args()

    # Poll forever, taking one snapshot every five minutes.
    while True:
        main(debug=args.debug)
        time.sleep(300)  # Wait for 5 minutes before the next run
|
||||
BIN
power_data.db
Normal file
BIN
power_data.db
Normal file
Binary file not shown.
Reference in New Issue
Block a user