auslieferung

This commit is contained in:
2026-02-10 12:10:35 +01:00
parent 0315122148
commit faf17a7f07
5 changed files with 245 additions and 79 deletions

View File

@@ -1,6 +1,6 @@
{
"W": "Werkstatt",
"P": "Produktion",
"A": "Allgemein",
"W": "Wolff",
"A": "Austinat",
"P": "Paulisch",
"_default": "Unbekannt"
}

Binary file not shown.

View File

@@ -7,8 +7,8 @@ mmp_logger.py
- Schreibt Messwerte in SQLite
- Fehlende Felder werden mit letztem Messwert ersetzt (filled + filled_fields werden protokolliert)
- Report als Markdown (und optional HTML/PDF) nach ./reports/
Projektidee: Alles in EINEM Verzeichnis.
- Outlets im Report in physischer Reihenfolge (port_index 1..N, wie von der MMP geliefert)
- Kostenstellen device-übergreifend aggregiert; nur Codes aus cost_centers.json (ohne _default)
"""
import argparse
@@ -23,6 +23,7 @@ from dataclasses import dataclass
from pathlib import Path
from typing import Dict, List, Optional, Tuple
# -------------------------
# Parsing / Prompt
# -------------------------
@@ -42,8 +43,6 @@ class DeviceCfg:
host: str
port: int
enabled: bool = True
username: Optional[str] = None # nicht genutzt (kein Login), bleibt für Zukunft
password: Optional[str] = None
# -------------------------
@@ -89,6 +88,8 @@ def db_connect(db_path: str) -> sqlite3.Connection:
return con
def db_init(con: sqlite3.Connection) -> None:
# Hinweis: wenn du schon eine alte DB hast, ist ein Schema-Migration nötig.
# Da du ohnehin "rm -f data/mmp.sqlite" nutzt, ist das hier ok.
con.executescript("""
CREATE TABLE IF NOT EXISTS device (
id INTEGER PRIMARY KEY,
@@ -99,15 +100,16 @@ def db_init(con: sqlite3.Connection) -> None:
last_seen_at TEXT
);
CREATE TABLE IF NOT EXISTS outlet (
CREATE TABLE IF NOT EXISTS outlet (
id INTEGER PRIMARY KEY,
device_id INTEGER NOT NULL,
port_index INTEGER NOT NULL,
outlet_name TEXT NOT NULL,
cost_code TEXT NOT NULL,
cost_name TEXT NOT NULL,
created_at TEXT NOT NULL,
last_seen_at TEXT,
UNIQUE(device_id, outlet_name),
UNIQUE(device_id, port_index),
FOREIGN KEY(device_id) REFERENCES device(id)
);
@@ -146,6 +148,7 @@ def db_init(con: sqlite3.Connection) -> None:
CREATE INDEX IF NOT EXISTS idx_reading_ts ON reading(ts);
CREATE INDEX IF NOT EXISTS idx_reading_outlet_ts ON reading(outlet_id, ts);
CREATE INDEX IF NOT EXISTS idx_outlet_device_port ON outlet(device_id, port_index);
""")
con.commit()
@@ -167,26 +170,37 @@ def get_or_create_device(con: sqlite3.Connection, d: DeviceCfg) -> int:
con.commit()
return int(cur.lastrowid)
def get_or_create_outlet(con: sqlite3.Connection, device_id: int, outlet_name: str,
def get_or_create_outlet(con: sqlite3.Connection, device_id: int, port_index: int, outlet_name: str,
cost_code: str, cost_name: str) -> int:
now = utc_now_iso()
cur = con.execute("SELECT id FROM outlet WHERE device_id=? AND outlet_name=?",
(device_id, outlet_name))
if port_index <= 0:
raise ValueError(f"port_index invalid: {port_index} for outlet_name={outlet_name!r}")
cur = con.execute("SELECT id FROM outlet WHERE device_id=? AND port_index=?",
(device_id, port_index))
row = cur.fetchone()
if row:
outlet_id = int(row[0])
# Name darf sich ändern -> immer aktualisieren
con.execute("""
UPDATE outlet
SET cost_code=?, cost_name=?, last_seen_at=?
SET outlet_name=?, cost_code=?, cost_name=?, last_seen_at=?
WHERE id=?
""", (cost_code, cost_name, now, outlet_id))
""", (outlet_name, cost_code, cost_name, now, outlet_id))
con.commit()
return outlet_id
cur = con.execute("""
INSERT INTO outlet(device_id,outlet_name,cost_code,cost_name,created_at,last_seen_at)
VALUES(?,?,?,?,?,?)
""", (device_id, outlet_name, cost_code, cost_name, now, now))
INSERT INTO outlet(device_id,port_index,outlet_name,cost_code,cost_name,created_at,last_seen_at)
VALUES(?,?,?,?,?,?,?)
""", (device_id, port_index, outlet_name, cost_code, cost_name, now, now))
con.commit()
return int(cur.lastrowid)
cur = con.execute("""
INSERT INTO outlet(device_id,port_index,outlet_name,cost_code,cost_name,created_at,last_seen_at)
VALUES(?,?,?,?,?,?,?)
""", (device_id, port_index, outlet_name, cost_code, cost_name, now, now))
con.commit()
return int(cur.lastrowid)
@@ -227,12 +241,6 @@ def cost_center_for(outlet_name: str, cc_map: Dict[str, str]) -> Tuple[str, str]
# -------------------------
def tcp_read_all(sock: socket.socket, read_timeout: int, stop_on_prompt: bool = True) -> bytes:
"""
Liest Daten bis:
- prompt '>' gesehen (optional)
- remote close (recv == b'')
- timeout
"""
sock.settimeout(read_timeout)
buf = bytearray()
while True:
@@ -260,15 +268,14 @@ def tcp_fetch_ostatus_raw(
stop_on_prompt: bool = True
) -> bytes:
"""
Ablauf (weil Session ggf. schließt):
1) connect
2) ENTER senden (wecken), Pause
3) kurz lesen (prompt_timeout) -> wenn kein Prompt: nochmal ENTER+Pause+kurz lesen
4) cmd senden, kurze Pause
5) lesen bis prompt/close/timeout
Bridge-Verhalten:
- Session evtl. geschlossen -> recv==b'' beachten
- Kein Login
- ENTER senden, ggf. Pause, ggf. nochmal ENTER
"""
with socket.create_connection((host, port), timeout=connect_timeout) as s:
pre = b""
if enter_first:
s.sendall(b"\r\n")
time.sleep(prompt_pause_sec)
@@ -276,12 +283,10 @@
pre += tcp_read_all(s, read_timeout=prompt_timeout, stop_on_prompt=stop_on_prompt)
if stop_on_prompt and (PROMPT_END not in pre):
# nochmal wecken
s.sendall(b"\r\n")
time.sleep(prompt_pause_sec)
pre += tcp_read_all(s, read_timeout=prompt_timeout, stop_on_prompt=stop_on_prompt)
# Kommando
s.sendall(cmd.encode("utf-8") + b"\r\n")
time.sleep(cmd_pause_sec)
@@ -290,23 +295,30 @@
# -------------------------
# Parse ostatus output
# Parse ostatus output (physische Reihenfolge)
# -------------------------
def parse_ostatus(text: str) -> List[dict]:
"""
Erwartet Tabellenzeilen mit '|'.
Beispiel:
| W Power1 | 0.0 A | 0.0 A | 230.3 V | 0 W | 4 VA | On |
Erwartet Datenzeilen mit '|'.
Wichtig: port_index wird als fortlaufender Index in der Tabellen-Reihenfolge vergeben.
Dadurch entspricht port_index der physischen Reihenfolge, wie sie die MMP ausgibt.
"""
rows: List[dict] = []
port_index = 0
for ln in text.splitlines():
if "|" not in ln:
continue
# Header / Trenner filtern
if "Outlet" in ln and "True RMS" in ln:
s = ln.strip()
# Header/Trenner filtern (wichtig: zweite Headerzeile!)
if ("Outlet" in ln and "True RMS" in ln):
continue
if set(ln.strip()) <= set("-| "):
if ("Name" in ln and "Current" in ln):
continue
if set(s) <= set("-| "):
continue
parts = [p.strip() for p in ln.split("|")]
@@ -314,14 +326,20 @@
if len(parts) < 7:
continue
# Schutz: falls doch Header-Schrott durchrutscht
if parts[0].strip().lower() in ("outlet", "name"):
continue
outlet_name = parts[0]
cur_s, peak_s, volt_s, power_s, va_s, state = parts[1:7]
def parse_num(s: str) -> Optional[float]:
m = NUM_RE.search(s.replace(",", "."))
def parse_num(val: str) -> Optional[float]:
m = NUM_RE.search(val.replace(",", "."))
return float(m.group(1)) if m else None
port_index += 1
rows.append({
"port_index": port_index,
"outlet_name": outlet_name,
"current_a": parse_num(cur_s),
"peak_a": parse_num(peak_s),
@@ -330,6 +348,7 @@
"va": parse_num(va_s),
"state": state if state else None
})
return rows
@@ -399,8 +418,10 @@ def poll_device(con: sqlite3.Connection, dev: DeviceCfg, cc_map: Dict[str, str],
for r in rows:
outlet_name = r["outlet_name"].strip()
port_index = int(r.get("port_index") or 0)
cost_code, cost_name = cost_center_for(outlet_name, cc_map)
outlet_id = get_or_create_outlet(con, device_id, outlet_name, cost_code, cost_name)
outlet_id = get_or_create_outlet(con, device_id, port_index, outlet_name, cost_code, cost_name)
last = last_reading_for_outlet(con, outlet_id)
r2, filled_flag, filled_fields_cnt = apply_fill(r, last)
@@ -432,7 +453,6 @@
""", (utc_now_iso(), outlets_received, outlets_filled, fields_filled, duration_ms, run_id))
con.commit()
# kleine Info für CLI
print(f"{dev.name}: OK outlets={outlets_received} filled_outlets={outlets_filled} filled_fields={fields_filled}")
except Exception as e:
@@ -486,8 +506,6 @@ def write_report_files(project_root: str, name: str, md_text: str, write_md: boo
html_path.write_text(html_full, encoding="utf-8")
if write_pdf:
# robust über pandoc (muss installiert sein)
# sudo apt-get install -y pandoc
if not write_md:
md_path.write_text(md_text, encoding="utf-8")
os.system(f"pandoc '{md_path}' -o '{pdf_path}'")
@@ -498,9 +516,6 @@
# -------------------------
def parse_period_args(args) -> Tuple[str, str, str]:
"""
Returns (from_iso, to_iso, suffix_name) in UTC.
"""
now = dt.datetime.now(dt.timezone.utc).replace(microsecond=0)
if args.last_days is not None:
@@ -525,10 +540,12 @@
return start.isoformat(), now.isoformat(), suffix
def report(con: sqlite3.Connection, from_iso: str, to_iso: str, device_name: Optional[str]) -> str:
def report(con: sqlite3.Connection, from_iso: str, to_iso: str, device_name: Optional[str], cc_map: Dict[str, str]) -> str:
"""
Markdown Report als String.
Energie: approx_Wh = Sum(power_w)*1h (bei stündlichem Poll).
Layout:
- Outlets gesamt: physische Reihenfolge (port_index ASC), kein device, kein cost_code
- Kostenstellen: device-übergreifend aggregiert, nur Codes aus cost_centers.json (ohne _default)
- Job-Statistik wie bisher
"""
params = [from_iso, to_iso]
dev_filter = ""
@@ -536,25 +553,69 @@ def report(con: sqlite3.Connection, from_iso: str, to_iso: str, device_name: Opt
dev_filter = "AND d.name = ?"
params.append(device_name)
q_cost = f"""
# ---- Outlets ----
q_outlets = f"""
SELECT
d.name AS device,
o.cost_code,
o.port_index,
o.outlet_name,
o.cost_name,
COUNT(*) AS samples,
SUM(CASE WHEN r.filled=1 THEN 1 ELSE 0 END) AS samples_with_fill,
ROUND(AVG(r.power_w), 2) AS avg_power_w,
SUM(CASE WHEN r.filled=1 THEN 1 ELSE 0 END) AS filled_samples,
ROUND(AVG(COALESCE(r.power_w,0.0)), 2) AS avg_power_w,
ROUND(SUM(COALESCE(r.power_w,0.0))*1.0, 2) AS approx_energy_wh
FROM reading r
JOIN outlet o ON o.id = r.outlet_id
JOIN device d ON d.id = r.device_id
WHERE r.ts >= ? AND r.ts <= ?
{dev_filter}
GROUP BY d.name, o.cost_code, o.cost_name
ORDER BY d.name, o.cost_code
GROUP BY o.id, o.port_index, o.outlet_name, o.cost_name
ORDER BY o.port_index ASC
"""
cost_rows = con.execute(q_cost, params).fetchall()
outlet_rows = con.execute(q_outlets, params).fetchall()
q_totals = f"""
SELECT
COUNT(DISTINCT o.id) AS outlets_count,
COUNT(*) AS samples_count,
SUM(CASE WHEN r.filled=1 THEN 1 ELSE 0 END) AS filled_samples_count,
ROUND(SUM(COALESCE(r.power_w,0.0))*1.0, 2) AS approx_energy_wh_total
FROM reading r
JOIN outlet o ON o.id = r.outlet_id
JOIN device d ON d.id = r.device_id
WHERE r.ts >= ? AND r.ts <= ?
{dev_filter}
"""
totals = con.execute(q_totals, params).fetchone()
outlets_count, samples_count, filled_samples_count, approx_wh_total = totals
# ---- Kostenstellen (device-übergreifend) ----
valid_codes = [k for k in cc_map.keys() if k != "_default"]
valid_codes = sorted(set([c.upper() for c in valid_codes]))
cost_rows = []
if valid_codes:
placeholders = ",".join(["?"] * len(valid_codes))
q_cost = f"""
SELECT
o.cost_name,
COUNT(DISTINCT o.id) AS outlets_in_costcenter,
COUNT(*) AS samples,
SUM(CASE WHEN r.filled=1 THEN 1 ELSE 0 END) AS filled_samples,
ROUND(AVG(COALESCE(r.power_w,0.0)), 2) AS avg_power_w,
ROUND(SUM(COALESCE(r.power_w,0.0))*1.0, 2) AS approx_energy_wh
FROM reading r
JOIN outlet o ON o.id = r.outlet_id
JOIN device d ON d.id = r.device_id
WHERE r.ts >= ? AND r.ts <= ?
{dev_filter}
AND o.cost_code IN ({placeholders})
GROUP BY o.cost_name
ORDER BY approx_energy_wh DESC, o.cost_name
"""
cost_params = params + valid_codes
cost_rows = con.execute(q_cost, cost_params).fetchall()
# ---- Job-Statistik ----
q_job = f"""
SELECT
d.name,
@@ -574,21 +635,38 @@ def report(con: sqlite3.Connection, from_iso: str, to_iso: str, device_name: Opt
"""
job_rows = con.execute(q_job, params).fetchall()
# ---- Markdown ----
md: List[str] = []
md.append("# MMP Report")
md.append("")
md.append(f"- Zeitraum (UTC): **{from_iso}** .. **{to_iso}**")
if device_name:
md.append(f"- Device: **{device_name}**")
md.append(f"- Device-Filter: **{device_name}**")
md.append("")
md.append("## Outlets gesamt")
md.append("")
md.append(f"- Ports (distinct): **{outlets_count}**")
md.append(f"- Samples: **{samples_count}** (filled: **{filled_samples_count}**)")
md.append(f"- Kumuliert (approx): **{approx_wh_total} Wh**")
md.append("")
md.append("| port | outlet | cost_name | samples | filled | avg_W | approx_Wh |")
md.append("|---:|---|---|---:|---:|---:|---:|")
for port_index, outlet_name, cost_name, samples, filled, avg_w, wh in outlet_rows:
md.append(f"| {port_index} | {outlet_name} | {cost_name} | {samples} | {filled} | {avg_w} | {wh} |")
md.append("")
md.append("## Kostenstellen")
md.append("")
md.append("| device | code | name | samples | filled_samples | avg_W | approx_Wh |")
md.append("|---|---:|---|---:|---:|---:|---:|")
for device, code, name, samples, filled, avg_w, wh in cost_rows:
md.append(f"| {device} | {code} | {name} | {samples} | {filled} | {avg_w} | {wh} |")
md.append("")
if not cost_rows:
md.append("_Keine Kostenstellen aus cost_centers.json im Zeitraum gefunden._")
md.append("")
else:
md.append("| name | outlets | samples | filled | avg_W | approx_Wh |")
md.append("|---|---:|---:|---:|---:|---:|")
for name, outlets_cc, samples, filled, avg_w, wh in cost_rows:
md.append(f"| {name} | {outlets_cc} | {samples} | {filled} | {avg_w} | {wh} |")
md.append("")
md.append("## Job-Statistik")
md.append("")
@@ -625,12 +703,10 @@ def main():
cfg = load_json(args.config)
proj_root = project_root_from_config(args.config)
# Projekt-Unterordner sicherstellen
safe_mkdir(proj_root, "data")
safe_mkdir(proj_root, "logs")
safe_mkdir(proj_root, "reports")
# Pfade relativ zur config.json
db_path = resolve_path(args.config, cfg["db_path"])
cc_path = resolve_path(args.config, cfg["cost_center_map"])
cc_map = load_json(cc_path)
@@ -639,7 +715,7 @@
db_init(con)
# TCP / Timeouts / Verhalten
tcp_cfg = cfg.get("tcp", cfg.get("telnet", {})) # erlaubt "telnet" alt, oder "tcp" neu
tcp_cfg = cfg.get("tcp", cfg.get("telnet", {})) # akzeptiert "tcp" oder (alt) "telnet"
read_timeout = int(tcp_cfg.get("read_timeout_sec", 35))
connect_timeout = int(tcp_cfg.get("connect_timeout_sec", 10))
prompt_timeout = int(tcp_cfg.get("prompt_timeout_sec", 8))
@@ -650,14 +726,6 @@
stop_on_prompt = bool(tcp_cfg.get("stop_on_prompt", True))
debug_dump_raw = bool(tcp_cfg.get("debug_dump_raw", False))
# Report-Ausgabe
rep_cfg = cfg.get("report", {})
write_md = bool(rep_cfg.get("write_markdown", True))
write_html = bool(rep_cfg.get("write_html", True))
write_pdf = bool(rep_cfg.get("write_pdf", False))
name_prefix = rep_cfg.get("report_name_prefix", "report")
# Devices
devices: List[DeviceCfg] = []
for d in cfg.get("devices", []):
devices.append(DeviceCfg(
@@ -665,10 +733,15 @@
host=d["host"],
port=int(d.get("port", 20108)),
enabled=bool(d.get("enabled", True)),
username=d.get("username"),
password=d.get("password"),
))
# Report-Ausgabe
rep_cfg = cfg.get("report", {})
write_md = bool(rep_cfg.get("write_markdown", True))
write_html = bool(rep_cfg.get("write_html", True))
write_pdf = bool(rep_cfg.get("write_pdf", False))
name_prefix = rep_cfg.get("report_name_prefix", "report")
if args.cmd == "poll":
any_ran = False
for d in devices:
@@ -691,7 +764,7 @@
elif args.cmd == "report":
from_iso, to_iso, suffix = parse_period_args(args)
md_text = report(con, from_iso, to_iso, args.device)
md_text = report(con, from_iso, to_iso, args.device, cc_map)
print(md_text)

View File

@@ -0,0 +1,47 @@
<!doctype html><meta charset='utf-8'><style>body{font-family:system-ui,Segoe UI,Roboto,Arial,sans-serif;max-width:1100px;margin:24px auto;padding:0 16px;}table{border-collapse:collapse;width:100%;} td,th{border:1px solid #ccc;padding:6px 10px;text-align:left;}code,pre{background:#f6f8fa;padding:2px 4px;border-radius:4px;}h1,h2{margin-top:1.2em;}</style><body><pre># MMP Report
- Zeitraum (UTC): **2026-02-03T11:09:39+00:00** .. **2026-02-10T11:09:39+00:00**
## Outlets gesamt
- Ports (distinct): **20**
- Samples: **160** (filled: **0**)
- Kumuliert (approx): **173.0 Wh**
| port | outlet | cost_name | samples | filled | avg_W | approx_Wh |
|---:|---|---|---:|---:|---:|---:|
| 1 | W pc1 | Wolff | 8 | 0 | 7.25 | 58.0 |
| 2 | P pc2 | Paulisch | 8 | 0 | 14.38 | 115.0 |
| 3 | W router | Wolff | 8 | 0 | 0.0 | 0.0 |
| 4 | A pc4 | Austinat | 8 | 0 | 0.0 | 0.0 |
| 5 | MOD 1 Outlet 5 | Unbekannt | 8 | 0 | 0.0 | 0.0 |
| 6 | MOD 2 Outlet 1 | Unbekannt | 8 | 0 | 0.0 | 0.0 |
| 7 | MOD 2 Outlet 2 | Unbekannt | 8 | 0 | 0.0 | 0.0 |
| 8 | MOD 2 Outlet 3 | Unbekannt | 8 | 0 | 0.0 | 0.0 |
| 9 | MOD 2 Outlet 4 | Unbekannt | 8 | 0 | 0.0 | 0.0 |
| 10 | MOD 2 Outlet 5 | Unbekannt | 8 | 0 | 0.0 | 0.0 |
| 11 | MOD 3 Outlet 1 | Unbekannt | 8 | 0 | 0.0 | 0.0 |
| 12 | MOD 3 Outlet 2 | Unbekannt | 8 | 0 | 0.0 | 0.0 |
| 13 | MOD 3 Outlet 3 | Unbekannt | 8 | 0 | 0.0 | 0.0 |
| 14 | MOD 3 Outlet 4 | Unbekannt | 8 | 0 | 0.0 | 0.0 |
| 15 | MOD 3 Outlet 5 | Unbekannt | 8 | 0 | 0.0 | 0.0 |
| 16 | MOD 4 Outlet 1 | Unbekannt | 8 | 0 | 0.0 | 0.0 |
| 17 | MOD 4 Outlet 2 | Unbekannt | 8 | 0 | 0.0 | 0.0 |
| 18 | MOD 4 Outlet 3 | Unbekannt | 8 | 0 | 0.0 | 0.0 |
| 19 | MOD 4 Outlet 4 | Unbekannt | 8 | 0 | 0.0 | 0.0 |
| 20 | MOD 4 Outlet 5 | Unbekannt | 8 | 0 | 0.0 | 0.0 |
## Kostenstellen
| name | outlets | samples | filled | avg_W | approx_Wh |
|---|---:|---:|---:|---:|---:|
| Paulisch | 1 | 8 | 0 | 14.38 | 115.0 |
| Wolff | 2 | 16 | 0 | 3.63 | 58.0 |
| Austinat | 1 | 8 | 0 | 0.0 | 0.0 |
## Job-Statistik
| device | runs | ok | failed | outlets_total | outlets_filled | fields_filled | avg_ms |
|---|---:|---:|---:|---:|---:|---:|---:|
| mmp17-1 | 8 | 8 | 0 | 160 | 0 | 0 | 5260.4 |
</pre></body>

46
reports/report_weekly.md Normal file
View File

@@ -0,0 +1,46 @@
# MMP Report
- Zeitraum (UTC): **2026-02-03T11:09:39+00:00** .. **2026-02-10T11:09:39+00:00**
## Outlets gesamt
- Ports (distinct): **20**
- Samples: **160** (filled: **0**)
- Kumuliert (approx): **173.0 Wh**
| port | outlet | cost_name | samples | filled | avg_W | approx_Wh |
|---:|---|---|---:|---:|---:|---:|
| 1 | W pc1 | Wolff | 8 | 0 | 7.25 | 58.0 |
| 2 | P pc2 | Paulisch | 8 | 0 | 14.38 | 115.0 |
| 3 | W router | Wolff | 8 | 0 | 0.0 | 0.0 |
| 4 | A pc4 | Austinat | 8 | 0 | 0.0 | 0.0 |
| 5 | MOD 1 Outlet 5 | Unbekannt | 8 | 0 | 0.0 | 0.0 |
| 6 | MOD 2 Outlet 1 | Unbekannt | 8 | 0 | 0.0 | 0.0 |
| 7 | MOD 2 Outlet 2 | Unbekannt | 8 | 0 | 0.0 | 0.0 |
| 8 | MOD 2 Outlet 3 | Unbekannt | 8 | 0 | 0.0 | 0.0 |
| 9 | MOD 2 Outlet 4 | Unbekannt | 8 | 0 | 0.0 | 0.0 |
| 10 | MOD 2 Outlet 5 | Unbekannt | 8 | 0 | 0.0 | 0.0 |
| 11 | MOD 3 Outlet 1 | Unbekannt | 8 | 0 | 0.0 | 0.0 |
| 12 | MOD 3 Outlet 2 | Unbekannt | 8 | 0 | 0.0 | 0.0 |
| 13 | MOD 3 Outlet 3 | Unbekannt | 8 | 0 | 0.0 | 0.0 |
| 14 | MOD 3 Outlet 4 | Unbekannt | 8 | 0 | 0.0 | 0.0 |
| 15 | MOD 3 Outlet 5 | Unbekannt | 8 | 0 | 0.0 | 0.0 |
| 16 | MOD 4 Outlet 1 | Unbekannt | 8 | 0 | 0.0 | 0.0 |
| 17 | MOD 4 Outlet 2 | Unbekannt | 8 | 0 | 0.0 | 0.0 |
| 18 | MOD 4 Outlet 3 | Unbekannt | 8 | 0 | 0.0 | 0.0 |
| 19 | MOD 4 Outlet 4 | Unbekannt | 8 | 0 | 0.0 | 0.0 |
| 20 | MOD 4 Outlet 5 | Unbekannt | 8 | 0 | 0.0 | 0.0 |
## Kostenstellen
| name | outlets | samples | filled | avg_W | approx_Wh |
|---|---:|---:|---:|---:|---:|
| Paulisch | 1 | 8 | 0 | 14.38 | 115.0 |
| Wolff | 2 | 16 | 0 | 3.63 | 58.0 |
| Austinat | 1 | 8 | 0 | 0.0 | 0.0 |
## Job-Statistik
| device | runs | ok | failed | outlets_total | outlets_filled | fields_filled | avg_ms |
|---|---:|---:|---:|---:|---:|---:|---:|
| mmp17-1 | 8 | 8 | 0 | 160 | 0 | 0 | 5260.4 |