feat: Windows compatibility (#836)

* feat: win compatibility

* fix

* fix: uptime parse

* opt.: linux uptime accuracy

* fix: windows temperature fetching

* opt.

* opt.: powershell exec

* refactor: address PR review feedback and improve code quality

### Major Improvements:
- **Refactored Windows status parsing**: Broke the large `_getWindowsStatus` method down into 13 smaller, focused helper methods for better maintainability and readability
- **Extracted system detection logic**: Created dedicated `SystemDetector` helper class to separate OS detection concerns from ServerProvider
- **Improved concurrency handling**: Implemented proper synchronization for server updates using Future-based locks to prevent race conditions (see the sketch after this list)
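
The `lib/data/provider/server.dart` hunks are not included below, so here is only a minimal sketch of the Future-based lock idea described above; the names (`_updateLocks`, `updateServer`, `_doUpdate`) are illustrative, not the actual `ServerProvider` API.

```dart
// Hypothetical sketch: one in-flight update per server id.
// If a refresh is already running for a given id, callers await the same
// Future instead of starting a second, racing update.
final _updateLocks = <String, Future<void>>{};

Future<void> updateServer(String id) {
  final pending = _updateLocks[id];
  if (pending != null) return pending;

  final task = _doUpdate(id).whenComplete(() => _updateLocks.remove(id));
  _updateLocks[id] = task;
  return task;
}

Future<void> _doUpdate(String id) async {
  // Connect, run the status script, parse the segments, notify listeners, ...
}
```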

### Bug Fixes:
- **Fixed CPU percentage parsing**: Removed incorrect '*100' multiplication in BSD CPU parsing (values were already percentages)
- **Enhanced memory parsing**: Added validation and error handling to BSD memory fallback parsing with proper logging
- **Improved uptime parsing**: Added support for multiple Windows date formats and robust error handling with validation
- **Fixed division by zero**: Added safety checks in the `Swap.usedPercent` getter (see the snippet after this list)
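
For reference, the guard reduces to the following (condensed from the `lib/data/model/server/memory.dart` hunk further down; only the relevant members of `Swap` are shown):

```dart
class Swap {
  final int total;
  final int free;
  final int cached;

  const Swap({required this.total, required this.free, required this.cached});

  // A server without swap reports total == 0; the raw division used to
  // yield NaN/Infinity, so both getters now short-circuit to 0.0.
  double get usedPercent => total == 0 ? 0.0 : 1 - free / total;
  double get freePercent => total == 0 ? 0.0 : free / total;
}
```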

### Code Quality Enhancements:
- **Added comprehensive documentation**: Documented Windows CPU counter limitations and approach
- **Strengthened error handling**: Added detailed logging and validation throughout parsing methods
- **Improved robustness**: Enhanced BSD CPU parsing with percentage validation and warnings
- **Better separation of concerns**: Each parsing method now has single responsibility

### Files Changed:
- `lib/data/helper/system_detector.dart` (new): System detection helper
- `lib/data/model/server/cpu.dart`: Fixed percentage parsing and added validation
- `lib/data/model/server/memory.dart`: Enhanced fallback parsing and division-by-zero protection
- `lib/data/model/server/server_status_update_req.dart`: Refactored into 13 focused parsing methods
- `lib/data/provider/server.dart`: Improved synchronization and extracted system detection

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <noreply@anthropic.com>

* refactor: parse & shell fn struct

---------

Co-authored-by: Claude <noreply@anthropic.com>
lollipopkit🏳️‍⚧️
2025-08-08 16:56:36 +08:00
committed by GitHub
parent 46a12bc844
commit 3a615449e3
103 changed files with 9591 additions and 1906 deletions


@@ -32,7 +32,7 @@ class AmdSmi {
try {
final jsonData = json.decode(raw);
if (jsonData is! List) return [];
return jsonData
.map((gpu) => _parseGpuItem(gpu))
.where((item) => item != null)
@@ -47,28 +47,28 @@ class AmdSmi {
try {
final name = gpu['name'] ?? gpu['card_model'] ?? gpu['device_name'] ?? 'Unknown AMD GPU';
final deviceId = gpu['device_id']?.toString() ?? gpu['gpu_id']?.toString() ?? '0';
// Temperature parsing
final tempRaw = gpu['temperature'] ?? gpu['temp'] ?? gpu['gpu_temp'];
final temp = _parseIntValue(tempRaw);
// Power parsing
final powerDraw = gpu['power_draw'] ?? gpu['current_power'];
final powerCap = gpu['power_cap'] ?? gpu['power_limit'] ?? gpu['max_power'];
final power = _formatPower(powerDraw, powerCap);
// Memory parsing
final memory = _parseMemory(gpu['memory'] ?? gpu['vram'] ?? {});
// Utilization parsing
final utilization = _parseIntValue(gpu['utilization'] ?? gpu['gpu_util'] ?? gpu['activity']);
// Fan speed parsing
final fanSpeed = _parseIntValue(gpu['fan_speed'] ?? gpu['fan_rpm']);
// Clock speed parsing
final clockSpeed = _parseIntValue(gpu['clock_speed'] ?? gpu['gpu_clock'] ?? gpu['sclk']);
return AmdSmiItem(
deviceId: deviceId,
name: name,
@@ -98,7 +98,7 @@ class AmdSmi {
static String _formatPower(dynamic draw, dynamic cap) {
final drawValue = _parseIntValue(draw);
final capValue = _parseIntValue(cap);
if (drawValue == 0 && capValue == 0) return 'N/A';
if (capValue == 0) return '${drawValue}W';
return '${drawValue}W / ${capValue}W';
@@ -108,7 +108,7 @@ class AmdSmi {
final total = _parseIntValue(memData['total'] ?? memData['total_memory']);
final used = _parseIntValue(memData['used'] ?? memData['used_memory']);
final unit = memData['unit']?.toString() ?? 'MB';
final processes = <AmdSmiMemProcess>[];
final processesData = memData['processes'];
if (processesData is List) {
@@ -119,7 +119,7 @@ class AmdSmi {
}
}
}
return AmdSmiMem(total, used, unit, processes);
}
@@ -127,7 +127,7 @@ class AmdSmi {
final pid = _parseIntValue(procData['pid']);
final name = procData['name']?.toString() ?? procData['process_name']?.toString() ?? 'Unknown';
final memory = _parseIntValue(procData['memory'] ?? procData['used_memory']);
if (pid == 0) return null;
return AmdSmiMemProcess(pid, name, memory);
}
@@ -185,4 +185,4 @@ class AmdSmiMemProcess {
String toString() {
return 'AmdSmiMemProcess{pid: $pid, name: $name, memory: $memory}';
}
}
}


@@ -19,13 +19,7 @@ class Battery {
final int? cycle;
final String? tech;
const Battery({
required this.status,
this.percent,
this.name,
this.cycle,
this.tech,
});
const Battery({required this.status, this.percent, this.name, this.cycle, this.tech});
factory Battery.fromRaw(String raw) {
final lines = raw.split('\n');
@@ -63,8 +57,7 @@ enum BatteryStatus {
charging,
discharging,
full,
unknown,
;
unknown;
static BatteryStatus parse(String? status) {
switch (status) {


@@ -6,17 +6,11 @@ class Conn {
final int passive;
final int fail;
const Conn({
required this.maxConn,
required this.active,
required this.passive,
required this.fail,
});
const Conn({required this.maxConn, required this.active, required this.passive, required this.fail});
static Conn? parse(String raw) {
final lines = raw.split('\n');
final idx = lines.lastWhere((element) => element.startsWith('Tcp:'),
orElse: () => '');
final idx = lines.lastWhere((element) => element.startsWith('Tcp:'), orElse: () => '');
if (idx != '') {
final vals = idx.split(Miscs.blankReg);
return Conn(


@@ -200,22 +200,98 @@ final class CpuBrand {
}
final _bsdCpuPercentReg = RegExp(r'(\d+\.\d+)%');
final _macCpuPercentReg = RegExp(
r'CPU usage: ([\d.]+)% user, ([\d.]+)% sys, ([\d.]+)% idle');
final _freebsdCpuPercentReg = RegExp(
r'CPU: ([\d.]+)% user, ([\d.]+)% nice, ([\d.]+)% system, '
r'([\d.]+)% interrupt, ([\d.]+)% idle');
/// TODO: Change this implementation to parse cpu status on BSD system
/// Parse CPU status on BSD system with support for different BSD variants
///
/// [raw]:
/// CPU usage: 14.70% user, 12.76% sys, 72.52% idle
/// Supports multiple formats:
/// - macOS: "CPU usage: 14.70% user, 12.76% sys, 72.52% idle"
/// - FreeBSD: "CPU: 5.2% user, 0.0% nice, 3.1% system, 0.1% interrupt, 91.6% idle"
/// - Generic BSD: fallback to percentage extraction
Cpus parseBsdCpu(String raw) {
final init = InitStatus.cpus;
// Try macOS format first
final macMatch = _macCpuPercentReg.firstMatch(raw);
if (macMatch != null) {
final userPercent = double.parse(macMatch.group(1)!).toInt();
final sysPercent = double.parse(macMatch.group(2)!).toInt();
final idlePercent = double.parse(macMatch.group(3)!).toInt();
init.add([
SingleCpuCore(
'cpu0',
userPercent,
sysPercent,
0, // nice
idlePercent,
0, // iowait
0, // irq
0, // softirq
),
]);
return init;
}
// Try FreeBSD format
final freebsdMatch = _freebsdCpuPercentReg.firstMatch(raw);
if (freebsdMatch != null) {
final userPercent = double.parse(freebsdMatch.group(1)!).toInt();
final nicePercent = double.parse(freebsdMatch.group(2)!).toInt();
final sysPercent = double.parse(freebsdMatch.group(3)!).toInt();
final irqPercent = double.parse(freebsdMatch.group(4)!).toInt();
final idlePercent = double.parse(freebsdMatch.group(5)!).toInt();
init.add([
SingleCpuCore(
'cpu0',
userPercent,
sysPercent,
nicePercent,
idlePercent,
0, // iowait
irqPercent,
0, // softirq
),
]);
return init;
}
// Fallback to generic percentage extraction
final percents = _bsdCpuPercentReg
.allMatches(raw)
.map((e) => double.parse(e.group(1) ?? '0') * 100)
.map((e) => double.parse(e.group(1) ?? '0'))
.toList();
if (percents.length != 3) return InitStatus.cpus;
final init = InitStatus.cpus;
init.add([
SingleCpuCore('cpu', percents[0].toInt(), 0, 0,
percents[2].toInt() + percents[1].toInt(), 0, 0, 0),
]);
if (percents.length >= 3) {
// Validate that percentages are reasonable (0-100 range)
final validPercents = percents.where((p) => p >= 0 && p <= 100).toList();
if (validPercents.length != percents.length) {
Loggers.app.warning('BSD CPU fallback parsing found invalid percentages in: $raw');
}
init.add([
SingleCpuCore(
'cpu0',
percents[0].toInt(), // user
percents.length > 1 ? percents[1].toInt() : 0, // sys
0, // nice
percents.length > 2 ? percents[2].toInt() : 0, // idle
0, // iowait
0, // irq
0, // softirq
),
]);
return init;
} else if (percents.isNotEmpty) {
Loggers.app.warning('BSD CPU fallback parsing found ${percents.length} percentages (expected at least 3) in: $raw');
} else {
Loggers.app.warning('BSD CPU fallback parsing found no percentages in: $raw');
}
return init;
}


@@ -70,14 +70,14 @@ class Disk with EquatableMixin {
if (disk != null) {
list.add(disk);
}
// For devices with children (like physical disks with partitions),
// also process each child individually to ensure BTRFS RAID disks are properly handled
final List<dynamic> childDevices = device['children'] ?? [];
for (final childDevice in childDevices) {
final String childPath = childDevice['path']?.toString() ?? '';
final String childFsType = childDevice['fstype']?.toString() ?? '';
// If this is a BTRFS partition, add it directly to ensure it's properly represented
if (childFsType == 'btrfs' && childPath.isNotEmpty) {
final childDisk = _processSingleDevice(childDevice);
@@ -93,11 +93,11 @@ class Disk with EquatableMixin {
final fstype = device['fstype']?.toString();
final String mountpoint = device['mountpoint']?.toString() ?? '';
final String path = device['path']?.toString() ?? '';
if (path.isEmpty || (fstype == null && mountpoint.isEmpty)) {
return null;
}
if (!_shouldCalc(fstype ?? '', mountpoint)) {
return null;
}
@@ -154,8 +154,7 @@ class Disk with EquatableMixin {
}
// Handle common filesystem cases or parent devices with children
if ((fstype != null && _shouldCalc(fstype, mount)) ||
(childDisks.isNotEmpty && path.isNotEmpty)) {
if ((fstype != null && _shouldCalc(fstype, mount)) || (childDisks.isNotEmpty && path.isNotEmpty)) {
final sizeStr = device['fssize']?.toString() ?? '0';
final size = (BigInt.tryParse(sizeStr) ?? BigInt.zero) ~/ BigInt.from(1024);
@@ -221,14 +220,16 @@ class Disk with EquatableMixin {
final fs = vals[0];
final mount = vals[5];
if (!_shouldCalc(fs, mount)) continue;
list.add(Disk(
path: fs,
mount: mount,
usedPercent: int.parse(vals[4].replaceFirst('%', '')),
used: BigInt.parse(vals[2]) ~/ BigInt.from(1024),
size: BigInt.parse(vals[1]) ~/ BigInt.from(1024),
avail: BigInt.parse(vals[3]) ~/ BigInt.from(1024),
));
list.add(
Disk(
path: fs,
mount: mount,
usedPercent: int.parse(vals[4].replaceFirst('%', '')),
used: BigInt.parse(vals[2]) ~/ BigInt.from(1024),
size: BigInt.parse(vals[1]) ~/ BigInt.from(1024),
avail: BigInt.parse(vals[3]) ~/ BigInt.from(1024),
),
);
} catch (e) {
continue;
}
@@ -237,8 +238,19 @@ class Disk with EquatableMixin {
}
@override
List<Object?> get props =>
[path, name, kname, fsTyp, mount, usedPercent, used, size, avail, uuid, children];
List<Object?> get props => [
path,
name,
kname,
fsTyp,
mount,
usedPercent,
used,
size,
avail,
uuid,
children,
];
}
class DiskIO extends TimeSeq<List<DiskIOPiece>> {
@@ -314,12 +326,14 @@ class DiskIO extends TimeSeq<List<DiskIOPiece>> {
try {
final dev = vals[2];
if (dev.startsWith('loop')) continue;
items.add(DiskIOPiece(
dev: dev,
sectorsRead: int.parse(vals[5]),
sectorsWrite: int.parse(vals[9]),
time: time,
));
items.add(
DiskIOPiece(
dev: dev,
sectorsRead: int.parse(vals[5]),
sectorsWrite: int.parse(vals[9]),
time: time,
),
);
} catch (e) {
continue;
}
@@ -334,12 +348,7 @@ class DiskIOPiece extends TimeSeqIface<DiskIOPiece> {
final int sectorsWrite;
final int time;
DiskIOPiece({
required this.dev,
required this.sectorsRead,
required this.sectorsWrite,
required this.time,
});
DiskIOPiece({required this.dev, required this.sectorsRead, required this.sectorsWrite, required this.time});
@override
bool same(DiskIOPiece other) => dev == other.dev;
@@ -349,10 +358,7 @@ class DiskUsage {
final BigInt used;
final BigInt size;
DiskUsage({
required this.used,
required this.size,
});
DiskUsage({required this.used, required this.size});
double get usedPercent {
// Avoid division by zero


@@ -12,7 +12,6 @@ enum Dist {
rocky,
deepin,
coreelec,
;
}
extension StringX on String {
@@ -34,6 +33,4 @@ extension StringX on String {
// Special rules
const _wrts = [
'istoreos',
];
const _wrts = ['istoreos'];


@@ -5,11 +5,7 @@ class Memory {
final int free;
final int avail;
const Memory({
required this.total,
required this.free,
required this.avail,
});
const Memory({required this.total, required this.free, required this.avail});
double get availPercent {
if (avail == 0) {
@@ -23,46 +19,99 @@ class Memory {
static Memory parse(String raw) {
final items = raw.split('\n').map((e) => memItemReg.firstMatch(e)).toList();
final total = int.tryParse(items
.firstWhereOrNull((e) => e?.group(1) == 'MemTotal:')
?.group(2) ??
'1') ??
1;
final free = int.tryParse(items
.firstWhereOrNull((e) => e?.group(1) == 'MemFree:')
?.group(2) ??
'0') ??
0;
final available = int.tryParse(items
.firstWhereOrNull((e) => e?.group(1) == 'MemAvailable:')
?.group(2) ??
'0') ??
0;
final total = int.tryParse(
items.firstWhereOrNull((e) => e?.group(1) == 'MemTotal:')
?.group(2) ?? '1') ?? 1;
final free = int.tryParse(
items.firstWhereOrNull((e) => e?.group(1) == 'MemFree:')
?.group(2) ?? '0') ?? 0;
final available = int.tryParse(
items.firstWhereOrNull((e) => e?.group(1) == 'MemAvailable:')
?.group(2) ?? '0') ?? 0;
return Memory(
total: total,
free: free,
avail: available,
);
return Memory(total: total, free: free, avail: available);
}
}
final memItemReg = RegExp(r'([A-Z].+:)\s+([0-9]+) kB');
/// Parse BSD/macOS memory from top output
///
/// Supports formats like:
/// - macOS: "PhysMem: 32G used (1536M wired), 64G unused."
/// - FreeBSD: "Mem: 456M Active, 2918M Inact, 1127M Wired, 187M Cache, 829M Buf, 3535M Free"
Memory parseBsdMemory(String raw) {
// Try macOS format first: "PhysMem: 32G used (1536M wired), 64G unused."
final macMemReg = RegExp(
r'PhysMem:\s*([\d.]+)([KMGT])\s*used.*?,\s*([\d.]+)([KMGT])\s*unused');
final macMatch = macMemReg.firstMatch(raw);
if (macMatch != null) {
final usedAmount = double.parse(macMatch.group(1)!);
final usedUnit = macMatch.group(2)!;
final freeAmount = double.parse(macMatch.group(3)!);
final freeUnit = macMatch.group(4)!;
final usedKB = _convertToKB(usedAmount, usedUnit);
final freeKB = _convertToKB(freeAmount, freeUnit);
return Memory(total: usedKB + freeKB, free: freeKB, avail: freeKB);
}
// Try FreeBSD format: "Mem: 456M Active, 2918M Inact, 1127M Wired, 187M Cache, 829M Buf, 3535M Free"
final freeBsdReg = RegExp(
r'(\d+)([KMGT])\s+(Active|Inact|Wired|Cache|Buf|Free)', caseSensitive: false);
final matches = freeBsdReg.allMatches(raw);
if (matches.isNotEmpty) {
double usedKB = 0;
double freeKB = 0;
for (final match in matches) {
final amount = double.parse(match.group(1)!);
final unit = match.group(2)!;
final keyword = match.group(3)!.toLowerCase();
final kb = _convertToKB(amount, unit);
// Only sum known keywords
if (keyword == 'active' || keyword == 'inact' || keyword == 'wired' || keyword == 'cache' || keyword == 'buf') {
usedKB += kb;
} else if (keyword == 'free') {
freeKB += kb;
}
}
return Memory(total: (usedKB + freeKB).round(), free: freeKB.round(), avail: freeKB.round());
}
// If neither format matches, throw an error to avoid misinterpretation
throw FormatException('Unrecognized BSD/macOS memory format: $raw');
}
/// Convert memory size to KB based on unit
int _convertToKB(double amount, String unit) {
switch (unit.toUpperCase()) {
case 'T':
return (amount * 1024 * 1024 * 1024).round();
case 'G':
return (amount * 1024 * 1024).round();
case 'M':
return (amount * 1024).round();
case 'K':
case '':
return amount.round();
default:
return amount.round();
}
}
class Swap {
final int total;
final int free;
final int cached;
const Swap({
required this.total,
required this.free,
required this.cached,
});
const Swap({required this.total, required this.free, required this.cached});
double get usedPercent => 1 - free / total;
double get usedPercent => total == 0 ? 0.0 : 1 - free / total;
double get freePercent => free / total;
double get freePercent => total == 0 ? 0.0 : free / total;
@override
String toString() {
@@ -72,26 +121,16 @@ class Swap {
static Swap parse(String raw) {
final items = raw.split('\n').map((e) => memItemReg.firstMatch(e)).toList();
final total = int.tryParse(items
.firstWhereOrNull((e) => e?.group(1) == 'SwapTotal:')
?.group(2) ??
'1') ??
0;
final free = int.tryParse(items
.firstWhereOrNull((e) => e?.group(1) == 'SwapFree:')
?.group(2) ??
'1') ??
0;
final cached = int.tryParse(items
.firstWhereOrNull((e) => e?.group(1) == 'SwapCached:')
?.group(2) ??
'0') ??
0;
final total = int.tryParse(
items.firstWhereOrNull((e) => e?.group(1) == 'SwapTotal:')
?.group(2) ?? '1') ?? 0;
final free = int.tryParse(
items.firstWhereOrNull((e) => e?.group(1) == 'SwapFree:')
?.group(2) ?? '1') ?? 0;
final cached = int.tryParse(
items.firstWhereOrNull((e) => e?.group(1) == 'SwapCached:')
?.group(2) ?? '0') ?? 0;
return Swap(
total: total,
free: free,
cached: cached,
);
return Swap(total: total, free: free, cached: cached);
}
}


@@ -16,12 +16,7 @@ class NetSpeedPart extends TimeSeqIface<NetSpeedPart> {
bool same(NetSpeedPart other) => device == other.device;
}
typedef CachedNetVals = ({
String sizeIn,
String sizeOut,
String speedIn,
String speedOut,
});
typedef CachedNetVals = ({String sizeIn, String sizeOut, String speedIn, String speedOut});
class NetSpeed extends TimeSeq<List<NetSpeedPart>> {
NetSpeed(super.init1, super.init2);
@@ -32,20 +27,14 @@ class NetSpeed extends TimeSeq<List<NetSpeedPart>> {
devices.addAll(now.map((e) => e.device).toList());
realIfaces.clear();
realIfaces.addAll(devices
.where((e) => realIfacePrefixs.any((prefix) => e.startsWith(prefix))));
realIfaces.addAll(devices.where((e) => realIfacePrefixs.any((prefix) => e.startsWith(prefix))));
final sizeIn = this.sizeIn();
final sizeOut = this.sizeOut();
final speedIn = this.speedIn();
final speedOut = this.speedOut();
cachedVals = (
sizeIn: sizeIn,
sizeOut: sizeOut,
speedIn: speedIn,
speedOut: speedOut,
);
cachedVals = (sizeIn: sizeIn, sizeOut: sizeOut, speedIn: speedIn, speedOut: speedOut);
}
/// Cached network device list
@@ -58,15 +47,13 @@ class NetSpeed extends TimeSeq<List<NetSpeedPart>> {
/// Cached non-virtual network device prefix
final realIfaces = <String>[];
CachedNetVals cachedVals =
(sizeIn: '0kb', sizeOut: '0kb', speedIn: '0kb/s', speedOut: '0kb/s');
CachedNetVals cachedVals = (sizeIn: '0kb', sizeOut: '0kb', speedIn: '0kb/s', speedOut: '0kb/s');
/// Time diff between [pre] and [now]
BigInt get _timeDiff => BigInt.from(now[0].time - pre[0].time);
double speedInBytes(int i) => (now[i].bytesIn - pre[i].bytesIn) / _timeDiff;
double speedOutBytes(int i) =>
(now[i].bytesOut - pre[i].bytesOut) / _timeDiff;
double speedOutBytes(int i) => (now[i].bytesOut - pre[i].bytesOut) / _timeDiff;
BigInt sizeInBytes(int i) => now[i].bytesIn;
BigInt sizeOutBytes(int i) => now[i].bytesOut;


@@ -35,25 +35,17 @@ class NvidiaSmi {
.firstOrNull
?.innerText;
final power = gpu.findElements('gpu_power_readings').firstOrNull;
final powerDraw =
power?.findElements('power_draw').firstOrNull?.innerText;
final powerLimit =
power?.findElements('current_power_limit').firstOrNull?.innerText;
final powerDraw = power?.findElements('power_draw').firstOrNull?.innerText;
final powerLimit = power?.findElements('current_power_limit').firstOrNull?.innerText;
final memory = gpu.findElements('fb_memory_usage').firstOrNull;
final memoryUsed = memory?.findElements('used').firstOrNull?.innerText;
final memoryTotal = memory?.findElements('total').firstOrNull?.innerText;
final processes = gpu
.findElements('processes')
.firstOrNull
?.findElements('process_info');
final memoryProcesses =
List<NvidiaSmiMemProcess?>.generate(processes?.length ?? 0, (index) {
final processes = gpu.findElements('processes').firstOrNull?.findElements('process_info');
final memoryProcesses = List<NvidiaSmiMemProcess?>.generate(processes?.length ?? 0, (index) {
final process = processes?.elementAt(index);
final pid = process?.findElements('pid').firstOrNull?.innerText;
final name =
process?.findElements('process_name').firstOrNull?.innerText;
final memory =
process?.findElements('used_memory').firstOrNull?.innerText;
final name = process?.findElements('process_name').firstOrNull?.innerText;
final memory = process?.findElements('used_memory').firstOrNull?.innerText;
if (pid != null && name != null && memory != null) {
return NvidiaSmiMemProcess(
int.tryParse(pid) ?? 0,


@@ -1,7 +1,6 @@
final parseFailed = Exception('Parse failed');
final seqReg = RegExp(r'seq=(.+) ttl=(.+) time=(.+) ms');
final packetReg =
RegExp(r'(.+) packets transmitted, (.+) received, (.+)% packet loss');
final packetReg = RegExp(r'(.+) packets transmitted, (.+) received, (.+)% packet loss');
final timeReg = RegExp(r'min/avg/max/mdev = (.+)/(.+)/(.+)/(.+) ms');
final timeAlpineReg = RegExp(r'round-trip min/avg/max = (.+)/(.+)/(.+) ms');
final ipReg = RegExp(r' \((\S+)\)');
@@ -15,17 +14,13 @@ class PingResult {
PingResult.parse(this.serverName, String raw) {
final lines = raw.split('\n');
lines.removeWhere((element) => element.isEmpty);
final statisticIndex =
lines.indexWhere((element) => element.startsWith('---'));
final statisticIndex = lines.indexWhere((element) => element.startsWith('---'));
if (statisticIndex == -1) {
throw parseFailed;
}
final statisticRaw = lines.sublist(statisticIndex + 1);
statistic = PingStatistics.parse(statisticRaw);
results = lines
.sublist(1, statisticIndex)
.map((e) => PingSeqResult.parse(e))
.toList();
results = lines.sublist(1, statisticIndex).map((e) => PingSeqResult.parse(e)).toList();
ip = ipReg.firstMatch(lines[0])?.group(1);
}
}


@@ -8,10 +8,7 @@ class PrivateKeyInfo {
@JsonKey(name: 'private_key')
final String key;
const PrivateKeyInfo({
required this.id,
required this.key,
});
const PrivateKeyInfo({required this.id, required this.key});
factory PrivateKeyInfo.fromJson(Map<String, dynamic> json) => _$PrivateKeyInfoFromJson(json);


@@ -107,10 +107,7 @@ class PsResult {
final List<Proc> procs;
final String? error;
const PsResult({
required this.procs,
this.error,
});
const PsResult({required this.procs, this.error});
factory PsResult.parse(String raw, {ProcSortMode sort = ProcSortMode.cpu}) {
final lines = raw.split('\n').map((e) => e.trim()).toList();
@@ -167,14 +164,7 @@ class PsResult {
}
}
enum ProcSortMode {
cpu,
mem,
pid,
user,
name,
;
}
enum ProcSortMode { cpu, mem, pid, user, name }
extension _StrIndex on List<String> {
int? indexOfOrNull(String val) {


@@ -6,25 +6,24 @@ enum PveResType {
qemu,
node,
storage,
sdn,
;
sdn;
static PveResType? fromString(String type) => switch (type.toLowerCase()) {
'lxc' => PveResType.lxc,
'qemu' => PveResType.qemu,
'node' => PveResType.node,
'storage' => PveResType.storage,
'sdn' => PveResType.sdn,
_ => null,
};
'lxc' => PveResType.lxc,
'qemu' => PveResType.qemu,
'node' => PveResType.node,
'storage' => PveResType.storage,
'sdn' => PveResType.sdn,
_ => null,
};
String get toStr => switch (this) {
PveResType.node => l10n.node,
PveResType.qemu => 'QEMU',
PveResType.lxc => 'LXC',
PveResType.storage => l10n.storage,
PveResType.sdn => 'SDN',
};
PveResType.node => l10n.node,
PveResType.qemu => 'QEMU',
PveResType.lxc => 'LXC',
PveResType.storage => l10n.storage,
PveResType.sdn => 'SDN',
};
}
sealed class PveResIface {
@@ -334,13 +333,7 @@ final class PveSdn extends PveResIface implements PveCtrlIface {
@override
final String status;
PveSdn({
required this.id,
required this.type,
required this.sdn,
required this.node,
required this.status,
});
PveSdn({required this.id, required this.type, required this.sdn, required this.node, required this.status});
static PveSdn fromJson(Map<String, dynamic> json) {
return PveSdn(
@@ -379,8 +372,7 @@ final class PveRes {
bool get onlyOneNode => nodes.length == 1;
int get length =>
qemus.length + lxcs.length + nodes.length + storages.length + sdns.length;
int get length => qemus.length + lxcs.length + nodes.length + storages.length + sdns.length;
PveResIface operator [](int index) {
if (index < nodes.length) {
@@ -432,29 +424,13 @@ final class PveRes {
}
if (old != null) {
qemus.reorder(
order: old.qemus.map((e) => e.id).toList(),
finder: (e, s) => e.id == s);
lxcs.reorder(
order: old.lxcs.map((e) => e.id).toList(),
finder: (e, s) => e.id == s);
nodes.reorder(
order: old.nodes.map((e) => e.id).toList(),
finder: (e, s) => e.id == s);
storages.reorder(
order: old.storages.map((e) => e.id).toList(),
finder: (e, s) => e.id == s);
sdns.reorder(
order: old.sdns.map((e) => e.id).toList(),
finder: (e, s) => e.id == s);
qemus.reorder(order: old.qemus.map((e) => e.id).toList(), finder: (e, s) => e.id == s);
lxcs.reorder(order: old.lxcs.map((e) => e.id).toList(), finder: (e, s) => e.id == s);
nodes.reorder(order: old.nodes.map((e) => e.id).toList(), finder: (e, s) => e.id == s);
storages.reorder(order: old.storages.map((e) => e.id).toList(), finder: (e, s) => e.id == s);
sdns.reorder(order: old.sdns.map((e) => e.id).toList(), finder: (e, s) => e.id == s);
}
return PveRes(
qemus: qemus,
lxcs: lxcs,
nodes: nodes,
storages: storages,
sdns: sdns,
);
return PveRes(qemus: qemus, lxcs: lxcs, nodes: nodes, storages: storages, sdns: sdns);
}
}


@@ -15,12 +15,12 @@ final class SensorAdaptor {
static const isa = SensorAdaptor(isaRaw);
static SensorAdaptor parse(String raw) => switch (raw) {
acpiRaw => acpi,
pciRaw => pci,
virtualRaw => virtual,
isaRaw => isa,
_ => SensorAdaptor(raw),
};
acpiRaw => acpi,
pciRaw => pci,
virtualRaw => virtual,
isaRaw => isa,
_ => SensorAdaptor(raw),
};
}
final class SensorItem {
@@ -28,11 +28,7 @@ final class SensorItem {
final SensorAdaptor adapter;
final Map<String, String> details;
const SensorItem({
required this.device,
required this.adapter,
required this.details,
});
const SensorItem({required this.device, required this.adapter, required this.details});
String get toMarkdown {
final sb = StringBuffer();
@@ -72,8 +68,7 @@ final class SensorItem {
final len = sensorLines.length;
if (len < 3) continue;
final device = sensorLines.first;
final adapter =
SensorAdaptor.parse(sensorLines[1].split(':').last.trim());
final adapter = SensorAdaptor.parse(sensorLines[1].split(':').last.trim());
final details = <String, String>{};
for (var idx = 2; idx < len; idx++) {
@@ -84,11 +79,7 @@ final class SensorItem {
final value = detailParts[1].trim();
details[key] = value;
}
sensors.add(SensorItem(
device: device,
adapter: adapter,
details: details,
));
sensors.add(SensorItem(device: device, adapter: adapter, details: details));
}
return sensors;


@@ -5,6 +5,7 @@ import 'package:freezed_annotation/freezed_annotation.dart';
import 'package:server_box/data/model/app/error.dart';
import 'package:server_box/data/model/server/custom.dart';
import 'package:server_box/data/model/server/server.dart';
import 'package:server_box/data/model/server/system.dart';
import 'package:server_box/data/model/server/wol_cfg.dart';
import 'package:server_box/data/provider/server.dart';
import 'package:server_box/data/store/server.dart';
@@ -44,6 +45,9 @@ abstract class Spi with _$Spi {
/// It only applies to SSH terminal.
Map<String, String>? envs,
@Default('') @JsonKey(fromJson: Spi.parseId) String id,
/// Custom system type (unix or windows). If set, skip auto-detection.
@JsonKey(includeIfNull: false) SystemType? customSystemType,
}) = _Spi;
factory Spi.fromJson(Map<String, dynamic> json) => _$SpiFromJson(json);
@@ -119,26 +123,25 @@ extension Spix on Spi {
///
/// **NOT** the default value.
static final example = Spi(
name: 'name',
ip: 'ip',
port: 22,
user: 'root',
pwd: 'pwd',
keyId: 'private_key_id',
tags: ['tag1', 'tag2'],
alterUrl: 'user@ip:port',
autoConnect: true,
jumpId: 'jump_server_id',
custom: ServerCustom(
pveAddr: 'http://localhost:8006',
pveIgnoreCert: false,
cmds: {
'echo': 'echo hello',
},
preferTempDev: 'nvme-pci-0400',
logoUrl: 'https://example.com/logo.png',
),
id: 'id');
name: 'name',
ip: 'ip',
port: 22,
user: 'root',
pwd: 'pwd',
keyId: 'private_key_id',
tags: ['tag1', 'tag2'],
alterUrl: 'user@ip:port',
autoConnect: true,
jumpId: 'jump_server_id',
custom: ServerCustom(
pveAddr: 'http://localhost:8006',
pveIgnoreCert: false,
cmds: {'echo': 'echo hello'},
preferTempDev: 'nvme-pci-0400',
logoUrl: 'https://example.com/logo.png',
),
id: 'id',
);
bool get isRoot => user == 'root';
}


@@ -19,7 +19,8 @@ mixin _$Spi {
String get name; String get ip; int get port; String get user; String? get pwd;/// [id] of private key
@JsonKey(name: 'pubKeyId') String? get keyId; List<String>? get tags; String? get alterUrl; bool get autoConnect;/// [id] of the jump server
String? get jumpId; ServerCustom? get custom; WakeOnLanCfg? get wolCfg;/// It only applies to SSH terminal.
Map<String, String>? get envs;@JsonKey(fromJson: Spi.parseId) String get id;
Map<String, String>? get envs;@JsonKey(fromJson: Spi.parseId) String get id;/// Custom system type (unix or windows). If set, skip auto-detection.
@JsonKey(includeIfNull: false) SystemType? get customSystemType;
/// Create a copy of Spi
/// with the given fields replaced by the non-null parameter values.
@JsonKey(includeFromJson: false, includeToJson: false)
@@ -32,12 +33,12 @@ $SpiCopyWith<Spi> get copyWith => _$SpiCopyWithImpl<Spi>(this as Spi, _$identity
@override
bool operator ==(Object other) {
return identical(this, other) || (other.runtimeType == runtimeType&&other is Spi&&(identical(other.name, name) || other.name == name)&&(identical(other.ip, ip) || other.ip == ip)&&(identical(other.port, port) || other.port == port)&&(identical(other.user, user) || other.user == user)&&(identical(other.pwd, pwd) || other.pwd == pwd)&&(identical(other.keyId, keyId) || other.keyId == keyId)&&const DeepCollectionEquality().equals(other.tags, tags)&&(identical(other.alterUrl, alterUrl) || other.alterUrl == alterUrl)&&(identical(other.autoConnect, autoConnect) || other.autoConnect == autoConnect)&&(identical(other.jumpId, jumpId) || other.jumpId == jumpId)&&(identical(other.custom, custom) || other.custom == custom)&&(identical(other.wolCfg, wolCfg) || other.wolCfg == wolCfg)&&const DeepCollectionEquality().equals(other.envs, envs)&&(identical(other.id, id) || other.id == id));
return identical(this, other) || (other.runtimeType == runtimeType&&other is Spi&&(identical(other.name, name) || other.name == name)&&(identical(other.ip, ip) || other.ip == ip)&&(identical(other.port, port) || other.port == port)&&(identical(other.user, user) || other.user == user)&&(identical(other.pwd, pwd) || other.pwd == pwd)&&(identical(other.keyId, keyId) || other.keyId == keyId)&&const DeepCollectionEquality().equals(other.tags, tags)&&(identical(other.alterUrl, alterUrl) || other.alterUrl == alterUrl)&&(identical(other.autoConnect, autoConnect) || other.autoConnect == autoConnect)&&(identical(other.jumpId, jumpId) || other.jumpId == jumpId)&&(identical(other.custom, custom) || other.custom == custom)&&(identical(other.wolCfg, wolCfg) || other.wolCfg == wolCfg)&&const DeepCollectionEquality().equals(other.envs, envs)&&(identical(other.id, id) || other.id == id)&&(identical(other.customSystemType, customSystemType) || other.customSystemType == customSystemType));
}
@JsonKey(includeFromJson: false, includeToJson: false)
@override
int get hashCode => Object.hash(runtimeType,name,ip,port,user,pwd,keyId,const DeepCollectionEquality().hash(tags),alterUrl,autoConnect,jumpId,custom,wolCfg,const DeepCollectionEquality().hash(envs),id);
int get hashCode => Object.hash(runtimeType,name,ip,port,user,pwd,keyId,const DeepCollectionEquality().hash(tags),alterUrl,autoConnect,jumpId,custom,wolCfg,const DeepCollectionEquality().hash(envs),id,customSystemType);
@@ -48,7 +49,7 @@ abstract mixin class $SpiCopyWith<$Res> {
factory $SpiCopyWith(Spi value, $Res Function(Spi) _then) = _$SpiCopyWithImpl;
@useResult
$Res call({
String name, String ip, int port, String user, String? pwd,@JsonKey(name: 'pubKeyId') String? keyId, List<String>? tags, String? alterUrl, bool autoConnect, String? jumpId, ServerCustom? custom, WakeOnLanCfg? wolCfg, Map<String, String>? envs,@JsonKey(fromJson: Spi.parseId) String id
String name, String ip, int port, String user, String? pwd,@JsonKey(name: 'pubKeyId') String? keyId, List<String>? tags, String? alterUrl, bool autoConnect, String? jumpId, ServerCustom? custom, WakeOnLanCfg? wolCfg, Map<String, String>? envs,@JsonKey(fromJson: Spi.parseId) String id,@JsonKey(includeIfNull: false) SystemType? customSystemType
});
@@ -65,7 +66,7 @@ class _$SpiCopyWithImpl<$Res>
/// Create a copy of Spi
/// with the given fields replaced by the non-null parameter values.
@pragma('vm:prefer-inline') @override $Res call({Object? name = null,Object? ip = null,Object? port = null,Object? user = null,Object? pwd = freezed,Object? keyId = freezed,Object? tags = freezed,Object? alterUrl = freezed,Object? autoConnect = null,Object? jumpId = freezed,Object? custom = freezed,Object? wolCfg = freezed,Object? envs = freezed,Object? id = null,}) {
@pragma('vm:prefer-inline') @override $Res call({Object? name = null,Object? ip = null,Object? port = null,Object? user = null,Object? pwd = freezed,Object? keyId = freezed,Object? tags = freezed,Object? alterUrl = freezed,Object? autoConnect = null,Object? jumpId = freezed,Object? custom = freezed,Object? wolCfg = freezed,Object? envs = freezed,Object? id = null,Object? customSystemType = freezed,}) {
return _then(_self.copyWith(
name: null == name ? _self.name : name // ignore: cast_nullable_to_non_nullable
as String,ip: null == ip ? _self.ip : ip // ignore: cast_nullable_to_non_nullable
@@ -81,7 +82,8 @@ as String?,custom: freezed == custom ? _self.custom : custom // ignore: cast_nul
as ServerCustom?,wolCfg: freezed == wolCfg ? _self.wolCfg : wolCfg // ignore: cast_nullable_to_non_nullable
as WakeOnLanCfg?,envs: freezed == envs ? _self.envs : envs // ignore: cast_nullable_to_non_nullable
as Map<String, String>?,id: null == id ? _self.id : id // ignore: cast_nullable_to_non_nullable
as String,
as String,customSystemType: freezed == customSystemType ? _self.customSystemType : customSystemType // ignore: cast_nullable_to_non_nullable
as SystemType?,
));
}
@@ -92,7 +94,7 @@ as String,
@JsonSerializable(includeIfNull: false)
class _Spi extends Spi {
const _Spi({required this.name, required this.ip, required this.port, required this.user, this.pwd, @JsonKey(name: 'pubKeyId') this.keyId, final List<String>? tags, this.alterUrl, this.autoConnect = true, this.jumpId, this.custom, this.wolCfg, final Map<String, String>? envs, @JsonKey(fromJson: Spi.parseId) this.id = ''}): _tags = tags,_envs = envs,super._();
const _Spi({required this.name, required this.ip, required this.port, required this.user, this.pwd, @JsonKey(name: 'pubKeyId') this.keyId, final List<String>? tags, this.alterUrl, this.autoConnect = true, this.jumpId, this.custom, this.wolCfg, final Map<String, String>? envs, @JsonKey(fromJson: Spi.parseId) this.id = '', @JsonKey(includeIfNull: false) this.customSystemType}): _tags = tags,_envs = envs,super._();
factory _Spi.fromJson(Map<String, dynamic> json) => _$SpiFromJson(json);
@override final String name;
@@ -129,6 +131,8 @@ class _Spi extends Spi {
}
@override@JsonKey(fromJson: Spi.parseId) final String id;
/// Custom system type (unix or windows). If set, skip auto-detection.
@override@JsonKey(includeIfNull: false) final SystemType? customSystemType;
/// Create a copy of Spi
/// with the given fields replaced by the non-null parameter values.
@@ -143,12 +147,12 @@ Map<String, dynamic> toJson() {
@override
bool operator ==(Object other) {
return identical(this, other) || (other.runtimeType == runtimeType&&other is _Spi&&(identical(other.name, name) || other.name == name)&&(identical(other.ip, ip) || other.ip == ip)&&(identical(other.port, port) || other.port == port)&&(identical(other.user, user) || other.user == user)&&(identical(other.pwd, pwd) || other.pwd == pwd)&&(identical(other.keyId, keyId) || other.keyId == keyId)&&const DeepCollectionEquality().equals(other._tags, _tags)&&(identical(other.alterUrl, alterUrl) || other.alterUrl == alterUrl)&&(identical(other.autoConnect, autoConnect) || other.autoConnect == autoConnect)&&(identical(other.jumpId, jumpId) || other.jumpId == jumpId)&&(identical(other.custom, custom) || other.custom == custom)&&(identical(other.wolCfg, wolCfg) || other.wolCfg == wolCfg)&&const DeepCollectionEquality().equals(other._envs, _envs)&&(identical(other.id, id) || other.id == id));
return identical(this, other) || (other.runtimeType == runtimeType&&other is _Spi&&(identical(other.name, name) || other.name == name)&&(identical(other.ip, ip) || other.ip == ip)&&(identical(other.port, port) || other.port == port)&&(identical(other.user, user) || other.user == user)&&(identical(other.pwd, pwd) || other.pwd == pwd)&&(identical(other.keyId, keyId) || other.keyId == keyId)&&const DeepCollectionEquality().equals(other._tags, _tags)&&(identical(other.alterUrl, alterUrl) || other.alterUrl == alterUrl)&&(identical(other.autoConnect, autoConnect) || other.autoConnect == autoConnect)&&(identical(other.jumpId, jumpId) || other.jumpId == jumpId)&&(identical(other.custom, custom) || other.custom == custom)&&(identical(other.wolCfg, wolCfg) || other.wolCfg == wolCfg)&&const DeepCollectionEquality().equals(other._envs, _envs)&&(identical(other.id, id) || other.id == id)&&(identical(other.customSystemType, customSystemType) || other.customSystemType == customSystemType));
}
@JsonKey(includeFromJson: false, includeToJson: false)
@override
int get hashCode => Object.hash(runtimeType,name,ip,port,user,pwd,keyId,const DeepCollectionEquality().hash(_tags),alterUrl,autoConnect,jumpId,custom,wolCfg,const DeepCollectionEquality().hash(_envs),id);
int get hashCode => Object.hash(runtimeType,name,ip,port,user,pwd,keyId,const DeepCollectionEquality().hash(_tags),alterUrl,autoConnect,jumpId,custom,wolCfg,const DeepCollectionEquality().hash(_envs),id,customSystemType);
@@ -159,7 +163,7 @@ abstract mixin class _$SpiCopyWith<$Res> implements $SpiCopyWith<$Res> {
factory _$SpiCopyWith(_Spi value, $Res Function(_Spi) _then) = __$SpiCopyWithImpl;
@override @useResult
$Res call({
String name, String ip, int port, String user, String? pwd,@JsonKey(name: 'pubKeyId') String? keyId, List<String>? tags, String? alterUrl, bool autoConnect, String? jumpId, ServerCustom? custom, WakeOnLanCfg? wolCfg, Map<String, String>? envs,@JsonKey(fromJson: Spi.parseId) String id
String name, String ip, int port, String user, String? pwd,@JsonKey(name: 'pubKeyId') String? keyId, List<String>? tags, String? alterUrl, bool autoConnect, String? jumpId, ServerCustom? custom, WakeOnLanCfg? wolCfg, Map<String, String>? envs,@JsonKey(fromJson: Spi.parseId) String id,@JsonKey(includeIfNull: false) SystemType? customSystemType
});
@@ -176,7 +180,7 @@ class __$SpiCopyWithImpl<$Res>
/// Create a copy of Spi
/// with the given fields replaced by the non-null parameter values.
@override @pragma('vm:prefer-inline') $Res call({Object? name = null,Object? ip = null,Object? port = null,Object? user = null,Object? pwd = freezed,Object? keyId = freezed,Object? tags = freezed,Object? alterUrl = freezed,Object? autoConnect = null,Object? jumpId = freezed,Object? custom = freezed,Object? wolCfg = freezed,Object? envs = freezed,Object? id = null,}) {
@override @pragma('vm:prefer-inline') $Res call({Object? name = null,Object? ip = null,Object? port = null,Object? user = null,Object? pwd = freezed,Object? keyId = freezed,Object? tags = freezed,Object? alterUrl = freezed,Object? autoConnect = null,Object? jumpId = freezed,Object? custom = freezed,Object? wolCfg = freezed,Object? envs = freezed,Object? id = null,Object? customSystemType = freezed,}) {
return _then(_Spi(
name: null == name ? _self.name : name // ignore: cast_nullable_to_non_nullable
as String,ip: null == ip ? _self.ip : ip // ignore: cast_nullable_to_non_nullable
@@ -192,7 +196,8 @@ as String?,custom: freezed == custom ? _self.custom : custom // ignore: cast_nul
as ServerCustom?,wolCfg: freezed == wolCfg ? _self.wolCfg : wolCfg // ignore: cast_nullable_to_non_nullable
as WakeOnLanCfg?,envs: freezed == envs ? _self._envs : envs // ignore: cast_nullable_to_non_nullable
as Map<String, String>?,id: null == id ? _self.id : id // ignore: cast_nullable_to_non_nullable
as String,
as String,customSystemType: freezed == customSystemType ? _self.customSystemType : customSystemType // ignore: cast_nullable_to_non_nullable
as SystemType?,
));
}


@@ -27,6 +27,10 @@ _Spi _$SpiFromJson(Map<String, dynamic> json) => _Spi(
(k, e) => MapEntry(k, e as String),
),
id: json['id'] == null ? '' : Spi.parseId(json['id']),
customSystemType: $enumDecodeNullable(
_$SystemTypeEnumMap,
json['customSystemType'],
),
);
Map<String, dynamic> _$SpiToJson(_Spi instance) => <String, dynamic>{
@@ -44,4 +48,12 @@ Map<String, dynamic> _$SpiToJson(_Spi instance) => <String, dynamic>{
if (instance.wolCfg case final value?) 'wolCfg': value,
if (instance.envs case final value?) 'envs': value,
'id': instance.id,
if (_$SystemTypeEnumMap[instance.customSystemType] case final value?)
'customSystemType': value,
};
const _$SystemTypeEnumMap = {
SystemType.linux: 'linux',
SystemType.bsd: 'bsd',
SystemType.windows: 'windows',
};


@@ -1,3 +1,5 @@
import 'dart:convert';
import 'package:fl_lib/fl_lib.dart';
import 'package:server_box/data/model/app/shell_func.dart';
import 'package:server_box/data/model/server/amd.dart';
@@ -12,6 +14,8 @@ import 'package:server_box/data/model/server/nvdia.dart';
import 'package:server_box/data/model/server/sensors.dart';
import 'package:server_box/data/model/server/server.dart';
import 'package:server_box/data/model/server/system.dart';
import 'package:server_box/data/model/server/temp.dart';
import 'package:server_box/data/model/server/windows_parser.dart';
class ServerStatusUpdateReq {
final ServerStatus ss;
@@ -31,6 +35,7 @@ Future<ServerStatus> getStatus(ServerStatusUpdateReq req) async {
return switch (req.system) {
SystemType.linux => _getLinuxStatus(req),
SystemType.bsd => _getBsdStatus(req),
SystemType.windows => _getWindowsStatus(req),
};
}
@@ -39,8 +44,7 @@ Future<ServerStatus> getStatus(ServerStatusUpdateReq req) async {
Future<ServerStatus> _getLinuxStatus(ServerStatusUpdateReq req) async {
final segments = req.segments;
final time =
int.tryParse(StatusCmdType.time.find(segments)) ??
final time = int.tryParse(StatusCmdType.time.find(segments)) ??
DateTime.now().millisecondsSinceEpoch ~/ 1000;
try {
@@ -210,11 +214,11 @@ Future<ServerStatus> _getBsdStatus(ServerStatusUpdateReq req) async {
Loggers.app.warning(e, s);
}
// try {
// req.ss.mem = parseBsdMem(BSDStatusCmdType.mem.find(segments));
// } catch (e, s) {
// Loggers.app.warning(e, s);
// }
try {
req.ss.mem = parseBsdMemory(BSDStatusCmdType.mem.find(segments));
} catch (e, s) {
Loggers.app.warning(e, s);
}
try {
final uptime = _parseUpTime(BSDStatusCmdType.uptime.find(segments));
@@ -235,13 +239,48 @@ Future<ServerStatus> _getBsdStatus(ServerStatusUpdateReq req) async {
// raw:
// 19:39:15 up 61 days, 18:16, 1 user, load average: 0.00, 0.00, 0.00
// 19:39:15 up 1 day, 2:34, 1 user, load average: 0.00, 0.00, 0.00
// 19:39:15 up 2:34, 1 user, load average: 0.00, 0.00, 0.00
// 19:39:15 up 34 min, 1 user, load average: 0.00, 0.00, 0.00
String? _parseUpTime(String raw) {
final splitedUp = raw.split('up ');
if (splitedUp.length == 2) {
final splitedComma = splitedUp[1].split(', ');
if (splitedComma.length >= 2) {
return splitedComma[0];
final uptimePart = splitedUp[1];
final splitedComma = uptimePart.split(', ');
if (splitedComma.isEmpty) return null;
// Handle different uptime formats
final firstPart = splitedComma[0].trim();
// Case 1: "61 days" or "1 day" - need to get the time part from next segment
if (firstPart.contains('day')) {
if (splitedComma.length >= 2) {
final timePart = splitedComma[1].trim();
// Check if it's in HH:MM format
if (timePart.contains(':') &&
!timePart.contains('user') &&
!timePart.contains('load')) {
return '$firstPart, $timePart';
}
}
return firstPart;
}
// Case 2: "2:34" (hours:minutes) - already in good format
if (firstPart.contains(':') &&
!firstPart.contains('user') &&
!firstPart.contains('load')) {
return firstPart;
}
// Case 3: "34 min" - already in good format
if (firstPart.contains('min')) {
return firstPart;
}
// Fallback: return first part
return firstPart;
}
return null;
}
@@ -259,3 +298,406 @@ String? _parseHostName(String raw) {
if (raw.contains(ShellFunc.scriptFile)) return null;
return raw;
}
// Windows status parsing implementation
Future<ServerStatus> _getWindowsStatus(ServerStatusUpdateReq req) async {
final segments = req.segments;
final time = int.tryParse(WindowsStatusCmdType.time.find(segments)) ??
DateTime.now().millisecondsSinceEpoch ~/ 1000;
// Parse all different resource types using helper methods
_parseWindowsNetworkData(req, segments, time);
_parseWindowsSystemData(req, segments);
_parseWindowsHostData(req, segments);
_parseWindowsCpuData(req, segments);
_parseWindowsMemoryData(req, segments);
_parseWindowsDiskData(req, segments);
_parseWindowsUptimeData(req, segments);
_parseWindowsDiskIOData(req, segments, time);
_parseWindowsConnectionData(req, segments);
_parseWindowsBatteryData(req, segments);
_parseWindowsTemperatureData(req, segments);
_parseWindowsGpuData(req, segments);
WindowsParser.parseCustomCommands(req.ss, segments, req.customCmds, req.system.segmentsLen);
return req.ss;
}
/// Parse Windows network data
void _parseWindowsNetworkData(ServerStatusUpdateReq req, List<String> segments, int time) {
try {
final netRaw = WindowsStatusCmdType.net.find(segments);
if (netRaw.isNotEmpty &&
netRaw != 'null' &&
!netRaw.contains('network_error') &&
!netRaw.contains('error') &&
!netRaw.contains('Exception')) {
final netParts = _parseWindowsNetwork(netRaw, time);
if (netParts.isNotEmpty) {
req.ss.netSpeed.update(netParts);
}
}
} catch (e, s) {
Loggers.app.warning('Windows network parsing failed: $e', s);
}
}
/// Parse Windows system information
void _parseWindowsSystemData(ServerStatusUpdateReq req, List<String> segments) {
try {
final sys = WindowsStatusCmdType.sys.find(segments);
if (sys.isNotEmpty) {
req.ss.more[StatusCmdType.sys] = sys;
}
} catch (e, s) {
Loggers.app.warning('Windows system parsing failed: $e', s);
}
}
/// Parse Windows host information
void _parseWindowsHostData(ServerStatusUpdateReq req, List<String> segments) {
try {
final host = _parseHostName(WindowsStatusCmdType.host.find(segments));
if (host != null) {
req.ss.more[StatusCmdType.host] = host;
}
} catch (e, s) {
Loggers.app.warning('Windows host parsing failed: $e', s);
}
}
/// Parse Windows CPU data and brand information
void _parseWindowsCpuData(ServerStatusUpdateReq req, List<String> segments) {
try {
// Windows CPU parsing - JSON format from PowerShell
final cpuRaw = WindowsStatusCmdType.cpu.find(segments);
if (cpuRaw.isNotEmpty &&
cpuRaw != 'null' &&
!cpuRaw.contains('error') &&
!cpuRaw.contains('Exception')) {
final cpus = WindowsParser.parseCpu(cpuRaw, req.ss);
if (cpus.isNotEmpty) {
req.ss.cpu.update(cpus);
}
}
// Windows CPU brand parsing
final brandRaw = WindowsStatusCmdType.cpuBrand.find(segments);
if (brandRaw.isNotEmpty && brandRaw != 'null') {
req.ss.cpu.brand.clear();
req.ss.cpu.brand[brandRaw.trim()] = 1;
}
} catch (e, s) {
Loggers.app.warning('Windows CPU parsing failed: $e', s);
}
}
/// Parse Windows memory data
void _parseWindowsMemoryData(ServerStatusUpdateReq req, List<String> segments) {
try {
final memRaw = WindowsStatusCmdType.mem.find(segments);
if (memRaw.isNotEmpty &&
memRaw != 'null' &&
!memRaw.contains('error') &&
!memRaw.contains('Exception')) {
final memory = WindowsParser.parseMemory(memRaw);
if (memory != null) {
req.ss.mem = memory;
}
}
} catch (e, s) {
Loggers.app.warning('Windows memory parsing failed: $e', s);
}
}
/// Parse Windows disk data
void _parseWindowsDiskData(ServerStatusUpdateReq req, List<String> segments) {
try {
final diskRaw = WindowsStatusCmdType.disk.find(segments);
if (diskRaw.isNotEmpty && diskRaw != 'null') {
final disks = WindowsParser.parseDisks(diskRaw);
req.ss.disk = disks;
req.ss.diskUsage = DiskUsage.parse(disks);
}
} catch (e, s) {
Loggers.app.warning('Windows disk parsing failed: $e', s);
}
}
/// Parse Windows uptime data
void _parseWindowsUptimeData(ServerStatusUpdateReq req, List<String> segments) {
try {
final uptime = WindowsParser.parseUpTime(WindowsStatusCmdType.uptime.find(segments));
if (uptime != null) {
req.ss.more[StatusCmdType.uptime] = uptime;
}
} catch (e, s) {
Loggers.app.warning('Windows uptime parsing failed: $e', s);
}
}
/// Parse Windows disk I/O data
void _parseWindowsDiskIOData(ServerStatusUpdateReq req, List<String> segments, int time) {
try {
final diskIOraw = WindowsStatusCmdType.diskio.find(segments);
if (diskIOraw.isNotEmpty && diskIOraw != 'null') {
final diskio = _parseWindowsDiskIO(diskIOraw, time);
req.ss.diskIO.update(diskio);
}
} catch (e, s) {
Loggers.app.warning('Windows disk I/O parsing failed: $e', s);
}
}
/// Parse Windows connection data
void _parseWindowsConnectionData(ServerStatusUpdateReq req, List<String> segments) {
try {
final connStr = WindowsStatusCmdType.conn.find(segments);
final connCount = int.tryParse(connStr.trim());
if (connCount != null) {
req.ss.tcp = Conn(maxConn: 0, active: connCount, passive: 0, fail: 0);
}
} catch (e, s) {
Loggers.app.warning('Windows connection parsing failed: $e', s);
}
}
/// Parse Windows battery data
void _parseWindowsBatteryData(ServerStatusUpdateReq req, List<String> segments) {
try {
final batteryRaw = WindowsStatusCmdType.battery.find(segments);
if (batteryRaw.isNotEmpty && batteryRaw != 'null') {
final batteries = _parseWindowsBatteries(batteryRaw);
req.ss.batteries.clear();
if (batteries.isNotEmpty) {
req.ss.batteries.addAll(batteries);
}
}
} catch (e, s) {
Loggers.app.warning('Windows battery parsing failed: $e', s);
}
}
/// Parse Windows temperature data
void _parseWindowsTemperatureData(ServerStatusUpdateReq req, List<String> segments) {
try {
final tempRaw = WindowsStatusCmdType.temp.find(segments);
if (tempRaw.isNotEmpty && tempRaw != 'null') {
_parseWindowsTemperatures(req.ss.temps, tempRaw);
}
} catch (e, s) {
Loggers.app.warning('Windows temperature parsing failed: $e', s);
}
}
/// Parse Windows GPU data (NVIDIA/AMD)
void _parseWindowsGpuData(ServerStatusUpdateReq req, List<String> segments) {
try {
req.ss.nvidia = NvidiaSmi.fromXml(WindowsStatusCmdType.nvidia.find(segments));
} catch (e, s) {
Loggers.app.warning('Windows NVIDIA GPU parsing failed: $e', s);
}
try {
req.ss.amd = AmdSmi.fromJson(WindowsStatusCmdType.amd.find(segments));
} catch (e, s) {
Loggers.app.warning('Windows AMD GPU parsing failed: $e', s);
}
}
List<Battery> _parseWindowsBatteries(String raw) {
try {
final dynamic jsonData = json.decode(raw);
final List<Battery> batteries = [];
final batteryList = jsonData is List ? jsonData : [jsonData];
for (final batteryData in batteryList) {
final chargeRemaining =
batteryData['EstimatedChargeRemaining'] as int? ?? 0;
final batteryStatus = batteryData['BatteryStatus'] as int? ?? 0;
// Windows battery status: 1=Other, 2=Unknown, 3=Full, 4=Low,
// 5=Critical, 6=Charging, 7=ChargingAndLow, 8=ChargingAndCritical,
// 9=Undefined, 10=PartiallyCharged
final isCharging = batteryStatus == 6 ||
batteryStatus == 7 ||
batteryStatus == 8;
batteries.add(
Battery(
name: 'Battery',
percent: chargeRemaining,
status: isCharging
? BatteryStatus.charging
: BatteryStatus.discharging,
),
);
}
return batteries;
} catch (e) {
return [];
}
}
List<NetSpeedPart> _parseWindowsNetwork(String raw, int currentTime) {
try {
final dynamic jsonData = json.decode(raw);
final List<NetSpeedPart> netParts = [];
// PowerShell Get-Counter returns a structure with CounterSamples
if (jsonData is Map && jsonData.containsKey('CounterSamples')) {
final samples = jsonData['CounterSamples'] as List?;
if (samples != null && samples.length >= 2) {
// We need 2 samples to calculate speed (interval between them)
final Map<String, double> interfaceRx = {};
final Map<String, double> interfaceTx = {};
for (final sample in samples) {
final path = sample['Path']?.toString() ?? '';
final cookedValue = sample['CookedValue'] as num? ?? 0;
if (path.contains('Bytes Received/sec')) {
final interfaceName = _extractInterfaceName(path);
if (interfaceName.isNotEmpty) {
interfaceRx[interfaceName] = cookedValue.toDouble();
}
} else if (path.contains('Bytes Sent/sec')) {
final interfaceName = _extractInterfaceName(path);
if (interfaceName.isNotEmpty) {
interfaceTx[interfaceName] = cookedValue.toDouble();
}
}
}
// Create NetSpeedPart for each interface
for (final interfaceName in interfaceRx.keys) {
final rx = interfaceRx[interfaceName] ?? 0;
final tx = interfaceTx[interfaceName] ?? 0;
netParts.add(
NetSpeedPart(
interfaceName,
BigInt.from(rx.toInt()),
BigInt.from(tx.toInt()),
currentTime,
),
);
}
}
}
return netParts;
} catch (e) {
return [];
}
}
String _extractInterfaceName(String path) {
// Extract interface name from path like
// "\\Computer\\NetworkInterface(Interface Name)\\..."
final match = RegExp(r'\\NetworkInterface\(([^)]+)\)\\').firstMatch(path);
return match?.group(1) ?? '';
}
List<DiskIOPiece> _parseWindowsDiskIO(String raw, int currentTime) {
try {
final dynamic jsonData = json.decode(raw);
final List<DiskIOPiece> diskParts = [];
// PowerShell Get-Counter returns a structure with CounterSamples
if (jsonData is Map && jsonData.containsKey('CounterSamples')) {
final samples = jsonData['CounterSamples'] as List?;
if (samples != null) {
final Map<String, double> diskReads = {};
final Map<String, double> diskWrites = {};
for (final sample in samples) {
final path = sample['Path']?.toString() ?? '';
final cookedValue = sample['CookedValue'] as num? ?? 0;
if (path.contains('Disk Read Bytes/sec')) {
final diskName = _extractDiskName(path);
if (diskName.isNotEmpty) {
diskReads[diskName] = cookedValue.toDouble();
}
} else if (path.contains('Disk Write Bytes/sec')) {
final diskName = _extractDiskName(path);
if (diskName.isNotEmpty) {
diskWrites[diskName] = cookedValue.toDouble();
}
}
}
// Create DiskIOPiece for each disk - convert bytes to sectors
// (assuming 512 bytes per sector)
for (final diskName in diskReads.keys) {
final readBytes = diskReads[diskName] ?? 0;
final writeBytes = diskWrites[diskName] ?? 0;
final sectorsRead = (readBytes / 512).round();
final sectorsWrite = (writeBytes / 512).round();
diskParts.add(
DiskIOPiece(
dev: diskName,
sectorsRead: sectorsRead,
sectorsWrite: sectorsWrite,
time: currentTime,
),
);
}
}
}
return diskParts;
} catch (e) {
return [];
}
}
String _extractDiskName(String path) {
// Extract disk name from path like
// "\\Computer\\PhysicalDisk(Disk Name)\\..."
final match = RegExp(r'\\PhysicalDisk\(([^)]+)\)\\').firstMatch(path);
return match?.group(1) ?? '';
}
void _parseWindowsTemperatures(Temperatures temps, String raw) {
try {
// Handle error output
if (raw.contains('Error') ||
raw.contains('Exception') ||
raw.contains('The term')) {
return;
}
final dynamic jsonData = json.decode(raw);
final tempList = jsonData is List ? jsonData : [jsonData];
// Create fake type and value strings that the existing parse method can handle
final typeLines = <String>[];
final valueLines = <String>[];
for (int i = 0; i < tempList.length; i++) {
final item = tempList[i];
final typeName = item['InstanceName']?.toString() ?? 'Unknown';
final temperature = item['Temperature'] as num?;
if (temperature != null) {
// Convert to the format expected by the existing parse method
typeLines.add('/sys/class/thermal/thermal_zone$i/$typeName');
// Convert to millicelsius (multiply by 1000)
// as expected by Linux parsing
valueLines.add((temperature * 1000).round().toString());
}
}
if (typeLines.isNotEmpty && valueLines.isNotEmpty) {
temps.parse(typeLines.join('\n'), valueLines.join('\n'));
}
} catch (e) {
// If JSON parsing fails, ignore temperature data
}
}


@@ -35,23 +35,16 @@ extension SnippetX on Snippet {
static final fmtFinder = RegExp(r'\$\{[^{}]+\}');
String fmtWithSpi(Spi spi) {
return script.replaceAllMapped(
fmtFinder,
(match) {
final key = match.group(0);
final func = fmtArgs[key];
if (func != null) return func(spi);
// If not found, return the original content for further processing
return key ?? '';
},
);
return script.replaceAllMapped(fmtFinder, (match) {
final key = match.group(0);
final func = fmtArgs[key];
if (func != null) return func(spi);
// If not found, return the original content for further processing
return key ?? '';
});
}
Future<void> runInTerm(
Terminal terminal,
Spi spi, {
bool autoEnter = false,
}) async {
Future<void> runInTerm(Terminal terminal, Spi spi, {bool autoEnter = false}) async {
final argsFmted = fmtWithSpi(spi);
final matches = fmtFinder.allMatches(argsFmted);
@@ -119,11 +112,7 @@ extension SnippetX on Snippet {
if (autoEnter) terminal.keyInput(TerminalKey.enter);
}
Future<void> _doTermKeys(
Terminal terminal,
MapEntry<String, TerminalKey> termKey,
String key,
) async {
Future<void> _doTermKeys(Terminal terminal, MapEntry<String, TerminalKey> termKey, String key) async {
// if (termKey.value == TerminalKey.enter) {
// terminal.keyInput(TerminalKey.enter);
// return;
@@ -140,11 +129,7 @@ extension SnippetX on Snippet {
// `${ctrl+ad}` -> `ctrla + d`
final chars = key.substring(termKey.key.length + 1, key.length - 1);
if (chars.isEmpty) return;
final ok = terminal.charInput(
chars.codeUnitAt(0),
ctrl: ctrlAlt.ctrl,
alt: ctrlAlt.alt,
);
final ok = terminal.charInput(chars.codeUnitAt(0), ctrl: ctrlAlt.ctrl, alt: ctrlAlt.alt);
if (!ok) {
Loggers.app.warning('Failed to input: $key');
}
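
A sketch of the index arithmetic above, assuming the matched map key is `r'${ctrl'`:

```dart
void main() {
  const prefix = r'${ctrl';   // termKey.key from fmtTermKeys
  const key = r'${ctrl+ad}';  // raw token found in the snippet script
  final chars = key.substring(prefix.length + 1, key.length - 1);
  print(chars);               // ad
  print(chars.codeUnitAt(0)); // 97 ('a'), the unit sent with ctrl held
}
```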
@@ -166,10 +151,7 @@ extension SnippetX on Snippet {
};
/// r'${ctrl+ad}' -> TerminalKey.control, a, d
static final fmtTermKeys = {
r'${ctrl': TerminalKey.control,
r'${alt': TerminalKey.alt,
};
static final fmtTermKeys = {r'${ctrl': TerminalKey.control, r'${alt': TerminalKey.alt};
}
class SnippetResult {
@@ -177,11 +159,7 @@ class SnippetResult {
final String result;
final Duration time;
SnippetResult({
required this.dest,
required this.result,
required this.time,
});
SnippetResult({required this.dest, required this.result, required this.time});
}
typedef SnippetFuncCtx = ({Terminal term, String raw});
@@ -193,10 +171,7 @@ abstract final class SnippetFuncs {
r'${enter': SnippetFuncs.enter,
};
static const help = {
'sleep': 'Sleep for a few seconds',
'enter': 'Enter a few times',
};
static const help = {'sleep': 'Sleep for a few seconds', 'enter': 'Enter a few times'};
static FutureOr<void> sleep(SnippetFuncCtx ctx) async {
final seconds = int.tryParse(ctx.raw);

View File

@@ -1,21 +1,55 @@
import 'package:fl_lib/fl_lib.dart';
import 'package:server_box/data/model/app/shell_func.dart';
enum SystemType {
linux._(linuxSign),
bsd._(bsdSign),
;
linux(linuxSign),
bsd(bsdSign),
windows(windowsSign);
final String value;
final String? value;
const SystemType._(this.value);
const SystemType([this.value]);
static const linuxSign = '__linux';
static const bsdSign = '__bsd';
static const windowsSign = '__windows';
/// Used for parsing system types from shell output.
///
/// This method looks for specific system signatures in the shell output
/// and returns the corresponding SystemType. If no signature is found,
/// it defaults to Linux but logs the detection failure for debugging.
static SystemType parse(String value) {
// Log the raw value for debugging purposes (truncated to avoid spam)
final truncatedValue = value.length > 100
? '${value.substring(0, 100)}...'
: value;
if (value.contains(windowsSign)) {
Loggers.app.info('System detected as Windows from signature in: $truncatedValue');
return SystemType.windows;
}
if (value.contains(bsdSign)) {
Loggers.app.info('System detected as BSD from signature in: $truncatedValue');
return SystemType.bsd;
}
// Log when falling back to Linux detection
if (value.trim().isEmpty) {
Loggers.app.warning(
'System detection received empty input, defaulting to Linux. '
'This may indicate a script execution issue.'
);
} else if (!value.contains(linuxSign)) {
Loggers.app.warning(
'System detection could not find any known signatures (Windows: $windowsSign, '
'BSD: $bsdSign, Linux: $linuxSign) in output: "$truncatedValue". '
'Defaulting to Linux, but this may cause incorrect parsing.'
);
} else {
Loggers.app.info('System detected as Linux from signature in: $truncatedValue');
}
return SystemType.linux;
}
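
A minimal usage sketch of the signature detection, assuming `SystemType` is imported; the shell output below is fabricated for illustration:

```dart
void main() {
  // Fabricated output from the detection script, containing the Windows marker.
  const raw = 'Windows PowerShell\n__windows\n';
  final type = SystemType.parse(raw);
  print(type); // SystemType.windows
}
```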
@@ -27,6 +61,8 @@ enum SystemType {
return StatusCmdType.values.length;
case SystemType.bsd:
return BSDStatusCmdType.values.length;
case SystemType.windows:
return WindowsStatusCmdType.values.length;
}
}
}

View File

@@ -8,26 +8,24 @@ enum SystemdUnitFunc {
reload,
enable,
disable,
status,
;
status;
IconData get icon => switch (this) {
start => Icons.play_arrow,
stop => Icons.stop,
restart => Icons.refresh,
reload => Icons.refresh,
enable => Icons.check,
disable => Icons.close,
status => Icons.info,
};
start => Icons.play_arrow,
stop => Icons.stop,
restart => Icons.refresh,
reload => Icons.refresh,
enable => Icons.check,
disable => Icons.close,
status => Icons.info,
};
}
enum SystemdUnitType {
service,
socket,
mount,
timer,
;
timer;
static SystemdUnitType? fromString(String? value) {
return values.firstWhereOrNull((e) => e.name == value?.toLowerCase());
@@ -36,13 +34,12 @@ enum SystemdUnitType {
enum SystemdUnitScope {
system,
user,
;
user;
Color? get color => switch (this) {
system => Colors.red,
_ => null,
};
system => Colors.red,
_ => null,
};
String getCmdPrefix(bool isRoot) {
if (this == system) {
@@ -57,17 +54,16 @@ enum SystemdUnitState {
inactive,
failed,
activating,
deactivating,
;
deactivating;
static SystemdUnitState? fromString(String? value) {
return values.firstWhereOrNull((e) => e.name == value?.toLowerCase());
}
Color? get color => switch (this) {
failed => Colors.red,
_ => null,
};
failed => Colors.red,
_ => null,
};
}
final class SystemdUnit {
@@ -85,10 +81,7 @@ final class SystemdUnit {
required this.state,
});
String getCmd({
required SystemdUnitFunc func,
required bool isRoot,
}) {
String getCmd({required SystemdUnitFunc func, required bool isRoot}) {
final prefix = scope.getCmdPrefix(isRoot);
return '$prefix ${func.name} $name';
}

View File

@@ -40,11 +40,7 @@ class Fifo<T> extends ListBase<T> {
abstract class TimeSeq<T extends List<TimeSeqIface>> extends Fifo<T> {
/// Due to the design, at least two elements are required, otherwise [pre] /
/// [now] will throw.
TimeSeq(
T init1,
T init2, {
super.capacity,
}) : super(list: [init1, init2]);
TimeSeq(T init1, T init2, {super.capacity}) : super(list: [init1, init2]);
T get pre {
return _list[length - 2];

View File

@@ -0,0 +1,258 @@
import 'dart:convert';
import 'package:fl_lib/fl_lib.dart';
import 'package:intl/intl.dart';
import 'package:server_box/data/model/server/cpu.dart';
import 'package:server_box/data/model/server/disk.dart';
import 'package:server_box/data/model/server/memory.dart';
import 'package:server_box/data/model/server/server.dart';
/// Windows-specific status parsing utilities
///
/// This module handles parsing of Windows PowerShell command outputs
/// for server monitoring. It extracts the Windows parsing logic
/// to improve maintainability and readability.
class WindowsParser {
const WindowsParser._();
/// Parse Windows custom commands from segments
static void parseCustomCommands(
ServerStatus serverStatus,
List<String> segments,
Map<String, String> customCmds,
int systemSegmentsLength,
) {
try {
for (int idx = 0; idx < customCmds.length; idx++) {
final key = customCmds.keys.elementAt(idx);
// Ensure we don't go out of bounds when accessing segments
final segmentIndex = idx + systemSegmentsLength;
if (segmentIndex < segments.length) {
final value = segments[segmentIndex];
serverStatus.customCmds[key] = value;
} else {
Loggers.app.warning(
'Windows custom commands: segment index $segmentIndex out of bounds '
'(segments length: ${segments.length}, systemSegmentsLength: $systemSegmentsLength)'
);
}
}
} catch (e, s) {
Loggers.app.warning('Windows custom commands parsing failed: $e', s);
}
}
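
To make the index mapping concrete, a tiny worked example with hypothetical custom command names:

```dart
void main() {
  // With 10 system segments, custom outputs follow at indices 10, 11, ...
  const systemSegmentsLength = 10;
  final customKeys = ['gpu', 'docker_ps']; // hypothetical custom command keys
  for (var idx = 0; idx < customKeys.length; idx++) {
    print('${customKeys[idx]} -> segments[${idx + systemSegmentsLength}]');
  }
  // gpu -> segments[10]
  // docker_ps -> segments[11]
}
```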
/// Parse Windows uptime from PowerShell output
static String? parseUpTime(String raw) {
try {
// Clean the input - trim whitespace and get the first non-empty line
final cleanedInput = raw.trim().split('\n')
.where((line) => line.trim().isNotEmpty)
.firstOrNull;
if (cleanedInput == null || cleanedInput.isEmpty) {
Loggers.app.warning('Windows uptime parsing: empty or null input');
return null;
}
// Try multiple date formats to handle different Windows locale/version outputs
final formatters = [
DateFormat('EEEE, MMMM d, yyyy h:mm:ss a', 'en_US'), // Original format
DateFormat('EEEE, MMMM dd, yyyy h:mm:ss a', 'en_US'), // Double-digit day
DateFormat('EEE, MMM d, yyyy h:mm:ss a', 'en_US'), // Shortened format
DateFormat('EEE, MMM dd, yyyy h:mm:ss a', 'en_US'), // Shortened with double-digit day
DateFormat('M/d/yyyy h:mm:ss a', 'en_US'), // Short US format
DateFormat('MM/dd/yyyy h:mm:ss a', 'en_US'), // Short US format with zero padding
DateFormat('d/M/yyyy h:mm:ss a', 'en_US'), // Short European format
DateFormat('dd/MM/yyyy h:mm:ss a', 'en_US'), // Short European format with zero padding
];
DateTime? dateTime;
for (final formatter in formatters) {
dateTime = formatter.tryParseLoose(cleanedInput);
if (dateTime != null) break;
}
if (dateTime == null) {
Loggers.app.warning('Windows uptime parsing: could not parse date format for: $cleanedInput');
return null;
}
final now = DateTime.now();
final uptime = now.difference(dateTime);
// Validate that the uptime is reasonable (not negative, not too far in the future)
if (uptime.isNegative || uptime.inDays > 3650) { // More than 10 years seems unreasonable
Loggers.app.warning('Windows uptime parsing: unreasonable uptime calculated: ${uptime.inDays} days for date: $cleanedInput');
return null;
}
final days = uptime.inDays;
final hours = uptime.inHours % 24;
final minutes = uptime.inMinutes % 60;
if (days > 0) {
return '$days days, $hours:${minutes.toString().padLeft(2, '0')}';
} else {
return '$hours:${minutes.toString().padLeft(2, '0')}';
}
} catch (e, s) {
Loggers.app.warning('Windows uptime parsing failed: $e for input: $raw', s);
return null;
}
}
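
A worked example of the duration formatting above, with made-up boot and current times (the locale-dependent parsing step itself is elided):

```dart
void main() {
  // Hypothetical boot time and a fixed "now" for a deterministic result.
  final boot = DateTime(2025, 1, 6, 8, 30);
  final now = DateTime(2025, 1, 8, 10, 45);
  final uptime = now.difference(boot);
  final days = uptime.inDays;            // 2
  final hours = uptime.inHours % 24;     // 2
  final minutes = uptime.inMinutes % 60; // 15
  print('$days days, $hours:${minutes.toString().padLeft(2, '0')}');
  // 2 days, 2:15
}
```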
/// Parse Windows CPU information from PowerShell output
static List<SingleCpuCore> parseCpu(String raw, ServerStatus serverStatus) {
try {
final dynamic jsonData = json.decode(raw);
final List<SingleCpuCore> cpus = [];
if (jsonData is List) {
for (int i = 0; i < jsonData.length; i++) {
final cpu = jsonData[i];
final loadPercentage = cpu['LoadPercentage'] ?? 0;
final usage = loadPercentage as int;
final idle = 100 - usage;
// Get previous CPU data to calculate cumulative values
final prevCpus = serverStatus.cpu.now;
final prevCpu = i < prevCpus.length ? prevCpus[i] : null;
// LIMITATION: Windows CPU counters approach
// PowerShell provides LoadPercentage as instantaneous percentage, not cumulative time.
// We simulate cumulative counters by adding current percentages to previous totals.
// This approach has limitations:
// 1. Not as accurate as true cumulative time counters (Linux /proc/stat)
// 2. May drift over time with variable polling intervals
// 3. Results depend on consistent polling frequency
// However, this allows compatibility with existing delta-based CPU calculation logic.
final newUser = (prevCpu?.user ?? 0) + usage;
final newIdle = (prevCpu?.idle ?? 0) + idle;
cpus.add(
SingleCpuCore(
'cpu$i',
newUser, // cumulative user time
0, // sys (not available)
0, // nice (not available)
newIdle, // cumulative idle time
0, // iowait (not available)
0, // irq (not available)
0, // softirq (not available)
),
);
}
} else if (jsonData is Map) {
// Single CPU core
final loadPercentage = jsonData['LoadPercentage'] ?? 0;
final usage = loadPercentage as int;
final idle = 100 - usage;
// Get previous CPU data to calculate cumulative values
final prevCpus = serverStatus.cpu.now;
final prevCpu = prevCpus.isNotEmpty ? prevCpus[0] : null;
// LIMITATION: See comment above for Windows CPU counter limitations
final newUser = (prevCpu?.user ?? 0) + usage;
final newIdle = (prevCpu?.idle ?? 0) + idle;
cpus.add(
SingleCpuCore(
'cpu0',
newUser, // cumulative user time
0, // sys
0, // nice
newIdle, // cumulative idle time
0, // iowait
0, // irq
0, // softirq
),
);
}
return cpus;
} catch (e) {
return [];
}
}
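
To see why the simulated cumulative counters still produce a sensible figure under the existing delta-based calculation, here is an illustrative poll-to-poll walkthrough (all numbers made up):

```dart
void main() {
  // Previous poll left cumulative user=150, idle=250; current LoadPercentage=30.
  const prevUser = 150, prevIdle = 250;
  const load = 30;
  final newUser = prevUser + load;          // 180
  final newIdle = prevIdle + (100 - load);  // 320
  final deltaUser = newUser - prevUser;     // 30
  final deltaTotal = (newUser + newIdle) - (prevUser + prevIdle); // 100
  print('usage: ${deltaUser * 100 / deltaTotal}%'); // usage: 30.0%
}
```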
/// Parse Windows memory information from PowerShell output
///
/// NOTE: Windows Win32_OperatingSystem properties TotalVisibleMemorySize
/// and FreePhysicalMemory are returned in KB units.
static Memory? parseMemory(String raw) {
try {
final dynamic jsonData = json.decode(raw);
final data = jsonData is List ? jsonData.first : jsonData;
// Win32_OperatingSystem properties are in KB
final totalKB = data['TotalVisibleMemorySize'] as int? ?? 0;
final freeKB = data['FreePhysicalMemory'] as int? ?? 0;
return Memory(
total: totalKB,
free: freeKB,
avail: freeKB, // only FreePhysicalMemory is exposed here, so free and available are treated the same
);
} catch (e) {
return null;
}
}
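
A quick check of the KB assumption with a hypothetical 16 GiB machine that is half free (assumes `WindowsParser` and `Memory` are imported, with fields kept in KB as above):

```dart
void main() {
  // TotalVisibleMemorySize / FreePhysicalMemory are KB: 16 GiB total, 8 GiB free.
  final mem = WindowsParser.parseMemory(
    '{"TotalVisibleMemorySize": 16777216, "FreePhysicalMemory": 8388608}',
  );
  print(mem?.total); // 16777216
  print(mem?.avail); // 8388608
}
```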
/// Parse Windows disk information from PowerShell output
static List<Disk> parseDisks(String raw) {
try {
final dynamic jsonData = json.decode(raw);
final List<Disk> disks = [];
final diskList = jsonData is List ? jsonData : [jsonData];
for (final diskData in diskList) {
final deviceId = diskData['DeviceID']?.toString() ?? '';
final size =
BigInt.tryParse(diskData['Size']?.toString() ?? '0') ?? BigInt.zero;
final freeSpace =
BigInt.tryParse(diskData['FreeSpace']?.toString() ?? '0') ??
BigInt.zero;
final fileSystem = diskData['FileSystem']?.toString() ?? '';
// Validate all required fields
final hasRequiredFields = deviceId.isNotEmpty &&
size != BigInt.zero &&
freeSpace != BigInt.zero &&
fileSystem.isNotEmpty;
if (!hasRequiredFields) {
Loggers.app.warning('Windows disk parsing: skipping disk with missing required fields. '
'DeviceID: $deviceId, Size: $size, FreeSpace: $freeSpace, FileSystem: $fileSystem');
continue;
}
final sizeKB = size ~/ BigInt.from(1024);
final freeKB = freeSpace ~/ BigInt.from(1024);
final usedKB = sizeKB - freeKB;
final usedPercent = sizeKB > BigInt.zero
? ((usedKB * BigInt.from(100)) ~/ sizeKB).toInt()
: 0;
disks.add(
Disk(
path: deviceId,
fsTyp: fileSystem,
size: sizeKB,
avail: freeKB,
used: usedKB,
usedPercent: usedPercent,
mount: deviceId, // Windows uses drive letters as mount points
),
);
}
return disks;
} catch (e) {
Loggers.app.warning('Windows disk parsing failed: $e');
return [];
}
}
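
And the integer math above on a made-up 100 GiB volume with 25 GiB free:

```dart
void main() {
  final sizeKB = BigInt.from(100 * 1024 * 1024); // 104857600 KB (100 GiB)
  final freeKB = BigInt.from(25 * 1024 * 1024);  // 26214400 KB (25 GiB)
  final usedKB = sizeKB - freeKB;                // 78643200 KB
  final usedPercent = ((usedKB * BigInt.from(100)) ~/ sizeKB).toInt();
  print(usedPercent); // 75
}
```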
}

View File

@@ -11,11 +11,7 @@ final class WakeOnLanCfg {
final String ip;
final String? pwd;
const WakeOnLanCfg({
required this.mac,
required this.ip,
this.pwd,
});
const WakeOnLanCfg({required this.mac, required this.ip, this.pwd});
(Object?, bool) validate() {
final macValidation = MACAddress.validate(mac);
@@ -39,10 +35,7 @@ final class WakeOnLanCfg {
final mac_ = MACAddress(mac);
final pwd_ = pwd != null ? SecureONPassword(pwd!) : null;
final obj = WakeOnLAN(ip_, mac_, password: pwd_);
return obj.wake(
repeat: 3,
repeatDelay: const Duration(milliseconds: 500),
);
return obj.wake(repeat: 3, repeatDelay: const Duration(milliseconds: 500));
}
factory WakeOnLanCfg.fromJson(Map<String, dynamic> json) => _$WakeOnLanCfgFromJson(json);