proposal: enhance AI functionalities

Fixes [#1038]
lollipopkit🏳️‍⚧️
2026-01-30 15:24:56 +08:00
parent 6338c6ce6b
commit 28ac6145c4
18 changed files with 1308 additions and 482 deletions

View File

@@ -9,6 +9,7 @@ import 'package:server_box/data/res/build_data.dart';
import 'package:server_box/data/res/store.dart';
import 'package:server_box/generated/l10n/l10n.dart';
import 'package:server_box/view/page/home.dart';
import 'package:server_box/view/widget/ai/ai_fab_overlay.dart';
part 'intro.dart';
@@ -108,7 +109,10 @@ class _MyAppState extends State<MyApp> {
return MaterialApp(
key: ValueKey(locale),
navigatorKey: AppNavigator.key,
builder: ResponsivePoints.builder,
builder: (context, child) {
final responsiveChild = ResponsivePoints.builder(context, child);
return AiFabOverlay(child: responsiveChild);
},
locale: locale,
localizationsDelegates: const [LibLocalizations.delegate, ...AppLocalizations.localizationsDelegates],
supportedLocales: AppLocalizations.supportedLocales,

View File

@@ -30,11 +30,27 @@ class AskAiCommand {
required this.command,
this.description = '',
this.toolName,
this.risk,
this.needsConfirmation,
this.why,
this.prechecks,
});
final String command;
final String description;
final String? toolName;
/// Optional risk hint returned by the model/tool, e.g. `low|medium|high`.
final String? risk;
/// Optional explicit confirmation requirement returned by the model/tool.
final bool? needsConfirmation;
/// Optional explanation for why this command is suggested.
final String? why;
/// Optional pre-check commands / steps.
final List<String>? prechecks;
}
@immutable

View File

@@ -8,7 +8,8 @@ enum ContainerMenu {
restart,
rm,
logs,
terminal
terminal,
askAi
//stats,
;
@@ -20,10 +21,11 @@ enum ContainerMenu {
rm,
logs,
terminal,
askAi,
//stats,
];
}
return [start, rm, logs];
return [start, rm, logs, askAi];
}
IconData get icon => switch (this) {
@@ -33,6 +35,7 @@ enum ContainerMenu {
ContainerMenu.rm => Icons.delete,
ContainerMenu.logs => Icons.logo_dev,
ContainerMenu.terminal => Icons.terminal,
ContainerMenu.askAi => Icons.smart_toy_outlined,
// DockerMenuType.stats => Icons.bar_chart,
};
@@ -43,6 +46,7 @@ enum ContainerMenu {
ContainerMenu.rm => libL10n.delete,
ContainerMenu.logs => libL10n.log,
ContainerMenu.terminal => l10n.terminal,
ContainerMenu.askAi => l10n.askAi,
// DockerMenuType.stats => s.stats,
};
}

View File

@@ -0,0 +1,64 @@
import 'package:flutter_riverpod/flutter_riverpod.dart';
import 'package:meta/meta.dart';
@immutable
class AiContextSnapshot {
const AiContextSnapshot({
required this.title,
required this.scenario,
required this.blocks,
this.spiId,
this.updatedAtMs,
});
final String title;
final String scenario;
final List<String> blocks;
final String? spiId;
final int? updatedAtMs;
AiContextSnapshot copyWith({
String? title,
String? scenario,
List<String>? blocks,
String? spiId,
int? updatedAtMs,
}) {
return AiContextSnapshot(
title: title ?? this.title,
scenario: scenario ?? this.scenario,
blocks: blocks ?? this.blocks,
spiId: spiId ?? this.spiId,
updatedAtMs: updatedAtMs ?? this.updatedAtMs,
);
}
}
final aiContextProvider = NotifierProvider<AiContextNotifier, AiContextSnapshot>(AiContextNotifier.new);
class AiContextNotifier extends Notifier<AiContextSnapshot> {
@override
AiContextSnapshot build() {
return const AiContextSnapshot(
title: 'Ask AI',
scenario: 'general',
blocks: [],
updatedAtMs: 0,
);
}
void setContext({
required String title,
required String scenario,
required List<String> blocks,
String? spiId,
}) {
state = AiContextSnapshot(
title: title,
scenario: scenario,
blocks: blocks,
spiId: spiId,
updatedAtMs: DateTime.now().millisecondsSinceEpoch,
);
}
}
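
Below is a minimal usage sketch, not part of this commit: a page publishes its state through setContext so the global AI FAB added later in this diff can pick it up. The helper name, page wiring, and block text are hypothetical; only aiContextProvider and setContext come from the file above.

import 'package:flutter_riverpod/flutter_riverpod.dart';
import 'package:server_box/data/provider/ai/ai_context.dart';

/// Hypothetical helper: publish the current page's state for the global AI FAB.
/// Any WidgetRef in scope works; `spiId` ties suggestions back to a server.
void publishSystemdContext(WidgetRef ref, {required String spiId, required int unitCount}) {
  ref.read(aiContextProvider.notifier).setContext(
        title: 'Systemd',
        scenario: 'systemd', // resolved later via AskAiScenarioX.tryParse
        blocks: ['[Systemd]\nitems: $unitCount'],
        spiId: spiId,
      );
}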

View File

@@ -0,0 +1,186 @@
import 'package:meta/meta.dart';
import 'package:server_box/data/model/server/server_private_info.dart';
@immutable
enum AiRedactionMode {
placeholder,
none,
}
@immutable
enum AiCommandRisk {
low,
medium,
high,
}
extension AiCommandRiskX on AiCommandRisk {
static AiCommandRisk? tryParse(Object? raw) {
if (raw is! String) return null;
final s = raw.trim().toLowerCase();
return switch (s) {
'low' => AiCommandRisk.low,
'medium' => AiCommandRisk.medium,
'high' => AiCommandRisk.high,
_ => null,
};
}
AiCommandRisk max(AiCommandRisk other) => index >= other.index ? this : other;
}
abstract final class AiSafety {
const AiSafety._();
static String redact(
String input, {
AiRedactionMode mode = AiRedactionMode.placeholder,
Spi? spi,
}) {
if (mode == AiRedactionMode.none) return input;
if (input.isEmpty) return input;
var out = input;
out = _redactPrivateKeyBlocks(out);
out = _redactBearerTokens(out);
out = _redactApiKeys(out);
if (spi != null) {
out = _redactSpiIdentity(out, spi);
}
return out;
}
static List<String> redactBlocks(
List<String> blocks, {
AiRedactionMode mode = AiRedactionMode.placeholder,
Spi? spi,
}) {
if (blocks.isEmpty) return const [];
return [
for (final b in blocks) redact(b, mode: mode, spi: spi),
];
}
static AiCommandRisk classifyRisk(String command) {
final raw = command.trim();
if (raw.isEmpty) return AiCommandRisk.low;
final s = raw.toLowerCase();
// High-risk destructive patterns.
if (_rxForkBomb.hasMatch(s)) return AiCommandRisk.high;
if (_rxMkfs.hasMatch(s)) return AiCommandRisk.high;
if (_rxDdToBlockDevice.hasMatch(s)) return AiCommandRisk.high;
if (_rxRmRf.hasMatch(s)) return AiCommandRisk.high;
if (_rxChmodChownRoot.hasMatch(s)) return AiCommandRisk.high;
if (_rxIptablesFlush.hasMatch(s) || _rxNftFlush.hasMatch(s)) return AiCommandRisk.high;
if (_rxDockerSystemPruneAll.hasMatch(s) || _rxPodmanSystemPruneAll.hasMatch(s)) return AiCommandRisk.high;
// Medium-risk operational patterns.
if (_rxRebootShutdown.hasMatch(s)) return AiCommandRisk.medium;
if (_rxSystemctlStopRestart.hasMatch(s)) return AiCommandRisk.medium;
if (_rxKill.hasMatch(s)) return AiCommandRisk.medium;
if (_rxDockerStopRm.hasMatch(s) || _rxPodmanStopRm.hasMatch(s)) return AiCommandRisk.medium;
return AiCommandRisk.low;
}
static String _redactPrivateKeyBlocks(String input) {
return input.replaceAllMapped(_rxPrivateKeyBlock, (_) => '<PRIVATE_KEY_BLOCK>');
}
static String _redactBearerTokens(String input) {
var out = input;
out = out.replaceAllMapped(
_rxAuthorizationBearer,
(m) => '${m.group(1)}Bearer <TOKEN>',
);
out = out.replaceAllMapped(
_rxBearerInline,
(m) => 'Bearer <TOKEN>',
);
return out;
}
static String _redactApiKeys(String input) {
// Keep it conservative; only match common patterns with clear prefixes.
var out = input;
out = out.replaceAllMapped(_rxOpenAiKey, (_) => '<API_KEY>');
out = out.replaceAllMapped(_rxAwsAccessKeyId, (_) => '<AWS_ACCESS_KEY_ID>');
return out;
}
static String _redactSpiIdentity(String input, Spi spi) {
var out = input;
final ip = spi.ip;
final user = spi.user;
final port = spi.port;
if (user.isNotEmpty && ip.isNotEmpty) {
out = out.replaceAll('$user@$ip:$port', '<USER_AT_HOST_PORT>');
out = out.replaceAll('$user@$ip', '<USER_AT_HOST>');
}
if (ip.isNotEmpty) {
out = out.replaceAll(ip, '<IP>');
}
if (user.isNotEmpty) {
out = out.replaceAll(user, '<USER>');
}
return out;
}
}
final _rxPrivateKeyBlock = RegExp(
r'-----BEGIN [A-Z0-9 ]*PRIVATE KEY-----[\s\S]*?-----END [A-Z0-9 ]*PRIVATE KEY-----',
multiLine: true,
);
final _rxAuthorizationBearer = RegExp(
r'(authorization\s*:\s*)bearer\s+[^\s\n\r]+',
multiLine: true,
caseSensitive: false,
);
final _rxBearerInline = RegExp(
r'\bbearer\s+[^\s\n\r]+',
caseSensitive: false,
);
final _rxOpenAiKey = RegExp(r'\bsk-[A-Za-z0-9]{16,}\b');
final _rxAwsAccessKeyId = RegExp(r'\bAKIA[0-9A-Z]{16}\b');
final _rxForkBomb = RegExp(r':\s*\(\s*\)\s*\{\s*:\s*\|\s*:\s*&\s*\}\s*;\s*:');
final _rxMkfs = RegExp(r'\bmkfs(\.[a-z0-9_-]+)?\b');
final _rxDdToBlockDevice = RegExp(r'\bdd\b[^\n\r]*\bof\s*=\s*/dev/');
final _rxRmRf = RegExp(r'\brm\b[^\n\r]*\s-[a-z-]*r[a-z-]*f[a-z-]*\b');
final _rxChmodChownRoot = RegExp(r'\b(chmod|chown)\b[^\n\r]*\s-\w*r\w*\b[^\n\r]*\s/\b');
final _rxIptablesFlush = RegExp(r'\biptables\b[^\n\r]*(\s-(f|x)\b|\s--flush\b)');
final _rxNftFlush = RegExp(r'\bnft\b[^\n\r]*\bflush\s+ruleset\b');
final _rxDockerSystemPruneAll = RegExp(r'\bdocker\b[^\n\r]*\bsystem\s+prune\b[^\n\r]*\s-a\b');
final _rxPodmanSystemPruneAll = RegExp(r'\bpodman\b[^\n\r]*\bsystem\s+prune\b[^\n\r]*\s-a\b');
final _rxRebootShutdown = RegExp(r'\b(reboot|poweroff|halt|shutdown)\b');
final _rxSystemctlStopRestart = RegExp(r'\bsystemctl\b[^\n\r]*\b(stop|restart)\b');
final _rxKill = RegExp(r'\b(kill|killall|pkill)\b');
final _rxDockerStopRm = RegExp(r'\bdocker\b[^\n\r]*\b(stop|rm)\b');
final _rxPodmanStopRm = RegExp(r'\bpodman\b[^\n\r]*\b(stop|rm)\b');
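
A short sketch of the intended call pattern for the helpers above (assumed usage, not code from this commit): redact context blocks before sending them to the model, then classify a suggested command when no risk hint is returned. The sample inputs are made up; the expected results follow from the regexes defined above.

import 'package:server_box/data/provider/ai/ai_safety.dart';

void main() {
  // Without an Spi, only generic secrets (key blocks, tokens, API keys) are replaced.
  final blocks = AiSafety.redactBlocks([
    'Authorization: Bearer eyJhbGciOiJIUzI1NiJ9.payload.sig',
    '[Session]\nserver: root@203.0.113.7:22',
  ]);
  print(blocks.first); // Authorization: Bearer <TOKEN>

  // Heuristic risk classification, used when the model returns no `risk` hint.
  assert(AiSafety.classifyRisk('ls -la') == AiCommandRisk.low);
  assert(AiSafety.classifyRisk('systemctl restart nginx') == AiCommandRisk.medium);
  assert(AiSafety.classifyRisk('rm -rf /var/lib/docker') == AiCommandRisk.high);
}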

View File

@@ -21,7 +21,8 @@ class AskAiRepository {
/// Streams the AI response using the configured endpoint.
Stream<AskAiEvent> ask({
required String selection,
required AskAiScenario scenario,
required List<String> contextBlocks,
String? localeHint,
List<AskAiMessage> conversation = const [],
}) async* {
@@ -54,7 +55,8 @@ class AskAiRepository {
final requestBody = _buildRequestBody(
model: model,
selection: selection,
scenario: scenario,
contextBlocks: contextBlocks,
localeHint: localeHint,
conversation: conversation,
);
@@ -202,21 +204,27 @@ class AskAiRepository {
Map<String, dynamic> _buildRequestBody({
required String model,
required String selection,
required AskAiScenario scenario,
required List<String> contextBlocks,
required List<AskAiMessage> conversation,
String? localeHint,
}) {
final promptBuffer = StringBuffer()
..writeln('你是一个 SSH 终端助手。')
..writeln('用户提供一段终端输出或命令,请结合上下文给出解释')
..writeln('你是 ServerBox 内嵌的服务器运维助手。')
..writeln('你会基于用户提供的上下文进行解释、诊断与建议')
..writeln('默认只建议,不自动执行任何命令。')
..writeln('优先给出安全、可回滚、只读的排查步骤。')
..writeln('当需要给出可执行命令时,调用 `recommend_shell` 工具,并提供简短描述。')
..writeln('仅在非常确定命令安全时才给出建议');
..writeln('不确定时先提出澄清问题');
if (localeHint != null && localeHint.isNotEmpty) {
promptBuffer
.writeln('请优先使用用户的语言输出:$localeHint');
promptBuffer.writeln('请优先使用用户的语言输出:$localeHint');
}
promptBuffer.writeln(_scenarioPrompt(scenario));
final ctx = contextBlocks.isEmpty ? '(empty)' : contextBlocks.join('\n\n---\n\n');
final messages = <Map<String, String>>[
{
'role': 'system',
@@ -228,7 +236,7 @@ class AskAiRepository {
}),
{
'role': 'user',
'content': '以下是终端选中的内容:\n$selection',
'content': '以下是当前页面/会话上下文（Markdown blocks）：\n\n$ctx',
},
];
@@ -254,6 +262,24 @@ class AskAiRepository {
'type': 'string',
'description': '简述该命令的作用或注意事项。',
},
'risk': {
'type': 'string',
'description': '风险等级：low/medium/high。',
'enum': ['low', 'medium', 'high'],
},
'needsConfirmation': {
'type': 'boolean',
'description': '是否需要更强确认(例如倒计时确认)。',
},
'why': {
'type': 'string',
'description': '为什么要执行该命令。',
},
'prechecks': {
'type': 'array',
'items': {'type': 'string'},
'description': '建议先执行的只读预检查命令。',
},
},
},
},
@@ -262,6 +288,18 @@ class AskAiRepository {
};
}
static String _scenarioPrompt(AskAiScenario scenario) {
return switch (scenario) {
AskAiScenario.general => '场景:通用。结合上下文回答,必要时给出命令建议。',
AskAiScenario.terminal => '场景：SSH 终端。解释输出/错误,给出排查命令与下一步建议。',
AskAiScenario.systemd => '场景：Systemd。围绕 unit 状态/日志/依赖给出诊断与建议。',
AskAiScenario.container => '场景:容器。围绕 docker/podman 的容器状态、镜像、日志给建议。',
AskAiScenario.process => '场景：进程。围绕进程异常、资源占用、kill/renice 等给建议。',
AskAiScenario.snippet => '场景：Snippet。生成或改写脚本，强调幂等、安全与可回滚。',
AskAiScenario.sftp => '场景：SFTP。围绕路径/权限/压缩包/传输错误等给操作与命令建议。',
};
}
Uri _composeUri(String base, String path) {
final sanitizedBase = base.replaceAll(RegExp(r'/+$'), '');
final sanitizedPath = path.replaceFirst(RegExp(r'^/+'), '');
@@ -269,6 +307,34 @@ class AskAiRepository {
}
}
@immutable
enum AskAiScenario {
general,
terminal,
systemd,
container,
process,
snippet,
sftp,
}
extension AskAiScenarioX on AskAiScenario {
static AskAiScenario? tryParse(Object? raw) {
if (raw is! String) return null;
final s = raw.trim().toLowerCase();
return switch (s) {
'general' => AskAiScenario.general,
'terminal' => AskAiScenario.terminal,
'systemd' => AskAiScenario.systemd,
'container' => AskAiScenario.container,
'process' => AskAiScenario.process,
'snippet' => AskAiScenario.snippet,
'sftp' => AskAiScenario.sftp,
_ => null,
};
}
}
class _ToolCallBuilder {
_ToolCallBuilder();
@@ -289,11 +355,25 @@ class _ToolCallBuilder {
return null;
}
final description = decoded['description'] as String? ?? decoded['explanation'] as String? ?? '';
final risk = decoded['risk'] as String?;
final needsConfirmation = decoded['needsConfirmation'] as bool?;
final why = decoded['why'] as String?;
List<String>? prechecks;
final preRaw = decoded['prechecks'];
if (preRaw is List) {
prechecks = preRaw.map((e) => e.toString()).where((e) => e.trim().isNotEmpty).toList();
}
_emitted = true;
return AskAiCommand(
command: command.trim(),
description: description.trim(),
toolName: name,
risk: risk,
needsConfirmation: needsConfirmation,
why: why,
prechecks: prechecks,
);
} on FormatException {
if (force) {

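For reference, a hedged sketch of how a caller can drive the reworked ask() signature and fold the streamed events into text plus command suggestions. The helper and its parameters are hypothetical; the provider, event types, and named arguments are the ones appearing in this diff.

import 'package:flutter_riverpod/flutter_riverpod.dart';
import 'package:server_box/data/model/ai/ask_ai_models.dart';
import 'package:server_box/data/provider/ai/ask_ai.dart';

// Assumed caller: any widget layer with a WidgetRef in scope.
Future<String> askAboutUnit(WidgetRef ref, String unitStatusBlock) async {
  final buffer = StringBuffer();
  final stream = ref.read(askAiRepositoryProvider).ask(
        scenario: AskAiScenario.systemd,
        contextBlocks: ['[Systemd Unit]\n$unitStatusBlock'],
        localeHint: 'en-US',
      );
  await for (final event in stream) {
    if (event is AskAiContentDelta) {
      buffer.write(event.delta); // incremental assistant text
    } else if (event is AskAiToolSuggestion) {
      // `risk` is the new optional hint; the UI falls back to AiSafety.classifyRisk.
      print('suggested: ${event.command.command} (risk: ${event.command.risk ?? 'unrated'})');
    }
  }
  return buffer.toString();
}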
View File

@@ -1,5 +1,6 @@
import 'package:fl_lib/fl_lib.dart';
import 'package:get_it/get_it.dart';
import 'package:server_box/data/store/ai_history.dart';
import 'package:server_box/data/store/connection_stats.dart';
import 'package:server_box/data/store/container.dart';
import 'package:server_box/data/store/history.dart';
@@ -17,6 +18,7 @@ abstract final class Stores {
static PrivateKeyStore get key => getIt<PrivateKeyStore>();
static SnippetStore get snippet => getIt<SnippetStore>();
static HistoryStore get history => getIt<HistoryStore>();
static AiHistoryStore get aiHistory => getIt<AiHistoryStore>();
static ConnectionStatsStore get connectionStats => getIt<ConnectionStatsStore>();
/// All stores that need backup
@@ -27,6 +29,7 @@ abstract final class Stores {
key,
snippet,
history,
aiHistory,
connectionStats,
];
@@ -37,8 +40,9 @@ abstract final class Stores {
getIt.registerLazySingleton<PrivateKeyStore>(() => PrivateKeyStore.instance);
getIt.registerLazySingleton<SnippetStore>(() => SnippetStore.instance);
getIt.registerLazySingleton<HistoryStore>(() => HistoryStore.instance);
getIt.registerLazySingleton<AiHistoryStore>(() => AiHistoryStore.instance);
getIt.registerLazySingleton<ConnectionStatsStore>(() => ConnectionStatsStore.instance);
await Future.wait(_allBackup.map((store) => store.init()));
}

View File

@@ -0,0 +1,23 @@
import 'package:fl_lib/fl_lib.dart';
/// Global persistent Ask AI conversation history.
///
/// Kept separate from [HistoryStore] to avoid mixing with SSH/SFTP history.
class AiHistoryStore extends HiveStore {
AiHistoryStore._() : super('ai_history');
static final instance = AiHistoryStore._();
/// Stored as a list of maps to avoid needing Hive type adapters.
late final history = listProperty<Map<String, dynamic>>(
'history',
defaultValue: const [],
fromObj: (val) => List<Map<String, dynamic>>.from(
(val as List).map((e) => Map<String, dynamic>.from(e as Map)),
),
);
void clearHistory() {
history.put(const []);
}
}
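
A sketch of how a finished conversation might be persisted into this store. This is assumed usage, not something the commit adds; the map keys are placeholders, and the fetch()/put() pair mirrors the fl_lib property API used elsewhere in this diff.

import 'package:server_box/data/res/store.dart';

// Hypothetical helper: append one conversation record. Entries stay plain
// maps, so no Hive type adapters are needed (see the doc comment above).
void saveAiConversation({required String title, required List<Map<String, String>> messages}) {
  // `fetch()` is assumed to return the stored list, as `put()` stores it above.
  final entries = List<Map<String, dynamic>>.from(Stores.aiHistory.history.fetch());
  entries.add({
    'title': title,
    'updatedAtMs': DateTime.now().millisecondsSinceEpoch,
    'messages': messages,
  });
  Stores.aiHistory.history.put(entries);
}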

View File

@@ -165,6 +165,10 @@ class SettingStore extends HiveStore {
late final askAiApiKey = propertyDefault('askAiApiKey', '');
late final askAiModel = propertyDefault('askAiModel', 'gpt-4o-mini');
/// Global AI floating action button position as normalized (0..1) ratios.
late final aiFabOffsetX = propertyDefault('aiFabOffsetX', 0.92);
late final aiFabOffsetY = propertyDefault('aiFabOffsetY', 0.55);
late final serverFuncBtns = listProperty('serverBtns', defaultValue: ServerFuncBtn.defaultIdxs);
/// Docker is more popular than Podman; set this to `false` to use Docker.

View File

@@ -223,6 +223,30 @@ extension on _ContainerPageState {
);
SSHPage.route.go(context, args);
break;
case ContainerMenu.askAi:
final runtime = switch (_containerState.type) {
ContainerType.podman => 'podman',
ContainerType.docker => 'docker',
};
final blocks = <String>[
'[Container]\nruntime: $runtime',
'[Container Item]\nid: ${dItem.id}\nname: ${dItem.name}\nimage: ${dItem.image}\nstatus: ${dItem.status.displayName}',
];
showAiAssistSheet(
context,
AiAssistArgs(
title: context.l10n.askAi,
contextBlocks: blocks,
scenario: AskAiScenario.container,
applyLabel: libL10n.ok,
applyBehavior: AiApplyBehavior.openSsh,
onOpenSsh: (cmd) {
final args = SshPageArgs(spi: widget.args.spi, initCmd: cmd);
SSHPage.route.go(context, args);
},
),
);
break;
}
}

View File

@@ -12,10 +12,12 @@ import 'package:server_box/data/model/app/menu/container.dart';
import 'package:server_box/data/model/container/image.dart';
import 'package:server_box/data/model/container/ps.dart';
import 'package:server_box/data/model/container/type.dart';
import 'package:server_box/data/provider/ai/ask_ai.dart';
import 'package:server_box/data/provider/container.dart';
import 'package:server_box/data/provider/server/single.dart';
import 'package:server_box/data/res/store.dart';
import 'package:server_box/view/page/ssh/page/page.dart';
import 'package:server_box/view/widget/ai/ai_assist_sheet.dart';
part 'actions.dart';
part 'types.dart';

View File

@@ -25,6 +25,7 @@ import 'package:server_box/data/provider/server/single.dart';
import 'package:server_box/data/res/store.dart';
import 'package:server_box/view/page/pve.dart';
import 'package:server_box/view/page/server/edit/edit.dart';
import 'package:server_box/view/page/ssh/page/page.dart';
import 'package:server_box/view/widget/server_func_btns.dart';
part 'misc.dart';
@@ -125,6 +126,14 @@ class _ServerDetailPageState extends ConsumerState<ServerDetailPage> with Single
),
actions: [
QrShareBtn(data: si.spi.toJsonString(), tip: si.spi.name, tip2: '${l10n.server} ~ ServerBox'),
IconButton(
icon: const Icon(Icons.smart_toy_outlined),
tooltip: context.l10n.askAi,
onPressed: () {
final args = SshPageArgs(spi: si.spi);
SSHPage.route.go(context, args);
},
),
IconButton(
icon: const Icon(Icons.edit),
onPressed: () async {

View File

@@ -27,483 +27,44 @@ extension _AskAi on SSHPageState {
Future<void> _showAskAiSheet(String selection) async {
if (!mounted) return;
final localeHint = Localizations.maybeLocaleOf(context)?.toLanguageTag();
await showModalBottomSheet<void>(
context: context,
isScrollControlled: true,
useSafeArea: true,
builder: (ctx) {
return _AskAiSheet(selection: selection, localeHint: localeHint, onCommandApply: _applyAiCommand);
},
);
}
void _applyAiCommand(String command) {
if (command.isEmpty) {
return;
}
_terminal.textInput(command);
(widget.args.focusNode?.requestFocus ?? _termKey.currentState?.requestKeyboard)?.call();
}
}
final scrollback = _buildTerminalScrollbackTail(maxLines: 200);
class _AskAiSheet extends ConsumerStatefulWidget {
const _AskAiSheet({required this.selection, required this.localeHint, required this.onCommandApply});
final blocks = <String>[
'[Terminal Selection]\n$selection',
'[Terminal Scrollback Tail]\n$scrollback',
'[Session]\nserver: ${widget.args.spi.user}@${widget.args.spi.ip}:${widget.args.spi.port}\nsessionId: $_sessionId',
];
final String selection;
final String? localeHint;
final ValueChanged<String> onCommandApply;
final redactedBlocks = AiSafety.redactBlocks(blocks, spi: widget.args.spi);
@override
ConsumerState<_AskAiSheet> createState() => _AskAiSheetState();
}
enum _ChatEntryType { user, assistant, command }
class _ChatEntry {
const _ChatEntry._({required this.type, this.content, this.command});
const _ChatEntry.user(String content) : this._(type: _ChatEntryType.user, content: content);
const _ChatEntry.assistant(String content) : this._(type: _ChatEntryType.assistant, content: content);
const _ChatEntry.command(AskAiCommand command) : this._(type: _ChatEntryType.command, command: command);
final _ChatEntryType type;
final String? content;
final AskAiCommand? command;
}
class _AskAiSheetState extends ConsumerState<_AskAiSheet> {
StreamSubscription<AskAiEvent>? _subscription;
final _chatEntries = <_ChatEntry>[];
final _history = <AskAiMessage>[];
final _scrollController = ScrollController();
final _inputController = TextEditingController();
final _seenCommands = <String>{};
String? _streamingContent;
String? _error;
bool _isStreaming = false;
bool _isMinimized = false;
@override
void initState() {
super.initState();
_inputController.addListener(_handleInputChanged);
_startStream();
}
@override
void dispose() {
_subscription?.cancel();
_scrollController.dispose();
_inputController
..removeListener(_handleInputChanged)
..dispose();
super.dispose();
}
void _handleInputChanged() {
if (!mounted) return;
setState(() {});
}
void _startStream() {
_subscription?.cancel();
setState(() {
_isStreaming = true;
_error = null;
_streamingContent = '';
});
final messages = List<AskAiMessage>.from(_history);
_subscription = ref
.read(askAiRepositoryProvider)
.ask(selection: widget.selection, localeHint: widget.localeHint, conversation: messages)
.listen(
_handleEvent,
onError: (error, stack) {
if (!mounted) return;
setState(() {
_error = _describeError(error);
_isStreaming = false;
_streamingContent = null;
});
},
onDone: () {
if (!mounted) return;
setState(() {
_isStreaming = false;
});
},
);
}
void _handleEvent(AskAiEvent event) {
if (!mounted) return;
var shouldScroll = false;
setState(() {
if (event is AskAiContentDelta) {
_streamingContent = (_streamingContent ?? '') + event.delta;
shouldScroll = true;
} else if (event is AskAiToolSuggestion) {
final inserted = _seenCommands.add(event.command.command);
if (inserted) {
_chatEntries.add(_ChatEntry.command(event.command));
shouldScroll = true;
}
} else if (event is AskAiCompleted) {
final fullText = event.fullText.isNotEmpty ? event.fullText : (_streamingContent ?? '');
if (fullText.trim().isNotEmpty) {
final message = AskAiMessage(role: AskAiMessageRole.assistant, content: fullText);
_history.add(message);
_chatEntries.add(_ChatEntry.assistant(fullText));
}
for (final command in event.commands) {
final inserted = _seenCommands.add(command.command);
if (inserted) {
_chatEntries.add(_ChatEntry.command(command));
}
}
_streamingContent = null;
_isStreaming = false;
shouldScroll = true;
} else if (event is AskAiStreamError) {
_error = _describeError(event.error);
_streamingContent = null;
_isStreaming = false;
}
});
if (shouldScroll) {
_scheduleAutoScroll();
}
}
void _scheduleAutoScroll() {
if (!_scrollController.hasClients) return;
WidgetsBinding.instance.addPostFrameCallback((_) {
if (!_scrollController.hasClients) return;
_scrollController.animateTo(
_scrollController.position.maxScrollExtent,
duration: const Duration(milliseconds: 180),
curve: Curves.easeOutCubic,
);
});
}
String _describeError(Object error) {
final l10n = context.l10n;
if (error is AskAiConfigException) {
if (error.missingFields.isEmpty) {
if (error.hasInvalidBaseUrl) {
return 'Invalid Ask AI base URL: ${error.invalidBaseUrl}';
}
return error.toString();
}
final locale = Localizations.maybeLocaleOf(context);
final separator = switch (locale?.languageCode) {
'zh' => '、',
'ja' => '、',
_ => ', ',
};
final formattedFields = error.missingFields
.map(
(field) => switch (field) {
AskAiConfigField.baseUrl => l10n.askAiBaseUrl,
AskAiConfigField.apiKey => l10n.askAiApiKey,
AskAiConfigField.model => l10n.askAiModel,
},
)
.join(separator);
final message = l10n.askAiConfigMissing(formattedFields);
if (error.hasInvalidBaseUrl) {
return '$message (invalid URL: ${error.invalidBaseUrl})';
}
return message;
}
if (error is AskAiNetworkException) {
return error.message;
}
return error.toString();
}
Future<void> _handleApplyCommand(BuildContext context, AskAiCommand command) async {
final confirmed = await context.showRoundDialog<bool>(
title: context.l10n.askAiConfirmExecute,
child: SelectableText(command.command, style: const TextStyle(fontFamily: 'monospace')),
actions: [
TextButton(onPressed: context.pop, child: Text(libL10n.cancel)),
TextButton(onPressed: () => context.pop(true), child: Text(libL10n.ok)),
],
);
if (confirmed == true) {
widget.onCommandApply(command.command);
if (!mounted) return;
context.showSnackBar(context.l10n.askAiCommandInserted);
}
}
Future<void> _copyCommand(BuildContext context, AskAiCommand command) async {
await Clipboard.setData(ClipboardData(text: command.command));
if (!mounted) return;
context.showSnackBar(libL10n.success);
}
Future<void> _copyText(BuildContext context, String text) async {
if (text.trim().isEmpty) return;
await Clipboard.setData(ClipboardData(text: text));
if (!mounted) return;
context.showSnackBar(libL10n.success);
}
void _sendMessage() {
if (_isStreaming) return;
final text = _inputController.text.trim();
if (text.isEmpty) return;
setState(() {
final message = AskAiMessage(role: AskAiMessageRole.user, content: text);
_history.add(message);
_chatEntries.add(_ChatEntry.user(text));
_inputController.clear();
});
_startStream();
_scheduleAutoScroll();
}
List<Widget> _buildConversationWidgets(BuildContext context, ThemeData theme) {
final widgets = <Widget>[];
for (final entry in _chatEntries) {
widgets.add(_buildChatItem(context, theme, entry));
widgets.add(const SizedBox(height: 12));
}
if (_streamingContent != null) {
widgets.add(_buildAssistantBubble(theme, content: _streamingContent!, streaming: true));
widgets.add(const SizedBox(height: 12));
} else if (_chatEntries.isEmpty && _error == null) {
widgets.add(_buildAssistantBubble(theme, content: '', streaming: true));
widgets.add(const SizedBox(height: 12));
}
if (widgets.isNotEmpty) {
widgets.removeLast();
}
return widgets;
}
Widget _buildChatItem(BuildContext context, ThemeData theme, _ChatEntry entry) {
switch (entry.type) {
case _ChatEntryType.user:
return Align(
alignment: Alignment.centerRight,
child: CardX(
child: Padding(padding: const EdgeInsets.all(12), child: SelectableText(entry.content ?? '')),
),
);
case _ChatEntryType.assistant:
return _buildAssistantBubble(theme, content: entry.content ?? '');
case _ChatEntryType.command:
final command = entry.command!;
return _buildCommandBubble(context, theme, command);
}
}
Widget _buildAssistantBubble(ThemeData theme, {required String content, bool streaming = false}) {
final trimmed = content.trim();
final l10n = context.l10n;
final child = trimmed.isEmpty
? Text(
streaming ? l10n.askAiAwaitingResponse : l10n.askAiNoResponse,
style: theme.textTheme.bodySmall,
)
: Column(
crossAxisAlignment: CrossAxisAlignment.start,
children: [
SimpleMarkdown(data: content),
const SizedBox(height: 8),
Row(
mainAxisAlignment: MainAxisAlignment.end,
children: [
TextButton.icon(
onPressed: () => _copyText(context, content),
icon: const Icon(Icons.copy, size: 18),
label: Text(libL10n.copy),
),
],
),
],
);
return Align(
alignment: Alignment.centerLeft,
child: CardX(
child: Padding(padding: const EdgeInsets.all(12), child: child),
await showAiAssistSheet(
context,
AiAssistArgs(
title: context.l10n.askAi,
contextBlocks: redactedBlocks,
scenario: AskAiScenario.terminal,
localeHint: localeHint,
applyLabel: context.l10n.askAiInsertTerminal,
applyBehavior: AiApplyBehavior.insert,
redacted: false,
onInsert: (command) {
_terminal.textInput(command);
(widget.args.focusNode?.requestFocus ?? _termKey.currentState?.requestKeyboard)?.call();
},
),
);
}
Widget _buildCommandBubble(BuildContext context, ThemeData theme, AskAiCommand command) {
final l10n = context.l10n;
return Align(
alignment: Alignment.centerLeft,
child: CardX(
child: Padding(
padding: const EdgeInsets.all(12),
child: Column(
crossAxisAlignment: CrossAxisAlignment.start,
children: [
Text(l10n.askAiRecommendedCommand, style: theme.textTheme.labelMedium),
const SizedBox(height: 8),
SelectableText(command.command, style: const TextStyle(fontFamily: 'monospace')),
if (command.description.isNotEmpty) ...[
const SizedBox(height: 6),
Text(command.description, style: theme.textTheme.bodySmall),
],
const SizedBox(height: 12),
Row(
mainAxisAlignment: MainAxisAlignment.end,
children: [
TextButton.icon(
onPressed: () => _copyCommand(context, command),
icon: const Icon(Icons.copy, size: 18),
label: Text(libL10n.copy),
),
const SizedBox(width: 8),
FilledButton.icon(
onPressed: () => _handleApplyCommand(context, command),
icon: const Icon(Icons.terminal, size: 18),
label: Text(l10n.askAiInsertTerminal),
),
],
),
],
),
),
),
);
}
String _buildTerminalScrollbackTail({required int maxLines}) {
final lines = _terminal.buffer.lines.toList();
if (lines.isEmpty) return '';
@override
Widget build(BuildContext context) {
final theme = Theme.of(context);
final bottomPadding = MediaQuery.viewInsetsOf(context).bottom;
final heightFactor = _isMinimized ? 0.18 : 0.85;
final start = (lines.length - maxLines).clamp(0, lines.length);
final tail = lines.sublist(start);
return TweenAnimationBuilder<double>(
tween: Tween<double>(end: heightFactor),
duration: const Duration(milliseconds: 200),
curve: Curves.easeOutCubic,
builder: (context, animatedHeightFactor, child) {
return ClipRect(
child: FractionallySizedBox(
heightFactor: animatedHeightFactor,
child: child,
),
);
},
child: SafeArea(
child: Column(
children: [
Padding(
padding: const EdgeInsets.fromLTRB(16, 16, 16, 0),
child: Row(
children: [
Text(context.l10n.askAi, style: theme.textTheme.titleLarge),
const SizedBox(width: 8),
if (_isStreaming)
const SizedBox(height: 16, width: 16, child: CircularProgressIndicator(strokeWidth: 2)),
const Spacer(),
IconButton(
icon: Icon(_isMinimized ? Icons.unfold_more : Icons.unfold_less),
tooltip: libL10n.fold,
onPressed: () {
FocusManager.instance.primaryFocus?.unfocus();
setState(() {
_isMinimized = !_isMinimized;
});
},
),
IconButton(icon: const Icon(Icons.close), onPressed: () => Navigator.of(context).pop()),
],
),
),
if (!_isMinimized) ...[
Expanded(
child: Scrollbar(
controller: _scrollController,
child: ListView(
controller: _scrollController,
padding: const EdgeInsets.fromLTRB(16, 12, 16, 12),
children: [
Text(context.l10n.askAiSelectedContent, style: theme.textTheme.titleMedium),
const SizedBox(height: 6),
CardX(
child: Padding(
padding: const EdgeInsets.all(12),
child: SelectableText(
widget.selection,
style: const TextStyle(fontFamily: 'monospace'),
),
),
),
const SizedBox(height: 16),
Text(context.l10n.askAiConversation, style: theme.textTheme.titleMedium),
const SizedBox(height: 6),
..._buildConversationWidgets(context, theme),
if (_error != null) ...[
const SizedBox(height: 16),
CardX(
child: Padding(
padding: const EdgeInsets.all(12),
child: Text(_error!, style: TextStyle(color: theme.colorScheme.error)),
),
),
],
if (_isStreaming) ...[const SizedBox(height: 16), const LinearProgressIndicator()],
const SizedBox(height: 16),
],
),
),
),
Padding(
padding: const EdgeInsets.fromLTRB(16, 8, 16, 0),
child: Text(
context.l10n.askAiDisclaimer,
style: theme.textTheme.bodySmall?.copyWith(
color: theme.colorScheme.error,
fontWeight: FontWeight.bold,
),
textAlign: TextAlign.center,
),
),
Padding(
padding: EdgeInsets.fromLTRB(16, 8, 16, 16 + bottomPadding),
child: Row(
children: [
Expanded(
child: Input(
controller: _inputController,
minLines: 1,
maxLines: 4,
hint: context.l10n.askAiFollowUpHint,
action: TextInputAction.send,
onSubmitted: (_) => _sendMessage(),
),
),
const SizedBox(width: 12),
Btn.icon(
onTap: _isStreaming || _inputController.text.trim().isEmpty ? null : _sendMessage,
icon: const Icon(Icons.send, size: 18),
),
],
).cardx,
),
] else
const SizedBox(height: 8),
],
),
),
);
return tail.map((e) => e.toString()).join('\n');
}
}

View File

@@ -13,10 +13,10 @@ import 'package:server_box/core/chan.dart';
import 'package:server_box/core/extension/context/locale.dart';
import 'package:server_box/core/utils/server.dart';
import 'package:server_box/core/utils/ssh_auth.dart';
import 'package:server_box/data/model/ai/ask_ai_models.dart';
import 'package:server_box/data/model/server/server_private_info.dart';
import 'package:server_box/data/model/server/snippet.dart';
import 'package:server_box/data/model/ssh/virtual_key.dart';
import 'package:server_box/data/provider/ai/ai_safety.dart';
import 'package:server_box/data/provider/ai/ask_ai.dart';
import 'package:server_box/data/provider/server/single.dart';
import 'package:server_box/data/provider/snippet.dart';
@@ -25,6 +25,7 @@ import 'package:server_box/data/res/store.dart';
import 'package:server_box/data/res/terminal.dart';
import 'package:server_box/data/ssh/session_manager.dart';
import 'package:server_box/view/page/storage/sftp.dart';
import 'package:server_box/view/widget/ai/ai_assist_sheet.dart';
import 'package:wakelock_plus/wakelock_plus.dart';
import 'package:xterm/core.dart';
import 'package:xterm/ui.dart' hide TerminalThemes;

View File

@@ -1,11 +1,14 @@
import 'package:fl_lib/fl_lib.dart';
import 'package:flutter/material.dart';
import 'package:flutter_riverpod/flutter_riverpod.dart';
import 'package:server_box/core/extension/context/locale.dart';
import 'package:server_box/core/route.dart';
import 'package:server_box/data/model/server/server_private_info.dart';
import 'package:server_box/data/model/server/systemd.dart';
import 'package:server_box/data/provider/ai/ask_ai.dart';
import 'package:server_box/data/provider/systemd.dart';
import 'package:server_box/view/page/ssh/page/page.dart';
import 'package:server_box/view/widget/ai/ai_assist_sheet.dart';
final class SystemdPage extends ConsumerStatefulWidget {
final SpiRequiredArgs args;
@@ -28,7 +31,29 @@ final class _SystemdPageState extends ConsumerState<SystemdPage> {
return Scaffold(
appBar: CustomAppBar(
title: const Text('Systemd'),
actions: isDesktop ? [Btn.icon(icon: const Icon(Icons.refresh), onTap: _notifier.getUnits)] : null,
actions: [
if (isDesktop) Btn.icon(icon: const Icon(Icons.refresh), onTap: _notifier.getUnits),
IconButton(
icon: const Icon(Icons.smart_toy_outlined),
tooltip: context.l10n.askAi,
onPressed: () {
final blocks = <String>[
'[Systemd]\nscopeFilter: ${ref.read(_pro).scopeFilter.displayName}\nitems: ${_notifier.filteredUnits.length}',
];
showAiAssistSheet(
context,
AiAssistArgs(
title: context.l10n.askAi,
contextBlocks: blocks,
scenario: AskAiScenario.systemd,
applyLabel: libL10n.ok,
applyBehavior: AiApplyBehavior.openSsh,
onOpenSsh: _navigateToSsh,
),
);
},
),
],
),
body: RefreshIndicator(onRefresh: _notifier.getUnits, child: _buildBody()),
);

View File

@@ -0,0 +1,608 @@
import 'dart:async';
import 'package:fl_lib/fl_lib.dart';
import 'package:flutter/material.dart';
import 'package:flutter/services.dart';
import 'package:flutter_riverpod/flutter_riverpod.dart';
import 'package:server_box/core/extension/context/locale.dart';
import 'package:server_box/data/model/ai/ask_ai_models.dart';
import 'package:server_box/data/provider/ai/ai_safety.dart';
import 'package:server_box/data/provider/ai/ask_ai.dart';
@immutable
enum AiApplyBehavior {
/// Apply means "insert" into an input (terminal/editor).
insert,
/// Apply means "open SSH and prefill initCmd".
openSsh,
/// Apply means "copy to clipboard".
copy,
}
@immutable
class AiAssistArgs {
const AiAssistArgs({
required this.title,
required this.contextBlocks,
required this.scenario,
required this.applyLabel,
required this.applyBehavior,
this.localeHint,
this.redacted = true,
this.onInsert,
this.onOpenSsh,
});
final String title;
final List<String> contextBlocks;
final AskAiScenario scenario;
final String applyLabel;
final AiApplyBehavior applyBehavior;
final String? localeHint;
/// If true, apply a conservative redaction before sending.
final bool redacted;
final ValueChanged<String>? onInsert;
final ValueChanged<String>? onOpenSsh;
}
Future<void> showAiAssistSheet(BuildContext context, AiAssistArgs args) async {
await showModalBottomSheet<void>(
context: context,
isScrollControlled: true,
useSafeArea: true,
builder: (_) => AiAssistSheet(args: args),
);
}
class AiAssistSheet extends ConsumerStatefulWidget {
const AiAssistSheet({super.key, required this.args});
final AiAssistArgs args;
@override
ConsumerState<AiAssistSheet> createState() => _AiAssistSheetState();
}
enum _ChatEntryType { user, assistant, command }
class _ChatEntry {
const _ChatEntry._({required this.type, this.content, this.command, this.risk});
const _ChatEntry.user(String content) : this._(type: _ChatEntryType.user, content: content);
const _ChatEntry.assistant(String content) : this._(type: _ChatEntryType.assistant, content: content);
const _ChatEntry.command(AskAiCommand command, AiCommandRisk risk)
: this._(type: _ChatEntryType.command, command: command, risk: risk);
final _ChatEntryType type;
final String? content;
final AskAiCommand? command;
final AiCommandRisk? risk;
}
class _AiAssistSheetState extends ConsumerState<AiAssistSheet> {
StreamSubscription<AskAiEvent>? _subscription;
final _chatEntries = <_ChatEntry>[];
final _history = <AskAiMessage>[];
final _scrollController = ScrollController();
final _inputController = TextEditingController();
final _seenCommands = <String>{};
String? _streamingContent;
String? _error;
bool _isStreaming = false;
bool _isMinimized = false;
@override
void initState() {
super.initState();
_inputController.addListener(_handleInputChanged);
_startStream();
}
@override
void dispose() {
_subscription?.cancel();
_scrollController.dispose();
_inputController
..removeListener(_handleInputChanged)
..dispose();
super.dispose();
}
void _handleInputChanged() {
if (!mounted) return;
setState(() {});
}
List<String> get _preparedBlocks {
final blocks = widget.args.contextBlocks;
if (!widget.args.redacted) return blocks;
// Best-effort: redact without Spi. Pages that have Spi should pass already-redacted
// blocks or avoid including secrets directly.
return AiSafety.redactBlocks(blocks);
}
void _startStream() {
_subscription?.cancel();
setState(() {
_isStreaming = true;
_error = null;
_streamingContent = '';
});
final messages = List<AskAiMessage>.from(_history);
_subscription = ref
.read(askAiRepositoryProvider)
.ask(
scenario: widget.args.scenario,
contextBlocks: _preparedBlocks,
localeHint: widget.args.localeHint,
conversation: messages,
)
.listen(
_handleEvent,
onError: (error, stack) {
if (!mounted) return;
setState(() {
_error = _describeError(error);
_isStreaming = false;
_streamingContent = null;
});
},
onDone: () {
if (!mounted) return;
setState(() {
_isStreaming = false;
});
},
);
}
void _handleEvent(AskAiEvent event) {
if (!mounted) return;
var shouldScroll = false;
setState(() {
if (event is AskAiContentDelta) {
_streamingContent = (_streamingContent ?? '') + event.delta;
shouldScroll = true;
} else if (event is AskAiToolSuggestion) {
final inserted = _seenCommands.add(event.command.command);
if (inserted) {
final risk = event.command.risk != null
? (AiCommandRiskX.tryParse(event.command.risk) ?? AiSafety.classifyRisk(event.command.command))
: AiSafety.classifyRisk(event.command.command);
_chatEntries.add(_ChatEntry.command(event.command, risk));
shouldScroll = true;
}
} else if (event is AskAiCompleted) {
final fullText = event.fullText.isNotEmpty ? event.fullText : (_streamingContent ?? '');
if (fullText.trim().isNotEmpty) {
final message = AskAiMessage(role: AskAiMessageRole.assistant, content: fullText);
_history.add(message);
_chatEntries.add(_ChatEntry.assistant(fullText));
}
for (final command in event.commands) {
final inserted = _seenCommands.add(command.command);
if (inserted) {
final risk = command.risk != null
? (AiCommandRiskX.tryParse(command.risk) ?? AiSafety.classifyRisk(command.command))
: AiSafety.classifyRisk(command.command);
_chatEntries.add(_ChatEntry.command(command, risk));
}
}
_streamingContent = null;
_isStreaming = false;
shouldScroll = true;
} else if (event is AskAiStreamError) {
_error = _describeError(event.error);
_streamingContent = null;
_isStreaming = false;
}
});
if (shouldScroll) {
_scheduleAutoScroll();
}
}
void _scheduleAutoScroll() {
if (!_scrollController.hasClients) return;
WidgetsBinding.instance.addPostFrameCallback((_) {
if (!_scrollController.hasClients) return;
_scrollController.animateTo(
_scrollController.position.maxScrollExtent,
duration: const Duration(milliseconds: 180),
curve: Curves.easeOutCubic,
);
});
}
String _describeError(Object error) {
final l10n = context.l10n;
if (error is AskAiConfigException) {
if (error.missingFields.isEmpty) {
if (error.hasInvalidBaseUrl) {
return 'Invalid Ask AI base URL: ${error.invalidBaseUrl}';
}
return error.toString();
}
final locale = Localizations.maybeLocaleOf(context);
final separator = switch (locale?.languageCode) {
'zh' => '、',
'ja' => '、',
_ => ', ',
};
final formattedFields = error.missingFields
.map(
(field) => switch (field) {
AskAiConfigField.baseUrl => l10n.askAiBaseUrl,
AskAiConfigField.apiKey => l10n.askAiApiKey,
AskAiConfigField.model => l10n.askAiModel,
},
)
.join(separator);
final message = l10n.askAiConfigMissing(formattedFields);
if (error.hasInvalidBaseUrl) {
return '$message (invalid URL: ${error.invalidBaseUrl})';
}
return message;
}
if (error is AskAiNetworkException) {
return error.message;
}
return error.toString();
}
Future<void> _confirmAndApplyCommand(AskAiCommand command, AiCommandRisk risk) async {
final l10n = context.l10n;
final needsCountdown = risk == AiCommandRisk.high || command.needsConfirmation == true;
final actions = <Widget>[Btn.cancel()];
if (needsCountdown) {
actions.add(
CountDownBtn(
seconds: 3,
onTap: () => context.pop(true),
text: libL10n.ok,
afterColor: Colors.red,
),
);
} else {
actions.add(TextButton(onPressed: () => context.pop(true), child: Text(libL10n.ok)));
}
final confirmed = await context.showRoundDialog<bool>(
title: needsCountdown ? libL10n.attention : l10n.askAiConfirmExecute,
child: SimpleMarkdown(data: '```shell\n${command.command}\n```'),
actions: actions,
);
if (confirmed != true) return;
if (!mounted) return;
await _applyCommand(command.command);
}
Future<void> _applyCommand(String cmd) async {
final text = cmd.trim();
if (text.isEmpty) return;
switch (widget.args.applyBehavior) {
case AiApplyBehavior.insert:
widget.args.onInsert?.call(text);
if (!mounted) return;
context.showSnackBar(context.l10n.askAiCommandInserted);
break;
case AiApplyBehavior.openSsh:
widget.args.onOpenSsh?.call(text);
break;
case AiApplyBehavior.copy:
await Clipboard.setData(ClipboardData(text: text));
if (!mounted) return;
context.showSnackBar(libL10n.success);
break;
}
}
Future<void> _copyCommand(AskAiCommand command) async {
await Clipboard.setData(ClipboardData(text: command.command));
if (!mounted) return;
context.showSnackBar(libL10n.success);
}
Future<void> _copyText(String text) async {
if (text.trim().isEmpty) return;
await Clipboard.setData(ClipboardData(text: text));
if (!mounted) return;
context.showSnackBar(libL10n.success);
}
void _sendMessage() {
if (_isStreaming) return;
final text = _inputController.text.trim();
if (text.isEmpty) return;
setState(() {
final message = AskAiMessage(role: AskAiMessageRole.user, content: text);
_history.add(message);
_chatEntries.add(_ChatEntry.user(text));
_inputController.clear();
});
_startStream();
_scheduleAutoScroll();
}
List<Widget> _buildConversationWidgets(BuildContext context, ThemeData theme) {
final widgets = <Widget>[];
for (final entry in _chatEntries) {
widgets.add(_buildChatItem(context, theme, entry));
widgets.add(const SizedBox(height: 12));
}
if (_streamingContent != null) {
widgets.add(_buildAssistantBubble(theme, content: _streamingContent!, streaming: true));
widgets.add(const SizedBox(height: 12));
} else if (_chatEntries.isEmpty && _error == null) {
widgets.add(_buildAssistantBubble(theme, content: '', streaming: true));
widgets.add(const SizedBox(height: 12));
}
if (widgets.isNotEmpty) {
widgets.removeLast();
}
return widgets;
}
Widget _buildChatItem(BuildContext context, ThemeData theme, _ChatEntry entry) {
switch (entry.type) {
case _ChatEntryType.user:
return Align(
alignment: Alignment.centerRight,
child: CardX(
child: Padding(padding: const EdgeInsets.all(12), child: SelectableText(entry.content ?? '')),
),
);
case _ChatEntryType.assistant:
return _buildAssistantBubble(theme, content: entry.content ?? '');
case _ChatEntryType.command:
final command = entry.command!;
final risk = entry.risk ?? AiSafety.classifyRisk(command.command);
return _buildCommandBubble(context, theme, command, risk);
}
}
Widget _buildAssistantBubble(ThemeData theme, {required String content, bool streaming = false}) {
final trimmed = content.trim();
final l10n = context.l10n;
final child = trimmed.isEmpty
? Text(
streaming ? l10n.askAiAwaitingResponse : l10n.askAiNoResponse,
style: theme.textTheme.bodySmall,
)
: Column(
crossAxisAlignment: CrossAxisAlignment.start,
children: [
SimpleMarkdown(data: content),
const SizedBox(height: 8),
Row(
mainAxisAlignment: MainAxisAlignment.end,
children: [
TextButton.icon(
onPressed: () => _copyText(content),
icon: const Icon(Icons.copy, size: 18),
label: Text(libL10n.copy),
),
],
),
],
);
return Align(
alignment: Alignment.centerLeft,
child: CardX(
child: Padding(padding: const EdgeInsets.all(12), child: child),
),
);
}
Widget _buildRiskTag(ThemeData theme, AiCommandRisk risk) {
final (label, color) = switch (risk) {
AiCommandRisk.low => ('LOW', Colors.green),
AiCommandRisk.medium => ('MED', Colors.orange),
AiCommandRisk.high => ('HIGH', Colors.red),
};
return Container(
decoration: BoxDecoration(
color: color.withValues(alpha: 0.12),
borderRadius: BorderRadius.circular(6),
),
child: Text(
label,
style: theme.textTheme.labelSmall?.copyWith(color: color),
).paddingSymmetric(horizontal: 6, vertical: 2),
);
}
Widget _buildCommandBubble(BuildContext context, ThemeData theme, AskAiCommand command, AiCommandRisk risk) {
final l10n = context.l10n;
return Align(
alignment: Alignment.centerLeft,
child: CardX(
child: Padding(
padding: const EdgeInsets.all(12),
child: Column(
crossAxisAlignment: CrossAxisAlignment.start,
children: [
Row(
children: [
Text(l10n.askAiRecommendedCommand, style: theme.textTheme.labelMedium),
const SizedBox(width: 8),
_buildRiskTag(theme, risk),
],
),
const SizedBox(height: 8),
SelectableText(command.command, style: const TextStyle(fontFamily: 'monospace')),
if (command.description.isNotEmpty) ...[
const SizedBox(height: 6),
Text(command.description, style: theme.textTheme.bodySmall),
],
const SizedBox(height: 12),
Row(
mainAxisAlignment: MainAxisAlignment.end,
children: [
TextButton.icon(
onPressed: () => _copyCommand(command),
icon: const Icon(Icons.copy, size: 18),
label: Text(libL10n.copy),
),
const SizedBox(width: 8),
FilledButton.icon(
onPressed: () => _confirmAndApplyCommand(command, risk),
icon: const Icon(Icons.terminal, size: 18),
label: Text(widget.args.applyLabel),
),
],
),
],
),
),
),
);
}
@override
Widget build(BuildContext context) {
final theme = Theme.of(context);
final bottomPadding = MediaQuery.viewInsetsOf(context).bottom;
final heightFactor = _isMinimized ? 0.18 : 0.85;
return TweenAnimationBuilder<double>(
tween: Tween<double>(end: heightFactor),
duration: const Duration(milliseconds: 200),
curve: Curves.easeOutCubic,
builder: (context, animatedHeightFactor, child) {
return ClipRect(
child: FractionallySizedBox(
heightFactor: animatedHeightFactor,
child: child,
),
);
},
child: SafeArea(
child: Column(
children: [
Padding(
padding: const EdgeInsets.fromLTRB(16, 16, 16, 0),
child: Row(
children: [
Text(widget.args.title, style: theme.textTheme.titleLarge),
const SizedBox(width: 8),
if (_isStreaming)
const SizedBox(height: 16, width: 16, child: CircularProgressIndicator(strokeWidth: 2)),
const Spacer(),
IconButton(
icon: Icon(_isMinimized ? Icons.unfold_more : Icons.unfold_less),
tooltip: libL10n.fold,
onPressed: () {
FocusManager.instance.primaryFocus?.unfocus();
setState(() {
_isMinimized = !_isMinimized;
});
},
),
IconButton(icon: const Icon(Icons.close), onPressed: () => Navigator.of(context).pop()),
],
),
),
if (!_isMinimized) ...[
Expanded(
child: Scrollbar(
controller: _scrollController,
child: ListView(
controller: _scrollController,
padding: const EdgeInsets.fromLTRB(16, 12, 16, 12),
children: [
Text(context.l10n.askAiSelectedContent, style: theme.textTheme.titleMedium),
const SizedBox(height: 6),
CardX(
child: Padding(
padding: const EdgeInsets.all(12),
child: Column(
crossAxisAlignment: CrossAxisAlignment.start,
children: [
for (final block in widget.args.contextBlocks) ...[
SelectableText(block, style: const TextStyle(fontFamily: 'monospace')),
const SizedBox(height: 8),
],
],
),
),
),
const SizedBox(height: 16),
Text(context.l10n.askAiConversation, style: theme.textTheme.titleMedium),
const SizedBox(height: 6),
..._buildConversationWidgets(context, theme),
if (_error != null) ...[
const SizedBox(height: 16),
CardX(
child: Padding(
padding: const EdgeInsets.all(12),
child: Text(_error!, style: TextStyle(color: theme.colorScheme.error)),
),
),
],
if (_isStreaming) ...[const SizedBox(height: 16), const LinearProgressIndicator()],
const SizedBox(height: 16),
],
),
),
),
Padding(
padding: const EdgeInsets.fromLTRB(16, 8, 16, 0),
child: Text(
context.l10n.askAiDisclaimer,
style: theme.textTheme.bodySmall?.copyWith(
color: theme.colorScheme.error,
fontWeight: FontWeight.bold,
),
textAlign: TextAlign.center,
),
),
Padding(
padding: EdgeInsets.fromLTRB(16, 8, 16, 16 + bottomPadding),
child: Row(
children: [
Expanded(
child: Input(
controller: _inputController,
minLines: 1,
maxLines: 4,
hint: context.l10n.askAiFollowUpHint,
action: TextInputAction.send,
onSubmitted: (_) => _sendMessage(),
),
),
const SizedBox(width: 12),
Btn.icon(
onTap: _isStreaming || _inputController.text.trim().isEmpty ? null : _sendMessage,
icon: const Icon(Icons.send, size: 18),
),
],
).cardx,
),
] else
const SizedBox(height: 8),
],
),
),
);
}
}
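
The sheet also serves pages that have neither a terminal to insert into nor an Spi to open SSH with; here is a hedged sketch of the AiApplyBehavior.copy path, which no call site in this commit exercises. The calling function and labels are hypothetical; the AiAssistArgs fields are as declared above.

import 'package:flutter/material.dart';
import 'package:server_box/data/provider/ai/ask_ai.dart';
import 'package:server_box/view/widget/ai/ai_assist_sheet.dart';

Future<void> askAboutLogTail(BuildContext context, String logTail) {
  return showAiAssistSheet(
    context,
    AiAssistArgs(
      title: 'Ask AI',
      contextBlocks: ['[Log Tail]\n$logTail'], // redacted best-effort since `redacted` defaults to true
      scenario: AskAiScenario.general,
      applyLabel: 'Copy', // placeholder; real callers use l10n strings
      applyBehavior: AiApplyBehavior.copy, // the suggested command lands on the clipboard
    ),
  );
}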

View File

@@ -0,0 +1,137 @@
import 'package:fl_lib/fl_lib.dart';
import 'package:flutter/material.dart';
import 'package:flutter_riverpod/flutter_riverpod.dart';
import 'package:icons_plus/icons_plus.dart';
import 'package:server_box/data/model/server/server_private_info.dart';
import 'package:server_box/data/provider/ai/ai_context.dart';
import 'package:server_box/data/provider/ai/ask_ai.dart';
import 'package:server_box/data/res/store.dart';
import 'package:server_box/view/page/ssh/page/page.dart';
import 'package:server_box/view/widget/ai/ai_assist_sheet.dart';
class AiFabOverlay extends ConsumerStatefulWidget {
const AiFabOverlay({super.key, required this.child});
final Widget child;
@override
ConsumerState<AiFabOverlay> createState() => _AiFabOverlayState();
}
class _AiFabOverlayState extends ConsumerState<AiFabOverlay> {
Offset? _offsetPx;
@override
void didChangeDependencies() {
super.didChangeDependencies();
if (_offsetPx != null) return;
final media = MediaQuery.of(context);
final size = media.size;
final x = Stores.setting.aiFabOffsetX.fetch().clamp(0.0, 1.0);
final y = Stores.setting.aiFabOffsetY.fetch().clamp(0.0, 1.0);
_offsetPx = Offset(size.width * x, size.height * y);
}
void _persistOffset(Offset px) {
final size = MediaQuery.of(context).size;
if (size.width <= 0 || size.height <= 0) return;
final nx = (px.dx / size.width).clamp(0.0, 1.0);
final ny = (px.dy / size.height).clamp(0.0, 1.0);
Stores.setting.aiFabOffsetX.put(nx);
Stores.setting.aiFabOffsetY.put(ny);
}
Offset _clampToBounds(Offset px) {
final media = MediaQuery.of(context);
final size = media.size;
final padding = media.padding;
const fabSize = 56.0;
const margin = 8.0;
final minX = margin;
final maxX = (size.width - fabSize - margin).clamp(minX, size.width);
final topInset = padding.top;
final bottomInset = padding.bottom;
final minY = topInset + margin;
final maxY = (size.height - fabSize - bottomInset - margin).clamp(minY, size.height);
return Offset(px.dx.clamp(minX, maxX), px.dy.clamp(minY, maxY));
}
Future<void> _onTapFab() async {
final snapshot = ref.read(aiContextProvider);
final localeHint = Localizations.maybeLocaleOf(context)?.toLanguageTag();
final scenario = AskAiScenarioX.tryParse(snapshot.scenario) ?? AskAiScenario.general;
final applyBehavior = snapshot.spiId != null ? AiApplyBehavior.openSsh : AiApplyBehavior.copy;
await showAiAssistSheet(
context,
AiAssistArgs(
title: snapshot.title,
contextBlocks: snapshot.blocks,
scenario: scenario,
localeHint: localeHint,
applyLabel: applyBehavior == AiApplyBehavior.openSsh ? libL10n.ok : libL10n.copy,
applyBehavior: applyBehavior,
onOpenSsh: (cmd) {
final spiId = snapshot.spiId;
if (spiId == null) return;
final spi = Stores.server.get<Spi>(spiId);
if (spi == null) return;
final args = SshPageArgs(spi: spi, initCmd: cmd);
SSHPage.route.go(context, args);
},
),
);
}
@override
Widget build(BuildContext context) {
final offset = _offsetPx;
if (offset == null) {
return widget.child;
}
return Stack(
children: [
widget.child,
Positioned(
left: offset.dx,
top: offset.dy,
child: Draggable(
feedback: _buildFab(context, dragging: true),
childWhenDragging: const SizedBox.shrink(),
onDragEnd: (details) {
if (!mounted) return;
final next = _clampToBounds(details.offset);
setState(() {
_offsetPx = next;
});
_persistOffset(next);
},
child: _buildFab(context),
),
),
],
);
}
Widget _buildFab(BuildContext context, {bool dragging = false}) {
return FloatingActionButton(
heroTag: dragging ? null : 'ai_fab',
onPressed: _onTapFab,
child: const Icon(LineAwesome.robot_solid),
);
}
}

test/ai_safety_test.dart (new file, 74 lines added)
View File

@@ -0,0 +1,74 @@
import 'package:flutter_test/flutter_test.dart';
import 'package:server_box/data/model/server/server_private_info.dart';
import 'package:server_box/data/provider/ai/ai_safety.dart';
void main() {
group('AiSafety.redact', () {
test('redacts private key blocks', () {
const input = '''before
-----BEGIN PRIVATE KEY-----
abc
-----END PRIVATE KEY-----
after''';
final out = AiSafety.redact(input);
expect(out, contains('<PRIVATE_KEY_BLOCK>'));
expect(out, isNot(contains('BEGIN PRIVATE KEY')));
});
test('redacts Bearer tokens', () {
const input = 'Authorization: Bearer abc.def.ghi\nnext';
final out = AiSafety.redact(input);
expect(out, contains('Authorization: Bearer <TOKEN>'));
expect(out, isNot(contains('abc.def.ghi')));
});
test('redacts OpenAI-style keys', () {
const input = 'sk-1234567890abcdef1234567890abcdef';
final out = AiSafety.redact(input);
expect(out, contains('<API_KEY>'));
expect(out, isNot(contains('sk-123456')));
});
test('replaces Spi identity with placeholders', () {
final spi = Spi(name: 'n', ip: '192.168.1.2', port: 22, user: 'root', id: 'id');
const input = 'ssh root@192.168.1.2 -p 22 && echo root && ping 192.168.1.2';
final out = AiSafety.redact(input, spi: spi);
expect(out, contains('<USER_AT_HOST>'));
expect(out, contains('<IP>'));
expect(out, isNot(contains('root@192.168.1.2')));
expect(out, isNot(contains('192.168.1.2')));
// Note: "root" may appear elsewhere and gets replaced.
expect(out, isNot(contains('echo root')));
});
test('none mode returns input unchanged', () {
const input = 'hello sk-1234567890abcdef';
final out = AiSafety.redact(input, mode: AiRedactionMode.none);
expect(out, input);
});
});
group('AiSafety.classifyRisk', () {
test('detects high risk rm -rf', () {
expect(AiSafety.classifyRisk('rm -rf /'), AiCommandRisk.high);
expect(AiSafety.classifyRisk('sudo rm -rf /var/lib/docker'), AiCommandRisk.high);
});
test('detects high risk mkfs', () {
expect(AiSafety.classifyRisk('mkfs.ext4 /dev/sda1'), AiCommandRisk.high);
});
test('detects medium risk reboot', () {
expect(AiSafety.classifyRisk('reboot'), AiCommandRisk.medium);
});
test('detects medium risk systemctl restart', () {
expect(AiSafety.classifyRisk('systemctl restart nginx'), AiCommandRisk.medium);
});
test('defaults to low risk', () {
expect(AiSafety.classifyRisk('ls -la'), AiCommandRisk.low);
expect(AiSafety.classifyRisk(''), AiCommandRisk.low);
});
});
}
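
A possible follow-up test, sketched here but not part of this commit, covering the redactBlocks batch helper and the AWS access-key pattern in the same flutter_test style as above.

import 'package:flutter_test/flutter_test.dart';
import 'package:server_box/data/provider/ai/ai_safety.dart';

void main() {
  test('redactBlocks redacts every block independently', () {
    // 'AKIA' + 16 uppercase alphanumerics matches the AWS access-key pattern.
    final out = AiSafety.redactBlocks(['key: AKIAABCDEFGHIJKLMNOP', 'plain text']);
    expect(out.first, contains('<AWS_ACCESS_KEY_ID>'));
    expect(out.last, 'plain text');
  });
}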