Adding support for macrobenchmarking with "flutter run" (#167692)

Notable changes:
* Allows macrobenchmarks to run via `flutter run`.
* Splits macrobenchmarking between "orchestration logic" and "app
serving" (served on separate ports on the same machine to keep the
scheme consistent with the `flutter build` path).
* Adds an intercepted entrypoint for web benchmarks. We can't pass flags
to the app since it's not supported, so I've hard-coded the
orchestration server's port.
* Adding logic to connect to an existing Chrome debugger instance (vs
spawning one) for benchmarks.
This commit is contained in:
MarkZ 2025-04-28 16:39:40 -07:00 committed by GitHub
parent b261c51609
commit 2a3e27fdd9
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
5 changed files with 307 additions and 114 deletions

View File

@ -7,6 +7,7 @@ import 'dart:convert' show json;
import 'dart:js_interop';
import 'dart:math' as math;
import 'package:args/args.dart';
import 'package:web/web.dart' as web;
import 'src/web/bench_build_image.dart';
@ -81,9 +82,28 @@ final Map<String, RecorderFactory> benchmarks = <String, RecorderFactory>{
BenchImageDecoding.benchmarkName: () => BenchImageDecoding(),
};
final LocalBenchmarkServerClient _client = LocalBenchmarkServerClient();
late final LocalBenchmarkServerClient _client;
Future<void> main(List<String> args) async {
final ArgParser parser =
ArgParser()..addOption(
'port',
abbr: 'p',
help:
'The port of the local benchmark server used that implements the '
'API required for orchestrating macrobenchmarks.',
);
final ArgResults argResults = parser.parse(args);
Uri serverOrigin;
if (argResults.wasParsed('port')) {
final int port = int.parse(argResults['port'] as String);
serverOrigin = Uri.http('localhost:$port');
} else {
serverOrigin = Uri.base;
}
_client = LocalBenchmarkServerClient(serverOrigin);
Future<void> main() async {
// Check if the benchmark server wants us to run a specific benchmark.
final String nextBenchmark = await _client.requestNextBenchmark();
@ -96,6 +116,14 @@ Future<void> main() async {
web.window.location.reload();
}
/// Shared entrypoint used by DDC, where the macrobenchmarks orchestration
/// server runs on a separate port.
// TODO(markzipan): Use `main` in `web_benchmarks.dart` once Flutter Web
// supports the `--dart-entrypoint-args` flag.
// ignore: unreachable_from_main
Future<void> sharedMain(List<String> args) => main(args);
Future<void> _runBenchmark(String benchmarkName) async {
final RecorderFactory? recorderFactory = benchmarks[benchmarkName];
@ -310,9 +338,15 @@ class TimeseriesVisualization {
/// implement a manual fallback. This allows debugging benchmarks using plain
/// `flutter run`.
class LocalBenchmarkServerClient {
LocalBenchmarkServerClient(this.serverOrigin);
/// This value is returned by [requestNextBenchmark].
static const String kManualFallback = '__manual_fallback__';
/// The origin (e.g., http://localhost:1234) of the benchmark server that
/// hosts the macrobenchmarking API.
final Uri serverOrigin;
/// Whether we fell back to manual mode.
///
/// This happens when you run benchmarks using plain `flutter run` rather than
@ -320,13 +354,20 @@ class LocalBenchmarkServerClient {
/// provides API for automatically picking the next benchmark to run.
bool isInManualMode = false;
/// Headers attached to every request sent to the local benchmark server.
///
// NOTE(review): these are CORS *response* headers (`Access-Control-Allow-*`)
// being attached to outgoing *requests*; confirm the orchestration server
// actually requires them on the request side.
// NOTE(review): 'Post' is not the canonical 'POST' method token — verify the
// server matches the method name case-insensitively, or use 'POST'.
// NOTE(review): for a Uri built via Uri.http('localhost:<port>'),
// `serverOrigin.path` is empty — was `serverOrigin.origin`
// (scheme://host:port) intended here? TODO confirm against the server's
// Access-Control-Allow-Origin handling.
Map<String, String> get headers => <String, String>{
'Access-Control-Allow-Headers': 'Origin, Content-Type, Accept',
'Access-Control-Allow-Methods': 'Post',
'Access-Control-Allow-Origin': serverOrigin.path,
};
/// Asks the local server for the name of the next benchmark to run.
///
/// Returns [kManualFallback] if local server is not available (uses 404 as a
/// signal).
Future<String> requestNextBenchmark() async {
final web.XMLHttpRequest request = await _requestXhr(
'/next-benchmark',
serverOrigin.resolve('next-benchmark'),
requestHeaders: headers,
method: 'POST',
mimeType: 'application/json',
sendData: json.encode(benchmarks.keys.toList()),
@ -358,7 +399,8 @@ class LocalBenchmarkServerClient {
Future<void> startPerformanceTracing(String benchmarkName) async {
_checkNotManualMode();
await _requestXhr(
'/start-performance-tracing?label=$benchmarkName',
serverOrigin.resolve('start-performance-tracing?label=$benchmarkName'),
requestHeaders: headers,
method: 'POST',
mimeType: 'application/json',
);
@ -367,7 +409,12 @@ class LocalBenchmarkServerClient {
/// Stops the performance tracing session started by [startPerformanceTracing].
Future<void> stopPerformanceTracing() async {
_checkNotManualMode();
await _requestXhr('/stop-performance-tracing', method: 'POST', mimeType: 'application/json');
await _requestXhr(
serverOrigin.resolve('stop-performance-tracing'),
requestHeaders: headers,
method: 'POST',
mimeType: 'application/json',
);
}
/// Sends the profile data collected by the benchmark to the local benchmark
@ -375,7 +422,8 @@ class LocalBenchmarkServerClient {
Future<void> sendProfileData(Profile profile) async {
_checkNotManualMode();
final web.XMLHttpRequest request = await _requestXhr(
'/profile-data',
serverOrigin.resolve('profile-data'),
requestHeaders: headers,
method: 'POST',
mimeType: 'application/json',
sendData: json.encode(profile.toJson()),
@ -394,7 +442,8 @@ class LocalBenchmarkServerClient {
Future<void> reportError(dynamic error, StackTrace stackTrace) async {
_checkNotManualMode();
await _requestXhr(
'/on-error',
serverOrigin.resolve('on-error'),
requestHeaders: headers,
method: 'POST',
mimeType: 'application/json',
sendData: json.encode(<String, dynamic>{'error': '$error', 'stackTrace': '$stackTrace'}),
@ -405,7 +454,8 @@ class LocalBenchmarkServerClient {
Future<void> printToConsole(String report) async {
_checkNotManualMode();
await _requestXhr(
'/print-to-console',
serverOrigin.resolve('print-to-console'),
requestHeaders: headers,
method: 'POST',
mimeType: 'text/plain',
sendData: report,
@ -415,7 +465,7 @@ class LocalBenchmarkServerClient {
/// This is the same as calling [html.HttpRequest.request] but it doesn't
/// crash on 404, which we use to detect `flutter run`.
Future<web.XMLHttpRequest> _requestXhr(
String url, {
Uri url, {
String? method,
bool? withCredentials,
String? responseType,
@ -427,7 +477,7 @@ class LocalBenchmarkServerClient {
final web.XMLHttpRequest xhr = web.XMLHttpRequest();
method ??= 'GET';
xhr.open(method, url, true);
xhr.open(method, '$url', true);
if (withCredentials != null) {
xhr.withCredentials = withCredentials;

View File

@ -0,0 +1,18 @@
// Copyright 2014 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'web_benchmarks.dart';
/// An entrypoint used by DDC for running macrobenchmarks.
///
/// DDC runs macrobenchmarks via 'flutter run', which hosts files from its own
/// local server. As a result, the macrobenchmarking orchestration server needs
/// to be hosted on a separate port. We split the entrypoint here because we
/// can't pass command line args to Dart apps on Flutter Web.
///
// TODO(markzipan): Use `main` in `web_benchmarks.dart` when Flutter Web supports the `--dart-entrypoint-args` flag.
Future<void> main() {
  // Hard-coded port; must stay in sync with `benchmarkServerPort` in
  // `flutter/dev/devicelab/lib/tasks/web_benchmarks.dart`.
  const String orchestrationPort = '9999';
  return sharedMain(<String>['--port', orchestrationPort]);
}

View File

@ -18,6 +18,7 @@ dependencies:
# flutter update-packages --force-upgrade
flutter_gallery_assets: 1.0.2
args: 2.7.0
web: 1.1.1
async: 2.13.0 # THIS LINE IS AUTOGENERATED - TO UPDATE USE "flutter update-packages --force-upgrade"
@ -52,7 +53,6 @@ dev_dependencies:
_fe_analyzer_shared: 82.0.0 # THIS LINE IS AUTOGENERATED - TO UPDATE USE "flutter update-packages --force-upgrade"
analyzer: 7.4.4 # THIS LINE IS AUTOGENERATED - TO UPDATE USE "flutter update-packages --force-upgrade"
args: 2.7.0 # THIS LINE IS AUTOGENERATED - TO UPDATE USE "flutter update-packages --force-upgrade"
cli_config: 0.2.0 # THIS LINE IS AUTOGENERATED - TO UPDATE USE "flutter update-packages --force-upgrade"
convert: 3.1.2 # THIS LINE IS AUTOGENERATED - TO UPDATE USE "flutter update-packages --force-upgrade"
coverage: 1.12.0 # THIS LINE IS AUTOGENERATED - TO UPDATE USE "flutter update-packages --force-upgrade"

View File

@ -74,7 +74,7 @@ class Chrome {
});
}
/// Launches Chrome with the give [options].
/// Launches Chrome with the given [options].
///
/// The [onError] callback is called with an error message when the Chrome
/// process encounters an error. In particular, [onError] is called when the
@ -125,7 +125,28 @@ class Chrome {
WipConnection? debugConnection;
if (withDebugging) {
debugConnection = await _connectToChromeDebugPort(chromeProcess, options.debugPort!);
debugConnection = await _connectToChromeDebugPort(options.debugPort!);
}
return Chrome._(chromeProcess, onError, debugConnection);
}
/// Connects to an existing Chrome process with the given [options].
///
/// The [onError] callback is called with an error message when the Chrome
/// process encounters an error. In particular, [onError] is called when the
/// Chrome process exits prematurely, i.e. before [stop] is called.
static Future<Chrome> connect(
io.Process chromeProcess,
ChromeOptions options, {
String? workingDirectory,
required ChromeErrorCallback onError,
}) async {
final bool withDebugging = options.debugPort != null;
WipConnection? debugConnection;
if (withDebugging) {
debugConnection = await _connectToChromeDebugPort(options.debugPort!);
}
return Chrome._(chromeProcess, onError, debugConnection);
@ -260,7 +281,7 @@ String _findSystemChromeExecutable() {
}
/// Waits for Chrome to print DevTools URI and connects to it.
Future<WipConnection> _connectToChromeDebugPort(io.Process chromeProcess, int port) async {
Future<WipConnection> _connectToChromeDebugPort(int port) async {
final Uri devtoolsUri = await _getRemoteDebuggerUrl(Uri.parse('http://localhost:$port'));
print('Connecting to DevTools: $devtoolsUri');
final ChromeConnection chromeConnection = ChromeConnection('localhost', port);

View File

@ -3,7 +3,7 @@
// found in the LICENSE file.
import 'dart:async';
import 'dart:convert' show json;
import 'dart:convert' show LineSplitter, json, utf8;
import 'dart:io' as io;
import 'package:logging/logging.dart';
@ -16,10 +16,16 @@ import '../framework/browser.dart';
import '../framework/task_result.dart';
import '../framework/utils.dart';
/// The port number used by the local benchmark server.
/// The port at which the local benchmark server is served.
/// This is hard-coded and must be the same as the port used for DDC's benchmark at `flutter/dev/benchmarks/macrobenchmarks/lib/web_benchmarks_ddc.dart`.
const int benchmarkServerPort = 9999;
/// The port at which Chrome listens for a debug connection.
const int chromeDebugPort = 10000;
/// The port at which the benchmark's app is being served.
const int benchmarksAppPort = 10001;
typedef WebBenchmarkOptions =
({bool useWasm, bool forceSingleThreadedSkwasm, bool useDdc, bool withHotReload});
@ -34,21 +40,69 @@ Future<TaskResult> runWebBenchmark(WebBenchmarkOptions benchmarkOptions) async {
);
return inDirectory(macrobenchmarksDirectory, () async {
await flutter('clean');
await evalFlutter(
'build',
options: <String>[
'web',
'--no-tree-shake-icons', // local engine builds are frequently out of sync with the Dart Kernel version
if (benchmarkOptions.useWasm) ...<String>['--wasm', '--no-strip-wasm'],
'--dart-define=FLUTTER_WEB_ENABLE_PROFILING=true',
if (benchmarkOptions.useDdc) '--debug' else '--profile',
if (benchmarkOptions.useDdc && benchmarkOptions.withHotReload)
'--extra-front-end-options=--dartdevc-canary,--dartdevc-module-format=ddc',
'--no-web-resources-cdn',
'-t',
'lib/web_benchmarks.dart',
],
);
// DDC runs the benchmarks suite with 'flutter run', attaching to its
// Chrome instance instead of starting a new one.
io.Process? flutterRunProcess;
if (benchmarkOptions.useDdc) {
final Completer<void> ddcAppReady = Completer<void>();
flutterRunProcess = await startFlutter(
'run',
options: <String>[
'-d',
'chrome',
'--web-port',
'$benchmarksAppPort',
'--web-browser-debug-port',
'$chromeDebugPort',
'--web-launch-url',
'http://localhost:$benchmarksAppPort/index.html',
'--debug',
'--no-web-enable-expression-evaluation',
'--web-browser-flag=--disable-popup-blocking',
'--web-browser-flag=--bwsi',
'--web-browser-flag=--no-first-run',
'--web-browser-flag=--no-default-browser-check',
'--web-browser-flag=--disable-default-apps',
'--web-browser-flag=--disable-translate',
'--web-browser-flag=--disable-background-timer-throttling',
'--web-browser-flag=--disable-backgrounding-occluded-windows',
'--dart-define=FLUTTER_WEB_ENABLE_PROFILING=true',
if (benchmarkOptions.withHotReload) '--web-experimental-hot-reload',
'--no-web-resources-cdn',
'lib/web_benchmarks_ddc.dart',
],
);
flutterRunProcess.stdout.transform(utf8.decoder).transform(const LineSplitter()).listen((
String line,
) {
if (line.startsWith('This app is linked to the debug service')) {
ddcAppReady.complete();
}
print('[CHROME STDOUT]: $line');
});
flutterRunProcess.stderr.transform(utf8.decoder).transform(const LineSplitter()).listen((
String line,
) {
print('[CHROME STDERR]: $line');
});
// Wait for the app to load in DDC's Chrome instance before trying to
// connect the debugger.
await ddcAppReady.future;
} else {
await evalFlutter(
'build',
options: <String>[
'web',
'--no-tree-shake-icons', // local engine builds are frequently out of sync with the Dart Kernel version
if (benchmarkOptions.useWasm) ...<String>['--wasm', '--no-strip-wasm'],
'--dart-define=FLUTTER_WEB_ENABLE_PROFILING=true',
'--profile',
'--no-web-resources-cdn',
'-t',
'lib/web_benchmarks.dart',
],
);
}
final Completer<List<Map<String, dynamic>>> profileData =
Completer<List<Map<String, dynamic>>>();
final List<Map<String, dynamic>> collectedProfiles = <Map<String, dynamic>>[];
@ -63,83 +117,102 @@ Future<TaskResult> runWebBenchmark(WebBenchmarkOptions benchmarkOptions) async {
late io.HttpServer server;
Cascade cascade = Cascade();
List<Map<String, dynamic>>? latestPerformanceTrace;
cascade = cascade
.add((Request request) async {
try {
chrome ??= await whenChromeIsReady;
if (request.requestedUri.path.endsWith('/profile-data')) {
final Map<String, dynamic> profile =
json.decode(await request.readAsString()) as Map<String, dynamic>;
final String benchmarkName = profile['name'] as String;
if (benchmarkName != benchmarkIterator.current) {
profileData.completeError(
Exception(
'Browser returned benchmark results from a wrong benchmark.\n'
'Requested to run benchmark ${benchmarkIterator.current}, but '
'got results for $benchmarkName.',
),
);
unawaited(server.close());
}
final Map<String, List<String>> requestHeaders = <String, List<String>>{
'Access-Control-Allow-Headers': <String>[
'Accept',
'Access-Control-Allow-Headers',
'Access-Control-Allow-Methods',
'Access-Control-Allow-Origin',
'Content-Type',
'Origin',
],
'Access-Control-Allow-Methods': <String>['Post'],
'Access-Control-Allow-Origin': <String>['http://localhost:$benchmarksAppPort'],
};
// Trace data is null when the benchmark is not frame-based, such as RawRecorder.
if (latestPerformanceTrace != null) {
final BlinkTraceSummary traceSummary =
BlinkTraceSummary.fromJson(latestPerformanceTrace!)!;
profile['totalUiFrame.average'] =
traceSummary.averageTotalUIFrameTime.inMicroseconds;
profile['scoreKeys'] ??= <dynamic>[]; // using dynamic for consistency with JSON
(profile['scoreKeys'] as List<dynamic>).add('totalUiFrame.average');
latestPerformanceTrace = null;
}
collectedProfiles.add(profile);
return Response.ok('Profile received');
} else if (request.requestedUri.path.endsWith('/start-performance-tracing')) {
latestPerformanceTrace = null;
await chrome!.beginRecordingPerformance(
request.requestedUri.queryParameters['label']!,
);
return Response.ok('Started performance tracing');
} else if (request.requestedUri.path.endsWith('/stop-performance-tracing')) {
latestPerformanceTrace = await chrome!.endRecordingPerformance();
return Response.ok('Stopped performance tracing');
} else if (request.requestedUri.path.endsWith('/on-error')) {
final Map<String, dynamic> errorDetails =
json.decode(await request.readAsString()) as Map<String, dynamic>;
unawaited(server.close());
// Keep the stack trace as a string. It's thrown in the browser, not this Dart VM.
profileData.completeError('${errorDetails['error']}\n${errorDetails['stackTrace']}');
return Response.ok('');
} else if (request.requestedUri.path.endsWith('/next-benchmark')) {
if (benchmarks == null) {
benchmarks =
(json.decode(await request.readAsString()) as List<dynamic>).cast<String>();
benchmarkIterator = benchmarks!.iterator;
}
if (benchmarkIterator.moveNext()) {
final String nextBenchmark = benchmarkIterator.current;
print('Launching benchmark "$nextBenchmark"');
return Response.ok(nextBenchmark);
} else {
profileData.complete(collectedProfiles);
return Response.notFound('Finished running benchmarks.');
}
} else if (request.requestedUri.path.endsWith('/print-to-console')) {
// A passthrough used by
// `dev/benchmarks/macrobenchmarks/lib/web_benchmarks.dart`
// to print information.
final String message = await request.readAsString();
print('[APP] $message');
return Response.ok('Reported.');
} else {
return Response.notFound('This request is not handled by the profile-data handler.');
}
} catch (error, stackTrace) {
profileData.completeError(error, stackTrace);
return Response.internalServerError(body: '$error');
cascade = cascade.add((Request request) async {
final String requestContents = await request.readAsString();
try {
chrome ??= await whenChromeIsReady;
if (request.method == 'OPTIONS') {
return Response.ok('', headers: requestHeaders);
}
if (request.requestedUri.path.endsWith('/profile-data')) {
final Map<String, dynamic> profile = json.decode(requestContents) as Map<String, dynamic>;
final String benchmarkName = profile['name'] as String;
if (benchmarkName != benchmarkIterator.current) {
profileData.completeError(
Exception(
'Browser returned benchmark results from a wrong benchmark.\n'
'Requested to run benchmark ${benchmarkIterator.current}, but '
'got results for $benchmarkName.',
),
);
unawaited(server.close());
}
})
.add(createBuildDirectoryHandler(path.join(macrobenchmarksDirectory, 'build', 'web')));
// Trace data is null when the benchmark is not frame-based, such as RawRecorder.
if (latestPerformanceTrace != null) {
final BlinkTraceSummary traceSummary =
BlinkTraceSummary.fromJson(latestPerformanceTrace!)!;
profile['totalUiFrame.average'] = traceSummary.averageTotalUIFrameTime.inMicroseconds;
profile['scoreKeys'] ??= <dynamic>[]; // using dynamic for consistency with JSON
(profile['scoreKeys'] as List<dynamic>).add('totalUiFrame.average');
latestPerformanceTrace = null;
}
collectedProfiles.add(profile);
return Response.ok('Profile received', headers: requestHeaders);
} else if (request.requestedUri.path.endsWith('/start-performance-tracing')) {
latestPerformanceTrace = null;
await chrome!.beginRecordingPerformance(request.requestedUri.queryParameters['label']!);
return Response.ok('Started performance tracing', headers: requestHeaders);
} else if (request.requestedUri.path.endsWith('/stop-performance-tracing')) {
latestPerformanceTrace = await chrome!.endRecordingPerformance();
return Response.ok('Stopped performance tracing', headers: requestHeaders);
} else if (request.requestedUri.path.endsWith('/on-error')) {
final Map<String, dynamic> errorDetails =
json.decode(requestContents) as Map<String, dynamic>;
unawaited(server.close());
// Keep the stack trace as a string. It's thrown in the browser, not this Dart VM.
profileData.completeError('${errorDetails['error']}\n${errorDetails['stackTrace']}');
return Response.ok('', headers: requestHeaders);
} else if (request.requestedUri.path.endsWith('/next-benchmark')) {
if (benchmarks == null) {
benchmarks = (json.decode(requestContents) as List<dynamic>).cast<String>();
benchmarkIterator = benchmarks!.iterator;
}
if (benchmarkIterator.moveNext()) {
final String nextBenchmark = benchmarkIterator.current;
print('Launching benchmark "$nextBenchmark"');
return Response.ok(nextBenchmark, headers: requestHeaders);
} else {
profileData.complete(collectedProfiles);
return Response.notFound('Finished running benchmarks.', headers: requestHeaders);
}
} else if (request.requestedUri.path.endsWith('/print-to-console')) {
// A passthrough used by
// `dev/benchmarks/macrobenchmarks/lib/web_benchmarks.dart`
// to print information.
final String message = requestContents;
print('[APP] $message');
return Response.ok('Reported.', headers: requestHeaders);
} else {
return Response.notFound(
'This request is not handled by the profile-data handler.',
headers: requestHeaders,
);
}
} catch (error, stackTrace) {
profileData.completeError(error, stackTrace);
return Response.internalServerError(body: '$error', headers: requestHeaders);
}
});
// Macrobenchmarks using 'flutter build' serve files from their local build directory alongside the orchestration logic.
if (!benchmarkOptions.useDdc) {
cascade = cascade.add(
createBuildDirectoryHandler(path.join(macrobenchmarksDirectory, 'build', 'web')),
);
}
server = await io.HttpServer.bind('localhost', benchmarkServerPort);
try {
@ -157,8 +230,10 @@ Future<TaskResult> runWebBenchmark(WebBenchmarkOptions benchmarkOptions) async {
// final bool isUncalibratedSmokeTest =
// io.Platform.environment['UNCALIBRATED_SMOKE_TEST'] == 'true';
final String urlParams = benchmarkOptions.forceSingleThreadedSkwasm ? '?force_st=true' : '';
// DDC apps are served from a different port from the orchestration server.
final int appServingPort = benchmarkOptions.useDdc ? benchmarksAppPort : benchmarkServerPort;
final ChromeOptions options = ChromeOptions(
url: 'http://localhost:$benchmarkServerPort/index.html$urlParams',
url: 'http://localhost:$appServingPort/index.html$urlParams',
userDataDirectory: userDataDir,
headless: isUncalibratedSmokeTest,
debugPort: chromeDebugPort,
@ -166,13 +241,26 @@ Future<TaskResult> runWebBenchmark(WebBenchmarkOptions benchmarkOptions) async {
);
print('Launching Chrome.');
whenChromeIsReady = Chrome.launch(
options,
onError: (String error) {
profileData.completeError(Exception(error));
},
workingDirectory: cwd,
);
if (benchmarkOptions.useDdc) {
// DDC reuses the existing Chrome connection spawned via 'flutter run'.
whenChromeIsReady = Chrome.connect(
flutterRunProcess!,
options,
onError: (String error) {
profileData.completeError(Exception(error));
},
workingDirectory: cwd,
);
} else {
whenChromeIsReady = Chrome.launch(
options,
onError: (String error) {
profileData.completeError(Exception(error));
},
workingDirectory: cwd,
);
}
print('Waiting for the benchmark to report benchmark profile.');
final Map<String, dynamic> taskResult = <String, dynamic>{};
@ -216,6 +304,22 @@ Future<TaskResult> runWebBenchmark(WebBenchmarkOptions benchmarkOptions) async {
} finally {
unawaited(server.close());
chrome?.stop();
if (flutterRunProcess != null) {
// Sending a SIGINT/SIGTERM to the process here isn't reliable because [process] is
// the shell (flutter is a shell script) and doesn't pass the signal on.
// Sending a `q` is an instruction to quit using the console runner.
flutterRunProcess.stdin.write('q');
await flutterRunProcess.stdin.flush();
// Give the process a couple of seconds to exit and run shutdown hooks
// before sending kill signal.
await flutterRunProcess.exitCode.timeout(
const Duration(seconds: 2),
onTimeout: () {
flutterRunProcess!.kill(io.ProcessSignal.sigint);
return 0;
},
);
}
}
});
}