I am a newbie in Flutter and this is my first real app. Here is what I want to achieve: the app will be used to detect number plates automatically when the camera is pointed at a plate, and for the text-detection part I am going to use ML Kit in Flutter. How should I write the startImageStream method so that it outputs 10 images as the File data type, which I would then add to my imagePathList? (The rough loop I have in mind for consuming that list is sketched after the code below.)
I did research this, but I did not find anything related to what I want to achieve. There are a few blog posts that use this approach, but they do not explain it properly, so I turned to Stack Overflow.
I want the functionality to be like this.
This is what my camera_screen looks like:
import 'dart:io';
import 'dart:typed_data';
import 'package:flutter/material.dart';
import 'package:firebase_ml_vision/firebase_ml_vision.dart';
import 'package:flutter/services.dart';
import 'package:npgroups/npgroups.dart';
import 'package:society_app/screens/result_screen.dart';
import 'package:society_app/widgets/common_drawer.dart';
import 'package:camera/camera.dart';
import 'dart:async';
import 'package:numeric_keyboard/numeric_keyboard.dart';
class CameraScreen extends StatefulWidget {
final CameraDescription camera;
static String id = 'camera_screen';
CameraScreen({required this.camera});
@override
_CameraScreenState createState() => _CameraScreenState();
}
class _CameraScreenState extends State<CameraScreen> {
Widget buildButton(String buttonText){
return Container(
height: MediaQuery.of(context).size.height * 0.1 * 0.85,
color: Colors.blueAccent,
child: FlatButton(
onPressed: () {},
child: Text(
buttonText,
style: TextStyle(
fontSize: 30.0,
fontWeight: FontWeight.normal,
color: Colors.white
),
)
),
);
}
late CameraController _controller;
late Future<void> _initializeControllerFuture;
late Npgroups _npgroups;
List<File> imagePathList = [];
List<String?> detectedWordList = [];
static const MethodChannel _channel = const MethodChannel('tflite');
String? resultText;
late int imageHeight;
late int imageWidth;
bool? get isPaused => null;
@override
void initState() {
super.initState();
_controller = CameraController(
// Get a specific camera from the list of available cameras.
widget.camera,
// Define the resolution to use.
ResolutionPreset.medium,
);
_initializeControllerFuture = _controller.initialize();
initPlatformState();
}
Future<void> initPlatformState() async {
_npgroups = Npgroups(listenToNumplate);
await _npgroups.startListening();
}
@override
void dispose() {
// Dispose of the controller when the widget is disposed.
_controller.dispose();
super.dispose();
}
Future<void> getNumberPlate(File image) async {
FirebaseVisionImage mlImage = FirebaseVisionImage.fromFile(image);
TextRecognizer recognizeText = FirebaseVision.instance.textRecognizer();
VisionText readText = await recognizeText.processImage(mlImage);
for (TextBlock block in readText.blocks) {
for (TextLine line in block.lines) {
for (TextElement word in line.elements) {
resultText = word.text;
_npgroups.processNumberplate(resultText!);
}
}
}
if (resultText == null) {
print('null');
} else {
detectedWordList.add(resultText);
}
}
listenToNumplate(String numplate) {
//Consume the numplate
}
@override
Widget build(BuildContext context) {
return Scaffold(
drawer: CommonDrawer(),
appBar: AppBar(
title: Text(
'Camera'
),
),
body: FutureBuilder<void>(
future: _initializeControllerFuture,
builder: (context, snapshot) {
if (snapshot.connectionState == ConnectionState.done) {
return Column(
crossAxisAlignment: CrossAxisAlignment.start,
children: [
Stack(
children: [
AspectRatio(aspectRatio: _controller.value.aspectRatio - 0.2,
child: CameraPreview(_controller)),
Positioned(
bottom: 10,
left: 5,
child: Row(
children: [
VehicleButton(icon: Icons.motorcycle_outlined,),
SizedBox(width: 35),
VehicleButton(icon: Icons.directions_car,)
],
),
)
],
),
Row(
children: [
Container(
width: MediaQuery.of(context).size.width * .99,
child: Table(
children: [
TableRow(
children: [
buildButton('1'),
buildButton('2'),
buildButton('3')
]
),
TableRow(
children: [
buildButton('4'),
buildButton('5'),
buildButton('6')
]
),
TableRow(
children: [
buildButton('7'),
buildButton('8'),
buildButton('9')
]
),
TableRow(
children: [
buildButton('↻'),
buildButton('0'),
buildButton('⌫')
]
),
],
),
)
],
)
],
);
} else {
// Otherwise, display a loading indicator.
return const Center(child: CircularProgressIndicator());
}
},
),
);
}
}
class VehicleButton extends StatelessWidget {
final IconData icon;
VehicleButton({required this.icon});
@override
Widget build(BuildContext context) {
return ElevatedButton(
onPressed: () {},
child: Padding(
padding: const EdgeInsets.symmetric(vertical: 17, horizontal: 50),
child: Icon(
icon,
color: Colors.white,
size: 45,
),
),
style: ButtonStyle(
shape: MaterialStateProperty.all(RoundedRectangleBorder(borderRadius: BorderRadius.circular(10.0)))
),
);
}
}
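Once I have the 10 File objects in imagePathList, my rough plan is simply to run each one through the getNumberPlate() method above, something like this (just the idea, assuming imagePathList ends up holding File objects):

for (final File image in imagePathList) {
  await getNumberPlate(image);
}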
You can do something like this, starting the image stream once the controller has finished initializing:
@override
void initState() {
  super.initState();
  controller = CameraController(widget.cameras[0], ResolutionPreset.medium,
      enableAudio: false);
  controller.initialize().then((_) async {
    if (!mounted) {
      return;
    }
    setState(() {});
    await controller.startImageStream((CameraImage availableImage) async {
      _scanText(availableImage);
    });
  });
}
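To end up with exactly 10 images as File objects for your imagePathList, you can keep a counter inside the startImageStream callback, convert each incoming frame, write it to disk, and stop the stream once the counter reaches 10. The snippet below is only a minimal sketch against your existing _CameraScreenState (it reuses your _controller and imagePathList fields): convertCameraImageToJpegBytes() is a hypothetical helper you would still need to write, because the raw frames arrive as YUV420 on Android / BGRA8888 on iOS and have to be encoded (the image package is one common way to do that), and it assumes the path_provider package for a writable temporary directory.

// Extra imports at the top of camera_screen.dart:
// import 'dart:io';
// import 'package:path_provider/path_provider.dart';

int _frameCount = 0;
bool _isProcessing = false;

Future<void> startCapturing() async {
  await _controller.startImageStream((CameraImage availableImage) async {
    // Drop frames while the previous one is still being converted,
    // and ignore everything once 10 files have been collected.
    if (_isProcessing || _frameCount >= 10) return;
    _isProcessing = true;
    try {
      // Hypothetical helper: encode the raw camera planes as JPEG bytes.
      final jpegBytes = await convertCameraImageToJpegBytes(availableImage);

      // Write the bytes to a temporary file so the rest of the app
      // can keep working with the File type.
      final dir = await getTemporaryDirectory();
      final file = File(
          '${dir.path}/frame_${DateTime.now().millisecondsSinceEpoch}.jpg');
      await file.writeAsBytes(jpegBytes);

      imagePathList.add(file);
      _frameCount++;

      // Stop the stream once 10 frames have been saved.
      if (_frameCount >= 10) {
        await _controller.stopImageStream();
        // From here you can run the files through getNumberPlate(),
        // as in the loop sketched in the question.
      }
    } finally {
      _isProcessing = false;
    }
  });
}

If you only need the recognized text and not the files themselves, firebase_ml_vision can also consume the stream directly via FirebaseVisionImage.fromBytes with the plane data from the CameraImage, which skips writing to disk entirely.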