# Flutter-WebRTC

Flutter WebRTC plugin for iOS/Android.
## Usage
Add `flutter_webrtc` as a dependency in your `pubspec.yaml` file.
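For instance, a minimal entry might look like the sketch below; the version number is illustrative, so check pub.dev for the latest release:

```yaml
dependencies:
  flutter:
    sdk: flutter
  # Version is illustrative; use the latest flutter_webrtc release from pub.dev.
  flutter_webrtc: ^0.2.2
```

Then run `flutter packages get` to fetch the package.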
### iOS
Add the following entries to your Info.plist file, located in `<project root>/ios/Runner/Info.plist`:

```xml
<key>NSCameraUsageDescription</key>
<string>$(PRODUCT_NAME) Camera Usage!</string>
<key>NSMicrophoneUsageDescription</key>
<string>$(PRODUCT_NAME) Microphone Usage!</string>
<key>NSPhotoLibraryUsageDescription</key>
<string>$(PRODUCT_NAME) Photo Library Usage!</string>
```

These entries allow your app to access the camera, the microphone, and the photo library.
### Android
Ensure the following permissions are present in your Android manifest file, located in `<project root>/android/app/src/main/AndroidManifest.xml`:

```xml
<uses-feature android:name="android.hardware.camera" />
<uses-feature android:name="android.hardware.camera.autofocus" />
<uses-permission android:name="android.permission.CAMERA" />
<uses-permission android:name="android.permission.RECORD_AUDIO" />
<uses-permission android:name="android.permission.ACCESS_NETWORK_STATE" />
<uses-permission android:name="android.permission.CHANGE_NETWORK_STATE" />
<uses-permission android:name="android.permission.MODIFY_AUDIO_SETTINGS" />
```

The Flutter project template adds some of these, so they may already be there.
## Functionality
We intend to implement support for the following features:

- [ ] Data Channel
- [ ] Port to flutter-desktop-embedding
- [ ] Screen Capture
- [ ] ORTC API
- [ ] Port to Fuchsia
## Example
```dart
import 'dart:core';

import 'package:flutter/material.dart';
import 'package:flutter_webrtc/webrtc.dart';

/// getUserMedia sample
class GetUserMediaSample extends StatefulWidget {
  static String tag = 'get_usermedia_sample';

  @override
  _GetUserMediaSampleState createState() => new _GetUserMediaSampleState();
}

class _GetUserMediaSampleState extends State<GetUserMediaSample> {
  MediaStream _localStream;
  final _localRenderer = new RTCVideoRenderer();
  bool _inCalling = false;

  @override
  initState() {
    super.initState();
    initRenderers();
  }

  @override
  deactivate() {
    super.deactivate();
    if (_inCalling) {
      _hangUp();
    }
  }

  initRenderers() async {
    await _localRenderer.initialize();
  }

  // Platform messages are asynchronous, so we initialize in an async method.
  _makeCall() async {
    final Map<String, dynamic> mediaConstraints = {
      "audio": true,
      "video": {
        "mandatory": {
          "minWidth": '640', // Provide your own width, height and frame rate here
          "minHeight": '480',
          "minFrameRate": '30',
        },
        "facingMode": "user",
        "optional": [],
      }
    };

    try {
      // Await the future so that errors are caught by this try/catch.
      var stream = await navigator.getUserMedia(mediaConstraints);
      _localStream = stream;
      _localRenderer.srcObject = _localStream;
    } catch (e) {
      print(e.toString());
    }
    if (!mounted) return;

    setState(() {
      _inCalling = true;
    });
  }

  _hangUp() async {
    try {
      await _localStream.dispose();
      _localRenderer.srcObject = null;
    } catch (e) {
      print(e.toString());
    }
    setState(() {
      _inCalling = false;
    });
  }

  @override
  Widget build(BuildContext context) {
    return new Scaffold(
      appBar: new AppBar(
        title: new Text('GetUserMedia API Test'),
      ),
      body: new OrientationBuilder(
        builder: (context, orientation) {
          return new Center(
            child: new Container(
              margin: new EdgeInsets.fromLTRB(0.0, 0.0, 0.0, 0.0),
              width: MediaQuery.of(context).size.width,
              height: MediaQuery.of(context).size.height,
              child: new RTCVideoView(_localRenderer),
              decoration: new BoxDecoration(color: Colors.black54),
            ),
          );
        },
      ),
      floatingActionButton: new FloatingActionButton(
        onPressed: _inCalling ? _hangUp : _makeCall,
        tooltip: _inCalling ? 'Hangup' : 'Call',
        child: new Icon(_inCalling ? Icons.call_end : Icons.phone),
      ),
    );
  }
}
```
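To try the sample as a standalone app, a minimal bootstrap along these lines should work; the app title is arbitrary, and `GetUserMediaSample` is assumed to be defined in (or imported into) the same file:

```dart
import 'package:flutter/material.dart';

// Assumes GetUserMediaSample from the example above is in scope
// (same file, or imported from wherever you placed it).
void main() {
  runApp(new MaterialApp(
    title: 'GetUserMedia Sample',
    home: new GetUserMediaSample(),
  ));
}
```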
For more examples, please refer to [flutter-webrtc-demo](https://github.com/cloudwebrtc/flutter-webrtc-demo).