Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
- // ignore_for_file: avoid_web_libraries_in_flutter
- import 'dart:async';
- import 'dart:html';
- import 'dart:js_util';
- import 'dart:typed_data';
- import 'package:dio/dio.dart';
- import 'package:flutter/material.dart';
- import 'package:flutter/services.dart';
- import 'package:js/js.dart';
- @JS('getWaveBlob')
- external Blob toWav(Blob blob);
- final dio = Dio();
/// Thin wrapper around the browser [MediaRecorder] API that records
/// microphone audio and exposes the result as a WAV blob URL / byte list.
///
/// Lifecycle: [init] → [start] → [stop] (repeatable) → [dispose].
class _Recorder {
  MediaRecorder? _mediaRecorder;
  // Kept so dispose() can stop the tracks and release the microphone.
  MediaStream? _mediaStream;
  List<Blob>? _audioBlobParts;
  String? _recordingUrl;

  /// Requests microphone access and prepares the recorder.
  ///
  /// Silently leaves the recorder uninitialized if the browser returns no
  /// stream (e.g. permission denied). Must complete before [start].
  Future<void> init() async {
    assert(_mediaRecorder == null);
    final stream =
        await window.navigator.mediaDevices?.getUserMedia({'audio': true});
    if (stream == null) return;
    _mediaStream = stream;
    _mediaRecorder = MediaRecorder(stream);
    _audioBlobParts = [];
    _mediaRecorder!.addEventListener('dataavailable', _onDataAvailable);
  }

  // Accumulates recorded chunks as the browser emits them.
  void _onDataAvailable(Event event) {
    final blobEvent = event as BlobEvent;
    _audioBlobParts!.add(blobEvent.data!);
  }

  /// Begins (or resumes a new) recording. [init] must have succeeded first.
  Future<void> start() async {
    assert(_mediaRecorder != null);
    _mediaRecorder!.start();
  }

  /// Stops the recorder and returns an URL pointing to the recording.
  ///
  /// The recording is converted to WAV via the JS `getWaveBlob` helper and
  /// exposed as a blob object URL. Any URL from a previous call is revoked.
  Future<String> stop() async {
    assert(_mediaRecorder != null);
    final completer = Completer<String>();
    Future<void> onStop(_) async {
      assert(_audioBlobParts != null);
      final blob = Blob(_audioBlobParts!);
      // Reset (instead of nulling) so a subsequent start()/stop() cycle on
      // the same recorder does not crash in _onDataAvailable.
      _audioBlobParts = [];
      // toWav returns a JS Promise despite its declared Blob type.
      final newBlob = await promiseToFuture<Blob>(toWav(blob));
      completer.complete(Url.createObjectUrl(newBlob));
    }

    _mediaRecorder!.addEventListener('stop', onStop);
    _mediaRecorder!.stop();
    final previousUrl = _recordingUrl;
    _recordingUrl = await completer.future;
    _mediaRecorder!.removeEventListener('stop', onStop);
    // Avoid leaking object URLs when recording repeatedly.
    if (previousUrl != null) Url.revokeObjectUrl(previousUrl);
    return _recordingUrl!;
  }

  /// Downloads the last recording ([stop] must have completed) as raw bytes.
  Future<Uint8List> toBytes() async {
    assert(_recordingUrl != null);
    // Reuse the shared client rather than constructing a new Dio per call.
    final result = await dio.get(_recordingUrl!,
        options: Options(responseType: ResponseType.bytes));
    return result.data;
  }

  /// Releases the recorder, the microphone stream, and the last blob URL.
  void dispose() {
    assert(_mediaRecorder != null);
    _mediaRecorder!.removeEventListener('dataavailable', _onDataAvailable);
    _mediaRecorder = null;
    // Stop the tracks so the browser releases the mic (and its indicator).
    _mediaStream?.getTracks().forEach((track) => track.stop());
    _mediaStream = null;
    if (_recordingUrl != null) {
      Url.revokeObjectUrl(_recordingUrl!);
      _recordingUrl = null;
    }
  }
}
/// Application entry point.
void main() => runApp(const MicrophoneExampleApp());
/// Root widget of the microphone recording example.
///
/// Stateful so it can own the recorder's lifecycle in its [State].
class MicrophoneExampleApp extends StatefulWidget {
  const MicrophoneExampleApp({super.key});

  @override
  State<MicrophoneExampleApp> createState() =>
      _MicrophoneExampleAppState();
}
class _MicrophoneExampleAppState extends State<MicrophoneExampleApp> {
  // SECURITY: a live-looking OpenAI API key was committed here. It is now
  // public and must be revoked; load the key from configuration (e.g.
  // --dart-define / a backend proxy) instead of shipping it in the client.
  static const _openAiApiKey =
      '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';

  _Recorder? _recorder;

  @override
  void dispose() {
    _recorder?.dispose();
    super.dispose();
  }

  /// Tears down any existing recorder and creates a fresh, initialized one.
  Future<void> _initRecorder() async {
    _recorder?.dispose();
    // Surfaces the mic permission prompt/state before creating the recorder.
    await window.navigator.permissions?.query({'name': 'microphone'});
    _recorder = _Recorder();
    await _recorder!.init();
  }

  @override
  Widget build(BuildContext context) {
    return MaterialApp(
      home: Scaffold(
        body: Row(
          children: [
            OutlinedButton(
              onPressed: _initRecorder,
              child: const Text('Restart recorder'),
            ),
            OutlinedButton(
              onPressed: () {
                _recorder?.start();
              },
              child: const Text('Start recording'),
            ),
            OutlinedButton(
              onPressed: () => _recorder?.stop(),
              child: const Text('Stop recording'),
            ),
            OutlinedButton(
              onPressed: send,
              // Was a copy-paste duplicate of 'Stop recording'.
              child: const Text('Send recording'),
            ),
          ],
        ),
      ),
    );
  }

  /// Uploads the last recording to the OpenAI transcription endpoint and
  /// logs the response. No-op if no recorder has been initialized yet.
  Future<void> send() async {
    final recorder = _recorder;
    if (recorder == null) return; // Nothing recorded yet; avoid a null crash.
    final bytes = await recorder.toBytes();
    final response = await dio.post(
      'https://api.openai.com/v1/audio/transcriptions',
      data: FormData.fromMap({
        'model': 'whisper-1',
        'file': MultipartFile.fromBytes(bytes.toList(), filename: 'file.wav'),
      }),
      options: Options(
        headers: {
          'Content-Type': 'multipart/form-data',
          'Authorization': 'Bearer $_openAiApiKey',
        },
      ),
    );
    debugPrint('$response');
  }
}
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement