From b45016becbdd04975f62712218cdb5d81c84f99b Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Tue, 23 Apr 2024 16:40:28 +0000
Subject: [PATCH 01/11] Bump github/codeql-action from 3.25.1 to 3.25.2 (#6597)
Bumps [github/codeql-action](https://github.com/github/codeql-action) from 3.25.1 to 3.25.2.
Commits
8f596b4 Merge pull request #2254 from github/update-v3.25.2-4909c1ffb
de8916e Update changelog for v3.25.2
4909c1f Bump the npm group with 3 updates (#2253)
f45390c Merge pull request #2252 from github/henrymercer/failed-external-repo-config-...
1be8c48 Add configuration error for failing to clone external Git repo
82edfe2 Merge pull request #2246 from github/koesie10/remove-incorrect-log
8786e1f Merge pull request #2249 from github/mergeback/v3.25.1-to-main-c7f91257
3c7ac61 Update checked-in dependencies
b5bd9be Update changelog and version after v3.25.1
5d73b1b Remove incorrect log message
- See full diff in compare view
[Dependabot compatibility score](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores)
Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`.
---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR:
- `@dependabot rebase` will rebase this PR
- `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it
- `@dependabot merge` will merge this PR after your CI passes on it
- `@dependabot squash and merge` will squash and merge this PR after your CI passes on it
- `@dependabot cancel merge` will cancel a previously requested merge and block automerging
- `@dependabot reopen` will reopen this PR if it is closed
- `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually
- `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency
- `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself)
- `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself)
- `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
---
.github/workflows/scorecards-analysis.yml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/.github/workflows/scorecards-analysis.yml b/.github/workflows/scorecards-analysis.yml
index aa64053f823..2db7bf14273 100644
--- a/.github/workflows/scorecards-analysis.yml
+++ b/.github/workflows/scorecards-analysis.yml
@@ -49,6 +49,6 @@ jobs:
# Upload the results to GitHub's code scanning dashboard.
- name: "Upload to code-scanning"
- uses: github/codeql-action/upload-sarif@c7f9125735019aa87cfc361530512d50ea439c71 # v1.0.26
+ uses: github/codeql-action/upload-sarif@8f596b4ae3cb3c588a5c46780b86dd53fef16c52 # v1.0.26
with:
sarif_file: results.sarif
From 26b2bf4c1da61441e3fd3cb3a236428a98da4ba6 Mon Sep 17 00:00:00 2001
From: engine-flutter-autoroll
Date: Tue, 23 Apr 2024 12:46:06 -0400
Subject: [PATCH 02/11] Roll Flutter from 140edb988312 to 77043bae1a5e (21
revisions) (#6599)
https://github.com/flutter/flutter/compare/140edb988312...77043bae1a5e
2024-04-23 engine-flutter-autoroll@skia.org Roll Flutter Engine from ffd3911b1ff7 to 79f49954cce8 (2 revisions) (flutter/flutter#147235)
2024-04-23 engine-flutter-autoroll@skia.org Roll Flutter Engine from c7d9deb66bf7 to ffd3911b1ff7 (1 revision) (flutter/flutter#147227)
2024-04-23 137456488+flutter-pub-roller-bot@users.noreply.github.com Roll pub packages (flutter/flutter#147220)
2024-04-23 engine-flutter-autoroll@skia.org Roll Flutter Engine from 075d834489d0 to c7d9deb66bf7 (2 revisions) (flutter/flutter#147215)
2024-04-23 leroux_bruno@yahoo.fr Mention visualDensity impact on ButtonStyle.padding documentation (flutter/flutter#147048)
2024-04-23 engine-flutter-autoroll@skia.org Roll Flutter Engine from 9c0d24ff1cb6 to 075d834489d0 (1 revision) (flutter/flutter#147214)
2024-04-23 32538273+ValentinVignal@users.noreply.github.com Fix memory leaks in `CupertinoTextMagnifier` (flutter/flutter#147208)
2024-04-23 engine-flutter-autoroll@skia.org Roll Flutter Engine from 004e98839ed7 to 9c0d24ff1cb6 (1 revision) (flutter/flutter#147211)
2024-04-23 engine-flutter-autoroll@skia.org Roll Flutter Engine from 79f753650c6e to 004e98839ed7 (1 revision) (flutter/flutter#147209)
2024-04-23 engine-flutter-autoroll@skia.org Roll Flutter Engine from f8e373da5227 to 79f753650c6e (1 revision) (flutter/flutter#147206)
2024-04-23 102401667+Dimilkalathiya@users.noreply.github.com fixes cupertino page transition leak (flutter/flutter#147133)
2024-04-23 32538273+ValentinVignal@users.noreply.github.com Fix memory leaks in `PopupMenu` (flutter/flutter#147174)
2024-04-23 engine-flutter-autoroll@skia.org Roll Flutter Engine from 62c9f17169cf to f8e373da5227 (2 revisions) (flutter/flutter#147205)
2024-04-23 engine-flutter-autoroll@skia.org Roll Flutter Engine from 33c828fb3ff5 to 62c9f17169cf (4 revisions) (flutter/flutter#147203)
2024-04-23 49699333+dependabot[bot]@users.noreply.github.com Bump actions/upload-artifact from 4.3.2 to 4.3.3 (flutter/flutter#147192)
2024-04-23 49699333+dependabot[bot]@users.noreply.github.com Bump github/codeql-action from 3.25.1 to 3.25.2 (flutter/flutter#147193)
2024-04-23 engine-flutter-autoroll@skia.org Roll Flutter Engine from a4b71f02a1c7 to 33c828fb3ff5 (9 revisions) (flutter/flutter#147190)
2024-04-22 137456488+flutter-pub-roller-bot@users.noreply.github.com Roll pub packages (flutter/flutter#147094)
2024-04-22 polinach@google.com Re-land fix for not disposed TabController (flutter/flutter#146745)
2024-04-22 engine-flutter-autoroll@skia.org Roll Flutter Engine from 75ca2195c936 to a4b71f02a1c7 (1 revision) (flutter/flutter#147175)
2024-04-22 lamnhandev@gmail.com Update `examples/api` for android platform (flutter/flutter#147102)
If this roll has caused a breakage, revert this CL and stop the roller
using the controls here:
https://autoroll.skia.org/r/flutter-packages
Please CC camillesimon@google.com,rmistry@google.com,stuartmorgan@google.com on the revert to ensure that a human
is aware of the problem.
To file a bug in Packages: https://github.com/flutter/flutter/issues/new/choose
To report a problem with the AutoRoller itself, please file a bug:
https://issues.skia.org/issues/new?component=1389291&template=1850622
Documentation for the AutoRoller is here:
https://skia.googlesource.com/buildbot/+doc/main/autoroll/README.md
---
.ci/flutter_master.version | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/.ci/flutter_master.version b/.ci/flutter_master.version
index 19ccf0487d6..f2bd98154ee 100644
--- a/.ci/flutter_master.version
+++ b/.ci/flutter_master.version
@@ -1 +1 @@
-140edb9883122e335ecb99416934c9436d6a6d10
+77043bae1a5ee4bca22f47631ca7a5214812760e
From 080fd7459643cd08a4c05c1afe8f0cecd8e67d45 Mon Sep 17 00:00:00 2001
From: Camille Simon <43054281+camsim99@users.noreply.github.com>
Date: Tue, 23 Apr 2024 11:09:50 -0700
Subject: [PATCH 03/11] Bump legacy all_packages project AGP version to 7.0.0,
Gradle version to 7.0.2 (#6591)
Upgrades legacy all_packages project AGP version to 7.0.0 and Gradle version to 7.0.2 as Flutter will begin enforcing supported versions for these Android dependencies.
This was prompted by dependabot AGP version upgrades like https://github.com/flutter/packages/pull/6522 that are failing due to what a clear warning* describes as a potential issue:
```
Warning: Flutter support for your project's Gradle version (6.7.1) will soon be dropped. Please upgrade your Gradle version to a version of at least 7.0.2 soon.
Alternatively, use the flag "--android-skip-build-dependency-validation" to bypass this check.
Potential fix: Your project's gradle version is typically defined in the gradle wrapper file. By default, this can be found at /b/s/w/ir/x/w/packages/legacy/all_packages/android/gradle/wrapper/gradle-wrapper.properties.
For more information, see https://docs.gradle.org/current/userguide/gradle_wrapper.html.
Warning: Flutter support for your project's Android Gradle Plugin version (4.1.0) will soon be dropped. Please upgrade your Android Gradle Plugin version to a version of at least 7.0.0 soon.
Alternatively, use the flag "--android-skip-build-dependency-validation" to bypass this check.
Potential fix: Your project's AGP version is typically defined the plugins block of the `settings.gradle` file (/b/s/w/ir/x/w/packages/legacy/all_packages/android/settings.gradle), by a plugin with the id of com.android.application.
If you don't see a plugins block, your project was likely created with an older template version. In this case it is most likely defined in the top-level build.gradle file (/b/s/w/ir/x/w/packages/legacy/all_packages/android/build.gradle) by the following line in the dependencies block of the buildscript: "classpath 'com.android.tools.build:gradle:'".
```
Other dependabot upgrades that I believe are blocked until this change lands:
https://github.com/flutter/packages/pull/6585
https://github.com/flutter/packages/pull/6534
https://github.com/flutter/packages/pull/6530
https://github.com/flutter/packages/pull/6528
https://github.com/flutter/packages/pull/6526
*Added in https://github.com/flutter/flutter/pull/143341
---
.ci/legacy_project/README.md | 8 ++++++++
.ci/legacy_project/all_packages/android/build.gradle | 2 +-
.../android/gradle/wrapper/gradle-wrapper.properties | 2 +-
3 files changed, 10 insertions(+), 2 deletions(-)
diff --git a/.ci/legacy_project/README.md b/.ci/legacy_project/README.md
index 0e23f626fb4..1ee7313e73d 100644
--- a/.ci/legacy_project/README.md
+++ b/.ci/legacy_project/README.md
@@ -42,3 +42,11 @@ and then deleting everything but `android/` from it:
- Updates `gradle-wrapper.properties` from `6.7` to `6.7.1`, to add
support for the Kotlin gradle plugin. If a user runs into this
error, the error message is clear on how to upgrade.
+- Modifies `build.gradle` to upgrade the Android Gradle Plugin (AGP)
+ from version 4.1.0 to 7.0.0. If a user runs into an error with
+ the AGP version, the warning is clear on how to upgrade
+ the version to one that we support.
+- Modifies `gradle-wrapper.properties` to upgrade the Gradle version
+ from 6.7.1 to 7.0.2. If a user runs into an error with the Gradle
+ version, the warning is clear on how to upgrade the version to
+ one that we support.
\ No newline at end of file
diff --git a/.ci/legacy_project/all_packages/android/build.gradle b/.ci/legacy_project/all_packages/android/build.gradle
index 0b4cf534e0a..08cb0aa3de9 100644
--- a/.ci/legacy_project/all_packages/android/build.gradle
+++ b/.ci/legacy_project/all_packages/android/build.gradle
@@ -5,7 +5,7 @@ buildscript {
}
dependencies {
- classpath 'com.android.tools.build:gradle:4.1.0'
+ classpath 'com.android.tools.build:gradle:7.0.0'
}
}
diff --git a/.ci/legacy_project/all_packages/android/gradle/wrapper/gradle-wrapper.properties b/.ci/legacy_project/all_packages/android/gradle/wrapper/gradle-wrapper.properties
index 939efa2951b..b8793d3c0d6 100644
--- a/.ci/legacy_project/all_packages/android/gradle/wrapper/gradle-wrapper.properties
+++ b/.ci/legacy_project/all_packages/android/gradle/wrapper/gradle-wrapper.properties
@@ -3,4 +3,4 @@ distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
-distributionUrl=https\://services.gradle.org/distributions/gradle-6.7.1-all.zip
+distributionUrl=https\://services.gradle.org/distributions/gradle-7.0.2-all.zip
From 7cd033915ae64e9460c95cb725e4a71f4644ac1d Mon Sep 17 00:00:00 2001
From: Reid Baker
Date: Tue, 23 Apr 2024 17:19:19 -0400
Subject: [PATCH 04/11] [in_app_purchase_android] Readme update for Alternative
billing (#6578)
- **Add readme info about user choice billing and alternative billing**
Fixes flutter/flutter/issues/144992
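For context, here is a minimal Dart sketch of the alternative-billing-only flow the new README section describes. Only the three method names come from the README; the `InAppPurchaseAndroidPlatformAddition` accessor pattern, the `BillingChoiceMode` value, and the return handling are assumptions, not a definitive usage guide.
```dart
import 'package:in_app_purchase_android/in_app_purchase_android.dart';
import 'package:in_app_purchase_platform_interface/in_app_purchase_platform_interface.dart';

Future<void> enableAlternativeBillingOnly() async {
  // Assumed accessor for the Android-specific platform addition.
  final InAppPurchaseAndroidPlatformAddition addition =
      InAppPurchasePlatformAddition.instance!
          as InAppPurchaseAndroidPlatformAddition;

  // 1. Check whether Google Play reports the feature as available
  //    (the exact return type is an assumption; treat non-OK as unavailable).
  final availability = await addition.isAlternativeBillingOnlyAvailable();
  print('Alternative billing only availability: $availability');

  // 2. Inform the user that Google Play will not handle all aspects of
  //    the purchase.
  await addition.showAlternativeBillingOnlyInformationDialog();

  // 3. Opt in; afterwards, respond to purchase attempts as usual.
  //    `BillingChoiceMode.alternativeBillingOnly` is an assumed enum value.
  await addition.setBillingChoice(BillingChoiceMode.alternativeBillingOnly);
}
```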
---
.../in_app_purchase/in_app_purchase_android/CHANGELOG.md | 6 +++++-
.../in_app_purchase/in_app_purchase_android/README.md | 8 ++++++++
.../in_app_purchase/in_app_purchase_android/pubspec.yaml | 3 +--
3 files changed, 14 insertions(+), 3 deletions(-)
diff --git a/packages/in_app_purchase/in_app_purchase_android/CHANGELOG.md b/packages/in_app_purchase/in_app_purchase_android/CHANGELOG.md
index 20d700ac410..f5e5680598a 100644
--- a/packages/in_app_purchase/in_app_purchase_android/CHANGELOG.md
+++ b/packages/in_app_purchase/in_app_purchase_android/CHANGELOG.md
@@ -1,6 +1,10 @@
+## 0.3.4+1
+
+* Adds documentation for UserChoice and Alternative Billing.
+
## 0.3.4
-* Adds `countryCode` API.
+* Adds `countryCode` API.
## 0.3.3+1
diff --git a/packages/in_app_purchase/in_app_purchase_android/README.md b/packages/in_app_purchase/in_app_purchase_android/README.md
index d49315b41a0..f5cdbf9e0e9 100644
--- a/packages/in_app_purchase/in_app_purchase_android/README.md
+++ b/packages/in_app_purchase/in_app_purchase_android/README.md
@@ -11,6 +11,14 @@ so you do not need to add it to your `pubspec.yaml`.
However, if you `import` this package to use any of its APIs directly, you
should [add it to your `pubspec.yaml` as usual][3].
+## Alternative/UserChoice Billing
+
+Alternative and UserChoice billing from Google Play is exposed from this package.
+
+Using the Alternative billing only feature requires Google Play app configuration, checking if the feature is available (`isAlternativeBillingOnlyAvailable`) and informing users that Google Play does not handle all aspects of purchase (`showAlternativeBillingOnlyInformationDialog`). After those calls then you can call `setBillingChoice` and respond when a user attempts a purchase.
+
+[Google Play documentation for Alternative billing](https://developer.android.com/google/play/billing/alternative)
+
## Migrating to 0.3.0
To migrate to version 0.3.0 from 0.2.x, have a look at the [migration guide](migration_guide.md).
diff --git a/packages/in_app_purchase/in_app_purchase_android/pubspec.yaml b/packages/in_app_purchase/in_app_purchase_android/pubspec.yaml
index 6950533f1ca..0e711948aef 100644
--- a/packages/in_app_purchase/in_app_purchase_android/pubspec.yaml
+++ b/packages/in_app_purchase/in_app_purchase_android/pubspec.yaml
@@ -2,8 +2,7 @@ name: in_app_purchase_android
description: An implementation for the Android platform of the Flutter `in_app_purchase` plugin. This uses the Android BillingClient APIs.
repository: https://github.com/flutter/packages/tree/main/packages/in_app_purchase/in_app_purchase_android
issue_tracker: https://github.com/flutter/flutter/issues?q=is%3Aissue+is%3Aopen+label%3A%22p%3A+in_app_purchase%22
-
-version: 0.3.4
+version: 0.3.4+1
environment:
sdk: ^3.1.0
From 179f92579473515d87156207b178452b81e62fde Mon Sep 17 00:00:00 2001
From: "auto-submit[bot]" <98614782+auto-submit[bot]@users.noreply.github.com>
Date: Wed, 24 Apr 2024 16:01:46 +0000
Subject: [PATCH 05/11] Reverts "Bump legacy all_packages project AGP version
to 7.0.0, Gradle version to 7.0.2 (#6591)" (#6605)
Reverts: flutter/packages#6591
Initiated by: camsim99
Reason for reverting: The warning about unsupported Android dependency versions (added in https://github.com/flutter/flutter/pull/143341) is not yet in the stable channel.
Original PR Author: camsim99
Reviewed By: {reidbaker, gmackall}
This change reverts the following previous change:
Upgrades legacy all_packages project AGP version to 7.0.0 and Gradle version to 7.0.2 as Flutter will begin enforcing supported versions for these Android dependencies.
This was prompted by dependabot AGP version upgrades like https://github.com/flutter/packages/pull/6522 that are failing due to what a clear warning* describes as a potential issue:
```
Warning: Flutter support for your project's Gradle version (6.7.1) will soon be dropped. Please upgrade your Gradle version to a version of at least 7.0.2 soon.
Alternatively, use the flag "--android-skip-build-dependency-validation" to bypass this check.
Potential fix: Your project's gradle version is typically defined in the gradle wrapper file. By default, this can be found at /b/s/w/ir/x/w/packages/legacy/all_packages/android/gradle/wrapper/gradle-wrapper.properties.
For more information, see https://docs.gradle.org/current/userguide/gradle_wrapper.html.
Warning: Flutter support for your project's Android Gradle Plugin version (4.1.0) will soon be dropped. Please upgrade your Android Gradle Plugin version to a version of at least 7.0.0 soon.
Alternatively, use the flag "--android-skip-build-dependency-validation" to bypass this check.
Potential fix: Your project's AGP version is typically defined the plugins block of the `settings.gradle` file (/b/s/w/ir/x/w/packages/legacy/all_packages/android/settings.gradle), by a plugin with the id of com.android.application.
If you don't see a plugins block, your project was likely created with an older template version. In this case it is most likely defined in the top-level build.gradle file (/b/s/w/ir/x/w/packages/legacy/all_packages/android/build.gradle) by the following line in the dependencies block of the buildscript: "classpath 'com.android.tools.build:gradle:'".
```
Other dependabot upgrades that I believe are blocked until this change lands:
https://github.com/flutter/packages/pull/6585
https://github.com/flutter/packages/pull/6534
https://github.com/flutter/packages/pull/6530
https://github.com/flutter/packages/pull/6528
https://github.com/flutter/packages/pull/6526
*Added in https://github.com/flutter/flutter/pull/143341
---
.ci/legacy_project/README.md | 8 --------
.ci/legacy_project/all_packages/android/build.gradle | 2 +-
.../android/gradle/wrapper/gradle-wrapper.properties | 2 +-
3 files changed, 2 insertions(+), 10 deletions(-)
diff --git a/.ci/legacy_project/README.md b/.ci/legacy_project/README.md
index 1ee7313e73d..0e23f626fb4 100644
--- a/.ci/legacy_project/README.md
+++ b/.ci/legacy_project/README.md
@@ -42,11 +42,3 @@ and then deleting everything but `android/` from it:
- Updates `gradle-wrapper.properties` from `6.7` to `6.7.1`, to add
support for the Kotlin gradle plugin. If a user runs into this
error, the error message is clear on how to upgrade.
-- Modifies `build.gradle` to upgrade the Android Gradle Plugin (AGP)
- from version 4.1.0 to 7.0.0. If a user runs into an error with
- the AGP version, the warning is clear on how to upgrade
- the version to one that we support.
-- Modifies `gradle-wrapper.properties` to upgrade the Gradle version
- from 6.7.1 to 7.0.2. If a user runs into an error with the Gradle
- version, the warning is clear on how to upgrade the version to
- one that we support.
\ No newline at end of file
diff --git a/.ci/legacy_project/all_packages/android/build.gradle b/.ci/legacy_project/all_packages/android/build.gradle
index 08cb0aa3de9..0b4cf534e0a 100644
--- a/.ci/legacy_project/all_packages/android/build.gradle
+++ b/.ci/legacy_project/all_packages/android/build.gradle
@@ -5,7 +5,7 @@ buildscript {
}
dependencies {
- classpath 'com.android.tools.build:gradle:7.0.0'
+ classpath 'com.android.tools.build:gradle:4.1.0'
}
}
diff --git a/.ci/legacy_project/all_packages/android/gradle/wrapper/gradle-wrapper.properties b/.ci/legacy_project/all_packages/android/gradle/wrapper/gradle-wrapper.properties
index b8793d3c0d6..939efa2951b 100644
--- a/.ci/legacy_project/all_packages/android/gradle/wrapper/gradle-wrapper.properties
+++ b/.ci/legacy_project/all_packages/android/gradle/wrapper/gradle-wrapper.properties
@@ -3,4 +3,4 @@ distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
-distributionUrl=https\://services.gradle.org/distributions/gradle-7.0.2-all.zip
+distributionUrl=https\://services.gradle.org/distributions/gradle-6.7.1-all.zip
From 890ec3609649a1d581895d0f2cd4c7bc2a1885bf Mon Sep 17 00:00:00 2001
From: chunhtai <47866232+chunhtai@users.noreply.github.com>
Date: Wed, 24 Apr 2024 09:04:20 -0700
Subject: [PATCH 06/11] =?UTF-8?q?[go=5Frouter]=20Fixes=20an=20issue=20wher?=
=?UTF-8?q?e=20route=20future=20does=20not=20complete=20when=20=E2=80=A6?=
=?UTF-8?q?=20(#6596)?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
…popping shell route.
fixes https://github.com/flutter/flutter/issues/147196
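For illustration, a minimal sketch of the user-facing pattern this fixes, mirroring the new test below; `/details` is a placeholder route assumed to be declared under a `ShellRoute`:
```dart
import 'package:flutter/widgets.dart';
import 'package:go_router/go_router.dart';

/// Pushes a route nested under a ShellRoute and awaits its result.
/// Before this fix the returned future never completed when that route was
/// popped; now it resolves with the value passed to pop.
Future<void> openDetails(BuildContext context) async {
  final bool? accepted = await context.push<bool>('/details');
  debugPrint('details route returned: $accepted');
}

/// Called from inside the pushed screen to close it with a result.
void closeDetails(BuildContext context) {
  context.pop(true);
}
```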
---
packages/go_router/CHANGELOG.md | 4 ++
packages/go_router/lib/src/delegate.dart | 8 +++-
packages/go_router/pubspec.yaml | 2 +-
packages/go_router/test/go_router_test.dart | 47 +++++++++++++++++++++
4 files changed, 58 insertions(+), 3 deletions(-)
diff --git a/packages/go_router/CHANGELOG.md b/packages/go_router/CHANGELOG.md
index 576a71789a7..4cb037ef2ad 100644
--- a/packages/go_router/CHANGELOG.md
+++ b/packages/go_router/CHANGELOG.md
@@ -1,3 +1,7 @@
+## 13.2.5
+
+- Fixes an issue where route future does not complete when popping shell route.
+
## 13.2.4
- Updates examples to use uri.path instead of uri.toString() for accessing the current location.
diff --git a/packages/go_router/lib/src/delegate.dart b/packages/go_router/lib/src/delegate.dart
index 3b01b9e381c..bf5840cbad4 100644
--- a/packages/go_router/lib/src/delegate.dart
+++ b/packages/go_router/lib/src/delegate.dart
@@ -147,8 +147,12 @@ class GoRouterDelegate extends RouterDelegate<RouteMatchList>
}
void _completeRouteMatch(Object? result, RouteMatchBase match) {
- if (match is ImperativeRouteMatch) {
- match.complete(result);
+ RouteMatchBase walker = match;
+ while (walker is ShellRouteMatch) {
+ walker = walker.matches.last;
+ }
+ if (walker is ImperativeRouteMatch) {
+ walker.complete(result);
}
currentConfiguration = currentConfiguration.remove(match);
notifyListeners();
diff --git a/packages/go_router/pubspec.yaml b/packages/go_router/pubspec.yaml
index 29dfaa27a44..25ec466df0d 100644
--- a/packages/go_router/pubspec.yaml
+++ b/packages/go_router/pubspec.yaml
@@ -1,7 +1,7 @@
name: go_router
description: A declarative router for Flutter based on Navigation 2 supporting
deep linking, data-driven routes and more
-version: 13.2.4
+version: 13.2.5
repository: https://github.com/flutter/packages/tree/main/packages/go_router
issue_tracker: https://github.com/flutter/flutter/issues?q=is%3Aissue+is%3Aopen+label%3A%22p%3A+go_router%22
diff --git a/packages/go_router/test/go_router_test.dart b/packages/go_router/test/go_router_test.dart
index 57d8612c076..42bbec0cc4f 100644
--- a/packages/go_router/test/go_router_test.dart
+++ b/packages/go_router/test/go_router_test.dart
@@ -3462,6 +3462,53 @@ void main() {
expect(find.text('Screen B'), findsOneWidget);
});
+ testWidgets('can complete leaf route', (WidgetTester tester) async {
+ Future? routeFuture;
+ final List<RouteBase> routes = <RouteBase>[
+ GoRoute(
+ path: '/',
+ builder: (BuildContext context, GoRouterState state) {
+ return Scaffold(
+ body: TextButton(
+ onPressed: () async {
+ routeFuture = context.push('/a');
+ },
+ child: const Text('press'),
+ ),
+ );
+ },
+ ),
+ ShellRoute(
+ builder: (BuildContext context, GoRouterState state, Widget child) {
+ return Scaffold(
+ body: child,
+ );
+ },
+ routes: <RouteBase>[
+ GoRoute(
+ path: '/a',
+ builder: (BuildContext context, GoRouterState state) {
+ return const Scaffold(
+ body: Text('Screen A'),
+ );
+ },
+ ),
+ ],
+ ),
+ ];
+
+ final GoRouter router = await createRouter(routes, tester);
+ expect(find.text('press'), findsOneWidget);
+
+ await tester.tap(find.text('press'));
+ await tester.pumpAndSettle();
+ expect(find.text('Screen A'), findsOneWidget);
+
+ router.pop(true);
+ final bool? result = await routeFuture;
+ expect(result, isTrue);
+ });
+
testWidgets(
'Pops from the correct Navigator when the Android back button is pressed',
(WidgetTester tester) async {
From 59916a9bceb673104da3767d76a5d0414c95a5f1 Mon Sep 17 00:00:00 2001
From: stuartmorgan
Date: Wed, 24 Apr 2024 14:30:05 -0400
Subject: [PATCH 07/11] [camera] Finish converting iOS to Pigeon (#6601)
Converts all remaining Dart->host communication in the iOS implementation to use Pigeon. Given the boilerplate nature of many of the changes, it seemed easiest to just do the remaining calls all at once now that the structure is in place.
Some high-level notes:
- Many methods used to send the `cameraId` without it ever being used on the native side, so the Pigeon versions do not send them.
- `ThreadSafeTextureRegistry` is removed because I discovered that it was masking a bug, so was more trouble than it was worth (see inline comments in PR).
- A number of enums have been removed in favor of using the Pigeon-generated enums to pass data from the plugin class to `FLTCam`.
- In many cases where the completion callback (previously `result`) was being passed to `FLTCam` in a call, only to have it always just call `result(nil)`, that's now done in the plugin class since it's easier to reason about completions being called when they aren't passed around. (Long term we should consider moving almost all of the rest out, and using `FlutterError*` out params that the plugin class passes to `completion`, but that is more surgery than I wanted to do in this PR.)
Completes the iOS portion of https://github.com/flutter/flutter/issues/117905
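To illustrate the Dart-to-host direction being converted, here is a sketch of what a Pigeon host API declaration looks like; the names below are placeholders for illustration only, not the contents of this plugin's actual `pigeons/messages.dart`:
```dart
// Hypothetical Pigeon input file (placeholder names; nullable fields kept
// simple for the sketch).
import 'package:pigeon/pigeon.dart';

class PlatformMediaSettings {
  bool? enableAudio;
  int? framesPerSecond;
}

@HostApi()
abstract class ExampleCameraApi {
  // Each method becomes a generated Dart proxy plus an Objective-C protocol
  // that the plugin class implements, replacing hand-written MethodChannel
  // argument encoding and decoding.
  @async
  int create(String cameraName, PlatformMediaSettings settings);

  @async
  void dispose(int cameraId);
}
```
Generation is typically driven by `dart run pigeon --input pigeons/messages.dart`, which produces the `messages.g.dart` and `messages.g.h`/`messages.g.m` files that appear in this diff.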
---
.../camera/camera_avfoundation/CHANGELOG.md | 9 +-
.../ios/Runner.xcodeproj/project.pbxproj | 4 -
...eraCaptureSessionQueueRaceConditionTests.m | 31 +-
.../ios/RunnerTests/CameraFocusTests.m | 8 +-
.../RunnerTests/CameraMethodChannelTests.m | 30 +-
.../ios/RunnerTests/CameraPreviewPauseTests.m | 6 +-
.../ios/RunnerTests/CameraPropertiesTests.m | 79 +-
.../RunnerTests/CameraSessionPresetsTests.m | 7 +-
.../ios/RunnerTests/CameraSettingsTests.m | 182 +---
.../example/ios/RunnerTests/CameraTestUtils.h | 9 +-
.../example/ios/RunnerTests/CameraTestUtils.m | 73 +-
.../ios/RunnerTests/FLTCamPhotoCaptureTests.m | 17 +-
.../ios/RunnerTests/FLTCamSampleBufferTests.m | 18 +-
.../ThreadSafeTextureRegistryTests.m | 109 ---
.../ios/Classes/CameraPlugin.m | 575 +++++++-----
.../ios/Classes/CameraPlugin.modulemap | 1 -
.../ios/Classes/CameraPlugin_Test.h | 17 +-
.../ios/Classes/CameraProperties.h | 76 +-
.../ios/Classes/CameraProperties.m | 132 +--
.../camera_avfoundation/ios/Classes/FLTCam.h | 71 +-
.../camera_avfoundation/ios/Classes/FLTCam.m | 332 +++----
.../ios/Classes/FLTCamMediaSettings.h | 54 --
.../ios/Classes/FLTCamMediaSettings.m | 36 -
.../ios/Classes/FLTCam_Test.h | 22 +-
.../Classes/FLTThreadSafeTextureRegistry.h | 36 -
.../Classes/FLTThreadSafeTextureRegistry.m | 46 -
.../ios/Classes/messages.g.h | 162 ++++
.../ios/Classes/messages.g.m | 851 +++++++++++++++++
.../lib/src/avfoundation_camera.dart | 396 ++++----
.../lib/src/messages.g.dart | 879 ++++++++++++++++++
.../camera_avfoundation/lib/src/utils.dart | 15 +-
.../camera_avfoundation/pigeons/messages.dart | 201 ++++
.../camera/camera_avfoundation/pubspec.yaml | 2 +-
.../test/avfoundation_camera_test.dart | 794 ++++------------
.../test/avfoundation_camera_test.mocks.dart | 348 ++++++-
.../test/method_channel_mock.dart | 40 -
.../camera_avfoundation/test/utils_test.dart | 8 +-
37 files changed, 3497 insertions(+), 2179 deletions(-)
delete mode 100644 packages/camera/camera_avfoundation/example/ios/RunnerTests/ThreadSafeTextureRegistryTests.m
delete mode 100644 packages/camera/camera_avfoundation/ios/Classes/FLTCamMediaSettings.h
delete mode 100644 packages/camera/camera_avfoundation/ios/Classes/FLTCamMediaSettings.m
delete mode 100644 packages/camera/camera_avfoundation/ios/Classes/FLTThreadSafeTextureRegistry.h
delete mode 100644 packages/camera/camera_avfoundation/ios/Classes/FLTThreadSafeTextureRegistry.m
delete mode 100644 packages/camera/camera_avfoundation/test/method_channel_mock.dart
diff --git a/packages/camera/camera_avfoundation/CHANGELOG.md b/packages/camera/camera_avfoundation/CHANGELOG.md
index 584c95f6adb..5012f7d965b 100644
--- a/packages/camera/camera_avfoundation/CHANGELOG.md
+++ b/packages/camera/camera_avfoundation/CHANGELOG.md
@@ -1,3 +1,8 @@
+## 0.9.16
+
+* Converts Dart-to-host communcation to Pigeon.
+* Fixes a race condition in camera disposal.
+
## 0.9.15+4
* Converts host-to-Dart communcation to Pigeon.
@@ -121,11 +126,11 @@
## 0.9.8+5
-* Fixes a regression introduced in 0.9.8+4 where the stream handler is not set.
+* Fixes a regression introduced in 0.9.8+4 where the stream handler is not set.
## 0.9.8+4
-* Fixes a crash due to sending orientation change events when the engine is torn down.
+* Fixes a crash due to sending orientation change events when the engine is torn down.
## 0.9.8+3
diff --git a/packages/camera/camera_avfoundation/example/ios/Runner.xcodeproj/project.pbxproj b/packages/camera/camera_avfoundation/example/ios/Runner.xcodeproj/project.pbxproj
index dc00e49c042..2ed764506ef 100644
--- a/packages/camera/camera_avfoundation/example/ios/Runner.xcodeproj/project.pbxproj
+++ b/packages/camera/camera_avfoundation/example/ios/Runner.xcodeproj/project.pbxproj
@@ -29,7 +29,6 @@
E071CF7227B3061B006EF3BA /* FLTCamPhotoCaptureTests.m in Sources */ = {isa = PBXBuildFile; fileRef = E071CF7127B3061B006EF3BA /* FLTCamPhotoCaptureTests.m */; };
E071CF7427B31DE4006EF3BA /* FLTCamSampleBufferTests.m in Sources */ = {isa = PBXBuildFile; fileRef = E071CF7327B31DE4006EF3BA /* FLTCamSampleBufferTests.m */; };
E0B0D2BB27DFF2AF00E71E4B /* CameraPermissionTests.m in Sources */ = {isa = PBXBuildFile; fileRef = E0B0D2BA27DFF2AF00E71E4B /* CameraPermissionTests.m */; };
- E0C6E2012770F01A00EA6AA3 /* ThreadSafeTextureRegistryTests.m in Sources */ = {isa = PBXBuildFile; fileRef = E0C6E1FE2770F01A00EA6AA3 /* ThreadSafeTextureRegistryTests.m */; };
E0C6E2022770F01A00EA6AA3 /* ThreadSafeEventChannelTests.m in Sources */ = {isa = PBXBuildFile; fileRef = E0C6E1FF2770F01A00EA6AA3 /* ThreadSafeEventChannelTests.m */; };
E0CDBAC227CD9729002561D9 /* CameraTestUtils.m in Sources */ = {isa = PBXBuildFile; fileRef = E0CDBAC127CD9729002561D9 /* CameraTestUtils.m */; };
E0F95E3D27A32AB900699390 /* CameraPropertiesTests.m in Sources */ = {isa = PBXBuildFile; fileRef = E0F95E3C27A32AB900699390 /* CameraPropertiesTests.m */; };
@@ -95,7 +94,6 @@
E071CF7127B3061B006EF3BA /* FLTCamPhotoCaptureTests.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = FLTCamPhotoCaptureTests.m; sourceTree = "<group>"; };
E071CF7327B31DE4006EF3BA /* FLTCamSampleBufferTests.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = FLTCamSampleBufferTests.m; sourceTree = "<group>"; };
E0B0D2BA27DFF2AF00E71E4B /* CameraPermissionTests.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = CameraPermissionTests.m; sourceTree = "<group>"; };
- E0C6E1FE2770F01A00EA6AA3 /* ThreadSafeTextureRegistryTests.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = ThreadSafeTextureRegistryTests.m; sourceTree = "<group>"; };
E0C6E1FF2770F01A00EA6AA3 /* ThreadSafeEventChannelTests.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = ThreadSafeEventChannelTests.m; sourceTree = "<group>"; };
E0CDBAC027CD9729002561D9 /* CameraTestUtils.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = CameraTestUtils.h; sourceTree = "<group>"; };
E0CDBAC127CD9729002561D9 /* CameraTestUtils.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = CameraTestUtils.m; sourceTree = "<group>"; };
@@ -132,7 +130,6 @@
03BB766C2665316900CE5A93 /* Info.plist */,
033B94BD269C40A200B4DF97 /* CameraMethodChannelTests.m */,
E0C6E1FF2770F01A00EA6AA3 /* ThreadSafeEventChannelTests.m */,
- E0C6E1FE2770F01A00EA6AA3 /* ThreadSafeTextureRegistryTests.m */,
E04F108527A87CA600573D0C /* FLTSavePhotoDelegateTests.m */,
E071CF7127B3061B006EF3BA /* FLTCamPhotoCaptureTests.m */,
E071CF7327B31DE4006EF3BA /* FLTCamSampleBufferTests.m */,
@@ -449,7 +446,6 @@
E032F250279F5E94009E9028 /* CameraCaptureSessionQueueRaceConditionTests.m in Sources */,
788A065A27B0E02900533D74 /* StreamingTest.m in Sources */,
E0C6E2022770F01A00EA6AA3 /* ThreadSafeEventChannelTests.m in Sources */,
- E0C6E2012770F01A00EA6AA3 /* ThreadSafeTextureRegistryTests.m in Sources */,
E0B0D2BB27DFF2AF00E71E4B /* CameraPermissionTests.m in Sources */,
E01EE4A82799F3A5008C1950 /* QueueUtilsTests.m in Sources */,
);
diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraCaptureSessionQueueRaceConditionTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraCaptureSessionQueueRaceConditionTests.m
index bc3713b7478..226d6bfb1a5 100644
--- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraCaptureSessionQueueRaceConditionTests.m
+++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraCaptureSessionQueueRaceConditionTests.m
@@ -18,22 +18,25 @@ - (void)testFixForCaptureSessionQueueNullPointerCrashDueToRaceCondition {
[self expectationWithDescription:@"dispose's result block must be called"];
XCTestExpectation *createExpectation =
[self expectationWithDescription:@"create's result block must be called"];
- FlutterMethodCall *disposeCall = [FlutterMethodCall methodCallWithMethodName:@"dispose"
- arguments:nil];
- FlutterMethodCall *createCall = [FlutterMethodCall
- methodCallWithMethodName:@"create"
- arguments:@{@"resolutionPreset" : @"medium", @"enableAudio" : @(1)}];
// Mimic a dispose call followed by a create call, which can be triggered by slightly dragging the
// home bar, causing the app to be inactive, and immediately regain active.
- [camera handleMethodCall:disposeCall
- result:^(id _Nullable result) {
- [disposeExpectation fulfill];
- }];
- [camera createCameraOnSessionQueueWithCreateMethodCall:createCall
- result:^(id _Nullable result) {
- [createExpectation fulfill];
- }];
- [self waitForExpectationsWithTimeout:1 handler:nil];
+ [camera disposeCamera:0
+ completion:^(FlutterError *_Nullable error) {
+ [disposeExpectation fulfill];
+ }];
+ [camera createCameraOnSessionQueueWithName:@"acamera"
+ settings:[FCPPlatformMediaSettings
+ makeWithResolutionPreset:
+ FCPPlatformResolutionPresetMedium
+ framesPerSecond:nil
+ videoBitrate:nil
+ audioBitrate:nil
+ enableAudio:YES]
+ completion:^(NSNumber *_Nullable result,
+ FlutterError *_Nullable error) {
+ [createExpectation fulfill];
+ }];
+ [self waitForExpectationsWithTimeout:30 handler:nil];
// `captureSessionQueue` must not be nil after `create` call. Otherwise a nil
// `captureSessionQueue` passed into `AVCaptureVideoDataOutput::setSampleBufferDelegate:queue:`
// API will cause a crash.
diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraFocusTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraFocusTests.m
index 577304018de..d13f5a77ced 100644
--- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraFocusTests.m
+++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraFocusTests.m
@@ -114,11 +114,9 @@ - (void)testSetFocusPointWithResult_SetsFocusPointOfInterest {
[_camera setValue:_mockDevice forKey:@"captureDevice"];
// Run test
- [_camera
- setFocusPointWithResult:^(id _Nullable result) {
- }
- x:1
- y:1];
+ [_camera setFocusPoint:[FCPPlatformPoint makeWithX:1 y:1]
+ withCompletion:^(FlutterError *_Nullable error){
+ }];
// Verify the focus point of interest has been set
OCMVerify([_mockDevice setFocusPointOfInterest:CGPointMake(1, 1)]);
diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraMethodChannelTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraMethodChannelTests.m
index 423b8e88989..55fc44e10cb 100644
--- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraMethodChannelTests.m
+++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraMethodChannelTests.m
@@ -28,22 +28,24 @@ - (void)testCreate_ShouldCallResultOnMainThread {
OCMStub([avCaptureSessionMock canSetSessionPreset:[OCMArg any]]).andReturn(YES);
// Set up method call
- FlutterMethodCall *call = [FlutterMethodCall
- methodCallWithMethodName:@"create"
- arguments:@{@"resolutionPreset" : @"medium", @"enableAudio" : @(1)}];
-
- __block id resultValue;
- [camera createCameraOnSessionQueueWithCreateMethodCall:call
- result:^(id _Nullable result) {
- resultValue = result;
- [expectation fulfill];
- }];
- [self waitForExpectationsWithTimeout:1 handler:nil];
+ __block NSNumber *resultValue;
+ [camera createCameraOnSessionQueueWithName:@"acamera"
+ settings:[FCPPlatformMediaSettings
+ makeWithResolutionPreset:
+ FCPPlatformResolutionPresetMedium
+ framesPerSecond:nil
+ videoBitrate:nil
+ audioBitrate:nil
+ enableAudio:YES]
+ completion:^(NSNumber *_Nullable result,
+ FlutterError *_Nullable error) {
+ resultValue = result;
+ [expectation fulfill];
+ }];
+ [self waitForExpectationsWithTimeout:30 handler:nil];
// Verify the result
- NSDictionary *dictionaryResult = (NSDictionary *)resultValue;
- XCTAssertNotNil(dictionaryResult);
- XCTAssert([[dictionaryResult allKeys] containsObject:@"cameraId"]);
+ XCTAssertNotNil(resultValue);
}
@end
diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPreviewPauseTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPreviewPauseTests.m
index 2ce7b8676d3..96ae19ff14d 100644
--- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPreviewPauseTests.m
+++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPreviewPauseTests.m
@@ -16,16 +16,14 @@ @implementation CameraPreviewPauseTests
- (void)testPausePreviewWithResult_shouldPausePreview {
FLTCam *camera = [[FLTCam alloc] init];
- [camera pausePreviewWithResult:^(id _Nullable result){
- }];
+ [camera pausePreview];
XCTAssertTrue(camera.isPreviewPaused);
}
- (void)testResumePreviewWithResult_shouldResumePreview {
FLTCam *camera = [[FLTCam alloc] init];
- [camera resumePreviewWithResult:^(id _Nullable result){
- }];
+ [camera resumePreview];
XCTAssertFalse(camera.isPreviewPaused);
}
diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPropertiesTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPropertiesTests.m
index 14ced24bfc1..5b865d464dc 100644
--- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPropertiesTests.m
+++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPropertiesTests.m
@@ -15,68 +15,39 @@ @implementation CameraPropertiesTests
#pragma mark - flash mode tests
-- (void)testFLTGetFLTFlashModeForString {
- XCTAssertEqual(FLTFlashModeOff, FLTGetFLTFlashModeForString(@"off"));
- XCTAssertEqual(FLTFlashModeAuto, FLTGetFLTFlashModeForString(@"auto"));
- XCTAssertEqual(FLTFlashModeAlways, FLTGetFLTFlashModeForString(@"always"));
- XCTAssertEqual(FLTFlashModeTorch, FLTGetFLTFlashModeForString(@"torch"));
- XCTAssertEqual(FLTFlashModeInvalid, FLTGetFLTFlashModeForString(@"unknown"));
-}
-
-- (void)testFLTGetAVCaptureFlashModeForFLTFlashMode {
- XCTAssertEqual(AVCaptureFlashModeOff, FLTGetAVCaptureFlashModeForFLTFlashMode(FLTFlashModeOff));
- XCTAssertEqual(AVCaptureFlashModeAuto, FLTGetAVCaptureFlashModeForFLTFlashMode(FLTFlashModeAuto));
- XCTAssertEqual(AVCaptureFlashModeOn, FLTGetAVCaptureFlashModeForFLTFlashMode(FLTFlashModeAlways));
- XCTAssertEqual(-1, FLTGetAVCaptureFlashModeForFLTFlashMode(FLTFlashModeTorch));
-}
-
-#pragma mark - exposure mode tests
-
-- (void)testFCPGetExposureModeForString {
- XCTAssertEqual(FCPPlatformExposureModeAuto, FCPGetExposureModeForString(@"auto"));
- XCTAssertEqual(FCPPlatformExposureModeLocked, FCPGetExposureModeForString(@"locked"));
-}
-
-#pragma mark - focus mode tests
-
-- (void)testFLTGetFLTFocusModeForString {
- XCTAssertEqual(FCPPlatformFocusModeAuto, FCPGetFocusModeForString(@"auto"));
- XCTAssertEqual(FCPPlatformFocusModeLocked, FCPGetFocusModeForString(@"locked"));
-}
-
-#pragma mark - resolution preset tests
-
-- (void)testFLTGetFLTResolutionPresetForString {
- XCTAssertEqual(FLTResolutionPresetVeryLow, FLTGetFLTResolutionPresetForString(@"veryLow"));
- XCTAssertEqual(FLTResolutionPresetLow, FLTGetFLTResolutionPresetForString(@"low"));
- XCTAssertEqual(FLTResolutionPresetMedium, FLTGetFLTResolutionPresetForString(@"medium"));
- XCTAssertEqual(FLTResolutionPresetHigh, FLTGetFLTResolutionPresetForString(@"high"));
- XCTAssertEqual(FLTResolutionPresetVeryHigh, FLTGetFLTResolutionPresetForString(@"veryHigh"));
- XCTAssertEqual(FLTResolutionPresetUltraHigh, FLTGetFLTResolutionPresetForString(@"ultraHigh"));
- XCTAssertEqual(FLTResolutionPresetMax, FLTGetFLTResolutionPresetForString(@"max"));
- XCTAssertEqual(FLTResolutionPresetInvalid, FLTGetFLTResolutionPresetForString(@"unknown"));
+- (void)testFCPGetAVCaptureFlashModeForPigeonFlashMode {
+ XCTAssertEqual(AVCaptureFlashModeOff,
+ FCPGetAVCaptureFlashModeForPigeonFlashMode(FCPPlatformFlashModeOff));
+ XCTAssertEqual(AVCaptureFlashModeAuto,
+ FCPGetAVCaptureFlashModeForPigeonFlashMode(FCPPlatformFlashModeAuto));
+ XCTAssertEqual(AVCaptureFlashModeOn,
+ FCPGetAVCaptureFlashModeForPigeonFlashMode(FCPPlatformFlashModeAlways));
+ XCTAssertThrows(FCPGetAVCaptureFlashModeForPigeonFlashMode(FCPPlatformFlashModeTorch));
}
#pragma mark - video format tests
-- (void)testFLTGetVideoFormatFromString {
- XCTAssertEqual(kCVPixelFormatType_32BGRA, FLTGetVideoFormatFromString(@"bgra8888"));
+- (void)testFCPGetPixelFormatForPigeonFormat {
+ XCTAssertEqual(kCVPixelFormatType_32BGRA,
+ FCPGetPixelFormatForPigeonFormat(FCPPlatformImageFormatGroupBgra8888));
XCTAssertEqual(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange,
- FLTGetVideoFormatFromString(@"yuv420"));
- XCTAssertEqual(kCVPixelFormatType_32BGRA, FLTGetVideoFormatFromString(@"unknown"));
+ FCPGetPixelFormatForPigeonFormat(FCPPlatformImageFormatGroupYuv420));
}
#pragma mark - device orientation tests
-- (void)testFLTGetUIDeviceOrientationForString {
+- (void)testFCPGetUIDeviceOrientationForPigeonDeviceOrientation {
XCTAssertEqual(UIDeviceOrientationPortraitUpsideDown,
- FLTGetUIDeviceOrientationForString(@"portraitDown"));
+ FCPGetUIDeviceOrientationForPigeonDeviceOrientation(
+ FCPPlatformDeviceOrientationPortraitDown));
XCTAssertEqual(UIDeviceOrientationLandscapeLeft,
- FLTGetUIDeviceOrientationForString(@"landscapeLeft"));
+ FCPGetUIDeviceOrientationForPigeonDeviceOrientation(
+ FCPPlatformDeviceOrientationLandscapeLeft));
XCTAssertEqual(UIDeviceOrientationLandscapeRight,
- FLTGetUIDeviceOrientationForString(@"landscapeRight"));
- XCTAssertEqual(UIDeviceOrientationPortrait, FLTGetUIDeviceOrientationForString(@"portraitUp"));
- XCTAssertEqual(UIDeviceOrientationUnknown, FLTGetUIDeviceOrientationForString(@"unknown"));
+ FCPGetUIDeviceOrientationForPigeonDeviceOrientation(
+ FCPPlatformDeviceOrientationLandscapeRight));
+ XCTAssertEqual(UIDeviceOrientationPortrait, FCPGetUIDeviceOrientationForPigeonDeviceOrientation(
+ FCPPlatformDeviceOrientationPortraitUp));
}
- (void)testFLTGetStringForUIDeviceOrientation {
@@ -93,12 +64,4 @@ - (void)testFLTGetStringForUIDeviceOrientation {
FCPGetPigeonDeviceOrientationForOrientation(-1));
}
-#pragma mark - file format tests
-
-- (void)testFLTGetFileFormatForString {
- XCTAssertEqual(FCPFileFormatJPEG, FCPGetFileFormatFromString(@"jpg"));
- XCTAssertEqual(FCPFileFormatHEIF, FCPGetFileFormatFromString(@"heif"));
- XCTAssertEqual(FCPFileFormatInvalid, FCPGetFileFormatFromString(@"unknown"));
-}
-
@end
diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSessionPresetsTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSessionPresetsTests.m
index a5130ad8288..28f8d5de4e9 100644
--- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSessionPresetsTests.m
+++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSessionPresetsTests.m
@@ -30,7 +30,8 @@ - (void)testResolutionPresetWithBestFormat_mustUpdateCaptureSessionPreset {
OCMExpect([captureDeviceMock lockForConfiguration:NULL]).andReturn(YES);
OCMExpect([videoSessionMock setSessionPreset:expectedPreset]);
- FLTCreateCamWithVideoDimensionsForFormat(videoSessionMock, @"max", captureDeviceMock,
+ FLTCreateCamWithVideoDimensionsForFormat(videoSessionMock, FCPPlatformResolutionPresetMax,
+ captureDeviceMock,
^CMVideoDimensions(AVCaptureDeviceFormat *format) {
CMVideoDimensions videoDimensions;
videoDimensions.width = 1;
@@ -53,7 +54,7 @@ - (void)testResolutionPresetWithCanSetSessionPresetMax_mustUpdateCaptureSessionP
OCMExpect([videoSessionMock setSessionPreset:expectedPreset]);
- FLTCreateCamWithVideoCaptureSession(videoSessionMock, @"max");
+ FLTCreateCamWithVideoCaptureSession(videoSessionMock, FCPPlatformResolutionPresetMax);
OCMVerifyAll(videoSessionMock);
}
@@ -70,7 +71,7 @@ - (void)testResolutionPresetWithCanSetSessionPresetUltraHigh_mustUpdateCaptureSe
// Expect that setting "ultraHigh" resolutionPreset correctly updates videoCaptureSession.
OCMExpect([videoSessionMock setSessionPreset:expectedPreset]);
- FLTCreateCamWithVideoCaptureSession(videoSessionMock, @"ultraHigh");
+ FLTCreateCamWithVideoCaptureSession(videoSessionMock, FCPPlatformResolutionPresetUltraHigh);
OCMVerifyAll(videoSessionMock);
}
diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSettingsTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSettingsTests.m
index 3177fe460ea..1962a6b7457 100644
--- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSettingsTests.m
+++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSettingsTests.m
@@ -9,134 +9,11 @@
#import <OCMock/OCMock.h>
#import "CameraTestUtils.h"
-static const char *gTestResolutionPreset = "medium";
+static const FCPPlatformResolutionPreset gTestResolutionPreset = FCPPlatformResolutionPresetMedium;
static const int gTestFramesPerSecond = 15;
static const int gTestVideoBitrate = 200000;
static const int gTestAudioBitrate = 32000;
-static const bool gTestEnableAudio = YES;
-
-@interface CameraCreateWithMediaSettingsParseTests : XCTestCase
-@end
-
-/// Expect that optional positive numbers can be parsed
-@implementation CameraCreateWithMediaSettingsParseTests
-
-- (FlutterError *)failingTestWithArguments:(NSDictionary *)arguments {
- CameraPlugin *camera = [[CameraPlugin alloc] initWithRegistry:nil messenger:nil];
-
- XCTestExpectation *expectation = [self expectationWithDescription:@"Result finished"];
-
- // Set up method call
- FlutterMethodCall *call = [FlutterMethodCall methodCallWithMethodName:@"create"
- arguments:arguments];
-
- __block id resultValue;
- [camera createCameraOnSessionQueueWithCreateMethodCall:call
- result:^(id _Nullable result) {
- resultValue = result;
- [expectation fulfill];
- }];
- [self waitForExpectationsWithTimeout:1 handler:nil];
-
- // Verify the result
- XCTAssertNotNil(resultValue);
- XCTAssertTrue([resultValue isKindOfClass:[FlutterError class]]);
- return (FlutterError *)resultValue;
-}
-
-- (void)goodTestWithArguments:(NSDictionary *)arguments {
- CameraPlugin *camera = [[CameraPlugin alloc] initWithRegistry:nil messenger:nil];
-
- XCTestExpectation *expectation = [self expectationWithDescription:@"Result finished"];
-
- // Set up mocks for initWithCameraName method
- id avCaptureDeviceInputMock = OCMClassMock([AVCaptureDeviceInput class]);
- OCMStub([avCaptureDeviceInputMock deviceInputWithDevice:[OCMArg any] error:[OCMArg anyObjectRef]])
- .andReturn([AVCaptureInput alloc]);
-
- id avCaptureSessionMock = OCMClassMock([AVCaptureSession class]);
- OCMStub([avCaptureSessionMock alloc]).andReturn(avCaptureSessionMock);
- OCMStub([avCaptureSessionMock canSetSessionPreset:[OCMArg any]]).andReturn(YES);
-
- // Set up method call
- FlutterMethodCall *call = [FlutterMethodCall
- methodCallWithMethodName:@"create"
- arguments:@{@"resolutionPreset" : @"medium", @"enableAudio" : @(1)}];
-
- __block id resultValue;
- [camera createCameraOnSessionQueueWithCreateMethodCall:call
- result:^(id _Nullable result) {
- resultValue = result;
- [expectation fulfill];
- }];
- [self waitForExpectationsWithTimeout:1 handler:nil];
-
- // Verify the result
- XCTAssertNotNil(resultValue);
- XCTAssertFalse([resultValue isKindOfClass:[FlutterError class]]);
- NSDictionary *dictionaryResult = (NSDictionary *)resultValue;
- XCTAssert([[dictionaryResult allKeys] containsObject:@"cameraId"]);
-}
-
-- (void)testCameraCreateWithMediaSettings_shouldRejectNegativeIntNumbers {
- FlutterError *error =
- [self failingTestWithArguments:@{@"fps" : @(-1), @"resolutionPreset" : @"medium"}];
- XCTAssertEqualObjects(error.message, @"fps should be a positive number",
- "should reject negative int number");
-}
-
-- (void)testCameraCreateWithMediaSettings_shouldRejectNegativeFloatingPointNumbers {
- FlutterError *error =
- [self failingTestWithArguments:@{@"fps" : @(-3.7), @"resolutionPreset" : @"medium"}];
- XCTAssertEqualObjects(error.message, @"fps should be a positive number",
- "should reject negative floating point number");
-}
-
-- (void)testCameraCreateWithMediaSettings_nanShouldBeParsedAsNil {
- FlutterError *error =
- [self failingTestWithArguments:@{@"fps" : @(NAN), @"resolutionPreset" : @"medium"}];
- XCTAssertEqualObjects(error.message, @"fps should not be a nan", "should reject NAN");
-}
-
-- (void)testCameraCreateWithMediaSettings_shouldNotRejectNilArguments {
- [self goodTestWithArguments:@{@"resolutionPreset" : @"medium"}];
-}
-
-- (void)testCameraCreateWithMediaSettings_shouldAcceptNull {
- [self goodTestWithArguments:@{@"fps" : [NSNull null], @"resolutionPreset" : @"medium"}];
-}
-
-- (void)testCameraCreateWithMediaSettings_shouldAcceptPositiveDecimalNumbers {
- [self goodTestWithArguments:@{@"fps" : @(5), @"resolutionPreset" : @"medium"}];
-}
-
-- (void)testCameraCreateWithMediaSettings_shouldAcceptPositiveFloatingPointNumbers {
- [self goodTestWithArguments:@{@"fps" : @(3.7), @"resolutionPreset" : @"medium"}];
-}
-
-- (void)testCameraCreateWithMediaSettings_shouldRejectWrongVideoBitrate {
- FlutterError *error =
- [self failingTestWithArguments:@{@"videoBitrate" : @(-1), @"resolutionPreset" : @"medium"}];
- XCTAssertEqualObjects(error.message, @"videoBitrate should be a positive number",
- "should reject wrong video bitrate");
-}
-
-- (void)testCameraCreateWithMediaSettings_shouldRejectWrongAudioBitrate {
- FlutterError *error =
- [self failingTestWithArguments:@{@"audioBitrate" : @(-1), @"resolutionPreset" : @"medium"}];
- XCTAssertEqualObjects(error.message, @"audioBitrate should be a positive number",
- "should reject wrong audio bitrate");
-}
-
-- (void)testCameraCreateWithMediaSettings_shouldAcceptGoodVideoBitrate {
- [self goodTestWithArguments:@{@"videoBitrate" : @(200000), @"resolutionPreset" : @"medium"}];
-}
-
-- (void)testCameraCreateWithMediaSettings_shouldAcceptGoodAudioBitrate {
- [self goodTestWithArguments:@{@"audioBitrate" : @(32000), @"resolutionPreset" : @"medium"}];
-}
-
-@end
+static const BOOL gTestEnableAudio = YES;
@interface CameraSettingsTests : XCTestCase
@end
@@ -255,11 +132,12 @@ @implementation CameraSettingsTests
/// Expect that FPS, video and audio bitrate are passed to camera device and asset writer.
- (void)testSettings_shouldPassConfigurationToCameraDeviceAndWriter {
- FLTCamMediaSettings *settings =
- [[FLTCamMediaSettings alloc] initWithFramesPerSecond:@(gTestFramesPerSecond)
- videoBitrate:@(gTestVideoBitrate)
- audioBitrate:@(gTestAudioBitrate)
- enableAudio:gTestEnableAudio];
+ FCPPlatformMediaSettings *settings =
+ [FCPPlatformMediaSettings makeWithResolutionPreset:gTestResolutionPreset
+ framesPerSecond:@(gTestFramesPerSecond)
+ videoBitrate:@(gTestVideoBitrate)
+ audioBitrate:@(gTestAudioBitrate)
+ enableAudio:gTestEnableAudio];
TestMediaSettingsAVWrapper *injectedWrapper =
[[TestMediaSettingsAVWrapper alloc] initWithTestCase:self];
@@ -275,9 +153,10 @@ - (void)testSettings_shouldPassConfigurationToCameraDeviceAndWriter {
timeout:1
enforceOrder:YES];
- [camera startVideoRecordingWithResult:^(id _Nullable result){
-
- }];
+ [camera
+ startVideoRecordingWithCompletion:^(FlutterError *_Nullable error) {
+ }
+ messengerForStreaming:nil];
[self waitForExpectations:@[
injectedWrapper.audioSettingsExpectation, injectedWrapper.videoSettingsExpectation
@@ -300,28 +179,25 @@ - (void)testSettings_ShouldBeSupportedByMethodCall {
OCMStub([avCaptureSessionMock canSetSessionPreset:[OCMArg any]]).andReturn(YES);
// Set up method call
- FlutterMethodCall *call =
- [FlutterMethodCall methodCallWithMethodName:@"create"
- arguments:@{
- @"resolutionPreset" : @(gTestResolutionPreset),
- @"enableAudio" : @(gTestEnableAudio),
- @"fps" : @(gTestFramesPerSecond),
- @"videoBitrate" : @(gTestVideoBitrate),
- @"audioBitrate" : @(gTestAudioBitrate)
- }];
-
- __block id resultValue;
- [camera createCameraOnSessionQueueWithCreateMethodCall:call
- result:^(id _Nullable result) {
- resultValue = result;
- [expectation fulfill];
- }];
- [self waitForExpectationsWithTimeout:1 handler:nil];
+ FCPPlatformMediaSettings *mediaSettings =
+ [FCPPlatformMediaSettings makeWithResolutionPreset:gTestResolutionPreset
+ framesPerSecond:@(gTestFramesPerSecond)
+ videoBitrate:@(gTestVideoBitrate)
+ audioBitrate:@(gTestAudioBitrate)
+ enableAudio:gTestEnableAudio];
+
+ __block NSNumber *resultValue;
+ [camera createCameraOnSessionQueueWithName:@"acamera"
+ settings:mediaSettings
+ completion:^(NSNumber *result, FlutterError *error) {
+ XCTAssertNil(error);
+ resultValue = result;
+ [expectation fulfill];
+ }];
+ [self waitForExpectationsWithTimeout:30 handler:nil];
// Verify the result
- NSDictionary *dictionaryResult = (NSDictionary *)resultValue;
- XCTAssertNotNil(dictionaryResult);
- XCTAssert([[dictionaryResult allKeys] containsObject:@"cameraId"]);
+ XCTAssertNotNil(resultValue);
}
@end
diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.h b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.h
index 57c47ac1fdd..eded154995e 100644
--- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.h
+++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.h
@@ -14,7 +14,8 @@ NS_ASSUME_NONNULL_BEGIN
/// dependency injection).
/// @return an FLTCam object.
extern FLTCam *_Nullable FLTCreateCamWithCaptureSessionQueueAndMediaSettings(
- dispatch_queue_t _Nullable captureSessionQueue, FLTCamMediaSettings *_Nullable mediaSettings,
+ dispatch_queue_t _Nullable captureSessionQueue,
+ FCPPlatformMediaSettings *_Nullable mediaSettings,
FLTCamMediaSettingsAVWrapper *_Nullable mediaSettingsAVWrapper);
extern FLTCam *FLTCreateCamWithCaptureSessionQueue(dispatch_queue_t captureSessionQueue);
@@ -24,7 +25,7 @@ extern FLTCam *FLTCreateCamWithCaptureSessionQueue(dispatch_queue_t captureSessi
/// @param resolutionPreset preset for camera's captureSession resolution
/// @return an FLTCam object.
extern FLTCam *FLTCreateCamWithVideoCaptureSession(AVCaptureSession *captureSession,
- NSString *resolutionPreset);
+ FCPPlatformResolutionPreset resolutionPreset);
/// Creates an `FLTCam` with a given captureSession and resolutionPreset.
/// Allows to inject a capture device and a block to compute the video dimensions.
@@ -34,8 +35,8 @@ extern FLTCam *FLTCreateCamWithVideoCaptureSession(AVCaptureSession *captureSess
/// @param videoDimensionsForFormat custom code to determine video dimensions
/// @return an FLTCam object.
extern FLTCam *FLTCreateCamWithVideoDimensionsForFormat(
- AVCaptureSession *captureSession, NSString *resolutionPreset, AVCaptureDevice *captureDevice,
- VideoDimensionsForFormat videoDimensionsForFormat);
+ AVCaptureSession *captureSession, FCPPlatformResolutionPreset resolutionPreset,
+ AVCaptureDevice *captureDevice, VideoDimensionsForFormat videoDimensionsForFormat);
/// Creates a test sample buffer.
/// @return a test sample buffer.
diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.m
index d334576e212..0dac5c4a59b 100644
--- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.m
+++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.m
@@ -3,21 +3,29 @@
// found in the LICENSE file.
#import "CameraTestUtils.h"
+
#import <OCMock/OCMock.h>
@import AVFoundation;
+@import camera_avfoundation;
+
+static FCPPlatformMediaSettings *FCPGetDefaultMediaSettings(
+ FCPPlatformResolutionPreset resolutionPreset) {
+ return [FCPPlatformMediaSettings makeWithResolutionPreset:resolutionPreset
+ framesPerSecond:nil
+ videoBitrate:nil
+ audioBitrate:nil
+ enableAudio:YES];
+}
FLTCam *FLTCreateCamWithCaptureSessionQueue(dispatch_queue_t captureSessionQueue) {
return FLTCreateCamWithCaptureSessionQueueAndMediaSettings(captureSessionQueue, nil, nil);
}
FLTCam *FLTCreateCamWithCaptureSessionQueueAndMediaSettings(
- dispatch_queue_t captureSessionQueue, FLTCamMediaSettings *mediaSettings,
+ dispatch_queue_t captureSessionQueue, FCPPlatformMediaSettings *mediaSettings,
FLTCamMediaSettingsAVWrapper *mediaSettingsAVWrapper) {
if (!mediaSettings) {
- mediaSettings = [[FLTCamMediaSettings alloc] initWithFramesPerSecond:nil
- videoBitrate:nil
- audioBitrate:nil
- enableAudio:true];
+ mediaSettings = FCPGetDefaultMediaSettings(FCPPlatformResolutionPresetMedium);
}
if (!mediaSettingsAVWrapper) {
@@ -44,7 +52,6 @@
OCMStub([audioSessionMock canSetSessionPreset:[OCMArg any]]).andReturn(YES);
id fltCam = [[FLTCam alloc] initWithCameraName:@"camera"
- resolutionPreset:@"medium"
mediaSettings:mediaSettings
mediaSettingsAVWrapper:mediaSettingsAVWrapper
orientation:UIDeviceOrientationPortrait
@@ -82,7 +89,7 @@
}
FLTCam *FLTCreateCamWithVideoCaptureSession(AVCaptureSession *captureSession,
- NSString *resolutionPreset) {
+ FCPPlatformResolutionPreset resolutionPreset) {
id inputMock = OCMClassMock([AVCaptureDeviceInput class]);
OCMStub([inputMock deviceInputWithDevice:[OCMArg any] error:[OCMArg setTo:nil]])
.andReturn(inputMock);
@@ -91,24 +98,19 @@
OCMStub([audioSessionMock addInputWithNoConnections:[OCMArg any]]);
OCMStub([audioSessionMock canSetSessionPreset:[OCMArg any]]).andReturn(YES);
- return
- [[FLTCam alloc] initWithCameraName:@"camera"
- resolutionPreset:resolutionPreset
- mediaSettings:[[FLTCamMediaSettings alloc] initWithFramesPerSecond:nil
- videoBitrate:nil
- audioBitrate:nil
- enableAudio:true]
- mediaSettingsAVWrapper:[[FLTCamMediaSettingsAVWrapper alloc] init]
- orientation:UIDeviceOrientationPortrait
- videoCaptureSession:captureSession
- audioCaptureSession:audioSessionMock
- captureSessionQueue:dispatch_queue_create("capture_session_queue", NULL)
- error:nil];
+ return [[FLTCam alloc] initWithCameraName:@"camera"
+ mediaSettings:FCPGetDefaultMediaSettings(resolutionPreset)
+ mediaSettingsAVWrapper:[[FLTCamMediaSettingsAVWrapper alloc] init]
+ orientation:UIDeviceOrientationPortrait
+ videoCaptureSession:captureSession
+ audioCaptureSession:audioSessionMock
+ captureSessionQueue:dispatch_queue_create("capture_session_queue", NULL)
+ error:nil];
}
FLTCam *FLTCreateCamWithVideoDimensionsForFormat(
- AVCaptureSession *captureSession, NSString *resolutionPreset, AVCaptureDevice *captureDevice,
- VideoDimensionsForFormat videoDimensionsForFormat) {
+ AVCaptureSession *captureSession, FCPPlatformResolutionPreset resolutionPreset,
+ AVCaptureDevice *captureDevice, VideoDimensionsForFormat videoDimensionsForFormat) {
id inputMock = OCMClassMock([AVCaptureDeviceInput class]);
OCMStub([inputMock deviceInputWithDevice:[OCMArg any] error:[OCMArg setTo:nil]])
.andReturn(inputMock);
@@ -117,22 +119,17 @@
OCMStub([audioSessionMock addInputWithNoConnections:[OCMArg any]]);
OCMStub([audioSessionMock canSetSessionPreset:[OCMArg any]]).andReturn(YES);
- return [[FLTCam alloc]
- initWithResolutionPreset:resolutionPreset
- mediaSettings:[[FLTCamMediaSettings alloc] initWithFramesPerSecond:nil
- videoBitrate:nil
- audioBitrate:nil
- enableAudio:true]
- mediaSettingsAVWrapper:[[FLTCamMediaSettingsAVWrapper alloc] init]
- orientation:UIDeviceOrientationPortrait
- videoCaptureSession:captureSession
- audioCaptureSession:audioSessionMock
- captureSessionQueue:dispatch_queue_create("capture_session_queue", NULL)
- captureDeviceFactory:^AVCaptureDevice *(void) {
- return captureDevice;
- }
- videoDimensionsForFormat:videoDimensionsForFormat
- error:nil];
+ return [[FLTCam alloc] initWithMediaSettings:FCPGetDefaultMediaSettings(resolutionPreset)
+ mediaSettingsAVWrapper:[[FLTCamMediaSettingsAVWrapper alloc] init]
+ orientation:UIDeviceOrientationPortrait
+ videoCaptureSession:captureSession
+ audioCaptureSession:audioSessionMock
+ captureSessionQueue:dispatch_queue_create("capture_session_queue", NULL)
+ captureDeviceFactory:^AVCaptureDevice *(void) {
+ return captureDevice;
+ }
+ videoDimensionsForFormat:videoDimensionsForFormat
+ error:nil];
}
CMSampleBufferRef FLTCreateTestSampleBuffer(void) {
diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamPhotoCaptureTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamPhotoCaptureTests.m
index 00c583d2412..f81625f849f 100644
--- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamPhotoCaptureTests.m
+++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamPhotoCaptureTests.m
@@ -45,8 +45,9 @@ - (void)testCaptureToFile_mustReportErrorToResultIfSavePhotoDelegateCompletionsW
// `FLTCam::captureToFile` runs on capture session queue.
dispatch_async(captureSessionQueue, ^{
- [cam captureToFile:^(id _Nullable result) {
- XCTAssertTrue([result isKindOfClass:[FlutterError class]]);
+ [cam captureToFileWithCompletion:^(NSString *result, FlutterError *error) {
+ XCTAssertNil(result);
+ XCTAssertNotNil(error);
[errorExpectation fulfill];
}];
});
@@ -84,7 +85,7 @@ - (void)testCaptureToFile_mustReportPathToResultIfSavePhotoDelegateCompletionsWi
// `FLTCam::captureToFile` runs on capture session queue.
dispatch_async(captureSessionQueue, ^{
- [cam captureToFile:^(id _Nullable result) {
+ [cam captureToFileWithCompletion:^(NSString *result, FlutterError *error) {
XCTAssertEqual(result, filePath);
[pathExpectation fulfill];
}];
@@ -100,7 +101,7 @@ - (void)testCaptureToFile_mustReportFileExtensionWithHeifWhenHEVCIsAvailableAndF
dispatch_queue_set_specific(captureSessionQueue, FLTCaptureSessionQueueSpecific,
(void *)FLTCaptureSessionQueueSpecific, NULL);
FLTCam *cam = FLTCreateCamWithCaptureSessionQueue(captureSessionQueue);
- [cam setImageFileFormat:FCPFileFormatHEIF];
+ [cam setImageFileFormat:FCPPlatformImageFileFormatHeif];
AVCapturePhotoSettings *settings =
[AVCapturePhotoSettings photoSettingsWithFormat:@{AVVideoCodecKey : AVVideoCodecTypeHEVC}];
@@ -125,8 +126,7 @@ - (void)testCaptureToFile_mustReportFileExtensionWithHeifWhenHEVCIsAvailableAndF
cam.capturePhotoOutput = mockOutput;
// `FLTCam::captureToFile` runs on capture session queue.
dispatch_async(captureSessionQueue, ^{
- [cam captureToFile:^(id _Nullable result) {
- NSString *filePath = (NSString *)result;
+ [cam captureToFileWithCompletion:^(NSString *filePath, FlutterError *error) {
XCTAssertEqualObjects([filePath pathExtension], @"heif");
[expectation fulfill];
}];
@@ -142,7 +142,7 @@ - (void)testCaptureToFile_mustReportFileExtensionWithJpgWhenHEVCNotAvailableAndF
dispatch_queue_set_specific(captureSessionQueue, FLTCaptureSessionQueueSpecific,
(void *)FLTCaptureSessionQueueSpecific, NULL);
FLTCam *cam = FLTCreateCamWithCaptureSessionQueue(captureSessionQueue);
- [cam setImageFileFormat:FCPFileFormatHEIF];
+ [cam setImageFileFormat:FCPPlatformImageFileFormatHeif];
AVCapturePhotoSettings *settings = [AVCapturePhotoSettings photoSettings];
id mockSettings = OCMClassMock([AVCapturePhotoSettings class]);
@@ -162,8 +162,7 @@ - (void)testCaptureToFile_mustReportFileExtensionWithJpgWhenHEVCNotAvailableAndF
cam.capturePhotoOutput = mockOutput;
// `FLTCam::captureToFile` runs on capture session queue.
dispatch_async(captureSessionQueue, ^{
- [cam captureToFile:^(id _Nullable result) {
- NSString *filePath = (NSString *)result;
+ [cam captureToFileWithCompletion:^(NSString *filePath, FlutterError *error) {
XCTAssertEqualObjects([filePath pathExtension], @"jpg");
[expectation fulfill];
}];
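The photo-capture tests migrate from the untyped captureToFile: result block to a completion with separate value and error parameters. A sketch of the calling convention implied by the assertions above (the exactly-one-of-the-two split is inferred from the tests, not stated explicitly in the patch):

    [cam captureToFileWithCompletion:^(NSString *_Nullable path, FlutterError *_Nullable error) {
      if (error != nil) {
        // Failure: no file path is delivered.
      } else {
        // Success: `path` points at the captured image (e.g. a .jpg or .heif file).
      }
    }];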
diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamSampleBufferTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamSampleBufferTests.m
index 9c036adeaca..cba488dfe5b 100644
--- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamSampleBufferTests.m
+++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamSampleBufferTests.m
@@ -55,12 +55,12 @@ - (void)testDidOutputSampleBuffer_mustNotChangeSampleBufferRetainCountAfterPause
});
// Pause then resume the recording.
- [cam startVideoRecordingWithResult:^(id _Nullable result){
- }];
- [cam pauseVideoRecordingWithResult:^(id _Nullable result){
- }];
- [cam resumeVideoRecordingWithResult:^(id _Nullable result){
- }];
+ [cam
+ startVideoRecordingWithCompletion:^(FlutterError *_Nullable error) {
+ }
+ messengerForStreaming:nil];
+ [cam pauseVideoRecording];
+ [cam resumeVideoRecording];
[cam captureOutput:cam.captureVideoOutput
didOutputSampleBuffer:sampleBuffer
@@ -111,8 +111,10 @@ - (void)testDidOutputSampleBufferIgnoreAudioSamplesBeforeVideoSamples {
writtenSamples = [writtenSamples arrayByAddingObject:@"audio"];
});
- [cam startVideoRecordingWithResult:^(id _Nullable result){
- }];
+ [cam
+ startVideoRecordingWithCompletion:^(FlutterError *_Nullable error) {
+ }
+ messengerForStreaming:nil];
[cam captureOutput:nil didOutputSampleBuffer:audioSample fromConnection:connectionMock];
[cam captureOutput:nil didOutputSampleBuffer:audioSample fromConnection:connectionMock];
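Recording control in these tests changes shape as well: start takes a completion plus an optional streaming messenger, while pause and resume become plain synchronous calls. A condensed sketch (passing nil for the messenger disables concurrent frame streaming):

    [cam startVideoRecordingWithCompletion:^(FlutterError *_Nullable error) {
          // A nil error means recording started.
        }
            messengerForStreaming:nil];
    [cam pauseVideoRecording];
    [cam resumeVideoRecording];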
diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/ThreadSafeTextureRegistryTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/ThreadSafeTextureRegistryTests.m
deleted file mode 100644
index f91896b5ff5..00000000000
--- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/ThreadSafeTextureRegistryTests.m
+++ /dev/null
@@ -1,109 +0,0 @@
-// Copyright 2013 The Flutter Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-@import camera_avfoundation;
-@import camera_avfoundation.Test;
-@import XCTest;
-#import <OCMock/OCMock.h>
-
-@interface ThreadSafeTextureRegistryTests : XCTestCase
-@end
-
-@implementation ThreadSafeTextureRegistryTests
-
-- (void)testShouldStayOnMainThreadIfCalledFromMainThread {
- NSObject<FlutterTextureRegistry> *mockTextureRegistry =
- OCMProtocolMock(@protocol(FlutterTextureRegistry));
- FLTThreadSafeTextureRegistry *threadSafeTextureRegistry =
- [[FLTThreadSafeTextureRegistry alloc] initWithTextureRegistry:mockTextureRegistry];
-
- XCTestExpectation *registerTextureExpectation =
- [self expectationWithDescription:@"registerTexture must be called on the main thread"];
- XCTestExpectation *unregisterTextureExpectation =
- [self expectationWithDescription:@"unregisterTexture must be called on the main thread"];
- XCTestExpectation *textureFrameAvailableExpectation =
- [self expectationWithDescription:@"textureFrameAvailable must be called on the main thread"];
- XCTestExpectation *registerTextureCompletionExpectation =
- [self expectationWithDescription:
- @"registerTexture's completion block must be called on the main thread"];
-
- OCMStub([mockTextureRegistry registerTexture:[OCMArg any]]).andDo(^(NSInvocation *invocation) {
- if (NSThread.isMainThread) {
- [registerTextureExpectation fulfill];
- }
- });
-
- OCMStub([mockTextureRegistry unregisterTexture:0]).andDo(^(NSInvocation *invocation) {
- if (NSThread.isMainThread) {
- [unregisterTextureExpectation fulfill];
- }
- });
-
- OCMStub([mockTextureRegistry textureFrameAvailable:0]).andDo(^(NSInvocation *invocation) {
- if (NSThread.isMainThread) {
- [textureFrameAvailableExpectation fulfill];
- }
- });
-
- NSObject<FlutterTexture> *anyTexture = OCMProtocolMock(@protocol(FlutterTexture));
- [threadSafeTextureRegistry registerTexture:anyTexture
- completion:^(int64_t textureId) {
- if (NSThread.isMainThread) {
- [registerTextureCompletionExpectation fulfill];
- }
- }];
- [threadSafeTextureRegistry textureFrameAvailable:0];
- [threadSafeTextureRegistry unregisterTexture:0];
- [self waitForExpectationsWithTimeout:1 handler:nil];
-}
-
-- (void)testShouldDispatchToMainThreadIfCalledFromBackgroundThread {
- NSObject<FlutterTextureRegistry> *mockTextureRegistry =
- OCMProtocolMock(@protocol(FlutterTextureRegistry));
- FLTThreadSafeTextureRegistry *threadSafeTextureRegistry =
- [[FLTThreadSafeTextureRegistry alloc] initWithTextureRegistry:mockTextureRegistry];
-
- XCTestExpectation *registerTextureExpectation =
- [self expectationWithDescription:@"registerTexture must be called on the main thread"];
- XCTestExpectation *unregisterTextureExpectation =
- [self expectationWithDescription:@"unregisterTexture must be called on the main thread"];
- XCTestExpectation *textureFrameAvailableExpectation =
- [self expectationWithDescription:@"textureFrameAvailable must be called on the main thread"];
- XCTestExpectation *registerTextureCompletionExpectation =
- [self expectationWithDescription:
- @"registerTexture's completion block must be called on the main thread"];
-
- OCMStub([mockTextureRegistry registerTexture:[OCMArg any]]).andDo(^(NSInvocation *invocation) {
- if (NSThread.isMainThread) {
- [registerTextureExpectation fulfill];
- }
- });
-
- OCMStub([mockTextureRegistry unregisterTexture:0]).andDo(^(NSInvocation *invocation) {
- if (NSThread.isMainThread) {
- [unregisterTextureExpectation fulfill];
- }
- });
-
- OCMStub([mockTextureRegistry textureFrameAvailable:0]).andDo(^(NSInvocation *invocation) {
- if (NSThread.isMainThread) {
- [textureFrameAvailableExpectation fulfill];
- }
- });
-
- dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
- NSObject<FlutterTexture> *anyTexture = OCMProtocolMock(@protocol(FlutterTexture));
- [threadSafeTextureRegistry registerTexture:anyTexture
- completion:^(int64_t textureId) {
- if (NSThread.isMainThread) {
- [registerTextureCompletionExpectation fulfill];
- }
- }];
- [threadSafeTextureRegistry textureFrameAvailable:0];
- [threadSafeTextureRegistry unregisterTexture:0];
- });
- [self waitForExpectationsWithTimeout:1 handler:nil];
-}
-
-@end
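These tests are removed together with FLTThreadSafeTextureRegistry itself; the plugin now keeps the plain Flutter texture registry and hops to the main queue explicitly at the call sites that need it (see the FLTEnsureToRunOnMainQueue calls in CameraPlugin.m below). A condensed fragment of that replacement pattern, assuming the surrounding handler provides `cameraId` and `self.registry`:

    __weak typeof(self) weakSelf = self;
    FLTEnsureToRunOnMainQueue(^{
      // Texture registry calls must happen on the main queue.
      [weakSelf.registry textureFrameAvailable:cameraId];
    });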
diff --git a/packages/camera/camera_avfoundation/ios/Classes/CameraPlugin.m b/packages/camera/camera_avfoundation/ios/Classes/CameraPlugin.m
index 90a124ebafd..de89aecce22 100644
--- a/packages/camera/camera_avfoundation/ios/Classes/CameraPlugin.m
+++ b/packages/camera/camera_avfoundation/ios/Classes/CameraPlugin.m
@@ -6,12 +6,12 @@
#import "CameraPlugin_Test.h"
@import AVFoundation;
+@import Flutter;
#import "CameraPermissionUtils.h"
#import "CameraProperties.h"
#import "FLTCam.h"
#import "FLTThreadSafeEventChannel.h"
-#import "FLTThreadSafeTextureRegistry.h"
#import "QueueUtils.h"
#import "messages.g.h"
@@ -22,7 +22,7 @@
}
@interface CameraPlugin ()
-@property(readonly, nonatomic) FLTThreadSafeTextureRegistry *registry;
+@property(readonly, nonatomic) id<FlutterTextureRegistry> registry;
@property(readonly, nonatomic) NSObject<FlutterBinaryMessenger> *messenger;
@property(nonatomic) FCPCameraGlobalEventApi *globalEventAPI;
@end
@@ -30,12 +30,8 @@ @interface CameraPlugin ()
@implementation CameraPlugin
+ (void)registerWithRegistrar:(NSObject<FlutterPluginRegistrar> *)registrar {
- FlutterMethodChannel *channel =
- [FlutterMethodChannel methodChannelWithName:@"plugins.flutter.io/camera_avfoundation"
- binaryMessenger:[registrar messenger]];
CameraPlugin *instance = [[CameraPlugin alloc] initWithRegistry:[registrar textures]
messenger:[registrar messenger]];
- [registrar addMethodCallDelegate:instance channel:channel];
SetUpFCPCameraApi([registrar messenger], instance);
}
@@ -52,7 +48,7 @@ - (instancetype)initWithRegistry:(NSObject<FlutterTextureRegistry> *)registry
globalAPI:(FCPCameraGlobalEventApi *)globalAPI {
self = [super init];
NSAssert(self, @"super init cannot be nil");
- _registry = [[FLTThreadSafeTextureRegistry alloc] initWithTextureRegistry:registry];
+ _registry = registry;
_messenger = messenger;
_globalEventAPI = globalAPI;
_captureSessionQueue = dispatch_queue_create("io.flutter.camera.captureSessionQueue", NULL);
@@ -103,13 +99,7 @@ - (void)sendDeviceOrientation:(UIDeviceOrientation)orientation {
});
}
-- (void)handleMethodCall:(FlutterMethodCall *)call result:(FlutterResult)result {
- // Invoke the plugin on another dispatch queue to avoid blocking the UI.
- __weak typeof(self) weakSelf = self;
- dispatch_async(self.captureSessionQueue, ^{
- [weakSelf handleMethodCallAsync:call result:result];
- });
-}
+#pragma mark FCPCameraApi Implementation
- (void)availableCamerasWithCompletion:
(nonnull void (^)(NSArray *_Nullable,
@@ -148,274 +138,355 @@ - (void)availableCamerasWithCompletion:
});
}
-- (void)handleMethodCallAsync:(FlutterMethodCall *)call result:(FlutterResult)result {
- if ([@"create" isEqualToString:call.method]) {
- [self handleCreateMethodCall:call result:result];
- } else if ([@"startImageStream" isEqualToString:call.method]) {
- [_camera startImageStreamWithMessenger:_messenger];
- result(nil);
- } else if ([@"stopImageStream" isEqualToString:call.method]) {
- [_camera stopImageStream];
- result(nil);
- } else if ([@"receivedImageStreamData" isEqualToString:call.method]) {
- [_camera receivedImageStreamData];
- result(nil);
- } else {
- NSDictionary *argsMap = call.arguments;
- NSUInteger cameraId = ((NSNumber *)argsMap[@"cameraId"]).unsignedIntegerValue;
- if ([@"initialize" isEqualToString:call.method]) {
- NSString *videoFormatValue = ((NSString *)argsMap[@"imageFormatGroup"]);
-
- [_camera setVideoFormat:FLTGetVideoFormatFromString(videoFormatValue)];
+- (void)createCameraWithName:(nonnull NSString *)cameraName
+ settings:(nonnull FCPPlatformMediaSettings *)settings
+ completion:
+ (nonnull void (^)(NSNumber *_Nullable, FlutterError *_Nullable))completion {
+ // Create FLTCam only if granted camera access (and audio access if audio is enabled)
+ __weak typeof(self) weakSelf = self;
+ dispatch_async(self.captureSessionQueue, ^{
+ FLTRequestCameraPermissionWithCompletionHandler(^(FlutterError *error) {
+ typeof(self) strongSelf = weakSelf;
+ if (!strongSelf) return;
- __weak CameraPlugin *weakSelf = self;
- _camera.onFrameAvailable = ^{
- if (![weakSelf.camera isPreviewPaused]) {
- [weakSelf.registry textureFrameAvailable:cameraId];
- }
- };
- _camera.dartAPI = [[FCPCameraEventApi alloc]
- initWithBinaryMessenger:_messenger
- messageChannelSuffix:[NSString stringWithFormat:@"%ld", cameraId]];
- [_camera reportInitializationState];
- [self sendDeviceOrientation:[UIDevice currentDevice].orientation];
- [_camera start];
- result(nil);
- } else if ([@"takePicture" isEqualToString:call.method]) {
- [_camera captureToFile:result];
- } else if ([@"dispose" isEqualToString:call.method]) {
- [_registry unregisterTexture:cameraId];
- [_camera close];
- result(nil);
- } else if ([@"prepareForVideoRecording" isEqualToString:call.method]) {
- [self.camera setUpCaptureSessionForAudio];
- result(nil);
- } else if ([@"startVideoRecording" isEqualToString:call.method]) {
- BOOL enableStream = [call.arguments[@"enableStream"] boolValue];
- if (enableStream) {
- [_camera startVideoRecordingWithResult:result messengerForStreaming:_messenger];
+ if (error) {
+ completion(nil, error);
} else {
- [_camera startVideoRecordingWithResult:result];
- }
- } else if ([@"stopVideoRecording" isEqualToString:call.method]) {
- [_camera stopVideoRecordingWithResult:result];
- } else if ([@"pauseVideoRecording" isEqualToString:call.method]) {
- [_camera pauseVideoRecordingWithResult:result];
- } else if ([@"resumeVideoRecording" isEqualToString:call.method]) {
- [_camera resumeVideoRecordingWithResult:result];
- } else if ([@"getMaxZoomLevel" isEqualToString:call.method]) {
- [_camera getMaxZoomLevelWithResult:result];
- } else if ([@"getMinZoomLevel" isEqualToString:call.method]) {
- [_camera getMinZoomLevelWithResult:result];
- } else if ([@"setZoomLevel" isEqualToString:call.method]) {
- CGFloat zoom = ((NSNumber *)argsMap[@"zoom"]).floatValue;
- [_camera setZoomLevel:zoom Result:result];
- } else if ([@"setFlashMode" isEqualToString:call.method]) {
- [_camera setFlashModeWithResult:result mode:call.arguments[@"mode"]];
- } else if ([@"setExposureMode" isEqualToString:call.method]) {
- [_camera setExposureModeWithResult:result mode:call.arguments[@"mode"]];
- } else if ([@"setExposurePoint" isEqualToString:call.method]) {
- BOOL reset = ((NSNumber *)call.arguments[@"reset"]).boolValue;
- double x = 0.5;
- double y = 0.5;
- if (!reset) {
- x = ((NSNumber *)call.arguments[@"x"]).doubleValue;
- y = ((NSNumber *)call.arguments[@"y"]).doubleValue;
- }
- [_camera setExposurePointWithResult:result x:x y:y];
- } else if ([@"getMinExposureOffset" isEqualToString:call.method]) {
- result(@(_camera.captureDevice.minExposureTargetBias));
- } else if ([@"getMaxExposureOffset" isEqualToString:call.method]) {
- result(@(_camera.captureDevice.maxExposureTargetBias));
- } else if ([@"getExposureOffsetStepSize" isEqualToString:call.method]) {
- result(@(0.0));
- } else if ([@"setExposureOffset" isEqualToString:call.method]) {
- [_camera setExposureOffsetWithResult:result
- offset:((NSNumber *)call.arguments[@"offset"]).doubleValue];
- } else if ([@"lockCaptureOrientation" isEqualToString:call.method]) {
- [_camera lockCaptureOrientationWithResult:result orientation:call.arguments[@"orientation"]];
- } else if ([@"unlockCaptureOrientation" isEqualToString:call.method]) {
- [_camera unlockCaptureOrientationWithResult:result];
- } else if ([@"setFocusMode" isEqualToString:call.method]) {
- [_camera setFocusModeWithResult:result mode:call.arguments[@"mode"]];
- } else if ([@"setFocusPoint" isEqualToString:call.method]) {
- BOOL reset = ((NSNumber *)call.arguments[@"reset"]).boolValue;
- double x = 0.5;
- double y = 0.5;
- if (!reset) {
- x = ((NSNumber *)call.arguments[@"x"]).doubleValue;
- y = ((NSNumber *)call.arguments[@"y"]).doubleValue;
+ // Request audio permission on `create` call with `enableAudio` argument instead of the
+ // `prepareForVideoRecording` call. This is because `prepareForVideoRecording` call is
+ // optional, and used as a workaround to fix a missing frame issue on iOS.
+ if (settings.enableAudio) {
+ // Setup audio capture session only if granted audio access.
+ FLTRequestAudioPermissionWithCompletionHandler(^(FlutterError *error) {
+ // cannot use the outer `strongSelf`
+ typeof(self) strongSelf = weakSelf;
+ if (!strongSelf) return;
+ if (error) {
+ completion(nil, error);
+ } else {
+ [strongSelf createCameraOnSessionQueueWithName:cameraName
+ settings:settings
+ completion:completion];
+ }
+ });
+ } else {
+ [strongSelf createCameraOnSessionQueueWithName:cameraName
+ settings:settings
+ completion:completion];
+ }
}
- [_camera setFocusPointWithResult:result x:x y:y];
- } else if ([@"pausePreview" isEqualToString:call.method]) {
- [_camera pausePreviewWithResult:result];
- } else if ([@"resumePreview" isEqualToString:call.method]) {
- [_camera resumePreviewWithResult:result];
- } else if ([@"setDescriptionWhileRecording" isEqualToString:call.method]) {
- [_camera setDescriptionWhileRecording:(call.arguments[@"cameraName"]) result:result];
- } else if ([@"setImageFileFormat" isEqualToString:call.method]) {
- NSString *fileFormat = call.arguments[@"fileFormat"];
- [_camera setImageFileFormat:FCPGetFileFormatFromString(fileFormat)];
- } else {
- result(FlutterMethodNotImplemented);
- }
- }
+ });
+ });
}
-- (void)handleCreateMethodCall:(FlutterMethodCall *)call result:(FlutterResult)result {
- // Create FLTCam only if granted camera access (and audio access if audio is enabled)
+- (void)initializeCamera:(NSInteger)cameraId
+ withImageFormat:(FCPPlatformImageFormatGroup)imageFormat
+ completion:(nonnull void (^)(FlutterError *_Nullable))completion {
+ __weak typeof(self) weakSelf = self;
+ dispatch_async(self.captureSessionQueue, ^{
+ [weakSelf sessionQueueInitializeCamera:cameraId
+ withImageFormat:imageFormat
+ completion:completion];
+ });
+}
+
+- (void)startImageStreamWithCompletion:(nonnull void (^)(FlutterError *_Nullable))completion {
+ __weak typeof(self) weakSelf = self;
+ dispatch_async(self.captureSessionQueue, ^{
+ [weakSelf.camera startImageStreamWithMessenger:weakSelf.messenger];
+ completion(nil);
+ });
+}
+
+- (void)stopImageStreamWithCompletion:(nonnull void (^)(FlutterError *_Nullable))completion {
+ __weak typeof(self) weakSelf = self;
+ dispatch_async(self.captureSessionQueue, ^{
+ [weakSelf.camera stopImageStream];
+ completion(nil);
+ });
+}
+
+- (void)receivedImageStreamDataWithCompletion:
+ (nonnull void (^)(FlutterError *_Nullable))completion {
+ __weak typeof(self) weakSelf = self;
+ dispatch_async(self.captureSessionQueue, ^{
+ [weakSelf.camera receivedImageStreamData];
+ completion(nil);
+ });
+}
+
+- (void)takePictureWithCompletion:(nonnull void (^)(NSString *_Nullable,
+ FlutterError *_Nullable))completion {
+ __weak typeof(self) weakSelf = self;
+ dispatch_async(self.captureSessionQueue, ^{
+ [weakSelf.camera captureToFileWithCompletion:completion];
+ });
+}
+
+- (void)prepareForVideoRecordingWithCompletion:
+ (nonnull void (^)(FlutterError *_Nullable))completion {
+ __weak typeof(self) weakSelf = self;
+ dispatch_async(self.captureSessionQueue, ^{
+ [weakSelf.camera setUpCaptureSessionForAudio];
+ completion(nil);
+ });
+}
+
+- (void)startVideoRecordingWithStreaming:(BOOL)enableStream
+ completion:(nonnull void (^)(FlutterError *_Nullable))completion {
__weak typeof(self) weakSelf = self;
- FLTRequestCameraPermissionWithCompletionHandler(^(FlutterError *error) {
+ dispatch_async(self.captureSessionQueue, ^{
typeof(self) strongSelf = weakSelf;
if (!strongSelf) return;
+ [strongSelf.camera
+ startVideoRecordingWithCompletion:completion
+ messengerForStreaming:(enableStream ? strongSelf.messenger : nil)];
+ });
+}
- if (error) {
- result(error);
- } else {
- // Request audio permission on `create` call with `enableAudio` argument instead of the
- // `prepareForVideoRecording` call. This is because `prepareForVideoRecording` call is
- // optional, and used as a workaround to fix a missing frame issue on iOS.
- BOOL audioEnabled = [call.arguments[@"enableAudio"] boolValue];
- if (audioEnabled) {
- // Setup audio capture session only if granted audio access.
- FLTRequestAudioPermissionWithCompletionHandler(^(FlutterError *error) {
- // cannot use the outter `strongSelf`
- typeof(self) strongSelf = weakSelf;
- if (!strongSelf) return;
- if (error) {
- result(error);
- } else {
- [strongSelf createCameraOnSessionQueueWithCreateMethodCall:call result:result];
- }
- });
- } else {
- [strongSelf createCameraOnSessionQueueWithCreateMethodCall:call result:result];
- }
- }
+- (void)stopVideoRecordingWithCompletion:(nonnull void (^)(NSString *_Nullable,
+ FlutterError *_Nullable))completion {
+ __weak typeof(self) weakSelf = self;
+ dispatch_async(self.captureSessionQueue, ^{
+ [weakSelf.camera stopVideoRecordingWithCompletion:completion];
});
}
-// Returns number value if provided and positive, or nil.
-// Used to parse values like framerates and bitrates, that are positive by nature.
-// nil allows to ignore unsupported values.
-+ (NSNumber *)positiveNumberValueOrNilForArgument:(NSString *)argument
- fromMethod:(FlutterMethodCall *)flutterMethodCall
- error:(NSError **)error {
- id value = flutterMethodCall.arguments[argument];
+- (void)pauseVideoRecordingWithCompletion:(nonnull void (^)(FlutterError *_Nullable))completion {
+ __weak typeof(self) weakSelf = self;
+ dispatch_async(self.captureSessionQueue, ^{
+ [weakSelf.camera pauseVideoRecording];
+ completion(nil);
+ });
+}
- if (!value || [value isEqual:[NSNull null]]) {
- return nil;
- }
+- (void)resumeVideoRecordingWithCompletion:(nonnull void (^)(FlutterError *_Nullable))completion {
+ __weak typeof(self) weakSelf = self;
+ dispatch_async(self.captureSessionQueue, ^{
+ [weakSelf.camera resumeVideoRecording];
+ completion(nil);
+ });
+}
- if (![value isKindOfClass:[NSNumber class]]) {
- if (error) {
- *error = [NSError errorWithDomain:@"ArgumentError"
- code:0
- userInfo:@{
- NSLocalizedDescriptionKey :
- [NSString stringWithFormat:@"%@ should be a number", argument]
- }];
- }
- return nil;
- }
+- (void)getMinimumZoomLevel:(nonnull void (^)(NSNumber *_Nullable,
+ FlutterError *_Nullable))completion {
+ __weak typeof(self) weakSelf = self;
+ dispatch_async(self.captureSessionQueue, ^{
+ completion(@(weakSelf.camera.minimumAvailableZoomFactor), nil);
+ });
+}
- NSNumber *number = (NSNumber *)value;
+- (void)getMaximumZoomLevel:(nonnull void (^)(NSNumber *_Nullable,
+ FlutterError *_Nullable))completion {
+ __weak typeof(self) weakSelf = self;
+ dispatch_async(self.captureSessionQueue, ^{
+ completion(@(weakSelf.camera.maximumAvailableZoomFactor), nil);
+ });
+}
- if (isnan([number doubleValue])) {
- if (error) {
- *error = [NSError errorWithDomain:@"ArgumentError"
- code:0
- userInfo:@{
- NSLocalizedDescriptionKey :
- [NSString stringWithFormat:@"%@ should not be a nan", argument]
- }];
- }
- return nil;
- }
+- (void)setZoomLevel:(double)zoom completion:(nonnull void (^)(FlutterError *_Nullable))completion {
+ __weak typeof(self) weakSelf = self;
+ dispatch_async(self.captureSessionQueue, ^{
+ [weakSelf.camera setZoomLevel:zoom withCompletion:completion];
+ });
+}
- if ([number doubleValue] <= 0.0) {
- if (error) {
- *error = [NSError errorWithDomain:@"ArgumentError"
- code:0
- userInfo:@{
- NSLocalizedDescriptionKey : [NSString
- stringWithFormat:@"%@ should be a positive number", argument]
- }];
- }
- return nil;
- }
+- (void)setFlashMode:(FCPPlatformFlashMode)mode
+ completion:(nonnull void (^)(FlutterError *_Nullable))completion {
+ __weak typeof(self) weakSelf = self;
+ dispatch_async(self.captureSessionQueue, ^{
+ [weakSelf.camera setFlashMode:mode withCompletion:completion];
+ });
+}
- return number;
+- (void)setExposureMode:(FCPPlatformExposureMode)mode
+ completion:(nonnull void (^)(FlutterError *_Nullable))completion {
+ __weak typeof(self) weakSelf = self;
+ dispatch_async(self.captureSessionQueue, ^{
+ [weakSelf.camera setExposureMode:mode];
+ completion(nil);
+ });
}
-- (void)createCameraOnSessionQueueWithCreateMethodCall:(FlutterMethodCall *)createMethodCall
- result:(FlutterResult)result {
+- (void)setExposurePoint:(nullable FCPPlatformPoint *)point
+ completion:(nonnull void (^)(FlutterError *_Nullable))completion {
__weak typeof(self) weakSelf = self;
dispatch_async(self.captureSessionQueue, ^{
- typeof(self) strongSelf = weakSelf;
- if (!strongSelf) return;
+ [weakSelf.camera setExposurePoint:point withCompletion:completion];
+ });
+}
- NSString *cameraName = createMethodCall.arguments[@"cameraName"];
+- (void)getMinimumExposureOffset:(nonnull void (^)(NSNumber *_Nullable,
+ FlutterError *_Nullable))completion {
+ __weak typeof(self) weakSelf = self;
+ dispatch_async(self.captureSessionQueue, ^{
+ completion(@(weakSelf.camera.captureDevice.minExposureTargetBias), nil);
+ });
+}
- NSError *error;
+- (void)getMaximumExposureOffset:(nonnull void (^)(NSNumber *_Nullable,
+ FlutterError *_Nullable))completion {
+ __weak typeof(self) weakSelf = self;
+ dispatch_async(self.captureSessionQueue, ^{
+ completion(@(weakSelf.camera.captureDevice.maxExposureTargetBias), nil);
+ });
+}
- NSNumber *framesPerSecond = [CameraPlugin positiveNumberValueOrNilForArgument:@"fps"
- fromMethod:createMethodCall
- error:&error];
- if (error) {
- result(FlutterErrorFromNSError(error));
- return;
- }
+- (void)setExposureOffset:(double)offset
+ completion:(nonnull void (^)(FlutterError *_Nullable))completion {
+ __weak typeof(self) weakSelf = self;
+ dispatch_async(self.captureSessionQueue, ^{
+ [weakSelf.camera setExposureOffset:offset];
+ completion(nil);
+ });
+}
- NSNumber *videoBitrate = [CameraPlugin positiveNumberValueOrNilForArgument:@"videoBitrate"
- fromMethod:createMethodCall
- error:&error];
- if (error) {
- result(FlutterErrorFromNSError(error));
- return;
- }
+- (void)setFocusMode:(FCPPlatformFocusMode)mode
+ completion:(nonnull void (^)(FlutterError *_Nullable))completion {
+ __weak typeof(self) weakSelf = self;
+ dispatch_async(self.captureSessionQueue, ^{
+ [weakSelf.camera setFocusMode:mode];
+ completion(nil);
+ });
+}
- NSNumber *audioBitrate = [CameraPlugin positiveNumberValueOrNilForArgument:@"audioBitrate"
- fromMethod:createMethodCall
- error:&error];
- if (error) {
- result(FlutterErrorFromNSError(error));
- return;
- }
+- (void)setFocusPoint:(nullable FCPPlatformPoint *)point
+ completion:(nonnull void (^)(FlutterError *_Nullable))completion {
+ __weak typeof(self) weakSelf = self;
+ dispatch_async(self.captureSessionQueue, ^{
+ [weakSelf.camera setFocusPoint:point withCompletion:completion];
+ });
+}
- NSString *resolutionPreset = createMethodCall.arguments[@"resolutionPreset"];
- NSNumber *enableAudio = createMethodCall.arguments[@"enableAudio"];
- FLTCamMediaSettings *mediaSettings =
- [[FLTCamMediaSettings alloc] initWithFramesPerSecond:framesPerSecond
- videoBitrate:videoBitrate
- audioBitrate:audioBitrate
- enableAudio:[enableAudio boolValue]];
- FLTCamMediaSettingsAVWrapper *mediaSettingsAVWrapper =
- [[FLTCamMediaSettingsAVWrapper alloc] init];
-
- FLTCam *cam = [[FLTCam alloc] initWithCameraName:cameraName
- resolutionPreset:resolutionPreset
- mediaSettings:mediaSettings
- mediaSettingsAVWrapper:mediaSettingsAVWrapper
- orientation:[[UIDevice currentDevice] orientation]
- captureSessionQueue:strongSelf.captureSessionQueue
- error:&error];
-
- if (error) {
- result(FlutterErrorFromNSError(error));
- } else {
- if (strongSelf.camera) {
- [strongSelf.camera close];
- }
- strongSelf.camera = cam;
- [strongSelf.registry registerTexture:cam
- completion:^(int64_t textureId) {
- result(@{
- @"cameraId" : @(textureId),
- });
- }];
+- (void)lockCaptureOrientation:(FCPPlatformDeviceOrientation)orientation
+ completion:(nonnull void (^)(FlutterError *_Nullable))completion {
+ __weak typeof(self) weakSelf = self;
+ dispatch_async(self.captureSessionQueue, ^{
+ [weakSelf.camera lockCaptureOrientation:orientation];
+ completion(nil);
+ });
+}
+
+- (void)unlockCaptureOrientationWithCompletion:
+ (nonnull void (^)(FlutterError *_Nullable))completion {
+ __weak typeof(self) weakSelf = self;
+ dispatch_async(self.captureSessionQueue, ^{
+ [weakSelf.camera unlockCaptureOrientation];
+ completion(nil);
+ });
+}
+
+- (void)pausePreviewWithCompletion:(nonnull void (^)(FlutterError *_Nullable))completion {
+ __weak typeof(self) weakSelf = self;
+ dispatch_async(self.captureSessionQueue, ^{
+ [weakSelf.camera pausePreview];
+ completion(nil);
+ });
+}
+
+- (void)resumePreviewWithCompletion:(nonnull void (^)(FlutterError *_Nullable))completion {
+ __weak typeof(self) weakSelf = self;
+ dispatch_async(self.captureSessionQueue, ^{
+ [weakSelf.camera resumePreview];
+ completion(nil);
+ });
+}
+
+- (void)setImageFileFormat:(FCPPlatformImageFileFormat)format
+ completion:(nonnull void (^)(FlutterError *_Nullable))completion {
+ __weak typeof(self) weakSelf = self;
+ dispatch_async(self.captureSessionQueue, ^{
+ [weakSelf.camera setImageFileFormat:format];
+ completion(nil);
+ });
+}
+
+- (void)updateDescriptionWhileRecordingCameraName:(nonnull NSString *)cameraName
+ completion:
+ (nonnull void (^)(FlutterError *_Nullable))completion {
+ __weak typeof(self) weakSelf = self;
+ dispatch_async(self.captureSessionQueue, ^{
+ [weakSelf.camera setDescriptionWhileRecording:cameraName withCompletion:completion];
+ });
+}
+
+- (void)disposeCamera:(NSInteger)cameraId
+ completion:(nonnull void (^)(FlutterError *_Nullable))completion {
+ [_registry unregisterTexture:cameraId];
+ __weak typeof(self) weakSelf = self;
+ dispatch_async(self.captureSessionQueue, ^{
+ [weakSelf.camera close];
+ completion(nil);
+ });
+}
+
+#pragma mark Private
+
+// This must be called on captureSessionQueue. It is extracted from
+// initializeCamera:withImageFormat:completion: to make it easier to reason about strong/weak
+// self pointers.
+- (void)sessionQueueInitializeCamera:(NSInteger)cameraId
+ withImageFormat:(FCPPlatformImageFormatGroup)imageFormat
+ completion:(nonnull void (^)(FlutterError *_Nullable))completion {
+ [_camera setVideoFormat:FCPGetPixelFormatForPigeonFormat(imageFormat)];
+
+ __weak CameraPlugin *weakSelf = self;
+ _camera.onFrameAvailable = ^{
+ typeof(self) strongSelf = weakSelf;
+ if (!strongSelf) return;
+ if (![strongSelf.camera isPreviewPaused]) {
+ FLTEnsureToRunOnMainQueue(^{
+ [weakSelf.registry textureFrameAvailable:cameraId];
+ });
}
+ };
+ _camera.dartAPI = [[FCPCameraEventApi alloc]
+ initWithBinaryMessenger:_messenger
+ messageChannelSuffix:[NSString stringWithFormat:@"%ld", cameraId]];
+ [_camera reportInitializationState];
+ [self sendDeviceOrientation:[UIDevice currentDevice].orientation];
+ [_camera start];
+ completion(nil);
+}
+
+- (void)createCameraOnSessionQueueWithName:(NSString *)name
+ settings:(FCPPlatformMediaSettings *)settings
+ completion:(nonnull void (^)(NSNumber *_Nullable,
+ FlutterError *_Nullable))completion {
+ __weak typeof(self) weakSelf = self;
+ dispatch_async(self.captureSessionQueue, ^{
+ [weakSelf sessionQueueCreateCameraWithName:name settings:settings completion:completion];
});
}
+// This must be called on captureSessionQueue. It is extracted from
+// createCameraOnSessionQueueWithName:settings:completion: to make it easier to reason about
+// strong/weak self pointers.
+- (void)sessionQueueCreateCameraWithName:(NSString *)name
+ settings:(FCPPlatformMediaSettings *)settings
+ completion:(nonnull void (^)(NSNumber *_Nullable,
+ FlutterError *_Nullable))completion {
+ FLTCamMediaSettingsAVWrapper *mediaSettingsAVWrapper =
+ [[FLTCamMediaSettingsAVWrapper alloc] init];
+
+ NSError *error;
+ FLTCam *cam = [[FLTCam alloc] initWithCameraName:name
+ mediaSettings:settings
+ mediaSettingsAVWrapper:mediaSettingsAVWrapper
+ orientation:[[UIDevice currentDevice] orientation]
+ captureSessionQueue:self.captureSessionQueue
+ error:&error];
+
+ if (error) {
+ completion(nil, FlutterErrorFromNSError(error));
+ } else {
+ [_camera close];
+ _camera = cam;
+ __weak typeof(self) weakSelf = self;
+ FLTEnsureToRunOnMainQueue(^{
+ completion(@([weakSelf.registry registerTexture:cam]), nil);
+ });
+ }
+}
+
@end
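Every FCPCameraApi handler added above follows the same template: capture a weak self, dispatch onto captureSessionQueue, forward to the FLTCam instance, then invoke the Pigeon completion. A distilled version of that template (the selector below is illustrative, not part of the patch):

    - (void)somePluginCallWithCompletion:(void (^)(FlutterError *_Nullable))completion {
      __weak typeof(self) weakSelf = self;
      dispatch_async(self.captureSessionQueue, ^{
        // Do the work on the session queue, then report back to Dart.
        [weakSelf.camera pausePreview];
        completion(nil);
      });
    }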
diff --git a/packages/camera/camera_avfoundation/ios/Classes/CameraPlugin.modulemap b/packages/camera/camera_avfoundation/ios/Classes/CameraPlugin.modulemap
index 65a82b70bc2..bc864d17492 100644
--- a/packages/camera/camera_avfoundation/ios/Classes/CameraPlugin.modulemap
+++ b/packages/camera/camera_avfoundation/ios/Classes/CameraPlugin.modulemap
@@ -12,7 +12,6 @@ framework module camera_avfoundation {
header "FLTCam_Test.h"
header "FLTSavePhotoDelegate_Test.h"
header "FLTThreadSafeEventChannel.h"
- header "FLTThreadSafeTextureRegistry.h"
header "QueueUtils.h"
}
}
diff --git a/packages/camera/camera_avfoundation/ios/Classes/CameraPlugin_Test.h b/packages/camera/camera_avfoundation/ios/Classes/CameraPlugin_Test.h
index ab6fb186ad7..c29c2f306db 100644
--- a/packages/camera/camera_avfoundation/ios/Classes/CameraPlugin_Test.h
+++ b/packages/camera/camera_avfoundation/ios/Classes/CameraPlugin_Test.h
@@ -30,13 +30,6 @@
/// Hide the default public constructor.
- (instancetype)init NS_UNAVAILABLE;
-/// Handles `FlutterMethodCall`s and ensures result is send on the main dispatch queue.
-///
-/// @param call The method call command object.
-/// @param result A wrapper around the `FlutterResult` callback which ensures the callback is called
-/// on the main dispatch queue.
-- (void)handleMethodCallAsync:(FlutterMethodCall *)call result:(FlutterResult)result;
-
/// Called by the @c NSNotificationManager each time the device's orientation is changed.
///
/// @param notification @c NSNotification instance containing a reference to the `UIDevice` object
@@ -44,8 +37,10 @@
- (void)orientationChanged:(NSNotification *)notification;
/// Creates FLTCam on session queue and reports the creation result.
-/// @param createMethodCall the create method call
-/// @param result a thread safe flutter result wrapper object to report creation result.
-- (void)createCameraOnSessionQueueWithCreateMethodCall:(FlutterMethodCall *)createMethodCall
- result:(FlutterResult)result;
+/// @param name the name of the camera.
+/// @param settings the creation settings.
+/// @param completion the callback to inform the Dart side of the plugin of creation.
+- (void)createCameraOnSessionQueueWithName:(NSString *)name
+ settings:(FCPPlatformMediaSettings *)settings
+ completion:(void (^)(NSNumber *, FlutterError *))completion;
@end
diff --git a/packages/camera/camera_avfoundation/ios/Classes/CameraProperties.h b/packages/camera/camera_avfoundation/ios/Classes/CameraProperties.h
index e19f98faa2a..ea7a4a3438a 100644
--- a/packages/camera/camera_avfoundation/ios/Classes/CameraProperties.h
+++ b/packages/camera/camera_avfoundation/ios/Classes/CameraProperties.h
@@ -9,83 +9,19 @@
NS_ASSUME_NONNULL_BEGIN
-#pragma mark - flash mode
-
-/// Represents camera's flash mode. Mirrors `FlashMode` enum in flash_mode.dart.
-typedef NS_ENUM(NSInteger, FLTFlashMode) {
- FLTFlashModeOff,
- FLTFlashModeAuto,
- FLTFlashModeAlways,
- FLTFlashModeTorch,
- // This should never occur; it indicates an unknown value was received over
- // the platform channel.
- FLTFlashModeInvalid,
-};
-
-/// Gets FLTFlashMode from its string representation.
-/// @param mode a string representation of the FLTFlashMode.
-extern FLTFlashMode FLTGetFLTFlashModeForString(NSString *mode);
-
/// Gets AVCaptureFlashMode from FLTFlashMode.
/// @param mode flash mode.
-extern AVCaptureFlashMode FLTGetAVCaptureFlashModeForFLTFlashMode(FLTFlashMode mode);
-
-#pragma mark - exposure mode
-
-/// Gets FCPPlatformExposureMode from its string representation.
-/// @param mode a string representation of the exposure mode.
-extern FCPPlatformExposureMode FCPGetExposureModeForString(NSString *mode);
-
-#pragma mark - focus mode
-
-/// Gets FCPPlatformFocusMode from its string representation.
-/// @param mode a string representation of focus mode.
-extern FCPPlatformFocusMode FCPGetFocusModeForString(NSString *mode);
-
-#pragma mark - device orientation
+extern AVCaptureFlashMode FCPGetAVCaptureFlashModeForPigeonFlashMode(FCPPlatformFlashMode mode);
-/// Gets UIDeviceOrientation from its string representation.
-extern UIDeviceOrientation FLTGetUIDeviceOrientationForString(NSString *orientation);
+/// Gets UIDeviceOrientation from its Pigeon representation.
+extern UIDeviceOrientation FCPGetUIDeviceOrientationForPigeonDeviceOrientation(
+ FCPPlatformDeviceOrientation orientation);
/// Gets a Pigeon representation of UIDeviceOrientation.
extern FCPPlatformDeviceOrientation FCPGetPigeonDeviceOrientationForOrientation(
UIDeviceOrientation orientation);
-#pragma mark - resolution preset
-
-/// Represents camera's resolution present. Mirrors ResolutionPreset in camera.dart.
-typedef NS_ENUM(NSInteger, FLTResolutionPreset) {
- FLTResolutionPresetVeryLow,
- FLTResolutionPresetLow,
- FLTResolutionPresetMedium,
- FLTResolutionPresetHigh,
- FLTResolutionPresetVeryHigh,
- FLTResolutionPresetUltraHigh,
- FLTResolutionPresetMax,
- // This should never occur; it indicates an unknown value was received over
- // the platform channel.
- FLTResolutionPresetInvalid,
-};
-
-/// Gets FLTResolutionPreset from its string representation.
-/// @param preset a string representation of FLTResolutionPreset.
-extern FLTResolutionPreset FLTGetFLTResolutionPresetForString(NSString *preset);
-
-#pragma mark - video format
-
-/// Gets VideoFormat from its string representation.
-extern OSType FLTGetVideoFormatFromString(NSString *videoFormatString);
-
-/// Represents image format. Mirrors ImageFileFormat in camera.dart.
-typedef NS_ENUM(NSInteger, FCPFileFormat) {
- FCPFileFormatJPEG,
- FCPFileFormatHEIF,
- FCPFileFormatInvalid,
-};
-
-#pragma mark - image extension
-
-/// Gets a string representation of ImageFileFormat.
-extern FCPFileFormat FCPGetFileFormatFromString(NSString *fileFormatString);
+/// Gets VideoFormat from its Pigeon representation.
+extern OSType FCPGetPixelFormatForPigeonFormat(FCPPlatformImageFormatGroup imageFormat);
NS_ASSUME_NONNULL_END
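With the string-parsing helpers gone, conversions go directly between Pigeon enums and AVFoundation/UIKit values. A short usage sketch, assuming the enum cases generated in messages.g.h that this patch already references:

    UIDeviceOrientation deviceOrientation = FCPGetUIDeviceOrientationForPigeonDeviceOrientation(
        FCPPlatformDeviceOrientationPortraitUp);
    OSType pixelFormat = FCPGetPixelFormatForPigeonFormat(FCPPlatformImageFormatGroupYuv420);
    // pixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange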
diff --git a/packages/camera/camera_avfoundation/ios/Classes/CameraProperties.m b/packages/camera/camera_avfoundation/ios/Classes/CameraProperties.m
index e068c186474..5aa1f25bb03 100644
--- a/packages/camera/camera_avfoundation/ios/Classes/CameraProperties.m
+++ b/packages/camera/camera_avfoundation/ios/Classes/CameraProperties.m
@@ -4,78 +4,32 @@
#import "CameraProperties.h"
-#pragma mark - flash mode
-
-FLTFlashMode FLTGetFLTFlashModeForString(NSString *mode) {
- if ([mode isEqualToString:@"off"]) {
- return FLTFlashModeOff;
- } else if ([mode isEqualToString:@"auto"]) {
- return FLTFlashModeAuto;
- } else if ([mode isEqualToString:@"always"]) {
- return FLTFlashModeAlways;
- } else if ([mode isEqualToString:@"torch"]) {
- return FLTFlashModeTorch;
- } else {
- return FLTFlashModeInvalid;
- }
-}
-
-AVCaptureFlashMode FLTGetAVCaptureFlashModeForFLTFlashMode(FLTFlashMode mode) {
+AVCaptureFlashMode FCPGetAVCaptureFlashModeForPigeonFlashMode(FCPPlatformFlashMode mode) {
switch (mode) {
- case FLTFlashModeOff:
+ case FCPPlatformFlashModeOff:
return AVCaptureFlashModeOff;
- case FLTFlashModeAuto:
+ case FCPPlatformFlashModeAuto:
return AVCaptureFlashModeAuto;
- case FLTFlashModeAlways:
+ case FCPPlatformFlashModeAlways:
return AVCaptureFlashModeOn;
- case FLTFlashModeTorch:
- default:
+ case FCPPlatformFlashModeTorch:
+ NSCAssert(false, @"This mode cannot be converted, and requires custom handling.");
return -1;
}
}
-#pragma mark - exposure mode
-
-FCPPlatformExposureMode FCPGetExposureModeForString(NSString *mode) {
- if ([mode isEqualToString:@"auto"]) {
- return FCPPlatformExposureModeAuto;
- } else if ([mode isEqualToString:@"locked"]) {
- return FCPPlatformExposureModeLocked;
- } else {
- // This should be unreachable; see _serializeExposureMode in avfoundation_camera.dart.
- NSCAssert(false, @"Unsupported exposure mode");
- return FCPPlatformExposureModeAuto;
- }
-}
-
-#pragma mark - focus mode
-
-FCPPlatformFocusMode FCPGetFocusModeForString(NSString *mode) {
- if ([mode isEqualToString:@"auto"]) {
- return FCPPlatformFocusModeAuto;
- } else if ([mode isEqualToString:@"locked"]) {
- return FCPPlatformFocusModeLocked;
- } else {
- // This should be unreachable; see _serializeFocusMode in avfoundation_camera.dart.
- NSCAssert(false, @"Unsupported focus mode");
- return FCPPlatformFocusModeAuto;
- }
-}
-
-#pragma mark - device orientation
-
-UIDeviceOrientation FLTGetUIDeviceOrientationForString(NSString *orientation) {
- if ([orientation isEqualToString:@"portraitDown"]) {
- return UIDeviceOrientationPortraitUpsideDown;
- } else if ([orientation isEqualToString:@"landscapeLeft"]) {
- return UIDeviceOrientationLandscapeLeft;
- } else if ([orientation isEqualToString:@"landscapeRight"]) {
- return UIDeviceOrientationLandscapeRight;
- } else if ([orientation isEqualToString:@"portraitUp"]) {
- return UIDeviceOrientationPortrait;
- } else {
- return UIDeviceOrientationUnknown;
- }
+UIDeviceOrientation FCPGetUIDeviceOrientationForPigeonDeviceOrientation(
+ FCPPlatformDeviceOrientation orientation) {
+ switch (orientation) {
+ case FCPPlatformDeviceOrientationPortraitDown:
+ return UIDeviceOrientationPortraitUpsideDown;
+ case FCPPlatformDeviceOrientationLandscapeLeft:
+ return UIDeviceOrientationLandscapeLeft;
+ case FCPPlatformDeviceOrientationLandscapeRight:
+ return UIDeviceOrientationLandscapeRight;
+ case FCPPlatformDeviceOrientationPortraitUp:
+ return UIDeviceOrientationPortrait;
+ };
}
FCPPlatformDeviceOrientation FCPGetPigeonDeviceOrientationForOrientation(
@@ -93,49 +47,11 @@ FCPPlatformDeviceOrientation FCPGetPigeonDeviceOrientationForOrientation(
};
}
-#pragma mark - resolution preset
-
-FLTResolutionPreset FLTGetFLTResolutionPresetForString(NSString *preset) {
- if ([preset isEqualToString:@"veryLow"]) {
- return FLTResolutionPresetVeryLow;
- } else if ([preset isEqualToString:@"low"]) {
- return FLTResolutionPresetLow;
- } else if ([preset isEqualToString:@"medium"]) {
- return FLTResolutionPresetMedium;
- } else if ([preset isEqualToString:@"high"]) {
- return FLTResolutionPresetHigh;
- } else if ([preset isEqualToString:@"veryHigh"]) {
- return FLTResolutionPresetVeryHigh;
- } else if ([preset isEqualToString:@"ultraHigh"]) {
- return FLTResolutionPresetUltraHigh;
- } else if ([preset isEqualToString:@"max"]) {
- return FLTResolutionPresetMax;
- } else {
- return FLTResolutionPresetInvalid;
- }
-}
-
-#pragma mark - video format
-
-OSType FLTGetVideoFormatFromString(NSString *videoFormatString) {
- if ([videoFormatString isEqualToString:@"bgra8888"]) {
- return kCVPixelFormatType_32BGRA;
- } else if ([videoFormatString isEqualToString:@"yuv420"]) {
- return kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange;
- } else {
- NSLog(@"The selected imageFormatGroup is not supported by iOS. Defaulting to brga8888");
- return kCVPixelFormatType_32BGRA;
- }
-}
-
-#pragma mark - file format
-
-FCPFileFormat FCPGetFileFormatFromString(NSString *fileFormatString) {
- if ([fileFormatString isEqualToString:@"jpg"]) {
- return FCPFileFormatJPEG;
- } else if ([fileFormatString isEqualToString:@"heif"]) {
- return FCPFileFormatHEIF;
- } else {
- return FCPFileFormatInvalid;
+OSType FCPGetPixelFormatForPigeonFormat(FCPPlatformImageFormatGroup imageFormat) {
+ switch (imageFormat) {
+ case FCPPlatformImageFormatGroupBgra8888:
+ return kCVPixelFormatType_32BGRA;
+ case FCPPlatformImageFormatGroupYuv420:
+ return kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange;
}
}
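Note that FCPGetAVCaptureFlashModeForPigeonFlashMode now asserts on the torch case instead of falling through, so callers are expected to branch on torch before converting. The guard below is an assumption about caller behavior; the patch itself only shows the assert:

    if (mode == FCPPlatformFlashModeTorch) {
      // Torch is driven through the capture device's torch mode, not AVCaptureFlashMode.
    } else {
      AVCaptureFlashMode avMode = FCPGetAVCaptureFlashModeForPigeonFlashMode(mode);
      // Apply avMode to the photo settings / device configuration.
    }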
diff --git a/packages/camera/camera_avfoundation/ios/Classes/FLTCam.h b/packages/camera/camera_avfoundation/ios/Classes/FLTCam.h
index ddc1e25ded1..d8f97926b77 100644
--- a/packages/camera/camera_avfoundation/ios/Classes/FLTCam.h
+++ b/packages/camera/camera_avfoundation/ios/Classes/FLTCam.h
@@ -7,9 +7,7 @@
@import Flutter;
#import "CameraProperties.h"
-#import "FLTCamMediaSettings.h"
#import "FLTCamMediaSettingsAVWrapper.h"
-#import "FLTThreadSafeTextureRegistry.h"
#import "messages.g.h"
NS_ASSUME_NONNULL_BEGIN
@@ -24,17 +22,17 @@ NS_ASSUME_NONNULL_BEGIN
/// The API instance used to communicate with the Dart side of the plugin. Once initially set, this
/// should only ever be accessed on the main thread.
@property(nonatomic) FCPCameraEventApi *dartAPI;
-@property(assign, nonatomic) FLTResolutionPreset resolutionPreset;
@property(assign, nonatomic) FCPPlatformExposureMode exposureMode;
@property(assign, nonatomic) FCPPlatformFocusMode focusMode;
-@property(assign, nonatomic) FLTFlashMode flashMode;
+@property(assign, nonatomic) FCPPlatformFlashMode flashMode;
// Format used for video and image streaming.
@property(assign, nonatomic) FourCharCode videoFormat;
-@property(assign, nonatomic) FCPFileFormat fileFormat;
+@property(assign, nonatomic) FCPPlatformImageFileFormat fileFormat;
+@property(assign, nonatomic) CGFloat minimumAvailableZoomFactor;
+@property(assign, nonatomic) CGFloat maximumAvailableZoomFactor;
/// Initializes an `FLTCam` instance.
/// @param cameraName a name used to uniquely identify the camera.
-/// @param resolutionPreset the resolution preset
/// @param mediaSettings the media settings configuration parameters
/// @param mediaSettingsAVWrapper AVFoundation wrapper to perform media settings related operations
/// (for dependency injection in unit tests).
@@ -42,8 +40,7 @@ NS_ASSUME_NONNULL_BEGIN
/// @param captureSessionQueue the queue on which camera's capture session operations happen.
/// @param error report to the caller if any error happened creating the camera.
- (instancetype)initWithCameraName:(NSString *)cameraName
- resolutionPreset:(NSString *)resolutionPreset
- mediaSettings:(FLTCamMediaSettings *)mediaSettings
+ mediaSettings:(FCPPlatformMediaSettings *)mediaSettings
mediaSettingsAVWrapper:(FLTCamMediaSettingsAVWrapper *)mediaSettingsAVWrapper
orientation:(UIDeviceOrientation)orientation
captureSessionQueue:(dispatch_queue_t)captureSessionQueue
@@ -54,26 +51,27 @@ NS_ASSUME_NONNULL_BEGIN
- (void)start;
- (void)stop;
- (void)setDeviceOrientation:(UIDeviceOrientation)orientation;
-- (void)captureToFile:(FlutterResult)result;
+- (void)captureToFileWithCompletion:(void (^)(NSString *_Nullable,
+ FlutterError *_Nullable))completion;
- (void)close;
-- (void)startVideoRecordingWithResult:(FlutterResult)result;
-- (void)setImageFileFormat:(FCPFileFormat)fileFormat;
+- (void)setImageFileFormat:(FCPPlatformImageFileFormat)fileFormat;
/// Starts recording a video with an optional streaming messenger.
-/// If the messenger is non-null then it will be called for each
+/// If the messenger is non-nil then it will be called for each
/// captured frame, allowing streaming concurrently with recording.
///
/// @param messenger Nullable messenger for capturing each frame.
-- (void)startVideoRecordingWithResult:(FlutterResult)result
- messengerForStreaming:(nullable NSObject<FlutterBinaryMessenger> *)messenger;
-- (void)stopVideoRecordingWithResult:(FlutterResult)result;
-- (void)pauseVideoRecordingWithResult:(FlutterResult)result;
-- (void)resumeVideoRecordingWithResult:(FlutterResult)result;
-- (void)lockCaptureOrientationWithResult:(FlutterResult)result
- orientation:(NSString *)orientationStr;
-- (void)unlockCaptureOrientationWithResult:(FlutterResult)result;
-- (void)setFlashModeWithResult:(FlutterResult)result mode:(NSString *)modeStr;
-- (void)setExposureModeWithResult:(FlutterResult)result mode:(NSString *)modeStr;
-- (void)setFocusModeWithResult:(FlutterResult)result mode:(NSString *)modeStr;
+- (void)startVideoRecordingWithCompletion:(void (^)(FlutterError *_Nullable))completion
+ messengerForStreaming:(nullable NSObject<FlutterBinaryMessenger> *)messenger;
+- (void)stopVideoRecordingWithCompletion:(void (^)(NSString *_Nullable,
+ FlutterError *_Nullable))completion;
+- (void)pauseVideoRecording;
+- (void)resumeVideoRecording;
+- (void)lockCaptureOrientation:(FCPPlatformDeviceOrientation)orientation;
+- (void)unlockCaptureOrientation;
+- (void)setFlashMode:(FCPPlatformFlashMode)mode
+ withCompletion:(void (^)(FlutterError *_Nullable))completion;
+- (void)setExposureMode:(FCPPlatformExposureMode)mode;
+- (void)setFocusMode:(FCPPlatformFocusMode)mode;
- (void)applyFocusMode;
/// Acknowledges the receipt of one image stream frame.
@@ -95,17 +93,26 @@ NS_ASSUME_NONNULL_BEGIN
/// @param focusMode The focus mode that should be applied to the @captureDevice instance.
/// @param captureDevice The AVCaptureDevice to which the @focusMode will be applied.
- (void)applyFocusMode:(FCPPlatformFocusMode)focusMode onDevice:(AVCaptureDevice *)captureDevice;
-- (void)pausePreviewWithResult:(FlutterResult)result;
-- (void)resumePreviewWithResult:(FlutterResult)result;
-- (void)setDescriptionWhileRecording:(NSString *)cameraName result:(FlutterResult)result;
-- (void)setExposurePointWithResult:(FlutterResult)result x:(double)x y:(double)y;
-- (void)setFocusPointWithResult:(FlutterResult)result x:(double)x y:(double)y;
-- (void)setExposureOffsetWithResult:(FlutterResult)result offset:(double)offset;
+- (void)pausePreview;
+- (void)resumePreview;
+- (void)setDescriptionWhileRecording:(NSString *)cameraName
+ withCompletion:(void (^)(FlutterError *_Nullable))completion;
+
+/// Sets the exposure point, in a (0,1) coordinate system.
+///
+/// If @c point is nil, the exposure point will reset to the center.
+- (void)setExposurePoint:(nullable FCPPlatformPoint *)point
+ withCompletion:(void (^)(FlutterError *_Nullable))completion;
+
+/// Sets the focus point, in a (0,1) coordinate system.
+///
+/// If @c point is nil, the focus point will reset to the center.
+- (void)setFocusPoint:(nullable FCPPlatformPoint *)point
+ withCompletion:(void (^)(FlutterError *_Nullable))completion;
+- (void)setExposureOffset:(double)offset;
- (void)startImageStreamWithMessenger:(NSObject<FlutterBinaryMessenger> *)messenger;
- (void)stopImageStream;
-- (void)getMaxZoomLevelWithResult:(FlutterResult)result;
-- (void)getMinZoomLevelWithResult:(FlutterResult)result;
-- (void)setZoomLevel:(CGFloat)zoom Result:(FlutterResult)result;
+- (void)setZoomLevel:(CGFloat)zoom withCompletion:(void (^)(FlutterError *_Nullable))completion;
- (void)setUpCaptureSessionForAudio;
@end
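The FLTCam interface mirrors the same migration: typed enums in, completion blocks out, and plain setters where no error can occur. A short usage sketch against the declarations above (the zoom value is illustrative):

    [cam setFlashMode:FCPPlatformFlashModeAuto
       withCompletion:^(FlutterError *_Nullable error) {
         // A nil error indicates the mode was applied.
       }];
    [cam setExposureMode:FCPPlatformExposureModeAuto];  // synchronous; no completion block in the new API
    [cam setZoomLevel:2.0 withCompletion:^(FlutterError *_Nullable error) {
    }];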
diff --git a/packages/camera/camera_avfoundation/ios/Classes/FLTCam.m b/packages/camera/camera_avfoundation/ios/Classes/FLTCam.m
index f65af079418..45ab3e08e66 100644
--- a/packages/camera/camera_avfoundation/ios/Classes/FLTCam.m
+++ b/packages/camera/camera_avfoundation/ios/Classes/FLTCam.m
@@ -51,7 +51,7 @@ @interface FLTCam ()
@property(readonly, nonatomic) int64_t textureId;
-@property(readonly, nonatomic) FLTCamMediaSettings *mediaSettings;
+@property(readonly, nonatomic) FCPPlatformMediaSettings *mediaSettings;
@property(readonly, nonatomic) FLTCamMediaSettingsAVWrapper *mediaSettingsAVWrapper;
@property(nonatomic) FLTImageStreamHandler *imageStreamHandler;
@property(readonly, nonatomic) AVCaptureSession *videoCaptureSession;
@@ -114,14 +114,12 @@ @implementation FLTCam
NSString *const errorMethod = @"error";
- (instancetype)initWithCameraName:(NSString *)cameraName
- resolutionPreset:(NSString *)resolutionPreset
- mediaSettings:(FLTCamMediaSettings *)mediaSettings
+ mediaSettings:(FCPPlatformMediaSettings *)mediaSettings
mediaSettingsAVWrapper:(FLTCamMediaSettingsAVWrapper *)mediaSettingsAVWrapper
orientation:(UIDeviceOrientation)orientation
captureSessionQueue:(dispatch_queue_t)captureSessionQueue
error:(NSError **)error {
return [self initWithCameraName:cameraName
- resolutionPreset:resolutionPreset
mediaSettings:mediaSettings
mediaSettingsAVWrapper:mediaSettingsAVWrapper
orientation:orientation
@@ -132,16 +130,14 @@ - (instancetype)initWithCameraName:(NSString *)cameraName
}
- (instancetype)initWithCameraName:(NSString *)cameraName
- resolutionPreset:(NSString *)resolutionPreset
- mediaSettings:(FLTCamMediaSettings *)mediaSettings
+ mediaSettings:(FCPPlatformMediaSettings *)mediaSettings
mediaSettingsAVWrapper:(FLTCamMediaSettingsAVWrapper *)mediaSettingsAVWrapper
orientation:(UIDeviceOrientation)orientation
videoCaptureSession:(AVCaptureSession *)videoCaptureSession
audioCaptureSession:(AVCaptureSession *)audioCaptureSession
captureSessionQueue:(dispatch_queue_t)captureSessionQueue
error:(NSError **)error {
- return [self initWithResolutionPreset:resolutionPreset
- mediaSettings:mediaSettings
+ return [self initWithMediaSettings:mediaSettings
mediaSettingsAVWrapper:mediaSettingsAVWrapper
orientation:orientation
videoCaptureSession:videoCaptureSession
@@ -156,30 +152,17 @@ - (instancetype)initWithCameraName:(NSString *)cameraName
error:error];
}
-- (instancetype)initWithResolutionPreset:(NSString *)resolutionPreset
- mediaSettings:(FLTCamMediaSettings *)mediaSettings
- mediaSettingsAVWrapper:(FLTCamMediaSettingsAVWrapper *)mediaSettingsAVWrapper
- orientation:(UIDeviceOrientation)orientation
- videoCaptureSession:(AVCaptureSession *)videoCaptureSession
- audioCaptureSession:(AVCaptureSession *)audioCaptureSession
- captureSessionQueue:(dispatch_queue_t)captureSessionQueue
- captureDeviceFactory:(CaptureDeviceFactory)captureDeviceFactory
- videoDimensionsForFormat:(VideoDimensionsForFormat)videoDimensionsForFormat
- error:(NSError **)error {
+- (instancetype)initWithMediaSettings:(FCPPlatformMediaSettings *)mediaSettings
+ mediaSettingsAVWrapper:(FLTCamMediaSettingsAVWrapper *)mediaSettingsAVWrapper
+ orientation:(UIDeviceOrientation)orientation
+ videoCaptureSession:(AVCaptureSession *)videoCaptureSession
+ audioCaptureSession:(AVCaptureSession *)audioCaptureSession
+ captureSessionQueue:(dispatch_queue_t)captureSessionQueue
+ captureDeviceFactory:(CaptureDeviceFactory)captureDeviceFactory
+ videoDimensionsForFormat:(VideoDimensionsForFormat)videoDimensionsForFormat
+ error:(NSError **)error {
self = [super init];
NSAssert(self, @"super init cannot be nil");
- _resolutionPreset = FLTGetFLTResolutionPresetForString(resolutionPreset);
- if (_resolutionPreset == FLTResolutionPresetInvalid) {
- *error = [NSError
- errorWithDomain:NSCocoaErrorDomain
- code:NSURLErrorUnknown
- userInfo:@{
- NSLocalizedDescriptionKey :
- [NSString stringWithFormat:@"Unknown resolution preset %@", resolutionPreset]
- }];
- return nil;
- }
-
_mediaSettings = mediaSettings;
_mediaSettingsAVWrapper = mediaSettingsAVWrapper;
@@ -192,14 +175,14 @@ - (instancetype)initWithResolutionPreset:(NSString *)resolutionPreset
_captureDeviceFactory = captureDeviceFactory;
_captureDevice = captureDeviceFactory();
_videoDimensionsForFormat = videoDimensionsForFormat;
- _flashMode = _captureDevice.hasFlash ? FLTFlashModeAuto : FLTFlashModeOff;
+ _flashMode = _captureDevice.hasFlash ? FCPPlatformFlashModeAuto : FCPPlatformFlashModeOff;
_exposureMode = FCPPlatformExposureModeAuto;
_focusMode = FCPPlatformFocusModeAuto;
_lockedCaptureOrientation = UIDeviceOrientationUnknown;
_deviceOrientation = orientation;
_videoFormat = kCVPixelFormatType_32BGRA;
_inProgressSavePhotoDelegates = [NSMutableDictionary dictionary];
- _fileFormat = FCPFileFormatJPEG;
+ _fileFormat = FCPPlatformImageFileFormatJpeg;
// To limit memory consumption, limit the number of frames pending processing.
// After some testing, 4 was determined to be the best maximum value.
@@ -236,7 +219,7 @@ - (instancetype)initWithResolutionPreset:(NSString *)resolutionPreset
// If _resolutionPreset is not supported by camera there is
// fallback to lower resolution presets.
// If none can be selected there is error condition.
- if (![self setCaptureSessionPreset:_resolutionPreset withError:error]) {
+ if (![self setCaptureSessionPreset:_mediaSettings.resolutionPreset withError:error]) {
[_videoCaptureSession commitConfiguration];
[_captureDevice unlockForConfiguration];
return nil;
@@ -257,7 +240,7 @@ - (instancetype)initWithResolutionPreset:(NSString *)resolutionPreset
} else {
// If the frame rate is not important fall to a less restrictive
// behavior (no configuration locking).
- if (![self setCaptureSessionPreset:_resolutionPreset withError:error]) {
+ if (![self setCaptureSessionPreset:_mediaSettings.resolutionPreset withError:error]) {
return nil;
}
}
@@ -332,7 +315,7 @@ - (void)setVideoFormat:(OSType)videoFormat {
@{(NSString *)kCVPixelBufferPixelFormatTypeKey : @(videoFormat)};
}
-- (void)setImageFileFormat:(FCPFileFormat)fileFormat {
+- (void)setImageFileFormat:(FCPPlatformImageFileFormat)fileFormat {
_fileFormat = fileFormat;
}
@@ -370,10 +353,11 @@ - (void)updateOrientation:(UIDeviceOrientation)orientation
}
}
-- (void)captureToFile:(FlutterResult)result {
+- (void)captureToFileWithCompletion:(void (^)(NSString *_Nullable,
+ FlutterError *_Nullable))completion {
AVCapturePhotoSettings *settings = [AVCapturePhotoSettings photoSettings];
- if (_resolutionPreset == FLTResolutionPresetMax) {
+ if (self.mediaSettings.resolutionPreset == FCPPlatformResolutionPresetMax) {
[settings setHighResolutionPhotoEnabled:YES];
}
@@ -382,7 +366,7 @@ - (void)captureToFile:(FlutterResult)result {
BOOL isHEVCCodecAvailable =
[self.capturePhotoOutput.availablePhotoCodecTypes containsObject:AVVideoCodecTypeHEVC];
- if (_fileFormat == FCPFileFormatHEIF && isHEVCCodecAvailable) {
+ if (_fileFormat == FCPPlatformImageFileFormatHeif && isHEVCCodecAvailable) {
settings =
[AVCapturePhotoSettings photoSettingsWithFormat:@{AVVideoCodecKey : AVVideoCodecTypeHEVC}];
extension = @"heif";
@@ -390,7 +374,7 @@ - (void)captureToFile:(FlutterResult)result {
extension = @"jpg";
}
- AVCaptureFlashMode avFlashMode = FLTGetAVCaptureFlashModeForFLTFlashMode(_flashMode);
+ AVCaptureFlashMode avFlashMode = FCPGetAVCaptureFlashModeForPigeonFlashMode(_flashMode);
if (avFlashMode != -1) {
[settings setFlashMode:avFlashMode];
}
@@ -400,7 +384,7 @@ - (void)captureToFile:(FlutterResult)result {
prefix:@"CAP_"
error:error];
if (error) {
- result(FlutterErrorFromNSError(error));
+ completion(nil, FlutterErrorFromNSError(error));
return;
}
@@ -419,10 +403,10 @@ - (void)captureToFile:(FlutterResult)result {
});
if (error) {
- result(FlutterErrorFromNSError(error));
+ completion(nil, FlutterErrorFromNSError(error));
} else {
NSAssert(path, @"Path must not be nil if no error.");
- result(path);
+ completion(path, nil);
}
}];
@@ -477,9 +461,10 @@ - (NSString *)getTemporaryFilePathWithExtension:(NSString *)extension
return file;
}
-- (BOOL)setCaptureSessionPreset:(FLTResolutionPreset)resolutionPreset withError:(NSError **)error {
+- (BOOL)setCaptureSessionPreset:(FCPPlatformResolutionPreset)resolutionPreset
+ withError:(NSError **)error {
switch (resolutionPreset) {
- case FLTResolutionPresetMax: {
+ case FCPPlatformResolutionPresetMax: {
AVCaptureDeviceFormat *bestFormat =
[self highestResolutionFormatForCaptureDevice:_captureDevice];
if (bestFormat) {
@@ -497,7 +482,7 @@ - (BOOL)setCaptureSessionPreset:(FLTResolutionPreset)resolutionPreset withError:
}
}
}
- case FLTResolutionPresetUltraHigh:
+ case FCPPlatformResolutionPresetUltraHigh:
if ([_videoCaptureSession canSetSessionPreset:AVCaptureSessionPreset3840x2160]) {
_videoCaptureSession.sessionPreset = AVCaptureSessionPreset3840x2160;
_previewSize = CGSizeMake(3840, 2160);
@@ -510,25 +495,25 @@ - (BOOL)setCaptureSessionPreset:(FLTResolutionPreset)resolutionPreset withError:
_captureDevice.activeFormat.highResolutionStillImageDimensions.height);
break;
}
- case FLTResolutionPresetVeryHigh:
+ case FCPPlatformResolutionPresetVeryHigh:
if ([_videoCaptureSession canSetSessionPreset:AVCaptureSessionPreset1920x1080]) {
_videoCaptureSession.sessionPreset = AVCaptureSessionPreset1920x1080;
_previewSize = CGSizeMake(1920, 1080);
break;
}
- case FLTResolutionPresetHigh:
+ case FCPPlatformResolutionPresetHigh:
if ([_videoCaptureSession canSetSessionPreset:AVCaptureSessionPreset1280x720]) {
_videoCaptureSession.sessionPreset = AVCaptureSessionPreset1280x720;
_previewSize = CGSizeMake(1280, 720);
break;
}
- case FLTResolutionPresetMedium:
+ case FCPPlatformResolutionPresetMedium:
if ([_videoCaptureSession canSetSessionPreset:AVCaptureSessionPreset640x480]) {
_videoCaptureSession.sessionPreset = AVCaptureSessionPreset640x480;
_previewSize = CGSizeMake(640, 480);
break;
}
- case FLTResolutionPresetLow:
+ case FCPPlatformResolutionPresetLow:
if ([_videoCaptureSession canSetSessionPreset:AVCaptureSessionPreset352x288]) {
_videoCaptureSession.sessionPreset = AVCaptureSessionPreset352x288;
_previewSize = CGSizeMake(352, 288);
@@ -819,12 +804,8 @@ - (CVPixelBufferRef)copyPixelBuffer {
return pixelBuffer;
}
-- (void)startVideoRecordingWithResult:(FlutterResult)result {
- [self startVideoRecordingWithResult:result messengerForStreaming:nil];
-}
-
-- (void)startVideoRecordingWithResult:(FlutterResult)result
- messengerForStreaming:(nullable NSObject<FlutterBinaryMessenger> *)messenger {
+- (void)startVideoRecordingWithCompletion:(void (^)(FlutterError *_Nullable))completion
+ messengerForStreaming:(nullable NSObject<FlutterBinaryMessenger> *)messenger {
if (!_isRecording) {
if (messenger != nil) {
[self startImageStreamWithMessenger:messenger];
@@ -836,11 +817,13 @@ - (void)startVideoRecordingWithResult:(FlutterResult)result
prefix:@"REC_"
error:error];
if (error) {
- result(FlutterErrorFromNSError(error));
+ completion(FlutterErrorFromNSError(error));
return;
}
if (![self setupWriterForPath:_videoRecordingPath]) {
- result([FlutterError errorWithCode:@"IOError" message:@"Setup Writer Failed" details:nil]);
+ completion([FlutterError errorWithCode:@"IOError"
+ message:@"Setup Writer Failed"
+ details:nil]);
return;
}
_isRecording = YES;
@@ -849,13 +832,16 @@ - (void)startVideoRecordingWithResult:(FlutterResult)result
_audioTimeOffset = CMTimeMake(0, 1);
_videoIsDisconnected = NO;
_audioIsDisconnected = NO;
- result(nil);
+ completion(nil);
} else {
- result([FlutterError errorWithCode:@"Error" message:@"Video is already recording" details:nil]);
+ completion([FlutterError errorWithCode:@"Error"
+ message:@"Video is already recording"
+ details:nil]);
}
}
-- (void)stopVideoRecordingWithResult:(FlutterResult)result {
+- (void)stopVideoRecordingWithCompletion:(void (^)(NSString *_Nullable,
+ FlutterError *_Nullable))completion {
if (_isRecording) {
_isRecording = NO;
@@ -863,12 +849,12 @@ - (void)stopVideoRecordingWithResult:(FlutterResult)result {
[_videoWriter finishWritingWithCompletionHandler:^{
if (self->_videoWriter.status == AVAssetWriterStatusCompleted) {
[self updateOrientation];
- result(self->_videoRecordingPath);
+ completion(self->_videoRecordingPath, nil);
self->_videoRecordingPath = nil;
} else {
- result([FlutterError errorWithCode:@"IOError"
- message:@"AVAssetWriter could not finish writing!"
- details:nil]);
+ completion(nil, [FlutterError errorWithCode:@"IOError"
+ message:@"AVAssetWriter could not finish writing!"
+ details:nil]);
}
}];
}
@@ -877,75 +863,47 @@ - (void)stopVideoRecordingWithResult:(FlutterResult)result {
[NSError errorWithDomain:NSCocoaErrorDomain
code:NSURLErrorResourceUnavailable
userInfo:@{NSLocalizedDescriptionKey : @"Video is not recording!"}];
- result(FlutterErrorFromNSError(error));
+ completion(nil, FlutterErrorFromNSError(error));
}
}
-- (void)pauseVideoRecordingWithResult:(FlutterResult)result {
+- (void)pauseVideoRecording {
_isRecordingPaused = YES;
_videoIsDisconnected = YES;
_audioIsDisconnected = YES;
- result(nil);
}
-- (void)resumeVideoRecordingWithResult:(FlutterResult)result {
+- (void)resumeVideoRecording {
_isRecordingPaused = NO;
- result(nil);
}
-- (void)lockCaptureOrientationWithResult:(FlutterResult)result
- orientation:(NSString *)orientationStr {
- UIDeviceOrientation orientation = FLTGetUIDeviceOrientationForString(orientationStr);
- // "Unknown" should never be sent, so is used to represent an unexpected
- // value.
- if (orientation == UIDeviceOrientationUnknown) {
- result(FlutterErrorFromNSError([NSError
- errorWithDomain:NSCocoaErrorDomain
- code:NSURLErrorUnknown
- userInfo:@{
- NSLocalizedDescriptionKey :
- [NSString stringWithFormat:@"Unknown device orientation %@", orientationStr]
- }]));
- return;
- }
-
+- (void)lockCaptureOrientation:(FCPPlatformDeviceOrientation)pigeonOrientation {
+ UIDeviceOrientation orientation =
+ FCPGetUIDeviceOrientationForPigeonDeviceOrientation(pigeonOrientation);
if (_lockedCaptureOrientation != orientation) {
_lockedCaptureOrientation = orientation;
[self updateOrientation];
}
-
- result(nil);
}
-- (void)unlockCaptureOrientationWithResult:(FlutterResult)result {
+- (void)unlockCaptureOrientation {
_lockedCaptureOrientation = UIDeviceOrientationUnknown;
[self updateOrientation];
- result(nil);
}
-- (void)setFlashModeWithResult:(FlutterResult)result mode:(NSString *)modeStr {
- FLTFlashMode mode = FLTGetFLTFlashModeForString(modeStr);
- if (mode == FLTFlashModeInvalid) {
- result(FlutterErrorFromNSError([NSError
- errorWithDomain:NSCocoaErrorDomain
- code:NSURLErrorUnknown
- userInfo:@{
- NSLocalizedDescriptionKey :
- [NSString stringWithFormat:@"Unknown flash mode %@", modeStr]
- }]));
- return;
- }
- if (mode == FLTFlashModeTorch) {
+- (void)setFlashMode:(FCPPlatformFlashMode)mode
+ withCompletion:(void (^)(FlutterError *_Nullable))completion {
+ if (mode == FCPPlatformFlashModeTorch) {
if (!_captureDevice.hasTorch) {
- result([FlutterError errorWithCode:@"setFlashModeFailed"
- message:@"Device does not support torch mode"
- details:nil]);
+ completion([FlutterError errorWithCode:@"setFlashModeFailed"
+ message:@"Device does not support torch mode"
+ details:nil]);
return;
}
if (!_captureDevice.isTorchAvailable) {
- result([FlutterError errorWithCode:@"setFlashModeFailed"
- message:@"Torch mode is currently not available"
- details:nil]);
+ completion([FlutterError errorWithCode:@"setFlashModeFailed"
+ message:@"Torch mode is currently not available"
+ details:nil]);
return;
}
if (_captureDevice.torchMode != AVCaptureTorchModeOn) {
@@ -955,17 +913,17 @@ - (void)setFlashModeWithResult:(FlutterResult)result mode:(NSString *)modeStr {
}
} else {
if (!_captureDevice.hasFlash) {
- result([FlutterError errorWithCode:@"setFlashModeFailed"
- message:@"Device does not have flash capabilities"
- details:nil]);
+ completion([FlutterError errorWithCode:@"setFlashModeFailed"
+ message:@"Device does not have flash capabilities"
+ details:nil]);
return;
}
- AVCaptureFlashMode avFlashMode = FLTGetAVCaptureFlashModeForFLTFlashMode(mode);
+ AVCaptureFlashMode avFlashMode = FCPGetAVCaptureFlashModeForPigeonFlashMode(mode);
if (![_capturePhotoOutput.supportedFlashModes
containsObject:[NSNumber numberWithInt:((int)avFlashMode)]]) {
- result([FlutterError errorWithCode:@"setFlashModeFailed"
- message:@"Device does not support this specific flash mode"
- details:nil]);
+ completion([FlutterError errorWithCode:@"setFlashModeFailed"
+ message:@"Device does not support this specific flash mode"
+ details:nil]);
return;
}
if (_captureDevice.torchMode != AVCaptureTorchModeOff) {
@@ -975,14 +933,12 @@ - (void)setFlashModeWithResult:(FlutterResult)result mode:(NSString *)modeStr {
}
}
_flashMode = mode;
- result(nil);
+ completion(nil);
}
-- (void)setExposureModeWithResult:(FlutterResult)result mode:(NSString *)modeStr {
- FCPPlatformExposureMode mode = FCPGetExposureModeForString(modeStr);
+- (void)setExposureMode:(FCPPlatformExposureMode)mode {
_exposureMode = mode;
[self applyExposureMode];
- result(nil);
}
- (void)applyExposureMode {
@@ -1002,11 +958,9 @@ - (void)applyExposureMode {
[_captureDevice unlockForConfiguration];
}
-- (void)setFocusModeWithResult:(FlutterResult)result mode:(NSString *)modeStr {
- FCPPlatformFocusMode mode = FCPGetFocusModeForString(modeStr);
+- (void)setFocusMode:(FCPPlatformFocusMode)mode {
_focusMode = mode;
[self applyFocusMode];
- result(nil);
}
- (void)applyFocusMode {
@@ -1032,21 +986,20 @@ - (void)applyFocusMode:(FCPPlatformFocusMode)focusMode onDevice:(AVCaptureDevice
[captureDevice unlockForConfiguration];
}
-- (void)pausePreviewWithResult:(FlutterResult)result {
+- (void)pausePreview {
_isPreviewPaused = true;
- result(nil);
}
-- (void)resumePreviewWithResult:(FlutterResult)result {
+- (void)resumePreview {
_isPreviewPaused = false;
- result(nil);
}
-- (void)setDescriptionWhileRecording:(NSString *)cameraName result:(FlutterResult)result {
+- (void)setDescriptionWhileRecording:(NSString *)cameraName
+ withCompletion:(void (^)(FlutterError *_Nullable))completion {
if (!_isRecording) {
- result([FlutterError errorWithCode:@"setDescriptionWhileRecordingFailed"
- message:@"Device was not recording"
- details:nil]);
+ completion([FlutterError errorWithCode:@"setDescriptionWhileRecordingFailed"
+ message:@"Device was not recording"
+ details:nil]);
return;
}
@@ -1066,7 +1019,7 @@ - (void)setDescriptionWhileRecording:(NSString *)cameraName result:(FlutterResul
NSError *error = nil;
AVCaptureConnection *newConnection = [self createConnection:&error];
if (error) {
- result(FlutterErrorFromNSError(error));
+ completion(FlutterErrorFromNSError(error));
return;
}
@@ -1077,41 +1030,41 @@ - (void)setDescriptionWhileRecording:(NSString *)cameraName result:(FlutterResul
// Add the new connections to the session.
if (![_videoCaptureSession canAddInput:_captureVideoInput])
- result([FlutterError errorWithCode:@"VideoError"
- message:@"Unable switch video input"
- details:nil]);
+ completion([FlutterError errorWithCode:@"VideoError"
+ message:@"Unable switch video input"
+ details:nil]);
[_videoCaptureSession addInputWithNoConnections:_captureVideoInput];
if (![_videoCaptureSession canAddOutput:_captureVideoOutput])
- result([FlutterError errorWithCode:@"VideoError"
- message:@"Unable switch video output"
- details:nil]);
+ completion([FlutterError errorWithCode:@"VideoError"
+ message:@"Unable switch video output"
+ details:nil]);
[_videoCaptureSession addOutputWithNoConnections:_captureVideoOutput];
if (![_videoCaptureSession canAddConnection:newConnection])
- result([FlutterError errorWithCode:@"VideoError"
- message:@"Unable switch video connection"
- details:nil]);
+ completion([FlutterError errorWithCode:@"VideoError"
+ message:@"Unable switch video connection"
+ details:nil]);
[_videoCaptureSession addConnection:newConnection];
[_videoCaptureSession commitConfiguration];
- result(nil);
+ completion(nil);
}
-- (CGPoint)getCGPointForCoordsWithOrientation:(UIDeviceOrientation)orientation
- x:(double)x
- y:(double)y {
- double oldX = x, oldY = y;
+- (CGPoint)CGPointForPoint:(nonnull FCPPlatformPoint *)point
+ withOrientation:(UIDeviceOrientation)orientation {
+ double x = point.x;
+ double y = point.y;
switch (orientation) {
case UIDeviceOrientationPortrait: // 90 ccw
- y = 1 - oldX;
- x = oldY;
+ y = 1 - point.x;
+ x = point.y;
break;
case UIDeviceOrientationPortraitUpsideDown: // 90 cw
- x = 1 - oldY;
- y = oldX;
+ x = 1 - point.y;
+ y = point.x;
break;
case UIDeviceOrientationLandscapeRight: // 180
- x = 1 - x;
- y = 1 - y;
+ x = 1 - point.x;
+ y = 1 - point.y;
break;
case UIDeviceOrientationLandscapeLeft:
default:
@@ -1121,48 +1074,53 @@ - (CGPoint)getCGPointForCoordsWithOrientation:(UIDeviceOrientation)orientation
return CGPointMake(x, y);
}
-- (void)setExposurePointWithResult:(FlutterResult)result x:(double)x y:(double)y {
+- (void)setExposurePoint:(FCPPlatformPoint *)point
+ withCompletion:(void (^)(FlutterError *_Nullable))completion {
if (!_captureDevice.isExposurePointOfInterestSupported) {
- result([FlutterError errorWithCode:@"setExposurePointFailed"
- message:@"Device does not have exposure point capabilities"
- details:nil]);
+ completion([FlutterError errorWithCode:@"setExposurePointFailed"
+ message:@"Device does not have exposure point capabilities"
+ details:nil]);
return;
}
UIDeviceOrientation orientation = [[UIDevice currentDevice] orientation];
[_captureDevice lockForConfiguration:nil];
- [_captureDevice setExposurePointOfInterest:[self getCGPointForCoordsWithOrientation:orientation
- x:x
- y:y]];
+ // A nil point resets to the center.
+ [_captureDevice
+ setExposurePointOfInterest:[self CGPointForPoint:(point
+ ?: [FCPPlatformPoint makeWithX:0.5
+ y:0.5])
+ withOrientation:orientation]];
[_captureDevice unlockForConfiguration];
// Retrigger auto exposure
[self applyExposureMode];
- result(nil);
+ completion(nil);
}
-- (void)setFocusPointWithResult:(FlutterResult)result x:(double)x y:(double)y {
+- (void)setFocusPoint:(FCPPlatformPoint *)point
+ withCompletion:(void (^)(FlutterError *_Nullable))completion {
if (!_captureDevice.isFocusPointOfInterestSupported) {
- result([FlutterError errorWithCode:@"setFocusPointFailed"
- message:@"Device does not have focus point capabilities"
- details:nil]);
+ completion([FlutterError errorWithCode:@"setFocusPointFailed"
+ message:@"Device does not have focus point capabilities"
+ details:nil]);
return;
}
UIDeviceOrientation orientation = [[UIDevice currentDevice] orientation];
[_captureDevice lockForConfiguration:nil];
-
- [_captureDevice setFocusPointOfInterest:[self getCGPointForCoordsWithOrientation:orientation
- x:x
- y:y]];
+ // A nil point resets to the center.
+ [_captureDevice
+ setFocusPointOfInterest:[self
+ CGPointForPoint:(point ?: [FCPPlatformPoint makeWithX:0.5 y:0.5])
+ withOrientation:orientation]];
[_captureDevice unlockForConfiguration];
// Retrigger auto focus
[self applyFocusMode];
- result(nil);
+ completion(nil);
}
-- (void)setExposureOffsetWithResult:(FlutterResult)result offset:(double)offset {
+- (void)setExposureOffset:(double)offset {
[_captureDevice lockForConfiguration:nil];
[_captureDevice setExposureTargetBias:offset completionHandler:nil];
[_captureDevice unlockForConfiguration];
- result(@(offset));
}
- (void)startImageStreamWithMessenger:(NSObject<FlutterBinaryMessenger> *)messenger {
@@ -1214,46 +1172,34 @@ - (void)receivedImageStreamData {
self.streamingPendingFramesCount--;
}
-- (void)getMaxZoomLevelWithResult:(FlutterResult)result {
- CGFloat maxZoomFactor = [self getMaxAvailableZoomFactor];
-
- result([NSNumber numberWithFloat:maxZoomFactor]);
-}
-
-- (void)getMinZoomLevelWithResult:(FlutterResult)result {
- CGFloat minZoomFactor = [self getMinAvailableZoomFactor];
- result([NSNumber numberWithFloat:minZoomFactor]);
-}
-
-- (void)setZoomLevel:(CGFloat)zoom Result:(FlutterResult)result {
- CGFloat maxAvailableZoomFactor = [self getMaxAvailableZoomFactor];
- CGFloat minAvailableZoomFactor = [self getMinAvailableZoomFactor];
-
- if (maxAvailableZoomFactor < zoom || minAvailableZoomFactor > zoom) {
+- (void)setZoomLevel:(CGFloat)zoom withCompletion:(void (^)(FlutterError *_Nullable))completion {
+ if (_captureDevice.maxAvailableVideoZoomFactor < zoom ||
+ _captureDevice.minAvailableVideoZoomFactor > zoom) {
NSString *errorMessage = [NSString
stringWithFormat:@"Zoom level out of bounds (zoom level should be between %f and %f).",
- minAvailableZoomFactor, maxAvailableZoomFactor];
+ _captureDevice.minAvailableVideoZoomFactor,
+ _captureDevice.maxAvailableVideoZoomFactor];
- result([FlutterError errorWithCode:@"ZOOM_ERROR" message:errorMessage details:nil]);
+ completion([FlutterError errorWithCode:@"ZOOM_ERROR" message:errorMessage details:nil]);
return;
}
NSError *error = nil;
if (![_captureDevice lockForConfiguration:&error]) {
- result(FlutterErrorFromNSError(error));
+ completion(FlutterErrorFromNSError(error));
return;
}
_captureDevice.videoZoomFactor = zoom;
[_captureDevice unlockForConfiguration];
- result(nil);
+ completion(nil);
}
-- (CGFloat)getMinAvailableZoomFactor {
+- (CGFloat)minimumAvailableZoomFactor {
return _captureDevice.minAvailableVideoZoomFactor;
}
-- (CGFloat)getMaxAvailableZoomFactor {
+- (CGFloat)maximumAvailableZoomFactor {
return _captureDevice.maxAvailableVideoZoomFactor;
}
@@ -1335,7 +1281,7 @@ - (BOOL)setupWriterForPath:(NSString *)path {
[_audioOutput setSampleBufferDelegate:self queue:_captureSessionQueue];
}
- if (_flashMode == FLTFlashModeTorch) {
+ if (_flashMode == FCPPlatformFlashModeTorch) {
[self.captureDevice lockForConfiguration:nil];
[self.captureDevice setTorchMode:AVCaptureTorchModeOn];
[self.captureDevice unlockForConfiguration];
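
A minimal sketch of driving the rewritten zoom path above, assuming the renamed minimumAvailableZoomFactor/maximumAvailableZoomFactor accessors are exposed on FLTCam. The ApplyZoom helper and its clamping policy are illustrative, not part of the patch.

#import "FLTCam.h"

// Illustrative only: clamp a requested zoom into the device-supported range before
// calling the completion-based setter, so the ZOOM_ERROR branch above is never hit.
static void ApplyZoom(FLTCam *cam, CGFloat requestedZoom) {
  CGFloat zoom = MAX([cam minimumAvailableZoomFactor],
                     MIN(requestedZoom, [cam maximumAvailableZoomFactor]));
  [cam setZoomLevel:zoom
      withCompletion:^(FlutterError *_Nullable error) {
        if (error) {
          NSLog(@"setZoomLevel failed: %@", error.message);
        }
      }];
}
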
diff --git a/packages/camera/camera_avfoundation/ios/Classes/FLTCamMediaSettings.h b/packages/camera/camera_avfoundation/ios/Classes/FLTCamMediaSettings.h
deleted file mode 100644
index 004accfceb7..00000000000
--- a/packages/camera/camera_avfoundation/ios/Classes/FLTCamMediaSettings.h
+++ /dev/null
@@ -1,54 +0,0 @@
-// Copyright 2013 The Flutter Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-@import Foundation;
-
-NS_ASSUME_NONNULL_BEGIN
-
-/**
- * Media settings configuration parameters.
- */
-@interface FLTCamMediaSettings : NSObject
-
-/**
- * @property framesPerSecond optional frame rate of video being recorded.
- */
-@property(atomic, readonly, strong, nullable) NSNumber *framesPerSecond;
-
-/**
- * @property videoBitrate optional bitrate of video being recorded.
- */
-@property(atomic, readonly, strong, nullable) NSNumber *videoBitrate;
-
-/**
- * @property audioBitrate optional bitrate of audio being recorded.
- */
-@property(atomic, readonly, strong, nullable) NSNumber *audioBitrate;
-
-/**
- * @property enableAudio whether audio should be recorded.
- */
-@property(atomic, readonly) BOOL enableAudio;
-
-/**
- * @method initWithFramesPerSecond:videoBitrate:audioBitrate:enableAudio:
- *
- * @abstract Initialize `FLTCamMediaSettings`.
- *
- * @param framesPerSecond optional frame rate of video being recorded.
- * @param videoBitrate optional bitrate of video being recorded.
- * @param audioBitrate optional bitrate of audio being recorded.
- * @param enableAudio whether audio should be recorded.
- *
- * @result FLTCamMediaSettings instance
- */
-- (instancetype)initWithFramesPerSecond:(nullable NSNumber *)framesPerSecond
- videoBitrate:(nullable NSNumber *)videoBitrate
- audioBitrate:(nullable NSNumber *)audioBitrate
- enableAudio:(BOOL)enableAudio NS_DESIGNATED_INITIALIZER;
-
-- (instancetype)init NS_UNAVAILABLE;
-@end
-
-NS_ASSUME_NONNULL_END
diff --git a/packages/camera/camera_avfoundation/ios/Classes/FLTCamMediaSettings.m b/packages/camera/camera_avfoundation/ios/Classes/FLTCamMediaSettings.m
deleted file mode 100644
index 5c2ca5ae995..00000000000
--- a/packages/camera/camera_avfoundation/ios/Classes/FLTCamMediaSettings.m
+++ /dev/null
@@ -1,36 +0,0 @@
-// Copyright 2013 The Flutter Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#import "FLTCamMediaSettings.h"
-
-static void AssertPositiveNumberOrNil(NSNumber *_Nullable param, const char *_Nonnull paramName) {
- if (param != nil) {
- NSCAssert(!isnan([param doubleValue]), @"%s is NaN", paramName);
- NSCAssert([param doubleValue] > 0, @"%s is not positive: %@", paramName, param);
- }
-}
-
-@implementation FLTCamMediaSettings
-
-- (instancetype)initWithFramesPerSecond:(nullable NSNumber *)framesPerSecond
- videoBitrate:(nullable NSNumber *)videoBitrate
- audioBitrate:(nullable NSNumber *)audioBitrate
- enableAudio:(BOOL)enableAudio {
- self = [super init];
-
- if (self != nil) {
- AssertPositiveNumberOrNil(framesPerSecond, "framesPerSecond");
- AssertPositiveNumberOrNil(videoBitrate, "videoBitrate");
- AssertPositiveNumberOrNil(audioBitrate, "audioBitrate");
-
- _framesPerSecond = framesPerSecond;
- _videoBitrate = videoBitrate;
- _audioBitrate = audioBitrate;
- _enableAudio = enableAudio;
- }
-
- return self;
-}
-
-@end
diff --git a/packages/camera/camera_avfoundation/ios/Classes/FLTCam_Test.h b/packages/camera/camera_avfoundation/ios/Classes/FLTCam_Test.h
index ed9fad64d3e..d05838f49a7 100644
--- a/packages/camera/camera_avfoundation/ios/Classes/FLTCam_Test.h
+++ b/packages/camera/camera_avfoundation/ios/Classes/FLTCam_Test.h
@@ -55,8 +55,7 @@ typedef AVCaptureDevice * (^CaptureDeviceFactory)(void);
/// Initializes a camera instance.
/// Allows for injecting dependencies that are usually internal.
- (instancetype)initWithCameraName:(NSString *)cameraName
- resolutionPreset:(NSString *)resolutionPreset
- mediaSettings:(FLTCamMediaSettings *)mediaSettings
+ mediaSettings:(FCPPlatformMediaSettings *)mediaSettings
mediaSettingsAVWrapper:(FLTCamMediaSettingsAVWrapper *)mediaSettingsAVWrapper
orientation:(UIDeviceOrientation)orientation
videoCaptureSession:(AVCaptureSession *)videoCaptureSession
@@ -67,16 +66,15 @@ typedef AVCaptureDevice * (^CaptureDeviceFactory)(void);
/// Initializes a camera instance.
/// Allows for testing with specified resolution, audio preference, orientation,
/// and direct access to capture sessions and blocks.
-- (instancetype)initWithResolutionPreset:(NSString *)resolutionPreset
- mediaSettings:(FLTCamMediaSettings *)mediaSettings
- mediaSettingsAVWrapper:(FLTCamMediaSettingsAVWrapper *)mediaSettingsAVWrapper
- orientation:(UIDeviceOrientation)orientation
- videoCaptureSession:(AVCaptureSession *)videoCaptureSession
- audioCaptureSession:(AVCaptureSession *)audioCaptureSession
- captureSessionQueue:(dispatch_queue_t)captureSessionQueue
- captureDeviceFactory:(CaptureDeviceFactory)captureDeviceFactory
- videoDimensionsForFormat:(VideoDimensionsForFormat)videoDimensionsForFormat
- error:(NSError **)error;
+- (instancetype)initWithMediaSettings:(FCPPlatformMediaSettings *)mediaSettings
+ mediaSettingsAVWrapper:(FLTCamMediaSettingsAVWrapper *)mediaSettingsAVWrapper
+ orientation:(UIDeviceOrientation)orientation
+ videoCaptureSession:(AVCaptureSession *)videoCaptureSession
+ audioCaptureSession:(AVCaptureSession *)audioCaptureSession
+ captureSessionQueue:(dispatch_queue_t)captureSessionQueue
+ captureDeviceFactory:(CaptureDeviceFactory)captureDeviceFactory
+ videoDimensionsForFormat:(VideoDimensionsForFormat)videoDimensionsForFormat
+ error:(NSError **)error;
/// Start streaming images.
- (void)startImageStreamWithMessenger:(NSObject<FlutterBinaryMessenger> *)messenger
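
A hedged sketch of how the renamed test-only initializer above might be used to build an FLTCam with injected dependencies. The plain-init FLTCamMediaSettingsAVWrapper, the queue label, and the bodies of the factory and dimension blocks are assumptions for illustration, not taken from this patch.

#import "FLTCam_Test.h"

// Illustrative only: constructs a camera for tests using injected sessions and blocks.
static FLTCam *MakeTestCam(NSError **error) {
  FCPPlatformMediaSettings *settings = [FCPPlatformMediaSettings
      makeWithResolutionPreset:FCPPlatformResolutionPresetMedium
               framesPerSecond:nil
                  videoBitrate:nil
                  audioBitrate:nil
                   enableAudio:YES];
  return [[FLTCam alloc]
         initWithMediaSettings:settings
        mediaSettingsAVWrapper:[[FLTCamMediaSettingsAVWrapper alloc] init]  // assumed plain init
                   orientation:UIDeviceOrientationPortrait
           videoCaptureSession:[[AVCaptureSession alloc] init]
           audioCaptureSession:[[AVCaptureSession alloc] init]
           captureSessionQueue:dispatch_queue_create("test.capture", DISPATCH_QUEUE_SERIAL)
          captureDeviceFactory:^AVCaptureDevice *(void) {
            return [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
          }
      videoDimensionsForFormat:^CMVideoDimensions(AVCaptureDeviceFormat *format) {
            return CMVideoFormatDescriptionGetDimensions(format.formatDescription);
          }
                         error:error];
}
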
diff --git a/packages/camera/camera_avfoundation/ios/Classes/FLTThreadSafeTextureRegistry.h b/packages/camera/camera_avfoundation/ios/Classes/FLTThreadSafeTextureRegistry.h
deleted file mode 100644
index 2f80f684e42..00000000000
--- a/packages/camera/camera_avfoundation/ios/Classes/FLTThreadSafeTextureRegistry.h
+++ /dev/null
@@ -1,36 +0,0 @@
-// Copyright 2013 The Flutter Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#import <Flutter/Flutter.h>
-
-NS_ASSUME_NONNULL_BEGIN
-
-/// A thread safe wrapper for FlutterTextureRegistry that can be called from any thread, by
-/// dispatching its underlying engine calls to the main thread.
-@interface FLTThreadSafeTextureRegistry : NSObject
-
-/// Creates a FLTThreadSafeTextureRegistry by wrapping an object conforming to
-/// FlutterTextureRegistry.
-/// @param registry The FlutterTextureRegistry object to be wrapped.
- (instancetype)initWithTextureRegistry:(NSObject<FlutterTextureRegistry> *)registry;
-
-/// Registers a `FlutterTexture` on the main thread for usage in Flutter and returns an id that can
-/// be used to reference that texture when calling into Flutter with channels.
-///
-/// On success the completion block completes with the pointer to the registered texture, else with
-/// 0. The completion block runs on the main thread.
- (void)registerTexture:(NSObject<FlutterTexture> *)texture
- completion:(void (^)(int64_t))completion;
-
-/// Notifies the Flutter engine on the main thread that the given texture has been updated.
-- (void)textureFrameAvailable:(int64_t)textureId;
-
-/// Notifies the Flutter engine on the main thread to unregister a `FlutterTexture` that has been
-/// previously registered with `registerTexture:`.
-/// @param textureId The result that was previously returned from `registerTexture:`.
-- (void)unregisterTexture:(int64_t)textureId;
-
-@end
-
-NS_ASSUME_NONNULL_END
diff --git a/packages/camera/camera_avfoundation/ios/Classes/FLTThreadSafeTextureRegistry.m b/packages/camera/camera_avfoundation/ios/Classes/FLTThreadSafeTextureRegistry.m
deleted file mode 100644
index b82d566d740..00000000000
--- a/packages/camera/camera_avfoundation/ios/Classes/FLTThreadSafeTextureRegistry.m
+++ /dev/null
@@ -1,46 +0,0 @@
-// Copyright 2013 The Flutter Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#import "FLTThreadSafeTextureRegistry.h"
-#import "QueueUtils.h"
-
-@interface FLTThreadSafeTextureRegistry ()
-@property(nonatomic, strong) NSObject<FlutterTextureRegistry> *registry;
-@end
-
-@implementation FLTThreadSafeTextureRegistry
-
-- (instancetype)initWithTextureRegistry:(NSObject<FlutterTextureRegistry> *)registry {
- self = [super init];
- if (self) {
- _registry = registry;
- }
- return self;
-}
-
-- (void)registerTexture:(NSObject<FlutterTexture> *)texture
- completion:(void (^)(int64_t))completion {
- __weak typeof(self) weakSelf = self;
- FLTEnsureToRunOnMainQueue(^{
- typeof(self) strongSelf = weakSelf;
- if (!strongSelf) return;
- completion([strongSelf.registry registerTexture:texture]);
- });
-}
-
-- (void)textureFrameAvailable:(int64_t)textureId {
- __weak typeof(self) weakSelf = self;
- FLTEnsureToRunOnMainQueue(^{
- [weakSelf.registry textureFrameAvailable:textureId];
- });
-}
-
-- (void)unregisterTexture:(int64_t)textureId {
- __weak typeof(self) weakSelf = self;
- FLTEnsureToRunOnMainQueue(^{
- [weakSelf.registry unregisterTexture:textureId];
- });
-}
-
-@end
diff --git a/packages/camera/camera_avfoundation/ios/Classes/messages.g.h b/packages/camera/camera_avfoundation/ios/Classes/messages.g.h
index 4f17971bf7c..8e3dd431443 100644
--- a/packages/camera/camera_avfoundation/ios/Classes/messages.g.h
+++ b/packages/camera/camera_avfoundation/ios/Classes/messages.g.h
@@ -52,6 +52,19 @@ typedef NS_ENUM(NSUInteger, FCPPlatformExposureMode) {
- (instancetype)initWithValue:(FCPPlatformExposureMode)value;
@end
+typedef NS_ENUM(NSUInteger, FCPPlatformFlashMode) {
+ FCPPlatformFlashModeOff = 0,
+ FCPPlatformFlashModeAuto = 1,
+ FCPPlatformFlashModeAlways = 2,
+ FCPPlatformFlashModeTorch = 3,
+};
+
+/// Wrapper for FCPPlatformFlashMode to allow for nullability.
+@interface FCPPlatformFlashModeBox : NSObject
+@property(nonatomic, assign) FCPPlatformFlashMode value;
+- (instancetype)initWithValue:(FCPPlatformFlashMode)value;
+@end
+
typedef NS_ENUM(NSUInteger, FCPPlatformFocusMode) {
FCPPlatformFocusModeAuto = 0,
FCPPlatformFocusModeLocked = 1,
@@ -63,8 +76,48 @@ typedef NS_ENUM(NSUInteger, FCPPlatformFocusMode) {
- (instancetype)initWithValue:(FCPPlatformFocusMode)value;
@end
+/// Pigeon version of ImageFileFormat.
+typedef NS_ENUM(NSUInteger, FCPPlatformImageFileFormat) {
+ FCPPlatformImageFileFormatJpeg = 0,
+ FCPPlatformImageFileFormatHeif = 1,
+};
+
+/// Wrapper for FCPPlatformImageFileFormat to allow for nullability.
+@interface FCPPlatformImageFileFormatBox : NSObject
+@property(nonatomic, assign) FCPPlatformImageFileFormat value;
+- (instancetype)initWithValue:(FCPPlatformImageFileFormat)value;
+@end
+
+typedef NS_ENUM(NSUInteger, FCPPlatformImageFormatGroup) {
+ FCPPlatformImageFormatGroupBgra8888 = 0,
+ FCPPlatformImageFormatGroupYuv420 = 1,
+};
+
+/// Wrapper for FCPPlatformImageFormatGroup to allow for nullability.
+@interface FCPPlatformImageFormatGroupBox : NSObject
+@property(nonatomic, assign) FCPPlatformImageFormatGroup value;
+- (instancetype)initWithValue:(FCPPlatformImageFormatGroup)value;
+@end
+
+typedef NS_ENUM(NSUInteger, FCPPlatformResolutionPreset) {
+ FCPPlatformResolutionPresetLow = 0,
+ FCPPlatformResolutionPresetMedium = 1,
+ FCPPlatformResolutionPresetHigh = 2,
+ FCPPlatformResolutionPresetVeryHigh = 3,
+ FCPPlatformResolutionPresetUltraHigh = 4,
+ FCPPlatformResolutionPresetMax = 5,
+};
+
+/// Wrapper for FCPPlatformResolutionPreset to allow for nullability.
+@interface FCPPlatformResolutionPresetBox : NSObject
+@property(nonatomic, assign) FCPPlatformResolutionPreset value;
+- (instancetype)initWithValue:(FCPPlatformResolutionPreset)value;
+@end
+
@class FCPPlatformCameraDescription;
@class FCPPlatformCameraState;
+@class FCPPlatformMediaSettings;
+@class FCPPlatformPoint;
@class FCPPlatformSize;
@interface FCPPlatformCameraDescription : NSObject
@@ -98,6 +151,29 @@ typedef NS_ENUM(NSUInteger, FCPPlatformFocusMode) {
@property(nonatomic, assign) BOOL focusPointSupported;
@end
+@interface FCPPlatformMediaSettings : NSObject
+/// `init` unavailable to enforce nonnull fields, see the `make` class method.
+- (instancetype)init NS_UNAVAILABLE;
++ (instancetype)makeWithResolutionPreset:(FCPPlatformResolutionPreset)resolutionPreset
+ framesPerSecond:(nullable NSNumber *)framesPerSecond
+ videoBitrate:(nullable NSNumber *)videoBitrate
+ audioBitrate:(nullable NSNumber *)audioBitrate
+ enableAudio:(BOOL)enableAudio;
+@property(nonatomic, assign) FCPPlatformResolutionPreset resolutionPreset;
+@property(nonatomic, strong, nullable) NSNumber *framesPerSecond;
+@property(nonatomic, strong, nullable) NSNumber *videoBitrate;
+@property(nonatomic, strong, nullable) NSNumber *audioBitrate;
+@property(nonatomic, assign) BOOL enableAudio;
+@end
+
+@interface FCPPlatformPoint : NSObject
+/// `init` unavailable to enforce nonnull fields, see the `make` class method.
+- (instancetype)init NS_UNAVAILABLE;
++ (instancetype)makeWithX:(double)x y:(double)y;
+@property(nonatomic, assign) double x;
+@property(nonatomic, assign) double y;
+@end
+
@interface FCPPlatformSize : NSObject
/// `init` unavailable to enforce nonnull fields, see the `make` class method.
- (instancetype)init NS_UNAVAILABLE;
@@ -113,6 +189,92 @@ NSObject *FCPCameraApiGetCodec(void);
/// Returns the list of available cameras.
- (void)availableCamerasWithCompletion:(void (^)(NSArray<FCPPlatformCameraDescription *> *_Nullable,
FlutterError *_Nullable))completion;
+/// Creates a new camera with the given settings, and returns its ID.
+- (void)createCameraWithName:(NSString *)cameraName
+ settings:(FCPPlatformMediaSettings *)settings
+ completion:(void (^)(NSNumber *_Nullable, FlutterError *_Nullable))completion;
+/// Initializes the camera with the given ID.
+- (void)initializeCamera:(NSInteger)cameraId
+ withImageFormat:(FCPPlatformImageFormatGroup)imageFormat
+ completion:(void (^)(FlutterError *_Nullable))completion;
+/// Begins streaming frames from the camera.
+- (void)startImageStreamWithCompletion:(void (^)(FlutterError *_Nullable))completion;
+/// Stops streaming frames from the camera.
+- (void)stopImageStreamWithCompletion:(void (^)(FlutterError *_Nullable))completion;
+/// Called by the Dart side of the plugin when it has received the last image
+/// frame sent.
+///
+/// This is used to throttle sending frames across the channel.
+- (void)receivedImageStreamDataWithCompletion:(void (^)(FlutterError *_Nullable))completion;
+/// Indicates that the given camera is no longer being used on the Dart side,
+/// and any associated resources can be cleaned up.
+- (void)disposeCamera:(NSInteger)cameraId completion:(void (^)(FlutterError *_Nullable))completion;
+/// Locks the camera capture to the current device orientation.
+- (void)lockCaptureOrientation:(FCPPlatformDeviceOrientation)orientation
+ completion:(void (^)(FlutterError *_Nullable))completion;
+/// Unlocks camera capture orientation, allowing it to automatically adapt to
+/// device orientation.
+- (void)unlockCaptureOrientationWithCompletion:(void (^)(FlutterError *_Nullable))completion;
+/// Takes a picture with the current settings, and returns the path to the
+/// resulting file.
+- (void)takePictureWithCompletion:(void (^)(NSString *_Nullable,
+ FlutterError *_Nullable))completion;
+/// Does any preprocessing necessary before beginning to record video.
+- (void)prepareForVideoRecordingWithCompletion:(void (^)(FlutterError *_Nullable))completion;
+/// Begins recording video, optionally enabling streaming to Dart at the same
+/// time.
+- (void)startVideoRecordingWithStreaming:(BOOL)enableStream
+ completion:(void (^)(FlutterError *_Nullable))completion;
+/// Stops recording video, and returns the path to the resulting file.
+- (void)stopVideoRecordingWithCompletion:(void (^)(NSString *_Nullable,
+ FlutterError *_Nullable))completion;
+/// Pauses video recording.
+- (void)pauseVideoRecordingWithCompletion:(void (^)(FlutterError *_Nullable))completion;
+/// Resumes a previously paused video recording.
+- (void)resumeVideoRecordingWithCompletion:(void (^)(FlutterError *_Nullable))completion;
+/// Switches the camera to the given flash mode.
+- (void)setFlashMode:(FCPPlatformFlashMode)mode
+ completion:(void (^)(FlutterError *_Nullable))completion;
+/// Switches the camera to the given exposure mode.
+- (void)setExposureMode:(FCPPlatformExposureMode)mode
+ completion:(void (^)(FlutterError *_Nullable))completion;
+/// Anchors auto-exposure to the given point in (0,1) coordinate space.
+///
+/// A null value resets to the default exposure point.
+- (void)setExposurePoint:(nullable FCPPlatformPoint *)point
+ completion:(void (^)(FlutterError *_Nullable))completion;
+/// Returns the minimum exposure offset supported by the camera.
+- (void)getMinimumExposureOffset:(void (^)(NSNumber *_Nullable, FlutterError *_Nullable))completion;
+/// Returns the maximum exposure offset supported by the camera.
+- (void)getMaximumExposureOffset:(void (^)(NSNumber *_Nullable, FlutterError *_Nullable))completion;
+/// Sets the exposure offset manually to the given value.
+- (void)setExposureOffset:(double)offset completion:(void (^)(FlutterError *_Nullable))completion;
+/// Switches the camera to the given focus mode.
+- (void)setFocusMode:(FCPPlatformFocusMode)mode
+ completion:(void (^)(FlutterError *_Nullable))completion;
+/// Anchors auto-focus to the given point in (0,1) coordinate space.
+///
+/// A null value resets to the default focus point.
+- (void)setFocusPoint:(nullable FCPPlatformPoint *)point
+ completion:(void (^)(FlutterError *_Nullable))completion;
+/// Returns the minimum zoom level supported by the camera.
+- (void)getMinimumZoomLevel:(void (^)(NSNumber *_Nullable, FlutterError *_Nullable))completion;
+/// Returns the maximum zoom level supported by the camera.
+- (void)getMaximumZoomLevel:(void (^)(NSNumber *_Nullable, FlutterError *_Nullable))completion;
+/// Sets the zoom factor.
+- (void)setZoomLevel:(double)zoom completion:(void (^)(FlutterError *_Nullable))completion;
+/// Pauses streaming of preview frames.
+- (void)pausePreviewWithCompletion:(void (^)(FlutterError *_Nullable))completion;
+/// Resumes a previously paused preview stream.
+- (void)resumePreviewWithCompletion:(void (^)(FlutterError *_Nullable))completion;
+/// Changes the camera used while recording video.
+///
+/// This should only be called while video recording is active.
+- (void)updateDescriptionWhileRecordingCameraName:(NSString *)cameraName
+ completion:(void (^)(FlutterError *_Nullable))completion;
+/// Sets the file format used for taking pictures.
+- (void)setImageFileFormat:(FCPPlatformImageFileFormat)format
+ completion:(void (^)(FlutterError *_Nullable))completion;
@end
extern void SetUpFCPCameraApi(id<FlutterBinaryMessenger> binaryMessenger,
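
Since -init is unavailable on the new pigeon data classes declared above, the generated make class methods are the intended constructors. A small hedged sketch follows; the ExampleRecordingSettings helper and the chosen preset, frame rate, and bitrates are arbitrary illustrations, not values from this patch.

#import "messages.g.h"

// Illustrative only: builds the settings object the Dart side sends when creating a camera.
static FCPPlatformMediaSettings *ExampleRecordingSettings(void) {
  return [FCPPlatformMediaSettings
      makeWithResolutionPreset:FCPPlatformResolutionPresetHigh
               framesPerSecond:@(30)
                  videoBitrate:nil   // nil keeps the encoder default
                  audioBitrate:nil
                   enableAudio:YES];
}
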
diff --git a/packages/camera/camera_avfoundation/ios/Classes/messages.g.m b/packages/camera/camera_avfoundation/ios/Classes/messages.g.m
index fd1100c0b77..d90b63d7a01 100644
--- a/packages/camera/camera_avfoundation/ios/Classes/messages.g.m
+++ b/packages/camera/camera_avfoundation/ios/Classes/messages.g.m
@@ -69,6 +69,16 @@ - (instancetype)initWithValue:(FCPPlatformExposureMode)value {
}
@end
+@implementation FCPPlatformFlashModeBox
+- (instancetype)initWithValue:(FCPPlatformFlashMode)value {
+ self = [super init];
+ if (self) {
+ _value = value;
+ }
+ return self;
+}
+@end
+
@implementation FCPPlatformFocusModeBox
- (instancetype)initWithValue:(FCPPlatformFocusMode)value {
self = [super init];
@@ -79,6 +89,37 @@ - (instancetype)initWithValue:(FCPPlatformFocusMode)value {
}
@end
+/// Pigeon version of ImageFileFormat.
+@implementation FCPPlatformImageFileFormatBox
+- (instancetype)initWithValue:(FCPPlatformImageFileFormat)value {
+ self = [super init];
+ if (self) {
+ _value = value;
+ }
+ return self;
+}
+@end
+
+@implementation FCPPlatformImageFormatGroupBox
+- (instancetype)initWithValue:(FCPPlatformImageFormatGroup)value {
+ self = [super init];
+ if (self) {
+ _value = value;
+ }
+ return self;
+}
+@end
+
+@implementation FCPPlatformResolutionPresetBox
+- (instancetype)initWithValue:(FCPPlatformResolutionPreset)value {
+ self = [super init];
+ if (self) {
+ _value = value;
+ }
+ return self;
+}
+@end
+
@interface FCPPlatformCameraDescription ()
+ (FCPPlatformCameraDescription *)fromList:(NSArray *)list;
+ (nullable FCPPlatformCameraDescription *)nullableFromList:(NSArray *)list;
@@ -91,6 +132,18 @@ + (nullable FCPPlatformCameraState *)nullableFromList:(NSArray *)list;
- (NSArray *)toList;
@end
+@interface FCPPlatformMediaSettings ()
++ (FCPPlatformMediaSettings *)fromList:(NSArray *)list;
++ (nullable FCPPlatformMediaSettings *)nullableFromList:(NSArray *)list;
+- (NSArray *)toList;
+@end
+
+@interface FCPPlatformPoint ()
++ (FCPPlatformPoint *)fromList:(NSArray *)list;
++ (nullable FCPPlatformPoint *)nullableFromList:(NSArray *)list;
+- (NSArray *)toList;
+@end
+
@interface FCPPlatformSize ()
+ (FCPPlatformSize *)fromList:(NSArray *)list;
+ (nullable FCPPlatformSize *)nullableFromList:(NSArray *)list;
@@ -159,6 +212,67 @@ - (NSArray *)toList {
}
@end
+@implementation FCPPlatformMediaSettings
++ (instancetype)makeWithResolutionPreset:(FCPPlatformResolutionPreset)resolutionPreset
+ framesPerSecond:(nullable NSNumber *)framesPerSecond
+ videoBitrate:(nullable NSNumber *)videoBitrate
+ audioBitrate:(nullable NSNumber *)audioBitrate
+ enableAudio:(BOOL)enableAudio {
+ FCPPlatformMediaSettings *pigeonResult = [[FCPPlatformMediaSettings alloc] init];
+ pigeonResult.resolutionPreset = resolutionPreset;
+ pigeonResult.framesPerSecond = framesPerSecond;
+ pigeonResult.videoBitrate = videoBitrate;
+ pigeonResult.audioBitrate = audioBitrate;
+ pigeonResult.enableAudio = enableAudio;
+ return pigeonResult;
+}
++ (FCPPlatformMediaSettings *)fromList:(NSArray *)list {
+ FCPPlatformMediaSettings *pigeonResult = [[FCPPlatformMediaSettings alloc] init];
+ pigeonResult.resolutionPreset = [GetNullableObjectAtIndex(list, 0) integerValue];
+ pigeonResult.framesPerSecond = GetNullableObjectAtIndex(list, 1);
+ pigeonResult.videoBitrate = GetNullableObjectAtIndex(list, 2);
+ pigeonResult.audioBitrate = GetNullableObjectAtIndex(list, 3);
+ pigeonResult.enableAudio = [GetNullableObjectAtIndex(list, 4) boolValue];
+ return pigeonResult;
+}
++ (nullable FCPPlatformMediaSettings *)nullableFromList:(NSArray *)list {
+ return (list) ? [FCPPlatformMediaSettings fromList:list] : nil;
+}
+- (NSArray *)toList {
+ return @[
+ @(self.resolutionPreset),
+ self.framesPerSecond ?: [NSNull null],
+ self.videoBitrate ?: [NSNull null],
+ self.audioBitrate ?: [NSNull null],
+ @(self.enableAudio),
+ ];
+}
+@end
+
+@implementation FCPPlatformPoint
++ (instancetype)makeWithX:(double)x y:(double)y {
+ FCPPlatformPoint *pigeonResult = [[FCPPlatformPoint alloc] init];
+ pigeonResult.x = x;
+ pigeonResult.y = y;
+ return pigeonResult;
+}
++ (FCPPlatformPoint *)fromList:(NSArray *)list {
+ FCPPlatformPoint *pigeonResult = [[FCPPlatformPoint alloc] init];
+ pigeonResult.x = [GetNullableObjectAtIndex(list, 0) doubleValue];
+ pigeonResult.y = [GetNullableObjectAtIndex(list, 1) doubleValue];
+ return pigeonResult;
+}
++ (nullable FCPPlatformPoint *)nullableFromList:(NSArray *)list {
+ return (list) ? [FCPPlatformPoint fromList:list] : nil;
+}
+- (NSArray *)toList {
+ return @[
+ @(self.x),
+ @(self.y),
+ ];
+}
+@end
+
@implementation FCPPlatformSize
+ (instancetype)makeWithWidth:(double)width height:(double)height {
FCPPlatformSize *pigeonResult = [[FCPPlatformSize alloc] init];
@@ -190,6 +304,10 @@ - (nullable id)readValueOfType:(UInt8)type {
switch (type) {
case 128:
return [FCPPlatformCameraDescription fromList:[self readValue]];
+ case 129:
+ return [FCPPlatformMediaSettings fromList:[self readValue]];
+ case 130:
+ return [FCPPlatformPoint fromList:[self readValue]];
default:
return [super readValueOfType:type];
}
@@ -203,6 +321,12 @@ - (void)writeValue:(id)value {
if ([value isKindOfClass:[FCPPlatformCameraDescription class]]) {
[self writeByte:128];
[self writeValue:[value toList]];
+ } else if ([value isKindOfClass:[FCPPlatformMediaSettings class]]) {
+ [self writeByte:129];
+ [self writeValue:[value toList]];
+ } else if ([value isKindOfClass:[FCPPlatformPoint class]]) {
+ [self writeByte:130];
+ [self writeValue:[value toList]];
} else {
[super writeValue:value];
}
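
To clarify the codec wiring above: each pigeon data class travels over the channel in its toList form, tagged with the type byte registered in the reader/writer (129 for FCPPlatformMediaSettings, 130 for FCPPlatformPoint). A hedged round-trip sketch follows; note that fromList:/toList are declared in file-private class extensions, so this helper would only compile inside messages.g.m.

// Illustrative only: the list form read and written by the codec above.
static void RoundTripExample(void) {
  FCPPlatformPoint *point = [FCPPlatformPoint makeWithX:0.25 y:0.75];
  NSArray *encoded = [point toList];  // @[ @0.25, @0.75 ]
  FCPPlatformPoint *decoded = [FCPPlatformPoint fromList:encoded];
  NSCAssert(decoded.x == point.x && decoded.y == point.y, @"round trip should preserve x/y");
}
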
@@ -264,6 +388,733 @@ void SetUpFCPCameraApiWithSuffix(id binaryMessenger,
[channel setMessageHandler:nil];
}
}
+ /// Creates a new camera with the given settings, and returns its ID.
+ {
+ FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc]
+ initWithName:[NSString stringWithFormat:
+ @"%@%@",
+ @"dev.flutter.pigeon.camera_avfoundation.CameraApi.create",
+ messageChannelSuffix]
+ binaryMessenger:binaryMessenger
+ codec:FCPCameraApiGetCodec()];
+ if (api) {
+ NSCAssert([api respondsToSelector:@selector(createCameraWithName:settings:completion:)],
+ @"FCPCameraApi api (%@) doesn't respond to "
+ @"@selector(createCameraWithName:settings:completion:)",
+ api);
+ [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) {
+ NSArray *args = message;
+ NSString *arg_cameraName = GetNullableObjectAtIndex(args, 0);
+ FCPPlatformMediaSettings *arg_settings = GetNullableObjectAtIndex(args, 1);
+ [api createCameraWithName:arg_cameraName
+ settings:arg_settings
+ completion:^(NSNumber *_Nullable output, FlutterError *_Nullable error) {
+ callback(wrapResult(output, error));
+ }];
+ }];
+ } else {
+ [channel setMessageHandler:nil];
+ }
+ }
+ /// Initializes the camera with the given ID.
+ {
+ FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc]
+ initWithName:
+ [NSString
+ stringWithFormat:@"%@%@",
+ @"dev.flutter.pigeon.camera_avfoundation.CameraApi.initialize",
+ messageChannelSuffix]
+ binaryMessenger:binaryMessenger
+ codec:FCPCameraApiGetCodec()];
+ if (api) {
+ NSCAssert([api respondsToSelector:@selector(initializeCamera:withImageFormat:completion:)],
+ @"FCPCameraApi api (%@) doesn't respond to "
+ @"@selector(initializeCamera:withImageFormat:completion:)",
+ api);
+ [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) {
+ NSArray *args = message;
+ NSInteger arg_cameraId = [GetNullableObjectAtIndex(args, 0) integerValue];
+ FCPPlatformImageFormatGroup arg_imageFormat =
+ [GetNullableObjectAtIndex(args, 1) integerValue];
+ [api initializeCamera:arg_cameraId
+ withImageFormat:arg_imageFormat
+ completion:^(FlutterError *_Nullable error) {
+ callback(wrapResult(nil, error));
+ }];
+ }];
+ } else {
+ [channel setMessageHandler:nil];
+ }
+ }
+ /// Begins streaming frames from the camera.
+ {
+ FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc]
+ initWithName:[NSString stringWithFormat:@"%@%@",
+ @"dev.flutter.pigeon.camera_avfoundation."
+ @"CameraApi.startImageStream",
+ messageChannelSuffix]
+ binaryMessenger:binaryMessenger
+ codec:FCPCameraApiGetCodec()];
+ if (api) {
+ NSCAssert(
+ [api respondsToSelector:@selector(startImageStreamWithCompletion:)],
+ @"FCPCameraApi api (%@) doesn't respond to @selector(startImageStreamWithCompletion:)",
+ api);
+ [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) {
+ [api startImageStreamWithCompletion:^(FlutterError *_Nullable error) {
+ callback(wrapResult(nil, error));
+ }];
+ }];
+ } else {
+ [channel setMessageHandler:nil];
+ }
+ }
+ /// Stops streaming frames from the camera.
+ {
+ FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc]
+ initWithName:[NSString
+ stringWithFormat:
+ @"%@%@",
+ @"dev.flutter.pigeon.camera_avfoundation.CameraApi.stopImageStream",
+ messageChannelSuffix]
+ binaryMessenger:binaryMessenger
+ codec:FCPCameraApiGetCodec()];
+ if (api) {
+ NSCAssert(
+ [api respondsToSelector:@selector(stopImageStreamWithCompletion:)],
+ @"FCPCameraApi api (%@) doesn't respond to @selector(stopImageStreamWithCompletion:)",
+ api);
+ [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) {
+ [api stopImageStreamWithCompletion:^(FlutterError *_Nullable error) {
+ callback(wrapResult(nil, error));
+ }];
+ }];
+ } else {
+ [channel setMessageHandler:nil];
+ }
+ }
+ /// Called by the Dart side of the plugin when it has received the last image
+ /// frame sent.
+ ///
+ /// This is used to throttle sending frames across the channel.
+ {
+ FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc]
+ initWithName:[NSString stringWithFormat:@"%@%@",
+ @"dev.flutter.pigeon.camera_avfoundation."
+ @"CameraApi.receivedImageStreamData",
+ messageChannelSuffix]
+ binaryMessenger:binaryMessenger
+ codec:FCPCameraApiGetCodec()];
+ if (api) {
+ NSCAssert([api respondsToSelector:@selector(receivedImageStreamDataWithCompletion:)],
+ @"FCPCameraApi api (%@) doesn't respond to "
+ @"@selector(receivedImageStreamDataWithCompletion:)",
+ api);
+ [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) {
+ [api receivedImageStreamDataWithCompletion:^(FlutterError *_Nullable error) {
+ callback(wrapResult(nil, error));
+ }];
+ }];
+ } else {
+ [channel setMessageHandler:nil];
+ }
+ }
+ /// Indicates that the given camera is no longer being used on the Dart side,
+ /// and any associated resources can be cleaned up.
+ {
+ FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc]
+ initWithName:[NSString stringWithFormat:
+ @"%@%@",
+ @"dev.flutter.pigeon.camera_avfoundation.CameraApi.dispose",
+ messageChannelSuffix]
+ binaryMessenger:binaryMessenger
+ codec:FCPCameraApiGetCodec()];
+ if (api) {
+ NSCAssert([api respondsToSelector:@selector(disposeCamera:completion:)],
+ @"FCPCameraApi api (%@) doesn't respond to @selector(disposeCamera:completion:)",
+ api);
+ [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) {
+ NSArray *args = message;
+ NSInteger arg_cameraId = [GetNullableObjectAtIndex(args, 0) integerValue];
+ [api disposeCamera:arg_cameraId
+ completion:^(FlutterError *_Nullable error) {
+ callback(wrapResult(nil, error));
+ }];
+ }];
+ } else {
+ [channel setMessageHandler:nil];
+ }
+ }
+ /// Locks the camera capture to the current device orientation.
+ {
+ FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc]
+ initWithName:[NSString stringWithFormat:@"%@%@",
+ @"dev.flutter.pigeon.camera_avfoundation."
+ @"CameraApi.lockCaptureOrientation",
+ messageChannelSuffix]
+ binaryMessenger:binaryMessenger
+ codec:FCPCameraApiGetCodec()];
+ if (api) {
+ NSCAssert(
+ [api respondsToSelector:@selector(lockCaptureOrientation:completion:)],
+ @"FCPCameraApi api (%@) doesn't respond to @selector(lockCaptureOrientation:completion:)",
+ api);
+ [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) {
+ NSArray *args = message;
+ FCPPlatformDeviceOrientation arg_orientation =
+ [GetNullableObjectAtIndex(args, 0) integerValue];
+ [api lockCaptureOrientation:arg_orientation
+ completion:^(FlutterError *_Nullable error) {
+ callback(wrapResult(nil, error));
+ }];
+ }];
+ } else {
+ [channel setMessageHandler:nil];
+ }
+ }
+ /// Unlocks camera capture orientation, allowing it to automatically adapt to
+ /// device orientation.
+ {
+ FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc]
+ initWithName:[NSString stringWithFormat:@"%@%@",
+ @"dev.flutter.pigeon.camera_avfoundation."
+ @"CameraApi.unlockCaptureOrientation",
+ messageChannelSuffix]
+ binaryMessenger:binaryMessenger
+ codec:FCPCameraApiGetCodec()];
+ if (api) {
+ NSCAssert([api respondsToSelector:@selector(unlockCaptureOrientationWithCompletion:)],
+ @"FCPCameraApi api (%@) doesn't respond to "
+ @"@selector(unlockCaptureOrientationWithCompletion:)",
+ api);
+ [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) {
+ [api unlockCaptureOrientationWithCompletion:^(FlutterError *_Nullable error) {
+ callback(wrapResult(nil, error));
+ }];
+ }];
+ } else {
+ [channel setMessageHandler:nil];
+ }
+ }
+ /// Takes a picture with the current settings, and returns the path to the
+ /// resulting file.
+ {
+ FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc]
+ initWithName:
+ [NSString
+ stringWithFormat:@"%@%@",
+ @"dev.flutter.pigeon.camera_avfoundation.CameraApi.takePicture",
+ messageChannelSuffix]
+ binaryMessenger:binaryMessenger
+ codec:FCPCameraApiGetCodec()];
+ if (api) {
+ NSCAssert([api respondsToSelector:@selector(takePictureWithCompletion:)],
+ @"FCPCameraApi api (%@) doesn't respond to @selector(takePictureWithCompletion:)",
+ api);
+ [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) {
+ [api
+ takePictureWithCompletion:^(NSString *_Nullable output, FlutterError *_Nullable error) {
+ callback(wrapResult(output, error));
+ }];
+ }];
+ } else {
+ [channel setMessageHandler:nil];
+ }
+ }
+ /// Does any preprocessing necessary before beginning to record video.
+ {
+ FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc]
+ initWithName:[NSString stringWithFormat:@"%@%@",
+ @"dev.flutter.pigeon.camera_avfoundation."
+ @"CameraApi.prepareForVideoRecording",
+ messageChannelSuffix]
+ binaryMessenger:binaryMessenger
+ codec:FCPCameraApiGetCodec()];
+ if (api) {
+ NSCAssert([api respondsToSelector:@selector(prepareForVideoRecordingWithCompletion:)],
+ @"FCPCameraApi api (%@) doesn't respond to "
+ @"@selector(prepareForVideoRecordingWithCompletion:)",
+ api);
+ [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) {
+ [api prepareForVideoRecordingWithCompletion:^(FlutterError *_Nullable error) {
+ callback(wrapResult(nil, error));
+ }];
+ }];
+ } else {
+ [channel setMessageHandler:nil];
+ }
+ }
+ /// Begins recording video, optionally enabling streaming to Dart at the same
+ /// time.
+ {
+ FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc]
+ initWithName:[NSString stringWithFormat:@"%@%@",
+ @"dev.flutter.pigeon.camera_avfoundation."
+ @"CameraApi.startVideoRecording",
+ messageChannelSuffix]
+ binaryMessenger:binaryMessenger
+ codec:FCPCameraApiGetCodec()];
+ if (api) {
+ NSCAssert([api respondsToSelector:@selector(startVideoRecordingWithStreaming:completion:)],
+ @"FCPCameraApi api (%@) doesn't respond to "
+ @"@selector(startVideoRecordingWithStreaming:completion:)",
+ api);
+ [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) {
+ NSArray *args = message;
+ BOOL arg_enableStream = [GetNullableObjectAtIndex(args, 0) boolValue];
+ [api startVideoRecordingWithStreaming:arg_enableStream
+ completion:^(FlutterError *_Nullable error) {
+ callback(wrapResult(nil, error));
+ }];
+ }];
+ } else {
+ [channel setMessageHandler:nil];
+ }
+ }
+ /// Stops recording video, and returns the path to the resulting file.
+ {
+ FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc]
+ initWithName:[NSString stringWithFormat:@"%@%@",
+ @"dev.flutter.pigeon.camera_avfoundation."
+ @"CameraApi.stopVideoRecording",
+ messageChannelSuffix]
+ binaryMessenger:binaryMessenger
+ codec:FCPCameraApiGetCodec()];
+ if (api) {
+ NSCAssert(
+ [api respondsToSelector:@selector(stopVideoRecordingWithCompletion:)],
+ @"FCPCameraApi api (%@) doesn't respond to @selector(stopVideoRecordingWithCompletion:)",
+ api);
+ [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) {
+ [api stopVideoRecordingWithCompletion:^(NSString *_Nullable output,
+ FlutterError *_Nullable error) {
+ callback(wrapResult(output, error));
+ }];
+ }];
+ } else {
+ [channel setMessageHandler:nil];
+ }
+ }
+ /// Pauses video recording.
+ {
+ FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc]
+ initWithName:[NSString stringWithFormat:@"%@%@",
+ @"dev.flutter.pigeon.camera_avfoundation."
+ @"CameraApi.pauseVideoRecording",
+ messageChannelSuffix]
+ binaryMessenger:binaryMessenger
+ codec:FCPCameraApiGetCodec()];
+ if (api) {
+ NSCAssert(
+ [api respondsToSelector:@selector(pauseVideoRecordingWithCompletion:)],
+ @"FCPCameraApi api (%@) doesn't respond to @selector(pauseVideoRecordingWithCompletion:)",
+ api);
+ [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) {
+ [api pauseVideoRecordingWithCompletion:^(FlutterError *_Nullable error) {
+ callback(wrapResult(nil, error));
+ }];
+ }];
+ } else {
+ [channel setMessageHandler:nil];
+ }
+ }
+ /// Resumes a previously paused video recording.
+ {
+ FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc]
+ initWithName:[NSString stringWithFormat:@"%@%@",
+ @"dev.flutter.pigeon.camera_avfoundation."
+ @"CameraApi.resumeVideoRecording",
+ messageChannelSuffix]
+ binaryMessenger:binaryMessenger
+ codec:FCPCameraApiGetCodec()];
+ if (api) {
+ NSCAssert([api respondsToSelector:@selector(resumeVideoRecordingWithCompletion:)],
+ @"FCPCameraApi api (%@) doesn't respond to "
+ @"@selector(resumeVideoRecordingWithCompletion:)",
+ api);
+ [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) {
+ [api resumeVideoRecordingWithCompletion:^(FlutterError *_Nullable error) {
+ callback(wrapResult(nil, error));
+ }];
+ }];
+ } else {
+ [channel setMessageHandler:nil];
+ }
+ }
+ /// Switches the camera to the given flash mode.
+ {
+ FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc]
+ initWithName:[NSString
+ stringWithFormat:
+ @"%@%@",
+ @"dev.flutter.pigeon.camera_avfoundation.CameraApi.setFlashMode",
+ messageChannelSuffix]
+ binaryMessenger:binaryMessenger
+ codec:FCPCameraApiGetCodec()];
+ if (api) {
+ NSCAssert([api respondsToSelector:@selector(setFlashMode:completion:)],
+ @"FCPCameraApi api (%@) doesn't respond to @selector(setFlashMode:completion:)",
+ api);
+ [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) {
+ NSArray *args = message;
+ FCPPlatformFlashMode arg_mode = [GetNullableObjectAtIndex(args, 0) integerValue];
+ [api setFlashMode:arg_mode
+ completion:^(FlutterError *_Nullable error) {
+ callback(wrapResult(nil, error));
+ }];
+ }];
+ } else {
+ [channel setMessageHandler:nil];
+ }
+ }
+ /// Switches the camera to the given exposure mode.
+ {
+ FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc]
+ initWithName:[NSString
+ stringWithFormat:
+ @"%@%@",
+ @"dev.flutter.pigeon.camera_avfoundation.CameraApi.setExposureMode",
+ messageChannelSuffix]
+ binaryMessenger:binaryMessenger
+ codec:FCPCameraApiGetCodec()];
+ if (api) {
+ NSCAssert([api respondsToSelector:@selector(setExposureMode:completion:)],
+ @"FCPCameraApi api (%@) doesn't respond to @selector(setExposureMode:completion:)",
+ api);
+ [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) {
+ NSArray *args = message;
+ FCPPlatformExposureMode arg_mode = [GetNullableObjectAtIndex(args, 0) integerValue];
+ [api setExposureMode:arg_mode
+ completion:^(FlutterError *_Nullable error) {
+ callback(wrapResult(nil, error));
+ }];
+ }];
+ } else {
+ [channel setMessageHandler:nil];
+ }
+ }
+ /// Anchors auto-exposure to the given point in (0,1) coordinate space.
+ ///
+ /// A null value resets to the default exposure point.
+ {
+ FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc]
+ initWithName:[NSString stringWithFormat:@"%@%@",
+ @"dev.flutter.pigeon.camera_avfoundation."
+ @"CameraApi.setExposurePoint",
+ messageChannelSuffix]
+ binaryMessenger:binaryMessenger
+ codec:FCPCameraApiGetCodec()];
+ if (api) {
+ NSCAssert([api respondsToSelector:@selector(setExposurePoint:completion:)],
+ @"FCPCameraApi api (%@) doesn't respond to @selector(setExposurePoint:completion:)",
+ api);
+ [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) {
+ NSArray *args = message;
+ FCPPlatformPoint *arg_point = GetNullableObjectAtIndex(args, 0);
+ [api setExposurePoint:arg_point
+ completion:^(FlutterError *_Nullable error) {
+ callback(wrapResult(nil, error));
+ }];
+ }];
+ } else {
+ [channel setMessageHandler:nil];
+ }
+ }
+ /// Returns the minimum exposure offset supported by the camera.
+ {
+ FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc]
+ initWithName:[NSString stringWithFormat:@"%@%@",
+ @"dev.flutter.pigeon.camera_avfoundation."
+ @"CameraApi.getMinExposureOffset",
+ messageChannelSuffix]
+ binaryMessenger:binaryMessenger
+ codec:FCPCameraApiGetCodec()];
+ if (api) {
+ NSCAssert([api respondsToSelector:@selector(getMinimumExposureOffset:)],
+ @"FCPCameraApi api (%@) doesn't respond to @selector(getMinimumExposureOffset:)",
+ api);
+ [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) {
+ [api getMinimumExposureOffset:^(NSNumber *_Nullable output, FlutterError *_Nullable error) {
+ callback(wrapResult(output, error));
+ }];
+ }];
+ } else {
+ [channel setMessageHandler:nil];
+ }
+ }
+ /// Returns the maximum exposure offset supported by the camera.
+ {
+ FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc]
+ initWithName:[NSString stringWithFormat:@"%@%@",
+ @"dev.flutter.pigeon.camera_avfoundation."
+ @"CameraApi.getMaxExposureOffset",
+ messageChannelSuffix]
+ binaryMessenger:binaryMessenger
+ codec:FCPCameraApiGetCodec()];
+ if (api) {
+ NSCAssert([api respondsToSelector:@selector(getMaximumExposureOffset:)],
+ @"FCPCameraApi api (%@) doesn't respond to @selector(getMaximumExposureOffset:)",
+ api);
+ [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) {
+ [api getMaximumExposureOffset:^(NSNumber *_Nullable output, FlutterError *_Nullable error) {
+ callback(wrapResult(output, error));
+ }];
+ }];
+ } else {
+ [channel setMessageHandler:nil];
+ }
+ }
+ /// Sets the exposure offset manually to the given value.
+ {
+ FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc]
+ initWithName:[NSString stringWithFormat:@"%@%@",
+ @"dev.flutter.pigeon.camera_avfoundation."
+ @"CameraApi.setExposureOffset",
+ messageChannelSuffix]
+ binaryMessenger:binaryMessenger
+ codec:FCPCameraApiGetCodec()];
+ if (api) {
+ NSCAssert(
+ [api respondsToSelector:@selector(setExposureOffset:completion:)],
+ @"FCPCameraApi api (%@) doesn't respond to @selector(setExposureOffset:completion:)",
+ api);
+ [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) {
+ NSArray *args = message;
+ double arg_offset = [GetNullableObjectAtIndex(args, 0) doubleValue];
+ [api setExposureOffset:arg_offset
+ completion:^(FlutterError *_Nullable error) {
+ callback(wrapResult(nil, error));
+ }];
+ }];
+ } else {
+ [channel setMessageHandler:nil];
+ }
+ }
+ /// Switches the camera to the given focus mode.
+ {
+ FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc]
+ initWithName:[NSString
+ stringWithFormat:
+ @"%@%@",
+ @"dev.flutter.pigeon.camera_avfoundation.CameraApi.setFocusMode",
+ messageChannelSuffix]
+ binaryMessenger:binaryMessenger
+ codec:FCPCameraApiGetCodec()];
+ if (api) {
+ NSCAssert([api respondsToSelector:@selector(setFocusMode:completion:)],
+ @"FCPCameraApi api (%@) doesn't respond to @selector(setFocusMode:completion:)",
+ api);
+ [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) {
+ NSArray *args = message;
+ FCPPlatformFocusMode arg_mode = [GetNullableObjectAtIndex(args, 0) integerValue];
+ [api setFocusMode:arg_mode
+ completion:^(FlutterError *_Nullable error) {
+ callback(wrapResult(nil, error));
+ }];
+ }];
+ } else {
+ [channel setMessageHandler:nil];
+ }
+ }
+ /// Anchors auto-focus to the given point in (0,1) coordinate space.
+ ///
+ /// A null value resets to the default focus point.
+ {
+ FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc]
+ initWithName:[NSString
+ stringWithFormat:
+ @"%@%@",
+ @"dev.flutter.pigeon.camera_avfoundation.CameraApi.setFocusPoint",
+ messageChannelSuffix]
+ binaryMessenger:binaryMessenger
+ codec:FCPCameraApiGetCodec()];
+ if (api) {
+ NSCAssert([api respondsToSelector:@selector(setFocusPoint:completion:)],
+ @"FCPCameraApi api (%@) doesn't respond to @selector(setFocusPoint:completion:)",
+ api);
+ [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) {
+ NSArray *args = message;
+ FCPPlatformPoint *arg_point = GetNullableObjectAtIndex(args, 0);
+ [api setFocusPoint:arg_point
+ completion:^(FlutterError *_Nullable error) {
+ callback(wrapResult(nil, error));
+ }];
+ }];
+ } else {
+ [channel setMessageHandler:nil];
+ }
+ }
+ /// Returns the minimum zoom level supported by the camera.
+ {
+ FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc]
+ initWithName:[NSString
+ stringWithFormat:
+ @"%@%@",
+ @"dev.flutter.pigeon.camera_avfoundation.CameraApi.getMinZoomLevel",
+ messageChannelSuffix]
+ binaryMessenger:binaryMessenger
+ codec:FCPCameraApiGetCodec()];
+ if (api) {
+ NSCAssert([api respondsToSelector:@selector(getMinimumZoomLevel:)],
+ @"FCPCameraApi api (%@) doesn't respond to @selector(getMinimumZoomLevel:)", api);
+ [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) {
+ [api getMinimumZoomLevel:^(NSNumber *_Nullable output, FlutterError *_Nullable error) {
+ callback(wrapResult(output, error));
+ }];
+ }];
+ } else {
+ [channel setMessageHandler:nil];
+ }
+ }
+ /// Returns the maximum zoom level supported by the camera.
+ {
+ FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc]
+ initWithName:[NSString
+ stringWithFormat:
+ @"%@%@",
+ @"dev.flutter.pigeon.camera_avfoundation.CameraApi.getMaxZoomLevel",
+ messageChannelSuffix]
+ binaryMessenger:binaryMessenger
+ codec:FCPCameraApiGetCodec()];
+ if (api) {
+ NSCAssert([api respondsToSelector:@selector(getMaximumZoomLevel:)],
+ @"FCPCameraApi api (%@) doesn't respond to @selector(getMaximumZoomLevel:)", api);
+ [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) {
+ [api getMaximumZoomLevel:^(NSNumber *_Nullable output, FlutterError *_Nullable error) {
+ callback(wrapResult(output, error));
+ }];
+ }];
+ } else {
+ [channel setMessageHandler:nil];
+ }
+ }
+ /// Sets the zoom factor.
+ {
+ FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc]
+ initWithName:[NSString
+ stringWithFormat:
+ @"%@%@",
+ @"dev.flutter.pigeon.camera_avfoundation.CameraApi.setZoomLevel",
+ messageChannelSuffix]
+ binaryMessenger:binaryMessenger
+ codec:FCPCameraApiGetCodec()];
+ if (api) {
+ NSCAssert([api respondsToSelector:@selector(setZoomLevel:completion:)],
+ @"FCPCameraApi api (%@) doesn't respond to @selector(setZoomLevel:completion:)",
+ api);
+ [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) {
+ NSArray *args = message;
+ double arg_zoom = [GetNullableObjectAtIndex(args, 0) doubleValue];
+ [api setZoomLevel:arg_zoom
+ completion:^(FlutterError *_Nullable error) {
+ callback(wrapResult(nil, error));
+ }];
+ }];
+ } else {
+ [channel setMessageHandler:nil];
+ }
+ }
+ /// Pauses streaming of preview frames.
+ {
+ FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc]
+ initWithName:[NSString
+ stringWithFormat:
+ @"%@%@",
+ @"dev.flutter.pigeon.camera_avfoundation.CameraApi.pausePreview",
+ messageChannelSuffix]
+ binaryMessenger:binaryMessenger
+ codec:FCPCameraApiGetCodec()];
+ if (api) {
+ NSCAssert([api respondsToSelector:@selector(pausePreviewWithCompletion:)],
+ @"FCPCameraApi api (%@) doesn't respond to @selector(pausePreviewWithCompletion:)",
+ api);
+ [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) {
+ [api pausePreviewWithCompletion:^(FlutterError *_Nullable error) {
+ callback(wrapResult(nil, error));
+ }];
+ }];
+ } else {
+ [channel setMessageHandler:nil];
+ }
+ }
+ /// Resumes a previously paused preview stream.
+ {
+ FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc]
+ initWithName:[NSString
+ stringWithFormat:
+ @"%@%@",
+ @"dev.flutter.pigeon.camera_avfoundation.CameraApi.resumePreview",
+ messageChannelSuffix]
+ binaryMessenger:binaryMessenger
+ codec:FCPCameraApiGetCodec()];
+ if (api) {
+ NSCAssert([api respondsToSelector:@selector(resumePreviewWithCompletion:)],
+ @"FCPCameraApi api (%@) doesn't respond to @selector(resumePreviewWithCompletion:)",
+ api);
+ [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) {
+ [api resumePreviewWithCompletion:^(FlutterError *_Nullable error) {
+ callback(wrapResult(nil, error));
+ }];
+ }];
+ } else {
+ [channel setMessageHandler:nil];
+ }
+ }
+ /// Changes the camera used while recording video.
+ ///
+ /// This should only be called while video recording is active.
+ {
+ FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc]
+ initWithName:[NSString stringWithFormat:@"%@%@",
+ @"dev.flutter.pigeon.camera_avfoundation."
+ @"CameraApi.updateDescriptionWhileRecording",
+ messageChannelSuffix]
+ binaryMessenger:binaryMessenger
+ codec:FCPCameraApiGetCodec()];
+ if (api) {
+ NSCAssert([api respondsToSelector:@selector(updateDescriptionWhileRecordingCameraName:
+ completion:)],
+ @"FCPCameraApi api (%@) doesn't respond to "
+ @"@selector(updateDescriptionWhileRecordingCameraName:completion:)",
+ api);
+ [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) {
+ NSArray *args = message;
+ NSString *arg_cameraName = GetNullableObjectAtIndex(args, 0);
+ [api updateDescriptionWhileRecordingCameraName:arg_cameraName
+ completion:^(FlutterError *_Nullable error) {
+ callback(wrapResult(nil, error));
+ }];
+ }];
+ } else {
+ [channel setMessageHandler:nil];
+ }
+ }
+ /// Sets the file format used for taking pictures.
+ {
+ FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc]
+ initWithName:[NSString stringWithFormat:@"%@%@",
+ @"dev.flutter.pigeon.camera_avfoundation."
+ @"CameraApi.setImageFileFormat",
+ messageChannelSuffix]
+ binaryMessenger:binaryMessenger
+ codec:FCPCameraApiGetCodec()];
+ if (api) {
+ NSCAssert(
+ [api respondsToSelector:@selector(setImageFileFormat:completion:)],
+ @"FCPCameraApi api (%@) doesn't respond to @selector(setImageFileFormat:completion:)",
+ api);
+ [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) {
+ NSArray *args = message;
+ FCPPlatformImageFileFormat arg_format = [GetNullableObjectAtIndex(args, 0) integerValue];
+ [api setImageFileFormat:arg_format
+ completion:^(FlutterError *_Nullable error) {
+ callback(wrapResult(nil, error));
+ }];
+ }];
+ } else {
+ [channel setMessageHandler:nil];
+ }
+ }
}
NSObject *FCPCameraGlobalEventApiGetCodec(void) {
static FlutterStandardMessageCodec *sSharedObject = nil;
diff --git a/packages/camera/camera_avfoundation/lib/src/avfoundation_camera.dart b/packages/camera/camera_avfoundation/lib/src/avfoundation_camera.dart
index dc9f3c74d82..6f947863a3c 100644
--- a/packages/camera/camera_avfoundation/lib/src/avfoundation_camera.dart
+++ b/packages/camera/camera_avfoundation/lib/src/avfoundation_camera.dart
@@ -6,7 +6,6 @@ import 'dart:async';
import 'dart:math';
import 'package:camera_platform_interface/camera_platform_interface.dart';
-import 'package:flutter/foundation.dart';
import 'package:flutter/material.dart';
import 'package:flutter/services.dart';
import 'package:flutter/widgets.dart';
@@ -16,9 +15,6 @@ import 'messages.g.dart';
import 'type_conversion.dart';
import 'utils.dart';
-const MethodChannel _channel =
- MethodChannel('plugins.flutter.io/camera_avfoundation');
-
/// An iOS implementation of [CameraPlatform] based on AVFoundation.
class AVFoundationCamera extends CameraPlatform {
/// Creates a new AVFoundation-based [CameraPlatform] implementation instance.
@@ -100,19 +96,16 @@ class AVFoundationCamera extends CameraPlatform {
MediaSettings? mediaSettings,
) async {
try {
- final Map? reply = await _channel
- .invokeMapMethod('create', {
- 'cameraName': cameraDescription.name,
- 'resolutionPreset': null != mediaSettings?.resolutionPreset
- ? _serializeResolutionPreset(mediaSettings!.resolutionPreset!)
- : null,
- 'fps': mediaSettings?.fps,
- 'videoBitrate': mediaSettings?.videoBitrate,
- 'audioBitrate': mediaSettings?.audioBitrate,
- 'enableAudio': mediaSettings?.enableAudio ?? true,
- });
-
- return reply!['cameraId']! as int;
+ return await _hostApi.create(
+ cameraDescription.name,
+ PlatformMediaSettings(
+ resolutionPreset:
+ _pigeonResolutionPreset(mediaSettings?.resolutionPreset),
+ framesPerSecond: mediaSettings?.fps,
+ videoBitrate: mediaSettings?.videoBitrate,
+ audioBitrate: mediaSettings?.audioBitrate,
+ enableAudio: mediaSettings?.enableAudio ?? true,
+ ));
} on PlatformException catch (e) {
throw CameraException(e.code, e.message);
}
@@ -122,38 +115,26 @@ class AVFoundationCamera extends CameraPlatform {
Future initializeCamera(
int cameraId, {
ImageFormatGroup imageFormatGroup = ImageFormatGroup.unknown,
- }) {
+ }) async {
hostCameraHandlers.putIfAbsent(cameraId,
() => HostCameraMessageHandler(cameraId, cameraEventStreamController));
final Completer completer = Completer();
- onCameraInitialized(cameraId).first.then((CameraInitializedEvent value) {
+ unawaited(onCameraInitialized(cameraId)
+ .first
+ .then((CameraInitializedEvent value) {
completer.complete();
- });
+ }));
- _channel.invokeMapMethod(
- 'initialize',
- {
- 'cameraId': cameraId,
- 'imageFormatGroup': imageFormatGroup.name(),
- },
- ).catchError(
- // TODO(srawlins): This should return a value of the future's type. This
- // will fail upcoming analysis checks with
- // https://github.com/flutter/flutter/issues/105750.
- // ignore: body_might_complete_normally_catch_error
- (Object error, StackTrace stackTrace) {
- if (error is! PlatformException) {
- // ignore: only_throw_errors
- throw error;
- }
- completer.completeError(
- CameraException(error.code, error.message),
- stackTrace,
- );
- },
- );
+ try {
+ await _hostApi.initialize(cameraId, _pigeonImageFormat(imageFormatGroup));
+ } on PlatformException catch (e, s) {
+ completer.completeError(
+ CameraException(e.code, e.message),
+ s,
+ );
+ }
return completer.future;
}
@@ -164,10 +145,7 @@ class AVFoundationCamera extends CameraPlatform {
hostCameraHandlers.remove(cameraId);
handler?.dispose();
- await _channel.invokeMethod(
- 'dispose',
- {'cameraId': cameraId},
- );
+ await _hostApi.dispose(cameraId);
}
@override
@@ -206,43 +184,25 @@ class AVFoundationCamera extends CameraPlatform {
int cameraId,
DeviceOrientation orientation,
) async {
- await _channel.invokeMethod(
- 'lockCaptureOrientation',
- {
- 'cameraId': cameraId,
- 'orientation': serializeDeviceOrientation(orientation)
- },
- );
+ await _hostApi
+ .lockCaptureOrientation(serializeDeviceOrientation(orientation));
}
@override
Future unlockCaptureOrientation(int cameraId) async {
- await _channel.invokeMethod(
- 'unlockCaptureOrientation',
- {'cameraId': cameraId},
- );
+ await _hostApi.unlockCaptureOrientation();
}
@override
Future takePicture(int cameraId) async {
- final String? path = await _channel.invokeMethod(
- 'takePicture',
- {'cameraId': cameraId},
- );
-
- if (path == null) {
- throw CameraException(
- 'INVALID_PATH',
- 'The platform "$defaultTargetPlatform" did not return a path while reporting success. The platform should always return a valid path or report an error.',
- );
- }
-
+ final String path = await _hostApi.takePicture();
return XFile(path);
}
@override
- Future prepareForVideoRecording() =>
- _channel.invokeMethod('prepareForVideoRecording');
+ Future prepareForVideoRecording() async {
+ await _hostApi.prepareForVideoRecording();
+ }
@override
Future startVideoRecording(int cameraId,
@@ -253,14 +213,8 @@ class AVFoundationCamera extends CameraPlatform {
@override
Future startVideoCapturing(VideoCaptureOptions options) async {
- await _channel.invokeMethod(
- 'startVideoRecording',
- {
- 'cameraId': options.cameraId,
- 'maxVideoDuration': options.maxDuration?.inMilliseconds,
- 'enableStream': options.streamCallback != null,
- },
- );
+ // Max video duration is currently not supported.
+ await _hostApi.startVideoRecording(options.streamCallback != null);
if (options.streamCallback != null) {
_frameStreamController = _createStreamController();
@@ -271,33 +225,19 @@ class AVFoundationCamera extends CameraPlatform {
@override
Future stopVideoRecording(int cameraId) async {
- final String? path = await _channel.invokeMethod(
- 'stopVideoRecording',
- {'cameraId': cameraId},
- );
-
- if (path == null) {
- throw CameraException(
- 'INVALID_PATH',
- 'The platform "$defaultTargetPlatform" did not return a path while reporting success. The platform should always return a valid path or report an error.',
- );
- }
-
+ final String path = await _hostApi.stopVideoRecording();
return XFile(path);
}
@override
- Future pauseVideoRecording(int cameraId) => _channel.invokeMethod(
- 'pauseVideoRecording',
- {'cameraId': cameraId},
- );
+ Future pauseVideoRecording(int cameraId) async {
+ await _hostApi.pauseVideoRecording();
+ }
@override
- Future resumeVideoRecording(int cameraId) =>
- _channel.invokeMethod(
- 'resumeVideoRecording',
- {'cameraId': cameraId},
- );
+ Future resumeVideoRecording(int cameraId) async {
+ await _hostApi.resumeVideoRecording();
+ }
@override
Stream onStreamedFrameAvailable(int cameraId,
@@ -322,7 +262,7 @@ class AVFoundationCamera extends CameraPlatform {
}
Future _startPlatformStream() async {
- await _channel.invokeMethod('startImageStream');
+ await _hostApi.startImageStream();
_startStreamListener();
}
@@ -332,7 +272,7 @@ class AVFoundationCamera extends CameraPlatform {
_platformImageStreamSubscription =
cameraEventChannel.receiveBroadcastStream().listen((dynamic imageData) {
try {
- _channel.invokeMethod('receivedImageStreamData');
+ _hostApi.receivedImageStreamData();
} on PlatformException catch (e) {
throw CameraException(e.code, e.message);
}
@@ -342,7 +282,7 @@ class AVFoundationCamera extends CameraPlatform {
}
FutureOr _onFrameStreamCancel() async {
- await _channel.invokeMethod('stopImageStream');
+ await _hostApi.stopImageStream();
await _platformImageStreamSubscription?.cancel();
_platformImageStreamSubscription = null;
_frameStreamController = null;
@@ -354,140 +294,75 @@ class AVFoundationCamera extends CameraPlatform {
}
@override
- Future setFlashMode(int cameraId, FlashMode mode) =>
- _channel.invokeMethod(
- 'setFlashMode',
- {
- 'cameraId': cameraId,
- 'mode': _serializeFlashMode(mode),
- },
- );
+ Future setFlashMode(int cameraId, FlashMode mode) async {
+ await _hostApi.setFlashMode(_pigeonFlashMode(mode));
+ }
@override
- Future setExposureMode(int cameraId, ExposureMode mode) =>
- _channel.invokeMethod(
- 'setExposureMode',
- {
- 'cameraId': cameraId,
- 'mode': _serializeExposureMode(mode),
- },
- );
+ Future setExposureMode(int cameraId, ExposureMode mode) async {
+ await _hostApi.setExposureMode(_pigeonExposureMode(mode));
+ }
@override
- Future setExposurePoint(int cameraId, Point? point) {
+ Future setExposurePoint(int cameraId, Point? point) async {
assert(point == null || point.x >= 0 && point.x <= 1);
assert(point == null || point.y >= 0 && point.y <= 1);
- return _channel.invokeMethod(
- 'setExposurePoint',
- {
- 'cameraId': cameraId,
- 'reset': point == null,
- 'x': point?.x,
- 'y': point?.y,
- },
- );
+ await _hostApi.setExposurePoint(_pigeonPoint(point));
}
@override
Future getMinExposureOffset(int cameraId) async {
- final double? minExposureOffset = await _channel.invokeMethod(
- 'getMinExposureOffset',
- {'cameraId': cameraId},
- );
-
- return minExposureOffset!;
+ return _hostApi.getMinExposureOffset();
}
@override
Future getMaxExposureOffset(int cameraId) async {
- final double? maxExposureOffset = await _channel.invokeMethod(
- 'getMaxExposureOffset',
- {'cameraId': cameraId},
- );
-
- return maxExposureOffset!;
+ return _hostApi.getMaxExposureOffset();
}
@override
Future getExposureOffsetStepSize(int cameraId) async {
- final double? stepSize = await _channel.invokeMethod(
- 'getExposureOffsetStepSize',
- {'cameraId': cameraId},
- );
-
- return stepSize!;
+ // iOS has no step size.
+ return 0;
}
@override
Future setExposureOffset(int cameraId, double offset) async {
- final double? appliedOffset = await _channel.invokeMethod(
- 'setExposureOffset',
- {
- 'cameraId': cameraId,
- 'offset': offset,
- },
- );
-
- return appliedOffset!;
+ await _hostApi.setExposureOffset(offset);
+ // The platform API allows for implementations that have to adjust the
+ // target offset and return the actual offset used, but this
+ // implementation never adjusts the offset.
+ return offset;
}
@override
- Future setFocusMode(int cameraId, FocusMode mode) =>
- _channel.invokeMethod(
- 'setFocusMode',
- {
- 'cameraId': cameraId,
- 'mode': _serializeFocusMode(mode),
- },
- );
+ Future setFocusMode(int cameraId, FocusMode mode) async {
+ await _hostApi.setFocusMode(_pigeonFocusMode(mode));
+ }
@override
- Future setFocusPoint(int cameraId, Point? point) {
+ Future setFocusPoint(int cameraId, Point? point) async {
assert(point == null || point.x >= 0 && point.x <= 1);
assert(point == null || point.y >= 0 && point.y <= 1);
- return _channel.invokeMethod(
- 'setFocusPoint',
- {
- 'cameraId': cameraId,
- 'reset': point == null,
- 'x': point?.x,
- 'y': point?.y,
- },
- );
+ await _hostApi.setFocusPoint(_pigeonPoint(point));
}
@override
Future getMaxZoomLevel(int cameraId) async {
- final double? maxZoomLevel = await _channel.invokeMethod(
- 'getMaxZoomLevel',
- {'cameraId': cameraId},
- );
-
- return maxZoomLevel!;
+ return _hostApi.getMaxZoomLevel();
}
@override
Future getMinZoomLevel(int cameraId) async {
- final double? minZoomLevel = await _channel.invokeMethod(
- 'getMinZoomLevel',
- {'cameraId': cameraId},
- );
-
- return minZoomLevel!;
+ return _hostApi.getMinZoomLevel();
}
@override
Future setZoomLevel(int cameraId, double zoom) async {
try {
- await _channel.invokeMethod(
- 'setZoomLevel',
- {
- 'cameraId': cameraId,
- 'zoom': zoom,
- },
- );
+ await _hostApi.setZoomLevel(zoom);
} on PlatformException catch (e) {
throw CameraException(e.code, e.message);
}
@@ -495,40 +370,23 @@ class AVFoundationCamera extends CameraPlatform {
@override
Future pausePreview(int cameraId) async {
- await _channel.invokeMethod(
- 'pausePreview',
- {'cameraId': cameraId},
- );
+ await _hostApi.pausePreview();
}
@override
Future resumePreview(int cameraId) async {
- await _channel.invokeMethod(
- 'resumePreview',
- {'cameraId': cameraId},
- );
+ await _hostApi.resumePreview();
}
@override
Future setDescriptionWhileRecording(
CameraDescription description) async {
- await _channel.invokeMethod(
- 'setDescriptionWhileRecording',
- {
- 'cameraName': description.name,
- },
- );
+ await _hostApi.updateDescriptionWhileRecording(description.name);
}
@override
- Future setImageFileFormat(int cameraId, ImageFileFormat format) {
- return _channel.invokeMethod(
- 'setImageFileFormat',
- {
- 'cameraId': cameraId,
- 'fileFormat': format.name,
- },
- );
+ Future setImageFileFormat(int cameraId, ImageFileFormat format) async {
+ await _hostApi.setImageFileFormat(_pigeonImageFileFormat(format));
}
@override
@@ -536,12 +394,13 @@ class AVFoundationCamera extends CameraPlatform {
return Texture(textureId: cameraId);
}
- String _serializeFocusMode(FocusMode mode) {
+ /// Returns a [FocusMode]'s Pigeon representation.
+ PlatformFocusMode _pigeonFocusMode(FocusMode mode) {
switch (mode) {
case FocusMode.locked:
- return 'locked';
+ return PlatformFocusMode.locked;
case FocusMode.auto:
- return 'auto';
+ return PlatformFocusMode.auto;
}
// The enum comes from a different package, which could get a new value at
// any time, so provide a fallback that ensures this won't break when used
@@ -549,15 +408,16 @@ class AVFoundationCamera extends CameraPlatform {
// the switch rather than a `default` so that the linter will flag the
// switch as needing an update.
// ignore: dead_code
- return 'auto';
+ return PlatformFocusMode.auto;
}
- String _serializeExposureMode(ExposureMode mode) {
+ /// Returns an [ExposureMode]'s Pigeon representation.
+ PlatformExposureMode _pigeonExposureMode(ExposureMode mode) {
switch (mode) {
case ExposureMode.locked:
- return 'locked';
+ return PlatformExposureMode.locked;
case ExposureMode.auto:
- return 'auto';
+ return PlatformExposureMode.auto;
}
// The enum comes from a different package, which could get a new value at
// any time, so provide a fallback that ensures this won't break when used
@@ -565,20 +425,20 @@ class AVFoundationCamera extends CameraPlatform {
// the switch rather than a `default` so that the linter will flag the
// switch as needing an update.
// ignore: dead_code
- return 'auto';
+ return PlatformExposureMode.auto;
}
- /// Returns the flash mode as a String.
- String _serializeFlashMode(FlashMode flashMode) {
+ /// Returns a [FlashMode]'s Pigeon representation.
+ PlatformFlashMode _pigeonFlashMode(FlashMode flashMode) {
switch (flashMode) {
case FlashMode.off:
- return 'off';
+ return PlatformFlashMode.off;
case FlashMode.auto:
- return 'auto';
+ return PlatformFlashMode.auto;
case FlashMode.always:
- return 'always';
+ return PlatformFlashMode.always;
case FlashMode.torch:
- return 'torch';
+ return PlatformFlashMode.torch;
}
// The enum comes from a different package, which could get a new value at
// any time, so provide a fallback that ensures this won't break when used
@@ -586,24 +446,30 @@ class AVFoundationCamera extends CameraPlatform {
// the switch rather than a `default` so that the linter will flag the
// switch as needing an update.
// ignore: dead_code
- return 'off';
+ return PlatformFlashMode.off;
}
- /// Returns the resolution preset as a String.
- String _serializeResolutionPreset(ResolutionPreset resolutionPreset) {
+ /// Returns a [ResolutionPreset]'s Pigeon representation.
+ PlatformResolutionPreset _pigeonResolutionPreset(
+ ResolutionPreset? resolutionPreset) {
+ if (resolutionPreset == null) {
+ // Provide a default if one isn't provided, since the native side needs
+ // to set something.
+ return PlatformResolutionPreset.high;
+ }
switch (resolutionPreset) {
case ResolutionPreset.max:
- return 'max';
+ return PlatformResolutionPreset.max;
case ResolutionPreset.ultraHigh:
- return 'ultraHigh';
+ return PlatformResolutionPreset.ultraHigh;
case ResolutionPreset.veryHigh:
- return 'veryHigh';
+ return PlatformResolutionPreset.veryHigh;
case ResolutionPreset.high:
- return 'high';
+ return PlatformResolutionPreset.high;
case ResolutionPreset.medium:
- return 'medium';
+ return PlatformResolutionPreset.medium;
case ResolutionPreset.low:
- return 'low';
+ return PlatformResolutionPreset.low;
}
// The enum comes from a different package, which could get a new value at
// any time, so provide a fallback that ensures this won't break when used
@@ -611,7 +477,59 @@ class AVFoundationCamera extends CameraPlatform {
// the switch rather than a `default` so that the linter will flag the
// switch as needing an update.
// ignore: dead_code
- return 'max';
+ return PlatformResolutionPreset.max;
+ }
+
+ /// Returns an [ImageFormatGroup]'s Pigeon representation.
+ PlatformImageFormatGroup _pigeonImageFormat(ImageFormatGroup format) {
+ switch (format) {
+ // "unknown" is used to indicate the default.
+ case ImageFormatGroup.unknown:
+ case ImageFormatGroup.bgra8888:
+ return PlatformImageFormatGroup.bgra8888;
+ case ImageFormatGroup.yuv420:
+ return PlatformImageFormatGroup.yuv420;
+ case ImageFormatGroup.jpeg:
+ case ImageFormatGroup.nv21:
+ // Fall through.
+ }
+ // The enum comes from a different package, which could get a new value at
+ // any time, so provide a fallback that ensures this won't break when used
+ // with a version that contains new values. This is deliberately outside
+ // the switch rather than a `default` so that the linter will flag the
+ // switch as needing an update.
+ // TODO(stuartmorgan): Consider throwing an UnsupportedError, instead of
+ // doing fallback, when a specific unsupported format is requested. This
+ // would require a breaking change at this layer and the app-facing layer.
+ return PlatformImageFormatGroup.bgra8888;
+ }
+
+ /// Returns an [ImageFileFormat]'s Pigeon representation.
+ PlatformImageFileFormat _pigeonImageFileFormat(ImageFileFormat format) {
+ switch (format) {
+ case ImageFileFormat.heif:
+ return PlatformImageFileFormat.heif;
+ case ImageFileFormat.jpeg:
+ return PlatformImageFileFormat.jpeg;
+ }
+ // The enum comes from a different package, which could get a new value at
+ // any time, so provide a fallback that ensures this won't break when used
+ // with a version that contains new values. This is deliberately outside
+ // the switch rather than a `default` so that the linter will flag the
+ // switch as needing an update.
+ // TODO(stuartmorgan): Consider throwing an UnsupportedError, instead of
+ // doing fallback, when a specific unsupported format is requested. This
+ // would require a breaking change at this layer and the app-facing layer.
+ // ignore: dead_code
+ return PlatformImageFileFormat.jpeg;
+ }
+
+ /// Returns a [Point]'s Pigeon representation.
+ PlatformPoint? _pigeonPoint(Point? point) {
+ if (point == null) {
+ return null;
+ }
+ return PlatformPoint(x: point.x, y: point.y);
}
}
diff --git a/packages/camera/camera_avfoundation/lib/src/messages.g.dart b/packages/camera/camera_avfoundation/lib/src/messages.g.dart
index a4b399217eb..4290eb02ed2 100644
--- a/packages/camera/camera_avfoundation/lib/src/messages.g.dart
+++ b/packages/camera/camera_avfoundation/lib/src/messages.g.dart
@@ -52,11 +52,38 @@ enum PlatformExposureMode {
locked,
}
+enum PlatformFlashMode {
+ off,
+ auto,
+ always,
+ torch,
+}
+
enum PlatformFocusMode {
auto,
locked,
}
+/// Pigeon version of ImageFileFormat.
+enum PlatformImageFileFormat {
+ jpeg,
+ heif,
+}
+
+enum PlatformImageFormatGroup {
+ bgra8888,
+ yuv420,
+}
+
+enum PlatformResolutionPreset {
+ low,
+ medium,
+ high,
+ veryHigh,
+ ultraHigh,
+ max,
+}
+
class PlatformCameraDescription {
PlatformCameraDescription({
required this.name,
@@ -131,6 +158,73 @@ class PlatformCameraState {
}
}
+class PlatformMediaSettings {
+ PlatformMediaSettings({
+ required this.resolutionPreset,
+ this.framesPerSecond,
+ this.videoBitrate,
+ this.audioBitrate,
+ required this.enableAudio,
+ });
+
+ PlatformResolutionPreset resolutionPreset;
+
+ int? framesPerSecond;
+
+ int? videoBitrate;
+
+ int? audioBitrate;
+
+ bool enableAudio;
+
+ Object encode() {
+ return