ARCamera MVP: Basic screen with full screen camera and prediction labels (#684)

Johannes Klein
2023-07-15 07:53:18 +02:00
committed by GitHub
parent 5166647b83
commit 5f81c49f85
27 changed files with 2629 additions and 112 deletions

View File

@@ -84,7 +84,7 @@ jobs:
E2E_TEST_PASSWORD: ${{ secrets.E2E_TEST_PASSWORD }}
JWT_ANONYMOUS_API_SECRET: ${{ secrets.JWT_ANONYMOUS_API_SECRET }}
GMAPS_API_KEY: ${{ secrets.GMAPS_API_KEY }}
run: printf 'API_URL=https://stagingapi.inaturalist.org/v2\nOAUTH_API_URL=https://staging.inaturalist.org\nJWT_ANONYMOUS_API_SECRET=%s\nOAUTH_CLIENT_ID=%s\nOAUTH_CLIENT_SECRET=%s\nE2E_TEST_USERNAME=%s\nE2E_TEST_PASSWORD=%s\nGMAPS_API_KEY=%s\n' "JWT_ANONYMOUS_API_SECRET" "$OAUTH_CLIENT_ID" "$OAUTH_CLIENT_SECRET" "$E2E_TEST_USERNAME" "$E2E_TEST_PASSWORD" "$GMAPS_API_KEY" > .env
run: printf 'API_URL=https://stagingapi.inaturalist.org/v2\nOAUTH_API_URL=https://staging.inaturalist.org\nJWT_ANONYMOUS_API_SECRET=%s\nOAUTH_CLIENT_ID=%s\nOAUTH_CLIENT_SECRET=%s\nE2E_TEST_USERNAME=%s\nE2E_TEST_PASSWORD=%s\nGMAPS_API_KEY=%s\nANDROID_MODEL_FILE_NAME=small_inception_tf1.tflite\nANDROID_TAXONOMY_FILE_NAME=small_export_tax.csv\nIOS_MODEL_FILE_NAME=small_inception_tf1.mlmodel\nIOS_TAXONOMY_FILE_NAME=small_export_tax.json\n' "JWT_ANONYMOUS_API_SECRET" "$OAUTH_CLIENT_ID" "$OAUTH_CLIENT_SECRET" "$E2E_TEST_USERNAME" "$E2E_TEST_PASSWORD" "$GMAPS_API_KEY" > .env
- name: Create keystore.properties file
env:
ANDROID_KEY_STORE_PASSWORD: ${{ secrets.ANDROID_KEY_STORE_PASSWORD }}
@@ -100,7 +100,11 @@ jobs:
keytool -genkeypair -v -noprompt -storetype PKCS12 -keystore release.keystore -alias "$ANDROID_ALIAS" -keyalg RSA -keysize 2048 -validity 10000 -storepass "$ANDROID_KEY_STORE_PASSWORD" -keypass "$ANDROID_KEY_PASSWORD" -dname "CN=mqttserver.ibm.com, OU=ID, O=IBM, L=Hursley, S=Hants, C=GB"
- name: Move keystore
run: mv release.keystore android/app/release.keystore
# Download the example model; otherwise an error alert will be shown on app start. Requires .env
- name: Download a fake cv model and taxonomy file into the assets folder
run: npm run add-fake-model
# The macos-latest runner has three Java versions pre-installed; if one is not specified, as done here, the build step errors and requires at least Java 11
# This step is needed for the APK build, but it somehow breaks the emulator setup if run earlier, so it is placed here
- name: Set up JDK 11

View File

@@ -86,7 +86,13 @@ jobs:
E2E_TEST_USERNAME: ${{ secrets.E2E_TEST_USERNAME }}
E2E_TEST_PASSWORD: ${{ secrets.E2E_TEST_PASSWORD }}
JWT_ANONYMOUS_API_SECRET: ${{ secrets.JWT_ANONYMOUS_API_SECRET }}
run: printf 'API_URL=https://stagingapi.inaturalist.org/v2\nOAUTH_API_URL=https://staging.inaturalist.org\nJWT_ANONYMOUS_API_SECRET=%s\nOAUTH_CLIENT_ID=%s\nOAUTH_CLIENT_SECRET=%s\nE2E_TEST_USERNAME=%s\nE2E_TEST_PASSWORD=%s\n' "JWT_ANONYMOUS_API_SECRET" "$OAUTH_CLIENT_ID" "$OAUTH_CLIENT_SECRET" "$E2E_TEST_USERNAME" "$E2E_TEST_PASSWORD" > .env
run: printf 'API_URL=https://stagingapi.inaturalist.org/v2\nOAUTH_API_URL=https://staging.inaturalist.org\nJWT_ANONYMOUS_API_SECRET=%s\nOAUTH_CLIENT_ID=%s\nOAUTH_CLIENT_SECRET=%s\nE2E_TEST_USERNAME=%s\nE2E_TEST_PASSWORD=%s\nGMAPS_API_KEY=%s\nANDROID_MODEL_FILE_NAME=small_inception_tf1.tflite\nANDROID_TAXONOMY_FILE_NAME=small_export_tax.csv\nIOS_MODEL_FILE_NAME=optimized_model_v1.mlmodel\nIOS_TAXONOMY_FILE_NAME=taxonomy_v1.json\n' "JWT_ANONYMOUS_API_SECRET" "$OAUTH_CLIENT_ID" "$OAUTH_CLIENT_SECRET" "$E2E_TEST_USERNAME" "$E2E_TEST_PASSWORD" "$GMAPS_API_KEY" > .env
# Download all linked model files not included in the repository (otherwise the build errors out); requires the .env file
- name: Download the example cv model and taxonomy file into the ios folder
run: npm run add-example-model
- name: Download a fake cv model and taxonomy file into the ios folder
run: npm run add-fake-model
# https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#setting-an-environment-variable
# This will be available for all subsequent steps

.gitignore
View File

@@ -78,4 +78,10 @@ artifacts/
*.log
# VisualStudioCode #
.vscode
# CV Model #
**/assets/**/*.csv
*.tflite
*.mlmodel
**/ios/*.json

View File

@@ -17,7 +17,15 @@ See [CONTRIBUTING](CONTRIBUTING.md) for guidelines on contributing to this proje
1. Run `npm install`
1. Run `npx pod-install` or `cd ios && pod install` from the root directory
1. `cp env.example .env.staging` for staging and `cp env.example .env` for production and fill in appropriate values. This is not part of the code repo (contains secrets, such as OAuth client ID).
1. To run on Android, run `cp android/example-keystore.properties android/keystore.properties` and fill in the relevant values. If you are a member of iNat staff, get them from another member of iNat staff.
1. Add AR Camera example model and taxonomy files.
1. Add the example model files by executing `npm run add-example-model`.
1. If the download script fails, the sample model files are available from the [`small_model.zip`](https://github.com/inaturalist/SeekReactNative/releases/tag/v2.9.1-138) release.
1. On Android, these files are named `small_inception_tf1.tflite` and `small_export_tax.csv`. Create a camera folder within Android assets (i.e. `android/app/src/debug/assets/camera`) and place the files there.
1. On iOS, these files are named `small_inception_tf1.mlmodel` and `small_export_tax.json` and should be added to the `ios` folder.
1. Add AR Camera model and taxonomy files.
1. On Android, the current file names are specified in these env variables `ANDROID_MODEL_FILE_NAME` and `ANDROID_TAXONOMY_FILE_NAME`. Create a camera folder within Android assets (i.e. `android/app/src/main/assets/camera`) and place the files there.
1. On iOS, the current file names are specified in these env variables `IOS_MODEL_FILE_NAME` and `IOS_TAXONOMY_FILE_NAME` and should be added to the `ios` folder.
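
Note: on app start, `addARCameraFiles` (see `sharedHelpers/cvModel.js` in this diff) looks the files up by exactly the names configured in `.env`, so the file names placed in the assets and `ios` folders must match those values; otherwise the app shows a "No model found" alert.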
### Set up pre-commit hooks

View File

@@ -19,7 +19,13 @@ module.exports = {
styles: "./src/styles"
}
}],
"react-native-reanimated/plugin" // Reanimated 2 plugin has to be listed last https://docs.swmansion.com/react-native-reanimated/docs/fundamentals/installation/
// Reanimated 2 plugin has to be listed last https://docs.swmansion.com/react-native-reanimated/docs/fundamentals/installation/
[
"react-native-reanimated/plugin",
{
globals: ["__inatVision"]
}
]
],
env: {
production: {

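For context (not part of this diff): VisionCamera v2 frame-processor plugins are exposed to worklets as global functions named `__<pluginName>`, so Reanimated's Babel plugin needs `__inatVision` whitelisted as a known global; otherwise worklets referencing it fail to compile. A minimal sketch of the call this enables, assuming the plugin follows that convention (see FrameProcessorCamera.js below for the wrapped version):

import { useFrameProcessor } from "react-native-vision-camera";
import { dirModel, dirTaxonomy } from "sharedHelpers/cvModel";

const frameProcessor = useFrameProcessor( frame => {
  "worklet";
  // __inatVision is injected by the native plugin at runtime; the
  // `globals` entry above keeps the worklet compiler from rejecting
  // it as an undeclared identifier
  const results = __inatVision( frame, dirModel, dirTaxonomy, "0.7" );
  console.log( results );
}, [] );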
View File

@@ -17,6 +17,12 @@ E2E_TEST_PASSWORD=test-password
GMAPS_API_KEY=some-key
# Model file names for Android and iOS
ANDROID_MODEL_FILE_NAME=small_inception_tf1.tflite
ANDROID_TAXONOMY_FILE_NAME=small_export_tax.csv
IOS_MODEL_FILE_NAME=small_inception_tf1.mlmodel
IOS_TAXONOMY_FILE_NAME=small_export_tax.json
# Fastlane
IOS_PROVISIONING_PROFILE_NAME="provisioning profile name"
IOS_SHARE_BUNDLE_ID="share bundle ID"

View File

@@ -66,7 +66,7 @@ target "iNaturalistReactNative" do
target.build_configurations.each do | build_config |
# the following two lines allow the app to build with XCode 14.3 RC
# https://stackoverflow.com/questions/72729591/fbreactnativespec-h-error-after-upgrading-from-0-68-x-to-0-69-0/74487309#74487309
build_config.build_settings["SWIFT_VERSION"] = "5.0"
build_config.build_settings["SWIFT_VERSION"] = "5.2"
build_config.build_settings["IPHONEOS_DEPLOYMENT_TARGET"] = "12.4"
if target.name == "react-native-config"
build_config.build_settings["ENVFILE"] = envfiles[build_config.name]

View File

@@ -462,6 +462,8 @@ PODS:
- React
- React-callinvoker
- React-Core
- VisionCameraPluginInatVision (0.1.0):
- React-Core
- Yoga (1.14.0)
DEPENDENCIES:
@@ -538,6 +540,7 @@ DEPENDENCIES:
- RNSVG (from `../node_modules/react-native-svg`)
- RNVectorIcons (from `../node_modules/react-native-vector-icons`)
- VisionCamera (from `../node_modules/react-native-vision-camera`)
- VisionCameraPluginInatVision (from `../node_modules/vision-camera-plugin-inatvision`)
- Yoga (from `../node_modules/react-native/ReactCommon/yoga`)
SPEC REPOS:
@@ -689,6 +692,8 @@ EXTERNAL SOURCES:
:path: "../node_modules/react-native-vector-icons"
VisionCamera:
:path: "../node_modules/react-native-vision-camera"
VisionCameraPluginInatVision:
:path: "../node_modules/vision-camera-plugin-inatvision"
Yoga:
:path: "../node_modules/react-native/ReactCommon/yoga"
@@ -767,6 +772,7 @@ SPEC CHECKSUMS:
RNSVG: 53c661b76829783cdaf9b7a57258f3d3b4c28315
RNVectorIcons: fcc2f6cb32f5735b586e66d14103a74ce6ad61f8
VisionCamera: f41b08470c9ae0a01be0d5e48f9fdd61602aa51f
VisionCameraPluginInatVision: 79bb258db75218889c74d0897ecba676492c4def
Yoga: 065f0b74dba4832d6e328238de46eb72c5de9556
PODFILE CHECKSUM: 98b5e3459383d514f704de4aef72ab8faf44b800

View File

@@ -19,6 +19,10 @@
8B65ED3129F575C10054CCEF /* MainInterface.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 8B65ED2F29F575C10054CCEF /* MainInterface.storyboard */; };
8B65ED3529F575C10054CCEF /* iNaturalistReactNative-ShareExtension.appex in Embed Foundation Extensions */ = {isa = PBXBuildFile; fileRef = 8B65ED2B29F575C10054CCEF /* iNaturalistReactNative-ShareExtension.appex */; settings = {ATTRIBUTES = (RemoveHeadersOnCopy, ); }; };
8B65ED3B29F575FE0054CCEF /* ShareViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8B65ED3A29F575FE0054CCEF /* ShareViewController.swift */; };
8FC904582A4C22A10005DBD8 /* optimized_model_v1.mlmodel in Sources */ = {isa = PBXBuildFile; fileRef = 8FC904562A4C22A10005DBD8 /* optimized_model_v1.mlmodel */; };
8FC904592A4C22A10005DBD8 /* taxonomy_v1.json in Resources */ = {isa = PBXBuildFile; fileRef = 8FC904572A4C22A10005DBD8 /* taxonomy_v1.json */; };
8FE03BBB2A5EFCB2001B35BA /* small_inception_tf1.mlmodel in Sources */ = {isa = PBXBuildFile; fileRef = 8FE03BB92A5EFCB2001B35BA /* small_inception_tf1.mlmodel */; };
8FE03BBC2A5EFCB2001B35BA /* small_export_tax.json in Resources */ = {isa = PBXBuildFile; fileRef = 8FE03BBA2A5EFCB2001B35BA /* small_export_tax.json */; };
A252B2AEA64E47C9AC1D20E8 /* Whitney-Light-Pro.otf in Resources */ = {isa = PBXBuildFile; fileRef = BA9D41ECEBFA4C38B74009B3 /* Whitney-Light-Pro.otf */; };
BA2479FA3D7B40A7BEF7B3CD /* Whitney-Medium-Pro.otf in Resources */ = {isa = PBXBuildFile; fileRef = D09FA3A0162844FF80A5EF96 /* Whitney-Medium-Pro.otf */; };
D1A158A7F6C9E77B651BB4AA /* libPods-iNaturalistReactNative-ShareExtension.a in Frameworks */ = {isa = PBXBuildFile; fileRef = ADBDD0D061046941F61CA31D /* libPods-iNaturalistReactNative-ShareExtension.a */; };
@@ -79,6 +83,10 @@
8B65ED3C29F576D00054CCEF /* iNaturalistReactNative-ShareExtension.entitlements */ = {isa = PBXFileReference; lastKnownFileType = text.plist.entitlements; path = "iNaturalistReactNative-ShareExtension.entitlements"; sourceTree = "<group>"; };
8B8BAD0429F54EB300CE5C9F /* iNaturalistReactNative.entitlements */ = {isa = PBXFileReference; lastKnownFileType = text.plist.entitlements; name = iNaturalistReactNative.entitlements; path = iNaturalistReactNative/iNaturalistReactNative.entitlements; sourceTree = "<group>"; };
8BF3756EB416D21D28518C7D /* Pods-iNaturalistReactNative.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-iNaturalistReactNative.debug.xcconfig"; path = "Target Support Files/Pods-iNaturalistReactNative/Pods-iNaturalistReactNative.debug.xcconfig"; sourceTree = "<group>"; };
8FC904562A4C22A10005DBD8 /* optimized_model_v1.mlmodel */ = {isa = PBXFileReference; lastKnownFileType = file.mlmodel; path = optimized_model_v1.mlmodel; sourceTree = "<group>"; };
8FC904572A4C22A10005DBD8 /* taxonomy_v1.json */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.json; path = taxonomy_v1.json; sourceTree = "<group>"; };
8FE03BB92A5EFCB2001B35BA /* small_inception_tf1.mlmodel */ = {isa = PBXFileReference; lastKnownFileType = file.mlmodel; path = small_inception_tf1.mlmodel; sourceTree = "<group>"; };
8FE03BBA2A5EFCB2001B35BA /* small_export_tax.json */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.json; path = small_export_tax.json; sourceTree = "<group>"; };
ADBDD0D061046941F61CA31D /* libPods-iNaturalistReactNative-ShareExtension.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = "libPods-iNaturalistReactNative-ShareExtension.a"; sourceTree = BUILT_PRODUCTS_DIR; };
B935DA49196EBFE90895C8DD /* libPods-iNaturalistReactNative.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = "libPods-iNaturalistReactNative.a"; sourceTree = BUILT_PRODUCTS_DIR; };
BA9D41ECEBFA4C38B74009B3 /* Whitney-Light-Pro.otf */ = {isa = PBXFileReference; explicitFileType = undefined; fileEncoding = 9; includeInIndex = 0; lastKnownFileType = unknown; name = "Whitney-Light-Pro.otf"; path = "../assets/fonts/Whitney-Light-Pro.otf"; sourceTree = "<group>"; };
@@ -135,6 +143,10 @@
13B07FAE1A68108700A75B9A /* iNaturalistReactNative */ = {
isa = PBXGroup;
children = (
8FE03BBA2A5EFCB2001B35BA /* small_export_tax.json */,
8FE03BB92A5EFCB2001B35BA /* small_inception_tf1.mlmodel */,
8FC904562A4C22A10005DBD8 /* optimized_model_v1.mlmodel */,
8FC904572A4C22A10005DBD8 /* taxonomy_v1.json */,
8B8BAD0429F54EB300CE5C9F /* iNaturalistReactNative.entitlements */,
13B07FAF1A68108700A75B9A /* AppDelegate.h */,
13B07FB01A68108700A75B9A /* AppDelegate.mm */,
@@ -345,9 +357,11 @@
buildActionMask = 2147483647;
files = (
81AB9BB82411601600AC10FF /* LaunchScreen.storyboard in Resources */,
8FE03BBC2A5EFCB2001B35BA /* small_export_tax.json in Resources */,
374CB22F29943E63005885ED /* Whitney-BookItalic-Pro.otf in Resources */,
13B07FBF1A68108700A75B9A /* Images.xcassets in Resources */,
A252B2AEA64E47C9AC1D20E8 /* Whitney-Light-Pro.otf in Resources */,
8FC904592A4C22A10005DBD8 /* taxonomy_v1.json in Resources */,
BA2479FA3D7B40A7BEF7B3CD /* Whitney-Medium-Pro.otf in Resources */,
78D1665778AF449EA28D8F35 /* INatIcon.ttf in Resources */,
4FB3B444D46A4115B867B9CC /* inaturalisticons.ttf in Resources */,
@@ -510,7 +524,9 @@
isa = PBXSourcesBuildPhase;
buildActionMask = 2147483647;
files = (
8FC904582A4C22A10005DBD8 /* optimized_model_v1.mlmodel in Sources */,
13B07FBC1A68108700A75B9A /* AppDelegate.mm in Sources */,
8FE03BBB2A5EFCB2001B35BA /* small_inception_tf1.mlmodel in Sources */,
13B07FC11A68108700A75B9A /* main.m in Sources */,
);
runOnlyForDeploymentPostprocessing = 0;
@@ -629,7 +645,7 @@
PRODUCT_NAME = iNaturalistReactNative;
SWIFT_OBJC_BRIDGING_HEADER = "iNaturalistReactNative-Bridging-Header.h";
SWIFT_OPTIMIZATION_LEVEL = "-Onone";
SWIFT_VERSION = 5.0;
SWIFT_VERSION = 5.2;
TARGETED_DEVICE_FAMILY = "1,2";
VERSIONING_SYSTEM = "apple-generic";
};
@@ -659,7 +675,7 @@
PRODUCT_BUNDLE_IDENTIFIER = org.inaturalist.iNatMobileBeta;
PRODUCT_NAME = iNaturalistReactNative;
SWIFT_OBJC_BRIDGING_HEADER = "iNaturalistReactNative-Bridging-Header.h";
SWIFT_VERSION = 5.0;
SWIFT_VERSION = 5.2;
TARGETED_DEVICE_FAMILY = "1,2";
VERSIONING_SYSTEM = "apple-generic";
};
@@ -840,7 +856,7 @@
SWIFT_EMIT_LOC_STRINGS = YES;
SWIFT_OBJC_BRIDGING_HEADER = "iNaturalistReactNative-ShareExtension/iNaturalistReactNative-ShareExtension-Bridging-Header.h";
SWIFT_OPTIMIZATION_LEVEL = "-Onone";
SWIFT_VERSION = 5.0;
SWIFT_VERSION = 5.2;
TARGETED_DEVICE_FAMILY = "1,2";
};
name = Debug;
@@ -883,7 +899,7 @@
SWIFT_EMIT_LOC_STRINGS = YES;
SWIFT_OBJC_BRIDGING_HEADER = "iNaturalistReactNative-ShareExtension/iNaturalistReactNative-ShareExtension-Bridging-Header.h";
SWIFT_OPTIMIZATION_LEVEL = "-O";
SWIFT_VERSION = 5.0;
SWIFT_VERSION = 5.2;
TARGETED_DEVICE_FAMILY = "1,2";
};
name = Release;

package-lock.json
View File

File diff suppressed because it is too large

View File

@@ -23,7 +23,9 @@
"e2e:test:ios": "MOCK_MODE=e2e npx detox test --configuration ios.release",
"e2e:test": "npm run e2e:test:ios && npm run e2e:test:android",
"e2e": "npm run e2e:build && npm run e2e:test",
"icons": "./scripts/update-icon-font.sh"
"icons": "./scripts/update-icon-font.sh",
"add-example-model": "node scripts/add-example-model.js",
"add-fake-model": "node scripts/add-fake-model.js"
},
"dependencies": {
"@babel/eslint-parser": "^7.21.3",
@@ -114,7 +116,8 @@
"react-tinder-card": "^1.6.2",
"realm": "^11.8.0",
"sanitize-html": "^2.10.0",
"use-debounce": "^9.0.4"
"use-debounce": "^9.0.4",
"vision-camera-plugin-inatvision": "github:inaturalist/vision-camera-plugin-inatvision"
},
"devDependencies": {
"@babel/core": "^7.21.4",
@@ -132,6 +135,8 @@
"babel-jest": "^29.5.0",
"babel-plugin-module-resolver": "^5.0.0",
"detox": "^20.9.0",
"dotenv": "^16.3.1",
"download": "^8.0.0",
"eslint": "^8.40.0",
"eslint-config-airbnb": "^19.0.4",
"eslint-import-resolver-babel-module": "^5.3.2",

View File

@@ -0,0 +1,46 @@
const fs = require( "fs" ).promises;
const path = require( "path" );
const download = require( "download" );
const modelURL
= "https://github.com/inaturalist/SeekReactNative/releases/download/v2.9.1-138/small_model.zip";
const modelPath = path.join( __dirname, "..", "temp", "model" );
const examplePath = path.join( modelPath, "tf1 2" );
const androidModelFile = "small_inception_tf1.tflite";
const androidTaxonomyFile = "small_export_tax.csv";
const iosModelFile = "small_inception_tf1.mlmodel";
const iosTaxonomyFile = "small_export_tax.json";
const androidModelPath = path.join( examplePath, androidModelFile );
const androidTaxonomyPath = path.join( examplePath, androidTaxonomyFile );
const iosModelPath = path.join( examplePath, iosModelFile );
const iosTaxonomyPath = path.join( examplePath, iosTaxonomyFile );
const androidDestinationPath
= path.join( __dirname, "..", "android", "app", "src", "debug", "assets", "camera" );
const iosDestinationPath = path.join( __dirname, "..", "ios" );
( async () => {
console.log( `Downloading example model from '${modelURL}'...` );
await download( modelURL, modelPath, {
extract: true
} );
console.log( "Downloaded!" );
console.log( "Copying model files to assets folder..." );
await fs.mkdir( androidDestinationPath, { recursive: true } );
await fs.copyFile( androidModelPath, path.join( androidDestinationPath, androidModelFile ) );
await fs.copyFile(
androidTaxonomyPath,
path.join( androidDestinationPath, androidTaxonomyFile )
);
await fs.mkdir( iosDestinationPath, { recursive: true } );
await fs.copyFile( iosModelPath, path.join( iosDestinationPath, iosModelFile ) );
await fs.copyFile( iosTaxonomyPath, path.join( iosDestinationPath, iosTaxonomyFile ) );
console.log( "Delete temp model folder and its contents..." );
await fs.rm( modelPath, { recursive: true } );
console.log( "Done!" );
} )();
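
The download/extract/copy/cleanup sequence here is duplicated almost verbatim in scripts/add-fake-model.js below; a hypothetical shared helper (not part of this commit) could factor it out:

const fs = require( "fs" ).promises;
const path = require( "path" );
const download = require( "download" );

// Download and extract the release zip, copy each [source, destination]
// pair into place, then remove the temp folder again
async function installModelFiles( modelURL, modelPath, copies ) {
  await download( modelURL, modelPath, { extract: true } );
  await Promise.all( copies.map( async ( [source, destination] ) => {
    await fs.mkdir( path.dirname( destination ), { recursive: true } );
    await fs.copyFile( source, destination );
  } ) );
  await fs.rm( modelPath, { recursive: true } );
}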

scripts/add-fake-model.js
View File

@@ -0,0 +1,65 @@
const fs = require( "fs" ).promises;
const path = require( "path" );
const download = require( "download" );
require( "dotenv" ).config();
const modelURL
= "https://github.com/inaturalist/SeekReactNative/releases/download/v2.9.1-138/small_model.zip";
const modelPath = path.join( __dirname, "..", "temp", "model" );
const androidModelPath = path.join( modelPath, "tf1 2", "small_inception_tf1.tflite" );
const androidTaxonomyPath = path.join( modelPath, "tf1 2", "small_export_tax.csv" );
const iosModelPath = path.join( modelPath, "tf1 2", "small_inception_tf1.mlmodel" );
const iosTaxonomyPath = path.join( modelPath, "tf1 2", "small_export_tax.json" );
const androidDestinationPath = path.join(
__dirname,
"..",
"android",
"app",
"src",
"main",
"assets",
"camera"
);
const iosDestinationPath = path.join( __dirname, "..", "ios" );
( async () => {
console.log( `Downloading example model from '${modelURL}'...` );
await download( modelURL, modelPath, {
extract: true
} );
console.log( "Downloaded!" );
console.log( "Reading output filenames from .env file..." );
const androidModelFile = process.env.ANDROID_MODEL_FILE_NAME;
const androidTaxonomyFile = process.env.ANDROID_TAXONOMY_FILE_NAME;
const iosModelFile = process.env.IOS_MODEL_FILE_NAME;
const iosTaxonomyFile = process.env.IOS_TAXONOMY_FILE_NAME;
console.log( "Copying model files to assets folder..." );
await fs.mkdir( androidDestinationPath, { recursive: true } );
await fs.copyFile(
androidModelPath,
path.join( androidDestinationPath, androidModelFile )
);
await fs.copyFile(
androidTaxonomyPath,
path.join( androidDestinationPath, androidTaxonomyFile )
);
await fs.mkdir( iosDestinationPath, { recursive: true } );
await fs.copyFile(
iosModelPath,
path.join( iosDestinationPath, iosModelFile )
);
await fs.copyFile(
iosTaxonomyPath,
path.join( iosDestinationPath, iosTaxonomyFile )
);
console.log( "Delete temp model folder and its contents..." );
await fs.rm( modelPath, { recursive: true } );
console.log( "Done!" );
} )();
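
Because the destination file names come from `.env`, a missing variable currently surfaces as an opaque TypeError from path.join. A guard like the following (an assumption, not part of the commit) would fail faster with a clearer message:

if ( !androidModelFile || !androidTaxonomyFile || !iosModelFile || !iosTaxonomyFile ) {
  throw new Error(
    "Missing *_MODEL_FILE_NAME / *_TAXONOMY_FILE_NAME variables in .env; see env.example"
  );
}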

View File

@@ -10,6 +10,7 @@ import React, { useCallback, useEffect } from "react";
import { AppState, LogBox } from "react-native";
import DeviceInfo from "react-native-device-info";
import Orientation from "react-native-orientation-locker";
import { addARCameraFiles } from "sharedHelpers/cvModel";
import useCurrentUser from "sharedHooks/useCurrentUser";
import useObservationUpdatesWhenFocused from "sharedHooks/useObservationUpdatesWhenFocused";
import useShare from "sharedHooks/useShare";
@@ -70,6 +71,10 @@ const App = ( { children }: Props ): Node => {
return ( ) => subscription?.remove();
}, [] );
useEffect( () => {
addARCameraFiles();
}, [] );
useEffect( ( ) => {
const checkForSignedInUser = async ( ) => {
// check to see if this is a fresh install of the app

View File

@@ -0,0 +1,136 @@
// @flow
import { Text, View } from "components/styledComponents";
import type { Node } from "react";
import React, { useRef, useState } from "react";
import { Platform, StatusBar, StyleSheet } from "react-native";
import {
useCameraDevices
} from "react-native-vision-camera";
import FrameProcessorCamera from "./FrameProcessorCamera";
const styles = StyleSheet.create( {
label: {
position: "absolute",
top: 48,
zIndex: 1,
padding: 4,
marginHorizontal: 20,
backgroundColor: "#000000",
fontSize: 26,
color: "white",
textAlign: "center"
}
} );
const ARCamera = (): Node => {
const [results, setResult] = useState( [] );
const devices = useCameraDevices();
const device = devices.back;
// Johannes (June 2023): I read through the native code of the legacy inatcamera
// that is triggered when using ref.current.takePictureAsync(),
// and to me it seems everything should be handled by vision-camera itself,
// with the orientation stuff patched by the current fork.
// However, there is also some Exif and device-orientation related code
// that I have not checked. Those parts we would hoist into the JS side if not done yet.
const camera = useRef<any>( null );
const handleTaxaDetected = cvResults => {
/*
Using FrameProcessorCamera currently yields cvResults like this on Android
[
{
"stateofmatter": [
{"ancestor_ids": [Array], "name": xx, "rank": xx, "score": xx, "taxon_id": xx}
]
},
{
"order": [
{"ancestor_ids": [Array], "name": xx, "rank": xx, "score": xx, "taxon_id": xx}
]
},
{
"species": [
{"ancestor_ids": [Array], "name": xx, "rank": xx, "score": xx, "taxon_id": xx}
]
}
]
*/
/*
Using FrameProcessorCamera currently yields cvResults like this on iOS (= top prediction only)
[
{"name": "Aves", "rank": 50, "score": 0.7627944946289062, "taxon_id": 3}
]
*/
console.log( "cvResults :>> ", cvResults );
let predictions = [];
if ( Platform.OS === "ios" ) {
predictions = cvResults;
} else {
predictions = cvResults.map( result => {
const rank = Object.keys( result )[0];
const prediction = result[rank][0];
prediction.rank = rank;
return prediction;
} );
}
setResult( predictions );
};
const handleClassifierError = error => {
console.log( "handleClassifierError error.message :>> ", error.message );
// TODO: when we hit this error, the classifier itself failed.
// We should show an error message and maybe also disable the ARCamera.
};
const handleDeviceNotSupported = error => {
console.log( "handleDeviceNotSupported error.message :>> ", error.message );
// TODO: when we hit this error, something with the current device is not supported.
// We should show an error message depending on the error and change the way we use it.
};
const handleCaptureError = error => {
console.log( "handleCaptureError error.message :>> ", error.message );
// TODO: when we hit this error, taking a photo did not work correctly
// We should show an error message and do something if the error persists.
};
const handleCameraError = error => {
console.log( "handleCameraError error.message :>> ", error.message );
// TODO: this handler catches errors that do not fit any of the above categories.
};
const handleLog = event => {
// event = { log: "string" }
console.log( "handleLog event :>> ", event );
// TODO: this handles incoming logs from the vision-camera-plugin-inatvision,
// can be used for debugging, added to a logfile, etc.
};
return (
<View className="flex-1 bg-black">
<StatusBar hidden />
{results.map( ( result: { rank: string, name: string } ) => (
<Text key={result.rank} style={styles.label}>
{result.name}
</Text>
) )}
{device && (
<FrameProcessorCamera
cameraRef={camera}
device={device}
onTaxaDetected={handleTaxaDetected}
onClassifierError={handleClassifierError}
onDeviceNotSupported={handleDeviceNotSupported}
onCaptureError={handleCaptureError}
onCameraError={handleCameraError}
onLog={handleLog}
/>
)}
</View>
);
};
export default ARCamera;

View File

@@ -0,0 +1,163 @@
import { useIsFocused } from "@react-navigation/native";
import type { Node } from "react";
import React, { useCallback, useRef, useState } from "react";
import { Animated, Platform, StyleSheet } from "react-native";
import { Gesture, GestureDetector } from "react-native-gesture-handler";
import { Camera } from "react-native-vision-camera";
import useDeviceOrientation from "sharedHooks/useDeviceOrientation";
import useIsForeground from "sharedHooks/useIsForeground";
import FocusSquare from "./FocusSquare";
type Props = {
cameraRef: Object,
device: Object,
onClassifierError: Function,
onDeviceNotSupported: Function,
onCaptureError: Function,
onCameraError: Function,
frameProcessor?: Function,
frameProcessorFps?: number,
};
// A container for the Camera component holding the logic
// shared by the StandardCamera and ARCamera use cases
const CameraContainer = ( {
cameraRef,
device,
onClassifierError,
onDeviceNotSupported,
onCaptureError,
onCameraError,
frameProcessor,
frameProcessorFps
}: Props ): Node => {
const [focusAvailable, setFocusAvailable] = useState( true );
const [tappedCoordinates, setTappedCoordinates] = useState( null );
const singleTapToFocusAnimation = useRef( new Animated.Value( 0 ) ).current;
// check if camera page is active
const isFocused = useIsFocused();
const isForeground = useIsForeground();
const isActive = isFocused && isForeground;
const { deviceOrientation } = useDeviceOrientation();
const singleTapToFocus = async ( { x, y } ) => {
// If the device doesn't support focus, or focus is currently unavailable,
// we don't want to do anything and show no animation
if ( !device.supportsFocus || !focusAvailable ) {
return;
}
try {
singleTapToFocusAnimation.setValue( 1 );
setTappedCoordinates( { x, y } );
await cameraRef.current.focus( { x, y } );
} catch ( e ) {
// Android often catches the following error from the Camera X library
// but it doesn't seem to affect functionality, so we're ignoring this error
// and throwing other errors
const startFocusError = e?.message?.includes(
"Cancelled by another startFocusAndMetering"
);
if ( !startFocusError ) {
throw e;
}
}
};
const singleTap = Gesture.Tap()
.runOnJS( true )
.maxDuration( 250 )
.numberOfTaps( 1 )
.onStart( e => singleTapToFocus( e ) );
const onError = useCallback(
error => {
// error is a CameraRuntimeError =
// { code: string, message: string, cause?: {} }
console.log( "error", error );
// If there is no error code, log the error
// and return because we don't know what to do with it
if ( !error.code ) {
console.log( "Camera runtime error without error code:" );
console.log( "error", error );
return;
}
// If the error code is "device/focus-not-supported" disable focus
if ( error.code === "device/focus-not-supported" ) {
setFocusAvailable( false );
return;
}
// If it is any other "device/" error, pass the error code to onDeviceNotSupported
if ( error.code.includes( "device/" ) ) {
console.log( "error :>> ", error );
onDeviceNotSupported( error.code );
return;
}
if ( error.code.includes( "capture/" ) ) {
console.log( "error :>> ", error );
onCaptureError( error.code );
return;
}
// If the error code is "frame-processor/unavailable" handle the error as classifier error
if ( error.code === "frame-processor/unavailable" ) {
onClassifierError( error );
return;
}
if ( error.code.includes( "permission/" ) ) {
console.log( "error :>> ", error );
if ( error.code === "permission/camera-permission-denied" ) {
// No camera permission
// In Seek we do not have a PermissionGate wrapper component,
// so we need to handle this error there.
// Here we can just log it for now, it should in principle never be hit,
// because if we don't have permission the screen is not shown.
return;
}
}
onCameraError( error.code );
},
[
onClassifierError,
onDeviceNotSupported,
onCaptureError,
onCameraError
]
);
return (
<>
<GestureDetector gesture={Gesture.Exclusive( singleTap )}>
<Camera
// Shared props between StandardCamera and ARCamera
photo
enableZoomGesture
isActive={isActive}
style={[StyleSheet.absoluteFill]}
onError={e => onError( e )}
// In Android the camera won't set the orientation metadata
// correctly without this, but in iOS it won't display the
// preview correctly *with* it
orientation={Platform.OS === "android"
? deviceOrientation
: null}
ref={cameraRef}
device={device}
// Props for ARCamera only
frameProcessor={frameProcessor}
frameProcessorFps={frameProcessorFps}
/>
</GestureDetector>
<FocusSquare
singleTapToFocusAnimation={singleTapToFocusAnimation}
tappedCoordinates={tappedCoordinates}
/>
</>
);
};
export default CameraContainer;

View File

@@ -1,78 +0,0 @@
// @flow
import { useIsFocused } from "@react-navigation/native";
import type { Node } from "react";
import React, { useRef, useState } from "react";
import { Animated, StyleSheet } from "react-native";
import { Gesture, GestureDetector } from "react-native-gesture-handler";
import { Camera } from "react-native-vision-camera";
import useIsForeground from "sharedHooks/useIsForeground";
import FocusSquare from "./FocusSquare";
type Props = {
camera: Object,
device: Object,
orientation?: any
}
const CameraView = ( { camera, device, orientation }: Props ): Node => {
const [tappedCoordinates, setTappedCoordinates] = useState( null );
const singleTapToFocusAnimation = useRef( new Animated.Value( 0 ) ).current;
// check if camera page is active
const isFocused = useIsFocused( );
const isForeground = useIsForeground( );
const isActive = isFocused && isForeground;
const singleTapToFocus = async ( { x, y } ) => {
// If the device doesn't support focus, we don't want to do anything and show no animation
if ( !device.supportsFocus ) {
return;
}
try {
singleTapToFocusAnimation.setValue( 1 );
setTappedCoordinates( { x, y } );
await camera.current.focus( { x, y } );
} catch ( e ) {
// Android often catches the following error from the Camera X library
// but it doesn't seem to affect functionality, so we're ignoring this error
// and throwing other errors
const startFocusError = e?.message?.includes( "Cancelled by another startFocusAndMetering" );
if ( !startFocusError ) {
throw e;
}
}
};
const singleTap = Gesture.Tap( )
.runOnJS( true )
.maxDuration( 250 )
.numberOfTaps( 1 )
.onStart( e => {
singleTapToFocus( e );
} );
return (
<>
<GestureDetector gesture={Gesture.Exclusive( singleTap )}>
<Camera
ref={camera}
style={[
StyleSheet.absoluteFill
]}
device={device}
isActive={isActive}
photo
enableZoomGesture
orientation={orientation}
/>
</GestureDetector>
<FocusSquare
singleTapToFocusAnimation={singleTapToFocusAnimation}
tappedCoordinates={tappedCoordinates}
/>
</>
);
};
export default CameraView;

View File

@@ -0,0 +1,95 @@
// @flow
import type { Node } from "react";
import React, {
useEffect
} from "react";
import { Platform } from "react-native";
import * as REA from "react-native-reanimated";
import {
useFrameProcessor
} from "react-native-vision-camera";
import { dirModel, dirTaxonomy } from "sharedHelpers/cvModel";
import * as InatVision from "vision-camera-plugin-inatvision";
import CameraContainer from "./CameraContainer";
type Props = {
cameraRef: Object,
device: Object,
onTaxaDetected: Function,
onClassifierError: Function,
onDeviceNotSupported: Function,
onCaptureError: Function,
onCameraError: Function,
onLog: Function
};
// Johannes: when I copied over the native code from the legacy react-native-camera on Android
// this value had to be a string. On iOS I changed the API to also accept a string (was number).
// Maybe the intention would be clearer if we refactored to use a number here.
const confidenceThreshold = "0.7";
const FrameProcessorCamera = ( {
cameraRef,
device,
onTaxaDetected,
onClassifierError,
onDeviceNotSupported,
onCaptureError,
onCameraError,
onLog
}: Props ): Node => {
useEffect( () => {
// This registers a listener for the frame processor plugin's log events
// The iOS part exposes no logging, so calling it there would crash
if ( Platform.OS === "android" ) {
InatVision.addLogListener( event => {
// The vision-plugin events are in this format { log: "string" }
onLog( event );
} );
}
return () => {
InatVision.removeLogListener();
};
}, [onLog] );
const frameProcessor = useFrameProcessor(
frame => {
"worklet";
// Reminder: this is a worklet, running on the UI thread.
try {
const results = InatVision.inatVision(
frame,
dirModel,
dirTaxonomy,
confidenceThreshold
);
REA.runOnJS( onTaxaDetected )( results );
} catch ( classifierError ) {
console.log( `Error: ${classifierError.message}` );
REA.runOnJS( onClassifierError )( classifierError );
}
},
[confidenceThreshold]
);
return (
<CameraContainer
cameraRef={cameraRef}
device={device}
onClassifierError={onClassifierError}
onDeviceNotSupported={onDeviceNotSupported}
onCaptureError={onCaptureError}
onCameraError={onCameraError}
frameProcessor={frameProcessor}
// A value of 1 indicates that the frame processor gets executed once per second.
// This roughly equals the setting of the legacy camera of 1000ms between predictions,
// i.e. what taxaDetectionInterval was set to.
frameProcessorFps={1}
/>
);
};
export default FrameProcessorCamera;

View File

@@ -50,7 +50,7 @@ import useDeviceOrientation, {
import useTranslation from "sharedHooks/useTranslation";
import colors from "styles/tailwindColors";
import CameraView from "./CameraView";
import CameraContainer from "./CameraContainer";
import DiscardChangesSheet from "./DiscardChangesSheet";
import FadeInOutView from "./FadeInOutView";
import PhotoPreview from "./PhotoPreview";
@@ -424,17 +424,13 @@ const StandardCamera = ( ): Node => {
/>
<View className="relative flex-1">
{device && (
<CameraView
<CameraContainer
cameraRef={camera}
device={device}
camera={camera}
orientation={
// In Android the camera won't set the orientation metadata
// correctly without this, but in iOS it won't display the
// preview correctly *with* it
Platform.OS === "android"
? deviceOrientation
: null
}
onClassifierError={error => console.log( "error :>> ", error )}
onDeviceNotSupported={error => console.log( "error :>> ", error )}
onCaptureError={error => console.log( "error :>> ", error )}
onCameraError={error => console.log( "error :>> ", error )}
/>
)}
<FadeInOutView savingPhoto={savingPhoto} />

View File

@@ -266,6 +266,8 @@ Do-not-collect-stability-and-usage-data-using-third-party-services = Do not coll
Dont-have-an-account = Don't have an account? Sign up
During-app-start-no-model-found = No computer vision model was found during app start. The AR camera will not be available.
EDIT-COMMENT = EDIT COMMENT
EDIT-LOCATION = EDIT LOCATION
@@ -479,6 +481,8 @@ No-comments-or-ids-to-display = No comments or ids to display
No-Location = No Location
No-model-found = No model found
none = none
No-photos-found = No photos found. If this is your first time opening the app and giving permissions, try restarting the app.

View File

@@ -143,6 +143,7 @@
"Display-Name": "Display Name",
"Do-not-collect-stability-and-usage-data-using-third-party-services": "Do not collect stability and usage data using third-party services",
"Dont-have-an-account": "Don't have an account? Sign up",
"During-app-start-no-model-found": "During app start there was no computer vision model found. There will be no AR camera.",
"EDIT-COMMENT": "EDIT COMMENT",
"EDIT-LOCATION": "EDIT LOCATION",
"EMAIL": "EMAIL",
@@ -285,6 +286,7 @@
},
"No-comments-or-ids-to-display": "No comments or ids to display",
"No-Location": "No Location",
"No-model-found": "No model found",
"none": "none",
"No-photos-found": "No photos found. If this is your first time opening the app and giving permissions, try restarting the app.",
"no-rights-reserved-cc0-cc0": {

View File

@@ -266,6 +266,8 @@ Do-not-collect-stability-and-usage-data-using-third-party-services = Do not coll
Dont-have-an-account = Don't have an account? Sign up
During-app-start-no-model-found = No computer vision model was found during app start. The AR camera will not be available.
EDIT-COMMENT = EDIT COMMENT
EDIT-LOCATION = EDIT LOCATION
@@ -479,6 +481,8 @@ No-comments-or-ids-to-display = No comments or ids to display
No-Location = No Location
No-model-found = No model found
none = none
No-photos-found = No photos found. If this is your first time opening the app and giving permissions, try restarting the app.

View File

@@ -77,6 +77,7 @@ const CustomTabBar = ( { state, descriptors, navigation }: Props ): Node => {
currentRoute.includes( "PhotoGallery" )
|| currentRoute.includes( "GroupPhotos" )
|| currentRoute.includes( "StandardCamera" )
|| currentRoute.includes( "ARCamera" )
|| currentRoute.includes( "SoundRecorder" )
|| currentRoute.includes( "ObsEdit" )
|| currentRoute.includes( "AddID" )

View File

@@ -1,6 +1,7 @@
import { createBottomTabNavigator } from "@react-navigation/bottom-tabs";
import About from "components/About";
import AddID from "components/AddID/AddID";
import ARCamera from "components/Camera/ARCamera";
import StandardCamera from "components/Camera/StandardCamera";
import Explore from "components/Explore/Explore";
import Identify from "components/Identify/Identify";
@@ -93,6 +94,27 @@ const StandardCameraWithPermission = ( ) => {
);
};
const ARCameraWithPermission = ( ) => {
if ( usesAndroid10Permissions ) {
// WRITE_EXTERNAL_STORAGE is deprecated after Android 10
// https://developer.android.com/training/data-storage/shared/media#access-other-apps-files
return (
<PermissionGate
permission={PermissionsAndroid.PERMISSIONS.WRITE_EXTERNAL_STORAGE}
>
<PermissionGate permission={PermissionsAndroid.PERMISSIONS.CAMERA}>
<ARCamera />
</PermissionGate>
</PermissionGate>
);
}
return (
<PermissionGate permission={PermissionsAndroid.PERMISSIONS.CAMERA}>
<ARCamera />
</PermissionGate>
);
};
const SoundRecorderWithPermission = ( ) => {
if ( usesAndroid10Permissions ) {
return (
@@ -314,7 +336,7 @@ const BottomTabs = ( ) => {
/>
<Tab.Screen
name="ARCamera"
component={PlaceholderComponent}
component={ARCameraWithPermission}
options={{ ...hideHeader, orientation: "all", unmountOnBlur: true }}
/>
<Tab.Screen

View File

@@ -0,0 +1,106 @@
// @flow
import i18next from "i18next";
import { Alert, Platform } from "react-native";
import Config from "react-native-config";
import RNFS from "react-native-fs";
import { log } from "../../react-native-logs.config";
const logger = log.extend( "cvModel" );
const modelFiles = {
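// Note: Xcode compiles a bundled .mlmodel into an .mlmodelc directory at
// build time, hence the "c" appended to the iOS model file name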
IOSMODEL: `${Config.IOS_MODEL_FILE_NAME}c`,
IOSTAXONOMY: Config.IOS_TAXONOMY_FILE_NAME,
ANDROIDMODEL: Config.ANDROID_MODEL_FILE_NAME,
ANDROIDTAXONOMY: Config.ANDROID_TAXONOMY_FILE_NAME
};
export const dirModel: string = Platform.select( {
ios: `${RNFS.DocumentDirectoryPath}/${modelFiles.IOSMODEL}`,
android: `${RNFS.DocumentDirectoryPath}/${modelFiles.ANDROIDMODEL}`
} );
export const dirTaxonomy: string = Platform.select( {
ios: `${RNFS.DocumentDirectoryPath}/${modelFiles.IOSTAXONOMY}`,
android: `${RNFS.DocumentDirectoryPath}/${modelFiles.ANDROIDTAXONOMY}`
} );
const addCameraFilesAndroid = () => {
const copyFilesAndroid = ( source, destination ) => {
RNFS.copyFileAssets( source, destination )
.then( () => {
console.log( `copied file from ${source} to ${destination}` );
} )
.catch( error => {
console.log(
error,
`error copying file from ${source} to ${destination}`
);
} );
};
RNFS.readDirAssets( "camera" ).then( results => {
const model = modelFiles.ANDROIDMODEL;
const taxonomy = modelFiles.ANDROIDTAXONOMY;
const hasModel = results.find( r => r.name === model );
// Android writes over existing files
if ( hasModel !== undefined ) {
logger.debug( "Found model asset found with filename", model );
copyFilesAndroid( `camera/${model}`, dirModel );
copyFilesAndroid( `camera/${taxonomy}`, dirTaxonomy );
} else {
logger.debug( "No model asset found to copy into document directory." );
Alert.alert(
i18next.t( "No-model-found" ),
i18next.t( "During-app-start-no-model-found" )
);
}
} );
};
const addCameraFilesiOS = () => {
const copyFilesiOS = ( source, destination ) => {
RNFS.copyFile( source, destination )
.then( () => {
console.log( `copied file from ${source} to ${destination}` );
} )
.catch( error => {
console.log(
error,
`error copying file from ${source} to ${destination}`
);
} );
};
RNFS.readDir( RNFS.MainBundlePath ).then( results => {
// iOS will error out during build if these files are not found,
// because they are linked in the Xcode project
const model = modelFiles.IOSMODEL;
const taxonomy = modelFiles.IOSTAXONOMY;
const hasModel = results.find( r => r.name === model );
// iOS writes over existing files
if ( hasModel !== undefined ) {
copyFilesiOS( `${RNFS.MainBundlePath}/${model}`, dirModel );
copyFilesiOS( `${RNFS.MainBundlePath}/${taxonomy}`, dirTaxonomy );
} else {
logger.debug( "No model asset found to copy into document directory." );
Alert.alert(
i18next.t( "No-model-found" ),
i18next.t( "During-app-start-no-model-found" )
);
}
} );
};
export const addARCameraFiles = async () => {
// RNFS overwrites whatever files existed before
if ( Platform.OS === "android" ) {
addCameraFilesAndroid();
} else if ( Platform.OS === "ios" ) {
addCameraFilesiOS();
}
};
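
A hypothetical companion helper (not part of this commit) could let callers verify the copied files before enabling the AR camera; `RNFS.exists` is part of react-native-fs and is already mocked in the test setup below:

export const modelFilesPresent = async ( ): Promise<boolean> => {
  // Both the model and the taxonomy must have been copied successfully
  const [modelExists, taxonomyExists] = await Promise.all( [
    RNFS.exists( dirModel ),
    RNFS.exists( dirTaxonomy )
  ] );
  return modelExists && taxonomyExists;
};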

View File

@@ -16,6 +16,8 @@ import {
mockUseCameraDevices
} from "./vision-camera/vision-camera";
jest.mock( "vision-camera-plugin-inatvision" );
jest.mock( "@sayem314/react-native-keep-awake" );
jest.mock( "react-native/Libraries/EventEmitter/NativeEventEmitter" );
@@ -196,10 +198,18 @@ jest.mock( "react-native-fs", ( ) => {
DocumentDirectoryPath: "document/directory/path",
exists: jest.fn( async ( ) => true ),
moveFile: async ( ) => "testdata",
copyFile: async ( ) => "testdata",
stat: jest.fn( ( ) => ( {
mtime: 123
} ) ),
readFile: jest.fn( ( ) => "testdata" )
readFile: jest.fn( ( ) => "testdata" ),
readDir: jest.fn( async ( ) => ( [
{
ctime: 123,
mtime: 123,
name: "testdata"
}
] ) )
};
return RNFS;

View File

@@ -28,7 +28,7 @@ const mockValue = {
};
const mockView = <View />;
jest.mock( "components/Camera/CameraView", () => ( {
jest.mock( "components/Camera/CameraContainer", () => ( {
__esModule: true,
default: ( ) => mockView
} ) );