Use commonAncestor from offline model

Closes MOB-373

Change the schema of offline predictions to include a results array and a separate commonAncestor, matching Vision API responses.

Configure offline image predictions to set common ancestor mode.
This commit is contained in:
Kirk van Gorkom
2025-03-02 23:40:47 -08:00
parent 21938c56c5
commit cff703fbfe
3 changed files with 61 additions and 22 deletions

View File

@@ -3,7 +3,11 @@ import { Alert, Platform } from "react-native";
import Config from "react-native-config";
import RNFS from "react-native-fs";
import type { Location } from "vision-camera-plugin-inatvision";
import { getPredictionsForImage, getPredictionsForLocation } from "vision-camera-plugin-inatvision";
import {
getPredictionsForImage,
getPredictionsForLocation,
MODE
} from "vision-camera-plugin-inatvision";
const modelFiles = {
// The iOS model and taxonomy files always have to be referenced in the
@@ -62,7 +66,8 @@ export const predictImage = ( uri: string, location: Location ) => {
geomodelPath,
location: hasLocation
? location
: undefined
: undefined,
mode: MODE.COMMON_ANCESTOR
} );
};

View File

@@ -30,10 +30,16 @@ const useOfflineSuggestions = (
tryOfflineSuggestions: boolean
}
): {
offlineSuggestions: OfflineSuggestion[];
offlineSuggestions: {
results: OfflineSuggestion[],
commonAncestor: OfflineSuggestion | undefined
};
} => {
const realm = useRealm( );
const [offlineSuggestions, setOfflineSuggestions] = useState<OfflineSuggestion[]>( [] );
const [offlineSuggestions, setOfflineSuggestions] = useState<{
results: OfflineSuggestion[],
commonAncestor: OfflineSuggestion | undefined
}>( { results: [], commonAncestor: undefined } );
const [error, setError] = useState( null );
const {
@@ -43,10 +49,14 @@ const useOfflineSuggestions = (
useEffect( ( ) => {
const predictOffline = async ( ) => {
let rawPredictions = [];
let commonAncestor;
try {
const location = { latitude, longitude };
const result = await predictImage( photoUri, location );
rawPredictions = result.predictions;
// Destructuring here leads to different errors from the linter.
// eslint-disable-next-line prefer-destructuring
commonAncestor = result.commonAncestor;
} catch ( predictImageError ) {
onFetchError( { isOnline: false } );
logger.error( "Error predicting image offline", predictImageError );
@@ -56,27 +66,44 @@ const useOfflineSuggestions = (
// but we're offline so we only need the local list from realm
// and don't need to fetch taxon from the API
const iconicTaxa = realm?.objects( "Taxon" ).filtered( "isIconic = true" );
const branchIDs = rawPredictions.map( t => t.taxon_id );
const branchIDs = [...rawPredictions.map( t => t.taxon_id ), ...( commonAncestor
? [commonAncestor.taxon_id]
: [] )];
const iconicTaxonName = iconicTaxa?.find( t => branchIDs.indexOf( t.id ) >= 0 )?.name;
// This function handles either regular or common ancestor predictions as input objects. I'm
// not going to define an interface for them in the middle of refactoring and changing logic.
// eslint-disable-next-line @typescript-eslint/no-explicit-any
const formatPrediction = ( prediction: any ): OfflineSuggestion => ( {
combined_score: prediction.combined_score,
taxon: {
id: Number( prediction.taxon_id ),
name: prediction.name,
rank_level: prediction.rank_level,
iconic_taxon_name: iconicTaxonName
}
} );
// using the same rank level for displaying predictions in AI Camera
// this is all temporary, since we ultimately want predictions
// returned similarly to how we return them on web; this is returning a
// single branch like on the AI Camera 2023-12-08
const formattedPredictions = rawPredictions?.reverse( )
.filter( prediction => prediction.rank_level <= 40 )
.map( prediction => ( {
combined_score: prediction.combined_score,
taxon: {
id: Number( prediction.taxon_id ),
name: prediction.name,
rank_level: prediction.rank_level,
iconic_taxon_name: iconicTaxonName
}
} ) );
setOfflineSuggestions( formattedPredictions );
.map( prediction => formatPrediction( prediction ) );
const commonAncestorSuggestion = commonAncestor
? formatPrediction( commonAncestor )
: undefined;
const returnValue = {
results: formattedPredictions,
commonAncestor: commonAncestorSuggestion
};
setOfflineSuggestions( returnValue );
onFetched( { isOnline: false } );
return formattedPredictions;
return returnValue;
};
if ( photoUri && tryOfflineSuggestions ) {

View File

@@ -63,27 +63,34 @@ export const useSuggestions = ( photoUri, options ) => {
} );
const usingOfflineSuggestions = tryOfflineSuggestions || (
offlineSuggestions.length > 0
offlineSuggestions?.results?.length > 0
&& ( !onlineSuggestions || onlineSuggestions?.results?.length === 0 )
);
const hasOnlineSuggestionResults = onlineSuggestions?.results?.length > 0;
const unfilteredSuggestions = hasOnlineSuggestionResults
? onlineSuggestions.results
: offlineSuggestions;
const unfilteredSuggestions = useMemo(
( ) => ( hasOnlineSuggestionResults
? onlineSuggestions.results || []
: offlineSuggestions.results || [] ),
[hasOnlineSuggestionResults, onlineSuggestions, offlineSuggestions]
);
const commonAncestor = hasOnlineSuggestionResults
? onlineSuggestions?.commonAncestor
: offlineSuggestions?.commonAncestor;
// since we can calculate this, there's no need to store it in state
const suggestions = useMemo(
( ) => filterSuggestions(
unfilteredSuggestions,
usingOfflineSuggestions,
onlineSuggestions?.common_ancestor
commonAncestor
),
[
unfilteredSuggestions,
usingOfflineSuggestions,
onlineSuggestions?.common_ancestor
commonAncestor
]
);