Вот код:
// TensorFlow Lite interpreter; assigned on the Firebase download callback thread,
// read after initLatch releases — NOTE(review): consider making this volatile or
// publishing it via the latch only; confirm the threading contract.
Interpreter interpreter;
// Set to true once the model is ready; volatile so the waiting thread sees the write.
private volatile boolean isInitialized = false;
// Released when the (possibly slow) model download finishes, so init can time-bound the wait.
private CountDownLatch initLatch = new CountDownLatch(1);
/**
 * Initializes the text model: starts the asynchronous Firebase model download and
 * blocks the calling thread until the download callback releases {@code initLatch}.
 *
 * @param context       Android context, stored in {@code mcontext}
 * @param textModelName currently unused by this method (download uses a fixed model name)
 * @throws IOException if the download does not complete within 30 seconds, if the
 *                     model failed to initialize, or if the wait is interrupted
 */
public void initTextModel(Context context, String textModelName) throws IOException {
    this.mcontext = context;
    downloadModel();
    try {
        // Bound the wait so a stalled download cannot hang the caller forever.
        boolean success = initLatch.await(30, TimeUnit.SECONDS);
        if (!success) {
            // FIX: message previously claimed "60s" while the actual timeout is 30s.
            throw new IOException("Model download timeout took longer than 30s");
        }
        if (!isInitialized) {
            throw new IOException("Model initialization failed");
        }
        Log.i(TAG, "Text model initialization completed successfully");
    } catch (InterruptedException e) {
        // FIX: restore the interrupt status so callers/executors can observe it.
        Thread.currentThread().interrupt();
        throw new IOException("Model initialization interrupted", e);
    }
}
/**
 * Starts an asynchronous Firebase ML model download for "exporter_text_model".
 * On success it creates the TFLite {@link Interpreter}, marks initialization done,
 * and releases {@code initLatch}; on failure it releases the latch as well so the
 * waiting thread in {@code initTextModel} fails fast instead of timing out.
 */
private synchronized void downloadModel() {
    Log.w(TAG, "downloadModel: downloading model..." );
    CustomModelDownloadConditions conditions = new CustomModelDownloadConditions.Builder()
            .requireWifi() // Also possible: .requireCharging() and .requireDeviceIdle()
            .build();
    FirebaseModelDownloader.getInstance()
            .getModel("exporter_text_model", DownloadType.LOCAL_MODEL_UPDATE_IN_BACKGROUND, conditions)
            .addOnSuccessListener(new OnSuccessListener<CustomModel>() { // FIX: parameterized, was a raw type
                @Override
                public void onSuccess(CustomModel model) {
                    // The CustomModel object contains the local path of the model file,
                    // which we use to instantiate a TensorFlow Lite interpreter.
                    File modelFile = model.getFile();
                    if (modelFile != null) {
                        interpreter = new Interpreter(modelFile);
                        // FIX: this flag was never set anywhere, so initTextModel always
                        // threw "Model initialization failed" even after a good download.
                        isInitialized = true;
                    }
                    Log.w(TAG, "onSuccess: Model downloaded" );
                    initLatch.countDown();
                }
            })
            .addOnFailureListener(new OnFailureListener() {
                @Override
                public void onFailure(Exception e) {
                    // FIX: without a failure listener, a failed download left the latch
                    // un-counted and the caller blocked the full 30s before reporting a
                    // timeout instead of the real error.
                    Log.e(TAG, "downloadModel: model download failed", e);
                    initLatch.countDown();
                }
            });
}
Решения, которые я пробовал:
- Добавил запасной вариант (fallback), но время загрузки всё равно превышает минуту.
Сначала я думал, что проблема в правах доступа, поэтому я добавил разрешения.
Подробнее здесь: https://stackoverflow.com/questions/797 ... oid-studio
Мобильная версия