OpenGL and Camera Preview - blending together gets “over saturated” color - Android Q&A, Help & Troubleshooting

Hello everyone,
I have an Android OpenGL problem on my Galaxy S3 (Android 4.0.4) which I really can't fix.
Everything works fine on the Galaxy S1 and on an S2, which seems to have almost the same GPU.
I'm trying to make an AR app, but I always get a problem with transparent pixels on top of my camera preview. It only appears when the pixels are transparent and looks like "burned colors", a value overflow, or something like that.
So please tell me what I'm doing wrong, or try it if you have an S3. Maybe mine is just broken? I created a small test program for you. Please take a look at this:
Code:
public class MainActivity extends Activity {
private GLSurfaceView mGLView;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
mGLView = new GLSurfaceView(this);
mGLView.setEGLContextClientVersion(2);
mGLView.setEGLConfigChooser(8, 8, 8, 8, 16, 0);
mGLView.getHolder().setFormat(PixelFormat.TRANSLUCENT);
mGLView.setZOrderOnTop(true);
GameRenderer renderer = new GameRenderer();
mGLView.setRenderer(renderer);
setContentView(new CameraView(this), new LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT));
addContentView(mGLView, new LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT));
}
@Override
protected void onPause() {
super.onPause();
mGLView.onPause();
}
@Override
protected void onResume() {
super.onResume();
mGLView.onResume();
}
}
The CameraView looks like this:
Code:
public class CameraView extends SurfaceView implements SurfaceHolder.Callback{
SurfaceHolder surfaceHolder;
Camera camera;
public CameraView(Context context, AttributeSet attrs) {
super(context, attrs);
surfaceHolder = getHolder();
surfaceHolder.addCallback(this);
}
/**
* @param context
*/
public CameraView(Context context) {
super(context);
surfaceHolder = getHolder();
surfaceHolder.addCallback(this);
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
try {
Camera.Parameters parameters = camera.getParameters();
parameters.setPreviewSize(w, h);
camera.setParameters(parameters);
} catch (Exception e) {
Log.w("CameraView", "Exception:" , e);
}
camera.startPreview();
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
camera = Camera.open();
try {
camera.setPreviewDisplay(holder);
} catch (IOException exception) {
camera.release();
camera = null;
}
}
@Override
public void surfaceDestroyed(SurfaceHolder arg0) {
camera.stopPreview();
camera.release();
camera = null;
}
}
The GameRenderer:
Code:
public class GameRenderer implements GLSurfaceView.Renderer {
private final String vertexShaderCode =
"uniform mat4 uMVPMatrix; \n" +
"attribute vec4 vPosition; \n" +
"void main(){ \n" +
" gl_Position = uMVPMatrix * vPosition; \n" +
"} \n";
private final String fragmentShaderCode =
"void main(){ \n" +
" gl_FragColor = vec4(1.0, 1.0, 1.0, 0.3); \n" +
"} \n";
private int loadShader(int type, String shaderCode){
int shader = GLES20.glCreateShader(type);
GLES20.glShaderSource(shader, shaderCode);
GLES20.glCompileShader(shader);
return shader;
}
// END OF SHADER STUFF
private int mProgram;
private int maPositionHandle;
private int muMVPMatrixHandle;
private FloatBuffer triangleVB;
public GameRenderer() {
}
public void onSurfaceCreated(GL10 unused, EGLConfig config) {
GLES20.glClearColor(0,0,0,0);
initShapes();
int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexShaderCode);
int fragmentShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentShaderCode);
mProgram = GLES20.glCreateProgram();
GLES20.glAttachShader(mProgram, vertexShader);
GLES20.glAttachShader(mProgram, fragmentShader);
GLES20.glLinkProgram(mProgram);
maPositionHandle = GLES20.glGetAttribLocation(mProgram, "vPosition");
GLES20.glUseProgram(mProgram);
MatrixStack.initStack();
}
public void onDrawFrame(GL10 unused) {
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
drawGround();
}
private void drawGround() {
GLES20.glVertexAttribPointer(maPositionHandle, 3, GLES20.GL_FLOAT, false, 0, triangleVB);
GLES20.glEnableVertexAttribArray(maPositionHandle);
Matrix.multiplyMM(MatrixStack.getMVPMatrix(), 0, MatrixStack.getMVMatrix(), 0, MatrixStack.getMVMatrix(), 0);
Matrix.multiplyMM(MatrixStack.getMVPMatrix(), 0, MatrixStack.getPMatrix(), 0, MatrixStack.getMVPMatrix(), 0);
GLES20.glUniformMatrix4fv(muMVPMatrixHandle, 1, false, MatrixStack.getMVPMatrix(), 0);
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
GLES20.glDisableVertexAttribArray(maPositionHandle);
}
public void onSurfaceChanged(GL10 unused, int width, int height) {
GLES20.glViewport(0, 0, width, height);
float ratio = (float) width / height;
Matrix.frustumM(MatrixStack.getPMatrix(), 0, -ratio, ratio, -1, 1, 1, 1000);
muMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
Matrix.setLookAtM(MatrixStack.getMVMatrix(), 0, 0, 0,-1, 0, 0, 0, 0, 1, 0);
}
private void initShapes(){
float triangleCoords[] = {
-15f, -2f, 15f,
15f, -2f, 15f,
-15f, -2f, -15f,
15f, -2f, -15f
};
ByteBuffer vbb = ByteBuffer.allocateDirect(triangleCoords.length * 4);
vbb.order(ByteOrder.nativeOrder());
triangleVB = vbb.asFloatBuffer();
triangleVB.put(triangleCoords);
triangleVB.position(0);
}
}
And a small MatrixStack class, though I don't think the problem is here:
Code:
public class MatrixStack {
private static Stack<float[]> matrixStack = new Stack<float[]>();
private static float[] MVMatrix = new float[16];
private static float[] PMatrix = new float[16];
private static float[] MVPMatrix = new float[16];
protected static void initStack(){
float[] basisMatrix = new float[16];
Matrix.setIdentityM(basisMatrix, 0);
matrixStack.push(basisMatrix);
MVMatrix = basisMatrix;
Matrix.setIdentityM(PMatrix, 0);
}
public static float[] getMVMatrix(){
return MVMatrix;
}
public static float[] getPMatrix(){
return PMatrix;
}
public static float[] getMVPMatrix(){
return MVPMatrix;
}
}
So, that's all.
It looks like the camera preview is "overexposing" my GL fragment.
Please try this code at least.
I still hope there is just something wrong with my phone.
Thank you anyway for your help,
Tobias

Alright, I got an answer for that.
The problem was the premultiplication of the alpha values.
So in the easiest case it is enough to write:
vec3 color = clamp(textureColor.rgb * lightWeighting.xyz, 0.0, 1.0);
color *= 0.5; // premultiply by alpha
gl_FragColor = vec4(color, 0.5);
Though I still don't see why it worked perfectly fine on older systems, and I'm still not sure how to manage this on GL ES 1.0, where I can't write my own shader code.
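For the test program above, which outputs a constant colour, the premultiplied fragment shader would simply be gl_FragColor = vec4(vec3(1.0) * 0.3, 0.3);. On GL ES 1.0 the premultiplication can presumably be done on the CPU side instead. A minimal sketch, assuming the usual GL10 interface (not tested on the S3):
Code:
// Hedged sketch for GL ES 1.0, where the shader cannot be replaced:
// premultiply the colour by its alpha on the CPU side before drawing,
// so the translucent surface already contains premultiplied values.
float a = 0.3f;
gl.glColor4f(1.0f * a, 1.0f * a, 1.0f * a, a);
// If GL-side blending is also enabled, use the matching premultiplied
// blend function so the colour is not scaled by alpha a second time.
gl.glBlendFunc(GL10.GL_ONE, GL10.GL_ONE_MINUS_SRC_ALPHA);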
So I hope this helps anyone who has the same problem!
Thanks,
Tobias

Related

Android - SurfaceView flickers and sometimes does not apply the proper paint

I have been experiencing this issue for quite a while, and even after googling for a while I haven't found a reasonable solution to my problem.
The issue I am experiencing is that the SurfaceView I am using flickers. Once in a while, the objects I draw flicker and change their size or colour.
It looks like the application sometimes skips my calls to Paint.setColor() and Paint.setStrokeWidth().
I have made methods which change the used Paint and then, in order to try to fix this issue, set it back to the default Paint values. Still the issue persists. I have also read that the problem might be due to double buffering. Is that the case? I am using this code for the DrawingThread:
PS: you can notice that I also tried to use a dirty rectangle to see if the issue can be fixed, but still nothing. (I might not have understood what it actually does.)
Code:
class DrawingThread extends Thread {
private SurfaceHolder _surfaceHolder;
private CustomView _cv;
private boolean _run = false;
public DrawingThread(SurfaceHolder surfaceHolder, CustomView cv) {
super();
_surfaceHolder = surfaceHolder;
_cv = cv;
}
public void setRunning(boolean run) {
_run = run;
}
public boolean isRunning() {
return _run;
}
public SurfaceHolder getSurfaceHolder() {
return _surfaceHolder;
}
@Override
public void run() {
Canvas c;
while (_run) {
c = null;
try {
//c = _surfaceHolder.lockCanvas(new Rect(lon - range, lon + range, lat - range, lat + range));
c = _surfaceHolder.lockCanvas();
synchronized (_surfaceHolder) {
_cv.onDraw(c);
}
} finally {
if (c != null) {
_surfaceHolder.unlockCanvasAndPost(c);
}
}
}
}
}
PS: I have read somewhere that I should draw on a single bitmap and then draw the bitmap on the canvas. I have tried several times, but I cannot manage to do so.
Thanks in advance,
N.
Is there anyone who can help me?
Could someone give me a hand?
I cannot find the reason why I cannot fix this issue.
I have tried to draw the items on a bitmap first and then add it to the canvas, but I didn't manage it; I either get a white canvas or a completely black one.
I believe the issue is quite easy to solve after you have done it once.
Not even someone who could give some suggestions?
Hey!
As far as I can see the code you posted looks fine. Can you please post the code of your CustomView class? I think there could be an issue there.
Are you trying to make a game?
Hello!!!!
Finally someone
Here is the code I use for the onDraw(canvas):
Code:
@Override
public void onDraw(Canvas canvas) {
try {
canvas.drawColor(Color.WHITE);
pt.setAntiAlias(true);
canvas.rotate(-90, 0, 0);
canvas.translate(translateX, translateY);
canvas.scale(scale, scale);
canvas.rotate(-rotation, lat, lon);
// Drawing Points
Renderer.drawStreet(gaia.getStreets(), scale, canvas, pt);
Renderer.drawPolygon(gaia.getPolygons(), canvas, pt);
Renderer.drawPOI(myContext, gaia.getPOIs(), canvas, pt, scale, rotation);
Renderer.drawCustomPOI(myContext, gaia.getCustomPOI(), canvas, pt, scale, rotation);
if (showLocation) {
drawMarker(canvas);
}
if(drivingMode)
{
Renderer.drawRoute(gaia.getRoute(), canvas, pt);
}
} catch (Exception e) {
e.printStackTrace();
}
}
This is actually a map renderer.
I know the issue is very likely to be due to the double buffering, but I cannot find a way to fix it.
Thanks!
N.
Can you please post the code for the callback methods (surfaceCreated, surfaceChanged, surfaceDestroyed) and maybe the constructor of the view too? I think the problem could be in the setup rather than the actual rendering.
How are you accessing the SurfaceHolder? Are you using the getHolder() method for that? And are you passing the reference to the SurfaceHolder you get from getHolder() to the DrawingThread?
Here is the light version of the code:
Code:
public class CustomMapView extends SurfaceView implements
SurfaceHolder.Callback, LocationListener {
private DrawingThread _thread;
private RenderingThread _threadStreets;
Context myContext;
Paint pt = new Paint();
final int SCREEN_HEIGHT = 600;
final int SCREEN_WIDTH = 480;
float scale;
int range = 100000;
static public int rotation = 0;
int sensibility = 100;
int lat;
int lon;
float translateX = -lat * scale - SCREEN_HEIGHT / 2;
float translateY = -lon * scale + SCREEN_WIDTH / 2;
boolean followLocation = true;
boolean isZooming = false;
boolean showLocation = true;
boolean followRotation = false;
static boolean drivingMode = false;
boolean DEBUG = true;
Handler mHandler;
public static GaiaApp gaia;
public CustomMapView(Context context, AttributeSet attrs) {
super(context, attrs);
getHolder().addCallback(this);
gaia = ((GaiaApp) context.getApplicationContext());
scale = gaia.getCurScale();
lat = (int) gaia.getCurPosition().getLatitude();
lon = (int) gaia.getCurPosition().getLongitude();
pf = new PathFinder();
startupDB();
myContext = context;
mHandler = new Handler();
_thread = new DrawingThread(getHolder(), this);
_threadStreets = new RenderingThread(this);
setFocusable(true);
pt.setFlags(Paint.DITHER_FLAG);
pt.setFilterBitmap(true);
this.invalidate();
}
@Override
public void onDraw(Canvas canvas) {
try {
canvas.drawColor(Color.WHITE);
pt.setAntiAlias(true);
canvas.rotate(-90, 0, 0);
canvas.translate(translateX, translateY);
canvas.scale(scale, scale);
canvas.rotate(-rotation, lat, lon);
// Drawing Points
Renderer.drawStreet(gaia.getStreets(), scale, canvas, pt);
Renderer.drawPolygon(gaia.getPolygons(), canvas, pt);
Renderer.drawPOI(myContext, gaia.getPOIs(), canvas, pt, scale, rotation);
Renderer.drawCustomPOI(myContext, gaia.getCustomPOI(), canvas, pt, scale, rotation);
if (showLocation) {
drawMarker(canvas);
}
if(drivingMode)
{
Renderer.drawRoute(gaia.getRoute(), canvas, pt);
}
} catch (Exception e) {
e.printStackTrace();
}
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width,
int height) {
// TODO Auto-generated method stub
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
if (!_thread.isRunning()) {
_thread.setRunning(true);
_thread.start();
}
if (!_threadStreets.isRunning()) {
_threadStreets.setRunning(true);
_threadStreets.start();
}
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
boolean retry = true;
_thread.setRunning(false);
_threadStreets.setRunning(false);
while (retry) {
try {
_thread.join();
_threadStreets.join();
retry = false;
} catch (InterruptedException e) {
// we will try it again and again...
}
}
}
class DrawingThread extends Thread {
private SurfaceHolder _surfaceHolder;
private CustomMapView _cmv;
private boolean _run = false;
public DrawingThread(SurfaceHolder surfaceHolder, CustomMapView cmv) {
super();
_surfaceHolder = surfaceHolder;
_cmv = cmv;
}
public void setRunning(boolean run) {
_run = run;
}
public boolean isRunning() {
return _run;
}
public SurfaceHolder getSurfaceHolder() {
return _surfaceHolder;
}
@Override
public void run() {
Canvas c;
while (_run) {
c = null;
try {
c = _surfaceHolder.lockCanvas();
synchronized (_surfaceHolder) {
_cmv.onDraw(c);
}
} finally {
if (c != null) {
_surfaceHolder.unlockCanvasAndPost(c);
}
}
}
}
}
class RenderingThread extends Thread {
CustomMapView _cmv;
Boolean _run = false;
public RenderingThread(CustomMapView cmv) {
super();
_cmv = cmv;
}
public void setRunning(boolean run) {
_run = run;
}
public boolean isRunning() {
return _run;
}
@Override
public void run() {
while (_run) {
try {
Log.d("CustomMapView", "Retrieving...");
new StreetRunnable(_cmv).run();
Log.d("CustomMapView", "Retrieving");
if(!followRotation)
rotation = 0;
else
rotation = gaia.getOrientation();
Log.d("CustomMapView", "Updating...");
mHandler.post(new Runnable() {
@Override
public void run() {
_cmv.invalidate();
}
});
Log.d("CustomMapView", "Updated");
} finally {
}
}
}
}
@Override
public void onProviderDisabled(String provider) {
// TODO Auto-generated method stub
}
@Override
public void onProviderEnabled(String provider) {
// TODO Auto-generated method stub
}
@Override
public void onStatusChanged(String provider, int status, Bundle extras) {
// TODO Auto-generated method stub
}
}
Hello,
have you had the chance to check it out?
Thanks
N.
Sorry about that. I did not have access to the internet for the last two or three days.
Anyway, I went through your code... if that is what you call light, I wouldn't want to know what's heavy.
I noticed that you have two threads which are responsible for drawing to the same view. You are using the DrawingThread to call the onDraw method of your CustomMapView object, and the RenderingThread to post the invalidate method of the same CustomMapView object, which also results in a call to the onDraw method of the same view.
I am not really sure that doing this is a good thing, and maybe this is what is causing the problems with the rendering. As a test, you could comment out one of the Thread.start() calls in your surfaceCreated method and check if it stops flickering.
I know this isn't how you want your app to function, but if it fixes the flickering you'd know it's the two drawing threads that are causing the problem.
Code:
@Override
public void surfaceCreated(SurfaceHolder holder) {
// Comment out either one of the following two if blocks
if (!_thread.isRunning()) {
_thread.setRunning(true);
_thread.start();
}
if (!_threadStreets.isRunning()) {
_threadStreets.setRunning(true);
_threadStreets.start();
}
}
Awesome, dude!
It fixed it straight away.
The reason is that I had one thread first and then added a new one, without thinking about just merging them both together.
Thanks again!
Yep. You're welcome!
You'll just have to redesign your code to use just one rendering thread and it should work fine. All the best!
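A minimal sketch of that merged design, written as an inner class of CustomMapView like the two original threads (StreetRunnable, gaia and followRotation are assumed to behave as in the code above):
Code:
class CombinedRenderThread extends Thread {
    private final SurfaceHolder _surfaceHolder;
    private final CustomMapView _cmv;
    private volatile boolean _run = false;

    CombinedRenderThread(SurfaceHolder surfaceHolder, CustomMapView cmv) {
        _surfaceHolder = surfaceHolder;
        _cmv = cmv;
    }

    public void setRunning(boolean run) { _run = run; }

    @Override
    public void run() {
        while (_run) {
            // Update the map state first, then draw it, so only this one
            // thread ever touches the Canvas and the shared Paint object.
            new StreetRunnable(_cmv).run();
            rotation = followRotation ? gaia.getOrientation() : 0;
            Canvas c = null;
            try {
                c = _surfaceHolder.lockCanvas();
                if (c != null) {
                    synchronized (_surfaceHolder) {
                        _cmv.onDraw(c);
                    }
                }
            } finally {
                if (c != null) {
                    _surfaceHolder.unlockCanvasAndPost(c);
                }
            }
        }
    }
}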

[Q] Android Maps draw path problem

I am developing an application which draws the path of the user as he moves and calculates the area.
This is the code I am trying. My problem is that at the start of recording the path is drawn even if the user has not moved, and sometimes the path is not drawn even though the user is moving.
RouteOverlay class:
public class RouteOverlay extends Overlay {
private GeoPoint gp1;
private GeoPoint gp2;
private int mode = 1;
public RouteOverlay(GeoPoint paramGeoPoint1, GeoPoint paramGeoPoint2,int paramInt)
{
this.gp1 = paramGeoPoint1;
this.gp2 = paramGeoPoint2;
this.mode = paramInt;
}
public void draw(Canvas paramCanvas, MapView paramMapView,
boolean paramShadow)
{
super.draw(paramCanvas, paramMapView, paramShadow);
Projection projection = paramMapView.getProjection();
Paint mPaint = new Paint();
mPaint.setDither(true);
mPaint.setAntiAlias(true);
mPaint.setColor(Color.RED);
mPaint.setStyle(Paint.Style.FILL_AND_STROKE);
mPaint.setStrokeJoin(Paint.Join.ROUND);
mPaint.setStrokeCap(Paint.Cap.ROUND);
mPaint.setStrokeWidth(3);
mPaint.setAlpha(120);
Point p1 = new Point();
Point p2 = new Point();
Path path = new Path();
projection.toPixels(gp1, p1);
projection.toPixels(gp2, p2);
path.moveTo(p2.x,p2.y);
path.lineTo(p1.x,p1.y);
paramCanvas.drawPath(path, mPaint);
}
}
MainActivity:
public class MainActivity extends MapActivity {
public final LocationListener locationListener = new LocationListener() {
public void onLocationChanged(Location location) {
coordinates.add(location);
mapView.getController().animateTo(getGeoByLocation(location));
drawRoute(coordinates, mapView);
}
};
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_area_measurement);
this.mapView = ((MapView) findViewById(R.id.mapview));
this.mapView.setBuiltInZoomControls(false);
this.mapView.getController().setZoom(17);
this.coordinates = new ArrayList<Location>();
}
protected boolean isRouteDisplayed() {
return false;
}
private GeoPoint getGeoByLocation(Location location) {
GeoPoint gp = null;
try {
if (location != null) {
double geoLatitude = location.getLatitude() * 1E6;
double geoLongitude = location.getLongitude() * 1E6;
gp = new GeoPoint((int) geoLatitude, (int) geoLongitude);
}
} catch (Exception e) {
e.printStackTrace();
}
return gp;
}
public String getLocationProvider(LocationManager paramLocationManager) {
try {
Criteria localCriteria = new Criteria();
localCriteria.setAccuracy(1);
localCriteria.setAltitudeRequired(false);
localCriteria.setBearingRequired(false);
localCriteria.setCostAllowed(true);
localCriteria.setPowerRequirement(3);
String str = paramLocationManager.getBestProvider(localCriteria,
true);
return str;
} catch (Exception localException) {
localException.printStackTrace();
return null;
}
}
private void drawRoute(ArrayList<Location> paramArrayList,MapView paramMapView) {
List<Overlay> overlays = paramMapView.getOverlays();
//Changed for smooth rendering
overlays.clear();
for (int i = 1; i < paramArrayList.size(); i++) {
overlays.add(new RouteOverlay(getGeoByLocation(paramArrayList.get(i - 1)), getGeoByLocation(paramArrayList.get(i)),2));
}
}
public void startRecording() {
this.isMeasuring = true;
lm = (LocationManager) getSystemService(LOCATION_SERVICE);
lm.requestLocationUpdates(getLocationProvider(lm),500,2,this.locationListener);
/*if (lm.isProviderEnabled(LocationManager.NETWORK_PROVIDER)){
gpsstatus.setText("Gps Is Enabled");
}else
{ gpsstatus.setText("Gps Is disabled");}*/
}
}
This probably has to do with the accuracy of the GPS. I didn't really look at your code, but maybe you should add some kind of buffer: if (movement > 5 m) { /* draw stuff */ }.
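A minimal sketch of that filter inside the existing listener (lastLocation is an assumed new field holding the last accepted fix; Location.distanceTo() returns metres):
Code:
public void onLocationChanged(Location location) {
    // Ignore fixes that moved less than 5 m from the last accepted point,
    // so GPS jitter while standing still is not drawn as a path.
    if (lastLocation != null && lastLocation.distanceTo(location) < 5.0f) {
        return;
    }
    lastLocation = location;
    coordinates.add(location);
    mapView.getController().animateTo(getGeoByLocation(location));
    drawRoute(coordinates, mapView);
}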

New to form, Help with android dev? (java help)

Hello, I am currently trying to make a live wallpaper and I keep running into crashes, plus the images won't cycle.
MyWallpaperService.java
Code:
public class MyWallpaperService extends WallpaperService
{
public void onCreate()
{
super.onCreate();
}
public void onDestroy()
{
super.onDestroy();
}
public Engine onCreateEngine()
{
return new CercleEngine();
}
class CercleEngine extends Engine
{
public Bitmap image1, image2, image3;
CercleEngine()
{
image1 = BitmapFactory.decodeResource(getResources(), R.drawable.n01);
image2 = BitmapFactory.decodeResource(getResources(), R.drawable.n02);
image3 = BitmapFactory.decodeResource(getResources(), R.drawable.n03);
}
public void onCreate(SurfaceHolder surfaceHolder)
{
super.onCreate(surfaceHolder);
}
public void onOffsetsChanged(float xOffset, float yOffset, float xStep, float yStep, int xPixels, int yPixels)
{
drawFrame();
}
void drawFrame()
{
final SurfaceHolder holder = getSurfaceHolder();
Canvas c = null;
try
{
c = holder.lockCanvas();
if (c != null)
{
c.drawBitmap(image1, 0, 0, null);
c.drawBitmap(image2, 0, 0, null);
c.drawBitmap(image3, 0, 0, null);
}
final Runnable drawRunner = new Runnable()
{
@Override
public void run() {
drawFrame();
}
};
Handler handler = null;
handler.removeCallbacks(drawRunner);
boolean visible = true;
if (visible)
{
handler.postDelayed(drawRunner, 1); // delay 1 sec
}
} finally
{
if (c != null) holder.unlockCanvasAndPost(c);
}
}
}
}
I have no idea what is going on. I am trying to get it to cycle these three images, but it just won't. Please help!
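Three things in drawFrame stand out: handler is null when removeCallbacks() is called, which is the likely crash; the delay passed to postDelayed() is 1 ms even though the comment says one second; and all three bitmaps are drawn at the same position every frame, so only image3 is ever visible and nothing appears to cycle. A minimal sketch of a corrected drawFrame, assuming a real Handler field and a frameIndex field added to the engine:
Code:
private final Handler handler = new Handler();
private int frameIndex = 0;
private final Runnable drawRunner = new Runnable() {
    @Override
    public void run() {
        drawFrame();
    }
};

void drawFrame() {
    final SurfaceHolder holder = getSurfaceHolder();
    Canvas c = null;
    try {
        c = holder.lockCanvas();
        if (c != null) {
            // Draw only the current image, then advance the index so the
            // images actually cycle instead of covering each other.
            Bitmap[] images = { image1, image2, image3 };
            c.drawBitmap(images[frameIndex], 0, 0, null);
            frameIndex = (frameIndex + 1) % images.length;
        }
    } finally {
        if (c != null) holder.unlockCanvasAndPost(c);
    }
    handler.removeCallbacks(drawRunner);
    if (isVisible()) {
        handler.postDelayed(drawRunner, 1000); // one second per frame
    }
}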

On Device Camera Stream Text Recognition in KnowMyBoard Android App Using ML Kit, MVVM, Navigation Components

Introduction
In this article, we will learn how to integrate the Huawei ML Kit camera stream, Map Kit, and Location Kit in the Android application KnowMyBoard. Account Kit provides seamless login functionality to the app with a large user base.
The text recognition service can extract text from images of receipts, business cards, and documents. This service is useful for industries such as printing, education, and logistics. You can use it to create apps that handle data entry and check tasks.
The text recognition service is able to recognize text in both static images and dynamic camera streams with a host of APIs, which you can call synchronously or asynchronously to build your text recognition-enabled apps.
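As a quick illustration, this is roughly what the asynchronous call looks like with the same MLTextAnalyzer and MLFrame classes used in the full activity below (a minimal sketch, not the complete setup; bitmap stands in for the captured image):
Code:
MLFrame frame = MLFrame.fromBitmap(bitmap);
analyzer.asyncAnalyseFrame(frame)
        .addOnSuccessListener(text -> {
            // MLText.getStringValue() returns all of the recognized text.
            Log.d("OCR", text.getStringValue());
        })
        .addOnFailureListener(e -> Log.e("OCR", "Recognition failed", e));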
Precautions
Development Overview
You need to install the Android Studio IDE, and I assume that you have prior knowledge of Android application development.
Hardware Requirements
A computer (desktop or laptop) running Windows 10.
Android phone (with the USB cable), which is used for debugging.
Software Requirements
Java JDK 1.8 or later.
Android Studio or Visual Studio Code installed.
HMS Core (APK) 4.x or later.
Integration steps
Step 1. Create a Huawei developer account and complete identity verification on the Huawei developer website; refer to Register a Huawei ID.
Step 2. Create a project in AppGallery Connect.
Step 3. Add the HMS Core SDK.
Let’s start coding
navigation_graph.xml
<?xml version="1.0" encoding="utf-8"?>
<navigation xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
android:id="@+id/navigation_graph"
app:startDestination="@id/loginFragment">
<fragment
android:id="@+id/loginFragment"
android:name="com.huawei.hms.knowmyboard.dtse.activity.fragments.LoginFragment"
android:label="LoginFragment"/>
<fragment
android:id="@+id/mainFragment"
android:name="com.huawei.hms.knowmyboard.dtse.activity.fragments.MainFragment"
android:label="MainFragment"/>
<fragment
android:id="@+id/searchFragment"
android:name="com.huawei.hms.knowmyboard.dtse.activity.fragments.SearchFragment"
android:label="fragment_search"
tools:layout="@layout/fragment_search" />
</navigation>
TextRecognitionActivity.java
public final class TextRecognitionActivity extends BaseActivity
implements OnRequestPermissionsResultCallback, View.OnClickListener {
private static final String TAG = "TextRecognitionActivity";
private LensEngine lensEngine = null;
private LensEnginePreview preview;
private GraphicOverlay graphicOverlay;
private ImageButton takePicture;
private ImageButton imageSwitch;
private RelativeLayout zoomImageLayout;
private ZoomImageView zoomImageView;
private ImageButton zoomImageClose;
CameraConfiguration cameraConfiguration = null;
private int facing = CameraConfiguration.CAMERA_FACING_BACK;
private Camera mCamera;
private boolean isLandScape;
private Bitmap bitmap;
private Bitmap bitmapCopy;
private LocalTextTransactor localTextTransactor;
private Handler mHandler = new MsgHandler(this);
private Dialog languageDialog;
private AddPictureDialog addPictureDialog;
private TextView textCN;
private TextView textEN;
private TextView textJN;
private TextView textKN;
private TextView textLN;
private TextView tv_language,tv_translated_txt;
private String textType = Constant.POSITION_CN;
private boolean isInitialization = false;
MLTextAnalyzer analyzer;
private static class MsgHandler extends Handler {
WeakReference<TextRecognitionActivity> mMainActivityWeakReference;
public MsgHandler(TextRecognitionActivity mainActivity) {
this.mMainActivityWeakReference = new WeakReference<>(mainActivity);
}
@Override
public void handleMessage(Message msg) {
super.handleMessage(msg);
TextRecognitionActivity mainActivity = this.mMainActivityWeakReference.get();
if (mainActivity == null) {
return;
}
//Log.d(TextRecognitionActivity.TAG, "msg what :" + msg.what);
//Log.e("TAG", "msg what :" + msg.getTarget().getMessageName(msg));
if (msg.what == Constant.SHOW_TAKE_PHOTO_BUTTON) {
mainActivity.setVisible();
} else if (msg.what == Constant.HIDE_TAKE_PHOTO_BUTTON) {
mainActivity.setGone();
}
}
}
private void setVisible() {
if (this.takePicture.getVisibility() == View.GONE) {
this.takePicture.setVisibility(View.VISIBLE);
}
}
private void setGone() {
if (this.takePicture.getVisibility() == View.VISIBLE) {
this.takePicture.setVisibility(View.GONE);
}
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
this.setContentView(R.layout.activity_text_recognition);
if (savedInstanceState != null) {
this.facing = savedInstanceState.getInt(Constant.CAMERA_FACING);
}
this.tv_language = this.findViewById(R.id.tv_lang);
this.tv_translated_txt = this.findViewById(R.id.tv_translated_txt);
this.preview = this.findViewById(R.id.live_preview);
this.graphicOverlay = this.findViewById(R.id.live_overlay);
this.cameraConfiguration = new CameraConfiguration();
this.cameraConfiguration.setCameraFacing(this.facing);
this.initViews();
this.isLandScape = (this.getResources().getConfiguration().orientation == Configuration.ORIENTATION_LANDSCAPE);
this.createLensEngine();
this.setStatusBar();
}
private void initViews() {
this.takePicture = this.findViewById(R.id.takePicture);
this.takePicture.setOnClickListener(this);
this.imageSwitch = this.findViewById(R.id.text_imageSwitch);
this.imageSwitch.setOnClickListener(this);
this.zoomImageLayout = this.findViewById(R.id.zoomImageLayout);
this.zoomImageView = this.findViewById(R.id.take_picture_overlay);
this.zoomImageClose = this.findViewById(R.id.zoomImageClose);
this.zoomImageClose.setOnClickListener(this);
this.findViewById(R.id.back).setOnClickListener(this);
this.findViewById(R.id.language_setting).setOnClickListener(this);
this.createLanguageDialog();
this.createAddPictureDialog();
}
@Override
public void onClick(View view) {
if (view.getId() == R.id.takePicture) {
this.takePicture();
} else if (view.getId() == R.id.zoomImageClose) {
this.zoomImageLayout.setVisibility(View.GONE);
this.recycleBitmap();
} else if (view.getId() == R.id.text_imageSwitch) {
this.showAddPictureDialog();
} else if (view.getId() == R.id.language_setting) {
this.showLanguageDialog();
} else if (view.getId() == R.id.simple_cn) {
SharedPreferencesUtil.getInstance(this)
.putStringValue(Constant.POSITION_KEY, Constant.POSITION_CN);
this.languageDialog.dismiss();
this.restartLensEngine(Constant.POSITION_CN);
} else if (view.getId() == R.id.english) {
SharedPreferencesUtil.getInstance(this)
.putStringValue(Constant.POSITION_KEY, Constant.POSITION_EN);
this.languageDialog.dismiss();
this.preview.release();
this.restartLensEngine(Constant.POSITION_EN);
} else if (view.getId() == R.id.japanese) {
SharedPreferencesUtil.getInstance(this)
.putStringValue(Constant.POSITION_KEY, Constant.POSITION_JA);
this.languageDialog.dismiss();
this.preview.release();
this.restartLensEngine(Constant.POSITION_JA);
} else if (view.getId() == R.id.korean) {
SharedPreferencesUtil.getInstance(this)
.putStringValue(Constant.POSITION_KEY, Constant.POSITION_KO);
this.languageDialog.dismiss();
this.preview.release();
this.restartLensEngine(Constant.POSITION_KO);
} else if (view.getId() == R.id.latin) {
SharedPreferencesUtil.getInstance(this)
.putStringValue(Constant.POSITION_KEY, Constant.POSITION_LA);
this.languageDialog.dismiss();
this.preview.release();
this.restartLensEngine(Constant.POSITION_LA);
} else if (view.getId() == R.id.back) {
releaseLensEngine();
this.finish();
}
}
private void restartLensEngine(String type) {
if (this.textType.equals(type)) {
return;
}
this.lensEngine.release();
this.lensEngine = null;
this.createLensEngine();
this.startLensEngine();
if (this.lensEngine == null || this.lensEngine.getCamera() == null) {
return;
}
this.mCamera = this.lensEngine.getCamera();
try {
this.mCamera.setPreviewDisplay(this.preview.getSurfaceHolder());
} catch (IOException e) {
Log.d(TextRecognitionActivity.TAG, "initViews IOException");
}
}
@Override
public void onBackPressed() {
if (this.zoomImageLayout.getVisibility() == View.VISIBLE) {
this.zoomImageLayout.setVisibility(View.GONE);
this.recycleBitmap();
} else {
super.onBackPressed();
releaseLensEngine();
}
}
private void createLanguageDialog() {
this.languageDialog = new Dialog(this, R.style.MyDialogStyle);
View view = View.inflate(this, R.layout.dialog_language_setting, null);
// Set up a custom layout
this.languageDialog.setContentView(view);
this.textCN = view.findViewById(R.id.simple_cn);
this.textCN.setOnClickListener(this);
this.textEN = view.findViewById(R.id.english);
this.textEN.setOnClickListener(this);
this.textJN = view.findViewById(R.id.japanese);
this.textJN.setOnClickListener(this);
this.textKN = view.findViewById(R.id.korean);
this.textKN.setOnClickListener(this);
this.textLN = view.findViewById(R.id.latin);
this.textLN.setOnClickListener(this);
this.languageDialog.setCanceledOnTouchOutside(true);
// Set the size of the dialog
Window dialogWindow = this.languageDialog.getWindow();
WindowManager.LayoutParams layoutParams = dialogWindow.getAttributes();
layoutParams.width = WindowManager.LayoutParams.MATCH_PARENT;
layoutParams.height = WindowManager.LayoutParams.WRAP_CONTENT;
layoutParams.gravity = Gravity.BOTTOM;
dialogWindow.setAttributes(layoutParams);
}
private void showLanguageDialog() {
this.initDialogViews();
this.languageDialog.show();
}
private void createAddPictureDialog() {
this.addPictureDialog = new AddPictureDialog(this, AddPictureDialog.TYPE_NORMAL);
final Intent intent = new Intent(TextRecognitionActivity.this, RemoteDetectionActivity.class);
intent.putExtra(Constant.MODEL_TYPE, Constant.CLOUD_TEXT_DETECTION);
this.addPictureDialog.setClickListener(new AddPictureDialog.ClickListener() {
@Override
public void takePicture() {
lensEngine.release();
isInitialization = false;
intent.putExtra(Constant.ADD_PICTURE_TYPE, Constant.TYPE_TAKE_PHOTO);
TextRecognitionActivity.this.startActivity(intent);
}
@Override
public void selectImage() {
intent.putExtra(Constant.ADD_PICTURE_TYPE, Constant.TYPE_SELECT_IMAGE);
TextRecognitionActivity.this.startActivity(intent);
}
@Override
public void doExtend() {
}
});
}
private void showAddPictureDialog() {
this.addPictureDialog.show();
}
private void initDialogViews() {
String position = SharedPreferencesUtil.getInstance(this).getStringValue(Constant.POSITION_KEY);
this.textType = position;
this.textCN.setSelected(false);
this.textEN.setSelected(false);
this.textJN.setSelected(false);
this.textLN.setSelected(false);
this.textKN.setSelected(false);
switch (position) {
case Constant.POSITION_CN:
this.textCN.setSelected(true);
break;
case Constant.POSITION_EN:
this.textEN.setSelected(true);
break;
case Constant.POSITION_LA:
this.textLN.setSelected(true);
break;
case Constant.POSITION_JA:
this.textJN.setSelected(true);
break;
case Constant.POSITION_KO:
this.textKN.setSelected(true);
break;
default:
}
}
@Override
protected void onSaveInstanceState(Bundle outState) {
outState.putInt(Constant.CAMERA_FACING, this.facing);
super.onSaveInstanceState(outState);
}
private void createLensEngine() {
MLLocalTextSetting setting = new MLLocalTextSetting.Factory()
.setOCRMode(MLLocalTextSetting.OCR_DETECT_MODE)
// Specify languages that can be recognized.
.setLanguage("ko")
.create();
analyzer = MLAnalyzerFactory.getInstance().getLocalTextAnalyzer(setting);
//analyzer = new MLTextAnalyzer.Factory(this).create();
if (this.lensEngine == null) {
this.lensEngine = new LensEngine(this, this.cameraConfiguration, this.graphicOverlay);
}
try {
this.localTextTransactor = new LocalTextTransactor(this.mHandler, this);
this.lensEngine.setMachineLearningFrameTransactor(this.localTextTransactor);
// this.lensEngine.setMachineLearningFrameTransactor((ImageTransactor) new ObjectAnalyzerTransactor());
isInitialization = true;
} catch (Exception e) {
Toast.makeText(
this,
"Can not create image transactor: " + e.getMessage(),
Toast.LENGTH_LONG)
.show();
}
}
private void startLensEngine() {
if (this.lensEngine != null) {
try {
this.preview.start(this.lensEngine, false);
} catch (IOException e) {
Log.e(TextRecognitionActivity.TAG, "Unable to start lensEngine.", e);
this.lensEngine.release();
this.lensEngine = null;
}
}
}
@Override
public void onResume() {
super.onResume();
if (!isInitialization){
createLensEngine();
}
this.startLensEngine();
}
@Override
protected void onStop() {
super.onStop();
this.preview.stop();
}
private void releaseLensEngine() {
if (this.lensEngine != null) {
this.lensEngine.release();
this.lensEngine = null;
}
recycleBitmap();
}
@Override
protected void onDestroy() {
super.onDestroy();
releaseLensEngine();
if (analyzer != null) {
try {
analyzer.stop();
} catch (IOException e) {
// Exception handling.
Log.e(TAG,"Error while releasing analyzer");
}
}
}
private void recycleBitmap() {
if (this.bitmap != null && !this.bitmap.isRecycled()) {
this.bitmap.recycle();
this.bitmap = null;
}
if (this.bitmapCopy != null && !this.bitmapCopy.isRecycled()) {
this.bitmapCopy.recycle();
this.bitmapCopy = null;
}
}
private void takePicture() {
this.zoomImageLayout.setVisibility(View.VISIBLE);
LocalDataProcessor localDataProcessor = new LocalDataProcessor();
localDataProcessor.setLandScape(this.isLandScape);
this.bitmap = BitmapUtils.getBitmap(this.localTextTransactor.getTransactingImage(), this.localTextTransactor.getTransactingMetaData());
float previewWidth = localDataProcessor.getMaxWidthOfImage(this.localTextTransactor.getTransactingMetaData());
float previewHeight = localDataProcessor.getMaxHeightOfImage(this.localTextTransactor.getTransactingMetaData());
if (this.isLandScape) {
previewWidth = localDataProcessor.getMaxHeightOfImage(this.localTextTransactor.getTransactingMetaData());
previewHeight = localDataProcessor.getMaxWidthOfImage(this.localTextTransactor.getTransactingMetaData());
}
this.bitmapCopy = Bitmap.createBitmap(this.bitmap).copy(Bitmap.Config.ARGB_8888, true);
Canvas canvas = new Canvas(this.bitmapCopy);
float min = Math.min(previewWidth, previewHeight);
float max = Math.max(previewWidth, previewHeight);
if (this.getResources().getConfiguration().orientation == Configuration.ORIENTATION_PORTRAIT) {
localDataProcessor.setCameraInfo(this.graphicOverlay, canvas, min, max);
} else {
localDataProcessor.setCameraInfo(this.graphicOverlay, canvas, max, min);
}
localDataProcessor.drawHmsMLVisionText(canvas, this.localTextTransactor.getLastResults().getBlocks());
this.zoomImageView.setImageBitmap(this.bitmapCopy);
// Create an MLFrame object using the bitmap, which is the image data in bitmap format.
MLFrame frame = MLFrame.fromBitmap(bitmap);
Task<MLText> task = analyzer.asyncAnalyseFrame(frame);
task.addOnSuccessListener(new OnSuccessListener<MLText>() {
@Override
public void onSuccess(MLText text) {
String detectText = text.getStringValue();
// Processing for successful recognition.
}
}).addOnFailureListener(new OnFailureListener() {
@Override
public void onFailure(Exception e) {
// Processing logic for recognition failure.
Log.e("TAG"," Text : Processing logic for recognition failure");
}
});
}
}
Result
Please refer to the forum page to see the result.
Tricks and Tips
Make sure that the agconnect-services.json file is added.
Make sure the required dependencies are added.
Make sure that the service is enabled in AGC.
Enable data binding in the build.gradle file.
Make sure the bottom navigation IDs are the same as the fragment IDs in the navigation graph.
Make sure that you set the API key before calling the service.
Make sure that you added the module-text from the link below.
Change module-text's Gradle file from application to library.
Conclusion
In this article, we have learnt how to integrate the Huawei ML Kit camera stream, with which you can extract text from the device camera stream in the Android application KnowMyBoard. You can check the desired result in the result section. You can also go through the previous article, part 4, here. I hope the Huawei ML Kit capabilities are helpful to you; as with this sample, you can make use of them as per your requirements.
Thank you so much for reading. I hope this article helps you understand the integration of Huawei ML Kit in the Android application KnowMyBoard.
Reference
Huawei ML Kit – Training video
ML Text Recognition
Module-text

On Device Text Detection and Translation from Camera Stream Using Huawei ML Kit in Android KnowMyBoard App [Navigation Components, MVVM]

Introduction
In this article, we will learn how to integrate the Huawei ML Kit camera stream in the Android application KnowMyBoard. Account Kit provides seamless login functionality to the app with a large user base.
The text recognition service can extract text from images of receipts, business cards, and documents. This service is useful for industries such as printing, education, and logistics. You can use it to create apps that handle data entry and check tasks.
The text recognition service is able to recognize text in both static images and dynamic camera streams with a host of APIs, which you can call synchronously or asynchronously to build your text recognition-enabled apps.
The on-device language detection service can detect the language of text when the Internet is unavailable. ML Kit detects languages in text and returns the language codes (which comply with the ISO 639-1 standard) and their respective confidences or the language code with the highest confidence. Currently, 56 languages can be detected.
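A minimal sketch of the detection call, using the same classes that appear in the full activity below (sourceText stands in for the recognized text):
Code:
// Create a local (on-device) language detector with a minimum confidence.
MLLangDetectorFactory factory = MLLangDetectorFactory.getInstance();
MLLocalLangDetectorSetting setting = new MLLocalLangDetectorSetting.Factory()
        .setTrustedThreshold(0.01f)
        .create();
MLLocalLangDetector detector = factory.getLocalLangDetector(setting);
// firstBestDetect() returns the ISO 639-1 code with the highest confidence.
detector.firstBestDetect(sourceText)
        .addOnSuccessListener(langCode -> Log.d("LangDetect", "Detected: " + langCode))
        .addOnFailureListener(e -> Log.e("LangDetect", "Detection failed", e));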
Similar to the real-time translation service, the on-device translation service can be widely used in scenarios where translation between different languages is required. For example, travel apps can integrate this service to translate road signs and menus in other languages into tourists' native languages, providing more considerate services for them. Different from real-time translation, on-device translation does not require the Internet connection. You can easily use the translation service even if the Internet is disconnected.
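And the offline translator is set up along these lines (a minimal sketch mirroring the translate() method shown later; detectedLangCode and sourceText are assumed inputs, and the language model must be downloaded before the first translation):
Code:
MLLocalTranslateSetting setting = new MLLocalTranslateSetting.Factory()
        .setSourceLangCode(detectedLangCode) // ISO 639-1 source language
        .setTargetLangCode("en")             // ISO 639-1 target language
        .create();
MLLocalTranslator translator = MLTranslatorFactory.getInstance().getLocalTranslator(setting);
MLModelDownloadStrategy strategy = new MLModelDownloadStrategy.Factory()
        .needWifi() // download the model package over Wi-Fi only
        .create();
translator.preparedModel(strategy, (alreadyDownLength, totalLength) -> { /* progress */ })
        .addOnSuccessListener(aVoid -> translator.asyncTranslate(sourceText)
                .addOnSuccessListener(translated -> Log.d("Translate", translated)))
        .addOnFailureListener(e -> Log.e("Translate", "Model download failed", e));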
Precautions
Development Overview
You need to install the Android Studio IDE, and I assume that you have prior knowledge of Android application development.
Hardware Requirements
A computer (desktop or laptop) running Windows 10.
Android phone (with the USB cable), which is used for debugging.
Software Requirements
Java JDK 1.8 or later.
Android Studio or Visual Studio Code installed.
HMS Core (APK) 4.x or later.
Integration steps
Step 1. Create a Huawei developer account and complete identity verification on the Huawei developer website; refer to Register a Huawei ID.
Step 2. Create a project in AppGallery Connect.
Step 3. Add the HMS Core SDK.
Let's start coding
navigation_graph.xml
<?xml version="1.0" encoding="utf-8"?>
<navigation xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
android:id="@+id/navigation_graph"
app:startDestination="@id/loginFragment">
<fragment
android:id="@+id/loginFragment"
android:name="com.huawei.hms.knowmyboard.dtse.activity.fragments.LoginFragment"
android:label="LoginFragment"/>
<fragment
android:id="@+id/mainFragment"
android:name="com.huawei.hms.knowmyboard.dtse.activity.fragments.MainFragment"
android:label="MainFragment"/>
<fragment
android:id="@+id/searchFragment"
android:name="com.huawei.hms.knowmyboard.dtse.activity.fragments.SearchFragment"
android:label="fragment_search"
tools:layout="@layout/fragment_search" />
</navigation>
TextRecognitionActivity.java
public final class TextRecognitionActivity extends BaseActivity
implements OnRequestPermissionsResultCallback, View.OnClickListener {
private static final String TAG = "TextRecognitionActivity";
private LensEngine lensEngine = null;
private LensEnginePreview preview;
private GraphicOverlay graphicOverlay;
private ImageButton takePicture;
private ImageButton imageSwitch;
private RelativeLayout zoomImageLayout;
private ZoomImageView zoomImageView;
private ImageButton zoomImageClose;
CameraConfiguration cameraConfiguration = null;
private int facing = CameraConfiguration.CAMERA_FACING_BACK;
private Camera mCamera;
private boolean isLandScape;
private Bitmap bitmap;
private Bitmap bitmapCopy;
private LocalTextTransactor localTextTransactor;
private Handler mHandler = new MsgHandler(this);
private Dialog languageDialog;
private AddPictureDialog addPictureDialog;
private TextView textCN;
private TextView textEN;
private TextView textJN;
private TextView textKN;
private TextView textLN;
private TextView tv_language,tv_translated_txt;
private String textType = Constant.POSITION_CN;
private boolean isInitialization = false;
MLTextAnalyzer analyzer;
private static class MsgHandler extends Handler {
WeakReference<TextRecognitionActivity> mMainActivityWeakReference;
public MsgHandler(TextRecognitionActivity mainActivity) {
this.mMainActivityWeakReference = new WeakReference<>(mainActivity);
}
@Override
public void handleMessage(Message msg) {
super.handleMessage(msg);
TextRecognitionActivity mainActivity = this.mMainActivityWeakReference.get();
if (mainActivity == null) {
return;
}
if (msg.what == Constant.SHOW_TAKE_PHOTO_BUTTON) {
mainActivity.setVisible();
} else if (msg.what == Constant.HIDE_TAKE_PHOTO_BUTTON) {
mainActivity.setGone();
}
}
}
private void setVisible() {
if (this.takePicture.getVisibility() == View.GONE) {
this.takePicture.setVisibility(View.VISIBLE);
}
}
private void setGone() {
if (this.takePicture.getVisibility() == View.VISIBLE) {
this.takePicture.setVisibility(View.GONE);
}
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
this.setContentView(R.layout.activity_text_recognition);
if (savedInstanceState != null) {
this.facing = savedInstanceState.getInt(Constant.CAMERA_FACING);
}
this.tv_language = this.findViewById(R.id.tv_lang);
this.tv_translated_txt = this.findViewById(R.id.tv_translated_txt);
this.preview = this.findViewById(R.id.live_preview);
this.graphicOverlay = this.findViewById(R.id.live_overlay);
this.cameraConfiguration = new CameraConfiguration();
this.cameraConfiguration.setCameraFacing(this.facing);
this.initViews();
this.isLandScape = (this.getResources().getConfiguration().orientation == Configuration.ORIENTATION_LANDSCAPE);
this.createLensEngine();
this.setStatusBar();
}
private void initViews() {
this.takePicture = this.findViewById(R.id.takePicture);
this.takePicture.setOnClickListener(this);
this.imageSwitch = this.findViewById(R.id.text_imageSwitch);
this.imageSwitch.setOnClickListener(this);
this.zoomImageLayout = this.findViewById(R.id.zoomImageLayout);
this.zoomImageView = this.findViewById(R.id.take_picture_overlay);
this.zoomImageClose = this.findViewById(R.id.zoomImageClose);
this.zoomImageClose.setOnClickListener(this);
this.findViewById(R.id.back).setOnClickListener(this);
this.findViewById(R.id.language_setting).setOnClickListener(this);
this.createLanguageDialog();
this.createAddPictureDialog();
}
@Override
public void onClick(View view) {
if (view.getId() == R.id.takePicture) {
this.takePicture();
} else if (view.getId() == R.id.zoomImageClose) {
this.zoomImageLayout.setVisibility(View.GONE);
this.recycleBitmap();
} else if (view.getId() == R.id.text_imageSwitch) {
this.showAddPictureDialog();
} else if (view.getId() == R.id.language_setting) {
this.showLanguageDialog();
} else if (view.getId() == R.id.simple_cn) {
SharedPreferencesUtil.getInstance(this)
.putStringValue(Constant.POSITION_KEY, Constant.POSITION_CN);
this.languageDialog.dismiss();
this.restartLensEngine(Constant.POSITION_CN);
} else if (view.getId() == R.id.english) {
SharedPreferencesUtil.getInstance(this)
.putStringValue(Constant.POSITION_KEY, Constant.POSITION_EN);
this.languageDialog.dismiss();
this.preview.release();
this.restartLensEngine(Constant.POSITION_EN);
} else if (view.getId() == R.id.japanese) {
SharedPreferencesUtil.getInstance(this)
.putStringValue(Constant.POSITION_KEY, Constant.POSITION_JA);
this.languageDialog.dismiss();
this.preview.release();
this.restartLensEngine(Constant.POSITION_JA);
} else if (view.getId() == R.id.korean) {
SharedPreferencesUtil.getInstance(this)
.putStringValue(Constant.POSITION_KEY, Constant.POSITION_KO);
this.languageDialog.dismiss();
this.preview.release();
this.restartLensEngine(Constant.POSITION_KO);
} else if (view.getId() == R.id.latin) {
SharedPreferencesUtil.getInstance(this)
.putStringValue(Constant.POSITION_KEY, Constant.POSITION_LA);
this.languageDialog.dismiss();
this.preview.release();
this.restartLensEngine(Constant.POSITION_LA);
} else if (view.getId() == R.id.back) {
releaseLensEngine();
this.finish();
}
}
private void restartLensEngine(String type) {
if (this.textType.equals(type)) {
return;
}
this.lensEngine.release();
this.lensEngine = null;
this.createLensEngine();
this.startLensEngine();
if (this.lensEngine == null || this.lensEngine.getCamera() == null) {
return;
}
this.mCamera = this.lensEngine.getCamera();
try {
this.mCamera.setPreviewDisplay(this.preview.getSurfaceHolder());
} catch (IOException e) {
Log.d(TextRecognitionActivity.TAG, "initViews IOException");
}
}
@Override
public void onBackPressed() {
if (this.zoomImageLayout.getVisibility() == View.VISIBLE) {
this.zoomImageLayout.setVisibility(View.GONE);
this.recycleBitmap();
} else {
super.onBackPressed();
releaseLensEngine();
}
}
private void createLanguageDialog() {
this.languageDialog = new Dialog(this, R.style.MyDialogStyle);
View view = View.inflate(this, R.layout.dialog_language_setting, null);
// Set up a custom layout
this.languageDialog.setContentView(view);
this.textCN = view.findViewById(R.id.simple_cn);
this.textCN.setOnClickListener(this);
this.textEN = view.findViewById(R.id.english);
this.textEN.setOnClickListener(this);
this.textJN = view.findViewById(R.id.japanese);
this.textJN.setOnClickListener(this);
this.textKN = view.findViewById(R.id.korean);
this.textKN.setOnClickListener(this);
this.textLN = view.findViewById(R.id.latin);
this.textLN.setOnClickListener(this);
this.languageDialog.setCanceledOnTouchOutside(true);
// Set the size of the dialog
Window dialogWindow = this.languageDialog.getWindow();
WindowManager.LayoutParams layoutParams = dialogWindow.getAttributes();
layoutParams.width = WindowManager.LayoutParams.MATCH_PARENT;
layoutParams.height = WindowManager.LayoutParams.WRAP_CONTENT;
layoutParams.gravity = Gravity.BOTTOM;
dialogWindow.setAttributes(layoutParams);
}
private void showLanguageDialog() {
this.initDialogViews();
this.languageDialog.show();
}
private void createAddPictureDialog() {
this.addPictureDialog = new AddPictureDialog(this, AddPictureDialog.TYPE_NORMAL);
final Intent intent = new Intent(TextRecognitionActivity.this, RemoteDetectionActivity.class);
intent.putExtra(Constant.MODEL_TYPE, Constant.CLOUD_TEXT_DETECTION);
this.addPictureDialog.setClickListener(new AddPictureDialog.ClickListener() {
@Override
public void takePicture() {
lensEngine.release();
isInitialization = false;
intent.putExtra(Constant.ADD_PICTURE_TYPE, Constant.TYPE_TAKE_PHOTO);
TextRecognitionActivity.this.startActivity(intent);
}
@Override
public void selectImage() {
intent.putExtra(Constant.ADD_PICTURE_TYPE, Constant.TYPE_SELECT_IMAGE);
TextRecognitionActivity.this.startActivity(intent);
}
@Override
public void doExtend() {
}
});
}
private void showAddPictureDialog() {
this.addPictureDialog.show();
}
private void initDialogViews() {
String position = SharedPreferencesUtil.getInstance(this).getStringValue(Constant.POSITION_KEY);
this.textType = position;
this.textCN.setSelected(false);
this.textEN.setSelected(false);
this.textJN.setSelected(false);
this.textLN.setSelected(false);
this.textKN.setSelected(false);
switch (position) {
case Constant.POSITION_CN:
this.textCN.setSelected(true);
break;
case Constant.POSITION_EN:
this.textEN.setSelected(true);
break;
case Constant.POSITION_LA:
this.textLN.setSelected(true);
break;
case Constant.POSITION_JA:
this.textJN.setSelected(true);
break;
case Constant.POSITION_KO:
this.textKN.setSelected(true);
break;
default:
}
}
@Override
protected void onSaveInstanceState(Bundle outState) {
outState.putInt(Constant.CAMERA_FACING, this.facing);
super.onSaveInstanceState(outState);
}
private void createLensEngine() {
MLLocalTextSetting setting = new MLLocalTextSetting.Factory()
.setOCRMode(MLLocalTextSetting.OCR_DETECT_MODE)
// Specify languages that can be recognized.
.setLanguage("ko")
.create();
analyzer = MLAnalyzerFactory.getInstance().getLocalTextAnalyzer(setting);
//analyzer = new MLTextAnalyzer.Factory(this).create();
if (this.lensEngine == null) {
this.lensEngine = new LensEngine(this, this.cameraConfiguration, this.graphicOverlay);
}
try {
this.localTextTransactor = new LocalTextTransactor(this.mHandler, this);
this.lensEngine.setMachineLearningFrameTransactor(this.localTextTransactor);
// this.lensEngine.setMachineLearningFrameTransactor((ImageTransactor) new ObjectAnalyzerTransactor());
isInitialization = true;
} catch (Exception e) {
Toast.makeText(
this,
"Can not create image transactor: " + e.getMessage(),
Toast.LENGTH_LONG)
.show();
}
}
private void startLensEngine() {
if (this.lensEngine != null) {
try {
this.preview.start(this.lensEngine, false);
} catch (IOException e) {
Log.e(TextRecognitionActivity.TAG, "Unable to start lensEngine.", e);
this.lensEngine.release();
this.lensEngine = null;
}
}
}
@Override
public void onResume() {
super.onResume();
if (!isInitialization){
createLensEngine();
}
this.startLensEngine();
}
@Override
protected void onStop() {
super.onStop();
this.preview.stop();
}
private void releaseLensEngine() {
if (this.lensEngine != null) {
this.lensEngine.release();
this.lensEngine = null;
}
recycleBitmap();
}
@Override
protected void onDestroy() {
super.onDestroy();
releaseLensEngine();
if (analyzer != null) {
try {
analyzer.stop();
} catch (IOException e) {
// Exception handling.
Log.e(TAG,"Error while releasing analyzer");
}
}
}
private void recycleBitmap() {
if (this.bitmap != null && !this.bitmap.isRecycled()) {
this.bitmap.recycle();
this.bitmap = null;
}
if (this.bitmapCopy != null && !this.bitmapCopy.isRecycled()) {
this.bitmapCopy.recycle();
this.bitmapCopy = null;
}
}
private void takePicture() {
this.zoomImageLayout.setVisibility(View.VISIBLE);
LocalDataProcessor localDataProcessor = new LocalDataProcessor();
localDataProcessor.setLandScape(this.isLandScape);
this.bitmap = BitmapUtils.getBitmap(this.localTextTransactor.getTransactingImage(), this.localTextTransactor.getTransactingMetaData());
float previewWidth = localDataProcessor.getMaxWidthOfImage(this.localTextTransactor.getTransactingMetaData());
float previewHeight = localDataProcessor.getMaxHeightOfImage(this.localTextTransactor.getTransactingMetaData());
if (this.isLandScape) {
previewWidth = localDataProcessor.getMaxHeightOfImage(this.localTextTransactor.getTransactingMetaData());
previewHeight = localDataProcessor.getMaxWidthOfImage(this.localTextTransactor.getTransactingMetaData());
}
this.bitmapCopy = Bitmap.createBitmap(this.bitmap).copy(Bitmap.Config.ARGB_8888, true);
Canvas canvas = new Canvas(this.bitmapCopy);
float min = Math.min(previewWidth, previewHeight);
float max = Math.max(previewWidth, previewHeight);
if (this.getResources().getConfiguration().orientation == Configuration.ORIENTATION_PORTRAIT) {
localDataProcessor.setCameraInfo(this.graphicOverlay, canvas, min, max);
} else {
localDataProcessor.setCameraInfo(this.graphicOverlay, canvas, max, min);
}
localDataProcessor.drawHmsMLVisionText(canvas, this.localTextTransactor.getLastResults().getBlocks());
this.zoomImageView.setImageBitmap(this.bitmapCopy);
// Create an MLFrame object using the bitmap, which is the image data in bitmap format.
MLFrame frame = MLFrame.fromBitmap(bitmap);
Task<MLText> task = analyzer.asyncAnalyseFrame(frame);
task.addOnSuccessListener(new OnSuccessListener<MLText>() {
@Override
public void onSuccess(MLText text) {
String detectText = text.getStringValue();
// Processing for successful recognition.
// Create a local language detector.
MLLangDetectorFactory factory = MLLangDetectorFactory.getInstance();
MLLocalLangDetectorSetting setting = new MLLocalLangDetectorSetting.Factory()
// Set the minimum confidence threshold for language detection.
.setTrustedThreshold(0.01f)
.create();
MLLocalLangDetector myLocalLangDetector = factory.getLocalLangDetector(setting);
Task<String> firstBestDetectTask = myLocalLangDetector.firstBestDetect(detectText);
firstBestDetectTask.addOnSuccessListener(new OnSuccessListener<String>() {
@Override
public void onSuccess(String languageDetected) {
// Processing logic for detection success.
Log.d("TAG", "Lang detect :" + languageDetected);
Log.d("TAG", " detectText :" + detectText);
translate(languageDetected,detectText);
}
}).addOnFailureListener(new OnFailureListener() {
@Override
public void onFailure(Exception e) {
// Processing logic for detection failure.
Log.e("TAG", "Lang detect error:" + e.getMessage());
}
});
}
}).addOnFailureListener(new OnFailureListener() {
@Override
public void onFailure(Exception e) {
// Processing logic for recognition failure.
Log.e("TAG"," Text : Processing logic for recognition failure");
}
});
}
private void translate(String languageDetected, String detectText) {
MLApplication.initialize(getApplication());
MLApplication.getInstance().setApiKey("DAEDAF48ZIMI4ettQdTfCKlXgaln/E+TO/PrsX+LpP2BubkmED/iC0iVEps5vfx1ol27rHvuwiq64YphpPkGYWbf9La8XjnvC9qhwQ==");
// Create an offline translator.
MLLocalTranslateSetting setting = new MLLocalTranslateSetting.Factory()
// Set the source language code. The ISO 639-1 standard is used. This parameter is mandatory. If this parameter is not set, an error may occur.
.setSourceLangCode(languageDetected)
// Set the target language code. The ISO 639-1 standard is used. This parameter is mandatory. If this parameter is not set, an error may occur.
.setTargetLangCode("en")
.create();
MLLocalTranslator myLocalTranslator = MLTranslatorFactory.getInstance().getLocalTranslator(setting);
// Set the model download policy.
MLModelDownloadStrategy downloadStrategy = new MLModelDownloadStrategy.Factory()
.needWifi()// It is recommended that you download the package in a Wi-Fi environment.
.create();
// Create a download progress listener.
MLModelDownloadListener modelDownloadListener = new MLModelDownloadListener() {
@Override
public void onProcess(long alreadyDownLength, long totalLength) {
runOnUiThread(new Runnable() {
@Override
public void run() {
// Display the download progress or perform other operations.
}
});
}
};
myLocalTranslator.preparedModel(downloadStrategy, modelDownloadListener).
addOnSuccessListener(new OnSuccessListener<Void>() {
@Override
public void onSuccess(Void aVoid) {
// Called when the model package is successfully downloaded.
// input is a string of less than 5000 characters.
final Task<String> task = myLocalTranslator.asyncTranslate(detectText);
// Before translation, ensure that the models have been successfully downloaded.
task.addOnSuccessListener(new OnSuccessListener<String>() {
@Override
public void onSuccess(String translated) {
// Processing logic for detection success.
Log.e("TAG"," Translated Text : "+translated);
tv_translated_txt.setText(translated);
}
}).addOnFailureListener(new OnFailureListener() {
@Override
public void onFailure(Exception e) {
// Processing logic for detection failure.
Log.e("TAG"," Translation failed "+e.getMessage());
Toast.makeText(TextRecognitionActivity.this,"Please check internet connection.",Toast.LENGTH_SHORT).show();
}
});
}
}).addOnFailureListener(new OnFailureListener() {
@Override
public void onFailure(Exception e) {
// Called when the model package fails to be downloaded.
Log.e("TAG"," Translation failed onFailure "+e.getMessage());
}
});
}
}
MainFragment.java
