2016-12-03 18 views
1

私はYouTubeのDemoImagetoTextの構築手順に従い、DemoImagetoTextを正常にビルドできました。次に、このアプリケーションを多言語OCRに対応させたいと考えています。多言語OCRを実現するには、このコードのどの部分をどのように変更すればよいでしょうか。

public class main extends Activity {

    private CropImageView mCropImageView;
    Bitmap converted;
    EditText textView;
    private TessOCR mTessOCR;
    private Uri mCropImageUri;

    /**
     * Tesseract language code. Multiple languages are joined with '+'
     * (e.g. "eng+jpn"). A separate {@code <language>.traineddata} asset must
     * be bundled for EACH language, and TessOCR must be initialised with the
     * same combined code.
     */
    public static final String lang = "eng";

    /** External-storage root that will contain the tessdata/ directory. */
    public static final String DATA_PATH =
            Environment.getExternalStorageDirectory().toString() + "/DemoOCR/";

    private ProgressDialog mProgressDialog;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.a_main);
        textView = (EditText) findViewById(R.id.editText);
        mCropImageView = (CropImageView) findViewById(R.id.CropImageView);

        // Make sure DATA_PATH and DATA_PATH/tessdata exist before copying.
        String[] paths = new String[]{DATA_PATH, DATA_PATH + "tessdata/"};
        for (String path : paths) {
            File dir = new File(path);
            if (!dir.exists()) {
                if (!dir.mkdirs()) {
                    Log.v("Main", "ERROR: Creation of directory " + path + " on sdcard failed");
                    break;
                }
                Log.v("Main", "Created directory " + path + " on sdcard");
            }
        }

        // A combined code such as "eng+jpn" names SEVERAL data files, one per
        // language. The original code opened the literal asset
        // "eng+jpn.traineddata" (which does not exist) and silently swallowed
        // the resulting IOException — so OCR kept falling back to whatever
        // language data was already on the device. Copy each file separately.
        for (String language : lang.split("\\+")) {
            copyTrainedDataIfMissing(language);
        }

        mTessOCR = new TessOCR();
    }

    /**
     * Copies {@code <language>.traineddata} from the APK assets into
     * {@code DATA_PATH/tessdata/} unless it already exists there.
     *
     * @param language a single Tesseract language code, e.g. "eng" or "jpn"
     */
    private void copyTrainedDataIfMissing(String language) {
        File target = new File(DATA_PATH + "tessdata/" + language + ".traineddata");
        if (target.exists()) {
            return;
        }
        // try-with-resources closes both streams even if the copy fails.
        try (InputStream in = getAssets().open(language + ".traineddata");
             OutputStream out = new FileOutputStream(target)) {
            byte[] buf = new byte[8192];
            int len;
            while ((len = in.read(buf)) > 0) {
                out.write(buf, 0, len);
            }
            Log.v("Main", "Copied " + language + " traineddata");
        } catch (IOException e) {
            // Never swallow this: a missing or misnamed asset here is exactly
            // the failure the user cannot otherwise see.
            Log.e("Main", "Unable to copy " + language + " traineddata", e);
        }
    }

    /**
     * On load image button click, start pick image chooser activity.
     */
    public void onLoadImageClick(View view) {
        startActivityForResult(getPickImageChooserIntent(), 200);
    }

    /**
     * Crops the image, shows the crop in the view, and runs OCR on a
     * black-and-white copy of it.
     */
    public void onCropImageClick(View view) {
        Bitmap cropped = mCropImageView.getCroppedImage(500, 500);
        // Guard against a failed crop: the original code still called doOCR()
        // with a null bitmap and crashed inside the colour conversion.
        if (cropped != null) {
            mCropImageView.setImageBitmap(cropped);
            doOCR(convertColorIntoBlackAndWhiteImage(cropped));
        }
    }

    /**
     * Runs Tesseract on a background thread and publishes the recognised
     * (lower-cased) text into the EditText on the UI thread. Shows a
     * progress dialog for the duration of the recognition.
     *
     * @param bitmap the (preferably black-and-white) image to recognise
     */
    public void doOCR(final Bitmap bitmap) {
        if (mProgressDialog == null) {
            mProgressDialog = ProgressDialog.show(this, "Processing",
                    "Please wait...", true);
        } else {
            mProgressDialog.show();
        }

        new Thread(new Runnable() {
            public void run() {
                final String result = mTessOCR.getOCRResult(bitmap).toLowerCase();

                runOnUiThread(new Runnable() {
                    @Override
                    public void run() {
                        if (result != null && !result.equals("")) {
                            textView.setText(result);
                        }
                        mProgressDialog.dismiss();
                    }
                });
            }
        }).start();
    }

    /**
     * Returns a desaturated (grayscale) ARGB_8888 copy of the given bitmap;
     * Tesseract generally performs better on black-and-white input.
     */
    private Bitmap convertColorIntoBlackAndWhiteImage(Bitmap orginalBitmap) {
        ColorMatrix colorMatrix = new ColorMatrix();
        colorMatrix.setSaturation(0); // saturation 0 == grayscale

        ColorMatrixColorFilter colorMatrixFilter =
                new ColorMatrixColorFilter(colorMatrix);

        Bitmap blackAndWhiteBitmap = orginalBitmap.copy(
                Bitmap.Config.ARGB_8888, true);

        Paint paint = new Paint();
        paint.setColorFilter(colorMatrixFilter);

        // Draw the source through the desaturating filter onto the copy
        // (the original drew the copy onto itself, which is equivalent for
        // opaque images but less clear).
        Canvas canvas = new Canvas(blackAndWhiteBitmap);
        canvas.drawBitmap(orginalBitmap, 0, 0, paint);

        return blackAndWhiteBitmap;
    }

    @Override
    protected void onActivityResult(int requestCode, int resultCode, Intent data) {
        if (resultCode == Activity.RESULT_OK) {
            Uri imageUri = getPickImageResultUri(data);

            // For API >= 23 we need to check specifically that we have permission
            // to read external storage, but we don't know if we need to for this
            // URI, so the simplest is to try opening the stream and see if we
            // get an error.
            boolean requirePermissions = false;
            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M &&
                    checkSelfPermission(Manifest.permission.READ_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED &&
                    isUriRequiresPermissions(imageUri)) {

                // Request permissions and handle the result in
                // onRequestPermissionsResult().
                requirePermissions = true;
                mCropImageUri = imageUri;
                requestPermissions(new String[]{Manifest.permission.READ_EXTERNAL_STORAGE}, 0);
            }

            if (!requirePermissions) {
                mCropImageView.setImageUriAsync(imageUri);
            }
        }
    }

    @Override
    public void onRequestPermissionsResult(int requestCode, String permissions[], int[] grantResults) {
        if (mCropImageUri != null && grantResults.length > 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED) {
            mCropImageView.setImageUriAsync(mCropImageUri);
        } else {
            Toast.makeText(this, "Required permissions are not granted", Toast.LENGTH_LONG).show();
        }
    }

    /**
     * Create a chooser intent to select the source to get image from.<br/>
     * The source can be camera (ACTION_IMAGE_CAPTURE) or gallery (ACTION_GET_CONTENT).<br/>
     * All possible sources are added to the intent chooser.
     */
    public Intent getPickImageChooserIntent() {

        // Determine Uri of camera image to save.
        Uri outputFileUri = getCaptureImageOutputUri();

        List<Intent> allIntents = new ArrayList<>();
        PackageManager packageManager = getPackageManager();

        // Collect all camera intents.
        Intent captureIntent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
        List<ResolveInfo> listCam = packageManager.queryIntentActivities(captureIntent, 0);
        for (ResolveInfo res : listCam) {
            Intent intent = new Intent(captureIntent);
            intent.setComponent(new ComponentName(res.activityInfo.packageName, res.activityInfo.name));
            intent.setPackage(res.activityInfo.packageName);
            if (outputFileUri != null) {
                intent.putExtra(MediaStore.EXTRA_OUTPUT, outputFileUri);
            }
            allIntents.add(intent);
        }

        // Collect all gallery intents.
        Intent galleryIntent = new Intent(Intent.ACTION_GET_CONTENT);
        galleryIntent.setType("image/*");
        List<ResolveInfo> listGallery = packageManager.queryIntentActivities(galleryIntent, 0);
        for (ResolveInfo res : listGallery) {
            Intent intent = new Intent(galleryIntent);
            intent.setComponent(new ComponentName(res.activityInfo.packageName, res.activityInfo.name));
            intent.setPackage(res.activityInfo.packageName);
            allIntents.add(intent);
        }

        // The system documents UI must be the chooser's main intent (it is the
        // last one in the list unless we find it by class name below).
        Intent mainIntent = allIntents.get(allIntents.size() - 1);
        for (Intent intent : allIntents) {
            if (intent.getComponent().getClassName().equals("com.android.documentsui.DocumentsActivity")) {
                mainIntent = intent;
                break;
            }
        }
        allIntents.remove(mainIntent);

        // Create a chooser from the main intent.
        Intent chooserIntent = Intent.createChooser(mainIntent, "Select source");

        // Add all other intents.
        chooserIntent.putExtra(Intent.EXTRA_INITIAL_INTENTS, allIntents.toArray(new Parcelable[allIntents.size()]));

        return chooserIntent;
    }

    /**
     * Get URI to image received from capture by camera.
     */
    private Uri getCaptureImageOutputUri() {
        Uri outputFileUri = null;
        File getImage = getExternalCacheDir();
        if (getImage != null) {
            outputFileUri = Uri.fromFile(new File(getImage.getPath(), "pickImageResult.jpeg"));
        }
        return outputFileUri;
    }

    /**
     * Get the URI of the selected image from {@link #getPickImageChooserIntent()}.<br/>
     * Will return the correct URI for camera and gallery image.
     *
     * @param data the returned data of the activity result
     */
    public Uri getPickImageResultUri(Intent data) {
        boolean isCamera = true;
        if (data != null && data.getData() != null) {
            String action = data.getAction();
            isCamera = action != null && action.equals(MediaStore.ACTION_IMAGE_CAPTURE);
        }
        return isCamera ? getCaptureImageOutputUri() : data.getData();
    }

    /**
     * Test if we can open the given Android URI, to detect whether a
     * read-permission error would be thrown.
     */
    public boolean isUriRequiresPermissions(Uri uri) {
        try {
            ContentResolver resolver = getContentResolver();
            InputStream stream = resolver.openInputStream(uri);
            stream.close();
            return false;
        } catch (FileNotFoundException e) {
            if (e.getCause() instanceof ErrnoException) {
                return true;
            }
        } catch (Exception ignored) {
            // Best-effort probe: any other failure is treated as "no extra
            // permission required" so the normal load path is attempted.
        }
        return false;
    }
}

既に学習済みデータ(traineddataファイル)をassetsに追加したうえで、lang = "eng+jpn" のように変更し、traineddataをコピーする部分も "eng.traineddata" と "jpn.traineddata" の両方を扱うように変更しましたが、出力はjpn言語になりません(まだ英語のままです)。

次にlang = "eng"をlang = "jpn"に変更しますが、動作しません。それでもまだeng

どうすればよいですか?私はそれらに対処する方法を知らない。 T^T前もってありがとう。

Simple OCR Android App Using Tesseractとtess-twoの違いを知りたいです。それは同じことを行うが、なぜそれらを使用するコードは同じではない

また、同じようなことができるleptonicaとopencvの違いも知りたいです。多くのOCRがleptonicaを採用して開発されているのはなぜですか?

答えて

0

まず、学習済みの言語データファイル(traineddata)をダウンロードしてtessdataフォルダに配置し、その後、OCRを初期化する箇所で使用する言語を設定する必要があります。学習済みファイルはこちら https://github.com/tesseract-ocr/tessdata からダウンロードできます。

+0

https://github.com/tesseract-ocr/tessdata から学習済みファイルをダウンロードしてフォルダにインポートしてみましたが、それでもまだeng言語のままです。なぜか分かりません。 – wanipook