Skip to content

Instantly share code, notes, and snippets.

View erdalkaymak's full-sized avatar

erdalkaymak

View GitHub Profile
{
"assignedLabels" : [
{
}
],
"mode" : "NORMAL",
"nodeDescription" : "the master Jenkins node",
"nodeName" : "",
"numExecutors" : 2,
// Fragment (truncated): kicks off asynchronous on-device OCR on `frame` and,
// on success, reveals the translation UI and starts language detection +
// translation of the recognised text.
// NOTE(review): snippet is cut off mid-listener — the closing braces and any
// failure listener are not visible here.
val task: Task<MLText> = analyzer.asyncAnalyseFrame(frame)
task.addOnSuccessListener {
// OCR succeeded: make the result/translation widgets visible.
txt_translate_result.visibility= View.VISIBLE
txt_to.visibility= View.VISIBLE
txt_translation.visibility= View.VISIBLE
toSpinner.visibility= View.VISIBLE
selected_img.visibility= View.VISIBLE
text_recognition_device_text.visibility= View.VISIBLE
// Show the raw recognised text (it = MLText result of the analysis).
text_recognition_device_text.setText(it.stringValue)
// Hand the recognised text to language detection / translation.
// NOTE(review): languageCode is nullable — toString() on a null value would
// yield the literal "null"; confirm it is set before this runs.
detectAndTranslateLanguage(it.stringValue, txt_translation,languageCode.toString(),applicationContext)
// Gradle dependencies for Huawei HMS ML Kit text recognition (OCR).
// NOTE(review): all artifacts are pinned to 2.0.5.300 — keep the base SDK and
// the language model packages on the same version when upgrading.
dependencies{
// Import the base SDK.
implementation 'com.huawei.hms:ml-computer-vision-ocr:2.0.5.300'
// Import the Latin-based language model package.
implementation 'com.huawei.hms:ml-computer-vision-ocr-latin-model:2.0.5.300'
// Import the Japanese and Korean model package.
implementation 'com.huawei.hms:ml-computer-vision-ocr-jk-model:2.0.5.300'
// Import the Chinese and English model package.
implementation 'com.huawei.hms:ml-computer-vision-ocr-cn-model:2.0.5.300'
}
<?xml version="1.0" encoding="utf-8"?>
<!-- Root layout for MainActivity: a full-screen RelativeLayout wrapping a
     ScrollView. NOTE(review): fragment is truncated — the ScrollView's
     children and the closing tags are not visible here. -->
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
tools:context=".MainActivity">
<ScrollView
android:layout_width="match_parent"
// Runs HMS ML Kit on-device text recognition (OCR) over the given bitmap:
// builds a local text analyzer in detect mode, wraps the bitmap in an MLFrame,
// and analyses the frame asynchronously.
// NOTE(review): snippet is truncated — the success-listener body and the
// function's closing braces are not visible here.
private fun performTextRecognitionOnDevice(bitmap: Bitmap){
// Presumably receives the recognised text inside the (truncated) listener —
// TODO confirm; unused in the visible lines.
var myText:String?=null
// On-device analyzer configuration: OCR detect mode.
val setting = MLLocalTextSetting.Factory()
.setOCRMode(MLLocalTextSetting.OCR_DETECT_MODE)
.create()
val analyzer = MLAnalyzerFactory.getInstance().getLocalTextAnalyzer(setting)
// Wrap the input bitmap for analysis.
val frame = MLFrame.fromBitmap(bitmap)
// Asynchronous analysis; result delivered to the listener below.
val task: Task<MLText> = analyzer.asyncAnalyseFrame(frame)
task.addOnSuccessListener {
// Activity state fields (fragment of a larger class body).
// Tracks whether some entry event has occurred — purpose not visible here.
private var isEntered=false
// Last OCR-recognised text, if any.
private var detectedText:String?=null
// Request codes used with startActivityForResult / onActivityResult.
private val myCameraRequestCode = 100
private val myStorageRequestCode = 300
// Target-language code selected by the user; null until chosen.
private var languageCode: String?=null
// Bitmap currently shown / analysed; null until an image is picked or captured.
private var selectedImageBitmap: Bitmap?=null
// Languages offered in the "translate to" spinner.
// NOTE(review): array literal is truncated in this fragment — more entries and
// the closing parenthesis follow elsewhere.
private var toLanguageList = arrayOf(
"Chinese",
"English",
"French",
// Setup fragment (truncated): relaxes StrictMode, sets the HMS API key, and
// wires the target-language spinner.
// NOTE(review): permitAll() disables StrictMode thread-policy checks (e.g.
// network on the main thread) — prefer moving blocking work off the main
// thread instead of permitting it.
val policy = StrictMode.ThreadPolicy.Builder().permitAll().build()
StrictMode.setThreadPolicy(policy)
// TODO(review): API key is empty — HMS cloud calls will need a real key;
// keep it out of source control.
MLApplication.getInstance().apiKey = ""
// Populate the "translate to" spinner with the supported languages.
val adapterTo = ArrayAdapter(this, android.R.layout.simple_spinner_item, toLanguageList)
toSpinner.adapter = adapterTo
toSpinner.onItemSelectedListener = object :
AdapterView.OnItemSelectedListener {
// NOTE(review): fragment is truncated — the listener body and closing
// braces are not visible here.
override fun onItemSelected(parent: AdapterView<*>, view: View, position: Int, id: Long) {
// Receives the picked/captured image and runs OCR on it.
// NOTE(review): fragment is truncated — the else-branch body and closing
// braces are not visible here.
override fun onActivityResult(requestCode: Int, resultCode: Int, data: Intent?) {
super.onActivityResult(requestCode, resultCode, data)
// 100 — presumably the camera request (matches myCameraRequestCode seen in
// another fragment); prefer the named constant over the magic number.
if (requestCode == 100) {
if (resultCode == Activity.RESULT_OK) {
// Camera thumbnail arrives in the "data" extra.
// NOTE(review): unguarded cast — a null/missing extra would throw; the
// !! below is only safe because the assignment just succeeded.
selectedImageBitmap = data?.extras?.get("data") as Bitmap
selected_img.setImageBitmap(selectedImageBitmap)
performTextRecognitionOnDevice(selectedImageBitmap!!)
} else {
// Non-camera path: presumably a gallery pick — load the bitmap from
// the returned content URI (call truncated here).
if (data != null) {
MediaStore.Images.Media.getBitmap(
// Gradle dependency for HMS ML Kit language detection (used by
// detectAndTranslateLanguage); version matches the OCR artifacts (2.0.5.300).
dependencies{
implementation 'com.huawei.hms:ml-computer-language-detection:2.0.5.300'
}
// Detects the language of sourceText with the HMS remote (cloud) language
// detector, logs the result, and caches it in shared preferences; presumably
// then translates into selectedLanguage and shows it in detectedLangText —
// the translation step is truncated here.
// NOTE(review): fragment is cut off mid-call — the catch block and closing
// braces are not visible.
fun detectAndTranslateLanguage(sourceText: String,detectedLangText:TextView,selectedLanguage: String,mContext:Context) {
// 0.01f is a very permissive confidence threshold — nearly any guess is
// accepted; raise it if misdetections are a problem.
val setting = MLRemoteLangDetectorSetting.Factory()
.setTrustedThreshold(0.01f)
.create()
val mlRemoteLangDetector = MLLangDetectorFactory.getInstance()
.getRemoteLangDetector(setting)
try {
// Synchronous best-match detection (network call — see the StrictMode
// permitAll() elsewhere in this file, which this presumably relies on).
fromLanguageCode = mlRemoteLangDetector!!.syncFirstBestDetect(sourceText)
Log.i("myLanguageCode",sourceText+" "+"("+ fromLanguageCode+")")
// Persist the detected language for later use (call truncated).
SharedPreferencesUtil.getInstance(mContext)?.putStringValue("detectedLanguage",