Skip to content

Instantly share code, notes, and snippets.

@VIkash2601
Created November 16, 2023 07:38
Show Gist options
  • Save VIkash2601/e4e0085cd2b33a3539febd613e0b21b5 to your computer and use it in GitHub Desktop.
FragImageAudio Gist
package com.app.spinnr.frag
import android.Manifest
import android.app.Activity
import android.content.Context
import android.content.Intent
import android.content.pm.PackageManager
import android.graphics.Bitmap
import android.graphics.BitmapFactory
import android.graphics.Matrix
import android.media.MediaPlayer
import android.media.MediaRecorder
import android.net.Uri
import android.os.Build
import android.os.Bundle
import android.os.CountDownTimer
import android.provider.MediaStore
import android.provider.Settings
import android.util.Log
import androidx.fragment.app.Fragment
import android.view.LayoutInflater
import android.view.View
import android.view.ViewGroup
import androidx.activity.result.contract.ActivityResultContracts
import androidx.core.content.ContextCompat
import androidx.core.content.FileProvider
import androidx.exifinterface.media.ExifInterface
import com.app.spinnr.App.Companion.getStr
import com.app.spinnr.BuildConfig
import com.app.spinnr.R
import com.app.spinnr.act.ActBoarding
import com.app.spinnr.act.ActHome
import com.app.spinnr.databinding.FragImageAudioBinding
import com.app.spinnr.frag.boarding.FragIntroVideo
import com.app.spinnr.listener.FragListener
import com.app.spinnr.model.BottomSheetModel
import com.app.spinnr.model.CoinModelImpl
import com.app.spinnr.model.MediaConcatModel
import com.app.spinnr.model.MediaConcatPath
import com.app.spinnr.model.ProfileModel
import com.app.spinnr.model.VideoModel
import com.app.spinnr.model.VideoUploadModel
import com.app.spinnr.service.ApiClient
import com.app.spinnr.service.ResponseBean
import com.app.spinnr.util.Loader
import com.app.spinnr.util.Preference
import com.app.spinnr.util.UriHelper
import com.app.spinnr.util.inputDialog
import com.app.spinnr.util.load
import com.app.spinnr.util.netNotConnected
import com.app.spinnr.util.postDelayed
import com.app.spinnr.util.showEarnCoin
import com.app.spinnr.util.showLoader
import com.app.spinnr.util.showMessage
import com.app.spinnr.util.showOptionDialog
import com.app.spinnr.util.showSnackBar
import com.app.spinnr.util.showUploadSuccessDialog
import com.bumptech.glide.Glide
import com.google.gson.JsonElement
import okhttp3.MediaType.Companion.toMediaTypeOrNull
import okhttp3.MultipartBody
import okhttp3.RequestBody.Companion.asRequestBody
import okhttp3.RequestBody.Companion.toRequestBody
import retrofit2.Call
import retrofit2.Callback
import retrofit2.Response
import java.io.File
import java.io.FileInputStream
import java.io.FileOutputStream
class FragImageAudio(val listener: FragListener? = null) : Fragment() {
private val TAG = FragImageAudio::class.java.simpleName
companion object {
var title = ""
var desc = ""
var index = 0
var type = ""
}
private lateinit var ctx: Context
private lateinit var binding: FragImageAudioBinding
private var loaded = false
private var recorder: MediaRecorder? = null
private var player: MediaPlayer? = null
private var timer: CountDownTimer? = null
private var selectImage: Bitmap? = null
private var loader: Loader? = null
override fun onCreateView(
inflater: LayoutInflater, container: ViewGroup?,
savedInstanceState: Bundle?
): View {
if (loaded) return binding.root
loaded = true
ctx = requireContext()
binding = FragImageAudioBinding.inflate(inflater, container, false)
setup()
return binding.root
}
private var recording = false
private val clickListener = View.OnClickListener {
it.isEnabled = false
postDelayed({ it.isEnabled = true }, 500)
when (it) {
binding.playParent -> {
if (player != null) {
stopPlayer()
} else {
preparePlayer()
}
}
binding.redo -> {
resetRecording()
}
binding.recordingParent -> {
if (recorder != null)
stopRecording()
else
prepareRecording()
}
binding.placeHolder,
binding.imageParent,
binding.imageButton -> selectImage()
binding.submit -> {
if (title.isBlank()) {
chooseName()
} else {
submit(title, index + 6L, index + 1)
}
}
binding.close -> {
(ctx as? ActBoarding)?.backPress()
(ctx as? ActHome)?.backPress()
}
}
}
private fun setup() {
binding.close.setOnClickListener(clickListener)
binding.recordingParent.setOnClickListener(clickListener)
binding.redo.setOnClickListener(clickListener)
binding.playParent.setOnClickListener(clickListener)
binding.placeHolder.setOnClickListener(clickListener)
binding.imageParent.setOnClickListener(clickListener)
binding.imageButton.setOnClickListener(clickListener)
binding.submit.setOnClickListener(clickListener)
if (title.isBlank()) {
binding.question.visibility = View.GONE
binding.selectImage.visibility = View.VISIBLE
}
binding.title.text = title
binding.desc.text = desc
Glide.with(ctx).load(R.mipmap.gif_audio_recording).into(binding.recordingStart)
}
private fun prepareRecording() {
if (title.isBlank()) {
binding.recorder.visibility = View.VISIBLE
binding.player.visibility = View.GONE
} else {
binding.question.visibility = View.VISIBLE
binding.recorder.visibility = View.VISIBLE
binding.selectImage.visibility = View.GONE
binding.player.visibility = View.GONE
}
@Suppress("DEPRECATION")
recorder = MediaRecorder().apply {
setAudioSource(MediaRecorder.AudioSource.MIC)
setOutputFormat(MediaRecorder.OutputFormat.DEFAULT)
setOutputFile("${ctx.cacheDir}/temp.m4a")
setAudioEncoder(MediaRecorder.AudioEncoder.AAC)
}
recorder?.prepare()
recorder?.start()
timer = object: CountDownTimer(12000, 1000) {
override fun onTick(millisUntilFinished: Long) {
val seconds = 12 - (millisUntilFinished / 1000).toInt()
if (seconds >= 10) {
binding.recordingTimer.text = "00:${seconds}/00:12"
} else {
binding.recordingTimer.text = "00:0${seconds}/00:12"
}
}
override fun onFinish() {
stopRecording()
}
}.start()
binding.recordingMessage.text = getStr(R.string.image_audio_recording_stop)
binding.recordingStart.visibility = View.VISIBLE
binding.recordingStop.visibility = View.GONE
recording = true
binding.recordingIcon.setImageDrawable(ContextCompat.getDrawable(ctx, R.mipmap.img_audio_stop_recording))
binding.recordingIcon.imageTintList = ContextCompat.getColorStateList(ctx, R.color.white)
}
private fun stopRecording() {
try{
timer?.cancel()
timer = null
recorder?.stop()
recorder?.release()
recorder = null
binding.recordingStart.visibility = View.GONE
binding.recordingStop.visibility = View.VISIBLE
binding.recordingIcon.setImageDrawable(ContextCompat.getDrawable(ctx, R.mipmap.img_audio_recording))
binding.recordingIcon.imageTintList = ContextCompat.getColorStateList(ctx, R.color.white)
if (title.isBlank()) {
binding.player.visibility = View.VISIBLE
binding.recorder.visibility = View.GONE
} else {
binding.question.visibility = View.GONE
binding.recorder.visibility = View.GONE
binding.selectImage.visibility = View.VISIBLE
binding.player.visibility = View.VISIBLE
}
} catch (e: Exception) {
Log.w(TAG, "stopRecording: ${e.localizedMessage}")
}
}
private fun resetRecording() {
try{
timer?.cancel()
timer = null
recorder?.stop()
recorder?.release()
recorder = null
binding.recordingStart.visibility = View.GONE
binding.recordingStop.visibility = View.VISIBLE
binding.recordingIcon.setImageDrawable(ContextCompat.getDrawable(ctx, R.mipmap.img_audio_recording))
binding.recordingIcon.imageTintList = ContextCompat.getColorStateList(ctx, R.color.white)
binding.player.visibility = View.GONE
binding.recorder.visibility = View.VISIBLE
binding.question.visibility = View.GONE
binding.selectImage.visibility = View.VISIBLE
binding.recordingMessage.text = getStr(R.string.image_audio_recording_start)
binding.recordingTimer.text = "00:00/12:00"
} catch (e: Exception) {
Log.w(TAG, "resetRecording: ${e.localizedMessage}")
}
}
private fun preparePlayer() {
player = MediaPlayer().apply {
setDataSource("${ctx.cacheDir}/temp.m4a")
prepare()
start()
binding.playIcon.setImageResource(R.mipmap.img_pause_fill)
setOnCompletionListener {
binding.playIcon.setImageResource(R.mipmap.img_play_fill)
}
}
}
private fun stopPlayer() {
try{
player?.stop()
player?.release()
player = null
binding.playIcon.setImageResource(R.mipmap.img_play_fill)
}catch (e:Exception){
}
}
private var fromCamera = false
private fun selectImage() {
BottomSheetDialog(
"Choose Profile Image",
mutableListOf(
BottomSheetModel(
"Take Photo",
R.mipmap.img_take_photo
),
BottomSheetModel(
"Photo Library",
R.mipmap.img_choose_profile
)
)
) { a ->
when(a) {
0 -> {
if (ContextCompat.checkSelfPermission(ctx, Manifest.permission.CAMERA) == PackageManager.PERMISSION_DENIED) {
if (shouldShowRequestPermissionRationale(Manifest.permission.CAMERA) || Preference.preferenceInstance.readBoolean(Preference.FTP_CAMERA, Preference.TRUE)) {
Preference.preferenceInstance.writeBoolean(Preference.FTP_CAMERA, Preference.FALSE)
(ctx as Activity).requestPermissions(arrayOf(Manifest.permission.CAMERA), 3001)
} else {
showOptionDialog(
getString(R.string.permission_alert),
getString(R.string.capture_permission),
getString(R.string.cancel),
getString(R.string.setting)
) { z ->
if (z == 1) {
val i = Intent(Settings.ACTION_APPLICATION_DETAILS_SETTINGS)
val uri = Uri.fromParts("package", ctx.packageName, null)
i.data = uri
startActivity(i)
}
}
}
return@BottomSheetDialog
}
val file = File("${ctx.cacheDir}/temp.jpeg")
val uri = FileProvider.getUriForFile(ctx, "${BuildConfig.APPLICATION_ID}.provider", file)
val i = Intent(MediaStore.ACTION_IMAGE_CAPTURE)
i.putExtra(MediaStore.EXTRA_OUTPUT, uri)
fromCamera = true
launcher.launch(i)
}
1 -> {
fromCamera = false
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.TIRAMISU) {
launcher.launch(Intent(MediaStore.ACTION_PICK_IMAGES).apply { type = "image/*" })
} else {
if (ContextCompat.checkSelfPermission(ctx, Manifest.permission.READ_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED)
showOptionDialog(getString(R.string.permission_alert), getString(R.string.capture_permission), getString(R.string.cancel), getString(R.string.setting)) { z ->
if (z == 1) {
val i = Intent(Settings.ACTION_APPLICATION_DETAILS_SETTINGS)
val uri = Uri.fromParts("package", ctx.packageName, null)
i.data = uri
startActivity(i)
}
} else {
launcher.launch(Intent(Intent.ACTION_GET_CONTENT).apply { type = "image/*" })
}
}
}
}
}
.show(childFragmentManager, BottomSheetDialog::class.java.simpleName)
}
private val launcher = registerForActivityResult(ActivityResultContracts.StartActivityForResult()) {
if (it == null || it.resultCode == Activity.RESULT_CANCELED) return@registerForActivityResult
var bitmap: Bitmap? = null
if (fromCamera) {
val exifOrientation = ExifInterface("${ctx.cacheDir}/temp.jpeg").getAttributeInt(
ExifInterface.TAG_ORIENTATION,
ExifInterface.ORIENTATION_NORMAL
)
val orientation = when (exifOrientation) {
ExifInterface.ORIENTATION_ROTATE_270 -> 270f
ExifInterface.ORIENTATION_ROTATE_180 -> 180f
ExifInterface.ORIENTATION_ROTATE_90 -> 90f
else -> 0f
}
bitmap = BitmapFactory.decodeFile("${ctx.cacheDir}/temp.jpeg")
val mat = Matrix()
mat.postRotate(orientation)
bitmap = Bitmap.createBitmap(bitmap, 0, 0, bitmap.width, bitmap.height, mat, false)
} else {
it.data?.data?.let { uri ->
try{ bitmap = BitmapFactory.decodeStream(ctx.contentResolver.openInputStream(uri))
val path = UriHelper.getInstance(ctx).getPath(uri) ?: return@registerForActivityResult
if (!File(path).exists()) return@registerForActivityResult
val exifOrientation = ExifInterface(path).getAttributeInt(
ExifInterface.TAG_ORIENTATION,
ExifInterface.ORIENTATION_NORMAL
)
val orientation = when (exifOrientation) {
ExifInterface.ORIENTATION_ROTATE_270 -> 270f
ExifInterface.ORIENTATION_ROTATE_180 -> 180f
ExifInterface.ORIENTATION_ROTATE_90 -> 90f
else -> 0f
}
val mat = Matrix()
mat.postRotate(orientation)
bitmap = Bitmap.createBitmap(bitmap!!, 0, 0, bitmap!!.width, bitmap!!.height, mat, false)
}catch (e:Exception){}
}
}
bitmap?.let { input ->
val frag = FragImageEditor(input, false) { code, result ->
if (code != Activity.RESULT_OK) return@FragImageEditor
selectImage = result
binding.placeHolder.visibility = View.GONE
binding.imageParent.visibility = View.VISIBLE
binding.image.load(result)
binding.imageButtonIcon.setImageResource(R.mipmap.img_edit)
binding.imageButtonText.setText(R.string.edit_photo)
}
frag.input = input
(ctx as? ActBoarding)?.navigateTo(frag, true)
(ctx as? ActHome)?.navigateTo(frag, true)
}
}
override fun onPause() {
/*if (recording) {
stopRecording()
stopPlayer()
}*/
super.onPause()
}
private fun chooseName() {
if (selectImage == null) {
showSnackBar("Please take a photo, or select one from your gallery to add to your recording.")
return
}
var videoName = ""
val profileModel = ProfileModel.profileData()
val videos = mutableListOf<VideoModel>()
val names = mutableListOf<Int>()
names.add(0)
profileModel.video?.forEach {
if (it.category == "P") {
videos.add(it)
try {
val a = it.name.replace("Video ", "", true).toInt()
names.add(a)
} catch (e: Exception) {
//ignore
}
}
}
names.sort()
inputDialog(
getString(R.string.name_video),
getString(R.string.name_video_desc),
getString(R.string.video_name),
"",
getString(R.string.auto_name),
getString(R.string.name_save),
maxLength = 18,
enableEmoji = false
) {
a0, a1 ->
if (a0 == "no")
videoName = "Video ${names.last() + 1}"
else if (a0 == "yes")
videoName = a1.trim()
if (videoName.length < 3 || videoName.length > 18 || !videoName.contains(Regex("^[a-zA-Z0-9 ]+$"))) {
showMessage(getString(R.string.name_video_validation), narrowPadding = true, expanded = true) { chooseName() }
return@inputDialog
}
var duplicateName = false
for (video in videos) {
if (video.name == videoName) {
duplicateName = true
break
}
}
if (duplicateName)
showMessage(getString(R.string.name_already_used_please_enter_a_new_name), narrowPadding = true, expanded = true) { chooseName() }
else
submit(videoName, System.currentTimeMillis() / 1000, videos.size + 1)
}
}
private fun submit(title: String, qId: Long, sId: Int) {
if (netNotConnected()) {
showSnackBar(getStr(R.string.no_internet))
return
}
if (selectImage == null) {
showSnackBar("Please take a photo, or select one from your gallery to add to your recording.")
return
}
loader = showLoader()
val guid = Preference.preferenceInstance
.readString(Preference.GUID)
.toRequestBody("text".toMediaTypeOrNull())
val username = Preference.preferenceInstance
.readString(Preference.USERNAME)
.toRequestBody("text".toMediaTypeOrNull())
val questionId = "$qId"
.toRequestBody("text".toMediaTypeOrNull())
val category = "P".toRequestBody("text".toMediaTypeOrNull())
val answerText = "".toRequestBody("text".toMediaTypeOrNull())
val vidStatus = "p".toRequestBody("text".toMediaTypeOrNull())
val vidsequence = "$sId".toRequestBody("text".toMediaTypeOrNull())
val name = title.toRequestBody("text".toMediaTypeOrNull())
var outputFileName = "${Preference.preferenceInstance.readString(Preference.USERNAME)}_profile_${System.currentTimeMillis()}"
var file = File.createTempFile(outputFileName, ".m4a", ctx.cacheDir)
FileInputStream("${ctx.cacheDir}/temp.m4a").use { input ->
FileOutputStream(file).use { output ->
input.copyTo(output)
}
}
val audioFile = MultipartBody.Part.createFormData(
"audio_file", file.name, file.asRequestBody("audio".toMediaTypeOrNull())
)
outputFileName = "${Preference.preferenceInstance.readString(Preference.USERNAME)}_profile_${System.currentTimeMillis()}"
file = File.createTempFile(outputFileName, ".jpg", ctx.cacheDir)
selectImage?.compress(Bitmap.CompressFormat.JPEG, 100, FileOutputStream(file.path))
val imageFile = MultipartBody.Part.createFormData(
"image", file.name, file.asRequestBody("image".toMediaTypeOrNull())
)
ApiClient.getClient().uploadAudioImageMedia(guid, username, questionId, category, answerText, vidStatus, vidsequence, name, audioFile, imageFile)
.enqueue(object: Callback<VideoUploadModel>{
override fun onResponse(
call: Call<VideoUploadModel>,
response: Response<VideoUploadModel>
) {
if (response.body()?.videolink != null && FragImageAudio.title.isBlank()) {
loader?.dismiss()
binding.close.performClick()
listener?.onFragResponse("result", "P")
} else if (response.body()?.videolink != null) {
Preference.preferenceInstance.writeString(type, response.body()?.videolink ?: "")
FragIntroVideo.files[title] = response.body()?.videolink ?: ""
FragIntroVideo.uploaded = true
concatVideo()
} else {
loader?.dismiss()
showSnackBar("${response.body()?.msg}")
}
}
override fun onFailure(call: Call<VideoUploadModel>, t: Throwable) {
loader?.dismiss()
}
})
}
private fun concatVideo() {
val concatFiles = mutableListOf<MediaConcatPath>()
FragIntroVideo.files.forEach { concatFiles.add(MediaConcatPath(it.value.substringAfterLast("/"))) }
val request = MediaConcatModel(
Preference.preferenceInstance.readString(Preference.GUID),
Preference.preferenceInstance.readString(Preference.USERNAME),
"ffmpeg-enc",
concatFiles
)
ApiClient.getMediaClient().concatMedia(request)
.enqueue(object : Callback<ResponseBean<JsonElement>> {
override fun onResponse(call: Call<ResponseBean<JsonElement>>, response: Response<ResponseBean<JsonElement>>) {
(ctx as? ActBoarding)?.updateDetail()
earnCoin()
}
override fun onFailure(call: Call<ResponseBean<JsonElement>>, t: Throwable) {
loader?.dismiss()
}
})
}
private fun earnCoin() {
ApiClient.getClient().updateCoin(
Preference.preferenceInstance.readString(Preference.USERNAME),
Preference.preferenceInstance.readString(Preference.GUID),
"1", "0", CoinModelImpl.PROFILE_VIDEO,
"Profile questions", "earned"
).enqueue(object : Callback<ResponseBean<ProfileModel>> {
override fun onResponse(call: Call<ResponseBean<ProfileModel>>, response: Response<ResponseBean<ProfileModel>>) {
loader?.dismiss()
if (response.body()?.code == 0) {
postDelayed({ ctx.showUploadSuccessDialog() }, 1000)
showEarnCoin("1")
(ctx as? ActBoarding)?.getCredApi()
Preference.preferenceInstance.writeInt(Preference.STEP, 2)
binding.close.performClick()
} else {
showMessage("${response.body()?.msg}")
}
}
override fun onFailure(call: Call<ResponseBean<ProfileModel>>, t: Throwable) {
loader?.dismiss()
}
})
}
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment