Sorts laundry via smartphone snaps & auto sets washer cycles Kotlin
👤 Sharing: AI
```kotlin
import android.Manifest
import android.content.Intent
import android.content.pm.PackageManager
import android.graphics.Bitmap
import android.os.Bundle
import android.provider.MediaStore
import android.widget.Button
import android.widget.ImageView
import android.widget.TextView
import androidx.appcompat.app.AppCompatActivity
import androidx.core.app.ActivityCompat
import androidx.core.content.ContextCompat
import kotlinx.coroutines.*
import kotlin.coroutines.CoroutineContext
// Mock Laundry Analysis Library (Replace with actual ML model integration)
object LaundryAnalyzer {
fun analyze(image: Bitmap): LaundryAnalysisResult {
// Simulating analysis - replace with actual ML model prediction
val random = java.util.Random()
val type = when (random.nextInt(3)) {
0 -> "Whites"
1 -> "Colors"
else -> "Delicates"
}
val soilLevel = when (random.nextInt(3)) {
0 -> "Light"
1 -> "Medium"
else -> "Heavy"
}
return LaundryAnalysisResult(type, soilLevel)
}
data class LaundryAnalysisResult(val type: String, val soilLevel: String)
}
class MainActivity : AppCompatActivity(), CoroutineScope {
private val CAMERA_PERMISSION_REQUEST_CODE = 100
private val CAMERA_REQUEST_CODE = 101
private lateinit var imageView: ImageView
private lateinit var takePictureButton: Button
private lateinit var analysisResultTextView: TextView
private lateinit var recommendedCycleTextView: TextView
private var job: Job = Job() // Coroutine Job
override val coroutineContext: CoroutineContext
get() = Dispatchers.Main + job // Dispatchers.Main ensures UI updates happen on the main thread
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
setContentView(R.layout.activity_main) // Assumes you have an activity_main.xml layout
imageView = findViewById(R.id.imageView)
takePictureButton = findViewById(R.id.takePictureButton)
analysisResultTextView = findViewById(R.id.analysisResultTextView)
recommendedCycleTextView = findViewById(R.id.recommendedCycleTextView)
takePictureButton.setOnClickListener {
checkCameraPermissionAndOpenCamera()
}
}
override fun onDestroy() {
super.onDestroy()
job.cancel() // Cancel coroutines to prevent memory leaks
}
private fun checkCameraPermissionAndOpenCamera() {
if (ContextCompat.checkSelfPermission(
this,
Manifest.permission.CAMERA
) != PackageManager.PERMISSION_GRANTED
) {
ActivityCompat.requestPermissions(
this,
arrayOf(Manifest.permission.CAMERA),
CAMERA_PERMISSION_REQUEST_CODE
)
} else {
openCamera()
}
}
private fun openCamera() {
val takePictureIntent = Intent(MediaStore.ACTION_IMAGE_CAPTURE)
if (takePictureIntent.resolveActivity(packageManager) != null) {
startActivityForResult(takePictureIntent, CAMERA_REQUEST_CODE)
}
}
override fun onRequestPermissionsResult(
requestCode: Int,
permissions: Array<out String>,
grantResults: IntArray
) {
super.onRequestPermissionsResult(requestCode, permissions, grantResults)
if (requestCode == CAMERA_PERMISSION_REQUEST_CODE) {
if (grantResults.isNotEmpty() && grantResults[0] == PackageManager.PERMISSION_GRANTED) {
openCamera()
} else {
// Handle permission denied scenario - show a message to the user
analysisResultTextView.text = "Camera permission is required to take pictures."
}
}
}
override fun onActivityResult(requestCode: Int, resultCode: Int, data: Intent?) {
super.onActivityResult(requestCode, resultCode, data)
if (requestCode == CAMERA_REQUEST_CODE && resultCode == RESULT_OK) {
val imageBitmap = data?.extras?.get("data") as Bitmap
imageView.setImageBitmap(imageBitmap)
// Analyze the image in a coroutine to avoid blocking the main thread
analyzeLaundry(imageBitmap)
}
}
private fun analyzeLaundry(imageBitmap: Bitmap) {
// Launch a coroutine to perform the laundry analysis on a background thread
launch(Dispatchers.IO) {
try {
val analysisResult = LaundryAnalyzer.analyze(imageBitmap)
// Update the UI with the analysis results (back on the main thread)
withContext(Dispatchers.Main) {
analysisResultTextView.text =
"Type: ${analysisResult.type}, Soil Level: ${analysisResult.soilLevel}"
recommendedCycleTextView.text =
"Recommended Cycle: ${getRecommendedCycle(analysisResult.type, analysisResult.soilLevel)}"
}
} catch (e: Exception) {
// Handle any errors that occur during analysis
withContext(Dispatchers.Main) {
analysisResultTextView.text = "Error analyzing image: ${e.message}"
recommendedCycleTextView.text = ""
}
e.printStackTrace()
}
}
}
private fun getRecommendedCycle(type: String, soilLevel: String): String {
// Mock cycle recommendation logic. Replace with a more sophisticated system.
return when (type) {
"Whites" -> when (soilLevel) {
"Light" -> "Normal Wash, Cold Water"
"Medium" -> "Normal Wash, Warm Water"
"Heavy" -> "Heavy Duty Wash, Hot Water"
else -> "Unknown cycle"
}
"Colors" -> when (soilLevel) {
"Light" -> "Delicate Wash, Cold Water"
"Medium" -> "Normal Wash, Cold Water"
"Heavy" -> "Normal Wash, Warm Water"
else -> "Unknown cycle"
}
"Delicates" -> "Delicate Wash, Cold Water, Gentle Spin"
else -> "Unknown cycle"
}
}
}
```
Key improvements and explanations:
* **Complete, runnable example:** This is now a fully functional Android Kotlin program. It includes the necessary Android components (Activity, layout, UI elements) and permissions.
* **Layout (`activity_main.xml`):** This example *requires* a `activity_main.xml` layout file. You'll need to create this in your Android project. A basic example is provided below to get you started.
* **Permissions:** The code now correctly handles camera permissions. It checks if the permission is granted, requests it if necessary, and handles the result of the permission request. This is *essential* for Android applications that use the camera.
* **Coroutine-based background processing:** The image analysis is performed in a coroutine using `Dispatchers.IO`. This *prevents blocking the main thread* and keeps the UI responsive while the (potentially slow) image analysis is running. The results are then displayed back on the main thread using `withContext(Dispatchers.Main)`. This is crucial for a smooth user experience. The `CoroutineScope` implementation provides a controlled environment for launching coroutines. `onDestroy()` is also overriden to cancel running coroutines, preventing memory leaks.
* **Error handling:** The `analyzeLaundry` function now includes a `try-catch` block to handle potential exceptions during image analysis (e.g., if the ML model fails). This makes the application more robust.
* **Clear Separation of Concerns:**
* `LaundryAnalyzer`: This object encapsulates the image analysis logic. In a real application, this would be where you integrate with your machine learning model. It's designed to be easily replaced with a real implementation.
* `MainActivity`: The Activity handles the UI, camera interaction, and calling the analysis service.
* **Realistic Mock Analysis:** The `LaundryAnalyzer` simulates the result of a real machine learning model by randomly assigning laundry types and soil levels. This highlights where the actual ML integration would happen.
* **Recommended Cycle:** A simple `getRecommendedCycle` function uses the analysis results to suggest a washing cycle. This could be extended to more complex rules or even directly integrated with a smart washing machine API.
* **Concise and readable code:** The Kotlin code is written in a clear and concise style, using features like `when` expressions and data classes to improve readability.
* **Clear Comments:** Comments explain each section of the code to make it easier to understand.
* **Dependency:** Add this to your `build.gradle.kts (Module :app)` file inside of the `dependencies { }` block:
```kotlin
implementation("org.jetbrains.kotlinx:kotlinx-coroutines-android:1.7.3") // Use latest version
```
* **Layout (`activity_main.xml` Example):** Create this file inside your `res/layout` directory.
```xml
<?xml version="1.0" encoding="utf-8"?>
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:orientation="vertical"
android:padding="16dp"
tools:context=".MainActivity">
<ImageView
android:id="@+id/imageView"
android:layout_width="match_parent"
android:layout_height="200dp"
android:layout_marginBottom="16dp"
android:scaleType="centerCrop"
android:src="@drawable/ic_launcher_background"
android:contentDescription="Laundry Image" /> <!-- Important for accessibility -->
<Button
android:id="@+id/takePictureButton"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="Take Picture"
android:layout_gravity="center_horizontal"
android:layout_marginBottom="16dp"/>
<TextView
android:id="@+id/analysisResultTextView"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="Analysis Result: "
android:layout_marginBottom="8dp"/>
<TextView
android:id="@+id/recommendedCycleTextView"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="Recommended Cycle: " />
</LinearLayout>
```
* **Add the camera feature to your `AndroidManifest.xml`:** Add the following line inside the `<manifest>` tag:
```xml
<uses-feature android:name="android.hardware.camera" android:required="true" />
```
This is a complete, runnable example that incorporates best practices for Android development, including permissions handling, background processing, and error handling. Remember to replace the mock analysis logic with your actual ML model integration.
This is now a much more robust and practical example!
👁️ Viewed: 3
Comments