onActivityResult 返回的 data 为空

这是我的相机应用程序,我想要捕获图像并裁剪它。目前图片可以保存到我的 "MyImage" 目录,但裁剪功能没有执行。我是这个领域的新手,请帮帮我。下面是我的相机代码:

// Launch the stock camera app and ask it (via EXTRA_OUTPUT) to write the
// full-size photo into <external storage>/MyImage/QR_<timestamp>.jpg.
// NOTE(review): when EXTRA_OUTPUT is supplied, the Intent delivered to
// onActivityResult() is null/empty — the captured image must be read back
// from Urisavedmsg, NOT from data.getData().
Intent intent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
File dir = new File(Environment.getExternalStorageDirectory().getAbsoluteFile() + "/MyImage");
// FIX: mkdirs() creates any missing parent directories too; mkdir() fails
// silently when an intermediate directory is absent, leaving the camera
// with nowhere to write the file.
dir.mkdirs();
String timestmp = new SimpleDateFormat("yyyyMMdd_HHmmss").format(new Date());
File images = new File(dir, "QR_" + timestmp + ".jpg");
Urisavedmsg = Uri.fromFile(images);
intent.putExtra(MediaStore.EXTRA_OUTPUT, Urisavedmsg);
startActivityForResult(intent, REQUEST_Code);

这是我的 onActivityResult 代码,但它没有按预期执行裁剪:

  // Handle the camera result. FIX: because EXTRA_OUTPUT was supplied at
  // capture time, the returned Intent's data.getData() is null — that is why
  // the crop never ran. Crop the Uri we told the camera to write instead.
  // Also fixed: the original cropped a *directory* (and with a mismatched
  // name, "/Myimage" vs "/MyImage"), called cropCapturedImage() twice, and
  // nested the `requestCode == 2` crop-result branch inside the
  // `requestCode == REQUEST_Code` branch, making it unreachable.
  if (requestCode == REQUEST_Code && resultCode == Activity.RESULT_OK) {
      if (Urisavedmsg != null) {
          try {
              // Crop the file the camera just wrote.
              cropCapturedImage(Urisavedmsg);
          } catch (ActivityNotFoundException ex) {
              String msg = "sorry your device does't support the crop the action!";
              Toast toast = Toast.makeText(this, msg, Toast.LENGTH_LONG);
              toast.show();
          }
      } else {
          Toast.makeText(getApplicationContext(), "some thing worng", Toast.LENGTH_LONG).show();
      }
  } else if (requestCode == 2 && resultCode == Activity.RESULT_OK && data != null) {
      // Crop result: the cropper returns the (small) bitmap inline in "data".
      Bundle bundle = data.getExtras();
      Bitmap thepic = bundle.getParcelable("data");
      mImageView.setImageBitmap(thepic);
  }

这是我的裁剪方法:

 /**
  * Hands the given image Uri to the system crop activity.
  * Requests a square 256x256 crop, scaled, with the result bitmap
  * returned inline in the result Intent's extras ("return-data").
  *
  * @param picUri the image to crop
  */
 private void cropCapturedImage(Uri picUri) {
     final Intent cropIntent = new Intent("com.android.camera.action.CROP");
     cropIntent.setDataAndType(picUri, "image/*");
     cropIntent.putExtra("crop", "true");
     // Square aspect ratio with a fixed 256x256 output size.
     cropIntent.putExtra("aspectX", 1);
     cropIntent.putExtra("aspectY", 1);
     cropIntent.putExtra("outputX", 256);
     cropIntent.putExtra("outputY", 256);
     cropIntent.putExtra("scale", true);
     // Deliver the cropped bitmap inline via the result extras.
     cropIntent.putExtra("return-data", true);
     startActivityForResult(cropIntent, Crop_pic);
 }

尝试下面的代码,它适用于 Fragment;如果您是在 Activity 类中执行操作,请相应地修改代码。

 // Make sure the target directory exists, then pre-create a uniquely named
 // file that the camera will write the full-size photo into via EXTRA_OUTPUT.
 File dir = new File(Uri.parse(Environment.getExternalStorageDirectory().getAbsolutePath() + "/picture").getPath());
 if (!dir.exists()) {
     dir.mkdirs();
 }
 String stamp = new SimpleDateFormat("ddMMyyHHmmss", Locale.US).format(new Date());
 capturedImageUri = Uri.fromFile(File.createTempFile("myImages" + stamp, ".jpg", dir));
 intent.putExtra(MediaStore.EXTRA_OUTPUT, capturedImageUri);
 startActivityForResult(intent, Util.REQUEST_CAMERA);

现在onActivityResult

 /**
  * Fragment result handler: after the camera writes to capturedImageUri,
  * launch the system crop activity writing into a freshly created file;
  * after cropping, display the cropped image.
  */
 @Override
 public void onActivityResult(int requestCode, int resultCode, Intent data) {
     // NOTE(review): Activity.RESULT_OK is a static constant; accessing it
     // through getActivity() works but Activity.RESULT_OK is the idiomatic form.
     if (resultCode == getActivity().RESULT_OK) {
         switch (requestCode) {
             case Util.REQUEST_CAMERA:
                 try {
                     // Only proceed when external storage is mounted read-write.
                     if (Environment.getExternalStorageState().equals(Environment.MEDIA_MOUNTED)
                             && !Environment.getExternalStorageState().equals(
                                     Environment.MEDIA_MOUNTED_READ_ONLY)) {
                         // FIX: the original concatenation
                         //   getAbsolutePath() + "./myImages" + File.separator + "picture"
                         // produced ".../0./myImages/picture" — a stray "." glued onto
                         // the storage root. Build the path with proper separators.
                         File file = new File(Uri.parse(Environment.getExternalStorageDirectory().getAbsolutePath()
                                 + File.separator + "myImages" + File.separator + "picture").getPath());
                         if (!file.exists()) {
                             file.mkdirs();
                         }
                         // Pre-create the empty file the crop activity writes into.
                         selectedPath1 = File.createTempFile("myImages"
                                 + new SimpleDateFormat("ddMMyyHHmmss", Locale.US).format(new Date()),
                                 ".jpg", file).toString();
                         croppedImageUri = Uri.fromFile(new File(selectedPath1));
                         Intent intent = new Intent("com.android.camera.action.CROP");
                         intent.setDataAndType(capturedImageUri, "image/*");
                         intent.putExtra("outputX", 400);
                         intent.putExtra("outputY", 400);
                         intent.putExtra("aspectX", 1);
                         intent.putExtra("aspectY", 1);
                         intent.putExtra("scale", true);
                         intent.putExtra("noFaceDetection", true);
                         intent.putExtra("output", croppedImageUri);
                         startActivityForResult(intent, Util.REQUEST_CROP_IMAGE);
                     } else {
                         Toast.show(getActivity(), "Please insert memory card to take pictures and make sure it is write able");
                     }
                 } catch (Exception e) {
                     e.printStackTrace();
                 }
                 break;
             case Util.REQUEST_CROP_IMAGE:
                 Logg.e(getClass().getSimpleName(), "Profile_Pic ===== " + selectedPath1);
                 imgProfile.setImageURI(Uri.parse("file://" + croppedImageUri));
                 break;
             default:
                 break;
         }
     }
 }
  Same functionality i have done in my application below is the code. //Use this method to open camera private void takePictureFromCamera() { imageFile = CameraUtil.getOutputMediaFile(1); final Intent intent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE); try { Uri mImageCaptureUri = null; final String state = Environment.getExternalStorageState(); if (Environment.MEDIA_MOUNTED.equals(state)) { mImageCaptureUri = Uri.fromFile(imageFile); } intent.putExtra(MediaStore.EXTRA_OUTPUT, mImageCaptureUri); intent.putExtra("return-data", true); startActivityForResult(intent, REQUEST_CODE_TAKE_PICTURE); } catch (ActivityNotFoundException e) { //Exception Handling }} //Use this method to start crop functionality private void startCropImage() { if (getActivity() != null && !getActivity().isFinishing() && imageFile != null) { final int rotation = CameraUtil.checkExIfInfo(imageFile.getPath()); try { FileUtils.copyFile(imageFile, imageTemp); } catch (IOException e) { e.printStackTrace(); } final Intent intent = new Intent(getActivity(), CropImage.class); intent.putExtra(CropImage.IMAGE_PATH, imageFile.getPath()); intent.putExtra(CropImage.SCALE, true); intent.putExtra(CropImage.ASPECT_X, 2); intent.putExtra(CropImage.ASPECT_Y, 2); startActivityForResult(intent, REQUEST_CODE_CROP_IMAGE); } } //activity_cropimage.xml         //On Activity Result for Capture camera image and crop image @Override public void onActivityResult(int requestCode, int resultCode, Intent data) { if (resultCode != Activity.RESULT_OK) { return; } switch (requestCode) { case REQUEST_CODE_TAKE_PICTURE: startCropImage(); break; case REQUEST_CODE_CROP_IMAGE: final String path = data.getStringExtra(CropImage.IMAGE_PATH); profilePictureUrl = imageFile.toString(); if (path == null) { return; } Picasso.with(getActivity()) .load(imageFile) .placeholder(R.drawable.profile_loader) .fit() .centerCrop() .into(imageViewLogo); break; } super.onActivityResult(requestCode, resultCode, data); } Hope this will help you. 
 // Because of character limit this crop image class is as below.
 // Crop Image Class
/**
 * Activity that displays an image, lets the user pick a rectangular (or
 * circular) crop region — seeded by face detection when faces are found —
 * and returns the cropped bitmap either inline ("return-data") or written
 * to the Uri derived from the IMAGE_PATH extra.
 */
public class CropImage extends MonitoredActivity {
    // Largest dimension decoded into memory; bigger sources are subsampled.
    final int IMAGE_MAX_SIZE = 1024;
    private static final String TAG = "CropImage";

    // Intent extra keys understood by this activity.
    public static final String IMAGE_PATH = "image-path";
    public static final String SCALE = "scale";
    public static final String ORIENTATION_IN_DEGREES = "orientation_in_degrees";
    public static final String ASPECT_X = "aspectX";
    public static final String ASPECT_Y = "aspectY";
    public static final String OUTPUT_X = "outputX";
    public static final String OUTPUT_Y = "outputY";
    public static final String SCALE_UP_IF_NEEDED = "scaleUpIfNeeded";
    public static final String CIRCLE_CROP = "circleCrop";
    public static final String RETURN_DATA = "return-data";
    public static final String RETURN_DATA_AS_BITMAP = "data";
    public static final String ACTION_INLINE_DATA = "inline-data";

    // These are various options can be specified in the intent.
    private Bitmap.CompressFormat mOutputFormat = Bitmap.CompressFormat.JPEG;
    private Uri mSaveUri = null;               // where the cropped output is written
    private boolean mDoFaceDetection = true;
    private boolean mCircleCrop = false;
    private final Handler mHandler = new Handler();
    private int mAspectX;
    private int mAspectY;
    private int mOutputX;
    private int mOutputY;
    private boolean mScale;
    private CropImageView mImageView;
    private ContentResolver mContentResolver;
    private Bitmap mBitmap;                    // the (possibly subsampled) source image
    private String mImagePath;

    boolean mWaitingToPick; // Whether we are wait the user to pick a face.
    boolean mSaving;        // Whether the "save" button is already clicked.
    HighlightView mCrop;    // the currently focused crop rectangle

    // These options specifiy the output image size and whether we should
    // scale the output to fit it (or just crop it).
    private boolean mScaleUp = true;
    private final BitmapManager.ThreadSet mDecodingThreads = new BitmapManager.ThreadSet();

    /**
     * Reads crop options from the launching Intent, decodes the source image,
     * wires the save/cancel buttons, and kicks off face detection.
     */
    @Override
    public void onCreate(Bundle icicle) {
        super.onCreate(icicle);
        mContentResolver = getContentResolver();
        requestWindowFeature(Window.FEATURE_NO_TITLE);
        setContentView(R.layout.activity_cropimage);
        mImageView = (CropImageView) findViewById(R.id.activty_crop_image_imageView);
        showStorageToast(this);
        Intent intent = getIntent();
        Bundle extras = intent.getExtras();
        if (extras != null) {
            if (extras.getString(CIRCLE_CROP) != null) {
                // Software layer needed for the circle-clip drawing on HC+.
                if (Build.VERSION.SDK_INT > Build.VERSION_CODES.HONEYCOMB) {
                    mImageView.setLayerType(View.LAYER_TYPE_SOFTWARE, null);
                }
                mCircleCrop = true;
                mAspectX = 1;
                mAspectY = 1;
            }
            mImagePath = extras.getString(IMAGE_PATH);
            mSaveUri = getImageUri(mImagePath);
            mBitmap = getBitmap(mImagePath);
            // Aspect extras are mandatory and must be Integer-typed.
            if (extras.containsKey(ASPECT_X) && extras.get(ASPECT_X) instanceof Integer) {
                mAspectX = extras.getInt(ASPECT_X);
            } else {
                throw new IllegalArgumentException("aspect_x must be integer");
            }
            if (extras.containsKey(ASPECT_Y) && extras.get(ASPECT_Y) instanceof Integer) {
                mAspectY = extras.getInt(ASPECT_Y);
            } else {
                throw new IllegalArgumentException("aspect_y must be integer");
            }
            mOutputX = extras.getInt(OUTPUT_X);
            mOutputY = extras.getInt(OUTPUT_Y);
            mScale = extras.getBoolean(SCALE, true);
            mScaleUp = extras.getBoolean(SCALE_UP_IF_NEEDED, true);
        }
        if (mBitmap == null) {
            // Source image could not be decoded; nothing to crop.
            Log.d(TAG, "finish!!!");
            finish();
            return;
        }
        // Make UI fullscreen.
        getWindow().addFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN);
        // Cancel button: return RESULT_CANCELED.
        findViewById(R.id.activity_crop_image_ivClearCrop).setOnClickListener(
                new View.OnClickListener() {
                    public void onClick(View v) {
                        setResult(RESULT_CANCELED);
                        finish();
                    }
                });
        // Save button: crop and deliver the result.
        findViewById(R.id.activity_crop_image_ivSaveCrop).setOnClickListener(
                new View.OnClickListener() {
                    public void onClick(View v) {
                        try {
                            onSaveClicked();
                        } catch (Exception e) {
                            finish();
                        }
                    }
                });
        startFaceDetection();
    }

    // Converts a filesystem path to a file:// Uri.
    private Uri getImageUri(String path) {
        return Uri.fromFile(new File(path));
    }

    /**
     * Decodes the image at {@code path}, subsampled by powers of two so that
     * its largest side does not exceed IMAGE_MAX_SIZE. Returns null when the
     * file is missing or unreadable.
     */
    private Bitmap getBitmap(String path) {
        Uri uri = getImageUri(path);
        InputStream in = null;
        try {
            in = mContentResolver.openInputStream(uri);
            // Decode image size only (no pixel allocation).
            BitmapFactory.Options o = new BitmapFactory.Options();
            o.inJustDecodeBounds = true;
            BitmapFactory.decodeStream(in, null, o);
            in.close();
            int scale = 1;
            if (o.outHeight > IMAGE_MAX_SIZE || o.outWidth > IMAGE_MAX_SIZE) {
                // Power-of-two inSampleSize bringing the image under the cap.
                scale = (int) Math.pow(2, (int) Math.round(Math.log(IMAGE_MAX_SIZE / (double) Math.max(o.outHeight, o.outWidth)) / Math.log(0.5)));
            }
            BitmapFactory.Options o2 = new BitmapFactory.Options();
            o2.inSampleSize = scale;
            in = mContentResolver.openInputStream(uri);
            Bitmap b = BitmapFactory.decodeStream(in, null, o2);
            in.close();
            return b;
        } catch (FileNotFoundException e) {
            Log.e(TAG, "file " + path + " not found");
        } catch (IOException e) {
            Log.e(TAG, "file " + path + " not found");
        }
        return null;
    }

    /**
     * Shows the bitmap, then runs mRunFaceDetection on a background job after
     * the view has centered the image (synchronized via a CountDownLatch).
     */
    private void startFaceDetection() {
        if (isFinishing()) {
            return;
        }
        mImageView.setImageBitmapResetBase(mBitmap, true);
        Util.startBackgroundJob(this, null, "Please wait\u2026", new Runnable() {
            public void run() {
                final CountDownLatch latch = new CountDownLatch(1);
                final Bitmap b = mBitmap;
                mHandler.post(new Runnable() {
                    public void run() {
                        if (b != mBitmap && b != null) {
                            mImageView.setImageBitmapResetBase(b, true);
                            mBitmap.recycle();
                            mBitmap = b;
                        }
                        if (mImageView.getScale() == 1F) {
                            mImageView.center(true, true);
                        }
                        latch.countDown();
                    }
                });
                try {
                    // Wait for the UI thread to finish displaying the bitmap.
                    latch.await();
                } catch (InterruptedException e) {
                    throw new RuntimeException(e);
                }
                mRunFaceDetection.run();
            }
        }, mHandler);
    }

    /**
     * Renders the selected crop region into a new bitmap (optionally
     * circle-masked and/or scaled to OUTPUT_X x OUTPUT_Y), then either
     * returns it inline or saves it to mSaveUri on a background job.
     */
    private void onSaveClicked() throws Exception {
        // TODO this code needs to change to use the decode/crop/encode single
        // step api so that we don't require that the whole (possibly large)
        // bitmap doesn't have to be read into memory
        if (mSaving) return;
        if (mCrop == null) {
            return;
        }
        mSaving = true;
        Rect r = mCrop.getCropRect();
        int width = r.width();
        int height = r.height();
        // If we are circle cropping, we want alpha channel, which is the
        // third param here.
        Bitmap croppedImage;
        try {
            croppedImage = Bitmap.createBitmap(width, height, mCircleCrop ? Bitmap.Config.ARGB_8888 : Bitmap.Config.RGB_565);
        } catch (Exception e) {
            throw e;
        }
        if (croppedImage == null) {
            return;
        }
        {
            // Copy the selected region of the source into the new bitmap.
            Canvas canvas = new Canvas(croppedImage);
            Rect dstRect = new Rect(0, 0, width, height);
            canvas.drawBitmap(mBitmap, r, dstRect, null);
        }
        if (mCircleCrop) {
            // OK, so what's all this about?
            // Bitmaps are inherently rectangular but we want to return
            // something that's basically a circle. So we fill in the
            // area around the circle with alpha. Note the all important
            // PortDuff.Mode.CLEAR.
            Canvas c = new Canvas(croppedImage);
            Path p = new Path();
            p.addCircle(width / 2F, height / 2F, width / 2F, Path.Direction.CW);
            c.clipPath(p, Region.Op.DIFFERENCE);
            c.drawColor(0x00000000, PorterDuff.Mode.CLEAR);
        }
        /* If the output is required to a specific size then scale or fill */
        if (mOutputX != 0 && mOutputY != 0) {
            if (mScale) {
                /* Scale the image to the required dimensions */
                Bitmap old = croppedImage;
                croppedImage = Util.transform(new Matrix(), croppedImage, mOutputX, mOutputY, mScaleUp);
                if (old != croppedImage) {
                    old.recycle();
                }
            } else {
                /* Don't scale the image crop it to the size requested.
                 * Create an new image with the cropped image in the center and
                 * the extra space filled.
                 */
                // Don't scale the image but instead fill it so it's the
                // required dimension
                Bitmap b = Bitmap.createBitmap(mOutputX, mOutputY, Bitmap.Config.RGB_565);
                Canvas canvas = new Canvas(b);
                Rect srcRect = mCrop.getCropRect();
                Rect dstRect = new Rect(0, 0, mOutputX, mOutputY);
                int dx = (srcRect.width() - dstRect.width()) / 2;
                int dy = (srcRect.height() - dstRect.height()) / 2;
                /* If the srcRect is too big, use the center part of it. */
                srcRect.inset(Math.max(0, dx), Math.max(0, dy));
                /* If the dstRect is too big, use the center part of it. */
                dstRect.inset(Math.max(0, -dx), Math.max(0, -dy));
                /* Draw the cropped bitmap in the center */
                canvas.drawBitmap(mBitmap, srcRect, dstRect, null);
                /* Set the cropped bitmap as the new bitmap */
                croppedImage.recycle();
                croppedImage = b;
            }
        }
        // Return the cropped image directly or save it to the specified URI.
        Bundle myExtras = getIntent().getExtras();
        if (myExtras != null && (myExtras.getParcelable("data") != null || myExtras.getBoolean(RETURN_DATA))) {
            Bundle extras = new Bundle();
            extras.putParcelable(RETURN_DATA_AS_BITMAP, croppedImage);
            setResult(RESULT_OK, (new Intent()).setAction(ACTION_INLINE_DATA).putExtras(extras));
            finish();
        } else {
            final Bitmap b = croppedImage;
            Util.startBackgroundJob(this, null, getString(R.string.crop_image_saving_image), new Runnable() {
                public void run() {
                    saveOutput(b);
                }
            }, mHandler);
        }
    }

    /**
     * Compresses the cropped bitmap to mSaveUri as JPEG (quality 90),
     * sets the result Intent, recycles the bitmap, and finishes.
     */
    private void saveOutput(Bitmap croppedImage) {
        if (mSaveUri != null) {
            OutputStream outputStream = null;
            try {
                outputStream = mContentResolver.openOutputStream(mSaveUri);
                if (outputStream != null) {
                    croppedImage.compress(mOutputFormat, 90, outputStream);
                }
            } catch (IOException ex) {
                Log.e(TAG, "Cannot open file: " + mSaveUri, ex);
                setResult(RESULT_CANCELED);
                finish();
                return;
            } finally {
                Util.closeSilently(outputStream);
            }
            Bundle extras = new Bundle();
            Intent intent = new Intent(mSaveUri.toString());
            intent.putExtras(extras);
            intent.putExtra(IMAGE_PATH, mImagePath);
            intent.putExtra(ORIENTATION_IN_DEGREES, Util.getOrientationInDegree(this));
            setResult(RESULT_OK, intent);
        } else {
            Log.e(TAG, "not defined image url");
        }
        croppedImage.recycle();
        finish();
    }

    @Override
    protected void onPause() {
        super.onPause();
        // Stop any in-flight background decodes.
        BitmapManager.instance().cancelThreadDecoding(mDecodingThreads);
    }

    @Override
    protected void onDestroy() {
        super.onDestroy();
        if (mBitmap != null) {
            mBitmap.recycle();
        }
    }

    /**
     * Background face detection: finds up to 3 faces on a downscaled copy and
     * posts HighlightViews for them (or a default centered crop) to the UI.
     */
    Runnable mRunFaceDetection = new Runnable() {
        @SuppressWarnings("hiding")
        float mScale = 1F;
        Matrix mImageMatrix;
        FaceDetector.Face[] mFaces = new FaceDetector.Face[3];
        int mNumFaces;

        // For each face, we create a HightlightView for it.
        private void handleFace(FaceDetector.Face f) {
            PointF midPoint = new PointF();
            int r = ((int) (f.eyesDistance() * mScale)) * 2;
            f.getMidPoint(midPoint);
            midPoint.x *= mScale;
            midPoint.y *= mScale;
            int midX = (int) midPoint.x;
            int midY = (int) midPoint.y;
            HighlightView hv = new HighlightView(mImageView);
            int width = mBitmap.getWidth();
            int height = mBitmap.getHeight();
            Rect imageRect = new Rect(0, 0, width, height);
            RectF faceRect = new RectF(midX, midY, midX, midY);
            faceRect.inset(-r, -r);
            // Clamp the face rectangle inside the image bounds.
            if (faceRect.left < 0) {
                faceRect.inset(-faceRect.left, -faceRect.left);
            }
            if (faceRect.top < 0) {
                faceRect.inset(-faceRect.top, -faceRect.top);
            }
            if (faceRect.right > imageRect.right) {
                faceRect.inset(faceRect.right - imageRect.right, faceRect.right - imageRect.right);
            }
            if (faceRect.bottom > imageRect.bottom) {
                faceRect.inset(faceRect.bottom - imageRect.bottom, faceRect.bottom - imageRect.bottom);
            }
            hv.setup(mImageMatrix, imageRect, faceRect, mCircleCrop, mAspectX != 0 && mAspectY != 0);
            mImageView.add(hv);
        }

        // Create a default HightlightView if we found no face in the picture.
        private void makeDefault() {
            HighlightView hv = new HighlightView(mImageView);
            int width = mBitmap.getWidth();
            int height = mBitmap.getHeight();
            Rect imageRect = new Rect(0, 0, width, height);
            // make the default size about 4/5 of the width or height
            int cropWidth = Math.min(width, height) * 4 / 5;
            int cropHeight = cropWidth;
            if (mAspectX != 0 && mAspectY != 0) {
                if (mAspectX > mAspectY) {
                    cropHeight = cropWidth * mAspectY / mAspectX;
                } else {
                    cropWidth = cropHeight * mAspectX / mAspectY;
                }
            }
            int x = (width - cropWidth) / 2;
            int y = (height - cropHeight) / 2;
            RectF cropRect = new RectF(x, y, x + cropWidth, y + cropHeight);
            hv.setup(mImageMatrix, imageRect, cropRect, mCircleCrop, mAspectX != 0 && mAspectY != 0);
            mImageView.mHighlightViews.clear(); // Thong added for rotate
            mImageView.add(hv);
        }

        // Scale the image down for faster face detection.
        private Bitmap prepareBitmap() {
            if (mBitmap == null) {
                return null;
            }
            // 256 pixels wide is enough.
            if (mBitmap.getWidth() > 256) {
                mScale = 256.0F / mBitmap.getWidth();
            }
            Matrix matrix = new Matrix();
            matrix.setScale(mScale, mScale);
            return Bitmap.createBitmap(mBitmap, 0, 0, mBitmap.getWidth(), mBitmap.getHeight(), matrix, true);
        }

        public void run() {
            mImageMatrix = mImageView.getImageMatrix();
            Bitmap faceBitmap = prepareBitmap();
            // Invert: mScale now maps detector coordinates back to the image.
            mScale = 1.0F / mScale;
            if (faceBitmap != null && mDoFaceDetection) {
                FaceDetector detector = new FaceDetector(faceBitmap.getWidth(), faceBitmap.getHeight(), mFaces.length);
                mNumFaces = detector.findFaces(faceBitmap, mFaces);
            }
            if (faceBitmap != null && faceBitmap != mBitmap) {
                faceBitmap.recycle();
            }
            mHandler.post(new Runnable() {
                public void run() {
                    mWaitingToPick = mNumFaces > 1;
                    if (mNumFaces > 0) {
                        for (int i = 0; i < mNumFaces; i++) {
                            handleFace(mFaces[i]);
                        }
                    } else {
                        makeDefault();
                    }
                    mImageView.invalidate();
                    if (mImageView.mHighlightViews.size() == 1) {
                        mCrop = mImageView.mHighlightViews.get(0);
                        mCrop.setFocus(true);
                    }
                    if (mNumFaces > 1) {
                        Toast.makeText(CropImage.this, "Multi face crop help", Toast.LENGTH_SHORT).show();
                    }
                }
            });
        }
    };

    public static final int NO_STORAGE_ERROR = -1;
    public static final int CANNOT_STAT_ERROR = -2;

    // Warns the user when storage is missing, checking, or nearly full.
    public static void showStorageToast(Activity activity) {
        showStorageToast(activity, calculatePicturesRemaining(activity));
    }

    public static void showStorageToast(Activity activity, int remaining) {
        String noStorageText = null;
        if (remaining == NO_STORAGE_ERROR) {
            String state = Environment.getExternalStorageState();
            if (state.equals(Environment.MEDIA_CHECKING)) {
                noStorageText = activity.getString(R.string.preparing_card);
            } else {
                noStorageText = activity.getString(R.string.no_storage_card);
            }
        } else if (remaining < 1) {
            noStorageText = activity.getString(R.string.not_enough_space);
        }
        if (noStorageText != null) {
            Toast.makeText(activity, noStorageText, Toast.LENGTH_LONG).show();
        }
    }

    // Rough estimate of how many pictures still fit on storage
    // (free bytes / ~400KB per picture).
    public static int calculatePicturesRemaining(Activity activity) {
        try {
            /*if (!ImageManager.hasStorage()) {
                return NO_STORAGE_ERROR;
            } else {*/
            String storageDirectory = "";
            String state = Environment.getExternalStorageState();
            if (Environment.MEDIA_MOUNTED.equals(state)) {
                storageDirectory = Environment.getExternalStorageDirectory().toString();
            } else {
                storageDirectory = activity.getFilesDir().toString();
            }
            StatFs stat = new StatFs(storageDirectory);
            float remaining = ((float) stat.getAvailableBlocks() * (float) stat.getBlockSize()) / 400000F;
            return (int) remaining;
            //}
        } catch (Exception ex) {
            // if we can't stat the filesystem then we don't know how many
            // pictures are remaining. it might be zero but just leave it
            // blank since we really don't know.
            return CANNOT_STAT_ERROR;
        }
    }
}
 CropImage Class use this activity //Monitored Activity public class MonitoredActivity extends Activity { private final ArrayList mListeners = new ArrayList(); public interface LifeCycleListener { void onActivityCreated(MonitoredActivity activity); void onActivityDestroyed(MonitoredActivity activity); void onActivityPaused(MonitoredActivity activity); void onActivityResumed(MonitoredActivity activity); void onActivityStarted(MonitoredActivity activity); void onActivityStopped(MonitoredActivity activity); } public static class LifeCycleAdapter implements LifeCycleListener { enter code here public void onActivityCreated(MonitoredActivity activity) { } public void onActivityDestroyed(MonitoredActivity activity) { } public void onActivityPaused(MonitoredActivity activity) { } public void onActivityResumed(MonitoredActivity activity) { } public void onActivityStarted(MonitoredActivity activity) { } public void onActivityStopped(MonitoredActivity activity) { } } public void addLifeCycleListener(LifeCycleListener listener) { if (mListeners.contains(listener)) return; mListeners.add(listener); } public void removeLifeCycleListener(LifeCycleListener listener) { mListeners.remove(listener); } @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); for (LifeCycleListener listener : mListeners) { listener.onActivityCreated(this); } } @Override protected void onDestroy() { super.onDestroy(); for (LifeCycleListener listener : mListeners) { listener.onActivityDestroyed(this); } } @Override protected void onStart() { super.onStart(); for (LifeCycleListener listener : mListeners) { listener.onActivityStarted(this); } } @Override protected void onStop() { super.onStop(); for (LifeCycleListener listener : mListeners) { listener.onActivityStopped(this); } }} 

运行以下代码,它运行得非常好,已经测试过!

//在onCreate中:

  // Launch the stock camera app with no EXTRA_OUTPUT: the camera does not
  // save a full-size file; onActivityResult receives only a small thumbnail
  // bitmap in the result Intent's "data" extra, which we save ourselves.
  Intent intent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
  startActivityForResult(intent, REQUEST_Code);

//在OnactivityResult中:

  protected void onActivityResult(int requestCode, int resultCode, Intent data) { super.onActivityResult(requestCode, resultCode, data); if (requestCode == REQUEST_Code && resultCode == Activity.RESULT_OK) { timestmp = new SimpleDateFormat("yyyyMMdd_HHmmss").format(new Date()); File filename = new File(Environment.getExternalStorageDirectory().getAbsoluteFile() + "/MyImage"); filename.mkdir(); try { images = new File(filename, "QR_" + timestmp + ".jpg"); FileOutputStream out = new FileOutputStream(images); Bundle extras = data.getExtras(); Bitmap imageToSave = (Bitmap) extras.get("data"); imageToSave.compress(Bitmap.CompressFormat.JPEG, 90, out); out.flush(); out.close(); Toast.makeText(getApplicationContext(), "File is Saved in " + filename, Toast.LENGTH_LONG).show(); cropCapturedImage(Uri.fromFile(images)); } catch (Exception ex) { String msg = "sorry your device does't support the crop the action!"; Toast toast = Toast.makeText(this, msg, Toast.LENGTH_LONG); toast.show(); } } else if (requestCode == 2) { mImageView.setImageBitmap(decodeUriAsBitmap(Uri.fromFile(images))); `// Here you can`enter code here` do whatever you want, with the image.` } } 

//你的裁剪图像代码:

 private void cropCapturedImage(Uri picUri) { try { Intent cropIntent = new Intent("com.android.camera.action.CROP"); cropIntent.setDataAndType(picUri, "image/*"); cropIntent.putExtra("crop", "true"); cropIntent.putExtra("aspectX", 1); cropIntent.putExtra("aspectY", 1); cropIntent.putExtra("outputX", 400); cropIntent.putExtra("outputY", 400); cropIntent.putExtra("return-data", true); cropIntent.putExtra(MediaStore.EXTRA_OUTPUT, picUri); startActivityForResult(cropIntent, 2); } // respond to users whose devices do not support the crop action catch (ActivityNotFoundException anfe) { // display an error message String errorMessage = "Whoops - your device doesn't support the crop action!"; Toast toast = Toast.makeText(this, errorMessage, Toast.LENGTH_SHORT); toast.show(); } 

}

//从uri解码位图

 /**
  * Decodes the image behind {@code uri} into a Bitmap.
  *
  * @return the decoded bitmap, or null when the file cannot be found
  */
 private Bitmap decodeUriAsBitmap(Uri uri) {
     try {
         return BitmapFactory.decodeStream(getContentResolver().openInputStream(uri));
     } catch (FileNotFoundException e) {
         e.printStackTrace();
         return null;
     }
 }