Android ICS and MJPEG using AsyncTask

Posted: 2012-05-19 23:28:37

Question:

I modified the MJPEG viewer code from android and MJPEG to work with an AsyncTask (and therefore on Ice Cream Sandwich (ICS), 4.0.4); here is my code.

If anyone has suggestions on how to optimize, clean up, or handle anything more properly in the code, please let me know. I'd also appreciate help with two issues:

If the device is streaming and you lock the screen and then unlock it, playback does not resume until you kill and relaunch the app or rotate the screen. Everything I have tried with onResume() or elsewhere makes the app crash.

In particular I would like to resume the AsyncTask / MjpegInputStream.java, but I could not get that to work.

MjpegActivity.java:

package com.demo.mjpeg;

import java.io.IOException;
import java.net.URI;

import org.apache.http.HttpResponse;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.DefaultHttpClient;

import com.demo.mjpeg.MjpegView.MjpegInputStream;
import com.demo.mjpeg.MjpegView.MjpegView;
import android.app.Activity;
import android.os.AsyncTask;
import android.os.Bundle;
import android.util.Log;
import android.view.Window;
import android.view.WindowManager;
import android.widget.Toast;

public class MjpegActivity extends Activity {
    private static final String TAG = "MjpegActivity";

    private MjpegView mv;

    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        //sample public cam
        String URL = "http://trackfield.webcam.oregonstate.edu/axis-cgi/mjpg/video.cgi?resolution=800x600&amp%3bdummy=1333689998337";

        requestWindowFeature(Window.FEATURE_NO_TITLE);
        getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
                WindowManager.LayoutParams.FLAG_FULLSCREEN);

        mv = new MjpegView(this);
        setContentView(mv);

        new DoRead().execute(URL);
    }

    public void onPause() {
        super.onPause();
        mv.stopPlayback();
    }

    public class DoRead extends AsyncTask<String, Void, MjpegInputStream> {
        protected MjpegInputStream doInBackground(String... url) {
            //TODO: if camera has authentication deal with it and don't just not work
            HttpResponse res = null;
            DefaultHttpClient httpclient = new DefaultHttpClient();
            Log.d(TAG, "1. Sending http request");
            try {
                res = httpclient.execute(new HttpGet(URI.create(url[0])));
                Log.d(TAG, "2. Request finished, status = " + res.getStatusLine().getStatusCode());
                if (res.getStatusLine().getStatusCode() == 401) {
                    //You must turn off camera User Access Control before this will work
                    return null;
                }
                return new MjpegInputStream(res.getEntity().getContent());
            } catch (ClientProtocolException e) {
                e.printStackTrace();
                Log.d(TAG, "Request failed-ClientProtocolException", e);
                //Error connecting to camera
            } catch (IOException e) {
                e.printStackTrace();
                Log.d(TAG, "Request failed-IOException", e);
                //Error connecting to camera
            }

            return null;
        }

        protected void onPostExecute(MjpegInputStream result) {
            mv.setSource(result);
            mv.setDisplayMode(MjpegView.SIZE_BEST_FIT);
            mv.showFps(true);
        }
    }
}

MjpegInputStream.java:

package com.demo.mjpeg.MjpegView;

import java.io.BufferedInputStream;
import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;

import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.util.Log;

public class MjpegInputStream extends DataInputStream {
    private static final String TAG = "MjpegInputStream";

    private final byte[] SOI_MARKER = { (byte) 0xFF, (byte) 0xD8 };
    private final byte[] EOF_MARKER = { (byte) 0xFF, (byte) 0xD9 };
    private final String CONTENT_LENGTH = "Content-Length";
    private final static int HEADER_MAX_LENGTH = 100;
    private final static int FRAME_MAX_LENGTH = 40000 + HEADER_MAX_LENGTH;
    private int mContentLength = -1;

    public MjpegInputStream(InputStream in) {
        super(new BufferedInputStream(in, FRAME_MAX_LENGTH));
    }

    private int getEndOfSeqeunce(DataInputStream in, byte[] sequence) throws IOException {
        int seqIndex = 0;
        byte c;
        for (int i = 0; i < FRAME_MAX_LENGTH; i++) {
            c = (byte) in.readUnsignedByte();
            if (c == sequence[seqIndex]) {
                seqIndex++;
                if (seqIndex == sequence.length) {
                    return i + 1;
                }
            } else {
                seqIndex = 0;
            }
        }
        return -1;
    }

    private int getStartOfSequence(DataInputStream in, byte[] sequence) throws IOException {
        int end = getEndOfSeqeunce(in, sequence);
        return (end < 0) ? (-1) : (end - sequence.length);
    }

    private int parseContentLength(byte[] headerBytes) throws IOException, NumberFormatException {
        ByteArrayInputStream headerIn = new ByteArrayInputStream(headerBytes);
        Properties props = new Properties();
        props.load(headerIn);
        return Integer.parseInt(props.getProperty(CONTENT_LENGTH));
    }

    public Bitmap readMjpegFrame() throws IOException {
        mark(FRAME_MAX_LENGTH);
        int headerLen = getStartOfSequence(this, SOI_MARKER);
        reset();
        byte[] header = new byte[headerLen];
        readFully(header);
        try {
            mContentLength = parseContentLength(header);
        } catch (NumberFormatException nfe) {
            nfe.getStackTrace();
            Log.d(TAG, "catch NumberFormatException hit", nfe);
            mContentLength = getEndOfSeqeunce(this, EOF_MARKER);
        }
        reset();
        byte[] frameData = new byte[mContentLength];
        skipBytes(headerLen);
        readFully(frameData);
        return BitmapFactory.decodeStream(new ByteArrayInputStream(frameData));
    }
}

MjpegView.java:

package com.demo.mjpeg.MjpegView;

import java.io.IOException;

import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.PorterDuff;
import android.graphics.PorterDuffXfermode;
import android.graphics.Rect;
import android.graphics.Typeface;
import android.util.AttributeSet;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;

public class MjpegView extends SurfaceView implements SurfaceHolder.Callback {
    private static final String TAG = "MjpegView";

    public final static int POSITION_UPPER_LEFT  = 9;
    public final static int POSITION_UPPER_RIGHT = 3;
    public final static int POSITION_LOWER_LEFT  = 12;
    public final static int POSITION_LOWER_RIGHT = 6;

    public final static int SIZE_STANDARD   = 1;
    public final static int SIZE_BEST_FIT   = 4;
    public final static int SIZE_FULLSCREEN = 8;

    private MjpegViewThread thread;
    private MjpegInputStream mIn = null;
    private boolean showFps = false;
    private boolean mRun = false;
    private boolean surfaceDone = false;
    private Paint overlayPaint;
    private int overlayTextColor;
    private int overlayBackgroundColor;
    private int ovlPos;
    private int dispWidth;
    private int dispHeight;
    private int displayMode;

    public class MjpegViewThread extends Thread {
        private SurfaceHolder mSurfaceHolder;
        private int frameCounter = 0;
        private long start;
        private Bitmap ovl;

        public MjpegViewThread(SurfaceHolder surfaceHolder, Context context) {
            mSurfaceHolder = surfaceHolder;
        }

        private Rect destRect(int bmw, int bmh) {
            int tempx;
            int tempy;
            if (displayMode == MjpegView.SIZE_STANDARD) {
                tempx = (dispWidth / 2) - (bmw / 2);
                tempy = (dispHeight / 2) - (bmh / 2);
                return new Rect(tempx, tempy, bmw + tempx, bmh + tempy);
            }
            if (displayMode == MjpegView.SIZE_BEST_FIT) {
                float bmasp = (float) bmw / (float) bmh;
                bmw = dispWidth;
                bmh = (int) (dispWidth / bmasp);
                if (bmh > dispHeight) {
                    bmh = dispHeight;
                    bmw = (int) (dispHeight * bmasp);
                }
                tempx = (dispWidth / 2) - (bmw / 2);
                tempy = (dispHeight / 2) - (bmh / 2);
                return new Rect(tempx, tempy, bmw + tempx, bmh + tempy);
            }
            if (displayMode == MjpegView.SIZE_FULLSCREEN) {
                return new Rect(0, 0, dispWidth, dispHeight);
            }
            return null;
        }

        public void setSurfaceSize(int width, int height) {
            synchronized (mSurfaceHolder) {
                dispWidth = width;
                dispHeight = height;
            }
        }

        private Bitmap makeFpsOverlay(Paint p, String text) {
            Rect b = new Rect();
            p.getTextBounds(text, 0, text.length(), b);
            int bwidth  = b.width() + 2;
            int bheight = b.height() + 2;
            Bitmap bm = Bitmap.createBitmap(bwidth, bheight, Bitmap.Config.ARGB_8888);
            Canvas c = new Canvas(bm);
            p.setColor(overlayBackgroundColor);
            c.drawRect(0, 0, bwidth, bheight, p);
            p.setColor(overlayTextColor);
            c.drawText(text, -b.left + 1, (bheight / 2) - ((p.ascent() + p.descent()) / 2) + 1, p);
            return bm;
        }

        public void run() {
            start = System.currentTimeMillis();
            PorterDuffXfermode mode = new PorterDuffXfermode(PorterDuff.Mode.DST_OVER);
            Bitmap bm;
            int width;
            int height;
            Rect destRect;
            Canvas c = null;
            Paint p = new Paint();
            String fps;
            while (mRun) {
                if (surfaceDone) {
                    try {
                        c = mSurfaceHolder.lockCanvas();
                        synchronized (mSurfaceHolder) {
                            try {
                                bm = mIn.readMjpegFrame();
                                destRect = destRect(bm.getWidth(), bm.getHeight());
                                c.drawColor(Color.BLACK);
                                c.drawBitmap(bm, null, destRect, p);
                                if (showFps) {
                                    p.setXfermode(mode);
                                    if (ovl != null) {
                                        height = ((ovlPos & 1) == 1) ? destRect.top : destRect.bottom - ovl.getHeight();
                                        width  = ((ovlPos & 8) == 8) ? destRect.left : destRect.right - ovl.getWidth();
                                        c.drawBitmap(ovl, width, height, null);
                                    }
                                    p.setXfermode(null);
                                    frameCounter++;
                                    if ((System.currentTimeMillis() - start) >= 1000) {
                                        fps = String.valueOf(frameCounter) + " fps";
                                        frameCounter = 0;
                                        start = System.currentTimeMillis();
                                        ovl = makeFpsOverlay(overlayPaint, fps);
                                    }
                                }
                            } catch (IOException e) {
                                e.getStackTrace();
                                Log.d(TAG, "catch IOException hit in run", e);
                            }
                        }
                    } finally {
                        if (c != null) {
                            mSurfaceHolder.unlockCanvasAndPost(c);
                        }
                    }
                }
            }
        }
    }

    private void init(Context context) {
        SurfaceHolder holder = getHolder();
        holder.addCallback(this);
        thread = new MjpegViewThread(holder, context);
        setFocusable(true);
        overlayPaint = new Paint();
        overlayPaint.setTextAlign(Paint.Align.LEFT);
        overlayPaint.setTextSize(12);
        overlayPaint.setTypeface(Typeface.DEFAULT);
        overlayTextColor = Color.WHITE;
        overlayBackgroundColor = Color.BLACK;
        ovlPos = MjpegView.POSITION_LOWER_RIGHT;
        displayMode = MjpegView.SIZE_STANDARD;
        dispWidth = getWidth();
        dispHeight = getHeight();
    }

    public void startPlayback() {
        if (mIn != null) {
            mRun = true;
            thread.start();
        }
    }

    public void stopPlayback() {
        mRun = false;
        boolean retry = true;
        while (retry) {
            try {
                thread.join();
                retry = false;
            } catch (InterruptedException e) {
                e.getStackTrace();
                Log.d(TAG, "catch IOException hit in stopPlayback", e);
            }
        }
    }

    public MjpegView(Context context, AttributeSet attrs) {
        super(context, attrs);
        init(context);
    }

    public void surfaceChanged(SurfaceHolder holder, int f, int w, int h) {
        thread.setSurfaceSize(w, h);
    }

    public void surfaceDestroyed(SurfaceHolder holder) {
        surfaceDone = false;
        stopPlayback();
    }

    public MjpegView(Context context) {
        super(context);
        init(context);
    }

    public void surfaceCreated(SurfaceHolder holder) {
        surfaceDone = true;
    }

    public void showFps(boolean b) {
        showFps = b;
    }

    public void setSource(MjpegInputStream source) {
        mIn = source;
        startPlayback();
    }

    public void setOverlayPaint(Paint p) {
        overlayPaint = p;
    }

    public void setOverlayTextColor(int c) {
        overlayTextColor = c;
    }

    public void setOverlayBackgroundColor(int c) {
        overlayBackgroundColor = c;
    }

    public void setOverlayPosition(int p) {
        ovlPos = p;
    }

    public void setDisplayMode(int s) {
        displayMode = s;
    }
}

Comments:

I created a faster version of the example above using libjpeg. Code: bitbucket.org/neuralassembly/simplemjpegview Demo: youtube.com/watch?v=fumv9p0_vWo

What should we do if we want to use several cameras? We would then have several different URLs; where should we pass the URL in as a parameter?

@neuralassembly I cloned your bitbucket repository, but managed to load the stream URL in a fragment using MjpegInputStream and MjpegView. The job can also be done with an AsyncTaskLoader. The odd thing is that it compiles and runs fine on the emulator, yet mv is completely black! Am I missing something?

@bbodenmiller The code above is great! But the video just keeps looping for me...

@neuralassembly Tried out your code and it works well! But I get a three-second delay, which I suspect is a buffering issue. Is there a way to minimize it?

Answer 1:

Nice work! For your onResume() problem, wouldn't it be enough to move the code below from onCreate() into onResume()?

    //sample public cam 
    String URL = "http://trackfield.webcam.oregonstate.edu/axis-cgi/mjpg/video.cgi?resolution=800x600&amp%3bdummy=1333689998337"; 

    mv = new MjpegView(this); 
    setContentView(mv);         

    new DoRead().execute(URL); 

Then you simply recreate the view and a new instance of the AsyncTask... I tried it and it works for me...
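
For what it's worth, here is a minimal sketch of the resulting activity methods with that suggestion applied (my own rearrangement of the question's code, reusing its MjpegView field mv and DoRead task; not code posted by the answerer):

    @Override
    public void onResume() {
        super.onResume();
        // Rebuild the view and kick off a fresh AsyncTask every time the
        // activity returns to the foreground (e.g. after unlocking the screen).
        String URL = "http://trackfield.webcam.oregonstate.edu/axis-cgi/mjpg/video.cgi?resolution=800x600&amp%3bdummy=1333689998337";
        mv = new MjpegView(this);
        setContentView(mv);
        new DoRead().execute(URL);
    }

    @Override
    public void onPause() {
        super.onPause();
        // Stop the drawing thread so the old surface and stream are released.
        mv.stopPlayback();
    }

Recreating the view also recreates its internal MjpegViewThread, which matters because a Java Thread that has finished running cannot be start()ed a second time.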

Comments:

Yes, I actually fixed it by moving a bunch of things into onResume(). I have also found a number of other bugs in the code I posted here (it was based on even buggier code), and I'll post an update as soon as I have fixed them all. The first that comes to mind is that the try block needs to check whether c is null. Another is that there is no error handling for the source dying mid-stream.

You're right... How has your experience been stability-wise? I've noticed that on some devices this code can become unresponsive (CPU @ 100%)... Have you run into that kind of problem?

I can only test with the emulator and a Galaxy Nexus, but it's certainly not the most stable code in the world. I think I have improved it slightly; I'll see what I can fix and post today. If your MJPEG stream gives you a lot of frames per second, playback on the phone will be fairly poor and slow... I had to turn mine down a bit. 10 fps seems OK; without hardware acceleration I think it is just too much for the phone to handle.

Thanks for the information, I'll run some tests with different cameras. I'd be glad to take a look at your improved code, so let me know if you plan to post it here!

I'll work on it, but it may take a few weeks, since a lot of other projects are coming to a head right now.
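
As a side note for anyone hitting the same crash: the null check mentioned in the first comment above would presumably sit right after lockCanvas() in MjpegViewThread.run(), along the lines of this sketch (one possible guard, not the author's eventual fix):

    c = mSurfaceHolder.lockCanvas();
    if (c == null) {
        // Surface is gone or not yet ready (e.g. during rotation or teardown);
        // skip this frame instead of crashing on c.drawColor()/c.drawBitmap().
        continue;
    }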

Answer 2:

If you want to access your network camera with a username and password, you may want to add this to your DefaultHttpClient. Helpful for newcomers: with it, the code above will work with cameras that require authentication.

    CredentialsProvider provider = new BasicCredentialsProvider();
    UsernamePasswordCredentials credentials = new UsernamePasswordCredentials("yourusername", "yourpassword");
    provider.setCredentials(AuthScope.ANY, credentials);
    DefaultHttpClient httpclient = new DefaultHttpClient();
    httpclient.setCredentialsProvider(provider);
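
In case the class names are unfamiliar: these come from the Apache HttpClient packages that shipped with older Android releases, so the imports would be (listed here for convenience; they are not part of the original answer):

    import org.apache.http.auth.AuthScope;
    import org.apache.http.auth.UsernamePasswordCredentials;
    import org.apache.http.client.CredentialsProvider;
    import org.apache.http.impl.client.BasicCredentialsProvider;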

Comments:

Answer 3:

Thanks for the code, it's very useful.

I'd like to recommend a few optimizations that I already use in my own code; overall performance can easily improve several times over.

1. Where possible, I removed the memory allocations during frame reading:

    private final static int HEADER_MAX_LENGTH = 100;
    private final static int FRAME_MAX_LENGTH = 200000 + HEADER_MAX_LENGTH;
    private final String CONTENT_LENGTH = "Content-Length:";
    private final String CONTENT_END = "\r\n";
    private final static byte[] gFrameData = new byte[FRAME_MAX_LENGTH];
    private final static byte[] gHeader = new byte[HEADER_MAX_LENGTH];
    BitmapFactory.Options bitmapOptions = new BitmapFactory.Options();

    public Bitmap readMjpegFrame() throws IOException {
        mark(FRAME_MAX_LENGTH);
        int headerLen = getStartOfSequence(SOI_MARKER);

        if (headerLen < 0) {
            return null;    // no SOI marker found within the buffered data
        }

        reset();
        readFully(gHeader, 0, headerLen);

        int contentLen;

        try {
            contentLen = parseContentLength(gHeader, headerLen);
        } catch (NumberFormatException nfe) {
            nfe.getStackTrace();
            Log.d(TAG, "catch NumberFormatException hit", nfe);
            contentLen = getEndOfSequence(EOF_MARKER);
        }

        readFully(gFrameData, 0, contentLen);

        Bitmap bm = BitmapFactory.decodeByteArray(gFrameData, 0, contentLen, bitmapOptions);
        bitmapOptions.inBitmap = bm;    // reuse this bitmap when decoding the next frame

        return bm;
    }
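
One thing to watch (my observation, not something the answerer states): unlike the original readMjpegFrame(), this version can return null when no SOI marker is found in the buffered data, so the drawing loop in MjpegViewThread.run() would need a matching guard, for example:

    bm = mIn.readMjpegFrame();
    if (bm == null) {
        continue;   // no complete frame available yet; try again next pass
    }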

2. Optimize parseContentLength() to get rid of String operations as far as possible:

    byte[] CONTENT_LENGTH_BYTES;
    byte[] CONTENT_END_BYTES;

    public MjpegInputStream(InputStream in) {
        super(new BufferedInputStream(in, FRAME_MAX_LENGTH));

        bitmapOptions.inSampleSize = 1;
        bitmapOptions.inPreferredConfig = Bitmap.Config.RGB_565;
        bitmapOptions.inPreferQualityOverSpeed = false;
        bitmapOptions.inPurgeable = true;
        try {
            CONTENT_LENGTH_BYTES = CONTENT_LENGTH.getBytes("UTF-8");
            CONTENT_END_BYTES = CONTENT_END.getBytes("UTF-8");
        } catch (UnsupportedEncodingException e) {
            e.printStackTrace();
        }
    }

    private int findPattern(byte[] buffer, int bufferLen, byte[] pattern, int offset) {
        int seqIndex = 0;
        for (int i = offset; i < bufferLen; ++i) {
            if (buffer[i] == pattern[seqIndex]) {
                ++seqIndex;
                if (seqIndex == pattern.length) {
                    return i + 1;
                }
            } else {
                seqIndex = 0;
            }
        }
        return -1;
    }

    private int parseContentLength(byte[] headerBytes, int length) throws IOException, NumberFormatException {
        int begin = findPattern(headerBytes, length, CONTENT_LENGTH_BYTES, 0);
        int end = findPattern(headerBytes, length, CONTENT_END_BYTES, begin) - CONTENT_END_BYTES.length;

        // converting string to int
        int number = 0;
        int radix = 1;
        for (int i = end - 1; i >= begin; --i) {
            if (headerBytes[i] > 47 && headerBytes[i] < 58) {
                number += (headerBytes[i] - 48) * radix;
                radix *= 10;
            }
        }

        return number;
    }
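
To make the parsing concrete, here is a quick throwaway check (my own illustration, not from the answer) of what parseContentLength() does with a typical MJPEG part header:

    // "Content-Length:" is located first, the following "\r\n" bounds the value,
    // and the digit loop accumulates 51234; non-digit bytes such as the leading
    // space are simply skipped by the 47 < byte < 58 test.
    byte[] header = "Content-Type: image/jpeg\r\nContent-Length: 51234\r\n\r\n".getBytes();
    int len = parseContentLength(header, header.length);   // expected: 51234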

There may be mistakes in the code, since I rewrote it for this answer; originally I used two threads, one reading frames and the other rendering.

I hope it helps someone.

Comments:
