package com.webcambroadcaster;

import java.io.BufferedOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.net.ServerSocket;
import java.net.Socket;

import au.edu.jcu.v4l4j.FrameGrabber;
import au.edu.jcu.v4l4j.VideoDevice;
import au.edu.jcu.v4l4j.VideoFrame;

/**
 * A disposable class that uses v4l4j to serve a still sequence captured from a
 * webcam over a socket connection. There is no streaming protocol: it just
 * blindly captures a still, JPEG-compresses it, and pumps it out over any
 * incoming socket connection.
 *
 * @author Tom Gibara
 */
public class WebcamBroadcaster {

    public static boolean RAW = false;

    public static void main(String[] args) {
        // Parse the optional width, height and port arguments
        int[] values = new int[args.length];
        for (int i = 0; i < values.length; i++) {
            values[i] = Integer.parseInt(args[i]);
        }

        WebcamBroadcaster wb;
        if (values.length == 0) {
            wb = new WebcamBroadcaster();
        } else if (values.length == 1) {
            wb = new WebcamBroadcaster(values[0]);
        } else if (values.length == 2) {
            wb = new WebcamBroadcaster(values[0], values[1]);
        } else {
            wb = new WebcamBroadcaster(values[0], values[1], values[2]);
        }

        // Start the grabbing procedure
        wb.start();
    }

    public static final int DEFAULT_PORT = 9889;
    public static final int DEFAULT_WIDTH = 320;
    public static final int DEFAULT_HEIGHT = 240;

    private final Object lock = new Object();
    private final int width;
    private final int height;
    private final int port;

    private boolean running;
    private boolean stopping;
    private Worker worker;

    private VideoDevice vd = null;
    private FrameGrabber fg = null;

    public WebcamBroadcaster(int width, int height, int port) {
        this.width = width;
        this.height = height;
        this.port = port;
    }

    public WebcamBroadcaster(int width, int height) {
        this(width, height, DEFAULT_PORT);
    }

    public WebcamBroadcaster(int port) {
        this(DEFAULT_WIDTH, DEFAULT_HEIGHT, port);
    }

    public WebcamBroadcaster() {
        this(DEFAULT_WIDTH, DEFAULT_HEIGHT, DEFAULT_PORT);
    }

    public void start() {
        synchronized (lock) {
            if (running) return;
            // Start capturing from the webcam before accepting connections
            startCapture();
            worker = new Worker();
            worker.start();
            System.out.println("Start capture");
            running = true;
        }
    }

    public void releaseCapture() {
        fg.stopCapture();
        vd.releaseFrameGrabber();
        vd.release();
    }

    public void startCapture() {
        try {
            String dev = "/dev/video0";
            vd = new VideoDevice(dev);
            // JPEG frame grabber: requested size, input 0, standard 0, JPEG quality 80
            fg = vd.getJPEGFrameGrabber(width, height, 0, 0, 80);
            fg.startCapture();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    public void stop() throws InterruptedException {
        Worker w;
        synchronized (lock) {
            if (!running) return;
            // Stop capturing before shutting the worker down
            releaseCapture();
            stopping = true;
            running = false;
            // Keep a local reference so we can still join after clearing the field
            w = worker;
            worker = null;
        }
        try {
            w.join();
        } finally {
            stopping = false;
        }
    }

    private class Worker extends Thread {

        @Override
        public void run() {
            ServerSocket ss;
            VideoFrame frm;
            try {
                ss = new ServerSocket(port);
            } catch (IOException e) {
                e.printStackTrace();
                return;
            }

            while (true) {
                synchronized (lock) {
                    if (stopping) break;
                }

                Socket socket = null;
                try {
                    socket = ss.accept();

                    // Grab a frame and push its JPEG bytes to the client
                    try {
                        frm = fg.getVideoFrame();
                        System.out.println("Data grabbed");
                        OutputStream out = socket.getOutputStream();
                        DataOutputStream dout = new DataOutputStream(new BufferedOutputStream(out));
                        dout.write(frm.getBytes(), 0, frm.getFrameLength());
                        dout.close();
                        System.out.println("Data sent");
                        frm.recycle();
                    } catch (Exception e) {
                        e.printStackTrace();
                        return;
                    }

                    socket.close();
                    socket = null;
                } catch (IOException e) {
                    e.printStackTrace();
                } finally {
                    if (socket != null) {
                        try {
                            socket.close();
                        } catch (IOException e) {
                            /* ignore */
                        }
                    }
                }
            }

            try {
                ss.close();
            } catch (IOException e) {
                /* ignore */
            }
        }
    }
}
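Because the broadcaster simply writes one JPEG frame into every accepted connection and then closes it, it can be tested without Android at all. The following is a minimal test-client sketch (not part of the original article); the host, port and output file name are assumptions. It connects, reads until the server closes the socket, and saves the result as a JPEG file.

import java.io.ByteArrayOutputStream;
import java.io.FileOutputStream;
import java.io.InputStream;
import java.net.Socket;

// Minimal test client: one connection == one JPEG frame.
public class FrameClient {

    public static void main(String[] args) throws Exception {
        String host = args.length > 0 ? args[0] : "localhost";               // assumed host
        int port = args.length > 1 ? Integer.parseInt(args[1]) : 9889;       // WebcamBroadcaster.DEFAULT_PORT

        try (Socket socket = new Socket(host, port);
             InputStream in = socket.getInputStream()) {
            // The server closes the connection after the frame, so read until EOF.
            ByteArrayOutputStream frame = new ByteArrayOutputStream();
            byte[] buf = new byte[8192];
            int n;
            while ((n = in.read(buf)) != -1) {
                frame.write(buf, 0, n);
            }
            try (FileOutputStream out = new FileOutputStream("frame.jpg")) {
                out.write(frame.toByteArray());
            }
            System.out.println("Saved " + frame.size() + " bytes to frame.jpg");
        }
    }
}

Run the broadcaster first, then the client; each run should save one fresh frame, which is a quick way to confirm the v4l4j side works before touching the Android code below.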
package com.example;

import android.app.Activity;
import android.os.Bundle;
import android.util.Log;
import android.widget.Button;
import android.widget.FrameLayout;

public class CameraDemo extends Activity {

    private static final String TAG = "CameraDemo";

    Preview preview;
    Button buttonClick;

    /** Called when the activity is first created. */
    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.main);

        preview = new Preview(this);
        ((FrameLayout) findViewById(R.id.preview)).addView(preview);

        Log.d(TAG, "Preview created");
    }
}
package com.example;

import java.io.IOException;

import android.content.Context;
import android.hardware.Camera;
import android.view.SurfaceHolder;
import android.view.SurfaceView;

class Preview extends SurfaceView implements SurfaceHolder.Callback {

    SurfaceHolder mHolder;
    public Camera camera;

    Preview(Context context) {
        super(context);
        // Install a SurfaceHolder.Callback so we get notified when the
        // underlying surface is created and destroyed.
        mHolder = getHolder();
        mHolder.addCallback(this);
        mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
    }

    public void surfaceCreated(SurfaceHolder holder) {
        // The Surface has been created, acquire the camera and tell it where
        // to draw.
        camera = Camera.open();
        try {
            camera.setPreviewDisplay(holder);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    public void surfaceDestroyed(SurfaceHolder holder) {
        // Surface will be destroyed when we return, so stop the preview.
        // Because the CameraDevice object is not a shared resource, it's very
        // important to release it when the activity is paused.
        camera.stopPreview();
        camera.release();
        camera = null;
    }

    public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
        // Now that the size is known, set up the camera parameters and begin
        // the preview.
        Camera.Parameters parameters = camera.getParameters();
        parameters.setPreviewSize(w, h);
        camera.setParameters(parameters);
        camera.startPreview();
    }
}
package com.example;

import java.io.IOException;

import android.content.Context;
import android.hardware.Camera;
import android.view.SurfaceHolder;
import android.view.SurfaceView;

class Preview extends SurfaceView implements SurfaceHolder.Callback {

    SurfaceHolder mHolder;
    //public Camera camera;
    public SocketCamera camera;

    Preview(Context context) {
        super(context);
        // Install a SurfaceHolder.Callback so we get notified when the
        // underlying surface is created and destroyed.
        mHolder = getHolder();
        mHolder.addCallback(this);
        //mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
        mHolder.setType(SurfaceHolder.SURFACE_TYPE_NORMAL);
    }

    public void surfaceCreated(SurfaceHolder holder) {
        // The Surface has been created, acquire the camera and tell it where
        // to draw.
        //camera = Camera.open();
        camera = SocketCamera.open();
        try {
            camera.setPreviewDisplay(holder);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    public void surfaceDestroyed(SurfaceHolder holder) {
        // Surface will be destroyed when we return, so stop the preview.
        // Because the CameraDevice object is not a shared resource, it's very
        // important to release it when the activity is paused.
        camera.stopPreview();
        camera = null;
    }

    public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
        // Now that the size is known, set up the camera parameters and begin
        // the preview.
        Camera.Parameters parameters = camera.getParameters();
        parameters.setPreviewSize(w, h);
        camera.setParameters(parameters);
        camera.startPreview();
    }
}
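The SocketCamera class referenced above is not listed in the article; it is the client-side counterpart of WebcamBroadcaster. Below is a minimal sketch of what such a class might look like, covering only the calls the modified Preview makes: open(), setPreviewDisplay(), startPreview() and stopPreview(). The address 10.0.2.2:9889 (the host machine as seen from the Android emulator, plus WebcamBroadcaster's default port) is an assumption, and getParameters()/setParameters() are not provided, so the parameter block in surfaceChanged() would have to be dropped or stubbed if this sketch were used as-is.

package com.example;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.InetSocketAddress;
import java.net.Socket;

import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Rect;
import android.view.SurfaceHolder;

// Hypothetical stand-in for android.hardware.Camera that pulls JPEG frames
// from WebcamBroadcaster instead of a real camera.
public class SocketCamera {

    private static final String HOST = "10.0.2.2"; // host machine, as seen from the emulator (assumption)
    private static final int PORT = 9889;          // WebcamBroadcaster.DEFAULT_PORT

    private SurfaceHolder holder;
    private volatile boolean previewing;

    public static SocketCamera open() {
        return new SocketCamera();
    }

    public void setPreviewDisplay(SurfaceHolder holder) throws IOException {
        this.holder = holder;
    }

    public void startPreview() {
        previewing = true;
        new Thread(new Runnable() {
            public void run() {
                while (previewing) {
                    Bitmap frame = fetchFrame();
                    if (frame == null) {
                        // Server unreachable: back off briefly instead of spinning.
                        try { Thread.sleep(500); } catch (InterruptedException e) { return; }
                        continue;
                    }
                    Canvas canvas = holder.lockCanvas();
                    if (canvas != null) {
                        Rect dst = holder.getSurfaceFrame();
                        canvas.drawBitmap(frame, null, dst, null);
                        holder.unlockCanvasAndPost(canvas);
                    }
                }
            }
        }).start();
    }

    public void stopPreview() {
        previewing = false;
    }

    // One connection == one JPEG frame: read until the server closes the socket.
    private Bitmap fetchFrame() {
        Socket socket = new Socket();
        try {
            socket.connect(new InetSocketAddress(HOST, PORT), 2000);
            InputStream in = socket.getInputStream();
            ByteArrayOutputStream jpeg = new ByteArrayOutputStream();
            byte[] buf = new byte[8192];
            int n;
            while ((n = in.read(buf)) != -1) {
                jpeg.write(buf, 0, n);
            }
            byte[] bytes = jpeg.toByteArray();
            return BitmapFactory.decodeByteArray(bytes, 0, bytes.length);
        } catch (IOException e) {
            return null;
        } finally {
            try { socket.close(); } catch (IOException e) { /* ignore */ }
        }
    }
}

This also explains why the modified Preview switches the holder from SURFACE_TYPE_PUSH_BUFFERS to SURFACE_TYPE_NORMAL: the frames are drawn manually with lockCanvas()/unlockCanvasAndPost() rather than pushed into the surface by the camera driver.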
Source: https://habr.com/ru/post/136075/