How to record live video from an RTSP URL in Android
I am making an Android app. In my application, I need to record live streaming video, and I am using RTSP and FFmpeg to stream and record the video. Streaming works fine, but video recording does not work. I think the problem is the URL connection I am using for video recording, but I don't know what the correct connection method is. I have searched a lot about this but can't find anything. If anyone knows, please help me.
This is my startRecording() method
/**
 * Opens the MJPEG stream at {@code path} and, on success, starts the video
 * recording thread (and the audio thread when the mic toggle is on).
 * On any failure the recording state and UI are reset.
 */
private void startRecording() {
    // Open the stream first; if the connection fails there is nothing to record.
    try {
        URL url = new URL(path);
        URLConnection urlConnection1 = url.openConnection();
        in1 = new BufferedInputStream(urlConnection1.getInputStream());
    } catch (IOException e) {
        // Bug fix: the original swallowed this and fell through, wrapping a
        // null/stale `in1` in MjpegInputStream below. Bail out instead.
        Log.e("recording", "Failed to open stream: " + path, e);
        Toast.makeText(getApplicationContext(), "Try again1", Toast.LENGTH_SHORT).show();
        return;
    }
    try {
        mIn = new MjpegInputStream(in1, MIN_ARRAY_LENGTH);
        mIn.resetFrameCount();
        recorder.start();
        calltimer();
        // "1" appears to mean the mic toggle is enabled — TODO confirm with caller.
        audioStatus = Micstatus.equals("1");
        startTime = 0;
        completedFrames = 0;
        audioCompleted = 0;
        isRecording = true;
        if (isJelliBean) {
            resetTextureLayout();
        }
        trial.setCanZoom(isRecording);
        setMenuEnabled();
        threadVideo = new VideoRecording();
        threadVideo.start();
        if (audioStatus) {
            threadAudio = new AudioRecordRunnables();
            threadAudio.start();
        }
    } catch (Exception e) {
        Log.e("recording", "startRecording failed", e); // was silently swallowed
        try {
            // Roll back the recording state and tell the user to retry.
            isRecording = false;
            trial.setCanZoom(isRecording);
            setMenuEnabled();
            Toast.makeText(getApplicationContext(), "Try again1", Toast.LENGTH_SHORT).show();
        } catch (Exception ui) {
            Log.e("recording", "Failed to reset UI after error", ui); // was empty catch
        }
    }
}
This is my VideoRecording class
/**
 * Worker thread that drains decoded JPEG frames from {@code mIn.datas}
 * (a fixed-size ring buffer of {@code MIN_ARRAY_LENGTH} slots), feeds each
 * frame to {@code recorder}, and finalizes the recording when it stops.
 */
private class VideoRecording extends Thread { // Thread already implements Runnable
    @Override
    public void run() {
        try {
            int i = -1;          // index of the last frame consumed from mIn.datas
            int warmupTicks = 0; // counts idle iterations so we can bail if no frames ever arrive
            FileOutputStream out;
            RecordingHelper recordingHelper;
            // Give the stream a moment to buffer frames before draining.
            try {
                Thread.sleep(VIDEO_FRAME_RATE < 6 ? 2000 : 300);
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt(); // preserve interrupt status
            }
            String file = getCacheDir() + "/temp.jpg";
            opencv_core.IplImage iplImage;
            // Hoisted out of the loop: one converter serves every frame.
            OpenCVFrameConverter.ToIplImage grabberConverter = new OpenCVFrameConverter.ToIplImage();
            runOnUiThread(videoTimeRunnable);
            // Bug fix: the original condition `isRecording || completed < mIn.totalFrames || true`
            // was always true (dead condition, and `completed` was otherwise unused).
            // The loop only ever exits through the two breaks below.
            while (true) {
                try {
                    if (warmupTicks < 10) {
                        warmupTicks++;
                    }
                    if (i > (MIN_ARRAY_LENGTH - 2)) {
                        i = -1; // wrap around the ring buffer
                    }
                    recordingHelper = mIn.datas[i + 1];
                    if (recordingHelper != null && recordingHelper.length > 1) {
                        i++;
                        if (startTime == 0) {
                            startTime = System.currentTimeMillis();
                        }
                        completedFrames++;
                        // Write the JPEG to a temp file so OpenCV can decode it.
                        out = new FileOutputStream(file);
                        try {
                            out.write(recordingHelper.data);
                            out.flush();
                        } finally {
                            out.close(); // bug fix: stream was leaked on write failure
                        }
                        Log.e("recording", "=" + recordingHelper.rotaion);
                        iplImage = cvLoadImage(file, 1);
                        Frame frame = grabberConverter.convert(iplImage);
                        recorder.record(frame);
                        opencv_core.cvReleaseImage(iplImage);
                        mIn.datas[i] = null; // release the consumed slot back to the producer
                        totalFramesRecordedByActivity++;
                        runOnUiThread(videoTimeRunnable);
                    } else if (!isRecording && startTime > 0) {
                        break; // recording stopped after at least one frame was written
                    } else if (warmupTicks > 5 && mIn.totalFrames < 1) {
                        break; // no frames ever arrived — give up
                    }
                } catch (Exception e) {
                    Log.e("recording", "frame processing failed", e); // was empty catch
                }
            }
            new File(file).delete();
            endTime = System.currentTimeMillis();
            videoThreadFinished = true;
            finalizeRecording();
        } catch (Throwable t) {
            Log.e("recording", "video thread aborted", t); // was silently swallowed
            finishThis();
        }
    }
}
+3
source to share
No one has answered this question yet
Check out similar questions: