Separate AndroidVideoTrackSource::OnFrameCaptured from adaptation
AndroidVideoTrackSource::OnFrameCaptured currently does adaptation
before passing frames on. We want to add video processing between
adaptation and delivering the frame to the rest of the WebRTC C++
layer. This CL prepares for that by splitting OnFrameCaptured() into a
separate adaptation step and a delivery step.

Bug: webrtc:10247
Change-Id: Iab759bac7f3072d4552ece80d0b81fc3e634c64c
Reviewed-on: https://webrtc-review.googlesource.com/c/119952
Commit-Queue: Magnus Jedvert <[email protected]>
Reviewed-by: Sami Kalliomäki <[email protected]>
Cr-Commit-Position: refs/heads/master@{#26571}
Hnoo112233 authored and Commit Bot committed Feb 6, 2019
1 parent bb87f8a commit 9025bd5
Showing 5 changed files with 130 additions and 32 deletions.
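
Condensed view of the new capturer-observer flow (assembled from the VideoSource.java and NativeCapturerObserver.java diffs below, not additional code from this commit): adaptation is now queried first, the crop/scale is applied in Java, and only then is the adapted frame handed to the native source, leaving a seam for future video processing in between.

@Override
public void onFrameCaptured(VideoFrame frame) {
  // Step 1: ask the native source whether to drop the frame and how to adapt it.
  final NativeAndroidVideoTrackSource.FrameAdaptationParameters parameters =
      nativeAndroidVideoTrackSource.adaptFrame(frame);
  if (parameters == null) {
    return; // Drop frame.
  }
  // Step 2: apply the crop/scale in Java.
  final VideoFrame.Buffer adaptedBuffer =
      frame.getBuffer().cropAndScale(parameters.cropX, parameters.cropY, parameters.cropWidth,
          parameters.cropHeight, parameters.scaleWidth, parameters.scaleHeight);
  // Planned video processing hook sits here, between adaptation and delivery.
  // Step 3: deliver the adapted frame to the native AndroidVideoTrackSource.
  nativeAndroidVideoTrackSource.onFrameCaptured(
      new VideoFrame(adaptedBuffer, frame.getRotation(), parameters.timestampNs));
  adaptedBuffer.release();
}

Before this change, the observer made a single nativeAndroidVideoTrackSource.onFrameCaptured(frame) call and all adaptation happened on the native side.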
15 changes: 14 additions & 1 deletion sdk/android/api/org/webrtc/VideoSource.java
@@ -43,7 +43,20 @@ public void onCapturerStopped() {

@Override
public void onFrameCaptured(VideoFrame frame) {
nativeAndroidVideoTrackSource.onFrameCaptured(frame);
final NativeAndroidVideoTrackSource.FrameAdaptationParameters parameters =
nativeAndroidVideoTrackSource.adaptFrame(frame);
if (parameters == null) {
// Drop frame.
return;
}

final VideoFrame.Buffer adaptedBuffer =
frame.getBuffer().cropAndScale(parameters.cropX, parameters.cropY, parameters.cropWidth,
parameters.cropHeight, parameters.scaleWidth, parameters.scaleHeight);
// TODO(magjed): Add video processing hook here.
nativeAndroidVideoTrackSource.onFrameCaptured(
new VideoFrame(adaptedBuffer, frame.getRotation(), parameters.timestampNs));
adaptedBuffer.release();
}
};
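
The TODO above marks the seam the commit message refers to. A hypothetical sketch of what a processing hook plugged into that seam could look like (the interface name and shape are assumptions for illustration, not part of WebRTC at this revision):

// Hypothetical interface, not part of this CL: invoked with the already-adapted frame,
// it may return a replacement frame to deliver, or null to drop the frame.
interface VideoFrameProcessor {
  @Nullable VideoFrame onFrameCaptured(VideoFrame adaptedFrame);
}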

54 changes: 48 additions & 6 deletions sdk/android/src/java/org/webrtc/NativeAndroidVideoTrackSource.java
@@ -17,11 +17,36 @@
* This class is meant to be a simple layer that only handles the JNI wrapping of a C++
* AndroidVideoTrackSource, so that it can easily be mocked out in Java unit tests. Refrain from adding
* any unnecessary logic to this class.
* This class is thread safe and methods can be called from any thread, but if frames A, B, ..., are
* sent to adaptFrame(), the adapted frames adaptedA, adaptedB, ..., need to be passed in the same
* order to onFrameCaptured().
*/
class NativeAndroidVideoTrackSource {
// Pointer to webrtc::jni::AndroidVideoTrackSource.
private final long nativeAndroidVideoTrackSource;

public static class FrameAdaptationParameters {
public final int cropX;
public final int cropY;
public final int cropWidth;
public final int cropHeight;
public final int scaleWidth;
public final int scaleHeight;
public final long timestampNs;

@CalledByNative("FrameAdaptationParameters")
FrameAdaptationParameters(int cropX, int cropY, int cropWidth, int cropHeight, int scaleWidth,
int scaleHeight, long timestampNs) {
this.cropX = cropX;
this.cropY = cropY;
this.cropWidth = cropWidth;
this.cropHeight = cropHeight;
this.scaleWidth = scaleWidth;
this.scaleHeight = scaleHeight;
this.timestampNs = timestampNs;
}
}

public NativeAndroidVideoTrackSource(long nativeAndroidVideoTrackSource) {
this.nativeAndroidVideoTrackSource = nativeAndroidVideoTrackSource;
}
@@ -34,11 +59,25 @@ public void setState(boolean isLive) {
nativeSetState(nativeAndroidVideoTrackSource, isLive);
}

/** Pass a frame to the native AndroidVideoTrackSource. */
/**
* This function should be called before delivering any frame to determine if the frame should be
* dropped or what the cropping and scaling parameters should be. If the return value is null, the
* frame should be dropped; otherwise the frame should be adapted in accordance with the frame
* adaptation parameters before calling onFrameCaptured().
*/
@Nullable
public FrameAdaptationParameters adaptFrame(VideoFrame frame) {
return nativeAdaptFrame(nativeAndroidVideoTrackSource, frame.getBuffer().getWidth(),
frame.getBuffer().getHeight(), frame.getRotation(), frame.getTimestampNs());
}

/**
* Pass an adapted frame to the native AndroidVideoTrackSource. Note that adaptFrame() is
* expected to be called first and that the passed frame conforms to those parameters.
*/
public void onFrameCaptured(VideoFrame frame) {
nativeOnFrameCaptured(nativeAndroidVideoTrackSource, frame.getBuffer().getWidth(),
frame.getBuffer().getHeight(), frame.getRotation(), frame.getTimestampNs(),
frame.getBuffer());
nativeOnFrameCaptured(nativeAndroidVideoTrackSource, frame.getRotation(),
frame.getTimestampNs(), frame.getBuffer());
}

/**
@@ -59,6 +98,9 @@ private static native void nativeAdaptOutputFormat(long nativeAndroidVideoTrackS
int landscapeWidth, int landscapeHeight, @Nullable Integer maxLandscapePixelCount,
int portraitWidth, int portraitHeight, @Nullable Integer maxPortraitPixelCount,
@Nullable Integer maxFps);
private static native void nativeOnFrameCaptured(long nativeAndroidVideoTrackSource, int width,
int height, int rotation, long timestampNs, VideoFrame.Buffer buffer);
@Nullable
private static native FrameAdaptationParameters nativeAdaptFrame(
long nativeAndroidVideoTrackSource, int width, int height, int rotation, long timestampNs);
private static native void nativeOnFrameCaptured(
long nativeAndroidVideoTrackSource, int rotation, long timestampNs, VideoFrame.Buffer buffer);
}
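
Because the class above keeps the JNI surface this thin, the adapt-then-deliver logic in a CapturerObserver can be unit tested on the JVM with a mocked native layer. A hypothetical JUnit/Mockito sketch (not part of this CL; assumes Mockito and JUnit on the test classpath, and that the test lives in the org.webrtc package so it can see the package-private class):

package org.webrtc;

import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;

import org.junit.Test;

public class NativeAndroidVideoTrackSourceMockTest {
  @Test
  public void nullAdaptationParametersMeansFrameIsDropped() {
    final NativeAndroidVideoTrackSource nativeSource = mock(NativeAndroidVideoTrackSource.class);
    when(nativeSource.adaptFrame(any())).thenReturn(null);

    // Mirror the observer logic from VideoSource: a null result means the frame is
    // dropped and onFrameCaptured() must not be called.
    final VideoFrame frame = mock(VideoFrame.class);
    final NativeAndroidVideoTrackSource.FrameAdaptationParameters parameters =
        nativeSource.adaptFrame(frame);
    if (parameters != null) {
      nativeSource.onFrameCaptured(frame);
    }

    verify(nativeSource, never()).onFrameCaptured(any());
  }
}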
14 changes: 13 additions & 1 deletion sdk/android/src/java/org/webrtc/NativeCapturerObserver.java
@@ -36,6 +36,18 @@ public void onCapturerStopped() {

@Override
public void onFrameCaptured(VideoFrame frame) {
nativeAndroidVideoTrackSource.onFrameCaptured(frame);
final NativeAndroidVideoTrackSource.FrameAdaptationParameters parameters =
nativeAndroidVideoTrackSource.adaptFrame(frame);
if (parameters == null) {
// Drop frame.
return;
}

final VideoFrame.Buffer adaptedBuffer =
frame.getBuffer().cropAndScale(parameters.cropX, parameters.cropY, parameters.cropWidth,
parameters.cropHeight, parameters.scaleWidth, parameters.scaleHeight);
nativeAndroidVideoTrackSource.onFrameCaptured(
new VideoFrame(adaptedBuffer, frame.getRotation(), parameters.timestampNs));
adaptedBuffer.release();
}
}
55 changes: 33 additions & 22 deletions sdk/android/src/jni/android_video_track_source.cc
@@ -86,21 +86,21 @@ bool AndroidVideoTrackSource::remote() const {
return false;
}

void AndroidVideoTrackSource::OnFrameCaptured(
ScopedJavaLocalRef<jobject> AndroidVideoTrackSource::AdaptFrame(
JNIEnv* env,
const JavaRef<jobject>& j_caller,
jint j_width,
jint j_height,
jint j_rotation,
jlong j_timestamp_ns,
const JavaRef<jobject>& j_video_frame_buffer) {
jlong j_timestamp_ns) {
const VideoRotation rotation = jintToVideoRotation(j_rotation);

int64_t camera_time_us = j_timestamp_ns / rtc::kNumNanosecsPerMicrosec;
int64_t translated_camera_time_us =
align_timestamps_ ? timestamp_aligner_.TranslateTimestamp(
camera_time_us, rtc::TimeMicros())
: camera_time_us;
const int64_t camera_time_us = j_timestamp_ns / rtc::kNumNanosecsPerMicrosec;
const int64_t aligned_timestamp_ns =
align_timestamps_ ? rtc::kNumNanosecsPerMicrosec *
timestamp_aligner_.TranslateTimestamp(
camera_time_us, rtc::TimeMicros())
: j_timestamp_ns;

int adapted_width;
int adapted_height;
@@ -109,35 +109,46 @@ void AndroidVideoTrackSource::OnFrameCaptured(
int crop_x;
int crop_y;

// TODO(magjed): Move this logic to users of NativeAndroidVideoTrackSource
// instead, in order to keep this native wrapping layer as thin as possible.
if (rotation % 180 == 0) {
if (!AdaptFrame(j_width, j_height, camera_time_us, &adapted_width,
&adapted_height, &crop_width, &crop_height, &crop_x,
&crop_y)) {
return;
if (!rtc::AdaptedVideoTrackSource::AdaptFrame(
j_width, j_height, camera_time_us, &adapted_width, &adapted_height,
&crop_width, &crop_height, &crop_x, &crop_y)) {
return nullptr;
}
} else {
// Swap all width/height and x/y.
if (!AdaptFrame(j_height, j_width, camera_time_us, &adapted_height,
&adapted_width, &crop_height, &crop_width, &crop_y,
&crop_x)) {
return;
if (!rtc::AdaptedVideoTrackSource::AdaptFrame(
j_height, j_width, camera_time_us, &adapted_height, &adapted_width,
&crop_height, &crop_width, &crop_y, &crop_x)) {
return nullptr;
}
}

return Java_FrameAdaptationParameters_Constructor(
env, crop_x, crop_y, crop_width, crop_height, adapted_width,
adapted_height, aligned_timestamp_ns);
}

void AndroidVideoTrackSource::OnFrameCaptured(
JNIEnv* env,
const JavaRef<jobject>& j_caller,
jint j_rotation,
jlong j_timestamp_ns,
const JavaRef<jobject>& j_video_frame_buffer) {
rtc::scoped_refptr<VideoFrameBuffer> buffer =
AndroidVideoBuffer::Create(env, j_video_frame_buffer)
->CropAndScale(env, crop_x, crop_y, crop_width, crop_height,
adapted_width, adapted_height);
AndroidVideoBuffer::Create(env, j_video_frame_buffer);
const VideoRotation rotation = jintToVideoRotation(j_rotation);

// AdaptedVideoTrackSource handles applying rotation for I420 frames.
if (apply_rotation() && rotation != kVideoRotation_0) {
if (apply_rotation() && rotation != kVideoRotation_0)
buffer = buffer->ToI420();
}

OnFrame(VideoFrame::Builder()
.set_video_frame_buffer(buffer)
.set_rotation(rotation)
.set_timestamp_us(translated_camera_time_us)
.set_timestamp_us(j_timestamp_ns / rtc::kNumNanosecsPerMicrosec)
.build());
}
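
One detail in AdaptFrame() above that is easy to misread is the width/height and x/y swap for rotated frames: adaptation is negotiated in display (rotated) orientation, but the returned parameters are expressed in unrotated buffer coordinates so that cropAndScale() on the Java side can be applied directly. A sketch with assumed numbers (illustration only, not taken from this CL):

static VideoFrame.Buffer halfSizeFromRotated90(VideoFrame frame) {
  // Assumed input: frame.getBuffer() is 1280x720 and frame.getRotation() == 90, so the
  // adapter saw 720x1280 and asked for 360x640. The resulting parameters come back already
  // swapped into buffer coordinates, e.g.
  // FrameAdaptationParameters(0, 0, 1280, 720, 640, 360, timestampNs), and map directly to:
  return frame.getBuffer().cropAndScale(
      /* cropX= */ 0, /* cropY= */ 0,
      /* cropWidth= */ 1280, /* cropHeight= */ 720, // full unrotated buffer
      /* scaleWidth= */ 640, /* scaleHeight= */ 360); // 360x640 once rotation is applied
}

The 90-degree rotation itself still travels with the VideoFrame and is applied later, when the frame is rendered or encoded.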

24 changes: 22 additions & 2 deletions sdk/android/src/jni/android_video_track_source.h
@@ -24,6 +24,11 @@
namespace webrtc {
namespace jni {

// This class needs to be used in conjunction with the corresponding Java class
// NativeAndroidVideoTrackSource. This class is thread safe and methods can be
// called from any thread, but if frames A, B, ..., are sent to adaptFrame(),
// the adapted frames adaptedA, adaptedB, ..., need to be passed in the same
// order to onFrameCaptured().
class AndroidVideoTrackSource : public rtc::AdaptedVideoTrackSource {
public:
AndroidVideoTrackSource(rtc::Thread* signaling_thread,
@@ -45,10 +50,25 @@ class AndroidVideoTrackSource : public rtc::AdaptedVideoTrackSource {

bool remote() const override;

// This function should be called before delivering any frame to determine if
// the frame should be dropped or what the cropping and scaling parameters
// should be. This function is thread safe and can be called from any thread.
// This function returns
// NativeAndroidVideoTrackSource.FrameAdaptationParameters, or null if the
// frame should be dropped.
ScopedJavaLocalRef<jobject> AdaptFrame(JNIEnv* env,
const JavaRef<jobject>& j_caller,
jint j_width,
jint j_height,
jint j_rotation,
jlong j_timestamp_ns);

// This function converts and passes the frame on to the rest of the C++
// WebRTC layer. Note that AdaptFrame() is expected to be called first and
// that the delivered frame conforms to those parameters.
// This function is thread safe and can be called from any thread.
void OnFrameCaptured(JNIEnv* env,
const JavaRef<jobject>& j_caller,
jint j_width,
jint j_height,
jint j_rotation,
jlong j_timestamp_ns,
const JavaRef<jobject>& j_video_frame_buffer);
