|
1 # HG changeset patch |
|
2 # Parent fb80f99ca86bacbcddaf203f7183e0456f194811 |
|
3 # User Mike Gorse <mgorse@suse.com> |
|
4 |
|
5 Bug 806917 - support GStreamer 1.0 |
|
6 |
|
7 diff --git a/configure.in b/configure.in |
|
8 --- a/configure.in |
|
9 +++ b/configure.in |
|
10 @@ -5758,28 +5758,36 @@ fi |
|
11 |
|
12 AC_SUBST(MOZ_PULSEAUDIO) |
|
13 AC_SUBST(MOZ_PULSEAUDIO_CFLAGS) |
|
14 AC_SUBST(MOZ_PULSEAUDIO_LIBS) |
|
15 |
|
16 dnl ======================================================== |
|
17 dnl = Enable GStreamer |
|
18 dnl ======================================================== |
|
19 -MOZ_ARG_ENABLE_BOOL(gstreamer, |
|
20 -[ --enable-gstreamer Enable GStreamer support], |
|
21 -MOZ_GSTREAMER=1, |
|
22 -MOZ_GSTREAMER=) |
|
23 +MOZ_ARG_ENABLE_STRING(gstreamer, |
|
24 +[ --enable-gstreamer[=1.0] Enable GStreamer support], |
|
25 +[ MOZ_GSTREAMER=1 |
|
26 + # API version, eg 0.10, 1.0 etc |
|
27 + if test -n "$enableval"; then |
|
28 + GST_API_VERSION=$enableval |
|
29 + else |
|
30 + GST_API_VERSION=0.10 |
|
31 + fi] |
|
32 +[ MOZ_GSTREAMER=]) |
|
33 |
|
34 if test "$MOZ_GSTREAMER"; then |
|
35 - # API version, eg 0.10, 1.0 etc |
|
36 - GST_API_VERSION=0.10 |
|
37 # core/base release number |
|
38 # depend on >= 0.10.33 as that's when the playbin2 source-setup signal was |
|
39 # introduced |
|
40 - GST_VERSION=0.10.33 |
|
41 + if test "$GST_API_VERSION" = "1.0"; then |
|
42 + GST_VERSION=1.0 |
|
43 + else |
|
44 + GST_VERSION=0.10.33 |
|
45 + fi |
|
46 PKG_CHECK_MODULES(GSTREAMER, |
|
47 gstreamer-$GST_API_VERSION >= $GST_VERSION |
|
48 gstreamer-app-$GST_API_VERSION |
|
49 gstreamer-plugins-base-$GST_API_VERSION) |
|
50 if test -n "$GSTREAMER_LIBS"; then |
|
51 _SAVE_LDFLAGS=$LDFLAGS |
|
52 LDFLAGS="$LDFLAGS $GSTREAMER_LIBS -lgstvideo-$GST_API_VERSION" |
|
53 AC_TRY_LINK(,[return 0;],_HAVE_LIBGSTVIDEO=1,_HAVE_LIBGSTVIDEO=) |
|
54 diff --git a/content/media/gstreamer/GStreamerReader.cpp b/content/media/gstreamer/GStreamerReader.cpp |
|
55 --- a/content/media/gstreamer/GStreamerReader.cpp |
|
56 +++ b/content/media/gstreamer/GStreamerReader.cpp |
|
57 @@ -69,18 +69,22 @@ GStreamerReader::GStreamerReader(Abstrac |
|
58 MOZ_COUNT_CTOR(GStreamerReader); |
|
59 |
|
60 mSrcCallbacks.need_data = GStreamerReader::NeedDataCb; |
|
61 mSrcCallbacks.enough_data = GStreamerReader::EnoughDataCb; |
|
62 mSrcCallbacks.seek_data = GStreamerReader::SeekDataCb; |
|
63 |
|
64 mSinkCallbacks.eos = GStreamerReader::EosCb; |
|
65 mSinkCallbacks.new_preroll = GStreamerReader::NewPrerollCb; |
|
66 +#if GST_VERSION_MAJOR == 1 |
|
67 + mSinkCallbacks.new_sample = GStreamerReader::NewBufferCb; |
|
68 +#else |
|
69 mSinkCallbacks.new_buffer = GStreamerReader::NewBufferCb; |
|
70 mSinkCallbacks.new_buffer_list = NULL; |
|
71 +#endif |
|
72 |
|
73 gst_segment_init(&mVideoSegment, GST_FORMAT_UNDEFINED); |
|
74 gst_segment_init(&mAudioSegment, GST_FORMAT_UNDEFINED); |
|
75 } |
|
76 |
|
77 GStreamerReader::~GStreamerReader() |
|
78 { |
|
79 MOZ_COUNT_DTOR(GStreamerReader); |
|
80 @@ -120,19 +124,26 @@ nsresult GStreamerReader::Init(MediaDeco |
|
81 mVideoSink = gst_parse_bin_from_description("capsfilter name=filter ! " |
|
82 "appsink name=videosink sync=true max-buffers=1 " |
|
83 "caps=video/x-raw-yuv,format=(fourcc)I420" |
|
84 , TRUE, NULL); |
|
85 mVideoAppSink = GST_APP_SINK(gst_bin_get_by_name(GST_BIN(mVideoSink), |
|
86 "videosink")); |
|
87 gst_app_sink_set_callbacks(mVideoAppSink, &mSinkCallbacks, |
|
88 (gpointer) this, NULL); |
|
89 - GstPad *sinkpad = gst_element_get_pad(GST_ELEMENT(mVideoAppSink), "sink"); |
|
90 + GstPad *sinkpad = gst_element_get_static_pad(GST_ELEMENT(mVideoAppSink), "sink"); |
|
91 +#if GST_VERSION_MAJOR == 1 |
|
92 + // TODO: Figure out whether we need UPSTREAM or DOWNSTREAM, or both |
|
93 + gst_pad_add_probe(sinkpad, |
|
94 + (GstPadProbeType) (GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM | GST_PAD_PROBE_TYPE_EVENT_UPSTREAM), |
|
95 + &GStreamerReader::EventProbeCb, this, NULL); |
|
96 +#else |
|
97 gst_pad_add_event_probe(sinkpad, |
|
98 G_CALLBACK(&GStreamerReader::EventProbeCb), this); |
|
99 +#endif |
|
100 gst_object_unref(sinkpad); |
|
101 |
|
102 mAudioSink = gst_parse_bin_from_description("capsfilter name=filter ! " |
|
103 #ifdef MOZ_SAMPLE_TYPE_FLOAT32 |
|
104 "appsink name=audiosink sync=true caps=audio/x-raw-float," |
|
105 #ifdef IS_LITTLE_ENDIAN |
|
106 "channels={1,2},rate=44100,width=32,endianness=1234", TRUE, NULL); |
|
107 #else |
|
108 @@ -145,19 +156,25 @@ nsresult GStreamerReader::Init(MediaDeco |
|
109 #else |
|
110 "channels={1,2},rate=48000,width=16,endianness=4321", TRUE, NULL); |
|
111 #endif |
|
112 #endif |
|
113 mAudioAppSink = GST_APP_SINK(gst_bin_get_by_name(GST_BIN(mAudioSink), |
|
114 "audiosink")); |
|
115 gst_app_sink_set_callbacks(mAudioAppSink, &mSinkCallbacks, |
|
116 (gpointer) this, NULL); |
|
117 - sinkpad = gst_element_get_pad(GST_ELEMENT(mAudioAppSink), "sink"); |
|
118 + sinkpad = gst_element_get_static_pad(GST_ELEMENT(mAudioAppSink), "sink"); |
|
119 +#if GST_VERSION_MAJOR == 1 |
|
120 + gst_pad_add_probe(sinkpad, |
|
121 + (GstPadProbeType) (GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM | GST_PAD_PROBE_TYPE_EVENT_UPSTREAM), |
|
122 + &GStreamerReader::EventProbeCb, this, NULL); |
|
123 +#else |
|
124 gst_pad_add_event_probe(sinkpad, |
|
125 G_CALLBACK(&GStreamerReader::EventProbeCb), this); |
|
126 +#endif |
|
127 gst_object_unref(sinkpad); |
|
128 |
|
129 g_object_set(mPlayBin, "uri", "appsrc://", |
|
130 "video-sink", mVideoSink, |
|
131 "audio-sink", mAudioSink, |
|
132 NULL); |
|
133 |
|
134 g_object_connect(mPlayBin, "signal::source-setup", |
|
135 @@ -231,17 +248,17 @@ nsresult GStreamerReader::ReadMetadata(V |
|
136 filter = gst_bin_get_by_name(GST_BIN(mAudioSink), "filter"); |
|
137 else if (!(current_flags & GST_PLAY_FLAG_VIDEO)) |
|
138 filter = gst_bin_get_by_name(GST_BIN(mVideoSink), "filter"); |
|
139 |
|
140 if (filter) { |
|
141 /* Little trick: set the target caps to "skip" so that playbin2 fails to |
|
142 * find a decoder for the stream we want to skip. |
|
143 */ |
|
144 - GstCaps *filterCaps = gst_caps_new_simple ("skip", NULL); |
|
145 + GstCaps *filterCaps = gst_caps_new_simple ("skip", NULL, NULL); |
|
146 g_object_set(filter, "caps", filterCaps, NULL); |
|
147 gst_caps_unref(filterCaps); |
|
148 gst_object_unref(filter); |
|
149 } |
|
150 |
|
151 /* start the pipeline */ |
|
152 gst_element_set_state(mPlayBin, GST_STATE_PAUSED); |
|
153 |
|
154 @@ -284,19 +301,24 @@ nsresult GStreamerReader::ReadMetadata(V |
|
155 gst_element_set_state(mPlayBin, GST_STATE_NULL); |
|
156 gst_message_unref(message); |
|
157 return NS_ERROR_FAILURE; |
|
158 } |
|
159 } |
|
160 |
|
161 /* report the duration */ |
|
162 gint64 duration; |
|
163 +#if GST_VERSION_MAJOR == 1 |
|
164 + if (gst_element_query_duration(GST_ELEMENT(mPlayBin), |
|
165 + GST_FORMAT_TIME, &duration)) { |
|
166 +#else |
|
167 GstFormat format = GST_FORMAT_TIME; |
|
168 if (gst_element_query_duration(GST_ELEMENT(mPlayBin), |
|
169 &format, &duration) && format == GST_FORMAT_TIME) { |
|
170 +#endif |
|
171 ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor()); |
|
172 LOG(PR_LOG_DEBUG, ("returning duration %" GST_TIME_FORMAT, |
|
173 GST_TIME_ARGS (duration))); |
|
174 duration = GST_TIME_AS_USECONDS (duration); |
|
175 mDecoder->SetMediaDuration(duration); |
|
176 } |
|
177 |
|
178 int n_video = 0, n_audio = 0; |
|
179 @@ -365,59 +387,87 @@ bool GStreamerReader::DecodeAudioData() |
|
180 { |
|
181 NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread."); |
|
182 |
|
183 if (!WaitForDecodedData(&mAudioSinkBufferCount)) { |
|
184 mAudioQueue.Finish(); |
|
185 return false; |
|
186 } |
|
187 |
|
188 +#if GST_VERSION_MAJOR == 1 |
|
189 + GstSample *sample = gst_app_sink_pull_sample(mAudioAppSink); |
|
190 + GstBuffer *buffer = gst_sample_get_buffer(sample); |
|
191 +#else |
|
192 GstBuffer *buffer = gst_app_sink_pull_buffer(mAudioAppSink); |
|
193 +#endif |
|
194 int64_t timestamp = GST_BUFFER_TIMESTAMP(buffer); |
|
195 timestamp = gst_segment_to_stream_time(&mAudioSegment, |
|
196 GST_FORMAT_TIME, timestamp); |
|
197 timestamp = GST_TIME_AS_USECONDS(timestamp); |
|
198 int64_t duration = 0; |
|
199 if (GST_CLOCK_TIME_IS_VALID(GST_BUFFER_DURATION(buffer))) |
|
200 duration = GST_TIME_AS_USECONDS(GST_BUFFER_DURATION(buffer)); |
|
201 |
|
202 int64_t offset = GST_BUFFER_OFFSET(buffer); |
|
203 +#if GST_VERSION_MAJOR == 1 |
|
204 + GstMapInfo info; |
|
205 + gst_buffer_map(buffer, &info, GST_MAP_READ); |
|
206 + unsigned int size = info.size; |
|
207 +#else |
|
208 unsigned int size = GST_BUFFER_SIZE(buffer); |
|
209 +#endif |
|
210 int32_t frames = (size / sizeof(AudioDataValue)) / mInfo.mAudioChannels; |
|
211 ssize_t outSize = static_cast<size_t>(size / sizeof(AudioDataValue)); |
|
212 nsAutoArrayPtr<AudioDataValue> data(new AudioDataValue[outSize]); |
|
213 +#if GST_VERSION_MAJOR == 1 |
|
214 + memcpy(data, info.data, info.size); |
|
215 + gst_buffer_unmap(buffer, &info); |
|
216 +#else |
|
217 memcpy(data, GST_BUFFER_DATA(buffer), GST_BUFFER_SIZE(buffer)); |
|
218 +#endif |
|
219 AudioData *audio = new AudioData(offset, timestamp, duration, |
|
220 frames, data.forget(), mInfo.mAudioChannels); |
|
221 |
|
222 mAudioQueue.Push(audio); |
|
223 gst_buffer_unref(buffer); |
|
224 |
|
225 return true; |
|
226 } |
|
227 |
|
228 bool GStreamerReader::DecodeVideoFrame(bool &aKeyFrameSkip, |
|
229 int64_t aTimeThreshold) |
|
230 { |
|
231 NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread."); |
|
232 |
|
233 +#if GST_VERSION_MAJOR == 1 |
|
234 + GstSample *sample = NULL; |
|
235 +#endif |
|
236 GstBuffer *buffer = NULL; |
|
237 int64_t timestamp, nextTimestamp; |
|
238 while (true) |
|
239 { |
|
240 if (!WaitForDecodedData(&mVideoSinkBufferCount)) { |
|
241 mVideoQueue.Finish(); |
|
242 break; |
|
243 } |
|
244 mDecoder->NotifyDecodedFrames(0, 1); |
|
245 |
|
246 +#if GST_VERSION_MAJOR == 1 |
|
247 + sample = gst_app_sink_pull_sample(mVideoAppSink); |
|
248 + buffer = gst_sample_get_buffer(sample); |
|
249 +#else |
|
250 buffer = gst_app_sink_pull_buffer(mVideoAppSink); |
|
251 +#endif |
|
252 bool isKeyframe = !GST_BUFFER_FLAG_IS_SET(buffer, GST_BUFFER_FLAG_DISCONT); |
|
253 if ((aKeyFrameSkip && !isKeyframe)) { |
|
254 +#if GST_VERSION_MAJOR == 1 |
|
255 + gst_sample_unref(sample); |
|
256 +#else |
|
257 gst_buffer_unref(buffer); |
|
258 +#endif |
|
259 buffer = NULL; |
|
260 continue; |
|
261 } |
|
262 |
|
263 timestamp = GST_BUFFER_TIMESTAMP(buffer); |
|
264 { |
|
265 ReentrantMonitorAutoEnter mon(mGstThreadsMonitor); |
|
266 timestamp = gst_segment_to_stream_time(&mVideoSegment, |
|
267 @@ -431,62 +481,90 @@ bool GStreamerReader::DecodeVideoFrame(b |
|
268 else if (fpsNum && fpsDen) |
|
269 /* add 1-frame duration */ |
|
270 nextTimestamp += gst_util_uint64_scale(GST_USECOND, fpsNum, fpsDen); |
|
271 |
|
272 if (timestamp < aTimeThreshold) { |
|
273 LOG(PR_LOG_DEBUG, ("skipping frame %" GST_TIME_FORMAT |
|
274 " threshold %" GST_TIME_FORMAT, |
|
275 GST_TIME_ARGS(timestamp), GST_TIME_ARGS(aTimeThreshold))); |
|
276 +#if GST_VERSION_MAJOR == 1 |
|
277 + gst_sample_unref(sample); |
|
278 +#else |
|
279 gst_buffer_unref(buffer); |
|
280 +#endif |
|
281 buffer = NULL; |
|
282 continue; |
|
283 } |
|
284 |
|
285 break; |
|
286 } |
|
287 |
|
288 if (buffer == NULL) |
|
289 /* no more frames */ |
|
290 return false; |
|
291 |
|
292 +#if GST_VERSION_MAJOR == 1 |
|
293 + GstMapInfo info; |
|
294 + gst_buffer_map(buffer, &info, GST_MAP_READ); |
|
295 + guint8 *data = info.data; |
|
296 +#else |
|
297 guint8 *data = GST_BUFFER_DATA(buffer); |
|
298 +#endif |
|
299 |
|
300 int width = mPicture.width; |
|
301 int height = mPicture.height; |
|
302 GstVideoFormat format = mFormat; |
|
303 |
|
304 VideoData::YCbCrBuffer b; |
|
305 +#if GST_VERSION_MAJOR == 1 |
|
306 + GstVideoInfo video_info; |
|
307 + gst_video_info_set_format(&video_info, format, width, height); |
|
308 + for(int i = 0; i < 3; i++) { |
|
309 + b.mPlanes[i].mData = data + GST_VIDEO_INFO_COMP_OFFSET(&video_info, i); |
|
310 + b.mPlanes[i].mStride = GST_VIDEO_INFO_COMP_STRIDE(&video_info, i); |
|
311 + b.mPlanes[i].mHeight = GST_VIDEO_INFO_COMP_HEIGHT(&video_info, i); |
|
312 + b.mPlanes[i].mWidth = GST_VIDEO_INFO_COMP_WIDTH(&video_info, i); |
|
313 + b.mPlanes[i].mOffset = 0; |
|
314 + b.mPlanes[i].mSkip = 0; |
|
315 + } |
|
316 +#else |
|
317 for(int i = 0; i < 3; i++) { |
|
318 b.mPlanes[i].mData = data + gst_video_format_get_component_offset(format, i, |
|
319 width, height); |
|
320 b.mPlanes[i].mStride = gst_video_format_get_row_stride(format, i, width); |
|
321 b.mPlanes[i].mHeight = gst_video_format_get_component_height(format, |
|
322 i, height); |
|
323 b.mPlanes[i].mWidth = gst_video_format_get_component_width(format, |
|
324 i, width); |
|
325 b.mPlanes[i].mOffset = 0; |
|
326 b.mPlanes[i].mSkip = 0; |
|
327 } |
|
328 +#endif |
|
329 |
|
330 bool isKeyframe = !GST_BUFFER_FLAG_IS_SET(buffer, |
|
331 GST_BUFFER_FLAG_DELTA_UNIT); |
|
332 /* XXX ? */ |
|
333 int64_t offset = 0; |
|
334 VideoData *video = VideoData::Create(mInfo, |
|
335 mDecoder->GetImageContainer(), |
|
336 offset, |
|
337 timestamp, |
|
338 nextTimestamp, |
|
339 b, |
|
340 isKeyframe, |
|
341 -1, |
|
342 mPicture); |
|
343 mVideoQueue.Push(video); |
|
344 +#if GST_VERSION_MAJOR == 1 |
|
345 + gst_buffer_unmap(buffer, &info); |
|
346 + gst_sample_unref(sample); |
|
347 +#else |
|
348 gst_buffer_unref(buffer); |
|
349 +#endif |
|
350 |
|
351 return true; |
|
352 } |
|
353 |
|
354 nsresult GStreamerReader::Seek(int64_t aTarget, |
|
355 int64_t aStartTime, |
|
356 int64_t aEndTime, |
|
357 int64_t aCurrentTime) |
|
358 @@ -509,52 +587,62 @@ nsresult GStreamerReader::Seek(int64_t a |
|
359 |
|
360 nsresult GStreamerReader::GetBuffered(nsTimeRanges* aBuffered, |
|
361 int64_t aStartTime) |
|
362 { |
|
363 if (!mInfo.mHasVideo && !mInfo.mHasAudio) { |
|
364 return NS_OK; |
|
365 } |
|
366 |
|
367 - GstFormat format = GST_FORMAT_TIME; |
|
368 +#if GST_VERSION_MAJOR == 0 |
|
369 + GstFormat format = GST_FORMAT_TIME; |
|
370 +#endif |
|
371 + |
|
372 MediaResource* resource = mDecoder->GetResource(); |
|
373 gint64 resourceLength = resource->GetLength(); |
|
374 nsTArray<MediaByteRange> ranges; |
|
375 resource->GetCachedRanges(ranges); |
|
376 |
|
377 if (mDecoder->OnStateMachineThread()) |
|
378 /* Report the position from here while buffering as we can't report it from |
|
379 * the gstreamer threads that are actually reading from the resource |
|
380 */ |
|
381 NotifyBytesConsumed(); |
|
382 |
|
383 if (resource->IsDataCachedToEndOfResource(0)) { |
|
384 /* fast path for local or completely cached files */ |
|
385 gint64 duration = 0; |
|
386 - GstFormat format = GST_FORMAT_TIME; |
|
387 - |
|
388 duration = QueryDuration(); |
|
389 double end = (double) duration / GST_MSECOND; |
|
390 LOG(PR_LOG_DEBUG, ("complete range [0, %f] for [0, %li]", |
|
391 end, resourceLength)); |
|
392 aBuffered->Add(0, end); |
|
393 return NS_OK; |
|
394 } |
|
395 |
|
396 for(uint32_t index = 0; index < ranges.Length(); index++) { |
|
397 int64_t startOffset = ranges[index].mStart; |
|
398 int64_t endOffset = ranges[index].mEnd; |
|
399 gint64 startTime, endTime; |
|
400 |
|
401 +#if GST_VERSION_MAJOR == 1 |
|
402 + if (!gst_element_query_convert(GST_ELEMENT(mPlayBin), GST_FORMAT_BYTES, |
|
403 + startOffset, GST_FORMAT_TIME, &startTime)) |
|
404 + continue; |
|
405 + if (!gst_element_query_convert(GST_ELEMENT(mPlayBin), GST_FORMAT_BYTES, |
|
406 + endOffset, GST_FORMAT_TIME, &endTime)) |
|
407 + continue; |
|
408 +#else |
|
409 if (!gst_element_query_convert(GST_ELEMENT(mPlayBin), GST_FORMAT_BYTES, |
|
410 startOffset, &format, &startTime) || format != GST_FORMAT_TIME) |
|
411 continue; |
|
412 if (!gst_element_query_convert(GST_ELEMENT(mPlayBin), GST_FORMAT_BYTES, |
|
413 endOffset, &format, &endTime) || format != GST_FORMAT_TIME) |
|
414 continue; |
|
415 +#endif |
|
416 |
|
417 double start = start = (double) GST_TIME_AS_USECONDS (startTime) / GST_MSECOND; |
|
418 double end = (double) GST_TIME_AS_USECONDS (endTime) / GST_MSECOND; |
|
419 LOG(PR_LOG_DEBUG, ("adding range [%f, %f] for [%li %li] size %li", |
|
420 start, end, startOffset, endOffset, resourceLength)); |
|
421 aBuffered->Add(start, end); |
|
422 } |
|
423 |
|
424 @@ -563,48 +651,64 @@ nsresult GStreamerReader::GetBuffered(ns |
|
425 |
|
426 void GStreamerReader::ReadAndPushData(guint aLength) |
|
427 { |
|
428 MediaResource* resource = mDecoder->GetResource(); |
|
429 NS_ASSERTION(resource, "Decoder has no media resource"); |
|
430 nsresult rv = NS_OK; |
|
431 |
|
432 GstBuffer *buffer = gst_buffer_new_and_alloc(aLength); |
|
433 +#if GST_VERSION_MAJOR == 1 |
|
434 + GstMapInfo info; |
|
435 + gst_buffer_map(buffer, &info, GST_MAP_WRITE); |
|
436 + guint8 *data = info.data; |
|
437 +#else |
|
438 guint8 *data = GST_BUFFER_DATA(buffer); |
|
439 +#endif |
|
440 uint32_t size = 0, bytesRead = 0; |
|
441 while(bytesRead < aLength) { |
|
442 rv = resource->Read(reinterpret_cast<char*>(data + bytesRead), |
|
443 aLength - bytesRead, &size); |
|
444 if (NS_FAILED(rv) || size == 0) |
|
445 break; |
|
446 |
|
447 bytesRead += size; |
|
448 } |
|
449 |
|
450 +#if GST_VERSION_MAJOR == 1 |
|
451 + gst_buffer_unmap(buffer, &info); |
|
452 + gst_buffer_set_size(buffer, bytesRead); |
|
453 +#else |
|
454 GST_BUFFER_SIZE(buffer) = bytesRead; |
|
455 +#endif |
|
456 mByteOffset += bytesRead; |
|
457 |
|
458 GstFlowReturn ret = gst_app_src_push_buffer(mSource, gst_buffer_ref(buffer)); |
|
459 if (ret != GST_FLOW_OK) |
|
460 LOG(PR_LOG_ERROR, ("ReadAndPushData push ret %s", gst_flow_get_name(ret))); |
|
461 |
|
462 - if (GST_BUFFER_SIZE (buffer) < aLength) |
|
463 + if (bytesRead < aLength) |
|
464 /* If we read less than what we wanted, we reached the end */ |
|
465 gst_app_src_end_of_stream(mSource); |
|
466 |
|
467 gst_buffer_unref(buffer); |
|
468 } |
|
469 |
|
470 int64_t GStreamerReader::QueryDuration() |
|
471 { |
|
472 gint64 duration = 0; |
|
473 GstFormat format = GST_FORMAT_TIME; |
|
474 |
|
475 +#if GST_VERSION_MAJOR == 1 |
|
476 + if (gst_element_query_duration(GST_ELEMENT(mPlayBin), |
|
477 + format, &duration)) { |
|
478 +#else |
|
479 if (gst_element_query_duration(GST_ELEMENT(mPlayBin), |
|
480 &format, &duration)) { |
|
481 +#endif |
|
482 if (format == GST_FORMAT_TIME) { |
|
483 LOG(PR_LOG_DEBUG, ("pipeline duration %" GST_TIME_FORMAT, |
|
484 GST_TIME_ARGS (duration))); |
|
485 duration = GST_TIME_AS_USECONDS (duration); |
|
486 } |
|
487 } |
|
488 |
|
489 /*if (mDecoder->mDuration != -1 && |
|
490 @@ -668,60 +772,95 @@ gboolean GStreamerReader::SeekData(GstAp |
|
491 if (NS_SUCCEEDED(rv)) |
|
492 mByteOffset = mLastReportedByteOffset = aOffset; |
|
493 else |
|
494 LOG(PR_LOG_ERROR, ("seek at %lu failed", aOffset)); |
|
495 |
|
496 return NS_SUCCEEDED(rv); |
|
497 } |
|
498 |
|
499 +#if GST_VERSION_MAJOR == 1 |
|
500 +GstPadProbeReturn GStreamerReader::EventProbeCb(GstPad *aPad, |
|
501 + GstPadProbeInfo *aInfo, |
|
502 + gpointer aUserData) |
|
503 +{ |
|
504 + GStreamerReader *reader = (GStreamerReader *) aUserData; |
|
505 + GstEvent *aEvent = (GstEvent *)aInfo->data; |
|
506 + return reader->EventProbe(aPad, aEvent); |
|
507 +} |
|
508 +#else |
|
509 gboolean GStreamerReader::EventProbeCb(GstPad *aPad, |
|
510 GstEvent *aEvent, |
|
511 gpointer aUserData) |
|
512 { |
|
513 GStreamerReader *reader = (GStreamerReader *) aUserData; |
|
514 return reader->EventProbe(aPad, aEvent); |
|
515 } |
|
516 +#endif |
|
517 |
|
518 +#if GST_VERSION_MAJOR == 1 |
|
519 +GstPadProbeReturn GStreamerReader::EventProbe(GstPad *aPad, GstEvent *aEvent) |
|
520 +#else |
|
521 gboolean GStreamerReader::EventProbe(GstPad *aPad, GstEvent *aEvent) |
|
522 +#endif |
|
523 { |
|
524 GstElement *parent = GST_ELEMENT(gst_pad_get_parent(aPad)); |
|
525 switch(GST_EVENT_TYPE(aEvent)) { |
|
526 +#if GST_VERSION_MAJOR == 1 |
|
527 + case GST_EVENT_SEGMENT: |
|
528 +#else |
|
529 case GST_EVENT_NEWSEGMENT: |
|
530 +#endif |
|
531 { |
|
532 +#if GST_VERSION_MAJOR == 1 |
|
533 + const GstSegment *newSegment; |
|
534 +#else |
|
535 gboolean update; |
|
536 gdouble rate; |
|
537 GstFormat format; |
|
538 gint64 start, stop, position; |
|
539 +#endif |
|
540 GstSegment *segment; |
|
541 |
|
542 /* Store the segments so we can convert timestamps to stream time, which |
|
543 * is what the upper layers sync on. |
|
544 */ |
|
545 ReentrantMonitorAutoEnter mon(mGstThreadsMonitor); |
|
546 +#if GST_VERSION_MAJOR == 1 |
|
547 + gst_event_parse_segment(aEvent, &newSegment); |
|
548 +#else |
|
549 gst_event_parse_new_segment(aEvent, &update, &rate, &format, |
|
550 &start, &stop, &position); |
|
551 +#endif |
|
552 if (parent == GST_ELEMENT(mVideoAppSink)) |
|
553 segment = &mVideoSegment; |
|
554 else |
|
555 segment = &mAudioSegment; |
|
556 +#if GST_VERSION_MAJOR == 1 |
|
557 + gst_segment_copy_into (newSegment, segment); |
|
558 +#else |
|
559 gst_segment_set_newsegment(segment, update, rate, format, |
|
560 start, stop, position); |
|
561 +#endif |
|
562 break; |
|
563 } |
|
564 case GST_EVENT_FLUSH_STOP: |
|
565 /* Reset on seeks */ |
|
566 ResetDecode(); |
|
567 break; |
|
568 default: |
|
569 break; |
|
570 } |
|
571 gst_object_unref(parent); |
|
572 |
|
573 +#if GST_VERSION_MAJOR == 1 |
|
574 + return GST_PAD_PROBE_OK; |
|
575 +#else |
|
576 return TRUE; |
|
577 +#endif |
|
578 } |
|
579 |
|
580 GstFlowReturn GStreamerReader::NewPrerollCb(GstAppSink *aSink, |
|
581 gpointer aUserData) |
|
582 { |
|
583 GStreamerReader *reader = (GStreamerReader *) aUserData; |
|
584 |
|
585 if (aSink == reader->mVideoAppSink) |
|
586 @@ -730,18 +869,22 @@ GstFlowReturn GStreamerReader::NewPrerol |
|
587 reader->AudioPreroll(); |
|
588 return GST_FLOW_OK; |
|
589 } |
|
590 |
|
591 void GStreamerReader::AudioPreroll() |
|
592 { |
|
593 /* The first audio buffer has reached the audio sink. Get rate and channels */ |
|
594 LOG(PR_LOG_DEBUG, ("Audio preroll")); |
|
595 - GstPad *sinkpad = gst_element_get_pad(GST_ELEMENT(mAudioAppSink), "sink"); |
|
596 + GstPad *sinkpad = gst_element_get_static_pad(GST_ELEMENT(mAudioAppSink), "sink"); |
|
597 +#if GST_VERSION_MAJOR == 1 |
|
598 + GstCaps *caps = gst_pad_get_current_caps(sinkpad); |
|
599 +#else |
|
600 GstCaps *caps = gst_pad_get_negotiated_caps(sinkpad); |
|
601 +#endif |
|
602 GstStructure *s = gst_caps_get_structure(caps, 0); |
|
603 mInfo.mAudioRate = mInfo.mAudioChannels = 0; |
|
604 gst_structure_get_int(s, "rate", (gint *) &mInfo.mAudioRate); |
|
605 gst_structure_get_int(s, "channels", (gint *) &mInfo.mAudioChannels); |
|
606 NS_ASSERTION(mInfo.mAudioRate != 0, ("audio rate is zero")); |
|
607 NS_ASSERTION(mInfo.mAudioChannels != 0, ("audio channels is zero")); |
|
608 NS_ASSERTION(mInfo.mAudioChannels > 0 && mInfo.mAudioChannels <= MAX_CHANNELS, |
|
609 "invalid audio channels number"); |
|
610 @@ -749,19 +892,29 @@ void GStreamerReader::AudioPreroll() |
|
611 gst_caps_unref(caps); |
|
612 gst_object_unref(sinkpad); |
|
613 } |
|
614 |
|
615 void GStreamerReader::VideoPreroll() |
|
616 { |
|
617 /* The first video buffer has reached the video sink. Get width and height */ |
|
618 LOG(PR_LOG_DEBUG, ("Video preroll")); |
|
619 - GstPad *sinkpad = gst_element_get_pad(GST_ELEMENT(mVideoAppSink), "sink"); |
|
620 + GstPad *sinkpad = gst_element_get_static_pad(GST_ELEMENT(mVideoAppSink), "sink"); |
|
621 +#if GST_VERSION_MAJOR == 1 |
|
622 + GstCaps *caps = gst_pad_get_current_caps(sinkpad); |
|
623 + GstVideoInfo info; |
|
624 + memset (&info, 0, sizeof (info)); |
|
625 + gst_video_info_from_caps(&info, caps); |
|
626 + mFormat = info.finfo->format; |
|
627 + mPicture.width = info.width; |
|
628 + mPicture.height = info.height; |
|
629 +#else |
|
630 GstCaps *caps = gst_pad_get_negotiated_caps(sinkpad); |
|
631 gst_video_format_parse_caps(caps, &mFormat, &mPicture.width, &mPicture.height); |
|
632 +#endif |
|
633 GstStructure *structure = gst_caps_get_structure(caps, 0); |
|
634 gst_structure_get_fraction(structure, "framerate", &fpsNum, &fpsDen); |
|
635 NS_ASSERTION(mPicture.width && mPicture.height, "invalid video resolution"); |
|
636 mInfo.mDisplay = nsIntSize(mPicture.width, mPicture.height); |
|
637 mInfo.mHasVideo = true; |
|
638 gst_caps_unref(caps); |
|
639 gst_object_unref(sinkpad); |
|
640 } |
|
641 diff --git a/content/media/gstreamer/GStreamerReader.h b/content/media/gstreamer/GStreamerReader.h |
|
642 --- a/content/media/gstreamer/GStreamerReader.h |
|
643 +++ b/content/media/gstreamer/GStreamerReader.h |
|
644 @@ -71,18 +71,23 @@ private: |
|
645 |
|
646 /* Called when a seek is issued on the pipeline */ |
|
647 static gboolean SeekDataCb(GstAppSrc *aSrc, |
|
648 guint64 aOffset, |
|
649 gpointer aUserData); |
|
650 gboolean SeekData(GstAppSrc *aSrc, guint64 aOffset); |
|
651 |
|
652 /* Called when events reach the sinks. See inline comments */ |
|
653 +#if GST_VERSION_MAJOR == 1 |
|
654 + static GstPadProbeReturn EventProbeCb(GstPad *aPad, GstPadProbeInfo *aInfo, gpointer aUserData); |
|
655 + GstPadProbeReturn EventProbe(GstPad *aPad, GstEvent *aEvent); |
|
656 +#else |
|
657 static gboolean EventProbeCb(GstPad *aPad, GstEvent *aEvent, gpointer aUserData); |
|
658 gboolean EventProbe(GstPad *aPad, GstEvent *aEvent); |
|
659 +#endif |
|
660 |
|
661 /* Called when the pipeline is prerolled, that is when at start or after a |
|
662 * seek, the first audio and video buffers are queued in the sinks. |
|
663 */ |
|
664 static GstFlowReturn NewPrerollCb(GstAppSink *aSink, gpointer aUserData); |
|
665 void VideoPreroll(); |
|
666 void AudioPreroll(); |
|
667 |