Merge branch 'release/0.7' into oldabi

* release/0.7: (33 commits)
  Update for 0.7.8
  svq1dec: call avcodec_set_dimensions() after dimensions changed. Fixes NGS00148
  vp3dec: Check coefficient index in vp3_dequant(). Fixes NGS00145
  qdm2dec: fix buffer overflow. Fixes NGS00144
  h264: Fix invalid interlaced progressive MB combinations for direct mode prediction. Fixes Ticket312
  mpegvideo: don't use ff_mspel_motion() for vc1. Fixes Ticket655
  imgutils: Fix illegal read.
  ac3probe: Detect Sonic Foundry Soft Encode AC3 as raw AC3. Our ac3 code chain can handle it fine. More ideal would be to write a demuxer that actually extracts what it can from the additional headers and uses it for whatever it can be used for.
  mjpeg: support mpo Fixes stereoscopic_photo.mpo
  Add a version bump and APIchanges entry for avcodec_open2 and avformat_find_stream_info.
  lavf: fix multiplication overflow in avformat_find_stream_info()
  lavf: fix invalid reads in avformat_find_stream_info()
  lavf: add avformat_find_stream_info()
  lavc: fix parentheses placement in avcodec_open2().
  lavc: introduce avcodec_open2() as a replacement for avcodec_open().
  rawdec: use a default sample rate if none is specified. Fixes "ffmpeg -f s16le -i /dev/zero"
  rawdec: add check on sample_rate
  qdm2dec: check remaining input bits in the mainloop of qdm2_fft_decode_tones(). This is necessary but likely not sufficient to prevent out of array reads.
  cinepak: check strip_size
  wma: Check channel number before init. Fixes Ticket240
  ...

Conflicts:
	RELEASE
	doc/APIchanges
	libavcodec/avcodec.h
	libavcodec/utils.c
	libavcodec/version.h
	libavdevice/v4l2.c
	libavformat/utils.c

Merged-by: Michael Niedermayer <michaelni@gmx.at>
Michael Niedermayer, 2011-11-22 01:43:58 +01:00
commit b55aca6b8b
25 changed files with 169 additions and 23 deletions

@@ -31,7 +31,7 @@ PROJECT_NAME = FFmpeg
 # This could be handy for archiving the generated documentation or
 # if some version control system is used.
 
-PROJECT_NUMBER = 0.7.7
+PROJECT_NUMBER = 0.7.8
 
 # The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute)
 # base path where the generated documentation will be put.

@@ -1 +1 @@
-0.7.7
+0.7.8

@@ -2135,7 +2135,12 @@ static int stream_component_open(VideoState *is, int stream_index)
 
     avctx->workaround_bugs = workaround_bugs;
     avctx->lowres = lowres;
-    if(lowres) avctx->flags |= CODEC_FLAG_EMU_EDGE;
+    if(avctx->lowres > codec->max_lowres){
+        av_log(avctx, AV_LOG_WARNING, "The maximum value for lowres supported by the decoder is %d\n",
+               codec->max_lowres);
+        avctx->lowres= codec->max_lowres;
+    }
+    if(avctx->lowres) avctx->flags |= CODEC_FLAG_EMU_EDGE;
     avctx->idct_algo= idct;
     if(fast) avctx->flags2 |= CODEC_FLAG2_FAST;
     avctx->skip_frame= skip_frame;

@@ -365,6 +365,8 @@ static int cinepak_decode (CinepakContext *s)
            s->strips[i].x2 = s->avctx->width;
 
        strip_size = AV_RB24 (&s->data[1]) - 12;
+       if(strip_size < 0)
+           return -1;
        s->data += 12;
        strip_size = ((s->data + strip_size) > eod) ? (eod - s->data) : strip_size;
 

@@ -1079,7 +1079,7 @@ static av_cold int cook_decode_init(AVCodecContext *avctx)
            q->subpacket[s].subbands = bytestream_get_be16(&edata_ptr);
            extradata_size -= 8;
        }
-       if (avctx->extradata_size >= 8){
+       if (extradata_size >= 8){
            bytestream_get_be32(&edata_ptr); //Unknown unused
            q->subpacket[s].js_subband_start = bytestream_get_be16(&edata_ptr);
            q->subpacket[s].js_vlc_bits = bytestream_get_be16(&edata_ptr);

@@ -253,6 +253,10 @@ static void pred_spatial_direct_motion(H264Context * const h, int *mb_type){
            mb_type_col[1] = h->ref_list[1][0].f.mb_type[mb_xy + s->mb_stride];
            b8_stride = 2+4*s->mb_stride;
            b4_stride *= 6;
+           if(IS_INTERLACED(mb_type_col[0]) != IS_INTERLACED(mb_type_col[1])){
+               mb_type_col[0] &= ~MB_TYPE_INTERLACED;
+               mb_type_col[1] &= ~MB_TYPE_INTERLACED;
+           }
 
            sub_mb_type |= MB_TYPE_16x16|MB_TYPE_DIRECT2; /* B_SUB_8x8 */
            if( (mb_type_col[0] & MB_TYPE_16x16_OR_INTRA)

@@ -725,7 +725,7 @@ static av_always_inline void MPV_motion_internal(MpegEncContext *s,
                    0, 0, 0,
                    ref_picture, pix_op, qpix_op,
                    s->mv[dir][0][0], s->mv[dir][0][1], 16);
-    }else if(!is_mpeg12 && (CONFIG_WMV2_DECODER || CONFIG_WMV2_ENCODER) && s->mspel){
+    }else if(!is_mpeg12 && (CONFIG_WMV2_DECODER || CONFIG_WMV2_ENCODER) && s->mspel && s->codec_id == CODEC_ID_WMV2){
        ff_mspel_motion(s, dest_y, dest_cb, dest_cr,
                    ref_picture, pix_op,
                    s->mv[dir][0][0], s->mv[dir][0][1], 16);

@@ -471,7 +471,8 @@ static int decode_frame(AVCodecContext *avctx,
            avctx->pix_fmt = PIX_FMT_MONOBLACK;
        } else if (s->color_type == PNG_COLOR_TYPE_PALETTE) {
            avctx->pix_fmt = PIX_FMT_PAL8;
-       } else if (s->color_type == PNG_COLOR_TYPE_GRAY_ALPHA) {
+       } else if (s->bit_depth == 8 &&
+                  s->color_type == PNG_COLOR_TYPE_GRAY_ALPHA) {
            avctx->pix_fmt = PIX_FMT_GRAY8A;
        } else {
            goto fail;

@@ -77,6 +77,7 @@ do { \
 #define SAMPLES_NEEDED_2(why) \
     av_log (NULL,AV_LOG_INFO,"This file triggers some missing code. Please contact the developers.\nPosition: %s\n",why);
 
+#define QDM2_MAX_FRAME_SIZE 512
 
 typedef int8_t sb_int8_array[2][30][64];
@@ -169,7 +170,7 @@ typedef struct {
     /// I/O data
     const uint8_t *compressed_data;
     int compressed_size;
-    float output_buffer[1024];
+    float output_buffer[QDM2_MAX_FRAME_SIZE * MPA_MAX_CHANNELS * 2];
 
     /// Synthesis filter
     MPADSPContext mpadsp;
@@ -1328,7 +1329,7 @@ static void qdm2_fft_decode_tones (QDM2Context *q, int duration, GetBitContext *
     local_int_10 = 1 << (q->group_order - duration - 1);
     offset = 1;
 
-    while (1) {
+    while (get_bits_left(gb)>0) {
        if (q->superblocktype_2_3) {
            while ((n = qdm2_get_vlc(gb, &vlc_tab_fft_tone_offset[local_int_8], 1, 2)) < 2) {
                offset = 1;
@@ -1823,7 +1824,8 @@ static av_cold int qdm2_decode_init(AVCodecContext *avctx)
     // something like max decodable tones
     s->group_order = av_log2(s->group_size) + 1;
     s->frame_size = s->group_size / 16; // 16 iterations per super block
-    if (s->frame_size > FF_ARRAY_ELEMS(s->output_buffer) / 2)
+    if (s->frame_size > QDM2_MAX_FRAME_SIZE)
        return AVERROR_INVALIDDATA;
 
     s->sub_sampling = s->fft_order - 7;
@@ -1894,6 +1896,9 @@ static int qdm2_decode (QDM2Context *q, const uint8_t *in, int16_t *out)
     int ch, i;
     const int frame_size = (q->frame_size * q->channels);
 
+    if((unsigned)frame_size > FF_ARRAY_ELEMS(q->output_buffer)/2)
+        return -1;
+
     /* select input buffer */
     q->compressed_data = in;
     q->compressed_size = q->checksum_size;

@@ -658,6 +658,7 @@ static int svq1_decode_frame(AVCodecContext *avctx,
        av_dlog(s->avctx, "Error in svq1_decode_frame_header %i\n",result);
        return result;
     }
+    avcodec_set_dimensions(avctx, s->width, s->height);
 
     //FIXME this avoids some confusion for "B frames" without 2 references
     //this should be removed after libavcodec can handle more flexible picture types & ordering

@@ -113,5 +113,8 @@
 #ifndef FF_API_VERY_AGGRESSIVE
 #define FF_API_VERY_AGGRESSIVE (LIBAVCODEC_VERSION_MAJOR < 54)
 #endif
+#ifndef FF_API_AVCODEC_OPEN
+#define FF_API_AVCODEC_OPEN (LIBAVCODEC_VERSION_MAJOR < 54)
+#endif
 
 #endif /* AVCODEC_VERSION_H */

@@ -1308,6 +1308,10 @@ static inline int vp3_dequant(Vp3DecodeContext *s, Vp3Fragment *frag,
        case 1: // zero run
            s->dct_tokens[plane][i]++;
            i += (token >> 2) & 0x7f;
+           if(i>63){
+               av_log(s->avctx, AV_LOG_ERROR, "Coefficient index overflow\n");
+               return -1;
+           }
            block[perm[i]] = (token >> 9) * dequantizer[perm[i]];
            i++;
            break;

@@ -185,7 +185,8 @@ static void vp5_parse_coeff(VP56Context *s)
        model1 = model->coeff_dccv[pt];
        model2 = model->coeff_dcct[pt][ctx];
 
-       for (coeff_idx=0; coeff_idx<64; ) {
+       coeff_idx = 0;
+       for (;;) {
            if (vp56_rac_get_prob(c, model2[0])) {
                if (vp56_rac_get_prob(c, model2[2])) {
                    if (vp56_rac_get_prob(c, model2[3])) {
@@ -222,8 +223,11 @@ static void vp5_parse_coeff(VP56Context *s)
                ct = 0;
                s->coeff_ctx[vp56_b6to4[b]][coeff_idx] = 0;
            }
+           coeff_idx++;
+           if (coeff_idx >= 64)
+               break;
 
-           cg = vp5_coeff_groups[++coeff_idx];
+           cg = vp5_coeff_groups[coeff_idx];
            ctx = s->coeff_ctx[vp56_b6to4[b]][coeff_idx];
            model1 = model->coeff_ract[pt][ct][cg];
            model2 = cg > 2 ? model1 : model->coeff_acct[pt][ct][cg][ctx];

@@ -442,7 +442,8 @@ static void vp6_parse_coeff(VP56Context *s)
        model1 = model->coeff_dccv[pt];
        model2 = model->coeff_dcct[pt][ctx];
 
-       for (coeff_idx=0; coeff_idx<64; ) {
+       coeff_idx = 0;
+       for (;;) {
            if ((coeff_idx>1 && ct==0) || vp56_rac_get_prob(c, model2[0])) {
                /* parse a coeff */
                if (vp56_rac_get_prob(c, model2[2])) {
@@ -483,8 +484,10 @@ static void vp6_parse_coeff(VP56Context *s)
                        run += vp56_rac_get_prob(c, model3[i+8]) << i;
                }
            }
 
-           cg = vp6_coeff_groups[coeff_idx+=run];
+           coeff_idx += run;
+           if (coeff_idx >= 64)
+               break;
+           cg = vp6_coeff_groups[coeff_idx];
            model1 = model2 = model->coeff_ract[pt][ct][cg];
        }

@@ -109,6 +109,11 @@ static int wma_decode_init(AVCodecContext * avctx)
        }
    }
 
+   if(avctx->channels > MAX_CHANNELS){
+       av_log(avctx, AV_LOG_ERROR, "Invalid number of channels (%d)\n", avctx->channels);
+       return -1;
+   }
+
    if(ff_wma_init(avctx, flags2)<0)
        return -1;

@@ -439,7 +439,7 @@ static int v4l2_set_parameters(AVFormatContext *s1, AVFormatParameters *ap)
    struct v4l2_streamparm streamparm = {0};
    struct v4l2_fract *tpf = &streamparm.parm.capture.timeperframe;
    int i, ret;
-   AVRational framerate_q;
+   AVRational framerate_q={0};
 
    streamparm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

@@ -69,16 +69,13 @@ static int query_formats(AVFilterContext *ctx)
        PIX_FMT_BGR555BE, PIX_FMT_BGR555LE,
        PIX_FMT_GRAY16BE, PIX_FMT_GRAY16LE,
        PIX_FMT_YUV420P16LE, PIX_FMT_YUV420P16BE,
        PIX_FMT_YUV422P16LE, PIX_FMT_YUV422P16BE,
        PIX_FMT_YUV444P16LE, PIX_FMT_YUV444P16BE,
        PIX_FMT_NV12, PIX_FMT_NV21,
        PIX_FMT_RGB8, PIX_FMT_BGR8,
        PIX_FMT_RGB4_BYTE, PIX_FMT_BGR4_BYTE,
-       PIX_FMT_YUV444P, PIX_FMT_YUV422P,
+       PIX_FMT_YUV444P, PIX_FMT_YUVJ444P,
        PIX_FMT_YUV420P, PIX_FMT_YUVJ420P,
        PIX_FMT_YUV411P, PIX_FMT_YUV410P,
-       PIX_FMT_YUVJ444P, PIX_FMT_YUVJ422P,
-       PIX_FMT_YUV440P, PIX_FMT_YUVJ440P,
-       PIX_FMT_YUV410P,
        PIX_FMT_YUVA420P, PIX_FMT_GRAY8,
        PIX_FMT_NONE
    };

@@ -248,6 +248,7 @@ OBJS-$(CONFIG_RTPDEC) += rdt.o \
                        rtpdec.o \
                        rtpdec_amr.o \
                        rtpdec_asf.o \
+                       rtpdec_g726.o \
                        rtpdec_h263.o \
                        rtpdec_h264.o \
                        rtpdec_latm.o \

@@ -40,6 +40,8 @@ static int ac3_eac3_probe(AVProbeData *p, enum CodecID expected_codec_id)
    buf2 = buf;
 
    for(frames = 0; buf2 < end; frames++) {
+       if(!memcmp(buf2, "\x1\x10\0\0\0\0\0\0", 8))
+           buf2+=16;
        init_get_bits(&gbc, buf2, 54);
        if(ff_ac3_parse_header(&gbc, &hdr) < 0)
            break;

@@ -59,6 +59,12 @@ int ff_raw_read_header(AVFormatContext *s, AVFormatParameters *ap)
 
            if (s1->sample_rate)
                st->codec->sample_rate = s1->sample_rate;
+           if (st->codec->sample_rate <= 0) {
+               av_log(s, AV_LOG_WARNING, "Invalid sample rate %d specified using default of 44100\n",
+                      st->codec->sample_rate);
+               st->codec->sample_rate= 44100;
+           }
+
            if (s1->channels)
                st->codec->channels = s1->channels;
@@ -243,7 +249,7 @@ AVInputFormat ff_gsm_demuxer = {
 #endif
 
 #if CONFIG_MJPEG_DEMUXER
-FF_DEF_RAWVIDEO_DEMUXER(mjpeg, "raw MJPEG video", NULL, "mjpg,mjpeg", CODEC_ID_MJPEG)
+FF_DEF_RAWVIDEO_DEMUXER(mjpeg, "raw MJPEG video", NULL, "mjpg,mjpeg,mpo", CODEC_ID_MJPEG)
 #endif
 
 #if CONFIG_MLP_DEMUXER

@@ -83,6 +83,11 @@ void av_register_rtp_dynamic_payload_handlers(void)
    ff_register_dynamic_payload_handler(&ff_qt_rtp_vid_handler);
    ff_register_dynamic_payload_handler(&ff_quicktime_rtp_aud_handler);
    ff_register_dynamic_payload_handler(&ff_quicktime_rtp_vid_handler);
+
+   ff_register_dynamic_payload_handler(&ff_g726_16_dynamic_handler);
+   ff_register_dynamic_payload_handler(&ff_g726_24_dynamic_handler);
+   ff_register_dynamic_payload_handler(&ff_g726_32_dynamic_handler);
+   ff_register_dynamic_payload_handler(&ff_g726_40_dynamic_handler);
 }
 
 RTPDynamicProtocolHandler *ff_rtp_handler_find_by_name(const char *name,

@@ -33,6 +33,10 @@ int ff_wms_parse_sdp_a_line(AVFormatContext *s, const char *p);
 
 extern RTPDynamicProtocolHandler ff_amr_nb_dynamic_handler;
 extern RTPDynamicProtocolHandler ff_amr_wb_dynamic_handler;
+extern RTPDynamicProtocolHandler ff_g726_16_dynamic_handler;
+extern RTPDynamicProtocolHandler ff_g726_24_dynamic_handler;
+extern RTPDynamicProtocolHandler ff_g726_32_dynamic_handler;
+extern RTPDynamicProtocolHandler ff_g726_40_dynamic_handler;
 extern RTPDynamicProtocolHandler ff_h263_1998_dynamic_handler;
 extern RTPDynamicProtocolHandler ff_h263_2000_dynamic_handler;
 extern RTPDynamicProtocolHandler ff_h264_dynamic_handler;

libavformat/rtpdec_g726.c (new file)

@@ -0,0 +1,94 @@
+/*
+ * Copyright (c) 2011 Miroslav Slugeň <Thunder.m@seznam.cz>
+ *
+ * This file is part of FFmpeg.
+ *
+ * FFmpeg is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * FFmpeg is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with FFmpeg; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ */
+
+#include "avformat.h"
+#include "rtpdec_formats.h"
+
+static int g726_16_parse_sdp_line(AVFormatContext *s, int st_index,
+                                  PayloadContext *data, const char *line)
+{
+    AVStream *stream = s->streams[st_index];
+    AVCodecContext *codec = stream->codec;
+
+    codec->bit_rate = 16000;
+
+    return 0;
+}
+
+static int g726_24_parse_sdp_line(AVFormatContext *s, int st_index,
+                                  PayloadContext *data, const char *line)
+{
+    AVStream *stream = s->streams[st_index];
+    AVCodecContext *codec = stream->codec;
+
+    codec->bit_rate = 24000;
+
+    return 0;
+}
+
+static int g726_32_parse_sdp_line(AVFormatContext *s, int st_index,
+                                  PayloadContext *data, const char *line)
+{
+    AVStream *stream = s->streams[st_index];
+    AVCodecContext *codec = stream->codec;
+
+    codec->bit_rate = 32000;
+
+    return 0;
+}
+
+static int g726_40_parse_sdp_line(AVFormatContext *s, int st_index,
+                                  PayloadContext *data, const char *line)
+{
+    AVStream *stream = s->streams[st_index];
+    AVCodecContext *codec = stream->codec;
+
+    codec->bit_rate = 40000;
+
+    return 0;
+}
+
+RTPDynamicProtocolHandler ff_g726_16_dynamic_handler = {
+    .enc_name         = "G726-16",
+    .codec_type       = AVMEDIA_TYPE_AUDIO,
+    .codec_id         = CODEC_ID_ADPCM_G726,
+    .parse_sdp_a_line = g726_16_parse_sdp_line,
+};
+
+RTPDynamicProtocolHandler ff_g726_24_dynamic_handler = {
+    .enc_name         = "G726-24",
+    .codec_type       = AVMEDIA_TYPE_AUDIO,
+    .codec_id         = CODEC_ID_ADPCM_G726,
+    .parse_sdp_a_line = g726_24_parse_sdp_line,
+};
+
+RTPDynamicProtocolHandler ff_g726_32_dynamic_handler = {
+    .enc_name         = "G726-32",
+    .codec_type       = AVMEDIA_TYPE_AUDIO,
+    .codec_id         = CODEC_ID_ADPCM_G726,
+    .parse_sdp_a_line = g726_32_parse_sdp_line,
+};
+
+RTPDynamicProtocolHandler ff_g726_40_dynamic_handler = {
+    .enc_name         = "G726-40",
+    .codec_type       = AVMEDIA_TYPE_AUDIO,
+    .codec_id         = CODEC_ID_ADPCM_G726,
+    .parse_sdp_a_line = g726_40_parse_sdp_line,
+};

@@ -2493,7 +2493,7 @@ int avformat_find_stream_info(AVFormatContext *ic, AVDictionary **options)
                for (i=1; i<FF_ARRAY_ELEMS(st->info->duration_error); i++) {
                    int framerate= get_std_framerate(i);
                    int ticks= lrintf(dur*framerate/(1001*12));
-                   double error= dur - ticks*1001*12/(double)framerate;
+                   double error = dur - (double)ticks*1001*12 / framerate;
                    st->info->duration_error[i] += error*error;
                }
                st->info->duration_count++;

@@ -126,7 +126,7 @@ int av_image_fill_pointers(uint8_t *data[4], enum PixelFormat pix_fmt, int heigh
        has_plane[desc->comp[i].plane] = 1;
 
    total_size = size[0];
-   for (i = 1; has_plane[i] && i < 4; i++) {
+   for (i = 1; i < 4 && has_plane[i]; i++) {
        int h, s = (i == 1 || i == 2) ? desc->log2_chroma_h : 0;
        data[i] = data[i-1] + size[i-1];
        h = (height + (1 << s) - 1) >> s;