ビデオのエンコード例がhttp://www.delphiffmpeg.comからです - TBitmapのセットをYCbCr(YUV)に変換する必要があります。例では、ダミーの色が含まれていますデルファイ、TBitmap(rgb)からYCbCrの色のフォーマット
(* encode 1 second of video *)
idx := 1;
for i := 0 to 25 - 1 do
begin
av_init_packet(@pkt);
pkt.data := nil; // packet data will be allocated by the encoder
pkt.size := 0;
//fflush(stdout);
(* prepare a dummy image *)
(* Y *)
for y := 0 to c.height - 1 do
for x := 0 to c.width - 1 do
PByte(@PAnsiChar(frame.data[0])[y * frame.linesize[0] + x])^ := x + y + i * 3;
(* Cb and Cr *)
for y := 0 to c.height div 2 - 1 do
for x := 0 to c.width div 2 - 1 do
begin
PByte(@PAnsiChar(frame.data[1])[y * frame.linesize[1] + x])^ := 128 + y + i * 2;
PByte(@PAnsiChar(frame.data[2])[y * frame.linesize[2] + x])^ := 64 + x + i * 5;
end;
frame.pts := i;
(* encode the image *)
ret := avcodec_encode_video2(c, @pkt, frame, @got_output);
if ret < 0 then
begin
Writeln(ErrOutput, 'Error encoding frame');
ExitCode := 1;
Exit;
end;
if got_output <> 0 then
begin
Writeln(Format('Write frame %d (size=%d)', [idx, pkt.size]));
FileWrite(f, pkt.data^, pkt.size);
av_packet_unref(@pkt);
Inc(idx);
end;
end;
しかし、我々はダミー画像でピクセルを充填するYCbCr..insteadにビットマップを変換する必要があります。ここでは、完全なソースコードは次のとおりです。
(*
* Video encoding example
*)
procedure video_encode_example(const filename: string; codec_id: TAVCodecID);
const
endcode: array[0..3] of Byte = (0, 0, 1, $b7);
var
codec: PAVCodec;
c: PAVCodecContext;
idx, i, ret, x, y, got_output: Integer;
f: THandle;
frame: PAVFrame;
pkt: TAVPacket;
begin
Writeln(Format('Encode video file %s', [filename]));
(* find the mpeg1 video encoder *)
codec := avcodec_find_encoder(codec_id);
if not Assigned(codec) then
begin
Writeln(ErrOutput, 'Codec not found');
ExitCode := 1;
Exit;
end;
c := avcodec_alloc_context3(codec);
if not Assigned(c) then
begin
Writeln(ErrOutput, 'Could not allocate video codec context');
ExitCode := 1;
Exit;
end;
(* put sample parameters *)
c.bit_rate := 400000;
(* resolution must be a multiple of two *)
c.width := 352;
c.height := 288;
(* frames per second *)
c.time_base.num := 1;
c.time_base.den := 25;
(* emit one intra frame every ten frames
* check frame pict_type before passing frame
* to encoder, if frame->pict_type is AV_PICTURE_TYPE_I
* then gop_size is ignored and the output of encoder
* will always be I frame irrespective to gop_size
*)
c.gop_size := 10;
c.max_b_frames := 1;
c.pix_fmt := AV_PIX_FMT_YUV420P;
if codec_id = AV_CODEC_ID_H264 then
av_opt_set(c.priv_data, 'preset', 'slow', 0);
(* open it *)
if avcodec_open2(c, codec, nil) < 0 then
begin
Writeln(ErrOutput, 'Could not open codec');
ExitCode := 1;
Exit;
end;
f := FileCreate(filename);
if f = INVALID_HANDLE_VALUE then
begin
Writeln(ErrOutput, Format('Could not open %s', [filename]));
ExitCode := 1;
Exit;
end;
frame := av_frame_alloc();
if not Assigned(frame) then
begin
Writeln(ErrOutput, 'Could not allocate video frame');
ExitCode := 1;
Exit;
end;
frame.format := Ord(c.pix_fmt);
frame.width := c.width;
frame.height := c.height;
(* the image can be allocated by any means and av_image_alloc() is
* just the most convenient way if av_malloc() is to be used *)
ret := av_image_alloc(@frame.data[0], @frame.linesize[0], c.width, c.height,
c.pix_fmt, 32);
if ret < 0 then
begin
Writeln(ErrOutput, 'Could not allocate raw picture buffer');
ExitCode := 1;
Exit;
end;
(* encode 1 second of video *)
idx := 1;
for i := 0 to 25 - 1 do
begin
av_init_packet(@pkt);
pkt.data := nil; // packet data will be allocated by the encoder
pkt.size := 0;
//fflush(stdout);
(* prepare a dummy image *)
(* Y *)
for y := 0 to c.height - 1 do
for x := 0 to c.width - 1 do
PByte(@PAnsiChar(frame.data[0])[y * frame.linesize[0] + x])^ := x + y + i * 3;
(* Cb and Cr *)
for y := 0 to c.height div 2 - 1 do
for x := 0 to c.width div 2 - 1 do
begin
PByte(@PAnsiChar(frame.data[1])[y * frame.linesize[1] + x])^ := 128 + y + i * 2;
PByte(@PAnsiChar(frame.data[2])[y * frame.linesize[2] + x])^ := 64 + x + i * 5;
end;
frame.pts := i;
(* encode the image *)
ret := avcodec_encode_video2(c, @pkt, frame, @got_output);
if ret < 0 then
begin
Writeln(ErrOutput, 'Error encoding frame');
ExitCode := 1;
Exit;
end;
if got_output <> 0 then
begin
Writeln(Format('Write frame %d (size=%d)', [idx, pkt.size]));
FileWrite(f, pkt.data^, pkt.size);
av_packet_unref(@pkt);
Inc(idx);
end;
end;
(* get the delayed frames *)
repeat
//fflush(stdout);
ret := avcodec_encode_video2(c, @pkt, nil, @got_output);
if ret < 0 then
begin
Writeln(ErrOutput, 'Error encoding frame');
ExitCode := 1;
Exit;
end;
if got_output <> 0 then
begin
Writeln(Format('Write frame %d (size=%d)', [idx, pkt.size]));
FileWrite(f, pkt.data^, pkt.size);
av_packet_unref(@pkt);
Inc(idx);
end;
until got_output = 0;
(* add sequence end code to have a real mpeg file *)
FileWrite(f, endcode[0], SizeOf(endcode));
FileClose(f);
avcodec_close(c);
av_free(c);
av_freep(@frame.data[0]);
av_frame_free(@frame);
Writeln('');
end;
はい、私たちは、この公式を知っているが、我々はdivの2 c.heightまで行く(* Cb、Crの*)ループで何をすべき - 1とc.width div要素を2 - 1?これは
(Y)
for y := 0 to c.height - 1 do
begin
Line := image.ScanLine[y];
for x := 0 to c.width - 1 do
begin
Yy := Round(Line[x].R*0.29900 + Line[x].G*0.58700 + Line[x].B*0.11400);
PByte(@PAnsiChar(frame.data[0])[y * frame.linesize[0] + x])^ := Yy;
end;
end;
(Cb and Cr)
for y := 0 to c.height div 2 - 1 do
begin
Pixels := image.ScanLine[y];
for x := 0 to c.width div 2 - 1 do
begin
Cb := Round(Line[x].R -0.16874 - Line[x].G 0.33126 + Line[x].B * 0.50000) + 128;
Cr := Round(Line[x].R 0.50000 - Line[x].G 0.41869 - Line[x].B * 0.08131) + 64;
PByte(@PAnsiChar(frame.data[1])[y * frame.linesize[1] + x])^ := Cr;
PByte(@PAnsiChar(frame.data[2])[y * frame.linesize[2] + x])^ := Cb;
//PByte(@PAnsiChar(frame.data[1])[y frame.linesize[1] + x])^ := 128 + y + i 2;
//PByte(@PAnsiChar(frame.data[2])[y frame.linesize[2] + x])^ := 64 + x + i 5;
end;
end;
を固定してください方法:すべての我々の実験は、正しい画像ジオメトリーが、間違った色を作る...ここで我々が持っているものでしょうか?
問題は何ですか? –
申し訳ありませんが、メインの質問 –